ViewVC Help
View File | Revision Log | Show Annotations | Download File | Root Listing
root/jsr166/jsr166/src/main/java/util/concurrent/ConcurrentHashMap.java
Revision: 1.208
Committed: Thu Apr 25 16:15:11 2013 UTC (11 years, 1 month ago) by dl
Branch: MAIN
Changes since 1.207: +44 -37 lines
Log Message:
Incorporate serialization changes suggested by Peter Levart

File Contents

# Content
1 /*
2 * Written by Doug Lea with assistance from members of JCP JSR-166
3 * Expert Group and released to the public domain, as explained at
4 * http://creativecommons.org/publicdomain/zero/1.0/
5 */
6
7 package java.util.concurrent;
8 import java.io.Serializable;
9 import java.io.ObjectStreamField;
10 import java.lang.reflect.ParameterizedType;
11 import java.lang.reflect.Type;
12 import java.util.AbstractCollection;
13 import java.util.AbstractMap;
14 import java.util.AbstractSet;
15 import java.util.Arrays;
16 import java.util.Collection;
17 import java.util.Comparator;
18 import java.util.ConcurrentModificationException;
19 import java.util.Enumeration;
20 import java.util.HashMap;
21 import java.util.Hashtable;
22 import java.util.Iterator;
23 import java.util.Map;
24 import java.util.NoSuchElementException;
25 import java.util.Set;
26 import java.util.Spliterator;
27 import java.util.concurrent.ConcurrentMap;
28 import java.util.concurrent.ForkJoinPool;
29 import java.util.concurrent.atomic.AtomicReference;
30 import java.util.concurrent.locks.AbstractQueuedSynchronizer;
31 import java.util.concurrent.locks.ReentrantLock;
32 import java.util.function.BiConsumer;
33 import java.util.function.BiFunction;
34 import java.util.function.BinaryOperator;
35 import java.util.function.Consumer;
36 import java.util.function.DoubleBinaryOperator;
37 import java.util.function.Function;
38 import java.util.function.IntBinaryOperator;
39 import java.util.function.LongBinaryOperator;
40 import java.util.function.ToDoubleBiFunction;
41 import java.util.function.ToDoubleFunction;
42 import java.util.function.ToIntBiFunction;
43 import java.util.function.ToIntFunction;
44 import java.util.function.ToLongBiFunction;
45 import java.util.function.ToLongFunction;
46
47 /**
48 * A hash table supporting full concurrency of retrievals and
49 * high expected concurrency for updates. This class obeys the
50 * same functional specification as {@link java.util.Hashtable}, and
51 * includes versions of methods corresponding to each method of
52 * {@code Hashtable}. However, even though all operations are
53 * thread-safe, retrieval operations do <em>not</em> entail locking,
54 * and there is <em>not</em> any support for locking the entire table
55 * in a way that prevents all access. This class is fully
56 * interoperable with {@code Hashtable} in programs that rely on its
57 * thread safety but not on its synchronization details.
58 *
59 * <p>Retrieval operations (including {@code get}) generally do not
60 * block, so may overlap with update operations (including {@code put}
61 * and {@code remove}). Retrievals reflect the results of the most
62 * recently <em>completed</em> update operations holding upon their
63 * onset. (More formally, an update operation for a given key bears a
64 * <em>happens-before</em> relation with any (non-null) retrieval for
65 * that key reporting the updated value.) For aggregate operations
66 * such as {@code putAll} and {@code clear}, concurrent retrievals may
67 * reflect insertion or removal of only some entries. Similarly,
68 * Iterators and Enumerations return elements reflecting the state of
69 * the hash table at some point at or since the creation of the
70 * iterator/enumeration. They do <em>not</em> throw {@link
71 * ConcurrentModificationException}. However, iterators are designed
72 * to be used by only one thread at a time. Bear in mind that the
73 * results of aggregate status methods including {@code size}, {@code
74 * isEmpty}, and {@code containsValue} are typically useful only when
75 * a map is not undergoing concurrent updates in other threads.
76 * Otherwise the results of these methods reflect transient states
77 * that may be adequate for monitoring or estimation purposes, but not
78 * for program control.
79 *
80 * <p>The table is dynamically expanded when there are too many
81 * collisions (i.e., keys that have distinct hash codes but fall into
82 * the same slot modulo the table size), with the expected average
83 * effect of maintaining roughly two bins per mapping (corresponding
84 * to a 0.75 load factor threshold for resizing). There may be much
85 * variance around this average as mappings are added and removed, but
86 * overall, this maintains a commonly accepted time/space tradeoff for
87 * hash tables. However, resizing this or any other kind of hash
88 * table may be a relatively slow operation. When possible, it is a
89 * good idea to provide a size estimate as an optional {@code
90 * initialCapacity} constructor argument. An additional optional
91 * {@code loadFactor} constructor argument provides a further means of
92 * customizing initial table capacity by specifying the table density
93 * to be used in calculating the amount of space to allocate for the
94 * given number of elements. Also, for compatibility with previous
95 * versions of this class, constructors may optionally specify an
96 * expected {@code concurrencyLevel} as an additional hint for
97 * internal sizing. Note that using many keys with exactly the same
98 * {@code hashCode()} is a sure way to slow down performance of any
99 * hash table.
100 *
101 * <p>A {@link Set} projection of a ConcurrentHashMap may be created
102 * (using {@link #newKeySet()} or {@link #newKeySet(int)}), or viewed
 * (using {@link #keySet(Object)}) when only keys are of interest, and the
104 * mapped values are (perhaps transiently) not used or all take the
105 * same mapping value.
106 *
 * <p>A ConcurrentHashMap can be used as a scalable frequency map (a
 * form of histogram or multiset) by using {@link
 * java.util.concurrent.atomic.LongAdder} values and initializing via
 * {@link #computeIfAbsent computeIfAbsent}. For example, to add a count
 * to a {@code ConcurrentHashMap<String,LongAdder> freqs}, you can use
 * {@code freqs.computeIfAbsent(k, k -> new LongAdder()).increment();}
113 *
114 * <p>This class and its views and iterators implement all of the
115 * <em>optional</em> methods of the {@link Map} and {@link Iterator}
116 * interfaces.
117 *
118 * <p>Like {@link Hashtable} but unlike {@link HashMap}, this class
119 * does <em>not</em> allow {@code null} to be used as a key or value.
120 *
 * <p>ConcurrentHashMaps support sequential and parallel bulk
 * operations. (Parallel forms use the {@link
123 * ForkJoinPool#commonPool()}). Tasks that may be used in other
124 * contexts are available in class {@link ForkJoinTasks}. These
125 * operations are designed to be safely, and often sensibly, applied
126 * even with maps that are being concurrently updated by other
127 * threads; for example, when computing a snapshot summary of the
128 * values in a shared registry. There are three kinds of operation,
129 * each with four forms, accepting functions with Keys, Values,
130 * Entries, and (Key, Value) arguments and/or return values. Because
131 * the elements of a ConcurrentHashMap are not ordered in any
132 * particular way, and may be processed in different orders in
133 * different parallel executions, the correctness of supplied
134 * functions should not depend on any ordering, or on any other
135 * objects or values that may transiently change while computation is
136 * in progress; and except for forEach actions, should ideally be
137 * side-effect-free.
138 *
139 * <ul>
140 * <li> forEach: Perform a given action on each element.
141 * A variant form applies a given transformation on each element
142 * before performing the action.</li>
143 *
144 * <li> search: Return the first available non-null result of
145 * applying a given function on each element; skipping further
146 * search when a result is found.</li>
147 *
148 * <li> reduce: Accumulate each element. The supplied reduction
149 * function cannot rely on ordering (more formally, it should be
150 * both associative and commutative). There are five variants:
151 *
152 * <ul>
153 *
154 * <li> Plain reductions. (There is not a form of this method for
155 * (key, value) function arguments since there is no corresponding
156 * return type.)</li>
157 *
158 * <li> Mapped reductions that accumulate the results of a given
159 * function applied to each element.</li>
160 *
161 * <li> Reductions to scalar doubles, longs, and ints, using a
162 * given basis value.</li>
163 *
164 * </ul>
165 * </li>
166 * </ul>
167 *
168 * <p>The concurrency properties of bulk operations follow
169 * from those of ConcurrentHashMap: Any non-null result returned
170 * from {@code get(key)} and related access methods bears a
171 * happens-before relation with the associated insertion or
172 * update. The result of any bulk operation reflects the
173 * composition of these per-element relations (but is not
174 * necessarily atomic with respect to the map as a whole unless it
175 * is somehow known to be quiescent). Conversely, because keys
176 * and values in the map are never null, null serves as a reliable
177 * atomic indicator of the current lack of any result. To
178 * maintain this property, null serves as an implicit basis for
179 * all non-scalar reduction operations. For the double, long, and
180 * int versions, the basis should be one that, when combined with
181 * any other value, returns that other value (more formally, it
182 * should be the identity element for the reduction). Most common
183 * reductions have these properties; for example, computing a sum
184 * with basis 0 or a minimum with basis MAX_VALUE.
185 *
186 * <p>Search and transformation functions provided as arguments
187 * should similarly return null to indicate the lack of any result
188 * (in which case it is not used). In the case of mapped
189 * reductions, this also enables transformations to serve as
190 * filters, returning null (or, in the case of primitive
191 * specializations, the identity basis) if the element should not
192 * be combined. You can create compound transformations and
193 * filterings by composing them yourself under this "null means
194 * there is nothing there now" rule before using them in search or
195 * reduce operations.
196 *
197 * <p>Methods accepting and/or returning Entry arguments maintain
198 * key-value associations. They may be useful for example when
199 * finding the key for the greatest value. Note that "plain" Entry
200 * arguments can be supplied using {@code new
201 * AbstractMap.SimpleEntry(k,v)}.
202 *
203 * <p>Bulk operations may complete abruptly, throwing an
204 * exception encountered in the application of a supplied
205 * function. Bear in mind when handling such exceptions that other
206 * concurrently executing functions could also have thrown
207 * exceptions, or would have done so if the first exception had
208 * not occurred.
209 *
210 * <p>Speedups for parallel compared to sequential forms are common
211 * but not guaranteed. Parallel operations involving brief functions
212 * on small maps may execute more slowly than sequential forms if the
213 * underlying work to parallelize the computation is more expensive
214 * than the computation itself. Similarly, parallelization may not
215 * lead to much actual parallelism if all processors are busy
216 * performing unrelated tasks.
217 *
218 * <p>All arguments to all task methods must be non-null.
219 *
220 * <p>This class is a member of the
221 * <a href="{@docRoot}/../technotes/guides/collections/index.html">
222 * Java Collections Framework</a>.
223 *
224 * @since 1.5
225 * @author Doug Lea
226 * @param <K> the type of keys maintained by this map
227 * @param <V> the type of mapped values
228 */
229 public class ConcurrentHashMap<K,V>
230 implements ConcurrentMap<K,V>, Serializable {
231 private static final long serialVersionUID = 7249069246763182397L;
232
233 /*
234 * Overview:
235 *
236 * The primary design goal of this hash table is to maintain
237 * concurrent readability (typically method get(), but also
238 * iterators and related methods) while minimizing update
239 * contention. Secondary goals are to keep space consumption about
240 * the same or better than java.util.HashMap, and to support high
241 * initial insertion rates on an empty table by many threads.
242 *
243 * Each key-value mapping is held in a Node. Because Node key
244 * fields can contain special values, they are defined using plain
245 * Object types (not type "K"). This leads to a lot of explicit
246 * casting (and many explicit warning suppressions to tell
247 * compilers not to complain about it). It also allows some of the
248 * public methods to be factored into a smaller number of internal
249 * methods (although sadly not so for the five variants of
250 * put-related operations). The validation-based approach
251 * explained below leads to a lot of code sprawl because
252 * retry-control precludes factoring into smaller methods.
253 *
254 * The table is lazily initialized to a power-of-two size upon the
255 * first insertion. Each bin in the table normally contains a
256 * list of Nodes (most often, the list has only zero or one Node).
257 * Table accesses require volatile/atomic reads, writes, and
258 * CASes. Because there is no other way to arrange this without
259 * adding further indirections, we use intrinsics
260 * (sun.misc.Unsafe) operations. The lists of nodes within bins
261 * are always accurately traversable under volatile reads, so long
262 * as lookups check hash code and non-nullness of value before
263 * checking key equality.
264 *
265 * We use the top (sign) bit of Node hash fields for control
266 * purposes -- it is available anyway because of addressing
267 * constraints. Nodes with negative hash fields are forwarding
268 * nodes to either TreeBins or resized tables. The lower 31 bits
269 * of each normal Node's hash field contain a transformation of
270 * the key's hash code.
271 *
272 * Insertion (via put or its variants) of the first node in an
273 * empty bin is performed by just CASing it to the bin. This is
274 * by far the most common case for put operations under most
275 * key/hash distributions. Other update operations (insert,
276 * delete, and replace) require locks. We do not want to waste
277 * the space required to associate a distinct lock object with
278 * each bin, so instead use the first node of a bin list itself as
279 * a lock. Locking support for these locks relies on builtin
280 * "synchronized" monitors.
281 *
282 * Using the first node of a list as a lock does not by itself
283 * suffice though: When a node is locked, any update must first
284 * validate that it is still the first node after locking it, and
285 * retry if not. Because new nodes are always appended to lists,
286 * once a node is first in a bin, it remains first until deleted
287 * or the bin becomes invalidated (upon resizing). However,
288 * operations that only conditionally update may inspect nodes
289 * until the point of update. This is a converse of sorts to the
290 * lazy locking technique described by Herlihy & Shavit.
291 *
292 * The main disadvantage of per-bin locks is that other update
293 * operations on other nodes in a bin list protected by the same
294 * lock can stall, for example when user equals() or mapping
295 * functions take a long time. However, statistically, under
296 * random hash codes, this is not a common problem. Ideally, the
297 * frequency of nodes in bins follows a Poisson distribution
298 * (http://en.wikipedia.org/wiki/Poisson_distribution) with a
299 * parameter of about 0.5 on average, given the resizing threshold
300 * of 0.75, although with a large variance because of resizing
301 * granularity. Ignoring variance, the expected occurrences of
302 * list size k are (exp(-0.5) * pow(0.5, k) / factorial(k)). The
303 * first values are:
304 *
305 * 0: 0.60653066
306 * 1: 0.30326533
307 * 2: 0.07581633
308 * 3: 0.01263606
309 * 4: 0.00157952
310 * 5: 0.00015795
311 * 6: 0.00001316
312 * 7: 0.00000094
313 * 8: 0.00000006
314 * more: less than 1 in ten million
315 *
316 * Lock contention probability for two threads accessing distinct
317 * elements is roughly 1 / (8 * #elements) under random hashes.
318 *
319 * Actual hash code distributions encountered in practice
320 * sometimes deviate significantly from uniform randomness. This
321 * includes the case when N > (1<<30), so some keys MUST collide.
322 * Similarly for dumb or hostile usages in which multiple keys are
323 * designed to have identical hash codes. Also, although we guard
324 * against the worst effects of this (see method spread), sets of
325 * hashes may differ only in bits that do not impact their bin
326 * index for a given power-of-two mask. So we use a secondary
327 * strategy that applies when the number of nodes in a bin exceeds
328 * a threshold, and at least one of the keys implements
329 * Comparable. These TreeBins use a balanced tree to hold nodes
330 * (a specialized form of red-black trees), bounding search time
331 * to O(log N). Each search step in a TreeBin is at least twice as
332 * slow as in a regular list, but given that N cannot exceed
333 * (1<<64) (before running out of addresses) this bounds search
334 * steps, lock hold times, etc, to reasonable constants (roughly
335 * 100 nodes inspected per operation worst case) so long as keys
336 * are Comparable (which is very common -- String, Long, etc).
337 * TreeBin nodes (TreeNodes) also maintain the same "next"
338 * traversal pointers as regular nodes, so can be traversed in
339 * iterators in the same way.
340 *
341 * The table is resized when occupancy exceeds a percentage
342 * threshold (nominally, 0.75, but see below). Any thread
343 * noticing an overfull bin may assist in resizing after the
344 * initiating thread allocates and sets up the replacement
345 * array. However, rather than stalling, these other threads may
346 * proceed with insertions etc. The use of TreeBins shields us
347 * from the worst case effects of overfilling while resizes are in
348 * progress. Resizing proceeds by transferring bins, one by one,
349 * from the table to the next table. To enable concurrency, the
350 * next table must be (incrementally) prefilled with place-holders
351 * serving as reverse forwarders to the old table. Because we are
352 * using power-of-two expansion, the elements from each bin must
353 * either stay at same index, or move with a power of two
354 * offset. We eliminate unnecessary node creation by catching
355 * cases where old nodes can be reused because their next fields
356 * won't change. On average, only about one-sixth of them need
357 * cloning when a table doubles. The nodes they replace will be
358 * garbage collectable as soon as they are no longer referenced by
359 * any reader thread that may be in the midst of concurrently
360 * traversing table. Upon transfer, the old table bin contains
361 * only a special forwarding node (with hash field "MOVED") that
362 * contains the next table as its key. On encountering a
363 * forwarding node, access and update operations restart, using
364 * the new table.
365 *
366 * Each bin transfer requires its bin lock, which can stall
367 * waiting for locks while resizing. However, because other
368 * threads can join in and help resize rather than contend for
369 * locks, average aggregate waits become shorter as resizing
370 * progresses. The transfer operation must also ensure that all
371 * accessible bins in both the old and new table are usable by any
372 * traversal. This is arranged by proceeding from the last bin
373 * (table.length - 1) up towards the first. Upon seeing a
374 * forwarding node, traversals (see class Traverser) arrange to
375 * move to the new table without revisiting nodes. However, to
376 * ensure that no intervening nodes are skipped, bin splitting can
377 * only begin after the associated reverse-forwarders are in
378 * place.
379 *
380 * The traversal scheme also applies to partial traversals of
381 * ranges of bins (via an alternate Traverser constructor)
382 * to support partitioned aggregate operations. Also, read-only
383 * operations give up if ever forwarded to a null table, which
384 * provides support for shutdown-style clearing, which is also not
385 * currently implemented.
386 *
387 * Lazy table initialization minimizes footprint until first use,
388 * and also avoids resizings when the first operation is from a
389 * putAll, constructor with map argument, or deserialization.
390 * These cases attempt to override the initial capacity settings,
391 * but harmlessly fail to take effect in cases of races.
392 *
393 * The element count is maintained using a specialization of
394 * LongAdder. We need to incorporate a specialization rather than
395 * just use a LongAdder in order to access implicit
396 * contention-sensing that leads to creation of multiple
397 * Cells. The counter mechanics avoid contention on
398 * updates but can encounter cache thrashing if read too
399 * frequently during concurrent access. To avoid reading so often,
400 * resizing under contention is attempted only upon adding to a
401 * bin already holding two or more nodes. Under uniform hash
402 * distributions, the probability of this occurring at threshold
403 * is around 13%, meaning that only about 1 in 8 puts check
404 * threshold (and after resizing, many fewer do so). The bulk
405 * putAll operation further reduces contention by only committing
406 * count updates upon these size checks.
407 *
408 * Maintaining API and serialization compatibility with previous
409 * versions of this class introduces several oddities. Mainly: We
     * leave untouched but unused constructor arguments referring to
411 * concurrencyLevel. We accept a loadFactor constructor argument,
412 * but apply it only to initial table capacity (which is the only
413 * time that we can guarantee to honor it.) We also declare an
414 * unused "Segment" class that is instantiated in minimal form
415 * only when serializing.
416 */
417
418 /* ---------------- Constants -------------- */
419
    /**
     * The largest possible table capacity. This value must be
     * exactly 1<<30 to stay within Java array allocation and indexing
     * bounds for power of two table sizes, and is further required
     * because the top two bits of 32bit hash fields are used for
     * control purposes.
     */
    private static final int MAXIMUM_CAPACITY = 1 << 30;

    /**
     * The default initial table capacity. Must be a power of 2
     * (i.e., at least 1) and at most MAXIMUM_CAPACITY.
     */
    private static final int DEFAULT_CAPACITY = 16;

    /**
     * The largest possible (non-power of two) array size.
     * Needed by toArray and related methods. (The -8 leaves
     * headroom for array header words on some VMs.)
     */
    static final int MAX_ARRAY_SIZE = Integer.MAX_VALUE - 8;

    /**
     * The default concurrency level for this table. Unused but
     * defined for compatibility with previous versions of this class.
     */
    private static final int DEFAULT_CONCURRENCY_LEVEL = 16;

    /**
     * The load factor for this table. Overrides of this value in
     * constructors affect only the initial table capacity. The
     * actual floating point value isn't normally used -- it is
     * simpler to use expressions such as {@code n - (n >>> 2)} for
     * the associated resizing threshold.
     */
    private static final float LOAD_FACTOR = 0.75f;

    /**
     * The bin count threshold for using a tree rather than list for a
     * bin. The value reflects the approximate break-even point for
     * using tree-based operations.
     */
    private static final int TREE_THRESHOLD = 16;

    /**
     * Minimum number of rebinnings per transfer step. Ranges are
     * subdivided to allow multiple resizer threads. This value
     * serves as a lower bound to avoid resizers encountering
     * excessive memory contention. The value should be at least
     * DEFAULT_CAPACITY.
     */
    private static final int MIN_TRANSFER_STRIDE = 16;

    /*
     * Encodings for Node hash fields. See above for explanation.
     * MOVED has the sign bit set, so (hash < 0) identifies forwarding
     * nodes; HASH_BITS masks a spread hash down to the lower 31 bits.
     */
    static final int MOVED = 0x80000000; // hash field for forwarding nodes
    static final int HASH_BITS = 0x7fffffff; // usable bits of normal node hash

    /** Number of CPUS, to place bounds on some sizings */
    static final int NCPU = Runtime.getRuntime().availableProcessors();

    /**
     * For serialization compatibility: declares the legacy "segments"
     * field (see the unused Segment class noted in the overview) so
     * that previously serialized forms remain readable.
     */
    private static final ObjectStreamField[] serialPersistentFields = {
        new ObjectStreamField("segments", Segment[].class)
    };
485
486 /* ---------------- Counters -------------- */
487
    // Adapted from LongAdder and Striped64.
    // See their internal docs for explanation.

    // A padded cell for distributing counts. The p0-p6/q0-q6 fields
    // are never read; they pad "value" on both sides so that distinct
    // Cells land on distinct cache lines, avoiding false sharing
    // between threads updating different cells.
    static final class Cell {
        volatile long p0, p1, p2, p3, p4, p5, p6;
        volatile long value;
        volatile long q0, q1, q2, q3, q4, q5, q6;
        Cell(long x) { value = x; }
    }
498
    /* ---------------- Fields -------------- */

    /**
     * The array of bins. Lazily initialized upon first insertion.
     * Size is always a power of two. Accessed directly by iterators.
     */
    transient volatile Node<V>[] table;

    /**
     * The next table to use; non-null only while resizing.
     */
    private transient volatile Node<V>[] nextTable;

    /**
     * Base counter value, used mainly when there is no contention,
     * but also as a fallback during table initialization
     * races. Updated via CAS.
     */
    private transient volatile long baseCount;

    /**
     * Table initialization and resizing control. When negative, the
     * table is being initialized or resized: -1 for initialization,
     * else -(1 + the number of active resizing threads). Otherwise,
     * when table is null, holds the initial table size to use upon
     * creation, or 0 for default. After initialization, holds the
     * next element count value upon which to resize the table.
     */
    private transient volatile int sizeCtl;

    /**
     * The next table index (plus one) to split while resizing.
     */
    private transient volatile int transferIndex;

    /**
     * The least available table index to split while resizing.
     */
    private transient volatile int transferOrigin;

    /**
     * Spinlock (locked via CAS) used when resizing and/or creating Cells.
     */
    private transient volatile int cellsBusy;

    /**
     * Table of counter cells. When non-null, size is a power of 2.
     */
    private transient volatile Cell[] counterCells;

    // Lazily created views; created on first request and then cached.
    private transient KeySetView<K,V> keySet;
    private transient ValuesView<K,V> values;
    private transient EntrySetView<K,V> entrySet;
553
554 /* ---------------- Table element access -------------- */
555
    /*
     * Volatile access methods are used for table elements as well as
     * elements of in-progress next table while resizing. Uses are
     * null checked by callers, and implicitly bounds-checked, relying
     * on the invariants that tab arrays have non-zero size, and all
     * indices are masked with (tab.length - 1) which is never
     * negative and always less than length. Note that, to be correct
     * wrt arbitrary concurrency errors by users, bounds checks must
     * operate on local variables, which accounts for some odd-looking
     * inline assignments below.
     */

    /** Volatile read of tab[i] (offset computed as i scaled by ASHIFT plus ABASE). */
    @SuppressWarnings("unchecked") static final <V> Node<V> tabAt
        (Node<V>[] tab, int i) { // used by Traverser
        return (Node<V>)U.getObjectVolatile(tab, ((long)i << ASHIFT) + ABASE);
    }

    /** Atomically sets tab[i] to v only if it currently contains c. */
    private static final <V> boolean casTabAt
        (Node<V>[] tab, int i, Node<V> c, Node<V> v) {
        return U.compareAndSwapObject(tab, ((long)i << ASHIFT) + ABASE, c, v);
    }

    /** Volatile write of tab[i]. */
    private static final <V> void setTabAt
        (Node<V>[] tab, int i, Node<V> v) {
        U.putObjectVolatile(tab, ((long)i << ASHIFT) + ABASE, v);
    }
582
    /* ---------------- Nodes -------------- */

    /**
     * Key-value entry. Note that this is never exported out as a
     * user-visible Map.Entry (see MapEntry below). Nodes with a hash
     * field of MOVED are special, and do not contain user keys or
     * values. Otherwise, keys are never null, and null val fields
     * indicate that a node is in the process of being deleted or
     * created. For purposes of read-only access, a key may be read
     * before a val, but can only be used after checking val to be
     * non-null.
     */
    static class Node<V> {
        final int hash;        // lower 31 bits of spread hash, or MOVED
        final Object key;      // plain Object (not K): may hold special values
        volatile V val;        // null while node is being deleted/created
        volatile Node<V> next; // list link within a bin

        Node(int hash, Object key, V val, Node<V> next) {
            this.hash = hash;
            this.key = key;
            this.val = val;
            this.next = next;
        }
    }
608
    /* ---------------- TreeBins -------------- */

    /**
     * Nodes for use in TreeBins. TreeNodes keep the inherited "next"
     * pointer in addition to the tree links, so a tree-ified bin can
     * still be traversed like a plain list by iterators (see the
     * class-level overview).
     */
    static final class TreeNode<V> extends Node<V> {
        TreeNode<V> parent; // red-black tree links
        TreeNode<V> left;
        TreeNode<V> right;
        TreeNode<V> prev;   // needed to unlink next upon deletion
        boolean red;        // red-black node color

        TreeNode(int hash, Object key, V val, Node<V> next, TreeNode<V> parent) {
            super(hash, key, val, next);
            this.parent = parent;
        }
    }
626
627 /**
628 * Returns a Class for the given object of the form "class C
629 * implements Comparable<C>", if one exists, else null. See below
630 * for explanation.
631 */
632 static Class<?> comparableClassFor(Object x) {
633 Class<?> c, s, cmpc; Type[] ts, as; Type t; ParameterizedType p;
634 if ((c = x.getClass()) == String.class) // bypass checks
635 return c;
636 if ((cmpc = Comparable.class).isAssignableFrom(c)) {
637 while (cmpc.isAssignableFrom(s = c.getSuperclass()))
638 c = s; // find topmost comparable class
639 if ((ts = c.getGenericInterfaces()) != null) {
640 for (int i = 0; i < ts.length; ++i) {
641 if (((t = ts[i]) instanceof ParameterizedType) &&
642 ((p = (ParameterizedType)t).getRawType() == cmpc) &&
643 (as = p.getActualTypeArguments()) != null &&
644 as.length == 1 && as[0] == c) // type arg is c
645 return c;
646 }
647 }
648 }
649 return null;
650 }
651
652 /**
653 * A specialized form of red-black tree for use in bins
654 * whose size exceeds a threshold.
655 *
656 * TreeBins use a special form of comparison for search and
657 * related operations (which is the main reason we cannot use
658 * existing collections such as TreeMaps). TreeBins contain
659 * Comparable elements, but may contain others, as well as
660 * elements that are Comparable but not necessarily Comparable<T>
661 * for the same T, so we cannot invoke compareTo among them. To
662 * handle this, the tree is ordered primarily by hash value, then
663 * by Comparable.compareTo order if applicable. On lookup at a
664 * node, if elements are not comparable or compare as 0 then both
665 * left and right children may need to be searched in the case of
666 * tied hash values. (This corresponds to the full list search
667 * that would be necessary if all elements were non-Comparable and
668 * had tied hashes.) The red-black balancing code is updated from
669 * pre-jdk-collections
670 * (http://gee.cs.oswego.edu/dl/classes/collections/RBCell.java)
671 * based in turn on Cormen, Leiserson, and Rivest "Introduction to
672 * Algorithms" (CLR).
673 *
674 * TreeBins also maintain a separate locking discipline than
675 * regular bins. Because they are forwarded via special MOVED
676 * nodes at bin heads (which can never change once established),
677 * we cannot use those nodes as locks. Instead, TreeBin
678 * extends AbstractQueuedSynchronizer to support a simple form of
679 * read-write lock. For update operations and table validation,
680 * the exclusive form of lock behaves in the same way as bin-head
681 * locks. However, lookups use shared read-lock mechanics to allow
682 * multiple readers in the absence of writers. Additionally,
683 * these lookups do not ever block: While the lock is not
684 * available, they proceed along the slow traversal path (via
685 * next-pointers) until the lock becomes available or the list is
686 * exhausted, whichever comes first. (These cases are not fast,
687 * but maximize aggregate expected throughput.) The AQS mechanics
688 * for doing this are straightforward. The lock state is held as
689 * AQS getState(). Read counts are negative; the write count (1)
690 * is positive. There are no signalling preferences among readers
691 * and writers. Since we don't need to export full Lock API, we
692 * just override the minimal AQS methods and use them directly.
693 */
    static final class TreeBin<V> extends AbstractQueuedSynchronizer {
        private static final long serialVersionUID = 2249069246763182397L;
        transient TreeNode<V> root; // root of tree
        transient TreeNode<V> first; // head of next-pointer list

        /* AQS overrides */
        // Lock state encoding: 0 = unlocked, 1 = write-locked,
        // negative = number of concurrent readers (see class javadoc).
        public final boolean isHeldExclusively() { return getState() > 0; }
        // Exclusive (write) lock: acquirable only when fully unlocked.
        public final boolean tryAcquire(int ignore) {
            if (compareAndSetState(0, 1)) {
                setExclusiveOwnerThread(Thread.currentThread());
                return true;
            }
            return false;
        }
        public final boolean tryRelease(int ignore) {
            setExclusiveOwnerThread(null);
            setState(0);
            return true;
        }
        // Shared (read) lock: each reader decrements state below zero;
        // fails (-1) while a writer (state > 0) holds the lock.
        public final int tryAcquireShared(int ignore) {
            for (int c;;) {
                if ((c = getState()) > 0)
                    return -1;
                if (compareAndSetState(c, c -1))
                    return 1;
            }
        }
        // Returns true (enabling writer wakeup) only when the last
        // reader leaves, i.e. state goes from -1 back to 0.
        public final boolean tryReleaseShared(int ignore) {
            int c;
            do {} while (!compareAndSetState(c = getState(), c + 1));
            return c == -1;
        }

        /** From CLR */
        private void rotateLeft(TreeNode<V> p) {
            if (p != null) {
                TreeNode<V> r = p.right, pp, rl;
                if ((rl = p.right = r.left) != null)
                    rl.parent = p;
                if ((pp = r.parent = p.parent) == null)
                    root = r;
                else if (pp.left == p)
                    pp.left = r;
                else
                    pp.right = r;
                r.left = p;
                p.parent = r;
            }
        }

        /** From CLR */
        private void rotateRight(TreeNode<V> p) {
            if (p != null) {
                TreeNode<V> l = p.left, pp, lr;
                if ((lr = p.left = l.right) != null)
                    lr.parent = p;
                if ((pp = l.parent = p.parent) == null)
                    root = l;
                else if (pp.right == p)
                    pp.right = l;
                else
                    pp.left = l;
                l.right = p;
                p.parent = l;
            }
        }

        /**
         * Returns the TreeNode (or null if not found) for the given
         * key. A front-end for recursive version.
         */
        final TreeNode<V> getTreeNode(int h, Object k) {
            return getTreeNode(h, k, root, comparableClassFor(k));
        }

        /**
         * Returns the TreeNode (or null if not found) for the given key
         * starting at given root.
         *
         * Ordering is primarily by hash, then by compareTo when both
         * keys are Comparable for the same class cc; on ties (or
         * non-comparable keys) the right subtree is searched
         * recursively before continuing down the left.
         */
        @SuppressWarnings("unchecked") final TreeNode<V> getTreeNode
            (int h, Object k, TreeNode<V> p, Class<?> cc) {
            while (p != null) {
                int dir, ph; Object pk;
                if ((ph = p.hash) != h)
                    dir = (h < ph) ? -1 : 1;
                else if ((pk = p.key) == k || k.equals(pk))
                    return p;
                else if (cc == null || comparableClassFor(pk) != cc ||
                         (dir = ((Comparable<Object>)k).compareTo(pk)) == 0) {
                    TreeNode<V> r, pr; // check both sides
                    if ((pr = p.right) != null && h >= pr.hash &&
                        (r = getTreeNode(h, k, pr, cc)) != null)
                        return r;
                    else // continue left
                        dir = -1;
                }
                p = (dir > 0) ? p.right : p.left;
            }
            return null;
        }

        /**
         * Wrapper for getTreeNode used by CHM.get. Tries to obtain
         * read-lock to call getTreeNode, but during failure to get
         * lock, searches along next links.
         */
        final V getValue(int h, Object k) {
            Node<V> r = null;
            int c = getState(); // Must read lock state first
            for (Node<V> e = first; e != null; e = e.next) {
                // c <= 0 means no writer; CAS to c-1 registers this
                // thread as a reader so a tree search is safe.
                if (c <= 0 && compareAndSetState(c, c - 1)) {
                    try {
                        r = getTreeNode(h, k, root, comparableClassFor(k));
                    } finally {
                        releaseShared(0);
                    }
                    break;
                }
                // Writer active: fall back to (slow) linear scan along
                // the next-pointer list, which writers never break.
                else if (e.hash == h && k.equals(e.key)) {
                    r = e;
                    break;
                }
                else
                    c = getState();
            }
            return r == null ? null : r.val;
        }

        /**
         * Finds or adds a node. Caller must hold the write lock.
         * @return null if added
         */
        @SuppressWarnings("unchecked") final TreeNode<V> putTreeNode
            (int h, Object k, V v) {
            Class<?> cc = comparableClassFor(k);
            TreeNode<V> pp = root, p = null;
            int dir = 0;
            while (pp != null) { // find existing node or leaf to insert at
                int ph; Object pk;
                p = pp;
                if ((ph = p.hash) != h)
                    dir = (h < ph) ? -1 : 1;
                else if ((pk = p.key) == k || k.equals(pk))
                    return p;
                else if (cc == null || comparableClassFor(pk) != cc ||
                         (dir = ((Comparable<Object>)k).compareTo(pk)) == 0) {
                    TreeNode<V> r, pr;
                    if ((pr = p.right) != null && h >= pr.hash &&
                        (r = getTreeNode(h, k, pr, cc)) != null)
                        return r;
                    else // continue left
                        dir = -1;
                }
                pp = (dir > 0) ? p.right : p.left;
            }

            // Not found: link new node at head of the next-pointer
            // list and as a leaf child of p (null p means empty tree).
            TreeNode<V> f = first;
            TreeNode<V> x = first = new TreeNode<V>(h, k, v, f, p);
            if (p == null)
                root = x;
            else { // attach and rebalance; adapted from CLR
                TreeNode<V> xp, xpp;
                if (f != null)
                    f.prev = x;
                if (dir <= 0)
                    p.left = x;
                else
                    p.right = x;
                x.red = true;
                // Standard red-black insertion fixup: repair
                // red-red violations walking up toward the root.
                while (x != null && (xp = x.parent) != null && xp.red &&
                       (xpp = xp.parent) != null) {
                    TreeNode<V> xppl = xpp.left;
                    if (xp == xppl) {
                        TreeNode<V> y = xpp.right;
                        if (y != null && y.red) {
                            y.red = false;
                            xp.red = false;
                            xpp.red = true;
                            x = xpp;
                        }
                        else {
                            if (x == xp.right) {
                                rotateLeft(x = xp);
                                xpp = (xp = x.parent) == null ? null : xp.parent;
                            }
                            if (xp != null) {
                                xp.red = false;
                                if (xpp != null) {
                                    xpp.red = true;
                                    rotateRight(xpp);
                                }
                            }
                        }
                    }
                    else {
                        TreeNode<V> y = xppl;
                        if (y != null && y.red) {
                            y.red = false;
                            xp.red = false;
                            xpp.red = true;
                            x = xpp;
                        }
                        else {
                            if (x == xp.left) {
                                rotateRight(x = xp);
                                xpp = (xp = x.parent) == null ? null : xp.parent;
                            }
                            if (xp != null) {
                                xp.red = false;
                                if (xpp != null) {
                                    xpp.red = true;
                                    rotateLeft(xpp);
                                }
                            }
                        }
                    }
                }
                // Root is always black.
                TreeNode<V> r = root;
                if (r != null && r.red)
                    r.red = false;
            }
            return null;
        }

        /**
         * Removes the given node, that must be present before this
         * call. This is messier than typical red-black deletion code
         * because we cannot swap the contents of an interior node
         * with a leaf successor that is pinned by "next" pointers
         * that are accessible independently of lock. So instead we
         * swap the tree linkages. Caller must hold the write lock.
         */
        final void deleteTreeNode(TreeNode<V> p) {
            TreeNode<V> next = (TreeNode<V>)p.next; // unlink traversal pointers
            TreeNode<V> pred = p.prev;
            if (pred == null)
                first = next;
            else
                pred.next = next;
            if (next != null)
                next.prev = pred;
            TreeNode<V> replacement;
            TreeNode<V> pl = p.left;
            TreeNode<V> pr = p.right;
            if (pl != null && pr != null) {
                // Interior node: exchange tree links (not contents)
                // with in-order successor s, then delete p in s's
                // old (at-most-one-child) position.
                TreeNode<V> s = pr, sl;
                while ((sl = s.left) != null) // find successor
                    s = sl;
                boolean c = s.red; s.red = p.red; p.red = c; // swap colors
                TreeNode<V> sr = s.right;
                TreeNode<V> pp = p.parent;
                if (s == pr) { // p was s's direct parent
                    p.parent = s;
                    s.right = p;
                }
                else {
                    TreeNode<V> sp = s.parent;
                    if ((p.parent = sp) != null) {
                        if (s == sp.left)
                            sp.left = p;
                        else
                            sp.right = p;
                    }
                    if ((s.right = pr) != null)
                        pr.parent = s;
                }
                p.left = null;
                if ((p.right = sr) != null)
                    sr.parent = p;
                if ((s.left = pl) != null)
                    pl.parent = s;
                if ((s.parent = pp) == null)
                    root = s;
                else if (p == pp.left)
                    pp.left = s;
                else
                    pp.right = s;
                replacement = sr;
            }
            else
                replacement = (pl != null) ? pl : pr;
            TreeNode<V> pp = p.parent;
            if (replacement == null) {
                if (pp == null) {
                    root = null;
                    return;
                }
                // No child: p itself stands in as the (phantom)
                // replacement during rebalancing, detached below.
                replacement = p;
            }
            else {
                replacement.parent = pp;
                if (pp == null)
                    root = replacement;
                else if (p == pp.left)
                    pp.left = replacement;
                else
                    pp.right = replacement;
                p.left = p.right = p.parent = null;
            }
            if (!p.red) { // rebalance, from CLR
                // Standard red-black deletion fixup: restore black-height
                // balance starting at the replacement node.
                TreeNode<V> x = replacement;
                while (x != null) {
                    TreeNode<V> xp, xpl;
                    if (x.red || (xp = x.parent) == null) {
                        x.red = false;
                        break;
                    }
                    if (x == (xpl = xp.left)) {
                        TreeNode<V> sib = xp.right;
                        if (sib != null && sib.red) {
                            sib.red = false;
                            xp.red = true;
                            rotateLeft(xp);
                            sib = (xp = x.parent) == null ? null : xp.right;
                        }
                        if (sib == null)
                            x = xp;
                        else {
                            TreeNode<V> sl = sib.left, sr = sib.right;
                            if ((sr == null || !sr.red) &&
                                (sl == null || !sl.red)) {
                                sib.red = true;
                                x = xp;
                            }
                            else {
                                if (sr == null || !sr.red) {
                                    if (sl != null)
                                        sl.red = false;
                                    sib.red = true;
                                    rotateRight(sib);
                                    sib = (xp = x.parent) == null ?
                                        null : xp.right;
                                }
                                if (sib != null) {
                                    sib.red = (xp == null) ? false : xp.red;
                                    if ((sr = sib.right) != null)
                                        sr.red = false;
                                }
                                if (xp != null) {
                                    xp.red = false;
                                    rotateLeft(xp);
                                }
                                x = root;
                            }
                        }
                    }
                    else { // symmetric
                        TreeNode<V> sib = xpl;
                        if (sib != null && sib.red) {
                            sib.red = false;
                            xp.red = true;
                            rotateRight(xp);
                            sib = (xp = x.parent) == null ? null : xp.left;
                        }
                        if (sib == null)
                            x = xp;
                        else {
                            TreeNode<V> sl = sib.left, sr = sib.right;
                            if ((sl == null || !sl.red) &&
                                (sr == null || !sr.red)) {
                                sib.red = true;
                                x = xp;
                            }
                            else {
                                if (sl == null || !sl.red) {
                                    if (sr != null)
                                        sr.red = false;
                                    sib.red = true;
                                    rotateLeft(sib);
                                    sib = (xp = x.parent) == null ?
                                        null : xp.left;
                                }
                                if (sib != null) {
                                    sib.red = (xp == null) ? false : xp.red;
                                    if ((sl = sib.left) != null)
                                        sl.red = false;
                                }
                                if (xp != null) {
                                    xp.red = false;
                                    rotateRight(xp);
                                }
                                x = root;
                            }
                        }
                    }
                }
            }
            if (p == replacement && (pp = p.parent) != null) {
                if (p == pp.left) // detach pointers
                    pp.left = null;
                else if (p == pp.right)
                    pp.right = null;
                p.parent = null;
            }
        }
    }
1090
1091 /* ---------------- Collision reduction methods -------------- */
1092
1093 /**
1094 * Spreads higher bits to lower, and also forces top bit to 0.
1095 * Because the table uses power-of-two masking, sets of hashes
1096 * that vary only in bits above the current mask will always
1097 * collide. (Among known examples are sets of Float keys holding
1098 * consecutive whole numbers in small tables.) To counter this,
1099 * we apply a transform that spreads the impact of higher bits
1100 * downward. There is a tradeoff between speed, utility, and
1101 * quality of bit-spreading. Because many common sets of hashes
1102 * are already reasonably distributed across bits (so don't benefit
1103 * from spreading), and because we use trees to handle large sets
1104 * of collisions in bins, we don't need excessively high quality.
1105 */
1106 private static final int spread(int h) {
1107 h ^= (h >>> 18) ^ (h >>> 12);
1108 return (h ^ (h >>> 10)) & HASH_BITS;
1109 }
1110
1111 /**
1112 * Replaces a list bin with a tree bin if key is comparable. Call
1113 * only when locked.
1114 */
1115 private final void replaceWithTreeBin(Node<V>[] tab, int index, Object key) {
1116 if (comparableClassFor(key) != null) {
1117 TreeBin<V> t = new TreeBin<V>();
1118 for (Node<V> e = tabAt(tab, index); e != null; e = e.next)
1119 t.putTreeNode(e.hash, e.key, e.val);
1120 setTabAt(tab, index, new Node<V>(MOVED, t, null, null));
1121 }
1122 }
1123
1124 /* ---------------- Internal access and update methods -------------- */
1125
1126 /** Implementation for get and containsKey */
    @SuppressWarnings("unchecked") private final V internalGet(Object k) {
        int h = spread(k.hashCode());
        retry: for (Node<V>[] tab = table; tab != null;) {
            Node<V> e; Object ek; V ev; int eh; // locals to read fields once
            for (e = tabAt(tab, (tab.length - 1) & h); e != null; e = e.next) {
                // Negative hash marks a special node: either a TreeBin
                // holder or a forwarding node to a resized table.
                if ((eh = e.hash) < 0) {
                    if ((ek = e.key) instanceof TreeBin) // search TreeBin
                        return ((TreeBin<V>)ek).getValue(h, k);
                    else { // restart with new table
                        tab = (Node<V>[])ek;
                        continue retry;
                    }
                }
                else if (eh == h && (ev = e.val) != null &&
                         ((ek = e.key) == k || k.equals(ek)))
                    return ev;
            }
            break;
        }
        return null;
    }
1148
1149 /**
1150 * Implementation for the four public remove/replace methods:
1151 * Replaces node value with v, conditional upon match of cv if
1152 * non-null. If resulting value is null, delete.
1153 */
    @SuppressWarnings("unchecked") private final V internalReplace
        (Object k, V v, Object cv) {
        int h = spread(k.hashCode());
        V oldVal = null;
        for (Node<V>[] tab = table;;) {
            Node<V> f; int i, fh; Object fk;
            if (tab == null ||
                (f = tabAt(tab, i = (tab.length - 1) & h)) == null)
                break; // empty bin: nothing to replace
            else if ((fh = f.hash) < 0) {
                if ((fk = f.key) instanceof TreeBin) {
                    TreeBin<V> t = (TreeBin<V>)fk;
                    boolean validated = false; // bin head unchanged under lock?
                    boolean deleted = false;
                    t.acquire(0); // exclusive (write) lock on the tree bin
                    try {
                        if (tabAt(tab, i) == f) {
                            validated = true;
                            TreeNode<V> p = t.getTreeNode(h, k);
                            if (p != null) {
                                V pv = p.val;
                                if (cv == null || cv == pv || cv.equals(pv)) {
                                    oldVal = pv;
                                    // null new value means remove
                                    if ((p.val = v) == null) {
                                        deleted = true;
                                        t.deleteTreeNode(p);
                                    }
                                }
                            }
                        }
                    } finally {
                        t.release(0);
                    }
                    if (validated) {
                        if (deleted)
                            addCount(-1L, -1);
                        break;
                    }
                    // else: bin changed while locking; retry same table
                }
                else // forwarding node: retry against the new table
                    tab = (Node<V>[])fk;
            }
            else if (fh != h && f.next == null) // precheck
                break;                          // rules out possible existence
            else {
                boolean validated = false;
                boolean deleted = false;
                synchronized (f) { // lock bin head for list update
                    if (tabAt(tab, i) == f) {
                        validated = true;
                        for (Node<V> e = f, pred = null;;) {
                            Object ek; V ev;
                            if (e.hash == h &&
                                ((ev = e.val) != null) &&
                                ((ek = e.key) == k || k.equals(ek))) {
                                if (cv == null || cv == ev || cv.equals(ev)) {
                                    oldVal = ev;
                                    // null new value means unlink node
                                    if ((e.val = v) == null) {
                                        deleted = true;
                                        Node<V> en = e.next;
                                        if (pred != null)
                                            pred.next = en;
                                        else
                                            setTabAt(tab, i, en);
                                    }
                                }
                                break;
                            }
                            pred = e;
                            if ((e = e.next) == null)
                                break;
                        }
                    }
                }
                if (validated) {
                    if (deleted)
                        addCount(-1L, -1);
                    break;
                }
            }
        }
        return oldVal;
    }
1237
1238 /*
1239 * Internal versions of insertion methods
1240 * All have the same basic structure as the first (internalPut):
1241 * 1. If table uninitialized, create
1242 * 2. If bin empty, try to CAS new node
1243 * 3. If bin stale, use new table
1244 * 4. if bin converted to TreeBin, validate and relay to TreeBin methods
1245 * 5. Lock and validate; if valid, scan and add or update
1246 *
1247 * The putAll method differs mainly in attempting to pre-allocate
1248 * enough table space, and also more lazily performs count updates
1249 * and checks.
1250 *
1251 * Most of the function-accepting methods can't be factored nicely
1252 * because they require different functional forms, so instead
1253 * sprawl out similar mechanics.
1254 */
1255
1256 /** Implementation for put and putIfAbsent */
    @SuppressWarnings("unchecked") private final V internalPut
        (K k, V v, boolean onlyIfAbsent) {
        if (k == null || v == null) throw new NullPointerException();
        int h = spread(k.hashCode());
        int len = 0; // bin length estimate, passed to addCount's resize check
        for (Node<V>[] tab = table;;) {
            int i, fh; Node<V> f; Object fk; V fv;
            if (tab == null)
                tab = initTable();
            else if ((f = tabAt(tab, i = (tab.length - 1) & h)) == null) {
                if (casTabAt(tab, i, null, new Node<V>(h, k, v, null)))
                    break;                   // no lock when adding to empty bin
            }
            else if ((fh = f.hash) < 0) { // special node: TreeBin or forwarder
                if ((fk = f.key) instanceof TreeBin) {
                    TreeBin<V> t = (TreeBin<V>)fk;
                    V oldVal = null;
                    t.acquire(0); // exclusive lock for tree update
                    try {
                        if (tabAt(tab, i) == f) { // revalidate bin head
                            len = 2;
                            TreeNode<V> p = t.putTreeNode(h, k, v);
                            if (p != null) { // existing mapping
                                oldVal = p.val;
                                if (!onlyIfAbsent)
                                    p.val = v;
                            }
                        }
                    } finally {
                        t.release(0);
                    }
                    if (len != 0) {
                        if (oldVal != null)
                            return oldVal;
                        break;
                    }
                }
                else // forwarding node: retry against the new table
                    tab = (Node<V>[])fk;
            }
            else if (onlyIfAbsent && fh == h && (fv = f.val) != null &&
                     ((fk = f.key) == k || k.equals(fk))) // peek while nearby
                return fv;
            else {
                V oldVal = null;
                synchronized (f) { // lock bin head for list scan/append
                    if (tabAt(tab, i) == f) {
                        len = 1;
                        for (Node<V> e = f;; ++len) {
                            Object ek; V ev;
                            if (e.hash == h &&
                                (ev = e.val) != null &&
                                ((ek = e.key) == k || k.equals(ek))) {
                                oldVal = ev;
                                if (!onlyIfAbsent)
                                    e.val = v;
                                break;
                            }
                            Node<V> last = e;
                            if ((e = e.next) == null) {
                                last.next = new Node<V>(h, k, v, null);
                                // long chain: convert to tree bin
                                if (len >= TREE_THRESHOLD)
                                    replaceWithTreeBin(tab, i, k);
                                break;
                            }
                        }
                    }
                }
                if (len != 0) {
                    if (oldVal != null)
                        return oldVal;
                    break;
                }
            }
        }
        addCount(1L, len);
        return null;
    }
1335
1336 /** Implementation for computeIfAbsent */
    @SuppressWarnings("unchecked") private final V internalComputeIfAbsent
        (K k, Function<? super K, ? extends V> mf) {
        if (k == null || mf == null)
            throw new NullPointerException();
        int h = spread(k.hashCode());
        V val = null;
        int len = 0;
        for (Node<V>[] tab = table;;) {
            Node<V> f; int i; Object fk;
            if (tab == null)
                tab = initTable();
            else if ((f = tabAt(tab, i = (tab.length - 1) & h)) == null) {
                // Empty bin: reserve it with a placeholder node (val ==
                // null), locked while the mapping function runs.
                Node<V> node = new Node<V>(h, k, null, null);
                synchronized (node) {
                    if (casTabAt(tab, i, null, node)) {
                        len = 1;
                        try {
                            if ((val = mf.apply(k)) != null)
                                node.val = val;
                        } finally {
                            // Function returned null or threw: undo reservation.
                            if (val == null)
                                setTabAt(tab, i, null);
                        }
                    }
                }
                if (len != 0)
                    break;
            }
            else if (f.hash < 0) { // special node: TreeBin or forwarder
                if ((fk = f.key) instanceof TreeBin) {
                    TreeBin<V> t = (TreeBin<V>)fk;
                    boolean added = false;
                    t.acquire(0); // exclusive lock for tree update
                    try {
                        if (tabAt(tab, i) == f) {
                            len = 1;
                            TreeNode<V> p = t.getTreeNode(h, k);
                            if (p != null) // present: keep existing value
                                val = p.val;
                            else if ((val = mf.apply(k)) != null) {
                                added = true;
                                len = 2;
                                t.putTreeNode(h, k, val);
                            }
                        }
                    } finally {
                        t.release(0);
                    }
                    if (len != 0) {
                        if (!added)
                            return val;
                        break;
                    }
                }
                else // forwarding node: retry against the new table
                    tab = (Node<V>[])fk;
            }
            else {
                // Lock-free prescan: avoid locking if already present.
                for (Node<V> e = f; e != null; e = e.next) { // prescan
                    Object ek; V ev;
                    if (e.hash == h && (ev = e.val) != null &&
                        ((ek = e.key) == k || k.equals(ek)))
                        return ev;
                }
                boolean added = false;
                synchronized (f) { // lock bin head for scan/append
                    if (tabAt(tab, i) == f) {
                        len = 1;
                        for (Node<V> e = f;; ++len) {
                            Object ek; V ev;
                            if (e.hash == h &&
                                (ev = e.val) != null &&
                                ((ek = e.key) == k || k.equals(ek))) {
                                val = ev;
                                break;
                            }
                            Node<V> last = e;
                            if ((e = e.next) == null) {
                                if ((val = mf.apply(k)) != null) {
                                    added = true;
                                    last.next = new Node<V>(h, k, val, null);
                                    // long chain: convert to tree bin
                                    if (len >= TREE_THRESHOLD)
                                        replaceWithTreeBin(tab, i, k);
                                }
                                break;
                            }
                        }
                    }
                }
                if (len != 0) {
                    if (!added)
                        return val;
                    break;
                }
            }
        }
        if (val != null)
            addCount(1L, len);
        return val;
    }
1437
1438 /** Implementation for compute */
    @SuppressWarnings("unchecked") private final V internalCompute
        (K k, boolean onlyIfPresent,
         BiFunction<? super K, ? super V, ? extends V> mf) {
        if (k == null || mf == null)
            throw new NullPointerException();
        int h = spread(k.hashCode());
        V val = null;
        int delta = 0; // net change in element count: -1, 0, or 1
        int len = 0;
        for (Node<V>[] tab = table;;) {
            Node<V> f; int i, fh; Object fk;
            if (tab == null)
                tab = initTable();
            else if ((f = tabAt(tab, i = (tab.length - 1) & h)) == null) {
                if (onlyIfPresent) // computeIfPresent on absent key: no-op
                    break;
                // Reserve the empty bin with a locked placeholder node
                // while the remapping function runs.
                Node<V> node = new Node<V>(h, k, null, null);
                synchronized (node) {
                    if (casTabAt(tab, i, null, node)) {
                        try {
                            len = 1;
                            if ((val = mf.apply(k, null)) != null) {
                                node.val = val;
                                delta = 1;
                            }
                        } finally {
                            // Function returned null or threw: undo reservation.
                            if (delta == 0)
                                setTabAt(tab, i, null);
                        }
                    }
                }
                if (len != 0)
                    break;
            }
            else if ((fh = f.hash) < 0) { // special node: TreeBin or forwarder
                if ((fk = f.key) instanceof TreeBin) {
                    TreeBin<V> t = (TreeBin<V>)fk;
                    t.acquire(0); // exclusive lock for tree update
                    try {
                        if (tabAt(tab, i) == f) {
                            len = 1;
                            TreeNode<V> p = t.getTreeNode(h, k);
                            if (p == null && onlyIfPresent)
                                break;
                            V pv = (p == null) ? null : p.val;
                            if ((val = mf.apply(k, pv)) != null) {
                                if (p != null)
                                    p.val = val;
                                else {
                                    len = 2;
                                    delta = 1;
                                    t.putTreeNode(h, k, val);
                                }
                            }
                            else if (p != null) { // null result removes mapping
                                delta = -1;
                                t.deleteTreeNode(p);
                            }
                        }
                    } finally {
                        t.release(0);
                    }
                    if (len != 0)
                        break;
                }
                else // forwarding node: retry against the new table
                    tab = (Node<V>[])fk;
            }
            else {
                synchronized (f) { // lock bin head for list update
                    if (tabAt(tab, i) == f) {
                        len = 1;
                        for (Node<V> e = f, pred = null;; ++len) {
                            Object ek; V ev;
                            if (e.hash == h &&
                                (ev = e.val) != null &&
                                ((ek = e.key) == k || k.equals(ek))) {
                                val = mf.apply(k, ev);
                                if (val != null)
                                    e.val = val;
                                else { // null result unlinks the node
                                    delta = -1;
                                    Node<V> en = e.next;
                                    if (pred != null)
                                        pred.next = en;
                                    else
                                        setTabAt(tab, i, en);
                                }
                                break;
                            }
                            pred = e;
                            if ((e = e.next) == null) {
                                // Absent: append only for compute (not
                                // computeIfPresent) and non-null result.
                                if (!onlyIfPresent &&
                                    (val = mf.apply(k, null)) != null) {
                                    pred.next = new Node<V>(h, k, val, null);
                                    delta = 1;
                                    if (len >= TREE_THRESHOLD)
                                        replaceWithTreeBin(tab, i, k);
                                }
                                break;
                            }
                        }
                    }
                }
                if (len != 0)
                    break;
            }
        }
        if (delta != 0)
            addCount((long)delta, len);
        return val;
    }
1551
1552 /** Implementation for merge */
    @SuppressWarnings("unchecked") private final V internalMerge
        (K k, V v, BiFunction<? super V, ? super V, ? extends V> mf) {
        if (k == null || v == null || mf == null)
            throw new NullPointerException();
        int h = spread(k.hashCode());
        V val = null;
        int delta = 0; // net change in element count: -1, 0, or 1
        int len = 0;
        for (Node<V>[] tab = table;;) {
            int i; Node<V> f; Object fk; V fv;
            if (tab == null)
                tab = initTable();
            else if ((f = tabAt(tab, i = (tab.length - 1) & h)) == null) {
                // Empty bin: absent key maps directly to v, no function call.
                if (casTabAt(tab, i, null, new Node<V>(h, k, v, null))) {
                    delta = 1;
                    val = v;
                    break;
                }
            }
            else if (f.hash < 0) { // special node: TreeBin or forwarder
                if ((fk = f.key) instanceof TreeBin) {
                    TreeBin<V> t = (TreeBin<V>)fk;
                    t.acquire(0); // exclusive lock for tree update
                    try {
                        if (tabAt(tab, i) == f) {
                            len = 1;
                            TreeNode<V> p = t.getTreeNode(h, k);
                            // Merge old value with v when present.
                            val = (p == null) ? v : mf.apply(p.val, v);
                            if (val != null) {
                                if (p != null)
                                    p.val = val;
                                else {
                                    len = 2;
                                    delta = 1;
                                    t.putTreeNode(h, k, val);
                                }
                            }
                            else if (p != null) { // null result removes mapping
                                delta = -1;
                                t.deleteTreeNode(p);
                            }
                        }
                    } finally {
                        t.release(0);
                    }
                    if (len != 0)
                        break;
                }
                else // forwarding node: retry against the new table
                    tab = (Node<V>[])fk;
            }
            else {
                synchronized (f) { // lock bin head for list update
                    if (tabAt(tab, i) == f) {
                        len = 1;
                        for (Node<V> e = f, pred = null;; ++len) {
                            Object ek; V ev;
                            if (e.hash == h &&
                                (ev = e.val) != null &&
                                ((ek = e.key) == k || k.equals(ek))) {
                                val = mf.apply(ev, v);
                                if (val != null)
                                    e.val = val;
                                else { // null result unlinks the node
                                    delta = -1;
                                    Node<V> en = e.next;
                                    if (pred != null)
                                        pred.next = en;
                                    else
                                        setTabAt(tab, i, en);
                                }
                                break;
                            }
                            pred = e;
                            if ((e = e.next) == null) { // absent: append v
                                val = v;
                                pred.next = new Node<V>(h, k, val, null);
                                delta = 1;
                                if (len >= TREE_THRESHOLD)
                                    replaceWithTreeBin(tab, i, k);
                                break;
                            }
                        }
                    }
                }
                if (len != 0)
                    break;
            }
        }
        if (delta != 0)
            addCount((long)delta, len);
        return val;
    }
1646
1647 /** Implementation for putAll */
    @SuppressWarnings("unchecked") private final void internalPutAll
        (Map<? extends K, ? extends V> m) {
        tryPresize(m.size());
        long delta = 0L;     // number of uncommitted additions
        boolean npe = false; // to throw exception on exit for nulls
        try {                // to clean up counts on other exceptions
            for (Map.Entry<?, ? extends V> entry : m.entrySet()) {
                Object k; V v;
                if (entry == null || (k = entry.getKey()) == null ||
                    (v = entry.getValue()) == null) {
                    npe = true;
                    break;
                }
                int h = spread(k.hashCode());
                for (Node<V>[] tab = table;;) {
                    int i; Node<V> f; int fh; Object fk;
                    if (tab == null)
                        tab = initTable();
                    else if ((f = tabAt(tab, i = (tab.length - 1) & h)) == null){
                        if (casTabAt(tab, i, null, new Node<V>(h, k, v, null))) {
                            ++delta;
                            break;
                        }
                    }
                    else if ((fh = f.hash) < 0) { // TreeBin or forwarder
                        if ((fk = f.key) instanceof TreeBin) {
                            TreeBin<V> t = (TreeBin<V>)fk;
                            boolean validated = false;
                            t.acquire(0); // exclusive lock for tree update
                            try {
                                if (tabAt(tab, i) == f) {
                                    validated = true;
                                    TreeNode<V> p = t.getTreeNode(h, k);
                                    if (p != null) // overwrite existing
                                        p.val = v;
                                    else {
                                        t.putTreeNode(h, k, v);
                                        ++delta;
                                    }
                                }
                            } finally {
                                t.release(0);
                            }
                            if (validated)
                                break;
                        }
                        else // forwarding node: retry against the new table
                            tab = (Node<V>[])fk;
                    }
                    else {
                        int len = 0;
                        synchronized (f) { // lock bin head for scan/append
                            if (tabAt(tab, i) == f) {
                                len = 1;
                                for (Node<V> e = f;; ++len) {
                                    Object ek; V ev;
                                    if (e.hash == h &&
                                        (ev = e.val) != null &&
                                        ((ek = e.key) == k || k.equals(ek))) {
                                        e.val = v;
                                        break;
                                    }
                                    Node<V> last = e;
                                    if ((e = e.next) == null) {
                                        ++delta;
                                        last.next = new Node<V>(h, k, v, null);
                                        if (len >= TREE_THRESHOLD)
                                            replaceWithTreeBin(tab, i, k);
                                        break;
                                    }
                                }
                            }
                        }
                        if (len != 0) {
                            // Lazily commit counts only when a bin grew
                            // beyond one node, to trigger resize checks.
                            if (len > 1) {
                                addCount(delta, len);
                                delta = 0L;
                            }
                            break;
                        }
                    }
                }
            }
        } finally {
            // Commit any remaining uncounted insertions even on exception.
            if (delta != 0L)
                addCount(delta, 2);
        }
        if (npe)
            throw new NullPointerException();
    }
1738
1739 /**
1740 * Implementation for clear. Steps through each bin, removing all
1741 * nodes.
1742 */
    @SuppressWarnings("unchecked") private final void internalClear() {
        long delta = 0L; // negative number of deletions
        int i = 0;
        Node<V>[] tab = table;
        // Walk bins left to right, emptying each under its lock; a bin
        // index is only advanced after it has been cleared.
        while (tab != null && i < tab.length) {
            Node<V> f = tabAt(tab, i);
            if (f == null)
                ++i;
            else if (f.hash < 0) { // special node: TreeBin or forwarder
                Object fk;
                if ((fk = f.key) instanceof TreeBin) {
                    TreeBin<V> t = (TreeBin<V>)fk;
                    t.acquire(0); // exclusive lock while emptying the tree
                    try {
                        if (tabAt(tab, i) == f) {
                            for (Node<V> p = t.first; p != null; p = p.next) {
                                if (p.val != null) { // (currently always true)
                                    p.val = null;
                                    --delta;
                                }
                            }
                            t.first = null;
                            t.root = null;
                            ++i;
                        }
                    } finally {
                        t.release(0);
                    }
                }
                else // forwarding node: continue in the new table
                    tab = (Node<V>[])fk;
            }
            else {
                synchronized (f) { // lock bin head while unlinking list
                    if (tabAt(tab, i) == f) {
                        for (Node<V> e = f; e != null; e = e.next) {
                            if (e.val != null) { // (currently always true)
                                e.val = null;
                                --delta;
                            }
                        }
                        setTabAt(tab, i, null);
                        ++i;
                    }
                }
            }
        }
        if (delta != 0L)
            addCount(delta, -1);
    }
1793
1794 /* ---------------- Table Initialization and Resizing -------------- */
1795
1796 /**
1797 * Returns a power of two table size for the given desired capacity.
1798 * See Hackers Delight, sec 3.2
1799 */
1800 private static final int tableSizeFor(int c) {
1801 int n = c - 1;
1802 n |= n >>> 1;
1803 n |= n >>> 2;
1804 n |= n >>> 4;
1805 n |= n >>> 8;
1806 n |= n >>> 16;
1807 return (n < 0) ? 1 : (n >= MAXIMUM_CAPACITY) ? MAXIMUM_CAPACITY : n + 1;
1808 }
1809
1810 /**
1811 * Initializes table, using the size recorded in sizeCtl.
1812 */
    @SuppressWarnings("unchecked") private final Node<V>[] initTable() {
        Node<V>[] tab; int sc;
        while ((tab = table) == null) {
            if ((sc = sizeCtl) < 0)
                Thread.yield(); // lost initialization race; just spin
            // Claim initialization rights by CASing sizeCtl to -1.
            else if (U.compareAndSwapInt(this, SIZECTL, sc, -1)) {
                try {
                    if ((tab = table) == null) { // recheck under "lock"
                        int n = (sc > 0) ? sc : DEFAULT_CAPACITY;
                        @SuppressWarnings("rawtypes") Node[] tb = new Node[n];
                        table = tab = (Node<V>[])tb;
                        // Next resize threshold: 0.75 * n.
                        sc = n - (n >>> 2);
                    }
                } finally {
                    sizeCtl = sc; // publish threshold / release "lock"
                }
                break;
            }
        }
        return tab;
    }
1834
1835 /**
1836 * Adds to count, and if table is too small and not already
1837 * resizing, initiates transfer. If already resizing, helps
1838 * perform transfer if work is available. Rechecks occupancy
1839 * after a transfer to see if another resize is already needed
1840 * because resizings are lagging additions.
1841 *
1842 * @param x the count to add
1843 * @param check if <0, don't check resize, if <= 1 only check if uncontended
1844 */
    private final void addCount(long x, int check) {
        Cell[] as; long b, s;
        // Fast path: CAS the base count; fall back to striped cells
        // (and then fullAddCount) under contention.
        if ((as = counterCells) != null ||
            !U.compareAndSwapLong(this, BASECOUNT, b = baseCount, s = b + x)) {
            Cell a; long v; int m;
            boolean uncontended = true;
            if (as == null || (m = as.length - 1) < 0 ||
                (a = as[ThreadLocalRandom.getProbe() & m]) == null ||
                !(uncontended =
                  U.compareAndSwapLong(a, CELLVALUE, v = a.value, v + x))) {
                fullAddCount(x, uncontended);
                return;
            }
            if (check <= 1)
                return;
            s = sumCount();
        }
        if (check >= 0) {
            Node<V>[] tab, nt; int sc;
            // Loop because a finished transfer may immediately require
            // another resize when additions outpace resizing.
            while (s >= (long)(sc = sizeCtl) && (tab = table) != null &&
                   tab.length < MAXIMUM_CAPACITY) {
                if (sc < 0) { // resize in progress: try to help
                    if (sc == -1 || transferIndex <= transferOrigin ||
                        (nt = nextTable) == null)
                        break; // initializing, or no work left to claim
                    if (U.compareAndSwapInt(this, SIZECTL, sc, sc - 1))
                        transfer(tab, nt);
                }
                // Start a new transfer (-2 = one active resizer).
                else if (U.compareAndSwapInt(this, SIZECTL, sc, -2))
                    transfer(tab, null);
                s = sumCount();
            }
        }
    }
1879
1880 /**
1881 * Tries to presize table to accommodate the given number of elements.
1882 *
1883 * @param size number of elements (doesn't need to be perfectly accurate)
1884 */
    @SuppressWarnings("unchecked") private final void tryPresize(int size) {
        // Target capacity sized for a load factor of 0.75, clamped.
        int c = (size >= (MAXIMUM_CAPACITY >>> 1)) ? MAXIMUM_CAPACITY :
            tableSizeFor(size + (size >>> 1) + 1);
        int sc;
        while ((sc = sizeCtl) >= 0) { // give up if a resize is in progress
            Node<V>[] tab = table; int n;
            if (tab == null || (n = tab.length) == 0) {
                // Not yet initialized: create at the larger of the
                // recorded and requested sizes (CAS-guarded, as in initTable).
                n = (sc > c) ? sc : c;
                if (U.compareAndSwapInt(this, SIZECTL, sc, -1)) {
                    try {
                        if (table == tab) {
                            @SuppressWarnings("rawtypes") Node[] tb = new Node[n];
                            table = (Node<V>[])tb;
                            sc = n - (n >>> 2); // 0.75 * n threshold
                        }
                    } finally {
                        sizeCtl = sc;
                    }
                }
            }
            else if (c <= sc || n >= MAXIMUM_CAPACITY)
                break; // already big enough, or cannot grow further
            else if (tab == table &&
                     U.compareAndSwapInt(this, SIZECTL, sc, -2))
                transfer(tab, null); // start a resize
        }
    }
1912
    /**
     * Moves and/or copies the nodes in each bin to new table. See
     * above for explanation.
     *
     * <p>The table is processed right-to-left in strides; each helper
     * thread CASes transferIndex to claim a stride of bins, copies
     * them into the (doubled) next table, and marks each completed
     * source bin with a forwarding node.
     */
    @SuppressWarnings("unchecked") private final void transfer
        (Node<V>[] tab, Node<V>[] nextTab) {
        int n = tab.length, stride;
        if ((stride = (NCPU > 1) ? (n >>> 3) / NCPU : n) < MIN_TRANSFER_STRIDE)
            stride = MIN_TRANSFER_STRIDE; // subdivide range
        if (nextTab == null) {            // initiating
            try {
                @SuppressWarnings("rawtypes") Node[] tb = new Node[n << 1];
                nextTab = (Node<V>[])tb;
            } catch (Throwable ex) {      // try to cope with OOME
                sizeCtl = Integer.MAX_VALUE; // disable further resizing
                return;
            }
            nextTable = nextTab;
            transferOrigin = n;
            transferIndex = n;
            // Pre-fill not-yet-ready next-table slots with reverse
            // forwarders (pointing back at the old table), revealed
            // stride by stride as transferOrigin is lowered.
            Node<V> rev = new Node<V>(MOVED, tab, null, null);
            for (int k = n; k > 0;) {    // progressively reveal ready slots
                int nextk = (k > stride) ? k - stride : 0;
                for (int m = nextk; m < k; ++m)
                    nextTab[m] = rev;
                for (int m = n + nextk; m < n + k; ++m)
                    nextTab[m] = rev;
                U.putOrderedInt(this, TRANSFERORIGIN, k = nextk);
            }
        }
        int nextn = nextTab.length;
        Node<V> fwd = new Node<V>(MOVED, nextTab, null, null);
        boolean advance = true;
        for (int i = 0, bound = 0;;) {
            int nextIndex, nextBound; Node<V> f; Object fk;
            // Claim the next bin to process; CAS transferIndex down by
            // one stride when the current claimed range is exhausted.
            while (advance) {
                if (--i >= bound)
                    advance = false;
                else if ((nextIndex = transferIndex) <= transferOrigin) {
                    i = -1;               // no more work to claim
                    advance = false;
                }
                else if (U.compareAndSwapInt
                         (this, TRANSFERINDEX, nextIndex,
                          nextBound = (nextIndex > stride ?
                                       nextIndex - stride : 0))) {
                    bound = nextBound;
                    i = nextIndex - 1;
                    advance = false;
                }
            }
            if (i < 0 || i >= n || i + n >= nextn) {
                // Finished our portion: increment sizeCtl; the thread
                // that restores it to 0 (was -1) commits the new table.
                for (int sc;;) {
                    if (U.compareAndSwapInt(this, SIZECTL, sc = sizeCtl, ++sc)) {
                        if (sc == -1) {
                            nextTable = null;
                            table = nextTab;
                            sizeCtl = (n << 1) - (n >>> 1); // 0.75 * (2n)
                        }
                        return;
                    }
                }
            }
            else if ((f = tabAt(tab, i)) == null) {
                if (casTabAt(tab, i, null, fwd)) { // empty bin: just forward
                    setTabAt(nextTab, i, null);
                    setTabAt(nextTab, i + n, null);
                    advance = true;
                }
            }
            else if (f.hash >= 0) {       // plain linked-list bin
                synchronized (f) {
                    if (tabAt(tab, i) == f) {
                        // Find the trailing run of nodes whose (hash & n)
                        // bit agrees, so that run can be reused unsplit.
                        int runBit = f.hash & n;
                        Node<V> lastRun = f, lo = null, hi = null;
                        for (Node<V> p = f.next; p != null; p = p.next) {
                            int b = p.hash & n;
                            if (b != runBit) {
                                runBit = b;
                                lastRun = p;
                            }
                        }
                        if (runBit == 0)
                            lo = lastRun;
                        else
                            hi = lastRun;
                        for (Node<V> p = f; p != lastRun; p = p.next) {
                            int ph = p.hash;
                            Object pk = p.key; V pv = p.val;
                            if ((ph & n) == 0)
                                lo = new Node<V>(ph, pk, pv, lo);
                            else
                                hi = new Node<V>(ph, pk, pv, hi);
                        }
                        setTabAt(nextTab, i, lo);
                        setTabAt(nextTab, i + n, hi);
                        setTabAt(tab, i, fwd);
                        advance = true;
                    }
                }
            }
            else if ((fk = f.key) instanceof TreeBin) { // tree bin: split in two
                TreeBin<V> t = (TreeBin<V>)fk;
                t.acquire(0);
                try {
                    if (tabAt(tab, i) == f) {
                        TreeBin<V> lt = new TreeBin<V>();
                        TreeBin<V> ht = new TreeBin<V>();
                        int lc = 0, hc = 0;
                        for (Node<V> e = t.first; e != null; e = e.next) {
                            int h = e.hash;
                            Object k = e.key; V v = e.val;
                            if ((h & n) == 0) {
                                ++lc;
                                lt.putTreeNode(h, k, v);
                            }
                            else {
                                ++hc;
                                ht.putTreeNode(h, k, v);
                            }
                        }
                        Node<V> ln, hn; // throw away trees if too small
                        if (lc < TREE_THRESHOLD) {
                            ln = null;
                            for (Node<V> p = lt.first; p != null; p = p.next)
                                ln = new Node<V>(p.hash, p.key, p.val, ln);
                        }
                        else
                            ln = new Node<V>(MOVED, lt, null, null);
                        setTabAt(nextTab, i, ln);
                        if (hc < TREE_THRESHOLD) {
                            hn = null;
                            for (Node<V> p = ht.first; p != null; p = p.next)
                                hn = new Node<V>(p.hash, p.key, p.val, hn);
                        }
                        else
                            hn = new Node<V>(MOVED, ht, null, null);
                        setTabAt(nextTab, i + n, hn);
                        setTabAt(tab, i, fwd);
                        advance = true;
                    }
                } finally {
                    t.release(0);
                }
            }
            else
                advance = true; // already processed
        }
    }
2062
2063 /* ---------------- Counter support -------------- */
2064
2065 final long sumCount() {
2066 Cell[] as = counterCells; Cell a;
2067 long sum = baseCount;
2068 if (as != null) {
2069 for (int i = 0; i < as.length; ++i) {
2070 if ((a = as[i]) != null)
2071 sum += a.value;
2072 }
2073 }
2074 return sum;
2075 }
2076
    // See LongAdder version for explanation.
    // Slow path for addCount: retries CASes on a (possibly grown)
    // table of counter cells, falling back to baseCount. cellsBusy
    // acts as a spinlock (0 = free, 1 = held) guarding cell-table
    // creation and expansion.
    private final void fullAddCount(long x, boolean wasUncontended) {
        int h;
        if ((h = ThreadLocalRandom.getProbe()) == 0) {
            ThreadLocalRandom.localInit();      // force initialization
            h = ThreadLocalRandom.getProbe();
            wasUncontended = true;
        }
        boolean collide = false;                // True if last slot nonempty
        for (;;) {
            Cell[] as; Cell a; int n; long v;
            if ((as = counterCells) != null && (n = as.length) > 0) {
                if ((a = as[(n - 1) & h]) == null) {
                    if (cellsBusy == 0) {            // Try to attach new Cell
                        Cell r = new Cell(x);        // Optimistic create
                        if (cellsBusy == 0 &&
                            U.compareAndSwapInt(this, CELLSBUSY, 0, 1)) {
                            boolean created = false;
                            try {               // Recheck under lock
                                Cell[] rs; int m, j;
                                if ((rs = counterCells) != null &&
                                    (m = rs.length) > 0 &&
                                    rs[j = (m - 1) & h] == null) {
                                    rs[j] = r;
                                    created = true;
                                }
                            } finally {
                                cellsBusy = 0;
                            }
                            if (created)
                                break;
                            continue;           // Slot is now non-empty
                        }
                    }
                    collide = false;
                }
                else if (!wasUncontended)       // CAS already known to fail
                    wasUncontended = true;      // Continue after rehash
                else if (U.compareAndSwapLong(a, CELLVALUE, v = a.value, v + x))
                    break;
                else if (counterCells != as || n >= NCPU)
                    collide = false;            // At max size or stale
                else if (!collide)
                    collide = true;             // retry once before expanding
                else if (cellsBusy == 0 &&
                         U.compareAndSwapInt(this, CELLSBUSY, 0, 1)) {
                    try {
                        if (counterCells == as) {// Expand table unless stale
                            Cell[] rs = new Cell[n << 1];
                            for (int i = 0; i < n; ++i)
                                rs[i] = as[i];
                            counterCells = rs;
                        }
                    } finally {
                        cellsBusy = 0;
                    }
                    collide = false;
                    continue;                   // Retry with expanded table
                }
                h = ThreadLocalRandom.advanceProbe(h); // rehash to a new cell
            }
            else if (cellsBusy == 0 && counterCells == as &&
                     U.compareAndSwapInt(this, CELLSBUSY, 0, 1)) {
                boolean init = false;
                try {                           // Initialize table
                    if (counterCells == as) {
                        Cell[] rs = new Cell[2];
                        rs[h & 1] = new Cell(x);
                        counterCells = rs;
                        init = true;
                    }
                } finally {
                    cellsBusy = 0;
                }
                if (init)
                    break;
            }
            else if (U.compareAndSwapLong(this, BASECOUNT, v = baseCount, v + x))
                break;                          // Fall back on using base
        }
    }
2158
2159 /* ----------------Table Traversal -------------- */
2160
2161 /**
2162 * Encapsulates traversal for methods such as containsValue; also
2163 * serves as a base class for other iterators and bulk tasks.
2164 *
2165 * At each step, the iterator snapshots the key ("nextKey") and
2166 * value ("nextVal") of a valid node (i.e., one that, at point of
2167 * snapshot, has a non-null user value). Because val fields can
2168 * change (including to null, indicating deletion), field nextVal
2169 * might not be accurate at point of use, but still maintains the
2170 * weak consistency property of holding a value that was once
2171 * valid. To support iterator.remove, the nextKey field is not
2172 * updated (nulled out) when the iterator cannot advance.
2173 *
2174 * Exported iterators must track whether the iterator has advanced
2175 * (in hasNext vs next) (by setting/checking/nulling field
2176 * nextVal), and then extract key, value, or key-value pairs as
2177 * return values of next().
2178 *
2179 * Method advance visits once each still-valid node that was
2180 * reachable upon iterator construction. It might miss some that
2181 * were added to a bin after the bin was visited, which is OK wrt
2182 * consistency guarantees. Maintaining this property in the face
2183 * of possible ongoing resizes requires a fair amount of
2184 * bookkeeping state that is difficult to optimize away amidst
2185 * volatile accesses. Even so, traversal maintains reasonable
2186 * throughput.
2187 *
2188 * Normally, iteration proceeds bin-by-bin traversing lists.
2189 * However, if the table has been resized, then all future steps
2190 * must traverse both the bin at the current index as well as at
2191 * (index + baseSize); and so on for further resizings. To
2192 * paranoically cope with potential sharing by users of iterators
2193 * across threads, iteration terminates if a bounds checks fails
2194 * for a table read.
2195 *
2196 * Methods advanceKey and advanceValue are specializations of the
2197 * common cases of advance, relaying to the full version
2198 * otherwise. The forEachKey and forEachValue methods further
2199 * specialize, bypassing all incremental field updates in most cases.
2200 *
2201 * This class supports both Spliterator-based traversal and
2202 * CountedCompleter-based bulk tasks. The same "batch" field is
2203 * used, but in slightly different ways, in the two cases. For
2204 * Spliterators, it is a saturating (at Integer.MAX_VALUE)
2205 * estimate of element coverage. For CHM tasks, it is a pre-scaled
2206 * size that halves down to zero for leaf tasks, that is only
2207 * computed upon execution of the task. (Tasks can be submitted to
2208 * any pool, of any size, so we don't know scale factors until
2209 * running.)
2210 *
2211 * This class extends CountedCompleter to streamline parallel
2212 * iteration in bulk operations. This adds only a few fields of
2213 * space overhead, which is small enough in cases where it is not
2214 * needed to not worry about it. Because CountedCompleter is
2215 * Serializable, but iterators need not be, we need to add warning
2216 * suppressions.
2217 */
    @SuppressWarnings("serial") static class Traverser<K,V,R>
        extends CountedCompleter<R> {
        final ConcurrentHashMap<K,V> map;
        Node<V> next;        // the next entry to use
        K nextKey;           // cached key field of next
        V nextVal;           // cached val field of next
        Node<V>[] tab;       // current table; updated if resized
        int index;           // index of bin to use next
        int baseIndex;       // current index of initial table
        int baseLimit;       // index bound for initial table
        final int baseSize;  // initial table size
        int batch;           // split control

        /** Creates iterator for all entries in the table. */
        Traverser(ConcurrentHashMap<K,V> map) {
            this.map = map;
            Node<V>[] t = this.tab = map.table;
            baseLimit = baseSize = (t == null) ? 0 : t.length;
        }

        /** Task constructor */
        Traverser(ConcurrentHashMap<K,V> map, Traverser<K,V,?> it, int batch) {
            super(it);
            this.map = map;
            this.batch = batch; // -1 if unknown
            if (it == null) {
                Node<V>[] t = this.tab = map.table;
                baseLimit = baseSize = (t == null) ? 0 : t.length;
            }
            else { // split parent: take the upper half of its range
                this.tab = it.tab;
                this.baseSize = it.baseSize;
                int hi = this.baseLimit = it.baseLimit;
                it.baseLimit = this.index = this.baseIndex =
                    (hi + it.baseIndex) >>> 1;
            }
        }

        /** Spliterator constructor */
        Traverser(ConcurrentHashMap<K,V> map, Traverser<K,V,?> it) {
            super(it);
            this.map = map;
            if (it == null) {
                Node<V>[] t = this.tab = map.table;
                baseLimit = baseSize = (t == null) ? 0 : t.length;
                long n = map.sumCount();
                // batch is a saturating size estimate for spliterators
                batch = ((n > (long)Integer.MAX_VALUE) ? Integer.MAX_VALUE :
                         (int)n);
            }
            else { // split parent's range and halve both batch estimates
                this.tab = it.tab;
                this.baseSize = it.baseSize;
                int hi = this.baseLimit = it.baseLimit;
                it.baseLimit = this.index = this.baseIndex =
                    (hi + it.baseIndex) >>> 1;
                this.batch = it.batch >>>= 1;
            }
        }

        /**
         * Advances if possible, returning next valid value, or null if none.
         */
        @SuppressWarnings("unchecked") final V advance() {
            for (Node<V> e = next;;) {
                if (e != null)                  // advance past used/skipped node
                    e = next = e.next;
                while (e == null) {             // get to next non-null bin
                    Node<V>[] t; int i, n;      // must use locals in checks
                    if (baseIndex >= baseLimit || (t = tab) == null ||
                        (n = t.length) <= (i = index) || i < 0)
                        return nextVal = null;  // traversal exhausted
                    if ((e = next = tabAt(t, index)) != null && e.hash < 0) {
                        Object ek;              // special node: tree bin or forwarder
                        if ((ek = e.key) instanceof TreeBin)
                            e = ((TreeBin<V>)ek).first;
                        else {
                            tab = (Node<V>[])ek; // follow forwarder to new table
                            continue;           // restarts due to null val
                        }
                    }
                    if ((index += baseSize) >= n)
                        index = ++baseIndex;    // visit upper slots if present
                }
                nextKey = (K)e.key;
                if ((nextVal = e.val) != null)  // skip deleted or special nodes
                    return nextVal;
            }
        }

        /**
         * Common case version for value traversal; falls back to
         * advance() when the table has been resized or state is stale.
         */
        @SuppressWarnings("unchecked") final V advanceValue() {
            outer: for (Node<V> e = next;;) {
                if (e == null || (e = e.next) == null) {
                    Node<V>[] t; int i, len, n; Object ek;
                    if ((t = tab) == null ||
                        baseSize != (len = t.length) ||
                        len < (n = baseLimit) ||
                        baseIndex != (i = index))
                        break;                  // uncommon state: use advance()
                    do {
                        if (i < 0 || i >= n) {
                            index = baseIndex = n;
                            next = null;
                            return nextVal = null;
                        }
                        if ((e = tabAt(t, i)) != null && e.hash < 0) {
                            if ((ek = e.key) instanceof TreeBin)
                                e = ((TreeBin<V>)ek).first;
                            else {
                                index = baseIndex = i;
                                next = null;
                                tab = (Node<V>[])ek;
                                break outer;    // hit forwarder: use advance()
                            }
                        }
                        ++i;
                    } while (e == null);
                    index = baseIndex = i;
                }
                V v;
                K k = (K)e.key;
                if ((v = e.val) != null) {
                    nextVal = v;
                    nextKey = k;
                    next = e;
                    return v;
                }
            }
            return advance();
        }

        /**
         * Common case version for key traversal; falls back to
         * advance() when the table has been resized or state is stale.
         */
        @SuppressWarnings("unchecked") final K advanceKey() {
            outer: for (Node<V> e = next;;) {
                if (e == null || (e = e.next) == null) {
                    Node<V>[] t; int i, len, n; Object ek;
                    if ((t = tab) == null ||
                        baseSize != (len = t.length) ||
                        len < (n = baseLimit) ||
                        baseIndex != (i = index))
                        break;                  // uncommon state: use advance()
                    do {
                        if (i < 0 || i >= n) {
                            index = baseIndex = n;
                            next = null;
                            nextVal = null;
                            return null;
                        }
                        if ((e = tabAt(t, i)) != null && e.hash < 0) {
                            if ((ek = e.key) instanceof TreeBin)
                                e = ((TreeBin<V>)ek).first;
                            else {
                                index = baseIndex = i;
                                next = null;
                                tab = (Node<V>[])ek;
                                break outer;    // hit forwarder: use advance()
                            }
                        }
                        ++i;
                    } while (e == null);
                    index = baseIndex = i;
                }
                V v;
                K k = (K)e.key;
                if ((v = e.val) != null) {
                    nextVal = v;
                    nextKey = k;
                    next = e;
                    return k;
                }
            }
            return (advance() == null) ? null : nextKey;
        }

        /** Applies action to each live value, bypassing field updates in the common case. */
        @SuppressWarnings("unchecked") final void forEachValue(Consumer<? super V> action) {
            if (action == null) throw new NullPointerException();
            Node<V>[] t; int i, len, n;
            if ((t = tab) != null && baseSize == (len = t.length) &&
                len >= (n = baseLimit) && baseIndex == (i = index)) {
                index = baseIndex = n;
                nextVal = null;
                Node<V> e = next;
                next = null;
                if (e != null)
                    e = e.next;
                outer: for (;; e = e.next) {
                    V v; Object ek;
                    for (; e == null; ++i) {
                        if (i < 0 || i >= n)
                            return;
                        if ((e = tabAt(t, i)) != null && e.hash < 0) {
                            if ((ek = e.key) instanceof TreeBin)
                                e = ((TreeBin<V>)ek).first;
                            else {
                                index = baseIndex = i;
                                tab = (Node<V>[])ek;
                                break outer;    // resized: fall back below
                            }
                        }
                    }
                    if ((v = e.val) != null)
                        action.accept(v);
                }
            }
            V v;
            while ((v = advance()) != null)
                action.accept(v);
        }

        /** Applies action to each key of a live mapping, as for forEachValue. */
        @SuppressWarnings("unchecked") final void forEachKey(Consumer<? super K> action) {
            if (action == null) throw new NullPointerException();
            Node<V>[] t; int i, len, n;
            if ((t = tab) != null && baseSize == (len = t.length) &&
                len >= (n = baseLimit) && baseIndex == (i = index)) {
                index = baseIndex = n;
                nextVal = null;
                Node<V> e = next;
                next = null;
                if (e != null)
                    e = e.next;
                outer: for (;; e = e.next) {
                    for (; e == null; ++i) {
                        if (i < 0 || i >= n)
                            return;
                        if ((e = tabAt(t, i)) != null && e.hash < 0) {
                            Object ek;
                            if ((ek = e.key) instanceof TreeBin)
                                e = ((TreeBin<V>)ek).first;
                            else {
                                index = baseIndex = i;
                                tab = (Node<V>[])ek;
                                break outer;    // resized: fall back below
                            }
                        }
                    }
                    Object k = e.key;
                    if (e.val != null)
                        action.accept((K)k);
                }
            }
            while (advance() != null)
                action.accept(nextKey);
        }

        /** Removes the last returned entry (nextKey is kept for this purpose). */
        public final void remove() {
            K k = nextKey;
            if (k == null && (advanceValue() == null || (k = nextKey) == null))
                throw new IllegalStateException();
            map.internalReplace(k, null, null);
        }

        public final boolean hasNext() {
            return nextVal != null || advanceValue() != null;
        }

        public final boolean hasMoreElements() { return hasNext(); }

        public void compute() { } // default no-op CountedCompleter body

        public long estimateSize() { return batch; }

        /**
         * Returns a batch value > 0 if this task should (and must) be
         * split, if so, adding to pending count, and in any case
         * updating batch value. The initial batch value is approx
         * exp2 of the number of times (minus one) to split task by
         * two before executing leaf action. This value is faster to
         * compute and more convenient to use as a guide to splitting
         * than is the depth, since it is used while dividing by two
         * anyway.
         */
        final int preSplit() {
            int b; ForkJoinPool pool;
            if ((b = batch) < 0) { // force initialization
                int sp = (((pool = getPool()) == null) ?
                          ForkJoinPool.getCommonPoolParallelism() :
                          pool.getParallelism()) << 3; // slack of 8
                long n = map.sumCount();
                b = (n <= 0L) ? 0 : (n < (long)sp) ? (int)n : sp;
            }
            b = (b <= 1 || baseIndex >= baseLimit) ? 0 : (b >>> 1);
            if ((batch = b) > 0)
                addToPendingCount(1);
            return b;
        }
    }
2508
2509 /* ---------------- Public operations -------------- */
2510
    /**
     * Creates a new, empty map with the default initial table size (16).
     */
    public ConcurrentHashMap() {
        // Nothing to do: the table is created lazily (sizeCtl left at 0).
    }
2516
2517 /**
2518 * Creates a new, empty map with an initial table size
2519 * accommodating the specified number of elements without the need
2520 * to dynamically resize.
2521 *
2522 * @param initialCapacity The implementation performs internal
2523 * sizing to accommodate this many elements.
2524 * @throws IllegalArgumentException if the initial capacity of
2525 * elements is negative
2526 */
2527 public ConcurrentHashMap(int initialCapacity) {
2528 if (initialCapacity < 0)
2529 throw new IllegalArgumentException();
2530 int cap = ((initialCapacity >= (MAXIMUM_CAPACITY >>> 1)) ?
2531 MAXIMUM_CAPACITY :
2532 tableSizeFor(initialCapacity + (initialCapacity >>> 1) + 1));
2533 this.sizeCtl = cap;
2534 }
2535
    /**
     * Creates a new map with the same mappings as the given map.
     *
     * @param m the map
     */
    public ConcurrentHashMap(Map<? extends K, ? extends V> m) {
        this.sizeCtl = DEFAULT_CAPACITY; // table grows as needed during copy
        internalPutAll(m);
    }

    /**
     * Creates a new, empty map with an initial table size based on
     * the given number of elements ({@code initialCapacity}) and
     * initial table density ({@code loadFactor}).
     *
     * @param initialCapacity the initial capacity. The implementation
     * performs internal sizing to accommodate this many elements,
     * given the specified load factor.
     * @param loadFactor the load factor (table density) for
     * establishing the initial table size
     * @throws IllegalArgumentException if the initial capacity of
     * elements is negative or the load factor is nonpositive
     *
     * @since 1.6
     */
    public ConcurrentHashMap(int initialCapacity, float loadFactor) {
        this(initialCapacity, loadFactor, 1); // concurrencyLevel of 1
    }
2564
2565 /**
2566 * Creates a new, empty map with an initial table size based on
2567 * the given number of elements ({@code initialCapacity}), table
2568 * density ({@code loadFactor}), and number of concurrently
2569 * updating threads ({@code concurrencyLevel}).
2570 *
2571 * @param initialCapacity the initial capacity. The implementation
2572 * performs internal sizing to accommodate this many elements,
2573 * given the specified load factor.
2574 * @param loadFactor the load factor (table density) for
2575 * establishing the initial table size
2576 * @param concurrencyLevel the estimated number of concurrently
2577 * updating threads. The implementation may use this value as
2578 * a sizing hint.
2579 * @throws IllegalArgumentException if the initial capacity is
2580 * negative or the load factor or concurrencyLevel are
2581 * nonpositive
2582 */
2583 public ConcurrentHashMap(int initialCapacity,
2584 float loadFactor, int concurrencyLevel) {
2585 if (!(loadFactor > 0.0f) || initialCapacity < 0 || concurrencyLevel <= 0)
2586 throw new IllegalArgumentException();
2587 if (initialCapacity < concurrencyLevel) // Use at least as many bins
2588 initialCapacity = concurrencyLevel; // as estimated threads
2589 long size = (long)(1.0 + (long)initialCapacity / loadFactor);
2590 int cap = (size >= (long)MAXIMUM_CAPACITY) ?
2591 MAXIMUM_CAPACITY : tableSizeFor((int)size);
2592 this.sizeCtl = cap;
2593 }
2594
    /**
     * Creates a new {@link Set} backed by a ConcurrentHashMap
     * from the given type to {@code Boolean.TRUE}.
     *
     * @return the new set
     */
    public static <K> KeySetView<K,Boolean> newKeySet() {
        // Every member maps to Boolean.TRUE in the backing map.
        return new KeySetView<K,Boolean>
            (new ConcurrentHashMap<K,Boolean>(), Boolean.TRUE);
    }

    /**
     * Creates a new {@link Set} backed by a ConcurrentHashMap
     * from the given type to {@code Boolean.TRUE}.
     *
     * @param initialCapacity The implementation performs internal
     * sizing to accommodate this many elements.
     * @throws IllegalArgumentException if the initial capacity of
     * elements is negative
     * @return the new set
     */
    public static <K> KeySetView<K,Boolean> newKeySet(int initialCapacity) {
        return new KeySetView<K,Boolean>
            (new ConcurrentHashMap<K,Boolean>(initialCapacity), Boolean.TRUE);
    }
2620
    /**
     * {@inheritDoc}
     */
    public boolean isEmpty() {
        // sumCount can be transiently negative under concurrent
        // updates, so treat any non-positive sum as empty.
        return sumCount() <= 0L; // ignore transient negative values
    }
2627
2628 /**
2629 * {@inheritDoc}
2630 */
2631 public int size() {
2632 long n = sumCount();
2633 return ((n < 0L) ? 0 :
2634 (n > (long)Integer.MAX_VALUE) ? Integer.MAX_VALUE :
2635 (int)n);
2636 }
2637
2638 /**
2639 * Returns the number of mappings. This method should be used
2640 * instead of {@link #size} because a ConcurrentHashMap may
2641 * contain more mappings than can be represented as an int. The
2642 * value returned is an estimate; the actual count may differ if
2643 * there are concurrent insertions or removals.
2644 *
2645 * @return the number of mappings
2646 */
2647 public long mappingCount() {
2648 long n = sumCount();
2649 return (n < 0L) ? 0L : n; // ignore transient negative values
2650 }
2651
    /**
     * Returns the value to which the specified key is mapped,
     * or {@code null} if this map contains no mapping for the key.
     *
     * <p>More formally, if this map contains a mapping from a key
     * {@code k} to a value {@code v} such that {@code key.equals(k)},
     * then this method returns {@code v}; otherwise it returns
     * {@code null}. (There can be at most one such mapping.)
     *
     * @throws NullPointerException if the specified key is null
     */
    public V get(Object key) {
        return internalGet(key); // lock-free read path
    }
2666
2667 /**
2668 * Returns the value to which the specified key is mapped,
2669 * or the given defaultValue if this map contains no mapping for the key.
2670 *
2671 * @param key the key
2672 * @param defaultValue the value to return if this map contains
2673 * no mapping for the given key
2674 * @return the mapping for the key, if present; else the defaultValue
2675 * @throws NullPointerException if the specified key is null
2676 */
2677 public V getOrDefault(Object key, V defaultValue) {
2678 V v;
2679 return (v = internalGet(key)) == null ? defaultValue : v;
2680 }
2681
    /**
     * Tests if the specified object is a key in this table.
     *
     * @param key possible key
     * @return {@code true} if and only if the specified object
     * is a key in this table, as determined by the
     * {@code equals} method; {@code false} otherwise
     * @throws NullPointerException if the specified key is null
     */
    public boolean containsKey(Object key) {
        // Mapped values are never null, so null unambiguously means "absent".
        return internalGet(key) != null;
    }
2694
2695 /**
2696 * Returns {@code true} if this map maps one or more keys to the
2697 * specified value. Note: This method may require a full traversal
2698 * of the map, and is much slower than method {@code containsKey}.
2699 *
2700 * @param value value whose presence in this map is to be tested
2701 * @return {@code true} if this map maps one or more keys to the
2702 * specified value
2703 * @throws NullPointerException if the specified value is null
2704 */
2705 public boolean containsValue(Object value) {
2706 if (value == null)
2707 throw new NullPointerException();
2708 V v;
2709 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
2710 while ((v = it.advanceValue()) != null) {
2711 if (v == value || value.equals(v))
2712 return true;
2713 }
2714 return false;
2715 }
2716
    /**
     * Legacy method testing if some key maps into the specified value
     * in this table. This method is identical in functionality to
     * {@link #containsValue(Object)}, and exists solely to ensure
     * full compatibility with class {@link java.util.Hashtable},
     * which supported this method prior to introduction of the
     * Java Collections framework.
     *
     * @param value a value to search for
     * @return {@code true} if and only if some key maps to the
     * {@code value} argument in this table as
     * determined by the {@code equals} method;
     * {@code false} otherwise
     * @throws NullPointerException if the specified value is null
     */
    @Deprecated public boolean contains(Object value) {
        return containsValue(value); // retained only for Hashtable compatibility
    }
2735
    /**
     * Maps the specified key to the specified value in this table.
     * Neither the key nor the value can be null.
     *
     * <p>The value can be retrieved by calling the {@code get} method
     * with a key that is equal to the original key.
     *
     * @param key key with which the specified value is to be associated
     * @param value value to be associated with the specified key
     * @return the previous value associated with {@code key}, or
     *         {@code null} if there was no mapping for {@code key}
     * @throws NullPointerException if the specified key or value is null
     */
    public V put(K key, V value) {
        return internalPut(key, value, false); // false: replace any existing value
    }

    /**
     * {@inheritDoc}
     *
     * @return the previous value associated with the specified key,
     *         or {@code null} if there was no mapping for the key
     * @throws NullPointerException if the specified key or value is null
     */
    public V putIfAbsent(K key, V value) {
        return internalPut(key, value, true); // true: keep existing value if present
    }

    /**
     * Copies all of the mappings from the specified map to this one.
     * These mappings replace any mappings that this map had for any of the
     * keys currently in the specified map.
     *
     * @param m mappings to be stored in this map
     */
    public void putAll(Map<? extends K, ? extends V> m) {
        internalPutAll(m);
    }
2774
    /**
     * If the specified key is not already associated with a value (or
     * is mapped to {@code null}), attempts to compute its value using
     * the given mapping function and enters it into this map unless
     * {@code null}. The entire method invocation is performed
     * atomically, so the function is applied at most once per key.
     * Some attempted update operations on this map by other threads
     * may be blocked while computation is in progress, so the
     * computation should be short and simple, and must not attempt to
     * update any other mappings of this Map.
     *
     * @param key key with which the specified value is to be associated
     * @param mappingFunction the function to compute a value
     * @return the current (existing or computed) value associated with
     *         the specified key, or null if the computed value is null
     * @throws NullPointerException if the specified key or mappingFunction
     *         is null
     * @throws IllegalStateException if the computation detectably
     *         attempts a recursive update to this map that would
     *         otherwise never complete
     * @throws RuntimeException or Error if the mappingFunction does so,
     *         in which case the mapping is left unestablished
     */
    public V computeIfAbsent
        (K key, Function<? super K, ? extends V> mappingFunction) {
        return internalComputeIfAbsent(key, mappingFunction);
    }

    /**
     * If the value for the specified key is present and non-null,
     * attempts to compute a new mapping given the key and its current
     * mapped value. The entire method invocation is performed
     * atomically. Some attempted update operations on this map by
     * other threads may be blocked while computation is in progress,
     * so the computation should be short and simple, and must not
     * attempt to update any other mappings of this Map.
     *
     * @param key key with which a value may be associated
     * @param remappingFunction the function to compute a value
     * @return the new value associated with the specified key, or null if none
     * @throws NullPointerException if the specified key or remappingFunction
     *         is null
     * @throws IllegalStateException if the computation detectably
     *         attempts a recursive update to this map that would
     *         otherwise never complete
     * @throws RuntimeException or Error if the remappingFunction does so,
     *         in which case the mapping is unchanged
     */
    public V computeIfPresent
        (K key, BiFunction<? super K, ? super V, ? extends V> remappingFunction) {
        return internalCompute(key, true, remappingFunction); // true: only if present
    }

    /**
     * Attempts to compute a mapping for the specified key and its
     * current mapped value (or {@code null} if there is no current
     * mapping). The entire method invocation is performed atomically.
     * Some attempted update operations on this map by other threads
     * may be blocked while computation is in progress, so the
     * computation should be short and simple, and must not attempt to
     * update any other mappings of this Map.
     *
     * @param key key with which the specified value is to be associated
     * @param remappingFunction the function to compute a value
     * @return the new value associated with the specified key, or null if none
     * @throws NullPointerException if the specified key or remappingFunction
     *         is null
     * @throws IllegalStateException if the computation detectably
     *         attempts a recursive update to this map that would
     *         otherwise never complete
     * @throws RuntimeException or Error if the remappingFunction does so,
     *         in which case the mapping is unchanged
     */
    public V compute
        (K key, BiFunction<? super K, ? super V, ? extends V> remappingFunction) {
        return internalCompute(key, false, remappingFunction); // false: present or not
    }

    /**
     * If the specified key is not already associated with a
     * (non-null) value, associates it with the given value.
     * Otherwise, replaces the value with the results of the given
     * remapping function, or removes if {@code null}. The entire
     * method invocation is performed atomically. Some attempted
     * update operations on this map by other threads may be blocked
     * while computation is in progress, so the computation should be
     * short and simple, and must not attempt to update any other
     * mappings of this Map.
     *
     * @param key key with which the specified value is to be associated
     * @param value the value to use if absent
     * @param remappingFunction the function to recompute a value if present
     * @return the new value associated with the specified key, or null if none
     * @throws NullPointerException if the specified key or the
     *         remappingFunction is null
     * @throws RuntimeException or Error if the remappingFunction does so,
     *         in which case the mapping is unchanged
     */
    public V merge
        (K key, V value,
         BiFunction<? super V, ? super V, ? extends V> remappingFunction) {
        return internalMerge(key, value, remappingFunction);
    }
2878
2879 /**
2880 * Removes the key (and its corresponding value) from this map.
2881 * This method does nothing if the key is not in the map.
2882 *
2883 * @param key the key that needs to be removed
2884 * @return the previous value associated with {@code key}, or
2885 * {@code null} if there was no mapping for {@code key}
2886 * @throws NullPointerException if the specified key is null
2887 */
2888 public V remove(Object key) {
2889 return internalReplace(key, null, null);
2890 }
2891
2892 /**
2893 * {@inheritDoc}
2894 *
2895 * @throws NullPointerException if the specified key is null
2896 */
2897 public boolean remove(Object key, Object value) {
2898 if (key == null)
2899 throw new NullPointerException();
2900 return value != null && internalReplace(key, null, value) != null;
2901 }
2902
2903 /**
2904 * {@inheritDoc}
2905 *
2906 * @throws NullPointerException if any of the arguments are null
2907 */
2908 public boolean replace(K key, V oldValue, V newValue) {
2909 if (key == null || oldValue == null || newValue == null)
2910 throw new NullPointerException();
2911 return internalReplace(key, newValue, oldValue) != null;
2912 }
2913
2914 /**
2915 * {@inheritDoc}
2916 *
2917 * @return the previous value associated with the specified key,
2918 * or {@code null} if there was no mapping for the key
2919 * @throws NullPointerException if the specified key or value is null
2920 */
2921 public V replace(K key, V value) {
2922 if (key == null || value == null)
2923 throw new NullPointerException();
2924 return internalReplace(key, value, null);
2925 }
2926
2927 /**
2928 * Removes all of the mappings from this map.
2929 */
2930 public void clear() {
2931 internalClear();
2932 }
2933
2934 /**
2935 * Returns a {@link Set} view of the keys contained in this map.
2936 * The set is backed by the map, so changes to the map are
2937 * reflected in the set, and vice-versa.
2938 *
2939 * @return the set view
2940 */
2941 public KeySetView<K,V> keySet() {
2942 KeySetView<K,V> ks = keySet;
2943 return (ks != null) ? ks : (keySet = new KeySetView<K,V>(this, null));
2944 }
2945
2946 /**
2947 * Returns a {@link Set} view of the keys in this map, using the
2948 * given common mapped value for any additions (i.e., {@link
2949 * Collection#add} and {@link Collection#addAll(Collection)}).
2950 * This is of course only appropriate if it is acceptable to use
2951 * the same value for all additions from this view.
2952 *
2953 * @param mappedValue the mapped value to use for any additions
2954 * @return the set view
2955 * @throws NullPointerException if the mappedValue is null
2956 */
2957 public KeySetView<K,V> keySet(V mappedValue) {
2958 if (mappedValue == null)
2959 throw new NullPointerException();
2960 return new KeySetView<K,V>(this, mappedValue);
2961 }
2962
2963 /**
2964 * Returns a {@link Collection} view of the values contained in this map.
2965 * The collection is backed by the map, so changes to the map are
2966 * reflected in the collection, and vice-versa.
2967 *
2968 * @return the collection view
2969 */
2970 public ValuesView<K,V> values() {
2971 ValuesView<K,V> vs = values;
2972 return (vs != null) ? vs : (values = new ValuesView<K,V>(this));
2973 }
2974
2975 /**
2976 * Returns a {@link Set} view of the mappings contained in this map.
2977 * The set is backed by the map, so changes to the map are
2978 * reflected in the set, and vice-versa. The set supports element
2979 * removal, which removes the corresponding mapping from the map,
2980 * via the {@code Iterator.remove}, {@code Set.remove},
2981 * {@code removeAll}, {@code retainAll}, and {@code clear}
2982 * operations. It does not support the {@code add} or
2983 * {@code addAll} operations.
2984 *
2985 * <p>The view's {@code iterator} is a "weakly consistent" iterator
2986 * that will never throw {@link ConcurrentModificationException},
2987 * and guarantees to traverse elements as they existed upon
2988 * construction of the iterator, and may (but is not guaranteed to)
2989 * reflect any modifications subsequent to construction.
2990 *
2991 * @return the set view
2992 */
2993 public Set<Map.Entry<K,V>> entrySet() {
2994 EntrySetView<K,V> es = entrySet;
2995 return (es != null) ? es : (entrySet = new EntrySetView<K,V>(this));
2996 }
2997
2998 /**
2999 * Returns an enumeration of the keys in this table.
3000 *
3001 * @return an enumeration of the keys in this table
3002 * @see #keySet()
3003 */
3004 public Enumeration<K> keys() {
3005 return new KeyIterator<K,V>(this);
3006 }
3007
3008 /**
3009 * Returns an enumeration of the values in this table.
3010 *
3011 * @return an enumeration of the values in this table
3012 * @see #values()
3013 */
3014 public Enumeration<V> elements() {
3015 return new ValueIterator<K,V>(this);
3016 }
3017
3018 /**
3019 * Returns the hash code value for this {@link Map}, i.e.,
3020 * the sum of, for each key-value pair in the map,
3021 * {@code key.hashCode() ^ value.hashCode()}.
3022 *
3023 * @return the hash code value for this map
3024 */
3025 public int hashCode() {
3026 int h = 0;
3027 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3028 V v;
3029 while ((v = it.advanceValue()) != null) {
3030 h += it.nextKey.hashCode() ^ v.hashCode();
3031 }
3032 return h;
3033 }
3034
3035 /**
3036 * Returns a string representation of this map. The string
3037 * representation consists of a list of key-value mappings (in no
3038 * particular order) enclosed in braces ("{@code {}}"). Adjacent
3039 * mappings are separated by the characters {@code ", "} (comma
3040 * and space). Each key-value mapping is rendered as the key
3041 * followed by an equals sign ("{@code =}") followed by the
3042 * associated value.
3043 *
3044 * @return a string representation of this map
3045 */
3046 public String toString() {
3047 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3048 StringBuilder sb = new StringBuilder();
3049 sb.append('{');
3050 V v;
3051 if ((v = it.advanceValue()) != null) {
3052 for (;;) {
3053 K k = it.nextKey;
3054 sb.append(k == this ? "(this Map)" : k);
3055 sb.append('=');
3056 sb.append(v == this ? "(this Map)" : v);
3057 if ((v = it.advanceValue()) == null)
3058 break;
3059 sb.append(',').append(' ');
3060 }
3061 }
3062 return sb.append('}').toString();
3063 }
3064
3065 /**
3066 * Compares the specified object with this map for equality.
3067 * Returns {@code true} if the given object is a map with the same
3068 * mappings as this map. This operation may return misleading
3069 * results if either map is concurrently modified during execution
3070 * of this method.
3071 *
3072 * @param o object to be compared for equality with this map
3073 * @return {@code true} if the specified object is equal to this map
3074 */
3075 public boolean equals(Object o) {
3076 if (o != this) {
3077 if (!(o instanceof Map))
3078 return false;
3079 Map<?,?> m = (Map<?,?>) o;
3080 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3081 V val;
3082 while ((val = it.advanceValue()) != null) {
3083 Object v = m.get(it.nextKey);
3084 if (v == null || (v != val && !v.equals(val)))
3085 return false;
3086 }
3087 for (Map.Entry<?,?> e : m.entrySet()) {
3088 Object mk, mv, v;
3089 if ((mk = e.getKey()) == null ||
3090 (mv = e.getValue()) == null ||
3091 (v = internalGet(mk)) == null ||
3092 (mv != v && !mv.equals(v)))
3093 return false;
3094 }
3095 }
3096 return true;
3097 }
3098
3099 /* ----------------Iterators -------------- */
3100
    /**
     * Key-view traversal: serves simultaneously as an {@link Iterator},
     * a legacy {@link Enumeration}, and a splittable {@link Spliterator}
     * over the map's keys.
     */
    @SuppressWarnings("serial") static final class KeyIterator<K,V>
        extends Traverser<K,V,Object>
        implements Spliterator<K>, Iterator<K>, Enumeration<K> {
        KeyIterator(ConcurrentHashMap<K,V> map) { super(map); }
        // Split constructor: shares traversal bounds with a parent traverser.
        KeyIterator(ConcurrentHashMap<K,V> map, Traverser<K,V,Object> it) {
            super(map, it);
        }
        // Splits off part of the remaining range, or returns null when
        // the remaining range is too small to split further.
        public Spliterator<K> trySplit() {
            return (baseLimit - baseIndex <= 1) ? null :
                new KeyIterator<K,V>(map, this);
        }
        public final K next() {
            K k;
            // If nextVal is null the cached element was consumed (or none
            // was fetched yet): advance; otherwise reuse the cached key.
            if ((k = (nextVal == null) ? advanceKey() : nextKey) == null)
                throw new NoSuchElementException();
            nextVal = null; // mark the element as consumed
            return k;
        }

        public final K nextElement() { return next(); }

        public Iterator<K> iterator() { return this; }

        public void forEachRemaining(Consumer<? super K> action) {
            forEachKey(action);
        }

        public boolean tryAdvance(Consumer<? super K> block) {
            if (block == null) throw new NullPointerException();
            K k;
            if ((k = advanceKey()) == null)
                return false;
            block.accept(k);
            return true;
        }

        public int characteristics() {
            // Keys are unique and never null; the source is concurrent.
            return Spliterator.DISTINCT | Spliterator.CONCURRENT |
                Spliterator.NONNULL;
        }

    }
3143
    /**
     * Value-view traversal: serves simultaneously as an {@link Iterator},
     * a legacy {@link Enumeration}, and a splittable {@link Spliterator}
     * over the map's values.
     */
    @SuppressWarnings("serial") static final class ValueIterator<K,V>
        extends Traverser<K,V,Object>
        implements Spliterator<V>, Iterator<V>, Enumeration<V> {
        ValueIterator(ConcurrentHashMap<K,V> map) { super(map); }
        // Split constructor: shares traversal bounds with a parent traverser.
        ValueIterator(ConcurrentHashMap<K,V> map, Traverser<K,V,Object> it) {
            super(map, it);
        }
        // Splits off part of the remaining range, or returns null when
        // the remaining range is too small to split further.
        public Spliterator<V> trySplit() {
            return (baseLimit - baseIndex <= 1) ? null :
                new ValueIterator<K,V>(map, this);
        }

        public final V next() {
            V v;
            // Use the cached value if present, else advance the traversal.
            if ((v = nextVal) == null && (v = advanceValue()) == null)
                throw new NoSuchElementException();
            nextVal = null; // mark the element as consumed
            return v;
        }

        public final V nextElement() { return next(); }

        public Iterator<V> iterator() { return this; }

        public void forEachRemaining(Consumer<? super V> action) {
            forEachValue(action);
        }

        public boolean tryAdvance(Consumer<? super V> block) {
            V v;
            if (block == null) throw new NullPointerException();
            if ((v = advanceValue()) == null)
                return false;
            block.accept(v);
            return true;
        }

        public int characteristics() {
            // Values may repeat (no DISTINCT) but are never null.
            return Spliterator.CONCURRENT | Spliterator.NONNULL;
        }
    }
3185
    /**
     * Entry-view traversal: serves simultaneously as an {@link Iterator}
     * and a splittable {@link Spliterator} over the map's entries.
     * {@code next()} returns write-through {@link MapEntry} objects,
     * while the Spliterator methods hand out immutable snapshot entries
     * (see {@link #entryFor}).
     */
    @SuppressWarnings("serial") static final class EntryIterator<K,V>
        extends Traverser<K,V,Object>
        implements Spliterator<Map.Entry<K,V>>, Iterator<Map.Entry<K,V>> {
        EntryIterator(ConcurrentHashMap<K,V> map) { super(map); }
        // Split constructor: shares traversal bounds with a parent traverser.
        EntryIterator(ConcurrentHashMap<K,V> map, Traverser<K,V,Object> it) {
            super(map, it);
        }
        // Splits off part of the remaining range, or returns null when
        // the remaining range is too small to split further.
        public Spliterator<Map.Entry<K,V>> trySplit() {
            return (baseLimit - baseIndex <= 1) ? null :
                new EntryIterator<K,V>(map, this);
        }

        public final Map.Entry<K,V> next() {
            V v;
            // Use the cached value if present, else advance the traversal.
            if ((v = nextVal) == null && (v = advanceValue()) == null)
                throw new NoSuchElementException();
            K k = nextKey;
            nextVal = null; // mark the element as consumed
            // Write-through entry: setValue() updates the backing map.
            return new MapEntry<K,V>(k, v, map);
        }

        public Iterator<Map.Entry<K,V>> iterator() { return this; }

        public void forEachRemaining(Consumer<? super Map.Entry<K,V>> action) {
            if (action == null) throw new NullPointerException();
            V v;
            while ((v = advanceValue()) != null)
                action.accept(entryFor(nextKey, v)); // snapshot entry
        }

        public boolean tryAdvance(Consumer<? super Map.Entry<K,V>> block) {
            V v;
            if (block == null) throw new NullPointerException();
            if ((v = advanceValue()) == null)
                return false;
            block.accept(entryFor(nextKey, v)); // snapshot entry
            return true;
        }

        public int characteristics() {
            // Entries are unique per key and never null; source is concurrent.
            return Spliterator.DISTINCT | Spliterator.CONCURRENT |
                Spliterator.NONNULL;
        }
    }
3230
3231 /**
3232 * Exported Entry for iterators
3233 */
3234 static final class MapEntry<K,V> implements Map.Entry<K,V> {
3235 final K key; // non-null
3236 V val; // non-null
3237 final ConcurrentHashMap<K,V> map;
3238 MapEntry(K key, V val, ConcurrentHashMap<K,V> map) {
3239 this.key = key;
3240 this.val = val;
3241 this.map = map;
3242 }
3243 public final K getKey() { return key; }
3244 public final V getValue() { return val; }
3245 public final int hashCode() { return key.hashCode() ^ val.hashCode(); }
3246 public final String toString(){ return key + "=" + val; }
3247
3248 public final boolean equals(Object o) {
3249 Object k, v; Map.Entry<?,?> e;
3250 return ((o instanceof Map.Entry) &&
3251 (k = (e = (Map.Entry<?,?>)o).getKey()) != null &&
3252 (v = e.getValue()) != null &&
3253 (k == key || k.equals(key)) &&
3254 (v == val || v.equals(val)));
3255 }
3256
3257 /**
3258 * Sets our entry's value and writes through to the map. The
3259 * value to return is somewhat arbitrary here. Since we do not
3260 * necessarily track asynchronous changes, the most recent
3261 * "previous" value could be different from what we return (or
3262 * could even have been removed in which case the put will
3263 * re-establish). We do not and cannot guarantee more.
3264 */
3265 public final V setValue(V value) {
3266 if (value == null) throw new NullPointerException();
3267 V v = val;
3268 val = value;
3269 map.put(key, value);
3270 return v;
3271 }
3272 }
3273
3274 /**
3275 * Returns exportable snapshot entry for the given key and value
3276 * when write-through can't or shouldn't be used.
3277 */
3278 static <K,V> AbstractMap.SimpleEntry<K,V> entryFor(K k, V v) {
3279 return new AbstractMap.SimpleEntry<K,V>(k, v);
3280 }
3281
3282 /* ---------------- Serialization Support -------------- */
3283
3284 /**
3285 * Stripped-down version of helper class used in previous version,
3286 * declared for the sake of serialization compatibility
3287 */
3288 static class Segment<K,V> extends ReentrantLock implements Serializable {
3289 private static final long serialVersionUID = 2249069246763182397L;
3290 final float loadFactor;
3291 Segment(float lf) { this.loadFactor = lf; }
3292 }
3293
3294 /**
3295 * Saves the state of the {@code ConcurrentHashMap} instance to a
3296 * stream (i.e., serializes it).
3297 * @param s the stream
3298 * @serialData
3299 * the key (Object) and value (Object)
3300 * for each key-value mapping, followed by a null pair.
3301 * The key-value mappings are emitted in no particular order.
3302 */
3303 @SuppressWarnings("unchecked") private void writeObject
3304 (java.io.ObjectOutputStream s)
3305 throws java.io.IOException {
3306 // For serialization compatibility
3307 Segment<K,V>[] segments = (Segment<K,V>[])
3308 new Segment<?,?>[DEFAULT_CONCURRENCY_LEVEL];
3309 for (int i = 0; i < segments.length; ++i)
3310 segments[i] = new Segment<K,V>(LOAD_FACTOR);
3311 s.putFields().put("segments", segments);
3312 s.writeFields();
3313 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3314 V v;
3315 while ((v = it.advanceValue()) != null) {
3316 s.writeObject(it.nextKey);
3317 s.writeObject(v);
3318 }
3319 s.writeObject(null);
3320 s.writeObject(null);
3321 segments = null; // throw away
3322 }
3323
3324 /**
3325 * Reconstitutes the instance from a stream (that is, deserializes it).
3326 * @param s the stream
3327 */
3328 @SuppressWarnings("unchecked") private void readObject
3329 (java.io.ObjectInputStream s)
3330 throws java.io.IOException, ClassNotFoundException {
3331 s.defaultReadObject();
3332
3333 // Create all nodes, then place in table once size is known
3334 long size = 0L;
3335 Node<V> p = null;
3336 for (;;) {
3337 K k = (K) s.readObject();
3338 V v = (V) s.readObject();
3339 if (k != null && v != null) {
3340 int h = spread(k.hashCode());
3341 p = new Node<V>(h, k, v, p);
3342 ++size;
3343 }
3344 else
3345 break;
3346 }
3347 if (p != null) {
3348 boolean init = false;
3349 int n;
3350 if (size >= (long)(MAXIMUM_CAPACITY >>> 1))
3351 n = MAXIMUM_CAPACITY;
3352 else {
3353 int sz = (int)size;
3354 n = tableSizeFor(sz + (sz >>> 1) + 1);
3355 }
3356 int sc = sizeCtl;
3357 boolean collide = false;
3358 if (n > sc &&
3359 U.compareAndSwapInt(this, SIZECTL, sc, -1)) {
3360 try {
3361 if (table == null) {
3362 init = true;
3363 @SuppressWarnings("rawtypes") Node[] rt = new Node[n];
3364 Node<V>[] tab = (Node<V>[])rt;
3365 int mask = n - 1;
3366 while (p != null) {
3367 int j = p.hash & mask;
3368 Node<V> next = p.next;
3369 Node<V> q = p.next = tabAt(tab, j);
3370 setTabAt(tab, j, p);
3371 if (!collide && q != null && q.hash == p.hash)
3372 collide = true;
3373 p = next;
3374 }
3375 table = tab;
3376 addCount(size, -1);
3377 sc = n - (n >>> 2);
3378 }
3379 } finally {
3380 sizeCtl = sc;
3381 }
3382 if (collide) { // rescan and convert to TreeBins
3383 Node<V>[] tab = table;
3384 for (int i = 0; i < tab.length; ++i) {
3385 int c = 0;
3386 for (Node<V> e = tabAt(tab, i); e != null; e = e.next) {
3387 if (++c > TREE_THRESHOLD &&
3388 (e.key instanceof Comparable)) {
3389 replaceWithTreeBin(tab, i, e.key);
3390 break;
3391 }
3392 }
3393 }
3394 }
3395 }
3396 if (!init) { // Can only happen if unsafely published.
3397 while (p != null) {
3398 internalPut((K)p.key, p.val, false);
3399 p = p.next;
3400 }
3401 }
3402 }
3403 }
3404
3405 // -------------------------------------------------------
3406
3407 // Sequential bulk operations
3408
3409 /**
3410 * Performs the given action for each (key, value).
3411 *
3412 * @param action the action
3413 */
3414 public void forEachSequentially
3415 (BiConsumer<? super K, ? super V> action) {
3416 if (action == null) throw new NullPointerException();
3417 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3418 V v;
3419 while ((v = it.advanceValue()) != null)
3420 action.accept(it.nextKey, v);
3421 }
3422
3423 /**
3424 * Performs the given action for each non-null transformation
3425 * of each (key, value).
3426 *
3427 * @param transformer a function returning the transformation
3428 * for an element, or null if there is no transformation (in
3429 * which case the action is not applied)
3430 * @param action the action
3431 */
3432 public <U> void forEachSequentially
3433 (BiFunction<? super K, ? super V, ? extends U> transformer,
3434 Consumer<? super U> action) {
3435 if (transformer == null || action == null)
3436 throw new NullPointerException();
3437 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3438 V v; U u;
3439 while ((v = it.advanceValue()) != null) {
3440 if ((u = transformer.apply(it.nextKey, v)) != null)
3441 action.accept(u);
3442 }
3443 }
3444
3445 /**
3446 * Returns a non-null result from applying the given search
3447 * function on each (key, value), or null if none.
3448 *
3449 * @param searchFunction a function returning a non-null
3450 * result on success, else null
3451 * @return a non-null result from applying the given search
3452 * function on each (key, value), or null if none
3453 */
3454 public <U> U searchSequentially
3455 (BiFunction<? super K, ? super V, ? extends U> searchFunction) {
3456 if (searchFunction == null) throw new NullPointerException();
3457 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3458 V v; U u;
3459 while ((v = it.advanceValue()) != null) {
3460 if ((u = searchFunction.apply(it.nextKey, v)) != null)
3461 return u;
3462 }
3463 return null;
3464 }
3465
3466 /**
3467 * Returns the result of accumulating the given transformation
3468 * of all (key, value) pairs using the given reducer to
3469 * combine values, or null if none.
3470 *
3471 * @param transformer a function returning the transformation
3472 * for an element, or null if there is no transformation (in
3473 * which case it is not combined)
3474 * @param reducer a commutative associative combining function
3475 * @return the result of accumulating the given transformation
3476 * of all (key, value) pairs
3477 */
3478 public <U> U reduceSequentially
3479 (BiFunction<? super K, ? super V, ? extends U> transformer,
3480 BiFunction<? super U, ? super U, ? extends U> reducer) {
3481 if (transformer == null || reducer == null)
3482 throw new NullPointerException();
3483 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3484 U r = null, u; V v;
3485 while ((v = it.advanceValue()) != null) {
3486 if ((u = transformer.apply(it.nextKey, v)) != null)
3487 r = (r == null) ? u : reducer.apply(r, u);
3488 }
3489 return r;
3490 }
3491
3492 /**
3493 * Returns the result of accumulating the given transformation
3494 * of all (key, value) pairs using the given reducer to
3495 * combine values, and the given basis as an identity value.
3496 *
3497 * @param transformer a function returning the transformation
3498 * for an element
3499 * @param basis the identity (initial default value) for the reduction
3500 * @param reducer a commutative associative combining function
3501 * @return the result of accumulating the given transformation
3502 * of all (key, value) pairs
3503 */
3504 public double reduceToDoubleSequentially
3505 (ToDoubleBiFunction<? super K, ? super V> transformer,
3506 double basis,
3507 DoubleBinaryOperator reducer) {
3508 if (transformer == null || reducer == null)
3509 throw new NullPointerException();
3510 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3511 double r = basis; V v;
3512 while ((v = it.advanceValue()) != null)
3513 r = reducer.applyAsDouble(r, transformer.applyAsDouble(it.nextKey, v));
3514 return r;
3515 }
3516
3517 /**
3518 * Returns the result of accumulating the given transformation
3519 * of all (key, value) pairs using the given reducer to
3520 * combine values, and the given basis as an identity value.
3521 *
3522 * @param transformer a function returning the transformation
3523 * for an element
3524 * @param basis the identity (initial default value) for the reduction
3525 * @param reducer a commutative associative combining function
3526 * @return the result of accumulating the given transformation
3527 * of all (key, value) pairs
3528 */
3529 public long reduceToLongSequentially
3530 (ToLongBiFunction<? super K, ? super V> transformer,
3531 long basis,
3532 LongBinaryOperator reducer) {
3533 if (transformer == null || reducer == null)
3534 throw new NullPointerException();
3535 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3536 long r = basis; V v;
3537 while ((v = it.advanceValue()) != null)
3538 r = reducer.applyAsLong(r, transformer.applyAsLong(it.nextKey, v));
3539 return r;
3540 }
3541
3542 /**
3543 * Returns the result of accumulating the given transformation
3544 * of all (key, value) pairs using the given reducer to
3545 * combine values, and the given basis as an identity value.
3546 *
3547 * @param transformer a function returning the transformation
3548 * for an element
3549 * @param basis the identity (initial default value) for the reduction
3550 * @param reducer a commutative associative combining function
3551 * @return the result of accumulating the given transformation
3552 * of all (key, value) pairs
3553 */
3554 public int reduceToIntSequentially
3555 (ToIntBiFunction<? super K, ? super V> transformer,
3556 int basis,
3557 IntBinaryOperator reducer) {
3558 if (transformer == null || reducer == null)
3559 throw new NullPointerException();
3560 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3561 int r = basis; V v;
3562 while ((v = it.advanceValue()) != null)
3563 r = reducer.applyAsInt(r, transformer.applyAsInt(it.nextKey, v));
3564 return r;
3565 }
3566
3567 /**
3568 * Performs the given action for each key.
3569 *
3570 * @param action the action
3571 */
3572 public void forEachKeySequentially
3573 (Consumer<? super K> action) {
3574 new Traverser<K,V,Object>(this).forEachKey(action);
3575 }
3576
3577 /**
3578 * Performs the given action for each non-null transformation
3579 * of each key.
3580 *
3581 * @param transformer a function returning the transformation
3582 * for an element, or null if there is no transformation (in
3583 * which case the action is not applied)
3584 * @param action the action
3585 */
3586 public <U> void forEachKeySequentially
3587 (Function<? super K, ? extends U> transformer,
3588 Consumer<? super U> action) {
3589 if (transformer == null || action == null)
3590 throw new NullPointerException();
3591 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3592 K k; U u;
3593 while ((k = it.advanceKey()) != null) {
3594 if ((u = transformer.apply(k)) != null)
3595 action.accept(u);
3596 }
3597 }
3598
3599 /**
3600 * Returns a non-null result from applying the given search
3601 * function on each key, or null if none.
3602 *
3603 * @param searchFunction a function returning a non-null
3604 * result on success, else null
3605 * @return a non-null result from applying the given search
3606 * function on each key, or null if none
3607 */
3608 public <U> U searchKeysSequentially
3609 (Function<? super K, ? extends U> searchFunction) {
3610 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3611 K k; U u;
3612 while ((k = it.advanceKey()) != null) {
3613 if ((u = searchFunction.apply(k)) != null)
3614 return u;
3615 }
3616 return null;
3617 }
3618
3619 /**
3620 * Returns the result of accumulating all keys using the given
3621 * reducer to combine values, or null if none.
3622 *
3623 * @param reducer a commutative associative combining function
3624 * @return the result of accumulating all keys using the given
3625 * reducer to combine values, or null if none
3626 */
3627 public K reduceKeysSequentially
3628 (BiFunction<? super K, ? super K, ? extends K> reducer) {
3629 if (reducer == null) throw new NullPointerException();
3630 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3631 K u, r = null;
3632 while ((u = it.advanceKey()) != null) {
3633 r = (r == null) ? u : reducer.apply(r, u);
3634 }
3635 return r;
3636 }
3637
3638 /**
3639 * Returns the result of accumulating the given transformation
3640 * of all keys using the given reducer to combine values, or
3641 * null if none.
3642 *
3643 * @param transformer a function returning the transformation
3644 * for an element, or null if there is no transformation (in
3645 * which case it is not combined)
3646 * @param reducer a commutative associative combining function
3647 * @return the result of accumulating the given transformation
3648 * of all keys
3649 */
3650 public <U> U reduceKeysSequentially
3651 (Function<? super K, ? extends U> transformer,
3652 BiFunction<? super U, ? super U, ? extends U> reducer) {
3653 if (transformer == null || reducer == null)
3654 throw new NullPointerException();
3655 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3656 K k; U r = null, u;
3657 while ((k = it.advanceKey()) != null) {
3658 if ((u = transformer.apply(k)) != null)
3659 r = (r == null) ? u : reducer.apply(r, u);
3660 }
3661 return r;
3662 }
3663
3664 /**
3665 * Returns the result of accumulating the given transformation
3666 * of all keys using the given reducer to combine values, and
3667 * the given basis as an identity value.
3668 *
3669 * @param transformer a function returning the transformation
3670 * for an element
3671 * @param basis the identity (initial default value) for the reduction
3672 * @param reducer a commutative associative combining function
3673 * @return the result of accumulating the given transformation
3674 * of all keys
3675 */
3676 public double reduceKeysToDoubleSequentially
3677 (ToDoubleFunction<? super K> transformer,
3678 double basis,
3679 DoubleBinaryOperator reducer) {
3680 if (transformer == null || reducer == null)
3681 throw new NullPointerException();
3682 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3683 double r = basis;
3684 K k;
3685 while ((k = it.advanceKey()) != null)
3686 r = reducer.applyAsDouble(r, transformer.applyAsDouble(k));
3687 return r;
3688 }
3689
3690 /**
3691 * Returns the result of accumulating the given transformation
3692 * of all keys using the given reducer to combine values, and
3693 * the given basis as an identity value.
3694 *
3695 * @param transformer a function returning the transformation
3696 * for an element
3697 * @param basis the identity (initial default value) for the reduction
3698 * @param reducer a commutative associative combining function
3699 * @return the result of accumulating the given transformation
3700 * of all keys
3701 */
3702 public long reduceKeysToLongSequentially
3703 (ToLongFunction<? super K> transformer,
3704 long basis,
3705 LongBinaryOperator reducer) {
3706 if (transformer == null || reducer == null)
3707 throw new NullPointerException();
3708 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3709 long r = basis;
3710 K k;
3711 while ((k = it.advanceKey()) != null)
3712 r = reducer.applyAsLong(r, transformer.applyAsLong(k));
3713 return r;
3714 }
3715
3716 /**
3717 * Returns the result of accumulating the given transformation
3718 * of all keys using the given reducer to combine values, and
3719 * the given basis as an identity value.
3720 *
3721 * @param transformer a function returning the transformation
3722 * for an element
3723 * @param basis the identity (initial default value) for the reduction
3724 * @param reducer a commutative associative combining function
3725 * @return the result of accumulating the given transformation
3726 * of all keys
3727 */
3728 public int reduceKeysToIntSequentially
3729 (ToIntFunction<? super K> transformer,
3730 int basis,
3731 IntBinaryOperator reducer) {
3732 if (transformer == null || reducer == null)
3733 throw new NullPointerException();
3734 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3735 int r = basis;
3736 K k;
3737 while ((k = it.advanceKey()) != null)
3738 r = reducer.applyAsInt(r, transformer.applyAsInt(k));
3739 return r;
3740 }
3741
3742 /**
3743 * Performs the given action for each value.
3744 *
3745 * @param action the action
3746 */
3747 public void forEachValueSequentially(Consumer<? super V> action) {
3748 new Traverser<K,V,Object>(this).forEachValue(action);
3749 }
3750
3751 /**
3752 * Performs the given action for each non-null transformation
3753 * of each value.
3754 *
3755 * @param transformer a function returning the transformation
3756 * for an element, or null if there is no transformation (in
3757 * which case the action is not applied)
3758 * @param action the action
3759 */
3760 public <U> void forEachValueSequentially
3761 (Function<? super V, ? extends U> transformer,
3762 Consumer<? super U> action) {
3763 if (transformer == null || action == null)
3764 throw new NullPointerException();
3765 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3766 V v; U u;
3767 while ((v = it.advanceValue()) != null) {
3768 if ((u = transformer.apply(v)) != null)
3769 action.accept(u);
3770 }
3771 }
3772
3773 /**
3774 * Returns a non-null result from applying the given search
3775 * function on each value, or null if none.
3776 *
3777 * @param searchFunction a function returning a non-null
3778 * result on success, else null
3779 * @return a non-null result from applying the given search
3780 * function on each value, or null if none
3781 */
3782 public <U> U searchValuesSequentially
3783 (Function<? super V, ? extends U> searchFunction) {
3784 if (searchFunction == null) throw new NullPointerException();
3785 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3786 V v; U u;
3787 while ((v = it.advanceValue()) != null) {
3788 if ((u = searchFunction.apply(v)) != null)
3789 return u;
3790 }
3791 return null;
3792 }
3793
3794 /**
3795 * Returns the result of accumulating all values using the
3796 * given reducer to combine values, or null if none.
3797 *
3798 * @param reducer a commutative associative combining function
3799 * @return the result of accumulating all values
3800 */
3801 public V reduceValuesSequentially
3802 (BiFunction<? super V, ? super V, ? extends V> reducer) {
3803 if (reducer == null) throw new NullPointerException();
3804 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3805 V r = null; V v;
3806 while ((v = it.advanceValue()) != null)
3807 r = (r == null) ? v : reducer.apply(r, v);
3808 return r;
3809 }
3810
3811 /**
3812 * Returns the result of accumulating the given transformation
3813 * of all values using the given reducer to combine values, or
3814 * null if none.
3815 *
3816 * @param transformer a function returning the transformation
3817 * for an element, or null if there is no transformation (in
3818 * which case it is not combined)
3819 * @param reducer a commutative associative combining function
3820 * @return the result of accumulating the given transformation
3821 * of all values
3822 */
3823 public <U> U reduceValuesSequentially
3824 (Function<? super V, ? extends U> transformer,
3825 BiFunction<? super U, ? super U, ? extends U> reducer) {
3826 if (transformer == null || reducer == null)
3827 throw new NullPointerException();
3828 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3829 U r = null, u; V v;
3830 while ((v = it.advanceValue()) != null) {
3831 if ((u = transformer.apply(v)) != null)
3832 r = (r == null) ? u : reducer.apply(r, u);
3833 }
3834 return r;
3835 }
3836
3837 /**
3838 * Returns the result of accumulating the given transformation
3839 * of all values using the given reducer to combine values,
3840 * and the given basis as an identity value.
3841 *
3842 * @param transformer a function returning the transformation
3843 * for an element
3844 * @param basis the identity (initial default value) for the reduction
3845 * @param reducer a commutative associative combining function
3846 * @return the result of accumulating the given transformation
3847 * of all values
3848 */
3849 public double reduceValuesToDoubleSequentially
3850 (ToDoubleFunction<? super V> transformer,
3851 double basis,
3852 DoubleBinaryOperator reducer) {
3853 if (transformer == null || reducer == null)
3854 throw new NullPointerException();
3855 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3856 double r = basis; V v;
3857 while ((v = it.advanceValue()) != null)
3858 r = reducer.applyAsDouble(r, transformer.applyAsDouble(v));
3859 return r;
3860 }
3861
3862 /**
3863 * Returns the result of accumulating the given transformation
3864 * of all values using the given reducer to combine values,
3865 * and the given basis as an identity value.
3866 *
3867 * @param transformer a function returning the transformation
3868 * for an element
3869 * @param basis the identity (initial default value) for the reduction
3870 * @param reducer a commutative associative combining function
3871 * @return the result of accumulating the given transformation
3872 * of all values
3873 */
3874 public long reduceValuesToLongSequentially
3875 (ToLongFunction<? super V> transformer,
3876 long basis,
3877 LongBinaryOperator reducer) {
3878 if (transformer == null || reducer == null)
3879 throw new NullPointerException();
3880 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3881 long r = basis; V v;
3882 while ((v = it.advanceValue()) != null)
3883 r = reducer.applyAsLong(r, transformer.applyAsLong(v));
3884 return r;
3885 }
3886
3887 /**
3888 * Returns the result of accumulating the given transformation
3889 * of all values using the given reducer to combine values,
3890 * and the given basis as an identity value.
3891 *
3892 * @param transformer a function returning the transformation
3893 * for an element
3894 * @param basis the identity (initial default value) for the reduction
3895 * @param reducer a commutative associative combining function
3896 * @return the result of accumulating the given transformation
3897 * of all values
3898 */
3899 public int reduceValuesToIntSequentially
3900 (ToIntFunction<? super V> transformer,
3901 int basis,
3902 IntBinaryOperator reducer) {
3903 if (transformer == null || reducer == null)
3904 throw new NullPointerException();
3905 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3906 int r = basis; V v;
3907 while ((v = it.advanceValue()) != null)
3908 r = reducer.applyAsInt(r, transformer.applyAsInt(v));
3909 return r;
3910 }
3911
3912 /**
3913 * Performs the given action for each entry.
3914 *
3915 * @param action the action
3916 */
3917 public void forEachEntrySequentially
3918 (Consumer<? super Map.Entry<K,V>> action) {
3919 if (action == null) throw new NullPointerException();
3920 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3921 V v;
3922 while ((v = it.advanceValue()) != null)
3923 action.accept(entryFor(it.nextKey, v));
3924 }
3925
3926 /**
3927 * Performs the given action for each non-null transformation
3928 * of each entry.
3929 *
3930 * @param transformer a function returning the transformation
3931 * for an element, or null if there is no transformation (in
3932 * which case the action is not applied)
3933 * @param action the action
3934 */
3935 public <U> void forEachEntrySequentially
3936 (Function<Map.Entry<K,V>, ? extends U> transformer,
3937 Consumer<? super U> action) {
3938 if (transformer == null || action == null)
3939 throw new NullPointerException();
3940 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3941 V v; U u;
3942 while ((v = it.advanceValue()) != null) {
3943 if ((u = transformer.apply(entryFor(it.nextKey, v))) != null)
3944 action.accept(u);
3945 }
3946 }
3947
3948 /**
3949 * Returns a non-null result from applying the given search
3950 * function on each entry, or null if none.
3951 *
3952 * @param searchFunction a function returning a non-null
3953 * result on success, else null
3954 * @return a non-null result from applying the given search
3955 * function on each entry, or null if none
3956 */
3957 public <U> U searchEntriesSequentially
3958 (Function<Map.Entry<K,V>, ? extends U> searchFunction) {
3959 if (searchFunction == null) throw new NullPointerException();
3960 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3961 V v; U u;
3962 while ((v = it.advanceValue()) != null) {
3963 if ((u = searchFunction.apply(entryFor(it.nextKey, v))) != null)
3964 return u;
3965 }
3966 return null;
3967 }
3968
3969 /**
3970 * Returns the result of accumulating all entries using the
3971 * given reducer to combine values, or null if none.
3972 *
3973 * @param reducer a commutative associative combining function
3974 * @return the result of accumulating all entries
3975 */
3976 public Map.Entry<K,V> reduceEntriesSequentially
3977 (BiFunction<Map.Entry<K,V>, Map.Entry<K,V>, ? extends Map.Entry<K,V>> reducer) {
3978 if (reducer == null) throw new NullPointerException();
3979 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3980 Map.Entry<K,V> r = null; V v;
3981 while ((v = it.advanceValue()) != null) {
3982 Map.Entry<K,V> u = entryFor(it.nextKey, v);
3983 r = (r == null) ? u : reducer.apply(r, u);
3984 }
3985 return r;
3986 }
3987
3988 /**
3989 * Returns the result of accumulating the given transformation
3990 * of all entries using the given reducer to combine values,
3991 * or null if none.
3992 *
3993 * @param transformer a function returning the transformation
3994 * for an element, or null if there is no transformation (in
3995 * which case it is not combined)
3996 * @param reducer a commutative associative combining function
3997 * @return the result of accumulating the given transformation
3998 * of all entries
3999 */
4000 public <U> U reduceEntriesSequentially
4001 (Function<Map.Entry<K,V>, ? extends U> transformer,
4002 BiFunction<? super U, ? super U, ? extends U> reducer) {
4003 if (transformer == null || reducer == null)
4004 throw new NullPointerException();
4005 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
4006 U r = null, u; V v;
4007 while ((v = it.advanceValue()) != null) {
4008 if ((u = transformer.apply(entryFor(it.nextKey, v))) != null)
4009 r = (r == null) ? u : reducer.apply(r, u);
4010 }
4011 return r;
4012 }
4013
4014 /**
4015 * Returns the result of accumulating the given transformation
4016 * of all entries using the given reducer to combine values,
4017 * and the given basis as an identity value.
4018 *
4019 * @param transformer a function returning the transformation
4020 * for an element
4021 * @param basis the identity (initial default value) for the reduction
4022 * @param reducer a commutative associative combining function
4023 * @return the result of accumulating the given transformation
4024 * of all entries
4025 */
4026 public double reduceEntriesToDoubleSequentially
4027 (ToDoubleFunction<Map.Entry<K,V>> transformer,
4028 double basis,
4029 DoubleBinaryOperator reducer) {
4030 if (transformer == null || reducer == null)
4031 throw new NullPointerException();
4032 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
4033 double r = basis; V v;
4034 while ((v = it.advanceValue()) != null)
4035 r = reducer.applyAsDouble(r, transformer.applyAsDouble(entryFor(it.nextKey, v)));
4036 return r;
4037 }
4038
4039 /**
4040 * Returns the result of accumulating the given transformation
4041 * of all entries using the given reducer to combine values,
4042 * and the given basis as an identity value.
4043 *
4044 * @param transformer a function returning the transformation
4045 * for an element
4046 * @param basis the identity (initial default value) for the reduction
4047 * @param reducer a commutative associative combining function
4048 * @return the result of accumulating the given transformation
4049 * of all entries
4050 */
4051 public long reduceEntriesToLongSequentially
4052 (ToLongFunction<Map.Entry<K,V>> transformer,
4053 long basis,
4054 LongBinaryOperator reducer) {
4055 if (transformer == null || reducer == null)
4056 throw new NullPointerException();
4057 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
4058 long r = basis; V v;
4059 while ((v = it.advanceValue()) != null)
4060 r = reducer.applyAsLong(r, transformer.applyAsLong(entryFor(it.nextKey, v)));
4061 return r;
4062 }
4063
4064 /**
4065 * Returns the result of accumulating the given transformation
4066 * of all entries using the given reducer to combine values,
4067 * and the given basis as an identity value.
4068 *
4069 * @param transformer a function returning the transformation
4070 * for an element
4071 * @param basis the identity (initial default value) for the reduction
4072 * @param reducer a commutative associative combining function
4073 * @return the result of accumulating the given transformation
4074 * of all entries
4075 */
4076 public int reduceEntriesToIntSequentially
4077 (ToIntFunction<Map.Entry<K,V>> transformer,
4078 int basis,
4079 IntBinaryOperator reducer) {
4080 if (transformer == null || reducer == null)
4081 throw new NullPointerException();
4082 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
4083 int r = basis; V v;
4084 while ((v = it.advanceValue()) != null)
4085 r = reducer.applyAsInt(r, transformer.applyAsInt(entryFor(it.nextKey, v)));
4086 return r;
4087 }
4088
4089 // Parallel bulk operations
4090
4091 /**
4092 * Performs the given action for each (key, value).
4093 *
4094 * @param action the action
4095 */
4096 public void forEachInParallel(BiConsumer<? super K,? super V> action) {
4097 ForkJoinTasks.forEach
4098 (this, action).invoke();
4099 }
4100
4101 /**
4102 * Performs the given action for each non-null transformation
4103 * of each (key, value).
4104 *
4105 * @param transformer a function returning the transformation
4106 * for an element, or null if there is no transformation (in
4107 * which case the action is not applied)
4108 * @param action the action
4109 */
4110 public <U> void forEachInParallel
4111 (BiFunction<? super K, ? super V, ? extends U> transformer,
4112 Consumer<? super U> action) {
4113 ForkJoinTasks.forEach
4114 (this, transformer, action).invoke();
4115 }
4116
4117 /**
4118 * Returns a non-null result from applying the given search
4119 * function on each (key, value), or null if none. Upon
4120 * success, further element processing is suppressed and the
4121 * results of any other parallel invocations of the search
4122 * function are ignored.
4123 *
4124 * @param searchFunction a function returning a non-null
4125 * result on success, else null
4126 * @return a non-null result from applying the given search
4127 * function on each (key, value), or null if none
4128 */
4129 public <U> U searchInParallel
4130 (BiFunction<? super K, ? super V, ? extends U> searchFunction) {
4131 return ForkJoinTasks.search
4132 (this, searchFunction).invoke();
4133 }
4134
4135 /**
4136 * Returns the result of accumulating the given transformation
4137 * of all (key, value) pairs using the given reducer to
4138 * combine values, or null if none.
4139 *
4140 * @param transformer a function returning the transformation
4141 * for an element, or null if there is no transformation (in
4142 * which case it is not combined)
4143 * @param reducer a commutative associative combining function
4144 * @return the result of accumulating the given transformation
4145 * of all (key, value) pairs
4146 */
4147 public <U> U reduceInParallel
4148 (BiFunction<? super K, ? super V, ? extends U> transformer,
4149 BiFunction<? super U, ? super U, ? extends U> reducer) {
4150 return ForkJoinTasks.reduce
4151 (this, transformer, reducer).invoke();
4152 }
4153
4154 /**
4155 * Returns the result of accumulating the given transformation
4156 * of all (key, value) pairs using the given reducer to
4157 * combine values, and the given basis as an identity value.
4158 *
4159 * @param transformer a function returning the transformation
4160 * for an element
4161 * @param basis the identity (initial default value) for the reduction
4162 * @param reducer a commutative associative combining function
4163 * @return the result of accumulating the given transformation
4164 * of all (key, value) pairs
4165 */
4166 public double reduceToDoubleInParallel
4167 (ToDoubleBiFunction<? super K, ? super V> transformer,
4168 double basis,
4169 DoubleBinaryOperator reducer) {
4170 return ForkJoinTasks.reduceToDouble
4171 (this, transformer, basis, reducer).invoke();
4172 }
4173
4174 /**
4175 * Returns the result of accumulating the given transformation
4176 * of all (key, value) pairs using the given reducer to
4177 * combine values, and the given basis as an identity value.
4178 *
4179 * @param transformer a function returning the transformation
4180 * for an element
4181 * @param basis the identity (initial default value) for the reduction
4182 * @param reducer a commutative associative combining function
4183 * @return the result of accumulating the given transformation
4184 * of all (key, value) pairs
4185 */
4186 public long reduceToLongInParallel
4187 (ToLongBiFunction<? super K, ? super V> transformer,
4188 long basis,
4189 LongBinaryOperator reducer) {
4190 return ForkJoinTasks.reduceToLong
4191 (this, transformer, basis, reducer).invoke();
4192 }
4193
4194 /**
4195 * Returns the result of accumulating the given transformation
4196 * of all (key, value) pairs using the given reducer to
4197 * combine values, and the given basis as an identity value.
4198 *
4199 * @param transformer a function returning the transformation
4200 * for an element
4201 * @param basis the identity (initial default value) for the reduction
4202 * @param reducer a commutative associative combining function
4203 * @return the result of accumulating the given transformation
4204 * of all (key, value) pairs
4205 */
4206 public int reduceToIntInParallel
4207 (ToIntBiFunction<? super K, ? super V> transformer,
4208 int basis,
4209 IntBinaryOperator reducer) {
4210 return ForkJoinTasks.reduceToInt
4211 (this, transformer, basis, reducer).invoke();
4212 }
4213
4214 /**
4215 * Performs the given action for each key.
4216 *
4217 * @param action the action
4218 */
4219 public void forEachKeyInParallel(Consumer<? super K> action) {
4220 ForkJoinTasks.forEachKey
4221 (this, action).invoke();
4222 }
4223
4224 /**
4225 * Performs the given action for each non-null transformation
4226 * of each key.
4227 *
4228 * @param transformer a function returning the transformation
4229 * for an element, or null if there is no transformation (in
4230 * which case the action is not applied)
4231 * @param action the action
4232 */
4233 public <U> void forEachKeyInParallel
4234 (Function<? super K, ? extends U> transformer,
4235 Consumer<? super U> action) {
4236 ForkJoinTasks.forEachKey
4237 (this, transformer, action).invoke();
4238 }
4239
4240 /**
4241 * Returns a non-null result from applying the given search
4242 * function on each key, or null if none. Upon success,
4243 * further element processing is suppressed and the results of
4244 * any other parallel invocations of the search function are
4245 * ignored.
4246 *
4247 * @param searchFunction a function returning a non-null
4248 * result on success, else null
4249 * @return a non-null result from applying the given search
4250 * function on each key, or null if none
4251 */
4252 public <U> U searchKeysInParallel
4253 (Function<? super K, ? extends U> searchFunction) {
4254 return ForkJoinTasks.searchKeys
4255 (this, searchFunction).invoke();
4256 }
4257
4258 /**
4259 * Returns the result of accumulating all keys using the given
4260 * reducer to combine values, or null if none.
4261 *
4262 * @param reducer a commutative associative combining function
4263 * @return the result of accumulating all keys using the given
4264 * reducer to combine values, or null if none
4265 */
4266 public K reduceKeysInParallel
4267 (BiFunction<? super K, ? super K, ? extends K> reducer) {
4268 return ForkJoinTasks.reduceKeys
4269 (this, reducer).invoke();
4270 }
4271
4272 /**
4273 * Returns the result of accumulating the given transformation
4274 * of all keys using the given reducer to combine values, or
4275 * null if none.
4276 *
4277 * @param transformer a function returning the transformation
4278 * for an element, or null if there is no transformation (in
4279 * which case it is not combined)
4280 * @param reducer a commutative associative combining function
4281 * @return the result of accumulating the given transformation
4282 * of all keys
4283 */
4284 public <U> U reduceKeysInParallel
4285 (Function<? super K, ? extends U> transformer,
4286 BiFunction<? super U, ? super U, ? extends U> reducer) {
4287 return ForkJoinTasks.reduceKeys
4288 (this, transformer, reducer).invoke();
4289 }
4290
4291 /**
4292 * Returns the result of accumulating the given transformation
4293 * of all keys using the given reducer to combine values, and
4294 * the given basis as an identity value.
4295 *
4296 * @param transformer a function returning the transformation
4297 * for an element
4298 * @param basis the identity (initial default value) for the reduction
4299 * @param reducer a commutative associative combining function
4300 * @return the result of accumulating the given transformation
4301 * of all keys
4302 */
4303 public double reduceKeysToDoubleInParallel
4304 (ToDoubleFunction<? super K> transformer,
4305 double basis,
4306 DoubleBinaryOperator reducer) {
4307 return ForkJoinTasks.reduceKeysToDouble
4308 (this, transformer, basis, reducer).invoke();
4309 }
4310
4311 /**
4312 * Returns the result of accumulating the given transformation
4313 * of all keys using the given reducer to combine values, and
4314 * the given basis as an identity value.
4315 *
4316 * @param transformer a function returning the transformation
4317 * for an element
4318 * @param basis the identity (initial default value) for the reduction
4319 * @param reducer a commutative associative combining function
4320 * @return the result of accumulating the given transformation
4321 * of all keys
4322 */
4323 public long reduceKeysToLongInParallel
4324 (ToLongFunction<? super K> transformer,
4325 long basis,
4326 LongBinaryOperator reducer) {
4327 return ForkJoinTasks.reduceKeysToLong
4328 (this, transformer, basis, reducer).invoke();
4329 }
4330
4331 /**
4332 * Returns the result of accumulating the given transformation
4333 * of all keys using the given reducer to combine values, and
4334 * the given basis as an identity value.
4335 *
4336 * @param transformer a function returning the transformation
4337 * for an element
4338 * @param basis the identity (initial default value) for the reduction
4339 * @param reducer a commutative associative combining function
4340 * @return the result of accumulating the given transformation
4341 * of all keys
4342 */
4343 public int reduceKeysToIntInParallel
4344 (ToIntFunction<? super K> transformer,
4345 int basis,
4346 IntBinaryOperator reducer) {
4347 return ForkJoinTasks.reduceKeysToInt
4348 (this, transformer, basis, reducer).invoke();
4349 }
4350
4351 /**
4352 * Performs the given action for each value.
4353 *
4354 * @param action the action
4355 */
4356 public void forEachValueInParallel(Consumer<? super V> action) {
4357 ForkJoinTasks.forEachValue
4358 (this, action).invoke();
4359 }
4360
4361 /**
4362 * Performs the given action for each non-null transformation
4363 * of each value.
4364 *
4365 * @param transformer a function returning the transformation
4366 * for an element, or null if there is no transformation (in
4367 * which case the action is not applied)
4368 * @param action the action
4369 */
4370 public <U> void forEachValueInParallel
4371 (Function<? super V, ? extends U> transformer,
4372 Consumer<? super U> action) {
4373 ForkJoinTasks.forEachValue
4374 (this, transformer, action).invoke();
4375 }
4376
4377 /**
4378 * Returns a non-null result from applying the given search
4379 * function on each value, or null if none. Upon success,
4380 * further element processing is suppressed and the results of
4381 * any other parallel invocations of the search function are
4382 * ignored.
4383 *
4384 * @param searchFunction a function returning a non-null
4385 * result on success, else null
4386 * @return a non-null result from applying the given search
4387 * function on each value, or null if none
4388 */
4389 public <U> U searchValuesInParallel
4390 (Function<? super V, ? extends U> searchFunction) {
4391 return ForkJoinTasks.searchValues
4392 (this, searchFunction).invoke();
4393 }
4394
4395 /**
4396 * Returns the result of accumulating all values using the
4397 * given reducer to combine values, or null if none.
4398 *
4399 * @param reducer a commutative associative combining function
4400 * @return the result of accumulating all values
4401 */
4402 public V reduceValuesInParallel
4403 (BiFunction<? super V, ? super V, ? extends V> reducer) {
4404 return ForkJoinTasks.reduceValues
4405 (this, reducer).invoke();
4406 }
4407
4408 /**
4409 * Returns the result of accumulating the given transformation
4410 * of all values using the given reducer to combine values, or
4411 * null if none.
4412 *
4413 * @param transformer a function returning the transformation
4414 * for an element, or null if there is no transformation (in
4415 * which case it is not combined)
4416 * @param reducer a commutative associative combining function
4417 * @return the result of accumulating the given transformation
4418 * of all values
4419 */
4420 public <U> U reduceValuesInParallel
4421 (Function<? super V, ? extends U> transformer,
4422 BiFunction<? super U, ? super U, ? extends U> reducer) {
4423 return ForkJoinTasks.reduceValues
4424 (this, transformer, reducer).invoke();
4425 }
4426
4427 /**
4428 * Returns the result of accumulating the given transformation
4429 * of all values using the given reducer to combine values,
4430 * and the given basis as an identity value.
4431 *
4432 * @param transformer a function returning the transformation
4433 * for an element
4434 * @param basis the identity (initial default value) for the reduction
4435 * @param reducer a commutative associative combining function
4436 * @return the result of accumulating the given transformation
4437 * of all values
4438 */
4439 public double reduceValuesToDoubleInParallel
4440 (ToDoubleFunction<? super V> transformer,
4441 double basis,
4442 DoubleBinaryOperator reducer) {
4443 return ForkJoinTasks.reduceValuesToDouble
4444 (this, transformer, basis, reducer).invoke();
4445 }
4446
4447 /**
4448 * Returns the result of accumulating the given transformation
4449 * of all values using the given reducer to combine values,
4450 * and the given basis as an identity value.
4451 *
4452 * @param transformer a function returning the transformation
4453 * for an element
4454 * @param basis the identity (initial default value) for the reduction
4455 * @param reducer a commutative associative combining function
4456 * @return the result of accumulating the given transformation
4457 * of all values
4458 */
4459 public long reduceValuesToLongInParallel
4460 (ToLongFunction<? super V> transformer,
4461 long basis,
4462 LongBinaryOperator reducer) {
4463 return ForkJoinTasks.reduceValuesToLong
4464 (this, transformer, basis, reducer).invoke();
4465 }
4466
4467 /**
4468 * Returns the result of accumulating the given transformation
4469 * of all values using the given reducer to combine values,
4470 * and the given basis as an identity value.
4471 *
4472 * @param transformer a function returning the transformation
4473 * for an element
4474 * @param basis the identity (initial default value) for the reduction
4475 * @param reducer a commutative associative combining function
4476 * @return the result of accumulating the given transformation
4477 * of all values
4478 */
4479 public int reduceValuesToIntInParallel
4480 (ToIntFunction<? super V> transformer,
4481 int basis,
4482 IntBinaryOperator reducer) {
4483 return ForkJoinTasks.reduceValuesToInt
4484 (this, transformer, basis, reducer).invoke();
4485 }
4486
4487 /**
4488 * Performs the given action for each entry.
4489 *
4490 * @param action the action
4491 */
4492 public void forEachEntryInParallel(Consumer<? super Map.Entry<K,V>> action) {
4493 ForkJoinTasks.forEachEntry
4494 (this, action).invoke();
4495 }
4496
4497 /**
4498 * Performs the given action for each non-null transformation
4499 * of each entry.
4500 *
4501 * @param transformer a function returning the transformation
4502 * for an element, or null if there is no transformation (in
4503 * which case the action is not applied)
4504 * @param action the action
4505 */
4506 public <U> void forEachEntryInParallel
4507 (Function<Map.Entry<K,V>, ? extends U> transformer,
4508 Consumer<? super U> action) {
4509 ForkJoinTasks.forEachEntry
4510 (this, transformer, action).invoke();
4511 }
4512
4513 /**
4514 * Returns a non-null result from applying the given search
4515 * function on each entry, or null if none. Upon success,
4516 * further element processing is suppressed and the results of
4517 * any other parallel invocations of the search function are
4518 * ignored.
4519 *
4520 * @param searchFunction a function returning a non-null
4521 * result on success, else null
4522 * @return a non-null result from applying the given search
4523 * function on each entry, or null if none
4524 */
4525 public <U> U searchEntriesInParallel
4526 (Function<Map.Entry<K,V>, ? extends U> searchFunction) {
4527 return ForkJoinTasks.searchEntries
4528 (this, searchFunction).invoke();
4529 }
4530
4531 /**
4532 * Returns the result of accumulating all entries using the
4533 * given reducer to combine values, or null if none.
4534 *
4535 * @param reducer a commutative associative combining function
4536 * @return the result of accumulating all entries
4537 */
4538 public Map.Entry<K,V> reduceEntriesInParallel
4539 (BiFunction<Map.Entry<K,V>, Map.Entry<K,V>, ? extends Map.Entry<K,V>> reducer) {
4540 return ForkJoinTasks.reduceEntries
4541 (this, reducer).invoke();
4542 }
4543
4544 /**
4545 * Returns the result of accumulating the given transformation
4546 * of all entries using the given reducer to combine values,
4547 * or null if none.
4548 *
4549 * @param transformer a function returning the transformation
4550 * for an element, or null if there is no transformation (in
4551 * which case it is not combined)
4552 * @param reducer a commutative associative combining function
4553 * @return the result of accumulating the given transformation
4554 * of all entries
4555 */
4556 public <U> U reduceEntriesInParallel
4557 (Function<Map.Entry<K,V>, ? extends U> transformer,
4558 BiFunction<? super U, ? super U, ? extends U> reducer) {
4559 return ForkJoinTasks.reduceEntries
4560 (this, transformer, reducer).invoke();
4561 }
4562
4563 /**
4564 * Returns the result of accumulating the given transformation
4565 * of all entries using the given reducer to combine values,
4566 * and the given basis as an identity value.
4567 *
4568 * @param transformer a function returning the transformation
4569 * for an element
4570 * @param basis the identity (initial default value) for the reduction
4571 * @param reducer a commutative associative combining function
4572 * @return the result of accumulating the given transformation
4573 * of all entries
4574 */
4575 public double reduceEntriesToDoubleInParallel
4576 (ToDoubleFunction<Map.Entry<K,V>> transformer,
4577 double basis,
4578 DoubleBinaryOperator reducer) {
4579 return ForkJoinTasks.reduceEntriesToDouble
4580 (this, transformer, basis, reducer).invoke();
4581 }
4582
4583 /**
4584 * Returns the result of accumulating the given transformation
4585 * of all entries using the given reducer to combine values,
4586 * and the given basis as an identity value.
4587 *
4588 * @param transformer a function returning the transformation
4589 * for an element
4590 * @param basis the identity (initial default value) for the reduction
4591 * @param reducer a commutative associative combining function
4592 * @return the result of accumulating the given transformation
4593 * of all entries
4594 */
4595 public long reduceEntriesToLongInParallel
4596 (ToLongFunction<Map.Entry<K,V>> transformer,
4597 long basis,
4598 LongBinaryOperator reducer) {
4599 return ForkJoinTasks.reduceEntriesToLong
4600 (this, transformer, basis, reducer).invoke();
4601 }
4602
4603 /**
4604 * Returns the result of accumulating the given transformation
4605 * of all entries using the given reducer to combine values,
4606 * and the given basis as an identity value.
4607 *
4608 * @param transformer a function returning the transformation
4609 * for an element
4610 * @param basis the identity (initial default value) for the reduction
4611 * @param reducer a commutative associative combining function
4612 * @return the result of accumulating the given transformation
4613 * of all entries
4614 */
4615 public int reduceEntriesToIntInParallel
4616 (ToIntFunction<Map.Entry<K,V>> transformer,
4617 int basis,
4618 IntBinaryOperator reducer) {
4619 return ForkJoinTasks.reduceEntriesToInt
4620 (this, transformer, basis, reducer).invoke();
4621 }
4622
4623
4624 /* ----------------Views -------------- */
4625
4626 /**
4627 * Base class for views.
4628 */
4629 abstract static class CHMCollectionView<K,V,E>
4630 implements Collection<E>, java.io.Serializable {
4631 private static final long serialVersionUID = 7249069246763182397L;
4632 final ConcurrentHashMap<K,V> map;
4633 CHMCollectionView(ConcurrentHashMap<K,V> map) { this.map = map; }
4634
4635 /**
4636 * Returns the map backing this view.
4637 *
4638 * @return the map backing this view
4639 */
4640 public ConcurrentHashMap<K,V> getMap() { return map; }
4641
4642 /**
4643 * Removes all of the elements from this view, by removing all
4644 * the mappings from the map backing this view.
4645 */
4646 public final void clear() { map.clear(); }
4647 public final int size() { return map.size(); }
4648 public final boolean isEmpty() { return map.isEmpty(); }
4649
        // implementations below rely on concrete classes supplying these
        // abstract methods
        /**
         * Returns a "weakly consistent" iterator that will never
         * throw {@link ConcurrentModificationException}, and
         * guarantees to traverse elements as they existed upon
         * construction of the iterator, and may (but is not
         * guaranteed to) reflect any modifications subsequent to
         * construction.
         */
        public abstract Iterator<E> iterator();
        // Membership test and removal are element-type specific
        // (key, value, or entry views), so subclasses define them.
        public abstract boolean contains(Object o);
        public abstract boolean remove(Object o);

        // Shared OutOfMemoryError message for the toArray methods below.
        private static final String oomeMsg = "Required array size too large";
4665
4666 public final Object[] toArray() {
4667 long sz = map.mappingCount();
4668 if (sz > MAX_ARRAY_SIZE)
4669 throw new OutOfMemoryError(oomeMsg);
4670 int n = (int)sz;
4671 Object[] r = new Object[n];
4672 int i = 0;
4673 for (E e : this) {
4674 if (i == n) {
4675 if (n >= MAX_ARRAY_SIZE)
4676 throw new OutOfMemoryError(oomeMsg);
4677 if (n >= MAX_ARRAY_SIZE - (MAX_ARRAY_SIZE >>> 1) - 1)
4678 n = MAX_ARRAY_SIZE;
4679 else
4680 n += (n >>> 1) + 1;
4681 r = Arrays.copyOf(r, n);
4682 }
4683 r[i++] = e;
4684 }
4685 return (i == n) ? r : Arrays.copyOf(r, i);
4686 }
4687
4688 @SuppressWarnings("unchecked")
4689 public final <T> T[] toArray(T[] a) {
4690 long sz = map.mappingCount();
4691 if (sz > MAX_ARRAY_SIZE)
4692 throw new OutOfMemoryError(oomeMsg);
4693 int m = (int)sz;
4694 T[] r = (a.length >= m) ? a :
4695 (T[])java.lang.reflect.Array
4696 .newInstance(a.getClass().getComponentType(), m);
4697 int n = r.length;
4698 int i = 0;
4699 for (E e : this) {
4700 if (i == n) {
4701 if (n >= MAX_ARRAY_SIZE)
4702 throw new OutOfMemoryError(oomeMsg);
4703 if (n >= MAX_ARRAY_SIZE - (MAX_ARRAY_SIZE >>> 1) - 1)
4704 n = MAX_ARRAY_SIZE;
4705 else
4706 n += (n >>> 1) + 1;
4707 r = Arrays.copyOf(r, n);
4708 }
4709 r[i++] = (T)e;
4710 }
4711 if (a == r && i < n) {
4712 r[i] = null; // null-terminate
4713 return r;
4714 }
4715 return (i == n) ? r : Arrays.copyOf(r, i);
4716 }
4717
4718 /**
4719 * Returns a string representation of this collection.
4720 * The string representation consists of the string representations
4721 * of the collection's elements in the order they are returned by
4722 * its iterator, enclosed in square brackets ({@code "[]"}).
4723 * Adjacent elements are separated by the characters {@code ", "}
4724 * (comma and space). Elements are converted to strings as by
4725 * {@link String#valueOf(Object)}.
4726 *
4727 * @return a string representation of this collection
4728 */
4729 public final String toString() {
4730 StringBuilder sb = new StringBuilder();
4731 sb.append('[');
4732 Iterator<E> it = iterator();
4733 if (it.hasNext()) {
4734 for (;;) {
4735 Object e = it.next();
4736 sb.append(e == this ? "(this Collection)" : e);
4737 if (!it.hasNext())
4738 break;
4739 sb.append(',').append(' ');
4740 }
4741 }
4742 return sb.append(']').toString();
4743 }
4744
4745 public final boolean containsAll(Collection<?> c) {
4746 if (c != this) {
4747 for (Object e : c) {
4748 if (e == null || !contains(e))
4749 return false;
4750 }
4751 }
4752 return true;
4753 }
4754
4755 public final boolean removeAll(Collection<?> c) {
4756 boolean modified = false;
4757 for (Iterator<E> it = iterator(); it.hasNext();) {
4758 if (c.contains(it.next())) {
4759 it.remove();
4760 modified = true;
4761 }
4762 }
4763 return modified;
4764 }
4765
4766 public final boolean retainAll(Collection<?> c) {
4767 boolean modified = false;
4768 for (Iterator<E> it = iterator(); it.hasNext();) {
4769 if (!c.contains(it.next())) {
4770 it.remove();
4771 modified = true;
4772 }
4773 }
4774 return modified;
4775 }
4776
4777 }
4778
4779 abstract static class CHMSetView<K,V,E>
4780 extends CHMCollectionView<K,V,E>
4781 implements Set<E>, java.io.Serializable {
4782 private static final long serialVersionUID = 7249069246763182397L;
4783 CHMSetView(ConcurrentHashMap<K,V> map) { super(map); }
4784
4785 // Implement Set API
4786
4787 /**
4788 * Implements {@link Set#hashCode()}.
4789 * @return the hash code value for this set
4790 */
4791 public final int hashCode() {
4792 int h = 0;
4793 for (E e : this)
4794 h += e.hashCode();
4795 return h;
4796 }
4797
4798 /**
4799 * Implements {@link Set#equals(Object)}.
4800 * @param o object to be compared for equality with this set
4801 * @return {@code true} if the specified object is equal to this set
4802 */
4803 public final boolean equals(Object o) {
4804 Set<?> c;
4805 return ((o instanceof Set) &&
4806 ((c = (Set<?>)o) == this ||
4807 (containsAll(c) && c.containsAll(this))));
4808 }
4809 }
4810
4811 /**
4812 * A view of a ConcurrentHashMap as a {@link Set} of keys, in
4813 * which additions may optionally be enabled by mapping to a
4814 * common value. This class cannot be directly instantiated.
4815 * See {@link #keySet() keySet()},
4816 * {@link #keySet(Object) keySet(V)},
4817 * {@link #newKeySet() newKeySet()},
4818 * {@link #newKeySet(int) newKeySet(int)}.
4819 */
4820 public static class KeySetView<K,V>
4821 extends CHMSetView<K,V,K>
4822 implements Set<K>, java.io.Serializable {
4823 private static final long serialVersionUID = 7249069246763182397L;
4824 private final V value;
4825 KeySetView(ConcurrentHashMap<K,V> map, V value) { // non-public
4826 super(map);
4827 this.value = value;
4828 }
4829
4830 /**
4831 * Returns the default mapped value for additions,
4832 * or {@code null} if additions are not supported.
4833 *
4834 * @return the default mapped value for additions, or {@code null}
4835 * if not supported
4836 */
4837 public V getMappedValue() { return value; }
4838
4839 /**
4840 * {@inheritDoc}
4841 * @throws NullPointerException if the specified key is null
4842 */
4843 public boolean contains(Object o) { return map.containsKey(o); }
4844
4845 /**
4846 * Removes the key from this map view, by removing the key (and its
4847 * corresponding value) from the backing map. This method does
4848 * nothing if the key is not in the map.
4849 *
4850 * @param o the key to be removed from the backing map
4851 * @return {@code true} if the backing map contained the specified key
4852 * @throws NullPointerException if the specified key is null
4853 */
4854 public boolean remove(Object o) { return map.remove(o) != null; }
4855
4856 /**
4857 * @return an iterator over the keys of the backing map
4858 */
4859 public Iterator<K> iterator() { return new KeyIterator<K,V>(map); }
4860
4861 /**
4862 * Adds the specified key to this set view by mapping the key to
4863 * the default mapped value in the backing map, if defined.
4864 *
4865 * @param e key to be added
4866 * @return {@code true} if this set changed as a result of the call
4867 * @throws NullPointerException if the specified key is null
4868 * @throws UnsupportedOperationException if no default mapped value
4869 * for additions was provided
4870 */
4871 public boolean add(K e) {
4872 V v;
4873 if ((v = value) == null)
4874 throw new UnsupportedOperationException();
4875 return map.internalPut(e, v, true) == null;
4876 }
4877
4878 /**
4879 * Adds all of the elements in the specified collection to this set,
4880 * as if by calling {@link #add} on each one.
4881 *
4882 * @param c the elements to be inserted into this set
4883 * @return {@code true} if this set changed as a result of the call
4884 * @throws NullPointerException if the collection or any of its
4885 * elements are {@code null}
4886 * @throws UnsupportedOperationException if no default mapped value
4887 * for additions was provided
4888 */
4889 public boolean addAll(Collection<? extends K> c) {
4890 boolean added = false;
4891 V v;
4892 if ((v = value) == null)
4893 throw new UnsupportedOperationException();
4894 for (K e : c) {
4895 if (map.internalPut(e, v, true) == null)
4896 added = true;
4897 }
4898 return added;
4899 }
4900
4901 public Spliterator<K> spliterator() {
4902 return new KeyIterator<>(map, null);
4903 }
4904
4905 }
4906
4907 /**
4908 * A view of a ConcurrentHashMap as a {@link Collection} of
4909 * values, in which additions are disabled. This class cannot be
4910 * directly instantiated. See {@link #values()}.
4911 *
4912 * <p>The view's {@code iterator} is a "weakly consistent" iterator
4913 * that will never throw {@link ConcurrentModificationException},
4914 * and guarantees to traverse elements as they existed upon
4915 * construction of the iterator, and may (but is not guaranteed to)
4916 * reflect any modifications subsequent to construction.
4917 */
4918 public static final class ValuesView<K,V>
4919 extends CHMCollectionView<K,V,V>
4920 implements Collection<V>, java.io.Serializable {
4921 private static final long serialVersionUID = 2249069246763182397L;
4922 ValuesView(ConcurrentHashMap<K,V> map) { super(map); }
4923 public final boolean contains(Object o) {
4924 return map.containsValue(o);
4925 }
4926 public final boolean remove(Object o) {
4927 if (o != null) {
4928 for (Iterator<V> it = iterator(); it.hasNext();) {
4929 if (o.equals(it.next())) {
4930 it.remove();
4931 return true;
4932 }
4933 }
4934 }
4935 return false;
4936 }
4937
4938 /**
4939 * @return an iterator over the values of the backing map
4940 */
4941 public final Iterator<V> iterator() {
4942 return new ValueIterator<K,V>(map);
4943 }
4944
4945 /** Always throws {@link UnsupportedOperationException}. */
4946 public final boolean add(V e) {
4947 throw new UnsupportedOperationException();
4948 }
4949 /** Always throws {@link UnsupportedOperationException}. */
4950 public final boolean addAll(Collection<? extends V> c) {
4951 throw new UnsupportedOperationException();
4952 }
4953
4954 public Spliterator<V> spliterator() {
4955 return new ValueIterator<K,V>(map, null);
4956 }
4957
4958 }
4959
4960 /**
4961 * A view of a ConcurrentHashMap as a {@link Set} of (key, value)
4962 * entries. This class cannot be directly instantiated. See
4963 * {@link #entrySet()}.
4964 */
4965 public static final class EntrySetView<K,V>
4966 extends CHMSetView<K,V,Map.Entry<K,V>>
4967 implements Set<Map.Entry<K,V>>, java.io.Serializable {
4968 private static final long serialVersionUID = 2249069246763182397L;
4969 EntrySetView(ConcurrentHashMap<K,V> map) { super(map); }
4970
4971 public final boolean contains(Object o) {
4972 Object k, v, r; Map.Entry<?,?> e;
4973 return ((o instanceof Map.Entry) &&
4974 (k = (e = (Map.Entry<?,?>)o).getKey()) != null &&
4975 (r = map.get(k)) != null &&
4976 (v = e.getValue()) != null &&
4977 (v == r || v.equals(r)));
4978 }
4979 public final boolean remove(Object o) {
4980 Object k, v; Map.Entry<?,?> e;
4981 return ((o instanceof Map.Entry) &&
4982 (k = (e = (Map.Entry<?,?>)o).getKey()) != null &&
4983 (v = e.getValue()) != null &&
4984 map.remove(k, v));
4985 }
4986
4987 /**
4988 * @return an iterator over the entries of the backing map
4989 */
4990 public final Iterator<Map.Entry<K,V>> iterator() {
4991 return new EntryIterator<K,V>(map);
4992 }
4993
4994 /**
4995 * Adds the specified mapping to this view.
4996 *
4997 * @param e mapping to be added
4998 * @return {@code true} if this set changed as a result of the call
4999 * @throws NullPointerException if the entry, its key, or its
5000 * value is null
5001 */
5002 public final boolean add(Entry<K,V> e) {
5003 return map.internalPut(e.getKey(), e.getValue(), false) == null;
5004 }
5005
5006 /**
5007 * Adds all of the mappings in the specified collection to this
5008 * set, as if by calling {@link #add(Map.Entry)} on each one.
5009 * @param c the mappings to be inserted into this set
5010 * @return {@code true} if this set changed as a result of the call
5011 * @throws NullPointerException if the collection or any of its
5012 * entries, keys, or values are null
5013 */
5014 public final boolean addAll(Collection<? extends Entry<K,V>> c) {
5015 boolean added = false;
5016 for (Entry<K,V> e : c) {
5017 if (add(e))
5018 added = true;
5019 }
5020 return added;
5021 }
5022
5023 public Spliterator<Map.Entry<K,V>> spliterator() {
5024 return new EntryIterator<K,V>(map, null);
5025 }
5026
5027 }
5028
5029 // ---------------------------------------------------------------------
5030
5031 /**
5032 * Predefined tasks for performing bulk parallel operations on
5033 * ConcurrentHashMaps. These tasks follow the forms and rules used
5034 * for bulk operations. Each method has the same name, but returns
5035 * a task rather than invoking it. These methods may be useful in
5036 * custom applications such as submitting a task without waiting
5037 * for completion, using a custom pool, or combining with other
5038 * tasks.
5039 */
5040 public static class ForkJoinTasks {
        private ForkJoinTasks() {} // non-instantiable: holds only static factory methods
5042
5043 /**
5044 * Returns a task that when invoked, performs the given
5045 * action for each (key, value)
5046 *
5047 * @param map the map
5048 * @param action the action
5049 * @return the task
5050 */
5051 public static <K,V> ForkJoinTask<Void> forEach
5052 (ConcurrentHashMap<K,V> map,
5053 BiConsumer<? super K, ? super V> action) {
5054 if (action == null) throw new NullPointerException();
5055 return new ForEachMappingTask<K,V>(map, null, -1, action);
5056 }
5057
5058 /**
5059 * Returns a task that when invoked, performs the given
5060 * action for each non-null transformation of each (key, value)
5061 *
5062 * @param map the map
5063 * @param transformer a function returning the transformation
5064 * for an element, or null if there is no transformation (in
5065 * which case the action is not applied)
5066 * @param action the action
5067 * @return the task
5068 */
5069 public static <K,V,U> ForkJoinTask<Void> forEach
5070 (ConcurrentHashMap<K,V> map,
5071 BiFunction<? super K, ? super V, ? extends U> transformer,
5072 Consumer<? super U> action) {
5073 if (transformer == null || action == null)
5074 throw new NullPointerException();
5075 return new ForEachTransformedMappingTask<K,V,U>
5076 (map, null, -1, transformer, action);
5077 }
5078
5079 /**
5080 * Returns a task that when invoked, returns a non-null result
5081 * from applying the given search function on each (key,
5082 * value), or null if none. Upon success, further element
5083 * processing is suppressed and the results of any other
5084 * parallel invocations of the search function are ignored.
5085 *
5086 * @param map the map
5087 * @param searchFunction a function returning a non-null
5088 * result on success, else null
5089 * @return the task
5090 */
5091 public static <K,V,U> ForkJoinTask<U> search
5092 (ConcurrentHashMap<K,V> map,
5093 BiFunction<? super K, ? super V, ? extends U> searchFunction) {
5094 if (searchFunction == null) throw new NullPointerException();
5095 return new SearchMappingsTask<K,V,U>
5096 (map, null, -1, searchFunction,
5097 new AtomicReference<U>());
5098 }
5099
5100 /**
5101 * Returns a task that when invoked, returns the result of
5102 * accumulating the given transformation of all (key, value) pairs
5103 * using the given reducer to combine values, or null if none.
5104 *
5105 * @param map the map
5106 * @param transformer a function returning the transformation
5107 * for an element, or null if there is no transformation (in
5108 * which case it is not combined)
5109 * @param reducer a commutative associative combining function
5110 * @return the task
5111 */
5112 public static <K,V,U> ForkJoinTask<U> reduce
5113 (ConcurrentHashMap<K,V> map,
5114 BiFunction<? super K, ? super V, ? extends U> transformer,
5115 BiFunction<? super U, ? super U, ? extends U> reducer) {
5116 if (transformer == null || reducer == null)
5117 throw new NullPointerException();
5118 return new MapReduceMappingsTask<K,V,U>
5119 (map, null, -1, null, transformer, reducer);
5120 }
5121
5122 /**
5123 * Returns a task that when invoked, returns the result of
5124 * accumulating the given transformation of all (key, value) pairs
5125 * using the given reducer to combine values, and the given
5126 * basis as an identity value.
5127 *
5128 * @param map the map
5129 * @param transformer a function returning the transformation
5130 * for an element
5131 * @param basis the identity (initial default value) for the reduction
5132 * @param reducer a commutative associative combining function
5133 * @return the task
5134 */
5135 public static <K,V> ForkJoinTask<Double> reduceToDouble
5136 (ConcurrentHashMap<K,V> map,
5137 ToDoubleBiFunction<? super K, ? super V> transformer,
5138 double basis,
5139 DoubleBinaryOperator reducer) {
5140 if (transformer == null || reducer == null)
5141 throw new NullPointerException();
5142 return new MapReduceMappingsToDoubleTask<K,V>
5143 (map, null, -1, null, transformer, basis, reducer);
5144 }
5145
5146 /**
5147 * Returns a task that when invoked, returns the result of
5148 * accumulating the given transformation of all (key, value) pairs
5149 * using the given reducer to combine values, and the given
5150 * basis as an identity value.
5151 *
5152 * @param map the map
5153 * @param transformer a function returning the transformation
5154 * for an element
5155 * @param basis the identity (initial default value) for the reduction
5156 * @param reducer a commutative associative combining function
5157 * @return the task
5158 */
5159 public static <K,V> ForkJoinTask<Long> reduceToLong
5160 (ConcurrentHashMap<K,V> map,
5161 ToLongBiFunction<? super K, ? super V> transformer,
5162 long basis,
5163 LongBinaryOperator reducer) {
5164 if (transformer == null || reducer == null)
5165 throw new NullPointerException();
5166 return new MapReduceMappingsToLongTask<K,V>
5167 (map, null, -1, null, transformer, basis, reducer);
5168 }
5169
5170 /**
5171 * Returns a task that when invoked, returns the result of
5172 * accumulating the given transformation of all (key, value) pairs
5173 * using the given reducer to combine values, and the given
5174 * basis as an identity value.
5175 *
5176 * @param map the map
5177 * @param transformer a function returning the transformation
5178 * for an element
5179 * @param basis the identity (initial default value) for the reduction
5180 * @param reducer a commutative associative combining function
5181 * @return the task
5182 */
5183 public static <K,V> ForkJoinTask<Integer> reduceToInt
5184 (ConcurrentHashMap<K,V> map,
5185 ToIntBiFunction<? super K, ? super V> transformer,
5186 int basis,
5187 IntBinaryOperator reducer) {
5188 if (transformer == null || reducer == null)
5189 throw new NullPointerException();
5190 return new MapReduceMappingsToIntTask<K,V>
5191 (map, null, -1, null, transformer, basis, reducer);
5192 }
5193
5194 /**
5195 * Returns a task that when invoked, performs the given action
5196 * for each key.
5197 *
5198 * @param map the map
5199 * @param action the action
5200 * @return the task
5201 */
5202 public static <K,V> ForkJoinTask<Void> forEachKey
5203 (ConcurrentHashMap<K,V> map,
5204 Consumer<? super K> action) {
5205 if (action == null) throw new NullPointerException();
5206 return new ForEachKeyTask<K,V>(map, null, -1, action);
5207 }
5208
5209 /**
5210 * Returns a task that when invoked, performs the given action
5211 * for each non-null transformation of each key.
5212 *
5213 * @param map the map
5214 * @param transformer a function returning the transformation
5215 * for an element, or null if there is no transformation (in
5216 * which case the action is not applied)
5217 * @param action the action
5218 * @return the task
5219 */
5220 public static <K,V,U> ForkJoinTask<Void> forEachKey
5221 (ConcurrentHashMap<K,V> map,
5222 Function<? super K, ? extends U> transformer,
5223 Consumer<? super U> action) {
5224 if (transformer == null || action == null)
5225 throw new NullPointerException();
5226 return new ForEachTransformedKeyTask<K,V,U>
5227 (map, null, -1, transformer, action);
5228 }
5229
5230 /**
5231 * Returns a task that when invoked, returns a non-null result
5232 * from applying the given search function on each key, or
5233 * null if none. Upon success, further element processing is
5234 * suppressed and the results of any other parallel
5235 * invocations of the search function are ignored.
5236 *
5237 * @param map the map
5238 * @param searchFunction a function returning a non-null
5239 * result on success, else null
5240 * @return the task
5241 */
5242 public static <K,V,U> ForkJoinTask<U> searchKeys
5243 (ConcurrentHashMap<K,V> map,
5244 Function<? super K, ? extends U> searchFunction) {
5245 if (searchFunction == null) throw new NullPointerException();
5246 return new SearchKeysTask<K,V,U>
5247 (map, null, -1, searchFunction,
5248 new AtomicReference<U>());
5249 }
5250
5251 /**
5252 * Returns a task that when invoked, returns the result of
5253 * accumulating all keys using the given reducer to combine
5254 * values, or null if none.
5255 *
5256 * @param map the map
5257 * @param reducer a commutative associative combining function
5258 * @return the task
5259 */
5260 public static <K,V> ForkJoinTask<K> reduceKeys
5261 (ConcurrentHashMap<K,V> map,
5262 BiFunction<? super K, ? super K, ? extends K> reducer) {
5263 if (reducer == null) throw new NullPointerException();
5264 return new ReduceKeysTask<K,V>
5265 (map, null, -1, null, reducer);
5266 }
5267
5268 /**
5269 * Returns a task that when invoked, returns the result of
5270 * accumulating the given transformation of all keys using the given
5271 * reducer to combine values, or null if none.
5272 *
5273 * @param map the map
5274 * @param transformer a function returning the transformation
5275 * for an element, or null if there is no transformation (in
5276 * which case it is not combined)
5277 * @param reducer a commutative associative combining function
5278 * @return the task
5279 */
5280 public static <K,V,U> ForkJoinTask<U> reduceKeys
5281 (ConcurrentHashMap<K,V> map,
5282 Function<? super K, ? extends U> transformer,
5283 BiFunction<? super U, ? super U, ? extends U> reducer) {
5284 if (transformer == null || reducer == null)
5285 throw new NullPointerException();
5286 return new MapReduceKeysTask<K,V,U>
5287 (map, null, -1, null, transformer, reducer);
5288 }
5289
5290 /**
5291 * Returns a task that when invoked, returns the result of
5292 * accumulating the given transformation of all keys using the given
5293 * reducer to combine values, and the given basis as an
5294 * identity value.
5295 *
5296 * @param map the map
5297 * @param transformer a function returning the transformation
5298 * for an element
5299 * @param basis the identity (initial default value) for the reduction
5300 * @param reducer a commutative associative combining function
5301 * @return the task
5302 */
5303 public static <K,V> ForkJoinTask<Double> reduceKeysToDouble
5304 (ConcurrentHashMap<K,V> map,
5305 ToDoubleFunction<? super K> transformer,
5306 double basis,
5307 DoubleBinaryOperator reducer) {
5308 if (transformer == null || reducer == null)
5309 throw new NullPointerException();
5310 return new MapReduceKeysToDoubleTask<K,V>
5311 (map, null, -1, null, transformer, basis, reducer);
5312 }
5313
5314 /**
5315 * Returns a task that when invoked, returns the result of
5316 * accumulating the given transformation of all keys using the given
5317 * reducer to combine values, and the given basis as an
5318 * identity value.
5319 *
5320 * @param map the map
5321 * @param transformer a function returning the transformation
5322 * for an element
5323 * @param basis the identity (initial default value) for the reduction
5324 * @param reducer a commutative associative combining function
5325 * @return the task
5326 */
5327 public static <K,V> ForkJoinTask<Long> reduceKeysToLong
5328 (ConcurrentHashMap<K,V> map,
5329 ToLongFunction<? super K> transformer,
5330 long basis,
5331 LongBinaryOperator reducer) {
5332 if (transformer == null || reducer == null)
5333 throw new NullPointerException();
5334 return new MapReduceKeysToLongTask<K,V>
5335 (map, null, -1, null, transformer, basis, reducer);
5336 }
5337
5338 /**
5339 * Returns a task that when invoked, returns the result of
5340 * accumulating the given transformation of all keys using the given
5341 * reducer to combine values, and the given basis as an
5342 * identity value.
5343 *
5344 * @param map the map
5345 * @param transformer a function returning the transformation
5346 * for an element
5347 * @param basis the identity (initial default value) for the reduction
5348 * @param reducer a commutative associative combining function
5349 * @return the task
5350 */
5351 public static <K,V> ForkJoinTask<Integer> reduceKeysToInt
5352 (ConcurrentHashMap<K,V> map,
5353 ToIntFunction<? super K> transformer,
5354 int basis,
5355 IntBinaryOperator reducer) {
5356 if (transformer == null || reducer == null)
5357 throw new NullPointerException();
5358 return new MapReduceKeysToIntTask<K,V>
5359 (map, null, -1, null, transformer, basis, reducer);
5360 }
5361
5362 /**
5363 * Returns a task that when invoked, performs the given action
5364 * for each value.
5365 *
5366 * @param map the map
5367 * @param action the action
5368 * @return the task
5369 */
5370 public static <K,V> ForkJoinTask<Void> forEachValue
5371 (ConcurrentHashMap<K,V> map,
5372 Consumer<? super V> action) {
5373 if (action == null) throw new NullPointerException();
5374 return new ForEachValueTask<K,V>(map, null, -1, action);
5375 }
5376
5377 /**
5378 * Returns a task that when invoked, performs the given action
5379 * for each non-null transformation of each value.
5380 *
5381 * @param map the map
5382 * @param transformer a function returning the transformation
5383 * for an element, or null if there is no transformation (in
5384 * which case the action is not applied)
5385 * @param action the action
5386 * @return the task
5387 */
5388 public static <K,V,U> ForkJoinTask<Void> forEachValue
5389 (ConcurrentHashMap<K,V> map,
5390 Function<? super V, ? extends U> transformer,
5391 Consumer<? super U> action) {
5392 if (transformer == null || action == null)
5393 throw new NullPointerException();
5394 return new ForEachTransformedValueTask<K,V,U>
5395 (map, null, -1, transformer, action);
5396 }
5397
5398 /**
5399 * Returns a task that when invoked, returns a non-null result
5400 * from applying the given search function on each value, or
5401 * null if none. Upon success, further element processing is
5402 * suppressed and the results of any other parallel
5403 * invocations of the search function are ignored.
5404 *
5405 * @param map the map
5406 * @param searchFunction a function returning a non-null
5407 * result on success, else null
5408 * @return the task
5409 */
5410 public static <K,V,U> ForkJoinTask<U> searchValues
5411 (ConcurrentHashMap<K,V> map,
5412 Function<? super V, ? extends U> searchFunction) {
5413 if (searchFunction == null) throw new NullPointerException();
5414 return new SearchValuesTask<K,V,U>
5415 (map, null, -1, searchFunction,
5416 new AtomicReference<U>());
5417 }
5418
5419 /**
5420 * Returns a task that when invoked, returns the result of
5421 * accumulating all values using the given reducer to combine
5422 * values, or null if none.
5423 *
5424 * @param map the map
5425 * @param reducer a commutative associative combining function
5426 * @return the task
5427 */
5428 public static <K,V> ForkJoinTask<V> reduceValues
5429 (ConcurrentHashMap<K,V> map,
5430 BiFunction<? super V, ? super V, ? extends V> reducer) {
5431 if (reducer == null) throw new NullPointerException();
5432 return new ReduceValuesTask<K,V>
5433 (map, null, -1, null, reducer);
5434 }
5435
5436 /**
5437 * Returns a task that when invoked, returns the result of
5438 * accumulating the given transformation of all values using the
5439 * given reducer to combine values, or null if none.
5440 *
5441 * @param map the map
5442 * @param transformer a function returning the transformation
5443 * for an element, or null if there is no transformation (in
5444 * which case it is not combined)
5445 * @param reducer a commutative associative combining function
5446 * @return the task
5447 */
5448 public static <K,V,U> ForkJoinTask<U> reduceValues
5449 (ConcurrentHashMap<K,V> map,
5450 Function<? super V, ? extends U> transformer,
5451 BiFunction<? super U, ? super U, ? extends U> reducer) {
5452 if (transformer == null || reducer == null)
5453 throw new NullPointerException();
5454 return new MapReduceValuesTask<K,V,U>
5455 (map, null, -1, null, transformer, reducer);
5456 }
5457
5458 /**
5459 * Returns a task that when invoked, returns the result of
5460 * accumulating the given transformation of all values using the
5461 * given reducer to combine values, and the given basis as an
5462 * identity value.
5463 *
5464 * @param map the map
5465 * @param transformer a function returning the transformation
5466 * for an element
5467 * @param basis the identity (initial default value) for the reduction
5468 * @param reducer a commutative associative combining function
5469 * @return the task
5470 */
5471 public static <K,V> ForkJoinTask<Double> reduceValuesToDouble
5472 (ConcurrentHashMap<K,V> map,
5473 ToDoubleFunction<? super V> transformer,
5474 double basis,
5475 DoubleBinaryOperator reducer) {
5476 if (transformer == null || reducer == null)
5477 throw new NullPointerException();
5478 return new MapReduceValuesToDoubleTask<K,V>
5479 (map, null, -1, null, transformer, basis, reducer);
5480 }
5481
5482 /**
5483 * Returns a task that when invoked, returns the result of
5484 * accumulating the given transformation of all values using the
5485 * given reducer to combine values, and the given basis as an
5486 * identity value.
5487 *
5488 * @param map the map
5489 * @param transformer a function returning the transformation
5490 * for an element
5491 * @param basis the identity (initial default value) for the reduction
5492 * @param reducer a commutative associative combining function
5493 * @return the task
5494 */
5495 public static <K,V> ForkJoinTask<Long> reduceValuesToLong
5496 (ConcurrentHashMap<K,V> map,
5497 ToLongFunction<? super V> transformer,
5498 long basis,
5499 LongBinaryOperator reducer) {
5500 if (transformer == null || reducer == null)
5501 throw new NullPointerException();
5502 return new MapReduceValuesToLongTask<K,V>
5503 (map, null, -1, null, transformer, basis, reducer);
5504 }
5505
5506 /**
5507 * Returns a task that when invoked, returns the result of
5508 * accumulating the given transformation of all values using the
5509 * given reducer to combine values, and the given basis as an
5510 * identity value.
5511 *
5512 * @param map the map
5513 * @param transformer a function returning the transformation
5514 * for an element
5515 * @param basis the identity (initial default value) for the reduction
5516 * @param reducer a commutative associative combining function
5517 * @return the task
5518 */
5519 public static <K,V> ForkJoinTask<Integer> reduceValuesToInt
5520 (ConcurrentHashMap<K,V> map,
5521 ToIntFunction<? super V> transformer,
5522 int basis,
5523 IntBinaryOperator reducer) {
5524 if (transformer == null || reducer == null)
5525 throw new NullPointerException();
5526 return new MapReduceValuesToIntTask<K,V>
5527 (map, null, -1, null, transformer, basis, reducer);
5528 }
5529
5530 /**
5531 * Returns a task that when invoked, perform the given action
5532 * for each entry.
5533 *
5534 * @param map the map
5535 * @param action the action
5536 * @return the task
5537 */
5538 public static <K,V> ForkJoinTask<Void> forEachEntry
5539 (ConcurrentHashMap<K,V> map,
5540 Consumer<? super Map.Entry<K,V>> action) {
5541 if (action == null) throw new NullPointerException();
5542 return new ForEachEntryTask<K,V>(map, null, -1, action);
5543 }
5544
5545 /**
5546 * Returns a task that when invoked, perform the given action
5547 * for each non-null transformation of each entry.
5548 *
5549 * @param map the map
5550 * @param transformer a function returning the transformation
5551 * for an element, or null if there is no transformation (in
5552 * which case the action is not applied)
5553 * @param action the action
5554 * @return the task
5555 */
5556 public static <K,V,U> ForkJoinTask<Void> forEachEntry
5557 (ConcurrentHashMap<K,V> map,
5558 Function<Map.Entry<K,V>, ? extends U> transformer,
5559 Consumer<? super U> action) {
5560 if (transformer == null || action == null)
5561 throw new NullPointerException();
5562 return new ForEachTransformedEntryTask<K,V,U>
5563 (map, null, -1, transformer, action);
5564 }
5565
5566 /**
5567 * Returns a task that when invoked, returns a non-null result
5568 * from applying the given search function on each entry, or
5569 * null if none. Upon success, further element processing is
5570 * suppressed and the results of any other parallel
5571 * invocations of the search function are ignored.
5572 *
5573 * @param map the map
5574 * @param searchFunction a function returning a non-null
5575 * result on success, else null
5576 * @return the task
5577 */
5578 public static <K,V,U> ForkJoinTask<U> searchEntries
5579 (ConcurrentHashMap<K,V> map,
5580 Function<Map.Entry<K,V>, ? extends U> searchFunction) {
5581 if (searchFunction == null) throw new NullPointerException();
5582 return new SearchEntriesTask<K,V,U>
5583 (map, null, -1, searchFunction,
5584 new AtomicReference<U>());
5585 }
5586
5587 /**
5588 * Returns a task that when invoked, returns the result of
5589 * accumulating all entries using the given reducer to combine
5590 * values, or null if none.
5591 *
5592 * @param map the map
5593 * @param reducer a commutative associative combining function
5594 * @return the task
5595 */
5596 public static <K,V> ForkJoinTask<Map.Entry<K,V>> reduceEntries
5597 (ConcurrentHashMap<K,V> map,
5598 BiFunction<Map.Entry<K,V>, Map.Entry<K,V>, ? extends Map.Entry<K,V>> reducer) {
5599 if (reducer == null) throw new NullPointerException();
5600 return new ReduceEntriesTask<K,V>
5601 (map, null, -1, null, reducer);
5602 }
5603
5604 /**
5605 * Returns a task that when invoked, returns the result of
5606 * accumulating the given transformation of all entries using the
5607 * given reducer to combine values, or null if none.
5608 *
5609 * @param map the map
5610 * @param transformer a function returning the transformation
5611 * for an element, or null if there is no transformation (in
5612 * which case it is not combined)
5613 * @param reducer a commutative associative combining function
5614 * @return the task
5615 */
5616 public static <K,V,U> ForkJoinTask<U> reduceEntries
5617 (ConcurrentHashMap<K,V> map,
5618 Function<Map.Entry<K,V>, ? extends U> transformer,
5619 BiFunction<? super U, ? super U, ? extends U> reducer) {
5620 if (transformer == null || reducer == null)
5621 throw new NullPointerException();
5622 return new MapReduceEntriesTask<K,V,U>
5623 (map, null, -1, null, transformer, reducer);
5624 }
5625
5626 /**
5627 * Returns a task that when invoked, returns the result of
5628 * accumulating the given transformation of all entries using the
5629 * given reducer to combine values, and the given basis as an
5630 * identity value.
5631 *
5632 * @param map the map
5633 * @param transformer a function returning the transformation
5634 * for an element
5635 * @param basis the identity (initial default value) for the reduction
5636 * @param reducer a commutative associative combining function
5637 * @return the task
5638 */
5639 public static <K,V> ForkJoinTask<Double> reduceEntriesToDouble
5640 (ConcurrentHashMap<K,V> map,
5641 ToDoubleFunction<Map.Entry<K,V>> transformer,
5642 double basis,
5643 DoubleBinaryOperator reducer) {
5644 if (transformer == null || reducer == null)
5645 throw new NullPointerException();
5646 return new MapReduceEntriesToDoubleTask<K,V>
5647 (map, null, -1, null, transformer, basis, reducer);
5648 }
5649
5650 /**
5651 * Returns a task that when invoked, returns the result of
5652 * accumulating the given transformation of all entries using the
5653 * given reducer to combine values, and the given basis as an
5654 * identity value.
5655 *
5656 * @param map the map
5657 * @param transformer a function returning the transformation
5658 * for an element
5659 * @param basis the identity (initial default value) for the reduction
5660 * @param reducer a commutative associative combining function
5661 * @return the task
5662 */
5663 public static <K,V> ForkJoinTask<Long> reduceEntriesToLong
5664 (ConcurrentHashMap<K,V> map,
5665 ToLongFunction<Map.Entry<K,V>> transformer,
5666 long basis,
5667 LongBinaryOperator reducer) {
5668 if (transformer == null || reducer == null)
5669 throw new NullPointerException();
5670 return new MapReduceEntriesToLongTask<K,V>
5671 (map, null, -1, null, transformer, basis, reducer);
5672 }
5673
5674 /**
5675 * Returns a task that when invoked, returns the result of
5676 * accumulating the given transformation of all entries using the
5677 * given reducer to combine values, and the given basis as an
5678 * identity value.
5679 *
5680 * @param map the map
5681 * @param transformer a function returning the transformation
5682 * for an element
5683 * @param basis the identity (initial default value) for the reduction
5684 * @param reducer a commutative associative combining function
5685 * @return the task
5686 */
5687 public static <K,V> ForkJoinTask<Integer> reduceEntriesToInt
5688 (ConcurrentHashMap<K,V> map,
5689 ToIntFunction<Map.Entry<K,V>> transformer,
5690 int basis,
5691 IntBinaryOperator reducer) {
5692 if (transformer == null || reducer == null)
5693 throw new NullPointerException();
5694 return new MapReduceEntriesToIntTask<K,V>
5695 (map, null, -1, null, transformer, basis, reducer);
5696 }
5697 }
5698
5699 // -------------------------------------------------------
5700
5701 /*
5702 * Task classes. Coded in a regular but ugly format/style to
5703 * simplify checks that each variant differs in the right way from
5704 * others. The null screenings exist because compilers cannot tell
5705 * that we've already null-checked task arguments, so we force
5706 * simplest hoisted bypass to help avoid convoluted traps.
5707 */
5708
    /**
     * Task applying a given action to every key.  While preSplit()
     * reports a positive batch, a subtask is forked for it; the
     * remainder is processed by this task via forEachKey.
     */
    @SuppressWarnings("serial") static final class ForEachKeyTask<K,V>
        extends Traverser<K,V,Void> {
        final Consumer<? super K> action;
        ForEachKeyTask
            (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
             Consumer<? super K> action) {
            super(m, p, b);
            this.action = action;
        }
        public final void compute() {
            final Consumer<? super K> action;
            // Null screening (see comment preceding these task classes).
            if ((action = this.action) != null) {
                for (int b; (b = preSplit()) > 0;)
                    new ForEachKeyTask<K,V>(map, this, b, action).fork();
                forEachKey(action);
                // Signal completion up the task tree once local work is done.
                propagateCompletion();
            }
        }
    }
5728
    /**
     * Task applying a given action to every value.  While preSplit()
     * reports a positive batch, a subtask is forked for it; the
     * remainder is processed by this task via forEachValue.
     */
    @SuppressWarnings("serial") static final class ForEachValueTask<K,V>
        extends Traverser<K,V,Void> {
        final Consumer<? super V> action;
        ForEachValueTask
            (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
             Consumer<? super V> action) {
            super(m, p, b);
            this.action = action;
        }
        public final void compute() {
            final Consumer<? super V> action;
            // Null screening (see comment preceding these task classes).
            if ((action = this.action) != null) {
                for (int b; (b = preSplit()) > 0;)
                    new ForEachValueTask<K,V>(map, this, b, action).fork();
                forEachValue(action);
                // Signal completion up the task tree once local work is done.
                propagateCompletion();
            }
        }
    }
5748
    /**
     * Task applying a given action to every entry.  While preSplit()
     * reports a positive batch, a subtask is forked for it; the
     * remainder is traversed here, wrapping each (nextKey, value)
     * pair via entryFor before applying the action.
     */
    @SuppressWarnings("serial") static final class ForEachEntryTask<K,V>
        extends Traverser<K,V,Void> {
        final Consumer<? super Entry<K,V>> action;
        ForEachEntryTask
            (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
             Consumer<? super Entry<K,V>> action) {
            super(m, p, b);
            this.action = action;
        }
        public final void compute() {
            final Consumer<? super Entry<K,V>> action;
            // Null screening (see comment preceding these task classes).
            if ((action = this.action) != null) {
                for (int b; (b = preSplit()) > 0;)
                    new ForEachEntryTask<K,V>(map, this, b, action).fork();
                V v;
                // nextKey is the key associated with the value just advanced to.
                while ((v = advanceValue()) != null)
                    action.accept(entryFor(nextKey, v));
                propagateCompletion();
            }
        }
    }
5770
    /**
     * Task applying a given BiConsumer to every (key, value) mapping.
     * While preSplit() reports a positive batch, a subtask is forked
     * for it; the remainder is traversed and processed here.
     */
    @SuppressWarnings("serial") static final class ForEachMappingTask<K,V>
        extends Traverser<K,V,Void> {
        final BiConsumer<? super K, ? super V> action;
        ForEachMappingTask
            (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
             BiConsumer<? super K,? super V> action) {
            super(m, p, b);
            this.action = action;
        }
        public final void compute() {
            final BiConsumer<? super K, ? super V> action;
            // Null screening (see comment preceding these task classes).
            if ((action = this.action) != null) {
                for (int b; (b = preSplit()) > 0;)
                    new ForEachMappingTask<K,V>(map, this, b, action).fork();
                V v;
                // nextKey is the key associated with the value just advanced to.
                while ((v = advanceValue()) != null)
                    action.accept(nextKey, v);
                propagateCompletion();
            }
        }
    }
5792
    /**
     * Task applying an action to each non-null transformation of each
     * key.  A null transformer result suppresses the action for that
     * element.  Splits subtasks while preSplit() permits.
     */
    @SuppressWarnings("serial") static final class ForEachTransformedKeyTask<K,V,U>
        extends Traverser<K,V,Void> {
        final Function<? super K, ? extends U> transformer;
        final Consumer<? super U> action;
        ForEachTransformedKeyTask
            (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
             Function<? super K, ? extends U> transformer, Consumer<? super U> action) {
            super(m, p, b);
            this.transformer = transformer; this.action = action;
        }
        public final void compute() {
            final Function<? super K, ? extends U> transformer;
            final Consumer<? super U> action;
            // Null screening (see comment preceding these task classes).
            if ((transformer = this.transformer) != null &&
                (action = this.action) != null) {
                for (int b; (b = preSplit()) > 0;)
                    new ForEachTransformedKeyTask<K,V,U>
                        (map, this, b, transformer, action).fork();
                K k; U u;
                while ((k = advanceKey()) != null) {
                    // Skip elements whose transformation is null.
                    if ((u = transformer.apply(k)) != null)
                        action.accept(u);
                }
                propagateCompletion();
            }
        }
    }
5820
    /**
     * Task applying an action to each non-null transformation of each
     * value.  A null transformer result suppresses the action for that
     * element.  Splits subtasks while preSplit() permits.
     */
    @SuppressWarnings("serial") static final class ForEachTransformedValueTask<K,V,U>
        extends Traverser<K,V,Void> {
        final Function<? super V, ? extends U> transformer;
        final Consumer<? super U> action;
        ForEachTransformedValueTask
            (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
             Function<? super V, ? extends U> transformer, Consumer<? super U> action) {
            super(m, p, b);
            this.transformer = transformer; this.action = action;
        }
        public final void compute() {
            final Function<? super V, ? extends U> transformer;
            final Consumer<? super U> action;
            // Null screening (see comment preceding these task classes).
            if ((transformer = this.transformer) != null &&
                (action = this.action) != null) {
                for (int b; (b = preSplit()) > 0;)
                    new ForEachTransformedValueTask<K,V,U>
                        (map, this, b, transformer, action).fork();
                V v; U u;
                while ((v = advanceValue()) != null) {
                    // Skip elements whose transformation is null.
                    if ((u = transformer.apply(v)) != null)
                        action.accept(u);
                }
                propagateCompletion();
            }
        }
    }
5848
    /**
     * Task applying an action to each non-null transformation of each
     * entry.  Entries are materialized via entryFor(nextKey, v); a null
     * transformer result suppresses the action for that element.
     */
    @SuppressWarnings("serial") static final class ForEachTransformedEntryTask<K,V,U>
        extends Traverser<K,V,Void> {
        final Function<Map.Entry<K,V>, ? extends U> transformer;
        final Consumer<? super U> action;
        ForEachTransformedEntryTask
            (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
             Function<Map.Entry<K,V>, ? extends U> transformer, Consumer<? super U> action) {
            super(m, p, b);
            this.transformer = transformer; this.action = action;
        }
        public final void compute() {
            final Function<Map.Entry<K,V>, ? extends U> transformer;
            final Consumer<? super U> action;
            // Null screening (see comment preceding these task classes).
            if ((transformer = this.transformer) != null &&
                (action = this.action) != null) {
                for (int b; (b = preSplit()) > 0;)
                    new ForEachTransformedEntryTask<K,V,U>
                        (map, this, b, transformer, action).fork();
                V v; U u;
                while ((v = advanceValue()) != null) {
                    // Skip entries whose transformation is null.
                    if ((u = transformer.apply(entryFor(nextKey,
                                                        v))) != null)
                        action.accept(u);
                }
                propagateCompletion();
            }
        }
    }
5877
    /**
     * Task applying an action to each non-null transformation of each
     * (key, value) mapping.  A null transformer result suppresses the
     * action for that element.  Splits subtasks while preSplit() permits.
     */
    @SuppressWarnings("serial") static final class ForEachTransformedMappingTask<K,V,U>
        extends Traverser<K,V,Void> {
        final BiFunction<? super K, ? super V, ? extends U> transformer;
        final Consumer<? super U> action;
        ForEachTransformedMappingTask
            (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
             BiFunction<? super K, ? super V, ? extends U> transformer,
             Consumer<? super U> action) {
            super(m, p, b);
            this.transformer = transformer; this.action = action;
        }
        public final void compute() {
            final BiFunction<? super K, ? super V, ? extends U> transformer;
            final Consumer<? super U> action;
            // Null screening (see comment preceding these task classes).
            if ((transformer = this.transformer) != null &&
                (action = this.action) != null) {
                for (int b; (b = preSplit()) > 0;)
                    new ForEachTransformedMappingTask<K,V,U>
                        (map, this, b, transformer, action).fork();
                V v; U u;
                while ((v = advanceValue()) != null) {
                    // Skip mappings whose transformation is null.
                    if ((u = transformer.apply(nextKey, v)) != null)
                        action.accept(u);
                }
                propagateCompletion();
            }
        }
    }
5906
    /**
     * Task searching keys for a non-null result of the search function.
     * The first task to find one publishes it by CASing the shared
     * AtomicReference and quietly completes the root, which short-circuits
     * all sibling tasks (they poll result.get() and stop once non-null).
     */
    @SuppressWarnings("serial") static final class SearchKeysTask<K,V,U>
        extends Traverser<K,V,U> {
        final Function<? super K, ? extends U> searchFunction;
        final AtomicReference<U> result;
        SearchKeysTask
            (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
             Function<? super K, ? extends U> searchFunction,
             AtomicReference<U> result) {
            super(m, p, b);
            this.searchFunction = searchFunction; this.result = result;
        }
        public final U getRawResult() { return result.get(); }
        public final void compute() {
            final Function<? super K, ? extends U> searchFunction;
            final AtomicReference<U> result;
            // Null screening (see comment preceding these task classes).
            if ((searchFunction = this.searchFunction) != null &&
                (result = this.result) != null) {
                // Split phase: stop splitting (and return early) if a
                // result has already been found elsewhere.
                for (int b;;) {
                    if (result.get() != null)
                        return;
                    if ((b = preSplit()) <= 0)
                        break;
                    new SearchKeysTask<K,V,U>
                        (map, this, b, searchFunction, result).fork();
                }
                // Local search; re-check result between elements so a hit
                // in another task terminates this one promptly.
                while (result.get() == null) {
                    K k; U u;
                    if ((k = advanceKey()) == null) {
                        propagateCompletion();
                        break;
                    }
                    if ((u = searchFunction.apply(k)) != null) {
                        if (result.compareAndSet(null, u))
                            quietlyCompleteRoot();
                        break;
                    }
                }
            }
        }
    }
5947
    /**
     * Task searching values for a non-null result of the search function.
     * The first task to find one publishes it by CASing the shared
     * AtomicReference and quietly completes the root, which short-circuits
     * all sibling tasks (they poll result.get() and stop once non-null).
     */
    @SuppressWarnings("serial") static final class SearchValuesTask<K,V,U>
        extends Traverser<K,V,U> {
        final Function<? super V, ? extends U> searchFunction;
        final AtomicReference<U> result;
        SearchValuesTask
            (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
             Function<? super V, ? extends U> searchFunction,
             AtomicReference<U> result) {
            super(m, p, b);
            this.searchFunction = searchFunction; this.result = result;
        }
        public final U getRawResult() { return result.get(); }
        public final void compute() {
            final Function<? super V, ? extends U> searchFunction;
            final AtomicReference<U> result;
            // Null screening (see comment preceding these task classes).
            if ((searchFunction = this.searchFunction) != null &&
                (result = this.result) != null) {
                // Split phase: stop splitting (and return early) if a
                // result has already been found elsewhere.
                for (int b;;) {
                    if (result.get() != null)
                        return;
                    if ((b = preSplit()) <= 0)
                        break;
                    new SearchValuesTask<K,V,U>
                        (map, this, b, searchFunction, result).fork();
                }
                // Local search; re-check result between elements so a hit
                // in another task terminates this one promptly.
                while (result.get() == null) {
                    V v; U u;
                    if ((v = advanceValue()) == null) {
                        propagateCompletion();
                        break;
                    }
                    if ((u = searchFunction.apply(v)) != null) {
                        if (result.compareAndSet(null, u))
                            quietlyCompleteRoot();
                        break;
                    }
                }
            }
        }
    }
5988
    /**
     * Task searching entries for a non-null result of the search function.
     * The first task to find one publishes it by CASing the shared
     * AtomicReference and quietly completes the root, which short-circuits
     * all sibling tasks (they poll result.get() and stop once non-null).
     */
    @SuppressWarnings("serial") static final class SearchEntriesTask<K,V,U>
        extends Traverser<K,V,U> {
        final Function<Entry<K,V>, ? extends U> searchFunction;
        final AtomicReference<U> result;
        SearchEntriesTask
            (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
             Function<Entry<K,V>, ? extends U> searchFunction,
             AtomicReference<U> result) {
            super(m, p, b);
            this.searchFunction = searchFunction; this.result = result;
        }
        public final U getRawResult() { return result.get(); }
        public final void compute() {
            final Function<Entry<K,V>, ? extends U> searchFunction;
            final AtomicReference<U> result;
            // Null screening (see comment preceding these task classes).
            if ((searchFunction = this.searchFunction) != null &&
                (result = this.result) != null) {
                // Split phase: stop splitting (and return early) if a
                // result has already been found elsewhere.
                for (int b;;) {
                    if (result.get() != null)
                        return;
                    if ((b = preSplit()) <= 0)
                        break;
                    new SearchEntriesTask<K,V,U>
                        (map, this, b, searchFunction, result).fork();
                }
                // Local search; re-check result between elements so a hit
                // in another task terminates this one promptly.
                while (result.get() == null) {
                    V v; U u;
                    if ((v = advanceValue()) == null) {
                        propagateCompletion();
                        break;
                    }
                    if ((u = searchFunction.apply(entryFor(nextKey,
                                                           v))) != null) {
                        if (result.compareAndSet(null, u))
                            quietlyCompleteRoot();
                        // Unlike the sibling search tasks this exits with
                        // return rather than break; equivalent here, since
                        // the loop condition would now be false anyway.
                        return;
                    }
                }
            }
        }
    }
6030
    /**
     * Task searching (key, value) mappings for a non-null result of the
     * search function.  The first task to find one publishes it by CASing
     * the shared AtomicReference and quietly completes the root, which
     * short-circuits all sibling tasks.
     */
    @SuppressWarnings("serial") static final class SearchMappingsTask<K,V,U>
        extends Traverser<K,V,U> {
        final BiFunction<? super K, ? super V, ? extends U> searchFunction;
        final AtomicReference<U> result;
        SearchMappingsTask
            (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
             BiFunction<? super K, ? super V, ? extends U> searchFunction,
             AtomicReference<U> result) {
            super(m, p, b);
            this.searchFunction = searchFunction; this.result = result;
        }
        public final U getRawResult() { return result.get(); }
        public final void compute() {
            final BiFunction<? super K, ? super V, ? extends U> searchFunction;
            final AtomicReference<U> result;
            // Null screening (see comment preceding these task classes).
            if ((searchFunction = this.searchFunction) != null &&
                (result = this.result) != null) {
                // Split phase: stop splitting (and return early) if a
                // result has already been found elsewhere.
                for (int b;;) {
                    if (result.get() != null)
                        return;
                    if ((b = preSplit()) <= 0)
                        break;
                    new SearchMappingsTask<K,V,U>
                        (map, this, b, searchFunction, result).fork();
                }
                // Local search; re-check result between elements so a hit
                // in another task terminates this one promptly.
                while (result.get() == null) {
                    V v; U u;
                    if ((v = advanceValue()) == null) {
                        propagateCompletion();
                        break;
                    }
                    if ((u = searchFunction.apply(nextKey, v)) != null) {
                        if (result.compareAndSet(null, u))
                            quietlyCompleteRoot();
                        break;
                    }
                }
            }
        }
    }
6071
6072 @SuppressWarnings("serial") static final class ReduceKeysTask<K,V>
6073 extends Traverser<K,V,K> {
6074 final BiFunction<? super K, ? super K, ? extends K> reducer;
6075 K result;
6076 ReduceKeysTask<K,V> rights, nextRight;
6077 ReduceKeysTask
6078 (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
6079 ReduceKeysTask<K,V> nextRight,
6080 BiFunction<? super K, ? super K, ? extends K> reducer) {
6081 super(m, p, b); this.nextRight = nextRight;
6082 this.reducer = reducer;
6083 }
6084 public final K getRawResult() { return result; }
6085 @SuppressWarnings("unchecked") public final void compute() {
6086 final BiFunction<? super K, ? super K, ? extends K> reducer;
6087 if ((reducer = this.reducer) != null) {
6088 for (int b; (b = preSplit()) > 0;)
6089 (rights = new ReduceKeysTask<K,V>
6090 (map, this, b, rights, reducer)).fork();
6091 K u, r = null;
6092 while ((u = advanceKey()) != null) {
6093 r = (r == null) ? u : u == null ? r : reducer.apply(r, u);
6094 }
6095 result = r;
6096 CountedCompleter<?> c;
6097 for (c = firstComplete(); c != null; c = c.nextComplete()) {
6098 ReduceKeysTask<K,V>
6099 t = (ReduceKeysTask<K,V>)c,
6100 s = t.rights;
6101 while (s != null) {
6102 K tr, sr;
6103 if ((sr = s.result) != null)
6104 t.result = (((tr = t.result) == null) ? sr :
6105 reducer.apply(tr, sr));
6106 s = t.rights = s.nextRight;
6107 }
6108 }
6109 }
6110 }
6111 }
6112
    /**
     * Task reducing all values with the given reducer, yielding null for
     * an empty map.  Forked subtasks are linked through the {@code rights}
     * list; on completion, each finished subtask's result is folded into
     * its parent's in the firstComplete/nextComplete loop.
     */
    @SuppressWarnings("serial") static final class ReduceValuesTask<K,V>
        extends Traverser<K,V,V> {
        final BiFunction<? super V, ? super V, ? extends V> reducer;
        V result;
        ReduceValuesTask<K,V> rights, nextRight;
        ReduceValuesTask
            (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
             ReduceValuesTask<K,V> nextRight,
             BiFunction<? super V, ? super V, ? extends V> reducer) {
            super(m, p, b); this.nextRight = nextRight;
            this.reducer = reducer;
        }
        public final V getRawResult() { return result; }
        @SuppressWarnings("unchecked") public final void compute() {
            final BiFunction<? super V, ? super V, ? extends V> reducer;
            // Null screening (see comment preceding these task classes).
            if ((reducer = this.reducer) != null) {
                for (int b; (b = preSplit()) > 0;)
                    (rights = new ReduceValuesTask<K,V>
                     (map, this, b, rights, reducer)).fork();
                V r = null, v;
                while ((v = advanceValue()) != null)
                    r = (r == null) ? v : reducer.apply(r, v);
                result = r;
                // Merge results of completed subtasks into their parents.
                CountedCompleter<?> c;
                for (c = firstComplete(); c != null; c = c.nextComplete()) {
                    ReduceValuesTask<K,V>
                        t = (ReduceValuesTask<K,V>)c,
                        s = t.rights;
                    while (s != null) {
                        V tr, sr;
                        if ((sr = s.result) != null)
                            t.result = (((tr = t.result) == null) ? sr :
                                        reducer.apply(tr, sr));
                        s = t.rights = s.nextRight;
                    }
                }
            }
        }
    }
6152
6153 @SuppressWarnings("serial") static final class ReduceEntriesTask<K,V>
6154 extends Traverser<K,V,Map.Entry<K,V>> {
6155 final BiFunction<Map.Entry<K,V>, Map.Entry<K,V>, ? extends Map.Entry<K,V>> reducer;
6156 Map.Entry<K,V> result;
6157 ReduceEntriesTask<K,V> rights, nextRight;
6158 ReduceEntriesTask
6159 (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
6160 ReduceEntriesTask<K,V> nextRight,
6161 BiFunction<Entry<K,V>, Map.Entry<K,V>, ? extends Map.Entry<K,V>> reducer) {
6162 super(m, p, b); this.nextRight = nextRight;
6163 this.reducer = reducer;
6164 }
6165 public final Map.Entry<K,V> getRawResult() { return result; }
6166 @SuppressWarnings("unchecked") public final void compute() {
6167 final BiFunction<Map.Entry<K,V>, Map.Entry<K,V>, ? extends Map.Entry<K,V>> reducer;
6168 if ((reducer = this.reducer) != null) {
6169 for (int b; (b = preSplit()) > 0;)
6170 (rights = new ReduceEntriesTask<K,V>
6171 (map, this, b, rights, reducer)).fork();
6172 Map.Entry<K,V> r = null;
6173 V v;
6174 while ((v = advanceValue()) != null) {
6175 Map.Entry<K,V> u = entryFor(nextKey, v);
6176 r = (r == null) ? u : reducer.apply(r, u);
6177 }
6178 result = r;
6179 CountedCompleter<?> c;
6180 for (c = firstComplete(); c != null; c = c.nextComplete()) {
6181 ReduceEntriesTask<K,V>
6182 t = (ReduceEntriesTask<K,V>)c,
6183 s = t.rights;
6184 while (s != null) {
6185 Map.Entry<K,V> tr, sr;
6186 if ((sr = s.result) != null)
6187 t.result = (((tr = t.result) == null) ? sr :
6188 reducer.apply(tr, sr));
6189 s = t.rights = s.nextRight;
6190 }
6191 }
6192 }
6193 }
6194 }
6195
    /**
     * Task reducing the non-null transformations of all keys with the
     * given reducer, yielding null if no element transforms to non-null.
     * Forked subtasks are linked through the {@code rights} list and
     * merged in the firstComplete/nextComplete loop.
     */
    @SuppressWarnings("serial") static final class MapReduceKeysTask<K,V,U>
        extends Traverser<K,V,U> {
        final Function<? super K, ? extends U> transformer;
        final BiFunction<? super U, ? super U, ? extends U> reducer;
        U result;
        MapReduceKeysTask<K,V,U> rights, nextRight;
        MapReduceKeysTask
            (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
             MapReduceKeysTask<K,V,U> nextRight,
             Function<? super K, ? extends U> transformer,
             BiFunction<? super U, ? super U, ? extends U> reducer) {
            super(m, p, b); this.nextRight = nextRight;
            this.transformer = transformer;
            this.reducer = reducer;
        }
        public final U getRawResult() { return result; }
        @SuppressWarnings("unchecked") public final void compute() {
            final Function<? super K, ? extends U> transformer;
            final BiFunction<? super U, ? super U, ? extends U> reducer;
            // Null screening (see comment preceding these task classes).
            if ((transformer = this.transformer) != null &&
                (reducer = this.reducer) != null) {
                for (int b; (b = preSplit()) > 0;)
                    (rights = new MapReduceKeysTask<K,V,U>
                     (map, this, b, rights, transformer, reducer)).fork();
                K k; U r = null, u;
                while ((k = advanceKey()) != null) {
                    // Null transformations are skipped, not combined.
                    if ((u = transformer.apply(k)) != null)
                        r = (r == null) ? u : reducer.apply(r, u);
                }
                result = r;
                // Merge results of completed subtasks into their parents.
                CountedCompleter<?> c;
                for (c = firstComplete(); c != null; c = c.nextComplete()) {
                    MapReduceKeysTask<K,V,U>
                        t = (MapReduceKeysTask<K,V,U>)c,
                        s = t.rights;
                    while (s != null) {
                        U tr, sr;
                        if ((sr = s.result) != null)
                            t.result = (((tr = t.result) == null) ? sr :
                                        reducer.apply(tr, sr));
                        s = t.rights = s.nextRight;
                    }
                }
            }
        }
    }
6242
    /**
     * Task reducing the non-null transformations of all values with the
     * given reducer, yielding null if no element transforms to non-null.
     * Forked subtasks are linked through the {@code rights} list and
     * merged in the firstComplete/nextComplete loop.
     */
    @SuppressWarnings("serial") static final class MapReduceValuesTask<K,V,U>
        extends Traverser<K,V,U> {
        final Function<? super V, ? extends U> transformer;
        final BiFunction<? super U, ? super U, ? extends U> reducer;
        U result;
        MapReduceValuesTask<K,V,U> rights, nextRight;
        MapReduceValuesTask
            (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
             MapReduceValuesTask<K,V,U> nextRight,
             Function<? super V, ? extends U> transformer,
             BiFunction<? super U, ? super U, ? extends U> reducer) {
            super(m, p, b); this.nextRight = nextRight;
            this.transformer = transformer;
            this.reducer = reducer;
        }
        public final U getRawResult() { return result; }
        @SuppressWarnings("unchecked") public final void compute() {
            final Function<? super V, ? extends U> transformer;
            final BiFunction<? super U, ? super U, ? extends U> reducer;
            // Null screening (see comment preceding these task classes).
            if ((transformer = this.transformer) != null &&
                (reducer = this.reducer) != null) {
                for (int b; (b = preSplit()) > 0;)
                    (rights = new MapReduceValuesTask<K,V,U>
                     (map, this, b, rights, transformer, reducer)).fork();
                U r = null, u;
                V v;
                while ((v = advanceValue()) != null) {
                    // Null transformations are skipped, not combined.
                    if ((u = transformer.apply(v)) != null)
                        r = (r == null) ? u : reducer.apply(r, u);
                }
                result = r;
                // Merge results of completed subtasks into their parents.
                CountedCompleter<?> c;
                for (c = firstComplete(); c != null; c = c.nextComplete()) {
                    MapReduceValuesTask<K,V,U>
                        t = (MapReduceValuesTask<K,V,U>)c,
                        s = t.rights;
                    while (s != null) {
                        U tr, sr;
                        if ((sr = s.result) != null)
                            t.result = (((tr = t.result) == null) ? sr :
                                        reducer.apply(tr, sr));
                        s = t.rights = s.nextRight;
                    }
                }
            }
        }
    }
6290
    /**
     * Task reducing the non-null transformations of all entries with the
     * given reducer, yielding null if no element transforms to non-null.
     * Entries are materialized via entryFor(nextKey, v).  Forked subtasks
     * are linked through the {@code rights} list and merged in the
     * firstComplete/nextComplete loop.
     */
    @SuppressWarnings("serial") static final class MapReduceEntriesTask<K,V,U>
        extends Traverser<K,V,U> {
        final Function<Map.Entry<K,V>, ? extends U> transformer;
        final BiFunction<? super U, ? super U, ? extends U> reducer;
        U result;
        MapReduceEntriesTask<K,V,U> rights, nextRight;
        MapReduceEntriesTask
            (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
             MapReduceEntriesTask<K,V,U> nextRight,
             Function<Map.Entry<K,V>, ? extends U> transformer,
             BiFunction<? super U, ? super U, ? extends U> reducer) {
            super(m, p, b); this.nextRight = nextRight;
            this.transformer = transformer;
            this.reducer = reducer;
        }
        public final U getRawResult() { return result; }
        @SuppressWarnings("unchecked") public final void compute() {
            final Function<Map.Entry<K,V>, ? extends U> transformer;
            final BiFunction<? super U, ? super U, ? extends U> reducer;
            // Null screening (see comment preceding these task classes).
            if ((transformer = this.transformer) != null &&
                (reducer = this.reducer) != null) {
                for (int b; (b = preSplit()) > 0;)
                    (rights = new MapReduceEntriesTask<K,V,U>
                     (map, this, b, rights, transformer, reducer)).fork();
                U r = null, u;
                V v;
                while ((v = advanceValue()) != null) {
                    // Null transformations are skipped, not combined.
                    if ((u = transformer.apply(entryFor(nextKey,
                                                        v))) != null)
                        r = (r == null) ? u : reducer.apply(r, u);
                }
                result = r;
                // Merge results of completed subtasks into their parents.
                CountedCompleter<?> c;
                for (c = firstComplete(); c != null; c = c.nextComplete()) {
                    MapReduceEntriesTask<K,V,U>
                        t = (MapReduceEntriesTask<K,V,U>)c,
                        s = t.rights;
                    while (s != null) {
                        U tr, sr;
                        if ((sr = s.result) != null)
                            t.result = (((tr = t.result) == null) ? sr :
                                        reducer.apply(tr, sr));
                        s = t.rights = s.nextRight;
                    }
                }
            }
        }
    }
6339
    /**
     * Task reducing the non-null transformations of all (key, value)
     * mappings with the given reducer, yielding null if no element
     * transforms to non-null.  Forked subtasks are linked through the
     * {@code rights} list and merged in the firstComplete/nextComplete loop.
     */
    @SuppressWarnings("serial") static final class MapReduceMappingsTask<K,V,U>
        extends Traverser<K,V,U> {
        final BiFunction<? super K, ? super V, ? extends U> transformer;
        final BiFunction<? super U, ? super U, ? extends U> reducer;
        U result;
        MapReduceMappingsTask<K,V,U> rights, nextRight;
        MapReduceMappingsTask
            (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
             MapReduceMappingsTask<K,V,U> nextRight,
             BiFunction<? super K, ? super V, ? extends U> transformer,
             BiFunction<? super U, ? super U, ? extends U> reducer) {
            super(m, p, b); this.nextRight = nextRight;
            this.transformer = transformer;
            this.reducer = reducer;
        }
        public final U getRawResult() { return result; }
        @SuppressWarnings("unchecked") public final void compute() {
            final BiFunction<? super K, ? super V, ? extends U> transformer;
            final BiFunction<? super U, ? super U, ? extends U> reducer;
            // Null screening (see comment preceding these task classes).
            if ((transformer = this.transformer) != null &&
                (reducer = this.reducer) != null) {
                for (int b; (b = preSplit()) > 0;)
                    (rights = new MapReduceMappingsTask<K,V,U>
                     (map, this, b, rights, transformer, reducer)).fork();
                U r = null, u;
                V v;
                while ((v = advanceValue()) != null) {
                    // Null transformations are skipped, not combined.
                    if ((u = transformer.apply(nextKey, v)) != null)
                        r = (r == null) ? u : reducer.apply(r, u);
                }
                result = r;
                // Merge results of completed subtasks into their parents.
                CountedCompleter<?> c;
                for (c = firstComplete(); c != null; c = c.nextComplete()) {
                    MapReduceMappingsTask<K,V,U>
                        t = (MapReduceMappingsTask<K,V,U>)c,
                        s = t.rights;
                    while (s != null) {
                        U tr, sr;
                        if ((sr = s.result) != null)
                            t.result = (((tr = t.result) == null) ? sr :
                                        reducer.apply(tr, sr));
                        s = t.rights = s.nextRight;
                    }
                }
            }
        }
    }
6387
    /**
     * Task reducing the double transformations of all keys, starting
     * from the given basis.  Unlike the reference-typed reductions,
     * every element contributes (there is no null screening of mapped
     * values), and subtask results are merged unconditionally.
     */
    @SuppressWarnings("serial") static final class MapReduceKeysToDoubleTask<K,V>
        extends Traverser<K,V,Double> {
        final ToDoubleFunction<? super K> transformer;
        final DoubleBinaryOperator reducer;
        final double basis;
        double result;
        MapReduceKeysToDoubleTask<K,V> rights, nextRight;
        MapReduceKeysToDoubleTask
            (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
             MapReduceKeysToDoubleTask<K,V> nextRight,
             ToDoubleFunction<? super K> transformer,
             double basis,
             DoubleBinaryOperator reducer) {
            super(m, p, b); this.nextRight = nextRight;
            this.transformer = transformer;
            this.basis = basis; this.reducer = reducer;
        }
        public final Double getRawResult() { return result; }
        @SuppressWarnings("unchecked") public final void compute() {
            final ToDoubleFunction<? super K> transformer;
            final DoubleBinaryOperator reducer;
            // Null screening (see comment preceding these task classes).
            if ((transformer = this.transformer) != null &&
                (reducer = this.reducer) != null) {
                double r = this.basis;
                // Subtasks start from the current accumulator r (== basis
                // at each fork point, since forking precedes accumulation).
                for (int b; (b = preSplit()) > 0;)
                    (rights = new MapReduceKeysToDoubleTask<K,V>
                     (map, this, b, rights, transformer, r, reducer)).fork();
                K k;
                while ((k = advanceKey()) != null)
                    r = reducer.applyAsDouble(r, transformer.applyAsDouble(k));
                result = r;
                // Merge results of completed subtasks into their parents.
                CountedCompleter<?> c;
                for (c = firstComplete(); c != null; c = c.nextComplete()) {
                    MapReduceKeysToDoubleTask<K,V>
                        t = (MapReduceKeysToDoubleTask<K,V>)c,
                        s = t.rights;
                    while (s != null) {
                        t.result = reducer.applyAsDouble(t.result, s.result);
                        s = t.rights = s.nextRight;
                    }
                }
            }
        }
    }
6432
    /**
     * Task reducing the double transformations of all values, starting
     * from the given basis.  Unlike the reference-typed reductions,
     * every element contributes (there is no null screening of mapped
     * values), and subtask results are merged unconditionally.
     */
    @SuppressWarnings("serial") static final class MapReduceValuesToDoubleTask<K,V>
        extends Traverser<K,V,Double> {
        final ToDoubleFunction<? super V> transformer;
        final DoubleBinaryOperator reducer;
        final double basis;
        double result;
        MapReduceValuesToDoubleTask<K,V> rights, nextRight;
        MapReduceValuesToDoubleTask
            (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
             MapReduceValuesToDoubleTask<K,V> nextRight,
             ToDoubleFunction<? super V> transformer,
             double basis,
             DoubleBinaryOperator reducer) {
            super(m, p, b); this.nextRight = nextRight;
            this.transformer = transformer;
            this.basis = basis; this.reducer = reducer;
        }
        public final Double getRawResult() { return result; }
        @SuppressWarnings("unchecked") public final void compute() {
            final ToDoubleFunction<? super V> transformer;
            final DoubleBinaryOperator reducer;
            // Null screening (see comment preceding these task classes).
            if ((transformer = this.transformer) != null &&
                (reducer = this.reducer) != null) {
                double r = this.basis;
                // Subtasks start from the current accumulator r (== basis
                // at each fork point, since forking precedes accumulation).
                for (int b; (b = preSplit()) > 0;)
                    (rights = new MapReduceValuesToDoubleTask<K,V>
                     (map, this, b, rights, transformer, r, reducer)).fork();
                V v;
                while ((v = advanceValue()) != null)
                    r = reducer.applyAsDouble(r, transformer.applyAsDouble(v));
                result = r;
                // Merge results of completed subtasks into their parents.
                CountedCompleter<?> c;
                for (c = firstComplete(); c != null; c = c.nextComplete()) {
                    MapReduceValuesToDoubleTask<K,V>
                        t = (MapReduceValuesToDoubleTask<K,V>)c,
                        s = t.rights;
                    while (s != null) {
                        t.result = reducer.applyAsDouble(t.result, s.result);
                        s = t.rights = s.nextRight;
                    }
                }
            }
        }
    }
6477
    /**
     * ForkJoin task mapping each map entry through a {@code ToDoubleFunction}
     * and combining the double results with {@code reducer}, starting from
     * {@code basis}.  Same split/reduce/merge structure as the other
     * MapReduce*Task classes.
     */
    @SuppressWarnings("serial") static final class MapReduceEntriesToDoubleTask<K,V>
        extends Traverser<K,V,Double> {
        final ToDoubleFunction<Map.Entry<K,V>> transformer; // per-entry mapping
        final DoubleBinaryOperator reducer;                 // combines two doubles
        final double basis;                                 // reduction identity
        double result;                                      // result for this subtree
        MapReduceEntriesToDoubleTask<K,V> rights, nextRight; // forked-subtask links
        MapReduceEntriesToDoubleTask
            (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
             MapReduceEntriesToDoubleTask<K,V> nextRight,
             ToDoubleFunction<Map.Entry<K,V>> transformer,
             double basis,
             DoubleBinaryOperator reducer) {
            super(m, p, b); this.nextRight = nextRight;
            this.transformer = transformer;
            this.basis = basis; this.reducer = reducer;
        }
        public final Double getRawResult() { return result; }
        @SuppressWarnings("unchecked") public final void compute() {
            final ToDoubleFunction<Map.Entry<K,V>> transformer;
            final DoubleBinaryOperator reducer;
            if ((transformer = this.transformer) != null &&
                (reducer = this.reducer) != null) {
                double r = this.basis;
                // Fork right-hand subtasks while splitting is profitable.
                for (int b; (b = preSplit()) > 0;)
                    (rights = new MapReduceEntriesToDoubleTask<K,V>
                     (map, this, b, rights, transformer, r, reducer)).fork();
                V v;
                // entryFor pairs v with nextKey, presumably the key recorded
                // by advanceValue() in Traverser — NOTE(review): confirm.
                while ((v = advanceValue()) != null)
                    r = reducer.applyAsDouble(r, transformer.applyAsDouble(entryFor(nextKey,
                                                                                    v)));
                result = r;
                CountedCompleter<?> c;
                // Merge completed subtask results up the completion tree.
                for (c = firstComplete(); c != null; c = c.nextComplete()) {
                    MapReduceEntriesToDoubleTask<K,V>
                        t = (MapReduceEntriesToDoubleTask<K,V>)c,
                        s = t.rights;
                    while (s != null) {
                        t.result = reducer.applyAsDouble(t.result, s.result);
                        s = t.rights = s.nextRight;
                    }
                }
            }
        }
    }
6523
    /**
     * ForkJoin task mapping each (key, value) pair through a
     * {@code ToDoubleBiFunction} and combining the double results with
     * {@code reducer}, starting from {@code basis}.  Same split/reduce/merge
     * structure as the other MapReduce*Task classes.
     */
    @SuppressWarnings("serial") static final class MapReduceMappingsToDoubleTask<K,V>
        extends Traverser<K,V,Double> {
        final ToDoubleBiFunction<? super K, ? super V> transformer; // per-mapping function
        final DoubleBinaryOperator reducer;                         // combines two doubles
        final double basis;                                         // reduction identity
        double result;                                              // result for this subtree
        MapReduceMappingsToDoubleTask<K,V> rights, nextRight;       // forked-subtask links
        MapReduceMappingsToDoubleTask
            (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
             MapReduceMappingsToDoubleTask<K,V> nextRight,
             ToDoubleBiFunction<? super K, ? super V> transformer,
             double basis,
             DoubleBinaryOperator reducer) {
            super(m, p, b); this.nextRight = nextRight;
            this.transformer = transformer;
            this.basis = basis; this.reducer = reducer;
        }
        public final Double getRawResult() { return result; }
        @SuppressWarnings("unchecked") public final void compute() {
            final ToDoubleBiFunction<? super K, ? super V> transformer;
            final DoubleBinaryOperator reducer;
            if ((transformer = this.transformer) != null &&
                (reducer = this.reducer) != null) {
                double r = this.basis;
                // Fork right-hand subtasks while splitting is profitable.
                for (int b; (b = preSplit()) > 0;)
                    (rights = new MapReduceMappingsToDoubleTask<K,V>
                     (map, this, b, rights, transformer, r, reducer)).fork();
                V v;
                // nextKey assumed to hold the key paired with v by
                // advanceValue() (inherited from Traverser) — confirm.
                while ((v = advanceValue()) != null)
                    r = reducer.applyAsDouble(r, transformer.applyAsDouble(nextKey, v));
                result = r;
                CountedCompleter<?> c;
                // Merge completed subtask results up the completion tree.
                for (c = firstComplete(); c != null; c = c.nextComplete()) {
                    MapReduceMappingsToDoubleTask<K,V>
                        t = (MapReduceMappingsToDoubleTask<K,V>)c,
                        s = t.rights;
                    while (s != null) {
                        t.result = reducer.applyAsDouble(t.result, s.result);
                        s = t.rights = s.nextRight;
                    }
                }
            }
        }
    }
6568
    /**
     * ForkJoin task mapping each key through a {@code ToLongFunction} and
     * combining the long results with {@code reducer}, starting from
     * {@code basis}.  Same split/reduce/merge structure as the other
     * MapReduce*Task classes.
     */
    @SuppressWarnings("serial") static final class MapReduceKeysToLongTask<K,V>
        extends Traverser<K,V,Long> {
        final ToLongFunction<? super K> transformer; // per-key mapping
        final LongBinaryOperator reducer;            // combines two longs
        final long basis;                            // reduction identity
        long result;                                 // result for this subtree
        MapReduceKeysToLongTask<K,V> rights, nextRight; // forked-subtask links
        MapReduceKeysToLongTask
            (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
             MapReduceKeysToLongTask<K,V> nextRight,
             ToLongFunction<? super K> transformer,
             long basis,
             LongBinaryOperator reducer) {
            super(m, p, b); this.nextRight = nextRight;
            this.transformer = transformer;
            this.basis = basis; this.reducer = reducer;
        }
        public final Long getRawResult() { return result; }
        @SuppressWarnings("unchecked") public final void compute() {
            final ToLongFunction<? super K> transformer;
            final LongBinaryOperator reducer;
            if ((transformer = this.transformer) != null &&
                (reducer = this.reducer) != null) {
                long r = this.basis;
                // Fork right-hand subtasks while splitting is profitable.
                for (int b; (b = preSplit()) > 0;)
                    (rights = new MapReduceKeysToLongTask<K,V>
                     (map, this, b, rights, transformer, r, reducer)).fork();
                K k;
                // Reduce this task's own portion of the key traversal.
                while ((k = advanceKey()) != null)
                    r = reducer.applyAsLong(r, transformer.applyAsLong(k));
                result = r;
                CountedCompleter<?> c;
                // Merge completed subtask results up the completion tree.
                for (c = firstComplete(); c != null; c = c.nextComplete()) {
                    MapReduceKeysToLongTask<K,V>
                        t = (MapReduceKeysToLongTask<K,V>)c,
                        s = t.rights;
                    while (s != null) {
                        t.result = reducer.applyAsLong(t.result, s.result);
                        s = t.rights = s.nextRight;
                    }
                }
            }
        }
    }
6613
    /**
     * ForkJoin task mapping each value through a {@code ToLongFunction} and
     * combining the long results with {@code reducer}, starting from
     * {@code basis}.  Same split/reduce/merge structure as the other
     * MapReduce*Task classes.
     */
    @SuppressWarnings("serial") static final class MapReduceValuesToLongTask<K,V>
        extends Traverser<K,V,Long> {
        final ToLongFunction<? super V> transformer; // per-value mapping
        final LongBinaryOperator reducer;            // combines two longs
        final long basis;                            // reduction identity
        long result;                                 // result for this subtree
        MapReduceValuesToLongTask<K,V> rights, nextRight; // forked-subtask links
        MapReduceValuesToLongTask
            (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
             MapReduceValuesToLongTask<K,V> nextRight,
             ToLongFunction<? super V> transformer,
             long basis,
             LongBinaryOperator reducer) {
            super(m, p, b); this.nextRight = nextRight;
            this.transformer = transformer;
            this.basis = basis; this.reducer = reducer;
        }
        public final Long getRawResult() { return result; }
        @SuppressWarnings("unchecked") public final void compute() {
            final ToLongFunction<? super V> transformer;
            final LongBinaryOperator reducer;
            if ((transformer = this.transformer) != null &&
                (reducer = this.reducer) != null) {
                long r = this.basis;
                // Fork right-hand subtasks while splitting is profitable.
                for (int b; (b = preSplit()) > 0;)
                    (rights = new MapReduceValuesToLongTask<K,V>
                     (map, this, b, rights, transformer, r, reducer)).fork();
                V v;
                // Reduce this task's own portion of the value traversal.
                while ((v = advanceValue()) != null)
                    r = reducer.applyAsLong(r, transformer.applyAsLong(v));
                result = r;
                CountedCompleter<?> c;
                // Merge completed subtask results up the completion tree.
                for (c = firstComplete(); c != null; c = c.nextComplete()) {
                    MapReduceValuesToLongTask<K,V>
                        t = (MapReduceValuesToLongTask<K,V>)c,
                        s = t.rights;
                    while (s != null) {
                        t.result = reducer.applyAsLong(t.result, s.result);
                        s = t.rights = s.nextRight;
                    }
                }
            }
        }
    }
6658
    /**
     * ForkJoin task mapping each map entry through a {@code ToLongFunction}
     * and combining the long results with {@code reducer}, starting from
     * {@code basis}.  Same split/reduce/merge structure as the other
     * MapReduce*Task classes.
     */
    @SuppressWarnings("serial") static final class MapReduceEntriesToLongTask<K,V>
        extends Traverser<K,V,Long> {
        final ToLongFunction<Map.Entry<K,V>> transformer; // per-entry mapping
        final LongBinaryOperator reducer;                 // combines two longs
        final long basis;                                 // reduction identity
        long result;                                      // result for this subtree
        MapReduceEntriesToLongTask<K,V> rights, nextRight; // forked-subtask links
        MapReduceEntriesToLongTask
            (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
             MapReduceEntriesToLongTask<K,V> nextRight,
             ToLongFunction<Map.Entry<K,V>> transformer,
             long basis,
             LongBinaryOperator reducer) {
            super(m, p, b); this.nextRight = nextRight;
            this.transformer = transformer;
            this.basis = basis; this.reducer = reducer;
        }
        public final Long getRawResult() { return result; }
        @SuppressWarnings("unchecked") public final void compute() {
            final ToLongFunction<Map.Entry<K,V>> transformer;
            final LongBinaryOperator reducer;
            if ((transformer = this.transformer) != null &&
                (reducer = this.reducer) != null) {
                long r = this.basis;
                // Fork right-hand subtasks while splitting is profitable.
                for (int b; (b = preSplit()) > 0;)
                    (rights = new MapReduceEntriesToLongTask<K,V>
                     (map, this, b, rights, transformer, r, reducer)).fork();
                V v;
                // entryFor pairs v with nextKey, presumably the key recorded
                // by advanceValue() in Traverser — NOTE(review): confirm.
                while ((v = advanceValue()) != null)
                    r = reducer.applyAsLong(r, transformer.applyAsLong(entryFor(nextKey, v)));
                result = r;
                CountedCompleter<?> c;
                // Merge completed subtask results up the completion tree.
                for (c = firstComplete(); c != null; c = c.nextComplete()) {
                    MapReduceEntriesToLongTask<K,V>
                        t = (MapReduceEntriesToLongTask<K,V>)c,
                        s = t.rights;
                    while (s != null) {
                        t.result = reducer.applyAsLong(t.result, s.result);
                        s = t.rights = s.nextRight;
                    }
                }
            }
        }
    }
6703
    /**
     * ForkJoin task mapping each (key, value) pair through a
     * {@code ToLongBiFunction} and combining the long results with
     * {@code reducer}, starting from {@code basis}.  Same split/reduce/merge
     * structure as the other MapReduce*Task classes.
     */
    @SuppressWarnings("serial") static final class MapReduceMappingsToLongTask<K,V>
        extends Traverser<K,V,Long> {
        final ToLongBiFunction<? super K, ? super V> transformer; // per-mapping function
        final LongBinaryOperator reducer;                         // combines two longs
        final long basis;                                         // reduction identity
        long result;                                              // result for this subtree
        MapReduceMappingsToLongTask<K,V> rights, nextRight;       // forked-subtask links
        MapReduceMappingsToLongTask
            (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
             MapReduceMappingsToLongTask<K,V> nextRight,
             ToLongBiFunction<? super K, ? super V> transformer,
             long basis,
             LongBinaryOperator reducer) {
            super(m, p, b); this.nextRight = nextRight;
            this.transformer = transformer;
            this.basis = basis; this.reducer = reducer;
        }
        public final Long getRawResult() { return result; }
        @SuppressWarnings("unchecked") public final void compute() {
            final ToLongBiFunction<? super K, ? super V> transformer;
            final LongBinaryOperator reducer;
            if ((transformer = this.transformer) != null &&
                (reducer = this.reducer) != null) {
                long r = this.basis;
                // Fork right-hand subtasks while splitting is profitable.
                for (int b; (b = preSplit()) > 0;)
                    (rights = new MapReduceMappingsToLongTask<K,V>
                     (map, this, b, rights, transformer, r, reducer)).fork();
                V v;
                // nextKey assumed to hold the key paired with v by
                // advanceValue() (inherited from Traverser) — confirm.
                while ((v = advanceValue()) != null)
                    r = reducer.applyAsLong(r, transformer.applyAsLong(nextKey, v));
                result = r;
                CountedCompleter<?> c;
                // Merge completed subtask results up the completion tree.
                for (c = firstComplete(); c != null; c = c.nextComplete()) {
                    MapReduceMappingsToLongTask<K,V>
                        t = (MapReduceMappingsToLongTask<K,V>)c,
                        s = t.rights;
                    while (s != null) {
                        t.result = reducer.applyAsLong(t.result, s.result);
                        s = t.rights = s.nextRight;
                    }
                }
            }
        }
    }
6748
    /**
     * ForkJoin task mapping each key through a {@code ToIntFunction} and
     * combining the int results with {@code reducer}, starting from
     * {@code basis}.  Same split/reduce/merge structure as the other
     * MapReduce*Task classes.
     */
    @SuppressWarnings("serial") static final class MapReduceKeysToIntTask<K,V>
        extends Traverser<K,V,Integer> {
        final ToIntFunction<? super K> transformer; // per-key mapping
        final IntBinaryOperator reducer;            // combines two ints
        final int basis;                            // reduction identity
        int result;                                 // result for this subtree
        MapReduceKeysToIntTask<K,V> rights, nextRight; // forked-subtask links
        MapReduceKeysToIntTask
            (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
             MapReduceKeysToIntTask<K,V> nextRight,
             ToIntFunction<? super K> transformer,
             int basis,
             IntBinaryOperator reducer) {
            super(m, p, b); this.nextRight = nextRight;
            this.transformer = transformer;
            this.basis = basis; this.reducer = reducer;
        }
        public final Integer getRawResult() { return result; }
        @SuppressWarnings("unchecked") public final void compute() {
            final ToIntFunction<? super K> transformer;
            final IntBinaryOperator reducer;
            if ((transformer = this.transformer) != null &&
                (reducer = this.reducer) != null) {
                int r = this.basis;
                // Fork right-hand subtasks while splitting is profitable.
                for (int b; (b = preSplit()) > 0;)
                    (rights = new MapReduceKeysToIntTask<K,V>
                     (map, this, b, rights, transformer, r, reducer)).fork();
                K k;
                // Reduce this task's own portion of the key traversal.
                while ((k = advanceKey()) != null)
                    r = reducer.applyAsInt(r, transformer.applyAsInt(k));
                result = r;
                CountedCompleter<?> c;
                // Merge completed subtask results up the completion tree.
                for (c = firstComplete(); c != null; c = c.nextComplete()) {
                    MapReduceKeysToIntTask<K,V>
                        t = (MapReduceKeysToIntTask<K,V>)c,
                        s = t.rights;
                    while (s != null) {
                        t.result = reducer.applyAsInt(t.result, s.result);
                        s = t.rights = s.nextRight;
                    }
                }
            }
        }
    }
6793
    /**
     * ForkJoin task mapping each value through a {@code ToIntFunction} and
     * combining the int results with {@code reducer}, starting from
     * {@code basis}.  Same split/reduce/merge structure as the other
     * MapReduce*Task classes.
     */
    @SuppressWarnings("serial") static final class MapReduceValuesToIntTask<K,V>
        extends Traverser<K,V,Integer> {
        final ToIntFunction<? super V> transformer; // per-value mapping
        final IntBinaryOperator reducer;            // combines two ints
        final int basis;                            // reduction identity
        int result;                                 // result for this subtree
        MapReduceValuesToIntTask<K,V> rights, nextRight; // forked-subtask links
        MapReduceValuesToIntTask
            (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
             MapReduceValuesToIntTask<K,V> nextRight,
             ToIntFunction<? super V> transformer,
             int basis,
             IntBinaryOperator reducer) {
            super(m, p, b); this.nextRight = nextRight;
            this.transformer = transformer;
            this.basis = basis; this.reducer = reducer;
        }
        public final Integer getRawResult() { return result; }
        @SuppressWarnings("unchecked") public final void compute() {
            final ToIntFunction<? super V> transformer;
            final IntBinaryOperator reducer;
            if ((transformer = this.transformer) != null &&
                (reducer = this.reducer) != null) {
                int r = this.basis;
                // Fork right-hand subtasks while splitting is profitable.
                for (int b; (b = preSplit()) > 0;)
                    (rights = new MapReduceValuesToIntTask<K,V>
                     (map, this, b, rights, transformer, r, reducer)).fork();
                V v;
                // Reduce this task's own portion of the value traversal.
                while ((v = advanceValue()) != null)
                    r = reducer.applyAsInt(r, transformer.applyAsInt(v));
                result = r;
                CountedCompleter<?> c;
                // Merge completed subtask results up the completion tree.
                for (c = firstComplete(); c != null; c = c.nextComplete()) {
                    MapReduceValuesToIntTask<K,V>
                        t = (MapReduceValuesToIntTask<K,V>)c,
                        s = t.rights;
                    while (s != null) {
                        t.result = reducer.applyAsInt(t.result, s.result);
                        s = t.rights = s.nextRight;
                    }
                }
            }
        }
    }
6838
    /**
     * ForkJoin task mapping each map entry through a {@code ToIntFunction}
     * and combining the int results with {@code reducer}, starting from
     * {@code basis}.  Same split/reduce/merge structure as the other
     * MapReduce*Task classes.
     */
    @SuppressWarnings("serial") static final class MapReduceEntriesToIntTask<K,V>
        extends Traverser<K,V,Integer> {
        final ToIntFunction<Map.Entry<K,V>> transformer; // per-entry mapping
        final IntBinaryOperator reducer;                 // combines two ints
        final int basis;                                 // reduction identity
        int result;                                      // result for this subtree
        MapReduceEntriesToIntTask<K,V> rights, nextRight; // forked-subtask links
        MapReduceEntriesToIntTask
            (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
             MapReduceEntriesToIntTask<K,V> nextRight,
             ToIntFunction<Map.Entry<K,V>> transformer,
             int basis,
             IntBinaryOperator reducer) {
            super(m, p, b); this.nextRight = nextRight;
            this.transformer = transformer;
            this.basis = basis; this.reducer = reducer;
        }
        public final Integer getRawResult() { return result; }
        @SuppressWarnings("unchecked") public final void compute() {
            final ToIntFunction<Map.Entry<K,V>> transformer;
            final IntBinaryOperator reducer;
            if ((transformer = this.transformer) != null &&
                (reducer = this.reducer) != null) {
                int r = this.basis;
                // Fork right-hand subtasks while splitting is profitable.
                for (int b; (b = preSplit()) > 0;)
                    (rights = new MapReduceEntriesToIntTask<K,V>
                     (map, this, b, rights, transformer, r, reducer)).fork();
                V v;
                // entryFor pairs v with nextKey, presumably the key recorded
                // by advanceValue() in Traverser — NOTE(review): confirm.
                while ((v = advanceValue()) != null)
                    r = reducer.applyAsInt(r, transformer.applyAsInt(entryFor(nextKey,
                                                                              v)));
                result = r;
                CountedCompleter<?> c;
                // Merge completed subtask results up the completion tree.
                for (c = firstComplete(); c != null; c = c.nextComplete()) {
                    MapReduceEntriesToIntTask<K,V>
                        t = (MapReduceEntriesToIntTask<K,V>)c,
                        s = t.rights;
                    while (s != null) {
                        t.result = reducer.applyAsInt(t.result, s.result);
                        s = t.rights = s.nextRight;
                    }
                }
            }
        }
    }
6884
    /**
     * ForkJoin task mapping each (key, value) pair through a
     * {@code ToIntBiFunction} and combining the int results with
     * {@code reducer}, starting from {@code basis}.  Same split/reduce/merge
     * structure as the other MapReduce*Task classes.
     */
    @SuppressWarnings("serial") static final class MapReduceMappingsToIntTask<K,V>
        extends Traverser<K,V,Integer> {
        final ToIntBiFunction<? super K, ? super V> transformer; // per-mapping function
        final IntBinaryOperator reducer;                         // combines two ints
        final int basis;                                         // reduction identity
        int result;                                              // result for this subtree
        MapReduceMappingsToIntTask<K,V> rights, nextRight;       // forked-subtask links
        MapReduceMappingsToIntTask
            (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
             MapReduceMappingsToIntTask<K,V> nextRight,
             ToIntBiFunction<? super K, ? super V> transformer,
             int basis,
             IntBinaryOperator reducer) {
            super(m, p, b); this.nextRight = nextRight;
            this.transformer = transformer;
            this.basis = basis; this.reducer = reducer;
        }
        public final Integer getRawResult() { return result; }
        @SuppressWarnings("unchecked") public final void compute() {
            final ToIntBiFunction<? super K, ? super V> transformer;
            final IntBinaryOperator reducer;
            if ((transformer = this.transformer) != null &&
                (reducer = this.reducer) != null) {
                int r = this.basis;
                // Fork right-hand subtasks while splitting is profitable.
                for (int b; (b = preSplit()) > 0;)
                    (rights = new MapReduceMappingsToIntTask<K,V>
                     (map, this, b, rights, transformer, r, reducer)).fork();
                V v;
                // nextKey assumed to hold the key paired with v by
                // advanceValue() (inherited from Traverser) — confirm.
                while ((v = advanceValue()) != null)
                    r = reducer.applyAsInt(r, transformer.applyAsInt(nextKey, v));
                result = r;
                CountedCompleter<?> c;
                // Merge completed subtask results up the completion tree.
                for (c = firstComplete(); c != null; c = c.nextComplete()) {
                    MapReduceMappingsToIntTask<K,V>
                        t = (MapReduceMappingsToIntTask<K,V>)c,
                        s = t.rights;
                    while (s != null) {
                        t.result = reducer.applyAsInt(t.result, s.result);
                        s = t.rights = s.nextRight;
                    }
                }
            }
        }
    }
6929
    // Unsafe mechanics: raw field/array offsets used for the CAS and
    // ordered writes that implement lock-free updates elsewhere in the
    // class.  All are resolved once in the static initializer below.
    private static final sun.misc.Unsafe U;      // intrinsics provider
    private static final long SIZECTL;           // offset of field "sizeCtl"
    private static final long TRANSFERINDEX;     // offset of field "transferIndex"
    private static final long TRANSFERORIGIN;    // offset of field "transferOrigin"
    private static final long BASECOUNT;         // offset of field "baseCount"
    private static final long CELLSBUSY;         // offset of field "cellsBusy"
    private static final long CELLVALUE;         // offset of Cell field "value"
    private static final long ABASE;             // base offset of Node[] storage
    private static final int ASHIFT;             // log2 of Node[] element size
6940
    // Resolve all Unsafe offsets reflectively.  Any failure (missing field,
    // denied access, non-power-of-two array scale) is unrecoverable for
    // this class, so it is converted to an Error.
    static {
        try {
            U = sun.misc.Unsafe.getUnsafe();
            Class<?> k = ConcurrentHashMap.class;
            SIZECTL = U.objectFieldOffset
                (k.getDeclaredField("sizeCtl"));
            TRANSFERINDEX = U.objectFieldOffset
                (k.getDeclaredField("transferIndex"));
            TRANSFERORIGIN = U.objectFieldOffset
                (k.getDeclaredField("transferOrigin"));
            BASECOUNT = U.objectFieldOffset
                (k.getDeclaredField("baseCount"));
            CELLSBUSY = U.objectFieldOffset
                (k.getDeclaredField("cellsBusy"));
            Class<?> ck = Cell.class;
            CELLVALUE = U.objectFieldOffset
                (ck.getDeclaredField("value"));
            Class<?> sc = Node[].class;
            ABASE = U.arrayBaseOffset(sc);
            int scale = U.arrayIndexScale(sc);
            // Element addresses are computed as ABASE + (i << ASHIFT),
            // which only works if the element size is a power of two.
            if ((scale & (scale - 1)) != 0)
                throw new Error("data type scale not a power of two");
            ASHIFT = 31 - Integer.numberOfLeadingZeros(scale);
        } catch (Exception e) {
            throw new Error(e);
        }
    }
6968
6969 }