ViewVC Help
View File | Revision Log | Show Annotations | Download File | Root Listing
root/jsr166/jsr166/src/main/java/util/concurrent/ConcurrentHashMap.java
Revision: 1.203
Committed: Thu Apr 11 18:15:53 2013 UTC (11 years, 1 month ago) by dl
Branch: MAIN
Changes since 1.202: +3 -11 lines
Log Message:
Remove unneeded method

File Contents

# Content
1 /*
2 * Written by Doug Lea with assistance from members of JCP JSR-166
3 * Expert Group and released to the public domain, as explained at
4 * http://creativecommons.org/publicdomain/zero/1.0/
5 */
6
7 package java.util.concurrent;
8 import java.util.concurrent.ForkJoinPool;
9 import java.util.concurrent.CountedCompleter;
10 import java.util.Spliterator;
11 import java.util.stream.Stream;
12 import java.util.stream.Streams;
13 import java.util.function.*;
14 import java.util.function.Consumer;
15 import java.util.function.Function;
16 import java.util.function.BiFunction;
17
18 import java.util.Comparator;
19 import java.util.Arrays;
20 import java.util.Map;
21 import java.util.Set;
22 import java.util.Collection;
23 import java.util.AbstractMap;
24 import java.util.AbstractSet;
25 import java.util.AbstractCollection;
26 import java.util.Hashtable;
27 import java.util.HashMap;
28 import java.util.Iterator;
29 import java.util.Enumeration;
30 import java.util.ConcurrentModificationException;
31 import java.util.NoSuchElementException;
32 import java.util.concurrent.ConcurrentMap;
33 import java.util.concurrent.locks.AbstractQueuedSynchronizer;
34 import java.util.concurrent.atomic.AtomicInteger;
35 import java.util.concurrent.atomic.AtomicReference;
36 import java.io.Serializable;
37 import java.lang.reflect.Type;
38 import java.lang.reflect.ParameterizedType;
39
40 /**
41 * A hash table supporting full concurrency of retrievals and
42 * high expected concurrency for updates. This class obeys the
43 * same functional specification as {@link java.util.Hashtable}, and
44 * includes versions of methods corresponding to each method of
45 * {@code Hashtable}. However, even though all operations are
46 * thread-safe, retrieval operations do <em>not</em> entail locking,
47 * and there is <em>not</em> any support for locking the entire table
48 * in a way that prevents all access. This class is fully
49 * interoperable with {@code Hashtable} in programs that rely on its
50 * thread safety but not on its synchronization details.
51 *
52 * <p>Retrieval operations (including {@code get}) generally do not
53 * block, so may overlap with update operations (including {@code put}
54 * and {@code remove}). Retrievals reflect the results of the most
55 * recently <em>completed</em> update operations holding upon their
56 * onset. (More formally, an update operation for a given key bears a
57 * <em>happens-before</em> relation with any (non-null) retrieval for
58 * that key reporting the updated value.) For aggregate operations
59 * such as {@code putAll} and {@code clear}, concurrent retrievals may
60 * reflect insertion or removal of only some entries. Similarly,
61 * Iterators and Enumerations return elements reflecting the state of
62 * the hash table at some point at or since the creation of the
63 * iterator/enumeration. They do <em>not</em> throw {@link
64 * ConcurrentModificationException}. However, iterators are designed
65 * to be used by only one thread at a time. Bear in mind that the
66 * results of aggregate status methods including {@code size}, {@code
67 * isEmpty}, and {@code containsValue} are typically useful only when
68 * a map is not undergoing concurrent updates in other threads.
69 * Otherwise the results of these methods reflect transient states
70 * that may be adequate for monitoring or estimation purposes, but not
71 * for program control.
72 *
73 * <p>The table is dynamically expanded when there are too many
74 * collisions (i.e., keys that have distinct hash codes but fall into
75 * the same slot modulo the table size), with the expected average
76 * effect of maintaining roughly two bins per mapping (corresponding
77 * to a 0.75 load factor threshold for resizing). There may be much
78 * variance around this average as mappings are added and removed, but
79 * overall, this maintains a commonly accepted time/space tradeoff for
80 * hash tables. However, resizing this or any other kind of hash
81 * table may be a relatively slow operation. When possible, it is a
82 * good idea to provide a size estimate as an optional {@code
83 * initialCapacity} constructor argument. An additional optional
84 * {@code loadFactor} constructor argument provides a further means of
85 * customizing initial table capacity by specifying the table density
86 * to be used in calculating the amount of space to allocate for the
87 * given number of elements. Also, for compatibility with previous
88 * versions of this class, constructors may optionally specify an
89 * expected {@code concurrencyLevel} as an additional hint for
90 * internal sizing. Note that using many keys with exactly the same
91 * {@code hashCode()} is a sure way to slow down performance of any
92 * hash table.
93 *
94 * <p>A {@link Set} projection of a ConcurrentHashMap may be created
95 * (using {@link #newKeySet()} or {@link #newKeySet(int)}), or viewed
96 * (using {@link #keySet(Object)}) when only keys are of interest, and the
97 * mapped values are (perhaps transiently) not used or all take the
98 * same mapping value.
99 *
100 * <p>A ConcurrentHashMap can be used as a scalable frequency map (a
101 * form of histogram or multiset) by using {@link
102 * java.util.concurrent.atomic.LongAdder} values and initializing via
103 * {@link #computeIfAbsent computeIfAbsent}. For example, to add a count
104 * to a {@code ConcurrentHashMap<String,LongAdder> freqs}, you can use
105 * {@code freqs.computeIfAbsent(k, k -> new LongAdder()).increment();}
106 *
107 * <p>This class and its views and iterators implement all of the
108 * <em>optional</em> methods of the {@link Map} and {@link Iterator}
109 * interfaces.
110 *
111 * <p>Like {@link Hashtable} but unlike {@link HashMap}, this class
112 * does <em>not</em> allow {@code null} to be used as a key or value.
113 *
114 * <p>ConcurrentHashMaps support a set of sequential and parallel
115 * bulk operations. (Parallel forms use the {@link
116 * ForkJoinPool#commonPool()}). Tasks that may be used in other
117 * contexts are available in class {@link ForkJoinTasks}. These
118 * operations are designed to be safely, and often sensibly, applied
119 * even with maps that are being concurrently updated by other
120 * threads; for example, when computing a snapshot summary of the
121 * values in a shared registry. There are three kinds of operation,
122 * each with four forms, accepting functions with Keys, Values,
123 * Entries, and (Key, Value) arguments and/or return values. Because
124 * the elements of a ConcurrentHashMap are not ordered in any
125 * particular way, and may be processed in different orders in
126 * different parallel executions, the correctness of supplied
127 * functions should not depend on any ordering, or on any other
128 * objects or values that may transiently change while computation is
129 * in progress; and except for forEach actions, should ideally be
130 * side-effect-free.
131 *
132 * <ul>
133 * <li> forEach: Perform a given action on each element.
134 * A variant form applies a given transformation on each element
135 * before performing the action.</li>
136 *
137 * <li> search: Return the first available non-null result of
138 * applying a given function on each element; skipping further
139 * search when a result is found.</li>
140 *
141 * <li> reduce: Accumulate each element. The supplied reduction
142 * function cannot rely on ordering (more formally, it should be
143 * both associative and commutative). There are five variants:
144 *
145 * <ul>
146 *
147 * <li> Plain reductions. (There is not a form of this method for
148 * (key, value) function arguments since there is no corresponding
149 * return type.)</li>
150 *
151 * <li> Mapped reductions that accumulate the results of a given
152 * function applied to each element.</li>
153 *
154 * <li> Reductions to scalar doubles, longs, and ints, using a
155 * given basis value.</li>
156 *
157 * </ul>
158 * </li>
159 * </ul>
160 *
161 * <p>The concurrency properties of bulk operations follow
162 * from those of ConcurrentHashMap: Any non-null result returned
163 * from {@code get(key)} and related access methods bears a
164 * happens-before relation with the associated insertion or
165 * update. The result of any bulk operation reflects the
166 * composition of these per-element relations (but is not
167 * necessarily atomic with respect to the map as a whole unless it
168 * is somehow known to be quiescent). Conversely, because keys
169 * and values in the map are never null, null serves as a reliable
170 * atomic indicator of the current lack of any result. To
171 * maintain this property, null serves as an implicit basis for
172 * all non-scalar reduction operations. For the double, long, and
173 * int versions, the basis should be one that, when combined with
174 * any other value, returns that other value (more formally, it
175 * should be the identity element for the reduction). Most common
176 * reductions have these properties; for example, computing a sum
177 * with basis 0 or a minimum with basis MAX_VALUE.
178 *
179 * <p>Search and transformation functions provided as arguments
180 * should similarly return null to indicate the lack of any result
181 * (in which case it is not used). In the case of mapped
182 * reductions, this also enables transformations to serve as
183 * filters, returning null (or, in the case of primitive
184 * specializations, the identity basis) if the element should not
185 * be combined. You can create compound transformations and
186 * filterings by composing them yourself under this "null means
187 * there is nothing there now" rule before using them in search or
188 * reduce operations.
189 *
190 * <p>Methods accepting and/or returning Entry arguments maintain
191 * key-value associations. They may be useful for example when
192 * finding the key for the greatest value. Note that "plain" Entry
193 * arguments can be supplied using {@code new
194 * AbstractMap.SimpleEntry(k,v)}.
195 *
196 * <p>Bulk operations may complete abruptly, throwing an
197 * exception encountered in the application of a supplied
198 * function. Bear in mind when handling such exceptions that other
199 * concurrently executing functions could also have thrown
200 * exceptions, or would have done so if the first exception had
201 * not occurred.
202 *
203 * <p>Speedups for parallel compared to sequential forms are common
204 * but not guaranteed. Parallel operations involving brief functions
205 * on small maps may execute more slowly than sequential forms if the
206 * underlying work to parallelize the computation is more expensive
207 * than the computation itself. Similarly, parallelization may not
208 * lead to much actual parallelism if all processors are busy
209 * performing unrelated tasks.
210 *
211 * <p>All arguments to all task methods must be non-null.
212 *
213 * <p>This class is a member of the
214 * <a href="{@docRoot}/../technotes/guides/collections/index.html">
215 * Java Collections Framework</a>.
216 *
217 * @since 1.5
218 * @author Doug Lea
219 * @param <K> the type of keys maintained by this map
220 * @param <V> the type of mapped values
221 */
222 public class ConcurrentHashMap<K,V>
223 implements ConcurrentMap<K,V>, Serializable {
224 private static final long serialVersionUID = 7249069246763182397L;
225
226 /*
227 * Overview:
228 *
229 * The primary design goal of this hash table is to maintain
230 * concurrent readability (typically method get(), but also
231 * iterators and related methods) while minimizing update
232 * contention. Secondary goals are to keep space consumption about
233 * the same or better than java.util.HashMap, and to support high
234 * initial insertion rates on an empty table by many threads.
235 *
236 * Each key-value mapping is held in a Node. Because Node key
237 * fields can contain special values, they are defined using plain
238 * Object types (not type "K"). This leads to a lot of explicit
239 * casting (and many explicit warning suppressions to tell
240 * compilers not to complain about it). It also allows some of the
241 * public methods to be factored into a smaller number of internal
242 * methods (although sadly not so for the five variants of
243 * put-related operations). The validation-based approach
244 * explained below leads to a lot of code sprawl because
245 * retry-control precludes factoring into smaller methods.
246 *
247 * The table is lazily initialized to a power-of-two size upon the
248 * first insertion. Each bin in the table normally contains a
249 * list of Nodes (most often, the list has only zero or one Node).
250 * Table accesses require volatile/atomic reads, writes, and
251 * CASes. Because there is no other way to arrange this without
252 * adding further indirections, we use intrinsics
253 * (sun.misc.Unsafe) operations. The lists of nodes within bins
254 * are always accurately traversable under volatile reads, so long
255 * as lookups check hash code and non-nullness of value before
256 * checking key equality.
257 *
258 * We use the top (sign) bit of Node hash fields for control
259 * purposes -- it is available anyway because of addressing
260 * constraints. Nodes with negative hash fields are forwarding
261 * nodes to either TreeBins or resized tables. The lower 31 bits
262 * of each normal Node's hash field contain a transformation of
263 * the key's hash code.
264 *
265 * Insertion (via put or its variants) of the first node in an
266 * empty bin is performed by just CASing it to the bin. This is
267 * by far the most common case for put operations under most
268 * key/hash distributions. Other update operations (insert,
269 * delete, and replace) require locks. We do not want to waste
270 * the space required to associate a distinct lock object with
271 * each bin, so instead use the first node of a bin list itself as
272 * a lock. Locking support for these locks relies on builtin
273 * "synchronized" monitors.
274 *
275 * Using the first node of a list as a lock does not by itself
276 * suffice though: When a node is locked, any update must first
277 * validate that it is still the first node after locking it, and
278 * retry if not. Because new nodes are always appended to lists,
279 * once a node is first in a bin, it remains first until deleted
280 * or the bin becomes invalidated (upon resizing). However,
281 * operations that only conditionally update may inspect nodes
282 * until the point of update. This is a converse of sorts to the
283 * lazy locking technique described by Herlihy & Shavit.
284 *
285 * The main disadvantage of per-bin locks is that other update
286 * operations on other nodes in a bin list protected by the same
287 * lock can stall, for example when user equals() or mapping
288 * functions take a long time. However, statistically, under
289 * random hash codes, this is not a common problem. Ideally, the
290 * frequency of nodes in bins follows a Poisson distribution
291 * (http://en.wikipedia.org/wiki/Poisson_distribution) with a
292 * parameter of about 0.5 on average, given the resizing threshold
293 * of 0.75, although with a large variance because of resizing
294 * granularity. Ignoring variance, the expected occurrences of
295 * list size k are (exp(-0.5) * pow(0.5, k) / factorial(k)). The
296 * first values are:
297 *
298 * 0: 0.60653066
299 * 1: 0.30326533
300 * 2: 0.07581633
301 * 3: 0.01263606
302 * 4: 0.00157952
303 * 5: 0.00015795
304 * 6: 0.00001316
305 * 7: 0.00000094
306 * 8: 0.00000006
307 * more: less than 1 in ten million
308 *
309 * Lock contention probability for two threads accessing distinct
310 * elements is roughly 1 / (8 * #elements) under random hashes.
311 *
312 * Actual hash code distributions encountered in practice
313 * sometimes deviate significantly from uniform randomness. This
314 * includes the case when N > (1<<30), so some keys MUST collide.
315 * Similarly for dumb or hostile usages in which multiple keys are
316 * designed to have identical hash codes. Also, although we guard
317 * against the worst effects of this (see method spread), sets of
318 * hashes may differ only in bits that do not impact their bin
319 * index for a given power-of-two mask. So we use a secondary
320 * strategy that applies when the number of nodes in a bin exceeds
321 * a threshold, and at least one of the keys implements
322 * Comparable. These TreeBins use a balanced tree to hold nodes
323 * (a specialized form of red-black trees), bounding search time
324 * to O(log N). Each search step in a TreeBin is around twice as
325 * slow as in a regular list, but given that N cannot exceed
326 * (1<<64) (before running out of addresses) this bounds search
327 * steps, lock hold times, etc, to reasonable constants (roughly
328 * 100 nodes inspected per operation worst case) so long as keys
329 * are Comparable (which is very common -- String, Long, etc).
330 * TreeBin nodes (TreeNodes) also maintain the same "next"
331 * traversal pointers as regular nodes, so can be traversed in
332 * iterators in the same way.
333 *
334 * The table is resized when occupancy exceeds a percentage
335 * threshold (nominally, 0.75, but see below). Any thread
336 * noticing an overfull bin may assist in resizing after the
337 * initiating thread allocates and sets up the replacement
338 * array. However, rather than stalling, these other threads may
339 * proceed with insertions etc. The use of TreeBins shields us
340 * from the worst case effects of overfilling while resizes are in
341 * progress. Resizing proceeds by transferring bins, one by one,
342 * from the table to the next table. To enable concurrency, the
343 * next table must be (incrementally) prefilled with place-holders
344 * serving as reverse forwarders to the old table. Because we are
345 * using power-of-two expansion, the elements from each bin must
346 * either stay at same index, or move with a power of two
347 * offset. We eliminate unnecessary node creation by catching
348 * cases where old nodes can be reused because their next fields
349 * won't change. On average, only about one-sixth of them need
350 * cloning when a table doubles. The nodes they replace will be
351 * garbage collectable as soon as they are no longer referenced by
352 * any reader thread that may be in the midst of concurrently
353 * traversing table. Upon transfer, the old table bin contains
354 * only a special forwarding node (with hash field "MOVED") that
355 * contains the next table as its key. On encountering a
356 * forwarding node, access and update operations restart, using
357 * the new table.
358 *
359 * Each bin transfer requires its bin lock, which can stall
360 * waiting for locks while resizing. However, because other
361 * threads can join in and help resize rather than contend for
362 * locks, average aggregate waits become shorter as resizing
363 * progresses. The transfer operation must also ensure that all
364 * accessible bins in both the old and new table are usable by any
365 * traversal. This is arranged by proceeding from the last bin
366 * (table.length - 1) up towards the first. Upon seeing a
367 * forwarding node, traversals (see class Traverser) arrange to
368 * move to the new table without revisiting nodes. However, to
369 * ensure that no intervening nodes are skipped, bin splitting can
370 * only begin after the associated reverse-forwarders are in
371 * place.
372 *
373 * The traversal scheme also applies to partial traversals of
374 * ranges of bins (via an alternate Traverser constructor)
375 * to support partitioned aggregate operations. Also, read-only
376 * operations give up if ever forwarded to a null table, which
377 * provides support for shutdown-style clearing, which is also not
378 * currently implemented.
379 *
380 * Lazy table initialization minimizes footprint until first use,
381 * and also avoids resizings when the first operation is from a
382 * putAll, constructor with map argument, or deserialization.
383 * These cases attempt to override the initial capacity settings,
384 * but harmlessly fail to take effect in cases of races.
385 *
386 * The element count is maintained using a specialization of
387 * LongAdder. We need to incorporate a specialization rather than
388 * just use a LongAdder in order to access implicit
389 * contention-sensing that leads to creation of multiple
390 * Cells. The counter mechanics avoid contention on
391 * updates but can encounter cache thrashing if read too
392 * frequently during concurrent access. To avoid reading so often,
393 * resizing under contention is attempted only upon adding to a
394 * bin already holding two or more nodes. Under uniform hash
395 * distributions, the probability of this occurring at threshold
396 * is around 13%, meaning that only about 1 in 8 puts check
397 * threshold (and after resizing, many fewer do so). The bulk
398 * putAll operation further reduces contention by only committing
399 * count updates upon these size checks.
400 *
401 * Maintaining API and serialization compatibility with previous
402 * versions of this class introduces several oddities. Mainly: We
403 * leave untouched but unused constructor arguments referring to
404 * concurrencyLevel. We accept a loadFactor constructor argument,
405 * but apply it only to initial table capacity (which is the only
406 * time that we can guarantee to honor it.) We also declare an
407 * unused "Segment" class that is instantiated in minimal form
408 * only when serializing.
409 */
410
411 /* ---------------- Constants -------------- */
412
413 /**
414 * The largest possible table capacity. This value must be
415 * exactly 1<<30 to stay within Java array allocation and indexing
416 * bounds for power of two table sizes, and is further required
417 * because the top two bits of 32bit hash fields are used for
418 * control purposes.
419 */
420 private static final int MAXIMUM_CAPACITY = 1 << 30;
421
422 /**
423 * The default initial table capacity. Must be a power of 2
424 * (i.e., at least 1) and at most MAXIMUM_CAPACITY.
425 */
426 private static final int DEFAULT_CAPACITY = 16;
427
428 /**
429 * The largest possible (non-power of two) array size.
430 * Needed by toArray and related methods.
431 */
432 static final int MAX_ARRAY_SIZE = Integer.MAX_VALUE - 8;
433
434 /**
435 * The default concurrency level for this table. Unused but
436 * defined for compatibility with previous versions of this class.
437 */
438 private static final int DEFAULT_CONCURRENCY_LEVEL = 16;
439
440 /**
441 * The load factor for this table. Overrides of this value in
442 * constructors affect only the initial table capacity. The
443 * actual floating point value isn't normally used -- it is
444 * simpler to use expressions such as {@code n - (n >>> 2)} for
445 * the associated resizing threshold.
446 */
447 private static final float LOAD_FACTOR = 0.75f;
448
449 /**
450 * The bin count threshold for using a tree rather than list for a
451 * bin. The value reflects the approximate break-even point for
452 * using tree-based operations.
453 */
454 private static final int TREE_THRESHOLD = 16;
455
456 /**
457 * Minimum number of rebinnings per transfer step. Ranges are
458 * subdivided to allow multiple resizer threads. This value
459 * serves as a lower bound to avoid resizers encountering
460 * excessive memory contention. The value should be at least
461 * DEFAULT_CAPACITY.
462 */
463 private static final int MIN_TRANSFER_STRIDE = 16;
464
465 /*
466 * Encodings for Node hash fields. See above for explanation.
467 */
468 static final int MOVED = 0x80000000; // hash field for forwarding nodes
469 static final int HASH_BITS = 0x7fffffff; // usable bits of normal node hash
470
471 /** Number of CPUS, to place bounds on some sizings */
472 static final int NCPU = Runtime.getRuntime().availableProcessors();
473
474 /* ---------------- Counters -------------- */
475
476 // Adapted from LongAdder and Striped64.
477 // See their internal docs for explanation.
478
479 // A padded cell for distributing counts
480 static final class Cell { // contention-distributed count cell, adapted from LongAdder/Striped64
481 volatile long p0, p1, p2, p3, p4, p5, p6; // leading padding to reduce false sharing
482 volatile long value; // this cell's portion of the element count
483 volatile long q0, q1, q2, q3, q4, q5, q6; // trailing padding to reduce false sharing
484 Cell(long x) { value = x; } // x: initial count for this cell
485 }
486
487 /* ---------------- Fields -------------- */
488
489 /**
490 * The array of bins. Lazily initialized upon first insertion.
491 * Size is always a power of two. Accessed directly by iterators.
492 */
493 transient volatile Node<V>[] table;
494
495 /**
496 * The next table to use; non-null only while resizing.
497 */
498 private transient volatile Node<V>[] nextTable;
499
500 /**
501 * Base counter value, used mainly when there is no contention,
502 * but also as a fallback during table initialization
503 * races. Updated via CAS.
504 */
505 private transient volatile long baseCount;
506
507 /**
508 * Table initialization and resizing control. When negative, the
509 * table is being initialized or resized: -1 for initialization,
510 * else -(1 + the number of active resizing threads). Otherwise,
511 * when table is null, holds the initial table size to use upon
512 * creation, or 0 for default. After initialization, holds the
513 * next element count value upon which to resize the table.
514 */
515 private transient volatile int sizeCtl;
516
517 /**
518 * The next table index (plus one) to split while resizing.
519 */
520 private transient volatile int transferIndex;
521
522 /**
523 * The least available table index to split while resizing.
524 */
525 private transient volatile int transferOrigin;
526
527 /**
528 * Spinlock (locked via CAS) used when resizing and/or creating Cells.
529 */
530 private transient volatile int cellsBusy;
531
532 /**
533 * Table of counter cells. When non-null, size is a power of 2.
534 */
535 private transient volatile Cell[] counterCells;
536
537 // views
538 private transient KeySetView<K,V> keySet;
539 private transient ValuesView<K,V> values;
540 private transient EntrySetView<K,V> entrySet;
541
542 /** For serialization compatibility. Null unless serialized; see below */
543 private Segment<K,V>[] segments;
544
545 /* ---------------- Table element access -------------- */
546
547 /*
548 * Volatile access methods are used for table elements as well as
549 * elements of in-progress next table while resizing. Uses are
550 * null checked by callers, and implicitly bounds-checked, relying
551 * on the invariants that tab arrays have non-zero size, and all
552 * indices are masked with (tab.length - 1) which is never
553 * negative and always less than length. Note that, to be correct
554 * wrt arbitrary concurrency errors by users, bounds checks must
555 * operate on local variables, which accounts for some odd-looking
556 * inline assignments below.
557 */
558
559 @SuppressWarnings("unchecked") static final <V> Node<V> tabAt
560 (Node<V>[] tab, int i) { // used by Traverser
561 return (Node<V>)U.getObjectVolatile(tab, ((long)i << ASHIFT) + ABASE);
562 }
563
564 private static final <V> boolean casTabAt
565 (Node<V>[] tab, int i, Node<V> c, Node<V> v) {
566 return U.compareAndSwapObject(tab, ((long)i << ASHIFT) + ABASE, c, v);
567 }
568
569 private static final <V> void setTabAt
570 (Node<V>[] tab, int i, Node<V> v) {
571 U.putObjectVolatile(tab, ((long)i << ASHIFT) + ABASE, v);
572 }
573
574 /* ---------------- Nodes -------------- */
575
576 /**
577 * Key-value entry. Note that this is never exported out as a
578 * user-visible Map.Entry (see MapEntry below). Nodes with a hash
579 * field of MOVED are special, and do not contain user keys or
580 * values. Otherwise, keys are never null, and null val fields
581 * indicate that a node is in the process of being deleted or
582 * created. For purposes of read-only access, a key may be read
583 * before a val, but can only be used after checking val to be
584 * non-null.
585 */
586 static class Node<V> { // basic bin entry; see class comment above for key/val invariants
587 final int hash; // spread hash bits, or the MOVED sentinel for forwarding nodes
588 final Object key; // plain Object (not K): may hold special non-user values
589 volatile V val; // null only while the node is being created or deleted
590 volatile Node<V> next; // link to next node in this bin's list
591
592 Node(int hash, Object key, V val, Node<V> next) {
593 this.hash = hash;
594 this.key = key;
595 this.val = val;
596 this.next = next;
597 }
598 }
599
600 /* ---------------- TreeBins -------------- */
601
602 /**
603 * Nodes for use in TreeBins
604 */
605 static final class TreeNode<V> extends Node<V> { // node type used inside TreeBins
606 TreeNode<V> parent; // red-black tree links
607 TreeNode<V> left;
608 TreeNode<V> right;
609 TreeNode<V> prev; // needed to unlink next upon deletion
610 boolean red; // red-black node color
611
// Retains Node's "next" link so tree nodes stay traversable as a list.
612 TreeNode(int hash, Object key, V val, Node<V> next, TreeNode<V> parent) {
613 super(hash, key, val, next);
614 this.parent = parent;
615 }
616 }
617
618
619 /**
620 * Returns a Class for the given object of the form "class C
621 * implements Comparable<C>", if one exists, else null. See below
622 * for explanation.
623 */
624 static Class<?> comparableClassFor(Object x) {
625 Class<?> c, s, cmpc; Type[] ts, as; Type t; ParameterizedType p;
626 if ((c = x.getClass()) == String.class) // bypass checks
627 return c;
628 if ((cmpc = Comparable.class).isAssignableFrom(c)) {
629 while (cmpc.isAssignableFrom(s = c.getSuperclass()))
630 c = s; // find topmost comparable class
631 if ((ts = c.getGenericInterfaces()) != null) {
632 for (int i = 0; i < ts.length; ++i) {
633 if (((t = ts[i]) instanceof ParameterizedType) &&
634 ((p = (ParameterizedType)t).getRawType() == cmpc) &&
635 (as = p.getActualTypeArguments()) != null &&
636 as.length == 1 && as[0] == c) // type arg is c
637 return c;
638 }
639 }
640 }
641 return null;
642 }
643
644 /**
645 * A specialized form of red-black tree for use in bins
646 * whose size exceeds a threshold.
647 *
648 * TreeBins use a special form of comparison for search and
649 * related operations (which is the main reason we cannot use
650 * existing collections such as TreeMaps). TreeBins contain
651 * Comparable elements, but may contain others, as well as
652 * elements that are Comparable but not necessarily Comparable<T>
653 * for the same T, so we cannot invoke compareTo among them. To
654 * handle this, the tree is ordered primarily by hash value, then
655 * by Comparable.compareTo order if applicable, else by class
656 * names if both comparable but not to each other. On lookup at a
657 * node, if elements are not comparable or compare as 0 then both
658 * left and right children may need to be searched in the case of
659 * tied hash values. (This corresponds to the full list search
660 * that would be necessary if all elements were non-Comparable and
661 * had tied hashes.) The red-black balancing code is updated from
662 * pre-jdk-collections
663 * (http://gee.cs.oswego.edu/dl/classes/collections/RBCell.java)
664 * based in turn on Cormen, Leiserson, and Rivest "Introduction to
665 * Algorithms" (CLR).
666 *
667 * TreeBins also maintain a separate locking discipline than
668 * regular bins. Because they are forwarded via special MOVED
669 * nodes at bin heads (which can never change once established),
670 * we cannot use those nodes as locks. Instead, TreeBin
671 * extends AbstractQueuedSynchronizer to support a simple form of
672 * read-write lock. For update operations and table validation,
673 * the exclusive form of lock behaves in the same way as bin-head
674 * locks. However, lookups use shared read-lock mechanics to allow
675 * multiple readers in the absence of writers. Additionally,
676 * these lookups do not ever block: While the lock is not
677 * available, they proceed along the slow traversal path (via
678 * next-pointers) until the lock becomes available or the list is
679 * exhausted, whichever comes first. (These cases are not fast,
680 * but maximize aggregate expected throughput.) The AQS mechanics
681 * for doing this are straightforward. The lock state is held as
682 * AQS getState(). Read counts are negative; the write count (1)
683 * is positive. There are no signalling preferences among readers
684 * and writers. Since we don't need to export full Lock API, we
685 * just override the minimal AQS methods and use them directly.
686 */
    static final class TreeBin<V> extends AbstractQueuedSynchronizer {
        private static final long serialVersionUID = 2249069246763182397L;
        transient TreeNode<V> root; // root of tree
        transient TreeNode<V> first; // head of next-pointer list

        /* AQS overrides */
        // Lock state encoding (see class javadoc): 0 = free, 1 = held
        // exclusively by a writer, negative = -(number of readers).
        public final boolean isHeldExclusively() { return getState() > 0; }
        public final boolean tryAcquire(int ignore) {
            // Writer may enter only from the fully-unlocked state.
            if (compareAndSetState(0, 1)) {
                setExclusiveOwnerThread(Thread.currentThread());
                return true;
            }
            return false;
        }
        public final boolean tryRelease(int ignore) {
            setExclusiveOwnerThread(null);
            setState(0);
            return true;
        }
        public final int tryAcquireShared(int ignore) {
            // Readers decrement state; fail (without blocking the
            // caller, see getValue) while a writer holds the lock.
            for (int c;;) {
                if ((c = getState()) > 0)
                    return -1;
                if (compareAndSetState(c, c -1))
                    return 1;
            }
        }
        public final boolean tryReleaseShared(int ignore) {
            // Increment toward 0; report true only when the last
            // reader leaves, so waiting writers get a signal.
            int c;
            do {} while (!compareAndSetState(c = getState(), c + 1));
            return c == -1;
        }

        /** From CLR: left-rotates p, making p.right the subtree root. */
        private void rotateLeft(TreeNode<V> p) {
            if (p != null) {
                TreeNode<V> r = p.right, pp, rl;
                if ((rl = p.right = r.left) != null)
                    rl.parent = p;
                if ((pp = r.parent = p.parent) == null)
                    root = r;
                else if (pp.left == p)
                    pp.left = r;
                else
                    pp.right = r;
                r.left = p;
                p.parent = r;
            }
        }

        /** From CLR: right-rotates p, making p.left the subtree root. */
        private void rotateRight(TreeNode<V> p) {
            if (p != null) {
                TreeNode<V> l = p.left, pp, lr;
                if ((lr = p.left = l.right) != null)
                    lr.parent = p;
                if ((pp = l.parent = p.parent) == null)
                    root = l;
                else if (pp.right == p)
                    pp.right = l;
                else
                    pp.left = l;
                l.right = p;
                p.parent = l;
            }
        }

        /**
         * Compares k and x: if k's comparable class (cc) matches x's,
         * uses Comparable.compareTo. Otherwise compares on comparable
         * class name if either exists, else 0. This yields a total
         * order among hash-tied keys even when they are not mutually
         * Comparable (see class javadoc).
         */
        @SuppressWarnings("unchecked")
        static int cccompare(Class<?> cc, Object k, Object x) {
            Class<?> cx = comparableClassFor(x);
            return ((cc == null) ? ((cx == null) ? 0 : 1) :
                    (cx == null) ? -1 :
                    (cx == cc) ? ((Comparable<Object>)k).compareTo(x) :
                    cc.getName().compareTo(cx.getName()));
        }

        /**
         * Returns the TreeNode (or null if not found) for the given
         * key. A front-end for recursive version.
         */
        final TreeNode<V> getTreeNode(int h, Object k) {
            return getTreeNode(h, k, root, comparableClassFor(k));
        }

        /**
         * Returns the TreeNode (or null if not found) for the given key
         * starting at given root. Ordered primarily by hash, then by
         * cccompare; on ties both subtrees may need to be searched.
         */
        @SuppressWarnings("unchecked") final TreeNode<V> getTreeNode
            (int h, Object k, TreeNode<V> p, Class<?> cc) {
            while (p != null) {
                int dir, ph; Object pk;
                if ((ph = p.hash) != h)
                    dir = (h < ph) ? -1 : 1;
                else if ((pk = p.key) == k || k.equals(pk))
                    return p;
                else if ((dir = cccompare(cc, k, pk)) == 0) {
                    TreeNode<V> r, pl, pr; // check both sides
                    if ((pr = p.right) != null && h >= pr.hash &&
                        (r = getTreeNode(h, k, pr, cc)) != null)
                        return r;
                    else if ((pl = p.left) != null && h <= pl.hash)
                        dir = -1;
                    else // nothing there
                        break;
                }
                p = (dir > 0) ? p.right : p.left;
            }
            return null;
        }

        /**
         * Wrapper for getTreeNode used by CHM.get. Tries to obtain
         * read-lock to call getTreeNode, but during failure to get
         * lock, searches along next links (never blocking).
         */
        final V getValue(int h, Object k) {
            Node<V> r = null;
            int c = getState(); // Must read lock state first
            for (Node<V> e = first; e != null; e = e.next) {
                // c <= 0 means no writer; CAS takes a read lock
                // directly (same encoding as tryAcquireShared).
                if (c <= 0 && compareAndSetState(c, c - 1)) {
                    try {
                        r = getTreeNode(h, k, root, comparableClassFor(k));
                    } finally {
                        releaseShared(0);
                    }
                    break;
                }
                else if (e.hash == h && k.equals(e.key)) {
                    r = e; // found via slow next-pointer traversal
                    break;
                }
                else
                    c = getState(); // re-read state; writer may have left
            }
            return r == null ? null : r.val;
        }

        /**
         * Finds or adds a node. Caller must hold the exclusive lock.
         * @return null if added
         */
        @SuppressWarnings("unchecked") final TreeNode<V> putTreeNode
            (int h, Object k, V v) {
            Class<?> cc = comparableClassFor(k);
            TreeNode<V> pp = root, p = null;
            int dir = 0;
            while (pp != null) { // find existing node or leaf to insert at
                int ph; Object pk;
                p = pp;
                if ((ph = p.hash) != h)
                    dir = (h < ph) ? -1 : 1;
                else if ((pk = p.key) == k || k.equals(pk))
                    return p;
                else if ((dir = cccompare(cc, k, pk)) == 0) {
                    // Full tie: search right subtree for a duplicate,
                    // else insert on the left.
                    TreeNode<V> r, pr;
                    if ((pr = p.right) != null && h >= pr.hash &&
                        (r = getTreeNode(h, k, pr, cc)) != null)
                        return r;
                    else // continue left
                        dir = -1;
                }
                pp = (dir > 0) ? p.right : p.left;
            }

            // New node becomes both tree leaf and new list head.
            TreeNode<V> f = first;
            TreeNode<V> x = first = new TreeNode<V>(h, k, v, f, p);
            if (p == null)
                root = x;
            else { // attach and rebalance; adapted from CLR
                TreeNode<V> xp, xpp;
                if (f != null)
                    f.prev = x;
                if (dir <= 0)
                    p.left = x;
                else
                    p.right = x;
                x.red = true;
                // Standard red-black insertion fixup loop.
                while (x != null && (xp = x.parent) != null && xp.red &&
                       (xpp = xp.parent) != null) {
                    TreeNode<V> xppl = xpp.left;
                    if (xp == xppl) {
                        TreeNode<V> y = xpp.right; // uncle
                        if (y != null && y.red) {
                            y.red = false;
                            xp.red = false;
                            xpp.red = true;
                            x = xpp;
                        }
                        else {
                            if (x == xp.right) {
                                rotateLeft(x = xp);
                                xpp = (xp = x.parent) == null ? null : xp.parent;
                            }
                            if (xp != null) {
                                xp.red = false;
                                if (xpp != null) {
                                    xpp.red = true;
                                    rotateRight(xpp);
                                }
                            }
                        }
                    }
                    else { // mirror image of the above
                        TreeNode<V> y = xppl;
                        if (y != null && y.red) {
                            y.red = false;
                            xp.red = false;
                            xpp.red = true;
                            x = xpp;
                        }
                        else {
                            if (x == xp.left) {
                                rotateRight(x = xp);
                                xpp = (xp = x.parent) == null ? null : xp.parent;
                            }
                            if (xp != null) {
                                xp.red = false;
                                if (xpp != null) {
                                    xpp.red = true;
                                    rotateLeft(xpp);
                                }
                            }
                        }
                    }
                }
                TreeNode<V> r = root;
                if (r != null && r.red)
                    r.red = false; // root is always black
            }
            return null;
        }

        /**
         * Removes the given node, that must be present before this
         * call. This is messier than typical red-black deletion code
         * because we cannot swap the contents of an interior node
         * with a leaf successor that is pinned by "next" pointers
         * that are accessible independently of lock. So instead we
         * swap the tree linkages. Caller must hold the exclusive lock.
         */
        final void deleteTreeNode(TreeNode<V> p) {
            TreeNode<V> next = (TreeNode<V>)p.next; // unlink traversal pointers
            TreeNode<V> pred = p.prev;
            if (pred == null)
                first = next;
            else
                pred.next = next;
            if (next != null)
                next.prev = pred;
            TreeNode<V> replacement;
            TreeNode<V> pl = p.left;
            TreeNode<V> pr = p.right;
            if (pl != null && pr != null) {
                // Two children: exchange tree positions (not contents)
                // of p and its in-order successor s.
                TreeNode<V> s = pr, sl;
                while ((sl = s.left) != null) // find successor
                    s = sl;
                boolean c = s.red; s.red = p.red; p.red = c; // swap colors
                TreeNode<V> sr = s.right;
                TreeNode<V> pp = p.parent;
                if (s == pr) { // p was s's direct parent
                    p.parent = s;
                    s.right = p;
                }
                else {
                    TreeNode<V> sp = s.parent;
                    if ((p.parent = sp) != null) {
                        if (s == sp.left)
                            sp.left = p;
                        else
                            sp.right = p;
                    }
                    if ((s.right = pr) != null)
                        pr.parent = s;
                }
                p.left = null;
                if ((p.right = sr) != null)
                    sr.parent = p;
                if ((s.left = pl) != null)
                    pl.parent = s;
                if ((s.parent = pp) == null)
                    root = s;
                else if (p == pp.left)
                    pp.left = s;
                else
                    pp.right = s;
                replacement = sr;
            }
            else
                replacement = (pl != null) ? pl : pr;
            TreeNode<V> pp = p.parent;
            if (replacement == null) {
                if (pp == null) { // p was the only node
                    root = null;
                    return;
                }
                replacement = p; // p itself stands in during rebalance
            }
            else {
                replacement.parent = pp;
                if (pp == null)
                    root = replacement;
                else if (p == pp.left)
                    pp.left = replacement;
                else
                    pp.right = replacement;
                p.left = p.right = p.parent = null;
            }
            if (!p.red) { // rebalance, from CLR
                TreeNode<V> x = replacement;
                while (x != null) {
                    TreeNode<V> xp, xpl;
                    if (x.red || (xp = x.parent) == null) {
                        x.red = false;
                        break;
                    }
                    if (x == (xpl = xp.left)) {
                        TreeNode<V> sib = xp.right;
                        if (sib != null && sib.red) {
                            sib.red = false;
                            xp.red = true;
                            rotateLeft(xp);
                            sib = (xp = x.parent) == null ? null : xp.right;
                        }
                        if (sib == null)
                            x = xp;
                        else {
                            TreeNode<V> sl = sib.left, sr = sib.right;
                            if ((sr == null || !sr.red) &&
                                (sl == null || !sl.red)) {
                                sib.red = true;
                                x = xp;
                            }
                            else {
                                if (sr == null || !sr.red) {
                                    if (sl != null)
                                        sl.red = false;
                                    sib.red = true;
                                    rotateRight(sib);
                                    sib = (xp = x.parent) == null ?
                                        null : xp.right;
                                }
                                if (sib != null) {
                                    sib.red = (xp == null) ? false : xp.red;
                                    if ((sr = sib.right) != null)
                                        sr.red = false;
                                }
                                if (xp != null) {
                                    xp.red = false;
                                    rotateLeft(xp);
                                }
                                x = root; // done
                            }
                        }
                    }
                    else { // symmetric
                        TreeNode<V> sib = xpl;
                        if (sib != null && sib.red) {
                            sib.red = false;
                            xp.red = true;
                            rotateRight(xp);
                            sib = (xp = x.parent) == null ? null : xp.left;
                        }
                        if (sib == null)
                            x = xp;
                        else {
                            TreeNode<V> sl = sib.left, sr = sib.right;
                            if ((sl == null || !sl.red) &&
                                (sr == null || !sr.red)) {
                                sib.red = true;
                                x = xp;
                            }
                            else {
                                if (sl == null || !sl.red) {
                                    if (sr != null)
                                        sr.red = false;
                                    sib.red = true;
                                    rotateLeft(sib);
                                    sib = (xp = x.parent) == null ?
                                        null : xp.left;
                                }
                                if (sib != null) {
                                    sib.red = (xp == null) ? false : xp.red;
                                    if ((sl = sib.left) != null)
                                        sl.red = false;
                                }
                                if (xp != null) {
                                    xp.red = false;
                                    rotateRight(xp);
                                }
                                x = root; // done
                            }
                        }
                    }
                }
            }
            // If p stood in for a null replacement, detach it now.
            if (p == replacement && (pp = p.parent) != null) {
                if (p == pp.left) // detach pointers
                    pp.left = null;
                else if (p == pp.right)
                    pp.right = null;
                p.parent = null;
            }
        }
    }
1097
1098 /* ---------------- Collision reduction methods -------------- */
1099
1100 /**
1101 * Spreads higher bits to lower, and also forces top bit to 0.
1102 * Because the table uses power-of-two masking, sets of hashes
1103 * that vary only in bits above the current mask will always
1104 * collide. (Among known examples are sets of Float keys holding
1105 * consecutive whole numbers in small tables.) To counter this,
1106 * we apply a transform that spreads the impact of higher bits
1107 * downward. There is a tradeoff between speed, utility, and
1108 * quality of bit-spreading. Because many common sets of hashes
1109 * are already reasonably distributed across bits (so don't benefit
1110 * from spreading), and because we use trees to handle large sets
1111 * of collisions in bins, we don't need excessively high quality.
1112 */
1113 private static final int spread(int h) {
1114 h ^= (h >>> 18) ^ (h >>> 12);
1115 return (h ^ (h >>> 10)) & HASH_BITS;
1116 }
1117
1118 /**
1119 * Replaces a list bin with a tree bin if key is comparable. Call
1120 * only when locked.
1121 */
1122 private final void replaceWithTreeBin(Node<V>[] tab, int index, Object key) {
1123 if (comparableClassFor(key) != null) {
1124 TreeBin<V> t = new TreeBin<V>();
1125 for (Node<V> e = tabAt(tab, index); e != null; e = e.next)
1126 t.putTreeNode(e.hash, e.key, e.val);
1127 setTabAt(tab, index, new Node<V>(MOVED, t, null, null));
1128 }
1129 }
1130
1131 /* ---------------- Internal access and update methods -------------- */
1132
    /**
     * Implementation for get and containsKey. Lock-free: scans the
     * bin for the spread hash; relays to a TreeBin's read-locked
     * lookup, or restarts on the next table, when it encounters a
     * negative-hash (MOVED) node.
     */
    @SuppressWarnings("unchecked") private final V internalGet(Object k) {
        int h = spread(k.hashCode());
        retry: for (Node<V>[] tab = table; tab != null;) {
            Node<V> e; Object ek; V ev; int eh; // locals to read fields once
            for (e = tabAt(tab, (tab.length - 1) & h); e != null; e = e.next) {
                if ((eh = e.hash) < 0) {
                    if ((ek = e.key) instanceof TreeBin) // search TreeBin
                        return ((TreeBin<V>)ek).getValue(h, k);
                    else {                      // restart with new table
                        tab = (Node<V>[])ek;
                        continue retry;
                    }
                }
                else if (eh == h && (ev = e.val) != null &&
                         ((ek = e.key) == k || k.equals(ek)))
                    return ev; // null val means deleted-in-progress; skip
            }
            break;
        }
        return null;
    }
1155
1156 /**
1157 * Implementation for the four public remove/replace methods:
1158 * Replaces node value with v, conditional upon match of cv if
1159 * non-null. If resulting value is null, delete.
1160 */
1161 @SuppressWarnings("unchecked") private final V internalReplace
1162 (Object k, V v, Object cv) {
1163 int h = spread(k.hashCode());
1164 V oldVal = null;
1165 for (Node<V>[] tab = table;;) {
1166 Node<V> f; int i, fh; Object fk;
1167 if (tab == null ||
1168 (f = tabAt(tab, i = (tab.length - 1) & h)) == null)
1169 break;
1170 else if ((fh = f.hash) < 0) {
1171 if ((fk = f.key) instanceof TreeBin) {
1172 TreeBin<V> t = (TreeBin<V>)fk;
1173 boolean validated = false;
1174 boolean deleted = false;
1175 t.acquire(0);
1176 try {
1177 if (tabAt(tab, i) == f) {
1178 validated = true;
1179 TreeNode<V> p = t.getTreeNode(h, k);
1180 if (p != null) {
1181 V pv = p.val;
1182 if (cv == null || cv == pv || cv.equals(pv)) {
1183 oldVal = pv;
1184 if ((p.val = v) == null) {
1185 deleted = true;
1186 t.deleteTreeNode(p);
1187 }
1188 }
1189 }
1190 }
1191 } finally {
1192 t.release(0);
1193 }
1194 if (validated) {
1195 if (deleted)
1196 addCount(-1L, -1);
1197 break;
1198 }
1199 }
1200 else
1201 tab = (Node<V>[])fk;
1202 }
1203 else if (fh != h && f.next == null) // precheck
1204 break; // rules out possible existence
1205 else {
1206 boolean validated = false;
1207 boolean deleted = false;
1208 synchronized (f) {
1209 if (tabAt(tab, i) == f) {
1210 validated = true;
1211 for (Node<V> e = f, pred = null;;) {
1212 Object ek; V ev;
1213 if (e.hash == h &&
1214 ((ev = e.val) != null) &&
1215 ((ek = e.key) == k || k.equals(ek))) {
1216 if (cv == null || cv == ev || cv.equals(ev)) {
1217 oldVal = ev;
1218 if ((e.val = v) == null) {
1219 deleted = true;
1220 Node<V> en = e.next;
1221 if (pred != null)
1222 pred.next = en;
1223 else
1224 setTabAt(tab, i, en);
1225 }
1226 }
1227 break;
1228 }
1229 pred = e;
1230 if ((e = e.next) == null)
1231 break;
1232 }
1233 }
1234 }
1235 if (validated) {
1236 if (deleted)
1237 addCount(-1L, -1);
1238 break;
1239 }
1240 }
1241 }
1242 return oldVal;
1243 }
1244
1245 /*
1246 * Internal versions of insertion methods
1247 * All have the same basic structure as the first (internalPut):
1248 * 1. If table uninitialized, create
1249 * 2. If bin empty, try to CAS new node
1250 * 3. If bin stale, use new table
     * 4. If bin converted to TreeBin, validate and relay to TreeBin methods
1252 * 5. Lock and validate; if valid, scan and add or update
1253 *
1254 * The putAll method differs mainly in attempting to pre-allocate
1255 * enough table space, and also more lazily performs count updates
1256 * and checks.
1257 *
1258 * Most of the function-accepting methods can't be factored nicely
1259 * because they require different functional forms, so instead
1260 * sprawl out similar mechanics.
1261 */
1262
    /**
     * Implementation for put and putIfAbsent.
     *
     * @param k the key (non-null)
     * @param v the value (non-null)
     * @param onlyIfAbsent if true, do not overwrite an existing value
     * @return the previous value, or null if there was none
     */
    @SuppressWarnings("unchecked") private final V internalPut
        (K k, V v, boolean onlyIfAbsent) {
        if (k == null || v == null) throw new NullPointerException();
        int h = spread(k.hashCode());
        int len = 0; // bin length estimate, passed to addCount
        for (Node<V>[] tab = table;;) {
            int i, fh; Node<V> f; Object fk; V fv;
            if (tab == null)
                tab = initTable();
            else if ((f = tabAt(tab, i = (tab.length - 1) & h)) == null) {
                if (casTabAt(tab, i, null, new Node<V>(h, k, v, null)))
                    break; // no lock when adding to empty bin
            }
            else if ((fh = f.hash) < 0) {
                if ((fk = f.key) instanceof TreeBin) {
                    TreeBin<V> t = (TreeBin<V>)fk;
                    V oldVal = null;
                    t.acquire(0); // exclusive lock on tree bin
                    try {
                        if (tabAt(tab, i) == f) { // recheck under lock
                            len = 2;
                            TreeNode<V> p = t.putTreeNode(h, k, v);
                            if (p != null) { // existing mapping
                                oldVal = p.val;
                                if (!onlyIfAbsent)
                                    p.val = v;
                            }
                        }
                    } finally {
                        t.release(0);
                    }
                    if (len != 0) {
                        if (oldVal != null)
                            return oldVal;
                        break;
                    }
                }
                else
                    tab = (Node<V>[])fk; // forwarding node: use new table
            }
            else if (onlyIfAbsent && fh == h && (fv = f.val) != null &&
                     ((fk = f.key) == k || k.equals(fk))) // peek while nearby
                return fv;
            else {
                V oldVal = null;
                synchronized (f) { // lock bin head
                    if (tabAt(tab, i) == f) { // recheck under lock
                        len = 1;
                        for (Node<V> e = f;; ++len) {
                            Object ek; V ev;
                            if (e.hash == h &&
                                (ev = e.val) != null &&
                                ((ek = e.key) == k || k.equals(ek))) {
                                oldVal = ev;
                                if (!onlyIfAbsent)
                                    e.val = v;
                                break;
                            }
                            Node<V> last = e;
                            if ((e = e.next) == null) { // append at tail
                                last.next = new Node<V>(h, k, v, null);
                                if (len >= TREE_THRESHOLD)
                                    replaceWithTreeBin(tab, i, k);
                                break;
                            }
                        }
                    }
                }
                if (len != 0) {
                    if (oldVal != null)
                        return oldVal;
                    break;
                }
            }
        }
        addCount(1L, len);
        return null;
    }
1342
    /**
     * Implementation for computeIfAbsent. The mapping function is
     * invoked at most once per call, while holding the bin lock (or
     * a lock on a freshly CASed placeholder node for an empty bin),
     * so it must not attempt to update this map.
     *
     * @return the current (existing or computed) value, or null
     */
    @SuppressWarnings("unchecked") private final V internalComputeIfAbsent
        (K k, Function<? super K, ? extends V> mf) {
        if (k == null || mf == null)
            throw new NullPointerException();
        int h = spread(k.hashCode());
        V val = null;
        int len = 0;
        for (Node<V>[] tab = table;;) {
            Node<V> f; int i; Object fk;
            if (tab == null)
                tab = initTable();
            else if ((f = tabAt(tab, i = (tab.length - 1) & h)) == null) {
                // Reserve the bin with a value-less node, then compute
                // under its lock; undo the reservation if mf yields null.
                Node<V> node = new Node<V>(h, k, null, null);
                synchronized (node) {
                    if (casTabAt(tab, i, null, node)) {
                        len = 1;
                        try {
                            if ((val = mf.apply(k)) != null)
                                node.val = val;
                        } finally {
                            if (val == null)
                                setTabAt(tab, i, null);
                        }
                    }
                }
                if (len != 0)
                    break;
            }
            else if (f.hash < 0) {
                if ((fk = f.key) instanceof TreeBin) {
                    TreeBin<V> t = (TreeBin<V>)fk;
                    boolean added = false;
                    t.acquire(0); // exclusive lock on tree bin
                    try {
                        if (tabAt(tab, i) == f) { // recheck under lock
                            len = 1;
                            TreeNode<V> p = t.getTreeNode(h, k);
                            if (p != null)
                                val = p.val;
                            else if ((val = mf.apply(k)) != null) {
                                added = true;
                                len = 2;
                                t.putTreeNode(h, k, val);
                            }
                        }
                    } finally {
                        t.release(0);
                    }
                    if (len != 0) {
                        if (!added)
                            return val;
                        break;
                    }
                }
                else
                    tab = (Node<V>[])fk; // forwarding node: use new table
            }
            else {
                for (Node<V> e = f; e != null; e = e.next) { // prescan
                    Object ek; V ev;
                    if (e.hash == h && (ev = e.val) != null &&
                        ((ek = e.key) == k || k.equals(ek)))
                        return ev; // present: no lock or function call needed
                }
                boolean added = false;
                synchronized (f) { // lock bin head
                    if (tabAt(tab, i) == f) { // recheck under lock
                        len = 1;
                        for (Node<V> e = f;; ++len) {
                            Object ek; V ev;
                            if (e.hash == h &&
                                (ev = e.val) != null &&
                                ((ek = e.key) == k || k.equals(ek))) {
                                val = ev;
                                break;
                            }
                            Node<V> last = e;
                            if ((e = e.next) == null) {
                                if ((val = mf.apply(k)) != null) {
                                    added = true;
                                    last.next = new Node<V>(h, k, val, null);
                                    if (len >= TREE_THRESHOLD)
                                        replaceWithTreeBin(tab, i, k);
                                }
                                break;
                            }
                        }
                    }
                }
                if (len != 0) {
                    if (!added)
                        return val;
                    break;
                }
            }
        }
        if (val != null)
            addCount(1L, len);
        return val;
    }
1444
    /**
     * Implementation for compute and computeIfPresent. The remapping
     * function runs while holding the bin lock, so it must not
     * attempt to update this map. A null result removes any existing
     * mapping; delta tracks the net size change (-1, 0, or +1).
     *
     * @return the new value associated with k, or null if removed/absent
     */
    @SuppressWarnings("unchecked") private final V internalCompute
        (K k, boolean onlyIfPresent,
         BiFunction<? super K, ? super V, ? extends V> mf) {
        if (k == null || mf == null)
            throw new NullPointerException();
        int h = spread(k.hashCode());
        V val = null;
        int delta = 0;
        int len = 0;
        for (Node<V>[] tab = table;;) {
            Node<V> f; int i, fh; Object fk;
            if (tab == null)
                tab = initTable();
            else if ((f = tabAt(tab, i = (tab.length - 1) & h)) == null) {
                if (onlyIfPresent)
                    break; // computeIfPresent on empty bin: no-op
                // Reserve bin with placeholder node; undo if mf yields null.
                Node<V> node = new Node<V>(h, k, null, null);
                synchronized (node) {
                    if (casTabAt(tab, i, null, node)) {
                        try {
                            len = 1;
                            if ((val = mf.apply(k, null)) != null) {
                                node.val = val;
                                delta = 1;
                            }
                        } finally {
                            if (delta == 0)
                                setTabAt(tab, i, null);
                        }
                    }
                }
                if (len != 0)
                    break;
            }
            else if ((fh = f.hash) < 0) {
                if ((fk = f.key) instanceof TreeBin) {
                    TreeBin<V> t = (TreeBin<V>)fk;
                    t.acquire(0); // exclusive lock on tree bin
                    try {
                        if (tabAt(tab, i) == f) { // recheck under lock
                            len = 1;
                            TreeNode<V> p = t.getTreeNode(h, k);
                            if (p == null && onlyIfPresent)
                                break; // exits outer loop; lock released in finally
                            V pv = (p == null) ? null : p.val;
                            if ((val = mf.apply(k, pv)) != null) {
                                if (p != null)
                                    p.val = val;
                                else {
                                    len = 2;
                                    delta = 1;
                                    t.putTreeNode(h, k, val);
                                }
                            }
                            else if (p != null) { // null result: remove
                                delta = -1;
                                t.deleteTreeNode(p);
                            }
                        }
                    } finally {
                        t.release(0);
                    }
                    if (len != 0)
                        break;
                }
                else
                    tab = (Node<V>[])fk; // forwarding node: use new table
            }
            else {
                synchronized (f) { // lock bin head
                    if (tabAt(tab, i) == f) { // recheck under lock
                        len = 1;
                        for (Node<V> e = f, pred = null;; ++len) {
                            Object ek; V ev;
                            if (e.hash == h &&
                                (ev = e.val) != null &&
                                ((ek = e.key) == k || k.equals(ek))) {
                                val = mf.apply(k, ev);
                                if (val != null)
                                    e.val = val;
                                else { // null result: unlink node
                                    delta = -1;
                                    Node<V> en = e.next;
                                    if (pred != null)
                                        pred.next = en;
                                    else
                                        setTabAt(tab, i, en);
                                }
                                break;
                            }
                            pred = e;
                            if ((e = e.next) == null) {
                                if (!onlyIfPresent &&
                                    (val = mf.apply(k, null)) != null) {
                                    pred.next = new Node<V>(h, k, val, null);
                                    delta = 1;
                                    if (len >= TREE_THRESHOLD)
                                        replaceWithTreeBin(tab, i, k);
                                }
                                break;
                            }
                        }
                    }
                }
                if (len != 0)
                    break;
            }
        }
        if (delta != 0)
            addCount((long)delta, len);
        return val;
    }
1558
    /**
     * Implementation for merge. If k is absent, installs v directly;
     * otherwise replaces the value with mf(oldValue, v), removing the
     * mapping when mf returns null. mf runs while holding the bin
     * lock, so it must not attempt to update this map.
     *
     * @return the new value associated with k, or null if removed
     */
    @SuppressWarnings("unchecked") private final V internalMerge
        (K k, V v, BiFunction<? super V, ? super V, ? extends V> mf) {
        if (k == null || v == null || mf == null)
            throw new NullPointerException();
        int h = spread(k.hashCode());
        V val = null;
        int delta = 0; // net size change
        int len = 0;
        for (Node<V>[] tab = table;;) {
            int i; Node<V> f; Object fk; V fv; // NOTE(review): fv appears unused here
            if (tab == null)
                tab = initTable();
            else if ((f = tabAt(tab, i = (tab.length - 1) & h)) == null) {
                if (casTabAt(tab, i, null, new Node<V>(h, k, v, null))) {
                    delta = 1;
                    val = v;
                    break; // lock-free install into empty bin
                }
            }
            else if (f.hash < 0) {
                if ((fk = f.key) instanceof TreeBin) {
                    TreeBin<V> t = (TreeBin<V>)fk;
                    t.acquire(0); // exclusive lock on tree bin
                    try {
                        if (tabAt(tab, i) == f) { // recheck under lock
                            len = 1;
                            TreeNode<V> p = t.getTreeNode(h, k);
                            val = (p == null) ? v : mf.apply(p.val, v);
                            if (val != null) {
                                if (p != null)
                                    p.val = val;
                                else {
                                    len = 2;
                                    delta = 1;
                                    t.putTreeNode(h, k, val);
                                }
                            }
                            else if (p != null) { // null result: remove
                                delta = -1;
                                t.deleteTreeNode(p);
                            }
                        }
                    } finally {
                        t.release(0);
                    }
                    if (len != 0)
                        break;
                }
                else
                    tab = (Node<V>[])fk; // forwarding node: use new table
            }
            else {
                synchronized (f) { // lock bin head
                    if (tabAt(tab, i) == f) { // recheck under lock
                        len = 1;
                        for (Node<V> e = f, pred = null;; ++len) {
                            Object ek; V ev;
                            if (e.hash == h &&
                                (ev = e.val) != null &&
                                ((ek = e.key) == k || k.equals(ek))) {
                                val = mf.apply(ev, v);
                                if (val != null)
                                    e.val = val;
                                else { // null result: unlink node
                                    delta = -1;
                                    Node<V> en = e.next;
                                    if (pred != null)
                                        pred.next = en;
                                    else
                                        setTabAt(tab, i, en);
                                }
                                break;
                            }
                            pred = e;
                            if ((e = e.next) == null) { // absent: append v
                                val = v;
                                pred.next = new Node<V>(h, k, val, null);
                                delta = 1;
                                if (len >= TREE_THRESHOLD)
                                    replaceWithTreeBin(tab, i, k);
                                break;
                            }
                        }
                    }
                }
                if (len != 0)
                    break;
            }
        }
        if (delta != 0)
            addCount((long)delta, len);
        return val;
    }
1653
    /**
     * Implementation for putAll. Pre-sizes the table for the expected
     * number of mappings, then inserts each entry; count updates are
     * batched (committed lazily on contended bins and in the finally
     * block) rather than performed per-insertion.
     *
     * @throws NullPointerException if any entry, key, or value is null
     *         (thrown after committing entries inserted so far)
     */
    @SuppressWarnings("unchecked") private final void internalPutAll
        (Map<? extends K, ? extends V> m) {
        tryPresize(m.size());
        long delta = 0L; // number of uncommitted additions
        boolean npe = false; // to throw exception on exit for nulls
        try { // to clean up counts on other exceptions
            for (Map.Entry<?, ? extends V> entry : m.entrySet()) {
                Object k; V v;
                if (entry == null || (k = entry.getKey()) == null ||
                    (v = entry.getValue()) == null) {
                    npe = true;
                    break;
                }
                int h = spread(k.hashCode());
                for (Node<V>[] tab = table;;) {
                    int i; Node<V> f; int fh; Object fk;
                    if (tab == null)
                        tab = initTable();
                    else if ((f = tabAt(tab, i = (tab.length - 1) & h)) == null){
                        if (casTabAt(tab, i, null, new Node<V>(h, k, v, null))) {
                            ++delta;
                            break; // lock-free install into empty bin
                        }
                    }
                    else if ((fh = f.hash) < 0) {
                        if ((fk = f.key) instanceof TreeBin) {
                            TreeBin<V> t = (TreeBin<V>)fk;
                            boolean validated = false;
                            t.acquire(0); // exclusive lock on tree bin
                            try {
                                if (tabAt(tab, i) == f) { // recheck under lock
                                    validated = true;
                                    TreeNode<V> p = t.getTreeNode(h, k);
                                    if (p != null)
                                        p.val = v;
                                    else {
                                        t.putTreeNode(h, k, v);
                                        ++delta;
                                    }
                                }
                            } finally {
                                t.release(0);
                            }
                            if (validated)
                                break;
                        }
                        else
                            tab = (Node<V>[])fk; // forwarding node: new table
                    }
                    else {
                        int len = 0;
                        synchronized (f) { // lock bin head
                            if (tabAt(tab, i) == f) { // recheck under lock
                                len = 1;
                                for (Node<V> e = f;; ++len) {
                                    Object ek; V ev;
                                    if (e.hash == h &&
                                        (ev = e.val) != null &&
                                        ((ek = e.key) == k || k.equals(ek))) {
                                        e.val = v;
                                        break;
                                    }
                                    Node<V> last = e;
                                    if ((e = e.next) == null) {
                                        ++delta;
                                        last.next = new Node<V>(h, k, v, null);
                                        if (len >= TREE_THRESHOLD)
                                            replaceWithTreeBin(tab, i, k);
                                        break;
                                    }
                                }
                            }
                        }
                        if (len != 0) {
                            if (len > 1) { // commit batch when bin is growing
                                addCount(delta, len);
                                delta = 0L;
                            }
                            break;
                        }
                    }
                }
            }
        } finally {
            if (delta != 0L)
                addCount(delta, 2); // commit any remaining additions
        }
        if (npe)
            throw new NullPointerException();
    }
1745
1746 /**
1747 * Implementation for clear. Steps through each bin, removing all
1748 * nodes.
1749 */
1750 @SuppressWarnings("unchecked") private final void internalClear() {
1751 long delta = 0L; // negative number of deletions
1752 int i = 0;
1753 Node<V>[] tab = table;
1754 while (tab != null && i < tab.length) {
1755 Node<V> f = tabAt(tab, i);
1756 if (f == null)
1757 ++i;
1758 else if (f.hash < 0) {
1759 Object fk;
1760 if ((fk = f.key) instanceof TreeBin) {
1761 TreeBin<V> t = (TreeBin<V>)fk;
1762 t.acquire(0);
1763 try {
1764 if (tabAt(tab, i) == f) {
1765 for (Node<V> p = t.first; p != null; p = p.next) {
1766 if (p.val != null) { // (currently always true)
1767 p.val = null;
1768 --delta;
1769 }
1770 }
1771 t.first = null;
1772 t.root = null;
1773 ++i;
1774 }
1775 } finally {
1776 t.release(0);
1777 }
1778 }
1779 else
1780 tab = (Node<V>[])fk;
1781 }
1782 else {
1783 synchronized (f) {
1784 if (tabAt(tab, i) == f) {
1785 for (Node<V> e = f; e != null; e = e.next) {
1786 if (e.val != null) { // (currently always true)
1787 e.val = null;
1788 --delta;
1789 }
1790 }
1791 setTabAt(tab, i, null);
1792 ++i;
1793 }
1794 }
1795 }
1796 }
1797 if (delta != 0L)
1798 addCount(delta, -1);
1799 }
1800
1801 /* ---------------- Table Initialization and Resizing -------------- */
1802
1803 /**
1804 * Returns a power of two table size for the given desired capacity.
1805 * See Hackers Delight, sec 3.2
1806 */
1807 private static final int tableSizeFor(int c) {
1808 int n = c - 1;
1809 n |= n >>> 1;
1810 n |= n >>> 2;
1811 n |= n >>> 4;
1812 n |= n >>> 8;
1813 n |= n >>> 16;
1814 return (n < 0) ? 1 : (n >= MAXIMUM_CAPACITY) ? MAXIMUM_CAPACITY : n + 1;
1815 }
1816
1817 /**
1818 * Initializes table, using the size recorded in sizeCtl.
1819 */
1820 @SuppressWarnings("unchecked") private final Node<V>[] initTable() {
1821 Node<V>[] tab; int sc;
1822 while ((tab = table) == null) {
1823 if ((sc = sizeCtl) < 0)
1824 Thread.yield(); // lost initialization race; just spin
1825 else if (U.compareAndSwapInt(this, SIZECTL, sc, -1)) {
1826 try {
1827 if ((tab = table) == null) {
1828 int n = (sc > 0) ? sc : DEFAULT_CAPACITY;
1829 @SuppressWarnings("rawtypes") Node[] tb = new Node[n];
1830 table = tab = (Node<V>[])tb;
1831 sc = n - (n >>> 2);
1832 }
1833 } finally {
1834 sizeCtl = sc;
1835 }
1836 break;
1837 }
1838 }
1839 return tab;
1840 }
1841
1842 /**
1843 * Adds to count, and if table is too small and not already
1844 * resizing, initiates transfer. If already resizing, helps
1845 * perform transfer if work is available. Rechecks occupancy
1846 * after a transfer to see if another resize is already needed
1847 * because resizings are lagging additions.
1848 *
1849 * @param x the count to add
1850 * @param check if <0, don't check resize, if <= 1 only check if uncontended
1851 */
1852 private final void addCount(long x, int check) {
1853 Cell[] as; long b, s;
1854 if ((as = counterCells) != null ||
1855 !U.compareAndSwapLong(this, BASECOUNT, b = baseCount, s = b + x)) {
1856 Cell a; long v; int m;
1857 boolean uncontended = true;
1858 if (as == null || (m = as.length - 1) < 0 ||
1859 (a = as[ThreadLocalRandom.getProbe() & m]) == null ||
1860 !(uncontended =
1861 U.compareAndSwapLong(a, CELLVALUE, v = a.value, v + x))) {
1862 fullAddCount(x, uncontended);
1863 return;
1864 }
1865 if (check <= 1)
1866 return;
1867 s = sumCount();
1868 }
1869 if (check >= 0) {
1870 Node<V>[] tab, nt; int sc;
1871 while (s >= (long)(sc = sizeCtl) && (tab = table) != null &&
1872 tab.length < MAXIMUM_CAPACITY) {
1873 if (sc < 0) {
1874 if (sc == -1 || transferIndex <= transferOrigin ||
1875 (nt = nextTable) == null)
1876 break;
1877 if (U.compareAndSwapInt(this, SIZECTL, sc, sc - 1))
1878 transfer(tab, nt);
1879 }
1880 else if (U.compareAndSwapInt(this, SIZECTL, sc, -2))
1881 transfer(tab, null);
1882 s = sumCount();
1883 }
1884 }
1885 }
1886
1887 /**
1888 * Tries to presize table to accommodate the given number of elements.
1889 *
1890 * @param size number of elements (doesn't need to be perfectly accurate)
1891 */
1892 @SuppressWarnings("unchecked") private final void tryPresize(int size) {
1893 int c = (size >= (MAXIMUM_CAPACITY >>> 1)) ? MAXIMUM_CAPACITY :
1894 tableSizeFor(size + (size >>> 1) + 1);
1895 int sc;
1896 while ((sc = sizeCtl) >= 0) {
1897 Node<V>[] tab = table; int n;
1898 if (tab == null || (n = tab.length) == 0) {
1899 n = (sc > c) ? sc : c;
1900 if (U.compareAndSwapInt(this, SIZECTL, sc, -1)) {
1901 try {
1902 if (table == tab) {
1903 @SuppressWarnings("rawtypes") Node[] tb = new Node[n];
1904 table = (Node<V>[])tb;
1905 sc = n - (n >>> 2);
1906 }
1907 } finally {
1908 sizeCtl = sc;
1909 }
1910 }
1911 }
1912 else if (c <= sc || n >= MAXIMUM_CAPACITY)
1913 break;
1914 else if (tab == table &&
1915 U.compareAndSwapInt(this, SIZECTL, sc, -2))
1916 transfer(tab, null);
1917 }
1918 }
1919
    /**
     * Moves and/or copies the nodes in each bin to new table. See
     * above for explanation.
     *
     * When nextTab is null this call initiates the resize (allocating
     * the doubled table); otherwise it joins an in-progress transfer
     * as a helper. Work is claimed in chunks of {@code stride} bins by
     * CASing transferIndex downward. Bin i of the old table splits
     * into bins i and i+n of the new one.
     */
    @SuppressWarnings("unchecked") private final void transfer
        (Node<V>[] tab, Node<V>[] nextTab) {
        int n = tab.length, stride;
        if ((stride = (NCPU > 1) ? (n >>> 3) / NCPU : n) < MIN_TRANSFER_STRIDE)
            stride = MIN_TRANSFER_STRIDE; // subdivide range
        if (nextTab == null) {            // initiating
            try {
                @SuppressWarnings("rawtypes") Node[] tb = new Node[n << 1];
                nextTab = (Node<V>[])tb;
            } catch (Throwable ex) {      // try to cope with OOME
                // Saturate sizeCtl so no further resize is attempted.
                sizeCtl = Integer.MAX_VALUE;
                return;
            }
            nextTable = nextTab;
            transferOrigin = n;
            transferIndex = n;
            // Reverse forwarder: sends lookups that reach a not-yet-ready
            // slot of the new table back to the old table.
            Node<V> rev = new Node<V>(MOVED, tab, null, null);
            for (int k = n; k > 0;) {    // progressively reveal ready slots
                int nextk = (k > stride) ? k - stride : 0;
                for (int m = nextk; m < k; ++m)
                    nextTab[m] = rev;
                for (int m = n + nextk; m < n + k; ++m)
                    nextTab[m] = rev;
                // Ordered write: slots above transferOrigin are published
                // as claimable only after their reverse forwarders are set.
                U.putOrderedInt(this, TRANSFERORIGIN, k = nextk);
            }
        }
        int nextn = nextTab.length;
        // Forwarding node installed in each fully-moved old bin.
        Node<V> fwd = new Node<V>(MOVED, nextTab, null, null);
        boolean advance = true;
        for (int i = 0, bound = 0;;) {
            int nextIndex, nextBound; Node<V> f; Object fk;
            while (advance) {            // claim next bin (or chunk) to process
                if (--i >= bound)
                    advance = false;
                else if ((nextIndex = transferIndex) <= transferOrigin) {
                    i = -1;              // no more chunks available
                    advance = false;
                }
                else if (U.compareAndSwapInt
                         (this, TRANSFERINDEX, nextIndex,
                          nextBound = (nextIndex > stride ?
                                       nextIndex - stride : 0))) {
                    bound = nextBound;   // claimed [nextBound, nextIndex)
                    i = nextIndex - 1;
                    advance = false;
                }
            }
            if (i < 0 || i >= n || i + n >= nextn) {
                // Done with our range: decrement helper count in sizeCtl.
                for (int sc;;) {
                    if (U.compareAndSwapInt(this, SIZECTL, sc = sizeCtl, ++sc)) {
                        if (sc == -1) {  // we were the last worker: commit
                            nextTable = null;
                            table = nextTab;
                            sizeCtl = (n << 1) - (n >>> 1); // 0.75 * 2n
                        }
                        return;
                    }
                }
            }
            else if ((f = tabAt(tab, i)) == null) {
                // Empty bin: claim it with the forwarder, clear both targets.
                if (casTabAt(tab, i, null, fwd)) {
                    setTabAt(nextTab, i, null);
                    setTabAt(nextTab, i + n, null);
                    advance = true;
                }
            }
            else if (f.hash >= 0) {      // plain list bin
                synchronized (f) {
                    if (tabAt(tab, i) == f) { // recheck ownership under lock
                        // Find the longest terminal run whose nodes all go to
                        // the same new bin; it can be reused without copying.
                        int runBit = f.hash & n;
                        Node<V> lastRun = f, lo = null, hi = null;
                        for (Node<V> p = f.next; p != null; p = p.next) {
                            int b = p.hash & n;
                            if (b != runBit) {
                                runBit = b;
                                lastRun = p;
                            }
                        }
                        if (runBit == 0)
                            lo = lastRun;
                        else
                            hi = lastRun;
                        // Clone the nodes before the run into lo/hi lists.
                        for (Node<V> p = f; p != lastRun; p = p.next) {
                            int ph = p.hash;
                            Object pk = p.key; V pv = p.val;
                            if ((ph & n) == 0)
                                lo = new Node<V>(ph, pk, pv, lo);
                            else
                                hi = new Node<V>(ph, pk, pv, hi);
                        }
                        setTabAt(nextTab, i, lo);
                        setTabAt(nextTab, i + n, hi);
                        setTabAt(tab, i, fwd);
                        advance = true;
                    }
                }
            }
            else if ((fk = f.key) instanceof TreeBin) { // tree bin
                TreeBin<V> t = (TreeBin<V>)fk;
                t.acquire(0);            // exclusive tree lock
                try {
                    if (tabAt(tab, i) == f) {
                        TreeBin<V> lt = new TreeBin<V>();
                        TreeBin<V> ht = new TreeBin<V>();
                        int lc = 0, hc = 0;
                        for (Node<V> e = t.first; e != null; e = e.next) {
                            int h = e.hash;
                            Object k = e.key; V v = e.val;
                            if ((h & n) == 0) {
                                ++lc;
                                lt.putTreeNode(h, k, v);
                            }
                            else {
                                ++hc;
                                ht.putTreeNode(h, k, v);
                            }
                        }
                        Node<V> ln, hn; // throw away trees if too small
                        if (lc < TREE_THRESHOLD) {
                            ln = null;
                            for (Node<V> p = lt.first; p != null; p = p.next)
                                ln = new Node<V>(p.hash, p.key, p.val, ln);
                        }
                        else
                            ln = new Node<V>(MOVED, lt, null, null);
                        setTabAt(nextTab, i, ln);
                        if (hc < TREE_THRESHOLD) {
                            hn = null;
                            for (Node<V> p = ht.first; p != null; p = p.next)
                                hn = new Node<V>(p.hash, p.key, p.val, hn);
                        }
                        else
                            hn = new Node<V>(MOVED, ht, null, null);
                        setTabAt(nextTab, i + n, hn);
                        setTabAt(tab, i, fwd);
                        advance = true;
                    }
                } finally {
                    t.release(0);
                }
            }
            else
                advance = true; // already processed
        }
    }
2069
2070 /* ---------------- Counter support -------------- */
2071
2072 final long sumCount() {
2073 Cell[] as = counterCells; Cell a;
2074 long sum = baseCount;
2075 if (as != null) {
2076 for (int i = 0; i < as.length; ++i) {
2077 if ((a = as[i]) != null)
2078 sum += a.value;
2079 }
2080 }
2081 return sum;
2082 }
2083
    // See LongAdder version for explanation.
    // Slow path of addCount, taken when the fast CAS on baseCount or a
    // counter cell failed: initializes the cell table, attaches new cells,
    // doubles the table under contention (up to NCPU), or falls back to
    // the base counter. cellsBusy acts as a spinlock (0 = free, 1 = held).
    private final void fullAddCount(long x, boolean wasUncontended) {
        int h;
        if ((h = ThreadLocalRandom.getProbe()) == 0) {
            ThreadLocalRandom.localInit();      // force initialization
            h = ThreadLocalRandom.getProbe();
            wasUncontended = true;              // fresh probe: treat as uncontended
        }
        boolean collide = false;                // True if last slot nonempty
        for (;;) {
            Cell[] as; Cell a; int n; long v;
            if ((as = counterCells) != null && (n = as.length) > 0) {
                if ((a = as[(n - 1) & h]) == null) {
                    if (cellsBusy == 0) {            // Try to attach new Cell
                        Cell r = new Cell(x);        // Optimistic create
                        if (cellsBusy == 0 &&
                            U.compareAndSwapInt(this, CELLSBUSY, 0, 1)) {
                            boolean created = false;
                            try {               // Recheck under lock
                                Cell[] rs; int m, j;
                                if ((rs = counterCells) != null &&
                                    (m = rs.length) > 0 &&
                                    rs[j = (m - 1) & h] == null) {
                                    rs[j] = r;
                                    created = true;
                                }
                            } finally {
                                cellsBusy = 0;
                            }
                            if (created)
                                break;
                            continue;           // Slot is now non-empty
                        }
                    }
                    collide = false;
                }
                else if (!wasUncontended)       // CAS already known to fail
                    wasUncontended = true;      // Continue after rehash
                else if (U.compareAndSwapLong(a, CELLVALUE, v = a.value, v + x))
                    break;
                else if (counterCells != as || n >= NCPU)
                    collide = false;            // At max size or stale
                else if (!collide)
                    collide = true;             // first collision: rehash once more
                else if (cellsBusy == 0 &&
                         U.compareAndSwapInt(this, CELLSBUSY, 0, 1)) {
                    try {
                        if (counterCells == as) {// Expand table unless stale
                            Cell[] rs = new Cell[n << 1];
                            for (int i = 0; i < n; ++i)
                                rs[i] = as[i];
                            counterCells = rs;
                        }
                    } finally {
                        cellsBusy = 0;
                    }
                    collide = false;
                    continue;                   // Retry with expanded table
                }
                h = ThreadLocalRandom.advanceProbe(h); // rehash to a new cell
            }
            else if (cellsBusy == 0 && counterCells == as &&
                     U.compareAndSwapInt(this, CELLSBUSY, 0, 1)) {
                boolean init = false;
                try {                           // Initialize table
                    if (counterCells == as) {
                        Cell[] rs = new Cell[2];
                        rs[h & 1] = new Cell(x);
                        counterCells = rs;
                        init = true;
                    }
                } finally {
                    cellsBusy = 0;
                }
                if (init)
                    break;
            }
            else if (U.compareAndSwapLong(this, BASECOUNT, v = baseCount, v + x))
                break;                          // Fall back on using base
        }
    }
2165
    /* ---------------- Table Traversal -------------- */
2167
2168 /**
2169 * Encapsulates traversal for methods such as containsValue; also
2170 * serves as a base class for other iterators and bulk tasks.
2171 *
2172 * At each step, the iterator snapshots the key ("nextKey") and
2173 * value ("nextVal") of a valid node (i.e., one that, at point of
2174 * snapshot, has a non-null user value). Because val fields can
2175 * change (including to null, indicating deletion), field nextVal
2176 * might not be accurate at point of use, but still maintains the
2177 * weak consistency property of holding a value that was once
2178 * valid. To support iterator.remove, the nextKey field is not
2179 * updated (nulled out) when the iterator cannot advance.
2180 *
2181 * Exported iterators must track whether the iterator has advanced
2182 * (in hasNext vs next) (by setting/checking/nulling field
2183 * nextVal), and then extract key, value, or key-value pairs as
2184 * return values of next().
2185 *
2186 * Method advance visits once each still-valid node that was
2187 * reachable upon iterator construction. It might miss some that
2188 * were added to a bin after the bin was visited, which is OK wrt
2189 * consistency guarantees. Maintaining this property in the face
2190 * of possible ongoing resizes requires a fair amount of
2191 * bookkeeping state that is difficult to optimize away amidst
2192 * volatile accesses. Even so, traversal maintains reasonable
2193 * throughput.
2194 *
2195 * Normally, iteration proceeds bin-by-bin traversing lists.
2196 * However, if the table has been resized, then all future steps
2197 * must traverse both the bin at the current index as well as at
2198 * (index + baseSize); and so on for further resizings. To
2199 * paranoically cope with potential sharing by users of iterators
2200 * across threads, iteration terminates if a bounds checks fails
2201 * for a table read.
2202 *
2203 * Methods advanceKey and advanceValue are specializations of the
2204 * common cases of advance, relaying to the full version
2205 * otherwise. The forEachKey and forEachValue methods further
2206 * specialize, bypassing all incremental field updates in most cases.
2207 *
2208 * This class supports both Spliterator-based traversal and
2209 * CountedCompleter-based bulk tasks. The same "batch" field is
2210 * used, but in slightly different ways, in the two cases. For
2211 * Spliterators, it is a saturating (at Integer.MAX_VALUE)
2212 * estimate of element coverage. For CHM tasks, it is a pre-scaled
2213 * size that halves down to zero for leaf tasks, that is only
2214 * computed upon execution of the task. (Tasks can be submitted to
2215 * any pool, of any size, so we don't know scale factors until
2216 * running.)
2217 *
2218 * This class extends CountedCompleter to streamline parallel
2219 * iteration in bulk operations. This adds only a few fields of
2220 * space overhead, which is small enough in cases where it is not
2221 * needed to not worry about it. Because CountedCompleter is
2222 * Serializable, but iterators need not be, we need to add warning
2223 * suppressions.
2224 */
    @SuppressWarnings("serial") static class Traverser<K,V,R>
        extends CountedCompleter<R> {
        final ConcurrentHashMap<K,V> map;
        Node<V> next;        // the next entry to use
        K nextKey;           // cached key field of next
        V nextVal;           // cached val field of next
        Node<V>[] tab;       // current table; updated if resized
        int index;           // index of bin to use next
        int baseIndex;       // current index of initial table
        int baseLimit;       // index bound for initial table
        final int baseSize;  // initial table size
        int batch;           // split control

        /** Creates iterator for all entries in the table. */
        Traverser(ConcurrentHashMap<K,V> map) {
            this.map = map;
            Node<V>[] t = this.tab = map.table;
            baseLimit = baseSize = (t == null) ? 0 : t.length;
        }

        /**
         * Task constructor. A null {@code it} creates a root task over
         * the whole table; otherwise this takes the upper half of the
         * parent's remaining range (the parent keeps the lower half).
         */
        Traverser(ConcurrentHashMap<K,V> map, Traverser<K,V,?> it, int batch) {
            super(it);
            this.map = map;
            this.batch = batch; // -1 if unknown
            if (it == null) {
                Node<V>[] t = this.tab = map.table;
                baseLimit = baseSize = (t == null) ? 0 : t.length;
            }
            else { // split parent
                this.tab = it.tab;
                this.baseSize = it.baseSize;
                int hi = this.baseLimit = it.baseLimit;
                it.baseLimit = this.index = this.baseIndex =
                    (hi + it.baseIndex) >>> 1;
            }
        }

        /**
         * Spliterator constructor. Same range-halving as the task
         * constructor, but batch is a saturating size estimate that
         * halves on each split.
         */
        Traverser(ConcurrentHashMap<K,V> map, Traverser<K,V,?> it) {
            super(it);
            this.map = map;
            if (it == null) {
                Node<V>[] t = this.tab = map.table;
                baseLimit = baseSize = (t == null) ? 0 : t.length;
                long n = map.sumCount();
                batch = ((n > (long)Integer.MAX_VALUE) ? Integer.MAX_VALUE :
                         (int)n);
            }
            else {
                this.tab = it.tab;
                this.baseSize = it.baseSize;
                int hi = this.baseLimit = it.baseLimit;
                it.baseLimit = this.index = this.baseIndex =
                    (hi + it.baseIndex) >>> 1;
                this.batch = it.batch >>>= 1; // halve both parent's and ours
            }
        }

        /**
         * Advances if possible, returning next valid value, or null if none.
         */
        @SuppressWarnings("unchecked") final V advance() {
            for (Node<V> e = next;;) {
                if (e != null)                  // advance past used/skipped node
                    e = next = e.next;
                while (e == null) {             // get to next non-null bin
                    Node<V>[] t; int i, n;      // must use locals in checks
                    if (baseIndex >= baseLimit || (t = tab) == null ||
                        (n = t.length) <= (i = index) || i < 0)
                        return nextVal = null;  // traversal exhausted
                    if ((e = next = tabAt(t, index)) != null && e.hash < 0) {
                        Object ek;              // special node: tree bin or forwarder
                        if ((ek = e.key) instanceof TreeBin)
                            e = ((TreeBin<V>)ek).first;
                        else {
                            tab = (Node<V>[])ek; // forwarded: follow to new table
                            continue;           // restarts due to null val
                        }
                    }
                    if ((index += baseSize) >= n)
                        index = ++baseIndex;    // visit upper slots if present
                }
                nextKey = (K)e.key;
                if ((nextVal = e.val) != null)  // skip deleted or special nodes
                    return nextVal;
            }
        }

        /**
         * Common case version for value traversal; relays to advance()
         * when the table has been resized or state is otherwise unusual.
         */
        @SuppressWarnings("unchecked") final V advanceValue() {
            outer: for (Node<V> e = next;;) {
                if (e == null || (e = e.next) == null) {
                    Node<V>[] t; int i, len, n; Object ek;
                    // Fast path only when no resize has intervened and the
                    // cached indices are consistent; otherwise fall through
                    // to the slow path below.
                    if ((t = tab) == null ||
                        baseSize != (len = t.length) ||
                        len < (n = baseLimit) ||
                        baseIndex != (i = index))
                        break;
                    do {
                        if (i < 0 || i >= n) {
                            index = baseIndex = n;
                            next = null;
                            return nextVal = null;
                        }
                        if ((e = tabAt(t, i)) != null && e.hash < 0) {
                            if ((ek = e.key) instanceof TreeBin)
                                e = ((TreeBin<V>)ek).first;
                            else {
                                // Forwarding node: record position, switch
                                // table, and defer to the general advance().
                                index = baseIndex = i;
                                next = null;
                                tab = (Node<V>[])ek;
                                break outer;
                            }
                        }
                        ++i;
                    } while (e == null);
                    index = baseIndex = i;
                }
                V v;
                K k = (K)e.key;
                if ((v = e.val) != null) {      // skip nulled-out nodes
                    nextVal = v;
                    nextKey = k;
                    next = e;
                    return v;
                }
            }
            return advance();
        }

        /**
         * Common case version for key traversal; same structure as
         * advanceValue but returns the key of the next valid node.
         */
        @SuppressWarnings("unchecked") final K advanceKey() {
            outer: for (Node<V> e = next;;) {
                if (e == null || (e = e.next) == null) {
                    Node<V>[] t; int i, len, n; Object ek;
                    if ((t = tab) == null ||
                        baseSize != (len = t.length) ||
                        len < (n = baseLimit) ||
                        baseIndex != (i = index))
                        break;
                    do {
                        if (i < 0 || i >= n) {
                            index = baseIndex = n;
                            next = null;
                            nextVal = null;
                            return null;
                        }
                        if ((e = tabAt(t, i)) != null && e.hash < 0) {
                            if ((ek = e.key) instanceof TreeBin)
                                e = ((TreeBin<V>)ek).first;
                            else {
                                index = baseIndex = i;
                                next = null;
                                tab = (Node<V>[])ek;
                                break outer;    // resized: use slow path
                            }
                        }
                        ++i;
                    } while (e == null);
                    index = baseIndex = i;
                }
                V v;
                K k = (K)e.key;
                if ((v = e.val) != null) {
                    nextVal = v;
                    nextKey = k;
                    next = e;
                    return k;
                }
            }
            return (advance() == null) ? null : nextKey;
        }

        /**
         * Applies the action to every remaining value. Bypasses the
         * per-step field bookkeeping when no resize has intervened;
         * otherwise falls back to repeated advance().
         */
        @SuppressWarnings("unchecked") final void forEachValue(Consumer<? super V> action) {
            if (action == null) throw new NullPointerException();
            Node<V>[] t; int i, len, n;
            if ((t = tab) != null && baseSize == (len = t.length) &&
                len >= (n = baseLimit) && baseIndex == (i = index)) {
                index = baseIndex = n;          // mark traversal consumed
                nextVal = null;
                Node<V> e = next;
                next = null;
                if (e != null)
                    e = e.next;
                outer: for (;; e = e.next) {
                    V v; Object ek;
                    for (; e == null; ++i) {    // find next non-null bin
                        if (i < 0 || i >= n)
                            return;
                        if ((e = tabAt(t, i)) != null && e.hash < 0) {
                            if ((ek = e.key) instanceof TreeBin)
                                e = ((TreeBin<V>)ek).first;
                            else {
                                index = baseIndex = i;
                                tab = (Node<V>[])ek;
                                break outer;    // resized: slow path below
                            }
                        }
                    }
                    if ((v = e.val) != null)
                        action.accept(v);
                }
            }
            V v;
            while ((v = advance()) != null)
                action.accept(v);
        }

        /**
         * Applies the action to every remaining key; mirrors
         * forEachValue.
         */
        @SuppressWarnings("unchecked") final void forEachKey(Consumer<? super K> action) {
            if (action == null) throw new NullPointerException();
            Node<V>[] t; int i, len, n;
            if ((t = tab) != null && baseSize == (len = t.length) &&
                len >= (n = baseLimit) && baseIndex == (i = index)) {
                index = baseIndex = n;
                nextVal = null;
                Node<V> e = next;
                next = null;
                if (e != null)
                    e = e.next;
                outer: for (;; e = e.next) {
                    for (; e == null; ++i) {
                        if (i < 0 || i >= n)
                            return;
                        if ((e = tabAt(t, i)) != null && e.hash < 0) {
                            Object ek;
                            if ((ek = e.key) instanceof TreeBin)
                                e = ((TreeBin<V>)ek).first;
                            else {
                                index = baseIndex = i;
                                tab = (Node<V>[])ek;
                                break outer;
                            }
                        }
                    }
                    Object k = e.key;
                    if (e.val != null)          // only still-valid nodes
                        action.accept((K)k);
                }
            }
            while (advance() != null)
                action.accept(nextKey);
        }

        /**
         * Removes the entry for the current key. nextKey is retained
         * after advancing precisely so this can work after next().
         */
        public final void remove() {
            K k = nextKey;
            if (k == null && (advanceValue() == null || (k = nextKey) == null))
                throw new IllegalStateException();
            map.internalReplace(k, null, null);
        }

        public final boolean hasNext() {
            return nextVal != null || advanceValue() != null;
        }

        public final boolean hasMoreElements() { return hasNext(); }

        public void compute() { } // default no-op CountedCompleter body

        public long estimateSize() { return batch; }

        /**
         * Returns a batch value > 0 if this task should (and must) be
         * split, if so, adding to pending count, and in any case
         * updating batch value. The initial batch value is approx
         * exp2 of the number of times (minus one) to split task by
         * two before executing leaf action. This value is faster to
         * compute and more convenient to use as a guide to splitting
         * than is the depth, since it is used while dividing by two
         * anyway.
         */
        final int preSplit() {
            int b; ForkJoinPool pool;
            if ((b = batch) < 0) { // force initialization
                int sp = (((pool = getPool()) == null) ?
                          ForkJoinPool.getCommonPoolParallelism() :
                          pool.getParallelism()) << 3; // slack of 8
                long n = map.sumCount();
                b = (n <= 0L) ? 0 : (n < (long)sp) ? (int)n : sp;
            }
            b = (b <= 1 || baseIndex >= baseLimit) ? 0 : (b >>> 1);
            if ((batch = b) > 0)
                addToPendingCount(1);
            return b;
        }
    }
2515
2516 /* ---------------- Public operations -------------- */
2517
2518 /**
2519 * Creates a new, empty map with the default initial table size (16).
2520 */
2521 public ConcurrentHashMap() {
2522 }
2523
2524 /**
2525 * Creates a new, empty map with an initial table size
2526 * accommodating the specified number of elements without the need
2527 * to dynamically resize.
2528 *
2529 * @param initialCapacity The implementation performs internal
2530 * sizing to accommodate this many elements.
2531 * @throws IllegalArgumentException if the initial capacity of
2532 * elements is negative
2533 */
2534 public ConcurrentHashMap(int initialCapacity) {
2535 if (initialCapacity < 0)
2536 throw new IllegalArgumentException();
2537 int cap = ((initialCapacity >= (MAXIMUM_CAPACITY >>> 1)) ?
2538 MAXIMUM_CAPACITY :
2539 tableSizeFor(initialCapacity + (initialCapacity >>> 1) + 1));
2540 this.sizeCtl = cap;
2541 }
2542
2543 /**
2544 * Creates a new map with the same mappings as the given map.
2545 *
2546 * @param m the map
2547 */
2548 public ConcurrentHashMap(Map<? extends K, ? extends V> m) {
2549 this.sizeCtl = DEFAULT_CAPACITY;
2550 internalPutAll(m);
2551 }
2552
2553 /**
2554 * Creates a new, empty map with an initial table size based on
2555 * the given number of elements ({@code initialCapacity}) and
2556 * initial table density ({@code loadFactor}).
2557 *
2558 * @param initialCapacity the initial capacity. The implementation
2559 * performs internal sizing to accommodate this many elements,
2560 * given the specified load factor.
2561 * @param loadFactor the load factor (table density) for
2562 * establishing the initial table size
2563 * @throws IllegalArgumentException if the initial capacity of
2564 * elements is negative or the load factor is nonpositive
2565 *
2566 * @since 1.6
2567 */
2568 public ConcurrentHashMap(int initialCapacity, float loadFactor) {
2569 this(initialCapacity, loadFactor, 1);
2570 }
2571
2572 /**
2573 * Creates a new, empty map with an initial table size based on
2574 * the given number of elements ({@code initialCapacity}), table
2575 * density ({@code loadFactor}), and number of concurrently
2576 * updating threads ({@code concurrencyLevel}).
2577 *
2578 * @param initialCapacity the initial capacity. The implementation
2579 * performs internal sizing to accommodate this many elements,
2580 * given the specified load factor.
2581 * @param loadFactor the load factor (table density) for
2582 * establishing the initial table size
2583 * @param concurrencyLevel the estimated number of concurrently
2584 * updating threads. The implementation may use this value as
2585 * a sizing hint.
2586 * @throws IllegalArgumentException if the initial capacity is
2587 * negative or the load factor or concurrencyLevel are
2588 * nonpositive
2589 */
2590 public ConcurrentHashMap(int initialCapacity,
2591 float loadFactor, int concurrencyLevel) {
2592 if (!(loadFactor > 0.0f) || initialCapacity < 0 || concurrencyLevel <= 0)
2593 throw new IllegalArgumentException();
2594 if (initialCapacity < concurrencyLevel) // Use at least as many bins
2595 initialCapacity = concurrencyLevel; // as estimated threads
2596 long size = (long)(1.0 + (long)initialCapacity / loadFactor);
2597 int cap = (size >= (long)MAXIMUM_CAPACITY) ?
2598 MAXIMUM_CAPACITY : tableSizeFor((int)size);
2599 this.sizeCtl = cap;
2600 }
2601
2602 /**
2603 * Creates a new {@link Set} backed by a ConcurrentHashMap
2604 * from the given type to {@code Boolean.TRUE}.
2605 *
2606 * @return the new set
2607 */
2608 public static <K> KeySetView<K,Boolean> newKeySet() {
2609 return new KeySetView<K,Boolean>
2610 (new ConcurrentHashMap<K,Boolean>(), Boolean.TRUE);
2611 }
2612
2613 /**
2614 * Creates a new {@link Set} backed by a ConcurrentHashMap
2615 * from the given type to {@code Boolean.TRUE}.
2616 *
2617 * @param initialCapacity The implementation performs internal
2618 * sizing to accommodate this many elements.
2619 * @throws IllegalArgumentException if the initial capacity of
2620 * elements is negative
2621 * @return the new set
2622 */
2623 public static <K> KeySetView<K,Boolean> newKeySet(int initialCapacity) {
2624 return new KeySetView<K,Boolean>
2625 (new ConcurrentHashMap<K,Boolean>(initialCapacity), Boolean.TRUE);
2626 }
2627
2628 /**
2629 * {@inheritDoc}
2630 */
2631 public boolean isEmpty() {
2632 return sumCount() <= 0L; // ignore transient negative values
2633 }
2634
2635 /**
2636 * {@inheritDoc}
2637 */
2638 public int size() {
2639 long n = sumCount();
2640 return ((n < 0L) ? 0 :
2641 (n > (long)Integer.MAX_VALUE) ? Integer.MAX_VALUE :
2642 (int)n);
2643 }
2644
2645 /**
2646 * Returns the number of mappings. This method should be used
2647 * instead of {@link #size} because a ConcurrentHashMap may
2648 * contain more mappings than can be represented as an int. The
2649 * value returned is an estimate; the actual count may differ if
2650 * there are concurrent insertions or removals.
2651 *
2652 * @return the number of mappings
2653 */
2654 public long mappingCount() {
2655 long n = sumCount();
2656 return (n < 0L) ? 0L : n; // ignore transient negative values
2657 }
2658
2659 /**
2660 * Returns the value to which the specified key is mapped,
2661 * or {@code null} if this map contains no mapping for the key.
2662 *
2663 * <p>More formally, if this map contains a mapping from a key
2664 * {@code k} to a value {@code v} such that {@code key.equals(k)},
2665 * then this method returns {@code v}; otherwise it returns
2666 * {@code null}. (There can be at most one such mapping.)
2667 *
2668 * @throws NullPointerException if the specified key is null
2669 */
2670 public V get(Object key) {
2671 return internalGet(key);
2672 }
2673
2674 /**
2675 * Returns the value to which the specified key is mapped,
2676 * or the given defaultValue if this map contains no mapping for the key.
2677 *
2678 * @param key the key
2679 * @param defaultValue the value to return if this map contains
2680 * no mapping for the given key
2681 * @return the mapping for the key, if present; else the defaultValue
2682 * @throws NullPointerException if the specified key is null
2683 */
2684 public V getOrDefault(Object key, V defaultValue) {
2685 V v;
2686 return (v = internalGet(key)) == null ? defaultValue : v;
2687 }
2688
2689 /**
2690 * Tests if the specified object is a key in this table.
2691 *
2692 * @param key possible key
2693 * @return {@code true} if and only if the specified object
2694 * is a key in this table, as determined by the
2695 * {@code equals} method; {@code false} otherwise
2696 * @throws NullPointerException if the specified key is null
2697 */
2698 public boolean containsKey(Object key) {
2699 return internalGet(key) != null;
2700 }
2701
2702 /**
2703 * Returns {@code true} if this map maps one or more keys to the
2704 * specified value. Note: This method may require a full traversal
2705 * of the map, and is much slower than method {@code containsKey}.
2706 *
2707 * @param value value whose presence in this map is to be tested
2708 * @return {@code true} if this map maps one or more keys to the
2709 * specified value
2710 * @throws NullPointerException if the specified value is null
2711 */
2712 public boolean containsValue(Object value) {
2713 if (value == null)
2714 throw new NullPointerException();
2715 V v;
2716 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
2717 while ((v = it.advanceValue()) != null) {
2718 if (v == value || value.equals(v))
2719 return true;
2720 }
2721 return false;
2722 }
2723
2724 /**
2725 * Legacy method testing if some key maps into the specified value
2726 * in this table. This method is identical in functionality to
2727 * {@link #containsValue(Object)}, and exists solely to ensure
2728 * full compatibility with class {@link java.util.Hashtable},
2729 * which supported this method prior to introduction of the
2730 * Java Collections framework.
2731 *
2732 * @param value a value to search for
2733 * @return {@code true} if and only if some key maps to the
2734 * {@code value} argument in this table as
2735 * determined by the {@code equals} method;
2736 * {@code false} otherwise
2737 * @throws NullPointerException if the specified value is null
2738 */
2739 @Deprecated public boolean contains(Object value) {
2740 return containsValue(value);
2741 }
2742
2743 /**
2744 * Maps the specified key to the specified value in this table.
2745 * Neither the key nor the value can be null.
2746 *
2747 * <p>The value can be retrieved by calling the {@code get} method
2748 * with a key that is equal to the original key.
2749 *
2750 * @param key key with which the specified value is to be associated
2751 * @param value value to be associated with the specified key
2752 * @return the previous value associated with {@code key}, or
2753 * {@code null} if there was no mapping for {@code key}
2754 * @throws NullPointerException if the specified key or value is null
2755 */
2756 public V put(K key, V value) {
2757 return internalPut(key, value, false);
2758 }
2759
2760 /**
2761 * {@inheritDoc}
2762 *
2763 * @return the previous value associated with the specified key,
2764 * or {@code null} if there was no mapping for the key
2765 * @throws NullPointerException if the specified key or value is null
2766 */
2767 public V putIfAbsent(K key, V value) {
2768 return internalPut(key, value, true);
2769 }
2770
2771 /**
2772 * Copies all of the mappings from the specified map to this one.
2773 * These mappings replace any mappings that this map had for any of the
2774 * keys currently in the specified map.
2775 *
2776 * @param m mappings to be stored in this map
2777 */
2778 public void putAll(Map<? extends K, ? extends V> m) {
2779 internalPutAll(m);
2780 }
2781
2782 /**
2783 * If the specified key is not already associated with a value (or
2784 * is mapped to {@code null}), attempts to compute its value using
2785 * the given mapping function and enters it into this map unless
2786 * {@code null}. The entire method invocation is performed
2787 * atomically, so the function is applied at most once per key.
2788 * Some attempted update operations on this map by other threads
2789 * may be blocked while computation is in progress, so the
2790 * computation should be short and simple, and must not attempt to
2791 * update any other mappings of this Map.
2792 *
2793 * @param key key with which the specified value is to be associated
2794 * @param mappingFunction the function to compute a value
2795 * @return the current (existing or computed) value associated with
2796 * the specified key, or null if the computed value is null
2797 * @throws NullPointerException if the specified key or mappingFunction
2798 * is null
2799 * @throws IllegalStateException if the computation detectably
2800 * attempts a recursive update to this map that would
2801 * otherwise never complete
2802 * @throws RuntimeException or Error if the mappingFunction does so,
2803 * in which case the mapping is left unestablished
2804 */
2805 public V computeIfAbsent
2806 (K key, Function<? super K, ? extends V> mappingFunction) {
2807 return internalComputeIfAbsent(key, mappingFunction);
2808 }
2809
2810 /**
2811 * If the value for the specified key is present and non-null,
2812 * attempts to compute a new mapping given the key and its current
2813 * mapped value. The entire method invocation is performed
2814 * atomically. Some attempted update operations on this map by
2815 * other threads may be blocked while computation is in progress,
2816 * so the computation should be short and simple, and must not
2817 * attempt to update any other mappings of this Map.
2818 *
2819 * @param key key with which a value may be associated
2820 * @param remappingFunction the function to compute a value
2821 * @return the new value associated with the specified key, or null if none
2822 * @throws NullPointerException if the specified key or remappingFunction
2823 * is null
2824 * @throws IllegalStateException if the computation detectably
2825 * attempts a recursive update to this map that would
2826 * otherwise never complete
2827 * @throws RuntimeException or Error if the remappingFunction does so,
2828 * in which case the mapping is unchanged
2829 */
2830 public V computeIfPresent
2831 (K key, BiFunction<? super K, ? super V, ? extends V> remappingFunction) {
2832 return internalCompute(key, true, remappingFunction);
2833 }
2834
2835 /**
2836 * Attempts to compute a mapping for the specified key and its
2837 * current mapped value (or {@code null} if there is no current
2838 * mapping). The entire method invocation is performed atomically.
2839 * Some attempted update operations on this map by other threads
2840 * may be blocked while computation is in progress, so the
2841 * computation should be short and simple, and must not attempt to
2842 * update any other mappings of this Map.
2843 *
2844 * @param key key with which the specified value is to be associated
2845 * @param remappingFunction the function to compute a value
2846 * @return the new value associated with the specified key, or null if none
2847 * @throws NullPointerException if the specified key or remappingFunction
2848 * is null
2849 * @throws IllegalStateException if the computation detectably
2850 * attempts a recursive update to this map that would
2851 * otherwise never complete
2852 * @throws RuntimeException or Error if the remappingFunction does so,
2853 * in which case the mapping is unchanged
2854 */
2855 public V compute
2856 (K key, BiFunction<? super K, ? super V, ? extends V> remappingFunction) {
2857 return internalCompute(key, false, remappingFunction);
2858 }
2859
2860 /**
2861 * If the specified key is not already associated with a
2862 * (non-null) value, associates it with the given value.
2863 * Otherwise, replaces the value with the results of the given
2864 * remapping function, or removes if {@code null}. The entire
2865 * method invocation is performed atomically. Some attempted
2866 * update operations on this map by other threads may be blocked
2867 * while computation is in progress, so the computation should be
2868 * short and simple, and must not attempt to update any other
2869 * mappings of this Map.
2870 *
2871 * @param key key with which the specified value is to be associated
2872 * @param value the value to use if absent
2873 * @param remappingFunction the function to recompute a value if present
2874 * @return the new value associated with the specified key, or null if none
2875 * @throws NullPointerException if the specified key or the
2876 * remappingFunction is null
2877 * @throws RuntimeException or Error if the remappingFunction does so,
2878 * in which case the mapping is unchanged
2879 */
2880 public V merge
2881 (K key, V value,
2882 BiFunction<? super V, ? super V, ? extends V> remappingFunction) {
2883 return internalMerge(key, value, remappingFunction);
2884 }
2885
2886 /**
2887 * Removes the key (and its corresponding value) from this map.
2888 * This method does nothing if the key is not in the map.
2889 *
2890 * @param key the key that needs to be removed
2891 * @return the previous value associated with {@code key}, or
2892 * {@code null} if there was no mapping for {@code key}
2893 * @throws NullPointerException if the specified key is null
2894 */
2895 public V remove(Object key) {
2896 return internalReplace(key, null, null);
2897 }
2898
2899 /**
2900 * {@inheritDoc}
2901 *
2902 * @throws NullPointerException if the specified key is null
2903 */
2904 public boolean remove(Object key, Object value) {
2905 if (key == null)
2906 throw new NullPointerException();
2907 return value != null && internalReplace(key, null, value) != null;
2908 }
2909
2910 /**
2911 * {@inheritDoc}
2912 *
2913 * @throws NullPointerException if any of the arguments are null
2914 */
2915 public boolean replace(K key, V oldValue, V newValue) {
2916 if (key == null || oldValue == null || newValue == null)
2917 throw new NullPointerException();
2918 return internalReplace(key, newValue, oldValue) != null;
2919 }
2920
2921 /**
2922 * {@inheritDoc}
2923 *
2924 * @return the previous value associated with the specified key,
2925 * or {@code null} if there was no mapping for the key
2926 * @throws NullPointerException if the specified key or value is null
2927 */
2928 public V replace(K key, V value) {
2929 if (key == null || value == null)
2930 throw new NullPointerException();
2931 return internalReplace(key, value, null);
2932 }
2933
2934 /**
2935 * Removes all of the mappings from this map.
2936 */
2937 public void clear() {
2938 internalClear();
2939 }
2940
2941 /**
2942 * Returns a {@link Set} view of the keys contained in this map.
2943 * The set is backed by the map, so changes to the map are
2944 * reflected in the set, and vice-versa.
2945 *
2946 * @return the set view
2947 */
2948 public KeySetView<K,V> keySet() {
2949 KeySetView<K,V> ks = keySet;
2950 return (ks != null) ? ks : (keySet = new KeySetView<K,V>(this, null));
2951 }
2952
2953 /**
2954 * Returns a {@link Set} view of the keys in this map, using the
2955 * given common mapped value for any additions (i.e., {@link
2956 * Collection#add} and {@link Collection#addAll(Collection)}).
2957 * This is of course only appropriate if it is acceptable to use
2958 * the same value for all additions from this view.
2959 *
2960 * @param mappedValue the mapped value to use for any additions
2961 * @return the set view
2962 * @throws NullPointerException if the mappedValue is null
2963 */
2964 public KeySetView<K,V> keySet(V mappedValue) {
2965 if (mappedValue == null)
2966 throw new NullPointerException();
2967 return new KeySetView<K,V>(this, mappedValue);
2968 }
2969
2970 /**
2971 * Returns a {@link Collection} view of the values contained in this map.
2972 * The collection is backed by the map, so changes to the map are
2973 * reflected in the collection, and vice-versa.
2974 *
2975 * @return the collection view
2976 */
2977 public ValuesView<K,V> values() {
2978 ValuesView<K,V> vs = values;
2979 return (vs != null) ? vs : (values = new ValuesView<K,V>(this));
2980 }
2981
2982 /**
2983 * Returns a {@link Set} view of the mappings contained in this map.
2984 * The set is backed by the map, so changes to the map are
2985 * reflected in the set, and vice-versa. The set supports element
2986 * removal, which removes the corresponding mapping from the map,
2987 * via the {@code Iterator.remove}, {@code Set.remove},
2988 * {@code removeAll}, {@code retainAll}, and {@code clear}
2989 * operations. It does not support the {@code add} or
2990 * {@code addAll} operations.
2991 *
2992 * <p>The view's {@code iterator} is a "weakly consistent" iterator
2993 * that will never throw {@link ConcurrentModificationException},
2994 * and guarantees to traverse elements as they existed upon
2995 * construction of the iterator, and may (but is not guaranteed to)
2996 * reflect any modifications subsequent to construction.
2997 *
2998 * @return the set view
2999 */
3000 public Set<Map.Entry<K,V>> entrySet() {
3001 EntrySetView<K,V> es = entrySet;
3002 return (es != null) ? es : (entrySet = new EntrySetView<K,V>(this));
3003 }
3004
3005 /**
3006 * Returns an enumeration of the keys in this table.
3007 *
3008 * @return an enumeration of the keys in this table
3009 * @see #keySet()
3010 */
3011 public Enumeration<K> keys() {
3012 return new KeyIterator<K,V>(this);
3013 }
3014
3015 /**
3016 * Returns an enumeration of the values in this table.
3017 *
3018 * @return an enumeration of the values in this table
3019 * @see #values()
3020 */
3021 public Enumeration<V> elements() {
3022 return new ValueIterator<K,V>(this);
3023 }
3024
3025 /**
3026 * Returns the hash code value for this {@link Map}, i.e.,
3027 * the sum of, for each key-value pair in the map,
3028 * {@code key.hashCode() ^ value.hashCode()}.
3029 *
3030 * @return the hash code value for this map
3031 */
3032 public int hashCode() {
3033 int h = 0;
3034 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3035 V v;
3036 while ((v = it.advanceValue()) != null) {
3037 h += it.nextKey.hashCode() ^ v.hashCode();
3038 }
3039 return h;
3040 }
3041
3042 /**
3043 * Returns a string representation of this map. The string
3044 * representation consists of a list of key-value mappings (in no
3045 * particular order) enclosed in braces ("{@code {}}"). Adjacent
3046 * mappings are separated by the characters {@code ", "} (comma
3047 * and space). Each key-value mapping is rendered as the key
3048 * followed by an equals sign ("{@code =}") followed by the
3049 * associated value.
3050 *
3051 * @return a string representation of this map
3052 */
3053 public String toString() {
3054 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3055 StringBuilder sb = new StringBuilder();
3056 sb.append('{');
3057 V v;
3058 if ((v = it.advanceValue()) != null) {
3059 for (;;) {
3060 K k = it.nextKey;
3061 sb.append(k == this ? "(this Map)" : k);
3062 sb.append('=');
3063 sb.append(v == this ? "(this Map)" : v);
3064 if ((v = it.advanceValue()) == null)
3065 break;
3066 sb.append(',').append(' ');
3067 }
3068 }
3069 return sb.append('}').toString();
3070 }
3071
3072 /**
3073 * Compares the specified object with this map for equality.
3074 * Returns {@code true} if the given object is a map with the same
3075 * mappings as this map. This operation may return misleading
3076 * results if either map is concurrently modified during execution
3077 * of this method.
3078 *
3079 * @param o object to be compared for equality with this map
3080 * @return {@code true} if the specified object is equal to this map
3081 */
3082 public boolean equals(Object o) {
3083 if (o != this) {
3084 if (!(o instanceof Map))
3085 return false;
3086 Map<?,?> m = (Map<?,?>) o;
3087 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3088 V val;
3089 while ((val = it.advanceValue()) != null) {
3090 Object v = m.get(it.nextKey);
3091 if (v == null || (v != val && !v.equals(val)))
3092 return false;
3093 }
3094 for (Map.Entry<?,?> e : m.entrySet()) {
3095 Object mk, mv, v;
3096 if ((mk = e.getKey()) == null ||
3097 (mv = e.getValue()) == null ||
3098 (v = internalGet(mk)) == null ||
3099 (mv != v && !mv.equals(v)))
3100 return false;
3101 }
3102 }
3103 return true;
3104 }
3105
3106 /* ----------------Iterators -------------- */
3107
    /**
     * Iterator/Spliterator/Enumeration over this map's keys, layered on
     * the internal Traverser. Weakly consistent: reflects the table as
     * traversed and never throws ConcurrentModificationException.
     */
    @SuppressWarnings("serial") static final class KeyIterator<K,V>
        extends Traverser<K,V,Object>
        implements Spliterator<K>, Iterator<K>, Enumeration<K> {
        KeyIterator(ConcurrentHashMap<K,V> map) { super(map); }
        // Split constructor: shares traversal bounds with "it" (used by trySplit).
        KeyIterator(ConcurrentHashMap<K,V> map, Traverser<K,V,Object> it) {
            super(map, it);
        }
        public Spliterator<K> trySplit() {
            // Refuse to split when at most one bin segment remains.
            return (baseLimit - baseIndex <= 1) ? null :
                new KeyIterator<K,V>(map, this);
        }
        public final K next() {
            // nextVal == null appears to mean the previous element was
            // consumed, so advance; otherwise reuse the pending nextKey.
            // NOTE(review): relies on Traverser's nextKey/nextVal
            // protocol, defined elsewhere in this file.
            K k;
            if ((k = (nextVal == null) ? advanceKey() : nextKey) == null)
                throw new NoSuchElementException();
            nextVal = null; // mark the element as consumed
            return k;
        }

        // Enumeration compatibility (for Hashtable-style clients).
        public final K nextElement() { return next(); }

        public Iterator<K> iterator() { return this; }

        public void forEachRemaining(Consumer<? super K> action) {
            forEachKey(action);
        }

        public boolean tryAdvance(Consumer<? super K> block) {
            if (block == null) throw new NullPointerException();
            K k;
            if ((k = advanceKey()) == null)
                return false;
            block.accept(k);
            return true;
        }

        public int characteristics() {
            // Keys are unique and non-null; the source is concurrent.
            return Spliterator.DISTINCT | Spliterator.CONCURRENT |
                Spliterator.NONNULL;
        }

    }
3150
    /**
     * Iterator/Spliterator/Enumeration over this map's values, layered
     * on the internal Traverser. Weakly consistent: never throws
     * ConcurrentModificationException.
     */
    @SuppressWarnings("serial") static final class ValueIterator<K,V>
        extends Traverser<K,V,Object>
        implements Spliterator<V>, Iterator<V>, Enumeration<V> {
        ValueIterator(ConcurrentHashMap<K,V> map) { super(map); }
        // Split constructor: shares traversal bounds with "it" (used by trySplit).
        ValueIterator(ConcurrentHashMap<K,V> map, Traverser<K,V,Object> it) {
            super(map, it);
        }
        public Spliterator<V> trySplit() {
            // Refuse to split when at most one bin segment remains.
            return (baseLimit - baseIndex <= 1) ? null :
                new ValueIterator<K,V>(map, this);
        }

        public final V next() {
            // Use the pending value if one is buffered in nextVal,
            // otherwise advance the traversal.
            V v;
            if ((v = nextVal) == null && (v = advanceValue()) == null)
                throw new NoSuchElementException();
            nextVal = null; // mark the element as consumed
            return v;
        }

        // Enumeration compatibility (for Hashtable-style clients).
        public final V nextElement() { return next(); }

        public Iterator<V> iterator() { return this; }

        public void forEachRemaining(Consumer<? super V> action) {
            forEachValue(action);
        }

        public boolean tryAdvance(Consumer<? super V> block) {
            V v;
            if (block == null) throw new NullPointerException();
            if ((v = advanceValue()) == null)
                return false;
            block.accept(v);
            return true;
        }

        public int characteristics() {
            // Values are non-null but may repeat, so no DISTINCT here.
            return Spliterator.CONCURRENT | Spliterator.NONNULL;
        }
    }
3192
    /**
     * Iterator/Spliterator over this map's entries, layered on the
     * internal Traverser. next() returns write-through MapEntry
     * objects; the bulk methods hand out immutable snapshot entries
     * via entryFor. Weakly consistent.
     */
    @SuppressWarnings("serial") static final class EntryIterator<K,V>
        extends Traverser<K,V,Object>
        implements Spliterator<Map.Entry<K,V>>, Iterator<Map.Entry<K,V>> {
        EntryIterator(ConcurrentHashMap<K,V> map) { super(map); }
        // Split constructor: shares traversal bounds with "it" (used by trySplit).
        EntryIterator(ConcurrentHashMap<K,V> map, Traverser<K,V,Object> it) {
            super(map, it);
        }
        public Spliterator<Map.Entry<K,V>> trySplit() {
            // Refuse to split when at most one bin segment remains.
            return (baseLimit - baseIndex <= 1) ? null :
                new EntryIterator<K,V>(map, this);
        }

        public final Map.Entry<K,V> next() {
            // Use the pending value if buffered, else advance; nextKey
            // is the key paired with the value just obtained.
            V v;
            if ((v = nextVal) == null && (v = advanceValue()) == null)
                throw new NoSuchElementException();
            K k = nextKey;
            nextVal = null; // mark the element as consumed
            // Write-through entry: setValue updates the backing map.
            return new MapEntry<K,V>(k, v, map);
        }

        public Iterator<Map.Entry<K,V>> iterator() { return this; }

        public void forEachRemaining(Consumer<? super Map.Entry<K,V>> action) {
            if (action == null) throw new NullPointerException();
            V v;
            while ((v = advanceValue()) != null)
                action.accept(entryFor(nextKey, v)); // snapshot, not write-through
        }

        public boolean tryAdvance(Consumer<? super Map.Entry<K,V>> block) {
            V v;
            if (block == null) throw new NullPointerException();
            if ((v = advanceValue()) == null)
                return false;
            block.accept(entryFor(nextKey, v)); // snapshot, not write-through
            return true;
        }

        public int characteristics() {
            // Entries are unique per key and non-null; source is concurrent.
            return Spliterator.DISTINCT | Spliterator.CONCURRENT |
                Spliterator.NONNULL;
        }
    }
3237
3238 /**
3239 * Exported Entry for iterators
3240 */
3241 static final class MapEntry<K,V> implements Map.Entry<K,V> {
3242 final K key; // non-null
3243 V val; // non-null
3244 final ConcurrentHashMap<K,V> map;
3245 MapEntry(K key, V val, ConcurrentHashMap<K,V> map) {
3246 this.key = key;
3247 this.val = val;
3248 this.map = map;
3249 }
3250 public final K getKey() { return key; }
3251 public final V getValue() { return val; }
3252 public final int hashCode() { return key.hashCode() ^ val.hashCode(); }
3253 public final String toString(){ return key + "=" + val; }
3254
3255 public final boolean equals(Object o) {
3256 Object k, v; Map.Entry<?,?> e;
3257 return ((o instanceof Map.Entry) &&
3258 (k = (e = (Map.Entry<?,?>)o).getKey()) != null &&
3259 (v = e.getValue()) != null &&
3260 (k == key || k.equals(key)) &&
3261 (v == val || v.equals(val)));
3262 }
3263
3264 /**
3265 * Sets our entry's value and writes through to the map. The
3266 * value to return is somewhat arbitrary here. Since we do not
3267 * necessarily track asynchronous changes, the most recent
3268 * "previous" value could be different from what we return (or
3269 * could even have been removed in which case the put will
3270 * re-establish). We do not and cannot guarantee more.
3271 */
3272 public final V setValue(V value) {
3273 if (value == null) throw new NullPointerException();
3274 V v = val;
3275 val = value;
3276 map.put(key, value);
3277 return v;
3278 }
3279 }
3280
3281 /**
3282 * Returns exportable snapshot entry for the given key and value
3283 * when write-through can't or shouldn't be used.
3284 */
3285 static <K,V> AbstractMap.SimpleEntry<K,V> entryFor(K k, V v) {
3286 return new AbstractMap.SimpleEntry<K,V>(k, v);
3287 }
3288
3289 /* ---------------- Serialization Support -------------- */
3290
3291 /**
3292 * Stripped-down version of helper class used in previous version,
3293 * declared for the sake of serialization compatibility
3294 */
3295 static class Segment<K,V> implements Serializable {
3296 private static final long serialVersionUID = 2249069246763182397L;
3297 final float loadFactor;
3298 Segment(float lf) { this.loadFactor = lf; }
3299 }
3300
3301 /**
3302 * Saves the state of the {@code ConcurrentHashMap} instance to a
3303 * stream (i.e., serializes it).
3304 * @param s the stream
3305 * @serialData
3306 * the key (Object) and value (Object)
3307 * for each key-value mapping, followed by a null pair.
3308 * The key-value mappings are emitted in no particular order.
3309 */
3310 @SuppressWarnings("unchecked") private void writeObject
3311 (java.io.ObjectOutputStream s)
3312 throws java.io.IOException {
3313 if (segments == null) { // for serialization compatibility
3314 segments = (Segment<K,V>[])
3315 new Segment<?,?>[DEFAULT_CONCURRENCY_LEVEL];
3316 for (int i = 0; i < segments.length; ++i)
3317 segments[i] = new Segment<K,V>(LOAD_FACTOR);
3318 }
3319 s.defaultWriteObject();
3320 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3321 V v;
3322 while ((v = it.advanceValue()) != null) {
3323 s.writeObject(it.nextKey);
3324 s.writeObject(v);
3325 }
3326 s.writeObject(null);
3327 s.writeObject(null);
3328 segments = null; // throw away
3329 }
3330
3331 /**
3332 * Reconstitutes the instance from a stream (that is, deserializes it).
3333 * @param s the stream
3334 */
3335 @SuppressWarnings("unchecked") private void readObject
3336 (java.io.ObjectInputStream s)
3337 throws java.io.IOException, ClassNotFoundException {
3338 s.defaultReadObject();
3339 this.segments = null; // unneeded
3340
3341 // Create all nodes, then place in table once size is known
3342 long size = 0L;
3343 Node<V> p = null;
3344 for (;;) {
3345 K k = (K) s.readObject();
3346 V v = (V) s.readObject();
3347 if (k != null && v != null) {
3348 int h = spread(k.hashCode());
3349 p = new Node<V>(h, k, v, p);
3350 ++size;
3351 }
3352 else
3353 break;
3354 }
3355 if (p != null) {
3356 boolean init = false;
3357 int n;
3358 if (size >= (long)(MAXIMUM_CAPACITY >>> 1))
3359 n = MAXIMUM_CAPACITY;
3360 else {
3361 int sz = (int)size;
3362 n = tableSizeFor(sz + (sz >>> 1) + 1);
3363 }
3364 int sc = sizeCtl;
3365 boolean collide = false;
3366 if (n > sc &&
3367 U.compareAndSwapInt(this, SIZECTL, sc, -1)) {
3368 try {
3369 if (table == null) {
3370 init = true;
3371 @SuppressWarnings("rawtypes") Node[] rt = new Node[n];
3372 Node<V>[] tab = (Node<V>[])rt;
3373 int mask = n - 1;
3374 while (p != null) {
3375 int j = p.hash & mask;
3376 Node<V> next = p.next;
3377 Node<V> q = p.next = tabAt(tab, j);
3378 setTabAt(tab, j, p);
3379 if (!collide && q != null && q.hash == p.hash)
3380 collide = true;
3381 p = next;
3382 }
3383 table = tab;
3384 addCount(size, -1);
3385 sc = n - (n >>> 2);
3386 }
3387 } finally {
3388 sizeCtl = sc;
3389 }
3390 if (collide) { // rescan and convert to TreeBins
3391 Node<V>[] tab = table;
3392 for (int i = 0; i < tab.length; ++i) {
3393 int c = 0;
3394 for (Node<V> e = tabAt(tab, i); e != null; e = e.next) {
3395 if (++c > TREE_THRESHOLD &&
3396 (e.key instanceof Comparable)) {
3397 replaceWithTreeBin(tab, i, e.key);
3398 break;
3399 }
3400 }
3401 }
3402 }
3403 }
3404 if (!init) { // Can only happen if unsafely published.
3405 while (p != null) {
3406 internalPut((K)p.key, p.val, false);
3407 p = p.next;
3408 }
3409 }
3410 }
3411 }
3412
3413 // -------------------------------------------------------
3414
3415 // Sequential bulk operations
3416
3417 /**
3418 * Performs the given action for each (key, value).
3419 *
3420 * @param action the action
3421 */
3422 public void forEachSequentially
3423 (BiConsumer<? super K, ? super V> action) {
3424 if (action == null) throw new NullPointerException();
3425 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3426 V v;
3427 while ((v = it.advanceValue()) != null)
3428 action.accept(it.nextKey, v);
3429 }
3430
3431 /**
3432 * Performs the given action for each non-null transformation
3433 * of each (key, value).
3434 *
3435 * @param transformer a function returning the transformation
3436 * for an element, or null if there is no transformation (in
3437 * which case the action is not applied)
3438 * @param action the action
3439 */
3440 public <U> void forEachSequentially
3441 (BiFunction<? super K, ? super V, ? extends U> transformer,
3442 Consumer<? super U> action) {
3443 if (transformer == null || action == null)
3444 throw new NullPointerException();
3445 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3446 V v; U u;
3447 while ((v = it.advanceValue()) != null) {
3448 if ((u = transformer.apply(it.nextKey, v)) != null)
3449 action.accept(u);
3450 }
3451 }
3452
3453 /**
3454 * Returns a non-null result from applying the given search
3455 * function on each (key, value), or null if none.
3456 *
3457 * @param searchFunction a function returning a non-null
3458 * result on success, else null
3459 * @return a non-null result from applying the given search
3460 * function on each (key, value), or null if none
3461 */
3462 public <U> U searchSequentially
3463 (BiFunction<? super K, ? super V, ? extends U> searchFunction) {
3464 if (searchFunction == null) throw new NullPointerException();
3465 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3466 V v; U u;
3467 while ((v = it.advanceValue()) != null) {
3468 if ((u = searchFunction.apply(it.nextKey, v)) != null)
3469 return u;
3470 }
3471 return null;
3472 }
3473
3474 /**
3475 * Returns the result of accumulating the given transformation
3476 * of all (key, value) pairs using the given reducer to
3477 * combine values, or null if none.
3478 *
3479 * @param transformer a function returning the transformation
3480 * for an element, or null if there is no transformation (in
3481 * which case it is not combined)
3482 * @param reducer a commutative associative combining function
3483 * @return the result of accumulating the given transformation
3484 * of all (key, value) pairs
3485 */
3486 public <U> U reduceSequentially
3487 (BiFunction<? super K, ? super V, ? extends U> transformer,
3488 BiFunction<? super U, ? super U, ? extends U> reducer) {
3489 if (transformer == null || reducer == null)
3490 throw new NullPointerException();
3491 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3492 U r = null, u; V v;
3493 while ((v = it.advanceValue()) != null) {
3494 if ((u = transformer.apply(it.nextKey, v)) != null)
3495 r = (r == null) ? u : reducer.apply(r, u);
3496 }
3497 return r;
3498 }
3499
3500 /**
3501 * Returns the result of accumulating the given transformation
3502 * of all (key, value) pairs using the given reducer to
3503 * combine values, and the given basis as an identity value.
3504 *
3505 * @param transformer a function returning the transformation
3506 * for an element
3507 * @param basis the identity (initial default value) for the reduction
3508 * @param reducer a commutative associative combining function
3509 * @return the result of accumulating the given transformation
3510 * of all (key, value) pairs
3511 */
3512 public double reduceToDoubleSequentially
3513 (ToDoubleBiFunction<? super K, ? super V> transformer,
3514 double basis,
3515 DoubleBinaryOperator reducer) {
3516 if (transformer == null || reducer == null)
3517 throw new NullPointerException();
3518 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3519 double r = basis; V v;
3520 while ((v = it.advanceValue()) != null)
3521 r = reducer.applyAsDouble(r, transformer.applyAsDouble(it.nextKey, v));
3522 return r;
3523 }
3524
3525 /**
3526 * Returns the result of accumulating the given transformation
3527 * of all (key, value) pairs using the given reducer to
3528 * combine values, and the given basis as an identity value.
3529 *
3530 * @param transformer a function returning the transformation
3531 * for an element
3532 * @param basis the identity (initial default value) for the reduction
3533 * @param reducer a commutative associative combining function
3534 * @return the result of accumulating the given transformation
3535 * of all (key, value) pairs
3536 */
3537 public long reduceToLongSequentially
3538 (ToLongBiFunction<? super K, ? super V> transformer,
3539 long basis,
3540 LongBinaryOperator reducer) {
3541 if (transformer == null || reducer == null)
3542 throw new NullPointerException();
3543 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3544 long r = basis; V v;
3545 while ((v = it.advanceValue()) != null)
3546 r = reducer.applyAsLong(r, transformer.applyAsLong(it.nextKey, v));
3547 return r;
3548 }
3549
3550 /**
3551 * Returns the result of accumulating the given transformation
3552 * of all (key, value) pairs using the given reducer to
3553 * combine values, and the given basis as an identity value.
3554 *
3555 * @param transformer a function returning the transformation
3556 * for an element
3557 * @param basis the identity (initial default value) for the reduction
3558 * @param reducer a commutative associative combining function
3559 * @return the result of accumulating the given transformation
3560 * of all (key, value) pairs
3561 */
3562 public int reduceToIntSequentially
3563 (ToIntBiFunction<? super K, ? super V> transformer,
3564 int basis,
3565 IntBinaryOperator reducer) {
3566 if (transformer == null || reducer == null)
3567 throw new NullPointerException();
3568 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3569 int r = basis; V v;
3570 while ((v = it.advanceValue()) != null)
3571 r = reducer.applyAsInt(r, transformer.applyAsInt(it.nextKey, v));
3572 return r;
3573 }
3574
3575 /**
3576 * Performs the given action for each key.
3577 *
3578 * @param action the action
3579 */
3580 public void forEachKeySequentially
3581 (Consumer<? super K> action) {
3582 new Traverser<K,V,Object>(this).forEachKey(action);
3583 }
3584
3585 /**
3586 * Performs the given action for each non-null transformation
3587 * of each key.
3588 *
3589 * @param transformer a function returning the transformation
3590 * for an element, or null if there is no transformation (in
3591 * which case the action is not applied)
3592 * @param action the action
3593 */
3594 public <U> void forEachKeySequentially
3595 (Function<? super K, ? extends U> transformer,
3596 Consumer<? super U> action) {
3597 if (transformer == null || action == null)
3598 throw new NullPointerException();
3599 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3600 K k; U u;
3601 while ((k = it.advanceKey()) != null) {
3602 if ((u = transformer.apply(k)) != null)
3603 action.accept(u);
3604 }
3605 }
3606
3607 /**
3608 * Returns a non-null result from applying the given search
3609 * function on each key, or null if none.
3610 *
3611 * @param searchFunction a function returning a non-null
3612 * result on success, else null
3613 * @return a non-null result from applying the given search
3614 * function on each key, or null if none
3615 */
3616 public <U> U searchKeysSequentially
3617 (Function<? super K, ? extends U> searchFunction) {
3618 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3619 K k; U u;
3620 while ((k = it.advanceKey()) != null) {
3621 if ((u = searchFunction.apply(k)) != null)
3622 return u;
3623 }
3624 return null;
3625 }
3626
3627 /**
3628 * Returns the result of accumulating all keys using the given
3629 * reducer to combine values, or null if none.
3630 *
3631 * @param reducer a commutative associative combining function
3632 * @return the result of accumulating all keys using the given
3633 * reducer to combine values, or null if none
3634 */
3635 public K reduceKeysSequentially
3636 (BiFunction<? super K, ? super K, ? extends K> reducer) {
3637 if (reducer == null) throw new NullPointerException();
3638 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3639 K u, r = null;
3640 while ((u = it.advanceKey()) != null) {
3641 r = (r == null) ? u : reducer.apply(r, u);
3642 }
3643 return r;
3644 }
3645
3646 /**
3647 * Returns the result of accumulating the given transformation
3648 * of all keys using the given reducer to combine values, or
3649 * null if none.
3650 *
3651 * @param transformer a function returning the transformation
3652 * for an element, or null if there is no transformation (in
3653 * which case it is not combined)
3654 * @param reducer a commutative associative combining function
3655 * @return the result of accumulating the given transformation
3656 * of all keys
3657 */
3658 public <U> U reduceKeysSequentially
3659 (Function<? super K, ? extends U> transformer,
3660 BiFunction<? super U, ? super U, ? extends U> reducer) {
3661 if (transformer == null || reducer == null)
3662 throw new NullPointerException();
3663 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3664 K k; U r = null, u;
3665 while ((k = it.advanceKey()) != null) {
3666 if ((u = transformer.apply(k)) != null)
3667 r = (r == null) ? u : reducer.apply(r, u);
3668 }
3669 return r;
3670 }
3671
3672 /**
3673 * Returns the result of accumulating the given transformation
3674 * of all keys using the given reducer to combine values, and
3675 * the given basis as an identity value.
3676 *
3677 * @param transformer a function returning the transformation
3678 * for an element
3679 * @param basis the identity (initial default value) for the reduction
3680 * @param reducer a commutative associative combining function
3681 * @return the result of accumulating the given transformation
3682 * of all keys
3683 */
3684 public double reduceKeysToDoubleSequentially
3685 (ToDoubleFunction<? super K> transformer,
3686 double basis,
3687 DoubleBinaryOperator reducer) {
3688 if (transformer == null || reducer == null)
3689 throw new NullPointerException();
3690 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3691 double r = basis;
3692 K k;
3693 while ((k = it.advanceKey()) != null)
3694 r = reducer.applyAsDouble(r, transformer.applyAsDouble(k));
3695 return r;
3696 }
3697
3698 /**
3699 * Returns the result of accumulating the given transformation
3700 * of all keys using the given reducer to combine values, and
3701 * the given basis as an identity value.
3702 *
3703 * @param transformer a function returning the transformation
3704 * for an element
3705 * @param basis the identity (initial default value) for the reduction
3706 * @param reducer a commutative associative combining function
3707 * @return the result of accumulating the given transformation
3708 * of all keys
3709 */
3710 public long reduceKeysToLongSequentially
3711 (ToLongFunction<? super K> transformer,
3712 long basis,
3713 LongBinaryOperator reducer) {
3714 if (transformer == null || reducer == null)
3715 throw new NullPointerException();
3716 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3717 long r = basis;
3718 K k;
3719 while ((k = it.advanceKey()) != null)
3720 r = reducer.applyAsLong(r, transformer.applyAsLong(k));
3721 return r;
3722 }
3723
3724 /**
3725 * Returns the result of accumulating the given transformation
3726 * of all keys using the given reducer to combine values, and
3727 * the given basis as an identity value.
3728 *
3729 * @param transformer a function returning the transformation
3730 * for an element
3731 * @param basis the identity (initial default value) for the reduction
3732 * @param reducer a commutative associative combining function
3733 * @return the result of accumulating the given transformation
3734 * of all keys
3735 */
3736 public int reduceKeysToIntSequentially
3737 (ToIntFunction<? super K> transformer,
3738 int basis,
3739 IntBinaryOperator reducer) {
3740 if (transformer == null || reducer == null)
3741 throw new NullPointerException();
3742 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3743 int r = basis;
3744 K k;
3745 while ((k = it.advanceKey()) != null)
3746 r = reducer.applyAsInt(r, transformer.applyAsInt(k));
3747 return r;
3748 }
3749
3750 /**
3751 * Performs the given action for each value.
3752 *
3753 * @param action the action
3754 */
3755 public void forEachValueSequentially(Consumer<? super V> action) {
3756 new Traverser<K,V,Object>(this).forEachValue(action);
3757 }
3758
3759 /**
3760 * Performs the given action for each non-null transformation
3761 * of each value.
3762 *
3763 * @param transformer a function returning the transformation
3764 * for an element, or null if there is no transformation (in
3765 * which case the action is not applied)
3766 * @param action the action
3767 */
3768 public <U> void forEachValueSequentially
3769 (Function<? super V, ? extends U> transformer,
3770 Consumer<? super U> action) {
3771 if (transformer == null || action == null)
3772 throw new NullPointerException();
3773 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3774 V v; U u;
3775 while ((v = it.advanceValue()) != null) {
3776 if ((u = transformer.apply(v)) != null)
3777 action.accept(u);
3778 }
3779 }
3780
3781 /**
3782 * Returns a non-null result from applying the given search
3783 * function on each value, or null if none.
3784 *
3785 * @param searchFunction a function returning a non-null
3786 * result on success, else null
3787 * @return a non-null result from applying the given search
3788 * function on each value, or null if none
3789 */
3790 public <U> U searchValuesSequentially
3791 (Function<? super V, ? extends U> searchFunction) {
3792 if (searchFunction == null) throw new NullPointerException();
3793 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3794 V v; U u;
3795 while ((v = it.advanceValue()) != null) {
3796 if ((u = searchFunction.apply(v)) != null)
3797 return u;
3798 }
3799 return null;
3800 }
3801
3802 /**
3803 * Returns the result of accumulating all values using the
3804 * given reducer to combine values, or null if none.
3805 *
3806 * @param reducer a commutative associative combining function
3807 * @return the result of accumulating all values
3808 */
3809 public V reduceValuesSequentially
3810 (BiFunction<? super V, ? super V, ? extends V> reducer) {
3811 if (reducer == null) throw new NullPointerException();
3812 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3813 V r = null; V v;
3814 while ((v = it.advanceValue()) != null)
3815 r = (r == null) ? v : reducer.apply(r, v);
3816 return r;
3817 }
3818
3819 /**
3820 * Returns the result of accumulating the given transformation
3821 * of all values using the given reducer to combine values, or
3822 * null if none.
3823 *
3824 * @param transformer a function returning the transformation
3825 * for an element, or null if there is no transformation (in
3826 * which case it is not combined)
3827 * @param reducer a commutative associative combining function
3828 * @return the result of accumulating the given transformation
3829 * of all values
3830 */
3831 public <U> U reduceValuesSequentially
3832 (Function<? super V, ? extends U> transformer,
3833 BiFunction<? super U, ? super U, ? extends U> reducer) {
3834 if (transformer == null || reducer == null)
3835 throw new NullPointerException();
3836 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3837 U r = null, u; V v;
3838 while ((v = it.advanceValue()) != null) {
3839 if ((u = transformer.apply(v)) != null)
3840 r = (r == null) ? u : reducer.apply(r, u);
3841 }
3842 return r;
3843 }
3844
3845 /**
3846 * Returns the result of accumulating the given transformation
3847 * of all values using the given reducer to combine values,
3848 * and the given basis as an identity value.
3849 *
3850 * @param transformer a function returning the transformation
3851 * for an element
3852 * @param basis the identity (initial default value) for the reduction
3853 * @param reducer a commutative associative combining function
3854 * @return the result of accumulating the given transformation
3855 * of all values
3856 */
3857 public double reduceValuesToDoubleSequentially
3858 (ToDoubleFunction<? super V> transformer,
3859 double basis,
3860 DoubleBinaryOperator reducer) {
3861 if (transformer == null || reducer == null)
3862 throw new NullPointerException();
3863 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3864 double r = basis; V v;
3865 while ((v = it.advanceValue()) != null)
3866 r = reducer.applyAsDouble(r, transformer.applyAsDouble(v));
3867 return r;
3868 }
3869
3870 /**
3871 * Returns the result of accumulating the given transformation
3872 * of all values using the given reducer to combine values,
3873 * and the given basis as an identity value.
3874 *
3875 * @param transformer a function returning the transformation
3876 * for an element
3877 * @param basis the identity (initial default value) for the reduction
3878 * @param reducer a commutative associative combining function
3879 * @return the result of accumulating the given transformation
3880 * of all values
3881 */
3882 public long reduceValuesToLongSequentially
3883 (ToLongFunction<? super V> transformer,
3884 long basis,
3885 LongBinaryOperator reducer) {
3886 if (transformer == null || reducer == null)
3887 throw new NullPointerException();
3888 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3889 long r = basis; V v;
3890 while ((v = it.advanceValue()) != null)
3891 r = reducer.applyAsLong(r, transformer.applyAsLong(v));
3892 return r;
3893 }
3894
3895 /**
3896 * Returns the result of accumulating the given transformation
3897 * of all values using the given reducer to combine values,
3898 * and the given basis as an identity value.
3899 *
3900 * @param transformer a function returning the transformation
3901 * for an element
3902 * @param basis the identity (initial default value) for the reduction
3903 * @param reducer a commutative associative combining function
3904 * @return the result of accumulating the given transformation
3905 * of all values
3906 */
3907 public int reduceValuesToIntSequentially
3908 (ToIntFunction<? super V> transformer,
3909 int basis,
3910 IntBinaryOperator reducer) {
3911 if (transformer == null || reducer == null)
3912 throw new NullPointerException();
3913 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3914 int r = basis; V v;
3915 while ((v = it.advanceValue()) != null)
3916 r = reducer.applyAsInt(r, transformer.applyAsInt(v));
3917 return r;
3918 }
3919
3920 /**
3921 * Performs the given action for each entry.
3922 *
3923 * @param action the action
3924 */
3925 public void forEachEntrySequentially
3926 (Consumer<? super Map.Entry<K,V>> action) {
3927 if (action == null) throw new NullPointerException();
3928 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3929 V v;
3930 while ((v = it.advanceValue()) != null)
3931 action.accept(entryFor(it.nextKey, v));
3932 }
3933
3934 /**
3935 * Performs the given action for each non-null transformation
3936 * of each entry.
3937 *
3938 * @param transformer a function returning the transformation
3939 * for an element, or null if there is no transformation (in
3940 * which case the action is not applied)
3941 * @param action the action
3942 */
3943 public <U> void forEachEntrySequentially
3944 (Function<Map.Entry<K,V>, ? extends U> transformer,
3945 Consumer<? super U> action) {
3946 if (transformer == null || action == null)
3947 throw new NullPointerException();
3948 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3949 V v; U u;
3950 while ((v = it.advanceValue()) != null) {
3951 if ((u = transformer.apply(entryFor(it.nextKey, v))) != null)
3952 action.accept(u);
3953 }
3954 }
3955
3956 /**
3957 * Returns a non-null result from applying the given search
3958 * function on each entry, or null if none.
3959 *
3960 * @param searchFunction a function returning a non-null
3961 * result on success, else null
3962 * @return a non-null result from applying the given search
3963 * function on each entry, or null if none
3964 */
3965 public <U> U searchEntriesSequentially
3966 (Function<Map.Entry<K,V>, ? extends U> searchFunction) {
3967 if (searchFunction == null) throw new NullPointerException();
3968 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3969 V v; U u;
3970 while ((v = it.advanceValue()) != null) {
3971 if ((u = searchFunction.apply(entryFor(it.nextKey, v))) != null)
3972 return u;
3973 }
3974 return null;
3975 }
3976
3977 /**
3978 * Returns the result of accumulating all entries using the
3979 * given reducer to combine values, or null if none.
3980 *
3981 * @param reducer a commutative associative combining function
3982 * @return the result of accumulating all entries
3983 */
3984 public Map.Entry<K,V> reduceEntriesSequentially
3985 (BiFunction<Map.Entry<K,V>, Map.Entry<K,V>, ? extends Map.Entry<K,V>> reducer) {
3986 if (reducer == null) throw new NullPointerException();
3987 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3988 Map.Entry<K,V> r = null; V v;
3989 while ((v = it.advanceValue()) != null) {
3990 Map.Entry<K,V> u = entryFor(it.nextKey, v);
3991 r = (r == null) ? u : reducer.apply(r, u);
3992 }
3993 return r;
3994 }
3995
3996 /**
3997 * Returns the result of accumulating the given transformation
3998 * of all entries using the given reducer to combine values,
3999 * or null if none.
4000 *
4001 * @param transformer a function returning the transformation
4002 * for an element, or null if there is no transformation (in
4003 * which case it is not combined)
4004 * @param reducer a commutative associative combining function
4005 * @return the result of accumulating the given transformation
4006 * of all entries
4007 */
4008 public <U> U reduceEntriesSequentially
4009 (Function<Map.Entry<K,V>, ? extends U> transformer,
4010 BiFunction<? super U, ? super U, ? extends U> reducer) {
4011 if (transformer == null || reducer == null)
4012 throw new NullPointerException();
4013 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
4014 U r = null, u; V v;
4015 while ((v = it.advanceValue()) != null) {
4016 if ((u = transformer.apply(entryFor(it.nextKey, v))) != null)
4017 r = (r == null) ? u : reducer.apply(r, u);
4018 }
4019 return r;
4020 }
4021
4022 /**
4023 * Returns the result of accumulating the given transformation
4024 * of all entries using the given reducer to combine values,
4025 * and the given basis as an identity value.
4026 *
4027 * @param transformer a function returning the transformation
4028 * for an element
4029 * @param basis the identity (initial default value) for the reduction
4030 * @param reducer a commutative associative combining function
4031 * @return the result of accumulating the given transformation
4032 * of all entries
4033 */
4034 public double reduceEntriesToDoubleSequentially
4035 (ToDoubleFunction<Map.Entry<K,V>> transformer,
4036 double basis,
4037 DoubleBinaryOperator reducer) {
4038 if (transformer == null || reducer == null)
4039 throw new NullPointerException();
4040 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
4041 double r = basis; V v;
4042 while ((v = it.advanceValue()) != null)
4043 r = reducer.applyAsDouble(r, transformer.applyAsDouble(entryFor(it.nextKey, v)));
4044 return r;
4045 }
4046
4047 /**
4048 * Returns the result of accumulating the given transformation
4049 * of all entries using the given reducer to combine values,
4050 * and the given basis as an identity value.
4051 *
4052 * @param transformer a function returning the transformation
4053 * for an element
4054 * @param basis the identity (initial default value) for the reduction
4055 * @param reducer a commutative associative combining function
4056 * @return the result of accumulating the given transformation
4057 * of all entries
4058 */
4059 public long reduceEntriesToLongSequentially
4060 (ToLongFunction<Map.Entry<K,V>> transformer,
4061 long basis,
4062 LongBinaryOperator reducer) {
4063 if (transformer == null || reducer == null)
4064 throw new NullPointerException();
4065 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
4066 long r = basis; V v;
4067 while ((v = it.advanceValue()) != null)
4068 r = reducer.applyAsLong(r, transformer.applyAsLong(entryFor(it.nextKey, v)));
4069 return r;
4070 }
4071
4072 /**
4073 * Returns the result of accumulating the given transformation
4074 * of all entries using the given reducer to combine values,
4075 * and the given basis as an identity value.
4076 *
4077 * @param transformer a function returning the transformation
4078 * for an element
4079 * @param basis the identity (initial default value) for the reduction
4080 * @param reducer a commutative associative combining function
4081 * @return the result of accumulating the given transformation
4082 * of all entries
4083 */
4084 public int reduceEntriesToIntSequentially
4085 (ToIntFunction<Map.Entry<K,V>> transformer,
4086 int basis,
4087 IntBinaryOperator reducer) {
4088 if (transformer == null || reducer == null)
4089 throw new NullPointerException();
4090 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
4091 int r = basis; V v;
4092 while ((v = it.advanceValue()) != null)
4093 r = reducer.applyAsInt(r, transformer.applyAsInt(entryFor(it.nextKey, v)));
4094 return r;
4095 }
4096
4097 // Parallel bulk operations
4098
4099 /**
4100 * Performs the given action for each (key, value).
4101 *
4102 * @param action the action
4103 */
4104 public void forEachInParallel(BiConsumer<? super K,? super V> action) {
4105 ForkJoinTasks.forEach
4106 (this, action).invoke();
4107 }
4108
4109 /**
4110 * Performs the given action for each non-null transformation
4111 * of each (key, value).
4112 *
4113 * @param transformer a function returning the transformation
4114 * for an element, or null if there is no transformation (in
4115 * which case the action is not applied)
4116 * @param action the action
4117 */
4118 public <U> void forEachInParallel
4119 (BiFunction<? super K, ? super V, ? extends U> transformer,
4120 Consumer<? super U> action) {
4121 ForkJoinTasks.forEach
4122 (this, transformer, action).invoke();
4123 }
4124
4125 /**
4126 * Returns a non-null result from applying the given search
4127 * function on each (key, value), or null if none. Upon
4128 * success, further element processing is suppressed and the
4129 * results of any other parallel invocations of the search
4130 * function are ignored.
4131 *
4132 * @param searchFunction a function returning a non-null
4133 * result on success, else null
4134 * @return a non-null result from applying the given search
4135 * function on each (key, value), or null if none
4136 */
4137 public <U> U searchInParallel
4138 (BiFunction<? super K, ? super V, ? extends U> searchFunction) {
4139 return ForkJoinTasks.search
4140 (this, searchFunction).invoke();
4141 }
4142
4143 /**
4144 * Returns the result of accumulating the given transformation
4145 * of all (key, value) pairs using the given reducer to
4146 * combine values, or null if none.
4147 *
4148 * @param transformer a function returning the transformation
4149 * for an element, or null if there is no transformation (in
4150 * which case it is not combined)
4151 * @param reducer a commutative associative combining function
4152 * @return the result of accumulating the given transformation
4153 * of all (key, value) pairs
4154 */
4155 public <U> U reduceInParallel
4156 (BiFunction<? super K, ? super V, ? extends U> transformer,
4157 BiFunction<? super U, ? super U, ? extends U> reducer) {
4158 return ForkJoinTasks.reduce
4159 (this, transformer, reducer).invoke();
4160 }
4161
4162 /**
4163 * Returns the result of accumulating the given transformation
4164 * of all (key, value) pairs using the given reducer to
4165 * combine values, and the given basis as an identity value.
4166 *
4167 * @param transformer a function returning the transformation
4168 * for an element
4169 * @param basis the identity (initial default value) for the reduction
4170 * @param reducer a commutative associative combining function
4171 * @return the result of accumulating the given transformation
4172 * of all (key, value) pairs
4173 */
4174 public double reduceToDoubleInParallel
4175 (ToDoubleBiFunction<? super K, ? super V> transformer,
4176 double basis,
4177 DoubleBinaryOperator reducer) {
4178 return ForkJoinTasks.reduceToDouble
4179 (this, transformer, basis, reducer).invoke();
4180 }
4181
4182 /**
4183 * Returns the result of accumulating the given transformation
4184 * of all (key, value) pairs using the given reducer to
4185 * combine values, and the given basis as an identity value.
4186 *
4187 * @param transformer a function returning the transformation
4188 * for an element
4189 * @param basis the identity (initial default value) for the reduction
4190 * @param reducer a commutative associative combining function
4191 * @return the result of accumulating the given transformation
4192 * of all (key, value) pairs
4193 */
4194 public long reduceToLongInParallel
4195 (ToLongBiFunction<? super K, ? super V> transformer,
4196 long basis,
4197 LongBinaryOperator reducer) {
4198 return ForkJoinTasks.reduceToLong
4199 (this, transformer, basis, reducer).invoke();
4200 }
4201
4202 /**
4203 * Returns the result of accumulating the given transformation
4204 * of all (key, value) pairs using the given reducer to
4205 * combine values, and the given basis as an identity value.
4206 *
4207 * @param transformer a function returning the transformation
4208 * for an element
4209 * @param basis the identity (initial default value) for the reduction
4210 * @param reducer a commutative associative combining function
4211 * @return the result of accumulating the given transformation
4212 * of all (key, value) pairs
4213 */
4214 public int reduceToIntInParallel
4215 (ToIntBiFunction<? super K, ? super V> transformer,
4216 int basis,
4217 IntBinaryOperator reducer) {
4218 return ForkJoinTasks.reduceToInt
4219 (this, transformer, basis, reducer).invoke();
4220 }
4221
4222 /**
4223 * Performs the given action for each key.
4224 *
4225 * @param action the action
4226 */
4227 public void forEachKeyInParallel(Consumer<? super K> action) {
4228 ForkJoinTasks.forEachKey
4229 (this, action).invoke();
4230 }
4231
4232 /**
4233 * Performs the given action for each non-null transformation
4234 * of each key.
4235 *
4236 * @param transformer a function returning the transformation
4237 * for an element, or null if there is no transformation (in
4238 * which case the action is not applied)
4239 * @param action the action
4240 */
4241 public <U> void forEachKeyInParallel
4242 (Function<? super K, ? extends U> transformer,
4243 Consumer<? super U> action) {
4244 ForkJoinTasks.forEachKey
4245 (this, transformer, action).invoke();
4246 }
4247
4248 /**
4249 * Returns a non-null result from applying the given search
4250 * function on each key, or null if none. Upon success,
4251 * further element processing is suppressed and the results of
4252 * any other parallel invocations of the search function are
4253 * ignored.
4254 *
4255 * @param searchFunction a function returning a non-null
4256 * result on success, else null
4257 * @return a non-null result from applying the given search
4258 * function on each key, or null if none
4259 */
4260 public <U> U searchKeysInParallel
4261 (Function<? super K, ? extends U> searchFunction) {
4262 return ForkJoinTasks.searchKeys
4263 (this, searchFunction).invoke();
4264 }
4265
4266 /**
4267 * Returns the result of accumulating all keys using the given
4268 * reducer to combine values, or null if none.
4269 *
4270 * @param reducer a commutative associative combining function
4271 * @return the result of accumulating all keys using the given
4272 * reducer to combine values, or null if none
4273 */
4274 public K reduceKeysInParallel
4275 (BiFunction<? super K, ? super K, ? extends K> reducer) {
4276 return ForkJoinTasks.reduceKeys
4277 (this, reducer).invoke();
4278 }
4279
4280 /**
4281 * Returns the result of accumulating the given transformation
4282 * of all keys using the given reducer to combine values, or
4283 * null if none.
4284 *
4285 * @param transformer a function returning the transformation
4286 * for an element, or null if there is no transformation (in
4287 * which case it is not combined)
4288 * @param reducer a commutative associative combining function
4289 * @return the result of accumulating the given transformation
4290 * of all keys
4291 */
4292 public <U> U reduceKeysInParallel
4293 (Function<? super K, ? extends U> transformer,
4294 BiFunction<? super U, ? super U, ? extends U> reducer) {
4295 return ForkJoinTasks.reduceKeys
4296 (this, transformer, reducer).invoke();
4297 }
4298
4299 /**
4300 * Returns the result of accumulating the given transformation
4301 * of all keys using the given reducer to combine values, and
4302 * the given basis as an identity value.
4303 *
4304 * @param transformer a function returning the transformation
4305 * for an element
4306 * @param basis the identity (initial default value) for the reduction
4307 * @param reducer a commutative associative combining function
4308 * @return the result of accumulating the given transformation
4309 * of all keys
4310 */
4311 public double reduceKeysToDoubleInParallel
4312 (ToDoubleFunction<? super K> transformer,
4313 double basis,
4314 DoubleBinaryOperator reducer) {
4315 return ForkJoinTasks.reduceKeysToDouble
4316 (this, transformer, basis, reducer).invoke();
4317 }
4318
4319 /**
4320 * Returns the result of accumulating the given transformation
4321 * of all keys using the given reducer to combine values, and
4322 * the given basis as an identity value.
4323 *
4324 * @param transformer a function returning the transformation
4325 * for an element
4326 * @param basis the identity (initial default value) for the reduction
4327 * @param reducer a commutative associative combining function
4328 * @return the result of accumulating the given transformation
4329 * of all keys
4330 */
4331 public long reduceKeysToLongInParallel
4332 (ToLongFunction<? super K> transformer,
4333 long basis,
4334 LongBinaryOperator reducer) {
4335 return ForkJoinTasks.reduceKeysToLong
4336 (this, transformer, basis, reducer).invoke();
4337 }
4338
4339 /**
4340 * Returns the result of accumulating the given transformation
4341 * of all keys using the given reducer to combine values, and
4342 * the given basis as an identity value.
4343 *
4344 * @param transformer a function returning the transformation
4345 * for an element
4346 * @param basis the identity (initial default value) for the reduction
4347 * @param reducer a commutative associative combining function
4348 * @return the result of accumulating the given transformation
4349 * of all keys
4350 */
4351 public int reduceKeysToIntInParallel
4352 (ToIntFunction<? super K> transformer,
4353 int basis,
4354 IntBinaryOperator reducer) {
4355 return ForkJoinTasks.reduceKeysToInt
4356 (this, transformer, basis, reducer).invoke();
4357 }
4358
4359 /**
4360 * Performs the given action for each value.
4361 *
4362 * @param action the action
4363 */
4364 public void forEachValueInParallel(Consumer<? super V> action) {
4365 ForkJoinTasks.forEachValue
4366 (this, action).invoke();
4367 }
4368
4369 /**
4370 * Performs the given action for each non-null transformation
4371 * of each value.
4372 *
4373 * @param transformer a function returning the transformation
4374 * for an element, or null if there is no transformation (in
4375 * which case the action is not applied)
4376 * @param action the action
4377 */
4378 public <U> void forEachValueInParallel
4379 (Function<? super V, ? extends U> transformer,
4380 Consumer<? super U> action) {
4381 ForkJoinTasks.forEachValue
4382 (this, transformer, action).invoke();
4383 }
4384
4385 /**
4386 * Returns a non-null result from applying the given search
4387 * function on each value, or null if none. Upon success,
4388 * further element processing is suppressed and the results of
4389 * any other parallel invocations of the search function are
4390 * ignored.
4391 *
4392 * @param searchFunction a function returning a non-null
4393 * result on success, else null
4394 * @return a non-null result from applying the given search
4395 * function on each value, or null if none
4396 */
4397 public <U> U searchValuesInParallel
4398 (Function<? super V, ? extends U> searchFunction) {
4399 return ForkJoinTasks.searchValues
4400 (this, searchFunction).invoke();
4401 }
4402
4403 /**
4404 * Returns the result of accumulating all values using the
4405 * given reducer to combine values, or null if none.
4406 *
4407 * @param reducer a commutative associative combining function
4408 * @return the result of accumulating all values
4409 */
4410 public V reduceValuesInParallel
4411 (BiFunction<? super V, ? super V, ? extends V> reducer) {
4412 return ForkJoinTasks.reduceValues
4413 (this, reducer).invoke();
4414 }
4415
4416 /**
4417 * Returns the result of accumulating the given transformation
4418 * of all values using the given reducer to combine values, or
4419 * null if none.
4420 *
4421 * @param transformer a function returning the transformation
4422 * for an element, or null if there is no transformation (in
4423 * which case it is not combined)
4424 * @param reducer a commutative associative combining function
4425 * @return the result of accumulating the given transformation
4426 * of all values
4427 */
4428 public <U> U reduceValuesInParallel
4429 (Function<? super V, ? extends U> transformer,
4430 BiFunction<? super U, ? super U, ? extends U> reducer) {
4431 return ForkJoinTasks.reduceValues
4432 (this, transformer, reducer).invoke();
4433 }
4434
4435 /**
4436 * Returns the result of accumulating the given transformation
4437 * of all values using the given reducer to combine values,
4438 * and the given basis as an identity value.
4439 *
4440 * @param transformer a function returning the transformation
4441 * for an element
4442 * @param basis the identity (initial default value) for the reduction
4443 * @param reducer a commutative associative combining function
4444 * @return the result of accumulating the given transformation
4445 * of all values
4446 */
4447 public double reduceValuesToDoubleInParallel
4448 (ToDoubleFunction<? super V> transformer,
4449 double basis,
4450 DoubleBinaryOperator reducer) {
4451 return ForkJoinTasks.reduceValuesToDouble
4452 (this, transformer, basis, reducer).invoke();
4453 }
4454
4455 /**
4456 * Returns the result of accumulating the given transformation
4457 * of all values using the given reducer to combine values,
4458 * and the given basis as an identity value.
4459 *
4460 * @param transformer a function returning the transformation
4461 * for an element
4462 * @param basis the identity (initial default value) for the reduction
4463 * @param reducer a commutative associative combining function
4464 * @return the result of accumulating the given transformation
4465 * of all values
4466 */
4467 public long reduceValuesToLongInParallel
4468 (ToLongFunction<? super V> transformer,
4469 long basis,
4470 LongBinaryOperator reducer) {
4471 return ForkJoinTasks.reduceValuesToLong
4472 (this, transformer, basis, reducer).invoke();
4473 }
4474
4475 /**
4476 * Returns the result of accumulating the given transformation
4477 * of all values using the given reducer to combine values,
4478 * and the given basis as an identity value.
4479 *
4480 * @param transformer a function returning the transformation
4481 * for an element
4482 * @param basis the identity (initial default value) for the reduction
4483 * @param reducer a commutative associative combining function
4484 * @return the result of accumulating the given transformation
4485 * of all values
4486 */
4487 public int reduceValuesToIntInParallel
4488 (ToIntFunction<? super V> transformer,
4489 int basis,
4490 IntBinaryOperator reducer) {
4491 return ForkJoinTasks.reduceValuesToInt
4492 (this, transformer, basis, reducer).invoke();
4493 }
4494
4495 /**
4496 * Performs the given action for each entry.
4497 *
4498 * @param action the action
4499 */
4500 public void forEachEntryInParallel(Consumer<? super Map.Entry<K,V>> action) {
4501 ForkJoinTasks.forEachEntry
4502 (this, action).invoke();
4503 }
4504
4505 /**
4506 * Performs the given action for each non-null transformation
4507 * of each entry.
4508 *
4509 * @param transformer a function returning the transformation
4510 * for an element, or null if there is no transformation (in
4511 * which case the action is not applied)
4512 * @param action the action
4513 */
4514 public <U> void forEachEntryInParallel
4515 (Function<Map.Entry<K,V>, ? extends U> transformer,
4516 Consumer<? super U> action) {
4517 ForkJoinTasks.forEachEntry
4518 (this, transformer, action).invoke();
4519 }
4520
4521 /**
4522 * Returns a non-null result from applying the given search
4523 * function on each entry, or null if none. Upon success,
4524 * further element processing is suppressed and the results of
4525 * any other parallel invocations of the search function are
4526 * ignored.
4527 *
4528 * @param searchFunction a function returning a non-null
4529 * result on success, else null
4530 * @return a non-null result from applying the given search
4531 * function on each entry, or null if none
4532 */
4533 public <U> U searchEntriesInParallel
4534 (Function<Map.Entry<K,V>, ? extends U> searchFunction) {
4535 return ForkJoinTasks.searchEntries
4536 (this, searchFunction).invoke();
4537 }
4538
4539 /**
4540 * Returns the result of accumulating all entries using the
4541 * given reducer to combine values, or null if none.
4542 *
4543 * @param reducer a commutative associative combining function
4544 * @return the result of accumulating all entries
4545 */
4546 public Map.Entry<K,V> reduceEntriesInParallel
4547 (BiFunction<Map.Entry<K,V>, Map.Entry<K,V>, ? extends Map.Entry<K,V>> reducer) {
4548 return ForkJoinTasks.reduceEntries
4549 (this, reducer).invoke();
4550 }
4551
4552 /**
4553 * Returns the result of accumulating the given transformation
4554 * of all entries using the given reducer to combine values,
4555 * or null if none.
4556 *
4557 * @param transformer a function returning the transformation
4558 * for an element, or null if there is no transformation (in
4559 * which case it is not combined)
4560 * @param reducer a commutative associative combining function
4561 * @return the result of accumulating the given transformation
4562 * of all entries
4563 */
4564 public <U> U reduceEntriesInParallel
4565 (Function<Map.Entry<K,V>, ? extends U> transformer,
4566 BiFunction<? super U, ? super U, ? extends U> reducer) {
4567 return ForkJoinTasks.reduceEntries
4568 (this, transformer, reducer).invoke();
4569 }
4570
4571 /**
4572 * Returns the result of accumulating the given transformation
4573 * of all entries using the given reducer to combine values,
4574 * and the given basis as an identity value.
4575 *
4576 * @param transformer a function returning the transformation
4577 * for an element
4578 * @param basis the identity (initial default value) for the reduction
4579 * @param reducer a commutative associative combining function
4580 * @return the result of accumulating the given transformation
4581 * of all entries
4582 */
4583 public double reduceEntriesToDoubleInParallel
4584 (ToDoubleFunction<Map.Entry<K,V>> transformer,
4585 double basis,
4586 DoubleBinaryOperator reducer) {
4587 return ForkJoinTasks.reduceEntriesToDouble
4588 (this, transformer, basis, reducer).invoke();
4589 }
4590
4591 /**
4592 * Returns the result of accumulating the given transformation
4593 * of all entries using the given reducer to combine values,
4594 * and the given basis as an identity value.
4595 *
4596 * @param transformer a function returning the transformation
4597 * for an element
4598 * @param basis the identity (initial default value) for the reduction
4599 * @param reducer a commutative associative combining function
4600 * @return the result of accumulating the given transformation
4601 * of all entries
4602 */
4603 public long reduceEntriesToLongInParallel
4604 (ToLongFunction<Map.Entry<K,V>> transformer,
4605 long basis,
4606 LongBinaryOperator reducer) {
4607 return ForkJoinTasks.reduceEntriesToLong
4608 (this, transformer, basis, reducer).invoke();
4609 }
4610
4611 /**
4612 * Returns the result of accumulating the given transformation
4613 * of all entries using the given reducer to combine values,
4614 * and the given basis as an identity value.
4615 *
4616 * @param transformer a function returning the transformation
4617 * for an element
4618 * @param basis the identity (initial default value) for the reduction
4619 * @param reducer a commutative associative combining function
4620 * @return the result of accumulating the given transformation
4621 * of all entries
4622 */
4623 public int reduceEntriesToIntInParallel
4624 (ToIntFunction<Map.Entry<K,V>> transformer,
4625 int basis,
4626 IntBinaryOperator reducer) {
4627 return ForkJoinTasks.reduceEntriesToInt
4628 (this, transformer, basis, reducer).invoke();
4629 }
4630
4631
4632 /* ----------------Views -------------- */
4633
4634 /**
4635 * Base class for views.
4636 */
4637 abstract static class CHMCollectionView<K,V,E>
4638 implements Collection<E>, java.io.Serializable {
        private static final long serialVersionUID = 7249069246763182397L;
        /** The map backing this view; all operations delegate to it. */
        final ConcurrentHashMap<K,V> map;
        /** Creates a view backed by the given map. */
        CHMCollectionView(ConcurrentHashMap<K,V> map) { this.map = map; }
4642
        /**
         * Returns the map backing this view. The view is live:
         * operations such as {@code clear} and {@code size} delegate
         * directly to this map.
         *
         * @return the map backing this view
         */
        public ConcurrentHashMap<K,V> getMap() { return map; }
4649
        /**
         * Removes all of the elements from this view, by removing all
         * the mappings from the map backing this view.
         */
        public final void clear()      { map.clear(); }
        /** Returns the number of elements, delegating to the backing map. */
        public final int size()        { return map.size(); }
        /** Returns true if the backing map contains no mappings. */
        public final boolean isEmpty() { return map.isEmpty(); }
4657
        // implementations below rely on concrete classes supplying these
        // abstract methods
        /**
         * Returns a "weakly consistent" iterator that will never
         * throw {@link ConcurrentModificationException}, and
         * guarantees to traverse elements as they existed upon
         * construction of the iterator, and may (but is not
         * guaranteed to) reflect any modifications subsequent to
         * construction.
         */
        public abstract Iterator<E> iterator();
        /** Returns true if this view contains the given element (per-view semantics). */
        public abstract boolean contains(Object o);
        /** Removes the given element if present (per-view semantics). */
        public abstract boolean remove(Object o);
4672 private static final String oomeMsg = "Required array size too large";
4673
4674 public final Object[] toArray() {
4675 long sz = map.mappingCount();
4676 if (sz > MAX_ARRAY_SIZE)
4677 throw new OutOfMemoryError(oomeMsg);
4678 int n = (int)sz;
4679 Object[] r = new Object[n];
4680 int i = 0;
4681 for (E e : this) {
4682 if (i == n) {
4683 if (n >= MAX_ARRAY_SIZE)
4684 throw new OutOfMemoryError(oomeMsg);
4685 if (n >= MAX_ARRAY_SIZE - (MAX_ARRAY_SIZE >>> 1) - 1)
4686 n = MAX_ARRAY_SIZE;
4687 else
4688 n += (n >>> 1) + 1;
4689 r = Arrays.copyOf(r, n);
4690 }
4691 r[i++] = e;
4692 }
4693 return (i == n) ? r : Arrays.copyOf(r, i);
4694 }
4695
4696 @SuppressWarnings("unchecked")
4697 public final <T> T[] toArray(T[] a) {
4698 long sz = map.mappingCount();
4699 if (sz > MAX_ARRAY_SIZE)
4700 throw new OutOfMemoryError(oomeMsg);
4701 int m = (int)sz;
4702 T[] r = (a.length >= m) ? a :
4703 (T[])java.lang.reflect.Array
4704 .newInstance(a.getClass().getComponentType(), m);
4705 int n = r.length;
4706 int i = 0;
4707 for (E e : this) {
4708 if (i == n) {
4709 if (n >= MAX_ARRAY_SIZE)
4710 throw new OutOfMemoryError(oomeMsg);
4711 if (n >= MAX_ARRAY_SIZE - (MAX_ARRAY_SIZE >>> 1) - 1)
4712 n = MAX_ARRAY_SIZE;
4713 else
4714 n += (n >>> 1) + 1;
4715 r = Arrays.copyOf(r, n);
4716 }
4717 r[i++] = (T)e;
4718 }
4719 if (a == r && i < n) {
4720 r[i] = null; // null-terminate
4721 return r;
4722 }
4723 return (i == n) ? r : Arrays.copyOf(r, i);
4724 }
4725
4726 /**
4727 * Returns a string representation of this collection.
4728 * The string representation consists of the string representations
4729 * of the collection's elements in the order they are returned by
4730 * its iterator, enclosed in square brackets ({@code "[]"}).
4731 * Adjacent elements are separated by the characters {@code ", "}
4732 * (comma and space). Elements are converted to strings as by
4733 * {@link String#valueOf(Object)}.
4734 *
4735 * @return a string representation of this collection
4736 */
4737 public final String toString() {
4738 StringBuilder sb = new StringBuilder();
4739 sb.append('[');
4740 Iterator<E> it = iterator();
4741 if (it.hasNext()) {
4742 for (;;) {
4743 Object e = it.next();
4744 sb.append(e == this ? "(this Collection)" : e);
4745 if (!it.hasNext())
4746 break;
4747 sb.append(',').append(' ');
4748 }
4749 }
4750 return sb.append(']').toString();
4751 }
4752
4753 public final boolean containsAll(Collection<?> c) {
4754 if (c != this) {
4755 for (Object e : c) {
4756 if (e == null || !contains(e))
4757 return false;
4758 }
4759 }
4760 return true;
4761 }
4762
4763 public final boolean removeAll(Collection<?> c) {
4764 boolean modified = false;
4765 for (Iterator<E> it = iterator(); it.hasNext();) {
4766 if (c.contains(it.next())) {
4767 it.remove();
4768 modified = true;
4769 }
4770 }
4771 return modified;
4772 }
4773
4774 public final boolean retainAll(Collection<?> c) {
4775 boolean modified = false;
4776 for (Iterator<E> it = iterator(); it.hasNext();) {
4777 if (!c.contains(it.next())) {
4778 it.remove();
4779 modified = true;
4780 }
4781 }
4782 return modified;
4783 }
4784
4785 }
4786
4787 abstract static class CHMSetView<K,V,E>
4788 extends CHMCollectionView<K,V,E>
4789 implements Set<E>, java.io.Serializable {
4790 private static final long serialVersionUID = 7249069246763182397L;
4791 CHMSetView(ConcurrentHashMap<K,V> map) { super(map); }
4792
4793 // Implement Set API
4794
4795 /**
4796 * Implements {@link Set#hashCode()}.
4797 * @return the hash code value for this set
4798 */
4799 public final int hashCode() {
4800 int h = 0;
4801 for (E e : this)
4802 h += e.hashCode();
4803 return h;
4804 }
4805
4806 /**
4807 * Implements {@link Set#equals(Object)}.
4808 * @param o object to be compared for equality with this set
4809 * @return {@code true} if the specified object is equal to this set
4810 */
4811 public final boolean equals(Object o) {
4812 Set<?> c;
4813 return ((o instanceof Set) &&
4814 ((c = (Set<?>)o) == this ||
4815 (containsAll(c) && c.containsAll(this))));
4816 }
4817 }
4818
4819 /**
4820 * A view of a ConcurrentHashMap as a {@link Set} of keys, in
4821 * which additions may optionally be enabled by mapping to a
4822 * common value. This class cannot be directly instantiated.
4823 * See {@link #keySet() keySet()},
4824 * {@link #keySet(Object) keySet(V)},
4825 * {@link #newKeySet() newKeySet()},
4826 * {@link #newKeySet(int) newKeySet(int)}.
4827 */
4828 public static class KeySetView<K,V>
4829 extends CHMSetView<K,V,K>
4830 implements Set<K>, java.io.Serializable {
4831 private static final long serialVersionUID = 7249069246763182397L;
4832 private final V value;
4833 KeySetView(ConcurrentHashMap<K,V> map, V value) { // non-public
4834 super(map);
4835 this.value = value;
4836 }
4837
4838 /**
4839 * Returns the default mapped value for additions,
4840 * or {@code null} if additions are not supported.
4841 *
4842 * @return the default mapped value for additions, or {@code null}
4843 * if not supported
4844 */
4845 public V getMappedValue() { return value; }
4846
4847 /**
4848 * {@inheritDoc}
4849 * @throws NullPointerException if the specified key is null
4850 */
4851 public boolean contains(Object o) { return map.containsKey(o); }
4852
4853 /**
4854 * Removes the key from this map view, by removing the key (and its
4855 * corresponding value) from the backing map. This method does
4856 * nothing if the key is not in the map.
4857 *
4858 * @param o the key to be removed from the backing map
4859 * @return {@code true} if the backing map contained the specified key
4860 * @throws NullPointerException if the specified key is null
4861 */
4862 public boolean remove(Object o) { return map.remove(o) != null; }
4863
4864 /**
4865 * @return an iterator over the keys of the backing map
4866 */
4867 public Iterator<K> iterator() { return new KeyIterator<K,V>(map); }
4868
4869 /**
4870 * Adds the specified key to this set view by mapping the key to
4871 * the default mapped value in the backing map, if defined.
4872 *
4873 * @param e key to be added
4874 * @return {@code true} if this set changed as a result of the call
4875 * @throws NullPointerException if the specified key is null
4876 * @throws UnsupportedOperationException if no default mapped value
4877 * for additions was provided
4878 */
4879 public boolean add(K e) {
4880 V v;
4881 if ((v = value) == null)
4882 throw new UnsupportedOperationException();
4883 return map.internalPut(e, v, true) == null;
4884 }
4885
4886 /**
4887 * Adds all of the elements in the specified collection to this set,
4888 * as if by calling {@link #add} on each one.
4889 *
4890 * @param c the elements to be inserted into this set
4891 * @return {@code true} if this set changed as a result of the call
4892 * @throws NullPointerException if the collection or any of its
4893 * elements are {@code null}
4894 * @throws UnsupportedOperationException if no default mapped value
4895 * for additions was provided
4896 */
4897 public boolean addAll(Collection<? extends K> c) {
4898 boolean added = false;
4899 V v;
4900 if ((v = value) == null)
4901 throw new UnsupportedOperationException();
4902 for (K e : c) {
4903 if (map.internalPut(e, v, true) == null)
4904 added = true;
4905 }
4906 return added;
4907 }
4908
4909 public Spliterator<K> spliterator() {
4910 return new KeyIterator<>(map, null);
4911 }
4912
4913 }
4914
4915 /**
4916 * A view of a ConcurrentHashMap as a {@link Collection} of
4917 * values, in which additions are disabled. This class cannot be
4918 * directly instantiated. See {@link #values()}.
4919 *
4920 * <p>The view's {@code iterator} is a "weakly consistent" iterator
4921 * that will never throw {@link ConcurrentModificationException},
4922 * and guarantees to traverse elements as they existed upon
4923 * construction of the iterator, and may (but is not guaranteed to)
4924 * reflect any modifications subsequent to construction.
4925 */
4926 public static final class ValuesView<K,V>
4927 extends CHMCollectionView<K,V,V>
4928 implements Collection<V>, java.io.Serializable {
4929 private static final long serialVersionUID = 2249069246763182397L;
4930 ValuesView(ConcurrentHashMap<K,V> map) { super(map); }
4931 public final boolean contains(Object o) {
4932 return map.containsValue(o);
4933 }
4934 public final boolean remove(Object o) {
4935 if (o != null) {
4936 for (Iterator<V> it = iterator(); it.hasNext();) {
4937 if (o.equals(it.next())) {
4938 it.remove();
4939 return true;
4940 }
4941 }
4942 }
4943 return false;
4944 }
4945
4946 /**
4947 * @return an iterator over the values of the backing map
4948 */
4949 public final Iterator<V> iterator() {
4950 return new ValueIterator<K,V>(map);
4951 }
4952
4953 /** Always throws {@link UnsupportedOperationException}. */
4954 public final boolean add(V e) {
4955 throw new UnsupportedOperationException();
4956 }
4957 /** Always throws {@link UnsupportedOperationException}. */
4958 public final boolean addAll(Collection<? extends V> c) {
4959 throw new UnsupportedOperationException();
4960 }
4961
4962 public Spliterator<V> spliterator() {
4963 return new ValueIterator<K,V>(map, null);
4964 }
4965
4966 }
4967
4968 /**
4969 * A view of a ConcurrentHashMap as a {@link Set} of (key, value)
4970 * entries. This class cannot be directly instantiated. See
4971 * {@link #entrySet()}.
4972 */
4973 public static final class EntrySetView<K,V>
4974 extends CHMSetView<K,V,Map.Entry<K,V>>
4975 implements Set<Map.Entry<K,V>>, java.io.Serializable {
4976 private static final long serialVersionUID = 2249069246763182397L;
4977 EntrySetView(ConcurrentHashMap<K,V> map) { super(map); }
4978
4979 public final boolean contains(Object o) {
4980 Object k, v, r; Map.Entry<?,?> e;
4981 return ((o instanceof Map.Entry) &&
4982 (k = (e = (Map.Entry<?,?>)o).getKey()) != null &&
4983 (r = map.get(k)) != null &&
4984 (v = e.getValue()) != null &&
4985 (v == r || v.equals(r)));
4986 }
4987 public final boolean remove(Object o) {
4988 Object k, v; Map.Entry<?,?> e;
4989 return ((o instanceof Map.Entry) &&
4990 (k = (e = (Map.Entry<?,?>)o).getKey()) != null &&
4991 (v = e.getValue()) != null &&
4992 map.remove(k, v));
4993 }
4994
4995 /**
4996 * @return an iterator over the entries of the backing map
4997 */
4998 public final Iterator<Map.Entry<K,V>> iterator() {
4999 return new EntryIterator<K,V>(map);
5000 }
5001
5002 /**
5003 * Adds the specified mapping to this view.
5004 *
5005 * @param e mapping to be added
5006 * @return {@code true} if this set changed as a result of the call
5007 * @throws NullPointerException if the entry, its key, or its
5008 * value is null
5009 */
5010 public final boolean add(Entry<K,V> e) {
5011 return map.internalPut(e.getKey(), e.getValue(), false) == null;
5012 }
5013
5014 /**
5015 * Adds all of the mappings in the specified collection to this
5016 * set, as if by calling {@link #add(Map.Entry)} on each one.
5017 * @param c the mappings to be inserted into this set
5018 * @return {@code true} if this set changed as a result of the call
5019 * @throws NullPointerException if the collection or any of its
5020 * entries, keys, or values are null
5021 */
5022 public final boolean addAll(Collection<? extends Entry<K,V>> c) {
5023 boolean added = false;
5024 for (Entry<K,V> e : c) {
5025 if (add(e))
5026 added = true;
5027 }
5028 return added;
5029 }
5030
5031 public Spliterator<Map.Entry<K,V>> spliterator() {
5032 return new EntryIterator<K,V>(map, null);
5033 }
5034
5035 }
5036
5037 // ---------------------------------------------------------------------
5038
5039 /**
5040 * Predefined tasks for performing bulk parallel operations on
5041 * ConcurrentHashMaps. These tasks follow the forms and rules used
5042 * for bulk operations. Each method has the same name, but returns
5043 * a task rather than invoking it. These methods may be useful in
5044 * custom applications such as submitting a task without waiting
5045 * for completion, using a custom pool, or combining with other
5046 * tasks.
5047 */
5048 public static class ForkJoinTasks {
5049 private ForkJoinTasks() {}
5050
5051 /**
5052 * Returns a task that when invoked, performs the given
5053 * action for each (key, value)
5054 *
5055 * @param map the map
5056 * @param action the action
5057 * @return the task
5058 */
5059 public static <K,V> ForkJoinTask<Void> forEach
5060 (ConcurrentHashMap<K,V> map,
5061 BiConsumer<? super K, ? super V> action) {
5062 if (action == null) throw new NullPointerException();
5063 return new ForEachMappingTask<K,V>(map, null, -1, action);
5064 }
5065
5066 /**
5067 * Returns a task that when invoked, performs the given
5068 * action for each non-null transformation of each (key, value)
5069 *
5070 * @param map the map
5071 * @param transformer a function returning the transformation
5072 * for an element, or null if there is no transformation (in
5073 * which case the action is not applied)
5074 * @param action the action
5075 * @return the task
5076 */
5077 public static <K,V,U> ForkJoinTask<Void> forEach
5078 (ConcurrentHashMap<K,V> map,
5079 BiFunction<? super K, ? super V, ? extends U> transformer,
5080 Consumer<? super U> action) {
5081 if (transformer == null || action == null)
5082 throw new NullPointerException();
5083 return new ForEachTransformedMappingTask<K,V,U>
5084 (map, null, -1, transformer, action);
5085 }
5086
5087 /**
5088 * Returns a task that when invoked, returns a non-null result
5089 * from applying the given search function on each (key,
5090 * value), or null if none. Upon success, further element
5091 * processing is suppressed and the results of any other
5092 * parallel invocations of the search function are ignored.
5093 *
5094 * @param map the map
5095 * @param searchFunction a function returning a non-null
5096 * result on success, else null
5097 * @return the task
5098 */
5099 public static <K,V,U> ForkJoinTask<U> search
5100 (ConcurrentHashMap<K,V> map,
5101 BiFunction<? super K, ? super V, ? extends U> searchFunction) {
5102 if (searchFunction == null) throw new NullPointerException();
5103 return new SearchMappingsTask<K,V,U>
5104 (map, null, -1, searchFunction,
5105 new AtomicReference<U>());
5106 }
5107
5108 /**
5109 * Returns a task that when invoked, returns the result of
5110 * accumulating the given transformation of all (key, value) pairs
5111 * using the given reducer to combine values, or null if none.
5112 *
5113 * @param map the map
5114 * @param transformer a function returning the transformation
5115 * for an element, or null if there is no transformation (in
5116 * which case it is not combined)
5117 * @param reducer a commutative associative combining function
5118 * @return the task
5119 */
5120 public static <K,V,U> ForkJoinTask<U> reduce
5121 (ConcurrentHashMap<K,V> map,
5122 BiFunction<? super K, ? super V, ? extends U> transformer,
5123 BiFunction<? super U, ? super U, ? extends U> reducer) {
5124 if (transformer == null || reducer == null)
5125 throw new NullPointerException();
5126 return new MapReduceMappingsTask<K,V,U>
5127 (map, null, -1, null, transformer, reducer);
5128 }
5129
5130 /**
5131 * Returns a task that when invoked, returns the result of
5132 * accumulating the given transformation of all (key, value) pairs
5133 * using the given reducer to combine values, and the given
5134 * basis as an identity value.
5135 *
5136 * @param map the map
5137 * @param transformer a function returning the transformation
5138 * for an element
5139 * @param basis the identity (initial default value) for the reduction
5140 * @param reducer a commutative associative combining function
5141 * @return the task
5142 */
5143 public static <K,V> ForkJoinTask<Double> reduceToDouble
5144 (ConcurrentHashMap<K,V> map,
5145 ToDoubleBiFunction<? super K, ? super V> transformer,
5146 double basis,
5147 DoubleBinaryOperator reducer) {
5148 if (transformer == null || reducer == null)
5149 throw new NullPointerException();
5150 return new MapReduceMappingsToDoubleTask<K,V>
5151 (map, null, -1, null, transformer, basis, reducer);
5152 }
5153
5154 /**
5155 * Returns a task that when invoked, returns the result of
5156 * accumulating the given transformation of all (key, value) pairs
5157 * using the given reducer to combine values, and the given
5158 * basis as an identity value.
5159 *
5160 * @param map the map
5161 * @param transformer a function returning the transformation
5162 * for an element
5163 * @param basis the identity (initial default value) for the reduction
5164 * @param reducer a commutative associative combining function
5165 * @return the task
5166 */
5167 public static <K,V> ForkJoinTask<Long> reduceToLong
5168 (ConcurrentHashMap<K,V> map,
5169 ToLongBiFunction<? super K, ? super V> transformer,
5170 long basis,
5171 LongBinaryOperator reducer) {
5172 if (transformer == null || reducer == null)
5173 throw new NullPointerException();
5174 return new MapReduceMappingsToLongTask<K,V>
5175 (map, null, -1, null, transformer, basis, reducer);
5176 }
5177
5178 /**
5179 * Returns a task that when invoked, returns the result of
5180 * accumulating the given transformation of all (key, value) pairs
5181 * using the given reducer to combine values, and the given
5182 * basis as an identity value.
5183 *
5184 * @param map the map
5185 * @param transformer a function returning the transformation
5186 * for an element
5187 * @param basis the identity (initial default value) for the reduction
5188 * @param reducer a commutative associative combining function
5189 * @return the task
5190 */
5191 public static <K,V> ForkJoinTask<Integer> reduceToInt
5192 (ConcurrentHashMap<K,V> map,
5193 ToIntBiFunction<? super K, ? super V> transformer,
5194 int basis,
5195 IntBinaryOperator reducer) {
5196 if (transformer == null || reducer == null)
5197 throw new NullPointerException();
5198 return new MapReduceMappingsToIntTask<K,V>
5199 (map, null, -1, null, transformer, basis, reducer);
5200 }
5201
5202 /**
5203 * Returns a task that when invoked, performs the given action
5204 * for each key.
5205 *
5206 * @param map the map
5207 * @param action the action
5208 * @return the task
5209 */
5210 public static <K,V> ForkJoinTask<Void> forEachKey
5211 (ConcurrentHashMap<K,V> map,
5212 Consumer<? super K> action) {
5213 if (action == null) throw new NullPointerException();
5214 return new ForEachKeyTask<K,V>(map, null, -1, action);
5215 }
5216
5217 /**
5218 * Returns a task that when invoked, performs the given action
5219 * for each non-null transformation of each key.
5220 *
5221 * @param map the map
5222 * @param transformer a function returning the transformation
5223 * for an element, or null if there is no transformation (in
5224 * which case the action is not applied)
5225 * @param action the action
5226 * @return the task
5227 */
5228 public static <K,V,U> ForkJoinTask<Void> forEachKey
5229 (ConcurrentHashMap<K,V> map,
5230 Function<? super K, ? extends U> transformer,
5231 Consumer<? super U> action) {
5232 if (transformer == null || action == null)
5233 throw new NullPointerException();
5234 return new ForEachTransformedKeyTask<K,V,U>
5235 (map, null, -1, transformer, action);
5236 }
5237
5238 /**
5239 * Returns a task that when invoked, returns a non-null result
5240 * from applying the given search function on each key, or
5241 * null if none. Upon success, further element processing is
5242 * suppressed and the results of any other parallel
5243 * invocations of the search function are ignored.
5244 *
5245 * @param map the map
5246 * @param searchFunction a function returning a non-null
5247 * result on success, else null
5248 * @return the task
5249 */
5250 public static <K,V,U> ForkJoinTask<U> searchKeys
5251 (ConcurrentHashMap<K,V> map,
5252 Function<? super K, ? extends U> searchFunction) {
5253 if (searchFunction == null) throw new NullPointerException();
5254 return new SearchKeysTask<K,V,U>
5255 (map, null, -1, searchFunction,
5256 new AtomicReference<U>());
5257 }
5258
5259 /**
5260 * Returns a task that when invoked, returns the result of
5261 * accumulating all keys using the given reducer to combine
5262 * values, or null if none.
5263 *
5264 * @param map the map
5265 * @param reducer a commutative associative combining function
5266 * @return the task
5267 */
5268 public static <K,V> ForkJoinTask<K> reduceKeys
5269 (ConcurrentHashMap<K,V> map,
5270 BiFunction<? super K, ? super K, ? extends K> reducer) {
5271 if (reducer == null) throw new NullPointerException();
5272 return new ReduceKeysTask<K,V>
5273 (map, null, -1, null, reducer);
5274 }
5275
5276 /**
5277 * Returns a task that when invoked, returns the result of
5278 * accumulating the given transformation of all keys using the given
5279 * reducer to combine values, or null if none.
5280 *
5281 * @param map the map
5282 * @param transformer a function returning the transformation
5283 * for an element, or null if there is no transformation (in
5284 * which case it is not combined)
5285 * @param reducer a commutative associative combining function
5286 * @return the task
5287 */
5288 public static <K,V,U> ForkJoinTask<U> reduceKeys
5289 (ConcurrentHashMap<K,V> map,
5290 Function<? super K, ? extends U> transformer,
5291 BiFunction<? super U, ? super U, ? extends U> reducer) {
5292 if (transformer == null || reducer == null)
5293 throw new NullPointerException();
5294 return new MapReduceKeysTask<K,V,U>
5295 (map, null, -1, null, transformer, reducer);
5296 }
5297
5298 /**
5299 * Returns a task that when invoked, returns the result of
5300 * accumulating the given transformation of all keys using the given
5301 * reducer to combine values, and the given basis as an
5302 * identity value.
5303 *
5304 * @param map the map
5305 * @param transformer a function returning the transformation
5306 * for an element
5307 * @param basis the identity (initial default value) for the reduction
5308 * @param reducer a commutative associative combining function
5309 * @return the task
5310 */
5311 public static <K,V> ForkJoinTask<Double> reduceKeysToDouble
5312 (ConcurrentHashMap<K,V> map,
5313 ToDoubleFunction<? super K> transformer,
5314 double basis,
5315 DoubleBinaryOperator reducer) {
5316 if (transformer == null || reducer == null)
5317 throw new NullPointerException();
5318 return new MapReduceKeysToDoubleTask<K,V>
5319 (map, null, -1, null, transformer, basis, reducer);
5320 }
5321
5322 /**
5323 * Returns a task that when invoked, returns the result of
5324 * accumulating the given transformation of all keys using the given
5325 * reducer to combine values, and the given basis as an
5326 * identity value.
5327 *
5328 * @param map the map
5329 * @param transformer a function returning the transformation
5330 * for an element
5331 * @param basis the identity (initial default value) for the reduction
5332 * @param reducer a commutative associative combining function
5333 * @return the task
5334 */
5335 public static <K,V> ForkJoinTask<Long> reduceKeysToLong
5336 (ConcurrentHashMap<K,V> map,
5337 ToLongFunction<? super K> transformer,
5338 long basis,
5339 LongBinaryOperator reducer) {
5340 if (transformer == null || reducer == null)
5341 throw new NullPointerException();
5342 return new MapReduceKeysToLongTask<K,V>
5343 (map, null, -1, null, transformer, basis, reducer);
5344 }
5345
5346 /**
5347 * Returns a task that when invoked, returns the result of
5348 * accumulating the given transformation of all keys using the given
5349 * reducer to combine values, and the given basis as an
5350 * identity value.
5351 *
5352 * @param map the map
5353 * @param transformer a function returning the transformation
5354 * for an element
5355 * @param basis the identity (initial default value) for the reduction
5356 * @param reducer a commutative associative combining function
5357 * @return the task
5358 */
5359 public static <K,V> ForkJoinTask<Integer> reduceKeysToInt
5360 (ConcurrentHashMap<K,V> map,
5361 ToIntFunction<? super K> transformer,
5362 int basis,
5363 IntBinaryOperator reducer) {
5364 if (transformer == null || reducer == null)
5365 throw new NullPointerException();
5366 return new MapReduceKeysToIntTask<K,V>
5367 (map, null, -1, null, transformer, basis, reducer);
5368 }
5369
5370 /**
5371 * Returns a task that when invoked, performs the given action
5372 * for each value.
5373 *
5374 * @param map the map
5375 * @param action the action
5376 * @return the task
5377 */
5378 public static <K,V> ForkJoinTask<Void> forEachValue
5379 (ConcurrentHashMap<K,V> map,
5380 Consumer<? super V> action) {
5381 if (action == null) throw new NullPointerException();
5382 return new ForEachValueTask<K,V>(map, null, -1, action);
5383 }
5384
5385 /**
5386 * Returns a task that when invoked, performs the given action
5387 * for each non-null transformation of each value.
5388 *
5389 * @param map the map
5390 * @param transformer a function returning the transformation
5391 * for an element, or null if there is no transformation (in
5392 * which case the action is not applied)
5393 * @param action the action
5394 * @return the task
5395 */
5396 public static <K,V,U> ForkJoinTask<Void> forEachValue
5397 (ConcurrentHashMap<K,V> map,
5398 Function<? super V, ? extends U> transformer,
5399 Consumer<? super U> action) {
5400 if (transformer == null || action == null)
5401 throw new NullPointerException();
5402 return new ForEachTransformedValueTask<K,V,U>
5403 (map, null, -1, transformer, action);
5404 }
5405
5406 /**
5407 * Returns a task that when invoked, returns a non-null result
5408 * from applying the given search function on each value, or
5409 * null if none. Upon success, further element processing is
5410 * suppressed and the results of any other parallel
5411 * invocations of the search function are ignored.
5412 *
5413 * @param map the map
5414 * @param searchFunction a function returning a non-null
5415 * result on success, else null
5416 * @return the task
5417 */
5418 public static <K,V,U> ForkJoinTask<U> searchValues
5419 (ConcurrentHashMap<K,V> map,
5420 Function<? super V, ? extends U> searchFunction) {
5421 if (searchFunction == null) throw new NullPointerException();
5422 return new SearchValuesTask<K,V,U>
5423 (map, null, -1, searchFunction,
5424 new AtomicReference<U>());
5425 }
5426
5427 /**
5428 * Returns a task that when invoked, returns the result of
5429 * accumulating all values using the given reducer to combine
5430 * values, or null if none.
5431 *
5432 * @param map the map
5433 * @param reducer a commutative associative combining function
5434 * @return the task
5435 */
5436 public static <K,V> ForkJoinTask<V> reduceValues
5437 (ConcurrentHashMap<K,V> map,
5438 BiFunction<? super V, ? super V, ? extends V> reducer) {
5439 if (reducer == null) throw new NullPointerException();
5440 return new ReduceValuesTask<K,V>
5441 (map, null, -1, null, reducer);
5442 }
5443
5444 /**
5445 * Returns a task that when invoked, returns the result of
5446 * accumulating the given transformation of all values using the
5447 * given reducer to combine values, or null if none.
5448 *
5449 * @param map the map
5450 * @param transformer a function returning the transformation
5451 * for an element, or null if there is no transformation (in
5452 * which case it is not combined)
5453 * @param reducer a commutative associative combining function
5454 * @return the task
5455 */
5456 public static <K,V,U> ForkJoinTask<U> reduceValues
5457 (ConcurrentHashMap<K,V> map,
5458 Function<? super V, ? extends U> transformer,
5459 BiFunction<? super U, ? super U, ? extends U> reducer) {
5460 if (transformer == null || reducer == null)
5461 throw new NullPointerException();
5462 return new MapReduceValuesTask<K,V,U>
5463 (map, null, -1, null, transformer, reducer);
5464 }
5465
5466 /**
5467 * Returns a task that when invoked, returns the result of
5468 * accumulating the given transformation of all values using the
5469 * given reducer to combine values, and the given basis as an
5470 * identity value.
5471 *
5472 * @param map the map
5473 * @param transformer a function returning the transformation
5474 * for an element
5475 * @param basis the identity (initial default value) for the reduction
5476 * @param reducer a commutative associative combining function
5477 * @return the task
5478 */
5479 public static <K,V> ForkJoinTask<Double> reduceValuesToDouble
5480 (ConcurrentHashMap<K,V> map,
5481 ToDoubleFunction<? super V> transformer,
5482 double basis,
5483 DoubleBinaryOperator reducer) {
5484 if (transformer == null || reducer == null)
5485 throw new NullPointerException();
5486 return new MapReduceValuesToDoubleTask<K,V>
5487 (map, null, -1, null, transformer, basis, reducer);
5488 }
5489
5490 /**
5491 * Returns a task that when invoked, returns the result of
5492 * accumulating the given transformation of all values using the
5493 * given reducer to combine values, and the given basis as an
5494 * identity value.
5495 *
5496 * @param map the map
5497 * @param transformer a function returning the transformation
5498 * for an element
5499 * @param basis the identity (initial default value) for the reduction
5500 * @param reducer a commutative associative combining function
5501 * @return the task
5502 */
5503 public static <K,V> ForkJoinTask<Long> reduceValuesToLong
5504 (ConcurrentHashMap<K,V> map,
5505 ToLongFunction<? super V> transformer,
5506 long basis,
5507 LongBinaryOperator reducer) {
5508 if (transformer == null || reducer == null)
5509 throw new NullPointerException();
5510 return new MapReduceValuesToLongTask<K,V>
5511 (map, null, -1, null, transformer, basis, reducer);
5512 }
5513
5514 /**
5515 * Returns a task that when invoked, returns the result of
5516 * accumulating the given transformation of all values using the
5517 * given reducer to combine values, and the given basis as an
5518 * identity value.
5519 *
5520 * @param map the map
5521 * @param transformer a function returning the transformation
5522 * for an element
5523 * @param basis the identity (initial default value) for the reduction
5524 * @param reducer a commutative associative combining function
5525 * @return the task
5526 */
5527 public static <K,V> ForkJoinTask<Integer> reduceValuesToInt
5528 (ConcurrentHashMap<K,V> map,
5529 ToIntFunction<? super V> transformer,
5530 int basis,
5531 IntBinaryOperator reducer) {
5532 if (transformer == null || reducer == null)
5533 throw new NullPointerException();
5534 return new MapReduceValuesToIntTask<K,V>
5535 (map, null, -1, null, transformer, basis, reducer);
5536 }
5537
5538 /**
         * Returns a task that when invoked, performs the given action
5540 * for each entry.
5541 *
5542 * @param map the map
5543 * @param action the action
5544 * @return the task
5545 */
5546 public static <K,V> ForkJoinTask<Void> forEachEntry
5547 (ConcurrentHashMap<K,V> map,
5548 Consumer<? super Map.Entry<K,V>> action) {
5549 if (action == null) throw new NullPointerException();
5550 return new ForEachEntryTask<K,V>(map, null, -1, action);
5551 }
5552
5553 /**
         * Returns a task that when invoked, performs the given action
5555 * for each non-null transformation of each entry.
5556 *
5557 * @param map the map
5558 * @param transformer a function returning the transformation
5559 * for an element, or null if there is no transformation (in
5560 * which case the action is not applied)
5561 * @param action the action
5562 * @return the task
5563 */
5564 public static <K,V,U> ForkJoinTask<Void> forEachEntry
5565 (ConcurrentHashMap<K,V> map,
5566 Function<Map.Entry<K,V>, ? extends U> transformer,
5567 Consumer<? super U> action) {
5568 if (transformer == null || action == null)
5569 throw new NullPointerException();
5570 return new ForEachTransformedEntryTask<K,V,U>
5571 (map, null, -1, transformer, action);
5572 }
5573
/**
 * Returns a task that when invoked, returns a non-null result
 * from applying the given search function on each entry, or
 * null if none. Upon success, further element processing is
 * suppressed and the results of any other parallel
 * invocations of the search function are ignored.
 *
 * @param map the map
 * @param searchFunction a function returning a non-null
 * result on success, else null
 * @return the task
 */
public static <K,V,U> ForkJoinTask<U> searchEntries
    (ConcurrentHashMap<K,V> map,
     Function<Map.Entry<K,V>, ? extends U> searchFunction) {
    if (searchFunction == null) throw new NullPointerException();
    // The freshly created AtomicReference is shared by all subtasks;
    // the first successful CAS into it publishes the result.
    return new SearchEntriesTask<K,V,U>
        (map, null, -1, searchFunction,
         new AtomicReference<U>());
}
5594
/**
 * Returns a task that when invoked, returns the result of
 * accumulating all entries using the given reducer to combine
 * values, or null if none.
 *
 * @param map the map
 * @param reducer a commutative associative combining function
 * @return the task
 */
public static <K,V> ForkJoinTask<Map.Entry<K,V>> reduceEntries
    (ConcurrentHashMap<K,V> map,
     BiFunction<Map.Entry<K,V>, Map.Entry<K,V>, ? extends Map.Entry<K,V>> reducer) {
    if (reducer == null) throw new NullPointerException();
    // Root task: null parent, -1 batch, null nextRight.
    return new ReduceEntriesTask<K,V>
        (map, null, -1, null, reducer);
}
5611
/**
 * Returns a task that when invoked, returns the result of
 * accumulating the given transformation of all entries using the
 * given reducer to combine values, or null if none.
 *
 * @param map the map
 * @param transformer a function returning the transformation
 * for an element, or null if there is no transformation (in
 * which case it is not combined)
 * @param reducer a commutative associative combining function
 * @return the task
 */
public static <K,V,U> ForkJoinTask<U> reduceEntries
    (ConcurrentHashMap<K,V> map,
     Function<Map.Entry<K,V>, ? extends U> transformer,
     BiFunction<? super U, ? super U, ? extends U> reducer) {
    if (transformer == null || reducer == null)
        throw new NullPointerException();
    // Root task: null parent, -1 batch, null nextRight.
    return new MapReduceEntriesTask<K,V,U>
        (map, null, -1, null, transformer, reducer);
}
5633
/**
 * Returns a task that when invoked, returns the result of
 * accumulating the given transformation of all entries using the
 * given reducer to combine values, and the given basis as an
 * identity value.
 *
 * @param map the map
 * @param transformer a function returning the transformation
 * for an element
 * @param basis the identity (initial default value) for the reduction
 * @param reducer a commutative associative combining function
 * @return the task
 */
public static <K,V> ForkJoinTask<Double> reduceEntriesToDouble
    (ConcurrentHashMap<K,V> map,
     ToDoubleFunction<Map.Entry<K,V>> transformer,
     double basis,
     DoubleBinaryOperator reducer) {
    if (transformer == null || reducer == null)
        throw new NullPointerException();
    // Root task: null parent, -1 batch, null nextRight.
    return new MapReduceEntriesToDoubleTask<K,V>
        (map, null, -1, null, transformer, basis, reducer);
}
5657
/**
 * Returns a task that when invoked, returns the result of
 * accumulating the given transformation of all entries using the
 * given reducer to combine values, and the given basis as an
 * identity value.
 *
 * @param map the map
 * @param transformer a function returning the transformation
 * for an element
 * @param basis the identity (initial default value) for the reduction
 * @param reducer a commutative associative combining function
 * @return the task
 */
public static <K,V> ForkJoinTask<Long> reduceEntriesToLong
    (ConcurrentHashMap<K,V> map,
     ToLongFunction<Map.Entry<K,V>> transformer,
     long basis,
     LongBinaryOperator reducer) {
    if (transformer == null || reducer == null)
        throw new NullPointerException();
    // Root task: null parent, -1 batch, null nextRight.
    return new MapReduceEntriesToLongTask<K,V>
        (map, null, -1, null, transformer, basis, reducer);
}
5681
/**
 * Returns a task that when invoked, returns the result of
 * accumulating the given transformation of all entries using the
 * given reducer to combine values, and the given basis as an
 * identity value.
 *
 * @param map the map
 * @param transformer a function returning the transformation
 * for an element
 * @param basis the identity (initial default value) for the reduction
 * @param reducer a commutative associative combining function
 * @return the task
 */
public static <K,V> ForkJoinTask<Integer> reduceEntriesToInt
    (ConcurrentHashMap<K,V> map,
     ToIntFunction<Map.Entry<K,V>> transformer,
     int basis,
     IntBinaryOperator reducer) {
    if (transformer == null || reducer == null)
        throw new NullPointerException();
    // Root task: null parent, -1 batch, null nextRight.
    return new MapReduceEntriesToIntTask<K,V>
        (map, null, -1, null, transformer, basis, reducer);
}
5705 }
5706
5707 // -------------------------------------------------------
5708
5709 /*
5710 * Task classes. Coded in a regular but ugly format/style to
5711 * simplify checks that each variant differs in the right way from
5712 * others. The null screenings exist because compilers cannot tell
5713 * that we've already null-checked task arguments, so we force
5714 * simplest hoisted bypass to help avoid convoluted traps.
5715 */
5716
// Task applying a Consumer to every key.
@SuppressWarnings("serial") static final class ForEachKeyTask<K,V>
    extends Traverser<K,V,Void> {
    final Consumer<? super K> action;
    ForEachKeyTask
        (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
         Consumer<? super K> action) {
        super(m, p, b);
        this.action = action;
    }
    public final void compute() {
        final Consumer<? super K> action;
        // Null screen: see the "Task classes" comment above.
        if ((action = this.action) != null) {
            // Fork right-hand subtasks while the traversal can still be
            // split, then process this task's own remaining portion.
            for (int b; (b = preSplit()) > 0;)
                new ForEachKeyTask<K,V>(map, this, b, action).fork();
            forEachKey(action);
            propagateCompletion();
        }
    }
}
5736
// Task applying a Consumer to every value; mirrors ForEachKeyTask.
@SuppressWarnings("serial") static final class ForEachValueTask<K,V>
    extends Traverser<K,V,Void> {
    final Consumer<? super V> action;
    ForEachValueTask
        (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
         Consumer<? super V> action) {
        super(m, p, b);
        this.action = action;
    }
    public final void compute() {
        final Consumer<? super V> action;
        // Null screen: see the "Task classes" comment above.
        if ((action = this.action) != null) {
            // Split while possible, then run the remainder locally.
            for (int b; (b = preSplit()) > 0;)
                new ForEachValueTask<K,V>(map, this, b, action).fork();
            forEachValue(action);
            propagateCompletion();
        }
    }
}
5756
// Task applying a Consumer to every entry.
@SuppressWarnings("serial") static final class ForEachEntryTask<K,V>
    extends Traverser<K,V,Void> {
    final Consumer<? super Entry<K,V>> action;
    ForEachEntryTask
        (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
         Consumer<? super Entry<K,V>> action) {
        super(m, p, b);
        this.action = action;
    }
    public final void compute() {
        final Consumer<? super Entry<K,V>> action;
        // Null screen: see the "Task classes" comment above.
        if ((action = this.action) != null) {
            for (int b; (b = preSplit()) > 0;)
                new ForEachEntryTask<K,V>(map, this, b, action).fork();
            V v;
            // nextKey is the key paired with the value just returned by
            // advanceValue() (per the Traverser usage elsewhere in this
            // file); entryFor packages them as a Map.Entry.
            while ((v = advanceValue()) != null)
                action.accept(entryFor(nextKey, v));
            propagateCompletion();
        }
    }
}
5778
// Task applying a BiConsumer to every (key, value) pair.
@SuppressWarnings("serial") static final class ForEachMappingTask<K,V>
    extends Traverser<K,V,Void> {
    final BiConsumer<? super K, ? super V> action;
    ForEachMappingTask
        (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
         BiConsumer<? super K,? super V> action) {
        super(m, p, b);
        this.action = action;
    }
    public final void compute() {
        final BiConsumer<? super K, ? super V> action;
        // Null screen: see the "Task classes" comment above.
        if ((action = this.action) != null) {
            for (int b; (b = preSplit()) > 0;)
                new ForEachMappingTask<K,V>(map, this, b, action).fork();
            V v;
            // Pass key and value separately instead of wrapping in an Entry.
            while ((v = advanceValue()) != null)
                action.accept(nextKey, v);
            propagateCompletion();
        }
    }
}
5800
// Task applying an action to each non-null transformation of a key;
// a null transformer result means "skip this element".
@SuppressWarnings("serial") static final class ForEachTransformedKeyTask<K,V,U>
    extends Traverser<K,V,Void> {
    final Function<? super K, ? extends U> transformer;
    final Consumer<? super U> action;
    ForEachTransformedKeyTask
        (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
         Function<? super K, ? extends U> transformer, Consumer<? super U> action) {
        super(m, p, b);
        this.transformer = transformer; this.action = action;
    }
    public final void compute() {
        final Function<? super K, ? extends U> transformer;
        final Consumer<? super U> action;
        // Null screens: see the "Task classes" comment above.
        if ((transformer = this.transformer) != null &&
            (action = this.action) != null) {
            for (int b; (b = preSplit()) > 0;)
                new ForEachTransformedKeyTask<K,V,U>
                    (map, this, b, transformer, action).fork();
            K k; U u;
            while ((k = advanceKey()) != null) {
                // Only non-null transformations reach the action.
                if ((u = transformer.apply(k)) != null)
                    action.accept(u);
            }
            propagateCompletion();
        }
    }
}
5828
// Value analog of ForEachTransformedKeyTask: applies the action to each
// non-null transformation of a value.
@SuppressWarnings("serial") static final class ForEachTransformedValueTask<K,V,U>
    extends Traverser<K,V,Void> {
    final Function<? super V, ? extends U> transformer;
    final Consumer<? super U> action;
    ForEachTransformedValueTask
        (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
         Function<? super V, ? extends U> transformer, Consumer<? super U> action) {
        super(m, p, b);
        this.transformer = transformer; this.action = action;
    }
    public final void compute() {
        final Function<? super V, ? extends U> transformer;
        final Consumer<? super U> action;
        // Null screens: see the "Task classes" comment above.
        if ((transformer = this.transformer) != null &&
            (action = this.action) != null) {
            for (int b; (b = preSplit()) > 0;)
                new ForEachTransformedValueTask<K,V,U>
                    (map, this, b, transformer, action).fork();
            V v; U u;
            while ((v = advanceValue()) != null) {
                // Only non-null transformations reach the action.
                if ((u = transformer.apply(v)) != null)
                    action.accept(u);
            }
            propagateCompletion();
        }
    }
}
5856
// Entry analog of ForEachTransformedKeyTask: transforms each entry and
// applies the action to non-null results.
@SuppressWarnings("serial") static final class ForEachTransformedEntryTask<K,V,U>
    extends Traverser<K,V,Void> {
    final Function<Map.Entry<K,V>, ? extends U> transformer;
    final Consumer<? super U> action;
    ForEachTransformedEntryTask
        (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
         Function<Map.Entry<K,V>, ? extends U> transformer, Consumer<? super U> action) {
        super(m, p, b);
        this.transformer = transformer; this.action = action;
    }
    public final void compute() {
        final Function<Map.Entry<K,V>, ? extends U> transformer;
        final Consumer<? super U> action;
        // Null screens: see the "Task classes" comment above.
        if ((transformer = this.transformer) != null &&
            (action = this.action) != null) {
            for (int b; (b = preSplit()) > 0;)
                new ForEachTransformedEntryTask<K,V,U>
                    (map, this, b, transformer, action).fork();
            V v; U u;
            while ((v = advanceValue()) != null) {
                // Wrap the current key/value via entryFor before transforming.
                if ((u = transformer.apply(entryFor(nextKey,
                                                    v))) != null)
                    action.accept(u);
            }
            propagateCompletion();
        }
    }
}
5885
// (key, value) analog of ForEachTransformedKeyTask: transforms each
// mapping with a BiFunction and applies the action to non-null results.
@SuppressWarnings("serial") static final class ForEachTransformedMappingTask<K,V,U>
    extends Traverser<K,V,Void> {
    final BiFunction<? super K, ? super V, ? extends U> transformer;
    final Consumer<? super U> action;
    ForEachTransformedMappingTask
        (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
         BiFunction<? super K, ? super V, ? extends U> transformer,
         Consumer<? super U> action) {
        super(m, p, b);
        this.transformer = transformer; this.action = action;
    }
    public final void compute() {
        final BiFunction<? super K, ? super V, ? extends U> transformer;
        final Consumer<? super U> action;
        // Null screens: see the "Task classes" comment above.
        if ((transformer = this.transformer) != null &&
            (action = this.action) != null) {
            for (int b; (b = preSplit()) > 0;)
                new ForEachTransformedMappingTask<K,V,U>
                    (map, this, b, transformer, action).fork();
            V v; U u;
            while ((v = advanceValue()) != null) {
                // Only non-null transformations reach the action.
                if ((u = transformer.apply(nextKey, v)) != null)
                    action.accept(u);
            }
            propagateCompletion();
        }
    }
}
5914
// Task returning the first non-null result of applying the search
// function to a key. All subtasks share one AtomicReference; a
// successful CAS into it suppresses remaining processing everywhere.
@SuppressWarnings("serial") static final class SearchKeysTask<K,V,U>
    extends Traverser<K,V,U> {
    final Function<? super K, ? extends U> searchFunction;
    final AtomicReference<U> result; // shared across the whole task tree
    SearchKeysTask
        (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
         Function<? super K, ? extends U> searchFunction,
         AtomicReference<U> result) {
        super(m, p, b);
        this.searchFunction = searchFunction; this.result = result;
    }
    public final U getRawResult() { return result.get(); }
    public final void compute() {
        final Function<? super K, ? extends U> searchFunction;
        final AtomicReference<U> result;
        // Null screens: see the "Task classes" comment above.
        if ((searchFunction = this.searchFunction) != null &&
            (result = this.result) != null) {
            // Split only while no result has appeared; bail out entirely
            // if some other task has already succeeded.
            for (int b;;) {
                if (result.get() != null)
                    return;
                if ((b = preSplit()) <= 0)
                    break;
                new SearchKeysTask<K,V,U>
                    (map, this, b, searchFunction, result).fork();
            }
            // Scan this task's portion until success (here or elsewhere)
            // or exhaustion of the portion.
            while (result.get() == null) {
                K k; U u;
                if ((k = advanceKey()) == null) {
                    propagateCompletion();
                    break;
                }
                if ((u = searchFunction.apply(k)) != null) {
                    // First winner completes the whole tree early.
                    if (result.compareAndSet(null, u))
                        quietlyCompleteRoot();
                    break;
                }
            }
        }
    }
}
5955
// Value analog of SearchKeysTask; same shared-AtomicReference
// short-circuit protocol.
@SuppressWarnings("serial") static final class SearchValuesTask<K,V,U>
    extends Traverser<K,V,U> {
    final Function<? super V, ? extends U> searchFunction;
    final AtomicReference<U> result; // shared across the whole task tree
    SearchValuesTask
        (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
         Function<? super V, ? extends U> searchFunction,
         AtomicReference<U> result) {
        super(m, p, b);
        this.searchFunction = searchFunction; this.result = result;
    }
    public final U getRawResult() { return result.get(); }
    public final void compute() {
        final Function<? super V, ? extends U> searchFunction;
        final AtomicReference<U> result;
        // Null screens: see the "Task classes" comment above.
        if ((searchFunction = this.searchFunction) != null &&
            (result = this.result) != null) {
            // Split only while no result exists; quit if one appeared.
            for (int b;;) {
                if (result.get() != null)
                    return;
                if ((b = preSplit()) <= 0)
                    break;
                new SearchValuesTask<K,V,U>
                    (map, this, b, searchFunction, result).fork();
            }
            // Scan until success (here or elsewhere) or exhaustion.
            while (result.get() == null) {
                V v; U u;
                if ((v = advanceValue()) == null) {
                    propagateCompletion();
                    break;
                }
                if ((u = searchFunction.apply(v)) != null) {
                    // First winner completes the whole tree early.
                    if (result.compareAndSet(null, u))
                        quietlyCompleteRoot();
                    break;
                }
            }
        }
    }
}
5996
5997 @SuppressWarnings("serial") static final class SearchEntriesTask<K,V,U>
5998 extends Traverser<K,V,U> {
5999 final Function<Entry<K,V>, ? extends U> searchFunction;
6000 final AtomicReference<U> result;
6001 SearchEntriesTask
6002 (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
6003 Function<Entry<K,V>, ? extends U> searchFunction,
6004 AtomicReference<U> result) {
6005 super(m, p, b);
6006 this.searchFunction = searchFunction; this.result = result;
6007 }
6008 public final U getRawResult() { return result.get(); }
6009 public final void compute() {
6010 final Function<Entry<K,V>, ? extends U> searchFunction;
6011 final AtomicReference<U> result;
6012 if ((searchFunction = this.searchFunction) != null &&
6013 (result = this.result) != null) {
6014 for (int b;;) {
6015 if (result.get() != null)
6016 return;
6017 if ((b = preSplit()) <= 0)
6018 break;
6019 new SearchEntriesTask<K,V,U>
6020 (map, this, b, searchFunction, result).fork();
6021 }
6022 while (result.get() == null) {
6023 V v; U u;
6024 if ((v = advanceValue()) == null) {
6025 propagateCompletion();
6026 break;
6027 }
6028 if ((u = searchFunction.apply(entryFor(nextKey,
6029 v))) != null) {
6030 if (result.compareAndSet(null, u))
6031 quietlyCompleteRoot();
6032 return;
6033 }
6034 }
6035 }
6036 }
6037 }
6038
// (key, value) analog of SearchKeysTask; same shared-AtomicReference
// short-circuit protocol.
@SuppressWarnings("serial") static final class SearchMappingsTask<K,V,U>
    extends Traverser<K,V,U> {
    final BiFunction<? super K, ? super V, ? extends U> searchFunction;
    final AtomicReference<U> result; // shared across the whole task tree
    SearchMappingsTask
        (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
         BiFunction<? super K, ? super V, ? extends U> searchFunction,
         AtomicReference<U> result) {
        super(m, p, b);
        this.searchFunction = searchFunction; this.result = result;
    }
    public final U getRawResult() { return result.get(); }
    public final void compute() {
        final BiFunction<? super K, ? super V, ? extends U> searchFunction;
        final AtomicReference<U> result;
        // Null screens: see the "Task classes" comment above.
        if ((searchFunction = this.searchFunction) != null &&
            (result = this.result) != null) {
            // Split only while no result exists; quit if one appeared.
            for (int b;;) {
                if (result.get() != null)
                    return;
                if ((b = preSplit()) <= 0)
                    break;
                new SearchMappingsTask<K,V,U>
                    (map, this, b, searchFunction, result).fork();
            }
            // Scan until success (here or elsewhere) or exhaustion.
            while (result.get() == null) {
                V v; U u;
                if ((v = advanceValue()) == null) {
                    propagateCompletion();
                    break;
                }
                if ((u = searchFunction.apply(nextKey, v)) != null) {
                    // First winner completes the whole tree early.
                    if (result.compareAndSet(null, u))
                        quietlyCompleteRoot();
                    break;
                }
            }
        }
    }
}
6079
6080 @SuppressWarnings("serial") static final class ReduceKeysTask<K,V>
6081 extends Traverser<K,V,K> {
6082 final BiFunction<? super K, ? super K, ? extends K> reducer;
6083 K result;
6084 ReduceKeysTask<K,V> rights, nextRight;
6085 ReduceKeysTask
6086 (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
6087 ReduceKeysTask<K,V> nextRight,
6088 BiFunction<? super K, ? super K, ? extends K> reducer) {
6089 super(m, p, b); this.nextRight = nextRight;
6090 this.reducer = reducer;
6091 }
6092 public final K getRawResult() { return result; }
6093 @SuppressWarnings("unchecked") public final void compute() {
6094 final BiFunction<? super K, ? super K, ? extends K> reducer;
6095 if ((reducer = this.reducer) != null) {
6096 for (int b; (b = preSplit()) > 0;)
6097 (rights = new ReduceKeysTask<K,V>
6098 (map, this, b, rights, reducer)).fork();
6099 K u, r = null;
6100 while ((u = advanceKey()) != null) {
6101 r = (r == null) ? u : u == null ? r : reducer.apply(r, u);
6102 }
6103 result = r;
6104 CountedCompleter<?> c;
6105 for (c = firstComplete(); c != null; c = c.nextComplete()) {
6106 ReduceKeysTask<K,V>
6107 t = (ReduceKeysTask<K,V>)c,
6108 s = t.rights;
6109 while (s != null) {
6110 K tr, sr;
6111 if ((sr = s.result) != null)
6112 t.result = (((tr = t.result) == null) ? sr :
6113 reducer.apply(tr, sr));
6114 s = t.rights = s.nextRight;
6115 }
6116 }
6117 }
6118 }
6119 }
6120
// Value analog of ReduceKeysTask: reduces all values; null if none.
@SuppressWarnings("serial") static final class ReduceValuesTask<K,V>
    extends Traverser<K,V,V> {
    final BiFunction<? super V, ? super V, ? extends V> reducer;
    V result;
    ReduceValuesTask<K,V> rights, nextRight;
    ReduceValuesTask
        (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
         ReduceValuesTask<K,V> nextRight,
         BiFunction<? super V, ? super V, ? extends V> reducer) {
        super(m, p, b); this.nextRight = nextRight;
        this.reducer = reducer;
    }
    public final V getRawResult() { return result; }
    @SuppressWarnings("unchecked") public final void compute() {
        final BiFunction<? super V, ? super V, ? extends V> reducer;
        // Null screen: see the "Task classes" comment above.
        if ((reducer = this.reducer) != null) {
            for (int b; (b = preSplit()) > 0;)
                (rights = new ReduceValuesTask<K,V>
                 (map, this, b, rights, reducer)).fork();
            V r = null, v;
            // First value seeds r; subsequent values are combined in.
            while ((v = advanceValue()) != null)
                r = (r == null) ? v : reducer.apply(r, v);
            result = r;
            // Merge results of completed right-hand subtasks up the tree.
            CountedCompleter<?> c;
            for (c = firstComplete(); c != null; c = c.nextComplete()) {
                ReduceValuesTask<K,V>
                    t = (ReduceValuesTask<K,V>)c,
                    s = t.rights;
                while (s != null) {
                    V tr, sr;
                    // Skip subtasks that saw no elements (null result).
                    if ((sr = s.result) != null)
                        t.result = (((tr = t.result) == null) ? sr :
                                    reducer.apply(tr, sr));
                    s = t.rights = s.nextRight;
                }
            }
        }
    }
}
6160
// Entry analog of ReduceKeysTask: reduces all entries; null if none.
@SuppressWarnings("serial") static final class ReduceEntriesTask<K,V>
    extends Traverser<K,V,Map.Entry<K,V>> {
    final BiFunction<Map.Entry<K,V>, Map.Entry<K,V>, ? extends Map.Entry<K,V>> reducer;
    Map.Entry<K,V> result;
    ReduceEntriesTask<K,V> rights, nextRight;
    ReduceEntriesTask
        (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
         ReduceEntriesTask<K,V> nextRight,
         // NOTE(review): spelled Entry<K,V> here but Map.Entry<K,V> on the
         // field — presumably the same type via inherited scope; cosmetic.
         BiFunction<Entry<K,V>, Map.Entry<K,V>, ? extends Map.Entry<K,V>> reducer) {
        super(m, p, b); this.nextRight = nextRight;
        this.reducer = reducer;
    }
    public final Map.Entry<K,V> getRawResult() { return result; }
    @SuppressWarnings("unchecked") public final void compute() {
        final BiFunction<Map.Entry<K,V>, Map.Entry<K,V>, ? extends Map.Entry<K,V>> reducer;
        // Null screen: see the "Task classes" comment above.
        if ((reducer = this.reducer) != null) {
            for (int b; (b = preSplit()) > 0;)
                (rights = new ReduceEntriesTask<K,V>
                 (map, this, b, rights, reducer)).fork();
            Map.Entry<K,V> r = null;
            V v;
            while ((v = advanceValue()) != null) {
                // Wrap current key/value; first entry seeds r.
                Map.Entry<K,V> u = entryFor(nextKey, v);
                r = (r == null) ? u : reducer.apply(r, u);
            }
            result = r;
            // Merge results of completed right-hand subtasks up the tree.
            CountedCompleter<?> c;
            for (c = firstComplete(); c != null; c = c.nextComplete()) {
                ReduceEntriesTask<K,V>
                    t = (ReduceEntriesTask<K,V>)c,
                    s = t.rights;
                while (s != null) {
                    Map.Entry<K,V> tr, sr;
                    // Skip subtasks that saw no elements (null result).
                    if ((sr = s.result) != null)
                        t.result = (((tr = t.result) == null) ? sr :
                                    reducer.apply(tr, sr));
                    s = t.rights = s.nextRight;
                }
            }
        }
    }
}
6203
// Task reducing the non-null transformations of all keys; null
// transformer results are skipped; null overall result if none.
@SuppressWarnings("serial") static final class MapReduceKeysTask<K,V,U>
    extends Traverser<K,V,U> {
    final Function<? super K, ? extends U> transformer;
    final BiFunction<? super U, ? super U, ? extends U> reducer;
    U result;
    MapReduceKeysTask<K,V,U> rights, nextRight;
    MapReduceKeysTask
        (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
         MapReduceKeysTask<K,V,U> nextRight,
         Function<? super K, ? extends U> transformer,
         BiFunction<? super U, ? super U, ? extends U> reducer) {
        super(m, p, b); this.nextRight = nextRight;
        this.transformer = transformer;
        this.reducer = reducer;
    }
    public final U getRawResult() { return result; }
    @SuppressWarnings("unchecked") public final void compute() {
        final Function<? super K, ? extends U> transformer;
        final BiFunction<? super U, ? super U, ? extends U> reducer;
        // Null screens: see the "Task classes" comment above.
        if ((transformer = this.transformer) != null &&
            (reducer = this.reducer) != null) {
            for (int b; (b = preSplit()) > 0;)
                (rights = new MapReduceKeysTask<K,V,U>
                 (map, this, b, rights, transformer, reducer)).fork();
            K k; U r = null, u;
            while ((k = advanceKey()) != null) {
                // First non-null transformation seeds r; later ones combine.
                if ((u = transformer.apply(k)) != null)
                    r = (r == null) ? u : reducer.apply(r, u);
            }
            result = r;
            // Merge results of completed right-hand subtasks up the tree.
            CountedCompleter<?> c;
            for (c = firstComplete(); c != null; c = c.nextComplete()) {
                MapReduceKeysTask<K,V,U>
                    t = (MapReduceKeysTask<K,V,U>)c,
                    s = t.rights;
                while (s != null) {
                    U tr, sr;
                    // Skip subtasks that produced no result.
                    if ((sr = s.result) != null)
                        t.result = (((tr = t.result) == null) ? sr :
                                    reducer.apply(tr, sr));
                    s = t.rights = s.nextRight;
                }
            }
        }
    }
}
6250
// Value analog of MapReduceKeysTask.
@SuppressWarnings("serial") static final class MapReduceValuesTask<K,V,U>
    extends Traverser<K,V,U> {
    final Function<? super V, ? extends U> transformer;
    final BiFunction<? super U, ? super U, ? extends U> reducer;
    U result;
    MapReduceValuesTask<K,V,U> rights, nextRight;
    MapReduceValuesTask
        (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
         MapReduceValuesTask<K,V,U> nextRight,
         Function<? super V, ? extends U> transformer,
         BiFunction<? super U, ? super U, ? extends U> reducer) {
        super(m, p, b); this.nextRight = nextRight;
        this.transformer = transformer;
        this.reducer = reducer;
    }
    public final U getRawResult() { return result; }
    @SuppressWarnings("unchecked") public final void compute() {
        final Function<? super V, ? extends U> transformer;
        final BiFunction<? super U, ? super U, ? extends U> reducer;
        // Null screens: see the "Task classes" comment above.
        if ((transformer = this.transformer) != null &&
            (reducer = this.reducer) != null) {
            for (int b; (b = preSplit()) > 0;)
                (rights = new MapReduceValuesTask<K,V,U>
                 (map, this, b, rights, transformer, reducer)).fork();
            U r = null, u;
            V v;
            while ((v = advanceValue()) != null) {
                // First non-null transformation seeds r; later ones combine.
                if ((u = transformer.apply(v)) != null)
                    r = (r == null) ? u : reducer.apply(r, u);
            }
            result = r;
            // Merge results of completed right-hand subtasks up the tree.
            CountedCompleter<?> c;
            for (c = firstComplete(); c != null; c = c.nextComplete()) {
                MapReduceValuesTask<K,V,U>
                    t = (MapReduceValuesTask<K,V,U>)c,
                    s = t.rights;
                while (s != null) {
                    U tr, sr;
                    // Skip subtasks that produced no result.
                    if ((sr = s.result) != null)
                        t.result = (((tr = t.result) == null) ? sr :
                                    reducer.apply(tr, sr));
                    s = t.rights = s.nextRight;
                }
            }
        }
    }
}
6298
// Entry analog of MapReduceKeysTask.
@SuppressWarnings("serial") static final class MapReduceEntriesTask<K,V,U>
    extends Traverser<K,V,U> {
    final Function<Map.Entry<K,V>, ? extends U> transformer;
    final BiFunction<? super U, ? super U, ? extends U> reducer;
    U result;
    MapReduceEntriesTask<K,V,U> rights, nextRight;
    MapReduceEntriesTask
        (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
         MapReduceEntriesTask<K,V,U> nextRight,
         Function<Map.Entry<K,V>, ? extends U> transformer,
         BiFunction<? super U, ? super U, ? extends U> reducer) {
        super(m, p, b); this.nextRight = nextRight;
        this.transformer = transformer;
        this.reducer = reducer;
    }
    public final U getRawResult() { return result; }
    @SuppressWarnings("unchecked") public final void compute() {
        final Function<Map.Entry<K,V>, ? extends U> transformer;
        final BiFunction<? super U, ? super U, ? extends U> reducer;
        // Null screens: see the "Task classes" comment above.
        if ((transformer = this.transformer) != null &&
            (reducer = this.reducer) != null) {
            for (int b; (b = preSplit()) > 0;)
                (rights = new MapReduceEntriesTask<K,V,U>
                 (map, this, b, rights, transformer, reducer)).fork();
            U r = null, u;
            V v;
            while ((v = advanceValue()) != null) {
                // Transform the wrapped entry; skip null transformations.
                if ((u = transformer.apply(entryFor(nextKey,
                                                    v))) != null)
                    r = (r == null) ? u : reducer.apply(r, u);
            }
            result = r;
            // Merge results of completed right-hand subtasks up the tree.
            CountedCompleter<?> c;
            for (c = firstComplete(); c != null; c = c.nextComplete()) {
                MapReduceEntriesTask<K,V,U>
                    t = (MapReduceEntriesTask<K,V,U>)c,
                    s = t.rights;
                while (s != null) {
                    U tr, sr;
                    // Skip subtasks that produced no result.
                    if ((sr = s.result) != null)
                        t.result = (((tr = t.result) == null) ? sr :
                                    reducer.apply(tr, sr));
                    s = t.rights = s.nextRight;
                }
            }
        }
    }
}
6347
// (key, value) analog of MapReduceKeysTask.
@SuppressWarnings("serial") static final class MapReduceMappingsTask<K,V,U>
    extends Traverser<K,V,U> {
    final BiFunction<? super K, ? super V, ? extends U> transformer;
    final BiFunction<? super U, ? super U, ? extends U> reducer;
    U result;
    MapReduceMappingsTask<K,V,U> rights, nextRight;
    MapReduceMappingsTask
        (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
         MapReduceMappingsTask<K,V,U> nextRight,
         BiFunction<? super K, ? super V, ? extends U> transformer,
         BiFunction<? super U, ? super U, ? extends U> reducer) {
        super(m, p, b); this.nextRight = nextRight;
        this.transformer = transformer;
        this.reducer = reducer;
    }
    public final U getRawResult() { return result; }
    @SuppressWarnings("unchecked") public final void compute() {
        final BiFunction<? super K, ? super V, ? extends U> transformer;
        final BiFunction<? super U, ? super U, ? extends U> reducer;
        // Null screens: see the "Task classes" comment above.
        if ((transformer = this.transformer) != null &&
            (reducer = this.reducer) != null) {
            for (int b; (b = preSplit()) > 0;)
                (rights = new MapReduceMappingsTask<K,V,U>
                 (map, this, b, rights, transformer, reducer)).fork();
            U r = null, u;
            V v;
            while ((v = advanceValue()) != null) {
                // First non-null transformation seeds r; later ones combine.
                if ((u = transformer.apply(nextKey, v)) != null)
                    r = (r == null) ? u : reducer.apply(r, u);
            }
            result = r;
            // Merge results of completed right-hand subtasks up the tree.
            CountedCompleter<?> c;
            for (c = firstComplete(); c != null; c = c.nextComplete()) {
                MapReduceMappingsTask<K,V,U>
                    t = (MapReduceMappingsTask<K,V,U>)c,
                    s = t.rights;
                while (s != null) {
                    U tr, sr;
                    // Skip subtasks that produced no result.
                    if ((sr = s.result) != null)
                        t.result = (((tr = t.result) == null) ? sr :
                                    reducer.apply(tr, sr));
                    s = t.rights = s.nextRight;
                }
            }
        }
    }
}
6395
// Primitive-double reduction over keys; basis is the reduction identity,
// so no null screening of partial results is needed during merging.
@SuppressWarnings("serial") static final class MapReduceKeysToDoubleTask<K,V>
    extends Traverser<K,V,Double> {
    final ToDoubleFunction<? super K> transformer;
    final DoubleBinaryOperator reducer;
    final double basis;
    double result;
    MapReduceKeysToDoubleTask<K,V> rights, nextRight;
    MapReduceKeysToDoubleTask
        (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
         MapReduceKeysToDoubleTask<K,V> nextRight,
         ToDoubleFunction<? super K> transformer,
         double basis,
         DoubleBinaryOperator reducer) {
        super(m, p, b); this.nextRight = nextRight;
        this.transformer = transformer;
        this.basis = basis; this.reducer = reducer;
    }
    public final Double getRawResult() { return result; }
    @SuppressWarnings("unchecked") public final void compute() {
        final ToDoubleFunction<? super K> transformer;
        final DoubleBinaryOperator reducer;
        // Null screens: see the "Task classes" comment above.
        if ((transformer = this.transformer) != null &&
            (reducer = this.reducer) != null) {
            double r = this.basis;
            // Forks happen before any accumulation, so the r passed to
            // each subtask still equals this.basis.
            for (int b; (b = preSplit()) > 0;)
                (rights = new MapReduceKeysToDoubleTask<K,V>
                 (map, this, b, rights, transformer, r, reducer)).fork();
            K k;
            while ((k = advanceKey()) != null)
                r = reducer.applyAsDouble(r, transformer.applyAsDouble(k));
            result = r;
            // Merge subtask results; primitives need no null screening.
            CountedCompleter<?> c;
            for (c = firstComplete(); c != null; c = c.nextComplete()) {
                MapReduceKeysToDoubleTask<K,V>
                    t = (MapReduceKeysToDoubleTask<K,V>)c,
                    s = t.rights;
                while (s != null) {
                    t.result = reducer.applyAsDouble(t.result, s.result);
                    s = t.rights = s.nextRight;
                }
            }
        }
    }
}
6440
// Value analog of MapReduceKeysToDoubleTask.
@SuppressWarnings("serial") static final class MapReduceValuesToDoubleTask<K,V>
    extends Traverser<K,V,Double> {
    final ToDoubleFunction<? super V> transformer;
    final DoubleBinaryOperator reducer;
    final double basis;
    double result;
    MapReduceValuesToDoubleTask<K,V> rights, nextRight;
    MapReduceValuesToDoubleTask
        (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
         MapReduceValuesToDoubleTask<K,V> nextRight,
         ToDoubleFunction<? super V> transformer,
         double basis,
         DoubleBinaryOperator reducer) {
        super(m, p, b); this.nextRight = nextRight;
        this.transformer = transformer;
        this.basis = basis; this.reducer = reducer;
    }
    public final Double getRawResult() { return result; }
    @SuppressWarnings("unchecked") public final void compute() {
        final ToDoubleFunction<? super V> transformer;
        final DoubleBinaryOperator reducer;
        // Null screens: see the "Task classes" comment above.
        if ((transformer = this.transformer) != null &&
            (reducer = this.reducer) != null) {
            double r = this.basis;
            // Forks happen before any accumulation, so the r passed to
            // each subtask still equals this.basis.
            for (int b; (b = preSplit()) > 0;)
                (rights = new MapReduceValuesToDoubleTask<K,V>
                 (map, this, b, rights, transformer, r, reducer)).fork();
            V v;
            while ((v = advanceValue()) != null)
                r = reducer.applyAsDouble(r, transformer.applyAsDouble(v));
            result = r;
            // Merge subtask results; primitives need no null screening.
            CountedCompleter<?> c;
            for (c = firstComplete(); c != null; c = c.nextComplete()) {
                MapReduceValuesToDoubleTask<K,V>
                    t = (MapReduceValuesToDoubleTask<K,V>)c,
                    s = t.rights;
                while (s != null) {
                    t.result = reducer.applyAsDouble(t.result, s.result);
                    s = t.rights = s.nextRight;
                }
            }
        }
    }
}
6485
6486 @SuppressWarnings("serial") static final class MapReduceEntriesToDoubleTask<K,V>
6487 extends Traverser<K,V,Double> {
6488 final ToDoubleFunction<Map.Entry<K,V>> transformer;
6489 final DoubleBinaryOperator reducer;
6490 final double basis;
6491 double result;
6492 MapReduceEntriesToDoubleTask<K,V> rights, nextRight;
6493 MapReduceEntriesToDoubleTask
6494 (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
6495 MapReduceEntriesToDoubleTask<K,V> nextRight,
6496 ToDoubleFunction<Map.Entry<K,V>> transformer,
6497 double basis,
6498 DoubleBinaryOperator reducer) {
6499 super(m, p, b); this.nextRight = nextRight;
6500 this.transformer = transformer;
6501 this.basis = basis; this.reducer = reducer;
6502 }
6503 public final Double getRawResult() { return result; }
6504 @SuppressWarnings("unchecked") public final void compute() {
6505 final ToDoubleFunction<Map.Entry<K,V>> transformer;
6506 final DoubleBinaryOperator reducer;
6507 if ((transformer = this.transformer) != null &&
6508 (reducer = this.reducer) != null) {
6509 double r = this.basis;
6510 for (int b; (b = preSplit()) > 0;)
6511 (rights = new MapReduceEntriesToDoubleTask<K,V>
6512 (map, this, b, rights, transformer, r, reducer)).fork();
6513 V v;
6514 while ((v = advanceValue()) != null)
6515 r = reducer.applyAsDouble(r, transformer.applyAsDouble(entryFor(nextKey,
6516 v)));
6517 result = r;
6518 CountedCompleter<?> c;
6519 for (c = firstComplete(); c != null; c = c.nextComplete()) {
6520 MapReduceEntriesToDoubleTask<K,V>
6521 t = (MapReduceEntriesToDoubleTask<K,V>)c,
6522 s = t.rights;
6523 while (s != null) {
6524 t.result = reducer.applyAsDouble(t.result, s.result);
6525 s = t.rights = s.nextRight;
6526 }
6527 }
6528 }
6529 }
6530 }
6531
6532 @SuppressWarnings("serial") static final class MapReduceMappingsToDoubleTask<K,V>
6533 extends Traverser<K,V,Double> {
6534 final ToDoubleBiFunction<? super K, ? super V> transformer;
6535 final DoubleBinaryOperator reducer;
6536 final double basis;
6537 double result;
6538 MapReduceMappingsToDoubleTask<K,V> rights, nextRight;
6539 MapReduceMappingsToDoubleTask
6540 (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
6541 MapReduceMappingsToDoubleTask<K,V> nextRight,
6542 ToDoubleBiFunction<? super K, ? super V> transformer,
6543 double basis,
6544 DoubleBinaryOperator reducer) {
6545 super(m, p, b); this.nextRight = nextRight;
6546 this.transformer = transformer;
6547 this.basis = basis; this.reducer = reducer;
6548 }
6549 public final Double getRawResult() { return result; }
6550 @SuppressWarnings("unchecked") public final void compute() {
6551 final ToDoubleBiFunction<? super K, ? super V> transformer;
6552 final DoubleBinaryOperator reducer;
6553 if ((transformer = this.transformer) != null &&
6554 (reducer = this.reducer) != null) {
6555 double r = this.basis;
6556 for (int b; (b = preSplit()) > 0;)
6557 (rights = new MapReduceMappingsToDoubleTask<K,V>
6558 (map, this, b, rights, transformer, r, reducer)).fork();
6559 V v;
6560 while ((v = advanceValue()) != null)
6561 r = reducer.applyAsDouble(r, transformer.applyAsDouble(nextKey, v));
6562 result = r;
6563 CountedCompleter<?> c;
6564 for (c = firstComplete(); c != null; c = c.nextComplete()) {
6565 MapReduceMappingsToDoubleTask<K,V>
6566 t = (MapReduceMappingsToDoubleTask<K,V>)c,
6567 s = t.rights;
6568 while (s != null) {
6569 t.result = reducer.applyAsDouble(t.result, s.result);
6570 s = t.rights = s.nextRight;
6571 }
6572 }
6573 }
6574 }
6575 }
6576
6577 @SuppressWarnings("serial") static final class MapReduceKeysToLongTask<K,V>
6578 extends Traverser<K,V,Long> {
6579 final ToLongFunction<? super K> transformer;
6580 final LongBinaryOperator reducer;
6581 final long basis;
6582 long result;
6583 MapReduceKeysToLongTask<K,V> rights, nextRight;
6584 MapReduceKeysToLongTask
6585 (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
6586 MapReduceKeysToLongTask<K,V> nextRight,
6587 ToLongFunction<? super K> transformer,
6588 long basis,
6589 LongBinaryOperator reducer) {
6590 super(m, p, b); this.nextRight = nextRight;
6591 this.transformer = transformer;
6592 this.basis = basis; this.reducer = reducer;
6593 }
6594 public final Long getRawResult() { return result; }
6595 @SuppressWarnings("unchecked") public final void compute() {
6596 final ToLongFunction<? super K> transformer;
6597 final LongBinaryOperator reducer;
6598 if ((transformer = this.transformer) != null &&
6599 (reducer = this.reducer) != null) {
6600 long r = this.basis;
6601 for (int b; (b = preSplit()) > 0;)
6602 (rights = new MapReduceKeysToLongTask<K,V>
6603 (map, this, b, rights, transformer, r, reducer)).fork();
6604 K k;
6605 while ((k = advanceKey()) != null)
6606 r = reducer.applyAsLong(r, transformer.applyAsLong(k));
6607 result = r;
6608 CountedCompleter<?> c;
6609 for (c = firstComplete(); c != null; c = c.nextComplete()) {
6610 MapReduceKeysToLongTask<K,V>
6611 t = (MapReduceKeysToLongTask<K,V>)c,
6612 s = t.rights;
6613 while (s != null) {
6614 t.result = reducer.applyAsLong(t.result, s.result);
6615 s = t.rights = s.nextRight;
6616 }
6617 }
6618 }
6619 }
6620 }
6621
6622 @SuppressWarnings("serial") static final class MapReduceValuesToLongTask<K,V>
6623 extends Traverser<K,V,Long> {
6624 final ToLongFunction<? super V> transformer;
6625 final LongBinaryOperator reducer;
6626 final long basis;
6627 long result;
6628 MapReduceValuesToLongTask<K,V> rights, nextRight;
6629 MapReduceValuesToLongTask
6630 (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
6631 MapReduceValuesToLongTask<K,V> nextRight,
6632 ToLongFunction<? super V> transformer,
6633 long basis,
6634 LongBinaryOperator reducer) {
6635 super(m, p, b); this.nextRight = nextRight;
6636 this.transformer = transformer;
6637 this.basis = basis; this.reducer = reducer;
6638 }
6639 public final Long getRawResult() { return result; }
6640 @SuppressWarnings("unchecked") public final void compute() {
6641 final ToLongFunction<? super V> transformer;
6642 final LongBinaryOperator reducer;
6643 if ((transformer = this.transformer) != null &&
6644 (reducer = this.reducer) != null) {
6645 long r = this.basis;
6646 for (int b; (b = preSplit()) > 0;)
6647 (rights = new MapReduceValuesToLongTask<K,V>
6648 (map, this, b, rights, transformer, r, reducer)).fork();
6649 V v;
6650 while ((v = advanceValue()) != null)
6651 r = reducer.applyAsLong(r, transformer.applyAsLong(v));
6652 result = r;
6653 CountedCompleter<?> c;
6654 for (c = firstComplete(); c != null; c = c.nextComplete()) {
6655 MapReduceValuesToLongTask<K,V>
6656 t = (MapReduceValuesToLongTask<K,V>)c,
6657 s = t.rights;
6658 while (s != null) {
6659 t.result = reducer.applyAsLong(t.result, s.result);
6660 s = t.rights = s.nextRight;
6661 }
6662 }
6663 }
6664 }
6665 }
6666
6667 @SuppressWarnings("serial") static final class MapReduceEntriesToLongTask<K,V>
6668 extends Traverser<K,V,Long> {
6669 final ToLongFunction<Map.Entry<K,V>> transformer;
6670 final LongBinaryOperator reducer;
6671 final long basis;
6672 long result;
6673 MapReduceEntriesToLongTask<K,V> rights, nextRight;
6674 MapReduceEntriesToLongTask
6675 (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
6676 MapReduceEntriesToLongTask<K,V> nextRight,
6677 ToLongFunction<Map.Entry<K,V>> transformer,
6678 long basis,
6679 LongBinaryOperator reducer) {
6680 super(m, p, b); this.nextRight = nextRight;
6681 this.transformer = transformer;
6682 this.basis = basis; this.reducer = reducer;
6683 }
6684 public final Long getRawResult() { return result; }
6685 @SuppressWarnings("unchecked") public final void compute() {
6686 final ToLongFunction<Map.Entry<K,V>> transformer;
6687 final LongBinaryOperator reducer;
6688 if ((transformer = this.transformer) != null &&
6689 (reducer = this.reducer) != null) {
6690 long r = this.basis;
6691 for (int b; (b = preSplit()) > 0;)
6692 (rights = new MapReduceEntriesToLongTask<K,V>
6693 (map, this, b, rights, transformer, r, reducer)).fork();
6694 V v;
6695 while ((v = advanceValue()) != null)
6696 r = reducer.applyAsLong(r, transformer.applyAsLong(entryFor(nextKey, v)));
6697 result = r;
6698 CountedCompleter<?> c;
6699 for (c = firstComplete(); c != null; c = c.nextComplete()) {
6700 MapReduceEntriesToLongTask<K,V>
6701 t = (MapReduceEntriesToLongTask<K,V>)c,
6702 s = t.rights;
6703 while (s != null) {
6704 t.result = reducer.applyAsLong(t.result, s.result);
6705 s = t.rights = s.nextRight;
6706 }
6707 }
6708 }
6709 }
6710 }
6711
6712 @SuppressWarnings("serial") static final class MapReduceMappingsToLongTask<K,V>
6713 extends Traverser<K,V,Long> {
6714 final ToLongBiFunction<? super K, ? super V> transformer;
6715 final LongBinaryOperator reducer;
6716 final long basis;
6717 long result;
6718 MapReduceMappingsToLongTask<K,V> rights, nextRight;
6719 MapReduceMappingsToLongTask
6720 (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
6721 MapReduceMappingsToLongTask<K,V> nextRight,
6722 ToLongBiFunction<? super K, ? super V> transformer,
6723 long basis,
6724 LongBinaryOperator reducer) {
6725 super(m, p, b); this.nextRight = nextRight;
6726 this.transformer = transformer;
6727 this.basis = basis; this.reducer = reducer;
6728 }
6729 public final Long getRawResult() { return result; }
6730 @SuppressWarnings("unchecked") public final void compute() {
6731 final ToLongBiFunction<? super K, ? super V> transformer;
6732 final LongBinaryOperator reducer;
6733 if ((transformer = this.transformer) != null &&
6734 (reducer = this.reducer) != null) {
6735 long r = this.basis;
6736 for (int b; (b = preSplit()) > 0;)
6737 (rights = new MapReduceMappingsToLongTask<K,V>
6738 (map, this, b, rights, transformer, r, reducer)).fork();
6739 V v;
6740 while ((v = advanceValue()) != null)
6741 r = reducer.applyAsLong(r, transformer.applyAsLong(nextKey, v));
6742 result = r;
6743 CountedCompleter<?> c;
6744 for (c = firstComplete(); c != null; c = c.nextComplete()) {
6745 MapReduceMappingsToLongTask<K,V>
6746 t = (MapReduceMappingsToLongTask<K,V>)c,
6747 s = t.rights;
6748 while (s != null) {
6749 t.result = reducer.applyAsLong(t.result, s.result);
6750 s = t.rights = s.nextRight;
6751 }
6752 }
6753 }
6754 }
6755 }
6756
6757 @SuppressWarnings("serial") static final class MapReduceKeysToIntTask<K,V>
6758 extends Traverser<K,V,Integer> {
6759 final ToIntFunction<? super K> transformer;
6760 final IntBinaryOperator reducer;
6761 final int basis;
6762 int result;
6763 MapReduceKeysToIntTask<K,V> rights, nextRight;
6764 MapReduceKeysToIntTask
6765 (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
6766 MapReduceKeysToIntTask<K,V> nextRight,
6767 ToIntFunction<? super K> transformer,
6768 int basis,
6769 IntBinaryOperator reducer) {
6770 super(m, p, b); this.nextRight = nextRight;
6771 this.transformer = transformer;
6772 this.basis = basis; this.reducer = reducer;
6773 }
6774 public final Integer getRawResult() { return result; }
6775 @SuppressWarnings("unchecked") public final void compute() {
6776 final ToIntFunction<? super K> transformer;
6777 final IntBinaryOperator reducer;
6778 if ((transformer = this.transformer) != null &&
6779 (reducer = this.reducer) != null) {
6780 int r = this.basis;
6781 for (int b; (b = preSplit()) > 0;)
6782 (rights = new MapReduceKeysToIntTask<K,V>
6783 (map, this, b, rights, transformer, r, reducer)).fork();
6784 K k;
6785 while ((k = advanceKey()) != null)
6786 r = reducer.applyAsInt(r, transformer.applyAsInt(k));
6787 result = r;
6788 CountedCompleter<?> c;
6789 for (c = firstComplete(); c != null; c = c.nextComplete()) {
6790 MapReduceKeysToIntTask<K,V>
6791 t = (MapReduceKeysToIntTask<K,V>)c,
6792 s = t.rights;
6793 while (s != null) {
6794 t.result = reducer.applyAsInt(t.result, s.result);
6795 s = t.rights = s.nextRight;
6796 }
6797 }
6798 }
6799 }
6800 }
6801
6802 @SuppressWarnings("serial") static final class MapReduceValuesToIntTask<K,V>
6803 extends Traverser<K,V,Integer> {
6804 final ToIntFunction<? super V> transformer;
6805 final IntBinaryOperator reducer;
6806 final int basis;
6807 int result;
6808 MapReduceValuesToIntTask<K,V> rights, nextRight;
6809 MapReduceValuesToIntTask
6810 (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
6811 MapReduceValuesToIntTask<K,V> nextRight,
6812 ToIntFunction<? super V> transformer,
6813 int basis,
6814 IntBinaryOperator reducer) {
6815 super(m, p, b); this.nextRight = nextRight;
6816 this.transformer = transformer;
6817 this.basis = basis; this.reducer = reducer;
6818 }
6819 public final Integer getRawResult() { return result; }
6820 @SuppressWarnings("unchecked") public final void compute() {
6821 final ToIntFunction<? super V> transformer;
6822 final IntBinaryOperator reducer;
6823 if ((transformer = this.transformer) != null &&
6824 (reducer = this.reducer) != null) {
6825 int r = this.basis;
6826 for (int b; (b = preSplit()) > 0;)
6827 (rights = new MapReduceValuesToIntTask<K,V>
6828 (map, this, b, rights, transformer, r, reducer)).fork();
6829 V v;
6830 while ((v = advanceValue()) != null)
6831 r = reducer.applyAsInt(r, transformer.applyAsInt(v));
6832 result = r;
6833 CountedCompleter<?> c;
6834 for (c = firstComplete(); c != null; c = c.nextComplete()) {
6835 MapReduceValuesToIntTask<K,V>
6836 t = (MapReduceValuesToIntTask<K,V>)c,
6837 s = t.rights;
6838 while (s != null) {
6839 t.result = reducer.applyAsInt(t.result, s.result);
6840 s = t.rights = s.nextRight;
6841 }
6842 }
6843 }
6844 }
6845 }
6846
6847 @SuppressWarnings("serial") static final class MapReduceEntriesToIntTask<K,V>
6848 extends Traverser<K,V,Integer> {
6849 final ToIntFunction<Map.Entry<K,V>> transformer;
6850 final IntBinaryOperator reducer;
6851 final int basis;
6852 int result;
6853 MapReduceEntriesToIntTask<K,V> rights, nextRight;
6854 MapReduceEntriesToIntTask
6855 (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
6856 MapReduceEntriesToIntTask<K,V> nextRight,
6857 ToIntFunction<Map.Entry<K,V>> transformer,
6858 int basis,
6859 IntBinaryOperator reducer) {
6860 super(m, p, b); this.nextRight = nextRight;
6861 this.transformer = transformer;
6862 this.basis = basis; this.reducer = reducer;
6863 }
6864 public final Integer getRawResult() { return result; }
6865 @SuppressWarnings("unchecked") public final void compute() {
6866 final ToIntFunction<Map.Entry<K,V>> transformer;
6867 final IntBinaryOperator reducer;
6868 if ((transformer = this.transformer) != null &&
6869 (reducer = this.reducer) != null) {
6870 int r = this.basis;
6871 for (int b; (b = preSplit()) > 0;)
6872 (rights = new MapReduceEntriesToIntTask<K,V>
6873 (map, this, b, rights, transformer, r, reducer)).fork();
6874 V v;
6875 while ((v = advanceValue()) != null)
6876 r = reducer.applyAsInt(r, transformer.applyAsInt(entryFor(nextKey,
6877 v)));
6878 result = r;
6879 CountedCompleter<?> c;
6880 for (c = firstComplete(); c != null; c = c.nextComplete()) {
6881 MapReduceEntriesToIntTask<K,V>
6882 t = (MapReduceEntriesToIntTask<K,V>)c,
6883 s = t.rights;
6884 while (s != null) {
6885 t.result = reducer.applyAsInt(t.result, s.result);
6886 s = t.rights = s.nextRight;
6887 }
6888 }
6889 }
6890 }
6891 }
6892
6893 @SuppressWarnings("serial") static final class MapReduceMappingsToIntTask<K,V>
6894 extends Traverser<K,V,Integer> {
6895 final ToIntBiFunction<? super K, ? super V> transformer;
6896 final IntBinaryOperator reducer;
6897 final int basis;
6898 int result;
6899 MapReduceMappingsToIntTask<K,V> rights, nextRight;
6900 MapReduceMappingsToIntTask
6901 (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
6902 MapReduceMappingsToIntTask<K,V> nextRight,
6903 ToIntBiFunction<? super K, ? super V> transformer,
6904 int basis,
6905 IntBinaryOperator reducer) {
6906 super(m, p, b); this.nextRight = nextRight;
6907 this.transformer = transformer;
6908 this.basis = basis; this.reducer = reducer;
6909 }
6910 public final Integer getRawResult() { return result; }
6911 @SuppressWarnings("unchecked") public final void compute() {
6912 final ToIntBiFunction<? super K, ? super V> transformer;
6913 final IntBinaryOperator reducer;
6914 if ((transformer = this.transformer) != null &&
6915 (reducer = this.reducer) != null) {
6916 int r = this.basis;
6917 for (int b; (b = preSplit()) > 0;)
6918 (rights = new MapReduceMappingsToIntTask<K,V>
6919 (map, this, b, rights, transformer, r, reducer)).fork();
6920 V v;
6921 while ((v = advanceValue()) != null)
6922 r = reducer.applyAsInt(r, transformer.applyAsInt(nextKey, v));
6923 result = r;
6924 CountedCompleter<?> c;
6925 for (c = firstComplete(); c != null; c = c.nextComplete()) {
6926 MapReduceMappingsToIntTask<K,V>
6927 t = (MapReduceMappingsToIntTask<K,V>)c,
6928 s = t.rights;
6929 while (s != null) {
6930 t.result = reducer.applyAsInt(t.result, s.result);
6931 s = t.rights = s.nextRight;
6932 }
6933 }
6934 }
6935 }
6936 }
6937
    // Unsafe mechanics: raw field/array offsets resolved once at class
    // load, used by the CAS/volatile access helpers elsewhere in this class.
    private static final sun.misc.Unsafe U;
    private static final long SIZECTL;        // offset of sizeCtl
    private static final long TRANSFERINDEX;  // offset of transferIndex
    private static final long TRANSFERORIGIN; // offset of transferOrigin
    private static final long BASECOUNT;      // offset of baseCount
    private static final long CELLSBUSY;      // offset of cellsBusy
    private static final long CELLVALUE;      // offset of Cell.value
    private static final long ABASE;          // base offset of Node[] storage
    private static final int ASHIFT;          // log2 of Node[] element scale

    static {
        try {
            // Only works when this class is loaded by the bootstrap loader
            // (i.e. as part of the JDK); otherwise getUnsafe() throws.
            U = sun.misc.Unsafe.getUnsafe();
            Class<?> k = ConcurrentHashMap.class;
            SIZECTL = U.objectFieldOffset
                (k.getDeclaredField("sizeCtl"));
            TRANSFERINDEX = U.objectFieldOffset
                (k.getDeclaredField("transferIndex"));
            TRANSFERORIGIN = U.objectFieldOffset
                (k.getDeclaredField("transferOrigin"));
            BASECOUNT = U.objectFieldOffset
                (k.getDeclaredField("baseCount"));
            CELLSBUSY = U.objectFieldOffset
                (k.getDeclaredField("cellsBusy"));
            Class<?> ck = Cell.class;
            CELLVALUE = U.objectFieldOffset
                (ck.getDeclaredField("value"));
            Class<?> sc = Node[].class;
            ABASE = U.arrayBaseOffset(sc);
            int scale = U.arrayIndexScale(sc);
            // Element scale must be a power of two so that element
            // addresses can be computed with a shift (presumably
            // ((long)i << ASHIFT) + ABASE in the table accessors).
            if ((scale & (scale - 1)) != 0)
                throw new Error("data type scale not a power of two");
            ASHIFT = 31 - Integer.numberOfLeadingZeros(scale);
        } catch (Exception e) {
            // Reflection failures here are unrecoverable class-init errors.
            throw new Error(e);
        }
    }
6976
6977 }