ViewVC Help
View File | Revision Log | Show Annotations | Download File | Root Listing
root/jsr166/jsr166/src/main/java/util/concurrent/ConcurrentHashMap.java
Revision: 1.152
Committed: Sat Dec 15 21:00:15 2012 UTC (11 years, 5 months ago) by jsr166
Branch: MAIN
Changes since 1.151: +2 -2 lines
Log Message:
whitespace

File Contents

# Content
1 /*
2 * Written by Doug Lea with assistance from members of JCP JSR-166
3 * Expert Group and released to the public domain, as explained at
4 * http://creativecommons.org/publicdomain/zero/1.0/
5 */
6
7 package java.util.concurrent;
8 import java.util.concurrent.ForkJoinPool;
9 import java.util.concurrent.CountedCompleter;
10
11 import java.util.Comparator;
12 import java.util.Arrays;
13 import java.util.Map;
14 import java.util.Set;
15 import java.util.Collection;
16 import java.util.AbstractMap;
17 import java.util.AbstractSet;
18 import java.util.AbstractCollection;
19 import java.util.Hashtable;
20 import java.util.HashMap;
21 import java.util.Iterator;
22 import java.util.Enumeration;
23 import java.util.ConcurrentModificationException;
24 import java.util.NoSuchElementException;
25 import java.util.concurrent.ConcurrentMap;
26 import java.util.concurrent.locks.AbstractQueuedSynchronizer;
27 import java.util.concurrent.atomic.AtomicInteger;
28 import java.util.concurrent.atomic.AtomicReference;
29 import java.io.Serializable;
30
31 /**
32 * A hash table supporting full concurrency of retrievals and
33 * high expected concurrency for updates. This class obeys the
34 * same functional specification as {@link java.util.Hashtable}, and
35 * includes versions of methods corresponding to each method of
36 * {@code Hashtable}. However, even though all operations are
37 * thread-safe, retrieval operations do <em>not</em> entail locking,
38 * and there is <em>not</em> any support for locking the entire table
39 * in a way that prevents all access. This class is fully
40 * interoperable with {@code Hashtable} in programs that rely on its
41 * thread safety but not on its synchronization details.
42 *
43 * <p>Retrieval operations (including {@code get}) generally do not
44 * block, so may overlap with update operations (including {@code put}
45 * and {@code remove}). Retrievals reflect the results of the most
46 * recently <em>completed</em> update operations holding upon their
47 * onset. (More formally, an update operation for a given key bears a
48 * <em>happens-before</em> relation with any (non-null) retrieval for
49 * that key reporting the updated value.) For aggregate operations
50 * such as {@code putAll} and {@code clear}, concurrent retrievals may
51 * reflect insertion or removal of only some entries. Similarly,
52 * Iterators and Enumerations return elements reflecting the state of
53 * the hash table at some point at or since the creation of the
54 * iterator/enumeration. They do <em>not</em> throw {@link
55 * ConcurrentModificationException}. However, iterators are designed
56 * to be used by only one thread at a time. Bear in mind that the
57 * results of aggregate status methods including {@code size}, {@code
58 * isEmpty}, and {@code containsValue} are typically useful only when
59 * a map is not undergoing concurrent updates in other threads.
60 * Otherwise the results of these methods reflect transient states
61 * that may be adequate for monitoring or estimation purposes, but not
62 * for program control.
63 *
64 * <p>The table is dynamically expanded when there are too many
65 * collisions (i.e., keys that have distinct hash codes but fall into
66 * the same slot modulo the table size), with the expected average
67 * effect of maintaining roughly two bins per mapping (corresponding
68 * to a 0.75 load factor threshold for resizing). There may be much
69 * variance around this average as mappings are added and removed, but
70 * overall, this maintains a commonly accepted time/space tradeoff for
71 * hash tables. However, resizing this or any other kind of hash
72 * table may be a relatively slow operation. When possible, it is a
73 * good idea to provide a size estimate as an optional {@code
74 * initialCapacity} constructor argument. An additional optional
75 * {@code loadFactor} constructor argument provides a further means of
76 * customizing initial table capacity by specifying the table density
77 * to be used in calculating the amount of space to allocate for the
78 * given number of elements. Also, for compatibility with previous
79 * versions of this class, constructors may optionally specify an
80 * expected {@code concurrencyLevel} as an additional hint for
81 * internal sizing. Note that using many keys with exactly the same
82 * {@code hashCode()} is a sure way to slow down performance of any
83 * hash table.
84 *
85 * <p>A {@link Set} projection of a ConcurrentHashMap may be created
86 * (using {@link #newKeySet()} or {@link #newKeySet(int)}), or viewed
 * (using {@link #keySet(Object)}) when only keys are of interest, and the
88 * mapped values are (perhaps transiently) not used or all take the
89 * same mapping value.
90 *
 * <p>A ConcurrentHashMap can be used as a scalable frequency map (a
92 * form of histogram or multiset) by using {@link LongAdder} values
93 * and initializing via {@link #computeIfAbsent}. For example, to add
94 * a count to a {@code ConcurrentHashMap<String,LongAdder> freqs}, you
 * can use {@code freqs.computeIfAbsent(k, k -> new
96 * LongAdder()).increment();}
97 *
98 * <p>This class and its views and iterators implement all of the
99 * <em>optional</em> methods of the {@link Map} and {@link Iterator}
100 * interfaces.
101 *
102 * <p>Like {@link Hashtable} but unlike {@link HashMap}, this class
103 * does <em>not</em> allow {@code null} to be used as a key or value.
104 *
 * <p>ConcurrentHashMaps support sequential and parallel bulk
 * operations. (Parallel forms use the {@link
107 * ForkJoinPool#commonPool()}). Tasks that may be used in other
108 * contexts are available in class {@link ForkJoinTasks}. These
109 * operations are designed to be safely, and often sensibly, applied
110 * even with maps that are being concurrently updated by other
111 * threads; for example, when computing a snapshot summary of the
112 * values in a shared registry. There are three kinds of operation,
113 * each with four forms, accepting functions with Keys, Values,
114 * Entries, and (Key, Value) arguments and/or return values. Because
115 * the elements of a ConcurrentHashMap are not ordered in any
116 * particular way, and may be processed in different orders in
117 * different parallel executions, the correctness of supplied
118 * functions should not depend on any ordering, or on any other
119 * objects or values that may transiently change while computation is
120 * in progress; and except for forEach actions, should ideally be
121 * side-effect-free.
122 *
123 * <ul>
124 * <li> forEach: Perform a given action on each element.
125 * A variant form applies a given transformation on each element
126 * before performing the action.</li>
127 *
128 * <li> search: Return the first available non-null result of
129 * applying a given function on each element; skipping further
130 * search when a result is found.</li>
131 *
132 * <li> reduce: Accumulate each element. The supplied reduction
133 * function cannot rely on ordering (more formally, it should be
134 * both associative and commutative). There are five variants:
135 *
136 * <ul>
137 *
138 * <li> Plain reductions. (There is not a form of this method for
139 * (key, value) function arguments since there is no corresponding
140 * return type.)</li>
141 *
142 * <li> Mapped reductions that accumulate the results of a given
143 * function applied to each element.</li>
144 *
145 * <li> Reductions to scalar doubles, longs, and ints, using a
146 * given basis value.</li>
147 *
 *
149 * </ul>
150 * </ul>
151 *
152 * <p>The concurrency properties of bulk operations follow
153 * from those of ConcurrentHashMap: Any non-null result returned
154 * from {@code get(key)} and related access methods bears a
155 * happens-before relation with the associated insertion or
156 * update. The result of any bulk operation reflects the
157 * composition of these per-element relations (but is not
158 * necessarily atomic with respect to the map as a whole unless it
159 * is somehow known to be quiescent). Conversely, because keys
160 * and values in the map are never null, null serves as a reliable
161 * atomic indicator of the current lack of any result. To
162 * maintain this property, null serves as an implicit basis for
163 * all non-scalar reduction operations. For the double, long, and
164 * int versions, the basis should be one that, when combined with
165 * any other value, returns that other value (more formally, it
166 * should be the identity element for the reduction). Most common
167 * reductions have these properties; for example, computing a sum
168 * with basis 0 or a minimum with basis MAX_VALUE.
169 *
170 * <p>Search and transformation functions provided as arguments
171 * should similarly return null to indicate the lack of any result
172 * (in which case it is not used). In the case of mapped
173 * reductions, this also enables transformations to serve as
174 * filters, returning null (or, in the case of primitive
175 * specializations, the identity basis) if the element should not
176 * be combined. You can create compound transformations and
177 * filterings by composing them yourself under this "null means
178 * there is nothing there now" rule before using them in search or
179 * reduce operations.
180 *
181 * <p>Methods accepting and/or returning Entry arguments maintain
182 * key-value associations. They may be useful for example when
183 * finding the key for the greatest value. Note that "plain" Entry
184 * arguments can be supplied using {@code new
185 * AbstractMap.SimpleEntry(k,v)}.
186 *
187 * <p>Bulk operations may complete abruptly, throwing an
188 * exception encountered in the application of a supplied
189 * function. Bear in mind when handling such exceptions that other
190 * concurrently executing functions could also have thrown
191 * exceptions, or would have done so if the first exception had
192 * not occurred.
193 *
194 * <p>Speedups for parallel compared to sequential forms are common
195 * but not guaranteed. Parallel operations involving brief functions
196 * on small maps may execute more slowly than sequential forms if the
197 * underlying work to parallelize the computation is more expensive
198 * than the computation itself. Similarly, parallelization may not
199 * lead to much actual parallelism if all processors are busy
200 * performing unrelated tasks.
201 *
202 * <p>All arguments to all task methods must be non-null.
203 *
204 * <p><em>jsr166e note: During transition, this class
205 * uses nested functional interfaces with different names but the
206 * same forms as those expected for JDK8.</em>
207 *
208 * <p>This class is a member of the
209 * <a href="{@docRoot}/../technotes/guides/collections/index.html">
210 * Java Collections Framework</a>.
211 *
212 * @since 1.5
213 * @author Doug Lea
214 * @param <K> the type of keys maintained by this map
215 * @param <V> the type of mapped values
216 */
217 public class ConcurrentHashMap<K, V>
218 implements ConcurrentMap<K, V>, Serializable {
219 private static final long serialVersionUID = 7249069246763182397L;
220
221 /**
222 * A partitionable iterator. A Spliterator can be traversed
223 * directly, but can also be partitioned (before traversal) by
224 * creating another Spliterator that covers a non-overlapping
225 * portion of the elements, and so may be amenable to parallel
226 * execution.
227 *
228 * <p>This interface exports a subset of expected JDK8
229 * functionality.
230 *
231 * <p>Sample usage: Here is one (of the several) ways to compute
232 * the sum of the values held in a map using the ForkJoin
233 * framework. As illustrated here, Spliterators are well suited to
234 * designs in which a task repeatedly splits off half its work
235 * into forked subtasks until small enough to process directly,
236 * and then joins these subtasks. Variants of this style can also
237 * be used in completion-based designs.
238 *
239 * <pre>
240 * {@code ConcurrentHashMap<String, Long> m = ...
241 * // split as if have 8 * parallelism, for load balance
242 * int n = m.size();
243 * int p = aForkJoinPool.getParallelism() * 8;
244 * int split = (n < p)? n : p;
245 * long sum = aForkJoinPool.invoke(new SumValues(m.valueSpliterator(), split, null));
246 * // ...
247 * static class SumValues extends RecursiveTask<Long> {
248 * final Spliterator<Long> s;
249 * final int split; // split while > 1
250 * final SumValues nextJoin; // records forked subtasks to join
251 * SumValues(Spliterator<Long> s, int depth, SumValues nextJoin) {
252 * this.s = s; this.depth = depth; this.nextJoin = nextJoin;
253 * }
254 * public Long compute() {
255 * long sum = 0;
256 * SumValues subtasks = null; // fork subtasks
257 * for (int s = split >>> 1; s > 0; s >>>= 1)
258 * (subtasks = new SumValues(s.split(), s, subtasks)).fork();
259 * while (s.hasNext()) // directly process remaining elements
260 * sum += s.next();
261 * for (SumValues t = subtasks; t != null; t = t.nextJoin)
262 * sum += t.join(); // collect subtask results
263 * return sum;
264 * }
265 * }
266 * }</pre>
267 */
268 public static interface Spliterator<T> extends Iterator<T> {
269 /**
270 * Returns a Spliterator covering approximately half of the
271 * elements, guaranteed not to overlap with those subsequently
272 * returned by this Spliterator. After invoking this method,
273 * the current Spliterator will <em>not</em> produce any of
274 * the elements of the returned Spliterator, but the two
275 * Spliterators together will produce all of the elements that
276 * would have been produced by this Spliterator had this
277 * method not been called. The exact number of elements
278 * produced by the returned Spliterator is not guaranteed, and
279 * may be zero (i.e., with {@code hasNext()} reporting {@code
280 * false}) if this Spliterator cannot be further split.
281 *
282 * @return a Spliterator covering approximately half of the
283 * elements
284 * @throws IllegalStateException if this Spliterator has
285 * already commenced traversing elements
286 */
287 Spliterator<T> split();
288 }
289
290 /*
291 * Overview:
292 *
293 * The primary design goal of this hash table is to maintain
294 * concurrent readability (typically method get(), but also
295 * iterators and related methods) while minimizing update
296 * contention. Secondary goals are to keep space consumption about
297 * the same or better than java.util.HashMap, and to support high
298 * initial insertion rates on an empty table by many threads.
299 *
300 * Each key-value mapping is held in a Node. Because Node key
301 * fields can contain special values, they are defined using plain
302 * Object types (not type "K"). This leads to a lot of explicit
303 * casting (and many explicit warning suppressions to tell
304 * compilers not to complain about it). It also allows some of the
305 * public methods to be factored into a smaller number of internal
306 * methods (although sadly not so for the five variants of
307 * put-related operations). The validation-based approach
308 * explained below leads to a lot of code sprawl because
309 * retry-control precludes factoring into smaller methods.
310 *
311 * The table is lazily initialized to a power-of-two size upon the
312 * first insertion. Each bin in the table normally contains a
313 * list of Nodes (most often, the list has only zero or one Node).
314 * Table accesses require volatile/atomic reads, writes, and
315 * CASes. Because there is no other way to arrange this without
316 * adding further indirections, we use intrinsics
317 * (sun.misc.Unsafe) operations. The lists of nodes within bins
318 * are always accurately traversable under volatile reads, so long
319 * as lookups check hash code and non-nullness of value before
320 * checking key equality.
321 *
322 * We use the top (sign) bit of Node hash fields for control
323 * purposes -- it is available anyway because of addressing
324 * constraints. Nodes with negative hash fields are forwarding
325 * nodes to either TreeBins or resized tables. The lower 31 bits
326 * of each normal Node's hash field contain a transformation of
327 * the key's hash code.
328 *
329 * Insertion (via put or its variants) of the first node in an
330 * empty bin is performed by just CASing it to the bin. This is
331 * by far the most common case for put operations under most
332 * key/hash distributions. Other update operations (insert,
333 * delete, and replace) require locks. We do not want to waste
334 * the space required to associate a distinct lock object with
335 * each bin, so instead use the first node of a bin list itself as
336 * a lock. Locking support for these locks relies on builtin
337 * "synchronized" monitors.
338 *
339 * Using the first node of a list as a lock does not by itself
340 * suffice though: When a node is locked, any update must first
341 * validate that it is still the first node after locking it, and
342 * retry if not. Because new nodes are always appended to lists,
343 * once a node is first in a bin, it remains first until deleted
344 * or the bin becomes invalidated (upon resizing). However,
345 * operations that only conditionally update may inspect nodes
346 * until the point of update. This is a converse of sorts to the
347 * lazy locking technique described by Herlihy & Shavit.
348 *
349 * The main disadvantage of per-bin locks is that other update
350 * operations on other nodes in a bin list protected by the same
351 * lock can stall, for example when user equals() or mapping
352 * functions take a long time. However, statistically, under
353 * random hash codes, this is not a common problem. Ideally, the
354 * frequency of nodes in bins follows a Poisson distribution
355 * (http://en.wikipedia.org/wiki/Poisson_distribution) with a
356 * parameter of about 0.5 on average, given the resizing threshold
357 * of 0.75, although with a large variance because of resizing
358 * granularity. Ignoring variance, the expected occurrences of
359 * list size k are (exp(-0.5) * pow(0.5, k) / factorial(k)). The
360 * first values are:
361 *
362 * 0: 0.60653066
363 * 1: 0.30326533
364 * 2: 0.07581633
365 * 3: 0.01263606
366 * 4: 0.00157952
367 * 5: 0.00015795
368 * 6: 0.00001316
369 * 7: 0.00000094
370 * 8: 0.00000006
371 * more: less than 1 in ten million
372 *
373 * Lock contention probability for two threads accessing distinct
374 * elements is roughly 1 / (8 * #elements) under random hashes.
375 *
376 * Actual hash code distributions encountered in practice
377 * sometimes deviate significantly from uniform randomness. This
378 * includes the case when N > (1<<30), so some keys MUST collide.
379 * Similarly for dumb or hostile usages in which multiple keys are
380 * designed to have identical hash codes. Also, although we guard
381 * against the worst effects of this (see method spread), sets of
382 * hashes may differ only in bits that do not impact their bin
383 * index for a given power-of-two mask. So we use a secondary
384 * strategy that applies when the number of nodes in a bin exceeds
385 * a threshold, and at least one of the keys implements
386 * Comparable. These TreeBins use a balanced tree to hold nodes
387 * (a specialized form of red-black trees), bounding search time
388 * to O(log N). Each search step in a TreeBin is around twice as
389 * slow as in a regular list, but given that N cannot exceed
390 * (1<<64) (before running out of addresses) this bounds search
391 * steps, lock hold times, etc, to reasonable constants (roughly
392 * 100 nodes inspected per operation worst case) so long as keys
393 * are Comparable (which is very common -- String, Long, etc).
394 * TreeBin nodes (TreeNodes) also maintain the same "next"
395 * traversal pointers as regular nodes, so can be traversed in
396 * iterators in the same way.
397 *
398 * The table is resized when occupancy exceeds a percentage
399 * threshold (nominally, 0.75, but see below). Any thread
400 * noticing an overfull bin may assist in resizing after the
401 * initiating thread allocates and sets up the replacement
402 * array. However, rather than stalling, these other threads may
403 * proceed with insertions etc. The use of TreeBins shields us
404 * from the worst case effects of overfilling while resizes are in
405 * progress. Resizing proceeds by transferring bins, one by one,
406 * from the table to the next table. To enable concurrency, the
407 * next table must be (incrementally) prefilled with place-holders
408 * serving as reverse forwarders to the old table. Because we are
409 * using power-of-two expansion, the elements from each bin must
410 * either stay at same index, or move with a power of two
411 * offset. We eliminate unnecessary node creation by catching
412 * cases where old nodes can be reused because their next fields
413 * won't change. On average, only about one-sixth of them need
414 * cloning when a table doubles. The nodes they replace will be
415 * garbage collectable as soon as they are no longer referenced by
416 * any reader thread that may be in the midst of concurrently
417 * traversing table. Upon transfer, the old table bin contains
418 * only a special forwarding node (with hash field "MOVED") that
419 * contains the next table as its key. On encountering a
420 * forwarding node, access and update operations restart, using
421 * the new table.
422 *
423 * Each bin transfer requires its bin lock, which can stall
424 * waiting for locks while resizing. However, because other
425 * threads can join in and help resize rather than contend for
426 * locks, average aggregate waits become shorter as resizing
427 * progresses. The transfer operation must also ensure that all
428 * accessible bins in both the old and new table are usable by any
429 * traversal. This is arranged by proceeding from the last bin
430 * (table.length - 1) up towards the first. Upon seeing a
431 * forwarding node, traversals (see class Traverser) arrange to
432 * move to the new table without revisiting nodes. However, to
433 * ensure that no intervening nodes are skipped, bin splitting can
434 * only begin after the associated reverse-forwarders are in
435 * place.
436 *
437 * The traversal scheme also applies to partial traversals of
438 * ranges of bins (via an alternate Traverser constructor)
439 * to support partitioned aggregate operations. Also, read-only
440 * operations give up if ever forwarded to a null table, which
441 * provides support for shutdown-style clearing, which is also not
442 * currently implemented.
443 *
444 * Lazy table initialization minimizes footprint until first use,
445 * and also avoids resizings when the first operation is from a
446 * putAll, constructor with map argument, or deserialization.
447 * These cases attempt to override the initial capacity settings,
448 * but harmlessly fail to take effect in cases of races.
449 *
450 * The element count is maintained using a specialization of
451 * LongAdder. We need to incorporate a specialization rather than
452 * just use a LongAdder in order to access implicit
453 * contention-sensing that leads to creation of multiple
454 * CounterCells. The counter mechanics avoid contention on
455 * updates but can encounter cache thrashing if read too
456 * frequently during concurrent access. To avoid reading so often,
457 * resizing under contention is attempted only upon adding to a
458 * bin already holding two or more nodes. Under uniform hash
459 * distributions, the probability of this occurring at threshold
460 * is around 13%, meaning that only about 1 in 8 puts check
461 * threshold (and after resizing, many fewer do so). The bulk
462 * putAll operation further reduces contention by only committing
463 * count updates upon these size checks.
464 *
465 * Maintaining API and serialization compatibility with previous
466 * versions of this class introduces several oddities. Mainly: We
     * leave untouched but unused constructor arguments referring to
468 * concurrencyLevel. We accept a loadFactor constructor argument,
469 * but apply it only to initial table capacity (which is the only
470 * time that we can guarantee to honor it.) We also declare an
471 * unused "Segment" class that is instantiated in minimal form
472 * only when serializing.
473 */
474
475 /* ---------------- Constants -------------- */
476
477 /**
478 * The largest possible table capacity. This value must be
479 * exactly 1<<30 to stay within Java array allocation and indexing
480 * bounds for power of two table sizes, and is further required
481 * because the top two bits of 32bit hash fields are used for
482 * control purposes.
483 */
484 private static final int MAXIMUM_CAPACITY = 1 << 30;
485
486 /**
487 * The default initial table capacity. Must be a power of 2
488 * (i.e., at least 1) and at most MAXIMUM_CAPACITY.
489 */
490 private static final int DEFAULT_CAPACITY = 16;
491
492 /**
493 * The largest possible (non-power of two) array size.
494 * Needed by toArray and related methods.
495 */
496 static final int MAX_ARRAY_SIZE = Integer.MAX_VALUE - 8;
497
498 /**
499 * The default concurrency level for this table. Unused but
500 * defined for compatibility with previous versions of this class.
501 */
502 private static final int DEFAULT_CONCURRENCY_LEVEL = 16;
503
504 /**
505 * The load factor for this table. Overrides of this value in
506 * constructors affect only the initial table capacity. The
507 * actual floating point value isn't normally used -- it is
508 * simpler to use expressions such as {@code n - (n >>> 2)} for
509 * the associated resizing threshold.
510 */
511 private static final float LOAD_FACTOR = 0.75f;
512
513 /**
514 * The bin count threshold for using a tree rather than list for a
515 * bin. The value reflects the approximate break-even point for
516 * using tree-based operations.
517 */
518 private static final int TREE_THRESHOLD = 8;
519
520 /**
521 * Minimum number of rebinnings per transfer step. Ranges are
522 * subdivided to allow multiple resizer threads. This value
523 * serves as a lower bound to avoid resizers encountering
524 * excessive memory contention. The value should be at least
525 * DEFAULT_CAPACITY.
526 */
527 private static final int MIN_TRANSFER_STRIDE = 16;
528
    /*
     * Encodings for Node hash fields. See above for explanation.
     * MOVED is the sign bit; normal node hashes always have it clear
     * (they are masked with HASH_BITS), so a negative hash field
     * unambiguously marks a forwarding node.
     */
    static final int MOVED     = 0x80000000; // hash field for forwarding nodes
    static final int HASH_BITS = 0x7fffffff; // usable bits of normal node hash

    /** Number of CPUs, to place bounds on some sizings */
    static final int NCPU = Runtime.getRuntime().availableProcessors();
537
538 /* ---------------- Counters -------------- */
539
540 // Adapted from LongAdder and Striped64.
541 // See their internal docs for explanation.
542
543 // A padded cell for distributing counts
544 static final class CounterCell {
545 volatile long p0, p1, p2, p3, p4, p5, p6;
546 volatile long value;
547 volatile long q0, q1, q2, q3, q4, q5, q6;
548 CounterCell(long x) { value = x; }
549 }
550
    /**
     * Holder for the thread-local hash code determining which
     * CounterCell to use. The code is initialized via the
     * counterHashCodeGenerator, but may be moved upon collisions.
     */
    static final class CounterHashCode {
        int code; // mutable probe value; per the javadoc above, it may be rehashed on collision
    }
559
    /**
     * Generates initial value for per-thread CounterHashCodes.
     */
    static final AtomicInteger counterHashCodeGenerator = new AtomicInteger();

    /**
     * Increment for counterHashCodeGenerator. See class ThreadLocal
     * for explanation; the same constant is used there to spread
     * successively generated seeds across the hash space.
     */
    static final int SEED_INCREMENT = 0x61c88647;

    /**
     * Per-thread counter hash codes. Shared across all instances.
     */
    static final ThreadLocal<CounterHashCode> threadCounterHashCode =
        new ThreadLocal<CounterHashCode>();
576
577 /* ---------------- Fields -------------- */
578
579 /**
580 * The array of bins. Lazily initialized upon first insertion.
581 * Size is always a power of two. Accessed directly by iterators.
582 */
583 transient volatile Node<V>[] table;
584
585 /**
586 * The next table to use; non-null only while resizing.
587 */
588 private transient volatile Node<V>[] nextTable;
589
590 /**
591 * Base counter value, used mainly when there is no contention,
592 * but also as a fallback during table initialization
593 * races. Updated via CAS.
594 */
595 private transient volatile long baseCount;
596
597 /**
598 * Table initialization and resizing control. When negative, the
599 * table is being initialized or resized: -1 for initialization,
600 * else -(1 + the number of active resizing threads). Otherwise,
601 * when table is null, holds the initial table size to use upon
602 * creation, or 0 for default. After initialization, holds the
603 * next element count value upon which to resize the table.
604 */
605 private transient volatile int sizeCtl;
606
607 /**
608 * The next table index (plus one) to split while resizing.
609 */
610 private transient volatile int transferIndex;
611
612 /**
613 * The least available table index to split while resizing.
614 */
615 private transient volatile int transferOrigin;
616
617 /**
618 * Spinlock (locked via CAS) used when resizing and/or creating Cells.
619 */
620 private transient volatile int counterBusy;
621
622 /**
623 * Table of counter cells. When non-null, size is a power of 2.
624 */
625 private transient volatile CounterCell[] counterCells;
626
627 // views
628 private transient KeySetView<K,V> keySet;
629 private transient ValuesView<K,V> values;
630 private transient EntrySetView<K,V> entrySet;
631
632 /** For serialization compatibility. Null unless serialized; see below */
633 private Segment<K,V>[] segments;
634
635 /* ---------------- Table element access -------------- */
636
637 /*
638 * Volatile access methods are used for table elements as well as
639 * elements of in-progress next table while resizing. Uses are
640 * null checked by callers, and implicitly bounds-checked, relying
641 * on the invariants that tab arrays have non-zero size, and all
642 * indices are masked with (tab.length - 1) which is never
643 * negative and always less than length. Note that, to be correct
644 * wrt arbitrary concurrency errors by users, bounds checks must
645 * operate on local variables, which accounts for some odd-looking
646 * inline assignments below.
647 */
648
649 @SuppressWarnings("unchecked") static final <V> Node<V> tabAt
650 (Node<V>[] tab, int i) { // used by Traverser
651 return (Node<V>)U.getObjectVolatile(tab, ((long)i << ASHIFT) + ABASE);
652 }
653
654 private static final <V> boolean casTabAt
655 (Node<V>[] tab, int i, Node<V> c, Node<V> v) {
656 return U.compareAndSwapObject(tab, ((long)i << ASHIFT) + ABASE, c, v);
657 }
658
659 private static final <V> void setTabAt
660 (Node<V>[] tab, int i, Node<V> v) {
661 U.putObjectVolatile(tab, ((long)i << ASHIFT) + ABASE, v);
662 }
663
664 /* ---------------- Nodes -------------- */
665
666 /**
667 * Key-value entry. Note that this is never exported out as a
668 * user-visible Map.Entry (see MapEntry below). Nodes with a hash
669 * field of MOVED are special, and do not contain user keys or
670 * values. Otherwise, keys are never null, and null val fields
671 * indicate that a node is in the process of being deleted or
672 * created. For purposes of read-only access, a key may be read
673 * before a val, but can only be used after checking val to be
674 * non-null.
675 */
    static class Node<V> {
        final int hash;        // MOVED for forwarding nodes, else transformed key hash (see class docs)
        final Object key;      // plain Object, not K: may hold special values (see overview)
        volatile V val;        // null indicates node being created or deleted (see javadoc above)
        volatile Node<V> next; // list link within a bin

        /**
         * Creates a new node. hash and key are immutable; val and
         * next are volatile so that readers can traverse bin lists
         * without locking.
         */
        Node(int hash, Object key, V val, Node<V> next) {
            this.hash = hash;
            this.key = key;
            this.val = val;
            this.next = next;
        }
    }
689
690 /* ---------------- TreeBins -------------- */
691
692 /**
693 * Nodes for use in TreeBins
694 */
695 static final class TreeNode<V> extends Node<V> {
696 TreeNode<V> parent; // red-black tree links
697 TreeNode<V> left;
698 TreeNode<V> right;
699 TreeNode<V> prev; // needed to unlink next upon deletion
700 boolean red;
701
702 TreeNode(int hash, Object key, V val, Node<V> next, TreeNode<V> parent) {
703 super(hash, key, val, next);
704 this.parent = parent;
705 }
706 }
707
708 /**
709 * A specialized form of red-black tree for use in bins
710 * whose size exceeds a threshold.
711 *
712 * TreeBins use a special form of comparison for search and
713 * related operations (which is the main reason we cannot use
714 * existing collections such as TreeMaps). TreeBins contain
715 * Comparable elements, but may contain others, as well as
716 * elements that are Comparable but not necessarily Comparable<T>
717 * for the same T, so we cannot invoke compareTo among them. To
718 * handle this, the tree is ordered primarily by hash value, then
719 * by getClass().getName() order, and then by Comparator order
720 * among elements of the same class. On lookup at a node, if
721 * elements are not comparable or compare as 0, both left and
722 * right children may need to be searched in the case of tied hash
723 * values. (This corresponds to the full list search that would be
724 * necessary if all elements were non-Comparable and had tied
725 * hashes.) The red-black balancing code is updated from
726 * pre-jdk-collections
727 * (http://gee.cs.oswego.edu/dl/classes/collections/RBCell.java)
728 * based in turn on Cormen, Leiserson, and Rivest "Introduction to
729 * Algorithms" (CLR).
730 *
731 * TreeBins also maintain a separate locking discipline than
732 * regular bins. Because they are forwarded via special MOVED
733 * nodes at bin heads (which can never change once established),
734 * we cannot use those nodes as locks. Instead, TreeBin
735 * extends AbstractQueuedSynchronizer to support a simple form of
736 * read-write lock. For update operations and table validation,
737 * the exclusive form of lock behaves in the same way as bin-head
738 * locks. However, lookups use shared read-lock mechanics to allow
739 * multiple readers in the absence of writers. Additionally,
740 * these lookups do not ever block: While the lock is not
741 * available, they proceed along the slow traversal path (via
742 * next-pointers) until the lock becomes available or the list is
743 * exhausted, whichever comes first. (These cases are not fast,
744 * but maximize aggregate expected throughput.) The AQS mechanics
745 * for doing this are straightforward. The lock state is held as
746 * AQS getState(). Read counts are negative; the write count (1)
747 * is positive. There are no signalling preferences among readers
748 * and writers. Since we don't need to export full Lock API, we
749 * just override the minimal AQS methods and use them directly.
750 */
    static final class TreeBin<V> extends AbstractQueuedSynchronizer {
        private static final long serialVersionUID = 2249069246763182397L;
        transient TreeNode<V> root;  // root of tree
        transient TreeNode<V> first; // head of next-pointer list

        /* AQS overrides: state > 0 is the (single) write lock; readers
         * count downward below zero (see class javadoc above). */
        public final boolean isHeldExclusively() { return getState() > 0; }
        public final boolean tryAcquire(int ignore) {
            if (compareAndSetState(0, 1)) {
                setExclusiveOwnerThread(Thread.currentThread());
                return true;
            }
            return false;
        }
        public final boolean tryRelease(int ignore) {
            setExclusiveOwnerThread(null);
            setState(0);
            return true;
        }
        public final int tryAcquireShared(int ignore) {
            for (int c;;) {
                if ((c = getState()) > 0)    // write-locked: fail (readers never block)
                    return -1;
                if (compareAndSetState(c, c -1))
                    return 1;
            }
        }
        public final boolean tryReleaseShared(int ignore) {
            int c;
            do {} while (!compareAndSetState(c = getState(), c + 1));
            return c == -1;                  // propagate only when last reader leaves
        }

        /** From CLR */
        private void rotateLeft(TreeNode<V> p) {
            if (p != null) {
                TreeNode<V> r = p.right, pp, rl;
                if ((rl = p.right = r.left) != null)
                    rl.parent = p;
                if ((pp = r.parent = p.parent) == null)
                    root = r;
                else if (pp.left == p)
                    pp.left = r;
                else
                    pp.right = r;
                r.left = p;
                p.parent = r;
            }
        }

        /** From CLR */
        private void rotateRight(TreeNode<V> p) {
            if (p != null) {
                TreeNode<V> l = p.left, pp, lr;
                if ((lr = p.left = l.right) != null)
                    lr.parent = p;
                if ((pp = l.parent = p.parent) == null)
                    root = l;
                else if (pp.right == p)
                    pp.right = l;
                else
                    pp.left = l;
                l.right = p;
                p.parent = l;
            }
        }

        /**
         * Returns the TreeNode (or null if not found) for the given key
         * starting at given root. Ordering is: hash, then class name,
         * then Comparable order; on ties both subtrees may be searched
         * (see class javadoc above).
         */
        @SuppressWarnings("unchecked") final TreeNode<V> getTreeNode
            (int h, Object k, TreeNode<V> p) {
            Class<?> c = k.getClass();
            while (p != null) {
                int dir, ph; Object pk; Class<?> pc;
                if ((ph = p.hash) == h) {
                    if ((pk = p.key) == k || k.equals(pk))
                        return p;
                    if (c != (pc = pk.getClass()) ||
                        !(k instanceof Comparable) ||
                        (dir = ((Comparable)k).compareTo((Comparable)pk)) == 0) {
                        if ((dir = (c == pc) ? 0 :
                             c.getName().compareTo(pc.getName())) == 0) {
                            TreeNode<V> r = null, pl, pr; // check both sides
                            if ((pr = p.right) != null && h >= pr.hash &&
                                (r = getTreeNode(h, k, pr)) != null)
                                return r;
                            else if ((pl = p.left) != null && h <= pl.hash)
                                dir = -1;
                            else // nothing there
                                return null;
                        }
                    }
                }
                else
                    dir = (h < ph) ? -1 : 1;
                p = (dir > 0) ? p.right : p.left;
            }
            return null;
        }

        /**
         * Wrapper for getTreeNode used by CHM.get. Tries to obtain
         * read-lock to call getTreeNode, but during failure to get
         * lock, searches along next links (never blocking).
         */
        final V getValue(int h, Object k) {
            Node<V> r = null;
            int c = getState(); // Must read lock state first
            for (Node<V> e = first; e != null; e = e.next) {
                if (c <= 0 && compareAndSetState(c, c - 1)) {
                    // acquired read lock inline; do the tree lookup
                    try {
                        r = getTreeNode(h, k, root);
                    } finally {
                        releaseShared(0);
                    }
                    break;
                }
                else if (e.hash == h && k.equals(e.key)) {
                    r = e;
                    break;
                }
                else
                    c = getState(); // re-sample and keep walking next-links
            }
            return r == null ? null : r.val;
        }

        /**
         * Finds or adds a node. Caller must hold the write lock.
         * @return null if added
         */
        @SuppressWarnings("unchecked") final TreeNode<V> putTreeNode
            (int h, Object k, V v) {
            Class<?> c = k.getClass();
            TreeNode<V> pp = root, p = null;
            int dir = 0;
            while (pp != null) { // find existing node or leaf to insert at
                int ph; Object pk; Class<?> pc;
                p = pp;
                if ((ph = p.hash) == h) {
                    if ((pk = p.key) == k || k.equals(pk))
                        return p;
                    if (c != (pc = pk.getClass()) ||
                        !(k instanceof Comparable) ||
                        (dir = ((Comparable)k).compareTo((Comparable)pk)) == 0) {
                        TreeNode<V> s = null, r = null, pr;
                        if ((dir = (c == pc) ? 0 :
                             c.getName().compareTo(pc.getName())) == 0) {
                            if ((pr = p.right) != null && h >= pr.hash &&
                                (r = getTreeNode(h, k, pr)) != null)
                                return r;
                            else // continue left
                                dir = -1;
                        }
                        else if ((pr = p.right) != null && h >= pr.hash)
                            s = pr;
                        if (s != null && (r = getTreeNode(h, k, s)) != null)
                            return r;
                    }
                }
                else
                    dir = (h < ph) ? -1 : 1;
                pp = (dir > 0) ? p.right : p.left;
            }

            // not found: link new node into both the list and the tree
            TreeNode<V> f = first;
            TreeNode<V> x = first = new TreeNode<V>(h, k, v, f, p);
            if (p == null)
                root = x;
            else { // attach and rebalance; adapted from CLR
                TreeNode<V> xp, xpp;
                if (f != null)
                    f.prev = x;
                if (dir <= 0)
                    p.left = x;
                else
                    p.right = x;
                x.red = true;
                while (x != null && (xp = x.parent) != null && xp.red &&
                       (xpp = xp.parent) != null) {
                    TreeNode<V> xppl = xpp.left;
                    if (xp == xppl) {
                        TreeNode<V> y = xpp.right;
                        if (y != null && y.red) {
                            y.red = false;
                            xp.red = false;
                            xpp.red = true;
                            x = xpp;
                        }
                        else {
                            if (x == xp.right) {
                                rotateLeft(x = xp);
                                xpp = (xp = x.parent) == null ? null : xp.parent;
                            }
                            if (xp != null) {
                                xp.red = false;
                                if (xpp != null) {
                                    xpp.red = true;
                                    rotateRight(xpp);
                                }
                            }
                        }
                    }
                    else {
                        TreeNode<V> y = xppl;
                        if (y != null && y.red) {
                            y.red = false;
                            xp.red = false;
                            xpp.red = true;
                            x = xpp;
                        }
                        else {
                            if (x == xp.left) {
                                rotateRight(x = xp);
                                xpp = (xp = x.parent) == null ? null : xp.parent;
                            }
                            if (xp != null) {
                                xp.red = false;
                                if (xpp != null) {
                                    xpp.red = true;
                                    rotateLeft(xpp);
                                }
                            }
                        }
                    }
                }
                TreeNode<V> r = root;
                if (r != null && r.red)
                    r.red = false;
            }
            return null;
        }

        /**
         * Removes the given node, that must be present before this
         * call. This is messier than typical red-black deletion code
         * because we cannot swap the contents of an interior node
         * with a leaf successor that is pinned by "next" pointers
         * that are accessible independently of lock. So instead we
         * swap the tree linkages. Caller must hold the write lock.
         */
        final void deleteTreeNode(TreeNode<V> p) {
            TreeNode<V> next = (TreeNode<V>)p.next; // unlink traversal pointers
            TreeNode<V> pred = p.prev;
            if (pred == null)
                first = next;
            else
                pred.next = next;
            if (next != null)
                next.prev = pred;
            TreeNode<V> replacement;
            TreeNode<V> pl = p.left;
            TreeNode<V> pr = p.right;
            if (pl != null && pr != null) {
                TreeNode<V> s = pr, sl;
                while ((sl = s.left) != null) // find successor
                    s = sl;
                boolean c = s.red; s.red = p.red; p.red = c; // swap colors
                TreeNode<V> sr = s.right;
                TreeNode<V> pp = p.parent;
                if (s == pr) { // p was s's direct parent
                    p.parent = s;
                    s.right = p;
                }
                else {
                    TreeNode<V> sp = s.parent;
                    if ((p.parent = sp) != null) {
                        if (s == sp.left)
                            sp.left = p;
                        else
                            sp.right = p;
                    }
                    if ((s.right = pr) != null)
                        pr.parent = s;
                }
                p.left = null;
                if ((p.right = sr) != null)
                    sr.parent = p;
                if ((s.left = pl) != null)
                    pl.parent = s;
                if ((s.parent = pp) == null)
                    root = s;
                else if (p == pp.left)
                    pp.left = s;
                else
                    pp.right = s;
                replacement = sr;
            }
            else
                replacement = (pl != null) ? pl : pr;
            TreeNode<V> pp = p.parent;
            if (replacement == null) {
                if (pp == null) {
                    root = null;
                    return;
                }
                replacement = p;
            }
            else {
                replacement.parent = pp;
                if (pp == null)
                    root = replacement;
                else if (p == pp.left)
                    pp.left = replacement;
                else
                    pp.right = replacement;
                p.left = p.right = p.parent = null;
            }
            if (!p.red) { // rebalance, from CLR
                TreeNode<V> x = replacement;
                while (x != null) {
                    TreeNode<V> xp, xpl;
                    if (x.red || (xp = x.parent) == null) {
                        x.red = false;
                        break;
                    }
                    if (x == (xpl = xp.left)) {
                        TreeNode<V> sib = xp.right;
                        if (sib != null && sib.red) {
                            sib.red = false;
                            xp.red = true;
                            rotateLeft(xp);
                            sib = (xp = x.parent) == null ? null : xp.right;
                        }
                        if (sib == null)
                            x = xp;
                        else {
                            TreeNode<V> sl = sib.left, sr = sib.right;
                            if ((sr == null || !sr.red) &&
                                (sl == null || !sl.red)) {
                                sib.red = true;
                                x = xp;
                            }
                            else {
                                if (sr == null || !sr.red) {
                                    if (sl != null)
                                        sl.red = false;
                                    sib.red = true;
                                    rotateRight(sib);
                                    sib = (xp = x.parent) == null ?
                                        null : xp.right;
                                }
                                if (sib != null) {
                                    sib.red = (xp == null) ? false : xp.red;
                                    if ((sr = sib.right) != null)
                                        sr.red = false;
                                }
                                if (xp != null) {
                                    xp.red = false;
                                    rotateLeft(xp);
                                }
                                x = root;
                            }
                        }
                    }
                    else { // symmetric
                        TreeNode<V> sib = xpl;
                        if (sib != null && sib.red) {
                            sib.red = false;
                            xp.red = true;
                            rotateRight(xp);
                            sib = (xp = x.parent) == null ? null : xp.left;
                        }
                        if (sib == null)
                            x = xp;
                        else {
                            TreeNode<V> sl = sib.left, sr = sib.right;
                            if ((sl == null || !sl.red) &&
                                (sr == null || !sr.red)) {
                                sib.red = true;
                                x = xp;
                            }
                            else {
                                if (sl == null || !sl.red) {
                                    if (sr != null)
                                        sr.red = false;
                                    sib.red = true;
                                    rotateLeft(sib);
                                    sib = (xp = x.parent) == null ?
                                        null : xp.left;
                                }
                                if (sib != null) {
                                    sib.red = (xp == null) ? false : xp.red;
                                    if ((sl = sib.left) != null)
                                        sl.red = false;
                                }
                                if (xp != null) {
                                    xp.red = false;
                                    rotateRight(xp);
                                }
                                x = root;
                            }
                        }
                    }
                }
            }
            if (p == replacement && (pp = p.parent) != null) {
                if (p == pp.left) // detach pointers
                    pp.left = null;
                else if (p == pp.right)
                    pp.right = null;
                p.parent = null;
            }
        }
    }
1158
1159 /* ---------------- Collision reduction methods -------------- */
1160
1161 /**
1162 * Spreads higher bits to lower, and also forces top bit to 0.
1163 * Because the table uses power-of-two masking, sets of hashes
1164 * that vary only in bits above the current mask will always
1165 * collide. (Among known examples are sets of Float keys holding
1166 * consecutive whole numbers in small tables.) To counter this,
1167 * we apply a transform that spreads the impact of higher bits
1168 * downward. There is a tradeoff between speed, utility, and
1169 * quality of bit-spreading. Because many common sets of hashes
1170 * are already reasonably distributed across bits (so don't benefit
1171 * from spreading), and because we use trees to handle large sets
1172 * of collisions in bins, we don't need excessively high quality.
1173 */
1174 private static final int spread(int h) {
1175 h ^= (h >>> 18) ^ (h >>> 12);
1176 return (h ^ (h >>> 10)) & HASH_BITS;
1177 }
1178
1179 /**
1180 * Replaces a list bin with a tree bin if key is comparable. Call
1181 * only when locked.
1182 */
1183 private final void replaceWithTreeBin(Node<V>[] tab, int index, Object key) {
1184 if (key instanceof Comparable) {
1185 TreeBin<V> t = new TreeBin<V>();
1186 for (Node<V> e = tabAt(tab, index); e != null; e = e.next)
1187 t.putTreeNode(e.hash, e.key, e.val);
1188 setTabAt(tab, index, new Node<V>(MOVED, t, null, null));
1189 }
1190 }
1191
1192 /* ---------------- Internal access and update methods -------------- */
1193
    /**
     * Implementation for get and containsKey: lock-free scan of the bin
     * for the spread hash. A negative node hash marks a control node
     * whose key is either a TreeBin (delegate the lookup to the tree)
     * or the next table of an in-progress resize (restart there). A
     * null val means the node is mid-update and is not matched.
     */
    @SuppressWarnings("unchecked") private final V internalGet(Object k) {
        int h = spread(k.hashCode());
        retry: for (Node<V>[] tab = table; tab != null;) {
            Node<V> e; Object ek; V ev; int eh; // locals to read fields once
            for (e = tabAt(tab, (tab.length - 1) & h); e != null; e = e.next) {
                if ((eh = e.hash) < 0) {
                    if ((ek = e.key) instanceof TreeBin) // search TreeBin
                        return ((TreeBin<V>)ek).getValue(h, k);
                    else {                      // restart with new table
                        tab = (Node<V>[])ek;
                        continue retry;
                    }
                }
                else if (eh == h && (ev = e.val) != null &&
                         ((ek = e.key) == k || k.equals(ek)))
                    return ev;
            }
            break;
        }
        return null;
    }
1216
1217 /**
1218 * Implementation for the four public remove/replace methods:
1219 * Replaces node value with v, conditional upon match of cv if
1220 * non-null. If resulting value is null, delete.
1221 */
1222 @SuppressWarnings("unchecked") private final V internalReplace
1223 (Object k, V v, Object cv) {
1224 int h = spread(k.hashCode());
1225 V oldVal = null;
1226 for (Node<V>[] tab = table;;) {
1227 Node<V> f; int i, fh; Object fk;
1228 if (tab == null ||
1229 (f = tabAt(tab, i = (tab.length - 1) & h)) == null)
1230 break;
1231 else if ((fh = f.hash) < 0) {
1232 if ((fk = f.key) instanceof TreeBin) {
1233 TreeBin<V> t = (TreeBin<V>)fk;
1234 boolean validated = false;
1235 boolean deleted = false;
1236 t.acquire(0);
1237 try {
1238 if (tabAt(tab, i) == f) {
1239 validated = true;
1240 TreeNode<V> p = t.getTreeNode(h, k, t.root);
1241 if (p != null) {
1242 V pv = p.val;
1243 if (cv == null || cv == pv || cv.equals(pv)) {
1244 oldVal = pv;
1245 if ((p.val = v) == null) {
1246 deleted = true;
1247 t.deleteTreeNode(p);
1248 }
1249 }
1250 }
1251 }
1252 } finally {
1253 t.release(0);
1254 }
1255 if (validated) {
1256 if (deleted)
1257 addCount(-1L, -1);
1258 break;
1259 }
1260 }
1261 else
1262 tab = (Node<V>[])fk;
1263 }
1264 else if (fh != h && f.next == null) // precheck
1265 break; // rules out possible existence
1266 else {
1267 boolean validated = false;
1268 boolean deleted = false;
1269 synchronized (f) {
1270 if (tabAt(tab, i) == f) {
1271 validated = true;
1272 for (Node<V> e = f, pred = null;;) {
1273 Object ek; V ev;
1274 if (e.hash == h &&
1275 ((ev = e.val) != null) &&
1276 ((ek = e.key) == k || k.equals(ek))) {
1277 if (cv == null || cv == ev || cv.equals(ev)) {
1278 oldVal = ev;
1279 if ((e.val = v) == null) {
1280 deleted = true;
1281 Node<V> en = e.next;
1282 if (pred != null)
1283 pred.next = en;
1284 else
1285 setTabAt(tab, i, en);
1286 }
1287 }
1288 break;
1289 }
1290 pred = e;
1291 if ((e = e.next) == null)
1292 break;
1293 }
1294 }
1295 }
1296 if (validated) {
1297 if (deleted)
1298 addCount(-1L, -1);
1299 break;
1300 }
1301 }
1302 }
1303 return oldVal;
1304 }
1305
1306 /*
1307 * Internal versions of insertion methods
1308 * All have the same basic structure as the first (internalPut):
1309 * 1. If table uninitialized, create
1310 * 2. If bin empty, try to CAS new node
1311 * 3. If bin stale, use new table
1312 * 4. if bin converted to TreeBin, validate and relay to TreeBin methods
1313 * 5. Lock and validate; if valid, scan and add or update
1314 *
1315 * The putAll method differs mainly in attempting to pre-allocate
1316 * enough table space, and also more lazily performs count updates
1317 * and checks.
1318 *
1319 * Most of the function-accepting methods can't be factored nicely
1320 * because they require different functional forms, so instead
1321 * sprawl out similar mechanics.
1322 */
1323
    /**
     * Implementation for put and putIfAbsent. Follows the standard
     * insertion structure (see block comment below): init table, CAS
     * into an empty bin without locking, relay to TreeBin, or lock the
     * bin head, scan, and append. Returns the previous value if the
     * key was present, else null after recording the addition via
     * addCount (len tracks bin size for the resize heuristic).
     */
    @SuppressWarnings("unchecked") private final V internalPut
        (K k, V v, boolean onlyIfAbsent) {
        if (k == null || v == null) throw new NullPointerException();
        int h = spread(k.hashCode());
        int len = 0;
        for (Node<V>[] tab = table;;) {
            int i, fh; Node<V> f; Object fk; V fv;
            if (tab == null)
                tab = initTable();
            else if ((f = tabAt(tab, i = (tab.length - 1) & h)) == null) {
                if (casTabAt(tab, i, null, new Node<V>(h, k, v, null)))
                    break;                   // no lock when adding to empty bin
            }
            else if ((fh = f.hash) < 0) {
                if ((fk = f.key) instanceof TreeBin) {
                    TreeBin<V> t = (TreeBin<V>)fk;
                    V oldVal = null;
                    t.acquire(0);
                    try {
                        if (tabAt(tab, i) == f) {
                            len = 2;
                            TreeNode<V> p = t.putTreeNode(h, k, v);
                            if (p != null) {
                                oldVal = p.val;
                                if (!onlyIfAbsent)
                                    p.val = v;
                            }
                        }
                    } finally {
                        t.release(0);
                    }
                    if (len != 0) {
                        if (oldVal != null)
                            return oldVal;
                        break;
                    }
                }
                else
                    tab = (Node<V>[])fk; // forwarded: continue in next table
            }
            else if (onlyIfAbsent && fh == h && (fv = f.val) != null &&
                     ((fk = f.key) == k || k.equals(fk))) // peek while nearby
                return fv;
            else {
                V oldVal = null;
                synchronized (f) {
                    if (tabAt(tab, i) == f) {
                        len = 1;
                        for (Node<V> e = f;; ++len) {
                            Object ek; V ev;
                            if (e.hash == h &&
                                (ev = e.val) != null &&
                                ((ek = e.key) == k || k.equals(ek))) {
                                oldVal = ev;
                                if (!onlyIfAbsent)
                                    e.val = v;
                                break;
                            }
                            Node<V> last = e;
                            if ((e = e.next) == null) {
                                last.next = new Node<V>(h, k, v, null);
                                if (len >= TREE_THRESHOLD)
                                    replaceWithTreeBin(tab, i, k);
                                break;
                            }
                        }
                    }
                }
                if (len != 0) {
                    if (oldVal != null)
                        return oldVal;
                    break;
                }
            }
        }
        addCount(1L, len);
        return null;
    }
1403
    /**
     * Implementation for computeIfAbsent. If the key is absent, runs
     * the mapping function and installs a non-null result. On the
     * empty-bin path a placeholder node (null val) is CASed in and
     * locked while the function runs; if the function yields null, the
     * placeholder is removed again in the finally block. Returns the
     * existing or computed value, or null if the function returned null.
     */
    @SuppressWarnings("unchecked") private final V internalComputeIfAbsent
        (K k, Fun<? super K, ? extends V> mf) {
        if (k == null || mf == null)
            throw new NullPointerException();
        int h = spread(k.hashCode());
        V val = null;
        int len = 0;
        for (Node<V>[] tab = table;;) {
            Node<V> f; int i; Object fk;
            if (tab == null)
                tab = initTable();
            else if ((f = tabAt(tab, i = (tab.length - 1) & h)) == null) {
                Node<V> node = new Node<V>(h, k, null, null);
                synchronized (node) {
                    if (casTabAt(tab, i, null, node)) {
                        len = 1;
                        try {
                            if ((val = mf.apply(k)) != null)
                                node.val = val;
                        } finally {
                            if (val == null) // undo placeholder on null result
                                setTabAt(tab, i, null);
                        }
                    }
                }
                if (len != 0)
                    break;
            }
            else if (f.hash < 0) {
                if ((fk = f.key) instanceof TreeBin) {
                    TreeBin<V> t = (TreeBin<V>)fk;
                    boolean added = false;
                    t.acquire(0);
                    try {
                        if (tabAt(tab, i) == f) {
                            len = 1;
                            TreeNode<V> p = t.getTreeNode(h, k, t.root);
                            if (p != null)
                                val = p.val;
                            else if ((val = mf.apply(k)) != null) {
                                added = true;
                                len = 2;
                                t.putTreeNode(h, k, val);
                            }
                        }
                    } finally {
                        t.release(0);
                    }
                    if (len != 0) {
                        if (!added)
                            return val;
                        break;
                    }
                }
                else
                    tab = (Node<V>[])fk; // forwarded: continue in next table
            }
            else {
                for (Node<V> e = f; e != null; e = e.next) { // prescan
                    Object ek; V ev;
                    if (e.hash == h && (ev = e.val) != null &&
                        ((ek = e.key) == k || k.equals(ek)))
                        return ev;
                }
                boolean added = false;
                synchronized (f) {
                    if (tabAt(tab, i) == f) {
                        len = 1;
                        for (Node<V> e = f;; ++len) {
                            Object ek; V ev;
                            if (e.hash == h &&
                                (ev = e.val) != null &&
                                ((ek = e.key) == k || k.equals(ek))) {
                                val = ev;
                                break;
                            }
                            Node<V> last = e;
                            if ((e = e.next) == null) {
                                if ((val = mf.apply(k)) != null) {
                                    added = true;
                                    last.next = new Node<V>(h, k, val, null);
                                    if (len >= TREE_THRESHOLD)
                                        replaceWithTreeBin(tab, i, k);
                                }
                                break;
                            }
                        }
                    }
                }
                if (len != 0) {
                    if (!added)
                        return val;
                    break;
                }
            }
        }
        if (val != null)
            addCount(1L, len);
        return val;
    }
1505
    /**
     * Implementation for compute (and computeIfPresent when
     * onlyIfPresent is true). Applies mf to the key and current value
     * (null if absent) under the bin lock; a non-null result replaces
     * or inserts, a null result deletes an existing node. delta tracks
     * the net size change (-1, 0, or +1) for the final addCount.
     */
    @SuppressWarnings("unchecked") private final V internalCompute
        (K k, boolean onlyIfPresent,
         BiFun<? super K, ? super V, ? extends V> mf) {
        if (k == null || mf == null)
            throw new NullPointerException();
        int h = spread(k.hashCode());
        V val = null;
        int delta = 0;
        int len = 0;
        for (Node<V>[] tab = table;;) {
            Node<V> f; int i, fh; Object fk;
            if (tab == null)
                tab = initTable();
            else if ((f = tabAt(tab, i = (tab.length - 1) & h)) == null) {
                if (onlyIfPresent) // absent: computeIfPresent is a no-op
                    break;
                Node<V> node = new Node<V>(h, k, null, null);
                synchronized (node) {
                    if (casTabAt(tab, i, null, node)) {
                        try {
                            len = 1;
                            if ((val = mf.apply(k, null)) != null) {
                                node.val = val;
                                delta = 1;
                            }
                        } finally {
                            if (delta == 0) // undo placeholder on null result
                                setTabAt(tab, i, null);
                        }
                    }
                }
                if (len != 0)
                    break;
            }
            else if ((fh = f.hash) < 0) {
                if ((fk = f.key) instanceof TreeBin) {
                    TreeBin<V> t = (TreeBin<V>)fk;
                    t.acquire(0);
                    try {
                        if (tabAt(tab, i) == f) {
                            len = 1;
                            TreeNode<V> p = t.getTreeNode(h, k, t.root);
                            if (p == null && onlyIfPresent)
                                break; // lock still released by finally
                            V pv = (p == null) ? null : p.val;
                            if ((val = mf.apply(k, pv)) != null) {
                                if (p != null)
                                    p.val = val;
                                else {
                                    len = 2;
                                    delta = 1;
                                    t.putTreeNode(h, k, val);
                                }
                            }
                            else if (p != null) {
                                delta = -1;
                                t.deleteTreeNode(p);
                            }
                        }
                    } finally {
                        t.release(0);
                    }
                    if (len != 0)
                        break;
                }
                else
                    tab = (Node<V>[])fk; // forwarded: continue in next table
            }
            else {
                synchronized (f) {
                    if (tabAt(tab, i) == f) {
                        len = 1;
                        for (Node<V> e = f, pred = null;; ++len) {
                            Object ek; V ev;
                            if (e.hash == h &&
                                (ev = e.val) != null &&
                                ((ek = e.key) == k || k.equals(ek))) {
                                val = mf.apply(k, ev);
                                if (val != null)
                                    e.val = val;
                                else {
                                    delta = -1;
                                    Node<V> en = e.next;
                                    if (pred != null)
                                        pred.next = en;
                                    else
                                        setTabAt(tab, i, en);
                                }
                                break;
                            }
                            pred = e;
                            if ((e = e.next) == null) {
                                if (!onlyIfPresent &&
                                    (val = mf.apply(k, null)) != null) {
                                    pred.next = new Node<V>(h, k, val, null);
                                    delta = 1;
                                    if (len >= TREE_THRESHOLD)
                                        replaceWithTreeBin(tab, i, k);
                                }
                                break;
                            }
                        }
                    }
                }
                if (len != 0)
                    break;
            }
        }
        if (delta != 0)
            addCount((long)delta, len);
        return val;
    }
1619
    /**
     * Implementation for merge. If the key is absent, inserts v;
     * otherwise replaces the current value with mf.apply(old, v),
     * deleting the node when that result is null. delta tracks the net
     * size change for the final addCount. Returns the value now mapped
     * (or null if the merge removed the entry).
     */
    @SuppressWarnings("unchecked") private final V internalMerge
        (K k, V v, BiFun<? super V, ? super V, ? extends V> mf) {
        if (k == null || v == null || mf == null)
            throw new NullPointerException();
        int h = spread(k.hashCode());
        V val = null;
        int delta = 0;
        int len = 0;
        for (Node<V>[] tab = table;;) {
            int i; Node<V> f; Object fk; V fv;
            if (tab == null)
                tab = initTable();
            else if ((f = tabAt(tab, i = (tab.length - 1) & h)) == null) {
                if (casTabAt(tab, i, null, new Node<V>(h, k, v, null))) {
                    delta = 1;
                    val = v;
                    break;
                }
            }
            else if (f.hash < 0) {
                if ((fk = f.key) instanceof TreeBin) {
                    TreeBin<V> t = (TreeBin<V>)fk;
                    t.acquire(0);
                    try {
                        if (tabAt(tab, i) == f) {
                            len = 1;
                            TreeNode<V> p = t.getTreeNode(h, k, t.root);
                            val = (p == null) ? v : mf.apply(p.val, v);
                            if (val != null) {
                                if (p != null)
                                    p.val = val;
                                else {
                                    len = 2;
                                    delta = 1;
                                    t.putTreeNode(h, k, val);
                                }
                            }
                            else if (p != null) {
                                delta = -1;
                                t.deleteTreeNode(p);
                            }
                        }
                    } finally {
                        t.release(0);
                    }
                    if (len != 0)
                        break;
                }
                else
                    tab = (Node<V>[])fk; // forwarded: continue in next table
            }
            else {
                synchronized (f) {
                    if (tabAt(tab, i) == f) {
                        len = 1;
                        for (Node<V> e = f, pred = null;; ++len) {
                            Object ek; V ev;
                            if (e.hash == h &&
                                (ev = e.val) != null &&
                                ((ek = e.key) == k || k.equals(ek))) {
                                val = mf.apply(ev, v);
                                if (val != null)
                                    e.val = val;
                                else {
                                    delta = -1;
                                    Node<V> en = e.next;
                                    if (pred != null)
                                        pred.next = en;
                                    else
                                        setTabAt(tab, i, en);
                                }
                                break;
                            }
                            pred = e;
                            if ((e = e.next) == null) {
                                val = v;
                                pred.next = new Node<V>(h, k, val, null);
                                delta = 1;
                                if (len >= TREE_THRESHOLD)
                                    replaceWithTreeBin(tab, i, k);
                                break;
                            }
                        }
                    }
                }
                if (len != 0)
                    break;
            }
        }
        if (delta != 0)
            addCount((long)delta, len);
        return val;
    }
1714
1715 /** Implementation for putAll */
1716 @SuppressWarnings("unchecked") private final void internalPutAll
1717 (Map<? extends K, ? extends V> m) {
1718 tryPresize(m.size());
1719 long delta = 0L; // number of uncommitted additions
1720 boolean npe = false; // to throw exception on exit for nulls
1721 try { // to clean up counts on other exceptions
1722 for (Map.Entry<?, ? extends V> entry : m.entrySet()) {
1723 Object k; V v;
1724 if (entry == null || (k = entry.getKey()) == null ||
1725 (v = entry.getValue()) == null) {
1726 npe = true;
1727 break;
1728 }
1729 int h = spread(k.hashCode());
1730 for (Node<V>[] tab = table;;) {
1731 int i; Node<V> f; int fh; Object fk;
1732 if (tab == null)
1733 tab = initTable();
1734 else if ((f = tabAt(tab, i = (tab.length - 1) & h)) == null){
1735 if (casTabAt(tab, i, null, new Node<V>(h, k, v, null))) {
1736 ++delta;
1737 break;
1738 }
1739 }
1740 else if ((fh = f.hash) < 0) {
1741 if ((fk = f.key) instanceof TreeBin) {
1742 TreeBin<V> t = (TreeBin<V>)fk;
1743 boolean validated = false;
1744 t.acquire(0);
1745 try {
1746 if (tabAt(tab, i) == f) {
1747 validated = true;
1748 TreeNode<V> p = t.getTreeNode(h, k, t.root);
1749 if (p != null)
1750 p.val = v;
1751 else {
1752 t.putTreeNode(h, k, v);
1753 ++delta;
1754 }
1755 }
1756 } finally {
1757 t.release(0);
1758 }
1759 if (validated)
1760 break;
1761 }
1762 else
1763 tab = (Node<V>[])fk;
1764 }
1765 else {
1766 int len = 0;
1767 synchronized (f) {
1768 if (tabAt(tab, i) == f) {
1769 len = 1;
1770 for (Node<V> e = f;; ++len) {
1771 Object ek; V ev;
1772 if (e.hash == h &&
1773 (ev = e.val) != null &&
1774 ((ek = e.key) == k || k.equals(ek))) {
1775 e.val = v;
1776 break;
1777 }
1778 Node<V> last = e;
1779 if ((e = e.next) == null) {
1780 ++delta;
1781 last.next = new Node<V>(h, k, v, null);
1782 if (len >= TREE_THRESHOLD)
1783 replaceWithTreeBin(tab, i, k);
1784 break;
1785 }
1786 }
1787 }
1788 }
1789 if (len != 0) {
1790 if (len > 1)
1791 addCount(delta, len);
1792 break;
1793 }
1794 }
1795 }
1796 }
1797 } finally {
1798 if (delta != 0L)
1799 addCount(delta, 2);
1800 }
1801 if (npe)
1802 throw new NullPointerException();
1803 }
1804
1805 /**
1806 * Implementation for clear. Steps through each bin, removing all
1807 * nodes.
1808 */
1809 @SuppressWarnings("unchecked") private final void internalClear() {
1810 long delta = 0L; // negative number of deletions
1811 int i = 0;
1812 Node<V>[] tab = table;
1813 while (tab != null && i < tab.length) {
1814 Node<V> f = tabAt(tab, i);
1815 if (f == null)
1816 ++i;
1817 else if (f.hash < 0) {
1818 Object fk;
1819 if ((fk = f.key) instanceof TreeBin) {
1820 TreeBin<V> t = (TreeBin<V>)fk;
1821 t.acquire(0);
1822 try {
1823 if (tabAt(tab, i) == f) {
1824 for (Node<V> p = t.first; p != null; p = p.next) {
1825 if (p.val != null) { // (currently always true)
1826 p.val = null;
1827 --delta;
1828 }
1829 }
1830 t.first = null;
1831 t.root = null;
1832 ++i;
1833 }
1834 } finally {
1835 t.release(0);
1836 }
1837 }
1838 else
1839 tab = (Node<V>[])fk;
1840 }
1841 else {
1842 synchronized (f) {
1843 if (tabAt(tab, i) == f) {
1844 for (Node<V> e = f; e != null; e = e.next) {
1845 if (e.val != null) { // (currently always true)
1846 e.val = null;
1847 --delta;
1848 }
1849 }
1850 setTabAt(tab, i, null);
1851 ++i;
1852 }
1853 }
1854 }
1855 }
1856 if (delta != 0L)
1857 addCount(delta, -1);
1858 }
1859
1860 /* ---------------- Table Initialization and Resizing -------------- */
1861
1862 /**
1863 * Returns a power of two table size for the given desired capacity.
1864 * See Hackers Delight, sec 3.2
1865 */
1866 private static final int tableSizeFor(int c) {
1867 int n = c - 1;
1868 n |= n >>> 1;
1869 n |= n >>> 2;
1870 n |= n >>> 4;
1871 n |= n >>> 8;
1872 n |= n >>> 16;
1873 return (n < 0) ? 1 : (n >= MAXIMUM_CAPACITY) ? MAXIMUM_CAPACITY : n + 1;
1874 }
1875
1876 /**
1877 * Initializes table, using the size recorded in sizeCtl.
1878 */
1879 @SuppressWarnings("unchecked") private final Node<V>[] initTable() {
1880 Node<V>[] tab; int sc;
1881 while ((tab = table) == null) {
1882 if ((sc = sizeCtl) < 0)
1883 Thread.yield(); // lost initialization race; just spin
1884 else if (U.compareAndSwapInt(this, SIZECTL, sc, -1)) {
1885 try {
1886 if ((tab = table) == null) {
1887 int n = (sc > 0) ? sc : DEFAULT_CAPACITY;
1888 @SuppressWarnings("rawtypes") Node[] tb = new Node[n];
1889 table = tab = (Node<V>[])tb;
1890 sc = n - (n >>> 2);
1891 }
1892 } finally {
1893 sizeCtl = sc;
1894 }
1895 break;
1896 }
1897 }
1898 return tab;
1899 }
1900
1901 /**
1902 * Adds to count, and if table is too small and not already
1903 * resizing, initiates transfer. If already resizing, helps
1904 * perform transfer if work is available. Rechecks occupancy
1905 * after a transfer to see if another resize is already needed
1906 * because resizings are lagging additions.
1907 *
1908 * @param x the count to add
1909 * @param check if <0, don't check resize, if <= 1 only check if uncontended
1910 */
1911 private final void addCount(long x, int check) {
1912 CounterCell[] as; long b, s;
1913 if ((as = counterCells) != null ||
1914 !U.compareAndSwapLong(this, BASECOUNT, b = baseCount, s = b + x)) {
1915 CounterHashCode hc; CounterCell a; long v; int m;
1916 boolean uncontended = true;
1917 if ((hc = threadCounterHashCode.get()) == null ||
1918 as == null || (m = as.length - 1) < 0 ||
1919 (a = as[m & hc.code]) == null ||
1920 !(uncontended =
1921 U.compareAndSwapLong(a, CELLVALUE, v = a.value, v + x))) {
1922 fullAddCount(x, hc, uncontended);
1923 return;
1924 }
1925 if (check <= 1)
1926 return;
1927 s = sumCount();
1928 }
1929 if (check >= 0) {
1930 Node<V>[] tab, nt; int sc;
1931 while (s >= (long)(sc = sizeCtl) && (tab = table) != null &&
1932 tab.length < MAXIMUM_CAPACITY) {
1933 if (sc < 0) {
1934 if (sc == -1 || transferIndex <= transferOrigin ||
1935 (nt = nextTable) == null)
1936 break;
1937 if (U.compareAndSwapInt(this, SIZECTL, sc, sc - 1))
1938 transfer(tab, nt);
1939 }
1940 else if (U.compareAndSwapInt(this, SIZECTL, sc, -2))
1941 transfer(tab, null);
1942 s = sumCount();
1943 }
1944 }
1945 }
1946
    /**
     * Tries to presize table to accommodate the given number of elements.
     *
     * @param size number of elements (doesn't need to be perfectly accurate)
     */
    @SuppressWarnings("unchecked") private final void tryPresize(int size) {
        // Target capacity: 1.5 * size rounded up to a power of two (clamped).
        int c = (size >= (MAXIMUM_CAPACITY >>> 1)) ? MAXIMUM_CAPACITY :
            tableSizeFor(size + (size >>> 1) + 1);
        int sc;
        while ((sc = sizeCtl) >= 0) { // stop if another thread is initializing/resizing
            Node<V>[] tab = table; int n;
            if (tab == null || (n = tab.length) == 0) {
                // Table not yet created: allocate directly at the larger of
                // the recorded capacity and the computed target.
                n = (sc > c) ? sc : c;
                if (U.compareAndSwapInt(this, SIZECTL, sc, -1)) {
                    try {
                        if (table == tab) { // recheck under init "lock"
                            @SuppressWarnings("rawtypes") Node[] tb = new Node[n];
                            table = (Node<V>[])tb;
                            sc = n - (n >>> 2); // 0.75 * n threshold
                        }
                    } finally {
                        sizeCtl = sc;
                    }
                }
            }
            else if (c <= sc || n >= MAXIMUM_CAPACITY)
                break; // already big enough, or cannot grow further
            else if (tab == table &&
                     U.compareAndSwapInt(this, SIZECTL, sc, -2))
                transfer(tab, null); // initiate a resize toward the target
        }
    }
1979
    /*
     * Moves and/or copies the nodes in each bin to new table. See
     * above for explanation.
     *
     * Workers claim descending stride-sized index ranges via CAS on
     * transferIndex. Each processed bin in the old table is replaced by a
     * forwarding node (hash MOVED, key = nextTab); a worker decrements its
     * share of sizeCtl when done, and the last one out installs nextTab.
     */
    @SuppressWarnings("unchecked") private final void transfer
        (Node<V>[] tab, Node<V>[] nextTab) {
        int n = tab.length, stride;
        if ((stride = (NCPU > 1) ? (n >>> 3) / NCPU : n) < MIN_TRANSFER_STRIDE)
            stride = MIN_TRANSFER_STRIDE; // subdivide range
        if (nextTab == null) {            // initiating
            try {
                @SuppressWarnings("rawtypes") Node[] tb = new Node[n << 1];
                nextTab = (Node<V>[])tb;
            } catch (Throwable ex) {      // try to cope with OOME
                sizeCtl = Integer.MAX_VALUE; // disable further resize attempts
                return;
            }
            nextTable = nextTab;
            transferOrigin = n;
            transferIndex = n;
            // Pre-fill not-yet-ready nextTab slots with a reverse-forwarder
            // (key = old tab) so readers landing there retry in the old table.
            Node<V> rev = new Node<V>(MOVED, tab, null, null);
            for (int k = n; k > 0;) {    // progressively reveal ready slots
                int nextk = (k > stride) ? k - stride : 0;
                for (int m = nextk; m < k; ++m)
                    nextTab[m] = rev;
                for (int m = n + nextk; m < n + k; ++m)
                    nextTab[m] = rev;
                // ordered write: slots below k are published as claimable
                U.putOrderedInt(this, TRANSFERORIGIN, k = nextk);
            }
        }
        int nextn = nextTab.length;
        Node<V> fwd = new Node<V>(MOVED, nextTab, null, null);
        boolean advance = true;
        for (int i = 0, bound = 0;;) {
            int nextIndex, nextBound; Node<V> f; Object fk;
            while (advance) { // claim the next bin index (or a whole new range)
                if (--i >= bound)
                    advance = false;
                else if ((nextIndex = transferIndex) <= transferOrigin) {
                    i = -1;               // no more claimable work
                    advance = false;
                }
                else if (U.compareAndSwapInt
                         (this, TRANSFERINDEX, nextIndex,
                          nextBound = (nextIndex > stride ?
                                       nextIndex - stride : 0))) {
                    bound = nextBound;    // claimed [nextBound, nextIndex)
                    i = nextIndex - 1;
                    advance = false;
                }
            }
            if (i < 0 || i >= n || i + n >= nextn) {
                // This worker is finished: bump sizeCtl back toward 0; the
                // worker that restores it to 0 (sc == -1) commits the table.
                for (int sc;;) {
                    if (U.compareAndSwapInt(this, SIZECTL, sc = sizeCtl, ++sc)) {
                        if (sc == -1) {
                            nextTable = null;
                            table = nextTab;
                            sizeCtl = (n << 1) - (n >>> 1); // 0.75 * (2n)
                        }
                        return;
                    }
                }
            }
            else if ((f = tabAt(tab, i)) == null) {
                // Empty bin: publish forwarder; clear both split targets.
                if (casTabAt(tab, i, null, fwd)) {
                    setTabAt(nextTab, i, null);
                    setTabAt(nextTab, i + n, null);
                    advance = true;
                }
            }
            else if (f.hash >= 0) {       // plain linked-list bin
                synchronized (f) {
                    if (tabAt(tab, i) == f) {
                        // Find the trailing run whose (hash & n) bit is
                        // constant; it can be reused without copying.
                        int runBit = f.hash & n;
                        Node<V> lastRun = f, lo = null, hi = null;
                        for (Node<V> p = f.next; p != null; p = p.next) {
                            int b = p.hash & n;
                            if (b != runBit) {
                                runBit = b;
                                lastRun = p;
                            }
                        }
                        if (runBit == 0)
                            lo = lastRun;
                        else
                            hi = lastRun;
                        // Clone the nodes before lastRun into lo/hi lists
                        // (split by the new high bit of the hash).
                        for (Node<V> p = f; p != lastRun; p = p.next) {
                            int ph = p.hash;
                            Object pk = p.key; V pv = p.val;
                            if ((ph & n) == 0)
                                lo = new Node<V>(ph, pk, pv, lo);
                            else
                                hi = new Node<V>(ph, pk, pv, hi);
                        }
                        setTabAt(nextTab, i, lo);
                        setTabAt(nextTab, i + n, hi);
                        setTabAt(tab, i, fwd);
                        advance = true;
                    }
                }
            }
            else if ((fk = f.key) instanceof TreeBin) { // tree bin
                TreeBin<V> t = (TreeBin<V>)fk;
                t.acquire(0); // exclusive tree lock
                try {
                    if (tabAt(tab, i) == f) {
                        // Split tree entries into low/high trees by hash bit.
                        TreeBin<V> lt = new TreeBin<V>();
                        TreeBin<V> ht = new TreeBin<V>();
                        int lc = 0, hc = 0;
                        for (Node<V> e = t.first; e != null; e = e.next) {
                            int h = e.hash;
                            Object k = e.key; V v = e.val;
                            if ((h & n) == 0) {
                                ++lc;
                                lt.putTreeNode(h, k, v);
                            }
                            else {
                                ++hc;
                                ht.putTreeNode(h, k, v);
                            }
                        }
                        Node<V> ln, hn; // throw away trees if too small
                        if (lc < TREE_THRESHOLD) {
                            ln = null;
                            for (Node<V> p = lt.first; p != null; p = p.next)
                                ln = new Node<V>(p.hash, p.key, p.val, ln);
                        }
                        else
                            ln = new Node<V>(MOVED, lt, null, null);
                        setTabAt(nextTab, i, ln);
                        if (hc < TREE_THRESHOLD) {
                            hn = null;
                            for (Node<V> p = ht.first; p != null; p = p.next)
                                hn = new Node<V>(p.hash, p.key, p.val, hn);
                        }
                        else
                            hn = new Node<V>(MOVED, ht, null, null);
                        setTabAt(nextTab, i + n, hn);
                        setTabAt(tab, i, fwd);
                        advance = true;
                    }
                } finally {
                    t.release(0);
                }
            }
            else
                advance = true; // already processed
        }
    }
2129
2130 /* ---------------- Counter support -------------- */
2131
2132 final long sumCount() {
2133 CounterCell[] as = counterCells; CounterCell a;
2134 long sum = baseCount;
2135 if (as != null) {
2136 for (int i = 0; i < as.length; ++i) {
2137 if ((a = as[i]) != null)
2138 sum += a.value;
2139 }
2140 }
2141 return sum;
2142 }
2143
    // See LongAdder version for explanation.
    // Slow path for addCount: initializes or expands the counterCells
    // table, or retries CASes under contention, rehashing the per-thread
    // probe code between attempts. counterBusy (0/1) acts as a spinlock
    // guarding cell-table creation and expansion.
    private final void fullAddCount(long x, CounterHashCode hc,
                                    boolean wasUncontended) {
        int h;
        if (hc == null) {
            // First striped access by this thread: generate and cache its probe.
            hc = new CounterHashCode();
            int s = counterHashCodeGenerator.addAndGet(SEED_INCREMENT);
            h = hc.code = (s == 0) ? 1 : s; // Avoid zero
            threadCounterHashCode.set(hc);
        }
        else
            h = hc.code;
        boolean collide = false;                // True if last slot nonempty
        for (;;) {
            CounterCell[] as; CounterCell a; int n; long v;
            if ((as = counterCells) != null && (n = as.length) > 0) {
                if ((a = as[(n - 1) & h]) == null) {
                    if (counterBusy == 0) {            // Try to attach new Cell
                        CounterCell r = new CounterCell(x); // Optimistic create
                        if (counterBusy == 0 &&
                            U.compareAndSwapInt(this, COUNTERBUSY, 0, 1)) {
                            boolean created = false;
                            try {               // Recheck under lock
                                CounterCell[] rs; int m, j;
                                if ((rs = counterCells) != null &&
                                    (m = rs.length) > 0 &&
                                    rs[j = (m - 1) & h] == null) {
                                    rs[j] = r;
                                    created = true;
                                }
                            } finally {
                                counterBusy = 0;
                            }
                            if (created)
                                break;
                            continue;           // Slot is now non-empty
                        }
                    }
                    collide = false;
                }
                else if (!wasUncontended)       // CAS already known to fail
                    wasUncontended = true;      // Continue after rehash
                else if (U.compareAndSwapLong(a, CELLVALUE, v = a.value, v + x))
                    break;
                else if (counterCells != as || n >= NCPU)
                    collide = false;            // At max size or stale
                else if (!collide)
                    collide = true;             // give it one more try before expanding
                else if (counterBusy == 0 &&
                         U.compareAndSwapInt(this, COUNTERBUSY, 0, 1)) {
                    try {
                        if (counterCells == as) {// Expand table unless stale
                            CounterCell[] rs = new CounterCell[n << 1];
                            for (int i = 0; i < n; ++i)
                                rs[i] = as[i];
                            counterCells = rs;
                        }
                    } finally {
                        counterBusy = 0;
                    }
                    collide = false;
                    continue;                   // Retry with expanded table
                }
                // xorshift rehash so this thread probes a different cell
                h ^= h << 13;                   // Rehash
                h ^= h >>> 17;
                h ^= h << 5;
            }
            else if (counterBusy == 0 && counterCells == as &&
                     U.compareAndSwapInt(this, COUNTERBUSY, 0, 1)) {
                boolean init = false;
                try {                           // Initialize table
                    if (counterCells == as) {
                        CounterCell[] rs = new CounterCell[2];
                        rs[h & 1] = new CounterCell(x);
                        counterCells = rs;
                        init = true;
                    }
                } finally {
                    counterBusy = 0;
                }
                if (init)
                    break;
            }
            else if (U.compareAndSwapLong(this, BASECOUNT, v = baseCount, v + x))
                break;                          // Fall back on using base
        }
        hc.code = h;                            // Record index for next time
    }
2232
2233 /* ----------------Table Traversal -------------- */
2234
2235 /**
2236 * Encapsulates traversal for methods such as containsValue; also
2237 * serves as a base class for other iterators and bulk tasks.
2238 *
2239 * At each step, the iterator snapshots the key ("nextKey") and
2240 * value ("nextVal") of a valid node (i.e., one that, at point of
2241 * snapshot, has a non-null user value). Because val fields can
2242 * change (including to null, indicating deletion), field nextVal
2243 * might not be accurate at point of use, but still maintains the
2244 * weak consistency property of holding a value that was once
2245 * valid. To support iterator.remove, the nextKey field is not
2246 * updated (nulled out) when the iterator cannot advance.
2247 *
2248 * Internal traversals directly access these fields, as in:
2249 * {@code while (it.advance() != null) { process(it.nextKey); }}
2250 *
2251 * Exported iterators must track whether the iterator has advanced
2252 * (in hasNext vs next) (by setting/checking/nulling field
2253 * nextVal), and then extract key, value, or key-value pairs as
2254 * return values of next().
2255 *
2256 * The iterator visits once each still-valid node that was
2257 * reachable upon iterator construction. It might miss some that
2258 * were added to a bin after the bin was visited, which is OK wrt
2259 * consistency guarantees. Maintaining this property in the face
2260 * of possible ongoing resizes requires a fair amount of
2261 * bookkeeping state that is difficult to optimize away amidst
2262 * volatile accesses. Even so, traversal maintains reasonable
2263 * throughput.
2264 *
2265 * Normally, iteration proceeds bin-by-bin traversing lists.
2266 * However, if the table has been resized, then all future steps
2267 * must traverse both the bin at the current index as well as at
2268 * (index + baseSize); and so on for further resizings. To
2269 * paranoically cope with potential sharing by users of iterators
     * across threads, iteration terminates if a bounds check fails
2271 * for a table read.
2272 *
2273 * This class extends CountedCompleter to streamline parallel
2274 * iteration in bulk operations. This adds only a few fields of
2275 * space overhead, which is small enough in cases where it is not
2276 * needed to not worry about it. Because CountedCompleter is
2277 * Serializable, but iterators need not be, we need to add warning
2278 * suppressions.
2279 */
    @SuppressWarnings("serial") static class Traverser<K,V,R>
        extends CountedCompleter<R> {
        final ConcurrentHashMap<K, V> map;
        Node<V> next;        // the next entry to use
        Object nextKey;      // cached key field of next
        V nextVal;           // cached val field of next
        Node<V>[] tab;       // current table; updated if resized
        int index;           // index of bin to use next
        int baseIndex;       // current index of initial table
        int baseLimit;       // index bound for initial table
        int baseSize;        // initial table size
        int batch;           // split control

        /** Creates iterator for all entries in the table. */
        Traverser(ConcurrentHashMap<K, V> map) {
            this.map = map;
        }

        /**
         * Creates iterator for split() methods and task constructors.
         * Takes the upper half of the parent's remaining [baseIndex,
         * baseLimit) range, shrinking the parent's limit accordingly.
         */
        Traverser(ConcurrentHashMap<K,V> map, Traverser<K,V,?> it, int batch) {
            super(it);
            this.batch = batch;
            if ((this.map = map) != null && it != null) { // split parent
                Node<V>[] t;
                if ((t = it.tab) == null &&
                    (t = it.tab = map.table) != null)
                    it.baseLimit = it.baseSize = t.length;
                this.tab = t;
                this.baseSize = it.baseSize;
                // midpoint of the parent's remaining range becomes the
                // parent's new limit and this traverser's start
                int hi = this.baseLimit = it.baseLimit;
                it.baseLimit = this.index = this.baseIndex =
                    (hi + it.baseIndex + 1) >>> 1;
            }
        }

        /**
         * Advances next; returns nextVal or null if terminated.
         * See above for explanation.
         */
        @SuppressWarnings("unchecked") final V advance() {
            Node<V> e = next;
            V ev = null;
            outer: do {
                if (e != null)                  // advance past used/skipped node
                    e = e.next;
                while (e == null) {             // get to next non-null bin
                    ConcurrentHashMap<K, V> m;
                    Node<V>[] t; int b, i, n; Object ek; //  must use locals
                    if ((t = tab) != null)
                        n = t.length;
                    else if ((m = map) != null && (t = tab = m.table) != null)
                        n = baseLimit = baseSize = t.length; // lazy init
                    else
                        break outer;
                    if ((b = baseIndex) >= baseLimit ||
                        (i = index) < 0 || i >= n)
                        break outer;            // range exhausted (or stale bounds)
                    if ((e = tabAt(t, i)) != null && e.hash < 0) {
                        // special node: tree bin, or forwarder to a new table
                        if ((ek = e.key) instanceof TreeBin)
                            e = ((TreeBin<V>)ek).first;
                        else {
                            tab = (Node<V>[])ek;
                            continue;           // restarts due to null val
                        }
                    }                           // visit upper slots if present
                    index = (i += baseSize) < n ? i : (baseIndex = b + 1);
                }
                nextKey = e.key;                // snapshot key before val check
            } while ((ev = e.val) == null);    // skip deleted or special nodes
            next = e;
            return nextVal = ev;
        }

        /** Removes the mapping for the last key returned (iterator.remove). */
        public final void remove() {
            Object k = nextKey;
            if (k == null && (advance() == null || (k = nextKey) == null))
                throw new IllegalStateException();
            map.internalReplace(k, null, null);
        }

        /** Returns true if more valid entries remain (advances if needed). */
        public final boolean hasNext() {
            return nextVal != null || advance() != null;
        }

        /** Enumeration-compatible alias for {@link #hasNext}. */
        public final boolean hasMoreElements() { return hasNext(); }

        public void compute() { } // default no-op CountedCompleter body

        /**
         * Returns a batch value > 0 if this task should (and must) be
         * split, if so, adding to pending count, and in any case
         * updating batch value. The initial batch value is approx
         * exp2 of the number of times (minus one) to split task by
         * two before executing leaf action. This value is faster to
         * compute and more convenient to use as a guide to splitting
         * than is the depth, since it is used while dividing by two
         * anyway.
         */
        final int preSplit() {
            ConcurrentHashMap<K, V> m; int b; Node<V>[] t; ForkJoinPool pool;
            if ((b = batch) < 0 && (m = map) != null) { // force initialization
                if ((t = tab) == null && (t = tab = m.table) != null)
                    baseLimit = baseSize = t.length;
                if (t != null) {
                    long n = m.sumCount();
                    // cap the initial batch at 8 * parallelism
                    int par = ((pool = getPool()) == null) ?
                        ForkJoinPool.getCommonPoolParallelism() :
                        pool.getParallelism();
                    int sp = par << 3; // slack of 8
                    b = (n <= 0L) ? 0 : (n < (long)sp) ? (int)n : sp;
                }
            }
            b = (b <= 1 || baseIndex == baseLimit) ? 0 : (b >>> 1);
            if ((batch = b) > 0)
                addToPendingCount(1);
            return b;
        }

    }
2399
2400 /* ---------------- Public operations -------------- */
2401
    /**
     * Creates a new, empty map with the default initial table size (16).
     */
    public ConcurrentHashMap() {
        // sizeCtl stays 0: table is lazily allocated at DEFAULT_CAPACITY
    }
2407
2408 /**
2409 * Creates a new, empty map with an initial table size
2410 * accommodating the specified number of elements without the need
2411 * to dynamically resize.
2412 *
2413 * @param initialCapacity The implementation performs internal
2414 * sizing to accommodate this many elements.
2415 * @throws IllegalArgumentException if the initial capacity of
2416 * elements is negative
2417 */
2418 public ConcurrentHashMap(int initialCapacity) {
2419 if (initialCapacity < 0)
2420 throw new IllegalArgumentException();
2421 int cap = ((initialCapacity >= (MAXIMUM_CAPACITY >>> 1)) ?
2422 MAXIMUM_CAPACITY :
2423 tableSizeFor(initialCapacity + (initialCapacity >>> 1) + 1));
2424 this.sizeCtl = cap;
2425 }
2426
    /**
     * Creates a new map with the same mappings as the given map.
     *
     * @param m the map
     */
    public ConcurrentHashMap(Map<? extends K, ? extends V> m) {
        this.sizeCtl = DEFAULT_CAPACITY; // lazy table starts at the default size
        internalPutAll(m);
    }
2436
    /**
     * Creates a new, empty map with an initial table size based on
     * the given number of elements ({@code initialCapacity}) and
     * initial table density ({@code loadFactor}).
     *
     * @param initialCapacity the initial capacity. The implementation
     * performs internal sizing to accommodate this many elements,
     * given the specified load factor.
     * @param loadFactor the load factor (table density) for
     * establishing the initial table size
     * @throws IllegalArgumentException if the initial capacity of
     * elements is negative or the load factor is nonpositive
     *
     * @since 1.6
     */
    public ConcurrentHashMap(int initialCapacity, float loadFactor) {
        // delegate with concurrencyLevel 1 (hint only; unused for locking)
        this(initialCapacity, loadFactor, 1);
    }
2455
2456 /**
2457 * Creates a new, empty map with an initial table size based on
2458 * the given number of elements ({@code initialCapacity}), table
2459 * density ({@code loadFactor}), and number of concurrently
2460 * updating threads ({@code concurrencyLevel}).
2461 *
2462 * @param initialCapacity the initial capacity. The implementation
2463 * performs internal sizing to accommodate this many elements,
2464 * given the specified load factor.
2465 * @param loadFactor the load factor (table density) for
2466 * establishing the initial table size
2467 * @param concurrencyLevel the estimated number of concurrently
2468 * updating threads. The implementation may use this value as
2469 * a sizing hint.
2470 * @throws IllegalArgumentException if the initial capacity is
2471 * negative or the load factor or concurrencyLevel are
2472 * nonpositive
2473 */
2474 public ConcurrentHashMap(int initialCapacity,
2475 float loadFactor, int concurrencyLevel) {
2476 if (!(loadFactor > 0.0f) || initialCapacity < 0 || concurrencyLevel <= 0)
2477 throw new IllegalArgumentException();
2478 if (initialCapacity < concurrencyLevel) // Use at least as many bins
2479 initialCapacity = concurrencyLevel; // as estimated threads
2480 long size = (long)(1.0 + (long)initialCapacity / loadFactor);
2481 int cap = (size >= (long)MAXIMUM_CAPACITY) ?
2482 MAXIMUM_CAPACITY : tableSizeFor((int)size);
2483 this.sizeCtl = cap;
2484 }
2485
2486 /**
2487 * Creates a new {@link Set} backed by a ConcurrentHashMap
2488 * from the given type to {@code Boolean.TRUE}.
2489 *
2490 * @return the new set
2491 */
2492 public static <K> KeySetView<K,Boolean> newKeySet() {
2493 return new KeySetView<K,Boolean>(new ConcurrentHashMap<K,Boolean>(),
2494 Boolean.TRUE);
2495 }
2496
2497 /**
2498 * Creates a new {@link Set} backed by a ConcurrentHashMap
2499 * from the given type to {@code Boolean.TRUE}.
2500 *
2501 * @param initialCapacity The implementation performs internal
2502 * sizing to accommodate this many elements.
2503 * @throws IllegalArgumentException if the initial capacity of
2504 * elements is negative
2505 * @return the new set
2506 */
2507 public static <K> KeySetView<K,Boolean> newKeySet(int initialCapacity) {
2508 return new KeySetView<K,Boolean>
2509 (new ConcurrentHashMap<K,Boolean>(initialCapacity), Boolean.TRUE);
2510 }
2511
2512 /**
2513 * {@inheritDoc}
2514 */
2515 public boolean isEmpty() {
2516 return sumCount() <= 0L; // ignore transient negative values
2517 }
2518
2519 /**
2520 * {@inheritDoc}
2521 */
2522 public int size() {
2523 long n = sumCount();
2524 return ((n < 0L) ? 0 :
2525 (n > (long)Integer.MAX_VALUE) ? Integer.MAX_VALUE :
2526 (int)n);
2527 }
2528
2529 /**
2530 * Returns the number of mappings. This method should be used
2531 * instead of {@link #size} because a ConcurrentHashMap may
2532 * contain more mappings than can be represented as an int. The
2533 * value returned is an estimate; the actual count may differ if
2534 * there are concurrent insertions or removals.
2535 *
2536 * @return the number of mappings
2537 */
2538 public long mappingCount() {
2539 long n = sumCount();
2540 return (n < 0L) ? 0L : n; // ignore transient negative values
2541 }
2542
2543 /**
2544 * Returns the value to which the specified key is mapped,
2545 * or {@code null} if this map contains no mapping for the key.
2546 *
2547 * <p>More formally, if this map contains a mapping from a key
2548 * {@code k} to a value {@code v} such that {@code key.equals(k)},
2549 * then this method returns {@code v}; otherwise it returns
2550 * {@code null}. (There can be at most one such mapping.)
2551 *
2552 * @throws NullPointerException if the specified key is null
2553 */
    public V get(Object key) {
        return internalGet(key); // lock-free lookup
    }
2557
2558 /**
2559 * Returns the value to which the specified key is mapped,
2560 * or the given defaultValue if this map contains no mapping for the key.
2561 *
2562 * @param key the key
2563 * @param defaultValue the value to return if this map contains
2564 * no mapping for the given key
2565 * @return the mapping for the key, if present; else the defaultValue
2566 * @throws NullPointerException if the specified key is null
2567 */
2568 public V getValueOrDefault(Object key, V defaultValue) {
2569 V v;
2570 return (v = internalGet(key)) == null ? defaultValue : v;
2571 }
2572
2573 /**
2574 * Tests if the specified object is a key in this table.
2575 *
2576 * @param key possible key
2577 * @return {@code true} if and only if the specified object
2578 * is a key in this table, as determined by the
2579 * {@code equals} method; {@code false} otherwise
2580 * @throws NullPointerException if the specified key is null
2581 */
    public boolean containsKey(Object key) {
        // non-null result implies a mapping exists (values are never null)
        return internalGet(key) != null;
    }
2585
2586 /**
2587 * Returns {@code true} if this map maps one or more keys to the
2588 * specified value. Note: This method may require a full traversal
2589 * of the map, and is much slower than method {@code containsKey}.
2590 *
2591 * @param value value whose presence in this map is to be tested
2592 * @return {@code true} if this map maps one or more keys to the
2593 * specified value
2594 * @throws NullPointerException if the specified value is null
2595 */
2596 public boolean containsValue(Object value) {
2597 if (value == null)
2598 throw new NullPointerException();
2599 V v;
2600 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
2601 while ((v = it.advance()) != null) {
2602 if (v == value || value.equals(v))
2603 return true;
2604 }
2605 return false;
2606 }
2607
2608 /**
2609 * Legacy method testing if some key maps into the specified value
2610 * in this table. This method is identical in functionality to
2611 * {@link #containsValue}, and exists solely to ensure
2612 * full compatibility with class {@link java.util.Hashtable},
2613 * which supported this method prior to introduction of the
2614 * Java Collections framework.
2615 *
2616 * @param value a value to search for
2617 * @return {@code true} if and only if some key maps to the
2618 * {@code value} argument in this table as
2619 * determined by the {@code equals} method;
2620 * {@code false} otherwise
2621 * @throws NullPointerException if the specified value is null
2622 */
    @Deprecated public boolean contains(Object value) {
        // Hashtable-era alias; identical to containsValue
        return containsValue(value);
    }
2626
2627 /**
2628 * Maps the specified key to the specified value in this table.
2629 * Neither the key nor the value can be null.
2630 *
2631 * <p>The value can be retrieved by calling the {@code get} method
2632 * with a key that is equal to the original key.
2633 *
2634 * @param key key with which the specified value is to be associated
2635 * @param value value to be associated with the specified key
2636 * @return the previous value associated with {@code key}, or
2637 * {@code null} if there was no mapping for {@code key}
2638 * @throws NullPointerException if the specified key or value is null
2639 */
    public V put(K key, V value) {
        return internalPut(key, value, false); // false: overwrite existing mapping
    }
2643
2644 /**
2645 * {@inheritDoc}
2646 *
2647 * @return the previous value associated with the specified key,
2648 * or {@code null} if there was no mapping for the key
2649 * @throws NullPointerException if the specified key or value is null
2650 */
    public V putIfAbsent(K key, V value) {
        return internalPut(key, value, true); // true: keep existing mapping if present
    }
2654
2655 /**
2656 * Copies all of the mappings from the specified map to this one.
2657 * These mappings replace any mappings that this map had for any of the
2658 * keys currently in the specified map.
2659 *
2660 * @param m mappings to be stored in this map
2661 */
    public void putAll(Map<? extends K, ? extends V> m) {
        internalPutAll(m); // presizes, then inserts each mapping
    }
2665
2666 /**
2667 * If the specified key is not already associated with a value,
2668 * computes its value using the given mappingFunction and enters
2669 * it into the map unless null. This is equivalent to
2670 * <pre> {@code
2671 * if (map.containsKey(key))
2672 * return map.get(key);
2673 * value = mappingFunction.apply(key);
2674 * if (value != null)
2675 * map.put(key, value);
2676 * return value;}</pre>
2677 *
2678 * except that the action is performed atomically. If the
2679 * function returns {@code null} no mapping is recorded. If the
2680 * function itself throws an (unchecked) exception, the exception
2681 * is rethrown to its caller, and no mapping is recorded. Some
2682 * attempted update operations on this map by other threads may be
2683 * blocked while computation is in progress, so the computation
2684 * should be short and simple, and must not attempt to update any
2685 * other mappings of this Map. The most appropriate usage is to
2686 * construct a new object serving as an initial mapped value, or
2687 * memoized result, as in:
2688 *
2689 * <pre> {@code
2690 * map.computeIfAbsent(key, new Fun<K, V>() {
2691 * public V map(K k) { return new Value(f(k)); }});}</pre>
2692 *
2693 * @param key key with which the specified value is to be associated
2694 * @param mappingFunction the function to compute a value
2695 * @return the current (existing or computed) value associated with
2696 * the specified key, or null if the computed value is null
2697 * @throws NullPointerException if the specified key or mappingFunction
2698 * is null
2699 * @throws IllegalStateException if the computation detectably
2700 * attempts a recursive update to this map that would
2701 * otherwise never complete
2702 * @throws RuntimeException or Error if the mappingFunction does so,
2703 * in which case the mapping is left unestablished
2704 */
    public V computeIfAbsent
        (K key, Fun<? super K, ? extends V> mappingFunction) {
        return internalComputeIfAbsent(key, mappingFunction); // atomic per-bin
    }
2709
2710 /**
2711 * If the given key is present, computes a new mapping value given a key and
2712 * its current mapped value. This is equivalent to
2713 * <pre> {@code
2714 * if (map.containsKey(key)) {
2715 * value = remappingFunction.apply(key, map.get(key));
2716 * if (value != null)
2717 * map.put(key, value);
2718 * else
2719 * map.remove(key);
2720 * }
2721 * }</pre>
2722 *
2723 * except that the action is performed atomically. If the
2724 * function returns {@code null}, the mapping is removed. If the
2725 * function itself throws an (unchecked) exception, the exception
2726 * is rethrown to its caller, and the current mapping is left
2727 * unchanged. Some attempted update operations on this map by
2728 * other threads may be blocked while computation is in progress,
2729 * so the computation should be short and simple, and must not
2730 * attempt to update any other mappings of this Map. For example,
2731 * to either create or append new messages to a value mapping:
2732 *
2733 * @param key key with which the specified value is to be associated
2734 * @param remappingFunction the function to compute a value
2735 * @return the new value associated with the specified key, or null if none
2736 * @throws NullPointerException if the specified key or remappingFunction
2737 * is null
2738 * @throws IllegalStateException if the computation detectably
2739 * attempts a recursive update to this map that would
2740 * otherwise never complete
2741 * @throws RuntimeException or Error if the remappingFunction does so,
2742 * in which case the mapping is unchanged
2743 */
    public V computeIfPresent
        (K key, BiFun<? super K, ? super V, ? extends V> remappingFunction) {
        return internalCompute(key, true, remappingFunction); // true: only if present
    }
2748
2749 /**
2750 * Computes a new mapping value given a key and
2751 * its current mapped value (or {@code null} if there is no current
2752 * mapping). This is equivalent to
2753 * <pre> {@code
2754 * value = remappingFunction.apply(key, map.get(key));
2755 * if (value != null)
2756 * map.put(key, value);
2757 * else
2758 * map.remove(key);
2759 * }</pre>
2760 *
2761 * except that the action is performed atomically. If the
2762 * function returns {@code null}, the mapping is removed. If the
2763 * function itself throws an (unchecked) exception, the exception
2764 * is rethrown to its caller, and the current mapping is left
2765 * unchanged. Some attempted update operations on this map by
2766 * other threads may be blocked while computation is in progress,
2767 * so the computation should be short and simple, and must not
2768 * attempt to update any other mappings of this Map. For example,
2769 * to either create or append new messages to a value mapping:
2770 *
2771 * <pre> {@code
2772 * Map<Key, String> map = ...;
2773 * final String msg = ...;
2774 * map.compute(key, new BiFun<Key, String, String>() {
2775 * public String apply(Key k, String v) {
2776 * return (v == null) ? msg : v + msg;});}}</pre>
2777 *
2778 * @param key key with which the specified value is to be associated
2779 * @param remappingFunction the function to compute a value
2780 * @return the new value associated with the specified key, or null if none
2781 * @throws NullPointerException if the specified key or remappingFunction
2782 * is null
2783 * @throws IllegalStateException if the computation detectably
2784 * attempts a recursive update to this map that would
2785 * otherwise never complete
2786 * @throws RuntimeException or Error if the remappingFunction does so,
2787 * in which case the mapping is unchanged
2788 */
    public V compute
        (K key, BiFun<? super K, ? super V, ? extends V> remappingFunction) {
        return internalCompute(key, false, remappingFunction); // false: compute even if absent
    }
2793
2794 /**
2795 * If the specified key is not already associated
2796 * with a value, associate it with the given value.
2797 * Otherwise, replace the value with the results of
2798 * the given remapping function. This is equivalent to:
     * <pre> {@code
     * if (!map.containsKey(key))
     *   map.put(key, value);
     * else {
     *   newValue = remappingFunction.apply(map.get(key), value);
     *   if (newValue != null)
     *     map.put(key, newValue);
     *   else
     *     map.remove(key);
     * }
     * }</pre>
2810 * except that the action is performed atomically. If the
2811 * function returns {@code null}, the mapping is removed. If the
2812 * function itself throws an (unchecked) exception, the exception
2813 * is rethrown to its caller, and the current mapping is left
2814 * unchanged. Some attempted update operations on this map by
2815 * other threads may be blocked while computation is in progress,
2816 * so the computation should be short and simple, and must not
2817 * attempt to update any other mappings of this Map.
2818 */
2819 public V merge
2820 (K key, V value,
2821 BiFun<? super V, ? super V, ? extends V> remappingFunction) {
2822 return internalMerge(key, value, remappingFunction);
2823 }
2824
2825 /**
2826 * Removes the key (and its corresponding value) from this map.
2827 * This method does nothing if the key is not in the map.
2828 *
2829 * @param key the key that needs to be removed
2830 * @return the previous value associated with {@code key}, or
2831 * {@code null} if there was no mapping for {@code key}
2832 * @throws NullPointerException if the specified key is null
2833 */
2834 public V remove(Object key) {
2835 return internalReplace(key, null, null);
2836 }
2837
2838 /**
2839 * {@inheritDoc}
2840 *
2841 * @throws NullPointerException if the specified key is null
2842 */
2843 public boolean remove(Object key, Object value) {
2844 return value != null && internalReplace(key, null, value) != null;
2845 }
2846
2847 /**
2848 * {@inheritDoc}
2849 *
2850 * @throws NullPointerException if any of the arguments are null
2851 */
2852 public boolean replace(K key, V oldValue, V newValue) {
2853 if (key == null || oldValue == null || newValue == null)
2854 throw new NullPointerException();
2855 return internalReplace(key, newValue, oldValue) != null;
2856 }
2857
2858 /**
2859 * {@inheritDoc}
2860 *
2861 * @return the previous value associated with the specified key,
2862 * or {@code null} if there was no mapping for the key
2863 * @throws NullPointerException if the specified key or value is null
2864 */
2865 public V replace(K key, V value) {
2866 if (key == null || value == null)
2867 throw new NullPointerException();
2868 return internalReplace(key, value, null);
2869 }
2870
    /**
     * Removes all of the mappings from this map.
     */
    public void clear() {
        internalClear(); // all removal work is done by the internal version
    }
2877
2878 /**
2879 * Returns a {@link Set} view of the keys contained in this map.
2880 * The set is backed by the map, so changes to the map are
2881 * reflected in the set, and vice-versa.
2882 *
2883 * @return the set view
2884 */
2885 public KeySetView<K,V> keySet() {
2886 KeySetView<K,V> ks = keySet;
2887 return (ks != null) ? ks : (keySet = new KeySetView<K,V>(this, null));
2888 }
2889
2890 /**
2891 * Returns a {@link Set} view of the keys in this map, using the
2892 * given common mapped value for any additions (i.e., {@link
2893 * Collection#add} and {@link Collection#addAll}). This is of
2894 * course only appropriate if it is acceptable to use the same
2895 * value for all additions from this view.
2896 *
2897 * @param mappedValue the mapped value to use for any
2898 * additions.
2899 * @return the set view
2900 * @throws NullPointerException if the mappedValue is null
2901 */
2902 public KeySetView<K,V> keySet(V mappedValue) {
2903 if (mappedValue == null)
2904 throw new NullPointerException();
2905 return new KeySetView<K,V>(this, mappedValue);
2906 }
2907
2908 /**
2909 * Returns a {@link Collection} view of the values contained in this map.
2910 * The collection is backed by the map, so changes to the map are
2911 * reflected in the collection, and vice-versa.
2912 */
2913 public ValuesView<K,V> values() {
2914 ValuesView<K,V> vs = values;
2915 return (vs != null) ? vs : (values = new ValuesView<K,V>(this));
2916 }
2917
2918 /**
2919 * Returns a {@link Set} view of the mappings contained in this map.
2920 * The set is backed by the map, so changes to the map are
2921 * reflected in the set, and vice-versa. The set supports element
2922 * removal, which removes the corresponding mapping from the map,
2923 * via the {@code Iterator.remove}, {@code Set.remove},
2924 * {@code removeAll}, {@code retainAll}, and {@code clear}
2925 * operations. It does not support the {@code add} or
2926 * {@code addAll} operations.
2927 *
2928 * <p>The view's {@code iterator} is a "weakly consistent" iterator
2929 * that will never throw {@link ConcurrentModificationException},
2930 * and guarantees to traverse elements as they existed upon
2931 * construction of the iterator, and may (but is not guaranteed to)
2932 * reflect any modifications subsequent to construction.
2933 */
2934 public Set<Map.Entry<K,V>> entrySet() {
2935 EntrySetView<K,V> es = entrySet;
2936 return (es != null) ? es : (entrySet = new EntrySetView<K,V>(this));
2937 }
2938
2939 /**
2940 * Returns an enumeration of the keys in this table.
2941 *
2942 * @return an enumeration of the keys in this table
2943 * @see #keySet()
2944 */
2945 public Enumeration<K> keys() {
2946 return new KeyIterator<K,V>(this);
2947 }
2948
2949 /**
2950 * Returns an enumeration of the values in this table.
2951 *
2952 * @return an enumeration of the values in this table
2953 * @see #values()
2954 */
2955 public Enumeration<V> elements() {
2956 return new ValueIterator<K,V>(this);
2957 }
2958
2959 /**
2960 * Returns a partitionable iterator of the keys in this map.
2961 *
2962 * @return a partitionable iterator of the keys in this map
2963 */
2964 public Spliterator<K> keySpliterator() {
2965 return new KeyIterator<K,V>(this);
2966 }
2967
2968 /**
2969 * Returns a partitionable iterator of the values in this map.
2970 *
2971 * @return a partitionable iterator of the values in this map
2972 */
2973 public Spliterator<V> valueSpliterator() {
2974 return new ValueIterator<K,V>(this);
2975 }
2976
2977 /**
2978 * Returns a partitionable iterator of the entries in this map.
2979 *
2980 * @return a partitionable iterator of the entries in this map
2981 */
2982 public Spliterator<Map.Entry<K,V>> entrySpliterator() {
2983 return new EntryIterator<K,V>(this);
2984 }
2985
2986 /**
2987 * Returns the hash code value for this {@link Map}, i.e.,
2988 * the sum of, for each key-value pair in the map,
2989 * {@code key.hashCode() ^ value.hashCode()}.
2990 *
2991 * @return the hash code value for this map
2992 */
2993 public int hashCode() {
2994 int h = 0;
2995 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
2996 V v;
2997 while ((v = it.advance()) != null) {
2998 h += it.nextKey.hashCode() ^ v.hashCode();
2999 }
3000 return h;
3001 }
3002
3003 /**
3004 * Returns a string representation of this map. The string
3005 * representation consists of a list of key-value mappings (in no
3006 * particular order) enclosed in braces ("{@code {}}"). Adjacent
3007 * mappings are separated by the characters {@code ", "} (comma
3008 * and space). Each key-value mapping is rendered as the key
3009 * followed by an equals sign ("{@code =}") followed by the
3010 * associated value.
3011 *
3012 * @return a string representation of this map
3013 */
3014 public String toString() {
3015 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3016 StringBuilder sb = new StringBuilder();
3017 sb.append('{');
3018 V v;
3019 if ((v = it.advance()) != null) {
3020 for (;;) {
3021 Object k = it.nextKey;
3022 sb.append(k == this ? "(this Map)" : k);
3023 sb.append('=');
3024 sb.append(v == this ? "(this Map)" : v);
3025 if ((v = it.advance()) == null)
3026 break;
3027 sb.append(',').append(' ');
3028 }
3029 }
3030 return sb.append('}').toString();
3031 }
3032
3033 /**
3034 * Compares the specified object with this map for equality.
3035 * Returns {@code true} if the given object is a map with the same
3036 * mappings as this map. This operation may return misleading
3037 * results if either map is concurrently modified during execution
3038 * of this method.
3039 *
3040 * @param o object to be compared for equality with this map
3041 * @return {@code true} if the specified object is equal to this map
3042 */
3043 public boolean equals(Object o) {
3044 if (o != this) {
3045 if (!(o instanceof Map))
3046 return false;
3047 Map<?,?> m = (Map<?,?>) o;
3048 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3049 V val;
3050 while ((val = it.advance()) != null) {
3051 Object v = m.get(it.nextKey);
3052 if (v == null || (v != val && !v.equals(val)))
3053 return false;
3054 }
3055 for (Map.Entry<?,?> e : m.entrySet()) {
3056 Object mk, mv, v;
3057 if ((mk = e.getKey()) == null ||
3058 (mv = e.getValue()) == null ||
3059 (v = internalGet(mk)) == null ||
3060 (mv != v && !mv.equals(v)))
3061 return false;
3062 }
3063 }
3064 return true;
3065 }
3066
3067 /* ----------------Iterators -------------- */
3068
    // Weakly-consistent key iterator/spliterator/enumeration over the map.
    @SuppressWarnings("serial") static final class KeyIterator<K,V>
        extends Traverser<K,V,Object>
        implements Spliterator<K>, Enumeration<K> {
        KeyIterator(ConcurrentHashMap<K, V> map) { super(map); }
        KeyIterator(ConcurrentHashMap<K, V> map, Traverser<K,V,Object> it) {
            super(map, it, -1);
        }
        // Splitting is only legal before the first element has been
        // fetched (advance() sets nextKey).
        public KeyIterator<K,V> split() {
            if (nextKey != null)
                throw new IllegalStateException();
            return new KeyIterator<K,V>(map, this);
        }
        @SuppressWarnings("unchecked") public final K next() {
            // nextVal doubles as the "element pending" flag; advance()
            // returns null once the traversal is exhausted.
            if (nextVal == null && advance() == null)
                throw new NoSuchElementException();
            Object k = nextKey;
            nextVal = null; // mark the current element as consumed
            return (K) k;
        }

        public final K nextElement() { return next(); }
    }
3091
    // Weakly-consistent value iterator/spliterator/enumeration over the map.
    @SuppressWarnings("serial") static final class ValueIterator<K,V>
        extends Traverser<K,V,Object>
        implements Spliterator<V>, Enumeration<V> {
        ValueIterator(ConcurrentHashMap<K, V> map) { super(map); }
        ValueIterator(ConcurrentHashMap<K, V> map, Traverser<K,V,Object> it) {
            super(map, it, -1);
        }
        // Splitting is only legal before the first element has been
        // fetched (advance() sets nextKey).
        public ValueIterator<K,V> split() {
            if (nextKey != null)
                throw new IllegalStateException();
            return new ValueIterator<K,V>(map, this);
        }

        public final V next() {
            V v;
            // nextVal doubles as the "element pending" flag; advance()
            // returns null once the traversal is exhausted.
            if ((v = nextVal) == null && (v = advance()) == null)
                throw new NoSuchElementException();
            nextVal = null; // mark the current element as consumed
            return v;
        }

        public final V nextElement() { return next(); }
    }
3115
    // Weakly-consistent entry iterator/spliterator over the map.
    @SuppressWarnings("serial") static final class EntryIterator<K,V>
        extends Traverser<K,V,Object>
        implements Spliterator<Map.Entry<K,V>> {
        EntryIterator(ConcurrentHashMap<K, V> map) { super(map); }
        EntryIterator(ConcurrentHashMap<K, V> map, Traverser<K,V,Object> it) {
            super(map, it, -1);
        }
        // Splitting is only legal before the first element has been
        // fetched (advance() sets nextKey).
        public EntryIterator<K,V> split() {
            if (nextKey != null)
                throw new IllegalStateException();
            return new EntryIterator<K,V>(map, this);
        }

        @SuppressWarnings("unchecked") public final Map.Entry<K,V> next() {
            V v;
            // nextVal doubles as the "element pending" flag; advance()
            // returns null once the traversal is exhausted.
            if ((v = nextVal) == null && (v = advance()) == null)
                throw new NoSuchElementException();
            Object k = nextKey;
            nextVal = null; // mark the current element as consumed
            // MapEntry writes through to the map on setValue.
            return new MapEntry<K,V>((K)k, v, map);
        }
    }
3138
3139 /**
3140 * Exported Entry for iterators
3141 */
3142 static final class MapEntry<K,V> implements Map.Entry<K, V> {
3143 final K key; // non-null
3144 V val; // non-null
3145 final ConcurrentHashMap<K, V> map;
3146 MapEntry(K key, V val, ConcurrentHashMap<K, V> map) {
3147 this.key = key;
3148 this.val = val;
3149 this.map = map;
3150 }
3151 public final K getKey() { return key; }
3152 public final V getValue() { return val; }
3153 public final int hashCode() { return key.hashCode() ^ val.hashCode(); }
3154 public final String toString(){ return key + "=" + val; }
3155
3156 public final boolean equals(Object o) {
3157 Object k, v; Map.Entry<?,?> e;
3158 return ((o instanceof Map.Entry) &&
3159 (k = (e = (Map.Entry<?,?>)o).getKey()) != null &&
3160 (v = e.getValue()) != null &&
3161 (k == key || k.equals(key)) &&
3162 (v == val || v.equals(val)));
3163 }
3164
3165 /**
3166 * Sets our entry's value and writes through to the map. The
3167 * value to return is somewhat arbitrary here. Since we do not
3168 * necessarily track asynchronous changes, the most recent
3169 * "previous" value could be different from what we return (or
3170 * could even have been removed in which case the put will
3171 * re-establish). We do not and cannot guarantee more.
3172 */
3173 public final V setValue(V value) {
3174 if (value == null) throw new NullPointerException();
3175 V v = val;
3176 val = value;
3177 map.put(key, value);
3178 return v;
3179 }
3180 }
3181
3182 /**
3183 * Returns exportable snapshot entry for the given key and value
3184 * when write-through can't or shouldn't be used.
3185 */
3186 static <K,V> AbstractMap.SimpleEntry<K,V> entryFor(K k, V v) {
3187 return new AbstractMap.SimpleEntry<K,V>(k, v);
3188 }
3189
3190 /* ---------------- Serialization Support -------------- */
3191
    /**
     * Stripped-down version of helper class used in previous version,
     * declared for the sake of serialization compatibility. Its name,
     * serialVersionUID and field layout must remain unchanged so that
     * streams written by older releases still deserialize.
     */
    static class Segment<K,V> implements Serializable {
        private static final long serialVersionUID = 2249069246763182397L;
        final float loadFactor; // retained solely for the serial form
        Segment(float lf) { this.loadFactor = lf; }
    }
3201
    /**
     * Saves the state of the {@code ConcurrentHashMap} instance to a
     * stream (i.e., serializes it).
     * @param s the stream
     * @serialData
     * the key (Object) and value (Object)
     * for each key-value mapping, followed by a null pair.
     * The key-value mappings are emitted in no particular order.
     */
    @SuppressWarnings("unchecked") private void writeObject
        (java.io.ObjectOutputStream s)
        throws java.io.IOException {
        // Materialize a dummy segment array so that the default serial
        // form matches what readers of earlier releases expect.
        if (segments == null) { // for serialization compatibility
            segments = (Segment<K,V>[])
                new Segment<?,?>[DEFAULT_CONCURRENCY_LEVEL];
            for (int i = 0; i < segments.length; ++i)
                segments[i] = new Segment<K,V>(LOAD_FACTOR);
        }
        s.defaultWriteObject();
        // Write each key/value pair, then a null/null pair as the
        // end-of-data sentinel (see readObject).
        Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
        V v;
        while ((v = it.advance()) != null) {
            s.writeObject(it.nextKey);
            s.writeObject(v);
        }
        s.writeObject(null);
        s.writeObject(null);
        segments = null; // throw away
    }
3231
    /**
     * Reconstitutes the instance from a stream (that is, deserializes it).
     * Reads key/value pairs until the null/null sentinel written by
     * writeObject, then sizes and populates the table in one pass.
     * @param s the stream
     */
    @SuppressWarnings("unchecked") private void readObject
        (java.io.ObjectInputStream s)
        throws java.io.IOException, ClassNotFoundException {
        s.defaultReadObject();
        this.segments = null; // unneeded

        // Create all nodes, then place in table once size is known.
        // Nodes are prepended, so p ends up as the head of a chain in
        // reverse stream order.
        long size = 0L;
        Node<V> p = null;
        for (;;) {
            K k = (K) s.readObject();
            V v = (V) s.readObject();
            if (k != null && v != null) {
                int h = spread(k.hashCode());
                p = new Node<V>(h, k, v, p);
                ++size;
            }
            else
                break; // null/null sentinel: end of data
        }
        if (p != null) {
            boolean init = false;
            int n;
            // Choose a capacity about 1.5x the element count, rounded
            // up to a power of two, capped at MAXIMUM_CAPACITY.
            if (size >= (long)(MAXIMUM_CAPACITY >>> 1))
                n = MAXIMUM_CAPACITY;
            else {
                int sz = (int)size;
                n = tableSizeFor(sz + (sz >>> 1) + 1);
            }
            int sc = sizeCtl;
            boolean collide = false;
            // CAS sizeCtl to -1 to claim the right to initialize the
            // table (same protocol as lazy table creation elsewhere).
            if (n > sc &&
                U.compareAndSwapInt(this, SIZECTL, sc, -1)) {
                try {
                    if (table == null) {
                        init = true;
                        @SuppressWarnings("rawtypes") Node[] rt = new Node[n];
                        Node<V>[] tab = (Node<V>[])rt;
                        int mask = n - 1;
                        // Unlink each node from the chain and push it
                        // onto its bin; remember whether any bin got two
                        // nodes with equal hash (tree-conversion hint).
                        while (p != null) {
                            int j = p.hash & mask;
                            Node<V> next = p.next;
                            Node<V> q = p.next = tabAt(tab, j);
                            setTabAt(tab, j, p);
                            if (!collide && q != null && q.hash == p.hash)
                                collide = true;
                            p = next;
                        }
                        table = tab;
                        addCount(size, -1);
                        sc = n - (n >>> 2); // next resize threshold (0.75 * n)
                    }
                } finally {
                    sizeCtl = sc; // release initialization lock
                }
                if (collide) { // rescan and convert to TreeBins
                    Node<V>[] tab = table;
                    for (int i = 0; i < tab.length; ++i) {
                        int c = 0;
                        for (Node<V> e = tabAt(tab, i); e != null; e = e.next) {
                            if (++c > TREE_THRESHOLD &&
                                (e.key instanceof Comparable)) {
                                replaceWithTreeBin(tab, i, e.key);
                                break;
                            }
                        }
                    }
                }
            }
            if (!init) { // Can only happen if unsafely published.
                while (p != null) {
                    internalPut((K)p.key, p.val, false);
                    p = p.next;
                }
            }
        }
    }
3313
3314 // -------------------------------------------------------
3315
    // Sams: single-abstract-method interfaces, standing in for the
    // java.util.function types of later JDK releases so that bulk
    // operations can be expressed without a JDK8 dependency.
    /** Interface describing a void action of one argument */
    public interface Action<A> { void apply(A a); }
    /** Interface describing a void action of two arguments */
    public interface BiAction<A,B> { void apply(A a, B b); }
    /** Interface describing a function of one argument */
    public interface Fun<A,T> { T apply(A a); }
    /** Interface describing a function of two arguments */
    public interface BiFun<A,B,T> { T apply(A a, B b); }
    /** Interface describing a function of no arguments */
    public interface Generator<T> { T apply(); }
    /** Interface describing a function mapping its argument to a double */
    public interface ObjectToDouble<A> { double apply(A a); }
    /** Interface describing a function mapping its argument to a long */
    public interface ObjectToLong<A> { long apply(A a); }
    /** Interface describing a function mapping its argument to an int */
    public interface ObjectToInt<A> {int apply(A a); }
    /** Interface describing a function mapping two arguments to a double */
    public interface ObjectByObjectToDouble<A,B> { double apply(A a, B b); }
    /** Interface describing a function mapping two arguments to a long */
    public interface ObjectByObjectToLong<A,B> { long apply(A a, B b); }
    /** Interface describing a function mapping two arguments to an int */
    public interface ObjectByObjectToInt<A,B> {int apply(A a, B b); }
    /** Interface describing a function mapping a double to a double */
    public interface DoubleToDouble { double apply(double a); }
    /** Interface describing a function mapping a long to a long */
    public interface LongToLong { long apply(long a); }
    /** Interface describing a function mapping an int to an int */
    public interface IntToInt { int apply(int a); }
    /** Interface describing a function mapping two doubles to a double */
    public interface DoubleByDoubleToDouble { double apply(double a, double b); }
    /** Interface describing a function mapping two longs to a long */
    public interface LongByLongToLong { long apply(long a, long b); }
    /** Interface describing a function mapping two ints to an int */
    public interface IntByIntToInt { int apply(int a, int b); }
3351
3352
3353 // -------------------------------------------------------
3354
3355 // Sequential bulk operations
3356
3357 /**
3358 * Performs the given action for each (key, value).
3359 *
3360 * @param action the action
3361 */
3362 @SuppressWarnings("unchecked") public void forEachSequentially
3363 (BiAction<K,V> action) {
3364 if (action == null) throw new NullPointerException();
3365 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3366 V v;
3367 while ((v = it.advance()) != null)
3368 action.apply((K)it.nextKey, v);
3369 }
3370
3371 /**
3372 * Performs the given action for each non-null transformation
3373 * of each (key, value).
3374 *
3375 * @param transformer a function returning the transformation
3376 * for an element, or null of there is no transformation (in
3377 * which case the action is not applied).
3378 * @param action the action
3379 */
3380 @SuppressWarnings("unchecked") public <U> void forEachSequentially
3381 (BiFun<? super K, ? super V, ? extends U> transformer,
3382 Action<U> action) {
3383 if (transformer == null || action == null)
3384 throw new NullPointerException();
3385 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3386 V v; U u;
3387 while ((v = it.advance()) != null) {
3388 if ((u = transformer.apply((K)it.nextKey, v)) != null)
3389 action.apply(u);
3390 }
3391 }
3392
3393 /**
3394 * Returns a non-null result from applying the given search
3395 * function on each (key, value), or null if none.
3396 *
3397 * @param searchFunction a function returning a non-null
3398 * result on success, else null
3399 * @return a non-null result from applying the given search
3400 * function on each (key, value), or null if none
3401 */
3402 @SuppressWarnings("unchecked") public <U> U searchSequentially
3403 (BiFun<? super K, ? super V, ? extends U> searchFunction) {
3404 if (searchFunction == null) throw new NullPointerException();
3405 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3406 V v; U u;
3407 while ((v = it.advance()) != null) {
3408 if ((u = searchFunction.apply((K)it.nextKey, v)) != null)
3409 return u;
3410 }
3411 return null;
3412 }
3413
3414 /**
3415 * Returns the result of accumulating the given transformation
3416 * of all (key, value) pairs using the given reducer to
3417 * combine values, or null if none.
3418 *
3419 * @param transformer a function returning the transformation
3420 * for an element, or null of there is no transformation (in
3421 * which case it is not combined).
3422 * @param reducer a commutative associative combining function
3423 * @return the result of accumulating the given transformation
3424 * of all (key, value) pairs
3425 */
3426 @SuppressWarnings("unchecked") public <U> U reduceSequentially
3427 (BiFun<? super K, ? super V, ? extends U> transformer,
3428 BiFun<? super U, ? super U, ? extends U> reducer) {
3429 if (transformer == null || reducer == null)
3430 throw new NullPointerException();
3431 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3432 U r = null, u; V v;
3433 while ((v = it.advance()) != null) {
3434 if ((u = transformer.apply((K)it.nextKey, v)) != null)
3435 r = (r == null) ? u : reducer.apply(r, u);
3436 }
3437 return r;
3438 }
3439
3440 /**
3441 * Returns the result of accumulating the given transformation
3442 * of all (key, value) pairs using the given reducer to
3443 * combine values, and the given basis as an identity value.
3444 *
3445 * @param transformer a function returning the transformation
3446 * for an element
3447 * @param basis the identity (initial default value) for the reduction
3448 * @param reducer a commutative associative combining function
3449 * @return the result of accumulating the given transformation
3450 * of all (key, value) pairs
3451 */
3452 @SuppressWarnings("unchecked") public double reduceToDoubleSequentially
3453 (ObjectByObjectToDouble<? super K, ? super V> transformer,
3454 double basis,
3455 DoubleByDoubleToDouble reducer) {
3456 if (transformer == null || reducer == null)
3457 throw new NullPointerException();
3458 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3459 double r = basis; V v;
3460 while ((v = it.advance()) != null)
3461 r = reducer.apply(r, transformer.apply((K)it.nextKey, v));
3462 return r;
3463 }
3464
3465 /**
3466 * Returns the result of accumulating the given transformation
3467 * of all (key, value) pairs using the given reducer to
3468 * combine values, and the given basis as an identity value.
3469 *
3470 * @param transformer a function returning the transformation
3471 * for an element
3472 * @param basis the identity (initial default value) for the reduction
3473 * @param reducer a commutative associative combining function
3474 * @return the result of accumulating the given transformation
3475 * of all (key, value) pairs
3476 */
3477 @SuppressWarnings("unchecked") public long reduceToLongSequentially
3478 (ObjectByObjectToLong<? super K, ? super V> transformer,
3479 long basis,
3480 LongByLongToLong reducer) {
3481 if (transformer == null || reducer == null)
3482 throw new NullPointerException();
3483 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3484 long r = basis; V v;
3485 while ((v = it.advance()) != null)
3486 r = reducer.apply(r, transformer.apply((K)it.nextKey, v));
3487 return r;
3488 }
3489
3490 /**
3491 * Returns the result of accumulating the given transformation
3492 * of all (key, value) pairs using the given reducer to
3493 * combine values, and the given basis as an identity value.
3494 *
3495 * @param transformer a function returning the transformation
3496 * for an element
3497 * @param basis the identity (initial default value) for the reduction
3498 * @param reducer a commutative associative combining function
3499 * @return the result of accumulating the given transformation
3500 * of all (key, value) pairs
3501 */
3502 @SuppressWarnings("unchecked") public int reduceToIntSequentially
3503 (ObjectByObjectToInt<? super K, ? super V> transformer,
3504 int basis,
3505 IntByIntToInt reducer) {
3506 if (transformer == null || reducer == null)
3507 throw new NullPointerException();
3508 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3509 int r = basis; V v;
3510 while ((v = it.advance()) != null)
3511 r = reducer.apply(r, transformer.apply((K)it.nextKey, v));
3512 return r;
3513 }
3514
3515 /**
3516 * Performs the given action for each key.
3517 *
3518 * @param action the action
3519 */
3520 @SuppressWarnings("unchecked") public void forEachKeySequentially
3521 (Action<K> action) {
3522 if (action == null) throw new NullPointerException();
3523 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3524 while (it.advance() != null)
3525 action.apply((K)it.nextKey);
3526 }
3527
3528 /**
3529 * Performs the given action for each non-null transformation
3530 * of each key.
3531 *
3532 * @param transformer a function returning the transformation
3533 * for an element, or null of there is no transformation (in
3534 * which case the action is not applied).
3535 * @param action the action
3536 */
3537 @SuppressWarnings("unchecked") public <U> void forEachKeySequentially
3538 (Fun<? super K, ? extends U> transformer,
3539 Action<U> action) {
3540 if (transformer == null || action == null)
3541 throw new NullPointerException();
3542 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3543 U u;
3544 while (it.advance() != null) {
3545 if ((u = transformer.apply((K)it.nextKey)) != null)
3546 action.apply(u);
3547 }
3548 ForkJoinTasks.forEachKey
3549 (this, transformer, action).invoke();
3550 }
3551
3552 /**
3553 * Returns a non-null result from applying the given search
3554 * function on each key, or null if none.
3555 *
3556 * @param searchFunction a function returning a non-null
3557 * result on success, else null
3558 * @return a non-null result from applying the given search
3559 * function on each key, or null if none
3560 */
3561 @SuppressWarnings("unchecked") public <U> U searchKeysSequentially
3562 (Fun<? super K, ? extends U> searchFunction) {
3563 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3564 U u;
3565 while (it.advance() != null) {
3566 if ((u = searchFunction.apply((K)it.nextKey)) != null)
3567 return u;
3568 }
3569 return null;
3570 }
3571
3572 /**
3573 * Returns the result of accumulating all keys using the given
3574 * reducer to combine values, or null if none.
3575 *
3576 * @param reducer a commutative associative combining function
3577 * @return the result of accumulating all keys using the given
3578 * reducer to combine values, or null if none
3579 */
3580 @SuppressWarnings("unchecked") public K reduceKeysSequentially
3581 (BiFun<? super K, ? super K, ? extends K> reducer) {
3582 if (reducer == null) throw new NullPointerException();
3583 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3584 K r = null;
3585 while (it.advance() != null) {
3586 K u = (K)it.nextKey;
3587 r = (r == null) ? u : reducer.apply(r, u);
3588 }
3589 return r;
3590 }
3591
3592 /**
3593 * Returns the result of accumulating the given transformation
3594 * of all keys using the given reducer to combine values, or
3595 * null if none.
3596 *
3597 * @param transformer a function returning the transformation
3598 * for an element, or null of there is no transformation (in
3599 * which case it is not combined).
3600 * @param reducer a commutative associative combining function
3601 * @return the result of accumulating the given transformation
3602 * of all keys
3603 */
3604 @SuppressWarnings("unchecked") public <U> U reduceKeysSequentially
3605 (Fun<? super K, ? extends U> transformer,
3606 BiFun<? super U, ? super U, ? extends U> reducer) {
3607 if (transformer == null || reducer == null)
3608 throw new NullPointerException();
3609 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3610 U r = null, u;
3611 while (it.advance() != null) {
3612 if ((u = transformer.apply((K)it.nextKey)) != null)
3613 r = (r == null) ? u : reducer.apply(r, u);
3614 }
3615 return r;
3616 }
3617
3618 /**
3619 * Returns the result of accumulating the given transformation
3620 * of all keys using the given reducer to combine values, and
3621 * the given basis as an identity value.
3622 *
3623 * @param transformer a function returning the transformation
3624 * for an element
3625 * @param basis the identity (initial default value) for the reduction
3626 * @param reducer a commutative associative combining function
3627 * @return the result of accumulating the given transformation
3628 * of all keys
3629 */
3630 @SuppressWarnings("unchecked") public double reduceKeysToDoubleSequentially
3631 (ObjectToDouble<? super K> transformer,
3632 double basis,
3633 DoubleByDoubleToDouble reducer) {
3634 if (transformer == null || reducer == null)
3635 throw new NullPointerException();
3636 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3637 double r = basis;
3638 while (it.advance() != null)
3639 r = reducer.apply(r, transformer.apply((K)it.nextKey));
3640 return r;
3641 }
3642
3643 /**
3644 * Returns the result of accumulating the given transformation
3645 * of all keys using the given reducer to combine values, and
3646 * the given basis as an identity value.
3647 *
3648 * @param transformer a function returning the transformation
3649 * for an element
3650 * @param basis the identity (initial default value) for the reduction
3651 * @param reducer a commutative associative combining function
3652 * @return the result of accumulating the given transformation
3653 * of all keys
3654 */
3655 @SuppressWarnings("unchecked") public long reduceKeysToLongSequentially
3656 (ObjectToLong<? super K> transformer,
3657 long basis,
3658 LongByLongToLong reducer) {
3659 if (transformer == null || reducer == null)
3660 throw new NullPointerException();
3661 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3662 long r = basis;
3663 while (it.advance() != null)
3664 r = reducer.apply(r, transformer.apply((K)it.nextKey));
3665 return r;
3666 }
3667
3668 /**
3669 * Returns the result of accumulating the given transformation
3670 * of all keys using the given reducer to combine values, and
3671 * the given basis as an identity value.
3672 *
3673 * @param transformer a function returning the transformation
3674 * for an element
3675 * @param basis the identity (initial default value) for the reduction
3676 * @param reducer a commutative associative combining function
3677 * @return the result of accumulating the given transformation
3678 * of all keys
3679 */
3680 @SuppressWarnings("unchecked") public int reduceKeysToIntSequentially
3681 (ObjectToInt<? super K> transformer,
3682 int basis,
3683 IntByIntToInt reducer) {
3684 if (transformer == null || reducer == null)
3685 throw new NullPointerException();
3686 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3687 int r = basis;
3688 while (it.advance() != null)
3689 r = reducer.apply(r, transformer.apply((K)it.nextKey));
3690 return r;
3691 }
3692
3693 /**
3694 * Performs the given action for each value.
3695 *
3696 * @param action the action
3697 */
3698 public void forEachValueSequentially(Action<V> action) {
3699 if (action == null) throw new NullPointerException();
3700 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3701 V v;
3702 while ((v = it.advance()) != null)
3703 action.apply(v);
3704 }
3705
3706 /**
3707 * Performs the given action for each non-null transformation
3708 * of each value.
3709 *
3710 * @param transformer a function returning the transformation
3711 * for an element, or null of there is no transformation (in
3712 * which case the action is not applied).
3713 */
3714 public <U> void forEachValueSequentially
3715 (Fun<? super V, ? extends U> transformer,
3716 Action<U> action) {
3717 if (transformer == null || action == null)
3718 throw new NullPointerException();
3719 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3720 V v; U u;
3721 while ((v = it.advance()) != null) {
3722 if ((u = transformer.apply(v)) != null)
3723 action.apply(u);
3724 }
3725 }
3726
3727 /**
3728 * Returns a non-null result from applying the given search
3729 * function on each value, or null if none.
3730 *
3731 * @param searchFunction a function returning a non-null
3732 * result on success, else null
3733 * @return a non-null result from applying the given search
3734 * function on each value, or null if none
3735 *
3736 */
3737 public <U> U searchValuesSequentially
3738 (Fun<? super V, ? extends U> searchFunction) {
3739 if (searchFunction == null) throw new NullPointerException();
3740 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3741 V v; U u;
3742 while ((v = it.advance()) != null) {
3743 if ((u = searchFunction.apply(v)) != null)
3744 return u;
3745 }
3746 return null;
3747 }
3748
3749 /**
3750 * Returns the result of accumulating all values using the
3751 * given reducer to combine values, or null if none.
3752 *
3753 * @param reducer a commutative associative combining function
3754 * @return the result of accumulating all values
3755 */
3756 public V reduceValuesSequentially
3757 (BiFun<? super V, ? super V, ? extends V> reducer) {
3758 if (reducer == null) throw new NullPointerException();
3759 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3760 V r = null; V v;
3761 while ((v = it.advance()) != null)
3762 r = (r == null) ? v : reducer.apply(r, v);
3763 return r;
3764 }
3765
3766 /**
3767 * Returns the result of accumulating the given transformation
3768 * of all values using the given reducer to combine values, or
3769 * null if none.
3770 *
3771 * @param transformer a function returning the transformation
3772 * for an element, or null of there is no transformation (in
3773 * which case it is not combined).
3774 * @param reducer a commutative associative combining function
3775 * @return the result of accumulating the given transformation
3776 * of all values
3777 */
3778 public <U> U reduceValuesSequentially
3779 (Fun<? super V, ? extends U> transformer,
3780 BiFun<? super U, ? super U, ? extends U> reducer) {
3781 if (transformer == null || reducer == null)
3782 throw new NullPointerException();
3783 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3784 U r = null, u; V v;
3785 while ((v = it.advance()) != null) {
3786 if ((u = transformer.apply(v)) != null)
3787 r = (r == null) ? u : reducer.apply(r, u);
3788 }
3789 return r;
3790 }
3791
3792 /**
3793 * Returns the result of accumulating the given transformation
3794 * of all values using the given reducer to combine values,
3795 * and the given basis as an identity value.
3796 *
3797 * @param transformer a function returning the transformation
3798 * for an element
3799 * @param basis the identity (initial default value) for the reduction
3800 * @param reducer a commutative associative combining function
3801 * @return the result of accumulating the given transformation
3802 * of all values
3803 */
3804 public double reduceValuesToDoubleSequentially
3805 (ObjectToDouble<? super V> transformer,
3806 double basis,
3807 DoubleByDoubleToDouble reducer) {
3808 if (transformer == null || reducer == null)
3809 throw new NullPointerException();
3810 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3811 double r = basis; V v;
3812 while ((v = it.advance()) != null)
3813 r = reducer.apply(r, transformer.apply(v));
3814 return r;
3815 }
3816
3817 /**
3818 * Returns the result of accumulating the given transformation
3819 * of all values using the given reducer to combine values,
3820 * and the given basis as an identity value.
3821 *
3822 * @param transformer a function returning the transformation
3823 * for an element
3824 * @param basis the identity (initial default value) for the reduction
3825 * @param reducer a commutative associative combining function
3826 * @return the result of accumulating the given transformation
3827 * of all values
3828 */
3829 public long reduceValuesToLongSequentially
3830 (ObjectToLong<? super V> transformer,
3831 long basis,
3832 LongByLongToLong reducer) {
3833 if (transformer == null || reducer == null)
3834 throw new NullPointerException();
3835 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3836 long r = basis; V v;
3837 while ((v = it.advance()) != null)
3838 r = reducer.apply(r, transformer.apply(v));
3839 return r;
3840 }
3841
3842 /**
3843 * Returns the result of accumulating the given transformation
3844 * of all values using the given reducer to combine values,
3845 * and the given basis as an identity value.
3846 *
3847 * @param transformer a function returning the transformation
3848 * for an element
3849 * @param basis the identity (initial default value) for the reduction
3850 * @param reducer a commutative associative combining function
3851 * @return the result of accumulating the given transformation
3852 * of all values
3853 */
3854 public int reduceValuesToIntSequentially
3855 (ObjectToInt<? super V> transformer,
3856 int basis,
3857 IntByIntToInt reducer) {
3858 if (transformer == null || reducer == null)
3859 throw new NullPointerException();
3860 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3861 int r = basis; V v;
3862 while ((v = it.advance()) != null)
3863 r = reducer.apply(r, transformer.apply(v));
3864 return r;
3865 }
3866
3867 /**
3868 * Performs the given action for each entry.
3869 *
3870 * @param action the action
3871 */
3872 @SuppressWarnings("unchecked") public void forEachEntrySequentially
3873 (Action<Map.Entry<K,V>> action) {
3874 if (action == null) throw new NullPointerException();
3875 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3876 V v;
3877 while ((v = it.advance()) != null)
3878 action.apply(entryFor((K)it.nextKey, v));
3879 }
3880
3881 /**
3882 * Performs the given action for each non-null transformation
3883 * of each entry.
3884 *
3885 * @param transformer a function returning the transformation
3886 * for an element, or null of there is no transformation (in
3887 * which case the action is not applied).
3888 * @param action the action
3889 */
3890 @SuppressWarnings("unchecked") public <U> void forEachEntrySequentially
3891 (Fun<Map.Entry<K,V>, ? extends U> transformer,
3892 Action<U> action) {
3893 if (transformer == null || action == null)
3894 throw new NullPointerException();
3895 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3896 V v; U u;
3897 while ((v = it.advance()) != null) {
3898 if ((u = transformer.apply(entryFor((K)it.nextKey, v))) != null)
3899 action.apply(u);
3900 }
3901 }
3902
3903 /**
3904 * Returns a non-null result from applying the given search
3905 * function on each entry, or null if none.
3906 *
3907 * @param searchFunction a function returning a non-null
3908 * result on success, else null
3909 * @return a non-null result from applying the given search
3910 * function on each entry, or null if none
3911 */
3912 @SuppressWarnings("unchecked") public <U> U searchEntriesSequentially
3913 (Fun<Map.Entry<K,V>, ? extends U> searchFunction) {
3914 if (searchFunction == null) throw new NullPointerException();
3915 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3916 V v; U u;
3917 while ((v = it.advance()) != null) {
3918 if ((u = searchFunction.apply(entryFor((K)it.nextKey, v))) != null)
3919 return u;
3920 }
3921 return null;
3922 }
3923
3924 /**
3925 * Returns the result of accumulating all entries using the
3926 * given reducer to combine values, or null if none.
3927 *
3928 * @param reducer a commutative associative combining function
3929 * @return the result of accumulating all entries
3930 */
3931 @SuppressWarnings("unchecked") public Map.Entry<K,V> reduceEntriesSequentially
3932 (BiFun<Map.Entry<K,V>, Map.Entry<K,V>, ? extends Map.Entry<K,V>> reducer) {
3933 if (reducer == null) throw new NullPointerException();
3934 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3935 Map.Entry<K,V> r = null; V v;
3936 while ((v = it.advance()) != null) {
3937 Map.Entry<K,V> u = entryFor((K)it.nextKey, v);
3938 r = (r == null) ? u : reducer.apply(r, u);
3939 }
3940 return r;
3941 }
3942
3943 /**
3944 * Returns the result of accumulating the given transformation
3945 * of all entries using the given reducer to combine values,
3946 * or null if none.
3947 *
3948 * @param transformer a function returning the transformation
3949 * for an element, or null of there is no transformation (in
3950 * which case it is not combined).
3951 * @param reducer a commutative associative combining function
3952 * @return the result of accumulating the given transformation
3953 * of all entries
3954 */
3955 @SuppressWarnings("unchecked") public <U> U reduceEntriesSequentially
3956 (Fun<Map.Entry<K,V>, ? extends U> transformer,
3957 BiFun<? super U, ? super U, ? extends U> reducer) {
3958 if (transformer == null || reducer == null)
3959 throw new NullPointerException();
3960 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3961 U r = null, u; V v;
3962 while ((v = it.advance()) != null) {
3963 if ((u = transformer.apply(entryFor((K)it.nextKey, v))) != null)
3964 r = (r == null) ? u : reducer.apply(r, u);
3965 }
3966 return r;
3967 }
3968
3969 /**
3970 * Returns the result of accumulating the given transformation
3971 * of all entries using the given reducer to combine values,
3972 * and the given basis as an identity value.
3973 *
3974 * @param transformer a function returning the transformation
3975 * for an element
3976 * @param basis the identity (initial default value) for the reduction
3977 * @param reducer a commutative associative combining function
3978 * @return the result of accumulating the given transformation
3979 * of all entries
3980 */
3981 @SuppressWarnings("unchecked") public double reduceEntriesToDoubleSequentially
3982 (ObjectToDouble<Map.Entry<K,V>> transformer,
3983 double basis,
3984 DoubleByDoubleToDouble reducer) {
3985 if (transformer == null || reducer == null)
3986 throw new NullPointerException();
3987 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3988 double r = basis; V v;
3989 while ((v = it.advance()) != null)
3990 r = reducer.apply(r, transformer.apply(entryFor((K)it.nextKey, v)));
3991 return r;
3992 }
3993
3994 /**
3995 * Returns the result of accumulating the given transformation
3996 * of all entries using the given reducer to combine values,
3997 * and the given basis as an identity value.
3998 *
3999 * @param transformer a function returning the transformation
4000 * for an element
4001 * @param basis the identity (initial default value) for the reduction
4002 * @param reducer a commutative associative combining function
4003 * @return the result of accumulating the given transformation
4004 * of all entries
4005 */
4006 @SuppressWarnings("unchecked") public long reduceEntriesToLongSequentially
4007 (ObjectToLong<Map.Entry<K,V>> transformer,
4008 long basis,
4009 LongByLongToLong reducer) {
4010 if (transformer == null || reducer == null)
4011 throw new NullPointerException();
4012 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
4013 long r = basis; V v;
4014 while ((v = it.advance()) != null)
4015 r = reducer.apply(r, transformer.apply(entryFor((K)it.nextKey, v)));
4016 return r;
4017 }
4018
4019 /**
4020 * Returns the result of accumulating the given transformation
4021 * of all entries using the given reducer to combine values,
4022 * and the given basis as an identity value.
4023 *
4024 * @param transformer a function returning the transformation
4025 * for an element
4026 * @param basis the identity (initial default value) for the reduction
4027 * @param reducer a commutative associative combining function
4028 * @return the result of accumulating the given transformation
4029 * of all entries
4030 */
4031 @SuppressWarnings("unchecked") public int reduceEntriesToIntSequentially
4032 (ObjectToInt<Map.Entry<K,V>> transformer,
4033 int basis,
4034 IntByIntToInt reducer) {
4035 if (transformer == null || reducer == null)
4036 throw new NullPointerException();
4037 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
4038 int r = basis; V v;
4039 while ((v = it.advance()) != null)
4040 r = reducer.apply(r, transformer.apply(entryFor((K)it.nextKey, v)));
4041 return r;
4042 }
4043
4044 // Parallel bulk operations
4045
4046 /**
4047 * Performs the given action for each (key, value).
4048 *
4049 * @param action the action
4050 */
4051 public void forEachInParallel(BiAction<K,V> action) {
4052 ForkJoinTasks.forEach
4053 (this, action).invoke();
4054 }
4055
4056 /**
4057 * Performs the given action for each non-null transformation
4058 * of each (key, value).
4059 *
4060 * @param transformer a function returning the transformation
4061 * for an element, or null of there is no transformation (in
4062 * which case the action is not applied).
4063 * @param action the action
4064 */
4065 public <U> void forEachInParallel
4066 (BiFun<? super K, ? super V, ? extends U> transformer,
4067 Action<U> action) {
4068 ForkJoinTasks.forEach
4069 (this, transformer, action).invoke();
4070 }
4071
4072 /**
4073 * Returns a non-null result from applying the given search
4074 * function on each (key, value), or null if none. Upon
4075 * success, further element processing is suppressed and the
4076 * results of any other parallel invocations of the search
4077 * function are ignored.
4078 *
4079 * @param searchFunction a function returning a non-null
4080 * result on success, else null
4081 * @return a non-null result from applying the given search
4082 * function on each (key, value), or null if none
4083 */
4084 public <U> U searchInParallel
4085 (BiFun<? super K, ? super V, ? extends U> searchFunction) {
4086 return ForkJoinTasks.search
4087 (this, searchFunction).invoke();
4088 }
4089
4090 /**
4091 * Returns the result of accumulating the given transformation
4092 * of all (key, value) pairs using the given reducer to
4093 * combine values, or null if none.
4094 *
4095 * @param transformer a function returning the transformation
4096 * for an element, or null of there is no transformation (in
4097 * which case it is not combined).
4098 * @param reducer a commutative associative combining function
4099 * @return the result of accumulating the given transformation
4100 * of all (key, value) pairs
4101 */
4102 public <U> U reduceInParallel
4103 (BiFun<? super K, ? super V, ? extends U> transformer,
4104 BiFun<? super U, ? super U, ? extends U> reducer) {
4105 return ForkJoinTasks.reduce
4106 (this, transformer, reducer).invoke();
4107 }
4108
4109 /**
4110 * Returns the result of accumulating the given transformation
4111 * of all (key, value) pairs using the given reducer to
4112 * combine values, and the given basis as an identity value.
4113 *
4114 * @param transformer a function returning the transformation
4115 * for an element
4116 * @param basis the identity (initial default value) for the reduction
4117 * @param reducer a commutative associative combining function
4118 * @return the result of accumulating the given transformation
4119 * of all (key, value) pairs
4120 */
4121 public double reduceToDoubleInParallel
4122 (ObjectByObjectToDouble<? super K, ? super V> transformer,
4123 double basis,
4124 DoubleByDoubleToDouble reducer) {
4125 return ForkJoinTasks.reduceToDouble
4126 (this, transformer, basis, reducer).invoke();
4127 }
4128
4129 /**
4130 * Returns the result of accumulating the given transformation
4131 * of all (key, value) pairs using the given reducer to
4132 * combine values, and the given basis as an identity value.
4133 *
4134 * @param transformer a function returning the transformation
4135 * for an element
4136 * @param basis the identity (initial default value) for the reduction
4137 * @param reducer a commutative associative combining function
4138 * @return the result of accumulating the given transformation
4139 * of all (key, value) pairs
4140 */
4141 public long reduceToLongInParallel
4142 (ObjectByObjectToLong<? super K, ? super V> transformer,
4143 long basis,
4144 LongByLongToLong reducer) {
4145 return ForkJoinTasks.reduceToLong
4146 (this, transformer, basis, reducer).invoke();
4147 }
4148
4149 /**
4150 * Returns the result of accumulating the given transformation
4151 * of all (key, value) pairs using the given reducer to
4152 * combine values, and the given basis as an identity value.
4153 *
4154 * @param transformer a function returning the transformation
4155 * for an element
4156 * @param basis the identity (initial default value) for the reduction
4157 * @param reducer a commutative associative combining function
4158 * @return the result of accumulating the given transformation
4159 * of all (key, value) pairs
4160 */
4161 public int reduceToIntInParallel
4162 (ObjectByObjectToInt<? super K, ? super V> transformer,
4163 int basis,
4164 IntByIntToInt reducer) {
4165 return ForkJoinTasks.reduceToInt
4166 (this, transformer, basis, reducer).invoke();
4167 }
4168
4169 /**
4170 * Performs the given action for each key.
4171 *
4172 * @param action the action
4173 */
4174 public void forEachKeyInParallel(Action<K> action) {
4175 ForkJoinTasks.forEachKey
4176 (this, action).invoke();
4177 }
4178
4179 /**
4180 * Performs the given action for each non-null transformation
4181 * of each key.
4182 *
4183 * @param transformer a function returning the transformation
4184 * for an element, or null of there is no transformation (in
4185 * which case the action is not applied).
4186 * @param action the action
4187 */
4188 public <U> void forEachKeyInParallel
4189 (Fun<? super K, ? extends U> transformer,
4190 Action<U> action) {
4191 ForkJoinTasks.forEachKey
4192 (this, transformer, action).invoke();
4193 }
4194
4195 /**
4196 * Returns a non-null result from applying the given search
4197 * function on each key, or null if none. Upon success,
4198 * further element processing is suppressed and the results of
4199 * any other parallel invocations of the search function are
4200 * ignored.
4201 *
4202 * @param searchFunction a function returning a non-null
4203 * result on success, else null
4204 * @return a non-null result from applying the given search
4205 * function on each key, or null if none
4206 */
4207 public <U> U searchKeysInParallel
4208 (Fun<? super K, ? extends U> searchFunction) {
4209 return ForkJoinTasks.searchKeys
4210 (this, searchFunction).invoke();
4211 }
4212
4213 /**
4214 * Returns the result of accumulating all keys using the given
4215 * reducer to combine values, or null if none.
4216 *
4217 * @param reducer a commutative associative combining function
4218 * @return the result of accumulating all keys using the given
4219 * reducer to combine values, or null if none
4220 */
4221 public K reduceKeysInParallel
4222 (BiFun<? super K, ? super K, ? extends K> reducer) {
4223 return ForkJoinTasks.reduceKeys
4224 (this, reducer).invoke();
4225 }
4226
4227 /**
4228 * Returns the result of accumulating the given transformation
4229 * of all keys using the given reducer to combine values, or
4230 * null if none.
4231 *
4232 * @param transformer a function returning the transformation
4233 * for an element, or null of there is no transformation (in
4234 * which case it is not combined).
4235 * @param reducer a commutative associative combining function
4236 * @return the result of accumulating the given transformation
4237 * of all keys
4238 */
4239 public <U> U reduceKeysInParallel
4240 (Fun<? super K, ? extends U> transformer,
4241 BiFun<? super U, ? super U, ? extends U> reducer) {
4242 return ForkJoinTasks.reduceKeys
4243 (this, transformer, reducer).invoke();
4244 }
4245
4246 /**
4247 * Returns the result of accumulating the given transformation
4248 * of all keys using the given reducer to combine values, and
4249 * the given basis as an identity value.
4250 *
4251 * @param transformer a function returning the transformation
4252 * for an element
4253 * @param basis the identity (initial default value) for the reduction
4254 * @param reducer a commutative associative combining function
4255 * @return the result of accumulating the given transformation
4256 * of all keys
4257 */
4258 public double reduceKeysToDoubleInParallel
4259 (ObjectToDouble<? super K> transformer,
4260 double basis,
4261 DoubleByDoubleToDouble reducer) {
4262 return ForkJoinTasks.reduceKeysToDouble
4263 (this, transformer, basis, reducer).invoke();
4264 }
4265
4266 /**
4267 * Returns the result of accumulating the given transformation
4268 * of all keys using the given reducer to combine values, and
4269 * the given basis as an identity value.
4270 *
4271 * @param transformer a function returning the transformation
4272 * for an element
4273 * @param basis the identity (initial default value) for the reduction
4274 * @param reducer a commutative associative combining function
4275 * @return the result of accumulating the given transformation
4276 * of all keys
4277 */
4278 public long reduceKeysToLongInParallel
4279 (ObjectToLong<? super K> transformer,
4280 long basis,
4281 LongByLongToLong reducer) {
4282 return ForkJoinTasks.reduceKeysToLong
4283 (this, transformer, basis, reducer).invoke();
4284 }
4285
4286 /**
4287 * Returns the result of accumulating the given transformation
4288 * of all keys using the given reducer to combine values, and
4289 * the given basis as an identity value.
4290 *
4291 * @param transformer a function returning the transformation
4292 * for an element
4293 * @param basis the identity (initial default value) for the reduction
4294 * @param reducer a commutative associative combining function
4295 * @return the result of accumulating the given transformation
4296 * of all keys
4297 */
4298 public int reduceKeysToIntInParallel
4299 (ObjectToInt<? super K> transformer,
4300 int basis,
4301 IntByIntToInt reducer) {
4302 return ForkJoinTasks.reduceKeysToInt
4303 (this, transformer, basis, reducer).invoke();
4304 }
4305
4306 /**
4307 * Performs the given action for each value.
4308 *
4309 * @param action the action
4310 */
4311 public void forEachValueInParallel(Action<V> action) {
4312 ForkJoinTasks.forEachValue
4313 (this, action).invoke();
4314 }
4315
4316 /**
4317 * Performs the given action for each non-null transformation
4318 * of each value.
4319 *
4320 * @param transformer a function returning the transformation
4321 * for an element, or null of there is no transformation (in
4322 * which case the action is not applied).
4323 */
4324 public <U> void forEachValueInParallel
4325 (Fun<? super V, ? extends U> transformer,
4326 Action<U> action) {
4327 ForkJoinTasks.forEachValue
4328 (this, transformer, action).invoke();
4329 }
4330
4331 /**
4332 * Returns a non-null result from applying the given search
4333 * function on each value, or null if none. Upon success,
4334 * further element processing is suppressed and the results of
4335 * any other parallel invocations of the search function are
4336 * ignored.
4337 *
4338 * @param searchFunction a function returning a non-null
4339 * result on success, else null
4340 * @return a non-null result from applying the given search
4341 * function on each value, or null if none
4342 *
4343 */
4344 public <U> U searchValuesInParallel
4345 (Fun<? super V, ? extends U> searchFunction) {
4346 return ForkJoinTasks.searchValues
4347 (this, searchFunction).invoke();
4348 }
4349
4350 /**
4351 * Returns the result of accumulating all values using the
4352 * given reducer to combine values, or null if none.
4353 *
4354 * @param reducer a commutative associative combining function
4355 * @return the result of accumulating all values
4356 */
4357 public V reduceValuesInParallel
4358 (BiFun<? super V, ? super V, ? extends V> reducer) {
4359 return ForkJoinTasks.reduceValues
4360 (this, reducer).invoke();
4361 }
4362
4363 /**
4364 * Returns the result of accumulating the given transformation
4365 * of all values using the given reducer to combine values, or
4366 * null if none.
4367 *
4368 * @param transformer a function returning the transformation
4369 * for an element, or null of there is no transformation (in
4370 * which case it is not combined).
4371 * @param reducer a commutative associative combining function
4372 * @return the result of accumulating the given transformation
4373 * of all values
4374 */
4375 public <U> U reduceValuesInParallel
4376 (Fun<? super V, ? extends U> transformer,
4377 BiFun<? super U, ? super U, ? extends U> reducer) {
4378 return ForkJoinTasks.reduceValues
4379 (this, transformer, reducer).invoke();
4380 }
4381
4382 /**
4383 * Returns the result of accumulating the given transformation
4384 * of all values using the given reducer to combine values,
4385 * and the given basis as an identity value.
4386 *
4387 * @param transformer a function returning the transformation
4388 * for an element
4389 * @param basis the identity (initial default value) for the reduction
4390 * @param reducer a commutative associative combining function
4391 * @return the result of accumulating the given transformation
4392 * of all values
4393 */
4394 public double reduceValuesToDoubleInParallel
4395 (ObjectToDouble<? super V> transformer,
4396 double basis,
4397 DoubleByDoubleToDouble reducer) {
4398 return ForkJoinTasks.reduceValuesToDouble
4399 (this, transformer, basis, reducer).invoke();
4400 }
4401
4402 /**
4403 * Returns the result of accumulating the given transformation
4404 * of all values using the given reducer to combine values,
4405 * and the given basis as an identity value.
4406 *
4407 * @param transformer a function returning the transformation
4408 * for an element
4409 * @param basis the identity (initial default value) for the reduction
4410 * @param reducer a commutative associative combining function
4411 * @return the result of accumulating the given transformation
4412 * of all values
4413 */
4414 public long reduceValuesToLongInParallel
4415 (ObjectToLong<? super V> transformer,
4416 long basis,
4417 LongByLongToLong reducer) {
4418 return ForkJoinTasks.reduceValuesToLong
4419 (this, transformer, basis, reducer).invoke();
4420 }
4421
4422 /**
4423 * Returns the result of accumulating the given transformation
4424 * of all values using the given reducer to combine values,
4425 * and the given basis as an identity value.
4426 *
4427 * @param transformer a function returning the transformation
4428 * for an element
4429 * @param basis the identity (initial default value) for the reduction
4430 * @param reducer a commutative associative combining function
4431 * @return the result of accumulating the given transformation
4432 * of all values
4433 */
4434 public int reduceValuesToIntInParallel
4435 (ObjectToInt<? super V> transformer,
4436 int basis,
4437 IntByIntToInt reducer) {
4438 return ForkJoinTasks.reduceValuesToInt
4439 (this, transformer, basis, reducer).invoke();
4440 }
4441
4442 /**
4443 * Performs the given action for each entry.
4444 *
4445 * @param action the action
4446 */
4447 public void forEachEntryInParallel(Action<Map.Entry<K,V>> action) {
4448 ForkJoinTasks.forEachEntry
4449 (this, action).invoke();
4450 }
4451
4452 /**
4453 * Performs the given action for each non-null transformation
4454 * of each entry.
4455 *
4456 * @param transformer a function returning the transformation
4457 * for an element, or null of there is no transformation (in
4458 * which case the action is not applied).
4459 * @param action the action
4460 */
4461 public <U> void forEachEntryInParallel
4462 (Fun<Map.Entry<K,V>, ? extends U> transformer,
4463 Action<U> action) {
4464 ForkJoinTasks.forEachEntry
4465 (this, transformer, action).invoke();
4466 }
4467
4468 /**
4469 * Returns a non-null result from applying the given search
4470 * function on each entry, or null if none. Upon success,
4471 * further element processing is suppressed and the results of
4472 * any other parallel invocations of the search function are
4473 * ignored.
4474 *
4475 * @param searchFunction a function returning a non-null
4476 * result on success, else null
4477 * @return a non-null result from applying the given search
4478 * function on each entry, or null if none
4479 */
4480 public <U> U searchEntriesInParallel
4481 (Fun<Map.Entry<K,V>, ? extends U> searchFunction) {
4482 return ForkJoinTasks.searchEntries
4483 (this, searchFunction).invoke();
4484 }
4485
4486 /**
4487 * Returns the result of accumulating all entries using the
4488 * given reducer to combine values, or null if none.
4489 *
4490 * @param reducer a commutative associative combining function
4491 * @return the result of accumulating all entries
4492 */
4493 public Map.Entry<K,V> reduceEntriesInParallel
4494 (BiFun<Map.Entry<K,V>, Map.Entry<K,V>, ? extends Map.Entry<K,V>> reducer) {
4495 return ForkJoinTasks.reduceEntries
4496 (this, reducer).invoke();
4497 }
4498
4499 /**
4500 * Returns the result of accumulating the given transformation
4501 * of all entries using the given reducer to combine values,
4502 * or null if none.
4503 *
4504 * @param transformer a function returning the transformation
4505 * for an element, or null of there is no transformation (in
4506 * which case it is not combined).
4507 * @param reducer a commutative associative combining function
4508 * @return the result of accumulating the given transformation
4509 * of all entries
4510 */
4511 public <U> U reduceEntriesInParallel
4512 (Fun<Map.Entry<K,V>, ? extends U> transformer,
4513 BiFun<? super U, ? super U, ? extends U> reducer) {
4514 return ForkJoinTasks.reduceEntries
4515 (this, transformer, reducer).invoke();
4516 }
4517
4518 /**
4519 * Returns the result of accumulating the given transformation
4520 * of all entries using the given reducer to combine values,
4521 * and the given basis as an identity value.
4522 *
4523 * @param transformer a function returning the transformation
4524 * for an element
4525 * @param basis the identity (initial default value) for the reduction
4526 * @param reducer a commutative associative combining function
4527 * @return the result of accumulating the given transformation
4528 * of all entries
4529 */
4530 public double reduceEntriesToDoubleInParallel
4531 (ObjectToDouble<Map.Entry<K,V>> transformer,
4532 double basis,
4533 DoubleByDoubleToDouble reducer) {
4534 return ForkJoinTasks.reduceEntriesToDouble
4535 (this, transformer, basis, reducer).invoke();
4536 }
4537
4538 /**
4539 * Returns the result of accumulating the given transformation
4540 * of all entries using the given reducer to combine values,
4541 * and the given basis as an identity value.
4542 *
4543 * @param transformer a function returning the transformation
4544 * for an element
4545 * @param basis the identity (initial default value) for the reduction
4546 * @param reducer a commutative associative combining function
4547 * @return the result of accumulating the given transformation
4548 * of all entries
4549 */
4550 public long reduceEntriesToLongInParallel
4551 (ObjectToLong<Map.Entry<K,V>> transformer,
4552 long basis,
4553 LongByLongToLong reducer) {
4554 return ForkJoinTasks.reduceEntriesToLong
4555 (this, transformer, basis, reducer).invoke();
4556 }
4557
4558 /**
4559 * Returns the result of accumulating the given transformation
4560 * of all entries using the given reducer to combine values,
4561 * and the given basis as an identity value.
4562 *
4563 * @param transformer a function returning the transformation
4564 * for an element
4565 * @param basis the identity (initial default value) for the reduction
4566 * @param reducer a commutative associative combining function
4567 * @return the result of accumulating the given transformation
4568 * of all entries
4569 */
4570 public int reduceEntriesToIntInParallel
4571 (ObjectToInt<Map.Entry<K,V>> transformer,
4572 int basis,
4573 IntByIntToInt reducer) {
4574 return ForkJoinTasks.reduceEntriesToInt
4575 (this, transformer, basis, reducer).invoke();
4576 }
4577
4578
4579 /* ----------------Views -------------- */
4580
    /**
     * Base class for views.
     *
     * <p>Concrete subclasses supply {@link #iterator}, {@link #contains}
     * and {@link #remove}; everything else here is implemented in terms
     * of those three methods and the backing map.
     */
    static abstract class CHMView<K, V> {
        final ConcurrentHashMap<K, V> map;  // the backing map
        CHMView(ConcurrentHashMap<K, V> map) { this.map = map; }

        /**
         * Returns the map backing this view.
         *
         * @return the map backing this view
         */
        public ConcurrentHashMap<K,V> getMap() { return map; }

        // size/emptiness/clearing all delegate directly to the map
        public final int size() { return map.size(); }
        public final boolean isEmpty() { return map.isEmpty(); }
        public final void clear() { map.clear(); }

        // implementations below rely on concrete classes supplying these
        abstract public Iterator<?> iterator();
        abstract public boolean contains(Object o);
        abstract public boolean remove(Object o);

        private static final String oomeMsg = "Required array size too large";

        /**
         * Returns an array of the elements produced by this view's
         * iterator. The array is initially sized from
         * {@code map.mappingCount()}, but because the map may change
         * concurrently during traversal, it is regrown (by roughly 50%
         * each time, capped at MAX_ARRAY_SIZE) if the iterator yields
         * more elements, and trimmed if it yields fewer.
         */
        public final Object[] toArray() {
            long sz = map.mappingCount();
            if (sz > (long)(MAX_ARRAY_SIZE))
                throw new OutOfMemoryError(oomeMsg);
            int n = (int)sz;
            Object[] r = new Object[n];
            int i = 0;
            Iterator<?> it = iterator();
            while (it.hasNext()) {
                if (i == n) {  // array full: grow before appending
                    if (n >= MAX_ARRAY_SIZE)
                        throw new OutOfMemoryError(oomeMsg);
                    if (n >= MAX_ARRAY_SIZE - (MAX_ARRAY_SIZE >>> 1) - 1)
                        n = MAX_ARRAY_SIZE;  // clamp at the hard limit
                    else
                        n += (n >>> 1) + 1;  // grow by ~50% + 1
                    r = Arrays.copyOf(r, n);
                }
                r[i++] = it.next();
            }
            return (i == n) ? r : Arrays.copyOf(r, i);  // trim unused slots
        }

        /**
         * Typed variant of {@link #toArray()}: fills {@code a} if it is
         * large enough (null-terminating per the Collection.toArray(T[])
         * contract), otherwise allocates a new array of the same
         * component type, growing/trimming as in {@code toArray()}.
         */
        @SuppressWarnings("unchecked") public final <T> T[] toArray(T[] a) {
            long sz = map.mappingCount();
            if (sz > (long)(MAX_ARRAY_SIZE))
                throw new OutOfMemoryError(oomeMsg);
            int m = (int)sz;
            T[] r = (a.length >= m) ? a :
                (T[])java.lang.reflect.Array
                .newInstance(a.getClass().getComponentType(), m);
            int n = r.length;
            int i = 0;
            Iterator<?> it = iterator();
            while (it.hasNext()) {
                if (i == n) {  // array full: grow before appending
                    if (n >= MAX_ARRAY_SIZE)
                        throw new OutOfMemoryError(oomeMsg);
                    if (n >= MAX_ARRAY_SIZE - (MAX_ARRAY_SIZE >>> 1) - 1)
                        n = MAX_ARRAY_SIZE;
                    else
                        n += (n >>> 1) + 1;
                    r = Arrays.copyOf(r, n);
                }
                r[i++] = (T)it.next();
            }
            if (a == r && i < n) {
                r[i] = null; // null-terminate
                return r;
            }
            return (i == n) ? r : Arrays.copyOf(r, i);
        }

        // Sum of element hash codes; order-independent.
        public final int hashCode() {
            int h = 0;
            for (Iterator<?> it = iterator(); it.hasNext();)
                h += it.next().hashCode();
            return h;
        }

        // Renders as "[e1, e2, ...]"; a self-reference is printed as
        // "(this Collection)" to avoid infinite recursion.
        public final String toString() {
            StringBuilder sb = new StringBuilder();
            sb.append('[');
            Iterator<?> it = iterator();
            if (it.hasNext()) {
                for (;;) {
                    Object e = it.next();
                    sb.append(e == this ? "(this Collection)" : e);
                    if (!it.hasNext())
                        break;
                    sb.append(',').append(' ');
                }
            }
            return sb.append(']').toString();
        }

        // Note: a null element in c is treated as not contained
        // (this map does not permit nulls), so the result is false.
        public final boolean containsAll(Collection<?> c) {
            if (c != this) {
                for (Iterator<?> it = c.iterator(); it.hasNext();) {
                    Object e = it.next();
                    if (e == null || !contains(e))
                        return false;
                }
            }
            return true;
        }

        // Removes every element also present in c, via the iterator's
        // remove() so the change reaches the backing map.
        public final boolean removeAll(Collection<?> c) {
            boolean modified = false;
            for (Iterator<?> it = iterator(); it.hasNext();) {
                if (c.contains(it.next())) {
                    it.remove();
                    modified = true;
                }
            }
            return modified;
        }

        // Removes every element NOT present in c (dual of removeAll).
        public final boolean retainAll(Collection<?> c) {
            boolean modified = false;
            for (Iterator<?> it = iterator(); it.hasNext();) {
                if (!c.contains(it.next())) {
                    it.remove();
                    modified = true;
                }
            }
            return modified;
        }

    }
4716
4717 /**
4718 * A view of a ConcurrentHashMap as a {@link Set} of keys, in
4719 * which additions may optionally be enabled by mapping to a
4720 * common value. This class cannot be directly instantiated. See
4721 * {@link #keySet}, {@link #keySet(Object)}, {@link #newKeySet()},
4722 * {@link #newKeySet(int)}.
4723 */
4724 public static class KeySetView<K,V> extends CHMView<K,V>
4725 implements Set<K>, java.io.Serializable {
4726 private static final long serialVersionUID = 7249069246763182397L;
4727 private final V value;
4728 KeySetView(ConcurrentHashMap<K, V> map, V value) { // non-public
4729 super(map);
4730 this.value = value;
4731 }
4732
4733 /**
4734 * Returns the default mapped value for additions,
4735 * or {@code null} if additions are not supported.
4736 *
4737 * @return the default mapped value for additions, or {@code null}
4738 * if not supported.
4739 */
4740 public V getMappedValue() { return value; }
4741
4742 // implement Set API
4743
4744 public boolean contains(Object o) { return map.containsKey(o); }
4745 public boolean remove(Object o) { return map.remove(o) != null; }
4746
4747 /**
4748 * Returns a "weakly consistent" iterator that will never
4749 * throw {@link ConcurrentModificationException}, and
4750 * guarantees to traverse elements as they existed upon
4751 * construction of the iterator, and may (but is not
4752 * guaranteed to) reflect any modifications subsequent to
4753 * construction.
4754 *
4755 * @return an iterator over the keys of this map
4756 */
4757 public Iterator<K> iterator() { return new KeyIterator<K,V>(map); }
4758 public boolean add(K e) {
4759 V v;
4760 if ((v = value) == null)
4761 throw new UnsupportedOperationException();
4762 if (e == null)
4763 throw new NullPointerException();
4764 return map.internalPut(e, v, true) == null;
4765 }
4766 public boolean addAll(Collection<? extends K> c) {
4767 boolean added = false;
4768 V v;
4769 if ((v = value) == null)
4770 throw new UnsupportedOperationException();
4771 for (K e : c) {
4772 if (e == null)
4773 throw new NullPointerException();
4774 if (map.internalPut(e, v, true) == null)
4775 added = true;
4776 }
4777 return added;
4778 }
4779 public boolean equals(Object o) {
4780 Set<?> c;
4781 return ((o instanceof Set) &&
4782 ((c = (Set<?>)o) == this ||
4783 (containsAll(c) && c.containsAll(this))));
4784 }
4785 }
4786
4787 /**
4788 * A view of a ConcurrentHashMap as a {@link Collection} of
4789 * values, in which additions are disabled. This class cannot be
4790 * directly instantiated. See {@link #values},
4791 *
4792 * <p>The view's {@code iterator} is a "weakly consistent" iterator
4793 * that will never throw {@link ConcurrentModificationException},
4794 * and guarantees to traverse elements as they existed upon
4795 * construction of the iterator, and may (but is not guaranteed to)
4796 * reflect any modifications subsequent to construction.
4797 */
4798 public static final class ValuesView<K,V> extends CHMView<K,V>
4799 implements Collection<V> {
4800 ValuesView(ConcurrentHashMap<K, V> map) { super(map); }
4801 public final boolean contains(Object o) { return map.containsValue(o); }
4802 public final boolean remove(Object o) {
4803 if (o != null) {
4804 Iterator<V> it = new ValueIterator<K,V>(map);
4805 while (it.hasNext()) {
4806 if (o.equals(it.next())) {
4807 it.remove();
4808 return true;
4809 }
4810 }
4811 }
4812 return false;
4813 }
4814
4815 /**
4816 * Returns a "weakly consistent" iterator that will never
4817 * throw {@link ConcurrentModificationException}, and
4818 * guarantees to traverse elements as they existed upon
4819 * construction of the iterator, and may (but is not
4820 * guaranteed to) reflect any modifications subsequent to
4821 * construction.
4822 *
4823 * @return an iterator over the values of this map
4824 */
4825 public final Iterator<V> iterator() {
4826 return new ValueIterator<K,V>(map);
4827 }
4828 public final boolean add(V e) {
4829 throw new UnsupportedOperationException();
4830 }
4831 public final boolean addAll(Collection<? extends V> c) {
4832 throw new UnsupportedOperationException();
4833 }
4834
4835 }
4836
4837 /**
4838 * A view of a ConcurrentHashMap as a {@link Set} of (key, value)
4839 * entries. This class cannot be directly instantiated. See
4840 * {@link #entrySet}.
4841 */
4842 public static final class EntrySetView<K,V> extends CHMView<K,V>
4843 implements Set<Map.Entry<K,V>> {
4844 EntrySetView(ConcurrentHashMap<K, V> map) { super(map); }
4845 public final boolean contains(Object o) {
4846 Object k, v, r; Map.Entry<?,?> e;
4847 return ((o instanceof Map.Entry) &&
4848 (k = (e = (Map.Entry<?,?>)o).getKey()) != null &&
4849 (r = map.get(k)) != null &&
4850 (v = e.getValue()) != null &&
4851 (v == r || v.equals(r)));
4852 }
4853 public final boolean remove(Object o) {
4854 Object k, v; Map.Entry<?,?> e;
4855 return ((o instanceof Map.Entry) &&
4856 (k = (e = (Map.Entry<?,?>)o).getKey()) != null &&
4857 (v = e.getValue()) != null &&
4858 map.remove(k, v));
4859 }
4860
4861 /**
4862 * Returns a "weakly consistent" iterator that will never
4863 * throw {@link ConcurrentModificationException}, and
4864 * guarantees to traverse elements as they existed upon
4865 * construction of the iterator, and may (but is not
4866 * guaranteed to) reflect any modifications subsequent to
4867 * construction.
4868 *
4869 * @return an iterator over the entries of this map
4870 */
4871 public final Iterator<Map.Entry<K,V>> iterator() {
4872 return new EntryIterator<K,V>(map);
4873 }
4874
4875 public final boolean add(Entry<K,V> e) {
4876 K key = e.getKey();
4877 V value = e.getValue();
4878 if (key == null || value == null)
4879 throw new NullPointerException();
4880 return map.internalPut(key, value, false) == null;
4881 }
4882 public final boolean addAll(Collection<? extends Entry<K,V>> c) {
4883 boolean added = false;
4884 for (Entry<K,V> e : c) {
4885 if (add(e))
4886 added = true;
4887 }
4888 return added;
4889 }
4890 public boolean equals(Object o) {
4891 Set<?> c;
4892 return ((o instanceof Set) &&
4893 ((c = (Set<?>)o) == this ||
4894 (containsAll(c) && c.containsAll(this))));
4895 }
4896 }
4897
4898 // ---------------------------------------------------------------------
4899
4900 /**
4901 * Predefined tasks for performing bulk parallel operations on
4902 * ConcurrentHashMaps. These tasks follow the forms and rules used
4903 * for bulk operations. Each method has the same name, but returns
4904 * a task rather than invoking it. These methods may be useful in
4905 * custom applications such as submitting a task without waiting
4906 * for completion, using a custom pool, or combining with other
4907 * tasks.
4908 */
4909 public static class ForkJoinTasks {
4910 private ForkJoinTasks() {}
4911
        /**
         * Returns a task that when invoked, performs the given
         * action for each (key, value).
         *
         * @param map the map
         * @param action the action
         * @return the task
         * @throws NullPointerException if the action is null
         */
        public static <K,V> ForkJoinTask<Void> forEach
            (ConcurrentHashMap<K,V> map,
             BiAction<K,V> action) {
            if (action == null) throw new NullPointerException();
            return new ForEachMappingTask<K,V>(map, null, -1, action);
        }
4926
        /**
         * Returns a task that when invoked, performs the given
         * action for each non-null transformation of each (key, value).
         *
         * @param map the map
         * @param transformer a function returning the transformation
         * for an element, or null if there is no transformation (in
         * which case the action is not applied)
         * @param action the action
         * @return the task
         * @throws NullPointerException if the transformer or action is null
         */
        public static <K,V,U> ForkJoinTask<Void> forEach
            (ConcurrentHashMap<K,V> map,
             BiFun<? super K, ? super V, ? extends U> transformer,
             Action<U> action) {
            if (transformer == null || action == null)
                throw new NullPointerException();
            return new ForEachTransformedMappingTask<K,V,U>
                (map, null, -1, transformer, action);
        }
4947
        /**
         * Returns a task that when invoked, returns a non-null result
         * from applying the given search function on each (key,
         * value), or null if none. Upon success, further element
         * processing is suppressed and the results of any other
         * parallel invocations of the search function are ignored.
         *
         * @param map the map
         * @param searchFunction a function returning a non-null
         * result on success, else null
         * @return the task
         * @throws NullPointerException if the search function is null
         */
        public static <K,V,U> ForkJoinTask<U> search
            (ConcurrentHashMap<K,V> map,
             BiFun<? super K, ? super V, ? extends U> searchFunction) {
            if (searchFunction == null) throw new NullPointerException();
            // the shared AtomicReference collects the first success
            return new SearchMappingsTask<K,V,U>
                (map, null, -1, searchFunction,
                 new AtomicReference<U>());
        }
4968
        /**
         * Returns a task that when invoked, returns the result of
         * accumulating the given transformation of all (key, value) pairs
         * using the given reducer to combine values, or null if none.
         *
         * @param map the map
         * @param transformer a function returning the transformation
         * for an element, or null if there is no transformation (in
         * which case it is not combined)
         * @param reducer a commutative associative combining function
         * @return the task
         * @throws NullPointerException if the transformer or reducer is null
         */
        public static <K,V,U> ForkJoinTask<U> reduce
            (ConcurrentHashMap<K,V> map,
             BiFun<? super K, ? super V, ? extends U> transformer,
             BiFun<? super U, ? super U, ? extends U> reducer) {
            if (transformer == null || reducer == null)
                throw new NullPointerException();
            return new MapReduceMappingsTask<K,V,U>
                (map, null, -1, null, transformer, reducer);
        }
4990
        /**
         * Returns a task that when invoked, returns the result of
         * accumulating the given transformation of all (key, value) pairs
         * using the given reducer to combine values, and the given
         * basis as an identity value.
         *
         * @param map the map
         * @param transformer a function returning the transformation
         * for an element
         * @param basis the identity (initial default value) for the reduction
         * @param reducer a commutative associative combining function
         * @return the task
         * @throws NullPointerException if the transformer or reducer is null
         */
        public static <K,V> ForkJoinTask<Double> reduceToDouble
            (ConcurrentHashMap<K,V> map,
             ObjectByObjectToDouble<? super K, ? super V> transformer,
             double basis,
             DoubleByDoubleToDouble reducer) {
            if (transformer == null || reducer == null)
                throw new NullPointerException();
            return new MapReduceMappingsToDoubleTask<K,V>
                (map, null, -1, null, transformer, basis, reducer);
        }
5014
        /**
         * Returns a task that when invoked, returns the result of
         * accumulating the given transformation of all (key, value) pairs
         * using the given reducer to combine values, and the given
         * basis as an identity value.
         *
         * @param map the map
         * @param transformer a function returning the transformation
         * for an element
         * @param basis the identity (initial default value) for the reduction
         * @param reducer a commutative associative combining function
         * @return the task
         * @throws NullPointerException if the transformer or reducer is null
         */
        public static <K,V> ForkJoinTask<Long> reduceToLong
            (ConcurrentHashMap<K,V> map,
             ObjectByObjectToLong<? super K, ? super V> transformer,
             long basis,
             LongByLongToLong reducer) {
            if (transformer == null || reducer == null)
                throw new NullPointerException();
            return new MapReduceMappingsToLongTask<K,V>
                (map, null, -1, null, transformer, basis, reducer);
        }
5038
        /**
         * Returns a task that when invoked, returns the result of
         * accumulating the given transformation of all (key, value) pairs
         * using the given reducer to combine values, and the given
         * basis as an identity value.
         *
         * @param map the map
         * @param transformer a function returning the transformation
         * for an element
         * @param basis the identity (initial default value) for the reduction
         * @param reducer a commutative associative combining function
         * @return the task
         * @throws NullPointerException if the transformer or reducer is null
         */
        public static <K,V> ForkJoinTask<Integer> reduceToInt
            (ConcurrentHashMap<K,V> map,
             ObjectByObjectToInt<? super K, ? super V> transformer,
             int basis,
             IntByIntToInt reducer) {
            if (transformer == null || reducer == null)
                throw new NullPointerException();
            return new MapReduceMappingsToIntTask<K,V>
                (map, null, -1, null, transformer, basis, reducer);
        }
5061
        /**
         * Returns a task that when invoked, performs the given action
         * for each key.
         *
         * @param map the map
         * @param action the action
         * @return the task
         * @throws NullPointerException if the action is null
         */
        public static <K,V> ForkJoinTask<Void> forEachKey
            (ConcurrentHashMap<K,V> map,
             Action<K> action) {
            if (action == null) throw new NullPointerException();
            return new ForEachKeyTask<K,V>(map, null, -1, action);
        }
5076
        /**
         * Returns a task that when invoked, performs the given action
         * for each non-null transformation of each key.
         *
         * @param map the map
         * @param transformer a function returning the transformation
         * for an element, or null if there is no transformation (in
         * which case the action is not applied)
         * @param action the action
         * @return the task
         * @throws NullPointerException if the transformer or action is null
         */
        public static <K,V,U> ForkJoinTask<Void> forEachKey
            (ConcurrentHashMap<K,V> map,
             Fun<? super K, ? extends U> transformer,
             Action<U> action) {
            if (transformer == null || action == null)
                throw new NullPointerException();
            return new ForEachTransformedKeyTask<K,V,U>
                (map, null, -1, transformer, action);
        }
5097
        /**
         * Returns a task that when invoked, returns a non-null result
         * from applying the given search function on each key, or
         * null if none. Upon success, further element processing is
         * suppressed and the results of any other parallel
         * invocations of the search function are ignored.
         *
         * @param map the map
         * @param searchFunction a function returning a non-null
         * result on success, else null
         * @return the task
         * @throws NullPointerException if the search function is null
         */
        public static <K,V,U> ForkJoinTask<U> searchKeys
            (ConcurrentHashMap<K,V> map,
             Fun<? super K, ? extends U> searchFunction) {
            if (searchFunction == null) throw new NullPointerException();
            // the shared AtomicReference collects the first success
            return new SearchKeysTask<K,V,U>
                (map, null, -1, searchFunction,
                 new AtomicReference<U>());
        }
5118
        /**
         * Returns a task that when invoked, returns the result of
         * accumulating all keys using the given reducer to combine
         * values, or null if none.
         *
         * @param map the map
         * @param reducer a commutative associative combining function
         * @return the task
         * @throws NullPointerException if the reducer is null
         */
        public static <K,V> ForkJoinTask<K> reduceKeys
            (ConcurrentHashMap<K,V> map,
             BiFun<? super K, ? super K, ? extends K> reducer) {
            if (reducer == null) throw new NullPointerException();
            return new ReduceKeysTask<K,V>
                (map, null, -1, null, reducer);
        }
5135
        /**
         * Returns a task that when invoked, returns the result of
         * accumulating the given transformation of all keys using the given
         * reducer to combine values, or null if none.
         *
         * @param map the map
         * @param transformer a function returning the transformation
         * for an element, or null if there is no transformation (in
         * which case it is not combined)
         * @param reducer a commutative associative combining function
         * @return the task
         * @throws NullPointerException if the transformer or reducer is null
         */
        public static <K,V,U> ForkJoinTask<U> reduceKeys
            (ConcurrentHashMap<K,V> map,
             Fun<? super K, ? extends U> transformer,
             BiFun<? super U, ? super U, ? extends U> reducer) {
            if (transformer == null || reducer == null)
                throw new NullPointerException();
            return new MapReduceKeysTask<K,V,U>
                (map, null, -1, null, transformer, reducer);
        }
5157
        /**
         * Returns a task that when invoked, returns the result of
         * accumulating the given transformation of all keys using the given
         * reducer to combine values, and the given basis as an
         * identity value.
         *
         * @param map the map
         * @param transformer a function returning the transformation
         * for an element
         * @param basis the identity (initial default value) for the reduction
         * @param reducer a commutative associative combining function
         * @return the task
         * @throws NullPointerException if the transformer or reducer is null
         */
        public static <K,V> ForkJoinTask<Double> reduceKeysToDouble
            (ConcurrentHashMap<K,V> map,
             ObjectToDouble<? super K> transformer,
             double basis,
             DoubleByDoubleToDouble reducer) {
            if (transformer == null || reducer == null)
                throw new NullPointerException();
            return new MapReduceKeysToDoubleTask<K,V>
                (map, null, -1, null, transformer, basis, reducer);
        }
5181
        /**
         * Returns a task that when invoked, returns the result of
         * accumulating the given transformation of all keys using the given
         * reducer to combine values, and the given basis as an
         * identity value.
         *
         * @param map the map
         * @param transformer a function returning the transformation
         * for an element
         * @param basis the identity (initial default value) for the reduction
         * @param reducer a commutative associative combining function
         * @return the task
         * @throws NullPointerException if the transformer or reducer is null
         */
        public static <K,V> ForkJoinTask<Long> reduceKeysToLong
            (ConcurrentHashMap<K,V> map,
             ObjectToLong<? super K> transformer,
             long basis,
             LongByLongToLong reducer) {
            if (transformer == null || reducer == null)
                throw new NullPointerException();
            return new MapReduceKeysToLongTask<K,V>
                (map, null, -1, null, transformer, basis, reducer);
        }
5205
        /**
         * Returns a task that when invoked, returns the result of
         * accumulating the given transformation of all keys using the given
         * reducer to combine values, and the given basis as an
         * identity value.
         *
         * @param map the map
         * @param transformer a function returning the transformation
         * for an element
         * @param basis the identity (initial default value) for the reduction
         * @param reducer a commutative associative combining function
         * @return the task
         * @throws NullPointerException if the transformer or reducer is null
         */
        public static <K,V> ForkJoinTask<Integer> reduceKeysToInt
            (ConcurrentHashMap<K,V> map,
             ObjectToInt<? super K> transformer,
             int basis,
             IntByIntToInt reducer) {
            if (transformer == null || reducer == null)
                throw new NullPointerException();
            return new MapReduceKeysToIntTask<K,V>
                (map, null, -1, null, transformer, basis, reducer);
        }
5229
        /**
         * Returns a task that when invoked, performs the given action
         * for each value.
         *
         * @param map the map
         * @param action the action
         * @return the task
         * @throws NullPointerException if the action is null
         */
        public static <K,V> ForkJoinTask<Void> forEachValue
            (ConcurrentHashMap<K,V> map,
             Action<V> action) {
            if (action == null) throw new NullPointerException();
            return new ForEachValueTask<K,V>(map, null, -1, action);
        }
5243
        /**
         * Returns a task that when invoked, performs the given action
         * for each non-null transformation of each value.
         *
         * @param map the map
         * @param transformer a function returning the transformation
         * for an element, or null if there is no transformation (in
         * which case the action is not applied)
         * @param action the action
         * @return the task
         * @throws NullPointerException if the transformer or action is null
         */
        public static <K,V,U> ForkJoinTask<Void> forEachValue
            (ConcurrentHashMap<K,V> map,
             Fun<? super V, ? extends U> transformer,
             Action<U> action) {
            if (transformer == null || action == null)
                throw new NullPointerException();
            return new ForEachTransformedValueTask<K,V,U>
                (map, null, -1, transformer, action);
        }
5263
        /**
         * Returns a task that when invoked, returns a non-null result
         * from applying the given search function on each value, or
         * null if none. Upon success, further element processing is
         * suppressed and the results of any other parallel
         * invocations of the search function are ignored.
         *
         * @param map the map
         * @param searchFunction a function returning a non-null
         * result on success, else null
         * @return the task
         * @throws NullPointerException if the search function is null
         */
        public static <K,V,U> ForkJoinTask<U> searchValues
            (ConcurrentHashMap<K,V> map,
             Fun<? super V, ? extends U> searchFunction) {
            if (searchFunction == null) throw new NullPointerException();
            // the shared AtomicReference collects the first success
            return new SearchValuesTask<K,V,U>
                (map, null, -1, searchFunction,
                 new AtomicReference<U>());
        }
5284
        /**
         * Returns a task that when invoked, returns the result of
         * accumulating all values using the given reducer to combine
         * values, or null if none.
         *
         * @param map the map
         * @param reducer a commutative associative combining function
         * @return the task
         * @throws NullPointerException if the reducer is null
         */
        public static <K,V> ForkJoinTask<V> reduceValues
            (ConcurrentHashMap<K,V> map,
             BiFun<? super V, ? super V, ? extends V> reducer) {
            if (reducer == null) throw new NullPointerException();
            return new ReduceValuesTask<K,V>
                (map, null, -1, null, reducer);
        }
5301
        /**
         * Returns a task that when invoked, returns the result of
         * accumulating the given transformation of all values using the
         * given reducer to combine values, or null if none.
         *
         * @param map the map
         * @param transformer a function returning the transformation
         * for an element, or null if there is no transformation (in
         * which case it is not combined)
         * @param reducer a commutative associative combining function
         * @return the task
         * @throws NullPointerException if the transformer or reducer is null
         */
        public static <K,V,U> ForkJoinTask<U> reduceValues
            (ConcurrentHashMap<K,V> map,
             Fun<? super V, ? extends U> transformer,
             BiFun<? super U, ? super U, ? extends U> reducer) {
            if (transformer == null || reducer == null)
                throw new NullPointerException();
            return new MapReduceValuesTask<K,V,U>
                (map, null, -1, null, transformer, reducer);
        }
5323
        /**
         * Returns a task that when invoked, returns the result of
         * accumulating the given transformation of all values using the
         * given reducer to combine values, and the given basis as an
         * identity value.
         *
         * @param map the map
         * @param transformer a function returning the transformation
         * for an element
         * @param basis the identity (initial default value) for the reduction
         * @param reducer a commutative associative combining function
         * @return the task
         * @throws NullPointerException if the transformer or reducer is null
         */
        public static <K,V> ForkJoinTask<Double> reduceValuesToDouble
            (ConcurrentHashMap<K,V> map,
             ObjectToDouble<? super V> transformer,
             double basis,
             DoubleByDoubleToDouble reducer) {
            if (transformer == null || reducer == null)
                throw new NullPointerException();
            return new MapReduceValuesToDoubleTask<K,V>
                (map, null, -1, null, transformer, basis, reducer);
        }
5347
        /**
         * Returns a task that when invoked, returns the result of
         * accumulating the given transformation of all values using the
         * given reducer to combine values, and the given basis as an
         * identity value.
         *
         * @param map the map
         * @param transformer a function returning the transformation
         * for an element
         * @param basis the identity (initial default value) for the reduction
         * @param reducer a commutative associative combining function
         * @return the task
         * @throws NullPointerException if the transformer or reducer is null
         */
        public static <K,V> ForkJoinTask<Long> reduceValuesToLong
            (ConcurrentHashMap<K,V> map,
             ObjectToLong<? super V> transformer,
             long basis,
             LongByLongToLong reducer) {
            if (transformer == null || reducer == null)
                throw new NullPointerException();
            return new MapReduceValuesToLongTask<K,V>
                (map, null, -1, null, transformer, basis, reducer);
        }
5371
        /**
         * Returns a task that when invoked, returns the result of
         * accumulating the given transformation of all values using the
         * given reducer to combine values, and the given basis as an
         * identity value.
         *
         * @param map the map
         * @param transformer a function returning the transformation
         * for an element
         * @param basis the identity (initial default value) for the reduction
         * @param reducer a commutative associative combining function
         * @return the task
         * @throws NullPointerException if the transformer or reducer is null
         */
        public static <K,V> ForkJoinTask<Integer> reduceValuesToInt
            (ConcurrentHashMap<K,V> map,
             ObjectToInt<? super V> transformer,
             int basis,
             IntByIntToInt reducer) {
            if (transformer == null || reducer == null)
                throw new NullPointerException();
            return new MapReduceValuesToIntTask<K,V>
                (map, null, -1, null, transformer, basis, reducer);
        }
5395
        /**
         * Returns a task that when invoked, performs the given action
         * for each entry.
         *
         * @param map the map
         * @param action the action
         * @return the task
         * @throws NullPointerException if the action is null
         */
        public static <K,V> ForkJoinTask<Void> forEachEntry
            (ConcurrentHashMap<K,V> map,
             Action<Map.Entry<K,V>> action) {
            if (action == null) throw new NullPointerException();
            return new ForEachEntryTask<K,V>(map, null, -1, action);
        }
5409
        /**
         * Returns a task that when invoked, performs the given action
         * for each non-null transformation of each entry.
         *
         * @param map the map
         * @param transformer a function returning the transformation
         * for an element, or null if there is no transformation (in
         * which case the action is not applied)
         * @param action the action
         * @return the task
         * @throws NullPointerException if the transformer or action is null
         */
        public static <K,V,U> ForkJoinTask<Void> forEachEntry
            (ConcurrentHashMap<K,V> map,
             Fun<Map.Entry<K,V>, ? extends U> transformer,
             Action<U> action) {
            if (transformer == null || action == null)
                throw new NullPointerException();
            return new ForEachTransformedEntryTask<K,V,U>
                (map, null, -1, transformer, action);
        }
5429
        /**
         * Returns a task that when invoked, returns a non-null result
         * from applying the given search function on each entry, or
         * null if none. Upon success, further element processing is
         * suppressed and the results of any other parallel
         * invocations of the search function are ignored.
         *
         * @param map the map
         * @param searchFunction a function returning a non-null
         * result on success, else null
         * @return the task
         * @throws NullPointerException if the search function is null
         */
        public static <K,V,U> ForkJoinTask<U> searchEntries
            (ConcurrentHashMap<K,V> map,
             Fun<Map.Entry<K,V>, ? extends U> searchFunction) {
            if (searchFunction == null) throw new NullPointerException();
            // the shared AtomicReference collects the first success
            return new SearchEntriesTask<K,V,U>
                (map, null, -1, searchFunction,
                 new AtomicReference<U>());
        }
5450
        /**
         * Returns a task that when invoked, returns the result of
         * accumulating all entries using the given reducer to combine
         * values, or null if none.
         *
         * @param map the map
         * @param reducer a commutative associative combining function
         * @return the task
         * @throws NullPointerException if the reducer is null
         */
        public static <K,V> ForkJoinTask<Map.Entry<K,V>> reduceEntries
            (ConcurrentHashMap<K,V> map,
             BiFun<Map.Entry<K,V>, Map.Entry<K,V>, ? extends Map.Entry<K,V>> reducer) {
            if (reducer == null) throw new NullPointerException();
            return new ReduceEntriesTask<K,V>
                (map, null, -1, null, reducer);
        }
5467
        /**
         * Returns a task that when invoked, returns the result of
         * accumulating the given transformation of all entries using the
         * given reducer to combine values, or null if none.
         *
         * @param map the map
         * @param transformer a function returning the transformation
         * for an element, or null if there is no transformation (in
         * which case it is not combined)
         * @param reducer a commutative associative combining function
         * @return the task
         * @throws NullPointerException if the transformer or reducer is null
         */
        public static <K,V,U> ForkJoinTask<U> reduceEntries
            (ConcurrentHashMap<K,V> map,
             Fun<Map.Entry<K,V>, ? extends U> transformer,
             BiFun<? super U, ? super U, ? extends U> reducer) {
            if (transformer == null || reducer == null)
                throw new NullPointerException();
            return new MapReduceEntriesTask<K,V,U>
                (map, null, -1, null, transformer, reducer);
        }
5489
5490 /**
5491 * Returns a task that when invoked, returns the result of
5492 * accumulating the given transformation of all entries using the
5493 * given reducer to combine values, and the given basis as an
5494 * identity value.
5495 *
5496 * @param map the map
5497 * @param transformer a function returning the transformation
5498 * for an element
5499 * @param basis the identity (initial default value) for the reduction
5500 * @param reducer a commutative associative combining function
5501 * @return the task
5502 */
5503 public static <K,V> ForkJoinTask<Double> reduceEntriesToDouble
5504 (ConcurrentHashMap<K,V> map,
5505 ObjectToDouble<Map.Entry<K,V>> transformer,
5506 double basis,
5507 DoubleByDoubleToDouble reducer) {
5508 if (transformer == null || reducer == null)
5509 throw new NullPointerException();
5510 return new MapReduceEntriesToDoubleTask<K,V>
5511 (map, null, -1, null, transformer, basis, reducer);
5512 }
5513
5514 /**
5515 * Returns a task that when invoked, returns the result of
5516 * accumulating the given transformation of all entries using the
5517 * given reducer to combine values, and the given basis as an
5518 * identity value.
5519 *
5520 * @param map the map
5521 * @param transformer a function returning the transformation
5522 * for an element
5523 * @param basis the identity (initial default value) for the reduction
5524 * @param reducer a commutative associative combining function
5525 * @return the task
5526 */
5527 public static <K,V> ForkJoinTask<Long> reduceEntriesToLong
5528 (ConcurrentHashMap<K,V> map,
5529 ObjectToLong<Map.Entry<K,V>> transformer,
5530 long basis,
5531 LongByLongToLong reducer) {
5532 if (transformer == null || reducer == null)
5533 throw new NullPointerException();
5534 return new MapReduceEntriesToLongTask<K,V>
5535 (map, null, -1, null, transformer, basis, reducer);
5536 }
5537
5538 /**
5539 * Returns a task that when invoked, returns the result of
5540 * accumulating the given transformation of all entries using the
5541 * given reducer to combine values, and the given basis as an
5542 * identity value.
5543 *
5544 * @param map the map
5545 * @param transformer a function returning the transformation
5546 * for an element
5547 * @param basis the identity (initial default value) for the reduction
5548 * @param reducer a commutative associative combining function
5549 * @return the task
5550 */
5551 public static <K,V> ForkJoinTask<Integer> reduceEntriesToInt
5552 (ConcurrentHashMap<K,V> map,
5553 ObjectToInt<Map.Entry<K,V>> transformer,
5554 int basis,
5555 IntByIntToInt reducer) {
5556 if (transformer == null || reducer == null)
5557 throw new NullPointerException();
5558 return new MapReduceEntriesToIntTask<K,V>
5559 (map, null, -1, null, transformer, basis, reducer);
5560 }
5561 }
5562
5563 // -------------------------------------------------------
5564
    /*
     * Task classes. Coded in a regular but ugly format/style to
     * simplify checks that each variant differs in the right way from
     * the others. The null screenings exist because compilers cannot
     * tell that we've already null-checked task arguments, so we force
     * the simplest hoisted bypass to help avoid convoluted traps.
     */
5572
    /**
     * Task applying the given action to every key of the map,
     * splitting into parallel subtasks while profitable.
     */
    @SuppressWarnings("serial") static final class ForEachKeyTask<K,V>
        extends Traverser<K,V,Void> {
        final Action<K> action;
        ForEachKeyTask
            (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
             Action<K> action) {
            super(m, p, b);
            this.action = action;
        }
        @SuppressWarnings("unchecked") public final void compute() {
            final Action<K> action;
            // Hoisted null screen: compilers cannot prove the field non-null.
            if ((action = this.action) != null) {
                // Fork off right-hand subtasks while splitting pays off.
                for (int b; (b = preSplit()) > 0;)
                    new ForEachKeyTask<K,V>(map, this, b, action).fork();
                // Apply the action to every key in this task's portion.
                while (advance() != null)
                    action.apply((K)nextKey);
                propagateCompletion();
            }
        }
    }
5593
    /**
     * Task applying the given action to every value of the map,
     * splitting into parallel subtasks while profitable.
     */
    @SuppressWarnings("serial") static final class ForEachValueTask<K,V>
        extends Traverser<K,V,Void> {
        final Action<V> action;
        ForEachValueTask
            (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
             Action<V> action) {
            super(m, p, b);
            this.action = action;
        }
        @SuppressWarnings("unchecked") public final void compute() {
            final Action<V> action;
            // Hoisted null screen: compilers cannot prove the field non-null.
            if ((action = this.action) != null) {
                // Fork off right-hand subtasks while splitting pays off.
                for (int b; (b = preSplit()) > 0;)
                    new ForEachValueTask<K,V>(map, this, b, action).fork();
                V v;
                // Apply the action to every value in this task's portion.
                while ((v = advance()) != null)
                    action.apply(v);
                propagateCompletion();
            }
        }
    }
5615
    /**
     * Task applying the given action to every entry of the map,
     * splitting into parallel subtasks while profitable.
     */
    @SuppressWarnings("serial") static final class ForEachEntryTask<K,V>
        extends Traverser<K,V,Void> {
        final Action<Entry<K,V>> action;
        ForEachEntryTask
            (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
             Action<Entry<K,V>> action) {
            super(m, p, b);
            this.action = action;
        }
        @SuppressWarnings("unchecked") public final void compute() {
            final Action<Entry<K,V>> action;
            // Hoisted null screen: compilers cannot prove the field non-null.
            if ((action = this.action) != null) {
                // Fork off right-hand subtasks while splitting pays off.
                for (int b; (b = preSplit()) > 0;)
                    new ForEachEntryTask<K,V>(map, this, b, action).fork();
                V v;
                // Materialize an entry for each traversed key/value pair.
                while ((v = advance()) != null)
                    action.apply(entryFor((K)nextKey, v));
                propagateCompletion();
            }
        }
    }
5637
    /**
     * Task applying the given two-argument action to every (key,
     * value) mapping, splitting into parallel subtasks while
     * profitable.
     */
    @SuppressWarnings("serial") static final class ForEachMappingTask<K,V>
        extends Traverser<K,V,Void> {
        final BiAction<K,V> action;
        ForEachMappingTask
            (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
             BiAction<K,V> action) {
            super(m, p, b);
            this.action = action;
        }
        @SuppressWarnings("unchecked") public final void compute() {
            final BiAction<K,V> action;
            // Hoisted null screen: compilers cannot prove the field non-null.
            if ((action = this.action) != null) {
                // Fork off right-hand subtasks while splitting pays off.
                for (int b; (b = preSplit()) > 0;)
                    new ForEachMappingTask<K,V>(map, this, b, action).fork();
                V v;
                // Apply the action to each key/value pair in this portion.
                while ((v = advance()) != null)
                    action.apply((K)nextKey, v);
                propagateCompletion();
            }
        }
    }
5659
    /**
     * Task applying the given action to each non-null transformation
     * of a key; null transformer results are skipped. Splits into
     * parallel subtasks while profitable.
     */
    @SuppressWarnings("serial") static final class ForEachTransformedKeyTask<K,V,U>
        extends Traverser<K,V,Void> {
        final Fun<? super K, ? extends U> transformer;
        final Action<U> action;
        ForEachTransformedKeyTask
            (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
             Fun<? super K, ? extends U> transformer, Action<U> action) {
            super(m, p, b);
            this.transformer = transformer; this.action = action;
        }
        @SuppressWarnings("unchecked") public final void compute() {
            final Fun<? super K, ? extends U> transformer;
            final Action<U> action;
            // Hoisted null screens: compilers cannot prove the fields non-null.
            if ((transformer = this.transformer) != null &&
                (action = this.action) != null) {
                // Fork off right-hand subtasks while splitting pays off.
                for (int b; (b = preSplit()) > 0;)
                    new ForEachTransformedKeyTask<K,V,U>
                        (map, this, b, transformer, action).fork();
                U u;
                // Act only on non-null transformations.
                while (advance() != null) {
                    if ((u = transformer.apply((K)nextKey)) != null)
                        action.apply(u);
                }
                propagateCompletion();
            }
        }
    }
5687
    /**
     * Task applying the given action to each non-null transformation
     * of a value; null transformer results are skipped. Splits into
     * parallel subtasks while profitable.
     */
    @SuppressWarnings("serial") static final class ForEachTransformedValueTask<K,V,U>
        extends Traverser<K,V,Void> {
        final Fun<? super V, ? extends U> transformer;
        final Action<U> action;
        ForEachTransformedValueTask
            (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
             Fun<? super V, ? extends U> transformer, Action<U> action) {
            super(m, p, b);
            this.transformer = transformer; this.action = action;
        }
        @SuppressWarnings("unchecked") public final void compute() {
            final Fun<? super V, ? extends U> transformer;
            final Action<U> action;
            // Hoisted null screens: compilers cannot prove the fields non-null.
            if ((transformer = this.transformer) != null &&
                (action = this.action) != null) {
                // Fork off right-hand subtasks while splitting pays off.
                for (int b; (b = preSplit()) > 0;)
                    new ForEachTransformedValueTask<K,V,U>
                        (map, this, b, transformer, action).fork();
                V v; U u;
                // Act only on non-null transformations.
                while ((v = advance()) != null) {
                    if ((u = transformer.apply(v)) != null)
                        action.apply(u);
                }
                propagateCompletion();
            }
        }
    }
5715
    /**
     * Task applying the given action to each non-null transformation
     * of an entry; null transformer results are skipped. Splits into
     * parallel subtasks while profitable.
     */
    @SuppressWarnings("serial") static final class ForEachTransformedEntryTask<K,V,U>
        extends Traverser<K,V,Void> {
        final Fun<Map.Entry<K,V>, ? extends U> transformer;
        final Action<U> action;
        ForEachTransformedEntryTask
            (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
             Fun<Map.Entry<K,V>, ? extends U> transformer, Action<U> action) {
            super(m, p, b);
            this.transformer = transformer; this.action = action;
        }
        @SuppressWarnings("unchecked") public final void compute() {
            final Fun<Map.Entry<K,V>, ? extends U> transformer;
            final Action<U> action;
            // Hoisted null screens: compilers cannot prove the fields non-null.
            if ((transformer = this.transformer) != null &&
                (action = this.action) != null) {
                // Fork off right-hand subtasks while splitting pays off.
                for (int b; (b = preSplit()) > 0;)
                    new ForEachTransformedEntryTask<K,V,U>
                        (map, this, b, transformer, action).fork();
                V v; U u;
                // Materialize an entry per pair; act only on non-null results.
                while ((v = advance()) != null) {
                    if ((u = transformer.apply(entryFor((K)nextKey,
                                                        v))) != null)
                        action.apply(u);
                }
                propagateCompletion();
            }
        }
    }
5744
    /**
     * Task applying the given action to each non-null transformation
     * of a (key, value) mapping; null transformer results are
     * skipped. Splits into parallel subtasks while profitable.
     */
    @SuppressWarnings("serial") static final class ForEachTransformedMappingTask<K,V,U>
        extends Traverser<K,V,Void> {
        final BiFun<? super K, ? super V, ? extends U> transformer;
        final Action<U> action;
        ForEachTransformedMappingTask
            (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
             BiFun<? super K, ? super V, ? extends U> transformer,
             Action<U> action) {
            super(m, p, b);
            this.transformer = transformer; this.action = action;
        }
        @SuppressWarnings("unchecked") public final void compute() {
            final BiFun<? super K, ? super V, ? extends U> transformer;
            final Action<U> action;
            // Hoisted null screens: compilers cannot prove the fields non-null.
            if ((transformer = this.transformer) != null &&
                (action = this.action) != null) {
                // Fork off right-hand subtasks while splitting pays off.
                for (int b; (b = preSplit()) > 0;)
                    new ForEachTransformedMappingTask<K,V,U>
                        (map, this, b, transformer, action).fork();
                V v; U u;
                // Act only on non-null transformations.
                while ((v = advance()) != null) {
                    if ((u = transformer.apply((K)nextKey, v)) != null)
                        action.apply(u);
                }
                propagateCompletion();
            }
        }
    }
5773
    /**
     * Task searching keys for the first non-null result of the given
     * function. The first success is published via CAS on the shared
     * {@code result} reference, which also signals all sibling tasks
     * to quit.
     */
    @SuppressWarnings("serial") static final class SearchKeysTask<K,V,U>
        extends Traverser<K,V,U> {
        final Fun<? super K, ? extends U> searchFunction;
        final AtomicReference<U> result;  // shared across all subtasks of one search
        SearchKeysTask
            (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
             Fun<? super K, ? extends U> searchFunction,
             AtomicReference<U> result) {
            super(m, p, b);
            this.searchFunction = searchFunction; this.result = result;
        }
        public final U getRawResult() { return result.get(); }
        @SuppressWarnings("unchecked") public final void compute() {
            final Fun<? super K, ? extends U> searchFunction;
            final AtomicReference<U> result;
            // Hoisted null screens: compilers cannot prove the fields non-null.
            if ((searchFunction = this.searchFunction) != null &&
                (result = this.result) != null) {
                // Split while profitable, but quit if a result already exists.
                for (int b;;) {
                    if (result.get() != null)
                        return;
                    if ((b = preSplit()) <= 0)
                        break;
                    new SearchKeysTask<K,V,U>
                        (map, this, b, searchFunction, result).fork();
                }
                // Scan until a result appears (here or in a sibling).
                while (result.get() == null) {
                    U u;
                    if (advance() == null) {
                        // Exhausted this portion without success.
                        propagateCompletion();
                        break;
                    }
                    if ((u = searchFunction.apply((K)nextKey)) != null) {
                        // First successful CAS completes the whole tree.
                        if (result.compareAndSet(null, u))
                            quietlyCompleteRoot();
                        break;
                    }
                }
            }
        }
    }
5814
    /**
     * Task searching values for the first non-null result of the
     * given function. The first success is published via CAS on the
     * shared {@code result} reference, which also signals all sibling
     * tasks to quit.
     */
    @SuppressWarnings("serial") static final class SearchValuesTask<K,V,U>
        extends Traverser<K,V,U> {
        final Fun<? super V, ? extends U> searchFunction;
        final AtomicReference<U> result;  // shared across all subtasks of one search
        SearchValuesTask
            (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
             Fun<? super V, ? extends U> searchFunction,
             AtomicReference<U> result) {
            super(m, p, b);
            this.searchFunction = searchFunction; this.result = result;
        }
        public final U getRawResult() { return result.get(); }
        @SuppressWarnings("unchecked") public final void compute() {
            final Fun<? super V, ? extends U> searchFunction;
            final AtomicReference<U> result;
            // Hoisted null screens: compilers cannot prove the fields non-null.
            if ((searchFunction = this.searchFunction) != null &&
                (result = this.result) != null) {
                // Split while profitable, but quit if a result already exists.
                for (int b;;) {
                    if (result.get() != null)
                        return;
                    if ((b = preSplit()) <= 0)
                        break;
                    new SearchValuesTask<K,V,U>
                        (map, this, b, searchFunction, result).fork();
                }
                // Scan until a result appears (here or in a sibling).
                while (result.get() == null) {
                    V v; U u;
                    if ((v = advance()) == null) {
                        // Exhausted this portion without success.
                        propagateCompletion();
                        break;
                    }
                    if ((u = searchFunction.apply(v)) != null) {
                        // First successful CAS completes the whole tree.
                        if (result.compareAndSet(null, u))
                            quietlyCompleteRoot();
                        break;
                    }
                }
            }
        }
    }
5855
    /**
     * Task searching entries for the first non-null result of the
     * given function. The first success is published via CAS on the
     * shared {@code result} reference, which also signals all sibling
     * tasks to quit.
     */
    @SuppressWarnings("serial") static final class SearchEntriesTask<K,V,U>
        extends Traverser<K,V,U> {
        final Fun<Entry<K,V>, ? extends U> searchFunction;
        final AtomicReference<U> result;  // shared across all subtasks of one search
        SearchEntriesTask
            (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
             Fun<Entry<K,V>, ? extends U> searchFunction,
             AtomicReference<U> result) {
            super(m, p, b);
            this.searchFunction = searchFunction; this.result = result;
        }
        public final U getRawResult() { return result.get(); }
        @SuppressWarnings("unchecked") public final void compute() {
            final Fun<Entry<K,V>, ? extends U> searchFunction;
            final AtomicReference<U> result;
            // Hoisted null screens: compilers cannot prove the fields non-null.
            if ((searchFunction = this.searchFunction) != null &&
                (result = this.result) != null) {
                // Split while profitable, but quit if a result already exists.
                for (int b;;) {
                    if (result.get() != null)
                        return;
                    if ((b = preSplit()) <= 0)
                        break;
                    new SearchEntriesTask<K,V,U>
                        (map, this, b, searchFunction, result).fork();
                }
                // Scan until a result appears (here or in a sibling).
                while (result.get() == null) {
                    V v; U u;
                    if ((v = advance()) == null) {
                        // Exhausted this portion without success.
                        propagateCompletion();
                        break;
                    }
                    if ((u = searchFunction.apply(entryFor((K)nextKey,
                                                           v))) != null) {
                        if (result.compareAndSet(null, u))
                            quietlyCompleteRoot();
                        // Note: uses return where sibling Search tasks use
                        // break; equivalent here since nothing follows the loop.
                        return;
                    }
                }
            }
        }
    }
5897
    /**
     * Task searching (key, value) mappings for the first non-null
     * result of the given function. The first success is published
     * via CAS on the shared {@code result} reference, which also
     * signals all sibling tasks to quit.
     */
    @SuppressWarnings("serial") static final class SearchMappingsTask<K,V,U>
        extends Traverser<K,V,U> {
        final BiFun<? super K, ? super V, ? extends U> searchFunction;
        final AtomicReference<U> result;  // shared across all subtasks of one search
        SearchMappingsTask
            (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
             BiFun<? super K, ? super V, ? extends U> searchFunction,
             AtomicReference<U> result) {
            super(m, p, b);
            this.searchFunction = searchFunction; this.result = result;
        }
        public final U getRawResult() { return result.get(); }
        @SuppressWarnings("unchecked") public final void compute() {
            final BiFun<? super K, ? super V, ? extends U> searchFunction;
            final AtomicReference<U> result;
            // Hoisted null screens: compilers cannot prove the fields non-null.
            if ((searchFunction = this.searchFunction) != null &&
                (result = this.result) != null) {
                // Split while profitable, but quit if a result already exists.
                for (int b;;) {
                    if (result.get() != null)
                        return;
                    if ((b = preSplit()) <= 0)
                        break;
                    new SearchMappingsTask<K,V,U>
                        (map, this, b, searchFunction, result).fork();
                }
                // Scan until a result appears (here or in a sibling).
                while (result.get() == null) {
                    V v; U u;
                    if ((v = advance()) == null) {
                        // Exhausted this portion without success.
                        propagateCompletion();
                        break;
                    }
                    if ((u = searchFunction.apply((K)nextKey, v)) != null) {
                        // First successful CAS completes the whole tree.
                        if (result.compareAndSet(null, u))
                            quietlyCompleteRoot();
                        break;
                    }
                }
            }
        }
    }
5938
    /**
     * Task reducing all keys with the given combining function,
     * yielding null for an empty map. Forked right-hand subtasks are
     * linked through {@code rights}/{@code nextRight} and merged as
     * completions arrive.
     */
    @SuppressWarnings("serial") static final class ReduceKeysTask<K,V>
        extends Traverser<K,V,K> {
        final BiFun<? super K, ? super K, ? extends K> reducer;
        K result;                          // reduction over this task's portion
        ReduceKeysTask<K,V> rights, nextRight;  // linked stack of forked subtasks
        ReduceKeysTask
            (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
             ReduceKeysTask<K,V> nextRight,
             BiFun<? super K, ? super K, ? extends K> reducer) {
            super(m, p, b); this.nextRight = nextRight;
            this.reducer = reducer;
        }
        public final K getRawResult() { return result; }
        @SuppressWarnings("unchecked") public final void compute() {
            final BiFun<? super K, ? super K, ? extends K> reducer;
            // Hoisted null screen: compilers cannot prove the field non-null.
            if ((reducer = this.reducer) != null) {
                // Fork off right-hand subtasks while splitting pays off.
                for (int b; (b = preSplit()) > 0;)
                    (rights = new ReduceKeysTask<K,V>
                     (map, this, b, rights, reducer)).fork();
                // Reduce this task's remaining keys locally.
                K r = null;
                while (advance() != null) {
                    K u = (K)nextKey;
                    r = (r == null) ? u : reducer.apply(r, u);
                }
                result = r;
                // On each completed ancestor, fold in its subtasks' results.
                CountedCompleter<?> c;
                for (c = firstComplete(); c != null; c = c.nextComplete()) {
                    ReduceKeysTask<K,V>
                        t = (ReduceKeysTask<K,V>)c,
                        s = t.rights;
                    while (s != null) {
                        K tr, sr;
                        if ((sr = s.result) != null)
                            t.result = (((tr = t.result) == null) ? sr :
                                        reducer.apply(tr, sr));
                        s = t.rights = s.nextRight;
                    }
                }
            }
        }
    }
5980
    /**
     * Task reducing all values with the given combining function,
     * yielding null for an empty map. Forked right-hand subtasks are
     * linked through {@code rights}/{@code nextRight} and merged as
     * completions arrive.
     */
    @SuppressWarnings("serial") static final class ReduceValuesTask<K,V>
        extends Traverser<K,V,V> {
        final BiFun<? super V, ? super V, ? extends V> reducer;
        V result;                          // reduction over this task's portion
        ReduceValuesTask<K,V> rights, nextRight;  // linked stack of forked subtasks
        ReduceValuesTask
            (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
             ReduceValuesTask<K,V> nextRight,
             BiFun<? super V, ? super V, ? extends V> reducer) {
            super(m, p, b); this.nextRight = nextRight;
            this.reducer = reducer;
        }
        public final V getRawResult() { return result; }
        @SuppressWarnings("unchecked") public final void compute() {
            final BiFun<? super V, ? super V, ? extends V> reducer;
            // Hoisted null screen: compilers cannot prove the field non-null.
            if ((reducer = this.reducer) != null) {
                // Fork off right-hand subtasks while splitting pays off.
                for (int b; (b = preSplit()) > 0;)
                    (rights = new ReduceValuesTask<K,V>
                     (map, this, b, rights, reducer)).fork();
                // Reduce this task's remaining values locally.
                V r = null;
                V v;
                while ((v = advance()) != null) {
                    V u = v;
                    r = (r == null) ? u : reducer.apply(r, u);
                }
                result = r;
                // On each completed ancestor, fold in its subtasks' results.
                CountedCompleter<?> c;
                for (c = firstComplete(); c != null; c = c.nextComplete()) {
                    ReduceValuesTask<K,V>
                        t = (ReduceValuesTask<K,V>)c,
                        s = t.rights;
                    while (s != null) {
                        V tr, sr;
                        if ((sr = s.result) != null)
                            t.result = (((tr = t.result) == null) ? sr :
                                        reducer.apply(tr, sr));
                        s = t.rights = s.nextRight;
                    }
                }
            }
        }
    }
6023
6024 @SuppressWarnings("serial") static final class ReduceEntriesTask<K,V>
6025 extends Traverser<K,V,Map.Entry<K,V>> {
6026 final BiFun<Map.Entry<K,V>, Map.Entry<K,V>, ? extends Map.Entry<K,V>> reducer;
6027 Map.Entry<K,V> result;
6028 ReduceEntriesTask<K,V> rights, nextRight;
6029 ReduceEntriesTask
6030 (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
6031 ReduceEntriesTask<K,V> nextRight,
6032 BiFun<Entry<K,V>, Map.Entry<K,V>, ? extends Map.Entry<K,V>> reducer) {
6033 super(m, p, b); this.nextRight = nextRight;
6034 this.reducer = reducer;
6035 }
6036 public final Map.Entry<K,V> getRawResult() { return result; }
6037 @SuppressWarnings("unchecked") public final void compute() {
6038 final BiFun<Map.Entry<K,V>, Map.Entry<K,V>, ? extends Map.Entry<K,V>> reducer;
6039 if ((reducer = this.reducer) != null) {
6040 for (int b; (b = preSplit()) > 0;)
6041 (rights = new ReduceEntriesTask<K,V>
6042 (map, this, b, rights, reducer)).fork();
6043 Map.Entry<K,V> r = null;
6044 V v;
6045 while ((v = advance()) != null) {
6046 Map.Entry<K,V> u = entryFor((K)nextKey, v);
6047 r = (r == null) ? u : reducer.apply(r, u);
6048 }
6049 result = r;
6050 CountedCompleter<?> c;
6051 for (c = firstComplete(); c != null; c = c.nextComplete()) {
6052 ReduceEntriesTask<K,V>
6053 t = (ReduceEntriesTask<K,V>)c,
6054 s = t.rights;
6055 while (s != null) {
6056 Map.Entry<K,V> tr, sr;
6057 if ((sr = s.result) != null)
6058 t.result = (((tr = t.result) == null) ? sr :
6059 reducer.apply(tr, sr));
6060 s = t.rights = s.nextRight;
6061 }
6062 }
6063 }
6064 }
6065 }
6066
    /**
     * Task reducing the non-null transformations of all keys with the
     * given combining function, yielding null if there are none.
     * Forked right-hand subtasks are linked through {@code rights}/
     * {@code nextRight} and merged as completions arrive.
     */
    @SuppressWarnings("serial") static final class MapReduceKeysTask<K,V,U>
        extends Traverser<K,V,U> {
        final Fun<? super K, ? extends U> transformer;
        final BiFun<? super U, ? super U, ? extends U> reducer;
        U result;                          // reduction over this task's portion
        MapReduceKeysTask<K,V,U> rights, nextRight;  // linked stack of forked subtasks
        MapReduceKeysTask
            (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
             MapReduceKeysTask<K,V,U> nextRight,
             Fun<? super K, ? extends U> transformer,
             BiFun<? super U, ? super U, ? extends U> reducer) {
            super(m, p, b); this.nextRight = nextRight;
            this.transformer = transformer;
            this.reducer = reducer;
        }
        public final U getRawResult() { return result; }
        @SuppressWarnings("unchecked") public final void compute() {
            final Fun<? super K, ? extends U> transformer;
            final BiFun<? super U, ? super U, ? extends U> reducer;
            // Hoisted null screens: compilers cannot prove the fields non-null.
            if ((transformer = this.transformer) != null &&
                (reducer = this.reducer) != null) {
                // Fork off right-hand subtasks while splitting pays off.
                for (int b; (b = preSplit()) > 0;)
                    (rights = new MapReduceKeysTask<K,V,U>
                     (map, this, b, rights, transformer, reducer)).fork();
                // Transform and reduce locally, skipping null transformations.
                U r = null, u;
                while (advance() != null) {
                    if ((u = transformer.apply((K)nextKey)) != null)
                        r = (r == null) ? u : reducer.apply(r, u);
                }
                result = r;
                // On each completed ancestor, fold in its subtasks' results.
                CountedCompleter<?> c;
                for (c = firstComplete(); c != null; c = c.nextComplete()) {
                    MapReduceKeysTask<K,V,U>
                        t = (MapReduceKeysTask<K,V,U>)c,
                        s = t.rights;
                    while (s != null) {
                        U tr, sr;
                        if ((sr = s.result) != null)
                            t.result = (((tr = t.result) == null) ? sr :
                                        reducer.apply(tr, sr));
                        s = t.rights = s.nextRight;
                    }
                }
            }
        }
    }
6113
    /**
     * Task reducing the non-null transformations of all values with
     * the given combining function, yielding null if there are none.
     * Forked right-hand subtasks are linked through {@code rights}/
     * {@code nextRight} and merged as completions arrive.
     */
    @SuppressWarnings("serial") static final class MapReduceValuesTask<K,V,U>
        extends Traverser<K,V,U> {
        final Fun<? super V, ? extends U> transformer;
        final BiFun<? super U, ? super U, ? extends U> reducer;
        U result;                          // reduction over this task's portion
        MapReduceValuesTask<K,V,U> rights, nextRight;  // linked stack of forked subtasks
        MapReduceValuesTask
            (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
             MapReduceValuesTask<K,V,U> nextRight,
             Fun<? super V, ? extends U> transformer,
             BiFun<? super U, ? super U, ? extends U> reducer) {
            super(m, p, b); this.nextRight = nextRight;
            this.transformer = transformer;
            this.reducer = reducer;
        }
        public final U getRawResult() { return result; }
        @SuppressWarnings("unchecked") public final void compute() {
            final Fun<? super V, ? extends U> transformer;
            final BiFun<? super U, ? super U, ? extends U> reducer;
            // Hoisted null screens: compilers cannot prove the fields non-null.
            if ((transformer = this.transformer) != null &&
                (reducer = this.reducer) != null) {
                // Fork off right-hand subtasks while splitting pays off.
                for (int b; (b = preSplit()) > 0;)
                    (rights = new MapReduceValuesTask<K,V,U>
                     (map, this, b, rights, transformer, reducer)).fork();
                // Transform and reduce locally, skipping null transformations.
                U r = null, u;
                V v;
                while ((v = advance()) != null) {
                    if ((u = transformer.apply(v)) != null)
                        r = (r == null) ? u : reducer.apply(r, u);
                }
                result = r;
                // On each completed ancestor, fold in its subtasks' results.
                CountedCompleter<?> c;
                for (c = firstComplete(); c != null; c = c.nextComplete()) {
                    MapReduceValuesTask<K,V,U>
                        t = (MapReduceValuesTask<K,V,U>)c,
                        s = t.rights;
                    while (s != null) {
                        U tr, sr;
                        if ((sr = s.result) != null)
                            t.result = (((tr = t.result) == null) ? sr :
                                        reducer.apply(tr, sr));
                        s = t.rights = s.nextRight;
                    }
                }
            }
        }
    }
6161
    /**
     * Task reducing the non-null transformations of all entries with
     * the given combining function, yielding null if there are none.
     * Forked right-hand subtasks are linked through {@code rights}/
     * {@code nextRight} and merged as completions arrive.
     */
    @SuppressWarnings("serial") static final class MapReduceEntriesTask<K,V,U>
        extends Traverser<K,V,U> {
        final Fun<Map.Entry<K,V>, ? extends U> transformer;
        final BiFun<? super U, ? super U, ? extends U> reducer;
        U result;                          // reduction over this task's portion
        MapReduceEntriesTask<K,V,U> rights, nextRight;  // linked stack of forked subtasks
        MapReduceEntriesTask
            (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
             MapReduceEntriesTask<K,V,U> nextRight,
             Fun<Map.Entry<K,V>, ? extends U> transformer,
             BiFun<? super U, ? super U, ? extends U> reducer) {
            super(m, p, b); this.nextRight = nextRight;
            this.transformer = transformer;
            this.reducer = reducer;
        }
        public final U getRawResult() { return result; }
        @SuppressWarnings("unchecked") public final void compute() {
            final Fun<Map.Entry<K,V>, ? extends U> transformer;
            final BiFun<? super U, ? super U, ? extends U> reducer;
            // Hoisted null screens: compilers cannot prove the fields non-null.
            if ((transformer = this.transformer) != null &&
                (reducer = this.reducer) != null) {
                // Fork off right-hand subtasks while splitting pays off.
                for (int b; (b = preSplit()) > 0;)
                    (rights = new MapReduceEntriesTask<K,V,U>
                     (map, this, b, rights, transformer, reducer)).fork();
                // Transform and reduce locally, skipping null transformations.
                U r = null, u;
                V v;
                while ((v = advance()) != null) {
                    if ((u = transformer.apply(entryFor((K)nextKey,
                                                        v))) != null)
                        r = (r == null) ? u : reducer.apply(r, u);
                }
                result = r;
                // On each completed ancestor, fold in its subtasks' results.
                CountedCompleter<?> c;
                for (c = firstComplete(); c != null; c = c.nextComplete()) {
                    MapReduceEntriesTask<K,V,U>
                        t = (MapReduceEntriesTask<K,V,U>)c,
                        s = t.rights;
                    while (s != null) {
                        U tr, sr;
                        if ((sr = s.result) != null)
                            t.result = (((tr = t.result) == null) ? sr :
                                        reducer.apply(tr, sr));
                        s = t.rights = s.nextRight;
                    }
                }
            }
        }
    }
6210
    /**
     * Task reducing the non-null transformations of all (key, value)
     * mappings with the given combining function, yielding null if
     * there are none. Forked right-hand subtasks are linked through
     * {@code rights}/{@code nextRight} and merged as completions
     * arrive.
     */
    @SuppressWarnings("serial") static final class MapReduceMappingsTask<K,V,U>
        extends Traverser<K,V,U> {
        final BiFun<? super K, ? super V, ? extends U> transformer;
        final BiFun<? super U, ? super U, ? extends U> reducer;
        U result;                          // reduction over this task's portion
        MapReduceMappingsTask<K,V,U> rights, nextRight;  // linked stack of forked subtasks
        MapReduceMappingsTask
            (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
             MapReduceMappingsTask<K,V,U> nextRight,
             BiFun<? super K, ? super V, ? extends U> transformer,
             BiFun<? super U, ? super U, ? extends U> reducer) {
            super(m, p, b); this.nextRight = nextRight;
            this.transformer = transformer;
            this.reducer = reducer;
        }
        public final U getRawResult() { return result; }
        @SuppressWarnings("unchecked") public final void compute() {
            final BiFun<? super K, ? super V, ? extends U> transformer;
            final BiFun<? super U, ? super U, ? extends U> reducer;
            // Hoisted null screens: compilers cannot prove the fields non-null.
            if ((transformer = this.transformer) != null &&
                (reducer = this.reducer) != null) {
                // Fork off right-hand subtasks while splitting pays off.
                for (int b; (b = preSplit()) > 0;)
                    (rights = new MapReduceMappingsTask<K,V,U>
                     (map, this, b, rights, transformer, reducer)).fork();
                // Transform and reduce locally, skipping null transformations.
                U r = null, u;
                V v;
                while ((v = advance()) != null) {
                    if ((u = transformer.apply((K)nextKey, v)) != null)
                        r = (r == null) ? u : reducer.apply(r, u);
                }
                result = r;
                // On each completed ancestor, fold in its subtasks' results.
                CountedCompleter<?> c;
                for (c = firstComplete(); c != null; c = c.nextComplete()) {
                    MapReduceMappingsTask<K,V,U>
                        t = (MapReduceMappingsTask<K,V,U>)c,
                        s = t.rights;
                    while (s != null) {
                        U tr, sr;
                        if ((sr = s.result) != null)
                            t.result = (((tr = t.result) == null) ? sr :
                                        reducer.apply(tr, sr));
                        s = t.rights = s.nextRight;
                    }
                }
            }
        }
    }
6258
    /**
     * Task reducing the double transformation of all keys with the
     * given combining function, starting from the given basis.
     * Forked right-hand subtasks are linked through {@code rights}/
     * {@code nextRight} and merged as completions arrive.
     */
    @SuppressWarnings("serial") static final class MapReduceKeysToDoubleTask<K,V>
        extends Traverser<K,V,Double> {
        final ObjectToDouble<? super K> transformer;
        final DoubleByDoubleToDouble reducer;
        final double basis;                // identity value for the reduction
        double result;                     // reduction over this task's portion
        MapReduceKeysToDoubleTask<K,V> rights, nextRight;  // linked stack of forked subtasks
        MapReduceKeysToDoubleTask
            (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
             MapReduceKeysToDoubleTask<K,V> nextRight,
             ObjectToDouble<? super K> transformer,
             double basis,
             DoubleByDoubleToDouble reducer) {
            super(m, p, b); this.nextRight = nextRight;
            this.transformer = transformer;
            this.basis = basis; this.reducer = reducer;
        }
        public final Double getRawResult() { return result; }
        @SuppressWarnings("unchecked") public final void compute() {
            final ObjectToDouble<? super K> transformer;
            final DoubleByDoubleToDouble reducer;
            // Hoisted null screens: compilers cannot prove the fields non-null.
            if ((transformer = this.transformer) != null &&
                (reducer = this.reducer) != null) {
                double r = this.basis;
                // Fork off right-hand subtasks (inheriting the basis r)
                // while splitting pays off.
                for (int b; (b = preSplit()) > 0;)
                    (rights = new MapReduceKeysToDoubleTask<K,V>
                     (map, this, b, rights, transformer, r, reducer)).fork();
                // Transform and accumulate this task's remaining keys.
                while (advance() != null)
                    r = reducer.apply(r, transformer.apply((K)nextKey));
                result = r;
                // On each completed ancestor, fold in its subtasks' results.
                CountedCompleter<?> c;
                for (c = firstComplete(); c != null; c = c.nextComplete()) {
                    MapReduceKeysToDoubleTask<K,V>
                        t = (MapReduceKeysToDoubleTask<K,V>)c,
                        s = t.rights;
                    while (s != null) {
                        t.result = reducer.apply(t.result, s.result);
                        s = t.rights = s.nextRight;
                    }
                }
            }
        }
    }
6302
    /**
     * Task reducing the double transformation of all values with the
     * given combining function, starting from the given basis.
     * Forked right-hand subtasks are linked through {@code rights}/
     * {@code nextRight} and merged as completions arrive.
     */
    @SuppressWarnings("serial") static final class MapReduceValuesToDoubleTask<K,V>
        extends Traverser<K,V,Double> {
        final ObjectToDouble<? super V> transformer;
        final DoubleByDoubleToDouble reducer;
        final double basis;                // identity value for the reduction
        double result;                     // reduction over this task's portion
        MapReduceValuesToDoubleTask<K,V> rights, nextRight;  // linked stack of forked subtasks
        MapReduceValuesToDoubleTask
            (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
             MapReduceValuesToDoubleTask<K,V> nextRight,
             ObjectToDouble<? super V> transformer,
             double basis,
             DoubleByDoubleToDouble reducer) {
            super(m, p, b); this.nextRight = nextRight;
            this.transformer = transformer;
            this.basis = basis; this.reducer = reducer;
        }
        public final Double getRawResult() { return result; }
        @SuppressWarnings("unchecked") public final void compute() {
            final ObjectToDouble<? super V> transformer;
            final DoubleByDoubleToDouble reducer;
            // Hoisted null screens: compilers cannot prove the fields non-null.
            if ((transformer = this.transformer) != null &&
                (reducer = this.reducer) != null) {
                double r = this.basis;
                // Fork off right-hand subtasks (inheriting the basis r)
                // while splitting pays off.
                for (int b; (b = preSplit()) > 0;)
                    (rights = new MapReduceValuesToDoubleTask<K,V>
                     (map, this, b, rights, transformer, r, reducer)).fork();
                // Transform and accumulate this task's remaining values.
                V v;
                while ((v = advance()) != null)
                    r = reducer.apply(r, transformer.apply(v));
                result = r;
                // On each completed ancestor, fold in its subtasks' results.
                CountedCompleter<?> c;
                for (c = firstComplete(); c != null; c = c.nextComplete()) {
                    MapReduceValuesToDoubleTask<K,V>
                        t = (MapReduceValuesToDoubleTask<K,V>)c,
                        s = t.rights;
                    while (s != null) {
                        t.result = reducer.apply(t.result, s.result);
                        s = t.rights = s.nextRight;
                    }
                }
            }
        }
    }
6347
    /**
     * Task reducing the double transformation of all entries with the
     * given combining function, starting from the given basis.
     * Forked right-hand subtasks are linked through {@code rights}/
     * {@code nextRight} and merged as completions arrive.
     */
    @SuppressWarnings("serial") static final class MapReduceEntriesToDoubleTask<K,V>
        extends Traverser<K,V,Double> {
        final ObjectToDouble<Map.Entry<K,V>> transformer;
        final DoubleByDoubleToDouble reducer;
        final double basis;                // identity value for the reduction
        double result;                     // reduction over this task's portion
        MapReduceEntriesToDoubleTask<K,V> rights, nextRight;  // linked stack of forked subtasks
        MapReduceEntriesToDoubleTask
            (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
             MapReduceEntriesToDoubleTask<K,V> nextRight,
             ObjectToDouble<Map.Entry<K,V>> transformer,
             double basis,
             DoubleByDoubleToDouble reducer) {
            super(m, p, b); this.nextRight = nextRight;
            this.transformer = transformer;
            this.basis = basis; this.reducer = reducer;
        }
        public final Double getRawResult() { return result; }
        @SuppressWarnings("unchecked") public final void compute() {
            final ObjectToDouble<Map.Entry<K,V>> transformer;
            final DoubleByDoubleToDouble reducer;
            // Hoisted null screens: compilers cannot prove the fields non-null.
            if ((transformer = this.transformer) != null &&
                (reducer = this.reducer) != null) {
                double r = this.basis;
                // Fork off right-hand subtasks (inheriting the basis r)
                // while splitting pays off.
                for (int b; (b = preSplit()) > 0;)
                    (rights = new MapReduceEntriesToDoubleTask<K,V>
                     (map, this, b, rights, transformer, r, reducer)).fork();
                // Transform and accumulate this task's remaining entries.
                V v;
                while ((v = advance()) != null)
                    r = reducer.apply(r, transformer.apply(entryFor((K)nextKey,
                                                                    v)));
                result = r;
                // On each completed ancestor, fold in its subtasks' results.
                CountedCompleter<?> c;
                for (c = firstComplete(); c != null; c = c.nextComplete()) {
                    MapReduceEntriesToDoubleTask<K,V>
                        t = (MapReduceEntriesToDoubleTask<K,V>)c,
                        s = t.rights;
                    while (s != null) {
                        t.result = reducer.apply(t.result, s.result);
                        s = t.rights = s.nextRight;
                    }
                }
            }
        }
    }
6393
    /**
     * ForkJoin task performing a parallel map-reduce over the map's
     * (key, value) pairs: each pair is mapped to a double by
     * {@code transformer} and the results are combined with
     * {@code reducer}, starting from {@code basis}.
     */
    @SuppressWarnings("serial") static final class MapReduceMappingsToDoubleTask<K,V>
        extends Traverser<K,V,Double> {
        final ObjectByObjectToDouble<? super K, ? super V> transformer; // (key, value) -> double
        final DoubleByDoubleToDouble reducer;             // combines two doubles
        final double basis;                               // identity for the reduction
        double result;                                    // accumulated result of this subtree
        MapReduceMappingsToDoubleTask<K,V> rights, nextRight; // forked subtasks; sibling link
        MapReduceMappingsToDoubleTask
            (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
             MapReduceMappingsToDoubleTask<K,V> nextRight,
             ObjectByObjectToDouble<? super K, ? super V> transformer,
             double basis,
             DoubleByDoubleToDouble reducer) {
            super(m, p, b); this.nextRight = nextRight;
            this.transformer = transformer;
            this.basis = basis; this.reducer = reducer;
        }
        public final Double getRawResult() { return result; }
        @SuppressWarnings("unchecked") public final void compute() {
            final ObjectByObjectToDouble<? super K, ? super V> transformer;
            final DoubleByDoubleToDouble reducer;
            if ((transformer = this.transformer) != null &&
                (reducer = this.reducer) != null) {
                double r = this.basis;
                // Split off and fork right-hand subtasks while preSplit() permits.
                for (int b; (b = preSplit()) > 0;)
                    (rights = new MapReduceMappingsToDoubleTask<K,V>
                     (map, this, b, rights, transformer, r, reducer)).fork();
                V v;
                // Reduce the elements remaining in this task's own range.
                while ((v = advance()) != null)
                    r = reducer.apply(r, transformer.apply((K)nextKey, v));
                result = r; // publish leaf result before completion processing
                CountedCompleter<?> c;
                // Fold results of completed right subtasks into each completing task.
                for (c = firstComplete(); c != null; c = c.nextComplete()) {
                    MapReduceMappingsToDoubleTask<K,V>
                        t = (MapReduceMappingsToDoubleTask<K,V>)c,
                        s = t.rights;
                    while (s != null) {
                        t.result = reducer.apply(t.result, s.result);
                        s = t.rights = s.nextRight; // advance and unlink for GC
                    }
                }
            }
        }
    }
6438
    /**
     * ForkJoin task performing a parallel map-reduce over the map's
     * keys: each key is mapped to a long by {@code transformer} and the
     * results are combined with {@code reducer}, starting from
     * {@code basis}.
     */
    @SuppressWarnings("serial") static final class MapReduceKeysToLongTask<K,V>
        extends Traverser<K,V,Long> {
        final ObjectToLong<? super K> transformer;        // key -> long
        final LongByLongToLong reducer;                   // combines two longs
        final long basis;                                 // identity for the reduction
        long result;                                      // accumulated result of this subtree
        MapReduceKeysToLongTask<K,V> rights, nextRight;   // forked subtasks; sibling link
        MapReduceKeysToLongTask
            (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
             MapReduceKeysToLongTask<K,V> nextRight,
             ObjectToLong<? super K> transformer,
             long basis,
             LongByLongToLong reducer) {
            super(m, p, b); this.nextRight = nextRight;
            this.transformer = transformer;
            this.basis = basis; this.reducer = reducer;
        }
        public final Long getRawResult() { return result; }
        @SuppressWarnings("unchecked") public final void compute() {
            final ObjectToLong<? super K> transformer;
            final LongByLongToLong reducer;
            if ((transformer = this.transformer) != null &&
                (reducer = this.reducer) != null) {
                long r = this.basis;
                // Split off and fork right-hand subtasks while preSplit() permits.
                for (int b; (b = preSplit()) > 0;)
                    (rights = new MapReduceKeysToLongTask<K,V>
                     (map, this, b, rights, transformer, r, reducer)).fork();
                // Reduce the elements remaining in this task's own range.
                while (advance() != null)
                    r = reducer.apply(r, transformer.apply((K)nextKey));
                result = r; // publish leaf result before completion processing
                CountedCompleter<?> c;
                // Fold results of completed right subtasks into each completing task.
                for (c = firstComplete(); c != null; c = c.nextComplete()) {
                    MapReduceKeysToLongTask<K,V>
                        t = (MapReduceKeysToLongTask<K,V>)c,
                        s = t.rights;
                    while (s != null) {
                        t.result = reducer.apply(t.result, s.result);
                        s = t.rights = s.nextRight; // advance and unlink for GC
                    }
                }
            }
        }
    }
6482
    /**
     * ForkJoin task performing a parallel map-reduce over the map's
     * values: each value is mapped to a long by {@code transformer} and
     * the results are combined with {@code reducer}, starting from
     * {@code basis}.
     */
    @SuppressWarnings("serial") static final class MapReduceValuesToLongTask<K,V>
        extends Traverser<K,V,Long> {
        final ObjectToLong<? super V> transformer;        // value -> long
        final LongByLongToLong reducer;                   // combines two longs
        final long basis;                                 // identity for the reduction
        long result;                                      // accumulated result of this subtree
        MapReduceValuesToLongTask<K,V> rights, nextRight; // forked subtasks; sibling link
        MapReduceValuesToLongTask
            (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
             MapReduceValuesToLongTask<K,V> nextRight,
             ObjectToLong<? super V> transformer,
             long basis,
             LongByLongToLong reducer) {
            super(m, p, b); this.nextRight = nextRight;
            this.transformer = transformer;
            this.basis = basis; this.reducer = reducer;
        }
        public final Long getRawResult() { return result; }
        @SuppressWarnings("unchecked") public final void compute() {
            final ObjectToLong<? super V> transformer;
            final LongByLongToLong reducer;
            if ((transformer = this.transformer) != null &&
                (reducer = this.reducer) != null) {
                long r = this.basis;
                // Split off and fork right-hand subtasks while preSplit() permits.
                for (int b; (b = preSplit()) > 0;)
                    (rights = new MapReduceValuesToLongTask<K,V>
                     (map, this, b, rights, transformer, r, reducer)).fork();
                V v;
                // Reduce the elements remaining in this task's own range.
                while ((v = advance()) != null)
                    r = reducer.apply(r, transformer.apply(v));
                result = r; // publish leaf result before completion processing
                CountedCompleter<?> c;
                // Fold results of completed right subtasks into each completing task.
                for (c = firstComplete(); c != null; c = c.nextComplete()) {
                    MapReduceValuesToLongTask<K,V>
                        t = (MapReduceValuesToLongTask<K,V>)c,
                        s = t.rights;
                    while (s != null) {
                        t.result = reducer.apply(t.result, s.result);
                        s = t.rights = s.nextRight; // advance and unlink for GC
                    }
                }
            }
        }
    }
6527
    /**
     * ForkJoin task performing a parallel map-reduce over the map's
     * entries: each entry is mapped to a long by {@code transformer} and
     * the results are combined with {@code reducer}, starting from
     * {@code basis}.
     */
    @SuppressWarnings("serial") static final class MapReduceEntriesToLongTask<K,V>
        extends Traverser<K,V,Long> {
        final ObjectToLong<Map.Entry<K,V>> transformer;   // entry -> long
        final LongByLongToLong reducer;                   // combines two longs
        final long basis;                                 // identity for the reduction
        long result;                                      // accumulated result of this subtree
        MapReduceEntriesToLongTask<K,V> rights, nextRight; // forked subtasks; sibling link
        MapReduceEntriesToLongTask
            (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
             MapReduceEntriesToLongTask<K,V> nextRight,
             ObjectToLong<Map.Entry<K,V>> transformer,
             long basis,
             LongByLongToLong reducer) {
            super(m, p, b); this.nextRight = nextRight;
            this.transformer = transformer;
            this.basis = basis; this.reducer = reducer;
        }
        public final Long getRawResult() { return result; }
        @SuppressWarnings("unchecked") public final void compute() {
            final ObjectToLong<Map.Entry<K,V>> transformer;
            final LongByLongToLong reducer;
            if ((transformer = this.transformer) != null &&
                (reducer = this.reducer) != null) {
                long r = this.basis;
                // Split off and fork right-hand subtasks while preSplit() permits.
                for (int b; (b = preSplit()) > 0;)
                    (rights = new MapReduceEntriesToLongTask<K,V>
                     (map, this, b, rights, transformer, r, reducer)).fork();
                V v;
                // Reduce the elements remaining in this task's own range.
                while ((v = advance()) != null)
                    r = reducer.apply(r, transformer.apply(entryFor((K)nextKey,
                                                                    v)));
                result = r; // publish leaf result before completion processing
                CountedCompleter<?> c;
                // Fold results of completed right subtasks into each completing task.
                for (c = firstComplete(); c != null; c = c.nextComplete()) {
                    MapReduceEntriesToLongTask<K,V>
                        t = (MapReduceEntriesToLongTask<K,V>)c,
                        s = t.rights;
                    while (s != null) {
                        t.result = reducer.apply(t.result, s.result);
                        s = t.rights = s.nextRight; // advance and unlink for GC
                    }
                }
            }
        }
    }
6573
    /**
     * ForkJoin task performing a parallel map-reduce over the map's
     * (key, value) pairs: each pair is mapped to a long by
     * {@code transformer} and the results are combined with
     * {@code reducer}, starting from {@code basis}.
     */
    @SuppressWarnings("serial") static final class MapReduceMappingsToLongTask<K,V>
        extends Traverser<K,V,Long> {
        final ObjectByObjectToLong<? super K, ? super V> transformer; // (key, value) -> long
        final LongByLongToLong reducer;                   // combines two longs
        final long basis;                                 // identity for the reduction
        long result;                                      // accumulated result of this subtree
        MapReduceMappingsToLongTask<K,V> rights, nextRight; // forked subtasks; sibling link
        MapReduceMappingsToLongTask
            (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
             MapReduceMappingsToLongTask<K,V> nextRight,
             ObjectByObjectToLong<? super K, ? super V> transformer,
             long basis,
             LongByLongToLong reducer) {
            super(m, p, b); this.nextRight = nextRight;
            this.transformer = transformer;
            this.basis = basis; this.reducer = reducer;
        }
        public final Long getRawResult() { return result; }
        @SuppressWarnings("unchecked") public final void compute() {
            final ObjectByObjectToLong<? super K, ? super V> transformer;
            final LongByLongToLong reducer;
            if ((transformer = this.transformer) != null &&
                (reducer = this.reducer) != null) {
                long r = this.basis;
                // Split off and fork right-hand subtasks while preSplit() permits.
                for (int b; (b = preSplit()) > 0;)
                    (rights = new MapReduceMappingsToLongTask<K,V>
                     (map, this, b, rights, transformer, r, reducer)).fork();
                V v;
                // Reduce the elements remaining in this task's own range.
                while ((v = advance()) != null)
                    r = reducer.apply(r, transformer.apply((K)nextKey, v));
                result = r; // publish leaf result before completion processing
                CountedCompleter<?> c;
                // Fold results of completed right subtasks into each completing task.
                for (c = firstComplete(); c != null; c = c.nextComplete()) {
                    MapReduceMappingsToLongTask<K,V>
                        t = (MapReduceMappingsToLongTask<K,V>)c,
                        s = t.rights;
                    while (s != null) {
                        t.result = reducer.apply(t.result, s.result);
                        s = t.rights = s.nextRight; // advance and unlink for GC
                    }
                }
            }
        }
    }
6618
    /**
     * ForkJoin task performing a parallel map-reduce over the map's
     * keys: each key is mapped to an int by {@code transformer} and the
     * results are combined with {@code reducer}, starting from
     * {@code basis}.
     */
    @SuppressWarnings("serial") static final class MapReduceKeysToIntTask<K,V>
        extends Traverser<K,V,Integer> {
        final ObjectToInt<? super K> transformer;         // key -> int
        final IntByIntToInt reducer;                      // combines two ints
        final int basis;                                  // identity for the reduction
        int result;                                       // accumulated result of this subtree
        MapReduceKeysToIntTask<K,V> rights, nextRight;    // forked subtasks; sibling link
        MapReduceKeysToIntTask
            (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
             MapReduceKeysToIntTask<K,V> nextRight,
             ObjectToInt<? super K> transformer,
             int basis,
             IntByIntToInt reducer) {
            super(m, p, b); this.nextRight = nextRight;
            this.transformer = transformer;
            this.basis = basis; this.reducer = reducer;
        }
        public final Integer getRawResult() { return result; }
        @SuppressWarnings("unchecked") public final void compute() {
            final ObjectToInt<? super K> transformer;
            final IntByIntToInt reducer;
            if ((transformer = this.transformer) != null &&
                (reducer = this.reducer) != null) {
                int r = this.basis;
                // Split off and fork right-hand subtasks while preSplit() permits.
                for (int b; (b = preSplit()) > 0;)
                    (rights = new MapReduceKeysToIntTask<K,V>
                     (map, this, b, rights, transformer, r, reducer)).fork();
                // Reduce the elements remaining in this task's own range.
                while (advance() != null)
                    r = reducer.apply(r, transformer.apply((K)nextKey));
                result = r; // publish leaf result before completion processing
                CountedCompleter<?> c;
                // Fold results of completed right subtasks into each completing task.
                for (c = firstComplete(); c != null; c = c.nextComplete()) {
                    MapReduceKeysToIntTask<K,V>
                        t = (MapReduceKeysToIntTask<K,V>)c,
                        s = t.rights;
                    while (s != null) {
                        t.result = reducer.apply(t.result, s.result);
                        s = t.rights = s.nextRight; // advance and unlink for GC
                    }
                }
            }
        }
    }
6662
    /**
     * ForkJoin task performing a parallel map-reduce over the map's
     * values: each value is mapped to an int by {@code transformer} and
     * the results are combined with {@code reducer}, starting from
     * {@code basis}.
     */
    @SuppressWarnings("serial") static final class MapReduceValuesToIntTask<K,V>
        extends Traverser<K,V,Integer> {
        final ObjectToInt<? super V> transformer;         // value -> int
        final IntByIntToInt reducer;                      // combines two ints
        final int basis;                                  // identity for the reduction
        int result;                                       // accumulated result of this subtree
        MapReduceValuesToIntTask<K,V> rights, nextRight;  // forked subtasks; sibling link
        MapReduceValuesToIntTask
            (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
             MapReduceValuesToIntTask<K,V> nextRight,
             ObjectToInt<? super V> transformer,
             int basis,
             IntByIntToInt reducer) {
            super(m, p, b); this.nextRight = nextRight;
            this.transformer = transformer;
            this.basis = basis; this.reducer = reducer;
        }
        public final Integer getRawResult() { return result; }
        @SuppressWarnings("unchecked") public final void compute() {
            final ObjectToInt<? super V> transformer;
            final IntByIntToInt reducer;
            if ((transformer = this.transformer) != null &&
                (reducer = this.reducer) != null) {
                int r = this.basis;
                // Split off and fork right-hand subtasks while preSplit() permits.
                for (int b; (b = preSplit()) > 0;)
                    (rights = new MapReduceValuesToIntTask<K,V>
                     (map, this, b, rights, transformer, r, reducer)).fork();
                V v;
                // Reduce the elements remaining in this task's own range.
                while ((v = advance()) != null)
                    r = reducer.apply(r, transformer.apply(v));
                result = r; // publish leaf result before completion processing
                CountedCompleter<?> c;
                // Fold results of completed right subtasks into each completing task.
                for (c = firstComplete(); c != null; c = c.nextComplete()) {
                    MapReduceValuesToIntTask<K,V>
                        t = (MapReduceValuesToIntTask<K,V>)c,
                        s = t.rights;
                    while (s != null) {
                        t.result = reducer.apply(t.result, s.result);
                        s = t.rights = s.nextRight; // advance and unlink for GC
                    }
                }
            }
        }
    }
6707
    /**
     * ForkJoin task performing a parallel map-reduce over the map's
     * entries: each entry is mapped to an int by {@code transformer} and
     * the results are combined with {@code reducer}, starting from
     * {@code basis}.
     */
    @SuppressWarnings("serial") static final class MapReduceEntriesToIntTask<K,V>
        extends Traverser<K,V,Integer> {
        final ObjectToInt<Map.Entry<K,V>> transformer;    // entry -> int
        final IntByIntToInt reducer;                      // combines two ints
        final int basis;                                  // identity for the reduction
        int result;                                       // accumulated result of this subtree
        MapReduceEntriesToIntTask<K,V> rights, nextRight; // forked subtasks; sibling link
        MapReduceEntriesToIntTask
            (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
             MapReduceEntriesToIntTask<K,V> nextRight,
             ObjectToInt<Map.Entry<K,V>> transformer,
             int basis,
             IntByIntToInt reducer) {
            super(m, p, b); this.nextRight = nextRight;
            this.transformer = transformer;
            this.basis = basis; this.reducer = reducer;
        }
        public final Integer getRawResult() { return result; }
        @SuppressWarnings("unchecked") public final void compute() {
            final ObjectToInt<Map.Entry<K,V>> transformer;
            final IntByIntToInt reducer;
            if ((transformer = this.transformer) != null &&
                (reducer = this.reducer) != null) {
                int r = this.basis;
                // Split off and fork right-hand subtasks while preSplit() permits.
                for (int b; (b = preSplit()) > 0;)
                    (rights = new MapReduceEntriesToIntTask<K,V>
                     (map, this, b, rights, transformer, r, reducer)).fork();
                V v;
                // Reduce the elements remaining in this task's own range.
                while ((v = advance()) != null)
                    r = reducer.apply(r, transformer.apply(entryFor((K)nextKey,
                                                                    v)));
                result = r; // publish leaf result before completion processing
                CountedCompleter<?> c;
                // Fold results of completed right subtasks into each completing task.
                for (c = firstComplete(); c != null; c = c.nextComplete()) {
                    MapReduceEntriesToIntTask<K,V>
                        t = (MapReduceEntriesToIntTask<K,V>)c,
                        s = t.rights;
                    while (s != null) {
                        t.result = reducer.apply(t.result, s.result);
                        s = t.rights = s.nextRight; // advance and unlink for GC
                    }
                }
            }
        }
    }
6753
    /**
     * ForkJoin task performing a parallel map-reduce over the map's
     * (key, value) pairs: each pair is mapped to an int by
     * {@code transformer} and the results are combined with
     * {@code reducer}, starting from {@code basis}.
     */
    @SuppressWarnings("serial") static final class MapReduceMappingsToIntTask<K,V>
        extends Traverser<K,V,Integer> {
        final ObjectByObjectToInt<? super K, ? super V> transformer; // (key, value) -> int
        final IntByIntToInt reducer;                      // combines two ints
        final int basis;                                  // identity for the reduction
        int result;                                       // accumulated result of this subtree
        MapReduceMappingsToIntTask<K,V> rights, nextRight; // forked subtasks; sibling link
        MapReduceMappingsToIntTask
            (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
             MapReduceMappingsToIntTask<K,V> nextRight,
             ObjectByObjectToInt<? super K, ? super V> transformer,
             int basis,
             IntByIntToInt reducer) {
            super(m, p, b); this.nextRight = nextRight;
            this.transformer = transformer;
            this.basis = basis; this.reducer = reducer;
        }
        public final Integer getRawResult() { return result; }
        @SuppressWarnings("unchecked") public final void compute() {
            final ObjectByObjectToInt<? super K, ? super V> transformer;
            final IntByIntToInt reducer;
            if ((transformer = this.transformer) != null &&
                (reducer = this.reducer) != null) {
                int r = this.basis;
                // Split off and fork right-hand subtasks while preSplit() permits.
                for (int b; (b = preSplit()) > 0;)
                    (rights = new MapReduceMappingsToIntTask<K,V>
                     (map, this, b, rights, transformer, r, reducer)).fork();
                V v;
                // Reduce the elements remaining in this task's own range.
                while ((v = advance()) != null)
                    r = reducer.apply(r, transformer.apply((K)nextKey, v));
                result = r; // publish leaf result before completion processing
                CountedCompleter<?> c;
                // Fold results of completed right subtasks into each completing task.
                for (c = firstComplete(); c != null; c = c.nextComplete()) {
                    MapReduceMappingsToIntTask<K,V>
                        t = (MapReduceMappingsToIntTask<K,V>)c,
                        s = t.rights;
                    while (s != null) {
                        t.result = reducer.apply(t.result, s.result);
                        s = t.rights = s.nextRight; // advance and unlink for GC
                    }
                }
            }
        }
    }
6798
    // Unsafe mechanics: raw field/array offsets used for the CAS and
    // ordered-write operations throughout this class.
    private static final sun.misc.Unsafe U;       // provider of low-level memory operations
    private static final long SIZECTL;            // offset of the "sizeCtl" field
    private static final long TRANSFERINDEX;      // offset of the "transferIndex" field
    private static final long TRANSFERORIGIN;     // offset of the "transferOrigin" field
    private static final long BASECOUNT;          // offset of the "baseCount" field
    private static final long COUNTERBUSY;        // offset of the "counterBusy" field
    private static final long CELLVALUE;          // offset of CounterCell's "value" field
    private static final long ABASE;              // byte offset of element 0 of a Node[]
    private static final int ASHIFT;              // log2 of the Node[] element scale

    static {
        int ss;
        try {
            U = sun.misc.Unsafe.getUnsafe();
            Class<?> k = ConcurrentHashMap.class;
            SIZECTL = U.objectFieldOffset
                (k.getDeclaredField("sizeCtl"));
            TRANSFERINDEX = U.objectFieldOffset
                (k.getDeclaredField("transferIndex"));
            TRANSFERORIGIN = U.objectFieldOffset
                (k.getDeclaredField("transferOrigin"));
            BASECOUNT = U.objectFieldOffset
                (k.getDeclaredField("baseCount"));
            COUNTERBUSY = U.objectFieldOffset
                (k.getDeclaredField("counterBusy"));
            Class<?> ck = CounterCell.class;
            CELLVALUE = U.objectFieldOffset
                (ck.getDeclaredField("value"));
            Class<?> sc = Node[].class;
            ABASE = U.arrayBaseOffset(sc);
            ss = U.arrayIndexScale(sc);
            // Element i of a Node[] lives at byte offset ((long)i << ASHIFT) + ABASE.
            ASHIFT = 31 - Integer.numberOfLeadingZeros(ss);
        } catch (Exception e) {
            throw new Error(e); // reflection failure here is unrecoverable
        }
        // Shift-based array addressing only works for power-of-two element scales.
        if ((ss & (ss-1)) != 0)
            throw new Error("data type scale not a power of two");
    }
6838
6839 }