root/jsr166/jsr166/src/main/java/util/concurrent/ConcurrentHashMap.java
Revision: 1.150
Committed: Fri Dec 14 16:33:42 2012 UTC (11 years, 5 months ago) by jsr166
Branch: MAIN
Changes since 1.149: +13 -13 lines
Log Message:
whitespace

File Contents

# User Rev Content
1 dl 1.2 /*
2     * Written by Doug Lea with assistance from members of JCP JSR-166
3 dl 1.36 * Expert Group and released to the public domain, as explained at
4 dl 1.100 * http://creativecommons.org/publicdomain/zero/1.0/
5 dl 1.2 */
6    
7 tim 1.1 package java.util.concurrent;
8 dl 1.119 import java.util.concurrent.ForkJoinPool;
9 dl 1.146 import java.util.concurrent.CountedCompleter;
10 dl 1.119
11     import java.util.Comparator;
12     import java.util.Arrays;
13     import java.util.Map;
14     import java.util.Set;
15     import java.util.Collection;
16     import java.util.AbstractMap;
17     import java.util.AbstractSet;
18     import java.util.AbstractCollection;
19     import java.util.Hashtable;
20     import java.util.HashMap;
21     import java.util.Iterator;
22     import java.util.Enumeration;
23     import java.util.ConcurrentModificationException;
24     import java.util.NoSuchElementException;
25     import java.util.concurrent.ConcurrentMap;
26     import java.util.concurrent.locks.AbstractQueuedSynchronizer;
27 dl 1.149 import java.util.concurrent.atomic.AtomicInteger;
28 dl 1.119 import java.util.concurrent.atomic.AtomicReference;
29 tim 1.1 import java.io.Serializable;
30    
31     /**
32 dl 1.4 * A hash table supporting full concurrency of retrievals and
33 dl 1.119 * high expected concurrency for updates. This class obeys the
34 dl 1.22 * same functional specification as {@link java.util.Hashtable}, and
35 dl 1.19 * includes versions of methods corresponding to each method of
36 dl 1.119 * {@code Hashtable}. However, even though all operations are
37 dl 1.19 * thread-safe, retrieval operations do <em>not</em> entail locking,
38     * and there is <em>not</em> any support for locking the entire table
39     * in a way that prevents all access. This class is fully
40 dl 1.119 * interoperable with {@code Hashtable} in programs that rely on its
41 dl 1.4 * thread safety but not on its synchronization details.
42 tim 1.11 *
43 jsr166 1.145 * <p>Retrieval operations (including {@code get}) generally do not
44 dl 1.119 * block, so may overlap with update operations (including {@code put}
45     * and {@code remove}). Retrievals reflect the results of the most
46     * recently <em>completed</em> update operations holding upon their
47 dl 1.126 * onset. (More formally, an update operation for a given key bears a
48     * <em>happens-before</em> relation with any (non-null) retrieval for
49     * that key reporting the updated value.) For aggregate operations
50     * such as {@code putAll} and {@code clear}, concurrent retrievals may
51     * reflect insertion or removal of only some entries. Similarly,
52     * Iterators and Enumerations return elements reflecting the state of
53     * the hash table at some point at or since the creation of the
54     * iterator/enumeration. They do <em>not</em> throw {@link
55     * ConcurrentModificationException}. However, iterators are designed
56     * to be used by only one thread at a time. Bear in mind that the
57     * results of aggregate status methods including {@code size}, {@code
58     * isEmpty}, and {@code containsValue} are typically useful only when
59     * a map is not undergoing concurrent updates in other threads.
60     * Otherwise the results of these methods reflect transient states
61     * that may be adequate for monitoring or estimation purposes, but not
62     * for program control.
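     *
     * <p>As a small illustration of these memory consistency effects
     * (the map, key, and field names below are purely illustrative): a
     * value published via {@code put} is seen fully initialized by any
     * thread that subsequently obtains it via {@code get}:
     *
     * <pre> {@code
     * // writer thread
     * config.timeoutMillis = 500;      // ordinary write
     * shared.put("config", config);
     *
     * // reader thread
     * Config c = shared.get("config");
     * if (c != null)
     *   use(c.timeoutMillis);          // guaranteed to observe 500
     * }</pre>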
63 tim 1.1 *
64 jsr166 1.145 * <p>The table is dynamically expanded when there are too many
65 dl 1.119 * collisions (i.e., keys that have distinct hash codes but fall into
66     * the same slot modulo the table size), with the expected average
67     * effect of maintaining roughly two bins per mapping (corresponding
68     * to a 0.75 load factor threshold for resizing). There may be much
69     * variance around this average as mappings are added and removed, but
70     * overall, this maintains a commonly accepted time/space tradeoff for
71     * hash tables. However, resizing this or any other kind of hash
72     * table may be a relatively slow operation. When possible, it is a
73     * good idea to provide a size estimate as an optional {@code
74     * initialCapacity} constructor argument. An additional optional
75     * {@code loadFactor} constructor argument provides a further means of
76     * customizing initial table capacity by specifying the table density
77     * to be used in calculating the amount of space to allocate for the
78     * given number of elements. Also, for compatibility with previous
79     * versions of this class, constructors may optionally specify an
80     * expected {@code concurrencyLevel} as an additional hint for
81     * internal sizing. Note that using many keys with exactly the same
82     * {@code hashCode()} is a sure way to slow down performance of any
83     * hash table.
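     *
     * <p>For example (the element types and variable names here are
     * illustrative only), supplying an estimate of the expected number
     * of mappings up front avoids intermediate resizing work:
     *
     * <pre> {@code
     * int expectedMappings = 1000000;
     * ConcurrentHashMap<String,Long> hits =
     *   new ConcurrentHashMap<String,Long>(expectedMappings);
     * }</pre>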
84 tim 1.1 *
85 jsr166 1.145 * <p>A {@link Set} projection of a ConcurrentHashMap may be created
86 dl 1.137 * (using {@link #newKeySet()} or {@link #newKeySet(int)}), or viewed
87     * (using {@link #keySet(Object)}) when only keys are of interest, and the
88     * mapped values are (perhaps transiently) not used or all take the
89     * same mapping value.
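     *
     * <p>For instance (the set name and its contents are illustrative),
     * {@link #newKeySet()} yields a thread-safe {@code Set} without
     * exposing the backing map at all:
     *
     * <pre> {@code
     * Set<String> activeIds = ConcurrentHashMap.<String>newKeySet();
     * activeIds.add("job-42");
     * boolean present = activeIds.contains("job-42");
     * }</pre>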
90     *
91 jsr166 1.145 * <p>A ConcurrentHashMap can be used as a scalable frequency map (a
92 dl 1.137 * form of histogram or multiset) by using {@link LongAdder} values
93     * and initializing via {@link #computeIfAbsent}. For example, to add
94     * a count to a {@code ConcurrentHashMap<String,LongAdder> freqs}, you
95     * can use {@code freqs.computeIfAbsent(k, x -> new
96     * LongAdder()).increment();}
97     *
98 dl 1.45 * <p>This class and its views and iterators implement all of the
99     * <em>optional</em> methods of the {@link Map} and {@link Iterator}
100     * interfaces.
101 dl 1.23 *
102 jsr166 1.145 * <p>Like {@link Hashtable} but unlike {@link HashMap}, this class
103 dl 1.119 * does <em>not</em> allow {@code null} to be used as a key or value.
104 tim 1.1 *
105 dl 1.137 * <p>ConcurrentHashMaps support parallel operations using the {@link
106 jsr166 1.140 * ForkJoinPool#commonPool}. (Tasks that may be used in other contexts
107 dl 1.137 * are available in class {@link ForkJoinTasks}). These operations are
108     * designed to be safely, and often sensibly, applied even with maps
109     * that are being concurrently updated by other threads; for example,
110     * when computing a snapshot summary of the values in a shared
111     * registry. There are three kinds of operation, each with four
112     * forms, accepting functions with Keys, Values, Entries, and (Key,
113 dl 1.142 * Value) arguments and/or return values. (The first three forms are
114     * also available via the {@link #keySet()}, {@link #values()} and
115     * {@link #entrySet()} views). Because the elements of a
116 dl 1.137 * ConcurrentHashMap are not ordered in any particular way, and may be
117     * processed in different orders in different parallel executions, the
118     * correctness of supplied functions should not depend on any
119     * ordering, or on any other objects or values that may transiently
120     * change while computation is in progress; and except for forEach
121     * actions, should ideally be side-effect-free.
122     *
123     * <ul>
124     * <li> forEach: Perform a given action on each element.
125     * A variant form applies a given transformation on each element
126     * before performing the action.</li>
127     *
128     * <li> search: Return the first available non-null result of
129     * applying a given function on each element; skipping further
130     * search when a result is found.</li>
131     *
132     * <li> reduce: Accumulate each element. The supplied reduction
133     * function cannot rely on ordering (more formally, it should be
134     * both associative and commutative). There are five variants:
135     *
136     * <ul>
137     *
138     * <li> Plain reductions. (There is not a form of this method for
139     * (key, value) function arguments since there is no corresponding
140     * return type.)</li>
141     *
142     * <li> Mapped reductions that accumulate the results of a given
143     * function applied to each element.</li>
144     *
145     * <li> Reductions to scalar doubles, longs, and ints, using a
146     * given basis value.</li>
147     *
148     * </li>
149     * </ul>
150     * </ul>
151     *
152     * <p>The concurrency properties of bulk operations follow
153     * from those of ConcurrentHashMap: Any non-null result returned
154     * from {@code get(key)} and related access methods bears a
155     * happens-before relation with the associated insertion or
156     * update. The result of any bulk operation reflects the
157     * composition of these per-element relations (but is not
158     * necessarily atomic with respect to the map as a whole unless it
159     * is somehow known to be quiescent). Conversely, because keys
160     * and values in the map are never null, null serves as a reliable
161     * atomic indicator of the current lack of any result. To
162     * maintain this property, null serves as an implicit basis for
163     * all non-scalar reduction operations. For the double, long, and
164     * int versions, the basis should be one that, when combined with
165     * any other value, returns that other value (more formally, it
166     * should be the identity element for the reduction). Most common
167     * reductions have these properties; for example, computing a sum
168     * with basis 0 or a minimum with basis MAX_VALUE.
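     * As a concrete check of this requirement: reducing the values
     * {@code {3, 7}} to a {@code long} minimum with basis
     * {@code Long.MAX_VALUE} yields 3, while the same reduction over an
     * empty map yields the basis itself, so the basis never distorts a
     * non-empty result.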
169     *
170     * <p>Search and transformation functions provided as arguments
171     * should similarly return null to indicate the lack of any result
172     * (in which case it is not used). In the case of mapped
173     * reductions, this also enables transformations to serve as
174     * filters, returning null (or, in the case of primitive
175     * specializations, the identity basis) if the element should not
176     * be combined. You can create compound transformations and
177     * filterings by composing them yourself under this "null means
178     * there is nothing there now" rule before using them in search or
179     * reduce operations.
180     *
181     * <p>Methods accepting and/or returning Entry arguments maintain
182     * key-value associations. They may be useful for example when
183     * finding the key for the greatest value. Note that "plain" Entry
184     * arguments can be supplied using {@code new
185     * AbstractMap.SimpleEntry(k,v)}.
186     *
187 jsr166 1.145 * <p>Bulk operations may complete abruptly, throwing an
188 dl 1.137 * exception encountered in the application of a supplied
189     * function. Bear in mind when handling such exceptions that other
190     * concurrently executing functions could also have thrown
191     * exceptions, or would have done so if the first exception had
192     * not occurred.
193     *
194     * <p>Parallel speedups for bulk operations compared to sequential
195     * processing are common but not guaranteed. Operations involving
196     * brief functions on small maps may execute more slowly than
197     * sequential loops if the underlying work to parallelize the
198 jsr166 1.141 * computation is more expensive than the computation itself.
199     * Similarly, parallelization may not lead to much actual parallelism
200     * if all processors are busy performing unrelated tasks.
201 dl 1.137 *
202 jsr166 1.145 * <p>All arguments to all task methods must be non-null.
203 dl 1.137 *
204     * <p><em>jsr166e note: During transition, this class
205     * uses nested functional interfaces with different names but the
206 jsr166 1.144 * same forms as those expected for JDK8.</em>
207 dl 1.137 *
208 dl 1.42 * <p>This class is a member of the
209 jsr166 1.88 * <a href="{@docRoot}/../technotes/guides/collections/index.html">
210 dl 1.42 * Java Collections Framework</a>.
211     *
212 dl 1.8 * @since 1.5
213     * @author Doug Lea
214 dl 1.27 * @param <K> the type of keys maintained by this map
215 jsr166 1.64 * @param <V> the type of mapped values
216 dl 1.8 */
217 dl 1.119 public class ConcurrentHashMap<K, V>
218     implements ConcurrentMap<K, V>, Serializable {
219 dl 1.20 private static final long serialVersionUID = 7249069246763182397L;
220 tim 1.1
221 dl 1.119 /**
222     * A partitionable iterator. A Spliterator can be traversed
223     * directly, but can also be partitioned (before traversal) by
224     * creating another Spliterator that covers a non-overlapping
225     * portion of the elements, and so may be amenable to parallel
226     * execution.
227     *
228 jsr166 1.145 * <p>This interface exports a subset of expected JDK8
229 dl 1.119 * functionality.
230     *
231     * <p>Sample usage: Here is one (of the several) ways to compute
232     * the sum of the values held in a map using the ForkJoin
233     * framework. As illustrated here, Spliterators are well suited to
234     * designs in which a task repeatedly splits off half its work
235     * into forked subtasks until small enough to process directly,
236     * and then joins these subtasks. Variants of this style can also
237     * be used in completion-based designs.
238     *
239     * <pre>
240     * {@code ConcurrentHashMap<String, Long> m = ...
241     * // split as if have 8 * parallelism, for load balance
242     * int n = m.size();
243     * int p = aForkJoinPool.getParallelism() * 8;
244     * int split = (n < p)? n : p;
245     * long sum = aForkJoinPool.invoke(new SumValues(m.valueSpliterator(), split, null));
246     * // ...
247     * static class SumValues extends RecursiveTask<Long> {
248     * final Spliterator<Long> s;
249     * final int split; // split while > 1
250     * final SumValues nextJoin; // records forked subtasks to join
251     * SumValues(Spliterator<Long> s, int split, SumValues nextJoin) {
252     * this.s = s; this.split = split; this.nextJoin = nextJoin;
253     * }
254     * public Long compute() {
255     * long sum = 0;
256     * SumValues subtasks = null; // fork subtasks
257     * for (int h = split >>> 1; h > 0; h >>>= 1)
258     * (subtasks = new SumValues(s.split(), h, subtasks)).fork();
259     * while (s.hasNext()) // directly process remaining elements
260     * sum += s.next();
261     * for (SumValues t = subtasks; t != null; t = t.nextJoin)
262     * sum += t.join(); // collect subtask results
263     * return sum;
264     * }
265     * }
266     * }</pre>
267     */
268     public static interface Spliterator<T> extends Iterator<T> {
269     /**
270     * Returns a Spliterator covering approximately half of the
271     * elements, guaranteed not to overlap with those subsequently
272     * returned by this Spliterator. After invoking this method,
273     * the current Spliterator will <em>not</em> produce any of
274     * the elements of the returned Spliterator, but the two
275     * Spliterators together will produce all of the elements that
276     * would have been produced by this Spliterator had this
277     * method not been called. The exact number of elements
278     * produced by the returned Spliterator is not guaranteed, and
279     * may be zero (i.e., with {@code hasNext()} reporting {@code
280     * false}) if this Spliterator cannot be further split.
281     *
282     * @return a Spliterator covering approximately half of the
283     * elements
284     * @throws IllegalStateException if this Spliterator has
285     * already commenced traversing elements
286     */
287     Spliterator<T> split();
288     }
289    
290 tim 1.1 /*
291 dl 1.119 * Overview:
292     *
293     * The primary design goal of this hash table is to maintain
294     * concurrent readability (typically method get(), but also
295     * iterators and related methods) while minimizing update
296     * contention. Secondary goals are to keep space consumption about
297     * the same or better than java.util.HashMap, and to support high
298     * initial insertion rates on an empty table by many threads.
299     *
300     * Each key-value mapping is held in a Node. Because Node fields
301     * can contain special values, they are defined using plain Object
302     * types. Similarly in turn, all internal methods that use them
303     * work off Object types. And similarly, so do the internal
304 dl 1.149 * methods of auxiliary iterator and view classes. This also
305     * allows many of the public methods to be factored into a smaller
306     * number of internal methods (although sadly not so for the five
307 dl 1.119 * variants of put-related operations). The validation-based
308     * approach explained below leads to a lot of code sprawl because
309     * retry-control precludes factoring into smaller methods.
310     *
311     * The table is lazily initialized to a power-of-two size upon the
312     * first insertion. Each bin in the table normally contains a
313     * list of Nodes (most often, the list has only zero or one Node).
314     * Table accesses require volatile/atomic reads, writes, and
315     * CASes. Because there is no other way to arrange this without
316     * adding further indirections, we use intrinsics
317     * (sun.misc.Unsafe) operations. The lists of nodes within bins
318     * are always accurately traversable under volatile reads, so long
319     * as lookups check hash code and non-nullness of value before
320     * checking key equality.
321     *
322 dl 1.149 * We use the top (sign) bit of Node hash fields for control
323     * purposes -- it is available anyway because of addressing
324     * constraints. Nodes with negative hash fields are forwarding
325     * nodes to either TreeBins or resized tables. The lower 31 bits
326     * of each normal Node's hash field contain a transformation of
327     * the key's hash code.
328 dl 1.119 *
329     * Insertion (via put or its variants) of the first node in an
330     * empty bin is performed by just CASing it to the bin. This is
331     * by far the most common case for put operations under most
332     * key/hash distributions. Other update operations (insert,
333     * delete, and replace) require locks. We do not want to waste
334     * the space required to associate a distinct lock object with
335     * each bin, so instead use the first node of a bin list itself as
336 dl 1.149 * a lock. Locking support for these locks relies on builtin
337     * "synchronized" monitors.
338 dl 1.119 *
339     * Using the first node of a list as a lock does not by itself
340     * suffice though: When a node is locked, any update must first
341     * validate that it is still the first node after locking it, and
342     * retry if not. Because new nodes are always appended to lists,
343     * once a node is first in a bin, it remains first until deleted
344     * or the bin becomes invalidated (upon resizing). However,
345     * operations that only conditionally update may inspect nodes
346     * until the point of update. This is a converse of sorts to the
347     * lazy locking technique described by Herlihy & Shavit.
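     *
     * In schematic form (mirroring the structure of internalPut and the
     * other locking update methods below), an update of a non-empty bin
     * proceeds as:
     *
     *   Node f = tabAt(tab, i);          // read bin head
     *   synchronized (f) {
     *     if (tabAt(tab, i) == f) {      // revalidate head after locking
     *       // scan the list: add, replace, or unlink a node
     *     }
     *   }
     *   // if the head changed, fall through and retry the outer loop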
348     *
349     * The main disadvantage of per-bin locks is that other update
350     * operations on other nodes in a bin list protected by the same
351     * lock can stall, for example when user equals() or mapping
352     * functions take a long time. However, statistically, under
353     * random hash codes, this is not a common problem. Ideally, the
354     * frequency of nodes in bins follows a Poisson distribution
355     * (http://en.wikipedia.org/wiki/Poisson_distribution) with a
356     * parameter of about 0.5 on average, given the resizing threshold
357     * of 0.75, although with a large variance because of resizing
358     * granularity. Ignoring variance, the expected occurrences of
359     * list size k are (exp(-0.5) * pow(0.5, k) / factorial(k)). The
360     * first values are:
361     *
362     * 0: 0.60653066
363     * 1: 0.30326533
364     * 2: 0.07581633
365     * 3: 0.01263606
366     * 4: 0.00157952
367     * 5: 0.00015795
368     * 6: 0.00001316
369     * 7: 0.00000094
370     * 8: 0.00000006
371     * more: less than 1 in ten million
372     *
373     * Lock contention probability for two threads accessing distinct
374     * elements is roughly 1 / (8 * #elements) under random hashes.
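     *
     * (For reference, the list above can be reproduced by evaluating
     * exp(-0.5) * pow(0.5, k) / factorial(k) incrementally in plain
     * Java:
     *
     *   double pk = Math.exp(-0.5);       // k = 0 term
     *   for (int k = 0; k <= 8; ++k) {
     *     System.out.printf("%d: %.8f%n", k, pk);
     *     pk *= 0.5 / (k + 1);            // next Poisson term
     *   }
     *
     * which prints the probabilities listed above.)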
375     *
376     * Actual hash code distributions encountered in practice
377     * sometimes deviate significantly from uniform randomness. This
378     * includes the case when N > (1<<30), so some keys MUST collide.
379     * Similarly for dumb or hostile usages in which multiple keys are
380     * designed to have identical hash codes. Also, although we guard
381     * against the worst effects of this (see method spread), sets of
382     * hashes may differ only in bits that do not impact their bin
383     * index for a given power-of-two mask. So we use a secondary
384     * strategy that applies when the number of nodes in a bin exceeds
385     * a threshold, and at least one of the keys implements
386     * Comparable. These TreeBins use a balanced tree to hold nodes
387     * (a specialized form of red-black trees), bounding search time
388     * to O(log N). Each search step in a TreeBin is around twice as
389     * slow as in a regular list, but given that N cannot exceed
390     * (1<<64) (before running out of addresses) this bounds search
391     * steps, lock hold times, etc, to reasonable constants (roughly
392     * 100 nodes inspected per operation worst case) so long as keys
393     * are Comparable (which is very common -- String, Long, etc).
394     * TreeBin nodes (TreeNodes) also maintain the same "next"
395     * traversal pointers as regular nodes, so can be traversed in
396     * iterators in the same way.
397     *
398     * The table is resized when occupancy exceeds a percentage
399 dl 1.149 * threshold (nominally, 0.75, but see below). Any thread
400     * noticing an overfull bin may assist in resizing after the
401     * initiating thread allocates and sets up the replacement
402     * array. However, rather than stalling, these other threads may
403     * proceed with insertions etc. The use of TreeBins shields us
404     * from the worst case effects of overfilling while resizes are in
405     * progress. Resizing proceeds by transferring bins, one by one,
406     * from the table to the next table. To enable concurrency, the
407     * next table must be (incrementally) prefilled with place-holders
408     * serving as reverse forwarders to the old table. Because we are
409     * using power-of-two expansion, the elements from each bin must
410     * either stay at same index, or move with a power of two
411     * offset. We eliminate unnecessary node creation by catching
412     * cases where old nodes can be reused because their next fields
413     * won't change. On average, only about one-sixth of them need
414     * cloning when a table doubles. The nodes they replace will be
415     * garbage collectable as soon as they are no longer referenced by
416     * any reader thread that may be in the midst of concurrently
417     * traversing table. Upon transfer, the old table bin contains
418     * only a special forwarding node (with hash field "MOVED") that
419     * contains the next table as its key. On encountering a
420     * forwarding node, access and update operations restart, using
421     * the new table.
422     *
423     * Each bin transfer requires its bin lock, which can stall
424     * waiting for locks while resizing. However, because other
425     * threads can join in and help resize rather than contend for
426     * locks, average aggregate waits become shorter as resizing
427     * progresses. The transfer operation must also ensure that all
428     * accessible bins in both the old and new table are usable by any
429     * traversal. This is arranged by proceeding from the last bin
430     * (table.length - 1) up towards the first. Upon seeing a
431     * forwarding node, traversals (see class Traverser) arrange to
432     * move to the new table without revisiting nodes. However, to
433     * ensure that no intervening nodes are skipped, bin splitting can
434     * only begin after the associated reverse-forwarders are in
435     * place.
436 dl 1.119 *
437     * The traversal scheme also applies to partial traversals of
438     * ranges of bins (via an alternate Traverser constructor)
439     * to support partitioned aggregate operations. Also, read-only
440     * operations give up if ever forwarded to a null table, which
441     * provides support for shutdown-style clearing, which is also not
442     * currently implemented.
443     *
444     * Lazy table initialization minimizes footprint until first use,
445     * and also avoids resizings when the first operation is from a
446     * putAll, constructor with map argument, or deserialization.
447     * These cases attempt to override the initial capacity settings,
448     * but harmlessly fail to take effect in cases of races.
449     *
450 dl 1.149 * The element count is maintained using a specialization of
451     * LongAdder. We need to incorporate a specialization rather than
452     * just use a LongAdder in order to access implicit
453     * contention-sensing that leads to creation of multiple
454     * CounterCells. The counter mechanics avoid contention on
455     * updates but can encounter cache thrashing if read too
456     * frequently during concurrent access. To avoid reading so often,
457     * resizing under contention is attempted only upon adding to a
458     * bin already holding two or more nodes. Under uniform hash
459     * distributions, the probability of this occurring at threshold
460     * is around 13%, meaning that only about 1 in 8 puts check
461     * threshold (and after resizing, many fewer do so). The bulk
462     * putAll operation further reduces contention by only committing
463     * count updates upon these size checks.
464 dl 1.119 *
465     * Maintaining API and serialization compatibility with previous
466     * versions of this class introduces several oddities. Mainly: We
467     * leave untouched but unused constructor arguments referring to
468     * concurrencyLevel. We accept a loadFactor constructor argument,
469     * but apply it only to initial table capacity (which is the only
470     * time that we can guarantee to honor it.) We also declare an
471     * unused "Segment" class that is instantiated in minimal form
472     * only when serializing.
473 dl 1.4 */
474 tim 1.1
475 dl 1.4 /* ---------------- Constants -------------- */
476 tim 1.11
477 dl 1.4 /**
478 dl 1.119 * The largest possible table capacity. This value must be
479     * exactly 1<<30 to stay within Java array allocation and indexing
480     * bounds for power of two table sizes, and is further required
481     * because the top two bits of 32bit hash fields are used for
482     * control purposes.
483 dl 1.4 */
484 dl 1.119 private static final int MAXIMUM_CAPACITY = 1 << 30;
485 dl 1.56
486     /**
487 dl 1.119 * The default initial table capacity. Must be a power of 2
488     * (i.e., at least 1) and at most MAXIMUM_CAPACITY.
489 dl 1.56 */
490 dl 1.119 private static final int DEFAULT_CAPACITY = 16;
491 dl 1.56
492     /**
493 dl 1.119 * The largest possible (non-power of two) array size.
494     * Needed by toArray and related methods.
495 jsr166 1.59 */
496 dl 1.119 static final int MAX_ARRAY_SIZE = Integer.MAX_VALUE - 8;
497 tim 1.1
498     /**
499 dl 1.119 * The default concurrency level for this table. Unused but
500     * defined for compatibility with previous versions of this class.
501 dl 1.4 */
502 dl 1.119 private static final int DEFAULT_CONCURRENCY_LEVEL = 16;
503 tim 1.11
504 tim 1.1 /**
505 dl 1.119 * The load factor for this table. Overrides of this value in
506     * constructors affect only the initial table capacity. The
507     * actual floating point value isn't normally used -- it is
508     * simpler to use expressions such as {@code n - (n >>> 2)} for
509     * the associated resizing threshold.
510 dl 1.99 */
511 dl 1.119 private static final float LOAD_FACTOR = 0.75f;
512 dl 1.99
513     /**
514 dl 1.119 * The bin count threshold for using a tree rather than list for a
515     * bin. The value reflects the approximate break-even point for
516     * using tree-based operations.
517     */
518     private static final int TREE_THRESHOLD = 8;
519    
520 dl 1.149 /**
521     * Minimum number of rebinnings per transfer step. Ranges are
522     * subdivided to allow multiple resizer threads. This value
523     * serves as a lower bound to avoid resizers encountering
524     * excessive memory contention. The value should be at least
525     * DEFAULT_CAPACITY.
526     */
527     private static final int MIN_TRANSFER_STRIDE = 16;
528    
529 dl 1.119 /*
530 dl 1.149 * Encodings for Node hash fields. See above for explanation.
531 dl 1.46 */
532 dl 1.119 static final int MOVED = 0x80000000; // hash field for forwarding nodes
533 dl 1.149 static final int HASH_BITS = 0x7fffffff; // usable bits of normal node hash
534    
535     /** Number of CPUS, to place bounds on some sizings */
536     static final int NCPU = Runtime.getRuntime().availableProcessors();
537    
538     /* ---------------- Counters -------------- */
539    
540     // Adapted from LongAdder and Striped64.
541     // See their internal docs for explanation.
542    
543     // A padded cell for distributing counts
544     static final class CounterCell {
545     volatile long p0, p1, p2, p3, p4, p5, p6;
546     volatile long value;
547     volatile long q0, q1, q2, q3, q4, q5, q6;
548     CounterCell(long x) { value = x; }
549     }
550    
551     /**
552     * Holder for the thread-local hash code determining which
553     * CounterCell to use. The code is initialized via the
554     * counterHashCodeGenerator, but may be moved upon collisions.
555     */
556     static final class CounterHashCode {
557     int code;
558     }
559    
560     /**
561     * Generates initial value for per-thread CounterHashCodes
562     */
563     static final AtomicInteger counterHashCodeGenerator = new AtomicInteger();
564    
565     /**
566     * Increment for counterHashCodeGenerator. See class ThreadLocal
567     * for explanation.
568     */
569     static final int SEED_INCREMENT = 0x61c88647;
570    
571     /**
572     * Per-thread counter hash codes. Shared across all instances
573     */
574     static final ThreadLocal<CounterHashCode> threadCounterHashCode =
575     new ThreadLocal<CounterHashCode>();
576 dl 1.46
577 dl 1.4 /* ---------------- Fields -------------- */
578 tim 1.1
579     /**
580 dl 1.119 * The array of bins. Lazily initialized upon first insertion.
581     * Size is always a power of two. Accessed directly by iterators.
582 jsr166 1.59 */
583 dl 1.119 transient volatile Node[] table;
584 tim 1.1
585     /**
586 dl 1.149 * The next table to use; non-null only while resizing.
587 jsr166 1.59 */
588 dl 1.149 private transient volatile Node[] nextTable;
589    
590     /**
591     * Base counter value, used mainly when there is no contention,
592     * but also as a fallback during table initialization
593     * races. Updated via CAS.
594     */
595     private transient volatile long baseCount;
596 tim 1.1
597     /**
598 dl 1.119 * Table initialization and resizing control. When negative, the
599 dl 1.149 * table is being initialized or resized: -1 for initialization,
600     * else -(1 + the number of active resizing threads). Otherwise,
601     * when table is null, holds the initial table size to use upon
602     * creation, or 0 for default. After initialization, holds the
603     * next element count value upon which to resize the table.
604 tim 1.1 */
605 dl 1.119 private transient volatile int sizeCtl;
606 dl 1.4
607 dl 1.149 /**
608     * The next table index (plus one) to split while resizing.
609     */
610     private transient volatile int transferIndex;
611    
612     /**
613     * The least available table index to split while resizing.
614     */
615     private transient volatile int transferOrigin;
616    
617     /**
618     * Spinlock (locked via CAS) used when resizing and/or creating Cells.
619     */
620     private transient volatile int counterBusy;
621    
622     /**
623     * Table of counter cells. When non-null, size is a power of 2.
624     */
625     private transient volatile CounterCell[] counterCells;
626    
627 dl 1.119 // views
628 dl 1.137 private transient KeySetView<K,V> keySet;
629 dl 1.142 private transient ValuesView<K,V> values;
630     private transient EntrySetView<K,V> entrySet;
631 dl 1.119
632     /** For serialization compatibility. Null unless serialized; see below */
633     private Segment<K,V>[] segments;
634    
635     /* ---------------- Table element access -------------- */
636    
637     /*
638     * Volatile access methods are used for table elements as well as
639     * elements of in-progress next table while resizing. Uses are
640     * null checked by callers, and implicitly bounds-checked, relying
641     * on the invariants that tab arrays have non-zero size, and all
642     * indices are masked with (tab.length - 1) which is never
643     * negative and always less than length. Note that, to be correct
644     * wrt arbitrary concurrency errors by users, bounds checks must
645     * operate on local variables, which accounts for some odd-looking
646     * inline assignments below.
647     */
648    
649 dl 1.149 static final Node tabAt(Node[] tab, int i) { // used by Traverser
650     return (Node)U.getObjectVolatile(tab, ((long)i << ASHIFT) + ABASE);
651 dl 1.119 }
652    
653     private static final boolean casTabAt(Node[] tab, int i, Node c, Node v) {
654 dl 1.149 return U.compareAndSwapObject(tab, ((long)i << ASHIFT) + ABASE, c, v);
655 dl 1.119 }
656    
657     private static final void setTabAt(Node[] tab, int i, Node v) {
658 dl 1.149 U.putObjectVolatile(tab, ((long)i << ASHIFT) + ABASE, v);
659 dl 1.119 }
660    
661     /* ---------------- Nodes -------------- */
662 dl 1.4
663 dl 1.99 /**
664 dl 1.119 * Key-value entry. Note that this is never exported out as a
665     * user-visible Map.Entry (see MapEntry below). Nodes with a hash
666     * field of MOVED are special, and do not contain user keys or
667     * values. Otherwise, keys are never null, and null val fields
668     * indicate that a node is in the process of being deleted or
669     * created. For purposes of read-only access, a key may be read
670     * before a val, but can only be used after checking val to be
671     * non-null.
672 dl 1.99 */
673 dl 1.119 static class Node {
674 dl 1.149 final int hash;
675 dl 1.119 final Object key;
676     volatile Object val;
677     volatile Node next;
678 dl 1.99
679 dl 1.119 Node(int hash, Object key, Object val, Node next) {
680 dl 1.99 this.hash = hash;
681     this.key = key;
682 dl 1.119 this.val = val;
683 dl 1.99 this.next = next;
684     }
685     }
686    
687 dl 1.119 /* ---------------- TreeBins -------------- */
688    
689 dl 1.99 /**
690 dl 1.119 * Nodes for use in TreeBins
691 dl 1.99 */
692 dl 1.119 static final class TreeNode extends Node {
693     TreeNode parent; // red-black tree links
694     TreeNode left;
695     TreeNode right;
696     TreeNode prev; // needed to unlink next upon deletion
697     boolean red;
698 dl 1.99
699 dl 1.119 TreeNode(int hash, Object key, Object val, Node next, TreeNode parent) {
700     super(hash, key, val, next);
701     this.parent = parent;
702     }
703 dl 1.99 }
704 tim 1.1
705     /**
706 dl 1.119 * A specialized form of red-black tree for use in bins
707     * whose size exceeds a threshold.
708     *
709     * TreeBins use a special form of comparison for search and
710     * related operations (which is the main reason we cannot use
711     * existing collections such as TreeMaps). TreeBins contain
712     * Comparable elements, but may contain others, as well as
713     * elements that are Comparable but not necessarily Comparable<T>
714     * for the same T, so we cannot invoke compareTo among them. To
715     * handle this, the tree is ordered primarily by hash value, then
716     * by getClass().getName() order, and then by Comparator order
717     * among elements of the same class. On lookup at a node, if
718     * elements are not comparable or compare as 0, both left and
719     * right children may need to be searched in the case of tied hash
720     * values. (This corresponds to the full list search that would be
721     * necessary if all elements were non-Comparable and had tied
722     * hashes.) The red-black balancing code is updated from
723     * pre-jdk-collections
724     * (http://gee.cs.oswego.edu/dl/classes/collections/RBCell.java)
725     * based in turn on Cormen, Leiserson, and Rivest "Introduction to
726     * Algorithms" (CLR).
727     *
728     * TreeBins also maintain a separate locking discipline than
729     * regular bins. Because they are forwarded via special MOVED
730     * nodes at bin heads (which can never change once established),
731     * we cannot use those nodes as locks. Instead, TreeBin
732     * extends AbstractQueuedSynchronizer to support a simple form of
733     * read-write lock. For update operations and table validation,
734     * the exclusive form of lock behaves in the same way as bin-head
735     * locks. However, lookups use shared read-lock mechanics to allow
736     * multiple readers in the absence of writers. Additionally,
737     * these lookups do not ever block: While the lock is not
738     * available, they proceed along the slow traversal path (via
739     * next-pointers) until the lock becomes available or the list is
740     * exhausted, whichever comes first. (These cases are not fast,
741     * but maximize aggregate expected throughput.) The AQS mechanics
742     * for doing this are straightforward. The lock state is held as
743     * AQS getState(). Read counts are negative; the write count (1)
744     * is positive. There are no signalling preferences among readers
745     * and writers. Since we don't need to export full Lock API, we
746     * just override the minimal AQS methods and use them directly.
747     */
748     static final class TreeBin extends AbstractQueuedSynchronizer {
749 dl 1.24 private static final long serialVersionUID = 2249069246763182397L;
750 dl 1.119 transient TreeNode root; // root of tree
751     transient TreeNode first; // head of next-pointer list
752 dl 1.24
753 dl 1.119 /* AQS overrides */
754     public final boolean isHeldExclusively() { return getState() > 0; }
755     public final boolean tryAcquire(int ignore) {
756     if (compareAndSetState(0, 1)) {
757     setExclusiveOwnerThread(Thread.currentThread());
758     return true;
759     }
760     return false;
761     }
762     public final boolean tryRelease(int ignore) {
763     setExclusiveOwnerThread(null);
764     setState(0);
765     return true;
766     }
767     public final int tryAcquireShared(int ignore) {
768     for (int c;;) {
769     if ((c = getState()) > 0)
770     return -1;
771     if (compareAndSetState(c, c - 1))
772     return 1;
773     }
774     }
775     public final boolean tryReleaseShared(int ignore) {
776     int c;
777     do {} while (!compareAndSetState(c = getState(), c + 1));
778     return c == -1;
779     }
780    
781     /** From CLR */
782     private void rotateLeft(TreeNode p) {
783     if (p != null) {
784     TreeNode r = p.right, pp, rl;
785     if ((rl = p.right = r.left) != null)
786     rl.parent = p;
787     if ((pp = r.parent = p.parent) == null)
788     root = r;
789     else if (pp.left == p)
790     pp.left = r;
791     else
792     pp.right = r;
793     r.left = p;
794     p.parent = r;
795     }
796     }
797 dl 1.4
798 dl 1.119 /** From CLR */
799     private void rotateRight(TreeNode p) {
800     if (p != null) {
801     TreeNode l = p.left, pp, lr;
802     if ((lr = p.left = l.right) != null)
803     lr.parent = p;
804     if ((pp = l.parent = p.parent) == null)
805     root = l;
806     else if (pp.right == p)
807     pp.right = l;
808     else
809     pp.left = l;
810     l.right = p;
811     p.parent = l;
812     }
813     }
814 dl 1.4
815     /**
816 jsr166 1.123 * Returns the TreeNode (or null if not found) for the given key
817 dl 1.119 * starting at given root.
818 dl 1.4 */
819 dl 1.128 @SuppressWarnings("unchecked") final TreeNode getTreeNode
820     (int h, Object k, TreeNode p) {
821 dl 1.119 Class<?> c = k.getClass();
822     while (p != null) {
823     int dir, ph; Object pk; Class<?> pc;
824     if ((ph = p.hash) == h) {
825     if ((pk = p.key) == k || k.equals(pk))
826     return p;
827     if (c != (pc = pk.getClass()) ||
828     !(k instanceof Comparable) ||
829     (dir = ((Comparable)k).compareTo((Comparable)pk)) == 0) {
830 dl 1.148 if ((dir = (c == pc) ? 0 :
831     c.getName().compareTo(pc.getName())) == 0) {
832     TreeNode r = null, pl, pr; // check both sides
833     if ((pr = p.right) != null && h >= pr.hash &&
834     (r = getTreeNode(h, k, pr)) != null)
835     return r;
836     else if ((pl = p.left) != null && h <= pl.hash)
837     dir = -1;
838     else // nothing there
839     return null;
840 dl 1.99 }
841 dl 1.45 }
842     }
843 dl 1.119 else
844     dir = (h < ph) ? -1 : 1;
845     p = (dir > 0) ? p.right : p.left;
846 dl 1.33 }
847 dl 1.119 return null;
848 dl 1.33 }
849    
850 dl 1.99 /**
851 dl 1.119 * Wrapper for getTreeNode used by CHM.get. Tries to obtain
852     * read-lock to call getTreeNode, but during failure to get
853     * lock, searches along next links.
854 dl 1.99 */
855 dl 1.119 final Object getValue(int h, Object k) {
856     Node r = null;
857     int c = getState(); // Must read lock state first
858     for (Node e = first; e != null; e = e.next) {
859     if (c <= 0 && compareAndSetState(c, c - 1)) {
860     try {
861     r = getTreeNode(h, k, root);
862     } finally {
863     releaseShared(0);
864 dl 1.99 }
865     break;
866     }
867 dl 1.149 else if (e.hash == h && k.equals(e.key)) {
868 dl 1.119 r = e;
869 dl 1.99 break;
870     }
871 dl 1.119 else
872     c = getState();
873 dl 1.99 }
874 dl 1.119 return r == null ? null : r.val;
875 dl 1.99 }
876    
877     /**
878 dl 1.119 * Finds or adds a node.
879     * @return null if added
880 dl 1.6 */
881 dl 1.128 @SuppressWarnings("unchecked") final TreeNode putTreeNode
882     (int h, Object k, Object v) {
883 dl 1.119 Class<?> c = k.getClass();
884     TreeNode pp = root, p = null;
885     int dir = 0;
886     while (pp != null) { // find existing node or leaf to insert at
887     int ph; Object pk; Class<?> pc;
888     p = pp;
889     if ((ph = p.hash) == h) {
890     if ((pk = p.key) == k || k.equals(pk))
891     return p;
892     if (c != (pc = pk.getClass()) ||
893     !(k instanceof Comparable) ||
894     (dir = ((Comparable)k).compareTo((Comparable)pk)) == 0) {
895 dl 1.148 TreeNode s = null, r = null, pr;
896     if ((dir = (c == pc) ? 0 :
897     c.getName().compareTo(pc.getName())) == 0) {
898     if ((pr = p.right) != null && h >= pr.hash &&
899     (r = getTreeNode(h, k, pr)) != null)
900     return r;
901     else // continue left
902     dir = -1;
903 dl 1.99 }
904 dl 1.119 else if ((pr = p.right) != null && h >= pr.hash)
905     s = pr;
906     if (s != null && (r = getTreeNode(h, k, s)) != null)
907     return r;
908 dl 1.99 }
909     }
910 dl 1.119 else
911     dir = (h < ph) ? -1 : 1;
912     pp = (dir > 0) ? p.right : p.left;
913 dl 1.99 }
914    
915 dl 1.119 TreeNode f = first;
916     TreeNode x = first = new TreeNode(h, k, v, f, p);
917     if (p == null)
918     root = x;
919     else { // attach and rebalance; adapted from CLR
920     TreeNode xp, xpp;
921     if (f != null)
922     f.prev = x;
923     if (dir <= 0)
924     p.left = x;
925     else
926     p.right = x;
927     x.red = true;
928     while (x != null && (xp = x.parent) != null && xp.red &&
929     (xpp = xp.parent) != null) {
930     TreeNode xppl = xpp.left;
931     if (xp == xppl) {
932     TreeNode y = xpp.right;
933     if (y != null && y.red) {
934     y.red = false;
935     xp.red = false;
936     xpp.red = true;
937     x = xpp;
938     }
939     else {
940     if (x == xp.right) {
941     rotateLeft(x = xp);
942     xpp = (xp = x.parent) == null ? null : xp.parent;
943     }
944     if (xp != null) {
945     xp.red = false;
946     if (xpp != null) {
947     xpp.red = true;
948     rotateRight(xpp);
949     }
950     }
951     }
952     }
953     else {
954     TreeNode y = xppl;
955     if (y != null && y.red) {
956     y.red = false;
957     xp.red = false;
958     xpp.red = true;
959     x = xpp;
960     }
961     else {
962     if (x == xp.left) {
963     rotateRight(x = xp);
964     xpp = (xp = x.parent) == null ? null : xp.parent;
965     }
966     if (xp != null) {
967     xp.red = false;
968     if (xpp != null) {
969     xpp.red = true;
970     rotateLeft(xpp);
971     }
972     }
973 dl 1.99 }
974     }
975     }
976 dl 1.119 TreeNode r = root;
977     if (r != null && r.red)
978     r.red = false;
979 dl 1.99 }
980 dl 1.119 return null;
981 dl 1.99 }
982 dl 1.45
983 dl 1.119 /**
984     * Removes the given node, that must be present before this
985     * call. This is messier than typical red-black deletion code
986     * because we cannot swap the contents of an interior node
987     * with a leaf successor that is pinned by "next" pointers
988     * that are accessible independently of lock. So instead we
989     * swap the tree linkages.
990     */
991     final void deleteTreeNode(TreeNode p) {
992     TreeNode next = (TreeNode)p.next; // unlink traversal pointers
993     TreeNode pred = p.prev;
994     if (pred == null)
995     first = next;
996     else
997     pred.next = next;
998     if (next != null)
999     next.prev = pred;
1000     TreeNode replacement;
1001     TreeNode pl = p.left;
1002     TreeNode pr = p.right;
1003     if (pl != null && pr != null) {
1004     TreeNode s = pr, sl;
1005     while ((sl = s.left) != null) // find successor
1006     s = sl;
1007     boolean c = s.red; s.red = p.red; p.red = c; // swap colors
1008     TreeNode sr = s.right;
1009     TreeNode pp = p.parent;
1010     if (s == pr) { // p was s's direct parent
1011     p.parent = s;
1012     s.right = p;
1013     }
1014     else {
1015     TreeNode sp = s.parent;
1016     if ((p.parent = sp) != null) {
1017     if (s == sp.left)
1018     sp.left = p;
1019     else
1020     sp.right = p;
1021 dl 1.45 }
1022 dl 1.119 if ((s.right = pr) != null)
1023     pr.parent = s;
1024 dl 1.4 }
1025 dl 1.119 p.left = null;
1026     if ((p.right = sr) != null)
1027     sr.parent = p;
1028     if ((s.left = pl) != null)
1029     pl.parent = s;
1030     if ((s.parent = pp) == null)
1031     root = s;
1032     else if (p == pp.left)
1033     pp.left = s;
1034     else
1035     pp.right = s;
1036     replacement = sr;
1037     }
1038     else
1039     replacement = (pl != null) ? pl : pr;
1040     TreeNode pp = p.parent;
1041     if (replacement == null) {
1042     if (pp == null) {
1043     root = null;
1044     return;
1045     }
1046     replacement = p;
1047 dl 1.99 }
1048 dl 1.119 else {
1049     replacement.parent = pp;
1050     if (pp == null)
1051     root = replacement;
1052     else if (p == pp.left)
1053     pp.left = replacement;
1054     else
1055     pp.right = replacement;
1056     p.left = p.right = p.parent = null;
1057 dl 1.4 }
1058 dl 1.119 if (!p.red) { // rebalance, from CLR
1059     TreeNode x = replacement;
1060     while (x != null) {
1061     TreeNode xp, xpl;
1062     if (x.red || (xp = x.parent) == null) {
1063     x.red = false;
1064 dl 1.99 break;
1065 dl 1.119 }
1066     if (x == (xpl = xp.left)) {
1067     TreeNode sib = xp.right;
1068     if (sib != null && sib.red) {
1069     sib.red = false;
1070     xp.red = true;
1071     rotateLeft(xp);
1072     sib = (xp = x.parent) == null ? null : xp.right;
1073     }
1074     if (sib == null)
1075     x = xp;
1076     else {
1077     TreeNode sl = sib.left, sr = sib.right;
1078     if ((sr == null || !sr.red) &&
1079     (sl == null || !sl.red)) {
1080     sib.red = true;
1081     x = xp;
1082     }
1083     else {
1084     if (sr == null || !sr.red) {
1085     if (sl != null)
1086     sl.red = false;
1087     sib.red = true;
1088     rotateRight(sib);
1089 dl 1.149 sib = (xp = x.parent) == null ?
1090     null : xp.right;
1091 dl 1.119 }
1092     if (sib != null) {
1093     sib.red = (xp == null) ? false : xp.red;
1094     if ((sr = sib.right) != null)
1095     sr.red = false;
1096     }
1097     if (xp != null) {
1098     xp.red = false;
1099     rotateLeft(xp);
1100     }
1101     x = root;
1102     }
1103     }
1104     }
1105     else { // symmetric
1106     TreeNode sib = xpl;
1107     if (sib != null && sib.red) {
1108     sib.red = false;
1109     xp.red = true;
1110     rotateRight(xp);
1111     sib = (xp = x.parent) == null ? null : xp.left;
1112     }
1113     if (sib == null)
1114     x = xp;
1115     else {
1116     TreeNode sl = sib.left, sr = sib.right;
1117     if ((sl == null || !sl.red) &&
1118     (sr == null || !sr.red)) {
1119     sib.red = true;
1120     x = xp;
1121     }
1122     else {
1123     if (sl == null || !sl.red) {
1124     if (sr != null)
1125     sr.red = false;
1126     sib.red = true;
1127     rotateLeft(sib);
1128 dl 1.149 sib = (xp = x.parent) == null ?
1129     null : xp.left;
1130 dl 1.119 }
1131     if (sib != null) {
1132     sib.red = (xp == null) ? false : xp.red;
1133     if ((sl = sib.left) != null)
1134     sl.red = false;
1135     }
1136     if (xp != null) {
1137     xp.red = false;
1138     rotateRight(xp);
1139     }
1140     x = root;
1141     }
1142     }
1143     }
1144 dl 1.45 }
1145 dl 1.4 }
1146 dl 1.119 if (p == replacement && (pp = p.parent) != null) {
1147     if (p == pp.left) // detach pointers
1148     pp.left = null;
1149     else if (p == pp.right)
1150     pp.right = null;
1151     p.parent = null;
1152     }
1153 dl 1.4 }
1154 tim 1.1 }
1155    
1156 dl 1.119 /* ---------------- Collision reduction methods -------------- */
1157 tim 1.1
1158 dl 1.99 /**
1159 dl 1.149 * Spreads higher bits to lower, and also forces top bit to 0.
1160 dl 1.119 * Because the table uses power-of-two masking, sets of hashes
1161     * that vary only in bits above the current mask will always
1162     * collide. (Among known examples are sets of Float keys holding
1163     * consecutive whole numbers in small tables.) To counter this,
1164     * we apply a transform that spreads the impact of higher bits
1165     * downward. There is a tradeoff between speed, utility, and
1166     * quality of bit-spreading. Because many common sets of hashes
1167     * are already reasonably distributed across bits (so don't benefit
1168     * from spreading), and because we use trees to handle large sets
1169     * of collisions in bins, we don't need excessively high quality.
1170     */
1171     private static final int spread(int h) {
1172     h ^= (h >>> 18) ^ (h >>> 12);
1173     return (h ^ (h >>> 10)) & HASH_BITS;
1174 dl 1.99 }
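    // Illustrative numbers (not used by the implementation): the Float
    // keys 1.0f, 2.0f and 3.0f hash to 0x3f800000, 0x40000000 and
    // 0x40400000, which differ only in their upper bits, so without
    // spreading they would all select bin 0 of any small table. The
    // transform above folds bits 10..31 into the low-order bits, after
    // which these three keys index distinct bins of a 16-slot table.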
1175    
1176     /**
1177 dl 1.149 * Replaces a list bin with a tree bin if key is comparable. Call
1178     * only when locked.
1179 dl 1.119 */
1180     private final void replaceWithTreeBin(Node[] tab, int index, Object key) {
1181 dl 1.149 if (key instanceof Comparable) {
1182 dl 1.119 TreeBin t = new TreeBin();
1183     for (Node e = tabAt(tab, index); e != null; e = e.next)
1184 dl 1.149 t.putTreeNode(e.hash, e.key, e.val);
1185 dl 1.119 setTabAt(tab, index, new Node(MOVED, t, null, null));
1186     }
1187 dl 1.99 }
1188 tim 1.11
1189 dl 1.119 /* ---------------- Internal access and update methods -------------- */
1190    
1191     /** Implementation for get and containsKey */
1192 dl 1.149 @SuppressWarnings("unchecked") private final V internalGet(Object k) {
1193 dl 1.119 int h = spread(k.hashCode());
1194     retry: for (Node[] tab = table; tab != null;) {
1195 dl 1.149 Node e; Object ek, ev; int eh; // locals to read fields once
1196 dl 1.119 for (e = tabAt(tab, (tab.length - 1) & h); e != null; e = e.next) {
1197 dl 1.149 if ((eh = e.hash) < 0) {
1198 dl 1.119 if ((ek = e.key) instanceof TreeBin) // search TreeBin
1199 dl 1.149 return (V)((TreeBin)ek).getValue(h, k);
1200 dl 1.119 else { // restart with new table
1201     tab = (Node[])ek;
1202     continue retry;
1203     }
1204     }
1205 dl 1.149 else if (eh == h && (ev = e.val) != null &&
1206 dl 1.119 ((ek = e.key) == k || k.equals(ek)))
1207 dl 1.149 return (V)ev;
1208 dl 1.119 }
1209     break;
1210     }
1211     return null;
1212 tim 1.1 }
1213    
1214     /**
1215 dl 1.119 * Implementation for the four public remove/replace methods:
1216     * Replaces node value with v, conditional upon match of cv if
1217     * non-null. If resulting value is null, delete.
1218     */
1219 dl 1.149 @SuppressWarnings("unchecked") private final V internalReplace
1220     (Object k, V v, Object cv) {
1221 dl 1.119 int h = spread(k.hashCode());
1222     Object oldVal = null;
1223     for (Node[] tab = table;;) {
1224     Node f; int i, fh; Object fk;
1225     if (tab == null ||
1226     (f = tabAt(tab, i = (tab.length - 1) & h)) == null)
1227     break;
1228 dl 1.149 else if ((fh = f.hash) < 0) {
1229 dl 1.119 if ((fk = f.key) instanceof TreeBin) {
1230     TreeBin t = (TreeBin)fk;
1231     boolean validated = false;
1232     boolean deleted = false;
1233     t.acquire(0);
1234     try {
1235     if (tabAt(tab, i) == f) {
1236     validated = true;
1237     TreeNode p = t.getTreeNode(h, k, t.root);
1238     if (p != null) {
1239     Object pv = p.val;
1240     if (cv == null || cv == pv || cv.equals(pv)) {
1241     oldVal = pv;
1242     if ((p.val = v) == null) {
1243     deleted = true;
1244     t.deleteTreeNode(p);
1245     }
1246     }
1247     }
1248     }
1249     } finally {
1250     t.release(0);
1251     }
1252     if (validated) {
1253     if (deleted)
1254 dl 1.149 addCount(-1L, -1);
1255 dl 1.119 break;
1256     }
1257     }
1258     else
1259     tab = (Node[])fk;
1260     }
1261 dl 1.149 else if (fh != h && f.next == null) // precheck
1262 dl 1.119 break; // rules out possible existence
1263 dl 1.149 else {
1264 dl 1.119 boolean validated = false;
1265     boolean deleted = false;
1266 jsr166 1.150 synchronized (f) {
1267 dl 1.119 if (tabAt(tab, i) == f) {
1268     validated = true;
1269     for (Node e = f, pred = null;;) {
1270     Object ek, ev;
1271 dl 1.149 if (e.hash == h &&
1272 dl 1.119 ((ev = e.val) != null) &&
1273     ((ek = e.key) == k || k.equals(ek))) {
1274     if (cv == null || cv == ev || cv.equals(ev)) {
1275     oldVal = ev;
1276     if ((e.val = v) == null) {
1277     deleted = true;
1278     Node en = e.next;
1279     if (pred != null)
1280     pred.next = en;
1281     else
1282     setTabAt(tab, i, en);
1283     }
1284     }
1285     break;
1286     }
1287     pred = e;
1288     if ((e = e.next) == null)
1289     break;
1290     }
1291     }
1292     }
1293     if (validated) {
1294     if (deleted)
1295 dl 1.149 addCount(-1L, -1);
1296 dl 1.119 break;
1297     }
1298     }
1299     }
1300 dl 1.149 return (V)oldVal;
1301 dl 1.55 }
1302    
1303 dl 1.119 /*
1304 dl 1.149 * Internal versions of insertion methods
1305     * All have the same basic structure as the first (internalPut):
1306 dl 1.119 * 1. If table uninitialized, create
1307     * 2. If bin empty, try to CAS new node
1308     * 3. If bin stale, use new table
1309     * 4. if bin converted to TreeBin, validate and relay to TreeBin methods
1310     * 5. Lock and validate; if valid, scan and add or update
1311 tim 1.1 *
1312 dl 1.149 * The putAll method differs mainly in attempting to pre-allocate
1313     * enough table space, and also more lazily performs count updates
1314     * and checks.
1315     *
1316     * Most of the function-accepting methods can't be factored nicely
1317     * because they require different functional forms, so instead
1318     * sprawl out similar mechanics.
1319 tim 1.1 */
1320    
1321 dl 1.149 /** Implementation for put and putIfAbsent */
1322     @SuppressWarnings("unchecked") private final V internalPut
1323     (K k, V v, boolean onlyIfAbsent) {
1324     if (k == null || v == null) throw new NullPointerException();
1325 dl 1.119 int h = spread(k.hashCode());
1326 dl 1.149 int len = 0;
1327 dl 1.119 for (Node[] tab = table;;) {
1328 dl 1.149 int i, fh; Node f; Object fk, fv;
1329 dl 1.119 if (tab == null)
1330     tab = initTable();
1331     else if ((f = tabAt(tab, i = (tab.length - 1) & h)) == null) {
1332     if (casTabAt(tab, i, null, new Node(h, k, v, null)))
1333     break; // no lock when adding to empty bin
1334 dl 1.99 }
1335 dl 1.149 else if ((fh = f.hash) < 0) {
1336 dl 1.119 if ((fk = f.key) instanceof TreeBin) {
1337     TreeBin t = (TreeBin)fk;
1338     Object oldVal = null;
1339     t.acquire(0);
1340     try {
1341     if (tabAt(tab, i) == f) {
1342 dl 1.149 len = 2;
1343 dl 1.119 TreeNode p = t.putTreeNode(h, k, v);
1344     if (p != null) {
1345     oldVal = p.val;
1346 dl 1.149 if (!onlyIfAbsent)
1347     p.val = v;
1348 dl 1.119 }
1349     }
1350     } finally {
1351     t.release(0);
1352     }
1353 dl 1.149 if (len != 0) {
1354 dl 1.119 if (oldVal != null)
1355 dl 1.149 return (V)oldVal;
1356 dl 1.119 break;
1357     }
1358     }
1359     else
1360     tab = (Node[])fk;
1361     }
1362 dl 1.149 else if (onlyIfAbsent && fh == h && (fv = f.val) != null &&
1363     ((fk = f.key) == k || k.equals(fk))) // peek while nearby
1364     return (V)fv;
1365     else {
1366 dl 1.119 Object oldVal = null;
1367 jsr166 1.150 synchronized (f) {
1368 dl 1.119 if (tabAt(tab, i) == f) {
1369 dl 1.149 len = 1;
1370     for (Node e = f;; ++len) {
1371 dl 1.119 Object ek, ev;
1372 dl 1.149 if (e.hash == h &&
1373 dl 1.119 (ev = e.val) != null &&
1374     ((ek = e.key) == k || k.equals(ek))) {
1375     oldVal = ev;
1376 dl 1.149 if (!onlyIfAbsent)
1377     e.val = v;
1378 dl 1.119 break;
1379     }
1380     Node last = e;
1381     if ((e = e.next) == null) {
1382     last.next = new Node(h, k, v, null);
1383 dl 1.149 if (len >= TREE_THRESHOLD)
1384 dl 1.119 replaceWithTreeBin(tab, i, k);
1385     break;
1386     }
1387     }
1388     }
1389     }
1390 dl 1.149 if (len != 0) {
1391 dl 1.119 if (oldVal != null)
1392 dl 1.149 return (V)oldVal;
1393 dl 1.119 break;
1394     }
1395     }
1396 dl 1.45 }
1397 dl 1.149 addCount(1L, len);
1398 dl 1.119 return null;
1399 dl 1.21 }
1400    
1401 dl 1.119 /** Implementation for computeIfAbsent */
1402 dl 1.149 @SuppressWarnings("unchecked") private final V internalComputeIfAbsent
1403     (K k, Fun<? super K, ?> mf) {
1404     if (k == null || mf == null)
1405     throw new NullPointerException();
1406 dl 1.119 int h = spread(k.hashCode());
1407     Object val = null;
1408 dl 1.149 int len = 0;
1409 dl 1.119 for (Node[] tab = table;;) {
1410 dl 1.149 Node f; int i; Object fk;
1411 dl 1.119 if (tab == null)
1412     tab = initTable();
1413     else if ((f = tabAt(tab, i = (tab.length - 1) & h)) == null) {
1414 dl 1.149 Node node = new Node(h, k, null, null);
1415 jsr166 1.150 synchronized (node) {
1416 dl 1.149 if (casTabAt(tab, i, null, node)) {
1417     len = 1;
1418     try {
1419     if ((val = mf.apply(k)) != null)
1420     node.val = val;
1421     } finally {
1422     if (val == null)
1423     setTabAt(tab, i, null);
1424 dl 1.119 }
1425     }
1426     }
1427 dl 1.149 if (len != 0)
1428 dl 1.119 break;
1429     }
1430 dl 1.149 else if (f.hash < 0) {
1431 dl 1.119 if ((fk = f.key) instanceof TreeBin) {
1432     TreeBin t = (TreeBin)fk;
1433     boolean added = false;
1434     t.acquire(0);
1435     try {
1436     if (tabAt(tab, i) == f) {
1437 dl 1.149 len = 1;
1438 dl 1.119 TreeNode p = t.getTreeNode(h, k, t.root);
1439     if (p != null)
1440     val = p.val;
1441     else if ((val = mf.apply(k)) != null) {
1442     added = true;
1443 dl 1.149 len = 2;
1444 dl 1.119 t.putTreeNode(h, k, val);
1445     }
1446     }
1447     } finally {
1448     t.release(0);
1449     }
1450 dl 1.149 if (len != 0) {
1451 dl 1.119 if (!added)
1452 dl 1.149 return (V)val;
1453 dl 1.119 break;
1454     }
1455     }
1456     else
1457     tab = (Node[])fk;
1458     }
1459     else {
1460 dl 1.149 for (Node e = f; e != null; e = e.next) { // prescan
1461     Object ek, ev;
1462     if (e.hash == h && (ev = e.val) != null &&
1463     ((ek = e.key) == k || k.equals(ek)))
1464     return (V)ev;
1465 dl 1.119 }
1466 dl 1.149 boolean added = false;
1467 jsr166 1.150 synchronized (f) {
1468 dl 1.149 if (tabAt(tab, i) == f) {
1469     len = 1;
1470     for (Node e = f;; ++len) {
1471     Object ek, ev;
1472     if (e.hash == h &&
1473     (ev = e.val) != null &&
1474     ((ek = e.key) == k || k.equals(ek))) {
1475     val = ev;
1476     break;
1477     }
1478     Node last = e;
1479     if ((e = e.next) == null) {
1480     if ((val = mf.apply(k)) != null) {
1481     added = true;
1482     last.next = new Node(h, k, val, null);
1483     if (len >= TREE_THRESHOLD)
1484     replaceWithTreeBin(tab, i, k);
1485 dl 1.119 }
1486 dl 1.149 break;
1487 dl 1.119 }
1488     }
1489     }
1490 dl 1.149 }
1491     if (len != 0) {
1492     if (!added)
1493     return (V)val;
1494     break;
1495 dl 1.119 }
1496 dl 1.105 }
1497 dl 1.99 }
1498 dl 1.149 if (val != null)
1499     addCount(1L, len);
1500     return (V)val;
1501 tim 1.1 }
1502    
1503 dl 1.119 /** Implementation for compute */
1504 dl 1.149 @SuppressWarnings("unchecked") private final V internalCompute
1505     (K k, boolean onlyIfPresent,
1506     BiFun<? super K, ? super V, ? extends V> mf) {
1507     if (k == null || mf == null)
1508     throw new NullPointerException();
1509 dl 1.119 int h = spread(k.hashCode());
1510     Object val = null;
1511     int delta = 0;
1512 dl 1.149 int len = 0;
1513 dl 1.119 for (Node[] tab = table;;) {
1514     Node f; int i, fh; Object fk;
1515     if (tab == null)
1516     tab = initTable();
1517     else if ((f = tabAt(tab, i = (tab.length - 1) & h)) == null) {
1518     if (onlyIfPresent)
1519     break;
1520 dl 1.149 Node node = new Node(h, k, null, null);
1521 jsr166 1.150 synchronized (node) {
1522 dl 1.149 if (casTabAt(tab, i, null, node)) {
1523     try {
1524     len = 1;
1525     if ((val = mf.apply(k, null)) != null) {
1526     node.val = val;
1527     delta = 1;
1528     }
1529     } finally {
1530     if (delta == 0)
1531     setTabAt(tab, i, null);
1532 dl 1.119 }
1533     }
1534     }
1535 dl 1.149 if (len != 0)
1536 dl 1.119 break;
1537     }
1538 dl 1.149 else if ((fh = f.hash) < 0) {
1539 dl 1.119 if ((fk = f.key) instanceof TreeBin) {
1540     TreeBin t = (TreeBin)fk;
1541     t.acquire(0);
1542     try {
1543     if (tabAt(tab, i) == f) {
1544 dl 1.149 len = 1;
1545 dl 1.119 TreeNode p = t.getTreeNode(h, k, t.root);
1546 dl 1.149 if (p == null && onlyIfPresent)
1547     break;
1548 dl 1.119 Object pv = (p == null) ? null : p.val;
1549     if ((val = mf.apply(k, (V)pv)) != null) {
1550     if (p != null)
1551     p.val = val;
1552     else {
1553 dl 1.149 len = 2;
1554 dl 1.119 delta = 1;
1555     t.putTreeNode(h, k, val);
1556     }
1557     }
1558     else if (p != null) {
1559     delta = -1;
1560     t.deleteTreeNode(p);
1561     }
1562     }
1563     } finally {
1564     t.release(0);
1565     }
1566 dl 1.149 if (len != 0)
1567 dl 1.119 break;
1568     }
1569     else
1570     tab = (Node[])fk;
1571     }
1572 dl 1.149 else {
1573 jsr166 1.150 synchronized (f) {
1574 dl 1.119 if (tabAt(tab, i) == f) {
1575 dl 1.149 len = 1;
1576     for (Node e = f, pred = null;; ++len) {
1577 dl 1.119 Object ek, ev;
1578 dl 1.149 if (e.hash == h &&
1579 dl 1.119 (ev = e.val) != null &&
1580     ((ek = e.key) == k || k.equals(ek))) {
1581     val = mf.apply(k, (V)ev);
1582     if (val != null)
1583     e.val = val;
1584     else {
1585     delta = -1;
1586     Node en = e.next;
1587     if (pred != null)
1588     pred.next = en;
1589     else
1590     setTabAt(tab, i, en);
1591     }
1592     break;
1593     }
1594     pred = e;
1595     if ((e = e.next) == null) {
1596 dl 1.149 if (!onlyIfPresent &&
1597     (val = mf.apply(k, null)) != null) {
1598 dl 1.119 pred.next = new Node(h, k, val, null);
1599     delta = 1;
1600 dl 1.149 if (len >= TREE_THRESHOLD)
1601 dl 1.119 replaceWithTreeBin(tab, i, k);
1602     }
1603     break;
1604     }
1605     }
1606     }
1607     }
1608 dl 1.149 if (len != 0)
1609 dl 1.119 break;
1610     }
1611     }
1612 dl 1.149 if (delta != 0)
1613     addCount((long)delta, len);
1614     return (V)val;
1615 dl 1.119 }
1616    
1617 dl 1.126 /** Implementation for merge */
1618 dl 1.149 @SuppressWarnings("unchecked") private final V internalMerge
1619 dl 1.128 (K k, V v, BiFun<? super V, ? super V, ? extends V> mf) {
1620 dl 1.149 if (k == null || v == null || mf == null)
1621     throw new NullPointerException();
1622 dl 1.119 int h = spread(k.hashCode());
1623     Object val = null;
1624     int delta = 0;
1625 dl 1.149 int len = 0;
1626 dl 1.119 for (Node[] tab = table;;) {
1627 dl 1.149 int i; Node f; Object fk, fv;
1628 dl 1.119 if (tab == null)
1629     tab = initTable();
1630     else if ((f = tabAt(tab, i = (tab.length - 1) & h)) == null) {
1631     if (casTabAt(tab, i, null, new Node(h, k, v, null))) {
1632     delta = 1;
1633     val = v;
1634     break;
1635     }
1636     }
1637 dl 1.149 else if (f.hash < 0) {
1638 dl 1.119 if ((fk = f.key) instanceof TreeBin) {
1639     TreeBin t = (TreeBin)fk;
1640     t.acquire(0);
1641     try {
1642     if (tabAt(tab, i) == f) {
1643 dl 1.149 len = 1;
1644 dl 1.119 TreeNode p = t.getTreeNode(h, k, t.root);
1645     val = (p == null) ? v : mf.apply((V)p.val, v);
1646     if (val != null) {
1647     if (p != null)
1648     p.val = val;
1649     else {
1650 dl 1.149 len = 2;
1651 dl 1.119 delta = 1;
1652     t.putTreeNode(h, k, val);
1653     }
1654     }
1655     else if (p != null) {
1656     delta = -1;
1657     t.deleteTreeNode(p);
1658     }
1659     }
1660     } finally {
1661     t.release(0);
1662     }
1663 dl 1.149 if (len != 0)
1664 dl 1.119 break;
1665     }
1666     else
1667     tab = (Node[])fk;
1668     }
1669 dl 1.149 else {
1670 jsr166 1.150 synchronized (f) {
1671 dl 1.119 if (tabAt(tab, i) == f) {
1672 dl 1.149 len = 1;
1673     for (Node e = f, pred = null;; ++len) {
1674 dl 1.119 Object ek, ev;
1675 dl 1.149 if (e.hash == h &&
1676 dl 1.119 (ev = e.val) != null &&
1677     ((ek = e.key) == k || k.equals(ek))) {
1678 dl 1.149 val = mf.apply((V)ev, v);
1679 dl 1.119 if (val != null)
1680     e.val = val;
1681     else {
1682     delta = -1;
1683     Node en = e.next;
1684     if (pred != null)
1685     pred.next = en;
1686     else
1687     setTabAt(tab, i, en);
1688     }
1689     break;
1690     }
1691     pred = e;
1692     if ((e = e.next) == null) {
1693     val = v;
1694     pred.next = new Node(h, k, val, null);
1695     delta = 1;
1696 dl 1.149 if (len >= TREE_THRESHOLD)
1697 dl 1.119 replaceWithTreeBin(tab, i, k);
1698     break;
1699     }
1700     }
1701     }
1702     }
1703 dl 1.149 if (len != 0)
1704 dl 1.119 break;
1705 dl 1.105 }
1706 dl 1.99 }
1707 dl 1.149 if (delta != 0)
1708     addCount((long)delta, len);
1709     return (V)val;
1710 dl 1.119 }
1711    
1712     /** Implementation for putAll */
1713     private final void internalPutAll(Map<?, ?> m) {
1714     tryPresize(m.size());
1715     long delta = 0L; // number of uncommitted additions
1716     boolean npe = false; // to throw exception on exit for nulls
1717     try { // to clean up counts on other exceptions
1718     for (Map.Entry<?, ?> entry : m.entrySet()) {
1719     Object k, v;
1720     if (entry == null || (k = entry.getKey()) == null ||
1721     (v = entry.getValue()) == null) {
1722     npe = true;
1723     break;
1724     }
1725     int h = spread(k.hashCode());
1726     for (Node[] tab = table;;) {
1727     int i; Node f; int fh; Object fk;
1728     if (tab == null)
1729     tab = initTable();
1730     else if ((f = tabAt(tab, i = (tab.length - 1) & h)) == null){
1731     if (casTabAt(tab, i, null, new Node(h, k, v, null))) {
1732     ++delta;
1733     break;
1734     }
1735     }
1736 dl 1.149 else if ((fh = f.hash) < 0) {
1737 dl 1.119 if ((fk = f.key) instanceof TreeBin) {
1738     TreeBin t = (TreeBin)fk;
1739     boolean validated = false;
1740     t.acquire(0);
1741     try {
1742     if (tabAt(tab, i) == f) {
1743     validated = true;
1744     TreeNode p = t.getTreeNode(h, k, t.root);
1745     if (p != null)
1746     p.val = v;
1747     else {
1748     t.putTreeNode(h, k, v);
1749     ++delta;
1750     }
1751     }
1752     } finally {
1753     t.release(0);
1754     }
1755     if (validated)
1756     break;
1757     }
1758     else
1759     tab = (Node[])fk;
1760     }
1761 dl 1.149 else {
1762     int len = 0;
1763 jsr166 1.150 synchronized (f) {
1764 dl 1.119 if (tabAt(tab, i) == f) {
1765 dl 1.149 len = 1;
1766     for (Node e = f;; ++len) {
1767 dl 1.119 Object ek, ev;
1768 dl 1.149 if (e.hash == h &&
1769 dl 1.119 (ev = e.val) != null &&
1770     ((ek = e.key) == k || k.equals(ek))) {
1771     e.val = v;
1772     break;
1773     }
1774     Node last = e;
1775     if ((e = e.next) == null) {
1776     ++delta;
1777     last.next = new Node(h, k, v, null);
1778 dl 1.149 if (len >= TREE_THRESHOLD)
1779 dl 1.119 replaceWithTreeBin(tab, i, k);
1780     break;
1781     }
1782     }
1783     }
1784     }
1785 dl 1.149 if (len != 0) {
1786     if (len > 1)
1787     addCount(delta, len);
1788 dl 1.119 break;
1789 dl 1.99 }
1790 dl 1.21 }
1791     }
1792 dl 1.45 }
1793     } finally {
1794 dl 1.149 if (delta != 0L)
1795     addCount(delta, 2);
1796 dl 1.45 }
1797 dl 1.119 if (npe)
1798     throw new NullPointerException();
1799 tim 1.1 }
1800 dl 1.19
1801 dl 1.149 /**
1802     * Implementation for clear. Steps through each bin, removing all
1803     * nodes.
1804     */
1805     private final void internalClear() {
1806     long delta = 0L; // negative number of deletions
1807     int i = 0;
1808     Node[] tab = table;
1809     while (tab != null && i < tab.length) {
1810     Node f = tabAt(tab, i);
1811     if (f == null)
1812     ++i;
1813     else if (f.hash < 0) {
1814     Object fk;
1815     if ((fk = f.key) instanceof TreeBin) {
1816     TreeBin t = (TreeBin)fk;
1817     t.acquire(0);
1818     try {
1819     if (tabAt(tab, i) == f) {
1820     for (Node p = t.first; p != null; p = p.next) {
1821     if (p.val != null) { // (currently always true)
1822     p.val = null;
1823     --delta;
1824     }
1825     }
1826     t.first = null;
1827     t.root = null;
1828     ++i;
1829     }
1830     } finally {
1831     t.release(0);
1832     }
1833     }
1834     else
1835     tab = (Node[])fk;
1836     }
1837     else {
1838 jsr166 1.150 synchronized (f) {
1839 dl 1.149 if (tabAt(tab, i) == f) {
1840     for (Node e = f; e != null; e = e.next) {
1841     if (e.val != null) { // (currently always true)
1842     e.val = null;
1843     --delta;
1844     }
1845     }
1846     setTabAt(tab, i, null);
1847     ++i;
1848     }
1849     }
1850     }
1851     }
1852     if (delta != 0L)
1853     addCount(delta, -1);
1854     }
1855    
1856 dl 1.119 /* ---------------- Table Initialization and Resizing -------------- */
1857    
1858 tim 1.1 /**
1859 dl 1.119 * Returns a power of two table size for the given desired capacity.
1860     * See Hacker's Delight, sec 3.2
1861 tim 1.1 */
1862 dl 1.119 private static final int tableSizeFor(int c) {
1863     int n = c - 1;
1864     n |= n >>> 1;
1865     n |= n >>> 2;
1866     n |= n >>> 4;
1867     n |= n >>> 8;
1868     n |= n >>> 16;
1869     return (n < 0) ? 1 : (n >= MAXIMUM_CAPACITY) ? MAXIMUM_CAPACITY : n + 1;
1870 tim 1.1 }
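    // Illustrative results: tableSizeFor returns the smallest power of two
    // >= c, clamped to the range [1, MAXIMUM_CAPACITY]; for example
    //   tableSizeFor(0)  == 1     tableSizeFor(10) == 16
    //   tableSizeFor(16) == 16    tableSizeFor(17) == 32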
1871    
1872     /**
1873 dl 1.119 * Initializes table, using the size recorded in sizeCtl.
1874     */
1875     private final Node[] initTable() {
1876     Node[] tab; int sc;
1877     while ((tab = table) == null) {
1878     if ((sc = sizeCtl) < 0)
1879     Thread.yield(); // lost initialization race; just spin
1880 dl 1.149 else if (U.compareAndSwapInt(this, SIZECTL, sc, -1)) {
1881 dl 1.119 try {
1882     if ((tab = table) == null) {
1883     int n = (sc > 0) ? sc : DEFAULT_CAPACITY;
1884     tab = table = new Node[n];
1885     sc = n - (n >>> 2);
1886     }
1887     } finally {
1888     sizeCtl = sc;
1889     }
1890     break;
1891     }
1892     }
1893     return tab;
1894 dl 1.4 }
1895    
1896     /**
1897 dl 1.149 * Adds to count, and if table is too small and not already
1898     * resizing, initiates transfer. If already resizing, helps
1899     * perform transfer if work is available. Rechecks occupancy
1900     * after a transfer to see if another resize is already needed
1901     * because resizings are lagging additions.
1902     *
1903     * @param x the count to add
1904     * @param check if <0, don't check resize, if <= 1 only check if uncontended
1905     */
1906     private final void addCount(long x, int check) {
1907     CounterCell[] as; long b, s;
1908     if ((as = counterCells) != null ||
1909     !U.compareAndSwapLong(this, BASECOUNT, b = baseCount, s = b + x)) {
1910     CounterHashCode hc; CounterCell a; long v; int m;
1911     boolean uncontended = true;
1912     if ((hc = threadCounterHashCode.get()) == null ||
1913     as == null || (m = as.length - 1) < 0 ||
1914     (a = as[m & hc.code]) == null ||
1915     !(uncontended =
1916     U.compareAndSwapLong(a, CELLVALUE, v = a.value, v + x))) {
1917     fullAddCount(x, hc, uncontended);
1918     return;
1919     }
1920     if (check <= 1)
1921     return;
1922     s = sumCount();
1923     }
1924     if (check >= 0) {
1925     Node[] tab, nt; int sc;
1926     while (s >= (long)(sc = sizeCtl) && (tab = table) != null &&
1927     tab.length < MAXIMUM_CAPACITY) {
1928     if (sc < 0) {
1929     if (sc == -1 || transferIndex <= transferOrigin ||
1930     (nt = nextTable) == null)
1931     break;
1932     if (U.compareAndSwapInt(this, SIZECTL, sc, sc - 1))
1933     transfer(tab, nt);
1934 dl 1.119 }
1935 dl 1.149 else if (U.compareAndSwapInt(this, SIZECTL, sc, -2))
1936     transfer(tab, null);
1937     s = sumCount();
1938 dl 1.119 }
1939     }
1940 dl 1.4 }
1941    
1942     /**
1943 dl 1.119 * Tries to presize table to accommodate the given number of elements.
1944 tim 1.1 *
1945 dl 1.119 * @param size number of elements (doesn't need to be perfectly accurate)
1946 tim 1.1 */
1947 dl 1.119 private final void tryPresize(int size) {
1948     int c = (size >= (MAXIMUM_CAPACITY >>> 1)) ? MAXIMUM_CAPACITY :
1949     tableSizeFor(size + (size >>> 1) + 1);
1950     int sc;
1951     while ((sc = sizeCtl) >= 0) {
1952     Node[] tab = table; int n;
1953     if (tab == null || (n = tab.length) == 0) {
1954     n = (sc > c) ? sc : c;
1955 dl 1.149 if (U.compareAndSwapInt(this, SIZECTL, sc, -1)) {
1956 dl 1.119 try {
1957     if (table == tab) {
1958     table = new Node[n];
1959     sc = n - (n >>> 2);
1960     }
1961     } finally {
1962     sizeCtl = sc;
1963     }
1964     }
1965     }
1966     else if (c <= sc || n >= MAXIMUM_CAPACITY)
1967     break;
1968 dl 1.149 else if (tab == table &&
1969     U.compareAndSwapInt(this, SIZECTL, sc, -2))
1970     transfer(tab, null);
1971 dl 1.119 }
1972 dl 1.4 }
1973    
1974 dl 1.119 /*
1975     * Moves and/or copies the nodes in each bin to new table. See
1976     * above for explanation.
1977 dl 1.4 */
1978 dl 1.149 private final void transfer(Node[] tab, Node[] nextTab) {
1979     int n = tab.length, stride;
1980     if ((stride = (NCPU > 1) ? (n >>> 3) / NCPU : n) < MIN_TRANSFER_STRIDE)
1981     stride = MIN_TRANSFER_STRIDE; // subdivide range
1982     if (nextTab == null) { // initiating
1983     try {
1984     nextTab = new Node[n << 1];
1985 jsr166 1.150 } catch (Throwable ex) { // try to cope with OOME
1986 dl 1.149 sizeCtl = Integer.MAX_VALUE;
1987     return;
1988     }
1989     nextTable = nextTab;
1990     transferOrigin = n;
1991     transferIndex = n;
1992     Node rev = new Node(MOVED, tab, null, null);
1993     for (int k = n; k > 0;) { // progressively reveal ready slots
1994 jsr166 1.150 int nextk = (k > stride) ? k - stride : 0;
1995 dl 1.149 for (int m = nextk; m < k; ++m)
1996     nextTab[m] = rev;
1997     for (int m = n + nextk; m < n + k; ++m)
1998     nextTab[m] = rev;
1999     U.putOrderedInt(this, TRANSFERORIGIN, k = nextk);
2000     }
2001     }
2002     int nextn = nextTab.length;
2003 dl 1.119 Node fwd = new Node(MOVED, nextTab, null, null);
2004 dl 1.149 boolean advance = true;
2005     for (int i = 0, bound = 0;;) {
2006     int nextIndex, nextBound; Node f; Object fk;
2007     while (advance) {
2008     if (--i >= bound)
2009     advance = false;
2010     else if ((nextIndex = transferIndex) <= transferOrigin) {
2011     i = -1;
2012     advance = false;
2013     }
2014     else if (U.compareAndSwapInt
2015     (this, TRANSFERINDEX, nextIndex,
2016 jsr166 1.150 nextBound = (nextIndex > stride ?
2017 dl 1.149 nextIndex - stride : 0))) {
2018     bound = nextBound;
2019     i = nextIndex - 1;
2020     advance = false;
2021     }
2022     }
2023     if (i < 0 || i >= n || i + n >= nextn) {
2024     for (int sc;;) {
2025     if (U.compareAndSwapInt(this, SIZECTL, sc = sizeCtl, ++sc)) {
2026     if (sc == -1) {
2027     nextTable = null;
2028     table = nextTab;
2029     sizeCtl = (n << 1) - (n >>> 1);
2030     }
2031     return;
2032     }
2033     }
2034     }
2035     else if ((f = tabAt(tab, i)) == null) {
2036     if (casTabAt(tab, i, null, fwd)) {
2037 dl 1.119 setTabAt(nextTab, i, null);
2038     setTabAt(nextTab, i + n, null);
2039 dl 1.149 advance = true;
2040     }
2041     }
2042     else if (f.hash >= 0) {
2043 jsr166 1.150 synchronized (f) {
2044 dl 1.149 if (tabAt(tab, i) == f) {
2045     int runBit = f.hash & n;
2046     Node lastRun = f, lo = null, hi = null;
2047     for (Node p = f.next; p != null; p = p.next) {
2048     int b = p.hash & n;
2049     if (b != runBit) {
2050     runBit = b;
2051     lastRun = p;
2052     }
2053     }
2054     if (runBit == 0)
2055     lo = lastRun;
2056     else
2057     hi = lastRun;
2058     for (Node p = f; p != lastRun; p = p.next) {
2059     int ph = p.hash;
2060     Object pk = p.key, pv = p.val;
2061     if ((ph & n) == 0)
2062     lo = new Node(ph, pk, pv, lo);
2063     else
2064     hi = new Node(ph, pk, pv, hi);
2065     }
2066     setTabAt(nextTab, i, lo);
2067     setTabAt(nextTab, i + n, hi);
2068     setTabAt(tab, i, fwd);
2069     advance = true;
2070 dl 1.119 }
2071     }
2072     }
2073 dl 1.149 else if ((fk = f.key) instanceof TreeBin) {
2074     TreeBin t = (TreeBin)fk;
2075     t.acquire(0);
2076     try {
2077     if (tabAt(tab, i) == f) {
2078     TreeBin lt = new TreeBin();
2079     TreeBin ht = new TreeBin();
2080     int lc = 0, hc = 0;
2081     for (Node e = t.first; e != null; e = e.next) {
2082     int h = e.hash;
2083     Object k = e.key, v = e.val;
2084     if ((h & n) == 0) {
2085     ++lc;
2086     lt.putTreeNode(h, k, v);
2087     }
2088     else {
2089     ++hc;
2090     ht.putTreeNode(h, k, v);
2091     }
2092     }
2093     Node ln, hn; // throw away trees if too small
2094     if (lc < TREE_THRESHOLD) {
2095     ln = null;
2096     for (Node p = lt.first; p != null; p = p.next)
2097     ln = new Node(p.hash, p.key, p.val, ln);
2098     }
2099     else
2100     ln = new Node(MOVED, lt, null, null);
2101     setTabAt(nextTab, i, ln);
2102     if (hc < TREE_THRESHOLD) {
2103     hn = null;
2104     for (Node p = ht.first; p != null; p = p.next)
2105     hn = new Node(p.hash, p.key, p.val, hn);
2106 dl 1.119 }
2107 dl 1.149 else
2108     hn = new Node(MOVED, ht, null, null);
2109     setTabAt(nextTab, i + n, hn);
2110 dl 1.119 setTabAt(tab, i, fwd);
2111 dl 1.149 advance = true;
2112 dl 1.119 }
2113     } finally {
2114 dl 1.149 t.release(0);
2115 dl 1.119 }
2116     }
2117     else
2118 dl 1.149 advance = true; // already processed
2119 dl 1.119 }
2120 dl 1.4 }
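    /*
     * Illustrative arithmetic for the lo/hi split performed above, assuming
     * an old table of length n = 16 doubling to 32: every node in old bin i
     * satisfies (hash & 15) == i, so (hash & 16) is either 0 or 16 and the
     * node moves to new bin i ("lo") or new bin i + 16 ("hi") respectively.
     * For example, hashes 0x05 and 0x15 both live in old bin 5; after the
     * transfer, 0x05 (0x05 & 16 == 0) sits in new bin 5 while 0x15
     * (0x15 & 16 == 16) sits in new bin 21.
     */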
2121 tim 1.1
2122 dl 1.149 /* ---------------- Counter support -------------- */
2123    
2124     final long sumCount() {
2125     CounterCell[] as = counterCells; CounterCell a;
2126     long sum = baseCount;
2127     if (as != null) {
2128     for (int i = 0; i < as.length; ++i) {
2129     if ((a = as[i]) != null)
2130     sum += a.value;
2131 dl 1.119 }
2132     }
2133 dl 1.149 return sum;
2134 dl 1.119 }
2135    
2136 dl 1.149 // See LongAdder version for explanation
2137     private final void fullAddCount(long x, CounterHashCode hc,
2138     boolean wasUncontended) {
2139     int h;
2140     if (hc == null) {
2141     hc = new CounterHashCode();
2142     int s = counterHashCodeGenerator.addAndGet(SEED_INCREMENT);
2143     h = hc.code = (s == 0) ? 1 : s; // Avoid zero
2144     threadCounterHashCode.set(hc);
2145 dl 1.119 }
2146     else
2147 dl 1.149 h = hc.code;
2148     boolean collide = false; // True if last slot nonempty
2149     for (;;) {
2150     CounterCell[] as; CounterCell a; int n; long v;
2151     if ((as = counterCells) != null && (n = as.length) > 0) {
2152     if ((a = as[(n - 1) & h]) == null) {
2153     if (counterBusy == 0) { // Try to attach new Cell
2154     CounterCell r = new CounterCell(x); // Optimistic create
2155     if (counterBusy == 0 &&
2156     U.compareAndSwapInt(this, COUNTERBUSY, 0, 1)) {
2157     boolean created = false;
2158     try { // Recheck under lock
2159     CounterCell[] rs; int m, j;
2160     if ((rs = counterCells) != null &&
2161     (m = rs.length) > 0 &&
2162     rs[j = (m - 1) & h] == null) {
2163     rs[j] = r;
2164     created = true;
2165 dl 1.128 }
2166 dl 1.149 } finally {
2167     counterBusy = 0;
2168 dl 1.119 }
2169 dl 1.149 if (created)
2170     break;
2171     continue; // Slot is now non-empty
2172     }
2173     }
2174     collide = false;
2175     }
2176     else if (!wasUncontended) // CAS already known to fail
2177     wasUncontended = true; // Continue after rehash
2178     else if (U.compareAndSwapLong(a, CELLVALUE, v = a.value, v + x))
2179     break;
2180     else if (counterCells != as || n >= NCPU)
2181     collide = false; // At max size or stale
2182     else if (!collide)
2183     collide = true;
2184     else if (counterBusy == 0 &&
2185     U.compareAndSwapInt(this, COUNTERBUSY, 0, 1)) {
2186     try {
2187     if (counterCells == as) {// Expand table unless stale
2188     CounterCell[] rs = new CounterCell[n << 1];
2189     for (int i = 0; i < n; ++i)
2190     rs[i] = as[i];
2191     counterCells = rs;
2192 dl 1.119 }
2193     } finally {
2194 dl 1.149 counterBusy = 0;
2195 dl 1.119 }
2196 dl 1.149 collide = false;
2197     continue; // Retry with expanded table
2198 dl 1.119 }
2199 dl 1.149 h ^= h << 13; // Rehash
2200     h ^= h >>> 17;
2201     h ^= h << 5;
2202     }
2203     else if (counterBusy == 0 && counterCells == as &&
2204     U.compareAndSwapInt(this, COUNTERBUSY, 0, 1)) {
2205     boolean init = false;
2206     try { // Initialize table
2207     if (counterCells == as) {
2208     CounterCell[] rs = new CounterCell[2];
2209     rs[h & 1] = new CounterCell(x);
2210     counterCells = rs;
2211     init = true;
2212 dl 1.119 }
2213     } finally {
2214 dl 1.149 counterBusy = 0;
2215 dl 1.119 }
2216 dl 1.149 if (init)
2217     break;
2218 dl 1.119 }
2219 dl 1.149 else if (U.compareAndSwapLong(this, BASECOUNT, v = baseCount, v + x))
2220     break; // Fall back on using base
2221 dl 1.119 }
2222 dl 1.149 hc.code = h; // Record index for next time
2223 dl 1.119 }
2224    
2225     /* ----------------Table Traversal -------------- */
2226    
2227     /**
2228     * Encapsulates traversal for methods such as containsValue; also
2229 dl 1.126 * serves as a base class for other iterators and bulk tasks.
2230 dl 1.119 *
2231     * At each step, the iterator snapshots the key ("nextKey") and
2232     * value ("nextVal") of a valid node (i.e., one that, at point of
2233     * snapshot, has a non-null user value). Because val fields can
2234     * change (including to null, indicating deletion), field nextVal
2235     * might not be accurate at point of use, but still maintains the
2236     * weak consistency property of holding a value that was once
2237 dl 1.137 * valid. To support iterator.remove, the nextKey field is not
2238     * updated (nulled out) when the iterator cannot advance.
2239 dl 1.119 *
2240     * Internal traversals directly access these fields, as in:
2241     * {@code while (it.advance() != null) { process(it.nextKey); }}
2242     *
2243     * Exported iterators must track whether the iterator has advanced
2244     * (in hasNext vs next) (by setting/checking/nulling field
2245     * nextVal), and then extract key, value, or key-value pairs as
2246     * return values of next().
2247     *
2248     * The iterator visits once each still-valid node that was
2249     * reachable upon iterator construction. It might miss some that
2250     * were added to a bin after the bin was visited, which is OK wrt
2251     * consistency guarantees. Maintaining this property in the face
2252     * of possible ongoing resizes requires a fair amount of
2253     * bookkeeping state that is difficult to optimize away amidst
2254     * volatile accesses. Even so, traversal maintains reasonable
2255     * throughput.
2256     *
2257     * Normally, iteration proceeds bin-by-bin traversing lists.
2258     * However, if the table has been resized, then all future steps
2259     * must traverse both the bin at the current index as well as at
2260     * (index + baseSize); and so on for further resizings. To
2261     * paranoically cope with potential sharing by users of iterators
2262     * across threads, iteration terminates if a bounds check fails
2263     * for a table read.
2264     *
2265 dl 1.146 * This class extends CountedCompleter to streamline parallel
2266     * iteration in bulk operations. This adds only a few fields of
2267     * space overhead, which is small enough not to matter in cases
2268     * where it is not needed. Because CountedCompleter is
2269     * Serializable, but iterators need not be, we need to add warning
2270     * suppressions.
2271 dl 1.119 */
2272 dl 1.149 @SuppressWarnings("serial") static class Traverser<K,V,R>
2273     extends CountedCompleter<R> {
2274 dl 1.119 final ConcurrentHashMap<K, V> map;
2275     Node next; // the next entry to use
2276     Object nextKey; // cached key field of next
2277     Object nextVal; // cached val field of next
2278     Node[] tab; // current table; updated if resized
2279     int index; // index of bin to use next
2280     int baseIndex; // current index of initial table
2281     int baseLimit; // index bound for initial table
2282 dl 1.130 int baseSize; // initial table size
2283 dl 1.146 int batch; // split control
2284 dl 1.119
2285     /** Creates iterator for all entries in the table. */
2286     Traverser(ConcurrentHashMap<K, V> map) {
2287 dl 1.130 this.map = map;
2288 dl 1.119 }
2289    
2290 dl 1.146 /** Creates iterator for split() methods and task constructors */
2291     Traverser(ConcurrentHashMap<K,V> map, Traverser<K,V,?> it, int batch) {
2292     super(it);
2293     this.batch = batch;
2294     if ((this.map = map) != null && it != null) { // split parent
2295     Node[] t;
2296     if ((t = it.tab) == null &&
2297     (t = it.tab = map.table) != null)
2298     it.baseLimit = it.baseSize = t.length;
2299     this.tab = t;
2300     this.baseSize = it.baseSize;
2301     int hi = this.baseLimit = it.baseLimit;
2302     it.baseLimit = this.index = this.baseIndex =
2303     (hi + it.baseIndex + 1) >>> 1;
2304     }
2305 dl 1.119 }
2306    
2307     /**
2308     * Advances next; returns nextVal or null if terminated.
2309     * See above for explanation.
2310     */
2311     final Object advance() {
2312 dl 1.137 Node e = next;
2313 dl 1.119 Object ev = null;
2314     outer: do {
2315     if (e != null) // advance past used/skipped node
2316     e = e.next;
2317     while (e == null) { // get to next non-null bin
2318 dl 1.130 ConcurrentHashMap<K, V> m;
2319 dl 1.119 Node[] t; int b, i, n; Object ek; // checks must use locals
2320 dl 1.130 if ((t = tab) != null)
2321     n = t.length;
2322     else if ((m = map) != null && (t = tab = m.table) != null)
2323     n = baseLimit = baseSize = t.length;
2324     else
2325 dl 1.119 break outer;
2326 dl 1.130 if ((b = baseIndex) >= baseLimit ||
2327     (i = index) < 0 || i >= n)
2328     break outer;
2329 dl 1.149 if ((e = tabAt(t, i)) != null && e.hash < 0) {
2330 dl 1.119 if ((ek = e.key) instanceof TreeBin)
2331     e = ((TreeBin)ek).first;
2332     else {
2333     tab = (Node[])ek;
2334     continue; // restarts due to null val
2335     }
2336     } // visit upper slots if present
2337     index = (i += baseSize) < n ? i : (baseIndex = b + 1);
2338     }
2339     nextKey = e.key;
2340     } while ((ev = e.val) == null); // skip deleted or special nodes
2341     next = e;
2342     return nextVal = ev;
2343     }
2344    
2345     public final void remove() {
2346 dl 1.137 Object k = nextKey;
2347     if (k == null && (advance() == null || (k = nextKey) == null))
2348 dl 1.119 throw new IllegalStateException();
2349 dl 1.137 map.internalReplace(k, null, null);
2350 dl 1.119 }
2351    
2352     public final boolean hasNext() {
2353     return nextVal != null || advance() != null;
2354     }
2355    
2356     public final boolean hasMoreElements() { return hasNext(); }
2357 dl 1.146
2358     public void compute() { } // default no-op CountedCompleter body
2359    
2360     /**
2361     * Returns a batch value > 0 if this task should (and must) be
2362     * split; if so, adds to the pending count, and in any case
2363     * updates the batch value. The initial batch value is approx
2364     * exp2 of the number of times (minus one) to split task by
2365     * two before executing leaf action. This value is faster to
2366     * compute and more convenient to use as a guide to splitting
2367     * than is the depth, since it is used while dividing by two
2368     * anyway.
2369     */
2370     final int preSplit() {
2371     ConcurrentHashMap<K, V> m; int b; Node[] t; ForkJoinPool pool;
2372     if ((b = batch) < 0 && (m = map) != null) { // force initialization
2373     if ((t = tab) == null && (t = tab = m.table) != null)
2374     baseLimit = baseSize = t.length;
2375     if (t != null) {
2376 dl 1.149 long n = m.sumCount();
2377 dl 1.146 int par = ((pool = getPool()) == null) ?
2378     ForkJoinPool.getCommonPoolParallelism() :
2379     pool.getParallelism();
2380     int sp = par << 3; // slack of 8
2381     b = (n <= 0L) ? 0 : (n < (long)sp) ? (int)n : sp;
2382     }
2383     }
2384 jsr166 1.147 b = (b <= 1 || baseIndex == baseLimit) ? 0 : (b >>> 1);
2385 dl 1.146 if ((batch = b) > 0)
2386     addToPendingCount(1);
2387     return b;
2388     }
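        // Illustrative arithmetic for the batch computation above, assuming a
        // common pool parallelism of 4: sp = 4 << 3 = 32, so a map with about
        // 1000 mappings yields an initial b of 32, halved here to batch = 16;
        // each later preSplit halves it again until it reaches 0 and the task
        // executes its leaf action without splitting further.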
2389    
2390 dl 1.119 }
2391    
2392     /* ---------------- Public operations -------------- */
2393    
2394     /**
2395     * Creates a new, empty map with the default initial table size (16).
2396     */
2397     public ConcurrentHashMap() {
2398     }
2399    
2400     /**
2401     * Creates a new, empty map with an initial table size
2402     * accommodating the specified number of elements without the need
2403     * to dynamically resize.
2404     *
2405     * @param initialCapacity The implementation performs internal
2406     * sizing to accommodate this many elements.
2407     * @throws IllegalArgumentException if the initial capacity of
2408     * elements is negative
2409     */
2410     public ConcurrentHashMap(int initialCapacity) {
2411     if (initialCapacity < 0)
2412     throw new IllegalArgumentException();
2413     int cap = ((initialCapacity >= (MAXIMUM_CAPACITY >>> 1)) ?
2414     MAXIMUM_CAPACITY :
2415     tableSizeFor(initialCapacity + (initialCapacity >>> 1) + 1));
2416     this.sizeCtl = cap;
2417     }
2418    
2419     /**
2420     * Creates a new map with the same mappings as the given map.
2421     *
2422     * @param m the map
2423     */
2424     public ConcurrentHashMap(Map<? extends K, ? extends V> m) {
2425     this.sizeCtl = DEFAULT_CAPACITY;
2426     internalPutAll(m);
2427     }
2428    
2429     /**
2430     * Creates a new, empty map with an initial table size based on
2431     * the given number of elements ({@code initialCapacity}) and
2432     * initial table density ({@code loadFactor}).
2433     *
2434     * @param initialCapacity the initial capacity. The implementation
2435     * performs internal sizing to accommodate this many elements,
2436     * given the specified load factor.
2437     * @param loadFactor the load factor (table density) for
2438     * establishing the initial table size
2439     * @throws IllegalArgumentException if the initial capacity of
2440     * elements is negative or the load factor is nonpositive
2441     *
2442     * @since 1.6
2443     */
2444     public ConcurrentHashMap(int initialCapacity, float loadFactor) {
2445     this(initialCapacity, loadFactor, 1);
2446     }
2447    
2448     /**
2449     * Creates a new, empty map with an initial table size based on
2450     * the given number of elements ({@code initialCapacity}), table
2451     * density ({@code loadFactor}), and number of concurrently
2452     * updating threads ({@code concurrencyLevel}).
2453     *
2454     * @param initialCapacity the initial capacity. The implementation
2455     * performs internal sizing to accommodate this many elements,
2456     * given the specified load factor.
2457     * @param loadFactor the load factor (table density) for
2458     * establishing the initial table size
2459     * @param concurrencyLevel the estimated number of concurrently
2460     * updating threads. The implementation may use this value as
2461     * a sizing hint.
2462     * @throws IllegalArgumentException if the initial capacity is
2463     * negative or the load factor or concurrencyLevel are
2464     * nonpositive
2465     */
2466     public ConcurrentHashMap(int initialCapacity,
2467     float loadFactor, int concurrencyLevel) {
2468     if (!(loadFactor > 0.0f) || initialCapacity < 0 || concurrencyLevel <= 0)
2469     throw new IllegalArgumentException();
2470     if (initialCapacity < concurrencyLevel) // Use at least as many bins
2471     initialCapacity = concurrencyLevel; // as estimated threads
2472     long size = (long)(1.0 + (long)initialCapacity / loadFactor);
2473     int cap = (size >= (long)MAXIMUM_CAPACITY) ?
2474     MAXIMUM_CAPACITY : tableSizeFor((int)size);
2475     this.sizeCtl = cap;
2476     }
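    // Worked example of the sizing above (illustrative): new
    // ConcurrentHashMap<K,V>(16, 0.75f, 1) computes size = (long)(1.0 + 16/0.75)
    // = 22, and tableSizeFor(22) == 32, so a 32-bin table comfortably holds the
    // requested 16 elements at the given 0.75 load factor.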
2477    
2478     /**
2479 dl 1.137 * Creates a new {@link Set} backed by a ConcurrentHashMap
2480     * from the given type to {@code Boolean.TRUE}.
2481     *
2482     * @return the new set
2483     */
2484     public static <K> KeySetView<K,Boolean> newKeySet() {
2485     return new KeySetView<K,Boolean>(new ConcurrentHashMap<K,Boolean>(),
2486     Boolean.TRUE);
2487     }
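    // Illustrative use of the returned view as a concurrent Set; the set name
    // and the process(...) call are hypothetical:
    //
    //   KeySetView<String,Boolean> seen = ConcurrentHashMap.newKeySet();
    //   if (seen.add("job-42"))   // true only for the first thread to add it
    //       process("job-42");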
2488    
2489     /**
2490     * Creates a new {@link Set} backed by a ConcurrentHashMap
2491     * from the given type to {@code Boolean.TRUE}.
2492     *
2493     * @param initialCapacity The implementation performs internal
2494     * sizing to accommodate this many elements.
2495     * @throws IllegalArgumentException if the initial capacity of
2496     * elements is negative
2497     * @return the new set
2498     */
2499     public static <K> KeySetView<K,Boolean> newKeySet(int initialCapacity) {
2500 dl 1.149 return new KeySetView<K,Boolean>
2501     (new ConcurrentHashMap<K,Boolean>(initialCapacity), Boolean.TRUE);
2502 dl 1.137 }
2503    
2504     /**
2505 dl 1.119 * {@inheritDoc}
2506     */
2507     public boolean isEmpty() {
2508 dl 1.149 return sumCount() <= 0L; // ignore transient negative values
2509 dl 1.119 }
2510    
2511     /**
2512     * {@inheritDoc}
2513     */
2514     public int size() {
2515 dl 1.149 long n = sumCount();
2516 dl 1.119 return ((n < 0L) ? 0 :
2517     (n > (long)Integer.MAX_VALUE) ? Integer.MAX_VALUE :
2518     (int)n);
2519     }
2520    
2521     /**
2522     * Returns the number of mappings. This method should be used
2523     * instead of {@link #size} because a ConcurrentHashMap may
2524     * contain more mappings than can be represented as an int. The
2525 dl 1.146 * value returned is an estimate; the actual count may differ if
2526     * there are concurrent insertions or removals.
2527 dl 1.119 *
2528     * @return the number of mappings
2529     */
2530     public long mappingCount() {
2531 dl 1.149 long n = sumCount();
2532 dl 1.126 return (n < 0L) ? 0L : n; // ignore transient negative values
2533 dl 1.119 }
2534    
2535     /**
2536     * Returns the value to which the specified key is mapped,
2537     * or {@code null} if this map contains no mapping for the key.
2538     *
2539     * <p>More formally, if this map contains a mapping from a key
2540     * {@code k} to a value {@code v} such that {@code key.equals(k)},
2541     * then this method returns {@code v}; otherwise it returns
2542     * {@code null}. (There can be at most one such mapping.)
2543     *
2544     * @throws NullPointerException if the specified key is null
2545     */
2546 dl 1.149 public V get(Object key) {
2547     return internalGet(key);
2548 dl 1.119 }
2549    
2550     /**
2551 dl 1.129 * Returns the value to which the specified key is mapped,
2552 jsr166 1.133 * or the given defaultValue if this map contains no mapping for the key.
2553 dl 1.129 *
2554     * @param key the key
2555     * @param defaultValue the value to return if this map contains
2556 jsr166 1.134 * no mapping for the given key
2557 dl 1.129 * @return the mapping for the key, if present; else the defaultValue
2558     * @throws NullPointerException if the specified key is null
2559     */
2560 dl 1.149 public V getValueOrDefault(Object key, V defaultValue) {
2561     V v;
2562     return (v = internalGet(key)) == null ? defaultValue : v;
2563 dl 1.129 }
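    // Illustrative contrast with plain get (map name and contents hypothetical):
    //
    //   ConcurrentHashMap<String,Integer> hits = new ConcurrentHashMap<String,Integer>();
    //   hits.get("x");                    // null: no mapping present
    //   hits.getValueOrDefault("x", 0);   // 0: the supplied default
    //   hits.put("x", 3);
    //   hits.getValueOrDefault("x", 0);   // 3: the existing mapping wins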
2564    
2565     /**
2566 dl 1.119 * Tests if the specified object is a key in this table.
2567     *
2568     * @param key possible key
2569     * @return {@code true} if and only if the specified object
2570     * is a key in this table, as determined by the
2571     * {@code equals} method; {@code false} otherwise
2572     * @throws NullPointerException if the specified key is null
2573     */
2574     public boolean containsKey(Object key) {
2575     return internalGet(key) != null;
2576     }
2577    
2578     /**
2579     * Returns {@code true} if this map maps one or more keys to the
2580     * specified value. Note: This method may require a full traversal
2581     * of the map, and is much slower than method {@code containsKey}.
2582     *
2583     * @param value value whose presence in this map is to be tested
2584     * @return {@code true} if this map maps one or more keys to the
2585     * specified value
2586     * @throws NullPointerException if the specified value is null
2587     */
2588     public boolean containsValue(Object value) {
2589     if (value == null)
2590     throw new NullPointerException();
2591     Object v;
2592     Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
2593     while ((v = it.advance()) != null) {
2594     if (v == value || value.equals(v))
2595     return true;
2596     }
2597     return false;
2598     }
2599    
2600     /**
2601     * Legacy method testing if some key maps into the specified value
2602     * in this table. This method is identical in functionality to
2603     * {@link #containsValue}, and exists solely to ensure
2604     * full compatibility with class {@link java.util.Hashtable},
2605     * which supported this method prior to introduction of the
2606     * Java Collections framework.
2607     *
2608     * @param value a value to search for
2609     * @return {@code true} if and only if some key maps to the
2610     * {@code value} argument in this table as
2611     * determined by the {@code equals} method;
2612     * {@code false} otherwise
2613     * @throws NullPointerException if the specified value is null
2614     */
2615     public boolean contains(Object value) {
2616     return containsValue(value);
2617     }
2618    
2619     /**
2620     * Maps the specified key to the specified value in this table.
2621     * Neither the key nor the value can be null.
2622     *
2623 jsr166 1.145 * <p>The value can be retrieved by calling the {@code get} method
2624 dl 1.119 * with a key that is equal to the original key.
2625     *
2626     * @param key key with which the specified value is to be associated
2627     * @param value value to be associated with the specified key
2628     * @return the previous value associated with {@code key}, or
2629     * {@code null} if there was no mapping for {@code key}
2630     * @throws NullPointerException if the specified key or value is null
2631     */
2632 dl 1.149 public V put(K key, V value) {
2633     return internalPut(key, value, false);
2634 dl 1.119 }
2635    
2636     /**
2637     * {@inheritDoc}
2638     *
2639     * @return the previous value associated with the specified key,
2640     * or {@code null} if there was no mapping for the key
2641     * @throws NullPointerException if the specified key or value is null
2642     */
2643 dl 1.149 public V putIfAbsent(K key, V value) {
2644     return internalPut(key, value, true);
2645 dl 1.119 }
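    // Illustrative return values (the map and keys are hypothetical):
    //
    //   ConcurrentHashMap<String,String> m = new ConcurrentHashMap<String,String>();
    //   m.putIfAbsent("k", "first");   // returns null; "k" now maps to "first"
    //   m.putIfAbsent("k", "second");  // returns "first"; mapping unchanged
    //   m.put("k", "third");           // returns "first"; mapping replaced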
2646    
2647     /**
2648     * Copies all of the mappings from the specified map to this one.
2649     * These mappings replace any mappings that this map had for any of the
2650     * keys currently in the specified map.
2651     *
2652     * @param m mappings to be stored in this map
2653     */
2654     public void putAll(Map<? extends K, ? extends V> m) {
2655     internalPutAll(m);
2656     }
2657    
2658     /**
2659     * If the specified key is not already associated with a value,
2660     * computes its value using the given mappingFunction and enters
2661     * it into the map unless null. This is equivalent to
2662     * <pre> {@code
2663     * if (map.containsKey(key))
2664     * return map.get(key);
2665     * value = mappingFunction.apply(key);
2666     * if (value != null)
2667     * map.put(key, value);
2668     * return value;}</pre>
2669     *
2670     * except that the action is performed atomically. If the
2671     * function returns {@code null} no mapping is recorded. If the
2672     * function itself throws an (unchecked) exception, the exception
2673     * is rethrown to its caller, and no mapping is recorded. Some
2674     * attempted update operations on this map by other threads may be
2675     * blocked while computation is in progress, so the computation
2676     * should be short and simple, and must not attempt to update any
2677     * other mappings of this Map. The most appropriate usage is to
2678     * construct a new object serving as an initial mapped value, or
2679     * memoized result, as in:
2680     *
2681     * <pre> {@code
2682     * map.computeIfAbsent(key, new Fun<K, V>() {
2683     * public V apply(K k) { return new Value(f(k)); }});}</pre>
2684     *
2685     * @param key key with which the specified value is to be associated
2686     * @param mappingFunction the function to compute a value
2687     * @return the current (existing or computed) value associated with
2688 jsr166 1.134 * the specified key, or null if the computed value is null
2689 dl 1.119 * @throws NullPointerException if the specified key or mappingFunction
2690     * is null
2691     * @throws IllegalStateException if the computation detectably
2692     * attempts a recursive update to this map that would
2693     * otherwise never complete
2694     * @throws RuntimeException or Error if the mappingFunction does so,
2695     * in which case the mapping is left unestablished
2696     */
2697 dl 1.149 public V computeIfAbsent
2698 dl 1.128 (K key, Fun<? super K, ? extends V> mappingFunction) {
2699 dl 1.149 return internalComputeIfAbsent(key, mappingFunction);
2700 dl 1.119 }
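    // A concrete version of the memoization idiom sketched in the javadoc
    // above; the expensiveFib helper is hypothetical, and Fun's single method
    // is apply, as invoked by the internal implementation:
    //
    //   ConcurrentHashMap<Integer,Long> memo = new ConcurrentHashMap<Integer,Long>();
    //   long f = memo.computeIfAbsent(30, new Fun<Integer,Long>() {
    //       public Long apply(Integer n) { return expensiveFib(n); }});
    //   // later lookups for key 30 reuse the cached value without recomputation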
2701    
2702     /**
2703     * If the given key is present, computes a new mapping value given a key and
2704     * its current mapped value. This is equivalent to
2705     * <pre> {@code
2706     * if (map.containsKey(key)) {
2707     * value = remappingFunction.apply(key, map.get(key));
2708     * if (value != null)
2709     * map.put(key, value);
2710     * else
2711     * map.remove(key);
2712     * }
2713     * }</pre>
2714     *
2715     * except that the action is performed atomically. If the
2716     * function returns {@code null}, the mapping is removed. If the
2717     * function itself throws an (unchecked) exception, the exception
2718     * is rethrown to its caller, and the current mapping is left
2719     * unchanged. Some attempted update operations on this map by
2720     * other threads may be blocked while computation is in progress,
2721     * so the computation should be short and simple, and must not
2722 dl 1.149 * attempt to update any other mappings of this Map.
2724 dl 1.119 *
2725     * @param key key with which the specified value is to be associated
2726     * @param remappingFunction the function to compute a value
2727 jsr166 1.123 * @return the new value associated with the specified key, or null if none
2728 dl 1.119 * @throws NullPointerException if the specified key or remappingFunction
2729     * is null
2730     * @throws IllegalStateException if the computation detectably
2731     * attempts a recursive update to this map that would
2732     * otherwise never complete
2733     * @throws RuntimeException or Error if the remappingFunction does so,
2734     * in which case the mapping is unchanged
2735     */
2736 dl 1.149 public V computeIfPresent
2737 dl 1.128 (K key, BiFun<? super K, ? super V, ? extends V> remappingFunction) {
2738 dl 1.149 return internalCompute(key, true, remappingFunction);
2739 dl 1.119 }
2740    
2741     /**
2742     * Computes a new mapping value given a key and
2743     * its current mapped value (or {@code null} if there is no current
2744     * mapping). This is equivalent to
2745     * <pre> {@code
2746     * value = remappingFunction.apply(key, map.get(key));
2747     * if (value != null)
2748     * map.put(key, value);
2749     * else
2750     * map.remove(key);
2751     * }</pre>
2752     *
2753     * except that the action is performed atomically. If the
2754     * function returns {@code null}, the mapping is removed. If the
2755     * function itself throws an (unchecked) exception, the exception
2756     * is rethrown to its caller, and the current mapping is left
2757     * unchanged. Some attempted update operations on this map by
2758     * other threads may be blocked while computation is in progress,
2759     * so the computation should be short and simple, and must not
2760     * attempt to update any other mappings of this Map. For example,
2761     * to either create or append new messages to a value mapping:
2762     *
2763     * <pre> {@code
2764     * Map<Key, String> map = ...;
2765     * final String msg = ...;
2766     * map.compute(key, new BiFun<Key, String, String>() {
2767     * public String apply(Key k, String v) {
2768     * return (v == null) ? msg : v + msg; }});}</pre>
2769     *
2770     * @param key key with which the specified value is to be associated
2771     * @param remappingFunction the function to compute a value
2772 jsr166 1.123 * @return the new value associated with the specified key, or null if none
2773 dl 1.119 * @throws NullPointerException if the specified key or remappingFunction
2774     * is null
2775     * @throws IllegalStateException if the computation detectably
2776     * attempts a recursive update to this map that would
2777     * otherwise never complete
2778     * @throws RuntimeException or Error if the remappingFunction does so,
2779     * in which case the mapping is unchanged
2780     */
2781 dl 1.149 public V compute
2782 dl 1.128 (K key, BiFun<? super K, ? super V, ? extends V> remappingFunction) {
2783 dl 1.149 return internalCompute(key, false, remappingFunction);
2784 dl 1.119 }
2785    
2786     /**
2787     * If the specified key is not already associated
2788     * with a value, associate it with the given value.
2789     * Otherwise, replace the value with the results of
2790     * the given remapping function. This is equivalent to:
2791     * <pre> {@code
2792     * if (!map.containsKey(key))
2793     * map.put(key, value);
2794     * else {
2795     * newValue = remappingFunction.apply(map.get(key), value);
2796     * if (newValue != null)
2797     * map.put(key, newValue);
2798     * else
2799     * map.remove(key);
2800     * }
2801     * }</pre>
2802     * except that the action is performed atomically. If the
2803     * function returns {@code null}, the mapping is removed. If the
2804     * function itself throws an (unchecked) exception, the exception
2805     * is rethrown to its caller, and the current mapping is left
2806     * unchanged. Some attempted update operations on this map by
2807     * other threads may be blocked while computation is in progress,
2808     * so the computation should be short and simple, and must not
2809     * attempt to update any other mappings of this Map.
2810     */
2811 dl 1.149 public V merge
2812     (K key, V value,
2813     BiFun<? super V, ? super V, ? extends V> remappingFunction) {
2814     return internalMerge(key, value, remappingFunction);
2815 dl 1.119 }
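    // Illustrative use for concurrent counting; the counts map and word
    // variable are hypothetical. The remapping function receives the existing
    // value and the supplied value, in that order:
    //
    //   ConcurrentHashMap<String,Integer> counts = new ConcurrentHashMap<String,Integer>();
    //   counts.merge(word, 1, new BiFun<Integer,Integer,Integer>() {
    //       public Integer apply(Integer old, Integer one) { return old + one; }});
    //   // first call maps word to 1; later calls add 1 to the existing count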
2816    
2817     /**
2818     * Removes the key (and its corresponding value) from this map.
2819     * This method does nothing if the key is not in the map.
2820     *
2821     * @param key the key that needs to be removed
2822     * @return the previous value associated with {@code key}, or
2823     * {@code null} if there was no mapping for {@code key}
2824     * @throws NullPointerException if the specified key is null
2825     */
2826 dl 1.149 public V remove(Object key) {
2827     return internalReplace(key, null, null);
2828 dl 1.119 }
2829    
2830     /**
2831     * {@inheritDoc}
2832     *
2833     * @throws NullPointerException if the specified key is null
2834     */
2835     public boolean remove(Object key, Object value) {
2836 dl 1.149 return value != null && internalReplace(key, null, value) != null;
2837 tim 1.1 }
2838 dl 1.31
2839     /**
2840 jsr166 1.68 * {@inheritDoc}
2841     *
2842     * @throws NullPointerException if any of the arguments are null
2843 dl 1.31 */
2844     public boolean replace(K key, V oldValue, V newValue) {
2845 dl 1.119 if (key == null || oldValue == null || newValue == null)
2846 dl 1.31 throw new NullPointerException();
2847 dl 1.119 return internalReplace(key, newValue, oldValue) != null;
2848 dl 1.32 }
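    // Illustrative compare-and-set retry loop built on the method above; map,
    // key, and the non-null-returning transform(...) helper are hypothetical:
    //
    //   for (;;) {
    //       V cur = map.get(key);
    //       if (cur == null || map.replace(key, cur, transform(cur)))
    //           break;   // no mapping, or we swapped the value atomically
    //   }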
2849    
2850     /**
2851 jsr166 1.68 * {@inheritDoc}
2852     *
2853     * @return the previous value associated with the specified key,
2854 dl 1.119 * or {@code null} if there was no mapping for the key
2855 jsr166 1.68 * @throws NullPointerException if the specified key or value is null
2856 dl 1.32 */
2857 dl 1.149 public V replace(K key, V value) {
2858 dl 1.119 if (key == null || value == null)
2859 dl 1.32 throw new NullPointerException();
2860 dl 1.149 return internalReplace(key, value, null);
2861 dl 1.31 }
2862    
2863 tim 1.1 /**
2864 jsr166 1.68 * Removes all of the mappings from this map.
2865 tim 1.1 */
2866     public void clear() {
2867 dl 1.119 internalClear();
2868 tim 1.1 }
2869    
2870     /**
2871 jsr166 1.68 * Returns a {@link Set} view of the keys contained in this map.
2872     * The set is backed by the map, so changes to the map are
2873 dl 1.137 * reflected in the set, and vice-versa.
2874     *
2875     * @return the set view
2876     */
2877     public KeySetView<K,V> keySet() {
2878     KeySetView<K,V> ks = keySet;
2879     return (ks != null) ? ks : (keySet = new KeySetView<K,V>(this, null));
2880     }
2881    
2882     /**
2883     * Returns a {@link Set} view of the keys in this map, using the
2884     * given common mapped value for any additions (i.e., {@link
2885     * Collection#add} and {@link Collection#addAll}). This is of
2886     * course only appropriate if it is acceptable to use the same
2887     * value for all additions from this view.
2888 jsr166 1.68 *
2889 dl 1.137 * @param mappedValue the mapped value to use for any
2890     * additions.
2891     * @return the set view
2892     * @throws NullPointerException if the mappedValue is null
2893 tim 1.1 */
2894 dl 1.137 public KeySetView<K,V> keySet(V mappedValue) {
2895     if (mappedValue == null)
2896     throw new NullPointerException();
2897     return new KeySetView<K,V>(this, mappedValue);
2898 tim 1.1 }
2899    
2900     /**
2901 jsr166 1.68 * Returns a {@link Collection} view of the values contained in this map.
2902     * The collection is backed by the map, so changes to the map are
2903 jsr166 1.143 * reflected in the collection, and vice-versa.
2904 tim 1.1 */
2905 dl 1.142 public ValuesView<K,V> values() {
2906     ValuesView<K,V> vs = values;
2907     return (vs != null) ? vs : (values = new ValuesView<K,V>(this));
2908 dl 1.119 }
2909    
2910     /**
2911     * Returns a {@link Set} view of the mappings contained in this map.
2912     * The set is backed by the map, so changes to the map are
2913     * reflected in the set, and vice-versa. The set supports element
2914     * removal, which removes the corresponding mapping from the map,
2915     * via the {@code Iterator.remove}, {@code Set.remove},
2916     * {@code removeAll}, {@code retainAll}, and {@code clear}
2917     * operations. It does not support the {@code add} or
2918     * {@code addAll} operations.
2919     *
2920     * <p>The view's {@code iterator} is a "weakly consistent" iterator
2921     * that will never throw {@link ConcurrentModificationException},
2922     * and guarantees to traverse elements as they existed upon
2923     * construction of the iterator, and may (but is not guaranteed to)
2924     * reflect any modifications subsequent to construction.
2925     */
2926     public Set<Map.Entry<K,V>> entrySet() {
2927 dl 1.142 EntrySetView<K,V> es = entrySet;
2928     return (es != null) ? es : (entrySet = new EntrySetView<K,V>(this));
2929 dl 1.119 }
2930    
2931     /**
2932     * Returns an enumeration of the keys in this table.
2933     *
2934     * @return an enumeration of the keys in this table
2935     * @see #keySet()
2936     */
2937     public Enumeration<K> keys() {
2938     return new KeyIterator<K,V>(this);
2939     }
2940    
2941     /**
2942     * Returns an enumeration of the values in this table.
2943     *
2944     * @return an enumeration of the values in this table
2945     * @see #values()
2946     */
2947     public Enumeration<V> elements() {
2948     return new ValueIterator<K,V>(this);
2949     }
2950    
2951     /**
2952 jsr166 1.122 * Returns a partitionable iterator of the keys in this map.
2953 dl 1.119 *
2954 jsr166 1.122 * @return a partitionable iterator of the keys in this map
2955 dl 1.119 */
2956     public Spliterator<K> keySpliterator() {
2957     return new KeyIterator<K,V>(this);
2958     }
2959    
2960     /**
2961 jsr166 1.122 * Returns a partitionable iterator of the values in this map.
2962 dl 1.119 *
2963 jsr166 1.122 * @return a partitionable iterator of the values in this map
2964 dl 1.119 */
2965     public Spliterator<V> valueSpliterator() {
2966     return new ValueIterator<K,V>(this);
2967     }
2968    
2969     /**
2970 jsr166 1.122 * Returns a partitionable iterator of the entries in this map.
2971 dl 1.119 *
2972 jsr166 1.122 * @return a partitionable iterator of the entries in this map
2973 dl 1.119 */
2974     public Spliterator<Map.Entry<K,V>> entrySpliterator() {
2975     return new EntryIterator<K,V>(this);
2976     }
2977    
2978     /**
2979     * Returns the hash code value for this {@link Map}, i.e.,
2980     * the sum of, for each key-value pair in the map,
2981     * {@code key.hashCode() ^ value.hashCode()}.
2982     *
2983     * @return the hash code value for this map
2984     */
2985     public int hashCode() {
2986     int h = 0;
2987     Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
2988     Object v;
2989     while ((v = it.advance()) != null) {
2990     h += it.nextKey.hashCode() ^ v.hashCode();
2991     }
2992     return h;
2993     }
2994    
2995     /**
2996     * Returns a string representation of this map. The string
2997     * representation consists of a list of key-value mappings (in no
2998     * particular order) enclosed in braces ("{@code {}}"). Adjacent
2999     * mappings are separated by the characters {@code ", "} (comma
3000     * and space). Each key-value mapping is rendered as the key
3001     * followed by an equals sign ("{@code =}") followed by the
3002     * associated value.
3003     *
3004     * @return a string representation of this map
3005     */
3006     public String toString() {
3007     Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3008     StringBuilder sb = new StringBuilder();
3009     sb.append('{');
3010     Object v;
3011     if ((v = it.advance()) != null) {
3012     for (;;) {
3013     Object k = it.nextKey;
3014     sb.append(k == this ? "(this Map)" : k);
3015     sb.append('=');
3016     sb.append(v == this ? "(this Map)" : v);
3017     if ((v = it.advance()) == null)
3018     break;
3019     sb.append(',').append(' ');
3020     }
3021     }
3022     return sb.append('}').toString();
3023     }
3024    
3025     /**
3026     * Compares the specified object with this map for equality.
3027     * Returns {@code true} if the given object is a map with the same
3028     * mappings as this map. This operation may return misleading
3029     * results if either map is concurrently modified during execution
3030     * of this method.
3031     *
3032     * @param o object to be compared for equality with this map
3033     * @return {@code true} if the specified object is equal to this map
3034     */
3035     public boolean equals(Object o) {
3036     if (o != this) {
3037     if (!(o instanceof Map))
3038     return false;
3039     Map<?,?> m = (Map<?,?>) o;
3040     Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3041     Object val;
3042     while ((val = it.advance()) != null) {
3043     Object v = m.get(it.nextKey);
3044     if (v == null || (v != val && !v.equals(val)))
3045     return false;
3046     }
3047     for (Map.Entry<?,?> e : m.entrySet()) {
3048     Object mk, mv, v;
3049     if ((mk = e.getKey()) == null ||
3050     (mv = e.getValue()) == null ||
3051     (v = internalGet(mk)) == null ||
3052     (mv != v && !mv.equals(v)))
3053     return false;
3054     }
3055     }
3056     return true;
3057     }
3058    
3059     /* ----------------Iterators -------------- */
3060    
3061 dl 1.149 @SuppressWarnings("serial") static final class KeyIterator<K,V>
3062     extends Traverser<K,V,Object>
3063 dl 1.119 implements Spliterator<K>, Enumeration<K> {
3064     KeyIterator(ConcurrentHashMap<K, V> map) { super(map); }
3065 dl 1.146 KeyIterator(ConcurrentHashMap<K, V> map, Traverser<K,V,Object> it) {
3066     super(map, it, -1);
3067 dl 1.119 }
3068     public KeyIterator<K,V> split() {
3069 dl 1.137 if (nextKey != null)
3070 dl 1.119 throw new IllegalStateException();
3071 dl 1.146 return new KeyIterator<K,V>(map, this);
3072 dl 1.119 }
3073 dl 1.128 @SuppressWarnings("unchecked") public final K next() {
3074 dl 1.119 if (nextVal == null && advance() == null)
3075     throw new NoSuchElementException();
3076     Object k = nextKey;
3077     nextVal = null;
3078     return (K) k;
3079     }
3080    
3081     public final K nextElement() { return next(); }
3082     }
3083    
3084 dl 1.149 @SuppressWarnings("serial") static final class ValueIterator<K,V>
3085     extends Traverser<K,V,Object>
3086 dl 1.119 implements Spliterator<V>, Enumeration<V> {
3087     ValueIterator(ConcurrentHashMap<K, V> map) { super(map); }
3088 dl 1.146 ValueIterator(ConcurrentHashMap<K, V> map, Traverser<K,V,Object> it) {
3089     super(map, it, -1);
3090 dl 1.119 }
3091     public ValueIterator<K,V> split() {
3092 dl 1.137 if (nextKey != null)
3093 dl 1.119 throw new IllegalStateException();
3094 dl 1.146 return new ValueIterator<K,V>(map, this);
3095 dl 1.119 }
3096    
3097 dl 1.128 @SuppressWarnings("unchecked") public final V next() {
3098 dl 1.119 Object v;
3099     if ((v = nextVal) == null && (v = advance()) == null)
3100     throw new NoSuchElementException();
3101     nextVal = null;
3102     return (V) v;
3103     }
3104    
3105     public final V nextElement() { return next(); }
3106     }
3107    
3108 dl 1.149 @SuppressWarnings("serial") static final class EntryIterator<K,V>
3109     extends Traverser<K,V,Object>
3110 dl 1.119 implements Spliterator<Map.Entry<K,V>> {
3111     EntryIterator(ConcurrentHashMap<K, V> map) { super(map); }
3112 dl 1.146 EntryIterator(ConcurrentHashMap<K, V> map, Traverser<K,V,Object> it) {
3113     super(map, it, -1);
3114 dl 1.119 }
3115     public EntryIterator<K,V> split() {
3116 dl 1.137 if (nextKey != null)
3117 dl 1.119 throw new IllegalStateException();
3118 dl 1.146 return new EntryIterator<K,V>(map, this);
3119 dl 1.119 }
3120    
3121 dl 1.128 @SuppressWarnings("unchecked") public final Map.Entry<K,V> next() {
3122 dl 1.119 Object v;
3123     if ((v = nextVal) == null && (v = advance()) == null)
3124     throw new NoSuchElementException();
3125     Object k = nextKey;
3126     nextVal = null;
3127     return new MapEntry<K,V>((K)k, (V)v, map);
3128     }
3129     }
3130    
3131     /**
3132     * Exported Entry for iterators
3133     */
3134     static final class MapEntry<K,V> implements Map.Entry<K, V> {
3135     final K key; // non-null
3136     V val; // non-null
3137     final ConcurrentHashMap<K, V> map;
3138     MapEntry(K key, V val, ConcurrentHashMap<K, V> map) {
3139     this.key = key;
3140     this.val = val;
3141     this.map = map;
3142     }
3143     public final K getKey() { return key; }
3144     public final V getValue() { return val; }
3145     public final int hashCode() { return key.hashCode() ^ val.hashCode(); }
3146     public final String toString() { return key + "=" + val; }
3147    
3148     public final boolean equals(Object o) {
3149     Object k, v; Map.Entry<?,?> e;
3150     return ((o instanceof Map.Entry) &&
3151     (k = (e = (Map.Entry<?,?>)o).getKey()) != null &&
3152     (v = e.getValue()) != null &&
3153     (k == key || k.equals(key)) &&
3154     (v == val || v.equals(val)));
3155     }
3156    
3157     /**
3158     * Sets our entry's value and writes through to the map. The
3159     * value to return is somewhat arbitrary here. Since we do not
3160     * necessarily track asynchronous changes, the most recent
3161     * "previous" value could be different from what we return (or
3162     * could even have been removed, in which case the put will
3163     * re-establish it). We do not and cannot guarantee more.
3164     */
3165     public final V setValue(V value) {
3166     if (value == null) throw new NullPointerException();
3167     V v = val;
3168     val = value;
3169     map.put(key, value);
3170     return v;
3171     }
3172     }
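
    // Illustrative note (not part of the original source): entries obtained from
    // entrySet() iterators write through to the map, as described above; "m" is a
    // hypothetical ConcurrentHashMap<String,Long>.
    //
    //   for (Map.Entry<String,Long> e : m.entrySet()) {
    //       if (e.getValue().longValue() < 0L)
    //           e.setValue(Long.valueOf(0L));   // updates m as well
    //   }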
3173    
3174 dl 1.146 /**
3175     * Returns exportable snapshot entry for the given key and value
3176     * when write-through can't or shouldn't be used.
3177     */
3178     static <K,V> AbstractMap.SimpleEntry<K,V> entryFor(K k, V v) {
3179     return new AbstractMap.SimpleEntry<K,V>(k, v);
3180     }
3181    
3182 dl 1.142 /* ---------------- Serialization Support -------------- */
3183 dl 1.119
3184     /**
3185 dl 1.142 * Stripped-down version of helper class used in previous version,
3186     * declared for the sake of serialization compatibility.
3187 dl 1.119 */
3188 dl 1.142 static class Segment<K,V> implements Serializable {
3189     private static final long serialVersionUID = 2249069246763182397L;
3190     final float loadFactor;
3191     Segment(float lf) { this.loadFactor = lf; }
3192     }
3193 dl 1.119
3194 dl 1.142 /**
3195     * Saves the state of the {@code ConcurrentHashMap} instance to a
3196     * stream (i.e., serializes it).
3197     * @param s the stream
3198     * @serialData
3199     * the key (Object) and value (Object)
3200     * for each key-value mapping, followed by a null pair.
3201     * The key-value mappings are emitted in no particular order.
3202     */
3203 dl 1.149 @SuppressWarnings("unchecked") private void writeObject
3204     (java.io.ObjectOutputStream s)
3205 dl 1.142 throws java.io.IOException {
3206     if (segments == null) { // for serialization compatibility
3207     segments = (Segment<K,V>[])
3208     new Segment<?,?>[DEFAULT_CONCURRENCY_LEVEL];
3209     for (int i = 0; i < segments.length; ++i)
3210     segments[i] = new Segment<K,V>(LOAD_FACTOR);
3211     }
3212     s.defaultWriteObject();
3213     Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3214     Object v;
3215     while ((v = it.advance()) != null) {
3216     s.writeObject(it.nextKey);
3217     s.writeObject(v);
3218     }
3219     s.writeObject(null);
3220     s.writeObject(null);
3221     segments = null; // throw away
3222     }
3223 dl 1.119
3224 dl 1.142 /**
3225     * Reconstitutes the instance from a stream (that is, deserializes it).
3226     * @param s the stream
3227     */
3228 dl 1.149 @SuppressWarnings("unchecked") private void readObject
3229     (java.io.ObjectInputStream s)
3230 dl 1.142 throws java.io.IOException, ClassNotFoundException {
3231     s.defaultReadObject();
3232     this.segments = null; // unneeded
3233 dl 1.119
3234 dl 1.142 // Create all nodes, then place in table once size is known
3235     long size = 0L;
3236     Node p = null;
3237     for (;;) {
3238     K k = (K) s.readObject();
3239     V v = (V) s.readObject();
3240     if (k != null && v != null) {
3241     int h = spread(k.hashCode());
3242     p = new Node(h, k, v, p);
3243     ++size;
3244 dl 1.119 }
3245 dl 1.142 else
3246     break;
3247 dl 1.119 }
3248 dl 1.142 if (p != null) {
3249     boolean init = false;
3250     int n;
3251     if (size >= (long)(MAXIMUM_CAPACITY >>> 1))
3252     n = MAXIMUM_CAPACITY;
3253     else {
3254     int sz = (int)size;
3255     n = tableSizeFor(sz + (sz >>> 1) + 1);
3256     }
3257     int sc = sizeCtl;
3258     boolean collide = false;
3259     if (n > sc &&
3260 dl 1.149 U.compareAndSwapInt(this, SIZECTL, sc, -1)) {
3261 dl 1.142 try {
3262     if (table == null) {
3263     init = true;
3264     Node[] tab = new Node[n];
3265     int mask = n - 1;
3266     while (p != null) {
3267     int j = p.hash & mask;
3268     Node next = p.next;
3269     Node q = p.next = tabAt(tab, j);
3270     setTabAt(tab, j, p);
3271     if (!collide && q != null && q.hash == p.hash)
3272     collide = true;
3273     p = next;
3274     }
3275     table = tab;
3276 dl 1.149 addCount(size, -1);
3277 dl 1.142 sc = n - (n >>> 2);
3278     }
3279     } finally {
3280     sizeCtl = sc;
3281     }
3282     if (collide) { // rescan and convert to TreeBins
3283     Node[] tab = table;
3284     for (int i = 0; i < tab.length; ++i) {
3285     int c = 0;
3286     for (Node e = tabAt(tab, i); e != null; e = e.next) {
3287     if (++c > TREE_THRESHOLD &&
3288     (e.key instanceof Comparable)) {
3289     replaceWithTreeBin(tab, i, e.key);
3290     break;
3291     }
3292     }
3293     }
3294 dl 1.119 }
3295     }
3296 dl 1.142 if (!init) { // Can only happen if unsafely published.
3297     while (p != null) {
3298 dl 1.149 internalPut((K)p.key, (V)p.val, false);
3299 dl 1.142 p = p.next;
3300     }
3301 dl 1.119 }
3302     }
3303 dl 1.142 }
3304 dl 1.119
3305 dl 1.142 // -------------------------------------------------------
3306    
3307     // SAM (single abstract method) interfaces used by the bulk operations
3308     /** Interface describing a void action of one argument */
3309 dl 1.119 public interface Action<A> { void apply(A a); }
3310     /** Interface describing a void action of two arguments */
3311     public interface BiAction<A,B> { void apply(A a, B b); }
3312     /** Interface describing a function of one argument */
3313     public interface Fun<A,T> { T apply(A a); }
3314     /** Interface describing a function of two arguments */
3315     public interface BiFun<A,B,T> { T apply(A a, B b); }
3316     /** Interface describing a function of no arguments */
3317     public interface Generator<T> { T apply(); }
3318     /** Interface describing a function mapping its argument to a double */
3319     public interface ObjectToDouble<A> { double apply(A a); }
3320     /** Interface describing a function mapping its argument to a long */
3321     public interface ObjectToLong<A> { long apply(A a); }
3322     /** Interface describing a function mapping its argument to an int */
3323     public interface ObjectToInt<A> {int apply(A a); }
3324     /** Interface describing a function mapping two arguments to a double */
3325     public interface ObjectByObjectToDouble<A,B> { double apply(A a, B b); }
3326     /** Interface describing a function mapping two arguments to a long */
3327     public interface ObjectByObjectToLong<A,B> { long apply(A a, B b); }
3328     /** Interface describing a function mapping two arguments to an int */
3329     public interface ObjectByObjectToInt<A,B> {int apply(A a, B b); }
3330     /** Interface describing a function mapping a double to a double */
3331     public interface DoubleToDouble { double apply(double a); }
3332     /** Interface describing a function mapping a long to a long */
3333     public interface LongToLong { long apply(long a); }
3334     /** Interface describing a function mapping an int to an int */
3335     public interface IntToInt { int apply(int a); }
3336     /** Interface describing a function mapping two doubles to a double */
3337     public interface DoubleByDoubleToDouble { double apply(double a, double b); }
3338     /** Interface describing a function mapping two longs to a long */
3339     public interface LongByLongToLong { long apply(long a, long b); }
3340     /** Interface describing a function mapping two ints to an int */
3341     public interface IntByIntToInt { int apply(int a, int b); }
3342    
3343    
3344     // -------------------------------------------------------
3345    
3346     /**
3347 dl 1.137 * Performs the given action for each (key, value).
3348 dl 1.119 *
3349 dl 1.137 * @param action the action
3350 dl 1.119 */
3351 dl 1.137 public void forEach(BiAction<K,V> action) {
3352     ForkJoinTasks.forEach
3353     (this, action).invoke();
3354 dl 1.119 }
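
    // Illustrative sketch (not part of the original source): printing every mapping;
    // the map "m" and its element types are hypothetical.
    //
    //   ConcurrentHashMap<String,Long> m = new ConcurrentHashMap<String,Long>();
    //   m.forEach(new ConcurrentHashMap.BiAction<String,Long>() {
    //       public void apply(String k, Long v) {
    //           System.out.println(k + "=" + v);
    //       }
    //   });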
3355    
3356     /**
3357 dl 1.137 * Performs the given action for each non-null transformation
3358     * of each (key, value).
3359     *
3360     * @param transformer a function returning the transformation
3361     * for an element, or null if there is no transformation (in
3362     * which case the action is not applied).
3363     * @param action the action
3364 dl 1.119 */
3365 dl 1.137 public <U> void forEach(BiFun<? super K, ? super V, ? extends U> transformer,
3366     Action<U> action) {
3367     ForkJoinTasks.forEach
3368     (this, transformer, action).invoke();
3369     }
3370    
3371     /**
3372     * Returns a non-null result from applying the given search
3373     * function on each (key, value), or null if none. Upon
3374     * success, further element processing is suppressed and the
3375     * results of any other parallel invocations of the search
3376     * function are ignored.
3377     *
3378     * @param searchFunction a function returning a non-null
3379     * result on success, else null
3380     * @return a non-null result from applying the given search
3381     * function on each (key, value), or null if none
3382     */
3383     public <U> U search(BiFun<? super K, ? super V, ? extends U> searchFunction) {
3384     return ForkJoinTasks.search
3385     (this, searchFunction).invoke();
3386     }
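
    // Illustrative sketch (not part of the original source): find any key whose value
    // exceeds a threshold, suppressing further traversal once found. Assumes a
    // hypothetical ConcurrentHashMap<String,Long> named "m".
    //
    //   String hit = m.search(new ConcurrentHashMap.BiFun<String,Long,String>() {
    //       public String apply(String k, Long v) {
    //           return v.longValue() > 100L ? k : null;   // null means "keep searching"
    //       }
    //   });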
3387    
3388     /**
3389     * Returns the result of accumulating the given transformation
3390     * of all (key, value) pairs using the given reducer to
3391     * combine values, or null if none.
3392     *
3393     * @param transformer a function returning the transformation
3394     * for an element, or null if there is no transformation (in
3395     * which case it is not combined).
3396     * @param reducer a commutative associative combining function
3397     * @return the result of accumulating the given transformation
3398     * of all (key, value) pairs
3399     */
3400     public <U> U reduce(BiFun<? super K, ? super V, ? extends U> transformer,
3401     BiFun<? super U, ? super U, ? extends U> reducer) {
3402     return ForkJoinTasks.reduce
3403     (this, transformer, reducer).invoke();
3404     }
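
    // Illustrative sketch (not part of the original source): reduce all mappings to
    // the greatest key, using an identity transformation and a commutative,
    // associative "max" reducer. Assumes a hypothetical ConcurrentHashMap<String,Long> "m".
    //
    //   String maxKey = m.reduce(
    //       new ConcurrentHashMap.BiFun<String,Long,String>() {
    //           public String apply(String k, Long v) { return k; }
    //       },
    //       new ConcurrentHashMap.BiFun<String,String,String>() {
    //           public String apply(String x, String y) {
    //               return x.compareTo(y) >= 0 ? x : y;
    //           }
    //       });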
3405    
3406     /**
3407     * Returns the result of accumulating the given transformation
3408     * of all (key, value) pairs using the given reducer to
3409     * combine values, and the given basis as an identity value.
3410     *
3411     * @param transformer a function returning the transformation
3412     * for an element
3413     * @param basis the identity (initial default value) for the reduction
3414     * @param reducer a commutative associative combining function
3415     * @return the result of accumulating the given transformation
3416     * of all (key, value) pairs
3417     */
3418     public double reduceToDouble(ObjectByObjectToDouble<? super K, ? super V> transformer,
3419     double basis,
3420     DoubleByDoubleToDouble reducer) {
3421     return ForkJoinTasks.reduceToDouble
3422     (this, transformer, basis, reducer).invoke();
3423     }
3424 dl 1.119
3425 dl 1.137 /**
3426     * Returns the result of accumulating the given transformation
3427     * of all (key, value) pairs using the given reducer to
3428     * combine values, and the given basis as an identity value.
3429     *
3430     * @param transformer a function returning the transformation
3431     * for an element
3432     * @param basis the identity (initial default value) for the reduction
3433     * @param reducer a commutative associative combining function
3434     * @return the result of accumulating the given transformation
3435     * of all (key, value) pairs
3436     */
3437     public long reduceToLong(ObjectByObjectToLong<? super K, ? super V> transformer,
3438     long basis,
3439     LongByLongToLong reducer) {
3440     return ForkJoinTasks.reduceToLong
3441     (this, transformer, basis, reducer).invoke();
3442     }
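
    // Illustrative sketch (not part of the original source): sum all values as a
    // primitive long, with 0 as the identity. Assumes a hypothetical
    // ConcurrentHashMap<String,Long> named "m".
    //
    //   long total = m.reduceToLong(
    //       new ConcurrentHashMap.ObjectByObjectToLong<String,Long>() {
    //           public long apply(String k, Long v) { return v.longValue(); }
    //       },
    //       0L,                                   // identity for addition
    //       new ConcurrentHashMap.LongByLongToLong() {
    //           public long apply(long a, long b) { return a + b; }
    //       });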
3443    
3444     /**
3445     * Returns the result of accumulating the given transformation
3446     * of all (key, value) pairs using the given reducer to
3447     * combine values, and the given basis as an identity value.
3448     *
3449     * @param transformer a function returning the transformation
3450     * for an element
3451     * @param basis the identity (initial default value) for the reduction
3452     * @param reducer a commutative associative combining function
3453     * @return the result of accumulating the given transformation
3454     * of all (key, value) pairs
3455     */
3456     public int reduceToInt(ObjectByObjectToInt<? super K, ? super V> transformer,
3457     int basis,
3458     IntByIntToInt reducer) {
3459     return ForkJoinTasks.reduceToInt
3460     (this, transformer, basis, reducer).invoke();
3461     }
3462    
3463     /**
3464     * Performs the given action for each key.
3465     *
3466     * @param action the action
3467     */
3468     public void forEachKey(Action<K> action) {
3469     ForkJoinTasks.forEachKey
3470     (this, action).invoke();
3471     }
3472 dl 1.119
3473 dl 1.137 /**
3474     * Performs the given action for each non-null transformation
3475     * of each key.
3476     *
3477     * @param transformer a function returning the transformation
3478     * for an element, or null if there is no transformation (in
3479     * which case the action is not applied).
3480     * @param action the action
3481     */
3482     public <U> void forEachKey(Fun<? super K, ? extends U> transformer,
3483     Action<U> action) {
3484     ForkJoinTasks.forEachKey
3485     (this, transformer, action).invoke();
3486     }
3487 dl 1.119
3488 dl 1.137 /**
3489     * Returns a non-null result from applying the given search
3490     * function on each key, or null if none. Upon success,
3491     * further element processing is suppressed and the results of
3492     * any other parallel invocations of the search function are
3493     * ignored.
3494     *
3495     * @param searchFunction a function returning a non-null
3496     * result on success, else null
3497     * @return a non-null result from applying the given search
3498     * function on each key, or null if none
3499     */
3500     public <U> U searchKeys(Fun<? super K, ? extends U> searchFunction) {
3501     return ForkJoinTasks.searchKeys
3502     (this, searchFunction).invoke();
3503     }
3504 dl 1.119
3505 dl 1.137 /**
3506     * Returns the result of accumulating all keys using the given
3507     * reducer to combine values, or null if none.
3508     *
3509     * @param reducer a commutative associative combining function
3510     * @return the result of accumulating all keys using the given
3511     * reducer to combine values, or null if none
3512     */
3513     public K reduceKeys(BiFun<? super K, ? super K, ? extends K> reducer) {
3514     return ForkJoinTasks.reduceKeys
3515     (this, reducer).invoke();
3516     }
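
    // Illustrative sketch (not part of the original source): reduce the keys to the
    // smallest one, using a commutative, associative "min" reducer. Assumes a
    // hypothetical ConcurrentHashMap<String,Long> named "m".
    //
    //   String firstKey = m.reduceKeys(new ConcurrentHashMap.BiFun<String,String,String>() {
    //       public String apply(String x, String y) {
    //           return x.compareTo(y) <= 0 ? x : y;
    //       }
    //   });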
3517 dl 1.119
3518 dl 1.137 /**
3519     * Returns the result of accumulating the given transformation
3520     * of all keys using the given reducer to combine values, or
3521     * null if none.
3522     *
3523     * @param transformer a function returning the transformation
3524     * for an element, or null if there is no transformation (in
3525     * which case it is not combined).
3526     * @param reducer a commutative associative combining function
3527     * @return the result of accumulating the given transformation
3528     * of all keys
3529     */
3530     public <U> U reduceKeys(Fun<? super K, ? extends U> transformer,
3531 dl 1.119 BiFun<? super U, ? super U, ? extends U> reducer) {
3532 dl 1.137 return ForkJoinTasks.reduceKeys
3533     (this, transformer, reducer).invoke();
3534     }
3535 dl 1.119
3536 dl 1.137 /**
3537     * Returns the result of accumulating the given transformation
3538     * of all keys using the given reducer to combine values, and
3539     * the given basis as an identity value.
3540     *
3541     * @param transformer a function returning the transformation
3542     * for an element
3543     * @param basis the identity (initial default value) for the reduction
3544     * @param reducer a commutative associative combining function
3545     * @return the result of accumulating the given transformation
3546     * of all keys
3547     */
3548     public double reduceKeysToDouble(ObjectToDouble<? super K> transformer,
3549 dl 1.119 double basis,
3550     DoubleByDoubleToDouble reducer) {
3551 dl 1.137 return ForkJoinTasks.reduceKeysToDouble
3552     (this, transformer, basis, reducer).invoke();
3553     }
3554 dl 1.119
3555 dl 1.137 /**
3556     * Returns the result of accumulating the given transformation
3557     * of all keys using the given reducer to combine values, and
3558     * the given basis as an identity value.
3559     *
3560     * @param transformer a function returning the transformation
3561     * for an element
3562     * @param basis the identity (initial default value) for the reduction
3563     * @param reducer a commutative associative combining function
3564     * @return the result of accumulating the given transformation
3565     * of all keys
3566     */
3567     public long reduceKeysToLong(ObjectToLong<? super K> transformer,
3568 dl 1.119 long basis,
3569     LongByLongToLong reducer) {
3570 dl 1.137 return ForkJoinTasks.reduceKeysToLong
3571     (this, transformer, basis, reducer).invoke();
3572     }
3573 dl 1.119
3574 dl 1.137 /**
3575     * Returns the result of accumulating the given transformation
3576     * of all keys using the given reducer to combine values, and
3577     * the given basis as an identity value.
3578     *
3579     * @param transformer a function returning the transformation
3580     * for an element
3581     * @param basis the identity (initial default value) for the reduction
3582     * @param reducer a commutative associative combining function
3583     * @return the result of accumulating the given transformation
3584     * of all keys
3585     */
3586     public int reduceKeysToInt(ObjectToInt<? super K> transformer,
3587 dl 1.119 int basis,
3588     IntByIntToInt reducer) {
3589 dl 1.137 return ForkJoinTasks.reduceKeysToInt
3590     (this, transformer, basis, reducer).invoke();
3591     }
3592 dl 1.119
3593 dl 1.137 /**
3594     * Performs the given action for each value.
3595     *
3596     * @param action the action
3597     */
3598     public void forEachValue(Action<V> action) {
3599     ForkJoinTasks.forEachValue
3600     (this, action).invoke();
3601     }
3602 dl 1.119
3603 dl 1.137 /**
3604     * Performs the given action for each non-null transformation
3605     * of each value.
3606     *
3607     * @param transformer a function returning the transformation
3608     * for an element, or null if there is no transformation (in
3609     * which case the action is not applied).
3610     */
3611     public <U> void forEachValue(Fun<? super V, ? extends U> transformer,
3612     Action<U> action) {
3613     ForkJoinTasks.forEachValue
3614     (this, transformer, action).invoke();
3615     }
3616 dl 1.119
3617 dl 1.137 /**
3618     * Returns a non-null result from applying the given search
3619     * function on each value, or null if none. Upon success,
3620     * further element processing is suppressed and the results of
3621     * any other parallel invocations of the search function are
3622     * ignored.
3623     *
3624     * @param searchFunction a function returning a non-null
3625     * result on success, else null
3626     * @return a non-null result from applying the given search
3627     * function on each value, or null if none
3629     */
3630     public <U> U searchValues(Fun<? super V, ? extends U> searchFunction) {
3631     return ForkJoinTasks.searchValues
3632     (this, searchFunction).invoke();
3633     }
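
    // Illustrative sketch (not part of the original source): return any negative
    // value if one exists, or null otherwise. Assumes a hypothetical
    // ConcurrentHashMap<String,Long> named "m".
    //
    //   Long negative = m.searchValues(new ConcurrentHashMap.Fun<Long,Long>() {
    //       public Long apply(Long v) { return v.longValue() < 0L ? v : null; }
    //   });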
3634 dl 1.119
3635 dl 1.137 /**
3636     * Returns the result of accumulating all values using the
3637     * given reducer to combine values, or null if none.
3638     *
3639     * @param reducer a commutative associative combining function
3640     * @return the result of accumulating all values
3641     */
3642     public V reduceValues(BiFun<? super V, ? super V, ? extends V> reducer) {
3643     return ForkJoinTasks.reduceValues
3644     (this, reducer).invoke();
3645     }
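
    // Illustrative sketch (not part of the original source): reduce the values to
    // their maximum using a commutative, associative reducer. Assumes a hypothetical
    // ConcurrentHashMap<String,Long> named "m".
    //
    //   Long maxValue = m.reduceValues(new ConcurrentHashMap.BiFun<Long,Long,Long>() {
    //       public Long apply(Long x, Long y) {
    //           return x.longValue() >= y.longValue() ? x : y;
    //       }
    //   });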
3646 dl 1.119
3647 dl 1.137 /**
3648     * Returns the result of accumulating the given transformation
3649     * of all values using the given reducer to combine values, or
3650     * null if none.
3651     *
3652     * @param transformer a function returning the transformation
3653     * for an element, or null if there is no transformation (in
3654     * which case it is not combined).
3655     * @param reducer a commutative associative combining function
3656     * @return the result of accumulating the given transformation
3657     * of all values
3658     */
3659     public <U> U reduceValues(Fun<? super V, ? extends U> transformer,
3660     BiFun<? super U, ? super U, ? extends U> reducer) {
3661     return ForkJoinTasks.reduceValues
3662     (this, transformer, reducer).invoke();
3663     }
3664 dl 1.119
3665 dl 1.137 /**
3666     * Returns the result of accumulating the given transformation
3667     * of all values using the given reducer to combine values,
3668     * and the given basis as an identity value.
3669     *
3670     * @param transformer a function returning the transformation
3671     * for an element
3672     * @param basis the identity (initial default value) for the reduction
3673     * @param reducer a commutative associative combining function
3674     * @return the result of accumulating the given transformation
3675     * of all values
3676     */
3677     public double reduceValuesToDouble(ObjectToDouble<? super V> transformer,
3678     double basis,
3679     DoubleByDoubleToDouble reducer) {
3680     return ForkJoinTasks.reduceValuesToDouble
3681     (this, transformer, basis, reducer).invoke();
3682     }
3683 dl 1.119
3684 dl 1.137 /**
3685     * Returns the result of accumulating the given transformation
3686     * of all values using the given reducer to combine values,
3687     * and the given basis as an identity value.
3688     *
3689     * @param transformer a function returning the transformation
3690     * for an element
3691     * @param basis the identity (initial default value) for the reduction
3692     * @param reducer a commutative associative combining function
3693     * @return the result of accumulating the given transformation
3694     * of all values
3695     */
3696     public long reduceValuesToLong(ObjectToLong<? super V> transformer,
3697     long basis,
3698     LongByLongToLong reducer) {
3699     return ForkJoinTasks.reduceValuesToLong
3700     (this, transformer, basis, reducer).invoke();
3701     }
3702 dl 1.119
3703 dl 1.137 /**
3704     * Returns the result of accumulating the given transformation
3705     * of all values using the given reducer to combine values,
3706     * and the given basis as an identity value.
3707     *
3708     * @param transformer a function returning the transformation
3709     * for an element
3710     * @param basis the identity (initial default value) for the reduction
3711     * @param reducer a commutative associative combining function
3712     * @return the result of accumulating the given transformation
3713     * of all values
3714     */
3715     public int reduceValuesToInt(ObjectToInt<? super V> transformer,
3716     int basis,
3717     IntByIntToInt reducer) {
3718     return ForkJoinTasks.reduceValuesToInt
3719     (this, transformer, basis, reducer).invoke();
3720     }
3721 dl 1.119
3722 dl 1.137 /**
3723     * Performs the given action for each entry.
3724     *
3725     * @param action the action
3726     */
3727     public void forEachEntry(Action<Map.Entry<K,V>> action) {
3728     ForkJoinTasks.forEachEntry
3729     (this, action).invoke();
3730     }
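
    // Illustrative sketch (not part of the original source): act on each entry;
    // "m" is a hypothetical ConcurrentHashMap<String,Long>.
    //
    //   m.forEachEntry(new ConcurrentHashMap.Action<Map.Entry<String,Long>>() {
    //       public void apply(Map.Entry<String,Long> e) {
    //           System.out.println(e.getKey() + " -> " + e.getValue());
    //       }
    //   });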
3731 dl 1.119
3732 dl 1.137 /**
3733     * Performs the given action for each non-null transformation
3734     * of each entry.
3735     *
3736     * @param transformer a function returning the transformation
3737     * for an element, or null if there is no transformation (in
3738     * which case the action is not applied).
3739     * @param action the action
3740     */
3741     public <U> void forEachEntry(Fun<Map.Entry<K,V>, ? extends U> transformer,
3742     Action<U> action) {
3743     ForkJoinTasks.forEachEntry
3744     (this, transformer, action).invoke();
3745     }
3746 dl 1.119
3747 dl 1.137 /**
3748     * Returns a non-null result from applying the given search
3749     * function on each entry, or null if none. Upon success,
3750     * further element processing is suppressed and the results of
3751     * any other parallel invocations of the search function are
3752     * ignored.
3753     *
3754     * @param searchFunction a function returning a non-null
3755     * result on success, else null
3756     * @return a non-null result from applying the given search
3757     * function on each entry, or null if none
3758     */
3759     public <U> U searchEntries(Fun<Map.Entry<K,V>, ? extends U> searchFunction) {
3760     return ForkJoinTasks.searchEntries
3761     (this, searchFunction).invoke();
3762     }
3763 dl 1.119
3764 dl 1.137 /**
3765     * Returns the result of accumulating all entries using the
3766     * given reducer to combine values, or null if none.
3767     *
3768     * @param reducer a commutative associative combining function
3769     * @return the result of accumulating all entries
3770     */
3771     public Map.Entry<K,V> reduceEntries(BiFun<Map.Entry<K,V>, Map.Entry<K,V>, ? extends Map.Entry<K,V>> reducer) {
3772     return ForkJoinTasks.reduceEntries
3773     (this, reducer).invoke();
3774     }
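
    // Illustrative sketch (not part of the original source): reduce entries to the
    // one with the greatest value (commutative, associative choice). Assumes a
    // hypothetical ConcurrentHashMap<String,Long> named "m".
    //
    //   Map.Entry<String,Long> maxEntry = m.reduceEntries(
    //       new ConcurrentHashMap.BiFun<Map.Entry<String,Long>,
    //                                   Map.Entry<String,Long>,
    //                                   Map.Entry<String,Long>>() {
    //           public Map.Entry<String,Long> apply(Map.Entry<String,Long> a,
    //                                               Map.Entry<String,Long> b) {
    //               return a.getValue().longValue() >= b.getValue().longValue() ? a : b;
    //           }
    //       });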
3775 dl 1.119
3776 dl 1.137 /**
3777     * Returns the result of accumulating the given transformation
3778     * of all entries using the given reducer to combine values,
3779     * or null if none.
3780     *
3781     * @param transformer a function returning the transformation
3782     * for an element, or null if there is no transformation (in
3783     * which case it is not combined).
3784     * @param reducer a commutative associative combining function
3785     * @return the result of accumulating the given transformation
3786     * of all entries
3787     */
3788     public <U> U reduceEntries(Fun<Map.Entry<K,V>, ? extends U> transformer,
3789     BiFun<? super U, ? super U, ? extends U> reducer) {
3790     return ForkJoinTasks.reduceEntries
3791     (this, transformer, reducer).invoke();
3792     }
3793 dl 1.119
3794 dl 1.137 /**
3795     * Returns the result of accumulating the given transformation
3796     * of all entries using the given reducer to combine values,
3797     * and the given basis as an identity value.
3798     *
3799     * @param transformer a function returning the transformation
3800     * for an element
3801     * @param basis the identity (initial default value) for the reduction
3802     * @param reducer a commutative associative combining function
3803     * @return the result of accumulating the given transformation
3804     * of all entries
3805     */
3806     public double reduceEntriesToDouble(ObjectToDouble<Map.Entry<K,V>> transformer,
3807     double basis,
3808     DoubleByDoubleToDouble reducer) {
3809     return ForkJoinTasks.reduceEntriesToDouble
3810     (this, transformer, basis, reducer).invoke();
3811     }
3812 dl 1.119
3813 dl 1.137 /**
3814     * Returns the result of accumulating the given transformation
3815     * of all entries using the given reducer to combine values,
3816     * and the given basis as an identity value.
3817     *
3818     * @param transformer a function returning the transformation
3819     * for an element
3820     * @param basis the identity (initial default value) for the reduction
3821     * @param reducer a commutative associative combining function
3822     * @return the result of accumulating the given transformation
3823     * of all entries
3824     */
3825     public long reduceEntriesToLong(ObjectToLong<Map.Entry<K,V>> transformer,
3826     long basis,
3827     LongByLongToLong reducer) {
3828     return ForkJoinTasks.reduceEntriesToLong
3829     (this, transformer, basis, reducer).invoke();
3830     }
3831 dl 1.119
3832 dl 1.137 /**
3833     * Returns the result of accumulating the given transformation
3834     * of all entries using the given reducer to combine values,
3835     * and the given basis as an identity value.
3836     *
3837     * @param transformer a function returning the transformation
3838     * for an element
3839     * @param basis the identity (initial default value) for the reduction
3840     * @param reducer a commutative associative combining function
3841     * @return the result of accumulating the given transformation
3842     * of all entries
3843     */
3844     public int reduceEntriesToInt(ObjectToInt<Map.Entry<K,V>> transformer,
3845     int basis,
3846     IntByIntToInt reducer) {
3847     return ForkJoinTasks.reduceEntriesToInt
3848     (this, transformer, basis, reducer).invoke();
3849 dl 1.119 }
3850    
3851 dl 1.142 /* ----------------Views -------------- */
3852    
3853     /**
3854     * Base class for views.
3855     */
3856     abstract static class CHMView<K, V> {
3857     final ConcurrentHashMap<K, V> map;
3858     CHMView(ConcurrentHashMap<K, V> map) { this.map = map; }
3859    
3860     /**
3861     * Returns the map backing this view.
3862     *
3863     * @return the map backing this view
3864     */
3865     public ConcurrentHashMap<K,V> getMap() { return map; }
3866    
3867     public final int size() { return map.size(); }
3868     public final boolean isEmpty() { return map.isEmpty(); }
3869     public final void clear() { map.clear(); }
3870    
3871     // implementations below rely on concrete classes supplying these
3872     public abstract Iterator<?> iterator();
3873     public abstract boolean contains(Object o);
3874     public abstract boolean remove(Object o);
3875    
3876     private static final String oomeMsg = "Required array size too large";
3877    
3878     public final Object[] toArray() {
3879     long sz = map.mappingCount();
3880     if (sz > (long)(MAX_ARRAY_SIZE))
3881     throw new OutOfMemoryError(oomeMsg);
3882     int n = (int)sz;
3883     Object[] r = new Object[n];
3884     int i = 0;
3885     Iterator<?> it = iterator();
3886     while (it.hasNext()) {
3887     if (i == n) {
3888     if (n >= MAX_ARRAY_SIZE)
3889     throw new OutOfMemoryError(oomeMsg);
3890     if (n >= MAX_ARRAY_SIZE - (MAX_ARRAY_SIZE >>> 1) - 1)
3891     n = MAX_ARRAY_SIZE;
3892     else
3893     n += (n >>> 1) + 1;
3894     r = Arrays.copyOf(r, n);
3895     }
3896     r[i++] = it.next();
3897     }
3898     return (i == n) ? r : Arrays.copyOf(r, i);
3899     }
3900    
3901     @SuppressWarnings("unchecked") public final <T> T[] toArray(T[] a) {
3902     long sz = map.mappingCount();
3903     if (sz > (long)(MAX_ARRAY_SIZE))
3904     throw new OutOfMemoryError(oomeMsg);
3905     int m = (int)sz;
3906     T[] r = (a.length >= m) ? a :
3907     (T[])java.lang.reflect.Array
3908     .newInstance(a.getClass().getComponentType(), m);
3909     int n = r.length;
3910     int i = 0;
3911     Iterator<?> it = iterator();
3912     while (it.hasNext()) {
3913     if (i == n) {
3914     if (n >= MAX_ARRAY_SIZE)
3915     throw new OutOfMemoryError(oomeMsg);
3916     if (n >= MAX_ARRAY_SIZE - (MAX_ARRAY_SIZE >>> 1) - 1)
3917     n = MAX_ARRAY_SIZE;
3918     else
3919     n += (n >>> 1) + 1;
3920     r = Arrays.copyOf(r, n);
3921     }
3922     r[i++] = (T)it.next();
3923     }
3924     if (a == r && i < n) {
3925     r[i] = null; // null-terminate
3926     return r;
3927     }
3928     return (i == n) ? r : Arrays.copyOf(r, i);
3929     }
3930    
3931     public final int hashCode() {
3932     int h = 0;
3933     for (Iterator<?> it = iterator(); it.hasNext();)
3934     h += it.next().hashCode();
3935     return h;
3936     }
3937    
3938     public final String toString() {
3939     StringBuilder sb = new StringBuilder();
3940     sb.append('[');
3941     Iterator<?> it = iterator();
3942     if (it.hasNext()) {
3943     for (;;) {
3944     Object e = it.next();
3945     sb.append(e == this ? "(this Collection)" : e);
3946     if (!it.hasNext())
3947     break;
3948     sb.append(',').append(' ');
3949     }
3950     }
3951     return sb.append(']').toString();
3952     }
3953    
3954     public final boolean containsAll(Collection<?> c) {
3955     if (c != this) {
3956     for (Iterator<?> it = c.iterator(); it.hasNext();) {
3957     Object e = it.next();
3958     if (e == null || !contains(e))
3959     return false;
3960     }
3961     }
3962     return true;
3963     }
3964    
3965     public final boolean removeAll(Collection<?> c) {
3966     boolean modified = false;
3967     for (Iterator<?> it = iterator(); it.hasNext();) {
3968     if (c.contains(it.next())) {
3969     it.remove();
3970     modified = true;
3971     }
3972     }
3973     return modified;
3974     }
3975    
3976     public final boolean retainAll(Collection<?> c) {
3977     boolean modified = false;
3978     for (Iterator<?> it = iterator(); it.hasNext();) {
3979     if (!c.contains(it.next())) {
3980     it.remove();
3981     modified = true;
3982     }
3983     }
3984     return modified;
3985     }
3986    
3987     }
3988    
3989     /**
3990     * A view of a ConcurrentHashMap as a {@link Set} of keys, in
3991     * which additions may optionally be enabled by mapping to a
3992     * common value. This class cannot be directly instantiated. See
3993     * {@link #keySet}, {@link #keySet(Object)}, {@link #newKeySet()},
3994     * {@link #newKeySet(int)}.
3995     */
3996 dl 1.149 public static class KeySetView<K,V> extends CHMView<K,V>
3997     implements Set<K>, java.io.Serializable {
3998 dl 1.142 private static final long serialVersionUID = 7249069246763182397L;
3999     private final V value;
4000     KeySetView(ConcurrentHashMap<K, V> map, V value) { // non-public
4001     super(map);
4002     this.value = value;
4003     }
4004    
4005     /**
4006     * Returns the default mapped value for additions,
4007     * or {@code null} if additions are not supported.
4008     *
4009     * @return the default mapped value for additions, or {@code null}
4010     * if not supported.
4011     */
4012     public V getMappedValue() { return value; }
4013    
4014     // implement Set API
4015    
4016     public boolean contains(Object o) { return map.containsKey(o); }
4017     public boolean remove(Object o) { return map.remove(o) != null; }
4018    
4019     /**
4020     * Returns a "weakly consistent" iterator that will never
4021     * throw {@link ConcurrentModificationException}, and
4022     * guarantees to traverse elements as they existed upon
4023     * construction of the iterator, and may (but is not
4024     * guaranteed to) reflect any modifications subsequent to
4025     * construction.
4026     *
4027     * @return an iterator over the keys of this map
4028     */
4029     public Iterator<K> iterator() { return new KeyIterator<K,V>(map); }
4030     public boolean add(K e) {
4031     V v;
4032     if ((v = value) == null)
4033     throw new UnsupportedOperationException();
4034     if (e == null)
4035     throw new NullPointerException();
4036 dl 1.149 return map.internalPut(e, v, true) == null;
4037 dl 1.142 }
4038     public boolean addAll(Collection<? extends K> c) {
4039     boolean added = false;
4040     V v;
4041     if ((v = value) == null)
4042     throw new UnsupportedOperationException();
4043     for (K e : c) {
4044     if (e == null)
4045     throw new NullPointerException();
4046 dl 1.149 if (map.internalPut(e, v, true) == null)
4047 dl 1.142 added = true;
4048     }
4049     return added;
4050     }
4051     public boolean equals(Object o) {
4052     Set<?> c;
4053     return ((o instanceof Set) &&
4054     ((c = (Set<?>)o) == this ||
4055     (containsAll(c) && c.containsAll(this))));
4056     }
4057    
4058     /**
4059     * Performs the given action for each key.
4060     *
4061     * @param action the action
4062     */
4063     public void forEach(Action<K> action) {
4064     ForkJoinTasks.forEachKey
4065     (map, action).invoke();
4066     }
4067    
4068     /**
4069     * Performs the given action for each non-null transformation
4070     * of each key.
4071     *
4072     * @param transformer a function returning the transformation
4073     * for an element, or null if there is no transformation (in
4074     * which case the action is not applied).
4075     * @param action the action
4076     */
4077     public <U> void forEach(Fun<? super K, ? extends U> transformer,
4078     Action<U> action) {
4079     ForkJoinTasks.forEachKey
4080     (map, transformer, action).invoke();
4081     }
4082    
4083     /**
4084     * Returns a non-null result from applying the given search
4085     * function on each key, or null if none. Upon success,
4086     * further element processing is suppressed and the results of
4087     * any other parallel invocations of the search function are
4088     * ignored.
4089     *
4090     * @param searchFunction a function returning a non-null
4091     * result on success, else null
4092     * @return a non-null result from applying the given search
4093     * function on each key, or null if none
4094     */
4095     public <U> U search(Fun<? super K, ? extends U> searchFunction) {
4096     return ForkJoinTasks.searchKeys
4097     (map, searchFunction).invoke();
4098     }
4099    
4100     /**
4101     * Returns the result of accumulating all keys using the given
4102     * reducer to combine values, or null if none.
4103     *
4104     * @param reducer a commutative associative combining function
4105     * @return the result of accumulating all keys using the given
4106     * reducer to combine values, or null if none
4107     */
4108     public K reduce(BiFun<? super K, ? super K, ? extends K> reducer) {
4109     return ForkJoinTasks.reduceKeys
4110     (map, reducer).invoke();
4111     }
4112    
4113     /**
4114     * Returns the result of accumulating the given transformation
4115     * of all keys using the given reducer to combine values, and
4116     * the given basis as an identity value.
4117     *
4118     * @param transformer a function returning the transformation
4119     * for an element
4120     * @param basis the identity (initial default value) for the reduction
4121     * @param reducer a commutative associative combining function
4122     * @return the result of accumulating the given transformation
4123     * of all keys
4124     */
4125     public double reduceToDouble(ObjectToDouble<? super K> transformer,
4126     double basis,
4127     DoubleByDoubleToDouble reducer) {
4128     return ForkJoinTasks.reduceKeysToDouble
4129     (map, transformer, basis, reducer).invoke();
4130     }
4131    
4132     /**
4133     * Returns the result of accumulating the given transformation
4134     * of all keys using the given reducer to combine values, and
4135     * the given basis as an identity value.
4136     *
4137     * @param transformer a function returning the transformation
4138     * for an element
4139     * @param basis the identity (initial default value) for the reduction
4140     * @param reducer a commutative associative combining function
4141     * @return the result of accumulating the given transformation
4142     * of all keys
4143     */
4144     public long reduceToLong(ObjectToLong<? super K> transformer,
4145     long basis,
4146     LongByLongToLong reducer) {
4147     return ForkJoinTasks.reduceKeysToLong
4148     (map, transformer, basis, reducer).invoke();
4149     }
4150 jsr166 1.143
4151 dl 1.142 /**
4152     * Returns the result of accumulating the given transformation
4153     * of all keys using the given reducer to combine values, and
4154     * the given basis as an identity value.
4155     *
4156     * @param transformer a function returning the transformation
4157     * for an element
4158     * @param basis the identity (initial default value) for the reduction
4159     * @param reducer a commutative associative combining function
4160     * @return the result of accumulating the given transformation
4161     * of all keys
4162     */
4163     public int reduceToInt(ObjectToInt<? super K> transformer,
4164     int basis,
4165     IntByIntToInt reducer) {
4166     return ForkJoinTasks.reduceKeysToInt
4167     (map, transformer, basis, reducer).invoke();
4168     }
4169 jsr166 1.143
4170 dl 1.142 }
4171    
4172     /**
4173     * A view of a ConcurrentHashMap as a {@link Collection} of
4174     * values, in which additions are disabled. This class cannot be
4175     * directly instantiated. See {@link #values}.
4176     *
4177     * <p>The view's {@code iterator} is a "weakly consistent" iterator
4178     * that will never throw {@link ConcurrentModificationException},
4179     * and guarantees to traverse elements as they existed upon
4180     * construction of the iterator, and may (but is not guaranteed to)
4181     * reflect any modifications subsequent to construction.
4182     */
4183     public static final class ValuesView<K,V> extends CHMView<K,V>
4184     implements Collection<V> {
4185     ValuesView(ConcurrentHashMap<K, V> map) { super(map); }
4186     public final boolean contains(Object o) { return map.containsValue(o); }
4187     public final boolean remove(Object o) {
4188     if (o != null) {
4189     Iterator<V> it = new ValueIterator<K,V>(map);
4190     while (it.hasNext()) {
4191     if (o.equals(it.next())) {
4192     it.remove();
4193     return true;
4194     }
4195     }
4196     }
4197     return false;
4198     }
4199    
4200     /**
4201     * Returns a "weakly consistent" iterator that will never
4202     * throw {@link ConcurrentModificationException}, and
4203     * guarantees to traverse elements as they existed upon
4204     * construction of the iterator, and may (but is not
4205     * guaranteed to) reflect any modifications subsequent to
4206     * construction.
4207     *
4208     * @return an iterator over the values of this map
4209     */
4210     public final Iterator<V> iterator() {
4211     return new ValueIterator<K,V>(map);
4212     }
4213     public final boolean add(V e) {
4214     throw new UnsupportedOperationException();
4215     }
4216     public final boolean addAll(Collection<? extends V> c) {
4217     throw new UnsupportedOperationException();
4218     }
4219    
4220     /**
4221     * Performs the given action for each value.
4222     *
4223     * @param action the action
4224     */
4225     public void forEach(Action<V> action) {
4226     ForkJoinTasks.forEachValue
4227     (map, action).invoke();
4228     }
4229    
4230     /**
4231     * Performs the given action for each non-null transformation
4232     * of each value.
4233     *
4234     * @param transformer a function returning the transformation
4235     * for an element, or null if there is no transformation (in
4236     * which case the action is not applied).
4237     */
4238     public <U> void forEach(Fun<? super V, ? extends U> transformer,
4239     Action<U> action) {
4240     ForkJoinTasks.forEachValue
4241     (map, transformer, action).invoke();
4242     }
4243    
4244     /**
4245     * Returns a non-null result from applying the given search
4246     * function on each value, or null if none. Upon success,
4247     * further element processing is suppressed and the results of
4248     * any other parallel invocations of the search function are
4249     * ignored.
4250     *
4251     * @param searchFunction a function returning a non-null
4252     * result on success, else null
4253     * @return a non-null result from applying the given search
4254     * function on each value, or null if none
4256     */
4257     public <U> U search(Fun<? super V, ? extends U> searchFunction) {
4258     return ForkJoinTasks.searchValues
4259     (map, searchFunction).invoke();
4260     }
4261    
4262     /**
4263     * Returns the result of accumulating all values using the
4264     * given reducer to combine values, or null if none.
4265     *
4266     * @param reducer a commutative associative combining function
4267     * @return the result of accumulating all values
4268     */
4269     public V reduce(BiFun<? super V, ? super V, ? extends V> reducer) {
4270     return ForkJoinTasks.reduceValues
4271     (map, reducer).invoke();
4272     }
4273    
4274     /**
4275     * Returns the result of accumulating the given transformation
4276     * of all values using the given reducer to combine values, or
4277     * null if none.
4278     *
4279     * @param transformer a function returning the transformation
4280     * for an element, or null if there is no transformation (in
4281     * which case it is not combined).
4282     * @param reducer a commutative associative combining function
4283     * @return the result of accumulating the given transformation
4284     * of all values
4285     */
4286     public <U> U reduce(Fun<? super V, ? extends U> transformer,
4287     BiFun<? super U, ? super U, ? extends U> reducer) {
4288     return ForkJoinTasks.reduceValues
4289     (map, transformer, reducer).invoke();
4290     }
4291 jsr166 1.143
4292 dl 1.142 /**
4293     * Returns the result of accumulating the given transformation
4294     * of all values using the given reducer to combine values,
4295     * and the given basis as an identity value.
4296     *
4297     * @param transformer a function returning the transformation
4298     * for an element
4299     * @param basis the identity (initial default value) for the reduction
4300     * @param reducer a commutative associative combining function
4301     * @return the result of accumulating the given transformation
4302     * of all values
4303     */
4304     public double reduceToDouble(ObjectToDouble<? super V> transformer,
4305     double basis,
4306     DoubleByDoubleToDouble reducer) {
4307     return ForkJoinTasks.reduceValuesToDouble
4308     (map, transformer, basis, reducer).invoke();
4309     }
4310    
4311     /**
4312     * Returns the result of accumulating the given transformation
4313     * of all values using the given reducer to combine values,
4314     * and the given basis as an identity value.
4315     *
4316     * @param transformer a function returning the transformation
4317     * for an element
4318     * @param basis the identity (initial default value) for the reduction
4319     * @param reducer a commutative associative combining function
4320     * @return the result of accumulating the given transformation
4321     * of all values
4322     */
4323     public long reduceToLong(ObjectToLong<? super V> transformer,
4324     long basis,
4325     LongByLongToLong reducer) {
4326     return ForkJoinTasks.reduceValuesToLong
4327     (map, transformer, basis, reducer).invoke();
4328     }
4329    
4330     /**
4331     * Returns the result of accumulating the given transformation
4332     * of all values using the given reducer to combine values,
4333     * and the given basis as an identity value.
4334     *
4335     * @param transformer a function returning the transformation
4336     * for an element
4337     * @param basis the identity (initial default value) for the reduction
4338     * @param reducer a commutative associative combining function
4339     * @return the result of accumulating the given transformation
4340     * of all values
4341     */
4342     public int reduceToInt(ObjectToInt<? super V> transformer,
4343     int basis,
4344     IntByIntToInt reducer) {
4345     return ForkJoinTasks.reduceValuesToInt
4346     (map, transformer, basis, reducer).invoke();
4347     }
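        /*
         * A sketch of the primitive-valued reduction above, which threads an
         * int accumulator through the reduction instead of boxing. Assumes
         * the same hypothetical ConcurrentHashMap<String,String> "map";
         * ObjectToInt and IntByIntToInt are the helper interfaces defined
         * elsewhere in this file.
         *
         *   int totalChars = map.values().reduceToInt(
         *       new ObjectToInt<String>() {
         *           public int apply(String v) { return v.length(); }
         *       },
         *       0,                                // basis: identity for addition
         *       new IntByIntToInt() {
         *           public int apply(int x, int y) { return x + y; }
         *       });
         */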
4348    
4349     }
4350    
4351     /**
4352     * A view of a ConcurrentHashMap as a {@link Set} of (key, value)
4353     * entries. This class cannot be directly instantiated. See
4354     * {@link #entrySet}.
4355     */
4356     public static final class EntrySetView<K,V> extends CHMView<K,V>
4357     implements Set<Map.Entry<K,V>> {
4358     EntrySetView(ConcurrentHashMap<K, V> map) { super(map); }
4359     public final boolean contains(Object o) {
4360     Object k, v, r; Map.Entry<?,?> e;
4361     return ((o instanceof Map.Entry) &&
4362     (k = (e = (Map.Entry<?,?>)o).getKey()) != null &&
4363     (r = map.get(k)) != null &&
4364     (v = e.getValue()) != null &&
4365     (v == r || v.equals(r)));
4366     }
4367     public final boolean remove(Object o) {
4368     Object k, v; Map.Entry<?,?> e;
4369     return ((o instanceof Map.Entry) &&
4370     (k = (e = (Map.Entry<?,?>)o).getKey()) != null &&
4371     (v = e.getValue()) != null &&
4372     map.remove(k, v));
4373     }
4374    
4375     /**
4376     * Returns a "weakly consistent" iterator that will never
4377     * throw {@link ConcurrentModificationException}, and
4378     * guarantees to traverse elements as they existed upon
4379     * construction of the iterator, and may (but is not
4380     * guaranteed to) reflect any modifications subsequent to
4381     * construction.
4382     *
4383     * @return an iterator over the entries of this map
4384     */
4385     public final Iterator<Map.Entry<K,V>> iterator() {
4386     return new EntryIterator<K,V>(map);
4387     }
4388    
4389     public final boolean add(Entry<K,V> e) {
4390     K key = e.getKey();
4391     V value = e.getValue();
4392     if (key == null || value == null)
4393     throw new NullPointerException();
4394 dl 1.149 return map.internalPut(key, value, false) == null;
4395 dl 1.142 }
4396     public final boolean addAll(Collection<? extends Entry<K,V>> c) {
4397     boolean added = false;
4398     for (Entry<K,V> e : c) {
4399     if (add(e))
4400     added = true;
4401     }
4402     return added;
4403     }
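        /*
         * Unlike most Map entry views, this one supports add and addAll; as
         * the internalPut call above indicates, add(e) behaves like a put of
         * e's key and value and returns true only when no mapping for the key
         * was previously present. A sketch, assuming a hypothetical
         * ConcurrentHashMap<String,Long> named "map":
         *
         *   boolean wasAbsent = map.entrySet().add(
         *       new AbstractMap.SimpleEntry<String,Long>("hits", 0L));
         */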
4404     public boolean equals(Object o) {
4405     Set<?> c;
4406     return ((o instanceof Set) &&
4407     ((c = (Set<?>)o) == this ||
4408     (containsAll(c) && c.containsAll(this))));
4409     }
4410    
4411     /**
4412     * Performs the given action for each entry.
4413     *
4414     * @param action the action
4415     */
4416     public void forEach(Action<Map.Entry<K,V>> action) {
4417     ForkJoinTasks.forEachEntry
4418     (map, action).invoke();
4419     }
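        /*
         * A sketch of the forEach form above, printing every entry of a
         * hypothetical ConcurrentHashMap<String,Long> named "map"; Action is
         * the helper interface defined elsewhere in this file.
         *
         *   map.entrySet().forEach(new Action<Map.Entry<String,Long>>() {
         *       public void apply(Map.Entry<String,Long> e) {
         *           System.out.println(e.getKey() + " -> " + e.getValue());
         *       }
         *   });
         */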
4420    
4421     /**
4422     * Performs the given action for each non-null transformation
4423     * of each entry.
4424     *
4425     * @param transformer a function returning the transformation
4426     * for an element, or null if there is no transformation (in
4427     * which case the action is not applied).
4428     * @param action the action
4429     */
4430     public <U> void forEach(Fun<Map.Entry<K,V>, ? extends U> transformer,
4431     Action<U> action) {
4432     ForkJoinTasks.forEachEntry
4433     (map, transformer, action).invoke();
4434     }
4435    
4436     /**
4437     * Returns a non-null result from applying the given search
4438     * function on each entry, or null if none. Upon success,
4439     * further element processing is suppressed and the results of
4440     * any other parallel invocations of the search function are
4441     * ignored.
4442     *
4443     * @param searchFunction a function returning a non-null
4444     * result on success, else null
4445     * @return a non-null result from applying the given search
4446     * function on each entry, or null if none
4447     */
4448     public <U> U search(Fun<Map.Entry<K,V>, ? extends U> searchFunction) {
4449     return ForkJoinTasks.searchEntries
4450     (map, searchFunction).invoke();
4451     }
4452    
4453     /**
4454     * Returns the result of accumulating all entries using the
4455     * given reducer to combine values, or null if none.
4456     *
4457     * @param reducer a commutative associative combining function
4458     * @return the result of accumulating all entries
4459     */
4460     public Map.Entry<K,V> reduce(BiFun<Map.Entry<K,V>, Map.Entry<K,V>, ? extends Map.Entry<K,V>> reducer) {
4461     return ForkJoinTasks.reduceEntries
4462     (map, reducer).invoke();
4463     }
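        /*
         * A sketch of the entry reduction above, picking the entry with the
         * largest value from a hypothetical ConcurrentHashMap<String,Long>
         * named "map"; BiFun is the helper interface defined elsewhere in
         * this file.
         *
         *   Map.Entry<String,Long> busiest = map.entrySet().reduce(
         *       new BiFun<Map.Entry<String,Long>, Map.Entry<String,Long>,
         *                 Map.Entry<String,Long>>() {
         *           public Map.Entry<String,Long> apply(Map.Entry<String,Long> a,
         *                                               Map.Entry<String,Long> b) {
         *               return (a.getValue() >= b.getValue()) ? a : b;
         *           }
         *       });
         *   // busiest is null if the map is empty
         */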
4464    
4465     /**
4466     * Returns the result of accumulating the given transformation
4467     * of all entries using the given reducer to combine values,
4468     * or null if none.
4469     *
4470     * @param transformer a function returning the transformation
4471     * for an element, or null if there is no transformation (in
4472     * which case it is not combined).
4473     * @param reducer a commutative associative combining function
4474     * @return the result of accumulating the given transformation
4475     * of all entries
4476     */
4477     public <U> U reduce(Fun<Map.Entry<K,V>, ? extends U> transformer,
4478     BiFun<? super U, ? super U, ? extends U> reducer) {
4479     return ForkJoinTasks.reduceEntries
4480     (map, transformer, reducer).invoke();
4481     }
4482    
4483     /**
4484     * Returns the result of accumulating the given transformation
4485     * of all entries using the given reducer to combine values,
4486     * and the given basis as an identity value.
4487     *
4488     * @param transformer a function returning the transformation
4489     * for an element
4490     * @param basis the identity (initial default value) for the reduction
4491     * @param reducer a commutative associative combining function
4492     * @return the result of accumulating the given transformation
4493     * of all entries
4494     */
4495     public double reduceToDouble(ObjectToDouble<Map.Entry<K,V>> transformer,
4496     double basis,
4497     DoubleByDoubleToDouble reducer) {
4498     return ForkJoinTasks.reduceEntriesToDouble
4499     (map, transformer, basis, reducer).invoke();
4500     }
4501    
4502     /**
4503     * Returns the result of accumulating the given transformation
4504     * of all entries using the given reducer to combine values,
4505     * and the given basis as an identity value.
4506     *
4507     * @param transformer a function returning the transformation
4508     * for an element
4509     * @param basis the identity (initial default value) for the reduction
4510     * @param reducer a commutative associative combining function
4511     * @return the result of accumulating the given transformation
4512     * of all entries
4513     */
4514     public long reduceToLong(ObjectToLong<Map.Entry<K,V>> transformer,
4515     long basis,
4516     LongByLongToLong reducer) {
4517     return ForkJoinTasks.reduceEntriesToLong
4518     (map, transformer, basis, reducer).invoke();
4519     }
4520    
4521     /**
4522     * Returns the result of accumulating the given transformation
4523     * of all entries using the given reducer to combine values,
4524     * and the given basis as an identity value.
4525     *
4526     * @param transformer a function returning the transformation
4527     * for an element
4528     * @param basis the identity (initial default value) for the reduction
4529     * @param reducer a commutative associative combining function
4530     * @return the result of accumulating the given transformation
4531     * of all entries
4532     */
4533     public int reduceToInt(ObjectToInt<Map.Entry<K,V>> transformer,
4534     int basis,
4535     IntByIntToInt reducer) {
4536     return ForkJoinTasks.reduceEntriesToInt
4537     (map, transformer, basis, reducer).invoke();
4538     }
4539    
4540     }
4541    
4542 dl 1.119 // ---------------------------------------------------------------------
4543    
4544     /**
4545     * Predefined tasks for performing bulk parallel operations on
4546     * ConcurrentHashMaps. These tasks follow the forms and rules used
4547 dl 1.137 * for bulk operations. Each method has the same name as its counterpart, but returns
4548     * a task rather than invoking it. These methods may be useful in
4549     * custom applications such as submitting a task without waiting
4550     * for completion, using a custom pool, or combining with other
4551     * tasks.
4552 dl 1.119 */
4553     public static class ForkJoinTasks {
4554     private ForkJoinTasks() {}
4555    
4556     /**
4557     * Returns a task that when invoked, performs the given
4558     * action for each (key, value) pair.
4559     *
4560     * @param map the map
4561     * @param action the action
4562     * @return the task
4563     */
4564 jsr166 1.120 public static <K,V> ForkJoinTask<Void> forEach
4565 dl 1.119 (ConcurrentHashMap<K,V> map,
4566     BiAction<K,V> action) {
4567     if (action == null) throw new NullPointerException();
4568 dl 1.146 return new ForEachMappingTask<K,V>(map, null, -1, action);
4569 dl 1.119 }
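        /*
         * A sketch of using the returned task directly, as the class comment
         * suggests: submit it to a private pool and wait only when convenient.
         * Assumes a hypothetical ConcurrentHashMap<String,Long> named "map";
         * BiAction is the helper interface defined elsewhere in this file.
         *
         *   ForkJoinPool pool = new ForkJoinPool();
         *   ForkJoinTask<Void> task = ForkJoinTasks.forEach(map,
         *       new BiAction<String,Long>() {
         *           public void apply(String k, Long v) {
         *               System.out.println(k + " = " + v);
         *           }
         *       });
         *   pool.submit(task);   // start it without waiting
         *   // ... other work ...
         *   task.join();         // block for completion when needed
         */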
4570    
4571     /**
4572     * Returns a task that when invoked, performs the given
4573     * action for each non-null transformation of each (key, value) pair.
4574     *
4575     * @param map the map
4576     * @param transformer a function returning the transformation
4577 jsr166 1.135 * for an element, or null if there is no transformation (in
4578 jsr166 1.134 * which case the action is not applied)
4579 dl 1.119 * @param action the action
4580     * @return the task
4581     */
4582 jsr166 1.120 public static <K,V,U> ForkJoinTask<Void> forEach
4583 dl 1.119 (ConcurrentHashMap<K,V> map,
4584     BiFun<? super K, ? super V, ? extends U> transformer,
4585     Action<U> action) {
4586     if (transformer == null || action == null)
4587     throw new NullPointerException();
4588     return new ForEachTransformedMappingTask<K,V,U>
4589 dl 1.146 (map, null, -1, transformer, action);
4590 dl 1.119 }
4591    
4592     /**
4593 dl 1.126 * Returns a task that when invoked, returns a non-null result
4594     * from applying the given search function on each (key,
4595     * value), or null if none. Upon success, further element
4596     * processing is suppressed and the results of any other
4597     * parallel invocations of the search function are ignored.
4598 dl 1.119 *
4599     * @param map the map
4600     * @param searchFunction a function returning a non-null
4601     * result on success, else null
4602     * @return the task
4603     */
4604     public static <K,V,U> ForkJoinTask<U> search
4605     (ConcurrentHashMap<K,V> map,
4606     BiFun<? super K, ? super V, ? extends U> searchFunction) {
4607     if (searchFunction == null) throw new NullPointerException();
4608     return new SearchMappingsTask<K,V,U>
4609 dl 1.146 (map, null, -1, searchFunction,
4610 dl 1.119 new AtomicReference<U>());
4611     }
4612    
4613     /**
4614     * Returns a task that when invoked, returns the result of
4615     * accumulating the given transformation of all (key, value) pairs
4616     * using the given reducer to combine values, or null if none.
4617     *
4618     * @param map the map
4619     * @param transformer a function returning the transformation
4620 jsr166 1.135 * for an element, or null if there is no transformation (in
4621 dl 1.119 * which case it is not combined).
4622     * @param reducer a commutative associative combining function
4623     * @return the task
4624     */
4625     public static <K,V,U> ForkJoinTask<U> reduce
4626     (ConcurrentHashMap<K,V> map,
4627     BiFun<? super K, ? super V, ? extends U> transformer,
4628     BiFun<? super U, ? super U, ? extends U> reducer) {
4629     if (transformer == null || reducer == null)
4630     throw new NullPointerException();
4631     return new MapReduceMappingsTask<K,V,U>
4632 dl 1.130 (map, null, -1, null, transformer, reducer);
4633 dl 1.119 }
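        /*
         * A sketch of the map-reduce form above, summing the values of a
         * hypothetical ConcurrentHashMap<String,Long> named "map"; the two
         * BiFun instances are the transformer and reducer respectively.
         *
         *   ForkJoinTask<Long> sumTask = ForkJoinTasks.reduce(map,
         *       new BiFun<String,Long,Long>() {
         *           public Long apply(String k, Long v) { return v; }
         *       },
         *       new BiFun<Long,Long,Long>() {
         *           public Long apply(Long a, Long b) { return a + b; }
         *       });
         *   Long total = sumTask.invoke();   // null if the map is empty
         */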
4634    
4635     /**
4636     * Returns a task that when invoked, returns the result of
4637     * accumulating the given transformation of all (key, value) pairs
4638     * using the given reducer to combine values, and the given
4639     * basis as an identity value.
4640     *
4641     * @param map the map
4642     * @param transformer a function returning the transformation
4643     * for an element
4644     * @param basis the identity (initial default value) for the reduction
4645     * @param reducer a commutative associative combining function
4646     * @return the task
4647     */
4648     public static <K,V> ForkJoinTask<Double> reduceToDouble
4649     (ConcurrentHashMap<K,V> map,
4650     ObjectByObjectToDouble<? super K, ? super V> transformer,
4651     double basis,
4652     DoubleByDoubleToDouble reducer) {
4653     if (transformer == null || reducer == null)
4654     throw new NullPointerException();
4655     return new MapReduceMappingsToDoubleTask<K,V>
4656 dl 1.130 (map, null, -1, null, transformer, basis, reducer);
4657 dl 1.119 }
4658    
4659     /**
4660     * Returns a task that when invoked, returns the result of
4661     * accumulating the given transformation of all (key, value) pairs
4662     * using the given reducer to combine values, and the given
4663     * basis as an identity value.
4664     *
4665     * @param map the map
4666     * @param transformer a function returning the transformation
4667     * for an element
4668     * @param basis the identity (initial default value) for the reduction
4669     * @param reducer a commutative associative combining function
4670     * @return the task
4671     */
4672     public static <K,V> ForkJoinTask<Long> reduceToLong
4673     (ConcurrentHashMap<K,V> map,
4674     ObjectByObjectToLong<? super K, ? super V> transformer,
4675     long basis,
4676     LongByLongToLong reducer) {
4677     if (transformer == null || reducer == null)
4678     throw new NullPointerException();
4679     return new MapReduceMappingsToLongTask<K,V>
4680 dl 1.130 (map, null, -1, null, transformer, basis, reducer);
4681 dl 1.119 }
4682    
4683     /**
4684     * Returns a task that when invoked, returns the result of
4685     * accumulating the given transformation of all (key, value) pairs
4686     * using the given reducer to combine values, and the given
4687     * basis as an identity value.
4688     *
     * @param map the map
4689     * @param transformer a function returning the transformation
4690     * for an element
4691     * @param basis the identity (initial default value) for the reduction
4692     * @param reducer a commutative associative combining function
4693     * @return the task
4694     */
4695     public static <K,V> ForkJoinTask<Integer> reduceToInt
4696     (ConcurrentHashMap<K,V> map,
4697     ObjectByObjectToInt<? super K, ? super V> transformer,
4698     int basis,
4699     IntByIntToInt reducer) {
4700     if (transformer == null || reducer == null)
4701     throw new NullPointerException();
4702     return new MapReduceMappingsToIntTask<K,V>
4703 dl 1.130 (map, null, -1, null, transformer, basis, reducer);
4704 dl 1.119 }
4705    
4706     /**
4707     * Returns a task that when invoked, performs the given action
4708 jsr166 1.123 * for each key.
4709 dl 1.119 *
4710     * @param map the map
4711     * @param action the action
4712     * @return the task
4713     */
4714 jsr166 1.120 public static <K,V> ForkJoinTask<Void> forEachKey
4715 dl 1.119 (ConcurrentHashMap<K,V> map,
4716     Action<K> action) {
4717     if (action == null) throw new NullPointerException();
4718 dl 1.146 return new ForEachKeyTask<K,V>(map, null, -1, action);
4719 dl 1.119 }
4720    
4721     /**
4722     * Returns a task that when invoked, performs the given action
4723 jsr166 1.123 * for each non-null transformation of each key.
4724 dl 1.119 *
4725     * @param map the map
4726     * @param transformer a function returning the transformation
4727 jsr166 1.135 * for an element, or null if there is no transformation (in
4728 jsr166 1.134 * which case the action is not applied)
4729 dl 1.119 * @param action the action
4730     * @return the task
4731     */
4732 jsr166 1.120 public static <K,V,U> ForkJoinTask<Void> forEachKey
4733 dl 1.119 (ConcurrentHashMap<K,V> map,
4734     Fun<? super K, ? extends U> transformer,
4735     Action<U> action) {
4736     if (transformer == null || action == null)
4737     throw new NullPointerException();
4738     return new ForEachTransformedKeyTask<K,V,U>
4739 dl 1.146 (map, null, -1, transformer, action);
4740 dl 1.119 }
4741    
4742     /**
4743     * Returns a task that when invoked, returns a non-null result
4744     * from applying the given search function on each key, or
4745 dl 1.126 * null if none. Upon success, further element processing is
4746     * suppressed and the results of any other parallel
4747     * invocations of the search function are ignored.
4748 dl 1.119 *
4749     * @param map the map
4750     * @param searchFunction a function returning a non-null
4751     * result on success, else null
4752     * @return the task
4753     */
4754     public static <K,V,U> ForkJoinTask<U> searchKeys
4755     (ConcurrentHashMap<K,V> map,
4756     Fun<? super K, ? extends U> searchFunction) {
4757     if (searchFunction == null) throw new NullPointerException();
4758     return new SearchKeysTask<K,V,U>
4759 dl 1.146 (map, null, -1, searchFunction,
4760 dl 1.119 new AtomicReference<U>());
4761     }
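        /*
         * A sketch of the key search above, assuming a hypothetical
         * ConcurrentHashMap<String,Long> named "map"; Fun is the helper
         * interface defined elsewhere in this file.
         *
         *   ForkJoinTask<String> probe = ForkJoinTasks.searchKeys(map,
         *       new Fun<String,String>() {
         *           public String apply(String k) {
         *               return k.startsWith("tmp-") ? k : null;
         *           }
         *       });
         *   String tmpKey = probe.invoke();   // null if no such key exists
         */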
4762    
4763     /**
4764     * Returns a task that when invoked, returns the result of
4765     * accumulating all keys using the given reducer to combine
4766     * values, or null if none.
4767     *
4768     * @param map the map
4769     * @param reducer a commutative associative combining function
4770     * @return the task
4771     */
4772     public static <K,V> ForkJoinTask<K> reduceKeys
4773     (ConcurrentHashMap<K,V> map,
4774     BiFun<? super K, ? super K, ? extends K> reducer) {
4775     if (reducer == null) throw new NullPointerException();
4776     return new ReduceKeysTask<K,V>
4777 dl 1.130 (map, null, -1, null, reducer);
4778 dl 1.119 }
4779 jsr166 1.125
4780 dl 1.119 /**
4781     * Returns a task that when invoked, returns the result of
4782     * accumulating the given transformation of all keys using the given
4783     * reducer to combine values, or null if none.
4784     *
4785     * @param map the map
4786     * @param transformer a function returning the transformation
4787 jsr166 1.135 * for an element, or null if there is no transformation (in
4788 dl 1.119 * which case it is not combined).
4789     * @param reducer a commutative associative combining function
4790     * @return the task
4791     */
4792     public static <K,V,U> ForkJoinTask<U> reduceKeys
4793     (ConcurrentHashMap<K,V> map,
4794     Fun<? super K, ? extends U> transformer,
4795     BiFun<? super U, ? super U, ? extends U> reducer) {
4796     if (transformer == null || reducer == null)
4797     throw new NullPointerException();
4798     return new MapReduceKeysTask<K,V,U>
4799 dl 1.130 (map, null, -1, null, transformer, reducer);
4800 dl 1.119 }
4801    
4802     /**
4803     * Returns a task that when invoked, returns the result of
4804     * accumulating the given transformation of all keys using the given
4805     * reducer to combine values, and the given basis as an
4806     * identity value.
4807     *
4808     * @param map the map
4809     * @param transformer a function returning the transformation
4810     * for an element
4811     * @param basis the identity (initial default value) for the reduction
4812     * @param reducer a commutative associative combining function
4813     * @return the task
4814     */
4815     public static <K,V> ForkJoinTask<Double> reduceKeysToDouble
4816     (ConcurrentHashMap<K,V> map,
4817     ObjectToDouble<? super K> transformer,
4818     double basis,
4819     DoubleByDoubleToDouble reducer) {
4820     if (transformer == null || reducer == null)
4821     throw new NullPointerException();
4822     return new MapReduceKeysToDoubleTask<K,V>
4823 dl 1.130 (map, null, -1, null, transformer, basis, reducer);
4824 dl 1.119 }
4825    
4826     /**
4827     * Returns a task that when invoked, returns the result of
4828     * accumulating the given transformation of all keys using the given
4829     * reducer to combine values, and the given basis as an
4830     * identity value.
4831     *
4832     * @param map the map
4833     * @param transformer a function returning the transformation
4834     * for an element
4835     * @param basis the identity (initial default value) for the reduction
4836     * @param reducer a commutative associative combining function
4837     * @return the task
4838     */
4839     public static <K,V> ForkJoinTask<Long> reduceKeysToLong
4840     (ConcurrentHashMap<K,V> map,
4841     ObjectToLong<? super K> transformer,
4842     long basis,
4843     LongByLongToLong reducer) {
4844     if (transformer == null || reducer == null)
4845     throw new NullPointerException();
4846     return new MapReduceKeysToLongTask<K,V>
4847 dl 1.130 (map, null, -1, null, transformer, basis, reducer);
4848 dl 1.119 }
4849    
4850     /**
4851     * Returns a task that when invoked, returns the result of
4852     * accumulating the given transformation of all keys using the given
4853     * reducer to combine values, and the given basis as an
4854     * identity value.
4855     *
4856     * @param map the map
4857     * @param transformer a function returning the transformation
4858     * for an element
4859     * @param basis the identity (initial default value) for the reduction
4860     * @param reducer a commutative associative combining function
4861     * @return the task
4862     */
4863     public static <K,V> ForkJoinTask<Integer> reduceKeysToInt
4864     (ConcurrentHashMap<K,V> map,
4865     ObjectToInt<? super K> transformer,
4866     int basis,
4867     IntByIntToInt reducer) {
4868     if (transformer == null || reducer == null)
4869     throw new NullPointerException();
4870     return new MapReduceKeysToIntTask<K,V>
4871 dl 1.130 (map, null, -1, null, transformer, basis, reducer);
4872 dl 1.119 }
4873    
4874     /**
4875     * Returns a task that when invoked, performs the given action
4876 jsr166 1.123 * for each value.
4877 dl 1.119 *
4878     * @param map the map
4879     * @param action the action
     * @return the task
4880     */
4881 jsr166 1.120 public static <K,V> ForkJoinTask<Void> forEachValue
4882 dl 1.119 (ConcurrentHashMap<K,V> map,
4883     Action<V> action) {
4884     if (action == null) throw new NullPointerException();
4885 dl 1.146 return new ForEachValueTask<K,V>(map, null, -1, action);
4886 dl 1.119 }
4887    
4888     /**
4889     * Returns a task that when invoked, performs the given action
4890 jsr166 1.123 * for each non-null transformation of each value.
4891 dl 1.119 *
4892     * @param map the map
4893     * @param transformer a function returning the transformation
4894 jsr166 1.135 * for an element, or null if there is no transformation (in
4895 jsr166 1.134 * which case the action is not applied)
4896 dl 1.119 * @param action the action
     * @return the task
4897     */
4898 jsr166 1.120 public static <K,V,U> ForkJoinTask<Void> forEachValue
4899 dl 1.119 (ConcurrentHashMap<K,V> map,
4900     Fun<? super V, ? extends U> transformer,
4901     Action<U> action) {
4902     if (transformer == null || action == null)
4903     throw new NullPointerException();
4904     return new ForEachTransformedValueTask<K,V,U>
4905 dl 1.146 (map, null, -1, transformer, action);
4906 dl 1.119 }
4907    
4908     /**
4909     * Returns a task that when invoked, returns a non-null result
4910     * from applying the given search function on each value, or
4911 dl 1.126 * null if none. Upon success, further element processing is
4912     * suppressed and the results of any other parallel
4913     * invocations of the search function are ignored.
4914 dl 1.119 *
4915     * @param map the map
4916     * @param searchFunction a function returning a non-null
4917     * result on success, else null
4918     * @return the task
4919     */
4920     public static <K,V,U> ForkJoinTask<U> searchValues
4921     (ConcurrentHashMap<K,V> map,
4922     Fun<? super V, ? extends U> searchFunction) {
4923     if (searchFunction == null) throw new NullPointerException();
4924     return new SearchValuesTask<K,V,U>
4925 dl 1.146 (map, null, -1, searchFunction,
4926 dl 1.119 new AtomicReference<U>());
4927     }
4928    
4929     /**
4930     * Returns a task that when invoked, returns the result of
4931     * accumulating all values using the given reducer to combine
4932     * values, or null if none.
4933     *
4934     * @param map the map
4935     * @param reducer a commutative associative combining function
4936     * @return the task
4937     */
4938     public static <K,V> ForkJoinTask<V> reduceValues
4939     (ConcurrentHashMap<K,V> map,
4940     BiFun<? super V, ? super V, ? extends V> reducer) {
4941     if (reducer == null) throw new NullPointerException();
4942     return new ReduceValuesTask<K,V>
4943 dl 1.130 (map, null, -1, null, reducer);
4944 dl 1.119 }
4945    
4946     /**
4947     * Returns a task that when invoked, returns the result of
4948     * accumulating the given transformation of all values using the
4949     * given reducer to combine values, or null if none.
4950     *
4951     * @param map the map
4952     * @param transformer a function returning the transformation
4953 jsr166 1.135 * for an element, or null if there is no transformation (in
4954 dl 1.119 * which case it is not combined).
4955     * @param reducer a commutative associative combining function
4956     * @return the task
4957     */
4958     public static <K,V,U> ForkJoinTask<U> reduceValues
4959     (ConcurrentHashMap<K,V> map,
4960     Fun<? super V, ? extends U> transformer,
4961     BiFun<? super U, ? super U, ? extends U> reducer) {
4962     if (transformer == null || reducer == null)
4963     throw new NullPointerException();
4964     return new MapReduceValuesTask<K,V,U>
4965 dl 1.130 (map, null, -1, null, transformer, reducer);
4966 dl 1.119 }
4967    
4968     /**
4969     * Returns a task that when invoked, returns the result of
4970     * accumulating the given transformation of all values using the
4971     * given reducer to combine values, and the given basis as an
4972     * identity value.
4973     *
4974     * @param map the map
4975     * @param transformer a function returning the transformation
4976     * for an element
4977     * @param basis the identity (initial default value) for the reduction
4978     * @param reducer a commutative associative combining function
4979     * @return the task
4980     */
4981     public static <K,V> ForkJoinTask<Double> reduceValuesToDouble
4982     (ConcurrentHashMap<K,V> map,
4983     ObjectToDouble<? super V> transformer,
4984     double basis,
4985     DoubleByDoubleToDouble reducer) {
4986     if (transformer == null || reducer == null)
4987     throw new NullPointerException();
4988     return new MapReduceValuesToDoubleTask<K,V>
4989 dl 1.130 (map, null, -1, null, transformer, basis, reducer);
4990 dl 1.119 }
4991    
4992     /**
4993     * Returns a task that when invoked, returns the result of
4994     * accumulating the given transformation of all values using the
4995     * given reducer to combine values, and the given basis as an
4996     * identity value.
4997     *
4998     * @param map the map
4999     * @param transformer a function returning the transformation
5000     * for an element
5001     * @param basis the identity (initial default value) for the reduction
5002     * @param reducer a commutative associative combining function
5003     * @return the task
5004     */
5005     public static <K,V> ForkJoinTask<Long> reduceValuesToLong
5006     (ConcurrentHashMap<K,V> map,
5007     ObjectToLong<? super V> transformer,
5008     long basis,
5009     LongByLongToLong reducer) {
5010     if (transformer == null || reducer == null)
5011     throw new NullPointerException();
5012     return new MapReduceValuesToLongTask<K,V>
5013 dl 1.130 (map, null, -1, null, transformer, basis, reducer);
5014 dl 1.119 }
5015    
5016     /**
5017     * Returns a task that when invoked, returns the result of
5018     * accumulating the given transformation of all values using the
5019     * given reducer to combine values, and the given basis as an
5020     * identity value.
5021     *
5022     * @param map the map
5023     * @param transformer a function returning the transformation
5024     * for an element
5025     * @param basis the identity (initial default value) for the reduction
5026     * @param reducer a commutative associative combining function
5027     * @return the task
5028     */
5029     public static <K,V> ForkJoinTask<Integer> reduceValuesToInt
5030     (ConcurrentHashMap<K,V> map,
5031     ObjectToInt<? super V> transformer,
5032     int basis,
5033     IntByIntToInt reducer) {
5034     if (transformer == null || reducer == null)
5035     throw new NullPointerException();
5036     return new MapReduceValuesToIntTask<K,V>
5037 dl 1.130 (map, null, -1, null, transformer, basis, reducer);
5038 dl 1.119 }
5039    
5040     /**
5041     * Returns a task that when invoked, performs the given action
5042 jsr166 1.123 * for each entry.
5043 dl 1.119 *
5044     * @param map the map
5045     * @param action the action
     * @return the task
5046     */
5047 jsr166 1.120 public static <K,V> ForkJoinTask<Void> forEachEntry
5048 dl 1.119 (ConcurrentHashMap<K,V> map,
5049     Action<Map.Entry<K,V>> action) {
5050     if (action == null) throw new NullPointerException();
5051 dl 1.146 return new ForEachEntryTask<K,V>(map, null, -1, action);
5052 dl 1.119 }
5053    
5054     /**
5055     * Returns a task that when invoked, performs the given action
5056 jsr166 1.123 * for each non-null transformation of each entry.
5057 dl 1.119 *
5058     * @param map the map
5059     * @param transformer a function returning the transformation
5060 jsr166 1.135 * for an element, or null if there is no transformation (in
5061 jsr166 1.134 * which case the action is not applied)
5062 dl 1.119 * @param action the action
     * @return the task
5063     */
5064 jsr166 1.120 public static <K,V,U> ForkJoinTask<Void> forEachEntry
5065 dl 1.119 (ConcurrentHashMap<K,V> map,
5066     Fun<Map.Entry<K,V>, ? extends U> transformer,
5067     Action<U> action) {
5068     if (transformer == null || action == null)
5069     throw new NullPointerException();
5070     return new ForEachTransformedEntryTask<K,V,U>
5071 dl 1.146 (map, null, -1, transformer, action);
5072 dl 1.119 }
5073    
5074     /**
5075     * Returns a task that when invoked, returns a non-null result
5076     * from applying the given search function on each entry, or
5077 dl 1.126 * null if none. Upon success, further element processing is
5078     * suppressed and the results of any other parallel
5079     * invocations of the search function are ignored.
5080 dl 1.119 *
5081     * @param map the map
5082     * @param searchFunction a function returning a non-null
5083     * result on success, else null
5084     * @return the task
5085     */
5086     public static <K,V,U> ForkJoinTask<U> searchEntries
5087     (ConcurrentHashMap<K,V> map,
5088     Fun<Map.Entry<K,V>, ? extends U> searchFunction) {
5089     if (searchFunction == null) throw new NullPointerException();
5090     return new SearchEntriesTask<K,V,U>
5091 dl 1.146 (map, null, -1, searchFunction,
5092 dl 1.119 new AtomicReference<U>());
5093     }
5094    
5095     /**
5096     * Returns a task that when invoked, returns the result of
5097     * accumulating all entries using the given reducer to combine
5098     * values, or null if none.
5099     *
5100     * @param map the map
5101     * @param reducer a commutative associative combining function
5102     * @return the task
5103     */
5104     public static <K,V> ForkJoinTask<Map.Entry<K,V>> reduceEntries
5105     (ConcurrentHashMap<K,V> map,
5106     BiFun<Map.Entry<K,V>, Map.Entry<K,V>, ? extends Map.Entry<K,V>> reducer) {
5107     if (reducer == null) throw new NullPointerException();
5108     return new ReduceEntriesTask<K,V>
5109 dl 1.130 (map, null, -1, null, reducer);
5110 dl 1.119 }
5111    
5112     /**
5113     * Returns a task that when invoked, returns the result of
5114     * accumulating the given transformation of all entries using the
5115     * given reducer to combine values, or null if none.
5116     *
5117     * @param map the map
5118     * @param transformer a function returning the transformation
5119 jsr166 1.135 * for an element, or null if there is no transformation (in
5120 dl 1.119 * which case it is not combined).
5121     * @param reducer a commutative associative combining function
5122     * @return the task
5123     */
5124     public static <K,V,U> ForkJoinTask<U> reduceEntries
5125     (ConcurrentHashMap<K,V> map,
5126     Fun<Map.Entry<K,V>, ? extends U> transformer,
5127     BiFun<? super U, ? super U, ? extends U> reducer) {
5128     if (transformer == null || reducer == null)
5129     throw new NullPointerException();
5130     return new MapReduceEntriesTask<K,V,U>
5131 dl 1.130 (map, null, -1, null, transformer, reducer);
5132 dl 1.119 }
5133    
5134     /**
5135     * Returns a task that when invoked, returns the result of
5136     * accumulating the given transformation of all entries using the
5137     * given reducer to combine values, and the given basis as an
5138     * identity value.
5139     *
5140     * @param map the map
5141     * @param transformer a function returning the transformation
5142     * for an element
5143     * @param basis the identity (initial default value) for the reduction
5144     * @param reducer a commutative associative combining function
5145     * @return the task
5146     */
5147     public static <K,V> ForkJoinTask<Double> reduceEntriesToDouble
5148     (ConcurrentHashMap<K,V> map,
5149     ObjectToDouble<Map.Entry<K,V>> transformer,
5150     double basis,
5151     DoubleByDoubleToDouble reducer) {
5152     if (transformer == null || reducer == null)
5153     throw new NullPointerException();
5154     return new MapReduceEntriesToDoubleTask<K,V>
5155 dl 1.130 (map, null, -1, null, transformer, basis, reducer);
5156 dl 1.119 }
5157    
5158     /**
5159     * Returns a task that when invoked, returns the result of
5160     * accumulating the given transformation of all entries using the
5161     * given reducer to combine values, and the given basis as an
5162     * identity value.
5163     *
5164     * @param map the map
5165     * @param transformer a function returning the transformation
5166     * for an element
5167     * @param basis the identity (initial default value) for the reduction
5168     * @param reducer a commutative associative combining function
5169     * @return the task
5170     */
5171     public static <K,V> ForkJoinTask<Long> reduceEntriesToLong
5172     (ConcurrentHashMap<K,V> map,
5173     ObjectToLong<Map.Entry<K,V>> transformer,
5174     long basis,
5175     LongByLongToLong reducer) {
5176     if (transformer == null || reducer == null)
5177     throw new NullPointerException();
5178     return new MapReduceEntriesToLongTask<K,V>
5179 dl 1.130 (map, null, -1, null, transformer, basis, reducer);
5180 dl 1.119 }
5181    
5182     /**
5183     * Returns a task that when invoked, returns the result of
5184     * accumulating the given transformation of all entries using the
5185     * given reducer to combine values, and the given basis as an
5186     * identity value.
5187     *
5188     * @param map the map
5189     * @param transformer a function returning the transformation
5190     * for an element
5191     * @param basis the identity (initial default value) for the reduction
5192     * @param reducer a commutative associative combining function
5193     * @return the task
5194     */
5195     public static <K,V> ForkJoinTask<Integer> reduceEntriesToInt
5196     (ConcurrentHashMap<K,V> map,
5197     ObjectToInt<Map.Entry<K,V>> transformer,
5198     int basis,
5199     IntByIntToInt reducer) {
5200     if (transformer == null || reducer == null)
5201     throw new NullPointerException();
5202     return new MapReduceEntriesToIntTask<K,V>
5203 dl 1.130 (map, null, -1, null, transformer, basis, reducer);
5204 dl 1.119 }
5205     }
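    /*
     * A sketch combining two of the tasks above in one custom pool, as the
     * class comment suggests. Assumes a hypothetical
     * ConcurrentHashMap<String,Long> named "map"; ObjectToInt, IntByIntToInt,
     * ObjectToLong and LongByLongToLong are the helper interfaces defined
     * elsewhere in this file.
     *
     *   ForkJoinPool pool = new ForkJoinPool();
     *   ForkJoinTask<Integer> keyCount = pool.submit(
     *       ForkJoinTasks.reduceKeysToInt(map,
     *           new ObjectToInt<String>() {
     *               public int apply(String k) { return 1; }      // count each key once
     *           },
     *           0,
     *           new IntByIntToInt() {
     *               public int apply(int x, int y) { return x + y; }
     *           }));
     *   ForkJoinTask<Long> valueSum = pool.submit(
     *       ForkJoinTasks.reduceValuesToLong(map,
     *           new ObjectToLong<Long>() {
     *               public long apply(Long v) { return v; }       // sum the values
     *           },
     *           0L,
     *           new LongByLongToLong() {
     *               public long apply(long x, long y) { return x + y; }
     *           }));
     *   System.out.println(keyCount.join() + " keys, value total " + valueSum.join());
     */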
5206    
5207     // -------------------------------------------------------
5208    
5209     /*
5210     * Task classes. Coded in a regular but ugly format/style to
5211     * simplify checks that each variant differs in the right way from
5212 dl 1.149 * others. The null screenings exist because compilers cannot tell
5213     * that we've already null-checked task arguments, so we force
5214     * the simplest hoisted bypass to help avoid convoluted traps.
5215 dl 1.119 */
5216    
5217 dl 1.128 @SuppressWarnings("serial") static final class ForEachKeyTask<K,V>
5218 dl 1.146 extends Traverser<K,V,Void> {
5219 dl 1.119 final Action<K> action;
5220     ForEachKeyTask
5221 dl 1.146 (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
5222 dl 1.119 Action<K> action) {
5223 dl 1.146 super(m, p, b);
5224 dl 1.119 this.action = action;
5225     }
5226 dl 1.146 @SuppressWarnings("unchecked") public final void compute() {
5227     final Action<K> action;
5228 dl 1.149 if ((action = this.action) != null) {
5229     for (int b; (b = preSplit()) > 0;)
5230     new ForEachKeyTask<K,V>(map, this, b, action).fork();
5231     while (advance() != null)
5232     action.apply((K)nextKey);
5233     propagateCompletion();
5234     }
5235 dl 1.119 }
5236     }
5237    
5238 dl 1.128 @SuppressWarnings("serial") static final class ForEachValueTask<K,V>
5239 dl 1.146 extends Traverser<K,V,Void> {
5240 dl 1.119 final Action<V> action;
5241     ForEachValueTask
5242 dl 1.146 (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
5243 dl 1.119 Action<V> action) {
5244 dl 1.146 super(m, p, b);
5245 dl 1.119 this.action = action;
5246     }
5247 dl 1.146 @SuppressWarnings("unchecked") public final void compute() {
5248     final Action<V> action;
5249 dl 1.149 if ((action = this.action) != null) {
5250     for (int b; (b = preSplit()) > 0;)
5251     new ForEachValueTask<K,V>(map, this, b, action).fork();
5252     Object v;
5253     while ((v = advance()) != null)
5254     action.apply((V)v);
5255     propagateCompletion();
5256     }
5257 dl 1.119 }
5258     }
5259    
5260 dl 1.128 @SuppressWarnings("serial") static final class ForEachEntryTask<K,V>
5261 dl 1.146 extends Traverser<K,V,Void> {
5262 dl 1.119 final Action<Entry<K,V>> action;
5263     ForEachEntryTask
5264 dl 1.146 (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
5265 dl 1.119 Action<Entry<K,V>> action) {
5266 dl 1.146 super(m, p, b);
5267 dl 1.119 this.action = action;
5268     }
5269 dl 1.146 @SuppressWarnings("unchecked") public final void compute() {
5270     final Action<Entry<K,V>> action;
5271 dl 1.149 if ((action = this.action) != null) {
5272     for (int b; (b = preSplit()) > 0;)
5273     new ForEachEntryTask<K,V>(map, this, b, action).fork();
5274     Object v;
5275     while ((v = advance()) != null)
5276     action.apply(entryFor((K)nextKey, (V)v));
5277     propagateCompletion();
5278     }
5279 dl 1.119 }
5280     }
5281    
5282 dl 1.128 @SuppressWarnings("serial") static final class ForEachMappingTask<K,V>
5283 dl 1.146 extends Traverser<K,V,Void> {
5284 dl 1.119 final BiAction<K,V> action;
5285     ForEachMappingTask
5286 dl 1.146 (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
5287 dl 1.119 BiAction<K,V> action) {
5288 dl 1.146 super(m, p, b);
5289 dl 1.119 this.action = action;
5290     }
5291 dl 1.146 @SuppressWarnings("unchecked") public final void compute() {
5292     final BiAction<K,V> action;
5293 dl 1.149 if ((action = this.action) != null) {
5294     for (int b; (b = preSplit()) > 0;)
5295     new ForEachMappingTask<K,V>(map, this, b, action).fork();
5296     Object v;
5297     while ((v = advance()) != null)
5298     action.apply((K)nextKey, (V)v);
5299     propagateCompletion();
5300     }
5301 dl 1.119 }
5302     }
5303    
5304 dl 1.128 @SuppressWarnings("serial") static final class ForEachTransformedKeyTask<K,V,U>
5305 dl 1.146 extends Traverser<K,V,Void> {
5306 dl 1.119 final Fun<? super K, ? extends U> transformer;
5307     final Action<U> action;
5308     ForEachTransformedKeyTask
5309 dl 1.146 (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
5310     Fun<? super K, ? extends U> transformer, Action<U> action) {
5311     super(m, p, b);
5312     this.transformer = transformer; this.action = action;
5313     }
5314     @SuppressWarnings("unchecked") public final void compute() {
5315     final Fun<? super K, ? extends U> transformer;
5316     final Action<U> action;
5317 dl 1.149 if ((transformer = this.transformer) != null &&
5318     (action = this.action) != null) {
5319     for (int b; (b = preSplit()) > 0;)
5320     new ForEachTransformedKeyTask<K,V,U>
5321     (map, this, b, transformer, action).fork();
5322     U u;
5323     while (advance() != null) {
5324     if ((u = transformer.apply((K)nextKey)) != null)
5325     action.apply(u);
5326     }
5327     propagateCompletion();
5328 dl 1.119 }
5329     }
5330     }
5331    
5332 dl 1.128 @SuppressWarnings("serial") static final class ForEachTransformedValueTask<K,V,U>
5333 dl 1.146 extends Traverser<K,V,Void> {
5334 dl 1.119 final Fun<? super V, ? extends U> transformer;
5335     final Action<U> action;
5336     ForEachTransformedValueTask
5337 dl 1.146 (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
5338     Fun<? super V, ? extends U> transformer, Action<U> action) {
5339     super(m, p, b);
5340     this.transformer = transformer; this.action = action;
5341     }
5342     @SuppressWarnings("unchecked") public final void compute() {
5343     final Fun<? super V, ? extends U> transformer;
5344     final Action<U> action;
5345 dl 1.149 if ((transformer = this.transformer) != null &&
5346     (action = this.action) != null) {
5347     for (int b; (b = preSplit()) > 0;)
5348     new ForEachTransformedValueTask<K,V,U>
5349     (map, this, b, transformer, action).fork();
5350     Object v; U u;
5351     while ((v = advance()) != null) {
5352     if ((u = transformer.apply((V)v)) != null)
5353     action.apply(u);
5354     }
5355     propagateCompletion();
5356 dl 1.119 }
5357     }
5358 tim 1.1 }
5359    
5360 dl 1.128 @SuppressWarnings("serial") static final class ForEachTransformedEntryTask<K,V,U>
5361 dl 1.146 extends Traverser<K,V,Void> {
5362 dl 1.119 final Fun<Map.Entry<K,V>, ? extends U> transformer;
5363     final Action<U> action;
5364     ForEachTransformedEntryTask
5365 dl 1.146 (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
5366     Fun<Map.Entry<K,V>, ? extends U> transformer, Action<U> action) {
5367     super(m, p, b);
5368     this.transformer = transformer; this.action = action;
5369     }
5370     @SuppressWarnings("unchecked") public final void compute() {
5371     final Fun<Map.Entry<K,V>, ? extends U> transformer;
5372     final Action<U> action;
5373 dl 1.149 if ((transformer = this.transformer) != null &&
5374     (action = this.action) != null) {
5375     for (int b; (b = preSplit()) > 0;)
5376     new ForEachTransformedEntryTask<K,V,U>
5377     (map, this, b, transformer, action).fork();
5378     Object v; U u;
5379     while ((v = advance()) != null) {
5380     if ((u = transformer.apply(entryFor((K)nextKey,
5381     (V)v))) != null)
5382     action.apply(u);
5383     }
5384     propagateCompletion();
5385 dl 1.119 }
5386     }
5387 tim 1.1 }
5388    
5389 dl 1.128 @SuppressWarnings("serial") static final class ForEachTransformedMappingTask<K,V,U>
5390 dl 1.146 extends Traverser<K,V,Void> {
5391 dl 1.119 final BiFun<? super K, ? super V, ? extends U> transformer;
5392     final Action<U> action;
5393     ForEachTransformedMappingTask
5394 dl 1.146 (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
5395 dl 1.119 BiFun<? super K, ? super V, ? extends U> transformer,
5396     Action<U> action) {
5397 dl 1.146 super(m, p, b);
5398     this.transformer = transformer; this.action = action;
5399 dl 1.119 }
5400 dl 1.146 @SuppressWarnings("unchecked") public final void compute() {
5401     final BiFun<? super K, ? super V, ? extends U> transformer;
5402     final Action<U> action;
5403 dl 1.149 if ((transformer = this.transformer) != null &&
5404     (action = this.action) != null) {
5405     for (int b; (b = preSplit()) > 0;)
5406     new ForEachTransformedMappingTask<K,V,U>
5407     (map, this, b, transformer, action).fork();
5408     Object v; U u;
5409     while ((v = advance()) != null) {
5410     if ((u = transformer.apply((K)nextKey, (V)v)) != null)
5411     action.apply(u);
5412     }
5413     propagateCompletion();
5414 dl 1.119 }
5415     }
5416 tim 1.1 }
5417    
5418 dl 1.128 @SuppressWarnings("serial") static final class SearchKeysTask<K,V,U>
5419 dl 1.146 extends Traverser<K,V,U> {
5420 dl 1.119 final Fun<? super K, ? extends U> searchFunction;
5421     final AtomicReference<U> result;
5422     SearchKeysTask
5423 dl 1.146 (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
5424 dl 1.119 Fun<? super K, ? extends U> searchFunction,
5425     AtomicReference<U> result) {
5426 dl 1.146 super(m, p, b);
5427 dl 1.119 this.searchFunction = searchFunction; this.result = result;
5428     }
5429 dl 1.146 public final U getRawResult() { return result.get(); }
5430     @SuppressWarnings("unchecked") public final void compute() {
5431     final Fun<? super K, ? extends U> searchFunction;
5432     final AtomicReference<U> result;
5433 dl 1.149 if ((searchFunction = this.searchFunction) != null &&
5434     (result = this.result) != null) {
5435     for (int b;;) {
5436     if (result.get() != null)
5437     return;
5438     if ((b = preSplit()) <= 0)
5439     break;
5440     new SearchKeysTask<K,V,U>
5441     (map, this, b, searchFunction, result).fork();
5442 dl 1.128 }
5443 dl 1.149 while (result.get() == null) {
5444     U u;
5445     if (advance() == null) {
5446     propagateCompletion();
5447     break;
5448     }
5449     if ((u = searchFunction.apply((K)nextKey)) != null) {
5450     if (result.compareAndSet(null, u))
5451     quietlyCompleteRoot();
5452     break;
5453     }
5454 dl 1.119 }
5455     }
5456     }
5457 tim 1.1 }
5458    
5459 dl 1.128 @SuppressWarnings("serial") static final class SearchValuesTask<K,V,U>
5460 dl 1.146 extends Traverser<K,V,U> {
5461 dl 1.119 final Fun<? super V, ? extends U> searchFunction;
5462     final AtomicReference<U> result;
5463     SearchValuesTask
5464 dl 1.146 (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
5465 dl 1.119 Fun<? super V, ? extends U> searchFunction,
5466     AtomicReference<U> result) {
5467 dl 1.146 super(m, p, b);
5468 dl 1.119 this.searchFunction = searchFunction; this.result = result;
5469     }
5470 dl 1.146 public final U getRawResult() { return result.get(); }
5471     @SuppressWarnings("unchecked") public final void compute() {
5472     final Fun<? super V, ? extends U> searchFunction;
5473     final AtomicReference<U> result;
5474 dl 1.149 if ((searchFunction = this.searchFunction) != null &&
5475     (result = this.result) != null) {
5476     for (int b;;) {
5477     if (result.get() != null)
5478     return;
5479     if ((b = preSplit()) <= 0)
5480     break;
5481     new SearchValuesTask<K,V,U>
5482     (map, this, b, searchFunction, result).fork();
5483 dl 1.128 }
5484 dl 1.149 while (result.get() == null) {
5485     Object v; U u;
5486     if ((v = advance()) == null) {
5487     propagateCompletion();
5488     break;
5489     }
5490     if ((u = searchFunction.apply((V)v)) != null) {
5491     if (result.compareAndSet(null, u))
5492     quietlyCompleteRoot();
5493     break;
5494     }
5495 dl 1.119 }
5496     }
5497     }
5498     }
5499 tim 1.11
5500 dl 1.128 @SuppressWarnings("serial") static final class SearchEntriesTask<K,V,U>
5501 dl 1.146 extends Traverser<K,V,U> {
5502 dl 1.119 final Fun<Entry<K,V>, ? extends U> searchFunction;
5503     final AtomicReference<U> result;
5504     SearchEntriesTask
5505 dl 1.146 (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
5506 dl 1.119 Fun<Entry<K,V>, ? extends U> searchFunction,
5507     AtomicReference<U> result) {
5508 dl 1.146 super(m, p, b);
5509 dl 1.119 this.searchFunction = searchFunction; this.result = result;
5510     }
5511 dl 1.146 public final U getRawResult() { return result.get(); }
5512     @SuppressWarnings("unchecked") public final void compute() {
5513     final Fun<Entry<K,V>, ? extends U> searchFunction;
5514     final AtomicReference<U> result;
5515 dl 1.149 if ((searchFunction = this.searchFunction) != null &&
5516     (result = this.result) != null) {
5517     for (int b;;) {
5518     if (result.get() != null)
5519     return;
5520     if ((b = preSplit()) <= 0)
5521     break;
5522     new SearchEntriesTask<K,V,U>
5523     (map, this, b, searchFunction, result).fork();
5524 dl 1.128 }
5525 dl 1.149 while (result.get() == null) {
5526     Object v; U u;
5527     if ((v = advance()) == null) {
5528     propagateCompletion();
5529     break;
5530     }
5531     if ((u = searchFunction.apply(entryFor((K)nextKey,
5532     (V)v))) != null) {
5533     if (result.compareAndSet(null, u))
5534     quietlyCompleteRoot();
5535     return;
5536     }
5537 dl 1.119 }
5538     }
5539     }
5540     }
5541 tim 1.1
5542 dl 1.128 @SuppressWarnings("serial") static final class SearchMappingsTask<K,V,U>
5543 dl 1.146 extends Traverser<K,V,U> {
5544 dl 1.119 final BiFun<? super K, ? super V, ? extends U> searchFunction;
5545     final AtomicReference<U> result;
5546     SearchMappingsTask
5547 dl 1.146 (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
5548 dl 1.119 BiFun<? super K, ? super V, ? extends U> searchFunction,
5549     AtomicReference<U> result) {
5550 dl 1.146 super(m, p, b);
5551 dl 1.119 this.searchFunction = searchFunction; this.result = result;
5552     }
5553 dl 1.146 public final U getRawResult() { return result.get(); }
5554     @SuppressWarnings("unchecked") public final void compute() {
5555     final BiFun<? super K, ? super V, ? extends U> searchFunction;
5556     final AtomicReference<U> result;
5557 dl 1.149 if ((searchFunction = this.searchFunction) != null &&
5558     (result = this.result) != null) {
5559     for (int b;;) {
5560     if (result.get() != null)
5561     return;
5562     if ((b = preSplit()) <= 0)
5563     break;
5564     new SearchMappingsTask<K,V,U>
5565     (map, this, b, searchFunction, result).fork();
5566 dl 1.128 }
5567 dl 1.149 while (result.get() == null) {
5568     Object v; U u;
5569     if ((v = advance()) == null) {
5570     propagateCompletion();
5571     break;
5572     }
5573     if ((u = searchFunction.apply((K)nextKey, (V)v)) != null) {
5574     if (result.compareAndSet(null, u))
5575     quietlyCompleteRoot();
5576     break;
5577     }
5578 dl 1.119 }
5579     }
5580 tim 1.1 }
5581 dl 1.119 }
5582 tim 1.1
5583 dl 1.128 @SuppressWarnings("serial") static final class ReduceKeysTask<K,V>
5584 dl 1.146 extends Traverser<K,V,K> {
5585 dl 1.119 final BiFun<? super K, ? super K, ? extends K> reducer;
5586     K result;
5587 dl 1.128 ReduceKeysTask<K,V> rights, nextRight;
5588 dl 1.119 ReduceKeysTask
5589 dl 1.146 (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
5590 dl 1.128 ReduceKeysTask<K,V> nextRight,
5591 dl 1.119 BiFun<? super K, ? super K, ? extends K> reducer) {
5592 dl 1.130 super(m, p, b); this.nextRight = nextRight;
5593 dl 1.119 this.reducer = reducer;
5594     }
5595 dl 1.146 public final K getRawResult() { return result; }
5596     @SuppressWarnings("unchecked") public final void compute() {
5597 dl 1.149 final BiFun<? super K, ? super K, ? extends K> reducer;
5598     if ((reducer = this.reducer) != null) {
5599     for (int b; (b = preSplit()) > 0;)
5600     (rights = new ReduceKeysTask<K,V>
5601     (map, this, b, rights, reducer)).fork();
5602     K r = null;
5603     while (advance() != null) {
5604     K u = (K)nextKey;
5605     r = (r == null) ? u : reducer.apply(r, u);
5606     }
5607     result = r;
5608     CountedCompleter<?> c;
5609     for (c = firstComplete(); c != null; c = c.nextComplete()) {
5610     ReduceKeysTask<K,V>
5611     t = (ReduceKeysTask<K,V>)c,
5612     s = t.rights;
5613     while (s != null) {
5614     K tr, sr;
5615     if ((sr = s.result) != null)
5616     t.result = (((tr = t.result) == null) ? sr :
5617     reducer.apply(tr, sr));
5618     s = t.rights = s.nextRight;
5619     }
5620 dl 1.99 }
5621 dl 1.138 }
5622 tim 1.1 }
5623 dl 1.119 }
5624 tim 1.1
5625 dl 1.128 @SuppressWarnings("serial") static final class ReduceValuesTask<K,V>
5626 dl 1.146 extends Traverser<K,V,V> {
5627 dl 1.119 final BiFun<? super V, ? super V, ? extends V> reducer;
5628     V result;
5629 dl 1.128 ReduceValuesTask<K,V> rights, nextRight;
5630 dl 1.119 ReduceValuesTask
5631 dl 1.146 (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
5632 dl 1.128 ReduceValuesTask<K,V> nextRight,
5633 dl 1.119 BiFun<? super V, ? super V, ? extends V> reducer) {
5634 dl 1.130 super(m, p, b); this.nextRight = nextRight;
5635 dl 1.119 this.reducer = reducer;
5636     }
5637 dl 1.146 public final V getRawResult() { return result; }
5638     @SuppressWarnings("unchecked") public final void compute() {
5639 dl 1.149 final BiFun<? super V, ? super V, ? extends V> reducer;
5640     if ((reducer = this.reducer) != null) {
5641     for (int b; (b = preSplit()) > 0;)
5642     (rights = new ReduceValuesTask<K,V>
5643     (map, this, b, rights, reducer)).fork();
5644     V r = null;
5645     Object v;
5646     while ((v = advance()) != null) {
5647     V u = (V)v;
5648     r = (r == null) ? u : reducer.apply(r, u);
5649     }
5650     result = r;
5651     CountedCompleter<?> c;
5652     for (c = firstComplete(); c != null; c = c.nextComplete()) {
5653     ReduceValuesTask<K,V>
5654     t = (ReduceValuesTask<K,V>)c,
5655     s = t.rights;
5656     while (s != null) {
5657     V tr, sr;
5658     if ((sr = s.result) != null)
5659     t.result = (((tr = t.result) == null) ? sr :
5660     reducer.apply(tr, sr));
5661     s = t.rights = s.nextRight;
5662     }
5663 dl 1.119 }
5664     }
5665 tim 1.1 }
5666 dl 1.119 }
5667 tim 1.1
5668 dl 1.128 @SuppressWarnings("serial") static final class ReduceEntriesTask<K,V>
5669 dl 1.146 extends Traverser<K,V,Map.Entry<K,V>> {
5670 dl 1.119 final BiFun<Map.Entry<K,V>, Map.Entry<K,V>, ? extends Map.Entry<K,V>> reducer;
5671     Map.Entry<K,V> result;
5672 dl 1.128 ReduceEntriesTask<K,V> rights, nextRight;
5673 dl 1.119 ReduceEntriesTask
5674 dl 1.146 (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
5675 dl 1.130 ReduceEntriesTask<K,V> nextRight,
5676 dl 1.119 BiFun<Entry<K,V>, Map.Entry<K,V>, ? extends Map.Entry<K,V>> reducer) {
5677 dl 1.130 super(m, p, b); this.nextRight = nextRight;
5678 dl 1.119 this.reducer = reducer;
5679     }
5680 dl 1.146 public final Map.Entry<K,V> getRawResult() { return result; }
5681     @SuppressWarnings("unchecked") public final void compute() {
5682 dl 1.149 final BiFun<Map.Entry<K,V>, Map.Entry<K,V>, ? extends Map.Entry<K,V>> reducer;
5683     if ((reducer = this.reducer) != null) {
5684     for (int b; (b = preSplit()) > 0;)
5685     (rights = new ReduceEntriesTask<K,V>
5686     (map, this, b, rights, reducer)).fork();
5687     Map.Entry<K,V> r = null;
5688     Object v;
5689     while ((v = advance()) != null) {
5690     Map.Entry<K,V> u = entryFor((K)nextKey, (V)v);
5691     r = (r == null) ? u : reducer.apply(r, u);
5692     }
5693     result = r;
5694     CountedCompleter<?> c;
5695     for (c = firstComplete(); c != null; c = c.nextComplete()) {
5696     ReduceEntriesTask<K,V>
5697     t = (ReduceEntriesTask<K,V>)c,
5698     s = t.rights;
5699     while (s != null) {
5700     Map.Entry<K,V> tr, sr;
5701     if ((sr = s.result) != null)
5702     t.result = (((tr = t.result) == null) ? sr :
5703     reducer.apply(tr, sr));
5704     s = t.rights = s.nextRight;
5705     }
5706 dl 1.119 }
5707 dl 1.138 }
5708 dl 1.119 }
5709     }
5710 dl 1.99
5711 dl 1.128 @SuppressWarnings("serial") static final class MapReduceKeysTask<K,V,U>
5712 dl 1.146 extends Traverser<K,V,U> {
5713 dl 1.119 final Fun<? super K, ? extends U> transformer;
5714     final BiFun<? super U, ? super U, ? extends U> reducer;
5715     U result;
5716 dl 1.128 MapReduceKeysTask<K,V,U> rights, nextRight;
5717 dl 1.119 MapReduceKeysTask
5718 dl 1.146 (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
5719 dl 1.128 MapReduceKeysTask<K,V,U> nextRight,
5720 dl 1.119 Fun<? super K, ? extends U> transformer,
5721     BiFun<? super U, ? super U, ? extends U> reducer) {
5722 dl 1.130 super(m, p, b); this.nextRight = nextRight;
5723 dl 1.119 this.transformer = transformer;
5724     this.reducer = reducer;
5725     }
5726 dl 1.146 public final U getRawResult() { return result; }
5727     @SuppressWarnings("unchecked") public final void compute() {
5728 dl 1.149 final Fun<? super K, ? extends U> transformer;
5729     final BiFun<? super U, ? super U, ? extends U> reducer;
5730     if ((transformer = this.transformer) != null &&
5731     (reducer = this.reducer) != null) {
5732     for (int b; (b = preSplit()) > 0;)
5733     (rights = new MapReduceKeysTask<K,V,U>
5734     (map, this, b, rights, transformer, reducer)).fork();
5735     U r = null, u;
5736     while (advance() != null) {
5737     if ((u = transformer.apply((K)nextKey)) != null)
5738     r = (r == null) ? u : reducer.apply(r, u);
5739     }
5740     result = r;
5741     CountedCompleter<?> c;
5742     for (c = firstComplete(); c != null; c = c.nextComplete()) {
5743     MapReduceKeysTask<K,V,U>
5744     t = (MapReduceKeysTask<K,V,U>)c,
5745     s = t.rights;
5746     while (s != null) {
5747     U tr, sr;
5748     if ((sr = s.result) != null)
5749     t.result = (((tr = t.result) == null) ? sr :
5750     reducer.apply(tr, sr));
5751     s = t.rights = s.nextRight;
5752     }
5753 dl 1.119 }
5754 dl 1.138 }
5755 tim 1.1 }
5756 dl 1.4 }
5757    
5758 dl 1.128 @SuppressWarnings("serial") static final class MapReduceValuesTask<K,V,U>
5759 dl 1.146 extends Traverser<K,V,U> {
5760 dl 1.119 final Fun<? super V, ? extends U> transformer;
5761     final BiFun<? super U, ? super U, ? extends U> reducer;
5762     U result;
5763 dl 1.128 MapReduceValuesTask<K,V,U> rights, nextRight;
5764 dl 1.119 MapReduceValuesTask
5765 dl 1.146 (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
5766 dl 1.128 MapReduceValuesTask<K,V,U> nextRight,
5767 dl 1.119 Fun<? super V, ? extends U> transformer,
5768     BiFun<? super U, ? super U, ? extends U> reducer) {
5769 dl 1.130 super(m, p, b); this.nextRight = nextRight;
5770 dl 1.119 this.transformer = transformer;
5771     this.reducer = reducer;
5772     }
5773 dl 1.146 public final U getRawResult() { return result; }
5774     @SuppressWarnings("unchecked") public final void compute() {
5775 dl 1.149 final Fun<? super V, ? extends U> transformer;
5776     final BiFun<? super U, ? super U, ? extends U> reducer;
5777     if ((transformer = this.transformer) != null &&
5778     (reducer = this.reducer) != null) {
5779     for (int b; (b = preSplit()) > 0;)
5780     (rights = new MapReduceValuesTask<K,V,U>
5781     (map, this, b, rights, transformer, reducer)).fork();
5782     U r = null, u;
5783     Object v;
5784     while ((v = advance()) != null) {
5785     if ((u = transformer.apply((V)v)) != null)
5786     r = (r == null) ? u : reducer.apply(r, u);
5787     }
5788     result = r;
5789     CountedCompleter<?> c;
5790     for (c = firstComplete(); c != null; c = c.nextComplete()) {
5791     MapReduceValuesTask<K,V,U>
5792     t = (MapReduceValuesTask<K,V,U>)c,
5793     s = t.rights;
5794     while (s != null) {
5795     U tr, sr;
5796     if ((sr = s.result) != null)
5797     t.result = (((tr = t.result) == null) ? sr :
5798     reducer.apply(tr, sr));
5799     s = t.rights = s.nextRight;
5800     }
5801 dl 1.119 }
5802     }
5803     }
5804 dl 1.4 }
5805    
5806 dl 1.128 @SuppressWarnings("serial") static final class MapReduceEntriesTask<K,V,U>
5807 dl 1.146 extends Traverser<K,V,U> {
5808 dl 1.119 final Fun<Map.Entry<K,V>, ? extends U> transformer;
5809     final BiFun<? super U, ? super U, ? extends U> reducer;
5810     U result;
5811 dl 1.128 MapReduceEntriesTask<K,V,U> rights, nextRight;
5812 dl 1.119 MapReduceEntriesTask
5813 dl 1.146 (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
5814 dl 1.128 MapReduceEntriesTask<K,V,U> nextRight,
5815 dl 1.119 Fun<Map.Entry<K,V>, ? extends U> transformer,
5816     BiFun<? super U, ? super U, ? extends U> reducer) {
5817 dl 1.130 super(m, p, b); this.nextRight = nextRight;
5818 dl 1.119 this.transformer = transformer;
5819     this.reducer = reducer;
5820     }
5821 dl 1.146 public final U getRawResult() { return result; }
5822     @SuppressWarnings("unchecked") public final void compute() {
5823 dl 1.149 final Fun<Map.Entry<K,V>, ? extends U> transformer;
5824     final BiFun<? super U, ? super U, ? extends U> reducer;
5825     if ((transformer = this.transformer) != null &&
5826     (reducer = this.reducer) != null) {
5827     for (int b; (b = preSplit()) > 0;)
5828     (rights = new MapReduceEntriesTask<K,V,U>
5829     (map, this, b, rights, transformer, reducer)).fork();
5830     U r = null, u;
5831     Object v;
5832     while ((v = advance()) != null) {
5833     if ((u = transformer.apply(entryFor((K)nextKey,
5834     (V)v))) != null)
5835     r = (r == null) ? u : reducer.apply(r, u);
5836     }
5837     result = r;
5838     CountedCompleter<?> c;
5839     for (c = firstComplete(); c != null; c = c.nextComplete()) {
5840     MapReduceEntriesTask<K,V,U>
5841     t = (MapReduceEntriesTask<K,V,U>)c,
5842     s = t.rights;
5843     while (s != null) {
5844     U tr, sr;
5845     if ((sr = s.result) != null)
5846     t.result = (((tr = t.result) == null) ? sr :
5847     reducer.apply(tr, sr));
5848     s = t.rights = s.nextRight;
5849     }
5850 dl 1.119 }
5851 dl 1.138 }
5852 dl 1.119 }
5853 dl 1.4 }
5854 tim 1.1
5855 dl 1.128 @SuppressWarnings("serial") static final class MapReduceMappingsTask<K,V,U>
5856 dl 1.146 extends Traverser<K,V,U> {
5857 dl 1.119 final BiFun<? super K, ? super V, ? extends U> transformer;
5858     final BiFun<? super U, ? super U, ? extends U> reducer;
5859     U result;
5860 dl 1.128 MapReduceMappingsTask<K,V,U> rights, nextRight;
5861 dl 1.119 MapReduceMappingsTask
5862 dl 1.146 (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
5863 dl 1.128 MapReduceMappingsTask<K,V,U> nextRight,
5864 dl 1.119 BiFun<? super K, ? super V, ? extends U> transformer,
5865     BiFun<? super U, ? super U, ? extends U> reducer) {
5866 dl 1.130 super(m, p, b); this.nextRight = nextRight;
5867 dl 1.119 this.transformer = transformer;
5868     this.reducer = reducer;
5869     }
5870 dl 1.146 public final U getRawResult() { return result; }
5871     @SuppressWarnings("unchecked") public final void compute() {
5872 dl 1.149 final BiFun<? super K, ? super V, ? extends U> transformer;
5873     final BiFun<? super U, ? super U, ? extends U> reducer;
5874     if ((transformer = this.transformer) != null &&
5875     (reducer = this.reducer) != null) {
5876     for (int b; (b = preSplit()) > 0;)
5877     (rights = new MapReduceMappingsTask<K,V,U>
5878     (map, this, b, rights, transformer, reducer)).fork();
5879     U r = null, u;
5880     Object v;
5881     while ((v = advance()) != null) {
5882     if ((u = transformer.apply((K)nextKey, (V)v)) != null)
5883     r = (r == null) ? u : reducer.apply(r, u);
5884     }
5885     result = r;
5886     CountedCompleter<?> c;
5887     for (c = firstComplete(); c != null; c = c.nextComplete()) {
5888     MapReduceMappingsTask<K,V,U>
5889     t = (MapReduceMappingsTask<K,V,U>)c,
5890     s = t.rights;
5891     while (s != null) {
5892     U tr, sr;
5893     if ((sr = s.result) != null)
5894     t.result = (((tr = t.result) == null) ? sr :
5895     reducer.apply(tr, sr));
5896     s = t.rights = s.nextRight;
5897     }
5898 dl 1.119 }
5899     }
5900     }
5901     }
5902 jsr166 1.114
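    /*
     * The *ToDouble, *ToLong and *ToInt variants below accumulate a
     * primitive result starting from the caller-supplied basis (the
     * reducer's identity), so partial results can be merged without
     * null checks.
     */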
5903 dl 1.128 @SuppressWarnings("serial") static final class MapReduceKeysToDoubleTask<K,V>
5904 dl 1.146 extends Traverser<K,V,Double> {
5905 dl 1.119 final ObjectToDouble<? super K> transformer;
5906     final DoubleByDoubleToDouble reducer;
5907     final double basis;
5908     double result;
5909 dl 1.128 MapReduceKeysToDoubleTask<K,V> rights, nextRight;
5910 dl 1.119 MapReduceKeysToDoubleTask
5911 dl 1.146 (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
5912 dl 1.128 MapReduceKeysToDoubleTask<K,V> nextRight,
5913 dl 1.119 ObjectToDouble<? super K> transformer,
5914     double basis,
5915     DoubleByDoubleToDouble reducer) {
5916 dl 1.130 super(m, p, b); this.nextRight = nextRight;
5917 dl 1.119 this.transformer = transformer;
5918     this.basis = basis; this.reducer = reducer;
5919     }
5920 dl 1.146 public final Double getRawResult() { return result; }
5921     @SuppressWarnings("unchecked") public final void compute() {
5922 dl 1.149 final ObjectToDouble<? super K> transformer;
5923     final DoubleByDoubleToDouble reducer;
5924     if ((transformer = this.transformer) != null &&
5925     (reducer = this.reducer) != null) {
5926     double r = this.basis;
5927     for (int b; (b = preSplit()) > 0;)
5928     (rights = new MapReduceKeysToDoubleTask<K,V>
5929     (map, this, b, rights, transformer, r, reducer)).fork();
5930     while (advance() != null)
5931     r = reducer.apply(r, transformer.apply((K)nextKey));
5932     result = r;
5933     CountedCompleter<?> c;
5934     for (c = firstComplete(); c != null; c = c.nextComplete()) {
5935     MapReduceKeysToDoubleTask<K,V>
5936     t = (MapReduceKeysToDoubleTask<K,V>)c,
5937     s = t.rights;
5938     while (s != null) {
5939     t.result = reducer.apply(t.result, s.result);
5940     s = t.rights = s.nextRight;
5941     }
5942 dl 1.119 }
5943 dl 1.138 }
5944 dl 1.79 }
5945 dl 1.119 }
5946 dl 1.79
5947 dl 1.128 @SuppressWarnings("serial") static final class MapReduceValuesToDoubleTask<K,V>
5948 dl 1.146 extends Traverser<K,V,Double> {
5949 dl 1.119 final ObjectToDouble<? super V> transformer;
5950     final DoubleByDoubleToDouble reducer;
5951     final double basis;
5952     double result;
5953 dl 1.128 MapReduceValuesToDoubleTask<K,V> rights, nextRight;
5954 dl 1.119 MapReduceValuesToDoubleTask
5955 dl 1.146 (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
5956 dl 1.128 MapReduceValuesToDoubleTask<K,V> nextRight,
5957 dl 1.119 ObjectToDouble<? super V> transformer,
5958     double basis,
5959     DoubleByDoubleToDouble reducer) {
5960 dl 1.130 super(m, p, b); this.nextRight = nextRight;
5961 dl 1.119 this.transformer = transformer;
5962     this.basis = basis; this.reducer = reducer;
5963     }
5964 dl 1.146 public final Double getRawResult() { return result; }
5965     @SuppressWarnings("unchecked") public final void compute() {
5966 dl 1.149 final ObjectToDouble<? super V> transformer;
5967     final DoubleByDoubleToDouble reducer;
5968     if ((transformer = this.transformer) != null &&
5969     (reducer = this.reducer) != null) {
5970     double r = this.basis;
5971     for (int b; (b = preSplit()) > 0;)
5972     (rights = new MapReduceValuesToDoubleTask<K,V>
5973     (map, this, b, rights, transformer, r, reducer)).fork();
5974     Object v;
5975     while ((v = advance()) != null)
5976     r = reducer.apply(r, transformer.apply((V)v));
5977     result = r;
5978     CountedCompleter<?> c;
5979     for (c = firstComplete(); c != null; c = c.nextComplete()) {
5980     MapReduceValuesToDoubleTask<K,V>
5981     t = (MapReduceValuesToDoubleTask<K,V>)c,
5982     s = t.rights;
5983     while (s != null) {
5984     t.result = reducer.apply(t.result, s.result);
5985     s = t.rights = s.nextRight;
5986     }
5987 dl 1.119 }
5988     }
5989 dl 1.30 }
5990 dl 1.79 }
5991 dl 1.30
5992 dl 1.128 @SuppressWarnings("serial") static final class MapReduceEntriesToDoubleTask<K,V>
5993 dl 1.146 extends Traverser<K,V,Double> {
5994 dl 1.119 final ObjectToDouble<Map.Entry<K,V>> transformer;
5995     final DoubleByDoubleToDouble reducer;
5996     final double basis;
5997     double result;
5998 dl 1.128 MapReduceEntriesToDoubleTask<K,V> rights, nextRight;
5999 dl 1.119 MapReduceEntriesToDoubleTask
6000 dl 1.146 (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
6001 dl 1.128 MapReduceEntriesToDoubleTask<K,V> nextRight,
6002 dl 1.119 ObjectToDouble<Map.Entry<K,V>> transformer,
6003     double basis,
6004     DoubleByDoubleToDouble reducer) {
6005 dl 1.130 super(m, p, b); this.nextRight = nextRight;
6006 dl 1.119 this.transformer = transformer;
6007     this.basis = basis; this.reducer = reducer;
6008     }
6009 dl 1.146 public final Double getRawResult() { return result; }
6010     @SuppressWarnings("unchecked") public final void compute() {
6011 dl 1.149 final ObjectToDouble<Map.Entry<K,V>> transformer;
6012     final DoubleByDoubleToDouble reducer;
6013     if ((transformer = this.transformer) != null &&
6014     (reducer = this.reducer) != null) {
6015     double r = this.basis;
6016     for (int b; (b = preSplit()) > 0;)
6017     (rights = new MapReduceEntriesToDoubleTask<K,V>
6018     (map, this, b, rights, transformer, r, reducer)).fork();
6019     Object v;
6020     while ((v = advance()) != null)
6021     r = reducer.apply(r, transformer.apply(entryFor((K)nextKey,
6022     (V)v)));
6023     result = r;
6024     CountedCompleter<?> c;
6025     for (c = firstComplete(); c != null; c = c.nextComplete()) {
6026     MapReduceEntriesToDoubleTask<K,V>
6027     t = (MapReduceEntriesToDoubleTask<K,V>)c,
6028     s = t.rights;
6029     while (s != null) {
6030     t.result = reducer.apply(t.result, s.result);
6031     s = t.rights = s.nextRight;
6032     }
6033 dl 1.119 }
6034 dl 1.138 }
6035 dl 1.30 }
6036 tim 1.1 }
6037    
6038 dl 1.128 @SuppressWarnings("serial") static final class MapReduceMappingsToDoubleTask<K,V>
6039 dl 1.146 extends Traverser<K,V,Double> {
6040 dl 1.119 final ObjectByObjectToDouble<? super K, ? super V> transformer;
6041     final DoubleByDoubleToDouble reducer;
6042     final double basis;
6043     double result;
6044 dl 1.128 MapReduceMappingsToDoubleTask<K,V> rights, nextRight;
6045 dl 1.119 MapReduceMappingsToDoubleTask
6046 dl 1.146 (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
6047 dl 1.128 MapReduceMappingsToDoubleTask<K,V> nextRight,
6048 dl 1.119 ObjectByObjectToDouble<? super K, ? super V> transformer,
6049     double basis,
6050     DoubleByDoubleToDouble reducer) {
6051 dl 1.130 super(m, p, b); this.nextRight = nextRight;
6052 dl 1.119 this.transformer = transformer;
6053     this.basis = basis; this.reducer = reducer;
6054     }
6055 dl 1.146 public final Double getRawResult() { return result; }
6056     @SuppressWarnings("unchecked") public final void compute() {
6057 dl 1.149 final ObjectByObjectToDouble<? super K, ? super V> transformer;
6058     final DoubleByDoubleToDouble reducer;
6059     if ((transformer = this.transformer) != null &&
6060     (reducer = this.reducer) != null) {
6061     double r = this.basis;
6062     for (int b; (b = preSplit()) > 0;)
6063     (rights = new MapReduceMappingsToDoubleTask<K,V>
6064     (map, this, b, rights, transformer, r, reducer)).fork();
6065     Object v;
6066     while ((v = advance()) != null)
6067     r = reducer.apply(r, transformer.apply((K)nextKey, (V)v));
6068     result = r;
6069     CountedCompleter<?> c;
6070     for (c = firstComplete(); c != null; c = c.nextComplete()) {
6071     MapReduceMappingsToDoubleTask<K,V>
6072     t = (MapReduceMappingsToDoubleTask<K,V>)c,
6073     s = t.rights;
6074     while (s != null) {
6075     t.result = reducer.apply(t.result, s.result);
6076     s = t.rights = s.nextRight;
6077     }
6078 dl 1.119 }
6079     }
6080 dl 1.4 }
6081 dl 1.119 }
6082    
6083 dl 1.128 @SuppressWarnings("serial") static final class MapReduceKeysToLongTask<K,V>
6084 dl 1.146 extends Traverser<K,V,Long> {
6085 dl 1.119 final ObjectToLong<? super K> transformer;
6086     final LongByLongToLong reducer;
6087     final long basis;
6088     long result;
6089 dl 1.128 MapReduceKeysToLongTask<K,V> rights, nextRight;
6090 dl 1.119 MapReduceKeysToLongTask
6091 dl 1.146 (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
6092 dl 1.128 MapReduceKeysToLongTask<K,V> nextRight,
6093 dl 1.119 ObjectToLong<? super K> transformer,
6094     long basis,
6095     LongByLongToLong reducer) {
6096 dl 1.130 super(m, p, b); this.nextRight = nextRight;
6097 dl 1.119 this.transformer = transformer;
6098     this.basis = basis; this.reducer = reducer;
6099     }
6100 dl 1.146 public final Long getRawResult() { return result; }
6101     @SuppressWarnings("unchecked") public final void compute() {
6102 dl 1.149 final ObjectToLong<? super K> transformer;
6103     final LongByLongToLong reducer;
6104     if ((transformer = this.transformer) != null &&
6105     (reducer = this.reducer) != null) {
6106     long r = this.basis;
6107     for (int b; (b = preSplit()) > 0;)
6108     (rights = new MapReduceKeysToLongTask<K,V>
6109     (map, this, b, rights, transformer, r, reducer)).fork();
6110     while (advance() != null)
6111     r = reducer.apply(r, transformer.apply((K)nextKey));
6112     result = r;
6113     CountedCompleter<?> c;
6114     for (c = firstComplete(); c != null; c = c.nextComplete()) {
6115     MapReduceKeysToLongTask<K,V>
6116     t = (MapReduceKeysToLongTask<K,V>)c,
6117     s = t.rights;
6118     while (s != null) {
6119     t.result = reducer.apply(t.result, s.result);
6120     s = t.rights = s.nextRight;
6121     }
6122 dl 1.119 }
6123 dl 1.138 }
6124 dl 1.4 }
6125 dl 1.119 }
6126    
6127 dl 1.128 @SuppressWarnings("serial") static final class MapReduceValuesToLongTask<K,V>
6128 dl 1.146 extends Traverser<K,V,Long> {
6129 dl 1.119 final ObjectToLong<? super V> transformer;
6130     final LongByLongToLong reducer;
6131     final long basis;
6132     long result;
6133 dl 1.128 MapReduceValuesToLongTask<K,V> rights, nextRight;
6134 dl 1.119 MapReduceValuesToLongTask
6135 dl 1.146 (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
6136 dl 1.128 MapReduceValuesToLongTask<K,V> nextRight,
6137 dl 1.119 ObjectToLong<? super V> transformer,
6138     long basis,
6139     LongByLongToLong reducer) {
6140 dl 1.130 super(m, p, b); this.nextRight = nextRight;
6141 dl 1.119 this.transformer = transformer;
6142     this.basis = basis; this.reducer = reducer;
6143     }
6144 dl 1.146 public final Long getRawResult() { return result; }
6145     @SuppressWarnings("unchecked") public final void compute() {
6146 dl 1.149 final ObjectToLong<? super V> transformer;
6147     final LongByLongToLong reducer;
6148     if ((transformer = this.transformer) != null &&
6149     (reducer = this.reducer) != null) {
6150     long r = this.basis;
6151     for (int b; (b = preSplit()) > 0;)
6152     (rights = new MapReduceValuesToLongTask<K,V>
6153     (map, this, b, rights, transformer, r, reducer)).fork();
6154     Object v;
6155     while ((v = advance()) != null)
6156     r = reducer.apply(r, transformer.apply((V)v));
6157     result = r;
6158     CountedCompleter<?> c;
6159     for (c = firstComplete(); c != null; c = c.nextComplete()) {
6160     MapReduceValuesToLongTask<K,V>
6161     t = (MapReduceValuesToLongTask<K,V>)c,
6162     s = t.rights;
6163     while (s != null) {
6164     t.result = reducer.apply(t.result, s.result);
6165     s = t.rights = s.nextRight;
6166     }
6167 dl 1.119 }
6168     }
6169 jsr166 1.95 }
6170 dl 1.119 }
6171    
6172 dl 1.128 @SuppressWarnings("serial") static final class MapReduceEntriesToLongTask<K,V>
6173 dl 1.146 extends Traverser<K,V,Long> {
6174 dl 1.119 final ObjectToLong<Map.Entry<K,V>> transformer;
6175     final LongByLongToLong reducer;
6176     final long basis;
6177     long result;
6178 dl 1.128 MapReduceEntriesToLongTask<K,V> rights, nextRight;
6179 dl 1.119 MapReduceEntriesToLongTask
6180 dl 1.146 (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
6181 dl 1.128 MapReduceEntriesToLongTask<K,V> nextRight,
6182 dl 1.119 ObjectToLong<Map.Entry<K,V>> transformer,
6183     long basis,
6184     LongByLongToLong reducer) {
6185 dl 1.130 super(m, p, b); this.nextRight = nextRight;
6186 dl 1.119 this.transformer = transformer;
6187     this.basis = basis; this.reducer = reducer;
6188     }
6189 dl 1.146 public final Long getRawResult() { return result; }
6190     @SuppressWarnings("unchecked") public final void compute() {
6191 dl 1.149 final ObjectToLong<Map.Entry<K,V>> transformer;
6192     final LongByLongToLong reducer;
6193     if ((transformer = this.transformer) != null &&
6194     (reducer = this.reducer) != null) {
6195     long r = this.basis;
6196     for (int b; (b = preSplit()) > 0;)
6197     (rights = new MapReduceEntriesToLongTask<K,V>
6198     (map, this, b, rights, transformer, r, reducer)).fork();
6199     Object v;
6200     while ((v = advance()) != null)
6201     r = reducer.apply(r, transformer.apply(entryFor((K)nextKey,
6202     (V)v)));
6203     result = r;
6204     CountedCompleter<?> c;
6205     for (c = firstComplete(); c != null; c = c.nextComplete()) {
6206     MapReduceEntriesToLongTask<K,V>
6207     t = (MapReduceEntriesToLongTask<K,V>)c,
6208     s = t.rights;
6209     while (s != null) {
6210     t.result = reducer.apply(t.result, s.result);
6211     s = t.rights = s.nextRight;
6212     }
6213 dl 1.119 }
6214 dl 1.138 }
6215 dl 1.4 }
6216 tim 1.1 }
6217    
6218 dl 1.128 @SuppressWarnings("serial") static final class MapReduceMappingsToLongTask<K,V>
6219 dl 1.146 extends Traverser<K,V,Long> {
6220 dl 1.119 final ObjectByObjectToLong<? super K, ? super V> transformer;
6221     final LongByLongToLong reducer;
6222     final long basis;
6223     long result;
6224 dl 1.128 MapReduceMappingsToLongTask<K,V> rights, nextRight;
6225 dl 1.119 MapReduceMappingsToLongTask
6226 dl 1.146 (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
6227 dl 1.128 MapReduceMappingsToLongTask<K,V> nextRight,
6228 dl 1.119 ObjectByObjectToLong<? super K, ? super V> transformer,
6229     long basis,
6230     LongByLongToLong reducer) {
6231 dl 1.130 super(m, p, b); this.nextRight = nextRight;
6232 dl 1.119 this.transformer = transformer;
6233     this.basis = basis; this.reducer = reducer;
6234     }
6235 dl 1.146 public final Long getRawResult() { return result; }
6236     @SuppressWarnings("unchecked") public final void compute() {
6237 dl 1.149 final ObjectByObjectToLong<? super K, ? super V> transformer;
6238     final LongByLongToLong reducer;
6239     if ((transformer = this.transformer) != null &&
6240     (reducer = this.reducer) != null) {
6241     long r = this.basis;
6242     for (int b; (b = preSplit()) > 0;)
6243     (rights = new MapReduceMappingsToLongTask<K,V>
6244     (map, this, b, rights, transformer, r, reducer)).fork();
6245     Object v;
6246     while ((v = advance()) != null)
6247     r = reducer.apply(r, transformer.apply((K)nextKey, (V)v));
6248     result = r;
6249     CountedCompleter<?> c;
6250     for (c = firstComplete(); c != null; c = c.nextComplete()) {
6251     MapReduceMappingsToLongTask<K,V>
6252     t = (MapReduceMappingsToLongTask<K,V>)c,
6253     s = t.rights;
6254     while (s != null) {
6255     t.result = reducer.apply(t.result, s.result);
6256     s = t.rights = s.nextRight;
6257     }
6258 dl 1.119 }
6259     }
6260 dl 1.4 }
6261 tim 1.1 }
6262    
6263 dl 1.128 @SuppressWarnings("serial") static final class MapReduceKeysToIntTask<K,V>
6264 dl 1.146 extends Traverser<K,V,Integer> {
6265 dl 1.119 final ObjectToInt<? super K> transformer;
6266     final IntByIntToInt reducer;
6267     final int basis;
6268     int result;
6269 dl 1.128 MapReduceKeysToIntTask<K,V> rights, nextRight;
6270 dl 1.119 MapReduceKeysToIntTask
6271 dl 1.146 (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
6272 dl 1.128 MapReduceKeysToIntTask<K,V> nextRight,
6273 dl 1.119 ObjectToInt<? super K> transformer,
6274     int basis,
6275     IntByIntToInt reducer) {
6276 dl 1.130 super(m, p, b); this.nextRight = nextRight;
6277 dl 1.119 this.transformer = transformer;
6278     this.basis = basis; this.reducer = reducer;
6279     }
6280 dl 1.146 public final Integer getRawResult() { return result; }
6281     @SuppressWarnings("unchecked") public final void compute() {
6282 dl 1.149 final ObjectToInt<? super K> transformer;
6283     final IntByIntToInt reducer;
6284     if ((transformer = this.transformer) != null &&
6285     (reducer = this.reducer) != null) {
6286     int r = this.basis;
6287     for (int b; (b = preSplit()) > 0;)
6288     (rights = new MapReduceKeysToIntTask<K,V>
6289     (map, this, b, rights, transformer, r, reducer)).fork();
6290     while (advance() != null)
6291     r = reducer.apply(r, transformer.apply((K)nextKey));
6292     result = r;
6293     CountedCompleter<?> c;
6294     for (c = firstComplete(); c != null; c = c.nextComplete()) {
6295     MapReduceKeysToIntTask<K,V>
6296     t = (MapReduceKeysToIntTask<K,V>)c,
6297     s = t.rights;
6298     while (s != null) {
6299     t.result = reducer.apply(t.result, s.result);
6300     s = t.rights = s.nextRight;
6301     }
6302 dl 1.119 }
6303 dl 1.138 }
6304 dl 1.30 }
6305     }
6306    
6307 dl 1.128 @SuppressWarnings("serial") static final class MapReduceValuesToIntTask<K,V>
6308 dl 1.146 extends Traverser<K,V,Integer> {
6309 dl 1.119 final ObjectToInt<? super V> transformer;
6310     final IntByIntToInt reducer;
6311     final int basis;
6312     int result;
6313 dl 1.128 MapReduceValuesToIntTask<K,V> rights, nextRight;
6314 dl 1.119 MapReduceValuesToIntTask
6315 dl 1.146 (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
6316 dl 1.128 MapReduceValuesToIntTask<K,V> nextRight,
6317 dl 1.119 ObjectToInt<? super V> transformer,
6318     int basis,
6319     IntByIntToInt reducer) {
6320 dl 1.130 super(m, p, b); this.nextRight = nextRight;
6321 dl 1.119 this.transformer = transformer;
6322     this.basis = basis; this.reducer = reducer;
6323     }
6324 dl 1.146 public final Integer getRawResult() { return result; }
6325     @SuppressWarnings("unchecked") public final void compute() {
6326 dl 1.149 final ObjectToInt<? super V> transformer;
6327     final IntByIntToInt reducer;
6328     if ((transformer = this.transformer) != null &&
6329     (reducer = this.reducer) != null) {
6330     int r = this.basis;
6331     for (int b; (b = preSplit()) > 0;)
6332     (rights = new MapReduceValuesToIntTask<K,V>
6333     (map, this, b, rights, transformer, r, reducer)).fork();
6334     Object v;
6335     while ((v = advance()) != null)
6336     r = reducer.apply(r, transformer.apply((V)v));
6337     result = r;
6338     CountedCompleter<?> c;
6339     for (c = firstComplete(); c != null; c = c.nextComplete()) {
6340     MapReduceValuesToIntTask<K,V>
6341     t = (MapReduceValuesToIntTask<K,V>)c,
6342     s = t.rights;
6343     while (s != null) {
6344     t.result = reducer.apply(t.result, s.result);
6345     s = t.rights = s.nextRight;
6346     }
6347 dl 1.119 }
6348 dl 1.2 }
6349 tim 1.1 }
6350     }
6351    
6352 dl 1.128 @SuppressWarnings("serial") static final class MapReduceEntriesToIntTask<K,V>
6353 dl 1.146 extends Traverser<K,V,Integer> {
6354 dl 1.119 final ObjectToInt<Map.Entry<K,V>> transformer;
6355     final IntByIntToInt reducer;
6356     final int basis;
6357     int result;
6358 dl 1.128 MapReduceEntriesToIntTask<K,V> rights, nextRight;
6359 dl 1.119 MapReduceEntriesToIntTask
6360 dl 1.146 (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
6361 dl 1.128 MapReduceEntriesToIntTask<K,V> nextRight,
6362 dl 1.119 ObjectToInt<Map.Entry<K,V>> transformer,
6363     int basis,
6364     IntByIntToInt reducer) {
6365 dl 1.130 super(m, p, b); this.nextRight = nextRight;
6366 dl 1.119 this.transformer = transformer;
6367     this.basis = basis; this.reducer = reducer;
6368     }
6369 dl 1.146 public final Integer getRawResult() { return result; }
6370     @SuppressWarnings("unchecked") public final void compute() {
6371 dl 1.149 final ObjectToInt<Map.Entry<K,V>> transformer;
6372     final IntByIntToInt reducer;
6373     if ((transformer = this.transformer) != null &&
6374     (reducer = this.reducer) != null) {
6375     int r = this.basis;
6376     for (int b; (b = preSplit()) > 0;)
6377     (rights = new MapReduceEntriesToIntTask<K,V>
6378     (map, this, b, rights, transformer, r, reducer)).fork();
6379     Object v;
6380     while ((v = advance()) != null)
6381     r = reducer.apply(r, transformer.apply(entryFor((K)nextKey,
6382     (V)v)));
6383     result = r;
6384     CountedCompleter<?> c;
6385     for (c = firstComplete(); c != null; c = c.nextComplete()) {
6386     MapReduceEntriesToIntTask<K,V>
6387     t = (MapReduceEntriesToIntTask<K,V>)c,
6388     s = t.rights;
6389     while (s != null) {
6390     t.result = reducer.apply(t.result, s.result);
6391     s = t.rights = s.nextRight;
6392     }
6393 dl 1.119 }
6394 dl 1.138 }
6395 dl 1.4 }
6396 dl 1.119 }
6397 tim 1.1
6398 dl 1.128 @SuppressWarnings("serial") static final class MapReduceMappingsToIntTask<K,V>
6399 dl 1.146 extends Traverser<K,V,Integer> {
6400 dl 1.119 final ObjectByObjectToInt<? super K, ? super V> transformer;
6401     final IntByIntToInt reducer;
6402     final int basis;
6403     int result;
6404 dl 1.128 MapReduceMappingsToIntTask<K,V> rights, nextRight;
6405 dl 1.119 MapReduceMappingsToIntTask
6406 dl 1.146 (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
6407     MapReduceMappingsToIntTask<K,V> nextRight,
6408 dl 1.119 ObjectByObjectToInt<? super K, ? super V> transformer,
6409     int basis,
6410     IntByIntToInt reducer) {
6411 dl 1.130 super(m, p, b); this.nextRight = nextRight;
6412 dl 1.119 this.transformer = transformer;
6413     this.basis = basis; this.reducer = reducer;
6414     }
6415 dl 1.146 public final Integer getRawResult() { return result; }
6416     @SuppressWarnings("unchecked") public final void compute() {
6417 dl 1.149 final ObjectByObjectToInt<? super K, ? super V> transformer;
6418     final IntByIntToInt reducer;
6419     if ((transformer = this.transformer) != null &&
6420     (reducer = this.reducer) != null) {
6421     int r = this.basis;
6422     for (int b; (b = preSplit()) > 0;)
6423     (rights = new MapReduceMappingsToIntTask<K,V>
6424     (map, this, b, rights, transformer, r, reducer)).fork();
6425     Object v;
6426     while ((v = advance()) != null)
6427     r = reducer.apply(r, transformer.apply((K)nextKey, (V)v));
6428     result = r;
6429     CountedCompleter<?> c;
6430     for (c = firstComplete(); c != null; c = c.nextComplete()) {
6431     MapReduceMappingsToIntTask<K,V>
6432     t = (MapReduceMappingsToIntTask<K,V>)c,
6433     s = t.rights;
6434     while (s != null) {
6435     t.result = reducer.apply(t.result, s.result);
6436     s = t.rights = s.nextRight;
6437     }
6438 dl 1.119 }
6439 dl 1.138 }
6440 tim 1.1 }
6441     }
6442 dl 1.99
6443     // Unsafe mechanics
6444 dl 1.149 private static final sun.misc.Unsafe U;
6445     private static final long SIZECTL;
6446     private static final long TRANSFERINDEX;
6447     private static final long TRANSFERORIGIN;
6448     private static final long BASECOUNT;
6449     private static final long COUNTERBUSY;
6450     private static final long CELLVALUE;
6451 dl 1.119 private static final long ABASE;
6452     private static final int ASHIFT;
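    // ABASE and ASHIFT let element i of a Node[] be addressed as
    // ((long)i << ASHIFT) + ABASE for the volatile per-bin accesses
    // used elsewhere in this file.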
6453 dl 1.99
6454     static {
6455 dl 1.119 int ss;
6456 dl 1.99 try {
6457 dl 1.149 U = sun.misc.Unsafe.getUnsafe();
6458 dl 1.119 Class<?> k = ConcurrentHashMap.class;
6459 dl 1.149 SIZECTL = U.objectFieldOffset
6460 dl 1.119 (k.getDeclaredField("sizeCtl"));
6461 dl 1.149 TRANSFERINDEX = U.objectFieldOffset
6462     (k.getDeclaredField("transferIndex"));
6463     TRANSFERORIGIN = U.objectFieldOffset
6464     (k.getDeclaredField("transferOrigin"));
6465     BASECOUNT = U.objectFieldOffset
6466     (k.getDeclaredField("baseCount"));
6467     COUNTERBUSY = U.objectFieldOffset
6468     (k.getDeclaredField("counterBusy"));
6469     Class<?> ck = CounterCell.class;
6470     CELLVALUE = U.objectFieldOffset
6471     (ck.getDeclaredField("value"));
6472 dl 1.119 Class<?> sc = Node[].class;
6473 dl 1.149 ABASE = U.arrayBaseOffset(sc);
6474     ss = U.arrayIndexScale(sc);
6475     ASHIFT = 31 - Integer.numberOfLeadingZeros(ss);
6476 dl 1.99 } catch (Exception e) {
6477     throw new Error(e);
6478     }
6479 dl 1.119 if ((ss & (ss-1)) != 0)
6480 dl 1.99 throw new Error("data type scale not a power of two");
6481     }
6482 dl 1.128 }