root/jsr166/jsr166/src/main/java/util/concurrent/ConcurrentHashMap.java
Revision: 1.209
Committed: Tue May 7 20:25:36 2013 UTC (11 years ago) by dl
Branch: MAIN
Changes since 1.208: +32 -2 lines
Log Message:
Override default Map methods

File Contents

# User Rev Content
1 dl 1.2 /*
2     * Written by Doug Lea with assistance from members of JCP JSR-166
3 dl 1.36 * Expert Group and released to the public domain, as explained at
4 dl 1.100 * http://creativecommons.org/publicdomain/zero/1.0/
5 dl 1.2 */
6    
7 tim 1.1 package java.util.concurrent;
8 dl 1.208 import java.io.Serializable;
9     import java.io.ObjectStreamField;
10     import java.lang.reflect.ParameterizedType;
11     import java.lang.reflect.Type;
12     import java.util.AbstractCollection;
13     import java.util.AbstractMap;
14     import java.util.AbstractSet;
15 dl 1.119 import java.util.Arrays;
16     import java.util.Collection;
17 dl 1.208 import java.util.Comparator;
18     import java.util.ConcurrentModificationException;
19     import java.util.Enumeration;
20     import java.util.HashMap;
21 dl 1.119 import java.util.Hashtable;
22     import java.util.Iterator;
23 dl 1.208 import java.util.Map;
24 dl 1.119 import java.util.NoSuchElementException;
25 dl 1.208 import java.util.Set;
26     import java.util.Spliterator;
27 dl 1.119 import java.util.concurrent.ConcurrentMap;
28 dl 1.208 import java.util.concurrent.ForkJoinPool;
29     import java.util.concurrent.atomic.AtomicReference;
30 dl 1.119 import java.util.concurrent.locks.AbstractQueuedSynchronizer;
31 dl 1.208 import java.util.concurrent.locks.ReentrantLock;
32     import java.util.function.BiConsumer;
33     import java.util.function.BiFunction;
34     import java.util.function.BinaryOperator;
35     import java.util.function.Consumer;
36     import java.util.function.DoubleBinaryOperator;
37     import java.util.function.Function;
38     import java.util.function.IntBinaryOperator;
39     import java.util.function.LongBinaryOperator;
40     import java.util.function.ToDoubleBiFunction;
41     import java.util.function.ToDoubleFunction;
42     import java.util.function.ToIntBiFunction;
43     import java.util.function.ToIntFunction;
44     import java.util.function.ToLongBiFunction;
45     import java.util.function.ToLongFunction;
46 tim 1.1
47     /**
48 dl 1.4 * A hash table supporting full concurrency of retrievals and
49 dl 1.119 * high expected concurrency for updates. This class obeys the
50 dl 1.22 * same functional specification as {@link java.util.Hashtable}, and
51 dl 1.19 * includes versions of methods corresponding to each method of
52 dl 1.119 * {@code Hashtable}. However, even though all operations are
53 dl 1.19 * thread-safe, retrieval operations do <em>not</em> entail locking,
54     * and there is <em>not</em> any support for locking the entire table
55     * in a way that prevents all access. This class is fully
56 dl 1.119 * interoperable with {@code Hashtable} in programs that rely on its
57 dl 1.4 * thread safety but not on its synchronization details.
58 tim 1.11 *
59 jsr166 1.145 * <p>Retrieval operations (including {@code get}) generally do not
60 dl 1.119 * block, so may overlap with update operations (including {@code put}
61     * and {@code remove}). Retrievals reflect the results of the most
62     * recently <em>completed</em> update operations holding upon their
63 dl 1.126 * onset. (More formally, an update operation for a given key bears a
64     * <em>happens-before</em> relation with any (non-null) retrieval for
65     * that key reporting the updated value.) For aggregate operations
66     * such as {@code putAll} and {@code clear}, concurrent retrievals may
67     * reflect insertion or removal of only some entries. Similarly,
68     * Iterators and Enumerations return elements reflecting the state of
69     * the hash table at some point at or since the creation of the
70     * iterator/enumeration. They do <em>not</em> throw {@link
71     * ConcurrentModificationException}. However, iterators are designed
72     * to be used by only one thread at a time. Bear in mind that the
73     * results of aggregate status methods including {@code size}, {@code
74     * isEmpty}, and {@code containsValue} are typically useful only when
75     * a map is not undergoing concurrent updates in other threads.
76     * Otherwise the results of these methods reflect transient states
77     * that may be adequate for monitoring or estimation purposes, but not
78     * for program control.
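        *
        * <p>As an illustrative sketch of the happens-before guarantee above
        * (the {@code Box} class and its {@code payload} field are hypothetical,
        * not part of this API):
        * <pre> {@code
        * ConcurrentHashMap<String,Box> map = new ConcurrentHashMap<>();
        *
        * // writer thread
        * Box box = new Box();
        * box.payload = 42;               // ordinary write, then publish:
        * map.put("answer", box);
        *
        * // reader thread
        * Box b = map.get("answer");
        * if (b != null)                  // if the put is observed ...
        *     assert b.payload == 42;     // ... its earlier writes are visible too
        * }</pre>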
79 tim 1.1 *
80 jsr166 1.145 * <p>The table is dynamically expanded when there are too many
81 dl 1.119 * collisions (i.e., keys that have distinct hash codes but fall into
82     * the same slot modulo the table size), with the expected average
83     * effect of maintaining roughly two bins per mapping (corresponding
84     * to a 0.75 load factor threshold for resizing). There may be much
85     * variance around this average as mappings are added and removed, but
86     * overall, this maintains a commonly accepted time/space tradeoff for
87     * hash tables. However, resizing this or any other kind of hash
88     * table may be a relatively slow operation. When possible, it is a
89     * good idea to provide a size estimate as an optional {@code
90     * initialCapacity} constructor argument. An additional optional
91     * {@code loadFactor} constructor argument provides a further means of
92     * customizing initial table capacity by specifying the table density
93     * to be used in calculating the amount of space to allocate for the
94     * given number of elements. Also, for compatibility with previous
95     * versions of this class, constructors may optionally specify an
96     * expected {@code concurrencyLevel} as an additional hint for
97     * internal sizing. Note that using many keys with exactly the same
98     * {@code hashCode()} is a sure way to slow down performance of any
99     * hash table.
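        *
        * <p>For example, a minimal sizing sketch (the figure of 10000 expected
        * mappings is illustrative only):
        * <pre> {@code
        * // pre-size for roughly 10000 mappings to avoid incremental resizing
        * ConcurrentHashMap<Long,String> cache = new ConcurrentHashMap<>(10000);
        * }</pre>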
100 tim 1.1 *
101 jsr166 1.145 * <p>A {@link Set} projection of a ConcurrentHashMap may be created
102 dl 1.137 * (using {@link #newKeySet()} or {@link #newKeySet(int)}), or viewed
103     * (using {@link #keySet(Object)}) when only keys are of interest, and the
104     * mapped values are (perhaps transiently) not used or all take the
105     * same mapping value.
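        *
        * <p>A brief sketch of both forms (names are illustrative):
        * <pre> {@code
        * // a standalone concurrent Set backed by a new ConcurrentHashMap
        * Set<String> seen = ConcurrentHashMap.newKeySet();
        * seen.add("startup");
        *
        * // a Set view of an existing map; add() maps the key to Boolean.TRUE
        * ConcurrentHashMap<String,Boolean> flags = new ConcurrentHashMap<>();
        * Set<String> flagView = flags.keySet(Boolean.TRUE);
        * flagView.add("verbose");   // same as flags.put("verbose", Boolean.TRUE)
        * }</pre>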
106     *
107 jsr166 1.145 * <p>A ConcurrentHashMap can be used as a scalable frequency map (a
108 dl 1.153 * form of histogram or multiset) by using {@link
109     * java.util.concurrent.atomic.LongAdder} values and initializing via
110 jsr166 1.175 * {@link #computeIfAbsent computeIfAbsent}. For example, to add a count
111     * to a {@code ConcurrentHashMap<String,LongAdder> freqs}, you can use
112     * {@code freqs.computeIfAbsent(key, k -> new LongAdder()).increment();}
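        *
        * <p>A fuller sketch of this idiom ({@code word} below stands for the
        * key being counted):
        * <pre> {@code
        * ConcurrentHashMap<String,LongAdder> freqs = new ConcurrentHashMap<>();
        *
        * // record one occurrence of word
        * freqs.computeIfAbsent(word, k -> new LongAdder()).increment();
        *
        * // read the current total for word (0 if never seen)
        * LongAdder a = freqs.get(word);
        * long n = (a == null) ? 0L : a.sum();
        * }</pre>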
113 dl 1.137 *
114 dl 1.45 * <p>This class and its views and iterators implement all of the
115     * <em>optional</em> methods of the {@link Map} and {@link Iterator}
116     * interfaces.
117 dl 1.23 *
118 jsr166 1.145 * <p>Like {@link Hashtable} but unlike {@link HashMap}, this class
119 dl 1.119 * does <em>not</em> allow {@code null} to be used as a key or value.
120 tim 1.1 *
121 dl 1.151 * <p>ConcurrentHashMaps support sequential and parallel bulk
122     * operations. (Parallel forms use the {@link
123     * ForkJoinPool#commonPool()}). Tasks that may be used in other
124     * contexts are available in class {@link ForkJoinTasks}. These
125     * operations are designed to be safely, and often sensibly, applied
126     * even with maps that are being concurrently updated by other
127     * threads; for example, when computing a snapshot summary of the
128     * values in a shared registry. There are three kinds of operation,
129     * each with four forms, accepting functions with Keys, Values,
130     * Entries, and (Key, Value) arguments and/or return values. Because
131     * the elements of a ConcurrentHashMap are not ordered in any
132     * particular way, and may be processed in different orders in
133     * different parallel executions, the correctness of supplied
134     * functions should not depend on any ordering, or on any other
135     * objects or values that may transiently change while computation is
136     * in progress; and except for forEach actions, should ideally be
137     * side-effect-free.
138 dl 1.137 *
139     * <ul>
140     * <li> forEach: Perform a given action on each element.
141     * A variant form applies a given transformation on each element
142     * before performing the action.</li>
143     *
144     * <li> search: Return the first available non-null result of
145     * applying a given function on each element; skipping further
146     * search when a result is found.</li>
147     *
148     * <li> reduce: Accumulate each element. The supplied reduction
149     * function cannot rely on ordering (more formally, it should be
150     * both associative and commutative). There are five variants:
151     *
152     * <ul>
153     *
154     * <li> Plain reductions. (There is not a form of this method for
155     * (key, value) function arguments since there is no corresponding
156     * return type.)</li>
157     *
158     * <li> Mapped reductions that accumulate the results of a given
159     * function applied to each element.</li>
160     *
161     * <li> Reductions to scalar doubles, longs, and ints, using a
162     * given basis value.</li>
163     *
164 jsr166 1.178 * </ul>
165 dl 1.137 * </li>
166     * </ul>
167     *
168     * <p>The concurrency properties of bulk operations follow
169     * from those of ConcurrentHashMap: Any non-null result returned
170     * from {@code get(key)} and related access methods bears a
171     * happens-before relation with the associated insertion or
172     * update. The result of any bulk operation reflects the
173     * composition of these per-element relations (but is not
174     * necessarily atomic with respect to the map as a whole unless it
175     * is somehow known to be quiescent). Conversely, because keys
176     * and values in the map are never null, null serves as a reliable
177     * atomic indicator of the current lack of any result. To
178     * maintain this property, null serves as an implicit basis for
179     * all non-scalar reduction operations. For the double, long, and
180     * int versions, the basis should be one that, when combined with
181     * any other value, returns that other value (more formally, it
182     * should be the identity element for the reduction). Most common
183     * reductions have these properties; for example, computing a sum
184     * with basis 0 or a minimum with basis MAX_VALUE.
185     *
186     * <p>Search and transformation functions provided as arguments
187     * should similarly return null to indicate the lack of any result
188     * (in which case it is not used). In the case of mapped
189     * reductions, this also enables transformations to serve as
190     * filters, returning null (or, in the case of primitive
191     * specializations, the identity basis) if the element should not
192     * be combined. You can create compound transformations and
193     * filterings by composing them yourself under this "null means
194     * there is nothing there now" rule before using them in search or
195     * reduce operations.
196     *
197     * <p>Methods accepting and/or returning Entry arguments maintain
198     * key-value associations. They may be useful for example when
199     * finding the key for the greatest value. Note that "plain" Entry
200     * arguments can be supplied using {@code new
201     * AbstractMap.SimpleEntry(k,v)}.
202     *
203 jsr166 1.145 * <p>Bulk operations may complete abruptly, throwing an
204 dl 1.137 * exception encountered in the application of a supplied
205     * function. Bear in mind when handling such exceptions that other
206     * concurrently executing functions could also have thrown
207     * exceptions, or would have done so if the first exception had
208     * not occurred.
209     *
210 dl 1.151 * <p>Speedups for parallel compared to sequential forms are common
211     * but not guaranteed. Parallel operations involving brief functions
212     * on small maps may execute more slowly than sequential forms if the
213     * underlying work to parallelize the computation is more expensive
214     * than the computation itself. Similarly, parallelization may not
215     * lead to much actual parallelism if all processors are busy
216     * performing unrelated tasks.
217 dl 1.137 *
218 jsr166 1.145 * <p>All arguments to all task methods must be non-null.
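        *
        * <p>An illustrative sketch of the three kinds of bulk operation. The
        * signatures shown are those of the released Java SE 8 form of this
        * class, which take a leading parallelism-threshold argument; the exact
        * method forms in this revision may differ:
        * <pre> {@code
        * ConcurrentHashMap<String,Integer> scores = new ConcurrentHashMap<>();
        *
        * // forEach: apply an action to every (key, value) pair
        * scores.forEach(1, (k, v) -> System.out.println(k + "=" + v));
        *
        * // search: first non-null result of the function, if any
        * String anyOver90 = scores.search(1, (k, v) -> v > 90 ? k : null);
        *
        * // reduce: accumulate transformed values from an identity basis
        * long total = scores.reduceValuesToLong(1, Integer::longValue, 0L, Long::sum);
        * }</pre>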
219 dl 1.137 *
220 dl 1.42 * <p>This class is a member of the
221 jsr166 1.88 * <a href="{@docRoot}/../technotes/guides/collections/index.html">
222 dl 1.42 * Java Collections Framework</a>.
223     *
224 dl 1.8 * @since 1.5
225     * @author Doug Lea
226 dl 1.27 * @param <K> the type of keys maintained by this map
227 jsr166 1.64 * @param <V> the type of mapped values
228 dl 1.8 */
229 jsr166 1.186 public class ConcurrentHashMap<K,V>
230     implements ConcurrentMap<K,V>, Serializable {
231 dl 1.20 private static final long serialVersionUID = 7249069246763182397L;
232 tim 1.1
233     /*
234 dl 1.119 * Overview:
235     *
236     * The primary design goal of this hash table is to maintain
237     * concurrent readability (typically method get(), but also
238     * iterators and related methods) while minimizing update
239     * contention. Secondary goals are to keep space consumption about
240     * the same or better than java.util.HashMap, and to support high
241     * initial insertion rates on an empty table by many threads.
242     *
243 dl 1.151 * Each key-value mapping is held in a Node. Because Node key
244     * fields can contain special values, they are defined using plain
245     * Object types (not type "K"). This leads to a lot of explicit
246     * casting (and many explicit warning suppressions to tell
247     * compilers not to complain about it). It also allows some of the
248     * public methods to be factored into a smaller number of internal
249     * methods (although sadly not so for the five variants of
250     * put-related operations). The validation-based approach
251     * explained below leads to a lot of code sprawl because
252 dl 1.119 * retry-control precludes factoring into smaller methods.
253     *
254     * The table is lazily initialized to a power-of-two size upon the
255     * first insertion. Each bin in the table normally contains a
256     * list of Nodes (most often, the list has only zero or one Node).
257     * Table accesses require volatile/atomic reads, writes, and
258     * CASes. Because there is no other way to arrange this without
259     * adding further indirections, we use intrinsics
260     * (sun.misc.Unsafe) operations. The lists of nodes within bins
261     * are always accurately traversable under volatile reads, so long
262     * as lookups check hash code and non-nullness of value before
263     * checking key equality.
264     *
265 dl 1.149 * We use the top (sign) bit of Node hash fields for control
266     * purposes -- it is available anyway because of addressing
267     * constraints. Nodes with negative hash fields are forwarding
268     * nodes to either TreeBins or resized tables. The lower 31 bits
269     * of each normal Node's hash field contain a transformation of
270     * the key's hash code.
271 dl 1.119 *
272     * Insertion (via put or its variants) of the first node in an
273     * empty bin is performed by just CASing it to the bin. This is
274     * by far the most common case for put operations under most
275     * key/hash distributions. Other update operations (insert,
276     * delete, and replace) require locks. We do not want to waste
277     * the space required to associate a distinct lock object with
278     * each bin, so instead use the first node of a bin list itself as
279 dl 1.149 * a lock. Locking support for these locks relies on builtin
280     * "synchronized" monitors.
281 dl 1.119 *
282     * Using the first node of a list as a lock does not by itself
283     * suffice though: When a node is locked, any update must first
284     * validate that it is still the first node after locking it, and
285     * retry if not. Because new nodes are always appended to lists,
286     * once a node is first in a bin, it remains first until deleted
287     * or the bin becomes invalidated (upon resizing). However,
288     * operations that only conditionally update may inspect nodes
289     * until the point of update. This is a converse of sorts to the
290     * lazy locking technique described by Herlihy & Shavit.
291     *
292     * The main disadvantage of per-bin locks is that other update
293     * operations on other nodes in a bin list protected by the same
294     * lock can stall, for example when user equals() or mapping
295     * functions take a long time. However, statistically, under
296     * random hash codes, this is not a common problem. Ideally, the
297     * frequency of nodes in bins follows a Poisson distribution
298     * (http://en.wikipedia.org/wiki/Poisson_distribution) with a
299     * parameter of about 0.5 on average, given the resizing threshold
300     * of 0.75, although with a large variance because of resizing
301     * granularity. Ignoring variance, the expected occurrences of
302     * list size k are (exp(-0.5) * pow(0.5, k) / factorial(k)). The
303     * first values are:
304     *
305     * 0: 0.60653066
306     * 1: 0.30326533
307     * 2: 0.07581633
308     * 3: 0.01263606
309     * 4: 0.00157952
310     * 5: 0.00015795
311     * 6: 0.00001316
312     * 7: 0.00000094
313     * 8: 0.00000006
314     * more: less than 1 in ten million
315     *
316     * Lock contention probability for two threads accessing distinct
317     * elements is roughly 1 / (8 * #elements) under random hashes.
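        *
        * The probabilities above can be reproduced with a short sketch
        * (illustrative only, not part of the implementation):
        *
        *   double p = Math.exp(-0.5);          // Poisson(0.5), k = 0 term
        *   for (int k = 0; k <= 8; ++k) {
        *       System.out.printf("%d: %.8f%n", k, p);
        *       p = p * 0.5 / (k + 1);          // next term of the series
        *   }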
318     *
319     * Actual hash code distributions encountered in practice
320     * sometimes deviate significantly from uniform randomness. This
321     * includes the case when N > (1<<30), so some keys MUST collide.
322     * Similarly for dumb or hostile usages in which multiple keys are
323     * designed to have identical hash codes. Also, although we guard
324     * against the worst effects of this (see method spread), sets of
325     * hashes may differ only in bits that do not impact their bin
326     * index for a given power-of-two mask. So we use a secondary
327     * strategy that applies when the number of nodes in a bin exceeds
328     * a threshold, and at least one of the keys implements
329     * Comparable. These TreeBins use a balanced tree to hold nodes
330     * (a specialized form of red-black trees), bounding search time
331 dl 1.206 * to O(log N). Each search step in a TreeBin is at least twice as
332 dl 1.119 * slow as in a regular list, but given that N cannot exceed
333     * (1<<64) (before running out of addresses) this bounds search
334     * steps, lock hold times, etc, to reasonable constants (roughly
335     * 100 nodes inspected per operation worst case) so long as keys
336     * are Comparable (which is very common -- String, Long, etc).
337     * TreeBin nodes (TreeNodes) also maintain the same "next"
338     * traversal pointers as regular nodes, so can be traversed in
339     * iterators in the same way.
340     *
341     * The table is resized when occupancy exceeds a percentage
342 dl 1.149 * threshold (nominally, 0.75, but see below). Any thread
343     * noticing an overfull bin may assist in resizing after the
344     * initiating thread allocates and sets up the replacement
345     * array. However, rather than stalling, these other threads may
346     * proceed with insertions etc. The use of TreeBins shields us
347     * from the worst case effects of overfilling while resizes are in
348     * progress. Resizing proceeds by transferring bins, one by one,
349     * from the table to the next table. To enable concurrency, the
350     * next table must be (incrementally) prefilled with place-holders
351     * serving as reverse forwarders to the old table. Because we are
352     * using power-of-two expansion, the elements from each bin must
353     * either stay at the same index, or move with a power of two
354     * offset. We eliminate unnecessary node creation by catching
355     * cases where old nodes can be reused because their next fields
356     * won't change. On average, only about one-sixth of them need
357     * cloning when a table doubles. The nodes they replace will be
358     * garbage collectable as soon as they are no longer referenced by
359     * any reader thread that may be in the midst of concurrently
360     * traversing the table. Upon transfer, the old table bin contains
361     * only a special forwarding node (with hash field "MOVED") that
362     * contains the next table as its key. On encountering a
363     * forwarding node, access and update operations restart, using
364     * the new table.
365     *
366     * Each bin transfer requires its bin lock, which can stall
367     * waiting for locks while resizing. However, because other
368     * threads can join in and help resize rather than contend for
369     * locks, average aggregate waits become shorter as resizing
370     * progresses. The transfer operation must also ensure that all
371     * accessible bins in both the old and new table are usable by any
372     * traversal. This is arranged by proceeding from the last bin
373     * (table.length - 1) up towards the first. Upon seeing a
374     * forwarding node, traversals (see class Traverser) arrange to
375     * move to the new table without revisiting nodes. However, to
376     * ensure that no intervening nodes are skipped, bin splitting can
377     * only begin after the associated reverse-forwarders are in
378     * place.
379 dl 1.119 *
380     * The traversal scheme also applies to partial traversals of
381     * ranges of bins (via an alternate Traverser constructor)
382     * to support partitioned aggregate operations. Also, read-only
383     * operations give up if ever forwarded to a null table, which
384     * provides support for shutdown-style clearing, which is also not
385     * currently implemented.
386     *
387     * Lazy table initialization minimizes footprint until first use,
388     * and also avoids resizings when the first operation is from a
389     * putAll, constructor with map argument, or deserialization.
390     * These cases attempt to override the initial capacity settings,
391     * but harmlessly fail to take effect in cases of races.
392     *
393 dl 1.149 * The element count is maintained using a specialization of
394     * LongAdder. We need to incorporate a specialization rather than
395     * just use a LongAdder in order to access implicit
396     * contention-sensing that leads to creation of multiple
397 dl 1.153 * Cells. The counter mechanics avoid contention on
398 dl 1.149 * updates but can encounter cache thrashing if read too
399     * frequently during concurrent access. To avoid reading so often,
400     * resizing under contention is attempted only upon adding to a
401     * bin already holding two or more nodes. Under uniform hash
402     * distributions, the probability of this occurring at threshold
403     * is around 13%, meaning that only about 1 in 8 puts check
404     * threshold (and after resizing, many fewer do so). The bulk
405     * putAll operation further reduces contention by only committing
406     * count updates upon these size checks.
407 dl 1.119 *
408     * Maintaining API and serialization compatibility with previous
409     * versions of this class introduces several oddities. Mainly: We
410     * leave untouched but unused constructor arguments refering to
411     * concurrencyLevel. We accept a loadFactor constructor argument,
412     * but apply it only to initial table capacity (which is the only
413     * time that we can guarantee to honor it.) We also declare an
414     * unused "Segment" class that is instantiated in minimal form
415     * only when serializing.
416 dl 1.4 */
417 tim 1.1
418 dl 1.4 /* ---------------- Constants -------------- */
419 tim 1.11
420 dl 1.4 /**
421 dl 1.119 * The largest possible table capacity. This value must be
422     * exactly 1<<30 to stay within Java array allocation and indexing
423     * bounds for power of two table sizes, and is further required
424     * because the top two bits of 32-bit hash fields are used for
425     * control purposes.
426 dl 1.4 */
427 dl 1.119 private static final int MAXIMUM_CAPACITY = 1 << 30;
428 dl 1.56
429     /**
430 dl 1.119 * The default initial table capacity. Must be a power of 2
431     * (i.e., at least 1) and at most MAXIMUM_CAPACITY.
432 dl 1.56 */
433 dl 1.119 private static final int DEFAULT_CAPACITY = 16;
434 dl 1.56
435     /**
436 dl 1.119 * The largest possible (non-power of two) array size.
437     * Needed by toArray and related methods.
438 jsr166 1.59 */
439 dl 1.119 static final int MAX_ARRAY_SIZE = Integer.MAX_VALUE - 8;
440 tim 1.1
441     /**
442 dl 1.119 * The default concurrency level for this table. Unused but
443     * defined for compatibility with previous versions of this class.
444 dl 1.4 */
445 dl 1.119 private static final int DEFAULT_CONCURRENCY_LEVEL = 16;
446 tim 1.11
447 tim 1.1 /**
448 dl 1.119 * The load factor for this table. Overrides of this value in
449     * constructors affect only the initial table capacity. The
450     * actual floating point value isn't normally used -- it is
451     * simpler to use expressions such as {@code n - (n >>> 2)} for
452     * the associated resizing threshold.
453 dl 1.99 */
454 dl 1.119 private static final float LOAD_FACTOR = 0.75f;
455 dl 1.99
456     /**
457 dl 1.119 * The bin count threshold for using a tree rather than list for a
458     * bin. The value reflects the approximate break-even point for
459     * using tree-based operations.
460     */
461 dl 1.201 private static final int TREE_THRESHOLD = 16;
462 dl 1.119
463 dl 1.149 /**
464     * Minimum number of rebinnings per transfer step. Ranges are
465     * subdivided to allow multiple resizer threads. This value
466     * serves as a lower bound to avoid resizers encountering
467     * excessive memory contention. The value should be at least
468     * DEFAULT_CAPACITY.
469     */
470     private static final int MIN_TRANSFER_STRIDE = 16;
471    
472 dl 1.119 /*
473 dl 1.149 * Encodings for Node hash fields. See above for explanation.
474 dl 1.46 */
475 dl 1.119 static final int MOVED = 0x80000000; // hash field for forwarding nodes
476 dl 1.149 static final int HASH_BITS = 0x7fffffff; // usable bits of normal node hash
477    
478     /** Number of CPUS, to place bounds on some sizings */
479     static final int NCPU = Runtime.getRuntime().availableProcessors();
480    
481 dl 1.208 /** For serialization compatibility. */
482     private static final ObjectStreamField[] serialPersistentFields = {
483 dl 1.209 new ObjectStreamField("segments", Segment[].class),
484     new ObjectStreamField("segmentMask", Integer.TYPE),
485     new ObjectStreamField("segmentShift", Integer.TYPE)
486 dl 1.208 };
487    
488 dl 1.149 /* ---------------- Counters -------------- */
489    
490     // Adapted from LongAdder and Striped64.
491     // See their internal docs for explanation.
492    
493     // A padded cell for distributing counts
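        // (The volatile longs p0..p6 and q0..q6 below exist only as padding:
        // they keep each Cell's value field on its own cache line, reducing
        // false sharing when different threads update different cells.)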
494 dl 1.153 static final class Cell {
495 dl 1.149 volatile long p0, p1, p2, p3, p4, p5, p6;
496     volatile long value;
497     volatile long q0, q1, q2, q3, q4, q5, q6;
498 dl 1.153 Cell(long x) { value = x; }
499 dl 1.149 }
500    
501 dl 1.4 /* ---------------- Fields -------------- */
502 tim 1.1
503     /**
504 dl 1.119 * The array of bins. Lazily initialized upon first insertion.
505     * Size is always a power of two. Accessed directly by iterators.
506 jsr166 1.59 */
507 dl 1.151 transient volatile Node<V>[] table;
508 tim 1.1
509     /**
510 dl 1.149 * The next table to use; non-null only while resizing.
511 jsr166 1.59 */
512 dl 1.151 private transient volatile Node<V>[] nextTable;
513 dl 1.149
514     /**
515     * Base counter value, used mainly when there is no contention,
516     * but also as a fallback during table initialization
517     * races. Updated via CAS.
518     */
519     private transient volatile long baseCount;
520 tim 1.1
521     /**
522 dl 1.119 * Table initialization and resizing control. When negative, the
523 dl 1.149 * table is being initialized or resized: -1 for initialization,
524     * else -(1 + the number of active resizing threads). Otherwise,
525     * when table is null, holds the initial table size to use upon
526     * creation, or 0 for default. After initialization, holds the
527     * next element count value upon which to resize the table.
528 tim 1.1 */
529 dl 1.119 private transient volatile int sizeCtl;
530 dl 1.4
531 dl 1.149 /**
532     * The next table index (plus one) to split while resizing.
533     */
534     private transient volatile int transferIndex;
535    
536     /**
537     * The least available table index to split while resizing.
538     */
539     private transient volatile int transferOrigin;
540    
541     /**
542     * Spinlock (locked via CAS) used when resizing and/or creating Cells.
543     */
544 dl 1.153 private transient volatile int cellsBusy;
545 dl 1.149
546     /**
547     * Table of counter cells. When non-null, size is a power of 2.
548     */
549 dl 1.153 private transient volatile Cell[] counterCells;
550 dl 1.149
551 dl 1.119 // views
552 dl 1.137 private transient KeySetView<K,V> keySet;
553 dl 1.142 private transient ValuesView<K,V> values;
554     private transient EntrySetView<K,V> entrySet;
555 dl 1.119
556     /* ---------------- Table element access -------------- */
557    
558     /*
559     * Volatile access methods are used for table elements as well as
560     * elements of in-progress next table while resizing. Uses are
561     * null checked by callers, and implicitly bounds-checked, relying
562     * on the invariants that tab arrays have non-zero size, and all
563     * indices are masked with (tab.length - 1) which is never
564     * negative and always less than length. Note that, to be correct
565     * wrt arbitrary concurrency errors by users, bounds checks must
566     * operate on local variables, which accounts for some odd-looking
567     * inline assignments below.
568     */
569    
570 dl 1.151 @SuppressWarnings("unchecked") static final <V> Node<V> tabAt
571     (Node<V>[] tab, int i) { // used by Traverser
572     return (Node<V>)U.getObjectVolatile(tab, ((long)i << ASHIFT) + ABASE);
573 dl 1.119 }
574    
575 dl 1.151 private static final <V> boolean casTabAt
576     (Node<V>[] tab, int i, Node<V> c, Node<V> v) {
577 dl 1.149 return U.compareAndSwapObject(tab, ((long)i << ASHIFT) + ABASE, c, v);
578 dl 1.119 }
579    
580 dl 1.151 private static final <V> void setTabAt
581     (Node<V>[] tab, int i, Node<V> v) {
582 dl 1.149 U.putObjectVolatile(tab, ((long)i << ASHIFT) + ABASE, v);
583 dl 1.119 }
584    
585     /* ---------------- Nodes -------------- */
586 dl 1.4
587 dl 1.99 /**
588 dl 1.119 * Key-value entry. Note that this is never exported out as a
589     * user-visible Map.Entry (see MapEntry below). Nodes with a hash
590     * field of MOVED are special, and do not contain user keys or
591     * values. Otherwise, keys are never null, and null val fields
592     * indicate that a node is in the process of being deleted or
593     * created. For purposes of read-only access, a key may be read
594     * before a val, but can only be used after checking val to be
595     * non-null.
596 dl 1.99 */
597 dl 1.151 static class Node<V> {
598 dl 1.149 final int hash;
599 dl 1.119 final Object key;
600 dl 1.151 volatile V val;
601     volatile Node<V> next;
602 dl 1.99
603 dl 1.151 Node(int hash, Object key, V val, Node<V> next) {
604 dl 1.99 this.hash = hash;
605     this.key = key;
606 dl 1.119 this.val = val;
607 dl 1.99 this.next = next;
608     }
609     }
610    
611 dl 1.119 /* ---------------- TreeBins -------------- */
612    
613 dl 1.99 /**
614 dl 1.119 * Nodes for use in TreeBins
615 dl 1.99 */
616 dl 1.151 static final class TreeNode<V> extends Node<V> {
617     TreeNode<V> parent; // red-black tree links
618     TreeNode<V> left;
619     TreeNode<V> right;
620     TreeNode<V> prev; // needed to unlink next upon deletion
621 dl 1.119 boolean red;
622 dl 1.99
623 dl 1.151 TreeNode(int hash, Object key, V val, Node<V> next, TreeNode<V> parent) {
624 dl 1.119 super(hash, key, val, next);
625     this.parent = parent;
626     }
627 dl 1.99 }
628 tim 1.1
629 dl 1.201 /**
630     * Returns a Class for the given object of the form "class C
631     * implements Comparable<C>", if one exists, else null. See below
632     * for explanation.
633     */
634     static Class<?> comparableClassFor(Object x) {
635     Class<?> c, s, cmpc; Type[] ts, as; Type t; ParameterizedType p;
636     if ((c = x.getClass()) == String.class) // bypass checks
637     return c;
638     if ((cmpc = Comparable.class).isAssignableFrom(c)) {
639     while (cmpc.isAssignableFrom(s = c.getSuperclass()))
640     c = s; // find topmost comparable class
641 jsr166 1.207 if ((ts = c.getGenericInterfaces()) != null) {
642 dl 1.201 for (int i = 0; i < ts.length; ++i) {
643     if (((t = ts[i]) instanceof ParameterizedType) &&
644     ((p = (ParameterizedType)t).getRawType() == cmpc) &&
645     (as = p.getActualTypeArguments()) != null &&
646     as.length == 1 && as[0] == c) // type arg is c
647     return c;
648     }
649     }
650     }
651     return null;
652     }
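        // Illustrative examples: comparableClassFor("x") is String.class and
        // comparableClassFor(Integer.valueOf(1)) is Integer.class; but for a
        // class declared as "class A implements Comparable<B>" with B != A,
        // or for a non-Comparable key, the result is null, in which case
        // TreeBins fall back to hash order (searching both children on ties).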
653    
654 tim 1.1 /**
655 dl 1.119 * A specialized form of red-black tree for use in bins
656     * whose size exceeds a threshold.
657     *
658     * TreeBins use a special form of comparison for search and
659     * related operations (which is the main reason we cannot use
660     * existing collections such as TreeMaps). TreeBins contain
661     * Comparable elements, but may contain others, as well as
662     * elements that are Comparable but not necessarily Comparable<T>
663     * for the same T, so we cannot invoke compareTo among them. To
664     * handle this, the tree is ordered primarily by hash value, then
665 dl 1.205 * by Comparable.compareTo order if applicable. On lookup at a
666 dl 1.201 * node, if elements are not comparable or compare as 0 then both
667     * left and right children may need to be searched in the case of
668     * tied hash values. (This corresponds to the full list search
669     * that would be necessary if all elements were non-Comparable and
670     * had tied hashes.) The red-black balancing code is updated from
671 dl 1.119 * pre-jdk-collections
672     * (http://gee.cs.oswego.edu/dl/classes/collections/RBCell.java)
673     * based in turn on Cormen, Leiserson, and Rivest "Introduction to
674     * Algorithms" (CLR).
675     *
676     * TreeBins also maintain a separate locking discipline from
677     * regular bins. Because they are forwarded via special MOVED
678     * nodes at bin heads (which can never change once established),
679     * we cannot use those nodes as locks. Instead, TreeBin
680     * extends AbstractQueuedSynchronizer to support a simple form of
681     * read-write lock. For update operations and table validation,
682     * the exclusive form of lock behaves in the same way as bin-head
683     * locks. However, lookups use shared read-lock mechanics to allow
684     * multiple readers in the absence of writers. Additionally,
685     * these lookups do not ever block: While the lock is not
686     * available, they proceed along the slow traversal path (via
687     * next-pointers) until the lock becomes available or the list is
688     * exhausted, whichever comes first. (These cases are not fast,
689     * but maximize aggregate expected throughput.) The AQS mechanics
690     * for doing this are straightforward. The lock state is held as
691     * AQS getState(). Read counts are negative; the write count (1)
692     * is positive. There are no signalling preferences among readers
693     * and writers. Since we don't need to export full Lock API, we
694     * just override the minimal AQS methods and use them directly.
695     */
696 dl 1.151 static final class TreeBin<V> extends AbstractQueuedSynchronizer {
697 dl 1.24 private static final long serialVersionUID = 2249069246763182397L;
698 dl 1.151 transient TreeNode<V> root; // root of tree
699     transient TreeNode<V> first; // head of next-pointer list
700 dl 1.24
701 dl 1.119 /* AQS overrides */
702     public final boolean isHeldExclusively() { return getState() > 0; }
703     public final boolean tryAcquire(int ignore) {
704     if (compareAndSetState(0, 1)) {
705     setExclusiveOwnerThread(Thread.currentThread());
706     return true;
707     }
708     return false;
709     }
710     public final boolean tryRelease(int ignore) {
711     setExclusiveOwnerThread(null);
712     setState(0);
713     return true;
714     }
715     public final int tryAcquireShared(int ignore) {
716     for (int c;;) {
717     if ((c = getState()) > 0)
718     return -1;
719     if (compareAndSetState(c, c - 1))
720     return 1;
721     }
722     }
723     public final boolean tryReleaseShared(int ignore) {
724     int c;
725     do {} while (!compareAndSetState(c = getState(), c + 1));
726     return c == -1;
727     }
728    
729     /** From CLR */
730 dl 1.151 private void rotateLeft(TreeNode<V> p) {
731 dl 1.119 if (p != null) {
732 dl 1.151 TreeNode<V> r = p.right, pp, rl;
733 dl 1.119 if ((rl = p.right = r.left) != null)
734     rl.parent = p;
735     if ((pp = r.parent = p.parent) == null)
736     root = r;
737     else if (pp.left == p)
738     pp.left = r;
739     else
740     pp.right = r;
741     r.left = p;
742     p.parent = r;
743     }
744     }
745 dl 1.4
746 dl 1.119 /** From CLR */
747 dl 1.151 private void rotateRight(TreeNode<V> p) {
748 dl 1.119 if (p != null) {
749 dl 1.151 TreeNode<V> l = p.left, pp, lr;
750 dl 1.119 if ((lr = p.left = l.right) != null)
751     lr.parent = p;
752     if ((pp = l.parent = p.parent) == null)
753     root = l;
754     else if (pp.right == p)
755     pp.right = l;
756     else
757     pp.left = l;
758     l.right = p;
759     p.parent = l;
760     }
761     }
762 dl 1.4
763     /**
764 dl 1.201 * Returns the TreeNode (or null if not found) for the given
765     * key. A front-end for recursive version.
766     */
767     final TreeNode<V> getTreeNode(int h, Object k) {
768     return getTreeNode(h, k, root, comparableClassFor(k));
769     }
770    
771     /**
772 jsr166 1.123 * Returns the TreeNode (or null if not found) for the given key
773 dl 1.119 * starting at given root.
774 dl 1.4 */
775 dl 1.151 @SuppressWarnings("unchecked") final TreeNode<V> getTreeNode
776 dl 1.201 (int h, Object k, TreeNode<V> p, Class<?> cc) {
777 dl 1.119 while (p != null) {
778 jsr166 1.207 int dir, ph; Object pk;
779 dl 1.201 if ((ph = p.hash) != h)
780     dir = (h < ph) ? -1 : 1;
781     else if ((pk = p.key) == k || k.equals(pk))
782     return p;
783 dl 1.205 else if (cc == null || comparableClassFor(pk) != cc ||
784     (dir = ((Comparable<Object>)k).compareTo(pk)) == 0) {
785 dl 1.206 TreeNode<V> r, pr; // check both sides
786 dl 1.201 if ((pr = p.right) != null && h >= pr.hash &&
787     (r = getTreeNode(h, k, pr, cc)) != null)
788     return r;
789 dl 1.206 else // continue left
790 dl 1.201 dir = -1;
791 dl 1.45 }
792 dl 1.119 p = (dir > 0) ? p.right : p.left;
793 dl 1.33 }
794 dl 1.119 return null;
795 dl 1.33 }
796    
797 dl 1.99 /**
798 dl 1.119 * Wrapper for getTreeNode used by CHM.get. Tries to obtain
799     * read-lock to call getTreeNode, but during failure to get
800     * lock, searches along next links.
801 dl 1.99 */
802 dl 1.151 final V getValue(int h, Object k) {
803     Node<V> r = null;
804 dl 1.119 int c = getState(); // Must read lock state first
805 dl 1.151 for (Node<V> e = first; e != null; e = e.next) {
806 dl 1.119 if (c <= 0 && compareAndSetState(c, c - 1)) {
807     try {
808 dl 1.201 r = getTreeNode(h, k, root, comparableClassFor(k));
809 dl 1.119 } finally {
810     releaseShared(0);
811 dl 1.99 }
812     break;
813     }
814 dl 1.149 else if (e.hash == h && k.equals(e.key)) {
815 dl 1.119 r = e;
816 dl 1.99 break;
817     }
818 dl 1.119 else
819     c = getState();
820 dl 1.99 }
821 dl 1.119 return r == null ? null : r.val;
822 dl 1.99 }
823    
824     /**
825 dl 1.203 * Finds or adds a node.
826 dl 1.119 * @return null if added
827 dl 1.6 */
828 dl 1.151 @SuppressWarnings("unchecked") final TreeNode<V> putTreeNode
829 dl 1.203 (int h, Object k, V v) {
830     Class<?> cc = comparableClassFor(k);
831 dl 1.151 TreeNode<V> pp = root, p = null;
832 dl 1.119 int dir = 0;
833     while (pp != null) { // find existing node or leaf to insert at
834 dl 1.206 int ph; Object pk;
835 dl 1.119 p = pp;
836 dl 1.201 if ((ph = p.hash) != h)
837     dir = (h < ph) ? -1 : 1;
838     else if ((pk = p.key) == k || k.equals(pk))
839     return p;
840 dl 1.205 else if (cc == null || comparableClassFor(pk) != cc ||
841     (dir = ((Comparable<Object>)k).compareTo(pk)) == 0) {
842 dl 1.201 TreeNode<V> r, pr;
843     if ((pr = p.right) != null && h >= pr.hash &&
844     (r = getTreeNode(h, k, pr, cc)) != null)
845     return r;
846     else // continue left
847     dir = -1;
848 dl 1.99 }
849 dl 1.119 pp = (dir > 0) ? p.right : p.left;
850 dl 1.99 }
851    
852 dl 1.151 TreeNode<V> f = first;
853     TreeNode<V> x = first = new TreeNode<V>(h, k, v, f, p);
854 dl 1.119 if (p == null)
855     root = x;
856     else { // attach and rebalance; adapted from CLR
857 dl 1.151 TreeNode<V> xp, xpp;
858 dl 1.119 if (f != null)
859     f.prev = x;
860     if (dir <= 0)
861     p.left = x;
862     else
863     p.right = x;
864     x.red = true;
865     while (x != null && (xp = x.parent) != null && xp.red &&
866     (xpp = xp.parent) != null) {
867 dl 1.151 TreeNode<V> xppl = xpp.left;
868 dl 1.119 if (xp == xppl) {
869 dl 1.151 TreeNode<V> y = xpp.right;
870 dl 1.119 if (y != null && y.red) {
871     y.red = false;
872     xp.red = false;
873     xpp.red = true;
874     x = xpp;
875     }
876     else {
877     if (x == xp.right) {
878     rotateLeft(x = xp);
879     xpp = (xp = x.parent) == null ? null : xp.parent;
880     }
881     if (xp != null) {
882     xp.red = false;
883     if (xpp != null) {
884     xpp.red = true;
885     rotateRight(xpp);
886     }
887     }
888     }
889     }
890     else {
891 dl 1.151 TreeNode<V> y = xppl;
892 dl 1.119 if (y != null && y.red) {
893     y.red = false;
894     xp.red = false;
895     xpp.red = true;
896     x = xpp;
897     }
898     else {
899     if (x == xp.left) {
900     rotateRight(x = xp);
901     xpp = (xp = x.parent) == null ? null : xp.parent;
902     }
903     if (xp != null) {
904     xp.red = false;
905     if (xpp != null) {
906     xpp.red = true;
907     rotateLeft(xpp);
908     }
909     }
910 dl 1.99 }
911     }
912     }
913 dl 1.151 TreeNode<V> r = root;
914 dl 1.119 if (r != null && r.red)
915     r.red = false;
916 dl 1.99 }
917 dl 1.119 return null;
918 dl 1.99 }
919 dl 1.45
920 dl 1.119 /**
921     * Removes the given node, that must be present before this
922     * call. This is messier than typical red-black deletion code
923     * because we cannot swap the contents of an interior node
924     * with a leaf successor that is pinned by "next" pointers
925     * that are accessible independently of lock. So instead we
926     * swap the tree linkages.
927     */
928 dl 1.151 final void deleteTreeNode(TreeNode<V> p) {
929     TreeNode<V> next = (TreeNode<V>)p.next; // unlink traversal pointers
930     TreeNode<V> pred = p.prev;
931 dl 1.119 if (pred == null)
932     first = next;
933     else
934     pred.next = next;
935     if (next != null)
936     next.prev = pred;
937 dl 1.151 TreeNode<V> replacement;
938     TreeNode<V> pl = p.left;
939     TreeNode<V> pr = p.right;
940 dl 1.119 if (pl != null && pr != null) {
941 dl 1.151 TreeNode<V> s = pr, sl;
942 dl 1.119 while ((sl = s.left) != null) // find successor
943     s = sl;
944     boolean c = s.red; s.red = p.red; p.red = c; // swap colors
945 dl 1.151 TreeNode<V> sr = s.right;
946     TreeNode<V> pp = p.parent;
947 dl 1.119 if (s == pr) { // p was s's direct parent
948     p.parent = s;
949     s.right = p;
950     }
951     else {
952 dl 1.151 TreeNode<V> sp = s.parent;
953 dl 1.119 if ((p.parent = sp) != null) {
954     if (s == sp.left)
955     sp.left = p;
956     else
957     sp.right = p;
958 dl 1.45 }
959 dl 1.119 if ((s.right = pr) != null)
960     pr.parent = s;
961 dl 1.4 }
962 dl 1.119 p.left = null;
963     if ((p.right = sr) != null)
964     sr.parent = p;
965     if ((s.left = pl) != null)
966     pl.parent = s;
967     if ((s.parent = pp) == null)
968     root = s;
969     else if (p == pp.left)
970     pp.left = s;
971     else
972     pp.right = s;
973     replacement = sr;
974     }
975     else
976     replacement = (pl != null) ? pl : pr;
977 dl 1.151 TreeNode<V> pp = p.parent;
978 dl 1.119 if (replacement == null) {
979     if (pp == null) {
980     root = null;
981     return;
982     }
983     replacement = p;
984 dl 1.99 }
985 dl 1.119 else {
986     replacement.parent = pp;
987     if (pp == null)
988     root = replacement;
989     else if (p == pp.left)
990     pp.left = replacement;
991     else
992     pp.right = replacement;
993     p.left = p.right = p.parent = null;
994 dl 1.4 }
995 dl 1.119 if (!p.red) { // rebalance, from CLR
996 dl 1.151 TreeNode<V> x = replacement;
997 dl 1.119 while (x != null) {
998 dl 1.151 TreeNode<V> xp, xpl;
999 dl 1.119 if (x.red || (xp = x.parent) == null) {
1000     x.red = false;
1001 dl 1.99 break;
1002 dl 1.119 }
1003     if (x == (xpl = xp.left)) {
1004 dl 1.151 TreeNode<V> sib = xp.right;
1005 dl 1.119 if (sib != null && sib.red) {
1006     sib.red = false;
1007     xp.red = true;
1008     rotateLeft(xp);
1009     sib = (xp = x.parent) == null ? null : xp.right;
1010     }
1011     if (sib == null)
1012     x = xp;
1013     else {
1014 dl 1.151 TreeNode<V> sl = sib.left, sr = sib.right;
1015 dl 1.119 if ((sr == null || !sr.red) &&
1016     (sl == null || !sl.red)) {
1017     sib.red = true;
1018     x = xp;
1019     }
1020     else {
1021     if (sr == null || !sr.red) {
1022     if (sl != null)
1023     sl.red = false;
1024     sib.red = true;
1025     rotateRight(sib);
1026 dl 1.149 sib = (xp = x.parent) == null ?
1027     null : xp.right;
1028 dl 1.119 }
1029     if (sib != null) {
1030     sib.red = (xp == null) ? false : xp.red;
1031     if ((sr = sib.right) != null)
1032     sr.red = false;
1033     }
1034     if (xp != null) {
1035     xp.red = false;
1036     rotateLeft(xp);
1037     }
1038     x = root;
1039     }
1040     }
1041     }
1042     else { // symmetric
1043 dl 1.151 TreeNode<V> sib = xpl;
1044 dl 1.119 if (sib != null && sib.red) {
1045     sib.red = false;
1046     xp.red = true;
1047     rotateRight(xp);
1048     sib = (xp = x.parent) == null ? null : xp.left;
1049     }
1050     if (sib == null)
1051     x = xp;
1052     else {
1053 dl 1.151 TreeNode<V> sl = sib.left, sr = sib.right;
1054 dl 1.119 if ((sl == null || !sl.red) &&
1055     (sr == null || !sr.red)) {
1056     sib.red = true;
1057     x = xp;
1058     }
1059     else {
1060     if (sl == null || !sl.red) {
1061     if (sr != null)
1062     sr.red = false;
1063     sib.red = true;
1064     rotateLeft(sib);
1065 dl 1.149 sib = (xp = x.parent) == null ?
1066     null : xp.left;
1067 dl 1.119 }
1068     if (sib != null) {
1069     sib.red = (xp == null) ? false : xp.red;
1070     if ((sl = sib.left) != null)
1071     sl.red = false;
1072     }
1073     if (xp != null) {
1074     xp.red = false;
1075     rotateRight(xp);
1076     }
1077     x = root;
1078     }
1079     }
1080     }
1081 dl 1.45 }
1082 dl 1.4 }
1083 dl 1.119 if (p == replacement && (pp = p.parent) != null) {
1084     if (p == pp.left) // detach pointers
1085     pp.left = null;
1086     else if (p == pp.right)
1087     pp.right = null;
1088     p.parent = null;
1089     }
1090 dl 1.4 }
1091 tim 1.1 }
1092    
1093 dl 1.119 /* ---------------- Collision reduction methods -------------- */
1094 tim 1.1
1095 dl 1.99 /**
1096 dl 1.149 * Spreads higher bits to lower, and also forces top bit to 0.
1097 dl 1.119 * Because the table uses power-of-two masking, sets of hashes
1098     * that vary only in bits above the current mask will always
1099     * collide. (Among known examples are sets of Float keys holding
1100     * consecutive whole numbers in small tables.) To counter this,
1101     * we apply a transform that spreads the impact of higher bits
1102     * downward. There is a tradeoff between speed, utility, and
1103     * quality of bit-spreading. Because many common sets of hashes
1104     * are already reasonably distributed across bits (so don't benefit
1105     * from spreading), and because we use trees to handle large sets
1106     * of collisions in bins, we don't need excessively high quality.
1107     */
1108     private static final int spread(int h) {
1109     h ^= (h >>> 18) ^ (h >>> 12);
1110     return (h ^ (h >>> 10)) & HASH_BITS;
1111 dl 1.99 }
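        // Illustrative use (as in internalGet below): a bin index is the
        // spread hash masked by the power-of-two table length:
        //   int h = spread(key.hashCode());
        //   Node<V> first = tabAt(tab, (tab.length - 1) & h);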
1112    
1113     /**
1114 dl 1.149 * Replaces a list bin with a tree bin if key is comparable. Call
1115     * only when locked.
1116 dl 1.119 */
1117 dl 1.151 private final void replaceWithTreeBin(Node<V>[] tab, int index, Object key) {
1118 dl 1.201 if (comparableClassFor(key) != null) {
1119 dl 1.151 TreeBin<V> t = new TreeBin<V>();
1120     for (Node<V> e = tabAt(tab, index); e != null; e = e.next)
1121 dl 1.149 t.putTreeNode(e.hash, e.key, e.val);
1122 dl 1.151 setTabAt(tab, index, new Node<V>(MOVED, t, null, null));
1123 dl 1.119 }
1124 dl 1.99 }
1125 tim 1.11
1126 dl 1.119 /* ---------------- Internal access and update methods -------------- */
1127    
1128     /** Implementation for get and containsKey */
1129 dl 1.149 @SuppressWarnings("unchecked") private final V internalGet(Object k) {
1130 dl 1.119 int h = spread(k.hashCode());
1131 dl 1.151 retry: for (Node<V>[] tab = table; tab != null;) {
1132     Node<V> e; Object ek; V ev; int eh; // locals to read fields once
1133 dl 1.119 for (e = tabAt(tab, (tab.length - 1) & h); e != null; e = e.next) {
1134 dl 1.149 if ((eh = e.hash) < 0) {
1135 dl 1.119 if ((ek = e.key) instanceof TreeBin) // search TreeBin
1136 dl 1.151 return ((TreeBin<V>)ek).getValue(h, k);
1137     else { // restart with new table
1138     tab = (Node<V>[])ek;
1139 dl 1.119 continue retry;
1140     }
1141     }
1142 dl 1.149 else if (eh == h && (ev = e.val) != null &&
1143 dl 1.119 ((ek = e.key) == k || k.equals(ek)))
1144 dl 1.151 return ev;
1145 dl 1.119 }
1146     break;
1147     }
1148     return null;
1149 tim 1.1 }
1150    
1151     /**
1152 dl 1.119 * Implementation for the four public remove/replace methods:
1153     * Replaces node value with v, conditional upon match of cv if
1154     * non-null. If resulting value is null, delete.
1155     */
1156 dl 1.149 @SuppressWarnings("unchecked") private final V internalReplace
1157     (Object k, V v, Object cv) {
1158 dl 1.119 int h = spread(k.hashCode());
1159 dl 1.151 V oldVal = null;
1160     for (Node<V>[] tab = table;;) {
1161     Node<V> f; int i, fh; Object fk;
1162 dl 1.119 if (tab == null ||
1163     (f = tabAt(tab, i = (tab.length - 1) & h)) == null)
1164     break;
1165 dl 1.149 else if ((fh = f.hash) < 0) {
1166 dl 1.119 if ((fk = f.key) instanceof TreeBin) {
1167 dl 1.151 TreeBin<V> t = (TreeBin<V>)fk;
1168 dl 1.119 boolean validated = false;
1169     boolean deleted = false;
1170     t.acquire(0);
1171     try {
1172     if (tabAt(tab, i) == f) {
1173     validated = true;
1174 dl 1.201 TreeNode<V> p = t.getTreeNode(h, k);
1175 dl 1.119 if (p != null) {
1176 dl 1.151 V pv = p.val;
1177 dl 1.119 if (cv == null || cv == pv || cv.equals(pv)) {
1178     oldVal = pv;
1179     if ((p.val = v) == null) {
1180     deleted = true;
1181     t.deleteTreeNode(p);
1182     }
1183     }
1184     }
1185     }
1186     } finally {
1187     t.release(0);
1188     }
1189     if (validated) {
1190     if (deleted)
1191 dl 1.149 addCount(-1L, -1);
1192 dl 1.119 break;
1193     }
1194     }
1195     else
1196 dl 1.151 tab = (Node<V>[])fk;
1197 dl 1.119 }
1198 dl 1.149 else if (fh != h && f.next == null) // precheck
1199 dl 1.119 break; // rules out possible existence
1200 dl 1.149 else {
1201 dl 1.119 boolean validated = false;
1202     boolean deleted = false;
1203 jsr166 1.150 synchronized (f) {
1204 dl 1.119 if (tabAt(tab, i) == f) {
1205     validated = true;
1206 dl 1.151 for (Node<V> e = f, pred = null;;) {
1207     Object ek; V ev;
1208 dl 1.149 if (e.hash == h &&
1209 dl 1.119 ((ev = e.val) != null) &&
1210     ((ek = e.key) == k || k.equals(ek))) {
1211     if (cv == null || cv == ev || cv.equals(ev)) {
1212     oldVal = ev;
1213     if ((e.val = v) == null) {
1214     deleted = true;
1215 dl 1.151 Node<V> en = e.next;
1216 dl 1.119 if (pred != null)
1217     pred.next = en;
1218     else
1219     setTabAt(tab, i, en);
1220     }
1221     }
1222     break;
1223     }
1224     pred = e;
1225     if ((e = e.next) == null)
1226     break;
1227     }
1228     }
1229     }
1230     if (validated) {
1231     if (deleted)
1232 dl 1.149 addCount(-1L, -1);
1233 dl 1.119 break;
1234     }
1235     }
1236     }
1237 dl 1.151 return oldVal;
1238 dl 1.55 }
1239    
1240 dl 1.119 /*
1241 dl 1.149 * Internal versions of insertion methods
1242     * All have the same basic structure as the first (internalPut):
1243 dl 1.119 * 1. If table uninitialized, create
1244     * 2. If bin empty, try to CAS new node
1245     * 3. If bin stale, use new table
1246     * 4. if bin converted to TreeBin, validate and relay to TreeBin methods
1247     * 5. Lock and validate; if valid, scan and add or update
1248 tim 1.1 *
1249 dl 1.149 * The putAll method differs mainly in attempting to pre-allocate
1250     * enough table space, and also more lazily performs count updates
1251     * and checks.
1252     *
1253     * Most of the function-accepting methods can't be factored nicely
1254     * because they require different functional forms, so instead
1255     * sprawl out similar mechanics.
1256 tim 1.1 */
1257    
1258 dl 1.149 /** Implementation for put and putIfAbsent */
1259     @SuppressWarnings("unchecked") private final V internalPut
1260     (K k, V v, boolean onlyIfAbsent) {
1261     if (k == null || v == null) throw new NullPointerException();
1262 dl 1.119 int h = spread(k.hashCode());
1263 dl 1.149 int len = 0;
1264 dl 1.151 for (Node<V>[] tab = table;;) {
1265     int i, fh; Node<V> f; Object fk; V fv;
1266 dl 1.119 if (tab == null)
1267     tab = initTable();
1268     else if ((f = tabAt(tab, i = (tab.length - 1) & h)) == null) {
1269 dl 1.151 if (casTabAt(tab, i, null, new Node<V>(h, k, v, null)))
1270 dl 1.119 break; // no lock when adding to empty bin
1271 dl 1.99 }
1272 dl 1.149 else if ((fh = f.hash) < 0) {
1273 dl 1.119 if ((fk = f.key) instanceof TreeBin) {
1274 dl 1.151 TreeBin<V> t = (TreeBin<V>)fk;
1275     V oldVal = null;
1276 dl 1.119 t.acquire(0);
1277     try {
1278     if (tabAt(tab, i) == f) {
1279 dl 1.149 len = 2;
1280 dl 1.151 TreeNode<V> p = t.putTreeNode(h, k, v);
1281 dl 1.119 if (p != null) {
1282     oldVal = p.val;
1283 dl 1.149 if (!onlyIfAbsent)
1284     p.val = v;
1285 dl 1.119 }
1286     }
1287     } finally {
1288     t.release(0);
1289     }
1290 dl 1.149 if (len != 0) {
1291 dl 1.119 if (oldVal != null)
1292 dl 1.151 return oldVal;
1293 dl 1.119 break;
1294     }
1295     }
1296     else
1297 dl 1.151 tab = (Node<V>[])fk;
1298 dl 1.119 }
1299 dl 1.149 else if (onlyIfAbsent && fh == h && (fv = f.val) != null &&
1300     ((fk = f.key) == k || k.equals(fk))) // peek while nearby
1301 dl 1.151 return fv;
1302 dl 1.149 else {
1303 dl 1.151 V oldVal = null;
1304 jsr166 1.150 synchronized (f) {
1305 dl 1.119 if (tabAt(tab, i) == f) {
1306 dl 1.149 len = 1;
1307 dl 1.151 for (Node<V> e = f;; ++len) {
1308     Object ek; V ev;
1309 dl 1.149 if (e.hash == h &&
1310 dl 1.119 (ev = e.val) != null &&
1311     ((ek = e.key) == k || k.equals(ek))) {
1312     oldVal = ev;
1313 dl 1.149 if (!onlyIfAbsent)
1314     e.val = v;
1315 dl 1.119 break;
1316     }
1317 dl 1.151 Node<V> last = e;
1318 dl 1.119 if ((e = e.next) == null) {
1319 dl 1.151 last.next = new Node<V>(h, k, v, null);
1320 dl 1.149 if (len >= TREE_THRESHOLD)
1321 dl 1.119 replaceWithTreeBin(tab, i, k);
1322     break;
1323     }
1324     }
1325     }
1326     }
1327 dl 1.149 if (len != 0) {
1328 dl 1.119 if (oldVal != null)
1329 dl 1.151 return oldVal;
1330 dl 1.119 break;
1331     }
1332     }
1333 dl 1.45 }
1334 dl 1.149 addCount(1L, len);
1335 dl 1.119 return null;
1336 dl 1.21 }
1337    
1338 dl 1.119 /** Implementation for computeIfAbsent */
1339 dl 1.149 @SuppressWarnings("unchecked") private final V internalComputeIfAbsent
1340 dl 1.153 (K k, Function<? super K, ? extends V> mf) {
1341 dl 1.149 if (k == null || mf == null)
1342     throw new NullPointerException();
1343 dl 1.119 int h = spread(k.hashCode());
1344 dl 1.151 V val = null;
1345 dl 1.149 int len = 0;
1346 dl 1.151 for (Node<V>[] tab = table;;) {
1347     Node<V> f; int i; Object fk;
1348 dl 1.119 if (tab == null)
1349     tab = initTable();
1350     else if ((f = tabAt(tab, i = (tab.length - 1) & h)) == null) {
1351 dl 1.151 Node<V> node = new Node<V>(h, k, null, null);
1352 jsr166 1.150 synchronized (node) {
1353 dl 1.149 if (casTabAt(tab, i, null, node)) {
1354     len = 1;
1355     try {
1356     if ((val = mf.apply(k)) != null)
1357     node.val = val;
1358     } finally {
1359     if (val == null)
1360     setTabAt(tab, i, null);
1361 dl 1.119 }
1362     }
1363     }
1364 dl 1.149 if (len != 0)
1365 dl 1.119 break;
1366     }
1367 dl 1.149 else if (f.hash < 0) {
1368 dl 1.119 if ((fk = f.key) instanceof TreeBin) {
1369 dl 1.151 TreeBin<V> t = (TreeBin<V>)fk;
1370 dl 1.119 boolean added = false;
1371     t.acquire(0);
1372     try {
1373     if (tabAt(tab, i) == f) {
1374 dl 1.149 len = 1;
1375 dl 1.201 TreeNode<V> p = t.getTreeNode(h, k);
1376 dl 1.119 if (p != null)
1377     val = p.val;
1378     else if ((val = mf.apply(k)) != null) {
1379     added = true;
1380 dl 1.149 len = 2;
1381 dl 1.119 t.putTreeNode(h, k, val);
1382     }
1383     }
1384     } finally {
1385     t.release(0);
1386     }
1387 dl 1.149 if (len != 0) {
1388 dl 1.119 if (!added)
1389 dl 1.151 return val;
1390 dl 1.119 break;
1391     }
1392     }
1393     else
1394 dl 1.151 tab = (Node<V>[])fk;
1395 dl 1.119 }
1396     else {
1397 dl 1.151 for (Node<V> e = f; e != null; e = e.next) { // prescan
1398     Object ek; V ev;
1399 dl 1.149 if (e.hash == h && (ev = e.val) != null &&
1400     ((ek = e.key) == k || k.equals(ek)))
1401 dl 1.151 return ev;
1402 dl 1.119 }
1403 dl 1.149 boolean added = false;
1404 jsr166 1.150 synchronized (f) {
1405 dl 1.149 if (tabAt(tab, i) == f) {
1406     len = 1;
1407 dl 1.151 for (Node<V> e = f;; ++len) {
1408     Object ek; V ev;
1409 dl 1.149 if (e.hash == h &&
1410     (ev = e.val) != null &&
1411     ((ek = e.key) == k || k.equals(ek))) {
1412     val = ev;
1413     break;
1414     }
1415 dl 1.151 Node<V> last = e;
1416 dl 1.149 if ((e = e.next) == null) {
1417     if ((val = mf.apply(k)) != null) {
1418     added = true;
1419 dl 1.151 last.next = new Node<V>(h, k, val, null);
1420 dl 1.149 if (len >= TREE_THRESHOLD)
1421     replaceWithTreeBin(tab, i, k);
1422 dl 1.119 }
1423 dl 1.149 break;
1424 dl 1.119 }
1425     }
1426     }
1427 dl 1.149 }
1428     if (len != 0) {
1429     if (!added)
1430 dl 1.151 return val;
1431 dl 1.149 break;
1432 dl 1.119 }
1433 dl 1.105 }
1434 dl 1.99 }
1435 dl 1.149 if (val != null)
1436     addCount(1L, len);
1437 dl 1.151 return val;
1438 tim 1.1 }
1439    
1440 dl 1.119 /** Implementation for compute */
1441 dl 1.149 @SuppressWarnings("unchecked") private final V internalCompute
1442     (K k, boolean onlyIfPresent,
1443 dl 1.153 BiFunction<? super K, ? super V, ? extends V> mf) {
1444 dl 1.149 if (k == null || mf == null)
1445     throw new NullPointerException();
1446 dl 1.119 int h = spread(k.hashCode());
1447 dl 1.151 V val = null;
1448 dl 1.119 int delta = 0;
1449 dl 1.149 int len = 0;
1450 dl 1.151 for (Node<V>[] tab = table;;) {
1451     Node<V> f; int i, fh; Object fk;
1452 dl 1.119 if (tab == null)
1453     tab = initTable();
1454     else if ((f = tabAt(tab, i = (tab.length - 1) & h)) == null) {
1455     if (onlyIfPresent)
1456     break;
1457 dl 1.151 Node<V> node = new Node<V>(h, k, null, null);
1458 jsr166 1.150 synchronized (node) {
1459 dl 1.149 if (casTabAt(tab, i, null, node)) {
1460     try {
1461     len = 1;
1462     if ((val = mf.apply(k, null)) != null) {
1463     node.val = val;
1464     delta = 1;
1465     }
1466     } finally {
1467     if (delta == 0)
1468     setTabAt(tab, i, null);
1469 dl 1.119 }
1470     }
1471     }
1472 dl 1.149 if (len != 0)
1473 dl 1.119 break;
1474     }
1475 dl 1.149 else if ((fh = f.hash) < 0) {
1476 dl 1.119 if ((fk = f.key) instanceof TreeBin) {
1477 dl 1.151 TreeBin<V> t = (TreeBin<V>)fk;
1478 dl 1.119 t.acquire(0);
1479     try {
1480     if (tabAt(tab, i) == f) {
1481 dl 1.149 len = 1;
1482 dl 1.201 TreeNode<V> p = t.getTreeNode(h, k);
1483 dl 1.149 if (p == null && onlyIfPresent)
1484     break;
1485 dl 1.151 V pv = (p == null) ? null : p.val;
1486     if ((val = mf.apply(k, pv)) != null) {
1487 dl 1.119 if (p != null)
1488     p.val = val;
1489     else {
1490 dl 1.149 len = 2;
1491 dl 1.119 delta = 1;
1492     t.putTreeNode(h, k, val);
1493     }
1494     }
1495     else if (p != null) {
1496     delta = -1;
1497     t.deleteTreeNode(p);
1498     }
1499     }
1500     } finally {
1501     t.release(0);
1502     }
1503 dl 1.149 if (len != 0)
1504 dl 1.119 break;
1505     }
1506     else
1507 dl 1.151 tab = (Node<V>[])fk;
1508 dl 1.119 }
1509 dl 1.149 else {
1510 jsr166 1.150 synchronized (f) {
1511 dl 1.119 if (tabAt(tab, i) == f) {
1512 dl 1.149 len = 1;
1513 dl 1.151 for (Node<V> e = f, pred = null;; ++len) {
1514     Object ek; V ev;
1515 dl 1.149 if (e.hash == h &&
1516 dl 1.119 (ev = e.val) != null &&
1517     ((ek = e.key) == k || k.equals(ek))) {
1518 dl 1.151 val = mf.apply(k, ev);
1519 dl 1.119 if (val != null)
1520     e.val = val;
1521     else {
1522     delta = -1;
1523 dl 1.151 Node<V> en = e.next;
1524 dl 1.119 if (pred != null)
1525     pred.next = en;
1526     else
1527     setTabAt(tab, i, en);
1528     }
1529     break;
1530     }
1531     pred = e;
1532     if ((e = e.next) == null) {
1533 dl 1.149 if (!onlyIfPresent &&
1534     (val = mf.apply(k, null)) != null) {
1535 dl 1.151 pred.next = new Node<V>(h, k, val, null);
1536 dl 1.119 delta = 1;
1537 dl 1.149 if (len >= TREE_THRESHOLD)
1538 dl 1.119 replaceWithTreeBin(tab, i, k);
1539     }
1540     break;
1541     }
1542     }
1543     }
1544     }
1545 dl 1.149 if (len != 0)
1546 dl 1.119 break;
1547     }
1548     }
1549 dl 1.149 if (delta != 0)
1550     addCount((long)delta, len);
1551 dl 1.151 return val;
1552 dl 1.119 }
1553    
1554 dl 1.126 /** Implementation for merge */
1555 dl 1.149 @SuppressWarnings("unchecked") private final V internalMerge
1556 dl 1.153 (K k, V v, BiFunction<? super V, ? super V, ? extends V> mf) {
1557 dl 1.149 if (k == null || v == null || mf == null)
1558     throw new NullPointerException();
1559 dl 1.119 int h = spread(k.hashCode());
1560 dl 1.151 V val = null;
1561 dl 1.119 int delta = 0;
1562 dl 1.149 int len = 0;
1563 dl 1.151 for (Node<V>[] tab = table;;) {
1564     int i; Node<V> f; Object fk; V fv;
1565 dl 1.119 if (tab == null)
1566     tab = initTable();
1567     else if ((f = tabAt(tab, i = (tab.length - 1) & h)) == null) {
1568 dl 1.151 if (casTabAt(tab, i, null, new Node<V>(h, k, v, null))) {
1569 dl 1.119 delta = 1;
1570     val = v;
1571     break;
1572     }
1573     }
1574 dl 1.149 else if (f.hash < 0) {
1575 dl 1.119 if ((fk = f.key) instanceof TreeBin) {
1576 dl 1.151 TreeBin<V> t = (TreeBin<V>)fk;
1577 dl 1.119 t.acquire(0);
1578     try {
1579     if (tabAt(tab, i) == f) {
1580 dl 1.149 len = 1;
1581 dl 1.201 TreeNode<V> p = t.getTreeNode(h, k);
1582 dl 1.151 val = (p == null) ? v : mf.apply(p.val, v);
1583 dl 1.119 if (val != null) {
1584     if (p != null)
1585     p.val = val;
1586     else {
1587 dl 1.149 len = 2;
1588 dl 1.119 delta = 1;
1589     t.putTreeNode(h, k, val);
1590     }
1591     }
1592     else if (p != null) {
1593     delta = -1;
1594     t.deleteTreeNode(p);
1595     }
1596     }
1597     } finally {
1598     t.release(0);
1599     }
1600 dl 1.149 if (len != 0)
1601 dl 1.119 break;
1602     }
1603     else
1604 dl 1.151 tab = (Node<V>[])fk;
1605 dl 1.119 }
1606 dl 1.149 else {
1607 jsr166 1.150 synchronized (f) {
1608 dl 1.119 if (tabAt(tab, i) == f) {
1609 dl 1.149 len = 1;
1610 dl 1.151 for (Node<V> e = f, pred = null;; ++len) {
1611     Object ek; V ev;
1612 dl 1.149 if (e.hash == h &&
1613 dl 1.119 (ev = e.val) != null &&
1614     ((ek = e.key) == k || k.equals(ek))) {
1615 dl 1.151 val = mf.apply(ev, v);
1616 dl 1.119 if (val != null)
1617     e.val = val;
1618     else {
1619     delta = -1;
1620 dl 1.151 Node<V> en = e.next;
1621 dl 1.119 if (pred != null)
1622     pred.next = en;
1623     else
1624     setTabAt(tab, i, en);
1625     }
1626     break;
1627     }
1628     pred = e;
1629     if ((e = e.next) == null) {
1630     val = v;
1631 dl 1.151 pred.next = new Node<V>(h, k, val, null);
1632 dl 1.119 delta = 1;
1633 dl 1.149 if (len >= TREE_THRESHOLD)
1634 dl 1.119 replaceWithTreeBin(tab, i, k);
1635     break;
1636     }
1637     }
1638     }
1639     }
1640 dl 1.149 if (len != 0)
1641 dl 1.119 break;
1642 dl 1.105 }
1643 dl 1.99 }
1644 dl 1.149 if (delta != 0)
1645     addCount((long)delta, len);
1646 dl 1.151 return val;
1647 dl 1.119 }
1648    
1649     /** Implementation for putAll */
1650 dl 1.151 @SuppressWarnings("unchecked") private final void internalPutAll
1651     (Map<? extends K, ? extends V> m) {
1652 dl 1.119 tryPresize(m.size());
1653     long delta = 0L; // number of uncommitted additions
1654     boolean npe = false; // to throw exception on exit for nulls
1655     try { // to clean up counts on other exceptions
1656 dl 1.151 for (Map.Entry<?, ? extends V> entry : m.entrySet()) {
1657     Object k; V v;
1658 dl 1.119 if (entry == null || (k = entry.getKey()) == null ||
1659     (v = entry.getValue()) == null) {
1660     npe = true;
1661     break;
1662     }
1663     int h = spread(k.hashCode());
1664 dl 1.151 for (Node<V>[] tab = table;;) {
1665     int i; Node<V> f; int fh; Object fk;
1666 dl 1.119 if (tab == null)
1667     tab = initTable();
1668     else if ((f = tabAt(tab, i = (tab.length - 1) & h)) == null){
1669 dl 1.151 if (casTabAt(tab, i, null, new Node<V>(h, k, v, null))) {
1670 dl 1.119 ++delta;
1671     break;
1672     }
1673     }
1674 dl 1.149 else if ((fh = f.hash) < 0) {
1675 dl 1.119 if ((fk = f.key) instanceof TreeBin) {
1676 dl 1.151 TreeBin<V> t = (TreeBin<V>)fk;
1677 dl 1.119 boolean validated = false;
1678     t.acquire(0);
1679     try {
1680     if (tabAt(tab, i) == f) {
1681     validated = true;
1682 dl 1.201 TreeNode<V> p = t.getTreeNode(h, k);
1683 dl 1.119 if (p != null)
1684     p.val = v;
1685     else {
1686     t.putTreeNode(h, k, v);
1687     ++delta;
1688     }
1689     }
1690     } finally {
1691     t.release(0);
1692     }
1693     if (validated)
1694     break;
1695     }
1696     else
1697 dl 1.151 tab = (Node<V>[])fk;
1698 dl 1.119 }
1699 dl 1.149 else {
1700     int len = 0;
1701 jsr166 1.150 synchronized (f) {
1702 dl 1.119 if (tabAt(tab, i) == f) {
1703 dl 1.149 len = 1;
1704 dl 1.151 for (Node<V> e = f;; ++len) {
1705     Object ek; V ev;
1706 dl 1.149 if (e.hash == h &&
1707 dl 1.119 (ev = e.val) != null &&
1708     ((ek = e.key) == k || k.equals(ek))) {
1709     e.val = v;
1710     break;
1711     }
1712 dl 1.151 Node<V> last = e;
1713 dl 1.119 if ((e = e.next) == null) {
1714     ++delta;
1715 dl 1.151 last.next = new Node<V>(h, k, v, null);
1716 dl 1.149 if (len >= TREE_THRESHOLD)
1717 dl 1.119 replaceWithTreeBin(tab, i, k);
1718     break;
1719     }
1720     }
1721     }
1722     }
1723 dl 1.149 if (len != 0) {
1724 dl 1.164 if (len > 1) {
1725 dl 1.149 addCount(delta, len);
1726 dl 1.164 delta = 0L;
1727     }
1728 dl 1.119 break;
1729 dl 1.99 }
1730 dl 1.21 }
1731     }
1732 dl 1.45 }
1733     } finally {
1734 dl 1.149 if (delta != 0L)
1735     addCount(delta, 2);
1736 dl 1.45 }
1737 dl 1.119 if (npe)
1738     throw new NullPointerException();
1739 tim 1.1 }
1740 dl 1.19
1741 dl 1.149 /**
1742     * Implementation for clear. Steps through each bin, removing all
1743     * nodes.
1744     */
1745 dl 1.151 @SuppressWarnings("unchecked") private final void internalClear() {
1746 dl 1.149 long delta = 0L; // negative number of deletions
1747     int i = 0;
1748 dl 1.151 Node<V>[] tab = table;
1749 dl 1.149 while (tab != null && i < tab.length) {
1750 dl 1.151 Node<V> f = tabAt(tab, i);
1751 dl 1.149 if (f == null)
1752     ++i;
1753     else if (f.hash < 0) {
1754     Object fk;
1755     if ((fk = f.key) instanceof TreeBin) {
1756 dl 1.151 TreeBin<V> t = (TreeBin<V>)fk;
1757 dl 1.149 t.acquire(0);
1758     try {
1759     if (tabAt(tab, i) == f) {
1760 dl 1.151 for (Node<V> p = t.first; p != null; p = p.next) {
1761 dl 1.149 if (p.val != null) { // (currently always true)
1762     p.val = null;
1763     --delta;
1764     }
1765     }
1766     t.first = null;
1767     t.root = null;
1768     ++i;
1769     }
1770     } finally {
1771     t.release(0);
1772     }
1773     }
1774     else
1775 dl 1.151 tab = (Node<V>[])fk;
1776 dl 1.149 }
1777     else {
1778 jsr166 1.150 synchronized (f) {
1779 dl 1.149 if (tabAt(tab, i) == f) {
1780 dl 1.151 for (Node<V> e = f; e != null; e = e.next) {
1781 dl 1.149 if (e.val != null) { // (currently always true)
1782     e.val = null;
1783     --delta;
1784     }
1785     }
1786     setTabAt(tab, i, null);
1787     ++i;
1788     }
1789     }
1790     }
1791     }
1792     if (delta != 0L)
1793     addCount(delta, -1);
1794     }
1795    
1796 dl 1.119 /* ---------------- Table Initialization and Resizing -------------- */
1797    
1798 tim 1.1 /**
1799 dl 1.119 * Returns a power of two table size for the given desired capacity.
1800     * See Hacker's Delight, sec 3.2
1801 tim 1.1 */
1802 dl 1.119 private static final int tableSizeFor(int c) {
1803     int n = c - 1;
1804     n |= n >>> 1;
1805     n |= n >>> 2;
1806     n |= n >>> 4;
1807     n |= n >>> 8;
1808     n |= n >>> 16;
1809     return (n < 0) ? 1 : (n >= MAXIMUM_CAPACITY) ? MAXIMUM_CAPACITY : n + 1;
1810 tim 1.1 }
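    // Illustrative check (not part of the original source): tableSizeFor rounds
    // up to the next power of two by smearing the highest set bit of c - 1 into
    // every lower position. For c = 100: n = 99 = 0b110_0011 smears to
    // 0b111_1111 = 127, and n + 1 = 128 is the smallest power of two >= 100.
    private static void tableSizeForExamples() {
        assert tableSizeFor(1)   == 1;
        assert tableSizeFor(16)  == 16;   // already a power of two
        assert tableSizeFor(17)  == 32;
        assert tableSizeFor(100) == 128;
    }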
1811    
1812     /**
1813 dl 1.119 * Initializes table, using the size recorded in sizeCtl.
1814     */
1815 dl 1.151 @SuppressWarnings("unchecked") private final Node<V>[] initTable() {
1816     Node<V>[] tab; int sc;
1817 dl 1.119 while ((tab = table) == null) {
1818     if ((sc = sizeCtl) < 0)
1819     Thread.yield(); // lost initialization race; just spin
1820 dl 1.149 else if (U.compareAndSwapInt(this, SIZECTL, sc, -1)) {
1821 dl 1.119 try {
1822     if ((tab = table) == null) {
1823     int n = (sc > 0) ? sc : DEFAULT_CAPACITY;
1824 dl 1.151 @SuppressWarnings("rawtypes") Node[] tb = new Node[n];
1825     table = tab = (Node<V>[])tb;
1826 dl 1.119 sc = n - (n >>> 2);
1827     }
1828     } finally {
1829     sizeCtl = sc;
1830     }
1831     break;
1832     }
1833     }
1834     return tab;
1835 dl 1.4 }
1836    
1837     /**
1838 dl 1.149 * Adds to count, and if table is too small and not already
1839     * resizing, initiates transfer. If already resizing, helps
1840     * perform transfer if work is available. Rechecks occupancy
1841     * after a transfer to see if another resize is already needed
1842     * because resizings are lagging additions.
1843     *
1844     * @param x the count to add
1845     * @param check if <0, don't check resize, if <= 1 only check if uncontended
1846     */
1847     private final void addCount(long x, int check) {
1848 dl 1.153 Cell[] as; long b, s;
1849 dl 1.149 if ((as = counterCells) != null ||
1850     !U.compareAndSwapLong(this, BASECOUNT, b = baseCount, s = b + x)) {
1851 dl 1.160 Cell a; long v; int m;
1852 dl 1.149 boolean uncontended = true;
1853 dl 1.160 if (as == null || (m = as.length - 1) < 0 ||
1854     (a = as[ThreadLocalRandom.getProbe() & m]) == null ||
1855 dl 1.149 !(uncontended =
1856     U.compareAndSwapLong(a, CELLVALUE, v = a.value, v + x))) {
1857 dl 1.160 fullAddCount(x, uncontended);
1858 dl 1.149 return;
1859     }
1860     if (check <= 1)
1861     return;
1862     s = sumCount();
1863     }
1864     if (check >= 0) {
1865 dl 1.151 Node<V>[] tab, nt; int sc;
1866 dl 1.149 while (s >= (long)(sc = sizeCtl) && (tab = table) != null &&
1867     tab.length < MAXIMUM_CAPACITY) {
1868     if (sc < 0) {
1869     if (sc == -1 || transferIndex <= transferOrigin ||
1870     (nt = nextTable) == null)
1871     break;
1872     if (U.compareAndSwapInt(this, SIZECTL, sc, sc - 1))
1873     transfer(tab, nt);
1874 dl 1.119 }
1875 dl 1.149 else if (U.compareAndSwapInt(this, SIZECTL, sc, -2))
1876     transfer(tab, null);
1877     s = sumCount();
1878 dl 1.119 }
1879     }
1880 dl 1.4 }
1881    
1882     /**
1883 dl 1.119 * Tries to presize table to accommodate the given number of elements.
1884 tim 1.1 *
1885 dl 1.119 * @param size number of elements (doesn't need to be perfectly accurate)
1886 tim 1.1 */
1887 dl 1.151 @SuppressWarnings("unchecked") private final void tryPresize(int size) {
1888 dl 1.119 int c = (size >= (MAXIMUM_CAPACITY >>> 1)) ? MAXIMUM_CAPACITY :
1889     tableSizeFor(size + (size >>> 1) + 1);
1890     int sc;
1891     while ((sc = sizeCtl) >= 0) {
1892 dl 1.151 Node<V>[] tab = table; int n;
1893 dl 1.119 if (tab == null || (n = tab.length) == 0) {
1894     n = (sc > c) ? sc : c;
1895 dl 1.149 if (U.compareAndSwapInt(this, SIZECTL, sc, -1)) {
1896 dl 1.119 try {
1897     if (table == tab) {
1898 dl 1.151 @SuppressWarnings("rawtypes") Node[] tb = new Node[n];
1899     table = (Node<V>[])tb;
1900 dl 1.119 sc = n - (n >>> 2);
1901     }
1902     } finally {
1903     sizeCtl = sc;
1904     }
1905     }
1906     }
1907     else if (c <= sc || n >= MAXIMUM_CAPACITY)
1908     break;
1909 dl 1.149 else if (tab == table &&
1910     U.compareAndSwapInt(this, SIZECTL, sc, -2))
1911     transfer(tab, null);
1912 dl 1.119 }
1913 dl 1.4 }
1914    
1915 jsr166 1.170 /**
1916 dl 1.119 * Moves and/or copies the nodes in each bin to a new table. See
1917     * above for explanation.
1918 dl 1.4 */
1919 dl 1.151 @SuppressWarnings("unchecked") private final void transfer
1920     (Node<V>[] tab, Node<V>[] nextTab) {
1921 dl 1.149 int n = tab.length, stride;
1922     if ((stride = (NCPU > 1) ? (n >>> 3) / NCPU : n) < MIN_TRANSFER_STRIDE)
1923     stride = MIN_TRANSFER_STRIDE; // subdivide range
1924     if (nextTab == null) { // initiating
1925     try {
1926 dl 1.151 @SuppressWarnings("rawtypes") Node[] tb = new Node[n << 1];
1927     nextTab = (Node<V>[])tb;
1928 jsr166 1.150 } catch (Throwable ex) { // try to cope with OOME
1929 dl 1.149 sizeCtl = Integer.MAX_VALUE;
1930     return;
1931     }
1932     nextTable = nextTab;
1933     transferOrigin = n;
1934     transferIndex = n;
1935 dl 1.151 Node<V> rev = new Node<V>(MOVED, tab, null, null);
1936 dl 1.149 for (int k = n; k > 0;) { // progressively reveal ready slots
1937 jsr166 1.150 int nextk = (k > stride) ? k - stride : 0;
1938 dl 1.149 for (int m = nextk; m < k; ++m)
1939     nextTab[m] = rev;
1940     for (int m = n + nextk; m < n + k; ++m)
1941     nextTab[m] = rev;
1942     U.putOrderedInt(this, TRANSFERORIGIN, k = nextk);
1943     }
1944     }
1945     int nextn = nextTab.length;
1946 dl 1.151 Node<V> fwd = new Node<V>(MOVED, nextTab, null, null);
1947 dl 1.149 boolean advance = true;
1948     for (int i = 0, bound = 0;;) {
1949 dl 1.151 int nextIndex, nextBound; Node<V> f; Object fk;
1950 dl 1.149 while (advance) {
1951     if (--i >= bound)
1952     advance = false;
1953     else if ((nextIndex = transferIndex) <= transferOrigin) {
1954     i = -1;
1955     advance = false;
1956     }
1957     else if (U.compareAndSwapInt
1958     (this, TRANSFERINDEX, nextIndex,
1959 jsr166 1.150 nextBound = (nextIndex > stride ?
1960 dl 1.149 nextIndex - stride : 0))) {
1961     bound = nextBound;
1962     i = nextIndex - 1;
1963     advance = false;
1964     }
1965     }
1966     if (i < 0 || i >= n || i + n >= nextn) {
1967     for (int sc;;) {
1968     if (U.compareAndSwapInt(this, SIZECTL, sc = sizeCtl, ++sc)) {
1969     if (sc == -1) {
1970     nextTable = null;
1971     table = nextTab;
1972     sizeCtl = (n << 1) - (n >>> 1);
1973     }
1974     return;
1975     }
1976     }
1977     }
1978     else if ((f = tabAt(tab, i)) == null) {
1979     if (casTabAt(tab, i, null, fwd)) {
1980 dl 1.119 setTabAt(nextTab, i, null);
1981     setTabAt(nextTab, i + n, null);
1982 dl 1.149 advance = true;
1983     }
1984     }
1985     else if (f.hash >= 0) {
1986 jsr166 1.150 synchronized (f) {
1987 dl 1.149 if (tabAt(tab, i) == f) {
1988     int runBit = f.hash & n;
1989 dl 1.151 Node<V> lastRun = f, lo = null, hi = null;
1990     for (Node<V> p = f.next; p != null; p = p.next) {
1991 dl 1.149 int b = p.hash & n;
1992     if (b != runBit) {
1993     runBit = b;
1994     lastRun = p;
1995     }
1996     }
1997     if (runBit == 0)
1998     lo = lastRun;
1999     else
2000     hi = lastRun;
2001 dl 1.151 for (Node<V> p = f; p != lastRun; p = p.next) {
2002 dl 1.149 int ph = p.hash;
2003 dl 1.151 Object pk = p.key; V pv = p.val;
2004 dl 1.149 if ((ph & n) == 0)
2005 dl 1.151 lo = new Node<V>(ph, pk, pv, lo);
2006 dl 1.149 else
2007 dl 1.151 hi = new Node<V>(ph, pk, pv, hi);
2008 dl 1.149 }
2009     setTabAt(nextTab, i, lo);
2010     setTabAt(nextTab, i + n, hi);
2011     setTabAt(tab, i, fwd);
2012     advance = true;
2013 dl 1.119 }
2014     }
2015     }
2016 dl 1.149 else if ((fk = f.key) instanceof TreeBin) {
2017 dl 1.151 TreeBin<V> t = (TreeBin<V>)fk;
2018 dl 1.149 t.acquire(0);
2019     try {
2020     if (tabAt(tab, i) == f) {
2021 dl 1.151 TreeBin<V> lt = new TreeBin<V>();
2022     TreeBin<V> ht = new TreeBin<V>();
2023 dl 1.149 int lc = 0, hc = 0;
2024 dl 1.151 for (Node<V> e = t.first; e != null; e = e.next) {
2025 dl 1.149 int h = e.hash;
2026 dl 1.151 Object k = e.key; V v = e.val;
2027 dl 1.149 if ((h & n) == 0) {
2028     ++lc;
2029     lt.putTreeNode(h, k, v);
2030     }
2031     else {
2032     ++hc;
2033     ht.putTreeNode(h, k, v);
2034     }
2035     }
2036 dl 1.151 Node<V> ln, hn; // throw away trees if too small
2037 dl 1.149 if (lc < TREE_THRESHOLD) {
2038     ln = null;
2039 dl 1.151 for (Node<V> p = lt.first; p != null; p = p.next)
2040     ln = new Node<V>(p.hash, p.key, p.val, ln);
2041 dl 1.149 }
2042     else
2043 dl 1.151 ln = new Node<V>(MOVED, lt, null, null);
2044 dl 1.149 setTabAt(nextTab, i, ln);
2045     if (hc < TREE_THRESHOLD) {
2046     hn = null;
2047 dl 1.151 for (Node<V> p = ht.first; p != null; p = p.next)
2048     hn = new Node<V>(p.hash, p.key, p.val, hn);
2049 dl 1.119 }
2050 dl 1.149 else
2051 dl 1.151 hn = new Node<V>(MOVED, ht, null, null);
2052 dl 1.149 setTabAt(nextTab, i + n, hn);
2053 dl 1.119 setTabAt(tab, i, fwd);
2054 dl 1.149 advance = true;
2055 dl 1.119 }
2056     } finally {
2057 dl 1.149 t.release(0);
2058 dl 1.119 }
2059     }
2060     else
2061 dl 1.149 advance = true; // already processed
2062 dl 1.119 }
2063 dl 1.4 }
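    // Worked example (illustrative, not part of the original source): when a
    // table of length n doubles, every node in bin i lands in bin i (when
    // (hash & n) == 0) or in bin i + n, so the lo/hi split above reproduces
    // hash & (2n - 1) indexing without rehashing individual keys.
    private static void transferSplitExample() {
        int n = 16;                     // old table length
        int h1 = 37, h2 = 53;           // both index to bin 5 when length == 16
        assert (h1 & (n - 1)) == 5 && (h2 & (n - 1)) == 5;
        assert (h1 & n) == 0 && (h1 & (2 * n - 1)) == 5;        // stays in bin i
        assert (h2 & n) != 0 && (h2 & (2 * n - 1)) == 5 + n;    // moves to bin i + n
    }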
2064 tim 1.1
2065 dl 1.149 /* ---------------- Counter support -------------- */
2066    
2067     final long sumCount() {
2068 dl 1.153 Cell[] as = counterCells; Cell a;
2069 dl 1.149 long sum = baseCount;
2070     if (as != null) {
2071     for (int i = 0; i < as.length; ++i) {
2072     if ((a = as[i]) != null)
2073     sum += a.value;
2074 dl 1.119 }
2075     }
2076 dl 1.149 return sum;
2077 dl 1.119 }
2078    
2079 dl 1.149 // See LongAdder version for explanation
2080 dl 1.160 private final void fullAddCount(long x, boolean wasUncontended) {
2081 dl 1.149 int h;
2082 dl 1.160 if ((h = ThreadLocalRandom.getProbe()) == 0) {
2083     ThreadLocalRandom.localInit(); // force initialization
2084     h = ThreadLocalRandom.getProbe();
2085     wasUncontended = true;
2086 dl 1.119 }
2087 dl 1.149 boolean collide = false; // True if last slot nonempty
2088     for (;;) {
2089 dl 1.153 Cell[] as; Cell a; int n; long v;
2090 dl 1.149 if ((as = counterCells) != null && (n = as.length) > 0) {
2091     if ((a = as[(n - 1) & h]) == null) {
2092 dl 1.153 if (cellsBusy == 0) { // Try to attach new Cell
2093     Cell r = new Cell(x); // Optimistic create
2094     if (cellsBusy == 0 &&
2095     U.compareAndSwapInt(this, CELLSBUSY, 0, 1)) {
2096 dl 1.149 boolean created = false;
2097     try { // Recheck under lock
2098 dl 1.153 Cell[] rs; int m, j;
2099 dl 1.149 if ((rs = counterCells) != null &&
2100     (m = rs.length) > 0 &&
2101     rs[j = (m - 1) & h] == null) {
2102     rs[j] = r;
2103     created = true;
2104 dl 1.128 }
2105 dl 1.149 } finally {
2106 dl 1.153 cellsBusy = 0;
2107 dl 1.119 }
2108 dl 1.149 if (created)
2109     break;
2110     continue; // Slot is now non-empty
2111     }
2112     }
2113     collide = false;
2114     }
2115     else if (!wasUncontended) // CAS already known to fail
2116     wasUncontended = true; // Continue after rehash
2117     else if (U.compareAndSwapLong(a, CELLVALUE, v = a.value, v + x))
2118     break;
2119     else if (counterCells != as || n >= NCPU)
2120     collide = false; // At max size or stale
2121     else if (!collide)
2122     collide = true;
2123 dl 1.153 else if (cellsBusy == 0 &&
2124     U.compareAndSwapInt(this, CELLSBUSY, 0, 1)) {
2125 dl 1.149 try {
2126     if (counterCells == as) {// Expand table unless stale
2127 dl 1.153 Cell[] rs = new Cell[n << 1];
2128 dl 1.149 for (int i = 0; i < n; ++i)
2129     rs[i] = as[i];
2130     counterCells = rs;
2131 dl 1.119 }
2132     } finally {
2133 dl 1.153 cellsBusy = 0;
2134 dl 1.119 }
2135 dl 1.149 collide = false;
2136     continue; // Retry with expanded table
2137 dl 1.119 }
2138 dl 1.160 h = ThreadLocalRandom.advanceProbe(h);
2139 dl 1.149 }
2140 dl 1.153 else if (cellsBusy == 0 && counterCells == as &&
2141     U.compareAndSwapInt(this, CELLSBUSY, 0, 1)) {
2142 dl 1.149 boolean init = false;
2143     try { // Initialize table
2144     if (counterCells == as) {
2145 dl 1.153 Cell[] rs = new Cell[2];
2146     rs[h & 1] = new Cell(x);
2147 dl 1.149 counterCells = rs;
2148     init = true;
2149 dl 1.119 }
2150     } finally {
2151 dl 1.153 cellsBusy = 0;
2152 dl 1.119 }
2153 dl 1.149 if (init)
2154     break;
2155 dl 1.119 }
2156 dl 1.149 else if (U.compareAndSwapLong(this, BASECOUNT, v = baseCount, v + x))
2157     break; // Fall back on using base
2158 dl 1.119 }
2159     }
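    // Illustrative analogue (not part of the original source): the baseCount /
    // counterCells scheme above mirrors java.util.concurrent.atomic.LongAdder,
    // where updates that contend on the base spill into per-cell counters that
    // are only summed on read, trading read precision for write scalability.
    private static long stripedCountAnalogue() {
        java.util.concurrent.atomic.LongAdder adder =
            new java.util.concurrent.atomic.LongAdder();
        adder.increment();        // like addCount(1L, ...) after an insertion
        adder.add(-1L);           // like addCount(-1L, ...) after a removal
        return adder.sum();       // like sumCount(): base plus all cell values
    }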
2160    
2161     /* ----------------Table Traversal -------------- */
2162    
2163     /**
2164     * Encapsulates traversal for methods such as containsValue; also
2165 dl 1.126 * serves as a base class for other iterators and bulk tasks.
2166 dl 1.119 *
2167     * At each step, the iterator snapshots the key ("nextKey") and
2168     * value ("nextVal") of a valid node (i.e., one that, at point of
2169     * snapshot, has a non-null user value). Because val fields can
2170     * change (including to null, indicating deletion), field nextVal
2171     * might not be accurate at point of use, but still maintains the
2172     * weak consistency property of holding a value that was once
2173 dl 1.137 * valid. To support iterator.remove, the nextKey field is not
2174     * updated (nulled out) when the iterator cannot advance.
2175 dl 1.119 *
2176     * Exported iterators must track whether the iterator has advanced
2177     * (in hasNext vs next) (by setting/checking/nulling field
2178     * nextVal), and then extract key, value, or key-value pairs as
2179     * return values of next().
2180     *
2181 dl 1.192 * Method advance visits once each still-valid node that was
2182 dl 1.119 * reachable upon iterator construction. It might miss some that
2183     * were added to a bin after the bin was visited, which is OK wrt
2184     * consistency guarantees. Maintaining this property in the face
2185     * of possible ongoing resizes requires a fair amount of
2186     * bookkeeping state that is difficult to optimize away amidst
2187     * volatile accesses. Even so, traversal maintains reasonable
2188     * throughput.
2189     *
2190     * Normally, iteration proceeds bin-by-bin traversing lists.
2191     * However, if the table has been resized, then all future steps
2192     * must traverse both the bin at the current index as well as at
2193     * (index + baseSize); and so on for further resizings. To
2194     * paranoically cope with potential sharing by users of iterators
2195     * across threads, iteration terminates if a bounds check fails
2196     * for a table read.
2197     *
2198 dl 1.192 * Methods advanceKey and advanceValue are specializations of the
2199     * common cases of advance, relaying to the full version
2200     * otherwise. The forEachKey and forEachValue methods further
2201     * specialize, bypassing all incremental field updates in most cases.
2202     *
2203 dl 1.153 * This class supports both Spliterator-based traversal and
2204     * CountedCompleter-based bulk tasks. The same "batch" field is
2205     * used, but in slightly different ways, in the two cases. For
2206     * Spliterators, it is a saturating (at Integer.MAX_VALUE)
2207     * estimate of element coverage. For CHM tasks, it is a pre-scaled
2208     * size that halves down to zero for leaf tasks, that is only
2209     * computed upon execution of the task. (Tasks can be submitted to
2210     * any pool, of any size, so we don't know scale factors until
2211     * running.)
2212     *
2213 dl 1.146 * This class extends CountedCompleter to streamline parallel
2214     * iteration in bulk operations. This adds only a few fields of
2215     * space overhead, which is small enough not to matter in the
2216     * cases where it is not needed. Because CountedCompleter is
2217     * Serializable, but iterators need not be, we need to add warning
2218     * suppressions.
2219 dl 1.119 */
2220 dl 1.149 @SuppressWarnings("serial") static class Traverser<K,V,R>
2221     extends CountedCompleter<R> {
2222 jsr166 1.186 final ConcurrentHashMap<K,V> map;
2223 dl 1.151 Node<V> next; // the next entry to use
2224 jsr166 1.168 K nextKey; // cached key field of next
2225 dl 1.151 V nextVal; // cached val field of next
2226     Node<V>[] tab; // current table; updated if resized
2227 dl 1.119 int index; // index of bin to use next
2228     int baseIndex; // current index of initial table
2229     int baseLimit; // index bound for initial table
2230 dl 1.192 final int baseSize; // initial table size
2231 dl 1.146 int batch; // split control
2232 dl 1.192
2233 dl 1.119 /** Creates iterator for all entries in the table. */
2234 jsr166 1.186 Traverser(ConcurrentHashMap<K,V> map) {
2235 dl 1.130 this.map = map;
2236 dl 1.192 Node<V>[] t = this.tab = map.table;
2237     baseLimit = baseSize = (t == null) ? 0 : t.length;
2238 dl 1.119 }
2239    
2240 dl 1.153 /** Task constructor */
2241 dl 1.146 Traverser(ConcurrentHashMap<K,V> map, Traverser<K,V,?> it, int batch) {
2242     super(it);
2243 dl 1.153 this.map = map;
2244     this.batch = batch; // -1 if unknown
2245     if (it == null) {
2246 dl 1.192 Node<V>[] t = this.tab = map.table;
2247     baseLimit = baseSize = (t == null) ? 0 : t.length;
2248 dl 1.153 }
2249     else { // split parent
2250     this.tab = it.tab;
2251 dl 1.146 this.baseSize = it.baseSize;
2252     int hi = this.baseLimit = it.baseLimit;
2253     it.baseLimit = this.index = this.baseIndex =
2254 dl 1.195 (hi + it.baseIndex) >>> 1;
2255 dl 1.146 }
2256 dl 1.119 }
2257    
2258 dl 1.153 /** Spliterator constructor */
2259     Traverser(ConcurrentHashMap<K,V> map, Traverser<K,V,?> it) {
2260     super(it);
2261     this.map = map;
2262     if (it == null) {
2263 dl 1.192 Node<V>[] t = this.tab = map.table;
2264     baseLimit = baseSize = (t == null) ? 0 : t.length;
2265 dl 1.153 long n = map.sumCount();
2266     batch = ((n > (long)Integer.MAX_VALUE) ? Integer.MAX_VALUE :
2267     (int)n);
2268     }
2269     else {
2270     this.tab = it.tab;
2271     this.baseSize = it.baseSize;
2272     int hi = this.baseLimit = it.baseLimit;
2273     it.baseLimit = this.index = this.baseIndex =
2274 dl 1.195 (hi + it.baseIndex) >>> 1;
2275 dl 1.153 this.batch = it.batch >>>= 1;
2276     }
2277     }
2278    
2279 jsr166 1.193 /**
2280     * Advances if possible, returning next valid value, or null if none.
2281 dl 1.119 */
2282 dl 1.192 @SuppressWarnings("unchecked") final V advance() {
2283     for (Node<V> e = next;;) {
2284 dl 1.119 if (e != null) // advance past used/skipped node
2285 dl 1.192 e = next = e.next;
2286 dl 1.119 while (e == null) { // get to next non-null bin
2287 dl 1.192 Node<V>[] t; int i, n; // must use locals in checks
2288     if (baseIndex >= baseLimit || (t = tab) == null ||
2289     (n = t.length) <= (i = index) || i < 0)
2290     return nextVal = null;
2291     if ((e = next = tabAt(t, index)) != null && e.hash < 0) {
2292     Object ek;
2293 dl 1.119 if ((ek = e.key) instanceof TreeBin)
2294 dl 1.151 e = ((TreeBin<V>)ek).first;
2295 dl 1.119 else {
2296 dl 1.151 tab = (Node<V>[])ek;
2297 dl 1.119 continue; // restarts due to null val
2298     }
2299 dl 1.192 }
2300     if ((index += baseSize) >= n)
2301     index = ++baseIndex; // visit upper slots if present
2302 dl 1.119 }
2303 dl 1.192 nextKey = (K)e.key;
2304     if ((nextVal = e.val) != null) // skip deleted or special nodes
2305     return nextVal;
2306     }
2307 dl 1.191 }
2308    
2309 dl 1.192 /**
2310     * Common case version for value traversal
2311     */
2312     @SuppressWarnings("unchecked") final V advanceValue() {
2313     outer: for (Node<V> e = next;;) {
2314     if (e == null || (e = e.next) == null) {
2315     Node<V>[] t; int i, len, n; Object ek;
2316     if ((t = tab) == null ||
2317     baseSize != (len = t.length) ||
2318     len < (n = baseLimit) ||
2319     baseIndex != (i = index))
2320     break;
2321     do {
2322     if (i < 0 || i >= n) {
2323     index = baseIndex = n;
2324     next = null;
2325     return nextVal = null;
2326     }
2327     if ((e = tabAt(t, i)) != null && e.hash < 0) {
2328     if ((ek = e.key) instanceof TreeBin)
2329     e = ((TreeBin<V>)ek).first;
2330     else {
2331     index = baseIndex = i;
2332     next = null;
2333     tab = (Node<V>[])ek;
2334     break outer;
2335     }
2336     }
2337     ++i;
2338     } while (e == null);
2339     index = baseIndex = i;
2340     }
2341     V v;
2342     K k = (K)e.key;
2343     if ((v = e.val) != null) {
2344     nextVal = v;
2345     nextKey = k;
2346     next = e;
2347     return v;
2348     }
2349     }
2350     return advance();
2351     }
2352    
2353     /**
2354     * Common case version for key traversal
2355     */
2356 dl 1.191 @SuppressWarnings("unchecked") final K advanceKey() {
2357 dl 1.192 outer: for (Node<V> e = next;;) {
2358     if (e == null || (e = e.next) == null) {
2359     Node<V>[] t; int i, len, n; Object ek;
2360     if ((t = tab) == null ||
2361     baseSize != (len = t.length) ||
2362     len < (n = baseLimit) ||
2363     baseIndex != (i = index))
2364     break;
2365     do {
2366     if (i < 0 || i >= n) {
2367     index = baseIndex = n;
2368     next = null;
2369     nextVal = null;
2370     return null;
2371     }
2372     if ((e = tabAt(t, i)) != null && e.hash < 0) {
2373     if ((ek = e.key) instanceof TreeBin)
2374     e = ((TreeBin<V>)ek).first;
2375     else {
2376     index = baseIndex = i;
2377     next = null;
2378     tab = (Node<V>[])ek;
2379     break outer;
2380     }
2381     }
2382     ++i;
2383     } while (e == null);
2384     index = baseIndex = i;
2385     }
2386     V v;
2387     K k = (K)e.key;
2388     if ((v = e.val) != null) {
2389     nextVal = v;
2390     nextKey = k;
2391     next = e;
2392     return k;
2393     }
2394     }
2395     return (advance() == null) ? null : nextKey;
2396     }
2397    
2398     @SuppressWarnings("unchecked") final void forEachValue(Consumer<? super V> action) {
2399     if (action == null) throw new NullPointerException();
2400     Node<V>[] t; int i, len, n;
2401     if ((t = tab) != null && baseSize == (len = t.length) &&
2402     len >= (n = baseLimit) && baseIndex == (i = index)) {
2403 dl 1.195 index = baseIndex = n;
2404     nextVal = null;
2405 dl 1.192 Node<V> e = next;
2406     next = null;
2407 dl 1.195 if (e != null)
2408     e = e.next;
2409 dl 1.192 outer: for (;; e = e.next) {
2410     V v; Object ek;
2411     for (; e == null; ++i) {
2412     if (i < 0 || i >= n)
2413     return;
2414     if ((e = tabAt(t, i)) != null && e.hash < 0) {
2415     if ((ek = e.key) instanceof TreeBin)
2416     e = ((TreeBin<V>)ek).first;
2417     else {
2418     index = baseIndex = i;
2419     tab = (Node<V>[])ek;
2420     break outer;
2421     }
2422     }
2423 dl 1.191 }
2424 dl 1.192 if ((v = e.val) != null)
2425     action.accept(v);
2426     }
2427     }
2428     V v;
2429     while ((v = advance()) != null)
2430     action.accept(v);
2431     }
2432    
2433     @SuppressWarnings("unchecked") final void forEachKey(Consumer<? super K> action) {
2434     if (action == null) throw new NullPointerException();
2435     Node<V>[] t; int i, len, n;
2436     if ((t = tab) != null && baseSize == (len = t.length) &&
2437     len >= (n = baseLimit) && baseIndex == (i = index)) {
2438 dl 1.195 index = baseIndex = n;
2439     nextVal = null;
2440 dl 1.192 Node<V> e = next;
2441     next = null;
2442 dl 1.195 if (e != null)
2443     e = e.next;
2444 dl 1.192 outer: for (;; e = e.next) {
2445     for (; e == null; ++i) {
2446     if (i < 0 || i >= n)
2447     return;
2448     if ((e = tabAt(t, i)) != null && e.hash < 0) {
2449     Object ek;
2450     if ((ek = e.key) instanceof TreeBin)
2451     e = ((TreeBin<V>)ek).first;
2452     else {
2453     index = baseIndex = i;
2454     tab = (Node<V>[])ek;
2455     break outer;
2456     }
2457 dl 1.191 }
2458     }
2459 dl 1.192 Object k = e.key;
2460     if (e.val != null)
2461     action.accept((K)k);
2462 dl 1.191 }
2463 dl 1.192 }
2464     while (advance() != null)
2465     action.accept(nextKey);
2466 dl 1.119 }
2467    
2468     public final void remove() {
2469 jsr166 1.168 K k = nextKey;
2470 dl 1.191 if (k == null && (advanceValue() == null || (k = nextKey) == null))
2471 dl 1.119 throw new IllegalStateException();
2472 dl 1.137 map.internalReplace(k, null, null);
2473 dl 1.119 }
2474    
2475     public final boolean hasNext() {
2476 dl 1.191 return nextVal != null || advanceValue() != null;
2477 dl 1.119 }
2478    
2479     public final boolean hasMoreElements() { return hasNext(); }
2480 dl 1.146
2481     public void compute() { } // default no-op CountedCompleter body
2482    
2483 dl 1.192 public long estimateSize() { return batch; }
2484    
2485 dl 1.146 /**
2486     * Returns a batch value > 0 if this task should (and must) be
2487     * split, if so, adding to pending count, and in any case
2488     * updating batch value. The initial batch value is approx
2489     * exp2 of the number of times (minus one) to split task by
2490     * two before executing leaf action. This value is faster to
2491     * compute and more convenient to use as a guide to splitting
2492     * than is the depth, since it is used while dividing by two
2493     * anyway.
2494     */
2495     final int preSplit() {
2496 jsr166 1.207 int b; ForkJoinPool pool;
2497 dl 1.153 if ((b = batch) < 0) { // force initialization
2498     int sp = (((pool = getPool()) == null) ?
2499     ForkJoinPool.getCommonPoolParallelism() :
2500 dl 1.209 pool.getParallelism()) << 2; // slack of 4
2501 dl 1.153 long n = map.sumCount();
2502     b = (n <= 0L) ? 0 : (n < (long)sp) ? (int)n : sp;
2503 dl 1.146 }
2504 dl 1.192 b = (b <= 1 || baseIndex >= baseLimit) ? 0 : (b >>> 1);
2505 dl 1.146 if ((batch = b) > 0)
2506     addToPendingCount(1);
2507     return b;
2508     }
2509 dl 1.119 }
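    // Usage sketch (illustrative, not part of the original source): the weakly
    // consistent traversal described above means view iterators never throw
    // ConcurrentModificationException and may or may not reflect updates made
    // after they were created.
    static void weaklyConsistentIterationExample(ConcurrentHashMap<String,Integer> map) {
        for (Map.Entry<String,Integer> e : map.entrySet()) {
            if (e.getValue() == 0)
                map.remove(e.getKey());   // safe while iterating; no CME is thrown
        }
    }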
2510    
2511     /* ---------------- Public operations -------------- */
2512    
2513     /**
2514     * Creates a new, empty map with the default initial table size (16).
2515     */
2516     public ConcurrentHashMap() {
2517     }
2518    
2519     /**
2520     * Creates a new, empty map with an initial table size
2521     * accommodating the specified number of elements without the need
2522     * to dynamically resize.
2523     *
2524     * @param initialCapacity The implementation performs internal
2525     * sizing to accommodate this many elements.
2526     * @throws IllegalArgumentException if the initial capacity of
2527     * elements is negative
2528     */
2529     public ConcurrentHashMap(int initialCapacity) {
2530     if (initialCapacity < 0)
2531     throw new IllegalArgumentException();
2532     int cap = ((initialCapacity >= (MAXIMUM_CAPACITY >>> 1)) ?
2533     MAXIMUM_CAPACITY :
2534     tableSizeFor(initialCapacity + (initialCapacity >>> 1) + 1));
2535     this.sizeCtl = cap;
2536     }
2537    
2538     /**
2539     * Creates a new map with the same mappings as the given map.
2540     *
2541     * @param m the map
2542     */
2543     public ConcurrentHashMap(Map<? extends K, ? extends V> m) {
2544     this.sizeCtl = DEFAULT_CAPACITY;
2545     internalPutAll(m);
2546     }
2547    
2548     /**
2549     * Creates a new, empty map with an initial table size based on
2550     * the given number of elements ({@code initialCapacity}) and
2551     * initial table density ({@code loadFactor}).
2552     *
2553     * @param initialCapacity the initial capacity. The implementation
2554     * performs internal sizing to accommodate this many elements,
2555     * given the specified load factor.
2556     * @param loadFactor the load factor (table density) for
2557     * establishing the initial table size
2558     * @throws IllegalArgumentException if the initial capacity of
2559     * elements is negative or the load factor is nonpositive
2560     *
2561     * @since 1.6
2562     */
2563     public ConcurrentHashMap(int initialCapacity, float loadFactor) {
2564     this(initialCapacity, loadFactor, 1);
2565     }
2566    
2567     /**
2568     * Creates a new, empty map with an initial table size based on
2569     * the given number of elements ({@code initialCapacity}), table
2570     * density ({@code loadFactor}), and number of concurrently
2571     * updating threads ({@code concurrencyLevel}).
2572     *
2573     * @param initialCapacity the initial capacity. The implementation
2574     * performs internal sizing to accommodate this many elements,
2575     * given the specified load factor.
2576     * @param loadFactor the load factor (table density) for
2577     * establishing the initial table size
2578     * @param concurrencyLevel the estimated number of concurrently
2579     * updating threads. The implementation may use this value as
2580     * a sizing hint.
2581     * @throws IllegalArgumentException if the initial capacity is
2582     * negative or the load factor or concurrencyLevel are
2583     * nonpositive
2584     */
2585     public ConcurrentHashMap(int initialCapacity,
2586     float loadFactor, int concurrencyLevel) {
2587     if (!(loadFactor > 0.0f) || initialCapacity < 0 || concurrencyLevel <= 0)
2588     throw new IllegalArgumentException();
2589     if (initialCapacity < concurrencyLevel) // Use at least as many bins
2590     initialCapacity = concurrencyLevel; // as estimated threads
2591     long size = (long)(1.0 + (long)initialCapacity / loadFactor);
2592     int cap = (size >= (long)MAXIMUM_CAPACITY) ?
2593     MAXIMUM_CAPACITY : tableSizeFor((int)size);
2594     this.sizeCtl = cap;
2595     }
2596    
2597     /**
2598 dl 1.137 * Creates a new {@link Set} backed by a ConcurrentHashMap
2599     * from the given type to {@code Boolean.TRUE}.
2600     *
2601     * @return the new set
2602     */
2603     public static <K> KeySetView<K,Boolean> newKeySet() {
2604 jsr166 1.184 return new KeySetView<K,Boolean>
2605     (new ConcurrentHashMap<K,Boolean>(), Boolean.TRUE);
2606 dl 1.137 }
2607    
2608     /**
2609     * Creates a new {@link Set} backed by a ConcurrentHashMap
2610     * from the given type to {@code Boolean.TRUE}.
2611     *
2612     * @param initialCapacity The implementation performs internal
2613     * sizing to accommodate this many elements.
2614     * @throws IllegalArgumentException if the initial capacity of
2615     * elements is negative
2616     * @return the new set
2617     */
2618     public static <K> KeySetView<K,Boolean> newKeySet(int initialCapacity) {
2619 dl 1.149 return new KeySetView<K,Boolean>
2620     (new ConcurrentHashMap<K,Boolean>(initialCapacity), Boolean.TRUE);
2621 dl 1.137 }
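    // Usage sketch (illustrative, not part of the original source): a concurrent
    // Set view backed by a ConcurrentHashMap that maps every element to
    // Boolean.TRUE, as produced by the factory methods above.
    static java.util.Set<String> newKeySetExample() {
        KeySetView<String,Boolean> seen = ConcurrentHashMap.newKeySet();
        seen.add("alpha");
        seen.add("alpha");        // duplicate; the set still holds one "alpha"
        seen.add("beta");
        return seen;              // size() == 2, fully thread-safe
    }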
2622    
2623     /**
2624 dl 1.119 * {@inheritDoc}
2625     */
2626     public boolean isEmpty() {
2627 dl 1.149 return sumCount() <= 0L; // ignore transient negative values
2628 dl 1.119 }
2629    
2630     /**
2631     * {@inheritDoc}
2632     */
2633     public int size() {
2634 dl 1.149 long n = sumCount();
2635 dl 1.119 return ((n < 0L) ? 0 :
2636     (n > (long)Integer.MAX_VALUE) ? Integer.MAX_VALUE :
2637     (int)n);
2638     }
2639    
2640     /**
2641     * Returns the number of mappings. This method should be used
2642     * instead of {@link #size} because a ConcurrentHashMap may
2643     * contain more mappings than can be represented as an int. The
2644 dl 1.146 * value returned is an estimate; the actual count may differ if
2645     * there are concurrent insertions or removals.
2646 dl 1.119 *
2647     * @return the number of mappings
2648     */
2649     public long mappingCount() {
2650 dl 1.149 long n = sumCount();
2651 dl 1.126 return (n < 0L) ? 0L : n; // ignore transient negative values
2652 dl 1.119 }
2653    
2654     /**
2655     * Returns the value to which the specified key is mapped,
2656     * or {@code null} if this map contains no mapping for the key.
2657     *
2658     * <p>More formally, if this map contains a mapping from a key
2659     * {@code k} to a value {@code v} such that {@code key.equals(k)},
2660     * then this method returns {@code v}; otherwise it returns
2661     * {@code null}. (There can be at most one such mapping.)
2662     *
2663     * @throws NullPointerException if the specified key is null
2664     */
2665 dl 1.149 public V get(Object key) {
2666     return internalGet(key);
2667 dl 1.119 }
2668    
2669     /**
2670 dl 1.129 * Returns the value to which the specified key is mapped,
2671 jsr166 1.133 * or the given defaultValue if this map contains no mapping for the key.
2672 dl 1.129 *
2673     * @param key the key
2674     * @param defaultValue the value to return if this map contains
2675 jsr166 1.134 * no mapping for the given key
2676 dl 1.129 * @return the mapping for the key, if present; else the defaultValue
2677     * @throws NullPointerException if the specified key is null
2678     */
2679 dl 1.195 public V getOrDefault(Object key, V defaultValue) {
2680 dl 1.149 V v;
2681     return (v = internalGet(key)) == null ? defaultValue : v;
2682 dl 1.129 }
2683    
2684     /**
2685 dl 1.119 * Tests if the specified object is a key in this table.
2686     *
2687 jsr166 1.190 * @param key possible key
2688 dl 1.119 * @return {@code true} if and only if the specified object
2689     * is a key in this table, as determined by the
2690     * {@code equals} method; {@code false} otherwise
2691     * @throws NullPointerException if the specified key is null
2692     */
2693     public boolean containsKey(Object key) {
2694     return internalGet(key) != null;
2695     }
2696    
2697     /**
2698     * Returns {@code true} if this map maps one or more keys to the
2699     * specified value. Note: This method may require a full traversal
2700     * of the map, and is much slower than method {@code containsKey}.
2701     *
2702     * @param value value whose presence in this map is to be tested
2703     * @return {@code true} if this map maps one or more keys to the
2704     * specified value
2705     * @throws NullPointerException if the specified value is null
2706     */
2707     public boolean containsValue(Object value) {
2708     if (value == null)
2709     throw new NullPointerException();
2710 dl 1.151 V v;
2711 dl 1.119 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
2712 dl 1.191 while ((v = it.advanceValue()) != null) {
2713 dl 1.119 if (v == value || value.equals(v))
2714     return true;
2715     }
2716     return false;
2717     }
2718    
2719     /**
2720     * Legacy method testing if some key maps into the specified value
2721     * in this table. This method is identical in functionality to
2722 jsr166 1.176 * {@link #containsValue(Object)}, and exists solely to ensure
2723 dl 1.119 * full compatibility with class {@link java.util.Hashtable},
2724     * which supported this method prior to introduction of the
2725     * Java Collections framework.
2726     *
2727     * @param value a value to search for
2728     * @return {@code true} if and only if some key maps to the
2729     * {@code value} argument in this table as
2730     * determined by the {@code equals} method;
2731     * {@code false} otherwise
2732     * @throws NullPointerException if the specified value is null
2733     */
2734 dl 1.151 @Deprecated public boolean contains(Object value) {
2735 dl 1.119 return containsValue(value);
2736     }
2737    
2738     /**
2739     * Maps the specified key to the specified value in this table.
2740     * Neither the key nor the value can be null.
2741     *
2742 jsr166 1.145 * <p>The value can be retrieved by calling the {@code get} method
2743 dl 1.119 * with a key that is equal to the original key.
2744     *
2745     * @param key key with which the specified value is to be associated
2746     * @param value value to be associated with the specified key
2747     * @return the previous value associated with {@code key}, or
2748     * {@code null} if there was no mapping for {@code key}
2749     * @throws NullPointerException if the specified key or value is null
2750     */
2751 dl 1.149 public V put(K key, V value) {
2752     return internalPut(key, value, false);
2753 dl 1.119 }
2754    
2755     /**
2756     * {@inheritDoc}
2757     *
2758     * @return the previous value associated with the specified key,
2759     * or {@code null} if there was no mapping for the key
2760     * @throws NullPointerException if the specified key or value is null
2761     */
2762 dl 1.149 public V putIfAbsent(K key, V value) {
2763     return internalPut(key, value, true);
2764 dl 1.119 }
2765    
2766     /**
2767     * Copies all of the mappings from the specified map to this one.
2768     * These mappings replace any mappings that this map had for any of the
2769     * keys currently in the specified map.
2770     *
2771     * @param m mappings to be stored in this map
2772     */
2773     public void putAll(Map<? extends K, ? extends V> m) {
2774     internalPutAll(m);
2775     }
2776    
2777     /**
2778 dl 1.153 * If the specified key is not already associated with a value (or
2779     * is mapped to {@code null}), attempts to compute its value using
2780     * the given mapping function and enters it into this map unless
2781     * {@code null}. The entire method invocation is performed
2782     * atomically, so the function is applied at most once per key.
2783     * Some attempted update operations on this map by other threads
2784     * may be blocked while computation is in progress, so the
2785     * computation should be short and simple, and must not attempt to
2786     * update any other mappings of this Map.
2787 dl 1.119 *
2788     * @param key key with which the specified value is to be associated
2789     * @param mappingFunction the function to compute a value
2790     * @return the current (existing or computed) value associated with
2791 jsr166 1.134 * the specified key, or null if the computed value is null
2792 dl 1.119 * @throws NullPointerException if the specified key or mappingFunction
2793     * is null
2794     * @throws IllegalStateException if the computation detectably
2795     * attempts a recursive update to this map that would
2796     * otherwise never complete
2797     * @throws RuntimeException or Error if the mappingFunction does so,
2798     * in which case the mapping is left unestablished
2799     */
2800 dl 1.149 public V computeIfAbsent
2801 dl 1.153 (K key, Function<? super K, ? extends V> mappingFunction) {
2802 dl 1.149 return internalComputeIfAbsent(key, mappingFunction);
2803 dl 1.119 }
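    // Usage sketch (illustrative, not part of the original source): an atomic
    // "get or create" building a simple concurrent multimap; the mapping
    // function runs at most once per absent key, so no duplicate queues are
    // created under contention.
    static void computeIfAbsentExample(
        ConcurrentHashMap<String, java.util.Queue<String>> index) {
        index.computeIfAbsent("jsr166",
                              k -> new java.util.concurrent.ConcurrentLinkedQueue<String>())
             .add("ConcurrentHashMap");
    }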
2804    
2805     /**
2806 dl 1.153 * If the value for the specified key is present and non-null,
2807     * attempts to compute a new mapping given the key and its current
2808     * mapped value. The entire method invocation is performed
2809     * atomically. Some attempted update operations on this map by
2810 dl 1.119 * other threads may be blocked while computation is in progress,
2811     * so the computation should be short and simple, and must not
2812 dl 1.153 * attempt to update any other mappings of this Map.
2813 dl 1.119 *
2814 dl 1.197 * @param key key with which a value may be associated
2815 dl 1.119 * @param remappingFunction the function to compute a value
2816 jsr166 1.123 * @return the new value associated with the specified key, or null if none
2817 dl 1.119 * @throws NullPointerException if the specified key or remappingFunction
2818     * is null
2819     * @throws IllegalStateException if the computation detectably
2820     * attempts a recursive update to this map that would
2821     * otherwise never complete
2822     * @throws RuntimeException or Error if the remappingFunction does so,
2823     * in which case the mapping is unchanged
2824     */
2825 dl 1.149 public V computeIfPresent
2826 dl 1.153 (K key, BiFunction<? super K, ? super V, ? extends V> remappingFunction) {
2827 dl 1.149 return internalCompute(key, true, remappingFunction);
2828 dl 1.119 }
2829    
2830     /**
2831 dl 1.153 * Attempts to compute a mapping for the specified key and its
2832     * current mapped value (or {@code null} if there is no current
2833     * mapping). The entire method invocation is performed atomically.
2834     * Some attempted update operations on this map by other threads
2835     * may be blocked while computation is in progress, so the
2836     * computation should be short and simple, and must not attempt to
2837     * update any other mappings of this Map.
2838 dl 1.119 *
2839     * @param key key with which the specified value is to be associated
2840     * @param remappingFunction the function to compute a value
2841 jsr166 1.123 * @return the new value associated with the specified key, or null if none
2842 dl 1.119 * @throws NullPointerException if the specified key or remappingFunction
2843     * is null
2844     * @throws IllegalStateException if the computation detectably
2845     * attempts a recursive update to this map that would
2846     * otherwise never complete
2847     * @throws RuntimeException or Error if the remappingFunction does so,
2848     * in which case the mapping is unchanged
2849     */
2850 dl 1.149 public V compute
2851 dl 1.153 (K key, BiFunction<? super K, ? super V, ? extends V> remappingFunction) {
2852 dl 1.149 return internalCompute(key, false, remappingFunction);
2853 dl 1.119 }
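    // Usage sketch (illustrative, not part of the original source): compute can
    // atomically update or discard a mapping; returning null from the function
    // removes the entry, so this decrements a count and drops it at zero.
    static void computeExample(ConcurrentHashMap<String,Integer> counts, String word) {
        counts.compute(word, (k, v) -> (v == null || v == 1) ? null : v - 1);
    }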
2854    
2855     /**
2856 dl 1.153 * If the specified key is not already associated with a
2857     * (non-null) value, associates it with the given value.
2858     * Otherwise, replaces the value with the results of the given
2859     * remapping function, or removes if {@code null}. The entire
2860     * method invocation is performed atomically. Some attempted
2861     * update operations on this map by other threads may be blocked
2862     * while computation is in progress, so the computation should be
2863     * short and simple, and must not attempt to update any other
2864     * mappings of this Map.
2865     *
2866     * @param key key with which the specified value is to be associated
2867     * @param value the value to use if absent
2868     * @param remappingFunction the function to recompute a value if present
2869     * @return the new value associated with the specified key, or null if none
2870     * @throws NullPointerException if the specified key or the
2871     * remappingFunction is null
2872     * @throws RuntimeException or Error if the remappingFunction does so,
2873     * in which case the mapping is unchanged
2874 dl 1.119 */
2875 dl 1.149 public V merge
2876     (K key, V value,
2877 dl 1.153 BiFunction<? super V, ? super V, ? extends V> remappingFunction) {
2878 dl 1.149 return internalMerge(key, value, remappingFunction);
2879 dl 1.119 }
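    // Usage sketch (illustrative, not part of the original source): merge as an
    // atomic "insert or combine", here accumulating a count per word.
    static void mergeExample(ConcurrentHashMap<String,Integer> counts, String word) {
        counts.merge(word, 1, (oldCount, one) -> oldCount + one);
        // equivalently: counts.merge(word, 1, Integer::sum);
    }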
2880    
2881     /**
2882     * Removes the key (and its corresponding value) from this map.
2883     * This method does nothing if the key is not in the map.
2884     *
2885     * @param key the key that needs to be removed
2886     * @return the previous value associated with {@code key}, or
2887     * {@code null} if there was no mapping for {@code key}
2888     * @throws NullPointerException if the specified key is null
2889     */
2890 dl 1.149 public V remove(Object key) {
2891     return internalReplace(key, null, null);
2892 dl 1.119 }
2893    
2894     /**
2895     * {@inheritDoc}
2896     *
2897     * @throws NullPointerException if the specified key is null
2898     */
2899     public boolean remove(Object key, Object value) {
2900 dl 1.163 if (key == null)
2901     throw new NullPointerException();
2902 dl 1.149 return value != null && internalReplace(key, null, value) != null;
2903 tim 1.1 }
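A minimal sketch of the two-argument {@code remove}, which deletes the entry only while it still maps to the expected value; the {@code cache} map and its contents are hypothetical.

    ConcurrentHashMap<String, String> cache = new ConcurrentHashMap<String, String>();
    cache.put("config", "stale");
    // succeeds only if the mapping is still ("config", "stale")
    boolean removed = cache.remove("config", "stale");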
2904 dl 1.31
2905     /**
2906 jsr166 1.68 * {@inheritDoc}
2907     *
2908     * @throws NullPointerException if any of the arguments are null
2909 dl 1.31 */
2910     public boolean replace(K key, V oldValue, V newValue) {
2911 dl 1.119 if (key == null || oldValue == null || newValue == null)
2912 dl 1.31 throw new NullPointerException();
2913 dl 1.119 return internalReplace(key, newValue, oldValue) != null;
2914 dl 1.32 }
2915    
2916     /**
2917 jsr166 1.68 * {@inheritDoc}
2918     *
2919     * @return the previous value associated with the specified key,
2920 dl 1.119 * or {@code null} if there was no mapping for the key
2921 jsr166 1.68 * @throws NullPointerException if the specified key or value is null
2922 dl 1.32 */
2923 dl 1.149 public V replace(K key, V value) {
2924 dl 1.119 if (key == null || value == null)
2925 dl 1.32 throw new NullPointerException();
2926 dl 1.149 return internalReplace(key, value, null);
2927 dl 1.31 }
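A minimal sketch of an optimistic read-modify-write loop built on the three-argument {@code replace} above; the {@code scores} map and key {@code "p1"} are hypothetical.

    ConcurrentHashMap<String, Integer> scores = new ConcurrentHashMap<String, Integer>();
    scores.put("p1", 10);
    for (;;) {                                   // retry until the compare-and-replace wins
        Integer old = scores.get("p1");
        if (old == null || scores.replace("p1", old, old + 1))
            break;                               // no entry, or increment applied atomically
    }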
2928    
2929 tim 1.1 /**
2930 jsr166 1.68 * Removes all of the mappings from this map.
2931 tim 1.1 */
2932     public void clear() {
2933 dl 1.119 internalClear();
2934 tim 1.1 }
2935    
2936     /**
2937 jsr166 1.68 * Returns a {@link Set} view of the keys contained in this map.
2938     * The set is backed by the map, so changes to the map are
2939 dl 1.137 * reflected in the set, and vice-versa.
2940     *
2941     * @return the set view
2942     */
2943     public KeySetView<K,V> keySet() {
2944     KeySetView<K,V> ks = keySet;
2945     return (ks != null) ? ks : (keySet = new KeySetView<K,V>(this, null));
2946     }
2947    
2948     /**
2949     * Returns a {@link Set} view of the keys in this map, using the
2950     * given common mapped value for any additions (i.e., {@link
2951 jsr166 1.174 * Collection#add} and {@link Collection#addAll(Collection)}).
2952     * This is only appropriate when it is acceptable to use
2953     * the same value for all additions from this view.
2954 jsr166 1.68 *
2955 jsr166 1.172 * @param mappedValue the mapped value to use for any additions
2956 dl 1.137 * @return the set view
2957     * @throws NullPointerException if the mappedValue is null
2958 tim 1.1 */
2959 dl 1.137 public KeySetView<K,V> keySet(V mappedValue) {
2960     if (mappedValue == null)
2961     throw new NullPointerException();
2962     return new KeySetView<K,V>(this, mappedValue);
2963 tim 1.1 }
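A minimal sketch using {@code keySet(mappedValue)} to treat the map as a concurrent set, assuming the usual {@code java.util.Set} import; the {@code backing} map and element names are hypothetical.

    ConcurrentHashMap<String, Boolean> backing = new ConcurrentHashMap<String, Boolean>();
    Set<String> set = backing.keySet(Boolean.TRUE);  // additions map to Boolean.TRUE
    set.add("alpha");                                // puts ("alpha", TRUE) into backing
    boolean present = backing.containsKey("alpha");  // true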
2964    
2965     /**
2966 jsr166 1.68 * Returns a {@link Collection} view of the values contained in this map.
2967     * The collection is backed by the map, so changes to the map are
2968 jsr166 1.143 * reflected in the collection, and vice-versa.
2969 jsr166 1.184 *
2970     * @return the collection view
2971 tim 1.1 */
2972 dl 1.142 public ValuesView<K,V> values() {
2973     ValuesView<K,V> vs = values;
2974     return (vs != null) ? vs : (values = new ValuesView<K,V>(this));
2975 dl 1.119 }
2976    
2977     /**
2978     * Returns a {@link Set} view of the mappings contained in this map.
2979     * The set is backed by the map, so changes to the map are
2980     * reflected in the set, and vice-versa. The set supports element
2981     * removal, which removes the corresponding mapping from the map,
2982     * via the {@code Iterator.remove}, {@code Set.remove},
2983     * {@code removeAll}, {@code retainAll}, and {@code clear}
2984     * operations. It does not support the {@code add} or
2985     * {@code addAll} operations.
2986     *
2987     * <p>The view's {@code iterator} is a "weakly consistent" iterator
2988     * that will never throw {@link ConcurrentModificationException},
2989     * and guarantees to traverse elements as they existed upon
2990     * construction of the iterator, and may (but is not guaranteed to)
2991     * reflect any modifications subsequent to construction.
2992 jsr166 1.184 *
2993     * @return the set view
2994 dl 1.119 */
2995     public Set<Map.Entry<K,V>> entrySet() {
2996 dl 1.142 EntrySetView<K,V> es = entrySet;
2997     return (es != null) ? es : (entrySet = new EntrySetView<K,V>(this));
2998 dl 1.119 }
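A minimal sketch of iterating the entry-set view described above; because the iterator is weakly consistent, concurrent updates never cause a {@code ConcurrentModificationException}. It assumes the usual {@code java.util.Map} import, and the map contents are hypothetical.

    ConcurrentHashMap<String, Integer> map = new ConcurrentHashMap<String, Integer>();
    map.put("a", 1);
    map.put("b", 2);
    for (Map.Entry<String, Integer> e : map.entrySet())   // safe even under concurrent puts
        System.out.println(e.getKey() + "=" + e.getValue());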
2999    
3000     /**
3001     * Returns an enumeration of the keys in this table.
3002     *
3003     * @return an enumeration of the keys in this table
3004     * @see #keySet()
3005     */
3006     public Enumeration<K> keys() {
3007     return new KeyIterator<K,V>(this);
3008     }
3009    
3010     /**
3011     * Returns an enumeration of the values in this table.
3012     *
3013     * @return an enumeration of the values in this table
3014     * @see #values()
3015     */
3016     public Enumeration<V> elements() {
3017     return new ValueIterator<K,V>(this);
3018     }
3019    
3020     /**
3021     * Returns the hash code value for this {@link Map}, i.e.,
3022     * the sum of, for each key-value pair in the map,
3023     * {@code key.hashCode() ^ value.hashCode()}.
3024     *
3025     * @return the hash code value for this map
3026     */
3027     public int hashCode() {
3028     int h = 0;
3029     Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3030 dl 1.151 V v;
3031 dl 1.191 while ((v = it.advanceValue()) != null) {
3032 dl 1.119 h += it.nextKey.hashCode() ^ v.hashCode();
3033     }
3034     return h;
3035     }
3036    
3037     /**
3038     * Returns a string representation of this map. The string
3039     * representation consists of a list of key-value mappings (in no
3040     * particular order) enclosed in braces ("{@code {}}"). Adjacent
3041     * mappings are separated by the characters {@code ", "} (comma
3042     * and space). Each key-value mapping is rendered as the key
3043     * followed by an equals sign ("{@code =}") followed by the
3044     * associated value.
3045     *
3046     * @return a string representation of this map
3047     */
3048     public String toString() {
3049     Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3050     StringBuilder sb = new StringBuilder();
3051     sb.append('{');
3052 dl 1.151 V v;
3053 dl 1.191 if ((v = it.advanceValue()) != null) {
3054 dl 1.119 for (;;) {
3055 jsr166 1.168 K k = it.nextKey;
3056 dl 1.119 sb.append(k == this ? "(this Map)" : k);
3057     sb.append('=');
3058     sb.append(v == this ? "(this Map)" : v);
3059 dl 1.191 if ((v = it.advanceValue()) == null)
3060 dl 1.119 break;
3061     sb.append(',').append(' ');
3062     }
3063     }
3064     return sb.append('}').toString();
3065     }
3066    
3067     /**
3068     * Compares the specified object with this map for equality.
3069     * Returns {@code true} if the given object is a map with the same
3070     * mappings as this map. This operation may return misleading
3071     * results if either map is concurrently modified during execution
3072     * of this method.
3073     *
3074     * @param o object to be compared for equality with this map
3075     * @return {@code true} if the specified object is equal to this map
3076     */
3077     public boolean equals(Object o) {
3078     if (o != this) {
3079     if (!(o instanceof Map))
3080     return false;
3081     Map<?,?> m = (Map<?,?>) o;
3082     Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3083 dl 1.151 V val;
3084 dl 1.191 while ((val = it.advanceValue()) != null) {
3085 dl 1.119 Object v = m.get(it.nextKey);
3086     if (v == null || (v != val && !v.equals(val)))
3087     return false;
3088     }
3089     for (Map.Entry<?,?> e : m.entrySet()) {
3090     Object mk, mv, v;
3091     if ((mk = e.getKey()) == null ||
3092     (mv = e.getValue()) == null ||
3093     (v = internalGet(mk)) == null ||
3094     (mv != v && !mv.equals(v)))
3095     return false;
3096     }
3097     }
3098     return true;
3099     }
3100    
3101     /* ----------------Iterators -------------- */
3102    
3103 dl 1.149 @SuppressWarnings("serial") static final class KeyIterator<K,V>
3104     extends Traverser<K,V,Object>
3105 dl 1.153 implements Spliterator<K>, Iterator<K>, Enumeration<K> {
3106 jsr166 1.186 KeyIterator(ConcurrentHashMap<K,V> map) { super(map); }
3107     KeyIterator(ConcurrentHashMap<K,V> map, Traverser<K,V,Object> it) {
3108 dl 1.153 super(map, it);
3109 dl 1.119 }
3110 dl 1.192 public Spliterator<K> trySplit() {
3111 dl 1.195 return (baseLimit - baseIndex <= 1) ? null :
3112 dl 1.191 new KeyIterator<K,V>(map, this);
3113 dl 1.119 }
3114 jsr166 1.168 public final K next() {
3115 dl 1.191 K k;
3116     if ((k = (nextVal == null) ? advanceKey() : nextKey) == null)
3117 dl 1.119 throw new NoSuchElementException();
3118     nextVal = null;
3119 jsr166 1.168 return k;
3120 dl 1.119 }
3121    
3122     public final K nextElement() { return next(); }
3123 dl 1.153
3124 dl 1.162 public Iterator<K> iterator() { return this; }
3125 dl 1.153
3126 dl 1.199 public void forEachRemaining(Consumer<? super K> action) {
3127 dl 1.192 forEachKey(action);
3128 dl 1.153 }
3129 dl 1.160
3130 dl 1.171 public boolean tryAdvance(Consumer<? super K> block) {
3131 dl 1.160 if (block == null) throw new NullPointerException();
3132 dl 1.191 K k;
3133     if ((k = advanceKey()) == null)
3134 dl 1.160 return false;
3135 dl 1.191 block.accept(k);
3136 dl 1.160 return true;
3137     }
3138 dl 1.188
3139     public int characteristics() {
3140 jsr166 1.189 return Spliterator.DISTINCT | Spliterator.CONCURRENT |
3141 dl 1.188 Spliterator.NONNULL;
3142     }
3143    
3144 dl 1.119 }
3145    
3146 dl 1.149 @SuppressWarnings("serial") static final class ValueIterator<K,V>
3147     extends Traverser<K,V,Object>
3148 dl 1.153 implements Spliterator<V>, Iterator<V>, Enumeration<V> {
3149 jsr166 1.186 ValueIterator(ConcurrentHashMap<K,V> map) { super(map); }
3150     ValueIterator(ConcurrentHashMap<K,V> map, Traverser<K,V,Object> it) {
3151 dl 1.153 super(map, it);
3152 dl 1.119 }
3153 dl 1.192 public Spliterator<V> trySplit() {
3154 dl 1.195 return (baseLimit - baseIndex <= 1) ? null :
3155 dl 1.191 new ValueIterator<K,V>(map, this);
3156 dl 1.119 }
3157    
3158 dl 1.151 public final V next() {
3159     V v;
3160 dl 1.191 if ((v = nextVal) == null && (v = advanceValue()) == null)
3161 dl 1.119 throw new NoSuchElementException();
3162     nextVal = null;
3163 dl 1.151 return v;
3164 dl 1.119 }
3165    
3166     public final V nextElement() { return next(); }
3167 dl 1.153
3168 dl 1.162 public Iterator<V> iterator() { return this; }
3169 dl 1.153
3170 dl 1.199 public void forEachRemaining(Consumer<? super V> action) {
3171 dl 1.192 forEachValue(action);
3172 dl 1.153 }
3173 dl 1.160
3174 dl 1.171 public boolean tryAdvance(Consumer<? super V> block) {
3175 dl 1.160 V v;
3176     if (block == null) throw new NullPointerException();
3177 dl 1.191 if ((v = advanceValue()) == null)
3178 dl 1.160 return false;
3179     block.accept(v);
3180     return true;
3181     }
3182 jsr166 1.161
3183 dl 1.188 public int characteristics() {
3184     return Spliterator.CONCURRENT | Spliterator.NONNULL;
3185     }
3186 dl 1.119 }
3187    
3188 dl 1.149 @SuppressWarnings("serial") static final class EntryIterator<K,V>
3189     extends Traverser<K,V,Object>
3190 dl 1.153 implements Spliterator<Map.Entry<K,V>>, Iterator<Map.Entry<K,V>> {
3191 jsr166 1.186 EntryIterator(ConcurrentHashMap<K,V> map) { super(map); }
3192     EntryIterator(ConcurrentHashMap<K,V> map, Traverser<K,V,Object> it) {
3193 dl 1.153 super(map, it);
3194 dl 1.119 }
3195 dl 1.192 public Spliterator<Map.Entry<K,V>> trySplit() {
3196 dl 1.195 return (baseLimit - baseIndex <= 1) ? null :
3197 dl 1.191 new EntryIterator<K,V>(map, this);
3198 dl 1.119 }
3199    
3200 jsr166 1.168 public final Map.Entry<K,V> next() {
3201 dl 1.151 V v;
3202 dl 1.191 if ((v = nextVal) == null && (v = advanceValue()) == null)
3203 dl 1.119 throw new NoSuchElementException();
3204 jsr166 1.168 K k = nextKey;
3205 dl 1.119 nextVal = null;
3206 jsr166 1.168 return new MapEntry<K,V>(k, v, map);
3207 dl 1.119 }
3208 dl 1.153
3209 dl 1.162 public Iterator<Map.Entry<K,V>> iterator() { return this; }
3210 dl 1.153
3211 dl 1.199 public void forEachRemaining(Consumer<? super Map.Entry<K,V>> action) {
3212 dl 1.153 if (action == null) throw new NullPointerException();
3213     V v;
3214 dl 1.191 while ((v = advanceValue()) != null)
3215 jsr166 1.168 action.accept(entryFor(nextKey, v));
3216 dl 1.153 }
3217 dl 1.160
3218 dl 1.171 public boolean tryAdvance(Consumer<? super Map.Entry<K,V>> block) {
3219 dl 1.160 V v;
3220     if (block == null) throw new NullPointerException();
3221 dl 1.191 if ((v = advanceValue()) == null)
3222 dl 1.160 return false;
3223 jsr166 1.168 block.accept(entryFor(nextKey, v));
3224 dl 1.160 return true;
3225     }
3226    
3227 dl 1.188 public int characteristics() {
3228 jsr166 1.189 return Spliterator.DISTINCT | Spliterator.CONCURRENT |
3229 dl 1.188 Spliterator.NONNULL;
3230     }
3231 dl 1.119 }
3232    
3233     /**
3234     * Exported Entry for iterators
3235     */
3236 jsr166 1.186 static final class MapEntry<K,V> implements Map.Entry<K,V> {
3237 dl 1.119 final K key; // non-null
3238     V val; // non-null
3239 jsr166 1.186 final ConcurrentHashMap<K,V> map;
3240     MapEntry(K key, V val, ConcurrentHashMap<K,V> map) {
3241 dl 1.119 this.key = key;
3242     this.val = val;
3243     this.map = map;
3244     }
3245     public final K getKey() { return key; }
3246     public final V getValue() { return val; }
3247     public final int hashCode() { return key.hashCode() ^ val.hashCode(); }
3248     public final String toString() { return key + "=" + val; }
3249    
3250     public final boolean equals(Object o) {
3251     Object k, v; Map.Entry<?,?> e;
3252     return ((o instanceof Map.Entry) &&
3253     (k = (e = (Map.Entry<?,?>)o).getKey()) != null &&
3254     (v = e.getValue()) != null &&
3255     (k == key || k.equals(key)) &&
3256     (v == val || v.equals(val)));
3257     }
3258    
3259     /**
3260     * Sets our entry's value and writes through to the map. The
3261     * value to return is somewhat arbitrary here. Since we do not
3262     * necessarily track asynchronous changes, the most recent
3263     * "previous" value could be different from what we return (or
3264     * the mapping could even have been removed, in which case the
3265     * put will re-establish it). We do not and cannot guarantee more.
3266     */
3267     public final V setValue(V value) {
3268     if (value == null) throw new NullPointerException();
3269     V v = val;
3270     val = value;
3271     map.put(key, value);
3272     return v;
3273     }
3274     }
3275    
3276 dl 1.146 /**
3277     * Returns an exportable snapshot entry for the given key and value
3278     * when write-through can't or shouldn't be used.
3279     */
3280     static <K,V> AbstractMap.SimpleEntry<K,V> entryFor(K k, V v) {
3281     return new AbstractMap.SimpleEntry<K,V>(k, v);
3282     }
3283    
3284 dl 1.142 /* ---------------- Serialization Support -------------- */
3285 dl 1.119
3286     /**
3287 dl 1.142 * Stripped-down version of the helper class used in previous
3288     * versions, declared for the sake of serialization compatibility.
3289 dl 1.119 */
3290 dl 1.208 static class Segment<K,V> extends ReentrantLock implements Serializable {
3291 dl 1.142 private static final long serialVersionUID = 2249069246763182397L;
3292     final float loadFactor;
3293     Segment(float lf) { this.loadFactor = lf; }
3294     }
3295 dl 1.119
3296 dl 1.142 /**
3297     * Saves the state of the {@code ConcurrentHashMap} instance to a
3298     * stream (i.e., serializes it).
3299     * @param s the stream
3300     * @serialData
3301     * the key (Object) and value (Object)
3302     * for each key-value mapping, followed by a null pair.
3303     * The key-value mappings are emitted in no particular order.
3304     */
3305 dl 1.149 @SuppressWarnings("unchecked") private void writeObject
3306     (java.io.ObjectOutputStream s)
3307 dl 1.142 throws java.io.IOException {
3308 dl 1.208 // For serialization compatibility
3309 dl 1.209 // Emulate segment calculation from previous version of this class
3310     int sshift = 0;
3311     int ssize = 1;
3312     while (ssize < DEFAULT_CONCURRENCY_LEVEL) {
3313     ++sshift;
3314     ssize <<= 1;
3315     }
3316     int segmentShift = 32 - sshift;
3317     int segmentMask = ssize - 1;
3318 dl 1.208 Segment<K,V>[] segments = (Segment<K,V>[])
3319     new Segment<?,?>[DEFAULT_CONCURRENCY_LEVEL];
3320     for (int i = 0; i < segments.length; ++i)
3321     segments[i] = new Segment<K,V>(LOAD_FACTOR);
3322     s.putFields().put("segments", segments);
3323 dl 1.209 s.putFields().put("segmentShift", segmentShift);
3324     s.putFields().put("segmentMask", segmentMask);
3325    
3326 dl 1.208 s.writeFields();
3327 dl 1.142 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3328 dl 1.151 V v;
3329 dl 1.191 while ((v = it.advanceValue()) != null) {
3330 dl 1.142 s.writeObject(it.nextKey);
3331     s.writeObject(v);
3332     }
3333     s.writeObject(null);
3334     s.writeObject(null);
3335     segments = null; // throw away
3336     }
3337 dl 1.119
3338 dl 1.142 /**
3339     * Reconstitutes the instance from a stream (that is, deserializes it).
3340     * @param s the stream
3341     */
3342 dl 1.149 @SuppressWarnings("unchecked") private void readObject
3343     (java.io.ObjectInputStream s)
3344 dl 1.142 throws java.io.IOException, ClassNotFoundException {
3345     s.defaultReadObject();
3346 dl 1.119
3347 dl 1.142 // Create all nodes, then place in table once size is known
3348     long size = 0L;
3349 dl 1.151 Node<V> p = null;
3350 dl 1.142 for (;;) {
3351     K k = (K) s.readObject();
3352     V v = (V) s.readObject();
3353     if (k != null && v != null) {
3354     int h = spread(k.hashCode());
3355 dl 1.151 p = new Node<V>(h, k, v, p);
3356 dl 1.142 ++size;
3357 dl 1.119 }
3358 dl 1.142 else
3359     break;
3360 dl 1.119 }
3361 dl 1.142 if (p != null) {
3362     boolean init = false;
3363     int n;
3364     if (size >= (long)(MAXIMUM_CAPACITY >>> 1))
3365     n = MAXIMUM_CAPACITY;
3366     else {
3367     int sz = (int)size;
3368     n = tableSizeFor(sz + (sz >>> 1) + 1);
3369     }
3370     int sc = sizeCtl;
3371     boolean collide = false;
3372     if (n > sc &&
3373 dl 1.149 U.compareAndSwapInt(this, SIZECTL, sc, -1)) {
3374 dl 1.142 try {
3375     if (table == null) {
3376     init = true;
3377 dl 1.151 @SuppressWarnings("rawtypes") Node[] rt = new Node[n];
3378     Node<V>[] tab = (Node<V>[])rt;
3379 dl 1.142 int mask = n - 1;
3380     while (p != null) {
3381     int j = p.hash & mask;
3382 dl 1.151 Node<V> next = p.next;
3383     Node<V> q = p.next = tabAt(tab, j);
3384 dl 1.142 setTabAt(tab, j, p);
3385     if (!collide && q != null && q.hash == p.hash)
3386     collide = true;
3387     p = next;
3388     }
3389     table = tab;
3390 dl 1.149 addCount(size, -1);
3391 dl 1.142 sc = n - (n >>> 2);
3392     }
3393     } finally {
3394     sizeCtl = sc;
3395     }
3396     if (collide) { // rescan and convert to TreeBins
3397 dl 1.151 Node<V>[] tab = table;
3398 dl 1.142 for (int i = 0; i < tab.length; ++i) {
3399     int c = 0;
3400 dl 1.151 for (Node<V> e = tabAt(tab, i); e != null; e = e.next) {
3401 dl 1.142 if (++c > TREE_THRESHOLD &&
3402     (e.key instanceof Comparable)) {
3403     replaceWithTreeBin(tab, i, e.key);
3404     break;
3405     }
3406     }
3407     }
3408 dl 1.119 }
3409     }
3410 dl 1.142 if (!init) { // Can only happen if unsafely published.
3411     while (p != null) {
3412 dl 1.151 internalPut((K)p.key, p.val, false);
3413 dl 1.142 p = p.next;
3414     }
3415 dl 1.119 }
3416     }
3417 dl 1.142 }
3418 dl 1.119
3419 dl 1.142 // -------------------------------------------------------
3420    
3421 dl 1.151 // Sequential bulk operations
3422    
3423 dl 1.119 /**
3424 dl 1.137 * Performs the given action for each (key, value).
3425 dl 1.119 *
3426 dl 1.137 * @param action the action
3427 dl 1.119 */
3428 jsr166 1.168 public void forEachSequentially
3429 dl 1.171 (BiConsumer<? super K, ? super V> action) {
3430 dl 1.151 if (action == null) throw new NullPointerException();
3431     Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3432     V v;
3433 dl 1.191 while ((v = it.advanceValue()) != null)
3434 jsr166 1.168 action.accept(it.nextKey, v);
3435 dl 1.119 }
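A minimal sketch of {@code forEachSequentially} as declared in this revision; the whole traversal runs in the calling thread. The map contents are hypothetical.

    ConcurrentHashMap<String, Integer> map = new ConcurrentHashMap<String, Integer>();
    map.put("a", 1);
    map.put("b", 2);
    map.forEachSequentially((k, v) -> System.out.println(k + " -> " + v));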
3436    
3437     /**
3438 dl 1.137 * Performs the given action for each non-null transformation
3439     * of each (key, value).
3440     *
3441     * @param transformer a function returning the transformation
3442 jsr166 1.169 * for an element, or null if there is no transformation (in
3443 jsr166 1.172 * which case the action is not applied)
3444 dl 1.137 * @param action the action
3445 dl 1.119 */
3446 jsr166 1.168 public <U> void forEachSequentially
3447 dl 1.153 (BiFunction<? super K, ? super V, ? extends U> transformer,
3448 dl 1.171 Consumer<? super U> action) {
3449 dl 1.151 if (transformer == null || action == null)
3450     throw new NullPointerException();
3451     Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3452     V v; U u;
3453 dl 1.191 while ((v = it.advanceValue()) != null) {
3454 jsr166 1.168 if ((u = transformer.apply(it.nextKey, v)) != null)
3455 dl 1.153 action.accept(u);
3456 dl 1.151 }
3457 dl 1.137 }
3458    
3459     /**
3460     * Returns a non-null result from applying the given search
3461 dl 1.151 * function on each (key, value), or null if none.
3462 dl 1.137 *
3463     * @param searchFunction a function returning a non-null
3464     * result on success, else null
3465     * @return a non-null result from applying the given search
3466     * function on each (key, value), or null if none
3467     */
3468 jsr166 1.168 public <U> U searchSequentially
3469 dl 1.153 (BiFunction<? super K, ? super V, ? extends U> searchFunction) {
3470 dl 1.151 if (searchFunction == null) throw new NullPointerException();
3471     Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3472     V v; U u;
3473 dl 1.191 while ((v = it.advanceValue()) != null) {
3474 jsr166 1.168 if ((u = searchFunction.apply(it.nextKey, v)) != null)
3475 dl 1.151 return u;
3476     }
3477     return null;
3478 dl 1.137 }
3479    
3480     /**
3481     * Returns the result of accumulating the given transformation
3482     * of all (key, value) pairs using the given reducer to
3483     * combine values, or null if none.
3484     *
3485     * @param transformer a function returning the transformation
3486 jsr166 1.169 * for an element, or null if there is no transformation (in
3487 jsr166 1.172 * which case it is not combined)
3488 dl 1.137 * @param reducer a commutative associative combining function
3489     * @return the result of accumulating the given transformation
3490     * of all (key, value) pairs
3491     */
3492 jsr166 1.168 public <U> U reduceSequentially
3493 dl 1.153 (BiFunction<? super K, ? super V, ? extends U> transformer,
3494     BiFunction<? super U, ? super U, ? extends U> reducer) {
3495 dl 1.151 if (transformer == null || reducer == null)
3496     throw new NullPointerException();
3497     Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3498     U r = null, u; V v;
3499 dl 1.191 while ((v = it.advanceValue()) != null) {
3500 jsr166 1.168 if ((u = transformer.apply(it.nextKey, v)) != null)
3501 dl 1.151 r = (r == null) ? u : reducer.apply(r, u);
3502     }
3503     return r;
3504 dl 1.137 }
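A minimal sketch of {@code reduceSequentially}: the transformer maps each (key, value) pair to a result and the reducer combines those results, yielding {@code null} for an empty map. The map is hypothetical; the explicit {@code <Integer>} type witness simply makes the inferred result type obvious.

    ConcurrentHashMap<String, Integer> map = new ConcurrentHashMap<String, Integer>();
    map.put("a", 1);
    map.put("b", 2);
    // sum of all values, or null if the map were empty
    Integer total = map.<Integer>reduceSequentially(
        (k, v) -> v,           // transformer: keep the value
        (a, b) -> a + b);      // reducer: commutative, associative sum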
3505    
3506     /**
3507     * Returns the result of accumulating the given transformation
3508     * of all (key, value) pairs using the given reducer to
3509     * combine values, and the given basis as an identity value.
3510     *
3511     * @param transformer a function returning the transformation
3512     * for an element
3513     * @param basis the identity (initial default value) for the reduction
3514     * @param reducer a commutative associative combining function
3515     * @return the result of accumulating the given transformation
3516     * of all (key, value) pairs
3517     */
3518 jsr166 1.168 public double reduceToDoubleSequentially
3519 dl 1.171 (ToDoubleBiFunction<? super K, ? super V> transformer,
3520 dl 1.151 double basis,
3521 dl 1.153 DoubleBinaryOperator reducer) {
3522 dl 1.151 if (transformer == null || reducer == null)
3523     throw new NullPointerException();
3524     Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3525     double r = basis; V v;
3526 dl 1.191 while ((v = it.advanceValue()) != null)
3527 jsr166 1.168 r = reducer.applyAsDouble(r, transformer.applyAsDouble(it.nextKey, v));
3528 dl 1.151 return r;
3529 dl 1.137 }
3530 dl 1.119
3531 dl 1.137 /**
3532     * Returns the result of accumulating the given transformation
3533     * of all (key, value) pairs using the given reducer to
3534     * combine values, and the given basis as an identity value.
3535     *
3536     * @param transformer a function returning the transformation
3537     * for an element
3538     * @param basis the identity (initial default value) for the reduction
3539     * @param reducer a commutative associative combining function
3540     * @return the result of accumulating the given transformation
3541     * of all (key, value) pairs
3542     */
3543 jsr166 1.168 public long reduceToLongSequentially
3544 dl 1.171 (ToLongBiFunction<? super K, ? super V> transformer,
3545 dl 1.151 long basis,
3546 dl 1.153 LongBinaryOperator reducer) {
3547 dl 1.151 if (transformer == null || reducer == null)
3548     throw new NullPointerException();
3549     Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3550     long r = basis; V v;
3551 dl 1.191 while ((v = it.advanceValue()) != null)
3552 jsr166 1.168 r = reducer.applyAsLong(r, transformer.applyAsLong(it.nextKey, v));
3553 dl 1.151 return r;
3554 dl 1.137 }
3555    
3556     /**
3557     * Returns the result of accumulating the given transformation
3558     * of all (key, value) pairs using the given reducer to
3559     * combine values, and the given basis as an identity value.
3560     *
3561     * @param transformer a function returning the transformation
3562     * for an element
3563     * @param basis the identity (initial default value) for the reduction
3564     * @param reducer a commutative associative combining function
3565     * @return the result of accumulating the given transformation
3566     * of all (key, value) pairs
3567     */
3568 jsr166 1.168 public int reduceToIntSequentially
3569 dl 1.171 (ToIntBiFunction<? super K, ? super V> transformer,
3570 dl 1.151 int basis,
3571 dl 1.153 IntBinaryOperator reducer) {
3572 dl 1.151 if (transformer == null || reducer == null)
3573     throw new NullPointerException();
3574     Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3575     int r = basis; V v;
3576 dl 1.191 while ((v = it.advanceValue()) != null)
3577 jsr166 1.168 r = reducer.applyAsInt(r, transformer.applyAsInt(it.nextKey, v));
3578 dl 1.151 return r;
3579 dl 1.137 }
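A minimal sketch of the primitive-basis form {@code reduceToIntSequentially} (the {@code long} and {@code double} variants above work the same way): the basis is the identity value and the reduction is carried out on primitive {@code int}s. The map is hypothetical, with {@code Integer} values.

    ConcurrentHashMap<String, Integer> map = new ConcurrentHashMap<String, Integer>();
    map.put("a", 1);
    map.put("b", 2);
    int sum = map.reduceToIntSequentially(
        (k, v) -> v.intValue(),   // ToIntBiFunction: extract an int per entry
        0,                        // basis (identity for the reduction)
        (x, y) -> x + y);         // IntBinaryOperator: combine partial results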
3580    
3581     /**
3582     * Performs the given action for each key.
3583     *
3584     * @param action the action
3585     */
3586 jsr166 1.168 public void forEachKeySequentially
3587 dl 1.171 (Consumer<? super K> action) {
3588 dl 1.192 new Traverser<K,V,Object>(this).forEachKey(action);
3589 dl 1.137 }
3590 dl 1.119
3591 dl 1.137 /**
3592     * Performs the given action for each non-null transformation
3593     * of each key.
3594     *
3595     * @param transformer a function returning the transformation
3596 jsr166 1.169 * for an element, or null if there is no transformation (in
3597 jsr166 1.172 * which case the action is not applied)
3598 dl 1.137 * @param action the action
3599     */
3600 jsr166 1.168 public <U> void forEachKeySequentially
3601 dl 1.153 (Function<? super K, ? extends U> transformer,
3602 dl 1.171 Consumer<? super U> action) {
3603 dl 1.151 if (transformer == null || action == null)
3604     throw new NullPointerException();
3605     Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3606 dl 1.191 K k; U u;
3607     while ((k = it.advanceKey()) != null) {
3608     if ((u = transformer.apply(k)) != null)
3609 dl 1.153 action.accept(u);
3610 dl 1.151 }
3611 dl 1.137 }
3612 dl 1.119
3613 dl 1.137 /**
3614     * Returns a non-null result from applying the given search
3615 dl 1.151 * function on each key, or null if none.
3616 dl 1.137 *
3617     * @param searchFunction a function returning a non-null
3618     * result on success, else null
3619     * @return a non-null result from applying the given search
3620     * function on each key, or null if none
3621     */
3622 jsr166 1.168 public <U> U searchKeysSequentially
3623 dl 1.153 (Function<? super K, ? extends U> searchFunction) {
3624 dl 1.151 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3625 dl 1.191 K k; U u;
3626     while ((k = it.advanceKey()) != null) {
3627     if ((u = searchFunction.apply(k)) != null)
3628 dl 1.151 return u;
3629     }
3630     return null;
3631 dl 1.137 }
3632 dl 1.119
3633 dl 1.137 /**
3634     * Returns the result of accumulating all keys using the given
3635     * reducer to combine values, or null if none.
3636     *
3637     * @param reducer a commutative associative combining function
3638     * @return the result of accumulating all keys using the given
3639     * reducer to combine values, or null if none
3640     */
3641 jsr166 1.168 public K reduceKeysSequentially
3642 dl 1.153 (BiFunction<? super K, ? super K, ? extends K> reducer) {
3643 dl 1.151 if (reducer == null) throw new NullPointerException();
3644     Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3645 dl 1.191 K u, r = null;
3646     while ((u = it.advanceKey()) != null) {
3647 dl 1.151 r = (r == null) ? u : reducer.apply(r, u);
3648     }
3649     return r;
3650 dl 1.137 }
3651 dl 1.119
3652 dl 1.137 /**
3653     * Returns the result of accumulating the given transformation
3654     * of all keys using the given reducer to combine values, or
3655     * null if none.
3656     *
3657     * @param transformer a function returning the transformation
3658 jsr166 1.169 * for an element, or null if there is no transformation (in
3659 jsr166 1.172 * which case it is not combined)
3660 dl 1.137 * @param reducer a commutative associative combining function
3661     * @return the result of accumulating the given transformation
3662     * of all keys
3663     */
3664 jsr166 1.168 public <U> U reduceKeysSequentially
3665 dl 1.153 (Function<? super K, ? extends U> transformer,
3666     BiFunction<? super U, ? super U, ? extends U> reducer) {
3667 dl 1.151 if (transformer == null || reducer == null)
3668     throw new NullPointerException();
3669     Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3670 dl 1.191 K k; U r = null, u;
3671     while ((k = it.advanceKey()) != null) {
3672     if ((u = transformer.apply(k)) != null)
3673 dl 1.151 r = (r == null) ? u : reducer.apply(r, u);
3674     }
3675     return r;
3676 dl 1.137 }
3677 dl 1.119
3678 dl 1.137 /**
3679     * Returns the result of accumulating the given transformation
3680     * of all keys using the given reducer to combine values, and
3681     * the given basis as an identity value.
3682     *
3683     * @param transformer a function returning the transformation
3684     * for an element
3685     * @param basis the identity (initial default value) for the reduction
3686     * @param reducer a commutative associative combining function
3687 jsr166 1.157 * @return the result of accumulating the given transformation
3688 dl 1.137 * of all keys
3689     */
3690 jsr166 1.168 public double reduceKeysToDoubleSequentially
3691 dl 1.171 (ToDoubleFunction<? super K> transformer,
3692 dl 1.151 double basis,
3693 dl 1.153 DoubleBinaryOperator reducer) {
3694 dl 1.151 if (transformer == null || reducer == null)
3695     throw new NullPointerException();
3696     Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3697     double r = basis;
3698 dl 1.191 K k;
3699     while ((k = it.advanceKey()) != null)
3700     r = reducer.applyAsDouble(r, transformer.applyAsDouble(k));
3701 dl 1.151 return r;
3702 dl 1.137 }
3703 dl 1.119
3704 dl 1.137 /**
3705     * Returns the result of accumulating the given transformation
3706     * of all keys using the given reducer to combine values, and
3707     * the given basis as an identity value.
3708     *
3709     * @param transformer a function returning the transformation
3710     * for an element
3711     * @param basis the identity (initial default value) for the reduction
3712     * @param reducer a commutative associative combining function
3713     * @return the result of accumulating the given transformation
3714     * of all keys
3715     */
3716 jsr166 1.168 public long reduceKeysToLongSequentially
3717 dl 1.171 (ToLongFunction<? super K> transformer,
3718 dl 1.151 long basis,
3719 dl 1.153 LongBinaryOperator reducer) {
3720 dl 1.151 if (transformer == null || reducer == null)
3721     throw new NullPointerException();
3722     Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3723     long r = basis;
3724 dl 1.191 K k;
3725     while ((k = it.advanceKey()) != null)
3726     r = reducer.applyAsLong(r, transformer.applyAsLong(k));
3727 dl 1.151 return r;
3728 dl 1.137 }
3729 dl 1.119
3730 dl 1.137 /**
3731     * Returns the result of accumulating the given transformation
3732     * of all keys using the given reducer to combine values, and
3733     * the given basis as an identity value.
3734     *
3735     * @param transformer a function returning the transformation
3736     * for an element
3737     * @param basis the identity (initial default value) for the reduction
3738     * @param reducer a commutative associative combining function
3739     * @return the result of accumulating the given transformation
3740     * of all keys
3741     */
3742 jsr166 1.168 public int reduceKeysToIntSequentially
3743 dl 1.171 (ToIntFunction<? super K> transformer,
3744 dl 1.151 int basis,
3745 dl 1.153 IntBinaryOperator reducer) {
3746 dl 1.151 if (transformer == null || reducer == null)
3747     throw new NullPointerException();
3748     Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3749     int r = basis;
3750 dl 1.191 K k;
3751     while ((k = it.advanceKey()) != null)
3752     r = reducer.applyAsInt(r, transformer.applyAsInt(k));
3753 dl 1.151 return r;
3754 dl 1.137 }
3755 dl 1.119
3756 dl 1.137 /**
3757     * Performs the given action for each value.
3758     *
3759     * @param action the action
3760     */
3761 dl 1.171 public void forEachValueSequentially(Consumer<? super V> action) {
3762 dl 1.192 new Traverser<K,V,Object>(this).forEachValue(action);
3763 dl 1.137 }
3764 dl 1.119
3765 dl 1.137 /**
3766     * Performs the given action for each non-null transformation
3767     * of each value.
3768     *
3769     * @param transformer a function returning the transformation
3770 jsr166 1.169 * for an element, or null if there is no transformation (in
3771 jsr166 1.172 * which case the action is not applied)
3772 jsr166 1.179 * @param action the action
3773 dl 1.137 */
3774 dl 1.151 public <U> void forEachValueSequentially
3775 dl 1.153 (Function<? super V, ? extends U> transformer,
3776 dl 1.171 Consumer<? super U> action) {
3777 dl 1.151 if (transformer == null || action == null)
3778     throw new NullPointerException();
3779     Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3780     V v; U u;
3781 dl 1.191 while ((v = it.advanceValue()) != null) {
3782 dl 1.151 if ((u = transformer.apply(v)) != null)
3783 dl 1.153 action.accept(u);
3784 dl 1.151 }
3785 dl 1.137 }
3786 dl 1.119
3787 dl 1.137 /**
3788     * Returns a non-null result from applying the given search
3789 dl 1.151 * function on each value, or null if none.
3790 dl 1.137 *
3791     * @param searchFunction a function returning a non-null
3792     * result on success, else null
3793     * @return a non-null result from applying the given search
3794     * function on each value, or null if none
3795     */
3796 dl 1.151 public <U> U searchValuesSequentially
3797 dl 1.153 (Function<? super V, ? extends U> searchFunction) {
3798 dl 1.151 if (searchFunction == null) throw new NullPointerException();
3799     Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3800     V v; U u;
3801 dl 1.191 while ((v = it.advanceValue()) != null) {
3802 dl 1.151 if ((u = searchFunction.apply(v)) != null)
3803     return u;
3804     }
3805     return null;
3806 dl 1.137 }
3807 dl 1.119
3808 dl 1.137 /**
3809     * Returns the result of accumulating all values using the
3810     * given reducer to combine values, or null if none.
3811     *
3812     * @param reducer a commutative associative combining function
3813 jsr166 1.157 * @return the result of accumulating all values
3814 dl 1.137 */
3815 dl 1.151 public V reduceValuesSequentially
3816 dl 1.153 (BiFunction<? super V, ? super V, ? extends V> reducer) {
3817 dl 1.151 if (reducer == null) throw new NullPointerException();
3818     Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3819     V r = null; V v;
3820 dl 1.191 while ((v = it.advanceValue()) != null)
3821 dl 1.151 r = (r == null) ? v : reducer.apply(r, v);
3822     return r;
3823 dl 1.137 }
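A minimal sketch of {@code reduceValuesSequentially}, which folds the values with a commutative, associative reducer and yields {@code null} for an empty map; the map is hypothetical.

    ConcurrentHashMap<String, Integer> map = new ConcurrentHashMap<String, Integer>();
    map.put("a", 1);
    map.put("b", 7);
    // largest value, or null if the map were empty
    Integer max = map.reduceValuesSequentially(
        (a, b) -> (a.compareTo(b) >= 0) ? a : b);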
3824 dl 1.119
3825 dl 1.137 /**
3826     * Returns the result of accumulating the given transformation
3827     * of all values using the given reducer to combine values, or
3828     * null if none.
3829     *
3830     * @param transformer a function returning the transformation
3831 jsr166 1.169 * for an element, or null if there is no transformation (in
3832 jsr166 1.172 * which case it is not combined)
3833 dl 1.137 * @param reducer a commutative associative combining function
3834     * @return the result of accumulating the given transformation
3835     * of all values
3836     */
3837 dl 1.151 public <U> U reduceValuesSequentially
3838 dl 1.153 (Function<? super V, ? extends U> transformer,
3839     BiFunction<? super U, ? super U, ? extends U> reducer) {
3840 dl 1.151 if (transformer == null || reducer == null)
3841     throw new NullPointerException();
3842     Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3843     U r = null, u; V v;
3844 dl 1.191 while ((v = it.advanceValue()) != null) {
3845 dl 1.151 if ((u = transformer.apply(v)) != null)
3846     r = (r == null) ? u : reducer.apply(r, u);
3847     }
3848     return r;
3849 dl 1.137 }
3850 dl 1.119
3851 dl 1.137 /**
3852     * Returns the result of accumulating the given transformation
3853     * of all values using the given reducer to combine values,
3854     * and the given basis as an identity value.
3855     *
3856     * @param transformer a function returning the transformation
3857     * for an element
3858     * @param basis the identity (initial default value) for the reduction
3859     * @param reducer a commutative associative combining function
3860     * @return the result of accumulating the given transformation
3861     * of all values
3862     */
3863 dl 1.151 public double reduceValuesToDoubleSequentially
3864 dl 1.171 (ToDoubleFunction<? super V> transformer,
3865 dl 1.151 double basis,
3866 dl 1.153 DoubleBinaryOperator reducer) {
3867 dl 1.151 if (transformer == null || reducer == null)
3868     throw new NullPointerException();
3869     Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3870     double r = basis; V v;
3871 dl 1.191 while ((v = it.advanceValue()) != null)
3872 dl 1.153 r = reducer.applyAsDouble(r, transformer.applyAsDouble(v));
3873 dl 1.151 return r;
3874 dl 1.137 }
3875 dl 1.119
3876 dl 1.137 /**
3877     * Returns the result of accumulating the given transformation
3878     * of all values using the given reducer to combine values,
3879     * and the given basis as an identity value.
3880     *
3881     * @param transformer a function returning the transformation
3882     * for an element
3883     * @param basis the identity (initial default value) for the reduction
3884     * @param reducer a commutative associative combining function
3885     * @return the result of accumulating the given transformation
3886     * of all values
3887     */
3888 dl 1.151 public long reduceValuesToLongSequentially
3889 dl 1.171 (ToLongFunction<? super V> transformer,
3890 dl 1.151 long basis,
3891 dl 1.153 LongBinaryOperator reducer) {
3892 dl 1.151 if (transformer == null || reducer == null)
3893     throw new NullPointerException();
3894     Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3895     long r = basis; V v;
3896 dl 1.191 while ((v = it.advanceValue()) != null)
3897 dl 1.153 r = reducer.applyAsLong(r, transformer.applyAsLong(v));
3898 dl 1.151 return r;
3899 dl 1.137 }
3900 dl 1.119
3901 dl 1.137 /**
3902     * Returns the result of accumulating the given transformation
3903     * of all values using the given reducer to combine values,
3904     * and the given basis as an identity value.
3905     *
3906     * @param transformer a function returning the transformation
3907     * for an element
3908     * @param basis the identity (initial default value) for the reduction
3909     * @param reducer a commutative associative combining function
3910     * @return the result of accumulating the given transformation
3911     * of all values
3912     */
3913 dl 1.151 public int reduceValuesToIntSequentially
3914 dl 1.171 (ToIntFunction<? super V> transformer,
3915 dl 1.151 int basis,
3916 dl 1.153 IntBinaryOperator reducer) {
3917 dl 1.151 if (transformer == null || reducer == null)
3918     throw new NullPointerException();
3919     Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3920     int r = basis; V v;
3921 dl 1.191 while ((v = it.advanceValue()) != null)
3922 dl 1.153 r = reducer.applyAsInt(r, transformer.applyAsInt(v));
3923 dl 1.151 return r;
3924 dl 1.137 }
3925 dl 1.119
3926 dl 1.137 /**
3927     * Performs the given action for each entry.
3928     *
3929     * @param action the action
3930     */
3931 jsr166 1.168 public void forEachEntrySequentially
3932 dl 1.171 (Consumer<? super Map.Entry<K,V>> action) {
3933 dl 1.151 if (action == null) throw new NullPointerException();
3934     Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3935     V v;
3936 dl 1.191 while ((v = it.advanceValue()) != null)
3937 jsr166 1.168 action.accept(entryFor(it.nextKey, v));
3938 dl 1.137 }
3939 dl 1.119
3940 dl 1.137 /**
3941     * Performs the given action for each non-null transformation
3942     * of each entry.
3943     *
3944     * @param transformer a function returning the transformation
3945 jsr166 1.169 * for an element, or null if there is no transformation (in
3946 jsr166 1.172 * which case the action is not applied)
3947 dl 1.137 * @param action the action
3948     */
3949 jsr166 1.168 public <U> void forEachEntrySequentially
3950 dl 1.153 (Function<Map.Entry<K,V>, ? extends U> transformer,
3951 dl 1.171 Consumer<? super U> action) {
3952 dl 1.151 if (transformer == null || action == null)
3953     throw new NullPointerException();
3954     Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3955     V v; U u;
3956 dl 1.191 while ((v = it.advanceValue()) != null) {
3957 jsr166 1.168 if ((u = transformer.apply(entryFor(it.nextKey, v))) != null)
3958 dl 1.153 action.accept(u);
3959 dl 1.151 }
3960 dl 1.137 }
3961 dl 1.119
3962 dl 1.137 /**
3963     * Returns a non-null result from applying the given search
3964 dl 1.151 * function on each entry, or null if none.
3965 dl 1.137 *
3966     * @param searchFunction a function returning a non-null
3967     * result on success, else null
3968     * @return a non-null result from applying the given search
3969     * function on each entry, or null if none
3970     */
3971 jsr166 1.168 public <U> U searchEntriesSequentially
3972 dl 1.153 (Function<Map.Entry<K,V>, ? extends U> searchFunction) {
3973 dl 1.151 if (searchFunction == null) throw new NullPointerException();
3974     Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3975     V v; U u;
3976 dl 1.191 while ((v = it.advanceValue()) != null) {
3977 jsr166 1.168 if ((u = searchFunction.apply(entryFor(it.nextKey, v))) != null)
3978 dl 1.151 return u;
3979     }
3980     return null;
3981 dl 1.137 }
3982 dl 1.119
3983 dl 1.137 /**
3984     * Returns the result of accumulating all entries using the
3985     * given reducer to combine values, or null if none.
3986     *
3987     * @param reducer a commutative associative combining function
3988     * @return the result of accumulating all entries
3989     */
3990 jsr166 1.168 public Map.Entry<K,V> reduceEntriesSequentially
3991 dl 1.153 (BiFunction<Map.Entry<K,V>, Map.Entry<K,V>, ? extends Map.Entry<K,V>> reducer) {
3992 dl 1.151 if (reducer == null) throw new NullPointerException();
3993     Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3994     Map.Entry<K,V> r = null; V v;
3995 dl 1.191 while ((v = it.advanceValue()) != null) {
3996 jsr166 1.168 Map.Entry<K,V> u = entryFor(it.nextKey, v);
3997 dl 1.151 r = (r == null) ? u : reducer.apply(r, u);
3998     }
3999     return r;
4000 dl 1.137 }
4001 dl 1.119
4002 dl 1.137 /**
4003     * Returns the result of accumulating the given transformation
4004     * of all entries using the given reducer to combine values,
4005     * or null if none.
4006     *
4007     * @param transformer a function returning the transformation
4008 jsr166 1.169 * for an element, or null if there is no transformation (in
4009 jsr166 1.172 * which case it is not combined)
4010 dl 1.137 * @param reducer a commutative associative combining function
4011     * @return the result of accumulating the given transformation
4012     * of all entries
4013     */
4014 jsr166 1.168 public <U> U reduceEntriesSequentially
4015 dl 1.153 (Function<Map.Entry<K,V>, ? extends U> transformer,
4016     BiFunction<? super U, ? super U, ? extends U> reducer) {
4017 dl 1.151 if (transformer == null || reducer == null)
4018     throw new NullPointerException();
4019     Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
4020     U r = null, u; V v;
4021 dl 1.191 while ((v = it.advanceValue()) != null) {
4022 jsr166 1.168 if ((u = transformer.apply(entryFor(it.nextKey, v))) != null)
4023 dl 1.151 r = (r == null) ? u : reducer.apply(r, u);
4024     }
4025     return r;
4026 dl 1.137 }
4027 dl 1.119
4028 dl 1.137 /**
4029     * Returns the result of accumulating the given transformation
4030     * of all entries using the given reducer to combine values,
4031     * and the given basis as an identity value.
4032     *
4033     * @param transformer a function returning the transformation
4034     * for an element
4035     * @param basis the identity (initial default value) for the reduction
4036     * @param reducer a commutative associative combining function
4037     * @return the result of accumulating the given transformation
4038     * of all entries
4039     */
4040 jsr166 1.168 public double reduceEntriesToDoubleSequentially
4041 dl 1.171 (ToDoubleFunction<Map.Entry<K,V>> transformer,
4042 dl 1.151 double basis,
4043 dl 1.153 DoubleBinaryOperator reducer) {
4044 dl 1.151 if (transformer == null || reducer == null)
4045     throw new NullPointerException();
4046     Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
4047     double r = basis; V v;
4048 dl 1.191 while ((v = it.advanceValue()) != null)
4049 jsr166 1.168 r = reducer.applyAsDouble(r, transformer.applyAsDouble(entryFor(it.nextKey, v)));
4050 dl 1.151 return r;
4051 dl 1.137 }
4052 dl 1.119
4053 dl 1.137 /**
4054     * Returns the result of accumulating the given transformation
4055     * of all entries using the given reducer to combine values,
4056     * and the given basis as an identity value.
4057     *
4058     * @param transformer a function returning the transformation
4059     * for an element
4060     * @param basis the identity (initial default value) for the reduction
4061     * @param reducer a commutative associative combining function
4062 jsr166 1.157 * @return the result of accumulating the given transformation
4063 dl 1.137 * of all entries
4064     */
4065 jsr166 1.168 public long reduceEntriesToLongSequentially
4066 dl 1.171 (ToLongFunction<Map.Entry<K,V>> transformer,
4067 dl 1.151 long basis,
4068 dl 1.153 LongBinaryOperator reducer) {
4069 dl 1.151 if (transformer == null || reducer == null)
4070     throw new NullPointerException();
4071     Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
4072     long r = basis; V v;
4073 dl 1.191 while ((v = it.advanceValue()) != null)
4074 jsr166 1.168 r = reducer.applyAsLong(r, transformer.applyAsLong(entryFor(it.nextKey, v)));
4075 dl 1.151 return r;
4076 dl 1.137 }
4077 dl 1.119
4078 dl 1.137 /**
4079     * Returns the result of accumulating the given transformation
4080     * of all entries using the given reducer to combine values,
4081     * and the given basis as an identity value.
4082     *
4083     * @param transformer a function returning the transformation
4084     * for an element
4085     * @param basis the identity (initial default value) for the reduction
4086     * @param reducer a commutative associative combining function
4087     * @return the result of accumulating the given transformation
4088     * of all entries
4089     */
4090 jsr166 1.168 public int reduceEntriesToIntSequentially
4091 dl 1.171 (ToIntFunction<Map.Entry<K,V>> transformer,
4092 dl 1.151 int basis,
4093 dl 1.153 IntBinaryOperator reducer) {
4094 dl 1.151 if (transformer == null || reducer == null)
4095     throw new NullPointerException();
4096     Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
4097     int r = basis; V v;
4098 dl 1.191 while ((v = it.advanceValue()) != null)
4099 jsr166 1.168 r = reducer.applyAsInt(r, transformer.applyAsInt(entryFor(it.nextKey, v)));
4100 dl 1.151 return r;
4101 dl 1.119 }
4102    
4103 dl 1.209 // Overrides of other default Map methods
4104    
4105     public void forEach(BiConsumer<? super K, ? super V> action) {
4106     forEachSequentially(action);
4107     }
4108    
4109     public void replaceAll(BiFunction<? super K, ? super V, ? extends V> function) {
4110     if (function == null) throw new NullPointerException();
4111     Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
4112     V v;
4113     while ((v = it.advanceValue()) != null) {
4114     K k = it.nextKey;
4115     internalPut(k, function.apply(k, v), false);
4116     }
4117     }
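A minimal sketch of the two default-{@code Map}-method overrides above; the map contents are hypothetical.

    ConcurrentHashMap<String, Integer> map = new ConcurrentHashMap<String, Integer>();
    map.put("a", 1);
    map.put("b", 2);
    map.replaceAll((k, v) -> v * 10);                        // now a=10, b=20
    map.forEach((k, v) -> System.out.println(k + "=" + v));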
4118    
4119 dl 1.151 // Parallel bulk operations
4120 dl 1.142
4121     /**
4122 dl 1.151 * Performs the given action for each (key, value).
4123     *
4124     * @param action the action
4125 dl 1.142 */
4126 dl 1.171 public void forEachInParallel(BiConsumer<? super K,? super V> action) {
4127 dl 1.151 ForkJoinTasks.forEach
4128     (this, action).invoke();
4129     }
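A minimal sketch of {@code forEachInParallel}, which (as the {@code ForkJoinTasks.forEach} call above shows) submits the traversal as ForkJoin tasks rather than running it entirely in the calling thread, so the action may be invoked concurrently from worker threads and must be thread-safe. The map is hypothetical.

    ConcurrentHashMap<String, Integer> map = new ConcurrentHashMap<String, Integer>();
    map.put("a", 1);
    map.put("b", 2);
    // the BiConsumer may run concurrently on ForkJoin worker threads
    map.forEachInParallel((k, v) -> System.out.println(k + " -> " + v));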
4130 dl 1.142
4131 dl 1.151 /**
4132     * Performs the given action for each non-null transformation
4133     * of each (key, value).
4134     *
4135     * @param transformer a function returning the transformation
4136 jsr166 1.169 * for an element, or null if there is no transformation (in
4137 jsr166 1.172 * which case the action is not applied)
4138 dl 1.151 * @param action the action
4139     */
4140     public <U> void forEachInParallel
4141 dl 1.153 (BiFunction<? super K, ? super V, ? extends U> transformer,
4142 dl 1.171 Consumer<? super U> action) {
4143 dl 1.151 ForkJoinTasks.forEach
4144     (this, transformer, action).invoke();
4145     }
4146 dl 1.142
4147 dl 1.151 /**
4148     * Returns a non-null result from applying the given search
4149     * function on each (key, value), or null if none. Upon
4150     * success, further element processing is suppressed and the
4151     * results of any other parallel invocations of the search
4152     * function are ignored.
4153     *
4154     * @param searchFunction a function returning a non-null
4155     * result on success, else null
4156     * @return a non-null result from applying the given search
4157     * function on each (key, value), or null if none
4158     */
4159     public <U> U searchInParallel
4160 dl 1.153 (BiFunction<? super K, ? super V, ? extends U> searchFunction) {
4161 dl 1.151 return ForkJoinTasks.search
4162     (this, searchFunction).invoke();
4163     }
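A minimal sketch of {@code searchInParallel}: the search function returns a non-null result to terminate the search, and results of other in-flight invocations are ignored. The map is hypothetical, with {@code Integer} values.

    ConcurrentHashMap<String, Integer> map = new ConcurrentHashMap<String, Integer>();
    map.put("a", 50);
    map.put("b", 500);
    // some key whose value exceeds 100, or null if there is none
    String hit = map.searchInParallel((k, v) -> (v > 100) ? k : null);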
4164 dl 1.142
4165 dl 1.151 /**
4166     * Returns the result of accumulating the given transformation
4167     * of all (key, value) pairs using the given reducer to
4168     * combine values, or null if none.
4169     *
4170     * @param transformer a function returning the transformation
4171 jsr166 1.169 * for an element, or null if there is no transformation (in
4172 jsr166 1.172 * which case it is not combined)
4173 dl 1.151 * @param reducer a commutative associative combining function
4174     * @return the result of accumulating the given transformation
4175     * of all (key, value) pairs
4176     */
4177     public <U> U reduceInParallel
4178 dl 1.153 (BiFunction<? super K, ? super V, ? extends U> transformer,
4179     BiFunction<? super U, ? super U, ? extends U> reducer) {
4180 dl 1.151 return ForkJoinTasks.reduce
4181     (this, transformer, reducer).invoke();
4182     }
4183 dl 1.142
4184 dl 1.151 /**
4185     * Returns the result of accumulating the given transformation
4186     * of all (key, value) pairs using the given reducer to
4187     * combine values, and the given basis as an identity value.
4188     *
4189     * @param transformer a function returning the transformation
4190     * for an element
4191     * @param basis the identity (initial default value) for the reduction
4192     * @param reducer a commutative associative combining function
4193     * @return the result of accumulating the given transformation
4194     * of all (key, value) pairs
4195     */
4196     public double reduceToDoubleInParallel
4197 dl 1.171 (ToDoubleBiFunction<? super K, ? super V> transformer,
4198 dl 1.151 double basis,
4199 dl 1.153 DoubleBinaryOperator reducer) {
4200 dl 1.151 return ForkJoinTasks.reduceToDouble
4201     (this, transformer, basis, reducer).invoke();
4202     }
4203    
4204     /**
4205     * Returns the result of accumulating the given transformation
4206     * of all (key, value) pairs using the given reducer to
4207     * combine values, and the given basis as an identity value.
4208     *
4209     * @param transformer a function returning the transformation
4210     * for an element
4211     * @param basis the identity (initial default value) for the reduction
4212     * @param reducer a commutative associative combining function
4213     * @return the result of accumulating the given transformation
4214     * of all (key, value) pairs
4215     */
4216     public long reduceToLongInParallel
4217 dl 1.171 (ToLongBiFunction<? super K, ? super V> transformer,
4218 dl 1.151 long basis,
4219 dl 1.153 LongBinaryOperator reducer) {
4220 dl 1.151 return ForkJoinTasks.reduceToLong
4221     (this, transformer, basis, reducer).invoke();
4222     }
4223    
4224     /**
4225     * Returns the result of accumulating the given transformation
4226     * of all (key, value) pairs using the given reducer to
4227     * combine values, and the given basis as an identity value.
4228     *
4229     * @param transformer a function returning the transformation
4230     * for an element
4231     * @param basis the identity (initial default value) for the reduction
4232     * @param reducer a commutative associative combining function
4233     * @return the result of accumulating the given transformation
4234     * of all (key, value) pairs
4235     */
4236     public int reduceToIntInParallel
4237 dl 1.171 (ToIntBiFunction<? super K, ? super V> transformer,
4238 dl 1.151 int basis,
4239 dl 1.153 IntBinaryOperator reducer) {
4240 dl 1.151 return ForkJoinTasks.reduceToInt
4241     (this, transformer, basis, reducer).invoke();
4242     }
4243    
4244     /**
4245     * Performs the given action for each key.
4246     *
4247     * @param action the action
4248     */
4249 dl 1.171 public void forEachKeyInParallel(Consumer<? super K> action) {
4250 dl 1.151 ForkJoinTasks.forEachKey
4251     (this, action).invoke();
4252     }
4253    
4254     /**
4255     * Performs the given action for each non-null transformation
4256     * of each key.
4257     *
4258     * @param transformer a function returning the transformation
4259 jsr166 1.169 * for an element, or null if there is no transformation (in
4260 jsr166 1.172 * which case the action is not applied)
4261 dl 1.151 * @param action the action
4262     */
4263     public <U> void forEachKeyInParallel
4264 dl 1.153 (Function<? super K, ? extends U> transformer,
4265 dl 1.171 Consumer<? super U> action) {
4266 dl 1.151 ForkJoinTasks.forEachKey
4267     (this, transformer, action).invoke();
4268     }
4269    
4270     /**
4271     * Returns a non-null result from applying the given search
4272     * function on each key, or null if none. Upon success,
4273     * further element processing is suppressed and the results of
4274     * any other parallel invocations of the search function are
4275     * ignored.
4276     *
4277     * @param searchFunction a function returning a non-null
4278     * result on success, else null
4279     * @return a non-null result from applying the given search
4280     * function on each key, or null if none
4281     */
4282     public <U> U searchKeysInParallel
4283 dl 1.153 (Function<? super K, ? extends U> searchFunction) {
4284 dl 1.151 return ForkJoinTasks.searchKeys
4285     (this, searchFunction).invoke();
4286     }
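
    // Usage sketch: searchKeysInParallel stops once any invocation of the
    // search function returns non-null. The prefix test and helper name are
    // illustrative assumptions only.
    static String exampleFindKeyWithPrefix(ConcurrentHashMap<String,String> m,
                                           String prefix) {
        return m.searchKeysInParallel
            (k -> k.startsWith(prefix) ? k : null); // null means "keep searching"
    }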
4287    
4288     /**
4289     * Returns the result of accumulating all keys using the given
4290     * reducer to combine values, or null if none.
4291     *
4292     * @param reducer a commutative associative combining function
4293     * @return the result of accumulating all keys using the given
4294     * reducer to combine values, or null if none
4295     */
4296     public K reduceKeysInParallel
4297 dl 1.153 (BiFunction<? super K, ? super K, ? extends K> reducer) {
4298 dl 1.151 return ForkJoinTasks.reduceKeys
4299     (this, reducer).invoke();
4300     }
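
    // Usage sketch: reduces the key set to one representative using only a
    // reducer, here the longest key (ties broken arbitrarily). Helper name
    // and key type are illustrative assumptions.
    static String exampleLongestKey(ConcurrentHashMap<String,String> m) {
        return m.reduceKeysInParallel
            ((a, b) -> a.length() >= b.length() ? a : b); // null if the map is empty
    }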
4301    
4302     /**
4303     * Returns the result of accumulating the given transformation
4304     * of all keys using the given reducer to combine values, or
4305     * null if none.
4306     *
4307     * @param transformer a function returning the transformation
4308 jsr166 1.169 * for an element, or null if there is no transformation (in
4309 jsr166 1.172 * which case it is not combined)
4310 dl 1.151 * @param reducer a commutative associative combining function
4311     * @return the result of accumulating the given transformation
4312     * of all keys
4313     */
4314     public <U> U reduceKeysInParallel
4315 dl 1.153 (Function<? super K, ? extends U> transformer,
4316     BiFunction<? super U, ? super U, ? extends U> reducer) {
4317 dl 1.151 return ForkJoinTasks.reduceKeys
4318     (this, transformer, reducer).invoke();
4319     }
4320    
4321     /**
4322     * Returns the result of accumulating the given transformation
4323     * of all keys using the given reducer to combine values, and
4324     * the given basis as an identity value.
4325     *
4326     * @param transformer a function returning the transformation
4327     * for an element
4328     * @param basis the identity (initial default value) for the reduction
4329     * @param reducer a commutative associative combining function
4330 jsr166 1.157 * @return the result of accumulating the given transformation
4331 dl 1.151 * of all keys
4332     */
4333     public double reduceKeysToDoubleInParallel
4334 dl 1.171 (ToDoubleFunction<? super K> transformer,
4335 dl 1.151 double basis,
4336 dl 1.153 DoubleBinaryOperator reducer) {
4337 dl 1.151 return ForkJoinTasks.reduceKeysToDouble
4338     (this, transformer, basis, reducer).invoke();
4339     }
4340    
4341     /**
4342     * Returns the result of accumulating the given transformation
4343     * of all keys using the given reducer to combine values, and
4344     * the given basis as an identity value.
4345     *
4346     * @param transformer a function returning the transformation
4347     * for an element
4348     * @param basis the identity (initial default value) for the reduction
4349     * @param reducer a commutative associative combining function
4350     * @return the result of accumulating the given transformation
4351     * of all keys
4352     */
4353     public long reduceKeysToLongInParallel
4354 dl 1.171 (ToLongFunction<? super K> transformer,
4355 dl 1.151 long basis,
4356 dl 1.153 LongBinaryOperator reducer) {
4357 dl 1.151 return ForkJoinTasks.reduceKeysToLong
4358     (this, transformer, basis, reducer).invoke();
4359     }
4360    
4361     /**
4362     * Returns the result of accumulating the given transformation
4363     * of all keys using the given reducer to combine values, and
4364     * the given basis as an identity value.
4365     *
4366     * @param transformer a function returning the transformation
4367     * for an element
4368     * @param basis the identity (initial default value) for the reduction
4369     * @param reducer a commutative associative combining function
4370     * @return the result of accumulating the given transformation
4371     * of all keys
4372     */
4373     public int reduceKeysToIntInParallel
4374 dl 1.171 (ToIntFunction<? super K> transformer,
4375 dl 1.151 int basis,
4376 dl 1.153 IntBinaryOperator reducer) {
4377 dl 1.151 return ForkJoinTasks.reduceKeysToInt
4378     (this, transformer, basis, reducer).invoke();
4379     }
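
    // Usage sketch: counts keys satisfying a condition by mapping matching
    // keys to 1 and summing with 0 as the identity. The predicate and helper
    // name are illustrative assumptions.
    static int exampleCountKeysWithPrefix(ConcurrentHashMap<String,String> m,
                                          String prefix) {
        return m.reduceKeysToIntInParallel
            (k -> k.startsWith(prefix) ? 1 : 0, // transformer: key -> int
             0,                                 // basis
             (x, y) -> x + y);                  // reducer
    }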
4380    
4381     /**
4382     * Performs the given action for each value.
4383     *
4384     * @param action the action
4385     */
4386 dl 1.171 public void forEachValueInParallel(Consumer<? super V> action) {
4387 dl 1.151 ForkJoinTasks.forEachValue
4388     (this, action).invoke();
4389     }
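
    // Usage sketch: the action runs in parallel across values, so it should
    // be thread-safe; println is safe but output order is unspecified.
    // Helper name and value type are illustrative assumptions.
    static void examplePrintValues(ConcurrentHashMap<String,String> m) {
        m.forEachValueInParallel(v -> System.out.println(v));
    }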
4390    
4391     /**
4392     * Performs the given action for each non-null transformation
4393     * of each value.
4394     *
4395     * @param transformer a function returning the transformation
4396 jsr166 1.169 * for an element, or null if there is no transformation (in
4397 jsr166 1.172 * which case the action is not applied)
4398 jsr166 1.179 * @param action the action
4399 dl 1.151 */
4400     public <U> void forEachValueInParallel
4401 dl 1.153 (Function<? super V, ? extends U> transformer,
4402 dl 1.171 Consumer<? super U> action) {
4403 dl 1.151 ForkJoinTasks.forEachValue
4404     (this, transformer, action).invoke();
4405     }
4406    
4407     /**
4408     * Returns a non-null result from applying the given search
4409     * function on each value, or null if none. Upon success,
4410     * further element processing is suppressed and the results of
4411     * any other parallel invocations of the search function are
4412     * ignored.
4413     *
4414     * @param searchFunction a function returning a non-null
4415     * result on success, else null
4416     * @return a non-null result from applying the given search
4417     * function on each value, or null if none
4418     */
4419     public <U> U searchValuesInParallel
4420 dl 1.153 (Function<? super V, ? extends U> searchFunction) {
4421 dl 1.151 return ForkJoinTasks.searchValues
4422     (this, searchFunction).invoke();
4423     }
4424    
4425     /**
4426     * Returns the result of accumulating all values using the
4427     * given reducer to combine values, or null if none.
4428     *
4429     * @param reducer a commutative associative combining function
4430 jsr166 1.157 * @return the result of accumulating all values
4431 dl 1.151 */
4432     public V reduceValuesInParallel
4433 dl 1.153 (BiFunction<? super V, ? super V, ? extends V> reducer) {
4434 dl 1.151 return ForkJoinTasks.reduceValues
4435     (this, reducer).invoke();
4436     }
4437    
4438     /**
4439     * Returns the result of accumulating the given transformation
4440     * of all values using the given reducer to combine values, or
4441     * null if none.
4442     *
4443     * @param transformer a function returning the transformation
4444 jsr166 1.169 * for an element, or null if there is no transformation (in
4445 jsr166 1.172 * which case it is not combined)
4446 dl 1.151 * @param reducer a commutative associative combining function
4447     * @return the result of accumulating the given transformation
4448     * of all values
4449     */
4450     public <U> U reduceValuesInParallel
4451 dl 1.153 (Function<? super V, ? extends U> transformer,
4452     BiFunction<? super U, ? super U, ? extends U> reducer) {
4453 dl 1.151 return ForkJoinTasks.reduceValues
4454     (this, transformer, reducer).invoke();
4455     }
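
    // Usage sketch: transforms each value to its length and sums the (boxed)
    // lengths; a null-returning transformer would simply skip that value.
    // Helper name and types are illustrative assumptions.
    static Integer exampleTotalValueLengthBoxed(ConcurrentHashMap<String,String> m) {
        return m.reduceValuesInParallel
            (v -> v.length(),   // transformer: value -> Integer
             (a, b) -> a + b);  // reducer over transformed values
    }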
4456    
4457     /**
4458     * Returns the result of accumulating the given transformation
4459     * of all values using the given reducer to combine values,
4460     * and the given basis as an identity value.
4461     *
4462     * @param transformer a function returning the transformation
4463     * for an element
4464     * @param basis the identity (initial default value) for the reduction
4465     * @param reducer a commutative associative combining function
4466     * @return the result of accumulating the given transformation
4467     * of all values
4468     */
4469     public double reduceValuesToDoubleInParallel
4470 dl 1.171 (ToDoubleFunction<? super V> transformer,
4471 dl 1.151 double basis,
4472 dl 1.153 DoubleBinaryOperator reducer) {
4473 dl 1.151 return ForkJoinTasks.reduceValuesToDouble
4474     (this, transformer, basis, reducer).invoke();
4475     }
4476    
4477     /**
4478     * Returns the result of accumulating the given transformation
4479     * of all values using the given reducer to combine values,
4480     * and the given basis as an identity value.
4481     *
4482     * @param transformer a function returning the transformation
4483     * for an element
4484     * @param basis the identity (initial default value) for the reduction
4485     * @param reducer a commutative associative combining function
4486     * @return the result of accumulating the given transformation
4487     * of all values
4488     */
4489     public long reduceValuesToLongInParallel
4490 dl 1.171 (ToLongFunction<? super V> transformer,
4491 dl 1.151 long basis,
4492 dl 1.153 LongBinaryOperator reducer) {
4493 dl 1.151 return ForkJoinTasks.reduceValuesToLong
4494     (this, transformer, basis, reducer).invoke();
4495     }
4496    
4497     /**
4498     * Returns the result of accumulating the given transformation
4499     * of all values using the given reducer to combine values,
4500     * and the given basis as an identity value.
4501     *
4502     * @param transformer a function returning the transformation
4503     * for an element
4504     * @param basis the identity (initial default value) for the reduction
4505     * @param reducer a commutative associative combining function
4506     * @return the result of accumulating the given transformation
4507     * of all values
4508     */
4509     public int reduceValuesToIntInParallel
4510 dl 1.171 (ToIntFunction<? super V> transformer,
4511 dl 1.151 int basis,
4512 dl 1.153 IntBinaryOperator reducer) {
4513 dl 1.151 return ForkJoinTasks.reduceValuesToInt
4514     (this, transformer, basis, reducer).invoke();
4515     }
4516    
4517     /**
4518     * Performs the given action for each entry.
4519     *
4520     * @param action the action
4521     */
4522 dl 1.171 public void forEachEntryInParallel(Consumer<? super Map.Entry<K,V>> action) {
4523 dl 1.151 ForkJoinTasks.forEachEntry
4524     (this, action).invoke();
4525     }
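
    // Usage sketch: consumes whole entries rather than keys or values alone.
    // Helper name, types and formatting are illustrative assumptions.
    static void examplePrintEntries(ConcurrentHashMap<String,String> m) {
        m.forEachEntryInParallel
            (e -> System.out.println(e.getKey() + "=" + e.getValue()));
    }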
4526    
4527     /**
4528     * Performs the given action for each non-null transformation
4529     * of each entry.
4530     *
4531     * @param transformer a function returning the transformation
4532 jsr166 1.169 * for an element, or null if there is no transformation (in
4533 jsr166 1.172 * which case the action is not applied)
4534 dl 1.151 * @param action the action
4535     */
4536     public <U> void forEachEntryInParallel
4537 dl 1.153 (Function<Map.Entry<K,V>, ? extends U> transformer,
4538 dl 1.171 Consumer<? super U> action) {
4539 dl 1.151 ForkJoinTasks.forEachEntry
4540     (this, transformer, action).invoke();
4541     }
4542    
4543     /**
4544     * Returns a non-null result from applying the given search
4545     * function on each entry, or null if none. Upon success,
4546     * further element processing is suppressed and the results of
4547     * any other parallel invocations of the search function are
4548     * ignored.
4549     *
4550     * @param searchFunction a function returning a non-null
4551     * result on success, else null
4552     * @return a non-null result from applying the given search
4553     * function on each entry, or null if none
4554     */
4555     public <U> U searchEntriesInParallel
4556 dl 1.153 (Function<Map.Entry<K,V>, ? extends U> searchFunction) {
4557 dl 1.151 return ForkJoinTasks.searchEntries
4558     (this, searchFunction).invoke();
4559     }
4560    
4561     /**
4562     * Returns the result of accumulating all entries using the
4563     * given reducer to combine values, or null if none.
4564     *
4565     * @param reducer a commutative associative combining function
4566     * @return the result of accumulating all entries
4567     */
4568     public Map.Entry<K,V> reduceEntriesInParallel
4569 dl 1.153 (BiFunction<Map.Entry<K,V>, Map.Entry<K,V>, ? extends Map.Entry<K,V>> reducer) {
4570 dl 1.151 return ForkJoinTasks.reduceEntries
4571     (this, reducer).invoke();
4572     }
4573    
4574     /**
4575     * Returns the result of accumulating the given transformation
4576     * of all entries using the given reducer to combine values,
4577     * or null if none.
4578     *
4579     * @param transformer a function returning the transformation
4580 jsr166 1.169 * for an element, or null if there is no transformation (in
4581 jsr166 1.172 * which case it is not combined)
4582 dl 1.151 * @param reducer a commutative associative combining function
4583     * @return the result of accumulating the given transformation
4584     * of all entries
4585     */
4586     public <U> U reduceEntriesInParallel
4587 dl 1.153 (Function<Map.Entry<K,V>, ? extends U> transformer,
4588     BiFunction<? super U, ? super U, ? extends U> reducer) {
4589 dl 1.151 return ForkJoinTasks.reduceEntries
4590     (this, transformer, reducer).invoke();
4591     }
4592    
4593     /**
4594     * Returns the result of accumulating the given transformation
4595     * of all entries using the given reducer to combine values,
4596     * and the given basis as an identity value.
4597     *
4598     * @param transformer a function returning the transformation
4599     * for an element
4600     * @param basis the identity (initial default value) for the reduction
4601     * @param reducer a commutative associative combining function
4602     * @return the result of accumulating the given transformation
4603     * of all entries
4604     */
4605     public double reduceEntriesToDoubleInParallel
4606 dl 1.171 (ToDoubleFunction<Map.Entry<K,V>> transformer,
4607 dl 1.151 double basis,
4608 dl 1.153 DoubleBinaryOperator reducer) {
4609 dl 1.151 return ForkJoinTasks.reduceEntriesToDouble
4610     (this, transformer, basis, reducer).invoke();
4611     }
4612    
4613     /**
4614     * Returns the result of accumulating the given transformation
4615     * of all entries using the given reducer to combine values,
4616     * and the given basis as an identity value.
4617     *
4618     * @param transformer a function returning the transformation
4619     * for an element
4620     * @param basis the identity (initial default value) for the reduction
4621     * @param reducer a commutative associative combining function
4622 jsr166 1.157 * @return the result of accumulating the given transformation
4623 dl 1.151 * of all entries
4624     */
4625     public long reduceEntriesToLongInParallel
4626 dl 1.171 (ToLongFunction<Map.Entry<K,V>> transformer,
4627 dl 1.151 long basis,
4628 dl 1.153 LongBinaryOperator reducer) {
4629 dl 1.151 return ForkJoinTasks.reduceEntriesToLong
4630     (this, transformer, basis, reducer).invoke();
4631     }
4632    
4633     /**
4634     * Returns the result of accumulating the given transformation
4635     * of all entries using the given reducer to combine values,
4636     * and the given basis as an identity value.
4637     *
4638     * @param transformer a function returning the transformation
4639     * for an element
4640     * @param basis the identity (initial default value) for the reduction
4641     * @param reducer a commutative associative combining function
4642     * @return the result of accumulating the given transformation
4643     * of all entries
4644     */
4645     public int reduceEntriesToIntInParallel
4646 dl 1.171 (ToIntFunction<Map.Entry<K,V>> transformer,
4647 dl 1.151 int basis,
4648 dl 1.153 IntBinaryOperator reducer) {
4649 dl 1.151 return ForkJoinTasks.reduceEntriesToInt
4650     (this, transformer, basis, reducer).invoke();
4651     }
4652    
4653    
4654     /* ----------------Views -------------- */
4655    
4656     /**
4657     * Base class for views.
4658     */
4659 jsr166 1.187 abstract static class CHMCollectionView<K,V,E>
4660 jsr166 1.184 implements Collection<E>, java.io.Serializable {
4661 dl 1.163 private static final long serialVersionUID = 7249069246763182397L;
4662 jsr166 1.186 final ConcurrentHashMap<K,V> map;
4663     CHMCollectionView(ConcurrentHashMap<K,V> map) { this.map = map; }
4664 dl 1.151
4665     /**
4666     * Returns the map backing this view.
4667     *
4668     * @return the map backing this view
4669     */
4670     public ConcurrentHashMap<K,V> getMap() { return map; }
4671    
4672 jsr166 1.184 /**
4673     * Removes all of the elements from this view, by removing all
4674     * the mappings from the map backing this view.
4675     */
4676     public final void clear() { map.clear(); }
4677     public final int size() { return map.size(); }
4678     public final boolean isEmpty() { return map.isEmpty(); }
4679 dl 1.151
4680     // implementations below rely on concrete classes supplying these
4681 jsr166 1.184 // abstract methods
4682     /**
4683     * Returns a "weakly consistent" iterator that will never
4684     * throw {@link ConcurrentModificationException}, and
4685     * guarantees to traverse elements as they existed upon
4686     * construction of the iterator, and may (but is not
4687     * guaranteed to) reflect any modifications subsequent to
4688     * construction.
4689     */
4690     public abstract Iterator<E> iterator();
4691 jsr166 1.165 public abstract boolean contains(Object o);
4692     public abstract boolean remove(Object o);
4693 dl 1.151
4694     private static final String oomeMsg = "Required array size too large";
4695 dl 1.142
4696     public final Object[] toArray() {
4697     long sz = map.mappingCount();
4698 jsr166 1.184 if (sz > MAX_ARRAY_SIZE)
4699 dl 1.142 throw new OutOfMemoryError(oomeMsg);
4700     int n = (int)sz;
4701     Object[] r = new Object[n];
4702     int i = 0;
4703 jsr166 1.184 for (E e : this) {
4704 dl 1.142 if (i == n) {
4705     if (n >= MAX_ARRAY_SIZE)
4706     throw new OutOfMemoryError(oomeMsg);
4707     if (n >= MAX_ARRAY_SIZE - (MAX_ARRAY_SIZE >>> 1) - 1)
4708     n = MAX_ARRAY_SIZE;
4709     else
4710     n += (n >>> 1) + 1;
4711     r = Arrays.copyOf(r, n);
4712     }
4713 jsr166 1.184 r[i++] = e;
4714 dl 1.142 }
4715     return (i == n) ? r : Arrays.copyOf(r, i);
4716     }
4717    
4718 jsr166 1.184 @SuppressWarnings("unchecked")
4719     public final <T> T[] toArray(T[] a) {
4720 dl 1.142 long sz = map.mappingCount();
4721 jsr166 1.184 if (sz > MAX_ARRAY_SIZE)
4722 dl 1.142 throw new OutOfMemoryError(oomeMsg);
4723     int m = (int)sz;
4724     T[] r = (a.length >= m) ? a :
4725     (T[])java.lang.reflect.Array
4726     .newInstance(a.getClass().getComponentType(), m);
4727     int n = r.length;
4728     int i = 0;
4729 jsr166 1.184 for (E e : this) {
4730 dl 1.142 if (i == n) {
4731     if (n >= MAX_ARRAY_SIZE)
4732     throw new OutOfMemoryError(oomeMsg);
4733     if (n >= MAX_ARRAY_SIZE - (MAX_ARRAY_SIZE >>> 1) - 1)
4734     n = MAX_ARRAY_SIZE;
4735     else
4736     n += (n >>> 1) + 1;
4737     r = Arrays.copyOf(r, n);
4738     }
4739 jsr166 1.184 r[i++] = (T)e;
4740 dl 1.142 }
4741     if (a == r && i < n) {
4742     r[i] = null; // null-terminate
4743     return r;
4744     }
4745     return (i == n) ? r : Arrays.copyOf(r, i);
4746     }
4747    
4748 jsr166 1.184 /**
4749     * Returns a string representation of this collection.
4750     * The string representation consists of the string representations
4751     * of the collection's elements in the order they are returned by
4752     * its iterator, enclosed in square brackets ({@code "[]"}).
4753     * Adjacent elements are separated by the characters {@code ", "}
4754     * (comma and space). Elements are converted to strings as by
4755     * {@link String#valueOf(Object)}.
4756     *
4757     * @return a string representation of this collection
4758     */
4759 dl 1.142 public final String toString() {
4760     StringBuilder sb = new StringBuilder();
4761     sb.append('[');
4762 jsr166 1.184 Iterator<E> it = iterator();
4763 dl 1.142 if (it.hasNext()) {
4764     for (;;) {
4765     Object e = it.next();
4766     sb.append(e == this ? "(this Collection)" : e);
4767     if (!it.hasNext())
4768     break;
4769     sb.append(',').append(' ');
4770     }
4771     }
4772     return sb.append(']').toString();
4773     }
4774    
4775     public final boolean containsAll(Collection<?> c) {
4776     if (c != this) {
4777 jsr166 1.184 for (Object e : c) {
4778 dl 1.142 if (e == null || !contains(e))
4779     return false;
4780     }
4781     }
4782     return true;
4783     }
4784    
4785     public final boolean removeAll(Collection<?> c) {
4786     boolean modified = false;
4787 jsr166 1.184 for (Iterator<E> it = iterator(); it.hasNext();) {
4788 dl 1.142 if (c.contains(it.next())) {
4789     it.remove();
4790     modified = true;
4791     }
4792     }
4793     return modified;
4794     }
4795    
4796     public final boolean retainAll(Collection<?> c) {
4797     boolean modified = false;
4798 jsr166 1.184 for (Iterator<E> it = iterator(); it.hasNext();) {
4799 dl 1.142 if (!c.contains(it.next())) {
4800     it.remove();
4801     modified = true;
4802     }
4803     }
4804     return modified;
4805     }
4806    
4807     }
4808    
4809 jsr166 1.187 abstract static class CHMSetView<K,V,E>
4810     extends CHMCollectionView<K,V,E>
4811 jsr166 1.184 implements Set<E>, java.io.Serializable {
4812     private static final long serialVersionUID = 7249069246763182397L;
4813 jsr166 1.186 CHMSetView(ConcurrentHashMap<K,V> map) { super(map); }
4814 jsr166 1.184
4815     // Implement Set API
4816    
4817     /**
4818     * Implements {@link Set#hashCode()}.
4819     * @return the hash code value for this set
4820     */
4821     public final int hashCode() {
4822     int h = 0;
4823     for (E e : this)
4824     h += e.hashCode();
4825     return h;
4826     }
4827    
4828     /**
4829     * Implements {@link Set#equals(Object)}.
4830     * @param o object to be compared for equality with this set
4831     * @return {@code true} if the specified object is equal to this set
4832     */
4833     public final boolean equals(Object o) {
4834     Set<?> c;
4835     return ((o instanceof Set) &&
4836     ((c = (Set<?>)o) == this ||
4837     (containsAll(c) && c.containsAll(this))));
4838     }
4839     }
4840    
4841 dl 1.142 /**
4842     * A view of a ConcurrentHashMap as a {@link Set} of keys, in
4843     * which additions may optionally be enabled by mapping to a
4844 jsr166 1.185 * common value. This class cannot be directly instantiated.
4845     * See {@link #keySet() keySet()},
4846     * {@link #keySet(Object) keySet(V)},
4847     * {@link #newKeySet() newKeySet()},
4848     * {@link #newKeySet(int) newKeySet(int)}.
4849 dl 1.142 */
4850 jsr166 1.184 public static class KeySetView<K,V>
4851     extends CHMSetView<K,V,K>
4852     implements Set<K>, java.io.Serializable {
4853 dl 1.142 private static final long serialVersionUID = 7249069246763182397L;
4854     private final V value;
4855 jsr166 1.186 KeySetView(ConcurrentHashMap<K,V> map, V value) { // non-public
4856 dl 1.142 super(map);
4857     this.value = value;
4858     }
4859    
4860     /**
4861     * Returns the default mapped value for additions,
4862     * or {@code null} if additions are not supported.
4863     *
4864     * @return the default mapped value for additions, or {@code null}
4865 jsr166 1.172 * if not supported
4866 dl 1.142 */
4867     public V getMappedValue() { return value; }
4868    
4869 jsr166 1.184 /**
4870     * {@inheritDoc}
4871     * @throws NullPointerException if the specified key is null
4872     */
4873     public boolean contains(Object o) { return map.containsKey(o); }
4874 dl 1.142
4875 jsr166 1.184 /**
4876     * Removes the key from this map view, by removing the key (and its
4877     * corresponding value) from the backing map. This method does
4878     * nothing if the key is not in the map.
4879     *
4880     * @param o the key to be removed from the backing map
4881     * @return {@code true} if the backing map contained the specified key
4882     * @throws NullPointerException if the specified key is null
4883     */
4884     public boolean remove(Object o) { return map.remove(o) != null; }
4885    
4886     /**
4887     * @return an iterator over the keys of the backing map
4888     */
4889     public Iterator<K> iterator() { return new KeyIterator<K,V>(map); }
4890 dl 1.142
4891     /**
4892 jsr166 1.184 * Adds the specified key to this set view by mapping the key to
4893     * the default mapped value in the backing map, if defined.
4894 dl 1.142 *
4895 jsr166 1.184 * @param e key to be added
4896     * @return {@code true} if this set changed as a result of the call
4897     * @throws NullPointerException if the specified key is null
4898     * @throws UnsupportedOperationException if no default mapped value
4899     * for additions was provided
4900 dl 1.142 */
4901     public boolean add(K e) {
4902     V v;
4903     if ((v = value) == null)
4904     throw new UnsupportedOperationException();
4905 dl 1.149 return map.internalPut(e, v, true) == null;
4906 dl 1.142 }
4907 jsr166 1.184
4908     /**
4909     * Adds all of the elements in the specified collection to this set,
4910     * as if by calling {@link #add} on each one.
4911     *
4912     * @param c the elements to be inserted into this set
4913     * @return {@code true} if this set changed as a result of the call
4914     * @throws NullPointerException if the collection or any of its
4915     * elements are {@code null}
4916     * @throws UnsupportedOperationException if no default mapped value
4917     * for additions was provided
4918     */
4919 dl 1.142 public boolean addAll(Collection<? extends K> c) {
4920     boolean added = false;
4921     V v;
4922     if ((v = value) == null)
4923     throw new UnsupportedOperationException();
4924     for (K e : c) {
4925 dl 1.149 if (map.internalPut(e, v, true) == null)
4926 dl 1.142 added = true;
4927     }
4928     return added;
4929     }
4930 dl 1.153
4931 dl 1.194 public Spliterator<K> spliterator() {
4932 dl 1.191 return new KeyIterator<>(map, null);
4933     }
4934    
4935 dl 1.142 }
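
    // Usage sketch: two ways of obtaining an addable key view, per the class
    // javadoc above. newKeySet() creates a set backed by a fresh map;
    // keySet(defaultValue) views an existing map, adding absent keys under
    // the given default. Names and types here are illustrative assumptions.
    static void exampleKeySetViews(ConcurrentHashMap<String,Long> scores) {
        KeySetView<String,Boolean> fresh = ConcurrentHashMap.newKeySet();
        fresh.add("alice");                     // added under the default mapped value
        KeySetView<String,Long> view = scores.keySet(0L);
        view.add("bob");                        // maps "bob" -> 0L if not already present
    }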
4936    
4937     /**
4938     * A view of a ConcurrentHashMap as a {@link Collection} of
4939     * values, in which additions are disabled. This class cannot be
4940 jsr166 1.181 * directly instantiated. See {@link #values()}.
4941 dl 1.142 *
4942     * <p>The view's {@code iterator} is a "weakly consistent" iterator
4943     * that will never throw {@link ConcurrentModificationException},
4944     * and guarantees to traverse elements as they existed upon
4945     * construction of the iterator, and may (but is not guaranteed to)
4946     * reflect any modifications subsequent to construction.
4947     */
4948 jsr166 1.184 public static final class ValuesView<K,V>
4949     extends CHMCollectionView<K,V,V>
4950     implements Collection<V>, java.io.Serializable {
4951 jsr166 1.166 private static final long serialVersionUID = 2249069246763182397L;
4952 jsr166 1.186 ValuesView(ConcurrentHashMap<K,V> map) { super(map); }
4953 jsr166 1.184 public final boolean contains(Object o) {
4954     return map.containsValue(o);
4955     }
4956 dl 1.142 public final boolean remove(Object o) {
4957     if (o != null) {
4958 jsr166 1.184 for (Iterator<V> it = iterator(); it.hasNext();) {
4959 dl 1.142 if (o.equals(it.next())) {
4960     it.remove();
4961     return true;
4962     }
4963     }
4964     }
4965     return false;
4966     }
4967    
4968     /**
4969 jsr166 1.184 * @return an iterator over the values of the backing map
4970 dl 1.142 */
4971     public final Iterator<V> iterator() {
4972     return new ValueIterator<K,V>(map);
4973     }
4974 jsr166 1.184
4975     /** Always throws {@link UnsupportedOperationException}. */
4976 dl 1.142 public final boolean add(V e) {
4977     throw new UnsupportedOperationException();
4978     }
4979 jsr166 1.184 /** Always throws {@link UnsupportedOperationException}. */
4980 dl 1.142 public final boolean addAll(Collection<? extends V> c) {
4981     throw new UnsupportedOperationException();
4982     }
4983    
4984 dl 1.194 public Spliterator<V> spliterator() {
4985 dl 1.191 return new ValueIterator<K,V>(map, null);
4986     }
4987    
4988 dl 1.142 }
4989    
4990     /**
4991     * A view of a ConcurrentHashMap as a {@link Set} of (key, value)
4992     * entries. This class cannot be directly instantiated. See
4993 jsr166 1.180 * {@link #entrySet()}.
4994 dl 1.142 */
4995 jsr166 1.184 public static final class EntrySetView<K,V>
4996     extends CHMSetView<K,V,Map.Entry<K,V>>
4997     implements Set<Map.Entry<K,V>>, java.io.Serializable {
4998 jsr166 1.166 private static final long serialVersionUID = 2249069246763182397L;
4999 jsr166 1.186 EntrySetView(ConcurrentHashMap<K,V> map) { super(map); }
5000 jsr166 1.184
5001 dl 1.142 public final boolean contains(Object o) {
5002     Object k, v, r; Map.Entry<?,?> e;
5003     return ((o instanceof Map.Entry) &&
5004     (k = (e = (Map.Entry<?,?>)o).getKey()) != null &&
5005     (r = map.get(k)) != null &&
5006     (v = e.getValue()) != null &&
5007     (v == r || v.equals(r)));
5008     }
5009     public final boolean remove(Object o) {
5010     Object k, v; Map.Entry<?,?> e;
5011     return ((o instanceof Map.Entry) &&
5012     (k = (e = (Map.Entry<?,?>)o).getKey()) != null &&
5013     (v = e.getValue()) != null &&
5014     map.remove(k, v));
5015     }
5016    
5017     /**
5018 jsr166 1.184 * @return an iterator over the entries of the backing map
5019 dl 1.142 */
5020     public final Iterator<Map.Entry<K,V>> iterator() {
5021     return new EntryIterator<K,V>(map);
5022     }
5023    
5024 jsr166 1.184 /**
5025     * Adds the specified mapping to this view.
5026     *
5027     * @param e mapping to be added
5028     * @return {@code true} if this set changed as a result of the call
5029     * @throws NullPointerException if the entry, its key, or its
5030     * value is null
5031     */
5032 dl 1.142 public final boolean add(Entry<K,V> e) {
5033 jsr166 1.182 return map.internalPut(e.getKey(), e.getValue(), false) == null;
5034 dl 1.142 }
5035 jsr166 1.200
5036 jsr166 1.184 /**
5037     * Adds all of the mappings in the specified collection to this
5038     * set, as if by calling {@link #add(Map.Entry)} on each one.
5039     * @param c the mappings to be inserted into this set
5040     * @return {@code true} if this set changed as a result of the call
5041     * @throws NullPointerException if the collection or any of its
5042     * entries, keys, or values are null
5043     */
5044 dl 1.142 public final boolean addAll(Collection<? extends Entry<K,V>> c) {
5045     boolean added = false;
5046     for (Entry<K,V> e : c) {
5047     if (add(e))
5048     added = true;
5049     }
5050     return added;
5051     }
5052 dl 1.153
5053 dl 1.194 public Spliterator<Map.Entry<K,V>> spliterator() {
5054 dl 1.191 return new EntryIterator<K,V>(map, null);
5055     }
5056    
5057 dl 1.142 }
5058    
5059 dl 1.119 // ---------------------------------------------------------------------
5060    
5061     /**
5062     * Predefined tasks for performing bulk parallel operations on
5063     * ConcurrentHashMaps. These tasks follow the forms and rules used
5064 dl 1.137 * for bulk operations. Each method has the same name, but returns
5065     * a task rather than invoking it. These methods may be useful in
5066     * custom applications such as submitting a task without waiting
5067     * for completion, using a custom pool, or combining with other
5068     * tasks.
5069 dl 1.119 */
5070     public static class ForkJoinTasks {
5071     private ForkJoinTasks() {}
5072    
5073     /**
5074     * Returns a task that when invoked, performs the given
5075     * action for each (key, value) pair.
5076     *
5077     * @param map the map
5078     * @param action the action
5079     * @return the task
5080     */
5081 jsr166 1.120 public static <K,V> ForkJoinTask<Void> forEach
5082 dl 1.119 (ConcurrentHashMap<K,V> map,
5083 dl 1.171 BiConsumer<? super K, ? super V> action) {
5084 dl 1.119 if (action == null) throw new NullPointerException();
5085 dl 1.146 return new ForEachMappingTask<K,V>(map, null, -1, action);
5086 dl 1.119 }
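
        // Usage sketch: the uses mentioned in the class comment above, e.g.
        // running a bulk task in a caller-supplied pool rather than invoking
        // it in place. Pool size and helper name are illustrative assumptions.
        static <K,V> void exampleForEachInCustomPool
            (ConcurrentHashMap<K,V> map,
             BiConsumer<? super K, ? super V> action) {
            ForkJoinPool pool = new ForkJoinPool(4); // custom pool
            try {
                pool.invoke(forEach(map, action));   // build the task, run it there
            } finally {
                pool.shutdown();
            }
        }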
5087    
5088     /**
5089     * Returns a task that when invoked, performs the given
5090     * action for each non-null transformation of each (key, value) pair.
5091     *
5092     * @param map the map
5093     * @param transformer a function returning the transformation
5094 jsr166 1.135 * for an element, or null if there is no transformation (in
5095 jsr166 1.134 * which case the action is not applied)
5096 dl 1.119 * @param action the action
5097     * @return the task
5098     */
5099 jsr166 1.120 public static <K,V,U> ForkJoinTask<Void> forEach
5100 dl 1.119 (ConcurrentHashMap<K,V> map,
5101 dl 1.153 BiFunction<? super K, ? super V, ? extends U> transformer,
5102 dl 1.171 Consumer<? super U> action) {
5103 dl 1.119 if (transformer == null || action == null)
5104     throw new NullPointerException();
5105     return new ForEachTransformedMappingTask<K,V,U>
5106 dl 1.146 (map, null, -1, transformer, action);
5107 dl 1.119 }
5108    
5109     /**
5110 dl 1.126 * Returns a task that when invoked, returns a non-null result
5111     * from applying the given search function on each (key,
5112     * value), or null if none. Upon success, further element
5113     * processing is suppressed and the results of any other
5114     * parallel invocations of the search function are ignored.
5115 dl 1.119 *
5116     * @param map the map
5117     * @param searchFunction a function returning a non-null
5118     * result on success, else null
5119     * @return the task
5120     */
5121     public static <K,V,U> ForkJoinTask<U> search
5122     (ConcurrentHashMap<K,V> map,
5123 dl 1.153 BiFunction<? super K, ? super V, ? extends U> searchFunction) {
5124 dl 1.119 if (searchFunction == null) throw new NullPointerException();
5125     return new SearchMappingsTask<K,V,U>
5126 dl 1.146 (map, null, -1, searchFunction,
5127 dl 1.119 new AtomicReference<U>());
5128     }
5129    
5130     /**
5131     * Returns a task that when invoked, returns the result of
5132     * accumulating the given transformation of all (key, value) pairs
5133     * using the given reducer to combine values, or null if none.
5134     *
5135     * @param map the map
5136     * @param transformer a function returning the transformation
5137 jsr166 1.135 * for an element, or null if there is no transformation (in
5138 jsr166 1.172 * which case it is not combined)
5139 dl 1.119 * @param reducer a commutative associative combining function
5140     * @return the task
5141     */
5142     public static <K,V,U> ForkJoinTask<U> reduce
5143     (ConcurrentHashMap<K,V> map,
5144 dl 1.153 BiFunction<? super K, ? super V, ? extends U> transformer,
5145     BiFunction<? super U, ? super U, ? extends U> reducer) {
5146 dl 1.119 if (transformer == null || reducer == null)
5147     throw new NullPointerException();
5148     return new MapReduceMappingsTask<K,V,U>
5149 dl 1.130 (map, null, -1, null, transformer, reducer);
5150 dl 1.119 }
5151    
5152     /**
5153     * Returns a task that when invoked, returns the result of
5154     * accumulating the given transformation of all (key, value) pairs
5155     * using the given reducer to combine values, and the given
5156     * basis as an identity value.
5157     *
5158     * @param map the map
5159     * @param transformer a function returning the transformation
5160     * for an element
5161     * @param basis the identity (initial default value) for the reduction
5162     * @param reducer a commutative associative combining function
5163     * @return the task
5164     */
5165     public static <K,V> ForkJoinTask<Double> reduceToDouble
5166     (ConcurrentHashMap<K,V> map,
5167 dl 1.171 ToDoubleBiFunction<? super K, ? super V> transformer,
5168 dl 1.119 double basis,
5169 dl 1.153 DoubleBinaryOperator reducer) {
5170 dl 1.119 if (transformer == null || reducer == null)
5171     throw new NullPointerException();
5172     return new MapReduceMappingsToDoubleTask<K,V>
5173 dl 1.130 (map, null, -1, null, transformer, basis, reducer);
5174 dl 1.119 }
5175    
5176     /**
5177     * Returns a task that when invoked, returns the result of
5178     * accumulating the given transformation of all (key, value) pairs
5179     * using the given reducer to combine values, and the given
5180     * basis as an identity value.
5181     *
5182     * @param map the map
5183     * @param transformer a function returning the transformation
5184     * for an element
5185     * @param basis the identity (initial default value) for the reduction
5186     * @param reducer a commutative associative combining function
5187     * @return the task
5188     */
5189     public static <K,V> ForkJoinTask<Long> reduceToLong
5190     (ConcurrentHashMap<K,V> map,
5191 dl 1.171 ToLongBiFunction<? super K, ? super V> transformer,
5192 dl 1.119 long basis,
5193 dl 1.153 LongBinaryOperator reducer) {
5194 dl 1.119 if (transformer == null || reducer == null)
5195     throw new NullPointerException();
5196     return new MapReduceMappingsToLongTask<K,V>
5197 dl 1.130 (map, null, -1, null, transformer, basis, reducer);
5198 dl 1.119 }
5199    
5200     /**
5201     * Returns a task that when invoked, returns the result of
5202     * accumulating the given transformation of all (key, value) pairs
5203     * using the given reducer to combine values, and the given
5204     * basis as an identity value.
5205     *
5206 jsr166 1.179 * @param map the map
5207 dl 1.119 * @param transformer a function returning the transformation
5208     * for an element
5209     * @param basis the identity (initial default value) for the reduction
5210     * @param reducer a commutative associative combining function
5211     * @return the task
5212     */
5213     public static <K,V> ForkJoinTask<Integer> reduceToInt
5214     (ConcurrentHashMap<K,V> map,
5215 dl 1.171 ToIntBiFunction<? super K, ? super V> transformer,
5216 dl 1.119 int basis,
5217 dl 1.153 IntBinaryOperator reducer) {
5218 dl 1.119 if (transformer == null || reducer == null)
5219     throw new NullPointerException();
5220     return new MapReduceMappingsToIntTask<K,V>
5221 dl 1.130 (map, null, -1, null, transformer, basis, reducer);
5222 dl 1.119 }
5223    
5224     /**
5225     * Returns a task that when invoked, performs the given action
5226 jsr166 1.123 * for each key.
5227 dl 1.119 *
5228     * @param map the map
5229     * @param action the action
5230     * @return the task
5231     */
5232 jsr166 1.120 public static <K,V> ForkJoinTask<Void> forEachKey
5233 dl 1.119 (ConcurrentHashMap<K,V> map,
5234 dl 1.171 Consumer<? super K> action) {
5235 dl 1.119 if (action == null) throw new NullPointerException();
5236 dl 1.146 return new ForEachKeyTask<K,V>(map, null, -1, action);
5237 dl 1.119 }
5238    
5239     /**
5240     * Returns a task that when invoked, performs the given action
5241 jsr166 1.123 * for each non-null transformation of each key.
5242 dl 1.119 *
5243     * @param map the map
5244     * @param transformer a function returning the transformation
5245 jsr166 1.135 * for an element, or null if there is no transformation (in
5246 jsr166 1.134 * which case the action is not applied)
5247 dl 1.119 * @param action the action
5248     * @return the task
5249     */
5250 jsr166 1.120 public static <K,V,U> ForkJoinTask<Void> forEachKey
5251 dl 1.119 (ConcurrentHashMap<K,V> map,
5252 dl 1.153 Function<? super K, ? extends U> transformer,
5253 dl 1.171 Consumer<? super U> action) {
5254 dl 1.119 if (transformer == null || action == null)
5255     throw new NullPointerException();
5256     return new ForEachTransformedKeyTask<K,V,U>
5257 dl 1.146 (map, null, -1, transformer, action);
5258 dl 1.119 }
5259    
5260     /**
5261     * Returns a task that when invoked, returns a non-null result
5262     * from applying the given search function on each key, or
5263 dl 1.126 * null if none. Upon success, further element processing is
5264     * suppressed and the results of any other parallel
5265     * invocations of the search function are ignored.
5266 dl 1.119 *
5267     * @param map the map
5268     * @param searchFunction a function returning a non-null
5269     * result on success, else null
5270     * @return the task
5271     */
5272     public static <K,V,U> ForkJoinTask<U> searchKeys
5273     (ConcurrentHashMap<K,V> map,
5274 dl 1.153 Function<? super K, ? extends U> searchFunction) {
5275 dl 1.119 if (searchFunction == null) throw new NullPointerException();
5276     return new SearchKeysTask<K,V,U>
5277 dl 1.146 (map, null, -1, searchFunction,
5278 dl 1.119 new AtomicReference<U>());
5279     }
5280    
5281     /**
5282     * Returns a task that when invoked, returns the result of
5283     * accumulating all keys using the given reducer to combine
5284     * values, or null if none.
5285     *
5286     * @param map the map
5287     * @param reducer a commutative associative combining function
5288     * @return the task
5289     */
5290     public static <K,V> ForkJoinTask<K> reduceKeys
5291     (ConcurrentHashMap<K,V> map,
5292 dl 1.153 BiFunction<? super K, ? super K, ? extends K> reducer) {
5293 dl 1.119 if (reducer == null) throw new NullPointerException();
5294     return new ReduceKeysTask<K,V>
5295 dl 1.130 (map, null, -1, null, reducer);
5296 dl 1.119 }
5297 jsr166 1.125
5298 dl 1.119 /**
5299     * Returns a task that when invoked, returns the result of
5300     * accumulating the given transformation of all keys using the given
5301     * reducer to combine values, or null if none.
5302     *
5303     * @param map the map
5304     * @param transformer a function returning the transformation
5305 jsr166 1.135 * for an element, or null if there is no transformation (in
5306 jsr166 1.172 * which case it is not combined)
5307 dl 1.119 * @param reducer a commutative associative combining function
5308     * @return the task
5309     */
5310     public static <K,V,U> ForkJoinTask<U> reduceKeys
5311     (ConcurrentHashMap<K,V> map,
5312 dl 1.153 Function<? super K, ? extends U> transformer,
5313     BiFunction<? super U, ? super U, ? extends U> reducer) {
5314 dl 1.119 if (transformer == null || reducer == null)
5315     throw new NullPointerException();
5316     return new MapReduceKeysTask<K,V,U>
5317 dl 1.130 (map, null, -1, null, transformer, reducer);
5318 dl 1.119 }
5319    
5320     /**
5321     * Returns a task that when invoked, returns the result of
5322     * accumulating the given transformation of all keys using the given
5323     * reducer to combine values, and the given basis as an
5324     * identity value.
5325     *
5326     * @param map the map
5327     * @param transformer a function returning the transformation
5328     * for an element
5329     * @param basis the identity (initial default value) for the reduction
5330     * @param reducer a commutative associative combining function
5331     * @return the task
5332     */
5333     public static <K,V> ForkJoinTask<Double> reduceKeysToDouble
5334     (ConcurrentHashMap<K,V> map,
5335 dl 1.171 ToDoubleFunction<? super K> transformer,
5336 dl 1.119 double basis,
5337 dl 1.153 DoubleBinaryOperator reducer) {
5338 dl 1.119 if (transformer == null || reducer == null)
5339     throw new NullPointerException();
5340     return new MapReduceKeysToDoubleTask<K,V>
5341 dl 1.130 (map, null, -1, null, transformer, basis, reducer);
5342 dl 1.119 }
5343    
5344     /**
5345     * Returns a task that when invoked, returns the result of
5346     * accumulating the given transformation of all keys using the given
5347     * reducer to combine values, and the given basis as an
5348     * identity value.
5349     *
5350     * @param map the map
5351     * @param transformer a function returning the transformation
5352     * for an element
5353     * @param basis the identity (initial default value) for the reduction
5354     * @param reducer a commutative associative combining function
5355     * @return the task
5356     */
5357     public static <K,V> ForkJoinTask<Long> reduceKeysToLong
5358     (ConcurrentHashMap<K,V> map,
5359 dl 1.171 ToLongFunction<? super K> transformer,
5360 dl 1.119 long basis,
5361 dl 1.153 LongBinaryOperator reducer) {
5362 dl 1.119 if (transformer == null || reducer == null)
5363     throw new NullPointerException();
5364     return new MapReduceKeysToLongTask<K,V>
5365 dl 1.130 (map, null, -1, null, transformer, basis, reducer);
5366 dl 1.119 }
5367    
5368     /**
5369     * Returns a task that when invoked, returns the result of
5370     * accumulating the given transformation of all keys using the given
5371     * reducer to combine values, and the given basis as an
5372     * identity value.
5373     *
5374     * @param map the map
5375     * @param transformer a function returning the transformation
5376     * for an element
5377     * @param basis the identity (initial default value) for the reduction
5378     * @param reducer a commutative associative combining function
5379     * @return the task
5380     */
5381     public static <K,V> ForkJoinTask<Integer> reduceKeysToInt
5382     (ConcurrentHashMap<K,V> map,
5383 dl 1.171 ToIntFunction<? super K> transformer,
5384 dl 1.119 int basis,
5385 dl 1.153 IntBinaryOperator reducer) {
5386 dl 1.119 if (transformer == null || reducer == null)
5387     throw new NullPointerException();
5388     return new MapReduceKeysToIntTask<K,V>
5389 dl 1.130 (map, null, -1, null, transformer, basis, reducer);
5390 dl 1.119 }
5391    
5392     /**
5393     * Returns a task that when invoked, performs the given action
5394 jsr166 1.123 * for each value.
5395 dl 1.119 *
5396     * @param map the map
5397     * @param action the action
5398 jsr166 1.173 * @return the task
5399 dl 1.119 */
5400 jsr166 1.120 public static <K,V> ForkJoinTask<Void> forEachValue
5401 dl 1.119 (ConcurrentHashMap<K,V> map,
5402 dl 1.171 Consumer<? super V> action) {
5403 dl 1.119 if (action == null) throw new NullPointerException();
5404 dl 1.146 return new ForEachValueTask<K,V>(map, null, -1, action);
5405 dl 1.119 }
5406    
5407     /**
5408     * Returns a task that when invoked, performs the given action
5409 jsr166 1.123 * for each non-null transformation of each value.
5410 dl 1.119 *
5411     * @param map the map
5412     * @param transformer a function returning the transformation
5413 jsr166 1.135 * for an element, or null if there is no transformation (in
5414 jsr166 1.134 * which case the action is not applied)
5415 dl 1.119 * @param action the action
5416 jsr166 1.173 * @return the task
5417 dl 1.119 */
5418 jsr166 1.120 public static <K,V,U> ForkJoinTask<Void> forEachValue
5419 dl 1.119 (ConcurrentHashMap<K,V> map,
5420 dl 1.153 Function<? super V, ? extends U> transformer,
5421 dl 1.171 Consumer<? super U> action) {
5422 dl 1.119 if (transformer == null || action == null)
5423     throw new NullPointerException();
5424     return new ForEachTransformedValueTask<K,V,U>
5425 dl 1.146 (map, null, -1, transformer, action);
5426 dl 1.119 }
5427    
5428     /**
5429     * Returns a task that when invoked, returns a non-null result
5430     * from applying the given search function on each value, or
5431 dl 1.126 * null if none. Upon success, further element processing is
5432     * suppressed and the results of any other parallel
5433     * invocations of the search function are ignored.
5434 dl 1.119 *
5435     * @param map the map
5436     * @param searchFunction a function returning a non-null
5437     * result on success, else null
5438     * @return the task
5439     */
5440     public static <K,V,U> ForkJoinTask<U> searchValues
5441     (ConcurrentHashMap<K,V> map,
5442 dl 1.153 Function<? super V, ? extends U> searchFunction) {
5443 dl 1.119 if (searchFunction == null) throw new NullPointerException();
5444     return new SearchValuesTask<K,V,U>
5445 dl 1.146 (map, null, -1, searchFunction,
5446 dl 1.119 new AtomicReference<U>());
5447     }
5448    
5449     /**
5450     * Returns a task that when invoked, returns the result of
5451     * accumulating all values using the given reducer to combine
5452     * values, or null if none.
5453     *
5454     * @param map the map
5455     * @param reducer a commutative associative combining function
5456     * @return the task
5457     */
5458     public static <K,V> ForkJoinTask<V> reduceValues
5459     (ConcurrentHashMap<K,V> map,
5460 dl 1.153 BiFunction<? super V, ? super V, ? extends V> reducer) {
5461 dl 1.119 if (reducer == null) throw new NullPointerException();
5462     return new ReduceValuesTask<K,V>
5463 dl 1.130 (map, null, -1, null, reducer);
5464 dl 1.119 }
5465    
5466     /**
5467     * Returns a task that when invoked, returns the result of
5468     * accumulating the given transformation of all values using the
5469     * given reducer to combine values, or null if none.
5470     *
5471     * @param map the map
5472     * @param transformer a function returning the transformation
5473 jsr166 1.135 * for an element, or null if there is no transformation (in
5474 jsr166 1.172 * which case it is not combined)
5475 dl 1.119 * @param reducer a commutative associative combining function
5476     * @return the task
5477     */
5478     public static <K,V,U> ForkJoinTask<U> reduceValues
5479     (ConcurrentHashMap<K,V> map,
5480 dl 1.153 Function<? super V, ? extends U> transformer,
5481     BiFunction<? super U, ? super U, ? extends U> reducer) {
5482 dl 1.119 if (transformer == null || reducer == null)
5483     throw new NullPointerException();
5484     return new MapReduceValuesTask<K,V,U>
5485 dl 1.130 (map, null, -1, null, transformer, reducer);
5486 dl 1.119 }
5487    
5488     /**
5489     * Returns a task that when invoked, returns the result of
5490     * accumulating the given transformation of all values using the
5491     * given reducer to combine values, and the given basis as an
5492     * identity value.
5493     *
5494     * @param map the map
5495     * @param transformer a function returning the transformation
5496     * for an element
5497     * @param basis the identity (initial default value) for the reduction
5498     * @param reducer a commutative associative combining function
5499     * @return the task
5500     */
5501     public static <K,V> ForkJoinTask<Double> reduceValuesToDouble
5502     (ConcurrentHashMap<K,V> map,
5503 dl 1.171 ToDoubleFunction<? super V> transformer,
5504 dl 1.119 double basis,
5505 dl 1.153 DoubleBinaryOperator reducer) {
5506 dl 1.119 if (transformer == null || reducer == null)
5507     throw new NullPointerException();
5508     return new MapReduceValuesToDoubleTask<K,V>
5509 dl 1.130 (map, null, -1, null, transformer, basis, reducer);
5510 dl 1.119 }
5511    
5512     /**
5513     * Returns a task that when invoked, returns the result of
5514     * accumulating the given transformation of all values using the
5515     * given reducer to combine values, and the given basis as an
5516     * identity value.
5517     *
5518     * @param map the map
5519     * @param transformer a function returning the transformation
5520     * for an element
5521     * @param basis the identity (initial default value) for the reduction
5522     * @param reducer a commutative associative combining function
5523     * @return the task
5524     */
5525     public static <K,V> ForkJoinTask<Long> reduceValuesToLong
5526     (ConcurrentHashMap<K,V> map,
5527 dl 1.171 ToLongFunction<? super V> transformer,
5528 dl 1.119 long basis,
5529 dl 1.153 LongBinaryOperator reducer) {
5530 dl 1.119 if (transformer == null || reducer == null)
5531     throw new NullPointerException();
5532     return new MapReduceValuesToLongTask<K,V>
5533 dl 1.130 (map, null, -1, null, transformer, basis, reducer);
5534 dl 1.119 }
5535    
5536     /**
5537     * Returns a task that when invoked, returns the result of
5538     * accumulating the given transformation of all values using the
5539     * given reducer to combine values, and the given basis as an
5540     * identity value.
5541     *
5542     * @param map the map
5543     * @param transformer a function returning the transformation
5544     * for an element
5545     * @param basis the identity (initial default value) for the reduction
5546     * @param reducer a commutative associative combining function
5547     * @return the task
5548     */
5549     public static <K,V> ForkJoinTask<Integer> reduceValuesToInt
5550     (ConcurrentHashMap<K,V> map,
5551 dl 1.171 ToIntFunction<? super V> transformer,
5552 dl 1.119 int basis,
5553 dl 1.153 IntBinaryOperator reducer) {
5554 dl 1.119 if (transformer == null || reducer == null)
5555     throw new NullPointerException();
5556     return new MapReduceValuesToIntTask<K,V>
5557 dl 1.130 (map, null, -1, null, transformer, basis, reducer);
5558 dl 1.119 }
5559    
5560     /**
5561     * Returns a task that when invoked, performs the given action
5562 jsr166 1.123 * for each entry.
5563 dl 1.119 *
5564     * @param map the map
5565     * @param action the action
5566 jsr166 1.173 * @return the task
5567 dl 1.119 */
5568 jsr166 1.120 public static <K,V> ForkJoinTask<Void> forEachEntry
5569 dl 1.119 (ConcurrentHashMap<K,V> map,
5570 dl 1.171 Consumer<? super Map.Entry<K,V>> action) {
5571 dl 1.119 if (action == null) throw new NullPointerException();
5572 dl 1.146 return new ForEachEntryTask<K,V>(map, null, -1, action);
5573 dl 1.119 }
5574    
5575     /**
5576     * Returns a task that when invoked, performs the given action
5577 jsr166 1.123 * for each non-null transformation of each entry.
5578 dl 1.119 *
5579     * @param map the map
5580     * @param transformer a function returning the transformation
5581 jsr166 1.135 * for an element, or null if there is no transformation (in
5582 jsr166 1.134 * which case the action is not applied)
5583 dl 1.119 * @param action the action
5584 jsr166 1.173 * @return the task
5585 dl 1.119 */
5586 jsr166 1.120 public static <K,V,U> ForkJoinTask<Void> forEachEntry
5587 dl 1.119 (ConcurrentHashMap<K,V> map,
5588 dl 1.153 Function<Map.Entry<K,V>, ? extends U> transformer,
5589 dl 1.171 Consumer<? super U> action) {
5590 dl 1.119 if (transformer == null || action == null)
5591     throw new NullPointerException();
5592     return new ForEachTransformedEntryTask<K,V,U>
5593 dl 1.146 (map, null, -1, transformer, action);
5594 dl 1.119 }
5595    
5596     /**
5597     * Returns a task that when invoked, returns a non-null result
5598     * from applying the given search function on each entry, or
5599 dl 1.126 * null if none. Upon success, further element processing is
5600     * suppressed and the results of any other parallel
5601     * invocations of the search function are ignored.
5602 dl 1.119 *
5603     * @param map the map
5604     * @param searchFunction a function returning a non-null
5605     * result on success, else null
5606     * @return the task
5607     */
5608     public static <K,V,U> ForkJoinTask<U> searchEntries
5609     (ConcurrentHashMap<K,V> map,
5610 dl 1.153 Function<Map.Entry<K,V>, ? extends U> searchFunction) {
5611 dl 1.119 if (searchFunction == null) throw new NullPointerException();
5612     return new SearchEntriesTask<K,V,U>
5613 dl 1.146 (map, null, -1, searchFunction,
5614 dl 1.119 new AtomicReference<U>());
5615     }
5616    
5617     /**
5618     * Returns a task that when invoked, returns the result of
5619     * accumulating all entries using the given reducer to combine
5620     * values, or null if none.
5621     *
5622     * @param map the map
5623     * @param reducer a commutative associative combining function
5624     * @return the task
5625     */
5626     public static <K,V> ForkJoinTask<Map.Entry<K,V>> reduceEntries
5627     (ConcurrentHashMap<K,V> map,
5628 dl 1.153 BiFunction<Map.Entry<K,V>, Map.Entry<K,V>, ? extends Map.Entry<K,V>> reducer) {
5629 dl 1.119 if (reducer == null) throw new NullPointerException();
5630     return new ReduceEntriesTask<K,V>
5631 dl 1.130 (map, null, -1, null, reducer);
5632 dl 1.119 }
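
A sketch of the plain entry reduction, same hypothetical setup; the reducer keeps whichever entry carries the larger value, and the task yields null only for an empty map.

BiFunction<Map.Entry<String,Long>, Map.Entry<String,Long>, Map.Entry<String,Long>> maxByValue =
    (a, b) -> a.getValue() >= b.getValue() ? a : b;
ForkJoinTask<Map.Entry<String,Long>> reduce =
    ConcurrentHashMap.ForkJoinTasks.reduceEntries(map, maxByValue);
Map.Entry<String,Long> largest = ForkJoinPool.commonPool().invoke(reduce);
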
5633    
5634     /**
5635     * Returns a task that when invoked, returns the result of
5636     * accumulating the given transformation of all entries using the
5637     * given reducer to combine values, or null if none.
5638     *
5639     * @param map the map
5640     * @param transformer a function returning the transformation
5641 jsr166 1.135 * for an element, or null if there is no transformation for
5642 jsr166 1.172 * that element (in which case it is not combined)
5643 dl 1.119 * @param reducer a commutative associative combining function
5644     * @return the task
5645     */
5646     public static <K,V,U> ForkJoinTask<U> reduceEntries
5647     (ConcurrentHashMap<K,V> map,
5648 dl 1.153 Function<Map.Entry<K,V>, ? extends U> transformer,
5649     BiFunction<? super U, ? super U, ? extends U> reducer) {
5650 dl 1.119 if (transformer == null || reducer == null)
5651     throw new NullPointerException();
5652     return new MapReduceEntriesTask<K,V,U>
5653 dl 1.130 (map, null, -1, null, transformer, reducer);
5654 dl 1.119 }
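
A sketch of the transform-then-reduce variant, same hypothetical setup; each entry is mapped to a Long weight and the weights are summed. As the javadoc above notes, the reducer should be commutative and associative, which addition is.

Function<Map.Entry<String,Long>, Long> weight =
    e -> e.getKey().length() * e.getValue();        // int * long, boxed to Long
BiFunction<Long, Long, Long> plus = (a, b) -> a + b;
ForkJoinTask<Long> total =
    ConcurrentHashMap.ForkJoinTasks.reduceEntries(map, weight, plus);
Long sum = ForkJoinPool.commonPool().invoke(total); // null only if the map is empty
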
5655    
5656     /**
5657     * Returns a task that when invoked, returns the result of
5658     * accumulating the given transformation of all entries using the
5659     * given reducer to combine values, and the given basis as an
5660     * identity value.
5661     *
5662     * @param map the map
5663     * @param transformer a function returning the transformation
5664     * for an element
5665     * @param basis the identity (initial default value) for the reduction
5666     * @param reducer a commutative associative combining function
5667     * @return the task
5668     */
5669     public static <K,V> ForkJoinTask<Double> reduceEntriesToDouble
5670     (ConcurrentHashMap<K,V> map,
5671 dl 1.171 ToDoubleFunction<Map.Entry<K,V>> transformer,
5672 dl 1.119 double basis,
5673 dl 1.153 DoubleBinaryOperator reducer) {
5674 dl 1.119 if (transformer == null || reducer == null)
5675     throw new NullPointerException();
5676     return new MapReduceEntriesToDoubleTask<K,V>
5677 dl 1.130 (map, null, -1, null, transformer, basis, reducer);
5678 dl 1.119 }
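
A sketch of the double-valued specialization, same hypothetical setup; unlike the object reductions above, an empty map yields the basis rather than null.

ToDoubleFunction<Map.Entry<String,Long>> toDouble = e -> e.getValue().doubleValue();
ForkJoinTask<Double> sumTask =
    ConcurrentHashMap.ForkJoinTasks.reduceEntriesToDouble(map, toDouble, 0.0, (a, b) -> a + b);
double sum = ForkJoinPool.commonPool().invoke(sumTask); // 5.0 for the sample data
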
5679    
5680     /**
5681     * Returns a task that when invoked, returns the result of
5682     * accumulating the given transformation of all entries using the
5683     * given reducer to combine values, and the given basis as an
5684     * identity value.
5685     *
5686     * @param map the map
5687     * @param transformer a function returning the transformation
5688     * for an element
5689     * @param basis the identity (initial default value) for the reduction
5690     * @param reducer a commutative associative combining function
5691     * @return the task
5692     */
5693     public static <K,V> ForkJoinTask<Long> reduceEntriesToLong
5694     (ConcurrentHashMap<K,V> map,
5695 dl 1.171 ToLongFunction<Map.Entry<K,V>> transformer,
5696 dl 1.119 long basis,
5697 dl 1.153 LongBinaryOperator reducer) {
5698 dl 1.119 if (transformer == null || reducer == null)
5699     throw new NullPointerException();
5700     return new MapReduceEntriesToLongTask<K,V>
5701 dl 1.130 (map, null, -1, null, transformer, basis, reducer);
5702 dl 1.119 }
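
The long-valued counterpart, same hypothetical setup.

ToLongFunction<Map.Entry<String,Long>> toLong = e -> e.getValue();
ForkJoinTask<Long> sumTask =
    ConcurrentHashMap.ForkJoinTasks.reduceEntriesToLong(map, toLong, 0L, (a, b) -> a + b);
long sum = ForkJoinPool.commonPool().invoke(sumTask); // 5L for the sample data
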
5703    
5704     /**
5705     * Returns a task that when invoked, returns the result of
5706     * accumulating the given transformation of all entries using the
5707     * given reducer to combine values, and the given basis as an
5708     * identity value.
5709     *
5710     * @param map the map
5711     * @param transformer a function returning the transformation
5712     * for an element
5713     * @param basis the identity (initial default value) for the reduction
5714     * @param reducer a commutative associative combining function
5715     * @return the task
5716     */
5717     public static <K,V> ForkJoinTask<Integer> reduceEntriesToInt
5718     (ConcurrentHashMap<K,V> map,
5719 dl 1.171 ToIntFunction<Map.Entry<K,V>> transformer,
5720 dl 1.119 int basis,
5721 dl 1.153 IntBinaryOperator reducer) {
5722 dl 1.119 if (transformer == null || reducer == null)
5723     throw new NullPointerException();
5724     return new MapReduceEntriesToIntTask<K,V>
5725 dl 1.130 (map, null, -1, null, transformer, basis, reducer);
5726 dl 1.119 }
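
And the int-valued counterpart, here totaling key lengths, same hypothetical setup.

ToIntFunction<Map.Entry<String,Long>> keyLength = e -> e.getKey().length();
ForkJoinTask<Integer> lengthTask =
    ConcurrentHashMap.ForkJoinTasks.reduceEntriesToInt(map, keyLength, 0, (a, b) -> a + b);
int totalKeyLength = ForkJoinPool.commonPool().invoke(lengthTask); // 2 for keys "a" and "b"
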
5727     }
5728    
5729     // -------------------------------------------------------
5730    
5731     /*
5732     * Task classes. Coded in a regular but ugly format/style to
5733     * simplify checks that each variant differs in the right way from
5734 dl 1.149 * others. The null screenings exist because compilers cannot tell
5735     * that we've already null-checked task arguments, so we force the
5736     * simplest hoisted bypass to help avoid convoluted traps.
5737 dl 1.119 */
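
To make the shared shape of the task classes below easier to follow, here is a hypothetical, self-contained analogue that sums a long[] with the same idiom: fork right-hand subtasks while splitting, process the local portion, then merge completed siblings in the firstComplete/nextComplete loop. It only mirrors the structure; the real classes split a map Traverser rather than an index range, and SumTaskSketch is not part of this file.

import java.util.concurrent.CountedCompleter;
import java.util.concurrent.ForkJoinPool;

public class SumTaskSketch {
    static final class SumTask extends CountedCompleter<Long> {
        final long[] a; final int lo, hi;
        long result;
        SumTask rights, nextRight;                  // plays the role of the rights/nextRight chains below
        SumTask(CountedCompleter<?> parent, SumTask nextRight, long[] a, int lo, int hi) {
            super(parent);
            this.nextRight = nextRight; this.a = a; this.lo = lo; this.hi = hi;
        }
        public Long getRawResult() { return result; }
        public void compute() {
            int l = lo, h = hi;
            while (h - l > 1024) {                  // analogue of the preSplit() loop
                int mid = (l + h) >>> 1;
                addToPendingCount(1);
                (rights = new SumTask(this, rights, a, mid, h)).fork();
                h = mid;
            }
            long r = 0L;
            for (int i = l; i < h; ++i)             // analogue of the advance loop
                r += a[i];
            result = r;
            // merge completed right-hand siblings, exactly as the map tasks do
            for (CountedCompleter<?> c = firstComplete(); c != null; c = c.nextComplete()) {
                SumTask t = (SumTask)c, s = t.rights;
                while (s != null) {
                    t.result += s.result;
                    s = t.rights = s.nextRight;
                }
            }
        }
    }

    public static void main(String[] args) {
        long[] data = new long[100_000];
        java.util.Arrays.fill(data, 1L);
        long sum = new ForkJoinPool().invoke(new SumTask(null, null, data, 0, data.length));
        System.out.println(sum);                    // 100000
    }
}
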
5738    
5739 dl 1.128 @SuppressWarnings("serial") static final class ForEachKeyTask<K,V>
5740 dl 1.146 extends Traverser<K,V,Void> {
5741 dl 1.171 final Consumer<? super K> action;
5742 dl 1.119 ForEachKeyTask
5743 dl 1.146 (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
5744 dl 1.171 Consumer<? super K> action) {
5745 dl 1.146 super(m, p, b);
5746 dl 1.119 this.action = action;
5747     }
5748 jsr166 1.168 public final void compute() {
5749 dl 1.171 final Consumer<? super K> action;
5750 dl 1.149 if ((action = this.action) != null) {
5751     for (int b; (b = preSplit()) > 0;)
5752     new ForEachKeyTask<K,V>(map, this, b, action).fork();
5753 dl 1.192 forEachKey(action);
5754 dl 1.149 propagateCompletion();
5755     }
5756 dl 1.119 }
5757     }
5758    
5759 dl 1.128 @SuppressWarnings("serial") static final class ForEachValueTask<K,V>
5760 dl 1.146 extends Traverser<K,V,Void> {
5761 dl 1.171 final Consumer<? super V> action;
5762 dl 1.119 ForEachValueTask
5763 dl 1.146 (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
5764 dl 1.171 Consumer<? super V> action) {
5765 dl 1.146 super(m, p, b);
5766 dl 1.119 this.action = action;
5767     }
5768 jsr166 1.168 public final void compute() {
5769 dl 1.171 final Consumer<? super V> action;
5770 dl 1.149 if ((action = this.action) != null) {
5771     for (int b; (b = preSplit()) > 0;)
5772     new ForEachValueTask<K,V>(map, this, b, action).fork();
5773 dl 1.192 forEachValue(action);
5774 dl 1.149 propagateCompletion();
5775     }
5776 dl 1.119 }
5777     }
5778    
5779 dl 1.128 @SuppressWarnings("serial") static final class ForEachEntryTask<K,V>
5780 dl 1.146 extends Traverser<K,V,Void> {
5781 dl 1.171 final Consumer<? super Entry<K,V>> action;
5782 dl 1.119 ForEachEntryTask
5783 dl 1.146 (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
5784 dl 1.171 Consumer<? super Entry<K,V>> action) {
5785 dl 1.146 super(m, p, b);
5786 dl 1.119 this.action = action;
5787     }
5788 jsr166 1.168 public final void compute() {
5789 dl 1.171 final Consumer<? super Entry<K,V>> action;
5790 dl 1.149 if ((action = this.action) != null) {
5791     for (int b; (b = preSplit()) > 0;)
5792     new ForEachEntryTask<K,V>(map, this, b, action).fork();
5793 dl 1.151 V v;
5794 dl 1.191 while ((v = advanceValue()) != null)
5795 jsr166 1.168 action.accept(entryFor(nextKey, v));
5796 dl 1.149 propagateCompletion();
5797     }
5798 dl 1.119 }
5799     }
5800    
5801 dl 1.128 @SuppressWarnings("serial") static final class ForEachMappingTask<K,V>
5802 dl 1.146 extends Traverser<K,V,Void> {
5803 dl 1.171 final BiConsumer<? super K, ? super V> action;
5804 dl 1.119 ForEachMappingTask
5805 dl 1.146 (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
5806 dl 1.171 BiConsumer<? super K,? super V> action) {
5807 dl 1.146 super(m, p, b);
5808 dl 1.119 this.action = action;
5809     }
5810 jsr166 1.168 public final void compute() {
5811 dl 1.171 final BiConsumer<? super K, ? super V> action;
5812 dl 1.149 if ((action = this.action) != null) {
5813     for (int b; (b = preSplit()) > 0;)
5814     new ForEachMappingTask<K,V>(map, this, b, action).fork();
5815 dl 1.151 V v;
5816 dl 1.191 while ((v = advanceValue()) != null)
5817 jsr166 1.168 action.accept(nextKey, v);
5818 dl 1.149 propagateCompletion();
5819     }
5820 dl 1.119 }
5821     }
5822    
5823 dl 1.128 @SuppressWarnings("serial") static final class ForEachTransformedKeyTask<K,V,U>
5824 dl 1.146 extends Traverser<K,V,Void> {
5825 dl 1.153 final Function<? super K, ? extends U> transformer;
5826 dl 1.171 final Consumer<? super U> action;
5827 dl 1.119 ForEachTransformedKeyTask
5828 dl 1.146 (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
5829 dl 1.171 Function<? super K, ? extends U> transformer, Consumer<? super U> action) {
5830 dl 1.146 super(m, p, b);
5831     this.transformer = transformer; this.action = action;
5832     }
5833 jsr166 1.168 public final void compute() {
5834 dl 1.153 final Function<? super K, ? extends U> transformer;
5835 dl 1.171 final Consumer<? super U> action;
5836 dl 1.149 if ((transformer = this.transformer) != null &&
5837     (action = this.action) != null) {
5838     for (int b; (b = preSplit()) > 0;)
5839     new ForEachTransformedKeyTask<K,V,U>
5840     (map, this, b, transformer, action).fork();
5841 dl 1.191 K k; U u;
5842     while ((k = advanceKey()) != null) {
5843     if ((u = transformer.apply(k)) != null)
5844 dl 1.153 action.accept(u);
5845 dl 1.149 }
5846     propagateCompletion();
5847 dl 1.119 }
5848     }
5849     }
5850    
5851 dl 1.128 @SuppressWarnings("serial") static final class ForEachTransformedValueTask<K,V,U>
5852 dl 1.146 extends Traverser<K,V,Void> {
5853 dl 1.153 final Function<? super V, ? extends U> transformer;
5854 dl 1.171 final Consumer<? super U> action;
5855 dl 1.119 ForEachTransformedValueTask
5856 dl 1.146 (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
5857 dl 1.171 Function<? super V, ? extends U> transformer, Consumer<? super U> action) {
5858 dl 1.146 super(m, p, b);
5859     this.transformer = transformer; this.action = action;
5860     }
5861 jsr166 1.168 public final void compute() {
5862 dl 1.153 final Function<? super V, ? extends U> transformer;
5863 dl 1.171 final Consumer<? super U> action;
5864 dl 1.149 if ((transformer = this.transformer) != null &&
5865     (action = this.action) != null) {
5866     for (int b; (b = preSplit()) > 0;)
5867     new ForEachTransformedValueTask<K,V,U>
5868     (map, this, b, transformer, action).fork();
5869 dl 1.151 V v; U u;
5870 dl 1.191 while ((v = advanceValue()) != null) {
5871 dl 1.151 if ((u = transformer.apply(v)) != null)
5872 dl 1.153 action.accept(u);
5873 dl 1.149 }
5874     propagateCompletion();
5875 dl 1.119 }
5876     }
5877 tim 1.1 }
5878    
5879 dl 1.128 @SuppressWarnings("serial") static final class ForEachTransformedEntryTask<K,V,U>
5880 dl 1.146 extends Traverser<K,V,Void> {
5881 dl 1.153 final Function<Map.Entry<K,V>, ? extends U> transformer;
5882 dl 1.171 final Consumer<? super U> action;
5883 dl 1.119 ForEachTransformedEntryTask
5884 dl 1.146 (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
5885 dl 1.171 Function<Map.Entry<K,V>, ? extends U> transformer, Consumer<? super U> action) {
5886 dl 1.146 super(m, p, b);
5887     this.transformer = transformer; this.action = action;
5888     }
5889 jsr166 1.168 public final void compute() {
5890 dl 1.153 final Function<Map.Entry<K,V>, ? extends U> transformer;
5891 dl 1.171 final Consumer<? super U> action;
5892 dl 1.149 if ((transformer = this.transformer) != null &&
5893     (action = this.action) != null) {
5894     for (int b; (b = preSplit()) > 0;)
5895     new ForEachTransformedEntryTask<K,V,U>
5896     (map, this, b, transformer, action).fork();
5897 dl 1.151 V v; U u;
5898 dl 1.191 while ((v = advanceValue()) != null) {
5899 jsr166 1.168 if ((u = transformer.apply(entryFor(nextKey,
5900 dl 1.151 v))) != null)
5901 dl 1.153 action.accept(u);
5902 dl 1.149 }
5903     propagateCompletion();
5904 dl 1.119 }
5905     }
5906 tim 1.1 }
5907    
5908 dl 1.128 @SuppressWarnings("serial") static final class ForEachTransformedMappingTask<K,V,U>
5909 dl 1.146 extends Traverser<K,V,Void> {
5910 dl 1.153 final BiFunction<? super K, ? super V, ? extends U> transformer;
5911 dl 1.171 final Consumer<? super U> action;
5912 dl 1.119 ForEachTransformedMappingTask
5913 dl 1.146 (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
5914 dl 1.153 BiFunction<? super K, ? super V, ? extends U> transformer,
5915 dl 1.171 Consumer<? super U> action) {
5916 dl 1.146 super(m, p, b);
5917     this.transformer = transformer; this.action = action;
5918 dl 1.119 }
5919 jsr166 1.168 public final void compute() {
5920 dl 1.153 final BiFunction<? super K, ? super V, ? extends U> transformer;
5921 dl 1.171 final Consumer<? super U> action;
5922 dl 1.149 if ((transformer = this.transformer) != null &&
5923     (action = this.action) != null) {
5924     for (int b; (b = preSplit()) > 0;)
5925     new ForEachTransformedMappingTask<K,V,U>
5926     (map, this, b, transformer, action).fork();
5927 dl 1.151 V v; U u;
5928 dl 1.191 while ((v = advanceValue()) != null) {
5929 jsr166 1.168 if ((u = transformer.apply(nextKey, v)) != null)
5930 dl 1.153 action.accept(u);
5931 dl 1.149 }
5932     propagateCompletion();
5933 dl 1.119 }
5934     }
5935 tim 1.1 }
5936    
5937 dl 1.128 @SuppressWarnings("serial") static final class SearchKeysTask<K,V,U>
5938 dl 1.146 extends Traverser<K,V,U> {
5939 dl 1.153 final Function<? super K, ? extends U> searchFunction;
5940 dl 1.119 final AtomicReference<U> result;
5941     SearchKeysTask
5942 dl 1.146 (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
5943 dl 1.153 Function<? super K, ? extends U> searchFunction,
5944 dl 1.119 AtomicReference<U> result) {
5945 dl 1.146 super(m, p, b);
5946 dl 1.119 this.searchFunction = searchFunction; this.result = result;
5947     }
5948 dl 1.146 public final U getRawResult() { return result.get(); }
5949 jsr166 1.168 public final void compute() {
5950 dl 1.153 final Function<? super K, ? extends U> searchFunction;
5951 dl 1.146 final AtomicReference<U> result;
5952 dl 1.149 if ((searchFunction = this.searchFunction) != null &&
5953     (result = this.result) != null) {
5954     for (int b;;) {
5955     if (result.get() != null)
5956     return;
5957     if ((b = preSplit()) <= 0)
5958     break;
5959     new SearchKeysTask<K,V,U>
5960     (map, this, b, searchFunction, result).fork();
5961 dl 1.128 }
5962 dl 1.149 while (result.get() == null) {
5963 dl 1.191 K k; U u;
5964     if ((k = advanceKey()) == null) {
5965 dl 1.149 propagateCompletion();
5966     break;
5967     }
5968 dl 1.191 if ((u = searchFunction.apply(k)) != null) {
5969 dl 1.149 if (result.compareAndSet(null, u))
5970     quietlyCompleteRoot();
5971     break;
5972     }
5973 dl 1.119 }
5974     }
5975     }
5976 tim 1.1 }
5977    
5978 dl 1.128 @SuppressWarnings("serial") static final class SearchValuesTask<K,V,U>
5979 dl 1.146 extends Traverser<K,V,U> {
5980 dl 1.153 final Function<? super V, ? extends U> searchFunction;
5981 dl 1.119 final AtomicReference<U> result;
5982     SearchValuesTask
5983 dl 1.146 (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
5984 dl 1.153 Function<? super V, ? extends U> searchFunction,
5985 dl 1.119 AtomicReference<U> result) {
5986 dl 1.146 super(m, p, b);
5987 dl 1.119 this.searchFunction = searchFunction; this.result = result;
5988     }
5989 dl 1.146 public final U getRawResult() { return result.get(); }
5990 jsr166 1.168 public final void compute() {
5991 dl 1.153 final Function<? super V, ? extends U> searchFunction;
5992 dl 1.146 final AtomicReference<U> result;
5993 dl 1.149 if ((searchFunction = this.searchFunction) != null &&
5994     (result = this.result) != null) {
5995     for (int b;;) {
5996     if (result.get() != null)
5997     return;
5998     if ((b = preSplit()) <= 0)
5999     break;
6000     new SearchValuesTask<K,V,U>
6001     (map, this, b, searchFunction, result).fork();
6002 dl 1.128 }
6003 dl 1.149 while (result.get() == null) {
6004 dl 1.151 V v; U u;
6005 dl 1.191 if ((v = advanceValue()) == null) {
6006 dl 1.149 propagateCompletion();
6007     break;
6008     }
6009 dl 1.151 if ((u = searchFunction.apply(v)) != null) {
6010 dl 1.149 if (result.compareAndSet(null, u))
6011     quietlyCompleteRoot();
6012     break;
6013     }
6014 dl 1.119 }
6015     }
6016     }
6017     }
6018 tim 1.11
6019 dl 1.128 @SuppressWarnings("serial") static final class SearchEntriesTask<K,V,U>
6020 dl 1.146 extends Traverser<K,V,U> {
6021 dl 1.153 final Function<Entry<K,V>, ? extends U> searchFunction;
6022 dl 1.119 final AtomicReference<U> result;
6023     SearchEntriesTask
6024 dl 1.146 (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
6025 dl 1.153 Function<Entry<K,V>, ? extends U> searchFunction,
6026 dl 1.119 AtomicReference<U> result) {
6027 dl 1.146 super(m, p, b);
6028 dl 1.119 this.searchFunction = searchFunction; this.result = result;
6029     }
6030 dl 1.146 public final U getRawResult() { return result.get(); }
6031 jsr166 1.168 public final void compute() {
6032 dl 1.153 final Function<Entry<K,V>, ? extends U> searchFunction;
6033 dl 1.146 final AtomicReference<U> result;
6034 dl 1.149 if ((searchFunction = this.searchFunction) != null &&
6035     (result = this.result) != null) {
6036     for (int b;;) {
6037     if (result.get() != null)
6038     return;
6039     if ((b = preSplit()) <= 0)
6040     break;
6041     new SearchEntriesTask<K,V,U>
6042     (map, this, b, searchFunction, result).fork();
6043 dl 1.128 }
6044 dl 1.149 while (result.get() == null) {
6045 dl 1.151 V v; U u;
6046 dl 1.191 if ((v = advanceValue()) == null) {
6047 dl 1.149 propagateCompletion();
6048     break;
6049     }
6050 jsr166 1.168 if ((u = searchFunction.apply(entryFor(nextKey,
6051 dl 1.151 v))) != null) {
6052 dl 1.149 if (result.compareAndSet(null, u))
6053     quietlyCompleteRoot();
6054     return;
6055     }
6056 dl 1.119 }
6057     }
6058     }
6059     }
6060 tim 1.1
6061 dl 1.128 @SuppressWarnings("serial") static final class SearchMappingsTask<K,V,U>
6062 dl 1.146 extends Traverser<K,V,U> {
6063 dl 1.153 final BiFunction<? super K, ? super V, ? extends U> searchFunction;
6064 dl 1.119 final AtomicReference<U> result;
6065     SearchMappingsTask
6066 dl 1.146 (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
6067 dl 1.153 BiFunction<? super K, ? super V, ? extends U> searchFunction,
6068 dl 1.119 AtomicReference<U> result) {
6069 dl 1.146 super(m, p, b);
6070 dl 1.119 this.searchFunction = searchFunction; this.result = result;
6071     }
6072 dl 1.146 public final U getRawResult() { return result.get(); }
6073 jsr166 1.168 public final void compute() {
6074 dl 1.153 final BiFunction<? super K, ? super V, ? extends U> searchFunction;
6075 dl 1.146 final AtomicReference<U> result;
6076 dl 1.149 if ((searchFunction = this.searchFunction) != null &&
6077     (result = this.result) != null) {
6078     for (int b;;) {
6079     if (result.get() != null)
6080     return;
6081     if ((b = preSplit()) <= 0)
6082     break;
6083     new SearchMappingsTask<K,V,U>
6084     (map, this, b, searchFunction, result).fork();
6085 dl 1.128 }
6086 dl 1.149 while (result.get() == null) {
6087 dl 1.151 V v; U u;
6088 dl 1.191 if ((v = advanceValue()) == null) {
6089 dl 1.149 propagateCompletion();
6090     break;
6091     }
6092 jsr166 1.168 if ((u = searchFunction.apply(nextKey, v)) != null) {
6093 dl 1.149 if (result.compareAndSet(null, u))
6094     quietlyCompleteRoot();
6095     break;
6096     }
6097 dl 1.119 }
6098     }
6099 tim 1.1 }
6100 dl 1.119 }
6101 tim 1.1
6102 dl 1.128 @SuppressWarnings("serial") static final class ReduceKeysTask<K,V>
6103 dl 1.146 extends Traverser<K,V,K> {
6104 dl 1.153 final BiFunction<? super K, ? super K, ? extends K> reducer;
6105 dl 1.119 K result;
6106 dl 1.128 ReduceKeysTask<K,V> rights, nextRight;
6107 dl 1.119 ReduceKeysTask
6108 dl 1.146 (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
6109 dl 1.128 ReduceKeysTask<K,V> nextRight,
6110 dl 1.153 BiFunction<? super K, ? super K, ? extends K> reducer) {
6111 dl 1.130 super(m, p, b); this.nextRight = nextRight;
6112 dl 1.119 this.reducer = reducer;
6113     }
6114 dl 1.146 public final K getRawResult() { return result; }
6115     @SuppressWarnings("unchecked") public final void compute() {
6116 dl 1.153 final BiFunction<? super K, ? super K, ? extends K> reducer;
6117 dl 1.149 if ((reducer = this.reducer) != null) {
6118     for (int b; (b = preSplit()) > 0;)
6119     (rights = new ReduceKeysTask<K,V>
6120     (map, this, b, rights, reducer)).fork();
6121 dl 1.191 K u, r = null;
6122     while ((u = advanceKey()) != null) {
6123 jsr166 1.154 r = (r == null) ? u : reducer.apply(r, u);
6124 dl 1.149 }
6125     result = r;
6126     CountedCompleter<?> c;
6127     for (c = firstComplete(); c != null; c = c.nextComplete()) {
6128     ReduceKeysTask<K,V>
6129     t = (ReduceKeysTask<K,V>)c,
6130     s = t.rights;
6131     while (s != null) {
6132     K tr, sr;
6133     if ((sr = s.result) != null)
6134     t.result = (((tr = t.result) == null) ? sr :
6135     reducer.apply(tr, sr));
6136     s = t.rights = s.nextRight;
6137     }
6138 dl 1.99 }
6139 dl 1.138 }
6140 tim 1.1 }
6141 dl 1.119 }
6142 tim 1.1
6143 dl 1.128 @SuppressWarnings("serial") static final class ReduceValuesTask<K,V>
6144 dl 1.146 extends Traverser<K,V,V> {
6145 dl 1.153 final BiFunction<? super V, ? super V, ? extends V> reducer;
6146 dl 1.119 V result;
6147 dl 1.128 ReduceValuesTask<K,V> rights, nextRight;
6148 dl 1.119 ReduceValuesTask
6149 dl 1.146 (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
6150 dl 1.128 ReduceValuesTask<K,V> nextRight,
6151 dl 1.153 BiFunction<? super V, ? super V, ? extends V> reducer) {
6152 dl 1.130 super(m, p, b); this.nextRight = nextRight;
6153 dl 1.119 this.reducer = reducer;
6154     }
6155 dl 1.146 public final V getRawResult() { return result; }
6156     @SuppressWarnings("unchecked") public final void compute() {
6157 dl 1.153 final BiFunction<? super V, ? super V, ? extends V> reducer;
6158 dl 1.149 if ((reducer = this.reducer) != null) {
6159     for (int b; (b = preSplit()) > 0;)
6160     (rights = new ReduceValuesTask<K,V>
6161     (map, this, b, rights, reducer)).fork();
6162 dl 1.153 V r = null, v;
6163 dl 1.191 while ((v = advanceValue()) != null)
6164 dl 1.156 r = (r == null) ? v : reducer.apply(r, v);
6165 dl 1.149 result = r;
6166     CountedCompleter<?> c;
6167     for (c = firstComplete(); c != null; c = c.nextComplete()) {
6168     ReduceValuesTask<K,V>
6169     t = (ReduceValuesTask<K,V>)c,
6170     s = t.rights;
6171     while (s != null) {
6172     V tr, sr;
6173     if ((sr = s.result) != null)
6174     t.result = (((tr = t.result) == null) ? sr :
6175     reducer.apply(tr, sr));
6176     s = t.rights = s.nextRight;
6177     }
6178 dl 1.119 }
6179     }
6180 tim 1.1 }
6181 dl 1.119 }
6182 tim 1.1
6183 dl 1.128 @SuppressWarnings("serial") static final class ReduceEntriesTask<K,V>
6184 dl 1.146 extends Traverser<K,V,Map.Entry<K,V>> {
6185 dl 1.153 final BiFunction<Map.Entry<K,V>, Map.Entry<K,V>, ? extends Map.Entry<K,V>> reducer;
6186 dl 1.119 Map.Entry<K,V> result;
6187 dl 1.128 ReduceEntriesTask<K,V> rights, nextRight;
6188 dl 1.119 ReduceEntriesTask
6189 dl 1.146 (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
6190 dl 1.130 ReduceEntriesTask<K,V> nextRight,
6191 dl 1.153 BiFunction<Map.Entry<K,V>, Map.Entry<K,V>, ? extends Map.Entry<K,V>> reducer) {
6192 dl 1.130 super(m, p, b); this.nextRight = nextRight;
6193 dl 1.119 this.reducer = reducer;
6194     }
6195 dl 1.146 public final Map.Entry<K,V> getRawResult() { return result; }
6196     @SuppressWarnings("unchecked") public final void compute() {
6197 dl 1.153 final BiFunction<Map.Entry<K,V>, Map.Entry<K,V>, ? extends Map.Entry<K,V>> reducer;
6198 dl 1.149 if ((reducer = this.reducer) != null) {
6199     for (int b; (b = preSplit()) > 0;)
6200     (rights = new ReduceEntriesTask<K,V>
6201     (map, this, b, rights, reducer)).fork();
6202     Map.Entry<K,V> r = null;
6203 dl 1.151 V v;
6204 dl 1.191 while ((v = advanceValue()) != null) {
6205 jsr166 1.168 Map.Entry<K,V> u = entryFor(nextKey, v);
6206 dl 1.149 r = (r == null) ? u : reducer.apply(r, u);
6207     }
6208     result = r;
6209     CountedCompleter<?> c;
6210     for (c = firstComplete(); c != null; c = c.nextComplete()) {
6211     ReduceEntriesTask<K,V>
6212     t = (ReduceEntriesTask<K,V>)c,
6213     s = t.rights;
6214     while (s != null) {
6215     Map.Entry<K,V> tr, sr;
6216     if ((sr = s.result) != null)
6217     t.result = (((tr = t.result) == null) ? sr :
6218     reducer.apply(tr, sr));
6219     s = t.rights = s.nextRight;
6220     }
6221 dl 1.119 }
6222 dl 1.138 }
6223 dl 1.119 }
6224     }
6225 dl 1.99
6226 dl 1.128 @SuppressWarnings("serial") static final class MapReduceKeysTask<K,V,U>
6227 dl 1.146 extends Traverser<K,V,U> {
6228 dl 1.153 final Function<? super K, ? extends U> transformer;
6229     final BiFunction<? super U, ? super U, ? extends U> reducer;
6230 dl 1.119 U result;
6231 dl 1.128 MapReduceKeysTask<K,V,U> rights, nextRight;
6232 dl 1.119 MapReduceKeysTask
6233 dl 1.146 (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
6234 dl 1.128 MapReduceKeysTask<K,V,U> nextRight,
6235 dl 1.153 Function<? super K, ? extends U> transformer,
6236     BiFunction<? super U, ? super U, ? extends U> reducer) {
6237 dl 1.130 super(m, p, b); this.nextRight = nextRight;
6238 dl 1.119 this.transformer = transformer;
6239     this.reducer = reducer;
6240     }
6241 dl 1.146 public final U getRawResult() { return result; }
6242     @SuppressWarnings("unchecked") public final void compute() {
6243 dl 1.153 final Function<? super K, ? extends U> transformer;
6244     final BiFunction<? super U, ? super U, ? extends U> reducer;
6245 dl 1.149 if ((transformer = this.transformer) != null &&
6246     (reducer = this.reducer) != null) {
6247     for (int b; (b = preSplit()) > 0;)
6248     (rights = new MapReduceKeysTask<K,V,U>
6249     (map, this, b, rights, transformer, reducer)).fork();
6250 dl 1.191 K k; U r = null, u;
6251     while ((k = advanceKey()) != null) {
6252     if ((u = transformer.apply(k)) != null)
6253 dl 1.149 r = (r == null) ? u : reducer.apply(r, u);
6254     }
6255     result = r;
6256     CountedCompleter<?> c;
6257     for (c = firstComplete(); c != null; c = c.nextComplete()) {
6258     MapReduceKeysTask<K,V,U>
6259     t = (MapReduceKeysTask<K,V,U>)c,
6260     s = t.rights;
6261     while (s != null) {
6262     U tr, sr;
6263     if ((sr = s.result) != null)
6264     t.result = (((tr = t.result) == null) ? sr :
6265     reducer.apply(tr, sr));
6266     s = t.rights = s.nextRight;
6267     }
6268 dl 1.119 }
6269 dl 1.138 }
6270 tim 1.1 }
6271 dl 1.4 }
6272    
6273 dl 1.128 @SuppressWarnings("serial") static final class MapReduceValuesTask<K,V,U>
6274 dl 1.146 extends Traverser<K,V,U> {
6275 dl 1.153 final Function<? super V, ? extends U> transformer;
6276     final BiFunction<? super U, ? super U, ? extends U> reducer;
6277 dl 1.119 U result;
6278 dl 1.128 MapReduceValuesTask<K,V,U> rights, nextRight;
6279 dl 1.119 MapReduceValuesTask
6280 dl 1.146 (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
6281 dl 1.128 MapReduceValuesTask<K,V,U> nextRight,
6282 dl 1.153 Function<? super V, ? extends U> transformer,
6283     BiFunction<? super U, ? super U, ? extends U> reducer) {
6284 dl 1.130 super(m, p, b); this.nextRight = nextRight;
6285 dl 1.119 this.transformer = transformer;
6286     this.reducer = reducer;
6287     }
6288 dl 1.146 public final U getRawResult() { return result; }
6289     @SuppressWarnings("unchecked") public final void compute() {
6290 dl 1.153 final Function<? super V, ? extends U> transformer;
6291     final BiFunction<? super U, ? super U, ? extends U> reducer;
6292 dl 1.149 if ((transformer = this.transformer) != null &&
6293     (reducer = this.reducer) != null) {
6294     for (int b; (b = preSplit()) > 0;)
6295     (rights = new MapReduceValuesTask<K,V,U>
6296     (map, this, b, rights, transformer, reducer)).fork();
6297     U r = null, u;
6298 dl 1.151 V v;
6299 dl 1.191 while ((v = advanceValue()) != null) {
6300 dl 1.151 if ((u = transformer.apply(v)) != null)
6301 dl 1.149 r = (r == null) ? u : reducer.apply(r, u);
6302     }
6303     result = r;
6304     CountedCompleter<?> c;
6305     for (c = firstComplete(); c != null; c = c.nextComplete()) {
6306     MapReduceValuesTask<K,V,U>
6307     t = (MapReduceValuesTask<K,V,U>)c,
6308     s = t.rights;
6309     while (s != null) {
6310     U tr, sr;
6311     if ((sr = s.result) != null)
6312     t.result = (((tr = t.result) == null) ? sr :
6313     reducer.apply(tr, sr));
6314     s = t.rights = s.nextRight;
6315     }
6316 dl 1.119 }
6317     }
6318     }
6319 dl 1.4 }
6320    
6321 dl 1.128 @SuppressWarnings("serial") static final class MapReduceEntriesTask<K,V,U>
6322 dl 1.146 extends Traverser<K,V,U> {
6323 dl 1.153 final Function<Map.Entry<K,V>, ? extends U> transformer;
6324     final BiFunction<? super U, ? super U, ? extends U> reducer;
6325 dl 1.119 U result;
6326 dl 1.128 MapReduceEntriesTask<K,V,U> rights, nextRight;
6327 dl 1.119 MapReduceEntriesTask
6328 dl 1.146 (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
6329 dl 1.128 MapReduceEntriesTask<K,V,U> nextRight,
6330 dl 1.153 Function<Map.Entry<K,V>, ? extends U> transformer,
6331     BiFunction<? super U, ? super U, ? extends U> reducer) {
6332 dl 1.130 super(m, p, b); this.nextRight = nextRight;
6333 dl 1.119 this.transformer = transformer;
6334     this.reducer = reducer;
6335     }
6336 dl 1.146 public final U getRawResult() { return result; }
6337     @SuppressWarnings("unchecked") public final void compute() {
6338 dl 1.153 final Function<Map.Entry<K,V>, ? extends U> transformer;
6339     final BiFunction<? super U, ? super U, ? extends U> reducer;
6340 dl 1.149 if ((transformer = this.transformer) != null &&
6341     (reducer = this.reducer) != null) {
6342     for (int b; (b = preSplit()) > 0;)
6343     (rights = new MapReduceEntriesTask<K,V,U>
6344     (map, this, b, rights, transformer, reducer)).fork();
6345     U r = null, u;
6346 dl 1.151 V v;
6347 dl 1.191 while ((v = advanceValue()) != null) {
6348 jsr166 1.168 if ((u = transformer.apply(entryFor(nextKey,
6349 dl 1.151 v))) != null)
6350 dl 1.149 r = (r == null) ? u : reducer.apply(r, u);
6351     }
6352     result = r;
6353     CountedCompleter<?> c;
6354     for (c = firstComplete(); c != null; c = c.nextComplete()) {
6355     MapReduceEntriesTask<K,V,U>
6356     t = (MapReduceEntriesTask<K,V,U>)c,
6357     s = t.rights;
6358     while (s != null) {
6359     U tr, sr;
6360     if ((sr = s.result) != null)
6361     t.result = (((tr = t.result) == null) ? sr :
6362     reducer.apply(tr, sr));
6363     s = t.rights = s.nextRight;
6364     }
6365 dl 1.119 }
6366 dl 1.138 }
6367 dl 1.119 }
6368 dl 1.4 }
6369 tim 1.1
6370 dl 1.128 @SuppressWarnings("serial") static final class MapReduceMappingsTask<K,V,U>
6371 dl 1.146 extends Traverser<K,V,U> {
6372 dl 1.153 final BiFunction<? super K, ? super V, ? extends U> transformer;
6373     final BiFunction<? super U, ? super U, ? extends U> reducer;
6374 dl 1.119 U result;
6375 dl 1.128 MapReduceMappingsTask<K,V,U> rights, nextRight;
6376 dl 1.119 MapReduceMappingsTask
6377 dl 1.146 (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
6378 dl 1.128 MapReduceMappingsTask<K,V,U> nextRight,
6379 dl 1.153 BiFunction<? super K, ? super V, ? extends U> transformer,
6380     BiFunction<? super U, ? super U, ? extends U> reducer) {
6381 dl 1.130 super(m, p, b); this.nextRight = nextRight;
6382 dl 1.119 this.transformer = transformer;
6383     this.reducer = reducer;
6384     }
6385 dl 1.146 public final U getRawResult() { return result; }
6386     @SuppressWarnings("unchecked") public final void compute() {
6387 dl 1.153 final BiFunction<? super K, ? super V, ? extends U> transformer;
6388     final BiFunction<? super U, ? super U, ? extends U> reducer;
6389 dl 1.149 if ((transformer = this.transformer) != null &&
6390     (reducer = this.reducer) != null) {
6391     for (int b; (b = preSplit()) > 0;)
6392     (rights = new MapReduceMappingsTask<K,V,U>
6393     (map, this, b, rights, transformer, reducer)).fork();
6394     U r = null, u;
6395 dl 1.151 V v;
6396 dl 1.191 while ((v = advanceValue()) != null) {
6397 jsr166 1.168 if ((u = transformer.apply(nextKey, v)) != null)
6398 dl 1.149 r = (r == null) ? u : reducer.apply(r, u);
6399     }
6400     result = r;
6401     CountedCompleter<?> c;
6402     for (c = firstComplete(); c != null; c = c.nextComplete()) {
6403     MapReduceMappingsTask<K,V,U>
6404     t = (MapReduceMappingsTask<K,V,U>)c,
6405     s = t.rights;
6406     while (s != null) {
6407     U tr, sr;
6408     if ((sr = s.result) != null)
6409     t.result = (((tr = t.result) == null) ? sr :
6410     reducer.apply(tr, sr));
6411     s = t.rights = s.nextRight;
6412     }
6413 dl 1.119 }
6414     }
6415     }
6416     }
6417 jsr166 1.114
6418 dl 1.128 @SuppressWarnings("serial") static final class MapReduceKeysToDoubleTask<K,V>
6419 dl 1.146 extends Traverser<K,V,Double> {
6420 dl 1.171 final ToDoubleFunction<? super K> transformer;
6421 dl 1.153 final DoubleBinaryOperator reducer;
6422 dl 1.119 final double basis;
6423     double result;
6424 dl 1.128 MapReduceKeysToDoubleTask<K,V> rights, nextRight;
6425 dl 1.119 MapReduceKeysToDoubleTask
6426 dl 1.146 (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
6427 dl 1.128 MapReduceKeysToDoubleTask<K,V> nextRight,
6428 dl 1.171 ToDoubleFunction<? super K> transformer,
6429 dl 1.119 double basis,
6430 dl 1.153 DoubleBinaryOperator reducer) {
6431 dl 1.130 super(m, p, b); this.nextRight = nextRight;
6432 dl 1.119 this.transformer = transformer;
6433     this.basis = basis; this.reducer = reducer;
6434     }
6435 dl 1.146 public final Double getRawResult() { return result; }
6436     @SuppressWarnings("unchecked") public final void compute() {
6437 dl 1.171 final ToDoubleFunction<? super K> transformer;
6438 dl 1.153 final DoubleBinaryOperator reducer;
6439 dl 1.149 if ((transformer = this.transformer) != null &&
6440     (reducer = this.reducer) != null) {
6441     double r = this.basis;
6442     for (int b; (b = preSplit()) > 0;)
6443     (rights = new MapReduceKeysToDoubleTask<K,V>
6444     (map, this, b, rights, transformer, r, reducer)).fork();
6445 dl 1.191 K k;
6446     while ((k = advanceKey()) != null)
6447     r = reducer.applyAsDouble(r, transformer.applyAsDouble(k));
6448 dl 1.149 result = r;
6449     CountedCompleter<?> c;
6450     for (c = firstComplete(); c != null; c = c.nextComplete()) {
6451     MapReduceKeysToDoubleTask<K,V>
6452     t = (MapReduceKeysToDoubleTask<K,V>)c,
6453     s = t.rights;
6454     while (s != null) {
6455 dl 1.153 t.result = reducer.applyAsDouble(t.result, s.result);
6456 dl 1.149 s = t.rights = s.nextRight;
6457     }
6458 dl 1.119 }
6459 dl 1.138 }
6460 dl 1.79 }
6461 dl 1.119 }
6462 dl 1.79
6463 dl 1.128 @SuppressWarnings("serial") static final class MapReduceValuesToDoubleTask<K,V>
6464 dl 1.146 extends Traverser<K,V,Double> {
6465 dl 1.171 final ToDoubleFunction<? super V> transformer;
6466 dl 1.153 final DoubleBinaryOperator reducer;
6467 dl 1.119 final double basis;
6468     double result;
6469 dl 1.128 MapReduceValuesToDoubleTask<K,V> rights, nextRight;
6470 dl 1.119 MapReduceValuesToDoubleTask
6471 dl 1.146 (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
6472 dl 1.128 MapReduceValuesToDoubleTask<K,V> nextRight,
6473 dl 1.171 ToDoubleFunction<? super V> transformer,
6474 dl 1.119 double basis,
6475 dl 1.153 DoubleBinaryOperator reducer) {
6476 dl 1.130 super(m, p, b); this.nextRight = nextRight;
6477 dl 1.119 this.transformer = transformer;
6478     this.basis = basis; this.reducer = reducer;
6479     }
6480 dl 1.146 public final Double getRawResult() { return result; }
6481     @SuppressWarnings("unchecked") public final void compute() {
6482 dl 1.171 final ToDoubleFunction<? super V> transformer;
6483 dl 1.153 final DoubleBinaryOperator reducer;
6484 dl 1.149 if ((transformer = this.transformer) != null &&
6485     (reducer = this.reducer) != null) {
6486     double r = this.basis;
6487     for (int b; (b = preSplit()) > 0;)
6488     (rights = new MapReduceValuesToDoubleTask<K,V>
6489     (map, this, b, rights, transformer, r, reducer)).fork();
6490 dl 1.151 V v;
6491 dl 1.191 while ((v = advanceValue()) != null)
6492 dl 1.153 r = reducer.applyAsDouble(r, transformer.applyAsDouble(v));
6493 dl 1.149 result = r;
6494     CountedCompleter<?> c;
6495     for (c = firstComplete(); c != null; c = c.nextComplete()) {
6496     MapReduceValuesToDoubleTask<K,V>
6497     t = (MapReduceValuesToDoubleTask<K,V>)c,
6498     s = t.rights;
6499     while (s != null) {
6500 dl 1.153 t.result = reducer.applyAsDouble(t.result, s.result);
6501 dl 1.149 s = t.rights = s.nextRight;
6502     }
6503 dl 1.119 }
6504     }
6505 dl 1.30 }
6506 dl 1.79 }
6507 dl 1.30
6508 dl 1.128 @SuppressWarnings("serial") static final class MapReduceEntriesToDoubleTask<K,V>
6509 dl 1.146 extends Traverser<K,V,Double> {
6510 dl 1.171 final ToDoubleFunction<Map.Entry<K,V>> transformer;
6511 dl 1.153 final DoubleBinaryOperator reducer;
6512 dl 1.119 final double basis;
6513     double result;
6514 dl 1.128 MapReduceEntriesToDoubleTask<K,V> rights, nextRight;
6515 dl 1.119 MapReduceEntriesToDoubleTask
6516 dl 1.146 (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
6517 dl 1.128 MapReduceEntriesToDoubleTask<K,V> nextRight,
6518 dl 1.171 ToDoubleFunction<Map.Entry<K,V>> transformer,
6519 dl 1.119 double basis,
6520 dl 1.153 DoubleBinaryOperator reducer) {
6521 dl 1.130 super(m, p, b); this.nextRight = nextRight;
6522 dl 1.119 this.transformer = transformer;
6523     this.basis = basis; this.reducer = reducer;
6524     }
6525 dl 1.146 public final Double getRawResult() { return result; }
6526     @SuppressWarnings("unchecked") public final void compute() {
6527 dl 1.171 final ToDoubleFunction<Map.Entry<K,V>> transformer;
6528 dl 1.153 final DoubleBinaryOperator reducer;
6529 dl 1.149 if ((transformer = this.transformer) != null &&
6530     (reducer = this.reducer) != null) {
6531     double r = this.basis;
6532     for (int b; (b = preSplit()) > 0;)
6533     (rights = new MapReduceEntriesToDoubleTask<K,V>
6534     (map, this, b, rights, transformer, r, reducer)).fork();
6535 dl 1.151 V v;
6536 dl 1.191 while ((v = advanceValue()) != null)
6537 jsr166 1.168 r = reducer.applyAsDouble(r, transformer.applyAsDouble(entryFor(nextKey,
6538 dl 1.151 v)));
6539 dl 1.149 result = r;
6540     CountedCompleter<?> c;
6541     for (c = firstComplete(); c != null; c = c.nextComplete()) {
6542     MapReduceEntriesToDoubleTask<K,V>
6543     t = (MapReduceEntriesToDoubleTask<K,V>)c,
6544     s = t.rights;
6545     while (s != null) {
6546 dl 1.153 t.result = reducer.applyAsDouble(t.result, s.result);
6547 dl 1.149 s = t.rights = s.nextRight;
6548     }
6549 dl 1.119 }
6550 dl 1.138 }
6551 dl 1.30 }
6552 tim 1.1 }
6553    
6554 dl 1.128 @SuppressWarnings("serial") static final class MapReduceMappingsToDoubleTask<K,V>
6555 dl 1.146 extends Traverser<K,V,Double> {
6556 dl 1.171 final ToDoubleBiFunction<? super K, ? super V> transformer;
6557 dl 1.153 final DoubleBinaryOperator reducer;
6558 dl 1.119 final double basis;
6559     double result;
6560 dl 1.128 MapReduceMappingsToDoubleTask<K,V> rights, nextRight;
6561 dl 1.119 MapReduceMappingsToDoubleTask
6562 dl 1.146 (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
6563 dl 1.128 MapReduceMappingsToDoubleTask<K,V> nextRight,
6564 dl 1.171 ToDoubleBiFunction<? super K, ? super V> transformer,
6565 dl 1.119 double basis,
6566 dl 1.153 DoubleBinaryOperator reducer) {
6567 dl 1.130 super(m, p, b); this.nextRight = nextRight;
6568 dl 1.119 this.transformer = transformer;
6569     this.basis = basis; this.reducer = reducer;
6570     }
6571 dl 1.146 public final Double getRawResult() { return result; }
6572     @SuppressWarnings("unchecked") public final void compute() {
6573 dl 1.171 final ToDoubleBiFunction<? super K, ? super V> transformer;
6574 dl 1.153 final DoubleBinaryOperator reducer;
6575 dl 1.149 if ((transformer = this.transformer) != null &&
6576     (reducer = this.reducer) != null) {
6577     double r = this.basis;
6578     for (int b; (b = preSplit()) > 0;)
6579     (rights = new MapReduceMappingsToDoubleTask<K,V>
6580     (map, this, b, rights, transformer, r, reducer)).fork();
6581 dl 1.151 V v;
6582 dl 1.191 while ((v = advanceValue()) != null)
6583 jsr166 1.168 r = reducer.applyAsDouble(r, transformer.applyAsDouble(nextKey, v));
6584 dl 1.149 result = r;
6585     CountedCompleter<?> c;
6586     for (c = firstComplete(); c != null; c = c.nextComplete()) {
6587     MapReduceMappingsToDoubleTask<K,V>
6588     t = (MapReduceMappingsToDoubleTask<K,V>)c,
6589     s = t.rights;
6590     while (s != null) {
6591 dl 1.153 t.result = reducer.applyAsDouble(t.result, s.result);
6592 dl 1.149 s = t.rights = s.nextRight;
6593     }
6594 dl 1.119 }
6595     }
6596 dl 1.4 }
6597 dl 1.119 }
6598    
6599 dl 1.128 @SuppressWarnings("serial") static final class MapReduceKeysToLongTask<K,V>
6600 dl 1.146 extends Traverser<K,V,Long> {
6601 dl 1.171 final ToLongFunction<? super K> transformer;
6602 dl 1.153 final LongBinaryOperator reducer;
6603 dl 1.119 final long basis;
6604     long result;
6605 dl 1.128 MapReduceKeysToLongTask<K,V> rights, nextRight;
6606 dl 1.119 MapReduceKeysToLongTask
6607 dl 1.146 (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
6608 dl 1.128 MapReduceKeysToLongTask<K,V> nextRight,
6609 dl 1.171 ToLongFunction<? super K> transformer,
6610 dl 1.119 long basis,
6611 dl 1.153 LongBinaryOperator reducer) {
6612 dl 1.130 super(m, p, b); this.nextRight = nextRight;
6613 dl 1.119 this.transformer = transformer;
6614     this.basis = basis; this.reducer = reducer;
6615     }
6616 dl 1.146 public final Long getRawResult() { return result; }
6617     @SuppressWarnings("unchecked") public final void compute() {
6618 dl 1.171 final ToLongFunction<? super K> transformer;
6619 dl 1.153 final LongBinaryOperator reducer;
6620 dl 1.149 if ((transformer = this.transformer) != null &&
6621     (reducer = this.reducer) != null) {
6622     long r = this.basis;
6623     for (int b; (b = preSplit()) > 0;)
6624     (rights = new MapReduceKeysToLongTask<K,V>
6625     (map, this, b, rights, transformer, r, reducer)).fork();
6626 dl 1.191 K k;
6627     while ((k = advanceKey()) != null)
6628     r = reducer.applyAsLong(r, transformer.applyAsLong(k));
6629 dl 1.149 result = r;
6630     CountedCompleter<?> c;
6631     for (c = firstComplete(); c != null; c = c.nextComplete()) {
6632     MapReduceKeysToLongTask<K,V>
6633     t = (MapReduceKeysToLongTask<K,V>)c,
6634     s = t.rights;
6635     while (s != null) {
6636 dl 1.153 t.result = reducer.applyAsLong(t.result, s.result);
6637 dl 1.149 s = t.rights = s.nextRight;
6638     }
6639 dl 1.119 }
6640 dl 1.138 }
6641 dl 1.4 }
6642 dl 1.119 }
6643    
6644 dl 1.128 @SuppressWarnings("serial") static final class MapReduceValuesToLongTask<K,V>
6645 dl 1.146 extends Traverser<K,V,Long> {
6646 dl 1.171 final ToLongFunction<? super V> transformer;
6647 dl 1.153 final LongBinaryOperator reducer;
6648 dl 1.119 final long basis;
6649     long result;
6650 dl 1.128 MapReduceValuesToLongTask<K,V> rights, nextRight;
6651 dl 1.119 MapReduceValuesToLongTask
6652 dl 1.146 (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
6653 dl 1.128 MapReduceValuesToLongTask<K,V> nextRight,
6654 dl 1.171 ToLongFunction<? super V> transformer,
6655 dl 1.119 long basis,
6656 dl 1.153 LongBinaryOperator reducer) {
6657 dl 1.130 super(m, p, b); this.nextRight = nextRight;
6658 dl 1.119 this.transformer = transformer;
6659     this.basis = basis; this.reducer = reducer;
6660     }
6661 dl 1.146 public final Long getRawResult() { return result; }
6662     @SuppressWarnings("unchecked") public final void compute() {
6663 dl 1.171 final ToLongFunction<? super V> transformer;
6664 dl 1.153 final LongBinaryOperator reducer;
6665 dl 1.149 if ((transformer = this.transformer) != null &&
6666     (reducer = this.reducer) != null) {
6667     long r = this.basis;
6668     for (int b; (b = preSplit()) > 0;)
6669     (rights = new MapReduceValuesToLongTask<K,V>
6670     (map, this, b, rights, transformer, r, reducer)).fork();
6671 dl 1.151 V v;
6672 dl 1.191 while ((v = advanceValue()) != null)
6673 dl 1.153 r = reducer.applyAsLong(r, transformer.applyAsLong(v));
6674 dl 1.149 result = r;
6675     CountedCompleter<?> c;
6676     for (c = firstComplete(); c != null; c = c.nextComplete()) {
6677     MapReduceValuesToLongTask<K,V>
6678     t = (MapReduceValuesToLongTask<K,V>)c,
6679     s = t.rights;
6680     while (s != null) {
6681 dl 1.153 t.result = reducer.applyAsLong(t.result, s.result);
6682 dl 1.149 s = t.rights = s.nextRight;
6683     }
6684 dl 1.119 }
6685     }
6686 jsr166 1.95 }
6687 dl 1.119 }
6688    
6689 dl 1.128 @SuppressWarnings("serial") static final class MapReduceEntriesToLongTask<K,V>
6690 dl 1.146 extends Traverser<K,V,Long> {
6691 dl 1.171 final ToLongFunction<Map.Entry<K,V>> transformer;
6692 dl 1.153 final LongBinaryOperator reducer;
6693 dl 1.119 final long basis;
6694     long result;
6695 dl 1.128 MapReduceEntriesToLongTask<K,V> rights, nextRight;
6696 dl 1.119 MapReduceEntriesToLongTask
6697 dl 1.146 (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
6698 dl 1.128 MapReduceEntriesToLongTask<K,V> nextRight,
6699 dl 1.171 ToLongFunction<Map.Entry<K,V>> transformer,
6700 dl 1.119 long basis,
6701 dl 1.153 LongBinaryOperator reducer) {
6702 dl 1.130 super(m, p, b); this.nextRight = nextRight;
6703 dl 1.119 this.transformer = transformer;
6704     this.basis = basis; this.reducer = reducer;
6705     }
6706 dl 1.146 public final Long getRawResult() { return result; }
6707     @SuppressWarnings("unchecked") public final void compute() {
6708 dl 1.171 final ToLongFunction<Map.Entry<K,V>> transformer;
6709 dl 1.153 final LongBinaryOperator reducer;
6710 dl 1.149 if ((transformer = this.transformer) != null &&
6711     (reducer = this.reducer) != null) {
6712     long r = this.basis;
6713     for (int b; (b = preSplit()) > 0;)
6714     (rights = new MapReduceEntriesToLongTask<K,V>
6715     (map, this, b, rights, transformer, r, reducer)).fork();
6716 dl 1.151 V v;
6717 dl 1.191 while ((v = advanceValue()) != null)
6718 jsr166 1.168 r = reducer.applyAsLong(r, transformer.applyAsLong(entryFor(nextKey, v)));
6719 dl 1.149 result = r;
6720     CountedCompleter<?> c;
6721     for (c = firstComplete(); c != null; c = c.nextComplete()) {
6722     MapReduceEntriesToLongTask<K,V>
6723     t = (MapReduceEntriesToLongTask<K,V>)c,
6724     s = t.rights;
6725     while (s != null) {
6726 dl 1.153 t.result = reducer.applyAsLong(t.result, s.result);
6727 dl 1.149 s = t.rights = s.nextRight;
6728     }
6729 dl 1.119 }
6730 dl 1.138 }
6731 dl 1.4 }
6732 tim 1.1 }
6733    
6734 dl 1.128 @SuppressWarnings("serial") static final class MapReduceMappingsToLongTask<K,V>
6735 dl 1.146 extends Traverser<K,V,Long> {
6736 dl 1.171 final ToLongBiFunction<? super K, ? super V> transformer;
6737 dl 1.153 final LongBinaryOperator reducer;
6738 dl 1.119 final long basis;
6739     long result;
6740 dl 1.128 MapReduceMappingsToLongTask<K,V> rights, nextRight;
6741 dl 1.119 MapReduceMappingsToLongTask
6742 dl 1.146 (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
6743 dl 1.128 MapReduceMappingsToLongTask<K,V> nextRight,
6744 dl 1.171 ToLongBiFunction<? super K, ? super V> transformer,
6745 dl 1.119 long basis,
6746 dl 1.153 LongBinaryOperator reducer) {
6747 dl 1.130 super(m, p, b); this.nextRight = nextRight;
6748 dl 1.119 this.transformer = transformer;
6749     this.basis = basis; this.reducer = reducer;
6750     }
6751 dl 1.146 public final Long getRawResult() { return result; }
6752     @SuppressWarnings("unchecked") public final void compute() {
6753 dl 1.171 final ToLongBiFunction<? super K, ? super V> transformer;
6754 dl 1.153 final LongBinaryOperator reducer;
6755 dl 1.149 if ((transformer = this.transformer) != null &&
6756     (reducer = this.reducer) != null) {
6757     long r = this.basis;
6758     for (int b; (b = preSplit()) > 0;)
6759     (rights = new MapReduceMappingsToLongTask<K,V>
6760     (map, this, b, rights, transformer, r, reducer)).fork();
6761 dl 1.151 V v;
6762 dl 1.191 while ((v = advanceValue()) != null)
6763 jsr166 1.168 r = reducer.applyAsLong(r, transformer.applyAsLong(nextKey, v));
6764 dl 1.149 result = r;
6765     CountedCompleter<?> c;
6766     for (c = firstComplete(); c != null; c = c.nextComplete()) {
6767     MapReduceMappingsToLongTask<K,V>
6768     t = (MapReduceMappingsToLongTask<K,V>)c,
6769     s = t.rights;
6770     while (s != null) {
6771 dl 1.153 t.result = reducer.applyAsLong(t.result, s.result);
6772 dl 1.149 s = t.rights = s.nextRight;
6773     }
6774 dl 1.119 }
6775     }
6776 dl 1.4 }
6777 tim 1.1 }
6778    
6779 dl 1.128 @SuppressWarnings("serial") static final class MapReduceKeysToIntTask<K,V>
6780 dl 1.146 extends Traverser<K,V,Integer> {
6781 dl 1.171 final ToIntFunction<? super K> transformer;
6782 dl 1.153 final IntBinaryOperator reducer;
6783 dl 1.119 final int basis;
6784     int result;
6785 dl 1.128 MapReduceKeysToIntTask<K,V> rights, nextRight;
6786 dl 1.119 MapReduceKeysToIntTask
6787 dl 1.146 (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
6788 dl 1.128 MapReduceKeysToIntTask<K,V> nextRight,
6789 dl 1.171 ToIntFunction<? super K> transformer,
6790 dl 1.119 int basis,
6791 dl 1.153 IntBinaryOperator reducer) {
6792 dl 1.130 super(m, p, b); this.nextRight = nextRight;
6793 dl 1.119 this.transformer = transformer;
6794     this.basis = basis; this.reducer = reducer;
6795     }
6796 dl 1.146 public final Integer getRawResult() { return result; }
6797     @SuppressWarnings("unchecked") public final void compute() {
6798 dl 1.171 final ToIntFunction<? super K> transformer;
6799 dl 1.153 final IntBinaryOperator reducer;
6800 dl 1.149 if ((transformer = this.transformer) != null &&
6801     (reducer = this.reducer) != null) {
6802     int r = this.basis;
6803     for (int b; (b = preSplit()) > 0;)
6804     (rights = new MapReduceKeysToIntTask<K,V>
6805     (map, this, b, rights, transformer, r, reducer)).fork();
6806 dl 1.191 K k;
6807     while ((k = advanceKey()) != null)
6808     r = reducer.applyAsInt(r, transformer.applyAsInt(k));
6809 dl 1.149 result = r;
6810     CountedCompleter<?> c;
6811     for (c = firstComplete(); c != null; c = c.nextComplete()) {
6812     MapReduceKeysToIntTask<K,V>
6813     t = (MapReduceKeysToIntTask<K,V>)c,
6814     s = t.rights;
6815     while (s != null) {
6816 dl 1.153 t.result = reducer.applyAsInt(t.result, s.result);
6817 dl 1.149 s = t.rights = s.nextRight;
6818     }
6819 dl 1.119 }
6820 dl 1.138 }
6821 dl 1.30 }
6822     }
6823    
6824 dl 1.128 @SuppressWarnings("serial") static final class MapReduceValuesToIntTask<K,V>
6825 dl 1.146 extends Traverser<K,V,Integer> {
6826 dl 1.171 final ToIntFunction<? super V> transformer;
6827 dl 1.153 final IntBinaryOperator reducer;
6828 dl 1.119 final int basis;
6829     int result;
6830 dl 1.128 MapReduceValuesToIntTask<K,V> rights, nextRight;
6831 dl 1.119 MapReduceValuesToIntTask
6832 dl 1.146 (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
6833 dl 1.128 MapReduceValuesToIntTask<K,V> nextRight,
6834 dl 1.171 ToIntFunction<? super V> transformer,
6835 dl 1.119 int basis,
6836 dl 1.153 IntBinaryOperator reducer) {
6837 dl 1.130 super(m, p, b); this.nextRight = nextRight;
6838 dl 1.119 this.transformer = transformer;
6839     this.basis = basis; this.reducer = reducer;
6840     }
6841 dl 1.146 public final Integer getRawResult() { return result; }
6842     @SuppressWarnings("unchecked") public final void compute() {
6843 dl 1.171 final ToIntFunction<? super V> transformer;
6844 dl 1.153 final IntBinaryOperator reducer;
6845 dl 1.149 if ((transformer = this.transformer) != null &&
6846     (reducer = this.reducer) != null) {
6847     int r = this.basis;
6848     for (int b; (b = preSplit()) > 0;)
6849     (rights = new MapReduceValuesToIntTask<K,V>
6850     (map, this, b, rights, transformer, r, reducer)).fork();
6851 dl 1.151 V v;
6852 dl 1.191 while ((v = advanceValue()) != null)
6853 dl 1.153 r = reducer.applyAsInt(r, transformer.applyAsInt(v));
6854 dl 1.149 result = r;
6855     CountedCompleter<?> c;
6856     for (c = firstComplete(); c != null; c = c.nextComplete()) {
6857     MapReduceValuesToIntTask<K,V>
6858     t = (MapReduceValuesToIntTask<K,V>)c,
6859     s = t.rights;
6860     while (s != null) {
6861 dl 1.153 t.result = reducer.applyAsInt(t.result, s.result);
6862 dl 1.149 s = t.rights = s.nextRight;
6863     }
6864 dl 1.119 }
6865 dl 1.2 }
6866 tim 1.1 }
6867     }
6868    
6869 dl 1.128 @SuppressWarnings("serial") static final class MapReduceEntriesToIntTask<K,V>
6870 dl 1.146 extends Traverser<K,V,Integer> {
6871 dl 1.171 final ToIntFunction<Map.Entry<K,V>> transformer;
6872 dl 1.153 final IntBinaryOperator reducer;
6873 dl 1.119 final int basis;
6874     int result;
6875 dl 1.128 MapReduceEntriesToIntTask<K,V> rights, nextRight;
6876 dl 1.119 MapReduceEntriesToIntTask
6877 dl 1.146 (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
6878 dl 1.128 MapReduceEntriesToIntTask<K,V> nextRight,
6879 dl 1.171 ToIntFunction<Map.Entry<K,V>> transformer,
6880 dl 1.119 int basis,
6881 dl 1.153 IntBinaryOperator reducer) {
6882 dl 1.130 super(m, p, b); this.nextRight = nextRight;
6883 dl 1.119 this.transformer = transformer;
6884     this.basis = basis; this.reducer = reducer;
6885     }
6886 dl 1.146 public final Integer getRawResult() { return result; }
6887     @SuppressWarnings("unchecked") public final void compute() {
6888 dl 1.171 final ToIntFunction<Map.Entry<K,V>> transformer;
6889 dl 1.153 final IntBinaryOperator reducer;
6890 dl 1.149 if ((transformer = this.transformer) != null &&
6891     (reducer = this.reducer) != null) {
6892     int r = this.basis;
6893     for (int b; (b = preSplit()) > 0;)
6894     (rights = new MapReduceEntriesToIntTask<K,V>
6895     (map, this, b, rights, transformer, r, reducer)).fork();
6896 dl 1.151 V v;
6897 dl 1.191 while ((v = advanceValue()) != null)
6898 jsr166 1.168 r = reducer.applyAsInt(r, transformer.applyAsInt(entryFor(nextKey,
6899 dl 1.151 v)));
6900 dl 1.149 result = r;
6901     CountedCompleter<?> c;
6902     for (c = firstComplete(); c != null; c = c.nextComplete()) {
6903     MapReduceEntriesToIntTask<K,V>
6904     t = (MapReduceEntriesToIntTask<K,V>)c,
6905     s = t.rights;
6906     while (s != null) {
6907 dl 1.153 t.result = reducer.applyAsInt(t.result, s.result);
6908 dl 1.149 s = t.rights = s.nextRight;
6909     }
6910 dl 1.119 }
6911 dl 1.138 }
6912 dl 1.4 }
6913 dl 1.119 }
6914 tim 1.1
6915 dl 1.128 @SuppressWarnings("serial") static final class MapReduceMappingsToIntTask<K,V>
6916 dl 1.146 extends Traverser<K,V,Integer> {
6917 dl 1.171 final ToIntBiFunction<? super K, ? super V> transformer;
6918 dl 1.153 final IntBinaryOperator reducer;
6919 dl 1.119 final int basis;
6920     int result;
6921 dl 1.128 MapReduceMappingsToIntTask<K,V> rights, nextRight;
6922 dl 1.119 MapReduceMappingsToIntTask
6923 dl 1.146 (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
6924     MapReduceMappingsToIntTask<K,V> nextRight,
6925 dl 1.171 ToIntBiFunction<? super K, ? super V> transformer,
6926 dl 1.119 int basis,
6927 dl 1.153 IntBinaryOperator reducer) {
6928 dl 1.130 super(m, p, b); this.nextRight = nextRight;
6929 dl 1.119 this.transformer = transformer;
6930     this.basis = basis; this.reducer = reducer;
6931     }
6932 dl 1.146 public final Integer getRawResult() { return result; }
6933     @SuppressWarnings("unchecked") public final void compute() {
6934 dl 1.171 final ToIntBiFunction<? super K, ? super V> transformer;
6935 dl 1.153 final IntBinaryOperator reducer;
6936 dl 1.149 if ((transformer = this.transformer) != null &&
6937     (reducer = this.reducer) != null) {
6938     int r = this.basis;
6939     for (int b; (b = preSplit()) > 0;)
6940     (rights = new MapReduceMappingsToIntTask<K,V>
6941     (map, this, b, rights, transformer, r, reducer)).fork();
6942 dl 1.151 V v;
6943 dl 1.191 while ((v = advanceValue()) != null)
6944 jsr166 1.168 r = reducer.applyAsInt(r, transformer.applyAsInt(nextKey, v));
6945 dl 1.149 result = r;
6946     CountedCompleter<?> c;
6947     for (c = firstComplete(); c != null; c = c.nextComplete()) {
6948     MapReduceMappingsToIntTask<K,V>
6949     t = (MapReduceMappingsToIntTask<K,V>)c,
6950     s = t.rights;
6951     while (s != null) {
6952 dl 1.153 t.result = reducer.applyAsInt(t.result, s.result);
6953 dl 1.149 s = t.rights = s.nextRight;
6954     }
6955 dl 1.119 }
6956 dl 1.138 }
6957 tim 1.1 }
6958     }
6959 dl 1.99
6960     // Unsafe mechanics
6961 dl 1.149 private static final sun.misc.Unsafe U;
6962     private static final long SIZECTL;
6963     private static final long TRANSFERINDEX;
6964     private static final long TRANSFERORIGIN;
6965     private static final long BASECOUNT;
6966 dl 1.153 private static final long CELLSBUSY;
6967 dl 1.149 private static final long CELLVALUE;
6968 dl 1.119 private static final long ABASE;
6969     private static final int ASHIFT;
6970 dl 1.99
6971     static {
6972     try {
6973 dl 1.149 U = sun.misc.Unsafe.getUnsafe();
6974 dl 1.119 Class<?> k = ConcurrentHashMap.class;
6975 dl 1.149 SIZECTL = U.objectFieldOffset
6976 dl 1.119 (k.getDeclaredField("sizeCtl"));
6977 dl 1.149 TRANSFERINDEX = U.objectFieldOffset
6978     (k.getDeclaredField("transferIndex"));
6979     TRANSFERORIGIN = U.objectFieldOffset
6980     (k.getDeclaredField("transferOrigin"));
6981     BASECOUNT = U.objectFieldOffset
6982     (k.getDeclaredField("baseCount"));
6983 dl 1.153 CELLSBUSY = U.objectFieldOffset
6984     (k.getDeclaredField("cellsBusy"));
6985     Class<?> ck = Cell.class;
6986 dl 1.149 CELLVALUE = U.objectFieldOffset
6987     (ck.getDeclaredField("value"));
6988 dl 1.119 Class<?> sc = Node[].class;
6989 dl 1.149 ABASE = U.arrayBaseOffset(sc);
6990 jsr166 1.167 int scale = U.arrayIndexScale(sc);
6991     if ((scale & (scale - 1)) != 0)
6992     throw new Error("data type scale not a power of two");
6993     ASHIFT = 31 - Integer.numberOfLeadingZeros(scale);
6994 dl 1.99 } catch (Exception e) {
6995     throw new Error(e);
6996     }
6997     }
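
As a hypothetical, standalone illustration of what ABASE and ASHIFT are used for: slot i of an Object[] (such as the Node[] table) lives at byte offset ABASE + ((long)i << ASHIFT), which is the arithmetic behind the volatile table accesses elsewhere in this class. The reflective read of theUnsafe is an assumption of this sketch only, needed because Unsafe.getUnsafe() is restricted outside the JDK.

import java.lang.reflect.Field;
import sun.misc.Unsafe;

public class ArrayOffsetSketch {
    public static void main(String[] args) throws Exception {
        Field f = Unsafe.class.getDeclaredField("theUnsafe");
        f.setAccessible(true);
        Unsafe u = (Unsafe) f.get(null);
        long abase = u.arrayBaseOffset(Object[].class);
        int scale  = u.arrayIndexScale(Object[].class);
        int ashift = 31 - Integer.numberOfLeadingZeros(scale); // same formula as above
        Object[] a = { "x", "y", "z" };
        int i = 2;
        Object v = u.getObjectVolatile(a, ((long) i << ashift) + abase);
        System.out.println(v); // "z"
    }
}
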
6998 jsr166 1.152
6999     }