--- jsr166/src/jsr166e/ConcurrentHashMapV8.java	2013/06/19 17:00:58	1.104
+++ jsr166/src/jsr166e/ConcurrentHashMapV8.java	2013/08/23 20:12:21	1.115
@@ -12,6 +12,7 @@ import java.io.ObjectStreamField;
 import java.io.Serializable;
 import java.lang.reflect.ParameterizedType;
 import java.lang.reflect.Type;
+import java.util.AbstractMap;
 import java.util.Arrays;
 import java.util.Collection;
 import java.util.Comparator;
@@ -218,7 +219,7 @@ import java.util.concurrent.locks.Reentr
  * @param <K> the type of keys maintained by this map
  * @param <V> the type of mapped values
  */
-public class ConcurrentHashMapV8<K,V>
+public class ConcurrentHashMapV8<K,V> extends AbstractMap<K,V>
     implements ConcurrentMap<K,V>, Serializable {
     private static final long serialVersionUID = 7249069246763182397L;
 
@@ -299,7 +300,7 @@ public class ConcurrentHashMapV8
      * because they have negative hash fields and null key and value
      * fields. (These special nodes are either uncommon or transient,
      * so the impact of carrying around some unused fields is
-     * insignficant.)
+     * insignificant.)
      *
      * The table is lazily initialized to a power-of-two size upon the
      * first insertion. Each bin in the table normally contains a
@@ -446,23 +447,25 @@ public class ConcurrentHashMapV8
      * related operations (which is the main reason we cannot use
      * existing collections such as TreeMaps). TreeBins contain
      * Comparable elements, but may contain others, as well as
-     * elements that are Comparable but not necessarily Comparable
-     * for the same T, so we cannot invoke compareTo among them. To
-     * handle this, the tree is ordered primarily by hash value, then
-     * by Comparable.compareTo order if applicable. On lookup at a
-     * node, if elements are not comparable or compare as 0 then both
-     * left and right children may need to be searched in the case of
-     * tied hash values. (This corresponds to the full list search
-     * that would be necessary if all elements were non-Comparable and
-     * had tied hashes.) The red-black balancing code is updated from
-     * pre-jdk-collections
+     * elements that are Comparable but not necessarily Comparable for
+     * the same T, so we cannot invoke compareTo among them. To handle
+     * this, the tree is ordered primarily by hash value, then by
+     * Comparable.compareTo order if applicable. On lookup at a node,
+     * if elements are not comparable or compare as 0 then both left
+     * and right children may need to be searched in the case of tied
+     * hash values. (This corresponds to the full list search that
+     * would be necessary if all elements were non-Comparable and had
+     * tied hashes.) On insertion, to keep a total ordering (or as
+     * close as is required here) across rebalancings, we compare
+     * classes and identityHashCodes as tie-breakers. The red-black
+     * balancing code is updated from pre-jdk-collections
      * (http://gee.cs.oswego.edu/dl/classes/collections/RBCell.java)
      * based in turn on Cormen, Leiserson, and Rivest "Introduction to
      * Algorithms" (CLR).
      *
      * TreeBins also require an additional locking mechanism. While
      * list traversal is always possible by readers even during
-     * updates, tree traversal is not, mainly beause of tree-rotations
+     * updates, tree traversal is not, mainly because of tree-rotations
      * that may change the root node and/or its linkages. TreeBins
      * include a simple read-write lock mechanism parasitic on the
      * main bin-synchronization strategy: Structural adjustments
@@ -485,6 +488,10 @@ public class ConcurrentHashMapV8
      * unused "Segment" class that is instantiated in minimal form
      * only when serializing.
      *
+     * Also, solely for compatibility with previous versions of this
+     * class, it extends AbstractMap, even though all of its methods
+     * are overridden, so it is just useless baggage.
+     *
      * This file is organized to make things a little easier to follow
      * while reading than they might otherwise: First the main static
      * declarations and utilities, then fields, then main public
@@ -568,9 +575,9 @@ public class ConcurrentHashMapV8
     /*
      * Encodings for Node hash fields. See above for explanation.
      */
-    static final int MOVED     = 0x8fffffff; // (-1) hash for forwarding nodes
-    static final int TREEBIN   = 0x80000000; // hash for heads of treea
-    static final int RESERVED  = 0x80000001; // hash for transient reservations
+    static final int MOVED     = -1; // hash for forwarding nodes
+    static final int TREEBIN   = -2; // hash for roots of trees
+    static final int RESERVED  = -3; // hash for transient reservations
     static final int HASH_BITS = 0x7fffffff; // usable bits of normal node hash
 
     /** Number of CPUS, to place bounds on some sizings */
@@ -589,7 +596,7 @@ public class ConcurrentHashMapV8
      * Key-value entry. This class is never exported out as a
      * user-mutable Map.Entry (i.e., one supporting setValue; see
      * MapEntry below), but can be used for read-only traversals used
-     * in bulk tasks. Subclasses of Node with a negativehash field
+     * in bulk tasks. Subclasses of Node with a negative hash field
      * are special, and contain null keys and values (but are never
      * exported). Otherwise, keys and vals are never null.
      */
@@ -597,7 +604,7 @@ public class ConcurrentHashMapV8
         final int hash;
         final K key;
         volatile V val;
-        Node<K,V> next;
+        volatile Node<K,V> next;
 
         Node(int hash, K key, V val, Node<K,V> next) {
             this.hash = hash;
@@ -722,8 +729,9 @@ public class ConcurrentHashMapV8
      * errors by users, these checks must operate on local variables,
      * which accounts for some odd-looking inline assignments below.
      * Note that calls to setTabAt always occur within locked regions,
-     * and so do not need full volatile semantics, but still require
-     * ordering to maintain concurrent readability.
+     * and so in principle require only release ordering, not
+     * full volatile semantics, but are currently coded as volatile
+     * writes to be conservative.
      */
 
     @SuppressWarnings("unchecked")
@@ -737,7 +745,7 @@ public class ConcurrentHashMapV8
     }
 
     static final <K,V> void setTabAt(Node<K,V>[] tab, int i, Node<K,V> v) {
-        U.putOrderedObject(tab, ((long)i << ASHIFT) + ABASE, v);
+        U.putObjectVolatile(tab, ((long)i << ASHIFT) + ABASE, v);
     }
 
     /* ---------------- Fields -------------- */
@@ -1358,6 +1366,7 @@ public class ConcurrentHashMapV8
      * Saves the state of the {@code ConcurrentHashMapV8} instance to a
      * stream (i.e., serializes it).
      * @param s the stream
+     * @throws java.io.IOException if an I/O error occurs
      * @serialData
      * the key (Object) and value (Object)
      * for each key-value mapping, followed by a null pair.
@@ -1400,6 +1409,9 @@ public class ConcurrentHashMapV8
     /**
      * Reconstitutes the instance from a stream (that is, deserializes it).
      * @param s the stream
+     * @throws ClassNotFoundException if the class of a serialized object
+     *         could not be found
+     * @throws java.io.IOException if an I/O error occurs
      */
     private void readObject(java.io.ObjectInputStream s)
         throws java.io.IOException, ClassNotFoundException {
@@ -1434,8 +1446,8 @@ public class ConcurrentHashMapV8
             int sz = (int)size;
             n = tableSizeFor(sz + (sz >>> 1) + 1);
         }
-        @SuppressWarnings({"rawtypes","unchecked"})
-            Node<K,V>[] tab = (Node<K,V>[])new Node[n];
+        @SuppressWarnings("unchecked")
+        Node<K,V>[] tab = (Node<K,V>[])new Node<?,?>[n];
         int mask = n - 1;
         long added = 0L;
         while (p != null) {
@@ -2100,9 +2112,9 @@ public class ConcurrentHashMapV8
      *
      * @param initialCapacity The implementation performs internal
      * sizing to accommodate this many elements.
+     * @return the new set
      * @throws IllegalArgumentException if the initial capacity of
      * elements is negative
-     * @return the new set
      * @since 1.8
      */
     public static <K> KeySetView<K,Boolean> newKeySet(int initialCapacity) {
@@ -2140,20 +2152,29 @@ public class ConcurrentHashMapV8
         }
 
         Node<K,V> find(int h, Object k) {
-            Node<K,V> e; int n;
-            Node<K,V>[] tab = nextTable;
-            if (k != null && tab != null && (n = tab.length) > 0 &&
-                (e = tabAt(tab, (n - 1) & h)) != null) {
-                do {
+            // loop to avoid arbitrarily deep recursion on forwarding nodes
+            outer: for (Node<K,V>[] tab = nextTable;;) {
+                Node<K,V> e; int n;
+                if (k == null || tab == null || (n = tab.length) == 0 ||
+                    (e = tabAt(tab, (n - 1) & h)) == null)
+                    return null;
+                for (;;) {
                     int eh; K ek;
                     if ((eh = e.hash) == h &&
                         ((ek = e.key) == k || (ek != null && k.equals(ek))))
                         return e;
-                    if (eh < 0)
-                        return e.find(h, k);
-                } while ((e = e.next) != null);
+                    if (eh < 0) {
+                        if (e instanceof ForwardingNode) {
+                            tab = ((ForwardingNode<K,V>)e).nextTable;
+                            continue outer;
+                        }
+                        else
+                            return e.find(h, k);
+                    }
+                    if ((e = e.next) == null)
+                        return null;
+                }
             }
-            return null;
         }
     }
 
@@ -2184,8 +2205,8 @@ public class ConcurrentHashMapV8
                 try {
                     if ((tab = table) == null || tab.length == 0) {
                         int n = (sc > 0) ? sc : DEFAULT_CAPACITY;
-                        @SuppressWarnings({"rawtypes","unchecked"})
-                            Node<K,V>[] nt = (Node<K,V>[])new Node[n];
+                        @SuppressWarnings("unchecked")
+                        Node<K,V>[] nt = (Node<K,V>[])new Node<?,?>[n];
                         table = tab = nt;
                         sc = n - (n >>> 2);
                     }
@@ -2276,8 +2297,8 @@ public class ConcurrentHashMapV8
                 if (U.compareAndSwapInt(this, SIZECTL, sc, -1)) {
                     try {
                         if (table == tab) {
-                            @SuppressWarnings({"rawtypes","unchecked"})
-                                Node<K,V>[] nt = (Node<K,V>[])new Node[n];
+                            @SuppressWarnings("unchecked")
+                            Node<K,V>[] nt = (Node<K,V>[])new Node<?,?>[n];
                             table = nt;
                             sc = n - (n >>> 2);
                         }
@@ -2304,8 +2325,8 @@ public class ConcurrentHashMapV8
             stride = MIN_TRANSFER_STRIDE; // subdivide range
         if (nextTab == null) {            // initiating
             try {
-                @SuppressWarnings({"rawtypes","unchecked"})
-                    Node<K,V>[] nt = (Node<K,V>[])new Node[n << 1];
+                @SuppressWarnings("unchecked")
+                Node<K,V>[] nt = (Node<K,V>[])new Node<?,?>[n << 1];
                 nextTab = nt;
             } catch (Throwable ex) {      // try to cope with OOME
                 sizeCtl = Integer.MAX_VALUE;
@@ -2327,10 +2348,11 @@ public class ConcurrentHashMapV8
         int nextn = nextTab.length;
         ForwardingNode<K,V> fwd = new ForwardingNode<K,V>(nextTab);
         boolean advance = true;
+        boolean finishing = false; // to ensure sweep before committing nextTab
        for (int i = 0, bound = 0;;) {
            int nextIndex, nextBound, fh; Node<K,V> f;
            while (advance) {
-                if (--i >= bound)
+                if (--i >= bound || finishing)
                    advance = false;
                else if ((nextIndex = transferIndex) <= transferOrigin) {
                    i = -1;
@@ -2346,14 +2368,19 @@ public class ConcurrentHashMapV8
                }
            }
            if (i < 0 || i >= n || i + n >= nextn) {
+                if (finishing) {
+                    nextTable = null;
+                    table = nextTab;
+                    sizeCtl = (n << 1) - (n >>> 1);
+                    return;
+                }
                for (int sc;;) {
                    if (U.compareAndSwapInt(this, SIZECTL, sc = sizeCtl, ++sc)) {
-                        if (sc == -1) {
-                            nextTable = null;
-                            table = nextTab;
-                            sizeCtl = (n << 1) - (n >>> 1);
-                        }
-                        return;
+                        if (sc != -1)
+                            return;
+                        finishing = advance = true;
+                        i = n; // recheck before commit
+                        break;
                    }
                }
            }
@@ -2395,6 +2422,10 @@ public class ConcurrentHashMapV8
                            else
                                hn = new Node<K,V>(ph, pk, pv, hn);
                        }
+                        setTabAt(nextTab, i, ln);
+                        setTabAt(nextTab, i + n, hn);
+                        setTabAt(tab, i, fwd);
+                        advance = true;
                    }
                    else if (f instanceof TreeBin) {
                        TreeBin<K,V> t = (TreeBin<K,V>)f;
@@ -2426,13 +2457,11 @@ public class ConcurrentHashMapV8
                            (hc != 0) ? new TreeBin<K,V>(lo) : t;
                        hn = (hc <= UNTREEIFY_THRESHOLD) ? untreeify(hi) :
                            (lc != 0) ? new TreeBin<K,V>(hi) : t;
+                        setTabAt(nextTab, i, ln);
+                        setTabAt(nextTab, i + n, hn);
+                        setTabAt(tab, i, fwd);
+                        advance = true;
                    }
-                    else
-                        ln = hn = null;
-                    setTabAt(nextTab, i, ln);
-                    setTabAt(nextTab, i + n, hn);
-                    setTabAt(tab, i, fwd);
-                    advance = true;
                }
            }
        }
@@ -2453,7 +2482,7 @@ public class ConcurrentHashMapV8
                    U.compareAndSwapInt(this, SIZECTL, sc, -2))
                    transfer(tab, null);
            }
-            else if ((b = tabAt(tab, index)) != null) {
+            else if ((b = tabAt(tab, index)) != null && b.hash >= 0) {
                synchronized (b) {
                    if (tabAt(tab, index) == b) {
                        TreeNode<K,V> hd = null, tl = null;
@@ -2475,7 +2504,7 @@ public class ConcurrentHashMapV8
    }
 
    /**
-     * Returns a list on non-TreeNodes replacing those in given list
+     * Returns a list on non-TreeNodes replacing those in given list.
     */
    static <K,V> Node<K,V> untreeify(Node<K,V> b) {
        Node<K,V> hd = null, tl = null;
@@ -2528,19 +2557,18 @@ public class ConcurrentHashMapV8
                        p = pr;
                    else if ((pk = p.key) == k || (pk != null && k.equals(pk)))
                        return p;
-                    else if (pl == null && pr == null)
-                        break;
+                    else if (pl == null)
+                        p = pr;
+                    else if (pr == null)
+                        p = pl;
                    else if ((kc != null ||
                              (kc = comparableClassFor(k)) != null) &&
                             (dir = compareComparables(kc, k, pk)) != 0)
                        p = (dir < 0) ? pl : pr;
-                    else if (pl == null)
-                        p = pr;
-                    else if (pr == null ||
-                             (q = pr.findTreeNode(h, k, kc)) == null)
-                        p = pl;
-                    else
+                    else if ((q = pr.findTreeNode(h, k, kc)) != null)
                        return q;
+                    else
+                        p = pl;
                } while (p != null);
            }
            return null;
@@ -2567,6 +2595,23 @@ public class ConcurrentHashMapV8
        static final int READER = 4; // increment value for setting read lock
 
        /**
+         * Tie-breaking utility for ordering insertions when equal
+         * hashCodes and non-comparable. We don't require a total
+         * order, just a consistent insertion rule to maintain
+         * equivalence across rebalancings. Tie-breaking further than
+         * necessary simplifies testing a bit.
+         */
+        static int tieBreakOrder(Object a, Object b) {
+            int d;
+            if (a == null || b == null ||
+                (d = a.getClass().getName().
+                 compareTo(b.getClass().getName())) == 0)
+                d = (System.identityHashCode(a) <= System.identityHashCode(b) ?
+                     -1 : 1);
+            return d;
+        }
+
+        /**
         * Creates bin with initial set of nodes headed by b.
         */
        TreeBin(TreeNode<K,V> b) {
@@ -2582,21 +2627,21 @@ public class ConcurrentHashMapV8
                    r = x;
                }
                else {
-                    Object key = x.key;
-                    int hash = x.hash;
+                    K k = x.key;
+                    int h = x.hash;
                    Class<?> kc = null;
                    for (TreeNode<K,V> p = r;;) {
                        int dir, ph;
-                        if ((ph = p.hash) > hash)
+                        K pk = p.key;
+                        if ((ph = p.hash) > h)
                            dir = -1;
-                        else if (ph < hash)
+                        else if (ph < h)
                            dir = 1;
-                        else if ((kc != null ||
-                                  (kc = comparableClassFor(key)) != null))
-                            dir = compareComparables(kc, key, p.key);
-                        else
-                            dir = 0;
-                        TreeNode<K,V> xp = p;
+                        else if ((kc == null &&
+                                  (kc = comparableClassFor(k)) == null) ||
+                                 (dir = compareComparables(kc, k, pk)) == 0)
+                            dir = tieBreakOrder(k, pk);
+                        TreeNode<K,V> xp = p;
                        if ((p = (dir <= 0) ? p.left : p.right) == null) {
                            x.parent = xp;
                            if (dir <= 0)
@@ -2610,10 +2655,11 @@ public class ConcurrentHashMapV8
                }
            }
            this.root = r;
+            assert checkInvariants(root);
        }
 
        /**
-         * Acquires write lock for tree restructuring
+         * Acquires write lock for tree restructuring.
         */
        private final void lockRoot() {
            if (!U.compareAndSwapInt(this, LOCKSTATE, 0, WRITER))
@@ -2621,14 +2667,14 @@ public class ConcurrentHashMapV8
        }
 
        /**
-         * Releases write lock for tree restructuring
+         * Releases write lock for tree restructuring.
         */
        private final void unlockRoot() {
            lockState = 0;
        }
 
        /**
-         * Possibly blocks awaiting root lock
+         * Possibly blocks awaiting root lock.
         */
        private final void contendedLock() {
            boolean waiting = false;
@@ -2640,7 +2686,7 @@ public class ConcurrentHashMapV8
                        return;
                    }
                }
-                else if ((s | WAITER) == 0) {
+                else if ((s & WAITER) == 0) {
                    if (U.compareAndSwapInt(this, LOCKSTATE, s, s | WAITER)) {
                        waiting = true;
                        waiter = Thread.currentThread();
@@ -2653,10 +2699,10 @@ public class ConcurrentHashMapV8
 
        /**
         * Returns matching node or null if none. Tries to search
-         * using tree compareisons from root, but continues linear
+         * using tree comparisons from root, but continues linear
         * search when lock not available.
         */
        final Node<K,V> find(int h, Object k) {
            if (k != null) {
                for (Node<K,V> e = first; e != null; e = e.next) {
                    int s; K ek;
@@ -2693,8 +2739,9 @@ public class ConcurrentHashMapV8
         */
        final TreeNode<K,V> putTreeVal(int h, K k, V v) {
            Class<?> kc = null;
+            boolean searched = false;
            for (TreeNode<K,V> p = root;;) {
-                int dir, ph; K pk; TreeNode<K,V> q, pr;
+                int dir, ph; K pk;
                if (p == null) {
                    first = root = new TreeNode<K,V>(h, k, v, null, null);
                    break;
@@ -2708,21 +2755,25 @@ public class ConcurrentHashMapV8
                else if ((kc == null &&
                          (kc = comparableClassFor(k)) == null) ||
                         (dir = compareComparables(kc, k, pk)) == 0) {
-                    if (p.left == null)
-                        dir = 1;
-                    else if ((pr = p.right) == null ||
-                             (q = pr.findTreeNode(h, k, kc)) == null)
-                        dir = -1;
-                    else
-                        return q;
+                    if (!searched) {
+                        TreeNode<K,V> q, ch;
+                        searched = true;
+                        if (((ch = p.left) != null &&
+                             (q = ch.findTreeNode(h, k, kc)) != null) ||
+                            ((ch = p.right) != null &&
+                             (q = ch.findTreeNode(h, k, kc)) != null))
+                            return q;
+                    }
+                    dir = tieBreakOrder(k, pk);
                }
+
                TreeNode<K,V> xp = p;
-                if ((p = (dir < 0) ? p.left : p.right) == null) {
+                if ((p = (dir <= 0) ? p.left : p.right) == null) {
                    TreeNode<K,V> x, f = first;
                    first = x = new TreeNode<K,V>(h, k, v, f, xp);
                    if (f != null)
                        f.prev = x;
-                    if (dir < 0)
+                    if (dir <= 0)
                        xp.left = x;
                    else
                        xp.right = x;
@@ -2751,7 +2802,7 @@ public class ConcurrentHashMapV8
         * that are accessible independently of lock. So instead we
         * swap the tree linkages.
         *
-         * @return true if now too small so should be untreeified.
+         * @return true if now too small, so should be untreeified
         */
        final boolean removeTreeNode(TreeNode<K,V> p) {
            TreeNode<K,V> next = (TreeNode<K,V>)p.next;
@@ -3146,7 +3197,7 @@ public class ConcurrentHashMapV8
 
    /**
     * Base of key, value, and entry Iterators. Adds fields to
-     * Traverser to support iterator.remove
+     * Traverser to support iterator.remove.
     */
    static class BaseIterator<K,V> extends Traverser<K,V> {
        final ConcurrentHashMapV8<K,V> map;
@@ -3498,10 +3549,10 @@ public class ConcurrentHashMapV8
     * of all (key, value) pairs
     * @since 1.8
     */
-    public double reduceToDoubleIn(long parallelismThreshold,
-                                   ObjectByObjectToDouble<? super K, ? super V> transformer,
-                                   double basis,
-                                   DoubleByDoubleToDouble reducer) {
+    public double reduceToDouble(long parallelismThreshold,
+                                 ObjectByObjectToDouble<? super K, ? super V> transformer,
+                                 double basis,
+                                 DoubleByDoubleToDouble reducer) {
        if (transformer == null || reducer == null)
            throw new NullPointerException();
        return new MapReduceMappingsToDoubleTask<K,V>
@@ -5986,7 +6037,7 @@ public class ConcurrentHashMapV8
    }
 
    /**
-     * Generates initial value for per-thread CounterHashCodes
+     * Generates initial value for per-thread CounterHashCodes.
     */
    static final AtomicInteger counterHashCodeGenerator = new AtomicInteger();
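The tie-break ordering that this patch threads through TreeBin (tieBreakOrder, used by both the TreeBin constructor and putTreeVal) can be exercised in isolation. The sketch below is a hypothetical standalone harness and is not part of the jsr166 sources: the TieBreakDemo class and its main method are invented for illustration, while tieBreakOrder itself is copied verbatim from the patch. It shows how two keys with equal hash codes and no mutually usable compareTo still receive a deterministic left/right placement, first by class name and then by identity hash code.

// Standalone sketch; TieBreakDemo is hypothetical and not part of jsr166e.
public class TieBreakDemo {

    // Copied from the patch: orders two keys that tie on hash code and have no
    // usable Comparable ordering. Class names break most ties; identityHashCode
    // is the last resort. As the patch comment notes, this need not be a total
    // order; the map only needs a consistent insertion rule for a given pair.
    static int tieBreakOrder(Object a, Object b) {
        int d;
        if (a == null || b == null ||
            (d = a.getClass().getName().
             compareTo(b.getClass().getName())) == 0)
            d = (System.identityHashCode(a) <= System.identityHashCode(b) ?
                 -1 : 1);
        return d;
    }

    public static void main(String[] args) {
        Object a = new Object();   // java.lang.Object
        String b = "key";          // java.lang.String, so class names differ
        Object c = new Object();   // same class as a, falls back to identity hash

        System.out.println(tieBreakOrder(a, b)); // negative: "java.lang.Object" sorts before "java.lang.String"
        System.out.println(tieBreakOrder(a, c)); // -1 or 1, depending on identity hash codes
        System.out.println(tieBreakOrder(a, c) == tieBreakOrder(a, c)); // true: stable for a fixed pair
    }
}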