/*
 * Copyright (C) 2002-2016 Sebastiano Vigna
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package PACKAGE;

import it.unimi.dsi.fastutil.objects.AbstractObjectSortedSet;
import it.unimi.dsi.fastutil.objects.ObjectBidirectionalIterator;
import it.unimi.dsi.fastutil.objects.ObjectListIterator;
import it.unimi.dsi.fastutil.objects.ObjectSortedSet;
import VALUE_PACKAGE.VALUE_COLLECTION;
import VALUE_PACKAGE.VALUE_ABSTRACT_COLLECTION;
import VALUE_PACKAGE.VALUE_ITERATOR;
import java.util.Comparator;
import java.util.Iterator;
import java.util.Map;
import java.util.SortedMap;
import java.util.NoSuchElementException;

#if VALUES_PRIMITIVE
import VALUE_PACKAGE.VALUE_LIST_ITERATOR;
#endif

/** A type-specific AVL tree map with a fast, small-footprint implementation.
 *
 *

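 * <p>An illustrative usage sketch follows. It assumes one concrete expansion of this template,
 * here called Int2IntAVLTreeMap (the int/int version); the class name and the exact
 * type-specific method names are assumptions of the example, not something this file defines.
 * <pre>
 * Int2IntAVLTreeMap m = new Int2IntAVLTreeMap();
 * m.defaultReturnValue(-1);     // value reported for missing keys
 * m.put(3, 30);
 * m.put(1, 10);
 * m.put(2, 20);
 * int first = m.firstIntKey();  // 1: keys are kept in sorted order by the AVL tree
 * int missing = m.get(42);      // -1, the default return value
 * </pre>
 *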
The iterators provided by the views of this class are type-specific {@linkplain * it.unimi.dsi.fastutil.BidirectionalIterator bidirectional iterators}. * Moreover, the iterator returned by iterator() can be safely cast * to a type-specific {@linkplain java.util.ListIterator list iterator}. */ public class AVL_TREE_MAP KEY_VALUE_GENERIC extends ABSTRACT_SORTED_MAP KEY_VALUE_GENERIC implements java.io.Serializable, Cloneable { /** A reference to the root entry. */ protected transient Entry KEY_VALUE_GENERIC tree; /** Number of entries in this map. */ protected int count; /** The first key in this map. */ protected transient Entry KEY_VALUE_GENERIC firstEntry; /** The last key in this map. */ protected transient Entry KEY_VALUE_GENERIC lastEntry; /** Cached set of entries. */ protected transient ObjectSortedSet entries; /** Cached set of keys. */ protected transient SORTED_SET KEY_GENERIC keys; /** Cached collection of values. */ protected transient VALUE_COLLECTION VALUE_GENERIC values; /** The value of this variable remembers, after a put() * or a remove(), whether the domain of the map * has been modified. */ protected transient boolean modified; /** This map's comparator, as provided in the constructor. */ protected Comparator storedComparator; /** This map's actual comparator; it may differ from {@link #storedComparator} because it is always a type-specific comparator, so it could be derived from the former by wrapping. */ protected transient KEY_COMPARATOR KEY_SUPER_GENERIC actualComparator; private static final long serialVersionUID = -7046029254386353129L; private static final boolean ASSERTS = ASSERTS_VALUE; { allocatePaths(); } /** Creates a new empty tree map. */ public AVL_TREE_MAP() { tree = null; count = 0; } /** Generates the comparator that will be actually used. * *

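 * <p>An illustrative sketch of the two cases (assuming the int/int expansion
 * Int2IntAVLTreeMap and the fastutil helper IntComparators; both names are used here
 * only for illustration):
 * <pre>
 * // a type-specific comparator: stored as-is and used directly
 * Int2IntAVLTreeMap a = new Int2IntAVLTreeMap(IntComparators.OPPOSITE_COMPARATOR);
 * // a plain Comparator: wrapped on the fly into a type-specific comparator
 * Int2IntAVLTreeMap b = new Int2IntAVLTreeMap(Comparator.reverseOrder());
 * </pre>
 *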
When a specific {@link Comparator} is specified and stored in {@link * #storedComparator}, we must check whether it is type-specific. If it is * so, we can used directly, and we store it in {@link #actualComparator}. Otherwise, * we generate on-the-fly an anonymous class that wraps the non-specific {@link Comparator} * and makes it into a type-specific one. */ private void setActualComparator() { #if KEY_CLASS_Object actualComparator = storedComparator; #else /* If the provided comparator is already type-specific, we use it. Otherwise, we use a wrapper anonymous class to fake that it is type-specific. */ if ( storedComparator == null || storedComparator instanceof KEY_COMPARATOR ) actualComparator = (KEY_COMPARATOR)storedComparator; else actualComparator = new KEY_COMPARATOR KEY_SUPER_GENERIC() { public int compare( KEY_GENERIC_TYPE k1, KEY_GENERIC_TYPE k2 ) { return storedComparator.compare( KEY2OBJ( k1 ), KEY2OBJ( k2 ) ); } public int compare( KEY_GENERIC_CLASS ok1, KEY_GENERIC_CLASS ok2 ) { return storedComparator.compare( ok1, ok2 ); } }; #endif } /** Creates a new empty tree map with the given comparator. * * @param c a (possibly type-specific) comparator. */ public AVL_TREE_MAP( final Comparator c ) { this(); storedComparator = c; setActualComparator(); } /** Creates a new tree map copying a given map. * * @param m a {@link Map} to be copied into the new tree map. */ public AVL_TREE_MAP( final Map m ) { this(); putAll( m ); } /** Creates a new tree map copying a given sorted map (and its {@link Comparator}). * * @param m a {@link SortedMap} to be copied into the new tree map. */ public AVL_TREE_MAP( final SortedMap m ) { this( m.comparator() ); putAll( m ); } /** Creates a new tree map copying a given map. * * @param m a type-specific map to be copied into the new tree map. */ public AVL_TREE_MAP( final MAP KEY_VALUE_EXTENDS_GENERIC m ) { this(); putAll( m ); } /** Creates a new tree map copying a given sorted map (and its {@link Comparator}). * * @param m a type-specific sorted map to be copied into the new tree map. */ public AVL_TREE_MAP( final SORTED_MAP KEY_VALUE_GENERIC m ) { this( m.comparator() ); putAll( m ); } /** Creates a new tree map using the elements of two parallel arrays and the given comparator. * * @param k the array of keys of the new tree map. * @param v the array of corresponding values in the new tree map. * @param c a (possibly type-specific) comparator. * @throws IllegalArgumentException if k and v have different lengths. */ public AVL_TREE_MAP( final KEY_GENERIC_TYPE[] k, final VALUE_GENERIC_TYPE v[], final Comparator c ) { this( c ); if ( k.length != v.length ) throw new IllegalArgumentException( "The key array and the value array have different lengths (" + k.length + " and " + v.length + ")" ); for( int i = 0; i < k.length; i++ ) this.put( k[ i ], v[ i ] ); } /** Creates a new tree map using the elements of two parallel arrays. * * @param k the array of keys of the new tree map. * @param v the array of corresponding values in the new tree map. * @throws IllegalArgumentException if k and v have different lengths. */ public AVL_TREE_MAP( final KEY_GENERIC_TYPE[] k, final VALUE_GENERIC_TYPE v[] ) { this( k, v, null ); } /* * The following methods implements some basic building blocks used by * all accessors. They are (and should be maintained) identical to those used in AVLTreeSet.drv. * * The put()/remove() code is derived from Ben Pfaff's GNU libavl * (http://www.msu.edu/~pfaffben/avl/). 
If you want to understand what's * going on, you should have a look at the literate code contained therein * first. */ /** Compares two keys in the right way. * *

This method uses the {@link #actualComparator} if it is non-null. * Otherwise, it resorts to primitive type comparisons or to {@link Comparable#compareTo(Object) compareTo()}. * * @param k1 the first key. * @param k2 the second key. * @return a number smaller than, equal to or greater than 0, as usual * (i.e., when k1 < k2, k1 = k2 or k1 > k2, respectively). */ SUPPRESS_WARNINGS_KEY_UNCHECKED final int compare( final KEY_GENERIC_TYPE k1, final KEY_GENERIC_TYPE k2 ) { return actualComparator == null ? KEY_CMP( k1, k2 ) : actualComparator.compare( k1, k2 ); } /** Returns the entry corresponding to the given key, if it is in the tree; null, otherwise. * * @param k the key to search for. * @return the corresponding entry, or null if no entry with the given key exists. */ final Entry KEY_VALUE_GENERIC findKey( final KEY_GENERIC_TYPE k ) { Entry KEY_VALUE_GENERIC e = tree; int cmp; while ( e != null && ( cmp = compare( k, e.key ) ) != 0 ) e = cmp < 0 ? e.left() : e.right(); return e; } /** Locates a key. * * @param k a key. * @return the last entry on a search for the given key; this will be * the given key, if it is present; otherwise, it will be either the smallest greater key or the greatest smaller key. */ final Entry KEY_VALUE_GENERIC locateKey( final KEY_GENERIC_TYPE k ) { Entry KEY_VALUE_GENERIC e = tree, last = tree; int cmp = 0; while ( e != null && ( cmp = compare( k, e.key ) ) != 0 ) { last = e; e = cmp < 0 ? e.left() : e.right(); } return cmp == 0 ? e : last; } /** This vector remembers the directions followed during * the current insertion. It suffices for about 2<sup>32</sup> entries. */ private transient boolean dirPath[]; private void allocatePaths() { dirPath = new boolean[ 48 ]; } #if VALUES_PRIMITIVE && !VALUE_CLASS_Boolean /** Adds an increment to value currently associated with a key. * *

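 * <p>A small usage sketch (assuming the int/int expansion Int2IntAVLTreeMap; this method is
 * generated only for expansions with a primitive, non-boolean value type):
 * <pre>
 * Int2IntAVLTreeMap m = new Int2IntAVLTreeMap();
 * m.defaultReturnValue(0);
 * m.addTo(7, 5);   // 7 was absent: it is now bound to 0 + 5 = 5; the call returns 0
 * m.addTo(7, 3);   // 7 is now bound to 8; the call returns the old value 5
 * </pre>
 *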
Note that this method respects the {@linkplain #defaultReturnValue() default return value} semantics: when * called with a key that does not currently appears in the map, the key * will be associated with the default return value plus * the given increment. * * @param k the key. * @param incr the increment. * @return the old value, or the {@linkplain #defaultReturnValue() default return value} if no value was present for the given key. */ public VALUE_GENERIC_TYPE addTo( final KEY_GENERIC_TYPE k, final VALUE_GENERIC_TYPE incr) { Entry KEY_VALUE_GENERIC e = add( k ); final VALUE_GENERIC_TYPE oldValue = e.value; e.value += incr; return oldValue; } #endif public VALUE_GENERIC_TYPE put( final KEY_GENERIC_TYPE k, final VALUE_GENERIC_TYPE v ) { Entry KEY_VALUE_GENERIC e = add( k ); final VALUE_GENERIC_TYPE oldValue = e.value; e.value = v; return oldValue; } /** Returns a node with key k in the balanced tree, creating one with defRetValue if necessary. * * @param k the key * @return a node with key k. If a node with key k already exists, then that node is returned, * otherwise a new node with defRetValue is created ensuring that the tree is balanced after creation of the node. */ private Entry KEY_VALUE_GENERIC add( final KEY_GENERIC_TYPE k ) { /* After execution of this method, modified is true iff a new entry has been inserted. */ modified = false; Entry KEY_VALUE_GENERIC e = null; if ( tree == null ) { // The case of the empty tree is treated separately. count++; e = tree = lastEntry = firstEntry = new Entry KEY_VALUE_GENERIC( k, defRetValue ); modified = true; } else { Entry KEY_VALUE_GENERIC p = tree, q = null, y = tree, z = null, w = null; int cmp, i = 0; while( true ) { if ( ( cmp = compare( k, p.key ) ) == 0 ) { return p; } if ( p.balance() != 0 ) { i = 0; z = q; y = p; } if ( dirPath[ i++ ] = cmp > 0 ) { if ( p.succ() ) { count++; e = new Entry KEY_VALUE_GENERIC( k, defRetValue ); modified = true; if ( p.right == null ) lastEntry = e; e.left = p; e.right = p.right; p.right( e ); break; } q = p; p = p.right; } else { if ( p.pred() ) { count++; e = new Entry KEY_VALUE_GENERIC( k, defRetValue ); modified = true; if ( p.left == null ) firstEntry = e; e.right = p; e.left = p.left; p.left( e ); break; } q = p; p = p.left; } } p = y; i = 0; while( p != e ) { if ( dirPath[ i ] ) p.incBalance(); else p.decBalance(); p = dirPath[ i++ ] ? 
p.right : p.left; } if ( y.balance() == -2 ) { Entry KEY_VALUE_GENERIC x = y.left; if ( x.balance() == -1 ) { w = x; if ( x.succ() ) { x.succ( false ); y.pred( x ); } else y.left = x.right; x.right = y; x.balance( 0 ); y.balance( 0 ); } else { if ( ASSERTS ) assert x.balance() == 1; w = x.right; x.right = w.left; w.left = x; y.left = w.right; w.right = y; if ( w.balance() == -1 ) { x.balance( 0 ); y.balance( 1 ); } else if ( w.balance() == 0 ) { x.balance( 0 ); y.balance( 0 ); } else { x.balance( -1 ); y.balance( 0 ); } w.balance( 0 ); if ( w.pred() ) { x.succ( w ); w.pred( false ); } if ( w.succ() ) { y.pred( w ); w.succ( false ); } } } else if ( y.balance() == +2 ) { Entry KEY_VALUE_GENERIC x = y.right; if ( x.balance() == 1 ) { w = x; if ( x.pred() ) { x.pred( false ); y.succ( x ); } else y.right = x.left; x.left = y; x.balance( 0 ); y.balance( 0 ); } else { if ( ASSERTS ) assert x.balance() == -1; w = x.left; x.left = w.right; w.right = x; y.right = w.left; w.left = y; if ( w.balance() == 1 ) { x.balance( 0 ); y.balance( -1 ); } else if ( w.balance() == 0 ) { x.balance( 0 ); y.balance( 0 ); } else { x.balance( 1 ); y.balance( 0 ); } w.balance( 0 ); if ( w.pred() ) { y.succ( w ); w.pred( false ); } if ( w.succ() ) { x.pred( w ); w.succ( false ); } } } else return e; if ( z == null ) tree = w; else { if ( z.left == y ) z.left = w; else z.right = w; } } if ( ASSERTS ) checkTree( tree ); return e; } /** Finds the parent of an entry. * * @param e a node of the tree. * @return the parent of the given node, or null for the root. */ private Entry KEY_VALUE_GENERIC parent( final Entry KEY_VALUE_GENERIC e ) { if ( e == tree ) return null; Entry KEY_VALUE_GENERIC x, y, p; x = y = e; while( true ) { if ( y.succ() ) { p = y.right; if ( p == null || p.left != e ) { while( ! x.pred() ) x = x.left; p = x.left; } return p; } else if ( x.pred() ) { p = x.left; if ( p == null || p.right != e ) { while( ! y.succ() ) y = y.right; p = y.right; } return p; } x = x.left; y = y.right; } } /* After execution of this method, {@link #modified} is true iff an entry has been deleted. */ SUPPRESS_WARNINGS_KEY_UNCHECKED public VALUE_GENERIC_TYPE REMOVE_VALUE( final KEY_TYPE k ) { modified = false; if ( tree == null ) return defRetValue; int cmp; Entry KEY_VALUE_GENERIC p = tree, q = null; boolean dir = false; final KEY_GENERIC_TYPE kk = KEY_GENERIC_CAST k; while( true ) { if ( ( cmp = compare( kk, p.key ) ) == 0 ) break; else if ( dir = cmp > 0 ) { q = p; if ( ( p = p.right() ) == null ) return defRetValue; } else { q = p; if ( ( p = p.left() ) == null ) return defRetValue; } } if ( p.left == null ) firstEntry = p.next(); if ( p.right == null ) lastEntry = p.prev(); if ( p.succ() ) { if ( p.pred() ) { if ( q != null ) { if ( dir ) q.succ( p.right ); else q.pred( p.left ); } else tree = dir ? p.right : p.left; } else { p.prev().right = p.right; if ( q != null ) { if ( dir ) q.right = p.left; else q.left = p.left; } else tree = p.left; } } else { Entry KEY_VALUE_GENERIC r = p.right; if ( r.pred() ) { r.left = p.left; r.pred( p.pred() ); if ( ! r.pred() ) r.prev().right = r; if ( q != null ) { if ( dir ) q.right = r; else q.left = r; } else tree = r; r.balance( p.balance() ); q = r; dir = true; } else { Entry KEY_VALUE_GENERIC s; while( true ) { s = r.left; if ( s.pred() ) break; r = s; } if ( s.succ() ) r.pred( s ); else r.left = s.right; s.left = p.left; if ( ! 
p.pred() ) { p.prev().right = s; s.pred( false ); } s.right = p.right; s.succ( false ); if ( q != null ) { if ( dir ) q.right = s; else q.left = s; } else tree = s; s.balance( p.balance() ); q = r; dir = false; } } Entry KEY_VALUE_GENERIC y; while( q != null ) { y = q; q = parent( y ); if ( ! dir ) { dir = q != null && q.left != y; y.incBalance(); if ( y.balance() == 1 ) break; else if ( y.balance() == 2 ) { Entry KEY_VALUE_GENERIC x = y.right; if ( ASSERTS ) assert x != null; if ( x.balance() == -1 ) { Entry KEY_VALUE_GENERIC w; if ( ASSERTS ) assert x.balance() == -1; w = x.left; x.left = w.right; w.right = x; y.right = w.left; w.left = y; if ( w.balance() == 1 ) { x.balance( 0 ); y.balance( -1 ); } else if ( w.balance() == 0 ) { x.balance( 0 ); y.balance( 0 ); } else { if ( ASSERTS ) assert w.balance() == -1; x.balance( 1 ); y.balance( 0 ); } w.balance( 0 ); if ( w.pred() ) { y.succ( w ); w.pred( false ); } if ( w.succ() ) { x.pred( w ); w.succ( false ); } if ( q != null ) { if ( dir ) q.right = w; else q.left = w; } else tree = w; } else { if ( q != null ) { if ( dir ) q.right = x; else q.left = x; } else tree = x; if ( x.balance() == 0 ) { y.right = x.left; x.left = y; x.balance( -1 ); y.balance( +1 ); break; } if ( ASSERTS ) assert x.balance() == 1; if ( x.pred() ) { y.succ( true ); x.pred( false ); } else y.right = x.left; x.left = y; y.balance( 0 ); x.balance( 0 ); } } } else { dir = q != null && q.left != y; y.decBalance(); if ( y.balance() == -1 ) break; else if ( y.balance() == -2 ) { Entry KEY_VALUE_GENERIC x = y.left; if ( ASSERTS ) assert x != null; if ( x.balance() == 1 ) { Entry KEY_VALUE_GENERIC w; if ( ASSERTS ) assert x.balance() == 1; w = x.right; x.right = w.left; w.left = x; y.left = w.right; w.right = y; if ( w.balance() == -1 ) { x.balance( 0 ); y.balance( 1 ); } else if ( w.balance() == 0 ) { x.balance( 0 ); y.balance( 0 ); } else { if ( ASSERTS ) assert w.balance() == 1; x.balance( -1 ); y.balance( 0 ); } w.balance( 0 ); if ( w.pred() ) { x.succ( w ); w.pred( false ); } if ( w.succ() ) { y.pred( w ); w.succ( false ); } if ( q != null ) { if ( dir ) q.right = w; else q.left = w; } else tree = w; } else { if ( q != null ) { if ( dir ) q.right = x; else q.left = x; } else tree = x; if ( x.balance() == 0 ) { y.left = x.right; x.right = y; x.balance( +1 ); y.balance( -1 ); break; } if ( ASSERTS ) assert x.balance() == -1; if ( x.succ() ) { y.pred( true ); x.succ( false ); } else y.left = x.right; x.right = y; y.balance( 0 ); x.balance( 0 ); } } } } modified = true; count--; if ( ASSERTS ) checkTree( tree ); return p.value; } #if ! KEY_CLASS_Object || VALUES_PRIMITIVE /** {@inheritDoc} * @deprecated Please use the corresponding type-specific method instead. */ @Deprecated @Override public VALUE_GENERIC_CLASS put( final KEY_GENERIC_CLASS ok, final VALUE_GENERIC_CLASS ov ) { final VALUE_GENERIC_TYPE oldValue = put( KEY_CLASS2TYPE(ok), VALUE_CLASS2TYPE(ov) ); return modified ? OBJECT_DEFAULT_RETURN_VALUE : VALUE2OBJ( oldValue ); } #endif #if ! KEY_CLASS_Object || VALUES_PRIMITIVE /** {@inheritDoc} * @deprecated Please use the corresponding type-specific method instead. */ @Deprecated @Override public VALUE_GENERIC_CLASS remove( final Object ok ) { final VALUE_GENERIC_TYPE oldValue = REMOVE_VALUE( KEY_OBJ2TYPE( ok ) ); return modified ? 
VALUE2OBJ( oldValue ) : OBJECT_DEFAULT_RETURN_VALUE; } #endif public boolean containsValue( final VALUE_TYPE v ) { final ValueIterator i = new ValueIterator(); VALUE_GENERIC_TYPE ev; int j = count; while( j-- != 0 ) { ev = i.NEXT_VALUE(); if ( VALUE_EQUALS( ev, v ) ) return true; } return false; } public void clear() { count = 0; tree = null; entries = null; values = null; keys = null; firstEntry = lastEntry = null; } /** This class represents an entry in a tree map. * *

We use a single piece of "metadata", i.e., {@link Entry#info}, to store * information about balance, predecessor status and successor status. * *

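 * <p>A worked example of the packing (illustrative values only): a node whose right pointer is a
 * successor thread, whose left pointer is a real subtree and whose balance is -1 stores
 * info == SUCC_MASK | 0xFF; the balance is recovered as (byte)info, i.e., -1, while the two
 * topmost bits answer the pred()/succ() tests (here pred() is false and succ() is true).
 *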
Note that since the class is recursive, it can be * considered equivalently a tree. */ private static final class Entry KEY_VALUE_GENERIC implements Cloneable, MAP.Entry KEY_VALUE_GENERIC { /** If the bit in this mask is true, {@link #right} points to a successor. */ private final static int SUCC_MASK = 1 << 31; /** If the bit in this mask is true, {@link #left} points to a predecessor. */ private final static int PRED_MASK = 1 << 30; /** The bits in this mask hold the node balance info. You can get it just by casting to byte. */ private final static int BALANCE_MASK = 0xFF; /** The key of this entry. */ KEY_GENERIC_TYPE key; /** The value of this entry. */ VALUE_GENERIC_TYPE value; /** The pointers to the left and right subtrees. */ Entry KEY_VALUE_GENERIC left, right; /** This integers holds different information in different bits (see {@link #SUCC_MASK}, {@link #PRED_MASK} and {@link #BALANCE_MASK}). */ int info; Entry() {} /** Creates a new entry with the given key and value. * * @param k a key. * @param v a value. */ Entry( final KEY_GENERIC_TYPE k, final VALUE_GENERIC_TYPE v ) { this.key = k; this.value = v; info = SUCC_MASK | PRED_MASK; } /** Returns the left subtree. * * @return the left subtree (null if the left * subtree is empty). */ Entry KEY_VALUE_GENERIC left() { return ( info & PRED_MASK ) != 0 ? null : left; } /** Returns the right subtree. * * @return the right subtree (null if the right * subtree is empty). */ Entry KEY_VALUE_GENERIC right() { return ( info & SUCC_MASK ) != 0 ? null : right; } /** Checks whether the left pointer is really a predecessor. * @return true if the left pointer is a predecessor. */ boolean pred() { return ( info & PRED_MASK ) != 0; } /** Checks whether the right pointer is really a successor. * @return true if the right pointer is a successor. */ boolean succ() { return ( info & SUCC_MASK ) != 0; } /** Sets whether the left pointer is really a predecessor. * @param pred if true then the left pointer will be considered a predecessor. */ void pred( final boolean pred ) { if ( pred ) info |= PRED_MASK; else info &= ~PRED_MASK; } /** Sets whether the right pointer is really a successor. * @param succ if true then the right pointer will be considered a successor. */ void succ( final boolean succ ) { if ( succ ) info |= SUCC_MASK; else info &= ~SUCC_MASK; } /** Sets the left pointer to a predecessor. * @param pred the predecessr. */ void pred( final Entry KEY_VALUE_GENERIC pred ) { info |= PRED_MASK; left = pred; } /** Sets the right pointer to a successor. * @param succ the successor. */ void succ( final Entry KEY_VALUE_GENERIC succ ) { info |= SUCC_MASK; right = succ; } /** Sets the left pointer to the given subtree. * @param left the new left subtree. */ void left( final Entry KEY_VALUE_GENERIC left ) { info &= ~PRED_MASK; this.left = left; } /** Sets the right pointer to the given subtree. * @param right the new right subtree. */ void right( final Entry KEY_VALUE_GENERIC right ) { info &= ~SUCC_MASK; this.right = right; } /** Returns the current level of the node. * @return the current level of this node. */ int balance() { return (byte)info; } /** Sets the level of this node. * @param level the new level of this node. */ void balance( int level ) { info &= ~BALANCE_MASK; info |= ( level & BALANCE_MASK ); } /** Increments the level of this node. */ void incBalance() { info = info & ~BALANCE_MASK | ( (byte)info + 1 ) & 0xFF; } /** Decrements the level of this node. 
*/ protected void decBalance() { info = info & ~BALANCE_MASK | ( (byte)info - 1 ) & 0xFF; } /** Computes the next entry in the set order. * * @return the next entry (null) if this is the last entry). */ Entry KEY_VALUE_GENERIC next() { Entry KEY_VALUE_GENERIC next = this.right; if ( ( info & SUCC_MASK ) == 0 ) while ( ( next.info & PRED_MASK ) == 0 ) next = next.left; return next; } /** Computes the previous entry in the set order. * * @return the previous entry (null) if this is the first entry). */ Entry KEY_VALUE_GENERIC prev() { Entry KEY_VALUE_GENERIC prev = this.left; if ( ( info & PRED_MASK ) == 0 ) while ( ( prev.info & SUCC_MASK ) == 0 ) prev = prev.right; return prev; } #if KEYS_PRIMITIVE /** {@inheritDoc} * @deprecated Please use the corresponding type-specific method instead. */ @Deprecated #endif public KEY_GENERIC_CLASS getKey() { return KEY2OBJ(key); } #if ! KEY_CLASS_Object public KEY_GENERIC_TYPE ENTRY_GET_KEY() { return key; } #endif #if VALUES_PRIMITIVE /** {@inheritDoc} * @deprecated Please use the corresponding type-specific method instead. */ @Deprecated #endif public VALUE_GENERIC_CLASS getValue() { return VALUE2OBJ(value); } #if VALUES_PRIMITIVE public VALUE_TYPE ENTRY_GET_VALUE() { return value; } #endif public VALUE_GENERIC_TYPE setValue(final VALUE_GENERIC_TYPE value) { final VALUE_GENERIC_TYPE oldValue = this.value; this.value = value; return oldValue; } #if VALUES_PRIMITIVE public VALUE_GENERIC_CLASS setValue(final VALUE_GENERIC_CLASS value) { return VALUE2OBJ(setValue(VALUE_CLASS2TYPE(value))); } #endif SUPPRESS_WARNINGS_KEY_VALUE_UNCHECKED public Entry KEY_VALUE_GENERIC clone() { Entry KEY_VALUE_GENERIC c; try { c = (Entry KEY_VALUE_GENERIC)super.clone(); } catch(CloneNotSupportedException cantHappen) { throw new InternalError(); } c.key = key; c.value = value; c.info = info; return c; } @SuppressWarnings("unchecked") public boolean equals( final Object o ) { if (!(o instanceof Map.Entry)) return false; Map.Entry e = (Map.Entry)o; return KEY_EQUALS( key, KEY_CLASS2TYPE( e.getKey() ) ) && VALUE_EQUALS( value, VALUE_CLASS2TYPE( e.getValue() ) ); } public int hashCode() { return KEY2JAVAHASH_NOT_NULL(key) ^ VALUE2JAVAHASH(value); } public String toString() { return key + "=>" + value; } /* public void prettyPrint() { prettyPrint(0); } public void prettyPrint(int level) { if ( pred() ) { for (int i = 0; i < level; i++) System.err.print(" "); System.err.println("pred: " + left ); } else if (left != null) left.prettyPrint(level +1 ); for (int i = 0; i < level; i++) System.err.print(" "); System.err.println(key + "=" + value + " (" + balance() + ")"); if ( succ() ) { for (int i = 0; i < level; i++) System.err.print(" "); System.err.println("succ: " + right ); } else if (right != null) right.prettyPrint(level + 1); } */ } /* public void prettyPrint() { System.err.println("size: " + count); if (tree != null) tree.prettyPrint(); } */ SUPPRESS_WARNINGS_KEY_UNCHECKED public boolean containsKey( final KEY_TYPE k ) { return findKey( KEY_GENERIC_CAST k ) != null; } public int size() { return count; } public boolean isEmpty() { return count == 0; } SUPPRESS_WARNINGS_KEY_UNCHECKED public VALUE_GENERIC_TYPE GET_VALUE( final KEY_TYPE k ) { final Entry KEY_VALUE_GENERIC e = findKey( KEY_GENERIC_CAST k ); return e == null ? defRetValue : e.value; } #if KEY_CLASS_Object && VALUES_PRIMITIVE /** {@inheritDoc} * @deprecated Please use the corresponding type-specific method instead. 
*/ @Deprecated @Override SUPPRESS_WARNINGS_KEY_UNCHECKED public VALUE_GENERIC_CLASS get( final Object ok ) { final Entry KEY_VALUE_GENERIC e = findKey( KEY_GENERIC_CAST ok ); return e == null ? OBJECT_DEFAULT_RETURN_VALUE : e.getValue(); } #endif public KEY_GENERIC_TYPE FIRST_KEY() { if ( tree == null ) throw new NoSuchElementException(); return firstEntry.key; } public KEY_GENERIC_TYPE LAST_KEY() { if ( tree == null ) throw new NoSuchElementException(); return lastEntry.key; } /** An abstract iterator on the whole range. * *

This class can iterate in both directions on a threaded tree. */ private class TreeIterator { /** The entry that will be returned by the next call to {@link java.util.ListIterator#previous()} (or null if no previous entry exists). */ Entry KEY_VALUE_GENERIC prev; /** The entry that will be returned by the next call to {@link java.util.ListIterator#next()} (or null if no next entry exists). */ Entry KEY_VALUE_GENERIC next; /** The last entry that was returned (or null if we did not iterate or used {@link #remove()}). */ Entry KEY_VALUE_GENERIC curr; /** The current index (in the sense of a {@link java.util.ListIterator}). Note that this value is not meaningful when this {@link TreeIterator} has been created using the nonempty constructor.*/ int index = 0; TreeIterator() { next = firstEntry; } TreeIterator( final KEY_GENERIC_TYPE k ) { if ( ( next = locateKey( k ) ) != null ) { if ( compare( next.key, k ) <= 0 ) { prev = next; next = next.next(); } else prev = next.prev(); } } public boolean hasNext() { return next != null; } public boolean hasPrevious() { return prev != null; } void updateNext() { next = next.next(); } Entry KEY_VALUE_GENERIC nextEntry() { if ( ! hasNext() ) throw new NoSuchElementException(); curr = prev = next; index++; updateNext(); return curr; } void updatePrevious() { prev = prev.prev(); } Entry KEY_VALUE_GENERIC previousEntry() { if ( ! hasPrevious() ) throw new NoSuchElementException(); curr = next = prev; index--; updatePrevious(); return curr; } public int nextIndex() { return index; } public int previousIndex() { return index - 1; } public void remove() { if ( curr == null ) throw new IllegalStateException(); /* If the last operation was a next(), we are removing an entry that preceeds the current index, and thus we must decrement it. */ if ( curr == prev ) index--; next = prev = curr; updatePrevious(); updateNext(); AVL_TREE_MAP.this.REMOVE_VALUE( curr.key ); curr = null; } public int skip( final int n ) { int i = n; while( i-- != 0 && hasNext() ) nextEntry(); return n - i - 1; } public int back( final int n ) { int i = n; while( i-- != 0 && hasPrevious() ) previousEntry(); return n - i - 1; } } /** An iterator on the whole range. * *

This class can iterate in both directions on a threaded tree. */ private class EntryIterator extends TreeIterator implements ObjectListIterator { EntryIterator() {} EntryIterator( final KEY_GENERIC_TYPE k ) { super( k ); } public MAP.Entry KEY_VALUE_GENERIC next() { return nextEntry(); } public MAP.Entry KEY_VALUE_GENERIC previous() { return previousEntry(); } public void set( MAP.Entry KEY_VALUE_GENERIC ok ) { throw new UnsupportedOperationException(); } public void add( MAP.Entry KEY_VALUE_GENERIC ok ) { throw new UnsupportedOperationException(); } } public ObjectSortedSet ENTRYSET() { if ( entries == null ) entries = new AbstractObjectSortedSet() { final Comparator comparator = new Comparator () { public int compare( final MAP.Entry KEY_VALUE_GENERIC x, final MAP.Entry KEY_VALUE_GENERIC y ) { return AVL_TREE_MAP.this.actualComparator.compare( x.ENTRY_GET_KEY(), y.ENTRY_GET_KEY() ); } }; public Comparator comparator() { return comparator; } public ObjectBidirectionalIterator iterator() { return new EntryIterator(); } public ObjectBidirectionalIterator iterator( final MAP.Entry KEY_VALUE_GENERIC from ) { return new EntryIterator( from.ENTRY_GET_KEY() ); } SUPPRESS_WARNINGS_KEY_UNCHECKED public boolean contains( final Object o ) { if (!(o instanceof Map.Entry)) return false; final Map.Entry e = (Map.Entry)o; #if KEYS_PRIMITIVE if (e.getKey() == null || ! (e.getKey() instanceof KEY_CLASS)) return false; #endif #if VALUES_PRIMITIVE if (e.getValue() == null || ! (e.getValue() instanceof VALUE_CLASS)) return false; #endif final Entry KEY_VALUE_GENERIC f = findKey( KEY_OBJ2TYPE( KEY_GENERIC_CAST e.getKey() ) ); return e.equals( f ); } SUPPRESS_WARNINGS_KEY_UNCHECKED public boolean remove( final Object o ) { if (!(o instanceof Map.Entry)) return false; final Map.Entry e = (Map.Entry)o; #if KEYS_PRIMITIVE if (e.getKey() == null || ! (e.getKey() instanceof KEY_CLASS)) return false; #endif #if VALUES_PRIMITIVE if (e.getValue() == null || ! (e.getValue() instanceof VALUE_CLASS)) return false; #endif final Entry KEY_VALUE_GENERIC f = findKey( KEY_OBJ2TYPE( KEY_GENERIC_CAST e.getKey() ) ); if ( f != null ) AVL_TREE_MAP.this.REMOVE_VALUE( f.key ); return f != null; } public int size() { return count; } public void clear() { AVL_TREE_MAP.this.clear(); } public MAP.Entry KEY_VALUE_GENERIC first() { return firstEntry; } public MAP.Entry KEY_VALUE_GENERIC last() { return lastEntry; } public ObjectSortedSet subSet( MAP.Entry KEY_VALUE_GENERIC from, MAP.Entry KEY_VALUE_GENERIC to ) { return subMap( from.ENTRY_GET_KEY(), to.ENTRY_GET_KEY() ).ENTRYSET(); } public ObjectSortedSet headSet( MAP.Entry KEY_VALUE_GENERIC to ) { return headMap( to.ENTRY_GET_KEY() ).ENTRYSET(); } public ObjectSortedSet tailSet( MAP.Entry KEY_VALUE_GENERIC from ) { return tailMap( from.ENTRY_GET_KEY() ).ENTRYSET(); } }; return entries; } /** An iterator on the whole range of keys. * *

This class can iterate in both directions on the keys of a threaded tree. We * simply override the {@link java.util.ListIterator#next()}/{@link java.util.ListIterator#previous()} methods (and possibly * their type-specific counterparts) so that they return keys instead of entries. */ private final class KeyIterator extends TreeIterator implements KEY_LIST_ITERATOR KEY_GENERIC { public KeyIterator() {} public KeyIterator( final KEY_GENERIC_TYPE k ) { super( k ); } public KEY_GENERIC_TYPE NEXT_KEY() { return nextEntry().key; } public KEY_GENERIC_TYPE PREV_KEY() { return previousEntry().key; } public void set( KEY_GENERIC_TYPE k ) { throw new UnsupportedOperationException(); } public void add( KEY_GENERIC_TYPE k ) { throw new UnsupportedOperationException(); } #if !KEY_CLASS_Object public KEY_GENERIC_CLASS next() { return KEY2OBJ( nextEntry().key ); } public KEY_GENERIC_CLASS previous() { return KEY2OBJ( previousEntry().key ); } public void set( KEY_CLASS ok ) { throw new UnsupportedOperationException(); } public void add( KEY_CLASS ok ) { throw new UnsupportedOperationException(); } #endif }; /** A keyset implementation using a more direct implementation for iterators. */ private class KeySet extends ABSTRACT_SORTED_MAP KEY_VALUE_GENERIC.KeySet { public KEY_BIDI_ITERATOR KEY_GENERIC iterator() { return new KeyIterator(); } public KEY_BIDI_ITERATOR KEY_GENERIC iterator( final KEY_GENERIC_TYPE from ) { return new KeyIterator( from ); } } /** Returns a type-specific sorted set view of the keys contained in this map. * *

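 * <p>For example (assuming the int/int expansion Int2IntAVLTreeMap, whose key set is an
 * IntSortedSet with IntBidirectionalIterator iterators; the names are given for illustration):
 * <pre>
 * Int2IntAVLTreeMap m = new Int2IntAVLTreeMap();
 * m.put(1, 10); m.put(2, 20); m.put(3, 30);
 * IntSortedSet keys = m.keySet();                  // already type-specific, no wrapping needed
 * IntBidirectionalIterator i = keys.iterator(2);   // positioned between 2 and 3
 * i.previousInt();                                 // 2
 * i.nextInt();                                     // 2 again, then 3 on the following call
 * </pre>
 *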
In addition to the semantics of {@link java.util.Map#keySet()}, you can * safely cast the set returned by this call to a type-specific sorted * set interface. * * @return a type-specific sorted set view of the keys contained in this map. */ public SORTED_SET KEY_GENERIC keySet() { if ( keys == null ) keys = new KeySet(); return keys; } /** An iterator on the whole range of values. * *

This class can iterate in both directions on the values of a threaded tree. We * simply override the {@link java.util.ListIterator#next()}/{@link java.util.ListIterator#previous()} methods (and possibly * their type-specific counterparts) so that they return values instead of entries. */ private final class ValueIterator extends TreeIterator implements VALUE_LIST_ITERATOR VALUE_GENERIC { public VALUE_GENERIC_TYPE NEXT_VALUE() { return nextEntry().value; } public VALUE_GENERIC_TYPE PREV_VALUE() { return previousEntry().value; } public void set( VALUE_GENERIC_TYPE v ) { throw new UnsupportedOperationException(); } public void add( VALUE_GENERIC_TYPE v ) { throw new UnsupportedOperationException(); } #if VALUES_PRIMITIVE public VALUE_GENERIC_CLASS next() { return VALUE2OBJ( nextEntry().value ); } public VALUE_GENERIC_CLASS previous() { return VALUE2OBJ( previousEntry().value ); } public void set( VALUE_CLASS ok ) { throw new UnsupportedOperationException(); } public void add( VALUE_CLASS ok ) { throw new UnsupportedOperationException(); } #endif }; /** Returns a type-specific collection view of the values contained in this map. * *

In addition to the semantics of {@link java.util.Map#values()}, you can * safely cast the collection returned by this call to a type-specific collection * interface. * * @return a type-specific collection view of the values contained in this map. */ public VALUE_COLLECTION VALUE_GENERIC values() { if ( values == null ) values = new VALUE_ABSTRACT_COLLECTION VALUE_GENERIC() { public VALUE_ITERATOR VALUE_GENERIC iterator() { return new ValueIterator(); } public boolean contains( final VALUE_TYPE k ) { return containsValue( k ); } public int size() { return count; } public void clear() { AVL_TREE_MAP.this.clear(); } }; return values; } public KEY_COMPARATOR KEY_SUPER_GENERIC comparator() { return actualComparator; } public SORTED_MAP KEY_VALUE_GENERIC headMap( KEY_GENERIC_TYPE to ) { return new Submap( KEY_NULL, true, to, false ); } public SORTED_MAP KEY_VALUE_GENERIC tailMap( KEY_GENERIC_TYPE from ) { return new Submap( from, false, KEY_NULL, true ); } public SORTED_MAP KEY_VALUE_GENERIC subMap( KEY_GENERIC_TYPE from, KEY_GENERIC_TYPE to ) { return new Submap( from, false, to, false ); } /** A submap with given range. * *

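 * <p>A sketch of the view semantics (assuming the int/int expansion Int2IntAVLTreeMap and its
 * Int2IntSortedMap submaps; names given for illustration):
 * <pre>
 * Int2IntAVLTreeMap m = new Int2IntAVLTreeMap();
 * m.put(1, 10); m.put(5, 50); m.put(9, 90);
 * Int2IntSortedMap sub = m.subMap(1, 9);   // keys in [1, 9), backed by m
 * sub.put(5, 55);                          // writes through to m
 * m.remove(1);                             // the removal is visible in sub, too
 * sub.firstIntKey();                       // now 5
 * </pre>
 *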
This class represents a submap. One has to specify the left/right * limits (which can be set to -∞ or ∞). Since the submap is a * view on the map, at a given moment it could happen that the limits of * the range are not any longer in the main map. Thus, things such as * {@link java.util.SortedMap#firstKey()} or {@link java.util.Collection#size()} must be always computed * on-the-fly. */ private final class Submap extends ABSTRACT_SORTED_MAP KEY_VALUE_GENERIC implements java.io.Serializable { private static final long serialVersionUID = -7046029254386353129L; /** The start of the submap range, unless {@link #bottom} is true. */ KEY_GENERIC_TYPE from; /** The end of the submap range, unless {@link #top} is true. */ KEY_GENERIC_TYPE to; /** If true, the submap range starts from -∞. */ boolean bottom; /** If true, the submap range goes to ∞. */ boolean top; /** Cached set of entries. */ @SuppressWarnings("hiding") protected transient ObjectSortedSet entries; /** Cached set of keys. */ @SuppressWarnings("hiding") protected transient SORTED_SET KEY_GENERIC keys; /** Cached collection of values. */ @SuppressWarnings("hiding") protected transient VALUE_COLLECTION VALUE_GENERIC values; /** Creates a new submap with given key range. * * @param from the start of the submap range. * @param bottom if true, the first parameter is ignored and the range starts from -∞. * @param to the end of the submap range. * @param top if true, the third parameter is ignored and the range goes to ∞. */ public Submap( final KEY_GENERIC_TYPE from, final boolean bottom, final KEY_GENERIC_TYPE to, final boolean top ) { if ( ! bottom && ! top && AVL_TREE_MAP.this.compare( from, to ) > 0 ) throw new IllegalArgumentException( "Start key (" + from + ") is larger than end key (" + to + ")" ); this.from = from; this.bottom = bottom; this.to = to; this.top = top; this.defRetValue = AVL_TREE_MAP.this.defRetValue; } public void clear() { final SubmapIterator i = new SubmapIterator(); while( i.hasNext() ) { i.nextEntry(); i.remove(); } } /** Checks whether a key is in the submap range. * @param k a key. * @return true if is the key is in the submap range. */ final boolean in( final KEY_GENERIC_TYPE k ) { return ( bottom || AVL_TREE_MAP.this.compare( k, from ) >= 0 ) && ( top || AVL_TREE_MAP.this.compare( k, to ) < 0 ); } public ObjectSortedSet ENTRYSET() { if ( entries == null ) entries = new AbstractObjectSortedSet() { public ObjectBidirectionalIterator iterator() { return new SubmapEntryIterator(); } public ObjectBidirectionalIterator iterator( final MAP.Entry KEY_VALUE_GENERIC from ) { return new SubmapEntryIterator( from.ENTRY_GET_KEY() ); } public Comparator comparator() { return AVL_TREE_MAP.this.entrySet().comparator(); } SUPPRESS_WARNINGS_KEY_UNCHECKED public boolean contains( final Object o ) { if (!(o instanceof Map.Entry)) return false; final Map.Entry e = (Map.Entry)o; #if KEYS_PRIMITIVE if (e.getKey() == null || ! (e.getKey() instanceof KEY_CLASS)) return false; #endif #if VALUES_PRIMITIVE if (e.getValue() == null || ! (e.getValue() instanceof VALUE_CLASS)) return false; #endif final AVL_TREE_MAP.Entry KEY_VALUE_GENERIC f = findKey( KEY_OBJ2TYPE( KEY_GENERIC_CAST e.getKey() ) ); return f != null && in( f.key ) && e.equals( f ); } SUPPRESS_WARNINGS_KEY_UNCHECKED public boolean remove( final Object o ) { if (!(o instanceof Map.Entry)) return false; final Map.Entry e = (Map.Entry)o; #if KEYS_PRIMITIVE if (e.getKey() == null || ! 
(e.getKey() instanceof KEY_CLASS)) return false; #endif #if VALUES_PRIMITIVE if (e.getValue() == null || ! (e.getValue() instanceof VALUE_CLASS)) return false; #endif final AVL_TREE_MAP.Entry KEY_VALUE_GENERIC f = findKey( KEY_OBJ2TYPE( KEY_GENERIC_CAST e.getKey() ) ); if ( f != null && in( f.key ) ) Submap.this.REMOVE_VALUE( f.key ); return f != null; } public int size() { int c = 0; for( Iterator i = iterator(); i.hasNext(); i.next() ) c++; return c; } public boolean isEmpty() { return ! new SubmapIterator().hasNext(); } public void clear() { Submap.this.clear(); } public MAP.Entry KEY_VALUE_GENERIC first() { return firstEntry(); } public MAP.Entry KEY_VALUE_GENERIC last() { return lastEntry(); } public ObjectSortedSet subSet( MAP.Entry KEY_VALUE_GENERIC from, MAP.Entry KEY_VALUE_GENERIC to ) { return subMap( from.ENTRY_GET_KEY(), to.ENTRY_GET_KEY() ).ENTRYSET(); } public ObjectSortedSet headSet( MAP.Entry KEY_VALUE_GENERIC to ) { return headMap( to.ENTRY_GET_KEY() ).ENTRYSET(); } public ObjectSortedSet tailSet( MAP.Entry KEY_VALUE_GENERIC from ) { return tailMap( from.ENTRY_GET_KEY() ).ENTRYSET(); } }; return entries; } private class KeySet extends ABSTRACT_SORTED_MAP KEY_VALUE_GENERIC.KeySet { public KEY_BIDI_ITERATOR KEY_GENERIC iterator() { return new SubmapKeyIterator(); } public KEY_BIDI_ITERATOR KEY_GENERIC iterator( final KEY_GENERIC_TYPE from ) { return new SubmapKeyIterator( from ); } } public SORTED_SET KEY_GENERIC keySet() { if ( keys == null ) keys = new KeySet(); return keys; } public VALUE_COLLECTION VALUE_GENERIC values() { if ( values == null ) values = new VALUE_ABSTRACT_COLLECTION VALUE_GENERIC() { public VALUE_ITERATOR VALUE_GENERIC iterator() { return new SubmapValueIterator(); } public boolean contains( final VALUE_TYPE k ) { return containsValue( k ); } public int size() { return Submap.this.size(); } public void clear() { Submap.this.clear(); } }; return values; } SUPPRESS_WARNINGS_KEY_UNCHECKED public boolean containsKey( final KEY_TYPE k ) { return in( KEY_GENERIC_CAST k ) && AVL_TREE_MAP.this.containsKey( k ); } public boolean containsValue( final VALUE_TYPE v ) { final SubmapIterator i = new SubmapIterator(); VALUE_TYPE ev; while( i.hasNext() ) { ev = i.nextEntry().value; if ( VALUE_EQUALS( ev, v ) ) return true; } return false; } SUPPRESS_WARNINGS_KEY_UNCHECKED public VALUE_GENERIC_TYPE GET_VALUE(final KEY_TYPE k) { final AVL_TREE_MAP.Entry KEY_VALUE_GENERIC e; final KEY_GENERIC_TYPE kk = KEY_GENERIC_CAST k; return in( kk ) && ( e = findKey( kk ) ) != null ? e.value : this.defRetValue; } #if KEY_CLASS_Object && VALUES_PRIMITIVE /** {@inheritDoc} * @deprecated Please use the corresponding type-specific method instead. */ @Deprecated @Override SUPPRESS_WARNINGS_KEY_UNCHECKED public VALUE_GENERIC_CLASS get( final Object ok ) { final AVL_TREE_MAP.Entry KEY_VALUE_GENERIC e; final KEY_GENERIC_TYPE kk = KEY_GENERIC_CAST KEY_OBJ2TYPE( ok ); return in( kk ) && ( e = findKey( kk ) ) != null ? e.getValue() : OBJECT_DEFAULT_RETURN_VALUE; } #endif public VALUE_GENERIC_TYPE put(final KEY_GENERIC_TYPE k, final VALUE_GENERIC_TYPE v) { modified = false; if ( ! in( k ) ) throw new IllegalArgumentException( "Key (" + k + ") out of range [" + ( bottom ? "-" : String.valueOf( from ) ) + ", " + ( top ? "-" : String.valueOf( to ) ) + ")" ); final VALUE_GENERIC_TYPE oldValue = AVL_TREE_MAP.this.put( k, v ); return modified ? this.defRetValue : oldValue; } #if ! 
KEY_CLASS_Object || VALUES_PRIMITIVE /** {@inheritDoc} * @deprecated Please use the corresponding type-specific method instead. */ @Deprecated @Override public VALUE_GENERIC_CLASS put( final KEY_GENERIC_CLASS ok, final VALUE_GENERIC_CLASS ov ) { final VALUE_GENERIC_TYPE oldValue = put( KEY_CLASS2TYPE(ok), VALUE_CLASS2TYPE(ov) ); return modified ? OBJECT_DEFAULT_RETURN_VALUE : VALUE2OBJ( oldValue ); } #endif SUPPRESS_WARNINGS_KEY_UNCHECKED public VALUE_GENERIC_TYPE REMOVE_VALUE( final KEY_TYPE k ) { modified = false; if ( ! in( KEY_GENERIC_CAST k ) ) return this.defRetValue; final VALUE_GENERIC_TYPE oldValue = AVL_TREE_MAP.this.REMOVE_VALUE( k ); return modified ? oldValue : this.defRetValue; } #if ! KEY_CLASS_Object || VALUES_PRIMITIVE /** {@inheritDoc} * @deprecated Please use the corresponding type-specific method instead. */ @Deprecated @Override public VALUE_GENERIC_CLASS remove( final Object ok ) { final VALUE_GENERIC_TYPE oldValue = REMOVE_VALUE( KEY_OBJ2TYPE( ok ) ); return modified ? VALUE2OBJ( oldValue ) : OBJECT_DEFAULT_RETURN_VALUE; } #endif public int size() { final SubmapIterator i = new SubmapIterator(); int n = 0; while( i.hasNext() ) { n++; i.nextEntry(); } return n; } public boolean isEmpty() { return ! new SubmapIterator().hasNext(); } public KEY_COMPARATOR KEY_SUPER_GENERIC comparator() { return actualComparator; } public SORTED_MAP KEY_VALUE_GENERIC headMap( final KEY_GENERIC_TYPE to ) { if ( top ) return new Submap( from, bottom, to, false ); return compare( to, this.to ) < 0 ? new Submap( from, bottom, to, false ) : this; } public SORTED_MAP KEY_VALUE_GENERIC tailMap( final KEY_GENERIC_TYPE from ) { if ( bottom ) return new Submap( from, false, to, top ); return compare( from, this.from ) > 0 ? new Submap( from, false, to, top ) : this; } public SORTED_MAP KEY_VALUE_GENERIC subMap( KEY_GENERIC_TYPE from, KEY_GENERIC_TYPE to ) { if ( top && bottom ) return new Submap( from, false, to, false ); if ( ! top ) to = compare( to, this.to ) < 0 ? to : this.to; if ( ! bottom ) from = compare( from, this.from ) > 0 ? from : this.from; if ( ! top && ! bottom && from == this.from && to == this.to ) return this; return new Submap( from, false, to, false ); } /** Locates the first entry. * * @return the first entry of this submap, or null if the submap is empty. */ public AVL_TREE_MAP.Entry KEY_VALUE_GENERIC firstEntry() { if ( tree == null ) return null; // If this submap goes to -infinity, we return the main map first entry; otherwise, we locate the start of the map. AVL_TREE_MAP.Entry KEY_VALUE_GENERIC e; if ( bottom ) e = firstEntry; else { e = locateKey( from ); // If we find either the start or something greater we're OK. if ( compare( e.key, from ) < 0 ) e = e.next(); } // Finally, if this subset doesn't go to infinity, we check that the resulting key isn't greater than the end. if ( e == null || ! top && compare( e.key, to ) >= 0 ) return null; return e; } /** Locates the last entry. * * @return the last entry of this submap, or null if the submap is empty. */ public AVL_TREE_MAP.Entry KEY_VALUE_GENERIC lastEntry() { if ( tree == null ) return null; // If this submap goes to infinity, we return the main map last entry; otherwise, we locate the end of the map. AVL_TREE_MAP.Entry KEY_VALUE_GENERIC e; if ( top ) e = lastEntry; else { e = locateKey( to ); // If we find something smaller than the end we're OK. if ( compare( e.key, to ) >= 0 ) e = e.prev(); } // Finally, if this subset doesn't go to -infinity, we check that the resulting key isn't smaller than the start. 
if ( e == null || ! bottom && compare( e.key, from ) < 0 ) return null; return e; } public KEY_GENERIC_TYPE FIRST_KEY() { AVL_TREE_MAP.Entry KEY_VALUE_GENERIC e = firstEntry(); if ( e == null ) throw new NoSuchElementException(); return e.key; } public KEY_GENERIC_TYPE LAST_KEY() { AVL_TREE_MAP.Entry KEY_VALUE_GENERIC e = lastEntry(); if ( e == null ) throw new NoSuchElementException(); return e.key; } #if !KEY_CLASS_Object /** {@inheritDoc} * @deprecated Please use the corresponding type-specific method instead. */ @Deprecated @Override public KEY_GENERIC_CLASS firstKey() { AVL_TREE_MAP.Entry KEY_VALUE_GENERIC e = firstEntry(); if ( e == null ) throw new NoSuchElementException(); return e.getKey(); } /** {@inheritDoc} * @deprecated Please use the corresponding type-specific method instead. */ @Deprecated @Override public KEY_GENERIC_CLASS lastKey() { AVL_TREE_MAP.Entry KEY_VALUE_GENERIC e = lastEntry(); if ( e == null ) throw new NoSuchElementException(); return e.getKey(); } #endif /** An iterator for subranges. * *

This class inherits from {@link TreeIterator}, but overrides the methods that * update the pointer after a {@link java.util.ListIterator#next()} or {@link java.util.ListIterator#previous()}. If we would * move out of the range of the submap we just overwrite the next or previous * entry with null. */ private class SubmapIterator extends TreeIterator { SubmapIterator() { next = firstEntry(); } SubmapIterator( final KEY_GENERIC_TYPE k ) { this(); if ( next != null ) { if ( ! bottom && compare( k, next.key ) < 0 ) prev = null; else if ( ! top && compare( k, ( prev = lastEntry() ).key ) >= 0 ) next = null; else { next = locateKey( k ); if ( compare( next.key, k ) <= 0 ) { prev = next; next = next.next(); } else prev = next.prev(); } } } void updatePrevious() { prev = prev.prev(); if ( ! bottom && prev != null && AVL_TREE_MAP.this.compare( prev.key, from ) < 0 ) prev = null; } void updateNext() { next = next.next(); if ( ! top && next != null && AVL_TREE_MAP.this.compare( next.key, to ) >= 0 ) next = null; } } private class SubmapEntryIterator extends SubmapIterator implements ObjectListIterator { SubmapEntryIterator() {} SubmapEntryIterator( final KEY_GENERIC_TYPE k ) { super( k ); } public MAP.Entry KEY_VALUE_GENERIC next() { return nextEntry(); } public MAP.Entry KEY_VALUE_GENERIC previous() { return previousEntry(); } public void set( MAP.Entry KEY_VALUE_GENERIC ok ) { throw new UnsupportedOperationException(); } public void add( MAP.Entry KEY_VALUE_GENERIC ok ) { throw new UnsupportedOperationException(); } } /** An iterator on a subrange of keys. * *

This class can iterate in both directions on a subrange of the * keys of a threaded tree. We simply override the {@link * java.util.ListIterator#next()}/{@link java.util.ListIterator#previous()} methods (and possibly their * type-specific counterparts) so that they return keys instead of * entries. */ private final class SubmapKeyIterator extends SubmapIterator implements KEY_LIST_ITERATOR KEY_GENERIC { public SubmapKeyIterator() { super(); } public SubmapKeyIterator( KEY_GENERIC_TYPE from ) { super( from ); } public KEY_GENERIC_TYPE NEXT_KEY() { return nextEntry().key; } public KEY_GENERIC_TYPE PREV_KEY() { return previousEntry().key; } public void set( KEY_GENERIC_TYPE k ) { throw new UnsupportedOperationException(); } public void add( KEY_GENERIC_TYPE k ) { throw new UnsupportedOperationException(); } #if !KEY_CLASS_Object public KEY_GENERIC_CLASS next() { return KEY2OBJ( nextEntry().key ); } public KEY_GENERIC_CLASS previous() { return KEY2OBJ( previousEntry().key ); } public void set( KEY_CLASS ok ) { throw new UnsupportedOperationException(); } public void add( KEY_CLASS ok ) { throw new UnsupportedOperationException(); } #endif }; /** An iterator on a subrange of values. * *

This class can iterate in both directions on the values of a * subrange of the keys of a threaded tree. We simply override the * {@link java.util.ListIterator#next()}/{@link java.util.ListIterator#previous()} methods (and possibly their * type-specific counterparts) so that they return values instead of * entries. */ private final class SubmapValueIterator extends SubmapIterator implements VALUE_LIST_ITERATOR VALUE_GENERIC { public VALUE_GENERIC_TYPE NEXT_VALUE() { return nextEntry().value; } public VALUE_GENERIC_TYPE PREV_VALUE() { return previousEntry().value; } public void set( VALUE_GENERIC_TYPE v ) { throw new UnsupportedOperationException(); } public void add( VALUE_GENERIC_TYPE v ) { throw new UnsupportedOperationException(); } #if VALUES_PRIMITIVE public VALUE_GENERIC_CLASS next() { return VALUE2OBJ( nextEntry().value ); } public VALUE_GENERIC_CLASS previous() { return VALUE2OBJ( previousEntry().value ); } public void set( VALUE_CLASS ok ) { throw new UnsupportedOperationException(); } public void add( VALUE_CLASS ok ) { throw new UnsupportedOperationException(); } #endif }; } /** Returns a deep copy of this tree map. * *

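 * <p>For instance (assuming the int/int expansion Int2IntAVLTreeMap):
 * <pre>
 * Int2IntAVLTreeMap m = new Int2IntAVLTreeMap();
 * m.put(1, 10);
 * Int2IntAVLTreeMap c = m.clone();
 * m.put(2, 20);       // the tree structure is copied, so c is unaffected
 * c.containsKey(2);   // false
 * </pre>
 *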
This method performs a deep copy of this tree map; the data stored in the * set, however, is not cloned. Note that this makes a difference only for object keys. * * @return a deep copy of this tree map. */ SUPPRESS_WARNINGS_KEY_VALUE_UNCHECKED public AVL_TREE_MAP KEY_VALUE_GENERIC clone() { AVL_TREE_MAP KEY_VALUE_GENERIC c; try { c = (AVL_TREE_MAP KEY_VALUE_GENERIC)super.clone(); } catch(CloneNotSupportedException cantHappen) { throw new InternalError(); } c.keys = null; c.values = null; c.entries = null; c.allocatePaths(); if ( count != 0 ) { // Also this apparently unfathomable code is derived from GNU libavl. Entry KEY_VALUE_GENERIC e, p, q, rp = new Entry KEY_VALUE_GENERIC(), rq = new Entry KEY_VALUE_GENERIC(); p = rp; rp.left( tree ); q = rq; rq.pred( null ); while( true ) { if ( ! p.pred() ) { e = p.left.clone(); e.pred( q.left ); e.succ( q ); q.left( e ); p = p.left; q = q.left; } else { while( p.succ() ) { p = p.right; if ( p == null ) { q.right = null; c.tree = rq.left; c.firstEntry = c.tree; while( c.firstEntry.left != null ) c.firstEntry = c.firstEntry.left; c.lastEntry = c.tree; while( c.lastEntry.right != null ) c.lastEntry = c.lastEntry.right; return c; } q = q.right; } p = p.right; q = q.right; } if ( ! p.succ() ) { e = p.right.clone(); e.succ( q.right ); e.pred( q ); q.right( e ); } } } return c; } private void writeObject(java.io.ObjectOutputStream s) throws java.io.IOException { int n = count; EntryIterator i = new EntryIterator(); Entry KEY_VALUE_GENERIC e; s.defaultWriteObject(); while(n-- != 0) { e = i.nextEntry(); s.WRITE_KEY( e.key ); s.WRITE_VALUE( e.value ); } } /** Reads the given number of entries from the input stream, returning the corresponding tree. * * @param s the input stream. * @param n the (positive) number of entries to read. * @param pred the entry containing the key that preceeds the first key in the tree. * @param succ the entry containing the key that follows the last key in the tree. */ SUPPRESS_WARNINGS_KEY_VALUE_UNCHECKED private Entry KEY_VALUE_GENERIC readTree( final java.io.ObjectInputStream s, final int n, final Entry KEY_VALUE_GENERIC pred, final Entry KEY_VALUE_GENERIC succ ) throws java.io.IOException, ClassNotFoundException { if ( n == 1 ) { final Entry KEY_VALUE_GENERIC top = new Entry KEY_VALUE_GENERIC( KEY_GENERIC_CAST s.READ_KEY(), VALUE_GENERIC_CAST s.READ_VALUE() ); top.pred( pred ); top.succ( succ ); return top; } if ( n == 2 ) { /* We handle separately this case so that recursion will *always* be on nonempty subtrees. */ final Entry KEY_VALUE_GENERIC top = new Entry KEY_VALUE_GENERIC( KEY_GENERIC_CAST s.READ_KEY(), VALUE_GENERIC_CAST s.READ_VALUE() ); top.right( new Entry KEY_VALUE_GENERIC( KEY_GENERIC_CAST s.READ_KEY(), VALUE_GENERIC_CAST s.READ_VALUE() ) ); top.right.pred( top ); top.balance( 1 ); top.pred( pred ); top.right.succ( succ ); return top; } // The right subtree is the largest one. final int rightN = n / 2, leftN = n - rightN - 1; final Entry KEY_VALUE_GENERIC top = new Entry KEY_VALUE_GENERIC(); top.left( readTree( s, leftN, pred, top ) ); top.key = KEY_GENERIC_CAST s.READ_KEY(); top.value = VALUE_GENERIC_CAST s.READ_VALUE(); top.right( readTree( s, rightN, top, succ ) ); if ( n == ( n & -n ) ) top.balance( 1 ); // Quick test for determining whether n is a power of 2. return top; } private void readObject( java.io.ObjectInputStream s ) throws java.io.IOException, ClassNotFoundException { s.defaultReadObject(); /* The storedComparator is now correctly set, but we must restore on-the-fly the actualComparator. 
*/ setActualComparator(); allocatePaths(); if ( count != 0 ) { tree = readTree( s, count, null, null ); Entry KEY_VALUE_GENERIC e; e = tree; while( e.left() != null ) e = e.left(); firstEntry = e; e = tree; while( e.right() != null ) e = e.right(); lastEntry = e; } if ( ASSERTS ) checkTree( tree ); } #ifdef ASSERTS_CODE private static KEY_VALUE_GENERIC int checkTree( Entry KEY_VALUE_GENERIC e ) { if ( e == null ) return 0; final int leftN = checkTree( e.left() ), rightN = checkTree( e.right() ); if ( leftN + e.balance() != rightN ) throw new AssertionError( "Mismatch between left tree size (" + leftN + "), right tree size (" + rightN + ") and balance (" + e.balance() + ")" ); return Math.max( leftN , rightN ) + 1; } #else private static KEY_VALUE_GENERIC int checkTree( @SuppressWarnings("unused") Entry KEY_VALUE_GENERIC e ) { return 0; } #endif #ifdef TEST private static long seed = System.currentTimeMillis(); private static java.util.Random r = new java.util.Random( seed ); private static KEY_TYPE genKey() { #if KEY_CLASS_Byte || KEY_CLASS_Short || KEY_CLASS_Character return (KEY_TYPE)(r.nextInt()); #elif KEYS_PRIMITIVE return r.NEXT_KEY(); #else return Integer.toBinaryString( r.nextInt() ); #endif } private static VALUE_TYPE genValue() { #if VALUE_CLASS_Byte || VALUE_CLASS_Short || VALUE_CLASS_Character return (VALUE_TYPE)(r.nextInt()); #elif VALUES_PRIMITIVE return r.NEXT_VALUE(); #elif !VALUE_CLASS_Reference || KEY_CLASS_Reference return Integer.toBinaryString( r.nextInt() ); #else return new java.io.Serializable() {}; #endif } private static java.text.NumberFormat format = new java.text.DecimalFormat( "#,###.00" ); private static java.text.FieldPosition p = new java.text.FieldPosition( 0 ); private static String format( double d ) { StringBuffer s = new StringBuffer(); return format.format( d, s, p ).toString(); } private static void speedTest( int n, boolean comp ) { int i, j; AVL_TREE_MAP m; java.util.TreeMap t; KEY_TYPE k[] = new KEY_TYPE[n]; KEY_TYPE nk[] = new KEY_TYPE[n]; VALUE_TYPE v[] = new VALUE_TYPE[n]; long ms; for( i = 0; i < n; i++ ) { k[i] = genKey(); nk[i] = genKey(); v[i] = genValue(); } double totPut = 0, totYes = 0, totNo = 0, totAddTo = 0, totIterFor = 0, totIterBack = 0, totRemYes = 0, d, dd, ddd; if ( comp ) { for( j = 0; j < 20; j++ ) { t = new java.util.TreeMap(); /* We first add all pairs to t. */ for( i = 0; i < n; i++ ) t.put( KEY2OBJ( k[i] ), VALUE2OBJ( v[i] ) ); /* Then we remove the first half and put it back. */ for( i = 0; i < n/2; i++ ) t.remove( KEY2OBJ( k[i] ) ); ms = System.currentTimeMillis(); for( i = 0; i < n/2; i++ ) t.put( KEY2OBJ( k[i] ), VALUE2OBJ( v[i] ) ); d = System.currentTimeMillis() - ms; /* Then we remove the other half and put it back again. */ ms = System.currentTimeMillis(); for( i = n/2; i < n; i++ ) t.remove( KEY2OBJ( k[i] ) ); dd = System.currentTimeMillis() - ms ; ms = System.currentTimeMillis(); for( i = n/2; i < n; i++ ) t.put( KEY2OBJ( k[i] ), VALUE2OBJ( v[i] ) ); d += System.currentTimeMillis() - ms; if ( j > 2 ) totPut += n/d; System.out.print("Add: " + format( n/d ) +" K/s " ); /* Then we remove again the first half. */ ms = System.currentTimeMillis(); for( i = 0; i < n/2; i++ ) t.remove( KEY2OBJ( k[i] ) ); dd += System.currentTimeMillis() - ms ; if ( j > 2 ) totRemYes += n/dd; System.out.print("RemYes: " + format( n/dd ) +" K/s " ); /* And then we put it back. 
*/ for( i = 0; i < n/2; i++ ) t.put( KEY2OBJ( k[i] ), VALUE2OBJ( v[i] ) ); #if VALUES_PRIMITIVE && !VALUE_CLASS_Boolean /* we perform n/2 addTo() operations with get then put */ ms = System.currentTimeMillis(); for( i = 0; i < n/2; i++ ) t.put( KEY2OBJ( k[i] ), (VALUE_TYPE) ((VALUE_CLASS) t.get( KEY2OBJ(k[i])) + i) ); ddd = System.currentTimeMillis() - ms; if ( j > 2 ) totAddTo += n/ddd; System.out.print("AddTo: " + format( n/ddd ) +" K/s " ); #endif /* We check for pairs in t. */ ms = System.currentTimeMillis(); for( i = 0; i < n; i++ ) t.containsKey( KEY2OBJ( k[i] ) ); d = 1.0 * n / (System.currentTimeMillis() - ms ); if ( j > 2 ) totYes += d; System.out.print("Yes: " + format( d ) +" K/s " ); /* We check for pairs not in t. */ ms = System.currentTimeMillis(); for( i = 0; i < n; i++ ) t.containsKey( KEY2OBJ( nk[i] ) ); d = 1.0 * n / (System.currentTimeMillis() - ms ); if ( j > 2 ) totNo += d; System.out.print("No: " + format( d ) +" K/s " ); /* We iterate on t. */ ms = System.currentTimeMillis(); for( Iterator it = t.entrySet().iterator(); it.hasNext(); it.next() ); d = 1.0 * n / (System.currentTimeMillis() - ms ); if ( j > 2 ) totIterFor += d; System.out.print("IterFor: " + format( d ) +" K/s " ); System.out.println(); } System.out.println(); System.out.println( "java.util Put: " + format( totPut/(j-3) ) + " K/s RemYes: " + format( totRemYes/(j-3) ) + " K/s Yes: " + format( totYes/(j-3) ) + " K/s No: " + format( totNo/(j-3) )+ "K/s AddTo: " + format( totAddTo/(j-3) ) + " K/s IterFor: " + format( totIterFor/(j-3) ) + " K/s" ); System.out.println(); t = null; totPut = totYes = totNo = totIterFor = totIterBack = totRemYes = totAddTo = 0; } for( j = 0; j < 20; j++ ) { m = new AVL_TREE_MAP(); /* We first add all pairs to m. */ for( i = 0; i < n; i++ ) m.put( k[i], v[i] ); /* Then we remove the first half and put it back. */ for( i = 0; i < n/2; i++ ) m.remove( k[i] ); ms = System.currentTimeMillis(); for( i = 0; i < n/2; i++ ) m.put( k[i], v[i] ); d = System.currentTimeMillis() - ms; /* Then we remove the other half and put it back again. */ ms = System.currentTimeMillis(); for( i = n/2; i < n; i++ ) m.remove( k[i] ); dd = System.currentTimeMillis() - ms ; ms = System.currentTimeMillis(); for( i = n/2; i < n; i++ ) m.put( k[i], v[i] ); d += System.currentTimeMillis() - ms; if ( j > 2 ) totPut += n/d; System.out.print("Add: " + format( n/d ) +" K/s " ); /* Then we remove again the first half. */ ms = System.currentTimeMillis(); for( i = 0; i < n/2; i++ ) m.remove( k[i] ); dd += System.currentTimeMillis() - ms ; if ( j > 2 ) totRemYes += n/dd; System.out.print("RemYes: " + format( n/dd ) +" K/s " ); /* And then we put it back. */ for( i = 0; i < n/2; i++ ) m.put( k[i], v[i] ); #if VALUES_PRIMITIVE && !VALUE_CLASS_Boolean /* we perform n/2 addTo() operations with get then put */ ms = System.currentTimeMillis(); for( i = 0; i < n/2; i++ ) m.addTo( k[i], (VALUE_TYPE) i ); ddd = System.currentTimeMillis() - ms; if ( j > 2 ) totAddTo += n/ddd; System.out.print("AddTo: " + format( n/ddd ) +" K/s " ); #endif /* We check for pairs in m. */ ms = System.currentTimeMillis(); for( i = 0; i < n; i++ ) m.containsKey( k[i] ); d = 1.0 * n / (System.currentTimeMillis() - ms ); if ( j > 2 ) totYes += d; System.out.print("Yes: " + format( d ) +" K/s " ); /* We check for pairs not in m. */ ms = System.currentTimeMillis(); for( i = 0; i < n; i++ ) m.containsKey( nk[i] ); d = 1.0 * n / (System.currentTimeMillis() - ms ); if ( j > 2 ) totNo += d; System.out.print("No: " + format( d ) +" K/s " ); /* We iterate on m. 
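	   The loop below relies on the fact that the entry-set iterator of this map
	   is bidirectional: it is cast to java.util.ListIterator, scanned forward,
	   and then the same iterator is walked backwards. In sketch form:

	     java.util.ListIterator it = (java.util.ListIterator)m.entrySet().iterator();
	     while( it.hasNext() ) it.next();          // forward over all entries
	     while( it.hasPrevious() ) it.previous();  // and back again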
*/ java.util.ListIterator it = (java.util.ListIterator)m.entrySet().iterator(); ms = System.currentTimeMillis(); for( it = (java.util.ListIterator)m.entrySet().iterator(); it.hasNext(); it.next() ); d = 1.0 * n / (System.currentTimeMillis() - ms ); if ( j > 2 ) totIterFor += d; System.out.print("IterFor: " + format( d ) +" K/s " ); /* We iterate back on m. */ ms = System.currentTimeMillis(); for( ; it.hasPrevious(); it.previous() ); d = 1.0 * n / (System.currentTimeMillis() - ms ); if ( j > 2 ) totIterBack += d; System.out.print("IterBack: " + format( d ) +" K/s " ); System.out.println(); } System.out.println(); System.out.println( "fastutil Put: " + format( totPut/(j-3) ) + " K/s RemYes: " + format( totRemYes/(j-3) ) + " K/s Yes: " + format( totYes/(j-3) ) + " K/s No: " + format( totNo/(j-3) )+ "K/s AddTo: " + format( totAddTo/(j-3) ) + " K/s IterFor: " + format( totIterFor/(j-3) ) + " K/s" ); System.out.println(); } private static boolean valEquals(Object o1, Object o2) { return o1 == null ? o2 == null : o1.equals(o2); } private static void fatal( String msg ) { System.out.println( msg ); System.exit( 1 ); } private static void ensure( boolean cond, String msg ) { if ( cond ) return; fatal( msg ); } private static void compareMT( SORTED_MAP m, SortedMap t, int level, long seed ) { /* Now we check that m and t are equal. */ if ( !m.equals( t ) || ! t.equals( m ) ) System.err.println("m: " + m + " t: " + t); ensure( m.equals( t ), "Error (" + level + ", " + seed + "): ! m.equals( t ) at start" ); ensure( t.equals( m ), "Error (" + level + ", " + seed + "): ! t.equals( m ) at start" ); /* Now we check that m actually holds that data. */ for(Iterator i=t.entrySet().iterator(); i.hasNext(); ) { java.util.Map.Entry e = (java.util.Map.Entry)i.next(); ensure( valEquals(e.getValue(), m.get(e.getKey())), "Error (" + level + ", " + seed + "): m and t differ on an entry ("+e+") after insertion (iterating on t)" ); } /* Now we check that m actually holds that data, but iterating on m. */ for(Iterator i=m.entrySet().iterator(); i.hasNext(); ) { Entry e = (Entry)i.next(); ensure( valEquals(e.getValue(), t.get(e.getKey())), "Error (" + level + ", " + seed + "): m and t differ on an entry ("+e+") after insertion (iterating on m)" ); } /* Now we check that m actually holds the same keys. */ for(Iterator i=t.keySet().iterator(); i.hasNext(); ) { Object o = i.next(); ensure( m.containsKey(o), "Error (" + level + ", " + seed + "): m and t differ on a key ("+o+") after insertion (iterating on t)" ); ensure( m.keySet().contains(o), "Error (" + level + ", " + seed + "): m and t differ on a key ("+o+", in keySet()) after insertion (iterating on t)" ); } /* Now we check that m actually holds the same keys, but iterating on m. */ for(Iterator i=m.keySet().iterator(); i.hasNext(); ) { Object o = i.next(); ensure( t.containsKey(o), "Error (" + level + ", " + seed + "): m and t differ on a key after insertion (iterating on m)" ); ensure( t.keySet().contains(o), "Error (" + level + ", " + seed + "): m and t differ on a key (in keySet()) after insertion (iterating on m)" ); } /* Now we check that m actually hold the same values. 
*/ for(Iterator i=t.values().iterator(); i.hasNext(); ) { Object o = i.next(); ensure( m.containsValue(o), "Error (" + level + ", " + seed + "): m and t differ on a value after insertion (iterating on t)" ); ensure( m.values().contains(o), "Error (" + level + ", " + seed + "): m and t differ on a value (in values()) after insertion (iterating on t)" ); } /* Now we check that m actually hold the same values, but iterating on m. */ for(Iterator i=m.values().iterator(); i.hasNext(); ) { Object o = i.next(); ensure( t.containsValue(o), "Error (" + level + ", " + seed + "): m and t differ on a value after insertion (iterating on m)"); ensure( t.values().contains(o), "Error (" + level + ", " + seed + "): m and t differ on a value (in values()) after insertion (iterating on m)"); } } private static Object[] k, v, nk; private static KEY_TYPE kt[]; private static KEY_TYPE nkt[]; private static VALUE_TYPE vt[]; private static AVL_TREE_MAP topMap; protected static void testMaps( SORTED_MAP m, SortedMap t, int n, int level ) { long ms; boolean mThrowsIllegal, tThrowsIllegal, mThrowsNoElement, tThrowsNoElement; Object rt = null, rm = null; if ( level > 4 ) return; /* Now we check that both maps agree on first/last keys. */ mThrowsNoElement = mThrowsIllegal = tThrowsNoElement = tThrowsIllegal = false; try { m.firstKey(); } catch ( NoSuchElementException e ) { mThrowsNoElement = true; } try { t.firstKey(); } catch ( NoSuchElementException e ) { tThrowsNoElement = true; } ensure( mThrowsNoElement == tThrowsNoElement, "Error (" + level + ", " + seed + "): firstKey() divergence at start in NoSuchElementException (" + mThrowsNoElement + ", " + tThrowsNoElement + ")" ); if ( ! mThrowsNoElement ) ensure( t.firstKey().equals( m.firstKey() ), "Error (" + level + ", " + seed + "): m and t differ at start on their first key (" + m.firstKey() + ", " + t.firstKey() +")" ); mThrowsNoElement = mThrowsIllegal = tThrowsNoElement = tThrowsIllegal = false; try { m.lastKey(); } catch ( NoSuchElementException e ) { mThrowsNoElement = true; } try { t.lastKey(); } catch ( NoSuchElementException e ) { tThrowsNoElement = true; } ensure( mThrowsNoElement == tThrowsNoElement, "Error (" + level + ", " + seed + "): lastKey() divergence at start in NoSuchElementException (" + mThrowsNoElement + ", " + tThrowsNoElement + ")" ); if ( ! mThrowsNoElement ) ensure( t.lastKey().equals( m.lastKey() ), "Error (" + level + ", " + seed + "): m and t differ at start on their last key (" + m.lastKey() + ", " + t.lastKey() +")"); compareMT( m, t, level, seed ); /* Now we check that inquiries about random data give the same answer in m and t. For m we use the polymorphic method. 
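	   Here "polymorphic" means the Object-based methods inherited from the
	   java.util interfaces, as opposed to the type-specific methods defined by
	   this driver. For a primitive-key instantiation the two forms are, roughly:

	     m.containsKey( k );              // type-specific, no boxing
	     m.containsKey( KEY2OBJ( k ) );   // polymorphic Object signature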
*/ for(int i=0; i 0 ) { badPrevious = true; j.previous(); break; } previous = k; } i = (it.unimi.dsi.fastutil.BidirectionalIterator)((SORTED_SET)m.keySet()).iterator( from ); for( int k = 0; k < 2*n; k++ ) { ensure( i.hasNext() == j.hasNext(), "Error (" + level + ", " + seed + "): divergence in hasNext() (iterator with starting point " + from + ")" ); ensure( i.hasPrevious() == j.hasPrevious() || badPrevious && ( i.hasPrevious() == ( previous != null ) ), "Error (" + level + ", " + seed + "): divergence in hasPrevious() (iterator with starting point " + from + ")" + badPrevious ); if ( r.nextFloat() < .8 && i.hasNext() ) { ensure( ( I = i.next() ).equals( J = j.next() ), "Error (" + level + ", " + seed + "): divergence in next() (" + I + ", " + J + ", iterator with starting point " + from + ")" ); //System.err.println("Done next " + I + " " + J + " " + badPrevious); badPrevious = false; if ( r.nextFloat() < 0.5 ) { //System.err.println("Removing in next"); i.remove(); j.remove(); t.remove( J ); } } else if ( !badPrevious && r.nextFloat() < .2 && i.hasPrevious() ) { ensure( ( I = i.previous() ).equals( J = j.previous() ), "Error (" + level + ", " + seed + "): divergence in previous() (" + I + ", " + J + ", iterator with starting point " + from + ")" ); if ( r.nextFloat() < 0.5 ) { //System.err.println("Removing in prev"); i.remove(); j.remove(); t.remove( J ); } } } } /* Now we check that m actually holds that data. */ ensure( m.equals(t), "Error (" + level + ", " + seed + "): ! m.equals( t ) after iteration" ); ensure( t.equals(m), "Error (" + level + ", " + seed + "): ! t.equals( m ) after iteration" ); /* Now we select a pair of keys and create a submap. */ if ( ! m.isEmpty() ) { java.util.ListIterator i; Object start = m.firstKey(), end = m.firstKey(); for( i = (java.util.ListIterator)m.keySet().iterator(); i.hasNext() && r.nextFloat() < .3; start = end = i.next() ); for( ; i.hasNext() && r.nextFloat() < .95; end = i.next() ); //System.err.println("Checking subMap from " + start + " to " + end + " (level=" + (level+1) + ")..." ); testMaps( (SORTED_MAP)m.subMap( (KEY_CLASS)start, (KEY_CLASS)end ), t.subMap( start, end ), n, level + 1 ); ensure( m.equals(t), "Error (" + level + ", " + seed + "): ! m.equals( t ) after subMap" ); ensure( t.equals(m), "Error (" + level + ", " + seed + "): ! t.equals( m ) after subMap" ); //System.err.println("Checking headMap to " + end + " (level=" + (level+1) + ")..." ); testMaps( (SORTED_MAP)m.headMap( (KEY_CLASS)end ), t.headMap( end ), n, level + 1 ); ensure( m.equals(t), "Error (" + level + ", " + seed + "): ! m.equals( t ) after headMap" ); ensure( t.equals(m), "Error (" + level + ", " + seed + "): ! t.equals( m ) after headMap" ); //System.err.println("Checking tailMap from " + start + " (level=" + (level+1) + ")..." ); testMaps( (SORTED_MAP)m.tailMap( (KEY_CLASS)start ), t.tailMap( start ), n, level + 1 ); ensure( m.equals(t), "Error (" + level + ", " + seed + "): ! m.equals( t ) after tailMap" ); ensure( t.equals(m), "Error (" + level + ", " + seed + "): ! 
t.equals( m ) after tailMap" ); } } private static void test( int n ) { AVL_TREE_MAP m = new AVL_TREE_MAP(); SortedMap t = new java.util.TreeMap(); topMap = m; k = new Object[n]; v = new Object[n]; nk = new Object[n]; kt = new KEY_TYPE[n]; nkt = new KEY_TYPE[n]; vt = new VALUE_TYPE[n]; for( int i = 0; i < n; i++ ) { #if KEY_CLASS_Object k[i] = kt[i] = genKey(); nk[i] = nkt[i] = genKey(); #else k[i] = new KEY_CLASS( kt[i] = genKey() ); nk[i] = new KEY_CLASS( nkt[i] = genKey() ); #endif #if VALUES_REFERENCE v[i] = vt[i] = genValue(); #else v[i] = new VALUE_CLASS( vt[i] = genValue() ); #endif } /* We add pairs to t. */ for( int i = 0; i < n; i++ ) t.put( k[i], v[i] ); /* We add to m the same data */ m.putAll(t); testMaps( m, t, n, 0 ); System.out.println("Test OK"); return; } public static void main( String args[] ) { int n = Integer.parseInt(args[1]); if ( args.length > 2 ) r = new java.util.Random( seed = Long.parseLong( args[ 2 ] ) ); try { if ("speedTest".equals(args[0]) || "speedComp".equals(args[0])) speedTest( n, "speedComp".equals(args[0]) ); else if ( "test".equals( args[0] ) ) test(n); } catch( Throwable e ) { e.printStackTrace( System.err ); System.err.println( "seed: " + seed ); } } #endif } fastutil-7.1.0/drv/AVLTreeSet.drv0000664000000000000000000017304713050701620015306 0ustar rootroot/* * Copyright (C) 2002-2016 Sebastiano Vigna * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package PACKAGE; import java.util.Collection; import java.util.Comparator; import java.util.Iterator; import java.util.SortedSet; import java.util.NoSuchElementException; /** A type-specific AVL tree set with a fast, small-footprint implementation. * *
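 * A brief usage sketch (illustration only: the concrete class and method names
 * depend on the key type this driver is expanded for, e.g. an int
 * instantiation is generated as IntAVLTreeSet; k0 and k1 are hypothetical keys):
 *
 *   AVL_TREE_SET KEY_GENERIC s = new AVL_TREE_SET KEY_GENERIC();
 *   s.add( k0 );
 *   s.add( k1 );
 *   s.contains( k0 );    // true
 *   s.first();           // the smallest key
 *   s.headSet( k1 );     // live view of the keys strictly smaller than k1
 *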

The iterators provided by this class are type-specific {@link * it.unimi.dsi.fastutil.BidirectionalIterator bidirectional iterators}. * Moreover, the iterator returned by iterator() can be safely cast * to a type-specific {@linkplain java.util.ListIterator list iterator}. */ public class AVL_TREE_SET KEY_GENERIC extends ABSTRACT_SORTED_SET KEY_GENERIC implements java.io.Serializable, Cloneable, SORTED_SET KEY_GENERIC { /** A reference to the root entry. */ protected transient Entry KEY_GENERIC tree; /** Number of elements in this set. */ protected int count; /** The entry of the first element of this set. */ protected transient Entry KEY_GENERIC firstEntry; /** The entry of the last element of this set. */ protected transient Entry KEY_GENERIC lastEntry; /** This set's comparator, as provided in the constructor. */ protected Comparator storedComparator; /** This set's actual comparator; it may differ from {@link #storedComparator} because it is always a type-specific comparator, so it could be derived from the former by wrapping. */ protected transient KEY_COMPARATOR KEY_SUPER_GENERIC actualComparator; private static final long serialVersionUID = -7046029254386353130L; private static final boolean ASSERTS = ASSERTS_VALUE; { allocatePaths(); } /** Creates a new empty tree set. */ public AVL_TREE_SET() { tree = null; count = 0; } /** Generates the comparator that will be actually used. * *
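 * For instance (sketch; the wrapping only applies to primitive-key
 * instantiations, and the comparator instances shown are hypothetical):
 *
 *   new AVL_TREE_SET KEY_GENERIC( myTypeSpecificComparator );  // used directly as actualComparator
 *   new AVL_TREE_SET KEY_GENERIC( myPlainComparator );         // wrapped on the fly, as described below
 *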

When a specific {@link Comparator} is specified and stored in {@link * #storedComparator}, we must check whether it is type-specific. If it is * so, we can used directly, and we store it in {@link #actualComparator}. Otherwise, * we generate on-the-fly an anonymous class that wraps the non-specific {@link Comparator} * and makes it into a type-specific one. */ private void setActualComparator() { #if KEY_CLASS_Object actualComparator = storedComparator; #else /* If the provided comparator is already type-specific, we use it. Otherwise, we use a wrapper anonymous class to fake that it is type-specific. */ if ( storedComparator == null || storedComparator instanceof KEY_COMPARATOR ) actualComparator = (KEY_COMPARATOR)storedComparator; else actualComparator = new KEY_COMPARATOR KEY_GENERIC() { public int compare( KEY_GENERIC_TYPE k1, KEY_GENERIC_TYPE k2 ) { return storedComparator.compare( KEY2OBJ( k1 ), KEY2OBJ( k2 ) ); } public int compare( KEY_CLASS ok1, KEY_CLASS ok2 ) { return storedComparator.compare( ok1, ok2 ); } }; #endif } /** Creates a new empty tree set with the given comparator. * * @param c a {@link Comparator} (even better, a type-specific comparator). */ public AVL_TREE_SET( final Comparator c ) { this(); storedComparator = c; setActualComparator(); } /** Creates a new tree set copying a given set. * * @param c a collection to be copied into the new tree set. */ public AVL_TREE_SET( final Collection c ) { this(); addAll( c ); } /** Creates a new tree set copying a given sorted set (and its {@link Comparator}). * * @param s a {@link SortedSet} to be copied into the new tree set. */ public AVL_TREE_SET( final SortedSet s ) { this( s.comparator() ); addAll( s ); } /** Creates a new tree set copying a given type-specific collection. * * @param c a type-specific collection to be copied into the new tree set. */ public AVL_TREE_SET( final COLLECTION KEY_EXTENDS_GENERIC c ) { this(); addAll( c ); } /** Creates a new tree set copying a given type-specific sorted set (and its {@link Comparator}). * * @param s a type-specific sorted set to be copied into the new tree set. */ public AVL_TREE_SET( final SORTED_SET KEY_GENERIC s ) { this( s.comparator() ); addAll( s ); } /** Creates a new tree set using elements provided by a type-specific iterator. * * @param i a type-specific iterator whose elements will fill the set. */ public AVL_TREE_SET( final STD_KEY_ITERATOR KEY_EXTENDS_GENERIC i ) { while( i.hasNext() ) add( i.NEXT_KEY() ); } #if KEYS_PRIMITIVE /** Creates a new tree set using elements provided by an iterator. * * @param i an iterator whose elements will fill the set. */ SUPPRESS_WARNINGS_KEY_UNCHECKED public AVL_TREE_SET( final Iterator i ) { this( ITERATORS.AS_KEY_ITERATOR( i ) ); } #endif /** Creates a new tree set and fills it with the elements of a given array using a given {@link Comparator}. * * @param a an array whose elements will be used to fill the set. * @param offset the first element to use. * @param length the number of elements to use. * @param c a {@link Comparator} (even better, a type-specific comparator). */ public AVL_TREE_SET( final KEY_GENERIC_TYPE[] a, final int offset, final int length, final Comparator c ) { this( c ); ARRAYS.ensureOffsetLength( a, offset, length ); for( int i = 0; i < length; i++ ) add( a[ offset + i ] ); } /** Creates a new tree set and fills it with the elements of a given array. * * @param a an array whose elements will be used to fill the set. * @param offset the first element to use. * @param length the number of elements to use. 
*/ public AVL_TREE_SET( final KEY_GENERIC_TYPE[] a, final int offset, final int length ) { this( a, offset, length, null ); } /** Creates a new tree set copying the elements of an array. * * @param a an array to be copied into the new tree set. */ public AVL_TREE_SET( final KEY_GENERIC_TYPE[] a ) { this(); int i = a.length; while( i-- != 0 ) add( a[ i ] ); } /** Creates a new tree set copying the elements of an array using a given {@link Comparator}. * * @param a an array to be copied into the new tree set. * @param c a {@link Comparator} (even better, a type-specific comparator). */ public AVL_TREE_SET( final KEY_GENERIC_TYPE[] a, final Comparator c ) { this( c ); int i = a.length; while( i-- != 0 ) add( a[ i ] ); } /* * The following methods implements some basic building blocks used by * all accessors. They are (and should be maintained) identical to those used in AVLTreeMap.drv. * * The add()/remove() code is derived from Ben Pfaff's GNU libavl * (http://www.msu.edu/~pfaffben/avl/). If you want to understand what's * going on, you should have a look at the literate code contained therein * first. */ /** Compares two keys in the right way. * *

This method uses the {@link #actualComparator} if it is non-null. * Otherwise, it resorts to primitive type comparisons or to {@link Comparable#compareTo(Object) compareTo()}. * * @param k1 the first key. * @param k2 the second key. * @return a number smaller than, equal to or greater than 0, as usual * (i.e., when k1 < k2, k1 = k2 or k1 > k2, respectively). */ SUPPRESS_WARNINGS_KEY_UNCHECKED final int compare( final KEY_GENERIC_TYPE k1, final KEY_GENERIC_TYPE k2 ) { return actualComparator == null ? KEY_CMP( k1, k2 ) : actualComparator.compare( k1, k2 ); } /** Returns the entry corresponding to the given key, if it is in the tree; null, otherwise. * * @param k the key to search for. * @return the corresponding entry, or null if no entry with the given key exists. */ private Entry KEY_GENERIC findKey( final KEY_GENERIC_TYPE k ) { Entry KEY_GENERIC e = tree; int cmp; while ( e != null && ( cmp = compare( k, e.key ) ) != 0 ) e = cmp < 0 ? e.left() : e.right(); return e; } /** Locates a key. * * @param k a key. * @return the last entry on a search for the given key; this will be * the given key, if it present; otherwise, it will be either the smallest greater key or the greatest smaller key. */ final Entry KEY_GENERIC locateKey( final KEY_GENERIC_TYPE k ) { Entry KEY_GENERIC e = tree, last = tree; int cmp = 0; while ( e != null && ( cmp = compare( k, e.key ) ) != 0 ) { last = e; e = cmp < 0 ? e.left() : e.right(); } return cmp == 0 ? e : last; } /** This vector remembers the path followed during the current insertion. It suffices for about 232 entries. */ private transient boolean dirPath[]; private void allocatePaths() { dirPath = new boolean[ 48 ]; } public boolean add( final KEY_GENERIC_TYPE k ) { if ( tree == null ) { // The case of the empty tree is treated separately. count++; tree = lastEntry = firstEntry = new Entry KEY_GENERIC( k ); } else { Entry KEY_GENERIC p = tree, q = null, y = tree, z = null, e = null, w = null; int cmp, i = 0; while( true ) { if ( ( cmp = compare( k, p.key ) ) == 0 ) return false; if ( p.balance() != 0 ) { i = 0; z = q; y = p; } if ( dirPath[ i++ ] = cmp > 0 ) { if ( p.succ() ) { count++; e = new Entry KEY_GENERIC( k ); if ( p.right == null ) lastEntry = e; e.left = p; e.right = p.right; p.right( e ); break; } q = p; p = p.right; } else { if ( p.pred() ) { count++; e = new Entry KEY_GENERIC( k ); if ( p.left == null ) firstEntry = e; e.right = p; e.left = p.left; p.left( e ); break; } q = p; p = p.left; } } p = y; i = 0; while( p != e ) { if ( dirPath[ i ] ) p.incBalance(); else p.decBalance(); p = dirPath[ i++ ] ? 
p.right : p.left; } if ( y.balance() == -2 ) { Entry KEY_GENERIC x = y.left; if ( x.balance() == -1 ) { w = x; if ( x.succ() ) { x.succ( false ); y.pred( x ); } else y.left = x.right; x.right = y; x.balance( 0 ); y.balance( 0 ); } else { if ( ASSERTS ) assert x.balance() == 1; w = x.right; x.right = w.left; w.left = x; y.left = w.right; w.right = y; if ( w.balance() == -1 ) { x.balance( 0 ); y.balance( 1 ); } else if ( w.balance() == 0 ) { x.balance( 0 ); y.balance( 0 ); } else { x.balance( -1 ); y.balance( 0 ); } w.balance( 0 ); if ( w.pred() ) { x.succ( w ); w.pred( false ); } if ( w.succ() ) { y.pred( w ); w.succ( false ); } } } else if ( y.balance() == +2 ) { Entry KEY_GENERIC x = y.right; if ( x.balance() == 1 ) { w = x; if ( x.pred() ) { x.pred( false ); y.succ( x ); } else y.right = x.left; x.left = y; x.balance( 0 ); y.balance( 0 ); } else { if ( ASSERTS ) assert x.balance() == -1; w = x.left; x.left = w.right; w.right = x; y.right = w.left; w.left = y; if ( w.balance() == 1 ) { x.balance( 0 ); y.balance( -1 ); } else if ( w.balance() == 0 ) { x.balance( 0 ); y.balance( 0 ); } else { x.balance( 1 ); y.balance( 0 ); } w.balance( 0 ); if ( w.pred() ) { y.succ( w ); w.pred( false ); } if ( w.succ() ) { x.pred( w ); w.succ( false ); } } } else return true; if ( z == null ) tree = w; else { if ( z.left == y ) z.left = w; else z.right = w; } } if ( ASSERTS ) checkTree( tree ); return true; } /** Finds the parent of an entry. * * @param e a node of the tree. * @return the parent of the given node, or null for the root. */ private Entry KEY_GENERIC parent( final Entry KEY_GENERIC e ) { if ( e == tree ) return null; Entry KEY_GENERIC x, y, p; x = y = e; while( true ) { if ( y.succ() ) { p = y.right; if ( p == null || p.left != e ) { while( ! x.pred() ) x = x.left; p = x.left; } return p; } else if ( x.pred() ) { p = x.left; if ( p == null || p.right != e ) { while( ! y.succ() ) y = y.right; p = y.right; } return p; } x = x.left; y = y.right; } } SUPPRESS_WARNINGS_KEY_UNCHECKED public boolean rem( final KEY_TYPE k ) { if ( tree == null ) return false; int cmp; Entry KEY_GENERIC p = tree, q = null; boolean dir = false; final KEY_GENERIC_TYPE kk = KEY_GENERIC_CAST k; while( true ) { if ( ( cmp = compare( kk, p.key ) ) == 0 ) break; else if ( dir = cmp > 0 ) { q = p; if ( ( p = p.right() ) == null ) return false; } else { q = p; if ( ( p = p.left() ) == null ) return false; } } if ( p.left == null ) firstEntry = p.next(); if ( p.right == null ) lastEntry = p.prev(); if ( p.succ() ) { if ( p.pred() ) { if ( q != null ) { if ( dir ) q.succ( p.right ); else q.pred( p.left ); } else tree = dir ? p.right : p.left; } else { p.prev().right = p.right; if ( q != null ) { if ( dir ) q.right = p.left; else q.left = p.left; } else tree = p.left; } } else { Entry KEY_GENERIC r = p.right; if ( r.pred() ) { r.left = p.left; r.pred( p.pred() ); if ( ! r.pred() ) r.prev().right = r; if ( q != null ) { if ( dir ) q.right = r; else q.left = r; } else tree = r; r.balance( p.balance() ); q = r; dir = true; } else { Entry KEY_GENERIC s; while( true ) { s = r.left; if ( s.pred() ) break; r = s; } if ( s.succ() ) r.pred( s ); else r.left = s.right; s.left = p.left; if ( ! p.pred() ) { p.prev().right = s; s.pred( false ); } s.right = p.right; s.succ( false ); if ( q != null ) { if ( dir ) q.right = s; else q.left = s; } else tree = s; s.balance( p.balance() ); q = r; dir = false; } } Entry KEY_GENERIC y; while( q != null ) { y = q; q = parent( y ); if ( ! 
dir ) { dir = q != null && q.left != y; y.incBalance(); if ( y.balance() == 1 ) break; else if ( y.balance() == 2 ) { Entry KEY_GENERIC x = y.right; if ( ASSERTS ) assert x != null; if ( x.balance() == -1 ) { Entry KEY_GENERIC w; if ( ASSERTS ) assert x.balance() == -1; w = x.left; x.left = w.right; w.right = x; y.right = w.left; w.left = y; if ( w.balance() == 1 ) { x.balance( 0 ); y.balance( -1 ); } else if ( w.balance() == 0 ) { x.balance( 0 ); y.balance( 0 ); } else { if ( ASSERTS ) assert w.balance() == -1; x.balance( 1 ); y.balance( 0 ); } w.balance( 0 ); if ( w.pred() ) { y.succ( w ); w.pred( false ); } if ( w.succ() ) { x.pred( w ); w.succ( false ); } if ( q != null ) { if ( dir ) q.right = w; else q.left = w; } else tree = w; } else { if ( q != null ) { if ( dir ) q.right = x; else q.left = x; } else tree = x; if ( x.balance() == 0 ) { y.right = x.left; x.left = y; x.balance( -1 ); y.balance( +1 ); break; } if ( ASSERTS ) assert x.balance() == 1; if ( x.pred() ) { y.succ( true ); x.pred( false ); } else y.right = x.left; x.left = y; y.balance( 0 ); x.balance( 0 ); } } } else { dir = q != null && q.left != y; y.decBalance(); if ( y.balance() == -1 ) break; else if ( y.balance() == -2 ) { Entry KEY_GENERIC x = y.left; if ( ASSERTS ) assert x != null; if ( x.balance() == 1 ) { Entry KEY_GENERIC w; if ( ASSERTS ) assert x.balance() == 1; w = x.right; x.right = w.left; w.left = x; y.left = w.right; w.right = y; if ( w.balance() == -1 ) { x.balance( 0 ); y.balance( 1 ); } else if ( w.balance() == 0 ) { x.balance( 0 ); y.balance( 0 ); } else { if ( ASSERTS ) assert w.balance() == 1; x.balance( -1 ); y.balance( 0 ); } w.balance( 0 ); if ( w.pred() ) { x.succ( w ); w.pred( false ); } if ( w.succ() ) { y.pred( w ); w.succ( false ); } if ( q != null ) { if ( dir ) q.right = w; else q.left = w; } else tree = w; } else { if ( q != null ) { if ( dir ) q.right = x; else q.left = x; } else tree = x; if ( x.balance() == 0 ) { y.left = x.right; x.right = y; x.balance( +1 ); y.balance( -1 ); break; } if ( ASSERTS ) assert x.balance() == -1; if ( x.succ() ) { y.pred( true ); x.succ( false ); } else y.left = x.right; x.right = y; y.balance( 0 ); x.balance( 0 ); } } } } count--; if ( ASSERTS ) checkTree( tree ); return true; } SUPPRESS_WARNINGS_KEY_UNCHECKED public boolean contains( final KEY_TYPE k ) { return findKey( KEY_GENERIC_CAST k ) != null; } #if KEY_CLASS_Object SUPPRESS_WARNINGS_KEY_UNCHECKED public K get( final KEY_TYPE k ) { final Entry KEY_GENERIC entry = findKey( KEY_GENERIC_CAST k ); return entry == null ? null : entry.key; } #endif public void clear() { count = 0; tree = null; firstEntry = lastEntry = null; } /** This class represent an entry in a tree set. * *

We use a single piece of "metadata", i.e., {@link Entry#info}, to store * information about balance, predecessor status and successor status. * *
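 * As a worked example of the layout defined below: a freshly created entry has
 * info == SUCC_MASK | PRED_MASK, i.e. both pointers are threads and the
 * balance factor is 0; the balance factor lives in the low byte of info and is
 * read back with a byte cast, so
 *
 *   balance( -1 );   // stores 0xFF in the low bits of info
 *   balance();       // the (byte) cast turns it back into -1
 *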

Note that since the class is recursive, it can be * considered equivalently a tree. */ private static final class Entry KEY_GENERIC implements Cloneable { /** If the bit in this mask is true, {@link #right} points to a successor. */ private final static int SUCC_MASK = 1 << 31; /** If the bit in this mask is true, {@link #left} points to a predecessor. */ private final static int PRED_MASK = 1 << 30; /** The bits in this mask hold the node balance info. You can get it just by casting to byte. */ private final static int BALANCE_MASK = 0xFF; /** The key of this entry. */ KEY_GENERIC_TYPE key; /** The pointers to the left and right subtrees. */ Entry KEY_GENERIC left, right; /** This integers holds different information in different bits (see {@link #SUCC_MASK}, {@link #PRED_MASK} and {@link #BALANCE_MASK}). */ int info; Entry() {} /** Creates a new entry with the given key. * * @param k a key. */ Entry( final KEY_GENERIC_TYPE k ) { this.key = k; info = SUCC_MASK | PRED_MASK; } /** Returns the left subtree. * * @return the left subtree (null if the left * subtree is empty). */ Entry KEY_GENERIC left() { return ( info & PRED_MASK ) != 0 ? null : left; } /** Returns the right subtree. * * @return the right subtree (null if the right * subtree is empty). */ Entry KEY_GENERIC right() { return ( info & SUCC_MASK ) != 0 ? null : right; } /** Checks whether the left pointer is really a predecessor. * @return true if the left pointer is a predecessor. */ boolean pred() { return ( info & PRED_MASK ) != 0; } /** Checks whether the right pointer is really a successor. * @return true if the right pointer is a successor. */ boolean succ() { return ( info & SUCC_MASK ) != 0; } /** Sets whether the left pointer is really a predecessor. * @param pred if true then the left pointer will be considered a predecessor. */ void pred( final boolean pred ) { if ( pred ) info |= PRED_MASK; else info &= ~PRED_MASK; } /** Sets whether the right pointer is really a successor. * @param succ if true then the right pointer will be considered a successor. */ void succ( final boolean succ ) { if ( succ ) info |= SUCC_MASK; else info &= ~SUCC_MASK; } /** Sets the left pointer to a predecessor. * @param pred the predecessr. */ void pred( final Entry KEY_GENERIC pred ) { info |= PRED_MASK; left = pred; } /** Sets the right pointer to a successor. * @param succ the successor. */ void succ( final Entry KEY_GENERIC succ ) { info |= SUCC_MASK; right = succ; } /** Sets the left pointer to the given subtree. * @param left the new left subtree. */ void left( final Entry KEY_GENERIC left ) { info &= ~PRED_MASK; this.left = left; } /** Sets the right pointer to the given subtree. * @param right the new right subtree. */ void right( final Entry KEY_GENERIC right ) { info &= ~SUCC_MASK; this.right = right; } /** Returns the current level of the node. * @return the current level of this node. */ int balance() { return (byte)info; } /** Sets the level of this node. * @param level the new level of this node. */ void balance( int level ) { info &= ~BALANCE_MASK; info |= ( level & BALANCE_MASK ); } /** Increments the level of this node. */ void incBalance() { info = info & ~BALANCE_MASK | ( (byte)info + 1 ) & 0xFF; } /** Decrements the level of this node. */ protected void decBalance() { info = info & ~BALANCE_MASK | ( (byte)info - 1 ) & 0xFF; } /** Computes the next entry in the set order. * * @return the next entry (null) if this is the last entry). 
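 * In the threaded representation this needs no parent pointers: if the
 * successor bit is set, right already points at the next entry; otherwise we
 * move to the right child and follow left links down to the first entry whose
 * predecessor bit is set. For example, in a three-entry tree rooted at the
 * middle key, the smallest entry reaches the root through its right thread,
 * while the root finds the largest entry as its right child.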
*/ Entry KEY_GENERIC next() { Entry KEY_GENERIC next = this.right; if ( ( info & SUCC_MASK ) == 0 ) while ( ( next.info & PRED_MASK ) == 0 ) next = next.left; return next; } /** Computes the previous entry in the set order. * * @return the previous entry (null) if this is the first entry). */ Entry KEY_GENERIC prev() { Entry KEY_GENERIC prev = this.left; if ( ( info & PRED_MASK ) == 0 ) while ( ( prev.info & SUCC_MASK ) == 0 ) prev = prev.right; return prev; } SUPPRESS_WARNINGS_KEY_UNCHECKED public Entry KEY_GENERIC clone() { Entry KEY_GENERIC c; try { c = (Entry KEY_GENERIC)super.clone(); } catch(CloneNotSupportedException cantHappen) { throw new InternalError(); } c.key = key; c.info = info; return c; } public boolean equals( final Object o ) { if (!(o instanceof Entry)) return false; Entry KEY_GENERIC_WILDCARD e = (Entry KEY_GENERIC_WILDCARD)o; return KEY_EQUALS(key, e.key); } public int hashCode() { return KEY2JAVAHASH_NOT_NULL(key); } public String toString() { return String.valueOf( key ); } /* public void prettyPrint() { prettyPrint(0); } public void prettyPrint(int level) { if ( pred() ) { for (int i = 0; i < level; i++) System.err.print(" "); System.err.println("pred: " + left ); } else if (left != null) left.prettyPrint(level +1 ); for (int i = 0; i < level; i++) System.err.print(" "); System.err.println(key + " (" + level() + ")"); if ( succ() ) { for (int i = 0; i < level; i++) System.err.print(" "); System.err.println("succ: " + right ); } else if (right != null) right.prettyPrint(level + 1); } */ } /* public void prettyPrint() { System.err.println("size: " + count); if (tree != null) tree.prettyPrint(); } */ public int size() { return count; } public boolean isEmpty() { return count == 0; } public KEY_GENERIC_TYPE FIRST() { if ( tree == null ) throw new NoSuchElementException(); return firstEntry.key; } public KEY_GENERIC_TYPE LAST() { if ( tree == null ) throw new NoSuchElementException(); return lastEntry.key; } /** An iterator on the whole range. * *

This class can iterate in both directions on a threaded tree. */ private class SetIterator extends KEY_ABSTRACT_LIST_ITERATOR KEY_GENERIC { /** The entry that will be returned by the next call to {@link java.util.ListIterator#previous()} (or null if no previous entry exists). */ Entry KEY_GENERIC prev; /** The entry that will be returned by the next call to {@link java.util.ListIterator#next()} (or null if no next entry exists). */ Entry KEY_GENERIC next; /** The last entry that was returned (or null if we did not iterate or used {@link #remove()}). */ Entry KEY_GENERIC curr; /** The current index (in the sense of a {@link java.util.ListIterator}). Note that this value is not meaningful when this {@link SetIterator} has been created using the nonempty constructor.*/ int index = 0; SetIterator() { next = firstEntry; } SetIterator( final KEY_GENERIC_TYPE k ) { if ( ( next = locateKey( k ) ) != null ) { if ( compare( next.key, k ) <= 0 ) { prev = next; next = next.next(); } else prev = next.prev(); } } public boolean hasNext() { return next != null; } public boolean hasPrevious() { return prev != null; } void updateNext() { next = next.next(); } Entry KEY_GENERIC nextEntry() { if ( ! hasNext() ) throw new NoSuchElementException(); curr = prev = next; index++; updateNext(); return curr; } public KEY_GENERIC_TYPE NEXT_KEY() { return nextEntry().key; } public KEY_GENERIC_TYPE PREV_KEY() { return previousEntry().key; } void updatePrevious() { prev = prev.prev(); } Entry KEY_GENERIC previousEntry() { if ( ! hasPrevious() ) throw new NoSuchElementException(); curr = next = prev; index--; updatePrevious(); return curr; } public int nextIndex() { return index; } public int previousIndex() { return index - 1; } public void remove() { if ( curr == null ) throw new IllegalStateException(); /* If the last operation was a next(), we are removing an entry that preceeds the current index, and thus we must decrement it. */ if ( curr == prev ) index--; next = prev = curr; updatePrevious(); updateNext(); AVL_TREE_SET.this.rem( curr.key ); curr = null; } } public KEY_BIDI_ITERATOR KEY_GENERIC iterator() { return new SetIterator(); } public KEY_BIDI_ITERATOR KEY_GENERIC iterator( final KEY_GENERIC_TYPE from ) { return new SetIterator( from ); } public KEY_COMPARATOR KEY_SUPER_GENERIC comparator() { return actualComparator; } public SORTED_SET KEY_GENERIC headSet( final KEY_GENERIC_TYPE to ) { return new Subset( KEY_NULL, true, to, false ); } public SORTED_SET KEY_GENERIC tailSet( final KEY_GENERIC_TYPE from ) { return new Subset( from, false, KEY_NULL, true ); } public SORTED_SET KEY_GENERIC subSet( final KEY_GENERIC_TYPE from, final KEY_GENERIC_TYPE to ) { return new Subset( from, false, to, false ); } /** A subset with given range. * *
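 * For example (sketch, s being an instance of the enclosing set class), each
 * of the following calls returns an instance of this class, backed by the
 * enclosing set:
 *
 *   s.headSet( to );        // keys strictly smaller than to
 *   s.subSet( from, to );   // keys from from (inclusive) to to (exclusive)
 *   s.tailSet( from );      // keys greater than or equal to from
 *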

This class represents a subset. One has to specify the left/right * limits (which can be set to -∞ or ∞). Since the subset is a * view on the set, at a given moment it could happen that the limits of * the range are not any longer in the main set. Thus, things such as * {@link java.util.SortedSet#first()} or {@link java.util.SortedSet#size()} must be always computed * on-the-fly. */ private final class Subset extends ABSTRACT_SORTED_SET KEY_GENERIC implements java.io.Serializable, SORTED_SET KEY_GENERIC { private static final long serialVersionUID = -7046029254386353129L; /** The start of the subset range, unless {@link #bottom} is true. */ KEY_GENERIC_TYPE from; /** The end of the subset range, unless {@link #top} is true. */ KEY_GENERIC_TYPE to; /** If true, the subset range starts from -∞. */ boolean bottom; /** If true, the subset range goes to ∞. */ boolean top; /** Creates a new subset with given key range. * * @param from the start of the subset range. * @param bottom if true, the first parameter is ignored and the range starts from -∞. * @param to the end of the subset range. * @param top if true, the third parameter is ignored and the range goes to ∞. */ public Subset( final KEY_GENERIC_TYPE from, final boolean bottom, final KEY_GENERIC_TYPE to, final boolean top ) { if ( ! bottom && ! top && AVL_TREE_SET.this.compare( from, to ) > 0 ) throw new IllegalArgumentException( "Start element (" + from + ") is larger than end element (" + to + ")" ); this.from = from; this.bottom = bottom; this.to = to; this.top = top; } public void clear() { final SubsetIterator i = new SubsetIterator(); while( i.hasNext() ) { i.NEXT_KEY(); i.remove(); } } /** Checks whether a key is in the subset range. * @param k a key. * @return true if is the key is in the subset range. */ final boolean in( final KEY_GENERIC_TYPE k ) { return ( bottom || AVL_TREE_SET.this.compare( k, from ) >= 0 ) && ( top || AVL_TREE_SET.this.compare( k, to ) < 0 ); } SUPPRESS_WARNINGS_KEY_UNCHECKED public boolean contains( final KEY_TYPE k ) { return in( KEY_GENERIC_CAST k ) && AVL_TREE_SET.this.contains( k ); } public boolean add( final KEY_GENERIC_TYPE k ) { if ( ! in( k ) ) throw new IllegalArgumentException( "Element (" + k + ") out of range [" + ( bottom ? "-" : String.valueOf( from ) ) + ", " + ( top ? "-" : String.valueOf( to ) ) + ")" ); return AVL_TREE_SET.this.add( k ); } SUPPRESS_WARNINGS_KEY_UNCHECKED public boolean rem( final KEY_TYPE k ) { if ( ! in( KEY_GENERIC_CAST k ) ) return false; return AVL_TREE_SET.this.rem( k ); } public int size() { final SubsetIterator i = new SubsetIterator(); int n = 0; while( i.hasNext() ) { n++; i.NEXT_KEY(); } return n; } public boolean isEmpty() { return ! new SubsetIterator().hasNext(); } public KEY_COMPARATOR KEY_SUPER_GENERIC comparator() { return actualComparator; } public KEY_BIDI_ITERATOR KEY_GENERIC iterator() { return new SubsetIterator(); } public KEY_BIDI_ITERATOR KEY_GENERIC iterator( final KEY_GENERIC_TYPE from ) { return new SubsetIterator( from ); } public SORTED_SET KEY_GENERIC headSet( final KEY_GENERIC_TYPE to ) { if ( top ) return new Subset( from, bottom, to, false ); return compare( to, this.to ) < 0 ? new Subset( from, bottom, to, false ) : this; } public SORTED_SET KEY_GENERIC tailSet( final KEY_GENERIC_TYPE from ) { if ( bottom ) return new Subset( from, false, to, top ); return compare( from, this.from ) > 0 ? 
new Subset( from, false, to, top ) : this; } public SORTED_SET KEY_GENERIC subSet( KEY_GENERIC_TYPE from, KEY_GENERIC_TYPE to ) { if ( top && bottom ) return new Subset( from, false, to, false ); if ( ! top ) to = compare( to, this.to ) < 0 ? to : this.to; if ( ! bottom ) from = compare( from, this.from ) > 0 ? from : this.from; if ( ! top && ! bottom && from == this.from && to == this.to ) return this; return new Subset( from, false, to, false ); } /** Locates the first entry. * * @return the first entry of this subset, or null if the subset is empty. */ public AVL_TREE_SET.Entry KEY_GENERIC firstEntry() { if ( tree == null ) return null; // If this subset goes to -infinity, we return the main set first entry; otherwise, we locate the start of the set. AVL_TREE_SET.Entry KEY_GENERIC e; if ( bottom ) e = firstEntry; else { e = locateKey( from ); // If we find either the start or something greater we're OK. if ( compare( e.key, from ) < 0 ) e = e.next(); } // Finally, if this subset doesn't go to infinity, we check that the resulting key isn't greater than the end. if ( e == null || ! top && compare( e.key, to ) >= 0 ) return null; return e; } /** Locates the last entry. * * @return the last entry of this subset, or null if the subset is empty. */ public AVL_TREE_SET.Entry KEY_GENERIC lastEntry() { if ( tree == null ) return null; // If this subset goes to infinity, we return the main set last entry; otherwise, we locate the end of the set. AVL_TREE_SET.Entry KEY_GENERIC e; if ( top ) e = lastEntry; else { e = locateKey( to ); // If we find something smaller than the end we're OK. if ( compare( e.key, to ) >= 0 ) e = e.prev(); } // Finally, if this subset doesn't go to -infinity, we check that the resulting key isn't smaller than the start. if ( e == null || ! bottom && compare( e.key, from ) < 0 ) return null; return e; } public KEY_GENERIC_TYPE FIRST() { AVL_TREE_SET.Entry KEY_GENERIC e = firstEntry(); if ( e == null ) throw new NoSuchElementException(); return e.key; } public KEY_GENERIC_TYPE LAST() { AVL_TREE_SET.Entry KEY_GENERIC e = lastEntry(); if ( e == null ) throw new NoSuchElementException(); return e.key; } /** An iterator for subranges. * *

This class inherits from {@link SetIterator}, but overrides the methods that * update the pointer after a {@link java.util.ListIterator#next()} or {@link java.util.ListIterator#previous()}. If we would * move out of the range of the subset we just overwrite the next or previous * entry with null. */ private final class SubsetIterator extends SetIterator { SubsetIterator() { next = firstEntry(); } SubsetIterator( final KEY_GENERIC_TYPE k ) { this(); if ( next != null ) { if ( ! bottom && compare( k, next.key ) < 0 ) prev = null; else if ( ! top && compare( k, ( prev = lastEntry() ).key ) >= 0 ) next = null; else { next = locateKey( k ); if ( compare( next.key, k ) <= 0 ) { prev = next; next = next.next(); } else prev = next.prev(); } } } void updatePrevious() { prev = prev.prev(); if ( ! bottom && prev != null && AVL_TREE_SET.this.compare( prev.key, from ) < 0 ) prev = null; } void updateNext() { next = next.next(); if ( ! top && next != null && AVL_TREE_SET.this.compare( next.key, to ) >= 0 ) next = null; } } } /** Returns a deep copy of this tree set. * *
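 * In sketch form (object keys assumed, since that is the only case in which
 * the sharing described below is observable; s and k are hypothetical):
 *
 *   AVL_TREE_SET KEY_GENERIC copy = (AVL_TREE_SET KEY_GENERIC)s.clone();
 *   copy.remove( k );   // leaves s untouched: the tree structure is copied
 *   // the key objects themselves, however, are shared between s and copy
 *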

This method performs a deep copy of this tree set; the data stored in the * set, however, is not cloned. Note that this makes a difference only for object keys. * * @return a deep copy of this tree set. */ SUPPRESS_WARNINGS_KEY_UNCHECKED public Object clone() { AVL_TREE_SET KEY_GENERIC c; try { c = (AVL_TREE_SET KEY_GENERIC)super.clone(); } catch(CloneNotSupportedException cantHappen) { throw new InternalError(); } c.allocatePaths(); if ( count != 0 ) { // Also this apparently unfathomable code is derived from GNU libavl. Entry KEY_GENERIC e, p, q, rp = new Entry KEY_GENERIC(), rq = new Entry KEY_GENERIC(); p = rp; rp.left( tree ); q = rq; rq.pred( null ); while( true ) { if ( ! p.pred() ) { e = p.left.clone(); e.pred( q.left ); e.succ( q ); q.left( e ); p = p.left; q = q.left; } else { while( p.succ() ) { p = p.right; if ( p == null ) { q.right = null; c.tree = rq.left; c.firstEntry = c.tree; while( c.firstEntry.left != null ) c.firstEntry = c.firstEntry.left; c.lastEntry = c.tree; while( c.lastEntry.right != null ) c.lastEntry = c.lastEntry.right; return c; } q = q.right; } p = p.right; q = q.right; } if ( ! p.succ() ) { e = p.right.clone(); e.succ( q.right ); e.pred( q ); q.right( e ); } } } return c; } private void writeObject(java.io.ObjectOutputStream s) throws java.io.IOException { int n = count; SetIterator i = new SetIterator(); s.defaultWriteObject(); while( n-- != 0 ) s.WRITE_KEY( i.NEXT_KEY() ); } /** Reads the given number of entries from the input stream, returning the corresponding tree. * * @param s the input stream. * @param n the (positive) number of entries to read. * @param pred the entry containing the key that preceeds the first key in the tree. * @param succ the entry containing the key that follows the last key in the tree. */ SUPPRESS_WARNINGS_KEY_UNCHECKED private Entry KEY_GENERIC readTree( final java.io.ObjectInputStream s, final int n, final Entry KEY_GENERIC pred, final Entry KEY_GENERIC succ ) throws java.io.IOException, ClassNotFoundException { if ( n == 1 ) { final Entry KEY_GENERIC top = new Entry KEY_GENERIC( KEY_GENERIC_CAST s.READ_KEY() ); top.pred( pred ); top.succ( succ ); return top; } if ( n == 2 ) { /* We handle separately this case so that recursion will *always* be on nonempty subtrees. */ final Entry KEY_GENERIC top = new Entry KEY_GENERIC( KEY_GENERIC_CAST s.READ_KEY() ); top.right( new Entry KEY_GENERIC( KEY_GENERIC_CAST s.READ_KEY() ) ); top.right.pred( top ); top.balance( 1 ); top.pred( pred ); top.right.succ( succ ); return top; } // The right subtree is the largest one. final int rightN = n / 2, leftN = n - rightN - 1; final Entry KEY_GENERIC top = new Entry KEY_GENERIC(); top.left( readTree( s, leftN, pred, top ) ); top.key = KEY_GENERIC_CAST s.READ_KEY(); top.right( readTree( s, rightN, top, succ ) ); if ( n == ( n & -n ) ) top.balance( 1 ); // Quick test for determining whether n is a power of 2. return top; } private void readObject( java.io.ObjectInputStream s ) throws java.io.IOException, ClassNotFoundException { s.defaultReadObject(); /* The storedComparator is now correctly set, but we must restore on-the-fly the actualComparator. 
*/ setActualComparator(); allocatePaths(); if ( count != 0 ) { tree = readTree( s, count, null, null ); Entry KEY_GENERIC e; e = tree; while( e.left() != null ) e = e.left(); firstEntry = e; e = tree; while( e.right() != null ) e = e.right(); lastEntry = e; } if ( ASSERTS ) checkTree( tree ); } #ifdef ASSERTS_CODE private static KEY_GENERIC int checkTree( Entry KEY_GENERIC e ) { if ( e == null ) return 0; final int leftN = checkTree( e.left() ), rightN = checkTree( e.right() ); if ( leftN + e.balance() != rightN ) throw new AssertionError( "Mismatch between left tree size (" + leftN + "), right tree size (" + rightN + ") and balance (" + e.balance() + ")" ); return Math.max( leftN , rightN ) + 1; } #else private static KEY_GENERIC int checkTree( @SuppressWarnings("unused") Entry KEY_GENERIC e ) { return 0; } #endif #ifdef TEST private static long seed = System.currentTimeMillis(); private static java.util.Random r = new java.util.Random( seed ); private static KEY_TYPE genKey() { #if KEY_CLASS_Byte || KEY_CLASS_Short || KEY_CLASS_Character return (KEY_TYPE)(r.nextInt()); #elif KEYS_PRIMITIVE return r.NEXT_KEY(); #else return Integer.toBinaryString( r.nextInt() ); #endif } private static java.text.NumberFormat format = new java.text.DecimalFormat( "#,###.00" ); private static java.text.FieldPosition p = new java.text.FieldPosition( 0 ); private static String format( double d ) { StringBuffer s = new StringBuffer(); return format.format( d, s, p ).toString(); } private static void speedTest( int n, boolean comp ) { int i, j; AVL_TREE_SET m; java.util.TreeSet t; KEY_TYPE k[] = new KEY_TYPE[n]; KEY_TYPE nk[] = new KEY_TYPE[n]; long ms; for( i = 0; i < n; i++ ) { k[i] = genKey(); nk[i] = genKey(); } double totAdd = 0, totYes = 0, totNo = 0, totIterFor = 0, totIterBack = 0, totRemYes = 0, d, dd; if ( comp ) { for( j = 0; j < 20; j++ ) { t = new java.util.TreeSet(); /* We first add all pairs to t. */ for( i = 0; i < n; i++ ) t.add( KEY2OBJ( k[i] ) ); /* Then we remove the first half and put it back. */ for( i = 0; i < n/2; i++ ) t.remove( KEY2OBJ( k[i] ) ); ms = System.currentTimeMillis(); for( i = 0; i < n/2; i++ ) t.add( KEY2OBJ( k[i] ) ); d = System.currentTimeMillis() - ms; /* Then we remove the other half and put it back again. */ ms = System.currentTimeMillis(); for( i = n/2; i < n; i++ ) t.remove( KEY2OBJ( k[i] ) ); dd = System.currentTimeMillis() - ms ; ms = System.currentTimeMillis(); for( i = n/2; i < n; i++ ) t.add( KEY2OBJ( k[i] ) ); d += System.currentTimeMillis() - ms; if ( j > 2 ) totAdd += n/d; System.out.print("Add: " + format( n/d ) +" K/s " ); /* Then we remove again the first half. */ ms = System.currentTimeMillis(); for( i = 0; i < n/2; i++ ) t.remove( KEY2OBJ( k[i] ) ); dd += System.currentTimeMillis() - ms ; if ( j > 2 ) totRemYes += n/dd; System.out.print("RemYes: " + format( n/dd ) +" K/s " ); /* And then we put it back. */ for( i = 0; i < n/2; i++ ) t.add( KEY2OBJ( k[i] ) ); /* We check for pairs in t. */ ms = System.currentTimeMillis(); for( i = 0; i < n; i++ ) t.contains( KEY2OBJ( k[i] ) ); d = 1.0 * n / (System.currentTimeMillis() - ms ); if ( j > 2 ) totYes += d; System.out.print("Yes: " + format( d ) +" K/s " ); /* We check for pairs not in t. */ ms = System.currentTimeMillis(); for( i = 0; i < n; i++ ) t.contains( KEY2OBJ( nk[i] ) ); d = 1.0 * n / (System.currentTimeMillis() - ms ); if ( j > 2 ) totNo += d; System.out.print("No: " + format( d ) +" K/s " ); /* We iterate on t. 
*/ ms = System.currentTimeMillis(); for( Iterator it = t.iterator(); it.hasNext(); it.next() ); d = 1.0 * n / (System.currentTimeMillis() - ms ); if ( j > 2 ) totIterFor += d; System.out.print("IterFor: " + format( d ) +" K/s " ); System.out.println(); } System.out.println(); System.out.println( "java.util Add: " + format( totAdd/(j-3) ) + " K/s RemYes: " + format( totRemYes/(j-3) ) + " K/s Yes: " + format( totYes/(j-3) ) + " K/s No: " + format( totNo/(j-3) ) + " K/s IterFor: " + format( totIterFor/(j-3) ) + " K/s" ); System.out.println(); totAdd = totYes = totNo = totIterFor = totIterBack = totRemYes = 0; } for( j = 0; j < 20; j++ ) { m = new AVL_TREE_SET(); /* We first add all pairs to m. */ for( i = 0; i < n; i++ ) m.add( k[i] ); /* Then we remove the first half and put it back. */ for( i = 0; i < n/2; i++ ) m.remove( k[i] ); ms = System.currentTimeMillis(); for( i = 0; i < n/2; i++ ) m.add( k[i] ); d = System.currentTimeMillis() - ms; /* Then we remove the other half and put it back again. */ ms = System.currentTimeMillis(); for( i = n/2; i < n; i++ ) m.remove( k[i] ); dd = System.currentTimeMillis() - ms ; ms = System.currentTimeMillis(); for( i = n/2; i < n; i++ ) m.add( k[i] ); d += System.currentTimeMillis() - ms; if ( j > 2 ) totAdd += n/d; System.out.print("Add: " + format( n/d ) +" K/s " ); /* Then we remove again the first half. */ ms = System.currentTimeMillis(); for( i = 0; i < n/2; i++ ) m.remove( k[i] ); dd += System.currentTimeMillis() - ms ; if ( j > 2 ) totRemYes += n/dd; System.out.print("RemYes: " + format( n/dd ) +" K/s " ); /* And then we put it back. */ for( i = 0; i < n/2; i++ ) m.add( k[i] ); /* We check for pairs in m. */ ms = System.currentTimeMillis(); for( i = 0; i < n; i++ ) m.contains( k[i] ); d = 1.0 * n / (System.currentTimeMillis() - ms ); if ( j > 2 ) totYes += d; System.out.print("Yes: " + format( d ) +" K/s " ); /* We check for pairs not in m. */ ms = System.currentTimeMillis(); for( i = 0; i < n; i++ ) m.contains( nk[i] ); d = 1.0 * n / (System.currentTimeMillis() - ms ); if ( j > 2 ) totNo += d; System.out.print("No: " + format( d ) +" K/s " ); /* We iterate on m. */ KEY_LIST_ITERATOR it = (KEY_LIST_ITERATOR)m.iterator(); ms = System.currentTimeMillis(); for( ; it.hasNext(); it.NEXT_KEY() ); d = 1.0 * n / (System.currentTimeMillis() - ms ); if ( j > 2 ) totIterFor += d; System.out.print("IterFor: " + format( d ) +" K/s " ); /* We iterate back on m. */ ms = System.currentTimeMillis(); for( ; it.hasPrevious(); it.PREV_KEY() ); d = 1.0 * n / (System.currentTimeMillis() - ms ); if ( j > 2 ) totIterBack += d; System.out.print("IterBack: " + format( d ) +" K/s " ); System.out.println(); } System.out.println(); System.out.println( "fastutil Add: " + format( totAdd/(j-3) ) + " K/s RemYes: " + format( totRemYes/(j-3) ) + " K/s Yes: " + format( totYes/(j-3) ) + " K/s No: " + format( totNo/(j-3) ) + " K/s IterFor: " + format( totIterFor/(j-3) ) + " K/s IterBack: " + format( totIterBack/(j-3) ) + "K/s" ); System.out.println(); } private static boolean valEquals(Object o1, Object o2) { return o1 == null ? 
o2 == null : o1.equals(o2); } private static void fatal( String msg ) { System.out.println( msg ); System.exit( 1 ); } private static void ensure( boolean cond, String msg ) { if ( cond ) return; fatal( msg ); } private static Object[] k, v, nk; private static KEY_TYPE kt[]; private static KEY_TYPE nkt[]; private static AVL_TREE_SET topSet; protected static void testSets( SORTED_SET m, SortedSet t, int n, int level ) { long ms; boolean mThrowsIllegal, tThrowsIllegal, mThrowsNoElement, tThrowsNoElement; boolean rt = false, rm = false; if ( level > 4 ) return; /* Now we check that both sets agree on first/last keys. */ mThrowsNoElement = mThrowsIllegal = tThrowsNoElement = tThrowsIllegal = false; try { m.first(); } catch ( NoSuchElementException e ) { mThrowsNoElement = true; } try { t.first(); } catch ( NoSuchElementException e ) { tThrowsNoElement = true; } ensure( mThrowsNoElement == tThrowsNoElement, "Error (" + level + ", " + seed + "): first() divergence at start in NoSuchElementException (" + mThrowsNoElement + ", " + tThrowsNoElement + ")" ); if ( ! mThrowsNoElement ) ensure( t.first().equals( m.first() ), "Error (" + level + ", " + seed + "): m and t differ at start on their first key (" + m.first() + ", " + t.first() +")" ); mThrowsNoElement = mThrowsIllegal = tThrowsNoElement = tThrowsIllegal = false; try { m.last(); } catch ( NoSuchElementException e ) { mThrowsNoElement = true; } try { t.last(); } catch ( NoSuchElementException e ) { tThrowsNoElement = true; } ensure( mThrowsNoElement == tThrowsNoElement, "Error (" + level + ", " + seed + "): last() divergence at start in NoSuchElementException (" + mThrowsNoElement + ", " + tThrowsNoElement + ")" ); if ( ! mThrowsNoElement ) ensure( t.last().equals( m.last() ), "Error (" + level + ", " + seed + "): m and t differ at start on their last key (" + m.last() + ", " + t.last() +")"); /* Now we check that m and t are equal. */ if ( !m.equals( t ) || ! t.equals( m ) ) System.err.println("m: " + m + " t: " + t); ensure( m.equals( t ), "Error (" + level + ", " + seed + "): ! m.equals( t ) at start" ); ensure( t.equals( m ), "Error (" + level + ", " + seed + "): ! t.equals( m ) at start" ); /* Now we check that m actually holds that data. */ for(Iterator i=t.iterator(); i.hasNext(); ) { ensure( m.contains( i.next() ), "Error (" + level + ", " + seed + "): m and t differ on an entry after insertion (iterating on t)" ); } /* Now we check that m actually holds that data, but iterating on m. */ for(Iterator i=m.iterator(); i.hasNext(); ) { ensure( t.contains( i.next() ), "Error (" + level + ", " + seed + "): m and t differ on an entry after insertion (iterating on m)" ); } /* Now we check that inquiries about random data give the same answer in m and t. For m we use the polymorphic method. 
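	   Here "polymorphic" refers to the Object-based methods of java.util.Set,
	   while the type-specific counterparts defined by this driver take unboxed
	   keys directly: contains( k ) rather than contains( KEY2OBJ( k ) ), and
	   rem( k ) as the type-specific removal method.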
*/ for(int i=0; i 0 ) { badPrevious = true; j.previous(); break; } previous = k; } i = (it.unimi.dsi.fastutil.BidirectionalIterator)m.iterator( from ); for( int k = 0; k < 2*n; k++ ) { ensure( i.hasNext() == j.hasNext(), "Error (" + level + ", " + seed + "): divergence in hasNext() (iterator with starting point " + from + ")" ); ensure( i.hasPrevious() == j.hasPrevious() || badPrevious && ( i.hasPrevious() == ( previous != null ) ), "Error (" + level + ", " + seed + "): divergence in hasPrevious() (iterator with starting point " + from + ")" ); if ( r.nextFloat() < .8 && i.hasNext() ) { ensure( ( I = i.next() ).equals( J = j.next() ), "Error (" + level + ", " + seed + "): divergence in next() (" + I + ", " + J + ", iterator with starting point " + from + ")" ); //System.err.println("Done next " + I + " " + J + " " + badPrevious); badPrevious = false; if ( r.nextFloat() < 0.5 ) { //System.err.println("Removing in next"); i.remove(); j.remove(); t.remove( J ); } } else if ( !badPrevious && r.nextFloat() < .2 && i.hasPrevious() ) { ensure( ( I = i.previous() ).equals( J = j.previous() ), "Error (" + level + ", " + seed + "): divergence in previous() (" + I + ", " + J + ", iterator with starting point " + from + ")" ); if ( r.nextFloat() < 0.5 ) { //System.err.println("Removing in prev"); i.remove(); j.remove(); t.remove( J ); } } } } /* Now we check that m actually holds that data. */ ensure( m.equals(t), "Error (" + level + ", " + seed + "): ! m.equals( t ) after iteration" ); ensure( t.equals(m), "Error (" + level + ", " + seed + "): ! t.equals( m ) after iteration" ); /* Now we select a pair of keys and create a subset. */ if ( ! m.isEmpty() ) { java.util.ListIterator i; Object start = m.first(), end = m.first(); for( i = (java.util.ListIterator)m.iterator(); i.hasNext() && r.nextFloat() < .3; start = end = i.next() ); for( ; i.hasNext() && r.nextFloat() < .95; end = i.next() ); //System.err.println("Checking subSet from " + start + " to " + end + " (level=" + (level+1) + ")..." ); testSets( (SORTED_SET)m.subSet( (KEY_CLASS)start, (KEY_CLASS)end ), t.subSet( start, end ), n, level + 1 ); ensure( m.equals(t), "Error (" + level + ", " + seed + "): ! m.equals( t ) after subSet" ); ensure( t.equals(m), "Error (" + level + ", " + seed + "): ! t.equals( m ) after subSet" ); //System.err.println("Checking headSet to " + end + " (level=" + (level+1) + ")..." ); testSets( (SORTED_SET)m.headSet( (KEY_CLASS)end ), t.headSet( end ), n, level + 1 ); ensure( m.equals(t), "Error (" + level + ", " + seed + "): ! m.equals( t ) after headSet" ); ensure( t.equals(m), "Error (" + level + ", " + seed + "): ! t.equals( m ) after headSet" ); //System.err.println("Checking tailSet from " + start + " (level=" + (level+1) + ")..." ); testSets( (SORTED_SET)m.tailSet( (KEY_CLASS)start ), t.tailSet( start ), n, level + 1 ); ensure( m.equals(t), "Error (" + level + ", " + seed + "): ! m.equals( t ) after tailSet" ); ensure( t.equals(m), "Error (" + level + ", " + seed + "): ! t.equals( m ) after tailSet" ); } } private static void test( int n ) { AVL_TREE_SET m = new AVL_TREE_SET(); SortedSet t = new java.util.TreeSet(); topSet = m; k = new Object[n]; nk = new Object[n]; kt = new KEY_TYPE[n]; nkt = new KEY_TYPE[n]; for( int i = 0; i < n; i++ ) { #if KEY_CLASS_Object k[i] = kt[i] = genKey(); nk[i] = nkt[i] = genKey(); #else k[i] = new KEY_CLASS( kt[i] = genKey() ); nk[i] = new KEY_CLASS( nkt[i] = genKey() ); #endif } /* We add pairs to t. 
*/ for( int i = 0; i < n; i++ ) t.add( k[i] ); /* We add to m the same data */ m.addAll(t); testSets( m, t, n, 0 ); System.out.println("Test OK"); return; } public static void main( String args[] ) { int n = Integer.parseInt(args[1]); if ( args.length > 2 ) r = new java.util.Random( seed = Long.parseLong( args[ 2 ] ) ); try { if ("speedTest".equals(args[0]) || "speedComp".equals(args[0])) speedTest( n, "speedComp".equals(args[0]) ); else if ( "test".equals( args[0] ) ) test(n); } catch( Throwable e ) { e.printStackTrace( System.err ); System.err.println( "seed: " + seed ); } } #endif } fastutil-7.1.0/drv/AbstractBidirectionalIterator.drv0000664000000000000000000000367713050701620021337 0ustar rootroot/* * Copyright (C) 2002-2016 Sebastiano Vigna * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package PACKAGE; /** An abstract class facilitating the creation of type-specific {@linkplain it.unimi.dsi.fastutil.BidirectionalIterator bidirectional iterators}. * *
<p>
To create a type-specific bidirectional iterator, besides what is needed * for an iterator you need both a method returning the previous element as a * primitive type and a method returning the previous element as an * object. However, if you inherit from this class you need just one (any one). * *
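* <p>For instance, a minimal sketch of a concrete subclass, written against the
* generated int specialization (the names it.unimi.dsi.fastutil.ints.AbstractIntBidirectionalIterator,
* nextInt() and previousInt() below are assumed from that specialization, and the
* subclass name is illustrative only): it supplies only the primitive methods and
* inherits the object-based next()/previous().
* <pre>
* import it.unimi.dsi.fastutil.ints.AbstractIntBidirectionalIterator;
*
* // Iterates back and forth over 0, 1, ..., n-1.
* public class IntRangeIterator extends AbstractIntBidirectionalIterator {
*     private final int n;
*     private int next;
*     public IntRangeIterator( final int n ) { this.n = n; }
*     public boolean hasNext() { return next < n; }
*     public boolean hasPrevious() { return next > 0; }
*     public int nextInt() { return next++; }
*     public int previousInt() { return --next; }
* }
* </pre>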
<p>
This class implements also a trivial version of {@link #back(int)} that * uses type-specific methods. */ public abstract class KEY_ABSTRACT_BIDI_ITERATOR KEY_GENERIC extends KEY_ABSTRACT_ITERATOR KEY_GENERIC implements KEY_BIDI_ITERATOR KEY_GENERIC { protected KEY_ABSTRACT_BIDI_ITERATOR() {} #if KEYS_PRIMITIVE /** Delegates to the corresponding generic method. */ public KEY_TYPE PREV_KEY() { return previous().KEY_VALUE(); } /** Delegates to the corresponding type-specific method. */ public KEY_GENERIC_CLASS previous() { return KEY_CLASS.valueOf( PREV_KEY() ); } #endif /** This method just iterates the type-specific version of {@link #previous()} for * at most n times, stopping if {@link * #hasPrevious()} becomes false. */ public int back( final int n ) { int i = n; while( i-- != 0 && hasPrevious() ) PREV_KEY(); return n - i - 1; } } fastutil-7.1.0/drv/AbstractBigList.drv0000664000000000000000000005140213050701620016377 0ustar rootroot/* * Copyright (C) 2010-2016 Sebastiano Vigna * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package PACKAGE; #if KEYS_REFERENCE import it.unimi.dsi.fastutil.Stack; #endif import java.util.Iterator; import java.util.Collection; import java.util.NoSuchElementException; import it.unimi.dsi.fastutil.BigList; import it.unimi.dsi.fastutil.BigListIterator; /** An abstract class providing basic methods for big lists implementing a type-specific big list interface. */ public abstract class ABSTRACT_BIG_LIST KEY_GENERIC extends ABSTRACT_COLLECTION KEY_GENERIC implements BIG_LIST KEY_GENERIC, STACK KEY_GENERIC { protected ABSTRACT_BIG_LIST() {} /** Ensures that the given index is nonnegative and not greater than this big-list size. * * @param index an index. * @throws IndexOutOfBoundsException if the given index is negative or greater than this big-list size. */ protected void ensureIndex( final long index ) { if ( index < 0 ) throw new IndexOutOfBoundsException( "Index (" + index + ") is negative" ); if ( index > size64() ) throw new IndexOutOfBoundsException( "Index (" + index + ") is greater than list size (" + ( size64() ) + ")" ); } /** Ensures that the given index is nonnegative and smaller than this big-list size. * * @param index an index. * @throws IndexOutOfBoundsException if the given index is negative or not smaller than this big-list size. 
*/ protected void ensureRestrictedIndex( final long index ) { if ( index < 0 ) throw new IndexOutOfBoundsException( "Index (" + index + ") is negative" ); if ( index >= size64() ) throw new IndexOutOfBoundsException( "Index (" + index + ") is greater than or equal to list size (" + ( size64() ) + ")" ); } public void add( final long index, final KEY_GENERIC_TYPE k ) { throw new UnsupportedOperationException(); } public boolean add( final KEY_GENERIC_TYPE k ) { add( size64(), k ); return true; } public KEY_GENERIC_TYPE REMOVE_KEY( long i ) { throw new UnsupportedOperationException(); } public KEY_GENERIC_TYPE REMOVE_KEY( int i ) { return REMOVE_KEY( (long)i ); } public KEY_GENERIC_TYPE set( final long index, final KEY_GENERIC_TYPE k ) { throw new UnsupportedOperationException(); } public KEY_GENERIC_TYPE set( final int index, final KEY_GENERIC_TYPE k ) { return set( (long)index, k ); } public boolean addAll( long index, final Collection c ) { ensureIndex( index ); int n = c.size(); if ( n == 0 ) return false; Iterator i = c.iterator(); while( n-- != 0 ) add( index++, i.next() ); return true; } public boolean addAll( int index, final Collection c ) { return addAll( (long)index, c ); } /** Delegates to a more generic method. */ public boolean addAll( final Collection c ) { return addAll( size64(), c ); } public KEY_BIG_LIST_ITERATOR KEY_GENERIC iterator() { return listIterator(); } public KEY_BIG_LIST_ITERATOR KEY_GENERIC listIterator() { return listIterator( 0L ); } public KEY_BIG_LIST_ITERATOR KEY_GENERIC listIterator( final long index ) { ensureIndex( index ); return new KEY_ABSTRACT_BIG_LIST_ITERATOR KEY_GENERIC() { long pos = index, last = -1; public boolean hasNext() { return pos < ABSTRACT_BIG_LIST.this.size64(); } public boolean hasPrevious() { return pos > 0; } public KEY_GENERIC_TYPE NEXT_KEY() { if ( ! hasNext() ) throw new NoSuchElementException(); return ABSTRACT_BIG_LIST.this.GET_KEY( last = pos++ ); } public KEY_GENERIC_TYPE PREV_KEY() { if ( ! hasPrevious() ) throw new NoSuchElementException(); return ABSTRACT_BIG_LIST.this.GET_KEY( last = --pos ); } public long nextIndex() { return pos; } public long previousIndex() { return pos - 1; } public void add( KEY_GENERIC_TYPE k ) { ABSTRACT_BIG_LIST.this.add( pos++, k ); last = -1; } public void set( KEY_GENERIC_TYPE k ) { if ( last == -1 ) throw new IllegalStateException(); ABSTRACT_BIG_LIST.this.set( last, k ); } public void remove() { if ( last == -1 ) throw new IllegalStateException(); ABSTRACT_BIG_LIST.this.REMOVE_KEY( last ); /* If the last operation was a next(), we are removing an element *before* us, and we must decrease pos correspondingly. 
*/ if ( last < pos ) pos--; last = -1; } }; } public KEY_BIG_LIST_ITERATOR KEY_GENERIC listIterator( final int index ) { return listIterator( (long)index ); } public boolean contains( final KEY_TYPE k ) { return indexOf( k ) >= 0; } public long indexOf( final KEY_TYPE k ) { final KEY_BIG_LIST_ITERATOR KEY_GENERIC i = listIterator(); KEY_GENERIC_TYPE e; while( i.hasNext() ) { e = i.NEXT_KEY(); if ( KEY_EQUALS( k, e ) ) return i.previousIndex(); } return -1; } public long lastIndexOf( final KEY_TYPE k ) { KEY_BIG_LIST_ITERATOR KEY_GENERIC i = listIterator( size64() ); KEY_GENERIC_TYPE e; while( i.hasPrevious() ) { e = i.PREV_KEY(); if ( KEY_EQUALS( k, e ) ) return i.nextIndex(); } return -1; } public void size( final long size ) { long i = size64(); if ( size > i ) while( i++ < size ) add( KEY_NULL ); else while( i-- != size ) remove( i ); } public void size( final int size ) { size( (long)size ); } public BIG_LIST KEY_GENERIC subList( final long from, final long to ) { ensureIndex( from ); ensureIndex( to ); if ( from > to ) throw new IndexOutOfBoundsException( "Start index (" + from + ") is greater than end index (" + to + ")" ); return new SUBLIST KEY_GENERIC( this, from, to ); } /** Removes elements of this type-specific big list one-by-one. * *
<p>
This is a trivial iterator-based implementation. It is expected that * implementations will override this method with a more optimized version. * * @param from the start index (inclusive). * @param to the end index (exclusive). */ public void removeElements( final long from, final long to ) { ensureIndex( to ); KEY_BIG_LIST_ITERATOR KEY_GENERIC i = listIterator( from ); long n = to - from; if ( n < 0 ) throw new IllegalArgumentException( "Start index (" + from + ") is greater than end index (" + to + ")" ); while( n-- != 0 ) { i.NEXT_KEY(); i.remove(); } } /** Adds elements to this type-specific big list one-by-one. * *
<p>
This is a trivial iterator-based implementation. It is expected that * implementations will override this method with a more optimized version. * * @param index the index at which to add elements. * @param a the big array containing the elements. * @param offset the offset of the first element to add. * @param length the number of elements to add. */ public void addElements( long index, final KEY_GENERIC_TYPE a[][], long offset, long length ) { ensureIndex( index ); BIG_ARRAYS.ensureOffsetLength( a, offset, length ); while( length-- != 0 ) add( index++, BIG_ARRAYS.get( a, offset++ ) ); } public void addElements( final long index, final KEY_GENERIC_TYPE a[][] ) { addElements( index, a, 0, BIG_ARRAYS.length( a ) ); } /** Copies element of this type-specific big list into the given big array one-by-one. * *
<p>
This is a trivial iterator-based implementation. It is expected that * implementations will override this method with a more optimized version. * * @param from the start index (inclusive). * @param a the destination big array. * @param offset the offset into the destination big array where to store the first element copied. * @param length the number of elements to be copied. */ public void getElements( final long from, final KEY_TYPE a[][], long offset, long length ) { KEY_BIG_LIST_ITERATOR KEY_GENERIC i = listIterator( from ); BIG_ARRAYS.ensureOffsetLength( a, offset, length ); if ( from + length > size64() ) throw new IndexOutOfBoundsException( "End index (" + ( from + length ) + ") is greater than list size (" + size64() + ")" ); while( length-- != 0 ) BIG_ARRAYS.set( a, offset++, i.NEXT_KEY() ); } @Deprecated public int size() { return (int)Math.min( Integer.MAX_VALUE, size64() ); } #if ! KEY_CLASS_Reference private boolean valEquals( final Object a, final Object b ) { return a == null ? b == null : a.equals( b ); } #endif public boolean equals( final Object o ) { if ( o == this ) return true; if ( ! ( o instanceof BigList ) ) return false; final BigList l = (BigList)o; long s = size64(); if ( s != l.size64() ) return false; #if KEYS_PRIMITIVE if ( l instanceof BIG_LIST ) { final KEY_BIG_LIST_ITERATOR KEY_GENERIC i1 = listIterator(), i2 = ((BIG_LIST KEY_GENERIC)l).listIterator(); while( s-- != 0 ) if ( i1.NEXT_KEY() != i2.NEXT_KEY() ) return false; return true; } #endif final BigListIterator i1 = listIterator(), i2 = l.listIterator(); #if KEY_CLASS_Reference while( s-- != 0 ) if ( i1.next() != i2.next() ) return false; #else while( s-- != 0 ) if ( ! valEquals( i1.next(), i2.next() ) ) return false; #endif return true; } #if ! KEY_CLASS_Reference /** Compares this big list to another object. If the * argument is a {@link BigList}, this method performs a lexicographical comparison; otherwise, * it throws a ClassCastException. * * @param l a big list. * @return if the argument is a {@link BigList}, a negative integer, * zero, or a positive integer as this list is lexicographically less than, equal * to, or greater than the argument. * @throws ClassCastException if the argument is not a big list. */ SUPPRESS_WARNINGS_KEY_UNCHECKED public int compareTo( final BigList l ) { if ( l == this ) return 0; if ( l instanceof BIG_LIST ) { final KEY_BIG_LIST_ITERATOR KEY_GENERIC i1 = listIterator(), i2 = ((BIG_LIST KEY_GENERIC)l).listIterator(); int r; KEY_GENERIC_TYPE e1, e2; while( i1.hasNext() && i2.hasNext() ) { e1 = i1.NEXT_KEY(); e2 = i2.NEXT_KEY(); if ( ( r = KEY_CMP( e1, e2 ) ) != 0 ) return r; } return i2.hasNext() ? -1 : ( i1.hasNext() ? 1 : 0 ); } BigListIterator i1 = listIterator(), i2 = l.listIterator(); int r; while( i1.hasNext() && i2.hasNext() ) { if ( ( r = ((Comparable)i1.next()).compareTo( i2.next() ) ) != 0 ) return r; } return i2.hasNext() ? -1 : ( i1.hasNext() ? 1 : 0 ); } #endif /** Returns the hash code for this big list, which is identical to {@link java.util.List#hashCode()}. * * @return the hash code for this big list. 
*/ public int hashCode() { KEY_ITERATOR KEY_GENERIC i = iterator(); int h = 1; long s = size64(); while ( s-- != 0 ) { KEY_GENERIC_TYPE k = i.NEXT_KEY(); h = 31 * h + KEY2JAVAHASH( k ); } return h; } public void push( KEY_GENERIC_TYPE o ) { add( o ); } public KEY_GENERIC_TYPE POP() { if ( isEmpty() ) throw new NoSuchElementException(); return REMOVE_KEY( size64() - 1 ); } public KEY_GENERIC_TYPE TOP() { if ( isEmpty() ) throw new NoSuchElementException(); return GET_KEY( size64() - 1 ); } public KEY_GENERIC_TYPE PEEK( int i ) { return GET_KEY( size64() - 1 - i ); } #if KEYS_PRIMITIVE public KEY_TYPE GET_KEY( final int index ) { return GET_KEY( (long)index ); } public boolean rem( KEY_TYPE k ) { long index = indexOf( k ); if ( index == -1 ) return false; REMOVE_KEY( index ); return true; } /** Delegates to a more generic method. */ public boolean addAll( final long index, final COLLECTION c ) { return addAll( index, (Collection)c ); } /** Delegates to a more generic method. */ public boolean addAll( final long index, final BIG_LIST l ) { return addAll( index, (COLLECTION)l ); } public boolean addAll( final COLLECTION c ) { return addAll( size64(), c ); } public boolean addAll( final BIG_LIST l ) { return addAll( size64(), l ); } /** {@inheritDoc} * * Delegates to a more generic method. * * @deprecated Please use the corresponding type-specific method instead. */ @Deprecated public void add( final long index, final KEY_CLASS ok ) { add( index, ok.KEY_VALUE() ); } /** Delegates to the corresponding type-specific method. * @deprecated Please use the corresponding type-specific method instead. */ @Deprecated public KEY_CLASS set( final long index, final KEY_CLASS ok ) { return KEY2OBJ( set( index, ok.KEY_VALUE() ) ); } /** {@inheritDoc} * * Delegates to a more generic method. * * @deprecated Please use the corresponding type-specific method instead. */ @Deprecated public KEY_CLASS get( final long index ) { return KEY2OBJ( GET_KEY( index ) ); } /** {@inheritDoc} * * Delegates to a more generic method. * * @deprecated Please use the corresponding type-specific method instead. */ @Deprecated public long indexOf( final Object ok ) { return indexOf( KEY_OBJ2TYPE( ok ) ); } /** {@inheritDoc} * * Delegates to a more generic method. * * @deprecated Please use the corresponding type-specific method instead. */ @Deprecated public long lastIndexOf( final Object ok ) { return lastIndexOf( KEY_OBJ2TYPE( ok ) ); } /** * Delegates to a more generic method. * * @deprecated Please use the corresponding type-specific method instead. */ @Deprecated public KEY_CLASS remove( final int index ) { return KEY2OBJ( REMOVE_KEY( index ) ); } /** {@inheritDoc} * * Delegates to a more generic method. * * @deprecated Please use the corresponding type-specific method instead. */ @Deprecated public KEY_CLASS remove( final long index ) { return KEY2OBJ( REMOVE_KEY( index ) ); } /** {@inheritDoc} * * Delegates to a more generic method. * * @deprecated Please use the corresponding type-specific method instead. */ @Deprecated public void push( KEY_CLASS o ) { push( o.KEY_VALUE() ); } /** {@inheritDoc} * * Delegates to a more generic method. * * @deprecated Please use the corresponding type-specific method instead. */ @Deprecated public KEY_CLASS pop() { return KEY_CLASS.valueOf( POP() ); } /** {@inheritDoc} * * Delegates to a more generic method. * * @deprecated Please use the corresponding type-specific method instead. 
*/ @Deprecated public KEY_CLASS top() { return KEY_CLASS.valueOf( TOP() ); } /** {@inheritDoc} * * Delegates to a more generic method. * * @deprecated Please use the corresponding type-specific method instead. */ @Deprecated public KEY_CLASS peek( int i ) { return KEY_CLASS.valueOf( PEEK( i ) ); } #else public KEY_GENERIC_CLASS get( int index ) { return get( (long)index ); } #endif public String toString() { final StringBuilder s = new StringBuilder(); final KEY_ITERATOR KEY_GENERIC i = iterator(); long n = size64(); KEY_GENERIC_TYPE k; boolean first = true; s.append("["); while( n-- != 0 ) { if (first) first = false; else s.append(", "); k = i.NEXT_KEY(); #if KEYS_REFERENCE if (this == k) s.append("(this big list)"); else #endif s.append( String.valueOf( k ) ); } s.append("]"); return s.toString(); } public static class SUBLIST KEY_GENERIC extends ABSTRACT_BIG_LIST KEY_GENERIC implements java.io.Serializable { private static final long serialVersionUID = -7046029254386353129L; /** The list this sublist restricts. */ protected final BIG_LIST KEY_GENERIC l; /** Initial (inclusive) index of this sublist. */ protected final long from; /** Final (exclusive) index of this sublist. */ protected long to; private static final boolean ASSERTS = ASSERTS_VALUE; public SUBLIST( final BIG_LIST KEY_GENERIC l, final long from, final long to ) { this.l = l; this.from = from; this.to = to; } private void assertRange() { if ( ASSERTS ) { assert from <= l.size64(); assert to <= l.size64(); assert to >= from; } } public boolean add( final KEY_GENERIC_TYPE k ) { l.add( to, k ); to++; if ( ASSERTS ) assertRange(); return true; } public void add( final long index, final KEY_GENERIC_TYPE k ) { ensureIndex( index ); l.add( from + index, k ); to++; if ( ASSERTS ) assertRange(); } public boolean addAll( final long index, final Collection c ) { ensureIndex( index ); to += c.size(); if ( ASSERTS ) { boolean retVal = l.addAll( from + index, c ); assertRange(); return retVal; } return l.addAll( from + index, c ); } public KEY_GENERIC_TYPE GET_KEY( long index ) { ensureRestrictedIndex( index ); return l.GET_KEY( from + index ); } public KEY_GENERIC_TYPE REMOVE_KEY( long index ) { ensureRestrictedIndex( index ); to--; return l.REMOVE_KEY( from + index ); } public KEY_GENERIC_TYPE set( long index, KEY_GENERIC_TYPE k ) { ensureRestrictedIndex( index ); return l.set( from + index, k ); } public void clear() { removeElements( 0, size64() ); if ( ASSERTS ) assertRange(); } public long size64() { return to - from; } public void getElements( final long from, final KEY_TYPE[][] a, final long offset, final long length ) { ensureIndex( from ); if ( from + length > size64() ) throw new IndexOutOfBoundsException( "End index (" + from + length + ") is greater than list size (" + size64() + ")" ); l.getElements( this.from + from, a, offset, length ); } public void removeElements( final long from, final long to ) { ensureIndex( from ); ensureIndex( to ); l.removeElements( this.from + from, this.from + to ); this.to -= ( to - from ); if ( ASSERTS ) assertRange(); } public void addElements( final long index, final KEY_GENERIC_TYPE a[][], long offset, long length ) { ensureIndex( index ); l.addElements( this.from + index, a, offset, length ); this.to += length; if ( ASSERTS ) assertRange(); } public KEY_BIG_LIST_ITERATOR KEY_GENERIC listIterator( final long index ) { ensureIndex( index ); return new KEY_ABSTRACT_BIG_LIST_ITERATOR KEY_GENERIC() { long pos = index, last = -1; public boolean hasNext() { return pos < size64(); } public boolean 
hasPrevious() { return pos > 0; } public KEY_GENERIC_TYPE NEXT_KEY() { if ( ! hasNext() ) throw new NoSuchElementException(); return l.GET_KEY( from + ( last = pos++ ) ); } public KEY_GENERIC_TYPE PREV_KEY() { if ( ! hasPrevious() ) throw new NoSuchElementException(); return l.GET_KEY( from + ( last = --pos ) ); } public long nextIndex() { return pos; } public long previousIndex() { return pos - 1; } public void add( KEY_GENERIC_TYPE k ) { if ( last == -1 ) throw new IllegalStateException(); SUBLIST.this.add( pos++, k ); last = -1; if ( ASSERTS ) assertRange(); } public void set( KEY_GENERIC_TYPE k ) { if ( last == -1 ) throw new IllegalStateException(); SUBLIST.this.set( last, k ); } public void remove() { if ( last == -1 ) throw new IllegalStateException(); SUBLIST.this.REMOVE_KEY( last ); /* If the last operation was a next(), we are removing an element *before* us, and we must decrease pos correspondingly. */ if ( last < pos ) pos--; last = -1; if ( ASSERTS ) assertRange(); } }; } public BIG_LIST KEY_GENERIC subList( final long from, final long to ) { ensureIndex( from ); ensureIndex( to ); if ( from > to ) throw new IllegalArgumentException( "Start index (" + from + ") is greater than end index (" + to + ")" ); return new SUBLIST KEY_GENERIC( this, from, to ); } #if KEYS_PRIMITIVE public boolean rem( KEY_TYPE k ) { long index = indexOf( k ); if ( index == -1 ) return false; to--; l.REMOVE_KEY( from + index ); if ( ASSERTS ) assertRange(); return true; } public boolean remove( final Object o ) { return rem( KEY_OBJ2TYPE( o ) ); } public boolean addAll( final long index, final COLLECTION c ) { ensureIndex( index ); to += c.size(); if ( ASSERTS ) { boolean retVal = l.addAll( from + index, c ); assertRange(); return retVal; } return l.addAll( from + index, c ); } public boolean addAll( final long index, final LIST l ) { ensureIndex( index ); to += l.size(); if ( ASSERTS ) { boolean retVal = this.l.addAll( from + index, l ); assertRange(); return retVal; } return this.l.addAll( from + index, l ); } #else public boolean remove( final Object o ) { long index = indexOf( o ); if ( index == -1 ) return false; REMOVE_KEY( index ); return true; } #endif } } fastutil-7.1.0/drv/AbstractBigListIterator.drv0000664000000000000000000000476513050701620020123 0ustar rootroot/* * Copyright (C) 2002-2016 Sebastiano Vigna * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package PACKAGE; import java.util.ListIterator; import it.unimi.dsi.fastutil.BigListIterator; /** An abstract class facilitating the creation of type-specific {@linkplain it.unimi.dsi.fastutil.BigListIterator big-list iterators}. * *
<p>
This implementation provides (deprecated) implementations of {@link ListIterator#previousIndex()} and {@link ListIterator#nextIndex()} that * just invoke the corresponding {@link BigListIterator} methods. * * @see java.util.ListIterator * @see it.unimi.dsi.fastutil.BigListIterator */ public abstract class KEY_ABSTRACT_BIG_LIST_ITERATOR KEY_GENERIC extends KEY_ABSTRACT_BIDI_ITERATOR KEY_GENERIC implements KEY_BIG_LIST_ITERATOR KEY_GENERIC { protected KEY_ABSTRACT_BIG_LIST_ITERATOR() {} #if KEYS_PRIMITIVE /** Delegates to the corresponding type-specific method. */ public void set( KEY_GENERIC_CLASS ok ) { set( ok.KEY_VALUE() ); } /** Delegates to the corresponding type-specific method. */ public void add( KEY_GENERIC_CLASS ok ) { add( ok.KEY_VALUE() ); } #endif /** This method just throws an {@link UnsupportedOperationException}. */ public void set( KEY_GENERIC_TYPE k ) { throw new UnsupportedOperationException(); } /** This method just throws an {@link UnsupportedOperationException}. */ public void add( KEY_GENERIC_TYPE k ) { throw new UnsupportedOperationException(); } /** This method just iterates the type-specific version of {@code next()} for at most * n times, stopping if {@link #hasNext()} becomes false.*/ public long skip( final long n ) { long i = n; while( i-- != 0 && hasNext() ) NEXT_KEY(); return n - i - 1; } /** This method just iterates the type-specific version of {@link #previous()} for * at most n times, stopping if {@link * #hasPrevious()} becomes false. */ public long back( final long n ) { long i = n; while( i-- != 0 && hasPrevious() ) PREV_KEY(); return n - i - 1; } } fastutil-7.1.0/drv/AbstractCollection.drv0000664000000000000000000001754413050701620017146 0ustar rootroot/* * Copyright (C) 2002-2016 Sebastiano Vigna * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package PACKAGE; import java.util.AbstractCollection; import java.util.Collection; import java.util.Iterator; /** An abstract class providing basic methods for collections implementing a type-specific interface. * *
<p>
In particular, this class provides {@link #iterator()}, add(), {@link #remove(Object)} and * {@link #contains(Object)} methods that just call the type-specific counterpart. * *
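* <p>A minimal sketch of a subclass, assuming the generated int specialization
* it.unimi.dsi.fastutil.ints.AbstractIntCollection and the helper class IntArrayList
* (the wrapper class name is illustrative only). Note that, as the warning below
* explains, the deletion method being overridden is rem(), not remove(Object):
* the inherited remove(Object) already delegates to rem().
* <pre>
* import it.unimi.dsi.fastutil.ints.AbstractIntCollection;
* import it.unimi.dsi.fastutil.ints.IntArrayList;
* import it.unimi.dsi.fastutil.ints.IntIterator;
*
* // A thin wrapper that stores its elements in an IntArrayList; it shows the
* // methods a concrete subclass typically supplies.
* public class SimpleIntBag extends AbstractIntCollection {
*     private final IntArrayList delegate = new IntArrayList();
*     public IntIterator iterator() { return delegate.iterator(); }
*     public int size() { return delegate.size(); }
*     public boolean add( final int k ) { return delegate.add( k ); }
*     public boolean rem( final int k ) { return delegate.rem( k ); }
* }
* </pre>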
<p>
Warning: Because of a name clash between the list and collection interfaces * the type-specific deletion method of a type-specific abstract * collection is rem(), rather then remove(). A * subclass must thus override rem(), rather than * remove(), to make all inherited methods work properly. */ public abstract class ABSTRACT_COLLECTION KEY_GENERIC extends AbstractCollection implements COLLECTION KEY_GENERIC { protected ABSTRACT_COLLECTION() {} #if KEYS_PRIMITIVE public KEY_TYPE[] toArray( KEY_TYPE a[] ) { return TO_KEY_ARRAY( a ); } public KEY_TYPE[] TO_KEY_ARRAY() { return TO_KEY_ARRAY( null ); } public KEY_TYPE[] TO_KEY_ARRAY( KEY_TYPE a[] ) { if ( a == null || a.length < size() ) a = new KEY_TYPE[ size() ]; ITERATORS.unwrap( iterator(), a ); return a; } /** Adds all elements of the given type-specific collection to this collection. * * @param c a type-specific collection. * @return true if this collection changed as a result of the call. */ public boolean addAll( COLLECTION c ) { boolean retVal = false; final KEY_ITERATOR i = c.iterator(); int n = c.size(); while( n-- != 0 ) if ( add( i.NEXT_KEY() ) ) retVal = true; return retVal; } /** Checks whether this collection contains all elements from the given type-specific collection. * * @param c a type-specific collection. * @return true if this collection contains all elements of the argument. */ public boolean containsAll( COLLECTION c ) { final KEY_ITERATOR i = c.iterator(); int n = c.size(); while( n-- != 0 ) if ( ! contains( i.NEXT_KEY() ) ) return false; return true; } /** Retains in this collection only elements from the given type-specific collection. * * @param c a type-specific collection. * @return true if this collection changed as a result of the call. */ public boolean retainAll( COLLECTION c ) { boolean retVal = false; int n = size(); final KEY_ITERATOR i = iterator(); while( n-- != 0 ) { if ( ! c.contains( i.NEXT_KEY() ) ) { i.remove(); retVal = true; } } return retVal; } /** Remove from this collection all elements in the given type-specific collection. * * @param c a type-specific collection. * @return true if this collection changed as a result of the call. */ public boolean removeAll( COLLECTION c ) { boolean retVal = false; int n = c.size(); final KEY_ITERATOR i = c.iterator(); while( n-- != 0 ) if ( rem( i.NEXT_KEY() ) ) retVal = true; return retVal; } #endif public Object[] toArray() { final Object[] a = new Object[ size() ]; it.unimi.dsi.fastutil.objects.ObjectIterators.unwrap( iterator(), a ); return a; } @SuppressWarnings("unchecked") public T[] toArray( T[] a ) { final int size = size(); if ( a.length < size ) a = (T[])java.lang.reflect.Array.newInstance( a.getClass().getComponentType(), size ); it.unimi.dsi.fastutil.objects.ObjectIterators.unwrap( iterator(), a ); if ( size < a.length ) a[ size ] = null; return a; } /** Adds all elements of the given collection to this collection. * * @param c a collection. * @return true if this collection changed as a result of the call. */ public boolean addAll( Collection c ) { boolean retVal = false; final Iterator i = c.iterator(); int n = c.size(); while( n-- != 0 ) if ( add( i.next() ) ) retVal = true; return retVal; } public boolean add( KEY_GENERIC_TYPE k ) { throw new UnsupportedOperationException(); } /** Delegates to the new covariantly stronger generic method. 
*/ @Deprecated public KEY_ITERATOR KEY_GENERIC KEY_ITERATOR_METHOD() { return iterator(); } public abstract KEY_ITERATOR KEY_GENERIC iterator(); #if KEYS_PRIMITIVE /** Delegates to the type-specific rem() method. */ public boolean remove( Object ok ) { if ( ok == null ) return false; return rem( KEY_OBJ2TYPE( ok ) ); } /** Delegates to the corresponding type-specific method. */ public boolean add( final KEY_CLASS o ) { return add( o.KEY_VALUE() ); } /** Delegates to the corresponding type-specific method. */ public boolean rem( final Object o ) { if ( o == null ) return false; return rem( KEY_OBJ2TYPE(o) ); } /** Delegates to the corresponding type-specific method. */ public boolean contains( final Object o ) { if ( o == null ) return false; return contains( KEY_OBJ2TYPE(o) ); } public boolean contains( final KEY_TYPE k ) { final KEY_ITERATOR iterator = iterator(); while ( iterator.hasNext() ) if ( k == iterator.NEXT_KEY() ) return true; return false; } #else /** Delegates to {@link #rem(Object)}. */ @Override public boolean remove( final Object o ) { return rem(o); } #endif public boolean rem( final KEY_TYPE k ) { final KEY_ITERATOR iterator = iterator(); while ( iterator.hasNext() ) if ( k == iterator.NEXT_KEY() ) { iterator.remove(); return true; } return false; } /** Checks whether this collection contains all elements from the given collection. * * @param c a collection. * @return true if this collection contains all elements of the argument. */ public boolean containsAll( Collection c ) { int n = c.size(); final Iterator i = c.iterator(); while( n-- != 0 ) if ( ! contains( i.next() ) ) return false; return true; } /** Retains in this collection only elements from the given collection. * * @param c a collection. * @return true if this collection changed as a result of the call. */ public boolean retainAll( Collection c ) { boolean retVal = false; int n = size(); final Iterator i = iterator(); while( n-- != 0 ) { if ( ! c.contains( i.next() ) ) { i.remove(); retVal = true; } } return retVal; } /** Remove from this collection all elements in the given collection. * If the collection is an instance of this class, it uses faster iterators. * * @param c a collection. * @return true if this collection changed as a result of the call. */ public boolean removeAll( Collection c ) { boolean retVal = false; int n = c.size(); final Iterator i = c.iterator(); while( n-- != 0 ) if ( remove( i.next() ) ) retVal = true; return retVal; } public boolean isEmpty() { return size() == 0; } public String toString() { final StringBuilder s = new StringBuilder(); final KEY_ITERATOR KEY_GENERIC i = iterator(); int n = size(); KEY_TYPE k; boolean first = true; s.append("{"); while(n-- != 0) { if (first) first = false; else s.append(", "); k = i.NEXT_KEY(); #if KEYS_REFERENCE if (this == k) s.append("(this collection)"); else #endif s.append(String.valueOf(k)); } s.append("}"); return s.toString(); } } fastutil-7.1.0/drv/AbstractComparator.drv0000664000000000000000000000262413050701620017153 0ustar rootroot/* * Copyright (C) 2002-2016 Sebastiano Vigna * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
* See the License for the specific language governing permissions and * limitations under the License. */ package PACKAGE; /** An abstract class facilitating the creation of type-specific {@linkplain java.util.Comparator comparators}. * *
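* <p>A minimal sketch of the pattern described in the next paragraph, assuming the
* generated int specialization it.unimi.dsi.fastutil.ints.AbstractIntComparator
* (the subclass name is illustrative only): only the primitive compare( int, int )
* is written, while compare( Integer, Integer ) is inherited from this class.
* <pre>
* import it.unimi.dsi.fastutil.ints.AbstractIntComparator;
*
* // Orders integers by absolute value.
* public class AbsValueComparator extends AbstractIntComparator {
*     private static final long serialVersionUID = 1L;
*     public int compare( final int a, final int b ) {
*         return Integer.compare( Math.abs( a ), Math.abs( b ) );
*     }
* }
* </pre>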
<p>
To create a type-specific comparator you need both a method comparing * primitive types and a method comparing objects. However, if you have the * first one you can just inherit from this class and get for free the second * one. * * @see java.util.Comparator */ public abstract class KEY_ABSTRACT_COMPARATOR KEY_GENERIC implements KEY_COMPARATOR KEY_GENERIC, java.io.Serializable { private static final long serialVersionUID = 0L; protected KEY_ABSTRACT_COMPARATOR() {} public int compare( KEY_GENERIC_CLASS ok1, KEY_GENERIC_CLASS ok2 ) { return compare( ok1.KEY_VALUE(), ok2.KEY_VALUE() ); } public abstract int compare( KEY_TYPE k1, KEY_TYPE k2 ); } fastutil-7.1.0/drv/AbstractFunction.drv0000664000000000000000000001425313050701620016632 0ustar rootroot/* * Copyright (C) 2002-2016 Sebastiano Vigna * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package PACKAGE; /** An abstract class providing basic methods for functions implementing a type-specific interface. * *
<p>
Optional operations just throw an {@link * UnsupportedOperationException}. Generic versions of accessors delegate to * the corresponding type-specific counterparts following the interface rules * (they take care of returning null on a missing key). * *
<p>
This class directly handles a default return * value (including {@linkplain #defaultReturnValue() methods to access * it}). Instances of classes inheriting from this class just have to return * defRetValue to denote the lack of a key in type-specific methods. The value * is serialized. * *
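* <p>A sketch of this contract, assuming the generated int-to-int specialization
* it.unimi.dsi.fastutil.ints.AbstractInt2IntFunction (the subclass name and the
* bound 100 are illustrative only): a partial function that reports missing keys
* by returning defRetValue and provides only the type-specific methods required
* of subclasses (listed below).
* <pre>
* import it.unimi.dsi.fastutil.ints.AbstractInt2IntFunction;
*
* // Maps k to k*k for 0 <= k < 100; any other key is "missing".
* public class SmallSquares extends AbstractInt2IntFunction {
*     private static final long serialVersionUID = 1L;
*     public boolean containsKey( final int k ) { return k >= 0 && k < 100; }
*     public int get( final int k ) { return containsKey( k ) ? k * k : defRetValue; }
*     public int size() { return 100; }
* }
* </pre>
* <p>With the default setting, new SmallSquares().get( 1000 ) returns 0; after
* calling defaultReturnValue( -1 ) it returns -1 instead.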
<p>
Implementing subclasses have just to provide type-specific get(), * type-specific containsKey(), and size() methods. * */ public abstract class ABSTRACT_FUNCTION KEY_VALUE_GENERIC implements FUNCTION KEY_VALUE_GENERIC, java.io.Serializable { private static final long serialVersionUID = -4940583368468432370L; protected ABSTRACT_FUNCTION() {} /** * The default return value for get(), put() and * remove(). */ protected VALUE_GENERIC_TYPE defRetValue; public void defaultReturnValue( final VALUE_GENERIC_TYPE rv ) { defRetValue = rv; } public VALUE_GENERIC_TYPE defaultReturnValue() { return defRetValue; } public VALUE_GENERIC_TYPE put( KEY_GENERIC_TYPE key, VALUE_GENERIC_TYPE value ) { throw new UnsupportedOperationException(); } public VALUE_GENERIC_TYPE REMOVE_VALUE( KEY_TYPE key ) { throw new UnsupportedOperationException(); } public void clear() { throw new UnsupportedOperationException(); } #if KEYS_PRIMITIVE public boolean containsKey( final Object ok ) { if ( ok == null ) return false; return containsKey( KEY_OBJ2TYPE( ok ) ); } #endif #if KEYS_PRIMITIVE || VALUES_PRIMITIVE #if VALUES_PRIMITIVE /** Delegates to the corresponding type-specific method, taking care of returning null on a missing key. * *
<p>
This method must check whether the provided key is in the map using containsKey(). Thus, * it probes the map twice. Implementors of subclasses should override it with a more efficient method. * * @deprecated Please use the corresponding type-specific method instead. */ @Deprecated #else /** Delegates to the corresponding type-specific method, taking care of returning null on a missing key. * *
<p>
This method must check whether the provided key is in the map using containsKey(). Thus, * it probes the map twice. Implementors of subclasses should override it with a more efficient method. */ #endif public VALUE_GENERIC_CLASS get( final Object ok ) { #if KEYS_PRIMITIVE if ( ok == null ) return null; #endif final KEY_TYPE k = KEY_OBJ2TYPE( ok ); return containsKey( k ) ? VALUE2OBJ( GET_VALUE( k ) ) : null; } #if KEYS_PRIMITIVE || VALUES_PRIMITIVE /** Delegates to the corresponding type-specific method, taking care of returning null on a missing key. * *
<p>
This method must check whether the provided key is in the map using containsKey(). Thus, * it probes the map twice. Implementors of subclasses should override it with a more efficient method. * * @deprecated Please use the corresponding type-specific method instead. */ @Deprecated #else /** Delegates to the corresponding type-specific method, taking care of returning null on a missing key. * *
<p>
This method must check whether the provided key is in the map using containsKey(). Thus, * it probes the map twice. Implementors of subclasses should override it with a more efficient method. */ #endif public VALUE_GENERIC_CLASS put( final KEY_GENERIC_CLASS ok, final VALUE_GENERIC_CLASS ov ) { final KEY_GENERIC_TYPE k = KEY_CLASS2TYPE( ok ); final boolean containsKey = containsKey( k ); final VALUE_GENERIC_TYPE v = put( k, VALUE_CLASS2TYPE( ov ) ); return containsKey ? VALUE2OBJ( v ) : null; } #if VALUES_PRIMITIVE /** Delegates to the corresponding type-specific method, taking care of returning null on a missing key. * *
<p>
This method must check whether the provided key is in the map using containsKey(). Thus, * it probes the map twice. Implementors of subclasses should override it with a more efficient method. * * @deprecated Please use the corresponding type-specific method instead. */ @Deprecated #else /** Delegates to the corresponding type-specific method, taking care of returning null on a missing key. * *
<p>
This method must check whether the provided key is in the map using containsKey(). Thus, * it probes the map twice. Implementors of subclasses should override it with a more efficient method. */ #endif public VALUE_GENERIC_CLASS remove( final Object ok ) { #if KEYS_PRIMITIVE if ( ok == null ) return null; #endif final KEY_TYPE k = KEY_OBJ2TYPE( ok ); final boolean containsKey = containsKey( k ); final VALUE_GENERIC_TYPE v = REMOVE_VALUE( k ); return containsKey ? VALUE2OBJ( v ) : null; } #endif } fastutil-7.1.0/drv/AbstractIterator.drv0000664000000000000000000000410013050701620016624 0ustar rootroot/* * Copyright (C) 2002-2016 Sebastiano Vigna * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package PACKAGE; /** An abstract class facilitating the creation of type-specific iterators. * *
<p>
To create a type-specific iterator you need both a method returning the * next element as a primitive type and a method returning the next element as an * object. However, if you inherit from this class you need just one (any one). * *
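* <p>For instance, a minimal sketch assuming the generated int specialization
* it.unimi.dsi.fastutil.ints.AbstractIntIterator (the subclass name is illustrative
* only): hasNext() and the primitive nextInt() are supplied, and the
* object-returning next() is inherited.
* <pre>
* import it.unimi.dsi.fastutil.ints.AbstractIntIterator;
*
* // Returns n, n-1, ..., 1 and then stops.
* public class CountdownIterator extends AbstractIntIterator {
*     private int next;
*     public CountdownIterator( final int n ) { next = n; }
*     public boolean hasNext() { return next > 0; }
*     public int nextInt() { return next--; }
* }
* </pre>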
<p>
This class implements also a trivial version of {@link #skip(int)} that uses * type-specific methods; moreover, {@link #remove()} will throw an {@link * UnsupportedOperationException}. * * @see java.util.Iterator */ public abstract class KEY_ABSTRACT_ITERATOR KEY_GENERIC implements KEY_ITERATOR KEY_GENERIC { protected KEY_ABSTRACT_ITERATOR() {} #if KEYS_PRIMITIVE /** Delegates to the corresponding generic method. */ public KEY_TYPE NEXT_KEY() { return next().KEY_VALUE(); } /** Delegates to the corresponding type-specific method. * @deprecated Please use the corresponding type-specific method instead. */ @Deprecated public KEY_GENERIC_CLASS next() { return KEY_CLASS.valueOf( NEXT_KEY() ); } #endif /** This method just throws an {@link UnsupportedOperationException}. */ public void remove() { throw new UnsupportedOperationException(); } /** This method just iterates the type-specific version of {@link #next()} for at most * n times, stopping if {@link #hasNext()} becomes false.*/ public int skip( final int n ) { int i = n; while( i-- != 0 && hasNext() ) NEXT_KEY(); return n - i - 1; } } fastutil-7.1.0/drv/AbstractList.drv0000664000000000000000000005023413050701620015757 0ustar rootroot/* * Copyright (C) 2002-2016 Sebastiano Vigna * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package PACKAGE; #if KEYS_REFERENCE import it.unimi.dsi.fastutil.Stack; #endif import java.util.List; import java.util.Iterator; import java.util.ListIterator; import java.util.Collection; import java.util.NoSuchElementException; /** An abstract class providing basic methods for lists implementing a type-specific list interface. * *
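* <p>A minimal sketch of a subclass, assuming the generated int specialization
* it.unimi.dsi.fastutil.ints.AbstractIntList (the subclass name is illustrative
* only): an immutable random-access list needs just the type-specific get method
* and size(); iterators, sublists, equals(), hashCode(), toString() and the stack
* methods mentioned below are all inherited from this class.
* <pre>
* import it.unimi.dsi.fastutil.ints.AbstractIntList;
*
* // A read-only list whose i-th element is i*i.
* public class SquaresList extends AbstractIntList {
*     private final int n;
*     public SquaresList( final int n ) { this.n = n; }
*     public int getInt( final int i ) { ensureRestrictedIndex( i ); return i * i; }
*     public int size() { return n; }
* }
* </pre>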
<p>
As an additional bonus, this class implements on top of the list operations a type-specific stack. */ public abstract class ABSTRACT_LIST KEY_GENERIC extends ABSTRACT_COLLECTION KEY_GENERIC implements LIST KEY_GENERIC, STACK KEY_GENERIC { protected ABSTRACT_LIST() {} /** Ensures that the given index is nonnegative and not greater than the list size. * * @param index an index. * @throws IndexOutOfBoundsException if the given index is negative or greater than the list size. */ protected void ensureIndex( final int index ) { if ( index < 0 ) throw new IndexOutOfBoundsException( "Index (" + index + ") is negative" ); if ( index > size() ) throw new IndexOutOfBoundsException( "Index (" + index + ") is greater than list size (" + ( size() ) + ")" ); } /** Ensures that the given index is nonnegative and smaller than the list size. * * @param index an index. * @throws IndexOutOfBoundsException if the given index is negative or not smaller than the list size. */ protected void ensureRestrictedIndex( final int index ) { if ( index < 0 ) throw new IndexOutOfBoundsException( "Index (" + index + ") is negative" ); if ( index >= size() ) throw new IndexOutOfBoundsException( "Index (" + index + ") is greater than or equal to list size (" + ( size() ) + ")" ); } public void add( final int index, final KEY_GENERIC_TYPE k ) { throw new UnsupportedOperationException(); } public boolean add( final KEY_GENERIC_TYPE k ) { add( size(), k ); return true; } public KEY_GENERIC_TYPE REMOVE_KEY( int i ) { throw new UnsupportedOperationException(); } public KEY_GENERIC_TYPE set( final int index, final KEY_GENERIC_TYPE k ) { throw new UnsupportedOperationException(); } public boolean addAll( int index, final Collection c ) { ensureIndex( index ); int n = c.size(); if ( n == 0 ) return false; Iterator i = c.iterator(); while( n-- != 0 ) add( index++, i.next() ); return true; } /** Delegates to a more generic method. */ public boolean addAll( final Collection c ) { return addAll( size(), c ); } /** Delegates to the new covariantly stronger generic method. */ @Deprecated public KEY_LIST_ITERATOR KEY_GENERIC KEY_LIST_ITERATOR_METHOD() { return listIterator(); } /** Delegates to the new covariantly stronger generic method. */ @Deprecated public KEY_LIST_ITERATOR KEY_GENERIC KEY_LIST_ITERATOR_METHOD( final int index ) { return listIterator( index ); } public KEY_LIST_ITERATOR KEY_GENERIC iterator() { return listIterator(); } public KEY_LIST_ITERATOR KEY_GENERIC listIterator() { return listIterator( 0 ); } public KEY_LIST_ITERATOR KEY_GENERIC listIterator( final int index ) { ensureIndex( index ); return new KEY_ABSTRACT_LIST_ITERATOR KEY_GENERIC() { int pos = index, last = -1; public boolean hasNext() { return pos < ABSTRACT_LIST.this.size(); } public boolean hasPrevious() { return pos > 0; } public KEY_GENERIC_TYPE NEXT_KEY() { if ( ! hasNext() ) throw new NoSuchElementException(); return ABSTRACT_LIST.this.GET_KEY( last = pos++ ); } public KEY_GENERIC_TYPE PREV_KEY() { if ( ! 
hasPrevious() ) throw new NoSuchElementException(); return ABSTRACT_LIST.this.GET_KEY( last = --pos ); } public int nextIndex() { return pos; } public int previousIndex() { return pos - 1; } public void add( KEY_GENERIC_TYPE k ) { ABSTRACT_LIST.this.add( pos++, k ); last = -1; } public void set( KEY_GENERIC_TYPE k ) { if ( last == -1 ) throw new IllegalStateException(); ABSTRACT_LIST.this.set( last, k ); } public void remove() { if ( last == -1 ) throw new IllegalStateException(); ABSTRACT_LIST.this.REMOVE_KEY( last ); /* If the last operation was a next(), we are removing an element *before* us, and we must decrease pos correspondingly. */ if ( last < pos ) pos--; last = -1; } }; } public boolean contains( final KEY_TYPE k ) { return indexOf( k ) >= 0; } public int indexOf( final KEY_TYPE k ) { final KEY_LIST_ITERATOR KEY_GENERIC i = listIterator(); KEY_GENERIC_TYPE e; while( i.hasNext() ) { e = i.NEXT_KEY(); if ( KEY_EQUALS( k, e ) ) return i.previousIndex(); } return -1; } public int lastIndexOf( final KEY_TYPE k ) { KEY_LIST_ITERATOR KEY_GENERIC i = listIterator( size() ); KEY_GENERIC_TYPE e; while( i.hasPrevious() ) { e = i.PREV_KEY(); if ( KEY_EQUALS( k, e ) ) return i.nextIndex(); } return -1; } public void size( final int size ) { int i = size(); if ( size > i ) while( i++ < size ) add( KEY_NULL ); else while( i-- != size ) remove( i ); } public LIST KEY_GENERIC subList( final int from, final int to ) { ensureIndex( from ); ensureIndex( to ); if ( from > to ) throw new IndexOutOfBoundsException( "Start index (" + from + ") is greater than end index (" + to + ")" ); return new SUBLIST KEY_GENERIC( this, from, to ); } /** Delegates to the new covariantly stronger generic method. */ @Deprecated public LIST KEY_GENERIC SUBLIST_METHOD( final int from, final int to ) { return subList( from, to ); } /** Removes elements of this type-specific list one-by-one. * *
<p>
This is a trivial iterator-based implementation. It is expected that * implementations will override this method with a more optimized version. * * * @param from the start index (inclusive). * @param to the end index (exclusive). */ public void removeElements( final int from, final int to ) { ensureIndex( to ); KEY_LIST_ITERATOR KEY_GENERIC i = listIterator( from ); int n = to - from; if ( n < 0 ) throw new IllegalArgumentException( "Start index (" + from + ") is greater than end index (" + to + ")" ); while( n-- != 0 ) { i.NEXT_KEY(); i.remove(); } } /** Adds elements to this type-specific list one-by-one. * *
<p>
This is a trivial iterator-based implementation. It is expected that * implementations will override this method with a more optimized version. * * @param index the index at which to add elements. * @param a the array containing the elements. * @param offset the offset of the first element to add. * @param length the number of elements to add. */ public void addElements( int index, final KEY_GENERIC_TYPE a[], int offset, int length ) { ensureIndex( index ); if ( offset < 0 ) throw new ArrayIndexOutOfBoundsException( "Offset (" + offset + ") is negative" ); if ( offset + length > a.length ) throw new ArrayIndexOutOfBoundsException( "End index (" + ( offset + length ) + ") is greater than array length (" + a.length + ")" ); while( length-- != 0 ) add( index++, a[ offset++ ] ); } public void addElements( final int index, final KEY_GENERIC_TYPE a[] ) { addElements( index, a, 0, a.length ); } /** Copies element of this type-specific list into the given array one-by-one. * *
<p>
This is a trivial iterator-based implementation. It is expected that * implementations will override this method with a more optimized version. * * @param from the start index (inclusive). * @param a the destination array. * @param offset the offset into the destination array where to store the first element copied. * @param length the number of elements to be copied. */ public void getElements( final int from, final KEY_TYPE a[], int offset, int length ) { KEY_LIST_ITERATOR KEY_GENERIC i = listIterator( from ); if ( offset < 0 ) throw new ArrayIndexOutOfBoundsException( "Offset (" + offset + ") is negative" ); if ( offset + length > a.length ) throw new ArrayIndexOutOfBoundsException( "End index (" + ( offset + length ) + ") is greater than array length (" + a.length + ")" ); if ( from + length > size() ) throw new IndexOutOfBoundsException( "End index (" + ( from + length ) + ") is greater than list size (" + size() + ")" ); while( length-- != 0 ) a[ offset++ ] = i.NEXT_KEY(); } #if ! KEY_CLASS_Reference private boolean valEquals( final Object a, final Object b ) { return a == null ? b == null : a.equals( b ); } #endif public boolean equals( final Object o ) { if ( o == this ) return true; if ( ! ( o instanceof List ) ) return false; final List l = (List)o; int s = size(); if ( s != l.size() ) return false; #if KEYS_PRIMITIVE if ( l instanceof LIST ) { final KEY_LIST_ITERATOR KEY_GENERIC i1 = listIterator(), i2 = ((LIST KEY_GENERIC)l).listIterator(); while( s-- != 0 ) if ( i1.NEXT_KEY() != i2.NEXT_KEY() ) return false; return true; } #endif final ListIterator i1 = listIterator(), i2 = l.listIterator(); #if KEY_CLASS_Reference while( s-- != 0 ) if ( i1.next() != i2.next() ) return false; #else while( s-- != 0 ) if ( ! valEquals( i1.next(), i2.next() ) ) return false; #endif return true; } #if ! KEY_CLASS_Reference /** Compares this list to another object. If the * argument is a {@link java.util.List}, this method performs a lexicographical comparison; otherwise, * it throws a ClassCastException. * * @param l a list. * @return if the argument is a {@link java.util.List}, a negative integer, * zero, or a positive integer as this list is lexicographically less than, equal * to, or greater than the argument. * @throws ClassCastException if the argument is not a list. */ SUPPRESS_WARNINGS_KEY_UNCHECKED public int compareTo( final List l ) { if ( l == this ) return 0; if ( l instanceof LIST ) { final KEY_LIST_ITERATOR KEY_GENERIC i1 = listIterator(), i2 = ((LIST KEY_GENERIC)l).listIterator(); int r; KEY_GENERIC_TYPE e1, e2; while( i1.hasNext() && i2.hasNext() ) { e1 = i1.NEXT_KEY(); e2 = i2.NEXT_KEY(); if ( ( r = KEY_CMP( e1, e2 ) ) != 0 ) return r; } return i2.hasNext() ? -1 : ( i1.hasNext() ? 1 : 0 ); } ListIterator i1 = listIterator(), i2 = l.listIterator(); int r; while( i1.hasNext() && i2.hasNext() ) { if ( ( r = ((Comparable)i1.next()).compareTo( i2.next() ) ) != 0 ) return r; } return i2.hasNext() ? -1 : ( i1.hasNext() ? 1 : 0 ); } #endif /** Returns the hash code for this list, which is identical to {@link java.util.List#hashCode()}. * * @return the hash code for this list. 
*/ public int hashCode() { KEY_ITERATOR KEY_GENERIC i = iterator(); int h = 1, s = size(); while ( s-- != 0 ) { KEY_GENERIC_TYPE k = i.NEXT_KEY(); h = 31 * h + KEY2JAVAHASH( k ); } return h; } public void push( KEY_GENERIC_TYPE o ) { add( o ); } public KEY_GENERIC_TYPE POP() { if ( isEmpty() ) throw new NoSuchElementException(); return REMOVE_KEY( size() - 1 ); } public KEY_GENERIC_TYPE TOP() { if ( isEmpty() ) throw new NoSuchElementException(); return GET_KEY( size() - 1 ); } public KEY_GENERIC_TYPE PEEK( int i ) { return GET_KEY( size() - 1 - i ); } #if KEYS_PRIMITIVE public boolean rem( KEY_TYPE k ) { int index = indexOf( k ); if ( index == -1 ) return false; REMOVE_KEY( index ); return true; } /** Delegates to rem(). */ public boolean remove( final Object o ) { return rem( KEY_OBJ2TYPE( o ) ); } /** Delegates to a more generic method. */ public boolean addAll( final int index, final COLLECTION c ) { return addAll( index, (Collection)c ); } /** Delegates to a more generic method. */ public boolean addAll( final int index, final LIST l ) { return addAll( index, (COLLECTION)l ); } public boolean addAll( final COLLECTION c ) { return addAll( size(), c ); } public boolean addAll( final LIST l ) { return addAll( size(), l ); } /** Delegates to the corresponding type-specific method. */ public void add( final int index, final KEY_CLASS ok ) { add( index, ok.KEY_VALUE() ); } /** Delegates to the corresponding type-specific method. * @deprecated Please use the corresponding type-specific method instead. */ @Deprecated public KEY_CLASS set( final int index, final KEY_CLASS ok ) { return KEY2OBJ( set( index, ok.KEY_VALUE() ) ); } /** Delegates to the corresponding type-specific method. * @deprecated Please use the corresponding type-specific method instead. */ @Deprecated public KEY_CLASS get( final int index ) { return KEY2OBJ( GET_KEY( index ) ); } /** Delegates to the corresponding type-specific method. */ public int indexOf( final Object ok) { return indexOf( KEY_OBJ2TYPE( ok ) ); } /** Delegates to the corresponding type-specific method. */ public int lastIndexOf( final Object ok ) { return lastIndexOf( KEY_OBJ2TYPE( ok ) ); } /** Delegates to the corresponding type-specific method. * @deprecated Please use the corresponding type-specific method instead. */ @Deprecated public KEY_CLASS remove( final int index ) { return KEY2OBJ( REMOVE_KEY( index ) ); } /** Delegates to the corresponding type-specific method. */ public void push( KEY_CLASS o ) { push( o.KEY_VALUE() ); } /** Delegates to the corresponding type-specific method. * @deprecated Please use the corresponding type-specific method instead. */ @Deprecated public KEY_CLASS pop() { return KEY_CLASS.valueOf( POP() ); } /** Delegates to the corresponding type-specific method. * @deprecated Please use the corresponding type-specific method instead. */ @Deprecated public KEY_CLASS top() { return KEY_CLASS.valueOf( TOP() ); } /** Delegates to the corresponding type-specific method. * @deprecated Please use the corresponding type-specific method instead. 
*/ @Deprecated public KEY_CLASS peek( int i ) { return KEY_CLASS.valueOf( PEEK( i ) ); } #endif public String toString() { final StringBuilder s = new StringBuilder(); final KEY_ITERATOR KEY_GENERIC i = iterator(); int n = size(); KEY_GENERIC_TYPE k; boolean first = true; s.append("["); while( n-- != 0 ) { if (first) first = false; else s.append(", "); k = i.NEXT_KEY(); #if KEYS_REFERENCE if (this == k) s.append("(this list)"); else #endif s.append( String.valueOf( k ) ); } s.append("]"); return s.toString(); } public static class SUBLIST KEY_GENERIC extends ABSTRACT_LIST KEY_GENERIC implements java.io.Serializable { private static final long serialVersionUID = -7046029254386353129L; /** The list this sublist restricts. */ protected final LIST KEY_GENERIC l; /** Initial (inclusive) index of this sublist. */ protected final int from; /** Final (exclusive) index of this sublist. */ protected int to; private static final boolean ASSERTS = ASSERTS_VALUE; public SUBLIST( final LIST KEY_GENERIC l, final int from, final int to ) { this.l = l; this.from = from; this.to = to; } private void assertRange() { if ( ASSERTS ) { assert from <= l.size(); assert to <= l.size(); assert to >= from; } } public boolean add( final KEY_GENERIC_TYPE k ) { l.add( to, k ); to++; if ( ASSERTS ) assertRange(); return true; } public void add( final int index, final KEY_GENERIC_TYPE k ) { ensureIndex( index ); l.add( from + index, k ); to++; if ( ASSERTS ) assertRange(); } public boolean addAll( final int index, final Collection c ) { ensureIndex( index ); to += c.size(); if ( ASSERTS ) { boolean retVal = l.addAll( from + index, c ); assertRange(); return retVal; } return l.addAll( from + index, c ); } public KEY_GENERIC_TYPE GET_KEY( int index ) { ensureRestrictedIndex( index ); return l.GET_KEY( from + index ); } public KEY_GENERIC_TYPE REMOVE_KEY( int index ) { ensureRestrictedIndex( index ); to--; return l.REMOVE_KEY( from + index ); } public KEY_GENERIC_TYPE set( int index, KEY_GENERIC_TYPE k ) { ensureRestrictedIndex( index ); return l.set( from + index, k ); } public void clear() { removeElements( 0, size() ); if ( ASSERTS ) assertRange(); } public int size() { return to - from; } public void getElements( final int from, final KEY_TYPE[] a, final int offset, final int length ) { ensureIndex( from ); if ( from + length > size() ) throw new IndexOutOfBoundsException( "End index (" + from + length + ") is greater than list size (" + size() + ")" ); l.getElements( this.from + from, a, offset, length ); } public void removeElements( final int from, final int to ) { ensureIndex( from ); ensureIndex( to ); l.removeElements( this.from + from, this.from + to ); this.to -= ( to - from ); if ( ASSERTS ) assertRange(); } public void addElements( int index, final KEY_GENERIC_TYPE a[], int offset, int length ) { ensureIndex( index ); l.addElements( this.from + index, a, offset, length ); this.to += length; if ( ASSERTS ) assertRange(); } public KEY_LIST_ITERATOR KEY_GENERIC listIterator( final int index ) { ensureIndex( index ); return new KEY_ABSTRACT_LIST_ITERATOR KEY_GENERIC() { int pos = index, last = -1; public boolean hasNext() { return pos < size(); } public boolean hasPrevious() { return pos > 0; } public KEY_GENERIC_TYPE NEXT_KEY() { if ( ! hasNext() ) throw new NoSuchElementException(); return l.GET_KEY( from + ( last = pos++ ) ); } public KEY_GENERIC_TYPE PREV_KEY() { if ( ! 
hasPrevious() ) throw new NoSuchElementException(); return l.GET_KEY( from + ( last = --pos ) ); } public int nextIndex() { return pos; } public int previousIndex() { return pos - 1; } public void add( KEY_GENERIC_TYPE k ) { if ( last == -1 ) throw new IllegalStateException(); SUBLIST.this.add( pos++, k ); last = -1; if ( ASSERTS ) assertRange(); } public void set( KEY_GENERIC_TYPE k ) { if ( last == -1 ) throw new IllegalStateException(); SUBLIST.this.set( last, k ); } public void remove() { if ( last == -1 ) throw new IllegalStateException(); SUBLIST.this.REMOVE_KEY( last ); /* If the last operation was a next(), we are removing an element *before* us, and we must decrease pos correspondingly. */ if ( last < pos ) pos--; last = -1; if ( ASSERTS ) assertRange(); } }; } public LIST KEY_GENERIC subList( final int from, final int to ) { ensureIndex( from ); ensureIndex( to ); if ( from > to ) throw new IllegalArgumentException( "Start index (" + from + ") is greater than end index (" + to + ")" ); return new SUBLIST KEY_GENERIC( this, from, to ); } #if KEYS_PRIMITIVE public boolean rem( KEY_TYPE k ) { int index = indexOf( k ); if ( index == -1 ) return false; to--; l.REMOVE_KEY( from + index ); if ( ASSERTS ) assertRange(); return true; } public boolean remove( final Object o ) { return rem( KEY_OBJ2TYPE( o ) ); } public boolean addAll( final int index, final COLLECTION c ) { ensureIndex( index ); to += c.size(); if ( ASSERTS ) { boolean retVal = l.addAll( from + index, c ); assertRange(); return retVal; } return l.addAll( from + index, c ); } public boolean addAll( final int index, final LIST l ) { ensureIndex( index ); to += l.size(); if ( ASSERTS ) { boolean retVal = this.l.addAll( from + index, l ); assertRange(); return retVal; } return this.l.addAll( from + index, l ); } #else public boolean remove( final Object o ) { int index = indexOf( o ); if ( index == -1 ) return false; REMOVE_KEY( index ); return true; } #endif } } fastutil-7.1.0/drv/AbstractListIterator.drv0000664000000000000000000000377613050701620017502 0ustar rootroot/* * Copyright (C) 2002-2016 Sebastiano Vigna * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package PACKAGE; /** An abstract class facilitating the creation of type-specific {@linkplain java.util.ListIterator list iterators}. * *

This class provides trivial type-specific implementations of {@link * java.util.ListIterator#set(Object) set()} and {@link java.util.ListIterator#add(Object) add()} which * throw an {@link UnsupportedOperationException}. For primitive types, it also * provides a trivial implementation of {@link java.util.ListIterator#set(Object) set()} and {@link * java.util.ListIterator#add(Object) add()} that just invokes the type-specific one. * * * @see java.util.ListIterator */ public abstract class KEY_ABSTRACT_LIST_ITERATOR KEY_GENERIC extends KEY_ABSTRACT_BIDI_ITERATOR KEY_GENERIC implements KEY_LIST_ITERATOR KEY_GENERIC { protected KEY_ABSTRACT_LIST_ITERATOR() {} #if KEYS_PRIMITIVE /** Delegates to the corresponding type-specific method. */ public void set( KEY_GENERIC_CLASS ok ) { set( ok.KEY_VALUE() ); } /** Delegates to the corresponding type-specific method. */ public void add( KEY_GENERIC_CLASS ok ) { add( ok.KEY_VALUE() ); } #endif /** This method just throws an {@link UnsupportedOperationException}. */ public void set( KEY_GENERIC_TYPE k ) { throw new UnsupportedOperationException(); } /** This method just throws an {@link UnsupportedOperationException}. */ public void add( KEY_GENERIC_TYPE k ) { throw new UnsupportedOperationException(); } } fastutil-7.1.0/drv/AbstractMap.drv0000664000000000000000000002405213050701620015560 0ustar rootroot/* * Copyright (C) 2002-2016 Sebastiano Vigna * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package PACKAGE; import VALUE_PACKAGE.VALUE_COLLECTION; import VALUE_PACKAGE.VALUE_ABSTRACT_COLLECTION; import VALUE_PACKAGE.VALUE_ITERATOR; import VALUE_PACKAGE.VALUE_ABSTRACT_ITERATOR; import it.unimi.dsi.fastutil.objects.ObjectSet; #if KEYS_PRIMITIVE && VALUES_PRIMITIVE import it.unimi.dsi.fastutil.objects.ObjectIterator; #endif import java.util.Iterator; import java.util.Map; /** An abstract class providing basic methods for maps implementing a type-specific interface. * *

Optional operations just throw an {@link * UnsupportedOperationException}. Generic versions of accessors delegate to * the corresponding type-specific counterparts following the interface rules * (they take care of returning null on a missing key). * *
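* For instance (an illustrative sketch, assuming the generated type-specific classes
* Int2IntMap and Int2IntOpenHashMap; names and behaviour should be checked against the
* generated sources), the generic accessor returns null on a missing key, whereas the
* type-specific one returns the default return value:
*
* <pre>
* Int2IntMap m = new Int2IntOpenHashMap();
* m.defaultReturnValue( -1 );
* m.put( 1, 10 );
* m.get( 1 );                     // 10   (type-specific accessor)
* m.get( Integer.valueOf( 2 ) );  // null (generic accessor, missing key)
* m.get( 2 );                     // -1   (type-specific accessor returns the default return value)
* </pre>
*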

As a further help, this class provides a {@link BasicEntry BasicEntry} inner class * that implements a type-specific version of {@link java.util.Map.Entry}; it * is particularly useful for those classes that do not implement their own * entries (e.g., most immutable maps). */ public abstract class ABSTRACT_MAP KEY_VALUE_GENERIC extends ABSTRACT_FUNCTION KEY_VALUE_GENERIC implements MAP KEY_VALUE_GENERIC, java.io.Serializable { private static final long serialVersionUID = -4940583368468432370L; protected ABSTRACT_MAP() {} #if VALUES_PRIMITIVE public boolean containsValue( Object ov ) { if ( ov == null ) return false; return containsValue( VALUE_OBJ2TYPE( ov ) ); } #endif /** Checks whether the given value is contained in {@link #values()}. */ public boolean containsValue( VALUE_TYPE v ) { return values().contains( v ); } /** Checks whether the given value is contained in {@link #keySet()}. */ public boolean containsKey( KEY_TYPE k ) { return keySet().contains( k ); } /** Puts all pairs in the given map. * If the map implements the interface of this map, * it uses the faster iterators. * * @param m a map. */ #if KEYS_PRIMITIVE && VALUES_PRIMITIVE @SuppressWarnings("deprecation") #elif KEYS_PRIMITIVE ^ VALUES_PRIMITIVE @SuppressWarnings({"unchecked","deprecation"}) #endif public void putAll(Map m) { int n = m.size(); final Iterator> i = m.entrySet().iterator(); if (m instanceof MAP) { MAP.Entry KEY_VALUE_EXTENDS_GENERIC e; while(n-- != 0) { e = (MAP.Entry KEY_VALUE_EXTENDS_GENERIC)i.next(); put(e.ENTRY_GET_KEY(), e.ENTRY_GET_VALUE()); } } else { Map.Entry e; while(n-- != 0) { e = i.next(); put(e.getKey(), e.getValue()); } } } public boolean isEmpty() { return size() == 0; } /** This class provides a basic but complete type-specific entry class for all those maps implementations * that do not have entries on their own (e.g., most immutable maps). * *
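* For instance (an illustrative sketch, assuming the generated types Int2IntMap and
* AbstractInt2IntMap; the accessor names are the usual generated type-specific ones),
* an entry can be created and inspected as follows:
*
* <pre>
* Int2IntMap.Entry e = new AbstractInt2IntMap.BasicEntry( 1, 10 );
* e.getIntKey();    // 1
* e.getIntValue();  // 10
* // e.setValue( 42 ) would throw an UnsupportedOperationException; see below.
* </pre>
*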

This class does not implement {@link java.util.Map.Entry#setValue(Object) setValue()}, as the modification * would not be reflected in the base map. */ public static class BasicEntry KEY_VALUE_GENERIC implements MAP.Entry KEY_VALUE_GENERIC { protected KEY_GENERIC_TYPE key; protected VALUE_GENERIC_TYPE value; public BasicEntry( final KEY_GENERIC_CLASS key, final VALUE_GENERIC_CLASS value ) { this.key = KEY_CLASS2TYPE(key); this.value = VALUE_CLASS2TYPE(value); } #if KEYS_PRIMITIVE || VALUES_PRIMITIVE public BasicEntry( final KEY_GENERIC_TYPE key, final VALUE_GENERIC_TYPE value ) { this.key = key; this.value = value; } #endif #if KEYS_PRIMITIVE /** {@inheritDoc} * @deprecated Please use the corresponding type-specific method instead. */ @Deprecated #endif public KEY_GENERIC_CLASS getKey() { return KEY2OBJ(key); } #if KEYS_PRIMITIVE public KEY_TYPE ENTRY_GET_KEY() { return key; } #endif #if VALUES_PRIMITIVE /** {@inheritDoc} * @deprecated Please use the corresponding type-specific method instead. */ @Deprecated #endif public VALUE_GENERIC_CLASS getValue() { return VALUE2OBJ(value); } #if VALUES_PRIMITIVE public VALUE_TYPE ENTRY_GET_VALUE() { return value; } #endif public VALUE_GENERIC_TYPE setValue( final VALUE_GENERIC_TYPE value ) { throw new UnsupportedOperationException(); } #if VALUES_PRIMITIVE /** {@inheritDoc} * @deprecated Please use the corresponding type-specific method instead. */ @Deprecated public VALUE_GENERIC_CLASS setValue( final VALUE_GENERIC_CLASS value ) { return VALUE_CLASS.valueOf(setValue(value.VALUE_VALUE())); } #endif public boolean equals( final Object o ) { if (!(o instanceof Map.Entry)) return false; final Map.Entry e = (Map.Entry)o; #if KEYS_PRIMITIVE if (e.getKey() == null || ! (e.getKey() instanceof KEY_CLASS)) return false; #endif #if VALUES_PRIMITIVE if (e.getValue() == null || ! (e.getValue() instanceof VALUE_CLASS)) return false; #endif return KEY_EQUALS( key, KEY_OBJ2TYPE( e.getKey() ) ) && VALUE_EQUALS( value, VALUE_OBJ2TYPE( e.getValue() ) ); } public int hashCode() { return KEY2JAVAHASH(key) ^ VALUE2JAVAHASH(value); } public String toString() { return key + "->" + value; } } /** Returns a type-specific-set view of the keys of this map. * *

The view is backed by the set returned by {@link #entrySet()}. Note that * no attempt is made at caching the result of this method, as this would * require adding some attributes that lightweight implementations would * not need. Subclasses may easily override this policy by calling * this method and caching the result, but implementors are encouraged to * write more efficient ad-hoc implementations. * * @return a set view of the keys of this map; it may be safely cast to a type-specific interface. */ public SET KEY_GENERIC keySet() { return new ABSTRACT_SET KEY_GENERIC() { public boolean contains( final KEY_TYPE k ) { return containsKey( k ); } public int size() { return ABSTRACT_MAP.this.size(); } public void clear() { ABSTRACT_MAP.this.clear(); } public KEY_ITERATOR KEY_GENERIC iterator() { return new KEY_ABSTRACT_ITERATOR KEY_GENERIC() { final ObjectIterator> i = entrySet().iterator(); @Override public KEY_GENERIC_TYPE NEXT_KEY() { return ((MAP.Entry KEY_VALUE_GENERIC)i.next()).ENTRY_GET_KEY(); }; @Override public boolean hasNext() { return i.hasNext(); } @Override public void remove() { i.remove(); } }; } }; } /** Returns a type-specific-set view of the values of this map. * *

The view is backed by the set returned by {@link #entrySet()}. Note that * no attempt is made at caching the result of this method, as this would * require adding some attributes that lightweight implementations would * not need. Subclasses may easily override this policy by calling * this method and caching the result, but implementors are encouraged to * write more efficient ad-hoc implementations. * * @return a set view of the values of this map; it may be safely cast to a type-specific interface. */ public VALUE_COLLECTION VALUE_GENERIC values() { return new VALUE_ABSTRACT_COLLECTION VALUE_GENERIC() { public boolean contains( final VALUE_TYPE k ) { return containsValue( k ); } public int size() { return ABSTRACT_MAP.this.size(); } public void clear() { ABSTRACT_MAP.this.clear(); } public VALUE_ITERATOR VALUE_GENERIC iterator() { return new VALUE_ABSTRACT_ITERATOR VALUE_GENERIC() { final ObjectIterator> i = entrySet().iterator(); /** {@inheritDoc} * @deprecated Please use the corresponding type-specific method instead. */ @Deprecated public VALUE_GENERIC_TYPE NEXT_VALUE() { return ((MAP.Entry KEY_VALUE_GENERIC)i.next()).ENTRY_GET_VALUE(); }; public boolean hasNext() { return i.hasNext(); } }; } }; } @SuppressWarnings({ "unchecked", "rawtypes" }) public ObjectSet> entrySet() { return (ObjectSet)ENTRYSET(); } /** Returns a hash code for this map. * * The hash code of a map is computed by summing the hash codes of its entries. * * @return a hash code for this map. */ public int hashCode() { int h = 0, n = size(); final ObjectIterator> i = entrySet().iterator(); while( n-- != 0 ) h += i.next().hashCode(); return h; } public boolean equals(Object o) { if ( o == this ) return true; if ( ! ( o instanceof Map ) ) return false; Map m = (Map)o; if ( m.size() != size() ) return false; return entrySet().containsAll( m.entrySet() ); } public String toString() { final StringBuilder s = new StringBuilder(); final ObjectIterator> i = entrySet().iterator(); int n = size(); MAP.Entry KEY_VALUE_GENERIC e; boolean first = true; s.append("{"); while(n-- != 0) { if (first) first = false; else s.append(", "); e = (MAP.Entry KEY_VALUE_GENERIC)i.next(); #if KEYS_REFERENCE if (this == e.getKey()) s.append("(this map)"); else #endif s.append(String.valueOf(e.ENTRY_GET_KEY())); s.append("=>"); #if VALUES_REFERENCE if (this == e.getValue()) s.append("(this map)"); else #endif s.append(String.valueOf(e.ENTRY_GET_VALUE())); } s.append("}"); return s.toString(); } } fastutil-7.1.0/drv/AbstractPriorityQueue.drv0000664000000000000000000000375513050701620017700 0ustar rootroot/* * Copyright (C) 2002-2016 Sebastiano Vigna * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package PACKAGE; import it.unimi.dsi.fastutil.AbstractPriorityQueue; /** An abstract class providing basic methods for priority queues implementing a type-specific interface. 
* */ public abstract class ABSTRACT_PRIORITY_QUEUE KEY_GENERIC extends AbstractPriorityQueue implements java.io.Serializable, PRIORITY_QUEUE KEY_GENERIC { private static final long serialVersionUID = 1L; /** Delegates to the corresponding type-specific method. * @deprecated Please use the corresponding type-specific method instead. */ @Deprecated public void enqueue( final KEY_GENERIC_CLASS x ) { enqueue( x.KEY_VALUE() ); } /** Delegates to the corresponding type-specific method. * @deprecated Please use the corresponding type-specific method instead. */ @Deprecated public KEY_GENERIC_CLASS dequeue() { return KEY2OBJ( DEQUEUE() ); } /** Delegates to the corresponding type-specific method. * @deprecated Please use the corresponding type-specific method instead. */ @Deprecated public KEY_GENERIC_CLASS first() { return KEY2OBJ( FIRST() ); } /** Delegates to the corresponding type-specific method. * @deprecated Please use the corresponding type-specific method instead. */ @Deprecated public KEY_GENERIC_CLASS last() { return KEY2OBJ( LAST() ); } /** Throws an {@link UnsupportedOperationException}. */ public KEY_TYPE LAST() { throw new UnsupportedOperationException(); } } fastutil-7.1.0/drv/AbstractSet.drv0000664000000000000000000000454213050701620015600 0ustar rootroot/* * Copyright (C) 2002-2016 Sebastiano Vigna * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package PACKAGE; import java.util.Set; /** An abstract class providing basic methods for sets implementing a type-specific interface. * *

Warning: As in the case of a type-specific abstract collection, * the type-specific deletion method of a type-specific abstract * set is rem(), rather then remove(). A * subclass must thus override rem(), rather than * remove(), to make all inherited methods work properly. * Note, however, that this class specifies a type-specific remove() * that just invokes rem(), so final users of the class do not * need to know about rem(). */ public abstract class ABSTRACT_SET KEY_GENERIC extends ABSTRACT_COLLECTION KEY_GENERIC implements Cloneable, SET KEY_GENERIC { protected ABSTRACT_SET() {} @Override public abstract KEY_ITERATOR KEY_GENERIC iterator(); @Override public boolean equals( final Object o ) { if ( o == this ) return true; if ( !( o instanceof Set ) ) return false; Set s = (Set) o; if ( s.size() != size() ) return false; return containsAll(s); } /** Returns a hash code for this set. * * The hash code of a set is computed by summing the hash codes of * its elements. * * @return a hash code for this set. */ @Override public int hashCode() { int h = 0, n = size(); KEY_ITERATOR KEY_GENERIC i = iterator(); KEY_GENERIC_TYPE k; while( n-- != 0 ) { k = i.NEXT_KEY(); // We need k because KEY2JAVAHASH() is a macro with repeated evaluation. h += KEY2JAVAHASH( k ); } return h; } #if KEYS_PRIMITIVE /** Delegates to the type-specific rem() method. */ @Override public boolean remove( KEY_TYPE k ) { return rem( k ); } #endif } fastutil-7.1.0/drv/AbstractSortedMap.drv0000664000000000000000000001651513050701620016746 0ustar rootroot/* * Copyright (C) 2002-2016 Sebastiano Vigna * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package PACKAGE; import VALUE_PACKAGE.VALUE_COLLECTION; import VALUE_PACKAGE.VALUE_ABSTRACT_COLLECTION; import VALUE_PACKAGE.VALUE_ABSTRACT_ITERATOR; import VALUE_PACKAGE.VALUE_ITERATOR; import it.unimi.dsi.fastutil.objects.ObjectBidirectionalIterator; import it.unimi.dsi.fastutil.objects.ObjectSortedSet; import java.util.Map; #if KEYS_REFERENCE import java.util.Comparator; #endif /** An abstract class providing basic methods for sorted maps implementing a type-specific interface. */ public abstract class ABSTRACT_SORTED_MAP KEY_VALUE_GENERIC extends ABSTRACT_MAP KEY_VALUE_GENERIC implements SORTED_MAP KEY_VALUE_GENERIC { private static final long serialVersionUID = -1773560792952436569L; protected ABSTRACT_SORTED_MAP() {} #if KEYS_PRIMITIVE /** Delegates to the corresponding type-specific method. * @deprecated Please use the corresponding type-specific method instead. */ @Deprecated public SORTED_MAP KEY_VALUE_GENERIC headMap( final KEY_GENERIC_CLASS to ) { return headMap( KEY_CLASS2TYPE( to ) ); } /** Delegates to the corresponding type-specific method. * @deprecated Please use the corresponding type-specific method instead. */ @Deprecated public SORTED_MAP KEY_VALUE_GENERIC tailMap( final KEY_GENERIC_CLASS from ) { return tailMap( KEY_CLASS2TYPE( from ) ); } /** Delegates to the corresponding type-specific method. * @deprecated Please use the corresponding type-specific method instead. 
*/ @Deprecated public SORTED_MAP KEY_VALUE_GENERIC subMap( final KEY_GENERIC_CLASS from, final KEY_GENERIC_CLASS to ) { return subMap( KEY_CLASS2TYPE( from ), KEY_CLASS2TYPE( to ) ); } /** Delegates to the corresponding type-specific method. * @deprecated Please use the corresponding type-specific method instead. */ @Deprecated public KEY_GENERIC_CLASS firstKey() { return KEY2OBJ( FIRST_KEY() ); } /** Delegates to the corresponding type-specific method. * @deprecated Please use the corresponding type-specific method instead. */ @Deprecated public KEY_GENERIC_CLASS lastKey() { return KEY2OBJ( LAST_KEY() ); } #endif /** Returns a type-specific-sorted-set view of the keys of this map. * *

The view is backed by the sorted set returned by {@link #entrySet()}. Note that * no attempt is made at caching the result of this method, as this would * require adding some attributes that lightweight implementations would * not need. Subclasses may easily override this policy by calling * this method and caching the result, but implementors are encouraged to * write more efficient ad-hoc implementations. * * @return a sorted set view of the keys of this map; it may be safely cast to a type-specific interface. */ public SORTED_SET KEY_GENERIC keySet() { return new KeySet(); } /** A wrapper exhibiting the keys of a map. */ protected class KeySet extends ABSTRACT_SORTED_SET KEY_GENERIC { public boolean contains( final KEY_TYPE k ) { return containsKey( k ); } public int size() { return ABSTRACT_SORTED_MAP.this.size(); } public void clear() { ABSTRACT_SORTED_MAP.this.clear(); } public KEY_COMPARATOR KEY_SUPER_GENERIC comparator() { return ABSTRACT_SORTED_MAP.this.comparator(); } public KEY_GENERIC_TYPE FIRST() { return FIRST_KEY(); } public KEY_GENERIC_TYPE LAST() { return LAST_KEY(); } public SORTED_SET KEY_GENERIC headSet( final KEY_GENERIC_TYPE to ) { return headMap( to ).keySet(); } public SORTED_SET KEY_GENERIC tailSet( final KEY_GENERIC_TYPE from ) { return tailMap( from ).keySet(); } public SORTED_SET KEY_GENERIC subSet( final KEY_GENERIC_TYPE from, final KEY_GENERIC_TYPE to ) { return subMap( from, to ).keySet(); } public KEY_BIDI_ITERATOR KEY_GENERIC iterator( final KEY_GENERIC_TYPE from ) { return new KeySetIterator KEY_VALUE_GENERIC( entrySet().iterator( new BasicEntry KEY_VALUE_GENERIC( from, VALUE_NULL ) ) ); } public KEY_BIDI_ITERATOR KEY_GENERIC iterator() { return new KeySetIterator KEY_VALUE_GENERIC( entrySet().iterator() ); } } /** A wrapper exhibiting a map iterator as an iterator on keys. * *

To provide an iterator on keys, just create an instance of this * class using the corresponding iterator on entries. */ protected static class KeySetIterator KEY_VALUE_GENERIC extends KEY_ABSTRACT_BIDI_ITERATOR KEY_GENERIC { protected final ObjectBidirectionalIterator> i; public KeySetIterator( ObjectBidirectionalIterator> i ) { this.i = i; } public KEY_GENERIC_TYPE NEXT_KEY() { return KEY_CLASS2TYPE( i.next().getKey() ); }; public KEY_GENERIC_TYPE PREV_KEY() { return KEY_CLASS2TYPE( i.previous().getKey() ); }; public boolean hasNext() { return i.hasNext(); } public boolean hasPrevious() { return i.hasPrevious(); } } /** Returns a type-specific collection view of the values contained in this map. * *

The view is backed by the sorted set returned by {@link #entrySet()}. Note that * no attempt is made at caching the result of this method, as this would * require adding some attributes that lightweight implementations would * not need. Subclasses may easily override this policy by calling * this method and caching the result, but implementors are encouraged to * write more efficient ad-hoc implementations. * * @return a type-specific collection view of the values contained in this map. */ public VALUE_COLLECTION VALUE_GENERIC values() { return new ValuesCollection(); } /** A wrapper exhibiting the values of a map. */ protected class ValuesCollection extends VALUE_ABSTRACT_COLLECTION VALUE_GENERIC { public VALUE_ITERATOR VALUE_GENERIC iterator() { return new ValuesIterator KEY_VALUE_GENERIC( entrySet().iterator() ); } public boolean contains( final VALUE_TYPE k ) { return containsValue( k ); } public int size() { return ABSTRACT_SORTED_MAP.this.size(); } public void clear() { ABSTRACT_SORTED_MAP.this.clear(); } } /** A wrapper exhibiting a map iterator as an iterator on values. * *

To provide an iterator on values, just create an instance of this * class using the corresponding iterator on entries. */ protected static class ValuesIterator KEY_VALUE_GENERIC extends VALUE_ABSTRACT_ITERATOR VALUE_GENERIC { protected final ObjectBidirectionalIterator> i; public ValuesIterator( ObjectBidirectionalIterator> i ) { this.i = i; } public VALUE_GENERIC_TYPE NEXT_VALUE() { return VALUE_CLASS2TYPE( i.next().getValue() ); }; public boolean hasNext() { return i.hasNext(); } } @SuppressWarnings({ "unchecked", "rawtypes" }) public ObjectSortedSet> entrySet() { return (ObjectSortedSet)ENTRYSET(); } } fastutil-7.1.0/drv/AbstractSortedSet.drv0000664000000000000000000000456613050701620016767 0ustar rootroot/* * Copyright (C) 2003-2016 Sebastiano Vigna * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package PACKAGE; /** An abstract class providing basic methods for sorted sets implementing a type-specific interface. */ public abstract class ABSTRACT_SORTED_SET KEY_GENERIC extends ABSTRACT_SET KEY_GENERIC implements SORTED_SET KEY_GENERIC { protected ABSTRACT_SORTED_SET() {} #if KEYS_PRIMITIVE /** Delegates to the corresponding type-specific method. * @deprecated Please use the corresponding type-specific method instead. */ @Deprecated public SORTED_SET KEY_GENERIC headSet( final KEY_GENERIC_CLASS to ) { return headSet( to.KEY_VALUE() ); } /** Delegates to the corresponding type-specific method. * @deprecated Please use the corresponding type-specific method instead. */ @Deprecated public SORTED_SET KEY_GENERIC tailSet( final KEY_GENERIC_CLASS from ) { return tailSet( from.KEY_VALUE() ); } /** Delegates to the corresponding type-specific method. * @deprecated Please use the corresponding type-specific method instead. */ @Deprecated public SORTED_SET KEY_GENERIC subSet( final KEY_GENERIC_CLASS from, final KEY_GENERIC_CLASS to ) { return subSet( from.KEY_VALUE(), to.KEY_VALUE() ); } /** Delegates to the corresponding type-specific method. * @deprecated Please use the corresponding type-specific method instead. */ @Deprecated public KEY_GENERIC_CLASS first() { return KEY2OBJ( FIRST() ); } /** Delegates to the corresponding type-specific method. * @deprecated Please use the corresponding type-specific method instead. */ @Deprecated public KEY_GENERIC_CLASS last() { return KEY2OBJ( LAST() ); } #endif /** Delegates to the new covariantly stronger generic method. */ @Deprecated public KEY_BIDI_ITERATOR KEY_GENERIC KEY_ITERATOR_METHOD() { return iterator(); } public abstract KEY_BIDI_ITERATOR KEY_GENERIC iterator(); } fastutil-7.1.0/drv/AbstractStack.drv0000664000000000000000000000413613050701620016111 0ustar rootroot/* * Copyright (C) 2002-2016 Sebastiano Vigna * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package PACKAGE; import it.unimi.dsi.fastutil.AbstractStack; /** An abstract class providing basic methods for implementing a type-specific stack interface. * *

To create a type-specific stack, you need both object methods and * primitive-type methods. However, if you inherit from this class you need * just one (anyone). */ public abstract class ABSTRACT_STACK KEY_GENERIC extends AbstractStack implements STACK KEY_GENERIC { protected ABSTRACT_STACK() {} /** Delegates to the corresponding type-specific method. */ public void push( KEY_GENERIC_CLASS o ) { push( o.KEY_VALUE() ); } /** Delegates to the corresponding type-specific method. */ public KEY_GENERIC_CLASS pop() { return KEY_CLASS.valueOf( POP() ); } /** Delegates to the corresponding type-specific method. */ public KEY_GENERIC_CLASS top() { return KEY_CLASS.valueOf( TOP() ); } /** Delegates to the corresponding type-specific method. */ public KEY_GENERIC_CLASS peek( int i ) { return KEY_CLASS.valueOf( PEEK( i ) ); } /** Delegates to the corresponding generic method. */ public void push( KEY_TYPE k ) { push( KEY_CLASS.valueOf( k ) ); } /** Delegates to the corresponding generic method. */ public KEY_TYPE POP() { return pop().KEY_VALUE(); } /** Delegates to the corresponding generic method. */ public KEY_TYPE TOP() { return top().KEY_VALUE(); } /** Delegates to the corresponding generic method. */ public KEY_TYPE PEEK( int i ) { return peek( i ).KEY_VALUE(); } } fastutil-7.1.0/drv/ArrayFIFOQueue.drv0000664000000000000000000001605513050701620016112 0ustar rootroot/* * Copyright (C) 2010-2016 Sebastiano Vigna * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package PACKAGE; #if KEY_CLASS_Object import java.util.Arrays; import java.util.Comparator; import it.unimi.dsi.fastutil.AbstractPriorityQueue; #endif import java.io.Serializable; import it.unimi.dsi.fastutil.HashCommon; import java.util.NoSuchElementException; /** A type-specific array-based FIFO queue, supporting also deque operations. * *

Instances of this class represent a FIFO queue using a backing * array in a circular way. The array is enlarged and shrunk as needed. You can use the {@link #trim()} method * to reduce its memory usage, if necessary. * *
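* For instance (an illustrative sketch, assuming the generated type-specific class
* IntArrayFIFOQueue; method names follow the usual type-specific naming):
*
* <pre>
* IntArrayFIFOQueue q = new IntArrayFIFOQueue();
* q.enqueue( 1 );
* q.enqueue( 2 );
* q.enqueue( 3 );
* q.dequeueInt();  // 1: elements are dequeued in insertion order
* q.trim();        // shrinks the backing array to the smallest possible size
* </pre>
*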

This class provides additional methods that implement a deque (double-ended queue). */ public class ARRAY_FIFO_QUEUE KEY_GENERIC extends ABSTRACT_PRIORITY_QUEUE KEY_GENERIC implements Serializable { private static final long serialVersionUID = 0L; /** The standard initial capacity of a queue. */ public final static int INITIAL_CAPACITY = 4; /** The backing array. */ protected transient KEY_GENERIC_TYPE array[]; /** The current (cached) length of {@link #array}. */ protected transient int length; /** The start position in {@link #array}. It is always strictly smaller than {@link #length}.*/ protected transient int start; /** The end position in {@link #array}. It is always strictly smaller than {@link #length}. * Might be actually smaller than {@link #start} because {@link #array} is used cyclically. */ protected transient int end; /** Creates a new empty queue with given capacity. * * @param capacity the initial capacity of this queue. */ SUPPRESS_WARNINGS_KEY_UNCHECKED public ARRAY_FIFO_QUEUE( final int capacity ) { if ( capacity < 0 ) throw new IllegalArgumentException( "Initial capacity (" + capacity + ") is negative" ); array = KEY_GENERIC_ARRAY_CAST new KEY_TYPE[ Math.max( 1, capacity ) ]; // Never build a queue with zero-sized backing array. length = array.length; } /** Creates a new empty queue with standard {@linkplain #INITIAL_CAPACITY initial capacity}. */ public ARRAY_FIFO_QUEUE() { this( INITIAL_CAPACITY ); } /** Returns null (FIFO queues have no comparator). * @return null. */ @Override public KEY_COMPARATOR KEY_SUPER_GENERIC comparator() { return null; } /** Dequeues the {@linkplain #first() first} element from the queue. * * @return the dequeued element. * @throws NoSuchElementException if the queue is empty. */ @Override public KEY_GENERIC_TYPE DEQUEUE() { if ( start == end ) throw new NoSuchElementException(); final KEY_GENERIC_TYPE t = array[ start ]; #if KEYS_REFERENCE array[ start ] = null; // Clean-up for the garbage collector. #endif if ( ++start == length ) start = 0; reduce(); return t; } /** Dequeues the {@linkplain #last() last} element from the queue. * * @return the dequeued element. * @throws NoSuchElementException if the queue is empty. */ public KEY_GENERIC_TYPE DEQUEUE_LAST() { if ( start == end ) throw new NoSuchElementException(); if ( end == 0 ) end = length; final KEY_GENERIC_TYPE t = array[ --end ]; #if KEYS_REFERENCE array[ end ] = null; // Clean-up for the garbage collector. #endif reduce(); return t; } SUPPRESS_WARNINGS_KEY_UNCHECKED private final void resize( final int size, final int newLength ) { final KEY_GENERIC_TYPE[] newArray = KEY_GENERIC_ARRAY_CAST new KEY_TYPE[ newLength ]; if ( start >= end ) { if ( size != 0 ) { System.arraycopy( array, start, newArray, 0, length - start ); System.arraycopy( array, 0, newArray, length - start, end ); } } else System.arraycopy( array, start, newArray, 0, end - start ); start = 0; end = size; array = newArray; length = newLength; } private final void expand() { resize( length, (int)Math.min( it.unimi.dsi.fastutil.Arrays.MAX_ARRAY_SIZE, 2L * length ) ); } private final void reduce() { final int size = size(); if ( length > INITIAL_CAPACITY && size <= length / 4 ) resize( size, length / 2 ); } @Override public void enqueue( KEY_GENERIC_TYPE x ) { array[ end++ ] = x; if ( end == length ) end = 0; if ( end == start ) expand(); } /** Enqueues a new element as the first element (in dequeuing order) of the queue. 
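* @param x the element to enqueue.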
*/ public void enqueueFirst( KEY_GENERIC_TYPE x ) { if ( start == 0 ) start = length; array[ --start ] = x; if ( end == start ) expand(); } /** Returns the first element of the queue. * @return the first element of the queue. * @throws NoSuchElementException if the queue is empty. */ public KEY_GENERIC_TYPE FIRST() { if ( start == end ) throw new NoSuchElementException(); return array[ start ]; } /** Returns the last element of the queue. * @return the last element of the queue. * @throws NoSuchElementException if the queue is empty. */ public KEY_GENERIC_TYPE LAST() { if ( start == end ) throw new NoSuchElementException(); return array[ ( end == 0 ? length : end ) - 1 ]; } @Override public void clear() { #if KEYS_REFERENCE if ( start <= end ) Arrays.fill( array, start, end, null ); else { Arrays.fill( array, start, length, null ); Arrays.fill( array, 0, end, null ); } #endif start = end = 0; } /** Trims the queue to the smallest possible size. */ SUPPRESS_WARNINGS_KEY_UNCHECKED public void trim() { final int size = size(); final KEY_GENERIC_TYPE[] newArray = #if KEYS_PRIMITIVE new KEY_GENERIC_TYPE[ size + 1 ]; #else (KEY_GENERIC_TYPE[])new Object[ size + 1 ]; #endif if ( start <= end ) System.arraycopy( array, start, newArray, 0, end - start ); else { System.arraycopy( array, start, newArray, 0, length - start ); System.arraycopy( array, 0, newArray, length - start, end ); } start = 0; length = ( end = size ) + 1; array = newArray; } @Override public int size() { final int apparentLength = end - start; return apparentLength >= 0 ? apparentLength : length + apparentLength; } private void writeObject( java.io.ObjectOutputStream s ) throws java.io.IOException { s.defaultWriteObject(); int size = size(); s.writeInt( size ); for( int i = start; size-- != 0; ) { s.WRITE_KEY( array[ i++ ] ); if ( i == length ) i = 0; } } SUPPRESS_WARNINGS_KEY_UNCHECKED private void readObject( java.io.ObjectInputStream s ) throws java.io.IOException, ClassNotFoundException { s.defaultReadObject(); end = s.readInt(); array = KEY_GENERIC_ARRAY_CAST new KEY_TYPE[ length = HashCommon.nextPowerOfTwo( end + 1 ) ]; for( int i = 0; i < end; i++ ) array[ i ] = KEY_GENERIC_CAST s.READ_KEY(); } } fastutil-7.1.0/drv/ArrayFrontCodedList.drv0000664000000000000000000005562613050701620017254 0ustar rootroot/* * Copyright (C) 2002-2016 Sebastiano Vigna * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package PACKAGE; import it.unimi.dsi.fastutil.objects.AbstractObjectListIterator; import it.unimi.dsi.fastutil.objects.AbstractObjectList; import it.unimi.dsi.fastutil.objects.ObjectListIterator; import it.unimi.dsi.fastutil.longs.LongArrays; import java.io.Serializable; import java.util.Iterator; import java.util.Collection; import java.util.NoSuchElementException; import java.util.RandomAccess; /** Compact storage of lists of arrays using front coding. * *

This class immutably stores a list of arrays in a single large array * using front coding (of course, the compression will be reasonable only if * the list is sorted lexicographically—see below). It implements an * immutable type-specific list that returns the i-th array when * calling {@link #get(int) get(i)}. The returned array may be * freely modified. * *

Front coding is based on the idea that if the i-th and the * (i+1)-th array have a common prefix, we need only store the length * of the common prefix, followed by the rest of the second array. * *

This approach, of course, requires that once in a while an array is * stored entirely. The ratio of a front-coded list defines how * often this happens (once every {@link #ratio()} arrays). A higher ratio * means more compression, but also a longer access time, as more arrays * have to be probed to build the result. Note that we must build an array * every time {@link #get(int)} is called, but this class also provides methods * that extract one of the stored arrays into a given array, reducing garbage * collection. See the documentation of the family of get() * methods. * *
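* For instance (an illustrative sketch, assuming the generated type-specific class
* CharArrayFrontCodedList; java.util.Arrays and java.util.List are assumed to be imported,
* and the input list is lexicographically sorted, as recommended above):
*
* <pre>
* List<char[]> words = Arrays.asList( "foo".toCharArray(), "foobar".toCharArray(),
*     "football".toCharArray(), "fool".toCharArray() );
* CharArrayFrontCodedList fcl = new CharArrayFrontCodedList( words, 3 );
* char[] football = fcl.getArray( 2 );             // a fresh copy of "football"
* char[] buffer = new char[ fcl.arrayLength( 3 ) ];
* fcl.get( 3, buffer );                            // extracts "fool" into buffer, avoiding a new allocation
* </pre>
*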

By setting the ratio to 1 we actually disable front coding: however, we * still have a data structure storing a large list of arrays with a reduced * overhead (just one integer per array, plus the space required for lengths). * *

Note that the typical usage of front-coded lists is in the form of * serialized objects; usually, the data that has to be compacted is processed * offline, and the resulting structure is stored permanently. Since the * pointer array is not stored, the serialized format is very small. * *

Implementation Details

* *

All arrays are stored in a {@linkplain it.unimi.dsi.fastutil.BigArrays big array}. A separate array of pointers * indexes arrays whose position is a multiple of the ratio: thus, a higher ratio * also means fewer pointers. * *

More in detail, an array whose position is a multiple of the ratio is * stored as the array length, followed by the elements of the array. The array * length is coded by a simple variable-length list of k-1 bit * blocks, where k is the number of bits of the underlying primitive * type. All other arrays are stored as follows: let common be the * length of the maximum common prefix between the array and its predecessor. * Then we store the array length decremented by common, followed * by common, followed by the array elements whose index is * greater than or equal to common. For instance, if we store * foo, foobar, football and * fool in a front-coded character-array list with ratio 3, the * character array will contain * *

 * 3 f o o 3 3 b a r 5 3 t b a l l 4 f o o l 
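* (Reading the sequence above: 3 f o o is foo, stored entirely because its position, 0, is a
* multiple of the ratio; 3 3 b a r means three new elements after a common prefix of length 3
* with the previous array, that is, foobar; 5 3 t b a l l similarly yields football; and
* fool, whose position 3 is again a multiple of the ratio, is stored entirely as 4 f o o l.)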
 * 
*/ public class ARRAY_FRONT_CODED_LIST extends AbstractObjectList implements Serializable, Cloneable, RandomAccess { private static final long serialVersionUID = 1L; /** The number of arrays in the list. */ protected final int n; /** The ratio of this front-coded list. */ protected final int ratio; /** The big array containing the compressed arrays. */ protected final KEY_TYPE[][] array; /** The pointers to entire arrays in the list. */ protected transient long[] p; /** Creates a new front-coded list containing the arrays returned by the given iterator. * * @param arrays an iterator returning arrays. * @param ratio the desired ratio. */ public ARRAY_FRONT_CODED_LIST( final Iterator arrays, final int ratio ) { if ( ratio < 1 ) throw new IllegalArgumentException( "Illegal ratio (" + ratio + ")" ); KEY_TYPE[][] array = BIG_ARRAYS.EMPTY_BIG_ARRAY; long[] p = LongArrays.EMPTY_ARRAY; KEY_TYPE[][] a = new KEY_TYPE[ 2 ][]; long curSize = 0; int n = 0, b = 0, common, length, minLength; while( arrays.hasNext() ) { a[ b ] = arrays.next(); length = a[ b ].length; if ( n % ratio == 0 ) { p = LongArrays.grow( p, n / ratio + 1 ); p[ n / ratio ] = curSize; array = BIG_ARRAYS.grow( array, curSize + count( length ) + length, curSize ); curSize += writeInt( array, length, curSize ); BIG_ARRAYS.copyToBig( a[ b ], 0, array, curSize, length ); curSize += length; } else { minLength = a[ 1 - b ].length; if ( length < minLength ) minLength = length; for( common = 0; common < minLength; common++ ) if ( a[ 0 ][ common ] != a[ 1 ][ common ] ) break; length -= common; array = BIG_ARRAYS.grow( array, curSize + count( length ) + count( common ) + length, curSize ); curSize += writeInt( array, length, curSize ); curSize += writeInt( array, common, curSize ); BIG_ARRAYS.copyToBig( a[ b ], common, array, curSize, length ); curSize += length; } b = 1 - b; n++; } this.n = n; this.ratio = ratio; this.array = BIG_ARRAYS.trim( array, curSize ); this.p = LongArrays.trim( p, ( n + ratio - 1 ) / ratio ); } /** Creates a new front-coded list containing the arrays in the given collection. * * @param c a collection containing arrays. * @param ratio the desired ratio. */ public ARRAY_FRONT_CODED_LIST( final Collection c, final int ratio ) { this( c.iterator(), ratio ); } /* The following (rather messy) methods implements the encoding of arbitrary integers inside a big array. * Unfortunately, we have to specify different codes for almost every type. */ /** Reads a coded length. * @param a the data big array. * @param pos the starting position. * @return the length coded at pos. */ private static int readInt( final KEY_TYPE a[][], long pos ) { #if KEY_CLASS_Integer return IntBigArrays.get( a, pos ); #elif KEY_CLASS_Long return (int)LongBigArrays.get( a, pos ); #elif KEY_CLASS_Character final char c0 = CharBigArrays.get( a, pos ); return c0 < 0x8000 ? c0 : ( c0 & 0x7FFF ) << 16 | CharBigArrays.get( a, pos + 1 ); #elif KEY_CLASS_Short final short s0 = ShortBigArrays.get( a, pos ); return s0 >= 0 ? 
s0 : s0 << 16 | ( ShortBigArrays.get( a, pos + 1 ) & 0xFFFF ); #else final byte b0 = ByteBigArrays.get( a, pos ); if ( b0 >= 0 ) return b0; final byte b1 = ByteBigArrays.get( a, pos + 1 ); if ( b1 >= 0 ) return ( - b0 - 1 ) << 7 | b1; final byte b2 = ByteBigArrays.get( a, pos + 2 ); if ( b2 >= 0 ) return ( - b0 - 1 ) << 14 | ( - b1 - 1 ) << 7 | b2; final byte b3 = ByteBigArrays.get( a, pos + 3 ); if ( b3 >= 0 ) return ( - b0 - 1 ) << 21 | ( - b1 - 1 ) << 14 | ( - b2 - 1 ) << 7 | b3; return ( - b0 - 1 ) << 28 | ( - b1 - 1 ) << 21 | ( - b2 - 1 ) << 14 | ( - b3 - 1 ) << 7 | ByteBigArrays.get( a, pos + 4 ); #endif } /** Computes the number of elements coding a given length. * @param length the length to be coded. * @return the number of elements coding length. */ private static int count( final int length ) { #if KEY_CLASS_Integer || KEY_CLASS_Long return 1; #elif KEY_CLASS_Character || KEY_CLASS_Short return length < ( 1 << 15 ) ? 1 : 2; #else if ( length < ( 1 << 7 ) ) return 1; if ( length < ( 1 << 14 ) ) return 2; if ( length < ( 1 << 21 ) ) return 3; if ( length < ( 1 << 28 ) ) return 4; return 5; #endif } /** Writes a length. * @param a the data array. * @param length the length to be written. * @param pos the starting position. * @return the number of elements coding length. */ private static int writeInt( final KEY_TYPE a[][], int length, long pos ) { #if KEY_CLASS_Long LongBigArrays.set( a, pos, length ); return 1; #elif KEY_CLASS_Integer IntBigArrays.set( a, pos, length ); return 1; #elif KEY_CLASS_Character if ( length < ( 1 << 15 ) ) { CharBigArrays.set( a, pos, (char)length ); return 1; } CharBigArrays.set( a, pos++, (char)( length >>> 16 | 0x8000 ) ); CharBigArrays.set( a, pos, (char)( length & 0xFFFF ) ); return 2; #elif KEY_CLASS_Short if ( length < ( 1 << 15 ) ) { ShortBigArrays.set( a, pos, (short)length ); return 1; } ShortBigArrays.set( a, pos++, (short)( - ( length >>> 16 ) - 1 ) ); ShortBigArrays.set( a, pos, (short)( length & 0xFFFF ) ); return 2; #else final int count = count( length ); ByteBigArrays.set( a, pos + count - 1, (byte)( length & 0x7F ) ); if ( count != 1 ) { int i = count - 1; while( i-- != 0 ) { length >>>= 7; ByteBigArrays.set( a, pos + i, (byte)( - ( length & 0x7F ) - 1 ) ); } } return count; #endif } /** Returns the ratio of this list. * * @return the ratio of this list. */ public int ratio() { return ratio; } /** Computes the length of the array at the given index. * *

This private version of {@link #arrayLength(int)} does not check its argument. * * @param index an index. * @return the length of the index-th array. */ private int length( final int index ) { final KEY_TYPE[][] array = this.array; final int delta = index % ratio; // The index into the p array, and the delta inside the block. long pos = p[ index / ratio ]; // The position into the array of the first entire word before the index-th. int length = readInt( array, pos ); if ( delta == 0 ) return length; // First of all, we recover the array length and the maximum amount of copied elements. int common; pos += count( length ) + length; length = readInt( array, pos ); common = readInt( array, pos + count( length ) ); for( int i = 0; i < delta - 1; i++ ) { pos += count( length ) + count( common ) + length; length = readInt( array, pos ); common = readInt( array, pos + count( length ) ); } return length + common; } /** Computes the length of the array at the given index. * * @param index an index. * @return the length of the index-th array. */ public int arrayLength( final int index ) { ensureRestrictedIndex( index ); return length( index ); } /** Extracts the array at the given index. * * @param index an index. * @param a the array that will store the result (we assume that it can hold the result). * @param offset an offset into a where elements will be store. * @param length a maximum number of elements to store in a. * @return the length of the extracted array. */ private int extract( final int index, final KEY_TYPE a[], final int offset, final int length ) { final int delta = index % ratio; // The delta inside the block. final long startPos = p[ index / ratio ]; // The position into the array of the first entire word before the index-th. long pos, prevArrayPos; int arrayLength = readInt( array, pos = startPos ), currLen = 0, actualCommon; if ( delta == 0 ) { pos = p[ index / ratio ] + count( arrayLength ); BIG_ARRAYS.copyFromBig( array, pos, a, offset, Math.min( length, arrayLength ) ); return arrayLength; } int common = 0; for( int i = 0; i < delta; i++ ) { prevArrayPos = pos + count( arrayLength ) + ( i != 0 ? count( common ) : 0 ); pos = prevArrayPos + arrayLength; arrayLength = readInt( array, pos ); common = readInt( array, pos + count( arrayLength ) ); actualCommon = Math.min( common, length ); if ( actualCommon <= currLen ) currLen = actualCommon; else { BIG_ARRAYS.copyFromBig( array, prevArrayPos, a, currLen + offset, actualCommon - currLen ); currLen = actualCommon; } } if ( currLen < length ) BIG_ARRAYS.copyFromBig( array, pos + count( arrayLength ) + count( common ), a, currLen + offset, Math.min( arrayLength, length - currLen ) ); return arrayLength + common; } public KEY_TYPE[] get( final int index ) { return getArray( index ); } /** * @see #get(int) */ public KEY_TYPE[] getArray( final int index ) { ensureRestrictedIndex( index ); final int length = length( index ); final KEY_TYPE a[] = new KEY_TYPE[ length ]; extract( index, a, 0, length ); return a; } /** Stores in the given array elements from an array stored in this front-coded list. * * @param index an index. * @param a the array that will store the result. * @param offset an offset into a where elements will be store. * @param length a maximum number of elements to store in a. * @return if a can hold the extracted elements, the number of extracted elements; * otherwise, the number of remaining elements with the sign changed. 
*/ public int get( final int index, final KEY_TYPE[] a, final int offset, final int length ) { ensureRestrictedIndex( index ); ARRAYS.ensureOffsetLength( a, offset, length ); final int arrayLength = extract( index, a, offset, length ); if ( length >= arrayLength ) return arrayLength; return length - arrayLength; } /** Stores in the given array an array stored in this front-coded list. * * @param index an index. * @param a the array that will store the content of the result (we assume that it can hold the result). * @return if a can hold the extracted elements, the number of extracted elements; * otherwise, the number of remaining elements with the sign changed. */ public int get( final int index, final KEY_TYPE[] a ) { return get( index, a, 0, a.length ); } public int size() { return n; } public ObjectListIterator listIterator( final int start ) { ensureIndex( start ); return new AbstractObjectListIterator() { KEY_TYPE s[] = ARRAYS.EMPTY_ARRAY; int i = 0; long pos = 0; boolean inSync; // Whether the current value in a is the string just before the next to be produced. { if ( start != 0 ) { if ( start == n ) i = start; // If we start at the end, we do nothing. else { pos = p[ start / ratio ]; int j = start % ratio; i = start - j; while( j-- != 0 ) next(); } } } public boolean hasNext() { return i < n; } public boolean hasPrevious() { return i > 0; } public int previousIndex() { return i - 1; } public int nextIndex() { return i; } public KEY_TYPE[] next() { int length, common; if ( ! hasNext() ) throw new NoSuchElementException(); if ( i % ratio == 0 ) { pos = p[ i / ratio ]; length = readInt( array, pos ); s = ARRAYS.ensureCapacity( s, length, 0 ); BIG_ARRAYS.copyFromBig( array, pos + count( length ), s, 0, length ); pos += length + count( length ); inSync = true; } else { if ( inSync ) { length = readInt( array, pos ); common = readInt( array, pos + count( length ) ); s = ARRAYS.ensureCapacity( s, length + common, common ); BIG_ARRAYS.copyFromBig( array, pos + count( length ) + count ( common ), s, common, length ); pos += count( length ) + count( common ) + length; length += common; } else { s = ARRAYS.ensureCapacity( s, length = length( i ), 0 ); extract( i, s, 0, length ); } } i++; return ARRAYS.copy( s, 0, length ); } public KEY_TYPE[] previous() { if ( ! hasPrevious() ) throw new NoSuchElementException(); inSync = false; return getArray( --i ); } }; } /** Returns a copy of this list. * * @return a copy of this list. */ public ARRAY_FRONT_CODED_LIST clone() { return this; } public String toString() { final StringBuffer s = new StringBuffer(); s.append( "[ " ); for( int i = 0; i < n; i++ ) { if ( i != 0 ) s.append( ", " ); s.append( ARRAY_LIST.wrap( getArray( i ) ).toString() ); } s.append( " ]" ); return s.toString(); } /** Computes the pointer array using the currently set ratio, number of elements and underlying array. * * @return the computed pointer array. 
*/ protected long[] rebuildPointerArray() { final long[] p = new long[ ( n + ratio - 1 ) / ratio ]; final KEY_TYPE a[][] = array; int length, count; long pos = 0; for( int i = 0, j = 0, skip = ratio - 1; i < n; i++ ) { length = readInt( a, pos ); count = count( length ); if ( ++skip == ratio ) { skip = 0; p[ j++ ] = pos; pos += count + length; } else pos += count + count( readInt( a, pos + count ) ) + length; } return p; } private void readObject(java.io.ObjectInputStream s) throws java.io.IOException, ClassNotFoundException { s.defaultReadObject(); // Rebuild pointer array p = rebuildPointerArray(); } #ifdef TEST private static long seed = System.currentTimeMillis(); private static java.util.Random r = new java.util.Random( seed ); private static KEY_TYPE genKey() { #if KEY_CLASS_Byte || KEY_CLASS_Short || KEY_CLASS_Character return (KEY_TYPE)(r.nextInt()); #elif KEYS_PRIMITIVE return r.NEXT_KEY(); #elif KEY_CLASS_Object return Integer.toBinaryString( r.nextInt() ); #else return new java.io.Serializable() {}; #endif } private static java.text.NumberFormat format = new java.text.DecimalFormat( "#,###.00" ); private static java.text.FieldPosition fp = new java.text.FieldPosition( 0 ); private static String format( double d ) { StringBuffer s = new StringBuffer(); return format.format( d, s, fp ).toString(); } private static void speedTest( int n, boolean comp ) { System.out.println( "There are presently no speed tests for this class." ); } private static void fatal( String msg ) { System.out.println( msg ); System.exit( 1 ); } private static void ensure( boolean cond, String msg ) { if ( cond ) return; fatal( msg ); } private static boolean contentEquals( java.util.List x, java.util.List y ) { if ( x.size() != y.size() ) return false; for( int i = 0; i < x.size(); i++ ) if ( ! java.util.Arrays.equals( (KEY_TYPE[])x.get( i ), (KEY_TYPE[])y.get( i ) ) ) return false; return true; } private static int l[]; private static KEY_TYPE[][] a; private static void test( int n ) { int c; l = new int[ n ]; a = new KEY_TYPE[n][]; for( int i = 0; i < n; i++ ) l[i] = (int)(Math.abs(r.nextGaussian())*32); for( int i = 0; i < n; i++ ) a[i] = new KEY_TYPE[l[i]]; for( int i = 0; i < n; i++ ) for( int j = 0; j < l[i]; j++ ) a[i][j] = genKey(); ARRAY_FRONT_CODED_LIST m = new ARRAY_FRONT_CODED_LIST( it.unimi.dsi.fastutil.objects.ObjectIterators.wrap( a ), r.nextInt( 4 ) + 1 ); it.unimi.dsi.fastutil.objects.ObjectArrayList t = new it.unimi.dsi.fastutil.objects.ObjectArrayList( a ); //System.out.println(m); //for( i = 0; i < t.size(); i++ ) System.out.println(ARRAY_LIST.wrap((KEY_TYPE[])t.get(i))); /* Now we check that m actually holds that data. */ ensure( contentEquals( m, t ), "Error (" + seed + "): m does not equal t at creation" ); /* Now we check cloning. */ ensure( contentEquals( m, (java.util.List)m.clone() ), "Error (" + seed + "): m does not equal m.clone()" ); /* Now we play with iterators. 
*/ { ObjectListIterator i; java.util.ListIterator j; Object J; i = m.listIterator(); j = t.listIterator(); for( int k = 0; k < 2*n; k++ ) { ensure( i.hasNext() == j.hasNext(), "Error (" + seed + "): divergence in hasNext()" ); ensure( i.hasPrevious() == j.hasPrevious(), "Error (" + seed + "): divergence in hasPrevious()" ); if ( r.nextFloat() < .8 && i.hasNext() ) { ensure( java.util.Arrays.equals( (KEY_TYPE[])i.next(), (KEY_TYPE[])j.next() ), "Error (" + seed + "): divergence in next()" ); } else if ( r.nextFloat() < .2 && i.hasPrevious() ) { ensure( java.util.Arrays.equals( (KEY_TYPE[])i.previous(), (KEY_TYPE[])j.previous() ), "Error (" + seed + "): divergence in previous()" ); } ensure( i.nextIndex() == j.nextIndex(), "Error (" + seed + "): divergence in nextIndex()" ); ensure( i.previousIndex() == j.previousIndex(), "Error (" + seed + "): divergence in previousIndex()" ); } } { Object previous = null; Object I, J; int from = r.nextInt( m.size() +1 ); ObjectListIterator i; java.util.ListIterator j; i = m.listIterator( from ); j = t.listIterator( from ); for( int k = 0; k < 2*n; k++ ) { ensure( i.hasNext() == j.hasNext(), "Error (" + seed + "): divergence in hasNext() (iterator with starting point " + from + ")" ); ensure( i.hasPrevious() == j.hasPrevious() , "Error (" + seed + "): divergence in hasPrevious() (iterator with starting point " + from + ")" ); if ( r.nextFloat() < .8 && i.hasNext() ) { ensure( java.util.Arrays.equals( (KEY_TYPE[])i.next(), (KEY_TYPE[])j.next() ), "Error (" + seed + "): divergence in next() (iterator with starting point " + from + ")" ); //System.err.println("Done next " + I + " " + J + " " + badPrevious); } else if ( r.nextFloat() < .2 && i.hasPrevious() ) { ensure( java.util.Arrays.equals( (KEY_TYPE[])i.previous(), (KEY_TYPE[])j.previous() ), "Error (" + seed + "): divergence in previous() (iterator with starting point " + from + ")" ); } } } try { java.io.File ff = new java.io.File("it.unimi.dsi.fastutil.test"); java.io.OutputStream os = new java.io.FileOutputStream(ff); java.io.ObjectOutputStream oos = new java.io.ObjectOutputStream(os); oos.writeObject(m); oos.close(); java.io.InputStream is = new java.io.FileInputStream(ff); java.io.ObjectInputStream ois = new java.io.ObjectInputStream(is); m = (ARRAY_FRONT_CODED_LIST)ois.readObject(); ois.close(); ff.delete(); } catch(Exception e) { e.printStackTrace(); System.exit( 1 ); } ensure( contentEquals( m, t ), "Error (" + seed + "): m does not equal t after save/read" ); System.out.println("Test OK"); return; } public static void main( String args[] ) { int n = Integer.parseInt(args[1]); if ( args.length > 2 ) r = new java.util.Random( seed = Long.parseLong( args[ 2 ] ) ); try { if ("speedTest".equals(args[0]) || "speedComp".equals(args[0])) speedTest( n, "speedComp".equals(args[0]) ); else if ( "test".equals( args[0] ) ) test(n); } catch( Throwable e ) { e.printStackTrace( System.err ); System.err.println( "seed: " + seed ); } } #endif } fastutil-7.1.0/drv/ArrayIndirectPriorityQueue.drv0000664000000000000000000006017213050701620020671 0ustar rootroot/* * Copyright (C) 2003-2016 Paolo Boldi and Sebastiano Vigna * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package PACKAGE; #if KEY_CLASS_Object import java.util.Comparator; import it.unimi.dsi.fastutil.IndirectPriorityQueue; #endif import it.unimi.dsi.fastutil.ints.IntArrays; import it.unimi.dsi.fastutil.AbstractIndirectPriorityQueue; import java.util.NoSuchElementException; /** A type-specific array-based semi-indirect priority queue. * *
<p>
Instances of this class use a reference array, which must be provided to each constructor, as their * reference list, and represent a priority queue using a backing array of integer indices: all * operations are performed directly on that array. The array is enlarged as needed, but it is never * shrunk. Use the {@link #trim()} method to reduce its size, if necessary. * *
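 * <p>(Editorial addition, not part of the original documentation: a minimal usage
 * sketch, assuming the double specialization DoubleArrayIndirectPriorityQueue
 * generated from this template.)
 * <pre>
 * double[] ref = { 4.0, 1.0, 3.0 };                    // the reference array
 * DoubleArrayIndirectPriorityQueue q =
 *     new DoubleArrayIndirectPriorityQueue( ref );     // natural order on ref
 * q.enqueue( 0 );
 * q.enqueue( 1 );
 * q.enqueue( 2 );
 * q.first();    // 1, since ref[ 1 ] = 1.0 is the smallest referenced value
 * q.dequeue();  // removes and returns 1
 * q.first();    // 2, since ref[ 2 ] = 3.0 is smaller than ref[ 0 ] = 4.0
 * </pre>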
<p>
This implementation is extremely inefficient, but it is difficult to beat * when the size of the queue is very small. Moreover, it allows the same index to be * enqueued several times, without limitations. */ public class ARRAY_INDIRECT_PRIORITY_QUEUE KEY_GENERIC extends AbstractIndirectPriorityQueue implements INDIRECT_PRIORITY_QUEUE KEY_GENERIC { /** The reference array. */ protected KEY_GENERIC_TYPE refArray[]; /** The backing array. */ protected int array[] = IntArrays.EMPTY_ARRAY; /** The number of elements in this queue. */ protected int size; /** The type-specific comparator used in this queue. */ protected KEY_COMPARATOR KEY_SUPER_GENERIC c; /** The first index, cached, if {@link #firstIndexValid} is true. */ protected int firstIndex; /** Whether {@link #firstIndex} contains a valid value. */ protected boolean firstIndexValid; /** Creates a new empty queue with a given capacity and comparator. * * @param refArray the reference array. * @param capacity the initial capacity of this queue. * @param c the comparator used in this queue, or null for the natural order. */ public ARRAY_INDIRECT_PRIORITY_QUEUE( KEY_GENERIC_TYPE[] refArray, int capacity, KEY_COMPARATOR KEY_SUPER_GENERIC c ) { if ( capacity > 0 ) this.array = new int[ capacity ]; this.refArray = refArray; this.c = c; } /** Creates a new empty queue with a given capacity and using the natural order. * * @param refArray the reference array. * @param capacity the initial capacity of this queue. */ public ARRAY_INDIRECT_PRIORITY_QUEUE( KEY_GENERIC_TYPE[] refArray, int capacity ) { this( refArray, capacity, null ); } /** Creates a new empty queue with capacity equal to the length of the reference array and a given comparator. * * @param refArray the reference array. * @param c the comparator used in this queue, or null for the natural order. */ public ARRAY_INDIRECT_PRIORITY_QUEUE( KEY_GENERIC_TYPE[] refArray, KEY_COMPARATOR KEY_SUPER_GENERIC c ) { this( refArray, refArray.length, c ); } /** Creates a new empty queue with capacity equal to the length of the reference array and using the natural order. * @param refArray the reference array. */ public ARRAY_INDIRECT_PRIORITY_QUEUE( KEY_GENERIC_TYPE[] refArray ) { this( refArray, refArray.length, null ); } /** Wraps a given array in a queue using a given comparator. * *
<p>
The queue returned by this method will be backed by the given array. * * @param refArray the reference array. * @param a an array of indices into refArray. * @param size the number of elements to be included in the queue. * @param c the comparator used in this queue, or null for the natural order. */ public ARRAY_INDIRECT_PRIORITY_QUEUE( final KEY_GENERIC_TYPE[] refArray, final int[] a, int size, final KEY_COMPARATOR KEY_SUPER_GENERIC c ) { this( refArray, 0, c ); this.array = a; this.size = size; } /** Wraps a given array in a queue using a given comparator. * *
<p>
The queue returned by this method will be backed by the given array. * * @param refArray the reference array. * @param a an array of indices into refArray. * @param c the comparator used in this queue, or null for the natural order. */ public ARRAY_INDIRECT_PRIORITY_QUEUE( final KEY_GENERIC_TYPE[] refArray, final int[] a, final KEY_COMPARATOR KEY_SUPER_GENERIC c ) { this( refArray, a, a.length, c ); } /** Wraps a given array in a queue using the natural order. * *
<p>
The queue returned by this method will be backed by the given array. * * @param refArray the reference array. * @param a an array of indices into refArray. * @param size the number of elements to be included in the queue. */ public ARRAY_INDIRECT_PRIORITY_QUEUE( final KEY_GENERIC_TYPE[] refArray, final int[] a, int size ) { this( refArray, a, size, null ); } /** Wraps a given array in a queue using the natural order. * *
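 * <p>(Editorial addition, not part of the original documentation: a sketch of
 * wrapping, assuming the double specialization DoubleArrayIndirectPriorityQueue.)
 * <pre>
 * double[] ref = { 4.0, 1.0, 3.0 };
 * int[] index = { 2, 0 };                                   // indices into ref
 * DoubleArrayIndirectPriorityQueue q =
 *     new DoubleArrayIndirectPriorityQueue( ref, index );   // backed by index, size 2
 * q.first();    // 2, since ref[ 2 ] = 3.0 is smaller than ref[ 0 ] = 4.0
 * </pre>
 * Since the queue is backed by the given array, no copy of index is made.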
<p>
The queue returned by this method will be backed by the given array. * * @param refArray the reference array. * @param a an array of indices into refArray. */ public ARRAY_INDIRECT_PRIORITY_QUEUE( final KEY_GENERIC_TYPE[] refArray, final int[] a ) { this( refArray, a, a.length ); } /** Returns the index (in {@link #array}) of the smallest element. */ SUPPRESS_WARNINGS_KEY_UNCHECKED private int findFirst() { if ( firstIndexValid ) return this.firstIndex; firstIndexValid = true; int i = size; int firstIndex = --i; KEY_GENERIC_TYPE first = refArray[ array[ firstIndex ] ]; if ( c == null ) while( i-- != 0 ) { if ( KEY_LESS( refArray[ array[ i ] ], first ) ) first = refArray[ array[ firstIndex = i ] ]; } else while( i-- != 0 ) { if ( c.compare( refArray[ array[ i ] ], first ) < 0 ) first = refArray[ array[ firstIndex = i ] ]; } return this.firstIndex = firstIndex; } /** Returns the index (in {@link #array}) of the largest element. */ SUPPRESS_WARNINGS_KEY_UNCHECKED private int findLast() { int i = size; int lastIndex = --i; KEY_GENERIC_TYPE last = refArray[ array[ lastIndex ] ]; if ( c == null ) { while( i-- != 0 ) if ( KEY_LESS( last, refArray[ array[ i ] ] ) ) last = refArray[ array[ lastIndex = i ] ]; } else { while( i-- != 0 ) if ( c.compare( last, refArray[ array[ i ] ] ) < 0 ) last = refArray[ array[ lastIndex = i ] ]; } return lastIndex; } protected final void ensureNonEmpty() { if ( size == 0 ) throw new NoSuchElementException(); } /** Ensures that the given index is a firstIndexValid reference. * * @param index an index in the reference array. * @throws IndexOutOfBoundsException if the given index is negative or larger than the reference array length. */ protected void ensureElement( final int index ) { if ( index < 0 ) throw new IndexOutOfBoundsException( "Index (" + index + ") is negative" ); if ( index >= refArray.length ) throw new IndexOutOfBoundsException( "Index (" + index + ") is larger than or equal to reference array size (" + refArray.length + ")" ); } /** Enqueues a new element. * *
<p>
Note that for efficiency reasons this method will not throw an exception * when x is already in the queue. However, the queue state will become * inconsistent and the following behaviour will not be predictable. */ SUPPRESS_WARNINGS_KEY_UNCHECKED public void enqueue( int x ) { ensureElement( x ); if ( size == array.length ) array = IntArrays.grow( array, size + 1 ); if ( firstIndexValid ) { if ( c == null ) { if ( KEY_LESS( refArray[ x ], refArray[ array[ firstIndex ] ] ) ) firstIndex = size; } else if ( c.compare( refArray[ x ], refArray[ array[ firstIndex ] ] ) < 0 ) firstIndex = size; } else firstIndexValid = false; array[ size++ ] = x; } public int dequeue() { ensureNonEmpty(); final int firstIndex = findFirst(); final int result = array[ firstIndex ]; if ( --size != 0 ) System.arraycopy( array, firstIndex + 1, array, firstIndex, size - firstIndex ); firstIndexValid = false; return result; } public int first() { ensureNonEmpty(); return array[ findFirst() ]; } public int last() { ensureNonEmpty(); return array[ findLast() ]; } public void changed() { ensureNonEmpty(); firstIndexValid = false; } /** {@inheritDoc} * *
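 * <p>(Editorial addition, not part of the original documentation: the intended
 * update-then-notify pattern, assuming the double specialization
 * DoubleArrayIndirectPriorityQueue.)
 * <pre>
 * double[] ref = { 4.0, 1.0, 3.0 };
 * DoubleArrayIndirectPriorityQueue q = new DoubleArrayIndirectPriorityQueue( ref );
 * q.enqueue( 0 );
 * q.enqueue( 1 );
 * ref[ 1 ] = 10.0;   // the priority of element 1 changes...
 * q.changed( 1 );    // ...so the queue is notified
 * q.first();         // 0, since ref[ 0 ] = 4.0 is now the smallest referenced value
 * </pre>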
<p>
Note that for efficiency reasons this method will not throw an exception * when index is not in the queue. */ public void changed( int index ) { ensureElement( index ); if ( index == firstIndex ) firstIndexValid = false; } public void allChanged() { firstIndexValid = false; } public boolean remove( int index ) { ensureElement( index ); final int[] a = array; int i = size; while( i-- != 0 ) if ( a[ i ] == index ) break; if ( i < 0 ) return false; firstIndexValid = false; if ( --size != 0 ) System.arraycopy( a, i + 1, a, i, size - i ); return true; } public int front( int[] a ) { final KEY_GENERIC_TYPE top = refArray[ array[ findFirst() ] ]; int i = size, c = 0; while( i-- != 0 ) if ( KEY_EQUALS_NOT_NULL( top, refArray[ array[ i ] ] ) ) a[ c++ ] = array[ i ]; return c; } public int size() { return size; } public void clear() { size = 0; firstIndexValid = false; } /** Trims the backing array so that it has exactly {@link #size()} elements. */ public void trim() { array = IntArrays.trim( array, size ); } public KEY_COMPARATOR KEY_SUPER_GENERIC comparator() { return c; } public String toString() { StringBuffer s = new StringBuffer(); s.append( "[" ); for ( int i = 0; i < size; i++ ) { if ( i != 0 ) s.append( ", " ); s.append( refArray[ array [ i ] ] ); } s.append( "]" ); return s.toString(); } #ifdef TEST private static long seed = System.currentTimeMillis(); private static java.util.Random r = new java.util.Random( seed ); private static KEY_TYPE genKey() { #if KEY_CLASS_Byte || KEY_CLASS_Short || KEY_CLASS_Character return (KEY_TYPE)(r.nextInt()); #elif KEYS_PRIMITIVE return r.NEXT_KEY(); #elif KEY_CLASS_Object return Integer.toBinaryString( r.nextInt() ); #else return new java.io.Serializable() {}; #endif } private static java.text.NumberFormat format = new java.text.DecimalFormat( "#,###.00" ); private static java.text.FieldPosition p = new java.text.FieldPosition( 0 ); private static String format( double d ) { StringBuffer s = new StringBuffer(); return format.format( d, s, p ).toString(); } private static void speedTest( int n, boolean comp ) { int i, j, s; ARRAY_INDIRECT_PRIORITY_QUEUE[] m = new ARRAY_INDIRECT_PRIORITY_QUEUE[ 100000 ]; HEAP_INDIRECT_PRIORITY_QUEUE[] t = new HEAP_INDIRECT_PRIORITY_QUEUE[ m.length ]; KEY_TYPE k[] = new KEY_TYPE[n]; KEY_TYPE nk[] = new KEY_TYPE[m.length]; long ms; for( i = 0; i < n; i++ ) k[i] = genKey(); for( i = 0; i < m.length; i++ ) nk[i] = genKey(); double totEnq = 0, totDeq = 0, totChange = 0, d; for( i = 0; i < m.length; i++ ) { t[ i ] = new HEAP_INDIRECT_PRIORITY_QUEUE( k ); m[ i ] = new ARRAY_INDIRECT_PRIORITY_QUEUE( k ); } if ( comp ) { for( j = 0; j < 20; j++ ) { for( i = 0; i < m.length; i++ ) t[ i ].clear(); ms = System.currentTimeMillis(); s = m.length; while( s-- != 0 ) { i = n; while( i-- != 0 ) t[ s ].enqueue( i ); } d = System.currentTimeMillis() - ms; if ( j > 2 ) totEnq += d; System.out.print("Enqueue: " + format( m.length * n/d ) +" K/s " ); ms = System.currentTimeMillis(); s = m.length; while( s-- != 0 ) { i = n; while( i-- != 0 ) { k[ t[ s ].first() ] = nk[ i ]; t[ s ].changed(); } } d = System.currentTimeMillis() - ms; if ( j > 2 ) totChange += d; System.out.print("Change: " + format( m.length * n/d ) +" K/s " ); ms = System.currentTimeMillis(); s = m.length; while( s-- != 0 ) { i = n; while( i-- != 0 ) t[ s ].dequeue(); } d = System.currentTimeMillis() - ms; if ( j > 2 ) totDeq += d; System.out.print("Dequeue: " + format( m.length * n/d ) +" K/s " ); System.out.println(); } System.out.println(); System.out.println( "Heap: Enqueue: 
" + format( m.length * (j-3)*n/totEnq ) + " K/s Dequeue: " + format( m.length * (j-3)*n/totDeq ) + " K/s Change: " + format( m.length * (j-3)*n/totChange ) + " K/s" ); System.out.println(); totEnq = totChange = totDeq = 0; } for( j = 0; j < 20; j++ ) { for( i = 0; i < m.length; i++ ) m[ i ].clear(); ms = System.currentTimeMillis(); s = m.length; while( s-- != 0 ) { i = n; while( i-- != 0 ) m[ s ].enqueue( i ); } d = System.currentTimeMillis() - ms; if ( j > 2 ) totEnq += d; System.out.print("Enqueue: " + format( m.length * n/d ) +" K/s " ); ms = System.currentTimeMillis(); s = m.length; while( s-- != 0 ) { i = n; while( i-- != 0 ) { k[ m[ s ].first() ] = nk[ i ]; m[ s ].changed(); } } d = System.currentTimeMillis() - ms; if ( j > 2 ) totChange += d; System.out.print("Change: " + format( m.length * n/d ) +" K/s " ); ms = System.currentTimeMillis(); s = m.length; while( s-- != 0 ) { i = n; while( i-- != 0 ) m[ s ].dequeue(); } d = System.currentTimeMillis() - ms; if ( j > 2 ) totDeq += d; System.out.print("Dequeue: " + format( m.length * n/d ) +" K/s " ); System.out.println(); } System.out.println(); System.out.println( "Array: Enqueue: " + format( m.length * (j-3)*n/totEnq ) + " K/s Dequeue: " + format( m.length * (j-3)*n/totDeq ) + " K/s Change: " + format( m.length * (j-3)*n/totChange ) + " K/s" ); System.out.println(); } private static void fatal( String msg ) { System.out.println( msg ); System.exit( 1 ); } private static void ensure( boolean cond, String msg ) { if ( cond ) return; fatal( msg ); } private static boolean heapEqual( int[] a, int[] b, int sizea, int sizeb ) { if ( sizea != sizeb ) return false; KEY_TYPE[] aa = new KEY_TYPE[ sizea ]; KEY_TYPE[] bb = new KEY_TYPE[ sizea ]; for( int i = 0; i < sizea; i++ ) { aa[ i ] = ref[ a[ i ] ]; bb[ i ] = ref[ b[ i ] ]; } java.util.Arrays.sort( aa ); java.util.Arrays.sort( bb ); while( sizea-- != 0 ) if ( !KEY_EQUALS(aa[sizea], bb[sizea]) ) return false; return true; } private static KEY_TYPE[] ref; protected static void test( int n ) { long ms; Exception mThrowsIllegal, tThrowsIllegal, mThrowsOutOfBounds, tThrowsOutOfBounds, mThrowsNoElement, tThrowsNoElement; int rm = 0, rt = 0; ref = new KEY_TYPE[ n ]; for( int i = 0; i < n; i++ ) ref[ i ] = genKey(); ARRAY_INDIRECT_PRIORITY_QUEUE m = new ARRAY_INDIRECT_PRIORITY_QUEUE( ref ); HEAP_INDIRECT_PRIORITY_QUEUE t = new HEAP_INDIRECT_PRIORITY_QUEUE( ref ); /* We add pairs to t. */ for( int i = 0; i < n / 2; i++ ) { t.enqueue( i ); m.enqueue( i ); } ensure( heapEqual( m.array, t.heap, m.size(), t.size() ), "Error (" + seed + "): m and t differ after creation (" + m + ", " + t + ")" ); /* Now we add and remove random data in m and t, checking that the result is the same. 
*/ for(int i=0; i<2*n; i++ ) { if ( r.nextDouble() < 0.01 ) { t.clear(); m.clear(); for( int j = 0; j < n / 2; j++ ) { t.enqueue( j ); m.enqueue( j ); } } int T = r.nextInt( 2 * n ); mThrowsNoElement = tThrowsNoElement = mThrowsOutOfBounds = tThrowsOutOfBounds = mThrowsIllegal = tThrowsIllegal = null; try { t.enqueue( T ); } catch ( IndexOutOfBoundsException e ) { tThrowsOutOfBounds = e; } catch ( IllegalArgumentException e ) { tThrowsIllegal = e; } if ( tThrowsIllegal == null ) { // To skip duplicates try { m.enqueue( T ); } catch ( IndexOutOfBoundsException e ) { mThrowsOutOfBounds = e; } catch ( IllegalArgumentException e ) { mThrowsIllegal = e; } } mThrowsIllegal = tThrowsIllegal = null; // To skip duplicates ensure( ( mThrowsOutOfBounds == null ) == ( tThrowsOutOfBounds == null ), "Error (" + seed + "): enqueue() divergence in IndexOutOfBoundsException for " + T + " (" + mThrowsOutOfBounds + ", " + tThrowsOutOfBounds + ")" ); ensure( ( mThrowsIllegal == null ) == ( tThrowsIllegal == null ), "Error (" + seed + "): enqueue() divergence in IllegalArgumentException for " + T + " (" + mThrowsIllegal + ", " + tThrowsIllegal + ")" ); ensure( heapEqual( m.array, t.heap, m.size(), t.size() ), "Error (" + seed + "): m and t differ after enqueue (" + m + ", " + t + ")" ); if ( m.size() != 0 ) { ensure( KEY_EQUALS( ref[ m.first() ], ref[ t.first() ] ), "Error (" + seed + "): m and t differ in first element after enqueue (" + m.first() + "->" + ref[ m.first() ] + ", " + t.first() + "->" + ref[ t.first() ] + ")"); } mThrowsNoElement = tThrowsNoElement = mThrowsOutOfBounds = tThrowsOutOfBounds = mThrowsIllegal = tThrowsIllegal = null; try { rm = m.dequeue(); while( ! m.isEmpty() && KEY_EQUALS( ref[ m.first() ], ref[ rm ] ) ) m.dequeue(); } catch ( IndexOutOfBoundsException e ) { mThrowsOutOfBounds = e; } catch ( IllegalArgumentException e ) { mThrowsIllegal = e; } catch ( java.util.NoSuchElementException e ) { mThrowsNoElement = e; } try { rt = t.dequeue(); while( ! 
t.isEmpty() && KEY_EQUALS( ref[ t.first() ], ref[ rt ] ) ) t.dequeue(); } catch ( IndexOutOfBoundsException e ) { tThrowsOutOfBounds = e; } catch ( IllegalArgumentException e ) { tThrowsIllegal = e; } catch ( java.util.NoSuchElementException e ) { tThrowsNoElement = e; } ensure( ( mThrowsOutOfBounds == null ) == ( tThrowsOutOfBounds == null ), "Error (" + seed + "): dequeue() divergence in IndexOutOfBoundsException (" + mThrowsOutOfBounds + ", " + tThrowsOutOfBounds + ")" ); ensure( ( mThrowsIllegal == null ) == ( tThrowsIllegal == null ), "Error (" + seed + "): dequeue() divergence in IllegalArgumentException (" + mThrowsIllegal + ", " + tThrowsIllegal + ")" ); ensure( ( mThrowsNoElement == null ) == ( tThrowsNoElement == null ), "Error (" + seed + "): dequeue() divergence in java.util.NoSuchElementException (" + mThrowsNoElement + ", " + tThrowsNoElement + ")" ); if ( mThrowsOutOfBounds == null ) ensure( KEY_EQUALS( ref[ rt ], ref[ rm ]), "Error (" + seed + "): divergence in dequeue() between m and t (" + rm + "->" + ref[ rm ] + ", " + rt + "->" + ref[ rt ] + ")" ); ensure( heapEqual( m.array, t.heap, m.size(), t.size() ), "Error (" + seed + "): m and t differ after dequeue (" + m + ", " + t + ")"); if ( m.size() != 0 ) { ensure( KEY_EQUALS( ref[ m.first() ], ref[ t.first() ] ), "Error (" + seed + "): m and t differ in first element after dequeue (" + m.first() + "->" + ref[ m.first() ] + ", " + t.first() + "->" + ref[ t.first() ] + ")"); } mThrowsNoElement = tThrowsNoElement = mThrowsOutOfBounds = tThrowsOutOfBounds = mThrowsIllegal = tThrowsIllegal = null; int pos = r.nextInt( n * 2 ); try { m.remove( pos ); } catch ( IndexOutOfBoundsException e ) { mThrowsOutOfBounds = e; } catch ( IllegalArgumentException e ) { mThrowsIllegal = e; } catch ( java.util.NoSuchElementException e ) { mThrowsNoElement = e; } try { t.remove( pos ); } catch ( IndexOutOfBoundsException e ) { tThrowsOutOfBounds = e; } catch ( IllegalArgumentException e ) { tThrowsIllegal = e; } catch ( java.util.NoSuchElementException e ) { tThrowsNoElement = e; } ensure( ( mThrowsOutOfBounds == null ) == ( tThrowsOutOfBounds == null ), "Error (" + seed + "): remove(int) divergence in IndexOutOfBoundsException (" + mThrowsOutOfBounds + ", " + tThrowsOutOfBounds + ")" ); ensure( ( mThrowsIllegal == null ) == ( tThrowsIllegal == null ), "Error (" + seed + "): remove(int) divergence in IllegalArgumentException (" + mThrowsIllegal + ", " + tThrowsIllegal + ")" ); ensure( ( mThrowsNoElement == null ) == ( tThrowsNoElement == null ), "Error (" + seed + "): remove(int) divergence in java.util.NoSuchElementException (" + mThrowsNoElement + ", " + tThrowsNoElement + ")" ); ensure( heapEqual( m.array, t.heap, m.size(), t.size() ), "Error (" + seed + "): m and t differ after remove(int) (" + m + ", " + t + ")" ); if ( m.size() != 0 ) { ensure( KEY_EQUALS( ref[ m.first() ], ref[ t.first() ] ), "Error (" + seed + "): m and t differ in first element after remove(int) (" + m.first() + "->" + ref[ m.first() ] + ", " + t.first() + "->" + ref[ t.first() ] + ")"); } mThrowsNoElement = tThrowsNoElement = mThrowsOutOfBounds = tThrowsOutOfBounds = mThrowsIllegal = tThrowsIllegal = null; pos = r.nextInt( n ); try { t.changed( pos ); } catch ( IndexOutOfBoundsException e ) { tThrowsOutOfBounds = e; } catch ( IllegalArgumentException e ) { tThrowsIllegal = e; } catch ( java.util.NoSuchElementException e ) { tThrowsNoElement = e; } if ( tThrowsIllegal == null ) { try { m.changed( pos ); } catch ( IndexOutOfBoundsException e ) { mThrowsOutOfBounds = e; 
} catch ( IllegalArgumentException e ) { mThrowsIllegal = e; } catch ( java.util.NoSuchElementException e ) { mThrowsNoElement = e; } } ensure( ( mThrowsOutOfBounds == null ) == ( tThrowsOutOfBounds == null ), "Error (" + seed + "): change(int) divergence in IndexOutOfBoundsException (" + mThrowsOutOfBounds + ", " + tThrowsOutOfBounds + ")" ); //ensure( ( mThrowsIllegal == null ) == ( tThrowsIllegal == null ), "Error (" + seed + "): change(int) divergence in IllegalArgumentException (" + mThrowsIllegal + ", " + tThrowsIllegal + ")" ); ensure( ( mThrowsNoElement == null ) == ( tThrowsNoElement == null ), "Error (" + seed + "): change(int) divergence in java.util.NoSuchElementException (" + mThrowsNoElement + ", " + tThrowsNoElement + ")" ); ensure( heapEqual( m.array, t.heap, m.size(), t.size() ), "Error (" + seed + "): m and t differ after change(int) (" + m + ", " + t + ")" ); if ( m.size() != 0 ) { ensure( KEY_EQUALS( ref[ m.first() ], ref[ t.first() ] ), "Error (" + seed + "): m and t differ in first element after change(int) (" + m.first() + "->" + ref[ m.first() ] + ", " + t.first() + "->" + ref[ t.first() ] + ")"); } int[] temp = (int[])t.heap.clone(); java.util.Arrays.sort( temp, 0, t.size() ); // To scramble a bit m = new ARRAY_INDIRECT_PRIORITY_QUEUE( m.refArray, temp, t.size() ); ensure( heapEqual( m.array, t.heap, m.size(), t.size() ), "Error (" + seed + "): m and t differ after wrap (" + m + ", " + t + ")" ); if ( m.size() != 0 ) { ensure( KEY_EQUALS( ref[ m.first() ], ref[ t.first() ] ), "Error (" + seed + "): m and t differ in first element after wrap (" + m.first() + "->" + ref[ m.first() ] + ", " + t.first() + "->" + ref[ t.first() ] + ")"); } if ( m.size() != 0 && ( ( new it.unimi.dsi.fastutil.ints.IntOpenHashSet( m.array, 0, m.size ) ).size() == m.size() ) ) { int first = m.first(); ref[ first ] = genKey(); //System.err.println("Pre-change m: " +m ); //System.err.println("Pre-change t: " +t ); m.changed(); t.changed( first ); //System.err.println("Post-change m: " +m ); //System.err.println("Post-change t: " +t ); ensure( heapEqual( m.array, t.heap, m.size(), t.size() ), "Error (" + seed + "): m and t differ after change (" + m + ", " + t + ")"); if ( m.size() != 0 ) { ensure( KEY_EQUALS( ref[ m.first() ], ref[ t.first() ] ), "Error (" + seed + "): m and t differ in first element after change (" + m.first() + "->" + ref[ m.first() ] + ", " + t.first() + "->" + ref[ t.first() ] + ")"); } } } /* Now we check that m actually holds the same data. */ m.clear(); ensure( m.isEmpty(), "Error (" + seed + "): m is not empty after clear()" ); System.out.println("Test OK"); } public static void main( String args[] ) { int n = Integer.parseInt(args[1]); if ( args.length > 2 ) r = new java.util.Random( seed = Long.parseLong( args[ 2 ] ) ); try { if ("speedTest".equals(args[0]) || "speedComp".equals(args[0])) speedTest( n, "speedComp".equals(args[0]) ); else if ( "test".equals( args[0] ) ) test(n); } catch( Throwable e ) { e.printStackTrace( System.err ); System.err.println( "seed: " + seed ); } } #endif } fastutil-7.1.0/drv/ArrayList.drv0000664000000000000000000013146513050701620015300 0ustar rootroot/* * Copyright (C) 2002-2016 Sebastiano Vigna * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package PACKAGE; import java.util.Arrays; import java.util.Collection; import java.util.Iterator; import java.util.RandomAccess; import java.util.NoSuchElementException; #if KEYS_PRIMITIVE /** A type-specific array-based list; provides some additional methods that use polymorphism to avoid (un)boxing. * *
<p>
This class implements a lightweight, fast, open, optimized, * reuse-oriented version of array-based lists. Instances of this class * represent a list with an array that is enlarged as needed when new entries * are created (by doubling its current length), but is * never made smaller (even on a {@link #clear()}). A family of * {@linkplain #trim() trimming methods} lets you control the size of the * backing array; this is particularly useful if you reuse instances of this class. * Range checks are equivalent to those of {@link java.util}'s classes, but * they are delayed as much as possible. The backing array is exposed by the * {@link #elements()} method. * *
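 * <p>(Editorial addition, not part of the original documentation: a minimal usage
 * sketch, assuming the int specialization IntArrayList generated from this template.)
 * <pre>
 * IntArrayList l = new IntArrayList();            // default initial capacity (16)
 * for( int i = 0; i < 100; i++ ) l.add( i );      // the backing array doubles as needed
 * l.getInt( 99 );                                 // 99, with no (un)boxing
 * l.clear();                                      // size is 0, but the backing array keeps its length
 * l.trim();                                       // now the backing array is trimmed to the size
 * </pre>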
<p>
This class implements the bulk methods removeElements(), * addElements() and getElements() using * high-performance system calls (e.g., {@link * System#arraycopy(Object,int,Object,int,int) System.arraycopy()} instead of * expensive loops. * * @see java.util.ArrayList */ public class ARRAY_LIST KEY_GENERIC extends ABSTRACT_LIST KEY_GENERIC implements RandomAccess, Cloneable, java.io.Serializable { private static final long serialVersionUID = -7046029254386353130L; #else /** A type-specific array-based list; provides some additional methods that use polymorphism to avoid (un)boxing. * *
<p>
This class implements a lightweight, fast, open, optimized, * reuse-oriented version of array-based lists. Instances of this class * represent a list with an array that is enlarged as needed when new entries * are created (by doubling the current length), but is * never made smaller (even on a {@link #clear()}). A family of * {@linkplain #trim() trimming methods} lets you control the size of the * backing array; this is particularly useful if you reuse instances of this class. * Range checks are equivalent to those of {@link java.util}'s classes, but * they are delayed as much as possible. * *
<p>
The backing array is exposed by the {@link #elements()} method. If an instance * of this class was created {@linkplain #wrap(Object[],int) by wrapping}, * backing-array reallocations will be performed using reflection, so that * {@link #elements()} can return an array of the same type as the original array: the comments * about efficiency made in {@link it.unimi.dsi.fastutil.objects.ObjectArrays} apply here. * Moreover, keep in mind that assignment to an array * not of type {@code Object[]} is slower due to type checking. * *
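 * <p>(Editorial addition, not part of the original documentation: a sketch of the
 * difference between wrapped and non-wrapped instances, assuming the object
 * specialization ObjectArrayList.)
 * <pre>
 * String[] s = { "a", "b", "c" };
 * ObjectArrayList<String> wrapped = ObjectArrayList.wrap( s );
 * wrapped.add( "d" );        // forces a reallocation, performed via reflection
 * wrapped.elements();        // still a String[]
 *
 * ObjectArrayList<String> copied = new ObjectArrayList<String>( s );
 * copied.elements();         // an Object[], in spite of the declared return type
 * </pre>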
<p>
This class implements the bulk methods removeElements(), * addElements() and getElements() using * high-performance system calls (e.g., {@link * System#arraycopy(Object,int,Object,int,int) System.arraycopy()} instead of * expensive loops. * * @see java.util.ArrayList */ public class ARRAY_LIST KEY_GENERIC extends ABSTRACT_LIST KEY_GENERIC implements RandomAccess, Cloneable, java.io.Serializable { private static final long serialVersionUID = -7046029254386353131L; #endif /** The initial default capacity of an array list. */ public final static int DEFAULT_INITIAL_CAPACITY = 16; #if ! KEYS_PRIMITIVE /** Whether the backing array was passed to wrap(). In * this case, we must reallocate with the same type of array. */ protected final boolean wrapped; #endif /** The backing array. */ protected transient KEY_GENERIC_TYPE a[]; /** The current actual size of the list (never greater than the backing-array length). */ protected int size; private static final boolean ASSERTS = ASSERTS_VALUE; /** Creates a new array list using a given array. * *
<p>
This constructor is only meant to be used by the wrapping methods. * * @param a the array that will be used to back this array list. */ @SuppressWarnings("unused") protected ARRAY_LIST( final KEY_GENERIC_TYPE a[], boolean dummy ) { this.a = a; #if ! KEYS_PRIMITIVE this.wrapped = true; #endif } /** Creates a new array list with given capacity. * * @param capacity the initial capacity of the array list (may be 0). */ SUPPRESS_WARNINGS_KEY_UNCHECKED public ARRAY_LIST( final int capacity ) { if ( capacity < 0 ) throw new IllegalArgumentException( "Initial capacity (" + capacity + ") is negative" ); a = KEY_GENERIC_ARRAY_CAST new KEY_TYPE[ capacity ]; #if ! KEYS_PRIMITIVE wrapped = false; #endif } /** Creates a new array list with {@link #DEFAULT_INITIAL_CAPACITY} capacity. */ public ARRAY_LIST() { this( DEFAULT_INITIAL_CAPACITY ); } /** Creates a new array list and fills it with a given collection. * * @param c a collection that will be used to fill the array list. */ public ARRAY_LIST( final Collection c ) { this( c.size() ); #if KEYS_PRIMITIVE size = ITERATORS.unwrap( ITERATORS.AS_KEY_ITERATOR( c.iterator() ), a ); #else size = ITERATORS.unwrap( c.iterator(), a ); #endif } /** Creates a new array list and fills it with a given type-specific collection. * * @param c a type-specific collection that will be used to fill the array list. */ public ARRAY_LIST( final COLLECTION KEY_EXTENDS_GENERIC c ) { this( c.size() ); size = ITERATORS.unwrap( c.iterator(), a ); } /** Creates a new array list and fills it with a given type-specific list. * * @param l a type-specific list that will be used to fill the array list. */ public ARRAY_LIST( final LIST KEY_EXTENDS_GENERIC l ) { this( l.size() ); l.getElements( 0, a, 0, size = l.size() ); } /** Creates a new array list and fills it with the elements of a given array. * * @param a an array whose elements will be used to fill the array list. */ public ARRAY_LIST( final KEY_GENERIC_TYPE a[] ) { this( a, 0, a.length ); } /** Creates a new array list and fills it with the elements of a given array. * * @param a an array whose elements will be used to fill the array list. * @param offset the first element to use. * @param length the number of elements to use. */ public ARRAY_LIST( final KEY_GENERIC_TYPE a[], final int offset, final int length ) { this( length ); System.arraycopy( a, offset, this.a, 0, length ); size = length; } /** Creates a new array list and fills it with the elements returned by an iterator.. * * @param i an iterator whose returned elements will fill the array list. */ public ARRAY_LIST( final Iterator i ) { this(); while( i.hasNext() ) this.add( i.next() ); } /** Creates a new array list and fills it with the elements returned by a type-specific iterator.. * * @param i a type-specific iterator whose returned elements will fill the array list. */ public ARRAY_LIST( final KEY_ITERATOR KEY_EXTENDS_GENERIC i ) { this(); while( i.hasNext() ) this.add( i.NEXT_KEY() ); } #if KEYS_PRIMITIVE /** Returns the backing array of this list. * * @return the backing array. */ public KEY_GENERIC_TYPE[] elements() { return a; } #else /** Returns the backing array of this list. * *
<p>
If this array list was created by wrapping a given array, it is guaranteed * that the type of the returned array will be the same. Otherwise, the returned * array will be of type {@link Object Object[]} (in spite of the declared return type). * *
<p>
Warning: This behaviour may cause (unfathomable) * run-time errors if a method expects an array * actually of type K[], but this method returns an array * of type {@link Object Object[]}. * * @return the backing array. */ public K[] elements() { return a; } #endif /** Wraps a given array into an array list of given size. * *
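 * <p>(Editorial addition, not part of the original documentation: a sketch,
 * assuming the int specialization IntArrayList.)
 * <pre>
 * int[] a = { 1, 2, 3, 0, 0 };
 * IntArrayList l = IntArrayList.wrap( a, 3 );   // a list of size 3 backed by a
 * l.set( 0, 42 );                               // writes through to a[ 0 ]
 * l.add( 4 );                                   // size becomes 4; a still has room, so no reallocation
 * </pre>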
<p>
Note it is guaranteed * that the type of the array returned by {@link #elements()} will be the same * (see the comments in the class documentation). * * @param a an array to wrap. * @param length the length of the resulting array list. * @return a new array list of the given size, wrapping the given array. */ public static KEY_GENERIC ARRAY_LIST KEY_GENERIC wrap( final KEY_GENERIC_TYPE a[], final int length ) { if ( length > a.length ) throw new IllegalArgumentException( "The specified length (" + length + ") is greater than the array size (" + a.length + ")" ); final ARRAY_LIST KEY_GENERIC l = new ARRAY_LIST KEY_GENERIC( a, false ); l.size = length; return l; } /** Wraps a given array into an array list. * *
<p>
Note it is guaranteed * that the type of the array returned by {@link #elements()} will be the same * (see the comments in the class documentation). * * @param a an array to wrap. * @return a new array list wrapping the given array. */ public static KEY_GENERIC ARRAY_LIST KEY_GENERIC wrap( final KEY_GENERIC_TYPE a[] ) { return wrap( a, a.length ); } /** Ensures that this array list can contain the given number of entries without resizing. * * @param capacity the new minimum capacity for this array list. */ SUPPRESS_WARNINGS_KEY_UNCHECKED public void ensureCapacity( final int capacity ) { #if KEYS_PRIMITIVE a = ARRAYS.ensureCapacity( a, capacity, size ); #else if ( wrapped ) a = ARRAYS.ensureCapacity( a, capacity, size ); else { if ( capacity > a.length ) { final Object t[] = new Object[ capacity ]; System.arraycopy( a, 0, t, 0, size ); a = (KEY_GENERIC_TYPE[])t; } } #endif if ( ASSERTS ) assert size <= a.length; } /** Grows this array list, ensuring that it can contain the given number of entries without resizing, * and in case enlarging it at least by a factor of two. * * @param capacity the new minimum capacity for this array list. */ SUPPRESS_WARNINGS_KEY_UNCHECKED private void grow( final int capacity ) { #if KEYS_PRIMITIVE a = ARRAYS.grow( a, capacity, size ); #else if ( wrapped ) a = ARRAYS.grow( a, capacity, size ); else { if ( capacity > a.length ) { final int newLength = (int)Math.max( Math.min( 2L * a.length, it.unimi.dsi.fastutil.Arrays.MAX_ARRAY_SIZE ), capacity ); final Object t[] = new Object[ newLength ]; System.arraycopy( a, 0, t, 0, size ); a = (KEY_GENERIC_TYPE[])t; } } #endif if ( ASSERTS ) assert size <= a.length; } public void add( final int index, final KEY_GENERIC_TYPE k ) { ensureIndex( index ); grow( size + 1 ); if ( index != size ) System.arraycopy( a, index, a, index + 1, size - index ); a[ index ] = k; size++; if ( ASSERTS ) assert size <= a.length; } public boolean add( final KEY_GENERIC_TYPE k ) { grow( size + 1 ); a[ size++ ] = k; if ( ASSERTS ) assert size <= a.length; return true; } public KEY_GENERIC_TYPE GET_KEY( final int index ) { if ( index >= size ) throw new IndexOutOfBoundsException( "Index (" + index + ") is greater than or equal to list size (" + size + ")" ); return a[ index ]; } public int indexOf( final KEY_TYPE k ) { for( int i = 0; i < size; i++ ) if ( KEY_EQUALS( k, a[ i ] ) ) return i; return -1; } public int lastIndexOf( final KEY_TYPE k ) { for( int i = size; i-- != 0; ) if ( KEY_EQUALS( k, a[ i ] ) ) return i; return -1; } public KEY_GENERIC_TYPE REMOVE_KEY( final int index ) { if ( index >= size ) throw new IndexOutOfBoundsException( "Index (" + index + ") is greater than or equal to list size (" + size + ")" ); final KEY_GENERIC_TYPE old = a[ index ]; size--; if ( index != size ) System.arraycopy( a, index + 1, a, index, size - index ); #if KEYS_REFERENCE a[ size ] = null; #endif if ( ASSERTS ) assert size <= a.length; return old; } public boolean rem( final KEY_TYPE k ) { int index = indexOf( k ); if ( index == -1 ) return false; REMOVE_KEY( index ); if ( ASSERTS ) assert size <= a.length; return true; } #if KEYS_REFERENCE public boolean remove( final Object o ) { return rem( o ); } #endif public KEY_GENERIC_TYPE set( final int index, final KEY_GENERIC_TYPE k ) { if ( index >= size ) throw new IndexOutOfBoundsException( "Index (" + index + ") is greater than or equal to list size (" + size + ")" ); KEY_GENERIC_TYPE old = a[ index ]; a[ index ] = k; return old; } public void clear() { #if KEYS_REFERENCE Arrays.fill( a, 0, size, null ); 
#endif size = 0; if ( ASSERTS ) assert size <= a.length; } public int size() { return size; } public void size( final int size ) { if ( size > a.length ) ensureCapacity( size ); if ( size > this.size ) Arrays.fill( a, this.size, size, KEY_NULL ); #if KEYS_REFERENCE else Arrays.fill( a, size, this.size, KEY_NULL ); #endif this.size = size; } public boolean isEmpty() { return size == 0; } /** Trims this array list so that the capacity is equal to the size. * * @see java.util.ArrayList#trimToSize() */ public void trim() { trim( 0 ); } /** Trims the backing array if it is too large. * * If the current array length is smaller than or equal to * n, this method does nothing. Otherwise, it trims the * array length to the maximum between n and {@link #size()}. * *
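 * <p>(Editorial addition, not part of the original documentation: a sketch of the
 * reuse pattern described below, assuming the int specialization IntArrayList;
 * batches and process() are hypothetical placeholders.)
 * <pre>
 * IntArrayList buffer = new IntArrayList();
 * for( int[] batch : batches ) {
 *     buffer.clear();          // size is 0, backing-array length untouched
 *     buffer.trim( 1024 );     // shrink the backing array to 1024 entries if it grew larger
 *     for( int x : batch ) buffer.add( x );
 *     process( buffer );       // hypothetical consumer
 * }
 * </pre>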
<p>
This method is useful when reusing lists. {@linkplain #clear() Clearing a * list} leaves the array length untouched. If you are reusing a list * many times, you can call this method with a typical * size to avoid keeping around a very large array just * because of a few large transient lists. * * @param n the threshold for the trimming. */ SUPPRESS_WARNINGS_KEY_UNCHECKED public void trim( final int n ) { // TODO: use Arrays.trim() and preserve type only if necessary if ( n >= a.length || size == a.length ) return; final KEY_GENERIC_TYPE t[] = KEY_GENERIC_ARRAY_CAST new KEY_TYPE[ Math.max( n, size ) ]; System.arraycopy( a, 0, t, 0, size ); a = t; if ( ASSERTS ) assert size <= a.length; } /** Copies element of this type-specific list into the given array using optimized system calls. * * @param from the start index (inclusive). * @param a the destination array. * @param offset the offset into the destination array where to store the first element copied. * @param length the number of elements to be copied. */ public void getElements( final int from, final KEY_TYPE[] a, final int offset, final int length ) { ARRAYS.ensureOffsetLength( a, offset, length ); System.arraycopy( this.a, from, a, offset, length ); } /** Removes elements of this type-specific list using optimized system calls. * * @param from the start index (inclusive). * @param to the end index (exclusive). */ public void removeElements( final int from, final int to ) { it.unimi.dsi.fastutil.Arrays.ensureFromTo( size, from, to ); System.arraycopy( a, to, a, from, size - to ); size -= ( to - from ); #if KEYS_REFERENCE int i = to - from; while( i-- != 0 ) a[ size + i ] = null; #endif } /** Adds elements to this type-specific list using optimized system calls. * * @param index the index at which to add elements. * @param a the array containing the elements. * @param offset the offset of the first element to add. * @param length the number of elements to add. */ public void addElements( final int index, final KEY_GENERIC_TYPE a[], final int offset, final int length ) { ensureIndex( index ); ARRAYS.ensureOffsetLength( a, offset, length ); grow( size + length ); System.arraycopy( this.a, index, this.a, index + length, size - index ); System.arraycopy( a, offset, this.a, index, length ); size += length; } #if KEYS_PRIMITIVE public KEY_TYPE[] TO_KEY_ARRAY( KEY_TYPE a[] ) { if ( a == null || a.length < size ) a = new KEY_TYPE[ size ]; System.arraycopy( this.a, 0, a, 0, size ); return a; } public boolean addAll( int index, final COLLECTION c ) { ensureIndex( index ); int n = c.size(); if ( n == 0 ) return false; grow( size + n ); if ( index != size ) System.arraycopy( a, index, a, index + n, size - index ); final KEY_ITERATOR i = c.iterator(); size += n; while( n-- != 0 ) a[ index++ ] = i.NEXT_KEY(); if ( ASSERTS ) assert size <= a.length; return true; } public boolean addAll( final int index, final LIST l ) { ensureIndex( index ); final int n = l.size(); if ( n == 0 ) return false; grow( size + n ); if ( index != size ) System.arraycopy( a, index, a, index + n, size - index ); l.getElements( 0, a, index, n ); size += n; if ( ASSERTS ) assert size <= a.length; return true; } @Override public boolean removeAll( final COLLECTION c ) { final KEY_TYPE[] a = this.a; int j = 0; for( int i = 0; i < size; i++ ) if ( ! 
c.contains( a[ i ] ) ) a[ j++ ] = a[ i ]; final boolean modified = size != j; size = j; return modified; } @Override public boolean removeAll( final Collection c ) { final KEY_TYPE[] a = this.a; int j = 0; for( int i = 0; i < size; i++ ) if ( ! c.contains( KEY2OBJ( a[ i ] ) ) ) a[ j++ ] = a[ i ]; final boolean modified = size != j; size = j; return modified; } #else @Override public boolean removeAll( final Collection c ) { final KEY_TYPE[] a = this.a; int j = 0; for( int i = 0; i < size; i++ ) if ( ! c.contains( a[ i ] ) ) a[ j++ ] = a[ i ]; Arrays.fill( a, j, size, null ); final boolean modified = size != j; size = j; return modified; } #endif public KEY_LIST_ITERATOR KEY_GENERIC listIterator( final int index ) { ensureIndex( index ); return new KEY_ABSTRACT_LIST_ITERATOR KEY_GENERIC() { int pos = index, last = -1; public boolean hasNext() { return pos < size; } public boolean hasPrevious() { return pos > 0; } public KEY_GENERIC_TYPE NEXT_KEY() { if ( ! hasNext() ) throw new NoSuchElementException(); return a[ last = pos++ ]; } public KEY_GENERIC_TYPE PREV_KEY() { if ( ! hasPrevious() ) throw new NoSuchElementException(); return a[ last = --pos ]; } public int nextIndex() { return pos; } public int previousIndex() { return pos - 1; } public void add( KEY_GENERIC_TYPE k ) { ARRAY_LIST.this.add( pos++, k ); last = -1; } public void set( KEY_GENERIC_TYPE k ) { if ( last == -1 ) throw new IllegalStateException(); ARRAY_LIST.this.set( last, k ); } public void remove() { if ( last == -1 ) throw new IllegalStateException(); ARRAY_LIST.this.REMOVE_KEY( last ); /* If the last operation was a next(), we are removing an element *before* us, and we must decrease pos correspondingly. */ if ( last < pos ) pos--; last = -1; } }; } public ARRAY_LIST KEY_GENERIC clone() { ARRAY_LIST KEY_GENERIC c = new ARRAY_LIST KEY_GENERIC( size ); System.arraycopy( a, 0, c.a, 0, size ); c.size = size; return c; } #if KEY_CLASS_Object private boolean valEquals( final K a, final K b ) { return a == null ? b == null : a.equals( b ); } #endif /** Compares this type-specific array list to another one. * *
<p>
This method exists only for sake of efficiency. The implementation * inherited from the abstract implementation would already work. * * @param l a type-specific array list. * @return true if the argument contains the same elements of this type-specific array list. */ public boolean equals( final ARRAY_LIST KEY_GENERIC l ) { if ( l == this ) return true; int s = size(); if ( s != l.size() ) return false; final KEY_GENERIC_TYPE[] a1 = a; final KEY_GENERIC_TYPE[] a2 = l.a; #if KEY_CLASS_Object while( s-- != 0 ) if ( ! valEquals( a1[ s ], a2[ s ] ) ) return false; #else while( s-- != 0 ) if ( a1[ s ] != a2[ s ] ) return false; #endif return true; } #if ! KEY_CLASS_Reference /** Compares this array list to another array list. * *
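 * <p>(Editorial addition, not part of the original documentation: the lexicographical
 * order in a nutshell, assuming the int specialization IntArrayList.)
 * <pre>
 * IntArrayList a = IntArrayList.wrap( new int[] { 1, 2, 3 } );
 * IntArrayList b = IntArrayList.wrap( new int[] { 1, 3 } );
 * IntArrayList c = IntArrayList.wrap( new int[] { 1, 2 } );
 * a.compareTo( b );   // negative: 2 is smaller than 3 at the first differing position
 * c.compareTo( a );   // negative: c is a proper prefix of a
 * </pre>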
<p>
This method exists only for sake of efficiency. The implementation * inherited from the abstract implementation would already work. * * @param l an array list. * @return a negative integer, * zero, or a positive integer as this list is lexicographically less than, equal * to, or greater than the argument. */ SUPPRESS_WARNINGS_KEY_UNCHECKED public int compareTo( final ARRAY_LIST KEY_EXTENDS_GENERIC l ) { final int s1 = size(), s2 = l.size(); final KEY_GENERIC_TYPE a1[] = a, a2[] = l.a; KEY_GENERIC_TYPE e1, e2; int r, i; for( i = 0; i < s1 && i < s2; i++ ) { e1 = a1[ i ]; e2 = a2[ i ]; if ( ( r = KEY_CMP( e1, e2 ) ) != 0 ) return r; } return i < s2 ? -1 : ( i < s1 ? 1 : 0 ); } #endif private void writeObject( java.io.ObjectOutputStream s ) throws java.io.IOException { s.defaultWriteObject(); for( int i = 0; i < size; i++ ) s.WRITE_KEY( a[ i ] ); } SUPPRESS_WARNINGS_KEY_UNCHECKED private void readObject( java.io.ObjectInputStream s ) throws java.io.IOException, ClassNotFoundException { s.defaultReadObject(); a = KEY_GENERIC_ARRAY_CAST new KEY_TYPE[ size ]; for( int i = 0; i < size; i++ ) a[ i ] = KEY_GENERIC_CAST s.READ_KEY(); } #ifdef TEST private static long seed = System.currentTimeMillis(); private static java.util.Random r = new java.util.Random( seed ); private static KEY_TYPE genKey() { #if KEY_CLASS_Byte || KEY_CLASS_Short || KEY_CLASS_Character return (KEY_TYPE)(r.nextInt()); #elif KEYS_PRIMITIVE return r.NEXT_KEY(); #elif KEY_CLASS_Object return Integer.toBinaryString( r.nextInt() ); #else return new java.io.Serializable() {}; #endif } private static java.text.NumberFormat format = new java.text.DecimalFormat( "#,###.00" ); private static java.text.FieldPosition p = new java.text.FieldPosition( 0 ); private static String format( double d ) { StringBuffer s = new StringBuffer(); return format.format( d, s, p ).toString(); } private static void speedTest( int n, boolean comp ) { System.out.println( "There are presently no speed tests for this class." ); } private static void fatal( String msg ) { System.out.println( msg ); System.exit( 1 ); } private static void ensure( boolean cond, String msg ) { if ( cond ) return; fatal( msg ); } private static Object[] k, v, nk; private static KEY_TYPE kt[]; private static KEY_TYPE nkt[]; private static ARRAY_LIST topList; protected static void testLists( LIST m, java.util.List t, int n, int level ) { long ms; Exception mThrowsIllegal, tThrowsIllegal, mThrowsOutOfBounds, tThrowsOutOfBounds; Object rt = null; KEY_TYPE rm = KEY_NULL; if ( level > 4 ) return; /* Now we check that both sets agree on random keys. For m we use the polymorphic method. 
*/ for( int i = 0; i < n; i++ ) { int p = r.nextInt() % ( n * 2 ); KEY_TYPE T = genKey(); mThrowsOutOfBounds = tThrowsOutOfBounds = null; try { m.set( p, T ); } catch ( IndexOutOfBoundsException e ) { mThrowsOutOfBounds = e; } try { t.set( p, KEY2OBJ( T ) ); } catch ( IndexOutOfBoundsException e ) { tThrowsOutOfBounds = e; } ensure( ( mThrowsOutOfBounds == null ) == ( tThrowsOutOfBounds == null ), "Error (" + level + ", " + seed + "): set() divergence at start in IndexOutOfBoundsException for index " + p + " (" + mThrowsOutOfBounds + ", " + tThrowsOutOfBounds + ")" ); if ( mThrowsOutOfBounds == null ) ensure( t.get( p ).equals( KEY2OBJ( m.GET_KEY( p ) ) ), "Error (" + level + ", " + seed + "): m and t differ after set() on position " + p + " (" + m.GET_KEY( p ) + ", " + t.get( p ) + ")" ); p = r.nextInt() % ( n * 2 ); mThrowsOutOfBounds = tThrowsOutOfBounds = null; try { m.GET_KEY( p ); } catch ( IndexOutOfBoundsException e ) { mThrowsOutOfBounds = e; } try { t.get( p ); } catch ( IndexOutOfBoundsException e ) { tThrowsOutOfBounds = e; } ensure( ( mThrowsOutOfBounds == null ) == ( tThrowsOutOfBounds == null ), "Error (" + level + ", " + seed + "): get() divergence at start in IndexOutOfBoundsException for index " + p + " (" + mThrowsOutOfBounds + ", " + tThrowsOutOfBounds + ")" ); if ( mThrowsOutOfBounds == null ) ensure( t.get( p ).equals( KEY2OBJ( m.GET_KEY( p ) ) ), "Error (" + level + ", " + seed + "): m and t differ aftre get() on position " + p + " (" + m.GET_KEY( p ) + ", " + t.get( p ) + ")" ); } /* Now we check that both sets agree on random keys. For m we use the standard method. */ for( int i = 0; i < n; i++ ) { int p = r.nextInt() % ( n * 2 ); mThrowsOutOfBounds = tThrowsOutOfBounds = null; try { m.get( p ); } catch ( IndexOutOfBoundsException e ) { mThrowsOutOfBounds = e; } try { t.get( p ); } catch ( IndexOutOfBoundsException e ) { tThrowsOutOfBounds = e; } ensure( ( mThrowsOutOfBounds == null ) == ( tThrowsOutOfBounds == null ), "Error (" + level + ", " + seed + "): get() divergence at start in IndexOutOfBoundsException for index " + p + " (" + mThrowsOutOfBounds + ", " + tThrowsOutOfBounds + ")" ); if ( mThrowsOutOfBounds == null ) ensure( t.get( p ).equals( m.get( p ) ), "Error (" + level + ", " + seed + "): m and t differ at start on position " + p + " (" + m.get( p ) + ", " + t.get( p ) + ")" ); } /* Now we check that m and t are equal. */ if ( !m.equals( t ) || ! t.equals( m ) ) System.err.println("m: " + m + " t: " + t); ensure( m.equals( t ), "Error (" + level + ", " + seed + "): ! m.equals( t ) at start" ); ensure( t.equals( m ), "Error (" + level + ", " + seed + "): ! t.equals( m ) at start" ); /* Now we check that m actually holds that data. */ for(Iterator i=t.iterator(); i.hasNext(); ) { ensure( m.contains( i.next() ), "Error (" + level + ", " + seed + "): m and t differ on an entry after insertion (iterating on t)" ); } /* Now we check that m actually holds that data, but iterating on m. */ for(Iterator i=m.listIterator(); i.hasNext(); ) { ensure( t.contains( i.next() ), "Error (" + level + ", " + seed + "): m and t differ on an entry after insertion (iterating on m)" ); } /* Now we check that inquiries about random data give the same answer in m and t. For m we use the polymorphic method. */ for(int i=0; i n ) { m.size( n ); while( t.size() != n ) t.remove( t.size() -1 ); } /* Now we add random data in m and t using addAll on a type-specific collection, checking that the result is the same. 
*/ for(int i=0; i n ) { m.size( n ); while( t.size() != n ) t.remove( t.size() -1 ); } /* Now we add random data in m and t using addAll on a list, checking that the result is the same. */ for(int i=0; i n ) { m.size( n ); while( t.size() != n ) t.remove( t.size() -1 ); } /* Now we check that m actually holds the same data. */ for(Iterator i=t.iterator(); i.hasNext(); ) { ensure( m.contains( i.next() ), "Error (" + level + ", " + seed + "): m and t differ on an entry after removal (iterating on t)"); } /* Now we check that m actually holds that data, but iterating on m. */ for(Iterator i=m.listIterator(); i.hasNext(); ) { ensure( t.contains( i.next() ), "Error (" + level + ", " + seed + "): m and t differ on an entry after removal (iterating on m)" ); } /* Now we check that both sets agree on random keys. For m we use the standard method. */ for( int i = 0; i < n; i++ ) { int p = r.nextInt() % ( n * 2 ); mThrowsOutOfBounds = tThrowsOutOfBounds = null; try { m.get( p ); } catch ( IndexOutOfBoundsException e ) { mThrowsOutOfBounds = e; } try { t.get( p ); } catch ( IndexOutOfBoundsException e ) { tThrowsOutOfBounds = e; } ensure( ( mThrowsOutOfBounds == null ) == ( tThrowsOutOfBounds == null ), "Error (" + level + ", " + seed + "): get() divergence in IndexOutOfBoundsException for index " + p + " (" + mThrowsOutOfBounds + ", " + tThrowsOutOfBounds + ")" ); if ( mThrowsOutOfBounds == null ) ensure( t.get( p ).equals( m.get( p ) ), "Error (" + level + ", " + seed + "): m and t differ on position " + p + " (" + m.get( p ) + ", " + t.get( p ) +")" ); } /* Now we inquiry about the content with indexOf()/lastIndexOf(). */ for(int i=0; i<10*n; i++ ) { KEY_TYPE T = genKey(); ensure( m.indexOf( KEY2OBJ( T ) ) == t.indexOf( KEY2OBJ( T ) ), "Error (" + level + ", " + seed + "): indexOf() divergence for " + T + " (" + m.indexOf( KEY2OBJ( T ) ) + ", " + t.indexOf( KEY2OBJ( T ) ) + ")" ); ensure( m.lastIndexOf( KEY2OBJ( T ) ) == t.lastIndexOf( KEY2OBJ( T ) ), "Error (" + level + ", " + seed + "): lastIndexOf() divergence for " + T + " (" + m.lastIndexOf( KEY2OBJ( T ) ) + ", " + t.lastIndexOf( KEY2OBJ( T ) ) + ")" ); ensure( m.indexOf( T ) == t.indexOf( KEY2OBJ( T ) ), "Error (" + level + ", " + seed + "): polymorphic indexOf() divergence for " + T + " (" + m.indexOf( T ) + ", " + t.indexOf( KEY2OBJ( T ) ) + ")" ); ensure( m.lastIndexOf( T ) == t.lastIndexOf( KEY2OBJ( T ) ), "Error (" + level + ", " + seed + "): polymorphic lastIndexOf() divergence for " + T + " (" + m.lastIndexOf( T ) + ", " + t.lastIndexOf( KEY2OBJ( T ) ) + ")" ); } /* Now we check cloning. */ if ( level == 0 ) { ensure( m.equals( ((ARRAY_LIST)m).clone() ), "Error (" + level + ", " + seed + "): m does not equal m.clone()" ); ensure( ((ARRAY_LIST)m).clone().equals( m ), "Error (" + level + ", " + seed + "): m.clone() does not equal m" ); } /* Now we play with constructors. 
*/ ensure( m.equals( new ARRAY_LIST( (Collection)m ) ), "Error (" + level + ", " + seed + "): m does not equal new ( Collection m )" ); ensure( ( new ARRAY_LIST( (Collection)m ) ).equals( m ), "Error (" + level + ", " + seed + "): new ( Collection m )does not equal m" ); ensure( m.equals( new ARRAY_LIST( (COLLECTION)m ) ), "Error (" + level + ", " + seed + "): m does not equal new ( type-specific Collection m )" ); ensure( ( new ARRAY_LIST( (COLLECTION)m ) ).equals( m ), "Error (" + level + ", " + seed + "): new ( type-specific Collection m ) does not equal m" ); ensure( m.equals( new ARRAY_LIST( (LIST)m ) ), "Error (" + level + ", " + seed + "): m does not equal new ( type-specific List m )" ); ensure( ( new ARRAY_LIST( (LIST)m ) ).equals( m ), "Error (" + level + ", " + seed + "): new ( type-specific List m ) does not equal m" ); ensure( m.equals( new ARRAY_LIST( m.listIterator() ) ), "Error (" + level + ", " + seed + "): m does not equal new ( m.listIterator() )" ); ensure( ( new ARRAY_LIST( m.listIterator() ) ).equals( m ), "Error (" + level + ", " + seed + "): new ( m.listIterator() ) does not equal m" ); ensure( m.equals( new ARRAY_LIST( m.iterator() ) ), "Error (" + level + ", " + seed + "): m does not equal new ( m.type_specific_iterator() )" ); ensure( ( new ARRAY_LIST( m.iterator() ) ).equals( m ), "Error (" + level + ", " + seed + "): new ( m.type_specific_iterator() ) does not equal m" ); /* Now we play with conversion to array, wrapping and copying. */ ensure( m.equals( new ARRAY_LIST( m.TO_KEY_ARRAY() ) ), "Error (" + level + ", " + seed + "): m does not equal new ( toArray( m ) )" ); ensure( ( new ARRAY_LIST( m.TO_KEY_ARRAY() ) ).equals( m ), "Error (" + level + ", " + seed + "): new ( toArray( m ) ) does not equal m" ); ensure( m.equals( wrap( m.TO_KEY_ARRAY() ) ), "Error (" + level + ", " + seed + "): m does not equal wrap ( toArray( m ) )" ); ensure( ( wrap( m.TO_KEY_ARRAY() ) ).equals( m ), "Error (" + level + ", " + seed + "): wrap ( toArray( m ) ) does not equal m" ); int h = m.hashCode(); /* Now we save and read m. */ LIST m2 = null; try { java.io.File ff = new java.io.File("it.unimi.dsi.fastutil.test"); java.io.OutputStream os = new java.io.FileOutputStream(ff); java.io.ObjectOutputStream oos = new java.io.ObjectOutputStream(os); oos.writeObject(m); oos.close(); java.io.InputStream is = new java.io.FileInputStream(ff); java.io.ObjectInputStream ois = new java.io.ObjectInputStream(is); m2 = (LIST)ois.readObject(); ois.close(); ff.delete(); } catch(Exception e) { e.printStackTrace(); System.exit( 1 ); } #if ! KEY_CLASS_Reference ensure( m2.hashCode() == h, "Error (" + level + ", " + seed + "): hashCode() changed after save/read" ); /* Now we check that m2 actually holds that data. */ ensure( m2.equals(t), "Error (" + level + ", " + seed + "): ! m2.equals( t ) after save/read" ); ensure( t.equals(m2), "Error (" + level + ", " + seed + "): ! t.equals( m2 ) after save/read" ); /* Now we take out of m everything, and check that it is empty. */ for(Iterator i=t.iterator(); i.hasNext(); ) m2.remove(i.next()); ensure( m2.isEmpty(), "Error (" + level + ", " + seed + "): m2 is not empty (as it should be)" ); #endif /* Now we play with iterators. 
*/ { KEY_LIST_ITERATOR i; java.util.ListIterator j; Object J; i = m.listIterator(); j = t.listIterator(); for( int k = 0; k < 2*n; k++ ) { ensure( i.hasNext() == j.hasNext(), "Error (" + level + ", " + seed + "): divergence in hasNext()" ); ensure( i.hasPrevious() == j.hasPrevious(), "Error (" + level + ", " + seed + "): divergence in hasPrevious()" ); if ( r.nextFloat() < .8 && i.hasNext() ) { ensure( i.next().equals( J = j.next() ), "Error (" + level + ", " + seed + "): divergence in next()" ); if ( r.nextFloat() < 0.2 ) { i.remove(); j.remove(); } else if ( r.nextFloat() < 0.2 ) { KEY_TYPE T = genKey(); i.set( T ); j.set( KEY2OBJ( T ) ); } else if ( r.nextFloat() < 0.2 ) { KEY_TYPE T = genKey(); i.add( T ); j.add( KEY2OBJ( T ) ); } } else if ( r.nextFloat() < .2 && i.hasPrevious() ) { ensure( i.previous().equals( J = j.previous() ), "Error (" + level + ", " + seed + "): divergence in previous()" ); if ( r.nextFloat() < 0.2 ) { i.remove(); j.remove(); } else if ( r.nextFloat() < 0.2 ) { KEY_TYPE T = genKey(); i.set( T ); j.set( KEY2OBJ( T ) ); } else if ( r.nextFloat() < 0.2 ) { KEY_TYPE T = genKey(); i.add( T ); j.add( KEY2OBJ( T ) ); } } ensure( i.nextIndex() == j.nextIndex(), "Error (" + level + ", " + seed + "): divergence in nextIndex()" ); ensure( i.previousIndex() == j.previousIndex(), "Error (" + level + ", " + seed + "): divergence in previousIndex()" ); } } { Object previous = null; Object I, J; int from = r.nextInt( m.size() +1 ); KEY_LIST_ITERATOR i; java.util.ListIterator j; i = m.listIterator( from ); j = t.listIterator( from ); for( int k = 0; k < 2*n; k++ ) { ensure( i.hasNext() == j.hasNext(), "Error (" + level + ", " + seed + "): divergence in hasNext() (iterator with starting point " + from + ")" ); ensure( i.hasPrevious() == j.hasPrevious() , "Error (" + level + ", " + seed + "): divergence in hasPrevious() (iterator with starting point " + from + ")" ); if ( r.nextFloat() < .8 && i.hasNext() ) { ensure( ( I = i.next() ).equals( J = j.next() ), "Error (" + level + ", " + seed + "): divergence in next() (" + I + ", " + J + ", iterator with starting point " + from + ")" ); //System.err.println("Done next " + I + " " + J + " " + badPrevious); if ( r.nextFloat() < 0.2 ) { //System.err.println("Removing in next"); i.remove(); j.remove(); } else if ( r.nextFloat() < 0.2 ) { KEY_TYPE T = genKey(); i.set( T ); j.set( KEY2OBJ( T ) ); } else if ( r.nextFloat() < 0.2 ) { KEY_TYPE T = genKey(); i.add( T ); j.add( KEY2OBJ( T ) ); } } else if ( r.nextFloat() < .2 && i.hasPrevious() ) { ensure( ( I = i.previous() ).equals( J = j.previous() ), "Error (" + level + ", " + seed + "): divergence in previous() (" + I + ", " + J + ", iterator with starting point " + from + ")" ); if ( r.nextFloat() < 0.2 ) { //System.err.println("Removing in prev"); i.remove(); j.remove(); } else if ( r.nextFloat() < 0.2 ) { KEY_TYPE T = genKey(); i.set( T ); j.set( KEY2OBJ( T ) ); } else if ( r.nextFloat() < 0.2 ) { KEY_TYPE T = genKey(); i.add( T ); j.add( KEY2OBJ( T ) ); } } } } /* Now we check that m actually holds that data. */ ensure( m.equals(t), "Error (" + level + ", " + seed + "): ! m.equals( t ) after iteration" ); ensure( t.equals(m), "Error (" + level + ", " + seed + "): ! t.equals( m ) after iteration" ); /* Now we select a pair of keys and create a subset. */ if ( ! m.isEmpty() ) { int start = r.nextInt( m.size() ); int end = start + r.nextInt( m.size() - start ); //System.err.println("Checking subList from " + start + " to " + end + " (level=" + (level+1) + ")..." 
); testLists( m.subList( start, end ), t.subList( start, end ), n, level + 1 ); ensure( m.equals(t), "Error (" + level + ", " + seed + m + t + "): ! m.equals( t ) after subList" ); ensure( t.equals(m), "Error (" + level + ", " + seed + "): ! t.equals( m ) after subList" ); } m.clear(); t.clear(); ensure( m.isEmpty(), "Error (" + level + ", " + seed + "): m is not empty after clear()" ); } protected static void test( int n ) { ARRAY_LIST m = new ARRAY_LIST(); java.util.ArrayList t = new java.util.ArrayList(); topList = m; k = new Object[n]; nk = new Object[n]; kt = new KEY_TYPE[n]; nkt = new KEY_TYPE[n]; for( int i = 0; i < n; i++ ) { #if KEYS_REFERENCE k[i] = kt[i] = genKey(); nk[i] = nkt[i] = genKey(); #else k[i] = new KEY_CLASS( kt[i] = genKey() ); nk[i] = new KEY_CLASS( nkt[i] = genKey() ); #endif } /* We add pairs to t. */ for( int i = 0; i < n; i++ ) t.add( k[i] ); /* We add to m the same data */ m.addAll(t); testLists( m, t, n, 0 ); System.out.println("Test OK"); return; } public static void main( String args[] ) { int n = Integer.parseInt(args[1]); if ( args.length > 2 ) r = new java.util.Random( seed = Long.parseLong( args[ 2 ] ) ); try { if ("speedTest".equals(args[0]) || "speedComp".equals(args[0])) speedTest( n, "speedComp".equals(args[0]) ); else if ( "test".equals( args[0] ) ) test(n); } catch( Throwable e ) { e.printStackTrace( System.err ); System.err.println( "seed: " + seed ); } } #endif } fastutil-7.1.0/drv/ArrayMap.drv0000664000000000000000000002725013050701620015076 0ustar rootroot/* * Copyright (C) 2007-2016 Sebastiano Vigna * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package PACKAGE; import java.util.Map; import java.util.NoSuchElementException; import it.unimi.dsi.fastutil.objects.AbstractObjectIterator; import it.unimi.dsi.fastutil.objects.AbstractObjectSet; import it.unimi.dsi.fastutil.objects.ObjectIterator; import VALUE_PACKAGE.VALUE_COLLECTION; import VALUE_PACKAGE.VALUE_COLLECTIONS; import VALUE_PACKAGE.VALUE_ARRAY_SET; import VALUE_PACKAGE.VALUE_ARRAYS; /** A simple, brute-force implementation of a map based on two parallel backing arrays. * *

The main purpose of this * implementation is that of wrapping cleanly the brute-force approach to the storage of a very * small number of pairs: just put them into two parallel arrays and scan linearly to find an item. */ public class ARRAY_MAP KEY_VALUE_GENERIC extends ABSTRACT_MAP KEY_VALUE_GENERIC implements java.io.Serializable, Cloneable { private static final long serialVersionUID = 1L; /** The keys (valid up to {@link #size}, excluded). */ private transient KEY_TYPE[] key; /** The values (parallel to {@link #key}). */ private transient VALUE_TYPE[] value; /** The number of valid entries in {@link #key} and {@link #value}. */ private int size; /** Creates a new empty array map with given key and value backing arrays. The resulting map will have as many entries as the given arrays. * *
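For illustration, a minimal sketch of this constructor, assuming the int/int specialization (Int2IntArrayMap) that this driver generates:

    // import it.unimi.dsi.fastutil.ints.Int2IntArrayMap;
    int[] k = { 1, 2, 3 };
    int[] v = { 10, 20, 30 };
    Int2IntArrayMap m = new Int2IntArrayMap( k, v );  // backed directly by the two parallel arrays
    // m.get( 2 ) == 20, found by a linear scan of the key array
    m.put( 4, 40 );                                   // appends, reallocating the backing arrays when full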

It is responsibility of the caller that the elements of key are distinct. * * @param key the key array. * @param value the value array (it must have the same length as key). */ public ARRAY_MAP( final KEY_TYPE[] key, final VALUE_TYPE[] value ) { this.key = key; this.value = value; size = key.length; if( key.length != value.length ) throw new IllegalArgumentException( "Keys and values have different lengths (" + key.length + ", " + value.length + ")" ); } /** Creates a new empty array map. */ public ARRAY_MAP() { this.key = ARRAYS.EMPTY_ARRAY; this.value = VALUE_ARRAYS.EMPTY_ARRAY; } /** Creates a new empty array map of given capacity. * * @param capacity the initial capacity. */ public ARRAY_MAP( final int capacity ) { this.key = new KEY_TYPE[ capacity ]; this.value = new VALUE_TYPE[ capacity ]; } /** Creates a new empty array map copying the entries of a given map. * * @param m a map. */ public ARRAY_MAP( final MAP KEY_VALUE_GENERIC m ) { this( m.size() ); putAll( m ); } /** Creates a new empty array map copying the entries of a given map. * * @param m a map. */ public ARRAY_MAP( final Map m ) { this( m.size() ); putAll( m ); } /** Creates a new array map with given key and value backing arrays, using the given number of elements. * *

It is responsibility of the caller that the first size elements of key are distinct. * * @param key the key array. * @param value the value array (it must have the same length as key). * @param size the number of valid elements in key and value. */ public ARRAY_MAP( final KEY_TYPE[] key, final VALUE_TYPE[] value, final int size ) { this.key = key; this.value = value; this.size = size; if( key.length != value.length ) throw new IllegalArgumentException( "Keys and values have different lengths (" + key.length + ", " + value.length + ")" ); if ( size > key.length ) throw new IllegalArgumentException( "The provided size (" + size + ") is larger than or equal to the backing-arrays size (" + key.length + ")" ); } private final class EntrySet extends AbstractObjectSet implements FastEntrySet KEY_VALUE_GENERIC { @Override public ObjectIterator iterator() { return new AbstractObjectIterator() { int curr = -1, next = 0; public boolean hasNext() { return next < size; } SUPPRESS_WARNINGS_KEY_VALUE_UNCHECKED public Entry KEY_VALUE_GENERIC next() { if ( ! hasNext() ) throw new NoSuchElementException(); return new ABSTRACT_MAP.BasicEntry KEY_VALUE_GENERIC( KEY_GENERIC_CAST key[ curr = next ], VALUE_GENERIC_CAST value[ next++ ] ); } public void remove() { if ( curr == -1 ) throw new IllegalStateException(); curr = -1; final int tail = size-- - next--; System.arraycopy( key, next + 1, key, next, tail ); System.arraycopy( value, next + 1, value, next, tail ); #if KEYS_REFERENCE key[ size ] = null; #endif #if VALUES_REFERENCE value[ size ] = null; #endif } }; } public ObjectIterator fastIterator() { return new AbstractObjectIterator() { int next = 0, curr = -1; final BasicEntry KEY_VALUE_GENERIC entry = new BasicEntry KEY_VALUE_GENERIC ( KEY_NULL, VALUE_NULL ); public boolean hasNext() { return next < size; } SUPPRESS_WARNINGS_KEY_VALUE_UNCHECKED public Entry KEY_VALUE_GENERIC next() { if ( ! hasNext() ) throw new NoSuchElementException(); entry.key = KEY_GENERIC_CAST key[ curr = next ]; entry.value = VALUE_GENERIC_CAST value[ next++ ]; return entry; } public void remove() { if ( curr == -1 ) throw new IllegalStateException(); curr = -1; final int tail = size-- - next--; System.arraycopy( key, next + 1, key, next, tail ); System.arraycopy( value, next + 1, value, next, tail ); #if KEYS_REFERENCE key[ size ] = null; #endif #if VALUES_REFERENCE value[ size ] = null; #endif } }; } public int size() { return size; } SUPPRESS_WARNINGS_KEY_UNCHECKED public boolean contains( Object o ) { if ( ! ( o instanceof Map.Entry ) ) return false; final Map.Entry e = (Map.Entry)o; #if KEYS_PRIMITIVE if (e.getKey() == null || ! (e.getKey() instanceof KEY_CLASS)) return false; #endif #if VALUES_PRIMITIVE if (e.getValue() == null || ! (e.getValue() instanceof VALUE_CLASS)) return false; #endif final KEY_GENERIC_TYPE k = KEY_OBJ2TYPE( KEY_GENERIC_CAST e.getKey() ); return ARRAY_MAP.this.containsKey( k ) && VALUE_EQUALS( ARRAY_MAP.this.GET_VALUE( k ), VALUE_OBJ2TYPE( e.getValue() ) ); } SUPPRESS_WARNINGS_KEY_VALUE_UNCHECKED @Override public boolean remove( final Object o ) { if ( !( o instanceof Map.Entry ) ) return false; final Map.Entry e = (Map.Entry)o; #if KEYS_PRIMITIVE if (e.getKey() == null || ! (e.getKey() instanceof KEY_CLASS)) return false; #endif #if VALUES_PRIMITIVE if (e.getValue() == null || ! 
(e.getValue() instanceof VALUE_CLASS)) return false; #endif final KEY_GENERIC_TYPE k = KEY_OBJ2TYPE( KEY_GENERIC_CAST e.getKey() ); final VALUE_GENERIC_TYPE v = VALUE_OBJ2TYPE( VALUE_GENERIC_CAST e.getValue() ); final int oldPos = ARRAY_MAP.this.findKey( k ); if ( oldPos == -1 || ! VALUE_EQUALS( v, ARRAY_MAP.this.value[ oldPos ] ) ) return false; final int tail = size - oldPos - 1; System.arraycopy( ARRAY_MAP.this.key, oldPos + 1, ARRAY_MAP.this.key, oldPos, tail ); System.arraycopy( ARRAY_MAP.this.value, oldPos + 1, ARRAY_MAP.this.value, oldPos, tail ); ARRAY_MAP.this.size--; #if KEYS_REFERENCE ARRAY_MAP.this.key[ size ] = null; #endif #if VALUES_REFERENCE ARRAY_MAP.this.value[ size ] = null; #endif return true; } } public FastEntrySet KEY_VALUE_GENERIC ENTRYSET() { return new EntrySet(); } private int findKey( final KEY_TYPE k ) { final KEY_TYPE[] key = this.key; for( int i = size; i-- != 0; ) if ( KEY_EQUALS( key[ i ], k ) ) return i; return -1; } SUPPRESS_WARNINGS_VALUE_UNCHECKED #if KEYS_PRIMITIVE || VALUES_PRIMITIVE public VALUE_GENERIC_TYPE GET_VALUE( final KEY_TYPE k ) { #else public VALUE_GENERIC_TYPE get( final Object k ) { #endif final KEY_TYPE[] key = this.key; for( int i = size; i-- != 0; ) if ( KEY_EQUALS( key[ i ], k ) ) return VALUE_GENERIC_CAST value[ i ]; return defRetValue; } public int size() { return size; } @Override public void clear() { #if KEYS_REFERENCE || VALUES_REFERENCE for( int i = size; i-- != 0; ) { #if KEYS_REFERENCE key[ i ] = null; #endif #if VALUES_REFERENCE value[ i ] = null; #endif } #endif size = 0; } @Override public boolean containsKey( final KEY_TYPE k ) { return findKey( k ) != -1; } @Override public boolean containsValue( VALUE_TYPE v ) { for( int i = size; i-- != 0; ) if ( VALUE_EQUALS( value[ i ], v ) ) return true; return false; } @Override public boolean isEmpty() { return size == 0; } @Override SUPPRESS_WARNINGS_VALUE_UNCHECKED public VALUE_GENERIC_TYPE put( KEY_GENERIC_TYPE k, VALUE_GENERIC_TYPE v ) { final int oldKey = findKey( k ); if ( oldKey != -1 ) { final VALUE_GENERIC_TYPE oldValue = VALUE_GENERIC_CAST value[ oldKey ]; value[ oldKey ] = v; return oldValue; } if ( size == key.length ) { final KEY_TYPE[] newKey = new KEY_TYPE[ size == 0 ? 2 : size * 2 ]; final VALUE_TYPE[] newValue = new VALUE_TYPE[ size == 0 ? 2 : size * 2 ]; for( int i = size; i-- != 0; ) { newKey[ i ] = key[ i ]; newValue[ i ] = value[ i ]; } key = newKey; value = newValue; } key[ size ] = k; value[ size ] = v; size++; return defRetValue; } @Override SUPPRESS_WARNINGS_VALUE_UNCHECKED #if KEYS_PRIMITIVE || VALUES_PRIMITIVE public VALUE_GENERIC_TYPE REMOVE_VALUE( final KEY_TYPE k ) { #else public VALUE_GENERIC_TYPE remove( final Object k ) { #endif final int oldPos = findKey( k ); if ( oldPos == -1 ) return defRetValue; final VALUE_GENERIC_TYPE oldValue = VALUE_GENERIC_CAST value[ oldPos ]; final int tail = size - oldPos - 1; System.arraycopy( key, oldPos + 1, key, oldPos, tail ); System.arraycopy( value, oldPos + 1, value, oldPos, tail ); size--; #if KEYS_REFERENCE key[ size ] = null; #endif #if VALUES_REFERENCE value[ size ] = null; #endif return oldValue; } @Override public SET KEY_GENERIC keySet() { return new ARRAY_SET KEY_GENERIC( key, size ); } @Override public VALUE_COLLECTION VALUE_GENERIC values() { return VALUE_COLLECTIONS.unmodifiable( new VALUE_ARRAY_SET VALUE_GENERIC( value, size ) ); } /** Returns a deep copy of this map. * *

This method performs a deep copy of this hash map; the data stored in the * map, however, is not cloned. Note that this makes a difference only for object keys. * * @return a deep copy of this map. */ SUPPRESS_WARNINGS_KEY_VALUE_UNCHECKED public ARRAY_MAP KEY_VALUE_GENERIC clone() { ARRAY_MAP KEY_VALUE_GENERIC c; try { c = (ARRAY_MAP KEY_VALUE_GENERIC)super.clone(); } catch(CloneNotSupportedException cantHappen) { throw new InternalError(); } c.key = key.clone(); c.value = value.clone(); return c; } private void writeObject(java.io.ObjectOutputStream s) throws java.io.IOException { s.defaultWriteObject(); for( int i = 0; i < size; i++ ) { s.WRITE_KEY( key[ i ] ); s.WRITE_VALUE( value[ i ] ); } } private void readObject(java.io.ObjectInputStream s) throws java.io.IOException, ClassNotFoundException { s.defaultReadObject(); key = new KEY_TYPE[ size ]; value = new VALUE_TYPE[ size ]; for( int i = 0; i < size; i++ ) { key[ i ] = s.READ_KEY(); value[ i ] = s.READ_VALUE(); } } } fastutil-7.1.0/drv/ArrayPriorityQueue.drv0000664000000000000000000001543513050701620017211 0ustar rootroot/* * Copyright (C) 2003-2016 Paolo Boldi and Sebastiano Vigna * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package PACKAGE; #if KEY_CLASS_Object import java.util.Arrays; import java.util.Comparator; import it.unimi.dsi.fastutil.AbstractPriorityQueue; #endif import java.util.NoSuchElementException; /** A type-specific array-based priority queue. * *

Instances of this class represent a priority queue using a backing * array—all operations are performed directly on the array. The array is * enlarged as needed, but it is never shrunk. Use the {@link #trim()} method * to reduce its size, if necessary. * *
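A minimal usage sketch, assuming the int specialization (IntArrayPriorityQueue) generated from this driver:

    // import it.unimi.dsi.fastutil.ints.IntArrayPriorityQueue;
    IntArrayPriorityQueue q = new IntArrayPriorityQueue();
    q.enqueue( 3 );
    q.enqueue( 1 );
    q.enqueue( 2 );
    int first = q.dequeueInt();                       // 1: the smallest element, located by a linear scan
    q.trim();                                         // shrink the backing array to the current size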

This implementation is extremely inefficient, but it is difficult to beat * when the size of the queue is very small. */ public class ARRAY_PRIORITY_QUEUE KEY_GENERIC extends ABSTRACT_PRIORITY_QUEUE KEY_GENERIC implements java.io.Serializable { private static final long serialVersionUID = 1L; /** The backing array. */ SUPPRESS_WARNINGS_KEY_UNCHECKED protected transient KEY_GENERIC_TYPE array[] = KEY_GENERIC_ARRAY_CAST ARRAYS.EMPTY_ARRAY; /** The number of elements in this queue. */ protected int size; /** The type-specific comparator used in this queue. */ protected KEY_COMPARATOR KEY_SUPER_GENERIC c; /** The first index, cached, if {@link #firstIndexValid} is true. */ transient protected int firstIndex; /** Whether {@link #firstIndex} contains a valid value. */ transient protected boolean firstIndexValid; /** Creates a new empty queue with a given capacity and comparator. * * @param capacity the initial capacity of this queue. * @param c the comparator used in this queue, or null for the natural order. */ SUPPRESS_WARNINGS_KEY_UNCHECKED public ARRAY_PRIORITY_QUEUE( int capacity, KEY_COMPARATOR KEY_SUPER_GENERIC c ) { if ( capacity > 0 ) this.array = KEY_GENERIC_ARRAY_CAST new KEY_TYPE[ capacity ]; this.c = c; } /** Creates a new empty queue with a given capacity and using the natural order. * * @param capacity the initial capacity of this queue. */ public ARRAY_PRIORITY_QUEUE( int capacity ) { this( capacity, null ); } /** Creates a new empty queue with a given comparator. * * @param c the comparator used in this queue, or null for the natural order. */ public ARRAY_PRIORITY_QUEUE( KEY_COMPARATOR KEY_SUPER_GENERIC c ) { this( 0, c ); } /** Creates a new empty queue using the natural order. */ public ARRAY_PRIORITY_QUEUE() { this( 0, null ); } /** Wraps a given array in a queue using a given comparator. * *
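For instance, under the same assumption (IntArrayPriorityQueue) and using the ready-made reverse comparator from IntComparators (a name outside this file), wrapping an existing array might look like:

    // import it.unimi.dsi.fastutil.ints.IntArrayPriorityQueue;
    // import it.unimi.dsi.fastutil.ints.IntComparators;
    int[] a = { 5, 9, 1 };
    IntArrayPriorityQueue q = new IntArrayPriorityQueue( a, a.length, IntComparators.OPPOSITE_COMPARATOR );
    int max = q.dequeueInt();                         // 9: the comparator reverses the natural order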

The queue returned by this method will be backed by the given array. * * @param a an array. * @param size the number of elements to be included in the queue. * @param c the comparator used in this queue, or null for the natural order. */ public ARRAY_PRIORITY_QUEUE( final KEY_GENERIC_TYPE[] a, int size, final KEY_COMPARATOR KEY_SUPER_GENERIC c ) { this( c ); this.array = a; this.size = size; } /** Wraps a given array in a queue using a given comparator. * *

The queue returned by this method will be backed by the given array. * * @param a an array. * @param c the comparator used in this queue, or null for the natural order. */ public ARRAY_PRIORITY_QUEUE( final KEY_GENERIC_TYPE[] a, final KEY_COMPARATOR KEY_SUPER_GENERIC c ) { this( a, a.length, c ); } /** Wraps a given array in a queue using the natural order. * *

The queue returned by this method will be backed by the given array. * * @param a an array. * @param size the number of elements to be included in the queue. */ public ARRAY_PRIORITY_QUEUE( final KEY_GENERIC_TYPE[] a, int size ) { this( a, size, null ); } /** Wraps a given array in a queue using the natural order. * *

The queue returned by this method will be backed by the given array. * * @param a an array. */ public ARRAY_PRIORITY_QUEUE( final KEY_GENERIC_TYPE[] a ) { this( a, a.length ); } /** Returns the index of the smallest element. */ SUPPRESS_WARNINGS_KEY_UNCHECKED private int findFirst() { if ( firstIndexValid ) return this.firstIndex; firstIndexValid = true; int i = size; int firstIndex = --i; KEY_GENERIC_TYPE first = array[ firstIndex ]; if ( c == null ) { while( i-- != 0 ) if ( KEY_LESS( array[ i ], first ) ) first = array[ firstIndex = i ]; } else while( i-- != 0 ) { if ( c.compare( array[ i ], first ) < 0 ) first = array[ firstIndex = i ]; } return this.firstIndex = firstIndex; } private void ensureNonEmpty() { if ( size == 0 ) throw new NoSuchElementException(); } SUPPRESS_WARNINGS_KEY_UNCHECKED public void enqueue( KEY_GENERIC_TYPE x ) { if ( size == array.length ) array = ARRAYS.grow( array, size + 1 ); if ( firstIndexValid ) { if ( c == null ) { if ( KEY_LESS( x, array[ firstIndex ] ) ) firstIndex = size; } else if ( c.compare( x, array[ firstIndex ] ) < 0 ) firstIndex = size; } else firstIndexValid = false; array[ size++ ] = x; } public KEY_GENERIC_TYPE DEQUEUE() { ensureNonEmpty(); final int first = findFirst(); final KEY_GENERIC_TYPE result = array[ first ]; System.arraycopy( array, first + 1, array, first, --size - first ); #if KEY_CLASS_Object array[ size ] = null; #endif firstIndexValid = false; return result; } public KEY_GENERIC_TYPE FIRST() { ensureNonEmpty(); return array[ findFirst() ]; } public void changed() { ensureNonEmpty(); firstIndexValid = false; } public int size() { return size; } public void clear() { #if KEY_CLASS_Object Arrays.fill( array, 0, size, null ); #endif size = 0; firstIndexValid = false; } /** Trims the underlying array so that it has exactly {@link #size()} elements. */ public void trim() { array = ARRAYS.trim( array, size ); } public KEY_COMPARATOR KEY_SUPER_GENERIC comparator() { return c; } private void writeObject(java.io.ObjectOutputStream s) throws java.io.IOException { s.defaultWriteObject(); s.writeInt( array.length ); for( int i = 0; i < size; i++ ) s.WRITE_KEY( array[ i ] ); } SUPPRESS_WARNINGS_KEY_UNCHECKED private void readObject(java.io.ObjectInputStream s) throws java.io.IOException, ClassNotFoundException { s.defaultReadObject(); array = KEY_GENERIC_ARRAY_CAST new KEY_TYPE[ s.readInt() ]; for( int i = 0; i < size; i++ ) array[ i ] = KEY_GENERIC_CAST s.READ_KEY(); } } fastutil-7.1.0/drv/ArraySet.drv0000664000000000000000000001257413050701620015117 0ustar rootroot/* * Copyright (C) 2007-2016 Sebastiano Vigna * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package PACKAGE; import java.util.Collection; import java.util.NoSuchElementException; /** A simple, brute-force implementation of a set based on a backing array. * *

The main purpose of this * implementation is that of wrapping cleanly the brute-force approach to the storage of a very * small number of items: just put them into an array and scan linearly to find an item. */ public class ARRAY_SET KEY_GENERIC extends ABSTRACT_SET KEY_GENERIC implements java.io.Serializable, Cloneable { private static final long serialVersionUID = 1L; /** The backing array (valid up to {@link #size}, excluded). */ private transient KEY_TYPE[] a; /** The number of valid entries in {@link #a}. */ private int size; /** Creates a new array set using the given backing array. The resulting set will have as many elements as the array. * *
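A minimal sketch, assuming the int specialization (IntArraySet) that this driver generates:

    // import it.unimi.dsi.fastutil.ints.IntArraySet;
    IntArraySet s = new IntArraySet( new int[] { 1, 2, 3 } );   // backed directly by the given array
    // s.contains( 2 ) is true, established by a linear scan
    s.add( 4 );                                       // appends, growing the backing array if necessary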

It is responsibility of the caller that the elements of a are distinct. * * @param a the backing array. */ public ARRAY_SET( final KEY_TYPE[] a ) { this.a = a; size = a.length; } /** Creates a new empty array set. */ public ARRAY_SET() { this.a = ARRAYS.EMPTY_ARRAY; } /** Creates a new empty array set of given initial capacity. * * @param capacity the initial capacity. */ public ARRAY_SET( final int capacity ) { this.a = new KEY_TYPE[ capacity ]; } /** Creates a new array set copying the contents of a given collection. * @param c a collection. */ public ARRAY_SET( COLLECTION KEY_GENERIC c ) { this( c.size () ); addAll( c ); } /** Creates a new array set copying the contents of a given set. * @param c a collection. */ public ARRAY_SET( final Collection c ) { this( c.size() ); addAll( c ); } /** Creates a new array set using the given backing array and the given number of elements of the array. * *

It is responsibility of the caller that the first size elements of a are distinct. * * @param a the backing array. * @param size the number of valid elements in a. */ public ARRAY_SET( final KEY_TYPE[] a, final int size ) { this.a = a; this.size = size; if ( size > a.length ) throw new IllegalArgumentException( "The provided size (" + size + ") is larger than or equal to the array size (" + a.length + ")" ); } private int findKey( final KEY_TYPE o ) { for( int i = size; i-- != 0; ) if ( KEY_EQUALS( a[ i ], o ) ) return i; return -1; } @Override SUPPRESS_WARNINGS_KEY_UNCHECKED public KEY_ITERATOR KEY_GENERIC iterator() { return new KEY_ABSTRACT_ITERATOR KEY_GENERIC () { int next = 0; public boolean hasNext() { return next < size; } public KEY_GENERIC_TYPE NEXT_KEY() { if ( ! hasNext() ) throw new NoSuchElementException(); return KEY_GENERIC_CAST a[ next++ ]; } public void remove() { final int tail = size-- - next--; System.arraycopy( a, next + 1, a, next, tail ); #if KEYS_REFERENCE a[ size ] = null; #endif } }; } public boolean contains( final KEY_TYPE k ) { return findKey( k ) != -1; } public int size() { return size; } @Override public boolean rem( final KEY_TYPE k ) { final int pos = findKey( k ); if ( pos == -1 ) return false; final int tail = size - pos - 1; for( int i = 0; i < tail; i++ ) a[ pos + i ] = a[ pos + i + 1 ]; size--; #if KEYS_REFERENCE a[ size ] = null; #endif return true; } @Override public boolean add( final KEY_GENERIC_TYPE k ) { final int pos = findKey( k ); if ( pos != -1 ) return false; if ( size == a.length ) { final KEY_TYPE[] b = new KEY_TYPE[ size == 0 ? 2 : size * 2 ]; for( int i = size; i-- != 0; ) b[ i ] = a[ i ]; a = b; } a[ size++ ] = k; return true; } @Override public void clear() { #if KEYS_REFERENCE for( int i = size; i-- != 0; ) a[ i ] = null; #endif size = 0; } @Override public boolean isEmpty() { return size == 0; } /** Returns a deep copy of this set. * *

This method performs a deep copy of this hash set; the data stored in the * set, however, is not cloned. Note that this makes a difference only for object keys. * * @return a deep copy of this set. */ SUPPRESS_WARNINGS_KEY_UNCHECKED public ARRAY_SET KEY_GENERIC clone() { ARRAY_SET KEY_GENERIC c; try { c = (ARRAY_SET KEY_GENERIC)super.clone(); } catch(CloneNotSupportedException cantHappen) { throw new InternalError(); } c.a = a.clone(); return c; } private void writeObject(java.io.ObjectOutputStream s) throws java.io.IOException { s.defaultWriteObject(); for( int i = 0; i < size; i++ ) s.WRITE_KEY( a[ i ] ); } private void readObject(java.io.ObjectInputStream s) throws java.io.IOException, ClassNotFoundException { s.defaultReadObject(); a = new KEY_TYPE[ size ]; for( int i = 0; i < size; i++ ) a[ i ] = s.READ_KEY(); } } fastutil-7.1.0/drv/Arrays.drv0000664000000000000000000033545013050701620014627 0ustar rootroot/* * Copyright (C) 2002-2016 Sebastiano Vigna * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * * * * For the sorting and binary search code: * * Copyright (C) 1999 CERN - European Organization for Nuclear Research. * * Permission to use, copy, modify, distribute and sell this software and * its documentation for any purpose is hereby granted without fee, * provided that the above copyright notice appear in all copies and that * both that copyright notice and this permission notice appear in * supporting documentation. CERN makes no representations about the * suitability of this software for any purpose. It is provided "as is" * without expressed or implied warranty. */ package PACKAGE; import it.unimi.dsi.fastutil.Arrays; import it.unimi.dsi.fastutil.Hash; import java.util.Random; import java.util.concurrent.ForkJoinPool; import java.util.concurrent.RecursiveAction; #if ! KEY_CLASS_Integer import it.unimi.dsi.fastutil.ints.IntArrays; #endif #if KEYS_PRIMITIVE #if ! KEY_CLASS_Boolean import java.util.concurrent.Callable; import java.util.concurrent.ExecutorCompletionService; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.LinkedBlockingQueue; import java.util.concurrent.atomic.AtomicInteger; #endif /** A class providing static methods and objects that do useful things with type-specific arrays. * *

In particular, the ensureCapacity(), grow(), * trim() and setLength() methods make it possible to handle * arrays much like array lists. This can be very useful when efficiency (or * syntactic simplicity) reasons make array lists unsuitable. * *
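As a rough sketch of this array-list-like usage, assuming the int specialization (IntArrays) that this driver generates:

    // import it.unimi.dsi.fastutil.ints.IntArrays;
    int[] a = new int[ 16 ];
    int size = 0;
    for( int x = 0; x < 1000; x++ ) {
        a = IntArrays.grow( a, size + 1 );            // doubles the array when it is full
        a[ size++ ] = x;
    }
    a = IntArrays.trim( a, size );                    // drop the unused tail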

Note that {@link it.unimi.dsi.fastutil.io.BinIO} and {@link it.unimi.dsi.fastutil.io.TextIO} * contain several methods that make it possible to load and save arrays of primitive types as sequences * of elements in {@link java.io.DataInput} format (i.e., not as objects) or as sequences of lines of text. * *
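A minimal sketch of this kind of I/O, assuming the int variants of those methods (BinIO.storeInts()/BinIO.loadInts(), names not defined in this file) and omitting exception handling:

    // import it.unimi.dsi.fastutil.io.BinIO;
    int[] a = { 1, 2, 3 };
    BinIO.storeInts( a, "a.bin" );                    // raw DataOutput format, not serialized objects
    int[] b = BinIO.loadInts( "a.bin" );              // b now contains { 1, 2, 3 }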

Sorting

* *

There are several sorting methods available. The main theme is that of letting you choose * the sorting algorithm you prefer (i.e., trading stability of mergesort for no memory allocation in quicksort). * Several algorithms provide a parallel version that will use the {@linkplain Runtime#availableProcessors() number of cores available}. * Some algorithms also provide an explicit indirect sorting facility, which makes it possible * to sort an array using the values in another array as comparator. * *

All comparison-based algorithms have an implementation based on a type-specific comparator. * *
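For instance, a sketch of a comparator-based sort, assuming the int specializations IntArrays and AbstractIntComparator (names outside this file):

    // import it.unimi.dsi.fastutil.ints.AbstractIntComparator;
    // import it.unimi.dsi.fastutil.ints.IntArrays;
    int[] a = { -3, 1, -2 };
    IntArrays.quickSort( a, new AbstractIntComparator() {
        @Override
        public int compare( int k1, int k2 ) { return Integer.compare( Math.abs( k1 ), Math.abs( k2 ) ); }
    } );
    // a is now { 1, -2, -3 } (sorted by absolute value)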

As a general rule, sequential radix sort is significantly faster than quicksort or mergesort, in particular * on random-looking data. In * the parallel case, up to a few cores, parallel radix sort is still the fastest, but at some point quicksort * exploits parallelism better. * *

If you are fine with not knowing exactly which algorithm will be run (in particular, not knowing exactly whether a support array will be allocated), * the dual-pivot parallel sorts in {@link java.util.Arrays} * are about 50% faster than the classical single-pivot implementation used here. * *

In any case, if sorting time is important I suggest that you benchmark your sorting load * with your data distribution and on your architecture. * * @see java.util.Arrays */ public class ARRAYS { #else import java.util.Comparator; /** A class providing static methods and objects that do useful things with type-specific arrays. * * In particular, the ensureCapacity(), grow(), * trim() and setLength() methods allow to handle * arrays much like array lists. This can be very useful when efficiency (or * syntactic simplicity) reasons make array lists unsuitable. * *

Warning: if your array is not of type {@code Object[]}, * {@link #ensureCapacity(Object[],int,int)} and {@link #grow(Object[],int,int)} * will use {@linkplain java.lang.reflect.Array#newInstance(Class,int) reflection} * to preserve your array type. Reflection is significantly slower than using new. * This phenomenon is particularly * evident in the first growth phases of an array reallocated with doubling (or similar) logic. * *
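A sketch of what this means in practice, assuming the object specialization (ObjectArrays) that this driver generates:

    // import it.unimi.dsi.fastutil.objects.ObjectArrays;
    String[] a = { "a", "b" };
    String[] b = ObjectArrays.grow( a, 100 );         // allocated via reflection, so b is a String[], not an Object[]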

Sorting

* *

There are several sorting methods available. The main theme is that of letting you choose * the sorting algorithm you prefer (i.e., trading stability of mergesort for no memory allocation in quicksort). * Several algorithms provide a parallel version that will use the {@linkplain Runtime#availableProcessors() number of cores available}. * *

All comparison-based algorithms have an implementation based on a type-specific comparator. * *

If you are fine with not knowing exactly which algorithm will be run (in particular, not knowing exactly whether a support array will be allocated), * the dual-pivot parallel sorts in {@link java.util.Arrays} * are about 50% faster than the classical single-pivot implementation used here. * *

In any case, if sorting time is important I suggest that you benchmark your sorting load * with your data distribution and on your architecture. * * @see java.util.Arrays */ public class ARRAYS { #endif private ARRAYS() {} /** A static, final, empty array. */ public final static KEY_TYPE[] EMPTY_ARRAY = {}; #if KEY_CLASS_Object /** Creates a new array using the given one as prototype. * *

This method returns a new array of the given length whose element * are of the same class as of those of prototype. In case * of an empty array, it tries to return {@link #EMPTY_ARRAY}, if possible. * * @param prototype an array that will be used to type the new one. * @param length the length of the new array. * @return a new array of given type and length. */ SUPPRESS_WARNINGS_KEY_UNCHECKED private static K[] newArray( final K[] prototype, final int length ) { final Class klass = prototype.getClass(); if ( klass == Object[].class ) return (K[])( length == 0 ? EMPTY_ARRAY : new Object[ length ] ); return (K[])java.lang.reflect.Array.newInstance( klass.getComponentType(), length ); } #endif /** Ensures that an array can contain the given number of entries. * *

If you cannot foresee whether this array will need again to be * enlarged, you should probably use grow() instead. * * @param array an array. * @param length the new minimum length for this array. * @return array, if it contains length entries or more; otherwise, * an array with length entries whose first array.length * entries are the same as those of array. */ public static KEY_GENERIC KEY_GENERIC_TYPE[] ensureCapacity( final KEY_GENERIC_TYPE[] array, final int length ) { if ( length > array.length ) { final KEY_GENERIC_TYPE t[] = #if KEY_CLASS_Object newArray( array, length ); #else new KEY_TYPE[ length ]; #endif System.arraycopy( array, 0, t, 0, array.length ); return t; } return array; } /** Ensures that an array can contain the given number of entries, preserving just a part of the array. * * @param array an array. * @param length the new minimum length for this array. * @param preserve the number of elements of the array that must be preserved in case a new allocation is necessary. * @return array, if it can contain length entries or more; otherwise, * an array with length entries whose first preserve * entries are the same as those of array. */ public static KEY_GENERIC KEY_GENERIC_TYPE[] ensureCapacity( final KEY_GENERIC_TYPE[] array, final int length, final int preserve ) { if ( length > array.length ) { final KEY_GENERIC_TYPE t[] = #if KEY_CLASS_Object newArray( array, length ); #else new KEY_TYPE[ length ]; #endif System.arraycopy( array, 0, t, 0, preserve ); return t; } return array; } /** Grows the given array to the maximum between the given length and * the current length multiplied by two, provided that the given * length is larger than the current length. * *

If you want complete control on the array growth, you * should probably use ensureCapacity() instead. * * @param array an array. * @param length the new minimum length for this array. * @return array, if it can contain length * entries; otherwise, an array with * max(length,array.length/φ) entries whose first * array.length entries are the same as those of array. * */ public static KEY_GENERIC KEY_GENERIC_TYPE[] grow( final KEY_GENERIC_TYPE[] array, final int length ) { if ( length > array.length ) { final int newLength = (int)Math.max( Math.min( 2L * array.length, Arrays.MAX_ARRAY_SIZE ), length ); final KEY_GENERIC_TYPE t[] = #if KEY_CLASS_Object newArray( array, newLength ); #else new KEY_TYPE[ newLength ]; #endif System.arraycopy( array, 0, t, 0, array.length ); return t; } return array; } /** Grows the given array to the maximum between the given length and * the current length multiplied by two, provided that the given * length is larger than the current length, preserving just a part of the array. * *

If you want complete control on the array growth, you * should probably use ensureCapacity() instead. * * @param array an array. * @param length the new minimum length for this array. * @param preserve the number of elements of the array that must be preserved in case a new allocation is necessary. * @return array, if it can contain length * entries; otherwise, an array with * max(length,array.length/φ) entries whose first * preserve entries are the same as those of array. * */ public static KEY_GENERIC KEY_GENERIC_TYPE[] grow( final KEY_GENERIC_TYPE[] array, final int length, final int preserve ) { if ( length > array.length ) { final int newLength = (int)Math.max( Math.min( 2L * array.length, Arrays.MAX_ARRAY_SIZE ), length ); final KEY_GENERIC_TYPE t[] = #if KEY_CLASS_Object newArray( array, newLength ); #else new KEY_TYPE[ newLength ]; #endif System.arraycopy( array, 0, t, 0, preserve ); return t; } return array; } /** Trims the given array to the given length. * * @param array an array. * @param length the new maximum length for the array. * @return array, if it contains length * entries or less; otherwise, an array with * length entries whose entries are the same as * the first length entries of array. * */ public static KEY_GENERIC KEY_GENERIC_TYPE[] trim( final KEY_GENERIC_TYPE[] array, final int length ) { if ( length >= array.length ) return array; final KEY_GENERIC_TYPE t[] = #if KEY_CLASS_Object newArray( array, length ); #else length == 0 ? EMPTY_ARRAY : new KEY_TYPE[ length ]; #endif System.arraycopy( array, 0, t, 0, length ); return t; } /** Sets the length of the given array. * * @param array an array. * @param length the new length for the array. * @return array, if it contains exactly length * entries; otherwise, if it contains more than * length entries, an array with length entries * whose entries are the same as the first length entries of * array; otherwise, an array with length entries * whose first array.length entries are the same as those of * array. * */ public static KEY_GENERIC KEY_GENERIC_TYPE[] setLength( final KEY_GENERIC_TYPE[] array, final int length ) { if ( length == array.length ) return array; if ( length < array.length ) return trim( array, length ); return ensureCapacity( array, length ); } /** Returns a copy of a portion of an array. * * @param array an array. * @param offset the first element to copy. * @param length the number of elements to copy. * @return a new array containing length elements of array starting at offset. */ public static KEY_GENERIC KEY_GENERIC_TYPE[] copy( final KEY_GENERIC_TYPE[] array, final int offset, final int length ) { ensureOffsetLength( array, offset, length ); final KEY_GENERIC_TYPE[] a = #if KEY_CLASS_Object newArray( array, length ); #else length == 0 ? EMPTY_ARRAY : new KEY_TYPE[ length ]; #endif System.arraycopy( array, offset, a, 0, length ); return a; } /** Returns a copy of an array. * * @param array an array. * @return a copy of array. */ public static KEY_GENERIC KEY_GENERIC_TYPE[] copy( final KEY_GENERIC_TYPE[] array ) { return array.clone(); } /** Fills the given array with the given value. * * @param array an array. * @param value the new value for all elements of the array. * @deprecated Please use the corresponding {@link java.util.Arrays} method. */ @Deprecated public static KEY_GENERIC void fill( final KEY_GENERIC_TYPE[] array, final KEY_GENERIC_TYPE value ) { int i = array.length; while( i-- != 0 ) array[ i ] = value; } /** Fills a portion of the given array with the given value. 
* * @param array an array. * @param from the starting index of the portion to fill (inclusive). * @param to the end index of the portion to fill (exclusive). * @param value the new value for all elements of the specified portion of the array. * @deprecated Please use the corresponding {@link java.util.Arrays} method. */ @Deprecated public static KEY_GENERIC void fill( final KEY_GENERIC_TYPE[] array, final int from, int to, final KEY_GENERIC_TYPE value ) { ensureFromTo( array, from, to ); if ( from == 0 ) while( to-- != 0 ) array[ to ] = value; else for( int i = from; i < to; i++ ) array[ i ] = value; } /** Returns true if the two arrays are elementwise equal. * * @param a1 an array. * @param a2 another array. * @return true if the two arrays are of the same length, and their elements are equal. * @deprecated Please use the corresponding {@link java.util.Arrays} method, which is intrinsified in recent JVMs. */ @Deprecated public static KEY_GENERIC boolean equals( final KEY_GENERIC_TYPE[] a1, final KEY_GENERIC_TYPE a2[] ) { int i = a1.length; if ( i != a2.length ) return false; while( i-- != 0 ) if (! KEY_EQUALS( a1[ i ], a2[ i ] ) ) return false; return true; } /** Ensures that a range given by its first (inclusive) and last (exclusive) elements fits an array. * *

This method may be used whenever an array range check is needed. * * @param a an array. * @param from a start index (inclusive). * @param to an end index (exclusive). * @throws IllegalArgumentException if from is greater than to. * @throws ArrayIndexOutOfBoundsException if from or to are greater than the array length or negative. */ public static KEY_GENERIC void ensureFromTo( final KEY_GENERIC_TYPE[] a, final int from, final int to ) { Arrays.ensureFromTo( a.length, from, to ); } /** Ensures that a range given by an offset and a length fits an array. * *

This method may be used whenever an array range check is needed. * * @param a an array. * @param offset a start index. * @param length a length (the number of elements in the range). * @throws IllegalArgumentException if length is negative. * @throws ArrayIndexOutOfBoundsException if offset is negative or offset+length is greater than the array length. */ public static KEY_GENERIC void ensureOffsetLength( final KEY_GENERIC_TYPE[] a, final int offset, final int length ) { Arrays.ensureOffsetLength( a.length, offset, length ); } /** Ensures that two arrays are of the same length. * * @param a an array. * @param b another array. * @throws IllegalArgumentException if the two argument arrays are not of the same length. */ public static KEY_GENERIC void ensureSameLength( final KEY_GENERIC_TYPE[] a, final KEY_GENERIC_TYPE[] b ) { if ( a.length != b.length ) throw new IllegalArgumentException( "Array size mismatch: " + a.length + " != " + b.length ); } private static final int QUICKSORT_NO_REC = 16; private static final int PARALLEL_QUICKSORT_NO_FORK = 8192; private static final int QUICKSORT_MEDIAN_OF_9 = 128; private static final int MERGESORT_NO_REC = 16; /** Swaps two elements of an anrray. * * @param x an array. * @param a a position in {@code x}. * @param b another position in {@code x}. */ public static KEY_GENERIC void swap( final KEY_GENERIC_TYPE x[], final int a, final int b ) { final KEY_GENERIC_TYPE t = x[ a ]; x[ a ] = x[ b ]; x[ b ] = t; } /** Swaps two sequences of elements of an array. * * @param x an array. * @param a a position in {@code x}. * @param b another position in {@code x}. * @param n the number of elements to exchange starting at {@code a} and {@code b}. */ public static KEY_GENERIC void swap( final KEY_GENERIC_TYPE[] x, int a, int b, final int n ) { for( int i = 0; i < n; i++, a++, b++ ) swap( x, a, b ); } private static KEY_GENERIC int med3( final KEY_GENERIC_TYPE x[], final int a, final int b, final int c, KEY_COMPARATOR KEY_GENERIC comp ) { final int ab = comp.compare( x[ a ], x[ b ] ); final int ac = comp.compare( x[ a ], x[ c ] ); final int bc = comp.compare( x[ b ], x[ c ] ); return ( ab < 0 ? ( bc < 0 ? b : ac < 0 ? c : a ) : ( bc > 0 ? b : ac > 0 ? c : a ) ); } private static KEY_GENERIC void selectionSort( final KEY_GENERIC_TYPE[] a, final int from, final int to, final KEY_COMPARATOR KEY_GENERIC comp ) { for( int i = from; i < to - 1; i++ ) { int m = i; for( int j = i + 1; j < to; j++ ) if ( comp.compare( a[ j ], a[ m ] ) < 0 ) m = j; if ( m != i ) { final KEY_GENERIC_TYPE u = a[ i ]; a[ i ] = a[ m ]; a[ m ] = u; } } } private static KEY_GENERIC void insertionSort( final KEY_GENERIC_TYPE[] a, final int from, final int to, final KEY_COMPARATOR KEY_GENERIC comp ) { for ( int i = from; ++i < to; ) { KEY_GENERIC_TYPE t = a[ i ]; int j = i; for ( KEY_GENERIC_TYPE u = a[ j - 1 ]; comp.compare( t, u ) < 0; u = a[ --j - 1 ] ) { a[ j ] = u; if ( from == j - 1 ) { --j; break; } } a[ j ] = t; } } /** Sorts the specified range of elements according to the order induced by the specified * comparator using quicksort. * *

The sorting algorithm is a tuned quicksort adapted from Jon L. Bentley and M. Douglas * McIlroy, “Engineering a Sort Function”, Software: Practice and Experience, 23(11), pages * 1249−1265, 1993. * *

Note that this implementation does not allocate any object, contrarily to the implementation * used to sort primitive types in {@link java.util.Arrays}, which switches to mergesort on large inputs. * * @param x the array to be sorted. * @param from the index of the first element (inclusive) to be sorted. * @param to the index of the last element (exclusive) to be sorted. * @param comp the comparator to determine the sorting order. * */ public static KEY_GENERIC void quickSort( final KEY_GENERIC_TYPE[] x, final int from, final int to, final KEY_COMPARATOR KEY_GENERIC comp ) { final int len = to - from; // Selection sort on smallest arrays if ( len < QUICKSORT_NO_REC ) { selectionSort( x, from, to, comp ); return; } // Choose a partition element, v int m = from + len / 2; int l = from; int n = to - 1; if ( len > QUICKSORT_MEDIAN_OF_9 ) { // Big arrays, pseudomedian of 9 int s = len / 8; l = med3( x, l, l + s, l + 2 * s, comp ); m = med3( x, m - s, m, m + s, comp ); n = med3( x, n - 2 * s, n - s, n, comp ); } m = med3( x, l, m, n, comp ); // Mid-size, med of 3 final KEY_GENERIC_TYPE v = x[ m ]; // Establish Invariant: v* (v)* v* int a = from, b = a, c = to - 1, d = c; while( true ) { int comparison; while ( b <= c && ( comparison = comp.compare( x[ b ], v ) ) <= 0 ) { if ( comparison == 0 ) swap( x, a++, b ); b++; } while ( c >= b && ( comparison = comp.compare( x[ c ], v ) ) >=0 ) { if ( comparison == 0 ) swap( x, c, d-- ); c--; } if ( b > c ) break; swap( x, b++, c-- ); } // Swap partition elements back to middle int s; s = Math.min( a - from, b - a ); swap( x, from, b - s, s ); s = Math.min( d - c, to - d - 1 ); swap( x, b, to - s, s ); // Recursively sort non-partition-elements if ( ( s = b - a ) > 1 ) quickSort( x, from, from + s, comp ); if ( ( s = d - c ) > 1 ) quickSort( x, to - s, to, comp ); } /** Sorts an array according to the order induced by the specified * comparator using quicksort. * *

The sorting algorithm is a tuned quicksort adapted from Jon L. Bentley and M. Douglas * McIlroy, “Engineering a Sort Function”, Software: Practice and Experience, 23(11), pages * 1249−1265, 1993. * *

Note that this implementation does not allocate any object, contrarily to the implementation * used to sort primitive types in {@link java.util.Arrays}, which switches to mergesort on large inputs. * * @param x the array to be sorted. * @param comp the comparator to determine the sorting order. * */ public static KEY_GENERIC void quickSort( final KEY_GENERIC_TYPE[] x, final KEY_COMPARATOR KEY_GENERIC comp ) { quickSort( x, 0, x.length, comp ); } protected static class ForkJoinQuickSortComp KEY_GENERIC extends RecursiveAction { private static final long serialVersionUID = 1L; private final int from; private final int to; private final KEY_GENERIC_TYPE[] x; private final KEY_COMPARATOR KEY_GENERIC comp; public ForkJoinQuickSortComp( final KEY_GENERIC_TYPE[] x , final int from , final int to, final KEY_COMPARATOR KEY_GENERIC comp ) { this.from = from; this.to = to; this.x = x; this.comp = comp; } @Override protected void compute() { final KEY_GENERIC_TYPE[] x = this.x; final int len = to - from; if ( len < PARALLEL_QUICKSORT_NO_FORK ) { quickSort( x, from, to, comp ); return; } // Choose a partition element, v int m = from + len / 2; int l = from; int n = to - 1; int s = len / 8; l = med3( x, l, l + s, l + 2 * s, comp ); m = med3( x, m - s, m, m + s, comp ); n = med3( x, n - 2 * s, n - s, n, comp ); m = med3( x, l, m, n, comp ); final KEY_GENERIC_TYPE v = x[ m ]; // Establish Invariant: v* (v)* v* int a = from, b = a, c = to - 1, d = c; while ( true ) { int comparison; while ( b <= c && ( comparison = comp.compare( x[ b ], v ) ) <= 0 ) { if ( comparison == 0 ) swap( x, a++, b ); b++; } while ( c >= b && ( comparison = comp.compare( x[ c ], v ) ) >= 0 ) { if ( comparison == 0 ) swap( x, c, d-- ); c--; } if ( b > c ) break; swap( x, b++, c-- ); } // Swap partition elements back to middle int t; s = Math.min( a - from, b - a ); swap( x, from, b - s, s ); s = Math.min( d - c, to - d - 1 ); swap( x, b, to - s, s ); // Recursively sort non-partition-elements s = b - a; t = d - c; if ( s > 1 && t > 1 ) invokeAll( new ForkJoinQuickSortComp KEY_GENERIC( x, from, from + s, comp ), new ForkJoinQuickSortComp KEY_GENERIC( x, to - t, to, comp ) ); else if ( s > 1 ) invokeAll( new ForkJoinQuickSortComp KEY_GENERIC( x, from, from + s, comp ) ); else invokeAll( new ForkJoinQuickSortComp KEY_GENERIC( x, to - t, to, comp ) ); } } /** Sorts the specified range of elements according to the order induced by the specified * comparator using a parallel quicksort. * *

The sorting algorithm is a tuned quicksort adapted from Jon L. Bentley and M. Douglas * McIlroy, “Engineering a Sort Function”, Software: Practice and Experience, 23(11), pages * 1249−1265, 1993. * *

This implementation uses a {@link ForkJoinPool} executor service with * {@link Runtime#availableProcessors()} parallel threads. * * @param x the array to be sorted. * @param from the index of the first element (inclusive) to be sorted. * @param to the index of the last element (exclusive) to be sorted. * @param comp the comparator to determine the sorting order. */ public static KEY_GENERIC void parallelQuickSort( final KEY_GENERIC_TYPE[] x, final int from, final int to, final KEY_COMPARATOR KEY_GENERIC comp ) { if ( to - from < PARALLEL_QUICKSORT_NO_FORK ) quickSort( x, from, to, comp ); else { final ForkJoinPool pool = new ForkJoinPool( Runtime.getRuntime().availableProcessors() ); pool.invoke( new ForkJoinQuickSortComp KEY_GENERIC( x, from, to, comp ) ); pool.shutdown(); } } /** Sorts an array according to the order induced by the specified * comparator using a parallel quicksort. * *

The sorting algorithm is a tuned quicksort adapted from Jon L. Bentley and M. Douglas * McIlroy, “Engineering a Sort Function”, Software: Practice and Experience, 23(11), pages * 1249−1265, 1993. * *

This implementation uses a {@link ForkJoinPool} executor service with * {@link Runtime#availableProcessors()} parallel threads. * * @param x the array to be sorted. * @param comp the comparator to determine the sorting order. */ public static KEY_GENERIC void parallelQuickSort( final KEY_GENERIC_TYPE[] x, final KEY_COMPARATOR KEY_GENERIC comp ) { parallelQuickSort( x, 0, x.length, comp ); } SUPPRESS_WARNINGS_KEY_UNCHECKED private static KEY_GENERIC int med3( final KEY_GENERIC_TYPE x[], final int a, final int b, final int c ) { final int ab = KEY_CMP( x[ a ], x[ b ] ); final int ac = KEY_CMP( x[ a ], x[ c ] ); final int bc = KEY_CMP( x[ b ], x[ c ] ); return ( ab < 0 ? ( bc < 0 ? b : ac < 0 ? c : a ) : ( bc > 0 ? b : ac > 0 ? c : a ) ); } SUPPRESS_WARNINGS_KEY_UNCHECKED private static KEY_GENERIC void selectionSort( final KEY_GENERIC_TYPE[] a, final int from, final int to ) { for( int i = from; i < to - 1; i++ ) { int m = i; for( int j = i + 1; j < to; j++ ) if ( KEY_LESS( a[ j ], a[ m ] ) ) m = j; if ( m != i ) { final KEY_GENERIC_TYPE u = a[ i ]; a[ i ] = a[ m ]; a[ m ] = u; } } } SUPPRESS_WARNINGS_KEY_UNCHECKED private static KEY_GENERIC void insertionSort( final KEY_GENERIC_TYPE[] a, final int from, final int to ) { for ( int i = from; ++i < to; ) { KEY_GENERIC_TYPE t = a[ i ]; int j = i; for ( KEY_GENERIC_TYPE u = a[ j - 1 ]; KEY_LESS( t, u ); u = a[ --j - 1 ] ) { a[ j ] = u; if ( from == j - 1 ) { --j; break; } } a[ j ] = t; } } /** Sorts the specified range of elements according to the natural ascending order using quicksort. * *

The sorting algorithm is a tuned quicksort adapted from Jon L. Bentley and M. Douglas * McIlroy, “Engineering a Sort Function”, Software: Practice and Experience, 23(11), pages * 1249−1265, 1993. * *

Note that this implementation does not allocate any object, contrarily to the implementation * used to sort primitive types in {@link java.util.Arrays}, which switches to mergesort on large inputs. * * @param x the array to be sorted. * @param from the index of the first element (inclusive) to be sorted. * @param to the index of the last element (exclusive) to be sorted. */ SUPPRESS_WARNINGS_KEY_UNCHECKED public static KEY_GENERIC void quickSort( final KEY_GENERIC_TYPE[] x, final int from, final int to ) { final int len = to - from; // Selection sort on smallest arrays if ( len < QUICKSORT_NO_REC ) { selectionSort( x, from, to ); return; } // Choose a partition element, v int m = from + len / 2; int l = from; int n = to - 1; if ( len > QUICKSORT_MEDIAN_OF_9 ) { // Big arrays, pseudomedian of 9 int s = len / 8; l = med3( x, l, l + s, l + 2 * s ); m = med3( x, m - s, m, m + s ); n = med3( x, n - 2 * s, n - s, n ); } m = med3( x, l, m, n ); // Mid-size, med of 3 final KEY_GENERIC_TYPE v = x[ m ]; // Establish Invariant: v* (v)* v* int a = from, b = a, c = to - 1, d = c; while(true) { int comparison; while ( b <= c && ( comparison = KEY_CMP( x[ b ], v ) ) <= 0 ) { if ( comparison == 0 ) swap( x, a++, b ); b++; } while (c >= b && ( comparison = KEY_CMP( x[ c ], v ) ) >=0 ) { if ( comparison == 0 ) swap( x, c, d-- ); c--; } if ( b > c ) break; swap( x, b++, c-- ); } // Swap partition elements back to middle int s; s = Math.min( a - from, b - a ); swap( x, from, b - s, s ); s = Math.min( d - c, to - d - 1 ); swap( x, b, to - s, s ); // Recursively sort non-partition-elements if ( ( s = b - a ) > 1 ) quickSort( x, from, from + s ); if ( ( s = d - c ) > 1 ) quickSort( x, to - s, to ); } /** Sorts an array according to the natural ascending order using quicksort. * *
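A minimal sketch, assuming the int specialization (IntArrays) that this driver generates:

    // import it.unimi.dsi.fastutil.ints.IntArrays;
    int[] a = { 3, 5, 1, 2, 4 };
    IntArrays.quickSort( a, 1, 4 );                   // sorts only the range [1..4): a is now { 3, 1, 2, 5, 4 }
    IntArrays.quickSort( a );                         // a is now { 1, 2, 3, 4, 5 }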

The sorting algorithm is a tuned quicksort adapted from Jon L. Bentley and M. Douglas * McIlroy, “Engineering a Sort Function”, Software: Practice and Experience, 23(11), pages * 1249−1265, 1993. * *

Note that this implementation does not allocate any object, contrarily to the implementation * used to sort primitive types in {@link java.util.Arrays}, which switches to mergesort on large inputs. * * @param x the array to be sorted. * */ public static KEY_GENERIC void quickSort( final KEY_GENERIC_TYPE[] x ) { quickSort( x, 0, x.length ); } protected static class ForkJoinQuickSort KEY_GENERIC extends RecursiveAction { private static final long serialVersionUID = 1L; private final int from; private final int to; private final KEY_GENERIC_TYPE[] x; public ForkJoinQuickSort( final KEY_GENERIC_TYPE[] x , final int from , final int to ) { this.from = from; this.to = to; this.x = x; } @Override SUPPRESS_WARNINGS_KEY_UNCHECKED protected void compute() { final KEY_GENERIC_TYPE[] x = this.x; final int len = to - from; if ( len < PARALLEL_QUICKSORT_NO_FORK ) { quickSort( x, from, to ); return; } // Choose a partition element, v int m = from + len / 2; int l = from; int n = to - 1; int s = len / 8; l = med3( x, l, l + s, l + 2 * s ); m = med3( x, m - s, m, m + s ); n = med3( x, n - 2 * s, n - s, n ); m = med3( x, l, m, n ); final KEY_GENERIC_TYPE v = x[ m ]; // Establish Invariant: v* (v)* v* int a = from, b = a, c = to - 1, d = c; while ( true ) { int comparison; while ( b <= c && ( comparison = KEY_CMP( x[ b ], v ) ) <= 0 ) { if ( comparison == 0 ) swap( x, a++, b ); b++; } while ( c >= b && ( comparison = KEY_CMP( x[ c ], v ) ) >= 0 ) { if ( comparison == 0 ) swap( x, c, d-- ); c--; } if ( b > c ) break; swap( x, b++, c-- ); } // Swap partition elements back to middle int t; s = Math.min( a - from, b - a ); swap( x, from, b - s, s ); s = Math.min( d - c, to - d - 1 ); swap( x, b, to - s, s ); // Recursively sort non-partition-elements s = b - a; t = d - c; if ( s > 1 && t > 1 ) invokeAll( new ForkJoinQuickSort KEY_GENERIC( x, from, from + s ), new ForkJoinQuickSort KEY_GENERIC( x, to - t, to ) ); else if ( s > 1 ) invokeAll( new ForkJoinQuickSort KEY_GENERIC( x, from, from + s ) ); else invokeAll( new ForkJoinQuickSort KEY_GENERIC( x, to - t, to ) ); } } /** Sorts the specified range of elements according to the natural ascending order using a parallel quicksort. * *

The sorting algorithm is a tuned quicksort adapted from Jon L. Bentley and M. Douglas * McIlroy, “Engineering a Sort Function”, Software: Practice and Experience, 23(11), pages * 1249−1265, 1993. * *

This implementation uses a {@link ForkJoinPool} executor service with * {@link Runtime#availableProcessors()} parallel threads. * * @param x the array to be sorted. * @param from the index of the first element (inclusive) to be sorted. * @param to the index of the last element (exclusive) to be sorted. */ public static KEY_GENERIC void parallelQuickSort( final KEY_GENERIC_TYPE[] x, final int from, final int to ) { if ( to - from < PARALLEL_QUICKSORT_NO_FORK ) quickSort( x, from, to ); else { final ForkJoinPool pool = new ForkJoinPool( Runtime.getRuntime().availableProcessors() ); pool.invoke( new ForkJoinQuickSort KEY_GENERIC( x, from, to ) ); pool.shutdown(); } } /** Sorts an array according to the natural ascending order using a parallel quicksort. * *

The sorting algorithm is a tuned quicksort adapted from Jon L. Bentley and M. Douglas * McIlroy, “Engineering a Sort Function”, Software: Practice and Experience, 23(11), pages * 1249−1265, 1993. * *
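 * <p>A possible usage sketch (again assuming the {@code int} instantiation, {@code IntArrays}):
 * <pre>
 * java.util.Random r = new java.util.Random( 0 );
 * int[] a = new int[ 1 << 20 ];
 * for( int i = 0; i < a.length; i++ ) a[ i ] = r.nextInt();
 * IntArrays.parallelQuickSort( a );   // ranges below the internal threshold fall back to the sequential quickSort()
 * </pre>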

This implementation uses a {@link ForkJoinPool} executor service with * {@link Runtime#availableProcessors()} parallel threads. * * @param x the array to be sorted. * */ public static KEY_GENERIC void parallelQuickSort( final KEY_GENERIC_TYPE[] x ) { parallelQuickSort( x, 0, x.length ); } SUPPRESS_WARNINGS_KEY_UNCHECKED private static KEY_GENERIC int med3Indirect( final int perm[], final KEY_GENERIC_TYPE x[], final int a, final int b, final int c ) { final KEY_GENERIC_TYPE aa = x[ perm[ a ] ]; final KEY_GENERIC_TYPE bb = x[ perm[ b ] ]; final KEY_GENERIC_TYPE cc = x[ perm[ c ] ]; final int ab = KEY_CMP( aa, bb ); final int ac = KEY_CMP( aa, cc ); final int bc = KEY_CMP( bb, cc ); return ( ab < 0 ? ( bc < 0 ? b : ac < 0 ? c : a ) : ( bc > 0 ? b : ac > 0 ? c : a ) ); } SUPPRESS_WARNINGS_KEY_UNCHECKED private static KEY_GENERIC void insertionSortIndirect( final int[] perm, final KEY_GENERIC_TYPE[] a, final int from, final int to ) { for ( int i = from; ++i < to; ) { int t = perm[ i ]; int j = i; for ( int u = perm[ j - 1 ]; KEY_LESS( a[ t ], a[ u ] ); u = perm[ --j - 1 ] ) { perm[ j ] = u; if ( from == j - 1 ) { --j; break; } } perm[ j ] = t; } } /** Sorts the specified range of elements according to the natural ascending order using indirect quicksort. * *

The sorting algorithm is a tuned quicksort adapted from Jon L. Bentley and M. Douglas * McIlroy, “Engineering a Sort Function”, Software: Practice and Experience, 23(11), pages * 1249−1265, 1993. * *

This method implements an indirect sort. The elements of perm (which must * be exactly the numbers in the interval [0..perm.length)) will be permuted so that * x[ perm[ i ] ] ≤ x[ perm[ i + 1 ] ]. * *
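 * <p>For instance, in the {@code int} instantiation (a sketch; {@code IntArrays} is the class generated from this template):
 * <pre>
 * int[] x = { 30, 10, 20 };
 * int[] perm = { 0, 1, 2 };   // must contain exactly the indices 0, 1, 2
 * IntArrays.quickSortIndirect( perm, x, 0, x.length );
 * // perm is now { 1, 2, 0 }, while x is left untouched
 * </pre>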

Note that this implementation does not allocate any object, contrarily to the implementation * used to sort primitive types in {@link java.util.Arrays}, which switches to mergesort on large inputs. * * @param perm a permutation array indexing {@code x}. * @param x the array to be sorted. * @param from the index of the first element (inclusive) to be sorted. * @param to the index of the last element (exclusive) to be sorted. */ SUPPRESS_WARNINGS_KEY_UNCHECKED public static KEY_GENERIC void quickSortIndirect( final int[] perm, final KEY_GENERIC_TYPE[] x, final int from, final int to ) { final int len = to - from; // Selection sort on smallest arrays if ( len < QUICKSORT_NO_REC ) { insertionSortIndirect( perm, x, from, to ); return; } // Choose a partition element, v int m = from + len / 2; int l = from; int n = to - 1; if ( len > QUICKSORT_MEDIAN_OF_9 ) { // Big arrays, pseudomedian of 9 int s = len / 8; l = med3Indirect( perm, x, l, l + s, l + 2 * s ); m = med3Indirect( perm, x, m - s, m, m + s ); n = med3Indirect( perm, x, n - 2 * s, n - s, n ); } m = med3Indirect( perm, x, l, m, n ); // Mid-size, med of 3 final KEY_GENERIC_TYPE v = x[ perm[ m ] ]; // Establish Invariant: v* (v)* v* int a = from, b = a, c = to - 1, d = c; while(true) { int comparison; while ( b <= c && ( comparison = KEY_CMP( x[ perm[ b ] ], v ) ) <= 0 ) { if ( comparison == 0 ) IntArrays.swap( perm, a++, b ); b++; } while (c >= b && ( comparison = KEY_CMP( x[ perm[ c ] ], v ) ) >=0 ) { if ( comparison == 0 ) IntArrays.swap( perm, c, d-- ); c--; } if ( b > c ) break; IntArrays.swap( perm, b++, c-- ); } // Swap partition elements back to middle int s; s = Math.min( a - from, b - a ); IntArrays.swap( perm, from, b - s, s ); s = Math.min( d - c, to - d - 1 ); IntArrays.swap( perm, b, to - s, s ); // Recursively sort non-partition-elements if ( ( s = b - a ) > 1 ) quickSortIndirect( perm, x, from, from + s ); if ( ( s = d - c ) > 1 ) quickSortIndirect( perm, x, to - s, to ); } /** Sorts an array according to the natural ascending order using indirect quicksort. * *

The sorting algorithm is a tuned quicksort adapted from Jon L. Bentley and M. Douglas * McIlroy, “Engineering a Sort Function”, Software: Practice and Experience, 23(11), pages * 1249−1265, 1993. * *

This method implements an indirect sort. The elements of perm (which must * be exactly the numbers in the interval [0..perm.length)) will be permuted so that * x[ perm[ i ] ] ≤ x[ perm[ i + 1 ] ]. * *

Note that this implementation does not allocate any object, contrarily to the implementation * used to sort primitive types in {@link java.util.Arrays}, which switches to mergesort on large inputs. * * @param perm a permutation array indexing {@code x}. * @param x the array to be sorted. */ public static KEY_GENERIC void quickSortIndirect( final int perm[], final KEY_GENERIC_TYPE[] x ) { quickSortIndirect( perm, x, 0, x.length ); } protected static class ForkJoinQuickSortIndirect KEY_GENERIC extends RecursiveAction { private static final long serialVersionUID = 1L; private final int from; private final int to; private final int[] perm; private final KEY_GENERIC_TYPE[] x; public ForkJoinQuickSortIndirect( final int perm[], final KEY_GENERIC_TYPE[] x , final int from , final int to ) { this.from = from; this.to = to; this.x = x; this.perm = perm; } @Override SUPPRESS_WARNINGS_KEY_UNCHECKED protected void compute() { final KEY_GENERIC_TYPE[] x = this.x; final int len = to - from; if ( len < PARALLEL_QUICKSORT_NO_FORK ) { quickSortIndirect( perm, x, from, to ); return; } // Choose a partition element, v int m = from + len / 2; int l = from; int n = to - 1; int s = len / 8; l = med3Indirect( perm, x, l, l + s, l + 2 * s ); m = med3Indirect( perm, x, m - s, m, m + s ); n = med3Indirect( perm, x, n - 2 * s, n - s, n ); m = med3Indirect( perm, x, l, m, n ); final KEY_GENERIC_TYPE v = x[ perm[ m ] ]; // Establish Invariant: v* (v)* v* int a = from, b = a, c = to - 1, d = c; while ( true ) { int comparison; while ( b <= c && ( comparison = KEY_CMP( x[ perm[ b ] ], v ) ) <= 0 ) { if ( comparison == 0 ) IntArrays.swap( perm, a++, b ); b++; } while ( c >= b && ( comparison = KEY_CMP( x[ perm[ c ] ], v ) ) >= 0 ) { if ( comparison == 0 ) IntArrays.swap( perm, c, d-- ); c--; } if ( b > c ) break; IntArrays.swap( perm, b++, c-- ); } // Swap partition elements back to middle int t; s = Math.min( a - from, b - a ); IntArrays.swap( perm, from, b - s, s ); s = Math.min( d - c, to - d - 1 ); IntArrays.swap( perm, b, to - s, s ); // Recursively sort non-partition-elements s = b - a; t = d - c; if ( s > 1 && t > 1 ) invokeAll( new ForkJoinQuickSortIndirect KEY_GENERIC( perm, x, from, from + s ), new ForkJoinQuickSortIndirect KEY_GENERIC( perm, x, to - t, to ) ); else if ( s > 1 ) invokeAll( new ForkJoinQuickSortIndirect KEY_GENERIC( perm, x, from, from + s ) ); else invokeAll( new ForkJoinQuickSortIndirect KEY_GENERIC( perm, x, to - t, to ) ); } } /** Sorts the specified range of elements according to the natural ascending order using a parallel indirect quicksort. * *

The sorting algorithm is a tuned quicksort adapted from Jon L. Bentley and M. Douglas * McIlroy, “Engineering a Sort Function”, Software: Practice and Experience, 23(11), pages * 1249−1265, 1993. * *

This method implements an indirect sort. The elements of perm (which must * be exactly the numbers in the interval [0..perm.length)) will be permuted so that * x[ perm[ i ] ] ≤ x[ perm[ i + 1 ] ]. * *

This implementation uses a {@link ForkJoinPool} executor service with * {@link Runtime#availableProcessors()} parallel threads. * * @param perm a permutation array indexing {@code x}. * @param x the array to be sorted. * @param from the index of the first element (inclusive) to be sorted. * @param to the index of the last element (exclusive) to be sorted. */ public static KEY_GENERIC void parallelQuickSortIndirect( final int[] perm, final KEY_GENERIC_TYPE[] x, final int from, final int to ) { if ( to - from < PARALLEL_QUICKSORT_NO_FORK ) quickSortIndirect( perm, x, from, to ); else { final ForkJoinPool pool = new ForkJoinPool( Runtime.getRuntime().availableProcessors() ); pool.invoke( new ForkJoinQuickSortIndirect KEY_GENERIC( perm, x, from, to ) ); pool.shutdown(); } } /** Sorts an array according to the natural ascending order using a parallel indirect quicksort. * *

The sorting algorithm is a tuned quicksort adapted from Jon L. Bentley and M. Douglas * McIlroy, “Engineering a Sort Function”, Software: Practice and Experience, 23(11), pages * 1249−1265, 1993. * *

This method implements an indirect sort. The elements of perm (which must * be exactly the numbers in the interval [0..perm.length)) will be permuted so that * x[ perm[ i ] ] ≤ x[ perm[ i + 1 ] ]. * *

This implementation uses a {@link ForkJoinPool} executor service with * {@link Runtime#availableProcessors()} parallel threads. * * @param perm a permutation array indexing {@code x}. * @param x the array to be sorted. * */ public static KEY_GENERIC void parallelQuickSortIndirect( final int perm[], final KEY_GENERIC_TYPE[] x ) { parallelQuickSortIndirect( perm, x, 0, x.length ); } /** Stabilizes a permutation. * *

This method can be used to stabilize the permutation generated by an indirect sorting, assuming that * initially the permutation array was in ascending order (e.g., the identity, as usually happens). This method * scans the permutation, and for each non-singleton block of elements with the same associated values in {@code x}, * permutes them in ascending order. The resulting permutation corresponds to a stable sort. * *

Usually combining an unstable indirect sort and this method is more efficient than using a stable sort, * as most stable sort algorithms require a support array. * *
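 * <p>A sketch of the intended usage pattern, assuming the {@code int} instantiation ({@code IntArrays}):
 * <pre>
 * int[] x = { 2, 1, 2, 1 };
 * int[] perm = { 0, 1, 2, 3 };                          // ascending, as required
 * IntArrays.quickSortIndirect( perm, x, 0, x.length );  // unstable: ties may land in any order
 * IntArrays.stabilize( perm, x, 0, x.length );          // ties are put back in index order
 * // perm is now { 1, 3, 0, 2 }
 * </pre>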

More precisely, assuming that x[ perm[ i ] ] ≤ x[ perm[ i + 1 ] ], after * stabilization we will also have that x[ perm[ i ] ] = x[ perm[ i + 1 ] ] implies * perm[ i ] ≤ perm[ i + 1 ]. * * @param perm a permutation array indexing {@code x} so that it is sorted. * @param x the sorted array to be stabilized. * @param from the index of the first element (inclusive) to be stabilized. * @param to the index of the last element (exclusive) to be stabilized. */ public static KEY_GENERIC void stabilize( final int perm[], final KEY_GENERIC_TYPE[] x, final int from, final int to ) { int curr = from; for( int i = from + 1; i < to; i++ ) { if ( x[ perm[ i ] ] != x[ perm[ curr ] ] ) { if ( i - curr > 1 ) IntArrays.parallelQuickSort( perm, curr, i ); curr = i; } } if ( to - curr > 1 ) IntArrays.parallelQuickSort( perm, curr, to ); } /** Stabilizes a permutation. * *

This method can be used to stabilize the permutation generated by an indirect sorting, assuming that * initially the permutation array was in ascending order (e.g., the identity, as usually happens). This method * scans the permutation, and for each non-singleton block of elements with the same associated values in {@code x}, * permutes them in ascending order. The resulting permutation corresponds to a stable sort. * *

Usually combining an unstable indirect sort and this method is more efficient than using a stable sort, * as most stable sort algorithms require a support array. * *

More precisely, assuming that x[ perm[ i ] ] ≤ x[ perm[ i + 1 ] ], after * stabilization we will also have that x[ perm[ i ] ] = x[ perm[ i + 1 ] ] implies * perm[ i ] ≤ perm[ i + 1 ]. * * @param perm a permutation array indexing {@code x} so that it is sorted. * @param x the sorted array to be stabilized. */ public static KEY_GENERIC void stabilize( final int perm[], final KEY_GENERIC_TYPE[] x ) { stabilize( perm, x, 0, perm.length ); } SUPPRESS_WARNINGS_KEY_UNCHECKED private static KEY_GENERIC int med3( final KEY_GENERIC_TYPE x[], final KEY_GENERIC_TYPE[] y, final int a, final int b, final int c ) { int t; final int ab = ( t = KEY_CMP( x[ a ], x[ b ] ) ) == 0 ? KEY_CMP( y[ a ], y[ b ] ) : t; final int ac = ( t = KEY_CMP( x[ a ], x[ c ] ) ) == 0 ? KEY_CMP( y[ a ], y[ c ] ) : t; final int bc = ( t = KEY_CMP( x[ b ], x[ c ] ) ) == 0 ? KEY_CMP( y[ b ], y[ c ] ) : t; return ( ab < 0 ? ( bc < 0 ? b : ac < 0 ? c : a ) : ( bc > 0 ? b : ac > 0 ? c : a ) ); } private static KEY_GENERIC void swap( final KEY_GENERIC_TYPE x[], final KEY_GENERIC_TYPE[] y, final int a, final int b ) { final KEY_GENERIC_TYPE t = x[ a ]; final KEY_GENERIC_TYPE u = y[ a ]; x[ a ] = x[ b ]; y[ a ] = y[ b ]; x[ b ] = t; y[ b ] = u; } private static KEY_GENERIC void swap( final KEY_GENERIC_TYPE[] x, final KEY_GENERIC_TYPE[] y, int a, int b, final int n ) { for ( int i = 0; i < n; i++, a++, b++ ) swap( x, y, a, b ); } SUPPRESS_WARNINGS_KEY_UNCHECKED private static KEY_GENERIC void selectionSort( final KEY_GENERIC_TYPE[] a, final KEY_GENERIC_TYPE[] b, final int from, final int to ) { for( int i = from; i < to - 1; i++ ) { int m = i, u; for( int j = i + 1; j < to; j++ ) if ( ( u = KEY_CMP( a[ j ], a[ m ] ) ) < 0 || u == 0 && KEY_LESS( b[ j ], b[ m ] ) ) m = j; if ( m != i ) { KEY_GENERIC_TYPE t = a[ i ]; a[ i ] = a[ m ]; a[ m ] = t; t = b[ i ]; b[ i ] = b[ m ]; b[ m ] = t; } } } /** Sorts the specified range of elements of two arrays according to the natural lexicographical * ascending order using quicksort. * *

The sorting algorithm is a tuned quicksort adapted from Jon L. Bentley and M. Douglas * McIlroy, “Engineering a Sort Function”, Software: Practice and Experience, 23(11), pages * 1249−1265, 1993. * *

This method implements a lexicographical sorting of the arguments. Pairs of * elements in the same position in the two provided arrays will be considered a single key, and * permuted accordingly. In the end, either x[ i ] < x[ i + 1 ] or x[ i ] * == x[ i + 1 ] and y[ i ] ≤ y[ i + 1 ]. * * @param x the first array to be sorted. * @param y the second array to be sorted. * @param from the index of the first element (inclusive) to be sorted. * @param to the index of the last element (exclusive) to be sorted. */ SUPPRESS_WARNINGS_KEY_UNCHECKED public static KEY_GENERIC void quickSort( final KEY_GENERIC_TYPE[] x, final KEY_GENERIC_TYPE[] y, final int from, final int to ) { final int len = to - from; if ( len < QUICKSORT_NO_REC ) { selectionSort( x, y, from, to ); return; } // Choose a partition element, v int m = from + len / 2; int l = from; int n = to - 1; if ( len > QUICKSORT_MEDIAN_OF_9 ) { // Big arrays, pseudomedian of 9 int s = len / 8; l = med3( x, y, l, l + s, l + 2 * s ); m = med3( x, y, m - s, m, m + s ); n = med3( x, y, n - 2 * s, n - s, n ); } m = med3( x, y, l, m, n ); // Mid-size, med of 3 final KEY_GENERIC_TYPE v = x[ m ], w = y[ m ]; // Establish Invariant: v* (v)* v* int a = from, b = a, c = to - 1, d = c; while ( true ) { int comparison, t; while ( b <= c && ( comparison = ( t = KEY_CMP( x[ b ], v ) ) == 0 ? KEY_CMP( y[ b ], w ) : t ) <= 0 ) { if ( comparison == 0 ) swap( x, y, a++, b ); b++; } while ( c >= b && ( comparison = ( t = KEY_CMP( x[ c ], v ) ) == 0 ? KEY_CMP( y[ c ], w ) : t ) >= 0 ) { if ( comparison == 0 ) swap( x, y, c, d-- ); c--; } if ( b > c ) break; swap( x, y, b++, c-- ); } // Swap partition elements back to middle int s; s = Math.min( a - from, b - a ); swap( x, y, from, b - s, s ); s = Math.min( d - c, to - d - 1 ); swap( x, y, b, to - s, s ); // Recursively sort non-partition-elements if ( ( s = b - a ) > 1 ) quickSort( x, y, from, from + s ); if ( ( s = d - c ) > 1 ) quickSort( x, y, to - s, to ); } /** Sorts two arrays according to the natural lexicographical ascending order using quicksort. * *
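 * <p>For example (a sketch, assuming the {@code int} instantiation, {@code IntArrays}):
 * <pre>
 * int[] x = { 2, 1, 2, 1 };
 * int[] y = { 0, 3, 1, 2 };
 * IntArrays.quickSort( x, y );
 * // pairs are moved together: x is now { 1, 1, 2, 2 } and y is now { 2, 3, 0, 1 }
 * </pre>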

The sorting algorithm is a tuned quicksort adapted from Jon L. Bentley and M. Douglas * McIlroy, “Engineering a Sort Function”, Software: Practice and Experience, 23(11), pages * 1249−1265, 1993. * *

This method implements a lexicographical sorting of the arguments. Pairs of * elements in the same position in the two provided arrays will be considered a single key, and * permuted accordingly. In the end, either x[ i ] < x[ i + 1 ] or x[ i ] * == x[ i + 1 ] and y[ i ] ≤ y[ i + 1 ]. * * @param x the first array to be sorted. * @param y the second array to be sorted. */ public static KEY_GENERIC void quickSort( final KEY_GENERIC_TYPE[] x, final KEY_GENERIC_TYPE[] y ) { ensureSameLength( x, y ); quickSort( x, y, 0, x.length ); } protected static class ForkJoinQuickSort2 KEY_GENERIC extends RecursiveAction { private static final long serialVersionUID = 1L; private final int from; private final int to; private final KEY_GENERIC_TYPE[] x, y; public ForkJoinQuickSort2( final KEY_GENERIC_TYPE[] x, final KEY_GENERIC_TYPE[] y, final int from , final int to ) { this.from = from; this.to = to; this.x = x; this.y = y; } @Override SUPPRESS_WARNINGS_KEY_UNCHECKED protected void compute() { final KEY_GENERIC_TYPE[] x = this.x; final KEY_GENERIC_TYPE[] y = this.y; final int len = to - from; if ( len < PARALLEL_QUICKSORT_NO_FORK ) { quickSort( x, y, from, to ); return; } // Choose a partition element, v int m = from + len / 2; int l = from; int n = to - 1; int s = len / 8; l = med3( x, y, l, l + s, l + 2 * s ); m = med3( x, y, m - s, m, m + s ); n = med3( x, y, n - 2 * s, n - s, n ); m = med3( x, y, l, m, n ); final KEY_GENERIC_TYPE v = x[ m ], w = y[ m ]; // Establish Invariant: v* (v)* v* int a = from, b = a, c = to - 1, d = c; while ( true ) { int comparison, t; while ( b <= c && ( comparison = ( t = KEY_CMP( x[ b ], v ) ) == 0 ? KEY_CMP( y[ b ], w ) : t ) <= 0 ) { if ( comparison == 0 ) swap( x, y, a++, b ); b++; } while ( c >= b && ( comparison = ( t = KEY_CMP( x[ c ], v ) ) == 0 ? KEY_CMP( y[ c ], w ) : t ) >= 0 ) { if ( comparison == 0 ) swap( x, y, c, d-- ); c--; } if ( b > c ) break; swap( x, y, b++, c-- ); } // Swap partition elements back to middle int t; s = Math.min( a - from, b - a ); swap( x, y, from, b - s, s ); s = Math.min( d - c, to - d - 1 ); swap( x, y, b, to - s, s ); s = b - a; t = d - c; // Recursively sort non-partition-elements if ( s > 1 && t > 1 ) invokeAll( new ForkJoinQuickSort2 KEY_GENERIC( x, y, from, from + s ), new ForkJoinQuickSort2 KEY_GENERIC( x, y, to - t, to ) ); else if ( s > 1 ) invokeAll( new ForkJoinQuickSort2 KEY_GENERIC( x, y, from, from + s ) ); else invokeAll( new ForkJoinQuickSort2 KEY_GENERIC( x, y, to - t, to ) ); } } /** Sorts the specified range of elements of two arrays according to the natural lexicographical * ascending order using a parallel quicksort. * *

The sorting algorithm is a tuned quicksort adapted from Jon L. Bentley and M. Douglas * McIlroy, “Engineering a Sort Function”, Software: Practice and Experience, 23(11), pages * 1249−1265, 1993. * *

This method implements a lexicographical sorting of the arguments. Pairs of * elements in the same position in the two provided arrays will be considered a single key, and * permuted accordingly. In the end, either x[ i ] < x[ i + 1 ] or x[ i ] * == x[ i + 1 ] and y[ i ] ≤ y[ i + 1 ]. * *

This implementation uses a {@link ForkJoinPool} executor service with * {@link Runtime#availableProcessors()} parallel threads. * * @param x the first array to be sorted. * @param y the second array to be sorted. * @param from the index of the first element (inclusive) to be sorted. * @param to the index of the last element (exclusive) to be sorted. */ public static KEY_GENERIC void parallelQuickSort( final KEY_GENERIC_TYPE[] x, final KEY_GENERIC_TYPE[] y, final int from, final int to ) { if ( to - from < PARALLEL_QUICKSORT_NO_FORK ) quickSort( x, y, from, to ); else { final ForkJoinPool pool = new ForkJoinPool( Runtime.getRuntime().availableProcessors() ); pool.invoke( new ForkJoinQuickSort2 KEY_GENERIC( x, y, from, to ) ); pool.shutdown(); } } /** Sorts two arrays according to the natural lexicographical * ascending order using a parallel quicksort. * *

The sorting algorithm is a tuned quicksort adapted from Jon L. Bentley and M. Douglas * McIlroy, “Engineering a Sort Function”, Software: Practice and Experience, 23(11), pages * 1249−1265, 1993. * *

This method implements a lexicographical sorting of the arguments. Pairs of * elements in the same position in the two provided arrays will be considered a single key, and * permuted accordingly. In the end, either x[ i ] < x[ i + 1 ] or x[ i ] * == x[ i + 1 ] and y[ i ] ≤ y[ i + 1 ]. * *

This implementation uses a {@link ForkJoinPool} executor service with * {@link Runtime#availableProcessors()} parallel threads. * * @param x the first array to be sorted. * @param y the second array to be sorted. */ public static KEY_GENERIC void parallelQuickSort( final KEY_GENERIC_TYPE[] x, final KEY_GENERIC_TYPE[] y ) { ensureSameLength( x, y ); parallelQuickSort( x, y, 0, x.length ); } /** Sorts the specified range of elements according to the natural ascending order using mergesort, using a given pre-filled support array. * *
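 * <p>A sketch of how the support array is meant to be supplied (assuming the {@code int} instantiation, {@code IntArrays}):
 * <pre>
 * int[] a = { 3, 1, 2 };
 * int[] supp = a.clone();                      // must mirror a on the sorted range
 * IntArrays.mergeSort( a, 0, a.length, supp ); // no further allocation inside the sort
 * // a is now { 1, 2, 3 }
 * </pre>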

This sort is guaranteed to be stable: equal elements will not be reordered as a result * of the sort. Moreover, no support arrays will be allocated. * @param a the array to be sorted. * @param from the index of the first element (inclusive) to be sorted. * @param to the index of the last element (exclusive) to be sorted. * @param supp a support array containing at least to elements, and whose entries are identical to those * of {@code a} in the specified range. */ SUPPRESS_WARNINGS_KEY_UNCHECKED public static KEY_GENERIC void mergeSort( final KEY_GENERIC_TYPE a[], final int from, final int to, final KEY_GENERIC_TYPE supp[] ) { int len = to - from; // Insertion sort on smallest arrays if ( len < MERGESORT_NO_REC ) { insertionSort( a, from, to ); return; } // Recursively sort halves of a into supp final int mid = ( from + to ) >>> 1; mergeSort( supp, from, mid, a ); mergeSort( supp, mid, to, a ); // If list is already sorted, just copy from supp to a. This is an // optimization that results in faster sorts for nearly ordered lists. if ( KEY_LESSEQ( supp[ mid - 1 ], supp[ mid ] ) ) { System.arraycopy( supp, from, a, from, len ); return; } // Merge sorted halves (now in supp) into a for( int i = from, p = from, q = mid; i < to; i++ ) { if ( q >= to || p < mid && KEY_LESSEQ( supp[ p ], supp[ q ] ) ) a[ i ] = supp[ p++ ]; else a[ i ] = supp[ q++ ]; } } /** Sorts the specified range of elements according to the natural ascending order using mergesort. * *

This sort is guaranteed to be stable: equal elements will not be reordered as a result * of the sort. An array as large as a will be allocated by this method. * @param a the array to be sorted. * @param from the index of the first element (inclusive) to be sorted. * @param to the index of the last element (exclusive) to be sorted. */ public static KEY_GENERIC void mergeSort( final KEY_GENERIC_TYPE a[], final int from, final int to ) { mergeSort( a, from, to, a.clone() ); } /** Sorts an array according to the natural ascending order using mergesort. * *

This sort is guaranteed to be stable: equal elements will not be reordered as a result * of the sort. An array as large as a will be allocated by this method. * @param a the array to be sorted. */ public static KEY_GENERIC void mergeSort( final KEY_GENERIC_TYPE a[] ) { mergeSort( a, 0, a.length ); } /** Sorts the specified range of elements according to the order induced by the specified * comparator using mergesort, using a given pre-filled support array. * *

This sort is guaranteed to be stable: equal elements will not be reordered as a result * of the sort. Moreover, no support arrays will be allocated. * @param a the array to be sorted. * @param from the index of the first element (inclusive) to be sorted. * @param to the index of the last element (exclusive) to be sorted. * @param comp the comparator to determine the sorting order. * @param supp a support array containing at least to elements, and whose entries are identical to those * of {@code a} in the specified range. */ public static KEY_GENERIC void mergeSort( final KEY_GENERIC_TYPE a[], final int from, final int to, KEY_COMPARATOR KEY_GENERIC comp, final KEY_GENERIC_TYPE supp[] ) { int len = to - from; // Insertion sort on smallest arrays if ( len < MERGESORT_NO_REC ) { insertionSort( a, from, to, comp ); return; } // Recursively sort halves of a into supp final int mid = ( from + to ) >>> 1; mergeSort( supp, from, mid, comp, a ); mergeSort( supp, mid, to, comp, a ); // If list is already sorted, just copy from supp to a. This is an // optimization that results in faster sorts for nearly ordered lists. if ( comp.compare( supp[ mid - 1 ], supp[ mid ] ) <= 0 ) { System.arraycopy( supp, from, a, from, len ); return; } // Merge sorted halves (now in supp) into a for( int i = from, p = from, q = mid; i < to; i++ ) { if ( q >= to || p < mid && comp.compare( supp[ p ], supp[ q ] ) <= 0 ) a[ i ] = supp[ p++ ]; else a[ i ] = supp[ q++ ]; } } /** Sorts the specified range of elements according to the order induced by the specified * comparator using mergesort. * *
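 * <p>For instance, to sort in descending order (a sketch; {@code IntArrays} and {@code IntComparators} are assumed to be the {@code int} companions of this template):
 * <pre>
 * int[] a = { 1, 3, 2 };
 * IntArrays.mergeSort( a, 0, a.length, IntComparators.OPPOSITE_COMPARATOR );
 * // a is now { 3, 2, 1 }
 * </pre>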

This sort is guaranteed to be stable: equal elements will not be reordered as a result * of the sort. An array as large as a will be allocated by this method. * * @param a the array to be sorted. * @param from the index of the first element (inclusive) to be sorted. * @param to the index of the last element (exclusive) to be sorted. * @param comp the comparator to determine the sorting order. */ public static KEY_GENERIC void mergeSort( final KEY_GENERIC_TYPE a[], final int from, final int to, KEY_COMPARATOR KEY_GENERIC comp ) { mergeSort( a, from, to, comp, a.clone() ); } /** Sorts an array according to the order induced by the specified * comparator using mergesort. * *

This sort is guaranteed to be stable: equal elements will not be reordered as a result * of the sort. An array as large as a will be allocated by this method. * @param a the array to be sorted. * @param comp the comparator to determine the sorting order. */ public static KEY_GENERIC void mergeSort( final KEY_GENERIC_TYPE a[], KEY_COMPARATOR KEY_GENERIC comp ) { mergeSort( a, 0, a.length, comp ); } #if ! KEY_CLASS_Boolean /** * Searches a range of the specified array for the specified value using * the binary search algorithm. The range must be sorted prior to making this call. * If it is not sorted, the results are undefined. If the range contains multiple elements with * the specified value, there is no guarantee which one will be found. * * @param a the array to be searched. * @param from the index of the first element (inclusive) to be searched. * @param to the index of the last element (exclusive) to be searched. * @param key the value to be searched for. * @return index of the search key, if it is contained in the array; * otherwise, (-(insertion point) - 1). The insertion * point is defined as the the point at which the value would * be inserted into the array: the index of the first * element greater than the key, or the length of the array, if all * elements in the array are less than the specified key. Note * that this guarantees that the return value will be ≥ 0 if * and only if the key is found. * @see java.util.Arrays */ SUPPRESS_WARNINGS_KEY_UNCHECKED public static KEY_GENERIC int binarySearch( final KEY_GENERIC_TYPE[] a, int from, int to, final KEY_GENERIC_TYPE key ) { KEY_GENERIC_TYPE midVal; to--; while (from <= to) { final int mid = (from + to) >>> 1; midVal = a[ mid ]; #if KEYS_PRIMITIVE if (midVal < key) from = mid + 1; else if (midVal > key) to = mid - 1; else return mid; #else final int cmp = ((Comparable KEY_SUPER_GENERIC)midVal).compareTo( key ); if ( cmp < 0 ) from = mid + 1; else if (cmp > 0) to = mid - 1; else return mid; #endif } return -( from + 1 ); } /** * Searches an array for the specified value using * the binary search algorithm. The range must be sorted prior to making this call. * If it is not sorted, the results are undefined. If the range contains multiple elements with * the specified value, there is no guarantee which one will be found. * * @param a the array to be searched. * @param key the value to be searched for. * @return index of the search key, if it is contained in the array; * otherwise, (-(insertion point) - 1). The insertion * point is defined as the the point at which the value would * be inserted into the array: the index of the first * element greater than the key, or the length of the array, if all * elements in the array are less than the specified key. Note * that this guarantees that the return value will be ≥ 0 if * and only if the key is found. * @see java.util.Arrays */ public static KEY_GENERIC int binarySearch( final KEY_GENERIC_TYPE[] a, final KEY_GENERIC_TYPE key ) { return binarySearch( a, 0, a.length, key ); } /** * Searches a range of the specified array for the specified value using * the binary search algorithm and a specified comparator. The range must be sorted following the comparator prior to making this call. * If it is not sorted, the results are undefined. If the range contains multiple elements with * the specified value, there is no guarantee which one will be found. * * @param a the array to be searched. * @param from the index of the first element (inclusive) to be searched. 
* @param to the index of the last element (exclusive) to be searched. * @param key the value to be searched for. * @param c a comparator. * @return index of the search key, if it is contained in the array; * otherwise, (-(insertion point) - 1). The insertion * point is defined as the the point at which the value would * be inserted into the array: the index of the first * element greater than the key, or the length of the array, if all * elements in the array are less than the specified key. Note * that this guarantees that the return value will be ≥ 0 if * and only if the key is found. * @see java.util.Arrays */ public static KEY_GENERIC int binarySearch( final KEY_GENERIC_TYPE[] a, int from, int to, final KEY_GENERIC_TYPE key, final KEY_COMPARATOR KEY_GENERIC c ) { KEY_GENERIC_TYPE midVal; to--; while (from <= to) { final int mid = (from + to) >>> 1; midVal = a[ mid ]; final int cmp = c.compare( midVal, key ); if ( cmp < 0 ) from = mid + 1; else if (cmp > 0) to = mid - 1; else return mid; // key found } return -( from + 1 ); } /** * Searches an array for the specified value using * the binary search algorithm and a specified comparator. The range must be sorted following the comparator prior to making this call. * If it is not sorted, the results are undefined. If the range contains multiple elements with * the specified value, there is no guarantee which one will be found. * * @param a the array to be searched. * @param key the value to be searched for. * @param c a comparator. * @return index of the search key, if it is contained in the array; * otherwise, (-(insertion point) - 1). The insertion * point is defined as the the point at which the value would * be inserted into the array: the index of the first * element greater than the key, or the length of the array, if all * elements in the array are less than the specified key. Note * that this guarantees that the return value will be ≥ 0 if * and only if the key is found. * @see java.util.Arrays */ public static KEY_GENERIC int binarySearch( final KEY_GENERIC_TYPE[] a, final KEY_GENERIC_TYPE key, final KEY_COMPARATOR KEY_GENERIC c ) { return binarySearch( a, 0, a.length, key, c ); } #if KEYS_PRIMITIVE /** The size of a digit used during radix sort (must be a power of 2). */ private static final int DIGIT_BITS = 8; /** The mask to extract a digit of {@link #DIGIT_BITS} bits. */ private static final int DIGIT_MASK = ( 1 << DIGIT_BITS ) - 1; /** The number of digits per element. */ private static final int DIGITS_PER_ELEMENT = KEY_CLASS.SIZE / DIGIT_BITS; private static final int RADIXSORT_NO_REC = 1024; private static final int PARALLEL_RADIXSORT_NO_FORK = 1024; /** This method fixes negative numbers so that the combination exponent/significand is lexicographically sorted. */ #if KEY_CLASS_Double private static final long fixDouble( final double d ) { final long l = Double.doubleToLongBits( d ); return l >= 0 ? l : l ^ 0x7FFFFFFFFFFFFFFFL; } #elif KEY_CLASS_Float private static final int fixFloat( final float f ) { final int i = Float.floatToIntBits( f ); return i >= 0 ? i : i ^ 0x7FFFFFFF; } #endif /** Sorts the specified array using radix sort. * *

The sorting algorithm is a tuned radix sort adapted from Peter M. McIlroy, Keith Bostic and M. Douglas * McIlroy, “Engineering radix sort”, Computing Systems, 6(1), pages 5−27 (1993). * *
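 * <p>A minimal sketch, assuming the {@code int} instantiation ({@code IntArrays}):
 * <pre>
 * int[] a = { 5, -1, 3 };
 * IntArrays.radixSort( a );
 * // a is now { -1, 3, 5 } (the sign bit is fixed up, so signed order is respected)
 * </pre>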

This implementation is significantly faster than quicksort * already at small sizes (say, more than 10000 elements), but it can only * sort in ascending order. * * @param a the array to be sorted. */ public static void radixSort( final KEY_TYPE[] a ) { radixSort( a, 0, a.length ); } /** Sorts the specified range of an array using radix sort. * *

The sorting algorithm is a tuned radix sort adapted from Peter M. McIlroy, Keith Bostic and M. Douglas * McIlroy, “Engineering radix sort”, Computing Systems, 6(1), pages 5−27 (1993). * *

This implementation is significantly faster than quicksort * already at small sizes (say, more than 10000 elements), but it can only * sort in ascending order. * * @param a the array to be sorted. * @param from the index of the first element (inclusive) to be sorted. * @param to the index of the last element (exclusive) to be sorted. */ public static void radixSort( final KEY_TYPE[] a, final int from, final int to ) { if ( to - from < RADIXSORT_NO_REC ) { quickSort( a, from, to ); return; } final int maxLevel = DIGITS_PER_ELEMENT - 1; final int stackSize = ( ( 1 << DIGIT_BITS ) - 1 ) * ( DIGITS_PER_ELEMENT - 1 ) + 1; int stackPos = 0; final int[] offsetStack = new int[ stackSize ]; final int[] lengthStack = new int[ stackSize ]; final int[] levelStack = new int[ stackSize ]; offsetStack[ stackPos ] = from; lengthStack[ stackPos ] = to - from; levelStack[ stackPos++ ] = 0; final int[] count = new int[ 1 << DIGIT_BITS ]; final int[] pos = new int[ 1 << DIGIT_BITS ]; while( stackPos > 0 ) { final int first = offsetStack[ --stackPos ]; final int length = lengthStack[ stackPos ]; final int level = levelStack[ stackPos ]; #if KEY_CLASS_Character final int signMask = 0; #else final int signMask = level % DIGITS_PER_ELEMENT == 0 ? 1 << DIGIT_BITS - 1 : 0; #endif final int shift = ( DIGITS_PER_ELEMENT - 1 - level % DIGITS_PER_ELEMENT ) * DIGIT_BITS; // This is the shift that extract the right byte from a key // Count keys. for( int i = first + length; i-- != first; ) count[ INT( KEY2LEXINT( a[ i ] ) >>> shift & DIGIT_MASK ^ signMask ) ]++; // Compute cumulative distribution int lastUsed = -1; for ( int i = 0, p = first; i < 1 << DIGIT_BITS; i++ ) { if ( count[ i ] != 0 ) lastUsed = i; pos[ i ] = ( p += count[ i ] ); } final int end = first + length - count[ lastUsed ]; // i moves through the start of each block for( int i = first, c = -1, d; i <= end; i += count[ c ], count[ c ] = 0 ) { KEY_TYPE t = a[ i ]; c = INT( KEY2LEXINT( t ) >>> shift & DIGIT_MASK ^ signMask ); if ( i < end ) { // When all slots are OK, the last slot is necessarily OK. while ( ( d = --pos[ c ] ) > i ) { final KEY_TYPE z = t; t = a[ d ]; a[ d ] = z; c = INT( KEY2LEXINT( t ) >>> shift & DIGIT_MASK ^ signMask ); } a[ i ] = t; } if ( level < maxLevel && count[ c ] > 1 ) { if ( count[ c ] < RADIXSORT_NO_REC ) quickSort( a, i, i + count[ c ] ); else { offsetStack[ stackPos ] = i; lengthStack[ stackPos ] = count[ c ]; levelStack[ stackPos++ ] = level + 1; } } } } } protected final static class Segment { protected final int offset, length, level; protected Segment( final int offset, final int length, final int level ) { this.offset = offset; this.length = length; this.level = level; } @Override public String toString() { return "Segment [offset=" + offset + ", length=" + length + ", level=" + level + "]"; } } protected final static Segment POISON_PILL = new Segment( -1, -1, -1 ); /** Sorts the specified range of an array using parallel radix sort. * *

The sorting algorithm is a tuned radix sort adapted from Peter M. McIlroy, Keith Bostic and M. Douglas * McIlroy, “Engineering radix sort”, Computing Systems, 6(1), pages 5−27 (1993). * *

This implementation uses a pool of {@link Runtime#availableProcessors()} threads. * * @param a the array to be sorted. * @param from the index of the first element (inclusive) to be sorted. * @param to the index of the last element (exclusive) to be sorted. */ public static void parallelRadixSort( final KEY_TYPE[] a, final int from, final int to ) { if ( to - from < PARALLEL_RADIXSORT_NO_FORK ) { quickSort( a, from, to ); return; } final int maxLevel = DIGITS_PER_ELEMENT - 1; final LinkedBlockingQueue queue = new LinkedBlockingQueue(); queue.add( new Segment( from, to - from, 0 ) ); final AtomicInteger queueSize = new AtomicInteger( 1 ); final int numberOfThreads = Runtime.getRuntime().availableProcessors(); final ExecutorService executorService = Executors.newFixedThreadPool( numberOfThreads, Executors.defaultThreadFactory() ); final ExecutorCompletionService executorCompletionService = new ExecutorCompletionService( executorService ); for( int i = numberOfThreads; i-- != 0; ) executorCompletionService.submit( new Callable() { public Void call() throws Exception { final int[] count = new int[ 1 << DIGIT_BITS ]; final int[] pos = new int[ 1 << DIGIT_BITS ]; for(;;) { if ( queueSize.get() == 0 ) for( int i = numberOfThreads; i-- != 0; ) queue.add( POISON_PILL ); final Segment segment = queue.take(); if ( segment == POISON_PILL ) return null; final int first = segment.offset; final int length = segment.length; final int level = segment.level; #if KEY_CLASS_Character final int signMask = 0; #else final int signMask = level % DIGITS_PER_ELEMENT == 0 ? 1 << DIGIT_BITS - 1 : 0; #endif final int shift = ( DIGITS_PER_ELEMENT - 1 - level % DIGITS_PER_ELEMENT ) * DIGIT_BITS; // This is the shift that extract the right byte from a key // Count keys. for( int i = first + length; i-- != first; ) count[ INT( KEY2LEXINT( a[ i ] ) >>> shift & DIGIT_MASK ^ signMask ) ]++; // Compute cumulative distribution int lastUsed = -1; for( int i = 0, p = first; i < 1 << DIGIT_BITS; i++ ) { if ( count[ i ] != 0 ) lastUsed = i; pos[ i ] = ( p += count[ i ] ); } final int end = first + length - count[ lastUsed ]; // i moves through the start of each block for( int i = first, c = -1, d; i <= end; i += count[ c ], count[ c ] = 0 ) { KEY_TYPE t = a[ i ]; c = INT( KEY2LEXINT( t ) >>> shift & DIGIT_MASK ^ signMask ); if ( i < end ) { while( ( d = --pos[ c ] ) > i ) { final KEY_TYPE z = t; t = a[ d ]; a[ d ] = z; c = INT( KEY2LEXINT( t ) >>> shift & DIGIT_MASK ^ signMask ); } a[ i ] = t; } if ( level < maxLevel && count[ c ] > 1 ) { if ( count[ c ] < PARALLEL_RADIXSORT_NO_FORK ) quickSort( a, i, i + count[ c ] ); else { queueSize.incrementAndGet(); queue.add( new Segment( i, count[ c ], level + 1 ) ); } } } queueSize.decrementAndGet(); } } } ); Throwable problem = null; for( int i = numberOfThreads; i-- != 0; ) try { executorCompletionService.take().get(); } catch( Exception e ) { problem = e.getCause(); // We keep only the last one. They will be logged anyway. } executorService.shutdown(); if ( problem != null ) throw ( problem instanceof RuntimeException ) ? (RuntimeException)problem : new RuntimeException( problem ); } /** Sorts the specified array using parallel radix sort. * *

The sorting algorithm is a tuned radix sort adapted from Peter M. McIlroy, Keith Bostic and M. Douglas * McIlroy, “Engineering radix sort”, Computing Systems, 6(1), pages 5−27 (1993). * *
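 * <p>A possible usage sketch (assuming the {@code int} instantiation, {@code IntArrays}):
 * <pre>
 * java.util.Random r = new java.util.Random( 0 );
 * int[] a = new int[ 1 << 24 ];
 * for( int i = 0; i < a.length; i++ ) a[ i ] = r.nextInt();
 * IntArrays.parallelRadixSort( a );   // small inputs fall back to the sequential quickSort()
 * </pre>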

This implementation uses a pool of {@link Runtime#availableProcessors()} threads. * * @param a the array to be sorted. */ public static void parallelRadixSort( final KEY_TYPE[] a ) { parallelRadixSort( a, 0, a.length ); } /** Sorts the specified array using indirect radix sort. * *

The sorting algorithm is a tuned radix sort adapted from Peter M. McIlroy, Keith Bostic and M. Douglas * McIlroy, “Engineering radix sort”, Computing Systems, 6(1), pages 5−27 (1993). * *

This method implements an indirect sort. The elements of perm (which must * be exactly the numbers in the interval [0..perm.length)) will be permuted so that * a[ perm[ i ] ] ≤ a[ perm[ i + 1 ] ]. * *
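 * <p>For example, with a stable indirect sort (a sketch, assuming the {@code int} instantiation, {@code IntArrays}):
 * <pre>
 * int[] a = { 2, 1, 2, 1 };
 * int[] perm = { 0, 1, 2, 3 };
 * IntArrays.radixSortIndirect( perm, a, true );   // stable: ties keep their original relative order
 * // perm is now { 1, 3, 0, 2 }; a is left untouched
 * </pre>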

This implementation will allocate, in the stable case, a support array as large as perm (note that the stable * version is slightly faster). * * @param perm a permutation array indexing a. * @param a the array to be sorted. * @param stable whether the sorting algorithm should be stable. */ public static void radixSortIndirect( final int[] perm, final KEY_TYPE[] a, final boolean stable ) { radixSortIndirect( perm, a, 0, perm.length, stable ); } /** Sorts the specified array using indirect radix sort. * *

The sorting algorithm is a tuned radix sort adapted from Peter M. McIlroy, Keith Bostic and M. Douglas * McIlroy, “Engineering radix sort”, Computing Systems, 6(1), pages 5−27 (1993). * *

This method implements an indirect sort. The elements of perm (which must * be exactly the numbers in the interval [0..perm.length)) will be permuted so that * a[ perm[ i ] ] ≤ a[ perm[ i + 1 ] ]. * *

This implementation will allocate, in the stable case, a support array as large as perm (note that the stable * version is slightly faster). * * @param perm a permutation array indexing a. * @param a the array to be sorted. * @param from the index of the first element of perm (inclusive) to be permuted. * @param to the index of the last element of perm (exclusive) to be permuted. * @param stable whether the sorting algorithm should be stable. */ public static void radixSortIndirect( final int[] perm, final KEY_TYPE[] a, final int from, final int to, final boolean stable ) { if ( to - from < RADIXSORT_NO_REC ) { insertionSortIndirect( perm, a, from, to ); return; } final int maxLevel = DIGITS_PER_ELEMENT - 1; final int stackSize = ( ( 1 << DIGIT_BITS ) - 1 ) * ( DIGITS_PER_ELEMENT - 1 ) + 1; int stackPos = 0; final int[] offsetStack = new int[ stackSize ]; final int[] lengthStack = new int[ stackSize ]; final int[] levelStack = new int[ stackSize ]; offsetStack[ stackPos ] = from; lengthStack[ stackPos ] = to - from; levelStack[ stackPos++ ] = 0; final int[] count = new int[ 1 << DIGIT_BITS ]; final int[] pos = new int[ 1 << DIGIT_BITS ]; final int[] support = stable ? new int[ perm.length ] : null; while( stackPos > 0 ) { final int first = offsetStack[ --stackPos ]; final int length = lengthStack[ stackPos ]; final int level = levelStack[ stackPos ]; #if KEY_CLASS_Character final int signMask = 0; #else final int signMask = level % DIGITS_PER_ELEMENT == 0 ? 1 << DIGIT_BITS - 1 : 0; #endif final int shift = ( DIGITS_PER_ELEMENT - 1 - level % DIGITS_PER_ELEMENT ) * DIGIT_BITS; // This is the shift that extract the right byte from a key // Count keys. for( int i = first + length; i-- != first; ) count[ INT( KEY2LEXINT( a[ perm[ i ] ] ) >>> shift & DIGIT_MASK ^ signMask ) ]++; // Compute cumulative distribution int lastUsed = -1; for ( int i = 0, p = stable ? 0 : first; i < 1 << DIGIT_BITS; i++ ) { if ( count[ i ] != 0 ) lastUsed = i; pos[ i ] = ( p += count[ i ] ); } if ( stable ) { for( int i = first + length; i-- != first; ) support[ --pos[ INT( KEY2LEXINT( a[ perm[ i ] ] ) >>> shift & DIGIT_MASK ^ signMask ) ] ] = perm[ i ]; System.arraycopy( support, 0, perm, first, length ); for( int i = 0, p = first; i <= lastUsed; i++ ) { if ( level < maxLevel && count[ i ] > 1 ) { if ( count[ i ] < RADIXSORT_NO_REC ) insertionSortIndirect( perm, a, p, p + count[ i ] ); else { offsetStack[ stackPos ] = p; lengthStack[ stackPos ] = count[ i ]; levelStack[ stackPos++ ] = level + 1; } } p += count[ i ]; } java.util.Arrays.fill( count, 0 ); } else { final int end = first + length - count[ lastUsed ]; // i moves through the start of each block for( int i = first, c = -1, d; i <= end; i += count[ c ], count[ c ] = 0 ) { int t = perm[ i ]; c = INT( KEY2LEXINT( a[ t ] ) >>> shift & DIGIT_MASK ^ signMask ); if ( i < end ) { // When all slots are OK, the last slot is necessarily OK. while( ( d = --pos[ c ] ) > i ) { final int z = t; t = perm[ d ]; perm[ d ] = z; c = INT( KEY2LEXINT( a[ t ] ) >>> shift & DIGIT_MASK ^ signMask ); } perm[ i ] = t; } if ( level < maxLevel && count[ c ] > 1 ) { if ( count[ c ] < RADIXSORT_NO_REC ) insertionSortIndirect( perm, a, i, i + count[ c ] ); else { offsetStack[ stackPos ] = i; lengthStack[ stackPos ] = count[ c ]; levelStack[ stackPos++ ] = level + 1; } } } } } } /** Sorts the specified range of an array using parallel indirect radix sort. * *

The sorting algorithm is a tuned radix sort adapted from Peter M. McIlroy, Keith Bostic and M. Douglas * McIlroy, “Engineering radix sort”, Computing Systems, 6(1), pages 5−27 (1993). * *

This method implements an indirect sort. The elements of perm (which must * be exactly the numbers in the interval [0..perm.length)) will be permuted so that * a[ perm[ i ] ] ≤ a[ perm[ i + 1 ] ]. * *

This implementation uses a pool of {@link Runtime#availableProcessors()} threads. * * @param perm a permutation array indexing a. * @param a the array to be sorted. * @param from the index of the first element (inclusive) to be sorted. * @param to the index of the last element (exclusive) to be sorted. * @param stable whether the sorting algorithm should be stable. */ public static void parallelRadixSortIndirect( final int perm[], final KEY_TYPE[] a, final int from, final int to, final boolean stable ) { if ( to - from < PARALLEL_RADIXSORT_NO_FORK ) { radixSortIndirect( perm, a, from, to, stable ); return; } final int maxLevel = DIGITS_PER_ELEMENT - 1; final LinkedBlockingQueue queue = new LinkedBlockingQueue(); queue.add( new Segment( from, to - from, 0 ) ); final AtomicInteger queueSize = new AtomicInteger( 1 ); final int numberOfThreads = Runtime.getRuntime().availableProcessors(); final ExecutorService executorService = Executors.newFixedThreadPool( numberOfThreads, Executors.defaultThreadFactory() ); final ExecutorCompletionService executorCompletionService = new ExecutorCompletionService( executorService ); final int[] support = stable ? new int[ perm.length ] : null; for( int i = numberOfThreads; i-- != 0; ) executorCompletionService.submit( new Callable() { public Void call() throws Exception { final int[] count = new int[ 1 << DIGIT_BITS ]; final int[] pos = new int[ 1 << DIGIT_BITS ]; for(;;) { if ( queueSize.get() == 0 ) for( int i = numberOfThreads; i-- != 0; ) queue.add( POISON_PILL ); final Segment segment = queue.take(); if ( segment == POISON_PILL ) return null; final int first = segment.offset; final int length = segment.length; final int level = segment.level; #if KEY_CLASS_Character final int signMask = 0; #else final int signMask = level % DIGITS_PER_ELEMENT == 0 ? 1 << DIGIT_BITS - 1 : 0; #endif final int shift = ( DIGITS_PER_ELEMENT - 1 - level % DIGITS_PER_ELEMENT ) * DIGIT_BITS; // This is the shift that extract the right byte from a key // Count keys. for( int i = first + length; i-- != first; ) count[ INT( KEY2LEXINT( a[ perm[ i ] ] ) >>> shift & DIGIT_MASK ^ signMask ) ]++; // Compute cumulative distribution int lastUsed = -1; for ( int i = 0, p = first; i < 1 << DIGIT_BITS; i++ ) { if ( count[ i ] != 0 ) lastUsed = i; pos[ i ] = ( p += count[ i ] ); } if ( stable ) { for( int i = first + length; i-- != first; ) support[ --pos[ INT( KEY2LEXINT( a[ perm[ i ] ] ) >>> shift & DIGIT_MASK ^ signMask ) ] ] = perm[ i ]; System.arraycopy( support, first, perm, first, length ); for( int i = 0, p = first; i <= lastUsed; i++ ) { if ( level < maxLevel && count[ i ] > 1 ) { if ( count[ i ] < PARALLEL_RADIXSORT_NO_FORK ) radixSortIndirect( perm, a, p, p + count[ i ], stable ); else { queueSize.incrementAndGet(); queue.add( new Segment( p, count[ i ], level + 1 ) ); } } p += count[ i ]; } java.util.Arrays.fill( count, 0 ); } else { final int end = first + length - count[ lastUsed ]; // i moves through the start of each block for( int i = first, c = -1, d; i <= end; i += count[ c ], count[ c ] = 0 ) { int t = perm[ i ]; c = INT( KEY2LEXINT( a[ t ] ) >>> shift & DIGIT_MASK ^ signMask ); if ( i < end ) { // When all slots are OK, the last slot is necessarily OK. 
while( ( d = --pos[ c ] ) > i ) { final int z = t; t = perm[ d ]; perm[ d ] = z; c = INT( KEY2LEXINT( a[ t ] ) >>> shift & DIGIT_MASK ^ signMask ); } perm[ i ] = t; } if ( level < maxLevel && count[ c ] > 1 ) { if ( count[ c ] < PARALLEL_RADIXSORT_NO_FORK ) radixSortIndirect( perm, a, i, i + count[ c ], stable ); else { queueSize.incrementAndGet(); queue.add( new Segment( i, count[ c ], level + 1 ) ); } } } } queueSize.decrementAndGet(); } } } ); Throwable problem = null; for( int i = numberOfThreads; i-- != 0; ) try { executorCompletionService.take().get(); } catch( Exception e ) { problem = e.getCause(); // We keep only the last one. They will be logged anyway. } executorService.shutdown(); if ( problem != null ) throw ( problem instanceof RuntimeException ) ? (RuntimeException)problem : new RuntimeException( problem ); } /** Sorts the specified array using parallel indirect radix sort. * *

The sorting algorithm is a tuned radix sort adapted from Peter M. McIlroy, Keith Bostic and M. Douglas * McIlroy, “Engineering radix sort”, Computing Systems, 6(1), pages 5−27 (1993). * *

This method implements an indirect sort. The elements of perm (which must * be exactly the numbers in the interval [0..perm.length)) will be permuted so that * a[ perm[ i ] ] ≤ a[ perm[ i + 1 ] ]. * *

This implementation uses a pool of {@link Runtime#availableProcessors()} threads. * * @param perm a permutation array indexing a. * @param a the array to be sorted. * @param stable whether the sorting algorithm should be stable. */ public static void parallelRadixSortIndirect( final int perm[], final KEY_TYPE[] a, final boolean stable ) { parallelRadixSortIndirect( perm, a, 0, a.length, stable ); } /** Sorts the specified pair of arrays lexicographically using radix sort. *

The sorting algorithm is a tuned radix sort adapted from Peter M. McIlroy, Keith Bostic and M. Douglas * McIlroy, “Engineering radix sort”, Computing Systems, 6(1), pages 5−27 (1993). * *

This method implements a lexicographical sorting of the arguments. Pairs of elements * in the same position in the two provided arrays will be considered a single key, and permuted * accordingly. In the end, either a[ i ] < a[ i + 1 ] or a[ i ] == a[ i + 1 ] and b[ i ] ≤ b[ i + 1 ]. * * @param a the first array to be sorted. * @param b the second array to be sorted. */ public static void radixSort( final KEY_TYPE[] a, final KEY_TYPE[] b ) { ensureSameLength( a, b ); radixSort( a, b, 0, a.length ); } /** Sorts the specified range of elements of two arrays using radix sort. * *
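 * <p>For example (a sketch, assuming the {@code int} instantiation, {@code IntArrays}):
 * <pre>
 * int[] a = { 2, 1, 2, 1 };
 * int[] b = { 0, 3, 1, 2 };
 * IntArrays.radixSort( a, b, 0, a.length );
 * // a is now { 1, 1, 2, 2 } and b is now { 2, 3, 0, 1 }
 * </pre>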

The sorting algorithm is a tuned radix sort adapted from Peter M. McIlroy, Keith Bostic and M. Douglas * McIlroy, “Engineering radix sort”, Computing Systems, 6(1), pages 5−27 (1993). * *

This method implements a lexicographical sorting of the arguments. Pairs of elements * in the same position in the two provided arrays will be considered a single key, and permuted * accordingly. In the end, either a[ i ] < a[ i + 1 ] or a[ i ] == a[ i + 1 ] and b[ i ] ≤ b[ i + 1 ]. * * @param a the first array to be sorted. * @param b the second array to be sorted. * @param from the index of the first element (inclusive) to be sorted. * @param to the index of the last element (exclusive) to be sorted. */ public static void radixSort( final KEY_TYPE[] a, final KEY_TYPE[] b, final int from, final int to ) { if ( to - from < RADIXSORT_NO_REC ) { selectionSort( a, b, from, to ); return; } final int layers = 2; final int maxLevel = DIGITS_PER_ELEMENT * layers - 1; final int stackSize = ( ( 1 << DIGIT_BITS ) - 1 ) * ( layers * DIGITS_PER_ELEMENT - 1 ) + 1; int stackPos = 0; final int[] offsetStack = new int[ stackSize ]; final int[] lengthStack = new int[ stackSize ]; final int[] levelStack = new int[ stackSize ]; offsetStack[ stackPos ] = from; lengthStack[ stackPos ] = to - from; levelStack[ stackPos++ ] = 0; final int[] count = new int[ 1 << DIGIT_BITS ]; final int[] pos = new int[ 1 << DIGIT_BITS ]; while( stackPos > 0 ) { final int first = offsetStack[ --stackPos ]; final int length = lengthStack[ stackPos ]; final int level = levelStack[ stackPos ]; #if KEY_CLASS_Character final int signMask = 0; #else final int signMask = level % DIGITS_PER_ELEMENT == 0 ? 1 << DIGIT_BITS - 1 : 0; #endif final KEY_TYPE[] k = level < DIGITS_PER_ELEMENT ? a : b; // This is the key array final int shift = ( DIGITS_PER_ELEMENT - 1 - level % DIGITS_PER_ELEMENT ) * DIGIT_BITS; // This is the shift that extract the right byte from a key // Count keys. for( int i = first + length; i-- != first; ) count[ INT( KEY2LEXINT( k[ i ] ) >>> shift & DIGIT_MASK ^ signMask ) ]++; // Compute cumulative distribution int lastUsed = -1; for ( int i = 0, p = first; i < 1 << DIGIT_BITS; i++ ) { if ( count[ i ] != 0 ) lastUsed = i; pos[ i ] = ( p += count[ i ] ); } final int end = first + length - count[ lastUsed ]; // i moves through the start of each block for( int i = first, c = -1, d; i <= end; i += count[ c ], count[ c ] = 0 ) { KEY_TYPE t = a[ i ]; KEY_TYPE u = b[ i ]; c = INT( KEY2LEXINT( k[ i ] ) >>> shift & DIGIT_MASK ^ signMask ); if ( i < end ) { // When all slots are OK, the last slot is necessarily OK. while( ( d = --pos[ c ] ) > i ) { c = INT( KEY2LEXINT( k[ d ] ) >>> shift & DIGIT_MASK ^ signMask ); KEY_TYPE z = t; t = a[ d ]; a[ d ] = z; z = u; u = b[ d ]; b[ d ] = z; } a[ i ] = t; b[ i ] = u; } if ( level < maxLevel && count[ c ] > 1 ) { if ( count[ c ] < RADIXSORT_NO_REC ) selectionSort( a, b, i, i + count[ c ] ); else { offsetStack[ stackPos ] = i; lengthStack[ stackPos ] = count[ c ]; levelStack[ stackPos++ ] = level + 1; } } } } } /** Sorts the specified range of elements of two arrays using a parallel radix sort. * *

The sorting algorithm is a tuned radix sort adapted from Peter M. McIlroy, Keith Bostic and M. Douglas * McIlroy, “Engineering radix sort”, Computing Systems, 6(1), pages 5−27 (1993). * *

This method implements a lexicographical sorting of the arguments. Pairs of elements * in the same position in the two provided arrays will be considered a single key, and permuted * accordingly. In the end, either a[ i ] < a[ i + 1 ] or a[ i ] == a[ i + 1 ] and b[ i ] ≤ b[ i + 1 ]. * *

This implementation uses a pool of {@link Runtime#availableProcessors()} threads. * * @param a the first array to be sorted. * @param b the second array to be sorted. * @param from the index of the first element (inclusive) to be sorted. * @param to the index of the last element (exclusive) to be sorted. */ public static void parallelRadixSort( final KEY_TYPE[] a, final KEY_TYPE[] b, final int from, final int to ) { if ( to - from < PARALLEL_RADIXSORT_NO_FORK ) { quickSort( a, b, from, to ); return; } final int layers = 2; if ( a.length != b.length ) throw new IllegalArgumentException( "Array size mismatch." ); final int maxLevel = DIGITS_PER_ELEMENT * layers - 1; final LinkedBlockingQueue queue = new LinkedBlockingQueue(); queue.add( new Segment( from, to - from, 0 ) ); final AtomicInteger queueSize = new AtomicInteger( 1 ); final int numberOfThreads = Runtime.getRuntime().availableProcessors(); final ExecutorService executorService = Executors.newFixedThreadPool( numberOfThreads, Executors.defaultThreadFactory() ); final ExecutorCompletionService executorCompletionService = new ExecutorCompletionService( executorService ); for ( int i = numberOfThreads; i-- != 0; ) executorCompletionService.submit( new Callable() { public Void call() throws Exception { final int[] count = new int[ 1 << DIGIT_BITS ]; final int[] pos = new int[ 1 << DIGIT_BITS ]; for ( ;; ) { if ( queueSize.get() == 0 ) for ( int i = numberOfThreads; i-- != 0; ) queue.add( POISON_PILL ); final Segment segment = queue.take(); if ( segment == POISON_PILL ) return null; final int first = segment.offset; final int length = segment.length; final int level = segment.level; final int signMask = level % DIGITS_PER_ELEMENT == 0 ? 1 << DIGIT_BITS - 1 : 0; final KEY_TYPE[] k = level < DIGITS_PER_ELEMENT ? a : b; // This is the key array final int shift = ( DIGITS_PER_ELEMENT - 1 - level % DIGITS_PER_ELEMENT ) * DIGIT_BITS; // Count keys. for ( int i = first + length; i-- != first; ) count[ INT( KEY2LEXINT( k[ i ] ) >>> shift & DIGIT_MASK ^ signMask ) ]++; // Compute cumulative distribution int lastUsed = -1; for ( int i = 0, p = first; i < 1 << DIGIT_BITS; i++ ) { if ( count[ i ] != 0 ) lastUsed = i; pos[ i ] = ( p += count[ i ] ); } final int end = first + length - count[ lastUsed ]; for ( int i = first, c = -1, d; i <= end; i += count[ c ], count[ c ] = 0 ) { KEY_TYPE t = a[ i ]; KEY_TYPE u = b[ i ]; c = INT( KEY2LEXINT( k[ i ] ) >>> shift & DIGIT_MASK ^ signMask ); if ( i < end ) { // When all slots are OK, the last slot is necessarily OK. while ( ( d = --pos[ c ] ) > i ) { c = INT( KEY2LEXINT( k[ d ] ) >>> shift & DIGIT_MASK ^ signMask ); final KEY_TYPE z = t; final KEY_TYPE w = u; t = a[ d ]; u = b[ d ]; a[ d ] = z; b[ d ] = w; } a[ i ] = t; b[ i ] = u; } if ( level < maxLevel && count[ c ] > 1 ) { if ( count[ c ] < PARALLEL_RADIXSORT_NO_FORK ) quickSort( a, b, i, i + count[ c ] ); else { queueSize.incrementAndGet(); queue.add( new Segment( i, count[ c ], level + 1 ) ); } } } queueSize.decrementAndGet(); } } } ); Throwable problem = null; for ( int i = numberOfThreads; i-- != 0; ) try { executorCompletionService.take().get(); } catch ( Exception e ) { problem = e.getCause(); // We keep only the last one. They will be logged anyway. } executorService.shutdown(); if ( problem != null ) throw ( problem instanceof RuntimeException ) ? (RuntimeException)problem : new RuntimeException( problem ); } /** Sorts two arrays using a parallel radix sort. * *

The sorting algorithm is a tuned radix sort adapted from Peter M. McIlroy, Keith Bostic and M. Douglas * McIlroy, “Engineering radix sort”, Computing Systems, 6(1), pages 5−27 (1993). * *

This method implements a lexicographical sorting of the arguments. Pairs of elements * in the same position in the two provided arrays will be considered a single key, and permuted * accordingly. In the end, either a[ i ] < a[ i + 1 ] or a[ i ] == a[ i + 1 ] and b[ i ] ≤ b[ i + 1 ]. * *

This implementation uses a pool of {@link Runtime#availableProcessors()} threads. * * @param a the first array to be sorted. * @param b the second array to be sorted. */ public static void parallelRadixSort( final KEY_TYPE[] a, final KEY_TYPE[] b ) { ensureSameLength( a, b ); parallelRadixSort( a, b, 0, a.length ); } private static KEY_GENERIC void insertionSortIndirect( final int[] perm, final KEY_TYPE[] a, final KEY_TYPE[] b, final int from, final int to ) { for ( int i = from; ++i < to; ) { int t = perm[ i ]; int j = i; for ( int u = perm[ j - 1 ]; KEY_LESS( a[ t ], a[ u ] ) || KEY_CMP_EQ( a[ t ], a[ u ] ) && KEY_LESS( b[ t ], b[ u ] ); u = perm[ --j - 1 ] ) { perm[ j ] = u; if ( from == j - 1 ) { --j; break; } } perm[ j ] = t; } } /** Sorts the specified pair of arrays lexicographically using indirect radix sort. * *

The sorting algorithm is a tuned radix sort adapted from Peter M. McIlroy, Keith Bostic and M. Douglas * McIlroy, “Engineering radix sort”, Computing Systems, 6(1), pages 5−27 (1993). * *

This method implements an indirect sort. The elements of perm (which must * be exactly the numbers in the interval [0..perm.length)) will be permuted so that * a[ perm[ i ] ] ≤ a[ perm[ i + 1 ] ]. * *

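A minimal sketch of the indirect behaviour (assuming the int specialization IntArrays; values are illustrative):
 *
 *   int[] a = { 10, 10, 4 };
 *   int[] b = { 7, 1, 9 };
 *   int[] perm = { 0, 1, 2 };
 *   IntArrays.radixSortIndirect( perm, a, b, true );
 *   // a and b are untouched; perm is now { 2, 1, 0 }, so reading the pairs
 *   // through perm yields (4,9), (10,1), (10,7) in order.
 *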
This implementation will allocate, in the stable case, a further support array as large as perm (note that the stable * version is slightly faster). * * @param perm a permutation array indexing a. * @param a the array to be sorted. * @param b the second array to be sorted. * @param stable whether the sorting algorithm should be stable. */ public static void radixSortIndirect( final int[] perm, final KEY_TYPE[] a, final KEY_TYPE[] b, final boolean stable ) { ensureSameLength( a, b ); radixSortIndirect( perm, a, b, 0, a.length, stable ); } /** Sorts the specified pair of arrays lexicographically using indirect radix sort. * *

The sorting algorithm is a tuned radix sort adapted from Peter M. McIlroy, Keith Bostic and M. Douglas * McIlroy, “Engineering radix sort”, Computing Systems, 6(1), pages 5−27 (1993). * *

This method implements an indirect sort. The elements of perm (which must * be exactly the numbers in the interval [0..perm.length)) will be permuted so that * a[ perm[ i ] ] ≤ a[ perm[ i + 1 ] ]. * *

This implementation will allocate, in the stable case, a further support array as large as perm (note that the stable * version is slightly faster). * * @param perm a permutation array indexing a. * @param a the array to be sorted. * @param b the second array to be sorted. * @param from the index of the first element of perm (inclusive) to be permuted. * @param to the index of the last element of perm (exclusive) to be permuted. * @param stable whether the sorting algorithm should be stable. */ public static void radixSortIndirect( final int[] perm, final KEY_TYPE[] a, final KEY_TYPE[] b, final int from, final int to, final boolean stable ) { if ( to - from < RADIXSORT_NO_REC ) { insertionSortIndirect( perm, a, b, from, to ); return; } final int layers = 2; final int maxLevel = DIGITS_PER_ELEMENT * layers - 1; final int stackSize = ( ( 1 << DIGIT_BITS ) - 1 ) * ( layers * DIGITS_PER_ELEMENT - 1 ) + 1; int stackPos = 0; final int[] offsetStack = new int[ stackSize ]; final int[] lengthStack = new int[ stackSize ]; final int[] levelStack = new int[ stackSize ]; offsetStack[ stackPos ] = from; lengthStack[ stackPos ] = to - from; levelStack[ stackPos++ ] = 0; final int[] count = new int[ 1 << DIGIT_BITS ]; final int[] pos = new int[ 1 << DIGIT_BITS ]; final int[] support = stable ? new int[ perm.length ] : null; while( stackPos > 0 ) { final int first = offsetStack[ --stackPos ]; final int length = lengthStack[ stackPos ]; final int level = levelStack[ stackPos ]; #if KEY_CLASS_Character final int signMask = 0; #else final int signMask = level % DIGITS_PER_ELEMENT == 0 ? 1 << DIGIT_BITS - 1 : 0; #endif final KEY_TYPE[] k = level < DIGITS_PER_ELEMENT ? a : b; // This is the key array final int shift = ( DIGITS_PER_ELEMENT - 1 - level % DIGITS_PER_ELEMENT ) * DIGIT_BITS; // This is the shift that extract the right byte from a key // Count keys. for( int i = first + length; i-- != first; ) count[ INT( KEY2LEXINT( k[ perm[ i ] ] ) >>> shift & DIGIT_MASK ^ signMask ) ]++; // Compute cumulative distribution int lastUsed = -1; for ( int i = 0, p = stable ? 0 : first; i < 1 << DIGIT_BITS; i++ ) { if ( count[ i ] != 0 ) lastUsed = i; pos[ i ] = ( p += count[ i ] ); } if ( stable ) { for( int i = first + length; i-- != first; ) support[ --pos[ INT( KEY2LEXINT( k[ perm[ i ] ] ) >>> shift & DIGIT_MASK ^ signMask ) ] ] = perm[ i ]; System.arraycopy( support, 0, perm, first, length ); for( int i = 0, p = first; i < 1 << DIGIT_BITS; i++ ) { if ( level < maxLevel && count[ i ] > 1 ) { if ( count[ i ] < RADIXSORT_NO_REC ) insertionSortIndirect( perm, a, b, p, p + count[ i ] ); else { offsetStack[ stackPos ] = p; lengthStack[ stackPos ] = count[ i ]; levelStack[ stackPos++ ] = level + 1; } } p += count[ i ]; } java.util.Arrays.fill( count, 0 ); } else { final int end = first + length - count[ lastUsed ]; // i moves through the start of each block for( int i = first, c = -1, d; i <= end; i += count[ c ], count[ c ] = 0 ) { int t = perm[ i ]; c = INT( KEY2LEXINT( k[ t ] ) >>> shift & DIGIT_MASK ^ signMask ); if ( i < end ) { // When all slots are OK, the last slot is necessarily OK. 
while( ( d = --pos[ c ] ) > i ) { final int z = t; t = perm[ d ]; perm[ d ] = z; c = INT( KEY2LEXINT( k[ t ] ) >>> shift & DIGIT_MASK ^ signMask ); } perm[ i ] = t; } if ( level < maxLevel && count[ c ] > 1 ) { if ( count[ c ] < RADIXSORT_NO_REC ) insertionSortIndirect( perm, a, b, i, i + count[ c ] ); else { offsetStack[ stackPos ] = i; lengthStack[ stackPos ] = count[ c ]; levelStack[ stackPos++ ] = level + 1; } } } } } } private static void selectionSort( final KEY_TYPE[][] a, final int from, final int to, final int level ) { final int layers = a.length; final int firstLayer = level / DIGITS_PER_ELEMENT; for( int i = from; i < to - 1; i++ ) { int m = i; for( int j = i + 1; j < to; j++ ) { for( int p = firstLayer; p < layers; p++ ) { if ( a[ p ][ j ] < a[ p ][ m ] ) { m = j; break; } else if ( a[ p ][ j ] > a[ p ][ m ] ) break; } } if ( m != i ) { for( int p = layers; p-- != 0; ) { final KEY_TYPE u = a[ p ][ i ]; a[ p ][ i ] = a[ p ][ m ]; a[ p ][ m ] = u; } } } } /** Sorts the specified array of arrays lexicographically using radix sort. * *

The sorting algorithm is a tuned radix sort adapted from Peter M. McIlroy, Keith Bostic and M. Douglas * McIlroy, “Engineering radix sort”, Computing Systems, 6(1), pages 5−27 (1993). * *

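A minimal sketch of the effect on a two-row matrix (the int specialization IntArrays and the values are assumptions for illustration):
 *
 *   int[] row0 = { 2, 1, 2 };
 *   int[] row1 = { 5, 9, 0 };
 *   IntArrays.radixSort( new int[][] { row0, row1 } );
 *   // Columns are treated as tuples and sorted: row0 is now { 1, 2, 2 }
 *   // and row1 is now { 9, 0, 5 }.
 *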
This method implements a lexicographical sorting of the provided arrays. Tuples of elements * in the same position will be considered a single key, and permuted * accordingly. * * @param a an array containing arrays of equal length to be sorted lexicographically in parallel. */ public static void radixSort( final KEY_TYPE[][] a ) { radixSort( a, 0, a[ 0 ].length ); } /** Sorts the specified array of arrays lexicographically using radix sort. * *

The sorting algorithm is a tuned radix sort adapted from Peter M. McIlroy, Keith Bostic and M. Douglas * McIlroy, “Engineering radix sort”, Computing Systems, 6(1), pages 5−27 (1993). * *

This method implements a lexicographical sorting of the provided arrays. Tuples of elements * in the same position will be considered a single key, and permuted * accordingly. * * @param a an array containing arrays of equal length to be sorted lexicographically in parallel. * @param from the index of the first element (inclusive) to be sorted. * @param to the index of the last element (exclusive) to be sorted. */ public static void radixSort( final KEY_TYPE[][] a, final int from, final int to ) { if ( to - from < RADIXSORT_NO_REC ) { selectionSort( a, from, to, 0 ); return; } final int layers = a.length; final int maxLevel = DIGITS_PER_ELEMENT * layers - 1; for( int p = layers, l = a[ 0 ].length; p-- != 0; ) if ( a[ p ].length != l ) throw new IllegalArgumentException( "The array of index " + p + " has not the same length of the array of index 0." ); final int stackSize = ( ( 1 << DIGIT_BITS ) - 1 ) * ( layers * DIGITS_PER_ELEMENT - 1 ) + 1; int stackPos = 0; final int[] offsetStack = new int[ stackSize ]; final int[] lengthStack = new int[ stackSize ]; final int[] levelStack = new int[ stackSize ]; offsetStack[ stackPos ] = from; lengthStack[ stackPos ] = to - from; levelStack[ stackPos++ ] = 0; final int[] count = new int[ 1 << DIGIT_BITS ]; final int[] pos = new int[ 1 << DIGIT_BITS ]; final KEY_TYPE[] t = new KEY_TYPE[ layers ]; while( stackPos > 0 ) { final int first = offsetStack[ --stackPos ]; final int length = lengthStack[ stackPos ]; final int level = levelStack[ stackPos ]; #if KEY_CLASS_Character final int signMask = 0; #else final int signMask = level % DIGITS_PER_ELEMENT == 0 ? 1 << DIGIT_BITS - 1 : 0; #endif final KEY_TYPE[] k = a[ level / DIGITS_PER_ELEMENT ]; // This is the key array final int shift = ( DIGITS_PER_ELEMENT - 1 - level % DIGITS_PER_ELEMENT ) * DIGIT_BITS; // This is the shift that extract the right byte from a key // Count keys. for( int i = first + length; i-- != first; ) count[ INT( KEY2LEXINT( k[ i ] ) >>> shift & DIGIT_MASK ^ signMask ) ]++; // Compute cumulative distribution int lastUsed = -1; for ( int i = 0, p = first; i < 1 << DIGIT_BITS; i++ ) { if ( count[ i ] != 0 ) lastUsed = i; pos[ i ] = ( p += count[ i ] ); } final int end = first + length - count[ lastUsed ]; // i moves through the start of each block for( int i = first, c = -1, d; i <= end; i += count[ c ], count[ c ] = 0 ) { for( int p = layers; p-- != 0; ) t[ p ] = a[ p ][ i ]; c = INT( KEY2LEXINT( k[ i ] ) >>> shift & DIGIT_MASK ^ signMask ); if ( i < end ) { // When all slots are OK, the last slot is necessarily OK. while( ( d = --pos[ c ] ) > i ) { c = INT( KEY2LEXINT( k[ d ] ) >>> shift & DIGIT_MASK ^ signMask ); for( int p = layers; p-- != 0; ) { final KEY_TYPE u = t[ p ]; t[ p ] = a[ p ][ d ]; a[ p ][ d ] = u; } } for( int p = layers; p-- != 0; ) a[ p ][ i ] = t[ p ]; } if ( level < maxLevel && count[ c ] > 1 ) { if ( count[ c ] < RADIXSORT_NO_REC ) selectionSort( a, i, i + count[ c ], level + 1 ); else { offsetStack[ stackPos ] = i; lengthStack[ stackPos ] = count[ c ]; levelStack[ stackPos++ ] = level + 1; } } } } } #endif #endif /** Shuffles the specified array fragment using the specified pseudorandom number generator. * * @param a the array to be shuffled. * @param from the index of the first element (inclusive) to be shuffled. * @param to the index of the last element (exclusive) to be shuffled. * @param random a pseudorandom number generator (please use a XorShift* generator). * @return a. 
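 *
 * A minimal usage sketch (the int specialization IntArrays, the seed and the values are assumptions for illustration; as noted above, a XorShift* generator would be preferable to java.util.Random):
 *
 *   int[] a = { 0, 1, 2, 3, 4, 5 };
 *   IntArrays.shuffle( a, 1, 4, new java.util.Random( 0 ) );
 *   // only a[ 1 ], a[ 2 ] and a[ 3 ] are permuted; a[ 0 ], a[ 4 ] and a[ 5 ] stay in place.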
*/ public static KEY_GENERIC KEY_GENERIC_TYPE[] shuffle( final KEY_GENERIC_TYPE[] a, final int from, final int to, final Random random ) { for( int i = to - from; i-- != 0; ) { final int p = random.nextInt( i + 1 ); final KEY_GENERIC_TYPE t = a[ from + i ]; a[ from + i ] = a[ from + p ]; a[ from + p ] = t; } return a; } /** Shuffles the specified array using the specified pseudorandom number generator. * * @param a the array to be shuffled. * @param random a pseudorandom number generator (please use a XorShift* generator). * @return a. */ public static KEY_GENERIC KEY_GENERIC_TYPE[] shuffle( final KEY_GENERIC_TYPE[] a, final Random random ) { for( int i = a.length; i-- != 0; ) { final int p = random.nextInt( i + 1 ); final KEY_GENERIC_TYPE t = a[ i ]; a[ i ] = a[ p ]; a[ p ] = t; } return a; } /** Reverses the order of the elements in the specified array. * * @param a the array to be reversed. * @return a. */ public static KEY_GENERIC KEY_GENERIC_TYPE[] reverse( final KEY_GENERIC_TYPE[] a ) { final int length = a.length; for( int i = length / 2; i-- != 0; ) { final KEY_GENERIC_TYPE t = a[ length - i - 1 ]; a[ length - i - 1 ] = a[ i ]; a[ i ] = t; } return a; } /** Reverses the order of the elements in the specified array fragment. * * @param a the array to be reversed. * @param from the index of the first element (inclusive) to be reversed. * @param to the index of the last element (exclusive) to be reversed. * @return a. */ public static KEY_GENERIC KEY_GENERIC_TYPE[] reverse( final KEY_GENERIC_TYPE[] a, final int from, final int to ) { final int length = to - from; for( int i = length / 2; i-- != 0; ) { final KEY_GENERIC_TYPE t = a[ from + length - i - 1 ]; a[ from + length - i - 1 ] = a[ from + i ]; a[ from + i ] = t; } return a; } /** A type-specific content-based hash strategy for arrays. */ private static final class ArrayHashStrategy KEY_GENERIC implements Hash.Strategy, java.io.Serializable { private static final long serialVersionUID = -7046029254386353129L; public int hashCode( final KEY_GENERIC_TYPE[] o ) { return java.util.Arrays.hashCode( o ); } public boolean equals( final KEY_GENERIC_TYPE[] a, final KEY_GENERIC_TYPE[] b ) { return java.util.Arrays.equals( a, b ); } } /** A type-specific content-based hash strategy for arrays. * *

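A typical use (a sketch: the int specialization IntArrays and the custom hash set below are illustrative; the raw-typed strategy constant causes an unchecked conversion):
 *
 *   ObjectOpenCustomHashSet<int[]> seen = new ObjectOpenCustomHashSet<int[]>( IntArrays.HASH_STRATEGY );
 *   seen.add( new int[] { 1, 2, 3 } );
 *   seen.contains( new int[] { 1, 2, 3 } );   // true: keys are compared by content, not by reference
 *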
This hash strategy may be used in custom hash collections whenever keys are * arrays, and they must be considered equal by content. This strategy * will handle null correctly, and it is serializable. */ #if KEYS_PRIMITIVE public final static Hash.Strategy HASH_STRATEGY = new ArrayHashStrategy(); #else @SuppressWarnings({"rawtypes"}) public final static Hash.Strategy HASH_STRATEGY = new ArrayHashStrategy(); #endif } fastutil-7.1.0/drv/BidirectionalIterator.drv0000664000000000000000000000351713050701620017644 0ustar rootroot/* * Copyright (C) 2002-2016 Sebastiano Vigna * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package PACKAGE; import it.unimi.dsi.fastutil.BidirectionalIterator; #if KEYS_PRIMITIVE import it.unimi.dsi.fastutil.objects.ObjectBidirectionalIterator; #endif /** A type-specific bidirectional iterator; provides an additional method to avoid (un)boxing, * and the possibility to skip elements backwards. * * @see BidirectionalIterator */ #if KEYS_PRIMITIVE public interface KEY_BIDI_ITERATOR KEY_GENERIC extends KEY_ITERATOR KEY_GENERIC, ObjectBidirectionalIterator { #else public interface KEY_BIDI_ITERATOR KEY_GENERIC extends KEY_ITERATOR KEY_GENERIC, BidirectionalIterator { #endif #if KEYS_PRIMITIVE /** * Returns the previous element as a primitive type. * * @return the previous element in the iteration. * @see java.util.ListIterator#previous() */ KEY_TYPE PREV_KEY(); #endif /** Moves back for the given number of elements. * *

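A minimal sketch (assuming the int specialization and a sorted set as the source of the iterator; names are illustrative):
 *
 *   IntAVLTreeSet s = new IntAVLTreeSet();
 *   for( int x = 1; x <= 5; x++ ) s.add( x );
 *   IntBidirectionalIterator i = s.iterator();
 *   i.nextInt(); i.nextInt(); i.nextInt();   // returns 1, 2, 3
 *   i.back( 2 );                             // moves back over 3 and 2
 *   i.nextInt();                             // returns 2 again
 *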
The effect of this call is exactly the same as that of * calling {@link #previous()} for n times (possibly stopping * if {@link #hasPrevious()} becomes false). * * @param n the number of elements to skip back. * @return the number of elements actually skipped. * @see java.util.Iterator#next() */ int back( int n ); } fastutil-7.1.0/drv/BigArrayBigList.drv0000664000000000000000000012625313050701620016343 0ustar rootroot/* * Copyright (C) 2002-2016 Sebastiano Vigna * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package PACKAGE; import java.util.Collection; import java.util.Iterator; import java.util.RandomAccess; import java.util.NoSuchElementException; import it.unimi.dsi.fastutil.BigArrays; #if KEYS_PRIMITIVE /** A type-specific big list based on a big array; provides some additional methods that use polymorphism to avoid (un)boxing. * *

This class implements a lightweight, fast, open, optimized, * reuse-oriented version of big-array-based big lists. Instances of this class * represent a big list with a big array that is enlarged as needed when new entries * are created (by doubling the current length), but is * never made smaller (even on a {@link #clear()}). A family of * {@linkplain #trim() trimming methods} lets you control the size of the * backing big array; this is particularly useful if you reuse instances of this class. * Range checks are equivalent to those of {@link java.util}'s classes, but * they are delayed as much as possible. The backing big array is exposed by the * {@link #elements()} method. * *

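A minimal usage sketch, assuming the int specialization (IntBigArrayBigList) generated from this template:
 *
 *   IntBigArrayBigList l = new IntBigArrayBigList();
 *   for( long i = 0; i < 1000000; i++ ) l.add( (int)i );   // the backing big array doubles as needed
 *   l.size64();           // 1000000
 *   l.getInt( 999999 );   // 999999
 *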
This class implements the bulk methods removeElements(), * addElements() and getElements() using * high-performance system calls (e.g., {@link * System#arraycopy(Object,int,Object,int,int) System.arraycopy()} instead of * expensive loops. * * @see java.util.ArrayList */ public class BIG_ARRAY_BIG_LIST KEY_GENERIC extends ABSTRACT_BIG_LIST KEY_GENERIC implements RandomAccess, Cloneable, java.io.Serializable { private static final long serialVersionUID = -7046029254386353130L; #else /** A type-specific big-array-based big list; provides some additional methods that use polymorphism to avoid (un)boxing. * *

This class implements a lightweight, fast, open, optimized, * reuse-oriented version of big-array-based big lists. Instances of this class * represent a big list with a big array that is enlarged as needed when new entries * are created (by doubling the current length), but is * never made smaller (even on a {@link #clear()}). A family of * {@linkplain #trim() trimming methods} lets you control the size of the * backing big array; this is particularly useful if you reuse instances of this class. * Range checks are equivalent to those of {@link java.util}'s classes, but * they are delayed as much as possible. * *

The backing big array is exposed by the {@link #elements()} method. If an instance * of this class was created {@linkplain #wrap(Object[][],long) by wrapping}, * backing-array reallocations will be performed using reflection, so that * {@link #elements()} can return a big array of the same type as the original big array; the comments * about efficiency made in {@link it.unimi.dsi.fastutil.objects.ObjectArrays} apply here. * *

This class implements the bulk methods removeElements(), * addElements() and getElements() using * high-performance system calls (e.g., {@link * System#arraycopy(Object,int,Object,int,int) System.arraycopy()} instead of * expensive loops. * * @see java.util.ArrayList */ public class BIG_ARRAY_BIG_LIST KEY_GENERIC extends ABSTRACT_BIG_LIST KEY_GENERIC implements RandomAccess, Cloneable, java.io.Serializable { private static final long serialVersionUID = -7046029254386353131L; #endif /** The initial default capacity of a big-array big list. */ public final static int DEFAULT_INITIAL_CAPACITY = 16; #if ! KEYS_PRIMITIVE /** Whether the backing big array was passed to wrap(). In * this case, we must reallocate with the same type of big array. */ protected final boolean wrapped; #endif /** The backing big array. */ protected transient KEY_GENERIC_TYPE a[][]; /** The current actual size of the big list (never greater than the backing-array length). */ protected long size; private static final boolean ASSERTS = ASSERTS_VALUE; /** Creates a new big-array big list using a given array. * *

This constructor is only meant to be used by the wrapping methods. * * @param a the big array that will be used to back this big-array big list. */ @SuppressWarnings("unused") protected BIG_ARRAY_BIG_LIST( final KEY_GENERIC_TYPE a[][], boolean dummy ) { this.a = a; #if ! KEYS_PRIMITIVE this.wrapped = true; #endif } /** Creates a new big-array big list with given capacity. * * @param capacity the initial capacity of the array list (may be 0). */ SUPPRESS_WARNINGS_KEY_UNCHECKED public BIG_ARRAY_BIG_LIST( final long capacity ) { if ( capacity < 0 ) throw new IllegalArgumentException( "Initial capacity (" + capacity + ") is negative" ); a = KEY_GENERIC_BIG_ARRAY_CAST BIG_ARRAYS.newBigArray( capacity ); #if ! KEYS_PRIMITIVE wrapped = false; #endif } /** Creates a new big-array big list with {@link #DEFAULT_INITIAL_CAPACITY} capacity. */ public BIG_ARRAY_BIG_LIST() { this( DEFAULT_INITIAL_CAPACITY ); } /** Creates a new big-array big list and fills it with a given type-specific collection. * * @param c a type-specific collection that will be used to fill the array list. */ public BIG_ARRAY_BIG_LIST( final COLLECTION KEY_EXTENDS_GENERIC c ) { this( c.size() ); for( KEY_ITERATOR KEY_EXTENDS_GENERIC i = c.iterator(); i.hasNext(); ) add( i.NEXT_KEY() ); } /** Creates a new big-array big list and fills it with a given type-specific list. * * @param l a type-specific list that will be used to fill the array list. */ public BIG_ARRAY_BIG_LIST( final BIG_LIST KEY_EXTENDS_GENERIC l ) { this( l.size64() ); l.getElements( 0, a, 0, size = l.size64() ); } /** Creates a new big-array big list and fills it with the elements of a given big array. * *

Note that this constructor makes it easy to build big lists from literal arrays * declared as type[][] {{ init_values }}. * The only constraint is that the number of initialisation values is * below {@link it.unimi.dsi.fastutil.BigArrays#SEGMENT_SIZE}. * * @param a a big array whose elements will be used to fill the array list. */ public BIG_ARRAY_BIG_LIST( final KEY_GENERIC_TYPE a[][] ) { this( a, 0, BIG_ARRAYS.length( a ) ); } /** Creates a new big-array big list and fills it with the elements of a given big array. * *

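For example (a sketch assuming the int specialization; the values are illustrative), a literal big array with a single segment can be passed directly:
 *
 *   IntBigArrayBigList l = new IntBigArrayBigList( new int[][] {{ 10, 20, 30, 40 }}, 1, 2 );
 *   // l contains 20, 30
 *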
Note that this constructor makes it easy to build big lists from literal arrays * declared as type[][] {{ init_values }}. * The only constraint is that the number of initialisation values is * below {@link it.unimi.dsi.fastutil.BigArrays#SEGMENT_SIZE}. * * @param a a big array whose elements will be used to fill the array list. * @param offset the first element to use. * @param length the number of elements to use. */ public BIG_ARRAY_BIG_LIST( final KEY_GENERIC_TYPE a[][], final long offset, final long length ) { this( length ); BIG_ARRAYS.copy( a, offset, this.a, 0, length ); size = length; } /** Creates a new big-array big list and fills it with the elements returned by an iterator.. * * @param i an iterator whose returned elements will fill the array list. */ public BIG_ARRAY_BIG_LIST( final Iterator i ) { this(); while( i.hasNext() ) this.add( i.next() ); } /** Creates a new big-array big list and fills it with the elements returned by a type-specific iterator.. * * @param i a type-specific iterator whose returned elements will fill the array list. */ public BIG_ARRAY_BIG_LIST( final KEY_ITERATOR KEY_EXTENDS_GENERIC i ) { this(); while( i.hasNext() ) this.add( i.NEXT_KEY() ); } #if KEYS_PRIMITIVE /** Returns the backing big array of this big list. * * @return the backing big array. */ public KEY_GENERIC_TYPE[][] elements() { return a; } #else /** Returns the backing big array of this big list. * *

If this big-array big list was created by wrapping a given big array, it is guaranteed * that the type of the returned big array will be the same. Otherwise, the returned * big array will be an big array of objects. * * @return the backing big array. */ public KEY_GENERIC_TYPE[][] elements() { return a; } #endif /** Wraps a given big array into a big-array list of given size. * * @param a a big array to wrap. * @param length the length of the resulting big-array list. * @return a new big-array list of the given size, wrapping the given big array. */ public static KEY_GENERIC BIG_ARRAY_BIG_LIST KEY_GENERIC wrap( final KEY_GENERIC_TYPE a[][], final long length ) { if ( length > BIG_ARRAYS.length( a ) ) throw new IllegalArgumentException( "The specified length (" + length + ") is greater than the array size (" + BIG_ARRAYS.length( a ) + ")" ); final BIG_ARRAY_BIG_LIST KEY_GENERIC l = new BIG_ARRAY_BIG_LIST KEY_GENERIC( a, false ); l.size = length; return l; } /** Wraps a given big array into a big-array big list. * * @param a a big array to wrap. * @return a new big-array big list wrapping the given array. */ public static KEY_GENERIC BIG_ARRAY_BIG_LIST KEY_GENERIC wrap( final KEY_GENERIC_TYPE a[][] ) { return wrap( a, BIG_ARRAYS.length( a ) ); } /** Ensures that this big-array big list can contain the given number of entries without resizing. * * @param capacity the new minimum capacity for this big-array big list. */ SUPPRESS_WARNINGS_KEY_UNCHECKED public void ensureCapacity( final long capacity ) { #if KEYS_PRIMITIVE a = BIG_ARRAYS.ensureCapacity( a, capacity, size ); #else if ( wrapped ) a = BIG_ARRAYS.ensureCapacity( a, capacity, size ); else { if ( capacity > BIG_ARRAYS.length( a ) ) { final Object t[][] = BIG_ARRAYS.newBigArray( capacity ); BIG_ARRAYS.copy( a, 0, t, 0, size ); a = (KEY_GENERIC_TYPE[][])t; } } #endif if ( ASSERTS ) assert size <= BIG_ARRAYS.length( a ); } /** Grows this big-array big list, ensuring that it can contain the given number of entries without resizing, * and in case enlarging it at least by a factor of two. * * @param capacity the new minimum capacity for this big-array big list. 
*/ SUPPRESS_WARNINGS_KEY_UNCHECKED private void grow( final long capacity ) { #if KEYS_PRIMITIVE a = BIG_ARRAYS.grow( a, capacity, size ); #else if ( wrapped ) a = BIG_ARRAYS.grow( a, capacity, size ); else { if ( capacity > BIG_ARRAYS.length( a ) ) { final int newLength = (int)Math.max( Math.min( 2 * BIG_ARRAYS.length( a ), it.unimi.dsi.fastutil.Arrays.MAX_ARRAY_SIZE ), capacity ); final Object t[][] = BIG_ARRAYS.newBigArray( newLength ); BIG_ARRAYS.copy( a, 0, t, 0, size ); a = (KEY_GENERIC_TYPE[][])t; } } #endif if ( ASSERTS ) assert size <= BIG_ARRAYS.length( a ); } public void add( final long index, final KEY_GENERIC_TYPE k ) { ensureIndex( index ); grow( size + 1 ); if ( index != size ) BIG_ARRAYS.copy( a, index, a, index + 1, size - index ); BIG_ARRAYS.set( a, index, k ); size++; if ( ASSERTS ) assert size <= BIG_ARRAYS.length( a ); } public boolean add( final KEY_GENERIC_TYPE k ) { grow( size + 1 ); BIG_ARRAYS.set( a, size++, k ); if ( ASSERTS ) assert size <= BIG_ARRAYS.length( a ); return true; } public KEY_GENERIC_TYPE GET_KEY( final long index ) { if ( index >= size ) throw new IndexOutOfBoundsException( "Index (" + index + ") is greater than or equal to list size (" + size + ")" ); return BIG_ARRAYS.get( a, index ); } public long indexOf( final KEY_TYPE k ) { for( long i = 0; i < size; i++ ) if ( KEY_EQUALS( k, BIG_ARRAYS.get( a, i ) ) ) return i; return -1; } public long lastIndexOf( final KEY_TYPE k ) { for( long i = size; i-- != 0; ) if ( KEY_EQUALS( k, BIG_ARRAYS.get( a, i ) ) ) return i; return -1; } public KEY_GENERIC_TYPE REMOVE_KEY( final long index ) { if ( index >= size ) throw new IndexOutOfBoundsException( "Index (" + index + ") is greater than or equal to list size (" + size + ")" ); final KEY_GENERIC_TYPE old = BIG_ARRAYS.get( a, index ); size--; if ( index != size ) BIG_ARRAYS.copy( a, index + 1, a, index, size - index ); #if KEYS_REFERENCE BIG_ARRAYS.set( a, size, null ); #endif if ( ASSERTS ) assert size <= BIG_ARRAYS.length( a ); return old; } public boolean rem( final KEY_TYPE k ) { final long index = indexOf( k ); if ( index == -1 ) return false; REMOVE_KEY( index ); if ( ASSERTS ) assert size <= BIG_ARRAYS.length( a ); return true; } #if KEYS_REFERENCE public boolean remove( final Object o ) { return rem( o ); } #endif public KEY_GENERIC_TYPE set( final long index, final KEY_GENERIC_TYPE k ) { if ( index >= size ) throw new IndexOutOfBoundsException( "Index (" + index + ") is greater than or equal to list size (" + size + ")" ); KEY_GENERIC_TYPE old = BIG_ARRAYS.get( a, index ); BIG_ARRAYS.set( a, index, k ); return old; } #if KEYS_PRIMITIVE @Override public boolean removeAll( final COLLECTION c ) { KEY_GENERIC_TYPE[] s = null, d = null; int ss = -1, sd = BigArrays.SEGMENT_SIZE, ds = -1, dd = BigArrays.SEGMENT_SIZE; for ( long i = 0; i < size; i++ ) { if ( sd == BigArrays.SEGMENT_SIZE ) { sd = 0; s = a[ ++ss ]; } if ( !c.contains( s[ sd ] ) ) { if ( dd == BigArrays.SEGMENT_SIZE ) { d = a[ ++ds ]; dd = 0; } d[ dd++ ] = s[ sd ]; } sd++; } final long j = BigArrays.index( ds, dd ); final boolean modified = size != j; size = j; return modified; } @Override public boolean removeAll( final Collection c ) { KEY_GENERIC_TYPE[] s = null, d = null; int ss = -1, sd = BigArrays.SEGMENT_SIZE, ds = -1, dd = BigArrays.SEGMENT_SIZE; for ( long i = 0; i < size; i++ ) { if ( sd == BigArrays.SEGMENT_SIZE ) { sd = 0; s = a[ ++ss ]; } if ( !c.contains( KEY2OBJ( s[ sd ] ) ) ) { if ( dd == BigArrays.SEGMENT_SIZE ) { d = a[ ++ds ]; dd = 0; } d[ dd++ ] = s[ sd ]; } sd++; } final 
long j = BigArrays.index( ds, dd ); final boolean modified = size != j; size = j; return modified; } #else @Override public boolean removeAll( final Collection c ) { KEY_GENERIC_TYPE[] s = null, d = null; int ss = -1, sd = BigArrays.SEGMENT_SIZE, ds = -1, dd = BigArrays.SEGMENT_SIZE; for ( long i = 0; i < size; i++ ) { if ( sd == BigArrays.SEGMENT_SIZE ) { sd = 0; s = a[ ++ss ]; } if ( !c.contains( s[ sd ] ) ) { if ( dd == BigArrays.SEGMENT_SIZE ) { d = a[ ++ds ]; dd = 0; } d[ dd++ ] = s[ sd ]; } sd++; } final long j = BigArrays.index( ds, dd ); final boolean modified = size != j; size = j; return modified; } #endif public void clear() { #if KEYS_REFERENCE BIG_ARRAYS.fill( a, 0, size, null ); #endif size = 0; if ( ASSERTS ) assert size <= BIG_ARRAYS.length( a ); } public long size64() { return size; } public void size( final long size ) { if ( size > BIG_ARRAYS.length( a ) ) ensureCapacity( size ); if ( size > this.size ) BIG_ARRAYS.fill( a, this.size, size, KEY_NULL ); #if KEYS_REFERENCE else BIG_ARRAYS.fill( a, size, this.size, KEY_NULL ); #endif this.size = size; } public boolean isEmpty() { return size == 0; } /** Trims this big-array big list so that the capacity is equal to the size. * * @see java.util.ArrayList#trimToSize() */ public void trim() { trim( 0 ); } /** Trims the backing big array if it is too large. * * If the current big array length is smaller than or equal to * n, this method does nothing. Otherwise, it trims the * big-array length to the maximum between n and {@link #size64()}. * *

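A typical reuse pattern (a sketch; list is an illustrative variable of the int specialization):
 *
 *   list.clear();        // the backing big array keeps its current length
 *   list.trim( 1024 );   // shrink it, keeping room for about a thousand elements
 *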
This method is useful when reusing big lists. {@linkplain #clear() Clearing a * big list} leaves the big-array length untouched. If you are reusing a big list * many times, you can call this method with a typical * size to avoid keeping around a very large big array just * because of a few large transient big lists. * * @param n the threshold for the trimming. */ public void trim( final long n ) { final long arrayLength = BIG_ARRAYS.length( a ); if ( n >= arrayLength || size == arrayLength ) return; a = BIG_ARRAYS.trim( a, Math.max( n, size ) ); if ( ASSERTS ) assert size <= BIG_ARRAYS.length( a ); } /** Copies element of this type-specific list into the given big array using optimized system calls. * * @param from the start index (inclusive). * @param a the destination big array. * @param offset the offset into the destination array where to store the first element copied. * @param length the number of elements to be copied. */ public void getElements( final int from, final KEY_TYPE[][] a, final long offset, final long length ) { BIG_ARRAYS.copy( this.a, from, a, offset, length ); } /** Removes elements of this type-specific list using optimized system calls. * * @param from the start index (inclusive). * @param to the end index (exclusive). */ public void removeElements( final int from, final int to ) { BigArrays.ensureFromTo( size, from, to ); BIG_ARRAYS.copy( a, to, a, from, size - to ); size -= ( to - from ); #if KEYS_REFERENCE BIG_ARRAYS.fill( a, size, size + to - from, null ); #endif } /** Adds elements to this type-specific list using optimized system calls. * * @param index the index at which to add elements. * @param a the big array containing the elements. * @param offset the offset of the first element to add. * @param length the number of elements to add. */ public void addElements( final int index, final KEY_GENERIC_TYPE a[][], final long offset, final long length ) { ensureIndex( index ); BIG_ARRAYS.ensureOffsetLength( a, offset, length ); grow( size + length ); BIG_ARRAYS.copy( this.a, index, this.a, index + length, size - index ); BIG_ARRAYS.copy( a, offset, this.a, index, length ); size += length; } @Override public KEY_BIG_LIST_ITERATOR KEY_GENERIC listIterator( final long index ) { ensureIndex( index ); return new KEY_ABSTRACT_BIG_LIST_ITERATOR KEY_GENERIC() { long pos = index, last = -1; public boolean hasNext() { return pos < size; } public boolean hasPrevious() { return pos > 0; } public KEY_GENERIC_TYPE NEXT_KEY() { if ( ! hasNext() ) throw new NoSuchElementException(); return BIG_ARRAYS.get( a, last = pos++ ); } public KEY_GENERIC_TYPE PREV_KEY() { if ( ! hasPrevious() ) throw new NoSuchElementException(); return BIG_ARRAYS.get( a, last = --pos ); } public long nextIndex() { return pos; } public long previousIndex() { return pos - 1; } public void add( KEY_GENERIC_TYPE k ) { BIG_ARRAY_BIG_LIST.this.add( pos++, k ); last = -1; } public void set( KEY_GENERIC_TYPE k ) { if ( last == -1 ) throw new IllegalStateException(); BIG_ARRAY_BIG_LIST.this.set( last, k ); } public void remove() { if ( last == -1 ) throw new IllegalStateException(); BIG_ARRAY_BIG_LIST.this.REMOVE_KEY( last ); /* If the last operation was a next(), we are removing an element *before* us, and we must decrease pos correspondingly. 
*/ if ( last < pos ) pos--; last = -1; } }; } public BIG_ARRAY_BIG_LIST KEY_GENERIC clone() { BIG_ARRAY_BIG_LIST KEY_GENERIC c = new BIG_ARRAY_BIG_LIST KEY_GENERIC( size ); BIG_ARRAYS.copy( a, 0, c.a, 0, size ); c.size = size; return c; } #if KEY_CLASS_Object private boolean valEquals( final K a, final K b ) { return a == null ? b == null : a.equals( b ); } #endif /** Compares this type-specific big-array list to another one. * *

This method exists only for sake of efficiency. The implementation * inherited from the abstract implementation would already work. * * @param l a type-specific big-array list. * @return true if the argument contains the same elements of this type-specific big-array list. */ public boolean equals( final BIG_ARRAY_BIG_LIST KEY_GENERIC l ) { if ( l == this ) return true; long s = size64(); if ( s != l.size64() ) return false; final KEY_GENERIC_TYPE[][] a1 = a; final KEY_GENERIC_TYPE[][] a2 = l.a; #if KEY_CLASS_Object while( s-- != 0 ) if ( ! valEquals( BIG_ARRAYS.get( a1, s ), BIG_ARRAYS.get( a2, s ) ) ) return false; #else while( s-- != 0 ) if ( BIG_ARRAYS.get( a1, s ) != BIG_ARRAYS.get( a2, s ) ) return false; #endif return true; } #if ! KEY_CLASS_Reference /** Compares this big list to another big list. * *

This method exists only for sake of efficiency. The implementation * inherited from the abstract implementation would already work. * * @param l a big list. * @return a negative integer, * zero, or a positive integer as this big list is lexicographically less than, equal * to, or greater than the argument. */ SUPPRESS_WARNINGS_KEY_UNCHECKED public int compareTo( final BIG_ARRAY_BIG_LIST KEY_EXTENDS_GENERIC l ) { final long s1 = size64(), s2 = l.size64(); final KEY_GENERIC_TYPE a1[][] = a, a2[][] = l.a; KEY_GENERIC_TYPE e1, e2; int r, i; for( i = 0; i < s1 && i < s2; i++ ) { e1 = BIG_ARRAYS.get( a1, i ); e2 = BIG_ARRAYS.get( a2, i ); if ( ( r = KEY_CMP( e1, e2 ) ) != 0 ) return r; } return i < s2 ? -1 : ( i < s1 ? 1 : 0 ); } #endif private void writeObject( java.io.ObjectOutputStream s ) throws java.io.IOException { s.defaultWriteObject(); for( int i = 0; i < size; i++ ) s.WRITE_KEY( BIG_ARRAYS.get( a, i ) ); } SUPPRESS_WARNINGS_KEY_UNCHECKED private void readObject( java.io.ObjectInputStream s ) throws java.io.IOException, ClassNotFoundException { s.defaultReadObject(); a = KEY_GENERIC_BIG_ARRAY_CAST BIG_ARRAYS.newBigArray( size ); for( int i = 0; i < size; i++ ) BIG_ARRAYS.set( a, i, KEY_GENERIC_CAST s.READ_KEY() ); } #ifdef TEST private static long seed = System.currentTimeMillis(); private static java.util.Random r = new java.util.Random( seed ); private static KEY_TYPE genKey() { #if KEY_CLASS_Byte || KEY_CLASS_Short || KEY_CLASS_Character return (KEY_TYPE)(r.nextInt()); #elif KEYS_PRIMITIVE return r.NEXT_KEY(); #elif KEY_CLASS_Object return Integer.toBinaryString( r.nextInt() ); #else return new java.io.Serializable() {}; #endif } private static java.text.NumberFormat format = new java.text.DecimalFormat( "#,###.00" ); private static java.text.FieldPosition p = new java.text.FieldPosition( 0 ); private static String format( double d ) { StringBuffer s = new StringBuffer(); return format.format( d, s, p ).toString(); } private static void speedTest( int n, boolean comp ) { System.out.println( "There are presently no speed tests for this class." ); } private static void fatal( String msg ) { System.out.println( msg ); System.exit( 1 ); } private static void ensure( boolean cond, String msg ) { if ( cond ) return; fatal( msg ); } private static Object[] k, v, nk; private static KEY_TYPE kt[]; private static KEY_TYPE nkt[]; private static BIG_ARRAY_BIG_LIST topList; protected static void testLists( BIG_LIST m, BIG_LIST t, int n, int level ) { long ms; Exception mThrowsIllegal, tThrowsIllegal, mThrowsOutOfBounds, tThrowsOutOfBounds; Object rt = null; KEY_TYPE rm = KEY_NULL; if ( level > 4 ) return; /* Now we check that both sets agree on random keys. For m we use the polymorphic method. 
*/ for( int i = 0; i < n; i++ ) { int p = r.nextInt() % ( n * 2 ); KEY_TYPE T = genKey(); mThrowsOutOfBounds = tThrowsOutOfBounds = null; try { m.set( p, T ); } catch ( IndexOutOfBoundsException e ) { mThrowsOutOfBounds = e; } try { t.set( p, KEY2OBJ( T ) ); } catch ( IndexOutOfBoundsException e ) { tThrowsOutOfBounds = e; } ensure( ( mThrowsOutOfBounds == null ) == ( tThrowsOutOfBounds == null ), "Error (" + level + ", " + seed + "): set() divergence at start in IndexOutOfBoundsException for index " + p + " (" + mThrowsOutOfBounds + ", " + tThrowsOutOfBounds + ")" ); if ( mThrowsOutOfBounds == null ) ensure( t.get( p ).equals( KEY2OBJ( m.GET_KEY( p ) ) ), "Error (" + level + ", " + seed + "): m and t differ after set() on position " + p + " (" + m.GET_KEY( p ) + ", " + t.get( p ) + ")" ); p = r.nextInt() % ( n * 2 ); mThrowsOutOfBounds = tThrowsOutOfBounds = null; try { m.GET_KEY( p ); } catch ( IndexOutOfBoundsException e ) { mThrowsOutOfBounds = e; } try { t.get( p ); } catch ( IndexOutOfBoundsException e ) { tThrowsOutOfBounds = e; } ensure( ( mThrowsOutOfBounds == null ) == ( tThrowsOutOfBounds == null ), "Error (" + level + ", " + seed + "): get() divergence at start in IndexOutOfBoundsException for index " + p + " (" + mThrowsOutOfBounds + ", " + tThrowsOutOfBounds + ")" ); if ( mThrowsOutOfBounds == null ) ensure( t.get( p ).equals( KEY2OBJ( m.GET_KEY( p ) ) ), "Error (" + level + ", " + seed + "): m and t differ aftre get() on position " + p + " (" + m.GET_KEY( p ) + ", " + t.get( p ) + ")" ); } /* Now we check that both sets agree on random keys. For m we use the standard method. */ for( int i = 0; i < n; i++ ) { int p = r.nextInt() % ( n * 2 ); mThrowsOutOfBounds = tThrowsOutOfBounds = null; try { m.get( p ); } catch ( IndexOutOfBoundsException e ) { mThrowsOutOfBounds = e; } try { t.get( p ); } catch ( IndexOutOfBoundsException e ) { tThrowsOutOfBounds = e; } ensure( ( mThrowsOutOfBounds == null ) == ( tThrowsOutOfBounds == null ), "Error (" + level + ", " + seed + "): get() divergence at start in IndexOutOfBoundsException for index " + p + " (" + mThrowsOutOfBounds + ", " + tThrowsOutOfBounds + ")" ); if ( mThrowsOutOfBounds == null ) ensure( t.get( p ).equals( m.get( p ) ), "Error (" + level + ", " + seed + "): m and t differ at start on position " + p + " (" + m.get( p ) + ", " + t.get( p ) + ")" ); } /* Now we check that m and t are equal. */ if ( !m.equals( t ) || ! t.equals( m ) ) System.err.println("m: " + m + " t: " + t); ensure( m.equals( t ), "Error (" + level + ", " + seed + "): ! m.equals( t ) at start" ); ensure( t.equals( m ), "Error (" + level + ", " + seed + "): ! t.equals( m ) at start" ); /* Now we check that m actually holds that data. */ for(Iterator i=t.iterator(); i.hasNext(); ) { ensure( m.contains( i.next() ), "Error (" + level + ", " + seed + "): m and t differ on an entry after insertion (iterating on t)" ); } /* Now we check that m actually holds that data, but iterating on m. */ for(Iterator i=m.listIterator(); i.hasNext(); ) { ensure( t.contains( i.next() ), "Error (" + level + ", " + seed + "): m and t differ on an entry after insertion (iterating on m)" ); } /* Now we check that inquiries about random data give the same answer in m and t. For m we use the polymorphic method. */ for(int i=0; i n ) { m.size( n ); while( t.size64() != n ) t.remove( t.size64() -1 ); } /* Now we add random data in m and t using addAll on a type-specific collection, checking that the result is the same. 
*/ for(int i=0; i n ) { m.size( n ); while( t.size64() != n ) t.remove( t.size64() -1 ); } /* Now we add random data in m and t using addAll on a list, checking that the result is the same. */ for(int i=0; i 2 ) r = new java.util.Random( seed = Long.parseLong( args[ 2 ] ) ); try { if ("speedTest".equals(args[0]) || "speedComp".equals(args[0])) speedTest( n, "speedComp".equals(args[0]) ); else if ( "test".equals( args[0] ) ) test(n); } catch( Throwable e ) { e.printStackTrace( System.err ); System.err.println( "seed: " + seed ); } } #endif } fastutil-7.1.0/drv/BigArrays.drv0000664000000000000000000017130713050701620015250 0ustar rootroot/* * Copyright (C) 2009-2016 Sebastiano Vigna * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * * * * Copyright (C) 1999 CERN - European Organization for Nuclear Research. * * Permission to use, copy, modify, distribute and sell this software and * its documentation for any purpose is hereby granted without fee, * provided that the above copyright notice appear in all copies and that * both that copyright notice and this permission notice appear in * supporting documentation. CERN makes no representations about the * suitability of this software for any purpose. It is provided "as is" * without expressed or implied warranty. */ package PACKAGE; import java.util.Arrays; import java.util.Random; import it.unimi.dsi.fastutil.BigArrays; import it.unimi.dsi.fastutil.Hash; import static it.unimi.dsi.fastutil.BigArrays.ensureLength; import static it.unimi.dsi.fastutil.BigArrays.start; import static it.unimi.dsi.fastutil.BigArrays.segment; import static it.unimi.dsi.fastutil.BigArrays.displacement; import static it.unimi.dsi.fastutil.BigArrays.SEGMENT_MASK; import static it.unimi.dsi.fastutil.BigArrays.SEGMENT_SHIFT; import static it.unimi.dsi.fastutil.BigArrays.SEGMENT_SIZE; #if KEYS_PRIMITIVE #if ! KEY_CLASS_Byte && ! KEY_CLASS_Boolean import it.unimi.dsi.fastutil.bytes.ByteBigArrays; #endif /** A class providing static methods and objects that do useful things with {@linkplain BigArrays big arrays}. * *

In particular, the ensureCapacity(), grow(), * trim() and setLength() methods make it possible to handle * big arrays much like array lists. * *

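For instance (a sketch assuming the int specialization, IntBigArrays; names and sizes are illustrative), a big array can be grown much like an array list:
 *
 *   int[][] a = IntBigArrays.newBigArray( 16 );
 *   long size = 0;
 *   for( long i = 0; i < 1000; i++ ) {
 *      a = IntBigArrays.grow( a, size + 1 );   // enlarges at least by a factor of two when needed
 *      IntBigArrays.set( a, size++, (int)i );
 *   }
 *   a = IntBigArrays.trim( a, size );          // the final length is exactly size
 *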
Note that {@link it.unimi.dsi.fastutil.io.BinIO} and {@link it.unimi.dsi.fastutil.io.TextIO} * contain several methods that make it possible to load and save big arrays of primitive types as sequences * of elements in {@link java.io.DataInput} format (i.e., not as objects) or as sequences of lines of text. * * @see BigArrays */ public class BIG_ARRAYS { #else import java.util.Comparator; /** A class providing static methods and objects that do useful things with {@linkplain BigArrays big arrays}. * *

In particular, the ensureCapacity(), grow(), * trim() and setLength() methods make it possible to handle * big arrays much like array lists. * *

Note that {@link it.unimi.dsi.fastutil.io.BinIO} and {@link it.unimi.dsi.fastutil.io.TextIO} * contain several methods that make it possible to load and save big arrays of primitive types as sequences * of elements in {@link java.io.DataInput} format (i.e., not as objects) or as sequences of lines of text. * *

Warning: creating arrays * using {@linkplain java.lang.reflect.Array#newInstance(Class,int) reflection}, as it * happens in {@link #ensureCapacity(Object[][],long,long)} and {@link #grow(Object[][],long,long)}, * is significantly slower than using new. This phenomenon is particularly * evident in the first growth phases of an array reallocated with doubling (or similar) logic. * * @see BigArrays */ public class BIG_ARRAYS { #endif private BIG_ARRAYS() {} /** A static, final, empty big array. */ public final static KEY_TYPE[][] EMPTY_BIG_ARRAY = {}; /** Returns the element of the given big array of specified index. * * @param array a big array. * @param index a position in the big array. * @return the element of the big array at the specified position. */ public static KEY_GENERIC KEY_GENERIC_TYPE get( final KEY_GENERIC_TYPE[][] array, final long index ) { return array[ segment( index ) ][ displacement( index ) ]; } /** Sets the element of the given big array of specified index. * * @param array a big array. * @param index a position in the big array. * @param value the new value for the array element at the specified position. */ public static KEY_GENERIC void set( final KEY_GENERIC_TYPE[][] array, final long index, KEY_GENERIC_TYPE value ) { array[ segment( index ) ][ displacement( index ) ] = value; } /** Swaps the element of the given big array of specified indices. * * @param array a big array. * @param first a position in the big array. * @param second a position in the big array. */ public static KEY_GENERIC void swap( final KEY_GENERIC_TYPE[][] array, final long first, final long second ) { final KEY_GENERIC_TYPE t = array[ segment( first ) ][ displacement( first ) ]; array[ segment( first ) ][ displacement( first ) ] = array[ segment( second ) ][ displacement( second ) ]; array[ segment( second ) ][ displacement( second ) ] = t; } #if KEYS_PRIMITIVE && ! KEY_CLASS_Boolean /** Adds the specified increment the element of the given big array of specified index. * * @param array a big array. * @param index a position in the big array. * @param incr the increment */ public static void add( final KEY_GENERIC_TYPE[][] array, final long index, KEY_GENERIC_TYPE incr ) { array[ segment( index ) ][ displacement( index ) ] += incr; } /** Multiplies by the specified factor the element of the given big array of specified index. * * @param array a big array. * @param index a position in the big array. * @param factor the factor */ public static void mul( final KEY_GENERIC_TYPE[][] array, final long index, KEY_GENERIC_TYPE factor ) { array[ segment( index ) ][ displacement( index ) ] *= factor; } /** Increments the element of the given big array of specified index. * * @param array a big array. * @param index a position in the big array. */ public static void incr( final KEY_GENERIC_TYPE[][] array, final long index ) { array[ segment( index ) ][ displacement( index ) ]++; } /** Decrements the element of the given big array of specified index. * * @param array a big array. * @param index a position in the big array. */ public static void decr( final KEY_GENERIC_TYPE[][] array, final long index ) { array[ segment( index ) ][ displacement( index ) ]--; } #endif /** Returns the length of the given big array. * * @param array a big array. * @return the length of the given big array. */ public static KEY_GENERIC long length( final KEY_GENERIC_TYPE[][] array ) { final int length = array.length; return length == 0 ? 
0 : start( length - 1 ) + array[ length - 1 ].length; } /** Copies a big array from the specified source big array, beginning at the specified position, to the specified position of the destination big array. * Handles correctly overlapping regions of the same big array. * * @param srcArray the source big array. * @param srcPos the starting position in the source big array. * @param destArray the destination big array. * @param destPos the starting position in the destination data. * @param length the number of elements to be copied. */ public static KEY_GENERIC void copy( final KEY_GENERIC_TYPE[][] srcArray, final long srcPos, final KEY_GENERIC_TYPE[][] destArray, final long destPos, long length ) { if ( destPos <= srcPos ) { int srcSegment = segment( srcPos ); int destSegment = segment( destPos ); int srcDispl = displacement( srcPos ); int destDispl = displacement( destPos ); int l; while( length > 0 ) { l = (int)Math.min( length, Math.min( srcArray[ srcSegment ].length - srcDispl, destArray[ destSegment ].length - destDispl ) ); System.arraycopy( srcArray[ srcSegment ], srcDispl, destArray[ destSegment ], destDispl, l ); if ( ( srcDispl += l ) == SEGMENT_SIZE ) { srcDispl = 0; srcSegment++; } if ( ( destDispl += l ) == SEGMENT_SIZE ) { destDispl = 0; destSegment++; } length -= l; } } else { int srcSegment = segment( srcPos + length ); int destSegment = segment( destPos + length ); int srcDispl = displacement( srcPos + length ); int destDispl = displacement( destPos + length ); int l; while( length > 0 ) { if ( srcDispl == 0 ) { srcDispl = SEGMENT_SIZE; srcSegment--; } if ( destDispl == 0 ) { destDispl = SEGMENT_SIZE; destSegment--; } l = (int)Math.min( length, Math.min( srcDispl, destDispl ) ); System.arraycopy( srcArray[ srcSegment ], srcDispl - l, destArray[ destSegment ], destDispl - l, l ); srcDispl -= l; destDispl -= l; length -= l; } } } /** Copies a big array from the specified source big array, beginning at the specified position, to the specified position of the destination array. * * @param srcArray the source big array. * @param srcPos the starting position in the source big array. * @param destArray the destination array. * @param destPos the starting position in the destination data. * @param length the number of elements to be copied. */ public static KEY_GENERIC void copyFromBig( final KEY_GENERIC_TYPE[][] srcArray, final long srcPos, final KEY_GENERIC_TYPE[] destArray, int destPos, int length ) { int srcSegment = segment( srcPos ); int srcDispl = displacement( srcPos ); int l; while( length > 0 ) { l = Math.min( srcArray[ srcSegment ].length - srcDispl, length ); System.arraycopy( srcArray[ srcSegment ], srcDispl, destArray, destPos, l ); if ( ( srcDispl += l ) == SEGMENT_SIZE ) { srcDispl = 0; srcSegment++; } destPos += l; length -= l; } } /** Copies an array from the specified source array, beginning at the specified position, to the specified position of the destination big array. * * @param srcArray the source array. * @param srcPos the starting position in the source array. * @param destArray the destination big array. * @param destPos the starting position in the destination data. * @param length the number of elements to be copied. 
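 *
 * A minimal sketch of copying between standard and big arrays (assuming the int specialization, IntBigArrays; values are illustrative):
 *
 *   int[][] big = IntBigArrays.newBigArray( 8 );
 *   IntBigArrays.copyToBig( new int[] { 1, 2, 3, 4 }, 0, big, 2, 4 );   // big positions 2..5 are now 1, 2, 3, 4
 *   int[] back = new int[ 2 ];
 *   IntBigArrays.copyFromBig( big, 3, back, 0, 2 );                     // back is now { 2, 3 }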
*/ public static KEY_GENERIC void copyToBig( final KEY_GENERIC_TYPE[] srcArray, int srcPos, final KEY_GENERIC_TYPE[][] destArray, final long destPos, long length ) { int destSegment = segment( destPos ); int destDispl = displacement( destPos ); int l; while( length > 0 ) { l = (int)Math.min( destArray[ destSegment ].length - destDispl, length ); System.arraycopy( srcArray, srcPos, destArray[ destSegment ], destDispl, l ); if ( ( destDispl += l ) == SEGMENT_SIZE ) { destDispl = 0; destSegment++; } srcPos += l; length -= l; } } #if KEY_CLASS_Object /** Creates a new big array using the given one as prototype. * *

This method returns a new big array of the given length whose elements * are of the same class as those of prototype. In case * of an empty big array, it tries to return {@link #EMPTY_BIG_ARRAY}, if possible. * * @param prototype a big array that will be used to type the new one. * @param length the length of the new big array. * @return a new big array of given type and length. */ SUPPRESS_WARNINGS_KEY_UNCHECKED public static K[][] newBigArray( final K[][] prototype, final long length ) { return (K[][])newBigArray( prototype.getClass().getComponentType(), length ); } /** Creates a new big array using the given one as component type. * *

This method returns a new big array whose segments * are of class componentType. In case * of an empty big array, it tries to return {@link #EMPTY_BIG_ARRAY}, if possible. * * @param componentType a class representing the type of segments of the array to be created. * @param length the length of the new big array. * @return a new big array of given type and length. */ private static Object[][] newBigArray( Class componentType, final long length ) { if ( length == 0 && componentType == Object[].class ) return EMPTY_BIG_ARRAY; ensureLength( length ); final int baseLength = (int)((length + SEGMENT_MASK) >>> SEGMENT_SHIFT); Object[][] base = (Object[][])java.lang.reflect.Array.newInstance( componentType, baseLength ); final int residual = (int)(length & SEGMENT_MASK); if ( residual != 0 ) { for( int i = 0; i < baseLength - 1; i++ ) base[ i ] = (Object[])java.lang.reflect.Array.newInstance( componentType.getComponentType(), SEGMENT_SIZE ); base[ baseLength - 1 ] = (Object[])java.lang.reflect.Array.newInstance( componentType.getComponentType(), residual ); } else for( int i = 0; i < baseLength; i++ ) base[ i ] = (Object[])java.lang.reflect.Array.newInstance( componentType.getComponentType(), SEGMENT_SIZE ); return base; } #endif /** Creates a new big array. * * @param length the length of the new big array. * @return a new big array of given length. */ public static KEY_TYPE[][] newBigArray( final long length ) { if ( length == 0 ) return EMPTY_BIG_ARRAY; ensureLength( length ); final int baseLength = (int)((length + SEGMENT_MASK) >>> SEGMENT_SHIFT); KEY_TYPE[][] base = new KEY_TYPE[ baseLength ][]; final int residual = (int)(length & SEGMENT_MASK); if ( residual != 0 ) { for( int i = 0; i < baseLength - 1; i++ ) base[ i ] = new KEY_TYPE[ SEGMENT_SIZE ]; base[ baseLength - 1 ] = new KEY_TYPE[ residual ]; } else for( int i = 0; i < baseLength; i++ ) base[ i ] = new KEY_TYPE[ SEGMENT_SIZE ]; return base; } #if KEY_CLASS_Object /** Turns a standard array into a big array. * *

Note that the returned big array might contain as a segment the original array. * * @param array an array. * @return a new big array with the same length and content of array. */ SUPPRESS_WARNINGS_KEY_UNCHECKED public static K[][] wrap( final K[] array ) { if ( array.length == 0 && array.getClass() == Object[].class ) return KEY_GENERIC_BIG_ARRAY_CAST EMPTY_BIG_ARRAY; if ( array.length <= SEGMENT_SIZE ) { final K[][] bigArray = (K[][])java.lang.reflect.Array.newInstance( array.getClass(), 1 ); bigArray[ 0 ] = array; return bigArray; } final K[][] bigArray = (K[][])newBigArray( array.getClass(), array.length ); for( int i = 0; i < bigArray.length; i++ ) System.arraycopy( array, (int)start( i ), bigArray[ i ], 0, bigArray[ i ].length ); return bigArray; } #else /** Turns a standard array into a big array. * *

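For example (a sketch assuming the int specialization, IntBigArrays):
 *
 *   int[][] big = IntBigArrays.wrap( new int[] { 1, 2, 3 } );
 *   IntBigArrays.get( big, 2 );   // 3; here big has a single segment, possibly the original array itself
 *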
Note that the returned big array might contain as a segment the original array. * * @param array an array. * @return a new big array with the same length and content of array. */ public static KEY_TYPE[][] wrap( final KEY_TYPE[] array ) { if ( array.length == 0 ) return EMPTY_BIG_ARRAY; if ( array.length <= SEGMENT_SIZE ) return new KEY_TYPE[][] { array }; final KEY_TYPE[][] bigArray = newBigArray( array.length ); for( int i = 0; i < bigArray.length; i++ ) System.arraycopy( array, (int)start( i ), bigArray[ i ], 0, bigArray[ i ].length ); return bigArray; } #endif /** Ensures that a big array can contain the given number of entries. * *

If you cannot foresee whether this big array will need to be * enlarged again, you should probably use grow() instead. * *

Warning: the returned array might use part of the segments of the original * array, which must be considered read-only after calling this method. * * @param array a big array. * @param length the new minimum length for this big array. * @return array, if it contains length entries or more; otherwise, * a big array with length entries whose first length(array) * entries are the same as those of array. */ public static KEY_GENERIC KEY_GENERIC_TYPE[][] ensureCapacity( final KEY_GENERIC_TYPE[][] array, final long length ) { return ensureCapacity( array, length, length( array ) ); } #if KEY_CLASS_Object /** Ensures that a big array can contain the given number of entries, preserving just a part of the big array. * *

This method returns a new big array of the given length whose elements * are of the same class as those of array. * *

Warning: the returned array might use part of the segments of the original * array, which must be considered read-only after calling this method. * * @param array a big array. * @param length the new minimum length for this big array. * @param preserve the number of elements of the big array that must be preserved in case a new allocation is necessary. * @return array, if it can contain length entries or more; otherwise, * a big array with length entries whose first preserve * entries are the same as those of array. */ SUPPRESS_WARNINGS_KEY_UNCHECKED public static KEY_GENERIC KEY_GENERIC_TYPE[][] ensureCapacity( final KEY_GENERIC_TYPE[][] array, final long length, final long preserve ) { final long oldLength = length( array ); if ( length > oldLength ) { ensureLength( length ); final int valid = array.length - ( array.length == 0 || array.length > 0 && array[ array.length - 1 ].length == SEGMENT_SIZE ? 0 : 1 ); final int baseLength = (int)((length + SEGMENT_MASK) >>> SEGMENT_SHIFT); final KEY_GENERIC_TYPE[][] base = Arrays.copyOf( array, baseLength ); final Class componentType = array.getClass().getComponentType(); final int residual = (int)(length & SEGMENT_MASK); if ( residual != 0 ) { for( int i = valid; i < baseLength - 1; i++ ) base[ i ] = (KEY_GENERIC_TYPE[])java.lang.reflect.Array.newInstance( componentType.getComponentType(), SEGMENT_SIZE ); base[ baseLength - 1 ] = (KEY_GENERIC_TYPE[])java.lang.reflect.Array.newInstance( componentType.getComponentType(), residual ); } else for( int i = valid; i < baseLength; i++ ) base[ i ] = (KEY_GENERIC_TYPE[])java.lang.reflect.Array.newInstance( componentType.getComponentType(), SEGMENT_SIZE ); if ( preserve - ( valid * (long)SEGMENT_SIZE ) > 0 ) copy( array, valid * (long)SEGMENT_SIZE, base, valid * (long)SEGMENT_SIZE, preserve - ( valid * (long)SEGMENT_SIZE ) ); return base; } return array; } #else /** Ensures that a big array can contain the given number of entries, preserving just a part of the big array. * *

Warning: the returned array might use part of the segments of the original * array, which must be considered read-only after calling this method. * * @param array a big array. * @param length the new minimum length for this big array. * @param preserve the number of elements of the big array that must be preserved in case a new allocation is necessary. * @return array, if it can contain length entries or more; otherwise, * a big array with length entries whose first preserve * entries are the same as those of array. */ public static KEY_TYPE[][] ensureCapacity( final KEY_TYPE[][] array, final long length, final long preserve ) { final long oldLength = length( array ); if ( length > oldLength ) { ensureLength( length ); final int valid = array.length - ( array.length == 0 || array.length > 0 && array[ array.length - 1 ].length == SEGMENT_SIZE ? 0 : 1 ); final int baseLength = (int)((length + SEGMENT_MASK) >>> SEGMENT_SHIFT); final KEY_TYPE[][] base = Arrays.copyOf( array, baseLength ); final int residual = (int)(length & SEGMENT_MASK); if ( residual != 0 ) { for( int i = valid; i < baseLength - 1; i++ ) base[ i ] = new KEY_TYPE[ SEGMENT_SIZE ]; base[ baseLength - 1 ] = new KEY_TYPE[ residual ]; } else for( int i = valid; i < baseLength; i++ ) base[ i ] = new KEY_TYPE[ SEGMENT_SIZE ]; if ( preserve - ( valid * (long)SEGMENT_SIZE ) > 0 ) copy( array, valid * (long)SEGMENT_SIZE, base, valid * (long)SEGMENT_SIZE, preserve - ( valid * (long)SEGMENT_SIZE ) ); return base; } return array; } #endif /** Grows the given big array to the maximum between the given length and * the current length multiplied by two, provided that the given * length is larger than the current length. * *

If you want complete control over the big array growth, you * should probably use ensureCapacity() instead. * *
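* A typical append loop, sketched for illustration (it assumes the int specialization
* IntBigArrays generated from this template):
* <pre>
* int[][] buf = IntBigArrays.newBigArray( 16 );
* long size = 0;
* for( int x = 0; x != 100; x++ ) {
*     buf = IntBigArrays.grow( buf, size + 1 );   // doubles when needed, amortizing reallocations
*     IntBigArrays.set( buf, size++, x );
* }
* buf = IntBigArrays.trim( buf, size );           // drop the slack at the end
* </pre>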

Warning: the returned array might use part of the segments of the original * array, which must be considered read-only after calling this method. * * @param array a big array. * @param length the new minimum length for this big array. * @return array, if it can contain length * entries; otherwise, a big array with * max(length,2*length(array)) entries whose first * length(array) entries are the same as those of array. * */ public static KEY_GENERIC KEY_GENERIC_TYPE[][] grow( final KEY_GENERIC_TYPE[][] array, final long length ) { final long oldLength = length( array ); return length > oldLength ? grow( array, length, oldLength ) : array; } /** Grows the given big array to the maximum between the given length and * the current length multiplied by two, provided that the given * length is larger than the current length, preserving just a part of the big array. * *

If you want complete control over the big array growth, you * should probably use ensureCapacity() instead. * *

Warning: the returned array might use part of the segments of the original * array, which must be considered read-only after calling this method. * * @param array a big array. * @param length the new minimum length for this big array. * @param preserve the number of elements of the big array that must be preserved in case a new allocation is necessary. * @return array, if it can contain length * entries; otherwise, a big array with * max(length,2*length(array)) entries whose first * preserve entries are the same as those of array. * */ public static KEY_GENERIC KEY_GENERIC_TYPE[][] grow( final KEY_GENERIC_TYPE[][] array, final long length, final long preserve ) { final long oldLength = length( array ); return length > oldLength ? ensureCapacity( array, Math.max( 2 * oldLength, length ), preserve ) : array; } #if KEY_CLASS_Object /** Trims the given big array to the given length. * *

Warning: the returned array might use part of the segments of the original * array, which must be considered read-only after calling this method. * * @param array a big array. * @param length the new maximum length for the big array. * @return array, if it contains length * entries or less; otherwise, a big array with * length entries whose entries are the same as * the first length entries of array. * */ public static KEY_GENERIC KEY_GENERIC_TYPE[][] trim( final KEY_GENERIC_TYPE[][] array, final long length ) { ensureLength( length ); final long oldLength = length( array ); if ( length >= oldLength ) return array; final int baseLength = (int)((length + SEGMENT_MASK) >>> SEGMENT_SHIFT); final KEY_GENERIC_TYPE[][] base = Arrays.copyOf( array, baseLength ); final int residual = (int)(length & SEGMENT_MASK); if ( residual != 0 ) base[ baseLength - 1 ] = ARRAYS.trim( base[ baseLength - 1 ], residual ); return base; } #else /** Trims the given big array to the given length. * *
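* A short sketch (illustrative; it assumes the int specialization IntBigArrays generated
* from this template):
* <pre>
* int[][] a = IntBigArrays.newBigArray( 1000 );
* a = IntBigArrays.trim( a, 100 );   // IntBigArrays.length( a ) is now 100
* a = IntBigArrays.trim( a, 500 );   // no effect: the array already contains fewer than 500 entries
* </pre>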

Warning: the returned array might use part of the segments of the original * array, which must be considered read-only after calling this method. * * @param array a big array. * @param length the new maximum length for the big array. * @return array, if it contains length * entries or less; otherwise, a big array with * length entries whose entries are the same as * the first length entries of array. * */ public static KEY_GENERIC KEY_GENERIC_TYPE[][] trim( final KEY_GENERIC_TYPE[][] array, final long length ) { ensureLength( length ); final long oldLength = length( array ); if ( length >= oldLength ) return array; final int baseLength = (int)((length + SEGMENT_MASK) >>> SEGMENT_SHIFT); final KEY_TYPE[][] base = Arrays.copyOf( array, baseLength ); final int residual = (int)(length & SEGMENT_MASK); if ( residual != 0 ) base[ baseLength - 1 ] = ARRAYS.trim( base[ baseLength - 1 ], residual ); return base; } #endif /** Sets the length of the given big array. * *

Warning: the returned array might use part of the segments of the original * array, which must be considered read-only after calling this method. * * @param array a big array. * @param length the new length for the big array. * @return array, if it contains exactly length * entries; otherwise, if it contains more than * length entries, a big array with length entries * whose entries are the same as the first length entries of * array; otherwise, a big array with length entries * whose first length(array) entries are the same as those of * array. * */ public static KEY_GENERIC KEY_GENERIC_TYPE[][] setLength( final KEY_GENERIC_TYPE[][] array, final long length ) { final long oldLength = length( array ); if ( length == oldLength ) return array; if ( length < oldLength ) return trim( array, length ); return ensureCapacity( array, length ); } /** Returns a copy of a portion of a big array. * * @param array a big array. * @param offset the first element to copy. * @param length the number of elements to copy. * @return a new big array containing length elements of array starting at offset. */ public static KEY_GENERIC KEY_GENERIC_TYPE[][] copy( final KEY_GENERIC_TYPE[][] array, final long offset, final long length ) { ensureOffsetLength( array, offset, length ); final KEY_GENERIC_TYPE[][] a = #if KEY_CLASS_Object newBigArray( array, length ); #else newBigArray( length ); #endif copy( array, offset, a, 0, length ); return a; } /** Returns a copy of a big array. * * @param array a big array. * @return a copy of array. */ public static KEY_GENERIC KEY_GENERIC_TYPE[][] copy( final KEY_GENERIC_TYPE[][] array ) { final KEY_GENERIC_TYPE[][] base = array.clone(); for( int i = base.length; i-- != 0; ) base[ i ] = array[ i ].clone(); return base; } /** Fills the given big array with the given value. * *
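* For illustration (assuming the int specialization IntBigArrays generated from this template):
* <pre>
* int[][] a = IntBigArrays.newBigArray( 1000000000L );   // one billion elements (about 4 GiB of heap)
* IntBigArrays.fill( a, -1 );                             // every element becomes -1
* </pre>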

This method uses a backward loop. It is significantly faster than the corresponding * method in {@link java.util.Arrays}. * * @param array a big array. * @param value the new value for all elements of the big array. */ public static KEY_GENERIC void fill( final KEY_GENERIC_TYPE[][] array, final KEY_GENERIC_TYPE value ) { for( int i = array.length; i-- != 0; ) Arrays.fill( array[ i ], value ); } /** Fills a portion of the given big array with the given value. * *

If possible (i.e., from is 0) this method uses a * backward loop. In this case, it is significantly faster than the * corresponding method in {@link java.util.Arrays}. * * @param array a big array. * @param from the starting index of the portion to fill. * @param to the end index of the portion to fill. * @param value the new value for all elements of the specified portion of the big array. */ public static KEY_GENERIC void fill( final KEY_GENERIC_TYPE[][] array, final long from, long to, final KEY_GENERIC_TYPE value ) { final long length = length( array ); BigArrays.ensureFromTo( length, from, to ); int fromSegment = segment( from ); int toSegment = segment( to ); int fromDispl = displacement( from ); int toDispl = displacement( to ); if ( fromSegment == toSegment ) { Arrays.fill( array[ fromSegment ], fromDispl, toDispl, value ); return; } if ( toDispl != 0 ) Arrays.fill( array[ toSegment ], 0, toDispl, value ); while( --toSegment > fromSegment ) Arrays.fill( array[ toSegment ], value ); Arrays.fill( array[ fromSegment ], fromDispl, SEGMENT_SIZE, value ); } /** Returns true if the two big arrays are elementwise equal. * *

This method uses a backward loop. It is significantly faster than the corresponding * method in {@link java.util.Arrays}. * * @param a1 a big array. * @param a2 another big array. * @return true if the two big arrays are of the same length, and their elements are equal. */ public static KEY_GENERIC boolean equals( final KEY_GENERIC_TYPE[][] a1, final KEY_GENERIC_TYPE a2[][] ) { if ( length( a1 ) != length( a2 ) ) return false; int i = a1.length, j; KEY_GENERIC_TYPE[] t, u; while( i-- != 0 ) { t = a1[ i ]; u = a2[ i ]; j = t.length; while( j-- != 0 ) if (! KEY_EQUALS( t[ j ], u[ j ] ) ) return false; } return true; } /* Returns a string representation of the contents of the specified big array. * * The string representation consists of a list of the big array's elements, enclosed in square brackets ("[]"). Adjacent elements are separated by the characters ", " (a comma followed by a space). Returns "null" if a is null. * @param a the big array whose string representation to return. * @return the string representation of a. */ public static KEY_GENERIC String toString( final KEY_GENERIC_TYPE[][] a ) { if ( a == null ) return "null"; final long last = length( a ) - 1; if ( last == - 1 ) return "[]"; final StringBuilder b = new StringBuilder(); b.append('['); for ( long i = 0; ; i++ ) { b.append( String.valueOf( get( a, i ) ) ); if ( i == last ) return b.append(']').toString(); b.append(", "); } } /** Ensures that a range given by its first (inclusive) and last (exclusive) elements fits a big array. * *
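* A sketch of the intended use in a method accepting a big-array range (illustrative;
* it assumes the int specialization IntBigArrays generated from this template):
* <pre>
* public static long sum( final int[][] a, final long from, final long to ) {
*     IntBigArrays.ensureFromTo( a, from, to );   // fails fast on an invalid range
*     long s = 0;
*     for( long i = from; i != to; i++ ) s += IntBigArrays.get( a, i );
*     return s;
* }
* </pre>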

This method may be used whenever a big array range check is needed. * * @param a a big array. * @param from a start index (inclusive). * @param to an end index (exclusive). * @throws IllegalArgumentException if from is greater than to. * @throws ArrayIndexOutOfBoundsException if from or to are greater than the big array length or negative. */ public static KEY_GENERIC void ensureFromTo( final KEY_GENERIC_TYPE[][] a, final long from, final long to ) { BigArrays.ensureFromTo( length( a ), from, to ); } /** Ensures that a range given by an offset and a length fits a big array. * *

This method may be used whenever a big array range check is needed. * * @param a a big array. * @param offset a start index. * @param length a length (the number of elements in the range). * @throws IllegalArgumentException if length is negative. * @throws ArrayIndexOutOfBoundsException if offset is negative or offset+length is greater than the big array length. */ public static KEY_GENERIC void ensureOffsetLength( final KEY_GENERIC_TYPE[][] a, final long offset, final long length ) { BigArrays.ensureOffsetLength( length( a ), offset, length ); } /** A type-specific content-based hash strategy for big arrays. */ private static final class BigArrayHashStrategy KEY_GENERIC implements Hash.Strategy, java.io.Serializable { private static final long serialVersionUID = -7046029254386353129L; public int hashCode( final KEY_GENERIC_TYPE[][] o ) { return java.util.Arrays.deepHashCode( o ); } public boolean equals( final KEY_GENERIC_TYPE[][] a, final KEY_GENERIC_TYPE[][] b ) { return BIG_ARRAYS.equals( a, b ); } } /** A type-specific content-based hash strategy for big arrays. * *
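* For instance (an illustrative sketch, assuming the int specialization IntBigArrays and
* the class it.unimi.dsi.fastutil.objects.ObjectOpenCustomHashSet), a set of big arrays
* compared by content rather than by reference can be created as
* {@code new ObjectOpenCustomHashSet<int[][]>( IntBigArrays.HASH_STRATEGY )}
* (with an unchecked warning, as the strategy is declared with a raw type).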

This hash strategy may be used in custom hash collections whenever keys are * big arrays, and they must be considered equal by content. This strategy * will handle null correctly, and it is serializable. */ @SuppressWarnings({"rawtypes"}) public final static Hash.Strategy HASH_STRATEGY = new BigArrayHashStrategy(); private static final int SMALL = 7; private static final int MEDIUM = 40; private static KEY_GENERIC void vecSwap( final KEY_GENERIC_TYPE[][] x, long a, long b, final long n ) { for( int i = 0; i < n; i++, a++, b++ ) swap( x, a, b ); } private static KEY_GENERIC long med3( final KEY_GENERIC_TYPE x[][], final long a, final long b, final long c, KEY_COMPARATOR KEY_GENERIC comp ) { int ab = comp.compare( get( x, a ), get( x, b ) ); int ac = comp.compare( get( x, a ), get( x, c ) ); int bc = comp.compare( get( x, b ), get( x, c ) ); return ( ab < 0 ? ( bc < 0 ? b : ac < 0 ? c : a ) : ( bc > 0 ? b : ac > 0 ? c : a ) ); } private static KEY_GENERIC void selectionSort( final KEY_GENERIC_TYPE[][] a, final long from, final long to, final KEY_COMPARATOR KEY_GENERIC comp ) { for( long i = from; i < to - 1; i++ ) { long m = i; for( long j = i + 1; j < to; j++ ) if ( comp.compare( BIG_ARRAYS.get( a, j ), BIG_ARRAYS.get( a, m ) ) < 0 ) m = j; if ( m != i ) swap( a, i, m ); } } /** Sorts the specified range of elements according to the order induced by the specified * comparator using quicksort. * *
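* A usage sketch (illustrative; it assumes the int specialization IntBigArrays and the
* ready-made reverse-order comparator IntComparators.OPPOSITE_COMPARATOR):
* <pre>
* int[][] a = IntBigArrays.wrap( new int[] { 3, 1, 2 } );
* IntBigArrays.quickSort( a, 0, IntBigArrays.length( a ), IntComparators.OPPOSITE_COMPARATOR );
* // a now contains 3, 2, 1
* </pre>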

The sorting algorithm is a tuned quicksort adapted from Jon L. Bentley and M. Douglas * McIlroy, “Engineering a Sort Function”, Software: Practice and Experience, 23(11), pages * 1249−1265, 1993. * * @param x the big array to be sorted. * @param from the index of the first element (inclusive) to be sorted. * @param to the index of the last element (exclusive) to be sorted. * @param comp the comparator to determine the sorting order. */ public static KEY_GENERIC void quickSort( final KEY_GENERIC_TYPE[][] x, final long from, final long to, final KEY_COMPARATOR KEY_GENERIC comp ) { final long len = to - from; // Selection sort on smallest arrays if ( len < SMALL ) { selectionSort( x, from, to, comp ); return; } // Choose a partition element, v long m = from + len / 2; // Small arrays, middle element if ( len > SMALL ) { long l = from; long n = to - 1; if ( len > MEDIUM ) { // Big arrays, pseudomedian of 9 long s = len / 8; l = med3( x, l, l + s, l + 2 * s, comp ); m = med3( x, m - s, m, m + s, comp ); n = med3( x, n - 2 * s, n - s, n, comp ); } m = med3( x, l, m, n, comp ); // Mid-size, med of 3 } final KEY_GENERIC_TYPE v = get( x, m ); // Establish Invariant: v* (v)* v* long a = from, b = a, c = to - 1, d = c; while(true) { int comparison; while ( b <= c && ( comparison = comp.compare( get( x, b ), v ) ) <= 0 ) { if ( comparison == 0 ) swap( x, a++, b ); b++; } while (c >= b && ( comparison = comp.compare( get( x, c ), v ) ) >=0 ) { if ( comparison == 0 ) swap( x, c, d-- ); c--; } if ( b > c ) break; swap( x, b++, c-- ); } // Swap partition elements back to middle long s, n = to; s = Math.min( a - from, b - a ); vecSwap( x, from, b - s, s ); s = Math.min( d - c, n - d- 1 ); vecSwap( x, b, n - s, s ); // Recursively sort non-partition-elements if ( ( s = b - a ) > 1 ) quickSort( x, from, from + s, comp ); if ( ( s = d - c ) > 1 ) quickSort( x, n - s, n, comp ); } SUPPRESS_WARNINGS_KEY_UNCHECKED private static KEY_GENERIC long med3( final KEY_GENERIC_TYPE x[][], final long a, final long b, final long c ) { int ab = KEY_CMP( get( x, a ), get( x, b ) ); int ac = KEY_CMP( get( x, a ), get( x, c ) ); int bc = KEY_CMP( get( x, b ), get( x, c ) ); return ( ab < 0 ? ( bc < 0 ? b : ac < 0 ? c : a ) : ( bc > 0 ? b : ac > 0 ? c : a ) ); } SUPPRESS_WARNINGS_KEY_UNCHECKED private static KEY_GENERIC void selectionSort( final KEY_GENERIC_TYPE[][] a, final long from, final long to ) { for( long i = from; i < to - 1; i++ ) { long m = i; for( long j = i + 1; j < to; j++ ) if ( KEY_LESS( BIG_ARRAYS.get( a, j ), BIG_ARRAYS.get( a, m ) ) ) m = j; if ( m != i ) swap( a, i, m ); } } /** Sorts the specified big array according to the order induced by the specified * comparator using quicksort. * *

The sorting algorithm is a tuned quicksort adapted from Jon L. Bentley and M. Douglas * McIlroy, “Engineering a Sort Function”, Software: Practice and Experience, 23(11), pages * 1249−1265, 1993. * * @param x the big array to be sorted. * @param comp the comparator to determine the sorting order. * */ public static KEY_GENERIC void quickSort( final KEY_GENERIC_TYPE[][] x, final KEY_COMPARATOR KEY_GENERIC comp ) { quickSort( x, 0, BIG_ARRAYS.length( x ), comp ); } /** Sorts the specified range of elements according to the natural ascending order using quicksort. * *

The sorting algorithm is a tuned quicksort adapted from Jon L. Bentley and M. Douglas * McIlroy, “Engineering a Sort Function”, Software: Practice and Experience, 23(11), pages * 1249−1265, 1993. * * @param x the big array to be sorted. * @param from the index of the first element (inclusive) to be sorted. * @param to the index of the last element (exclusive) to be sorted. */ SUPPRESS_WARNINGS_KEY_UNCHECKED public static KEY_GENERIC void quickSort( final KEY_GENERIC_TYPE[][] x, final long from, final long to ) { final long len = to - from; // Selection sort on smallest arrays if ( len < SMALL ) { selectionSort( x, from, to ); return; } // Choose a partition element, v long m = from + len / 2; // Small arrays, middle element if ( len > SMALL ) { long l = from; long n = to - 1; if ( len > MEDIUM ) { // Big arrays, pseudomedian of 9 long s = len / 8; l = med3( x, l, l + s, l + 2 * s ); m = med3( x, m - s, m, m + s ); n = med3( x, n - 2 * s, n - s, n ); } m = med3( x, l, m, n ); // Mid-size, med of 3 } final KEY_GENERIC_TYPE v = get( x, m ); // Establish Invariant: v* (v)* v* long a = from, b = a, c = to - 1, d = c; while(true) { int comparison; while ( b <= c && ( comparison = KEY_CMP( get( x, b ), v ) ) <= 0 ) { if ( comparison == 0 ) swap( x, a++, b ); b++; } while (c >= b && ( comparison = KEY_CMP( get( x, c ), v ) ) >=0 ) { if ( comparison == 0 ) swap( x, c, d-- ); c--; } if ( b > c ) break; swap( x, b++, c-- ); } // Swap partition elements back to middle long s, n = to; s = Math.min( a - from, b - a ); vecSwap( x, from, b - s, s ); s = Math.min( d - c, n - d- 1 ); vecSwap( x, b, n - s, s ); // Recursively sort non-partition-elements if ( ( s = b - a ) > 1 ) quickSort( x, from, from + s ); if ( ( s = d - c ) > 1 ) quickSort( x, n - s, n ); } /** Sorts the specified big array according to the natural ascending order using quicksort. * *

The sorting algorithm is a tuned quicksort adapted from Jon L. Bentley and M. Douglas * McIlroy, “Engineering a Sort Function”, Software: Practice and Experience, 23(11), pages * 1249−1265, 1993. * * @param x the big array to be sorted. */ public static KEY_GENERIC void quickSort( final KEY_GENERIC_TYPE[][] x ) { quickSort( x, 0, BIG_ARRAYS.length( x ) ); } #if ! KEY_CLASS_Boolean /** * Searches a range of the specified big array for the specified value using * the binary search algorithm. The range must be sorted prior to making this call. * If it is not sorted, the results are undefined. If the range contains multiple elements with * the specified value, there is no guarantee which one will be found. * * @param a the big array to be searched. * @param from the index of the first element (inclusive) to be searched. * @param to the index of the last element (exclusive) to be searched. * @param key the value to be searched for. * @return index of the search key, if it is contained in the big array; * otherwise, (-(insertion point) - 1). The insertion * point is defined as the the point at which the value would * be inserted into the big array: the index of the first * element greater than the key, or the length of the big array, if all * elements in the big array are less than the specified key. Note * that this guarantees that the return value will be >= 0 if * and only if the key is found. * @see java.util.Arrays */ SUPPRESS_WARNINGS_KEY_UNCHECKED public static KEY_GENERIC long binarySearch( final KEY_GENERIC_TYPE[][] a, long from, long to, final KEY_GENERIC_TYPE key ) { KEY_GENERIC_TYPE midVal; to--; while (from <= to) { final long mid = (from + to) >>> 1; midVal = get( a, mid ); #if KEYS_PRIMITIVE if (midVal < key) from = mid + 1; else if (midVal > key) to = mid - 1; else return mid; #else final int cmp = ((Comparable KEY_SUPER_GENERIC)midVal).compareTo( key ); if ( cmp < 0 ) from = mid + 1; else if (cmp > 0) to = mid - 1; else return mid; #endif } return -( from + 1 ); } /** * Searches a big array for the specified value using * the binary search algorithm. The range must be sorted prior to making this call. * If it is not sorted, the results are undefined. If the range contains multiple elements with * the specified value, there is no guarantee which one will be found. * * @param a the big array to be searched. * @param key the value to be searched for. * @return index of the search key, if it is contained in the big array; * otherwise, (-(insertion point) - 1). The insertion * point is defined as the the point at which the value would * be inserted into the big array: the index of the first * element greater than the key, or the length of the big array, if all * elements in the big array are less than the specified key. Note * that this guarantees that the return value will be >= 0 if * and only if the key is found. * @see java.util.Arrays */ public static KEY_GENERIC long binarySearch( final KEY_GENERIC_TYPE[][] a, final KEY_TYPE key ) { return binarySearch( a, 0, BIG_ARRAYS.length( a ), key ); } /** * Searches a range of the specified big array for the specified value using * the binary search algorithm and a specified comparator. The range must be sorted following the comparator prior to making this call. * If it is not sorted, the results are undefined. If the range contains multiple elements with * the specified value, there is no guarantee which one will be found. * * @param a the big array to be searched. * @param from the index of the first element (inclusive) to be searched. 
* @param to the index of the last element (exclusive) to be searched. * @param key the value to be searched for. * @param c a comparator. * @return index of the search key, if it is contained in the big array; * otherwise, (-(insertion point) - 1). The insertion * point is defined as the the point at which the value would * be inserted into the big array: the index of the first * element greater than the key, or the length of the big array, if all * elements in the big array are less than the specified key. Note * that this guarantees that the return value will be >= 0 if * and only if the key is found. * @see java.util.Arrays */ public static KEY_GENERIC long binarySearch( final KEY_GENERIC_TYPE[][] a, long from, long to, final KEY_GENERIC_TYPE key, final KEY_COMPARATOR KEY_GENERIC c ) { KEY_GENERIC_TYPE midVal; to--; while (from <= to) { final long mid = (from + to) >>> 1; midVal = get( a, mid ); final int cmp = c.compare( midVal, key ); if ( cmp < 0 ) from = mid + 1; else if (cmp > 0) to = mid - 1; else return mid; // key found } return -( from + 1 ); } /** * Searches a big array for the specified value using * the binary search algorithm and a specified comparator. The range must be sorted following the comparator prior to making this call. * If it is not sorted, the results are undefined. If the range contains multiple elements with * the specified value, there is no guarantee which one will be found. * * @param a the big array to be searched. * @param key the value to be searched for. * @param c a comparator. * @return index of the search key, if it is contained in the big array; * otherwise, (-(insertion point) - 1). The insertion * point is defined as the the point at which the value would * be inserted into the big array: the index of the first * element greater than the key, or the length of the big array, if all * elements in the big array are less than the specified key. Note * that this guarantees that the return value will be >= 0 if * and only if the key is found. * @see java.util.Arrays */ public static KEY_GENERIC long binarySearch( final KEY_GENERIC_TYPE[][] a, final KEY_GENERIC_TYPE key, final KEY_COMPARATOR KEY_GENERIC c ) { return binarySearch( a, 0, BIG_ARRAYS.length( a ), key, c ); } #if KEYS_PRIMITIVE /** The size of a digit used during radix sort (must be a power of 2). */ private static final int DIGIT_BITS = 8; /** The mask to extract a digit of {@link #DIGIT_BITS} bits. */ private static final int DIGIT_MASK = ( 1 << DIGIT_BITS ) - 1; /** The number of digits per element. */ private static final int DIGITS_PER_ELEMENT = KEY_CLASS.SIZE / DIGIT_BITS; /** This method fixes negative numbers so that the combination exponent/significand is lexicographically sorted. */ #if KEY_CLASS_Double private static final long fixDouble( final double d ) { final long l = Double.doubleToRawLongBits( d ); return l >= 0 ? l : l ^ 0x7FFFFFFFFFFFFFFFL; } #elif KEY_CLASS_Float private static final long fixFloat( final float f ) { final long i = Float.floatToRawIntBits( f ); return i >= 0 ? i : i ^ 0x7FFFFFFF; } #endif /** Sorts the specified big array using radix sort. * *
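* A usage sketch (illustrative; it assumes the int specialization IntBigArrays generated
* from this template):
* <pre>
* int[][] a = IntBigArrays.wrap( new int[] { 5, -1, 3 } );
* IntBigArrays.radixSort( a );   // ascending order only: a now contains -1, 3, 5
* </pre>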

The sorting algorithm is a tuned radix sort adapted from Peter M. McIlroy, Keith Bostic and M. Douglas * McIlroy, “Engineering radix sort”, Computing Systems, 6(1), pages 5−27 (1993), * and further improved using the digit-oracle idea described by * Juha Kärkkäinen and Tommi Rantala in “Engineering radix sort for strings”, * String Processing and Information Retrieval, 15th International Symposium, volume 5280 of * Lecture Notes in Computer Science, pages 3−14, Springer (2008). * *

This implementation is significantly faster than quicksort * already at small sizes (say, more than 10000 elements), but it can only * sort in ascending order. * It will allocate a support array of bytes with the same number of elements as the array to be sorted. * * @param a the big array to be sorted. */ public static void radixSort( final KEY_TYPE[][] a ) { radixSort( a, 0, BIG_ARRAYS.length( a ) ); } /** Sorts the specified big array using radix sort. * *

The sorting algorithm is a tuned radix sort adapted from Peter M. McIlroy, Keith Bostic and M. Douglas * McIlroy, “Engineering radix sort”, Computing Systems, 6(1), pages 5−27 (1993), * and further improved using the digit-oracle idea described by * Juha Kärkkäinen and Tommi Rantala in “Engineering radix sort for strings”, * String Processing and Information Retrieval, 15th International Symposium, volume 5280 of * Lecture Notes in Computer Science, pages 3−14, Springer (2008). * *

This implementation is significantly faster than quicksort * already at small sizes (say, more than 10000 elements), but it can only * sort in ascending order. * It will allocate a support array of bytes with the same number of elements as the array to be sorted. * * @param a the big array to be sorted. * @param from the index of the first element (inclusive) to be sorted. * @param to the index of the last element (exclusive) to be sorted. */ public static void radixSort( final KEY_TYPE[][] a, final long from, final long to ) { final int maxLevel = DIGITS_PER_ELEMENT - 1; final int stackSize = ( ( 1 << DIGIT_BITS ) - 1 ) * ( DIGITS_PER_ELEMENT - 1 ) + 1; final long[] offsetStack = new long[ stackSize ]; int offsetPos = 0; final long[] lengthStack = new long[ stackSize ]; int lengthPos = 0; final int[] levelStack = new int[ stackSize ]; int levelPos = 0; offsetStack[ offsetPos++ ] = from; lengthStack[ lengthPos++ ] = to - from; levelStack[ levelPos++ ] = 0; final long[] count = new long[ 1 << DIGIT_BITS ]; final long[] pos = new long[ 1 << DIGIT_BITS ]; final byte[][] digit = ByteBigArrays.newBigArray( to - from ); while( offsetPos > 0 ) { final long first = offsetStack[ --offsetPos ]; final long length = lengthStack[ --lengthPos ]; final int level = levelStack[ --levelPos ]; #if KEY_CLASS_Character final int signMask = 0; #else final int signMask = level % DIGITS_PER_ELEMENT == 0 ? 1 << DIGIT_BITS - 1 : 0; #endif if ( length < MEDIUM ) { selectionSort( a, first, first + length ); continue; } final int shift = ( DIGITS_PER_ELEMENT - 1 - level % DIGITS_PER_ELEMENT ) * DIGIT_BITS; // This is the shift that extract the right byte from a key // Count keys. for( long i = length; i-- != 0; ) ByteBigArrays.set( digit, i, (byte)( ( ( KEY2LEXINT( BIG_ARRAYS.get( a, first + i ) ) >>> shift ) & DIGIT_MASK ) ^ signMask )); for( long i = length; i-- != 0; ) count[ ByteBigArrays.get( digit, i ) & 0xFF ]++; // Compute cumulative distribution and push non-singleton keys on stack. int lastUsed = -1; long p = 0; for( int i = 0; i < 1 << DIGIT_BITS; i++ ) { if ( count[ i ] != 0 ) { lastUsed = i; if ( level < maxLevel && count[ i ] > 1 ){ //System.err.println( " Pushing " + new StackEntry( first + pos[ i - 1 ], first + pos[ i ], level + 1 ) ); offsetStack[ offsetPos++ ] = p + first; lengthStack[ lengthPos++ ] = count[ i ]; levelStack[ levelPos++ ] = level + 1; } } pos[ i ] = ( p += count[ i ] ); } // When all slots are OK, the last slot is necessarily OK. 
final long end = length - count[ lastUsed ]; count[ lastUsed ] = 0; // i moves through the start of each block int c = -1; for( long i = 0, d; i < end; i += count[ c ], count[ c ] = 0 ) { KEY_TYPE t = BIG_ARRAYS.get( a, i +first ); c = ByteBigArrays.get( digit, i ) & 0xFF; while( ( d = --pos[ c ] ) > i ) { final KEY_TYPE z = t; final int zz = c; t = BIG_ARRAYS.get( a, d + first ); c = ByteBigArrays.get( digit, d ) & 0xFF; BIG_ARRAYS.set( a, d + first, z ); ByteBigArrays.set( digit, d, (byte)zz ); } BIG_ARRAYS.set( a, i + first, t ); } } } private static void selectionSort( final KEY_TYPE[][] a, final KEY_TYPE[][] b, final long from, final long to ) { for( long i = from; i < to - 1; i++ ) { long m = i; for( long j = i + 1; j < to; j++ ) if ( KEY_LESS( BIG_ARRAYS.get( a, j ), BIG_ARRAYS.get( a, m ) ) || KEY_CMP_EQ( BIG_ARRAYS.get( a, j ), BIG_ARRAYS.get( a, m ) ) && KEY_LESS( BIG_ARRAYS.get( b, j ), BIG_ARRAYS.get( b, m ) ) ) m = j; if ( m != i ) { KEY_TYPE t = BIG_ARRAYS.get( a, i ); BIG_ARRAYS.set( a, i, BIG_ARRAYS.get( a, m ) ); BIG_ARRAYS.set( a, m, t ); t = BIG_ARRAYS.get( b, i ); BIG_ARRAYS.set( b, i, BIG_ARRAYS.get( b, m ) ); BIG_ARRAYS.set( b, m, t ); } } } /** Sorts the specified pair of big arrays lexicographically using radix sort. *

The sorting algorithm is a tuned radix sort adapted from Peter M. McIlroy, Keith Bostic and M. Douglas * McIlroy, “Engineering radix sort”, Computing Systems, 6(1), pages 5−27 (1993), * and further improved using the digit-oracle idea described by * Juha Kärkkäinen and Tommi Rantala in “Engineering radix sort for strings”, * String Processing and Information Retrieval, 15th International Symposium, volume 5280 of * Lecture Notes in Computer Science, pages 3−14, Springer (2008). * *

This method implements a lexicographical sorting of the arguments. Pairs of elements * in the same position in the two provided arrays will be considered a single key, and permuted * accordingly. In the end, either a[ i ] < a[ i + 1 ] or a[ i ] == a[ i + 1 ] and b[ i ] <= b[ i + 1 ]. * *
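* A sketch of the effect (illustrative; it assumes the int specialization IntBigArrays
* generated from this template):
* <pre>
* int[][] key = IntBigArrays.wrap( new int[] { 2, 1, 1 } );
* int[][] val = IntBigArrays.wrap( new int[] { 9, 7, 5 } );
* IntBigArrays.radixSort( key, val );
* // key is now 1, 1, 2 and val is now 5, 7, 9: pairs move together, ties are broken on val
* </pre>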

This implementation is significantly faster than quicksort * already at small sizes (say, more than 10000 elements), but it can only * sort in ascending order. It will allocate a support array of bytes with the same number of elements as the arrays to be sorted. * * @param a the first big array to be sorted. * @param b the second big array to be sorted. */ public static void radixSort( final KEY_TYPE[][] a, final KEY_TYPE[][] b ) { radixSort( a, b, 0, BIG_ARRAYS.length( a ) ); } /** Sorts the specified pair of big arrays lexicographically using radix sort. * *

The sorting algorithm is a tuned radix sort adapted from Peter M. McIlroy, Keith Bostic and M. Douglas * McIlroy, “Engineering radix sort”, Computing Systems, 6(1), pages 5−27 (1993), * and further improved using the digit-oracle idea described by * Juha Kärkkäinen and Tommi Rantala in “Engineering radix sort for strings”, * String Processing and Information Retrieval, 15th International Symposium, volume 5280 of * Lecture Notes in Computer Science, pages 3−14, Springer (2008). * *

This method implements a lexicographical sorting of the arguments. Pairs of elements * in the same position in the two provided arrays will be considered a single key, and permuted * accordingly. In the end, either a[ i ] < a[ i + 1 ] or a[ i ] == a[ i + 1 ] and b[ i ] <= b[ i + 1 ]. * *

This implementation is significantly faster than quicksort * already at small sizes (say, more than 10000 elements), but it can only * sort in ascending order. It will allocate a support array of bytes with the same number of elements as the arrays to be sorted. * * @param a the first big array to be sorted. * @param b the second big array to be sorted. * @param from the index of the first element (inclusive) to be sorted. * @param to the index of the last element (exclusive) to be sorted. */ public static void radixSort( final KEY_TYPE[][] a, final KEY_TYPE[][] b, final long from, final long to ) { final int layers = 2; if ( BIG_ARRAYS.length( a ) != BIG_ARRAYS.length( b ) ) throw new IllegalArgumentException( "Array size mismatch." ); final int maxLevel = DIGITS_PER_ELEMENT * layers - 1; final int stackSize = ( ( 1 << DIGIT_BITS ) - 1 ) * ( layers * DIGITS_PER_ELEMENT - 1 ) + 1; final long[] offsetStack = new long[ stackSize ]; int offsetPos = 0; final long[] lengthStack = new long[ stackSize ]; int lengthPos = 0; final int[] levelStack = new int[ stackSize ]; int levelPos = 0; offsetStack[ offsetPos++ ] = from; lengthStack[ lengthPos++ ] = to - from; levelStack[ levelPos++ ] = 0; final long[] count = new long[ 1 << DIGIT_BITS ]; final long[] pos = new long[ 1 << DIGIT_BITS ]; final byte[][] digit = ByteBigArrays.newBigArray( to - from ); while( offsetPos > 0 ) { final long first = offsetStack[ --offsetPos ]; final long length = lengthStack[ --lengthPos ]; final int level = levelStack[ --levelPos ]; #if KEY_CLASS_Character final int signMask = 0; #else final int signMask = level % DIGITS_PER_ELEMENT == 0 ? 1 << DIGIT_BITS - 1 : 0; #endif if ( length < MEDIUM ) { selectionSort( a, b, first, first + length ); continue; } final KEY_TYPE[][] k = level < DIGITS_PER_ELEMENT ? a : b; // This is the key array final int shift = ( DIGITS_PER_ELEMENT - 1 - level % DIGITS_PER_ELEMENT ) * DIGIT_BITS; // This is the shift that extract the right byte from a key // Count keys. for( long i = length; i-- != 0; ) ByteBigArrays.set( digit, i, (byte)( ( ( KEY2LEXINT( BIG_ARRAYS.get( k, first + i ) ) >>> shift ) & DIGIT_MASK ) ^ signMask ) ); for( long i = length; i-- != 0; ) count[ ByteBigArrays.get( digit, i ) & 0xFF ]++; // Compute cumulative distribution and push non-singleton keys on stack. int lastUsed = -1; long p = 0; for( int i = 0; i < 1 << DIGIT_BITS; i++ ) { if ( count[ i ] != 0 ) { lastUsed = i; if ( level < maxLevel && count[ i ] > 1 ){ offsetStack[ offsetPos++ ] = p + first; lengthStack[ lengthPos++ ] = count[ i ]; levelStack[ levelPos++ ] = level + 1; } } pos[ i ] = ( p += count[ i ] ); } // When all slots are OK, the last slot is necessarily OK. final long end = length - count[ lastUsed ]; count[ lastUsed ] = 0; // i moves through the start of each block int c = -1; for( long i = 0, d; i < end; i += count[ c ], count[ c ] = 0 ) { KEY_TYPE t = BIG_ARRAYS.get( a, i + first ); KEY_TYPE u = BIG_ARRAYS.get( b, i + first ); c = ByteBigArrays.get( digit, i ) & 0xFF; while( ( d = --pos[ c ] ) > i ) { KEY_TYPE z = t; final int zz = c; t = BIG_ARRAYS.get( a, d + first ); BIG_ARRAYS.set( a, d + first, z ); z = u; u = BIG_ARRAYS.get( b, d + first ); BIG_ARRAYS.set( b, d + first, z ); c = ByteBigArrays.get( digit, d ) & 0xFF; ByteBigArrays.set( digit, d, (byte)zz ); } BIG_ARRAYS.set( a, i + first, t ); BIG_ARRAYS.set( b, i + first, u ); } } } #endif #endif /** Shuffles the specified big array fragment using the specified pseudorandom number generator. * * @param a the big array to be shuffled. 
* @param from the index of the first element (inclusive) to be shuffled. * @param to the index of the last element (exclusive) to be shuffled. * @param random a pseudorandom number generator (please use a XorShift* generator). * @return a. */ public static KEY_GENERIC KEY_GENERIC_TYPE[][] shuffle( final KEY_GENERIC_TYPE[][] a, final long from, final long to, final Random random ) { for( long i = to - from; i-- != 0; ) { final long p = ( random.nextLong() & 0x7FFFFFFFFFFFFFFFL ) % ( i + 1 ); final KEY_GENERIC_TYPE t = get( a, from + i ); set( a, from + i, get( a, from + p ) ); set( a, from + p, t ); } return a; } /** Shuffles the specified big array using the specified pseudorandom number generator. * * @param a the big array to be shuffled. * @param random a pseudorandom number generator (please use a XorShift* generator). * @return a. */ public static KEY_GENERIC KEY_GENERIC_TYPE[][] shuffle( final KEY_GENERIC_TYPE[][] a, final Random random ) { for( long i = length( a ); i-- != 0; ) { final long p = ( random.nextLong() & 0x7FFFFFFFFFFFFFFFL ) % ( i + 1 ); final KEY_GENERIC_TYPE t = get( a, i ); set( a, i, get( a, p ) ); set( a, p, t ); } return a; } #if KEY_CLASS_Integer #ifdef TEST private static long seed = System.currentTimeMillis(); private static java.util.Random r = new java.util.Random( seed ); private static KEY_TYPE genKey() { #if KEY_CLASS_Byte || KEY_CLASS_Short || KEY_CLASS_Character return (KEY_TYPE)(r.nextInt()); #elif KEYS_PRIMITIVE return r.NEXT_KEY(); #elif KEY_CLASS_Object return Integer.toBinaryString( r.nextInt() ); #else return new java.io.Serializable() {}; #endif } private static Object[] k, v, nk; private static KEY_TYPE kt[]; private static KEY_TYPE nkt[]; private static BIG_ARRAY_BIG_LIST topList; protected static void speedTest( int n, boolean b ) {} protected static void test( int n ) { KEY_TYPE[][] a = BIG_ARRAYS.newBigArray( n ); for( int i = 0; i < n; i++ ) set( a, i, i ); BIG_ARRAYS.copy( a, 0, a, 1, n - 2 ); assert a[ 0 ][ 0 ] == 0; for( int i = 0; i < n - 2; i++ ) assert get( a, i + 1 ) == i; for( int i = 0; i < n; i++ ) set( a, i, i ); BIG_ARRAYS.copy( a, 1, a, 0, n - 1 ); for( int i = 0; i < n - 1; i++ ) assert get( a, i ) == i + 1; for( int i = 0; i < n; i++ ) set( a, i, i ); KEY_TYPE[] b = new KEY_TYPE[ n ]; for( int i = 0; i < n; i++ ) b[ i ] = i; assert equals( wrap( b ), a ); System.out.println("Test OK"); return; } public static void main( String args[] ) { int n = Integer.parseInt(args[1]); if ( args.length > 2 ) r = new java.util.Random( seed = Long.parseLong( args[ 2 ] ) ); try { if ("speedTest".equals(args[0]) || "speedComp".equals(args[0])) speedTest( n, "speedComp".equals(args[0]) ); else if ( "test".equals( args[0] ) ) test(n); } catch( Throwable e ) { e.printStackTrace( System.err ); System.err.println( "seed: " + seed ); } } #endif #endif } fastutil-7.1.0/drv/BigList.drv0000664000000000000000000001210713050701620014712 0ustar rootroot/* * Copyright (C) 2010-2016 Sebastiano Vigna * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package PACKAGE; import java.util.List; import it.unimi.dsi.fastutil.BigList; #if ! KEY_CLASS_Reference /** A type-specific {@link BigList}; provides some additional methods that use polymorphism to avoid (un)boxing. * *

Additionally, this interface strengthens {@link #iterator()}, {@link #listIterator()}, * {@link #listIterator(long)} and {@link #subList(long,long)}. * *
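* A usage sketch (illustrative; it assumes the int specializations IntBigList and
* IntBigArrayBigList generated from these templates):
* <pre>
* IntBigList l = new IntBigArrayBigList();
* l.add( 42 );               // polymorphic method: no boxing
* long n = l.size64();       // sizes and indices are longs
* int first = l.getInt( 0 );
* </pre>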

Besides polymorphic methods, this interface specifies methods to copy into a big array or remove contiguous * sublists. Although the abstract implementation of this interface provides simple, one-by-one implementations * of these methods, it is expected that concrete implementations override them with optimized versions. * * @see List */ public interface BIG_LIST KEY_GENERIC extends BigList, COLLECTION KEY_GENERIC, Comparable> { #else /** A type-specific {@link BigList}; provides some additional methods that use polymorphism to avoid (un)boxing. * *

Additionally, this interface strengthens {@link #iterator()}, {@link #listIterator()}, * {@link #listIterator(long)} and {@link #subList(long,long)}. * *

Besides polymorphic methods, this interface specifies methods to copy into a big array or remove contiguous * sublists. Although the abstract implementation of this interface provides simple, one-by-one implementations * of these methods, it is expected that concrete implementations override them with optimized versions. * * @see List */ public interface BIG_LIST KEY_GENERIC extends BigList, COLLECTION KEY_GENERIC { #endif /** Returns a type-specific big-list iterator on this type-specific big list. * * @see List#iterator() */ KEY_BIG_LIST_ITERATOR KEY_GENERIC iterator(); /** Returns a type-specific big-list iterator on this type-specific big list. * * @see List#listIterator() */ KEY_BIG_LIST_ITERATOR KEY_GENERIC listIterator(); /** Returns a type-specific list iterator on this type-specific big list starting at a given index. * * @see BigList#listIterator(long) */ KEY_BIG_LIST_ITERATOR KEY_GENERIC listIterator( long index ); /** Returns a type-specific view of the portion of this type-specific big list from the index from, inclusive, to the index to, exclusive. * *

Note that this specification strengthens the one given in {@link BigList#subList(long,long)}. * * @see BigList#subList(long,long) */ BIG_LIST KEY_GENERIC subList( long from, long to ); /** Copies (hopefully quickly) elements of this type-specific big list into the given big array. * * @param from the start index (inclusive). * @param a the destination big array. * @param offset the offset into the destination big array where to store the first element copied. * @param length the number of elements to be copied. */ void getElements( long from, KEY_TYPE a[][], long offset, long length ); /** Removes (hopefully quickly) elements of this type-specific big list. * * @param from the start index (inclusive). * @param to the end index (exclusive). */ void removeElements( long from, long to ); /** Add (hopefully quickly) elements to this type-specific big list. * * @param index the index at which to add elements. * @param a the big array containing the elements. */ void addElements( long index, KEY_GENERIC_TYPE a[][] ); /** Add (hopefully quickly) elements to this type-specific big list. * * @param index the index at which to add elements. * @param a the big array containing the elements. * @param offset the offset of the first element to add. * @param length the number of elements to add. */ void addElements( long index, KEY_GENERIC_TYPE a[][], long offset, long length ); #if KEYS_PRIMITIVE /** * @see List#add(int,Object) */ void add( long index, KEY_TYPE key ); /** * @see List#addAll(int,java.util.Collection) */ boolean addAll( long index, COLLECTION c ); /** * @see List#addAll(int,java.util.Collection) */ boolean addAll( long index, BIG_LIST c ); /** * @see List#addAll(int,java.util.Collection) */ boolean addAll( BIG_LIST c ); /** * @see BigList#get(long) */ KEY_TYPE GET_KEY( long index ); /** * @see BigList#indexOf(Object) */ long indexOf( KEY_TYPE k ); /** * @see BigList#lastIndexOf(Object) */ long lastIndexOf( KEY_TYPE k ); /** * @see BigList#remove(long) */ KEY_TYPE REMOVE_KEY( long index ); /** * @see BigList#set(long,Object) */ KEY_TYPE set( long index, KEY_TYPE k ); #endif }fastutil-7.1.0/drv/BigListIterator.drv0000664000000000000000000000210613050701620016422 0ustar rootroot/* * Copyright (C) 2010-2016 Sebastiano Vigna * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package PACKAGE; import it.unimi.dsi.fastutil.BigListIterator; /** A type-specific {@link BigListIterator}. * *
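* A usage sketch (illustrative; it assumes the int specializations IntBigList and
* IntBigListIterator generated from these templates; list and process() are placeholders):
* <pre>
* IntBigListIterator i = list.listIterator();   // list is some IntBigList
* i.skip( 3000000000L );                        // the skip count is a long
* while( i.hasNext() ) process( i.nextInt() );  // process() stands for any consumer
* </pre>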

This interface adds a skipping method that take longs. * * @see BigListIterator */ public interface KEY_BIG_LIST_ITERATOR KEY_GENERIC extends KEY_BIDI_ITERATOR KEY_GENERIC, BigListIterator { #if KEYS_PRIMITIVE void set( KEY_TYPE k ); void add( KEY_TYPE k ); #endif void set( KEY_GENERIC_CLASS k ); void add( KEY_GENERIC_CLASS k ); } fastutil-7.1.0/drv/BigListIterators.drv0000664000000000000000000001502213050701620016606 0ustar rootroot/* * Copyright (C) 2002-2016 Sebastiano Vigna * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package PACKAGE; import java.util.Iterator; import java.util.NoSuchElementException; /** A class providing static methods and objects that do useful things with type-specific iterators. * * @see Iterator */ public class BIG_LIST_ITERATORS { private BIG_LIST_ITERATORS() {} /** A class returning no elements and a type-specific big list iterator interface. * *

This class may be useful to implement your own in case you subclass * a type-specific iterator. */ public static class EmptyBigListIterator KEY_GENERIC extends KEY_ABSTRACT_BIG_LIST_ITERATOR KEY_GENERIC implements java.io.Serializable, Cloneable { private static final long serialVersionUID = -7046029254386353129L; protected EmptyBigListIterator() {} public boolean hasNext() { return false; } public boolean hasPrevious() { return false; } public KEY_GENERIC_TYPE NEXT_KEY() { throw new NoSuchElementException(); } public KEY_GENERIC_TYPE PREV_KEY() { throw new NoSuchElementException(); } public long nextIndex() { return 0; } public long previousIndex() { return -1; } public long skip( long n ) { return 0; }; public long back( long n ) { return 0; }; public Object clone() { return EMPTY_BIG_LIST_ITERATOR; } private Object readResolve() { return EMPTY_BIG_LIST_ITERATOR; } } /** An empty iterator (immutable). It is serializable and cloneable. * *

The class of this objects represent an abstract empty iterator * that can iterate as a type-specific (list) iterator. */ SUPPRESS_WARNINGS_KEY_RAWTYPES public final static EmptyBigListIterator EMPTY_BIG_LIST_ITERATOR = new EmptyBigListIterator(); /** An iterator returning a single element. */ private static class SingletonBigListIterator KEY_GENERIC extends KEY_ABSTRACT_BIG_LIST_ITERATOR KEY_GENERIC { private final KEY_GENERIC_TYPE element; private int curr; public SingletonBigListIterator( final KEY_GENERIC_TYPE element ) { this.element = element; } public boolean hasNext() { return curr == 0; } public boolean hasPrevious() { return curr == 1; } public KEY_GENERIC_TYPE NEXT_KEY() { if ( ! hasNext() ) throw new NoSuchElementException(); curr = 1; return element; } public KEY_GENERIC_TYPE PREV_KEY() { if ( ! hasPrevious() ) throw new NoSuchElementException(); curr = 0; return element; } public long nextIndex() { return curr; } public long previousIndex() { return curr - 1; } } /** Returns an iterator that iterates just over the given element. * * @param element the only element to be returned by a type-specific list iterator. * @return an iterator that iterates just over element. */ public static KEY_GENERIC KEY_BIG_LIST_ITERATOR KEY_GENERIC singleton( final KEY_GENERIC_TYPE element ) { return new SingletonBigListIterator KEY_GENERIC( element ); } /** An unmodifiable wrapper class for big list iterators. */ public static class UnmodifiableBigListIterator KEY_GENERIC extends KEY_ABSTRACT_BIG_LIST_ITERATOR KEY_GENERIC { final protected KEY_BIG_LIST_ITERATOR KEY_GENERIC i; public UnmodifiableBigListIterator( final KEY_BIG_LIST_ITERATOR KEY_GENERIC i ) { this.i = i; } public boolean hasNext() { return i.hasNext(); } public boolean hasPrevious() { return i.hasPrevious(); } public KEY_GENERIC_TYPE NEXT_KEY() { return i.NEXT_KEY(); } public KEY_GENERIC_TYPE PREV_KEY() { return i.PREV_KEY(); } public long nextIndex() { return i.nextIndex(); } public long previousIndex() { return i.previousIndex(); } #if KEYS_PRIMITIVE /** {@inheritDoc} * @deprecated Please use the corresponding type-specific method instead. */ @Deprecated public KEY_GENERIC_CLASS next() { return i.next(); } /** {@inheritDoc} * @deprecated Please use the corresponding type-specific method instead. */ @Deprecated public KEY_GENERIC_CLASS previous() { return i.previous(); } #endif } /** Returns an unmodifiable list iterator backed by the specified list iterator. * * @param i the list iterator to be wrapped in an unmodifiable list iterator. * @return an unmodifiable view of the specified list iterator. */ public static KEY_GENERIC KEY_BIG_LIST_ITERATOR KEY_GENERIC unmodifiable( final KEY_BIG_LIST_ITERATOR KEY_GENERIC i ) { return new UnmodifiableBigListIterator KEY_GENERIC( i ); } /** A class exposing a list iterator as a big-list iterator.. 
*/ public static class BigListIteratorListIterator KEY_GENERIC extends KEY_ABSTRACT_BIG_LIST_ITERATOR KEY_GENERIC { protected final KEY_LIST_ITERATOR KEY_GENERIC i; protected BigListIteratorListIterator( final KEY_LIST_ITERATOR KEY_GENERIC i ) { this.i = i; } private int intDisplacement( long n ) { if ( n < Integer.MIN_VALUE || n > Integer.MAX_VALUE ) throw new IndexOutOfBoundsException( "This big iterator is restricted to 32-bit displacements" ); return (int)n; } public void set( KEY_GENERIC_TYPE ok ) { i.set( ok ); } public void add( KEY_GENERIC_TYPE ok ) { i.add( ok ); } public int back( int n ) { return i.back( n ); } public long back( long n ) { return i.back( intDisplacement( n ) ); } public void remove() { i.remove(); } public int skip( int n ) { return i.skip( n ); } public long skip( long n ) { return i.skip( intDisplacement( n ) ); } public boolean hasNext() { return i.hasNext(); } public boolean hasPrevious() { return i.hasPrevious(); } public KEY_GENERIC_TYPE NEXT_KEY() { return i.NEXT_KEY(); } public KEY_GENERIC_TYPE PREV_KEY() { return i.PREV_KEY(); } public long nextIndex() { return i.nextIndex(); } public long previousIndex() { return i.previousIndex(); } } /** Returns a big-list iterator backed by the specified list iterator. * * @param i the list iterator to adapted to the big-list-iterator interface. * @return a big-list iterator backed by the specified list iterator. */ public static KEY_GENERIC KEY_BIG_LIST_ITERATOR KEY_GENERIC asBigListIterator( final KEY_LIST_ITERATOR KEY_GENERIC i ) { return new BigListIteratorListIterator KEY_GENERIC( i ); } } fastutil-7.1.0/drv/BigLists.drv0000664000000000000000000013355213050701620015105 0ustar rootroot/* * Copyright (C) 2002-2016 Sebastiano Vigna * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package PACKAGE; import it.unimi.dsi.fastutil.BigList; import java.util.Collection; import java.util.List; import java.util.Random; /** A class providing static methods and objects that do useful things with type-specific big lists. * * @see java.util.Collections * @see it.unimi.dsi.fastutil.BigList */ public class BIG_LISTS { private BIG_LISTS() {} /** Shuffles the specified big list using the specified pseudorandom number generator. * * @param l the big list to be shuffled. * @param random a pseudorandom number generator (please use a XorShift* generator). * @return l. */ public static KEY_GENERIC BIG_LIST KEY_GENERIC shuffle( final BIG_LIST KEY_GENERIC l, final Random random ) { for( long i = l.size64(); i-- != 0; ) { final long p = ( random.nextLong() & 0x7FFFFFFFFFFFFFFFL ) % ( i + 1 ); final KEY_GENERIC_TYPE t = l.GET_KEY( i ); l.set( i, l.GET_KEY( p ) ); l.set( p, t ); } return l; } /** An immutable class representing an empty type-specific big list. * *

This class may be useful to implement your own in case you subclass * a type-specific list. */ public static class EmptyBigList KEY_GENERIC extends COLLECTIONS.EmptyCollection KEY_GENERIC implements BIG_LIST KEY_GENERIC, java.io.Serializable, Cloneable { private static final long serialVersionUID = -7046029254386353129L; protected EmptyBigList() {} public void add( final long index, final KEY_GENERIC_TYPE k ) { throw new UnsupportedOperationException(); } public boolean add( final KEY_GENERIC_TYPE k ) { throw new UnsupportedOperationException(); } public KEY_GENERIC_TYPE REMOVE_KEY( long i ) { throw new UnsupportedOperationException(); } public KEY_GENERIC_TYPE set( final long index, final KEY_GENERIC_TYPE k ) { throw new UnsupportedOperationException(); } public long indexOf( KEY_TYPE k ) { return -1; } public long lastIndexOf( KEY_TYPE k ) { return -1; } public boolean addAll( Collection c ) { throw new UnsupportedOperationException(); } public boolean addAll( long i, Collection c ) { throw new UnsupportedOperationException(); } public boolean removeAll( Collection c ) { throw new UnsupportedOperationException(); } public KEY_GENERIC_CLASS get( long i ) { throw new IndexOutOfBoundsException(); } #if KEYS_PRIMITIVE public boolean addAll( COLLECTION c ) { throw new UnsupportedOperationException(); } public boolean addAll( BIG_LIST c ) { throw new UnsupportedOperationException(); } public boolean addAll( long i, COLLECTION c ) { throw new UnsupportedOperationException(); } public boolean addAll( long i, BIG_LIST c ) { throw new UnsupportedOperationException(); } public void add( final long index, final KEY_GENERIC_CLASS k ) { throw new UnsupportedOperationException(); } public boolean add( final KEY_GENERIC_CLASS k ) { throw new UnsupportedOperationException(); } public KEY_GENERIC_CLASS set( final long index, final KEY_GENERIC_CLASS k ) { throw new UnsupportedOperationException(); } public KEY_GENERIC_TYPE GET_KEY( long i ) { throw new IndexOutOfBoundsException(); } public KEY_GENERIC_CLASS remove( long k ) { throw new UnsupportedOperationException(); } public long indexOf( Object k ) { return -1; } public long lastIndexOf( Object k ) { return -1; } #else public boolean remove( Object k ) { throw new UnsupportedOperationException(); } #endif SUPPRESS_WARNINGS_KEY_UNCHECKED public KEY_BIG_LIST_ITERATOR KEY_GENERIC listIterator() { return BIG_LIST_ITERATORS.EMPTY_BIG_LIST_ITERATOR; } SUPPRESS_WARNINGS_KEY_UNCHECKED public KEY_BIG_LIST_ITERATOR KEY_GENERIC iterator() { return BIG_LIST_ITERATORS.EMPTY_BIG_LIST_ITERATOR; } SUPPRESS_WARNINGS_KEY_UNCHECKED public KEY_BIG_LIST_ITERATOR KEY_GENERIC listIterator( long i ) { if ( i == 0 ) return BIG_LIST_ITERATORS.EMPTY_BIG_LIST_ITERATOR; throw new IndexOutOfBoundsException( String.valueOf( i ) ); } public BIG_LIST KEY_GENERIC subList( long from, long to ) { if ( from == 0 && to == 0 ) return this; throw new IndexOutOfBoundsException(); } public void getElements( long from, KEY_TYPE[][] a, long offset, long length ) { BIG_ARRAYS.ensureOffsetLength( a, offset, length ); if ( from != 0 ) throw new IndexOutOfBoundsException(); } public void removeElements( long from, long to ) { throw new UnsupportedOperationException(); } public void addElements( long index, final KEY_GENERIC_TYPE a[][], long offset, long length ) { throw new UnsupportedOperationException(); } public void addElements( long index, final KEY_GENERIC_TYPE a[][] ) { throw new UnsupportedOperationException(); } public void size( long s ) { throw new UnsupportedOperationException(); } public 
long size64() { return 0; } public int compareTo( final BigList o ) { if ( o == this ) return 0; return ((BigList)o).isEmpty() ? 0 : -1; } private Object readResolve() { return EMPTY_BIG_LIST; } public Object clone() { return EMPTY_BIG_LIST; } public int hashCode() { return 1; } @SuppressWarnings("rawtypes") public boolean equals( Object o ) { return o instanceof BigList && ((BigList)o).isEmpty(); } public String toString() { return "[]"; } } /** An empty big list (immutable). It is serializable and cloneable. */ SUPPRESS_WARNINGS_KEY_RAWTYPES public static final EmptyBigList EMPTY_BIG_LIST = new EmptyBigList(); #if KEYS_REFERENCE /** Return an empty big list (immutable). It is serializable and cloneable. * *
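 * <p>For instance, in a reference-based instantiation generated from this template (e.g., the generated
 * ObjectBigLists class), an empty big list might be obtained as follows; the variable name is purely illustrative:
 * <pre>{@code
 * // An immutable, typesafe empty big list.
 * ObjectBigList<String> empty = ObjectBigLists.emptyList();
 * }</pre>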

This method provides typesafe access to {@link #EMPTY_BIG_LIST}. * @return an empty big list (immutable). */ @SuppressWarnings("unchecked") public static KEY_GENERIC BIG_LIST KEY_GENERIC emptyList() { return EMPTY_BIG_LIST; } #endif /** An immutable class representing a type-specific singleton big list. * *
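 * <p>For instance, assuming the long instantiation generated from this template (the generated
 * LongBigLists class), a singleton big list would normally be obtained through the {@code singleton()}
 * factory method defined below rather than by instantiating this class directly:
 * <pre>{@code
 * // A one-element, immutable big list containing just 42.
 * LongBigList one = LongBigLists.singleton( 42 );
 * long x = one.getLong( 0 );  // 42
 * }</pre>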

This class may be useful to implement your own in case you subclass * a type-specific big list. */ public static class Singleton KEY_GENERIC extends ABSTRACT_BIG_LIST KEY_GENERIC implements java.io.Serializable, Cloneable { private static final long serialVersionUID = -7046029254386353129L; private final KEY_GENERIC_TYPE element; private Singleton( final KEY_GENERIC_TYPE element ) { this.element = element; } public KEY_GENERIC_TYPE GET_KEY( final long i ) { if ( i == 0 ) return element; throw new IndexOutOfBoundsException(); } public KEY_GENERIC_TYPE REMOVE_KEY( final long i ) { throw new UnsupportedOperationException(); } public boolean contains( final KEY_TYPE k ) { return KEY_EQUALS( k, element ); } public boolean addAll( final Collection c ) { throw new UnsupportedOperationException(); } public boolean addAll( final long i, final Collection c ) { throw new UnsupportedOperationException(); } public boolean removeAll( final Collection c ) { throw new UnsupportedOperationException(); } public boolean retainAll( final Collection c ) { throw new UnsupportedOperationException(); } /* Slightly optimized w.r.t. the one in ABSTRACT_SET. */ public KEY_TYPE[] TO_KEY_ARRAY() { KEY_TYPE a[] = new KEY_TYPE[ 1 ]; a[ 0 ] = element; return a; } public KEY_BIG_LIST_ITERATOR KEY_GENERIC listIterator() { return BIG_LIST_ITERATORS.singleton( element ); } public KEY_BIG_LIST_ITERATOR KEY_GENERIC iterator() { return listIterator(); } public KEY_BIG_LIST_ITERATOR KEY_GENERIC listIterator( long i ) { if ( i > 1 || i < 0 ) throw new IndexOutOfBoundsException(); KEY_BIG_LIST_ITERATOR KEY_GENERIC l = listIterator(); if ( i == 1 ) l.next(); return l; } SUPPRESS_WARNINGS_KEY_UNCHECKED public BIG_LIST KEY_GENERIC subList( final long from, final long to ) { ensureIndex( from ); ensureIndex( to ); if ( from > to ) throw new IndexOutOfBoundsException( "Start index (" + from + ") is greater than end index (" + to + ")" ); if ( from != 0 || to != 1 ) return EMPTY_BIG_LIST; return this; } @Deprecated public int size() { return 1; } public long size64() { return 1; } public void size( final long size ) { throw new UnsupportedOperationException(); } public void clear() { throw new UnsupportedOperationException(); } public Object clone() { return this; } #if KEYS_PRIMITIVE public boolean rem( final KEY_GENERIC_TYPE k ) { throw new UnsupportedOperationException(); } public boolean addAll( final COLLECTION c ) { throw new UnsupportedOperationException(); } @Override public boolean addAll( final long i, final COLLECTION c ) { throw new UnsupportedOperationException(); } #else public boolean remove( Object k ) { throw new UnsupportedOperationException(); } #endif } /** Returns a type-specific immutable big list containing only the specified element. The returned big list is serializable and cloneable. * * @param element the only element of the returned big list. * @return a type-specific immutable big list containing just element. */ public static KEY_GENERIC BIG_LIST KEY_GENERIC singleton( final KEY_GENERIC_TYPE element ) { return new Singleton KEY_GENERIC( element ); } #if ! KEYS_REFERENCE /** Returns a type-specific immutable big list containing only the specified element. The returned big list is serializable and cloneable. * * @param element the only element of the returned big list. * @return a type-specific immutable big list containing just element. 
*/ public static KEY_GENERIC BIG_LIST KEY_GENERIC singleton( final Object element ) { return new Singleton KEY_GENERIC( KEY_OBJ2TYPE( element ) ); } #endif /** A synchronized wrapper class for big lists. */ public static class SynchronizedBigList KEY_GENERIC extends COLLECTIONS.SynchronizedCollection KEY_GENERIC implements BIG_LIST KEY_GENERIC, java.io.Serializable { private static final long serialVersionUID = -7046029254386353129L; protected final BIG_LIST KEY_GENERIC list; // Due to the large number of methods that are not in COLLECTION, this is worth caching. protected SynchronizedBigList( final BIG_LIST KEY_GENERIC l, final Object sync ) { super( l, sync ); this.list = l; } protected SynchronizedBigList( final BIG_LIST KEY_GENERIC l ) { super( l ); this.list = l; } public KEY_GENERIC_TYPE GET_KEY( final long i ) { synchronized( sync ) { return list.GET_KEY( i ); } } public KEY_GENERIC_TYPE set( final long i, final KEY_GENERIC_TYPE k ) { synchronized( sync ) { return list.set( i, k ); } } public void add( final long i, final KEY_GENERIC_TYPE k ) { synchronized( sync ) { list.add( i, k ); } } public KEY_GENERIC_TYPE REMOVE_KEY( final long i ) { synchronized( sync ) { return list.REMOVE_KEY( i ); } } public long indexOf( final KEY_TYPE k ) { synchronized( sync ) { return list.indexOf( k ); } } public long lastIndexOf( final KEY_TYPE k ) { synchronized( sync ) { return list.lastIndexOf( k ); } } public boolean addAll( final long index, final Collection c ) { synchronized( sync ) { return list.addAll( index, c ); } } public void getElements( final long from, final KEY_TYPE a[][], final long offset, final long length ) { synchronized( sync ) { list.getElements( from, a, offset, length ); } } public void removeElements( final long from, final long to ) { synchronized( sync ) { list.removeElements( from, to ); } } public void addElements( long index, final KEY_GENERIC_TYPE a[][], long offset, long length ) { synchronized( sync ) { list.addElements( index, a, offset, length ); } } public void addElements( long index, final KEY_GENERIC_TYPE a[][] ) { synchronized( sync ) { list.addElements( index, a ); } } public void size( final long size ) { synchronized( sync ) { list.size( size ); } } public long size64() { synchronized( sync ) { return list.size64(); } } public KEY_BIG_LIST_ITERATOR KEY_GENERIC iterator() { return list.listIterator(); } public KEY_BIG_LIST_ITERATOR KEY_GENERIC listIterator() { return list.listIterator(); } public KEY_BIG_LIST_ITERATOR KEY_GENERIC listIterator( final long i ) { return list.listIterator( i ); } public BIG_LIST KEY_GENERIC subList( final long from, final long to ) { synchronized( sync ) { return synchronize( list.subList( from, to ), sync ); } } public boolean equals( final Object o ) { synchronized( sync ) { return list.equals( o ); } } public int hashCode() { synchronized( sync ) { return list.hashCode(); } } #if ! 
KEY_CLASS_Reference public int compareTo( final BigList o ) { synchronized( sync ) { return list.compareTo( o ); } } #endif #if KEYS_PRIMITIVE public boolean addAll( final long index, final COLLECTION c ) { synchronized( sync ) { return list.addAll( index, c ); } } public boolean addAll( final long index, BIG_LIST l ) { synchronized( sync ) { return list.addAll( index, l ); } } public boolean addAll( BIG_LIST l ) { synchronized( sync ) { return list.addAll( l ); } } public KEY_GENERIC_CLASS get( final long i ) { synchronized( sync ) { return list.get( i ); } } public void add( final long i, KEY_GENERIC_CLASS k ) { synchronized( sync ) { list.add( i, k ); } } public KEY_GENERIC_CLASS set( final long index, KEY_GENERIC_CLASS k ) { synchronized( sync ) { return list.set( index, k ); } } public KEY_GENERIC_CLASS remove( final long i ) { synchronized( sync ) { return list.remove( i ); } } public long indexOf( final Object o ) { synchronized( sync ) { return list.indexOf( o ); } } public long lastIndexOf( final Object o ) { synchronized( sync ) { return list.lastIndexOf( o ); } } #endif } /** Returns a synchronized type-specific big list backed by the given type-specific big list. * * @param l the big list to be wrapped in a synchronized big list. * @return a synchronized view of the specified big list. * @see java.util.Collections#synchronizedList(List) */ public static KEY_GENERIC BIG_LIST KEY_GENERIC synchronize( final BIG_LIST KEY_GENERIC l ) { return new SynchronizedBigList KEY_GENERIC( l ); } /** Returns a synchronized type-specific big list backed by the given type-specific big list, using an assigned object to synchronize. * * @param l the big list to be wrapped in a synchronized big list. * @param sync an object that will be used to synchronize the access to the big list. * @return a synchronized view of the specified big list. * @see java.util.Collections#synchronizedList(List) */ public static KEY_GENERIC BIG_LIST KEY_GENERIC synchronize( final BIG_LIST KEY_GENERIC l, final Object sync ) { return new SynchronizedBigList KEY_GENERIC( l, sync ); } /** An unmodifiable wrapper class for big lists. */ public static class UnmodifiableBigList KEY_GENERIC extends COLLECTIONS.UnmodifiableCollection KEY_GENERIC implements BIG_LIST KEY_GENERIC, java.io.Serializable { private static final long serialVersionUID = -7046029254386353129L; protected final BIG_LIST KEY_GENERIC list; // Due to the large number of methods that are not in COLLECTION, this is worth caching. 
protected UnmodifiableBigList( final BIG_LIST KEY_GENERIC l ) { super( l ); this.list = l; } public KEY_GENERIC_TYPE GET_KEY( final long i ) { return list.GET_KEY( i ); } public KEY_GENERIC_TYPE set( final long i, final KEY_GENERIC_TYPE k ) { throw new UnsupportedOperationException(); } public void add( final long i, final KEY_GENERIC_TYPE k ) { throw new UnsupportedOperationException(); } public KEY_GENERIC_TYPE REMOVE_KEY( final long i ) { throw new UnsupportedOperationException(); } public long indexOf( final KEY_TYPE k ) { return list.indexOf( k ); } public long lastIndexOf( final KEY_TYPE k ) { return list.lastIndexOf( k ); } public boolean addAll( final long index, final Collection c ) { throw new UnsupportedOperationException(); } public void getElements( final long from, final KEY_TYPE a[][], final long offset, final long length ) { list.getElements( from, a, offset, length ); } public void removeElements( final long from, final long to ) { throw new UnsupportedOperationException(); } public void addElements( long index, final KEY_GENERIC_TYPE a[][], long offset, long length ) { throw new UnsupportedOperationException(); } public void addElements( long index, final KEY_GENERIC_TYPE a[][] ) { throw new UnsupportedOperationException(); } public void size( final long size ) { list.size( size ); } public long size64() { return list.size64(); } public KEY_BIG_LIST_ITERATOR KEY_GENERIC iterator() { return listIterator(); } public KEY_BIG_LIST_ITERATOR KEY_GENERIC listIterator() { return BIG_LIST_ITERATORS.unmodifiable( list.listIterator() ); } public KEY_BIG_LIST_ITERATOR KEY_GENERIC listIterator( final long i ) { return BIG_LIST_ITERATORS.unmodifiable( list.listIterator( i ) ); } public BIG_LIST KEY_GENERIC subList( final long from, final long to ) { return unmodifiable( list.subList( from, to ) ); } public boolean equals( final Object o ) { return list.equals( o ); } public int hashCode() { return list.hashCode(); } #if ! KEY_CLASS_Reference public int compareTo( final BigList o ) { return list.compareTo( o ); } #endif #if KEYS_PRIMITIVE public boolean addAll( final long index, final COLLECTION c ) { throw new UnsupportedOperationException(); } public boolean addAll( final BIG_LIST l ) { throw new UnsupportedOperationException(); } public boolean addAll( final long index, final BIG_LIST l ) { throw new UnsupportedOperationException(); } public KEY_GENERIC_CLASS get( final long i ) { return list.get( i ); } public void add( final long i, KEY_GENERIC_CLASS k ) { throw new UnsupportedOperationException(); } public KEY_GENERIC_CLASS set( final long index, KEY_GENERIC_CLASS k ) { throw new UnsupportedOperationException(); } public KEY_GENERIC_CLASS remove( final long i ) { throw new UnsupportedOperationException(); } public long indexOf( final Object o ) { return list.indexOf( o ); } public long lastIndexOf( final Object o ) { return list.lastIndexOf( o ); } #endif } /** Returns an unmodifiable type-specific big list backed by the given type-specific big list. * * @param l the big list to be wrapped in an unmodifiable big list. * @return an unmodifiable view of the specified big list. * @see java.util.Collections#unmodifiableList(List) */ public static KEY_GENERIC BIG_LIST KEY_GENERIC unmodifiable( final BIG_LIST KEY_GENERIC l ) { return new UnmodifiableBigList KEY_GENERIC( l ); } /** A class exposing a list as a big list. 
*/ public static class ListBigList KEY_GENERIC extends ABSTRACT_BIG_LIST KEY_GENERIC implements java.io.Serializable { private static final long serialVersionUID = -7046029254386353129L; private final LIST KEY_GENERIC list; protected ListBigList( final LIST KEY_GENERIC list ) { this.list = list; } private int intIndex( long index ) { if ( index >= Integer.MAX_VALUE ) throw new IndexOutOfBoundsException( "This big list is restricted to 32-bit indices" ); return (int)index; } public long size64() { return list.size(); } @Deprecated public int size() { return list.size(); } public void size( final long size ) { list.size( intIndex( size ) ); } public KEY_BIG_LIST_ITERATOR KEY_GENERIC iterator() { return BIG_LIST_ITERATORS.asBigListIterator( list.iterator() ); } public KEY_BIG_LIST_ITERATOR KEY_GENERIC listIterator() { return BIG_LIST_ITERATORS.asBigListIterator( list.listIterator() ); } public boolean addAll( final long index, final Collection c ) { return list.addAll( intIndex( index ), c ); } public KEY_BIG_LIST_ITERATOR KEY_GENERIC listIterator( final long index ) { return BIG_LIST_ITERATORS.asBigListIterator( list.listIterator( intIndex( index ) ) ); } public BIG_LIST KEY_GENERIC subList( long from, long to ) { return new ListBigList KEY_GENERIC( list.subList( intIndex( from ), intIndex( to ) ) ); } public boolean contains( final KEY_TYPE key ) { return list.contains( key ); } public KEY_TYPE[] TO_KEY_ARRAY() { return list.TO_KEY_ARRAY(); } public void removeElements( final long from, final long to ) { list.removeElements( intIndex( from ), intIndex( to ) ); } #if KEYS_PRIMITIVE public KEY_TYPE[] TO_KEY_ARRAY( KEY_TYPE[] a ) { return list.TO_KEY_ARRAY( a ); } #endif public void add( long index, KEY_GENERIC_TYPE key ) { list.add( intIndex( index ), key ); } #if KEYS_PRIMITIVE @Override #endif public boolean addAll( long index, COLLECTION KEY_GENERIC c ) { return list.addAll( intIndex( index ), c ); } #if KEYS_PRIMITIVE @Override #endif public boolean addAll( long index, BIG_LIST KEY_GENERIC c ) { return list.addAll( intIndex( index ), c ); } public boolean add( KEY_GENERIC_TYPE key ) { return list.add( key ); } public boolean addAll( BIG_LIST KEY_GENERIC c ) { return list.addAll( c ); } public KEY_GENERIC_TYPE GET_KEY( long index ) { return list.GET_KEY( intIndex( index ) ); } public long indexOf( KEY_TYPE k ) { return list.indexOf( k ); } public long lastIndexOf( KEY_TYPE k ) { return list.lastIndexOf( k ); } public KEY_GENERIC_TYPE REMOVE_KEY( long index ) { return list.REMOVE_KEY( intIndex( index ) ); } public KEY_GENERIC_TYPE set( long index, KEY_GENERIC_TYPE k ) { return list.set( intIndex( index ), k ); } public boolean addAll( COLLECTION KEY_GENERIC c ) { return list.addAll( c ); } public boolean containsAll( COLLECTION KEY_GENERIC c ) { return list.containsAll( c ); } public boolean removeAll( COLLECTION KEY_GENERIC c ) { return list.removeAll( c ); } public boolean retainAll( COLLECTION KEY_GENERIC c ) { return list.retainAll( c ); } public boolean isEmpty() { return list.isEmpty(); } public T[] toArray( T[] a ) { return list.toArray( a ); } public boolean containsAll( Collection c ) { return list.containsAll( c ); } public boolean addAll( Collection c ) { return list.addAll( c ); } public boolean removeAll( Collection c ) { return list.removeAll( c ); } public boolean retainAll( Collection c ) { return list.retainAll( c ); } public void clear() { list.clear(); } public int hashCode() { return list.hashCode(); } } /** Returns a big list backed by the specified list. 
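 * <p>As a sketch of typical usage, assuming the int instantiation generated from this template (the
 * generated IntBigLists class), a standard 32-bit-indexed list might be exposed as a big list as follows;
 * variable names are illustrative only:
 * <pre>{@code
 * IntList list = new IntArrayList( new int[] { 1, 2, 3 } );
 * IntBigList big = IntBigLists.asBigList( list );  // 64-bit-indexed view of the same list
 * int first = big.getInt( 0 );                     // 1
 * }</pre>
 * Note that indices passed to the wrapper must still fit 32 bits, as they are delegated to the backing list.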
* * @param list a list. * @return a big list backed by the specified list. */ public static KEY_GENERIC BIG_LIST KEY_GENERIC asBigList( final LIST KEY_GENERIC list ) { return new ListBigList KEY_GENERIC( list ); } #ifdef TEST private static KEY_TYPE genKey() { #if KEY_CLASS_Byte || KEY_CLASS_Short || KEY_CLASS_Character return (KEY_TYPE)(r.nextInt()); #elif KEYS_PRIMITIVE return r.NEXT_KEY(); #elif KEY_CLASS_Object return Integer.toBinaryString( r.nextInt() ); #else return new java.io.Serializable() {}; #endif } private static void testLists( KEY_TYPE k, BIG_LIST m, BIG_LIST t, int level ) { int n = 100; int c; long ms; boolean mThrowsIllegal, tThrowsIllegal, mThrowsNoElement, tThrowsNoElement, mThrowsIndex, tThrowsIndex, mThrowsUnsupp, tThrowsUnsupp; boolean rt = false, rm = false; Object Rt = null, Rm = null; if ( level == 0 ) return; /* Now we check that m and t are equal. */ if ( !m.equals( t ) || ! t.equals( m ) ) System.err.println("m: " + m + " t: " + t); ensure( m.equals( t ), "Error (" + level + ", " + seed + "): ! m.equals( t ) at start" ); ensure( t.equals( m ), "Error (" + level + ", " + seed + "): ! t.equals( m ) at start" ); /* Now we check that m actually holds that data. */ for(java.util.Iterator i=t.iterator(); i.hasNext(); ) { ensure( m.contains( i.next() ), "Error (" + level + ", " + seed + "): m and t differ on an entry after insertion (iterating on t)" ); } /* Now we check that m actually holds that data, but iterating on m. */ for(java.util.Iterator i=m.listIterator(); i.hasNext(); ) { ensure( t.contains( i.next() ), "Error (" + level + ", " + seed + "): m and t differ on an entry after insertion (iterating on m)" ); } /* Now we check that inquiries about random data give the same answer in m and t. For m we use the polymorphic method. */ for(int i=0; i 1 ) r = new java.util.Random( seed = Long.parseLong( arg[ 1 ] ) ); try { test(); } catch( Throwable e ) { e.printStackTrace( System.err ); System.err.println( "seed: " + seed ); } } #endif } fastutil-7.1.0/drv/BinIO.drv0000664000000000000000000001533713050703572014335 0ustar rootroot/* * Copyright (C) 2005-2016 Sebastiano Vigna * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package it.unimi.dsi.fastutil.io; import static it.unimi.dsi.fastutil.BigArrays.SEGMENT_MASK; import static it.unimi.dsi.fastutil.BigArrays.start; import static it.unimi.dsi.fastutil.BigArrays.segment; import static it.unimi.dsi.fastutil.BigArrays.displacement; import java.io.*; import java.util.*; import it.unimi.dsi.fastutil.booleans.*; import it.unimi.dsi.fastutil.bytes.*; import it.unimi.dsi.fastutil.shorts.*; import it.unimi.dsi.fastutil.chars.*; import it.unimi.dsi.fastutil.ints.*; import it.unimi.dsi.fastutil.longs.*; import it.unimi.dsi.fastutil.floats.*; import it.unimi.dsi.fastutil.doubles.*; /** Provides static methods to perform easily binary I/O. * *

This class fills some gaps in the Java API. First of all, you have two * buffered, easy-to-use methods to {@linkplain #storeObject(Object,CharSequence) store an object to a file} * or {@linkplain #loadObject(CharSequence) load an object from a file}, * and two * buffered, easy-to-use methods to {@linkplain #storeObject(Object,OutputStream) store an object to an output stream} * or to {@linkplain #loadObject(InputStream) load an object from an input stream}. * *
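 * <p>For instance, an object might be stored to a file and read back as follows (the filename and
 * variable names are purely illustrative):
 * <pre>{@code
 * // Buffered serialization and deserialization of an arbitrary serializable object.
 * BinIO.storeObject( myMap, "map.ser" );
 * Object copy = BinIO.loadObject( "map.ser" );
 * }</pre>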

Second, a natural operation on sequences of primitive elements is to load or * store them in binary form using the {@link DataInput} conventions. This * approach is much more flexible than storing arrays as objects, as it allows * for partial load, partial store, and makes it easy to read the * resulting files from other languages. * *

For each primitive type, this class provides methods that read elements * from a {@link DataInput} or from a filename into an array. Analogously, there are * methods that store the content of an array (fragment) or the elements * returned by an iterator to a {@link DataOutput} or to a given filename. Files * are buffered using {@link FastBufferedInputStream} and {@link FastBufferedOutputStream}. * *
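 * <p>As a sketch of typical usage, the int versions of these methods (which surface as
 * {@code storeInts()} and {@code loadInts()} in the generated class) might be used as follows;
 * the filename is illustrative:
 * <pre>{@code
 * int[] data = { 1, 2, 3 };
 * BinIO.storeInts( data, "data.bin" );        // written in DataOutput (big-endian) format
 * int[] back = BinIO.loadInts( "data.bin" );  // reads the whole file into a new array
 * }</pre>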

Since bytes can be read from or written to any stream, additional methods * make it possible to {@linkplain #loadBytes(InputStream,byte[]) load bytes from} and * {@linkplain #storeBytes(byte[],OutputStream) store bytes to} a stream. Such methods * use the bulk read/write methods of {@link InputStream} and {@link OutputStream}, but they * also include a workaround for bug #6478546. * *
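 * <p>For instance, assuming {@code in} and {@code out} are an {@link InputStream} and an
 * {@link OutputStream} (illustrative variables), a buffer might be filled and written back as follows:
 * <pre>{@code
 * byte[] buffer = new byte[ 1024 ];
 * int read = BinIO.loadBytes( in, buffer );  // bulk read; may be less than 1024 if the stream ends
 * BinIO.storeBytes( buffer, 0, read, out );  // bulk write of the bytes actually read
 * }</pre>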

Finally, there are useful wrapper methods that {@linkplain #asIntIterator(CharSequence) * exhibit a file as a type-specific iterator}. * * @since 4.4 */ public class BinIO { private BinIO() {} /** Stores an object in a file given by a {@link File} object. * * @param o an object. * @param file a file. * @see #loadObject(File) */ public static void storeObject( final Object o, final File file ) throws IOException { final ObjectOutputStream oos = new ObjectOutputStream( new FastBufferedOutputStream( new FileOutputStream( file ) ) ); oos.writeObject( o ); oos.close(); } /** Stores an object in a file given by a pathname. * * @param o an object. * @param filename a filename. * @see #loadObject(CharSequence) */ public static void storeObject( final Object o, final CharSequence filename ) throws IOException { storeObject( o, new File( filename.toString() ) ); } /** Loads an object from a file given by a {@link File} object. * * @param file a file. * @return the object stored under the given file. * @see #storeObject(Object, File) */ public static Object loadObject( final File file ) throws IOException, ClassNotFoundException { final ObjectInputStream ois = new ObjectInputStream( new FastBufferedInputStream( new FileInputStream( file ) ) ); final Object result = ois.readObject(); ois.close(); return result; } /** Loads an object from a file given by a pathname. * * @param filename a filename. * @return the object stored under the given filename. * @see #storeObject(Object, CharSequence) */ public static Object loadObject( final CharSequence filename ) throws IOException, ClassNotFoundException { return loadObject( new File( filename.toString() ) ); } /** Stores an object in a given output stream. * * This methods buffers s, and flushes all wrappers after * calling writeObject(), but does not close s. * * @param o an object. * @param s an output stream. * @see #loadObject(InputStream) */ public static void storeObject( final Object o, final OutputStream s ) throws IOException { @SuppressWarnings("resource") final ObjectOutputStream oos = new ObjectOutputStream( new FastBufferedOutputStream( s ) ); oos.writeObject( o ); oos.flush(); } /** Loads an object from a given input stream. * *

Warning: this method buffers the input stream. As a consequence, * subsequent reads from the same stream may not give the desired results, as bytes * may have been read by the internal buffer, but not used by readObject(). * This is a feature, as this method is targeted at one-shot reading from streams, * e.g., reading exactly one object from {@link System#in}. * * @param s an input stream. * @return the object read from the given input stream. * @see #storeObject(Object, OutputStream) */ public static Object loadObject( final InputStream s ) throws IOException, ClassNotFoundException { @SuppressWarnings("resource") final ObjectInputStream ois = new ObjectInputStream( new FastBufferedInputStream( s ) ); final Object result = ois.readObject(); return result; } #include "src/it/unimi/dsi/fastutil/io/BooleanBinIOFragment.h" #undef KEY_CLASS_Boolean #include "src/it/unimi/dsi/fastutil/io/ByteBinIOFragment.h" #undef KEY_CLASS_Byte #include "src/it/unimi/dsi/fastutil/io/ShortBinIOFragment.h" #undef KEY_CLASS_Short #include "src/it/unimi/dsi/fastutil/io/CharBinIOFragment.h" #undef KEY_CLASS_Character #include "src/it/unimi/dsi/fastutil/io/IntBinIOFragment.h" #undef KEY_CLASS_Integer #include "src/it/unimi/dsi/fastutil/io/LongBinIOFragment.h" #undef KEY_CLASS_Long #include "src/it/unimi/dsi/fastutil/io/FloatBinIOFragment.h" #undef KEY_CLASS_Float #include "src/it/unimi/dsi/fastutil/io/DoubleBinIOFragment.h" #undef KEY_CLASS_Double } fastutil-7.1.0/drv/BinIOFragment.drv0000664000000000000000000010335613050703551016015 0ustar rootroot/* * Copyright (C) 2004-2016 Sebastiano Vigna * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #if KEY_CLASS_Byte // HORRIBLE kluges to work around bug #6478546 private final static int MAX_IO_LENGTH = 1024 * 1024; private static int read( final InputStream is, final byte a[], final int offset, final int length ) throws IOException { if ( length == 0 ) return 0; int read = 0, result; do { result = is.read( a, offset + read, Math.min( length - read, MAX_IO_LENGTH ) ); if ( result < 0 ) return read; read += result; } while( read < length ); return read; } private static void write( final OutputStream outputStream, final byte a[], final int offset, final int length ) throws IOException { int written = 0; while( written < length ) { outputStream.write( a, offset + written, Math.min( length - written, MAX_IO_LENGTH ) ); written += Math.min( length - written, MAX_IO_LENGTH ); } } private static void write( final DataOutput dataOutput, final byte a[], final int offset, final int length ) throws IOException { int written = 0; while( written < length ) { dataOutput.write( a, offset + written, Math.min( length - written, MAX_IO_LENGTH ) ); written += Math.min( length - written, MAX_IO_LENGTH ); } } // Additional read/write methods to work around the DataInput/DataOutput schizophrenia. /** Loads bytes from a given input stream, storing them in a given array fragment. * *

Note that this method is going to be significantly faster than {@link #loadBytes(DataInput,byte[],int,int)} * as it uses {@link InputStream}'s bulk-read methods. * * @param inputStream an input stream. * @param array an array which will be filled with data from inputStream. * @param offset the index of the first element of array to be filled. * @param length the number of elements of array to be filled. * @return the number of elements actually read from inputStream (it might be less than length if inputStream ends). */ public static int LOAD_KEYS( final InputStream inputStream, final KEY_TYPE[] array, final int offset, final int length ) throws IOException { return read( inputStream, array, offset, length ); } /** Loads bytes from a given input stream, storing them in a given array. * *

Note that this method is going to be significantly faster than {@link #loadBytes(DataInput,byte[])} * as it uses {@link InputStream}'s bulk-read methods. * * @param inputStream an input stream. * @param array an array which will be filled with data from inputStream. * @return the number of elements actually read from inputStream (it might be less than the array length if inputStream ends). */ public static int LOAD_KEYS( final InputStream inputStream, final KEY_TYPE[] array ) throws IOException { return read( inputStream, array, 0, array.length ); } /** Stores an array fragment to a given output stream. * *

Note that this method is going to be significantly faster than {@link #storeBytes(byte[],int,int,DataOutput)} * as it uses {@link OutputStream}'s bulk-write methods. * * @param array an array whose elements will be written to outputStream. * @param offset the index of the first element of array to be written. * @param length the number of elements of array to be written. * @param outputStream an output stream. */ public static void STORE_KEYS( final KEY_TYPE array[], final int offset, final int length, final OutputStream outputStream ) throws IOException { write( outputStream, array, offset, length ); } /** Stores an array to a given output stream. * *

Note that this method is going to be significantly faster than {@link #storeBytes(byte[],DataOutput)} * as it uses {@link OutputStream}'s bulk-read methods. * * @param array an array whose elements will be written to outputStream. * @param outputStream an output stream. */ public static void STORE_KEYS( final KEY_TYPE array[], final OutputStream outputStream ) throws IOException { write( outputStream, array, 0, array.length ); } private static long read( final InputStream is, final byte a[][], final long offset, final long length ) throws IOException { if ( length == 0 ) return 0; long read = 0; int segment = segment( offset ); int displacement = displacement( offset ); int result; do { result = is.read( a[ segment ], displacement, (int)Math.min( a[ segment ].length - displacement, Math.min( length - read, MAX_IO_LENGTH ) ) ); if ( result < 0 ) return read; read += result; displacement += result; if ( displacement == a[ segment ].length ) { segment++; displacement = 0; } } while( read < length ); return read; } private static void write( final OutputStream outputStream, final byte a[][], final long offset, final long length ) throws IOException { if ( length == 0 ) return; long written = 0; int toWrite; int segment = segment( offset ); int displacement = displacement( offset ); do { toWrite = (int)Math.min( a[ segment ].length - displacement, Math.min( length - written, MAX_IO_LENGTH ) ); outputStream.write( a[ segment ], displacement, toWrite ); written += toWrite; displacement += toWrite; if ( displacement == a[ segment ].length ) { segment++; displacement = 0; } } while( written < length ); } private static void write( final DataOutput dataOutput, final byte a[][], final long offset, final long length ) throws IOException { if ( length == 0 ) return; long written = 0; int toWrite; int segment = segment( offset ); int displacement = displacement( offset ); do { toWrite = (int)Math.min( a[ segment ].length - displacement, Math.min( length - written, MAX_IO_LENGTH ) ); dataOutput.write( a[ segment ], displacement, toWrite ); written += toWrite; displacement += toWrite; if ( displacement == a[ segment ].length ) { segment++; displacement = 0; } } while( written < length ); } // Additional read/write methods to work around the DataInput/DataOutput schizophrenia. /** Loads bytes from a given input stream, storing them in a given big-array fragment. * *
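 * <p>A minimal sketch, assuming {@code in} is an {@link InputStream} with at least as many bytes as
 * requested, and using {@code ByteBigArrays} to allocate a target larger than 2^31 elements (names
 * are illustrative):
 * <pre>{@code
 * byte[][] big = ByteBigArrays.newBigArray( 3000000000L );
 * long read = BinIO.loadBytes( in, big, 0, ByteBigArrays.length( big ) );
 * }</pre>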

Note that this method is going to be significantly faster than {@link #loadBytes(DataInput,byte[][],long,long)} * as it uses {@link InputStream}'s bulk-read methods. * * @param inputStream an input stream. * @param array a big array which will be filled with data from inputStream. * @param offset the index of the first element of array to be filled. * @param length the number of elements of array to be filled. * @return the number of elements actually read from inputStream (it might be less than length if inputStream ends). */ public static long LOAD_KEYS( final InputStream inputStream, final KEY_TYPE[][] array, final long offset, final long length ) throws IOException { return read( inputStream, array, offset, length ); } /** Loads bytes from a given input stream, storing them in a given big array. * *

Note that this method is going to be significantly faster than {@link #loadBytes(DataInput,byte[][])} * as it uses {@link InputStream}'s bulk-read methods. * * @param inputStream an input stream. * @param array a big array which will be filled with data from inputStream. * @return the number of elements actually read from inputStream (it might be less than the array length if inputStream ends). */ public static long LOAD_KEYS( final InputStream inputStream, final KEY_TYPE[][] array ) throws IOException { return read( inputStream, array, 0, BIG_ARRAYS.length( array ) ); } /** Stores a big-array fragment to a given output stream. * *

Note that this method is going to be significantly faster than {@link #storeBytes(byte[][],long,long,DataOutput)} * as it uses {@link OutputStream}'s bulk-write methods. * * @param array a big array whose elements will be written to outputStream. * @param offset the index of the first element of array to be written. * @param length the number of elements of array to be written. * @param outputStream an output stream. */ public static void STORE_KEYS( final KEY_TYPE array[][], final long offset, final long length, final OutputStream outputStream ) throws IOException { write( outputStream, array, offset, length ); } /** Stores a big array to a given output stream. * *

Note that this method is going to be significantly faster than {@link #storeBytes(byte[][],DataOutput)} * as it uses {@link OutputStream}'s bulk-read methods. * * @param array a big array whose elements will be written to outputStream. * @param outputStream an output stream. */ public static void STORE_KEYS( final KEY_TYPE array[][], final OutputStream outputStream ) throws IOException { write( outputStream, array, 0, BIG_ARRAYS.length( array ) ); } #endif /** Loads elements from a given data input, storing them in a given array fragment. * * @param dataInput a data input. * @param array an array which will be filled with data from dataInput. * @param offset the index of the first element of array to be filled. * @param length the number of elements of array to be filled. * @return the number of elements actually read from dataInput (it might be less than length if dataInput ends). */ public static int LOAD_KEYS( final DataInput dataInput, final KEY_TYPE[] array, final int offset, final int length ) throws IOException { PACKAGE.ARRAYS.ensureOffsetLength( array, offset, length ); int i = 0; try { for( i = 0; i < length; i++ ) array[ i + offset ] = dataInput.READ_KEY(); } catch( EOFException itsOk ) {} return i; } /** Loads elements from a given data input, storing them in a given array. * * @param dataInput a data input. * @param array an array which will be filled with data from dataInput. * @return the number of elements actually read from dataInput (it might be less than the array length if dataInput ends). */ public static int LOAD_KEYS( final DataInput dataInput, final KEY_TYPE[] array ) throws IOException { int i = 0; try { final int length = array.length; for( i = 0; i < length; i++ ) array[ i ] = dataInput.READ_KEY(); } catch( EOFException itsOk ) {} return i; } /** Loads elements from a file given by a {@link File} object, storing them in a given array fragment. * * @param file a file. * @param array an array which will be filled with data from the specified file. * @param offset the index of the first element of array to be filled. * @param length the number of elements of array to be filled. * @return the number of elements actually read from the given file (it might be less than length if the file is too short). */ public static int LOAD_KEYS( final File file, final KEY_TYPE[] array, final int offset, final int length ) throws IOException { PACKAGE.ARRAYS.ensureOffsetLength( array, offset, length ); final FileInputStream fis = new FileInputStream( file ); #if KEY_CLASS_Byte final int result = read( fis, array, offset, length ); fis.close(); return result; #else final DataInputStream dis = new DataInputStream( new FastBufferedInputStream( fis ) ); int i = 0; try { for( i = 0; i < length; i++ ) array[ i + offset ] = dis.READ_KEY(); } catch( EOFException itsOk ) {} dis.close(); return i; #endif } /** Loads elements from a file given by a pathname, storing them in a given array fragment. * * @param filename a filename. * @param array an array which will be filled with data from the specified file. * @param offset the index of the first element of array to be filled. * @param length the number of elements of array to be filled. * @return the number of elements actually read from the given file (it might be less than length if the file is too short). 
*/ public static int LOAD_KEYS( final CharSequence filename, final KEY_TYPE[] array, final int offset, final int length ) throws IOException { return LOAD_KEYS( new File( filename.toString() ), array, offset, length ); } /** Loads elements from a file given by a {@link File} object, storing them in a given array. * * @param file a file. * @param array an array which will be filled with data from the specified file. * @return the number of elements actually read from the given file (it might be less than the array length if the file is too short). */ public static int LOAD_KEYS( final File file, final KEY_TYPE[] array ) throws IOException { final FileInputStream fis = new FileInputStream( file ); #if KEY_CLASS_Byte final int result = read( fis, array, 0, array.length ); fis.close(); return result; #else final DataInputStream dis = new DataInputStream( new FastBufferedInputStream( fis ) ); int i = 0; try { final int length = array.length; for( i = 0; i < length; i++ ) array[ i ] = dis.READ_KEY(); } catch( EOFException itsOk ) {} dis.close(); return i; #endif } /** Loads elements from a file given by a pathname, storing them in a given array. * * @param filename a filename. * @param array an array which will be filled with data from the specified file. * @return the number of elements actually read from the given file (it might be less than the array length if the file is too short). */ public static int LOAD_KEYS( final CharSequence filename, final KEY_TYPE[] array ) throws IOException { return LOAD_KEYS( new File( filename.toString() ), array ); } /** Loads elements from a file given by a {@link File} object, storing them in a new array. * *

Note that the length of the returned array will be computed * dividing the specified file size by the number of bytes used to * represent each element. * * @param file a file. * @return an array filled with the content of the specified file. */ public static KEY_TYPE[] LOAD_KEYS( final File file ) throws IOException { final FileInputStream fis = new FileInputStream( file ); #if KEY_CLASS_Boolean final long length = fis.getChannel().size(); #else final long length = fis.getChannel().size() / ( KEY_CLASS.SIZE / 8 ); #endif if ( length > Integer.MAX_VALUE ) { fis.close(); throw new IllegalArgumentException( "File too long: " + fis.getChannel().size()+ " bytes (" + length + " elements)" ); } final KEY_TYPE[] array = new KEY_TYPE[ (int)length ]; #if KEY_CLASS_Byte if ( read( fis, array, 0, (int)length ) < length ) throw new EOFException(); fis.close(); #else final DataInputStream dis = new DataInputStream( new FastBufferedInputStream( fis ) ); for( int i = 0; i < length; i++ ) array[ i ] = dis.READ_KEY(); dis.close(); #endif return array; } /** Loads elements from a file given by a filename, storing them in a new array. * *

Note that the length of the returned array will be computed * dividing the specified file size by the number of bytes used to * represent each element. * * @param filename a filename. * @return an array filled with the content of the specified file. */ public static KEY_TYPE[] LOAD_KEYS( final CharSequence filename ) throws IOException { return LOAD_KEYS( new File( filename.toString() ) ); } /** Stores an array fragment to a given data output. * * @param array an array whose elements will be written to dataOutput. * @param offset the index of the first element of array to be written. * @param length the number of elements of array to be written. * @param dataOutput a data output. */ public static void STORE_KEYS( final KEY_TYPE array[], final int offset, final int length, final DataOutput dataOutput ) throws IOException { PACKAGE.ARRAYS.ensureOffsetLength( array, offset, length ); #if KEY_CLASS_Byte write( dataOutput, array, offset, length ); #else for( int i = 0; i < length; i++ ) dataOutput.WRITE_KEY( array[ offset + i ] ); #endif } /** Stores an array to a given data output. * * @param array an array whose elements will be written to dataOutput. * @param dataOutput a data output. */ public static void STORE_KEYS( final KEY_TYPE array[], final DataOutput dataOutput ) throws IOException { #if KEY_CLASS_Byte write( dataOutput, array, 0, array.length ); #else final int length = array.length; for( int i = 0; i < length; i++ ) dataOutput.WRITE_KEY( array[ i ] ); #endif } /** Stores an array fragment to a file given by a {@link File} object. * * @param array an array whose elements will be written to filename. * @param offset the index of the first element of array to be written. * @param length the number of elements of array to be written. * @param file a file. */ public static void STORE_KEYS( final KEY_TYPE array[], final int offset, final int length, final File file ) throws IOException { PACKAGE.ARRAYS.ensureOffsetLength( array, offset, length ); #if KEY_CLASS_Byte final OutputStream os = new FastBufferedOutputStream( new FileOutputStream( file ) ); write( os, array, offset, length ); os.close(); #else final DataOutputStream dos = new DataOutputStream( new FastBufferedOutputStream( new FileOutputStream( file ) ) ); for( int i = 0; i < length; i++ ) dos.WRITE_KEY( array[ offset + i ] ); dos.close(); #endif } /** Stores an array fragment to a file given by a pathname. * * @param array an array whose elements will be written to filename. * @param offset the index of the first element of array to be written. * @param length the number of elements of array to be written. * @param filename a filename. */ public static void STORE_KEYS( final KEY_TYPE array[], final int offset, final int length, final CharSequence filename ) throws IOException { STORE_KEYS( array, offset, length, new File( filename.toString() ) ); } /** Stores an array to a file given by a {@link File} object. * * @param array an array whose elements will be written to filename. * @param file a file. 
*/ public static void STORE_KEYS( final KEY_TYPE array[], final File file ) throws IOException { #if KEY_CLASS_Byte final OutputStream os = new FastBufferedOutputStream( new FileOutputStream( file ) ); write( os, array, 0, array.length ); os.close(); #else final int length = array.length; final DataOutputStream dos = new DataOutputStream( new FastBufferedOutputStream( new FileOutputStream( file ) ) ); for( int i = 0; i < length; i++ ) dos.WRITE_KEY( array[ i ] ); dos.close(); #endif } /** Stores an array to a file given by a pathname. * * @param array an array whose elements will be written to filename. * @param filename a filename. */ public static void STORE_KEYS( final KEY_TYPE array[], final CharSequence filename ) throws IOException { STORE_KEYS( array, new File( filename.toString() ) ); } /** Loads elements from a given data input, storing them in a given big-array fragment. * * @param dataInput a data input. * @param array a big array which will be filled with data from dataInput. * @param offset the index of the first element of bigArray to be filled. * @param length the number of elements of bigArray to be filled. * @return the number of elements actually read from dataInput (it might be less than length if dataInput ends). */ public static long LOAD_KEYS( final DataInput dataInput, final KEY_TYPE[][] array, final long offset, final long length ) throws IOException { PACKAGE.BIG_ARRAYS.ensureOffsetLength( array, offset, length ); long c = 0; try { for( int i = segment( offset ); i < segment( offset + length + SEGMENT_MASK ); i++ ) { final KEY_TYPE[] t = array[ i ]; final int l = (int)Math.min( t.length, offset + length - start( i ) ); for( int d = (int)Math.max( 0, offset - start( i ) ); d < l; d++ ) { t[ d ] = dataInput.READ_KEY(); c++; } } } catch( EOFException itsOk ) {} return c; } /** Loads elements from a given data input, storing them in a given big array. * * @param dataInput a data input. * @param array a big array which will be filled with data from dataInput. * @return the number of elements actually read from dataInput (it might be less than the array length if dataInput ends). */ public static long LOAD_KEYS( final DataInput dataInput, final KEY_TYPE[][] array ) throws IOException { long c = 0; try { for( int i = 0; i < array.length; i++ ) { final KEY_TYPE[] t = array[ i ]; final int l = t.length; for( int d = 0; d < l; d++ ) { t[ d ] = dataInput.READ_KEY(); c++; } } } catch( EOFException itsOk ) {} return c; } /** Loads elements from a file given by a {@link File} object, storing them in a given big-array fragment. * * @param file a file. * @param array a big array which will be filled with data from the specified file. * @param offset the index of the first element of array to be filled. * @param length the number of elements of array to be filled. * @return the number of elements actually read from the given file (it might be less than length if the file is too short). 
*/ public static long LOAD_KEYS( final File file, final KEY_TYPE[][] array, final long offset, final long length ) throws IOException { PACKAGE.BIG_ARRAYS.ensureOffsetLength( array, offset, length ); final FileInputStream fis = new FileInputStream( file ); #if KEY_CLASS_Byte final long result = read( fis, array, offset, length ); fis.close(); return result; #else final DataInputStream dis = new DataInputStream( new FastBufferedInputStream( fis ) ); long c = 0; try { for( int i = segment( offset ); i < segment( offset + length + SEGMENT_MASK ); i++ ) { final KEY_TYPE[] t = array[ i ]; final int l = (int)Math.min( t.length, offset + length - start( i ) ); for( int d = (int)Math.max( 0, offset - start( i ) ); d < l; d++ ) { t[ d ] = dis.READ_KEY(); c++; } } } catch( EOFException itsOk ) {} dis.close(); return c; #endif } /** Loads elements from a file given by a pathname, storing them in a given big-array fragment. * * @param filename a filename. * @param array an array which will be filled with data from the specified file. * @param offset the index of the first element of array to be filled. * @param length the number of elements of array to be filled. * @return the number of elements actually read from the given file (it might be less than length if the file is too short). */ public static long LOAD_KEYS( final CharSequence filename, final KEY_TYPE[][] array, final long offset, final long length ) throws IOException { return LOAD_KEYS( new File( filename.toString() ), array, offset, length ); } /** Loads elements from a file given by a {@link File} object, storing them in a given big array. * * @param file a file. * @param array a big array which will be filled with data from the specified file. * @return the number of elements actually read from the given file (it might be less than the array length if the file is too short). */ public static long LOAD_KEYS( final File file, final KEY_TYPE[][] array ) throws IOException { final FileInputStream fis = new FileInputStream( file ); #if KEY_CLASS_Byte final long result = read( fis, array, 0, BIG_ARRAYS.length( array ) ); fis.close(); return result; #else final DataInputStream dis = new DataInputStream( new FastBufferedInputStream( fis ) ); long c = 0; try { for( int i = 0; i < array.length; i++ ) { final KEY_TYPE[] t = array[ i ]; final int l = t.length; for( int d = 0; d < l; d++ ) { t[ d ] = dis.READ_KEY(); c++; } } } catch( EOFException itsOk ) {} dis.close(); return c; #endif } /** Loads elements from a file given by a pathname, storing them in a given big array. * * @param filename a filename. * @param array a big array which will be filled with data from the specified file. * @return the number of elements actually read from the given file (it might be less than the array length if the file is too short). */ public static long LOAD_KEYS( final CharSequence filename, final KEY_TYPE[][] array ) throws IOException { return LOAD_KEYS( new File( filename.toString() ), array ); } /** Loads elements from a file given by a {@link File} object, storing them in a new big array. * *
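 * <p>For instance, in the long instantiation generated from this template this method is expected to
 * surface as something like {@code loadLongsBig()}, which reads a whole file, possibly with more than
 * 2^31 elements, into a big array (the filename is illustrative):
 * <pre>{@code
 * long[][] a = BinIO.loadLongsBig( new File( "huge.bin" ) );
 * long n = LongBigArrays.length( a );  // may exceed Integer.MAX_VALUE
 * }</pre>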

Note that the length of the returned big array will be computed * dividing the specified file size by the number of bytes used to * represent each element. * * @param file a file. * @return a big array filled with the content of the specified file. */ public static KEY_TYPE[][] LOAD_KEYS_BIG( final File file ) throws IOException { final FileInputStream fis = new FileInputStream( file ); #if KEY_CLASS_Boolean final long length = fis.getChannel().size(); #else final long length = fis.getChannel().size() / ( KEY_CLASS.SIZE / 8 ); #endif final KEY_TYPE[][] array = BIG_ARRAYS.newBigArray( length ); #if KEY_CLASS_Byte if ( read( fis, array, 0, length ) < length ) throw new EOFException(); fis.close(); #else final DataInputStream dis = new DataInputStream( new FastBufferedInputStream( fis ) ); for( int i = 0; i < array.length; i++ ) { final KEY_TYPE[] t = array[ i ]; final int l = t.length; for( int d = 0; d < l; d++ ) t[ d ] = dis.READ_KEY(); } dis.close(); #endif return array; } /** Loads elements from a file given by a filename, storing them in a new big array. * *

Note that the length of the returned big array will be computed * dividing the specified file size by the number of bytes used to * represent each element. * * @param filename a filename. * @return a big array filled with the content of the specified file. */ public static KEY_TYPE[][] LOAD_KEYS_BIG( final CharSequence filename ) throws IOException { return LOAD_KEYS_BIG( new File( filename.toString() ) ); } /** Stores an array fragment to a given data output. * * @param array an array whose elements will be written to dataOutput. * @param offset the index of the first element of array to be written. * @param length the number of elements of array to be written. * @param dataOutput a data output. */ public static void STORE_KEYS( final KEY_TYPE array[][], final long offset, final long length, final DataOutput dataOutput ) throws IOException { PACKAGE.BIG_ARRAYS.ensureOffsetLength( array, offset, length ); #if KEY_CLASS_Byte write( dataOutput, array, offset, length ); #else for( int i = segment( offset ); i < segment( offset + length + SEGMENT_MASK ); i++ ) { final KEY_TYPE[] t = array[ i ]; final int l = (int)Math.min( t.length, offset + length - start( i ) ); for( int d = (int)Math.max( 0, offset - start( i ) ); d < l; d++ ) dataOutput.WRITE_KEY( t[ d ] ); } #endif } /** Stores a big array to a given data output. * * @param array a big array whose elements will be written to dataOutput. * @param dataOutput a data output. */ public static void STORE_KEYS( final KEY_TYPE array[][], final DataOutput dataOutput ) throws IOException { #if KEY_CLASS_Byte write( dataOutput, array, 0, BIG_ARRAYS.length( array ) ); #else for( int i = 0; i < array.length; i++ ) { final KEY_TYPE[] t = array[ i ]; final int l = t.length; for( int d = 0; d < l; d++ ) dataOutput.WRITE_KEY( t[ d ] ); } #endif } /** Stores a big-array fragment to a file given by a {@link File} object. * * @param array a big array whose elements will be written to filename. * @param offset the index of the first element of array to be written. * @param length the number of elements of array to be written. * @param file a file. */ public static void STORE_KEYS( final KEY_TYPE array[][], final long offset, final long length, final File file ) throws IOException { PACKAGE.BIG_ARRAYS.ensureOffsetLength( array, offset, length ); #if KEY_CLASS_Byte final OutputStream os = new FastBufferedOutputStream( new FileOutputStream( file ) ); write( os, array, offset, length ); os.close(); #else final DataOutputStream dos = new DataOutputStream( new FastBufferedOutputStream( new FileOutputStream( file ) ) ); for( int i = segment( offset ); i < segment( offset + length + SEGMENT_MASK ); i++ ) { final KEY_TYPE[] t = array[ i ]; final int l = (int)Math.min( t.length, offset + length - start( i ) ); for( int d = (int)Math.max( 0, offset - start( i ) ); d < l; d++ ) dos.WRITE_KEY( t[ d ] ); } dos.close(); #endif } /** Stores a big-array fragment to a file given by a pathname. * * @param array a big array whose elements will be written to filename. * @param offset the index of the first element of array to be written. * @param length the number of elements of array to be written. * @param filename a filename. */ public static void STORE_KEYS( final KEY_TYPE array[][], final long offset, final long length, final CharSequence filename ) throws IOException { STORE_KEYS( array, offset, length, new File( filename.toString() ) ); } /** Stores an array to a file given by a {@link File} object. * * @param array an array whose elements will be written to filename. 
* @param file a file. */ public static void STORE_KEYS( final KEY_TYPE array[][], final File file ) throws IOException { #if KEY_CLASS_Byte final OutputStream os = new FastBufferedOutputStream( new FileOutputStream( file ) ); write( os, array, 0, BIG_ARRAYS.length( array ) ); os.close(); #else final DataOutputStream dos = new DataOutputStream( new FastBufferedOutputStream( new FileOutputStream( file ) ) ); for( int i = 0; i < array.length; i++ ) { final KEY_TYPE[] t = array[ i ]; final int l = t.length; for( int d = 0; d < l; d++ ) dos.WRITE_KEY( t[ d ] ); } dos.close(); #endif } /** Stores a big array to a file given by a pathname. * * @param array a big array whose elements will be written to filename. * @param filename a filename. */ public static void STORE_KEYS( final KEY_TYPE array[][], final CharSequence filename ) throws IOException { STORE_KEYS( array, new File( filename.toString() ) ); } /** Stores the element returned by an iterator to a given data output. * * @param i an iterator whose output will be written to dataOutput. * @param dataOutput a filename. */ public static void STORE_KEYS( final KEY_ITERATOR i, final DataOutput dataOutput ) throws IOException { while( i.hasNext() ) dataOutput.WRITE_KEY( i.NEXT_KEY() ); } /** Stores the element returned by an iterator to a file given by a {@link File} object. * * @param i an iterator whose output will be written to filename. * @param file a file. */ public static void STORE_KEYS( final KEY_ITERATOR i, final File file ) throws IOException { final DataOutputStream dos = new DataOutputStream( new FastBufferedOutputStream( new FileOutputStream( file ) ) ); while( i.hasNext() ) dos.WRITE_KEY( i.NEXT_KEY() ); dos.close(); } /** Stores the element returned by an iterator to a file given by a pathname. * * @param i an iterator whose output will be written to filename. * @param filename a filename. */ public static void STORE_KEYS( final KEY_ITERATOR i, final CharSequence filename ) throws IOException { STORE_KEYS( i, new File( filename.toString() ) ); } /** A wrapper that exhibits the content of a data input stream as a type-specific iterator. */ final private static class KEY_DATA_INPUT_WRAPPER extends KEY_ABSTRACT_ITERATOR { final private DataInput dataInput; private boolean toAdvance = true; private boolean endOfProcess = false; private KEY_TYPE next; public KEY_DATA_INPUT_WRAPPER( final DataInput dataInput ) { this.dataInput = dataInput; } public boolean hasNext() { if ( ! toAdvance ) return ! endOfProcess; toAdvance = false; try { next = dataInput.READ_KEY(); } catch( EOFException eof ) { endOfProcess = true; } catch( IOException rethrow ) { throw new RuntimeException( rethrow ); } return ! endOfProcess; } public KEY_TYPE NEXT_KEY() { if (! hasNext()) throw new NoSuchElementException(); toAdvance = true; return next; } } /** Wraps the given data input stream into an iterator. * * @param dataInput a data input. */ public static KEY_ITERATOR AS_KEY_ITERATOR( final DataInput dataInput ) { return new KEY_DATA_INPUT_WRAPPER( dataInput ); } /** Wraps a file given by a {@link File} object into an iterator. * * @param file a file. */ public static KEY_ITERATOR AS_KEY_ITERATOR( final File file ) throws IOException { return new KEY_DATA_INPUT_WRAPPER( new DataInputStream( new FastBufferedInputStream( new FileInputStream( file ) ) ) ); } /** Wraps a file given by a pathname into an iterator. * * @param filename a filename. 
*/ public static KEY_ITERATOR AS_KEY_ITERATOR( final CharSequence filename ) throws IOException { return AS_KEY_ITERATOR( new File( filename.toString() ) ); } /** Wraps a file given by a {@link File} object into an iterable object. * * @param file a file. */ public static KEY_ITERABLE AS_KEY_ITERABLE( final File file ) { return new KEY_ITERABLE() { public KEY_ITERATOR iterator() { try { return AS_KEY_ITERATOR( file ); } catch( IOException e ) { throw new RuntimeException( e ); } } }; } /** Wraps a file given by a pathname into an iterable object. * * @param filename a filename. */ public static KEY_ITERABLE AS_KEY_ITERABLE( final CharSequence filename ) { return new KEY_ITERABLE() { public KEY_ITERATOR iterator() { try { return AS_KEY_ITERATOR( filename ); } catch( IOException e ) { throw new RuntimeException( e ); } } }; } fastutil-7.1.0/drv/Collection.drv0000664000000000000000000001072013050701620015447 0ustar rootroot/* * Copyright (C) 2002-2016 Sebastiano Vigna * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package PACKAGE; import java.util.Collection; /** A type-specific {@link Collection}; provides some additional methods * that use polymorphism to avoid (un)boxing. * *
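 * (Illustrative sketch, not part of the original template: it assumes the int specialization generated from this interface, i.e. IntCollection, with IntArrayList as a concrete implementation.)
 * <pre>
 * IntCollection c = new IntArrayList();
 * c.add( 42 );                          // type-specific add(int): no boxing
 * boolean found = c.contains( 42 );     // type-specific contains(int): no unboxing
 * int[] elements = c.toIntArray();      // primitive array of the elements
 * </pre>
 *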

Additionally, this interface strengthens (again) {@link #iterator()} and defines * a slightly different semantics for {@link #toArray(Object[])}. * * @see Collection */ public interface COLLECTION KEY_GENERIC extends Collection, KEY_ITERABLE KEY_GENERIC { /** Returns a type-specific iterator on the elements of this collection. * *

Note that this specification strengthens the one given in * {@link java.lang.Iterable#iterator()}, which was already * strengthened in the corresponding type-specific class, * but was weakened by the fact that this interface extends {@link Collection}. * * @return a type-specific iterator on the elements of this collection. */ KEY_ITERATOR KEY_GENERIC iterator(); /** Returns a type-specific iterator on the elements of this collection. * * @see #iterator() * @deprecated As of fastutil 5, replaced by {@link #iterator()}. */ @Deprecated KEY_ITERATOR KEY_GENERIC KEY_ITERATOR_METHOD(); /** Returns an array containing the items of this collection; * the runtime type of the returned array is that of the specified array. * *

Warning: Note that, contrary to {@link Collection#toArray(Object[])}, this * method just writes all elements of this collection: no special * value will be added after the last one. * * @param a if this array is big enough, it will be used to store this collection. * @return a primitive type array containing the items of this collection. * @see Collection#toArray(Object[]) */ T[] toArray(T[] a); #if KEYS_PRIMITIVE /** * @see Collection#contains(Object) */ boolean contains( KEY_TYPE key ); /** Returns a primitive type array containing the items of this collection. * @return a primitive type array containing the items of this collection. * @see Collection#toArray() */ KEY_TYPE[] TO_KEY_ARRAY(); /** Returns a primitive type array containing the items of this collection. * *
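 * (Illustrative sketch, not part of the original template, assuming the int specialization IntCollection/IntArrayList: the type-specific array methods return primitive arrays directly.)
 * <pre>
 * IntCollection c = IntArrayList.wrap( new int[] { 1, 2, 3 } );
 * int[] exact = c.toIntArray();                  // a fresh int[ 3 ]
 * int[] reused = c.toIntArray( new int[ 10 ] );  // first three slots filled, remaining slots left untouched
 * </pre>
 *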

Note that, contrary to {@link Collection#toArray(Object[])}, this * method just writes all elements of this collection: no special * value will be added after the last one. * * @param a if this array is big enough, it will be used to store this collection. * @return a primitive type array containing the items of this collection. * @see Collection#toArray(Object[]) */ KEY_TYPE[] TO_KEY_ARRAY( KEY_TYPE a[] ); /** Returns a primitive type array containing the items of this collection. * *

Note that, contrary to {@link Collection#toArray(Object[])}, this * method just writes all elements of this collection: no special * value will be added after the last one. * * @param a if this array is big enough, it will be used to store this collection. * @return a primitive type array containing the items of this collection. * @see Collection#toArray(Object[]) */ KEY_TYPE[] toArray( KEY_TYPE a[] ); /** * @see Collection#add(Object) */ boolean add( KEY_TYPE key ); /** Note that this method should be called {@link java.util.Collection#remove(Object) remove()}, but the clash * with the similarly named index-based method in the {@link java.util.List} interface * forces us to use a distinguished name. For simplicity, the set interfaces reinstate * remove(). * * @see Collection#remove(Object) */ boolean rem( KEY_TYPE key ); /** * @see Collection#addAll(Collection) */ boolean addAll( COLLECTION c ); /** * @see Collection#containsAll(Collection) */ boolean containsAll( COLLECTION c ); /** * @see Collection#removeAll(Collection) */ boolean removeAll( COLLECTION c ); /** * @see Collection#retainAll(Collection) */ boolean retainAll( COLLECTION c ); #endif } fastutil-7.1.0/drv/Collections.drv0000664000000000000000000002624413050701620015642 0ustar rootroot/* * Copyright (C) 2002-2016 Sebastiano Vigna * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package PACKAGE; import java.util.Collection; import it.unimi.dsi.fastutil.objects.ObjectArrays; /** A class providing static methods and objects that do useful things with type-specific collections. * * @see java.util.Collections */ public class COLLECTIONS { private COLLECTIONS() {} /** An immutable class representing an empty type-specific collection. * *

This class may be useful to implement your own in case you subclass * a type-specific collection. */ public abstract static class EmptyCollection KEY_GENERIC extends ABSTRACT_COLLECTION KEY_GENERIC { protected EmptyCollection() {} public boolean add( KEY_GENERIC_TYPE k ) { throw new UnsupportedOperationException(); } public boolean contains( KEY_TYPE k ) { return false; } public Object[] toArray() { return ObjectArrays.EMPTY_ARRAY; } #if KEYS_PRIMITIVE public KEY_TYPE[] TO_KEY_ARRAY( KEY_TYPE[] a ) { return a; } public KEY_TYPE[] TO_KEY_ARRAY() { return ARRAYS.EMPTY_ARRAY; } public boolean rem( KEY_TYPE k ) { throw new UnsupportedOperationException(); } public boolean addAll( COLLECTION c ) { throw new UnsupportedOperationException(); } public boolean removeAll( COLLECTION c ) { throw new UnsupportedOperationException(); } public boolean retainAll( COLLECTION c ) { throw new UnsupportedOperationException(); } public boolean containsAll( COLLECTION c ) { return c.isEmpty(); } #else public boolean remove( final Object k ) { throw new UnsupportedOperationException(); } public T[] toArray( T[] a ) { if (a.length != 0) a[0] = null; return a; } #endif SUPPRESS_WARNINGS_KEY_UNCHECKED public KEY_BIDI_ITERATOR KEY_GENERIC iterator() { return ITERATORS.EMPTY_ITERATOR; } public int size() { return 0; } public void clear() {} public int hashCode() { return 0; } public boolean equals( Object o ) { if ( o == this ) return true; if ( ! ( o instanceof Collection ) ) return false; return ((Collection)o).isEmpty(); } } /** A synchronized wrapper class for collections. */ public static class SynchronizedCollection KEY_GENERIC implements COLLECTION KEY_GENERIC, java.io.Serializable { private static final long serialVersionUID = -7046029254386353129L; protected final COLLECTION KEY_GENERIC collection; protected final Object sync; protected SynchronizedCollection( final COLLECTION KEY_GENERIC c, final Object sync ) { if ( c == null ) throw new NullPointerException(); this.collection = c; this.sync = sync; } protected SynchronizedCollection( final COLLECTION KEY_GENERIC c ) { if ( c == null ) throw new NullPointerException(); this.collection = c; this.sync = this; } public int size() { synchronized( sync ) { return collection.size(); } } public boolean isEmpty() { synchronized( sync ) { return collection.isEmpty(); } } public boolean contains( final KEY_TYPE o ) { synchronized( sync ) { return collection.contains( o ); } } public KEY_TYPE[] TO_KEY_ARRAY() { synchronized( sync ) { return collection.TO_KEY_ARRAY(); } } #if KEYS_PRIMITIVE public Object[] toArray() { synchronized( sync ) { return collection.toArray(); } } public KEY_TYPE[] TO_KEY_ARRAY( final KEY_TYPE[] a ) { synchronized( sync ) { return collection.TO_KEY_ARRAY( a ); } } public KEY_TYPE[] toArray( final KEY_TYPE[] a ) { synchronized( sync ) { return collection.TO_KEY_ARRAY( a ); } } public boolean addAll( final COLLECTION c ) { synchronized( sync ) { return collection.addAll( c ); } } public boolean containsAll( final COLLECTION c ) { synchronized( sync ) { return collection.containsAll( c ); } } public boolean removeAll( final COLLECTION c ) { synchronized( sync ) { return collection.removeAll( c ); } } public boolean retainAll( final COLLECTION c ) { synchronized( sync ) { return collection.retainAll( c ); } } public boolean add( final KEY_GENERIC_CLASS k ) { synchronized( sync ) { return collection.add( k ); } } public boolean contains( final Object k ) { synchronized( sync ) { return collection.contains( k ); } } #endif public T[] toArray( 
final T[] a ) { synchronized( sync ) { return collection.toArray( a ); } } public KEY_ITERATOR KEY_GENERIC iterator() { return collection.iterator(); } @Deprecated public KEY_ITERATOR KEY_GENERIC KEY_ITERATOR_METHOD() { return iterator(); } public boolean add( final KEY_GENERIC_TYPE k ) { synchronized( sync ) { return collection.add( k ); } } public boolean rem( final KEY_TYPE k ) { synchronized( sync ) { return collection.REMOVE( k ); } } public boolean remove( final Object ok ) { synchronized( sync ) { return collection.remove( ok ); } } public boolean addAll( final Collection c ) { synchronized( sync ) { return collection.addAll( c ); } } public boolean containsAll( final Collection c ) { synchronized( sync ) { return collection.containsAll( c ); } } public boolean removeAll( final Collection c ) { synchronized( sync ) { return collection.removeAll( c ); } } public boolean retainAll( final Collection c ) { synchronized( sync ) { return collection.retainAll( c ); } } public void clear() { synchronized( sync ) { collection.clear(); } } public String toString() { synchronized( sync ) { return collection.toString(); } } } /** Returns a synchronized collection backed by the specified collection. * * @param c the collection to be wrapped in a synchronized collection. * @return a synchronized view of the specified collection. * @see java.util.Collections#synchronizedCollection(Collection) */ public static KEY_GENERIC COLLECTION KEY_GENERIC synchronize( final COLLECTION KEY_GENERIC c ) { return new SynchronizedCollection KEY_GENERIC( c ); } /** Returns a synchronized collection backed by the specified collection, using an assigned object to synchronize. * * @param c the collection to be wrapped in a synchronized collection. * @param sync an object that will be used to synchronize the list access. * @return a synchronized view of the specified collection. * @see java.util.Collections#synchronizedCollection(Collection) */ public static KEY_GENERIC COLLECTION KEY_GENERIC synchronize( final COLLECTION KEY_GENERIC c, final Object sync ) { return new SynchronizedCollection KEY_GENERIC( c, sync ); } /** An unmodifiable wrapper class for collections. 
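 *
 * (Illustrative sketch, not part of the original template: it assumes the int specialization of this class, IntCollections, and shows the synchronize()/unmodifiable() factory methods defined in this file.)
 * <pre>
 * IntCollection sync = IntCollections.synchronize( new IntArrayList() );   // thread-safe view
 * IntCollection frozen = IntCollections.unmodifiable( sync );              // read-only view
 * frozen.add( 1 );                      // throws UnsupportedOperationException
 * </pre>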
*/ public static class UnmodifiableCollection KEY_GENERIC implements COLLECTION KEY_GENERIC, java.io.Serializable { private static final long serialVersionUID = -7046029254386353129L; protected final COLLECTION KEY_GENERIC collection; protected UnmodifiableCollection( final COLLECTION KEY_GENERIC c ) { if ( c == null ) throw new NullPointerException(); this.collection = c; } public int size() { return collection.size(); } public boolean isEmpty() { return collection.isEmpty(); } public boolean contains( final KEY_TYPE o ) { return collection.contains( o ); } public KEY_ITERATOR KEY_GENERIC iterator() { return ITERATORS.unmodifiable( collection.iterator() ); } @Deprecated public KEY_ITERATOR KEY_GENERIC KEY_ITERATOR_METHOD() { return iterator(); } public boolean add( final KEY_GENERIC_TYPE k ) { throw new UnsupportedOperationException(); } public boolean remove( final Object ok ) { throw new UnsupportedOperationException(); } public boolean addAll( final Collection c ) { throw new UnsupportedOperationException(); } public boolean containsAll( final Collection c ) { return collection.containsAll( c ); } public boolean removeAll( final Collection c ) { throw new UnsupportedOperationException(); } public boolean retainAll( final Collection c ) { throw new UnsupportedOperationException(); } public void clear() { throw new UnsupportedOperationException(); } public String toString() { return collection.toString(); } public T[] toArray( final T[] a ) { return collection.toArray( a ); } public Object[] toArray() { return collection.toArray(); } #if KEYS_PRIMITIVE public KEY_TYPE[] TO_KEY_ARRAY() { return collection.TO_KEY_ARRAY(); } public KEY_TYPE[] TO_KEY_ARRAY( final KEY_TYPE[] a ) { return collection.TO_KEY_ARRAY( a ); } public KEY_TYPE[] toArray( final KEY_TYPE[] a ) { return collection.toArray( a ); } public boolean rem( final KEY_TYPE k ) { throw new UnsupportedOperationException(); } public boolean addAll( final COLLECTION c ) { throw new UnsupportedOperationException(); } public boolean containsAll( final COLLECTION c ) { return collection.containsAll( c ); } public boolean removeAll( final COLLECTION c ) { throw new UnsupportedOperationException(); } public boolean retainAll( final COLLECTION c ) { throw new UnsupportedOperationException(); } public boolean add( final KEY_GENERIC_CLASS k ) { throw new UnsupportedOperationException(); } public boolean contains( final Object k ) { return collection.contains( k ); } #endif } /** Returns an unmodifiable collection backed by the specified collection. * * @param c the collection to be wrapped in an unmodifiable collection. * @return an unmodifiable view of the specified collection. * @see java.util.Collections#unmodifiableCollection(Collection) */ public static KEY_GENERIC COLLECTION KEY_GENERIC unmodifiable( final COLLECTION KEY_GENERIC c ) { return new UnmodifiableCollection KEY_GENERIC( c ); } /** A collection wrapper class for iterables. */ public static class IterableCollection KEY_GENERIC extends ABSTRACT_COLLECTION KEY_GENERIC implements java.io.Serializable { private static final long serialVersionUID = -7046029254386353129L; protected final KEY_ITERABLE KEY_GENERIC iterable; protected IterableCollection( final KEY_ITERABLE KEY_GENERIC iterable ) { if ( iterable == null ) throw new NullPointerException(); this.iterable = iterable; } public int size() { int c = 0; final KEY_ITERATOR KEY_GENERIC iterator = iterator(); while( iterator.hasNext() ) { iterator.next(); c++; } return c; } public boolean isEmpty() { return ! 
iterable.iterator().hasNext(); } public KEY_ITERATOR KEY_GENERIC iterator() { return iterable.iterator(); } @Deprecated public KEY_ITERATOR KEY_GENERIC KEY_ITERATOR_METHOD() { return iterator(); } } /** Returns an unmodifiable collection backed by the specified iterable. * * @param iterable the iterable object to be wrapped in an unmodifiable collection. * @return an unmodifiable collection view of the specified iterable. */ public static KEY_GENERIC COLLECTION KEY_GENERIC asCollection( final KEY_ITERABLE KEY_GENERIC iterable ) { if ( iterable instanceof COLLECTION ) return (COLLECTION KEY_GENERIC)iterable; return new IterableCollection KEY_GENERIC( iterable ); } } fastutil-7.1.0/drv/Comparator.drv0000664000000000000000000000255013050701620015465 0ustar rootroot/* * Copyright (C) 2002-2016 Sebastiano Vigna * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package PACKAGE; import java.util.Comparator; /** A type-specific {@link Comparator}; provides methods to compare two primitive types both as objects * and as primitive types. * *
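 * (Illustrative sketch, not part of the original template: it assumes the int specialization, IntComparator, implemented through the abstract helper class AbstractIntComparator mentioned in the next paragraph.)
 * <pre>
 * IntComparator byAbsoluteValue = new AbstractIntComparator() {
 *     public int compare( int a, int b ) { return Integer.compare( Math.abs( a ), Math.abs( b ) ); }
 * };
 * byAbsoluteValue.compare( -3, 2 );      // compares primitives directly, no boxing
 * </pre>
 *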

Note that fastutil provides a corresponding abstract class that * can be used to implement this interface just by specifying the type-specific * comparator. * * @see Comparator */ public interface KEY_COMPARATOR KEY_GENERIC extends Comparator { /** Compares the given primitive types. * * @see java.util.Comparator * @return A positive integer, zero, or a negative integer if the first * argument is greater than, equal to, or smaller than, respectively, the * second one. */ public int compare( KEY_TYPE k1, KEY_TYPE k2 ); } fastutil-7.1.0/drv/Comparators.drv0000664000000000000000000000653513050701620015657 0ustar rootroot/* * Copyright (C) 2003-2016 Paolo Boldi and Sebastiano Vigna * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package PACKAGE; #if KEYS_REFERENCE import java.util.Comparator; #endif /** A class providing static methods and objects that do useful things with comparators. */ public class COMPARATORS { private COMPARATORS() {} /** A type-specific comparator mimicking the natural order. */ #if KEYS_REFERENCE SUPPRESS_WARNINGS_KEY_UNCHECKED_RAWTYPES protected static class NaturalImplicitComparator implements Comparator, java.io.Serializable { #else protected static class NaturalImplicitComparator extends KEY_ABSTRACT_COMPARATOR KEY_GENERIC implements java.io.Serializable { #endif private static final long serialVersionUID = 1L; public final int compare( final KEY_TYPE a, final KEY_TYPE b ) { #if KEYS_PRIMITIVE return KEY_CMP( a, b ); #else return ((Comparable)a).compareTo(b); #endif } private Object readResolve() { return NATURAL_COMPARATOR; } }; SUPPRESS_WARNINGS_KEY_RAWTYPES public static final KEY_COMPARATOR NATURAL_COMPARATOR = new NaturalImplicitComparator(); /** A type-specific comparator mimicking the opposite of the natural order. 
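 *
 * (Illustrative sketch, not part of the original template: it assumes the int specialization, IntComparators, together with IntArrays.quickSort().)
 * <pre>
 * int[] a = { 3, 1, 2 };
 * IntArrays.quickSort( a, IntComparators.OPPOSITE_COMPARATOR );   // descending order: 3, 2, 1
 * IntComparator c = IntComparators.oppositeComparator( IntComparators.NATURAL_COMPARATOR );  // same order, built explicitly
 * </pre>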
*/ #if KEYS_REFERENCE SUPPRESS_WARNINGS_KEY_UNCHECKED_RAWTYPES protected static class OppositeImplicitComparator implements Comparator, java.io.Serializable { #else protected static class OppositeImplicitComparator extends KEY_ABSTRACT_COMPARATOR KEY_GENERIC implements java.io.Serializable { #endif private static final long serialVersionUID = 1L; public final int compare( final KEY_TYPE a, final KEY_TYPE b ) { #if KEYS_PRIMITIVE return - KEY_CMP( a, b ); #else return ((Comparable)b).compareTo(a); #endif } private Object readResolve() { return OPPOSITE_COMPARATOR; } }; SUPPRESS_WARNINGS_KEY_RAWTYPES public static final KEY_COMPARATOR OPPOSITE_COMPARATOR = new OppositeImplicitComparator(); #if KEYS_REFERENCE protected static class OppositeComparator KEY_GENERIC implements Comparator KEY_GENERIC, java.io.Serializable { #else protected static class OppositeComparator KEY_GENERIC extends KEY_ABSTRACT_COMPARATOR KEY_GENERIC implements java.io.Serializable { #endif private static final long serialVersionUID = 1L; private final KEY_COMPARATOR KEY_GENERIC comparator; protected OppositeComparator( final KEY_COMPARATOR KEY_GENERIC c ) { comparator = c; } public final int compare( final KEY_GENERIC_TYPE a, final KEY_GENERIC_TYPE b ) { return comparator.compare( b, a ); } }; /** Returns a comparator representing the opposite order of the given comparator. * * @param c a comparator. * @return a comparator representing the opposite order of c. */ public static KEY_GENERIC KEY_COMPARATOR KEY_GENERIC oppositeComparator( final KEY_COMPARATOR KEY_GENERIC c ) { return new OppositeComparator KEY_GENERIC ( c ); } } fastutil-7.1.0/drv/Function.drv0000664000000000000000000000722113050701620015143 0ustar rootroot/* * Copyright (C) 2002-2016 Sebastiano Vigna * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package PACKAGE; import it.unimi.dsi.fastutil.Function; /** A type-specific {@link Function}; provides some additional methods that use polymorphism to avoid (un)boxing. * *

Type-specific versions of get(), put() and * remove() cannot rely on null to denote absence of * a key. Rather, they return a {@linkplain #defaultReturnValue() default * return value}, which is set to 0 cast to the return type (false * for booleans) at creation, but can be changed using the * defaultReturnValue() method. * *
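 * (Illustrative sketch, not part of the original template: it assumes the int-to-int specialization, Int2IntFunction, with Int2IntOpenHashMap as a concrete implementation.)
 * <pre>
 * Int2IntFunction f = new Int2IntOpenHashMap();
 * f.defaultReturnValue( -1 );    // replaces the initial default of 0
 * f.put( 10, 42 );
 * f.get( 10 );                   // 42
 * f.get( 99 );                   // -1: missing keys yield the default return value, not null
 * </pre>
 *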

For uniformity reasons, even maps returning objects implement the default * return value (of course, in this case the default return value is * initialized to null). * *

Warning: to fall in line as much as possible with the * {@linkplain java.util.Map standard map interface}, it is strongly suggested * that standard versions of get(), put() and * remove() for maps with primitive-type values return * null to denote missing keys rather than wrap the default * return value in an object (of course, for maps with object keys and values * this is not possible, as there is no type-specific version). * * @see Function */ public interface FUNCTION KEY_VALUE_GENERIC extends Function { #if KEYS_PRIMITIVE || VALUES_PRIMITIVE /** Adds a pair to the map. * * @param key the key. * @param value the value. * @return the old value, or the {@linkplain #defaultReturnValue() default return value} if no value was present for the given key. * @see Function#put(Object,Object) */ VALUE_GENERIC_TYPE put( KEY_GENERIC_TYPE key, VALUE_GENERIC_TYPE value ); /** Returns the value to which the given key is mapped. * * @param key the key. * @return the corresponding value, or the {@linkplain #defaultReturnValue() default return value} if no value was present for the given key. * @see Function#get(Object) */ VALUE_GENERIC_TYPE GET_VALUE( KEY_TYPE key ); /** Removes the mapping with the given key. * @param key the key. * @return the old value, or the {@linkplain #defaultReturnValue() default return value} if no value was present for the given key. * @see Function#remove(Object) */ VALUE_GENERIC_TYPE REMOVE_VALUE( KEY_TYPE key ); #endif #if KEYS_PRIMITIVE /** * @see Function#containsKey(Object) */ boolean containsKey( KEY_TYPE key ); #endif /** Sets the default return value. * * This value must be returned by type-specific versions of * get(), put() and remove() to * denote that the map does not contain the specified key. It must be * 0/false/null by default. * * @param rv the new default return value. * @see #defaultReturnValue() */ void defaultReturnValue( VALUE_GENERIC_TYPE rv ); /** Gets the default return value. * * @return the current default return value. */ VALUE_GENERIC_TYPE defaultReturnValue(); } fastutil-7.1.0/drv/Functions.drv0000664000000000000000000002557713050701620015344 0ustar rootroot/* * Copyright (C) 2002-2016 Sebastiano Vigna * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package PACKAGE; /** A class providing static methods and objects that do useful things with type-specific functions. * * @see it.unimi.dsi.fastutil.Function * @see java.util.Collections */ public class FUNCTIONS { private FUNCTIONS() {} /** An immutable class representing an empty type-specific function. * *

This class may be useful to implement your own in case you subclass * a type-specific function. */ public static class EmptyFunction KEY_VALUE_GENERIC extends ABSTRACT_FUNCTION KEY_VALUE_GENERIC implements java.io.Serializable, Cloneable { private static final long serialVersionUID = -7046029254386353129L; protected EmptyFunction() {} public VALUE_GENERIC_TYPE GET_VALUE( final KEY_TYPE k ) { return VALUE_NULL; } public boolean containsKey( final KEY_TYPE k ) { return false; } public VALUE_GENERIC_TYPE defaultReturnValue() { return VALUE_NULL; } public void defaultReturnValue( final VALUE_GENERIC_TYPE defRetValue ) { throw new UnsupportedOperationException(); } #if KEYS_PRIMITIVE #if VALUES_PRIMITIVE /** {@inheritDoc} * @deprecated Please use the corresponding type-specific method instead. */ @Deprecated #endif @Override public VALUE_GENERIC_CLASS get( final Object k ) { return null; } #endif public int size() { return 0; } public void clear() {} private Object readResolve() { return EMPTY_FUNCTION; } public Object clone() { return EMPTY_FUNCTION; } } /** An empty type-specific function (immutable). It is serializable and cloneable. */ SUPPRESS_WARNINGS_KEY_VALUE_RAWTYPES public static final EmptyFunction EMPTY_FUNCTION = new EmptyFunction(); /** An immutable class representing a type-specific singleton function. * *

This class may be useful to implement your own in case you subclass * a type-specific function. */ public static class Singleton KEY_VALUE_GENERIC extends ABSTRACT_FUNCTION KEY_VALUE_GENERIC implements java.io.Serializable, Cloneable { private static final long serialVersionUID = -7046029254386353129L; protected final KEY_GENERIC_TYPE key; protected final VALUE_GENERIC_TYPE value; protected Singleton( final KEY_GENERIC_TYPE key, final VALUE_GENERIC_TYPE value ) { this.key = key; this.value = value; } public boolean containsKey( final KEY_TYPE k ) { return KEY_EQUALS( key, k ); } public VALUE_GENERIC_TYPE GET_VALUE( final KEY_TYPE k ) { if ( KEY_EQUALS( key, k ) ) return value; return defRetValue; } public int size() { return 1; } public Object clone() { return this; } } /** Returns a type-specific immutable function containing only the specified pair. The returned function is serializable and cloneable. * *

Note that albeit the returned function is immutable, its default return value may be changed. * * @param key the only key of the returned function. * @param value the only value of the returned function. * @return a type-specific immutable function containing just the pair <key,value>. */ public static KEY_VALUE_GENERIC FUNCTION KEY_VALUE_GENERIC singleton( final KEY_GENERIC_TYPE key, VALUE_GENERIC_TYPE value ) { return new Singleton KEY_VALUE_GENERIC( key, value ); } #if KEYS_PRIMITIVE || VALUES_PRIMITIVE /** Returns a type-specific immutable function containing only the specified pair. The returned function is serializable and cloneable. * *
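 * (Illustrative sketch, not part of the original template: it assumes the int-to-int specialization, Int2IntFunctions.)
 * <pre>
 * Int2IntFunction one = Int2IntFunctions.singleton( 1, 100 );
 * one.get( 1 );                    // 100
 * one.get( 2 );                    // 0, the initial default return value
 * one.defaultReturnValue( -1 );    // allowed even though the mapping itself is immutable
 * one.get( 2 );                    // now -1
 * </pre>
 *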

Note that albeit the returned function is immutable, its default return value may be changed. * * @param key the only key of the returned function. * @param value the only value of the returned function. * @return a type-specific immutable function containing just the pair <key,value>. */ public static KEY_VALUE_GENERIC FUNCTION KEY_VALUE_GENERIC singleton( final KEY_GENERIC_CLASS key, final VALUE_GENERIC_CLASS value ) { return new Singleton KEY_VALUE_GENERIC( KEY_CLASS2TYPE( key ), VALUE_CLASS2TYPE( value ) ); } #endif /** A synchronized wrapper class for functions. */ public static class SynchronizedFunction KEY_VALUE_GENERIC extends ABSTRACT_FUNCTION KEY_VALUE_GENERIC implements java.io.Serializable { private static final long serialVersionUID = -7046029254386353129L; protected final FUNCTION KEY_VALUE_GENERIC function; protected final Object sync; protected SynchronizedFunction( final FUNCTION KEY_VALUE_GENERIC f, final Object sync ) { if ( f == null ) throw new NullPointerException(); this.function = f; this.sync = sync; } protected SynchronizedFunction( final FUNCTION KEY_VALUE_GENERIC f ) { if ( f == null ) throw new NullPointerException(); this.function = f; this.sync = this; } public int size() { synchronized( sync ) { return function.size(); } } public boolean containsKey( final KEY_TYPE k ) { synchronized( sync ) { return function.containsKey( k ); } } public VALUE_GENERIC_TYPE defaultReturnValue() { synchronized( sync ) { return function.defaultReturnValue(); } } public void defaultReturnValue( final VALUE_GENERIC_TYPE defRetValue ) { synchronized( sync ) { function.defaultReturnValue( defRetValue ); } } public VALUE_GENERIC_TYPE put( final KEY_GENERIC_TYPE k, final VALUE_GENERIC_TYPE v ) { synchronized( sync ) { return function.put( k, v ); } } public void clear() { synchronized( sync ) { function.clear(); } } public String toString() { synchronized( sync ) { return function.toString(); } } #if KEYS_PRIMITIVE || VALUES_PRIMITIVE /** {@inheritDoc} * @deprecated Please use the corresponding type-specific method instead. */ @Deprecated @Override public VALUE_GENERIC_CLASS put( final KEY_GENERIC_CLASS k, final VALUE_GENERIC_CLASS v ) { synchronized( sync ) { return function.put( k, v ); } } #if VALUES_PRIMITIVE /** {@inheritDoc} * @deprecated Please use the corresponding type-specific method instead. */ @Deprecated #endif @Override public VALUE_GENERIC_CLASS get( final Object k ) { synchronized( sync ) { return function.get( k ); } } #if VALUES_PRIMITIVE /** {@inheritDoc} * @deprecated Please use the corresponding type-specific method instead. */ @Deprecated #endif @Override public VALUE_GENERIC_CLASS remove( final Object k ) { synchronized( sync ) { return function.remove( k ); } } #endif #if KEYS_PRIMITIVE @Override public VALUE_GENERIC_TYPE remove( final KEY_GENERIC_TYPE k ) { synchronized( sync ) { return function.remove( k ); } } @Override public VALUE_GENERIC_TYPE get( final KEY_GENERIC_TYPE k ) { synchronized( sync ) { return function.get( k ); } } public boolean containsKey( final Object ok ) { synchronized( sync ) { return function.containsKey( ok ); } } #endif #if KEYS_REFERENCE @Override public VALUE_GENERIC_TYPE REMOVE_VALUE( final Object k ) { synchronized( sync ) { return function.REMOVE_VALUE( k ); } } @Override public VALUE_GENERIC_TYPE GET_VALUE( final Object k ) { synchronized( sync ) { return function.GET_VALUE( k ); } } #endif } /** Returns a synchronized type-specific function backed by the given type-specific function. 
* * @param f the function to be wrapped in a synchronized function. * @return a synchronized view of the specified function. * @see java.util.Collections#synchronizedMap(java.util.Map) */ public static KEY_VALUE_GENERIC FUNCTION KEY_VALUE_GENERIC synchronize( final FUNCTION KEY_VALUE_GENERIC f ) { return new SynchronizedFunction KEY_VALUE_GENERIC( f ); } /** Returns a synchronized type-specific function backed by the given type-specific function, using an assigned object to synchronize. * * @param f the function to be wrapped in a synchronized function. * @param sync an object that will be used to synchronize the access to the function. * @return a synchronized view of the specified function. * @see java.util.Collections#synchronizedMap(java.util.Map) */ public static KEY_VALUE_GENERIC FUNCTION KEY_VALUE_GENERIC synchronize( final FUNCTION KEY_VALUE_GENERIC f, final Object sync ) { return new SynchronizedFunction KEY_VALUE_GENERIC( f, sync ); } /** An unmodifiable wrapper class for functions. */ public static class UnmodifiableFunction KEY_VALUE_GENERIC extends ABSTRACT_FUNCTION KEY_VALUE_GENERIC implements java.io.Serializable { private static final long serialVersionUID = -7046029254386353129L; protected final FUNCTION KEY_VALUE_GENERIC function; protected UnmodifiableFunction( final FUNCTION KEY_VALUE_GENERIC f ) { if ( f == null ) throw new NullPointerException(); this.function = f; } public int size() { return function.size(); } public boolean containsKey( final KEY_TYPE k ) { return function.containsKey( k ); } public VALUE_GENERIC_TYPE defaultReturnValue() { return function.defaultReturnValue(); } public void defaultReturnValue( final VALUE_GENERIC_TYPE defRetValue ) { throw new UnsupportedOperationException(); } public VALUE_GENERIC_TYPE put( final KEY_GENERIC_TYPE k, final VALUE_GENERIC_TYPE v ) { throw new UnsupportedOperationException(); } public void clear() { throw new UnsupportedOperationException(); } public String toString() { return function.toString(); } #if KEYS_PRIMITIVE #if VALUES_PRIMITIVE /** {@inheritDoc} * @deprecated Please use the corresponding type-specific method instead. */ @Deprecated #endif @Override public VALUE_GENERIC_TYPE remove( final KEY_GENERIC_TYPE k ) { throw new UnsupportedOperationException(); } #if VALUES_PRIMITIVE /** {@inheritDoc} * @deprecated Please use the corresponding type-specific method instead. */ @Deprecated #endif @Override public VALUE_GENERIC_TYPE get( final KEY_GENERIC_TYPE k ) { return function.get( k ); } public boolean containsKey( final Object ok ) { return function.containsKey( ok ); } #endif #if KEYS_REFERENCE || VALUES_REFERENCE @Override public VALUE_GENERIC_TYPE REMOVE_VALUE( final Object k ) { throw new UnsupportedOperationException(); } @Override public VALUE_GENERIC_TYPE GET_VALUE( final Object k ) { return function.GET_VALUE( k ); } #endif } /** Returns an unmodifiable type-specific function backed by the given type-specific function. * * @param f the function to be wrapped in an unmodifiable function. * @return an unmodifiable view of the specified function. 
* @see java.util.Collections#unmodifiableMap(java.util.Map) */ public static KEY_VALUE_GENERIC FUNCTION KEY_VALUE_GENERIC unmodifiable( final FUNCTION KEY_VALUE_GENERIC f ) { return new UnmodifiableFunction KEY_VALUE_GENERIC( f ); } } fastutil-7.1.0/drv/Hash.drv0000664000000000000000000000263213050701620014242 0ustar rootroot/* * Copyright (C) 2002-2016 Sebastiano Vigna * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package PACKAGE; import it.unimi.dsi.fastutil.Hash; /** A type-specific {@link Hash} interface. * * @see Hash */ public interface HASH { /** A type-specific hash strategy. * * @see it.unimi.dsi.fastutil.Hash.Strategy */ public interface Strategy { /** Returns the hash code of the specified element with respect to this hash strategy. * * @param e an element. * @return the hash code of the given element with respect to this hash strategy. */ public int hashCode( KEY_TYPE e ); /** Returns true if the given elements are equal with respect to this hash strategy. * * @param a an element. * @param b another element. * @return true if the two specified elements are equal with respect to this hash strategy. */ public boolean equals( KEY_TYPE a, KEY_TYPE b ); } } fastutil-7.1.0/drv/HeapIndirectPriorityQueue.drv0000664000000000000000000006075613050701620020500 0ustar rootroot/* * Copyright (C) 2003-2016 Paolo Boldi and Sebastiano Vigna * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package PACKAGE; #if KEY_CLASS_Object import java.util.Comparator; #endif import it.unimi.dsi.fastutil.ints.IntArrays; import java.util.Arrays; import java.util.NoSuchElementException; /** A type-specific heap-based indirect priority queue. * *

Instances of this class use an additional inversion array, of the same length as the reference array, * to keep track of the heap position containing a given element of the reference array. The priority queue is * represented using a heap. The heap is enlarged as needed, but it is never * shrunk. Use the {@link #trim()} method to reduce its size, if necessary. * *
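 * (Illustrative sketch, not part of the original template: it assumes the int specialization, IntHeapIndirectPriorityQueue, using the natural order on the reference array.)
 * <pre>
 * int[] refArray = { 4, 1, 3 };
 * IntHeapIndirectPriorityQueue q = new IntHeapIndirectPriorityQueue( refArray );
 * q.enqueue( 0 ); q.enqueue( 1 ); q.enqueue( 2 );
 * q.first();       // 1, the index of the smallest reference value
 * refArray[ 1 ] = 10;
 * q.changed( 1 );  // tell the queue that refArray[ 1 ] changed
 * q.dequeue();     // 2, since refArray[ 2 ] == 3 is now the smallest
 * </pre>
 *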

This implementation does not allow one to enqueue several times the same index. */ public class HEAP_INDIRECT_PRIORITY_QUEUE KEY_GENERIC extends HEAP_SEMI_INDIRECT_PRIORITY_QUEUE KEY_GENERIC { /** The inversion array. */ protected final int inv[]; /** Creates a new empty queue with a given capacity and comparator. * * @param refArray the reference array. * @param capacity the initial capacity of this queue. * @param c the comparator used in this queue, or null for the natural order. */ public HEAP_INDIRECT_PRIORITY_QUEUE( KEY_GENERIC_TYPE[] refArray, int capacity, KEY_COMPARATOR KEY_SUPER_GENERIC c ) { super( refArray, capacity, c ); if ( capacity > 0 ) this.heap = new int[ capacity ]; this.c = c; this.inv = new int[ refArray.length ]; Arrays.fill( inv, -1 ); } /** Creates a new empty queue with a given capacity and using the natural order. * * @param refArray the reference array. * @param capacity the initial capacity of this queue. */ public HEAP_INDIRECT_PRIORITY_QUEUE( KEY_GENERIC_TYPE[] refArray, int capacity ) { this( refArray, capacity, null ); } /** Creates a new empty queue with capacity equal to the length of the reference array and a given comparator. * * @param refArray the reference array. * @param c the comparator used in this queue, or null for the natural order. */ public HEAP_INDIRECT_PRIORITY_QUEUE( KEY_GENERIC_TYPE[] refArray, KEY_COMPARATOR KEY_SUPER_GENERIC c ) { this( refArray, refArray.length, c ); } /** Creates a new empty queue with capacity equal to the length of the reference array and using the natural order. * @param refArray the reference array. */ public HEAP_INDIRECT_PRIORITY_QUEUE( KEY_GENERIC_TYPE[] refArray ) { this( refArray, refArray.length, null ); } /** Wraps a given array in a queue using a given comparator. * *

The queue returned by this method will be backed by the given array. * The first size elements of the array will be rearranged so as to form a heap (this is * more efficient than enqueuing the elements of a one by one). * * @param refArray the reference array. * @param a an array of indices into refArray. * @param size the number of elements to be included in the queue. * @param c the comparator used in this queue, or null for the natural order. */ public HEAP_INDIRECT_PRIORITY_QUEUE( final KEY_GENERIC_TYPE[] refArray, final int[] a, final int size, final KEY_COMPARATOR KEY_SUPER_GENERIC c ) { this( refArray, 0, c ); this.heap = a; this.size = size; int i = size; while( i-- != 0 ) { if ( inv[ a[ i ] ] != -1 ) throw new IllegalArgumentException( "Index " + a[ i ] + " appears twice in the heap" ); inv[ a[ i ] ] = i; } INDIRECT_HEAPS.makeHeap( refArray, a, inv, size, c ); } /** Wraps a given array in a queue using a given comparator. * *

The queue returned by this method will be backed by the given array. * The elements of the array will be rearranged so as to form a heap (this is * more efficient than enqueuing the elements of a one by one). * * @param refArray the reference array. * @param a an array of indices into refArray. * @param c the comparator used in this queue, or null for the natural order. */ public HEAP_INDIRECT_PRIORITY_QUEUE( final KEY_GENERIC_TYPE[] refArray, final int[] a, final KEY_COMPARATOR KEY_SUPER_GENERIC c ) { this( refArray, a, a.length, c ); } /** Wraps a given array in a queue using the natural order. * *

The queue returned by this method will be backed by the given array. * The first size elements of the array will be rearranged so as to form a heap (this is * more efficient than enqueuing the elements of a one by one). * * @param refArray the reference array. * @param a an array of indices into refArray. * @param size the number of elements to be included in the queue. */ public HEAP_INDIRECT_PRIORITY_QUEUE( final KEY_GENERIC_TYPE[] refArray, final int[] a, int size ) { this( refArray, a, size, null ); } /** Wraps a given array in a queue using the natural order. * *
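 * (Illustrative sketch, not part of the original template, assuming the int specialization IntHeapIndirectPriorityQueue: the index array is heapified in place, so no element-by-element enqueue() is needed.)
 * <pre>
 * int[] refArray = { 4, 1, 3 };
 * int[] index = { 0, 1, 2 };      // each index may appear at most once
 * IntHeapIndirectPriorityQueue q = new IntHeapIndirectPriorityQueue( refArray, index );
 * q.first();                      // 1, since refArray[ 1 ] is the smallest value
 * </pre>
 *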

The queue returned by this method will be backed by the given array. * The elements of the array will be rearranged so to form a heap (this is * more efficient than enqueing the elements of a one by one). * * @param refArray the reference array. * @param a an array of indices into refArray. */ public HEAP_INDIRECT_PRIORITY_QUEUE( final KEY_GENERIC_TYPE[] refArray, final int[] a ) { this( refArray, a, a.length ); } public void enqueue( final int x ) { if ( inv[ x ] >= 0 ) throw new IllegalArgumentException( "Index " + x + " belongs to the queue" ); if ( size == heap.length ) heap = IntArrays.grow( heap, size + 1 ); inv[ heap[ size ] = x ] = size++; INDIRECT_HEAPS.upHeap( refArray, heap, inv, size, size - 1, c ); } public boolean contains( final int index ) { return inv[ index ] >= 0; } public int dequeue() { if ( size == 0 ) throw new NoSuchElementException(); final int result = heap[ 0 ]; if ( --size != 0 ) inv[ heap[ 0 ] = heap[ size ] ] = 0; inv[ result ] = -1; if ( size != 0 ) INDIRECT_HEAPS.downHeap( refArray, heap, inv, size, 0, c ); return result; } public void changed() { INDIRECT_HEAPS.downHeap( refArray, heap, inv, size, 0, c ); } public void changed( final int index ) { final int pos = inv[ index ]; if ( pos < 0 ) throw new IllegalArgumentException( "Index " + index + " does not belong to the queue" ); final int newPos = INDIRECT_HEAPS.upHeap( refArray, heap, inv, size, pos, c ); INDIRECT_HEAPS.downHeap( refArray, heap, inv, size, newPos, c ); } /** Rebuilds this heap in a bottom-up fashion. */ public void allChanged() { INDIRECT_HEAPS.makeHeap( refArray, heap, inv, size, c ); } public boolean remove( final int index ) { final int result = inv[ index ]; if ( result < 0 ) return false; inv[ index ] = -1; if ( result < --size ) { inv[ heap[ result ] = heap[ size ] ] = result; final int newPos = INDIRECT_HEAPS.upHeap( refArray, heap, inv, size, result, c ); INDIRECT_HEAPS.downHeap( refArray, heap, inv, size, newPos, c ); } return true; } public void clear() { size = 0; Arrays.fill( inv, -1 ); } #ifdef TEST /** The original class, now just used for testing. */ private static class TestQueue { /** The reference array */ private KEY_TYPE refArray[]; /** Its length */ private int N; /** The number of elements in the heaps */ private int n; /** The two comparators */ private KEY_COMPARATOR primaryComp, secondaryComp; /** Two indirect heaps are used, called primary and secondary. Each of them contains a permutation of n among the indices 0, 1, ..., N-1 in such a way that the corresponding objects be sorted with respect to the two comparators. We also need an array inSec[] so that inSec[k] is the index of secondary containing k. */ private int primary[], secondary[], inSec[]; /** Builds a double indirect priority queue. * @param refArray The reference array. * @param primaryComp The primary comparator. * @param secondaryComp The secondary comparator. */ public TestQueue( KEY_TYPE refArray[], KEY_COMPARATOR primaryComp, KEY_COMPARATOR secondaryComp ) { this.refArray = refArray; this.N = refArray.length; assert this.N != 0; this.n = 0; this.primaryComp = primaryComp; this.secondaryComp = secondaryComp; this.primary = new int[N]; this.secondary = new int[N]; this.inSec = new int[N]; java.util.Arrays.fill( inSec, -1 ); } /** Adds an index to the queue. Notice that the index should not be already present in the queue. 
* @param i The index to be added */ public void add( int i ) { if ( i < 0 || i >= refArray.length ) throw new IndexOutOfBoundsException(); if ( inSec[ i ] >= 0 ) throw new IllegalArgumentException(); primary[n] = i; secondary[n] = i; inSec[i] = n; n++; swimPrimary( n-1 ); swimSecondary( n-1 ); } /** Heapify the primary heap. * @param i The index of the heap to be heapified. */ private void heapifyPrimary( int i ) { int dep = primary[i]; int child; while ( ( child = 2*i+1 ) < n ) { if ( child+1 < n && primaryComp.compare( refArray[primary[child+1]], refArray[primary[child]] ) < 0 ) child++; if ( primaryComp.compare( refArray[dep], refArray[primary[child]] ) <= 0 ) break; primary[i] = primary[child]; i = child; } primary[i] = dep; } /** Heapify the secondary heap. * @param i The index of the heap to be heapified. */ private void heapifySecondary( int i ) { int dep = secondary[i]; int child; while ( ( child = 2*i+1 ) < n ) { if ( child+1 < n && secondaryComp.compare( refArray[secondary[child+1]], refArray[secondary[child]] ) < 0 ) child++; if ( secondaryComp.compare( refArray[dep], refArray[secondary[child]] ) <= 0 ) break; secondary[i] = secondary[child]; inSec[secondary[i]] = i; i = child; } secondary[i] = dep; inSec[secondary[i]] = i; } /** Swim and heapify the primary heap. * @param i The index to be moved. */ private void swimPrimary( int i ) { int dep = primary[i]; int parent; while ( i != 0 && ( parent = ( i - 1 ) / 2 ) >= 0 ) { if ( primaryComp.compare( refArray[primary[parent]], refArray[dep] ) <= 0 ) break; primary[i] = primary[parent]; i = parent; } primary[i] = dep; heapifyPrimary( i ); } /** Swim and heapify the secondary heap. * @param i The index to be moved. */ private void swimSecondary( int i ) { int dep = secondary[i]; int parent; while ( i != 0 && ( parent = ( i - 1 ) / 2 ) >= 0 ) { if ( secondaryComp.compare( refArray[secondary[parent]], refArray[dep] ) <= 0 ) break; secondary[i] = secondary[parent]; inSec[secondary[i]] = i; i = parent; } secondary[i] = dep; inSec[secondary[i]] = i; heapifySecondary( i ); } /** Returns the minimum element with respect to the primary comparator. @return the minimum element. */ public int top() { if ( n == 0 ) throw new java.util.NoSuchElementException(); return primary[0]; } /** Returns the minimum element with respect to the secondary comparator. @return the minimum element. */ public int secTop() { if ( n == 0 ) throw new java.util.NoSuchElementException(); return secondary[0]; } /** Removes the minimum element with respect to the primary comparator. * @return the removed element. 
*/ public boolean remove() { if ( n == 0 ) throw new java.util.NoSuchElementException(); if ( inSec[primary[0]] == -1 ) return false; int result = primary[0]; int ins = inSec[result]; inSec[ result ] = -1; // Copy a leaf primary[0] = primary[n-1]; if ( ins == n-1 ) { n--; heapifyPrimary( 0 ); return true; } secondary[ins] = secondary[n-1]; inSec[secondary[ins]] = ins; // Heapify n--; heapifyPrimary( 0 ); swimSecondary( ins ); return true; } public void clear() { while( size() != 0 ) remove(); } public void remove( int index ) { if ( index >= refArray.length ) throw new IndexOutOfBoundsException(); if ( inSec[index] == -1 ) return; int ins = inSec[index]; inSec[ index ] = -1; // Copy a leaf primary[ins] = primary[n-1]; if ( ins == n-1 ) { n--; swimPrimary( ins ); return; } secondary[ins] = secondary[n-1]; inSec[secondary[ins]] = ins; // Heapify n--; swimPrimary( ins ); swimSecondary( ins ); } /** Signals that the minimum element with respect to the comparator has changed. */ public void change() { if ( n == 0 ) throw new java.util.NoSuchElementException(); if ( inSec[primary[0]] == -1 ) throw new IllegalArgumentException(); int ins = inSec[primary[0]]; heapifyPrimary( 0 ); swimSecondary( ins ); } public void change(int index) { if ( index >= refArray.length ) throw new IndexOutOfBoundsException(); if ( inSec[index] == -1 ) throw new IllegalArgumentException(); if ( n == 0 ) throw new java.util.NoSuchElementException(); int ins = inSec[ index ]; swimPrimary( ins ); swimSecondary( ins ); } /** Returns the number of elements in the queue. * @return the size of the queue */ public int size() { return n; } public String toString() { String s = "["; for ( int i = 0; i < n; i++ ) s += refArray[primary[i]]+", "; return s+ "]"; } } private static long seed = System.currentTimeMillis(); private static java.util.Random r = new java.util.Random( seed ); private static KEY_TYPE genKey() { #if KEY_CLASS_Byte || KEY_CLASS_Short || KEY_CLASS_Character return (KEY_TYPE)(r.nextInt()); #elif KEYS_PRIMITIVE return r.NEXT_KEY(); #elif KEY_CLASS_Object return Integer.toBinaryString( r.nextInt() ); #else return new java.io.Serializable() {}; #endif } private static java.text.NumberFormat format = new java.text.DecimalFormat( "#,###.00" ); private static java.text.FieldPosition p = new java.text.FieldPosition( 0 ); private static String format( double d ) { StringBuffer s = new StringBuffer(); return format.format( d, s, p ).toString(); } private static void speedTest( int n, boolean comp ) { System.out.println( "There are presently no speed tests for this class." 
); } private static void fatal( String msg ) { System.out.println( msg ); System.exit( 1 ); } private static void ensure( boolean cond, String msg ) { if ( cond ) return; fatal( msg ); } private static boolean heapEqual( int[] a, int[] b, int sizea, int sizeb ) { if ( sizea != sizeb ) return false; while( sizea-- != 0 ) if ( a[sizea] != b[sizea] ) return false; return true; } private static boolean invEqual( int inva[], int[] invb ) { int i = inva.length; while( i-- != 0 ) if ( inva[ i ] != invb[ i ] ) return false; return true; } protected static void test( int n ) { long ms; Exception mThrowsIllegal, tThrowsIllegal, mThrowsOutOfBounds, tThrowsOutOfBounds, mThrowsNoElement, tThrowsNoElement; int rm = 0, rt = 0; KEY_TYPE[] refArray = new KEY_TYPE[ n ]; for( int i = 0; i < n; i++ ) refArray[ i ] = genKey(); HEAP_INDIRECT_PRIORITY_QUEUE m = new HEAP_INDIRECT_PRIORITY_QUEUE( refArray, COMPARATORS.NATURAL_COMPARATOR ); TestQueue t = new TestQueue( refArray, COMPARATORS.NATURAL_COMPARATOR, COMPARATORS.NATURAL_COMPARATOR ); /* We add pairs to t. */ for( int i = 0; i < n / 2; i++ ) { t.add( i ); m.enqueue( i ); } ensure( heapEqual( m.heap, t.primary, m.size(), t.size() ), "Error (" + seed + "): m and t differ after creation (" + m + ", " + t + ")" ); ensure( invEqual( m.inv, t.inSec ), "Error (" + seed + "): m and t differ in inversion arrays after creation (" + java.util.Arrays.toString( m.inv ) + ", " + java.util.Arrays.toString( t.inSec ) + ")" ); /* Now we add and remove random data in m and t, checking that the result is the same. */ for(int i=0; i<2*n; i++ ) { if ( r.nextDouble() < 0.01 ) { t.clear(); m.clear(); for( int j = 0; j < n / 2; j++ ) { t.add( j ); m.enqueue( j ); } } int T = r.nextInt( 2 * n ); mThrowsNoElement = tThrowsNoElement = mThrowsOutOfBounds = tThrowsOutOfBounds = mThrowsIllegal = tThrowsIllegal = null; try { m.enqueue( T ); } catch ( IndexOutOfBoundsException e ) { mThrowsOutOfBounds = e; } catch ( IllegalArgumentException e ) { mThrowsIllegal = e; } try { t.add( T ); } catch ( IndexOutOfBoundsException e ) { tThrowsOutOfBounds = e; } catch ( IllegalArgumentException e ) { tThrowsIllegal = e; } ensure( ( mThrowsOutOfBounds == null ) == ( tThrowsOutOfBounds == null ), "Error (" + seed + "): enqueue() divergence in IndexOutOfBoundsException for " + T + " (" + mThrowsOutOfBounds + ", " + tThrowsOutOfBounds + ")" ); ensure( ( mThrowsIllegal == null ) == ( tThrowsIllegal == null ), "Error (" + seed + "): enqueue() divergence in IllegalArgumentException for " + T + " (" + mThrowsIllegal + ", " + tThrowsIllegal + ")" ); ensure( heapEqual( m.heap, t.primary, m.size(), t.size() ), "Error (" + seed + "): m and t differ after enqueue (" + m + ", " + t + ")" ); ensure( invEqual( m.inv, t.inSec ), "Error (" + seed + "): m and t differ in inversion arrays after enqueue (" + java.util.Arrays.toString( m.inv ) + ", " + java.util.Arrays.toString( t.inSec ) + ")" ); if ( m.size() != 0 ) { ensure( m.first() == t.top(), "Error (" + seed + "): m and t differ in first element after enqueue (" + m.first() + ", " + t.top() + ")"); } mThrowsNoElement = tThrowsNoElement = mThrowsOutOfBounds = tThrowsOutOfBounds = mThrowsIllegal = tThrowsIllegal = null; try { rm = m.dequeue(); } catch ( IndexOutOfBoundsException e ) { mThrowsOutOfBounds = e; } catch ( IllegalArgumentException e ) { mThrowsIllegal = e; } catch ( java.util.NoSuchElementException e ) { mThrowsNoElement = e; } try { rt = t.top(); t.remove(); } catch ( IndexOutOfBoundsException e ) { tThrowsOutOfBounds = e; } catch ( 
IllegalArgumentException e ) { tThrowsIllegal = e; } catch ( java.util.NoSuchElementException e ) { tThrowsNoElement = e; } ensure( ( mThrowsOutOfBounds == null ) == ( tThrowsOutOfBounds == null ), "Error (" + seed + "): dequeue() divergence in IndexOutOfBoundsException (" + mThrowsOutOfBounds + ", " + tThrowsOutOfBounds + ")" ); ensure( ( mThrowsIllegal == null ) == ( tThrowsIllegal == null ), "Error (" + seed + "): dequeue() divergence in IllegalArgumentException (" + mThrowsIllegal + ", " + tThrowsIllegal + ")" ); ensure( ( mThrowsNoElement == null ) == ( tThrowsNoElement == null ), "Error (" + seed + "): dequeue() divergence in java.util.NoSuchElementException (" + mThrowsNoElement + ", " + tThrowsNoElement + ")" ); if ( mThrowsOutOfBounds == null ) ensure( rt == rm , "Error (" + seed + "): divergence in dequeue() between t and m (" + rt + ", " + rm + ")" ); ensure( heapEqual( m.heap, t.primary, m.size(), t.size() ), "Error (" + seed + "): m and t differ after dequeue (" + m + ", " + t + ")" ); ensure( invEqual( m.inv, t.inSec ), "Error (" + seed + "): m and t differ in inversion arrays after dequeue (" + java.util.Arrays.toString( m.inv ) + ", " + java.util.Arrays.toString( t.inSec ) + ")" ); if ( m.size() != 0 ) { ensure( m.first() == t.top(), "Error (" + seed + "): m and t differ in first element after dequeue (" + m.first() + ", " + t.top() + ")"); } int pos = r.nextInt( n * 2 ); try { m.remove( pos ); } catch ( IndexOutOfBoundsException e ) { mThrowsOutOfBounds = e; } catch ( IllegalArgumentException e ) { mThrowsIllegal = e; } catch ( java.util.NoSuchElementException e ) { mThrowsNoElement = e; } try { t.remove( pos ); } catch ( IndexOutOfBoundsException e ) { tThrowsOutOfBounds = e; } catch ( IllegalArgumentException e ) { tThrowsIllegal = e; } catch ( java.util.NoSuchElementException e ) { tThrowsNoElement = e; } ensure( ( mThrowsOutOfBounds == null ) == ( tThrowsOutOfBounds == null ), "Error (" + seed + "): remove(int) divergence in IndexOutOfBoundsException (" + mThrowsOutOfBounds + ", " + tThrowsOutOfBounds + ")" ); ensure( ( mThrowsIllegal == null ) == ( tThrowsIllegal == null ), "Error (" + seed + "): remove(int) divergence in IllegalArgumentException (" + mThrowsIllegal + ", " + tThrowsIllegal + ")" ); ensure( ( mThrowsNoElement == null ) == ( tThrowsNoElement == null ), "Error (" + seed + "): remove(int) divergence in java.util.NoSuchElementException (" + mThrowsNoElement + ", " + tThrowsNoElement + ")" ); if ( mThrowsOutOfBounds == null ) ensure( rt == rm , "Error (" + seed + "): divergence in remove(int) between t and m (" + rt + ", " + rm + ")" ); ensure( heapEqual( m.heap, t.primary, m.size(), t.size() ), "Error (" + seed + "): m and t differ after remove(int) (" + m + ", " + t + ")" ); ensure( invEqual( m.inv, t.inSec ), "Error (" + seed + "): m and t differ in inversion arrays after remove(int) (" + java.util.Arrays.toString( m.inv ) + ", " + java.util.Arrays.toString( t.inSec ) + ")" ); if ( m.size() != 0 ) { ensure( m.first() == t.top(), "Error (" + seed + "): m and t differ in first element after remove(int) (" + m.first() + ", " + t.top() + ")"); } pos = r.nextInt( n * 2 ); try { m.changed( pos ); } catch ( IndexOutOfBoundsException e ) { mThrowsOutOfBounds = e; } catch ( IllegalArgumentException e ) { mThrowsIllegal = e; } catch ( java.util.NoSuchElementException e ) { mThrowsNoElement = e; } try { t.change( pos ); } catch ( IndexOutOfBoundsException e ) { tThrowsOutOfBounds = e; } catch ( IllegalArgumentException e ) { tThrowsIllegal = e; } catch ( 
java.util.NoSuchElementException e ) { tThrowsNoElement = e; } ensure( ( mThrowsOutOfBounds == null ) == ( tThrowsOutOfBounds == null ), "Error (" + seed + "): change(int) divergence in IndexOutOfBoundsException (" + mThrowsOutOfBounds + ", " + tThrowsOutOfBounds + ")" ); ensure( ( mThrowsIllegal == null ) == ( tThrowsIllegal == null ), "Error (" + seed + "): change(int) divergence in IllegalArgumentException (" + mThrowsIllegal + ", " + tThrowsIllegal + ")" ); ensure( ( mThrowsNoElement == null ) == ( tThrowsNoElement == null ), "Error (" + seed + "): change(int) divergence in java.util.NoSuchElementException (" + mThrowsNoElement + ", " + tThrowsNoElement + ")" ); if ( mThrowsOutOfBounds == null ) ensure( rt == rm , "Error (" + seed + "): divergence in change(int) between t and m (" + rt + ", " + rm + ")" ); ensure( heapEqual( m.heap, t.primary, m.size(), t.size() ), "Error (" + seed + "): m and t differ after change(int) (" + m + ", " + t + ")" ); ensure( invEqual( m.inv, t.inSec ), "Error (" + seed + "): m and t differ in inversion arrays after change(int) (" + java.util.Arrays.toString( m.inv ) + ", " + java.util.Arrays.toString( t.inSec ) + ")" ); if ( m.size() != 0 ) { ensure( m.first() == t.top(), "Error (" + seed + "): m and t differ in first element after change(int) (" + m.first() + ", " + t.top() + ")"); } if ( m.size() != 0 ) { refArray[ m.first() ] = genKey(); m.changed(); t.change(); ensure( heapEqual( m.heap, t.primary, m.size(), t.size() ), "Error (" + seed + "): m and t differ after change (" + m + ", " + t + ")" ); ensure( invEqual( m.inv, t.inSec ), "Error (" + seed + "): m and t differ in inversion arrays after change (" + java.util.Arrays.toString( m.inv ) + ", " + java.util.Arrays.toString( t.inSec ) + ")" ); if ( m.size() != 0 ) { ensure( m.first() == t.top(), "Error (" + seed + "): m and t differ in first element after change (" + m.first() + ", " + t.top() + ")"); } } } /* Now we check that m actually holds the same data. */ m.clear(); ensure( m.isEmpty(), "Error (" + seed + "): m is not empty after clear()" ); System.out.println("Test OK"); } public static void main( String args[] ) { int n = Integer.parseInt(args[1]); if ( args.length > 2 ) r = new java.util.Random( seed = Long.parseLong( args[ 2 ] ) ); try { if ("speedTest".equals(args[0]) || "speedComp".equals(args[0])) speedTest( n, "speedComp".equals(args[0]) ); else if ( "test".equals( args[0] ) ) test(n); } catch( Throwable e ) { e.printStackTrace( System.err ); System.err.println( "seed: " + seed ); } } #endif } fastutil-7.1.0/drv/HeapPriorityQueue.drv0000664000000000000000000004273013050701620017006 0ustar rootroot/* * Copyright (C) 2003-2016 Paolo Boldi and Sebastiano Vigna * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package PACKAGE; #if KEY_CLASS_Object import java.util.Arrays; import java.util.Comparator; import it.unimi.dsi.fastutil.AbstractPriorityQueue; #else import java.util.Iterator; #endif import java.util.Collection; import java.util.NoSuchElementException; /** A type-specific heap-based priority queue. 
* *

Instances of this class represent a priority queue using a heap. The heap is enlarged as needed, but * it is never shrunk. Use the {@link #trim()} method to reduce its size, if necessary. */ public class HEAP_PRIORITY_QUEUE KEY_GENERIC extends ABSTRACT_PRIORITY_QUEUE KEY_GENERIC implements java.io.Serializable { private static final long serialVersionUID = 1L; /** The heap array. */ SUPPRESS_WARNINGS_KEY_UNCHECKED protected transient KEY_GENERIC_TYPE[] heap = KEY_GENERIC_ARRAY_CAST ARRAYS.EMPTY_ARRAY; /** The number of elements in this queue. */ protected int size; /** The type-specific comparator used in this queue. */ protected KEY_COMPARATOR KEY_SUPER_GENERIC c; /** Creates a new empty queue with a given capacity and comparator. * * @param capacity the initial capacity of this queue. * @param c the comparator used in this queue, or null for the natural order. */ SUPPRESS_WARNINGS_KEY_UNCHECKED public HEAP_PRIORITY_QUEUE( int capacity, KEY_COMPARATOR KEY_SUPER_GENERIC c ) { if ( capacity > 0 ) this.heap = KEY_GENERIC_ARRAY_CAST new KEY_TYPE[ capacity ]; this.c = c; } /** Creates a new empty queue with a given capacity and using the natural order. * * @param capacity the initial capacity of this queue. */ public HEAP_PRIORITY_QUEUE( int capacity ) { this( capacity, null ); } /** Creates a new empty queue with a given comparator. * * @param c the comparator used in this queue, or null for the natural order. */ public HEAP_PRIORITY_QUEUE( KEY_COMPARATOR KEY_SUPER_GENERIC c ) { this( 0, c ); } /** Creates a new empty queue using the natural order. */ public HEAP_PRIORITY_QUEUE() { this( 0, null ); } /** Wraps a given array in a queue using a given comparator. * *
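Before the remaining constructors, a minimal usage sketch may help. It assumes the int instantiation generated from this template, it.unimi.dsi.fastutil.ints.IntHeapPriorityQueue, and the natural ordering; dequeueInt() and firstInt() are the int expansions of DEQUEUE() and FIRST().

    import it.unimi.dsi.fastutil.ints.IntHeapPriorityQueue;

    public class HeapQueueSketch {
        public static void main( String[] args ) {
            // Empty queue over the natural order; the backing array grows as needed.
            IntHeapPriorityQueue q = new IntHeapPriorityQueue();
            q.enqueue( 3 );
            q.enqueue( 1 );
            q.enqueue( 2 );
            System.out.println( q.firstInt() ); // 1: the smallest element is on top
            while ( ! q.isEmpty() ) System.out.print( q.dequeueInt() + " " ); // prints 1 2 3
            q.trim(); // shrink the backing array to the current size
        }
    }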

 The queue returned by this method will be backed by the given array. * The first size elements of the array will be rearranged so as to form a heap (this is * more efficient than enqueueing the elements of a one by one). * * @param a an array. * @param size the number of elements to be included in the queue. * @param c the comparator used in this queue, or null for the natural order. */ public HEAP_PRIORITY_QUEUE( final KEY_GENERIC_TYPE[] a, int size, final KEY_COMPARATOR KEY_SUPER_GENERIC c ) { this( c ); this.heap = a; this.size = size; HEAPS.makeHeap( a, size, c ); } /** Wraps a given array in a queue using a given comparator. * *

 The queue returned by this method will be backed by the given array. * The elements of the array will be rearranged so as to form a heap (this is * more efficient than enqueueing the elements of a one by one). * * @param a an array. * @param c the comparator used in this queue, or null for the natural order. */ public HEAP_PRIORITY_QUEUE( final KEY_GENERIC_TYPE[] a, final KEY_COMPARATOR KEY_SUPER_GENERIC c ) { this( a, a.length, c ); } /** Wraps a given array in a queue using the natural order. * *

 The queue returned by this method will be backed by the given array. * The first size elements of the array will be rearranged so as to form a heap (this is * more efficient than enqueueing the elements of a one by one). * * @param a an array. * @param size the number of elements to be included in the queue. */ public HEAP_PRIORITY_QUEUE( final KEY_GENERIC_TYPE[] a, int size ) { this( a, size, null ); } /** Wraps a given array in a queue using the natural order. * *

 The queue returned by this method will be backed by the given array. * The elements of the array will be rearranged so as to form a heap (this is * more efficient than enqueueing the elements of a one by one). * * @param a an array. */ public HEAP_PRIORITY_QUEUE( final KEY_GENERIC_TYPE[] a ) { this( a, a.length ); } #if KEYS_PRIMITIVE /** Creates a queue using the elements in a type-specific collection using a given comparator. * *
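A sketch of the array-wrapping constructors above, again assuming the int instantiation IntHeapPriorityQueue; the queue is backed by the array passed in, so dequeueing rearranges that array in place.

    import it.unimi.dsi.fastutil.ints.IntHeapPriorityQueue;

    public class WrapArraySketch {
        public static void main( String[] args ) {
            int[] a = { 4, 2, 6, 1, 5 };
            // Only the first three elements (4, 2, 6) are enqueued; they are heapified in place.
            IntHeapPriorityQueue q = new IntHeapPriorityQueue( a, 3 );
            System.out.println( q.dequeueInt() ); // 2
            System.out.println( q.dequeueInt() ); // 4
            System.out.println( q.dequeueInt() ); // 6
        }
    }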

 This constructor is more efficient than enqueueing the elements of collection one by one. * * @param collection a collection; its elements will be used to initialize the queue. * @param c the comparator used in this queue, or null for the natural order. */ public HEAP_PRIORITY_QUEUE( final COLLECTION KEY_EXTENDS_GENERIC collection, final KEY_COMPARATOR KEY_SUPER_GENERIC c ) { this( collection.TO_KEY_ARRAY(), c ); } /** Creates a queue using the elements in a type-specific collection using the natural order. * *

 This constructor is * more efficient than enqueueing the elements of collection one by one. * * @param collection a collection; its elements will be used to initialize the queue. */ public HEAP_PRIORITY_QUEUE( final COLLECTION KEY_EXTENDS_GENERIC collection ) { this( collection, null ); } /** Creates a queue using the elements in a collection using a given comparator. * *
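Passing a comparator changes which element is considered smallest; for instance, the opposite of the natural order turns the queue into a max-heap. A sketch, assuming the int instantiation and the IntComparators.OPPOSITE_COMPARATOR constant provided by fastutil:

    import it.unimi.dsi.fastutil.ints.IntComparators;
    import it.unimi.dsi.fastutil.ints.IntHeapPriorityQueue;

    public class MaxHeapSketch {
        public static void main( String[] args ) {
            // With the opposite of the natural order, the largest element is dequeued first.
            IntHeapPriorityQueue q = new IntHeapPriorityQueue( new int[] { 3, 1, 2 }, IntComparators.OPPOSITE_COMPARATOR );
            System.out.println( q.dequeueInt() ); // 3
            System.out.println( q.dequeueInt() ); // 2
        }
    }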

 This constructor is more efficient than enqueueing the elements of collection one by one. * * @param collection a collection; its elements will be used to initialize the queue. * @param c the comparator used in this queue, or null for the natural order. */ public HEAP_PRIORITY_QUEUE( final Collection collection, final KEY_COMPARATOR KEY_SUPER_GENERIC c ) { this( collection.size(), c ); final Iterator iterator = collection.iterator(); final int size = collection.size(); for( int i = 0 ; i < size; i++ ) heap[ i ] = KEY_OBJ2TYPE( iterator.next() ); /* Record the number of copied elements and restore the heap invariant. */ this.size = size; HEAPS.makeHeap( heap, size, c ); } /** Creates a queue using the elements in a collection using the natural order. * *

 This constructor is * more efficient than enqueueing the elements of collection one by one. * * @param collection a collection; its elements will be used to initialize the queue. */ public HEAP_PRIORITY_QUEUE( final Collection collection ) { this( collection, null ); } #else /** Creates a queue using the elements in a collection using a given comparator. * *

 This constructor is more efficient than enqueueing the elements of collection one by one. * * @param collection a collection; its elements will be used to initialize the queue. * @param c the comparator used in this queue, or null for the natural order. */ SUPPRESS_WARNINGS_KEY_UNCHECKED public HEAP_PRIORITY_QUEUE( final Collection collection, final KEY_COMPARATOR KEY_SUPER_GENERIC c ) { this( KEY_GENERIC_ARRAY_CAST collection.toArray(), c ); } /** Creates a queue using the elements in a collection using the natural order. * *

This constructor is * more efficient than enqueing the elements of collection one by one. * * @param collection a collection; its elements will be used to initialize the queue. */ public HEAP_PRIORITY_QUEUE( final Collection collection ) { this( collection, null ); } #endif public void enqueue( KEY_GENERIC_TYPE x ) { if ( size == heap.length ) heap = ARRAYS.grow( heap, size + 1 ); heap[ size++ ] = x; HEAPS.upHeap( heap, size, size - 1, c ); } public KEY_GENERIC_TYPE DEQUEUE() { if ( size == 0 ) throw new NoSuchElementException(); final KEY_GENERIC_TYPE result = heap[ 0 ]; heap[ 0 ] = heap[ --size ]; #if KEY_CLASS_Object heap[ size ] = null; #endif if ( size != 0 ) HEAPS.downHeap( heap, size, 0, c ); return result; } public KEY_GENERIC_TYPE FIRST() { if ( size == 0 ) throw new NoSuchElementException(); return heap[ 0 ]; } public void changed() { HEAPS.downHeap( heap, size, 0, c ); } public int size() { return size; } public void clear() { #if KEY_CLASS_Object Arrays.fill( heap, 0, size, null ); #endif size = 0; } /** Trims the underlying heap array so that it has exactly {@link #size()} elements. */ public void trim() { heap = ARRAYS.trim( heap, size ); } public KEY_COMPARATOR KEY_SUPER_GENERIC comparator() { return c; } private void writeObject(java.io.ObjectOutputStream s) throws java.io.IOException { s.defaultWriteObject(); s.writeInt( heap.length ); for( int i = 0; i < size; i++ ) s.WRITE_KEY( heap[ i ] ); } SUPPRESS_WARNINGS_KEY_UNCHECKED private void readObject(java.io.ObjectInputStream s) throws java.io.IOException, ClassNotFoundException { s.defaultReadObject(); heap = KEY_GENERIC_ARRAY_CAST new KEY_TYPE[ s.readInt() ]; for( int i = 0; i < size; i++ ) heap[ i ] = KEY_GENERIC_CAST s.READ_KEY(); } #ifdef TEST private static long seed = System.currentTimeMillis(); private static java.util.Random r = new java.util.Random( seed ); private static KEY_TYPE genKey() { #if KEY_CLASS_Byte || KEY_CLASS_Short || KEY_CLASS_Character return (KEY_TYPE)(r.nextInt()); #elif KEYS_PRIMITIVE return r.NEXT_KEY(); #elif KEY_CLASS_Object return Integer.toBinaryString( r.nextInt() ); #else return new java.io.Serializable() {}; #endif } private static java.text.NumberFormat format = new java.text.DecimalFormat( "#,###.00" ); private static java.text.FieldPosition p = new java.text.FieldPosition( 0 ); private static String format( double d ) { StringBuffer s = new StringBuffer(); return format.format( d, s, p ).toString(); } private static void speedTest( int n, boolean comp ) { System.out.println( "There are presently no speed tests for this class." ); } private static void fatal( String msg ) { System.out.println( msg ); System.exit( 1 ); } private static void ensure( boolean cond, String msg ) { if ( cond ) return; fatal( msg ); } private static boolean heapEqual( KEY_TYPE[] a, KEY_TYPE[] b, int sizea, int sizeb ) { if ( sizea != sizeb ) return false; KEY_TYPE[] aa = (KEY_TYPE[])a.clone(); KEY_TYPE[] bb = (KEY_TYPE[])b.clone(); java.util.Arrays.sort( aa, 0, sizea ); java.util.Arrays.sort( bb, 0, sizeb ); while( sizea-- != 0 ) if ( ! 
KEY_EQUALS( aa[sizea], bb[sizea] ) ) return false; return true; } private static KEY_TYPE k[]; protected static void test( int n ) { long ms; Exception mThrowsIllegal, tThrowsIllegal, mThrowsOutOfBounds, tThrowsOutOfBounds, mThrowsNoElement, tThrowsNoElement; KEY_TYPE rm = KEY_NULL, rt = KEY_NULL; k = new KEY_TYPE[ n ]; for( int i = 0; i < n; i++ ) k[i] = genKey(); HEAP_PRIORITY_QUEUE m = new HEAP_PRIORITY_QUEUE( COMPARATORS.NATURAL_COMPARATOR ); ARRAY_PRIORITY_QUEUE t = new ARRAY_PRIORITY_QUEUE( COMPARATORS.NATURAL_COMPARATOR ); /* We add pairs to t. */ for( int i = 0; i < n / 2; i++ ) { t.enqueue( k[ i ] ); m.enqueue( k[ i ] ); } ensure( heapEqual( m.heap, t.array, m.size(), t.size() ), "Error (" + seed + "): m and t differ after creation (" + m + ", " + t + ")" ); if ( m.size() != 0 ) { ensure( KEY_EQUALS(m.FIRST(), t.FIRST()), "Error (" + seed + "): m and t differ in first element after creation (" + m.FIRST() + ", " + t.FIRST() + ")"); } /* Now we add and remove random data in m and t, checking that the result is the same. */ for(int i=0; i<2*n; i++ ) { if ( r.nextDouble() < 0.01 ) { t.clear(); m.clear(); for( int j = 0; j < n / 2; j++ ) { t.enqueue( k[ j ] ); m.enqueue( k[ j ] ); } } KEY_TYPE T = genKey(); mThrowsNoElement = tThrowsNoElement = mThrowsOutOfBounds = tThrowsOutOfBounds = mThrowsIllegal = tThrowsIllegal = null; try { m.enqueue( T ); } catch ( IndexOutOfBoundsException e ) { mThrowsOutOfBounds = e; } catch ( IllegalArgumentException e ) { mThrowsIllegal = e; } try { t.enqueue( T ); } catch ( IndexOutOfBoundsException e ) { tThrowsOutOfBounds = e; } catch ( IllegalArgumentException e ) { tThrowsIllegal = e; } ensure( ( mThrowsOutOfBounds == null ) == ( tThrowsOutOfBounds == null ), "Error (" + seed + "): enqueue() divergence in IndexOutOfBoundsException for " + T + " (" + mThrowsOutOfBounds + ", " + tThrowsOutOfBounds + ")" ); ensure( ( mThrowsIllegal == null ) == ( tThrowsIllegal == null ), "Error (" + seed + "): enqueue() divergence in IllegalArgumentException for " + T + " (" + mThrowsIllegal + ", " + tThrowsIllegal + ")" ); ensure( heapEqual( m.heap, t.array, m.size(), t.size() ), "Error (" + seed + "): m and t differ after enqueue (" + m + ", " + t + ")" ); if ( m.size() != 0 ) { ensure( KEY_EQUALS(m.FIRST(), t.FIRST()), "Error (" + seed + "): m and t differ in first element after enqueue (" + m.FIRST() + ", " + t.FIRST() + ")"); } mThrowsNoElement = tThrowsNoElement = mThrowsOutOfBounds = tThrowsOutOfBounds = mThrowsIllegal = tThrowsIllegal = null; try { rm = m.DEQUEUE(); } catch ( IndexOutOfBoundsException e ) { mThrowsOutOfBounds = e; } catch ( IllegalArgumentException e ) { mThrowsIllegal = e; } catch ( NoSuchElementException e ) { mThrowsNoElement = e; } try { rt = t.DEQUEUE(); } catch ( IndexOutOfBoundsException e ) { tThrowsOutOfBounds = e; } catch ( IllegalArgumentException e ) { tThrowsIllegal = e; } catch ( NoSuchElementException e ) { tThrowsNoElement = e; } ensure( ( mThrowsOutOfBounds == null ) == ( tThrowsOutOfBounds == null ), "Error (" + seed + "): dequeue() divergence in IndexOutOfBoundsException (" + mThrowsOutOfBounds + ", " + tThrowsOutOfBounds + ")" ); ensure( ( mThrowsIllegal == null ) == ( tThrowsIllegal == null ), "Error (" + seed + "): dequeue() divergence in IllegalArgumentException (" + mThrowsIllegal + ", " + tThrowsIllegal + ")" ); ensure( ( mThrowsNoElement == null ) == ( tThrowsNoElement == null ), "Error (" + seed + "): dequeue() divergence in NoSuchElementException (" + mThrowsNoElement + ", " + tThrowsNoElement + ")" ); if ( 
mThrowsOutOfBounds == null ) ensure( KEY_EQUALS(rt, rm) , "Error (" + seed + "): divergence in dequeue() between t and m (" + rt + ", " + rm + ")" ); ensure( heapEqual( m.heap, t.array, m.size(), t.size() ), "Error (" + seed + "): m and t differ after dequeue (" + m + ", " + t + ")"); if ( m.size() != 0 ) { ensure( KEY_EQUALS(m.FIRST(), t.FIRST()), "Error (" + seed + "): m and t differ in first element after dequeue (" + m.FIRST() + ", " + t.FIRST() + ")"); } /* Now we save and read m. */ try { java.io.File ff = new java.io.File("it.unimi.dsi.fastutil.test"); java.io.OutputStream os = new java.io.FileOutputStream(ff); java.io.ObjectOutputStream oos = new java.io.ObjectOutputStream(os); oos.writeObject(m); oos.close(); java.io.InputStream is = new java.io.FileInputStream(ff); java.io.ObjectInputStream ois = new java.io.ObjectInputStream(is); m = (HEAP_PRIORITY_QUEUE)ois.readObject(); ois.close(); ff.delete(); } catch(Exception e) { e.printStackTrace(); System.exit( 1 ); } ensure( heapEqual( m.heap, t.array, m.size(), t.size() ), "Error (" + seed + "): m and t differ after save/read" ); HEAP_PRIORITY_QUEUE m2 = new HEAP_PRIORITY_QUEUE( t.array, t.size() ); ARRAY_PRIORITY_QUEUE t2 = new ARRAY_PRIORITY_QUEUE( m.heap, m.size() ); m = m2; t = t2; ensure( heapEqual( m.heap, t.array, m.size(), t.size() ), "Error (" + seed + "): m and t differ after wrap (" + m + ", " + t + ")"); if ( m.size() != 0 ) { ensure( KEY_EQUALS(m.FIRST(), t.FIRST()), "Error (" + seed + "): m and t differ in first element after wrap (" + m.FIRST() + ", " + t.FIRST() + ")"); } if ( m.size() != 0 && ( ( new OPEN_HASH_SET( m.heap, 0, m.size ) ).size() == m.size() ) ) { int j = t.size(), M = --j; #if KEYS_PRIMITIVE while( j-- != 0 ) if ( KEY_LESS( t.array[ j ], t.array[ M ] ) ) M = j; #else while( j-- != 0 ) if ( ((Comparable)t.array[ j ]).compareTo( t.array[ M ] )< 0 ) M = j; #endif m.heap[ 0 ] = t.array[ M ] = genKey(); m.changed(); t.changed(); ensure( heapEqual( m.heap, t.array, m.size(), t.size() ), "Error (" + seed + "): m and t differ after change (" + m + ", " + t + ")"); if ( m.size() != 0 ) { ensure( KEY_EQUALS(m.FIRST(), t.FIRST()), "Error (" + seed + "): m and t differ in first element after change (" + m.FIRST() + ", " + t.FIRST() + ")"); } } } /* Now we check that m actually holds the same data. */ m.clear(); ensure( m.isEmpty(), "Error (" + seed + "): m is not empty after clear()" ); System.out.println("Test OK"); } public static void main( String args[] ) { int n = Integer.parseInt(args[1]); if ( args.length > 2 ) r = new java.util.Random( seed = Long.parseLong( args[ 2 ] ) ); try { if ("speedTest".equals(args[0]) || "speedComp".equals(args[0])) speedTest( n, "speedComp".equals(args[0]) ); else if ( "test".equals( args[0] ) ) test(n); } catch( Throwable e ) { e.printStackTrace( System.err ); System.err.println( "seed: " + seed ); } } #endif } fastutil-7.1.0/drv/HeapSemiIndirectPriorityQueue.drv0000664000000000000000000004674613050701620021321 0ustar rootroot/* * Copyright (C) 2003-2016 Paolo Boldi and Sebastiano Vigna * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
* See the License for the specific language governing permissions and * limitations under the License. */ package PACKAGE; #if KEY_CLASS_Object import java.util.Comparator; import it.unimi.dsi.fastutil.IndirectPriorityQueue; #endif import java.util.NoSuchElementException; import it.unimi.dsi.fastutil.ints.IntArrays; import it.unimi.dsi.fastutil.AbstractIndirectPriorityQueue; /** A type-specific heap-based semi-indirect priority queue. * *

 Instances of this class use a reference array as their reference list, * which must be provided to each constructor. The priority queue is * represented using a heap. The heap is enlarged as needed, but it is never * shrunk. Use the {@link #trim()} method to reduce its size, if necessary. * *
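A sketch of the idea, assuming the double instantiation DoubleHeapSemiIndirectPriorityQueue: the queue stores indices into the reference array, and ordering is decided by the referenced values. It also shows the changed() pattern (safe here because every index is enqueued exactly once).

    import it.unimi.dsi.fastutil.doubles.DoubleHeapSemiIndirectPriorityQueue;

    public class SemiIndirectSketch {
        public static void main( String[] args ) {
            double[] cost = { 3.0, 1.0, 2.0 };
            DoubleHeapSemiIndirectPriorityQueue q = new DoubleHeapSemiIndirectPriorityQueue( cost );
            for ( int i = 0; i < cost.length; i++ ) q.enqueue( i ); // enqueue indices, not values
            System.out.println( q.first() );   // 1, the index of the smallest cost
            cost[ q.first() ] = 10.0;          // make the current top worse...
            q.changed();                       // ...and tell the queue its first element changed
            System.out.println( q.dequeue() ); // 2, the index of cost 2.0
        }
    }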

This implementation allows one to enqueue several time the same index, but * you must be careful when calling {@link #changed()}. */ public class HEAP_SEMI_INDIRECT_PRIORITY_QUEUE KEY_GENERIC extends AbstractIndirectPriorityQueue implements INDIRECT_PRIORITY_QUEUE KEY_GENERIC { /** The reference array. */ protected final KEY_GENERIC_TYPE refArray[]; /** The semi-indirect heap. */ protected int heap[] = IntArrays.EMPTY_ARRAY; /** The number of elements in this queue. */ protected int size; /** The type-specific comparator used in this queue. */ protected KEY_COMPARATOR KEY_SUPER_GENERIC c; /** Creates a new empty queue without elements with a given capacity and comparator. * * @param refArray the reference array. * @param capacity the initial capacity of this queue. * @param c the comparator used in this queue, or null for the natural order. */ public HEAP_SEMI_INDIRECT_PRIORITY_QUEUE( KEY_GENERIC_TYPE[] refArray, int capacity, KEY_COMPARATOR KEY_SUPER_GENERIC c ) { if ( capacity > 0 ) this.heap = new int[ capacity ]; this.refArray = refArray; this.c = c; } /** Creates a new empty queue with given capacity and using the natural order. * * @param refArray the reference array. * @param capacity the initial capacity of this queue. */ public HEAP_SEMI_INDIRECT_PRIORITY_QUEUE( KEY_GENERIC_TYPE[] refArray, int capacity ) { this( refArray, capacity, null ); } /** Creates a new empty queue with capacity equal to the length of the reference array and a given comparator. * * @param refArray the reference array. * @param c the comparator used in this queue, or null for the natural order. */ public HEAP_SEMI_INDIRECT_PRIORITY_QUEUE( KEY_GENERIC_TYPE[] refArray, KEY_COMPARATOR KEY_SUPER_GENERIC c ) { this( refArray, refArray.length, c ); } /** Creates a new empty queue with capacity equal to the length of the reference array and using the natural order. * @param refArray the reference array. */ public HEAP_SEMI_INDIRECT_PRIORITY_QUEUE( final KEY_GENERIC_TYPE[] refArray ) { this( refArray, refArray.length, null ); } /** Wraps a given array in a queue using a given comparator. * *

 The queue returned by this method will be backed by the given array. * The first size elements of the array will be rearranged so as to form a heap (this is * more efficient than enqueueing the elements of a one by one). * * @param refArray the reference array. * @param a an array of indices into refArray. * @param size the number of elements to be included in the queue. * @param c the comparator used in this queue, or null for the natural order. */ public HEAP_SEMI_INDIRECT_PRIORITY_QUEUE( final KEY_GENERIC_TYPE[] refArray, final int[] a, int size, final KEY_COMPARATOR KEY_SUPER_GENERIC c ) { this( refArray, 0, c ); this.heap = a; this.size = size; SEMI_INDIRECT_HEAPS.makeHeap( refArray, a, size, c ); } /** Wraps a given array in a queue using a given comparator. * *

 The queue returned by this method will be backed by the given array. * The elements of the array will be rearranged so as to form a heap (this is * more efficient than enqueueing the elements of a one by one). * * @param refArray the reference array. * @param a an array of indices into refArray. * @param c the comparator used in this queue, or null for the natural order. */ public HEAP_SEMI_INDIRECT_PRIORITY_QUEUE( final KEY_GENERIC_TYPE[] refArray, final int[] a, final KEY_COMPARATOR KEY_SUPER_GENERIC c ) { this( refArray, a, a.length, c ); } /** Wraps a given array in a queue using the natural order. * *

 The queue returned by this method will be backed by the given array. * The first size elements of the array will be rearranged so as to form a heap (this is * more efficient than enqueueing the elements of a one by one). * * @param refArray the reference array. * @param a an array of indices into refArray. * @param size the number of elements to be included in the queue. */ public HEAP_SEMI_INDIRECT_PRIORITY_QUEUE( final KEY_GENERIC_TYPE[] refArray, final int[] a, int size ) { this( refArray, a, size, null ); } /** Wraps a given array in a queue using the natural order. * *

The queue returned by this method will be backed by the given array. * The elements of the array will be rearranged so to form a heap (this is * more efficient than enqueing the elements of a one by one). * * @param refArray the reference array. * @param a an array of indices into refArray. */ public HEAP_SEMI_INDIRECT_PRIORITY_QUEUE( final KEY_GENERIC_TYPE[] refArray, final int[] a ) { this( refArray, a, a.length ); } /** Ensures that the given index is a valid reference. * * @param index an index in the reference array. * @throws IndexOutOfBoundsException if the given index is negative or larger than the reference array length. */ protected void ensureElement( final int index ) { if ( index < 0 ) throw new IndexOutOfBoundsException( "Index (" + index + ") is negative" ); if ( index >= refArray.length ) throw new IndexOutOfBoundsException( "Index (" + index + ") is larger than or equal to reference array size (" + refArray.length + ")" ); } public void enqueue( int x ) { ensureElement( x ); if ( size == heap.length ) heap = IntArrays.grow( heap, size + 1 ); heap[ size++ ] = x; SEMI_INDIRECT_HEAPS.upHeap( refArray, heap, size, size - 1, c ); } public int dequeue() { if ( size == 0 ) throw new NoSuchElementException(); final int result = heap[ 0 ]; heap[ 0 ] = heap[ --size ]; if ( size != 0 ) SEMI_INDIRECT_HEAPS.downHeap( refArray, heap, size, 0, c ); return result; } public int first() { if ( size == 0 ) throw new NoSuchElementException(); return heap[ 0 ]; } /** {@inheritDoc} * *

The caller must guarantee that when this method is called the * index of the first element appears just once in the queue. Failure to do so * will bring the queue in an inconsistent state, and will cause * unpredictable behaviour. */ public void changed() { SEMI_INDIRECT_HEAPS.downHeap( refArray, heap, size, 0, c ); } /** Rebuilds this heap in a bottom-up fashion. */ public void allChanged() { SEMI_INDIRECT_HEAPS.makeHeap( refArray, heap, size, c ); } public int size() { return size; } public void clear() { size = 0; } /** Trims the backing array so that it has exactly {@link #size()} elements. */ public void trim() { heap = IntArrays.trim( heap, size ); } public KEY_COMPARATOR KEY_SUPER_GENERIC comparator() { return c; } public int front( final int[] a ) { return c == null ? SEMI_INDIRECT_HEAPS.front( refArray, heap, size, a ) : SEMI_INDIRECT_HEAPS.front( refArray, heap, size, a, c ); } public String toString() { StringBuffer s = new StringBuffer(); s.append( "[" ); for ( int i = 0; i < size; i++ ) { if ( i != 0 ) s.append( ", " ); s.append( refArray[ heap [ i ] ] ); } s.append( "]" ); return s.toString(); } #ifdef TEST /** The original class, now just used for testing. */ private static class TestQueue { /** The reference array */ private KEY_TYPE refArray[]; /** Its length */ private int N; /** The number of elements in the heaps */ private int n; /** The two comparators */ private KEY_COMPARATOR primaryComp, secondaryComp; /** Two indirect heaps are used, called primary and secondary. Each of them contains a permutation of n among the indices 0, 1, ..., N-1 in such a way that the corresponding objects be sorted with respect to the two comparators. We also need an array inSec[] so that inSec[k] is the index of secondary containing k. */ private int primary[], secondary[], inSec[]; /** Builds a double indirect priority queue. * @param refArray The reference array. * @param primaryComp The primary comparator. * @param secondaryComp The secondary comparator. */ public TestQueue( KEY_TYPE refArray[], KEY_COMPARATOR primaryComp, KEY_COMPARATOR secondaryComp ) { this.refArray = refArray; this.N = refArray.length; assert this.N != 0; this.n = 0; this.primaryComp = primaryComp; this.secondaryComp = secondaryComp; this.primary = new int[N]; this.secondary = new int[N]; this.inSec = new int[N]; java.util.Arrays.fill( inSec, -1 ); } /** Adds an index to the queue. Notice that the index should not be already present in the queue. * @param i The index to be added */ public void add( int i ) { if ( i < 0 || i >= refArray.length ) throw new IndexOutOfBoundsException(); //if ( inSec[ i ] >= 0 ) throw new IllegalArgumentException(); primary[n] = i; n++; swimPrimary( n-1 ); } /** Heapify the primary heap. * @param i The index of the heap to be heapified. */ private void heapifyPrimary( int i ) { int dep = primary[i]; int child; while ( ( child = 2*i+1 ) < n ) { if ( child+1 < n && primaryComp.compare( refArray[primary[child+1]], refArray[primary[child]] ) < 0 ) child++; if ( primaryComp.compare( refArray[dep], refArray[primary[child]] ) <= 0 ) break; primary[i] = primary[child]; i = child; } primary[i] = dep; } /** Heapify the secondary heap. * @param i The index of the heap to be heapified. 
*/ private void heapifySecondary( int i ) { int dep = secondary[i]; int child; while ( ( child = 2*i+1 ) < n ) { if ( child+1 < n && secondaryComp.compare( refArray[secondary[child+1]], refArray[secondary[child]] ) < 0 ) child++; if ( secondaryComp.compare( refArray[dep], refArray[secondary[child]] ) <= 0 ) break; secondary[i] = secondary[child]; inSec[secondary[i]] = i; i = child; } secondary[i] = dep; inSec[secondary[i]] = i; } /** Swim and heapify the primary heap. * @param i The index to be moved. */ private void swimPrimary( int i ) { int dep = primary[i]; int parent; while ( i != 0 && ( parent = ( i - 1 ) / 2 ) >= 0 ) { if ( primaryComp.compare( refArray[primary[parent]], refArray[dep] ) <= 0 ) break; primary[i] = primary[parent]; i = parent; } primary[i] = dep; heapifyPrimary( i ); } /** Swim and heapify the secondary heap. * @param i The index to be moved. */ private void swimSecondary( int i ) { int dep = secondary[i]; int parent; while ( i != 0 && ( parent = ( i - 1 ) / 2 ) >= 0 ) { if ( secondaryComp.compare( refArray[secondary[parent]], refArray[dep] ) <= 0 ) break; secondary[i] = secondary[parent]; inSec[secondary[i]] = i; i = parent; } secondary[i] = dep; inSec[secondary[i]] = i; heapifySecondary( i ); } /** Returns the minimum element with respect to the primary comparator. @return the minimum element. */ public int top() { if ( n == 0 ) throw new NoSuchElementException(); return primary[0]; } /** Returns the minimum element with respect to the secondary comparator. @return the minimum element. */ public int secTop() { if ( n == 0 ) throw new NoSuchElementException(); return secondary[0]; } /** Removes the minimum element with respect to the primary comparator. * @return the removed element. */ public void remove() { if ( n == 0 ) throw new NoSuchElementException(); int result = primary[0]; // Copy a leaf primary[0] = primary[n-1]; n--; heapifyPrimary( 0 ); return; } public void clear() { while( size() != 0 ) remove(); } /** Signals that the minimum element with respect to the comparator has changed. */ public void change() { heapifyPrimary( 0 ); } /** Returns the number of elements in the queue. * @return the size of the queue */ public int size() { return n; } public String toString() { String s = "["; for ( int i = 0; i < n; i++ ) s += refArray[primary[i]]+", "; return s+ "]"; } } private static long seed = System.currentTimeMillis(); private static java.util.Random r = new java.util.Random( seed ); private static KEY_TYPE genKey() { #if KEY_CLASS_Byte || KEY_CLASS_Short || KEY_CLASS_Character return (KEY_TYPE)(r.nextInt()); #elif KEYS_PRIMITIVE return r.NEXT_KEY(); #elif KEY_CLASS_Object return Integer.toBinaryString( r.nextInt() ); #else return new java.io.Serializable() {}; #endif } private static java.text.NumberFormat format = new java.text.DecimalFormat( "#,###.00" ); private static java.text.FieldPosition p = new java.text.FieldPosition( 0 ); private static String format( double d ) { StringBuffer s = new StringBuffer(); return format.format( d, s, p ).toString(); } private static void speedTest( int n, boolean comp ) { System.out.println( "There are presently no speed tests for this class." 
); } private static void fatal( String msg ) { System.out.println( msg ); System.exit( 1 ); } private static void ensure( boolean cond, String msg ) { if ( cond ) return; fatal( msg ); } private static boolean heapEqual( int[] a, int[] b, int sizea, int sizeb ) { if ( sizea != sizeb ) return false; while( sizea-- != 0 ) if ( a[sizea] != b[sizea] ) return false; return true; } protected static void test( int n ) { long ms; Exception mThrowsIllegal, tThrowsIllegal, mThrowsOutOfBounds, tThrowsOutOfBounds, mThrowsNoElement, tThrowsNoElement; int rm = 0, rt = 0; KEY_TYPE[] refArray = new KEY_TYPE[ n ]; for( int i = 0; i < n; i++ ) refArray[ i ] = genKey(); HEAP_SEMI_INDIRECT_PRIORITY_QUEUE m = new HEAP_SEMI_INDIRECT_PRIORITY_QUEUE( refArray, COMPARATORS.NATURAL_COMPARATOR ); TestQueue t = new TestQueue( refArray, COMPARATORS.NATURAL_COMPARATOR, COMPARATORS.OPPOSITE_COMPARATOR ); /* We add pairs to t. */ for( int i = 0; i < n / 2; i++ ) { t.add( i ); m.enqueue( i ); } ensure( heapEqual( m.heap, t.primary, m.size(), t.size() ), "Error (" + seed + "): m and t differ after creation (" + m + ", " + t + ")" ); /* Now we add and remove random data in m and t, checking that the result is the same. */ for(int i=0; i<2*n; i++ ) { if ( r.nextDouble() < 0.01 ) { t.clear(); m.clear(); for( int j = 0; j < n / 2; j++ ) { t.add( j ); m.enqueue( j ); } } int T = r.nextInt( 2 * n ); mThrowsNoElement = tThrowsNoElement = mThrowsOutOfBounds = tThrowsOutOfBounds = mThrowsIllegal = tThrowsIllegal = null; try { m.enqueue( T ); } catch ( IndexOutOfBoundsException e ) { mThrowsOutOfBounds = e; } catch ( IllegalArgumentException e ) { mThrowsIllegal = e; } try { t.add( T ); } catch ( IndexOutOfBoundsException e ) { tThrowsOutOfBounds = e; } catch ( IllegalArgumentException e ) { tThrowsIllegal = e; } ensure( ( mThrowsOutOfBounds == null ) == ( tThrowsOutOfBounds == null ), "Error (" + seed + "): enqueue() divergence in IndexOutOfBoundsException for " + T + " (" + mThrowsOutOfBounds + ", " + tThrowsOutOfBounds + ")" ); ensure( ( mThrowsIllegal == null ) == ( tThrowsIllegal == null ), "Error (" + seed + "): enqueue() divergence in IllegalArgumentException for " + T + " (" + mThrowsIllegal + ", " + tThrowsIllegal + ")" ); ensure( heapEqual( m.heap, t.primary, m.size(), t.size() ), "Error (" + seed + "): m and t differ after enqueue (" + m + ", " + t + ")" ); if ( m.size() != 0 ) { ensure( m.first() == t.top(), "Error (" + seed + "): m and t differ in first element after enqueue (" + m.first() + ", " + t.top() + ")"); } mThrowsNoElement = tThrowsNoElement = mThrowsOutOfBounds = tThrowsOutOfBounds = mThrowsIllegal = tThrowsIllegal = null; try { rm = m.dequeue(); } catch ( IndexOutOfBoundsException e ) { mThrowsOutOfBounds = e; } catch ( IllegalArgumentException e ) { mThrowsIllegal = e; } catch ( NoSuchElementException e ) { mThrowsNoElement = e; } try { rt = t.top(); t.remove(); } catch ( IndexOutOfBoundsException e ) { tThrowsOutOfBounds = e; } catch ( IllegalArgumentException e ) { tThrowsIllegal = e; } catch ( NoSuchElementException e ) { tThrowsNoElement = e; } ensure( ( mThrowsOutOfBounds == null ) == ( tThrowsOutOfBounds == null ), "Error (" + seed + "): dequeue() divergence in IndexOutOfBoundsException (" + mThrowsOutOfBounds + ", " + tThrowsOutOfBounds + ")" ); ensure( ( mThrowsIllegal == null ) == ( tThrowsIllegal == null ), "Error (" + seed + "): dequeue() divergence in IllegalArgumentException (" + mThrowsIllegal + ", " + tThrowsIllegal + ")" ); ensure( ( mThrowsNoElement == null ) == ( tThrowsNoElement == null 
), "Error (" + seed + "): dequeue() divergence in NoSuchElementException (" + mThrowsNoElement + ", " + tThrowsNoElement + ")" ); if ( mThrowsOutOfBounds == null ) ensure( rt == rm , "Error (" + seed + "): divergence in dequeue() between t and m (" + rt + ", " + rm + ")" ); ensure( heapEqual( m.heap, t.primary, m.size(), t.size() ), "Error (" + seed + "): m and t differ after dequeue (" + m + ", " + t + ")"); if ( m.size() != 0 ) { ensure( m.first() == t.top(), "Error (" + seed + "): m and t differ in first element after dequeue (" + m.first() + ", " + t.top() + ")"); } if ( m.size() != 0 && ( ( new it.unimi.dsi.fastutil.ints.IntOpenHashSet( m.heap, 0, m.size ) ).size() == m.size() ) ) { refArray[ m.first() ] = genKey(); m.changed(); t.change(); ensure( heapEqual( m.heap, t.primary, m.size(), t.size() ), "Error (" + seed + "): m and t differ after change (" + m + ", " + t + ")"); if ( m.size() != 0 ) { ensure( m.first() == t.top(), "Error (" + seed + "): m and t differ in first element after change (" + m.first() + ", " + t.top() + ")"); } } } /* Now we check that m actually holds the same data. */ m.clear(); ensure( m.isEmpty(), "Error (" + seed + "): m is not empty after clear()" ); System.out.println("Test OK"); } public static void main( String args[] ) { int n = Integer.parseInt(args[1]); if ( args.length > 2 ) r = new java.util.Random( seed = Long.parseLong( args[ 2 ] ) ); try { if ("speedTest".equals(args[0]) || "speedComp".equals(args[0])) speedTest( n, "speedComp".equals(args[0]) ); else if ( "test".equals( args[0] ) ) test(n); } catch( Throwable e ) { e.printStackTrace( System.err ); System.err.println( "seed: " + seed ); } } #endif } fastutil-7.1.0/drv/Heaps.drv0000664000000000000000000000735513050701620014426 0ustar rootroot/* * Copyright (C) 2003-2016 Paolo Boldi and Sebastiano Vigna * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package PACKAGE; #if KEY_CLASS_Object import java.util.Comparator; #endif /** A class providing static methods and objects that do useful things with heaps. * *
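As a quick sketch of how the static helpers defined below (makeHeap() and downHeap()) can be used directly on a plain array, here in the int instantiation IntHeaps and with the natural order (a null comparator):

    import it.unimi.dsi.fastutil.ints.IntHeaps;

    public class PlainHeapSketch {
        public static void main( String[] args ) {
            int[] a = { 5, 3, 4, 1, 2 };
            int size = a.length;
            IntHeaps.makeHeap( a, size, null );    // a[ 0 ] is now the minimum
            System.out.println( a[ 0 ] );          // 1
            a[ 0 ] = a[ --size ];                  // replace the root with the last element...
            IntHeaps.downHeap( a, size, 0, null ); // ...and sift it down, as DEQUEUE() does
            System.out.println( a[ 0 ] );          // 2
        }
    }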

The static methods of this class allow to treat arrays as 0-based heaps. They * are used in the implementation of heap-based queues, but they may be also used * directly. * */ public class HEAPS { private HEAPS() {} /** Moves the given element down into the heap until it reaches the lowest possible position. * * @param heap the heap (starting at 0). * @param size the number of elements in the heap. * @param i the index of the element that must be moved down. * @param c a type-specific comparator, or null for the natural order. * @return the new position of the element of index i. */ SUPPRESS_WARNINGS_KEY_UNCHECKED public static KEY_GENERIC int downHeap( final KEY_GENERIC_TYPE[] heap, final int size, int i, final KEY_COMPARATOR KEY_SUPER_GENERIC c ) { assert i < size; final KEY_GENERIC_TYPE e = heap[ i ]; int child; if ( c == null ) while ( ( child = ( i << 1 ) + 1 ) < size ) { KEY_GENERIC_TYPE t = heap[ child ]; final int right = child + 1; if ( right < size && KEY_LESS( heap[ right ], t ) ) t = heap[ child = right ]; if ( KEY_LESSEQ( e, t ) ) break; heap[ i ] = t; i = child; } else while ( ( child = ( i << 1 ) + 1 ) < size ) { KEY_GENERIC_TYPE t = heap[ child ]; final int right = child + 1; if ( right < size && c.compare( heap[ right ], t ) < 0 ) t = heap[ child = right ]; if ( c.compare( e, t ) <= 0 ) break; heap[ i ] = t; i = child; } heap[ i ] = e; return i; } /** Moves the given element up in the heap until it reaches the highest possible position. * * @param heap the heap (starting at 0). * @param size the number of elements in the heap. * @param i the index of the element that must be moved up. * @param c a type-specific comparator, or null for the natural order. * @return the new position of the element of index i. */ SUPPRESS_WARNINGS_KEY_UNCHECKED public static KEY_GENERIC int upHeap( final KEY_GENERIC_TYPE[] heap, final int size, int i, final KEY_COMPARATOR KEY_GENERIC c ) { assert i < size; final KEY_GENERIC_TYPE e = heap[ i ]; if ( c == null ) while ( i != 0 ) { final int parent = ( i - 1 ) >>> 1; final KEY_GENERIC_TYPE t = heap[ parent ]; if ( KEY_LESSEQ( t, e ) ) break; heap[ i ] = t; i = parent; } else while ( i != 0 ) { final int parent = ( i - 1 ) >>> 1; final KEY_GENERIC_TYPE t = heap[ parent ]; if ( c.compare( t, e ) <= 0 ) break; heap[ i ] = t; i = parent; } heap[ i ] = e; return i; } /** Makes an array into a heap. * * @param heap the heap (starting at 0). * @param size the number of elements in the heap. * @param c a type-specific comparator, or null for the natural order. */ public static KEY_GENERIC void makeHeap( final KEY_GENERIC_TYPE[] heap, final int size, final KEY_COMPARATOR KEY_GENERIC c ) { int i = size >>> 1; while( i-- != 0 ) downHeap( heap, size, i, c ); } } fastutil-7.1.0/drv/IndirectHeaps.drv0000664000000000000000000001365513050701620016110 0ustar rootroot/* * Copyright (C) 2003-2016 Paolo Boldi and Sebastiano Vigna * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package PACKAGE; #if KEY_CLASS_Object import java.util.Comparator; #endif import java.util.Arrays; /** A class providing static methods and objects that do useful things with indirect heaps. * *

An indirect heap is an extension of a semi-indirect heap using also an * inversion array of the same length as the reference array, * satisfying the relation heap[inv[i]]==i when * inv[i]>=0, and inv[heap[i]]==i for all elements in the heap. */ public class INDIRECT_HEAPS { private INDIRECT_HEAPS() {} /** Moves the given element down into the indirect heap until it reaches the lowest possible position. * * @param refArray the reference array. * @param heap the indirect heap (starting at 0). * @param inv the inversion array. * @param size the number of elements in the heap. * @param i the index in the heap of the element to be moved down. * @param c a type-specific comparator, or null for the natural order. * @return the new position in the heap of the element of heap index i. */ SUPPRESS_WARNINGS_KEY_UNCHECKED public static KEY_GENERIC int downHeap( final KEY_GENERIC_TYPE[] refArray, final int[] heap, final int[] inv, final int size, int i, final KEY_COMPARATOR KEY_GENERIC c ) { assert i < size; final int e = heap[ i ]; final KEY_GENERIC_TYPE E = refArray[ e ]; int child; if ( c == null ) while ( ( child = ( i << 1 ) + 1 ) < size ) { int t = heap[ child ]; final int right = child + 1; if ( right < size && KEY_LESS( refArray[ heap[ right ] ], refArray[ t ] ) ) t = heap[ child = right ]; if ( KEY_LESSEQ( E, refArray[ t ] ) ) break; heap[ i ] = t; inv[ heap[ i ] ] = i; i = child; } else while ( ( child = ( i << 1 ) + 1 ) < size ) { int t = heap[ child ]; final int right = child + 1; if ( right < size && c.compare( refArray[ heap[ right ] ], refArray[ t ] ) < 0 ) t = heap[ child = right ]; if ( c.compare( E, refArray[ t ] ) <= 0 ) break; heap[ i ] = t; inv[ heap[ i ] ] = i; i = child; } heap[ i ] = e; inv[ e ] = i; return i; } /** Moves the given element up in the indirect heap until it reaches the highest possible position. * * Note that in principle after this call the heap property may be violated. * * @param refArray the reference array. * @param heap the indirect heap (starting at 0). * @param inv the inversion array. * @param size the number of elements in the heap. * @param i the index in the heap of the element to be moved up. * @param c a type-specific comparator, or null for the natural order. * @return the new position in the heap of the element of heap index i. */ SUPPRESS_WARNINGS_KEY_UNCHECKED public static KEY_GENERIC int upHeap( final KEY_GENERIC_TYPE[] refArray, final int[] heap, final int[] inv, final int size, int i, final KEY_COMPARATOR KEY_GENERIC c ) { assert i < size; final int e = heap[ i ]; final KEY_GENERIC_TYPE E = refArray[ e ]; if ( c == null ) while ( i != 0 ) { final int parent = ( i - 1 ) >>> 1; final int t = heap[ parent ]; if ( KEY_LESSEQ( refArray[ t ], E ) ) break; heap[ i ] = t; inv[ heap[ i ] ] = i; i = parent; } else while ( i != 0 ) { final int parent = ( i - 1 ) >>> 1; final int t = heap[ parent ]; if ( c.compare( refArray[ t ], E ) <= 0 ) break; heap[ i ] = t; inv[ heap[ i ] ] = i; i = parent; } heap[ i ] = e; inv[ e ] = i; return i; } /** Creates an indirect heap in the given array. * * @param refArray the reference array. * @param offset the first element of the reference array to be put in the heap. * @param length the number of elements to be put in the heap. * @param heap the array where the heap is to be created. * @param inv the inversion array. * @param c a type-specific comparator, or null for the natural order. 
*/ public static KEY_GENERIC void makeHeap( final KEY_GENERIC_TYPE[] refArray, final int offset, final int length, final int[] heap, final int[] inv, final KEY_COMPARATOR KEY_GENERIC c ) { ARRAYS.ensureOffsetLength( refArray, offset, length ); if ( heap.length < length ) throw new IllegalArgumentException( "The heap length (" + heap.length + ") is smaller than the number of elements (" + length + ")" ); if ( inv.length < refArray.length ) throw new IllegalArgumentException( "The inversion array length (" + heap.length + ") is smaller than the length of the reference array (" + refArray.length + ")" ); Arrays.fill( inv, 0, refArray.length, -1 ); int i = length; while( i-- != 0 ) inv[ heap[ i ] = offset + i ] = i; i = length >>> 1; while( i-- != 0 ) downHeap( refArray, heap, inv, length, i, c ); } /** Creates an indirect heap from a given index array. * * @param refArray the reference array. * @param heap an array containing indices into refArray. * @param inv the inversion array. * @param size the number of elements in the heap. * @param c a type-specific comparator, or null for the natural order. */ public static KEY_GENERIC void makeHeap( final KEY_GENERIC_TYPE[] refArray, final int[] heap, final int[] inv, final int size, final KEY_COMPARATOR KEY_GENERIC c ) { int i = size >>> 1; while( i-- != 0 ) downHeap( refArray, heap, inv, size, i, c ); } } fastutil-7.1.0/drv/IndirectPriorityQueue.drv0000664000000000000000000000227513050701620017672 0ustar rootroot/* * Copyright (C) 2003-2016 Paolo Boldi and Sebastiano Vigna * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package PACKAGE; import it.unimi.dsi.fastutil.IndirectPriorityQueue; /** A type-specific {@link IndirectPriorityQueue}. * *
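A sketch of the indirect-heap invariant described above (heap[ inv[ i ] ] == i), using the double instantiation DoubleIndirectHeaps and the natural order:

    import it.unimi.dsi.fastutil.doubles.DoubleIndirectHeaps;

    public class IndirectHeapSketch {
        public static void main( String[] args ) {
            double[] ref = { 3.0, 1.0, 2.0 };
            int[] heap = new int[ ref.length ];
            int[] inv = new int[ ref.length ];
            DoubleIndirectHeaps.makeHeap( ref, 0, ref.length, heap, inv, null );
            System.out.println( ref[ heap[ 0 ] ] ); // 1.0: the root points to the smallest value
            // The inversion array satisfies heap[ inv[ i ] ] == i for every element in the heap.
            for ( int i = 0; i < ref.length; i++ ) System.out.println( heap[ inv[ i ] ] == i ); // true, true, true
        }
    }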

Additionally, this interface strengthens {@link #comparator()}. */ public interface INDIRECT_PRIORITY_QUEUE extends IndirectPriorityQueue { /** Returns the comparator associated with this queue. * * Note that this specification strengthens the one given in {@link IndirectPriorityQueue}. * * @return the comparator associated with this queue. * @see IndirectPriorityQueue#comparator() */ KEY_COMPARATOR comparator(); } fastutil-7.1.0/drv/Iterable.drv0000664000000000000000000000264413050701620015111 0ustar rootroot/* * Copyright (C) 2002-2016 Sebastiano Vigna * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package PACKAGE; import java.lang.Iterable; /** A type-specific {@link Iterable} that strengthens that specification of {@link Iterable#iterator()}. * *

Warning: Java will let you write “colon” for statements with primitive-type * loop variables; however, what is (unfortunately) really happening is that at each iteration an * unboxing (and, in the case of fastutil type-specific data structures, a boxing) will be performed. Watch out. * * @see Iterable */ public interface KEY_ITERABLE KEY_GENERIC extends Iterable { /** Returns a type-specific iterator. * * Note that this specification strengthens the one given in {@link Iterable#iterator()}. * * @return a type-specific iterator. */ KEY_ITERATOR KEY_GENERIC iterator(); } fastutil-7.1.0/drv/Iterator.drv0000664000000000000000000000266713050701620015160 0ustar rootroot/* * Copyright (C) 2002-2016 Sebastiano Vigna * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package PACKAGE; import java.util.Iterator; /** A type-specific {@link Iterator}; provides an additional method to avoid (un)boxing, and * the possibility to skip elements. * * @see Iterator */ public interface KEY_ITERATOR KEY_GENERIC extends Iterator { #if KEYS_PRIMITIVE /** * Returns the next element as a primitive type. * * @return the next element in the iteration. * @see Iterator#next() */ KEY_TYPE NEXT_KEY(); #endif /** Skips the given number of elements. * *

The effect of this call is exactly the same as that of * calling {@link #next()} for n times (possibly stopping * if {@link #hasNext()} becomes false). * * @param n the number of elements to skip. * @return the number of elements actually skipped. * @see Iterator#next() */ int skip( int n ); } fastutil-7.1.0/drv/Iterators.drv0000664000000000000000000007375113050701620015345 0ustar rootroot/* * Copyright (C) 2002-2016 Sebastiano Vigna * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package PACKAGE; import java.util.Iterator; import java.util.ListIterator; import java.util.NoSuchElementException; /** A class providing static methods and objects that do useful things with type-specific iterators. * * @see Iterator */ public class ITERATORS { private ITERATORS() {} /** A class returning no elements and a type-specific iterator interface. * *
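Tying together the boxing warning in the Iterable documentation above, the NEXT_KEY() method, and the skip() contract just described, a small sketch assuming the int instantiations (IntArrayList, IntIterator, and the IntIterators.wrap() helper defined later in this class):

    import it.unimi.dsi.fastutil.ints.IntArrayList;
    import it.unimi.dsi.fastutil.ints.IntIterator;
    import it.unimi.dsi.fastutil.ints.IntIterators;
    import it.unimi.dsi.fastutil.ints.IntListIterator;

    public class IteratorSketch {
        public static void main( String[] args ) {
            IntArrayList l = IntArrayList.wrap( new int[] { 1, 2, 3 } );
            // The "colon" form compiles, but each step boxes the int into an Integer and unboxes it back.
            for ( int x : l ) System.out.println( x );
            // The explicit type-specific iterator avoids boxing entirely.
            for ( IntIterator i = l.iterator(); i.hasNext(); ) System.out.println( i.nextInt() );
            // skip( n ) behaves like calling next() up to n times and reports how many elements it passed over.
            IntListIterator j = IntIterators.wrap( new int[] { 10, 20, 30, 40 } );
            System.out.println( j.skip( 2 ) ); // 2: both 10 and 20 were skipped
            System.out.println( j.nextInt() ); // 30
            System.out.println( j.skip( 5 ) ); // 1: only 40 was left
        }
    }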

This class may be useful to implement your own in case you subclass * a type-specific iterator. */ public static class EmptyIterator KEY_GENERIC extends KEY_ABSTRACT_LIST_ITERATOR KEY_GENERIC implements java.io.Serializable, Cloneable { private static final long serialVersionUID = -7046029254386353129L; protected EmptyIterator() {} public boolean hasNext() { return false; } public boolean hasPrevious() { return false; } public KEY_GENERIC_TYPE NEXT_KEY() { throw new NoSuchElementException(); } public KEY_GENERIC_TYPE PREV_KEY() { throw new NoSuchElementException(); } public int nextIndex() { return 0; } public int previousIndex() { return -1; } public int skip( int n ) { return 0; }; public int back( int n ) { return 0; }; public Object clone() { return EMPTY_ITERATOR; } private Object readResolve() { return EMPTY_ITERATOR; } } /** An empty iterator (immutable). It is serializable and cloneable. * *

The class of this objects represent an abstract empty iterator * that can iterate as a type-specific (list) iterator. */ SUPPRESS_WARNINGS_KEY_RAWTYPES public final static EmptyIterator EMPTY_ITERATOR = new EmptyIterator(); /** An iterator returning a single element. */ private static class SingletonIterator KEY_GENERIC extends KEY_ABSTRACT_LIST_ITERATOR KEY_GENERIC { private final KEY_GENERIC_TYPE element; private int curr; public SingletonIterator( final KEY_GENERIC_TYPE element ) { this.element = element; } public boolean hasNext() { return curr == 0; } public boolean hasPrevious() { return curr == 1; } public KEY_GENERIC_TYPE NEXT_KEY() { if ( ! hasNext() ) throw new NoSuchElementException(); curr = 1; return element; } public KEY_GENERIC_TYPE PREV_KEY() { if ( ! hasPrevious() ) throw new NoSuchElementException(); curr = 0; return element; } public int nextIndex() { return curr; } public int previousIndex() { return curr - 1; } } /** Returns an iterator that iterates just over the given element. * * @param element the only element to be returned by a type-specific list iterator. * @return an iterator that iterates just over element. */ public static KEY_GENERIC KEY_LIST_ITERATOR KEY_GENERIC singleton( final KEY_GENERIC_TYPE element ) { return new SingletonIterator KEY_GENERIC( element ); } /** A class to wrap arrays in iterators. */ private static class ArrayIterator KEY_GENERIC extends KEY_ABSTRACT_LIST_ITERATOR KEY_GENERIC { private final KEY_GENERIC_TYPE[] array; private final int offset, length; private int curr; public ArrayIterator( final KEY_GENERIC_TYPE[] array, final int offset, final int length ) { this.array = array; this.offset = offset; this.length = length; } public boolean hasNext() { return curr < length; } public boolean hasPrevious() { return curr > 0; } public KEY_GENERIC_TYPE NEXT_KEY() { if ( ! hasNext() ) throw new NoSuchElementException(); return array[ offset + curr++ ]; } public KEY_GENERIC_TYPE PREV_KEY() { if ( ! hasPrevious() ) throw new NoSuchElementException(); return array[ offset + --curr ]; } public int skip( int n ) { if ( n <= length - curr ) { curr += n; return n; } n = length - curr; curr = length; return n; } public int back( int n ) { if ( n <= curr ) { curr -= n; return n; } n = curr; curr = 0; return n; } public int nextIndex() { return curr; } public int previousIndex() { return curr - 1; } } /** Wraps the given part of an array into a type-specific list iterator. * *

The type-specific list iterator returned by this method will iterate * length times, returning consecutive elements of the given * array starting from the one with index offset. * * @param array an array to wrap into a type-specific list iterator. * @param offset the first element of the array to be returned. * @param length the number of elements to return. * @return an iterator that will return length elements of array starting at position offset. */ public static KEY_GENERIC KEY_LIST_ITERATOR KEY_GENERIC wrap( final KEY_GENERIC_TYPE[] array, final int offset, final int length ) { ARRAYS.ensureOffsetLength( array, offset, length ); return new ArrayIterator KEY_GENERIC( array, offset, length ); } /** Wraps the given array into a type-specific list iterator. * *

 The type-specific list iterator returned by this method will return * all elements of the given array. * * @param array an array to wrap into a type-specific list iterator. * @return an iterator that will return the elements of array. */ public static KEY_GENERIC KEY_LIST_ITERATOR KEY_GENERIC wrap( final KEY_GENERIC_TYPE[] array ) { return new ArrayIterator KEY_GENERIC( array, 0, array.length ); } /** Unwraps an iterator into an array starting at a given offset for a given number of elements. * *

This method iterates over the given type-specific iterator and stores the elements * returned, up to a maximum of max, in the given array starting at offset. * The number of actually unwrapped elements is returned (it may be smaller than max if * the iterator emits fewer than max elements). * * @param i a type-specific iterator. * @param array an array to contain the output of the iterator. * @param offset the first element of the array to be used. * @param max the maximum number of elements to unwrap. * @return the number of elements unwrapped. */ public static KEY_GENERIC int unwrap( final STD_KEY_ITERATOR KEY_EXTENDS_GENERIC i, final KEY_GENERIC_TYPE array[], int offset, final int max ) { if ( max < 0 ) throw new IllegalArgumentException( "The maximum number of elements (" + max + ") is negative" ); if ( offset < 0 || offset + max > array.length ) throw new IllegalArgumentException(); int j = max; while( j-- != 0 && i.hasNext() ) array[ offset++ ] = i.NEXT_KEY(); return max - j - 1; } /** Unwraps an iterator into an array. * *

This method iterates over the given type-specific iterator and stores the * elements returned in the given array. The iteration will stop when the * iterator has no more elements or when the end of the array has been reached. * * @param i a type-specific iterator. * @param array an array to contain the output of the iterator. * @return the number of elements unwrapped. */ public static KEY_GENERIC int unwrap( final STD_KEY_ITERATOR KEY_EXTENDS_GENERIC i, final KEY_GENERIC_TYPE array[] ) { return unwrap( i, array, 0, array.length ); } /** Unwraps an iterator, returning an array, with a limit on the number of elements. * *
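* A sketch under the same assumption (the {@code int} specialization):
* <pre>
* // collects at most ten elements into a freshly allocated (and trimmed) array
* int[] firstTen = IntIterators.unwrap( IntIterators.fromTo( 0, 1000 ), 10 ); // { 0, 1, ..., 9 }
* </pre>
*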

This method iterates over the given type-specific iterator and returns an array * containing the elements returned by the iterator. At most max elements * will be returned. * * @param i a type-specific iterator. * @param max the maximum number of elements to be unwrapped. * @return an array containing the elements returned by the iterator (at most max). */ SUPPRESS_WARNINGS_KEY_UNCHECKED public static KEY_GENERIC KEY_GENERIC_TYPE[] unwrap( final STD_KEY_ITERATOR KEY_EXTENDS_GENERIC i, int max ) { if ( max < 0 ) throw new IllegalArgumentException( "The maximum number of elements (" + max + ") is negative" ); KEY_GENERIC_TYPE array[] = KEY_GENERIC_ARRAY_CAST new KEY_TYPE[ 16 ]; int j = 0; while( max-- != 0 && i.hasNext() ) { if ( j == array.length ) array = ARRAYS.grow( array, j + 1 ); array[ j++ ] = i.NEXT_KEY(); } return ARRAYS.trim( array, j ); } /** Unwraps an iterator, returning an array. * *

This method iterates over the given type-specific iterator and returns an array * containing the elements returned by the iterator. * * @param i a type-specific iterator. * @return an array containing the elements returned by the iterator. */ public static KEY_GENERIC KEY_GENERIC_TYPE[] unwrap( final STD_KEY_ITERATOR KEY_EXTENDS_GENERIC i ) { return unwrap( i, Integer.MAX_VALUE ); } /** Unwraps an iterator into a type-specific collection, with a limit on the number of elements. * *
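* For instance, assuming the {@code int} specialization and an {@code IntOpenHashSet} as the target collection (an illustrative sketch):
* <pre>
* IntOpenHashSet s = new IntOpenHashSet();
* // drains at most 100 elements from the iterator into the set
* int drained = IntIterators.unwrap( IntIterators.fromTo( 0, 1000 ), s, 100 );
* </pre>
*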

This method iterates over the given type-specific iterator and stores the elements * returned, up to a maximum of max, in the given type-specific collection. * The number of actually unwrapped elements is returned (it may be smaller than max if * the iterator emits fewer than max elements). * * @param i a type-specific iterator. * @param c a type-specific collection to contain the output of the iterator. * @param max the maximum number of elements to unwrap. * @return the number of elements unwrapped. Note that * this is the number of elements returned by the iterator, which is not necessarily the number * of elements that have been added to the collection (because of duplicates). */ public static KEY_GENERIC int unwrap( final STD_KEY_ITERATOR KEY_GENERIC i, final COLLECTION KEY_SUPER_GENERIC c, final int max ) { if ( max < 0 ) throw new IllegalArgumentException( "The maximum number of elements (" + max + ") is negative" ); int j = max; while( j-- != 0 && i.hasNext() ) c.add( i.NEXT_KEY() ); return max - j - 1; } /** Unwraps an iterator into a type-specific collection. * *

This method iterates over the given type-specific iterator and stores the * elements returned in the given type-specific collection. The returned count of * unwrapped elements is a long, so this method works also with very large collections. * * @param i a type-specific iterator. * @param c a type-specific collection to contain the output of the iterator. * @return the number of elements unwrapped. Note that * this is the number of elements returned by the iterator, which is not necessarily the number * of elements that have been added to the collection (because of duplicates). */ public static KEY_GENERIC long unwrap( final STD_KEY_ITERATOR KEY_GENERIC i, final COLLECTION KEY_SUPER_GENERIC c ) { long n = 0; while( i.hasNext() ) { c.add( i.NEXT_KEY() ); n++; } return n; } /** Pours an iterator into a type-specific collection, with a limit on the number of elements. * *

This method iterates over the given type-specific iterator and adds * the returned elements to the given collection (up to max). * * @param i a type-specific iterator. * @param s a type-specific collection. * @param max the maximum number of elements to be poured. * @return the number of elements poured. Note that * this is the number of elements returned by the iterator, which is not necessarily the number * of elements that have been added to the collection (because of duplicates). */ public static KEY_GENERIC int pour( final STD_KEY_ITERATOR KEY_GENERIC i, final COLLECTION KEY_SUPER_GENERIC s, final int max ) { if ( max < 0 ) throw new IllegalArgumentException( "The maximum number of elements (" + max + ") is negative" ); int j = max; while( j-- != 0 && i.hasNext() ) s.add( i.NEXT_KEY() ); return max - j - 1; } /** Pours an iterator into a type-specific collection. * *

This method iterates over the given type-specific iterator and adds * the returned elements to the given collection. * * @param i a type-specific iterator. * @param s a type-specific collection. * @return the number of elements poured. Note that * this is the number of elements returned by the iterator, which is not necessarily the number * of elements that have been added to the collection (because of duplicates). */ public static KEY_GENERIC int pour( final STD_KEY_ITERATOR KEY_GENERIC i, final COLLECTION KEY_SUPER_GENERIC s ) { return pour( i, s, Integer.MAX_VALUE ); } /** Pours an iterator, returning a type-specific list, with a limit on the number of elements. * *
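* A sketch in the {@code int} specialization (the names shown are the generated ones, used here only as an example):
* <pre>
* // collects the first five elements into a type-specific list, preserving iteration order
* IntList l = IntIterators.pour( IntIterators.fromTo( 0, 100 ), 5 ); // [0, 1, 2, 3, 4]
* </pre>
*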

This method iterates over the given type-specific iterator and returns * a type-specific list containing the returned elements (up to max). Iteration * on the returned list is guaranteed to produce the elements in the same order * in which they appeared in the iterator. * * * @param i a type-specific iterator. * @param max the maximum number of elements to be poured. * @return a type-specific list containing the returned elements, up to max. */ public static KEY_GENERIC LIST KEY_GENERIC pour( final STD_KEY_ITERATOR KEY_GENERIC i, int max ) { final ARRAY_LIST KEY_GENERIC l = new ARRAY_LIST KEY_GENERIC(); pour( i, l, max ); l.trim(); return l; } /** Pours an iterator, returning a type-specific list. * *

This method iterates over the given type-specific iterator and returns * a list containing the returned elements. Iteration * on the returned list is guaranteed to produce the elements in the same order * in which they appeared in the iterator. * * @param i a type-specific iterator. * @return a type-specific list containing the returned elements. */ public static KEY_GENERIC LIST KEY_GENERIC pour( final STD_KEY_ITERATOR KEY_GENERIC i ) { return pour( i, Integer.MAX_VALUE ); } private static class IteratorWrapper KEY_GENERIC extends KEY_ABSTRACT_ITERATOR KEY_GENERIC { final Iterator i; public IteratorWrapper( final Iterator i ) { this.i = i; } public boolean hasNext() { return i.hasNext(); } public void remove() { i.remove(); } public KEY_GENERIC_TYPE NEXT_KEY() { return KEY_CLASS2TYPE( i.next() ); } } /** Wraps a standard iterator into a type-specific iterator. * *
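* A hedged sketch for the {@code int} specialization, where this method is generated as {@code asIntIterator()}:
* <pre>
* // wraps an iterator over boxed Integers; elements of any other class would cause a ClassCastException
* IntIterator it = IntIterators.asIntIterator( java.util.Arrays.asList( 1, 2, 3 ).iterator() );
* </pre>
*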

This method wraps a standard iterator into a type-specific one which will handle the * type conversions for you. Of course, any attempt to wrap an iterator returning * instances of the wrong class will generate a {@link ClassCastException}. The * returned iterator is backed by i: changes to one of the iterators * will affect the other, too. * *

If i is already type-specific, it will be returned and no new object * will be generated. * * @param i an iterator. * @return a type-specific iterator backed by i. */ #if KEYS_PRIMITIVE @SuppressWarnings({"unchecked","rawtypes"}) #endif public static KEY_GENERIC KEY_ITERATOR KEY_GENERIC AS_KEY_ITERATOR( final Iterator KEY_GENERIC i ) { if ( i instanceof KEY_ITERATOR ) return (KEY_ITERATOR KEY_GENERIC)i; return new IteratorWrapper KEY_GENERIC( i ); } private static class ListIteratorWrapper KEY_GENERIC extends KEY_ABSTRACT_LIST_ITERATOR KEY_GENERIC { final ListIterator i; public ListIteratorWrapper( final ListIterator i ) { this.i = i; } public boolean hasNext() { return i.hasNext(); } public boolean hasPrevious() { return i.hasPrevious(); } public int nextIndex() { return i.nextIndex(); } public int previousIndex() { return i.previousIndex(); } public void set( KEY_GENERIC_TYPE k ) { i.set( KEY2OBJ( k ) ); } public void add( KEY_GENERIC_TYPE k ) { i.add( KEY2OBJ( k ) ); } public void remove() { i.remove(); } public KEY_GENERIC_TYPE NEXT_KEY() { return KEY_CLASS2TYPE( i.next() ); } public KEY_GENERIC_TYPE PREV_KEY() { return KEY_CLASS2TYPE( i.previous() ); } } /** Wraps a standard list iterator into a type-specific list iterator. * *

This method wraps a standard list iterator into a type-specific one * which will handle the type conversions for you. Of course, any attempt * to wrap an iterator returning instances of the wrong class will * generate a {@link ClassCastException}. The * returned iterator is backed by i: changes to one of the iterators * will affect the other, too. * *

If i is already type-specific, it will be returned and no new object * will be generated. * * @param i a list iterator. * @return a type-specific list iterator backed by i. */ #if KEYS_PRIMITIVE @SuppressWarnings({"unchecked","rawtypes"}) #endif public static KEY_GENERIC KEY_LIST_ITERATOR KEY_GENERIC AS_KEY_ITERATOR( final ListIterator KEY_GENERIC i ) { if ( i instanceof KEY_LIST_ITERATOR ) return (KEY_LIST_ITERATOR KEY_GENERIC)i; return new ListIteratorWrapper KEY_GENERIC( i ); } #if KEY_CLASS_Integer || KEY_CLASS_Byte || KEY_CLASS_Short || KEY_CLASS_Character || KEY_CLASS_Long #if KEY_CLASS_Long private static class IntervalIterator extends KEY_ABSTRACT_BIDI_ITERATOR { #else private static class IntervalIterator extends KEY_ABSTRACT_LIST_ITERATOR { #endif private final KEY_TYPE from, to; KEY_TYPE curr; public IntervalIterator( final KEY_TYPE from, final KEY_TYPE to ) { this.from = this.curr = from; this.to = to; } public boolean hasNext() { return curr < to; } public boolean hasPrevious() { return curr > from; } public KEY_TYPE NEXT_KEY() { if ( ! hasNext() ) throw new NoSuchElementException(); return curr++; } public KEY_TYPE PREV_KEY() { if ( ! hasPrevious() ) throw new NoSuchElementException(); return --curr; } #if ! KEY_CLASS_Long public int nextIndex() { return curr - from; } public int previousIndex() { return curr - from - 1; } #endif public int skip( int n ) { if ( curr + n <= to ) { curr += n; return n; } #if ! KEY_CLASS_Long n = to - curr; #else n = (int)( to - curr ); #endif curr = to; return n; } public int back( int n ) { if ( curr - n >= from ) { curr -= n; return n; } #if ! KEY_CLASS_Long n = curr - from ; #else n = (int)( curr - from ); #endif curr = from; return n; } } #if KEY_CLASS_Long /** Creates a type-specific bidirectional iterator over an interval. * *

The type-specific bidirectional iterator returned by this method will return the * elements from, from+1,…, to-1. * *

Note that all other type-specific interval iterators are list * iterators. Of course, this is not possible with longs as the index * returned by {@link java.util.ListIterator#nextIndex() nextIndex()}/{@link * java.util.ListIterator#previousIndex() previousIndex()} would exceed an integer. * * @param from the starting element (inclusive). * @param to the ending element (exclusive). * @return a type-specific bidirectional iterator enumerating the elements from from to to. */ public static KEY_BIDI_ITERATOR fromTo( final KEY_TYPE from, final KEY_TYPE to ) { return new IntervalIterator( from, to ); } #else /** Creates a type-specific list iterator over an interval. * *
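* For example, in the {@code int} specialization (an illustrative sketch):
* <pre>
* IntListIterator it = IntIterators.fromTo( 3, 7 ); // returns 3, 4, 5, 6
* // nextIndex()/previousIndex() are relative to the starting element: here nextIndex() starts at 0
* </pre>
*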

The type-specific list iterator returned by this method will return the * elements from, from+1,…, to-1. * * @param from the starting element (inclusive). * @param to the ending element (exclusive). * @return a type-specific list iterator enumerating the elements from from to to. */ public static KEY_LIST_ITERATOR fromTo( final KEY_TYPE from, final KEY_TYPE to ) { return new IntervalIterator( from, to ); } #endif #endif private static class IteratorConcatenator KEY_GENERIC extends KEY_ABSTRACT_ITERATOR KEY_GENERIC { final KEY_ITERATOR KEY_EXTENDS_GENERIC a[]; int offset, length, lastOffset = -1; public IteratorConcatenator( final KEY_ITERATOR KEY_EXTENDS_GENERIC a[], int offset, int length ) { this.a = a; this.offset = offset; this.length = length; advance(); } private void advance() { while( length != 0 ) { if ( a[ offset ].hasNext() ) break; length--; offset++; } return; } public boolean hasNext() { return length > 0; } public KEY_GENERIC_TYPE NEXT_KEY() { if ( ! hasNext() ) throw new NoSuchElementException(); KEY_GENERIC_TYPE next = a[ lastOffset = offset ].NEXT_KEY(); advance(); return next; } public void remove() { if ( lastOffset == -1 ) throw new IllegalStateException(); a[ lastOffset ].remove(); } public int skip( int n ) { lastOffset = -1; int skipped = 0; while( skipped < n && length != 0 ) { skipped += a[ offset ].skip( n - skipped ); if ( a[ offset ].hasNext() ) break; length--; offset++; } return skipped; } } /** Concatenates all iterators contained in an array. * *
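* A short sketch, again assuming the {@code int} specialization:
* <pre>
* IntIterator[] parts = { IntIterators.fromTo( 0, 3 ), IntIterators.fromTo( 10, 12 ) };
* IntIterator all = IntIterators.concat( parts ); // 0, 1, 2, 10, 11
* </pre>
*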

This method returns an iterator that will enumerate in order the elements returned * by all iterators contained in the given array. * * @param a an array of iterators. * @return an iterator obtained by concatenation. */ public static KEY_GENERIC KEY_ITERATOR KEY_GENERIC concat( final KEY_ITERATOR KEY_EXTENDS_GENERIC a[] ) { return concat( a, 0, a.length ); } /** Concatenates a sequence of iterators contained in an array. * *

This method returns an iterator that will enumerate in order the elements returned * by a[ offset ], then those returned * by a[ offset + 1 ], and so on up to * a[ offset + length - 1 ]. * * @param a an array of iterators. * @param offset the index of the first iterator to concatenate. * @param length the number of iterators to concatenate. * @return an iterator obtained by concatenation of length elements of a starting at offset. */ public static KEY_GENERIC KEY_ITERATOR KEY_GENERIC concat( final KEY_ITERATOR KEY_EXTENDS_GENERIC a[], final int offset, final int length ) { return new IteratorConcatenator KEY_GENERIC( a, offset, length ); } /** An unmodifiable wrapper class for iterators. */ public static class UnmodifiableIterator KEY_GENERIC extends KEY_ABSTRACT_ITERATOR KEY_GENERIC { final protected KEY_ITERATOR KEY_GENERIC i; public UnmodifiableIterator( final KEY_ITERATOR KEY_GENERIC i ) { this.i = i; } public boolean hasNext() { return i.hasNext(); } public KEY_GENERIC_TYPE NEXT_KEY() { return i.NEXT_KEY(); } #if KEYS_PRIMITIVE /** {@inheritDoc} * @deprecated Please use the corresponding type-specific method instead. */ @Deprecated @Override public KEY_GENERIC_CLASS next() { return i.next(); } #endif } /** Returns an unmodifiable iterator backed by the specified iterator. * * @param i the iterator to be wrapped in an unmodifiable iterator. * @return an unmodifiable view of the specified iterator. */ public static KEY_GENERIC KEY_ITERATOR KEY_GENERIC unmodifiable( final KEY_ITERATOR KEY_GENERIC i ) { return new UnmodifiableIterator KEY_GENERIC( i ); } /** An unmodifiable wrapper class for bidirectional iterators. */ public static class UnmodifiableBidirectionalIterator KEY_GENERIC extends KEY_ABSTRACT_BIDI_ITERATOR KEY_GENERIC { final protected KEY_BIDI_ITERATOR KEY_GENERIC i; public UnmodifiableBidirectionalIterator( final KEY_BIDI_ITERATOR KEY_GENERIC i ) { this.i = i; } public boolean hasNext() { return i.hasNext(); } public boolean hasPrevious() { return i.hasPrevious(); } public KEY_GENERIC_TYPE NEXT_KEY() { return i.NEXT_KEY(); } public KEY_GENERIC_TYPE PREV_KEY() { return i.PREV_KEY(); } #if KEYS_PRIMITIVE /** {@inheritDoc} * @deprecated Please use the corresponding type-specific method instead. */ @Deprecated @Override public KEY_GENERIC_CLASS next() { return i.next(); } /** {@inheritDoc} * @deprecated Please use the corresponding type-specific method instead. */ @Deprecated @Override public KEY_GENERIC_CLASS previous() { return i.previous(); } #endif } /** Returns an unmodifiable bidirectional iterator backed by the specified bidirectional iterator. * * @param i the bidirectional iterator to be wrapped in an unmodifiable bidirectional iterator. * @return an unmodifiable view of the specified bidirectional iterator. */ public static KEY_GENERIC KEY_BIDI_ITERATOR KEY_GENERIC unmodifiable( final KEY_BIDI_ITERATOR KEY_GENERIC i ) { return new UnmodifiableBidirectionalIterator KEY_GENERIC( i ); } /** An unmodifiable wrapper class for list iterators. 
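* Instances are normally obtained through {@code unmodifiable()}; e.g., in the {@code int} specialization (a sketch, assuming {@code IntArrayList} as the backing list):
* <pre>
* IntListIterator it = IntIterators.unmodifiable( new IntArrayList( new int[] { 1, 2, 3 } ).listIterator() );
* // it.nextInt() works, but it.remove(), it.set() and it.add() throw UnsupportedOperationException
* </pre>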
*/ public static class UnmodifiableListIterator KEY_GENERIC extends KEY_ABSTRACT_LIST_ITERATOR KEY_GENERIC { final protected KEY_LIST_ITERATOR KEY_GENERIC i; public UnmodifiableListIterator( final KEY_LIST_ITERATOR KEY_GENERIC i ) { this.i = i; } public boolean hasNext() { return i.hasNext(); } public boolean hasPrevious() { return i.hasPrevious(); } public KEY_GENERIC_TYPE NEXT_KEY() { return i.NEXT_KEY(); } public KEY_GENERIC_TYPE PREV_KEY() { return i.PREV_KEY(); } public int nextIndex() { return i.nextIndex(); } public int previousIndex() { return i.previousIndex(); } #if KEYS_PRIMITIVE /** {@inheritDoc} * @deprecated Please use the corresponding type-specific method instead. */ @Deprecated @Override public KEY_GENERIC_CLASS next() { return i.next(); } /** {@inheritDoc} * @deprecated Please use the corresponding type-specific method instead. */ @Deprecated @Override public KEY_GENERIC_CLASS previous() { return i.previous(); } #endif } /** Returns an unmodifiable list iterator backed by the specified list iterator. * * @param i the list iterator to be wrapped in an unmodifiable list iterator. * @return an unmodifiable view of the specified list iterator. */ public static KEY_GENERIC KEY_LIST_ITERATOR KEY_GENERIC unmodifiable( final KEY_LIST_ITERATOR KEY_GENERIC i ) { return new UnmodifiableListIterator KEY_GENERIC( i ); } #if KEY_CLASS_Short || KEY_CLASS_Integer || KEY_CLASS_Long || KEY_CLASS_Float || KEY_CLASS_Double /** A wrapper promoting the results of a ByteIterator. */ protected static class ByteIteratorWrapper implements KEY_ITERATOR { final it.unimi.dsi.fastutil.bytes.ByteIterator iterator; public ByteIteratorWrapper( final it.unimi.dsi.fastutil.bytes.ByteIterator iterator ) { this.iterator = iterator; } public boolean hasNext() { return iterator.hasNext(); } public KEY_GENERIC_CLASS next() { return KEY_GENERIC_CLASS.valueOf( iterator.nextByte() ); } public KEY_TYPE NEXT_KEY() { return iterator.nextByte(); } public void remove() { iterator.remove(); } public int skip( final int n ) { return iterator.skip( n ); } } /** Returns an iterator backed by the specified byte iterator. * @return an iterator backed by the specified byte iterator. */ public static KEY_ITERATOR wrap( final it.unimi.dsi.fastutil.bytes.ByteIterator iterator ) { return new ByteIteratorWrapper( iterator ); } #endif #if KEY_CLASS_Integer || KEY_CLASS_Long || KEY_CLASS_Float || KEY_CLASS_Double /** A wrapper promoting the results of a ShortIterator. */ protected static class ShortIteratorWrapper implements KEY_ITERATOR { final it.unimi.dsi.fastutil.shorts.ShortIterator iterator; public ShortIteratorWrapper( final it.unimi.dsi.fastutil.shorts.ShortIterator iterator ) { this.iterator = iterator; } public boolean hasNext() { return iterator.hasNext(); } public KEY_GENERIC_CLASS next() { return KEY_GENERIC_CLASS.valueOf( iterator.nextShort() ); } public KEY_TYPE NEXT_KEY() { return iterator.nextShort(); } public void remove() { iterator.remove(); } public int skip( final int n ) { return iterator.skip( n ); } } /** Returns an iterator backed by the specified short iterator. * @return an iterator backed by the specified short iterator. */ public static KEY_ITERATOR wrap( final it.unimi.dsi.fastutil.shorts.ShortIterator iterator ) { return new ShortIteratorWrapper( iterator ); } #endif #if KEY_CLASS_Long || KEY_CLASS_Double /** A wrapper promoting the results of an IntIterator. 
*/ protected static class IntIteratorWrapper implements KEY_ITERATOR { final it.unimi.dsi.fastutil.ints.IntIterator iterator; public IntIteratorWrapper( final it.unimi.dsi.fastutil.ints.IntIterator iterator ) { this.iterator = iterator; } public boolean hasNext() { return iterator.hasNext(); } public KEY_GENERIC_CLASS next() { return KEY_GENERIC_CLASS.valueOf( iterator.nextInt() ); } public KEY_TYPE NEXT_KEY() { return iterator.nextInt(); } public void remove() { iterator.remove(); } public int skip( final int n ) { return iterator.skip( n ); } } /** Returns an iterator backed by the specified integer iterator. * @return an iterator backed by the specified integer iterator. */ public static KEY_ITERATOR wrap( final it.unimi.dsi.fastutil.ints.IntIterator iterator ) { return new IntIteratorWrapper( iterator ); } #endif #if KEY_CLASS_Double /** A wrapper promoting the results of a FloatIterator. */ protected static class FloatIteratorWrapper implements KEY_ITERATOR { final it.unimi.dsi.fastutil.floats.FloatIterator iterator; public FloatIteratorWrapper( final it.unimi.dsi.fastutil.floats.FloatIterator iterator ) { this.iterator = iterator; } public boolean hasNext() { return iterator.hasNext(); } public KEY_GENERIC_CLASS next() { return KEY_GENERIC_CLASS.valueOf( iterator.nextFloat() ); } public KEY_TYPE NEXT_KEY() { return iterator.nextFloat(); } public void remove() { iterator.remove(); } public int skip( final int n ) { return iterator.skip( n ); } } /** Returns an iterator backed by the specified float iterator. * @return an iterator backed by the specified float iterator. */ public static KEY_ITERATOR wrap( final it.unimi.dsi.fastutil.floats.FloatIterator iterator ) { return new FloatIteratorWrapper( iterator ); } #endif } fastutil-7.1.0/drv/LinkedOpenCustomHashMap.drv0000777000000000000000000000000013050701620022714 2OpenHashMap.drvustar rootrootfastutil-7.1.0/drv/LinkedOpenCustomHashSet.drv0000777000000000000000000000000013050701620022750 2OpenHashSet.drvustar rootrootfastutil-7.1.0/drv/LinkedOpenHashMap.drv0000777000000000000000000000000013050701620021521 2OpenHashMap.drvustar rootrootfastutil-7.1.0/drv/LinkedOpenHashSet.drv0000777000000000000000000000000013050701620021555 2OpenHashSet.drvustar rootrootfastutil-7.1.0/drv/List.drv0000664000000000000000000001517113050701620014274 0ustar rootroot/* * Copyright (C) 2002-2016 Sebastiano Vigna * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package PACKAGE; import java.util.List; #if ! KEY_CLASS_Reference /** A type-specific {@link List}; provides some additional methods that use polymorphism to avoid (un)boxing. * *

Note that this type-specific interface extends {@link Comparable}: it is expected that implementing * classes perform a lexicographical comparison using the standard operator "less than" for primitive types, * and the usual {@link Comparable#compareTo(Object) compareTo()} method for objects. * *
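* For example, under the {@code int} specialization two {@code IntList}s compare lexicographically (a sketch; {@code IntArrayList.wrap()} is just one way to build such lists):
* <pre>
* IntList a = IntArrayList.wrap( new int[] { 1, 2 } );
* IntList b = IntArrayList.wrap( new int[] { 1, 3 } );
* // a.compareTo( b ) is negative, because 2 is smaller than 3 at the first position where the lists differ
* </pre>
*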

Additionally, this interface strengthens {@link #listIterator()}, * {@link #listIterator(int)} and {@link #subList(int,int)}. * *

Besides polymorphic methods, this interface specifies methods to copy into an array or remove contiguous * sublists. Although the abstract implementation of this interface provides simple, one-by-one implementations * of these methods, it is expected that concrete implementations override them with optimized versions. * * @see List */ public interface LIST KEY_GENERIC extends List, Comparable>, COLLECTION KEY_GENERIC { #else /** A type-specific {@link List}; provides some additional methods that use polymorphism to avoid (un)boxing. * *

Additionally, this interface strengthens {@link #iterator()}, {@link #listIterator()}, * {@link #listIterator(int)} and {@link #subList(int,int)}. The former had already been * strengthened upstream, but unfortunately {@link List} re-specifies it. * *

Besides polymorphic methods, this interface specifies methods to copy into an array or remove contiguous * sublists. Although the abstract implementation of this interface provides simple, one-by-one implementations * of these methods, it is expected that concrete implementations override them with optimized versions. * * @see List */ public interface LIST KEY_GENERIC extends List, COLLECTION KEY_GENERIC { #endif /** Returns a type-specific iterator on the elements of this list (in proper sequence). * * Note that this specification strengthens the one given in {@link List#iterator()}. * It would not normally be necessary, but {@link java.lang.Iterable#iterator()} is bizarrely re-specified * in {@link List}. * * @return an iterator on the elements of this list (in proper sequence). */ KEY_LIST_ITERATOR KEY_GENERIC iterator(); /** Returns a type-specific list iterator on the list. * * @see #listIterator() * @deprecated As of fastutil 5, replaced by {@link #listIterator()}. */ @Deprecated KEY_LIST_ITERATOR KEY_GENERIC KEY_LIST_ITERATOR_METHOD(); /** Returns a type-specific list iterator on the list starting at a given index. * * @see #listIterator(int) * @deprecated As of fastutil 5, replaced by {@link #listIterator(int)}. */ @Deprecated KEY_LIST_ITERATOR KEY_GENERIC KEY_LIST_ITERATOR_METHOD( int index ); /** Returns a type-specific list iterator on the list. * * @see List#listIterator() */ KEY_LIST_ITERATOR KEY_GENERIC listIterator(); /** Returns a type-specific list iterator on the list starting at a given index. * * @see List#listIterator(int) */ KEY_LIST_ITERATOR KEY_GENERIC listIterator( int index ); /** Returns a type-specific view of the portion of this list from the index from, inclusive, to the index to, exclusive. * @see List#subList(int,int) * @deprecated As of fastutil 5, replaced by {@link #subList(int,int)}. */ @Deprecated LIST KEY_GENERIC SUBLIST_METHOD( int from, int to ); /** Returns a type-specific view of the portion of this list from the index from, inclusive, to the index to, exclusive. * *

Note that this specification strengthens the one given in {@link List#subList(int,int)}. * * @see List#subList(int,int) */ LIST KEY_GENERIC subList(int from, int to); /** Sets the size of this list. * *
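* A sketch, assuming the {@code int} specialization and {@code IntArrayList} as the concrete class:
* <pre>
* IntList l = IntArrayList.wrap( new int[] { 1, 2, 3 } );
* l.size( 5 ); // now [1, 2, 3, 0, 0]
* l.size( 2 ); // now [1, 2]
* </pre>
*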

If the specified size is smaller than the current size, the last elements are * discarded. Otherwise, they are filled with 0/null/false. * * @param size the new size. */ void size( int size ); /** Copies (hopefully quickly) elements of this type-specific list into the given array. * * @param from the start index (inclusive). * @param a the destination array. * @param offset the offset into the destination array where to store the first element copied. * @param length the number of elements to be copied. */ void getElements( int from, KEY_TYPE a[], int offset, int length ); /** Removes (hopefully quickly) elements of this type-specific list. * * @param from the start index (inclusive). * @param to the end index (exclusive). */ void removeElements( int from, int to ); /** Add (hopefully quickly) elements to this type-specific list. * * @param index the index at which to add elements. * @param a the array containing the elements. */ void addElements( int index, KEY_GENERIC_TYPE a[] ); /** Add (hopefully quickly) elements to this type-specific list. * * @param index the index at which to add elements. * @param a the array containing the elements. * @param offset the offset of the first element to add. * @param length the number of elements to add. */ void addElements( int index, KEY_GENERIC_TYPE a[], int offset, int length ); #if KEYS_PRIMITIVE /** * @see List#add(Object) */ boolean add( KEY_TYPE key ); /** * @see List#add(int,Object) */ void add( int index, KEY_TYPE key ); /** * @see List#add(int,Object) */ boolean addAll( int index, COLLECTION c ); /** * @see List#add(int,Object) */ boolean addAll( int index, LIST c ); /** * @see List#add(int,Object) */ boolean addAll( LIST c ); /** * @see List#get(int) */ KEY_TYPE GET_KEY( int index ); /** * @see List#indexOf(Object) */ int indexOf( KEY_TYPE k ); /** * @see List#lastIndexOf(Object) */ int lastIndexOf( KEY_TYPE k ); /** * @see List#remove(int) */ KEY_TYPE REMOVE_KEY( int index ); /** * @see List#set(int,Object) */ KEY_TYPE set( int index, KEY_TYPE k ); #endif } fastutil-7.1.0/drv/ListIterator.drv0000664000000000000000000000250313050701620016001 0ustar rootroot/* * Copyright (C) 2002-2016 Sebastiano Vigna * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package PACKAGE; import java.util.ListIterator; /** A type-specific bidirectional iterator that is also a {@link ListIterator}. * *

This interface merges the methods provided by a {@link ListIterator} and * a type-specific {@link it.unimi.dsi.fastutil.BidirectionalIterator}. Moreover, it provides * type-specific versions of {@link java.util.ListIterator#add(Object) add()} * and {@link java.util.ListIterator#set(Object) set()}. * * @see java.util.ListIterator * @see it.unimi.dsi.fastutil.BidirectionalIterator */ public interface KEY_LIST_ITERATOR KEY_GENERIC extends ListIterator, KEY_BIDI_ITERATOR KEY_GENERIC { #if KEYS_PRIMITIVE void set( KEY_TYPE k ); void add( KEY_TYPE k ); #endif } fastutil-7.1.0/drv/Lists.drv0000664000000000000000000012444713050701620014466 0ustar rootroot/* * Copyright (C) 2002-2016 Sebastiano Vigna * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package PACKAGE; import java.util.List; import java.util.Collection; import java.util.Random; /** A class providing static methods and objects that do useful things with type-specific lists. * * @see java.util.Collections */ public class LISTS { private LISTS() {} /** Shuffles the specified list using the specified pseudorandom number generator. * * @param l the list to be shuffled. * @param random a pseudorandom number generator (please use a XorShift* generator). * @return l. */ public static KEY_GENERIC LIST KEY_GENERIC shuffle( final LIST KEY_GENERIC l, final Random random ) { for( int i = l.size(); i-- != 0; ) { final int p = random.nextInt( i + 1 ); final KEY_GENERIC_TYPE t = l.GET_KEY( i ); l.set( i, l.GET_KEY( p ) ); l.set( p, t ); } return l; } /** An immutable class representing an empty type-specific list. * *

This class may be useful to implement your own in case you subclass * a type-specific list. */ public static class EmptyList KEY_GENERIC extends COLLECTIONS.EmptyCollection KEY_GENERIC implements LIST KEY_GENERIC, java.io.Serializable, Cloneable { private static final long serialVersionUID = -7046029254386353129L; protected EmptyList() {} public void add( final int index, final KEY_GENERIC_TYPE k ) { throw new UnsupportedOperationException(); } public boolean add( final KEY_GENERIC_TYPE k ) { throw new UnsupportedOperationException(); } public KEY_GENERIC_TYPE REMOVE_KEY( int i ) { throw new UnsupportedOperationException(); } public KEY_GENERIC_TYPE set( final int index, final KEY_GENERIC_TYPE k ) { throw new UnsupportedOperationException(); } public int indexOf( KEY_TYPE k ) { return -1; } public int lastIndexOf( KEY_TYPE k ) { return -1; } public boolean addAll( Collection c ) { throw new UnsupportedOperationException(); } public boolean addAll( int i, Collection c ) { throw new UnsupportedOperationException(); } public boolean removeAll( Collection c ) { throw new UnsupportedOperationException(); } public KEY_GENERIC_CLASS get( int i ) { throw new IndexOutOfBoundsException(); } #if KEYS_PRIMITIVE public boolean addAll( COLLECTION c ) { throw new UnsupportedOperationException(); } public boolean addAll( LIST c ) { throw new UnsupportedOperationException(); } public boolean addAll( int i, COLLECTION c ) { throw new UnsupportedOperationException(); } public boolean addAll( int i, LIST c ) { throw new UnsupportedOperationException(); } public void add( final int index, final KEY_GENERIC_CLASS k ) { throw new UnsupportedOperationException(); } public boolean add( final KEY_GENERIC_CLASS k ) { throw new UnsupportedOperationException(); } public KEY_GENERIC_CLASS set( final int index, final KEY_GENERIC_CLASS k ) { throw new UnsupportedOperationException(); } public KEY_GENERIC_TYPE GET_KEY( int i ) { throw new IndexOutOfBoundsException(); } public KEY_GENERIC_CLASS remove( int k ) { throw new UnsupportedOperationException(); } public int indexOf( Object k ) { return -1; } public int lastIndexOf( Object k ) { return -1; } #endif //SUPPRESS_WARNINGS_KEY_UNCHECKED //public KEY_ITERATOR KEY_GENERIC iterator( int i ) { if ( i == 0 ) return ITERATORS.EMPTY_ITERATOR; throw new IndexOutOfBoundsException( String.valueOf( i ) ); } @Deprecated SUPPRESS_WARNINGS_KEY_UNCHECKED public KEY_ITERATOR KEY_GENERIC KEY_ITERATOR_METHOD() { return ITERATORS.EMPTY_ITERATOR; } SUPPRESS_WARNINGS_KEY_UNCHECKED public KEY_LIST_ITERATOR KEY_GENERIC listIterator() { return ITERATORS.EMPTY_ITERATOR; } SUPPRESS_WARNINGS_KEY_UNCHECKED public KEY_LIST_ITERATOR KEY_GENERIC iterator() { return ITERATORS.EMPTY_ITERATOR; } SUPPRESS_WARNINGS_KEY_UNCHECKED public KEY_LIST_ITERATOR KEY_GENERIC listIterator( int i ) { if ( i == 0 ) return ITERATORS.EMPTY_ITERATOR; throw new IndexOutOfBoundsException( String.valueOf( i ) ); } @Deprecated public KEY_LIST_ITERATOR KEY_GENERIC KEY_LIST_ITERATOR_METHOD() { return listIterator(); } @Deprecated public KEY_LIST_ITERATOR KEY_GENERIC KEY_LIST_ITERATOR_METHOD( int i ) { return listIterator( i ); } public LIST KEY_GENERIC subList( int from, int to ) { if ( from == 0 && to == 0 ) return this; throw new IndexOutOfBoundsException(); } @Deprecated public LIST KEY_GENERIC SUBLIST_METHOD( int from, int to ) { return subList( from, to ); } public void getElements( int from, KEY_TYPE[] a, int offset, int length ) { if ( from == 0 && length == 0 && offset >= 0 && offset <= a.length ) return; throw new 
IndexOutOfBoundsException(); } public void removeElements( int from, int to ) { throw new UnsupportedOperationException(); } public void addElements( int index, final KEY_GENERIC_TYPE a[], int offset, int length ) { throw new UnsupportedOperationException(); } public void addElements( int index, final KEY_GENERIC_TYPE a[] ) { throw new UnsupportedOperationException(); } public void size( int s ) { throw new UnsupportedOperationException(); } public int compareTo( final List o ) { if ( o == this ) return 0; return ((List)o).isEmpty() ? 0 : -1; } private Object readResolve() { return EMPTY_LIST; } public Object clone() { return EMPTY_LIST; } public int hashCode() { return 1; } @SuppressWarnings("rawtypes") public boolean equals( Object o ) { return o instanceof List && ((List)o).isEmpty(); } public String toString() { return "[]"; } } /** An empty list (immutable). It is serializable and cloneable. */ SUPPRESS_WARNINGS_KEY_RAWTYPES public static final EmptyList EMPTY_LIST = new EmptyList(); #if KEYS_REFERENCE /** Return an empty list (immutable). It is serializable and cloneable. * *

This method provides typesafe access to {@link #EMPTY_LIST}. * @return an empty list (immutable). */ @SuppressWarnings("unchecked") public static KEY_GENERIC LIST KEY_GENERIC emptyList() { return EMPTY_LIST; } #endif /** An immutable class representing a type-specific singleton list. * *

This class may be useful to implement your own in case you subclass * a type-specific list. */ public static class Singleton KEY_GENERIC extends ABSTRACT_LIST KEY_GENERIC implements java.io.Serializable, Cloneable { private static final long serialVersionUID = -7046029254386353129L; private final KEY_GENERIC_TYPE element; private Singleton( final KEY_GENERIC_TYPE element ) { this.element = element; } public KEY_GENERIC_TYPE GET_KEY( final int i ) { if ( i == 0 ) return element; throw new IndexOutOfBoundsException(); } public KEY_GENERIC_TYPE REMOVE_KEY( final int i ) { throw new UnsupportedOperationException(); } public boolean contains( final KEY_TYPE k ) { return KEY_EQUALS( k, element ); } public boolean addAll( final Collection c ) { throw new UnsupportedOperationException(); } public boolean addAll( final int i, final Collection c ) { throw new UnsupportedOperationException(); } public boolean removeAll( final Collection c ) { throw new UnsupportedOperationException(); } public boolean retainAll( final Collection c ) { throw new UnsupportedOperationException(); } /* Slightly optimized w.r.t. the one in ABSTRACT_SET. */ public KEY_TYPE[] TO_KEY_ARRAY() { KEY_TYPE a[] = new KEY_TYPE[ 1 ]; a[ 0 ] = element; return a; } public KEY_LIST_ITERATOR KEY_GENERIC listIterator() { return ITERATORS.singleton( element ); } public KEY_LIST_ITERATOR KEY_GENERIC iterator() { return listIterator(); } public KEY_LIST_ITERATOR KEY_GENERIC listIterator( int i ) { if ( i > 1 || i < 0 ) throw new IndexOutOfBoundsException(); KEY_LIST_ITERATOR KEY_GENERIC l = listIterator(); if ( i == 1 ) l.next(); return l; } SUPPRESS_WARNINGS_KEY_UNCHECKED public LIST KEY_GENERIC subList( final int from, final int to ) { ensureIndex( from ); ensureIndex( to ); if ( from > to ) throw new IndexOutOfBoundsException( "Start index (" + from + ") is greater than end index (" + to + ")" ); if ( from != 0 || to != 1 ) return EMPTY_LIST; return this; } public int size() { return 1; } public void size( final int size ) { throw new UnsupportedOperationException(); } public void clear() { throw new UnsupportedOperationException(); } public Object clone() { return this; } #if KEYS_PRIMITIVE public boolean rem( final KEY_GENERIC_TYPE k ) { throw new UnsupportedOperationException(); } public boolean addAll( final COLLECTION c ) { throw new UnsupportedOperationException(); } public boolean addAll( final int i, final COLLECTION c ) { throw new UnsupportedOperationException(); } #else public boolean remove( final Object k ) { throw new UnsupportedOperationException(); } #endif } /** Returns a type-specific immutable list containing only the specified element. The returned list is serializable and cloneable. * * @param element the only element of the returned list. * @return a type-specific immutable list containing just element. */ public static KEY_GENERIC LIST KEY_GENERIC singleton( final KEY_GENERIC_TYPE element ) { return new Singleton KEY_GENERIC( element ); } #if ! KEYS_REFERENCE /** Returns a type-specific immutable list containing only the specified element. The returned list is serializable and cloneable. * * @param element the only element of the returned list. * @return a type-specific immutable list containing just element. */ public static KEY_GENERIC LIST KEY_GENERIC singleton( final Object element ) { return new Singleton KEY_GENERIC( KEY_OBJ2TYPE( element ) ); } #endif /** A synchronized wrapper class for lists. 
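* Instances are obtained through the {@code synchronize()} methods below; e.g., in the {@code int} specialization (an illustrative sketch):
* <pre>
* IntList sync = IntLists.synchronize( new IntArrayList() );
* // every access is now serialized, either on the wrapper itself or on an explicit lock object, if one is provided
* </pre>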
*/ public static class SynchronizedList KEY_GENERIC extends COLLECTIONS.SynchronizedCollection KEY_GENERIC implements LIST KEY_GENERIC, java.io.Serializable { private static final long serialVersionUID = -7046029254386353129L; protected final LIST KEY_GENERIC list; // Due to the large number of methods that are not in COLLECTION, this is worth caching. protected SynchronizedList( final LIST KEY_GENERIC l, final Object sync ) { super( l, sync ); this.list = l; } protected SynchronizedList( final LIST KEY_GENERIC l ) { super( l ); this.list = l; } public KEY_GENERIC_TYPE GET_KEY( final int i ) { synchronized( sync ) { return list.GET_KEY( i ); } } public KEY_GENERIC_TYPE set( final int i, final KEY_GENERIC_TYPE k ) { synchronized( sync ) { return list.set( i, k ); } } public void add( final int i, final KEY_GENERIC_TYPE k ) { synchronized( sync ) { list.add( i, k ); } } public KEY_GENERIC_TYPE REMOVE_KEY( final int i ) { synchronized( sync ) { return list.REMOVE_KEY( i ); } } public int indexOf( final KEY_TYPE k ) { synchronized( sync ) { return list.indexOf( k ); } } public int lastIndexOf( final KEY_TYPE k ) { synchronized( sync ) { return list.lastIndexOf( k ); } } public boolean addAll( final int index, final Collection c ) { synchronized( sync ) { return list.addAll( index, c ); } } public void getElements( final int from, final KEY_TYPE a[], final int offset, final int length ) { synchronized( sync ) { list.getElements( from, a, offset, length ); } } public void removeElements( final int from, final int to ) { synchronized( sync ) { list.removeElements( from, to ); } } public void addElements( int index, final KEY_GENERIC_TYPE a[], int offset, int length ) { synchronized( sync ) { list.addElements( index, a, offset, length ); } } public void addElements( int index, final KEY_GENERIC_TYPE a[] ) { synchronized( sync ) { list.addElements( index, a ); } } public void size( final int size ) { synchronized( sync ) { list.size( size ); } } public KEY_LIST_ITERATOR KEY_GENERIC iterator() { return list.listIterator(); } public KEY_LIST_ITERATOR KEY_GENERIC listIterator() { return list.listIterator(); } public KEY_LIST_ITERATOR KEY_GENERIC listIterator( final int i ) { return list.listIterator( i ); } @Deprecated public KEY_LIST_ITERATOR KEY_GENERIC KEY_LIST_ITERATOR_METHOD() { return listIterator(); } @Deprecated public KEY_LIST_ITERATOR KEY_GENERIC KEY_LIST_ITERATOR_METHOD( final int i ) { return listIterator( i ); } public LIST KEY_GENERIC subList( final int from, final int to ) { synchronized( sync ) { return synchronize( list.subList( from, to ), sync ); } } @Deprecated public LIST KEY_GENERIC SUBLIST_METHOD( final int from, final int to ) { return subList( from, to ); } public boolean equals( final Object o ) { synchronized( sync ) { return collection.equals( o ); } } public int hashCode() { synchronized( sync ) { return collection.hashCode(); } } #if ! 
KEY_CLASS_Reference public int compareTo( final List o ) { synchronized( sync ) { return list.compareTo( o ); } } #endif #if KEYS_PRIMITIVE public boolean addAll( final int index, final COLLECTION c ) { synchronized( sync ) { return list.addAll( index, c ); } } public boolean addAll( final int index, LIST l ) { synchronized( sync ) { return list.addAll( index, l ); } } public boolean addAll( LIST l ) { synchronized( sync ) { return list.addAll( l ); } } public KEY_GENERIC_CLASS get( final int i ) { synchronized( sync ) { return list.get( i ); } } public void add( final int i, KEY_GENERIC_CLASS k ) { synchronized( sync ) { list.add( i, k ); } } public KEY_GENERIC_CLASS set( final int index, KEY_GENERIC_CLASS k ) { synchronized( sync ) { return list.set( index, k ); } } public KEY_GENERIC_CLASS remove( final int i ) { synchronized( sync ) { return list.remove( i ); } } public int indexOf( final Object o ) { synchronized( sync ) { return list.indexOf( o ); } } public int lastIndexOf( final Object o ) { synchronized( sync ) { return list.lastIndexOf( o ); } } #endif } /** Returns a synchronized type-specific list backed by the given type-specific list. * * @param l the list to be wrapped in a synchronized list. * @return a synchronized view of the specified list. * @see java.util.Collections#synchronizedList(List) */ public static KEY_GENERIC LIST KEY_GENERIC synchronize( final LIST KEY_GENERIC l ) { return new SynchronizedList KEY_GENERIC( l ); } /** Returns a synchronized type-specific list backed by the given type-specific list, using an assigned object to synchronize. * * @param l the list to be wrapped in a synchronized list. * @param sync an object that will be used to synchronize the access to the list. * @return a synchronized view of the specified list. * @see java.util.Collections#synchronizedList(List) */ public static KEY_GENERIC LIST KEY_GENERIC synchronize( final LIST KEY_GENERIC l, final Object sync ) { return new SynchronizedList KEY_GENERIC( l, sync ); } /** An unmodifiable wrapper class for lists. */ public static class UnmodifiableList KEY_GENERIC extends COLLECTIONS.UnmodifiableCollection KEY_GENERIC implements LIST KEY_GENERIC, java.io.Serializable { private static final long serialVersionUID = -7046029254386353129L; protected final LIST KEY_GENERIC list; // Due to the large number of methods that are not in COLLECTION, this is worth caching. 
protected UnmodifiableList( final LIST KEY_GENERIC l ) { super( l ); this.list = l; } public KEY_GENERIC_TYPE GET_KEY( final int i ) { return list.GET_KEY( i ); } public KEY_GENERIC_TYPE set( final int i, final KEY_GENERIC_TYPE k ) { throw new UnsupportedOperationException(); } public void add( final int i, final KEY_GENERIC_TYPE k ) { throw new UnsupportedOperationException(); } public KEY_GENERIC_TYPE REMOVE_KEY( final int i ) { throw new UnsupportedOperationException(); } public int indexOf( final KEY_TYPE k ) { return list.indexOf( k ); } public int lastIndexOf( final KEY_TYPE k ) { return list.lastIndexOf( k ); } public boolean addAll( final int index, final Collection c ) { throw new UnsupportedOperationException(); } public void getElements( final int from, final KEY_TYPE a[], final int offset, final int length ) { list.getElements( from, a, offset, length ); } public void removeElements( final int from, final int to ) { throw new UnsupportedOperationException(); } public void addElements( int index, final KEY_GENERIC_TYPE a[], int offset, int length ) { throw new UnsupportedOperationException(); } public void addElements( int index, final KEY_GENERIC_TYPE a[] ) { throw new UnsupportedOperationException(); } public void size( final int size ) { list.size( size ); } public KEY_LIST_ITERATOR KEY_GENERIC iterator() { return listIterator(); } public KEY_LIST_ITERATOR KEY_GENERIC listIterator() { return ITERATORS.unmodifiable( list.listIterator() ); } public KEY_LIST_ITERATOR KEY_GENERIC listIterator( final int i ) { return ITERATORS.unmodifiable( list.listIterator( i ) ); } @Deprecated public KEY_LIST_ITERATOR KEY_GENERIC KEY_LIST_ITERATOR_METHOD() { return listIterator(); } @Deprecated public KEY_LIST_ITERATOR KEY_GENERIC KEY_LIST_ITERATOR_METHOD( final int i ) { return listIterator( i ); } public LIST KEY_GENERIC subList( final int from, final int to ) { return unmodifiable( list.subList( from, to ) ); } @Deprecated public LIST KEY_GENERIC SUBLIST_METHOD( final int from, final int to ) { return subList( from, to ); } public boolean equals( final Object o ) { return collection.equals( o ); } public int hashCode() { return collection.hashCode(); } #if ! KEY_CLASS_Reference public int compareTo( final List o ) { return list.compareTo( o ); } #endif #if KEYS_PRIMITIVE public boolean addAll( final int index, final COLLECTION c ) { throw new UnsupportedOperationException(); } public boolean addAll( final LIST l ) { throw new UnsupportedOperationException(); } public boolean addAll( final int index, final LIST l ) { throw new UnsupportedOperationException(); } public KEY_GENERIC_CLASS get( final int i ) { return list.get( i ); } public void add( final int i, KEY_GENERIC_CLASS k ) { throw new UnsupportedOperationException(); } public KEY_GENERIC_CLASS set( final int index, KEY_GENERIC_CLASS k ) { throw new UnsupportedOperationException(); } public KEY_GENERIC_CLASS remove( final int i ) { throw new UnsupportedOperationException(); } public int indexOf( final Object o ) { return list.indexOf( o ); } public int lastIndexOf( final Object o ) { return list.lastIndexOf( o ); } #endif } /** Returns an unmodifiable type-specific list backed by the given type-specific list. * * @param l the list to be wrapped in an unmodifiable list. * @return an unmodifiable view of the specified list. 
* @see java.util.Collections#unmodifiableList(List) */ public static KEY_GENERIC LIST KEY_GENERIC unmodifiable( final LIST KEY_GENERIC l ) { return new UnmodifiableList KEY_GENERIC( l ); } #ifdef TEST private static KEY_TYPE genKey() { #if KEY_CLASS_Byte || KEY_CLASS_Short || KEY_CLASS_Character return (KEY_TYPE)(r.nextInt()); #elif KEYS_PRIMITIVE return r.NEXT_KEY(); #elif KEY_CLASS_Object return Integer.toBinaryString( r.nextInt() ); #else return new java.io.Serializable() {}; #endif } private static void testLists( KEY_TYPE k, LIST m, List t, int level ) { int n = 100; int c; long ms; boolean mThrowsIllegal, tThrowsIllegal, mThrowsNoElement, tThrowsNoElement, mThrowsIndex, tThrowsIndex, mThrowsUnsupp, tThrowsUnsupp; boolean rt = false, rm = false; Object Rt = null, Rm = null; if ( level == 0 ) return; /* Now we check that m and t are equal. */ if ( !m.equals( t ) || ! t.equals( m ) ) System.err.println("m: " + m + " t: " + t); ensure( m.equals( t ), "Error (" + level + ", " + seed + "): ! m.equals( t ) at start" ); ensure( t.equals( m ), "Error (" + level + ", " + seed + "): ! t.equals( m ) at start" ); /* Now we check that m actually holds that data. */ for(java.util.Iterator i=t.iterator(); i.hasNext(); ) { ensure( m.contains( i.next() ), "Error (" + level + ", " + seed + "): m and t differ on an entry after insertion (iterating on t)" ); } /* Now we check that m actually holds that data, but iterating on m. */ for(java.util.Iterator i=m.listIterator(); i.hasNext(); ) { ensure( t.contains( i.next() ), "Error (" + level + ", " + seed + "): m and t differ on an entry after insertion (iterating on m)" ); } /* Now we check that inquiries about random data give the same answer in m and t. For m we use the polymorphic method. */ for(int i=0; i 1 ) r = new java.util.Random( seed = Long.parseLong( arg[ 1 ] ) ); try { test(); } catch( Throwable e ) { e.printStackTrace( System.err ); System.err.println( "seed: " + seed ); } } #endif } fastutil-7.1.0/drv/Map.drv0000664000000000000000000001176213050701620014100 0ustar rootroot/* * Copyright (C) 2002-2016 Sebastiano Vigna * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package PACKAGE; import VALUE_PACKAGE.VALUE_COLLECTION; import it.unimi.dsi.fastutil.objects.ObjectSet; import it.unimi.dsi.fastutil.objects.ObjectIterator; import java.util.Map; /** A type-specific {@link Map}; provides some additional methods that use polymorphism to avoid (un)boxing, and handling of a default return value. * *

Besides extending the corresponding type-specific {@linkplain it.unimi.dsi.fastutil.Function function}, this interface strengthens {@link #entrySet()}, * {@link #keySet()} and {@link #values()}. Maps returning entry sets of type {@link FastEntrySet} also support fast iteration. * *

A submap or subset may or may not have an * independent default return value (which however must be initialized to the * default return value of the originator). * * @see Map */ public interface MAP KEY_VALUE_GENERIC extends FUNCTION KEY_VALUE_GENERIC, Map { /** An entry set providing fast iteration. * *
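* A usage sketch, assuming the {@code Int2IntOpenHashMap} specialization (all names below are the ones generated for the int/int case; {@code ObjectIterator} comes from {@code it.unimi.dsi.fastutil.objects}):
* <pre>
* Int2IntOpenHashMap m = new Int2IntOpenHashMap();
* m.put( 1, 10 );
* m.put( 2, 20 );
* // hash-based maps expose a FastEntrySet, so fastIterator() can reuse a single entry object
* for( ObjectIterator i = m.int2IntEntrySet().fastIterator(); i.hasNext(); ) {
*     Int2IntMap.Entry e = (Int2IntMap.Entry)i.next();
*     System.out.println( e.getIntKey() + " -> " + e.getIntValue() );
* }
* </pre>
*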

In some cases (e.g., hash-based classes) iteration over an entry set requires the creation * of a large number of {@link java.util.Map.Entry} objects. Some fastutil * maps might return {@linkplain #entrySet() entry set} objects of type FastEntrySet: in this case, {@link #fastIterator() fastIterator()} * will return an iterator that is guaranteed not to create a large number of objects, possibly * by always returning the same entry (suitably mutated). */ public interface FastEntrySet KEY_VALUE_GENERIC extends ObjectSet { /** Returns a fast iterator over this entry set; the iterator might always return the same entry object, suitably mutated. * * @return a fast iterator over this entry set; the iterator might always return the same {@link java.util.Map.Entry} object, suitably mutated. */ public ObjectIterator fastIterator(); } /** Returns a set view of the mappings contained in this map. *

Note that this specification strengthens the one given in {@link Map#entrySet()}. * * @return a set view of the mappings contained in this map. * @see Map#entrySet() */ ObjectSet> entrySet(); /** Returns a type-specific set view of the mappings contained in this map. * *

This method is necessary because there is no inheritance along * type parameters: it is thus impossible to strengthen {@link #entrySet()} * so that it returns an {@link it.unimi.dsi.fastutil.objects.ObjectSet} * of type-specific entries (the latter makes it possible to * access keys and values with type-specific methods). * * @return a type-specific set view of the mappings contained in this map. * @see #entrySet() */ ObjectSet ENTRYSET(); /** Returns a set view of the keys contained in this map. *

Note that this specification strengthens the one given in {@link Map#keySet()}. * * @return a set view of the keys contained in this map. * @see Map#keySet() */ SET KEY_GENERIC keySet(); /** Returns a set view of the values contained in this map. *

Note that this specification strengthens the one given in {@link Map#values()}. * * @return a set view of the values contained in this map. * @see Map#values() */ VALUE_COLLECTION VALUE_GENERIC values(); #if VALUES_PRIMITIVE /** * @see Map#containsValue(Object) */ boolean containsValue( VALUE_TYPE value ); #endif /** A type-specific {@link java.util.Map.Entry}; provides some additional methods * that use polymorphism to avoid (un)boxing. * * @see java.util.Map.Entry */ interface Entry KEY_VALUE_GENERIC extends Map.Entry { #if KEYS_PRIMITIVE /** {@inheritDoc} * @deprecated Please use the corresponding type-specific method instead. */ @Deprecated @Override KEY_GENERIC_CLASS getKey(); #endif #if KEYS_PRIMITIVE /** * @see java.util.Map.Entry#getKey() */ KEY_TYPE ENTRY_GET_KEY(); #endif #if VALUES_PRIMITIVE /** {@inheritDoc} * @deprecated Please use the corresponding type-specific method instead. */ @Deprecated @Override VALUE_GENERIC_CLASS getValue(); #endif #if VALUES_PRIMITIVE /** * @see java.util.Map.Entry#setValue(Object) */ VALUE_TYPE setValue(VALUE_TYPE value); /** * @see java.util.Map.Entry#getValue() */ VALUE_TYPE ENTRY_GET_VALUE(); #endif } } fastutil-7.1.0/drv/Maps.drv0000664000000000000000000004311313050701620014256 0ustar rootroot/* * Copyright (C) 2002-2016 Sebastiano Vigna * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package PACKAGE; import it.unimi.dsi.fastutil.objects.ObjectSet; import it.unimi.dsi.fastutil.objects.ObjectSets; import VALUE_PACKAGE.VALUE_COLLECTION; import VALUE_PACKAGE.VALUE_COLLECTIONS; #if ! VALUE_CLASS_Object import VALUE_PACKAGE.VALUE_SETS; #endif import java.util.Map; /** A class providing static methods and objects that do useful things with type-specific maps. * * @see it.unimi.dsi.fastutil.Maps * @see java.util.Collections */ public class MAPS { private MAPS() {} /** An immutable class representing an empty type-specific map. * *

This class may be useful to implement your own in case you subclass * a type-specific map. */ public static class EmptyMap KEY_VALUE_GENERIC extends FUNCTIONS.EmptyFunction KEY_VALUE_GENERIC implements MAP KEY_VALUE_GENERIC, java.io.Serializable, Cloneable { private static final long serialVersionUID = -7046029254386353129L; protected EmptyMap() {} public boolean containsValue( final VALUE_TYPE v ) { return false; } public void putAll( final Map m ) { throw new UnsupportedOperationException(); } @SuppressWarnings("unchecked") public ObjectSet ENTRYSET() { return ObjectSets.EMPTY_SET; } SUPPRESS_WARNINGS_KEY_UNCHECKED public SET KEY_GENERIC keySet() { return SETS.EMPTY_SET; } SUPPRESS_WARNINGS_VALUE_UNCHECKED public VALUE_COLLECTION VALUE_GENERIC values() { return VALUE_SETS.EMPTY_SET; } #if VALUES_PRIMITIVE public boolean containsValue( final Object ov ) { return false; } #endif private Object readResolve() { return EMPTY_MAP; } public Object clone() { return EMPTY_MAP; } public boolean isEmpty() { return true; } @SuppressWarnings({ "rawtypes", "unchecked" }) public ObjectSet> entrySet() { return (ObjectSet)ENTRYSET(); } public int hashCode() { return 0; } public boolean equals( final Object o ) { if ( ! ( o instanceof Map ) ) return false; return ((Map)o).isEmpty(); } public String toString() { return "{}"; } } /** An empty type-specific map (immutable). It is serializable and cloneable. */ SUPPRESS_WARNINGS_KEY_VALUE_RAWTYPES public static final EmptyMap EMPTY_MAP = new EmptyMap(); #if KEYS_REFERENCE || VALUES_REFERENCE /** Return an empty map (immutable). It is serializable and cloneable. * *
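 * <p>A minimal usage sketch, assuming the object-to-object instantiation generated from this
 * template (the concrete class names are illustrative, not defined here):
 * <pre>
 * // No cast and no unchecked warning, contrary to using the raw EMPTY_MAP constant directly.
 * Object2ObjectMap&lt;String, String&gt; empty = Object2ObjectMaps.emptyMap();
 * </pre>
 *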

This method provides a typesafe access to {@link #EMPTY_MAP}. * @return an empty map (immutable). */ @SuppressWarnings("unchecked") public static KEY_VALUE_GENERIC MAP KEY_VALUE_GENERIC emptyMap() { return EMPTY_MAP; } #endif /** An immutable class representing a type-specific singleton map. * *

This class may be useful to implement your own in case you subclass * a type-specific map. */ public static class Singleton KEY_VALUE_GENERIC extends FUNCTIONS.Singleton KEY_VALUE_GENERIC implements MAP KEY_VALUE_GENERIC, java.io.Serializable, Cloneable { private static final long serialVersionUID = -7046029254386353129L; protected transient ObjectSet entries; protected transient SET KEY_GENERIC keys; protected transient VALUE_COLLECTION VALUE_GENERIC values; protected Singleton( final KEY_GENERIC_TYPE key, final VALUE_GENERIC_TYPE value ) { super( key, value ); } public boolean containsValue( final VALUE_TYPE v ) { return VALUE_EQUALS( value, v ); } #if VALUES_PRIMITIVE public boolean containsValue( final Object ov ) { return VALUE_EQUALS( VALUE_OBJ2TYPE( ov ), value ); } #endif public void putAll( final Map m ) { throw new UnsupportedOperationException(); } public ObjectSet ENTRYSET() { if ( entries == null ) entries = ObjectSets.singleton( (MAP.Entry KEY_VALUE_GENERIC)new SingletonEntry() ); return entries; } public SET KEY_GENERIC keySet() { if ( keys == null ) keys = SETS.singleton( key ); return keys; } public VALUE_COLLECTION VALUE_GENERIC values() { if ( values == null ) values = VALUE_SETS.singleton( value ); return values; } protected class SingletonEntry implements MAP.Entry KEY_VALUE_GENERIC, Map.Entry { #if KEYS_PRIMITIVE /** {@inheritDoc} * @deprecated Please use the corresponding type-specific method instead. */ @Deprecated #endif public KEY_GENERIC_CLASS getKey() { return KEY2OBJ( Singleton.this.key ); } #if VALUES_PRIMITIVE /** {@inheritDoc} * @deprecated Please use the corresponding type-specific method instead. */ @Deprecated #endif public VALUE_GENERIC_CLASS getValue() { return VALUE2OBJ( Singleton.this.value ); } #if KEYS_PRIMITIVE public KEY_GENERIC_TYPE ENTRY_GET_KEY() { return Singleton.this.key; } #endif #if VALUES_PRIMITIVE public VALUE_GENERIC_TYPE ENTRY_GET_VALUE() { return Singleton.this.value; } public VALUE_GENERIC_TYPE setValue( final VALUE_GENERIC_TYPE value ) { throw new UnsupportedOperationException(); } #endif public VALUE_GENERIC_CLASS setValue( final VALUE_GENERIC_CLASS value ) { throw new UnsupportedOperationException(); } public boolean equals( final Object o ) { if (!(o instanceof Map.Entry)) return false; Map.Entry e = (Map.Entry)o; #if KEYS_PRIMITIVE if (e.getKey() == null || ! (e.getKey() instanceof KEY_CLASS)) return false; #endif #if VALUES_PRIMITIVE if (e.getValue() == null || ! (e.getValue() instanceof VALUE_CLASS)) return false; #endif return KEY_EQUALS( Singleton.this.key, KEY_OBJ2TYPE( e.getKey() ) ) && VALUE_EQUALS( Singleton.this.value, VALUE_OBJ2TYPE( e.getValue() ) ); } public int hashCode() { return KEY2JAVAHASH( Singleton.this.key ) ^ VALUE2JAVAHASH( Singleton.this.value ); } public String toString() { return Singleton.this.key + "->" + Singleton.this.value; } } public boolean isEmpty() { return false; } @SuppressWarnings({ "rawtypes", "unchecked" }) public ObjectSet> entrySet() { return (ObjectSet)ENTRYSET(); } public int hashCode() { return KEY2JAVAHASH( key ) ^ VALUE2JAVAHASH( value ); } public boolean equals( final Object o ) { if ( o == this ) return true; if ( ! ( o instanceof Map ) ) return false; Map m = (Map)o; if ( m.size() != 1 ) return false; return entrySet().iterator().next().equals( m.entrySet().iterator().next() ); } public String toString() { return "{" + key + "=>" + value + "}"; } } /** Returns a type-specific immutable map containing only the specified pair. The returned map is serializable and cloneable. * *
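 * <p>A usage sketch, assuming the int-to-int instantiation generated from this template
 * (concrete names are illustrative):
 * <pre>
 * Int2IntMap m = Int2IntMaps.singleton( 1, 10 );
 * int v = m.get( 1 );           // 10
 * m.defaultReturnValue( -1 );   // allowed, even though the mapping itself is immutable
 * int w = m.get( 2 );           // -1
 * // m.put( 2, 20 ) would throw an UnsupportedOperationException.
 * </pre>
 *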

Note that albeit the returned map is immutable, its default return value may be changed. * * @param key the only key of the returned map. * @param value the only value of the returned map. * @return a type-specific immutable map containing just the pair <key,value>. */ public static KEY_VALUE_GENERIC MAP KEY_VALUE_GENERIC singleton( final KEY_GENERIC_TYPE key, VALUE_GENERIC_TYPE value ) { return new Singleton KEY_VALUE_GENERIC( key, value ); } #if KEYS_PRIMITIVE || VALUES_PRIMITIVE /** Returns a type-specific immutable map containing only the specified pair. The returned map is serializable and cloneable. * *

Note that albeit the returned map is immutable, its default return value may be changed. * * @param key the only key of the returned map. * @param value the only value of the returned map. * @return a type-specific immutable map containing just the pair <key,value>. */ public static KEY_VALUE_GENERIC MAP KEY_VALUE_GENERIC singleton( final KEY_GENERIC_CLASS key, final VALUE_GENERIC_CLASS value ) { return new Singleton KEY_VALUE_GENERIC( KEY_CLASS2TYPE( key ), VALUE_CLASS2TYPE( value ) ); } #endif /** A synchronized wrapper class for maps. */ public static class SynchronizedMap KEY_VALUE_GENERIC extends FUNCTIONS.SynchronizedFunction KEY_VALUE_GENERIC implements MAP KEY_VALUE_GENERIC, java.io.Serializable { private static final long serialVersionUID = -7046029254386353129L; protected final MAP KEY_VALUE_GENERIC map; protected transient ObjectSet entries; protected transient SET KEY_GENERIC keys; protected transient VALUE_COLLECTION VALUE_GENERIC values; protected SynchronizedMap( final MAP KEY_VALUE_GENERIC m, final Object sync ) { super( m, sync ); this.map = m; } protected SynchronizedMap( final MAP KEY_VALUE_GENERIC m ) { super( m ); this.map = m; } public int size() { synchronized( sync ) { return map.size(); } } public boolean containsKey( final KEY_TYPE k ) { synchronized( sync ) { return map.containsKey( k ); } } public boolean containsValue( final VALUE_TYPE v ) { synchronized( sync ) { return map.containsValue( v ); } } public VALUE_GENERIC_TYPE defaultReturnValue() { synchronized( sync ) { return map.defaultReturnValue(); } } public void defaultReturnValue( final VALUE_GENERIC_TYPE defRetValue ) { synchronized( sync ) { map.defaultReturnValue( defRetValue ); } } public VALUE_GENERIC_TYPE put( final KEY_GENERIC_TYPE k, final VALUE_GENERIC_TYPE v ) { synchronized( sync ) { return map.put( k, v ); } } //public void putAll( final MAP KEY_VALUE_EXTENDS_GENERIC c ) { synchronized( sync ) { map.putAll( c ); } } public void putAll( final Map m ) { synchronized( sync ) { map.putAll( m ); } } public ObjectSet ENTRYSET() { if ( entries == null ) entries = ObjectSets.synchronize( map.ENTRYSET(), sync ); return entries; } public SET KEY_GENERIC keySet() { if ( keys == null ) keys = SETS.synchronize( map.keySet(), sync ); return keys; } public VALUE_COLLECTION VALUE_GENERIC values() { if ( values == null ) return VALUE_COLLECTIONS.synchronize( map.values(), sync ); return values; } public void clear() { synchronized( sync ) { map.clear(); } } public String toString() { synchronized( sync ) { return map.toString(); } } #if KEYS_PRIMITIVE || VALUES_PRIMITIVE /** {@inheritDoc} * @deprecated Please use the corresponding type-specific method instead. */ @Deprecated @Override public VALUE_GENERIC_CLASS put( final KEY_GENERIC_CLASS k, final VALUE_GENERIC_CLASS v ) { synchronized( sync ) { return map.put( k, v ); } } #endif #if KEYS_PRIMITIVE /** {@inheritDoc} * @deprecated Please use the corresponding type-specific method instead. */ @Deprecated @Override public VALUE_GENERIC_TYPE remove( final KEY_GENERIC_TYPE k ) { synchronized( sync ) { return map.remove( k ); } } /** {@inheritDoc} * @deprecated Please use the corresponding type-specific method instead. 
*/ @Deprecated @Override public VALUE_GENERIC_TYPE get( final KEY_GENERIC_TYPE k ) { synchronized( sync ) { return map.get( k ); } } public boolean containsKey( final Object ok ) { synchronized( sync ) { return map.containsKey( ok ); } } #endif #if VALUES_PRIMITIVE /** {@inheritDoc} * @deprecated Please use the corresponding type-specific method instead. */ @Deprecated @Override public boolean containsValue( final Object ov ) { synchronized( sync ) { return map.containsValue( ov ); } } #endif #if KEYS_REFERENCE #if VALUES_PRIMITIVE /** {@inheritDoc} * @deprecated Please use the corresponding type-specific method instead. */ @Deprecated #endif @Override public VALUE_GENERIC_TYPE REMOVE_VALUE( final Object k ) { synchronized( sync ) { return map.REMOVE_VALUE( k ); } } #if VALUES_PRIMITIVE /** {@inheritDoc} * @deprecated Please use the corresponding type-specific method instead. */ @Deprecated #endif @Override public VALUE_GENERIC_TYPE GET_VALUE( final Object k ) { synchronized( sync ) { return map.GET_VALUE( k ); } } #endif public boolean isEmpty() { synchronized( sync ) { return map.isEmpty(); } } public ObjectSet> entrySet() { synchronized( sync ) { return map.entrySet(); } } public int hashCode() { synchronized( sync ) { return map.hashCode(); } } public boolean equals( final Object o ) { synchronized( sync ) { return map.equals( o ); } } } /** Returns a synchronized type-specific map backed by the given type-specific map. * * @param m the map to be wrapped in a synchronized map. * @return a synchronized view of the specified map. * @see java.util.Collections#synchronizedMap(Map) */ public static KEY_VALUE_GENERIC MAP KEY_VALUE_GENERIC synchronize( final MAP KEY_VALUE_GENERIC m ) { return new SynchronizedMap KEY_VALUE_GENERIC( m ); } /** Returns a synchronized type-specific map backed by the given type-specific map, using an assigned object to synchronize. * * @param m the map to be wrapped in a synchronized map. * @param sync an object that will be used to synchronize the access to the map. * @return a synchronized view of the specified map. * @see java.util.Collections#synchronizedMap(Map) */ public static KEY_VALUE_GENERIC MAP KEY_VALUE_GENERIC synchronize( final MAP KEY_VALUE_GENERIC m, final Object sync ) { return new SynchronizedMap KEY_VALUE_GENERIC( m, sync ); } /** An unmodifiable wrapper class for maps. 
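 * <p>A usage sketch, assuming the generated int-to-int classes (names are illustrative):
 * <pre>
 * Int2IntOpenHashMap m = new Int2IntOpenHashMap();
 * m.put( 1, 10 );
 * Int2IntMap view = Int2IntMaps.unmodifiable( m );
 * int v = view.get( 1 );   // reads go through to the backing map
 * // view.put( 2, 20 ) would throw an UnsupportedOperationException;
 * // Int2IntMaps.synchronize( m ) is the analogous thread-safe wrapper.
 * </pre>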
*/ public static class UnmodifiableMap KEY_VALUE_GENERIC extends FUNCTIONS.UnmodifiableFunction KEY_VALUE_GENERIC implements MAP KEY_VALUE_GENERIC, java.io.Serializable { private static final long serialVersionUID = -7046029254386353129L; protected final MAP KEY_VALUE_GENERIC map; protected transient ObjectSet entries; protected transient SET KEY_GENERIC keys; protected transient VALUE_COLLECTION VALUE_GENERIC values; protected UnmodifiableMap( final MAP KEY_VALUE_GENERIC m ) { super( m ); this.map = m; } public int size() { return map.size(); } public boolean containsKey( final KEY_TYPE k ) { return map.containsKey( k ); } public boolean containsValue( final VALUE_TYPE v ) { return map.containsValue( v ); } public VALUE_GENERIC_TYPE defaultReturnValue() { throw new UnsupportedOperationException(); } public void defaultReturnValue( final VALUE_GENERIC_TYPE defRetValue ) { throw new UnsupportedOperationException(); } public VALUE_GENERIC_TYPE put( final KEY_GENERIC_TYPE k, final VALUE_GENERIC_TYPE v ) { throw new UnsupportedOperationException(); } //public void putAll( final MAP KEY_VALUE_EXTENDS_GENERIC c ) { throw new UnsupportedOperationException(); } public void putAll( final Map m ) { throw new UnsupportedOperationException(); } public ObjectSet ENTRYSET() { if ( entries == null ) entries = ObjectSets.unmodifiable( map.ENTRYSET() ); return entries; } public SET KEY_GENERIC keySet() { if ( keys == null ) keys = SETS.unmodifiable( map.keySet() ); return keys; } public VALUE_COLLECTION VALUE_GENERIC values() { if ( values == null ) return VALUE_COLLECTIONS.unmodifiable( map.values() ); return values; } public void clear() { throw new UnsupportedOperationException(); } public String toString() { return map.toString(); } #if KEYS_PRIMITIVE && VALUES_PRIMITIVE /** {@inheritDoc} * @deprecated Please use the corresponding type-specific method instead. */ @Deprecated @Override public VALUE_GENERIC_CLASS put( final KEY_GENERIC_CLASS k, final VALUE_GENERIC_CLASS v ) { throw new UnsupportedOperationException(); } #endif #if KEYS_PRIMITIVE /** {@inheritDoc} * @deprecated Please use the corresponding type-specific method instead. */ @Deprecated @Override public VALUE_GENERIC_TYPE remove( final KEY_GENERIC_TYPE k ) { throw new UnsupportedOperationException(); } /** {@inheritDoc} * @deprecated Please use the corresponding type-specific method instead. */ @Deprecated @Override public VALUE_GENERIC_TYPE get( final KEY_GENERIC_TYPE k ) { return map.get( k ); } public boolean containsKey( final Object ok ) { return map.containsKey( ok ); } #endif #if VALUES_PRIMITIVE public boolean containsValue( final Object ov ) { return map.containsValue( ov ); } #endif #if KEYS_REFERENCE || VALUES_REFERENCE #if KEYS_PRIMITIVE /** {@inheritDoc} * @deprecated Please use the corresponding type-specific method instead. */ @Deprecated #endif @Override public VALUE_GENERIC_TYPE REMOVE_VALUE( final Object k ) { throw new UnsupportedOperationException(); } #if KEYS_PRIMITIVE /** {@inheritDoc} * @deprecated Please use the corresponding type-specific method instead. */ @Deprecated #endif @Override public VALUE_GENERIC_TYPE GET_VALUE( final Object k ) { return map.GET_VALUE( k ); } #endif public boolean isEmpty() { return map.isEmpty(); } public ObjectSet> entrySet() { return ObjectSets.unmodifiable( map.entrySet() ); } } /** Returns an unmodifiable type-specific map backed by the given type-specific map. * * @param m the map to be wrapped in an unmodifiable map. * @return an unmodifiable view of the specified map. 
* @see java.util.Collections#unmodifiableMap(Map) */ public static KEY_VALUE_GENERIC MAP KEY_VALUE_GENERIC unmodifiable( final MAP KEY_VALUE_GENERIC m ) { return new UnmodifiableMap KEY_VALUE_GENERIC( m ); } } fastutil-7.1.0/drv/OpenCustomHashMap.drv0000777000000000000000000000000013050701620021565 2OpenHashMap.drvustar rootrootfastutil-7.1.0/drv/OpenCustomHashSet.drv0000777000000000000000000000000013050701620021621 2OpenHashSet.drvustar rootrootfastutil-7.1.0/drv/OpenHashBigSet.drv0000664000000000000000000012447413050701620016173 0ustar rootroot/* * Copyright (C) 2002-2016 Sebastiano Vigna * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package PACKAGE; import it.unimi.dsi.fastutil.BigArrays; import it.unimi.dsi.fastutil.Hash; import it.unimi.dsi.fastutil.Size64; import it.unimi.dsi.fastutil.HashCommon; import static it.unimi.dsi.fastutil.HashCommon.bigArraySize; import static it.unimi.dsi.fastutil.HashCommon.maxFill; import java.util.Collection; import java.util.Iterator; import java.util.NoSuchElementException; /** A type-specific hash big set with with a fast, small-footprint implementation. * *

Instances of this class use a hash table to represent a big set: the number * of elements in the set is limited only by the amount of core memory. The table * (backed by a {@linkplain it.unimi.dsi.fastutil.BigArrays big array}) is * filled up to a specified load factor, and then doubled in size to * accommodate new entries. If the table is emptied below one fourth * of the load factor, it is halved in size. However, halving is * not performed when deleting entries from an iterator, as it would interfere * with the iteration process. * *
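 * <p>A minimal usage sketch, assuming the long instantiation generated from this template
 * (the concrete class name is illustrative):
 * <pre>
 * LongOpenHashBigSet s = new LongOpenHashBigSet();
 * long i = 100000000L;            // the set can grow beyond Integer.MAX_VALUE elements, memory permitting
 * while( i-- != 0 ) s.add( i );
 * boolean b = s.contains( 42 );   // true
 * long n = s.size64();            // use size64(), as size() saturates at Integer.MAX_VALUE
 * </pre>
 *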

Note that {@link #clear()} does not modify the hash table size. * Rather, a family of {@linkplain #trim() trimming * methods} lets you control the size of the table; this is particularly useful * if you reuse instances of this class. * *

The methods of this class are about 30% slower than those of the corresponding non-big set. * * @see Hash * @see HashCommon */ public class OPEN_HASH_BIG_SET KEY_GENERIC extends ABSTRACT_SET KEY_GENERIC implements java.io.Serializable, Cloneable, Hash, Size64 { private static final long serialVersionUID = 0L; private static final boolean ASSERTS = ASSERTS_VALUE; /** The big array of keys. */ protected transient KEY_GENERIC_TYPE[][] key; /** The mask for wrapping a position counter. */ protected transient long mask; /** The mask for wrapping a segment counter. */ protected transient int segmentMask; /** The mask for wrapping a base counter. */ protected transient int baseMask; /** Whether this set contains the null key. */ protected transient boolean containsNull; /** The current table size (always a power of 2). */ protected transient long n; /** Threshold after which we rehash. It must be the table size times {@link #f}. */ protected transient long maxFill; /** The acceptable load factor. */ protected final float f; /** Number of entries in the set. */ protected long size; /** Initialises the mask values. */ private void initMasks() { mask = n - 1; /* Note that either we have more than one segment, and in this case all segments * are BigArrays.SEGMENT_SIZE long, or we have exactly one segment whose length * is a power of two. */ segmentMask = key[ 0 ].length - 1; baseMask = key.length - 1; } /** Creates a new hash big set. * *
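 * <p>A sizing sketch (class name as generated for long keys; the arithmetic just instantiates
 * the rule stated below):
 * <pre>
 * // expected/f = 1500/0.75 = 2000, so the initial table size is 2048,
 * // the least power of two greater than 2000.
 * LongOpenHashBigSet s = new LongOpenHashBigSet( 1500, 0.75f );
 * </pre>
 *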

The actual table size will be the least power of two greater than expected/f. * * @param expected the expected number of elements in the set. * @param f the load factor. */ SUPPRESS_WARNINGS_KEY_UNCHECKED public OPEN_HASH_BIG_SET( final long expected, final float f ) { if ( f <= 0 || f > 1 ) throw new IllegalArgumentException( "Load factor must be greater than 0 and smaller than or equal to 1" ); if ( n < 0 ) throw new IllegalArgumentException( "The expected number of elements must be nonnegative" ); this.f = f; n = bigArraySize( expected, f ); maxFill = maxFill( n, f ); key = KEY_GENERIC_BIG_ARRAY_CAST BIG_ARRAYS.newBigArray( n ); initMasks(); } /** Creates a new hash big set with {@link Hash#DEFAULT_LOAD_FACTOR} as load factor. * * @param expected the expected number of elements in the hash big set. */ public OPEN_HASH_BIG_SET( final long expected ) { this( expected, DEFAULT_LOAD_FACTOR ); } /** Creates a new hash big set with initial expected {@link Hash#DEFAULT_INITIAL_SIZE} elements * and {@link Hash#DEFAULT_LOAD_FACTOR} as load factor. */ public OPEN_HASH_BIG_SET() { this( DEFAULT_INITIAL_SIZE, DEFAULT_LOAD_FACTOR ); } /** Creates a new hash big set copying a given collection. * * @param c a {@link Collection} to be copied into the new hash big set. * @param f the load factor. */ public OPEN_HASH_BIG_SET( final Collection c, final float f ) { this( c.size(), f ); addAll( c ); } /** Creates a new hash big set with {@link Hash#DEFAULT_LOAD_FACTOR} as load factor * copying a given collection. * * @param c a {@link Collection} to be copied into the new hash big set. */ public OPEN_HASH_BIG_SET( final Collection c ) { this( c, DEFAULT_LOAD_FACTOR ); } /** Creates a new hash big set copying a given type-specific collection. * * @param c a type-specific collection to be copied into the new hash big set. * @param f the load factor. */ public OPEN_HASH_BIG_SET( final COLLECTION KEY_EXTENDS_GENERIC c, final float f ) { this( c.size(), f ); addAll( c ); } /** Creates a new hash big set with {@link Hash#DEFAULT_LOAD_FACTOR} as load factor * copying a given type-specific collection. * * @param c a type-specific collection to be copied into the new hash big set. */ public OPEN_HASH_BIG_SET( final COLLECTION KEY_EXTENDS_GENERIC c ) { this( c, DEFAULT_LOAD_FACTOR ); } /** Creates a new hash big set using elements provided by a type-specific iterator. * * @param i a type-specific iterator whose elements will fill the new hash big set. * @param f the load factor. */ public OPEN_HASH_BIG_SET( final STD_KEY_ITERATOR KEY_EXTENDS_GENERIC i, final float f ) { this( DEFAULT_INITIAL_SIZE, f ); while( i.hasNext() ) add( i.NEXT_KEY() ); } /** Creates a new hash big set with {@link Hash#DEFAULT_LOAD_FACTOR} as load factor using elements provided by a type-specific iterator. * * @param i a type-specific iterator whose elements will fill the new hash big set. */ public OPEN_HASH_BIG_SET( final STD_KEY_ITERATOR KEY_EXTENDS_GENERIC i ) { this( i, DEFAULT_LOAD_FACTOR ); } #if KEYS_PRIMITIVE /** Creates a new hash big set using elements provided by an iterator. * * @param i an iterator whose elements will fill the new hash big set. * @param f the load factor. */ public OPEN_HASH_BIG_SET( final Iterator i, final float f ) { this( ITERATORS.AS_KEY_ITERATOR( i ), f ); } /** Creates a new hash big set with {@link Hash#DEFAULT_LOAD_FACTOR} as load factor using elements provided by an iterator. * * @param i an iterator whose elements will fill the new hash big set. 
*/ public OPEN_HASH_BIG_SET( final Iterator i ) { this( ITERATORS.AS_KEY_ITERATOR( i ) ); } #endif /** Creates a new hash big set and fills it with the elements of a given array. * * @param a an array whose elements will be used to fill the new hash big set. * @param offset the first element to use. * @param length the number of elements to use. * @param f the load factor. */ public OPEN_HASH_BIG_SET( final KEY_GENERIC_TYPE[] a, final int offset, final int length, final float f ) { this( length < 0 ? 0 : length, f ); ARRAYS.ensureOffsetLength( a, offset, length ); for( int i = 0; i < length; i++ ) add( a[ offset + i ] ); } /** Creates a new hash big set with {@link Hash#DEFAULT_LOAD_FACTOR} as load factor and fills it with the elements of a given array. * * @param a an array whose elements will be used to fill the new hash big set. * @param offset the first element to use. * @param length the number of elements to use. */ public OPEN_HASH_BIG_SET( final KEY_GENERIC_TYPE[] a, final int offset, final int length ) { this( a, offset, length, DEFAULT_LOAD_FACTOR ); } /** Creates a new hash big set copying the elements of an array. * * @param a an array to be copied into the new hash big set. * @param f the load factor. */ public OPEN_HASH_BIG_SET( final KEY_GENERIC_TYPE[] a, final float f ) { this( a, 0, a.length, f ); } /** Creates a new hash big set with {@link Hash#DEFAULT_LOAD_FACTOR} as load factor * copying the elements of an array. * * @param a an array to be copied into the new hash big set. */ public OPEN_HASH_BIG_SET( final KEY_GENERIC_TYPE[] a ) { this( a, DEFAULT_LOAD_FACTOR ); } private long realSize() { return containsNull ? size - 1 : size; } private void ensureCapacity( final long capacity ) { final long needed = bigArraySize( capacity, f ); if ( needed > n ) rehash( needed ); } #if KEYS_PRIMITIVE /** {@inheritDoc} */ public boolean addAll( COLLECTION c ) { final long size = c instanceof Size64 ? ((Size64)c).size64() : c.size(); if ( f <= .5 ) ensureCapacity( size ); // The resulting collection will be size for c.size() elements else ensureCapacity( size64() + size ); // The resulting collection will be sized for size() + c.size() elements return super.addAll( c ); } #endif /** {@inheritDoc} */ public boolean addAll( Collection c ) { final long size = c instanceof Size64 ? ((Size64)c).size64() : c.size(); // The resulting collection will be at least c.size() big if ( f <= .5 ) ensureCapacity( size ); // The resulting collection will be sized for c.size() elements else ensureCapacity( size64() + size ); // The resulting collection will be sized for size() + c.size() elements return super.addAll( c ); } public boolean add( final KEY_GENERIC_TYPE k ) { int displ, base; if ( KEY_IS_NULL( k ) ) { if ( containsNull ) return false; containsNull = true; } else { KEY_GENERIC_TYPE curr; final KEY_GENERIC_TYPE[][] key = this.key; final long h = KEY2LONGHASH( k ); // The starting point. if ( ! KEY_IS_NULL( curr = key[ base = (int)( ( h & mask ) >>> BigArrays.SEGMENT_SHIFT ) ][ displ = (int)( h & segmentMask ) ] ) ) { if ( KEY_EQUALS_NOT_NULL( curr, k ) ) return false; while( ! KEY_IS_NULL( curr = key[ base = ( base + ( ( displ = ( displ + 1 ) & segmentMask ) == 0 ? 1 : 0 ) ) & baseMask ][ displ ] ) ) if ( KEY_EQUALS_NOT_NULL( curr, k ) ) return false; } key[ base ][ displ ] = k; } if ( size++ >= maxFill ) rehash( 2 * n ); if ( ASSERTS ) checkTable(); return true; } #if KEY_CLASS_Object /** Add a random element if not present, get the existing value if already present. 
 *
 * This is equivalent to (but faster than) doing a:
 * <pre>
 * K exist = set.get(k);
 * if (exist == null) {
 *   set.add(k);
 *   exist = k;
 * }
 * </pre>
*/ public KEY_GENERIC_TYPE addOrGet( final KEY_GENERIC_TYPE k ) { int displ, base; if ( KEY_IS_NULL( k ) ) { if ( containsNull ) return null; containsNull = true; } else { KEY_GENERIC_TYPE curr; final KEY_GENERIC_TYPE[][] key = this.key; final long h = KEY2LONGHASH( k ); // The starting point. if ( ! KEY_IS_NULL( curr = key[ base = (int)( ( h & mask ) >>> BigArrays.SEGMENT_SHIFT ) ][ displ = (int)( h & segmentMask ) ] ) ) { if ( KEY_EQUALS_NOT_NULL( curr, k ) ) return curr; while( ! KEY_IS_NULL( curr = key[ base = ( base + ( ( displ = ( displ + 1 ) & segmentMask ) == 0 ? 1 : 0 ) ) & baseMask ][ displ ] ) ) if ( KEY_EQUALS_NOT_NULL( curr, k ) ) return curr; } key[ base ][ displ ] = k; } if ( size++ >= maxFill ) rehash( 2 * n ); if ( ASSERTS ) checkTable(); return k; } #endif /** Shifts left entries with the specified hash code, starting at the specified position, * and empties the resulting free entry. * * @param pos a starting position. */ protected final void shiftKeys( long pos ) { // Shift entries with the same hash. long last, slot; final KEY_GENERIC_TYPE[][] key = this.key; for(;;) { pos = ( ( last = pos ) + 1 ) & mask; for(;;) { if ( KEY_IS_NULL( BIG_ARRAYS.get( key, pos ) ) ) { BIG_ARRAYS.set( key, last, KEY_NULL ); return; } slot = KEY2LONGHASH( BIG_ARRAYS.get( key, pos ) ) & mask; if ( last <= pos ? last >= slot || slot > pos : last >= slot && slot > pos ) break; pos = ( pos + 1 ) & mask; } BIG_ARRAYS.set( key, last, BIG_ARRAYS.get( key, pos ) ); } } private boolean removeEntry( final int base, final int displ ) { shiftKeys( base * (long)BigArrays.SEGMENT_SIZE + displ ); if ( --size < maxFill / 4 && n > DEFAULT_INITIAL_SIZE ) rehash( n / 2 ); return true; } private boolean removeNullEntry() { containsNull = false; if ( --size < maxFill / 4 && n > DEFAULT_INITIAL_SIZE ) rehash( n / 2 ); return true; } public boolean rem( final KEY_TYPE k ) { if ( KEY_IS_NULL( k ) ) { if ( containsNull ) return removeNullEntry(); return false; } KEY_GENERIC_TYPE curr; final KEY_GENERIC_TYPE[][] key = this.key; final long h = KEY2LONGHASH( k ); int displ, base; // The starting point. if ( KEY_IS_NULL( curr = key[ base = (int)( ( h & mask ) >>> BigArrays.SEGMENT_SHIFT ) ][ displ = (int)( h & segmentMask ) ] ) ) return false; if ( KEY_EQUALS_NOT_NULL( curr, k ) ) return removeEntry( base, displ ); while( true ) { if ( KEY_IS_NULL( curr = key[ base = ( base + ( ( displ = ( displ + 1 ) & segmentMask ) == 0 ? 1 : 0 ) ) & baseMask ][ displ ] ) ) return false; if ( KEY_EQUALS_NOT_NULL( curr, k ) ) return removeEntry( base, displ ); } } public boolean contains( final KEY_TYPE k ) { if ( KEY_IS_NULL( k ) ) return containsNull; KEY_GENERIC_TYPE curr; final KEY_GENERIC_TYPE[][] key = this.key; final long h = KEY2LONGHASH( k ); int displ, base; // The starting point. if ( KEY_IS_NULL( curr = key[ base = (int)( ( h & mask ) >>> BigArrays.SEGMENT_SHIFT ) ][ displ = (int)( h & segmentMask ) ] ) ) return false; if ( KEY_EQUALS_NOT_NULL( curr, k ) ) return true; while( true ) { if ( KEY_IS_NULL( curr = key[ base = ( base + ( ( displ = ( displ + 1 ) & segmentMask ) == 0 ? 1 : 0 ) ) & baseMask ][ displ ] ) ) return false; if ( KEY_EQUALS_NOT_NULL( curr, k ) ) return true; } } #if KEY_CLASS_Object /** Returns the element of this set that is equal to the given key, or null. * @return the element of this set that is equal to the given key, or null. 
*/ public K get( final KEY_TYPE k ) { if ( k == null ) return null; // This is correct independently of the value of containsNull KEY_GENERIC_TYPE curr; final KEY_GENERIC_TYPE[][] key = this.key; final long h = KEY2LONGHASH( k ); int displ, base; // The starting point. if ( KEY_IS_NULL( curr = key[ base = (int)( ( h & mask ) >>> BigArrays.SEGMENT_SHIFT ) ][ displ = (int)( h & segmentMask ) ] ) ) return null; if ( KEY_EQUALS_NOT_NULL( curr, k ) ) return curr; while( true ) { if ( KEY_IS_NULL( curr = key[ base = ( base + ( ( displ = ( displ + 1 ) & segmentMask ) == 0 ? 1 : 0 ) ) & baseMask ][ displ ] ) ) return null; if ( KEY_EQUALS_NOT_NULL( curr, k ) ) return curr; } } #endif /* Removes all elements from this set. * *

To increase object reuse, this method does not change the table size. * If you want to reduce the table size, you must use {@link #trim(long)}. * */ public void clear() { if ( size == 0 ) return; size = 0; containsNull = false; BIG_ARRAYS.fill( key, KEY_NULL ); } /** An iterator over a hash big set. */ private class SetIterator extends KEY_ABSTRACT_ITERATOR KEY_GENERIC { /** The base of the last entry returned, if positive or zero; initially, the number of components of the key array. If negative, the last element returned was that of index {@code - base - 1} from the {@link #wrapped} list. */ int base = key.length; /** The displacement of the last entry returned; initially, zero. */ int displ; /** The index of the last entry that has been returned (or {@link Long#MIN_VALUE} if {@link #base} is negative). It is -1 if either we did not return an entry yet, or the last returned entry has been removed. */ long last = -1; /** A downward counter measuring how many entries must still be returned. */ long c = size; /** A boolean telling us whether we should return the null key. */ boolean mustReturnNull = OPEN_HASH_BIG_SET.this.containsNull; /** A lazily allocated list containing elements that have wrapped around the table because of removals. */ ARRAY_LIST KEY_GENERIC wrapped; public boolean hasNext() { return c != 0; } public KEY_GENERIC_TYPE NEXT_KEY() { if ( ! hasNext() ) throw new NoSuchElementException(); c--; if ( mustReturnNull ) { mustReturnNull = false; last = n; return KEY_NULL; } final KEY_GENERIC_TYPE[][] key = OPEN_HASH_BIG_SET.this.key; for(;;) { if ( displ == 0 && base <= 0 ) { // We are just enumerating elements from the wrapped list. last = Long.MIN_VALUE; return wrapped.GET_KEY( - ( --base ) - 1 ); } if ( displ-- == 0 ) displ = key[ --base ].length - 1; final KEY_GENERIC_TYPE k = key[ base ][ displ ]; if ( ! KEY_IS_NULL( k ) ) { last = base * (long)BigArrays.SEGMENT_SIZE + displ; return k; } } } /** Shifts left entries with the specified hash code, starting at the specified position, * and empties the resulting free entry. * * @param pos a starting position. */ private final void shiftKeys( long pos ) { // Shift entries with the same hash. long last, slot; KEY_GENERIC_TYPE curr; final KEY_GENERIC_TYPE[][] key = OPEN_HASH_BIG_SET.this.key; for(;;) { pos = ( ( last = pos ) + 1 ) & mask; for(;;) { if( KEY_IS_NULL( curr = BIG_ARRAYS.get( key, pos ) ) ) { BIG_ARRAYS.set( key, last, KEY_NULL ); return; } slot = KEY2LONGHASH( curr ) & mask; if ( last <= pos ? last >= slot || slot > pos : last >= slot && slot > pos ) break; pos = ( pos + 1 ) & mask; } if ( pos < last ) { // Wrapped entry. if ( wrapped == null ) wrapped = new ARRAY_LIST KEY_GENERIC(); wrapped.add( BIG_ARRAYS.get( key, pos ) ); } BIG_ARRAYS.set( key, last, curr ); } } public void remove() { if ( last == -1 ) throw new IllegalStateException(); if ( last == n ) OPEN_HASH_BIG_SET.this.containsNull = false; else if ( base >= 0 ) shiftKeys( last ); else { // We're removing wrapped entries. #if KEYS_REFERENCE OPEN_HASH_BIG_SET.this.remove( wrapped.set( - base - 1, null ) ); #else OPEN_HASH_BIG_SET.this.remove( wrapped.GET_KEY( - base - 1 ) ); #endif last = -1; // Note that we must not decrement size return; } size--; last = -1; // You can no longer remove this entry. if ( ASSERTS ) checkTable(); } } public KEY_ITERATOR KEY_GENERIC iterator() { return new SetIterator(); } /** A no-op for backward compatibility. The kind of tables implemented by * this class never need rehashing. * *

If you need to reduce the table size to fit exactly * this set, use {@link #trim()}. * * @return true. * @see #trim() * @deprecated A no-op. */ @Deprecated public boolean rehash() { return true; } /** Rehashes this set, making the table as small as possible. * *

This method rehashes the table to the smallest size satisfying the * load factor. It can be used when the set will not be changed anymore, so * to optimize access speed and size. * *
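 * <p>A sketch of the freeze-and-trim pattern (long instantiation, illustrative names;
 * fill() is a hypothetical method that adds far fewer than one million elements):
 * <pre>
 * LongOpenHashBigSet s = new LongOpenHashBigSet( 1000000 );  // generously sized
 * fill( s );
 * s.trim();   // shrink the table to the smallest size allowed by the load factor
 * </pre>
 *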

If the table size is already the minimum possible, this method * does nothing. * * @return true if there was enough memory to trim the set. * @see #trim(long) */ public boolean trim() { final long l = bigArraySize( size, f ); if ( l >= n || size > maxFill( l, f ) ) return true; try { rehash( l ); } catch(OutOfMemoryError cantDoIt) { return false; } return true; } /** Rehashes this set if the table is too large. * *

Let N be the smallest table size that can hold * max(n,{@link #size64()}) entries, still satisfying the load factor. If the current * table size is smaller than or equal to N, this method does * nothing. Otherwise, it rehashes this set in a table of size * N. * *
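 * <p>A reuse sketch (long instantiation, illustrative names; batches() and process() are
 * hypothetical):
 * <pre>
 * LongOpenHashBigSet s = new LongOpenHashBigSet();
 * for( long[] batch : batches() ) {
 *     s.clear();                        // does not shrink the table
 *     for( long x : batch ) s.add( x );
 *     process( s );
 *     s.trim( 1000000 );                // drop an oversized table back towards a typical size
 * }
 * </pre>
 *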

This method is useful when reusing sets. {@linkplain #clear() Clearing a * set} leaves the table size untouched. If you are reusing a set * many times, you can call this method with a typical * size to avoid keeping around a very large table just * because of a few large transient sets. * * @param n the threshold for the trimming. * @return true if there was enough memory to trim the set. * @see #trim() */ public boolean trim( final long n ) { final long l = bigArraySize( n, f ); if ( this.n <= l ) return true; try { rehash( l ); } catch( OutOfMemoryError cantDoIt ) { return false; } return true; } /** Resizes the set. * *

This method implements the basic rehashing strategy, and may be * overriden by subclasses implementing different rehashing strategies (e.g., * disk-based rehashing). However, you should not override this method * unless you understand the internal workings of this class. * * @param newN the new size */ SUPPRESS_WARNINGS_KEY_UNCHECKED protected void rehash( final long newN ) { final KEY_GENERIC_TYPE key[][] = this.key; final KEY_GENERIC_TYPE newKey[][] = KEY_GENERIC_BIG_ARRAY_CAST BIG_ARRAYS.newBigArray( newN ); final long mask = newN - 1; // Note that this is used by the hashing macro final int newSegmentMask = newKey[ 0 ].length - 1; final int newBaseMask = newKey.length - 1; int base = 0, displ = 0, b, d; long h; KEY_GENERIC_TYPE k; for( long i = realSize(); i-- != 0; ) { while( KEY_IS_NULL( key[ base ][ displ ] ) ) base = ( base + ( ( displ = ( displ + 1 ) & segmentMask ) == 0 ? 1 : 0 ) ); k = key[ base ][ displ ]; h = KEY2LONGHASH( k ); // The starting point. if ( ! KEY_IS_NULL( newKey[ b = (int)( ( h & mask ) >>> BigArrays.SEGMENT_SHIFT ) ][ d = (int)( h & newSegmentMask ) ] ) ) while( ! KEY_IS_NULL( newKey[ b = ( b + ( ( d = ( d + 1 ) & newSegmentMask ) == 0 ? 1 : 0 ) ) & newBaseMask ][ d ] ) ); newKey[ b ][ d ] = k; base = ( base + ( ( displ = ( displ + 1 ) & segmentMask ) == 0 ? 1 : 0 ) ); } this.n = newN; this.key = newKey; initMasks(); maxFill = maxFill( n, f ); } @Deprecated public int size() { return (int)Math.min( Integer.MAX_VALUE, size ); } public long size64() { return size; } public boolean isEmpty() { return size == 0; } /** Returns a deep copy of this big set. * *

This method performs a deep copy of this big hash set; the data stored in the * set, however, is not cloned. Note that this makes a difference only for object keys. * * @return a deep copy of this big set. */ SUPPRESS_WARNINGS_KEY_UNCHECKED public OPEN_HASH_BIG_SET KEY_GENERIC clone() { OPEN_HASH_BIG_SET KEY_GENERIC c; try { c = (OPEN_HASH_BIG_SET KEY_GENERIC)super.clone(); } catch(CloneNotSupportedException cantHappen) { throw new InternalError(); } c.key = BIG_ARRAYS.copy( key ); c.containsNull = containsNull; return c; } /** Returns a hash code for this set. * * This method overrides the generic method provided by the superclass. * Since equals() is not overriden, it is important * that the value returned by this method is the same value as * the one returned by the overriden method. * * @return a hash code for this set. */ public int hashCode() { final KEY_GENERIC_TYPE key[][] = this.key; int h = 0, base = 0, displ = 0; for( long j = realSize(); j-- != 0; ) { while( KEY_IS_NULL( key[ base ][ displ ] ) ) base = ( base + ( ( displ = ( displ + 1 ) & segmentMask ) == 0 ? 1 : 0 ) ); #if KEYS_REFERENCE if ( this != key[ base ][ displ ] ) #endif h += KEY2JAVAHASH_NOT_NULL( key[ base ][ displ ] ); base = ( base + ( ( displ = ( displ + 1 ) & segmentMask ) == 0 ? 1 : 0 ) ); } return h; } private void writeObject(java.io.ObjectOutputStream s) throws java.io.IOException { final KEY_ITERATOR KEY_GENERIC i = iterator(); s.defaultWriteObject(); for( long j = size; j-- != 0; ) s.WRITE_KEY( i.NEXT_KEY() ); } SUPPRESS_WARNINGS_KEY_UNCHECKED private void readObject(java.io.ObjectInputStream s) throws java.io.IOException, ClassNotFoundException { s.defaultReadObject(); n = bigArraySize( size, f ); maxFill = maxFill( n, f ); final KEY_GENERIC_TYPE[][] key = this.key = KEY_GENERIC_BIG_ARRAY_CAST BIG_ARRAYS.newBigArray( n ); initMasks(); long h; KEY_GENERIC_TYPE k; int base, displ; for( long i = size; i-- != 0; ) { k = KEY_GENERIC_CAST s.READ_KEY(); if ( KEY_IS_NULL( k ) ) containsNull = true; else { h = KEY2LONGHASH( k ); if ( ! KEY_IS_NULL( key[ base = (int)( ( h & mask ) >>> BigArrays.SEGMENT_SHIFT ) ][ displ = (int)( h & segmentMask ) ] ) ) while( ! KEY_IS_NULL( key[ base = ( base + ( ( displ = ( displ + 1 ) & segmentMask ) == 0 ? 1 : 0 ) ) & baseMask ][ displ ] ) ); key[ base ][ displ ] = k; } } if ( ASSERTS ) checkTable(); } #ifdef ASSERTS_CODE private void checkTable() { assert ( n & -n ) == n : "Table length is not a power of two: " + n; assert n == BIG_ARRAYS.length( key ); long n = this.n; while( n-- != 0 ) if ( ! KEY_IS_NULL( BIG_ARRAYS.get( key, n ) ) && ! contains( BIG_ARRAYS.get( key, n ) ) ) throw new AssertionError( "Hash table has key " + BIG_ARRAYS.get( key, n ) + " marked as occupied, but the key does not belong to the table" ); #if KEYS_PRIMITIVE java.util.HashSet s = new java.util.HashSet (); #else java.util.HashSet s = new java.util.HashSet(); #endif for( long i = size(); i-- != 0; ) if ( ! KEY_IS_NULL( BIG_ARRAYS.get( key, i ) ) && ! 
s.add( BIG_ARRAYS.get( key, i ) ) ) throw new AssertionError( "Key " + BIG_ARRAYS.get( key, i ) + " appears twice" ); } #else private void checkTable() {} #endif #ifdef TEST private static long seed = System.currentTimeMillis(); private static java.util.Random r = new java.util.Random( seed ); private static KEY_TYPE genKey() { #if KEY_CLASS_Byte || KEY_CLASS_Short || KEY_CLASS_Character return (KEY_TYPE)(r.nextInt()); #elif KEYS_PRIMITIVE return r.NEXT_KEY(); #elif KEY_CLASS_Object return Integer.toBinaryString( r.nextInt() ); #else return new java.io.Serializable() {}; #endif } private static final class ArrayComparator implements java.util.Comparator { public int compare( Object a, Object b ) { byte[] aa = (byte[])a; byte[] bb = (byte[])b; int length = Math.min( aa.length, bb.length ); for( int i = 0; i < length; i++ ) { if ( aa[ i ] < bb[ i ] ) return -1; if ( aa[ i ] > bb[ i ] ) return 1; } return aa.length == bb.length ? 0 : ( aa.length < bb.length ? -1 : 1 ); } } private static final class MockSet extends java.util.TreeSet { private java.util.List list = new java.util.ArrayList(); public MockSet( java.util.Comparator c ) { super( c ); } public boolean add( Object k ) { if ( ! contains( k ) ) list.add( k ); return super.add( k ); } public boolean addAll( Collection c ) { java.util.Iterator i = c.iterator(); boolean result = false; while( i.hasNext() ) result |= add( i.next() ); return result; } public boolean removeAll( Collection c ) { java.util.Iterator i = c.iterator(); boolean result = false; while( i.hasNext() ) result |= remove( i.next() ); return result; } public boolean remove( Object k ) { if ( contains( k ) ) { int i = list.size(); while( i-- != 0 ) if ( comparator().compare( list.get( i ), k ) == 0 ) { list.remove( i ); break; } } return super.remove( k ); } private void justRemove( Object k ) { super.remove( k ); } public java.util.Iterator iterator() { return new java.util.Iterator() { final java.util.Iterator iterator = list.iterator(); Object curr; public Object next() { return curr = iterator.next(); } public boolean hasNext() { return iterator.hasNext(); } public void remove() { justRemove( curr ); iterator.remove(); } }; } } private static java.text.NumberFormat format = new java.text.DecimalFormat( "#,###.00" ); private static java.text.FieldPosition fp = new java.text.FieldPosition( 0 ); private static String format( double d ) { StringBuffer s = new StringBuffer(); return format.format( d, s, fp ).toString(); } private static void speedTest( int n, float f, boolean comp ) { int i, j; OPEN_HASH_BIG_SET m; java.util.HashSet t; KEY_TYPE k[] = new KEY_TYPE[n]; KEY_TYPE nk[] = new KEY_TYPE[n]; long ms; for( i = 0; i < n; i++ ) { k[i] = genKey(); nk[i] = genKey(); } double totAdd = 0, totYes = 0, totNo = 0, totIter = 0, totRemYes = 0, totRemNo = 0, d; if ( comp ) { for( j = 0; j < 20; j++ ) { t = new java.util.HashSet( 16 ); /* We add pairs to t. */ ms = System.currentTimeMillis(); for( i = 0; i < n; i++ ) t.add( KEY2OBJ( k[i] ) ); d = 1.0 * n / (System.currentTimeMillis() - ms ); if ( j > 2 ) totAdd += d; System.out.print("Add: " + format( d ) +" K/s " ); /* We check for pairs in t. */ ms = System.currentTimeMillis(); for( i = 0; i < n; i++ ) t.contains( KEY2OBJ( k[i] ) ); d = 1.0 * n / (System.currentTimeMillis() - ms ); if ( j > 2 ) totYes += d; System.out.print("Yes: " + format( d ) +" K/s " ); /* We check for pairs not in t. 
*/ ms = System.currentTimeMillis(); for( i = 0; i < n; i++ ) t.contains( KEY2OBJ( nk[i] ) ); d = 1.0 * n / (System.currentTimeMillis() - ms ); if ( j > 2 ) totNo += d; System.out.print("No: " + format( d ) +" K/s " ); /* We iterate on t. */ ms = System.currentTimeMillis(); for( java.util.Iterator it = t.iterator(); it.hasNext(); it.next() ); d = 1.0 * n / (System.currentTimeMillis() - ms ); if ( j > 2 ) totIter += d; System.out.print("Iter: " + format( d ) +" K/s " ); /* We delete pairs not in t. */ ms = System.currentTimeMillis(); for( i = 0; i < n; i++ ) t.remove( KEY2OBJ( nk[i] ) ); d = 1.0 * n / (System.currentTimeMillis() - ms ); if ( j > 2 ) totRemNo += d; System.out.print("RemNo: " + format( d ) +" K/s " ); /* We delete pairs in t. */ ms = System.currentTimeMillis(); for( i = 0; i < n; i++ ) t.remove( KEY2OBJ( k[i] ) ); d = 1.0 * n / (System.currentTimeMillis() - ms ); if ( j > 2 ) totRemYes += d; System.out.print("RemYes: " + format( d ) +" K/s " ); System.out.println(); } System.out.println(); System.out.println( "java.util Add: " + format( totAdd/(j-3) ) + " K/s Yes: " + format( totYes/(j-3) ) + " K/s No: " + format( totNo/(j-3) ) + " K/s Iter: " + format( totIter/(j-3) ) + " K/s RemNo: " + format( totRemNo/(j-3) ) + " K/s RemYes: " + format( totRemYes/(j-3) ) + "K/s" ); System.out.println(); totAdd = totYes = totNo = totIter = totRemYes = totRemNo = 0; } for( j = 0; j < 20; j++ ) { m = new OPEN_HASH_BIG_SET( 16, f ); /* We add pairs to m. */ ms = System.currentTimeMillis(); for( i = 0; i < n; i++ ) m.add( k[i] ); d = 1.0 * n / (System.currentTimeMillis() - ms ); if ( j > 2 ) totAdd += d; System.out.print("Add: " + format( d ) +" K/s " ); /* We check for pairs in m. */ ms = System.currentTimeMillis(); for( i = 0; i < n; i++ ) m.contains( k[i] ); d = 1.0 * n / (System.currentTimeMillis() - ms ); if ( j > 2 ) totYes += d; System.out.print("Yes: " + format( d ) +" K/s " ); /* We check for pairs not in m. */ ms = System.currentTimeMillis(); for( i = 0; i < n; i++ ) m.contains( nk[i] ); d = 1.0 * n / (System.currentTimeMillis() - ms ); if ( j > 2 ) totNo += d; System.out.print("No: " + format( d ) +" K/s " ); /* We iterate on m. */ ms = System.currentTimeMillis(); for( KEY_ITERATOR it = (KEY_ITERATOR)m.iterator(); it.hasNext(); it.NEXT_KEY() ); d = 1.0 * n / (System.currentTimeMillis() - ms ); if ( j > 2 ) totIter += d; System.out.print("Iter: " + format( d ) +" K/s " ); /* We delete pairs not in m. */ ms = System.currentTimeMillis(); for( i = 0; i < n; i++ ) m.remove( nk[i] ); d = 1.0 * n / (System.currentTimeMillis() - ms ); if ( j > 2 ) totRemNo += d; System.out.print("RemNo: " + format( d ) +" K/s " ); /* We delete pairs in m. 
*/ ms = System.currentTimeMillis(); for( i = 0; i < n; i++ ) m.remove( k[i] ); d = 1.0 * n / (System.currentTimeMillis() - ms ); if ( j > 2 ) totRemYes += d; System.out.print("RemYes: " + format( d ) +" K/s " ); System.out.println(); } System.out.println(); System.out.println( "fastutil Add: " + format( totAdd/(j-3) ) + " K/s Yes: " + format( totYes/(j-3) ) + " K/s No: " + format( totNo/(j-3) ) + " K/s Iter: " + format( totIter/(j-3) ) + " K/s RemNo: " + format( totRemNo/(j-3) ) + " K/s RemYes: " + format( totRemYes/(j-3) ) + " K/s" ); System.out.println(); } private static void fatal( String msg ) { System.out.println( msg ); System.exit( 1 ); } private static void ensure( boolean cond, String msg ) { if ( cond ) return; fatal( msg ); } private static void printProbes( OPEN_HASH_BIG_SET m ) { long totProbes = 0; double totSquareProbes = 0; int maxProbes = 0; final double f = (double)m.size / m.n; for( int i = 0, c = 0; i < m.n; i++ ) { if ( ! KEY_IS_NULL( BIG_ARRAYS.get( m.key, i ) ) ) c++; else { if ( c != 0 ) { final long p = ( c + 1 ) * ( c + 2 ) / 2; totProbes += p; totSquareProbes += (double)p * p; } maxProbes = Math.max( c, maxProbes ); c = 0; totProbes++; totSquareProbes++; } } final double expected = (double)totProbes / m.n; System.err.println( "Expected probes: " + ( 3 * Math.sqrt( 3 ) * ( f / ( ( 1 - f ) * ( 1 - f ) ) ) + 4 / ( 9 * f ) - 1 ) + "; actual: " + expected + "; stddev: " + Math.sqrt( totSquareProbes / m.n - expected * expected ) + "; max probes: " + maxProbes ); } private static void test( int n, float f ) { int c; OPEN_HASH_BIG_SET m = new OPEN_HASH_BIG_SET(Hash.DEFAULT_INITIAL_SIZE, f); java.util.Set t = new java.util.HashSet(); /* First of all, we fill t with random data. */ for(int i=0; i2) f = Float.parseFloat(args[2]); if ( args.length > 3 ) r = new java.util.Random( seed = Long.parseLong( args[ 3 ] ) ); try { if ("speedTest".equals(args[0]) || "speedComp".equals(args[0])) speedTest( n, f, "speedComp".equals(args[0]) ); else if ( "test".equals( args[0] ) ) test(n, f); } catch( Throwable e ) { e.printStackTrace( System.err ); System.err.println( "seed: " + seed ); } } #endif } fastutil-7.1.0/drv/OpenHashMap.drv0000664000000000000000000030063713050701620015530 0ustar rootroot/* * Copyright (C) 2002-2016 Sebastiano Vigna * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package PACKAGE; import it.unimi.dsi.fastutil.Hash; import it.unimi.dsi.fastutil.HashCommon; import static it.unimi.dsi.fastutil.HashCommon.arraySize; import static it.unimi.dsi.fastutil.HashCommon.maxFill; import java.util.Map; import java.util.Arrays; import java.util.NoSuchElementException; import VALUE_PACKAGE.VALUE_COLLECTION; import VALUE_PACKAGE.VALUE_ABSTRACT_COLLECTION; #if VALUES_PRIMITIVE || KEYS_PRIMITIVE && VALUE_CLASS_Object import VALUE_PACKAGE.VALUE_ITERATOR; #endif #ifdef Linked import java.util.Comparator; #if VALUES_PRIMITIVE import VALUE_PACKAGE.VALUE_LIST_ITERATOR; #endif #if KEYS_PRIMITIVE && VALUE_CLASS_Reference import it.unimi.dsi.fastutil.objects.ObjectIterator; #endif import it.unimi.dsi.fastutil.objects.AbstractObjectSortedSet; import it.unimi.dsi.fastutil.objects.ObjectListIterator; import it.unimi.dsi.fastutil.objects.ObjectBidirectionalIterator; import it.unimi.dsi.fastutil.objects.ObjectSortedSet; #else import it.unimi.dsi.fastutil.objects.AbstractObjectSet; #if KEYS_PRIMITIVE && ! VALUE_CLASS_Object import it.unimi.dsi.fastutil.objects.ObjectIterator; #endif #endif #ifdef Linked /** A type-specific linked hash map with with a fast, small-footprint implementation. * *

Instances of this class use a hash table to represent a map. The table is * filled up to a specified load factor, and then doubled in size to * accommodate new entries. If the table is emptied below one fourth * of the load factor, it is halved in size. However, halving is * not performed when deleting entries from an iterator, as it would interfere * with the iteration process. * *

Note that {@link #clear()} does not modify the hash table size. * Rather, a family of {@linkplain #trim() trimming * methods} lets you control the size of the table; this is particularly useful * if you reuse instances of this class. * *

Iterators generated by this map will enumerate pairs in the same order in which they * have been added to the map (addition of pairs whose key is already present * in the set does not change the iteration order). Note that this order has nothing in common with the natural * order of the keys. The order is kept by means of a doubly linked list, represented * via an array of longs parallel to the table. * *

This class implements the interface of a sorted map, so to allow easy * access of the iteration order: for instance, you can get the first key * in iteration order with {@code firstKey()} without having to create an * iterator; however, this class partially violates the {@link java.util.SortedMap} * contract because all submap methods throw an exception and {@link * #comparator()} returns always null. * *
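 * <p>A small sketch of iteration-order access, using the generated int-to-int linked map
 * (illustrative name):
 * <pre>
 * Int2IntLinkedOpenHashMap m = new Int2IntLinkedOpenHashMap();
 * m.put( 3, 30 );
 * m.put( 1, 10 );
 * m.put( 2, 20 );
 * int first = m.firstIntKey();   // 3: insertion order, not the natural order of the keys
 * int last  = m.lastIntKey();    // 2
 * </pre>
 *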

Additional methods, such as getAndMoveToFirst(), make it easy * to use instances of this class as a cache (e.g., with LRU policy). * *
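 * <p>A hedged LRU-cache sketch built on those methods (int-to-int instantiation, illustrative
 * names; key, MAX_SIZE and compute() are hypothetical and up to the caller):
 * <pre>
 * Int2IntLinkedOpenHashMap cache = new Int2IntLinkedOpenHashMap();
 * cache.defaultReturnValue( -1 );
 *
 * int v = cache.getAndMoveToFirst( key );      // hit: the entry moves to the front
 * if ( v == -1 ) {                             // miss
 *     v = compute( key );                      // hypothetical
 *     cache.putAndMoveToFirst( key, v );
 *     if ( cache.size() > MAX_SIZE ) cache.remove( cache.lastIntKey() );  // evict the LRU entry
 * }
 * </pre>
 *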

The iterators provided by the views of this class using are type-specific * {@linkplain java.util.ListIterator list iterators}, and can be started at any * element which is a key of the map, or * a {@link NoSuchElementException} exception will be thrown. * If, however, the provided element is not the first or last key in the * set, the first access to the list index will require linear time, as in the worst case * the entire key set must be scanned in iteration order to retrieve the positional * index of the starting key. If you use just the methods of a type-specific {@link it.unimi.dsi.fastutil.BidirectionalIterator}, * however, all operations will be performed in constant time. * * @see Hash * @see HashCommon */ public class OPEN_HASH_MAP KEY_VALUE_GENERIC extends ABSTRACT_SORTED_MAP KEY_VALUE_GENERIC implements java.io.Serializable, Cloneable, Hash { #else #ifdef Custom /** A type-specific hash map with a fast, small-footprint implementation whose {@linkplain it.unimi.dsi.fastutil.Hash.Strategy hashing strategy} * is specified at creation time. * *
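 * <p>A hedged sketch of a custom strategy (object-to-object instantiation, illustrative names):
 * content-based hashing of byte[] keys, which by default would use identity semantics:
 * <pre>
 * Hash.Strategy&lt;byte[]&gt; byContent = new Hash.Strategy&lt;byte[]&gt;() {
 *     public int hashCode( byte[] a ) { return java.util.Arrays.hashCode( a ); }
 *     public boolean equals( byte[] a, byte[] b ) { return java.util.Arrays.equals( a, b ); }
 * };
 * Object2ObjectOpenCustomHashMap&lt;byte[], String&gt; m =
 *     new Object2ObjectOpenCustomHashMap&lt;byte[], String&gt;( byContent );
 * m.put( new byte[] { 1, 2 }, "x" );
 * String s = m.get( new byte[] { 1, 2 } );   // "x": equality is decided by the strategy
 * </pre>
 *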

Instances of this class use a hash table to represent a map. The table is * filled up to a specified load factor, and then doubled in size to * accommodate new entries. If the table is emptied below one fourth * of the load factor, it is halved in size. However, halving is * not performed when deleting entries from an iterator, as it would interfere * with the iteration process. * *

Note that {@link #clear()} does not modify the hash table size. * Rather, a family of {@linkplain #trim() trimming * methods} lets you control the size of the table; this is particularly useful * if you reuse instances of this class. * * @see Hash * @see HashCommon */ public class OPEN_HASH_MAP KEY_VALUE_GENERIC extends ABSTRACT_MAP KEY_VALUE_GENERIC implements java.io.Serializable, Cloneable, Hash { #else /** A type-specific hash map with a fast, small-footprint implementation. * *

Instances of this class use a hash table to represent a map. The table is * filled up to a specified load factor, and then doubled in size to * accommodate new entries. If the table is emptied below one fourth * of the load factor, it is halved in size. However, halving is * not performed when deleting entries from an iterator, as it would interfere * with the iteration process. * *
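 * <p>A minimal usage sketch, assuming the generated int-to-int instantiation (the concrete
 * class name is illustrative):
 * <pre>
 * Int2IntOpenHashMap m = new Int2IntOpenHashMap( 1000, 0.5f );  // expected size and load factor
 * m.defaultReturnValue( -1 );   // returned by get() for missing keys, instead of null
 * m.put( 1, 10 );
 * int v = m.get( 1 );           // 10
 * int w = m.get( 2 );           // -1
 * </pre>
 *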

Note that {@link #clear()} does not modify the hash table size. * Rather, a family of {@linkplain #trim() trimming * methods} lets you control the size of the table; this is particularly useful * if you reuse instances of this class. * * @see Hash * @see HashCommon */ public class OPEN_HASH_MAP KEY_VALUE_GENERIC extends ABSTRACT_MAP KEY_VALUE_GENERIC implements java.io.Serializable, Cloneable, Hash { #endif #endif private static final long serialVersionUID = 0L; private static final boolean ASSERTS = ASSERTS_VALUE; /** The array of keys. */ protected transient KEY_GENERIC_TYPE[] key; /** The array of values. */ protected transient VALUE_GENERIC_TYPE[] value; /** The mask for wrapping a position counter. */ protected transient int mask; /** Whether this set contains the key zero. */ protected transient boolean containsNullKey; #ifdef Custom /** The hash strategy of this custom map. */ protected STRATEGY KEY_GENERIC strategy; #endif #ifdef Linked /** The index of the first entry in iteration order. It is valid iff {@link #size} is nonzero; otherwise, it contains -1. */ protected transient int first = -1; /** The index of the last entry in iteration order. It is valid iff {@link #size} is nonzero; otherwise, it contains -1. */ protected transient int last = -1; /** For each entry, the next and the previous entry in iteration order, * stored as ((prev & 0xFFFFFFFFL) << 32) | (next & 0xFFFFFFFFL). * The first entry contains predecessor -1, and the last entry * contains successor -1. */ protected transient long[] link; #endif /** The current table size. */ protected transient int n; /** Threshold after which we rehash. It must be the table size times {@link #f}. */ protected transient int maxFill; /** Number of entries in the set (including the key zero, if present). */ protected int size; /** The acceptable load factor. */ protected final float f; #ifdef Linked /** Cached set of entries. */ protected transient FastSortedEntrySet KEY_VALUE_GENERIC entries; /** Cached set of keys. */ protected transient SORTED_SET KEY_GENERIC keys; #else /** Cached set of entries. */ protected transient FastEntrySet KEY_VALUE_GENERIC entries; /** Cached set of keys. */ protected transient SET KEY_GENERIC keys; #endif /** Cached collection of values. */ protected transient VALUE_COLLECTION VALUE_GENERIC values; #ifdef Custom /** Creates a new hash map. * *

The actual table size will be the least power of two greater than expected/f. * * @param expected the expected number of elements in the hash set. * @param f the load factor. * @param strategy the strategy. */ SUPPRESS_WARNINGS_KEY_VALUE_UNCHECKED public OPEN_HASH_MAP( final int expected, final float f, final STRATEGY KEY_GENERIC strategy ) { this.strategy = strategy; #else /** Creates a new hash map. * *

The actual table size will be the least power of two greater than expected/f. * * @param expected the expected number of elements in the hash set. * @param f the load factor. */ SUPPRESS_WARNINGS_KEY_VALUE_UNCHECKED public OPEN_HASH_MAP( final int expected, final float f ) { #endif if ( f <= 0 || f > 1 ) throw new IllegalArgumentException( "Load factor must be greater than 0 and smaller than or equal to 1" ); if ( expected < 0 ) throw new IllegalArgumentException( "The expected number of elements must be nonnegative" ); this.f = f; n = arraySize( expected, f ); mask = n - 1; maxFill = maxFill( n, f ); key = KEY_GENERIC_ARRAY_CAST new KEY_TYPE[ n + 1 ]; value = VALUE_GENERIC_ARRAY_CAST new VALUE_TYPE[ n + 1 ]; #ifdef Linked link = new long[ n + 1 ]; #endif } #ifdef Custom /** Creates a new hash map with {@link Hash#DEFAULT_LOAD_FACTOR} as load factor. * * @param expected the expected number of elements in the hash map. * @param strategy the strategy. */ public OPEN_HASH_MAP( final int expected, final STRATEGY KEY_GENERIC strategy ) { this( expected, DEFAULT_LOAD_FACTOR, strategy ); } #else /** Creates a new hash map with {@link Hash#DEFAULT_LOAD_FACTOR} as load factor. * * @param expected the expected number of elements in the hash map. */ public OPEN_HASH_MAP( final int expected ) { this( expected, DEFAULT_LOAD_FACTOR ); } #endif #ifdef Custom /** Creates a new hash map with initial expected {@link Hash#DEFAULT_INITIAL_SIZE} entries * and {@link Hash#DEFAULT_LOAD_FACTOR} as load factor. * @param strategy the strategy. */ public OPEN_HASH_MAP( final STRATEGY KEY_GENERIC strategy ) { this( DEFAULT_INITIAL_SIZE, DEFAULT_LOAD_FACTOR, strategy ); } #else /** Creates a new hash map with initial expected {@link Hash#DEFAULT_INITIAL_SIZE} entries * and {@link Hash#DEFAULT_LOAD_FACTOR} as load factor. */ public OPEN_HASH_MAP() { this( DEFAULT_INITIAL_SIZE, DEFAULT_LOAD_FACTOR ); } #endif #ifdef Custom /** Creates a new hash map copying a given one. * * @param m a {@link Map} to be copied into the new hash map. * @param f the load factor. * @param strategy the strategy. */ public OPEN_HASH_MAP( final Map m, final float f, final STRATEGY KEY_GENERIC strategy ) { this( m.size(), f, strategy ); putAll( m ); } #else /** Creates a new hash map copying a given one. * * @param m a {@link Map} to be copied into the new hash map. * @param f the load factor. */ public OPEN_HASH_MAP( final Map m, final float f ) { this( m.size(), f ); putAll( m ); } #endif #ifdef Custom /** Creates a new hash map with {@link Hash#DEFAULT_LOAD_FACTOR} as load factor copying a given one. * * @param m a {@link Map} to be copied into the new hash map. * @param strategy the strategy. */ public OPEN_HASH_MAP( final Map m, final STRATEGY KEY_GENERIC strategy ) { this( m, DEFAULT_LOAD_FACTOR, strategy ); } #else /** Creates a new hash map with {@link Hash#DEFAULT_LOAD_FACTOR} as load factor copying a given one. * * @param m a {@link Map} to be copied into the new hash map. */ public OPEN_HASH_MAP( final Map m ) { this( m, DEFAULT_LOAD_FACTOR ); } #endif #ifdef Custom /** Creates a new hash map copying a given type-specific one. * * @param m a type-specific map to be copied into the new hash map. * @param f the load factor. * @param strategy the strategy. */ public OPEN_HASH_MAP( final MAP KEY_VALUE_GENERIC m, final float f, final STRATEGY KEY_GENERIC strategy ) { this( m.size(), f, strategy ); putAll( m ); } #else /** Creates a new hash map copying a given type-specific one. 
* * @param m a type-specific map to be copied into the new hash map. * @param f the load factor. */ public OPEN_HASH_MAP( final MAP KEY_VALUE_GENERIC m, final float f ) { this( m.size(), f ); putAll( m ); } #endif #ifdef Custom /** Creates a new hash map with {@link Hash#DEFAULT_LOAD_FACTOR} as load factor copying a given type-specific one. * * @param m a type-specific map to be copied into the new hash map. * @param strategy the strategy. */ public OPEN_HASH_MAP( final MAP KEY_VALUE_GENERIC m, final STRATEGY KEY_GENERIC strategy ) { this( m, DEFAULT_LOAD_FACTOR, strategy ); } #else /** Creates a new hash map with {@link Hash#DEFAULT_LOAD_FACTOR} as load factor copying a given type-specific one. * * @param m a type-specific map to be copied into the new hash map. */ public OPEN_HASH_MAP( final MAP KEY_VALUE_GENERIC m ) { this( m, DEFAULT_LOAD_FACTOR ); } #endif #ifdef Custom /** Creates a new hash map using the elements of two parallel arrays. * * @param k the array of keys of the new hash map. * @param v the array of corresponding values in the new hash map. * @param f the load factor. * @param strategy the strategy. * @throws IllegalArgumentException if k and v have different lengths. */ public OPEN_HASH_MAP( final KEY_GENERIC_TYPE[] k, final VALUE_GENERIC_TYPE[] v, final float f, final STRATEGY KEY_GENERIC strategy ) { this( k.length, f, strategy ); if ( k.length != v.length ) throw new IllegalArgumentException( "The key array and the value array have different lengths (" + k.length + " and " + v.length + ")" ); for( int i = 0; i < k.length; i++ ) this.put( k[ i ], v[ i ] ); } #else /** Creates a new hash map using the elements of two parallel arrays. * * @param k the array of keys of the new hash map. * @param v the array of corresponding values in the new hash map. * @param f the load factor. * @throws IllegalArgumentException if k and v have different lengths. */ public OPEN_HASH_MAP( final KEY_GENERIC_TYPE[] k, final VALUE_GENERIC_TYPE[] v, final float f ) { this( k.length, f ); if ( k.length != v.length ) throw new IllegalArgumentException( "The key array and the value array have different lengths (" + k.length + " and " + v.length + ")" ); for( int i = 0; i < k.length; i++ ) this.put( k[ i ], v[ i ] ); } #endif #ifdef Custom /** Creates a new hash map with {@link Hash#DEFAULT_LOAD_FACTOR} as load factor using the elements of two parallel arrays. * * @param k the array of keys of the new hash map. * @param v the array of corresponding values in the new hash map. * @param strategy the strategy. * @throws IllegalArgumentException if k and v have different lengths. */ public OPEN_HASH_MAP( final KEY_GENERIC_TYPE[] k, final VALUE_GENERIC_TYPE[] v, final STRATEGY KEY_GENERIC strategy ) { this( k, v, DEFAULT_LOAD_FACTOR, strategy ); } #else /** Creates a new hash map with {@link Hash#DEFAULT_LOAD_FACTOR} as load factor using the elements of two parallel arrays. * * @param k the array of keys of the new hash map. * @param v the array of corresponding values in the new hash map. * @throws IllegalArgumentException if k and v have different lengths. */ public OPEN_HASH_MAP( final KEY_GENERIC_TYPE[] k, final VALUE_GENERIC_TYPE[] v ) { this( k, v, DEFAULT_LOAD_FACTOR ); } #endif #ifdef Custom /** Returns the hashing strategy. * * @return the hashing strategy of this custom hash map. */ public STRATEGY KEY_GENERIC strategy() { return strategy; } #endif private int realSize() { return containsNullKey ? 
size - 1 : size; } private void ensureCapacity( final int capacity ) { final int needed = arraySize( capacity, f ); if ( needed > n ) rehash( needed ); } private void tryCapacity( final long capacity ) { final int needed = (int)Math.min( 1 << 30, Math.max( 2, HashCommon.nextPowerOfTwo( (long)Math.ceil( capacity / f ) ) ) ); if ( needed > n ) rehash( needed ); } private VALUE_GENERIC_TYPE removeEntry( final int pos ) { final VALUE_GENERIC_TYPE oldValue = value[ pos ]; #if VALUES_REFERENCE value[ pos ] = null; #endif size--; #ifdef Linked fixPointers( pos ); #endif shiftKeys( pos ); if ( size < maxFill / 4 && n > DEFAULT_INITIAL_SIZE ) rehash( n / 2 ); return oldValue; } private VALUE_GENERIC_TYPE removeNullEntry() { containsNullKey = false; #if KEYS_REFERENCE key[ n ] = null; #endif final VALUE_GENERIC_TYPE oldValue = value[ n ]; #if VALUES_REFERENCE value[ n ] = null; #endif size--; #ifdef Linked fixPointers( n ); #endif if ( size < maxFill / 4 && n > DEFAULT_INITIAL_SIZE ) rehash( n / 2 ); return oldValue; } /** {@inheritDoc} */ public void putAll(Map m) { if ( f <= .5 ) ensureCapacity( m.size() ); // The resulting map will be sized for m.size() elements else tryCapacity( size() + m.size() ); // The resulting map will be tentatively sized for size() + m.size() elements super.putAll( m ); } private int insert(final KEY_GENERIC_TYPE k, final VALUE_GENERIC_TYPE v) { int pos; if ( KEY_EQUALS_NULL( k ) ) { if ( containsNullKey ) return n; containsNullKey = true; pos = n; } else { KEY_GENERIC_TYPE curr; final KEY_GENERIC_TYPE[] key = this.key; // The starting point. if ( ! KEY_IS_NULL( curr = key[ pos = KEY2INTHASH( k ) & mask ] ) ) { if ( KEY_EQUALS_NOT_NULL( curr, k ) ) return pos; while( ! KEY_IS_NULL( curr = key[ pos = ( pos + 1 ) & mask ] ) ) if ( KEY_EQUALS_NOT_NULL( curr, k ) ) return pos; } } key[ pos ] = k; value[ pos ] = v; #ifdef Linked if ( size == 0 ) { first = last = pos; // Special case of SET_UPPER_LOWER( link[ pos ], -1, -1 ); link[ pos ] = -1L; } else { SET_NEXT( link[ last ], pos ); SET_UPPER_LOWER( link[ pos ], last, -1 ); last = pos; } #endif if ( size++ >= maxFill ) rehash( arraySize( size + 1, f ) ); if ( ASSERTS ) checkTable(); return -1; } public VALUE_GENERIC_TYPE put(final KEY_GENERIC_TYPE k, final VALUE_GENERIC_TYPE v) { final int pos = insert( k, v ); if ( pos < 0 ) return defRetValue; final VALUE_GENERIC_TYPE oldValue = value[ pos ]; value[ pos ] = v; return oldValue; } #if VALUES_PRIMITIVE || KEYS_PRIMITIVE /** {@inheritDoc} * @deprecated Please use the corresponding type-specific method instead. */ @Deprecated @Override public VALUE_GENERIC_CLASS put( final KEY_GENERIC_CLASS ok, final VALUE_GENERIC_CLASS ov ) { final VALUE_GENERIC_TYPE v = VALUE_CLASS2TYPE( ov ); final int pos = insert( KEY_CLASS2TYPE( ok ), v ); if ( pos < 0 ) return OBJECT_DEFAULT_RETURN_VALUE; final VALUE_GENERIC_TYPE oldValue = value[ pos ]; value[ pos ] = v; return VALUE2OBJ( oldValue ); } #endif #if VALUE_CLASS_Byte || VALUE_CLASS_Short || VALUE_CLASS_Char || VALUE_CLASS_Integer || VALUE_CLASS_Long || VALUE_CLASS_Float || VALUE_CLASS_Double private VALUE_GENERIC_TYPE addToValue( final int pos, final VALUE_GENERIC_TYPE incr ) { final VALUE_GENERIC_TYPE oldValue = value[ pos ]; #if VALUE_CLASS_Byte || VALUE_CLASS_Short || VALUE_CLASS_Char value[ pos ] = (VALUE_TYPE)(oldValue + incr); #else value[ pos ] = oldValue + incr; #endif return oldValue; } /** Adds an increment to value currently associated with a key. * *
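For example, this makes the map directly usable as a counter (a sketch assuming
 * the generated it.unimi.dsi.fastutil.ints.Int2IntOpenHashMap instantiation of
 * this driver):
 *
 *   Int2IntOpenHashMap freq = new Int2IntOpenHashMap();
 *   freq.defaultReturnValue( 0 ); // absent keys count as zero
 *   freq.addTo( 7, 1 );           // 7 was absent: it is now mapped to 0 + 1 = 1
 *   freq.addTo( 7, 1 );           // returns 1; 7 is now mapped to 2
 *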

Note that this method respects the {@linkplain #defaultReturnValue() default return value} semantics: when * called with a key that does not currently appears in the map, the key * will be associated with the default return value plus * the given increment. * * @param k the key. * @param incr the increment. * @return the old value, or the {@linkplain #defaultReturnValue() default return value} if no value was present for the given key. */ public VALUE_GENERIC_TYPE addTo(final KEY_GENERIC_TYPE k, final VALUE_GENERIC_TYPE incr) { int pos; if ( KEY_EQUALS_NULL( k ) ) { if ( containsNullKey ) return addToValue( n, incr ); pos = n; containsNullKey = true; } else { KEY_GENERIC_TYPE curr; final KEY_GENERIC_TYPE[] key = this.key; // The starting point. if ( ! KEY_IS_NULL( curr = key[ pos = KEY2INTHASH( k ) & mask ] ) ) { if ( KEY_EQUALS_NOT_NULL( curr, k ) ) return addToValue( pos, incr ); while( ! KEY_IS_NULL( curr = key[ pos = ( pos + 1 ) & mask ] ) ) if ( KEY_EQUALS_NOT_NULL( curr, k ) ) return addToValue( pos, incr ); } } key[ pos ] = k; #if VALUE_CLASS_Byte || VALUE_CLASS_Short || VALUE_CLASS_Char value[ pos ] = (VALUE_TYPE)(defRetValue + incr); #else value[ pos ] = defRetValue + incr; #endif #ifdef Linked if ( size == 0 ) { first = last = pos; // Special case of SET_UPPER_LOWER( link[ pos ], -1, -1 ); link[ pos ] = -1L; } else { SET_NEXT( link[ last ], pos ); SET_UPPER_LOWER( link[ pos ], last, -1 ); last = pos; } #endif if ( size++ >= maxFill ) rehash( arraySize( size + 1, f ) ); if ( ASSERTS ) checkTable(); return defRetValue; } #endif /** Shifts left entries with the specified hash code, starting at the specified position, * and empties the resulting free entry. * * @param pos a starting position. */ protected final void shiftKeys( int pos ) { // Shift entries with the same hash. int last, slot; KEY_GENERIC_TYPE curr; final KEY_GENERIC_TYPE[] key = this.key; for(;;) { pos = ( ( last = pos ) + 1 ) & mask; for(;;) { if ( KEY_IS_NULL( curr = key[ pos ] ) ) { key[ last ] = KEY_NULL; #if VALUES_REFERENCE value[ last ] = null; #endif return; } slot = KEY2INTHASH( curr ) & mask; if ( last <= pos ? last >= slot || slot > pos : last >= slot && slot > pos ) break; pos = ( pos + 1 ) & mask; } key[ last ] = curr; value[ last ] = value[ pos ]; #ifdef Linked fixPointers( pos, last ); #endif } } SUPPRESS_WARNINGS_KEY_UNCHECKED public VALUE_GENERIC_TYPE REMOVE_VALUE( final KEY_TYPE k ) { if ( KEY_EQUALS_NULL( KEY_GENERIC_CAST k ) ) { if ( containsNullKey ) return removeNullEntry(); return defRetValue; } KEY_GENERIC_TYPE curr; final KEY_GENERIC_TYPE[] key = this.key; int pos; // The starting point. if ( KEY_IS_NULL( curr = key[ pos = KEY2INTHASH_CAST( k ) & mask ] ) ) return defRetValue; if ( KEY_EQUALS_NOT_NULL_CAST( k, curr ) ) return removeEntry( pos ); while( true ) { if ( KEY_IS_NULL( curr = key[ pos = ( pos + 1 ) & mask ] ) ) return defRetValue; if ( KEY_EQUALS_NOT_NULL_CAST( k, curr ) ) return removeEntry( pos ); } } #if KEYS_PRIMITIVE || VALUES_PRIMITIVE /** {@inheritDoc} * @deprecated Please use the corresponding type-specific method instead. */ @Deprecated @Override SUPPRESS_WARNINGS_KEY_UNCHECKED public VALUE_GENERIC_CLASS remove( final Object ok ) { final KEY_GENERIC_TYPE k = KEY_GENERIC_CAST KEY_OBJ2TYPE( ok ); if ( KEY_EQUALS_NULL( k ) ) { if ( containsNullKey ) return VALUE2OBJ( removeNullEntry() ); return OBJECT_DEFAULT_RETURN_VALUE; } KEY_GENERIC_TYPE curr; final KEY_GENERIC_TYPE[] key = this.key; int pos; // The starting point. 
if ( KEY_IS_NULL( curr = key[ pos = KEY2INTHASH( k ) & mask ] ) ) return OBJECT_DEFAULT_RETURN_VALUE; if ( KEY_EQUALS_NOT_NULL( curr, k ) ) return VALUE2OBJ( removeEntry( pos ) ); while( true ) { if ( KEY_IS_NULL( curr = key[ pos = ( pos + 1 ) & mask ] ) ) return OBJECT_DEFAULT_RETURN_VALUE; if ( KEY_EQUALS_NOT_NULL( curr, k ) ) return VALUE2OBJ( removeEntry( pos ) ); } } #endif #ifdef Linked private VALUE_GENERIC_TYPE setValue( final int pos, final VALUE_GENERIC_TYPE v ) { final VALUE_GENERIC_TYPE oldValue = value[ pos ]; value[ pos ] = v; return oldValue; } /** Removes the mapping associated with the first key in iteration order. * @return the value previously associated with the first key in iteration order. * @throws NoSuchElementException is this map is empty. */ public VALUE_GENERIC_TYPE REMOVE_FIRST_VALUE() { if ( size == 0 ) throw new NoSuchElementException(); final int pos = first; // Abbreviated version of fixPointers(pos) first = GET_NEXT(link[ pos ]); if ( 0 <= first ) { // Special case of SET_PREV( link[ first ], -1 ) link[ first ] |= (-1 & 0xFFFFFFFFL) << 32; } size--; final VALUE_GENERIC_TYPE v = value[ pos ]; if ( pos == n ) { containsNullKey = false; #if KEYS_REFERENCE key[ n ] = null; #endif #if VALUES_REFERENCE value[ n ] = null; #endif } else shiftKeys( pos ); if ( size < maxFill / 4 && n > DEFAULT_INITIAL_SIZE ) rehash( n / 2 ); return v; } /** Removes the mapping associated with the last key in iteration order. * @return the value previously associated with the last key in iteration order. * @throws NoSuchElementException is this map is empty. */ public VALUE_GENERIC_TYPE REMOVE_LAST_VALUE() { if ( size == 0 ) throw new NoSuchElementException(); final int pos = last; // Abbreviated version of fixPointers(pos) last = GET_PREV(link[ pos ]); if ( 0 <= last ) { // Special case of SET_NEXT( link[ last ], -1 ) link[ last ] |= -1 & 0xFFFFFFFFL; } size--; final VALUE_GENERIC_TYPE v = value[ pos ]; if ( pos == n ) { containsNullKey = false; #if KEYS_REFERENCE key[ n ] = null; #endif #if VALUES_REFERENCE value[ n ] = null; #endif } else shiftKeys( pos ); if ( size < maxFill / 4 && n > DEFAULT_INITIAL_SIZE ) rehash( n / 2 ); return v; } private void moveIndexToFirst( final int i ) { if ( size == 1 || first == i ) return; if ( last == i ) { last = GET_PREV(link[ i ]); // Special case of SET_NEXT( link[ last ], -1 ); link[ last ] |= -1 & 0xFFFFFFFFL; } else { final long linki = link[ i ]; final int prev = GET_PREV(linki); final int next = GET_NEXT(linki); COPY_NEXT(link[ prev ], linki); COPY_PREV(link[ next ], linki); } SET_PREV( link[ first ], i ); SET_UPPER_LOWER( link[ i ], -1, first ); first = i; } private void moveIndexToLast( final int i ) { if ( size == 1 || last == i ) return; if ( first == i ) { first = GET_NEXT(link[ i ]); // Special case of SET_PREV( link[ first ], -1 ); link[ first ] |= (-1 & 0xFFFFFFFFL) << 32; } else { final long linki = link[ i ]; final int prev = GET_PREV(linki); final int next = GET_NEXT(linki); COPY_NEXT(link[ prev ], linki); COPY_PREV(link[ next ], linki); } SET_NEXT( link[ last ], i ); SET_UPPER_LOWER( link[ i ], last, -1 ); last = i; } /** Returns the value to which the given key is mapped; if the key is present, it is moved to the first position of the iteration order. * * @param k the key. * @return the corresponding value, or the {@linkplain #defaultReturnValue() default return value} if no value was present for the given key. 
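 *
 * For instance, a small LRU-style cache can be built on this method (a sketch
 * assuming the generated it.unimi.dsi.fastutil.ints.Int2IntLinkedOpenHashMap
 * instantiation of this driver; cache, k, compute() and MAX are hypothetical
 * placeholders for the caller's own map, key, lookup logic and capacity bound):
 *
 *   int v = cache.getAndMoveToFirst( k );                         // -1 if k is missing
 *   if ( v == -1 ) cache.putAndMoveToFirst( k, v = compute( k ) );
 *   if ( cache.size() > MAX ) cache.remove( cache.lastIntKey() ); // evict the least recently used key
 *
 * where cache has been created with defaultReturnValue( -1 ).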
*/ public VALUE_GENERIC_TYPE getAndMoveToFirst( final KEY_GENERIC_TYPE k ) { if ( KEY_EQUALS_NULL( k ) ) { if ( containsNullKey ) { moveIndexToFirst( n ); return value[ n ]; } return defRetValue; } KEY_GENERIC_TYPE curr; final KEY_GENERIC_TYPE[] key = this.key; int pos; // The starting point. if ( KEY_IS_NULL( curr = key[ pos = KEY2INTHASH( k ) & mask ] ) ) return defRetValue; if ( KEY_EQUALS_NOT_NULL( k, curr ) ) { moveIndexToFirst( pos ); return value[ pos ]; } // There's always an unused entry. while( true ) { if ( KEY_IS_NULL( curr = key[ pos = ( pos + 1 ) & mask ] ) ) return defRetValue; if ( KEY_EQUALS_NOT_NULL( k, curr ) ) { moveIndexToFirst( pos ); return value[ pos ]; } } } /** Returns the value to which the given key is mapped; if the key is present, it is moved to the last position of the iteration order. * * @param k the key. * @return the corresponding value, or the {@linkplain #defaultReturnValue() default return value} if no value was present for the given key. */ public VALUE_GENERIC_TYPE getAndMoveToLast( final KEY_GENERIC_TYPE k ) { if ( KEY_EQUALS_NULL( k ) ) { if ( containsNullKey ) { moveIndexToLast( n ); return value[ n ]; } return defRetValue; } KEY_GENERIC_TYPE curr; final KEY_GENERIC_TYPE[] key = this.key; int pos; // The starting point. if ( KEY_IS_NULL( curr = key[ pos = KEY2INTHASH( k ) & mask ] ) ) return defRetValue; if ( KEY_EQUALS_NOT_NULL( k, curr ) ) { moveIndexToLast( pos ); return value[ pos ]; } // There's always an unused entry. while( true ) { if ( KEY_IS_NULL( curr = key[ pos = ( pos + 1 ) & mask ] ) ) return defRetValue; if ( KEY_EQUALS_NOT_NULL( k, curr ) ) { moveIndexToLast( pos ); return value[ pos ]; } } } /** Adds a pair to the map; if the key is already present, it is moved to the first position of the iteration order. * * @param k the key. * @param v the value. * @return the old value, or the {@linkplain #defaultReturnValue() default return value} if no value was present for the given key. */ public VALUE_GENERIC_TYPE putAndMoveToFirst( final KEY_GENERIC_TYPE k, final VALUE_GENERIC_TYPE v ) { int pos; if ( KEY_EQUALS_NULL( k ) ) { if ( containsNullKey ) { moveIndexToFirst( n ); return setValue( n, v ); } containsNullKey = true; pos = n; } else { KEY_GENERIC_TYPE curr; final KEY_GENERIC_TYPE[] key = this.key; // The starting point. if ( ! KEY_IS_NULL( curr = key[ pos = KEY2INTHASH( k ) & mask ] ) ) { if ( KEY_EQUALS_NOT_NULL( curr, k ) ) { moveIndexToFirst( pos ); return setValue( pos, v ); } while( ! KEY_IS_NULL( curr = key[ pos = ( pos + 1 ) & mask ] ) ) if ( KEY_EQUALS_NOT_NULL( curr, k ) ) { moveIndexToFirst( pos ); return setValue( pos, v ); } } } key[ pos ] = k; value[ pos ] = v; if ( size == 0 ) { first = last = pos; // Special case of SET_UPPER_LOWER( link[ pos ], -1, -1 ); link[ pos ] = -1L; } else { SET_PREV( link[ first ], pos ); SET_UPPER_LOWER( link[ pos ], -1, first ); first = pos; } if ( size++ >= maxFill ) rehash( arraySize( size, f ) ); if ( ASSERTS ) checkTable(); return defRetValue; } /** Adds a pair to the map; if the key is already present, it is moved to the last position of the iteration order. * * @param k the key. * @param v the value. * @return the old value, or the {@linkplain #defaultReturnValue() default return value} if no value was present for the given key. 
*/ public VALUE_GENERIC_TYPE putAndMoveToLast( final KEY_GENERIC_TYPE k, final VALUE_GENERIC_TYPE v ) { int pos; if ( KEY_EQUALS_NULL( k ) ) { if ( containsNullKey ) { moveIndexToLast( n ); return setValue( n, v ); } containsNullKey = true; pos = n; } else { KEY_GENERIC_TYPE curr; final KEY_GENERIC_TYPE[] key = this.key; // The starting point. if ( ! KEY_IS_NULL( curr = key[ pos = KEY2INTHASH( k ) & mask ] ) ) { if ( KEY_EQUALS_NOT_NULL( curr, k ) ) { moveIndexToLast( pos ); return setValue( pos, v ); } while( ! KEY_IS_NULL( curr = key[ pos = ( pos + 1 ) & mask ] ) ) if ( KEY_EQUALS_NOT_NULL( curr, k ) ) { moveIndexToLast( pos ); return setValue( pos, v ); } } } key[ pos ] = k; value[ pos ] = v; if ( size == 0 ) { first = last = pos; // Special case of SET_UPPER_LOWER( link[ pos ], -1, -1 ); link[ pos ] = -1L; } else { SET_NEXT( link[ last ], pos ); SET_UPPER_LOWER( link[ pos ], last, -1 ); last = pos; } if ( size++ >= maxFill ) rehash( arraySize( size, f ) ); if ( ASSERTS ) checkTable(); return defRetValue; } #endif #if KEYS_PRIMITIVE /** @deprecated Please use the corresponding type-specific method instead. */ @Deprecated public VALUE_GENERIC_CLASS get( final KEY_CLASS ok ) { if ( ok == null ) return null; final KEY_GENERIC_TYPE k = KEY_CLASS2TYPE( ok ); if ( KEY_EQUALS_NULL( k ) ) return containsNullKey ? VALUE2OBJ( value[ n ] ) : OBJECT_DEFAULT_RETURN_VALUE; KEY_GENERIC_TYPE curr; final KEY_GENERIC_TYPE[] key = this.key; int pos; // The starting point. if ( KEY_IS_NULL( curr = key[ pos = KEY2INTHASH( k ) & mask ] ) ) return OBJECT_DEFAULT_RETURN_VALUE; if ( KEY_EQUALS_NOT_NULL( k, curr ) ) return VALUE2OBJ( value[ pos ] ); // There's always an unused entry. while( true ) { if ( KEY_IS_NULL( curr = key[ pos = ( pos + 1 ) & mask ] ) ) return OBJECT_DEFAULT_RETURN_VALUE; if ( KEY_EQUALS_NOT_NULL( k, curr ) ) return VALUE2OBJ( value[ pos ] ); } } #endif SUPPRESS_WARNINGS_KEY_UNCHECKED public VALUE_GENERIC_TYPE GET_VALUE( final KEY_TYPE k ) { if ( KEY_EQUALS_NULL( KEY_GENERIC_CAST k ) ) return containsNullKey ? value[ n ] : defRetValue; KEY_GENERIC_TYPE curr; final KEY_GENERIC_TYPE[] key = this.key; int pos; // The starting point. if ( KEY_IS_NULL( curr = key[ pos = KEY2INTHASH_CAST( k ) & mask ] ) ) return defRetValue; if ( KEY_EQUALS_NOT_NULL_CAST( k, curr ) ) return value[ pos ]; // There's always an unused entry. while( true ) { if ( KEY_IS_NULL( curr = key[ pos = ( pos + 1 ) & mask ] ) ) return defRetValue; if ( KEY_EQUALS_NOT_NULL_CAST( k, curr ) ) return value[ pos ]; } } SUPPRESS_WARNINGS_KEY_UNCHECKED public boolean containsKey( final KEY_TYPE k ) { if ( KEY_EQUALS_NULL( KEY_GENERIC_CAST k ) ) return containsNullKey; KEY_GENERIC_TYPE curr; final KEY_GENERIC_TYPE[] key = this.key; int pos; // The starting point. if ( KEY_IS_NULL( curr = key[ pos = KEY2INTHASH_CAST( k ) & mask ] ) ) return false; if ( KEY_EQUALS_NOT_NULL_CAST( k, curr ) ) return true; // There's always an unused entry. while( true ) { if ( KEY_IS_NULL( curr = key[ pos = ( pos + 1 ) & mask ] ) ) return false; if ( KEY_EQUALS_NOT_NULL_CAST( k, curr ) ) return true; } } public boolean containsValue( final VALUE_TYPE v ) { final VALUE_GENERIC_TYPE value[] = this.value; final KEY_GENERIC_TYPE key[] = this.key; if ( containsNullKey && VALUE_EQUALS( value[ n ], v ) ) return true; for( int i = n; i-- != 0; ) if ( ! KEY_IS_NULL( key[ i ] ) && VALUE_EQUALS( value[ i ], v ) ) return true; return false; } /* Removes all elements from this map. * *

To increase object reuse, this method does not change the table size. * If you want to reduce the table size, you must use {@link #trim()}. * */ public void clear() { if ( size == 0 ) return; size = 0; containsNullKey = false; Arrays.fill( key, KEY_NULL ); #if VALUES_REFERENCE Arrays.fill( value, null ); #endif #ifdef Linked first = last = -1; #endif } public int size() { return size; } public boolean isEmpty() { return size == 0; } /** A no-op for backward compatibility. * * @param growthFactor unused. * @deprecated Since fastutil 6.1.0, hash tables are doubled when they are too full. */ @Deprecated public void growthFactor( int growthFactor ) {} /** Gets the growth factor (2). * * @return the growth factor of this set, which is fixed (2). * @see #growthFactor(int) * @deprecated Since fastutil 6.1.0, hash tables are doubled when they are too full. */ @Deprecated public int growthFactor() { return 16; } /** The entry class for a hash map does not record key and value, but * rather the position in the hash table of the corresponding entry. This * is necessary so that calls to {@link java.util.Map.Entry#setValue(Object)} are reflected in * the map */ final class MapEntry implements MAP.Entry KEY_VALUE_GENERIC, Map.Entry { // The table index this entry refers to, or -1 if this entry has been deleted. int index; MapEntry( final int index ) { this.index = index; } MapEntry() {} #if KEYS_PRIMITIVE /** {@inheritDoc} * @deprecated Please use the corresponding type-specific method instead. */ @Deprecated #endif public KEY_GENERIC_CLASS getKey() { return KEY2OBJ( key[ index ] ); } #if KEYS_PRIMITIVE public KEY_TYPE ENTRY_GET_KEY() { return key[ index ]; } #endif #if VALUES_PRIMITIVE /** {@inheritDoc} * @deprecated Please use the corresponding type-specific method instead. */ @Deprecated #endif public VALUE_GENERIC_CLASS getValue() { return VALUE2OBJ( value[ index ] ); } #if VALUES_PRIMITIVE public VALUE_GENERIC_TYPE ENTRY_GET_VALUE() { return value[ index ]; } #endif public VALUE_GENERIC_TYPE setValue( final VALUE_GENERIC_TYPE v ) { final VALUE_GENERIC_TYPE oldValue = value[ index ]; value[ index ] = v; return oldValue; } #if VALUES_PRIMITIVE public VALUE_GENERIC_CLASS setValue( final VALUE_GENERIC_CLASS v ) { return VALUE2OBJ( setValue( VALUE_CLASS2TYPE( v ) ) ); } #endif @SuppressWarnings("unchecked") public boolean equals( final Object o ) { if (!(o instanceof Map.Entry)) return false; Map.Entry e = (Map.Entry)o; return KEY_EQUALS( key[ index ], KEY_CLASS2TYPE( e.getKey() ) ) && VALUE_EQUALS( value[ index ], VALUE_CLASS2TYPE( e.getValue() ) ); } public int hashCode() { return KEY2JAVAHASH( key[ index ] ) ^ VALUE2JAVAHASH( value[ index ] ); } public String toString() { return key[ index ] + "=>" + value[ index ]; } } #ifdef Linked /** Modifies the {@link #link} vector so that the given entry is removed. * This method will complete in constant time. * * @param i the index of an entry. 
*/ protected void fixPointers( final int i ) { if ( size == 0 ) { first = last = -1; return; } if ( first == i ) { first = GET_NEXT(link[ i ]); if (0 <= first) { // Special case of SET_PREV( link[ first ], -1 ) link[ first ] |= (-1 & 0xFFFFFFFFL) << 32; } return; } if ( last == i ) { last = GET_PREV(link[ i ]); if (0 <= last) { // Special case of SET_NEXT( link[ last ], -1 ) link[ last ] |= -1 & 0xFFFFFFFFL; } return; } final long linki = link[ i ]; final int prev = GET_PREV(linki); final int next = GET_NEXT(linki); COPY_NEXT(link[ prev ], linki); COPY_PREV(link[ next ], linki); } /** Modifies the {@link #link} vector for a shift from s to d. *
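(Both pointers of an entry are packed in a single long, as described at the
 * {@link #link} field; given that encoding, they can be recovered as
 *
 *   int next = (int)link[ i ];             // low 32 bits
 *   int prev = (int)( link[ i ] >>> 32 );  // high 32 bits
 *
 * which is, in essence, what the GET_NEXT/GET_PREV macros used below compute.)
 *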

This method will complete in constant time. * * @param s the source position. * @param d the destination position. */ protected void fixPointers( int s, int d ) { if ( size == 1 ) { first = last = d; // Special case of SET_UPPER_LOWER( link[ d ], -1, -1 ) link[ d ] = -1L; return; } if ( first == s ) { first = d; SET_PREV( link[ GET_NEXT(link[ s ]) ], d ); link[ d ] = link[ s ]; return; } if ( last == s ) { last = d; SET_NEXT( link[ GET_PREV(link[ s ])], d ); link[ d ] = link[ s ]; return; } final long links = link[ s ]; final int prev = GET_PREV(links); final int next = GET_NEXT(links); SET_NEXT( link[ prev ], d ); SET_PREV( link[ next ], d ); link[ d ] = links; } /** Returns the first key of this map in iteration order. * * @return the first key in iteration order. */ public KEY_GENERIC_TYPE FIRST_KEY() { if ( size == 0 ) throw new NoSuchElementException(); return key[ first ]; } /** Returns the last key of this map in iteration order. * * @return the last key in iteration order. */ public KEY_GENERIC_TYPE LAST_KEY() { if ( size == 0 ) throw new NoSuchElementException(); return key[ last ]; } public KEY_COMPARATOR KEY_SUPER_GENERIC comparator() { return null; } public SORTED_MAP KEY_VALUE_GENERIC tailMap( KEY_GENERIC_TYPE from ) { throw new UnsupportedOperationException(); } public SORTED_MAP KEY_VALUE_GENERIC headMap( KEY_GENERIC_TYPE to ) { throw new UnsupportedOperationException(); } public SORTED_MAP KEY_VALUE_GENERIC subMap( KEY_GENERIC_TYPE from, KEY_GENERIC_TYPE to ) { throw new UnsupportedOperationException(); } /** A list iterator over a linked map. * *

This class provides a list iterator over a linked hash map. The constructor runs in constant time. */ private class MapIterator { /** The entry that will be returned by the next call to {@link java.util.ListIterator#previous()} (or null if no previous entry exists). */ int prev = -1; /** The entry that will be returned by the next call to {@link java.util.ListIterator#next()} (or null if no next entry exists). */ int next = -1; /** The last entry that was returned (or -1 if we did not iterate or used {@link java.util.Iterator#remove()}). */ int curr = -1; /** The current index (in the sense of a {@link java.util.ListIterator}). Note that this value is not meaningful when this iterator has been created using the nonempty constructor.*/ int index = -1; private MapIterator() { next = first; index = 0; } private MapIterator( final KEY_GENERIC_TYPE from ) { if ( KEY_EQUALS_NULL( from ) ) { if ( OPEN_HASH_MAP.this.containsNullKey ) { next = GET_NEXT( link[ n ] ); prev = n; return; } else throw new NoSuchElementException( "The key " + from + " does not belong to this map." ); } if ( KEY_EQUALS( key[ last ], from ) ) { prev = last; index = size; return; } // The starting point. int pos = KEY2INTHASH( from ) & mask; // There's always an unused entry. while( ! KEY_IS_NULL( key[ pos ] ) ) { if ( KEY_EQUALS_NOT_NULL( key[ pos ], from ) ) { // Note: no valid index known. next = GET_NEXT( link[ pos ] ); prev = pos; return; } pos = ( pos + 1 ) & mask; } throw new NoSuchElementException( "The key " + from + " does not belong to this map." ); } public boolean hasNext() { return next != -1; } public boolean hasPrevious() { return prev != -1; } private final void ensureIndexKnown() { if ( index >= 0 ) return; if ( prev == -1 ) { index = 0; return; } if ( next == -1 ) { index = size; return; } int pos = first; index = 1; while( pos != prev ) { pos = GET_NEXT( link[ pos ] ); index++; } } public int nextIndex() { ensureIndexKnown(); return index; } public int previousIndex() { ensureIndexKnown(); return index - 1; } public int nextEntry() { if ( ! hasNext() ) throw new NoSuchElementException(); curr = next; next = GET_NEXT(link[ curr ]); prev = curr; if ( index >= 0 ) index++; return curr; } public int previousEntry() { if ( ! hasPrevious() ) throw new NoSuchElementException(); curr = prev; prev = GET_PREV(link[ curr ]); next = curr; if ( index >= 0 ) index--; return curr; } public void remove() { ensureIndexKnown(); if ( curr == -1 ) throw new IllegalStateException(); if ( curr == prev ) { /* If the last operation was a next(), we are removing an entry that preceeds the current index, and thus we must decrement it. */ index--; prev = GET_PREV(link[ curr ]); } else next = GET_NEXT(link[ curr ]); size--; /* Now we manually fix the pointers. Because of our knowledge of next and prev, this is going to be faster than calling fixPointers(). */ if ( prev == -1 ) first = next; else SET_NEXT( link[ prev ], next ); if ( next == -1 ) last = prev; else SET_PREV( link[ next ], prev ); int last, slot, pos = curr; curr = -1; if ( pos == n ) { OPEN_HASH_MAP.this.containsNullKey = false; #if KEYS_REFERENCE key[ n ] = null; #endif #if VALUES_REFERENCE value[ n ] = null; #endif } else { KEY_GENERIC_TYPE curr; final KEY_GENERIC_TYPE[] key = OPEN_HASH_MAP.this.key; // We have to horribly duplicate the shiftKeys() code because we need to update next/prev. 
for(;;) { pos = ( ( last = pos ) + 1 ) & mask; for(;;) { if ( KEY_IS_NULL( curr = key[ pos ] ) ) { key[ last ] = KEY_NULL; #if VALUES_REFERENCE value[ last ] = null; #endif return; } slot = KEY2INTHASH( curr ) & mask; if ( last <= pos ? last >= slot || slot > pos : last >= slot && slot > pos ) break; pos = ( pos + 1 ) & mask; } key[ last ] = curr; value[ last ] = value[ pos ]; if ( next == pos ) next = last; if ( prev == pos ) prev = last; fixPointers( pos, last ); } } } public int skip( final int n ) { int i = n; while( i-- != 0 && hasNext() ) nextEntry(); return n - i - 1; } public int back( final int n ) { int i = n; while( i-- != 0 && hasPrevious() ) previousEntry(); return n - i - 1; } } private class EntryIterator extends MapIterator implements ObjectListIterator { private MapEntry entry; public EntryIterator() {} public EntryIterator( KEY_GENERIC_TYPE from ) { super( from ); } public MapEntry next() { return entry = new MapEntry( nextEntry() ); } public MapEntry previous() { return entry = new MapEntry( previousEntry() ); } @Override public void remove() { super.remove(); entry.index = -1; // You cannot use a deleted entry. } public void set( MAP.Entry KEY_VALUE_GENERIC ok ) { throw new UnsupportedOperationException(); } public void add( MAP.Entry KEY_VALUE_GENERIC ok ) { throw new UnsupportedOperationException(); } } private class FastEntryIterator extends MapIterator implements ObjectListIterator { final MapEntry entry = new MapEntry(); public FastEntryIterator() {} public FastEntryIterator( KEY_GENERIC_TYPE from ) { super( from ); } public MapEntry next() { entry.index = nextEntry(); return entry; } public MapEntry previous() { entry.index = previousEntry(); return entry; } public void set( MAP.Entry KEY_VALUE_GENERIC ok ) { throw new UnsupportedOperationException(); } public void add( MAP.Entry KEY_VALUE_GENERIC ok ) { throw new UnsupportedOperationException(); } } #else /** An iterator over a hash map. */ private class MapIterator { /** The index of the last entry returned, if positive or zero; initially, {@link #n}. If negative, the last entry returned was that of the key of index {@code - pos - 1} from the {@link #wrapped} list. */ int pos = n; /** The index of the last entry that has been returned (more precisely, the value of {@link #pos} if {@link #pos} is positive, or {@link Integer#MIN_VALUE} if {@link #pos} is negative). It is -1 if either we did not return an entry yet, or the last returned entry has been removed. */ int last = -1; /** A downward counter measuring how many entries must still be returned. */ int c = size; /** A boolean telling us whether we should return the entry with the null key. */ boolean mustReturnNullKey = OPEN_HASH_MAP.this.containsNullKey; /** A lazily allocated list containing keys of entries that have wrapped around the table because of removals. */ ARRAY_LIST KEY_GENERIC wrapped; public boolean hasNext() { return c != 0; } public int nextEntry() { if ( ! hasNext() ) throw new NoSuchElementException(); c--; if ( mustReturnNullKey ) { mustReturnNullKey = false; return last = n; } final KEY_GENERIC_TYPE key[] = OPEN_HASH_MAP.this.key; for(;;) { if ( --pos < 0 ) { // We are just enumerating elements from the wrapped list. last = Integer.MIN_VALUE; final KEY_GENERIC_TYPE k = wrapped.GET_KEY( - pos - 1 ); int p = KEY2INTHASH( k ) & mask; while ( ! KEY_EQUALS_NOT_NULL( k, key[ p ] ) ) p = ( p + 1 ) & mask; return p; } if ( ! 
KEY_IS_NULL( key[ pos ] ) ) return last = pos; } } /** Shifts left entries with the specified hash code, starting at the specified position, * and empties the resulting free entry. * * @param pos a starting position. */ private final void shiftKeys( int pos ) { // Shift entries with the same hash. int last, slot; KEY_GENERIC_TYPE curr; final KEY_GENERIC_TYPE[] key = OPEN_HASH_MAP.this.key; for(;;) { pos = ( ( last = pos ) + 1 ) & mask; for(;;) { if ( KEY_IS_NULL( curr = key[ pos ] ) ) { key[ last ] = KEY_NULL; #if VALUES_REFERENCE value[ last ] = null; #endif return; } slot = KEY2INTHASH( curr ) & mask; if ( last <= pos ? last >= slot || slot > pos : last >= slot && slot > pos ) break; pos = ( pos + 1 ) & mask; } if ( pos < last ) { // Wrapped entry. if ( wrapped == null ) wrapped = new ARRAY_LIST KEY_GENERIC( 2 ); wrapped.add( key[ pos ] ); } key[ last ] = curr; value[ last ] = value[ pos ]; } } public void remove() { if ( last == -1 ) throw new IllegalStateException(); if ( last == n ) { containsNullKey = false; #if KEYS_REFERENCE key[ n ] = null; #endif #if VALUES_REFERENCE value[ n ] = null; #endif } else if ( pos >= 0 ) shiftKeys( last ); else { // We're removing wrapped entries. #if KEYS_REFERENCE OPEN_HASH_MAP.this.remove( wrapped.set( - pos - 1, null ) ); #else OPEN_HASH_MAP.this.remove( wrapped.GET_KEY( - pos - 1 ) ); #endif last = -1; // Note that we must not decrement size return; } size--; last = -1; // You can no longer remove this entry. if ( ASSERTS ) checkTable(); } public int skip( final int n ) { int i = n; while( i-- != 0 && hasNext() ) nextEntry(); return n - i - 1; } } private class EntryIterator extends MapIterator implements ObjectIterator { private MapEntry entry; public MAP.Entry KEY_VALUE_GENERIC next() { return entry = new MapEntry( nextEntry() ); } @Override public void remove() { super.remove(); entry.index = -1; // You cannot use a deleted entry. } } private class FastEntryIterator extends MapIterator implements ObjectIterator { private final MapEntry entry = new MapEntry(); public MapEntry next() { entry.index = nextEntry(); return entry; } } #endif #ifdef Linked private final class MapEntrySet extends AbstractObjectSortedSet implements FastSortedEntrySet KEY_VALUE_GENERIC { public ObjectBidirectionalIterator iterator() { return new EntryIterator(); } public Comparator comparator() { return null; } public ObjectSortedSet subSet( MAP.Entry KEY_VALUE_GENERIC fromElement, MAP.Entry KEY_VALUE_GENERIC toElement) { throw new UnsupportedOperationException(); } public ObjectSortedSet headSet( MAP.Entry KEY_VALUE_GENERIC toElement ) { throw new UnsupportedOperationException(); } public ObjectSortedSet tailSet( MAP.Entry KEY_VALUE_GENERIC fromElement ) { throw new UnsupportedOperationException(); } public MAP.Entry KEY_VALUE_GENERIC first() { if ( size == 0 ) throw new NoSuchElementException(); return new MapEntry( OPEN_HASH_MAP.this.first ); } public MAP.Entry KEY_VALUE_GENERIC last() { if ( size == 0 ) throw new NoSuchElementException(); return new MapEntry( OPEN_HASH_MAP.this.last ); } #else private final class MapEntrySet extends AbstractObjectSet implements FastEntrySet KEY_VALUE_GENERIC { public ObjectIterator iterator() { return new EntryIterator(); } public ObjectIterator fastIterator() { return new FastEntryIterator(); } #endif SUPPRESS_WARNINGS_KEY_VALUE_UNCHECKED public boolean contains( final Object o ) { if ( !( o instanceof Map.Entry ) ) return false; final Map.Entry e = (Map.Entry)o; #if KEYS_PRIMITIVE if (e.getKey() == null || ! 
(e.getKey() instanceof KEY_CLASS)) return false; #endif #if VALUES_PRIMITIVE if (e.getValue() == null || ! (e.getValue() instanceof VALUE_CLASS)) return false; #endif final KEY_GENERIC_TYPE k = KEY_OBJ2TYPE( KEY_GENERIC_CAST e.getKey() ); final VALUE_GENERIC_TYPE v = VALUE_OBJ2TYPE( VALUE_GENERIC_CAST e.getValue() ); if ( KEY_EQUALS_NULL( k ) ) return OPEN_HASH_MAP.this.containsNullKey && VALUE_EQUALS( value[ n ], v ); KEY_GENERIC_TYPE curr; final KEY_GENERIC_TYPE[] key = OPEN_HASH_MAP.this.key; int pos; // The starting point. if ( KEY_IS_NULL( curr = key[ pos = KEY2INTHASH( k ) & mask ] ) ) return false; if ( KEY_EQUALS_NOT_NULL( k, curr ) ) return VALUE_EQUALS( value[ pos ], v ); // There's always an unused entry. while( true ) { if ( KEY_IS_NULL( curr = key[ pos = ( pos + 1 ) & mask ] ) ) return false; if ( KEY_EQUALS_NOT_NULL( k, curr ) ) return VALUE_EQUALS( value[ pos ], v ); } } SUPPRESS_WARNINGS_KEY_VALUE_UNCHECKED @Override public boolean rem( final Object o ) { if ( !( o instanceof Map.Entry ) ) return false; final Map.Entry e = (Map.Entry)o; #if KEYS_PRIMITIVE if (e.getKey() == null || ! (e.getKey() instanceof KEY_CLASS)) return false; #endif #if VALUES_PRIMITIVE if (e.getValue() == null || ! (e.getValue() instanceof VALUE_CLASS)) return false; #endif final KEY_GENERIC_TYPE k = KEY_OBJ2TYPE( KEY_GENERIC_CAST e.getKey() ); final VALUE_GENERIC_TYPE v = VALUE_OBJ2TYPE( VALUE_GENERIC_CAST e.getValue() ); if ( KEY_EQUALS_NULL( k ) ) { if ( containsNullKey && VALUE_EQUALS( value[ n ], v ) ) { removeNullEntry(); return true; } return false; } KEY_GENERIC_TYPE curr; final KEY_GENERIC_TYPE[] key = OPEN_HASH_MAP.this.key; int pos; // The starting point. if ( KEY_IS_NULL( curr = key[ pos = KEY2INTHASH( k ) & mask ] ) ) return false; if ( KEY_EQUALS_NOT_NULL( curr, k ) ) { if ( VALUE_EQUALS( value[ pos ], v ) ) { removeEntry( pos ); return true; } return false; } while( true ) { if ( KEY_IS_NULL( curr = key[ pos = ( pos + 1 ) & mask ] ) ) return false; if ( KEY_EQUALS_NOT_NULL( curr, k ) ) { if ( VALUE_EQUALS( value[ pos ], v ) ) { removeEntry( pos ); return true; } } } } public int size() { return size; } public void clear() { OPEN_HASH_MAP.this.clear(); } #ifdef Linked public ObjectBidirectionalIterator iterator( final MAP.Entry KEY_VALUE_GENERIC from ) { #if KEYS_PRIMITIVE return new EntryIterator( from.ENTRY_GET_KEY() ); #else return new EntryIterator( KEY_CLASS2TYPE( from.getKey() ) ); #endif } public ObjectBidirectionalIterator fastIterator() { return new FastEntryIterator(); } public ObjectBidirectionalIterator fastIterator( final MAP.Entry KEY_VALUE_GENERIC from ) { #if KEYS_PRIMITIVE return new FastEntryIterator( from.ENTRY_GET_KEY() ); #else return new FastEntryIterator( KEY_CLASS2TYPE( from.getKey() ) ); #endif } #endif } #ifdef Linked public FastSortedEntrySet KEY_VALUE_GENERIC ENTRYSET() { if ( entries == null ) entries = new MapEntrySet(); #else public FastEntrySet KEY_VALUE_GENERIC ENTRYSET() { if ( entries == null ) entries = new MapEntrySet(); #endif return entries; } /** An iterator on keys. * *
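A typical boxing-free scan over the keys (a sketch assuming the generated
 * it.unimi.dsi.fastutil.ints.Int2IntOpenHashMap instantiation of this driver,
 * with m an instance of it):
 *
 *   for( IntIterator i = m.keySet().iterator(); i.hasNext(); ) {
 *      final int k = i.nextInt(); // type-specific access: no Integer is created
 *      // ... use k ...
 *   }
 *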

We simply override the {@link java.util.ListIterator#next()}/{@link java.util.ListIterator#previous()} methods * (and possibly their type-specific counterparts) so that they return keys * instead of entries. */ #ifdef Linked private final class KeyIterator extends MapIterator implements KEY_LIST_ITERATOR KEY_GENERIC { public KeyIterator( final KEY_GENERIC_TYPE k ) { super( k ); } public KEY_GENERIC_TYPE PREV_KEY() { return key[ previousEntry() ]; } public void set( KEY_GENERIC_TYPE k ) { throw new UnsupportedOperationException(); } public void add( KEY_GENERIC_TYPE k ) { throw new UnsupportedOperationException(); } #if ! KEYS_REFERENCE public KEY_GENERIC_CLASS previous() { return KEY2OBJ( key[ previousEntry() ] ); } public void set( KEY_CLASS ok ) { throw new UnsupportedOperationException(); } public void add( KEY_CLASS ok ) { throw new UnsupportedOperationException(); } #endif #else private final class KeyIterator extends MapIterator implements KEY_ITERATOR KEY_GENERIC { #endif public KeyIterator() { super(); } public KEY_GENERIC_TYPE NEXT_KEY() { return key[ nextEntry() ]; } #if ! KEYS_REFERENCE public KEY_GENERIC_CLASS next() { return KEY2OBJ( key[ nextEntry() ] ); } #endif } #ifdef Linked private final class KeySet extends ABSTRACT_SORTED_SET KEY_GENERIC { public KEY_LIST_ITERATOR KEY_GENERIC iterator( final KEY_GENERIC_TYPE from ) { return new KeyIterator( from ); } public KEY_LIST_ITERATOR KEY_GENERIC iterator() { return new KeyIterator(); } #else private final class KeySet extends ABSTRACT_SET KEY_GENERIC { public KEY_ITERATOR KEY_GENERIC iterator() { return new KeyIterator(); } #endif public int size() { return size; } public boolean contains( KEY_TYPE k ) { return containsKey( k ); } public boolean rem( KEY_TYPE k ) { final int oldSize = size; OPEN_HASH_MAP.this.remove( k ); return size != oldSize; } public void clear() { OPEN_HASH_MAP.this.clear(); } #ifdef Linked public KEY_GENERIC_TYPE FIRST() { if ( size == 0 ) throw new NoSuchElementException(); return key[ first ]; } public KEY_GENERIC_TYPE LAST() { if ( size == 0 ) throw new NoSuchElementException(); return key[ last ]; } public KEY_COMPARATOR KEY_SUPER_GENERIC comparator() { return null; } final public SORTED_SET KEY_GENERIC tailSet( KEY_GENERIC_TYPE from ) { throw new UnsupportedOperationException(); } final public SORTED_SET KEY_GENERIC headSet( KEY_GENERIC_TYPE to ) { throw new UnsupportedOperationException(); } final public SORTED_SET KEY_GENERIC subSet( KEY_GENERIC_TYPE from, KEY_GENERIC_TYPE to ) { throw new UnsupportedOperationException(); } #endif } #ifdef Linked public SORTED_SET KEY_GENERIC keySet() { #else public SET KEY_GENERIC keySet() { #endif if ( keys == null ) keys = new KeySet(); return keys; } /** An iterator on values. * *
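For example, all values can be summed without boxing (a sketch assuming the
 * generated it.unimi.dsi.fastutil.ints.Int2IntOpenHashMap instantiation of this
 * driver, with m an instance of it):
 *
 *   long sum = 0;
 *   for( IntIterator i = m.values().iterator(); i.hasNext(); ) sum += i.nextInt();
 *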

We simply override the {@link java.util.ListIterator#next()}/{@link java.util.ListIterator#previous()} methods * (and possibly their type-specific counterparts) so that they return values * instead of entries. */ #ifdef Linked private final class ValueIterator extends MapIterator implements VALUE_LIST_ITERATOR VALUE_GENERIC { public VALUE_GENERIC_TYPE PREV_VALUE() { return value[ previousEntry() ]; } #if ! VALUES_REFERENCE public VALUE_GENERIC_CLASS previous() { return VALUE2OBJ( value[ previousEntry() ] ); } public void set( VALUE_CLASS ok ) { throw new UnsupportedOperationException(); } public void add( VALUE_CLASS ok ) { throw new UnsupportedOperationException(); } #endif public void set( VALUE_GENERIC_TYPE v ) { throw new UnsupportedOperationException(); } public void add( VALUE_GENERIC_TYPE v ) { throw new UnsupportedOperationException(); } #else private final class ValueIterator extends MapIterator implements VALUE_ITERATOR VALUE_GENERIC { #endif public ValueIterator() { super(); } public VALUE_GENERIC_TYPE NEXT_VALUE() { return value[ nextEntry() ]; } #if ! VALUES_REFERENCE /** {@inheritDoc} * @deprecated Please use the corresponding type-specific method instead. */ @Deprecated @Override public VALUE_GENERIC_CLASS next() { return VALUE2OBJ( value[ nextEntry() ] ); } #endif } public VALUE_COLLECTION VALUE_GENERIC values() { if ( values == null ) values = new VALUE_ABSTRACT_COLLECTION VALUE_GENERIC() { public VALUE_ITERATOR VALUE_GENERIC iterator() { return new ValueIterator(); } public int size() { return size; } public boolean contains( VALUE_TYPE v ) { return containsValue( v ); } public void clear() { OPEN_HASH_MAP.this.clear(); } }; return values; } /** A no-op for backward compatibility. The kind of tables implemented by * this class never need rehashing. * *

If you need to reduce the table size to fit exactly * this map, use {@link #trim()}. * * @return true. * @see #trim() * @deprecated A no-op. */ @Deprecated public boolean rehash() { return true; } /** Rehashes the map, making the table as small as possible. * *

This method rehashes the table to the smallest size satisfying the * load factor. It can be used when the map will not be changed anymore, so * as to optimize access speed and size. * *
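A typical build-then-freeze sequence (a sketch assuming the generated
 * it.unimi.dsi.fastutil.ints.Int2IntOpenHashMap instantiation of this driver):
 *
 *   Int2IntOpenHashMap m = new Int2IntOpenHashMap( 1000000, .5f ); // generously sized for loading
 *   // ... fill m ...
 *   m.trim(); // m will not grow anymore: shrink the table to the smallest admissible size
 *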

If the table size is already the minimum possible, this method * does nothing. * * @return true if there was enough memory to trim the map. * @see #trim(int) */ public boolean trim() { final int l = arraySize( size, f ); if ( l >= n || size > maxFill( l, f ) ) return true; try { rehash( l ); } catch(OutOfMemoryError cantDoIt) { return false; } return true; } /** Rehashes this map if the table is too large. * *

Let N be the smallest table size that can hold * max(n,{@link #size()}) entries, still satisfying the load factor. If the current * table size is smaller than or equal to N, this method does * nothing. Otherwise, it rehashes this map in a table of size * N. * *
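For example, a map that is reused across work batches of very different sizes can
 * be shrunk back to a typical size after each batch (a sketch assuming the generated
 * it.unimi.dsi.fastutil.ints.Int2IntOpenHashMap instantiation of this driver, with m
 * an instance of it):
 *
 *   // ... fill m and use it ...
 *   m.clear();       // empties the map, but keeps the (possibly very large) table
 *   m.trim( 1000 );  // drop back to a table sized for about one thousand entries
 *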

This method is useful when reusing maps. {@linkplain #clear() Clearing a * map} leaves the table size untouched. If you are reusing a map * many times, you can call this method with a typical * size to avoid keeping around a very large table just * because of a few large transient maps. * * @param n the threshold for the trimming. * @return true if there was enough memory to trim the map. * @see #trim() */ public boolean trim( final int n ) { final int l = HashCommon.nextPowerOfTwo( (int)Math.ceil( n / f ) ); if ( l >= this.n /* the current table size */ || size > maxFill( l, f ) ) return true; try { rehash( l ); } catch( OutOfMemoryError cantDoIt ) { return false; } return true; } /** Rehashes the map. * *

This method implements the basic rehashing strategy, and may be * overriden by subclasses implementing different rehashing strategies (e.g., * disk-based rehashing). However, you should not override this method * unless you understand the internal workings of this class. * * @param newN the new size */ SUPPRESS_WARNINGS_KEY_VALUE_UNCHECKED protected void rehash( final int newN ) { final KEY_GENERIC_TYPE key[] = this.key; final VALUE_GENERIC_TYPE value[] = this.value; final int mask = newN - 1; // Note that this is used by the hashing macro final KEY_GENERIC_TYPE newKey[] = KEY_GENERIC_ARRAY_CAST new KEY_TYPE[ newN + 1 ]; final VALUE_GENERIC_TYPE newValue[] = VALUE_GENERIC_ARRAY_CAST new VALUE_TYPE[ newN + 1 ]; #ifdef Linked int i = first, prev = -1, newPrev = -1, t, pos; final long link[] = this.link; final long newLink[] = new long[ newN + 1 ]; first = -1; for( int j = size; j-- != 0; ) { if ( KEY_EQUALS_NULL( key[ i ] ) ) pos = newN; else { pos = KEY2INTHASH( key[ i ] ) & mask; while ( ! KEY_IS_NULL( newKey[ pos ] ) ) pos = ( pos + 1 ) & mask; } newKey[ pos ] = key[ i ]; newValue[ pos ] = value[ i ]; if ( prev != -1 ) { SET_NEXT( newLink[ newPrev ], pos ); SET_PREV( newLink[ pos ], newPrev ); newPrev = pos; } else { newPrev = first = pos; // Special case of SET(newLink[ pos ], -1, -1); newLink[ pos ] = -1L; } t = i; i = GET_NEXT(link[ i ]); prev = t; } this.link = newLink; this.last = newPrev; if ( newPrev != -1 ) // Special case of SET_NEXT( newLink[ newPrev ], -1 ); newLink[ newPrev ] |= -1 & 0xFFFFFFFFL; #else int i = n, pos; for( int j = realSize(); j-- != 0; ) { while( KEY_IS_NULL( key[ --i ] ) ); if ( ! KEY_IS_NULL( newKey[ pos = KEY2INTHASH( key[ i ] ) & mask ] ) ) while ( ! KEY_IS_NULL( newKey[ pos = ( pos + 1 ) & mask ] ) ); newKey[ pos ] = key[ i ]; newValue[ pos ] = value[ i ]; } newValue[ newN ] = value[ n ]; #endif n = newN; this.mask = mask; maxFill = maxFill( n, f ); this.key = newKey; this.value = newValue; } /** Returns a deep copy of this map. * *
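A sketch of the copy semantics (assuming the generated
 * it.unimi.dsi.fastutil.ints.Int2IntOpenHashMap instantiation of this driver):
 *
 *   Int2IntOpenHashMap m = new Int2IntOpenHashMap();
 *   m.put( 1, 1 );
 *   Int2IntOpenHashMap c = m.clone();
 *   c.put( 2, 2 ); // the internal tables were copied, so m is unaffected
 *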

This method performs a deep copy of this hash map; the data stored in the * map, however, is not cloned. Note that this makes a difference only for object keys. * * @return a deep copy of this map. */ SUPPRESS_WARNINGS_KEY_VALUE_UNCHECKED public OPEN_HASH_MAP KEY_VALUE_GENERIC clone() { OPEN_HASH_MAP KEY_VALUE_GENERIC c; try { c = (OPEN_HASH_MAP KEY_VALUE_GENERIC)super.clone(); } catch(CloneNotSupportedException cantHappen) { throw new InternalError(); } c.keys = null; c.values = null; c.entries = null; c.containsNullKey = containsNullKey; c.key = key.clone(); c.value = value.clone(); #ifdef Linked c.link = link.clone(); #endif #ifdef Custom c.strategy = strategy; #endif return c; } /** Returns a hash code for this map. * * This method overrides the generic method provided by the superclass. * Since equals() is not overriden, it is important * that the value returned by this method is the same value as * the one returned by the overriden method. * * @return a hash code for this map. */ public int hashCode() { int h = 0; for( int j = realSize(), i = 0, t = 0; j-- != 0; ) { while( KEY_IS_NULL( key[ i ] ) ) i++; #if KEYS_REFERENCE if ( this != key[ i ] ) #endif t = KEY2JAVAHASH_NOT_NULL( key[ i ] ); #if VALUES_REFERENCE if ( this != value[ i ] ) #endif t ^= VALUE2JAVAHASH( value[ i ] ); h += t; i++; } // Zero / null keys have hash zero. if ( containsNullKey ) h += VALUE2JAVAHASH( value[ n ] ); return h; } private void writeObject(java.io.ObjectOutputStream s) throws java.io.IOException { final KEY_GENERIC_TYPE key[] = this.key; final VALUE_GENERIC_TYPE value[] = this.value; final MapIterator i = new MapIterator(); s.defaultWriteObject(); for( int j = size, e; j-- != 0; ) { e = i.nextEntry(); s.WRITE_KEY( key[ e ] ); s.WRITE_VALUE( value[ e ] ); } } SUPPRESS_WARNINGS_KEY_VALUE_UNCHECKED private void readObject(java.io.ObjectInputStream s) throws java.io.IOException, ClassNotFoundException { s.defaultReadObject(); n = arraySize( size, f ); maxFill = maxFill( n, f ); mask = n - 1; final KEY_GENERIC_TYPE key[] = this.key = KEY_GENERIC_ARRAY_CAST new KEY_TYPE[ n + 1 ]; final VALUE_GENERIC_TYPE value[] = this.value = VALUE_GENERIC_ARRAY_CAST new VALUE_TYPE[ n + 1 ]; #ifdef Linked final long link[] = this.link = new long[ n + 1 ]; int prev = -1; first = last = -1; #endif KEY_GENERIC_TYPE k; VALUE_GENERIC_TYPE v; for( int i = size, pos; i-- != 0; ) { k = KEY_GENERIC_CAST s.READ_KEY(); v = VALUE_GENERIC_CAST s.READ_VALUE(); if ( KEY_EQUALS_NULL( k ) ) { pos = n; containsNullKey = true; } else { pos = KEY2INTHASH( k ) & mask; while ( ! KEY_IS_NULL( key[ pos ] ) ) pos = ( pos + 1 ) & mask; } key[ pos ] = k; value[ pos ] = v; #ifdef Linked if ( first != -1 ) { SET_NEXT( link[ prev ], pos ); SET_PREV( link[ pos ], prev ); prev = pos; } else { prev = first = pos; // Special case of SET_PREV( newLink[ pos ], -1 ); link[ pos ] |= (-1L & 0xFFFFFFFFL) << 32; } #endif } #ifdef Linked last = prev; if ( prev != -1 ) // Special case of SET_NEXT( link[ prev ], -1 ); link[ prev ] |= -1 & 0xFFFFFFFFL; #endif if ( ASSERTS ) checkTable(); } #ifdef ASSERTS_CODE private void checkTable() { assert ( n & -n ) == n : "Table length is not a power of two: " + n; assert n == key.length - 1; int n = key.length - 1; while( n-- != 0 ) if ( ! KEY_IS_NULL( key[ n ] ) && ! 
containsKey( key[ n ] ) ) throw new AssertionError( "Hash table has key " + key[ n ] + " marked as occupied, but the key does not belong to the table" ); #if KEYS_PRIMITIVE java.util.HashSet s = new java.util.HashSet (); #else java.util.HashSet s = new java.util.HashSet(); #endif for( int i = key.length; i-- != 0; ) if ( ! KEY_IS_NULL( key[ i ] ) && ! s.add( key[ i ] ) ) throw new AssertionError( "Key " + key[ i ] + " appears twice at position " + i ); #ifdef Linked KEY_BIDI_ITERATOR KEY_GENERIC i = keySet().iterator(); KEY_GENERIC_TYPE k; n = size(); while( n-- != 0 ) if ( ! containsKey( k = i.NEXT_KEY() ) ) throw new AssertionError( "Linked hash table forward enumerates key " + k + ", but the key does not belong to the table" ); if ( i.hasNext() ) throw new AssertionError( "Forward iterator not exhausted" ); n = size(); if ( n > 0 ) { i = keySet().iterator( LAST_KEY() ); while( n-- != 0 ) if ( ! containsKey( k = i.PREV_KEY() ) ) throw new AssertionError( "Linked hash table backward enumerates key " + k + ", but the key does not belong to the table" ); if ( i.hasPrevious() ) throw new AssertionError( "Previous iterator not exhausted" ); } #endif } #else private void checkTable() {} #endif #ifdef TEST private static long seed = System.currentTimeMillis(); private static java.util.Random r = new java.util.Random( seed ); private static KEY_TYPE genKey() { #if KEY_CLASS_Byte || KEY_CLASS_Short || KEY_CLASS_Character return (KEY_TYPE)(r.nextInt()); #elif KEYS_PRIMITIVE return r.NEXT_KEY(); #elif !KEY_CLASS_Reference #ifdef Custom int i = r.nextInt( 3 ); byte a[] = new byte[ i ]; while( i-- != 0 ) a[ i ] = (byte)r.nextInt(); return a; #else return Integer.toBinaryString( r.nextInt() ); #endif #else return new java.io.Serializable() {}; #endif } private static VALUE_TYPE genValue() { #if VALUE_CLASS_Byte || VALUE_CLASS_Short || VALUE_CLASS_Character return (VALUE_TYPE)(r.nextInt()); #elif VALUES_PRIMITIVE return r.NEXT_VALUE(); #elif !VALUE_CLASS_Reference return Integer.toBinaryString( r.nextInt() ); #else return new java.io.Serializable() {}; #endif } private static final class ArrayComparator implements java.util.Comparator { public int compare( Object a, Object b ) { byte[] aa = (byte[])a; byte[] bb = (byte[])b; int length = Math.min( aa.length, bb.length ); for( int i = 0; i < length; i++ ) { if ( aa[ i ] < bb[ i ] ) return -1; if ( aa[ i ] > bb[ i ] ) return 1; } return aa.length == bb.length ? 0 : ( aa.length < bb.length ? -1 : 1 ); } } private static final class MockMap extends java.util.TreeMap { private java.util.List list = new java.util.ArrayList(); public MockMap( java.util.Comparator c ) { super( c ); } public Object put( Object k, Object v ) { if ( ! 
containsKey( k ) ) list.add( k ); return super.put( k, v ); } public void putAll( Map m ) { java.util.Iterator i = m.entrySet().iterator(); while( i.hasNext() ) { Map.Entry e = (Map.Entry)i.next(); put( e.getKey(), e.getValue() ); } } public Object remove( Object k ) { if ( containsKey( k ) ) { int i = list.size(); while( i-- != 0 ) if ( comparator().compare( list.get( i ), k ) == 0 ) { list.remove( i ); break; } } return super.remove( k ); } private void justRemove( Object k ) { super.remove( k ); } private java.util.Set justEntrySet() { return super.entrySet(); } private java.util.Set justKeySet() { return super.keySet(); } public java.util.Set keySet() { return new java.util.AbstractSet() { final java.util.Set keySet = justKeySet(); public boolean contains( Object k ) { return keySet.contains( k ); } public int size() { return keySet.size(); } public java.util.Iterator iterator() { return new java.util.Iterator() { final java.util.Iterator iterator = list.iterator(); Object curr; public Object next() { return curr = iterator.next(); } public boolean hasNext() { return iterator.hasNext(); } public void remove() { justRemove( curr ); iterator.remove(); } }; } }; } public java.util.Set entrySet() { return new java.util.AbstractSet() { final java.util.Set entrySet = justEntrySet(); public boolean contains( Object k ) { return entrySet.contains( k ); } public int size() { return entrySet.size(); } public java.util.Iterator iterator() { return new java.util.Iterator() { final java.util.Iterator iterator = list.iterator(); Object curr; public Object next() { curr = iterator.next(); #if VALUE_CLASS_Reference #if KEY_CLASS_Reference return new ABSTRACT_MAP.BasicEntry( (Object)curr, (Object)get(curr) ) { #else return new ABSTRACT_MAP.BasicEntry( (KEY_CLASS)curr, (Object)get(curr) ) { #endif #else #if KEY_CLASS_Reference return new ABSTRACT_MAP.BasicEntry( (Object)curr, (VALUE_CLASS)get(curr) ) { #else return new ABSTRACT_MAP.BasicEntry( (KEY_CLASS)curr, (VALUE_CLASS)get(curr) ) { #endif #endif public VALUE_TYPE setValue( VALUE_TYPE v ) { return VALUE_OBJ2TYPE(put( getKey(), VALUE2OBJ(v) )); } }; } public boolean hasNext() { return iterator.hasNext(); } public void remove() { justRemove( ((Map.Entry)curr).getKey() ); iterator.remove(); } }; } }; } } private static java.text.NumberFormat format = new java.text.DecimalFormat( "#,###.00" ); private static java.text.FieldPosition fp = new java.text.FieldPosition( 0 ); private static String format( double d ) { StringBuffer s = new StringBuffer(); return format.format( d, s, fp ).toString(); } private static void speedTest( int n, float f, boolean comp ) { #ifndef Custom int i, j; OPEN_HASH_MAP m; #ifdef Linked java.util.LinkedHashMap t; #else java.util.HashMap t; #endif KEY_TYPE k[] = new KEY_TYPE[n]; KEY_TYPE nk[] = new KEY_TYPE[n]; VALUE_TYPE v[] = new VALUE_TYPE[n]; long ns; for( i = 0; i < n; i++ ) { k[i] = genKey(); nk[i] = genKey(); v[i] = genValue(); } double totPut = 0, totYes = 0, totNo = 0, totIter = 0, totRemYes = 0, totRemNo = 0, d; if ( comp ) { for( j = 0; j < 20; j++ ) { #ifdef Linked t = new java.util.LinkedHashMap( 16 ); #else t = new java.util.HashMap( 16 ); #endif /* We put pairs to t. */ ns = System.nanoTime(); for( i = 0; i < n; i++ ) t.put( KEY2OBJ( k[i] ), VALUE2OBJ( v[i] ) ); d = ( System.nanoTime() - ns ) / (double)n; if ( j > 2 ) totPut += d; System.out.print("Put: " + format( d ) + "ns " ); /* We check for pairs in t. 
*/ ns = System.nanoTime(); for( i = 0; i < n; i++ ) t.containsKey( KEY2OBJ( k[i] ) ); d = ( System.nanoTime() - ns ) / (double)n; if ( j > 2 ) totYes += d; System.out.print("Yes: " + format( d ) + "ns " ); /* We check for pairs not in t. */ ns = System.nanoTime(); for( i = 0; i < n; i++ ) t.containsKey( KEY2OBJ( nk[i] ) ); d = ( System.nanoTime() - ns ) / (double)n; if ( j > 2 ) totNo += d; System.out.print("No: " + format( d ) + "ns " ); /* We iterate on t. */ ns = System.nanoTime(); for( java.util.Iterator it = t.entrySet().iterator(); it.hasNext(); it.next() ); d = ( System.nanoTime() - ns ) / (double)n; if ( j > 2 ) totIter += d; System.out.print("Iter: " + format( d ) + "ns " ); /* We delete pairs not in t. */ ns = System.nanoTime(); for( i = 0; i < n; i++ ) t.remove( KEY2OBJ( nk[i] ) ); d = ( System.nanoTime() - ns ) / (double)n; if ( j > 2 ) totRemNo += d; System.out.print("RemNo: " + format( d ) + "ns " ); /* We delete pairs in t. */ ns = System.nanoTime(); for( i = 0; i < n; i++ ) t.remove( KEY2OBJ( k[i] ) ); d = ( System.nanoTime() - ns ) / (double)n; if ( j > 2 ) totRemYes += d; System.out.print("RemYes: " + format( d ) + "ns " ); System.out.println(); } System.out.println(); System.out.println( "java.util Put: " + format( totPut/(j-3) ) + "ns Yes: " + format( totYes/(j-3) ) + "ns No: " + format( totNo/(j-3) ) + "ns Iter: " + format( totIter/(j-3) ) + "ns RemNo: " + format( totRemNo/(j-3) ) + "ns RemYes: " + format( totRemYes/(j-3) ) + "K/s" ); System.out.println(); totPut = totYes = totNo = totIter = totRemYes = totRemNo = 0; } for( j = 0; j < 20; j++ ) { m = new OPEN_HASH_MAP( 16, f ); /* We put pairs to m. */ ns = System.nanoTime(); for( i = 0; i < n; i++ ) m.put( k[i], v[i] ); d = ( System.nanoTime() - ns ) / (double)n; if ( j > 2 ) totPut += d; System.out.print("Put: " + format( d ) + "ns " ); /* We check for pairs in m. */ ns = System.nanoTime(); for( i = 0; i < n; i++ ) m.containsKey( k[i] ); d = ( System.nanoTime() - ns ) / (double)n; if ( j > 2 ) totYes += d; System.out.print("Yes: " + format( d ) + "ns " ); /* We check for pairs not in m. */ ns = System.nanoTime(); for( i = 0; i < n; i++ ) m.containsKey( nk[i] ); d = ( System.nanoTime() - ns ) / (double)n; if ( j > 2 ) totNo += d; System.out.print("No: " + format( d ) + "ns " ); /* We iterate on m. */ ns = System.nanoTime(); for( java.util.Iterator it = m.entrySet().iterator(); it.hasNext(); it.next() ); d = ( System.nanoTime() - ns ) / (double)n; if ( j > 2 ) totIter += d; System.out.print("Iter: " + format( d ) + "ns " ); /* We delete pairs not in m. */ ns = System.nanoTime(); for( i = 0; i < n; i++ ) m.remove( nk[i] ); d = ( System.nanoTime() - ns ) / (double)n; if ( j > 2 ) totRemNo += d; System.out.print("RemNo: " + format( d ) + "ns " ); /* We delete pairs in m. */ ns = System.nanoTime(); for( i = 0; i < n; i++ ) m.remove( k[i] ); d = ( System.nanoTime() - ns ) / (double)n; if ( j > 2 ) totRemYes += d; System.out.print("RemYes: " + format( d ) + "ns " ); System.out.println(); } System.out.println(); System.out.println( "fastutil Put: " + format( totPut/(j-3) ) + "ns Yes: " + format( totYes/(j-3) ) + "ns No: " + format( totNo/(j-3) ) + "ns Iter: " + format( totIter/(j-3) ) + "ns RemNo: " + format( totRemNo/(j-3) ) + "ns RemYes: " + format( totRemYes/(j-3) ) + "ns" ); System.out.println(); #endif } private static boolean valEquals(Object o1, Object o2) { return o1 == null ? 
o2 == null : o1.equals(o2); } private static void fatal( String msg ) { System.out.println( msg ); System.exit( 1 ); } private static void ensure( boolean cond, String msg ) { if ( cond ) return; fatal( msg ); } protected static void test( int n, float f ) { #if !defined(Custom) || KEYS_REFERENCE #ifdef Custom OPEN_HASH_MAP m = new OPEN_HASH_MAP(Hash.DEFAULT_INITIAL_SIZE, f, it.unimi.dsi.fastutil.bytes.ByteArrays.HASH_STRATEGY); #else OPEN_HASH_MAP m = new OPEN_HASH_MAP(Hash.DEFAULT_INITIAL_SIZE, f); #endif #ifdef Linked #ifdef Custom Map t = new MockMap( new ArrayComparator() ); #else Map t = new java.util.LinkedHashMap(); #endif #else #ifdef Custom Map t = new java.util.TreeMap(new ArrayComparator()); #else Map t = new java.util.HashMap(); #endif #endif /* First of all, we fill t with random data. */ for(int i=0; i 0 ) { java.util.ListIterator i, j; Object J; j = new java.util.LinkedList( t.keySet() ).listIterator(); int e = r.nextInt( t.size() ); Object from; do from = j.next(); while( e-- != 0 ); i = (java.util.ListIterator)((SORTED_SET)m.keySet()).iterator( KEY_OBJ2TYPE( from ) ); for( int k = 0; k < 2*n; k++ ) { ensure( i.hasNext() == j.hasNext(), "Error (" + seed + "): divergence in hasNext() (iterator with starting point " + from + ")" ); ensure( i.hasPrevious() == j.hasPrevious(), "Error (" + seed + "): divergence in hasPrevious() (iterator with starting point " + from + ")" ); if ( r.nextFloat() < .8 && i.hasNext() ) { #ifdef Custom ensure( m.strategy().equals( i.next(), J = j.next() ), "Error (" + seed + "): divergence in next() (iterator with starting point " + from + ")" ); #else ensure( i.next().equals( J = j.next() ), "Error (" + seed + "): divergence in next() (iterator with starting point " + from + ")" ); #endif if ( r.nextFloat() < 0.5 ) { i.remove(); j.remove(); t.remove( J ); } } else if ( r.nextFloat() < .2 && i.hasPrevious() ) { #ifdef Custom ensure( m.strategy().equals( i.previous(), J = j.previous() ), "Error (" + seed + "): divergence in previous() (iterator with starting point " + from + ")" ); #else ensure( i.previous().equals( J = j.previous() ), "Error (" + seed + "): divergence in previous() (iterator with starting point " + from + ")" ); #endif if ( r.nextFloat() < 0.5 ) { i.remove(); j.remove(); t.remove( J ); } } ensure( i.nextIndex() == j.nextIndex(), "Error (" + seed + "): divergence in nextIndex() (iterator with starting point " + from + ")" ); ensure( i.previousIndex() == j.previousIndex(), "Error (" + seed + "): divergence in previousIndex() (iterator with starting point " + from + ")" ); } } /* Now we check that m actually holds that data. */ ensure( m.equals(t), "Error (" + seed + "): ! m.equals( t ) after iteration" ); ensure( t.equals(m), "Error (" + seed + "): ! t.equals( m ) after iteration" ); #endif /* Now we take out of m everything, and check that it is empty. 
*/ for(java.util.Iterator i=t.keySet().iterator(); i.hasNext(); ) m.remove(i.next()); if (!m.isEmpty()) { System.out.println("Error (" + seed + "): m is not empty (as it should be)"); System.exit( 1 ); } #ifdef NumericEnhancements #if VALUE_CLASS_Byte || VALUE_CLASS_Character || VALUE_CLASS_Short || VALUE_CLASS_Integer || VALUE_CLASS_Long /* Now we check that increment works properly, using random data */ { t.clear(); m.clear(); for( int k = 0; k < 2*n; k++ ) { KEY_TYPE T = genKey(); VALUE_TYPE U = genValue(); VALUE_TYPE rU = m.increment(T, U); VALUE_GENERIC_CLASS tU = (VALUE_GENERIC_CLASS) t.get(KEY2OBJ(T)); if (null == tU) { ensure(m.defaultReturnValue() == rU, "Error (" + seed + "): map increment does not return proper starting value." ); t.put( KEY2OBJ(T), VALUE2OBJ((VALUE_TYPE) (m.defaultReturnValue() + U)) ); } else { t.put( KEY2OBJ(T), VALUE2OBJ((VALUE_TYPE) (((VALUE_TYPE) tU) + U)) ); } } // Maps should contain identical values ensure( new java.util.HashMap(m).equals(new java.util.HashMap(t)), "Error(" + seed + "): incremented maps are not equal." ); } #endif #endif #if (KEY_CLASS_Integer || KEY_CLASS_Long) && (VALUE_CLASS_Integer || VALUE_CLASS_Long) m = new OPEN_HASH_MAP(n, f); t.clear(); int x; /* Now we torture-test the hash table. This part is implemented only for integers and longs. */ int p = m.key.length; for(int i=0; i2) f = Float.parseFloat(args[2]); if ( args.length > 3 ) r = new java.util.Random( seed = Long.parseLong( args[ 3 ] ) ); try { if ("speedTest".equals(args[0]) || "speedComp".equals(args[0])) speedTest( n, f, "speedComp".equals(args[0]) ); else if ( "test".equals( args[0] ) ) test(n, f); } catch( Throwable e ) { e.printStackTrace( System.err ); System.err.println( "seed: " + seed ); } } #endif } fastutil-7.1.0/drv/OpenHashSet.drv0000664000000000000000000021436313050701620015546 0ustar rootroot/* * Copyright (C) 2002-2016 Sebastiano Vigna * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package PACKAGE; import it.unimi.dsi.fastutil.Hash; import it.unimi.dsi.fastutil.HashCommon; import static it.unimi.dsi.fastutil.HashCommon.arraySize; import static it.unimi.dsi.fastutil.HashCommon.maxFill; import java.util.Arrays; import java.util.Collection; import java.util.Iterator; import java.util.NoSuchElementException; #ifdef Linked #if KEYS_REFERENCE import java.util.Comparator; #endif /** A type-specific linked hash set with with a fast, small-footprint implementation. * *

Instances of this class use a hash table to represent a set. The table is * filled up to a specified load factor, and then doubled in size to * accommodate new entries. If the table is emptied below one fourth * of the load factor, it is halved in size. However, halving is * not performed when deleting entries from an iterator, as it would interfere * with the iteration process. * *
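 * For instance, pre-sizing an instance avoids any intermediate resizing (a minimal sketch, assuming
 * the generated it.unimi.dsi.fastutil.ints.IntLinkedOpenHashSet variant):
 *
 *    IntLinkedOpenHashSet s = new IntLinkedOpenHashSet( 1000000, .5f ); // room for one million elements
 *    for( int i = 0; i < 1000000; i++ ) s.add( i );                    // no doubling should occur while filling
 *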

Note that {@link #clear()} does not modify the hash table size. * Rather, a family of {@linkplain #trim() trimming * methods} lets you control the size of the table; this is particularly useful * if you reuse instances of this class. * *
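 * For instance, when an instance is reused many times (a minimal sketch, assuming the generated
 * it.unimi.dsi.fastutil.ints.IntLinkedOpenHashSet variant, with set an existing, possibly very large instance):
 *
 *    set.clear();      // removes all elements, but leaves the table size unchanged
 *    set.trim( 1000 ); // asks for a table sized for a typical workload of about 1000 elements
 *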

Iterators generated by this set will enumerate elements in the same order in which they * have been added to the set (addition of elements already present * in the set does not change the iteration order). Note that this order has nothing in common with the natural * order of the keys. The order is kept by means of a doubly linked list, represented * via an array of longs parallel to the table. * *
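 * For instance (a minimal sketch, assuming the generated it.unimi.dsi.fastutil.ints.IntLinkedOpenHashSet variant):
 *
 *    IntLinkedOpenHashSet s = new IntLinkedOpenHashSet();
 *    s.add( 3 ); s.add( 1 ); s.add( 2 );
 *    s.add( 3 );                         // already present: the iteration order is unchanged
 *    // an iterator on s now returns 3, 1, 2
 *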

This class implements the interface of a sorted set, so as to allow easy * access to the iteration order: for instance, you can get the first element * in iteration order with {@code first()} without having to create an * iterator; however, this class partially violates the {@link java.util.SortedSet} * contract because all subset methods throw an exception and {@link * #comparator()} always returns null. * *
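 * For instance (a minimal sketch, assuming the generated it.unimi.dsi.fastutil.ints.IntLinkedOpenHashSet variant):
 *
 *    IntLinkedOpenHashSet s = new IntLinkedOpenHashSet();
 *    s.add( 42 ); s.add( 7 );
 *    s.firstInt();   // 42, the element that has been in the set the longest
 *    s.comparator(); // null
 *    s.headSet( 7 ); // throws UnsupportedOperationException
 *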

Additional methods, such as addAndMoveToFirst(), make it easy * to use instances of this class as a cache (e.g., with an LRU eviction policy). * *
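 * For instance, a bounded cache of keys with least-recently-used eviction can be kept as follows
 * (a minimal sketch, assuming the generated it.unimi.dsi.fastutil.ints.IntLinkedOpenHashSet variant;
 * cache and maxSize are assumed fields of the surrounding class):
 *
 *    void touch( final int k ) {
 *       cache.addAndMoveToLast( k );                          // recently used elements stay at the end
 *       if ( cache.size() > maxSize ) cache.removeFirstInt(); // evict the least recently used element
 *    }
 *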

The iterators provided by this class are type-specific {@linkplain * java.util.ListIterator list iterators}, and can be started at any * element which is in the set (if the provided element * is not in the set, a {@link NoSuchElementException} exception will be thrown). * If, however, the provided element is not the first or last element in the * set, the first access to the list index will require linear time, as in the worst case * the entire set must be scanned in iteration order to retrieve the positional * index of the starting element. If you use just the methods of a type-specific {@link it.unimi.dsi.fastutil.BidirectionalIterator}, * however, all operations will be performed in constant time. * * @see Hash * @see HashCommon */ public class OPEN_HASH_SET KEY_GENERIC extends ABSTRACT_SORTED_SET KEY_GENERIC implements java.io.Serializable, Cloneable, Hash { #else #ifdef Custom /** A type-specific hash set with a fast, small-footprint implementation whose {@linkplain it.unimi.dsi.fastutil.Hash.Strategy hashing strategy} * is specified at creation time. * *
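 * For instance, byte arrays can be hashed and compared by content rather than by reference (a minimal
 * sketch, assuming the generated it.unimi.dsi.fastutil.objects.ObjectOpenCustomHashSet variant and the
 * byte-array strategy already used elsewhere in fastutil):
 *
 *    ObjectOpenCustomHashSet<byte[]> s =
 *       new ObjectOpenCustomHashSet<byte[]>( it.unimi.dsi.fastutil.bytes.ByteArrays.HASH_STRATEGY );
 *    s.add( new byte[] { 1, 2 } );
 *    s.contains( new byte[] { 1, 2 } ); // true: equality and hashing are delegated to the strategy
 *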

Instances of this class use a hash table to represent a set. The table is * filled up to a specified load factor, and then doubled in size to * accommodate new entries. If the table is emptied below one fourth * of the load factor, it is halved in size. However, halving is * not performed when deleting entries from an iterator, as it would interfere * with the iteration process. * *

Note that {@link #clear()} does not modify the hash table size. * Rather, a family of {@linkplain #trim() trimming * methods} lets you control the size of the table; this is particularly useful * if you reuse instances of this class. * * @see Hash * @see HashCommon */ public class OPEN_HASH_SET KEY_GENERIC extends ABSTRACT_SET KEY_GENERIC implements java.io.Serializable, Cloneable, Hash { #else /** A type-specific hash set with a fast, small-footprint implementation. * *

Instances of this class use a hash table to represent a set. The table is * enlarged as needed by doubling its size when new entries are created, but it is never made * smaller (even on a {@link #clear()}). A family of {@linkplain #trim() trimming * methods} lets you control the size of the table; this is particularly useful * if you reuse instances of this class. * * @see Hash * @see HashCommon */ public class OPEN_HASH_SET KEY_GENERIC extends ABSTRACT_SET KEY_GENERIC implements java.io.Serializable, Cloneable, Hash { #endif #endif private static final long serialVersionUID = 0L; private static final boolean ASSERTS = ASSERTS_VALUE; /** The array of keys. */ protected transient KEY_GENERIC_TYPE[] key; /** The mask for wrapping a position counter. */ protected transient int mask; /** Whether this set contains the null key. */ protected transient boolean containsNull; #ifdef Custom /** The hash strategy of this custom set. */ protected STRATEGY KEY_GENERIC strategy; #endif #ifdef Linked /** The index of the first entry in iteration order. It is valid iff {@link #size} is nonzero; otherwise, it contains -1. */ protected transient int first = -1; /** The index of the last entry in iteration order. It is valid iff {@link #size} is nonzero; otherwise, it contains -1. */ protected transient int last = -1; /** For each entry, the next and the previous entry in iteration order, * stored as ((prev & 0xFFFFFFFFL) << 32) | (next & 0xFFFFFFFFL). * The first entry contains predecessor -1, and the last entry * contains successor -1. */ protected transient long[] link; #endif /** The current table size. Note that an additional element is allocated for storing the null key. */ protected transient int n; /** Threshold after which we rehash. It must be the table size times {@link #f}. */ protected transient int maxFill; /** Number of entries in the set (including the null key, if present). */ protected int size; /** The acceptable load factor. */ protected final float f; #ifdef Custom /** Creates a new hash set. * *

The actual table size will be the least power of two greater than expected/f. * * @param expected the expected number of elements in the hash set. * @param f the load factor. * @param strategy the strategy. */ SUPPRESS_WARNINGS_KEY_UNCHECKED public OPEN_HASH_SET( final int expected, final float f, final STRATEGY KEY_GENERIC strategy ) { this.strategy = strategy; #else /** Creates a new hash set. * *

The actual table size will be the least power of two greater than expected/f. * * @param expected the expected number of elements in the hash set. * @param f the load factor. */ SUPPRESS_WARNINGS_KEY_UNCHECKED public OPEN_HASH_SET( final int expected, final float f ) { #endif if ( f <= 0 || f > 1 ) throw new IllegalArgumentException( "Load factor must be greater than 0 and smaller than or equal to 1" ); if ( expected < 0 ) throw new IllegalArgumentException( "The expected number of elements must be nonnegative" ); this.f = f; n = arraySize( expected, f ); mask = n - 1; maxFill = maxFill( n, f ); key = KEY_GENERIC_ARRAY_CAST new KEY_TYPE[ n + 1 ]; #ifdef Linked link = new long[ n + 1 ]; #endif } #ifdef Custom /** Creates a new hash set with {@link Hash#DEFAULT_LOAD_FACTOR} as load factor. * * @param expected the expected number of elements in the hash set. * @param strategy the strategy. */ public OPEN_HASH_SET( final int expected, final STRATEGY KEY_GENERIC strategy ) { this( expected, DEFAULT_LOAD_FACTOR, strategy ); } #else /** Creates a new hash set with {@link Hash#DEFAULT_LOAD_FACTOR} as load factor. * * @param expected the expected number of elements in the hash set. */ public OPEN_HASH_SET( final int expected ) { this( expected, DEFAULT_LOAD_FACTOR ); } #endif #ifdef Custom /** Creates a new hash set with initial expected {@link Hash#DEFAULT_INITIAL_SIZE} elements * and {@link Hash#DEFAULT_LOAD_FACTOR} as load factor. * @param strategy the strategy. */ public OPEN_HASH_SET( final STRATEGY KEY_GENERIC strategy ) { this( DEFAULT_INITIAL_SIZE, DEFAULT_LOAD_FACTOR, strategy ); } #else /** Creates a new hash set with initial expected {@link Hash#DEFAULT_INITIAL_SIZE} elements * and {@link Hash#DEFAULT_LOAD_FACTOR} as load factor. */ public OPEN_HASH_SET() { this( DEFAULT_INITIAL_SIZE, DEFAULT_LOAD_FACTOR ); } #endif #ifdef Custom /** Creates a new hash set copying a given collection. * * @param c a {@link Collection} to be copied into the new hash set. * @param f the load factor. * @param strategy the strategy. */ public OPEN_HASH_SET( final Collection c, final float f, final STRATEGY KEY_GENERIC strategy ) { this( c.size(), f, strategy ); addAll( c ); } #else /** Creates a new hash set copying a given collection. * * @param c a {@link Collection} to be copied into the new hash set. * @param f the load factor. */ public OPEN_HASH_SET( final Collection c, final float f ) { this( c.size(), f ); addAll( c ); } #endif #ifdef Custom /** Creates a new hash set with {@link Hash#DEFAULT_LOAD_FACTOR} as load factor * copying a given collection. * * @param c a {@link Collection} to be copied into the new hash set. * @param strategy the strategy. */ public OPEN_HASH_SET( final Collection c, final STRATEGY KEY_GENERIC strategy ) { this( c, DEFAULT_LOAD_FACTOR, strategy ); } #else /** Creates a new hash set with {@link Hash#DEFAULT_LOAD_FACTOR} as load factor * copying a given collection. * * @param c a {@link Collection} to be copied into the new hash set. */ public OPEN_HASH_SET( final Collection c ) { this( c, DEFAULT_LOAD_FACTOR ); } #endif #ifdef Custom /** Creates a new hash set copying a given type-specific collection. * * @param c a type-specific collection to be copied into the new hash set. * @param f the load factor. * @param strategy the strategy. */ public OPEN_HASH_SET( final COLLECTION KEY_EXTENDS_GENERIC c, final float f, STRATEGY KEY_GENERIC strategy ) { this( c.size(), f, strategy ); addAll( c ); } #else /** Creates a new hash set copying a given type-specific collection. 
* * @param c a type-specific collection to be copied into the new hash set. * @param f the load factor. */ public OPEN_HASH_SET( final COLLECTION KEY_EXTENDS_GENERIC c, final float f ) { this( c.size(), f ); addAll( c ); } #endif #ifdef Custom /** Creates a new hash set with {@link Hash#DEFAULT_LOAD_FACTOR} as load factor * copying a given type-specific collection. * * @param c a type-specific collection to be copied into the new hash set. * @param strategy the strategy. */ public OPEN_HASH_SET( final COLLECTION KEY_EXTENDS_GENERIC c, final STRATEGY KEY_GENERIC strategy ) { this( c, DEFAULT_LOAD_FACTOR, strategy ); } #else /** Creates a new hash set with {@link Hash#DEFAULT_LOAD_FACTOR} as load factor * copying a given type-specific collection. * * @param c a type-specific collection to be copied into the new hash set. */ public OPEN_HASH_SET( final COLLECTION KEY_EXTENDS_GENERIC c ) { this( c, DEFAULT_LOAD_FACTOR ); } #endif #ifdef Custom /** Creates a new hash set using elements provided by a type-specific iterator. * * @param i a type-specific iterator whose elements will fill the set. * @param f the load factor. * @param strategy the strategy. */ public OPEN_HASH_SET( final STD_KEY_ITERATOR KEY_EXTENDS_GENERIC i, final float f, final STRATEGY KEY_GENERIC strategy ) { this( DEFAULT_INITIAL_SIZE, f, strategy ); while( i.hasNext() ) add( i.NEXT_KEY() ); } #else /** Creates a new hash set using elements provided by a type-specific iterator. * * @param i a type-specific iterator whose elements will fill the set. * @param f the load factor. */ public OPEN_HASH_SET( final STD_KEY_ITERATOR KEY_EXTENDS_GENERIC i, final float f ) { this( DEFAULT_INITIAL_SIZE, f ); while( i.hasNext() ) add( i.NEXT_KEY() ); } #endif #ifdef Custom /** Creates a new hash set with {@link Hash#DEFAULT_LOAD_FACTOR} as load factor using elements provided by a type-specific iterator. * * @param i a type-specific iterator whose elements will fill the set. * @param strategy the strategy. */ public OPEN_HASH_SET( final STD_KEY_ITERATOR KEY_EXTENDS_GENERIC i, final STRATEGY KEY_GENERIC strategy ) { this( i, DEFAULT_LOAD_FACTOR, strategy ); } #else /** Creates a new hash set with {@link Hash#DEFAULT_LOAD_FACTOR} as load factor using elements provided by a type-specific iterator. * * @param i a type-specific iterator whose elements will fill the set. */ public OPEN_HASH_SET( final STD_KEY_ITERATOR KEY_EXTENDS_GENERIC i ) { this( i, DEFAULT_LOAD_FACTOR ); } #endif #if KEYS_PRIMITIVE #ifdef Custom /** Creates a new hash set using elements provided by an iterator. * * @param i an iterator whose elements will fill the set. * @param f the load factor. * @param strategy the strategy. */ public OPEN_HASH_SET( final Iterator i, final float f, final STRATEGY KEY_GENERIC strategy ) { this( ITERATORS.AS_KEY_ITERATOR( i ), f, strategy ); } #else /** Creates a new hash set using elements provided by an iterator. * * @param i an iterator whose elements will fill the set. * @param f the load factor. */ public OPEN_HASH_SET( final Iterator i, final float f ) { this( ITERATORS.AS_KEY_ITERATOR( i ), f ); } #endif #ifdef Custom /** Creates a new hash set with {@link Hash#DEFAULT_LOAD_FACTOR} as load factor using elements provided by an iterator. * * @param i an iterator whose elements will fill the set. * @param strategy the strategy. 
*/ public OPEN_HASH_SET( final Iterator i, final STRATEGY KEY_GENERIC strategy ) { this( ITERATORS.AS_KEY_ITERATOR( i ), strategy ); } #else /** Creates a new hash set with {@link Hash#DEFAULT_LOAD_FACTOR} as load factor using elements provided by an iterator. * * @param i an iterator whose elements will fill the set. */ public OPEN_HASH_SET( final Iterator i ) { this( ITERATORS.AS_KEY_ITERATOR( i ) ); } #endif #endif #ifdef Custom /** Creates a new hash set and fills it with the elements of a given array. * * @param a an array whose elements will be used to fill the set. * @param offset the first element to use. * @param length the number of elements to use. * @param f the load factor. * @param strategy the strategy. */ public OPEN_HASH_SET( final KEY_GENERIC_TYPE[] a, final int offset, final int length, final float f, final STRATEGY KEY_GENERIC strategy ) { this( length < 0 ? 0 : length, f, strategy ); ARRAYS.ensureOffsetLength( a, offset, length ); for( int i = 0; i < length; i++ ) add( a[ offset + i ] ); } #else /** Creates a new hash set and fills it with the elements of a given array. * * @param a an array whose elements will be used to fill the set. * @param offset the first element to use. * @param length the number of elements to use. * @param f the load factor. */ public OPEN_HASH_SET( final KEY_GENERIC_TYPE[] a, final int offset, final int length, final float f ) { this( length < 0 ? 0 : length, f ); ARRAYS.ensureOffsetLength( a, offset, length ); for( int i = 0; i < length; i++ ) add( a[ offset + i ] ); } #endif #ifdef Custom /** Creates a new hash set with {@link Hash#DEFAULT_LOAD_FACTOR} as load factor and fills it with the elements of a given array. * * @param a an array whose elements will be used to fill the set. * @param offset the first element to use. * @param length the number of elements to use. * @param strategy the strategy. */ public OPEN_HASH_SET( final KEY_GENERIC_TYPE[] a, final int offset, final int length, final STRATEGY KEY_GENERIC strategy ) { this( a, offset, length, DEFAULT_LOAD_FACTOR, strategy ); } #else /** Creates a new hash set with {@link Hash#DEFAULT_LOAD_FACTOR} as load factor and fills it with the elements of a given array. * * @param a an array whose elements will be used to fill the set. * @param offset the first element to use. * @param length the number of elements to use. */ public OPEN_HASH_SET( final KEY_GENERIC_TYPE[] a, final int offset, final int length ) { this( a, offset, length, DEFAULT_LOAD_FACTOR ); } #endif #ifdef Custom /** Creates a new hash set copying the elements of an array. * * @param a an array to be copied into the new hash set. * @param f the load factor. * @param strategy the strategy. */ public OPEN_HASH_SET( final KEY_GENERIC_TYPE[] a, final float f, final STRATEGY KEY_GENERIC strategy ) { this( a, 0, a.length, f, strategy ); } #else /** Creates a new hash set copying the elements of an array. * * @param a an array to be copied into the new hash set. * @param f the load factor. */ public OPEN_HASH_SET( final KEY_GENERIC_TYPE[] a, final float f ) { this( a, 0, a.length, f ); } #endif #ifdef Custom /** Creates a new hash set with {@link Hash#DEFAULT_LOAD_FACTOR} as load factor * copying the elements of an array. * * @param a an array to be copied into the new hash set. * @param strategy the strategy. 
*/ public OPEN_HASH_SET( final KEY_GENERIC_TYPE[] a, final STRATEGY KEY_GENERIC strategy ) { this( a, DEFAULT_LOAD_FACTOR, strategy ); } #else /** Creates a new hash set with {@link Hash#DEFAULT_LOAD_FACTOR} as load factor * copying the elements of an array. * * @param a an array to be copied into the new hash set. */ public OPEN_HASH_SET( final KEY_GENERIC_TYPE[] a ) { this( a, DEFAULT_LOAD_FACTOR ); } #endif #ifdef Custom /** Returns the hashing strategy. * * @return the hashing strategy of this custom hash set. */ public STRATEGY KEY_GENERIC strategy() { return strategy; } #endif private int realSize() { return containsNull ? size - 1 : size; } private void ensureCapacity( final int capacity ) { final int needed = arraySize( capacity, f ); if ( needed > n ) rehash( needed ); } private void tryCapacity( final long capacity ) { final int needed = (int)Math.min( 1 << 30, Math.max( 2, HashCommon.nextPowerOfTwo( (long)Math.ceil( capacity / f ) ) ) ); if ( needed > n ) rehash( needed ); } #if KEYS_PRIMITIVE /** {@inheritDoc} */ public boolean addAll( COLLECTION c ) { if ( f <= .5 ) ensureCapacity( c.size() ); // The resulting collection will be sized for c.size() elements else tryCapacity( size() + c.size() ); // The resulting collection will be tentatively sized for size() + c.size() elements return super.addAll( c ); } #endif /** {@inheritDoc} */ public boolean addAll( Collection c ) { // The resulting collection will be at least c.size() big if ( f <= .5 ) ensureCapacity( c.size() ); // The resulting collection will be sized for c.size() elements else tryCapacity( size() + c.size() ); // The resulting collection will be tentatively sized for size() + c.size() elements return super.addAll( c ); } public boolean add( final KEY_GENERIC_TYPE k ) { int pos; if ( KEY_EQUALS_NULL( k ) ) { if ( containsNull ) return false; #ifdef Linked pos = n; #endif containsNull = true; #ifdef Custom key[ n ] = k; #endif } else { KEY_GENERIC_TYPE curr; final KEY_GENERIC_TYPE[] key = this.key; // The starting point. if ( ! KEY_IS_NULL( curr = key[ pos = KEY2INTHASH( k ) & mask ] ) ) { if ( KEY_EQUALS_NOT_NULL( curr, k ) ) return false; while( ! KEY_IS_NULL( curr = key[ pos = ( pos + 1 ) & mask ] ) ) if ( KEY_EQUALS_NOT_NULL( curr, k ) ) return false; } key[ pos ] = k; } #ifdef Linked if ( size == 0 ) { first = last = pos; // Special case of SET_UPPER_LOWER(link[ pos ], -1, -1); link[ pos ] = -1L; } else { SET_NEXT( link[ last ], pos ); SET_UPPER_LOWER( link[ pos ], last, -1 ); last = pos; } #endif if ( size++ >= maxFill ) rehash( arraySize( size + 1, f ) ); if ( ASSERTS ) checkTable(); return true; } #if KEY_CLASS_Object /** Add a random element if not present, get the existing value if already present. * * This is equivalent to (but faster than) doing a: *

	 * K exist = set.get(k);
	 * if (exist == null) {
	 *   set.add(k);
	 *   exist = k;
	 * }
	 * 
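	 *
	 * For instance, a string pool then needs a single hash lookup per call (a minimal sketch, assuming
	 * the generated it.unimi.dsi.fastutil.objects.ObjectOpenHashSet variant, with s an arbitrary string):
	 *
	 * ObjectOpenHashSet<String> pool = new ObjectOpenHashSet<String>();
	 * String canonical = pool.addOrGet( s ); // an equal string already in the pool, or s itself
	 *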
*/ public KEY_GENERIC_TYPE addOrGet( final KEY_GENERIC_TYPE k ) { int pos; if ( KEY_EQUALS_NULL( k ) ) { if ( containsNull ) return key [ n ]; #ifdef Linked pos = n; #endif containsNull = true; #ifdef Custom key [ n ] = k; #endif } else { KEY_GENERIC_TYPE curr; final KEY_GENERIC_TYPE[] key = this.key; // The starting point. if ( ! KEY_IS_NULL( curr = key[ pos = KEY2INTHASH( k ) & mask ] ) ) { if ( KEY_EQUALS_NOT_NULL( curr, k ) ) return curr; while( ! KEY_IS_NULL( curr = key[ pos = ( pos + 1 ) & mask ] ) ) if ( KEY_EQUALS_NOT_NULL( curr, k ) ) return curr; } key[ pos ] = k; } #ifdef Linked if ( size == 0 ) { first = last = pos; // Special case of SET_UPPER_LOWER(link[ pos ], -1, -1); link[ pos ] = -1L; } else { SET_NEXT( link[ last ], pos ); SET_UPPER_LOWER( link[ pos ], last, -1 ); last = pos; } #endif if ( size++ >= maxFill ) rehash( arraySize( size + 1, f ) ); if ( ASSERTS ) checkTable(); return k; } #endif /** Shifts left entries with the specified hash code, starting at the specified position, * and empties the resulting free entry. * * @param pos a starting position. */ protected final void shiftKeys( int pos ) { // Shift entries with the same hash. int last, slot; KEY_GENERIC_TYPE curr; final KEY_GENERIC_TYPE[] key = this.key; for(;;) { pos = ( ( last = pos ) + 1 ) & mask; for(;;) { if ( KEY_IS_NULL( curr = key[ pos ] ) ) { key[ last ] = KEY_NULL; return; } slot = KEY2INTHASH( curr ) & mask; if ( last <= pos ? last >= slot || slot > pos : last >= slot && slot > pos ) break; pos = ( pos + 1 ) & mask; } key[ last ] = curr; #ifdef Linked fixPointers( pos, last ); #endif } } private boolean removeEntry( final int pos ) { size--; #ifdef Linked fixPointers( pos ); #endif shiftKeys( pos ); if ( size < maxFill / 4 && n > DEFAULT_INITIAL_SIZE ) rehash( n / 2 ); return true; } private boolean removeNullEntry() { containsNull = false; key[ n ] = KEY_NULL; size--; #ifdef Linked fixPointers( n ); #endif if ( size < maxFill / 4 && n > DEFAULT_INITIAL_SIZE ) rehash( n / 2 ); return true; } SUPPRESS_WARNINGS_KEY_UNCHECKED public boolean rem( final KEY_TYPE k ) { if ( KEY_EQUALS_NULL( KEY_GENERIC_CAST k ) ) { if ( containsNull ) return removeNullEntry(); return false; } KEY_GENERIC_TYPE curr; final KEY_GENERIC_TYPE[] key = this.key; int pos; // The starting point. if ( KEY_IS_NULL( curr = key[ pos = KEY2INTHASH_CAST( k ) & mask ] ) ) return false; if ( KEY_EQUALS_NOT_NULL_CAST( k, curr ) ) return removeEntry( pos ); while( true ) { if ( KEY_IS_NULL( curr = key[ pos = ( pos + 1 ) & mask ] ) ) return false; if ( KEY_EQUALS_NOT_NULL_CAST( k, curr ) ) return removeEntry( pos ); } } SUPPRESS_WARNINGS_KEY_UNCHECKED public boolean contains( final KEY_TYPE k ) { if ( KEY_EQUALS_NULL( KEY_GENERIC_CAST k ) ) return containsNull; KEY_GENERIC_TYPE curr; final KEY_GENERIC_TYPE[] key = this.key; int pos; // The starting point. if ( KEY_IS_NULL( curr = key[ pos = KEY2INTHASH_CAST( k ) & mask ] ) ) return false; if ( KEY_EQUALS_NOT_NULL_CAST( k, curr ) ) return true; while( true ) { if ( KEY_IS_NULL( curr = key[ pos = ( pos + 1 ) & mask ] ) ) return false; if ( KEY_EQUALS_NOT_NULL_CAST( k, curr ) ) return true; } } #if KEY_CLASS_Object /** Returns the element of this set that is equal to the given key, or null. * @return the element of this set that is equal to the given key, or null. 
*/ SUPPRESS_WARNINGS_KEY_UNCHECKED public K get( final Object k ) { if ( KEY_EQUALS_NULL( KEY_GENERIC_CAST k ) ) return key[ n ]; // This is correct independently of the value of containsNull and of the map being custom KEY_GENERIC_TYPE curr; final KEY_GENERIC_TYPE[] key = this.key; int pos; // The starting point. if ( KEY_IS_NULL( curr = key[ pos = KEY2INTHASH_CAST( k ) & mask ] ) ) return null; if ( KEY_EQUALS_NOT_NULL_CAST( k, curr ) ) return curr; // There's always an unused entry. while( true ) { if ( KEY_IS_NULL( curr = key[ pos = ( pos + 1 ) & mask ] ) ) return null; if ( KEY_EQUALS_NOT_NULL_CAST( k, curr ) ) return curr; } } #endif #ifdef Linked /** Removes the first key in iteration order. * @return the first key. * @throws NoSuchElementException is this set is empty. */ public KEY_GENERIC_TYPE REMOVE_FIRST_KEY() { if ( size == 0 ) throw new NoSuchElementException(); final int pos = first; // Abbreviated version of fixPointers(pos) first = GET_NEXT(link[ pos ]); if ( 0 <= first ) { // Special case of SET_PREV( link[ first ], -1 ) link[ first ] |= (-1 & 0xFFFFFFFFL) << 32; } final KEY_GENERIC_TYPE k = key[ pos ]; size--; if ( KEY_EQUALS_NULL( k ) ) { containsNull = false; key[ n ] = KEY_NULL; } else shiftKeys( pos ); if ( size < maxFill / 4 && n > DEFAULT_INITIAL_SIZE ) rehash( n / 2 ); return k; } /** Removes the the last key in iteration order. * @return the last key. * @throws NoSuchElementException is this set is empty. */ public KEY_GENERIC_TYPE REMOVE_LAST_KEY() { if ( size == 0 ) throw new NoSuchElementException(); final int pos = last; // Abbreviated version of fixPointers(pos) last = GET_PREV(link[ pos ]); if ( 0 <= last ) { // Special case of SET_NEXT( link[ last ], -1 ) link[ last ] |= -1 & 0xFFFFFFFFL; } final KEY_GENERIC_TYPE k = key[ pos ]; size--; if ( KEY_EQUALS_NULL( k ) ) { containsNull = false; key[ n ] = KEY_NULL; } else shiftKeys( pos ); if ( size < maxFill / 4 && n > DEFAULT_INITIAL_SIZE ) rehash( n / 2 ); return k; } private void moveIndexToFirst( final int i ) { if ( size == 1 || first == i ) return; if ( last == i ) { last = GET_PREV(link[ i ]); // Special case of SET_NEXT( link[ last ], -1 ); link[ last ] |= -1 & 0xFFFFFFFFL; } else { final long linki = link[ i ]; final int prev = GET_PREV(linki); final int next = GET_NEXT(linki); COPY_NEXT(link[ prev ], linki); COPY_PREV(link[ next ], linki); } SET_PREV( link[ first ], i ); SET_UPPER_LOWER( link[ i ], -1, first ); first = i; } private void moveIndexToLast( final int i ) { if ( size == 1 || last == i ) return; if ( first == i ) { first = GET_NEXT(link[ i ]); // Special case of SET_PREV( link[ first ], -1 ); link[ first ] |= (-1 & 0xFFFFFFFFL) << 32; } else { final long linki = link[ i ]; final int prev = GET_PREV(linki); final int next = GET_NEXT(linki); COPY_NEXT(link[ prev ], linki); COPY_PREV(link[ next ], linki); } SET_NEXT( link[ last ], i ); SET_UPPER_LOWER( link[ i ], last, -1 ); last = i; } /** Adds a key to the set; if the key is already present, it is moved to the first position of the iteration order. * * @param k the key. * @return true if the key was not present. */ public boolean addAndMoveToFirst( final KEY_GENERIC_TYPE k ) { int pos; if ( KEY_EQUALS_NULL( k ) ) { if ( containsNull ) { moveIndexToFirst( n ); return false; } containsNull = true; pos = n; } else { // The starting point. final KEY_GENERIC_TYPE key[] = this.key; pos = KEY2INTHASH( k ) & mask; // There's always an unused entry. TODO while( ! 
KEY_IS_NULL( key[ pos ] ) ) { if ( KEY_EQUALS_NOT_NULL( k, key[ pos ] ) ) { moveIndexToFirst( pos ); return false; } pos = ( pos + 1 ) & mask; } } key[ pos ] = k; if ( size == 0 ) { first = last = pos; // Special case of SET_UPPER_LOWER( link[ pos ], -1, -1 ); link[ pos ] = -1L; } else { SET_PREV( link[ first ], pos ); SET_UPPER_LOWER( link[ pos ], -1, first ); first = pos; } if ( size++ >= maxFill ) rehash( arraySize( size, f ) ); if ( ASSERTS ) checkTable(); return true; } /** Adds a key to the set; if the key is already present, it is moved to the last position of the iteration order. * * @param k the key. * @return true if the key was not present. */ public boolean addAndMoveToLast( final KEY_GENERIC_TYPE k ) { int pos; if ( KEY_EQUALS_NULL( k ) ) { if ( containsNull ) { moveIndexToLast( n ); return false; } containsNull = true; pos = n; } else { // The starting point. final KEY_GENERIC_TYPE key[] = this.key; pos = KEY2INTHASH( k ) & mask; // There's always an unused entry. while( ! KEY_IS_NULL( key[ pos ] ) ) { if ( KEY_EQUALS_NOT_NULL( k, key[ pos ] ) ) { moveIndexToLast( pos ); return false; } pos = ( pos + 1 ) & mask; } } key[ pos ] = k; if ( size == 0 ) { first = last = pos; // Special case of SET_UPPER_LOWER( link[ pos ], -1, -1 ); link[ pos ] = -1L; } else { SET_NEXT( link[ last ], pos ); SET_UPPER_LOWER( link[ pos ], last, -1 ); last = pos; } if ( size++ >= maxFill ) rehash( arraySize( size, f ) ); if ( ASSERTS ) checkTable(); return true; } #endif /* Removes all elements from this set. * *

To increase object reuse, this method does not change the table size. * If you want to reduce the table size, you must use {@link #trim()}. * */ public void clear() { if ( size == 0 ) return; size = 0; containsNull = false; Arrays.fill( key, KEY_NULL ); #ifdef Linked first = last = -1; #endif } public int size() { return size; } public boolean isEmpty() { return size == 0; } /** A no-op for backward compatibility. * * @param growthFactor unused. * @deprecated Since fastutil 6.1.0, hash tables are doubled when they are too full. */ @Deprecated public void growthFactor( int growthFactor ) {} /** Gets the growth factor (2). * * @return the growth factor of this set, which is fixed (2). * @see #growthFactor(int) * @deprecated Since fastutil 6.1.0, hash tables are doubled when they are too full. */ @Deprecated public int growthFactor() { return 16; } #ifdef Linked /** Modifies the {@link #link} vector so that the given entry is removed. * This method will complete in constant time. * * @param i the index of an entry. */ protected void fixPointers( final int i ) { if ( size == 0 ) { first = last = -1; return; } if ( first == i ) { first = GET_NEXT(link[ i ]); if (0 <= first) { // Special case of SET_PREV( link[ first ], -1 ) link[ first ] |= (-1 & 0xFFFFFFFFL) << 32; } return; } if ( last == i ) { last = GET_PREV(link[ i ]); if (0 <= last) { // Special case of SET_NEXT( link[ last ], -1 ) link[ last ] |= -1 & 0xFFFFFFFFL; } return; } final long linki = link[ i ]; final int prev = GET_PREV(linki); final int next = GET_NEXT(linki); COPY_NEXT(link[ prev ], linki); COPY_PREV(link[ next ], linki); } /** Modifies the {@link #link} vector for a shift from s to d. * This method will complete in constant time. * * @param s the source position. * @param d the destination position. */ protected void fixPointers( int s, int d ) { if ( size == 1 ) { first = last = d; // Special case of SET(link[ d ], -1, -1) link[ d ] = -1L; return; } if ( first == s ) { first = d; SET_PREV( link[ GET_NEXT(link[ s ]) ], d ); link[ d ] = link[ s ]; return; } if ( last == s ) { last = d; SET_NEXT( link[ GET_PREV(link[ s ])], d ); link[ d ] = link[ s ]; return; } final long links = link[ s ]; final int prev = GET_PREV(links); final int next = GET_NEXT(links); SET_NEXT( link[ prev ], d ); SET_PREV( link[ next ], d ); link[ d ] = links; } /** Returns the first element of this set in iteration order. * * @return the first element in iteration order. */ public KEY_GENERIC_TYPE FIRST() { if ( size == 0 ) throw new NoSuchElementException(); return key[ first ]; } /** Returns the last element of this set in iteration order. * * @return the last element in iteration order. */ public KEY_GENERIC_TYPE LAST() { if ( size == 0 ) throw new NoSuchElementException(); return key[ last ]; } public SORTED_SET KEY_GENERIC tailSet( KEY_GENERIC_TYPE from ) { throw new UnsupportedOperationException(); } public SORTED_SET KEY_GENERIC headSet( KEY_GENERIC_TYPE to ) { throw new UnsupportedOperationException(); } public SORTED_SET KEY_GENERIC subSet( KEY_GENERIC_TYPE from, KEY_GENERIC_TYPE to ) { throw new UnsupportedOperationException(); } public KEY_COMPARATOR KEY_SUPER_GENERIC comparator() { return null; } /** A list iterator over a linked set. * *
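 * For instance (a minimal sketch, assuming the generated it.unimi.dsi.fastutil.ints.IntLinkedOpenHashSet
 * variant, with s a set containing the element k):
 *
 *    IntListIterator i = s.iterator( k ); // positioned just after k, in iteration order
 *    i.nextIndex();                       // the first index access may take linear time, as documented above
 *    while( i.hasNext() ) i.nextInt();    // each step takes constant time
 *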

This class provides a list iterator over a linked hash set. The constructor runs in constant time. */ private class SetIterator extends KEY_ABSTRACT_LIST_ITERATOR KEY_GENERIC { /** The entry that will be returned by the next call to {@link java.util.ListIterator#previous()} (or null if no previous entry exists). */ int prev = -1; /** The entry that will be returned by the next call to {@link java.util.ListIterator#next()} (or null if no next entry exists). */ int next = -1; /** The last entry that was returned (or -1 if we did not iterate or used {@link #remove()}). */ int curr = -1; /** The current index (in the sense of a {@link java.util.ListIterator}). When -1, we do not know the current index.*/ int index = -1; SetIterator() { next = first; index = 0; } SetIterator( KEY_GENERIC_TYPE from ) { if ( KEY_EQUALS_NULL( from ) ) { if ( OPEN_HASH_SET.this.containsNull ) { next = GET_NEXT( link[ n ] ); prev = n; return; } else throw new NoSuchElementException( "The key " + from + " does not belong to this set." ); } if ( KEY_EQUALS( key[ last ], from ) ) { prev = last; index = size; return; } // The starting point. final KEY_GENERIC_TYPE key[] = OPEN_HASH_SET.this.key; int pos = KEY2INTHASH( from ) & mask; // There's always an unused entry. while( ! KEY_IS_NULL( key[ pos ] ) ) { if ( KEY_EQUALS_NOT_NULL( key[ pos ], from ) ) { // Note: no valid index known. next = GET_NEXT( link[ pos ] ); prev = pos; return; } pos = ( pos + 1 ) & mask; } throw new NoSuchElementException( "The key " + from + " does not belong to this set." ); } public boolean hasNext() { return next != -1; } public boolean hasPrevious() { return prev != -1; } public KEY_GENERIC_TYPE NEXT_KEY() { if ( ! hasNext() ) throw new NoSuchElementException(); curr = next; next = GET_NEXT(link[ curr ]); prev = curr; if ( index >= 0 ) index++; if ( ASSERTS ) assert curr == n || ! KEY_IS_NULL( key[ curr ] ) : "Position " + curr + " is not used"; return key[ curr ]; } public KEY_GENERIC_TYPE PREV_KEY() { if ( ! hasPrevious() ) throw new NoSuchElementException(); curr = prev; prev = GET_PREV(link[ curr ]); next = curr; if ( index >= 0 ) index--; return key[ curr ]; } private final void ensureIndexKnown() { if ( index >= 0 ) return; if ( prev == -1 ) { index = 0; return; } if ( next == -1 ) { index = size; return; } int pos = first; index = 1; while( pos != prev ) { pos = GET_NEXT( link[ pos ] ); index++; } } public int nextIndex() { ensureIndexKnown(); return index; } public int previousIndex() { ensureIndexKnown(); return index - 1; } public void remove() { ensureIndexKnown(); if ( curr == -1 ) throw new IllegalStateException(); if ( curr == prev ) { /* If the last operation was a next(), we are removing an entry that preceeds the current index, and thus we must decrement it. */ index--; prev = GET_PREV(link[ curr ]); } else next = GET_NEXT(link[ curr ]); size--; /* Now we manually fix the pointers. Because of our knowledge of next and prev, this is going to be faster than calling fixPointers(). */ if ( prev == -1 ) first = next; else SET_NEXT( link[ prev ], next ); if ( next == -1 ) last = prev; else SET_PREV( link[ next ], prev ); int last, slot, pos = curr; curr = -1; if ( pos == n ) { OPEN_HASH_SET.this.containsNull = false; OPEN_HASH_SET.this.key[ n ] = KEY_NULL; } else { KEY_GENERIC_TYPE curr; final KEY_GENERIC_TYPE[] key = OPEN_HASH_SET.this.key; // We have to horribly duplicate the shiftKeys() code because we need to update next/prev. 
for(;;) { pos = ( ( last = pos ) + 1 ) & mask; for(;;) { if ( KEY_IS_NULL( curr = key[ pos ] ) ) { key[ last ] = KEY_NULL; return; } slot = KEY2INTHASH( curr ) & mask; if ( last <= pos ? last >= slot || slot > pos : last >= slot && slot > pos ) break; pos = ( pos + 1 ) & mask; } key[ last ] = curr; if ( next == pos ) next = last; if ( prev == pos ) prev = last; fixPointers( pos, last ); } } } } /** Returns a type-specific list iterator on the elements in this set, starting from a given element of the set. * Please see the class documentation for implementation details. * * @param from an element to start from. * @return a type-specific list iterator starting at the given element. * @throws IllegalArgumentException if from does not belong to the set. */ public KEY_LIST_ITERATOR KEY_GENERIC iterator( KEY_GENERIC_TYPE from ) { return new SetIterator( from ); } public KEY_LIST_ITERATOR KEY_GENERIC iterator() { return new SetIterator(); } #else /** An iterator over a hash set. */ private class SetIterator extends KEY_ABSTRACT_ITERATOR KEY_GENERIC { /** The index of the last entry returned, if positive or zero; initially, {@link #n}. If negative, the last element returned was that of index {@code - pos - 1} from the {@link #wrapped} list. */ int pos = n; /** The index of the last entry that has been returned (more precisely, the value of {@link #pos} if {@link #pos} is positive, or {@link Integer#MIN_VALUE} if {@link #pos} is negative). It is -1 if either we did not return an entry yet, or the last returned entry has been removed. */ int last = -1; /** A downward counter measuring how many entries must still be returned. */ int c = size; /** A boolean telling us whether we should return the null key. */ boolean mustReturnNull = OPEN_HASH_SET.this.containsNull; /** A lazily allocated list containing elements that have wrapped around the table because of removals. */ ARRAY_LIST KEY_GENERIC wrapped; public boolean hasNext() { return c != 0; } public KEY_GENERIC_TYPE NEXT_KEY() { if ( ! hasNext() ) throw new NoSuchElementException(); c--; if ( mustReturnNull ) { mustReturnNull = false; last = n; return key[ n ]; } final KEY_GENERIC_TYPE key[] = OPEN_HASH_SET.this.key; for(;;) { if ( --pos < 0 ) { // We are just enumerating elements from the wrapped list. last = Integer.MIN_VALUE; return wrapped.GET_KEY( - pos - 1 ); } if ( ! KEY_IS_NULL( key[ pos ] ) ) return key[ last = pos ]; } } /** Shifts left entries with the specified hash code, starting at the specified position, * and empties the resulting free entry. * * @param pos a starting position. */ private final void shiftKeys( int pos ) { // Shift entries with the same hash. int last, slot; KEY_GENERIC_TYPE curr; final KEY_GENERIC_TYPE[] key = OPEN_HASH_SET.this.key; for(;;) { pos = ( ( last = pos ) + 1 ) & mask; for(;;) { if ( KEY_IS_NULL( curr = key[ pos ] ) ) { key[ last ] = KEY_NULL; return; } slot = KEY2INTHASH( curr ) & mask; if ( last <= pos ? last >= slot || slot > pos : last >= slot && slot > pos ) break; pos = ( pos + 1 ) & mask; } if ( pos < last ) { // Wrapped entry. if ( wrapped == null ) wrapped = new ARRAY_LIST KEY_GENERIC( 2 ); wrapped.add( key[ pos ] ); } key[ last ] = curr; } } public void remove() { if ( last == -1 ) throw new IllegalStateException(); if ( last == n ) { OPEN_HASH_SET.this.containsNull = false; OPEN_HASH_SET.this.key[ n ] = KEY_NULL; } else if ( pos >= 0 ) shiftKeys( last ); else { // We're removing wrapped entries. 
#if KEYS_REFERENCE OPEN_HASH_SET.this.rem( wrapped.set( - pos - 1, null ) ); #else OPEN_HASH_SET.this.rem( wrapped.GET_KEY( - pos - 1 ) ); #endif last = -1; // Note that we must not decrement size return; } size--; last = -1; // You can no longer remove this entry. if ( ASSERTS ) checkTable(); } } public KEY_ITERATOR KEY_GENERIC iterator() { return new SetIterator(); } #endif /** A no-op for backward compatibility. The kind of tables implemented by * this class never need rehashing. * *

If you need to reduce the table size to fit exactly * this set, use {@link #trim()}. * * @return true. * @see #trim() * @deprecated A no-op. */ @Deprecated public boolean rehash() { return true; } /** Rehashes this set, making the table as small as possible. * *

This method rehashes the table to the smallest size satisfying the * load factor. It can be used when the set will not be changed anymore, so * as to optimize access speed and size. * *
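 * For instance (a minimal sketch, assuming the generated it.unimi.dsi.fastutil.ints.IntOpenHashSet variant):
 *
 *    IntOpenHashSet s = new IntOpenHashSet( 1000000 ); // table sized for a million elements
 *    s.add( 1 ); s.add( 2 ); s.add( 3 );
 *    s.trim(); // rehashes into the smallest table that fits three elements at the current load factor
 *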

If the table size is already the minimum possible, this method * does nothing. * * @return true if there was enough memory to trim the set. * @see #trim(int) */ public boolean trim() { final int l = arraySize( size, f ); if ( l >= n || size > maxFill( l, f ) ) return true; try { rehash( l ); } catch(OutOfMemoryError cantDoIt) { return false; } return true; } /** Rehashes this set if the table is too large. * *

Let N be the smallest table size that can hold * max(n,{@link #size()}) entries, still satisfying the load factor. If the current * table size is smaller than or equal to N, this method does * nothing. Otherwise, it rehashes this set in a table of size * N. * *
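 * For instance (a minimal sketch; s denotes an instance of the generated it.unimi.dsi.fastutil.ints.IntOpenHashSet
 * variant whose table is already small):
 *
 *    s.trim( 1000000 ); // the table is not larger than needed for a million elements: nothing happens
 *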

This method is useful when reusing sets. {@linkplain #clear() Clearing a * set} leaves the table size untouched. If you are reusing a set * many times, you can call this method with a typical * size to avoid keeping around a very large table just * because of a few large transient sets. * * @param n the threshold for the trimming. * @return true if there was enough memory to trim the set. * @see #trim() */ public boolean trim( final int n ) { final int l = HashCommon.nextPowerOfTwo( (int)Math.ceil( n / f ) ); if ( l >= n || size > maxFill( l, f ) ) return true; try { rehash( l ); } catch( OutOfMemoryError cantDoIt ) { return false; } return true; } /** Rehashes the set. * *

This method implements the basic rehashing strategy, and may be * overriden by subclasses implementing different rehashing strategies (e.g., * disk-based rehashing). However, you should not override this method * unless you understand the internal workings of this class. * * @param newN the new size */ SUPPRESS_WARNINGS_KEY_UNCHECKED protected void rehash( final int newN ) { final KEY_GENERIC_TYPE key[] = this.key; final int mask = newN - 1; // Note that this is used by the hashing macro final KEY_GENERIC_TYPE newKey[] = KEY_GENERIC_ARRAY_CAST new KEY_TYPE[ newN + 1 ]; #ifdef Linked int i = first, prev = -1, newPrev = -1, t, pos; final long link[] = this.link; final long newLink[] = new long[ newN + 1 ]; first = -1; for( int j = size; j-- != 0; ) { if ( KEY_EQUALS_NULL( key[ i ] ) ) pos = newN; else { pos = KEY2INTHASH( key[ i ] ) & mask; while ( ! KEY_IS_NULL( newKey[ pos ] ) ) pos = ( pos + 1 ) & mask; } newKey[ pos ] = key[ i ]; if ( prev != -1 ) { SET_NEXT( newLink[ newPrev ], pos ); SET_PREV( newLink[ pos ], newPrev ); newPrev = pos; } else { newPrev = first = pos; // Special case of SET(newLink[ pos ], -1, -1); newLink[ pos ] = -1L; } t = i; i = GET_NEXT(link[ i ]); prev = t; } this.link = newLink; this.last = newPrev; if ( newPrev != -1 ) // Special case of SET_NEXT( newLink[ newPrev ], -1 ); newLink[ newPrev ] |= -1 & 0xFFFFFFFFL; #else int i = n, pos; for( int j = realSize(); j-- != 0; ) { while( KEY_IS_NULL( key[ --i ] ) ); if ( ! KEY_IS_NULL( newKey[ pos = KEY2INTHASH( key[ i ] ) & mask ] ) ) while ( ! KEY_IS_NULL( newKey[ pos = ( pos + 1 ) & mask ] ) ); newKey[ pos ] = key[ i ]; } #endif n = newN; this.mask = mask; maxFill = maxFill( n, f ); this.key = newKey; } /** Returns a deep copy of this set. * *

This method performs a deep copy of this hash set; the data stored in the * set, however, is not cloned. Note that this makes a difference only for object keys. * * @return a deep copy of this set. */ SUPPRESS_WARNINGS_KEY_UNCHECKED public OPEN_HASH_SET KEY_GENERIC clone() { OPEN_HASH_SET KEY_GENERIC c; try { c = (OPEN_HASH_SET KEY_GENERIC)super.clone(); } catch(CloneNotSupportedException cantHappen) { throw new InternalError(); } c.key = key.clone(); c.containsNull = containsNull; #ifdef Linked c.link = link.clone(); #endif #ifdef Custom c.strategy = strategy; #endif return c; } /** Returns a hash code for this set. * * This method overrides the generic method provided by the superclass. * Since equals() is not overriden, it is important * that the value returned by this method is the same value as * the one returned by the overriden method. * * @return a hash code for this set. */ public int hashCode() { int h = 0; for( int j = realSize(), i = 0; j-- != 0; ) { while( KEY_IS_NULL( key[ i ] ) ) i++; #if KEYS_REFERENCE if ( this != key[ i ] ) #endif h += KEY2JAVAHASH_NOT_NULL( key[ i ] ); i++; } // Zero / null have hash zero. return h; } private void writeObject(java.io.ObjectOutputStream s) throws java.io.IOException { final KEY_ITERATOR KEY_GENERIC i = iterator(); s.defaultWriteObject(); for( int j = size; j-- != 0; ) s.WRITE_KEY( i.NEXT_KEY() ); } SUPPRESS_WARNINGS_KEY_UNCHECKED private void readObject(java.io.ObjectInputStream s) throws java.io.IOException, ClassNotFoundException { s.defaultReadObject(); n = arraySize( size, f ); maxFill = maxFill( n, f ); mask = n - 1; final KEY_GENERIC_TYPE key[] = this.key = KEY_GENERIC_ARRAY_CAST new KEY_TYPE[ n + 1 ]; #ifdef Linked final long link[] = this.link = new long[ n + 1 ]; int prev = -1; first = last = -1; #endif KEY_GENERIC_TYPE k; for( int i = size, pos; i-- != 0; ) { k = KEY_GENERIC_CAST s.READ_KEY(); if ( KEY_EQUALS_NULL( k ) ) { pos = n; containsNull = true; } else { if ( ! KEY_IS_NULL( key[ pos = KEY2INTHASH( k ) & mask ] ) ) while ( ! KEY_IS_NULL( key[ pos = ( pos + 1 ) & mask ] ) ); } key[ pos ] = k; #ifdef Linked if ( first != -1 ) { SET_NEXT( link[ prev ], pos ); SET_PREV( link[ pos ], prev ); prev = pos; } else { prev = first = pos; // Special case of SET_PREV( newLink[ pos ], -1 ); link[ pos ] |= (-1L & 0xFFFFFFFFL) << 32; } #endif } #ifdef Linked last = prev; if ( prev != -1 ) // Special case of SET_NEXT( link[ prev ], -1 ); link[ prev ] |= -1 & 0xFFFFFFFFL; #endif if ( ASSERTS ) checkTable(); } #ifdef ASSERTS_CODE private void checkTable() { assert ( n & -n ) == n : "Table length is not a power of two: " + n; assert n == key.length - 1; int n = key.length - 1; while( n-- != 0 ) if ( ! KEY_IS_NULL( key[ n ] ) && ! contains( key[ n ] ) ) throw new AssertionError( "Hash table has key " + key[ n ] + " marked as occupied, but the key does not belong to the table" ); #if KEYS_PRIMITIVE java.util.HashSet s = new java.util.HashSet (); #else java.util.HashSet s = new java.util.HashSet(); #endif for( int i = key.length - 1; i-- != 0; ) if ( ! KEY_IS_NULL( key[ i ] ) && ! s.add( key[ i ] ) ) throw new AssertionError( "Key " + key[ i ] + " appears twice at position " + i ); #ifdef Linked KEY_LIST_ITERATOR KEY_GENERIC i = iterator(); KEY_GENERIC_TYPE k; n = size(); while( n-- != 0 ) if ( ! 
contains( k = i.NEXT_KEY() ) ) throw new AssertionError( "Linked hash table forward enumerates key " + k + ", but the key does not belong to the table" ); if ( i.hasNext() ) throw new AssertionError( "Forward iterator not exhausted" ); n = size(); if ( n > 0 ) { i = iterator( LAST() ); while( n-- != 0 ) if ( ! contains( k = i.PREV_KEY() ) ) throw new AssertionError( "Linked hash table backward enumerates key " + k + ", but the key does not belong to the table" ); if ( i.hasPrevious() ) throw new AssertionError( "Previous iterator not exhausted" ); } #endif } #else private void checkTable() {} #endif #ifdef TEST private static long seed = System.currentTimeMillis(); private static java.util.Random r = new java.util.Random( seed ); private static KEY_TYPE genKey() { #if KEY_CLASS_Byte || KEY_CLASS_Short || KEY_CLASS_Character return (KEY_TYPE)(r.nextInt()); #elif KEYS_PRIMITIVE return r.NEXT_KEY(); #elif KEY_CLASS_Object #ifdef Custom int i = r.nextInt( 3 ); byte a[] = new byte[ i ]; while( i-- != 0 ) a[ i ] = (byte)r.nextInt(); return a; #else return Integer.toBinaryString( r.nextInt() ); #endif #else return new java.io.Serializable() {}; #endif } private static final class ArrayComparator implements java.util.Comparator { public int compare( Object a, Object b ) { byte[] aa = (byte[])a; byte[] bb = (byte[])b; int length = Math.min( aa.length, bb.length ); for( int i = 0; i < length; i++ ) { if ( aa[ i ] < bb[ i ] ) return -1; if ( aa[ i ] > bb[ i ] ) return 1; } return aa.length == bb.length ? 0 : ( aa.length < bb.length ? -1 : 1 ); } } private static final class MockSet extends java.util.TreeSet { private java.util.List list = new java.util.ArrayList(); public MockSet( java.util.Comparator c ) { super( c ); } public boolean add( Object k ) { if ( ! 
contains( k ) ) list.add( k ); return super.add( k ); } public boolean addAll( Collection c ) { java.util.Iterator i = c.iterator(); boolean result = false; while( i.hasNext() ) result |= add( i.next() ); return result; } public boolean removeAll( Collection c ) { java.util.Iterator i = c.iterator(); boolean result = false; while( i.hasNext() ) result |= remove( i.next() ); return result; } public boolean remove( Object k ) { if ( contains( k ) ) { int i = list.size(); while( i-- != 0 ) if ( comparator().compare( list.get( i ), k ) == 0 ) { list.remove( i ); break; } } return super.remove( k ); } private void justRemove( Object k ) { super.remove( k ); } public java.util.Iterator iterator() { return new java.util.Iterator() { final java.util.Iterator iterator = list.iterator(); Object curr; public Object next() { return curr = iterator.next(); } public boolean hasNext() { return iterator.hasNext(); } public void remove() { justRemove( curr ); iterator.remove(); } }; } } private static java.text.NumberFormat format = new java.text.DecimalFormat( "#,###.00" ); private static java.text.FieldPosition fp = new java.text.FieldPosition( 0 ); private static String format( double d ) { StringBuffer s = new StringBuffer(); return format.format( d, s, fp ).toString(); } private static void speedTest( int n, float f, boolean comp ) { #ifndef Custom int i, j; OPEN_HASH_SET m; #ifdef Linked java.util.LinkedHashSet t; #else java.util.HashSet t; #endif KEY_TYPE k[] = new KEY_TYPE[n]; KEY_TYPE nk[] = new KEY_TYPE[n]; long ns; for( i = 0; i < n; i++ ) { k[i] = genKey(); nk[i] = genKey(); } double totAdd = 0, totYes = 0, totNo = 0, totIter = 0, totRemYes = 0, totRemNo = 0, d; if ( comp ) { for( j = 0; j < 20; j++ ) { #ifdef Linked t = new java.util.LinkedHashSet( 16 ); #else t = new java.util.HashSet( 16 ); #endif /* We add pairs to t. */ ns = System.nanoTime(); for( i = 0; i < n; i++ ) t.add( KEY2OBJ( k[i] ) ); d = ( System.nanoTime() - ns ) / (double)n; if ( j > 2 ) totAdd += d; System.out.print("Add: " + format( d ) + "ns " ); /* We check for pairs in t. */ ns = System.nanoTime(); for( i = 0; i < n; i++ ) t.contains( KEY2OBJ( k[i] ) ); d = ( System.nanoTime() - ns ) / (double)n; if ( j > 2 ) totYes += d; System.out.print("Yes: " + format( d ) + "ns " ); /* We check for pairs not in t. */ ns = System.nanoTime(); for( i = 0; i < n; i++ ) t.contains( KEY2OBJ( nk[i] ) ); d = ( System.nanoTime() - ns ) / (double)n; if ( j > 2 ) totNo += d; System.out.print("No: " + format( d ) + "ns " ); /* We iterate on t. */ ns = System.nanoTime(); for( java.util.Iterator it = t.iterator(); it.hasNext(); it.next() ); d = ( System.nanoTime() - ns ) / (double)n; if ( j > 2 ) totIter += d; System.out.print("Iter: " + format( d ) + "ns " ); // Too expensive in the linked case #ifndef Linked /* We delete pairs not in t. */ ns = System.nanoTime(); for( i = 0; i < n; i++ ) t.remove( KEY2OBJ( nk[i] ) ); d = ( System.nanoTime() - ns ) / (double)n; if ( j > 2 ) totRemNo += d; System.out.print("RemNo: " + format( d ) + "ns " ); /* We delete pairs in t. 
*/ ns = System.nanoTime(); for( i = 0; i < n; i++ ) t.remove( KEY2OBJ( k[i] ) ); d = ( System.nanoTime() - ns ) / (double)n; if ( j > 2 ) totRemYes += d; System.out.print("RemYes: " + format( d ) + "ns " ); #endif System.out.println(); } System.out.println(); System.out.println( "java.util Add: " + format( totAdd/(j-3) ) + "ns Yes: " + format( totYes/(j-3) ) + "ns No: " + format( totNo/(j-3) ) + "ns Iter: " + format( totIter/(j-3) ) + "ns RemNo: " + format( totRemNo/(j-3) ) + "ns RemYes: " + format( totRemYes/(j-3) ) + "ns" ); System.out.println(); totAdd = totYes = totNo = totIter = totRemYes = totRemNo = 0; } for( j = 0; j < 20; j++ ) { m = new OPEN_HASH_SET( 16, f ); /* We add pairs to m. */ ns = System.nanoTime(); for( i = 0; i < n; i++ ) m.add( k[i] ); d = ( System.nanoTime() - ns ) / (double)n; if ( j > 2 ) totAdd += d; System.out.print("Add: " + format( d ) + "ns " ); /* We check for pairs in m. */ ns = System.nanoTime(); for( i = 0; i < n; i++ ) m.contains( k[i] ); d = ( System.nanoTime() - ns ) / (double)n; if ( j > 2 ) totYes += d; System.out.print("Yes: " + format( d ) + "ns " ); /* We check for pairs not in m. */ ns = System.nanoTime(); for( i = 0; i < n; i++ ) m.contains( nk[i] ); d = ( System.nanoTime() - ns ) / (double)n; if ( j > 2 ) totNo += d; System.out.print("No: " + format( d ) + "ns " ); /* We iterate on m. */ ns = System.nanoTime(); for( KEY_ITERATOR it = (KEY_ITERATOR)m.iterator(); it.hasNext(); it.NEXT_KEY() ); d = ( System.nanoTime() - ns ) / (double)n; if ( j > 2 ) totIter += d; System.out.print("Iter: " + format( d ) + "ns " ); // Too expensive in the linked case #ifndef Linked /* We delete pairs not in m. */ ns = System.nanoTime(); for( i = 0; i < n; i++ ) m.remove( nk[i] ); d = ( System.nanoTime() - ns ) / (double)n; if ( j > 2 ) totRemNo += d; System.out.print("RemNo: " + format( d ) + "ns " ); /* We delete pairs in m. */ ns = System.nanoTime(); for( i = 0; i < n; i++ ) m.remove( k[i] ); d = ( System.nanoTime() - ns ) / (double)n; if ( j > 2 ) totRemYes += d; System.out.print("RemYes: " + format( d ) + "ns " ); #endif System.out.println(); } System.out.println(); System.out.println( "fastutil Add: " + format( totAdd/(j-3) ) + "ns Yes: " + format( totYes/(j-3) ) + "ns No: " + format( totNo/(j-3) ) + "ns Iter: " + format( totIter/(j-3) ) + "ns RemNo: " + format( totRemNo/(j-3) ) + "ns RemYes: " + format( totRemYes/(j-3) ) + "ns" ); System.out.println(); #endif } private static void fatal( String msg ) { System.out.println( msg ); System.exit( 1 ); } private static void ensure( boolean cond, String msg ) { if ( cond ) return; fatal( msg ); } private static void printProbes( OPEN_HASH_SET m ) { long totProbes = 0; double totSquareProbes = 0; int maxProbes = 0; final double f = (double)m.size / m.n; for( int i = 0, c = 0; i < m.n; i++ ) { if ( ! 
KEY_IS_NULL( m.key[ i ] ) ) c++; else { if ( c != 0 ) { final long p = ( c + 1 ) * ( c + 2 ) / 2; totProbes += p; totSquareProbes += (double)p * p; } maxProbes = Math.max( c, maxProbes ); c = 0; totProbes++; totSquareProbes++; } } final double expected = (double)totProbes / m.n; System.err.println( "Expected probes: " + ( 3 * Math.sqrt( 3 ) * ( f / ( ( 1 - f ) * ( 1 - f ) ) ) + 4 / ( 9 * f ) - 1 ) + "; actual: " + expected + "; stddev: " + Math.sqrt( totSquareProbes / m.n - expected * expected ) + "; max probes: " + maxProbes ); } private static void test( int n, float f ) { #if !defined(Custom) || KEYS_REFERENCE int c; #ifdef Custom OPEN_HASH_SET m = new OPEN_HASH_SET(Hash.DEFAULT_INITIAL_SIZE, f, it.unimi.dsi.fastutil.bytes.ByteArrays.HASH_STRATEGY); #else OPEN_HASH_SET m = new OPEN_HASH_SET(Hash.DEFAULT_INITIAL_SIZE, f); #endif #ifdef Linked #ifdef Custom java.util.Set t = new MockSet(new ArrayComparator()); #else java.util.Set t = new java.util.LinkedHashSet(); #endif #else #ifdef Custom java.util.Set t = new java.util.TreeSet(new ArrayComparator()); #else java.util.Set t = new java.util.HashSet(); #endif #endif /* First of all, we fill t with random data. */ for(int i=0; i 0 ) { java.util.ListIterator i, j; Object J; j = new java.util.LinkedList( t ).listIterator(); int e = r.nextInt( t.size() ); Object from; do from = j.next(); while( e-- != 0 ); i = (java.util.ListIterator)m.iterator( KEY_OBJ2TYPE( from ) ); for( int k = 0; k < 2*n; k++ ) { ensure( i.hasNext() == j.hasNext(), "Error (" + seed + "): divergence in hasNext() (iterator with starting point " + from + ")" ); ensure( i.hasPrevious() == j.hasPrevious(), "Error (" + seed + "): divergence in hasPrevious() (iterator with starting point " + from + ")" ); if ( r.nextFloat() < .8 && i.hasNext() ) { #ifdef Custom ensure( m.strategy().equals( i.next(), J = j.next() ), "Error (" + seed + "): divergence in next() (iterator with starting point " + from + ")" ); #else ensure( i.next().equals( J = j.next() ), "Error (" + seed + "): divergence in next() (iterator with starting point " + from + ")" ); #endif if ( r.nextFloat() < 0.5 ) { i.remove(); j.remove(); t.remove( J ); } } else if ( r.nextFloat() < .2 && i.hasPrevious() ) { #ifdef Custom ensure( m.strategy().equals( i.previous(), J = j.previous() ), "Error (" + seed + "): divergence in previous() (iterator with starting point " + from + ")" ); #else ensure( i.previous().equals( J = j.previous() ), "Error (" + seed + "): divergence in previous() (iterator with starting point " + from + ")" ); #endif if ( r.nextFloat() < 0.5 ) { i.remove(); j.remove(); t.remove( J ); } } ensure( i.nextIndex() == j.nextIndex(), "Error (" + seed + "): divergence in nextIndex() (iterator with starting point " + from + ")" ); ensure( i.previousIndex() == j.previousIndex(), "Error (" + seed + "): divergence in previousIndex() (iterator with starting point " + from + ")" ); } } /* Now we check that m actually holds that data. */ ensure( m.equals(t), "Error (" + seed + "): ! m.equals( t ) after iteration" ); ensure( t.equals(m), "Error (" + seed + "): ! t.equals( m ) after iteration" ); #endif /* Now we take out of m everything, and check that it is empty. */ for(java.util.Iterator i=m.iterator(); i.hasNext(); ) { i.next(); i.remove();} if (!m.isEmpty()) { System.out.println("Error (" + seed + "): m is not empty (as it should be)"); System.exit( 1 ); } #if KEY_CLASS_Integer || KEY_CLASS_Long m = new OPEN_HASH_SET(n, f); t.clear(); int x; /* Now we torture-test the hash table. 
This part is implemented only for integers and longs. */ int p = m.key.length - 1; for(int i=0; i2) f = Float.parseFloat(args[2]); if ( args.length > 3 ) r = new java.util.Random( seed = Long.parseLong( args[ 3 ] ) ); try { if ("speedTest".equals(args[0]) || "speedComp".equals(args[0])) speedTest( n, f, "speedComp".equals(args[0]) ); else if ( "test".equals( args[0] ) ) test(n, f); } catch( Throwable e ) { e.printStackTrace( System.err ); System.err.println( "seed: " + seed ); } } #endif } fastutil-7.1.0/drv/PriorityQueue.drv0000664000000000000000000000375013050701620016207 0ustar rootroot/* * Copyright (C) 2003-2016 Paolo Boldi and Sebastiano Vigna * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package PACKAGE; import java.util.NoSuchElementException; import it.unimi.dsi.fastutil.PriorityQueue; /** A type-specific {@link PriorityQueue}; provides some additional methods that use polymorphism to avoid (un)boxing. * *
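 * <p>A minimal usage sketch (hypothetical, assuming the {@code int} specialization
 * generated from this template, e.g. {@code IntPriorityQueue} backed by an
 * {@code IntHeapPriorityQueue}):
 * <pre>{@code
 * IntPriorityQueue q = new IntHeapPriorityQueue();
 * q.enqueue( 3 );
 * q.enqueue( 1 );
 * int smallest = q.dequeueInt(); // 1, without boxing
 * int next = q.firstInt();       // 3
 * }</pre>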
<p>
Additionally, this interface strengthens {@link #comparator()}. */ public interface PRIORITY_QUEUE extends PriorityQueue { /** Enqueues a new element. * * @param x the element to enqueue. */ void enqueue( KEY_GENERIC_TYPE x ); /** Dequeues the {@linkplain #first() first} element from the queue. * * @return the dequeued element. * @throws NoSuchElementException if the queue is empty. */ KEY_GENERIC_TYPE DEQUEUE(); /** Returns the first element of the queue. * * @return the first element. * @throws NoSuchElementException if the queue is empty. */ KEY_GENERIC_TYPE FIRST(); /** Returns the last element of the queue, that is, the element that would be dequeued last (optional operation). * * @return the last element. * @throws NoSuchElementException if the queue is empty. */ KEY_GENERIC_TYPE LAST(); /** Returns the comparator associated with this priority queue, or null if it uses its elements' natural ordering. * *
<p>
Note that this specification strengthens the one given in {@link PriorityQueue#comparator()}. * * @see PriorityQueue#comparator() */ KEY_COMPARATOR comparator(); } fastutil-7.1.0/drv/PriorityQueues.drv0000664000000000000000000000650713050701620016375 0ustar rootroot/* * Copyright (C) 2003-2016 Sebastiano Vigna * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package PACKAGE; /** A class providing static methods and objects that do useful things with type-specific priority queues. * * @see it.unimi.dsi.fastutil.PriorityQueue */ public class PRIORITY_QUEUES { private PRIORITY_QUEUES() {} /** A synchronized wrapper class for priority queues. */ public static class SynchronizedPriorityQueue KEY_GENERIC implements PRIORITY_QUEUE KEY_GENERIC { final protected PRIORITY_QUEUE KEY_GENERIC q; final protected Object sync; protected SynchronizedPriorityQueue( final PRIORITY_QUEUE KEY_GENERIC q, final Object sync ) { this.q = q; this.sync = sync; } protected SynchronizedPriorityQueue( final PRIORITY_QUEUE KEY_GENERIC q ) { this.q = q; this.sync = this; } public void enqueue( KEY_GENERIC_TYPE x ) { synchronized( sync ) { q.enqueue( x ); } } public KEY_GENERIC_TYPE DEQUEUE() { synchronized( sync ) { return q.DEQUEUE(); } } public KEY_GENERIC_TYPE FIRST() { synchronized( sync ) { return q.FIRST(); } } public KEY_GENERIC_TYPE LAST() { synchronized( sync ) { return q.LAST(); } } public boolean isEmpty() { synchronized( sync ) { return q.isEmpty(); } } public int size() { synchronized( sync ) { return q.size(); } } public void clear() { synchronized( sync ) { q.clear(); } } public void changed() { synchronized( sync ) { q.changed(); } } public KEY_COMPARATOR KEY_SUPER_GENERIC comparator() { synchronized( sync ) { return q.comparator(); } } #if !KEY_CLASS_Object public void enqueue( KEY_CLASS x ) { synchronized( sync ) { q.enqueue( x ); } } public KEY_CLASS dequeue() { synchronized( sync ) { return q.dequeue(); } } public KEY_CLASS first() { synchronized( sync ) { return q.first(); } } public KEY_CLASS last() { synchronized( sync ) { return q.last(); } } #endif } /** Returns a synchronized type-specific priority queue backed by the specified type-specific priority queue. * * @param q the priority queue to be wrapped in a synchronized priority queue. * @return a synchronized view of the specified priority queue. */ public static KEY_GENERIC PRIORITY_QUEUE KEY_GENERIC synchronize( final PRIORITY_QUEUE KEY_GENERIC q ) { return new SynchronizedPriorityQueue( q ); } /** Returns a synchronized type-specific priority queue backed by the specified type-specific priority queue, using an assigned object to synchronize. * * @param q the priority queue to be wrapped in a synchronized priority queue. * @param sync an object that will be used to synchronize the access to the priority queue. * @return a synchronized view of the specified priority queue. 
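 * <p>For example (a hypothetical sketch, assuming the {@code int} specialization
 * {@code IntPriorityQueues} generated from this template):
 * <pre>{@code
 * Object lock = new Object();
 * IntPriorityQueue q = IntPriorityQueues.synchronize( new IntHeapPriorityQueue(), lock );
 * synchronized( lock ) {
 *     // compound operations must still be guarded by the same lock
 *     if ( ! q.isEmpty() ) q.dequeueInt();
 * }
 * }</pre>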
*/ public static KEY_GENERIC PRIORITY_QUEUE KEY_GENERIC synchronize( final PRIORITY_QUEUE KEY_GENERIC q, final Object sync ) { return new SynchronizedPriorityQueue( q, sync ); } } fastutil-7.1.0/drv/RBTreeMap.drv0000664000000000000000000026543313050701620015152 0ustar rootroot/* * Copyright (C) 2002-2016 Sebastiano Vigna * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package PACKAGE; import it.unimi.dsi.fastutil.objects.AbstractObjectSortedSet; import it.unimi.dsi.fastutil.objects.ObjectBidirectionalIterator; import it.unimi.dsi.fastutil.objects.ObjectListIterator; import it.unimi.dsi.fastutil.objects.ObjectSortedSet; import VALUE_PACKAGE.VALUE_COLLECTION; import VALUE_PACKAGE.VALUE_ABSTRACT_COLLECTION; import VALUE_PACKAGE.VALUE_ITERATOR; import java.util.Comparator; import java.util.Iterator; import java.util.Map; import java.util.SortedMap; import java.util.NoSuchElementException; #if VALUES_PRIMITIVE import VALUE_PACKAGE.VALUE_LIST_ITERATOR; #endif /** A type-specific red-black tree map with a fast, small-footprint implementation. * *
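 * <p>A minimal usage sketch (hypothetical, assuming the {@code int}/{@code int}
 * specialization {@code Int2IntRBTreeMap} generated from this template):
 * <pre>{@code
 * Int2IntRBTreeMap m = new Int2IntRBTreeMap();
 * m.put( 2, 20 );
 * m.put( 1, 10 );
 * int v = m.get( 1 );          // 10, without boxing
 * int first = m.firstIntKey(); // 1: keys are kept sorted
 * }</pre>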
<p>
The iterators provided by the views of this class are type-specific {@linkplain * it.unimi.dsi.fastutil.BidirectionalIterator bidirectional iterators}. * Moreover, the iterator returned by iterator() can be safely cast * to a type-specific {@linkplain java.util.ListIterator list iterator}. * */ public class RB_TREE_MAP KEY_VALUE_GENERIC extends ABSTRACT_SORTED_MAP KEY_VALUE_GENERIC implements java.io.Serializable, Cloneable { /** A reference to the root entry. */ protected transient Entry KEY_VALUE_GENERIC tree; /** Number of entries in this map. */ protected int count; /** The first key in this map. */ protected transient Entry KEY_VALUE_GENERIC firstEntry; /** The last key in this map. */ protected transient Entry KEY_VALUE_GENERIC lastEntry; /** Cached set of entries. */ protected transient ObjectSortedSet entries; /** Cached set of keys. */ protected transient SORTED_SET KEY_GENERIC keys; /** Cached collection of values. */ protected transient VALUE_COLLECTION VALUE_GENERIC values; /** The value of this variable remembers, after a put() * or a remove(), whether the domain of the map * has been modified. */ protected transient boolean modified; /** This map's comparator, as provided in the constructor. */ protected Comparator storedComparator; /** This map's actual comparator; it may differ from {@link #storedComparator} because it is always a type-specific comparator, so it could be derived from the former by wrapping. */ protected transient KEY_COMPARATOR KEY_SUPER_GENERIC actualComparator; private static final long serialVersionUID = -7046029254386353129L; private static final boolean ASSERTS = ASSERTS_VALUE; { allocatePaths(); } /** Creates a new empty tree map. */ public RB_TREE_MAP() { tree = null; count = 0; } /** Generates the comparator that will be actually used. * *
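 * <p>For instance (hypothetically, in the {@code Int2IntRBTreeMap} specialization), both a
 * plain {@link Comparator} and a type-specific {@code IntComparator} can be passed to the
 * constructor; the former is wrapped on the fly, the latter is used directly:
 * <pre>{@code
 * // wrapped into a type-specific comparator by setActualComparator()
 * Int2IntRBTreeMap a = new Int2IntRBTreeMap( java.util.Collections.reverseOrder() );
 * // already type-specific: used as is
 * Int2IntRBTreeMap b = new Int2IntRBTreeMap( IntComparators.OPPOSITE_COMPARATOR );
 * }</pre>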
<p>
When a specific {@link Comparator} is specified and stored in {@link * #storedComparator}, we must check whether it is type-specific. If it is * so, we can used directly, and we store it in {@link #actualComparator}. Otherwise, * we generate on-the-fly an anonymous class that wraps the non-specific {@link Comparator} * and makes it into a type-specific one. */ private void setActualComparator() { #if KEY_CLASS_Object actualComparator = storedComparator; #else /* If the provided comparator is already type-specific, we use it. Otherwise, we use a wrapper anonymous class to fake that it is type-specific. */ if ( storedComparator == null || storedComparator instanceof KEY_COMPARATOR ) actualComparator = (KEY_COMPARATOR)storedComparator; else actualComparator = new KEY_COMPARATOR KEY_SUPER_GENERIC() { public int compare( KEY_GENERIC_TYPE k1, KEY_GENERIC_TYPE k2 ) { return storedComparator.compare( KEY2OBJ( k1 ), KEY2OBJ( k2 ) ); } public int compare( KEY_GENERIC_CLASS ok1, KEY_GENERIC_CLASS ok2 ) { return storedComparator.compare( ok1, ok2 ); } }; #endif } /** Creates a new empty tree map with the given comparator. * * @param c a (possibly type-specific) comparator. */ public RB_TREE_MAP( final Comparator c ) { this(); storedComparator = c; setActualComparator(); } /** Creates a new tree map copying a given map. * * @param m a {@link Map} to be copied into the new tree map. */ public RB_TREE_MAP( final Map m ) { this(); putAll( m ); } /** Creates a new tree map copying a given sorted map (and its {@link Comparator}). * * @param m a {@link SortedMap} to be copied into the new tree map. */ public RB_TREE_MAP( final SortedMap m ) { this( m.comparator() ); putAll( m ); } /** Creates a new tree map copying a given map. * * @param m a type-specific map to be copied into the new tree map. */ public RB_TREE_MAP( final MAP KEY_VALUE_EXTENDS_GENERIC m ) { this(); putAll( m ); } /** Creates a new tree map copying a given sorted map (and its {@link Comparator}). * * @param m a type-specific sorted map to be copied into the new tree map. */ public RB_TREE_MAP( final SORTED_MAP KEY_VALUE_GENERIC m ) { this( m.comparator() ); putAll( m ); } /** Creates a new tree map using the elements of two parallel arrays and the given comparator. * * @param k the array of keys of the new tree map. * @param v the array of corresponding values in the new tree map. * @param c a (possibly type-specific) comparator. * @throws IllegalArgumentException if k and v have different lengths. */ public RB_TREE_MAP( final KEY_GENERIC_TYPE[] k, final VALUE_GENERIC_TYPE v[], final Comparator c ) { this( c ); if ( k.length != v.length ) throw new IllegalArgumentException( "The key array and the value array have different lengths (" + k.length + " and " + v.length + ")" ); for( int i = 0; i < k.length; i++ ) this.put( k[ i ], v[ i ] ); } /** Creates a new tree map using the elements of two parallel arrays. * * @param k the array of keys of the new tree map. * @param v the array of corresponding values in the new tree map. * @throws IllegalArgumentException if k and v have different lengths. */ public RB_TREE_MAP( final KEY_GENERIC_TYPE[] k, final VALUE_GENERIC_TYPE v[] ) { this( k, v, null ); } /* * The following methods implements some basic building blocks used by * all accessors. They are (and should be maintained) identical to those used in RBTreeSet.drv. * * The put()/remove() code is derived from Ben Pfaff's GNU libavl * (http://www.msu.edu/~pfaffben/avl/). 
If you want to understand what's * going on, you should have a look at the literate code contained therein * first. */ /** Compares two keys in the right way. * *
<p>
This method uses the {@link #actualComparator} if it is non-null. * Otherwise, it resorts to primitive type comparisons or to {@link Comparable#compareTo(Object) compareTo()}. * * @param k1 the first key. * @param k2 the second key. * @return a number smaller than, equal to or greater than 0, as usual * (i.e., when k1 < k2, k1 = k2 or k1 > k2, respectively). */ SUPPRESS_WARNINGS_KEY_UNCHECKED final int compare( final KEY_GENERIC_TYPE k1, final KEY_GENERIC_TYPE k2 ) { return actualComparator == null ? KEY_CMP( k1, k2 ) : actualComparator.compare( k1, k2 ); } /** Returns the entry corresponding to the given key, if it is in the tree; null, otherwise. * * @param k the key to search for. * @return the corresponding entry, or null if no entry with the given key exists. */ final Entry KEY_VALUE_GENERIC findKey( final KEY_GENERIC_TYPE k ) { Entry KEY_VALUE_GENERIC e = tree; int cmp; while ( e != null && ( cmp = compare( k, e.key ) ) != 0 ) e = cmp < 0 ? e.left() : e.right(); return e; } /** Locates a key. * * @param k a key. * @return the last entry on a search for the given key; this will be * the given key, if it present; otherwise, it will be either the smallest greater key or the greatest smaller key. */ final Entry KEY_VALUE_GENERIC locateKey( final KEY_GENERIC_TYPE k ) { Entry KEY_VALUE_GENERIC e = tree, last = tree; int cmp = 0; while ( e != null && ( cmp = compare( k, e.key ) ) != 0 ) { last = e; e = cmp < 0 ? e.left() : e.right(); } return cmp == 0 ? e : last; } /** This vector remembers the path and the direction followed during the * current insertion. It suffices for about 232 entries. */ private transient boolean dirPath[]; private transient Entry KEY_VALUE_GENERIC nodePath[]; SUPPRESS_WARNINGS_KEY_VALUE_UNCHECKED private void allocatePaths() { dirPath = new boolean[ 64 ]; nodePath = new Entry[ 64 ]; } #if VALUES_PRIMITIVE && !VALUE_CLASS_Boolean /** Adds an increment to value currently associated with a key. * *
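 * <p>For example (a hypothetical sketch, assuming the {@code Int2IntRBTreeMap}
 * specialization), this method makes it easy to use the map as a sorted counter:
 * <pre>{@code
 * Int2IntRBTreeMap counts = new Int2IntRBTreeMap();
 * counts.defaultReturnValue( 0 );
 * counts.addTo( 42, 1 ); // 42 was absent: it is now mapped to 0 + 1
 * counts.addTo( 42, 1 ); // now mapped to 2
 * }</pre>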
<p>
Note that this method respects the {@linkplain #defaultReturnValue() default return value} semantics: when * called with a key that does not currently appears in the map, the key * will be associated with the default return value plus * the given increment. * * @param k the key. * @param incr the increment. * @return the old value, or the {@linkplain #defaultReturnValue() default return value} if no value was present for the given key. */ public VALUE_GENERIC_TYPE addTo( final KEY_GENERIC_TYPE k, final VALUE_GENERIC_TYPE incr) { Entry KEY_VALUE_GENERIC e = add( k ); final VALUE_GENERIC_TYPE oldValue = e.value; e.value += incr; return oldValue; } #endif public VALUE_GENERIC_TYPE put( final KEY_GENERIC_TYPE k, final VALUE_GENERIC_TYPE v ) { Entry KEY_VALUE_GENERIC e = add( k ); final VALUE_GENERIC_TYPE oldValue = e.value; e.value = v; return oldValue; } /** Returns a node with key k in the balanced tree, creating one with defRetValue if necessary. * * @param k the key * @return a node with key k. If a node with key k already exists, then that node is returned, * otherwise a new node with defRetValue is created ensuring that the tree is balanced after creation of the node. */ private Entry KEY_VALUE_GENERIC add( final KEY_GENERIC_TYPE k ) { /* After execution of this method, modified is true iff a new entry has been inserted. */ modified = false; int maxDepth = 0; Entry KEY_VALUE_GENERIC e; if ( tree == null ) { // The case of the empty tree is treated separately. count++; e = tree = lastEntry = firstEntry = new Entry KEY_VALUE_GENERIC( k, defRetValue ); } else { Entry KEY_VALUE_GENERIC p = tree; int cmp, i = 0; while( true ) { if ( ( cmp = compare( k, p.key ) ) == 0 ) { // We clean up the node path, or we could have stale references later. while( i-- != 0 ) nodePath[ i ] = null; return p; } nodePath[ i ] = p; if ( dirPath[ i++ ] = cmp > 0 ) { if ( p.succ() ) { count++; e = new Entry KEY_VALUE_GENERIC( k, defRetValue ); if ( p.right == null ) lastEntry = e; e.left = p; e.right = p.right; p.right( e ); break; } p = p.right; } else { if ( p.pred() ) { count++; e = new Entry KEY_VALUE_GENERIC( k, defRetValue ); if ( p.left == null ) firstEntry = e; e.right = p; e.left = p.left; p.left( e ); break; } p = p.left; } } modified = true; maxDepth = i--; while( i > 0 && ! nodePath[ i ].black() ) { if ( ! dirPath[ i - 1 ] ) { Entry KEY_VALUE_GENERIC y = nodePath[ i - 1 ].right; if ( ! nodePath[ i - 1 ].succ() && ! y.black() ) { nodePath[ i ].black( true ); y.black( true ); nodePath[ i - 1 ].black( false ); i -= 2; } else { Entry KEY_VALUE_GENERIC x; if ( ! dirPath[ i ] ) y = nodePath[ i ]; else { x = nodePath[ i ]; y = x.right; x.right = y.left; y.left = x; nodePath[ i - 1 ].left = y; if ( y.pred() ) { y.pred( false ); x.succ( y ); } } x = nodePath[ i - 1 ]; x.black( false ); y.black( true ); x.left = y.right; y.right = x; if ( i < 2 ) tree = y; else { if ( dirPath[ i - 2 ] ) nodePath[ i - 2 ].right = y; else nodePath[ i - 2 ].left = y; } if ( y.succ() ) { y.succ( false ); x.pred( y ); } break; } } else { Entry KEY_VALUE_GENERIC y = nodePath[ i - 1 ].left; if ( ! nodePath[ i - 1 ].pred() && ! 
y.black() ) { nodePath[ i ].black( true ); y.black( true ); nodePath[ i - 1 ].black( false ); i -= 2; } else { Entry KEY_VALUE_GENERIC x; if ( dirPath[ i ] ) y = nodePath[ i ]; else { x = nodePath[ i ]; y = x.left; x.left = y.right; y.right = x; nodePath[ i - 1 ].right = y; if ( y.succ() ) { y.succ( false ); x.pred( y ); } } x = nodePath[ i - 1 ]; x.black( false ); y.black( true ); x.right = y.left; y.left = x; if ( i < 2 ) tree = y; else { if ( dirPath[ i - 2 ] ) nodePath[ i - 2 ].right = y; else nodePath[ i - 2 ].left = y; } if ( y.pred() ){ y.pred( false ); x.succ( y ); } break; } } } } tree.black( true ); // We clean up the node path, or we could have stale references later. while( maxDepth-- != 0 ) nodePath[ maxDepth ] = null; if ( ASSERTS ) { checkNodePath(); checkTree( tree, 0, -1 ); } return e; } /* After execution of this method, {@link #modified} is true iff an entry has been deleted. */ SUPPRESS_WARNINGS_KEY_UNCHECKED public VALUE_GENERIC_TYPE REMOVE_VALUE( final KEY_TYPE k ) { modified = false; if ( tree == null ) return defRetValue; Entry KEY_VALUE_GENERIC p = tree; int cmp; int i = 0; final KEY_GENERIC_TYPE kk = KEY_GENERIC_CAST k; while( true ) { if ( ( cmp = compare( kk, p.key ) ) == 0 ) break; dirPath[ i ] = cmp > 0; nodePath[ i ] = p; if ( dirPath[ i++ ] ) { if ( ( p = p.right() ) == null ) { // We clean up the node path, or we could have stale references later. while( i-- != 0 ) nodePath[ i ] = null; return defRetValue; } } else { if ( ( p = p.left() ) == null ) { // We clean up the node path, or we could have stale references later. while( i-- != 0 ) nodePath[ i ] = null; return defRetValue; } } } if ( p.left == null ) firstEntry = p.next(); if ( p.right == null ) lastEntry = p.prev(); if ( p.succ() ) { if ( p.pred() ) { if ( i == 0 ) tree = p.left; else { if ( dirPath[ i - 1 ] ) nodePath[ i - 1 ].succ( p.right ); else nodePath[ i - 1 ].pred( p.left ); } } else { p.prev().right = p.right; if ( i == 0 ) tree = p.left; else { if ( dirPath[ i - 1 ] ) nodePath[ i - 1 ].right = p.left; else nodePath[ i - 1 ].left = p.left; } } } else { boolean color; Entry KEY_VALUE_GENERIC r = p.right; if ( r.pred() ) { r.left = p.left; r.pred( p.pred() ); if ( ! r.pred() ) r.prev().right = r; if ( i == 0 ) tree = r; else { if ( dirPath[ i - 1 ] ) nodePath[ i - 1 ].right = r; else nodePath[ i - 1 ].left = r; } color = r.black(); r.black( p.black() ); p.black( color ); dirPath[ i ] = true; nodePath[ i++ ] = r; } else { Entry KEY_VALUE_GENERIC s; int j = i++; while( true ) { dirPath[ i ] = false; nodePath[ i++ ] = r; s = r.left; if ( s.pred() ) break; r = s; } dirPath[ j ] = true; nodePath[ j ] = s; if ( s.succ() ) r.pred( s ); else r.left = s.right; s.left = p.left; if ( ! p.pred() ) { p.prev().right = s; s.pred( false ); } s.right( p.right ); color = s.black(); s.black( p.black() ); p.black( color ); if ( j == 0 ) tree = s; else { if ( dirPath[ j - 1 ] ) nodePath[ j - 1 ].right = s; else nodePath[ j - 1 ].left = s; } } } int maxDepth = i; if ( p.black() ) { for( ; i > 0; i-- ) { if ( dirPath[ i - 1 ] && ! nodePath[ i - 1 ].succ() || ! dirPath[ i - 1 ] && ! nodePath[ i - 1 ].pred() ) { Entry KEY_VALUE_GENERIC x = dirPath[ i - 1 ] ? nodePath[ i - 1 ].right : nodePath[ i - 1 ].left; if ( ! x.black() ) { x.black( true ); break; } } if ( ! dirPath[ i - 1 ] ) { Entry KEY_VALUE_GENERIC w = nodePath[ i - 1 ].right; if ( ! 
w.black() ) { w.black( true ); nodePath[ i - 1 ].black( false ); nodePath[ i - 1 ].right = w.left; w.left = nodePath[ i - 1 ]; if ( i < 2 ) tree = w; else { if ( dirPath[ i - 2 ] ) nodePath[ i - 2 ].right = w; else nodePath[ i - 2 ].left = w; } nodePath[ i ] = nodePath[ i - 1 ]; dirPath[ i ] = false; nodePath[ i - 1 ] = w; if ( maxDepth == i++ ) maxDepth++; w = nodePath[ i - 1 ].right; } if ( ( w.pred() || w.left.black() ) && ( w.succ() || w.right.black() ) ) { w.black( false ); } else { if ( w.succ() || w.right.black() ) { Entry KEY_VALUE_GENERIC y = w.left; y.black ( true ); w.black( false ); w.left = y.right; y.right = w; w = nodePath[ i - 1 ].right = y; if ( w.succ() ) { w.succ( false ); w.right.pred( w ); } } w.black( nodePath[ i - 1 ].black() ); nodePath[ i - 1 ].black( true ); w.right.black( true ); nodePath[ i - 1 ].right = w.left; w.left = nodePath[ i - 1 ]; if ( i < 2 ) tree = w; else { if ( dirPath[ i - 2 ] ) nodePath[ i - 2 ].right = w; else nodePath[ i - 2 ].left = w; } if ( w.pred() ) { w.pred( false ); nodePath[ i - 1 ].succ( w ); } break; } } else { Entry KEY_VALUE_GENERIC w = nodePath[ i - 1 ].left; if ( ! w.black() ) { w.black ( true ); nodePath[ i - 1 ].black( false ); nodePath[ i - 1 ].left = w.right; w.right = nodePath[ i - 1 ]; if ( i < 2 ) tree = w; else { if ( dirPath[ i - 2 ] ) nodePath[ i - 2 ].right = w; else nodePath[ i - 2 ].left = w; } nodePath[ i ] = nodePath[ i - 1 ]; dirPath[ i ] = true; nodePath[ i - 1 ] = w; if ( maxDepth == i++ ) maxDepth++; w = nodePath[ i - 1 ].left; } if ( ( w.pred() || w.left.black() ) && ( w.succ() || w.right.black() ) ) { w.black( false ); } else { if ( w.pred() || w.left.black() ) { Entry KEY_VALUE_GENERIC y = w.right; y.black( true ); w.black ( false ); w.right = y.left; y.left = w; w = nodePath[ i - 1 ].left = y; if ( w.pred() ) { w.pred( false ); w.left.succ( w ); } } w.black( nodePath[ i - 1 ].black() ); nodePath[ i - 1 ].black( true ); w.left.black( true ); nodePath[ i - 1 ].left = w.right; w.right = nodePath[ i - 1 ]; if ( i < 2 ) tree = w; else { if ( dirPath[ i - 2 ] ) nodePath[ i - 2 ].right = w; else nodePath[ i - 2 ].left = w; } if ( w.succ() ) { w.succ( false ); nodePath[ i - 1 ].pred( w ); } break; } } } if ( tree != null ) tree.black( true ); } modified = true; count--; // We clean up the node path, or we could have stale references later. while( maxDepth-- != 0 ) nodePath[ maxDepth ] = null; if ( ASSERTS ) { checkNodePath(); checkTree( tree, 0, -1 ); } return p.value; } #if ! KEY_CLASS_Object || VALUES_PRIMITIVE /** {@inheritDoc} * @deprecated Please use the corresponding type-specific method instead. */ @Deprecated @Override public VALUE_GENERIC_CLASS put( final KEY_GENERIC_CLASS ok, final VALUE_GENERIC_CLASS ov ) { final VALUE_GENERIC_TYPE oldValue = put( KEY_CLASS2TYPE(ok), VALUE_CLASS2TYPE(ov) ); return modified ? OBJECT_DEFAULT_RETURN_VALUE : VALUE2OBJ( oldValue ); } #endif #if ! KEY_CLASS_Object || VALUES_PRIMITIVE /** {@inheritDoc} * @deprecated Please use the corresponding type-specific method instead. */ @Deprecated @Override public VALUE_GENERIC_CLASS remove( final Object ok ) { final VALUE_GENERIC_TYPE oldValue = REMOVE_VALUE( KEY_OBJ2TYPE( ok ) ); return modified ? 
VALUE2OBJ( oldValue ) : OBJECT_DEFAULT_RETURN_VALUE; } #endif public boolean containsValue( final VALUE_TYPE v ) { final ValueIterator i = new ValueIterator(); VALUE_TYPE ev; int j = count; while( j-- != 0 ) { ev = i.NEXT_VALUE(); if ( VALUE_EQUALS( ev, v ) ) return true; } return false; } public void clear() { count = 0; tree = null; entries = null; values = null; keys = null; firstEntry = lastEntry = null; } /** This class represents an entry in a tree map. * *
<p>
We use the only "metadata" field, i.e., {@link Entry#info}, to store * information about color, predecessor status and successor status. * *
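 * <p>Concretely (an illustrative sketch of the bit packing defined below):
 * <pre>{@code
 * // bit 31 (SUCC_MASK): the right pointer is a thread to the successor
 * // bit 30 (PRED_MASK): the left pointer is a thread to the predecessor
 * // bit  0 (BLACK_MASK): the node is black
 * int info = SUCC_MASK | PRED_MASK;           // a freshly inserted leaf: threaded on both sides, red
 * boolean black = ( info & BLACK_MASK ) != 0; // false
 * }</pre>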
<p>
Note that since the class is recursive, it can be * considered equivalently a tree. */ private static final class Entry KEY_VALUE_GENERIC implements Cloneable, MAP.Entry KEY_VALUE_GENERIC { /** The the bit in this mask is true, the node is black. */ private final static int BLACK_MASK = 1; /** If the bit in this mask is true, {@link #right} points to a successor. */ private final static int SUCC_MASK = 1 << 31; /** If the bit in this mask is true, {@link #left} points to a predecessor. */ private final static int PRED_MASK = 1 << 30; /** The key of this entry. */ KEY_GENERIC_TYPE key; /** The value of this entry. */ VALUE_GENERIC_TYPE value; /** The pointers to the left and right subtrees. */ Entry KEY_VALUE_GENERIC left, right; /** This integers holds different information in different bits (see {@link #SUCC_MASK} and {@link #PRED_MASK}. */ int info; Entry() {} /** Creates a new entry with the given key and value. * * @param k a key. * @param v a value. */ Entry( final KEY_GENERIC_TYPE k, final VALUE_GENERIC_TYPE v ) { this.key = k; this.value = v; info = SUCC_MASK | PRED_MASK; } /** Returns the left subtree. * * @return the left subtree (null if the left * subtree is empty). */ Entry KEY_VALUE_GENERIC left() { return ( info & PRED_MASK ) != 0 ? null : left; } /** Returns the right subtree. * * @return the right subtree (null if the right * subtree is empty). */ Entry KEY_VALUE_GENERIC right() { return ( info & SUCC_MASK ) != 0 ? null : right; } /** Checks whether the left pointer is really a predecessor. * @return true if the left pointer is a predecessor. */ boolean pred() { return ( info & PRED_MASK ) != 0; } /** Checks whether the right pointer is really a successor. * @return true if the right pointer is a successor. */ boolean succ() { return ( info & SUCC_MASK ) != 0; } /** Sets whether the left pointer is really a predecessor. * @param pred if true then the left pointer will be considered a predecessor. */ void pred( final boolean pred ) { if ( pred ) info |= PRED_MASK; else info &= ~PRED_MASK; } /** Sets whether the right pointer is really a successor. * @param succ if true then the right pointer will be considered a successor. */ void succ( final boolean succ ) { if ( succ ) info |= SUCC_MASK; else info &= ~SUCC_MASK; } /** Sets the left pointer to a predecessor. * @param pred the predecessr. */ void pred( final Entry KEY_VALUE_GENERIC pred ) { info |= PRED_MASK; left = pred; } /** Sets the right pointer to a successor. * @param succ the successor. */ void succ( final Entry KEY_VALUE_GENERIC succ ) { info |= SUCC_MASK; right = succ; } /** Sets the left pointer to the given subtree. * @param left the new left subtree. */ void left( final Entry KEY_VALUE_GENERIC left ) { info &= ~PRED_MASK; this.left = left; } /** Sets the right pointer to the given subtree. * @param right the new right subtree. */ void right( final Entry KEY_VALUE_GENERIC right ) { info &= ~SUCC_MASK; this.right = right; } /** Returns whether this node is black. * @return true iff this node is black. */ boolean black() { return ( info & BLACK_MASK ) != 0; } /** Sets whether this node is black. * @param black if true, then this node becomes black; otherwise, it becomes red.. */ void black( final boolean black ) { if ( black ) info |= BLACK_MASK; else info &= ~BLACK_MASK; } /** Computes the next entry in the set order. * * @return the next entry (null) if this is the last entry). 
*/ Entry KEY_VALUE_GENERIC next() { Entry KEY_VALUE_GENERIC next = this.right; if ( ( info & SUCC_MASK ) == 0 ) while ( ( next.info & PRED_MASK ) == 0 ) next = next.left; return next; } /** Computes the previous entry in the set order. * * @return the previous entry (null) if this is the first entry). */ Entry KEY_VALUE_GENERIC prev() { Entry KEY_VALUE_GENERIC prev = this.left; if ( ( info & PRED_MASK ) == 0 ) while ( ( prev.info & SUCC_MASK ) == 0 ) prev = prev.right; return prev; } #if KEYS_PRIMITIVE /** {@inheritDoc} * @deprecated Please use the corresponding type-specific method instead. */ @Deprecated #endif public KEY_GENERIC_CLASS getKey() { return KEY2OBJ(key); } #if ! KEY_CLASS_Object public KEY_GENERIC_TYPE ENTRY_GET_KEY() { return key; } #endif #if VALUES_PRIMITIVE /** {@inheritDoc} * @deprecated Please use the corresponding type-specific method instead. */ @Deprecated #endif public VALUE_GENERIC_CLASS getValue() { return VALUE2OBJ(value); } #if VALUES_PRIMITIVE public VALUE_TYPE ENTRY_GET_VALUE() { return value; } #endif public VALUE_GENERIC_TYPE setValue(final VALUE_GENERIC_TYPE value) { final VALUE_GENERIC_TYPE oldValue = this.value; this.value = value; return oldValue; } #if VALUES_PRIMITIVE public VALUE_GENERIC_CLASS setValue(final VALUE_GENERIC_CLASS value) { return VALUE2OBJ(setValue(VALUE_CLASS2TYPE(value))); } #endif SUPPRESS_WARNINGS_KEY_VALUE_UNCHECKED public Entry KEY_VALUE_GENERIC clone() { Entry KEY_VALUE_GENERIC c; try { c = (Entry KEY_VALUE_GENERIC)super.clone(); } catch(CloneNotSupportedException cantHappen) { throw new InternalError(); } c.key = key; c.value = value; c.info = info; return c; } @SuppressWarnings("unchecked") public boolean equals( final Object o ) { if (!(o instanceof Map.Entry)) return false; Map.Entry e = (Map.Entry )o; return KEY_EQUALS( key, KEY_CLASS2TYPE( e.getKey() ) ) && VALUE_EQUALS( value, VALUE_CLASS2TYPE( e.getValue() ) ); } public int hashCode() { return KEY2JAVAHASH_NOT_NULL(key) ^ VALUE2JAVAHASH(value); } public String toString() { return key + "=>" + value; } /* public void prettyPrint() { prettyPrint(0); } public void prettyPrint(int level) { if ( pred() ) { for (int i = 0; i < level; i++) System.err.print(" "); System.err.println("pred: " + left ); } else if (left != null) left.prettyPrint(level +1 ); for (int i = 0; i < level; i++) System.err.print(" "); System.err.println(key + "=" + value + " (" + balance() + ")"); if ( succ() ) { for (int i = 0; i < level; i++) System.err.print(" "); System.err.println("succ: " + right ); } else if (right != null) right.prettyPrint(level + 1); }*/ } /* public void prettyPrint() { System.err.println("size: " + count); if (tree != null) tree.prettyPrint(); }*/ SUPPRESS_WARNINGS_KEY_UNCHECKED public boolean containsKey( final KEY_TYPE k ) { return findKey( KEY_GENERIC_CAST k ) != null; } public int size() { return count; } public boolean isEmpty() { return count == 0; } SUPPRESS_WARNINGS_KEY_UNCHECKED public VALUE_GENERIC_TYPE GET_VALUE( final KEY_TYPE k ) { final Entry KEY_VALUE_GENERIC e = findKey( KEY_GENERIC_CAST k ); return e == null ? defRetValue : e.value; } #if KEY_CLASS_Object && VALUES_PRIMITIVE /** {@inheritDoc} * @deprecated Please use the corresponding type-specific method instead. */ @Deprecated @Override SUPPRESS_WARNINGS_KEY_UNCHECKED public VALUE_GENERIC_CLASS get( final Object ok ) { final Entry KEY_VALUE_GENERIC e = findKey( KEY_GENERIC_CAST ok ); return e == null ? 
OBJECT_DEFAULT_RETURN_VALUE : e.getValue(); } #endif public KEY_GENERIC_TYPE FIRST_KEY() { if ( tree == null ) throw new NoSuchElementException(); return firstEntry.key; } public KEY_GENERIC_TYPE LAST_KEY() { if ( tree == null ) throw new NoSuchElementException(); return lastEntry.key; } /** An abstract iterator on the whole range. * *
<p>
This class can iterate in both directions on a threaded tree. */ private class TreeIterator { /** The entry that will be returned by the next call to {@link java.util.ListIterator#previous()} (or null if no previous entry exists). */ Entry KEY_VALUE_GENERIC prev; /** The entry that will be returned by the next call to {@link java.util.ListIterator#next()} (or null if no next entry exists). */ Entry KEY_VALUE_GENERIC next; /** The last entry that was returned (or null if we did not iterate or used {@link #remove()}). */ Entry KEY_VALUE_GENERIC curr; /** The current index (in the sense of a {@link java.util.ListIterator}). Note that this value is not meaningful when this {@link TreeIterator} has been created using the nonempty constructor.*/ int index = 0; TreeIterator() { next = firstEntry; } TreeIterator( final KEY_GENERIC_TYPE k ) { if ( ( next = locateKey( k ) ) != null ) { if ( compare( next.key, k ) <= 0 ) { prev = next; next = next.next(); } else prev = next.prev(); } } public boolean hasNext() { return next != null; } public boolean hasPrevious() { return prev != null; } void updateNext() { next = next.next(); } Entry KEY_VALUE_GENERIC nextEntry() { if ( ! hasNext() ) throw new NoSuchElementException(); curr = prev = next; index++; updateNext(); return curr; } void updatePrevious() { prev = prev.prev(); } Entry KEY_VALUE_GENERIC previousEntry() { if ( ! hasPrevious() ) throw new NoSuchElementException(); curr = next = prev; index--; updatePrevious(); return curr; } public int nextIndex() { return index; } public int previousIndex() { return index - 1; } public void remove() { if ( curr == null ) throw new IllegalStateException(); /* If the last operation was a next(), we are removing an entry that preceeds the current index, and thus we must decrement it. */ if ( curr == prev ) index--; next = prev = curr; updatePrevious(); updateNext(); RB_TREE_MAP.this.REMOVE_VALUE( curr.key ); curr = null; } public int skip( final int n ) { int i = n; while( i-- != 0 && hasNext() ) nextEntry(); return n - i - 1; } public int back( final int n ) { int i = n; while( i-- != 0 && hasPrevious() ) previousEntry(); return n - i - 1; } } /** An iterator on the whole range. * *
<p>
This class can iterate in both directions on a threaded tree. */ private class EntryIterator extends TreeIterator implements ObjectListIterator { EntryIterator() {} EntryIterator( final KEY_GENERIC_TYPE k ) { super( k ); } public MAP.Entry KEY_VALUE_GENERIC next() { return nextEntry(); } public MAP.Entry KEY_VALUE_GENERIC previous() { return previousEntry(); } public void set( MAP.Entry KEY_VALUE_GENERIC ok ) { throw new UnsupportedOperationException(); } public void add( MAP.Entry KEY_VALUE_GENERIC ok ) { throw new UnsupportedOperationException(); } } public ObjectSortedSet ENTRYSET() { if ( entries == null ) entries = new AbstractObjectSortedSet() { final Comparator comparator = new Comparator () { public int compare( final MAP.Entry KEY_VALUE_GENERIC x, MAP.Entry KEY_VALUE_GENERIC y ) { return RB_TREE_MAP.this.actualComparator.compare( x.ENTRY_GET_KEY(), y.ENTRY_GET_KEY() ); } }; public Comparator comparator() { return comparator; } public ObjectBidirectionalIterator iterator() { return new EntryIterator(); } public ObjectBidirectionalIterator iterator( final MAP.Entry KEY_VALUE_GENERIC from ) { return new EntryIterator( from.ENTRY_GET_KEY() ); } SUPPRESS_WARNINGS_KEY_UNCHECKED public boolean contains( final Object o ) { if (!(o instanceof Map.Entry)) return false; final Map.Entry e = (Map.Entry)o; #if KEYS_PRIMITIVE if (e.getKey() == null || ! (e.getKey() instanceof KEY_CLASS)) return false; #endif #if VALUES_PRIMITIVE if (e.getValue() == null || ! (e.getValue() instanceof VALUE_CLASS)) return false; #endif final Entry KEY_VALUE_GENERIC f = findKey( KEY_OBJ2TYPE( KEY_GENERIC_CAST e.getKey() ) ); return e.equals( f ); } SUPPRESS_WARNINGS_KEY_UNCHECKED public boolean remove( final Object o ) { if (!(o instanceof Map.Entry)) return false; final Map.Entry e = (Map.Entry)o; #if KEYS_PRIMITIVE if (e.getKey() == null || ! (e.getKey() instanceof KEY_CLASS)) return false; #endif #if VALUES_PRIMITIVE if (e.getValue() == null || ! (e.getValue() instanceof VALUE_CLASS)) return false; #endif final Entry KEY_VALUE_GENERIC f = findKey( KEY_OBJ2TYPE( KEY_GENERIC_CAST e.getKey() ) ); if ( f != null ) RB_TREE_MAP.this.REMOVE_VALUE( f.key ); return f != null; } public int size() { return count; } public void clear() { RB_TREE_MAP.this.clear(); } public MAP.Entry KEY_VALUE_GENERIC first() { return firstEntry; } public MAP.Entry KEY_VALUE_GENERIC last() { return lastEntry; } public ObjectSortedSet subSet( MAP.Entry KEY_VALUE_GENERIC from, MAP.Entry KEY_VALUE_GENERIC to ) { return subMap( from.ENTRY_GET_KEY(), to.ENTRY_GET_KEY() ).ENTRYSET(); } public ObjectSortedSet headSet( MAP.Entry KEY_VALUE_GENERIC to ) { return headMap( to.ENTRY_GET_KEY() ).ENTRYSET(); } public ObjectSortedSet tailSet( MAP.Entry KEY_VALUE_GENERIC from ) { return tailMap( from.ENTRY_GET_KEY() ).ENTRYSET(); } }; return entries; } /** An iterator on the whole range of keys. * *
<p>
This class can iterate in both directions on the keys of a threaded tree. We * simply override the {@link java.util.ListIterator#next()}/{@link java.util.ListIterator#previous()} methods (and possibly * their type-specific counterparts) so that they return keys instead of entries. */ private final class KeyIterator extends TreeIterator implements KEY_LIST_ITERATOR KEY_GENERIC { public KeyIterator() {} public KeyIterator( final KEY_GENERIC_TYPE k ) { super( k ); } public KEY_GENERIC_TYPE NEXT_KEY() { return nextEntry().key; } public KEY_GENERIC_TYPE PREV_KEY() { return previousEntry().key; } public void set( KEY_GENERIC_TYPE k ) { throw new UnsupportedOperationException(); } public void add( KEY_GENERIC_TYPE k ) { throw new UnsupportedOperationException(); } #if !KEY_CLASS_Object public KEY_GENERIC_CLASS next() { return KEY2OBJ( nextEntry().key ); } public KEY_GENERIC_CLASS previous() { return KEY2OBJ( previousEntry().key ); } public void set( KEY_CLASS ok ) { throw new UnsupportedOperationException(); } public void add( KEY_CLASS ok ) { throw new UnsupportedOperationException(); } #endif }; /** A keyset implementation using a more direct implementation for iterators. */ private class KeySet extends ABSTRACT_SORTED_MAP KEY_VALUE_GENERIC.KeySet { public KEY_BIDI_ITERATOR KEY_GENERIC iterator() { return new KeyIterator(); } public KEY_BIDI_ITERATOR KEY_GENERIC iterator( final KEY_GENERIC_TYPE from ) { return new KeyIterator( from ); } } /** Returns a type-specific sorted set view of the keys contained in this map. * *
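 * <p>For example (a hypothetical sketch, assuming the {@code Int2IntRBTreeMap}
 * specialization), the returned set supports unboxed access and bidirectional
 * iteration from a given key:
 * <pre>{@code
 * Int2IntRBTreeMap m = new Int2IntRBTreeMap();
 * m.put( 1, 10 ); m.put( 2, 20 ); m.put( 3, 30 );
 * IntSortedSet keys = m.keySet();
 * IntBidirectionalIterator i = keys.iterator( 2 ); // positioned just after the key 2
 * int greater = i.nextInt();  // 3
 * int back = i.previousInt(); // 3 again, as with a ListIterator
 * }</pre>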
<p>
In addition to the semantics of {@link java.util.Map#keySet()}, you can * safely cast the set returned by this call to a type-specific sorted * set interface. * * @return a type-specific sorted set view of the keys contained in this map. */ public SORTED_SET KEY_GENERIC keySet() { if ( keys == null ) keys = new KeySet(); return keys; } /** An iterator on the whole range of values. * *
<p>
This class can iterate in both directions on the values of a threaded tree. We * simply override the {@link java.util.ListIterator#next()}/{@link java.util.ListIterator#previous()} methods (and possibly * their type-specific counterparts) so that they return values instead of entries. */ private final class ValueIterator extends TreeIterator implements VALUE_LIST_ITERATOR VALUE_GENERIC { public VALUE_GENERIC_TYPE NEXT_VALUE() { return nextEntry().value; } public VALUE_GENERIC_TYPE PREV_VALUE() { return previousEntry().value; } public void set( VALUE_GENERIC_TYPE v ) { throw new UnsupportedOperationException(); } public void add( VALUE_GENERIC_TYPE v ) { throw new UnsupportedOperationException(); } #if VALUES_PRIMITIVE public VALUE_GENERIC_CLASS next() { return VALUE2OBJ( nextEntry().value ); } public VALUE_GENERIC_CLASS previous() { return VALUE2OBJ( previousEntry().value ); } public void set( VALUE_CLASS ok ) { throw new UnsupportedOperationException(); } public void add( VALUE_CLASS ok ) { throw new UnsupportedOperationException(); } #endif }; /** Returns a type-specific collection view of the values contained in this map. * *
<p>
In addition to the semantics of {@link java.util.Map#values()}, you can * safely cast the collection returned by this call to a type-specific collection * interface. * * @return a type-specific collection view of the values contained in this map. */ public VALUE_COLLECTION VALUE_GENERIC values() { if ( values == null ) values = new VALUE_ABSTRACT_COLLECTION VALUE_GENERIC() { public VALUE_ITERATOR VALUE_GENERIC iterator() { return new ValueIterator(); } public boolean contains( final VALUE_TYPE k ) { return containsValue( k ); } public int size() { return count; } public void clear() { RB_TREE_MAP.this.clear(); } }; return values; } public KEY_COMPARATOR KEY_SUPER_GENERIC comparator() { return actualComparator; } public SORTED_MAP KEY_VALUE_GENERIC headMap( KEY_GENERIC_TYPE to ) { return new Submap( KEY_NULL, true, to, false ); } public SORTED_MAP KEY_VALUE_GENERIC tailMap( KEY_GENERIC_TYPE from ) { return new Submap( from, false, KEY_NULL, true ); } public SORTED_MAP KEY_VALUE_GENERIC subMap( KEY_GENERIC_TYPE from, KEY_GENERIC_TYPE to ) { return new Submap( from, false, to, false ); } /** A submap with given range. * *
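 * <p>For example (a hypothetical sketch, assuming the {@code Int2IntRBTreeMap}
 * specialization), submaps are live views with range checking:
 * <pre>{@code
 * Int2IntRBTreeMap m = new Int2IntRBTreeMap();
 * m.put( 1, 10 ); m.put( 2, 20 ); m.put( 3, 30 );
 * Int2IntSortedMap head = m.headMap( 3 ); // keys strictly smaller than 3
 * head.size();       // 2, computed on the fly
 * m.remove( 2 );
 * head.size();       // 1: the view reflects changes to the backing map
 * head.put( 5, 50 ); // throws IllegalArgumentException: 5 is out of range
 * }</pre>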
<p>
This class represents a submap. One has to specify the left/right * limits (which can be set to -∞ or ∞). Since the submap is a * view on the map, at a given moment it could happen that the limits of * the range are not any longer in the main map. Thus, things such as * {@link java.util.SortedMap#firstKey()} or {@link java.util.Collection#size()} must be always computed * on-the-fly. */ private final class Submap extends ABSTRACT_SORTED_MAP KEY_VALUE_GENERIC implements java.io.Serializable { private static final long serialVersionUID = -7046029254386353129L; /** The start of the submap range, unless {@link #bottom} is true. */ KEY_GENERIC_TYPE from; /** The end of the submap range, unless {@link #top} is true. */ KEY_GENERIC_TYPE to; /** If true, the submap range starts from -∞. */ boolean bottom; /** If true, the submap range goes to ∞. */ boolean top; /** Cached set of entries. */ @SuppressWarnings("hiding") protected transient ObjectSortedSet entries; /** Cached set of keys. */ @SuppressWarnings("hiding") protected transient SORTED_SET KEY_GENERIC keys; /** Cached collection of values. */ @SuppressWarnings("hiding") protected transient VALUE_COLLECTION VALUE_GENERIC values; /** Creates a new submap with given key range. * * @param from the start of the submap range. * @param bottom if true, the first parameter is ignored and the range starts from -∞. * @param to the end of the submap range. * @param top if true, the third parameter is ignored and the range goes to ∞. */ public Submap( final KEY_GENERIC_TYPE from, final boolean bottom, final KEY_GENERIC_TYPE to, final boolean top ) { if ( ! bottom && ! top && RB_TREE_MAP.this.compare( from, to ) > 0 ) throw new IllegalArgumentException( "Start key (" + from + ") is larger than end key (" + to + ")" ); this.from = from; this.bottom = bottom; this.to = to; this.top = top; this.defRetValue = RB_TREE_MAP.this.defRetValue; } public void clear() { final SubmapIterator i = new SubmapIterator(); while( i.hasNext() ) { i.nextEntry(); i.remove(); } } /** Checks whether a key is in the submap range. * @param k a key. * @return true if is the key is in the submap range. */ final boolean in( final KEY_GENERIC_TYPE k ) { return ( bottom || RB_TREE_MAP.this.compare( k, from ) >= 0 ) && ( top || RB_TREE_MAP.this.compare( k, to ) < 0 ); } public ObjectSortedSet ENTRYSET() { if ( entries == null ) entries = new AbstractObjectSortedSet() { public ObjectBidirectionalIterator iterator() { return new SubmapEntryIterator(); } public ObjectBidirectionalIterator iterator( final MAP.Entry KEY_VALUE_GENERIC from ) { return new SubmapEntryIterator( from.ENTRY_GET_KEY() ); } public Comparator comparator() { return RB_TREE_MAP.this.ENTRYSET().comparator(); } SUPPRESS_WARNINGS_KEY_UNCHECKED public boolean contains( final Object o ) { if (!(o instanceof Map.Entry)) return false; final Map.Entry e = (Map.Entry)o; #if KEYS_PRIMITIVE if (e.getKey() == null || ! (e.getKey() instanceof KEY_CLASS)) return false; #endif #if VALUES_PRIMITIVE if (e.getValue() == null || ! (e.getValue() instanceof VALUE_CLASS)) return false; #endif final RB_TREE_MAP.Entry KEY_VALUE_GENERIC f = findKey( KEY_OBJ2TYPE( KEY_GENERIC_CAST e.getKey() ) ); return f != null && in( f.key ) && e.equals( f ); } SUPPRESS_WARNINGS_KEY_UNCHECKED public boolean remove( final Object o ) { if (!(o instanceof Map.Entry)) return false; final Map.Entry e = (Map.Entry)o; #if KEYS_PRIMITIVE if (e.getKey() == null || ! 
(e.getKey() instanceof KEY_CLASS)) return false; #endif #if VALUES_PRIMITIVE if (e.getValue() == null || ! (e.getValue() instanceof VALUE_CLASS)) return false; #endif final RB_TREE_MAP.Entry KEY_VALUE_GENERIC f = findKey( KEY_OBJ2TYPE( KEY_GENERIC_CAST e.getKey() ) ); if ( f != null && in( f.key ) ) Submap.this.REMOVE_VALUE( f.key ); return f != null; } public int size() { int c = 0; for( Iterator i = iterator(); i.hasNext(); i.next() ) c++; return c; } public boolean isEmpty() { return ! new SubmapIterator().hasNext(); } public void clear() { Submap.this.clear(); } public MAP.Entry KEY_VALUE_GENERIC first() { return firstEntry(); } public MAP.Entry KEY_VALUE_GENERIC last() { return lastEntry(); } public ObjectSortedSet subSet( MAP.Entry KEY_VALUE_GENERIC from, MAP.Entry KEY_VALUE_GENERIC to ) { return subMap( from.ENTRY_GET_KEY(), to.ENTRY_GET_KEY() ).ENTRYSET(); } public ObjectSortedSet headSet( MAP.Entry KEY_VALUE_GENERIC to ) { return headMap( to.ENTRY_GET_KEY() ).ENTRYSET(); } public ObjectSortedSet tailSet( MAP.Entry KEY_VALUE_GENERIC from ) { return tailMap( from.ENTRY_GET_KEY() ).ENTRYSET(); } }; return entries; } private class KeySet extends ABSTRACT_SORTED_MAP KEY_VALUE_GENERIC.KeySet { public KEY_BIDI_ITERATOR KEY_GENERIC iterator() { return new SubmapKeyIterator(); } public KEY_BIDI_ITERATOR KEY_GENERIC iterator( final KEY_GENERIC_TYPE from ) { return new SubmapKeyIterator( from ); } } public SORTED_SET KEY_GENERIC keySet() { if ( keys == null ) keys = new KeySet(); return keys; } public VALUE_COLLECTION VALUE_GENERIC values() { if ( values == null ) values = new VALUE_ABSTRACT_COLLECTION VALUE_GENERIC() { public VALUE_ITERATOR VALUE_GENERIC iterator() { return new SubmapValueIterator(); } public boolean contains( final VALUE_TYPE k ) { return containsValue( k ); } public int size() { return Submap.this.size(); } public void clear() { Submap.this.clear(); } }; return values; } SUPPRESS_WARNINGS_KEY_UNCHECKED public boolean containsKey( final KEY_TYPE k ) { return in( KEY_GENERIC_CAST k ) && RB_TREE_MAP.this.containsKey( k ); } public boolean containsValue( final VALUE_TYPE v ) { final SubmapIterator i = new SubmapIterator(); VALUE_TYPE ev; while( i.hasNext() ) { ev = i.nextEntry().value; if ( VALUE_EQUALS( ev, v ) ) return true; } return false; } SUPPRESS_WARNINGS_KEY_UNCHECKED public VALUE_GENERIC_TYPE GET_VALUE(final KEY_TYPE k) { final RB_TREE_MAP.Entry KEY_VALUE_GENERIC e; final KEY_GENERIC_TYPE kk = KEY_GENERIC_CAST k; return in( kk ) && ( e = findKey( kk ) ) != null ? e.value : this.defRetValue; } #if KEY_CLASS_Object && VALUES_PRIMITIVE /** {@inheritDoc} * @deprecated Please use the corresponding type-specific method instead. */ @Deprecated @Override SUPPRESS_WARNINGS_KEY_UNCHECKED public VALUE_GENERIC_CLASS get( final Object ok ) { final RB_TREE_MAP.Entry KEY_VALUE_GENERIC e; final KEY_GENERIC_TYPE kk = KEY_GENERIC_CAST KEY_OBJ2TYPE( ok ); return in( kk ) && ( e = findKey( kk ) ) != null ? e.getValue() : OBJECT_DEFAULT_RETURN_VALUE; } #endif public VALUE_GENERIC_TYPE put(final KEY_GENERIC_TYPE k, final VALUE_GENERIC_TYPE v) { modified = false; if ( ! in( k ) ) throw new IllegalArgumentException( "Key (" + k + ") out of range [" + ( bottom ? "-" : String.valueOf( from ) ) + ", " + ( top ? "-" : String.valueOf( to ) ) + ")" ); final VALUE_GENERIC_TYPE oldValue = RB_TREE_MAP.this.put( k, v ); return modified ? this.defRetValue : oldValue; } #if ! KEY_CLASS_Object || VALUES_PRIMITIVE /** {@inheritDoc} * @deprecated Please use the corresponding type-specific method instead. 
*/ @Deprecated @Override public VALUE_GENERIC_CLASS put( final KEY_GENERIC_CLASS ok, final VALUE_GENERIC_CLASS ov ) { final VALUE_GENERIC_TYPE oldValue = put( KEY_CLASS2TYPE(ok), VALUE_CLASS2TYPE(ov) ); return modified ? OBJECT_DEFAULT_RETURN_VALUE : VALUE2OBJ( oldValue ); } #endif SUPPRESS_WARNINGS_KEY_UNCHECKED public VALUE_GENERIC_TYPE REMOVE_VALUE( final KEY_TYPE k ) { modified = false; if ( ! in( KEY_GENERIC_CAST k ) ) return this.defRetValue; final VALUE_GENERIC_TYPE oldValue = RB_TREE_MAP.this.REMOVE_VALUE( k ); return modified ? oldValue : this.defRetValue; } #if ! KEY_CLASS_Object || VALUES_PRIMITIVE /** {@inheritDoc} * @deprecated Please use the corresponding type-specific method instead. */ @Deprecated @Override public VALUE_GENERIC_CLASS remove( final Object ok ) { final VALUE_GENERIC_TYPE oldValue = REMOVE_VALUE( KEY_OBJ2TYPE( ok ) ); return modified ? VALUE2OBJ( oldValue ) : OBJECT_DEFAULT_RETURN_VALUE; } #endif public int size() { final SubmapIterator i = new SubmapIterator(); int n = 0; while( i.hasNext() ) { n++; i.nextEntry(); } return n; } public boolean isEmpty() { return ! new SubmapIterator().hasNext(); } public KEY_COMPARATOR KEY_SUPER_GENERIC comparator() { return actualComparator; } public SORTED_MAP KEY_VALUE_GENERIC headMap( final KEY_GENERIC_TYPE to ) { if ( top ) return new Submap( from, bottom, to, false ); return compare( to, this.to ) < 0 ? new Submap( from, bottom, to, false ) : this; } public SORTED_MAP KEY_VALUE_GENERIC tailMap( final KEY_GENERIC_TYPE from ) { if ( bottom ) return new Submap( from, false, to, top ); return compare( from, this.from ) > 0 ? new Submap( from, false, to, top ) : this; } public SORTED_MAP KEY_VALUE_GENERIC subMap( KEY_GENERIC_TYPE from, KEY_GENERIC_TYPE to ) { if ( top && bottom ) return new Submap( from, false, to, false ); if ( ! top ) to = compare( to, this.to ) < 0 ? to : this.to; if ( ! bottom ) from = compare( from, this.from ) > 0 ? from : this.from; if ( ! top && ! bottom && from == this.from && to == this.to ) return this; return new Submap( from, false, to, false ); } /** Locates the first entry. * * @return the first entry of this submap, or null if the submap is empty. */ public RB_TREE_MAP.Entry KEY_VALUE_GENERIC firstEntry() { if ( tree == null ) return null; // If this submap goes to -infinity, we return the main map first entry; otherwise, we locate the start of the map. RB_TREE_MAP.Entry KEY_VALUE_GENERIC e; if ( bottom ) e = firstEntry; else { e = locateKey( from ); // If we find either the start or something greater we're OK. if ( compare( e.key, from ) < 0 ) e = e.next(); } // Finally, if this submap doesn't go to infinity, we check that the resulting key isn't greater than the end. if ( e == null || ! top && compare( e.key, to ) >= 0 ) return null; return e; } /** Locates the last entry. * * @return the last entry of this submap, or null if the submap is empty. */ public RB_TREE_MAP.Entry KEY_VALUE_GENERIC lastEntry() { if ( tree == null ) return null; // If this submap goes to infinity, we return the main map last entry; otherwise, we locate the end of the map. RB_TREE_MAP.Entry KEY_VALUE_GENERIC e; if ( top ) e = lastEntry; else { e = locateKey( to ); // If we find something smaller than the end we're OK. if ( compare( e.key, to ) >= 0 ) e = e.prev(); } // Finally, if this submap doesn't go to -infinity, we check that the resulting key isn't smaller than the start. if ( e == null || ! 
bottom && compare( e.key, from ) < 0 ) return null; return e; } public KEY_GENERIC_TYPE FIRST_KEY() { RB_TREE_MAP.Entry KEY_VALUE_GENERIC e = firstEntry(); if ( e == null ) throw new NoSuchElementException(); return e.key; } public KEY_GENERIC_TYPE LAST_KEY() { RB_TREE_MAP.Entry KEY_VALUE_GENERIC e = lastEntry(); if ( e == null ) throw new NoSuchElementException(); return e.key; } #if !KEY_CLASS_Object /** {@inheritDoc} * @deprecated Please use the corresponding type-specific method instead. */ @Deprecated @Override public KEY_GENERIC_CLASS firstKey() { RB_TREE_MAP.Entry KEY_VALUE_GENERIC e = firstEntry(); if ( e == null ) throw new NoSuchElementException(); return e.getKey(); } /** {@inheritDoc} * @deprecated Please use the corresponding type-specific method instead. */ @Deprecated @Override public KEY_GENERIC_CLASS lastKey() { RB_TREE_MAP.Entry KEY_VALUE_GENERIC e = lastEntry(); if ( e == null ) throw new NoSuchElementException(); return e.getKey(); } #endif /** An iterator for subranges. * *
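 * <P>The iterator never leaves the submap range. A small usage sketch of that behaviour
 * (illustration only, assuming the generated int/int specialization Int2IntRBTreeMap):
 * <pre>
 * Int2IntRBTreeMap m = new Int2IntRBTreeMap();
 * for( int i = 10; i-- != 0; ) m.put( i, -i );   // keys 0..9
 * Int2IntSortedMap sub = m.subMap( 3, 7 );       // live view on keys 3..6
 * sub.firstIntKey();                             // 3
 * sub.lastIntKey();                              // 6
 * sub.put( 42, 0 );                              // throws IllegalArgumentException (out of range)
 * </pre>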

This class inherits from {@link TreeIterator}, but overrides the methods that * update the pointer after a {@link java.util.ListIterator#next()} or {@link java.util.ListIterator#previous()}. If we would * move out of the range of the submap we just overwrite the next or previous * entry with null. */ private class SubmapIterator extends TreeIterator { SubmapIterator() { next = firstEntry(); } SubmapIterator( final KEY_GENERIC_TYPE k ) { this(); if ( next != null ) { if ( ! bottom && compare( k, next.key ) < 0 ) prev = null; else if ( ! top && compare( k, ( prev = lastEntry() ).key ) >= 0 ) next = null; else { next = locateKey( k ); if ( compare( next.key, k ) <= 0 ) { prev = next; next = next.next(); } else prev = next.prev(); } } } void updatePrevious() { prev = prev.prev(); if ( ! bottom && prev != null && RB_TREE_MAP.this.compare( prev.key, from ) < 0 ) prev = null; } void updateNext() { next = next.next(); if ( ! top && next != null && RB_TREE_MAP.this.compare( next.key, to ) >= 0 ) next = null; } } private class SubmapEntryIterator extends SubmapIterator implements ObjectListIterator { SubmapEntryIterator() {} SubmapEntryIterator( final KEY_GENERIC_TYPE k ) { super( k ); } public MAP.Entry KEY_VALUE_GENERIC next() { return nextEntry(); } public MAP.Entry KEY_VALUE_GENERIC previous() { return previousEntry(); } public void set( MAP.Entry KEY_VALUE_GENERIC ok ) { throw new UnsupportedOperationException(); } public void add( MAP.Entry KEY_VALUE_GENERIC ok ) { throw new UnsupportedOperationException(); } } /** An iterator on a subrange of keys. * *

This class can iterate in both directions on a subrange of the * keys of a threaded tree. We simply override the {@link * java.util.ListIterator#next()}/{@link java.util.ListIterator#previous()} methods (and possibly their * type-specific counterparts) so that they return keys instead of * entries. */ private final class SubmapKeyIterator extends SubmapIterator implements KEY_LIST_ITERATOR KEY_GENERIC { public SubmapKeyIterator() { super(); } public SubmapKeyIterator( KEY_GENERIC_TYPE from ) { super( from ); } public KEY_GENERIC_TYPE NEXT_KEY() { return nextEntry().key; } public KEY_GENERIC_TYPE PREV_KEY() { return previousEntry().key; } public void set( KEY_GENERIC_TYPE k ) { throw new UnsupportedOperationException(); } public void add( KEY_GENERIC_TYPE k ) { throw new UnsupportedOperationException(); } #if !KEY_CLASS_Object public KEY_GENERIC_CLASS next() { return KEY2OBJ( nextEntry().key ); } public KEY_GENERIC_CLASS previous() { return KEY2OBJ( previousEntry().key ); } public void set( KEY_CLASS ok ) { throw new UnsupportedOperationException(); } public void add( KEY_CLASS ok ) { throw new UnsupportedOperationException(); } #endif }; /** An iterator on a subrange of values. * *

This class can iterate in both directions on the values of a * subrange of the keys of a threaded tree. We simply override the * {@link java.util.ListIterator#next()}/{@link java.util.ListIterator#previous()} methods (and possibly their * type-specific counterparts) so that they return values instead of * entries. */ private final class SubmapValueIterator extends SubmapIterator implements VALUE_LIST_ITERATOR VALUE_GENERIC { public VALUE_GENERIC_TYPE NEXT_VALUE() { return nextEntry().value; } public VALUE_GENERIC_TYPE PREV_VALUE() { return previousEntry().value; } public void set( VALUE_GENERIC_TYPE v ) { throw new UnsupportedOperationException(); } public void add( VALUE_GENERIC_TYPE v ) { throw new UnsupportedOperationException(); } #if VALUES_PRIMITIVE public VALUE_GENERIC_CLASS next() { return VALUE2OBJ( nextEntry().value ); } public VALUE_GENERIC_CLASS previous() { return VALUE2OBJ( previousEntry().value ); } public void set( VALUE_CLASS ok ) { throw new UnsupportedOperationException(); } public void add( VALUE_CLASS ok ) { throw new UnsupportedOperationException(); } #endif }; } /** Returns a deep copy of this tree map. * *
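 * <P>A sketch of the copy semantics (illustration only, assuming the generated int/int
 * specialization Int2IntRBTreeMap):
 * <pre>
 * Int2IntRBTreeMap m = new Int2IntRBTreeMap();
 * m.put( 1, 100 );
 * Int2IntRBTreeMap c = m.clone();
 * c.put( 2, 200 );        // the cloned tree structure is independent...
 * m.containsKey( 2 );     // ...so this is false
 * </pre>
 * With object keys or values, however, the two maps share the key and value instances,
 * as the next paragraph explains.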

This method performs a deep copy of this tree map; the data stored in the * set, however, is not cloned. Note that this makes a difference only for object keys. * * @return a deep copy of this tree map. */ SUPPRESS_WARNINGS_KEY_VALUE_UNCHECKED public RB_TREE_MAP KEY_VALUE_GENERIC clone() { RB_TREE_MAP KEY_VALUE_GENERIC c; try { c = (RB_TREE_MAP KEY_VALUE_GENERIC)super.clone(); } catch(CloneNotSupportedException cantHappen) { throw new InternalError(); } c.keys = null; c.values = null; c.entries = null; c.allocatePaths(); if ( count != 0 ) { // Also this apparently unfathomable code is derived from GNU libavl. Entry KEY_VALUE_GENERIC e, p, q, rp = new Entry KEY_VALUE_GENERIC(), rq = new Entry KEY_VALUE_GENERIC(); p = rp; rp.left( tree ); q = rq; rq.pred( null ); while( true ) { if ( ! p.pred() ) { e = p.left.clone(); e.pred( q.left ); e.succ( q ); q.left( e ); p = p.left; q = q.left; } else { while( p.succ() ) { p = p.right; if ( p == null ) { q.right = null; c.tree = rq.left; c.firstEntry = c.tree; while( c.firstEntry.left != null ) c.firstEntry = c.firstEntry.left; c.lastEntry = c.tree; while( c.lastEntry.right != null ) c.lastEntry = c.lastEntry.right; return c; } q = q.right; } p = p.right; q = q.right; } if ( ! p.succ() ) { e = p.right.clone(); e.succ( q.right ); e.pred( q ); q.right( e ); } } } return c; } private void writeObject(java.io.ObjectOutputStream s) throws java.io.IOException { int n = count; EntryIterator i = new EntryIterator(); Entry KEY_VALUE_GENERIC e; s.defaultWriteObject(); while(n-- != 0) { e = i.nextEntry(); s.WRITE_KEY( e.key ); s.WRITE_VALUE( e.value ); } } /** Reads the given number of entries from the input stream, returning the corresponding tree. * * @param s the input stream. * @param n the (positive) number of entries to read. * @param pred the entry containing the key that preceeds the first key in the tree. * @param succ the entry containing the key that follows the last key in the tree. */ SUPPRESS_WARNINGS_KEY_VALUE_UNCHECKED private Entry KEY_VALUE_GENERIC readTree( final java.io.ObjectInputStream s, final int n, final Entry KEY_VALUE_GENERIC pred, final Entry KEY_VALUE_GENERIC succ ) throws java.io.IOException, ClassNotFoundException { if ( n == 1 ) { final Entry KEY_VALUE_GENERIC top = new Entry KEY_VALUE_GENERIC( KEY_GENERIC_CAST s.READ_KEY(), VALUE_GENERIC_CAST s.READ_VALUE() ); top.pred( pred ); top.succ( succ ); top.black( true ); return top; } if ( n == 2 ) { /* We handle separately this case so that recursion will *always* be on nonempty subtrees. */ final Entry KEY_VALUE_GENERIC top = new Entry KEY_VALUE_GENERIC( KEY_GENERIC_CAST s.READ_KEY(), VALUE_GENERIC_CAST s.READ_VALUE() ); top.black( true ); top.right( new Entry KEY_VALUE_GENERIC( KEY_GENERIC_CAST s.READ_KEY(), VALUE_GENERIC_CAST s.READ_VALUE() ) ); top.right.pred( top ); top.pred( pred ); top.right.succ( succ ); return top; } // The right subtree is the largest one. final int rightN = n / 2, leftN = n - rightN - 1; final Entry KEY_VALUE_GENERIC top = new Entry KEY_VALUE_GENERIC(); top.left( readTree( s, leftN, pred, top ) ); top.key = KEY_GENERIC_CAST s.READ_KEY(); top.value = VALUE_GENERIC_CAST s.READ_VALUE(); top.black( true ); top.right( readTree( s, rightN, top, succ ) ); if ( n + 2 == ( ( n + 2 ) & -( n + 2 ) ) ) top.right.black( false ); // Quick test for determining whether n + 2 is a power of 2. 
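// A worked example of the quick test above (assuming standard two's-complement arithmetic):
// (x & -x) isolates the lowest set bit of x, so x == (x & -x) exactly when x has a single bit set, i.e., when x is a power of two.
// For n = 6, n + 2 = 8 = 0b1000 and (n + 2) & -(n + 2) = 0b1000, so the test succeeds and the right subtree root is recoloured red;
// for n = 4, n + 2 = 6 = 0b0110 and (n + 2) & -(n + 2) = 0b0010, so the test fails and no recolouring takes place.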
return top; } private void readObject( java.io.ObjectInputStream s ) throws java.io.IOException, ClassNotFoundException { s.defaultReadObject(); /* The storedComparator is now correctly set, but we must restore on-the-fly the actualComparator. */ setActualComparator(); allocatePaths(); if ( count != 0 ) { tree = readTree( s, count, null, null ); Entry KEY_VALUE_GENERIC e; e = tree; while( e.left() != null ) e = e.left(); firstEntry = e; e = tree; while( e.right() != null ) e = e.right(); lastEntry = e; } if ( ASSERTS ) checkTree( tree, 0, -1 ); } #ifdef ASSERTS_CODE private void checkNodePath() { for( int i = nodePath.length; i-- != 0; ) assert nodePath[ i ] == null : i; } private static KEY_VALUE_GENERIC int checkTree( Entry KEY_VALUE_GENERIC e, int d, int D ) { if ( e == null ) return 0; if ( e.black() ) d++; if ( e.left() != null ) D = checkTree( e.left(), d, D ); if ( e.right() != null ) D = checkTree( e.right(), d, D ); if ( e.left() == null && e.right() == null ) { if ( D == -1 ) D = d; else if ( D != d ) throw new AssertionError( "Mismatch between number of black nodes (" + D + " and " + d + ")" ); } return D; } #else private void checkNodePath() {} @SuppressWarnings("unused") private static KEY_VALUE_GENERIC int checkTree( Entry KEY_VALUE_GENERIC e, int d, int D ) { return 0; } #endif #ifdef TEST private static long seed = System.currentTimeMillis(); private static java.util.Random r = new java.util.Random( seed ); private static KEY_TYPE genKey() { #if KEY_CLASS_Byte || KEY_CLASS_Short || KEY_CLASS_Character return (KEY_TYPE)(r.nextInt()); #elif KEYS_PRIMITIVE return r.NEXT_KEY(); #else return Integer.toBinaryString( r.nextInt() ); #endif } private static VALUE_TYPE genValue() { #if VALUE_CLASS_Byte || VALUE_CLASS_Short || VALUE_CLASS_Character return (VALUE_TYPE)(r.nextInt()); #elif VALUES_PRIMITIVE return r.NEXT_VALUE(); #elif !VALUE_CLASS_Reference || KEY_CLASS_Reference return Integer.toBinaryString( r.nextInt() ); #else return new java.io.Serializable() {}; #endif } private static java.text.NumberFormat format = new java.text.DecimalFormat( "#,###.00" ); private static java.text.FieldPosition p = new java.text.FieldPosition( 0 ); private static String format( double d ) { StringBuffer s = new StringBuffer(); return format.format( d, s, p ).toString(); } private static void speedTest( int n, boolean comp ) { int i, j; RB_TREE_MAP m; java.util.TreeMap t; KEY_TYPE k[] = new KEY_TYPE[n]; KEY_TYPE nk[] = new KEY_TYPE[n]; VALUE_TYPE v[] = new VALUE_TYPE[n]; long ms; for( i = 0; i < n; i++ ) { k[i] = genKey(); nk[i] = genKey(); v[i] = genValue(); } double totPut = 0, totYes = 0, totNo = 0, totAddTo = 0, totIterFor = 0, totIterBack = 0, totRemYes = 0, d, dd, ddd; if ( comp ) { for( j = 0; j < 20; j++ ) { t = new java.util.TreeMap(); /* We first add all pairs to t. */ for( i = 0; i < n; i++ ) t.put( KEY2OBJ( k[i] ), VALUE2OBJ( v[i] ) ); /* Then we remove the first half and put it back. */ for( i = 0; i < n/2; i++ ) t.remove( KEY2OBJ( k[i] ) ); ms = System.currentTimeMillis(); for( i = 0; i < n/2; i++ ) t.put( KEY2OBJ( k[i] ), VALUE2OBJ( v[i] ) ); d = System.currentTimeMillis() - ms; /* Then we remove the other half and put it back again. 
*/ ms = System.currentTimeMillis(); for( i = n/2; i < n; i++ ) t.remove( KEY2OBJ( k[i] ) ); dd = System.currentTimeMillis() - ms ; ms = System.currentTimeMillis(); for( i = n/2; i < n; i++ ) t.put( KEY2OBJ( k[i] ), VALUE2OBJ( v[i] ) ); d += System.currentTimeMillis() - ms; if ( j > 2 ) totPut += n/d; System.out.print("Add: " + format( n/d ) +" K/s " ); /* Then we remove again the first half. */ ms = System.currentTimeMillis(); for( i = 0; i < n/2; i++ ) t.remove( KEY2OBJ( k[i] ) ); dd += System.currentTimeMillis() - ms ; if ( j > 2 ) totRemYes += n/dd; System.out.print("RemYes: " + format( n/dd ) +" K/s " ); /* And then we put it back. */ for( i = 0; i < n/2; i++ ) t.put( KEY2OBJ( k[i] ), VALUE2OBJ( v[i] ) ); #if VALUES_PRIMITIVE && !VALUE_CLASS_Boolean /* we perform n/2 addTo() operations with get then put */ ms = System.currentTimeMillis(); for( i = 0; i < n/2; i++ ) t.put( KEY2OBJ( k[i] ), (VALUE_TYPE) ((VALUE_CLASS) t.get( KEY2OBJ(k[i])) + i) ); ddd = System.currentTimeMillis() - ms; if ( j > 2 ) totAddTo += n/ddd; System.out.print("AddTo: " + format( n/ddd ) +" K/s " ); #endif /* We check for pairs in t. */ ms = System.currentTimeMillis(); for( i = 0; i < n; i++ ) t.containsKey( KEY2OBJ( k[i] ) ); d = 1.0 * n / (System.currentTimeMillis() - ms ); if ( j > 2 ) totYes += d; System.out.print("Yes: " + format( d ) +" K/s " ); /* We check for pairs not in t. */ ms = System.currentTimeMillis(); for( i = 0; i < n; i++ ) t.containsKey( KEY2OBJ( nk[i] ) ); d = 1.0 * n / (System.currentTimeMillis() - ms ); if ( j > 2 ) totNo += d; System.out.print("No: " + format( d ) +" K/s " ); /* We iterate on t. */ ms = System.currentTimeMillis(); for( Iterator it = t.entrySet().iterator(); it.hasNext(); it.next() ); d = 1.0 * n / (System.currentTimeMillis() - ms ); if ( j > 2 ) totIterFor += d; System.out.print("IterFor: " + format( d ) +" K/s " ); System.out.println(); } System.out.println(); System.out.println( "java.util Put: " + format( totPut/(j-3) ) + " K/s RemYes: " + format( totRemYes/(j-3) ) + " K/s Yes: " + format( totYes/(j-3) ) + " K/s No: " + format( totNo/(j-3) )+ "K/s AddTo: " + format( totAddTo/(j-3) ) + " K/s IterFor: " + format( totIterFor/(j-3) ) + " K/s" ); System.out.println(); t = null; totPut = totYes = totNo = totIterFor = totIterBack = totRemYes = 0; } for( j = 0; j < 20; j++ ) { m = new RB_TREE_MAP(); /* We first add all pairs to m. */ for( i = 0; i < n; i++ ) m.put( k[i], v[i] ); /* Then we remove the first half and put it back. */ for( i = 0; i < n/2; i++ ) m.remove( k[i] ); ms = System.currentTimeMillis(); for( i = 0; i < n/2; i++ ) m.put( k[i], v[i] ); d = System.currentTimeMillis() - ms; /* Then we remove the other half and put it back again. */ ms = System.currentTimeMillis(); for( i = n/2; i < n; i++ ) m.remove( k[i] ); dd = System.currentTimeMillis() - ms ; ms = System.currentTimeMillis(); for( i = n/2; i < n; i++ ) m.put( k[i], v[i] ); d += System.currentTimeMillis() - ms; if ( j > 2 ) totPut += n/d; System.out.print("Add: " + format( n/d ) +" K/s " ); /* Then we remove again the first half. */ ms = System.currentTimeMillis(); for( i = 0; i < n/2; i++ ) m.remove( k[i] ); dd += System.currentTimeMillis() - ms ; if ( j > 2 ) totRemYes += n/dd; System.out.print("RemYes: " + format( n/dd ) +" K/s " ); /* And then we put it back. 
*/ for( i = 0; i < n/2; i++ ) m.put( k[i], v[i] ); #if VALUES_PRIMITIVE && !VALUE_CLASS_Boolean /* we perform n/2 addTo() operations with get then put */ ms = System.currentTimeMillis(); for( i = 0; i < n/2; i++ ) m.addTo( k[i], (VALUE_TYPE) i ); ddd = System.currentTimeMillis() - ms; if ( j > 2 ) totAddTo += n/ddd; System.out.print("AddTo: " + format( n/ddd ) +" K/s " ); #endif /* We check for pairs in m. */ ms = System.currentTimeMillis(); for( i = 0; i < n; i++ ) m.containsKey( k[i] ); d = 1.0 * n / (System.currentTimeMillis() - ms ); if ( j > 2 ) totYes += d; System.out.print("Yes: " + format( d ) +" K/s " ); /* We check for pairs not in m. */ ms = System.currentTimeMillis(); for( i = 0; i < n; i++ ) m.containsKey( nk[i] ); d = 1.0 * n / (System.currentTimeMillis() - ms ); if ( j > 2 ) totNo += d; System.out.print("No: " + format( d ) +" K/s " ); /* We iterate on m. */ java.util.ListIterator it = (java.util.ListIterator)m.entrySet().iterator(); ms = System.currentTimeMillis(); for( ; it.hasNext(); it.next() ); d = 1.0 * n / (System.currentTimeMillis() - ms ); if ( j > 2 ) totIterFor += d; System.out.print("IterFor: " + format( d ) +" K/s " ); /* We iterate back on m. */ ms = System.currentTimeMillis(); for( ; it.hasPrevious(); it.previous() ); d = 1.0 * n / (System.currentTimeMillis() - ms ); if ( j > 2 ) totIterBack += d; System.out.print("IterBack: " + format( d ) +" K/s " ); System.out.println(); } System.out.println(); System.out.println( "fastutil Put: " + format( totPut/(j-3) ) + " K/s RemYes: " + format( totRemYes/(j-3) ) + " K/s Yes: " + format( totYes/(j-3) ) + " K/s No: " + format( totNo/(j-3) )+ "K/s AddTo: " + format( totAddTo/(j-3) ) + " K/s IterFor: " + format( totIterFor/(j-3) ) + " K/s" ); System.out.println(); } private static boolean valEquals(Object o1, Object o2) { return o1 == null ? o2 == null : o1.equals(o2); } private static void fatal( String msg ) { System.out.println( msg ); System.exit( 1 ); } private static void ensure( boolean cond, String msg ) { if ( cond ) return; fatal( msg ); } private static Object[] k, v, nk; private static KEY_TYPE kt[]; private static KEY_TYPE nkt[]; private static VALUE_TYPE vt[]; private static RB_TREE_MAP topMap; protected static void testMaps( SORTED_MAP m, SortedMap t, int n, int level ) { long ms; boolean mThrowsIllegal, tThrowsIllegal, mThrowsNoElement, tThrowsNoElement; Object rt = null, rm = null; if ( level > 4 ) return; /* Now we check that both maps agree on first/last keys. */ mThrowsNoElement = mThrowsIllegal = tThrowsNoElement = tThrowsIllegal = false; try { m.firstKey(); } catch ( NoSuchElementException e ) { mThrowsNoElement = true; } try { t.firstKey(); } catch ( NoSuchElementException e ) { tThrowsNoElement = true; } ensure( mThrowsNoElement == tThrowsNoElement, "Error (" + level + ", " + seed + "): firstKey() divergence at start in NoSuchElementException (" + mThrowsNoElement + ", " + tThrowsNoElement + ")" ); if ( ! 
mThrowsNoElement ) ensure( t.firstKey().equals( m.firstKey() ), "Error (" + level + ", " + seed + "): m and t differ at start on their first key (" + m.firstKey() + ", " + t.firstKey() +")" ); mThrowsNoElement = mThrowsIllegal = tThrowsNoElement = tThrowsIllegal = false; try { m.lastKey(); } catch ( NoSuchElementException e ) { mThrowsNoElement = true; } try { t.lastKey(); } catch ( NoSuchElementException e ) { tThrowsNoElement = true; } ensure( mThrowsNoElement == tThrowsNoElement, "Error (" + level + ", " + seed + "): lastKey() divergence at start in NoSuchElementException (" + mThrowsNoElement + ", " + tThrowsNoElement + ")" ); if ( ! mThrowsNoElement ) ensure( t.lastKey().equals( m.lastKey() ), "Error (" + level + ", " + seed + "): m and t differ at start on their last key (" + m.lastKey() + ", " + t.lastKey() +")"); /* Now we check that m and t are equal. */ if ( !m.equals( t ) || ! t.equals( m ) ) System.err.println("m: " + m + " t: " + t); ensure( m.equals( t ), "Error (" + level + ", " + seed + "): ! m.equals( t ) at start" ); ensure( t.equals( m ), "Error (" + level + ", " + seed + "): ! t.equals( m ) at start" ); /* Now we check that m actually holds that data. */ for(Iterator i=t.entrySet().iterator(); i.hasNext(); ) { java.util.Map.Entry e = (java.util.Map.Entry)i.next(); ensure( valEquals(e.getValue(), m.get(e.getKey())), "Error (" + level + ", " + seed + "): m and t differ on an entry ("+e+") after insertion (iterating on t)" ); } /* Now we check that m actually holds that data, but iterating on m. */ for(Iterator i=m.entrySet().iterator(); i.hasNext(); ) { Entry e = (Entry)i.next(); ensure( valEquals(e.getValue(), t.get(e.getKey())), "Error (" + level + ", " + seed + "): m and t differ on an entry ("+e+") after insertion (iterating on m)" ); } /* Now we check that m actually holds the same keys. */ for(Iterator i=t.keySet().iterator(); i.hasNext(); ) { Object o = i.next(); ensure( m.containsKey(o), "Error (" + level + ", " + seed + "): m and t differ on a key ("+o+") after insertion (iterating on t)" ); ensure( m.keySet().contains(o), "Error (" + level + ", " + seed + "): m and t differ on a key ("+o+", in keySet()) after insertion (iterating on t)" ); } /* Now we check that m actually holds the same keys, but iterating on m. */ for(Iterator i=m.keySet().iterator(); i.hasNext(); ) { Object o = i.next(); ensure( t.containsKey(o), "Error (" + level + ", " + seed + "): m and t differ on a key after insertion (iterating on m)" ); ensure( t.keySet().contains(o), "Error (" + level + ", " + seed + "): m and t differ on a key (in keySet()) after insertion (iterating on m)" ); } /* Now we check that m actually hold the same values. */ for(Iterator i=t.values().iterator(); i.hasNext(); ) { Object o = i.next(); ensure( m.containsValue(o), "Error (" + level + ", " + seed + "): m and t differ on a value after insertion (iterating on t)" ); ensure( m.values().contains(o), "Error (" + level + ", " + seed + "): m and t differ on a value (in values()) after insertion (iterating on t)" ); } /* Now we check that m actually hold the same values, but iterating on m. */ for(Iterator i=m.values().iterator(); i.hasNext(); ) { Object o = i.next(); ensure( t.containsValue(o), "Error (" + level + ", " + seed + "): m and t differ on a value after insertion (iterating on m)"); ensure( t.values().contains(o), "Error (" + level + ", " + seed + "): m and t differ on a value (in values()) after insertion (iterating on m)"); } /* Now we check that inquiries about random data give the same answer in m and t. 
For m we use the polymorphic method. */ for(int i=0; i 0 ) { badPrevious = true; j.previous(); break; } previous = k; } i = (it.unimi.dsi.fastutil.BidirectionalIterator)((SORTED_SET)m.keySet()).iterator( from ); for( int k = 0; k < 2*n; k++ ) { ensure( i.hasNext() == j.hasNext(), "Error (" + level + ", " + seed + "): divergence in hasNext() (iterator with starting point " + from + ")" ); ensure( i.hasPrevious() == j.hasPrevious() || badPrevious && ( i.hasPrevious() == ( previous != null ) ), "Error (" + level + ", " + seed + "): divergence in hasPrevious() (iterator with starting point " + from + ")" + badPrevious ); if ( r.nextFloat() < .8 && i.hasNext() ) { ensure( ( I = i.next() ).equals( J = j.next() ), "Error (" + level + ", " + seed + "): divergence in next() (" + I + ", " + J + ", iterator with starting point " + from + ")" ); //System.err.println("Done next " + I + " " + J + " " + badPrevious); badPrevious = false; if ( r.nextFloat() < 0.5 ) { //System.err.println("Removing in next"); i.remove(); j.remove(); t.remove( J ); } } else if ( !badPrevious && r.nextFloat() < .2 && i.hasPrevious() ) { ensure( ( I = i.previous() ).equals( J = j.previous() ), "Error (" + level + ", " + seed + "): divergence in previous() (" + I + ", " + J + ", iterator with starting point " + from + ")" ); if ( r.nextFloat() < 0.5 ) { //System.err.println("Removing in prev"); i.remove(); j.remove(); t.remove( J ); } } } } /* Now we check that m actually holds that data. */ ensure( m.equals(t), "Error (" + level + ", " + seed + "): ! m.equals( t ) after iteration" ); ensure( t.equals(m), "Error (" + level + ", " + seed + "): ! t.equals( m ) after iteration" ); /* Now we select a pair of keys and create a submap. */ if ( ! m.isEmpty() ) { java.util.ListIterator i; Object start = m.firstKey(), end = m.firstKey(); for( i = (java.util.ListIterator)m.keySet().iterator(); i.hasNext() && r.nextFloat() < .3; start = end = i.next() ); for( ; i.hasNext() && r.nextFloat() < .95; end = i.next() ); //System.err.println("Checking subMap from " + start + " to " + end + " (level=" + (level+1) + ")..." ); testMaps( (SORTED_MAP)m.subMap( (KEY_CLASS) start, (KEY_CLASS)end ), t.subMap( start, end ), n, level + 1 ); ensure( m.equals(t), "Error (" + level + ", " + seed + "): ! m.equals( t ) after subMap" ); ensure( t.equals(m), "Error (" + level + ", " + seed + "): ! t.equals( m ) after subMap" ); //System.err.println("Checking headMap to " + end + " (level=" + (level+1) + ")..." ); testMaps( (SORTED_MAP)m.headMap( (KEY_CLASS)end ), t.headMap( end ), n, level + 1 ); ensure( m.equals(t), "Error (" + level + ", " + seed + "): ! m.equals( t ) after headMap" ); ensure( t.equals(m), "Error (" + level + ", " + seed + "): ! t.equals( m ) after headMap" ); //System.err.println("Checking tailMap from " + start + " (level=" + (level+1) + ")..." ); testMaps( (SORTED_MAP)m.tailMap( (KEY_CLASS)start ), t.tailMap( start ), n, level + 1 ); ensure( m.equals(t), "Error (" + level + ", " + seed + "): ! m.equals( t ) after tailMap" ); ensure( t.equals(m), "Error (" + level + ", " + seed + "): ! 
t.equals( m ) after tailMap" ); } } private static void test( int n ) { RB_TREE_MAP m = new RB_TREE_MAP(); SortedMap t = new java.util.TreeMap(); topMap = m; k = new Object[n]; v = new Object[n]; nk = new Object[n]; kt = new KEY_TYPE[n]; nkt = new KEY_TYPE[n]; vt = new VALUE_TYPE[n]; for( int i = 0; i < n; i++ ) { #if KEY_CLASS_Object k[i] = kt[i] = genKey(); nk[i] = nkt[i] = genKey(); #else k[i] = new KEY_CLASS( kt[i] = genKey() ); nk[i] = new KEY_CLASS( nkt[i] = genKey() ); #endif #if VALUES_REFERENCE v[i] = vt[i] = genValue(); #else v[i] = new VALUE_CLASS( vt[i] = genValue() ); #endif } /* We add pairs to t. */ for( int i = 0; i < n; i++ ) t.put( k[i], v[i] ); /* We add to m the same data */ m.putAll(t); testMaps( m, t, n, 0 ); System.out.println("Test OK"); return; } public static void main( String args[] ) { int n = Integer.parseInt(args[1]); if ( args.length > 2 ) r = new java.util.Random( seed = Long.parseLong( args[ 2 ] ) ); try { if ("speedTest".equals(args[0]) || "speedComp".equals(args[0])) speedTest( n, "speedComp".equals(args[0]) ); else if ( "test".equals( args[0] ) ) test(n); } catch( Throwable e ) { e.printStackTrace( System.err ); System.err.println( "seed: " + seed ); } } #endif } fastutil-7.1.0/drv/RBTreeSet.drv0000664000000000000000000020057713050701620015166 0ustar rootroot/* * Copyright (C) 2002-2016 Sebastiano Vigna * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package PACKAGE; import java.util.Collection; import java.util.Comparator; import java.util.Iterator; import java.util.SortedSet; import java.util.NoSuchElementException; /** A type-specific red-black tree set with a fast, small-footprint implementation. * *
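 * <P>A minimal usage sketch (illustration only, assuming the generated int specialization IntRBTreeSet):
 * <pre>
 * IntRBTreeSet s = new IntRBTreeSet( new int[] { 5, 1, 3 } );   // elements are kept sorted: 1, 3, 5
 * s.firstInt();        // 1
 * s.lastInt();         // 5
 * s.headSet( 5 );      // the live view containing 1 and 3
 * s.contains( 2 );     // false, with no boxing involved
 * </pre>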

The iterators provided by this class are type-specific {@link * it.unimi.dsi.fastutil.BidirectionalIterator bidirectional iterators}. * Moreover, the iterator returned by iterator() can be safely cast * to a type-specific {@linkplain java.util.ListIterator list iterator}. */ public class RB_TREE_SET KEY_GENERIC extends ABSTRACT_SORTED_SET KEY_GENERIC implements java.io.Serializable, Cloneable, SORTED_SET KEY_GENERIC { /** A reference to the root entry. */ protected transient Entry KEY_GENERIC tree; /** Number of elements in this set. */ protected int count; /** The entry of the first element of this set. */ protected transient Entry KEY_GENERIC firstEntry; /** The entry of the last element of this set. */ protected transient Entry KEY_GENERIC lastEntry; /** This set's comparator, as provided in the constructor. */ protected Comparator storedComparator; /** This set's actual comparator; it may differ from {@link #storedComparator} because it is always a type-specific comparator, so it could be derived from the former by wrapping. */ protected transient KEY_COMPARATOR KEY_SUPER_GENERIC actualComparator; private static final long serialVersionUID = -7046029254386353130L; private static final boolean ASSERTS = ASSERTS_VALUE; { allocatePaths(); } /** Creates a new empty tree set. */ public RB_TREE_SET() { tree = null; count = 0; } /** Generates the comparator that will be actually used. * *
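 * <P>For instance (a sketch, assuming the generated int specialization IntRBTreeSet and the
 * constant it.unimi.dsi.fastutil.ints.IntComparators.OPPOSITE_COMPARATOR):
 * <pre>
 * new IntRBTreeSet( java.util.Collections.reverseOrder() );                            // generic comparator: it gets wrapped
 * new IntRBTreeSet( it.unimi.dsi.fastutil.ints.IntComparators.OPPOSITE_COMPARATOR );   // type-specific comparator: used as is
 * </pre>
 * Both sets order their elements in reverse, but the second form avoids boxing when keys are compared.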

When a specific {@link Comparator} is specified and stored in {@link * #storedComparator}, we must check whether it is type-specific. If it is * so, we can used directly, and we store it in {@link #actualComparator}. Otherwise, * we generate on-the-fly an anonymous class that wraps the non-specific {@link Comparator} * and makes it into a type-specific one. */ private void setActualComparator() { #if KEY_CLASS_Object actualComparator = storedComparator; #else /* If the provided comparator is already type-specific, we use it. Otherwise, we use a wrapper anonymous class to fake that it is type-specific. */ if ( storedComparator == null || storedComparator instanceof KEY_COMPARATOR ) actualComparator = (KEY_COMPARATOR)storedComparator; else actualComparator = new KEY_COMPARATOR KEY_GENERIC() { public int compare( KEY_GENERIC_TYPE k1, KEY_GENERIC_TYPE k2 ) { return storedComparator.compare( KEY2OBJ( k1 ), KEY2OBJ( k2 ) ); } public int compare( KEY_CLASS ok1, KEY_CLASS ok2 ) { return storedComparator.compare( ok1, ok2 ); } }; #endif } /** Creates a new empty tree set with the given comparator. * * @param c a {@link Comparator} (even better, a type-specific comparator). */ public RB_TREE_SET( final Comparator c ) { this(); storedComparator = c; setActualComparator(); } /** Creates a new tree set copying a given collection. * * @param c a collection to be copied into the new tree set. */ public RB_TREE_SET( final Collection c ) { this(); addAll( c ); } /** Creates a new tree set copying a given sorted set (and its {@link Comparator}). * * @param s a {@link SortedSet} to be copied into the new tree set. */ public RB_TREE_SET( final SortedSet s ) { this( s.comparator() ); addAll( s ); } /** Creates a new tree set copying a given type-specific collection. * * @param c a type-specific collection to be copied into the new tree set. */ public RB_TREE_SET( final COLLECTION KEY_EXTENDS_GENERIC c ) { this(); addAll( c ); } /** Creates a new tree set copying a given type-specific sorted set (and its {@link Comparator}). * * @param s a type-specific sorted set to be copied into the new tree set. */ public RB_TREE_SET( final SORTED_SET KEY_GENERIC s ) { this( s.comparator() ); addAll( s ); } /** Creates a new tree set using elements provided by a type-specific iterator. * * @param i a type-specific iterator whose elements will fill the set. */ public RB_TREE_SET( final STD_KEY_ITERATOR KEY_EXTENDS_GENERIC i ) { while( i.hasNext() ) add( i.NEXT_KEY() ); } #if KEYS_PRIMITIVE /** Creates a new tree set using elements provided by an iterator. * * @param i an iterator whose elements will fill the set. */ SUPPRESS_WARNINGS_KEY_UNCHECKED public RB_TREE_SET( final Iterator i ) { this( ITERATORS.AS_KEY_ITERATOR( i ) ); } #endif /** Creates a new tree set and fills it with the elements of a given array using a given {@link Comparator}. * * @param a an array whose elements will be used to fill the set. * @param offset the first element to use. * @param length the number of elements to use. * @param c a {@link Comparator} (even better, a type-specific comparator). */ public RB_TREE_SET( final KEY_GENERIC_TYPE[] a, final int offset, final int length, final Comparator c ) { this( c ); ARRAYS.ensureOffsetLength( a, offset, length ); for( int i = 0; i < length; i++ ) add( a[ offset + i ] ); } /** Creates a new tree set and fills it with the elements of a given array. * * @param a an array whose elements will be used to fill the set. * @param offset the first element to use. * @param length the number of elements to use. 
*/ public RB_TREE_SET( final KEY_GENERIC_TYPE[] a, final int offset, final int length ) { this( a, offset, length, null ); } /** Creates a new tree set copying the elements of an array. * * @param a an array to be copied into the new tree set. */ public RB_TREE_SET( final KEY_GENERIC_TYPE[] a ) { this(); int i = a.length; while( i-- != 0 ) add( a[ i ] ); } /** Creates a new tree set copying the elements of an array using a given {@link Comparator}. * * @param a an array to be copied into the new tree set. * @param c a {@link Comparator} (even better, a type-specific comparator). */ public RB_TREE_SET( final KEY_GENERIC_TYPE[] a, final Comparator c ) { this( c ); int i = a.length; while( i-- != 0 ) add( a[ i ] ); } /* * The following methods implements some basic building blocks used by * all accessors. They are (and should be maintained) identical to those used in RBTreeMap.drv. * * The add()/remove() code is derived from Ben Pfaff's GNU libavl * (http://www.msu.edu/~pfaffben/avl/). If you want to understand what's * going on, you should have a look at the literate code contained therein * first. */ /** Compares two keys in the right way. * *

This method uses the {@link #actualComparator} if it is non-null. * Otherwise, it resorts to primitive type comparisons or to {@link Comparable#compareTo(Object) compareTo()}. * * @param k1 the first key. * @param k2 the second key. * @return a number smaller than, equal to or greater than 0, as usual * (i.e., when k1 < k2, k1 = k2 or k1 > k2, respectively). */ SUPPRESS_WARNINGS_KEY_UNCHECKED final int compare( final KEY_GENERIC_TYPE k1, final KEY_GENERIC_TYPE k2 ) { return actualComparator == null ? KEY_CMP( k1, k2 ) : actualComparator.compare( k1, k2 ); } /** Returns the entry corresponding to the given key, if it is in the tree; null, otherwise. * * @param k the key to search for. * @return the corresponding entry, or null if no entry with the given key exists. */ private Entry KEY_GENERIC findKey( final KEY_GENERIC_TYPE k ) { Entry KEY_GENERIC e = tree; int cmp; while ( e != null && ( cmp = compare( k, e.key ) ) != 0 ) e = cmp < 0 ? e.left() : e.right(); return e; } /** Locates a key. * * @param k a key. * @return the last entry on a search for the given key; this will be * the given key, if it present; otherwise, it will be either the smallest greater key or the greatest smaller key. */ final Entry KEY_GENERIC locateKey( final KEY_GENERIC_TYPE k ) { Entry KEY_GENERIC e = tree, last = tree; int cmp = 0; while ( e != null && ( cmp = compare( k, e.key ) ) != 0 ) { last = e; e = cmp < 0 ? e.left() : e.right(); } return cmp == 0 ? e : last; } /** This vector remembers the path and the direction followed during the * current insertion. It suffices for about 232 entries. */ private transient boolean dirPath[]; private transient Entry KEY_GENERIC nodePath[]; SUPPRESS_WARNINGS_KEY_UNCHECKED private void allocatePaths() { dirPath = new boolean[ 64 ]; #if KEYS_REFERENCE nodePath = new Entry[ 64 ]; #else nodePath = new Entry[ 64 ]; #endif } public boolean add( final KEY_GENERIC_TYPE k ) { int maxDepth = 0; if ( tree == null ) { // The case of the empty tree is treated separately. count++; tree = lastEntry = firstEntry = new Entry KEY_GENERIC( k ); } else { Entry KEY_GENERIC p = tree, e; int cmp, i = 0; while( true ) { if ( ( cmp = compare( k, p.key ) ) == 0 ) { // We clean up the node path, or we could have stale references later. while( i-- != 0 ) nodePath[ i ] = null; return false; } nodePath[ i ] = p; if ( dirPath[ i++ ] = cmp > 0 ) { if ( p.succ() ) { count++; e = new Entry KEY_GENERIC( k ); if ( p.right == null ) lastEntry = e; e.left = p; e.right = p.right; p.right( e ); break; } p = p.right; } else { if ( p.pred() ) { count++; e = new Entry KEY_GENERIC( k ); if ( p.left == null ) firstEntry = e; e.right = p; e.left = p.left; p.left( e ); break; } p = p.left; } } maxDepth = i--; while( i > 0 && ! nodePath[ i ].black() ) { if ( ! dirPath[ i - 1 ] ) { Entry KEY_GENERIC y = nodePath[ i - 1 ].right; if ( ! nodePath[ i - 1 ].succ() && ! y.black() ) { nodePath[ i ].black( true ); y.black( true ); nodePath[ i - 1 ].black( false ); i -= 2; } else { Entry KEY_GENERIC x; if ( ! dirPath[ i ] ) y = nodePath[ i ]; else { x = nodePath[ i ]; y = x.right; x.right = y.left; y.left = x; nodePath[ i - 1 ].left = y; if ( y.pred() ) { y.pred( false ); x.succ( y ); } } x = nodePath[ i - 1 ]; x.black( false ); y.black( true ); x.left = y.right; y.right = x; if ( i < 2 ) tree = y; else { if ( dirPath[ i - 2 ] ) nodePath[ i - 2 ].right = y; else nodePath[ i - 2 ].left = y; } if ( y.succ() ) { y.succ( false ); x.pred( y ); } break; } } else { Entry KEY_GENERIC y = nodePath[ i - 1 ].left; if ( ! 
nodePath[ i - 1 ].pred() && ! y.black() ) { nodePath[ i ].black( true ); y.black( true ); nodePath[ i - 1 ].black( false ); i -= 2; } else { Entry KEY_GENERIC x; if ( dirPath[ i ] ) y = nodePath[ i ]; else { x = nodePath[ i ]; y = x.left; x.left = y.right; y.right = x; nodePath[ i - 1 ].right = y; if ( y.succ() ) { y.succ( false ); x.pred( y ); } } x = nodePath[ i - 1 ]; x.black( false ); y.black( true ); x.right = y.left; y.left = x; if ( i < 2 ) tree = y; else { if ( dirPath[ i - 2 ] ) nodePath[ i - 2 ].right = y; else nodePath[ i - 2 ].left = y; } if ( y.pred() ){ y.pred( false ); x.succ( y ); } break; } } } } tree.black( true ); // We clean up the node path, or we could have stale references later. while( maxDepth-- != 0 ) nodePath[ maxDepth ] = null; if ( ASSERTS ) { checkNodePath(); checkTree( tree, 0, -1 ); } return true; } SUPPRESS_WARNINGS_KEY_UNCHECKED public boolean rem( final KEY_TYPE k ) { if ( tree == null ) return false; Entry KEY_GENERIC p = tree; int cmp; int i = 0; final KEY_GENERIC_TYPE kk = KEY_GENERIC_CAST k; while( true ) { if ( ( cmp = compare( kk, p.key ) ) == 0 ) break; dirPath[ i ] = cmp > 0; nodePath[ i ] = p; if ( dirPath[ i++ ] ) { if ( ( p = p.right() ) == null ) { // We clean up the node path, or we could have stale references later. while( i-- != 0 ) nodePath[ i ] = null; return false; } } else { if ( ( p = p.left() ) == null ) { // We clean up the node path, or we could have stale references later. while( i-- != 0 ) nodePath[ i ] = null; return false; } } } if ( p.left == null ) firstEntry = p.next(); if ( p.right == null ) lastEntry = p.prev(); if ( p.succ() ) { if ( p.pred() ) { if ( i == 0 ) tree = p.left; else { if ( dirPath[ i - 1 ] ) nodePath[ i - 1 ].succ( p.right ); else nodePath[ i - 1 ].pred( p.left ); } } else { p.prev().right = p.right; if ( i == 0 ) tree = p.left; else { if ( dirPath[ i - 1 ] ) nodePath[ i - 1 ].right = p.left; else nodePath[ i - 1 ].left = p.left; } } } else { boolean color; Entry KEY_GENERIC r = p.right; if ( r.pred() ) { r.left = p.left; r.pred( p.pred() ); if ( ! r.pred() ) r.prev().right = r; if ( i == 0 ) tree = r; else { if ( dirPath[ i - 1 ] ) nodePath[ i - 1 ].right = r; else nodePath[ i - 1 ].left = r; } color = r.black(); r.black( p.black() ); p.black( color ); dirPath[ i ] = true; nodePath[ i++ ] = r; } else { Entry KEY_GENERIC s; int j = i++; while( true ) { dirPath[ i ] = false; nodePath[ i++ ] = r; s = r.left; if ( s.pred() ) break; r = s; } dirPath[ j ] = true; nodePath[ j ] = s; if ( s.succ() ) r.pred( s ); else r.left = s.right; s.left = p.left; if ( ! p.pred() ) { p.prev().right = s; s.pred( false ); } s.right( p.right ); color = s.black(); s.black( p.black() ); p.black( color ); if ( j == 0 ) tree = s; else { if ( dirPath[ j - 1 ] ) nodePath[ j - 1 ].right = s; else nodePath[ j - 1 ].left = s; } } } int maxDepth = i; if ( p.black() ) { for( ; i > 0; i-- ) { if ( dirPath[ i - 1 ] && ! nodePath[ i - 1 ].succ() || ! dirPath[ i - 1 ] && ! nodePath[ i - 1 ].pred() ) { Entry KEY_GENERIC x = dirPath[ i - 1 ] ? nodePath[ i - 1 ].right : nodePath[ i - 1 ].left; if ( ! x.black() ) { x.black( true ); break; } } if ( ! dirPath[ i - 1 ] ) { Entry KEY_GENERIC w = nodePath[ i - 1 ].right; if ( ! 
w.black() ) { w.black( true ); nodePath[ i - 1 ].black( false ); nodePath[ i - 1 ].right = w.left; w.left = nodePath[ i - 1 ]; if ( i < 2 ) tree = w; else { if ( dirPath[ i - 2 ] ) nodePath[ i - 2 ].right = w; else nodePath[ i - 2 ].left = w; } nodePath[ i ] = nodePath[ i - 1 ]; dirPath[ i ] = false; nodePath[ i - 1 ] = w; if ( maxDepth == i++ ) maxDepth++; w = nodePath[ i - 1 ].right; } if ( ( w.pred() || w.left.black() ) && ( w.succ() || w.right.black() ) ) { w.black( false ); } else { if ( w.succ() || w.right.black() ) { Entry KEY_GENERIC y = w.left; y.black ( true ); w.black( false ); w.left = y.right; y.right = w; w = nodePath[ i - 1 ].right = y; if ( w.succ() ) { w.succ( false ); w.right.pred( w ); } } w.black( nodePath[ i - 1 ].black() ); nodePath[ i - 1 ].black( true ); w.right.black( true ); nodePath[ i - 1 ].right = w.left; w.left = nodePath[ i - 1 ]; if ( i < 2 ) tree = w; else { if ( dirPath[ i - 2 ] ) nodePath[ i - 2 ].right = w; else nodePath[ i - 2 ].left = w; } if ( w.pred() ) { w.pred( false ); nodePath[ i - 1 ].succ( w ); } break; } } else { Entry KEY_GENERIC w = nodePath[ i - 1 ].left; if ( ! w.black() ) { w.black ( true ); nodePath[ i - 1 ].black( false ); nodePath[ i - 1 ].left = w.right; w.right = nodePath[ i - 1 ]; if ( i < 2 ) tree = w; else { if ( dirPath[ i - 2 ] ) nodePath[ i - 2 ].right = w; else nodePath[ i - 2 ].left = w; } nodePath[ i ] = nodePath[ i - 1 ]; dirPath[ i ] = true; nodePath[ i - 1 ] = w; if ( maxDepth == i++ ) maxDepth++; w = nodePath[ i - 1 ].left; } if ( ( w.pred() || w.left.black() ) && ( w.succ() || w.right.black() ) ) { w.black( false ); } else { if ( w.pred() || w.left.black() ) { Entry KEY_GENERIC y = w.right; y.black( true ); w.black ( false ); w.right = y.left; y.left = w; w = nodePath[ i - 1 ].left = y; if ( w.pred() ) { w.pred( false ); w.left.succ( w ); } } w.black( nodePath[ i - 1 ].black() ); nodePath[ i - 1 ].black( true ); w.left.black( true ); nodePath[ i - 1 ].left = w.right; w.right = nodePath[ i - 1 ]; if ( i < 2 ) tree = w; else { if ( dirPath[ i - 2 ] ) nodePath[ i - 2 ].right = w; else nodePath[ i - 2 ].left = w; } if ( w.succ() ) { w.succ( false ); nodePath[ i - 1 ].pred( w ); } break; } } } if ( tree != null ) tree.black( true ); } count--; // We clean up the node path, or we could have stale references later. while( maxDepth-- != 0 ) nodePath[ maxDepth ] = null; if ( ASSERTS ) { checkNodePath(); checkTree( tree, 0, -1 ); } return true; } SUPPRESS_WARNINGS_KEY_UNCHECKED public boolean contains( final KEY_TYPE k ) { return findKey( KEY_GENERIC_CAST k ) != null; } #if KEY_CLASS_Object SUPPRESS_WARNINGS_KEY_UNCHECKED public K get( final KEY_TYPE k ) { final Entry KEY_GENERIC entry = findKey( KEY_GENERIC_CAST k ); return entry == null ? null : entry.key; } #endif public void clear() { count = 0; tree = null; firstEntry = lastEntry = null; } /** This class represent an entry in a tree set. * *

We use the only piece of "metadata", i.e., {@link Entry#info}, to store * information about color, predecessor status and successor status. * *
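 * <P>Concretely (see the masks defined below): bit 0 holds the color (1 means black), bit 30 marks
 * the left pointer as a thread to the predecessor and bit 31 marks the right pointer as a thread to
 * the successor. For example, a freshly created entry has info == PRED_MASK | SUCC_MASK, that is,
 * a red node whose left and right pointers are both threads.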

Note that since the class is recursive, it can be * considered equivalently a tree. */ private static final class Entry KEY_GENERIC implements Cloneable { /** The the bit in this mask is true, the node is black. */ private final static int BLACK_MASK = 1; /** If the bit in this mask is true, {@link #right} points to a successor. */ private final static int SUCC_MASK = 1 << 31; /** If the bit in this mask is true, {@link #left} points to a predecessor. */ private final static int PRED_MASK = 1 << 30; /** The key of this entry. */ KEY_GENERIC_TYPE key; /** The pointers to the left and right subtrees. */ Entry KEY_GENERIC left, right; /** This integers holds different information in different bits (see {@link #SUCC_MASK}, {@link #PRED_MASK} and {@link #BLACK_MASK}). */ int info; Entry() {} /** Creates a new red entry with the given key. * * @param k a key. */ Entry( final KEY_GENERIC_TYPE k ) { this.key = k; info = SUCC_MASK | PRED_MASK; } /** Returns the left subtree. * * @return the left subtree (null if the left * subtree is empty). */ Entry KEY_GENERIC left() { return ( info & PRED_MASK ) != 0 ? null : left; } /** Returns the right subtree. * * @return the right subtree (null if the right * subtree is empty). */ Entry KEY_GENERIC right() { return ( info & SUCC_MASK ) != 0 ? null : right; } /** Checks whether the left pointer is really a predecessor. * @return true if the left pointer is a predecessor. */ boolean pred() { return ( info & PRED_MASK ) != 0; } /** Checks whether the right pointer is really a successor. * @return true if the right pointer is a successor. */ boolean succ() { return ( info & SUCC_MASK ) != 0; } /** Sets whether the left pointer is really a predecessor. * @param pred if true then the left pointer will be considered a predecessor. */ void pred( final boolean pred ) { if ( pred ) info |= PRED_MASK; else info &= ~PRED_MASK; } /** Sets whether the right pointer is really a successor. * @param succ if true then the right pointer will be considered a successor. */ void succ( final boolean succ ) { if ( succ ) info |= SUCC_MASK; else info &= ~SUCC_MASK; } /** Sets the left pointer to a predecessor. * @param pred the predecessr. */ void pred( final Entry KEY_GENERIC pred ) { info |= PRED_MASK; left = pred; } /** Sets the right pointer to a successor. * @param succ the successor. */ void succ( final Entry KEY_GENERIC succ ) { info |= SUCC_MASK; right = succ; } /** Sets the left pointer to the given subtree. * @param left the new left subtree. */ void left( final Entry KEY_GENERIC left ) { info &= ~PRED_MASK; this.left = left; } /** Sets the right pointer to the given subtree. * @param right the new right subtree. */ void right( final Entry KEY_GENERIC right ) { info &= ~SUCC_MASK; this.right = right; } /** Returns whether this node is black. * @return true iff this node is black. */ boolean black() { return ( info & BLACK_MASK ) != 0; } /** Sets whether this node is black. * @param black if true, then this node becomes black; otherwise, it becomes red.. */ void black( final boolean black ) { if ( black ) info |= BLACK_MASK; else info &= ~BLACK_MASK; } /** Computes the next entry in the set order. * * @return the next entry (null) if this is the last entry). */ Entry KEY_GENERIC next() { Entry KEY_GENERIC next = this.right; if ( ( info & SUCC_MASK ) == 0 ) while ( ( next.info & PRED_MASK ) == 0 ) next = next.left; return next; } /** Computes the previous entry in the set order. * * @return the previous entry (null) if this is the first entry). 
*/ Entry KEY_GENERIC prev() { Entry KEY_GENERIC prev = this.left; if ( ( info & PRED_MASK ) == 0 ) while ( ( prev.info & SUCC_MASK ) == 0 ) prev = prev.right; return prev; } SUPPRESS_WARNINGS_KEY_UNCHECKED public Entry KEY_GENERIC clone() { Entry KEY_GENERIC c; try { c = (Entry KEY_GENERIC)super.clone(); } catch(CloneNotSupportedException cantHappen) { throw new InternalError(); } c.key = key; c.info = info; return c; } public boolean equals( final Object o ) { if (!(o instanceof Entry)) return false; Entry KEY_GENERIC_WILDCARD e = (Entry KEY_GENERIC_WILDCARD)o; return KEY_EQUALS(key, e.key); } public int hashCode() { return KEY2JAVAHASH_NOT_NULL(key); } public String toString() { return String.valueOf( key ); } /* public void prettyPrint() { prettyPrint(0); } public void prettyPrint(int level) { if ( pred() ) { for (int i = 0; i < level; i++) System.err.print(" "); System.err.println("pred: " + left ); } else if (left != null) left.prettyPrint(level +1 ); for (int i = 0; i < level; i++) System.err.print(" "); System.err.println(key + " (" + (black() ? "black" : "red") + ")"); if ( succ() ) { for (int i = 0; i < level; i++) System.err.print(" "); System.err.println("succ: " + right ); } else if (right != null) right.prettyPrint(level + 1); }*/ } /* public void prettyPrint() { System.err.println("size: " + count); if (tree != null) tree.prettyPrint(); } */ public int size() { return count; } public boolean isEmpty() { return count == 0; } public KEY_GENERIC_TYPE FIRST() { if ( tree == null ) throw new NoSuchElementException(); return firstEntry.key; } public KEY_GENERIC_TYPE LAST() { if ( tree == null ) throw new NoSuchElementException(); return lastEntry.key; } /** An iterator on the whole range. * *
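 * <P>A sketch of how the starting-point constructor positions the iterator (illustration only,
 * assuming the generated int specialization IntRBTreeSet):
 * <pre>
 * IntRBTreeSet s = new IntRBTreeSet( new int[] { 1, 3, 5 } );
 * IntBidirectionalIterator i = s.iterator( 3 );
 * i.previousInt();     // 3: the greatest element not greater than the starting point
 * i.previousInt();     // 1
 * i.hasPrevious();     // false
 * </pre>
 * A call to nextInt() at the starting position would instead have returned 5, the least element
 * strictly greater than the starting point.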

This class can iterate in both directions on a threaded tree. */ private class SetIterator extends KEY_ABSTRACT_LIST_ITERATOR KEY_GENERIC { /** The entry that will be returned by the next call to {@link java.util.ListIterator#previous()} (or null if no previous entry exists). */ Entry KEY_GENERIC prev; /** The entry that will be returned by the next call to {@link java.util.ListIterator#next()} (or null if no next entry exists). */ Entry KEY_GENERIC next; /** The last entry that was returned (or null if we did not iterate or used {@link #remove()}). */ Entry KEY_GENERIC curr; /** The current index (in the sense of a {@link java.util.ListIterator}). Note that this value is not meaningful when this iterator has been created using the nonempty constructor.*/ int index = 0; SetIterator() { next = firstEntry; } SetIterator( final KEY_GENERIC_TYPE k ) { if ( ( next = locateKey( k ) ) != null ) { if ( compare( next.key, k ) <= 0 ) { prev = next; next = next.next(); } else prev = next.prev(); } } public boolean hasNext() { return next != null; } public boolean hasPrevious() { return prev != null; } void updateNext() { next = next.next(); } Entry KEY_GENERIC nextEntry() { if ( ! hasNext() ) throw new NoSuchElementException(); curr = prev = next; index++; updateNext(); return curr; } public KEY_GENERIC_TYPE NEXT_KEY() { return nextEntry().key; } public KEY_GENERIC_TYPE PREV_KEY() { return previousEntry().key; } void updatePrevious() { prev = prev.prev(); } Entry KEY_GENERIC previousEntry() { if ( ! hasPrevious() ) throw new NoSuchElementException(); curr = next = prev; index--; updatePrevious(); return curr; } public int nextIndex() { return index; } public int previousIndex() { return index - 1; } public void remove() { if ( curr == null ) throw new IllegalStateException(); /* If the last operation was a next(), we are removing an entry that preceeds the current index, and thus we must decrement it. */ if ( curr == prev ) index--; next = prev = curr; updatePrevious(); updateNext(); RB_TREE_SET.this.rem( curr.key ); curr = null; } } public KEY_BIDI_ITERATOR KEY_GENERIC iterator() { return new SetIterator(); } public KEY_BIDI_ITERATOR KEY_GENERIC iterator( final KEY_GENERIC_TYPE from ) { return new SetIterator( from ); } public KEY_COMPARATOR KEY_SUPER_GENERIC comparator() { return actualComparator; } public SORTED_SET KEY_GENERIC headSet( final KEY_GENERIC_TYPE to ) { return new Subset( KEY_NULL, true, to, false ); } public SORTED_SET KEY_GENERIC tailSet( final KEY_GENERIC_TYPE from ) { return new Subset( from, false, KEY_NULL, true ); } public SORTED_SET KEY_GENERIC subSet( final KEY_GENERIC_TYPE from, final KEY_GENERIC_TYPE to ) { return new Subset( from, false, to, false ); } /** A subset with given range. * *
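 * <P>A sketch of the view semantics described below (illustration only, assuming the generated
 * int specialization IntRBTreeSet):
 * <pre>
 * IntRBTreeSet s = new IntRBTreeSet( new int[] { 1, 3, 5, 7 } );
 * IntSortedSet sub = s.subSet( 3, 7 );   // live view on the range [3, 7)
 * sub.contains( 5 );                     // true
 * s.rem( 5 );                            // changes to the backing set are visible through the view...
 * sub.contains( 5 );                     // ...so this is now false
 * sub.add( 9 );                          // throws IllegalArgumentException (out of range)
 * </pre>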

This class represents a subset. One has to specify the left/right * limits (which can be set to -∞ or ∞). Since the subset is a * view on the set, at a given moment it could happen that the limits of * the range are not any longer in the main set. Thus, things such as * {@link java.util.SortedSet#first()} or {@link java.util.Collection#size()} must be always computed * on-the-fly. */ private final class Subset extends ABSTRACT_SORTED_SET KEY_GENERIC implements java.io.Serializable, SORTED_SET KEY_GENERIC { private static final long serialVersionUID = -7046029254386353129L; /** The start of the subset range, unless {@link #bottom} is true. */ KEY_GENERIC_TYPE from; /** The end of the subset range, unless {@link #top} is true. */ KEY_GENERIC_TYPE to; /** If true, the subset range starts from -∞. */ boolean bottom; /** If true, the subset range goes to ∞. */ boolean top; /** Creates a new subset with given key range. * * @param from the start of the subset range. * @param bottom if true, the first parameter is ignored and the range starts from -∞. * @param to the end of the subset range. * @param top if true, the third parameter is ignored and the range goes to ∞. */ public Subset( final KEY_GENERIC_TYPE from, final boolean bottom, final KEY_GENERIC_TYPE to, final boolean top ) { if ( ! bottom && ! top && RB_TREE_SET.this.compare( from, to ) > 0 ) throw new IllegalArgumentException( "Start element (" + from + ") is larger than end element (" + to + ")" ); this.from = from; this.bottom = bottom; this.to = to; this.top = top; } public void clear() { final SubsetIterator i = new SubsetIterator(); while( i.hasNext() ) { i.NEXT_KEY(); i.remove(); } } /** Checks whether a key is in the subset range. * @param k a key. * @return true if is the key is in the subset range. */ final boolean in( final KEY_GENERIC_TYPE k ) { return ( bottom || RB_TREE_SET.this.compare( k, from ) >= 0 ) && ( top || RB_TREE_SET.this.compare( k, to ) < 0 ); } SUPPRESS_WARNINGS_KEY_UNCHECKED public boolean contains( final KEY_TYPE k ) { return in( KEY_GENERIC_CAST k ) && RB_TREE_SET.this.contains( k ); } public boolean add( final KEY_GENERIC_TYPE k ) { if ( ! in( k ) ) throw new IllegalArgumentException( "Element (" + k + ") out of range [" + ( bottom ? "-" : String.valueOf( from ) ) + ", " + ( top ? "-" : String.valueOf( to ) ) + ")" ); return RB_TREE_SET.this.add( k ); } SUPPRESS_WARNINGS_KEY_UNCHECKED public boolean rem( final KEY_TYPE k ) { if ( ! in( KEY_GENERIC_CAST k ) ) return false; return RB_TREE_SET.this.rem( k ); } public int size() { final SubsetIterator i = new SubsetIterator(); int n = 0; while( i.hasNext() ) { n++; i.NEXT_KEY(); } return n; } public boolean isEmpty() { return ! new SubsetIterator().hasNext(); } public KEY_COMPARATOR KEY_SUPER_GENERIC comparator() { return actualComparator; } public KEY_BIDI_ITERATOR KEY_GENERIC iterator() { return new SubsetIterator(); } public KEY_BIDI_ITERATOR KEY_GENERIC iterator( final KEY_GENERIC_TYPE from ) { return new SubsetIterator( from ); } public SORTED_SET KEY_GENERIC headSet( final KEY_GENERIC_TYPE to ) { if ( top ) return new Subset( from, bottom, to, false ); return compare( to, this.to ) < 0 ? new Subset( from, bottom, to, false ) : this; } public SORTED_SET KEY_GENERIC tailSet( final KEY_GENERIC_TYPE from ) { if ( bottom ) return new Subset( from, false, to, top ); return compare( from, this.from ) > 0 ? 
new Subset( from, false, to, top ) : this; } public SORTED_SET KEY_GENERIC subSet( KEY_GENERIC_TYPE from, KEY_GENERIC_TYPE to ) { if ( top && bottom ) return new Subset( from, false, to, false ); if ( ! top ) to = compare( to, this.to ) < 0 ? to : this.to; if ( ! bottom ) from = compare( from, this.from ) > 0 ? from : this.from; if ( ! top && ! bottom && from == this.from && to == this.to ) return this; return new Subset( from, false, to, false ); } /** Locates the first entry. * * @return the first entry of this subset, or null if the subset is empty. */ public RB_TREE_SET.Entry KEY_GENERIC firstEntry() { if ( tree == null ) return null; // If this subset goes to -infinity, we return the main set first entry; otherwise, we locate the start of the set. RB_TREE_SET.Entry KEY_GENERIC e; if ( bottom ) e = firstEntry; else { e = locateKey( from ); // If we find either the start or something greater we're OK. if ( compare( e.key, from ) < 0 ) e = e.next(); } // Finally, if this subset doesn't go to infinity, we check that the resulting key isn't greater than the end. if ( e == null || ! top && compare( e.key, to ) >= 0 ) return null; return e; } /** Locates the last entry. * * @return the last entry of this subset, or null if the subset is empty. */ public RB_TREE_SET.Entry KEY_GENERIC lastEntry() { if ( tree == null ) return null; // If this subset goes to infinity, we return the main set last entry; otherwise, we locate the end of the set. RB_TREE_SET.Entry KEY_GENERIC e; if ( top ) e = lastEntry; else { e = locateKey( to ); // If we find something smaller than the end we're OK. if ( compare( e.key, to ) >= 0 ) e = e.prev(); } // Finally, if this subset doesn't go to -infinity, we check that the resulting key isn't smaller than the start. if ( e == null || ! bottom && compare( e.key, from ) < 0 ) return null; return e; } public KEY_GENERIC_TYPE FIRST() { RB_TREE_SET.Entry KEY_GENERIC e = firstEntry(); if ( e == null ) throw new NoSuchElementException(); return e.key; } public KEY_GENERIC_TYPE LAST() { RB_TREE_SET.Entry KEY_GENERIC e = lastEntry(); if ( e == null ) throw new NoSuchElementException(); return e.key; } /** An iterator for subranges. * *
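 * <P>A sketch of the visible behaviour (assuming the int specialization; element values are illustrative):
 * <pre>
 * IntRBTreeSet s = new IntRBTreeSet();
 * s.add(1); s.add(3); s.add(5); s.add(7); s.add(9);
 * IntBidirectionalIterator i = s.subSet(3, 8).iterator();
 * while (i.hasNext()) i.nextInt(); // yields 3, 5, 7 and then stops: 9 is outside the range
 * i.previousInt();                 // 7; stepping backwards stops before 1 as well
 * </pre>
 *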

This class inherits from {@link SetIterator}, but overrides the methods that * update the pointer after a {@link java.util.ListIterator#next()} or {@link java.util.ListIterator#previous()}. If we would * move out of the range of the subset we just overwrite the next or previous * entry with null. */ private final class SubsetIterator extends SetIterator { SubsetIterator() { next = firstEntry(); } SubsetIterator( final KEY_GENERIC_TYPE k ) { this(); if ( next != null ) { if ( ! bottom && compare( k, next.key ) < 0 ) prev = null; else if ( ! top && compare( k, ( prev = lastEntry() ).key ) >= 0 ) next = null; else { next = locateKey( k ); if ( compare( next.key, k ) <= 0 ) { prev = next; next = next.next(); } else prev = next.prev(); } } } void updatePrevious() { prev = prev.prev(); if ( ! bottom && prev != null && RB_TREE_SET.this.compare( prev.key, from ) < 0 ) prev = null; } void updateNext() { next = next.next(); if ( ! top && next != null && RB_TREE_SET.this.compare( next.key, to ) >= 0 ) next = null; } } } /** Returns a deep copy of this tree set. * *
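 * <P>A sketch (assuming the int specialization; with object keys the elements themselves would be
 * shared, not copied):
 * <pre>
 * IntRBTreeSet s = new IntRBTreeSet();
 * s.add(1); s.add(2);
 * IntRBTreeSet copy = (IntRBTreeSet) s.clone();
 * copy.add(42);   // the copy has its own tree structure...
 * s.contains(42); // ...so this is false
 * </pre>
 *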

This method performs a deep copy of this tree set; the data stored in the * set, however, is not cloned. Note that this makes a difference only for object keys. * * @return a deep copy of this tree set. */ SUPPRESS_WARNINGS_KEY_UNCHECKED public Object clone() { RB_TREE_SET KEY_GENERIC c; try { c = (RB_TREE_SET KEY_GENERIC)super.clone(); } catch(CloneNotSupportedException cantHappen) { throw new InternalError(); } c.allocatePaths(); if ( count != 0 ) { // Also this apparently unfathomable code is derived from GNU libavl. Entry KEY_GENERIC e, p, q, rp = new Entry KEY_GENERIC(), rq = new Entry KEY_GENERIC(); p = rp; rp.left( tree ); q = rq; rq.pred( null ); while( true ) { if ( ! p.pred() ) { e = p.left.clone(); e.pred( q.left ); e.succ( q ); q.left( e ); p = p.left; q = q.left; } else { while( p.succ() ) { p = p.right; if ( p == null ) { q.right = null; c.tree = rq.left; c.firstEntry = c.tree; while( c.firstEntry.left != null ) c.firstEntry = c.firstEntry.left; c.lastEntry = c.tree; while( c.lastEntry.right != null ) c.lastEntry = c.lastEntry.right; return c; } q = q.right; } p = p.right; q = q.right; } if ( ! p.succ() ) { e = p.right.clone(); e.succ( q.right ); e.pred( q ); q.right( e ); } } } return c; } private void writeObject(java.io.ObjectOutputStream s) throws java.io.IOException { int n = count; SetIterator i = new SetIterator(); s.defaultWriteObject(); while( n-- != 0 ) s.WRITE_KEY( i.NEXT_KEY() ); } /** Reads the given number of entries from the input stream, returning the corresponding tree. * * @param s the input stream. * @param n the (positive) number of entries to read. * @param pred the entry containing the key that preceeds the first key in the tree. * @param succ the entry containing the key that follows the last key in the tree. */ SUPPRESS_WARNINGS_KEY_UNCHECKED private Entry KEY_GENERIC readTree( final java.io.ObjectInputStream s, final int n, final Entry KEY_GENERIC pred, final Entry KEY_GENERIC succ ) throws java.io.IOException, ClassNotFoundException { if ( n == 1 ) { final Entry KEY_GENERIC top = new Entry KEY_GENERIC( KEY_GENERIC_CAST s.READ_KEY() ); top.pred( pred ); top.succ( succ ); top.black( true ); return top; } if ( n == 2 ) { /* We handle separately this case so that recursion will *always* be on nonempty subtrees. */ final Entry KEY_GENERIC top = new Entry KEY_GENERIC( KEY_GENERIC_CAST s.READ_KEY() ); top.black( true ); top.right( new Entry KEY_GENERIC( KEY_GENERIC_CAST s.READ_KEY() ) ); top.right.pred( top ); top.pred( pred ); top.right.succ( succ ); return top; } // The right subtree is the largest one. final int rightN = n / 2, leftN = n - rightN - 1; final Entry KEY_GENERIC top = new Entry KEY_GENERIC(); top.left( readTree( s, leftN, pred, top ) ); top.key = KEY_GENERIC_CAST s.READ_KEY(); top.black( true ); top.right( readTree( s, rightN, top, succ ) ); if ( n + 2 == ( ( n + 2 ) & -( n + 2 ) ) ) top.right.black( false ); // Quick test for determining whether n + 2 is a power of 2. return top; } private void readObject( java.io.ObjectInputStream s ) throws java.io.IOException, ClassNotFoundException { s.defaultReadObject(); /* The storedComparator is now correctly set, but we must restore on-the-fly the actualComparator. 
*/ setActualComparator(); allocatePaths(); if ( count != 0 ) { tree = readTree( s, count, null, null ); Entry KEY_GENERIC e; e = tree; while( e.left() != null ) e = e.left(); firstEntry = e; e = tree; while( e.right() != null ) e = e.right(); lastEntry = e; } if ( ASSERTS ) checkTree( tree, 0, -1 ); } #ifdef ASSERTS_CODE private void checkNodePath() { for( int i = nodePath.length; i-- != 0; ) assert nodePath[ i ] == null : i; } private static KEY_GENERIC int checkTree( Entry KEY_GENERIC e, int d, int D ) { if ( e == null ) return 0; if ( e.black() ) d++; if ( e.left() != null ) D = checkTree( e.left(), d, D ); if ( e.right() != null ) D = checkTree( e.right(), d, D ); if ( e.left() == null && e.right() == null ) { if ( D == -1 ) D = d; else if ( D != d ) throw new AssertionError( "Mismatch between number of black nodes (" + D + " and " + d + ")" ); } return D; } #else private void checkNodePath() {} @SuppressWarnings("unused") private int checkTree( Entry KEY_GENERIC e, int d, int D ) { return 0; } #endif #ifdef TEST private static long seed = System.currentTimeMillis(); private static java.util.Random r = new java.util.Random( seed ); private static KEY_TYPE genKey() { #if KEY_CLASS_Byte || KEY_CLASS_Short || KEY_CLASS_Character return (KEY_TYPE)(r.nextInt()); #elif KEYS_PRIMITIVE return r.NEXT_KEY(); #else return Integer.toBinaryString( r.nextInt() ); #endif } private static java.text.NumberFormat format = new java.text.DecimalFormat( "#,###.00" ); private static java.text.FieldPosition p = new java.text.FieldPosition( 0 ); private static String format( double d ) { StringBuffer s = new StringBuffer(); return format.format( d, s, p ).toString(); } private static void speedTest( int n, boolean comp ) { int i, j; RB_TREE_SET m; java.util.TreeSet t; KEY_TYPE k[] = new KEY_TYPE[n]; KEY_TYPE nk[] = new KEY_TYPE[n]; long ms; for( i = 0; i < n; i++ ) { k[i] = genKey(); nk[i] = genKey(); } double totAdd = 0, totYes = 0, totNo = 0, totIterFor = 0, totIterBack = 0, totRemYes = 0, d, dd; if ( comp ) { for( j = 0; j < 20; j++ ) { t = new java.util.TreeSet(); /* We first add all pairs to t. */ for( i = 0; i < n; i++ ) t.add( KEY2OBJ( k[i] ) ); /* Then we remove the first half and put it back. */ for( i = 0; i < n/2; i++ ) t.remove( KEY2OBJ( k[i] ) ); ms = System.currentTimeMillis(); for( i = 0; i < n/2; i++ ) t.add( KEY2OBJ( k[i] ) ); d = System.currentTimeMillis() - ms; /* Then we remove the other half and put it back again. */ ms = System.currentTimeMillis(); for( i = n/2; i < n; i++ ) t.remove( KEY2OBJ( k[i] ) ); dd = System.currentTimeMillis() - ms ; ms = System.currentTimeMillis(); for( i = n/2; i < n; i++ ) t.add( KEY2OBJ( k[i] ) ); d += System.currentTimeMillis() - ms; if ( j > 2 ) totAdd += n/d; System.out.print("Add: " + format( n/d ) +" K/s " ); /* Then we remove again the first half. */ ms = System.currentTimeMillis(); for( i = 0; i < n/2; i++ ) t.remove( KEY2OBJ( k[i] ) ); dd += System.currentTimeMillis() - ms ; if ( j > 2 ) totRemYes += n/dd; System.out.print("RemYes: " + format( n/dd ) +" K/s " ); /* And then we put it back. */ for( i = 0; i < n/2; i++ ) t.add( KEY2OBJ( k[i] ) ); /* We check for pairs in t. */ ms = System.currentTimeMillis(); for( i = 0; i < n; i++ ) t.contains( KEY2OBJ( k[i] ) ); d = 1.0 * n / (System.currentTimeMillis() - ms ); if ( j > 2 ) totYes += d; System.out.print("Yes: " + format( d ) +" K/s " ); /* We check for pairs not in t. 
*/ ms = System.currentTimeMillis(); for( i = 0; i < n; i++ ) t.contains( KEY2OBJ( nk[i] ) ); d = 1.0 * n / (System.currentTimeMillis() - ms ); if ( j > 2 ) totNo += d; System.out.print("No: " + format( d ) +" K/s " ); /* We iterate on t. */ ms = System.currentTimeMillis(); for( Iterator it = t.iterator(); it.hasNext(); it.next() ); d = 1.0 * n / (System.currentTimeMillis() - ms ); if ( j > 2 ) totIterFor += d; System.out.print("IterFor: " + format( d ) +" K/s " ); System.out.println(); } System.out.println(); System.out.println( "java.util Add: " + format( totAdd/(j-3) ) + " K/s RemYes: " + format( totRemYes/(j-3) ) + " K/s Yes: " + format( totYes/(j-3) ) + " K/s No: " + format( totNo/(j-3) ) + " K/s IterFor: " + format( totIterFor/(j-3) ) + " K/s" ); System.out.println(); totAdd = totYes = totNo = totIterFor = totIterBack = totRemYes = 0; } for( j = 0; j < 20; j++ ) { m = new RB_TREE_SET(); /* We first add all pairs to m. */ for( i = 0; i < n; i++ ) m.add( k[i] ); /* Then we remove the first half and put it back. */ for( i = 0; i < n/2; i++ ) m.remove( k[i] ); ms = System.currentTimeMillis(); for( i = 0; i < n/2; i++ ) m.add( k[i] ); d = System.currentTimeMillis() - ms; /* Then we remove the other half and put it back again. */ ms = System.currentTimeMillis(); for( i = n/2; i < n; i++ ) m.remove( k[i] ); dd = System.currentTimeMillis() - ms ; ms = System.currentTimeMillis(); for( i = n/2; i < n; i++ ) m.add( k[i] ); d += System.currentTimeMillis() - ms; if ( j > 2 ) totAdd += n/d; System.out.print("Add: " + format( n/d ) +" K/s " ); /* Then we remove again the first half. */ ms = System.currentTimeMillis(); for( i = 0; i < n/2; i++ ) m.remove( k[i] ); dd += System.currentTimeMillis() - ms ; if ( j > 2 ) totRemYes += n/dd; System.out.print("RemYes: " + format( n/dd ) +" K/s " ); /* And then we put it back. */ for( i = 0; i < n/2; i++ ) m.add( k[i] ); /* We check for pairs in m. */ ms = System.currentTimeMillis(); for( i = 0; i < n; i++ ) m.contains( k[i] ); d = 1.0 * n / (System.currentTimeMillis() - ms ); if ( j > 2 ) totYes += d; System.out.print("Yes: " + format( d ) +" K/s " ); /* We check for pairs not in m. */ ms = System.currentTimeMillis(); for( i = 0; i < n; i++ ) m.contains( nk[i] ); d = 1.0 * n / (System.currentTimeMillis() - ms ); if ( j > 2 ) totNo += d; System.out.print("No: " + format( d ) +" K/s " ); /* We iterate on m. */ KEY_LIST_ITERATOR it = (KEY_LIST_ITERATOR)m.iterator(); ms = System.currentTimeMillis(); for( ; it.hasNext(); it.NEXT_KEY() ); d = 1.0 * n / (System.currentTimeMillis() - ms ); if ( j > 2 ) totIterFor += d; System.out.print("IterFor: " + format( d ) +" K/s " ); /* We iterate back on m. */ ms = System.currentTimeMillis(); for( ; it.hasPrevious(); it.PREV_KEY() ); d = 1.0 * n / (System.currentTimeMillis() - ms ); if ( j > 2 ) totIterBack += d; System.out.print("IterBack: " + format( d ) +" K/s " ); System.out.println(); } System.out.println(); System.out.println( "fastutil Add: " + format( totAdd/(j-3) ) + " K/s RemYes: " + format( totRemYes/(j-3) ) + " K/s Yes: " + format( totYes/(j-3) ) + " K/s No: " + format( totNo/(j-3) ) + " K/s IterFor: " + format( totIterFor/(j-3) ) + " K/s IterBack: " + format( totIterBack/(j-3) ) + "K/s" ); System.out.println(); } private static boolean valEquals(Object o1, Object o2) { return o1 == null ? 
o2 == null : o1.equals(o2); } private static void fatal( String msg ) { System.out.println( msg ); System.exit( 1 ); } private static void ensure( boolean cond, String msg ) { if ( cond ) return; fatal( msg ); } private static Object[] k, v, nk; private static KEY_TYPE kt[]; private static KEY_TYPE nkt[]; private static RB_TREE_SET topSet; protected static void testSets( SORTED_SET m, SortedSet t, int n, int level ) { long ms; boolean mThrowsIllegal, tThrowsIllegal, mThrowsNoElement, tThrowsNoElement; boolean rt = false, rm = false; if ( level > 4 ) return; /* Now we check that both sets agree on first/last keys. */ mThrowsNoElement = mThrowsIllegal = tThrowsNoElement = tThrowsIllegal = false; try { m.first(); } catch ( NoSuchElementException e ) { mThrowsNoElement = true; } try { t.first(); } catch ( NoSuchElementException e ) { tThrowsNoElement = true; } ensure( mThrowsNoElement == tThrowsNoElement, "Error (" + level + ", " + seed + "): first() divergence at start in NoSuchElementException (" + mThrowsNoElement + ", " + tThrowsNoElement + ")" ); if ( ! mThrowsNoElement ) ensure( t.first().equals( m.first() ), "Error (" + level + ", " + seed + "): m and t differ at start on their first key (" + m.first() + ", " + t.first() +")" ); mThrowsNoElement = mThrowsIllegal = tThrowsNoElement = tThrowsIllegal = false; try { m.last(); } catch ( NoSuchElementException e ) { mThrowsNoElement = true; } try { t.last(); } catch ( NoSuchElementException e ) { tThrowsNoElement = true; } ensure( mThrowsNoElement == tThrowsNoElement, "Error (" + level + ", " + seed + "): last() divergence at start in NoSuchElementException (" + mThrowsNoElement + ", " + tThrowsNoElement + ")" ); if ( ! mThrowsNoElement ) ensure( t.last().equals( m.last() ), "Error (" + level + ", " + seed + "): m and t differ at start on their last key (" + m.last() + ", " + t.last() +")"); /* Now we check that m and t are equal. */ if ( !m.equals( t ) || ! t.equals( m ) ) System.err.println("m: " + m + " t: " + t); ensure( m.equals( t ), "Error (" + level + ", " + seed + "): ! m.equals( t ) at start" ); ensure( t.equals( m ), "Error (" + level + ", " + seed + "): ! t.equals( m ) at start" ); /* Now we check that m actually holds that data. */ for(Iterator i=t.iterator(); i.hasNext(); ) { ensure( m.contains( i.next() ), "Error (" + level + ", " + seed + "): m and t differ on an entry after insertion (iterating on t)" ); } /* Now we check that m actually holds that data, but iterating on m. */ for(Iterator i=m.iterator(); i.hasNext(); ) { ensure( t.contains( i.next() ), "Error (" + level + ", " + seed + "): m and t differ on an entry after insertion (iterating on m)" ); } /* Now we check that inquiries about random data give the same answer in m and t. For m we use the polymorphic method. 
*/ for(int i=0; i 0 ) { badPrevious = true; j.previous(); break; } previous = k; } i = (it.unimi.dsi.fastutil.BidirectionalIterator)m.iterator( from ); for( int k = 0; k < 2*n; k++ ) { ensure( i.hasNext() == j.hasNext(), "Error (" + level + ", " + seed + "): divergence in hasNext() (iterator with starting point " + from + ")" ); ensure( i.hasPrevious() == j.hasPrevious() || badPrevious && ( i.hasPrevious() == ( previous != null ) ), "Error (" + level + ", " + seed + "): divergence in hasPrevious() (iterator with starting point " + from + ")" + badPrevious ); if ( r.nextFloat() < .8 && i.hasNext() ) { ensure( ( I = i.next() ).equals( J = j.next() ), "Error (" + level + ", " + seed + "): divergence in next() (" + I + ", " + J + ", iterator with starting point " + from + ")" ); //System.err.println("Done next " + I + " " + J + " " + badPrevious); badPrevious = false; if ( r.nextFloat() < 0.5 ) { //System.err.println("Removing in next"); i.remove(); j.remove(); t.remove( J ); } } else if ( !badPrevious && r.nextFloat() < .2 && i.hasPrevious() ) { ensure( ( I = i.previous() ).equals( J = j.previous() ), "Error (" + level + ", " + seed + "): divergence in previous() (" + I + ", " + J + ", iterator with starting point " + from + ")" ); if ( r.nextFloat() < 0.5 ) { //System.err.println("Removing in prev"); i.remove(); j.remove(); t.remove( J ); } } } } /* Now we check that m actually holds that data. */ ensure( m.equals(t), "Error (" + level + ", " + seed + "): ! m.equals( t ) after iteration" ); ensure( t.equals(m), "Error (" + level + ", " + seed + "): ! t.equals( m ) after iteration" ); /* Now we select a pair of keys and create a subset. */ if ( ! m.isEmpty() ) { java.util.ListIterator i; Object start = m.first(), end = m.first(); for( i = (java.util.ListIterator)m.iterator(); i.hasNext() && r.nextFloat() < .3; start = end = i.next() ); for( ; i.hasNext() && r.nextFloat() < .95; end = i.next() ); //System.err.println("Checking subSet from " + start + " to " + end + " (level=" + (level+1) + ")..." ); testSets( (SORTED_SET)m.subSet( (KEY_CLASS)start, (KEY_CLASS)end ), t.subSet( start, end ), n, level + 1 ); ensure( m.equals(t), "Error (" + level + ", " + seed + "): ! m.equals( t ) after subSet" ); ensure( t.equals(m), "Error (" + level + ", " + seed + "): ! t.equals( m ) after subSet" ); //System.err.println("Checking headSet to " + end + " (level=" + (level+1) + ")..." ); testSets( (SORTED_SET)m.headSet( (KEY_CLASS)end ), t.headSet( end ), n, level + 1 ); ensure( m.equals(t), "Error (" + level + ", " + seed + "): ! m.equals( t ) after headSet" ); ensure( t.equals(m), "Error (" + level + ", " + seed + "): ! t.equals( m ) after headSet" ); //System.err.println("Checking tailSet from " + start + " (level=" + (level+1) + ")..." ); testSets( (SORTED_SET)m.tailSet( (KEY_CLASS)start ), t.tailSet( start ), n, level + 1 ); ensure( m.equals(t), "Error (" + level + ", " + seed + "): ! m.equals( t ) after tailSet" ); ensure( t.equals(m), "Error (" + level + ", " + seed + "): ! t.equals( m ) after tailSet" ); } } private static void test( int n ) { RB_TREE_SET m = new RB_TREE_SET(); SortedSet t = new java.util.TreeSet(); topSet = m; k = new Object[n]; nk = new Object[n]; kt = new KEY_TYPE[n]; nkt = new KEY_TYPE[n]; for( int i = 0; i < n; i++ ) { #if KEY_CLASS_Object k[i] = kt[i] = genKey(); nk[i] = nkt[i] = genKey(); #else k[i] = new KEY_CLASS( kt[i] = genKey() ); nk[i] = new KEY_CLASS( nkt[i] = genKey() ); #endif } /* We add pairs to t. 
*/ for( int i = 0; i < n; i++ ) t.add( k[i] ); /* We add to m the same data */ m.addAll(t); testSets( m, t, n, 0 ); System.out.println("Test OK"); return; } public static void main( String args[] ) { int n = Integer.parseInt(args[1]); if ( args.length > 2 ) r = new java.util.Random( seed = Long.parseLong( args[ 2 ] ) ); try { if ("speedTest".equals(args[0]) || "speedComp".equals(args[0])) speedTest( n, "speedComp".equals(args[0]) ); else if ( "test".equals( args[0] ) ) test(n); } catch( Throwable e ) { e.printStackTrace( System.err ); System.err.println( "seed: " + seed ); } } #endif } fastutil-7.1.0/drv/SemiIndirectHeaps.drv0000664000000000000000000002407113050701620016720 0ustar rootroot/* * Copyright (C) 2003-2016 Paolo Boldi and Sebastiano Vigna * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package PACKAGE; #if KEY_CLASS_Object import java.util.Comparator; #endif import it.unimi.dsi.fastutil.ints.IntArrays; /** A class providing static methods and objects that do useful things with semi-indirect heaps. * *

A semi-indirect heap is based on a reference array. Elements of * a semi-indirect heap are integers that index the reference array (note that * in an indirect heap you can also map elements of the reference * array to heap positions). */ public class SEMI_INDIRECT_HEAPS { private SEMI_INDIRECT_HEAPS() {} /** Moves the given element down into the semi-indirect heap until it reaches the lowest possible position. * * @param refArray the reference array. * @param heap the semi-indirect heap (starting at 0). * @param size the number of elements in the heap. * @param i the index in the heap of the element to be moved down. * @param c a type-specific comparator, or null for the natural order. * @return the new position in the heap of the element of heap index i. */ SUPPRESS_WARNINGS_KEY_UNCHECKED public static KEY_GENERIC int downHeap( final KEY_GENERIC_TYPE[] refArray, final int[] heap, final int size, int i, final KEY_COMPARATOR KEY_GENERIC c ) { assert i < size; final int e = heap[ i ]; final KEY_GENERIC_TYPE E = refArray[ e ]; int child; if ( c == null ) while ( ( child = ( i << 1 ) + 1 ) < size ) { int t = heap[ child ]; final int right = child + 1; if ( right < size && KEY_LESS( refArray[ heap[ right ] ], refArray[ t ] ) ) t = heap[ child = right ]; if ( KEY_LESSEQ( E, refArray[ t ] ) ) break; heap[ i ] = t; i = child; } else while ( ( child = ( i << 1 ) + 1 ) < size ) { int t = heap[ child ]; final int right = child + 1; if ( right < size && c.compare( refArray[ heap[ right ] ], refArray[ t ] ) < 0 ) t = heap[ child = right ]; if ( c.compare( E, refArray[ t ] ) <= 0 ) break; heap[ i ] = t; i = child; } heap[ i ] = e; return i; } /** Moves the given element up in the semi-indirect heap until it reaches the highest possible position. * * @param refArray the reference array. * @param heap the semi-indirect heap (starting at 0). * @param size the number of elements in the heap. * @param i the index in the heap of the element to be moved up. * @param c a type-specific comparator, or null for the natural order. * @return the new position in the heap of the element of heap index i. */ SUPPRESS_WARNINGS_KEY_UNCHECKED public static KEY_GENERIC int upHeap( final KEY_GENERIC_TYPE[] refArray, final int[] heap, final int size, int i, final KEY_COMPARATOR KEY_GENERIC c ) { assert i < size; final int e = heap[ i ]; final KEY_GENERIC_TYPE E = refArray[ e ]; if ( c == null ) while ( i != 0 ) { final int parent = ( i - 1 ) >>> 1; final int t = heap[ parent ]; if ( KEY_LESSEQ( refArray[ t ], E ) ) break; heap[ i ] = t; i = parent; } else while ( i != 0 ) { final int parent = ( i - 1 ) >>> 1; final int t = heap[ parent ]; if ( c.compare( refArray[ t ], E ) <= 0 ) break; heap[ i ] = t; i = parent; } heap[ i ] = e; return i; } /** Creates a semi-indirect heap in the given array. * * @param refArray the reference array. * @param offset the first element of the reference array to be put in the heap. * @param length the number of elements to be put in the heap. * @param heap the array where the heap is to be created. * @param c a type-specific comparator, or null for the natural order. 
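 *
 * <P>A usage sketch (assuming the int specialization, IntSemiIndirectHeaps; array contents are illustrative):
 * <pre>
 * int[] ref  = { 5, 2, 9, 2 };                   // reference array (never modified)
 * int[] heap = new int[ref.length];
 * IntSemiIndirectHeaps.makeHeap(ref, 0, ref.length, heap, null); // natural order
 * // heap[0] now indexes a smallest element of ref (index 1 or 3, both holding 2)
 * </pre>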
*/ public static KEY_GENERIC void makeHeap( final KEY_GENERIC_TYPE[] refArray, final int offset, final int length, final int[] heap, final KEY_COMPARATOR KEY_GENERIC c ) { ARRAYS.ensureOffsetLength( refArray, offset, length ); if ( heap.length < length ) throw new IllegalArgumentException( "The heap length (" + heap.length + ") is smaller than the number of elements (" + length + ")" ); int i = length; while( i-- != 0 ) heap[ i ] = offset + i; i = length >>> 1; while( i-- != 0 ) downHeap( refArray, heap, length, i, c ); } /** Creates a semi-indirect heap, allocating its heap array. * * @param refArray the reference array. * @param offset the first element of the reference array to be put in the heap. * @param length the number of elements to be put in the heap. * @param c a type-specific comparator, or null for the natural order. * @return the heap array. */ public static KEY_GENERIC int[] makeHeap( final KEY_GENERIC_TYPE[] refArray, final int offset, final int length, final KEY_COMPARATOR KEY_GENERIC c ) { final int[] heap = length <= 0 ? IntArrays.EMPTY_ARRAY : new int[ length ]; makeHeap( refArray, offset, length, heap, c ); return heap; } /** Creates a semi-indirect heap from a given index array. * * @param refArray the reference array. * @param heap an array containing indices into refArray. * @param size the number of elements in the heap. * @param c a type-specific comparator, or null for the natural order. */ public static KEY_GENERIC void makeHeap( final KEY_GENERIC_TYPE[] refArray, final int[] heap, final int size, final KEY_COMPARATOR KEY_GENERIC c ) { int i = size >>> 1; while( i-- != 0 ) downHeap( refArray, heap, size, i, c ); } /** Retrieves the front of a heap in a given array. * *

The front of a semi-indirect heap is the set of indices whose associated elements in the reference array * are equal to the element associated to the first index. * *
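 * <P>For instance (a sketch assuming the int specialization; array contents are illustrative):
 * <pre>
 * int[] ref   = { 3, 1, 1, 2 };
 * int[] heap  = IntSemiIndirectHeaps.makeHeap(ref, 0, ref.length, null);
 * int[] front = new int[ref.length];
 * int n = IntSemiIndirectHeaps.front(ref, heap, ref.length, front);
 * // n == 2: indices 1 and 2 both hold the minimum value 1
 * </pre>
 *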

In several circumstances you need to know the front, and scanning linearly the entire heap is not * the best strategy. This method simulates (using a partial linear scan) a breadth-first visit that * terminates when all visited nodes are larger than the element associated * to the top index, which implies that no elements of the front can be found later. * In most cases this trick yields a significant improvement. * * @param refArray the reference array. * @param heap an array containing indices into refArray. * @param size the number of elements in the heap. * @param a an array large enough to hold the front (e.g., at least long as refArray). * @return the number of elements actually written (starting from the first position of a). */ SUPPRESS_WARNINGS_KEY_UNCHECKED public static KEY_GENERIC int front( final KEY_GENERIC_TYPE[] refArray, final int[] heap, final int size, final int[] a ) { final KEY_GENERIC_TYPE top = refArray[ heap[ 0 ] ]; int j = 0, // The current position in a l = 0, // The first position to visit in the next level (inclusive) r = 1, // The last position to visit in the next level (exclusive) f = 0; // The first position (in the heap array) of the next level for( int i = 0; i < r; i++ ) { if ( i == f ) { // New level if ( l >= r ) break; // If we are crossing the two bounds, we're over f = (f << 1) + 1; // Update the first position of the next level... i = l; // ...and jump directly to position l l = -1; // Invalidate l } if ( KEY_CMP_EQ( top, refArray[ heap[ i ] ] ) ) { a[ j++ ] = heap[ i ]; if ( l == -1 ) l = i * 2 + 1; // If this is the first time in this level, set l r = Math.min( size, i * 2 + 3 ); // Update r, but do not go beyond size } } return j; } /** Retrieves the front of a heap in a given array using a given comparator. * *

The front of a semi-indirect heap is the set of indices whose associated elements in the reference array * are equal to the element associated to the first index. * *

In several circumstances you need to know the front, and scanning linearly the entire heap is not * the best strategy. This method simulates (using a partial linear scan) a breadth-first visit that * terminates when all visited nodes are larger than the element associated * to the top index, which implies that no elements of the front can be found later. * In most cases this trick yields a significant improvement. * * @param refArray the reference array. * @param heap an array containing indices into refArray. * @param size the number of elements in the heap. * @param a an array large enough to hold the front (e.g., at least long as refArray). * @param c a type-specific comparator. * @return the number of elements actually written (starting from the first position of a). */ public static KEY_GENERIC int front( final KEY_GENERIC_TYPE[] refArray, final int[] heap, final int size, final int[] a, final KEY_COMPARATOR KEY_GENERIC c ) { final KEY_GENERIC_TYPE top = refArray[ heap[ 0 ] ]; int j = 0, // The current position in a l = 0, // The first position to visit in the next level (inclusive) r = 1, // The last position to visit in the next level (exclusive) f = 0; // The first position (in the heap array) of the next level for( int i = 0; i < r; i++ ) { if ( i == f ) { // New level if ( l >= r ) break; // If we are crossing the two bounds, we're over f = (f << 1) + 1; // Update the first position of the next level... i = l; // ...and jump directly to position l l = -1; // Invalidate l } if ( c.compare( top, refArray[ heap[ i ] ] ) == 0 ) { a[ j++ ] = heap[ i ]; if ( l == -1 ) l = i * 2 + 1; // If this is the first time in this level, set l r = Math.min( size, i * 2 + 3 ); // Update r, but do not go beyond size } } return j; } } fastutil-7.1.0/drv/Set.drv0000664000000000000000000000335513050701620014115 0ustar rootroot/* * Copyright (C) 2002-2016 Sebastiano Vigna * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package PACKAGE; import java.util.Set; /** A type-specific {@link Set}; provides some additional methods that use polymorphism to avoid (un)boxing. * *

Additionally, this interface strengthens (again) {@link #iterator()}. * * @see Set */ public interface SET KEY_GENERIC extends COLLECTION KEY_GENERIC, Set { /** Returns a type-specific iterator on the elements of this set. * *
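 * <P>For example (a sketch assuming the int specialization and a hash-based implementation such as IntOpenHashSet):
 * <pre>
 * IntSet s = new IntOpenHashSet();
 * s.add(1); s.add(2);
 * int sum = 0;
 * for (IntIterator i = s.iterator(); i.hasNext(); ) sum += i.nextInt(); // no (un)boxing
 * </pre>
 *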

Note that this specification strengthens the one given in {@link java.lang.Iterable#iterator()}, * which was already strengthened in the corresponding type-specific class, * but was weakened by the fact that this interface extends {@link Set}. * * @return a type-specific iterator on the elements of this set. */ KEY_ITERATOR KEY_GENERIC iterator(); /** Removes an element from this set. * *
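 * <P>A sketch (assuming the int specialization):
 * <pre>
 * IntSet s = new IntOpenHashSet();
 * s.add(42);
 * s.remove(42); // removes the element 42 (for sets there is no index-based remove())
 * s.rem(24);    // the collection-level method; returns false, as 24 is not present
 * </pre>
 *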

Note that the corresponding method of the type-specific collection is rem(). * This unfortunate situation is caused by the clash * with the similarly named index-based method in the {@link java.util.List} interface. * * @see java.util.Collection#remove(Object) */ public boolean remove( KEY_TYPE k ); } fastutil-7.1.0/drv/Sets.drv0000664000000000000000000005542013050701620014300 0ustar rootroot/* * Copyright (C) 2002-2016 Sebastiano Vigna * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package PACKAGE; import java.util.Collection; import java.util.Set; /** A class providing static methods and objects that do useful things with type-specific sets. * * @see java.util.Collections */ public class SETS { private SETS() {} /** An immutable class representing the empty set and implementing a type-specific set interface. * *

This class may be useful to implement your own in case you subclass * a type-specific set. */ public static class EmptySet KEY_GENERIC extends COLLECTIONS.EmptyCollection KEY_GENERIC implements SET KEY_GENERIC, java.io.Serializable, Cloneable { private static final long serialVersionUID = -7046029254386353129L; protected EmptySet() {} public boolean remove( KEY_TYPE ok ) { throw new UnsupportedOperationException(); } public Object clone() { return EMPTY_SET; } @SuppressWarnings("rawtypes") public boolean equals( final Object o ) { return o instanceof Set && ((Set)o).isEmpty(); } private Object readResolve() { return EMPTY_SET; } } /** An empty set (immutable). It is serializable and cloneable. */ SUPPRESS_WARNINGS_KEY_RAWTYPES public static final EmptySet EMPTY_SET = new EmptySet(); #if KEYS_REFERENCE /** Return an empty set (immutable). It is serializable and cloneable. * *
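 * <P>A sketch (assuming the object specialization, ObjectSets, in which this method is generated):
 * <pre>
 * ObjectSet<String> s = ObjectSets.emptySet(); // typesafe, no unchecked warning
 * s.isEmpty();                                 // always true; the set is immutable
 * </pre>
 *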

This method provides a typesafe access to {@link #EMPTY_SET}. * @return an empty set (immutable). */ @SuppressWarnings("unchecked") public static KEY_GENERIC SET KEY_GENERIC emptySet() { return EMPTY_SET; } #endif /** An immutable class representing a type-specific singleton set. * *
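 * <P>Instances are normally obtained through the singleton() factory methods below. A sketch
 * (assuming the int specialization, IntSets):
 * <pre>
 * IntSet s = IntSets.singleton(42);
 * s.contains(42); // true, no boxing
 * s.add(7);       // throws UnsupportedOperationException: the set is immutable
 * </pre>
 *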

This class may be useful to implement your own in case you subclass * a type-specific set. */ public static class Singleton KEY_GENERIC extends ABSTRACT_SET KEY_GENERIC implements java.io.Serializable, Cloneable { private static final long serialVersionUID = -7046029254386353129L; protected final KEY_GENERIC_TYPE element; protected Singleton( final KEY_GENERIC_TYPE element ) { this.element = element; } public boolean add( final KEY_GENERIC_TYPE k ) { throw new UnsupportedOperationException(); } public boolean contains( final KEY_TYPE k ) { return KEY_EQUALS( k, element ); } public boolean addAll( final Collection c ) { throw new UnsupportedOperationException(); } public boolean removeAll( final Collection c ) { throw new UnsupportedOperationException(); } public boolean retainAll( final Collection c ) { throw new UnsupportedOperationException(); } #if KEYS_PRIMITIVE /* Slightly optimized w.r.t. the one in ABSTRACT_SET. */ public KEY_TYPE[] TO_KEY_ARRAY() { KEY_TYPE a[] = new KEY_TYPE[ 1 ]; a[ 0 ] = element; return a; } public boolean addAll( final COLLECTION c ) { throw new UnsupportedOperationException(); } public boolean removeAll( final COLLECTION c ) { throw new UnsupportedOperationException(); } public boolean retainAll( final COLLECTION c ) { throw new UnsupportedOperationException(); } #endif public KEY_LIST_ITERATOR KEY_GENERIC iterator() { return ITERATORS.singleton( element ); } public int size() { return 1; } public Object clone() { return this; } } #if ! KEY_CLASS_Reference /** Returns a type-specific immutable set containing only the specified element. The returned set is serializable and cloneable. * * @param element the only element of the returned set. * @return a type-specific immutable set containing just element. */ public static KEY_GENERIC SET KEY_GENERIC singleton( final KEY_GENERIC_TYPE element ) { return new Singleton KEY_GENERIC( element ); } #endif #if ! KEY_CLASS_Object /** Returns a type-specific immutable set containing only the specified element. The returned set is serializable and cloneable. * * @param element the only element of the returned set. * @return a type-specific immutable set containing just element. */ public static KEY_GENERIC SET KEY_GENERIC singleton( final KEY_GENERIC_CLASS element ) { return new Singleton KEY_GENERIC( KEY_CLASS2TYPE( element ) ); } #endif /** A synchronized wrapper class for sets. */ public static class SynchronizedSet KEY_GENERIC extends COLLECTIONS.SynchronizedCollection KEY_GENERIC implements SET KEY_GENERIC, java.io.Serializable { private static final long serialVersionUID = -7046029254386353129L; protected SynchronizedSet( final SET KEY_GENERIC s, final Object sync ) { super( s, sync ); } protected SynchronizedSet( final SET KEY_GENERIC s ) { super( s ); } public boolean remove( final KEY_TYPE k ) { synchronized( sync ) { return collection.remove( KEY2OBJ( k ) ); } } public boolean equals( final Object o ) { synchronized( sync ) { return collection.equals( o ); } } public int hashCode() { synchronized( sync ) { return collection.hashCode(); } } } /** Returns a synchronized type-specific set backed by the given type-specific set. * * @param s the set to be wrapped in a synchronized set. * @return a synchronized view of the specified set. 
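 * <P>A sketch (assuming the int specialization):
 * <pre>
 * IntSet s = IntSets.synchronize(new IntOpenHashSet());
 * s.add(1); // each call is synchronized on the wrapper itself (or on the object given to the two-argument variant)
 * </pre>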
* @see java.util.Collections#synchronizedSet(Set) */ public static KEY_GENERIC SET KEY_GENERIC synchronize( final SET KEY_GENERIC s ) { return new SynchronizedSet KEY_GENERIC( s ); } /** Returns a synchronized type-specific set backed by the given type-specific set, using an assigned object to synchronize. * * @param s the set to be wrapped in a synchronized set. * @param sync an object that will be used to synchronize the access to the set. * @return a synchronized view of the specified set. * @see java.util.Collections#synchronizedSet(Set) */ public static KEY_GENERIC SET KEY_GENERIC synchronize( final SET KEY_GENERIC s, final Object sync ) { return new SynchronizedSet KEY_GENERIC( s, sync ); } /** An unmodifiable wrapper class for sets. */ public static class UnmodifiableSet KEY_GENERIC extends COLLECTIONS.UnmodifiableCollection KEY_GENERIC implements SET KEY_GENERIC, java.io.Serializable { private static final long serialVersionUID = -7046029254386353129L; protected UnmodifiableSet( final SET KEY_GENERIC s ) { super( s ); } public boolean remove( final KEY_TYPE k ) { throw new UnsupportedOperationException(); } public boolean equals( final Object o ) { return collection.equals( o ); } public int hashCode() { return collection.hashCode(); } } /** Returns an unmodifiable type-specific set backed by the given type-specific set. * * @param s the set to be wrapped in an unmodifiable set. * @return an unmodifiable view of the specified set. * @see java.util.Collections#unmodifiableSet(Set) */ public static KEY_GENERIC SET KEY_GENERIC unmodifiable( final SET KEY_GENERIC s ) { return new UnmodifiableSet KEY_GENERIC( s ); } #ifdef TEST private static KEY_TYPE genKey() { #if KEY_CLASS_Byte || KEY_CLASS_Short || KEY_CLASS_Character return (KEY_TYPE)(r.nextInt()); #elif KEYS_PRIMITIVE return r.NEXT_KEY(); #elif KEY_CLASS_Object return Integer.toBinaryString( r.nextInt() ); #else return new java.io.Serializable() {}; #endif } private static void test() { int n = 100; int c; KEY_TYPE k = genKey(); Singleton m = new Singleton( k ); Set t = java.util.Collections.singleton( KEY2OBJ( k ) ); long ms; boolean mThrowsIllegal, tThrowsIllegal, mThrowsNoElement, tThrowsNoElement, mThrowsIndex, tThrowsIndex, mThrowsUnsupp, tThrowsUnsupp; boolean rt = false, rm = false; /* Now we check that m and t are equal. */ if ( !m.equals( t ) || ! t.equals( m ) ) System.err.println("m: " + m + " t: " + t); ensure( m.equals( t ), "Error (" + seed + "): ! m.equals( t ) at start" ); ensure( t.equals( m ), "Error (" + seed + "): ! t.equals( m ) at start" ); /* Now we check that m actually holds that data. */ for(java.util.Iterator i=t.iterator(); i.hasNext(); ) { ensure( m.contains( i.next() ), "Error (" + seed + "): m and t differ on an entry after insertion (iterating on t)" ); } /* Now we check that m actually holds that data, but iterating on m. */ for(java.util.Iterator i=m.iterator(); i.hasNext(); ) { ensure( t.contains( i.next() ), "Error (" + seed + "): m and t differ on an entry after insertion (iterating on m)" ); } /* Now we check that inquiries about random data give the same answer in m and t. For m we use the polymorphic method. 
*/ for(int i=0; i 1 ) r = new java.util.Random( seed = Long.parseLong( arg[ 1 ] ) ); try { test(); } catch( Throwable e ) { e.printStackTrace( System.err ); System.err.println( "seed: " + seed ); } } #endif } fastutil-7.1.0/drv/SortedMap.drv0000664000000000000000000001404513050701620015256 0ustar rootroot/* * Copyright (C) 2002-2016 Sebastiano Vigna * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package PACKAGE; import VALUE_PACKAGE.VALUE_COLLECTION; import it.unimi.dsi.fastutil.objects.ObjectSortedSet; import it.unimi.dsi.fastutil.objects.ObjectBidirectionalIterator; import java.util.Map; import java.util.SortedMap; #if KEYS_REFERENCE import java.util.Comparator; #endif /** A type-specific {@link SortedMap}; provides some additional methods that use polymorphism to avoid (un)boxing. * *

Additionally, this interface strengthens {@link #entrySet()}, * {@link #keySet()}, {@link #values()}, * {@link #comparator()}, {@link SortedMap#subMap(Object,Object)}, {@link SortedMap#headMap(Object)} and {@link SortedMap#tailMap(Object)}. * * @see SortedMap */ public interface SORTED_MAP KEY_VALUE_GENERIC extends MAP KEY_VALUE_GENERIC, SortedMap { /** A sorted entry set providing fast iteration. * *
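 * <P>A sketch (assuming the int-to-int specialization; whether a given map exposes this interface
 * should be checked with instanceof, as below):
 * <pre>
 * Int2IntSortedMap m = new Int2IntRBTreeMap();
 * m.put(1, 10); m.put(2, 20);
 * ObjectSortedSet<Int2IntMap.Entry> es = m.int2IntEntrySet();
 * if (es instanceof Int2IntSortedMap.FastSortedEntrySet) {
 *     // the iterator may return the same (suitably mutated) entry at each step
 *     ObjectIterator<Int2IntMap.Entry> i = ((Int2IntSortedMap.FastSortedEntrySet) es).fastIterator();
 *     while (i.hasNext()) System.out.println(i.next().getIntKey());
 * }
 * </pre>
 *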

In some cases (e.g., hash-based classes) iteration over an entry set requires the creation * of a large number of entry objects. Some fastutil * maps might return {@linkplain #entrySet() entry set} objects of type FastSortedEntrySet: in this case, {@link #fastIterator() fastIterator()} * will return an iterator that is guaranteed not to create a large number of objects, possibly * by returning always the same entry (of course, mutated). */ public interface FastSortedEntrySet KEY_VALUE_GENERIC extends ObjectSortedSet, FastEntrySet KEY_VALUE_GENERIC { /** Returns a fast iterator over this sorted entry set; the iterator might return always the same entry object, suitably mutated. * * @return a fast iterator over this sorted entry set; the iterator might return always the same entry object, suitably mutated. */ public ObjectBidirectionalIterator fastIterator( MAP.Entry KEY_VALUE_GENERIC from ); } /** Returns a sorted-set view of the mappings contained in this map. * Note that this specification strengthens the one given in the * corresponding type-specific unsorted map. * * @return a sorted-set view of the mappings contained in this map. * @see Map#entrySet() */ ObjectSortedSet> entrySet(); /** Returns a type-specific sorted-set view of the mappings contained in this map. * Note that this specification strengthens the one given in the * corresponding type-specific unsorted map. * * @return a type-specific sorted-set view of the mappings contained in this map. * @see #entrySet() */ ObjectSortedSet ENTRYSET(); /** Returns a sorted-set view of the keys contained in this map. * Note that this specification strengthens the one given in the * corresponding type-specific unsorted map. * * @return a sorted-set view of the keys contained in this map. * @see Map#keySet() */ SORTED_SET KEY_GENERIC keySet(); /** Returns a set view of the values contained in this map. *

Note that this specification strengthens the one given in {@link Map#values()}, * which was already strengthened in the corresponding type-specific class, * but was weakened by the fact that this interface extends {@link SortedMap}. * * @return a set view of the values contained in this map. * @see Map#values() */ VALUE_COLLECTION VALUE_GENERIC values(); /** Returns the comparator associated with this sorted set, or null if it uses its keys' natural ordering. * *

Note that this specification strengthens the one given in {@link SortedMap#comparator()}. * * @see SortedMap#comparator() */ KEY_COMPARATOR KEY_SUPER_GENERIC comparator(); /** Returns a view of the portion of this sorted map whose keys range from fromKey, inclusive, to toKey, exclusive. * *

Note that this specification strengthens the one given in {@link SortedMap#subMap(Object,Object)}. * * @see SortedMap#subMap(Object,Object) */ SORTED_MAP KEY_VALUE_GENERIC subMap(KEY_GENERIC_CLASS fromKey, KEY_GENERIC_CLASS toKey); /** Returns a view of the portion of this sorted map whose keys are strictly less than toKey. * *
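 * <P>A sketch (assuming the int-to-int specialization; the returned views are type-specific, so no cast is needed):
 * <pre>
 * Int2IntSortedMap m = new Int2IntRBTreeMap();
 * m.put(1, 10); m.put(5, 50); m.put(9, 90);
 * Int2IntSortedMap head = m.headMap(5); // keys { 1 }
 * Int2IntSortedMap tail = m.tailMap(5); // keys { 5, 9 }
 * </pre>
 *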

Note that this specification strengthens the one given in {@link SortedMap#headMap(Object)}. * * @see SortedMap#headMap(Object) */ SORTED_MAP KEY_VALUE_GENERIC headMap(KEY_GENERIC_CLASS toKey); /** Returns a view of the portion of this sorted map whose keys are greater than or equal to fromKey. * *

Note that this specification strengthens the one given in {@link SortedMap#tailMap(Object)}. * * @see SortedMap#tailMap(Object) */ SORTED_MAP KEY_VALUE_GENERIC tailMap(KEY_GENERIC_CLASS fromKey); #if KEYS_PRIMITIVE /** Returns a view of the portion of this sorted map whose keys range from fromKey, inclusive, to toKey, exclusive. * @see SortedMap#subMap(Object,Object) */ SORTED_MAP KEY_VALUE_GENERIC subMap(KEY_TYPE fromKey, KEY_TYPE toKey); /** Returns a view of the portion of this sorted map whose keys are strictly less than toKey. * @see SortedMap#headMap(Object) */ SORTED_MAP KEY_VALUE_GENERIC headMap(KEY_TYPE toKey); /** Returns a view of the portion of this sorted map whose keys are greater than or equal to fromKey. * @see SortedMap#tailMap(Object) */ SORTED_MAP KEY_VALUE_GENERIC tailMap(KEY_TYPE fromKey); /** * @see SortedMap#firstKey() */ KEY_TYPE FIRST_KEY(); /** * @see SortedMap#lastKey() */ KEY_TYPE LAST_KEY(); #endif } fastutil-7.1.0/drv/SortedMaps.drv0000664000000000000000000012251113050701620015437 0ustar rootroot/* * Copyright (C) 2002-2016 Sebastiano Vigna * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package PACKAGE; import it.unimi.dsi.fastutil.objects.ObjectSortedSet; import it.unimi.dsi.fastutil.objects.ObjectSortedSets; import java.util.Comparator; import java.util.Map; import java.util.SortedMap; import java.util.NoSuchElementException; /** A class providing static methods and objects that do useful things with type-specific sorted maps. * * @see java.util.Collections */ public class SORTED_MAPS { private SORTED_MAPS() {} /** Returns a comparator for entries based on a given comparator on keys. * * @param comparator a comparator on keys. * @return the associated comparator on entries. */ public static KEY_GENERIC Comparator> entryComparator( final KEY_COMPARATOR KEY_GENERIC comparator ) { return new Comparator>() { public int compare( Map.Entry x, Map.Entry y ) { return comparator.compare( x.getKey(), y.getKey() ); } }; } /** An immutable class representing an empty type-specific sorted map. * *

This class may be useful to implement your own in case you subclass * a type-specific sorted map. */ public static class EmptySortedMap KEY_VALUE_GENERIC extends MAPS.EmptyMap KEY_VALUE_GENERIC implements SORTED_MAP KEY_VALUE_GENERIC, java.io.Serializable, Cloneable { private static final long serialVersionUID = -7046029254386353129L; protected EmptySortedMap() {} public KEY_COMPARATOR KEY_SUPER_GENERIC comparator() { return null; } @SuppressWarnings("unchecked") public ObjectSortedSet ENTRYSET() { return ObjectSortedSets.EMPTY_SET; } @SuppressWarnings("unchecked") public ObjectSortedSet> entrySet() { return ObjectSortedSets.EMPTY_SET; } SUPPRESS_WARNINGS_KEY_UNCHECKED public SORTED_SET KEY_GENERIC keySet() { return SORTED_SETS.EMPTY_SET; } SUPPRESS_WARNINGS_KEY_VALUE_UNCHECKED public SORTED_MAP KEY_VALUE_GENERIC subMap( final KEY_GENERIC_TYPE from, final KEY_GENERIC_TYPE to ) { return EMPTY_MAP; } SUPPRESS_WARNINGS_KEY_VALUE_UNCHECKED public SORTED_MAP KEY_VALUE_GENERIC headMap( final KEY_GENERIC_TYPE to ) { return EMPTY_MAP; } SUPPRESS_WARNINGS_KEY_VALUE_UNCHECKED public SORTED_MAP KEY_VALUE_GENERIC tailMap( final KEY_GENERIC_TYPE from ) { return EMPTY_MAP; } public KEY_GENERIC_TYPE FIRST_KEY() { throw new NoSuchElementException(); } public KEY_GENERIC_TYPE LAST_KEY() { throw new NoSuchElementException(); } #if KEYS_PRIMITIVE /** {@inheritDoc} * @deprecated Please use the corresponding type-specific method instead. */ @Deprecated public SORTED_MAP KEY_VALUE_GENERIC headMap( KEY_GENERIC_CLASS oto ) { return headMap( KEY_CLASS2TYPE( oto ) ); } /** {@inheritDoc} * @deprecated Please use the corresponding type-specific method instead. */ @Deprecated public SORTED_MAP KEY_VALUE_GENERIC tailMap( KEY_GENERIC_CLASS ofrom ) { return tailMap( KEY_CLASS2TYPE( ofrom ) ); } /** {@inheritDoc} * @deprecated Please use the corresponding type-specific method instead. */ @Deprecated public SORTED_MAP KEY_VALUE_GENERIC subMap( KEY_GENERIC_CLASS ofrom, KEY_GENERIC_CLASS oto ) { return subMap( KEY_CLASS2TYPE( ofrom ), KEY_CLASS2TYPE( oto ) ); } /** {@inheritDoc} * @deprecated Please use the corresponding type-specific method instead. */ @Deprecated public KEY_GENERIC_CLASS firstKey() { return KEY2OBJ( FIRST_KEY() ); } /** {@inheritDoc} * @deprecated Please use the corresponding type-specific method instead. */ @Deprecated public KEY_GENERIC_CLASS lastKey() { return KEY2OBJ( LAST_KEY() ); } #endif } /** An empty sorted map (immutable). It is serializable and cloneable. */ SUPPRESS_WARNINGS_KEY_VALUE_RAWTYPES public static final EmptySortedMap EMPTY_MAP = new EmptySortedMap(); #if KEYS_REFERENCE || VALUES_REFERENCE /** Return an empty sorted map (immutable). It is serializable and cloneable. * *

This method provides a typesafe access to {@link #EMPTY_MAP}. * @return an empty sorted map (immutable). */ @SuppressWarnings("unchecked") public static KEY_VALUE_GENERIC SORTED_MAP KEY_VALUE_GENERIC emptyMap() { return EMPTY_MAP; } #endif /** An immutable class representing a type-specific singleton sorted map. * *

This class may be useful to implement your own in case you subclass * a type-specific sorted map. */ public static class Singleton KEY_VALUE_GENERIC extends MAPS.Singleton KEY_VALUE_GENERIC implements SORTED_MAP KEY_VALUE_GENERIC, java.io.Serializable, Cloneable { private static final long serialVersionUID = -7046029254386353129L; protected final KEY_COMPARATOR KEY_SUPER_GENERIC comparator; protected Singleton( final KEY_GENERIC_TYPE key, final VALUE_GENERIC_TYPE value, KEY_COMPARATOR KEY_SUPER_GENERIC comparator ) { super( key, value ); this.comparator = comparator; } protected Singleton( final KEY_GENERIC_TYPE key, final VALUE_GENERIC_TYPE value ) { this( key, value, null ); } SUPPRESS_WARNINGS_KEY_UNCHECKED final int compare( final KEY_GENERIC_TYPE k1, final KEY_GENERIC_TYPE k2 ) { return comparator == null ? KEY_CMP( k1, k2 ) : comparator.compare( k1, k2 ); } public KEY_COMPARATOR KEY_SUPER_GENERIC comparator() { return comparator; } SUPPRESS_WARNINGS_KEY_UNCHECKED public ObjectSortedSet ENTRYSET() { if ( entries == null ) entries = ObjectSortedSets.singleton( (MAP.Entry KEY_VALUE_GENERIC)new SingletonEntry(), (Comparator)entryComparator( comparator ) ); return (ObjectSortedSet)entries; } @SuppressWarnings({ "rawtypes", "unchecked" }) public ObjectSortedSet> entrySet() { return (ObjectSortedSet)ENTRYSET(); } public SORTED_SET KEY_GENERIC keySet() { if ( keys == null ) keys = SORTED_SETS.singleton( key, comparator ); return (SORTED_SET KEY_GENERIC)keys; } SUPPRESS_WARNINGS_KEY_VALUE_UNCHECKED public SORTED_MAP KEY_VALUE_GENERIC subMap( final KEY_GENERIC_TYPE from, final KEY_GENERIC_TYPE to ) { if ( compare( from, key ) <= 0 && compare( key, to ) < 0 ) return this; return EMPTY_MAP; } SUPPRESS_WARNINGS_KEY_VALUE_UNCHECKED public SORTED_MAP KEY_VALUE_GENERIC headMap( final KEY_GENERIC_TYPE to ) { if ( compare( key, to ) < 0 ) return this; return EMPTY_MAP; } SUPPRESS_WARNINGS_KEY_VALUE_UNCHECKED public SORTED_MAP KEY_VALUE_GENERIC tailMap( final KEY_GENERIC_TYPE from ) { if ( compare( from, key ) <= 0 ) return this; return EMPTY_MAP; } public KEY_GENERIC_TYPE FIRST_KEY() { return key; } public KEY_GENERIC_TYPE LAST_KEY() { return key; } #if KEYS_PRIMITIVE /** {@inheritDoc} * @deprecated Please use the corresponding type-specific method instead. */ @Deprecated public SORTED_MAP KEY_VALUE_GENERIC headMap( KEY_GENERIC_CLASS oto ) { return headMap( KEY_CLASS2TYPE( oto ) ); } /** {@inheritDoc} * @deprecated Please use the corresponding type-specific method instead. */ @Deprecated public SORTED_MAP KEY_VALUE_GENERIC tailMap( KEY_GENERIC_CLASS ofrom ) { return tailMap( KEY_CLASS2TYPE( ofrom ) ); } /** {@inheritDoc} * @deprecated Please use the corresponding type-specific method instead. */ @Deprecated public SORTED_MAP KEY_VALUE_GENERIC subMap( KEY_GENERIC_CLASS ofrom, KEY_GENERIC_CLASS oto ) { return subMap( KEY_CLASS2TYPE( ofrom ), KEY_CLASS2TYPE( oto ) ); } /** {@inheritDoc} * @deprecated Please use the corresponding type-specific method instead. */ @Deprecated public KEY_GENERIC_CLASS firstKey() { return KEY2OBJ( FIRST_KEY() ); } /** {@inheritDoc} * @deprecated Please use the corresponding type-specific method instead. */ @Deprecated public KEY_GENERIC_CLASS lastKey() { return KEY2OBJ( LAST_KEY() ); } #endif } /** Returns a type-specific immutable sorted map containing only the specified pair. The returned sorted map is serializable and cloneable. * *

Note that albeit the returned map is immutable, its default return value may be changed. * * @param key the only key of the returned sorted map. * @param value the only value of the returned sorted map. * @return a type-specific immutable sorted map containing just the pair <key,value>. */ public static KEY_VALUE_GENERIC SORTED_MAP KEY_VALUE_GENERIC singleton( final KEY_GENERIC_CLASS key, VALUE_GENERIC_CLASS value ) { return new Singleton KEY_VALUE_GENERIC( KEY_CLASS2TYPE( key ), VALUE_CLASS2TYPE( value ) ); } /** RETURNS a type-specific immutable sorted map containing only the specified pair. The returned sorted map is serializable and cloneable. * *

Note that albeit the returned map is immutable, its default return value may be changed. * * @param key the only key of the returned sorted map. * @param value the only value of the returned sorted map. * @param comparator the comparator to use in the returned sorted map. * @return a type-specific immutable sorted map containing just the pair <key,value>. */ public static KEY_VALUE_GENERIC SORTED_MAP KEY_VALUE_GENERIC singleton( final KEY_GENERIC_CLASS key, VALUE_GENERIC_CLASS value, KEY_COMPARATOR KEY_SUPER_GENERIC comparator ) { return new Singleton KEY_VALUE_GENERIC( KEY_CLASS2TYPE( key ), VALUE_CLASS2TYPE( value ), comparator ); } #if KEYS_PRIMITIVE || VALUES_PRIMITIVE /** Returns a type-specific immutable sorted map containing only the specified pair. The returned sorted map is serializable and cloneable. * *
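 * <P>A sketch (assuming the int-to-int specialization, Int2IntSortedMaps):
 * <pre>
 * Int2IntSortedMap m = Int2IntSortedMaps.singleton(1, 10);
 * m.get(1);                 // 10
 * m.defaultReturnValue(-1); // allowed, even though the mappings themselves are immutable
 * m.put(2, 20);             // throws UnsupportedOperationException
 * </pre>
 *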

Note that albeit the returned map is immutable, its default return value may be changed. * * @param key the only key of the returned sorted map. * @param value the only value of the returned sorted map. * @return a type-specific immutable sorted map containing just the pair <key,value>. */ public static KEY_VALUE_GENERIC SORTED_MAP KEY_VALUE_GENERIC singleton( final KEY_GENERIC_TYPE key, final VALUE_GENERIC_TYPE value ) { return new Singleton KEY_VALUE_GENERIC( key, value ); } /** Returns a type-specific immutable sorted map containing only the specified pair. The returned sorted map is serializable and cloneable. * *

Note that albeit the returned map is immutable, its default return value may be changed. * * @param key the only key of the returned sorted map. * @param value the only value of the returned sorted map. * @param comparator the comparator to use in the returned sorted map. * @return a type-specific immutable sorted map containing just the pair <key,value>. */ public static KEY_VALUE_GENERIC SORTED_MAP KEY_VALUE_GENERIC singleton( final KEY_GENERIC_TYPE key, final VALUE_GENERIC_TYPE value, KEY_COMPARATOR KEY_SUPER_GENERIC comparator ) { return new Singleton KEY_VALUE_GENERIC( key, value, comparator ); } #endif /** A synchronized wrapper class for sorted maps. */ public static class SynchronizedSortedMap KEY_VALUE_GENERIC extends MAPS.SynchronizedMap KEY_VALUE_GENERIC implements SORTED_MAP KEY_VALUE_GENERIC, java.io.Serializable { private static final long serialVersionUID = -7046029254386353129L; protected final SORTED_MAP KEY_VALUE_GENERIC sortedMap; protected SynchronizedSortedMap( final SORTED_MAP KEY_VALUE_GENERIC m, final Object sync ) { super( m, sync ); sortedMap = m; } protected SynchronizedSortedMap( final SORTED_MAP KEY_VALUE_GENERIC m ) { super( m ); sortedMap = m; } public KEY_COMPARATOR KEY_SUPER_GENERIC comparator() { synchronized( sync ) { return sortedMap.comparator(); } } public ObjectSortedSet ENTRYSET() { if ( entries == null ) entries = ObjectSortedSets.synchronize( sortedMap.ENTRYSET(), sync ); return (ObjectSortedSet)entries; } @SuppressWarnings({ "rawtypes", "unchecked" }) public ObjectSortedSet> entrySet() { return (ObjectSortedSet)ENTRYSET(); } public SORTED_SET KEY_GENERIC keySet() { if ( keys == null ) keys = SORTED_SETS.synchronize( sortedMap.keySet(), sync ); return (SORTED_SET KEY_GENERIC)keys; } public SORTED_MAP KEY_VALUE_GENERIC subMap( final KEY_GENERIC_TYPE from, final KEY_GENERIC_TYPE to ) { return new SynchronizedSortedMap KEY_VALUE_GENERIC( sortedMap.subMap( from, to ), sync ); } public SORTED_MAP KEY_VALUE_GENERIC headMap( final KEY_GENERIC_TYPE to ) { return new SynchronizedSortedMap KEY_VALUE_GENERIC( sortedMap.headMap( to ), sync ); } public SORTED_MAP KEY_VALUE_GENERIC tailMap( final KEY_GENERIC_TYPE from ) { return new SynchronizedSortedMap KEY_VALUE_GENERIC( sortedMap.tailMap( from ), sync ); } public KEY_GENERIC_TYPE FIRST_KEY() { synchronized( sync ) { return sortedMap.FIRST_KEY(); } } public KEY_GENERIC_TYPE LAST_KEY() { synchronized( sync ) { return sortedMap.LAST_KEY(); } } #if KEYS_PRIMITIVE public KEY_GENERIC_CLASS firstKey() { synchronized( sync ) { return sortedMap.firstKey(); } } public KEY_GENERIC_CLASS lastKey() { synchronized( sync ) { return sortedMap.lastKey(); } } public SORTED_MAP KEY_VALUE_GENERIC subMap( final KEY_GENERIC_CLASS from, final KEY_GENERIC_CLASS to ) { return new SynchronizedSortedMap KEY_VALUE_GENERIC( sortedMap.subMap( from, to ), sync ); } public SORTED_MAP KEY_VALUE_GENERIC headMap( final KEY_GENERIC_CLASS to ) { return new SynchronizedSortedMap KEY_VALUE_GENERIC( sortedMap.headMap( to ), sync ); } public SORTED_MAP KEY_VALUE_GENERIC tailMap( final KEY_GENERIC_CLASS from ) { return new SynchronizedSortedMap KEY_VALUE_GENERIC( sortedMap.tailMap( from ), sync ); } #endif } /** Returns a synchronized type-specific sorted map backed by the given type-specific sorted map. * * @param m the sorted map to be wrapped in a synchronized sorted map. * @return a synchronized view of the specified sorted map. 
* @see java.util.Collections#synchronizedSortedMap(SortedMap) */ public static KEY_VALUE_GENERIC SORTED_MAP KEY_VALUE_GENERIC synchronize( final SORTED_MAP KEY_VALUE_GENERIC m ) { return new SynchronizedSortedMap KEY_VALUE_GENERIC( m ); } /** Returns a synchronized type-specific sorted map backed by the given type-specific sorted map, using an assigned object to synchronize. * * @param m the sorted map to be wrapped in a synchronized sorted map. * @param sync an object that will be used to synchronize the access to the sorted sorted map. * @return a synchronized view of the specified sorted map. * @see java.util.Collections#synchronizedSortedMap(SortedMap) */ public static KEY_VALUE_GENERIC SORTED_MAP KEY_VALUE_GENERIC synchronize( final SORTED_MAP KEY_VALUE_GENERIC m, final Object sync ) { return new SynchronizedSortedMap KEY_VALUE_GENERIC( m, sync ); } /** An unmodifiable wrapper class for sorted maps. */ public static class UnmodifiableSortedMap KEY_VALUE_GENERIC extends MAPS.UnmodifiableMap KEY_VALUE_GENERIC implements SORTED_MAP KEY_VALUE_GENERIC, java.io.Serializable { private static final long serialVersionUID = -7046029254386353129L; protected final SORTED_MAP KEY_VALUE_GENERIC sortedMap; protected UnmodifiableSortedMap( final SORTED_MAP KEY_VALUE_GENERIC m ) { super( m ); sortedMap = m; } public KEY_COMPARATOR KEY_SUPER_GENERIC comparator() { return sortedMap.comparator(); } public ObjectSortedSet ENTRYSET() { if ( entries == null ) entries = ObjectSortedSets.unmodifiable( sortedMap.ENTRYSET() ); return (ObjectSortedSet)entries; } @SuppressWarnings({ "rawtypes", "unchecked" }) public ObjectSortedSet> entrySet() { return (ObjectSortedSet)ENTRYSET(); } public SORTED_SET KEY_GENERIC keySet() { if ( keys == null ) keys = SORTED_SETS.unmodifiable( sortedMap.keySet() ); return (SORTED_SET KEY_GENERIC)keys; } public SORTED_MAP KEY_VALUE_GENERIC subMap( final KEY_GENERIC_TYPE from, final KEY_GENERIC_TYPE to ) { return new UnmodifiableSortedMap KEY_VALUE_GENERIC( sortedMap.subMap( from, to ) ); } public SORTED_MAP KEY_VALUE_GENERIC headMap( final KEY_GENERIC_TYPE to ) { return new UnmodifiableSortedMap KEY_VALUE_GENERIC( sortedMap.headMap( to ) ); } public SORTED_MAP KEY_VALUE_GENERIC tailMap( final KEY_GENERIC_TYPE from ) { return new UnmodifiableSortedMap KEY_VALUE_GENERIC( sortedMap.tailMap( from ) ); } public KEY_GENERIC_TYPE FIRST_KEY() { return sortedMap.FIRST_KEY(); } public KEY_GENERIC_TYPE LAST_KEY() { return sortedMap.LAST_KEY(); } #if KEYS_PRIMITIVE public KEY_GENERIC_CLASS firstKey() { return sortedMap.firstKey(); } public KEY_GENERIC_CLASS lastKey() { return sortedMap.lastKey(); } public SORTED_MAP KEY_VALUE_GENERIC subMap( final KEY_GENERIC_CLASS from, final KEY_GENERIC_CLASS to ) { return new UnmodifiableSortedMap KEY_VALUE_GENERIC( sortedMap.subMap( from, to ) ); } public SORTED_MAP KEY_VALUE_GENERIC headMap( final KEY_GENERIC_CLASS to ) { return new UnmodifiableSortedMap KEY_VALUE_GENERIC( sortedMap.headMap( to ) ); } public SORTED_MAP KEY_VALUE_GENERIC tailMap( final KEY_GENERIC_CLASS from ) { return new UnmodifiableSortedMap KEY_VALUE_GENERIC( sortedMap.tailMap( from ) ); } #endif } /** Returns an unmodifiable type-specific sorted map backed by the given type-specific sorted map. * * @param m the sorted map to be wrapped in an unmodifiable sorted map. * @return an unmodifiable view of the specified sorted map. 
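 * <p>For example (illustrative, assuming the int/int specialization):
 * <pre>
 * Int2IntSortedMap ro = Int2IntSortedMaps.unmodifiable( new Int2IntAVLTreeMap() );
 * // ro.put( 1, 1 ) would throw an UnsupportedOperationException
 * </pre>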
* @see java.util.Collections#unmodifiableSortedMap(SortedMap) */ public static KEY_VALUE_GENERIC SORTED_MAP KEY_VALUE_GENERIC unmodifiable( final SORTED_MAP KEY_VALUE_GENERIC m ) { return new UnmodifiableSortedMap KEY_VALUE_GENERIC( m ); } #if defined(TEST) && ! KEY_CLASS_Reference private static long seed = System.currentTimeMillis(); private static java.util.Random r = new java.util.Random( seed ); private static KEY_TYPE genKey() { #if KEY_CLASS_Byte || KEY_CLASS_Short || KEY_CLASS_Character return (KEY_TYPE)(r.nextInt()); #elif KEYS_PRIMITIVE return r.NEXT_KEY(); #else return Integer.toBinaryString( r.nextInt() ); #endif } private static VALUE_TYPE genValue() { #if VALUE_CLASS_Byte || VALUE_CLASS_Short || VALUE_CLASS_Character return (VALUE_TYPE)(r.nextInt()); #elif VALUES_PRIMITIVE return r.NEXT_VALUE(); #elif !VALUE_CLASS_Reference || KEY_CLASS_Reference return Integer.toBinaryString( r.nextInt() ); #else return new java.io.Serializable() {}; #endif } private static java.text.NumberFormat format = new java.text.DecimalFormat( "#,###.00" ); private static java.text.FieldPosition p = new java.text.FieldPosition( 0 ); private static String format( double d ) { StringBuffer s = new StringBuffer(); return format.format( d, s, p ).toString(); } private static void speedTest( int n, boolean comp ) { System.out.println( "There are presently no speed tests for this class." ); } private static boolean valEquals(Object o1, Object o2) { return o1 == null ? o2 == null : o1.equals(o2); } private static void fatal( String msg ) { System.out.println( msg ); System.exit( 1 ); } private static void ensure( boolean cond, String msg ) { if ( cond ) return; fatal( msg ); } private static Object[] k, v, nk; private static KEY_TYPE kt[]; private static KEY_TYPE nkt[]; private static VALUE_TYPE vt[]; private static SORTED_MAP topMap; protected static void testMaps( SORTED_MAP m, SortedMap t, int n, int level ) { long ms; boolean mThrowsIllegal, tThrowsIllegal, mThrowsNoElement, tThrowsNoElement, mThrowsUnsupp, tThrowsUnsupp; Object rt = null, rm = null; if ( level > 1 ) return; /* Now we check that both maps agree on first/last keys. */ mThrowsNoElement = mThrowsIllegal = tThrowsNoElement = tThrowsIllegal = false; try { m.firstKey(); } catch ( java.util.NoSuchElementException e ) { mThrowsNoElement = true; } try { t.firstKey(); } catch ( java.util.NoSuchElementException e ) { tThrowsNoElement = true; } ensure( mThrowsNoElement == tThrowsNoElement, "Error (" + level + ", " + seed + "): firstKey() divergence at start in java.util.NoSuchElementException (" + mThrowsNoElement + ", " + tThrowsNoElement + ")" ); if ( ! mThrowsNoElement ) ensure( t.firstKey().equals( m.firstKey() ), "Error (" + level + ", " + seed + "): m and t differ at start on their first key (" + m.firstKey() + ", " + t.firstKey() +")" ); mThrowsNoElement = mThrowsIllegal = tThrowsNoElement = tThrowsIllegal = false; try { m.lastKey(); } catch ( java.util.NoSuchElementException e ) { mThrowsNoElement = true; } try { t.lastKey(); } catch ( java.util.NoSuchElementException e ) { tThrowsNoElement = true; } ensure( mThrowsNoElement == tThrowsNoElement, "Error (" + level + ", " + seed + "): lastKey() divergence at start in java.util.NoSuchElementException (" + mThrowsNoElement + ", " + tThrowsNoElement + ")" ); if ( ! mThrowsNoElement ) ensure( t.lastKey().equals( m.lastKey() ), "Error (" + level + ", " + seed + "): m and t differ at start on their last key (" + m.lastKey() + ", " + t.lastKey() +")"); /* Now we check that m and t are equal. 
*/ if ( !m.equals( t ) || ! t.equals( m ) ) System.err.println("m: " + m + " t: " + t); ensure( m.equals( t ), "Error (" + level + ", " + seed + "): ! m.equals( t ) at start" ); ensure( t.equals( m ), "Error (" + level + ", " + seed + "): ! t.equals( m ) at start" ); /* Now we check that m actually holds that data. */ for(java.util.Iterator i=t.entrySet().iterator(); i.hasNext(); ) { java.util.Map.Entry e = (java.util.Map.Entry)i.next(); ensure( valEquals(e.getValue(), m.get(e.getKey())), "Error (" + level + ", " + seed + "): m and t differ on an entry ("+e+") after insertion (iterating on t)" ); } /* Now we check that m actually holds that data, but iterating on m. */ for(java.util.Iterator i=m.entrySet().iterator(); i.hasNext(); ) { java.util.Map.Entry e = (java.util.Map.Entry)i.next(); ensure( valEquals(e.getValue(), t.get(e.getKey())), "Error (" + level + ", " + seed + "): m and t differ on an entry ("+e+") after insertion (iterating on m)" ); } /* Now we check that m actually holds the same keys. */ for(java.util.Iterator i=t.keySet().iterator(); i.hasNext(); ) { Object o = i.next(); ensure( m.containsKey(o), "Error (" + level + ", " + seed + "): m and t differ on a key ("+o+") after insertion (iterating on t)" ); ensure( m.keySet().contains(o), "Error (" + level + ", " + seed + "): m and t differ on a key ("+o+", in keySet()) after insertion (iterating on t)" ); } /* Now we check that m actually holds the same keys, but iterating on m. */ for(java.util.Iterator i=m.keySet().iterator(); i.hasNext(); ) { Object o = i.next(); ensure( t.containsKey(o), "Error (" + level + ", " + seed + "): m and t differ on a key after insertion (iterating on m)" ); ensure( t.keySet().contains(o), "Error (" + level + ", " + seed + "): m and t differ on a key (in keySet()) after insertion (iterating on m)" ); } /* Now we check that m actually hold the same values. */ for(java.util.Iterator i=t.values().iterator(); i.hasNext(); ) { Object o = i.next(); ensure( m.containsValue(o), "Error (" + level + ", " + seed + "): m and t differ on a value after insertion (iterating on t)" ); ensure( m.values().contains(o), "Error (" + level + ", " + seed + "): m and t differ on a value (in values()) after insertion (iterating on t)" ); } /* Now we check that m actually hold the same values, but iterating on m. */ for(java.util.Iterator i=m.values().iterator(); i.hasNext(); ) { Object o = i.next(); ensure( t.containsValue(o), "Error (" + level + ", " + seed + "): m and t differ on a value after insertion (iterating on m)"); ensure( t.values().contains(o), "Error (" + level + ", " + seed + "): m and t differ on a value (in values()) after insertion (iterating on m)"); } /* Now we check that inquiries about random data give the same answer in m and t. For m we use the polymorphic method. 
*/ for(int i=0; i 0 ) { badPrevious = true; j.previous(); break; } previous = k; } i = (it.unimi.dsi.fastutil.BidirectionalIterator)((SORTED_SET)m.keySet()).iterator( from ); for( int k = 0; k < 2*n; k++ ) { ensure( i.hasNext() == j.hasNext(), "Error (" + level + ", " + seed + "): divergence in hasNext() (iterator with starting point " + from + ")" ); ensure( i.hasPrevious() == j.hasPrevious() || badPrevious && ( i.hasPrevious() == ( previous != null ) ), "Error (" + level + ", " + seed + "): divergence in hasPrevious() (iterator with starting point " + from + ")" + badPrevious ); if ( r.nextFloat() < .8 && i.hasNext() ) { ensure( ( I = i.next() ).equals( J = j.next() ), "Error (" + level + ", " + seed + "): divergence in next() (" + I + ", " + J + ", iterator with starting point " + from + ")" ); //System.err.println("Done next " + I + " " + J + " " + badPrevious); badPrevious = false; if ( r.nextFloat() < 0.5 ) { } } else if ( !badPrevious && r.nextFloat() < .2 && i.hasPrevious() ) { ensure( ( I = i.previous() ).equals( J = j.previous() ), "Error (" + level + ", " + seed + "): divergence in previous() (" + I + ", " + J + ", iterator with starting point " + from + ")" ); if ( r.nextFloat() < 0.5 ) { } } } } /* Now we check that m actually holds that data. */ ensure( m.equals(t), "Error (" + level + ", " + seed + "): ! m.equals( t ) after iteration" ); ensure( t.equals(m), "Error (" + level + ", " + seed + "): ! t.equals( m ) after iteration" ); /* Now we select a pair of keys and create a submap. */ if ( ! m.isEmpty() ) { java.util.ListIterator i; Object start = m.firstKey(), end = m.firstKey(); for( i = (java.util.ListIterator)m.keySet().iterator(); i.hasNext() && r.nextFloat() < .3; start = end = i.next() ); for( ; i.hasNext() && r.nextFloat() < .95; end = i.next() ); //System.err.println("Checking subMap from " + start + " to " + end + " (level=" + (level+1) + ")..." ); testMaps( (SORTED_MAP)m.subMap( (KEY_CLASS)start, (KEY_CLASS)end ), t.subMap( start, end ), n, level + 1 ); ensure( m.equals(t), "Error (" + level + ", " + seed + "): ! m.equals( t ) after subMap" ); ensure( t.equals(m), "Error (" + level + ", " + seed + "): ! t.equals( m ) after subMap" ); //System.err.println("Checking headMap to " + end + " (level=" + (level+1) + ")..." ); testMaps( (SORTED_MAP)m.headMap( (KEY_CLASS)end ), t.headMap( end ), n, level + 1 ); ensure( m.equals(t), "Error (" + level + ", " + seed + "): ! m.equals( t ) after headMap" ); ensure( t.equals(m), "Error (" + level + ", " + seed + "): ! t.equals( m ) after headMap" ); //System.err.println("Checking tailMap from " + start + " (level=" + (level+1) + ")..." ); testMaps( (SORTED_MAP)m.tailMap( (KEY_CLASS)start ), t.tailMap( start ), n, level + 1 ); ensure( m.equals(t), "Error (" + level + ", " + seed + "): ! m.equals( t ) after tailMap" ); ensure( t.equals(m), "Error (" + level + ", " + seed + "): ! 
t.equals( m ) after tailMap" ); } } private static void test() { int n = 1; k = new Object[n]; v = new Object[n]; nk = new Object[n]; kt = new KEY_TYPE[n]; nkt = new KEY_TYPE[n]; vt = new VALUE_TYPE[n]; for( int i = 0; i < n; i++ ) { #if KEY_CLASS_Object k[i] = kt[i] = genKey(); nk[i] = nkt[i] = genKey(); #else k[i] = new KEY_CLASS( kt[i] = genKey() ); nk[i] = new KEY_CLASS( nkt[i] = genKey() ); #endif #if VALUES_REFERENCE v[i] = vt[i] = genValue(); #else v[i] = new VALUE_CLASS( vt[i] = genValue() ); #endif } SORTED_MAP m = new Singleton( kt[0], vt[0] ); topMap = m; SortedMap t1 = new java.util.TreeMap(); t1.put( k[0], v[0] ); SortedMap t = java.util.Collections.unmodifiableSortedMap( t1 ); testMaps( m, t, n, 0 ); System.out.println("Test OK"); return; } public static void main( String args[] ) { if ( args.length > 1 ) r = new java.util.Random( seed = Long.parseLong( args[ 1 ] ) ); try { test(); } catch( Throwable e ) { e.printStackTrace( System.err ); System.err.println( "seed: " + seed ); } } #endif } fastutil-7.1.0/drv/SortedSet.drv0000664000000000000000000001275613050701620015303 0ustar rootroot/* * Copyright (C) 2002-2016 Sebastiano Vigna * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package PACKAGE; import java.util.SortedSet; import java.util.Collection; /** A type-specific {@link SortedSet}; provides some additional methods that use polymorphism to avoid (un)boxing. * *
 * <p>
Additionally, this interface strengthens {@link #iterator()}, * {@link #comparator()} (for primitive types), {@link SortedSet#subSet(Object,Object)}, * {@link SortedSet#headSet(Object)} and {@link SortedSet#tailSet(Object)}. * * @see SortedSet */ public interface SORTED_SET KEY_GENERIC extends SET KEY_GENERIC, SortedSet { /** Returns a type-specific {@link it.unimi.dsi.fastutil.BidirectionalIterator} on the elements in * this set, starting from a given element of the domain (optional operation). * *
 * <p>
This method returns a type-specific bidirectional iterator with given * starting point. The starting point is any element comparable to the * elements of this set (even if it does not actually belong to the * set). The next element of the returned iterator is the least element of * the set that is greater than the starting point (if there are no * elements greater than the starting point, {@link * it.unimi.dsi.fastutil.BidirectionalIterator#hasNext() hasNext()} will return * false). The previous element of the returned iterator is * the greatest element of the set that is smaller than or equal to the * starting point (if there are no elements smaller than or equal to the * starting point, {@link it.unimi.dsi.fastutil.BidirectionalIterator#hasPrevious() * hasPrevious()} will return false). * *
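 * <p>For example (illustrative; IntAVLTreeSet and IntBidirectionalIterator are the
 * expected int specializations, not defined in this file):
 * <pre>
 * IntSortedSet s = new IntAVLTreeSet( new int[] { 1, 3, 5 } );
 * s.iterator( 2 ).nextInt();     // 3, the least element greater than 2
 * s.iterator( 2 ).previousInt(); // 1, the greatest element not greater than 2
 * </pre>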
 * <p>
Note that passing the last element of the set as starting point and * calling {@link it.unimi.dsi.fastutil.BidirectionalIterator#previous() previous()} you can traverse the * entire set in reverse order. * * @param fromElement an element to start from. * @return a bidirectional iterator on the element in this set, starting at the given element. * @throws UnsupportedOperationException if this set does not support iterators with a starting point. */ KEY_BIDI_ITERATOR KEY_GENERIC iterator( KEY_GENERIC_TYPE fromElement ); /** Returns a type-specific {@link it.unimi.dsi.fastutil.BidirectionalIterator} iterator on the collection. * *
 * <p>
The iterators returned by the {@link #iterator()} method and by this * method are identical; however, using this method you can save a type cast. * * Note that this specification strengthens the one given in the corresponding type-specific * {@link Collection}. * * @deprecated As of fastutil 5, replaced by {@link #iterator()}. */ @Deprecated KEY_BIDI_ITERATOR KEY_GENERIC KEY_ITERATOR_METHOD(); /** Returns a type-specific {@link it.unimi.dsi.fastutil.BidirectionalIterator} on the elements in * this set. * *
 * <p>
This method returns a parameterised bidirectional iterator. The iterator * can, moreover, be safely cast to a type-specific iterator. * * Note that this specification strengthens the one given in the corresponding type-specific * {@link Collection}. * * @return a bidirectional iterator on the elements in this set. */ KEY_BIDI_ITERATOR KEY_GENERIC iterator(); /** Returns a view of the portion of this sorted set whose elements range from fromElement, inclusive, to toElement, exclusive. * *
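 * <p>For example (illustrative, int specialization):
 * <pre>
 * IntSortedSet s = new IntAVLTreeSet( new int[] { 1, 2, 3, 4 } );
 * s.subSet( 2, 4 );  // the view { 2, 3 }
 * s.headSet( 3 );    // the view { 1, 2 }
 * s.tailSet( 3 );    // the view { 3, 4 }
 * </pre>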
 * <p>
Note that this specification strengthens the one given in {@link SortedSet#subSet(Object,Object)}. * * @see SortedSet#subSet(Object,Object) */ SORTED_SET KEY_GENERIC subSet( KEY_GENERIC_CLASS fromElement, KEY_GENERIC_CLASS toElement) ; /** Returns a view of the portion of this sorted set whose elements are strictly less than toElement. * *
 * <p>
Note that this specification strengthens the one given in {@link SortedSet#headSet(Object)}. * * @see SortedSet#headSet(Object) */ SORTED_SET KEY_GENERIC headSet( KEY_GENERIC_CLASS toElement ); /** Returns a view of the portion of this sorted set whose elements are greater than or equal to fromElement. * *
 * <p>
Note that this specification strengthens the one given in {@link SortedSet#tailSet(Object)}. * * @see SortedSet#tailSet(Object) */ SORTED_SET KEY_GENERIC tailSet( KEY_GENERIC_CLASS fromElement ); #if KEYS_PRIMITIVE /** Returns the comparator associated with this sorted set, or null if it uses its elements' natural ordering. * *
 * <p>
Note that this specification strengthens the one given in {@link SortedSet#comparator()}. * * @see SortedSet#comparator() */ KEY_COMPARATOR comparator(); /** * @see SortedSet#subSet(Object,Object) */ SORTED_SET subSet( KEY_TYPE fromElement, KEY_TYPE toElement) ; /** * @see SortedSet#headSet(Object) */ SORTED_SET headSet( KEY_TYPE toElement ); /** * @see SortedSet#tailSet(Object) */ SORTED_SET tailSet( KEY_TYPE fromElement ); /** * @see SortedSet#first() */ KEY_TYPE FIRST(); /** * @see SortedSet#last() */ KEY_TYPE LAST(); #endif } fastutil-7.1.0/drv/SortedSets.drv0000664000000000000000000007753413050701620015473 0ustar rootroot/* * Copyright (C) 2002-2016 Sebastiano Vigna * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package PACKAGE; import java.util.SortedSet; import java.util.NoSuchElementException; #if KEYS_REFERENCE import java.util.Comparator; #endif /** A class providing static methods and objects that do useful things with type-specific sorted sets. * * @see java.util.Collections */ public class SORTED_SETS { private SORTED_SETS() {} /** An immutable class representing the empty sorted set and implementing a type-specific set interface. * *
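 * <p>For example (illustrative, int specialization):
 * <pre>
 * IntSortedSet empty = IntSortedSets.EMPTY_SET;
 * empty.isEmpty();  // true
 * empty.firstInt(); // throws NoSuchElementException
 * </pre>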
 * <p>
This class may be useful to implement your own in case you subclass * a type-specific sorted set. */ public static class EmptySet KEY_GENERIC extends SETS.EmptySet KEY_GENERIC implements SORTED_SET KEY_GENERIC, java.io.Serializable, Cloneable { private static final long serialVersionUID = -7046029254386353129L; protected EmptySet() {} public boolean rem( KEY_TYPE ok ) { throw new UnsupportedOperationException(); } @Deprecated public KEY_BIDI_ITERATOR KEY_GENERIC KEY_ITERATOR_METHOD() { return iterator(); } SUPPRESS_WARNINGS_KEY_UNCHECKED public KEY_BIDI_ITERATOR KEY_GENERIC iterator( KEY_GENERIC_TYPE from ) { return ITERATORS.EMPTY_ITERATOR; } SUPPRESS_WARNINGS_KEY_UNCHECKED public SORTED_SET KEY_GENERIC subSet( KEY_GENERIC_TYPE from, KEY_GENERIC_TYPE to ) { return EMPTY_SET; } SUPPRESS_WARNINGS_KEY_UNCHECKED public SORTED_SET KEY_GENERIC headSet( KEY_GENERIC_TYPE from ) { return EMPTY_SET; } SUPPRESS_WARNINGS_KEY_UNCHECKED public SORTED_SET KEY_GENERIC tailSet( KEY_GENERIC_TYPE to ) { return EMPTY_SET; } public KEY_GENERIC_TYPE FIRST() { throw new NoSuchElementException(); } public KEY_GENERIC_TYPE LAST() { throw new NoSuchElementException(); } public KEY_COMPARATOR KEY_SUPER_GENERIC comparator() { return null; } #if KEYS_PRIMITIVE public SORTED_SET KEY_GENERIC subSet( KEY_GENERIC_CLASS from, KEY_GENERIC_CLASS to ) { return EMPTY_SET; } public SORTED_SET KEY_GENERIC headSet( KEY_GENERIC_CLASS from ) { return EMPTY_SET; } public SORTED_SET KEY_GENERIC tailSet( KEY_GENERIC_CLASS to ) { return EMPTY_SET; } public KEY_GENERIC_CLASS first() { throw new NoSuchElementException(); } public KEY_GENERIC_CLASS last() { throw new NoSuchElementException(); } #endif public Object clone() { return EMPTY_SET; } private Object readResolve() { return EMPTY_SET; } } /** An empty sorted set (immutable). It is serializable and cloneable. * */ SUPPRESS_WARNINGS_KEY_RAWTYPES public static final EmptySet EMPTY_SET = new EmptySet(); #if KEYS_REFERENCE /** Return an empty sorted set (immutable). It is serializable and cloneable. * *
 * <p>
This method provides a typesafe access to {@link #EMPTY_SET}. * @return an empty sorted set (immutable). */ @SuppressWarnings("unchecked") public static KEY_GENERIC SET KEY_GENERIC emptySet() { return EMPTY_SET; } #endif /** A class representing a singleton sorted set. * *
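 * <p>For example (illustrative, int specialization):
 * <pre>
 * IntSortedSet one = IntSortedSets.singleton( 42 );
 * one.contains( 42 ); // true
 * // the set is immutable: attempts to modify it throw UnsupportedOperationException
 * </pre>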
 * <p>
This class may be useful to implement your own in case you subclass * a type-specific sorted set. */ public static class Singleton KEY_GENERIC extends SETS.Singleton KEY_GENERIC implements SORTED_SET KEY_GENERIC, java.io.Serializable, Cloneable { private static final long serialVersionUID = -7046029254386353129L; final KEY_COMPARATOR KEY_SUPER_GENERIC comparator; private Singleton( final KEY_GENERIC_TYPE element, final KEY_COMPARATOR KEY_SUPER_GENERIC comparator ) { super( element ); this.comparator = comparator; } private Singleton( final KEY_GENERIC_TYPE element ) { this( element, null ); } SUPPRESS_WARNINGS_KEY_UNCHECKED final int compare( final KEY_GENERIC_TYPE k1, final KEY_GENERIC_TYPE k2 ) { return comparator == null ? KEY_CMP( k1, k2 ) : comparator.compare( k1, k2 ); } @Deprecated public KEY_BIDI_ITERATOR KEY_GENERIC KEY_ITERATOR_METHOD() { return iterator(); } public KEY_BIDI_ITERATOR KEY_GENERIC iterator( KEY_GENERIC_TYPE from ) { KEY_BIDI_ITERATOR KEY_GENERIC i = iterator(); if ( compare( element, from ) <= 0 ) i.next(); return i; } public KEY_COMPARATOR KEY_SUPER_GENERIC comparator() { return comparator; } SUPPRESS_WARNINGS_KEY_UNCHECKED public SORTED_SET KEY_GENERIC subSet( final KEY_GENERIC_TYPE from, final KEY_GENERIC_TYPE to ) { if ( compare( from, element ) <= 0 && compare( element, to ) < 0 ) return this; return EMPTY_SET; } SUPPRESS_WARNINGS_KEY_UNCHECKED public SORTED_SET KEY_GENERIC headSet( final KEY_GENERIC_TYPE to ) { if ( compare( element, to ) < 0 ) return this; return EMPTY_SET; } SUPPRESS_WARNINGS_KEY_UNCHECKED public SORTED_SET KEY_GENERIC tailSet( final KEY_GENERIC_TYPE from ) { if ( compare( from, element ) <= 0 ) return this; return EMPTY_SET; } public KEY_GENERIC_TYPE FIRST() { return element; } public KEY_GENERIC_TYPE LAST() { return element; } #if KEYS_PRIMITIVE /** {@inheritDoc} * @deprecated Please use the corresponding type-specific method instead. */ @Deprecated public KEY_CLASS first() { return KEY2OBJ( element ); } /** {@inheritDoc} * @deprecated Please use the corresponding type-specific method instead. */ @Deprecated public KEY_CLASS last() { return KEY2OBJ( element ); } /** {@inheritDoc} * @deprecated Please use the corresponding type-specific method instead. */ @Deprecated public SORTED_SET KEY_GENERIC subSet( final KEY_CLASS from, final KEY_CLASS to ) { return subSet( KEY_CLASS2TYPE( from ), KEY_CLASS2TYPE( to ) ); } /** {@inheritDoc} * @deprecated Please use the corresponding type-specific method instead. */ @Deprecated public SORTED_SET KEY_GENERIC headSet( final KEY_CLASS to ) { return headSet( KEY_CLASS2TYPE( to ) ); } /** {@inheritDoc} * @deprecated Please use the corresponding type-specific method instead. */ @Deprecated public SORTED_SET KEY_GENERIC tailSet( final KEY_CLASS from ) { return tailSet( KEY_CLASS2TYPE( from ) ); } #endif } /** Returns a type-specific immutable sorted set containing only the specified element. The returned sorted set is serializable and cloneable. * * @param element the only element of the returned sorted set. * @return a type-specific immutable sorted set containing just element. */ public static KEY_GENERIC SORTED_SET KEY_GENERIC singleton( final KEY_GENERIC_TYPE element ) { return new Singleton KEY_GENERIC( element ); } /** Returns a type-specific immutable sorted set containing only the specified element, and using a specified comparator. The returned sorted set is serializable and cloneable. * * @param element the only element of the returned sorted set. 
* @param comparator the comparator to use in the returned sorted set. * @return a type-specific immutable sorted set containing just element. */ public static KEY_GENERIC SORTED_SET KEY_GENERIC singleton( final KEY_GENERIC_TYPE element, final KEY_COMPARATOR KEY_SUPER_GENERIC comparator ) { return new Singleton KEY_GENERIC( element, comparator ); } #if KEYS_PRIMITIVE /** Returns a type-specific immutable sorted set containing only the specified element. The returned sorted set is serializable and cloneable. * * @param element the only element of the returned sorted set. * @return a type-specific immutable sorted set containing just element. */ public static KEY_GENERIC SORTED_SET KEY_GENERIC singleton( final Object element ) { return new Singleton( KEY_OBJ2TYPE( element ) ); } /** Returns a type-specific immutable sorted set containing only the specified element, and using a specified comparator. The returned sorted set is serializable and cloneable. * * @param element the only element of the returned sorted set. * @param comparator the comparator to use in the returned sorted set. * @return a type-specific immutable sorted set containing just element. */ public static KEY_GENERIC SORTED_SET KEY_GENERIC singleton( final Object element, final KEY_COMPARATOR KEY_SUPER_GENERIC comparator ) { return new Singleton( KEY_OBJ2TYPE( element ), comparator ); } #endif /** A synchronized wrapper class for sorted sets. */ public static class SynchronizedSortedSet KEY_GENERIC extends SETS.SynchronizedSet KEY_GENERIC implements SORTED_SET KEY_GENERIC, java.io.Serializable { private static final long serialVersionUID = -7046029254386353129L; protected final SORTED_SET KEY_GENERIC sortedSet; protected SynchronizedSortedSet( final SORTED_SET KEY_GENERIC s, final Object sync ) { super( s, sync ); sortedSet = s; } protected SynchronizedSortedSet( final SORTED_SET KEY_GENERIC s ) { super( s ); sortedSet = s; } public KEY_COMPARATOR KEY_SUPER_GENERIC comparator() { synchronized( sync ) { return sortedSet.comparator(); } } public SORTED_SET KEY_GENERIC subSet( final KEY_GENERIC_TYPE from, final KEY_GENERIC_TYPE to ) { return new SynchronizedSortedSet KEY_GENERIC( sortedSet.subSet( from, to ), sync ); } public SORTED_SET KEY_GENERIC headSet( final KEY_GENERIC_TYPE to ) { return new SynchronizedSortedSet KEY_GENERIC( sortedSet.headSet( to ), sync ); } public SORTED_SET KEY_GENERIC tailSet( final KEY_GENERIC_TYPE from ) { return new SynchronizedSortedSet KEY_GENERIC( sortedSet.tailSet( from ), sync ); } public KEY_BIDI_ITERATOR KEY_GENERIC iterator() { return sortedSet.iterator(); } public KEY_BIDI_ITERATOR KEY_GENERIC iterator( final KEY_GENERIC_TYPE from ) { return sortedSet.iterator( from ); } @Deprecated public KEY_BIDI_ITERATOR KEY_GENERIC KEY_ITERATOR_METHOD() { return sortedSet.iterator(); } public KEY_GENERIC_TYPE FIRST() { synchronized( sync ) { return sortedSet.FIRST(); } } public KEY_GENERIC_TYPE LAST() { synchronized( sync ) { return sortedSet.LAST(); } } #if KEYS_PRIMITIVE public KEY_CLASS first() { synchronized( sync ) { return sortedSet.first(); } } public KEY_CLASS last() { synchronized( sync ) { return sortedSet.last(); } } public SORTED_SET KEY_GENERIC subSet( final KEY_CLASS from, final KEY_CLASS to ) { return new SynchronizedSortedSet( sortedSet.subSet( from, to ), sync ); } public SORTED_SET KEY_GENERIC headSet( final KEY_CLASS to ) { return new SynchronizedSortedSet( sortedSet.headSet( to ), sync ); } public SORTED_SET KEY_GENERIC tailSet( final KEY_CLASS from ) { return new 
SynchronizedSortedSet( sortedSet.tailSet( from ), sync ); } #endif } /** Returns a synchronized type-specific sorted set backed by the given type-specific sorted set. * * @param s the sorted set to be wrapped in a synchronized sorted set. * @return a synchronized view of the specified sorted set. * @see java.util.Collections#synchronizedSortedSet(SortedSet) */ public static KEY_GENERIC SORTED_SET KEY_GENERIC synchronize( final SORTED_SET KEY_GENERIC s ) { return new SynchronizedSortedSet KEY_GENERIC( s ); } /** Returns a synchronized type-specific sorted set backed by the given type-specific sorted set, using an assigned object to synchronize. * * @param s the sorted set to be wrapped in a synchronized sorted set. * @param sync an object that will be used to synchronize the access to the sorted set. * @return a synchronized view of the specified sorted set. * @see java.util.Collections#synchronizedSortedSet(SortedSet) */ public static KEY_GENERIC SORTED_SET KEY_GENERIC synchronize( final SORTED_SET KEY_GENERIC s, final Object sync ) { return new SynchronizedSortedSet KEY_GENERIC( s, sync ); } /** An unmodifiable wrapper class for sorted sets. */ public static class UnmodifiableSortedSet KEY_GENERIC extends SETS.UnmodifiableSet KEY_GENERIC implements SORTED_SET KEY_GENERIC, java.io.Serializable { private static final long serialVersionUID = -7046029254386353129L; protected final SORTED_SET KEY_GENERIC sortedSet; protected UnmodifiableSortedSet( final SORTED_SET KEY_GENERIC s ) { super( s ); sortedSet = s; } public KEY_COMPARATOR KEY_SUPER_GENERIC comparator() { return sortedSet.comparator(); } public SORTED_SET KEY_GENERIC subSet( final KEY_GENERIC_TYPE from, final KEY_GENERIC_TYPE to ) { return new UnmodifiableSortedSet KEY_GENERIC( sortedSet.subSet( from, to ) ); } public SORTED_SET KEY_GENERIC headSet( final KEY_GENERIC_TYPE to ) { return new UnmodifiableSortedSet KEY_GENERIC( sortedSet.headSet( to ) ); } public SORTED_SET KEY_GENERIC tailSet( final KEY_GENERIC_TYPE from ) { return new UnmodifiableSortedSet KEY_GENERIC( sortedSet.tailSet( from ) ); } public KEY_BIDI_ITERATOR KEY_GENERIC iterator() { return ITERATORS.unmodifiable( sortedSet.iterator() ); } public KEY_BIDI_ITERATOR KEY_GENERIC iterator( final KEY_GENERIC_TYPE from ) { return ITERATORS.unmodifiable( sortedSet.iterator( from ) ); } @Deprecated public KEY_BIDI_ITERATOR KEY_GENERIC KEY_ITERATOR_METHOD() { return iterator(); } public KEY_GENERIC_TYPE FIRST() { return sortedSet.FIRST(); } public KEY_GENERIC_TYPE LAST() { return sortedSet.LAST(); } #if KEYS_PRIMITIVE public KEY_CLASS first() { return sortedSet.first(); } public KEY_CLASS last() { return sortedSet.last(); } public SORTED_SET KEY_GENERIC subSet( final KEY_GENERIC_CLASS from, final KEY_GENERIC_CLASS to ) { return new UnmodifiableSortedSet( sortedSet.subSet( from, to ) ); } public SORTED_SET KEY_GENERIC headSet( final KEY_GENERIC_CLASS to ) { return new UnmodifiableSortedSet( sortedSet.headSet( to ) ); } public SORTED_SET KEY_GENERIC tailSet( final KEY_GENERIC_CLASS from ) { return new UnmodifiableSortedSet( sortedSet.tailSet( from ) ); } #endif } /** Returns an unmodifiable type-specific sorted set backed by the given type-specific sorted set. * * @param s the sorted set to be wrapped in an unmodifiable sorted set. * @return an unmodifiable view of the specified sorted set. 
* @see java.util.Collections#unmodifiableSortedSet(SortedSet) */ public static KEY_GENERIC SORTED_SET KEY_GENERIC unmodifiable( final SORTED_SET KEY_GENERIC s ) { return new UnmodifiableSortedSet KEY_GENERIC( s ); } #if defined(TEST) && ! KEY_CLASS_Reference private static KEY_TYPE genKey() { #if KEY_CLASS_Byte || KEY_CLASS_Short || KEY_CLASS_Character return (KEY_TYPE)(r.nextInt()); #elif KEYS_PRIMITIVE return r.NEXT_KEY(); #elif KEY_CLASS_Object return Integer.toBinaryString( r.nextInt() ); #endif } protected static void testSets( KEY_TYPE k, SORTED_SET m, SortedSet t, int level ) { int n = 100; int c; long ms; boolean mThrowsIllegal, tThrowsIllegal, mThrowsNoElement, tThrowsNoElement, mThrowsIndex, tThrowsIndex, mThrowsUnsupp, tThrowsUnsupp; boolean rt = false, rm = false; if ( level == 0 ) return; /* Now we check that m and t are equal. */ if ( !m.equals( t ) || ! t.equals( m ) ) System.err.println("m: " + m + " t: " + t); ensure( m.equals( t ), "Error (" + level + ", " + seed + "): ! m.equals( t ) at start" ); ensure( t.equals( m ), "Error (" + level + ", " + seed + "): ! t.equals( m ) at start" ); /* Now we check that m actually holds that data. */ for(java.util.Iterator i=t.iterator(); i.hasNext(); ) { ensure( m.contains( i.next() ), "Error (" + level + ", " + seed + "): m and t differ on an entry after insertion (iterating on t)" ); } /* Now we check that m actually holds that data, but iterating on m. */ for(java.util.Iterator i=m.iterator(); i.hasNext(); ) { ensure( t.contains( i.next() ), "Error (" + level + ", " + seed + "): m and t differ on an entry after insertion (iterating on m)" ); } /* Now we check that inquiries about random data give the same answer in m and t. For m we use the polymorphic method. */ for(int i=0; i 1 ) r = new java.util.Random( seed = Long.parseLong( arg[ 1 ] ) ); try { test(); } catch( Throwable e ) { e.printStackTrace( System.err ); System.err.println( "seed: " + seed ); } } #endif } fastutil-7.1.0/drv/Stack.drv0000664000000000000000000000210013050701620014412 0ustar rootroot/* * Copyright (C) 2002-2016 Sebastiano Vigna * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package PACKAGE; import it.unimi.dsi.fastutil.Stack; /** A type-specific {@link Stack}; provides some additional methods that use polymorphism to avoid (un)boxing. */ public interface STACK KEY_GENERIC extends Stack { /** * @see Stack#push(Object) */ void push( KEY_TYPE k ); /** * @see Stack#pop() */ KEY_TYPE POP(); /** * @see Stack#top() */ KEY_TYPE TOP(); /** * @see Stack#peek(int) */ KEY_TYPE PEEK( int i ); } fastutil-7.1.0/drv/StripedOpenHashMap.drv0000664000000000000000000001214213050701620017052 0ustar rootroot/* * Copyright (C) 2002-2016 Sebastiano Vigna * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package PACKAGE; import it.unimi.dsi.fastutil.objects.Object2IntOpenHashMap; import java.io.Serializable; import java.util.concurrent.locks.ReentrantReadWriteLock; import java.util.concurrent.locks.ReentrantReadWriteLock.ReadLock; import java.util.concurrent.locks.ReentrantReadWriteLock.WriteLock; /** A concurrent counting map. The map is made by a number of stripes (instances of {@link Object2IntOpenHashMap}) * which are accessed independently * using a {@link ReentrantReadWriteLock}. Only one thread can write in a stripe at a time, but different stripes * can be modified independently and read access can happen concurrently on each stripe. * * @param the type of keys. */ public class STRIPED_OPEN_HASH_MAP KEY_VALUE_GENERIC extends ABSTRACT_MAP KEY_VALUE_GENERIC implements java.io.Serializable, Cloneable { private static final long serialVersionUID = 1L; /** The stripes. Keys are distributed among them using the lower bits of their {@link Object#hashCode()}. */ private final OPEN_HASH_MAP KEY_VALUE_GENERIC[] map; /** An array of locks parallel to {@link #map}, protecting each stripe. */ private final transient ReentrantReadWriteLock[] lock; /** {@link #map map.length} − 1, cached. */ private final int mask; /** Creates a new concurrent counting map with concurrency level equal to {@link Runtime#availableProcessors()}. */ public STRIPED_OPEN_HASH_MAP() { this( Runtime.getRuntime().availableProcessors() ); } /** Creates a new concurrent counting map. * * @param concurrencyLevel the number of stripes (it will be {@linkplain Integer#highestOneBit(int) forced to be a power of two}); ideally, as large as the number of threads that will ever access * this map, but higher values require more space. 
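 * <p>Illustrative sketch (not actual code from this class) of how the number of stripes
 * and the stripe of a key are computed, following the scheme described above:
 * <pre>
 * final int stripes = Integer.highestOneBit( concurrencyLevel ); // e.g. 12 becomes 8
 * final int mask = stripes - 1;
 * final int stripe = hash( key ) &amp; mask; // this stripe is protected by lock[ stripe ]
 * </pre>
 * where <code>hash( key )</code> stands for the (suitably mixed) hash of the key.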
*/ SUPPRESS_WARNINGS_KEY_UNCHECKED public STRIPED_OPEN_HASH_MAP( final int concurrencyLevel ) { map = new OPEN_HASH_MAP[ Integer.highestOneBit( concurrencyLevel ) ]; lock = new ReentrantReadWriteLock[ map.length ]; for( int i = map.length; i-- != 0; ) { map[ i ] = new OPEN_HASH_MAP KEY_VALUE_GENERIC(); lock[ i ] = new ReentrantReadWriteLock(); } mask = map.length - 1; } #if KEYS_PRIMITIVE public VALUE_GENERIC_CLASS get( final KEY_CLASS k ) { final int stripe = KEY2INTHASH( k ) & mask; final ReadLock readLock = lock[ stripe ].readLock(); try { readLock.lock(); return map[ stripe ].get( k ); } finally { readLock.unlock(); } } #endif SUPPRESS_WARNINGS_KEY_UNCHECKED public VALUE_GENERIC_TYPE GET_VALUE( final KEY_TYPE k ) { final int stripe = KEY2INTHASH( k ) & mask; final ReadLock readLock = lock[ stripe ].readLock(); try { readLock.lock(); return map[ stripe ].GET_VALUE( k ); } finally { readLock.unlock(); } } public VALUE_GENERIC_TYPE put( final KEY_GENERIC_TYPE k, final VALUE_GENERIC_TYPE v ) { final int stripe = KEY2INTHASH( k ) & mask; final WriteLock writeLock = lock[ stripe ].writeLock(); try { writeLock.lock(); return map[ stripe ].put( k, v ); } finally { writeLock.unlock(); } } public VALUE_GENERIC_TYPE putIfAbsent( final KEY_GENERIC_TYPE k, final VALUE_GENERIC_TYPE v ) { final int stripe = KEY2INTHASH( k ) & mask; final WriteLock writeLock = lock[ stripe ].writeLock(); try { writeLock.lock(); if ( map[ stripe ].containsKey( k ) ) return map[ stripe ].get( k ); return map[ stripe ].put( k, v ); } finally { writeLock.unlock(); } } #if VALUES_PRIMITIVE || KEYS_PRIMITIVE public VALUE_GENERIC_CLASS put( final KEY_GENERIC_CLASS ok, final VALUE_GENERIC_CLASS ov ) { final int stripe = KEY2INTHASH( ok ) & mask; final WriteLock writeLock = lock[ stripe ].writeLock(); try { writeLock.lock(); return map[ stripe ].put( ok, ov ); } finally { writeLock.unlock(); } } public VALUE_GENERIC_CLASS putIfAbsent( final KEY_GENERIC_CLASS ok, final VALUE_GENERIC_CLASS ov ) { final int stripe = KEY2INTHASH( ok ) & mask; final WriteLock writeLock = lock[ stripe ].writeLock(); try { writeLock.lock(); if ( map[ stripe ].containsKey( ok ) ) return map[ stripe ].get( ok ); return map[ stripe ].put( ok, ov ); } finally { writeLock.unlock(); } } #endif public int size() { int size = 0; for( int stripe = lock.length; stripe-- != 0; ) { final ReadLock readLock = lock[ stripe ].readLock(); try { readLock.lock(); size += map[ stripe ].size(); } finally { readLock.unlock(); } } return size; } public FastEntrySet KEY_VALUE_GENERIC ENTRYSET() { throw new UnsupportedOperationException(); } }fastutil-7.1.0/drv/TextIO.drv0000664000000000000000000000541613050701620014536 0ustar rootroot/* * Copyright (C) 2005-2016 Sebastiano Vigna * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package it.unimi.dsi.fastutil.io; import static it.unimi.dsi.fastutil.BigArrays.SEGMENT_MASK; import static it.unimi.dsi.fastutil.BigArrays.segment; import static it.unimi.dsi.fastutil.BigArrays.start; import java.io.*; import java.util.*; import it.unimi.dsi.fastutil.booleans.*; import it.unimi.dsi.fastutil.bytes.*; import it.unimi.dsi.fastutil.shorts.*; import it.unimi.dsi.fastutil.ints.*; import it.unimi.dsi.fastutil.longs.*; import it.unimi.dsi.fastutil.floats.*; import it.unimi.dsi.fastutil.doubles.*; /** Provides static methods to perform easily textual I/O. * *
 * <p>
This class fills a gap in the Java API: a natural operation on sequences * of primitive elements is to load or store them in textual form. This format * makes files human-readable. * *
 * <p>
For each primitive type, this class provides methods that read elements * from a {@link BufferedReader} or from a filename (which will be opened * using a buffer of {@link #BUFFER_SIZE} bytes) into an array. Analogously, * there are methods that store the content of an array (fragment) or the * elements returned by an iterator to a {@link PrintStream} or to a given * filename. * *
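 * <p>For example (illustrative):
 * <pre>
 * int[] a = { 1, 2, 3 };
 * TextIO.storeInts( a, "numbers.txt" );        // writes one element per line
 * int n = TextIO.loadInts( "numbers.txt", a ); // reads back at most a.length elements
 * </pre>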
 * <p>
Finally, there are useful wrapper methods that {@linkplain #asIntIterator(CharSequence) * exhibit a file as a type-specific iterator}. * *
 * <p>
Note that, contrarily to the binary case, there is no way to * {@linkplain BinIO#loadInts(CharSequence) load from a file without providing an array}. You can * easily work around the problem as follows: *
 * <pre>
 * array = IntIterators.unwrap( TextIO.asIntIterator("foo") );
 * </pre>
* * @since 4.4 */ public class TextIO { private TextIO() {} /** The size of the buffer used for all I/O on files. */ final public static int BUFFER_SIZE = 8 * 1024; #include "src/it/unimi/dsi/fastutil/io/BooleanTextIOFragment.h" #include "src/it/unimi/dsi/fastutil/io/ByteTextIOFragment.h" #include "src/it/unimi/dsi/fastutil/io/ShortTextIOFragment.h" #include "src/it/unimi/dsi/fastutil/io/IntTextIOFragment.h" #include "src/it/unimi/dsi/fastutil/io/LongTextIOFragment.h" #include "src/it/unimi/dsi/fastutil/io/FloatTextIOFragment.h" #include "src/it/unimi/dsi/fastutil/io/DoubleTextIOFragment.h" } fastutil-7.1.0/drv/TextIOFragment.drv0000664000000000000000000004347213050701620016226 0ustar rootroot/* * Copyright (C) 2004-2016 Sebastiano Vigna * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /** Loads elements from a given fast buffered reader, storing them in a given array fragment. * * @param reader a buffered reader. * @param array an array which will be filled with data from reader. * @param offset the index of the first element of array to be filled. * @param length the number of elements of array to be filled. * @return the number of elements actually read from reader (it might be less than length if reader ends). */ public static int LOAD_KEYS( final BufferedReader reader, final KEY_TYPE[] array, final int offset, final int length ) throws IOException { PACKAGE.ARRAYS.ensureOffsetLength( array, offset, length ); int i = 0; String s; try { for( i = 0; i < length; i++ ) if ( ( s = reader.readLine() ) != null ) array[ i + offset ] = KEY_CLASS.PARSE_KEY( s.trim() ); else break; } catch( EOFException itsOk ) {} return i; } /** Loads elements from a given buffered reader, storing them in a given array. * * @param reader a buffered reader. * @param array an array which will be filled with data from reader. * @return the number of elements actually read from reader (it might be less than the array length if reader ends). */ public static int LOAD_KEYS( final BufferedReader reader, final KEY_TYPE[] array ) throws IOException { return LOAD_KEYS( reader, array, 0, array.length ); } /** Loads elements from a file given by a {@link File} object, storing them in a given array fragment. * * @param file a file. * @param array an array which will be filled with data from the specified file. * @param offset the index of the first element of array to be filled. * @param length the number of elements of array to be filled. * @return the number of elements actually read from the given file (it might be less than length if the file is too short). */ public static int LOAD_KEYS( final File file, final KEY_TYPE[] array, final int offset, final int length ) throws IOException { final BufferedReader reader = new BufferedReader( new FileReader( file ) ); final int result = LOAD_KEYS( reader, array, offset, length ); reader.close(); return result; } /** Loads elements from a file given by a filename, storing them in a given array fragment. * * @param filename a filename. 
* @param array an array which will be filled with data from the specified file. * @param offset the index of the first element of array to be filled. * @param length the number of elements of array to be filled. * @return the number of elements actually read from the given file (it might be less than length if the file is too short). */ public static int LOAD_KEYS( final CharSequence filename, final KEY_TYPE[] array, final int offset, final int length ) throws IOException { return LOAD_KEYS( new File( filename.toString() ), array, offset, length ); } /** Loads elements from a file given by a {@link File} object, storing them in a given array. * * @param file a file. * @param array an array which will be filled with data from the specified file. * @return the number of elements actually read from the given file (it might be less than the array length if the file is too short). */ public static int LOAD_KEYS( final File file, final KEY_TYPE[] array ) throws IOException { return LOAD_KEYS( file, array, 0, array.length ); } /** Loads elements from a file given by a filename, storing them in a given array. * * @param filename a filename. * @param array an array which will be filled with data from the specified file. * @return the number of elements actually read from the given file (it might be less than the array length if the file is too short). */ public static int LOAD_KEYS( final CharSequence filename, final KEY_TYPE[] array ) throws IOException { return LOAD_KEYS( filename, array, 0, array.length ); } /** Stores an array fragment to a given print stream. * * @param array an array whose elements will be written to stream. * @param offset the index of the first element of array to be written. * @param length the number of elements of array to be written. * @param stream a print stream. */ public static void STORE_KEYS( final KEY_TYPE array[], final int offset, final int length, final PrintStream stream ) { PACKAGE.ARRAYS.ensureOffsetLength( array, offset, length ); for( int i = 0; i < length; i++ ) stream.println( array[ offset + i ] ); } /** Stores an array to a given print stream. * * @param array an array whose elements will be written to stream. * @param stream a print stream. */ public static void STORE_KEYS( final KEY_TYPE array[], final PrintStream stream ) { STORE_KEYS( array, 0, array.length, stream ); } /** Stores an array fragment to a file given by a {@link File} object. * * @param array an array whose elements will be written to filename. * @param offset the index of the first element of array to be written. * @param length the number of elements of array to be written. * @param file a file. */ public static void STORE_KEYS( final KEY_TYPE array[], final int offset, final int length, final File file ) throws IOException { final PrintStream stream = new PrintStream( new FastBufferedOutputStream( new FileOutputStream( file ) ) ); STORE_KEYS( array, offset, length, stream ); stream.close(); } /** Stores an array fragment to a file given by a pathname. * * @param array an array whose elements will be written to filename. * @param offset the index of the first element of array to be written. * @param length the number of elements of array to be written. * @param filename a filename. */ public static void STORE_KEYS( final KEY_TYPE array[], final int offset, final int length, final CharSequence filename ) throws IOException { STORE_KEYS( array, offset, length, new File( filename.toString() ) ); } /** Stores an array to a file given by a {@link File} object. 
* * @param array an array whose elements will be written to filename. * @param file a file. */ public static void STORE_KEYS( final KEY_TYPE array[], final File file ) throws IOException { STORE_KEYS( array, 0, array.length, file ); } /** Stores an array to a file given by a pathname. * * @param array an array whose elements will be written to filename. * @param filename a filename. */ public static void STORE_KEYS( final KEY_TYPE array[], final CharSequence filename ) throws IOException { STORE_KEYS( array, 0, array.length, filename ); } /** Stores the element returned by an iterator to a given print stream. * * @param i an iterator whose output will be written to stream. * @param stream a print stream. */ public static void STORE_KEYS( final KEY_ITERATOR i, final PrintStream stream ) { while( i.hasNext() ) stream.println( i.NEXT_KEY() ); } /** Stores the element returned by an iterator to a file given by a {@link File} object. * * @param i an iterator whose output will be written to filename. * @param file a file. */ public static void STORE_KEYS( final KEY_ITERATOR i, final File file ) throws IOException { final PrintStream stream = new PrintStream( new FastBufferedOutputStream( new FileOutputStream( file ) ) ); STORE_KEYS( i, stream ); stream.close(); } /** Stores the element returned by an iterator to a file given by a pathname. * * @param i an iterator whose output will be written to filename. * @param filename a filename. */ public static void STORE_KEYS( final KEY_ITERATOR i, final CharSequence filename ) throws IOException { STORE_KEYS( i, new File( filename.toString() ) ); } /** Loads elements from a given fast buffered reader, storing them in a given big-array fragment. * * @param reader a buffered reader. * @param array a big array which will be filled with data from reader. * @param offset the index of the first element of array to be filled. * @param length the number of elements of array to be filled. * @return the number of elements actually read from reader (it might be less than length if reader ends). */ public static long LOAD_KEYS( final BufferedReader reader, final KEY_TYPE[][] array, final long offset, final long length ) throws IOException { PACKAGE.BIG_ARRAYS.ensureOffsetLength( array, offset, length ); long c = 0; String s; try { for( int i = segment( offset ); i < segment( offset + length + SEGMENT_MASK ); i++ ) { final KEY_TYPE[] t = array[ i ]; final int l = (int)Math.min( t.length, offset + length - start( i ) ); for( int d = (int)Math.max( 0, offset - start( i ) ); d < l; d++ ) { if ( ( s = reader.readLine() ) != null ) t[ d ] = KEY_CLASS.PARSE_KEY( s.trim() ); else return c; c++; } } } catch( EOFException itsOk ) {} return c; } /** Loads elements from a given buffered reader, storing them in a given array. * * @param reader a buffered reader. * @param array a big array which will be filled with data from reader. * @return the number of elements actually read from reader (it might be less than the array length if reader ends). */ public static long LOAD_KEYS( final BufferedReader reader, final KEY_TYPE[][] array ) throws IOException { return LOAD_KEYS( reader, array, 0, PACKAGE.BIG_ARRAYS.length( array ) ); } /** Loads elements from a file given by a {@link File} object, storing them in a given big-array fragment. * * @param file a file. * @param array a big array which will be filled with data from the specified file. * @param offset the index of the first element of array to be filled. * @param length the number of elements of array to be filled. 
* @return the number of elements actually read from the given file (it might be less than length if the file is too short). */ public static long LOAD_KEYS( final File file, final KEY_TYPE[][] array, final long offset, final long length ) throws IOException { final BufferedReader reader = new BufferedReader( new FileReader( file ) ); final long result = LOAD_KEYS( reader, array, offset, length ); reader.close(); return result; } /** Loads elements from a file given by a filename, storing them in a given big-array fragment. * * @param filename a filename. * @param array a big array which will be filled with data from the specified file. * @param offset the index of the first element of array to be filled. * @param length the number of elements of array to be filled. * @return the number of elements actually read from the given file (it might be less than length if the file is too short). */ public static long LOAD_KEYS( final CharSequence filename, final KEY_TYPE[][] array, final long offset, final long length ) throws IOException { return LOAD_KEYS( new File( filename.toString() ), array, offset, length ); } /** Loads elements from a file given by a {@link File} object, storing them in a given array. * * @param file a file. * @param array a big array which will be filled with data from the specified file. * @return the number of elements actually read from the given file (it might be less than the array length if the file is too short). */ public static long LOAD_KEYS( final File file, final KEY_TYPE[][] array ) throws IOException { return LOAD_KEYS( file, array, 0, PACKAGE.BIG_ARRAYS.length( array ) ); } /** Loads elements from a file given by a filename, storing them in a given array. * * @param filename a filename. * @param array a big array which will be filled with data from the specified file. * @return the number of elements actually read from the given file (it might be less than the array length if the file is too short). */ public static long LOAD_KEYS( final CharSequence filename, final KEY_TYPE[][] array ) throws IOException { return LOAD_KEYS( filename, array, 0, PACKAGE.BIG_ARRAYS.length( array ) ); } /** Stores a big-array fragment to a given print stream. * * @param array a big array whose elements will be written to stream. * @param offset the index of the first element of array to be written. * @param length the number of elements of array to be written. * @param stream a print stream. */ public static void STORE_KEYS( final KEY_TYPE array[][], final long offset, final long length, final PrintStream stream ) { PACKAGE.BIG_ARRAYS.ensureOffsetLength( array, offset, length ); for( int i = segment( offset ); i < segment( offset + length + SEGMENT_MASK ); i++ ) { final KEY_TYPE[] t = array[ i ]; final int l = (int)Math.min( t.length, offset + length - start( i ) ); for( int d = (int)Math.max( 0, offset - start( i ) ); d < l; d++ ) stream.println( t[ d ] ); } } /** Stores a big array to a given print stream. * * @param array a big array whose elements will be written to stream. * @param stream a print stream. */ public static void STORE_KEYS( final KEY_TYPE array[][], final PrintStream stream ) { STORE_KEYS( array, 0, PACKAGE.BIG_ARRAYS.length( array ), stream ); } /** Stores a big-array fragment to a file given by a {@link File} object. * * @param array a big array whose elements will be written to filename. * @param offset the index of the first element of array to be written. * @param length the number of elements of array to be written. * @param file a file. 
*/ public static void STORE_KEYS( final KEY_TYPE array[][], final long offset, final long length, final File file ) throws IOException { final PrintStream stream = new PrintStream( new FastBufferedOutputStream( new FileOutputStream( file ) ) ); STORE_KEYS( array, offset, length, stream ); stream.close(); } /** Stores a big-array fragment to a file given by a pathname. * * @param array a big array whose elements will be written to filename. * @param offset the index of the first element of array to be written. * @param length the number of elements of array to be written. * @param filename a filename. */ public static void STORE_KEYS( final KEY_TYPE array[][], final long offset, final long length, final CharSequence filename ) throws IOException { STORE_KEYS( array, offset, length, new File( filename.toString() ) ); } /** Stores a big array to a file given by a {@link File} object. * * @param array a big array whose elements will be written to filename. * @param file a file. */ public static void STORE_KEYS( final KEY_TYPE array[][], final File file ) throws IOException { STORE_KEYS( array, 0, PACKAGE.BIG_ARRAYS.length( array ), file ); } /** Stores a big array to a file given by a pathname. * * @param array a big array whose elements will be written to filename. * @param filename a filename. */ public static void STORE_KEYS( final KEY_TYPE array[][], final CharSequence filename ) throws IOException { STORE_KEYS( array, 0, PACKAGE.BIG_ARRAYS.length( array ), filename ); } /** A wrapper that exhibits the content of a reader as a type-specific iterator. */ final private static class KEY_READER_WRAPPER extends KEY_ABSTRACT_ITERATOR { final private BufferedReader reader; private boolean toAdvance = true; private String s; private KEY_TYPE next; public KEY_READER_WRAPPER( final BufferedReader reader ) { this.reader = reader; } public boolean hasNext() { if ( ! toAdvance ) return s != null; toAdvance = false; try { s = reader.readLine(); } catch( EOFException itsOk ) {} catch( IOException rethrow ) { throw new RuntimeException( rethrow ); } if ( s == null ) return false; next = KEY_CLASS.PARSE_KEY( s.trim() ); return true; } public KEY_TYPE NEXT_KEY() { if (! hasNext()) throw new NoSuchElementException(); toAdvance = true; return next; } } /** Wraps the given buffered reader into an iterator. * * @param reader a buffered reader. */ public static KEY_ITERATOR AS_KEY_ITERATOR( final BufferedReader reader ) { return new KEY_READER_WRAPPER( reader ); } /** Wraps a file given by a {@link File} object into an iterator. * * @param file a file. */ public static KEY_ITERATOR AS_KEY_ITERATOR( final File file ) throws IOException { return new KEY_READER_WRAPPER( new BufferedReader( new FileReader( file ) ) ); } /** Wraps a file given by a pathname into an iterator. * * @param filename a filename. */ public static KEY_ITERATOR AS_KEY_ITERATOR( final CharSequence filename ) throws IOException { return AS_KEY_ITERATOR( new File( filename.toString() ) ); } /** Wraps a file given by a {@link File} object into an iterable object. * * @param file a file. */ public static KEY_ITERABLE AS_KEY_ITERABLE( final File file ) { return new KEY_ITERABLE() { public KEY_ITERATOR iterator() { try { return AS_KEY_ITERATOR( file ); } catch( IOException e ) { throw new RuntimeException( e ); } } }; } /** Wraps a file given by a pathname into an iterable object. * * @param filename a filename. 
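 *
 * <p>A minimal usage sketch (assuming the <code>int</code> specialization, in which this
 * method is generated as <code>TextIO.asIntIterable()</code>; the filename is illustrative):
 * <pre>
 * long sum = 0;
 * for( int x : asIntIterable( "numbers.txt" ) ) sum += x; // one integer parsed per line
 * </pre>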
*/ public static KEY_ITERABLE AS_KEY_ITERABLE( final CharSequence filename ) { return new KEY_ITERABLE() { public KEY_ITERATOR iterator() { try { return AS_KEY_ITERATOR( filename ); } catch( IOException e ) { throw new RuntimeException( e ); } } }; } fastutil-7.1.0/build.xml0000664000000000000000000001630213050701620013667 0ustar rootroot fastutil-7.1.0/ivy.xml0000664000000000000000000000041313050701620013373 0ustar rootroot fastutil-7.1.0/fastutil.bnd0000664000000000000000000000022113050701620014357 0ustar rootrootBundle-Name: it.unimi.dsi.fastutil Bundle-SymbolicName: it.unimi.dsi.fastutil Export-Package: it.unimi.dsi.fastutil.* Bundle-Version: ${version} fastutil-7.1.0/pom.xml0000664000000000000000000000211413050714527013371 0ustar rootroot 4.0.0 it.unimi.dsi fastutil jar fastutil 7.1.0 fastutil extends the Java Collections Framework by providing type-specific maps, sets, lists and priority queues with a small memory footprint and fast access and insertion; provides also big (64-bit) arrays, sets and lists, and fast, practical I/O classes for binary and text files. http://fasutil.di.unimi.it/ Apache License, Version 2.0 http://www.apache.org/licenses/LICENSE-2.0.html repo scm:git://github.com/vigna/fastutil.git https://github.com/vigna/fastutil vigna Sebastiano Vigna vigna@di.unimi.it fastutil-7.1.0/build.properties0000664000000000000000000000053413050701620015263 0ustar rootrootjar.base=/usr/share/java javadoc.base=/usr/share/javadoc build.sysclasspath=ignore version=7.1.0 dist=dist src=src drv=drv test=test reports=reports coverage=coverage checkstyle=checkstyle docs=docs build=build instrumented=instrumented remote.j2se.apiurl=http://docs.oracle.com/javase/7/docs/api/ local.j2se.apiurl=file://${javadoc.base}/java fastutil-7.1.0/gencsource.sh0000775000000000000000000006263413050701620014553 0ustar rootroot#!/bin/bash # # This script generates from driver files fake C sources to be passed # through a C preprocessor to get the actual Java sources. It expects # as arguments the name of the driver and the name of the file to be # generated. # # The types we specialise to (these are actual Java types, so references appear here as Object). TYPE=(boolean byte short int long char float double Object Object) # The capitalized types used to build class and method names (now references appear as Reference). TYPE_CAP=(Boolean Byte Short Int Long Char Float Double Object Reference) # Much like $TYPE_CAP, by the place occupied by Reference is now occupied by Object. TYPE_CAP2=(Boolean Byte Short Int Long Char Float Double Object Object) # Much like $TYPE_CAP, but object type get the empty string. TYPE_STD=(Boolean Byte Short Int Long Char Float Double "" "") # The upper case types used to build class and method names. TYPE_UC=(BOOLEAN BYTE SHORT INT LONG CHAR FLOAT DOUBLE OBJECT REFERENCE) # The downcased types used to build method names. TYPE_LC=(boolean byte short int long char float double object reference) # Much like $TYPE_LC, by the place occupied by reference is now occupied by object. TYPE_LC2=(boolean byte short int long char float double object object) # The corresponding classes (in few cases, there are differences with $TYPE_CAP). CLASS=(Boolean Byte Short Integer Long Character Float Double Object Reference) export LC_ALL=C shopt -s extglob file=${2##*/} name=${file%.*} class=${name#Abstract} if [[ "$class" == "$name" ]]; then abstract= else abstract=Abstract fi class=${class#Striped} # Now we rip off the types. 
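# For example, if $2 has basename Int2ObjectLinkedOpenHashMap (extension stripped above),
# the code below rips off "Int" (KEY_TYPE_CAP, k=3 in the arrays above) and "Object"
# (VALUE_TYPE_CAP, v=8), leaving root=LinkedOpenHashMap; the "Linked" fragment then enables
# the linked-map macros defined below, and the big echo emits definitions such as
#   #define KEY_TYPE int
#   #define MAP Int2ObjectMap
#   #define OPEN_HASH_MAP Int2ObjectLinkedOpenHashMap
# which the C preprocessor uses to expand the corresponding driver (.drv) file.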
rem=${class##[A-Z]+([a-z])} keylen=$(( ${#class} - ${#rem} )) root=$rem KEY_TYPE_CAP=${class:0:$keylen} VALUE_TYPE_CAP=Object # Just for filling holes if [[ "${rem:0:1}" == "2" ]]; then isFunction=true rem=${rem:1} rem2=${rem##[A-Z]+([a-z])} valuelen=$(( ${#rem} - ${#rem2} )) VALUE_TYPE_CAP=${rem:0:$valuelen} root=$rem2 else isFunction=false fi for((k=0; k<${#TYPE_CAP[*]}; k++)); do if [[ ${TYPE_CAP[$k]} == $KEY_TYPE_CAP ]]; then break; fi; done for((v=0; v<${#TYPE_CAP[*]}; v++)); do if [[ ${TYPE_CAP[$v]} == $VALUE_TYPE_CAP ]]; then break; fi; done if [[ $root == *Linked* ]]; then Linked=Linked # Macros for transforming the bi-directional long link. Return values are 32-bit int indexes. # SET_UPPER and SET_LOWER do a masked assignment as described at # http://www-graphics.stanford.edu/~seander/bithacks.html#MaskedMerge echo -e \ "#define SET_PREV( f64, p32 ) SET_UPPER( f64, p32 )\n"\ "#define SET_NEXT( f64, n32 ) SET_LOWER( f64, n32 )\n"\ "#define COPY_PREV( f64, p64 ) SET_UPPER64( f64, p64 )\n"\ "#define COPY_NEXT( f64, n64 ) SET_LOWER64( f64, n64 )\n"\ "#define GET_PREV( f64 ) GET_UPPER( f64 )\n"\ "#define GET_NEXT( f64 ) GET_LOWER( f64 )\n"\ "#define SET_UPPER_LOWER( f64, up32, low32 ) f64 = ( ( up32 & 0xFFFFFFFFL ) << 32 ) | ( low32 & 0xFFFFFFFFL )\n"\ "#define SET_UPPER( f64, up32 ) f64 ^= ( ( f64 ^ ( ( up32 & 0xFFFFFFFFL ) << 32 ) ) & 0xFFFFFFFF00000000L )\n"\ "#define SET_LOWER( f64, low32 ) f64 ^= ( ( f64 ^ ( low32 & 0xFFFFFFFFL ) ) & 0xFFFFFFFFL )\n"\ "#define SET_UPPER64( f64, up64 ) f64 ^= ( ( f64 ^ ( up64 & 0xFFFFFFFF00000000L ) ) & 0xFFFFFFFF00000000L )\n"\ "#define SET_LOWER64( f64, low64 ) f64 ^= ( ( f64 ^ ( low64 & 0xFFFFFFFFL ) ) & 0xFFFFFFFFL )\n"\ "#define GET_UPPER( f64 ) (int) ( f64 >>> 32 )\n"\ "#define GET_LOWER( f64 ) (int) f64\n" fi if [[ $root == *Custom* ]]; then Custom=Custom; fi echo -e \ \ \ "/* Generic definitions */\n"\ \ \ "${Linked:+#define Linked}\n"\ "${Custom:+#define Custom}\n"\ "#define PACKAGE it.unimi.dsi.fastutil.${TYPE_LC2[$k]}s\n"\ "#define VALUE_PACKAGE it.unimi.dsi.fastutil.${TYPE_LC2[$v]}s\n"\ \ \ "/* Assertions (useful to generate conditional code) */\n"\ \ \ $(if [[ "${CLASS[$k]}" != "" ]]; then\ echo "#define KEY_CLASS_${CLASS[$k]} 1\\n";\ if [[ "${CLASS[$k]}" != "Object" && "${CLASS[$k]}" != "Reference" ]]; then\ echo "#define KEYS_PRIMITIVE 1\\n";\ else\ echo "#define KEYS_REFERENCE 1\\n";\ fi;\ fi)\ $(if [[ "${CLASS[$v]}" != "" ]]; then\ echo "#define VALUE_CLASS_${CLASS[$v]} 1\\n";\ if [[ "${CLASS[$v]}" != "Object" && "${CLASS[$v]}" != "Reference" ]]; then\ echo "#define VALUES_PRIMITIVE 1\\n";\ else\ echo "#define VALUES_REFERENCE 1\\n";\ fi;\ fi)\ \ \ "/* Current type and class (and size, if applicable) */\n"\ \ \ "#define KEY_TYPE ${TYPE[$k]}\n"\ "#define VALUE_TYPE ${TYPE[$v]}\n"\ "#define KEY_CLASS ${CLASS[$k]}\n"\ "#define VALUE_CLASS ${CLASS[$v]}\n"\ \ \ "#if KEYS_REFERENCE\n"\ "#define KEY_GENERIC_CLASS K\n"\ "#define KEY_GENERIC_TYPE K\n"\ "#define KEY_GENERIC \n"\ "#define KEY_GENERIC_WILDCARD \n"\ "#define KEY_EXTENDS_GENERIC \n"\ "#define KEY_SUPER_GENERIC \n"\ "#define KEY_GENERIC_CAST (K)\n"\ "#define KEY_GENERIC_ARRAY_CAST (K[])\n"\ "#define KEY_GENERIC_BIG_ARRAY_CAST (K[][])\n"\ "#define SUPPRESS_WARNINGS_KEY_UNCHECKED @SuppressWarnings(\"unchecked\")\n"\ "#define SUPPRESS_WARNINGS_KEY_RAWTYPES @SuppressWarnings(\"rawtypes\")\n"\ "#define SUPPRESS_WARNINGS_KEY_UNCHECKED_RAWTYPES @SuppressWarnings({\"unchecked\",\"rawtypes\"})\n"\ "#if defined(Custom)\n"\ "#define SUPPRESS_WARNINGS_CUSTOM_KEY_UNCHECKED 
@SuppressWarnings(\"unchecked\")\n"\ "#else\n"\ "#define SUPPRESS_WARNINGS_CUSTOM_KEY_UNCHECKED\n"\ "#endif\n"\ "#else\n"\ "#define KEY_GENERIC_CLASS KEY_CLASS\n"\ "#define KEY_GENERIC_TYPE KEY_TYPE\n"\ "#define KEY_GENERIC\n"\ "#define KEY_GENERIC_WILDCARD\n"\ "#define KEY_EXTENDS_GENERIC\n"\ "#define KEY_SUPER_GENERIC\n"\ "#define KEY_GENERIC_CAST\n"\ "#define KEY_GENERIC_ARRAY_CAST\n"\ "#define KEY_GENERIC_BIG_ARRAY_CAST\n"\ "#define SUPPRESS_WARNINGS_KEY_UNCHECKED\n"\ "#define SUPPRESS_WARNINGS_KEY_RAWTYPES\n"\ "#define SUPPRESS_WARNINGS_KEY_UNCHECKED_RAWTYPES\n"\ "#define SUPPRESS_WARNINGS_CUSTOM_KEY_UNCHECKED\n"\ "#endif\n"\ \ "#if VALUES_REFERENCE\n"\ "#define VALUE_GENERIC_CLASS V\n"\ "#define VALUE_GENERIC_TYPE V\n"\ "#define VALUE_GENERIC \n"\ "#define VALUE_EXTENDS_GENERIC \n"\ "#define VALUE_GENERIC_CAST (V)\n"\ "#define VALUE_GENERIC_ARRAY_CAST (V[])\n"\ "#define SUPPRESS_WARNINGS_VALUE_UNCHECKED @SuppressWarnings(\"unchecked\")\n"\ "#define SUPPRESS_WARNINGS_VALUE_RAWTYPES @SuppressWarnings(\"rawtypes\")\n"\ "#else\n"\ "#define VALUE_GENERIC_CLASS VALUE_CLASS\n"\ "#define VALUE_GENERIC_TYPE VALUE_TYPE\n"\ "#define VALUE_GENERIC\n"\ "#define VALUE_EXTENDS_GENERIC\n"\ "#define VALUE_GENERIC_CAST\n"\ "#define VALUE_GENERIC_ARRAY_CAST\n"\ "#define SUPPRESS_WARNINGS_VALUE_UNCHECKED\n"\ "#define SUPPRESS_WARNINGS_VALUE_RAWTYPES\n"\ "#endif\n"\ \ "#if KEYS_REFERENCE\n"\ "#if VALUES_REFERENCE\n"\ "#define KEY_VALUE_GENERIC \n"\ "#define KEY_VALUE_EXTENDS_GENERIC \n"\ "#else\n"\ "#define KEY_VALUE_GENERIC \n"\ "#define KEY_VALUE_EXTENDS_GENERIC \n"\ "#endif\n"\ "#else\n"\ "#if VALUES_REFERENCE\n"\ "#define KEY_VALUE_GENERIC \n"\ "#define KEY_VALUE_EXTENDS_GENERIC \n"\ "#else\n"\ "#define KEY_VALUE_GENERIC\n"\ "#define KEY_VALUE_EXTENDS_GENERIC\n"\ "#endif\n"\ "#endif\n"\ \ "#if KEYS_REFERENCE || VALUES_REFERENCE\n"\ "#define SUPPRESS_WARNINGS_KEY_VALUE_UNCHECKED @SuppressWarnings(\"unchecked\")\n"\ "#define SUPPRESS_WARNINGS_KEY_VALUE_RAWTYPES @SuppressWarnings(\"rawtypes\")\n"\ "#else\n"\ "#define SUPPRESS_WARNINGS_KEY_VALUE_UNCHECKED\n"\ "#define SUPPRESS_WARNINGS_KEY_VALUE_RAWTYPES\n"\ "#endif\n"\ \ \ "/* Value methods */\n"\ \ \ "#define KEY_VALUE ${TYPE[$k]}Value\n"\ "#define VALUE_VALUE ${TYPE[$v]}Value\n"\ \ \ "/* Interfaces (keys) */\n"\ \ \ "#define COLLECTION ${TYPE_CAP[$k]}Collection\n\n"\ "#define SET ${TYPE_CAP[$k]}Set\n\n"\ "#define HASH ${TYPE_CAP[$k]}Hash\n\n"\ "#define SORTED_SET ${TYPE_CAP[$k]}SortedSet\n\n"\ "#define STD_SORTED_SET ${TYPE_STD[$k]}SortedSet\n\n"\ "#define FUNCTION ${TYPE_CAP[$k]}2${TYPE_CAP[$v]}Function\n"\ "#define MAP ${TYPE_CAP[$k]}2${TYPE_CAP[$v]}Map\n"\ "#define SORTED_MAP ${TYPE_CAP[$k]}2${TYPE_CAP[$v]}SortedMap\n"\ "#if KEYS_REFERENCE\n"\ "#define STD_SORTED_MAP SortedMap\n\n"\ "#define STRATEGY Strategy\n\n"\ "#else\n"\ "#define STD_SORTED_MAP ${TYPE_CAP[$k]}2${TYPE_CAP[$v]}SortedMap\n\n"\ "#define STRATEGY PACKAGE.${TYPE_CAP[$k]}Hash.Strategy\n\n"\ "#endif\n"\ "#define LIST ${TYPE_CAP[$k]}List\n\n"\ "#define BIG_LIST ${TYPE_CAP[$k]}BigList\n\n"\ "#define STACK ${TYPE_STD[$k]}Stack\n\n"\ "#define PRIORITY_QUEUE ${TYPE_STD[$k]}PriorityQueue\n\n"\ "#define INDIRECT_PRIORITY_QUEUE ${TYPE_STD[$k]}IndirectPriorityQueue\n\n"\ "#define INDIRECT_DOUBLE_PRIORITY_QUEUE ${TYPE_STD[$k]}IndirectDoublePriorityQueue\n\n"\ "#define KEY_ITERATOR ${TYPE_CAP2[$k]}Iterator\n\n"\ "#define KEY_ITERABLE ${TYPE_CAP2[$k]}Iterable\n\n"\ "#define KEY_BIDI_ITERATOR ${TYPE_CAP2[$k]}BidirectionalIterator\n\n"\ "#define KEY_LIST_ITERATOR ${TYPE_CAP2[$k]}ListIterator\n\n"\ 
"#define KEY_BIG_LIST_ITERATOR ${TYPE_CAP2[$k]}BigListIterator\n\n"\ "#define STD_KEY_ITERATOR ${TYPE_STD[$k]}Iterator\n\n"\ "#define KEY_COMPARATOR ${TYPE_STD[$k]}Comparator\n\n"\ \ \ "/* Interfaces (values) */\n"\ \ \ "#define VALUE_COLLECTION ${TYPE_CAP[$v]}Collection\n\n"\ "#define VALUE_ARRAY_SET ${TYPE_CAP[$v]}ArraySet\n\n"\ "#define VALUE_ITERATOR ${TYPE_CAP2[$v]}Iterator\n\n"\ "#define VALUE_LIST_ITERATOR ${TYPE_CAP2[$v]}ListIterator\n\n"\ \ \ "/* Abstract implementations (keys) */\n"\ \ \ "#define ABSTRACT_COLLECTION Abstract${TYPE_CAP[$k]}Collection\n\n"\ "#define ABSTRACT_SET Abstract${TYPE_CAP[$k]}Set\n\n"\ "#define ABSTRACT_SORTED_SET Abstract${TYPE_CAP[$k]}SortedSet\n"\ "#define ABSTRACT_FUNCTION Abstract${TYPE_CAP[$k]}2${TYPE_CAP[$v]}Function\n"\ "#define ABSTRACT_MAP Abstract${TYPE_CAP[$k]}2${TYPE_CAP[$v]}Map\n"\ "#define ABSTRACT_FUNCTION Abstract${TYPE_CAP[$k]}2${TYPE_CAP[$v]}Function\n"\ "#define ABSTRACT_SORTED_MAP Abstract${TYPE_CAP[$k]}2${TYPE_CAP[$v]}SortedMap\n"\ "#define ABSTRACT_LIST Abstract${TYPE_CAP[$k]}List\n\n"\ "#define ABSTRACT_BIG_LIST Abstract${TYPE_CAP[$k]}BigList\n\n"\ "#define SUBLIST ${TYPE_CAP[$k]}SubList\n\n"\ "#define ABSTRACT_PRIORITY_QUEUE Abstract${TYPE_STD[$k]}PriorityQueue\n\n"\ "#define ABSTRACT_STACK Abstract${TYPE_STD[$k]}Stack\n\n"\ "#define KEY_ABSTRACT_ITERATOR Abstract${TYPE_CAP2[$k]}Iterator\n\n"\ "#define KEY_ABSTRACT_BIDI_ITERATOR Abstract${TYPE_CAP2[$k]}BidirectionalIterator\n\n"\ "#define KEY_ABSTRACT_LIST_ITERATOR Abstract${TYPE_CAP2[$k]}ListIterator\n\n"\ "#define KEY_ABSTRACT_BIG_LIST_ITERATOR Abstract${TYPE_CAP2[$k]}BigListIterator\n\n"\ "#if KEY_CLASS_Object\n"\ "#define KEY_ABSTRACT_COMPARATOR Comparator\n\n"\ "#else\n"\ "#define KEY_ABSTRACT_COMPARATOR Abstract${TYPE_CAP[$k]}Comparator\n\n"\ "#endif\n"\ \ \ "/* Abstract implementations (values) */\n"\ \ \ "#define VALUE_ABSTRACT_COLLECTION Abstract${TYPE_CAP[$v]}Collection\n\n"\ "#define VALUE_ABSTRACT_ITERATOR Abstract${TYPE_CAP2[$v]}Iterator\n\n"\ "#define VALUE_ABSTRACT_BIDI_ITERATOR Abstract${TYPE_CAP2[$v]}BidirectionalIterator\n\n"\ \ \ "/* Static containers (keys) */\n"\ \ \ "#define COLLECTIONS ${TYPE_CAP[$k]}Collections\n\n"\ "#define SETS ${TYPE_CAP[$k]}Sets\n\n"\ "#define SORTED_SETS ${TYPE_CAP[$k]}SortedSets\n\n"\ "#define LISTS ${TYPE_CAP[$k]}Lists\n\n"\ "#define BIG_LISTS ${TYPE_CAP[$k]}BigLists\n\n"\ "#define MAPS ${TYPE_CAP[$k]}2${TYPE_CAP[$v]}Maps\n"\ "#define FUNCTIONS ${TYPE_CAP[$k]}2${TYPE_CAP[$v]}Functions\n"\ "#define SORTED_MAPS ${TYPE_CAP[$k]}2${TYPE_CAP[$v]}SortedMaps\n"\ "#define PRIORITY_QUEUES ${TYPE_CAP2[$k]}PriorityQueues\n\n"\ "#define HEAPS ${TYPE_CAP2[$k]}Heaps\n\n"\ "#define SEMI_INDIRECT_HEAPS ${TYPE_CAP2[$k]}SemiIndirectHeaps\n\n"\ "#define INDIRECT_HEAPS ${TYPE_CAP2[$k]}IndirectHeaps\n\n"\ "#define ARRAYS ${TYPE_CAP2[$k]}Arrays\n\n"\ "#define BIG_ARRAYS ${TYPE_CAP2[$k]}BigArrays\n\n"\ "#define ITERATORS ${TYPE_CAP2[$k]}Iterators\n\n"\ "#define BIG_LIST_ITERATORS ${TYPE_CAP2[$k]}BigListIterators\n\n"\ "#define COMPARATORS ${TYPE_CAP2[$k]}Comparators\n\n"\ \ \ "/* Static containers (values) */\n"\ \ \ "#define VALUE_COLLECTIONS ${TYPE_CAP[$v]}Collections\n\n"\ "#define VALUE_SETS ${TYPE_CAP[$v]}Sets\n\n"\ "#define VALUE_ARRAYS ${TYPE_CAP2[$v]}Arrays\n\n"\ \ \ "/* Implementations */\n"\ \ \ "#define OPEN_HASH_SET ${TYPE_CAP[$k]}${Linked}Open${Custom}HashSet\n\n"\ "#define OPEN_HASH_BIG_SET ${TYPE_CAP[$k]}${Linked}Open${Custom}HashBigSet\n\n"\ "#define OPEN_DOUBLE_HASH_SET ${TYPE_CAP[$k]}${Linked}Open${Custom}DoubleHashSet\n\n"\ "#define 
OPEN_HASH_MAP ${TYPE_CAP[$k]}2${TYPE_CAP[$v]}${Linked}Open${Custom}HashMap\n\n"\ "#define OPEN_HASH_BIG_MAP ${TYPE_CAP[$k]}2${TYPE_CAP[$v]}${Linked}Open${Custom}HashBigMap\n\n"\ "#define STRIPED_OPEN_HASH_MAP Striped${TYPE_CAP[$k]}2${TYPE_CAP[$v]}Open${Custom}HashMap\n\n"\ "#define OPEN_DOUBLE_HASH_MAP ${TYPE_CAP[$k]}2${TYPE_CAP[$v]}${Linked}Open${Custom}DoubleHashMap\n\n"\ "#define ARRAY_SET ${TYPE_CAP[$k]}ArraySet\n\n"\ "#define ARRAY_MAP ${TYPE_CAP[$k]}2${TYPE_CAP[$v]}ArrayMap\n\n"\ "#define LINKED_OPEN_HASH_SET ${TYPE_CAP[$k]}LinkedOpenHashSet\n\n"\ "#define AVL_TREE_SET ${TYPE_CAP[$k]}AVLTreeSet\n\n"\ "#define RB_TREE_SET ${TYPE_CAP[$k]}RBTreeSet\n\n"\ "#define AVL_TREE_MAP ${TYPE_CAP[$k]}2${TYPE_CAP[$v]}AVLTreeMap\n\n"\ "#define RB_TREE_MAP ${TYPE_CAP[$k]}2${TYPE_CAP[$v]}RBTreeMap\n\n"\ "#define ARRAY_LIST ${TYPE_CAP[$k]}ArrayList\n\n"\ "#define BIG_ARRAY_BIG_LIST ${TYPE_CAP[$k]}BigArrayBigList\n\n"\ "#define ARRAY_FRONT_CODED_LIST ${TYPE_CAP[$k]}ArrayFrontCodedList\n\n"\ "#define HEAP_PRIORITY_QUEUE ${TYPE_CAP2[$k]}HeapPriorityQueue\n\n"\ "#define HEAP_SEMI_INDIRECT_PRIORITY_QUEUE ${TYPE_CAP2[$k]}HeapSemiIndirectPriorityQueue\n\n"\ "#define HEAP_INDIRECT_PRIORITY_QUEUE ${TYPE_CAP2[$k]}HeapIndirectPriorityQueue\n\n"\ "#define HEAP_SESQUI_INDIRECT_DOUBLE_PRIORITY_QUEUE ${TYPE_CAP2[$k]}HeapSesquiIndirectDoublePriorityQueue\n\n"\ "#define HEAP_INDIRECT_DOUBLE_PRIORITY_QUEUE ${TYPE_CAP2[$k]}HeapIndirectDoublePriorityQueue\n\n"\ "#define ARRAY_FIFO_QUEUE ${TYPE_CAP2[$k]}ArrayFIFOQueue\n\n"\ "#define ARRAY_PRIORITY_QUEUE ${TYPE_CAP2[$k]}ArrayPriorityQueue\n\n"\ "#define ARRAY_INDIRECT_PRIORITY_QUEUE ${TYPE_CAP2[$k]}ArrayIndirectPriorityQueue\n\n"\ "#define ARRAY_INDIRECT_DOUBLE_PRIORITY_QUEUE ${TYPE_CAP2[$k]}ArrayIndirectDoublePriorityQueue\n\n"\ \ \ "/* Synchronized wrappers */\n"\ \ \ "#define SYNCHRONIZED_COLLECTION Synchronized${TYPE_CAP[$k]}Collection\n\n"\ "#define SYNCHRONIZED_SET Synchronized${TYPE_CAP[$k]}Set\n\n"\ "#define SYNCHRONIZED_SORTED_SET Synchronized${TYPE_CAP[$k]}SortedSet\n\n"\ "#define SYNCHRONIZED_FUNCTION Synchronized${TYPE_CAP[$k]}2${TYPE_CAP[$v]}Function\n\n"\ "#define SYNCHRONIZED_MAP Synchronized${TYPE_CAP[$k]}2${TYPE_CAP[$v]}Map\n\n"\ "#define SYNCHRONIZED_LIST Synchronized${TYPE_CAP[$k]}List\n\n"\ \ \ "/* Unmodifiable wrappers */\n"\ \ \ "#define UNMODIFIABLE_COLLECTION Unmodifiable${TYPE_CAP[$k]}Collection\n\n"\ "#define UNMODIFIABLE_SET Unmodifiable${TYPE_CAP[$k]}Set\n\n"\ "#define UNMODIFIABLE_SORTED_SET Unmodifiable${TYPE_CAP[$k]}SortedSet\n\n"\ "#define UNMODIFIABLE_FUNCTION Unmodifiable${TYPE_CAP[$k]}2${TYPE_CAP[$v]}Function\n\n"\ "#define UNMODIFIABLE_MAP Unmodifiable${TYPE_CAP[$k]}2${TYPE_CAP[$v]}Map\n\n"\ "#define UNMODIFIABLE_LIST Unmodifiable${TYPE_CAP[$k]}List\n\n"\ "#define UNMODIFIABLE_KEY_ITERATOR Unmodifiable${TYPE_CAP[$k]}Iterator\n\n"\ "#define UNMODIFIABLE_KEY_BIDI_ITERATOR Unmodifiable${TYPE_CAP[$k]}BidirectionalIterator\n\n"\ "#define UNMODIFIABLE_KEY_LIST_ITERATOR Unmodifiable${TYPE_CAP[$k]}ListIterator\n\n"\ \ \ "/* Other wrappers */\n"\ \ \ "#define KEY_READER_WRAPPER ${TYPE_CAP[$k]}ReaderWrapper\n\n"\ "#define KEY_DATA_INPUT_WRAPPER ${TYPE_CAP[$k]}DataInputWrapper\n\n"\ \ \ "/* Methods (keys) */\n"\ \ \ "#define NEXT_KEY next${TYPE_STD[$k]}\n"\ "#define PREV_KEY previous${TYPE_STD[$k]}\n"\ "#define FIRST_KEY first${TYPE_STD[$k]}Key\n"\ "#define LAST_KEY last${TYPE_STD[$k]}Key\n"\ "#define GET_KEY get${TYPE_STD[$k]}\n"\ "#define REMOVE_KEY remove${TYPE_STD[$k]}\n"\ "#define READ_KEY read${TYPE_CAP2[$k]}\n"\ "#define WRITE_KEY 
write${TYPE_CAP2[$k]}\n"\ "#define DEQUEUE dequeue${TYPE_STD[$k]}\n"\ "#define DEQUEUE_LAST dequeueLast${TYPE_STD[$k]}\n"\ "#define SUBLIST_METHOD ${TYPE_LC[$k]}SubList\n"\ "#define SINGLETON_METHOD ${TYPE_LC[$k]}Singleton\n\n"\ "#define FIRST first${TYPE_STD[$k]}\n"\ "#define LAST last${TYPE_STD[$k]}\n"\ "#define TOP top${TYPE_STD[$k]}\n"\ "#define PEEK peek${TYPE_STD[$k]}\n"\ "#define POP pop${TYPE_STD[$k]}\n"\ "#define KEY_ITERATOR_METHOD ${TYPE_LC2[$k]}Iterator\n\n"\ "#define KEY_LIST_ITERATOR_METHOD ${TYPE_LC2[$k]}ListIterator\n\n"\ "#define KEY_EMPTY_ITERATOR_METHOD empty${TYPE_CAP2[$k]}Iterator\n\n"\ "#define AS_KEY_ITERATOR as${TYPE_CAP2[$k]}Iterator\n\n"\ "#define AS_KEY_ITERABLE as${TYPE_CAP2[$k]}Iterable\n\n"\ "#define TO_KEY_ARRAY to${TYPE_STD[$k]}Array\n"\ "#define ENTRY_GET_KEY get${TYPE_STD[$k]}Key\n"\ "#define REMOVE_FIRST_KEY removeFirst${TYPE_STD[$k]}\n"\ "#define REMOVE_LAST_KEY removeLast${TYPE_STD[$k]}\n"\ "#define PARSE_KEY parse${TYPE_STD[$k]}\n"\ "#define LOAD_KEYS load${TYPE_STD[$k]}s\n"\ "#define LOAD_KEYS_BIG load${TYPE_STD[$k]}sBig\n"\ "#define STORE_KEYS store${TYPE_STD[$k]}s\n"\ \ \ "/* Methods (values) */\n"\ \ \ "#define NEXT_VALUE next${TYPE_STD[$v]}\n"\ "#define PREV_VALUE previous${TYPE_STD[$v]}\n"\ "#define READ_VALUE read${TYPE_CAP2[$v]}\n"\ "#define WRITE_VALUE write${TYPE_CAP2[$v]}\n"\ "#define VALUE_ITERATOR_METHOD ${TYPE_LC2[$v]}Iterator\n\n"\ "#define ENTRY_GET_VALUE get${TYPE_STD[$v]}Value\n"\ "#define REMOVE_FIRST_VALUE removeFirst${TYPE_STD[$v]}\n"\ "#define REMOVE_LAST_VALUE removeLast${TYPE_STD[$v]}\n"\ \ \ "/* Methods (keys/values) */\n"\ \ \ "#define ENTRYSET ${TYPE_LC[$k]}2${TYPE_CAP[$v]}EntrySet\n"\ \ \ "/* Methods that have special names depending on keys (but the special names depend on values) */\n"\ \ \ "#if KEYS_REFERENCE\n"\ "#define GET_VALUE get${TYPE_STD[$v]}\n"\ "#define REMOVE_VALUE remove${TYPE_STD[$v]}\n"\ "#else\n"\ "#define GET_VALUE get\n"\ "#define REMOVE_VALUE remove\n"\ "#endif\n"\ \ \ \ "/* Equality */\n"\ \ \ \ "#define KEY_EQUALS_NOT_NULL_CAST(x,y) KEY_EQUALS_NOT_NULL(x,y)\n"\ "#define KEY2INTHASH_CAST(x) KEY2INTHASH(x)\n\n"\ "#if KEY_CLASS_Object\n"\ "#define KEY_EQUALS(x,y) ( (x) == null ? 
(y) == null : (x).equals(y) )\n"\ "#define KEY_EQUALS_NOT_NULL(x,y) ( (x).equals(y) )\n"\ "#define KEY_IS_NULL(x) ( (x) == null )\n"\ "#elif KEY_CLASS_Float\n"\ "#define KEY_EQUALS(x,y) ( Float.floatToIntBits(x) == Float.floatToIntBits(y) )\n"\ "#define KEY_EQUALS_NOT_NULL(x,y) ( Float.floatToIntBits(x) == Float.floatToIntBits(y) )\n"\ "#define KEY_IS_NULL(x) ( Float.floatToIntBits(x) == 0 )\n"\ "#elif KEY_CLASS_Double\n"\ "#define KEY_EQUALS(x,y) ( Double.doubleToLongBits(x) == Double.doubleToLongBits(y) )\n"\ "#define KEY_EQUALS_NOT_NULL(x,y) ( Double.doubleToLongBits(x) == Double.doubleToLongBits(y) )\n"\ "#define KEY_IS_NULL(x) ( Double.doubleToLongBits(x) == 0 )\n"\ "#else\n"\ "#define KEY_EQUALS(x,y) ( (x) == (y) )\n"\ "#define KEY_EQUALS_NOT_NULL(x,y) ( (x) == (y) )\n"\ "#define KEY_IS_NULL(x) ( (x) == KEY_NULL )\n"\ "#endif\n\n"\ \ "#ifdef Custom\n"\ "#undef KEY_EQUALS\n"\ "#define KEY_EQUALS(x,y) ( strategy.equals( (x), (y) ) )\n"\ "#undef KEY_EQUALS_NOT_NULL\n"\ "#define KEY_EQUALS_NOT_NULL(x,y) ( strategy.equals( (x), (y) ) )\n"\ "#undef KEY_EQUALS_NOT_NULL_CAST\n"\ "#define KEY_EQUALS_NOT_NULL_CAST(x,y) ( strategy.equals( " KEY_GENERIC_CAST "(x), (y) ) )\n"\ "#define KEY_EQUALS_NULL(x) ( strategy.equals( (x), KEY_NULL ) )\n"\ "#else\n"\ "#define KEY_EQUALS_NULL(x) KEY_IS_NULL(x)\n"\ "#endif\n\n"\ \ "#if VALUE_CLASS_Object\n"\ "#define VALUE_EQUALS(x,y) ( (x) == null ? (y) == null : (x).equals(y) )\n"\ "#else\n"\ "#define VALUE_EQUALS(x,y) ( (x) == (y) )\n"\ "#endif\n\n"\ \ \ \ "/* Object/Reference-only definitions (keys) */\n"\ \ \ "#if KEYS_REFERENCE\n"\ \ "#define REMOVE remove\n"\ \ "#define KEY_OBJ2TYPE(x) (x)\n"\ "#define KEY_CLASS2TYPE(x) (x)\n"\ "#define KEY2OBJ(x) (x)\n"\ \ "#ifdef Custom\n"\ "#define KEY2JAVAHASH_NOT_NULL(x) ( strategy.hashCode(x) )\n"\ "#define KEY2INTHASH(x) ( it.unimi.dsi.fastutil.HashCommon.mix( strategy.hashCode(x) ) )\n"\ "#undef KEY2INTHASH_CAST\n"\ "#define KEY2INTHASH_CAST(x) ( it.unimi.dsi.fastutil.HashCommon.mix( strategy.hashCode( " KEY_GENERIC_CAST " x) ) )\n"\ "#define KEY2LONGHASH(x) ( it.unimi.dsi.fastutil.HashCommon.mix( (long)( strategy.hashCode(x)) ) ) )\n"\ "#elif KEY_CLASS_Object\n"\ "#define KEY2JAVAHASH_NOT_NULL(x) ( (x).hashCode() )\n"\ "#define KEY2JAVAHASH(x) ( (x) == null ? 
0 : (x).hashCode() )\n"\ "#define KEY2INTHASH(x) ( it.unimi.dsi.fastutil.HashCommon.mix( (x).hashCode() ) )\n"\ "#define KEY2LONGHASH(x) ( it.unimi.dsi.fastutil.HashCommon.mix( (long)( (x).hashCode() ) ) )\n"\ "#else\n"\ "#define KEY2JAVAHASH_NOT_NULL(x) ( System.identityHashCode(x) )\n"\ "#define KEY2INTHASH(x) ( it.unimi.dsi.fastutil.HashCommon.mix( System.identityHashCode(x) ) )\n"\ "#define KEY2LONGHASH(x) ( it.unimi.dsi.fastutil.HashCommon.mix( (long)( System.identityHashCode(x) ) ) )\n"\ "#endif\n"\ \ "#define KEY_CMP(x,y) ( ((Comparable)(x)).compareTo(y) )\n"\ "#define KEY_CMP_EQ(x,y) ( ((Comparable)(x)).compareTo(y) == 0 )\n"\ "#define KEY_LESS(x,y) ( ((Comparable)(x)).compareTo(y) < 0 )\n"\ "#define KEY_LESSEQ(x,y) ( ((Comparable)(x)).compareTo(y) <= 0 )\n"\ \ "#define KEY_NULL (null)\n"\ \ \ "#else\n"\ \ \ "/* Primitive-type-only definitions (keys) */\n"\ \ \ "#define REMOVE rem\n"\ \ "#define KEY_CLASS2TYPE(x) ((x).KEY_VALUE())\n"\ "#define KEY_OBJ2TYPE(x) (KEY_CLASS2TYPE((KEY_CLASS)(x)))\n"\ "#define KEY2OBJ(x) (KEY_CLASS.valueOf(x))\n"\ \ "#if KEY_CLASS_Boolean\n"\ "#define KEY_CMP_EQ(x,y) ( (x) == (y) )\n"\ "#define KEY_NULL (false)\n"\ "#define KEY_CMP(x,y) ( KEY_CLASS.compare((x),(y)) )\n"\ "#define KEY_LESS(x,y) ( !(x) && (y) )\n"\ "#define KEY_LESSEQ(x,y) ( !(x) || (y) )\n"\ "#else\n"\ "#if KEY_CLASS_Byte || KEY_CLASS_Short || KEY_CLASS_Character\n"\ "#define KEY_NULL ((KEY_TYPE)0)\n"\ "#else\n"\ "#define KEY_NULL (0)\n"\ "#endif\n"\ "#if KEY_CLASS_Float || KEY_CLASS_Double\n"\ "#define KEY_CMP_EQ(x,y) ( KEY_CLASS.compare((x),(y)) == 0 )\n"\ "#define KEY_CMP(x,y) ( KEY_CLASS.compare((x),(y)) )\n"\ "#define KEY_LESS(x,y) ( KEY_CLASS.compare((x),(y)) < 0 )\n"\ "#define KEY_LESSEQ(x,y) ( KEY_CLASS.compare((x),(y)) <= 0 )\n"\ "#else\n"\ "#define KEY_CMP_EQ(x,y) ( (x) == (y) )\n"\ "#define KEY_CMP(x,y) ( KEY_CLASS.compare((x),(y)) )\n"\ "#define KEY_LESS(x,y) ( (x) < (y) )\n"\ "#define KEY_LESSEQ(x,y) ( (x) <= (y) )\n"\ "#endif\n"\ \ "#if KEY_CLASS_Float\n"\ "#define KEY2LEXINT(x) fixFloat(x)\n"\ "#elif KEY_CLASS_Double\n"\ "#define KEY2LEXINT(x) fixDouble(x)\n"\ "#else\n"\ "#define KEY2LEXINT(x) (x)\n"\ "#endif\n"\ \ "#endif\n"\ \ "#ifdef Custom\n"\ "#define KEY2JAVAHASH_NOT_NULL(x) ( strategy.hashCode(x) )\n"\ "#define KEY2INTHASH(x) ( it.unimi.dsi.fastutil.HashCommon.mix( strategy.hashCode(x) ) )\n"\ "#define KEY2LONGHASH(x) ( it.unimi.dsi.fastutil.HashCommon.mix( (long)( strategy.hashCode(x) ) ) )\n"\ "#else\n"\ \ "#if KEY_CLASS_Float\n"\ "#define KEY2JAVAHASH_NOT_NULL(x) it.unimi.dsi.fastutil.HashCommon.float2int(x)\n"\ "#define KEY2INTHASH(x) it.unimi.dsi.fastutil.HashCommon.mix( it.unimi.dsi.fastutil.HashCommon.float2int(x) )\n"\ "#define KEY2LONGHASH(x) it.unimi.dsi.fastutil.HashCommon.mix( (long)( it.unimi.dsi.fastutil.HashCommon.float2int(x) ) )\n"\ "#define INT(x) (x)\n"\ "#elif KEY_CLASS_Double\n"\ "#define KEY2JAVAHASH_NOT_NULL(x) it.unimi.dsi.fastutil.HashCommon.double2int(x)\n"\ "#define KEY2INTHASH(x) (int)it.unimi.dsi.fastutil.HashCommon.mix( Double.doubleToRawLongBits(x) )\n"\ "#define KEY2LONGHASH(x) it.unimi.dsi.fastutil.HashCommon.mix( Double.doubleToRawLongBits(x) )\n"\ "#define INT(x) (int)(x)\n"\ "#elif KEY_CLASS_Long\n"\ "#define KEY2JAVAHASH_NOT_NULL(x) it.unimi.dsi.fastutil.HashCommon.long2int(x)\n"\ "#define KEY2INTHASH(x) (int)it.unimi.dsi.fastutil.HashCommon.mix( (x) )\n"\ "#define KEY2LONGHASH(x) it.unimi.dsi.fastutil.HashCommon.mix( (x) )\n"\ "#define INT(x) (int)(x)\n"\ "#elif KEY_CLASS_Boolean\n"\ "#define KEY2JAVAHASH_NOT_NULL(x) ((x) ? 
1231 : 1237)\n"\ "#define KEY2INTHASH(x) ((x) ? 0xfab5368 : 0xcba05e7b)\n"\ "#define KEY2LONGHASH(x) ((x) ? 0x74a19fc8b6428188L : 0xbaeca2031a4fd9ecL)\n"\ "#else\n"\ "#define KEY2JAVAHASH_NOT_NULL(x) (x)\n"\ "#define KEY2INTHASH(x) ( it.unimi.dsi.fastutil.HashCommon.mix( (x) ) )\n"\ "#define KEY2LONGHASH(x) ( it.unimi.dsi.fastutil.HashCommon.mix( (long)( (x) ) ) )\n"\ "#define INT(x) (x)\n"\ "#endif\n"\ "#endif\n"\ \ "#endif\n"\ \ "#ifndef KEY2JAVAHASH\n"\ "#define KEY2JAVAHASH(x) KEY2JAVAHASH_NOT_NULL(x)\n"\ "#endif\n"\ \ \ \ "/* Object/Reference-only definitions (values) */\n"\ \ \ "#if VALUES_REFERENCE\n"\ "#define VALUE_OBJ2TYPE(x) (x)\n"\ "#define VALUE_CLASS2TYPE(x) (x)\n"\ "#define VALUE2OBJ(x) (x)\n"\ \ "#if VALUE_CLASS_Object\n"\ "#define VALUE2JAVAHASH(x) ( (x) == null ? 0 : (x).hashCode() )\n"\ "#else\n"\ "#define VALUE2JAVAHASH(x) ( (x) == null ? 0 : System.identityHashCode(x) )\n"\ "#endif\n"\ \ "#define VALUE_NULL (null)\n"\ "#define OBJECT_DEFAULT_RETURN_VALUE (this.defRetValue)\n"\ \ "#else\n"\ \ \ "/* Primitive-type-only definitions (values) */\n"\ \ \ "#define VALUE_CLASS2TYPE(x) ((x).VALUE_VALUE())\n"\ "#define VALUE_OBJ2TYPE(x) (VALUE_CLASS2TYPE((VALUE_CLASS)(x)))\n"\ "#define VALUE2OBJ(x) (VALUE_CLASS.valueOf(x))\n"\ \ "#if VALUE_CLASS_Float || VALUE_CLASS_Double || VALUE_CLASS_Long\n"\ "#define VALUE_NULL (0)\n"\ "#define VALUE2JAVAHASH(x) it.unimi.dsi.fastutil.HashCommon.${TYPE[$v]}2int(x)\n"\ "#elif VALUE_CLASS_Boolean\n"\ "#define VALUE_NULL (false)\n"\ "#define VALUE2JAVAHASH(x) (x ? 1231 : 1237)\n"\ "#else\n"\ "#if VALUE_CLASS_Integer\n"\ "#define VALUE_NULL (0)\n"\ "#else\n"\ "#define VALUE_NULL ((VALUE_TYPE)0)\n"\ "#endif\n"\ "#define VALUE2JAVAHASH(x) (x)\n"\ "#endif\n"\ \ "#define OBJECT_DEFAULT_RETURN_VALUE (null)\n"\ \ "#endif\n"\ \ "#include \"$1\"\n" fastutil-7.1.0/CHANGES0000664000000000000000000011550413050704125013050 0ustar rootroot7.1.0 - Fixed decade-old efficiency bug. Due to a name clash between lists and sets, the type-specific deletion method of a type-specific collection is rem(), not remove(). The latter is reinstated in sets (but not, for example, in lists) by the type-specific abstract set classes. Nonetheless, implementors of subclasses must override rem(), not remove(), as methods such as the type-specific version of removeAll() invoke necessarily invoke rem() rather than remove(). Up to this version, all concrete set implementations were overriding remove(), instead, causing inefficiencies in the inherited methods. Thanks to Christian Habermehl for reporting this bug. - Fixed a bug introduced with the removal of old-style gcc assertions: all load methods in BinIO that did not specify the number of elements to read were computing the number of items in the loaded file incorrectly, causing an EOFException (except for booleans and bytes). 7.0.13 - Fixed inheritance problem that would surface as key sets of maps not implementing remove(). Thanks to Luke Nezda for reporting this bug. 7.0.12 - Collection.isEmpty() was checking for iterator().hasNext() instead of the opposite. Thanks to Olaf Krische for reporting this bug. - Fixed lack of test for null/wrong class when testing entries. 7.0.11 - Several small glitches that were making fastutil's classes behave differently from those of java.util have been fixed. Thanks to Balázs Attila-Mihály or reporting these bug (obtained by massive testing using Guava's battery of unit tests). 7.0.10 - The infinite-loop bug was affecting trim(int) besides trim(). Thanks to Igor Kabiljo for reporting this bug. 
- With the help of Erich Schubert, all methods with a type-specific, more efficient counterpart have been deprecated. 7.0.9 - A subtle infinite-loop bug in hash-based structures (happening with load factor 1 and tiny structures) has been fixed. Thanks to Tuomas Välimäki and Jarkko Mönkkönen for reporting this bug. - Now tree-based map have an addTo() method analogous to that of hash-based maps. Thanks to Almog Gavra for implementing the method. 7.0.8 - Non-indirect priority queues are now serializable. - Fixed implementation of structures based on a custom hash: keys strategy-equal to zero zero would not be managed correctly. Thanks to Shawn Cao for reporting this bug. - Natural/opposite/abstract comparators are now serializable. 7.0.7 - Now we check whether ranges of parallel sorting algorithms are too small *before* creating the thread pool. - Merged Erich Schubert's fix for Object{AVL,RB}TreeSet.get(). 7.0.6 - Faster priority queues: better variable caching, deleted a spurious check, tests for parameters turned into assertions. - New collection-based constructors for heap-based priority queues. - Reviewed ObjectArrays.newArray() so that there is a fast track for reallocation of arrays of type Object[]. 7.0.4 - Fixed old-standing bug: iterators in linked maps would return bogus data on entrySet().next()/entrySet().previous() when no element is available instead of throwing an exception. 7.0.3 - Fixed wrong generation of custom-hash classes with primitive keys. Thanks to Michael Henke for reporting this bug. 7.0.2 - Now we shutdown() correctly ForkJoinPool's. - Constants limiting parallelism and recursion have been tuned. - New implementations of indirect [parallel] quicksort (in ascending order only). - New stabilization method for post-processing of non-stable indirect sorts. 7.0.1 - Now generated sources are formatted using the Eclipse command-line facility. 7.0.0 - Now we need Java 7. - New parallel versions of radix sort and quicksort. The sequential implementations have been further improved. - Restored the previous constants in mixing functions. 6.6.4 - Hopefully better mixing functions created by a genetic algorithm. - Fixed a bug in floating-point hash-based containers: -0.0 and +0.0 were both converted to +0.0. Thanks to Dawid Weiss for reporting this bug. 6.6.3 - Fixed subtle wrap-around bug in removal from iterator. Thanks to Eugene Yakavets for reporting this bug. 6.6.2 - We now reduce backing arrays of hash-based classes when they are filled below one fourth of the load factor. The reduction is not performed when deleting from an iterator, as it would make iteration impossible. - Significant simplification of Iterator.remove()'s implementations for hash-based data structures. 6.6.1 - Fixed missed implementation: setValue() was not implemented for fast iterators in hash-based maps. 6.6.0 - Major (transparent) rewrite of all hash-based classes inspired by the Goldman-Sachs collections. We no longer allocate a byte array to store the status of each slot: a null (or zero) key denotes an empty slot. The null key is handled separately. The reduction in memory accesses makes the cost of the additional logic negligible, and brings in significant performance improvements. The code is actually simplified, as all loops become a search for a nonzero element. - Partial (one-step) unrolling of all lookup loops, following the strategy used in Koloboke. - Fixed an old bug: entrySet().remove(Entry) would remove entries checking the value of the key, only. 
- Fixed a bug in the iterator over hash big sets. - OSGI metadata, thanks to Benson Margulies. 6.5.17 - Now TextIO methods trim strings before parsing numbers. This avoids obnoxious exceptions when numbers are followed by whitespace. 6.5.16 - Improved speed of FastMultiByteArrayInputStream, and removed support for mark()/reset(). - Deprecated array fill() methods in favour of java.util's. 6.5.15 - De-deprecated quicksort methods for primitive-type arrays. It turned out that Java's Arrays.sort() switches to mergesort on large, semi-sorted arrays. Moreover, in Java 7 the support array is allocated of the same size of the argument array, not of the sorted fragment. This performance bug was entirely killing the performance of Transform.transposeOffline() and other methods. Until that bug is fixed, we will have to rely on our quicksort method (which is a pity, because Java's sort is, for the rest, so beautifully engineered). 6.5.14 - Equality in type-specific hash-based data structures with float or double keys is now checked by converting to int/long bits using the conversion method of the appropriate class. Previously, using NaNs as keys would have led to misbehaviour. Thanks to Davide Savazzi for reporting this bug. 6.5.13 - Fixed a very unlikely corner case that might have led to reduction in size of an array instead of a growth. Thanks to Ernst Reissner for reporting this bug. - InspectableFileCachedInputStream no longer performs a call to RandomAccessFile.position() when the end of file has been reached and the file is entirely held in memory. - All front-coded lists now implement java.util.RandomAccess. 6.5.12 - Removed some useless wrapper creation in a few methods of tree-based map classes. - Fixed pathological maxFill computation for very small-sized big open hash sets. 6.5.11 - A very old and subtle performance bug in hash-based data structures has been fixed. Backing arrays were allocated using the number of expected elements divided by the load factor. However, since the test for rehashing was fired by equality with the table size multiplied by the load factor, if the expected number of elements multiplied by the load factor was an integer a useless rehash would happen for the very last added element. The only effect was an useless increase in object creation. 6.5.10 - Now iterators in object set constructors are of type Iterator, and not anymore ObjectIterator. The kind of allowed iterators has been rationalised and made uniform through all classes implementing Set. 6.5.9 - New methods to get a type-specific Iterable from binary or text files. 6.5.8 - Fixed stupid bug in creation of array-based FIFO queues. 6.5.7 - Fixed a very subtle bug in hash-based data structures: addAll() to a newly created structure could require a very long time due to correlation between the positions in structures with different table sizes. 6.5.6 - equals() method between arrays have been deprecated in favour of the java.util.Arrays version, which is intrinsified in recent JVMs. - InspectableFileCachedInputStream.reopen() makes it possible to read again from the start an instance on which close() was invoked. 6.5.5 - The abstract implementation of equals() between (big) lists now uses type-specific access methods (as the compareTo() method was already doing) to avoid massive boxing/unboxing. Thanks to Adrien Grand for suggesting this improvement. - FIFO array-based queues are now serializable. 6.5.4 - Further fixes related to NaNs in sorting. - Fixed very old bug in FastByteArrayOutputStream.write(int). 
Thanks to Massimo Santini for reporting this bug. - We now use Arrays.MAX_ARRAY_SIZE, which is equal to Integer.MAX_VALUE minus 8, to bound all array allocations. Previously, it might happen that grow() and other array-related functions could try to allocate an array of size Integer.MAX_VALUE, which is technically correct from the JLS, but will not work on most JVMs. The maximum length we use now is the same value as that used by java.util.ArrayList. Thanks to William Harvey for suggesting this change. 6.5.3 - Corrected erroneous introduction of compare() methods on integral classes (they appeared in Java 7). 6.5.2 - A few changes were necessary to make fastutil behave as Java on NaNs when sorting. Double.compareTo() and Float.compareTo() treat Double.NaN as greater than Double.POSITIVE_INFINITY, and fastutil was not doing it. As part of the change, now all comparisons between primitive types are performed using the compare() method of the wrapper class (microbenchmarks confirmed that there is no speed penalty for that, probably due to inlining or even intrinsification). Thanks to Adam Klein for reporting this bug. - All quickSort() implementations that do not involve a comparator are now deprecated, as there are equivalent/better versions in java.util.Arrays. 6.5.0 -> 6.5.1 - Now FastBuffered{Input/Output}Stream has a constructor with an explicitly given buffer. - Abandoned golden-ratio based expansion of arrays and lists in favour of a (more standard) doubling approach. - Array-based FIFO queues now reduce their capacity automatically by halving when the size becomes one fourth of the length. - The add() method for open hash maps has been deprecated and replaced by addTo(), as the name choice proved to be a recipe for disaster. - New InspectableFileCachedInputStream for caching easily large byte streams partially on file and partially in memory. - The front() method for semi-indirect heaps took no comparator, but was used in queues in which you could support a comparator. There is now a further version accepting a comparator. - Serial Version UIDs are now private. 6.4.6 -> 6.5.0 - Fixed type of array hash strategies. - Fixed use of equals() instead of compareTo() in SemiIndirectHeaps.front(). Thanks to Matthew Hatem for reporting this bug. - Now we generate custom hash maps for primite types, too (as we were already doing for sets). 6.4.5 -> 6.4.6 - In array-based priority queues changed() would not invalidate the cached index of the smallest element. 6.4.4 -> 6.4.5 - In some very rare circumstances, enumeration of hash sets or maps combined with massive element removal (using the iterator remove() method) could have led to inconsistent enumeration (duplicates and missing elements). Thanks to Hamish Morgan for reporting this bug. 6.4.3 -> 6.4.4 - Array-based maps were not implementing correctly entrySet().contains(), and as a consequence equals() between such maps was broken. Thanks to Benson Margulies for reporting this bug. 6.4.2 -> 6.4.3 - Now array-based priority queue cache their first element. Moreover, they implement the correct type-specific interface. 6.4.1 -> 6.4.2 - Now we have indirect lexicographical radix sort on pairs of arrays, mainly used to compute quickly Kendall's tau. - New reverse method for arrays (useful for radix descending sorts). - Radix sort (one or two arrays) for big arrays. - Now radix sort uses correctly (minimally) sized support arrays when sorting subarrays. 6.4 -> 6.4.1 - Now we have a separate directory, settable in the makefile, to generate sources. 
This makes Maven integration easier. - The store methods in TextIO for big arrays were broken. - Now big-array lists implement the Stack interface. - Fixed subtle bug in rehash() methods of big hash sets. 6.3 -> 6.4 - WARNING: Indirect queues must obviously have a way to determine whether an index is in the queue. It was an oversight in the interface design that a contains() method was not present. We wook the risk of adding it now. At the same time, we modified remove() so that now returns a boolean specifying whether the index to be removed was actually in the queue, as this is more in line with the Java Collections Framework. - Removed unused double-priority queue related classes. - Now array-based sets and maps have a constructor based on java.util.Collection and java.util.Map (as for the other kind of sets and maps). - New doubly linked implementation for linked hash maps and sets. It uses twice the space for pointers, but mixes well with linear probing, so we have again constant-time true deletions. Moreover, iterators can be started from any key in constant time (albeit the first access to the index of the list iterator will require a linear scan, unless the iterator started from the first or the last key). Additional methods such as getAndMoveToFirst() make the creation of LRU caches very easy. Thanks to Brien Colwell for donating the code. - Now object-based array FIFO queues provide deque methods. Moreover, they clean up the backing array after returning an object or when performing a clear(). - New get() method in set implementations makes it possible to recover the actual object int the collection that is equal to the query key. - A number of bugs were found and fixed by Christian Falz (thanks!). In all binary search code the "to" parameter was *inclusive*, but the documentation said *exclusive*, with obvious problems. Hash map iterators could return under some very subtle and almost irreproducible circumstances a previously deleted slot. Deleted hash map entries would return spurious null values. 6.2.2 -> 6.3 - We now have radix sort. It's much faster than quicksort, but it can only sort keys in their natural order. There are multiple-array and indirect (and possibly stable) versions available. - There are now custom hash sets also for type-specific keys. This makes it possible to use hash sets to index data indirectly (e.g., using integer or long just as indices). - Shuffling static methods for all kinds of (big) list and arrays. 6.2.1 -> 6.2.2 - A new add() method makes the usage of maps as counters easier and faster. 6.2.0 -> 6.2.1 - A very stupid bug was causing twice the rehashing that was necessary. Now insertions in hash-based classes are significantly faster. 6.1.0 -> 6.2.0 - A better structure of the scan loop for hash tables borrowed from HPPC (http://labs.carrotsearch.com/hppc.html) gives some speed improvement to hash-based classes. 6.0.0 -> 6.1.0 - Hash-based classes have been rewritten using linear probing and a good hash (MurmurHash3). The old classes can be still generated using the target oldsources. - Bizarre queues (double- and sesqui-indirect) have been removed from the standard jar, but they can be still generated using the target oldsources. 5.1.5 -> 6.0.0 - WARNING: the jar file is now fastutil.jar (not fastutil5.jar), again. - WARNING: now fastutil requires Java 6+. - fastutil is now released under the Apache License 2.0. - New framework for big arrays, represented as arrays-of-arrays. 
BigArrays and the type-specific counterparts provide static methods of all kinds. - New Size64 interface for classes implementing big collections. - New framework for big lists--lists with longs as indices. The only present implementation uses big arrays, but, for instance, Sux4J's succinct lists will be retrofitted to LongBigList (presently they implement LongBigList from dsiutils, which will be deprecated). - List.iterator() now returns a ListIterator. There is no real reason not to do this, and the API change is handled from an implementation viewpoint in AbstractList, so nodoby should really notice. - New Collections.asCollection(Iterable) method to expose iterables as collections (missing methods are computed using the iterator). This was also the occasion to streamline type-specific abstract collections, which now inherit from java.util.AbstractCollection, so we support contains, clear, etc. methods as long as there is an iterator. - Fixed bugged array-based constructors of ArrayMap and ArraySet. - Fixed bugged put/remove methods in abstract functions. Thanks to Katja Filippova for reporting this bug. - New front-coded lists use big arrays, so they can store much more (in fact, unlimited) data. Unfortunately, they are no longer serialisation-compatible with previous versions. - New MeasurableStream interface that is implemented by MeasurableInputStream and by a new, analogous MeasurableOutputStream. - Better FastBufferedOutputStream and FastByteArrayOutputStream that are measurable and positionable. - Now all clone() methods override covariantly the defult return type (Object). 5.1.4 -> 5.1.5 - ArraySet was implementing isEmpty() with inverted logic (thanks to Marko Srdanovic for reporting this bug). - New constructor for FastMultiByteArrayInputStream: it takes a MeasurableInputStream and uses length() to determine the number of bytes to load into memory. 5.1.3 -> 5.1.4 - The implementation of RepositionableStream in FastByteArrayOutputStream was fraught with a horrendous bug (thanks to Claudio Corsi for reporting), in spite of extensive unit tests. 5.1.2 -> 5.1.3 - A bug existing since the first release was preventing tables larger than 2^30 bits to work (the computation of the next bucket to look at would cause an integer overflow). - FastByteArrayOutputStream now implements RepositionableStream. - Type-specific versions of Iterable. - Some methods (e.g., iterator() and values()) are now explicitly re-strengthened wherever necessary to avoid complaints about ambiguous method invocations by some compilers. - The introduction of functions added several bugs to the empty/singleton map classes. Inheriting from the respective function counterparts left several methods underspecified (equals(), etc.). This has been (hopefully) fixed. 5.1.1 -> 5.1.2 - FastBufferedInputStream now supportw length() by FileChannel-fetching on FileInputStream instances (it already used to support position() by the same mechanism). 5.1.0 -> 5.1.1 - Byte-array MG4J I/O classes have been moved here. 5.0.9 -> 5.1.0 - Fixed documentation for custom/noncustom maps (it was exchanged). - New type-specify entrySet() methods that avoid complicated casting to get a type-specific entryset. Moreover, now entrySet() can return an object implementing Fast(Sorted)EntrySet to indicate that a fastIterator() method is available. Fast iterators can return always the same Entry object, suitably mutated. We thank Daniel Ramage for suggesting this feature. 
- Several hundreds of new classes generated by the new Function interface, which represent mappings for which the entry set is not enumerable (e.g., hashes). Functions have their usual share of satellite objects (wrappers, etc.). There are no implementations--the main purpose of the new interfaces is to make Sux4J (http://sux.dsi.unimi.it/) more object-oriented. 5.0.8 -> 5.0.9 - Slightly reduced overhead for bound checks in heap-based queues. - BinIO was loading byte arrays one byte at a time. Now some conditionally compiled code uses bulk-read methods instead. Moreover, horrible kluges to work around Java bug #6478546 have been included. 5.0.7 -> 5.0.8 - Faster array maps and sets: System.arraycopy() is very slow on small arrays (due to inherent costs of calling native code) and reflection-based array creation is a disaster. Now we use object arrays and loops. - New clone() methods for array-based structures and custom serialisation. - FastBuffered*Stream has been simplified and streamlined. No more block alignment. 5.0.6 -> 5.0.7 - Better algorithm for front() in heaps. - New comprehensive collection of array-based maps and sets. The motivation behind such structures is the need for quick, low-footprint data structures for *very* small sets (say, less than 10 elements). For instance, in MG4J we were using sparse reference-based hash tables, but it turned out that System.identityHashCode() is *deadly* slow and scanning linearly an array searching for the desired element is significantly faster. 5.0.5 -> 5.0.6 - Due to erratic and unpredictable behaviour of InputStream.skip(), which does not correspond to its specification and Sun refuses to fix (see bug 6222822; don't be fooled by the “closed, fixed” label), FastBufferedInputStream now peeks at the underlying stream and if it is System.in it uses repeated reads. Moreover, it will use alternatively reads and skips to guarantee that the number of skipped bytes will be smaller than requested only if end of file has been reached. - The insertion and key retrieval methods of hash-based structures are now protected and final. - New front() method for indirect queues. It retrieves quickly the indices associated to elements equal to the top. - First JUnit tests. 5.0.4 -> 5.0.5 - Fixed possible overflow in FastBufferedInputStream.available(). - Indirect heaps have faster checks for elements belonging or not to the queue. In particular, we just rely on array access for detecting indices out of bounds. Profiling with LaMa4J showed that in some circumstances checking explicitly the indices were within bounds was taking more time that the actual heap inner workings. - Fixed obnoxious bug dating to the first fastutil implementation. The macro KEY_EQUALS_HASH(x,h,y), which checks for equality between x and y given that the hash of x is h, was evaluating hashCode() on y without guarantee that y was non-null. As a result, adding a null to a mapped followed by the insertion of an element with hash code 0 would have thrown a NullPointerException. The bug went unobserved for years because no one use nulls as keys, and was actually detected by a bug in BUbiNG's code (which was in turn mistakenly inserting nulls in a set). 5.0.3 -> 5.0.4 - Fixed missing declaration of generic type for HASH_STRATEGY. - A new abstract class, MeasurableInputStream, is used for streams whose length and current position are always known. This actually was needed for BUbiNG development. - New readLine() family of method for reading "lines" directly from a FastBufferedInputStream. 
- In FastBufferedInputStream, reset() has been deprecated in favour of flush(). - Array-based lists of objects now reallocate the backing array using reflection *only* if they were created by wrapping. This won't change the previous behaviour, but at the price of a boolean per list we have unbelievably faster array reallocation. - New explicit fast load factors in Hash. 5.0.2 -> 5.0.3 - Bizarrily, java.util.List re-specifies iterator(), even if it extends Collection. As a result, we need to re-strengthen it in type-specific lists. - Fixed new horrible bug introduced by adding Booleans to BinIO and TextIO. Problem is, I didn't know #assert is cumulative. 5.0.1 -> 5.0.2 - Fixed bug in sorted maps key sets and values that would cause a stack overflow when calling size() and a few other methods. - Fixed lack of booleans in BinIO and TextIO. - BinIO now checks for too large files. 5.0 -> 5.0.1 - In BinIO, it was assumed that .SIZE would give the size of primitive types in *bytes*. Bad mistake. 4.4.3 -> 5.0 - Java 5 only! - Support for generics. This led to a number of backward-incompatible changes: * toArray(Object[]) does not accept any longer null as an argument; * singletons for empty collections (sets, lists, ecc.) are type-specific; * iterators on sorted collections are bidirectional *by specification*; * the new, covariantly stronger methods defined in all interfaces (e.g., iterator() returning a type-specific iterator) are now the default, and in the abstract classes the old methods (e.g., objectIterator()) now just delegate to the standard method, which is the contrary of what was happening before: you'll have to turn all methods such as objectIterator() in iterator(), etc. * all deprecated methods have been dropped. - Array growth functions now will return the correct empty array for object arrays (it used to return ObjectArrays.EMPTY_ARRAY). - Strategies are generic and no longer required to accept REMOVED. - Stale references could hang around in the nodePath array for Red-Black trees and map; this has been fixed. - The difference in semantics with the standard toArray(Object[]) specification, which has always been in place, is now exhaustively explained. - Major code cleanup (mostly code deletion) due to passing fastutil into Eclipse to check unused code, etc. 4.4.2 -> 4.4.3 - Important bug fix in FastBufferedInputStream. 4.4.1 -> 4.4.2 - New reset() method to invalidate the buffer of a FastBufferedInputStream, making it possible to read safely files written by other processes (given, of course, that you are synchronising the accesses). 4.4.0 -> 4.4.1 - New parallel-array constructor for all maps. Very useful for static final map initialisation. - Following considerations in Jakarta Commons I/O, the standard buffer size has be lowered to 8Ki. - Some arguments were declared as DataInputStream instead of DataInput. - New methods for reading/writing objects from/to streams. 4.3.2 -> 4.4 - New static containers for reading and writing easily text and binary data streams. They load/save arrays, iterators etc. to buffered readers or streams. - Moved here fast input/output buffered classes from MG4J. This makes fastutil self-contained. - The trivial implementation of the type-specific iterator was missing from AbstractList.drv (surprisingly, not from the subclass implementation!). - The sublist implementation in AbstractList.drv is now protected and static. The attributes are protected, too. 
- Now we compare booleans (false < true). 4.3.1 -> 4.3.2 - Fixed small innocuous bug: a code fragment related to non-linked hash tables was generated for linked hash tables, too, due to a case typo in a preprocessor directive. The code fragment, however, had no effect. - Fixed memory leak in OpenHashMap: the remove() method was not clearing the key (whereas OpenHashSet was). 4.3 -> 4.3.1 - New fully indirect heap-based double priority queues. - Fixed docs for queues: in 4.3, we were claiming that greater elements are dequeued first, while the opposite happens. 4.2 -> 4.3 - New full-fledged set of unmodifiable structures *and* iterators. - Removed about a dozen spurious final method modifiers. - Made rehash() protected, so that everybody can play with different rehashing strategies. - trim() in array lists wasn't doing the right thing, because trim(int) wasn't doing it in the first place. Now if n is smaller than the size of the list, we trim at the list size (previously we were doing nothing). - Analogously, trim() in hash-table-based structures was fixed so that trimming a table below its size will result in rehashing to the minimum possible size. 4.1 -> 4.2 - Improved array methods: now all methods on objects (e.g., grow()) return an array of the same type as the array that was passed to them, similarly to toArray() in collections. - Fixed missing macro substitution for empty iterator methods. In any case, they were already deprecated. 4.0 -> 4.1 - New classes for custom hashing methods (mainly intended for arrays). Correspondingly, methods for arrays have been implemented in the static containers. - BasicEntry now throws an UnsupportedOperationException on calls to setValue(). If you ever used that method, you got weird results, as it does not update the underlying map. The method is now implemented correctly in open hash maps, which previously did not correctly update the underlying map. - Reimplemented copy of an entire array using clone(). - Fixed a bug in clear() for indirect heaps (the inversion array was not being cleared). - Indirect priority queue interfaces now feature an optional allChanged() method that can be used to force a complete heap rebuild. It is implemented by all current array-based and heap-based concrete classes. 3.1 -> 4.0 - IMPORTANT: The optimized methods that a type-specific list must provide now include an addElements() method that quickly adds an array of elements. As usual, the method is fully implemented by the type-specific abstract lists. - IMPORTANT: The abstract generic versions of get(), put() and remove() for maps with non-object keys or values now always return null to denote a missing key. They used to return an object-wrapped default return value. - Completely new and comprehensive implementation of priority queues, both direct and indirect. Implementations are by heaps and by flat arrays. There are also static containers with all relevant heap methods, for people wanting to do their own thing. - New static containers for comparators. - All singletons, empty sets and synchronized wrappers are public, so you can inherit from them. - Abstract maps now provide keySet() and values() based on entrySet(). - New abstract classes for sorted sets and maps with delegators to type-specific methods. - New public methods in Arrays and in type-specific Arrays classes for checking ranges. - New static methods for type-specific arrays that make it easy to grow, enlarge and trim them. - Clarified abstract implementation of default return values, and implemented the clarified specification. 
Just a couple of methods in hash maps were not already compliant. - The pour() method now returns a list. The previous version was returning a linked hash set, which was rather nonsensical anyway, since an iterator built on the returned set could have been different from the original iterator. You can always pour an iterator into a set by providing the set explicitly. - An exception-throwing implementation of some methods in AbstractSet was missing. Same for AbstractCollection, AbstractMap and AbstractList. - New basic inner entry class for abstract maps, which makes it easier to write entrySet() methods for classes that do not have their own entries. - Added missing get(Object) method in AbstractMap (it just delegates to the type-specific version). - For lazy people, containsKey() and containsValue() in AbstractMap are now defined by looking into keySet() and values(). - Fixed a few methods of EMPTY_LIST which were throwing exceptions semantically (see the introduction). - The interval iterators are now list iterators, except for longs. - Fixed a bug in size() for array lists (reducing the size of an array would lead to an exception). - Fixed a double bug in hash tables: first of all, on very small sizes adding growthFactor would have left the size unchanged, giving rise to infinite loops. (Thanks to Heikki Uusitalo for reporting this bug.) Second, growthFactor was not being used *at all* by hash maps. - Fixed entries emitted by singleton maps. Now they are type-specific. - Fixed a number of minor glitches in gencsource.sh, and added some comments. - HashCommon.removed has been renamed HashCommon.REMOVED. - Boolean objects are now generated using valueOf() instead of the constructor. - New type-specific wrappers for list iterators. 3.0.1 -> 3.1 - IMPORTANT: The it.unimi.dsi.fastutil.Iterators methods have been spread across type-specific static containers. - New Stack interface, implemented by type-specific lists. - New static container classes Collections, Sets, and Lists. Presently they just provide empty containers. - New type-specific static containers (e.g., IntSets) providing singletons and synchronized wrappers. - Entry sets now have entries that are equal() to entries coming from corresponding maps in java.util. - Spelling everywhere changed to Pure American. "synchronized" in code and "synchronise" in text side-by-side were looking really weird... 3.0 -> 3.0.1 - New unwrap() methods for type-specific collections. - Fixed an old-as-the-world bug, apparently far-reaching, but that evidently no one ever noticed: AbstractMap was not serialisable, and, as a result, the default return value was not serialised (I sincerely find it counterintuitive that making a class serialisable doesn't do the same for its supertypes). It wasn't ever even *documented* as preserved, so probably everyone thought this was my idea, too. Too bad this breaks serialisation compatibility once more. Since I had to break some serialisation anyway, I decided to eliminate the residual serialisation of p in hash table classes, too (which breaks serialisation for all hash-based classes). 2.60 -> 3.0 - IMPORTANT: All classes have been repackaged following the type of elements/keys. Sources will have to be retouched (just to change the import clauses) and recompiled. - IMPORTANT: Because of an unavoidable name clash in the new type-specific list interface, the method remove(int) of IntCollection has been renamed rem(int). 
The only really unpleasant effect is that you must use rem(int) on variables of type IntCollection that are not of type IntSet (as IntSet reinstates remove(int) in its right place)--for instance, IntList (see the short illustrative sketch below). - Brand-new implementation of type-specific lists, with all the features you'd expect and more. - Insertions for readObject() in hash tables are now handled in a special way (20% faster). - Implemented linear-time tree reconstruction for readObject() (in practice, more than twice as fast). - Fixed a problem with serialisation of hash tables: the table would have been reloaded with the same p, even if it was preposterous. We still save p, however, to avoid breaking serialisation compatibility. - Fixed missing implementation of type-specific sets, which should have extended type-specific collections, but did not. - The default return value is now protected. - New family of pour() methods that pour an iterator into a set. - New programmable growth factor for hash-table-based classes. - Eliminated a few useless method calls in the tree map. - Wide range of complex assertions, which are compiled in or out using the "private static final boolean" idiom. - For references we now use System.identityHashCode(); this shouldn't change much, but it seems definitely more sensible. - Fixed major bug in subSet()/subMap(): creating a subMap of a tailMap (or headMap) with a right extreme (left, resp.) equal to 0 would have caused the creation of a tailMap (or headMap, resp.), discarding the extreme. Very, very unlikely, but it happened in a test. - Fixed small bug in the standard remove() method of submaps, which would have returned a default return value wrapped in a suitable object instead of null on non-existing keys. 2.52 -> 2.60 - IMPORTANT: Major overhaul of iterators. Now iterators must be skippable, so previous implementations of the type-specific iterator interfaces will not work. However, new abstract classes make it easy to build iterators by providing the skipping logic for free, and many useful static methods in Iterators generate type-specific iterators wrapping standard iterators, arrays, etc. - Better strategy for clear() on hash tables: we skip the work only if all entries are free (which means that an empty table with deleted entries will be cleared). 2.51 -> 2.52 - IMPORTANT: The package name has changed to it.unimi.dsi.fastutil to be uniform with JPackage conventions. However, this means that you must manually erase the old one and update your sources. - clear() doesn't do anything on empty hash tables. 2.50 -> 2.51 - New trim(int) method to reduce a hash table size while avoiding making it too small. - serialVersionUID is now fixed, to avoid future incompatibilities. 2.11 -> 2.50 - IMPORTANT: The Collection interface now prescribes an iterator method with a type-specific name (e.g., intIterator()) that directly returns a type-specific iterator. - New Reference maps and sets that make it quicker to store canonised objects. - New linked maps mimicking java.util's, but with a boatload of additional features. - Small bug fix: the get(Object) method would return null instead of the default return value for maps with object keys. - Major bug fix: iterating backwards on submaps was leading to unpredictable results. - Major bug fix: cloning maps would have caused inconsistent behaviour. - Major code redistribution: now, whenever possible, wrappers belong to abstract superclasses. 2.1 -> 2.11 - Now we cache the hash of an object before entering the hash table loop. 
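(Editorial sketch, not part of the original CHANGES: the rem(int)/remove(int) split described in the 2.60 -> 3.0 entry above, shown with the current type-specific classes; the class RemVersusRemove and the variable names are invented for illustration.)

    import it.unimi.dsi.fastutil.ints.IntArrayList;
    import it.unimi.dsi.fastutil.ints.IntList;
    import it.unimi.dsi.fastutil.ints.IntOpenHashSet;
    import it.unimi.dsi.fastutil.ints.IntSet;

    public class RemVersusRemove {
        public static void main(String[] args) {
            IntList list = new IntArrayList(new int[] { 10, 20, 30 });
            list.removeInt(1);   // List semantics: removes the element at index 1 (the value 20).
            list.rem(30);        // Collection semantics: removes one occurrence of the value 30.

            IntSet set = new IntOpenHashSet(new int[] { 10, 20, 30 });
            set.remove(20);      // IntSet reinstates remove(int) with element semantics.
        }
    }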
2.0 -> 2.1 - A simple optimisation in hash-table inner loops has given quite a performance boost under certain conditions (we do not compute the secondary hashing if it is not necessary). Inspired by Gnu Trove. - The trim() method would have in fact trimmed nothing, just rehashed the table. - The computed maxFill value was sligtly too small. - Also tree sets now have constructors from arrays. - More internal methods have been made final. 1.3 -> 2.0 - ALL MAPS AND SETS HAVE NEW NAMES DEPENDING ON THE IMPLEMENTATION. - Introducing new high-performance, low memory-footprint implementation of SortedMap and SortedSet. - Two tree implementations are available: RB trees and AVL trees. Both implementations are threaded. See the README. - Fixed a bug in hashCode() and contains() for HashMap.drv (it was considering keys only!). - Fixed a bug in contains() for entrySet() in all maps (it was using VALUE_EQUAL to test equality for values given as objects). - I realised that a default return value can be useful also for maps and sets returning objects, so now you have it. It is even independent for submaps and subsets. - Classes are no longer final. The performance gain is around 1%, and the decrease in usefulness is orders of magnitudes greater. - We now check equality using first hashCode() and then equals(). - The tests for speed now warm up the trees by doing repeated insertions and deletions, so that the benefits of a better balancing criterion are more evident. - The regression tests are much more stringent. - Fixed hashCode() for hash maps (wasn't conforming to the Map interface specification). - Implemented linear cloning for tree classes. fastutil-7.1.0/README.md0000664000000000000000000000302013050701620013316 0ustar rootrootWelcome to fastutil. -------------------- [fastutil](http://fastutil.di.unimi.it/) is a collection of type-specific Java classes that extend the Java Collections Framework by providing several containers, such as maps, sets, lists and prority queues, implementing the interfaces of the java.util package; it provides also big (64-bit) arrays, sets and lists, and fast, practical I/O classes for binary and text files. fastutil provides a huge collection of specialized classes generated starting from a parameterized version; the classes are much more compact and much faster than the general ones. Please read the package documentation for more information. The compiled code is contained in the jar file, and should be installed where you keep Java extensions. Note that the jar file is huge, due to the large number of classes: if you plan to ship your own jar with some fastutil classes included, you should look at AutoJar or similar tools to extract automatically the necessary classes. You have to "make sources" to get the actual Java sources; finally, "ant jar" and "ant javadoc" will generate the jar file and the API documentation. The Java sources are generated using a C preprocessor. The gencsource.sh script reads in a driver file, that is, a Java source that uses some preprocessor-defined symbols and some conditional compilation, and produces a (fake) C source, which includes the driver code and some definitions that customize the environment. * seba () * fastutil-7.1.0/LICENSE-2.00000664000000000000000000002613613050701620013356 0ustar rootroot Apache License Version 2.0, January 2004 http://www.apache.org/licenses/ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 1. Definitions. 
"License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. "Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. "Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. "You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. "Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. "Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. "Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). "Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. "Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." "Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. 2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. 3. Grant of Patent License. 
Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. 4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: (a) You must give any other recipients of the Work or Derivative Works a copy of this License; and (b) You must cause any modified files to carry prominent notices stating that You changed the files; and (c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and (d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. 5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. 6. Trademarks. 
This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. 7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. 8. Limitation of Liability. In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. 9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. END OF TERMS AND CONDITIONS APPENDIX: How to apply the Apache License to your work. To apply the Apache License to your work, attach the following boilerplate notice, with the fields enclosed by brackets "[]" replaced with your own identifying information. (Don't include the brackets!) The text should be enclosed in the appropriate comment syntax for the file format. We also recommend that a file or class name and description of purpose be included on the same "printed page" as the copyright notice for easier identification within third-party archives. Copyright [yyyy] [name of copyright owner] Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 
fastutil-7.1.0/makefile0000664000000000000000000005631513050711476013570 0ustar rootrootinclude build.properties TAR=tar PKG_PATH = it/unimi/dsi/fastutil SOURCEDIR = src/$(PKG_PATH) GEN_SRCDIR ?= src export GEN_SRCDIR DOCSDIR = docs APIURL=http://java.sun.com/j2se/5.0/docs/api # External URLs in the docs will point here .SUFFIXES: .java .j .PHONY: all clean depend install docs jar tar jsources csources dirs .SECONDARY: $(JSOURCES) # The capitalized types used to build class and method names; boolean and object types are not listed. TYPE_NOBOOL_NOOBJ=Byte Short Int Long Char Float Double # The capitalized types used to build class and method names; boolean and reference are not listed. TYPE_NOBOOL_NOREF=$(TYPE_NOBOOL_NOOBJ) Object # The capitalized types used to build class and method names; object types are not listed. TYPE_NOOBJ=Boolean $(TYPE_NOBOOL_NOOBJ) # The capitalized types used to build class and method names; references are not listed. TYPE_NOREF=$(TYPE_NOOBJ) Object # The capitalized types used to build class and method names; boolean is not listed. TYPE_NOBOOL=$(TYPE_NOBOOL_NOREF) Reference # The capitalized types used to build class and method names; now references appear as Reference. TYPE=$(TYPE_NOREF) Reference # The capitalized types used to build class and method names; only types for which big structures are built are listed. TYPE_BIG=Int Long Float Double Object Reference # These variables are used as an associative array (using computed names). PACKAGE_Boolean = booleans PACKAGE_Byte = bytes PACKAGE_Short = shorts PACKAGE_Int = ints PACKAGE_Long = longs PACKAGE_Char = chars PACKAGE_Float= floats PACKAGE_Double = doubles PACKAGE_Object = objects PACKAGE_Reference = objects explain: @echo -e "\nTo build fastutil, you must first use \"make sources\"" @echo -e "to obtain the actual Java files. Then, you can build the jar" @echo -e "file using \"ant jar\", or the documentation using \"ant javadoc\".\n" @echo -e "If you set the make variable TEST (e.g., make sources TEST=1), you" @echo -e "will compile behavioral and speed tests into the classes.\n" @echo -e "If you set the make variable ASSERTS (e.g., make sources ASSERTS=1), you" @echo -e "will compile assertions into the classes.\n\n" source: -rm -f fastutil-$(version) ln -s . fastutil-$(version) $(TAR) zcvf fastutil-$(version)-src.tar.gz --owner=0 --group=0 \ fastutil-$(version)/drv/*.drv \ fastutil-$(version)/build.xml \ fastutil-$(version)/ivy.xml \ fastutil-$(version)/fastutil.bnd \ fastutil-$(version)/pom.xml \ fastutil-$(version)/build.properties \ fastutil-$(version)/gencsource.sh \ fastutil-$(version)/CHANGES \ fastutil-$(version)/README.md \ fastutil-$(version)/LICENSE-2.0 \ fastutil-$(version)/makefile \ $(foreach f, $(SOURCES), fastutil-$(version)/$(f)) \ fastutil-$(version)/$(SOURCEDIR)/{boolean,byte,char,short,int,long,float,double,object}s/package.html \ fastutil-$(version)/$(SOURCEDIR)/io/package.html \ fastutil-$(version)/src/overview.html \ $$(find fastutil-$(version)/test -iname \*.java) rm fastutil-$(version) binary: make -s clean sources format ant clean osgi javadoc -rm -f fastutil-$(version) ln -s . fastutil-$(version) cp dist/lib/fastutil-$(version).jar . 
$(TAR) zcvf fastutil-$(version)-bin.tar.gz --owner=0 --group=0 \ fastutil-$(version)/CHANGES \ fastutil-$(version)/README.md \ fastutil-$(version)/LICENSE-2.0 \ fastutil-$(version)/docs \ fastutil-$(version)/fastutil-$(version).jar rm fastutil-$(version) format: /usr/bin/eclipse -nosplash -application org.eclipse.jdt.core.JavaCodeFormatter -verbose -config $(CURDIR)/.settings/org.eclipse.jdt.core.prefs $(CURDIR)/src/it/unimi/dsi/fastutil/{booleans,bytes,shorts,chars,ints,floats,longs,doubles,objects} stage: (sed -e s/VERSION/$$(grep version build.properties | cut -d= -f2)/ pom.xml) (unset LOCAL_IVY_SETTINGS; ant stage) dirs: mkdir -p $(GEN_SRCDIR)/$(PKG_PATH) mkdir -p $(GEN_SRCDIR)/$(PKG_PATH)/io mkdir -p $(foreach k, $(sort $(TYPE)), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))) # # Interfaces # ITERABLES := $(foreach k,$(TYPE_NOREF), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/$(k)Iterable.c) $(ITERABLES): drv/Iterable.drv; ./gencsource.sh $< $@ >$@ CSOURCES += $(ITERABLES) COLLECTIONS := $(foreach k,$(TYPE), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/$(k)Collection.c) $(COLLECTIONS): drv/Collection.drv; ./gencsource.sh $< $@ >$@ CSOURCES += $(COLLECTIONS) SETS := $(foreach k,$(TYPE), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/$(k)Set.c) $(SETS): drv/Set.drv; ./gencsource.sh $< $@ >$@ CSOURCES += $(SETS) HASHES := $(foreach k,$(TYPE_NOOBJ), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/$(k)Hash.c) $(HASHES): drv/Hash.drv; ./gencsource.sh $< $@ >$@ CSOURCES += $(HASHES) SORTED_SETS := $(foreach k,$(TYPE_NOBOOL), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/$(k)SortedSet.c) $(SORTED_SETS): drv/SortedSet.drv; ./gencsource.sh $< $@ >$@ CSOURCES += $(SORTED_SETS) FUNCTIONS := $(foreach k,$(TYPE_NOBOOL), $(foreach v,$(TYPE), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/$(k)2$(v)Function.c)) $(FUNCTIONS): drv/Function.drv; ./gencsource.sh $< $@ >$@ CSOURCES += $(FUNCTIONS) MAPS := $(foreach k,$(TYPE_NOBOOL), $(foreach v,$(TYPE), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/$(k)2$(v)Map.c)) $(MAPS): drv/Map.drv; ./gencsource.sh $< $@ >$@ CSOURCES += $(MAPS) SORTED_MAPS := $(foreach k,$(TYPE_NOBOOL), $(foreach v,$(TYPE), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/$(k)2$(v)SortedMap.c)) $(SORTED_MAPS): drv/SortedMap.drv; ./gencsource.sh $< $@ >$@ CSOURCES += $(SORTED_MAPS) LISTS := $(foreach k,$(TYPE), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/$(k)List.c) $(LISTS): drv/List.drv; ./gencsource.sh $< $@ >$@ CSOURCES += $(LISTS) STACKS := $(foreach k,$(TYPE_NOOBJ), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/$(k)Stack.c) $(STACKS): drv/Stack.drv; ./gencsource.sh $< $@ >$@ CSOURCES += $(STACKS) PRIORITY_QUEUES := $(foreach k,$(TYPE_NOBOOL_NOOBJ), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/$(k)PriorityQueue.c) $(PRIORITY_QUEUES): drv/PriorityQueue.drv; ./gencsource.sh $< $@ >$@ CSOURCES += $(PRIORITY_QUEUES) INDIRECT_PRIORITY_QUEUES := $(foreach k,$(TYPE_NOBOOL_NOOBJ), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/$(k)IndirectPriorityQueue.c) $(INDIRECT_PRIORITY_QUEUES): drv/IndirectPriorityQueue.drv; ./gencsource.sh $< $@ >$@ CSOURCES += $(INDIRECT_PRIORITY_QUEUES) COMPARATORS := $(foreach k,$(TYPE_NOOBJ), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/$(k)Comparator.c) $(COMPARATORS): drv/Comparator.drv; ./gencsource.sh $< $@ >$@ CSOURCES += $(COMPARATORS) ITERATORS := $(foreach k,$(TYPE_NOREF), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/$(k)Iterator.c) $(ITERATORS): drv/Iterator.drv; ./gencsource.sh $< $@ >$@ CSOURCES += $(ITERATORS) BIDIRECTIONAL_ITERATORS := $(foreach k,$(TYPE_NOREF), 
$(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/$(k)BidirectionalIterator.c) $(BIDIRECTIONAL_ITERATORS): drv/BidirectionalIterator.drv; ./gencsource.sh $< $@ >$@ CSOURCES += $(BIDIRECTIONAL_ITERATORS) LIST_ITERATORS := $(foreach k,$(TYPE_NOREF), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/$(k)ListIterator.c) $(LIST_ITERATORS): drv/ListIterator.drv; ./gencsource.sh $< $@ >$@ CSOURCES += $(LIST_ITERATORS) BIG_LISTS := $(foreach k,$(TYPE), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/$(k)BigList.c) $(BIG_LISTS): drv/BigList.drv; ./gencsource.sh $< $@ >$@ CSOURCES += $(BIG_LISTS) BIG_LIST_ITERATORS := $(foreach k,$(TYPE_NOREF), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/$(k)BigListIterator.c) $(BIG_LIST_ITERATORS): drv/BigListIterator.drv; ./gencsource.sh $< $@ >$@ CSOURCES += $(BIG_LIST_ITERATORS) # # Abstract implementations # ABSTRACT_COLLECTIONS := $(foreach k,$(TYPE), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/Abstract$(k)Collection.c) $(ABSTRACT_COLLECTIONS): drv/AbstractCollection.drv; ./gencsource.sh $< $@ >$@ CSOURCES += $(ABSTRACT_COLLECTIONS) ABSTRACT_SETS := $(foreach k,$(TYPE), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/Abstract$(k)Set.c) $(ABSTRACT_SETS): drv/AbstractSet.drv; ./gencsource.sh $< $@ >$@ CSOURCES += $(ABSTRACT_SETS) ABSTRACT_SORTED_SETS := $(foreach k,$(TYPE_NOBOOL), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/Abstract$(k)SortedSet.c) $(ABSTRACT_SORTED_SETS): drv/AbstractSortedSet.drv; ./gencsource.sh $< $@ >$@ CSOURCES += $(ABSTRACT_SORTED_SETS) ABSTRACT_FUNCTIONS := $(foreach k,$(TYPE_NOBOOL), $(foreach v,$(TYPE), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/Abstract$(k)2$(v)Function.c)) $(ABSTRACT_FUNCTIONS): drv/AbstractFunction.drv; ./gencsource.sh $< $@ >$@ CSOURCES += $(ABSTRACT_FUNCTIONS) ABSTRACT_MAPS := $(foreach k,$(TYPE_NOBOOL), $(foreach v,$(TYPE), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/Abstract$(k)2$(v)Map.c)) $(ABSTRACT_MAPS): drv/AbstractMap.drv; ./gencsource.sh $< $@ >$@ CSOURCES += $(ABSTRACT_MAPS) ABSTRACT_SORTED_MAPS := $(foreach k,$(TYPE_NOBOOL), $(foreach v,$(TYPE), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/Abstract$(k)2$(v)SortedMap.c)) $(ABSTRACT_SORTED_MAPS): drv/AbstractSortedMap.drv; ./gencsource.sh $< $@ >$@ CSOURCES += $(ABSTRACT_SORTED_MAPS) ABSTRACT_LISTS := $(foreach k,$(TYPE), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/Abstract$(k)List.c) $(ABSTRACT_LISTS): drv/AbstractList.drv; ./gencsource.sh $< $@ >$@ CSOURCES += $(ABSTRACT_LISTS) ABSTRACT_BIG_LISTS := $(foreach k,$(TYPE), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/Abstract$(k)BigList.c) $(ABSTRACT_BIG_LISTS): drv/AbstractBigList.drv; ./gencsource.sh $< $@ >$@ CSOURCES += $(ABSTRACT_BIG_LISTS) ABSTRACT_STACKS := $(foreach k,$(TYPE_NOOBJ), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/Abstract$(k)Stack.c) $(ABSTRACT_STACKS): drv/AbstractStack.drv; ./gencsource.sh $< $@ >$@ CSOURCES += $(ABSTRACT_STACKS) ABSTRACT_PRIORITY_QUEUES := $(foreach k,$(TYPE_NOBOOL_NOOBJ), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/Abstract$(k)PriorityQueue.c) $(ABSTRACT_PRIORITY_QUEUES): drv/AbstractPriorityQueue.drv; ./gencsource.sh $< $@ >$@ CSOURCES += $(ABSTRACT_PRIORITY_QUEUES) ABSTRACT_COMPARATORS := $(foreach k,$(TYPE_NOBOOL_NOOBJ), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/Abstract$(k)Comparator.c) $(ABSTRACT_COMPARATORS): drv/AbstractComparator.drv; ./gencsource.sh $< $@ >$@ CSOURCES += $(ABSTRACT_COMPARATORS) ABSTRACT_ITERATORS := $(foreach k,$(TYPE_NOREF), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/Abstract$(k)Iterator.c) $(ABSTRACT_ITERATORS): drv/AbstractIterator.drv; ./gencsource.sh $< $@ >$@ CSOURCES += 
$(ABSTRACT_ITERATORS) ABSTRACT_BIDIRECTIONAL_ITERATORS := $(foreach k,$(TYPE_NOREF), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/Abstract$(k)BidirectionalIterator.c) $(ABSTRACT_BIDIRECTIONAL_ITERATORS): drv/AbstractBidirectionalIterator.drv; ./gencsource.sh $< $@ >$@ CSOURCES += $(ABSTRACT_BIDIRECTIONAL_ITERATORS) ABSTRACT_LIST_ITERATORS := $(foreach k,$(TYPE_NOREF), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/Abstract$(k)ListIterator.c) $(ABSTRACT_LIST_ITERATORS): drv/AbstractListIterator.drv; ./gencsource.sh $< $@ >$@ CSOURCES += $(ABSTRACT_LIST_ITERATORS) ABSTRACT_BIG_LIST_ITERATORS := $(foreach k,$(TYPE_NOREF), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/Abstract$(k)BigListIterator.c) $(ABSTRACT_BIG_LIST_ITERATORS): drv/AbstractBigListIterator.drv; ./gencsource.sh $< $@ >$@ CSOURCES += $(ABSTRACT_BIG_LIST_ITERATORS) # # Concrete implementations # OPEN_HASH_SETS := $(foreach k,$(TYPE), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/$(k)OpenHashSet.c) $(OPEN_HASH_SETS): drv/OpenHashSet.drv; ./gencsource.sh $< $@ >$@ CSOURCES += $(OPEN_HASH_SETS) OPEN_HASH_BIG_SETS := $(foreach k,$(TYPE_BIG), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/$(k)OpenHashBigSet.c) $(OPEN_HASH_BIG_SETS): drv/OpenHashBigSet.drv; ./gencsource.sh $< $@ >$@ CSOURCES += $(OPEN_HASH_BIG_SETS) LINKED_OPEN_HASH_SETS := $(foreach k,$(TYPE_NOBOOL), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/$(k)LinkedOpenHashSet.c) $(LINKED_OPEN_HASH_SETS): drv/LinkedOpenHashSet.drv; ./gencsource.sh $< $@ >$@ CSOURCES += $(LINKED_OPEN_HASH_SETS) OPEN_CUSTOM_HASH_SETS := $(foreach k,$(TYPE_NOBOOL_NOREF), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/$(k)OpenCustomHashSet.c) $(OPEN_CUSTOM_HASH_SETS): drv/OpenCustomHashSet.drv; ./gencsource.sh $< $@ >$@ CSOURCES += $(OPEN_CUSTOM_HASH_SETS) LINKED_OPEN_CUSTOM_HASH_SETS := $(foreach k,$(TYPE_NOBOOL_NOREF), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/$(k)LinkedOpenCustomHashSet.c) $(LINKED_OPEN_CUSTOM_HASH_SETS): drv/LinkedOpenCustomHashSet.drv; ./gencsource.sh $< $@ >$@ CSOURCES += $(LINKED_OPEN_CUSTOM_HASH_SETS) ARRAY_SETS := $(foreach k,$(TYPE), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/$(k)ArraySet.c) $(ARRAY_SETS): drv/ArraySet.drv; ./gencsource.sh $< $@ >$@ CSOURCES += $(ARRAY_SETS) AVL_TREE_SETS := $(foreach k,$(TYPE_NOBOOL_NOREF), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/$(k)AVLTreeSet.c) $(AVL_TREE_SETS): drv/AVLTreeSet.drv; ./gencsource.sh $< $@ >$@ CSOURCES += $(AVL_TREE_SETS) RB_TREE_SETS := $(foreach k,$(TYPE_NOBOOL_NOREF), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/$(k)RBTreeSet.c) $(RB_TREE_SETS): drv/RBTreeSet.drv; ./gencsource.sh $< $@ >$@ CSOURCES += $(RB_TREE_SETS) OPEN_HASH_MAPS := $(foreach k,$(TYPE_NOBOOL), $(foreach v,$(TYPE), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/$(k)2$(v)OpenHashMap.c)) $(OPEN_HASH_MAPS): drv/OpenHashMap.drv; ./gencsource.sh $< $@ >$@ CSOURCES += $(OPEN_HASH_MAPS) LINKED_OPEN_HASH_MAPS := $(foreach k,$(TYPE_NOBOOL), $(foreach v,$(TYPE), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/$(k)2$(v)LinkedOpenHashMap.c)) $(LINKED_OPEN_HASH_MAPS): drv/LinkedOpenHashMap.drv; ./gencsource.sh $< $@ >$@ CSOURCES += $(LINKED_OPEN_HASH_MAPS) OPEN_CUSTOM_HASH_MAPS := $(foreach k,$(TYPE_NOBOOL), $(foreach v,$(TYPE), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/$(k)2$(v)OpenCustomHashMap.c)) $(OPEN_CUSTOM_HASH_MAPS): drv/OpenCustomHashMap.drv; ./gencsource.sh $< $@ >$@ CSOURCES += $(OPEN_CUSTOM_HASH_MAPS) LINKED_OPEN_CUSTOM_HASH_MAPS := $(foreach v,$(TYPE), $(GEN_SRCDIR)/$(PKG_PATH)/objects/Object2$(v)LinkedOpenCustomHashMap.c) $(LINKED_OPEN_CUSTOM_HASH_MAPS): drv/LinkedOpenCustomHashMap.drv; 
./gencsource.sh $< $@ >$@ CSOURCES += $(LINKED_OPEN_CUSTOM_HASH_MAPS) #STRIPED_OPEN_HASH_MAPS := $(foreach k,$(TYPE_NOBOOL), $(foreach v,$(TYPE), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/Striped$(k)2$(v)OpenHashMap.c)) #$(STRIPED_OPEN_HASH_MAPS): drv/StripedOpenHashMap.drv; ./gencsource.sh $< $@ >$@ #CSOURCES += $(STRIPED_OPEN_HASH_MAPS) ARRAY_MAPS := $(foreach k,$(TYPE_NOBOOL), $(foreach v,$(TYPE), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/$(k)2$(v)ArrayMap.c)) $(ARRAY_MAPS): drv/ArrayMap.drv; ./gencsource.sh $< $@ >$@ CSOURCES += $(ARRAY_MAPS) AVL_TREE_MAPS := $(foreach k,$(TYPE_NOBOOL_NOREF), $(foreach v,$(TYPE), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/$(k)2$(v)AVLTreeMap.c)) $(AVL_TREE_MAPS): drv/AVLTreeMap.drv; ./gencsource.sh $< $@ >$@ CSOURCES += $(AVL_TREE_MAPS) RB_TREE_MAPS := $(foreach k,$(TYPE_NOBOOL_NOREF), $(foreach v,$(TYPE), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/$(k)2$(v)RBTreeMap.c)) $(RB_TREE_MAPS): drv/RBTreeMap.drv; ./gencsource.sh $< $@ >$@ CSOURCES += $(RB_TREE_MAPS) ARRAY_LISTS := $(foreach k,$(TYPE), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/$(k)ArrayList.c) $(ARRAY_LISTS): drv/ArrayList.drv; ./gencsource.sh $< $@ >$@ CSOURCES += $(ARRAY_LISTS) BIG_ARRAY_BIG_LISTS := $(foreach k,$(TYPE), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/$(k)BigArrayBigList.c) $(BIG_ARRAY_BIG_LISTS): drv/BigArrayBigList.drv; ./gencsource.sh $< $@ >$@ CSOURCES += $(BIG_ARRAY_BIG_LISTS) FRONT_CODED_LISTS := $(foreach k, Byte Short Int Long Char, $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/$(k)ArrayFrontCodedList.c) $(FRONT_CODED_LISTS): drv/ArrayFrontCodedList.drv; ./gencsource.sh $< $@ >$@ CSOURCES += $(FRONT_CODED_LISTS) HEAP_PRIORITY_QUEUES := $(foreach k,$(TYPE_NOBOOL_NOREF), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/$(k)HeapPriorityQueue.c) $(HEAP_PRIORITY_QUEUES): drv/HeapPriorityQueue.drv; ./gencsource.sh $< $@ >$@ CSOURCES += $(HEAP_PRIORITY_QUEUES) ARRAY_PRIORITY_QUEUES := $(foreach k,$(TYPE_NOBOOL_NOREF), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/$(k)ArrayPriorityQueue.c) $(ARRAY_PRIORITY_QUEUES): drv/ArrayPriorityQueue.drv; ./gencsource.sh $< $@ >$@ CSOURCES += $(ARRAY_PRIORITY_QUEUES) ARRAY_FIFO_QUEUES := $(foreach k,$(TYPE_NOBOOL_NOREF), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/$(k)ArrayFIFOQueue.c) $(ARRAY_FIFO_QUEUES): drv/ArrayFIFOQueue.drv; ./gencsource.sh $< $@ >$@ CSOURCES += $(ARRAY_FIFO_QUEUES) HEAP_SEMI_INDIRECT_PRIORITY_QUEUES := $(foreach k, $(TYPE_NOBOOL_NOREF), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/$(k)HeapSemiIndirectPriorityQueue.c) $(HEAP_SEMI_INDIRECT_PRIORITY_QUEUES): drv/HeapSemiIndirectPriorityQueue.drv; ./gencsource.sh $< $@ >$@ CSOURCES += $(HEAP_SEMI_INDIRECT_PRIORITY_QUEUES) HEAP_INDIRECT_PRIORITY_QUEUES := $(foreach k, $(TYPE_NOBOOL_NOREF), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/$(k)HeapIndirectPriorityQueue.c) $(HEAP_INDIRECT_PRIORITY_QUEUES): drv/HeapIndirectPriorityQueue.drv; ./gencsource.sh $< $@ >$@ CSOURCES += $(HEAP_INDIRECT_PRIORITY_QUEUES) ARRAY_INDIRECT_PRIORITY_QUEUES := $(foreach k, $(TYPE_NOBOOL_NOREF), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/$(k)ArrayIndirectPriorityQueue.c) $(ARRAY_INDIRECT_PRIORITY_QUEUES): drv/ArrayIndirectPriorityQueue.drv; ./gencsource.sh $< $@ >$@ CSOURCES += $(ARRAY_INDIRECT_PRIORITY_QUEUES) # # Static containers # ITERATORS_STATIC := $(foreach k,$(TYPE_NOREF), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/$(k)Iterators.c) $(ITERATORS_STATIC): drv/Iterators.drv; ./gencsource.sh $< $@ >$@ CSOURCES += $(ITERATORS_STATIC) BIG_LIST_ITERATORS_STATIC := $(foreach k,$(TYPE_NOREF), 
$(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/$(k)BigListIterators.c) $(BIG_LIST_ITERATORS_STATIC): drv/BigListIterators.drv; ./gencsource.sh $< $@ >$@ CSOURCES += $(BIG_LIST_ITERATORS_STATIC) COLLECTIONS_STATIC := $(foreach k,$(TYPE), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/$(k)Collections.c) $(COLLECTIONS_STATIC): drv/Collections.drv; ./gencsource.sh $< $@ >$@ CSOURCES += $(COLLECTIONS_STATIC) SETS_STATIC := $(foreach k,$(TYPE), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/$(k)Sets.c) $(SETS_STATIC): drv/Sets.drv; ./gencsource.sh $< $@ >$@ CSOURCES += $(SETS_STATIC) SORTED_SETS_STATIC := $(foreach k,$(TYPE_NOBOOL), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/$(k)SortedSets.c) $(SORTED_SETS_STATIC): drv/SortedSets.drv; ./gencsource.sh $< $@ >$@ CSOURCES += $(SORTED_SETS_STATIC) LISTS_STATIC := $(foreach k,$(TYPE), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/$(k)Lists.c) $(LISTS_STATIC): drv/Lists.drv; ./gencsource.sh $< $@ >$@ CSOURCES += $(LISTS_STATIC) BIG_LISTS_STATIC := $(foreach k,$(TYPE), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/$(k)BigLists.c) $(BIG_LISTS_STATIC): drv/BigLists.drv; ./gencsource.sh $< $@ >$@ CSOURCES += $(BIG_LISTS_STATIC) ARRAYS_STATIC := $(foreach k,$(TYPE_NOREF), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/$(k)Arrays.c) $(ARRAYS_STATIC): drv/Arrays.drv; ./gencsource.sh $< $@ >$@ CSOURCES += $(ARRAYS_STATIC) BIG_ARRAYS_STATIC := $(foreach k,$(TYPE_NOREF), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/$(k)BigArrays.c) $(BIG_ARRAYS_STATIC): drv/BigArrays.drv; ./gencsource.sh $< $@ >$@ CSOURCES += $(BIG_ARRAYS_STATIC) PRIORITY_QUEUES_STATIC := $(foreach k,$(TYPE_NOBOOL_NOOBJ), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/$(k)PriorityQueues.c) $(PRIORITY_QUEUES_STATIC): drv/PriorityQueues.drv; ./gencsource.sh $< $@ >$@ CSOURCES += $(PRIORITY_QUEUES_STATIC) HEAPS_STATIC := $(foreach k,$(TYPE_NOBOOL_NOREF), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/$(k)Heaps.c) $(HEAPS_STATIC): drv/Heaps.drv; ./gencsource.sh $< $@ >$@ CSOURCES += $(HEAPS_STATIC) SEMI_INDIRECT_HEAPS_STATIC := $(foreach k,$(TYPE_NOBOOL_NOREF), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/$(k)SemiIndirectHeaps.c) $(SEMI_INDIRECT_HEAPS_STATIC): drv/SemiIndirectHeaps.drv; ./gencsource.sh $< $@ >$@ CSOURCES += $(SEMI_INDIRECT_HEAPS_STATIC) INDIRECT_HEAPS_STATIC := $(foreach k,$(TYPE_NOBOOL_NOREF), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/$(k)IndirectHeaps.c) $(INDIRECT_HEAPS_STATIC): drv/IndirectHeaps.drv; ./gencsource.sh $< $@ >$@ CSOURCES += $(INDIRECT_HEAPS_STATIC) FUNCTIONS_STATIC := $(foreach k,$(TYPE_NOBOOL), $(foreach v,$(TYPE), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/$(k)2$(v)Functions.c)) $(FUNCTIONS_STATIC): drv/Functions.drv; ./gencsource.sh $< $@ >$@ CSOURCES += $(FUNCTIONS_STATIC) MAPS_STATIC := $(foreach k,$(TYPE_NOBOOL), $(foreach v,$(TYPE), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/$(k)2$(v)Maps.c)) $(MAPS_STATIC): drv/Maps.drv; ./gencsource.sh $< $@ >$@ CSOURCES += $(MAPS_STATIC) SORTED_MAPS_STATIC := $(foreach k,$(TYPE_NOBOOL), $(foreach v,$(TYPE), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/$(k)2$(v)SortedMaps.c)) $(SORTED_MAPS_STATIC): drv/SortedMaps.drv; ./gencsource.sh $< $@ >$@ CSOURCES += $(SORTED_MAPS_STATIC) COMPARATORS_STATIC := $(foreach k,$(TYPE_NOBOOL_NOREF), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/$(k)Comparators.c) $(COMPARATORS_STATIC): drv/Comparators.drv; ./gencsource.sh $< $@ >$@ CSOURCES += $(COMPARATORS_STATIC) # # Fragmented stuff # BINIO_FRAGMENTS := $(foreach k,$(TYPE_NOREF), $(GEN_SRCDIR)/$(PKG_PATH)/io/$(k)BinIOFragment.h) $(BINIO_FRAGMENTS): drv/BinIOFragment.drv; ./gencsource.sh $< 
$@ >$@ CFRAGMENTS += $(BINIO_FRAGMENTS) $(GEN_SRCDIR)/$(PKG_PATH)/io/BinIO.c: drv/BinIO.drv $(BINIO_FRAGMENTS) ./gencsource.sh drv/BinIO.drv $@ >$@ CSOURCES += $(GEN_SRCDIR)/$(PKG_PATH)/io/BinIO.c TEXTIO_FRAGMENTS := $(foreach k,$(TYPE_NOOBJ), $(GEN_SRCDIR)/$(PKG_PATH)/io/$(k)TextIOFragment.h) $(TEXTIO_FRAGMENTS): drv/TextIOFragment.drv; ./gencsource.sh $< $@ >$@ CFRAGMENTS += $(TEXTIO_FRAGMENTS) $(GEN_SRCDIR)/$(PKG_PATH)/io/TextIO.c: drv/TextIO.drv $(TEXTIO_FRAGMENTS) ./gencsource.sh drv/TextIO.drv $@ >$@ CSOURCES += $(GEN_SRCDIR)/$(PKG_PATH)/io/TextIO.c JSOURCES = $(CSOURCES:.c=.java) # The list of generated Java source files SOURCES = \ $(SOURCEDIR)/Function.java \ $(SOURCEDIR)/Hash.java \ $(SOURCEDIR)/HashCommon.java \ $(SOURCEDIR)/BidirectionalIterator.java \ $(SOURCEDIR)/Stack.java \ $(SOURCEDIR)/BigList.java \ $(SOURCEDIR)/BigListIterator.java \ $(SOURCEDIR)/BigArrays.java \ $(SOURCEDIR)/PriorityQueue.java \ $(SOURCEDIR)/IndirectPriorityQueue.java \ $(SOURCEDIR)/Maps.java \ $(SOURCEDIR)/Arrays.java \ $(SOURCEDIR)/Swapper.java \ $(SOURCEDIR)/BigSwapper.java \ $(SOURCEDIR)/Size64.java \ $(SOURCEDIR)/PriorityQueues.java \ $(SOURCEDIR)/IndirectPriorityQueues.java \ $(SOURCEDIR)/AbstractPriorityQueue.java \ $(SOURCEDIR)/AbstractIndirectPriorityQueue.java \ $(SOURCEDIR)/AbstractStack.java \ $(SOURCEDIR)/io/FastByteArrayInputStream.java \ $(SOURCEDIR)/io/FastByteArrayOutputStream.java \ $(SOURCEDIR)/io/FastMultiByteArrayInputStream.java \ $(SOURCEDIR)/io/FastBufferedInputStream.java \ $(SOURCEDIR)/io/FastBufferedOutputStream.java \ $(SOURCEDIR)/io/InspectableFileCachedInputStream.java \ $(SOURCEDIR)/io/MeasurableInputStream.java \ $(SOURCEDIR)/io/MeasurableOutputStream.java \ $(SOURCEDIR)/io/MeasurableStream.java \ $(SOURCEDIR)/io/RepositionableStream.java # These are True Java Sources instead # We pass each generated Java source through the gccpreprocessor. TEST compiles in the test code, # whereas ASSERTS compiles in some assertions (whose testing, of course, must be enabled in the JVM). $(JSOURCES): %.java: %.c $(CC) -w -I. -ftabstop=4 $(if $(TEST),-DTEST,) $(if $(ASSERTS),-DASSERTS_CODE,) -DASSERTS_VALUE=$(if $(ASSERTS),true,false) -E -C -P $< >$@ clean: -@find build -name \*.class -exec rm {} \; -@find . -name \*.java~ -exec rm {} \; -@find . -name \*.html~ -exec rm {} \; -@rm -f $(GEN_SRCDIR)/$(PKG_PATH)/{booleans,bytes,shorts,chars,ints,longs,floats,doubles,objects}/*.java -@rm -f $(GEN_SRCDIR)/$(PKG_PATH)/io/*IO.java -@rm -f $(GEN_SRCDIR)/$(PKG_PATH)/*.{c,h,j} $(GEN_SRCDIR)/$(PKG_PATH)/*/*.{c,h,j} -@rm -fr $(DOCSDIR)/* sources: $(JSOURCES) csources: $(CSOURCES) fastutil-7.1.0/src/it/unimi/dsi/fastutil/Function.java0000664000000000000000000000742213050705451021445 0ustar rootrootpackage it.unimi.dsi.fastutil; /* * Copyright (C) 2002-2017 Sebastiano Vigna * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /** A function mapping keys into values. * *

Instances of this class represent functions: the main difference with {@link java.util.Map} * is that functions do not in principle allow enumeration of their domain or range. The need for * this interface lies in the existence of several highly optimized implementations of * functions (e.g., minimal perfect hashes) which do not actually store their domain or range explicitly. * In case the domain is known, {@link #containsKey(Object)} can be used to perform membership queries. * *

The choice of naming all methods exactly as in {@link java.util.Map} makes it possible * for all type-specific maps to extend type-specific functions (e.g., {@link it.unimi.dsi.fastutil.ints.Int2IntMap} * extends {@link it.unimi.dsi.fastutil.ints.Int2IntFunction}). However, {@link #size()} is allowed to return -1 to denote * that the number of keys is not available (e.g., in the case of a string hash function). * *
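 * <p>(Editorial example, not part of the original documentation: a minimal sketch of driving a type-specific
 * map through its function interface; the variable names are invented, while the classes and methods are the
 * standard type-specific API.)
 * <pre>
 * Int2IntFunction f = new Int2IntOpenHashMap();
 * f.put( 1, 100 );      // type-specific put( int, int ): no boxing
 * int v = f.get( 1 );   // type-specific get( int ): returns 100
 * // A function without an enumerable domain (e.g., a minimal perfect hash) may instead return -1 from size().
 * </pre>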

Note that there is an {@link it.unimi.dsi.fastutil.objects.Object2ObjectFunction} that * can also set its default return value. * *

Warning: Equality of functions is not specified * by contract, and it will usually be by reference, as there is no way to enumerate the keys * and establish whether two functions represent the same mathematical entity. * * @see java.util.Map */ public interface Function { /** Associates the specified value with the specified key in this function (optional operation). * * @param key the key. * @param value the value. * @return the old value, or null if no value was present for the given key. * @see java.util.Map#put(Object,Object) */ V put( K key, V value ); /** Returns the value associated by this function to the specified key. * * @param key the key. * @return the corresponding value, or null if no value was present for the given key. * @see java.util.Map#get(Object) */ V get( Object key ); /** Returns true if this function contains a mapping for the specified key. * *

Note that for some kinds of functions (e.g., hashes) this method * will always return true. * * @param key the key. * @return true if this function associates a value with key. * @see java.util.Map#containsKey(Object) */ boolean containsKey( Object key ); /** Removes this key and the associated value from this function if it is present (optional operation). * * @param key the key. * @return the old value, or null if no value was present for the given key. * @see java.util.Map#remove(Object) */ V remove( Object key ); /** Returns the intended number of keys in this function, or -1 if no such number exists. * *

Most function implementations will have some knowledge of the intended number of keys * in their domain. In some cases, however, this might not be possible. * * @return the intended number of keys in this function, or -1 if that number is not available. */ int size(); /** Removes all associations from this function (optional operation). * * @see java.util.Map#clear() */ void clear(); } fastutil-7.1.0/src/it/unimi/dsi/fastutil/Hash.java0000664000000000000000000002347013050705451020544 0ustar rootrootpackage it.unimi.dsi.fastutil; /* * Copyright (C) 2002-2017 Sebastiano Vigna * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /** Basic data for all hash-based classes. * *

Historical note

* *

Warning: the following comments are here for historical reasons, * and apply just to the double hash classes that can be optionally generated. * The standard fastutil distribution since 6.1.0 uses linear-probing hash * tables, and tables are always sized as powers of two. * *
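 * <p>(Editorial sketch, not part of the original documentation: with power-of-two table sizes, a bucket is
 * typically located by mixing the hash and then masking, instead of taking a modulus; mask, free() and
 * matches() below are invented names used only for illustration.)
 * <pre>
 * final int mask = n - 1;                              // n is the table size, a power of two
 * int pos = HashCommon.mix( key.hashCode() ) & mask;   // scramble the hash, then mask
 * while( ! free( pos ) && ! matches( key, pos ) ) pos = ( pos + 1 ) & mask;   // linear probing
 * </pre>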

The classes in fastutil are built around open-addressing hashing * implemented via double hashing. Following Knuth's suggestions in the third volume of The Art of Computer * Programming, we use for the table size a prime p such that * p-2 is also prime. In this way hashing is implemented with modulo p, * and secondary hashing with modulo p-2. * *

Entries in a table can be in three states: {@link #FREE}, {@link #OCCUPIED} or {@link #REMOVED}. * The naive handling of removed entries requires that you search for a free entry as if they were occupied. However, * fastutil implements two useful optimizations, based on the following invariant: *

* Let i0, i1, …, ip-1 be * the permutation of the table indices induced by the key k, that is, i0 is the hash * of k and the following indices are obtained by adding (modulo p) the secondary hash plus one. * If there is a {@link #OCCUPIED} entry with key k, its index in the sequence above comes before * the indices of any {@link #REMOVED} entries with key k. *
* *

When we search for the key k we scan the entries in the * sequence i0, i1, …, * ip-1 and stop when k is found, * when we have finished the sequence or when we find a {@link #FREE} entry. Note * that the correctness of this procedure is not completely trivial. Indeed, * when we stop at a {@link #REMOVED} entry with key k we must rely * on the invariant to be sure that no {@link #OCCUPIED} entry with the same * key can appear later. If we frequently insert and remove the same entries, * this optimization can be very effective (note, however, that when using * objects as keys or values, deleted entries are set to a special fixed value to * optimize garbage collection). * *
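 * <p>(Editorial sketch, not part of the original documentation: the search just described, written out as a
 * loop; h1, h2, p, state[] and key[] are illustrative names, not actual fields of the generated classes.)
 * <pre>
 * int pos = h1;                  // primary hash, in [0..p)
 * final int step = h2 + 1;       // secondary hash plus one
 * while( state[ pos ] != FREE ) {
 *     if ( k.equals( key[ pos ] ) )
 *         return state[ pos ] == OCCUPIED ? pos : -1;   // a REMOVED match means k is absent (by the invariant)
 *     pos = ( pos + step ) % p;                         // next index of the permutation induced by k
 * }
 * return -1;                                            // stopped at a FREE entry: k is absent
 * </pre>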

Moreover, during the probe we keep the index of the first {@link #REMOVED} entry we meet. * If we actually have to insert a new element, we use that * entry if we can, thus avoiding to pollute another {@link #FREE} entry. Since this position comes * a fortiori before any {@link #REMOVED} entries with the same key, we are also keeping the invariant true. */ public interface Hash { /** The initial default size of a hash table. */ final public int DEFAULT_INITIAL_SIZE = 16; /** The default load factor of a hash table. */ final public float DEFAULT_LOAD_FACTOR = .75f; /** The load factor for a (usually small) table that is meant to be particularly fast. */ final public float FAST_LOAD_FACTOR = .5f; /** The load factor for a (usually very small) table that is meant to be extremely fast. */ final public float VERY_FAST_LOAD_FACTOR = .25f; /** A generic hash strategy. * *

Custom hash structures (e.g., {@link * it.unimi.dsi.fastutil.objects.ObjectOpenCustomHashSet}) allow to hash objects * using arbitrary functions, a typical example being that of {@linkplain * it.unimi.dsi.fastutil.ints.IntArrays#HASH_STRATEGY arrays}. Of course, * one has to compare objects for equality consistently with the chosen * function. A hash strategy, thus, specifies an {@linkplain * #equals(Object,Object) equality method} and a {@linkplain * #hashCode(Object) hash function}, with the obvious property that * equal objects must have the same hash code. * *
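 * <p>(Editorial example, not part of the original documentation: using the predefined array strategy to build
 * a set of int arrays with content-based equality; the variable name is invented.)
 * <pre>
 * ObjectOpenCustomHashSet<int[]> set = new ObjectOpenCustomHashSet<int[]>( IntArrays.HASH_STRATEGY );
 * set.add( new int[] { 1, 2, 3 } );
 * set.contains( new int[] { 1, 2, 3 } );   // true: hashing and equality look at the array content
 * </pre>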

Note that the {@link #equals(Object,Object) equals()} method of a strategy must * be able to handle null, too. */ public interface Strategy { /** Returns the hash code of the specified object with respect to this hash strategy. * * @param o an object (or null). * @return the hash code of the given object with respect to this hash strategy. */ public int hashCode( K o ); /** Returns true if the given objects are equal with respect to this hash strategy. * * @param a an object (or null). * @param b another object (or null). * @return true if the two specified objects are equal with respect to this hash strategy. */ public boolean equals( K a, K b ); } /** The default growth factor of a hash table. */ final public int DEFAULT_GROWTH_FACTOR = 16; /** The state of a free hash table entry. */ final public byte FREE = 0; /** The state of a occupied hash table entry. */ final public byte OCCUPIED = -1; /** The state of a hash table entry freed by a deletion. */ final public byte REMOVED = 1; /** A list of primes to be used as table sizes. The i-th element is * the largest prime p smaller than 2(i+28)/16 * and such that p-2 is also prime (or 1, for the first few entries). */ final public int PRIMES[] = { 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 5, 5, 5, 5, 5, 5, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 13, 13, 13, 13, 13, 13, 13, 13, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 31, 31, 31, 31, 31, 31, 31, 43, 43, 43, 43, 43, 43, 43, 43, 61, 61, 61, 61, 61, 73, 73, 73, 73, 73, 73, 73, 103, 103, 109, 109, 109, 109, 109, 139, 139, 151, 151, 151, 151, 181, 181, 193, 199, 199, 199, 229, 241, 241, 241, 271, 283, 283, 313, 313, 313, 349, 349, 349, 349, 421, 433, 463, 463, 463, 523, 523, 571, 601, 619, 661, 661, 661, 661, 661, 823, 859, 883, 883, 883, 1021, 1063, 1093, 1153, 1153, 1231, 1321, 1321, 1429, 1489, 1489, 1621, 1699, 1789, 1873, 1951, 2029, 2131, 2143, 2311, 2383, 2383, 2593, 2731, 2803, 3001, 3121, 3259, 3391, 3583, 3673, 3919, 4093, 4273, 4423, 4651, 4801, 5023, 5281, 5521, 5743, 5881, 6301, 6571, 6871, 7129, 7489, 7759, 8089, 8539, 8863, 9283, 9721, 10141, 10531, 11071, 11551, 12073, 12613, 13009, 13759, 14323, 14869, 15649, 16363, 17029, 17839, 18541, 19471, 20233, 21193, 22159, 23059, 24181, 25171, 26263, 27541, 28753, 30013, 31321, 32719, 34213, 35731, 37309, 38923, 40639, 42463, 44281, 46309, 48313, 50461, 52711, 55051, 57529, 60091, 62299, 65521, 68281, 71413, 74611, 77713, 81373, 84979, 88663, 92671, 96739, 100801, 105529, 109849, 115021, 120079, 125509, 131011, 136861, 142873, 149251, 155863, 162751, 169891, 177433, 185071, 193381, 202129, 211063, 220021, 229981, 240349, 250969, 262111, 273643, 285841, 298411, 311713, 325543, 339841, 355009, 370663, 386989, 404269, 422113, 440809, 460081, 480463, 501829, 524221, 547399, 571603, 596929, 623353, 651019, 679909, 709741, 741343, 774133, 808441, 844201, 881539, 920743, 961531, 1004119, 1048573, 1094923, 1143283, 1193911, 1246963, 1302181, 1359733, 1420039, 1482853, 1548541, 1616899, 1688413, 1763431, 1841293, 1922773, 2008081, 2097133, 2189989, 2286883, 2388163, 2493853, 2604013, 2719669, 2840041, 2965603, 3097123, 3234241, 3377191, 3526933, 3682363, 3845983, 4016041, 4193803, 4379719, 4573873, 4776223, 4987891, 5208523, 5439223, 5680153, 5931313, 6194191, 6468463, 6754879, 7053331, 7366069, 7692343, 8032639, 8388451, 8759953, 9147661, 9552733, 9975193, 10417291, 10878619, 11360203, 11863153, 12387841, 12936529, 13509343, 14107801, 14732413, 15384673, 16065559, 16777141, 17519893, 18295633, 19105483, 19951231, 20834689, 21757291, 22720591, 23726449, 
24776953, 25873963, 27018853, 28215619, 29464579, 30769093, 32131711, 33554011, 35039911, 36591211, 38211163, 39903121, 41669479, 43514521, 45441199, 47452879, 49553941, 51747991, 54039079, 56431513, 58930021, 61539091, 64263571, 67108669, 70079959, 73182409, 76422793, 79806229, 83339383, 87029053, 90881083, 94906249, 99108043, 103495879, 108077731, 112863013, 117860053, 123078019, 128526943, 134217439, 140159911, 146365159, 152845393, 159612601, 166679173, 174058849, 181765093, 189812341, 198216103, 206991601, 216156043, 225726379, 235720159, 246156271, 257054491, 268435009, 280319203, 292730833, 305691181, 319225021, 333358513, 348117151, 363529759, 379624279, 396432481, 413983771, 432312511, 451452613, 471440161, 492312523, 514109251, 536870839, 560640001, 585461743, 611382451, 638450569, 666717199, 696235363, 727060069, 759249643, 792864871, 827967631, 864625033, 902905501, 942880663, 984625531, 1028218189, 1073741719, 1121280091, 1170923713, 1222764841, 1276901371, 1333434301, 1392470281, 1454120779, 1518500173, 1585729993, 1655935399, 1729249999, 1805811253, 1885761133, 1969251079, 2056437379, 2147482951 }; } fastutil-7.1.0/src/it/unimi/dsi/fastutil/HashCommon.java0000664000000000000000000002062013050705451021707 0ustar rootrootpackage it.unimi.dsi.fastutil; /* * Copyright (C) 2002-2017 Sebastiano Vigna * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /** Common code for all hash-based classes. */ public class HashCommon { protected HashCommon() {}; /** This reference is used to fill keys and values of removed entries (if they are objects). null cannot be used as it would confuse the search algorithm in the presence of an actual null key. */ public static final Object REMOVED = new Object(); /** 232 · φ, φ = (√5 − 1)/2. */ private static final int INT_PHI = 0x9E3779B9; /** The reciprocal of {@link #INT_PHI} modulo 232. */ private static final int INV_INT_PHI = 0x144cbc89; /** 264 · φ, φ = (√5 − 1)/2. */ private static final long LONG_PHI = 0x9E3779B97F4A7C15L; /** The reciprocal of {@link #LONG_PHI} modulo 264. */ private static final long INV_LONG_PHI = 0xf1de83e19937733dL; /** Avalanches the bits of an integer by applying the finalisation step of MurmurHash3. * *

This method implements the finalisation step of Austin Appleby's MurmurHash3. * Its purpose is to avalanche the bits of the argument to within 0.25% bias. * * @param x an integer. * @return a hash value with good avalanching properties. */ public final static int murmurHash3( int x ) { x ^= x >>> 16; x *= 0x85ebca6b; x ^= x >>> 13; x *= 0xc2b2ae35; x ^= x >>> 16; return x; } /** Avalanches the bits of a long integer by applying the finalisation step of MurmurHash3. * *

This method implements the finalisation step of Austin Appleby's MurmurHash3. * Its purpose is to avalanche the bits of the argument to within 0.25% bias. * * @param x a long integer. * @return a hash value with good avalanching properties. */ public final static long murmurHash3( long x ) { x ^= x >>> 33; x *= 0xff51afd7ed558ccdL; x ^= x >>> 33; x *= 0xc4ceb9fe1a85ec53L; x ^= x >>> 33; return x; } /** Quickly mixes the bits of an integer. * *

This method mixes the bits of the argument by multiplying by the golden ratio and * xorshifting the result. It is borrowed from Koloboke, and * it has slightly worse behaviour than {@link #murmurHash3(int)} (in open-addressing hash tables the average number of probes * is slightly larger), but it's much faster. * * @param x an integer. * @return a hash value obtained by mixing the bits of {@code x}. * @see #invMix(int) */ public final static int mix( final int x ) { final int h = x * INT_PHI; return h ^ (h >>> 16); } /** The inverse of {@link #mix(int)}. This method is mainly useful to create unit tests. * * @param x an integer. * @return a value that passed through {@link #mix(int)} would give {@code x}. */ public final static int invMix( final int x ) { return ( x ^ x >>> 16 ) * INV_INT_PHI; } /** Quickly mixes the bits of a long integer. * *

This method mixes the bits of the argument by multiplying by the golden ratio and * xorshifting twice the result. It is borrowed from Koloboke, and * it has slightly worse behaviour than {@link #murmurHash3(long)} (in open-addressing hash tables the average number of probes * is slightly larger), but it's much faster. * * @param x a long integer. * @return a hash value obtained by mixing the bits of {@code x}. */ public final static long mix( final long x ) { long h = x * LONG_PHI; h ^= h >>> 32; return h ^ (h >>> 16); } /** The inverse of {@link #mix(long)}. This method is mainly useful to create unit tests. * * @param x a long integer. * @return a value that passed through {@link #mix(long)} would give {@code x}. */ public final static long invMix( long x ) { x ^= x >>> 32; x ^= x >>> 16; return ( x ^ x >>> 32 ) * INV_LONG_PHI; } /** Returns the hash code that would be returned by {@link Float#hashCode()}. * * @param f a float. * @return the same code as {@link Float#hashCode() new Float(f).hashCode()}. */ final public static int float2int( final float f ) { return Float.floatToRawIntBits( f ); } /** Returns the hash code that would be returned by {@link Double#hashCode()}. * * @param d a double. * @return the same code as {@link Double#hashCode() new Double(f).hashCode()}. */ final public static int double2int( final double d ) { final long l = Double.doubleToRawLongBits( d ); return (int)( l ^ ( l >>> 32 ) ); } /** Returns the hash code that would be returned by {@link Long#hashCode()}. * * @param l a long. * @return the same code as {@link Long#hashCode() new Long(f).hashCode()}. */ final public static int long2int( final long l ) { return (int)( l ^ ( l >>> 32 ) ); } /** Return the least power of two greater than or equal to the specified value. * *

Note that this function will return 1 when the argument is 0. * * @param x an integer smaller than or equal to 2^30. * @return the least power of two greater than or equal to the specified value. */ public static int nextPowerOfTwo( int x ) { if ( x == 0 ) return 1; x--; x |= x >> 1; x |= x >> 2; x |= x >> 4; x |= x >> 8; return ( x | x >> 16 ) + 1; } /** Return the least power of two greater than or equal to the specified value. * *

Note that this function will return 1 when the argument is 0. * * @param x a long integer smaller than or equal to 262. * @return the least power of two greater than or equal to the specified value. */ public static long nextPowerOfTwo( long x ) { if ( x == 0 ) return 1; x--; x |= x >> 1; x |= x >> 2; x |= x >> 4; x |= x >> 8; x |= x >> 16; return ( x | x >> 32 ) + 1; } /** Returns the maximum number of entries that can be filled before rehashing. * * @param n the size of the backing array. * @param f the load factor. * @return the maximum number of entries before rehashing. */ public static int maxFill( final int n, final float f ) { /* We must guarantee that there is always at least * one free entry (even with pathological load factors). */ return Math.min( (int)Math.ceil( n * f ), n - 1 ); } /** Returns the maximum number of entries that can be filled before rehashing. * * @param n the size of the backing array. * @param f the load factor. * @return the maximum number of entries before rehashing. */ public static long maxFill( final long n, final float f ) { /* We must guarantee that there is always at least * one free entry (even with pathological load factors). */ return Math.min( (long)Math.ceil( n * f ), n - 1 ); } /** Returns the least power of two smaller than or equal to 230 and larger than or equal to Math.ceil( expected / f ). * * @param expected the expected number of elements in a hash table. * @param f the load factor. * @return the minimum possible size for a backing array. * @throws IllegalArgumentException if the necessary size is larger than 230. */ public static int arraySize( final int expected, final float f ) { final long s = Math.max( 2, nextPowerOfTwo( (long)Math.ceil( expected / f ) ) ); if ( s > (1 << 30) ) throw new IllegalArgumentException( "Too large (" + expected + " expected elements with load factor " + f + ")" ); return (int)s; } /** Returns the least power of two larger than or equal to Math.ceil( expected / f ). * * @param expected the expected number of elements in a hash table. * @param f the load factor. * @return the minimum possible size for a backing big array. */ public static long bigArraySize( final long expected, final float f ) { return nextPowerOfTwo( (long)Math.ceil( expected / f ) ); } }fastutil-7.1.0/src/it/unimi/dsi/fastutil/BidirectionalIterator.java0000664000000000000000000000326013050705451024136 0ustar rootrootpackage it.unimi.dsi.fastutil; /* * Copyright (C) 2002-2017 Sebastiano Vigna * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import java.util.Iterator; import java.util.ListIterator; /** A bidirectional {@link Iterator}. * *

This kind of iterator is essentially a {@link ListIterator} that * does not support {@link ListIterator#previousIndex()} and {@link * ListIterator#nextIndex()}. It is useful for those maps that can easily * provide bidirectional iteration, but provide no index. * *
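* For instance (an illustrative sketch, not part of the original documentation; it uses {@code ObjectAVLTreeSet} and {@code ObjectBidirectionalIterator} from the {@code objects} package), a bidirectional iterator can be run to the end and then scanned backwards:
 * ObjectBidirectionalIterator<String> i =
 *     new it.unimi.dsi.fastutil.objects.ObjectAVLTreeSet<String>( java.util.Arrays.asList( "a", "b", "c" ) ).iterator();
 * while( i.hasNext() ) i.next();                                  // move past the last element
 * while( i.hasPrevious() ) System.out.println( i.previous() );    // prints c, b, a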

Note that iterators returned by fastutil classes are more * specific, and support skipping. This class serves the purpose of organising * in a cleaner way the relationships between various iterators. * * @see Iterator * @see ListIterator */ public interface BidirectionalIterator extends Iterator { /** Returns the previous element from the collection. * * @return the previous element from the collection. * @see java.util.ListIterator#previous() */ K previous(); /** Returns whether there is a previous element. * * @return whether there is a previous element. * @see java.util.ListIterator#hasPrevious() */ boolean hasPrevious(); } fastutil-7.1.0/src/it/unimi/dsi/fastutil/Stack.java0000664000000000000000000000364713050705451020732 0ustar rootrootpackage it.unimi.dsi.fastutil; /* * Copyright (C) 2002-2017 Sebastiano Vigna * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import java.util.NoSuchElementException; /** A stack. * *

A stack must provide the classical {@link #push(Object)} and * {@link #pop()} operations, but may be also peekable * to some extent: it may provide just the {@link #top()} function, * or even a more powerful {@link #peek(int)} method that provides * access to all elements on the stack (indexed from the top, which * has index 0). */ public interface Stack { /** Pushes the given object on the stack. * * @param o the object that will become the new top of the stack. */ void push( K o ); /** Pops the top off the stack. * * @return the top of the stack. * @throws NoSuchElementException if the stack is empty. */ K pop(); /** Checks whether the stack is empty. * * @return true if the stack is empty. */ boolean isEmpty(); /** Peeks at the top of the stack (optional operation). * * @return the top of the stack. * @throws NoSuchElementException if the stack is empty. */ K top(); /** Peeks at an element on the stack (optional operation). * * @param i an index from the stop of the stack (0 represents the top). * @return the i-th element on the stack. * @throws IndexOutOfBoundsException if the designated element does not exist.. */ K peek( int i ); } fastutil-7.1.0/src/it/unimi/dsi/fastutil/BigList.java0000664000000000000000000001104613050705451021212 0ustar rootrootpackage it.unimi.dsi.fastutil; /* * Copyright (C) 2010-2017 Sebastiano Vigna * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import java.util.Collection; import java.util.List; /** A list with big (i.e., 64-bit) indices. * *

Instances of this class implement the same semantics as that of {@link List}: however, * setters methods use long indices, getters return long values, and returned iterators are actually * of type {@link BigListIterator}. */ public interface BigList extends Collection, Size64 { /** Returns the element at the specified position. * * @param index a position in the big list. * @return the element at the specified position. * @see List#get(int) */ public K get( long index ); /** Removes the element at the specified position. * * @param index a position in the big list. * @return the element previously at the specified position. * @see List#remove(int) */ public K remove( long index ); /** Replaces the element at the specified position in this big list with the specified element (optional operation). * * @param index a position in the big list. * @param element the element to be stored at the specified position. * @return the element previously at the specified positions. * @see List#set(int,Object) */ public K set( long index, K element ); /** Inserts the specified element at the specified position in this big list (optional operation). * * @param index a position in the big list. * @param element an element to be inserted. * @see List#add(int,Object) */ public void add( long index, K element ); /** Sets the size of this big list. * *

If the specified size is smaller than the current size, the last elements are * discarded. Otherwise, they are filled with 0/null/false. * * @param size the new size. */ void size( long size ); /** Inserts all of the elements in the specified collection into this big list at the specified position (optional operation). * * @param index index at which to insert the first element from the specified collection. * @param c collection containing elements to be added to this big list. * @return true if this big list changed as a result of the call * @see List#addAll(int, Collection) */ public boolean addAll( long index, Collection c ); /** Returns the index of the first occurrence of the specified element in this big list, or -1 if this big list does not contain the element. * * @param o the object to search for. * @return the index of the first occurrence of the specified element in this big list, or -1 if this big list does not contain the element. * @see List#indexOf(Object) */ public long indexOf( Object o ); /** Returns the index of the last occurrence of the specified element in this big list, or -1 if this big list does not contain the element. * * @param o the object to search for. * @return the index of the last occurrence of the specified element in this big list, or -1 if this big list does not contain the element. * @see List#lastIndexOf(Object) */ public long lastIndexOf( Object o ); /** Returns a big-list iterator over the elements in this big list. * * @return a big-list iterator over the elements in this big list. * @see List#listIterator() */ public BigListIterator listIterator(); /** Returns a big-list iterator of the elements in this big list, starting at the specified position in this big list. * * @param index index of first element to be returned from the big-list iterator. * @return a big-list iterator of the elements in this big list, starting at the specified position in * this big list. * @see List#listIterator(int) */ public BigListIterator listIterator( long index ); /** Returns a big sublist view of this big list. * * @param from the starting element (inclusive). * @param to the ending element (exclusive). * @return a big sublist view of this big list. * @see List#subList(int, int) */ public BigList subList( long from, long to ); } fastutil-7.1.0/src/it/unimi/dsi/fastutil/BigListIterator.java0000664000000000000000000000401713050705451022724 0ustar rootrootpackage it.unimi.dsi.fastutil; /* * Copyright (C) 2010-2017 Sebastiano Vigna * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import java.util.Iterator; import java.util.ListIterator; /** A list iterator over a {@link BigList}. * *

This kind of iterator is essentially a {@link ListIterator} with long indices. * * @see Iterator * @see ListIterator */ public interface BigListIterator extends BidirectionalIterator { /** Returns the index of the element that would be returned by a subsequent call to next. * (Returns list size if the list iterator is at the end of the list.) * * @return the index of the element that would be returned by a subsequent call to next, or list * size if list iterator is at end of list. * @see ListIterator#nextIndex() */ long nextIndex(); /** Returns the index of the element that would be returned by a subsequent call to previous. * (Returns -1 if the list iterator is at the beginning of the list.) * * @return the index of the element that would be returned by a subsequent call to previous, or * -1 if list iterator is at beginning of list. * @see ListIterator#previousIndex() */ long previousIndex(); /** Skips the given number of elements. * *

The effect of this call is exactly the same as that of * calling {@link #next()} for n times (possibly stopping * if {@link #hasNext()} becomes false). * * @param n the number of elements to skip. * @return the number of elements actually skipped. */ long skip( long n ); } fastutil-7.1.0/src/it/unimi/dsi/fastutil/BigArrays.java0000664000000000000000000005145413050705451021547 0ustar rootrootpackage it.unimi.dsi.fastutil; /* * Copyright (C) 2010-2017 Sebastiano Vigna * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * * For the sorting code: * * Copyright (C) 1999 CERN - European Organization for Nuclear Research. * * Permission to use, copy, modify, distribute and sell this software and * its documentation for any purpose is hereby granted without fee, * provided that the above copyright notice appear in all copies and that * both that copyright notice and this permission notice appear in * supporting documentation. CERN makes no representations about the * suitability of this software for any purpose. It is provided "as is" * without expressed or implied warranty. */ import it.unimi.dsi.fastutil.ints.IntBigArrayBigList; import it.unimi.dsi.fastutil.ints.IntBigArrays; import it.unimi.dsi.fastutil.longs.LongComparator; /** * A class providing static methods and objects that do useful things with big * arrays. * *

Introducing big arrays

* *

* A big array is an array-of-arrays representation of an array. The * length of a big array is bounded by {@link #SEGMENT_SIZE} * * {@link Integer#MAX_VALUE} = {@value #SEGMENT_SIZE} * (2^31 − * 1) rather than {@link Integer#MAX_VALUE}. The type of a big array is that of * an array-of-arrays, so a big array of integers is of type * int[][]. Note that {@link #SEGMENT_SIZE} has been chosen so that * a single segment is smaller than 2^31 bytes independently of the * data type. It might be enlarged in the future. * *

* If a is a big array, a[0], a[1], * … are called the segments of the big array. All segments, * except possibly for the last one, are of length {@link #SEGMENT_SIZE}. Given * an index i into a big array, there is an associated * {@linkplain #segment(long) segment} and an associated * {@linkplain #displacement(long) * displacement} into that segment. Access to single members happens by * means of accessors defined in the type-specific versions (see, e.g., * {@link IntBigArrays#get(int[][], long)} and * {@link IntBigArrays#set(int[][], long, int)}), but you can also use the * methods {@link #segment(long)}/{@link #displacement(long)} to access entries * manually. * *
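* For example (an illustrative sketch, not part of the original text; sizes here are arbitrary and require a correspondingly large heap), an entry at a long index can be read and written by hand through its segment and displacement:
 * final int[][] a = IntBigArrays.newBigArray( 1L << 32 );             // a big array of 2^32 ints
 * final long i = 3000000000L;
 * final int v = a[ segment( i ) ][ displacement( i ) ];               // same as IntBigArrays.get( a, i )
 * a[ segment( i ) ][ displacement( i ) ] = v + 1;                     // same as IntBigArrays.set( a, i, v + 1 )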

Scanning big arrays

* *

* You can scan a big array using the following idiomatic form: * *

 * for(int s = 0; s < a.length; s++) {
 *     final int[] t = a[s];
 *     final int l = t.length;
 *     for(int d = 0; d < l; d++) {
 *          do something with t[d]
 *     }
 * }
 * 
* * or using the (simpler and usually faster) reversed version: * *
 * for( int s = a.length; s-- != 0;) {
 *     final int[] t = a[s];
 *     for( int d = t.length; d-- != 0;) {
 *         do something with t[d]
 *     }
 * }
 * 
*

* Inside the inner loop, the original index in a can be retrieved * using {@link #index(int, int) index(segment, displacement)}. You can also * use an additional long to keep track of the index. * *

* Note that caching is essential in making these loops essentially as fast as * those scanning standard arrays (as iterations of the outer loop happen very * rarely). Using loops of this kind is far faster than using a standard * loop and accessors. * *

* In some situations, you might want to iterate over a part of a big array * having an offset and a length. In this case, the idiomatic loops are as * follows: * *

 * for(int s = segment(offset); s < segment(offset + length + SEGMENT_MASK); s++) {
 *     final int[] t = a[s];
 *     final int l = (int)Math.min(t.length, offset + length - start(s));
 *     for(int d = (int)Math.max(0, offset - start(s)); d < l; d++) {
 *         do something with t[d]
 *     }
 * }
 * 
* * or, in a reversed form, * *
 * for(int s = segment(offset + length + SEGMENT_MASK); s-- != segment(offset);) {
 *     final int[] t = a[s];
 *     final int b = (int)Math.max(0, offset - start(s));
 *     for(int d = (int)Math.min(t.length, offset + length - start(s)); d-- != b ;) {
 *         do something with t[d]
 *     }
 * }
 * 
* *

Literal big arrays

* *

* A literal big array can be easily created by using the suitable type-specific * wrap() method (e.g., {@link IntBigArrays#wrap(int[])}) around a * literal standard array. Alternatively, for very small arrays you can just * declare a literal array-of-array (e.g., new int[][] { { 1, 2 } } * ). Be warned, however, that this can lead to creating illegal big arrays if * for some reason (e.g., stress testing) {@link #SEGMENT_SIZE} is set to a * value smaller than the inner array length. * *
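* For instance (illustrative):
 * int[][] small = IntBigArrays.wrap( new int[] { 1, 2, 3 } );    // a (tiny) big array with the same content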

Big alternatives

* *

* If you find the kind of “bare hands” approach to big arrays not * object-oriented enough, please use big lists based on big arrays (e.g., * {@link IntBigArrayBigList}). Big arrays follow the Java tradition of * considering arrays as a “legal alien”—something in-between * an object and a primitive type. This approach lacks the consistency of a full * object-oriented approach, but provides some significant performance gains. * *

Additional methods

* *

* In addition to commodity methods, this class contains {@link BigSwapper} * -based implementations of * {@linkplain #quickSort(long, long, LongComparator, BigSwapper) quicksort} and * of a stable, in-place * {@linkplain #mergeSort(long, long, LongComparator, BigSwapper) mergesort}. * These generic sorting methods can be used to sort any kind of list, but they * find their natural usage, for instance, in sorting big arrays in parallel. * * @see it.unimi.dsi.fastutil.Arrays */ public class BigArrays { /** * The shift used to compute the segment associated with an index * (equivalently, the logarithm of the segment size). */ public final static int SEGMENT_SHIFT = 27; /** * The current size of a segment (227) is the largest size that * makes the physical memory allocation for a single segment strictly * smaller than 231 bytes. */ public final static int SEGMENT_SIZE = 1 << SEGMENT_SHIFT; /** The mask used to compute the displacement associated to an index. */ public final static int SEGMENT_MASK = SEGMENT_SIZE - 1; protected BigArrays() { } /** * Computes the segment associated with a given index. * * @param index * an index into a big array. * @return the associated segment. */ public static int segment(final long index) { return (int) (index >>> SEGMENT_SHIFT); } /** * Computes the displacement associated with a given index. * * @param index * an index into a big array. * @return the associated displacement (in the associated * {@linkplain #segment(long) segment}). */ public static int displacement(final long index) { return (int) (index & SEGMENT_MASK); } /** * Computes the starting index of a given segment. * * @param segment * the segment of a big array. * @return the starting index of the segment. */ public static long start(final int segment) { return (long) segment << SEGMENT_SHIFT; } /** * Computes the index associated with given segment and displacement. * * @param segment * the segment of a big array. * @param displacement * the displacement into the segment. * @return the associated index: that is, {@link #segment(long) * segment(index(segment, displacement)) == segment} and * {@link #displacement(long) displacement(index(segment, * displacement)) == displacement}. */ public static long index(final int segment, final int displacement) { return start(segment) + displacement; } /** * Ensures that a range given by its first (inclusive) and last (exclusive) * elements fits a big array of given length. * *

* This method may be used whenever a big array range check is needed. * * @param bigArrayLength * a big-array length. * @param from * a start index (inclusive). * @param to * an end index (inclusive). * @throws IllegalArgumentException * if from is greater than to. * @throws ArrayIndexOutOfBoundsException * if from or to are greater than * bigArrayLength or negative. */ public static void ensureFromTo(final long bigArrayLength, final long from, final long to) { if (from < 0) throw new ArrayIndexOutOfBoundsException("Start index (" + from + ") is negative"); if (from > to) throw new IllegalArgumentException("Start index (" + from + ") is greater than end index (" + to + ")"); if (to > bigArrayLength) throw new ArrayIndexOutOfBoundsException("End index (" + to + ") is greater than big-array length (" + bigArrayLength + ")"); } /** * Ensures that a range given by an offset and a length fits a big array of * given length. * *

* This method may be used whenever a big array range check is needed. * * @param bigArrayLength * a big-array length. * @param offset * a start index for the fragment * @param length * a length (the number of elements in the fragment). * @throws IllegalArgumentException * if length is negative. * @throws ArrayIndexOutOfBoundsException * if offset is negative or offset + * length is greater than * bigArrayLength. */ public static void ensureOffsetLength(final long bigArrayLength, final long offset, final long length) { if (offset < 0) throw new ArrayIndexOutOfBoundsException("Offset (" + offset + ") is negative"); if (length < 0) throw new IllegalArgumentException("Length (" + length + ") is negative"); if (offset + length > bigArrayLength) throw new ArrayIndexOutOfBoundsException("Last index (" + (offset + length) + ") is greater than big-array length (" + bigArrayLength + ")"); } /** * Ensures that a big-array length is legal. * * @param bigArrayLength * a big-array length. * @throws IllegalArgumentException * if length is negative, or larger than or equal * to {@link #SEGMENT_SIZE} * {@link Integer#MAX_VALUE}. */ public static void ensureLength(final long bigArrayLength) { if (bigArrayLength < 0) throw new IllegalArgumentException("Negative big-array size: " + bigArrayLength); if (bigArrayLength >= (long) Integer.MAX_VALUE << SEGMENT_SHIFT) throw new IllegalArgumentException("Big-array size too big: " + bigArrayLength); } private static final int SMALL = 7; private static final int MEDIUM = 40; /** * Transforms two consecutive sorted ranges into a single sorted range. The * initial ranges are [first, middle) and * [middle, last), and the resulting range is * [first, last). Elements in the first input range will * precede equal elements in the second. */ private static void inPlaceMerge(final long from, long mid, final long to, final LongComparator comp, final BigSwapper swapper) { if (from >= mid || mid >= to) return; if (to - from == 2) { if (comp.compare(mid, from) < 0) { swapper.swap(from, mid); } return; } long firstCut; long secondCut; if (mid - from > to - mid) { firstCut = from + (mid - from) / 2; secondCut = lowerBound(mid, to, firstCut, comp); } else { secondCut = mid + (to - mid) / 2; firstCut = upperBound(from, mid, secondCut, comp); } long first2 = firstCut; long middle2 = mid; long last2 = secondCut; if (middle2 != first2 && middle2 != last2) { long first1 = first2; long last1 = middle2; while (first1 < --last1) swapper.swap(first1++, last1); first1 = middle2; last1 = last2; while (first1 < --last1) swapper.swap(first1++, last1); first1 = first2; last1 = last2; while (first1 < --last1) swapper.swap(first1++, last1); } mid = firstCut + (secondCut - mid); inPlaceMerge(from, firstCut, mid, comp, swapper); inPlaceMerge(mid, secondCut, to, comp, swapper); } /** * Performs a binary search on an already sorted range: finds the first * position where an element can be inserted without violating the ordering. * Sorting is by a user-supplied comparison function. * * @param mid * Beginning of the range. * @param to * One past the end of the range. * @param firstCut * Element to be searched for. * @param comp * Comparison function. * @return The largest index i such that, for every j in the range * [first, i), comp.apply(array[j], x) is * true. 
*/ private static long lowerBound(long mid, final long to, final long firstCut, final LongComparator comp) { long len = to - mid; while (len > 0) { long half = len / 2; long middle = mid + half; if (comp.compare(middle, firstCut) < 0) { mid = middle + 1; len -= half + 1; } else { len = half; } } return mid; } /** Returns the index of the median of three elements. */ private static long med3(final long a, final long b, final long c, final LongComparator comp) { final int ab = comp.compare(a, b); final int ac = comp.compare(a, c); final int bc = comp.compare(b, c); return (ab < 0 ? (bc < 0 ? b : ac < 0 ? c : a) : (bc > 0 ? b : ac > 0 ? c : a)); } /** * Sorts the specified range of elements using the specified big swapper and * according to the order induced by the specified comparator using * mergesort. * *

* This sort is guaranteed to be stable: equal elements will not be * reordered as a result of the sort. The sorting algorithm is an in-place * mergesort that is significantly slower than a standard mergesort, as its * running time is * O(n (log n)2), but it * does not allocate additional memory; as a result, it can be used as a * generic sorting algorithm. * * @param from * the index of the first element (inclusive) to be sorted. * @param to * the index of the last element (exclusive) to be sorted. * @param comp * the comparator to determine the order of the generic data * (arguments are positions). * @param swapper * an object that knows how to swap the elements at any two * positions. */ public static void mergeSort(final long from, final long to, final LongComparator comp, final BigSwapper swapper) { final long length = to - from; // Insertion sort on smallest arrays if (length < SMALL) { for (long i = from; i < to; i++) { for (long j = i; j > from && (comp.compare(j - 1, j) > 0); j--) { swapper.swap(j, j - 1); } } return; } // Recursively sort halves long mid = (from + to) >>> 1; mergeSort(from, mid, comp, swapper); mergeSort(mid, to, comp, swapper); // If list is already sorted, nothing left to do. This is an // optimization that results in faster sorts for nearly ordered lists. if (comp.compare(mid - 1, mid) <= 0) return; // Merge sorted halves inPlaceMerge(from, mid, to, comp, swapper); } /** * Sorts the specified range of elements using the specified big swapper and * according to the order induced by the specified comparator using * quicksort. * *

* The sorting algorithm is a tuned quicksort adapted from Jon L. Bentley * and M. Douglas McIlroy, “Engineering a Sort Function”, * Software: Practice and Experience, 23(11), pages 1249−1265, * 1993. * * @param from * the index of the first element (inclusive) to be sorted. * @param to * the index of the last element (exclusive) to be sorted. * @param comp * the comparator to determine the order of the generic data. * @param swapper * an object that knows how to swap the elements at any two * positions. */ public static void quickSort(final long from, final long to, final LongComparator comp, final BigSwapper swapper) { final long len = to - from; // Insertion sort on smallest arrays if (len < SMALL) { for (long i = from; i < to; i++) for (long j = i; j > from && (comp.compare(j - 1, j) > 0); j--) { swapper.swap(j, j - 1); } return; } // Choose a partition element, v long m = from + len / 2; // Small arrays, middle element if (len > SMALL) { long l = from, n = to - 1; if (len > MEDIUM) { // Big arrays, pseudomedian of 9 long s = len / 8; l = med3(l, l + s, l + 2 * s, comp); m = med3(m - s, m, m + s, comp); n = med3(n - 2 * s, n - s, n, comp); } m = med3(l, m, n, comp); // Mid-size, med of 3 } // long v = x[m]; long a = from, b = a, c = to - 1, d = c; // Establish Invariant: v* (v)* v* while (true) { int comparison; while (b <= c && ((comparison = comp.compare(b, m)) <= 0)) { if (comparison == 0) { if (a == m) m = b; // moving target; DELTA to JDK !!! else if (b == m) m = a; // moving target; DELTA to JDK !!! swapper.swap(a++, b); } b++; } while (c >= b && ((comparison = comp.compare(c, m)) >= 0)) { if (comparison == 0) { if (c == m) m = d; // moving target; DELTA to JDK !!! else if (d == m) m = c; // moving target; DELTA to JDK !!! swapper.swap(c, d--); } c--; } if (b > c) break; if (b == m) m = d; // moving target; DELTA to JDK !!! else if (c == m) m = c; // moving target; DELTA to JDK !!! swapper.swap(b++, c--); } // Swap partition elements back to middle long s; long n = from + len; s = Math.min(a - from, b - a); vecSwap(swapper, from, b - s, s); s = Math.min(d - c, n - d - 1); vecSwap(swapper, b, n - s, s); // Recursively sort non-partition-elements if ((s = b - a) > 1) quickSort(from, from + s, comp, swapper); if ((s = d - c) > 1) quickSort(n - s, n, comp, swapper); } /** * Performs a binary search on an already-sorted range: finds the last * position where an element can be inserted without violating the ordering. * Sorting is by a user-supplied comparison function. * * @param from * Beginning of the range. * @param mid * One past the end of the range. * @param secondCut * Element to be searched for. * @param comp * Comparison function. * @return The largest index i such that, for every j in the range * [first, i), comp.apply(x, array[j]) is * false. */ private static long upperBound(long from, final long mid, final long secondCut, final LongComparator comp) { long len = mid - from; while (len > 0) { long half = len / 2; long middle = from + half; if (comp.compare(secondCut, middle) < 0) { len = half; } else { from = middle + 1; len -= half + 1; } } return from; } /** Swaps x[a .. (a+n-1)] with x[b .. (b+n-1)]. 
*/ private static void vecSwap(final BigSwapper swapper, long from, long l, final long s) { for (int i = 0; i < s; i++, from++, l++) swapper.swap(from, l); } public static void main(final String arg[]) { int[][] a = IntBigArrays.newBigArray(1L << Integer.parseInt(arg[0])); long x, y, z, start; for (int k = 10; k-- != 0;) { start = -System.currentTimeMillis(); x = 0; for (long i = IntBigArrays.length(a); i-- != 0;) x ^= i ^ IntBigArrays.get(a, i); if (x == 0) System.err.println(); System.out.println("Single loop: " + (start + System.currentTimeMillis()) + "ms"); start = -System.currentTimeMillis(); y = 0; for (int i = a.length; i-- != 0;) { final int[] t = a[i]; for (int d = t.length; d-- != 0;) y ^= t[d] ^ index(i, d); } if (y == 0) System.err.println(); if (x != y) throw new AssertionError(); System.out.println("Double loop: " + (start + System.currentTimeMillis()) + "ms"); z = 0; long j = IntBigArrays.length(a); for (int i = a.length; i-- != 0;) { final int[] t = a[i]; for (int d = t.length; d-- != 0;) y ^= t[d] ^ --j; } if (z == 0) System.err.println(); if (x != z) throw new AssertionError(); System.out.println("Double loop (with additional index): " + (start + System.currentTimeMillis()) + "ms"); } } } fastutil-7.1.0/src/it/unimi/dsi/fastutil/PriorityQueue.java0000664000000000000000000000562613050705451022512 0ustar rootrootpackage it.unimi.dsi.fastutil; /* * Copyright (C) 2003-2017 Paolo Boldi and Sebastiano Vigna * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import java.util.Comparator; import java.util.NoSuchElementException; /** A priority queue. * *

A priority queue provides a way to {@linkplain #enqueue(Object) enqueue} * elements, and to {@linkplain #dequeue() dequeue} them in some specified * order. Elements that are smaller in the specified order are * dequeued first. It is also possible to get the {@linkplain #first() first * element}, that is, the element that would be dequeued next. * *
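* A minimal usage sketch (added for illustration; it assumes the heap-based implementation {@code ObjectHeapPriorityQueue} from the {@code objects} package):
 * PriorityQueue<String> q = new it.unimi.dsi.fastutil.objects.ObjectHeapPriorityQueue<String>();
 * q.enqueue( "banana" );
 * q.enqueue( "apple" );
 * String s = q.dequeue();    // "apple": the smallest element in natural order comes out first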

Additionally, the queue may provide a method to peek at the * element that would be dequeued {@linkplain #last() last}. * *

The relative order of the elements enqueued should not change during * queue operations. Nonetheless, some implementations may give the caller a * way to notify the queue that the {@linkplain #changed() first element has * changed its relative position in the order}. */ public interface PriorityQueue { /** Enqueues a new element. * * @param x the element to enqueue.. */ void enqueue( K x ); /** Dequeues the {@linkplain #first() first} element from the queue. * * @return the dequeued element. * @throws NoSuchElementException if the queue is empty. */ K dequeue(); /** Checks whether the queue is empty. * * @return true if the queue is empty. */ boolean isEmpty(); /** Returns the number of elements in this queue. * * @return the number of elements in this queue. */ int size(); /** Removes all elements from this queue. */ void clear(); /** Returns the first element of the queue. * * @return the first element. * @throws NoSuchElementException if the queue is empty. */ K first(); /** Returns the last element of the queue, that is, the element the would be dequeued last (optional operation). * * @return the last element. * @throws NoSuchElementException if the queue is empty. */ K last(); /** Notifies the queue that the {@linkplain #first() first} element has changed (optional operation). */ void changed(); /** Returns the comparator associated with this queue, or null if it uses its elements' natural ordering. * * @return the comparator associated with this sorted set, or null if it uses its elements' natural ordering. */ Comparator comparator(); } fastutil-7.1.0/src/it/unimi/dsi/fastutil/IndirectPriorityQueue.java0000664000000000000000000001265613050705451024175 0ustar rootrootpackage it.unimi.dsi.fastutil; /* * Copyright (C) 2003-2017 Paolo Boldi and Sebastiano Vigna * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import java.util.Comparator; import java.util.NoSuchElementException; /** An indirect priority queue. * *

An indirect priority queue provides a way to {@linkplain #enqueue(int) * enqueue} by index elements taken from a given reference list, * and to {@linkplain #dequeue() dequeue} them in some specified order. * Elements that are smaller in the specified order are * dequeued first. It * is also possible to get the {@linkplain #first() index of the first element}, that * is, the index that would be dequeued next. * *
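* A small sketch of the idea (added for illustration; it assumes the concrete class {@code IntHeapIndirectPriorityQueue} from the {@code ints} package): indices are enqueued, but the ordering comes from the reference array they point into:
 * int[] refArray = { 10, 3, 7 };
 * it.unimi.dsi.fastutil.ints.IntHeapIndirectPriorityQueue q =
 *     new it.unimi.dsi.fastutil.ints.IntHeapIndirectPriorityQueue( refArray );
 * q.enqueue( 0 ); q.enqueue( 1 ); q.enqueue( 2 );
 * int first = q.dequeue();    // 1, because refArray[ 1 ] == 3 is the smallest referenced element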

Additionally, the queue may provide a method to peek at the index of the * element that would be dequeued {@linkplain #last() last}. * *

The reference list should not change during queue operations (or, more * precisely, the relative order of the elements corresponding to indices in the queue should not * change). Nonetheless, some implementations may give the caller a way to * notify the queue that the {@linkplain #changed() first element has changed its * relative position in the order}. * *

Optionally, an indirect priority queue may even provide methods to notify * {@linkplain #changed(int) the change of any element of the * reference list}, to check {@linkplain #contains(int) the presence of * an index in the queue}, and to {@linkplain #remove(int) remove an index from the queue}. * It may even allow the caller to signal that {@linkplain #allChanged() all elements have changed}. * *

It is always possible to enqueue two distinct indices corresponding to * equal elements of the reference list. However, depending on the * implementation, it may or may not be possible to enqueue the same * index twice. * *

Note that all element manipulation happens via indices. */ public interface IndirectPriorityQueue { /** Enqueues a new element. * * @param index the element to enqueue. */ void enqueue( int index ); /** Dequeues the {@linkplain #first() first} element from this queue. * * @return the dequeued element. * @throws NoSuchElementException if this queue is empty. */ int dequeue(); /** Checks whether this queue is empty. * * @return true if this queue is empty. */ boolean isEmpty(); /** Returns the number of elements in this queue. * * @return the number of elements in this queue. */ int size(); /** Removes all elements from this queue. */ void clear(); /** Returns the first element of this queue. * * @return the first element. * @throws NoSuchElementException if this queue is empty. */ int first(); /** Returns the last element of this queue, that is, the element the would be dequeued last (optional operation). * * @return the last element. * @throws NoSuchElementException if this queue is empty. */ int last(); /** Notifies this queue that the {@linkplain #first() first element} has changed (optional operation). * */ void changed(); /** Returns the comparator associated with this queue, or null if it uses its elements' natural ordering. * * @return the comparator associated with this sorted set, or null if it uses its elements' natural ordering. */ Comparator comparator(); /** Notifies this queue that the specified element has changed (optional operation). * *

Note that the specified element must belong to this queue. * * @param index the element that has changed. * @throws NoSuchElementException if the specified element is not in this queue. */ public void changed( int index ); /** Notifies this queue that the all elements have changed (optional operation). */ public void allChanged(); /** Checks whether a given index belongs to this queue (optional operation). * * @param index an index possibly in the queue. * @return true if the specified index belongs to this queue. */ public boolean contains( int index ); /** Removes the specified element from this queue (optional operation). * * @param index the element to be removed. * @return true if the index was in the queue. */ public boolean remove( int index ); /** Retrieves the front of this queue in a given array (optional operation). * *

The front of an indirect queue is the set of indices whose associated elements in the reference array * are equal to the element associated to the {@linkplain #first() first index}. These indices can be always obtain by dequeueing, but * this method should retrieve efficiently such indices in the given array without modifying the state of this queue. * * @param a an array large enough to hold the front (e.g., at least long as the reference array). * @return the number of elements actually written (starting from the first position of a). */ public int front( final int[] a ); } fastutil-7.1.0/src/it/unimi/dsi/fastutil/Maps.java0000664000000000000000000000206513050705451020556 0ustar rootrootpackage it.unimi.dsi.fastutil; /* * Copyright (C) 2003-2017 Sebastiano Vigna * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /** A class providing static methods and objects that do useful things with maps. * * @see java.util.Collections */ public class Maps { private Maps() {} /** A standard default return value to be used in maps containing null values. * @deprecated Since fastutil 5.0, the introduction of generics * makes this object pretty useless. */ @Deprecated public static final Object MISSING = new Object(); } fastutil-7.1.0/src/it/unimi/dsi/fastutil/Arrays.java0000664000000000000000000004060213050705451021116 0ustar rootrootpackage it.unimi.dsi.fastutil; /* * Copyright (C) 2002-2017 Sebastiano Vigna * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import it.unimi.dsi.fastutil.ints.IntComparator; import java.util.ArrayList; import java.util.concurrent.ForkJoinPool; import java.util.concurrent.RecursiveAction; /** A class providing static methods and objects that do useful things with arrays. * *

In addition to commodity methods, this class contains {@link Swapper}-based implementations * of {@linkplain #quickSort(int, int, IntComparator, Swapper) quicksort} and of * a stable, in-place {@linkplain #mergeSort(int, int, IntComparator, Swapper) mergesort}. These * generic sorting methods can be used to sort any kind of list, but they find their natural * usage, for instance, in sorting arrays in parallel. * * @see Arrays */ public class Arrays { private Arrays() {} /** This is a safe value used by {@link ArrayList} (as of Java 7) to avoid * throwing {@link OutOfMemoryError} on some JVMs. We adopt the same value. */ public static final int MAX_ARRAY_SIZE = Integer.MAX_VALUE - 8; /** Ensures that a range given by its first (inclusive) and last (exclusive) elements fits an array of given length. * *

This method may be used whenever an array range check is needed. * * @param arrayLength an array length. * @param from a start index (inclusive). * @param to an end index (inclusive). * @throws IllegalArgumentException if from is greater than to. * @throws ArrayIndexOutOfBoundsException if from or to are greater than arrayLength or negative. */ public static void ensureFromTo( final int arrayLength, final int from, final int to ) { if ( from < 0 ) throw new ArrayIndexOutOfBoundsException( "Start index (" + from + ") is negative" ); if ( from > to ) throw new IllegalArgumentException( "Start index (" + from + ") is greater than end index (" + to + ")" ); if ( to > arrayLength ) throw new ArrayIndexOutOfBoundsException( "End index (" + to + ") is greater than array length (" + arrayLength + ")" ); } /** Ensures that a range given by an offset and a length fits an array of given length. * *

This method may be used whenever an array range check is needed. * * @param arrayLength an array length. * @param offset a start index for the fragment * @param length a length (the number of elements in the fragment). * @throws IllegalArgumentException if length is negative. * @throws ArrayIndexOutOfBoundsException if offset is negative or offset+length is greater than arrayLength. */ public static void ensureOffsetLength( final int arrayLength, final int offset, final int length ) { if ( offset < 0 ) throw new ArrayIndexOutOfBoundsException( "Offset (" + offset + ") is negative" ); if ( length < 0 ) throw new IllegalArgumentException( "Length (" + length + ") is negative" ); if ( offset + length > arrayLength ) throw new ArrayIndexOutOfBoundsException( "Last index (" + ( offset + length ) + ") is greater than array length (" + arrayLength + ")" ); } /** * Transforms two consecutive sorted ranges into a single sorted range. The initial ranges are * [first..middle) and [middle..last), and the resulting range is * [first..last). Elements in the first input range will precede equal elements in * the second. */ private static void inPlaceMerge( final int from, int mid, final int to, final IntComparator comp, final Swapper swapper ) { if ( from >= mid || mid >= to ) return; if ( to - from == 2 ) { if ( comp.compare( mid, from ) < 0 ) swapper.swap( from, mid ); return; } int firstCut; int secondCut; if ( mid - from > to - mid ) { firstCut = from + ( mid - from ) / 2; secondCut = lowerBound( mid, to, firstCut, comp ); } else { secondCut = mid + ( to - mid ) / 2; firstCut = upperBound( from, mid, secondCut, comp ); } int first2 = firstCut; int middle2 = mid; int last2 = secondCut; if ( middle2 != first2 && middle2 != last2 ) { int first1 = first2; int last1 = middle2; while ( first1 < --last1 ) swapper.swap( first1++, last1 ); first1 = middle2; last1 = last2; while ( first1 < --last1 ) swapper.swap( first1++, last1 ); first1 = first2; last1 = last2; while ( first1 < --last1 ) swapper.swap( first1++, last1 ); } mid = firstCut + ( secondCut - mid ); inPlaceMerge( from, firstCut, mid, comp, swapper ); inPlaceMerge( mid, secondCut, to, comp, swapper ); } /** * Performs a binary search on an already-sorted range: finds the first position where an * element can be inserted without violating the ordering. Sorting is by a user-supplied * comparison function. * * @param from the index of the first element (inclusive) to be included in the binary search. * @param to the index of the last element (exclusive) to be included in the binary search. * @param pos the position of the element to be searched for. * @param comp the comparison function. * @return the largest index i such that, for every j in the range [first..i), * comp.compare(j, pos) is true. */ private static int lowerBound( int from, final int to, final int pos, final IntComparator comp ) { // if (comp==null) throw new NullPointerException(); int len = to - from; while ( len > 0 ) { int half = len / 2; int middle = from + half; if ( comp.compare( middle, pos ) < 0 ) { from = middle + 1; len -= half + 1; } else { len = half; } } return from; } /** * Performs a binary search on an already sorted range: finds the last position where an element * can be inserted without violating the ordering. Sorting is by a user-supplied comparison * function. * * @param from the index of the first element (inclusive) to be included in the binary search. * @param to the index of the last element (exclusive) to be included in the binary search. 
* @param pos the position of the element to be searched for. * @param comp the comparison function. * @return The largest index i such that, for every j in the range [first..i), * comp.compare(pos, j) is false. */ private static int upperBound( int from, final int mid, final int pos, final IntComparator comp ) { // if (comp==null) throw new NullPointerException(); int len = mid - from; while ( len > 0 ) { int half = len / 2; int middle = from + half; if ( comp.compare( pos, middle ) < 0 ) { len = half; } else { from = middle + 1; len -= half + 1; } } return from; } /** * Returns the index of the median of the three indexed chars. */ private static int med3( final int a, final int b, final int c, final IntComparator comp ) { int ab = comp.compare( a, b ); int ac = comp.compare( a, c ); int bc = comp.compare( b, c ); return ( ab < 0 ? ( bc < 0 ? b : ac < 0 ? c : a ) : ( bc > 0 ? b : ac > 0 ? c : a ) ); } private static final int MERGESORT_NO_REC = 16; /** Sorts the specified range of elements using the specified swapper and according to the order induced by the specified * comparator using mergesort. * *

This sort is guaranteed to be stable: equal elements will not be reordered as a result * of the sort. The sorting algorithm is an in-place mergesort that is significantly slower than a * standard mergesort, as its running time is O(n (log n)2), but it does not allocate additional memory; as a result, it can be * used as a generic sorting algorithm. * * @param from the index of the first element (inclusive) to be sorted. * @param to the index of the last element (exclusive) to be sorted. * @param c the comparator to determine the order of the generic data (arguments are positions). * @param swapper an object that knows how to swap the elements at any two positions. */ public static void mergeSort( final int from, final int to, final IntComparator c, final Swapper swapper ) { /* * We retain the same method signature as quickSort. Given only a comparator and swapper we * do not know how to copy and move elements from/to temporary arrays. Hence, in contrast to * the JDK mergesorts this is an "in-place" mergesort, i.e. does not allocate any temporary * arrays. A non-inplace mergesort would perhaps be faster in most cases, but would require * non-intuitive delegate objects... */ final int length = to - from; // Insertion sort on smallest arrays if ( length < MERGESORT_NO_REC ) { for ( int i = from; i < to; i++ ) { for ( int j = i; j > from && ( c.compare( j - 1, j ) > 0 ); j-- ) { swapper.swap( j, j - 1 ); } } return; } // Recursively sort halves int mid = ( from + to ) >>> 1; mergeSort( from, mid, c, swapper ); mergeSort( mid, to, c, swapper ); // If list is already sorted, nothing left to do. This is an // optimization that results in faster sorts for nearly ordered lists. if ( c.compare( mid - 1, mid ) <= 0 ) return; // Merge sorted halves inPlaceMerge( from, mid, to, c, swapper ); } /** Swaps two sequences of elements using a provided swapper. * * @param swapper the swapper. * @param a a position in {@code x}. * @param b another position in {@code x}. * @param n the number of elements to exchange starting at {@code a} and {@code b}. 
*/ protected static void swap( final Swapper swapper, int a, int b, final int n ) { for ( int i = 0; i < n; i++, a++, b++ ) swapper.swap( a, b ); } private static final int QUICKSORT_NO_REC = 16; private static final int PARALLEL_QUICKSORT_NO_FORK = 8192; private static final int QUICKSORT_MEDIAN_OF_9 = 128; protected static class ForkJoinGenericQuickSort extends RecursiveAction { private static final long serialVersionUID = 1L; private final int from; private final int to; private final IntComparator comp; private final Swapper swapper; public ForkJoinGenericQuickSort( final int from, final int to, final IntComparator comp, final Swapper swapper ) { this.from = from; this.to = to; this.comp = comp; this.swapper = swapper; } @Override protected void compute() { final int len = to - from; if ( len < PARALLEL_QUICKSORT_NO_FORK ) { quickSort( from, to, comp, swapper ); return; } // Choose a partition element, v int m = from + len / 2; int l = from; int n = to - 1; int s = len / 8; l = med3( l, l + s, l + 2 * s, comp ); m = med3( m - s, m, m + s, comp ); n = med3( n - 2 * s, n - s, n, comp ); m = med3( l, m, n, comp ); // Establish Invariant: v* (v)* v* int a = from, b = a, c = to - 1, d = c; while ( true ) { int comparison; while ( b <= c && ( ( comparison = comp.compare( b, m ) ) <= 0 ) ) { if ( comparison == 0 ) { // Fix reference to pivot if necessary if ( a == m ) m = b; else if ( b == m ) m = a; swapper.swap( a++, b ); } b++; } while ( c >= b && ( ( comparison = comp.compare( c, m ) ) >= 0 ) ) { if ( comparison == 0 ) { // Fix reference to pivot if necessary if ( c == m ) m = d; else if ( d == m ) m = c; swapper.swap( c, d-- ); } c--; } if ( b > c ) break; // Fix reference to pivot if necessary if ( b == m ) m = d; else if ( c == m ) m = c; swapper.swap( b++, c-- ); } // Swap partition elements back to middle s = Math.min( a - from, b - a ); swap( swapper, from, b - s, s ); s = Math.min( d - c, to - d - 1 ); swap( swapper, b, to - s, s ); // Recursively sort non-partition-elements int t; s = b - a; t = d - c; if ( s > 1 && t > 1 ) invokeAll( new ForkJoinGenericQuickSort( from, from + s, comp, swapper ), new ForkJoinGenericQuickSort( to - t, to, comp, swapper ) ); else if ( s > 1 ) invokeAll( new ForkJoinGenericQuickSort( from, from + s, comp, swapper ) ); else invokeAll( new ForkJoinGenericQuickSort( to - t, to, comp, swapper ) ); } } /** Sorts the specified range of elements using the specified swapper and according to the order induced by the specified * comparator using a parallel quicksort. * *

The sorting algorithm is a tuned quicksort adapted from Jon L. Bentley and M. Douglas * McIlroy, “Engineering a Sort Function”, Software: Practice and Experience, 23(11), pages * 1249−1265, 1993. * *
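For example, given an {@code IntComparator} {@code comp}, a {@code Swapper} {@code swapper} and a length {@code n} defined by the caller (hypothetical names; see the sequential quickSort below for a fuller sketch), a range can be sorted in parallel with: *

	 * // comp, swapper and n are assumed to be defined elsewhere by the caller
	 * parallelQuickSort( 0, n, comp, swapper );
	 *
Note that, as the implementation below shows, each invocation sets up its own {@link ForkJoinPool} and shuts it down afterwards, so very frequent calls on small ranges may not pay off. *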

This implementation uses a {@link ForkJoinPool} executor service with {@link Runtime#availableProcessors()} parallel threads. * * @param from the index of the first element (inclusive) to be sorted. * @param to the index of the last element (exclusive) to be sorted. * @param comp the comparator to determine the order of the generic data. * @param swapper an object that knows how to swap the elements at any two positions. * */ public static void parallelQuickSort( final int from, final int to, final IntComparator comp, final Swapper swapper ) { final ForkJoinPool pool = new ForkJoinPool( Runtime.getRuntime().availableProcessors() ); pool.invoke( new ForkJoinGenericQuickSort( from, to, comp, swapper ) ); pool.shutdown(); } /** Sorts the specified range of elements using the specified swapper and according to the order induced by the specified * comparator using parallel quicksort. * *

The sorting algorithm is a tuned quicksort adapted from Jon L. Bentley and M. Douglas * McIlroy, “Engineering a Sort Function”, Software: Practice and Experience, 23(11), pages * 1249−1265, 1993. * *
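For example, assuming two parallel {@code final int[]} arrays {@code x} and {@code y} (hypothetical names used only for illustration), a minimal sketch sorting both arrays lexicographically by the pair (x, y) is: *

	 * quickSort( 0, x.length,
	 * 	new IntComparator() {
	 * 		public int compare( final int i, final int j ) {
	 * 			final int t = Integer.compare( x[ i ], x[ j ] ); // primary key
	 * 			return t != 0 ? t : Integer.compare( y[ i ], y[ j ] ); // secondary key
	 * 		}
	 * 		public int compare( final Integer i, final Integer j ) { return compare( i.intValue(), j.intValue() ); }
	 * 	},
	 * 	new Swapper() {
	 * 		public void swap( final int i, final int j ) { // exchange both coordinates
	 * 			int t = x[ i ]; x[ i ] = x[ j ]; x[ j ] = t;
	 * 			t = y[ i ]; y[ i ] = y[ j ]; y[ j ] = t;
	 * 		}
	 * 	} );
	 *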

This implementation uses a {@link ForkJoinPool} executor service with {@link Runtime#availableProcessors()} parallel threads. * * @param from the index of the first element (inclusive) to be sorted. * @param to the index of the last element (exclusive) to be sorted. * @param comp the comparator to determine the order of the generic data. * @param swapper an object that knows how to swap the elements at any two positions. * */ public static void quickSort( final int from, final int to, final IntComparator comp, final Swapper swapper ) { final int len = to - from; // Insertion sort on smallest arrays if ( len < QUICKSORT_NO_REC ) { for ( int i = from; i < to; i++ ) for ( int j = i; j > from && ( comp.compare( j - 1, j ) > 0 ); j-- ) { swapper.swap( j, j - 1 ); } return; } // Choose a partition element, v int m = from + len / 2; // Small arrays, middle element int l = from; int n = to - 1; if ( len > QUICKSORT_MEDIAN_OF_9 ) { // Big arrays, pseudomedian of 9 int s = len / 8; l = med3( l, l + s, l + 2 * s, comp ); m = med3( m - s, m, m + s, comp ); n = med3( n - 2 * s, n - s, n, comp ); } m = med3( l, m, n, comp ); // Mid-size, med of 3 // int v = x[m]; int a = from; int b = a; int c = to - 1; // Establish Invariant: v* (v)* v* int d = c; while ( true ) { int comparison; while ( b <= c && ( ( comparison = comp.compare( b, m ) ) <= 0 ) ) { if ( comparison == 0 ) { // Fix reference to pivot if necessary if ( a == m ) m = b; else if ( b == m ) m = a; swapper.swap( a++, b ); } b++; } while ( c >= b && ( ( comparison = comp.compare( c, m ) ) >= 0 ) ) { if ( comparison == 0 ) { // Fix reference to pivot if necessary if ( c == m ) m = d; else if ( d == m ) m = c; swapper.swap( c, d-- ); } c--; } if ( b > c ) break; // Fix reference to pivot if necessary if ( b == m ) m = d; else if ( c == m ) m = c; swapper.swap( b++, c-- ); } // Swap partition elements back to middle int s; s = Math.min( a - from, b - a ); swap( swapper, from, b - s, s ); s = Math.min( d - c, to - d - 1 ); swap( swapper, b, to - s, s ); // Recursively sort non-partition-elements if ( ( s = b - a ) > 1 ) quickSort( from, from + s, comp, swapper ); if ( ( s = d - c ) > 1 ) quickSort( to - s, to, comp, swapper ); } } fastutil-7.1.0/src/it/unimi/dsi/fastutil/Swapper.java0000664000000000000000000000176413050705451021304 0ustar rootrootpackage it.unimi.dsi.fastutil; /* * Copyright (C) 2010-2017 Sebastiano Vigna * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /** An object that can swap elements whose position is specified by integers * * @see Arrays#quickSort(int, int, it.unimi.dsi.fastutil.ints.IntComparator, Swapper) */ public interface Swapper { /** Swaps the data at the given positions. * * @param a the first position to swap. * @param b the second position to swap. 
*/ void swap( int a, int b ); } fastutil-7.1.0/src/it/unimi/dsi/fastutil/BigSwapper.java0000664000000000000000000000200513050705451021713 0ustar rootrootpackage it.unimi.dsi.fastutil; /* * Copyright (C) 2010-2017 Sebastiano Vigna * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /** An object that can swap elements whose positions is specified by longs. * * @see BigArrays#quickSort(long, long, it.unimi.dsi.fastutil.longs.LongComparator, BigSwapper) */ public interface BigSwapper { /** Swaps the data at the given positions. * * @param a the first position to swap. * @param b the second position to swap. */ void swap( long a, long b ); } fastutil-7.1.0/src/it/unimi/dsi/fastutil/Size64.java0000664000000000000000000000402513050705451020740 0ustar rootrootpackage it.unimi.dsi.fastutil; /* * Copyright (C) 2010-2017 Sebastiano Vigna * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import java.util.Collection; /** An interface for data structures whose size can exceed {@link Integer#MAX_VALUE}. * *

The only methods specified by this interface are {@link #size64()}, and * a deprecated {@link #size()} identical to {@link Collection#size()}. Implementations * can work around the type problem of {@link java.util.Collection#size()} * (e.g., not being able to return more than {@link Integer#MAX_VALUE}) by implementing this * interface. Callers interested in large structures * can use an instanceof check to test for the presence of {@link #size64()}. * *
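For example, a minimal sketch of an implementing class (the class name and field are hypothetical) is: *

	 * public class HugeRepository implements Size64 {
	 * 	private long size; // may legitimately exceed Integer.MAX_VALUE
	 * 	public long size64() { return size; }
	 * 	@Deprecated
	 * 	public int size() { return (int)Math.min( Integer.MAX_VALUE, size64() ); } // narrowed view, as remarked below
	 * }
	 *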

We remark that it is always a good idea to implement both {@link #size()} and {@link #size64()}, * as the former might be implemented by a superclass in an incompatible way. If you implement this interface, * just implement {@link #size()} as a deprecated method returning Math.min(Integer.MAX_VALUE, size64()). */ public interface Size64 { /** Returns the size of this data structure as a long. * * @return the size of this data structure. */ long size64(); /** Returns the size of this data structure, minimized with {@link Integer#MAX_VALUE}. * * @return the size of this data structure, minimized with {@link Integer#MAX_VALUE}. * @see java.util.Collection#size() * @deprecated Use {@link #size64()} instead. */ @Deprecated int size(); } fastutil-7.1.0/src/it/unimi/dsi/fastutil/PriorityQueues.java0000664000000000000000000000762213050705451022673 0ustar rootrootpackage it.unimi.dsi.fastutil; /* * Copyright (C) 2003-2017 Sebastiano Vigna * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import java.util.Comparator; import java.util.NoSuchElementException; import it.unimi.dsi.fastutil.PriorityQueue; /** A class providing static methods and objects that do useful things with priority queues. * * @see it.unimi.dsi.fastutil.PriorityQueue */ public class PriorityQueues { private PriorityQueues() {} /** An immutable class representing the empty priority queue. * *

This class may be useful to implement your own in case you subclass * {@link PriorityQueue}. */ @SuppressWarnings("rawtypes") public static class EmptyPriorityQueue extends AbstractPriorityQueue { protected EmptyPriorityQueue() {} public void enqueue( Object o ) { throw new UnsupportedOperationException(); } public Object dequeue() { throw new NoSuchElementException(); } public boolean isEmpty() { return true; } public int size() { return 0; } public void clear() {} public Object first() { throw new NoSuchElementException(); } public Object last() { throw new NoSuchElementException(); } public void changed() { throw new NoSuchElementException(); } public Comparator comparator() { return null; } } /** An empty indirect priority queue (immutable). */ public final static EmptyPriorityQueue EMPTY_QUEUE = new EmptyPriorityQueue(); /** A synchronized wrapper class for priority queues. */ public static class SynchronizedPriorityQueue implements PriorityQueue { public static final long serialVersionUID = -7046029254386353129L; final protected PriorityQueue q; final protected Object sync; protected SynchronizedPriorityQueue( final PriorityQueue q, final Object sync ) { this.q = q; this.sync = sync; } protected SynchronizedPriorityQueue( final PriorityQueue q ) { this.q = q; this.sync = this; } public void enqueue( K x ) { synchronized( sync ) { q.enqueue( x ); } } public K dequeue() { synchronized( sync ) { return q.dequeue(); } } public K first() { synchronized( sync ) { return q.first(); } } public K last() { synchronized( sync ) { return q.last(); } } public boolean isEmpty() { synchronized( sync ) { return q.isEmpty(); } } public int size() { synchronized( sync ) { return q.size(); } } public void clear() { synchronized( sync ) { q.clear(); } } public void changed() { synchronized( sync ) { q.changed(); } } public Comparator comparator() { synchronized( sync ) { return q.comparator(); } } } /** Returns a synchronized priority queue backed by the specified priority queue. * * @param q the priority queue to be wrapped in a synchronized priority queue. * @return a synchronized view of the specified priority queue. */ public static PriorityQueue synchronize( final PriorityQueue q ) { return new SynchronizedPriorityQueue( q ); } /** Returns a synchronized priority queue backed by the specified priority queue, using an assigned object to synchronize. * * @param q the priority queue to be wrapped in a synchronized priority queue. * @param sync an object that will be used to synchronize the access to the priority queue. * @return a synchronized view of the specified priority queue. */ public static PriorityQueue synchronize( final PriorityQueue q, final Object sync ) { return new SynchronizedPriorityQueue( q, sync ); } } fastutil-7.1.0/src/it/unimi/dsi/fastutil/IndirectPriorityQueues.java0000664000000000000000000001154713050705451024356 0ustar rootrootpackage it.unimi.dsi.fastutil; /* * Copyright (C) 2003-2017 Sebastiano Vigna * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ import java.util.Comparator; import java.util.NoSuchElementException; /** A class providing static methods and objects that do useful things with indirect priority queues. * * @see IndirectPriorityQueue */ public class IndirectPriorityQueues { private IndirectPriorityQueues() {} /** An immutable class representing the empty indirect priority queue. * *

This class may be useful to implement your own in case you subclass * {@link IndirectPriorityQueue}. */ @SuppressWarnings("rawtypes") public static class EmptyIndirectPriorityQueue extends AbstractIndirectPriorityQueue { protected EmptyIndirectPriorityQueue() {} public void enqueue( final int i ) { throw new UnsupportedOperationException(); } public int dequeue() { throw new NoSuchElementException(); } public boolean isEmpty() { return true; } public int size() { return 0; } public boolean contains( int index ) { return false; } public void clear() {} public int first() { throw new NoSuchElementException(); } public int last() { throw new NoSuchElementException(); } public void changed() { throw new NoSuchElementException(); } public void allChanged() {} public Comparator comparator() { return null; } public void changed( final int i ) { throw new IllegalArgumentException( "Index " + i + " is not in the queue" ); } public boolean remove( final int i ) { return false; } public int front( int[] a ) { return 0; } } /** An empty indirect priority queue (immutable). */ public final static EmptyIndirectPriorityQueue EMPTY_QUEUE = new EmptyIndirectPriorityQueue(); /** A synchronized wrapper class for indirect priority queues. */ public static class SynchronizedIndirectPriorityQueue implements IndirectPriorityQueue { public static final long serialVersionUID = -7046029254386353129L; final protected IndirectPriorityQueue q; final protected Object sync; protected SynchronizedIndirectPriorityQueue( final IndirectPriorityQueue q, final Object sync ) { this.q = q; this.sync = sync; } protected SynchronizedIndirectPriorityQueue( final IndirectPriorityQueue q ) { this.q = q; this.sync = this; } public void enqueue( int x ) { synchronized( sync ) { q.enqueue( x ); } } public int dequeue() { synchronized( sync ) { return q.dequeue(); } } public boolean contains( final int index ) { synchronized( sync ) { return q.contains( index ); } } public int first() { synchronized( sync ) { return q.first(); } } public int last() { synchronized( sync ) { return q.last(); } } public boolean isEmpty() { synchronized( sync ) { return q.isEmpty(); } } public int size() { synchronized( sync ) { return q.size(); } } public void clear() { synchronized( sync ) { q.clear(); } } public void changed() { synchronized( sync ) { q.changed(); } } public void allChanged() { synchronized( sync ) { q.allChanged(); } } public void changed( int i ) { synchronized( sync ) { q.changed( i ); } } public boolean remove( int i ) { synchronized( sync ) { return q.remove( i ); } } public Comparator comparator() { synchronized( sync ) { return q.comparator(); } } public int front( int[] a ) { return q.front( a ); } } /** Returns a synchronized type-specific indirect priority queue backed by the specified type-specific indirect priority queue. * * @param q the indirect priority queue to be wrapped in a synchronized indirect priority queue. * @return a synchronized view of the specified indirect priority queue. */ public static IndirectPriorityQueue synchronize( final IndirectPriorityQueue q ) { return new SynchronizedIndirectPriorityQueue( q ); } /** Returns a synchronized type-specific indirect priority queue backed by the specified type-specific indirect priority queue, using an assigned object to synchronize. * * @param q the indirect priority queue to be wrapped in a synchronized indirect priority queue. * @param sync an object that will be used to synchronize the access to the indirect priority queue. 
* @return a synchronized view of the specified indirect priority queue. */ public static IndirectPriorityQueue synchronize( final IndirectPriorityQueue q, final Object sync ) { return new SynchronizedIndirectPriorityQueue( q, sync ); } } fastutil-7.1.0/src/it/unimi/dsi/fastutil/AbstractPriorityQueue.java0000664000000000000000000000226513050705451024172 0ustar rootrootpackage it.unimi.dsi.fastutil; /* * Copyright (C) 2003-2017 Paolo Boldi and Sebastiano Vigna * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import it.unimi.dsi.fastutil.PriorityQueue; /** An abstract class providing basic methods for implementing the {@link PriorityQueue} interface. * *

This class defines {@link #changed()} and {@link #last()} as throwing an * {@link UnsupportedOperationException}. */ public abstract class AbstractPriorityQueue implements PriorityQueue { public void changed() { throw new UnsupportedOperationException(); } public K last() { throw new UnsupportedOperationException(); } public boolean isEmpty() { return size() == 0; } } fastutil-7.1.0/src/it/unimi/dsi/fastutil/AbstractIndirectPriorityQueue.java0000664000000000000000000000300113050705451025641 0ustar rootrootpackage it.unimi.dsi.fastutil; /* * Copyright (C) 2003-2017 Paolo Boldi and Sebastiano Vigna * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /** An abstract class providing basic methods for implementing the {@link IndirectPriorityQueue} interface. * *

This class defines {@link #changed(int)}, {@link #allChanged()}, {@link #remove(int)} and {@link #last()} as throwing an * {@link UnsupportedOperationException}. */ public abstract class AbstractIndirectPriorityQueue implements IndirectPriorityQueue { public int last() { throw new UnsupportedOperationException(); } public void changed() { changed( first() ); } public void changed( int index ) { throw new UnsupportedOperationException(); } public void allChanged() { throw new UnsupportedOperationException(); } public boolean remove( int index ) { throw new UnsupportedOperationException(); } public boolean contains( int index ) { throw new UnsupportedOperationException(); } public boolean isEmpty() { return size() == 0; } } fastutil-7.1.0/src/it/unimi/dsi/fastutil/AbstractStack.java0000664000000000000000000000251213050705451022404 0ustar rootrootpackage it.unimi.dsi.fastutil; /* * Copyright (C) 2002-2017 Sebastiano Vigna * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /** An abstract class providing basic methods for implementing the {@link Stack} interface. * *

This class just defines {@link Stack#top()} as {@link Stack#peek(int) peek(0)}, and * {@link Stack#peek(int)} as throwing an {@link UnsupportedOperationException}. * * Subclasses of this class may choose to implement just {@link Stack#push(Object)}, * {@link Stack#pop()} and {@link Stack#isEmpty()}, or (but this is not * required) go farther and implement {@link Stack#top()}, or even {@link * Stack#peek(int)}. */ public abstract class AbstractStack implements Stack { public K top() { return peek( 0 ); } public K peek( int i ) { throw new UnsupportedOperationException(); } } fastutil-7.1.0/src/it/unimi/dsi/fastutil/io/FastByteArrayInputStream.java0000664000000000000000000000674013050705451025205 0ustar rootrootpackage it.unimi.dsi.fastutil.io; /* * Copyright (C) 2005-2017 Sebastiano Vigna * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /** Simple, fast and repositionable byte-array input stream. * *
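For example (a minimal usage sketch; the data below is hypothetical), a fragment of a byte array can be read and repositioned as follows: *

	 * final byte[] data = { 10, 20, 30, 40, 50 };
	 * final FastByteArrayInputStream fbais = new FastByteArrayInputStream( data, 1, 3 ); // exposes bytes 20, 30, 40
	 * int b = fbais.read(); // 20
	 * fbais.position( 0 );  // back to the start of the fragment
	 * b = fbais.read();     // 20 again
	 *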

Warning: this class implements the correct semantics * of {@link #read(byte[], int, int)} as described in {@link java.io.InputStream}. * The implementation given in {@link java.io.ByteArrayInputStream} is broken, * but it will never be fixed because it's too late. * * @author Sebastiano Vigna */ public class FastByteArrayInputStream extends MeasurableInputStream implements RepositionableStream { /** The array backing the input stream. */ public byte[] array; /** The first valid entry. */ public int offset; /** The number of valid bytes in {@link #array} starting from {@link #offset}. */ public int length; /** The current position as a distance from {@link #offset}. */ private int position; /** The current mark as a position, or -1 if no mark exists. */ private int mark; /** Creates a new array input stream using a given array fragment. * * @param array the backing array. * @param offset the first valid entry of the array. * @param length the number of valid bytes. */ public FastByteArrayInputStream( final byte[] array, final int offset, final int length ) { this.array = array; this.offset = offset; this.length = length; } /** Creates a new array input stream using a given array. * * @param array the backing array. */ public FastByteArrayInputStream( final byte[] array ) { this( array, 0, array.length ); } public boolean markSupported() { return true; } public void reset() { position = mark; } /** Closing a fast byte array input stream has no effect. */ public void close() {} public void mark( final int dummy ) { mark = position; } public int available() { return length - position; } public long skip( long n ) { if ( n <= length - position ) { position += (int)n; return n; } n = length - position; position = length; return n; } public int read() { if ( length == position ) return -1; return array[ offset + position++ ] & 0xFF; } /** Reads bytes from this byte-array input stream as * specified in {@link java.io.InputStream#read(byte[], int, int)}. * Note that the implementation given in {@link java.io.ByteArrayInputStream#read(byte[], int, int)} * will return -1 on a zero-length read at EOF, contrarily to the specification. We won't. */ public int read( final byte b[], final int offset, final int length ) { if ( this.length == this.position ) return length == 0 ? 0 : -1; final int n = Math.min( length, this.length - this.position ); System.arraycopy( array, this.offset + this.position, b, offset, n ); this.position += n; return n; } public long position() { return position; } public void position( final long newPosition ) { position = (int)Math.min( newPosition, length ); } @Override public long length() { return length; } } fastutil-7.1.0/src/it/unimi/dsi/fastutil/io/FastByteArrayOutputStream.java0000664000000000000000000000653313050705451025406 0ustar rootrootpackage it.unimi.dsi.fastutil.io; /* * Copyright (C) 2005-2017 Sebastiano Vigna * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ import it.unimi.dsi.fastutil.bytes.ByteArrays; import java.io.IOException; /** Simple, fast byte-array output stream that exposes the backing array. * *

{@link java.io.ByteArrayOutputStream} is nice, but to get its content you * must generate a new object each time. This doesn't happen here. * *
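For example (a minimal usage sketch), the bytes written so far can be inspected in place: *

	 * final FastByteArrayOutputStream fbaos = new FastByteArrayOutputStream();
	 * fbaos.write( new byte[] { 1, 2, 3 } ); // content is appended to the backing array
	 * fbaos.trim();                          // optionally shrink the backing array to exactly length bytes
	 * final byte[] content = fbaos.array;    // direct access to the backing array, no copy
	 * final int valid = fbaos.length;        // number of valid bytes in content
	 *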

This class will automatically enlarge the backing array, doubling its * size whenever new space is needed. The {@link #reset()} method will * mark the content as empty, but will not decrease the capacity: use * {@link #trim()} for that purpose. * * @author Sebastiano Vigna */ public class FastByteArrayOutputStream extends MeasurableOutputStream implements RepositionableStream { /** The array backing the output stream. */ public final static int DEFAULT_INITIAL_CAPACITY = 16; /** The array backing the output stream. */ public byte[] array; /** The number of valid bytes in {@link #array}. */ public int length; /** The current writing position. */ private int position; /** Creates a new array output stream with an initial capacity of {@link #DEFAULT_INITIAL_CAPACITY} bytes. */ public FastByteArrayOutputStream() { this( DEFAULT_INITIAL_CAPACITY ); } /** Creates a new array output stream with a given initial capacity. * * @param initialCapacity the initial length of the backing array. */ public FastByteArrayOutputStream( final int initialCapacity ) { array = new byte[ initialCapacity ]; } /** Creates a new array output stream wrapping a given byte array. * * @param a the byte array to wrap. */ public FastByteArrayOutputStream( final byte[] a ) { array = a; } /** Marks this array output stream as empty. */ public void reset() { length = 0; position = 0; } /** Ensures that the length of the backing array is equal to {@link #length}. */ public void trim() { array = ByteArrays.trim( array, length ); } public void write( final int b ) { if ( position >= array.length ) array = ByteArrays.grow( array, position + 1, length ); array[ position++ ] = (byte)b; if ( length < position ) length = position; } public void write( final byte[] b, final int off, final int len ) throws IOException { ByteArrays.ensureOffsetLength( b, off, len ); if ( position + len > array.length ) array = ByteArrays.grow( array, position + len, position ); System.arraycopy( b, off, array, position, len ); if ( position + len > length ) length = position += len; } public void position( long newPosition ) { if ( position > Integer.MAX_VALUE ) throw new IllegalArgumentException( "Position too large: " + newPosition ); position = (int)newPosition; } public long position() { return position; } @Override public long length() throws IOException { return length; } } fastutil-7.1.0/src/it/unimi/dsi/fastutil/io/FastMultiByteArrayInputStream.java0000664000000000000000000001265113050705451026216 0ustar rootrootpackage it.unimi.dsi.fastutil.io; /* * Copyright (C) 2005-2017 Sebastiano Vigna * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import java.io.EOFException; import java.io.IOException; import java.io.InputStream; /** Simple, fast and repositionable byte array input stream that multiplexes its content among several arrays. * * This class is significantly slower than {@link FastByteArrayInputStream}, * but it can hold 256 PiB of data. 
The relevant constructor is {@link #FastMultiByteArrayInputStream(InputStream, long)}, * which fetches a stream and loads it into a sequence of byte arrays. * * @author Sebastiano Vigna * @author Paolo Boldi s */ public class FastMultiByteArrayInputStream extends MeasurableInputStream implements RepositionableStream { /** The number of bits of an array slice index. */ public final static int SLICE_BITS = 10; /** The maximum length of an array slice. */ public final static int SLICE_SIZE = 1 << SLICE_BITS; /** The mask to retrieve a slice offset. */ public final static int SLICE_MASK = SLICE_SIZE - 1; /** The array of arrays backing the input stream, plus an additional {@code null} entry. */ public byte[][] array; /** The current array. */ public byte[] current; /** The number of valid bytes in {@link #array}. */ public long length; /** The current position. */ private long position; /** Creates a new multi-array input stream loading it from a measurable input stream. * * @param is the input stream that will fill the array. */ public FastMultiByteArrayInputStream( final MeasurableInputStream is ) throws IOException { this( is, is.length() ); } /** Creates a new multi-array input stream loading it from an input stream. * * @param is the input stream that will fill the array. * @param size the number of bytes to be read from is. */ public FastMultiByteArrayInputStream( final InputStream is, long size ) throws IOException { length = size; array = new byte[ (int)( ( size + SLICE_SIZE - 1 ) / SLICE_SIZE ) + 1 ][]; for( int i = 0; i < array.length - 1; i++ ) { array[ i ] = new byte[ size >= SLICE_SIZE ? SLICE_SIZE : (int)size ]; // It is important *not* to use is.read() directly because of bug #6478546 if ( BinIO.loadBytes( is, array[ i ] ) != array[ i ].length ) throw new EOFException(); size -= array[ i ].length; } current = array[ 0 ]; } /** Creates a new multi-array input stream sharing the backing arrays of another multi-array input stream. * * @param is the multi-array input stream to replicate. */ public FastMultiByteArrayInputStream( final FastMultiByteArrayInputStream is ) { this.array = is.array; this.length = is.length; this.current = array[ 0 ]; } /** Creates a new multi-array input stream using a given array. * * @param array the backing array. */ public FastMultiByteArrayInputStream( final byte[] array ) { if ( array.length == 0 ) this.array = new byte[ 1 ][]; else { this.array = new byte[ 2 ][]; this.array[ 0 ] = array; this.length = array.length; this.current = array; } } /** Returns the number of bytes that can be read (or skipped over) from this input stream without blocking. * *

Note that this number may be smaller than the number of bytes actually * available from the stream if this number exceeds {@link Integer#MAX_VALUE}. * * @return the minimum among the number of available bytes and {@link Integer#MAX_VALUE}. */ public int available() { return (int)Math.min( Integer.MAX_VALUE, length - position ); } public long skip( long n ) { if ( n > length - position ) n = length - position; position += n; updateCurrent(); return n; } public int read() { if ( length == position ) return -1; final int disp = (int)( position++ & SLICE_MASK ); if ( disp == 0 ) updateCurrent(); return current[ disp ] & 0xFF; } public int read( final byte[] b, int offset, final int length ) { final long remaining = this.length - position; if ( remaining == 0 ) return length == 0 ? 0 : -1; int n = (int)Math.min( length, remaining ); final int m = n; for(;;) { final int disp = (int)( position & SLICE_MASK ); if ( disp == 0 ) updateCurrent(); final int res = Math.min( n, current.length - disp ); System.arraycopy( current, disp, b, offset, res ); n -= res; position += res; if ( n == 0 ) return m; offset += res; } } private void updateCurrent() { current = array[ (int)( position >>> SLICE_BITS ) ]; } public long position() { return position; } public void position( final long newPosition ) { position = Math.min( newPosition, length ); updateCurrent(); } @Override public long length() throws IOException { return length; } /** NOP. */ public void close() {} public boolean markSupported() { return false; } public void mark( final int dummy ) { throw new UnsupportedOperationException(); } public void reset() { throw new UnsupportedOperationException(); } } fastutil-7.1.0/src/it/unimi/dsi/fastutil/io/FastBufferedInputStream.java0000664000000000000000000004722213050705451025025 0ustar rootrootpackage it.unimi.dsi.fastutil.io; /* * Copyright (C) 2005-2017 Sebastiano Vigna * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import it.unimi.dsi.fastutil.bytes.ByteArrays; import it.unimi.dsi.fastutil.io.RepositionableStream; import java.io.IOException; import java.io.InputStream; import java.nio.channels.FileChannel; import java.util.EnumSet; /** Lightweight, unsynchronized, aligned input stream buffering class with * {@linkplain #skip(long) true skipping}, * {@linkplain MeasurableStream measurability}, * {@linkplain RepositionableStream repositionability} * and {@linkplain #readLine(byte[], int, int, EnumSet) line reading} support. * *

This class provides buffering for input streams, but it does so with * purposes and an internal logic that are radically different from the ones * adopted in {@link java.io.BufferedInputStream}. The main features follow. * *

    *
  • There is no support for marking. All methods are unsynchronized. * *

  • As an additional feature, this class implements the {@link * RepositionableStream} and {@link MeasurableStream} interfaces. * An instance of this class will try to cast * the underlying byte stream to a {@link RepositionableStream} and to fetch by * reflection the {@link java.nio.channels.FileChannel} underlying the given * input stream, in this order. If either reference can be successfully * fetched, you can use {@link #position(long)} to reposition the stream. * Much in the same way, an instance of this class will try to cast * the underlying byte stream to a {@link MeasurableStream}, and if this * operation is successful, or if a {@link java.nio.channels.FileChannel} can * be detected, then {@link #position()} and {@link #length()} will work as expected. * * *

  • Due to erratic and unpredictable behaviour of {@link InputStream#skip(long)}, * which does not correspond to its specification and which Sun refuses to fix * (see bug 6222822; * don't be fooled by the “closed, fixed” label), * this class peeks at the underlying stream and if it is {@link System#in} it uses * repeated reads instead of calling {@link InputStream#skip(long)} on the underlying stream; moreover, * skips and reads are tried alternately, so as to guarantee that skipping * fewer bytes than requested can be caused only by reaching the end of file. * *

  • This class also keeps track of the number of bytes read so far, so * as to be able to implement {@link MeasurableStream#position()} * independently of the underlying input stream. * *

  • This class has limited support for * {@linkplain #readLine(byte[], int, int, EnumSet) “reading a line”} * (whatever that means) from the underlying input stream. You can choose the set of * {@linkplain FastBufferedInputStream.LineTerminator line terminators} that * delimit lines. * *

* *
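For example (a minimal usage sketch; the file name is hypothetical), a file can be buffered, read line by line and repositioned: *

	 * final FastBufferedInputStream fbis = new FastBufferedInputStream( new java.io.FileInputStream( "data.txt" ) );
	 * final byte[] line = new byte[ 1024 ];
	 * final int len = fbis.readLine( line ); // bytes of the first line, or -1 at end of file
	 * fbis.position( 0 );                    // works because FileInputStream exposes a FileChannel
	 * fbis.close();
	 *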

Warning: Since fastutil 6.0.0, this class detects * a implementations of {@link MeasurableStream} instead of subclasses MeasurableInputStream (which is deprecated). * * @since 4.4 */ public class FastBufferedInputStream extends MeasurableInputStream implements RepositionableStream { /** The default size of the internal buffer in bytes (8Ki). */ public final static int DEFAULT_BUFFER_SIZE = 8 * 1024; /** An enumeration of the supported line terminators. */ public static enum LineTerminator { /** A carriage return (CR, ASCII 13). */ CR, /** A line feed (LF, ASCII 10). */ LF, /** A carriage return followed by a line feed (CR/LF, ASCII 13/10). */ CR_LF } /** A set containing all available line terminators. */ public final static EnumSet ALL_TERMINATORS = EnumSet.allOf( LineTerminator.class ); /** The underlying input stream. */ protected InputStream is; /** The internal buffer. */ protected byte buffer[]; /** The current position in the buffer. */ protected int pos; /** The number of bytes ever read (reset upon a call to {@link #position(long)}). * In particular, this will always represent the index (in the underlying input stream) * of the first available byte in the buffer. */ protected long readBytes; /** The number of buffer bytes available starting from {@link #pos}. */ protected int avail; /** The cached file channel underlying {@link #is}, if any. */ private FileChannel fileChannel; /** {@link #is} cast to a positionable stream, if possible. */ private RepositionableStream repositionableStream; /** {@link #is} cast to a measurable stream, if possible. */ private MeasurableStream measurableStream; private static int ensureBufferSize( final int bufferSize ) { if ( bufferSize <= 0 ) throw new IllegalArgumentException( "Illegal buffer size: " + bufferSize ); return bufferSize; } /** Creates a new fast buffered input stream by wrapping a given input stream with a given buffer. * * @param is an input stream to wrap. * @param buffer a buffer of positive length. */ public FastBufferedInputStream( final InputStream is, final byte[] buffer ) { this.is = is; ensureBufferSize( buffer.length ); this.buffer = buffer; if ( is instanceof RepositionableStream ) repositionableStream = (RepositionableStream)is; if ( is instanceof MeasurableStream ) measurableStream = (MeasurableStream)is; if ( repositionableStream == null ) { try { fileChannel = (FileChannel)( is.getClass().getMethod( "getChannel", new Class[] {} ) ).invoke( is, new Object[] {} ); } catch( IllegalAccessException e ) {} catch( IllegalArgumentException e ) {} catch( NoSuchMethodException e ) {} catch( java.lang.reflect.InvocationTargetException e ) {} catch( ClassCastException e ) {} } } /** Creates a new fast buffered input stream by wrapping a given input stream with a given buffer size. * * @param is an input stream to wrap. * @param bufferSize the size in bytes of the internal buffer (greater than zero). */ public FastBufferedInputStream( final InputStream is, final int bufferSize ) { this( is, new byte[ ensureBufferSize( bufferSize ) ] ); } /** Creates a new fast buffered input stream by wrapping a given input stream with a buffer of {@link #DEFAULT_BUFFER_SIZE} bytes. * * @param is an input stream to wrap. */ public FastBufferedInputStream( final InputStream is ) { this( is, DEFAULT_BUFFER_SIZE ); } /** Checks whether no more bytes will be returned. * *

This method will refill the internal buffer. * * @return true if there are no characters in the internal buffer and * the underlying reader is exhausted. */ protected boolean noMoreCharacters() throws IOException { if ( avail == 0 ) { avail = is.read( buffer ); if ( avail <= 0 ) { avail = 0; return true; } pos = 0; } return false; } public int read() throws IOException { if ( noMoreCharacters() ) return -1; avail--; readBytes++; return buffer[ pos++ ] & 0xFF; } public int read( final byte b[], final int offset, final int length ) throws IOException { if ( length <= avail ) { System.arraycopy( buffer, pos, b, offset, length ); pos += length; avail -= length; readBytes += length; return length; } final int head = avail; System.arraycopy( buffer, pos, b, offset, head ); pos = avail = 0; readBytes += head; if ( length > buffer.length ) { // We read directly into the destination final int result = is.read( b, offset + head, length - head ); if ( result > 0 ) readBytes += result; return result < 0 ? ( head == 0 ? -1 : head ) : result + head; } if ( noMoreCharacters() ) return head == 0 ? -1 : head; final int toRead = Math.min( length - head, avail ); readBytes += toRead; System.arraycopy( buffer, 0, b, offset + head, toRead ); pos = toRead; avail -= toRead; // Note that head >= 0, and necessarily toRead > 0 return toRead + head; } /** Reads a line into the given byte array using {@linkplain #ALL_TERMINATORS all terminators}. * * @param array byte array where the next line will be stored. * @return the number of bytes actually placed in array, or -1 at end of file. * @see #readLine(byte[], int, int, EnumSet) */ public int readLine( final byte[] array ) throws IOException { return readLine( array, 0, array.length, ALL_TERMINATORS ); } /** Reads a line into the given byte array. * * @param array byte array where the next line will be stored. * @param terminators a set containing the line termination sequences that we want * to consider as valid. * @return the number of bytes actually placed in array, or -1 at end of file. * @see #readLine(byte[], int, int, EnumSet) */ public int readLine( final byte[] array, final EnumSet terminators ) throws IOException { return readLine( array, 0, array.length, terminators ); } /** Reads a line into the given byte-array fragment using {@linkplain #ALL_TERMINATORS all terminators}. * * @param array byte array where the next line will be stored. * @param off the first byte to use in array. * @param len the maximum number of bytes to read. * @return the number of bytes actually placed in array, or -1 at end of file. * @see #readLine(byte[], int, int, EnumSet) */ public int readLine( final byte[] array, final int off, final int len ) throws IOException { return readLine( array, off, len, ALL_TERMINATORS ); } /** Reads a line into the given byte-array fragment. * *

Reading lines (i.e., characters) out of a byte stream is not always sensible * (methods available to that purpose in old versions of Java have been mercilessly deprecated). * Nonetheless, in several situations, such as when decoding network protocols or headers * known to be ASCII, it is very useful to be able to read a line from a byte stream. * *

This method will attempt to read the next line into array starting at off, * reading at most len bytes. The read, however, will be stopped by the end of file or * when meeting a {@linkplain LineTerminator line terminator}. Of course, for this operation * to be sensible the encoding of the text contained in the stream, if any, must not generate spurious * carriage returns or line feeds. Note that the termination detection uses a maximisation * criterion, so if you specify both {@link LineTerminator#CR} and * {@link LineTerminator#CR_LF} meeting a pair CR/LF will consider the whole pair a terminator. * *

Terminators are not copied into array or included in the returned count. The * returned integer can be used to check whether the line is complete: if it is smaller than * len, then more bytes might be available, but note that this method (contrarily * to {@link #read(byte[], int, int)}) can legitimately return zero when len * is nonzero just because a terminator was found as the first character. Thus, the intended * usage of this method is to call it on a given array, check whether len bytes * have been read, and if so try again (possibly extending the array) until a number of read bytes * strictly smaller than len (possibly, -1) is returned. * *

If you need to guarantee that a full line is read, use the following idiom: *

	 * int start = off, len;
	 * while( ( len = fbis.readLine( array, start, array.length - start, terminators ) ) == array.length - start ) {
	 *     start += len;
	 *     array = ByteArrays.grow( array, array.length + 1 );
	 * }
	 * 
* *

At the end of the loop, the line will be placed in array starting at * off (inclusive) and ending at start + Math.max( len, 0 ) (exclusive). * * @param array byte array where the next line will be stored. * @param off the first byte to use in array. * @param len the maximum number of bytes to read. * @param terminators a set containing the line termination sequences that we want * to consider as valid. * @return the number of bytes actually placed in array, or -1 at end of file. * Note that the returned number will be len if no line termination sequence * specified in terminators has been met before scanning len byte, * and if also we did not meet the end of file. */ public int readLine( final byte[] array, final int off, final int len, final EnumSet terminators ) throws IOException { ByteArrays.ensureOffsetLength( array ,off, len ); if ( len == 0 ) return 0; // 0-length reads always return 0 if ( noMoreCharacters() ) return -1; int i, k = 0, remaining = len, read = 0; // The number of bytes still to be read for(;;) { for( i = 0; i < avail && i < remaining && ( k = buffer[ pos + i ] ) != '\n' && k != '\r' ; i++ ); System.arraycopy( buffer, pos, array, off + read, i ); pos += i; avail -= i; read += i; remaining -= i; if ( remaining == 0 ) { readBytes += read; return read; // We did not stop because of a terminator } if ( avail > 0 ) { // We met a terminator if ( k == '\n' ) { // LF first pos++; avail--; if ( terminators.contains( LineTerminator.LF ) ) { readBytes += read + 1; return read; } else { array[ off + read++ ] = '\n'; remaining--; } } else if ( k == '\r' ) { // CR first pos++; avail--; if ( terminators.contains( LineTerminator.CR_LF ) ) { if ( avail > 0 ) { if ( buffer[ pos ] == '\n' ) { // CR/LF with LF already in the buffer. pos ++; avail--; readBytes += read + 2; return read; } } else { // We must search for the LF. if ( noMoreCharacters() ) { // Not found a matching LF because of end of file, will return CR in buffer if not a terminator if ( ! terminators.contains( LineTerminator.CR ) ) { array[ off + read++ ] = '\r'; remaining--; readBytes += read; } else readBytes += read + 1; return read; } if ( buffer[ 0 ] == '\n' ) { // Found matching LF, won't return terminators in the buffer pos++; avail--; readBytes += read + 2; return read; } } } if ( terminators.contains( LineTerminator.CR ) ) { readBytes += read + 1; return read; } array[ off + read++ ] = '\r'; remaining--; } } else if ( noMoreCharacters() ) { readBytes += read; return read; } } } public void position( long newPosition ) throws IOException { final long position = readBytes; /** Note that this check will succeed also in the case of * an empty buffer and position == newPosition. This behaviour is * intentional, as it delays buffering to when it is actually * necessary and avoids useless class the underlying stream. 
*/ if ( newPosition <= position + avail && newPosition >= position - pos ) { pos += newPosition - position; avail -= newPosition - position; readBytes = newPosition; return; } if ( repositionableStream != null ) repositionableStream.position( newPosition ); else if ( fileChannel != null ) fileChannel.position( newPosition ); else throw new UnsupportedOperationException( "position() can only be called if the underlying byte stream implements the RepositionableStream interface or if the getChannel() method of the underlying byte stream exists and returns a FileChannel" ); readBytes = newPosition; avail = pos = 0; } public long position() throws IOException { return readBytes; } /** Returns the length of the underlying input stream, if it is {@linkplain MeasurableStream measurable}. * * @return the length of the underlying input stream. * @throws UnsupportedOperationException if the underlying input stream is not {@linkplain MeasurableStream measurable} and * cannot provide a {@link FileChannel}. */ public long length() throws IOException { if ( measurableStream != null ) return measurableStream.length(); if ( fileChannel != null ) return fileChannel.size(); throw new UnsupportedOperationException(); } /** Skips the given amount of bytes by repeated reads. * * Warning: this method uses destructively the internal buffer. * * @param n the number of bytes to skip. * @return the number of bytes actually skipped. * @see InputStream#skip(long) */ private long skipByReading( final long n ) throws IOException { long toSkip = n; int len; while( toSkip > 0 ) { len = is.read( buffer, 0, (int)Math.min( buffer.length, toSkip ) ); if ( len > 0 ) toSkip -= len; else break; } return n - toSkip; } /** Skips over and discards the given number of bytes of data from this fast buffered input stream. * *

As explained in the {@linkplain FastBufferedInputStream class documentation}, the semantics * of {@link InputStream#skip(long)} is fatally flawed. This method provides additional semantics as follows: * it will skip the provided number of bytes, unless the end of file has been reached. * *

Additionally, if the underlying input stream is {@link System#in} this method will use * repeated reads instead of invoking {@link InputStream#skip(long)}. * * @param n the number of bytes to skip. * @return the number of bytes actually skipped; it can be smaller than n * only if the end of file has been reached. * @see InputStream#skip(long) */ public long skip( final long n ) throws IOException { if ( n <= avail ) { final int m = (int)n; pos += m; avail -= m; readBytes += n; return n; } long toSkip = n - avail, result = 0; avail = 0; while ( toSkip != 0 && ( result = is == System.in ? skipByReading( toSkip ) : is.skip( toSkip ) ) < toSkip ) { if ( result == 0 ) { if ( is.read() == -1 ) break; toSkip--; } else toSkip -= result; } final long t = n - ( toSkip - result ); readBytes += t; return t; } public int available() throws IOException { return (int)Math.min( is.available() + (long)avail, Integer.MAX_VALUE ); } public void close() throws IOException { if ( is == null ) return; if ( is != System.in ) is.close(); is = null; buffer = null; } /** Resets the internal logic of this fast buffered input stream, clearing the buffer. * *

All buffering information is discarded, and the number of bytes read so far * (and thus, also the {@linkplain #position() current position}) * is adjusted to reflect this fact. * *

This method is mainly useful for re-reading * files that have been overwritten externally. */ public void flush() { if ( is == null ) return; readBytes += avail; avail = pos = 0; } /** Resets the internal logic of this fast buffered input stream. * * @deprecated As of fastutil 5.0.4, replaced by {@link #flush()}. The old * semantics of this method does not contradict {@link InputStream}'s contract, as * the semantics of {@link #reset()} is undefined if {@link InputStream#markSupported()} * returns false. On the other hand, the name was really a poor choice. */ @Deprecated public void reset() { flush(); } } fastutil-7.1.0/src/it/unimi/dsi/fastutil/io/FastBufferedOutputStream.java0000664000000000000000000001706013050705451025223 0ustar rootrootpackage it.unimi.dsi.fastutil.io; /* * Copyright (C) 2005-2017 Sebastiano Vigna * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import java.io.IOException; import java.io.OutputStream; import java.nio.channels.FileChannel; /** Lightweight, unsynchronized output stream buffering class with * {@linkplain MeasurableStream measurability} and * {@linkplain RepositionableStream repositionability}. * *

This class provides buffering for output streams, but it does so with * purposes and an internal logic that are radically different from the ones * adopted in {@link java.io.BufferedOutputStream}. The main features follow. * *

    *
  • All methods are unsynchronized. * *

  • As an additional feature, this class implements the {@link * RepositionableStream} and {@link MeasurableStream} interfaces. * An instance of this class will try to cast * the underlying byte stream to a {@link RepositionableStream} and to fetch by * reflection the {@link java.nio.channels.FileChannel} underlying the given * output stream, in this order. If either reference can be successfully * fetched, you can use {@link #position(long)} to reposition the stream. * Much in the same way, an instance of this class will try to cast * the underlying byte stream to a {@link MeasurableStream}, and if this * operation is successful, or if a {@link java.nio.channels.FileChannel} can * be detected, then {@link #position()} and {@link #length()} will work as expected. *
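For example (a minimal usage sketch; the file name is hypothetical): *

	 * final FastBufferedOutputStream fbos = new FastBufferedOutputStream( new java.io.FileOutputStream( "data.bin" ) );
	 * fbos.write( new byte[] { 1, 2, 3 } );
	 * fbos.position( 0 ); // flushes the buffer, then repositions via the underlying FileChannel
	 * fbos.write( 4 );    // overwrites the first byte
	 * fbos.close();
	 *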

* @since 4.4 */ public class FastBufferedOutputStream extends MeasurableOutputStream implements RepositionableStream { private static final boolean ASSERTS = false; /** The default size of the internal buffer in bytes (8Ki). */ public final static int DEFAULT_BUFFER_SIZE = 8 * 1024; /** The internal buffer. */ protected byte buffer[]; /** The current position in the buffer. */ protected int pos; /** The number of buffer bytes available starting from {@link #pos} * (it must be always equal to buffer.length - pos). */ protected int avail; /** The underlying output stream. */ protected OutputStream os; /** The cached file channel underlying {@link #os}, if any. */ private FileChannel fileChannel; /** {@link #os} cast to a positionable stream, if possible. */ private RepositionableStream repositionableStream; /** {@link #os} cast to a measurable stream, if possible. */ private MeasurableStream measurableStream; private static int ensureBufferSize( final int bufferSize ) { if ( bufferSize <= 0 ) throw new IllegalArgumentException( "Illegal buffer size: " + bufferSize ); return bufferSize; } /** Creates a new fast buffered output stream by wrapping a given output stream with a given buffer. * * @param os an output stream to wrap. * @param buffer a buffer of positive length. */ public FastBufferedOutputStream( final OutputStream os, final byte[] buffer ) { this.os = os; ensureBufferSize( buffer.length ); this.buffer = buffer; avail = buffer.length; if ( os instanceof RepositionableStream ) repositionableStream = (RepositionableStream)os; if ( os instanceof MeasurableStream ) measurableStream = (MeasurableStream)os; if ( repositionableStream == null ) { try { fileChannel = (FileChannel)( os.getClass().getMethod( "getChannel", new Class[] {} ) ).invoke( os, new Object[] {} ); } catch( IllegalAccessException e ) {} catch( IllegalArgumentException e ) {} catch( NoSuchMethodException e ) {} catch( java.lang.reflect.InvocationTargetException e ) {} catch( ClassCastException e ) {} } } /** Creates a new fast buffered output stream by wrapping a given output stream with a given buffer size. * * @param os an output stream to wrap. * @param bufferSize the size in bytes of the internal buffer. */ public FastBufferedOutputStream( final OutputStream os, final int bufferSize ) { this( os, new byte[ ensureBufferSize( bufferSize ) ] ); } /** Creates a new fast buffered ouptut stream by wrapping a given output stream with a buffer of {@link #DEFAULT_BUFFER_SIZE} bytes. * * @param os an output stream to wrap. */ public FastBufferedOutputStream( final OutputStream os ) { this( os, DEFAULT_BUFFER_SIZE ); } private void dumpBuffer( final boolean ifFull ) throws IOException { if ( ! 
ifFull || avail == 0 ) { os.write( buffer, 0, pos ); pos = 0; avail = buffer.length; } } public void write( final int b ) throws IOException { if ( ASSERTS ) assert avail > 0; avail--; buffer[ pos++ ] = (byte)b; dumpBuffer( true ); } public void write( final byte b[], final int offset, final int length ) throws IOException { if ( length >= buffer.length ) { dumpBuffer( false ); os.write( b, offset, length ); return; } if ( length <= avail ) { // Copy in buffer System.arraycopy( b, offset, buffer, pos, length ); pos += length; avail -= length; dumpBuffer( true ); return; } dumpBuffer( false ); System.arraycopy( b, offset, buffer, 0, length ); pos = length; avail -= length; } public void flush() throws IOException { dumpBuffer( false ); os.flush(); } public void close() throws IOException { if ( os == null ) return; flush(); if ( os != System.out ) os.close(); os = null; buffer = null; } public long position() throws IOException { if ( repositionableStream != null ) return repositionableStream.position() + pos; else if ( measurableStream != null ) return measurableStream.position() + pos; else if ( fileChannel != null ) return fileChannel.position() + pos; else throw new UnsupportedOperationException( "position() can only be called if the underlying byte stream implements the MeasurableStream or RepositionableStream interface or if the getChannel() method of the underlying byte stream exists and returns a FileChannel" ); } /** Repositions the stream. * *

Note that this method performs a {@link #flush()} before changing the underlying stream position. */ public void position( final long newPosition ) throws IOException { flush(); if ( repositionableStream != null ) repositionableStream.position( newPosition ); else if ( fileChannel != null ) fileChannel.position( newPosition ); else throw new UnsupportedOperationException( "position() can only be called if the underlying byte stream implements the RepositionableStream interface or if the getChannel() method of the underlying byte stream exists and returns a FileChannel" ); } /** Returns the length of the underlying output stream, if it is {@linkplain MeasurableStream measurable}. * *

Note that this method performs a {@link #flush()} before detecting the length. * * @return the length of the underlying output stream. * @throws UnsupportedOperationException if the underlying output stream is not {@linkplain MeasurableStream measurable} and * cannot provide a {@link FileChannel}. */ public long length() throws IOException { flush(); if ( measurableStream != null ) return measurableStream.length(); if ( fileChannel != null ) return fileChannel.size(); throw new UnsupportedOperationException(); } } fastutil-7.1.0/src/it/unimi/dsi/fastutil/io/InspectableFileCachedInputStream.java0000664000000000000000000002504013050705451026600 0ustar rootrootpackage it.unimi.dsi.fastutil.io; /* * Copyright (C) 2005-2017 Sebastiano Vigna * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import it.unimi.dsi.fastutil.bytes.ByteArrays; import java.io.File; import java.io.FileNotFoundException; import java.io.IOException; import java.io.OutputStream; import java.io.RandomAccessFile; import java.nio.ByteBuffer; import java.nio.channels.Channels; import java.nio.channels.FileChannel; import java.nio.channels.WritableByteChannel; /** A {@linkplain RepositionableStream repositionable} {@link MeasurableInputStream} based on * cached data received by a {@link WritableByteChannel} whose first bytes can be inspected directly. * *

An instance of this class acts as a buffer holding the bytes written through its * {@link WritableByteChannel} interface (which can be easily turned into an {@link OutputStream} using * {@link Channels#newOutputStream(WritableByteChannel)}). The data can be discarded at any time using * {@link #clear()}. The first {@link #inspectable} bytes of {@link #buffer} contain the first * bytes written. When {@link #buffer} is full, the bytes are written to an overflow * file. * *

At any time, the stream of bytes written since creation (or since the last {@link #clear()}) * is available as a fully implemented {@link MeasurableInputStream} which also implements * {@link RepositionableStream} and {@linkplain #mark(int) supports marking}. * Note that you must carefully arbitrate write and read accesses, * as it is always possible to call {@link #write(ByteBuffer)} * and thus modify the {@linkplain #length() length} of the {@link MeasurableInputStream}. * *

The method {@link #close()} makes the {@link MeasurableInputStream} and {@link WritableByteChannel} state-changing methods temporarily throw an {@link IOException}, but * does not otherwise modify the state of the stream. You can {@linkplain #reopen() reopen} the stream * later, or {@linkplain #clear() clear} it. * The method {@link #dispose()} can be used to release * the resources associated with the stream. * *

Buffering

* *

This class provides no form of buffering except for the memory buffer described above, both * when reading and when writing. Users should consider wrapping instances of this class with a * {@link FastBufferedInputStream}, as reads after the buffer has been exhausted will be performed * directly on a {@link RandomAccessFile}. */ public class InspectableFileCachedInputStream extends MeasurableInputStream implements RepositionableStream, WritableByteChannel { public static final boolean DEBUG = false; /** The default buffer size (64KiB). */ public static final int DEFAULT_BUFFER_SIZE = 64 * 1024; /** The inspection buffer. The first {@link #inspectable} bytes contain the first part of the input stream. * The buffer is available for inspection, but users should not modify its content. */ public final byte[] buffer; /** The number of valid bytes currently in {@link #buffer}. */ public int inspectable; /** The overflow file used by this stream: it is created at construction time, and deleted on {@link #close()}. */ private final File overflowFile; /** The random access file used to access the overflow file. */ private final RandomAccessFile randomAccessFile; /** {@link #randomAccessFile randomAccessFile#getChannel()}, cached. */ private final FileChannel fileChannel; /** The position on this stream (i.e., the index of the next byte to be returned). */ private long position; /** The {@linkplain #mark(int) mark}, if set, or -1. */ private long mark; /** The write position of the {@link #randomAccessFile overflow file}. When {@link #inspectable} is equal * to {@link #buffer buffer.length}, the length of the stream is {@link #inspectable} + {@link #writePosition}. */ private long writePosition; /** Creates a new instance with specified buffer size and overlow-file directory. * * @param bufferSize the buffer size, in bytes. * @param overflowFile the directory where the overflow file should be created, or null for the default temporary directory. */ public InspectableFileCachedInputStream( final int bufferSize, final File overflowFile ) throws IOException { if ( bufferSize <= 0 ) throw new IllegalArgumentException( "Illegal buffer size " + bufferSize ); if ( overflowFile != null ) this.overflowFile = overflowFile; else ( this.overflowFile = File.createTempFile( getClass().getSimpleName(), "overflow" ) ).deleteOnExit(); buffer = new byte[ bufferSize ]; randomAccessFile = new RandomAccessFile( this.overflowFile, "rw" ); fileChannel = randomAccessFile.getChannel(); mark = -1; } /** Creates a new instance with specified buffer size and default overflow-file directory. * * @param bufferSize the buffer size, in bytes. */ public InspectableFileCachedInputStream( final int bufferSize ) throws IOException { this( bufferSize, null ); } /** Creates a new instance with default buffer size and overflow-file directory. */ public InspectableFileCachedInputStream() throws IOException { this( DEFAULT_BUFFER_SIZE ); } private void ensureOpen() throws IOException { if ( position == -1 ) throw new IOException( "This " + getClass().getSimpleName() + " is closed" ); } /** Clears the content of this {@link InspectableFileCachedInputStream}, zeroing the length of the represented * stream. */ public void clear() throws IOException { if ( ! fileChannel.isOpen() ) throw new IOException( "This " + getClass().getSimpleName() + " is closed" ); writePosition = position = inspectable = 0; mark = -1; } /** Appends the content of a specified buffer to the end of the currently represented stream. * * @param byteBuffer a byte buffer. 
* @return the number of bytes appended (i.e., {@link ByteBuffer#remaining() byteBuffer.remaining()}). */ public int write( final ByteBuffer byteBuffer ) throws IOException { ensureOpen(); final int remaining = byteBuffer.remaining(); if ( inspectable < buffer.length ) { // Still some space in the inspectable buffer. final int toBuffer = Math.min( buffer.length - inspectable, remaining ); byteBuffer.get( buffer, inspectable, toBuffer ); inspectable += toBuffer; } if ( byteBuffer.hasRemaining() ) { fileChannel.position( writePosition ); writePosition += fileChannel.write( byteBuffer ); } return remaining; } /** Truncates the overflow file to a given size if possible. * * @param size the new size; the final size is the maximum between the current write position (i.e., the length * of the represented stream minus the length of the inspection buffer) and this value. */ public void truncate( final long size ) throws FileNotFoundException, IOException { fileChannel.truncate( Math.max( size, writePosition ) ); } /** Makes the stream unreadable until the next {@link #clear()}. * * @see #reopen() */ @Override public void close() { position = -1; } /** Makes the stream readable again after a {@link #close()}. * * @see #close() */ public void reopen() throws IOException { if ( ! fileChannel.isOpen() ) throw new IOException( "This " + getClass().getSimpleName() + " is closed" ); position = 0; } /** Disposes this stream, deleting the overflow file. After that, the stream is unusable. */ public void dispose() throws IOException { position = -1; randomAccessFile.close(); overflowFile.delete(); } protected void finalize() throws Throwable { try { dispose(); } finally { super.finalize(); } } @Override public int available() throws IOException { ensureOpen(); return (int)Math.min( Integer.MAX_VALUE, length() - position ); } @Override public int read( byte[] b, int offset, int length ) throws IOException { ensureOpen(); if ( length == 0 ) return 0; if ( position == length() ) return -1; // Nothing to read. ByteArrays.ensureOffsetLength( b, offset, length ); int read = 0; if ( position < inspectable ) { /* The first min(inspectable - readPosition, length) bytes should be taken from the buffer. */ final int toCopy = Math.min( inspectable - (int)position, length ); System.arraycopy( buffer, (int)position, b, offset, toCopy ); length -= toCopy; offset += toCopy; position += toCopy; read = toCopy; } if ( length > 0 ) { // We want to read more. if ( position == length() ) return read != 0 ? read : -1; // There's nothing more to read. fileChannel.position( position - inspectable ); final int toRead = (int)Math.min( length() - position, length ); // This is *intentionally* not a readFully(). Let the language to its stuff. 
final int t = randomAccessFile.read( b, offset, toRead ); position += t; read += t; } return read; } @Override public int read( byte[] b ) throws IOException { return read( b, 0, b.length ); } @Override public long skip( final long n ) throws IOException { ensureOpen(); final long toSkip = Math.min( n, length() - position ); position += toSkip; return toSkip; } @Override public int read() throws IOException { ensureOpen(); if ( position == length() ) return -1; // Nothing to read if ( position < inspectable ) return buffer[ (int)position++ ] & 0xFF; fileChannel.position( position - inspectable ); position++; return randomAccessFile.read(); } @Override public long length() throws IOException { ensureOpen(); return inspectable + writePosition; } @Override public long position() throws IOException { ensureOpen(); return position; } /** Positions the input stream. * * @param position the new position (will be minimized with {@link #length()}). */ public void position( final long position ) throws IOException { this.position = Math.min( position, length() ); } @Override public boolean isOpen() { return position != -1; } @Override public void mark( final int readlimit ) { mark = position; } @Override public void reset() throws IOException { ensureOpen(); if ( mark == -1 ) throw new IOException( "Mark has not been set" ); position( mark ); } @Override public boolean markSupported() { return true; } } fastutil-7.1.0/src/it/unimi/dsi/fastutil/io/MeasurableInputStream.java0000664000000000000000000000156713050705451024547 0ustar rootrootpackage it.unimi.dsi.fastutil.io; /* * Copyright (C) 2005-2017 Sebastiano Vigna * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import java.io.InputStream; /** An {@link InputStream} that implements also the {@link MeasurableStream} interface. * * @since 5.0.4 */ public abstract class MeasurableInputStream extends InputStream implements MeasurableStream { } fastutil-7.1.0/src/it/unimi/dsi/fastutil/io/MeasurableOutputStream.java0000664000000000000000000000157213050705451024744 0ustar rootrootpackage it.unimi.dsi.fastutil.io; /* * Copyright (C) 2005-2017 Sebastiano Vigna * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import java.io.OutputStream; /** An {@link OutputStream} that implements also the {@link MeasurableStream} interface. 
* * @since 6.0.0 */ public abstract class MeasurableOutputStream extends OutputStream implements MeasurableStream { } fastutil-7.1.0/src/it/unimi/dsi/fastutil/io/MeasurableStream.java0000664000000000000000000000405413050705451023521 0ustar rootrootpackage it.unimi.dsi.fastutil.io; /* * Copyright (C) 2005-2017 Sebastiano Vigna * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import java.io.IOException; /** An stream that provides eager access to its length, * and keeps track of the current position (e.g., the number of bytes read so far, or the current * position of the file pointer). * *

This class has two methods, both specified as optional. This apparently bizarre * behaviour is necessary because of wrapper classes which use reflection * to support those methods (see, e.g., {@link MeasurableInputStream}, {@link FastBufferedInputStream} and {@link FastBufferedOutputStream}). * * @since 6.0.0 */ public interface MeasurableStream { /** Returns the overall length of this stream (optional operation). In most cases, this will require the * stream to perform some extra action, possibly changing the state of the input stream itself (typically, reading * all the bytes up to the end, or flushing on output stream). * Implementing classes should always document what state will the input stream be in * after calling this method, and which kind of exception could be thrown. */ public long length() throws IOException; /** Returns the current position in this stream (optional operation). * *

Usually, the position is just the number of bytes read or written * since the stream was opened, but in the case of a * {@link it.unimi.dsi.fastutil.io.RepositionableStream} it * represent the current position. */ public long position() throws IOException; } fastutil-7.1.0/src/it/unimi/dsi/fastutil/io/RepositionableStream.java0000664000000000000000000000213113050705451024412 0ustar rootrootpackage it.unimi.dsi.fastutil.io; /* * Copyright (C) 2005-2017 Sebastiano Vigna * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /** A basic interface specifying positioning methods for a byte stream. * * @author Sebastiano Vigna * @since 4.4 */ public interface RepositionableStream { /** Sets the current stream position. * * @param newPosition the new stream position. */ void position( long newPosition ) throws java.io.IOException; /** Returns the current stream position. * * @return the current stream position. */ long position() throws java.io.IOException; } fastutil-7.1.0/src/it/unimi/dsi/fastutil/booleans/package.html0000664000000000000000000000101413050701620023061 0ustar rootroot fastutil

Provides type-specific classes for boolean elements or keys.

Not all classes are provided in a boolean-specific version: sorted sets and maps not generated (as they are completely useless). Unsorted sets and maps are kept for orthogonality, whereas {@link it.unimi.dsi.fastutil.booleans.BooleanCollection} is used by maps with boolean values. fastutil-7.1.0/src/it/unimi/dsi/fastutil/bytes/package.html0000664000000000000000000000032313050701620022407 0ustar rootroot fastutil

Provides type-specific classes for byte elements or keys. fastutil-7.1.0/src/it/unimi/dsi/fastutil/chars/package.html0000664000000000000000000000033013050701620022357 0ustar rootroot fastutil

Provides type-specific classes for character elements or keys. fastutil-7.1.0/src/it/unimi/dsi/fastutil/shorts/package.html0000664000000000000000000000032413050701620022604 0ustar rootroot fastutil

Provides type-specific classes for short elements or keys. fastutil-7.1.0/src/it/unimi/dsi/fastutil/ints/package.html0000664000000000000000000000032613050701620022241 0ustar rootroot fastutil

Provides type-specific classes for integer elements or keys. fastutil-7.1.0/src/it/unimi/dsi/fastutil/longs/package.html0000664000000000000000000000032313050701620022403 0ustar rootroot fastutil

Provides type-specific classes for long elements or keys. fastutil-7.1.0/src/it/unimi/dsi/fastutil/floats/package.html0000664000000000000000000000032413050701620022552 0ustar rootroot fastutil

Provides type-specific classes for float elements or keys. fastutil-7.1.0/src/it/unimi/dsi/fastutil/doubles/package.html0000664000000000000000000000032513050701620022720 0ustar rootroot fastutil

Provides type-specific classes for double elements or keys. fastutil-7.1.0/src/it/unimi/dsi/fastutil/objects/package.html0000664000000000000000000000114213050701620022712 0ustar rootroot fastutil

Provides type-specific classes for object elements or keys.

Whenever possible, fastutil provides both typical collections, which compare objects using equals(), and reference-based collections, which use equality (==). See the related comments in the overview.

Of course, reference-based sorted sets and maps make no sense, and are not generated. fastutil-7.1.0/src/it/unimi/dsi/fastutil/io/package.html0000664000000000000000000000112113050701620021665 0ustar rootroot fastutil

Provides classes and static methods that make object and primitive-type I/O easier and faster.

Package Specification

Classes in this package provide very efficient, unsynchronised buffered input and output stream (with support for repositioning, too) and fast streams based on byte arrays. Static containers provide instead a wealth of methods that can be used to serialize/deserialize very easily objects and arrays. fastutil-7.1.0/src/overview.html0000664000000000000000000013707513050701620015404 0ustar rootroot fastutil

Extends the Java™ Collections Framework by providing type-specific maps, sets, lists and priority queues with a small memory footprint and fast access and insertion; provides also big (64-bit) arrays, sets and lists, and fast, practical I/O classes for binary and text files. It is free software distributed under the Apache License 2.0.

Package Specification

fastutil is formed by three cores:

  • type-specific classes that extend naturally the Java™ Collections Framework;
  • classes that support very large collections;
  • classes for fast and practical access to binary and text files.

The three cores are briefly introduced in the next sections, and then discussed at length in the rest of this overview.

Type-specific classes

fastutil specializes the most useful {@link java.util.HashSet}, {@link java.util.HashMap}, {@link java.util.LinkedHashSet}, {@link java.util.LinkedHashMap}, {@link java.util.TreeSet}, {@link java.util.TreeMap}, {@link java.util.IdentityHashMap}, {@link java.util.ArrayList} and {@link java.util.Stack} classes to versions that accept a specific kind of key or value (e.g., {@linkplain it.unimi.dsi.fastutil.ints.IntSet integers}). Besides, there are also several types of {@linkplain it.unimi.dsi.fastutil.PriorityQueue priority queues} and a large collection of static objects and methods (such as {@linkplain it.unimi.dsi.fastutil.objects.ObjectSets#EMPTY_SET immutable empty containers}, {@linkplain it.unimi.dsi.fastutil.ints.IntComparators#OPPOSITE_COMPARATOR comparators implementing the opposite of the natural order}, {@linkplain it.unimi.dsi.fastutil.ints.IntIterators#wrap(int[]) iterators obtained by wrapping an array} and so on).

To understand what's going on at a glance, the best thing is to look at the examples provided. If you have already used the Collections Framework, everything should look rather natural. If, in particular, you use an IDE such as Eclipse, which can suggest method names, all you need to know is the right name for the class you need.

Support for very large collections

With fastutil 6, a new set of classes makes it possible to handle very large collections: in particular, collections whose size exceeds 2^31. {@linkplain it.unimi.dsi.fastutil.BigArrays Big arrays} are arrays-of-arrays handled by a wealth of static methods that act on them as if they were monodimensional arrays with 64-bit indices; {@linkplain it.unimi.dsi.fastutil.BigList big lists} provide 64-bit list access; {@linkplain it.unimi.dsi.fastutil.ints.IntOpenHashBigSet big hash sets} provide support for sets whose size is only limited by the amount of core memory.

The usual methods from {@link java.util.Arrays} and similar classes have been extended to big arrays: have a look at the Javadoc documentation of {@link it.unimi.dsi.fastutil.BigArrays} and {@link it.unimi.dsi.fastutil.ints.IntBigArrays} to get an idea of the generic and type-specific methods available.
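
As a minimal sketch of how big arrays are handled through those static methods (a tiny array is used here for brevity; the point in practice is that indices are longs):

    import it.unimi.dsi.fastutil.ints.IntBigArrays;

    public class BigArrayExample {
        public static void main( String[] args ) {
            int[][] big = IntBigArrays.newBigArray( 1000 ); // logically a single array indexed by longs
            IntBigArrays.set( big, 999, 42 );               // big[ 999 ] = 42
            int x = IntBigArrays.get( big, 999 );           // x == 42
            System.out.println( x + " " + IntBigArrays.length( big ) );
        }
    }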

Fast and practical I/O

fastutil provides replacements for some standard classes of {@link java.io} that are plagued by a number of problems (see, e.g., {@link it.unimi.dsi.fastutil.io.FastBufferedInputStream}). The {@link it.unimi.dsi.fastutil.io.BinIO} and {@link it.unimi.dsi.fastutil.io.TextIO} static containers contain dozens of methods that make it possible to load and save quickly (big) arrays to disks, to adapt binary and text file to iterators, and so on.
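
A minimal sketch of this kind of one-call array I/O (the method names storeInts()/loadInts() and the file name are used here only for illustration):

    import it.unimi.dsi.fastutil.io.BinIO;

    public class BinIOExample {
        public static void main( String[] args ) throws java.io.IOException {
            int[] data = { 1, 2, 3 };
            BinIO.storeInts( data, "data.bin" );       // store the whole array in binary form
            int[] back = BinIO.loadInts( "data.bin" ); // load it back into a fresh array
            System.out.println( back.length );
        }
    }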

More on type-specific classes

All data structures in fastutil implement their standard counterpart interface whenever possible (e.g., {@link java.util.Map} for maps). Thus, they can be just plugged into existing code, using the standard access methods (of course, any attempt to use the wrong type for keys or values will produce a {@link java.lang.ClassCastException}). However, they also provide (whenever possible) many polymorphic versions of the most used methods that avoid boxing/unboxing. In doing so, they implement more stringent interfaces that extend and strengthen the standard ones (e.g., {@link it.unimi.dsi.fastutil.ints.Int2IntSortedMap} or {@link it.unimi.dsi.fastutil.ints.IntListIterator}).
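
A minimal sketch of this double nature, using a type-specific map (the key/value content is arbitrary):

    import it.unimi.dsi.fastutil.ints.Int2IntOpenHashMap;
    import java.util.Map;

    public class TypeSpecificExample {
        public static void main( String[] args ) {
            Int2IntOpenHashMap m = new Int2IntOpenHashMap();
            m.put( 1, 100 );                 // polymorphic put( int, int ): no boxing
            int v = m.get( 1 );              // polymorphic get( int ): no unboxing
            Map<Integer, Integer> asMap = m; // still a standard java.util.Map
            System.out.println( v + " " + asMap.size() );
        }
    }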

Warning: automatic boxing and unboxing can lead you to choose the wrong method when using fastutil. It is also extremely inefficient. We suggest that your programming environment is set to mark boxing/unboxing as a warning, or even better, as an error.

Of course, the main point of type-specific data structures is that the absence of wrappers around primitive types can increase speed and reduce space occupancy by several times. The presence of generics in Java does not change this fact, since there is no genericity for primitive types.

The implementation techniques used in fastutil are quite different than those of {@link java.util}: for instance, open-addressing hash tables, threaded AVL trees, threaded red-black trees and exclusive-or lists. An effort has also been made to provide powerful derived objects and to expose them by overriding covariantly return types: for instance, the {@linkplain it.unimi.dsi.fastutil.objects.Object2IntSortedMap#keySet() keys of sorted maps are sorted} and iterators on sorted containers are always {@linkplain it.unimi.dsi.fastutil.BidirectionalIterator bidirectional}.

More generally, the rationale behind fastutil is that you should never need to explicitly code natural transformations. You do not need to define an anonymous class to iterate over an array of integers—just {@linkplain it.unimi.dsi.fastutil.ints.IntIterators#wrap(int[]) wrap it}. You do not need to write a loop to put the characters returned by an iterator into a set—just {@linkplain it.unimi.dsi.fastutil.chars.CharOpenHashSet#CharOpenHashSet(CharIterator) use the right constructor}. And so on.
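
For instance, the two idioms just mentioned can be combined in a couple of lines (a sketch; the array content is arbitrary):

    import it.unimi.dsi.fastutil.ints.IntArrayList;
    import it.unimi.dsi.fastutil.ints.IntIterator;
    import it.unimi.dsi.fastutil.ints.IntIterators;

    public class WrapExample {
        public static void main( String[] args ) {
            IntIterator i = IntIterators.wrap( new int[] { 1, 2, 3 } ); // iterate over an array
            IntArrayList l = new IntArrayList( i );                     // "unroll" the iterator into a list
            System.out.println( l );
        }
    }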

The Names

In general, class names adhere to the general pattern

valuetype collectiontype

for collections, and

keytype 2 valuetype maptype

for maps.

By "type" here I mean a capitalized primitive type, {@link java.lang.Object} or Reference. In the latter case, we are treating objects, but their equality is established by reference equality (that is, without invoking equals()), similarly to {@link java.util.IdentityHashMap}. Of course, reference-based classes are significantly faster.

Thus, an {@link it.unimi.dsi.fastutil.ints.IntOpenHashSet} stores integers efficiently and implements {@link it.unimi.dsi.fastutil.ints.IntSet}, whereas a {@link it.unimi.dsi.fastutil.longs.Long2IntAVLTreeMap} does the same for maps from longs to integers (but the map will be sorted, tree based, and balanced using the AVL criterion), implementing {@link it.unimi.dsi.fastutil.longs.Long2IntMap}. If you need additional flexibility in choosing your {@linkplain it.unimi.dsi.fastutil.Hash.Strategy hash strategy}, you can put, say, arrays of integers in a {@link it.unimi.dsi.fastutil.objects.ObjectOpenCustomHashSet}, maybe using the ready-made {@linkplain it.unimi.dsi.fastutil.ints.IntArrays#HASH_STRATEGY hash strategy for arrays}. A {@link it.unimi.dsi.fastutil.longs.LongLinkedOpenHashSet} stores longs in a hash table, but provides a predictable iteration order (the insertion order) and access to first/last elements of the order. A {@link it.unimi.dsi.fastutil.objects.Reference2ReferenceOpenHashMap} is similar to an {@link java.util.IdentityHashMap}. You can manage a priority queue of characters in a heap using a {@link it.unimi.dsi.fastutil.chars.CharHeapPriorityQueue}, which implements {@link it.unimi.dsi.fastutil.chars.CharPriorityQueue}. {@linkplain it.unimi.dsi.fastutil.bytes.ByteArrayFrontCodedList Front-coded lists} are highly specialized immutable data structures that store compactly a large number of arrays: if you don't know them you probably don't need them.
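
A short sketch showing the naming pattern in practice, using two of the classes mentioned above (keys and values are arbitrary):

    import it.unimi.dsi.fastutil.longs.Long2IntAVLTreeMap;
    import it.unimi.dsi.fastutil.longs.LongLinkedOpenHashSet;

    public class NamingExample {
        public static void main( String[] args ) {
            Long2IntAVLTreeMap m = new Long2IntAVLTreeMap(); // long keys, int values, AVL tree
            m.put( 2L, 20 );
            LongLinkedOpenHashSet s = new LongLinkedOpenHashSet(); // longs, hashed, insertion order
            s.add( 2L );
            System.out.println( m.get( 2L ) + " " + s.firstLong() );
        }
    }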

For a number of data structures that were not available in the Java™ Collections Framework when fastutil was created, an object-based version is contained in {@link it.unimi.dsi.fastutil}, and in that case the prefix Object is not used (see, e.g., {@link it.unimi.dsi.fastutil.PriorityQueue}).

Since there are eight primitive types in Java, and we support reference-based containers, we get 1877 (!) classes (some nonsensical classes, such as Boolean2BooleanAVLTreeMap, are not generated). Many classes are generated just to mimic the hierarchy of {@link java.util} so to redistribute common code in a similar way. There are also several abstract classes that ease significantly the creation of new type-specific classes by providing automatically generic methods based on the type-specific ones.

The huge number of classes required a suitable division in subpackages. Each subpackage is characterized by the type of elements or keys: thus, for instance, {@link it.unimi.dsi.fastutil.ints.IntSet} belongs to {@link it.unimi.dsi.fastutil.ints} (the plural is required, as int is a keyword and cannot be used in a package name), as well as {@link it.unimi.dsi.fastutil.ints.Int2ReferenceRBTreeMap}. Note that all classes for non-primitive elements and keys are gathered in {@link it.unimi.dsi.fastutil.objects}. Finally, a number of non-type-specific classes have been gathered in {@link it.unimi.dsi.fastutil}.

An In–Depth Look

The following table summarizes the available interfaces and implementations. To get more information, you can look at a specific implementation in {@link it.unimi.dsi.fastutil} or, for instance, {@link it.unimi.dsi.fastutil.ints}.

Interfaces | Abstract Implementations | Implementations
Iterable | |
Collection | AbstractCollection |
Set | AbstractSet | OpenHashSet, OpenCustomHashSet, ArraySet, OpenHashBigSet
SortedSet | AbstractSortedSet | RBTreeSet, AVLTreeSet, LinkedOpenHashSet
Function | AbstractFunction |
Map | AbstractMap | OpenHashMap, OpenCustomHashMap, ArrayMap
SortedMap | AbstractSortedMap | RBTreeMap, AVLTreeMap, LinkedOpenHashMap
List, BigList† | AbstractList, AbstractBigList | ArrayList, BigArrayBigList, ArrayFrontCodedList
PriorityQueue† | AbstractPriorityQueue† | HeapPriorityQueue, ArrayPriorityQueue, ArrayFIFOQueue
IndirectPriorityQueue† | AbstractIndirectPriorityQueue† | HeapSemiIndirectPriorityQueue, HeapIndirectPriorityQueue, ArrayIndirectPriorityQueue
Stack† | AbstractStack† | ArrayList
Iterator, BigListIterator† | AbstractIterator, AbstractListIterator, AbstractBigListIterator |
Comparator | AbstractComparator |
BidirectionalIterator† | AbstractBidirectionalIterator |
ListIterator | AbstractListIterator |
Size64‡ | |

†: this class has also a non-type-specific implementation in {@link it.unimi.dsi.fastutil}.

‡: this class has only a non-type-specific implementation in {@link it.unimi.dsi.fastutil}.

Note that abstract implementations are named by prefixing the interface name with Abstract. Thus, if you want to define a type-specific structure holding a set of integers without the hassle of defining object-based methods, you should inherit from {@link it.unimi.dsi.fastutil.ints.AbstractIntSet}.
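
As a minimal sketch of this approach, the following hypothetical class implements an immutable set containing a single integer by extending AbstractIntSet; only the type-specific methods are written, and it is assumed that IntIterators.singleton() is available (otherwise a tiny custom iterator would be needed):

    import it.unimi.dsi.fastutil.ints.AbstractIntSet;
    import it.unimi.dsi.fastutil.ints.IntIterator;
    import it.unimi.dsi.fastutil.ints.IntIterators;

    public class SingletonIntSet extends AbstractIntSet {
        private final int element;
        public SingletonIntSet( final int element ) { this.element = element; }
        @Override public IntIterator iterator() { return IntIterators.singleton( element ); }
        @Override public int size() { return 1; }
        @Override public boolean contains( final int k ) { return k == element; } // optional, but faster than the inherited scan
    }

All object-based methods (contains(Object), equals(), toString() and so on) are provided by the abstract class on top of these.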

The following table summarizes static containers, which usually give rise both to a type-specific and to a generic class:

Static Containers
Collections
Sets
SortedSets
Functions
Maps†
SortedMaps
Lists
BigLists
Arrays†
BigArrays†
Heaps
SemiIndirectHeaps
IndirectHeaps
PriorityQueues†
IndirectPriorityQueues†
Iterators
BigListIterators
Comparators
Hash‡
HashCommon‡

†: this class has also a non-type-specific implementation in {@link it.unimi.dsi.fastutil}.

‡: this class has only a non-type-specific implementation in {@link it.unimi.dsi.fastutil}.

The static containers provide also special-purpose implementations for all kinds of {@linkplain it.unimi.dsi.fastutil.objects.ObjectSets#EMPTY_SET empty structures} (including {@linkplain it.unimi.dsi.fastutil.objects.ObjectArrays#EMPTY_ARRAY arrays}) and {@linkplain it.unimi.dsi.fastutil.ints.Int2IntMaps#singleton(int,int) singletons}.

Warnings

No class is synchronized. If multiple threads access one of these classes concurrently, and at least one of the threads modifies it, it must be synchronized externally. Iterators will behave unpredictably in the presence of concurrent modifications. Reads, however, can be carried out concurrently.

Reference-based classes violate the {@link java.util.Map} contract. They intentionally compare objects by reference, and do not use the equals() method. They should be used only when reference-based equality is desired (for instance, if all objects involved are canonized, as it happens with interned strings).

Linked classes do not wholly implement the {@link java.util.SortedMap} interface. They provide methods to get the first and last element in iteration order, and to start a bidirectional iterator from any element, but any submap or subset method will cause an {@link java.lang.UnsupportedOperationException} (this may change in future versions).

Substructures in sorted classes allow the creation of arbitrary substructures. In {@link java.util}, instead, you can only create contained sub-substructures (BTW, why?). For instance, (new TreeSet()).tailSet(1).tailSet(0) will throw an exception, but {@link it.unimi.dsi.fastutil.ints.IntRBTreeSet (new IntRBTreeSet()).tailSet(1).tailSet(0)} won't.

Immutability is syntactically based (as opposed to semantically based). All methods that are known not to be causing modifications to the structure at compile time will not throw exceptions (e.g., {@link it.unimi.dsi.fastutil.objects.ObjectSets#EMPTY_SET EMPTY_SET.clear()}). All other methods will cause an {@link java.lang.UnsupportedOperationException}. Note that (as of Java 5) the situation in {@link java.util} is definitely different, and inconsistent: for instance, in singletons add() always throws an exception, whereas remove() does it only if the singleton would be modified. This behaviour agrees with the interface documentation, but it is nonetheless confusing.

Additional Features and Methods

The new interfaces add some very natural methods and strengthen many of the old ones. Moreover, whenever possible, the object returned is type-specific, or implements a more powerful interface. Before fastutil 5, the impossibility of overriding covariantly return types made these features accessible only by means of type casting, but fortunately this is no longer true.

More in detail:

  • Keys and values of a map are of the fastutil type you would expect (e.g., the keys of an {@link it.unimi.dsi.fastutil.ints.Int2LongSortedMap} are an {@link it.unimi.dsi.fastutil.ints.IntSortedSet} and the values are a {@link it.unimi.dsi.fastutil.longs.LongCollection}).
  • Hash-based and tree-based maps that return primitive numeric values have an addTo() method (see, e.g., {@link it.unimi.dsi.fastutil.ints.Int2IntOpenHashMap#addTo(int,int)}) that adds an increment to the current value of a key; it is most useful to avoid the inefficient procedure of getting a value, incrementing it and then putting it back into the map (typically, when counting the number of occurrences of elements in a sequence). A sketch of this counting pattern appears right after this list.
  • Hash-set implementations have an additional {@link it.unimi.dsi.fastutil.objects.ObjectOpenHashSet#get(Object) get()} method that returns the actual object in the collection that is equal to the query key.
  • Linked hash-based maps and sets have a wealth of additional methods that make it easy to use them as caches. See, for instance, {@link it.unimi.dsi.fastutil.ints.Int2IntLinkedOpenHashMap#putAndMoveToLast(int,int)}, {@link it.unimi.dsi.fastutil.ints.IntLinkedOpenHashSet#addAndMoveToLast(int)} or {@link it.unimi.dsi.fastutil.ints.Int2IntLinkedOpenHashMap#removeFirstInt()}.
  • Submaps of a sorted map and subsets of a sorted sets are of the fastutil type you would expect, too.
  • Iterators returned by iterator() are type-specific.
  • Sorted structures in fastutil return type-specific {@linkplain it.unimi.dsi.fastutil.BidirectionalIterator bidirectional iterators}. This means that you can move back and forth among entries, keys or values.
  • Some classes for maps (check the specification) return a fast entry set (see, e.g., {@link it.unimi.dsi.fastutil.ints.Int2IntOpenHashMap#int2IntEntrySet()}); fast entry sets can, in turn, provide a {@linkplain it.unimi.dsi.fastutil.ints.Int2IntMap.FastEntrySet#fastIterator()} that is guaranteed not to create a large number of objects, possibly by returning always the same entry (of course, mutated).
  • The type-specific sorted set interfaces, moreover, feature an optional method iterator(from) which creates a type-specific {@link it.unimi.dsi.fastutil.BidirectionalIterator} starting from a given element of the domain (not necessarily in the set). See, for instance, {@link it.unimi.dsi.fastutil.ints.IntSortedSet#iterator(int)}. The method is implemented by all type-specific sorted sets and subsets.
  • Finally, there are constructors that allow you to easily build sets using arrays and iterators. This means, for instance, that you can quickly create a set of strings with a statement like
    new ObjectOpenHashSet( new String[] { "foo", "bar" } )
    or just "unroll" the integers returned by an iterator into a list with
    new IntArrayList( iterator )
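
As a sketch of the counting pattern mentioned above for addTo() (together with defaultReturnValue(), which is discussed in the next list):

    import it.unimi.dsi.fastutil.ints.Int2IntOpenHashMap;

    public class CountingExample {
        public static void main( String[] args ) {
            int[] sequence = { 1, 2, 1, 3, 1 };
            Int2IntOpenHashMap counts = new Int2IntOpenHashMap();
            counts.defaultReturnValue( 0 );               // 0 is already the default; shown for clarity
            for( int x : sequence ) counts.addTo( x, 1 ); // one call instead of a get()/put() pair
            System.out.println( counts.get( 1 ) );        // 3
        }
    }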

There are a few quirks, however, that you should be aware of:

  • The versions of the {@link java.util.Map#get(Object) get()}, {@link java.util.Map#put(Object,Object) put()} and {@link java.util.Map#remove(Object) remove()} methods that return a primitive type cannot, of course, rely on returning null to denote the absence of a certain pair. Rather, they return a {@linkplain it.unimi.dsi.fastutil.ints.Int2LongMap#defaultReturnValue(long) default return value}, which is set to 0 cast to the return type (false for booleans) at creation, but can be changed using the defaultReturnValue() method (see, e.g., {@link it.unimi.dsi.fastutil.ints.Int2IntMap}). Note that changing the default return value does not change anything about the data structure; it is just a way to return a reasonably meaningful result—it can be changed at any time. For uniformity reasons, even maps returning objects can use defaultReturnValue() (of course, in this case the default return value is initialized to null). A submap or subset has an independent default return value (which however is initialized to the default return value of the originator).
  • For all maps that have objects as keys, the {@link java.util.Map#get(Object) get()} and {@link java.util.Map#remove(Object) remove()} methods do not admit polymorphic versions, as Java does not allow return-value polymorphism. Rather, the extended interfaces introduce new methods of the form {@link it.unimi.dsi.fastutil.objects.Object2IntOpenHashMap#getInt(Object) getvaluetype()} and {@link it.unimi.dsi.fastutil.objects.Object2IntOpenHashMap#removeInt(Object) removevaluetype()}. Similar problems occur with {@link it.unimi.dsi.fastutil.chars.CharSortedSet#firstChar() first()}, {@link it.unimi.dsi.fastutil.chars.CharSortedSet#lastChar() last()}, and so on.
  • Similarly, all iterators have a suitable method {@link it.unimi.dsi.fastutil.ints.IntIterator#nextInt() nexttype()} returning directly a primitive type. And, of course, you have a type-specific version of {@link java.util.ListIterator#previous() previous()}.
  • For the same reason, the method {@link java.util.Collection#toArray} has a polymorphic version accepting a type-specific array, but there are also explicitly typed methods {@link it.unimi.dsi.fastutil.bytes.ByteCollection#toByteArray() tokeytypeArray()}.
  • The standard entry-set iterators for hash-based maps use an entry object that refers to the data contained in the hash table. If you retrieve an entry and delete it, the entry object will become invalid and will throw an {@link java.lang.ArrayIndexOutOfBoundsException}. This does not apply to fast iterators (see above).
  • A name clash between the list and collection interfaces forces the deletion method of a collection to be named {@link it.unimi.dsi.fastutil.doubles.DoubleCollection#rem(double) rem()}. At the risk of creating some confusion, {@link it.unimi.dsi.fastutil.doubles.DoubleSet#remove(double) remove()} reappears in the type-specific set interfaces, so the only really unpleasant effect is that you must use rem() on variables that are collections, but not sets—for instance, {@linkplain it.unimi.dsi.fastutil.ints.IntList type-specific lists}, and that a subclass of a type-specific abstract collection must override rem(), rather than remove(), to make all inherited methods work properly.
  • There are type-specific versions of {@link java.util.Comparator} that require specifying both a type-specific comparison method and an object-based one; this is necessary as a type-specific comparator must implement {@link java.util.Comparator}. However, to simplify the creation of type-specific comparators there are abstract type-specific comparator classes that implement an object-based comparator wrapping the (abstract) type-specific one; thus, if you need to create a type-specific comparator you just have to inherit from those classes and define the type-specific method. Analogously for iterators.
  • Stacks are interfaces implemented by array-based lists: the interface, moreover, is slightly different from the implementation contained in {@link java.util.Stack}.

Functions

{@link it.unimi.dsi.fastutil.Function} (and its type-specific versions) is a new interface geared towards mathematical functions (e.g., hashes) which associates values to keys, but in which enumerating keys or values is not possible. It is essentially a {@link java.util.Map} that does not provide access to set representations. It is of course unfortunate that Java 8 introduced an identically named interface with a different signature.

fastutil provides interfaces, abstract implementations and the usual array of wrappers in the suitable static container (e.g., {@link it.unimi.dsi.fastutil.ints.Int2IntFunctions}). Implementations will be provided by other projects (e.g., Sux4J).

All fastutil type-specific maps extend their respective type-specific functions: but, alas, we cannot have {@link java.util.Map} extending {@link it.unimi.dsi.fastutil.Function}.

Static Container Classes

fastutil provides a number of static methods and singletons, much like {@link java.util.Collections}. To avoid creating classes with hundreds of methods, there are separate containers for sets, lists, maps and so on. Generic containers are placed in {@link it.unimi.dsi.fastutil}, whereas type-specific containers are in the appropriate package. You should look at the documentation of the static classes contained in {@link it.unimi.dsi.fastutil}, and in type-specific static classes such as {@link it.unimi.dsi.fastutil.chars.CharSets}, {@link it.unimi.dsi.fastutil.floats.Float2ByteSortedMaps}, {@link it.unimi.dsi.fastutil.longs.LongArrays}, {@link it.unimi.dsi.fastutil.floats.FloatHeaps}. Presently, you can easily obtain {@linkplain it.unimi.dsi.fastutil.objects.ObjectSets#EMPTY_SET empty collections}, {@linkplain it.unimi.dsi.fastutil.longs.Long2IntMaps#EMPTY_MAP empty type-specific collections}, {@linkplain it.unimi.dsi.fastutil.ints.IntLists#singleton(int) singletons}, {@linkplain it.unimi.dsi.fastutil.objects.Object2ReferenceSortedMaps#synchronize(Object2ReferenceSortedMap) synchronized versions} of any type-specific container and unmodifiable versions of {@linkplain it.unimi.dsi.fastutil.objects.ObjectLists#unmodifiable(ObjectList) containers} and {@linkplain it.unimi.dsi.fastutil.ints.IntIterators#unmodifiable(IntBidirectionalIterator) iterators} (of course, unmodifiable containers always return unmodifiable iterators).

On a completely different side, the {@linkplain it.unimi.dsi.fastutil.ints.IntArrays type-specific static container classes for arrays} provide several useful methods that allow you to treat an array much like an array-based list, hiding completely the growth logic. In many cases, using these methods and an array is even simpler than using a full-blown {@linkplain it.unimi.dsi.fastutil.doubles.DoubleArrayList type-specific array-based list} because element access is syntactically much simpler. The version for objects uses reflection to return arrays of the same type as the argument.
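
A minimal sketch of this array-as-list style (grow() and trim() are assumed to behave as their names suggest: enlarging an array while preserving its content, and dropping its unused tail):

    import it.unimi.dsi.fastutil.ints.IntArrays;

    public class ArrayGrowthExample {
        public static void main( String[] args ) {
            int[] a = new int[ 4 ];
            int size = 0;
            for( int x = 0; x < 100; x++ ) {
                a = IntArrays.grow( a, size + 1 ); // enlarges a if necessary, keeping its content
                a[ size++ ] = x;
            }
            a = IntArrays.trim( a, size );         // drop the unused tail
            System.out.println( a.length );        // 100
        }
    }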

For the same reason, fastutil provides a full implementation of methods that manipulate arrays as type-specific {@linkplain it.unimi.dsi.fastutil.ints.IntHeaps heaps}, {@linkplain it.unimi.dsi.fastutil.ints.IntSemiIndirectHeaps semi-indirect heaps} and {@linkplain it.unimi.dsi.fastutil.ints.IntIndirectHeaps indirect heaps}. There are also quicksort and mergesort implementations that use arbitrary type-specific comparators.

fastutil offers also a less common choice—a very tuned implementation of {@linkplain it.unimi.dsi.fastutil.ints.IntArrays#radixSort(int[],int,int) radix sort} for all primitive types. It is significantly faster than quicksort already at small sizes (say, more than 10000 elements), and should be considered the sorting algorithm of choice if you do not need a generic comparator.

There are several variants provided. First of all you can radix sort in parallel {@linkplain it.unimi.dsi.fastutil.ints.IntArrays#radixSort(int[],int[], int, int) two} or {@linkplain it.unimi.dsi.fastutil.ints.IntArrays#radixSort(int[][],int,int) even more} arrays. You can also perform {@linkplain it.unimi.dsi.fastutil.ints.IntArrays#radixSortIndirect(int[],int[],int,int,boolean) indirect} sorts, for instance if you want to compute the sorting permutation of an array.
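
A sketch of the two variants cited above, using the parallel and indirect signatures mentioned in the links (the stability flag applies to the indirect case):

    import it.unimi.dsi.fastutil.ints.IntArrays;

    public class RadixSortExample {
        public static void main( String[] args ) {
            int[] key = { 3, 1, 2 };
            int[] value = { 30, 10, 20 };
            IntArrays.radixSort( key, value, 0, key.length ); // sorts the pairs <key[i], value[i]> lexicographically
            int[] a = { 3, 1, 2 };
            int[] perm = { 0, 1, 2 };                          // identity permutation
            IntArrays.radixSortIndirect( perm, a, 0, a.length, true ); // perm becomes the (stable) sorting permutation
            System.out.println( key[ 0 ] + " " + a[ perm[ 0 ] ] );     // 1 1
        }
    }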

The sorting algorithm is a tuned radix sort adapted from Peter M. McIlroy, Keith Bostic and M. Douglas McIlroy, “Engineering radix sort”, Computing Systems, 6(1), pages 5−27 (1993), and further improved using the digit-oracle idea described by Juha Kärkkäinen and Tommi Rantala in “Engineering radix sort for strings”, String Processing and Information Retrieval, 15th International Symposium, volume 5280 of Lecture Notes in Computer Science, pages 3−14, Springer (2008). The basic algorithm is not stable, but this is immaterial for arrays of primitive types. For the indirect case, there is a parameter specifying whether the algorithm should be stable.

Iterators and Comparators

fastutil provides type-specific iterators and comparators. The interface of a fastutil iterator is slightly more powerful than that of a {@link java.util} iterator, as it contains a {@link it.unimi.dsi.fastutil.objects.ObjectIterator#skip(int) skip()} method that allows skipping over a list of elements (an {@linkplain it.unimi.dsi.fastutil.objects.ObjectBidirectionalIterator#back(int) analogous method} is provided for bidirectional iterators). For objects (even those managed by reference), the extended interface is named {@link it.unimi.dsi.fastutil.objects.ObjectIterator}; it is the return type, for instance, of {@link it.unimi.dsi.fastutil.objects.ObjectCollection#iterator()}. fastutil provides also classes and methods that make it easy to create type-specific iterators and comparators. There are abstract versions of each (type-specific) iterator and comparator that implement in the obvious way some of the methods (see, e.g., {@link it.unimi.dsi.fastutil.ints.AbstractIntIterator} or {@link it.unimi.dsi.fastutil.ints.AbstractIntComparator}).

A plethora of useful static methods is also provided by various type-specific static containers (e.g., {@link it.unimi.dsi.fastutil.ints.IntIterators}) and {@link it.unimi.dsi.fastutil.ints.IntComparators}: among other things, you can {@linkplain it.unimi.dsi.fastutil.ints.IntIterators#wrap(int[]) wrap arrays} and {@linkplain it.unimi.dsi.fastutil.ints.IntIterators#asIntIterator(java.util.Iterator) standard iterators} in type-specific iterators, {@linkplain it.unimi.dsi.fastutil.ints.IntIterators#fromTo(int,int) generate them} giving an interval of elements to be returned, {@linkplain it.unimi.dsi.fastutil.objects.ObjectIterators#concat(ObjectIterator[]) concatenate them} or {@linkplain it.unimi.dsi.fastutil.objects.ObjectIterators#pour(Iterator,ObjectCollection) pour them} into a set.
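
For instance, an interval of integers can be generated and poured into a type-specific collection with two of the static methods above (a sketch; the int version of pour() is assumed to mirror the object-based one linked above):

    import it.unimi.dsi.fastutil.ints.IntIterators;
    import it.unimi.dsi.fastutil.ints.IntOpenHashSet;

    public class IteratorUtilitiesExample {
        public static void main( String[] args ) {
            IntOpenHashSet s = new IntOpenHashSet();
            IntIterators.pour( IntIterators.fromTo( 0, 10 ), s ); // the integers in [0..10), poured into s
            System.out.println( s.size() );                       // 10
        }
    }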

Queues

fastutil offers two types of queues: direct queues and indirect queues. A direct queue offers type-specific method to {@linkplain it.unimi.dsi.fastutil.longs.LongPriorityQueue#enqueue(long) enqueue} and {@linkplain it.unimi.dsi.fastutil.longs.LongPriorityQueue#dequeueLong() dequeue} elements. An indirect queue needs a reference array, specified at construction time: {@linkplain it.unimi.dsi.fastutil.IndirectPriorityQueue#enqueue(int) enqueue} and {@linkplain it.unimi.dsi.fastutil.IndirectPriorityQueue#dequeue() dequeue} operations refer to indices in the reference array. The advantage is that it may be possible to {@linkplain it.unimi.dsi.fastutil.IndirectPriorityQueue#changed(int) notify the change} of any element of the reference array, or even to {@linkplain it.unimi.dsi.fastutil.IndirectPriorityQueue#remove(int) remove an arbitrary element}.

Queues have two implementations: a trivial array-based implementation, and a heap-based implementation. In particular, heap-based indirect queues may be {@linkplain it.unimi.dsi.fastutil.objects.ObjectHeapIndirectPriorityQueue fully indirect} or just {@linkplain it.unimi.dsi.fastutil.objects.ObjectHeapSemiIndirectPriorityQueue semi-indirect}: in the latter case, there is no need for an explicit indirection array (which saves one integer per queue entry), but not all operations will be available. Note that there are also {@linkplain it.unimi.dsi.fastutil.ints.IntArrayFIFOQueue FIFO queues}.
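
A minimal sketch of a direct queue (IntHeapPriorityQueue is assumed to be the int analogue of the heap-based queues mentioned above):

    import it.unimi.dsi.fastutil.ints.IntHeapPriorityQueue;

    public class QueueExample {
        public static void main( String[] args ) {
            IntHeapPriorityQueue q = new IntHeapPriorityQueue(); // natural order
            q.enqueue( 3 );
            q.enqueue( 1 );
            q.enqueue( 2 );
            while( ! q.isEmpty() ) System.out.println( q.dequeueInt() ); // 1, 2, 3
        }
    }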

Custom Hashing

Sometimes, the behaviour of the built-in equality and hashing methods is not what you want. In particular, this happens if you store in a hash-based collection arrays, and you would like to compare them by equality. For this kind of applications, fastutil provides {@linkplain it.unimi.dsi.fastutil.Hash.Strategy custom hash strategies}, which define new equality and hashing methods to be used inside the collection. There are even {@linkplain it.unimi.dsi.fastutil.ints.IntArrays#HASH_STRATEGY ready-made strategies} for arrays. Note, however, that fastutil containers do not cache hash codes, so custom hash strategies must be efficient.
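
A sketch of a set of int arrays compared by content, using the ready-made strategy mentioned above:

    import it.unimi.dsi.fastutil.ints.IntArrays;
    import it.unimi.dsi.fastutil.objects.ObjectOpenCustomHashSet;

    public class CustomHashExample {
        public static void main( String[] args ) {
            ObjectOpenCustomHashSet<int[]> s = new ObjectOpenCustomHashSet<int[]>( IntArrays.HASH_STRATEGY );
            s.add( new int[] { 1, 2, 3 } );
            System.out.println( s.contains( new int[] { 1, 2, 3 } ) ); // true: equality by content, not by reference
        }
    }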

Abstract Classes

fastutil provides a wide range of abstract classes, to help in implementing its interfaces. They take care, for instance, of providing wrappers for non-type-specific method calls, so that you have to write just the (usually simpler) type-specific version.

More on the support for very large collections

With the continuous increase in core memory available, Java arrays are starting to show their size limitation (indices cannot be larger than 2^31). fastutil proposes to store big arrays using arrays-of-arrays subject to certain size restrictions and a number of supporting static methods. Please read the documentation of {@link it.unimi.dsi.fastutil.BigArrays} to understand how big arrays work.

Correspondingly, fastutil proposes a new interface, called {@link it.unimi.dsi.fastutil.Size64}, that should be implemented by very large collections. {@link it.unimi.dsi.fastutil.Size64} contains a method {@link it.unimi.dsi.fastutil.Size64#size64()} which returns the collection size as a long integer.

fastutil provides {@linkplain it.unimi.dsi.fastutil.BigList big lists}, which are lists with 64-bit indices; of course, they implement {@link it.unimi.dsi.fastutil.Size64}. An implementation based on big arrays is provided (see, e.g., {@link it.unimi.dsi.fastutil.ints.IntBigArrayBigList}), as well as static containers (see, e.g., {@link it.unimi.dsi.fastutil.ints.IntBigLists}). Whereas it is unlikely that such collections will be held in main memory as big arrays, there are a number of situations, such as exposing large files through a list interface or storing a large amount of data using succinct data structures, in which a big list interface is natural.
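
A minimal sketch of a big list (a small one here, but indices and size are 64-bit):

    import it.unimi.dsi.fastutil.ints.IntBigArrayBigList;

    public class BigListExample {
        public static void main( String[] args ) {
            IntBigArrayBigList l = new IntBigArrayBigList();
            for( int i = 0; i < 1000; i++ ) l.add( i );
            System.out.println( l.getInt( 999L ) + " " + l.size64() ); // long index, long size
        }
    }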

Unfortunately, {@linkplain java.util.List lists} and {@linkplain it.unimi.dsi.fastutil.BigList big lists}, as well as {@linkplain java.util.ListIterator list iterators} and {@linkplain it.unimi.dsi.fastutil.BigListIterator big-list iterators}, cannot be made compatible: we thus provide adapters (see, e.g., {@link it.unimi.dsi.fastutil.ints.IntBigLists#asBigList(it.unimi.dsi.fastutil.ints.IntList)}).

Finally, fastutil provides {@linkplain it.unimi.dsi.fastutil.longs.LongOpenHashBigSet big hash sets}, which are based on big arrays. They are about 30% slower than non-big sets, but their size is limited only by the amount of core memory.

More on fast and practical I/O

fastutil includes an {@linkplain it.unimi.dsi.fastutil.io I/O package} that provides, for instance, {@linkplain it.unimi.dsi.fastutil.io.FastBufferedInputStream fast, unsynchronized buffered input streams}, {@linkplain it.unimi.dsi.fastutil.io.FastBufferedOutputStream fast, unsynchronized buffered output streams}, and a wealth of static methods to store and retrieve data in {@linkplain it.unimi.dsi.fastutil.io.TextIO textual} and {@linkplain it.unimi.dsi.fastutil.io.BinIO binary} form. The latter, in particular, contain methods that load and store big arrays.
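
A sketch of the typical wrapping pattern (the file name is arbitrary; close() flushes the buffer and closes the underlying stream):

    import it.unimi.dsi.fastutil.io.FastBufferedOutputStream;
    import java.io.FileOutputStream;
    import java.io.IOException;

    public class FastIOExample {
        public static void main( String[] args ) throws IOException {
            FastBufferedOutputStream out = new FastBufferedOutputStream( new FileOutputStream( "out.bin" ) );
            out.write( new byte[] { 1, 2, 3 } );
            out.close();
        }
    }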

Performance

The main reason behind fastutil is performance, both in time and in space. The relevant methods of type-specific hash maps and sets are something like 2 to 10 times faster than those of the standard classes. Note that performance of hash-based classes on object keys is usually slightly worse than that of {@link java.util}, because fastutil classes do not cache hash codes (albeit it will not be that bad if keys cache internally hash codes, as in the case of {@link java.lang.String}). Of course, you can try to get more speed from hash tables using a small load factor: to this purpose, alternative load factors are proposed in {@link it.unimi.dsi.fastutil.Hash#FAST_LOAD_FACTOR} and {@link it.unimi.dsi.fastutil.Hash#VERY_FAST_LOAD_FACTOR}.
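
For example, a faster (but sparser) hash map can be requested at construction time (a sketch; the expected size is arbitrary):

    import it.unimi.dsi.fastutil.Hash;
    import it.unimi.dsi.fastutil.ints.Int2IntOpenHashMap;

    public class LoadFactorExample {
        public static void main( String[] args ) {
            Int2IntOpenHashMap m = new Int2IntOpenHashMap( 1000, Hash.FAST_LOAD_FACTOR ); // expected size, load factor
            m.put( 1, 1 );
            System.out.println( m.size() );
        }
    }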

For tree-based classes you have two choices: AVL and red-black trees. The essential difference is that AVL trees are more balanced (their height is at most 1.44 log n), whereas red-black trees have faster deletions (but their height is at most 2 log n). So on small trees red-black trees could be faster, but on very large sets AVL trees will shine. In general, AVL trees have slightly slower updates but faster searches; however, on very large collections the smaller height may lead in fact to faster updates, too.

fastutil reduces enormously the creation and collection of objects. First of all, if you use the polymorphic methods and iterators no wrapper objects have to be created. Moreover, since fastutil uses open-addressing hashing techniques, creation and garbage collection of hash-table entries are avoided (but tables have to be rehashed whenever they are filled beyond the load factor). The major reduction of the number of objects around has a definite (but very difficult to measure) impact on the whole application (as garbage collection runs proportionally to the number of alive objects).

Maps whose iteration is very expensive in terms of object creation (e.g., hash-based classes) usually return a type-specific {@link it.unimi.dsi.fastutil.ints.Int2IntMap.FastEntrySet FastEntrySet} whose {@link it.unimi.dsi.fastutil.ints.Int2IntMap.FastEntrySet#fastIterator() fastIterator()} method significantly reduces object creation by returning always the same entry object, suitably mutated.
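
A sketch of iteration via a fast entry set (the entry returned by the iterator is reused at each step, so it must not be stored):

    import it.unimi.dsi.fastutil.ints.Int2IntMap;
    import it.unimi.dsi.fastutil.ints.Int2IntOpenHashMap;
    import it.unimi.dsi.fastutil.objects.ObjectIterator;

    public class FastIteratorExample {
        public static void main( String[] args ) {
            Int2IntOpenHashMap m = new Int2IntOpenHashMap();
            m.put( 1, 10 );
            m.put( 2, 20 );
            long sum = 0;
            for( ObjectIterator<Int2IntMap.Entry> i = m.int2IntEntrySet().fastIterator(); i.hasNext(); )
                sum += i.next().getIntValue();
            System.out.println( sum ); // 30
        }
    }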

Whenever possible, fastutil tries to gain some speed by checking for faster interfaces: for instance, the various set-theoretic methods addAll(), retainAll(), etc. check whether their arguments are type-specific and use faster iterators and accessors accordingly.

Faster Hash Tables

fastutil 6.1.0 significantly changed the implementation of hash-based classes: instead of double hashing, we now use linear probing. This has some consequences:

  • the classes are now about two times faster;
  • deletions are effective—there is no “marking” of deleted entries (the claim that this was impossible with open addressing was, of course, wrong);
  • given a size and a load factor, the backing array of a table will be in general larger (in the worst case about two times larger);
  • it is no longer possible to set the growth factor of the table, which is fixed at 2 (the old methods to control the growth factor {@linkplain it.unimi.dsi.fastutil.ints.Int2IntOpenHashMap#growthFactor(int) are now no-ops}—they are kept just for backward compatibility);
  • there are efficient implementations of {@linkplain it.unimi.dsi.fastutil.ints.IntOpenHashBigSet big sets}.

Memory Usage

The absence of wrappers makes fastutil data structures much smaller; even in the case of object keys and values, however, fastutil data structures try to be space-efficient.

Hash Tables

To avoid memory waste, (unlinked) hash tables in fastutil keep no additional information about elements (such as a list of keys). In particular, this means that enumeration is always linear in the size of the table (rather than in the number of keys). Usually, this would imply slower iterators. Nonetheless, the iterator code is a single, tight loop, and no wrapper objects need to be created: these two facts make fastutil iterators, in practice, faster than {@link java.util}'s.

The memory footprint of a table of length ℓ is exactly the memory required by the related types times ℓ. The absence of wrappers around primitive types can reduce space occupancy by several times (this applies even more to serialized data, e.g., when you save such a data structure in a file). These figures can vary greatly with your virtual machine, JVM version, CPU, and so on.

More precisely, when you ask for a map that will hold n elements with load factor f (0 < f ≤ 1), 2^⌈log₂(n/f)⌉ entries are allocated. When the table is filled up beyond the load factor, it is rehashed, doubling its size. When it is emptied below a fourth of the load factor, it is rehashed, halving its size.
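
This quantity can be computed with {@link it.unimi.dsi.fastutil.HashCommon#arraySize(int, float)} (a small sketch; the expected sizes are arbitrary):

HashCommon.arraySize( 1000, .75f ); // This method call will return 2048
HashCommon.arraySize( 1000, .25f ); // This method call will return 4096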

In the case of linked hash tables, there is an additional array of 2^⌈log₂(n/f)⌉ longs that is used to store link information: each element records the next and previous element (packed together so as to be more cache friendly).

Balanced Trees

The balanced-tree implementation is also very parsimonious. fastutil is based on the excellent (and unbelievably well documented) code contained in Ben Pfaff's GNU libavl, which describes in detail how to handle balanced trees with threads. Thus, the overhead per entry is two pointers and one integer, which compares well with the three pointers plus one boolean of the standard tree maps. The trick is that we use the integer bit by bit: two bits store thread information, and one or two more bits handle balancing. As a result, we get bidirectional iterators in constant space and amortized constant time without having to store references to parent nodes.

It should be mentioned that all tree-based classes have a fixed overhead for some arrays that are used as stacks to simulate recursion; in particular, we need 48 booleans for AVL trees and 64 pointers plus 64 booleans for red-black trees.

An Example

Suppose you want to store a sorted map from longs to integers. The first step is to define a variable of the right interface, and assign it a new tree map (say, of the AVL type):

Long2IntSortedMap m = new Long2IntAVLTreeMap();
    

Now we can easily modify and access its content:

m.put( 1, 5 );
m.put( 2, 6 );
m.put( 3, 7 );
m.put( 1000000000L, 10 );
m.get( 1 ); // This method call will return 5
m.get( 4 ); // This method call will return 0
    

We can also change the default return value:

m.defaultReturnValue( -1 );
m.get( 4 ); // This method call will return -1
    

We can obtain a type-specific iterator on the key set:

LongBidirectionalIterator i = m.keySet().iterator();
// Now we sum all keys
long s = 0;
while( i.hasNext() ) s += i.nextLong();
    

We now generate a head map, and iterate bidirectionally over it starting from a given point:

// This map contains only keys smaller than 4
Long2IntSortedMap m1 = m.headMap( 4 );
// This iterator is positioned between 2 and 3
LongBidirectionalIterator t = m1.keySet().iterator( 2 );
t.previous(); // This method call will return 2 (t.next() would return 3)
    

Should we need to access the map concurrently, we can wrap it:

// This map can be safely accessed by many threads
Long2IntSortedMap m2 = Long2IntSortedMaps.synchronize( m1 );
    

Linked maps and sets are very flexible data structures that can be used to implement, for instance, queues whose content can be probed efficiently:

// This set remembers insertion order (note that we are using the array-based constructor)
IntSortedSet s = new IntLinkedOpenHashSet( new int[] { 4, 3, 2, 1 } );
s.firstInt(); // This method call will return 4
s.lastInt(); // This method call will return 1
s.contains( 5 ); // This method call will return false
IntBidirectionalIterator i = s.iterator( s.lastInt() ); // We could even cast it to a list iterator
i.previous(); // This method call will return 1
i.previous(); // This method call will return 2
s.remove( s.lastInt() ); // This will remove the last element in constant time
    

Now, we play with iterators. It is easy to create iterators over intervals or over arrays, and combine them:

IntIterator i = IntIterators.fromTo( 0, 10 ); // This iterator will return 0, 1, ..., 9
int[] a = new int[] { 5, 1, 9 };
IntIterator j = IntIterators.wrap( a ); // This iterator will return 5, 1, 9.
IntIterator k = IntIterators.concat( new IntIterator[] { i , j } ); // This iterator will return 0, 1, ..., 9, 5, 1, 9
    

It is easy to build sets and maps on the fly using the array-based constructors:

IntSet s = new IntOpenHashSet( new int[] { 1, 2, 3 } ); // This set will contain 1, 2, and 3
Char2IntMap m = new Char2IntRBTreeMap( new char[] { '@', '-' }, new int[] { 0, 1 } ); // This map will map '@' to 0 and '-' to 1

Whenever you have some data structure, it is easy to serialize it in an efficient (buffered) way, or to dump its content in textual form:

BinIO.storeObject( s, "foo" ); // This method call will save s in the file named "foo"
TextIO.storeInts( s.intIterator(), "foo.txt" ); // This method call will save the content of s in ASCII
i = TextIO.asIntIterator( "foo.txt" ); // This iterator will parse the file and return the integers therein
    
fastutil-7.1.0/test/it/unimi/dsi/fastutil/ArraysTest.java0000664000000000000000000001125013050705451022143 0ustar rootrootpackage it.unimi.dsi.fastutil; import static org.junit.Assert.assertTrue; import it.unimi.dsi.fastutil.ints.AbstractIntComparator; import it.unimi.dsi.fastutil.ints.IntArrays; import it.unimi.dsi.fastutil.ints.IntArraysTest; import java.util.Random; import org.junit.Test; public class ArraysTest { private static void testMergeSort( final int x[] ) { testMergeSort( x, 0, x.length ); } private static void testMergeSort( final int x[], int from, int to ) { Arrays.mergeSort( from, to, new AbstractIntComparator() { private static final long serialVersionUID = 1L; @Override public int compare( int k1, int k2 ) { return Integer.compare( x[ k1 ], x[ k2 ] ); } }, new Swapper() { @Override public void swap( int k1, int k2 ) { final int t = x[ k1 ]; x[ k1 ] = x[ k2 ]; x[ k2 ] = t; } }); for( int i = to - 1; i-- != from; ) assertTrue( x[ i ] <= x[ i + 1 ] ); } @Test public void testMergeSort() { testMergeSort( new int[] { 2, 1, 0, 4 } ); testMergeSort( new int[] { 2, -1, 0, -4 } ); testMergeSort( IntArrays.shuffle( IntArraysTest.identity( 100 ), new Random( 0 ) ) ); int[] t = new int[ 100 ]; Random random = new Random( 0 ); for( int i = t.length; i-- != 0; ) t[ i ] = random.nextInt(); testMergeSort( t ); t = new int[ 100000 ]; random = new Random( 0 ); for( int i = t.length; i-- != 0; ) t[ i ] = random.nextInt(); testMergeSort( t ); for( int i = 100; i-- != 10; ) t[ i ] = random.nextInt(); testMergeSort( t, 10, 100 ); for( int i = t.length; i-- != 0; ) t[ i ] = random.nextInt() & 0xF; testMergeSort( t ); t = new int[ 10000000 ]; random = new Random( 0 ); for( int i = t.length; i-- != 0; ) t[ i ] = random.nextInt(); testMergeSort( t ); } private static void testQuickSort( final int x[] ) { testQuickSort( x, 0, x.length ); } private static void testQuickSort( final int x[], int from, int to ) { Arrays.quickSort( from, to, new AbstractIntComparator() { private static final long serialVersionUID = 1L; @Override public int compare( int k1, int k2 ) { return Integer.compare( x[ k1 ], x[ k2 ] ); } }, new Swapper() { @Override public void swap( int k1, int k2 ) { final int t = x[ k1 ]; x[ k1 ] = x[ k2 ]; x[ k2 ] = t; } }); for( int i = to - 1; i-- != from; ) assertTrue( x[ i ] <= x[ i + 1 ] ); } @Test public void testQuickSort() { testQuickSort( new int[] { 2, 1, 0, 4 } ); testQuickSort( new int[] { 2, -1, 0, -4 } ); testQuickSort( IntArrays.shuffle( IntArraysTest.identity( 100 ), new Random( 0 ) ) ); int[] t = new int[ 100 ]; Random random = new Random( 0 ); for( int i = t.length; i-- != 0; ) t[ i ] = random.nextInt(); testQuickSort( t ); t = new int[ 100000 ]; random = new Random( 0 ); for( int i = t.length; i-- != 0; ) t[ i ] = random.nextInt(); testQuickSort( t ); for( int i = 100; i-- != 10; ) t[ i ] = random.nextInt(); testQuickSort( t, 10, 100 ); for( int i = t.length; i-- != 0; ) t[ i ] = random.nextInt() & 0xF; testQuickSort( t ); t = new int[ 10000000 ]; random = new Random( 0 ); for( int i = t.length; i-- != 0; ) t[ i ] = random.nextInt(); testQuickSort( t ); } private static void testParallelQuickSort( final int x[] ) { testParallelQuickSort( x, 0, x.length ); } private static void testParallelQuickSort( final int x[], int from, int to ) { Arrays.parallelQuickSort( from, to, new AbstractIntComparator() { private static final long serialVersionUID = 1L; @Override public int compare( int k1, int k2 ) { return Integer.compare( x[ k1 ], x[ k2 ] ); } }, new Swapper() { @Override 
public void swap( int k1, int k2 ) { final int t = x[ k1 ]; x[ k1 ] = x[ k2 ]; x[ k2 ] = t; } }); for( int i = to - 1; i-- != from; ) assertTrue( x[ i ] <= x[ i + 1 ] ); } @Test public void testParallelQuickSort() { testParallelQuickSort( new int[] { 2, 1, 0, 4 } ); testParallelQuickSort( new int[] { 2, -1, 0, -4 } ); testParallelQuickSort( IntArrays.shuffle( IntArraysTest.identity( 100 ), new Random( 0 ) ) ); int[] t = new int[ 100 ]; Random random = new Random( 0 ); for( int i = t.length; i-- != 0; ) t[ i ] = random.nextInt(); testParallelQuickSort( t ); t = new int[ 100000 ]; random = new Random( 0 ); for( int i = t.length; i-- != 0; ) t[ i ] = random.nextInt(); testParallelQuickSort( t ); for( int i = 100; i-- != 10; ) t[ i ] = random.nextInt(); testParallelQuickSort( t, 10, 100 ); for( int i = t.length; i-- != 0; ) t[ i ] = random.nextInt() & 0xF; testParallelQuickSort( t ); t = new int[ 10000000 ]; random = new Random( 0 ); for( int i = t.length; i-- != 0; ) t[ i ] = random.nextInt(); testParallelQuickSort( t ); } } fastutil-7.1.0/test/it/unimi/dsi/fastutil/BigArraysTest.java0000664000000000000000000000475713050705451022603 0ustar rootrootpackage it.unimi.dsi.fastutil; import it.unimi.dsi.fastutil.BigArrays; import it.unimi.dsi.fastutil.BigSwapper; import it.unimi.dsi.fastutil.ints.IntBigArrays; import it.unimi.dsi.fastutil.longs.AbstractLongComparator; import java.util.Arrays; import static org.junit.Assert.*; import org.junit.Test; public class BigArraysTest { @Test public void testMergeSort() { int[] s = new int[] { 2, 1, 5, 2, 1, 0, 9, 1, 4, 2, 4, 6, 8, 9, 10, 12, 1, 7 }; final int[][] a = IntBigArrays.wrap( s.clone() ); Arrays.sort( s ); int[][] sorted = IntBigArrays.wrap( s.clone() ); BigArrays.mergeSort( 0, IntBigArrays.length( a ), new AbstractLongComparator() { private static final long serialVersionUID = 1L; @Override public int compare( long k1, long k2 ) { return IntBigArrays.get( a, k1 ) - IntBigArrays.get( a, k2 ); } }, new BigSwapper() { @Override public void swap( long k1, long k2 ) { IntBigArrays.swap( a, k1, k2 ); } }); assertArrayEquals( sorted, a ); BigArrays.mergeSort( 0, IntBigArrays.length( a ), new AbstractLongComparator() { private static final long serialVersionUID = 1L; @Override public int compare( long k1, long k2 ) { return IntBigArrays.get( a, k1 ) - IntBigArrays.get( a, k2 ); } }, new BigSwapper() { @Override public void swap( long k1, long k2 ) { IntBigArrays.swap( a, k1, k2 ); } }); assertArrayEquals( sorted, a ); } @Test public void testQuickSort() { int[] s = new int[] { 2, 1, 5, 2, 1, 0, 9, 1, 4, 2, 4, 6, 8, 9, 10, 12, 1, 7 }; Arrays.sort( s ); int[][] sorted = IntBigArrays.wrap( s.clone() ); final int[][] a = IntBigArrays.wrap( s.clone() ); BigArrays.quickSort( 0, IntBigArrays.length( a ), new AbstractLongComparator() { private static final long serialVersionUID = 1L; @Override public int compare( long k1, long k2 ) { return IntBigArrays.get( a, k1 ) - IntBigArrays.get( a, k2 ); } }, new BigSwapper() { @Override public void swap( long k1, long k2 ) { IntBigArrays.swap( a, k1, k2 ); } }); assertArrayEquals( sorted, a ); BigArrays.quickSort( 0, IntBigArrays.length( a ), new AbstractLongComparator() { private static final long serialVersionUID = 1L; @Override public int compare( long k1, long k2 ) { return IntBigArrays.get( a, k1 ) - IntBigArrays.get( a, k2 ); } }, new BigSwapper() { @Override public void swap( long k1, long k2 ) { IntBigArrays.swap( a, k1, k2 ); } }); assertArrayEquals( sorted, a ); } } 
fastutil-7.1.0/test/it/unimi/dsi/fastutil/HashCommonTest.java0000664000000000000000000000210313050705451022733 0ustar rootrootpackage it.unimi.dsi.fastutil; import static org.junit.Assert.assertTrue; import static org.junit.Assert.assertEquals; import org.junit.Test; public class HashCommonTest { @Test public void testMaxFillSmall() { for( float f: new float[] { 0.0001f, .25f, .50f, .75f, .9999f } ) { for( int i = 0; i < 16; i++ ) { final int n = HashCommon.arraySize( i, f ); final int maxFill = HashCommon.maxFill( n, f ); assertTrue( n + " <= " + maxFill, n > maxFill ); } for( long i = 0; i < 16; i++ ) { final long n = HashCommon.bigArraySize( i, f ); final long maxFill = HashCommon.maxFill( n, f ); assertTrue( n + " <= " + maxFill, n > maxFill ); } } } @Test public void testInverses() { for( int i = 0 ; i < 1 << 30; i += 10000 ) { assertEquals( i, HashCommon.invMix( HashCommon.mix( i ) ) ); assertEquals( i, HashCommon.mix( HashCommon.invMix( i ) ) ); } for( long i = 0 ; i < 1 << 62; i += 1000000 ) { assertEquals( i, HashCommon.invMix( HashCommon.mix( i ) ) ); assertEquals( i, HashCommon.mix( HashCommon.invMix( i ) ) ); } } } fastutil-7.1.0/test/it/unimi/dsi/fastutil/bytes/ByteArrayFrontCodedListTest.java0000664000000000000000000001105213050705451026536 0ustar rootrootpackage it.unimi.dsi.fastutil.bytes; import static org.junit.Assert.assertTrue; import it.unimi.dsi.fastutil.objects.ObjectListIterator; import java.io.IOException; import org.junit.Test; @SuppressWarnings({"rawtypes", "unchecked"}) public class ByteArrayFrontCodedListTest { private static java.util.Random r = new java.util.Random( 0 ); private static byte genKey() { return (byte)( r.nextInt() ); } private static boolean contentEquals( java.util.List x, java.util.List y ) { if ( x.size() != y.size() ) return false; for ( int i = 0; i < x.size(); i++ ) if ( !java.util.Arrays.equals( (byte[])x.get( i ), (byte[])y.get( i ) ) ) return false; return true; } private static int l[]; private static byte[][] a; private static void test( int n ) throws IOException, ClassNotFoundException { l = new int[ n ]; a = new byte[ n ][]; for ( int i = 0; i < n; i++ ) l[ i ] = (int)( Math.abs( r.nextGaussian() ) * 32 ); for ( int i = 0; i < n; i++ ) a[ i ] = new byte[ l[ i ] ]; for ( int i = 0; i < n; i++ ) for ( int j = 0; j < l[ i ]; j++ ) a[ i ][ j ] = genKey(); ByteArrayFrontCodedList m = new ByteArrayFrontCodedList( it.unimi.dsi.fastutil.objects.ObjectIterators.wrap( a ), r.nextInt( 4 ) + 1 ); it.unimi.dsi.fastutil.objects.ObjectArrayList t = new it.unimi.dsi.fastutil.objects.ObjectArrayList( a ); // System.out.println(m); // for( i = 0; i < t.size(); i++ ) // System.out.println(ARRAY_LIST.wrap((KEY_TYPE[])t.get(i))); /* Now we check that m actually holds that data. */ assertTrue( "Error: m does not equal t at creation", contentEquals( m, t ) ); /* Now we check cloning. */ assertTrue( "Error: m does not equal m.clone()", contentEquals( m, m.clone() ) ); /* Now we play with iterators. 
*/ { ObjectListIterator i; java.util.ListIterator j; i = m.listIterator(); j = t.listIterator(); for ( int k = 0; k < 2 * n; k++ ) { assertTrue( "Error: divergence in hasNext()", i.hasNext() == j.hasNext() ); assertTrue( "Error: divergence in hasPrevious()", i.hasPrevious() == j.hasPrevious() ); if ( r.nextFloat() < .8 && i.hasNext() ) { assertTrue( "Error: divergence in next()", java.util.Arrays.equals( (byte[])i.next(), (byte[])j.next() ) ); } else if ( r.nextFloat() < .2 && i.hasPrevious() ) { assertTrue( "Error: divergence in previous()", java.util.Arrays.equals( (byte[])i.previous(), (byte[])j.previous() ) ); } assertTrue( "Error: divergence in nextIndex()", i.nextIndex() == j.nextIndex() ); assertTrue( "Error: divergence in previousIndex()", i.previousIndex() == j.previousIndex() ); } } { int from = r.nextInt( m.size() + 1 ); ObjectListIterator i; java.util.ListIterator j; i = m.listIterator( from ); j = t.listIterator( from ); for ( int k = 0; k < 2 * n; k++ ) { assertTrue( "Error: divergence in hasNext() (iterator with starting point " + from + ")", i.hasNext() == j.hasNext() ); assertTrue( "Error: divergence in hasPrevious() (iterator with starting point " + from + ")", i.hasPrevious() == j.hasPrevious() ); if ( r.nextFloat() < .8 && i.hasNext() ) { assertTrue( "Error: divergence in next() (iterator with starting point " + from + ")", java.util.Arrays.equals( (byte[])i.next(), (byte[])j.next() ) ); // System.err.println("Done next " + I + " " + J + " " + badPrevious); } else if ( r.nextFloat() < .2 && i.hasPrevious() ) { assertTrue( "Error: divergence in previous() (iterator with starting point " + from + ")", java.util.Arrays.equals( (byte[])i.previous(), (byte[])j.previous() ) ); } } } java.io.File ff = new java.io.File( "it.unimi.dsi.fastutil.test" ); java.io.OutputStream os = new java.io.FileOutputStream( ff ); java.io.ObjectOutputStream oos = new java.io.ObjectOutputStream( os ); oos.writeObject( m ); oos.close(); java.io.InputStream is = new java.io.FileInputStream( ff ); java.io.ObjectInputStream ois = new java.io.ObjectInputStream( is ); m = (ByteArrayFrontCodedList)ois.readObject(); ois.close(); ff.delete(); assertTrue( "Error: m does not equal t after save/read", contentEquals( m, t ) ); return; } @Test public void test1() throws IOException, ClassNotFoundException { test( 1 ); } @Test public void test10() throws Exception, ClassNotFoundException { test( 10 ); } @Test public void test100() throws IOException, ClassNotFoundException { test( 100 ); } @Test public void test1000() throws IOException, ClassNotFoundException { test( 1000 ); } @Test public void test10000() throws IOException, ClassNotFoundException { test( 10000 ); } } fastutil-7.1.0/test/it/unimi/dsi/fastutil/bytes/ByteArraysTest.java0000664000000000000000000001644713050705451024132 0ustar rootrootpackage it.unimi.dsi.fastutil.bytes; import static org.junit.Assert.assertTrue; import java.util.Random; import org.junit.Test; public class ByteArraysTest { private static byte[] castIdentity( int n ) { final byte[] a = new byte[ n ]; while( n-- != 0 ) a[ n ] = (byte)n; return a; } @Test public void testRadixSort1() { byte[] t = { 2, 1, 0, 4 }; ByteArrays.radixSort( t ); for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ i ] <= t[ i + 1 ] ); t = new byte[] { 2, (byte)-1, 0, (byte)-4 }; ByteArrays.radixSort( t ); for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ i ] <= t[ i + 1 ] ); t = ByteArrays.shuffle( castIdentity( 100 ), new Random( 0 ) ); ByteArrays.radixSort( t ); for( int i = t.length - 1; i-- != 0; ) 
assertTrue( t[ i ] <= t[ i + 1 ] ); t = new byte[ 100 ]; Random random = new Random( 0 ); for( int i = t.length; i-- != 0; ) t[ i ] = (byte)random.nextInt(); ByteArrays.radixSort( t ); for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ i ] <= t[ i + 1 ] ); t = new byte[ 100000 ]; random = new Random( 0 ); for( int i = t.length; i-- != 0; ) t[ i ] = (byte)random.nextInt(); ByteArrays.radixSort( t ); for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ i ] <= t[ i + 1 ] ); t = new byte[ 10000000 ]; random = new Random( 0 ); for( int i = t.length; i-- != 0; ) t[ i ] = (byte)random.nextInt(); ByteArrays.radixSort( t ); for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ i ] <= t[ i + 1 ] ); } @Test public void testRadixSort2() { byte[][] d = new byte[ 2 ][]; d[ 0 ] = new byte[ 10 ]; for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = (byte)( 3 - i % 3 ); d[ 1 ] = ByteArrays.shuffle( castIdentity( 10 ), new Random( 0 ) ); ByteArrays.radixSort( d[ 0 ], d[ 1 ] ); for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " + d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] ); d[ 0 ] = new byte[ 100000 ]; for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = (byte)( 100 - i % 100 ); d[ 1 ] = ByteArrays.shuffle( castIdentity( 100000 ), new Random( 6 ) ); ByteArrays.radixSort( d[ 0 ], d[ 1 ] ); for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " + d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] ); d[ 0 ] = new byte[ 10 ]; for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = (byte)( i % 3 - 2 ); Random random = new Random( 0 ); d[ 1 ] = new byte[ d[ 0 ].length ]; for( int i = d.length; i-- != 0; ) d[ 1 ][ i ] = (byte)random.nextInt(); ByteArrays.radixSort( d[ 0 ], d[ 1 ] ); for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " + d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] ); d[ 0 ] = new byte[ 100000 ]; random = new Random( 0 ); for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = (byte)random.nextInt(); d[ 1 ] = new byte[ d[ 0 ].length ]; for( int i = d.length; i-- != 0; ) d[ 1 ][ i ] = (byte)random.nextInt(); ByteArrays.radixSort( d[ 0 ], d[ 1 ] ); for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " + d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] ); d[ 0 ] = new byte[ 10000000 ]; random = new Random( 0 ); for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = (byte)random.nextInt(); d[ 1 ] = new byte[ d[ 0 ].length ]; for( int i = d.length; i-- != 0; ) d[ 1 ][ i ] = (byte)random.nextInt(); ByteArrays.radixSort( d[ 0 ], d[ 1 ] ); for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " + d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] ); } @Test public void testRadixSort() { byte[][] t = { { 2, 1, 0, 4 } }; ByteArrays.radixSort( t ); for( 
int i = t[ 0 ].length - 1; i-- != 0; ) assertTrue( t[ 0 ][ i ] <= t[ 0 ][ i + 1 ] ); t[ 0 ] = ByteArrays.shuffle( castIdentity( 100 ), new Random( 0 ) ); ByteArrays.radixSort( t ); for( int i = t[ 0 ].length - 1; i-- != 0; ) assertTrue( t[ 0 ][ i ] <= t[ 0 ][ i + 1 ] ); byte[][] d = new byte[ 2 ][]; d[ 0 ] = new byte[ 10 ]; for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = (byte)( 3 - i % 3 ); d[ 1 ] = ByteArrays.shuffle( castIdentity( 10 ), new Random( 0 ) ); ByteArrays.radixSort( d ); for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " + d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] ); d[ 0 ] = new byte[ 100000 ]; for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = (byte)( 100 - i % 100 ); d[ 1 ] = ByteArrays.shuffle( castIdentity( 100000 ), new Random( 6 ) ); ByteArrays.radixSort( d ); for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " + d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] ); d[ 0 ] = new byte[ 10 ]; Random random = new Random( 0 ); for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = (byte)random.nextInt(); d[ 1 ] = new byte[ d[ 0 ].length ]; for( int i = d.length; i-- != 0; ) d[ 1 ][ i ] = (byte)random.nextInt(); ByteArrays.radixSort( d ); for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " + d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] ); d[ 0 ] = new byte[ 100000 ]; random = new Random( 0 ); for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = (byte)random.nextInt(); d[ 1 ] = new byte[ d[ 0 ].length ]; for( int i = d.length; i-- != 0; ) d[ 1 ][ i ] = (byte)random.nextInt(); ByteArrays.radixSort( d ); for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " + d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] ); d[ 0 ] = new byte[ 10000000 ]; random = new Random( 0 ); for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = (byte)random.nextInt(); d[ 1 ] = new byte[ d[ 0 ].length ]; for( int i = d.length; i-- != 0; ) d[ 1 ][ i ] = (byte)random.nextInt(); ByteArrays.radixSort( d ); for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " + d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] ); } } fastutil-7.1.0/test/it/unimi/dsi/fastutil/chars/CharArrayFrontCodedListTest.java0000664000000000000000000001105513050705451026465 0ustar rootrootpackage it.unimi.dsi.fastutil.chars; import static org.junit.Assert.assertTrue; import it.unimi.dsi.fastutil.objects.ObjectListIterator; import java.io.IOException; import org.junit.Test; @SuppressWarnings({ "rawtypes", "unchecked" }) public class CharArrayFrontCodedListTest { private static java.util.Random r = new java.util.Random( 0 ); private static char genKey() { return (char)( r.nextInt() ); } private static boolean contentEquals( java.util.List x, java.util.List y ) { if ( 
x.size() != y.size() ) return false; for ( int i = 0; i < x.size(); i++ ) if ( !java.util.Arrays.equals( (char[])x.get( i ), (char[])y.get( i ) ) ) return false; return true; } private static int l[]; private static char[][] a; private static void test( int n ) throws IOException, ClassNotFoundException { l = new int[ n ]; a = new char[ n ][]; for ( int i = 0; i < n; i++ ) l[ i ] = (int)( Math.abs( r.nextGaussian() ) * 32 ); for ( int i = 0; i < n; i++ ) a[ i ] = new char[ l[ i ] ]; for ( int i = 0; i < n; i++ ) for ( int j = 0; j < l[ i ]; j++ ) a[ i ][ j ] = genKey(); CharArrayFrontCodedList m = new CharArrayFrontCodedList( it.unimi.dsi.fastutil.objects.ObjectIterators.wrap( a ), r.nextInt( 4 ) + 1 ); it.unimi.dsi.fastutil.objects.ObjectArrayList t = new it.unimi.dsi.fastutil.objects.ObjectArrayList( a ); // System.out.println(m); // for( i = 0; i < t.size(); i++ ) // System.out.println(ARRAY_LIST.wrap((KEY_TYPE[])t.get(i))); /* Now we check that m actually holds that data. */ assertTrue( "Error: m does not equal t at creation", contentEquals( m, t ) ); /* Now we check cloning. */ assertTrue( "Error: m does not equal m.clone()", contentEquals( m, m.clone() ) ); /* Now we play with iterators. */ { ObjectListIterator i; java.util.ListIterator j; i = m.listIterator(); j = t.listIterator(); for ( int k = 0; k < 2 * n; k++ ) { assertTrue( "Error: divergence in hasNext()", i.hasNext() == j.hasNext() ); assertTrue( "Error: divergence in hasPrevious()", i.hasPrevious() == j.hasPrevious() ); if ( r.nextFloat() < .8 && i.hasNext() ) { assertTrue( "Error: divergence in next()", java.util.Arrays.equals( (char[])i.next(), (char[])j.next() ) ); } else if ( r.nextFloat() < .2 && i.hasPrevious() ) { assertTrue( "Error: divergence in previous()", java.util.Arrays.equals( (char[])i.previous(), (char[])j.previous() ) ); } assertTrue( "Error: divergence in nextIndex()", i.nextIndex() == j.nextIndex() ); assertTrue( "Error: divergence in previousIndex()", i.previousIndex() == j.previousIndex() ); } } { int from = r.nextInt( m.size() + 1 ); ObjectListIterator i; java.util.ListIterator j; i = m.listIterator( from ); j = t.listIterator( from ); for ( int k = 0; k < 2 * n; k++ ) { assertTrue( "Error: divergence in hasNext() (iterator with starting point " + from + ")", i.hasNext() == j.hasNext() ); assertTrue( "Error: divergence in hasPrevious() (iterator with starting point " + from + ")", i.hasPrevious() == j.hasPrevious() ); if ( r.nextFloat() < .8 && i.hasNext() ) { assertTrue( "Error: divergence in next() (iterator with starting point " + from + ")", java.util.Arrays.equals( (char[])i.next(), (char[])j.next() ) ); // System.err.println("Done next " + I + " " + J + " " + badPrevious); } else if ( r.nextFloat() < .2 && i.hasPrevious() ) { assertTrue( "Error: divergence in previous() (iterator with starting point " + from + ")", java.util.Arrays.equals( (char[])i.previous(), (char[])j.previous() ) ); } } } java.io.File ff = new java.io.File( "it.unimi.dsi.fastutil.test" ); java.io.OutputStream os = new java.io.FileOutputStream( ff ); java.io.ObjectOutputStream oos = new java.io.ObjectOutputStream( os ); oos.writeObject( m ); oos.close(); java.io.InputStream is = new java.io.FileInputStream( ff ); java.io.ObjectInputStream ois = new java.io.ObjectInputStream( is ); m = (CharArrayFrontCodedList)ois.readObject(); ois.close(); ff.delete(); assertTrue( "Error: m does not equal t after save/read", contentEquals( m, t ) ); return; } @Test public void test1() throws IOException, ClassNotFoundException { test( 1 ); } 
@Test public void test10() throws Exception, ClassNotFoundException { test( 10 ); } @Test public void test100() throws IOException, ClassNotFoundException { test( 100 ); } @Test public void test1000() throws IOException, ClassNotFoundException { test( 1000 ); } @Test public void test10000() throws IOException, ClassNotFoundException { test( 10000 ); } } fastutil-7.1.0/test/it/unimi/dsi/fastutil/chars/CharArraysTest.java0000664000000000000000000001644713050705451024056 0ustar rootrootpackage it.unimi.dsi.fastutil.chars; import static org.junit.Assert.assertTrue; import java.util.Random; import org.junit.Test; public class CharArraysTest { private static char[] castIdentity( int n ) { final char[] a = new char[ n ]; while( n-- != 0 ) a[ n ] = (char)n; return a; } @Test public void testRadixSort1() { char[] t = { 2, 1, 0, 4 }; CharArrays.radixSort( t ); for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ i ] <= t[ i + 1 ] ); t = new char[] { 2, (char)-1, 0, (char)-4 }; CharArrays.radixSort( t ); for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ i ] <= t[ i + 1 ] ); t = CharArrays.shuffle( castIdentity( 100 ), new Random( 0 ) ); CharArrays.radixSort( t ); for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ i ] <= t[ i + 1 ] ); t = new char[ 100 ]; Random random = new Random( 0 ); for( int i = t.length; i-- != 0; ) t[ i ] = (char)random.nextInt(); CharArrays.radixSort( t ); for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ i ] <= t[ i + 1 ] ); t = new char[ 100000 ]; random = new Random( 0 ); for( int i = t.length; i-- != 0; ) t[ i ] = (char)random.nextInt(); CharArrays.radixSort( t ); for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ i ] <= t[ i + 1 ] ); t = new char[ 10000000 ]; random = new Random( 0 ); for( int i = t.length; i-- != 0; ) t[ i ] = (char)random.nextInt(); CharArrays.radixSort( t ); for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ i ] <= t[ i + 1 ] ); } @Test public void testRadixSort2() { char[][] d = new char[ 2 ][]; d[ 0 ] = new char[ 10 ]; for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = (char)( 3 - i % 3 ); d[ 1 ] = CharArrays.shuffle( castIdentity( 10 ), new Random( 0 ) ); CharArrays.radixSort( d[ 0 ], d[ 1 ] ); for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " + d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] ); d[ 0 ] = new char[ 100000 ]; for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = (char)( 100 - i % 100 ); d[ 1 ] = CharArrays.shuffle( castIdentity( 100000 ), new Random( 6 ) ); CharArrays.radixSort( d[ 0 ], d[ 1 ] ); for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " + d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] ); d[ 0 ] = new char[ 10 ]; for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = (char)( i % 3 - 2 ); Random random = new Random( 0 ); d[ 1 ] = new char[ d[ 0 ].length ]; for( int i = d.length; i-- != 0; ) d[ 1 ][ i ] = (char)random.nextInt(); CharArrays.radixSort( d[ 0 ], d[ 1 ] ); for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " + d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] ); d[ 0 ] = new 
char[ 100000 ]; random = new Random( 0 ); for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = (char)random.nextInt(); d[ 1 ] = new char[ d[ 0 ].length ]; for( int i = d.length; i-- != 0; ) d[ 1 ][ i ] = (char)random.nextInt(); CharArrays.radixSort( d[ 0 ], d[ 1 ] ); for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " + d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] ); d[ 0 ] = new char[ 10000000 ]; random = new Random( 0 ); for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = (char)random.nextInt(); d[ 1 ] = new char[ d[ 0 ].length ]; for( int i = d.length; i-- != 0; ) d[ 1 ][ i ] = (char)random.nextInt(); CharArrays.radixSort( d[ 0 ], d[ 1 ] ); for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " + d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] ); } @Test public void testRadixSort() { char[][] t = { { 2, 1, 0, 4 } }; CharArrays.radixSort( t ); for( int i = t[ 0 ].length - 1; i-- != 0; ) assertTrue( t[ 0 ][ i ] <= t[ 0 ][ i + 1 ] ); t[ 0 ] = CharArrays.shuffle( castIdentity( 100 ), new Random( 0 ) ); CharArrays.radixSort( t ); for( int i = t[ 0 ].length - 1; i-- != 0; ) assertTrue( t[ 0 ][ i ] <= t[ 0 ][ i + 1 ] ); char[][] d = new char[ 2 ][]; d[ 0 ] = new char[ 10 ]; for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = (char)( 3 - i % 3 ); d[ 1 ] = CharArrays.shuffle( castIdentity( 10 ), new Random( 0 ) ); CharArrays.radixSort( d ); for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " + d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] ); d[ 0 ] = new char[ 100000 ]; for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = (char)( 100 - i % 100 ); d[ 1 ] = CharArrays.shuffle( castIdentity( 100000 ), new Random( 6 ) ); CharArrays.radixSort( d ); for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " + d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] ); d[ 0 ] = new char[ 10 ]; Random random = new Random( 0 ); for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = (char)random.nextInt(); d[ 1 ] = new char[ d[ 0 ].length ]; for( int i = d.length; i-- != 0; ) d[ 1 ][ i ] = (char)random.nextInt(); CharArrays.radixSort( d ); for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " + d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] ); d[ 0 ] = new char[ 100000 ]; random = new Random( 0 ); for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = (char)random.nextInt(); d[ 1 ] = new char[ d[ 0 ].length ]; for( int i = d.length; i-- != 0; ) d[ 1 ][ i ] = (char)random.nextInt(); CharArrays.radixSort( d ); for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " + d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 
1 ][ i ] <= d[ 1 ][ i + 1 ] ); d[ 0 ] = new char[ 10000000 ]; random = new Random( 0 ); for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = (char)random.nextInt(); d[ 1 ] = new char[ d[ 0 ].length ]; for( int i = d.length; i-- != 0; ) d[ 1 ][ i ] = (char)random.nextInt(); CharArrays.radixSort( d ); for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " + d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] ); } } fastutil-7.1.0/test/it/unimi/dsi/fastutil/doubles/DoubleArraysTest.java0000664000000000000000000006264413050705451024750 0ustar rootrootpackage it.unimi.dsi.fastutil.doubles; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; import it.unimi.dsi.fastutil.ints.IntArrays; import java.util.Random; import org.junit.Test; public class DoubleArraysTest { private static double[] identity( int n ) { final double[] a = new double[ n ]; while( n-- != 0 ) a[ n ] = n; return a; } private static int[] identityInt( int n ) { final int[] a = new int[ n ]; while( n-- != 0 ) a[ n ] = n; return a; } @Test public void testRadixSort1() { double[] t = { 2, 1, 0, 4 }; DoubleArrays.radixSort( t ); for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ i ] <= t[ i + 1 ] ); t = new double[] { 2, -1, 0, -4 }; DoubleArrays.radixSort( t ); for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ i ] <= t[ i + 1 ] ); t = DoubleArrays.shuffle( identity( 100 ), new Random( 0 ) ); DoubleArrays.radixSort( t ); for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ i ] <= t[ i + 1 ] ); t = new double[ 100 ]; Random random = new Random( 0 ); for( int i = t.length; i-- != 0; ) t[ i ] = random.nextInt(); DoubleArrays.radixSort( t ); for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ i ] <= t[ i + 1 ] ); t = new double[ 100000 ]; random = new Random( 0 ); for( int i = t.length; i-- != 0; ) t[ i ] = random.nextInt(); DoubleArrays.radixSort( t ); for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ i ] <= t[ i + 1 ] ); t = new double[ 10000000 ]; random = new Random( 0 ); for( int i = t.length; i-- != 0; ) t[ i ] = random.nextInt(); DoubleArrays.radixSort( t ); for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ i ] <= t[ i + 1 ] ); } @Test public void testRadixSort2() { double[][] d = new double[ 2 ][]; d[ 0 ] = new double[ 10 ]; for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = 3 - i % 3; d[ 1 ] = DoubleArrays.shuffle( identity( 10 ), new Random( 0 ) ); DoubleArrays.radixSort( d[ 0 ], d[ 1 ] ); for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " + d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] ); d[ 0 ] = new double[ 100000 ]; for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = 100 - i % 100; d[ 1 ] = DoubleArrays.shuffle( identity( 100000 ), new Random( 6 ) ); DoubleArrays.radixSort( d[ 0 ], d[ 1 ] ); for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " + d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] ); d[ 0 ] = new double[ 10 ]; for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = i % 3 - 2; Random random = new Random( 0 ); d[ 1 ] = new double[ d[ 0 ].length 
]; for( int i = d[ 1 ].length; i-- != 0; ) d[ 1 ][ i ] = random.nextInt(); DoubleArrays.radixSort( d[ 0 ], d[ 1 ] ); for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " + d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] ); d[ 0 ] = new double[ 100000 ]; random = new Random( 0 ); for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = random.nextInt(); d[ 1 ] = new double[ d[ 0 ].length ]; for( int i = d[ 1 ].length; i-- != 0; ) d[ 1 ][ i ] = random.nextInt(); DoubleArrays.radixSort( d[ 0 ], d[ 1 ] ); for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " + d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] ); d[ 0 ] = new double[ 10000000 ]; random = new Random( 0 ); for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = random.nextInt(); d[ 1 ] = new double[ d[ 0 ].length ]; for( int i = d[ 1 ].length; i-- != 0; ) d[ 1 ][ i ] = random.nextInt(); DoubleArrays.radixSort( d[ 0 ], d[ 1 ] ); for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " + d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] ); } @Test public void testRadixSort() { double[][] t = { { 2, 1, 0, 4 } }; DoubleArrays.radixSort( t ); for( int i = t[ 0 ].length - 1; i-- != 0; ) assertTrue( t[ 0 ][ i ] <= t[ 0 ][ i + 1 ] ); t[ 0 ] = DoubleArrays.shuffle( identity( 100 ), new Random( 0 ) ); DoubleArrays.radixSort( t ); for( int i = t[ 0 ].length - 1; i-- != 0; ) assertTrue( t[ 0 ][ i ] <= t[ 0 ][ i + 1 ] ); double[][] d = new double[ 2 ][]; d[ 0 ] = new double[ 10 ]; for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = 3 - i % 3; d[ 1 ] = DoubleArrays.shuffle( identity( 10 ), new Random( 0 ) ); DoubleArrays.radixSort( d ); for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " + d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] ); d[ 0 ] = new double[ 100000 ]; for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = 100 - i % 100; d[ 1 ] = DoubleArrays.shuffle( identity( 100000 ), new Random( 6 ) ); DoubleArrays.radixSort( d ); for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " + d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] ); d[ 0 ] = new double[ 10 ]; Random random = new Random( 0 ); for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = random.nextInt(); d[ 1 ] = new double[ d[ 0 ].length ]; for( int i = d[ 1 ].length; i-- != 0; ) d[ 1 ][ i ] = random.nextInt(); DoubleArrays.radixSort( d ); for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " + d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] ); d[ 0 ] = new double[ 100000 ]; random = new Random( 0 ); for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = 
random.nextInt(); d[ 1 ] = new double[ d[ 0 ].length ]; for( int i = d[ 1 ].length; i-- != 0; ) d[ 1 ][ i ] = random.nextInt(); DoubleArrays.radixSort( d ); for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " + d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] ); d[ 0 ] = new double[ 10000000 ]; random = new Random( 0 ); for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = random.nextInt(); d[ 1 ] = new double[ d[ 0 ].length ]; for( int i = d[ 1 ].length; i-- != 0; ) d[ 1 ][ i ] = random.nextInt(); DoubleArrays.radixSort( d ); for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " + d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] ); } @Test public void testRadixSortIndirectStable() { double[] d = { 2, 1, 0, 4 }; int[] perm = it.unimi.dsi.fastutil.ints.IntArraysTest.identity( d.length ); DoubleArrays.radixSortIndirect( perm, d, true ); for( int i = d.length - 1; i-- != 0; ) assertTrue( d[ perm[ i ] ] <= d[ perm[ i + 1 ] ] ); d = new double[ d.length ]; perm = it.unimi.dsi.fastutil.ints.IntArraysTest.identity( d.length ); DoubleArrays.radixSortIndirect( perm, d, true ); for( int i = d.length - 1; i-- != 0; ) assertEquals( i, perm[ i ] ); d = new double[] { 2, -1, 0, -4 }; perm = it.unimi.dsi.fastutil.ints.IntArraysTest.identity( d.length ); DoubleArrays.radixSortIndirect( perm, d, true ); for( int i = d.length - 1; i-- != 0; ) assertTrue( d[ perm[ i ] ] <= d[ perm[ i + 1 ] ] ); d = DoubleArrays.shuffle( identity( 100 ), new Random( 0 ) ); perm = it.unimi.dsi.fastutil.ints.IntArraysTest.identity( d.length ); DoubleArrays.radixSortIndirect( perm, d, true ); for( int i = d.length - 1; i-- != 0; ) assertTrue( d[ perm[ i ] ] <= d[ perm[ i + 1 ] ] ); d = new double[ 100 ]; perm = it.unimi.dsi.fastutil.ints.IntArraysTest.identity( d.length ); Random random = new Random( 0 ); for( int i = d.length; i-- != 0; ) d[ i ] = random.nextInt(); DoubleArrays.radixSortIndirect( perm, d, true ); for( int i = d.length - 1; i-- != 0; ) assertTrue( d[ perm[ i ] ] <= d[ perm[ i + 1 ] ] ); d = new double[ d.length ]; perm = it.unimi.dsi.fastutil.ints.IntArraysTest.identity( d.length ); DoubleArrays.radixSortIndirect( perm, d, true ); for( int i = d.length - 1; i-- != 0; ) assertEquals( i, perm[ i ] ); d = new double[ d.length ]; for( int i = 0; i < d.length; i++ ) d[ i ] = random.nextInt( 4 ); perm = it.unimi.dsi.fastutil.ints.IntArraysTest.identity( d.length ); DoubleArrays.radixSortIndirect( perm, d, true ); for( int i = d.length - 1; i-- != 0; ) if ( d[ perm[ i ] ] == d[ perm[ i + 1 ] ] ) assertTrue( perm[ i ] < perm[ i + 1 ] ); d = new double[ 100 ]; perm = it.unimi.dsi.fastutil.ints.IntArraysTest.identity( d.length ); random = new Random( 0 ); for( int i = d.length; i-- != 0; ) d[ i ] = random.nextInt(); DoubleArrays.radixSortIndirect( perm, d, 10, 90, true ); for( int i = 10; i < 89; i++ ) assertTrue( Integer.toString( i ), d[ perm[ i ] ] <= d[ perm[ i + 1 ] ] ); for( int i = 0; i < 10; i++ ) assertEquals( i, perm[ i ] ); for( int i = 90; i < 100; i++ ) assertEquals( i, perm[ i ] ); d = new double[ 100000 ]; perm = it.unimi.dsi.fastutil.ints.IntArraysTest.identity( d.length ); random = new Random( 0 ); for( int i = d.length; i-- != 0; ) d[ i ] = random.nextInt(); 
DoubleArrays.radixSortIndirect( perm, d, true ); for( int i = d.length - 1; i-- != 0; ) assertTrue( Integer.toString( i ), d[ perm[ i ] ] <= d[ perm[ i + 1 ] ] ); IntArrays.shuffle( perm, new Random( 0 ) ); DoubleArrays.radixSortIndirect( perm, d, true ); for( int i = d.length - 1; i-- != 0; ) assertTrue( Integer.toString( i ), d[ perm[ i ] ] <= d[ perm[ i + 1 ] ] ); d = new double[ 10000000 ]; perm = it.unimi.dsi.fastutil.ints.IntArraysTest.identity( d.length ); random = new Random( 0 ); for( int i = d.length; i-- != 0; ) d[ i ] = random.nextInt(); DoubleArrays.radixSortIndirect( perm, d, true ); for( int i = d.length - 1; i-- != 0; ) assertTrue( d[ perm[ i ] ] <= d[ perm[ i + 1 ] ] ); d = new double[ d.length ]; perm = it.unimi.dsi.fastutil.ints.IntArraysTest.identity( d.length ); DoubleArrays.radixSortIndirect( perm, d, true ); for( int i = d.length - 1; i-- != 0; ) assertEquals( i, perm[ i ] ); d = new double[ d.length ]; for( int i = 0; i < d.length; i++ ) d[ i ] = random.nextInt( 8 ); perm = it.unimi.dsi.fastutil.ints.IntArraysTest.identity( d.length ); DoubleArrays.radixSortIndirect( perm, d, true ); for( int i = d.length - 1; i-- != 0; ) if ( d[ perm[ i ] ] == d[ perm[ i + 1 ] ] ) assertTrue( perm[ i ] < perm[ i + 1 ] ); } @Test public void testRadixSortIndirectUnstable() { double[] d = { 2, 1, 0, 4 }; int[] perm = it.unimi.dsi.fastutil.ints.IntArraysTest.identity( d.length ); DoubleArrays.radixSortIndirect( perm, d, false ); for( int i = d.length - 1; i-- != 0; ) assertTrue( d[ perm[ i ] ] <= d[ perm[ i + 1 ] ] ); d = new double[ d.length ]; perm = it.unimi.dsi.fastutil.ints.IntArraysTest.identity( d.length ); DoubleArrays.radixSortIndirect( perm, d, false ); for( int i = d.length - 1; i-- != 0; ) assertEquals( i, perm[ i ] ); d = new double[] { 2, -1, 0, -4 }; perm = it.unimi.dsi.fastutil.ints.IntArraysTest.identity( d.length ); DoubleArrays.radixSortIndirect( perm, d, false ); for( int i = d.length - 1; i-- != 0; ) assertTrue( d[ perm[ i ] ] <= d[ perm[ i + 1 ] ] ); d = DoubleArrays.shuffle( identity( 100 ), new Random( 0 ) ); perm = it.unimi.dsi.fastutil.ints.IntArraysTest.identity( d.length ); DoubleArrays.radixSortIndirect( perm, d, false ); for( int i = d.length - 1; i-- != 0; ) assertTrue( d[ perm[ i ] ] <= d[ perm[ i + 1 ] ] ); d = new double[ 100 ]; perm = it.unimi.dsi.fastutil.ints.IntArraysTest.identity( d.length ); Random random = new Random( 0 ); for( int i = d.length; i-- != 0; ) d[ i ] = random.nextInt(); DoubleArrays.radixSortIndirect( perm, d, false ); for( int i = d.length - 1; i-- != 0; ) assertTrue( d[ perm[ i ] ] <= d[ perm[ i + 1 ] ] ); d = new double[ 100 ]; perm = it.unimi.dsi.fastutil.ints.IntArraysTest.identity( d.length ); random = new Random( 0 ); for( int i = d.length; i-- != 0; ) d[ i ] = random.nextInt(); DoubleArrays.radixSortIndirect( perm, d, 10, 90, false ); for( int i = 10; i < 89; i++ ) assertTrue( Integer.toString( i ), d[ perm[ i ] ] <= d[ perm[ i + 1 ] ] ); for( int i = 0; i < 10; i++ ) assertEquals( i, perm[ i ] ); for( int i = 90; i < 100; i++ ) assertEquals( i, perm[ i ] ); d = new double[ 100000 ]; perm = it.unimi.dsi.fastutil.ints.IntArraysTest.identity( d.length ); random = new Random( 0 ); for( int i = d.length; i-- != 0; ) d[ i ] = random.nextInt(); DoubleArrays.radixSortIndirect( perm, d, false ); for( int i = d.length - 1; i-- != 0; ) assertTrue( Integer.toString( i ), d[ perm[ i ] ] <= d[ perm[ i + 1 ] ] ); IntArrays.shuffle( perm, new Random( 0 ) ); DoubleArrays.radixSortIndirect( perm, d, false ); for( int i = d.length - 1; i-- 
!= 0; ) assertTrue( Integer.toString( i ), d[ perm[ i ] ] <= d[ perm[ i + 1 ] ] ); d = new double[ 10000000 ]; perm = it.unimi.dsi.fastutil.ints.IntArraysTest.identity( d.length ); random = new Random( 0 ); for( int i = d.length; i-- != 0; ) d[ i ] = random.nextInt(); DoubleArrays.radixSortIndirect( perm, d, false ); for( int i = d.length - 1; i-- != 0; ) assertTrue( d[ perm[ i ] ] <= d[ perm[ i + 1 ] ] ); d = new double[ d.length ]; perm = it.unimi.dsi.fastutil.ints.IntArraysTest.identity( d.length ); DoubleArrays.radixSortIndirect( perm, d, false ); for( int i = d.length - 1; i-- != 0; ) assertEquals( i, perm[ i ] ); } @Test public void testRadixSort2IndirectStable() { double[] t = { 2, 1, 0, 4 }; double[] u = { 3, 2, 1, 0 }; int[] perm = identityInt( t.length ); DoubleArrays.radixSortIndirect( perm, t, u, true ); for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ perm[ i ] ] <= t[ perm[ i + 1 ] ] ); t = new double[ t.length ]; perm = identityInt( t.length ); DoubleArrays.radixSortIndirect( perm, t, u, true ); for( int i = t.length - 1; i-- != 0; ) assertTrue( u[ perm[ i ] ] <= u[ perm[ i + 1 ] ] ); t = new double[ t.length ]; perm = identityInt( t.length ); DoubleArrays.radixSortIndirect( perm, t, t, true ); for( int i = t.length - 1; i-- != 0; ) assertEquals( i, perm[ i ] ); t = DoubleArrays.shuffle( identity( 100 ), new Random( 0 ) ); u = DoubleArrays.shuffle( identity( 100 ), new Random( 1 ) ); perm = identityInt( t.length ); DoubleArrays.radixSortIndirect( perm, t, u, true ); for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ perm[ i ] ] < t[ perm[ i + 1 ] ] || t[ perm[ i ] ] == t[ perm[ i + 1 ] ] && u[ perm[ i ] ] <= u[ perm[ i + 1 ] ] ); t = new double[ 100 ]; u = new double[ 100 ]; perm = identityInt( t.length ); Random random = new Random( 0 ); for( int i = t.length; i-- != 0; ) t[ i ] = random.nextInt(); for( int i = t.length; i-- != 0; ) u[ i ] = random.nextInt(); DoubleArrays.radixSortIndirect( perm, t, u, true ); for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ perm[ i ] ] < t[ perm[ i + 1 ] ] || t[ perm[ i ] ] == t[ perm[ i + 1 ] ] && u[ perm[ i ] ] <= u[ perm[ i + 1 ] ] ); t = new double[ t.length ]; u = new double[ t.length ]; perm = identityInt( t.length ); DoubleArrays.radixSortIndirect( perm, t, u, true ); for( int i = t.length - 1; i-- != 0; ) assertEquals( i, perm[ i ] ); for( int i = 0; i < u.length; i++ ) t[ i ] = random.nextInt( 4 ); for( int i = 0; i < u.length; i++ ) u[ i ] = random.nextInt( 4 ); perm = identityInt( t.length ); DoubleArrays.radixSortIndirect( perm, t, u, true ); for( int i = t.length - 1; i-- != 0; ) if ( t[ perm[ i ] ] == t[ perm[ i + 1 ] ] && u[ perm[ i ] ] == u[ perm[ i + 1 ] ] ) assertTrue( perm[ i ] < perm[ i + 1 ] ); t = new double[ 100 ]; u = new double[ 100 ]; perm = identityInt( t.length ); random = new Random( 0 ); for( int i = u.length; i-- != 0; ) u[ i ] = random.nextInt(); DoubleArrays.radixSortIndirect( perm, t, u, 10, 90, true ); for( int i = 10; i < 89; i++ ) assertTrue( Integer.toString( i ), u[ perm[ i ] ] <= u[ perm[ i + 1 ] ] ); for( int i = 0; i < 10; i++ ) assertEquals( i, perm[ i ] ); for( int i = 90; i < 100; i++ ) assertEquals( i, perm[ i ] ); t = new double[ 100000 ]; u = new double[ 100000 ]; perm = identityInt( t.length ); random = new Random( 0 ); for( int i = t.length; i-- != 0; ) t[ i ] = random.nextInt(); DoubleArrays.radixSortIndirect( perm, t, u, true ); for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ perm[ i ] ] < t[ perm[ i + 1 ] ] || t[ perm[ i ] ] == t[ perm[ i + 1 ] ] && u[ perm[ i ] ] <= 
u[ perm[ i + 1 ] ] ); IntArrays.shuffle( perm, new Random( 0 ) ); DoubleArrays.radixSortIndirect( perm, t, u, true ); for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ perm[ i ] ] < t[ perm[ i + 1 ] ] || t[ perm[ i ] ] == t[ perm[ i + 1 ] ] && u[ perm[ i ] ] <= u[ perm[ i + 1 ] ] ); t = new double[ 10000000 ]; u = new double[ 10000000 ]; perm = identityInt( t.length ); random = new Random( 0 ); for( int i = t.length; i-- != 0; ) t[ i ] = random.nextInt(); for( int i = t.length; i-- != 0; ) u[ i ] = random.nextInt(); DoubleArrays.radixSortIndirect( perm, t, u, true ); for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ perm[ i ] ] < t[ perm[ i + 1 ] ] || t[ perm[ i ] ] == t[ perm[ i + 1 ] ] && u[ perm[ i ] ] <= u[ perm[ i + 1 ] ] ); t = new double[ t.length ]; u = new double[ t.length ]; perm = identityInt( t.length ); DoubleArrays.radixSortIndirect( perm, t, u, true ); for( int i = t.length - 1; i-- != 0; ) assertEquals( i, perm[ i ] ); t = new double[ t.length ]; for( int i = 0; i < t.length; i++ ) t[ i ] = random.nextInt( 8 ); for( int i = 0; i < t.length; i++ ) u[ i ] = random.nextInt( 8 ); perm = identityInt( t.length ); DoubleArrays.radixSortIndirect( perm, t, u, true ); for( int i = t.length - 1; i-- != 0; ) if ( t[ perm[ i ] ] == t[ perm[ i + 1 ] ] && u[ perm[ i ] ] == u[ perm[ i + 1 ] ] ) assertTrue( perm[ i ] < perm[ i + 1 ] ); } @Test public void testRadixSort2IndirectUnstable() { double[] t = { 2, 1, 0, 4 }; double[] u = { 3, 2, 1, 0 }; int[] perm = identityInt( t.length ); DoubleArrays.radixSortIndirect( perm, t, u, false ); for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ perm[ i ] ] <= t[ perm[ i + 1 ] ] ); t = new double[ t.length ]; perm = identityInt( t.length ); DoubleArrays.radixSortIndirect( perm, t, u, false ); for( int i = t.length - 1; i-- != 0; ) assertTrue( u[ perm[ i ] ] <= u[ perm[ i + 1 ] ] ); t = new double[ t.length ]; perm = identityInt( t.length ); DoubleArrays.radixSortIndirect( perm, t, t, false ); for( int i = t.length - 1; i-- != 0; ) assertEquals( i, perm[ i ] ); t = DoubleArrays.shuffle( identity( 100 ), new Random( 0 ) ); u = DoubleArrays.shuffle( identity( 100 ), new Random( 1 ) ); perm = identityInt( t.length ); DoubleArrays.radixSortIndirect( perm, t, u, false ); for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ perm[ i ] ] < t[ perm[ i + 1 ] ] || t[ perm[ i ] ] == t[ perm[ i + 1 ] ] && u[ perm[ i ] ] <= u[ perm[ i + 1 ] ] ); t = new double[ 100 ]; u = new double[ 100 ]; perm = identityInt( t.length ); Random random = new Random( 0 ); for( int i = t.length; i-- != 0; ) t[ i ] = random.nextInt(); for( int i = t.length; i-- != 0; ) u[ i ] = random.nextInt(); DoubleArrays.radixSortIndirect( perm, t, u, false ); for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ perm[ i ] ] < t[ perm[ i + 1 ] ] || t[ perm[ i ] ] == t[ perm[ i + 1 ] ] && u[ perm[ i ] ] <= u[ perm[ i + 1 ] ] ); t = new double[ t.length ]; u = new double[ t.length ]; perm = identityInt( t.length ); DoubleArrays.radixSortIndirect( perm, t, u, false ); for( int i = t.length - 1; i-- != 0; ) assertEquals( i, perm[ i ] ); for( int i = 0; i < u.length; i++ ) t[ i ] = random.nextInt( 4 ); for( int i = 0; i < u.length; i++ ) u[ i ] = random.nextInt( 4 ); perm = identityInt( t.length ); DoubleArrays.radixSortIndirect( perm, t, u, false ); for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ perm[ i ] ] < t[ perm[ i + 1 ] ] || t[ perm[ i ] ] == t[ perm[ i + 1 ] ]&& u[ perm[ i ] ] <= u[ perm[ i + 1 ] ] ); t = new double[ 100 ]; u = new double[ 100 ]; perm = identityInt( t.length 
); random = new Random( 0 ); for( int i = u.length; i-- != 0; ) u[ i ] = random.nextInt(); DoubleArrays.radixSortIndirect( perm, t, u, 10, 90, false ); for( int i = 10; i < 89; i++ ) assertTrue( Integer.toString( i ), u[ perm[ i ] ] <= u[ perm[ i + 1 ] ] ); for( int i = 0; i < 10; i++ ) assertEquals( i, perm[ i ] ); for( int i = 90; i < 100; i++ ) assertEquals( i, perm[ i ] ); t = new double[ 100000 ]; u = new double[ 100000 ]; perm = identityInt( t.length ); random = new Random( 0 ); for( int i = t.length; i-- != 0; ) t[ i ] = random.nextInt(); DoubleArrays.radixSortIndirect( perm, t, u, false ); for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ perm[ i ] ] < t[ perm[ i + 1 ] ] || t[ perm[ i ] ] == t[ perm[ i + 1 ] ] && u[ perm[ i ] ] <= u[ perm[ i + 1 ] ] ); IntArrays.shuffle( perm, new Random( 0 ) ); DoubleArrays.radixSortIndirect( perm, t, u, false ); for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ perm[ i ] ] < t[ perm[ i + 1 ] ] || t[ perm[ i ] ] == t[ perm[ i + 1 ] ] && u[ perm[ i ] ] <= u[ perm[ i + 1 ] ] ); t = new double[ 10000000 ]; u = new double[ 10000000 ]; perm = identityInt( t.length ); random = new Random( 0 ); for( int i = t.length; i-- != 0; ) t[ i ] = random.nextInt(); for( int i = t.length; i-- != 0; ) u[ i ] = random.nextInt(); DoubleArrays.radixSortIndirect( perm, t, u, false ); for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ perm[ i ] ] < t[ perm[ i + 1 ] ] || t[ perm[ i ] ] == t[ perm[ i + 1 ] ] && u[ perm[ i ] ] <= u[ perm[ i + 1 ] ] ); t = new double[ t.length ]; u = new double[ t.length ]; perm = identityInt( t.length ); DoubleArrays.radixSortIndirect( perm, t, u, false ); for( int i = t.length - 1; i-- != 0; ) assertEquals( i, perm[ i ] ); t = new double[ t.length ]; for( int i = 0; i < t.length; i++ ) t[ i ] = random.nextInt( 8 ); for( int i = 0; i < t.length; i++ ) u[ i ] = random.nextInt( 8 ); perm = identityInt( t.length ); DoubleArrays.radixSortIndirect( perm, t, u, false ); for( int i = t.length - 1; i-- != 0; ) assertTrue(i + " " + t[perm[i]]+ " "+ t[perm[i+1]] + " " + u[perm[i]] + " " + u[perm[i+1]] + " " + perm[i]+ " " +perm[i+1], t[ perm[ i ] ] < t[ perm[ i + 1 ] ] || t[ perm[ i ] ] == t[ perm[ i + 1 ] ] && u[ perm[ i ] ] <= u[ perm[ i + 1 ] ] ); } @Test public void testMergeSortNaNs() { final double[] t = { Double.NaN, 1, 5, 2, 1, 0, 9, 1, Double.NaN, 2, 4, 6, 8, 9, 10, 12, 1, 7 }; for( int to = 1; to < t.length; to++ ) for( int from = 0; from < to; from++ ) { final double[] a = t.clone(); DoubleArrays.mergeSort( a, from, to ); for( int i = to - 1; i-- != from; ) assertTrue( Double.compare( a[ i ], a[ i + 1 ] ) <= 0 ); } } @Test public void testRadixSortNaNs() { final double[] t = { Double.NaN, 1, 5, 2, 1, 0, 9, 1, Double.NaN, 2, 4, 6, 8, 9, 10, 12, 1, 7 }; for( int to = 1; to < t.length; to++ ) for( int from = 0; from < to; from++ ) { final double[] a = t.clone(); DoubleArrays.radixSort( a, from, to ); for( int i = to - 1; i-- != from; ) assertTrue( Double.compare( a[ i ], a[ i + 1 ] ) <= 0 ); } } @Test public void testRadixSortIndirectNaNs() { final double[] t = { Double.NaN, 1, 5, 2, 1, 0, 9, 1, Double.NaN, 2, 4, 6, 8, 9, 10, 12, 1, 7 }; for( int to = 1; to < t.length; to++ ) for( int from = 0; from < to; from++ ) { final int perm[] = new int[ t.length ]; for( int i = perm.length; i-- != 0; ) perm[ i ] = i; DoubleArrays.radixSortIndirect( perm, t, from, to, true ); for( int i = to - 1; i-- != from; ) assertTrue( Double.compare( t[ perm[ i ] ], t[ perm[ i + 1 ] ] ) <= 0 ); } } @Test public void testRadixSortIndirect2NaNs() { final 
double[] t = { Double.NaN, 1, 5, 2, 1, 0, 9, 1, Double.NaN, 2, 4, 6, 8, 9, 10, 12, 1, 7 }; for( int to = 1; to < t.length; to++ ) for( int from = 0; from < to; from++ ) { final int perm[] = new int[ t.length ]; for( int i = perm.length; i-- != 0; ) perm[ i ] = i; DoubleArrays.radixSortIndirect( perm, t, t, from, to, true ); for( int i = to - 1; i-- != from; ) assertTrue( Double.compare( t[ perm[ i ] ], t[ perm[ i + 1 ] ] ) <= 0 ); } } @Test public void testQuickSortNaNs() { final double[] t = { Double.NaN, 1, 5, 2, 1, 0, 9, 1, Double.NaN, 2, 4, 6, 8, 9, 10, 12, 1, 7 }; for( int to = 1; to < t.length; to++ ) for( int from = 0; from < to; from++ ) { final double[] a = t.clone(); DoubleArrays.quickSort( a, from, to ); for( int i = to - 1; i-- != from; ) assertTrue( Double.compare( a[ i ], a[ i + 1 ] ) <= 0 ); } } } fastutil-7.1.0/test/it/unimi/dsi/fastutil/doubles/DoubleBigArraysTest.java0000664000000000000000000003174513050705451025370 0ustar rootrootpackage it.unimi.dsi.fastutil.doubles; import static it.unimi.dsi.fastutil.doubles.DoubleBigArrays.get; import static it.unimi.dsi.fastutil.doubles.DoubleBigArrays.set; import static org.junit.Assert.assertArrayEquals; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; import java.util.Arrays; import java.util.Random; import org.junit.Test; public class DoubleBigArraysTest { public static double[][] identity( final int n ) { final double[][] perm = DoubleBigArrays.newBigArray( n ); for( int i = n; i-- != 0; ) DoubleBigArrays.set( perm, i , i ); return perm; } @Test public void testQuickSort() { double[] s = new double[] { 2, 1, 5, 2, 1, 0, 9, 1, 4, 2, 4, 6, 8, 9, 10, 12, 1, 7 }; Arrays.sort( s ); double[][] sorted = DoubleBigArrays.wrap( s.clone() ); double[][] a = DoubleBigArrays.wrap( s.clone() ); DoubleBigArrays.quickSort( a ); assertArrayEquals( sorted, a ); DoubleBigArrays.quickSort( a ); assertArrayEquals( sorted, a ); a = DoubleBigArrays.wrap( s.clone() ); DoubleBigArrays.quickSort( a, DoubleComparators.NATURAL_COMPARATOR ); assertArrayEquals( sorted, a ); DoubleBigArrays.quickSort( a, DoubleComparators.NATURAL_COMPARATOR ); assertArrayEquals( sorted, a ); } private void testCopy( int n ) { double[][] a = DoubleBigArrays.newBigArray( n ); for ( int i = 0; i < n; i++ ) set( a, i, i ); DoubleBigArrays.copy( a, 0, a, 1, n - 2 ); assertEquals( 0, a[ 0 ][ 0 ], 0 ); for ( int i = 0; i < n - 2; i++ ) assertEquals( i, get( a, i + 1 ), 0 ); for ( int i = 0; i < n; i++ ) set( a, i, i ); DoubleBigArrays.copy( a, 1, a, 0, n - 1 ); for ( int i = 0; i < n - 1; i++ ) assertEquals( i + 1, get( a, i ) ,0 ); for ( int i = 0; i < n; i++ ) set( a, i, i ); double[] b = new double[ n ]; for ( int i = 0; i < n; i++ ) b[ i ] = i; assertArrayEquals( a, DoubleBigArrays.wrap( b ) ); } @Test public void testCopy10() { testCopy( 10 ); } @Test public void testCopy1000() { testCopy( 1000 ); } @Test public void testCopy1000000() { testCopy( 1000000 ); } @Test public void testBinarySearch() { double[] a = new double[] { 25, 32, 1, 3, 2, 0, 40, 7, 13, 12, 11, 10, -1, -6, -18, 2000 }; Arrays.sort( a ); double[][] b = DoubleBigArrays.wrap( a.clone() ); for( int i = -1; i < 20; i++ ) { assertEquals( String.valueOf(i), Arrays.binarySearch( a, i ), DoubleBigArrays.binarySearch( b, i ) ); assertEquals( String.valueOf(i), Arrays.binarySearch( a, i ), DoubleBigArrays.binarySearch( b, i, DoubleComparators.NATURAL_COMPARATOR ) ); } for( int i = -1; i < 20; i++ ) { assertEquals( Arrays.binarySearch( a, 
5, 13, i ), DoubleBigArrays.binarySearch( b, 5, 13, i ) ); assertEquals( Arrays.binarySearch( a, 5, 13, i ), DoubleBigArrays.binarySearch( b, 5, 13, i, DoubleComparators.NATURAL_COMPARATOR ) ); } } @Test public void testTrim() { double[] a = new double[] { 2, 1, 5, 2, 1, 0, 9, 1, 4, 2, 4, 6, 8, 9, 10, 12, 1, 7 }; double[][] b = DoubleBigArrays.wrap( a.clone() ); for( int i = a.length; i-- != 0; ) { double[][] t = DoubleBigArrays.trim( b, i ); final long l = DoubleBigArrays.length( t ); assertEquals( i, l ); for( int p = 0; p < l; p++ ) assertEquals( a[ p ], DoubleBigArrays.get( t, p ), 0 ); } } @Test public void testEquals() { double[] a = new double[] { 2, 1, 5, 2, 1, 0, 9, 1, 4, 2, 4, 6, 8, 9, 10, 12, 1, 7 }; double[][] b = DoubleBigArrays.wrap( a.clone() ); double[][] c = DoubleBigArrays.wrap( a.clone() ); assertTrue( DoubleBigArrays.equals( b, c ) ); b[ 0 ][ 0 ] = 0; assertFalse( DoubleBigArrays.equals( b, c ) ); } @Test public void testRadixSort1() { double[][] t = DoubleBigArrays.wrap( new double[] { 2, 1, 0, 4 } ); DoubleBigArrays.radixSort( t ); for( long i = DoubleBigArrays.length( t ) - 1; i-- != 0; ) assertTrue( DoubleBigArrays.get( t, i ) <= DoubleBigArrays.get( t, i + 1 ) ); t = DoubleBigArrays.wrap( new double[] { 2, -1, 0, -4 } ); DoubleBigArrays.radixSort( t ); for( long i = DoubleBigArrays.length( t ) - 1; i-- != 0; ) assertTrue( DoubleBigArrays.get( t, i ) <= DoubleBigArrays.get( t, i + 1 ) ); t = DoubleBigArrays.shuffle( identity( 100 ), new Random( 0 ) ); DoubleBigArrays.radixSort( t ); for( long i = DoubleBigArrays.length( t ) - 1; i-- != 0; ) assertTrue( DoubleBigArrays.get( t, i ) <= DoubleBigArrays.get( t, i + 1 ) ); t = DoubleBigArrays.newBigArray( 100 ); Random random = new Random( 0 ); for( long i = DoubleBigArrays.length( t ); i-- != 0; ) DoubleBigArrays.set( t, i, random.nextInt() ); DoubleBigArrays.radixSort( t ); for( long i = DoubleBigArrays.length( t ) - 1; i-- != 0; ) assertTrue( DoubleBigArrays.get( t, i ) <= DoubleBigArrays.get( t, i + 1 ) ); t = DoubleBigArrays.newBigArray( 100000 ); random = new Random( 0 ); for( long i = DoubleBigArrays.length( t ); i-- != 0; ) DoubleBigArrays.set( t, i, random.nextInt() ); DoubleBigArrays.radixSort( t ); for( long i = DoubleBigArrays.length( t ) - 1; i-- != 0; ) assertTrue( DoubleBigArrays.get( t, i ) <= DoubleBigArrays.get( t, i + 1 ) ); for( long i = 100; i-- != 10; ) DoubleBigArrays.set( t, i, random.nextInt() ); DoubleBigArrays.radixSort( t, 10, 100 ); for( long i = 99; i-- != 10; ) assertTrue( DoubleBigArrays.get( t, i ) <= DoubleBigArrays.get( t, i + 1 ) ); t = DoubleBigArrays.newBigArray( 1000000 ); random = new Random( 0 ); for( long i = DoubleBigArrays.length( t ); i-- != 0; ) DoubleBigArrays.set( t, i, random.nextInt() ); DoubleBigArrays.radixSort( t ); for( long i = DoubleBigArrays.length( t ) - 1; i-- != 0; ) assertTrue( DoubleBigArrays.get( t, i ) <= DoubleBigArrays.get( t, i + 1 ) ); } @Test public void testRadixSort2() { double d[][], e[][]; d = DoubleBigArrays.newBigArray( 10 ); for( long i = DoubleBigArrays.length( d ); i-- != 0; ) DoubleBigArrays.set( d, i, (int)( 3 - i % 3 ) ); e = DoubleBigArrays.shuffle( identity( 10 ), new Random( 0 ) ); DoubleBigArrays.radixSort( d, e ); for( long i = DoubleBigArrays.length( d ) - 1; i-- != 0; ) assertTrue( Long.toString( i ) + ": <" + DoubleBigArrays.get( d, i ) + ", " + DoubleBigArrays.get( e, i ) + ">, <" + DoubleBigArrays.get( d, i + 1 ) + ", " + DoubleBigArrays.get( e, i + 1 ) + ">", DoubleBigArrays.get( d, i ) < DoubleBigArrays.get( d, i + 1 ) || 
DoubleBigArrays.get( d, i ) == DoubleBigArrays.get( d, i + 1 ) && DoubleBigArrays.get( e, i ) <= DoubleBigArrays.get( e, i + 1 ) ); d = DoubleBigArrays.newBigArray( 100000 ); for( long i = DoubleBigArrays.length( d ); i-- != 0; ) DoubleBigArrays.set( d, i, (int)( 100 - i % 100 ) ); e = DoubleBigArrays.shuffle( identity( 100000 ), new Random( 6 ) ); DoubleBigArrays.radixSort( d, e ); for( long i = DoubleBigArrays.length( d ) - 1; i-- != 0; ) assertTrue( Long.toString( i ) + ": <" + DoubleBigArrays.get( d, i ) + ", " + DoubleBigArrays.get( e, i ) + ">, <" + DoubleBigArrays.get( d, i + 1 ) + ", " + DoubleBigArrays.get( e, i + 1 ) + ">", DoubleBigArrays.get( d, i ) < DoubleBigArrays.get( d, i + 1 ) || DoubleBigArrays.get( d, i ) == DoubleBigArrays.get( d, i + 1 ) && DoubleBigArrays.get( e, i ) <= DoubleBigArrays.get( e, i + 1 ) ); d = DoubleBigArrays.newBigArray( 10 ); for( long i = DoubleBigArrays.length( d ); i-- != 0; ) DoubleBigArrays.set( d, i, (int)( i % 3 - 2 ) ); Random random = new Random( 0 ); e = DoubleBigArrays.newBigArray( DoubleBigArrays.length( d ) ); for( long i = DoubleBigArrays.length( d ); i-- != 0; ) DoubleBigArrays.set( e, i, random.nextInt() ); DoubleBigArrays.radixSort( d, e ); for( long i = DoubleBigArrays.length( d ) - 1; i-- != 0; ) assertTrue( Long.toString( i ) + ": <" + DoubleBigArrays.get( d, i ) + ", " + DoubleBigArrays.get( e, i ) + ">, <" + DoubleBigArrays.get( d, i + 1 ) + ", " + DoubleBigArrays.get( e, i + 1 ) + ">", DoubleBigArrays.get( d, i ) < DoubleBigArrays.get( d, i + 1 ) || DoubleBigArrays.get( d, i ) == DoubleBigArrays.get( d, i + 1 ) && DoubleBigArrays.get( e, i ) <= DoubleBigArrays.get( e, i + 1 ) ); d = DoubleBigArrays.newBigArray( 100000 ); random = new Random( 0 ); for( long i = DoubleBigArrays.length( d ); i-- != 0; ) DoubleBigArrays.set( d, i, random.nextInt() ); e = DoubleBigArrays.newBigArray( DoubleBigArrays.length( d ) ); for( long i = DoubleBigArrays.length( d ); i-- != 0; ) DoubleBigArrays.set( e, i, random.nextInt() ); DoubleBigArrays.radixSort( d, e ); for( long i = DoubleBigArrays.length( d ) - 1; i-- != 0; ) assertTrue( Long.toString( i ) + ": <" + DoubleBigArrays.get( d, i ) + ", " + DoubleBigArrays.get( e, i ) + ">, <" + DoubleBigArrays.get( d, i + 1 ) + ", " + DoubleBigArrays.get( e, i + 1 ) + ">", DoubleBigArrays.get( d, i ) < DoubleBigArrays.get( d, i + 1 ) || DoubleBigArrays.get( d, i ) == DoubleBigArrays.get( d, i + 1 ) && DoubleBigArrays.get( e, i ) <= DoubleBigArrays.get( e, i + 1 ) ); for( long i = 100; i-- != 10; ) DoubleBigArrays.set( e, i, random.nextInt() ); DoubleBigArrays.radixSort( d, e, 10, 100 ); for( long i = 99; i-- != 10; ) assertTrue( Long.toString( i ) + ": <" + DoubleBigArrays.get( d, i ) + ", " + DoubleBigArrays.get( e, i ) + ">, <" + DoubleBigArrays.get( d, i + 1 ) + ", " + DoubleBigArrays.get( e, i + 1 ) + ">", DoubleBigArrays.get( d, i ) < DoubleBigArrays.get( d, i + 1 ) || DoubleBigArrays.get( d, i ) == DoubleBigArrays.get( d, i + 1 ) && DoubleBigArrays.get( e, i ) <= DoubleBigArrays.get( e, i + 1 ) ); d = DoubleBigArrays.newBigArray( 1000000 ); random = new Random( 0 ); for( long i = DoubleBigArrays.length( d ); i-- != 0; ) DoubleBigArrays.set( d, i, random.nextInt() ); e = DoubleBigArrays.newBigArray( DoubleBigArrays.length( d ) ); for( long i = DoubleBigArrays.length( d ); i-- != 0; ) DoubleBigArrays.set( e, i, random.nextInt() ); DoubleBigArrays.radixSort( d, e ); for( long i = DoubleBigArrays.length( d ) - 1; i-- != 0; ) assertTrue( Long.toString( i ) + ": <" + DoubleBigArrays.get( d, i ) + ", " + 
DoubleBigArrays.get( e, i ) + ">, <" + DoubleBigArrays.get( d, i + 1 ) + ", " + DoubleBigArrays.get( e, i + 1 ) + ">", DoubleBigArrays.get( d, i ) < DoubleBigArrays.get( d, i + 1 ) || DoubleBigArrays.get( d, i ) == DoubleBigArrays.get( d, i + 1 ) && DoubleBigArrays.get( e, i ) <= DoubleBigArrays.get( e, i + 1 ) ); } @Test public void testShuffle() { double[] a = new double[ 100 ]; for( int i = a.length; i-- != 0; ) a[ i ] = i; double[][] b = DoubleBigArrays.wrap( a ); DoubleBigArrays.shuffle( b, new Random() ); boolean[] c = new boolean[ a.length ]; for( long i = DoubleBigArrays.length( b ); i-- != 0; ) { assertFalse( c[ (int)DoubleBigArrays.get( b, i ) ] ); c[ (int)DoubleBigArrays.get( b, i ) ] = true; } } @Test public void testShuffleFragment() { double[] a = new double[ 100 ]; for( int i = a.length; i-- != 0; ) a[ i ] = -1; for( int i = 10; i < 30; i++ ) a[ i ] = i - 10; double[][] b = DoubleBigArrays.wrap( a ); DoubleBigArrays.shuffle( b, 10, 30, new Random() ); boolean[] c = new boolean[ 20 ]; for( int i = 20; i-- != 0; ) { assertFalse( c[ (int)DoubleBigArrays.get( b, i + 10 ) ] ); c[ (int)DoubleBigArrays.get( b, i + 10 ) ] = true; } } @Test public void testBinarySearchLargeKey() { final double[][] a = DoubleBigArrays.wrap( new double[] { 1, 2, 3 } ); DoubleBigArrays.binarySearch( a, 4 ); } @Test public void testMergeSortNaNs() { final double[] t = { Double.NaN, 1, 5, 2, 1, 0, 9, 1, Double.NaN, 2, 4, 6, 8, 9, 10, 12, 1, 7 }; for( int to = 1; to < t.length; to++ ) for( int from = 0; from < to; from++ ) { final double[] a = t.clone(); DoubleArrays.mergeSort( a, from, to ); for( int i = to - 1; i-- != from; ) assertTrue( Double.compare( a[ i ], a[ i + 1 ] ) <= 0 ); } } @Test public void testRadixSortNaNs() { final double[] t = { Double.NaN, 1, 5, 2, 1, 0, 9, 1, Double.NaN, 2, 4, 6, 8, 9, 10, 12, 1, 7 }; for( int to = 1; to < t.length; to++ ) for( int from = 0; from < to; from++ ) { final double[] a = t.clone(); DoubleBigArrays.radixSort( DoubleBigArrays.wrap( a ), from, to ); for( int i = to - 1; i-- != from; ) assertTrue( Double.compare( a[ i ], a[ i + 1 ] ) <= 0 ); } } @Test public void testRadixSort2NaNs() { final double[] t = { Double.NaN, 1, 5, 2, 1, 0, 9, 1, Double.NaN, 2, 4, 6, 8, 9, 10, 12, 1, 7 }; for( int to = 1; to < t.length; to++ ) for( int from = 0; from < to; from++ ) { final double[] a = t.clone(); final double[] b = t.clone(); DoubleBigArrays.radixSort( DoubleBigArrays.wrap( a ), DoubleBigArrays.wrap( b ), from, to ); for( int i = to - 1; i-- != from; ) { assertTrue( Double.compare( a[ i ], a[ i + 1 ] ) <= 0 ); assertTrue( Double.compare( b[ i ], b[ i + 1 ] ) <= 0 ); } } } @Test public void testQuickSortNaNs() { final double[] t = { Double.NaN, 1, 5, 2, 1, 0, 9, 1, Double.NaN, 2, 4, 6, 8, 9, 10, 12, 1, 7 }; for( int to = 1; to < t.length; to++ ) for( int from = 0; from < to; from++ ) { final double[] a = t.clone(); DoubleBigArrays.quickSort( DoubleBigArrays.wrap( a ), from, to ); for( int i = to - 1; i-- != from; ) assertTrue( Double.compare( a[ i ], a[ i + 1 ] ) <= 0 ); } } }fastutil-7.1.0/test/it/unimi/dsi/fastutil/doubles/DoubleOpenHashSetTest.java0000664000000000000000000000103313050705451025651 0ustar rootrootpackage it.unimi.dsi.fastutil.doubles; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; import org.junit.Test; public class DoubleOpenHashSetTest { @Test public void testNaNs() { DoubleOpenHashSet s = new DoubleOpenHashSet(); s.add( Double.NaN ); s.add( Double.NaN ); assertEquals( 1, s.size() ); } @Test public void 
testZeros() { DoubleOpenHashSet s = new DoubleOpenHashSet(); assertTrue( s.add( -0.0d ) ); assertTrue( s.add( +0.0d ) ); assertEquals( 2, s.size() ); } } fastutil-7.1.0/test/it/unimi/dsi/fastutil/floats/FloatArraysTest.java0000664000000000000000000002260213050705451024424 0ustar rootrootpackage it.unimi.dsi.fastutil.floats; import static org.junit.Assert.assertTrue; import java.util.Random; import org.junit.Test; public class FloatArraysTest { private static float[] identity( int n ) { final float[] a = new float[ n ]; while( n-- != 0 ) a[ n ] = n; return a; } @Test public void testRadixSort1() { float[] t = { 2, 1, 0, 4 }; FloatArrays.radixSort( t ); for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ i ] <= t[ i + 1 ] ); t = new float[] { 2, -1, 0, -4 }; FloatArrays.radixSort( t ); for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ i ] <= t[ i + 1 ] ); t = FloatArrays.shuffle( identity( 100 ), new Random( 0 ) ); FloatArrays.radixSort( t ); for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ i ] <= t[ i + 1 ] ); t = new float[ 100 ]; Random random = new Random( 0 ); for( int i = t.length; i-- != 0; ) t[ i ] = random.nextInt(); FloatArrays.radixSort( t ); for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ i ] <= t[ i + 1 ] ); t = new float[ 100000 ]; random = new Random( 0 ); for( int i = t.length; i-- != 0; ) t[ i ] = random.nextInt(); FloatArrays.radixSort( t ); for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ i ] <= t[ i + 1 ] ); t = new float[ 10000000 ]; random = new Random( 0 ); for( int i = t.length; i-- != 0; ) t[ i ] = random.nextInt(); FloatArrays.radixSort( t ); for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ i ] <= t[ i + 1 ] ); } @Test public void testRadixSort2() { float[][] d = new float[ 2 ][]; d[ 0 ] = new float[ 10 ]; for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = 3 - i % 3; d[ 1 ] = FloatArrays.shuffle( identity( 10 ), new Random( 0 ) ); FloatArrays.radixSort( d[ 0 ], d[ 1 ] ); for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " + d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] ); d[ 0 ] = new float[ 100000 ]; for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = 100 - i % 100; d[ 1 ] = FloatArrays.shuffle( identity( 100000 ), new Random( 6 ) ); FloatArrays.radixSort( d[ 0 ], d[ 1 ] ); for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " + d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] ); d[ 0 ] = new float[ 10 ]; for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = i % 3 - 2; Random random = new Random( 0 ); d[ 1 ] = new float[ d[ 0 ].length ]; for( int i = d.length; i-- != 0; ) d[ 1 ][ i ] = random.nextInt(); FloatArrays.radixSort( d[ 0 ], d[ 1 ] ); for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " + d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] ); d[ 0 ] = new float[ 100000 ]; random = new Random( 0 ); for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = random.nextInt(); d[ 1 ] = new float[ d[ 0 ].length ]; for( int i = d.length; i-- != 0; ) d[ 1 ][ i ] = random.nextInt(); FloatArrays.radixSort( d[ 0 ], d[ 1 ] ); 
for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " + d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] ); d[ 0 ] = new float[ 10000000 ]; random = new Random( 0 ); for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = random.nextInt(); d[ 1 ] = new float[ d[ 0 ].length ]; for( int i = d.length; i-- != 0; ) d[ 1 ][ i ] = random.nextInt(); FloatArrays.radixSort( d[ 0 ], d[ 1 ] ); for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " + d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] ); } @Test public void testRadixSort() { float[][] t = { { 2, 1, 0, 4 } }; FloatArrays.radixSort( t ); for( int i = t[ 0 ].length - 1; i-- != 0; ) assertTrue( t[ 0 ][ i ] <= t[ 0 ][ i + 1 ] ); t[ 0 ] = FloatArrays.shuffle( identity( 100 ), new Random( 0 ) ); FloatArrays.radixSort( t ); for( int i = t[ 0 ].length - 1; i-- != 0; ) assertTrue( t[ 0 ][ i ] <= t[ 0 ][ i + 1 ] ); float[][] d = new float[ 2 ][]; d[ 0 ] = new float[ 10 ]; for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = 3 - i % 3; d[ 1 ] = FloatArrays.shuffle( identity( 10 ), new Random( 0 ) ); FloatArrays.radixSort( d ); for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " + d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] ); d[ 0 ] = new float[ 100000 ]; for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = 100 - i % 100; d[ 1 ] = FloatArrays.shuffle( identity( 100000 ), new Random( 6 ) ); FloatArrays.radixSort( d ); for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " + d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] ); d[ 0 ] = new float[ 10 ]; Random random = new Random( 0 ); for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = random.nextInt(); d[ 1 ] = new float[ d[ 0 ].length ]; for( int i = d.length; i-- != 0; ) d[ 1 ][ i ] = random.nextInt(); FloatArrays.radixSort( d ); for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " + d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] ); d[ 0 ] = new float[ 100000 ]; random = new Random( 0 ); for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = random.nextInt(); d[ 1 ] = new float[ d[ 0 ].length ]; for( int i = d.length; i-- != 0; ) d[ 1 ][ i ] = random.nextInt(); FloatArrays.radixSort( d ); for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " + d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] ); d[ 0 ] = new float[ 10000000 ]; random = new Random( 0 ); for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = random.nextInt(); d[ 1 ] = new float[ d[ 0 ].length ]; for( int i = d.length; i-- != 0; ) d[ 1 ][ i ] = random.nextInt(); FloatArrays.radixSort( d ); for( int i = d[ 0 
].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " + d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] ); } @Test public void testMergeSortNaNs() { final float[] t = { Float.NaN, 1, 5, 2, 1, 0, 9, 1, Float.NaN, 2, 4, 6, 8, 9, 10, 12, 1, 7 }; for( int to = 1; to < t.length; to++ ) for( int from = 0; from < to; from++ ) { final float[] a = t.clone(); FloatArrays.mergeSort( a, from, to ); for( int i = to - 1; i-- != from; ) assertTrue( Float.compare( a[ i ], a[ i + 1 ] ) <= 0 ); } } @Test public void testRadixSortNaNs() { final float[] t = { Float.NaN, 1, 5, 2, 1, 0, 9, 1, Float.NaN, 2, 4, 6, 8, 9, 10, 12, 1, 7 }; for( int to = 1; to < t.length; to++ ) for( int from = 0; from < to; from++ ) { final float[] a = t.clone(); FloatArrays.radixSort( a, from, to ); for( int i = to - 1; i-- != from; ) assertTrue( Float.compare( a[ i ], a[ i + 1 ] ) <= 0 ); } } @Test public void testRadixSortIndirectNaNs() { final float[] t = { Float.NaN, 1, 5, 2, 1, 0, 9, 1, Float.NaN, 2, 4, 6, 8, 9, 10, 12, 1, 7 }; for( int to = 1; to < t.length; to++ ) for( int from = 0; from < to; from++ ) { final int perm[] = new int[ t.length ]; for( int i = perm.length; i-- != 0; ) perm[ i ] = i; FloatArrays.radixSortIndirect( perm, t, from, to, true ); for( int i = to - 1; i-- != from; ) assertTrue( Float.compare( t[ perm[ i ] ], t[ perm[ i + 1 ] ] ) <= 0 ); } } @Test public void testRadixSortIndirect2NaNs() { final float[] t = { Float.NaN, 1, 5, 2, 1, 0, 9, 1, Float.NaN, 2, 4, 6, 8, 9, 10, 12, 1, 7 }; for( int to = 1; to < t.length; to++ ) for( int from = 0; from < to; from++ ) { final int perm[] = new int[ t.length ]; for( int i = perm.length; i-- != 0; ) perm[ i ] = i; FloatArrays.radixSortIndirect( perm, t, t, from, to, true ); for( int i = to - 1; i-- != from; ) assertTrue( Float.compare( t[ perm[ i ] ], t[ perm[ i + 1 ] ] ) <= 0 ); } } @Test public void testQuickSortNaNs() { final float[] t = { Float.NaN, 1, 5, 2, 1, 0, 9, 1, Float.NaN, 2, 4, 6, 8, 9, 10, 12, 1, 7 }; for( int to = 1; to < t.length; to++ ) for( int from = 0; from < to; from++ ) { final float[] a = t.clone(); FloatArrays.quickSort( a, from, to ); for( int i = to - 1; i-- != from; ) assertTrue( Float.compare( a[ i ], a[ i + 1 ] ) <= 0 ); } } } fastutil-7.1.0/test/it/unimi/dsi/fastutil/floats/FloatOpenHashSetTest.java0000664000000000000000000000102213050705451025335 0ustar rootrootpackage it.unimi.dsi.fastutil.floats; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; import org.junit.Test; public class FloatOpenHashSetTest { @Test public void testNaNs() { FloatOpenHashSet s = new FloatOpenHashSet(); s.add( Float.NaN ); s.add( Float.NaN ); assertEquals( 1, s.size() ); } @Test public void testZeros() { FloatOpenHashSet s = new FloatOpenHashSet(); assertTrue( s.add( -0.0f ) ); assertTrue( s.add( +0.0f ) ); assertEquals( 2, s.size() ); } } fastutil-7.1.0/test/it/unimi/dsi/fastutil/ints/Int2IntAVLTreeMapTest.java0000664000000000000000000000370413050705451024774 0ustar rootrootpackage it.unimi.dsi.fastutil.ints; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; import it.unimi.dsi.fastutil.objects.Object2ObjectOpenHashMap; import java.util.AbstractMap; import org.junit.Test; public class Int2IntAVLTreeMapTest { @SuppressWarnings("deprecation") @Test public void testContainsNull() { Int2IntAVLTreeMap m = new Int2IntAVLTreeMap( new int[] 
{ 1, 2, 3 }, new int[] { 1, 2, 3 } ); assertFalse( m.containsKey( null ) ); assertTrue( m.get( null ) == null ); }
@SuppressWarnings("boxing") @Test public void testEquals() { Int2IntAVLTreeMap m = new Int2IntAVLTreeMap( new int[] { 1, 2 }, new int[] { 1, 2 } ); assertFalse( m.equals( new Object2ObjectOpenHashMap( new Integer[] { 1, null }, new Integer[] { 1, 1 } ) ) ); }
@SuppressWarnings({ "unchecked", "rawtypes" }) @Test public void entrySetContainsTest() { Int2IntAVLTreeMap m = new Int2IntAVLTreeMap(); m.put(0, 0); assertFalse(m.int2IntEntrySet().contains(new AbstractMap.SimpleEntry(new Object(), null))); assertFalse(m.entrySet().contains(new AbstractMap.SimpleEntry(null, new Object()))); assertFalse(m.entrySet().contains(new AbstractMap.SimpleEntry(null, null))); assertFalse(m.entrySet().contains(new AbstractMap.SimpleEntry(new Object(), new Object()))); }
@SuppressWarnings({ "unchecked", "rawtypes" }) @Test public void entrySetRemoveTest() { Int2IntAVLTreeMap m = new Int2IntAVLTreeMap(); m.put(0, 0); assertFalse(m.entrySet().remove(new AbstractMap.SimpleEntry(new Object(), null))); assertFalse(m.entrySet().remove(new AbstractMap.SimpleEntry(null, new Object()))); assertFalse(m.entrySet().remove(new AbstractMap.SimpleEntry(null, null))); assertFalse(m.entrySet().remove(new AbstractMap.SimpleEntry(new Object(), new Object()))); }
@Test public void removeFromKeySetTest() { Int2IntAVLTreeMap m = new Int2IntAVLTreeMap(); m.put(0, 0); assertTrue(m.keySet().remove(0)); } }
fastutil-7.1.0/test/it/unimi/dsi/fastutil/ints/Int2IntArrayMapTest.java0000664000000000000000000001711513050705451024611 0ustar rootrootpackage it.unimi.dsi.fastutil.ints; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; import it.unimi.dsi.fastutil.io.BinIO; import it.unimi.dsi.fastutil.objects.Object2ObjectOpenHashMap; import it.unimi.dsi.fastutil.objects.ObjectIterator; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.ObjectOutputStream; import java.util.AbstractMap; import java.util.Map.Entry; import org.junit.Test;
public class Int2IntArrayMapTest {
@Test public void testRemove() { Int2IntMap map = new Int2IntArrayMap(); assertFalse( map.entrySet().remove( new Object() ) ); map.put( 1, 2 ); map.put( 2, 3 ); assertFalse( map.entrySet().remove( new AbstractInt2IntMap.BasicEntry( 1, 1 ) ) ); assertFalse( map.entrySet().remove( new AbstractInt2IntMap.BasicEntry( 3, 2 ) ) ); assertTrue( map.entrySet().remove( new AbstractInt2IntMap.BasicEntry( 1, 2 ) ) ); assertFalse( map.entrySet().contains( new AbstractInt2IntMap.BasicEntry( 1, 2 ) ) ); assertEquals( map.size(), map.entrySet().size() ); assertFalse( map.containsKey( 1 ) ); }
@Test(expected = IllegalStateException.class) public void testArrayMapEmptyEntrySetThrowsExceptionOnIteratorRemove() { ObjectIterator<Entry<Integer, Integer>> iterator = new Int2IntArrayMap( 4 ).entrySet().iterator(); assertFalse( iterator.hasNext() ); iterator.remove(); }
@Test(expected = IllegalStateException.class) public void testArrayMapEmptyEntrySetThrowsExceptionTwoIteratorRemoves() { Int2IntArrayMap m = new Int2IntArrayMap(); m.put( 0, 0 ); m.put( 1, 1 ); ObjectIterator<Entry<Integer, Integer>> iterator = m.entrySet().iterator(); iterator.next(); iterator.remove(); iterator.remove(); }
@Test(expected = IllegalStateException.class) public void testArrayMapEmptyEntrySetThrowsExceptionOnFastIteratorRemove() { ObjectIterator iterator = new Int2IntArrayMap().int2IntEntrySet().fastIterator();
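// As with the two entrySet() iterator tests above, calling remove() before any call to next()
// should fail: the @Test(expected = IllegalStateException.class) annotation asserts that the
// remove() below throws.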
assertFalse( iterator.hasNext() ); iterator.remove(); } @Test(expected = IllegalStateException.class) public void testArrayMapEmptyEntrySetThrowsExceptionTwoFastIteratorRemoves() { Int2IntArrayMap m = new Int2IntArrayMap(); m.put( 0, 0 ); m.put( 1, 1 ); ObjectIterator iterator = m.int2IntEntrySet().fastIterator(); iterator.next(); iterator.remove(); iterator.remove(); } @SuppressWarnings("deprecation") @Test public void testContainsNull() { Int2IntArrayMap m = new Int2IntArrayMap( new int[] { 1, 2, 3 }, new int[] { 1, 2, 3 } ); assertFalse( m.containsKey( null ) ); assertTrue( m.get( null ) == null ); } @SuppressWarnings("boxing") @Test public void testEquals() { Int2IntArrayMap a1 = new Int2IntArrayMap(); a1.put(0, 1); a1.put(1000, -1); a1.put(2000, 3); Int2IntArrayMap a2 = new Int2IntArrayMap(); a2.put(0, 1); a2.put(1000, -1); a2.put(2000, 3); assertEquals(a1, a2); Int2IntArrayMap m = new Int2IntArrayMap( new int[] { 1, 2 }, new int[] { 1, 2 } ); assertFalse( m.equals( new Object2ObjectOpenHashMap( new Integer[] { 1, null }, new Integer[] { 1, 1 } ) ) ); } @SuppressWarnings("deprecation") @Test public void testMap() { for( int i = 0; i <= 1; i++ ) { Int2IntArrayMap m = i == 0 ? new Int2IntArrayMap() : new Int2IntArrayMap( new int[ i ], new int[ i ] ); assertEquals( 0, m.put( 1, 1 ) ); assertEquals( 1 + i, m.size() ); assertTrue( m.containsKey( 1 ) ); assertTrue( m.containsValue( 1 ) ); assertEquals( 0, m.put( 2, 2 ) ); assertTrue( m.containsKey( 2 ) ); assertTrue( m.containsValue( 2 ) ); assertEquals( 2 + i, m.size() ); assertEquals( 1, m.put( 1, 3 ) ); assertTrue( m.containsValue( 3 ) ); assertEquals( 0, m.remove( 3 ) ); assertEquals( 0, m.put( 3, 3 ) ); assertTrue( m.containsKey( 3 ) ); assertTrue( m.containsValue( 3 ) ); assertEquals( 3 + i, m.size() ); assertEquals( 3, m.get( 1 ) ); assertEquals( 2, m.get( 2 ) ); assertEquals( 3, m.get( 3 ) ); assertEquals( new IntOpenHashSet( i == 0 ? new int[] { 1, 2, 3 } : new int[] { 0, 1, 2, 3 } ), new IntOpenHashSet( m.keySet().iterator() ) ); assertEquals( new IntOpenHashSet( i == 0 ? 
new int[] { 3, 2, 3 } : new int[] { 0, 3, 2, 3 } ), new IntOpenHashSet( m.values().iterator() ) ); for( Entry e: m.entrySet() ) assertEquals( e.getValue(), m.get( e.getKey() ) ); assertTrue( i != 0 == m.entrySet().contains( new AbstractInt2IntMap.BasicEntry( 0, 0 ) ) ); assertTrue( m.entrySet().contains( new AbstractInt2IntMap.BasicEntry( 1, 3 ) ) ); assertTrue( m.entrySet().contains( new AbstractInt2IntMap.BasicEntry( 2, 2 ) ) ); assertTrue( m.entrySet().contains( new AbstractInt2IntMap.BasicEntry( 3, 3 ) ) ); assertFalse( m.entrySet().contains( new AbstractInt2IntMap.BasicEntry( 1, 2 ) ) ); assertFalse( m.entrySet().contains( new AbstractInt2IntMap.BasicEntry( 2, 1 ) ) ); assertEquals( 3, m.remove( 3 ) ); assertEquals( 2 + i, m.size() ); assertEquals( 3, m.remove( 1 ) ); assertEquals( 1 + i, m.size() ); assertFalse( m.containsKey( 1 ) ); assertEquals( 2, m.remove( 2 ) ); assertEquals( 0 + i, m.size() ); assertFalse( m.containsKey( 1 ) ); } }
@Test public void testClone() { Int2IntArrayMap m = new Int2IntArrayMap(); assertEquals( m, m.clone() ); m.put( 0, 1 ); assertEquals( m, m.clone() ); m.put( 0, 2 ); assertEquals( m, m.clone() ); m.put( 1, 2 ); assertEquals( m, m.clone() ); m.remove( 1 ); assertEquals( m, m.clone() ); }
@Test public void testSerialisation() throws IOException, ClassNotFoundException { Int2IntArrayMap m = new Int2IntArrayMap(); ByteArrayOutputStream baos = new ByteArrayOutputStream(); ObjectOutputStream oos = new ObjectOutputStream( baos ); oos.writeObject( m ); oos.close(); assertEquals( m, BinIO.loadObject( new ByteArrayInputStream( baos.toByteArray() ) ) ); m.put( 0, 1 ); m.put( 1, 2 ); baos.reset(); oos = new ObjectOutputStream( baos ); oos.writeObject( m ); oos.close(); assertEquals( m, BinIO.loadObject( new ByteArrayInputStream( baos.toByteArray() ) ) ); }
@Test public void testIteratorRemove() { Int2IntArrayMap m = new Int2IntArrayMap( new int[] { 1, 2, 3 }, new int[] { 1, 2, 3 } ); ObjectIterator<Entry<Integer, Integer>> keySet = m.entrySet().iterator(); keySet.next(); keySet.next(); keySet.remove(); assertTrue( keySet.hasNext() ); Entry next = keySet.next(); assertEquals( Integer.valueOf( 3 ), next.getKey() ); assertEquals( Integer.valueOf( 3 ), next.getValue() ); }
@SuppressWarnings({ "unchecked", "rawtypes" }) @Test public void entrySetContainsTest() { Int2IntArrayMap m = new Int2IntArrayMap(); m.put(0, 0); assertFalse(m.int2IntEntrySet().contains(new AbstractMap.SimpleEntry(new Object(), null))); assertFalse(m.entrySet().contains(new AbstractMap.SimpleEntry(null, new Object()))); assertFalse(m.entrySet().contains(new AbstractMap.SimpleEntry(null, null))); assertFalse(m.entrySet().contains(new AbstractMap.SimpleEntry(new Object(), new Object()))); }
@SuppressWarnings({ "unchecked", "rawtypes" }) @Test public void entrySetRemoveTest() { Int2IntArrayMap m = new Int2IntArrayMap(); m.put(0, 0); assertFalse(m.entrySet().remove(new AbstractMap.SimpleEntry(new Object(), null))); assertFalse(m.entrySet().remove(new AbstractMap.SimpleEntry(null, new Object()))); assertFalse(m.entrySet().remove(new AbstractMap.SimpleEntry(null, null))); assertFalse(m.entrySet().remove(new AbstractMap.SimpleEntry(new Object(), new Object()))); } }
fastutil-7.1.0/test/it/unimi/dsi/fastutil/ints/Int2IntLinkedOpenHashMapTest.java0000664000000000000000000007412713050705451026375 0ustar rootrootpackage it.unimi.dsi.fastutil.ints; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotEquals; import static org.junit.Assert.assertTrue; import
it.unimi.dsi.fastutil.Hash; import it.unimi.dsi.fastutil.HashCommon; import it.unimi.dsi.fastutil.ints.Int2IntMap.Entry; import it.unimi.dsi.fastutil.objects.ObjectBidirectionalIterator; import it.unimi.dsi.fastutil.objects.ObjectIterator; import java.io.IOException; import java.util.Map; import java.util.NoSuchElementException; import org.junit.Ignore; import org.junit.Test; public class Int2IntLinkedOpenHashMapTest { @Test public void testWrapAround() { Int2IntLinkedOpenHashMap m = new Int2IntLinkedOpenHashMap( 4, .5f ); assertEquals( 8, m.n ); // The following code inverts HashCommon.phiMix() and places strategically keys in slots 6, 7 and 0 m.put( HashCommon.invMix( 6 ), 0 ); m.put( HashCommon.invMix( 7 ), 0 ); m.put( HashCommon.invMix( 6 + 8 ), 0 ); assertNotEquals( 0, m.key[ 0 ] ); assertNotEquals( 0, m.key[ 6 ] ); assertNotEquals( 0, m.key[ 7 ] ); IntOpenHashSet keys = new IntOpenHashSet( m.keySet() ); IntIterator iterator = m.keySet().iterator(); IntOpenHashSet t = new IntOpenHashSet(); t.add( iterator.nextInt() ); t.add( iterator.nextInt() ); // Originally, this remove would move the entry in slot 0 in slot 6 and we would return the entry in 0 twice iterator.remove(); t.add( iterator.nextInt() ); assertEquals( keys, t ); } @Test public void testWrapAround2() { Int2IntLinkedOpenHashMap m = new Int2IntLinkedOpenHashMap( 4, .75f ); assertEquals( 8, m.n ); // The following code inverts HashCommon.phiMix() and places strategically keys in slots 4, 5, 6, 7 and 0 m.put( HashCommon.invMix( 4 ), 0 ); m.put( HashCommon.invMix( 5 ), 0 ); m.put( HashCommon.invMix( 4 + 8 ), 0 ); m.put( HashCommon.invMix( 5 + 8 ), 0 ); m.put( HashCommon.invMix( 4 + 16 ), 0 ); assertNotEquals( 0, m.key[ 0 ] ); assertNotEquals( 0, m.key[ 4 ] ); assertNotEquals( 0, m.key[ 5 ] ); assertNotEquals( 0, m.key[ 6 ] ); assertNotEquals( 0, m.key[ 7 ] ); //System.err.println(Arraym.toString( m.key )); IntOpenHashSet keys = new IntOpenHashSet( m.keySet() ); IntIterator iterator = m.keySet().iterator(); IntOpenHashSet t = new IntOpenHashSet(); assertTrue( t.add( iterator.nextInt() ) ); iterator.remove(); //System.err.println(Arraym.toString( m.key )); assertTrue( t.add( iterator.nextInt() ) ); //System.err.println(Arraym.toString( m.key )); // Originally, this remove would move the entry in slot 0 in slot 6 and we would return the entry in 0 twice assertTrue( t.add( iterator.nextInt() ) ); //System.err.println(Arraym.toString( m.key )); assertTrue( t.add( iterator.nextInt() ) ); iterator.remove(); //System.err.println(Arraym.toString( m.key )); assertTrue( t.add( iterator.nextInt() ) ); assertEquals( 3, m.size() ); assertEquals( keys, t ); } @Test public void testWrapAround3() { Int2IntLinkedOpenHashMap m = new Int2IntLinkedOpenHashMap( 4, .75f ); assertEquals( 8, m.n ); // The following code inverts HashCommon.phiMix() and places strategically keys in slots 5, 6, 7, 0 and 1 m.put( HashCommon.invMix( 5 ), 0 ); m.put( HashCommon.invMix( 5 + 8 ), 0 ); m.put( HashCommon.invMix( 5 + 16 ), 0 ); m.put( HashCommon.invMix( 5 + 32 ), 0 ); m.put( HashCommon.invMix( 5 + 64 ), 0 ); assertNotEquals( 0, m.key[ 5 ] ); assertNotEquals( 0, m.key[ 6 ] ); assertNotEquals( 0, m.key[ 7 ] ); assertNotEquals( 0, m.key[ 0 ] ); assertNotEquals( 0, m.key[ 1 ] ); //System.err.println(Arraym.toString( m.key )); IntOpenHashSet keys = new IntOpenHashSet( m.keySet() ); IntIterator iterator = m.keySet().iterator(); IntOpenHashSet t = new IntOpenHashSet(); assertTrue( t.add( iterator.nextInt() ) ); iterator.remove(); //System.err.println(Arraym.toString( 
m.key )); assertTrue( t.add( iterator.nextInt() ) ); iterator.remove(); //System.err.println(Arraym.toString( m.key )); // Originally, this remove would move the entry in slot 0 in slot 6 and we would return the entry in 0 twice assertTrue( t.add( iterator.nextInt() ) ); iterator.remove(); //System.err.println(Arraym.toString( m.key )); assertTrue( t.add( iterator.nextInt() ) ); iterator.remove(); //System.err.println(Arraym.toString( m.key )); assertTrue( t.add( iterator.nextInt() ) ); iterator.remove(); assertEquals( 0, m.size() ); assertEquals( keys, t ); } private static java.util.Random r = new java.util.Random( 0 ); private static int genKey() { return r.nextInt(); } private static int genValue() { return r.nextInt(); } private static boolean valEquals( Object o1, Object o2 ) { return o1 == null ? o2 == null : o1.equals( o2 ); } @SuppressWarnings({ "unchecked", "deprecation" }) protected static void test( int n, float f ) throws IOException, ClassNotFoundException { Int2IntLinkedOpenHashMap m = new Int2IntLinkedOpenHashMap( Hash.DEFAULT_INITIAL_SIZE, f ); Map t = new java.util.LinkedHashMap(); /* First of all, we fill t with random data. */ for ( int i = 0; i < n; i++ ) t.put( ( Integer.valueOf( genKey() ) ), ( Integer.valueOf( genValue() ) ) ); /* Now we add to m the same data */ m.putAll( t ); assertTrue( "Error: !m.equals(t) after insertion", m.equals( t ) ); assertTrue( "Error: !t.equals(m) after insertion", t.equals( m ) ); /* Now we check that m actually holds that data. */ for ( java.util.Iterator i = t.entrySet().iterator(); i.hasNext(); ) { java.util.Map.Entry e = (java.util.Map.Entry)i.next(); assertTrue( "Error: m and t differ on an entry (" + e + ") after insertion (iterating on t)", valEquals( e.getValue(), m.get( e.getKey() ) ) ); } /* Now we check that m actually holds that data, but iterating on m. */ for ( java.util.Iterator i = m.entrySet().iterator(); i.hasNext(); ) { java.util.Map.Entry e = (java.util.Map.Entry)i.next(); assertTrue( "Error: m and t differ on an entry (" + e + ") after insertion (iterating on m)", valEquals( e.getValue(), t.get( e.getKey() ) ) ); } /* Now we check that m actually holds the same keys. */ for ( java.util.Iterator i = t.keySet().iterator(); i.hasNext(); ) { Object o = i.next(); assertTrue( "Error: m and t differ on a key (" + o + ") after insertion (iterating on t)", m.containsKey( o ) ); assertTrue( "Error: m and t differ on a key (" + o + ", in keySet()) after insertion (iterating on t)", m.keySet().contains( o ) ); } /* Now we check that m actually holds the same keys, but iterating on m. */ for ( java.util.Iterator i = m.keySet().iterator(); i.hasNext(); ) { Object o = i.next(); assertTrue( "Error: m and t differ on a key after insertion (iterating on m)", t.containsKey( o ) ); assertTrue( "Error: m and t differ on a key (in keySet()) after insertion (iterating on m)", t.keySet().contains( o ) ); } /* Now we check that m actually hold the same values. */ for ( java.util.Iterator i = t.values().iterator(); i.hasNext(); ) { Object o = i.next(); assertTrue( "Error: m and t differ on a value after insertion (iterating on t)", m.containsValue( o ) ); assertTrue( "Error: m and t differ on a value (in values()) after insertion (iterating on t)", m.values().contains( o ) ); } /* Now we check that m actually hold the same values, but iterating on m. 
*/ for ( java.util.Iterator i = m.values().iterator(); i.hasNext(); ) { Object o = i.next(); assertTrue( "Error: m and t differ on a value after insertion (iterating on m)", t.containsValue( o ) ); assertTrue( "Error: m and t differ on a value (in values()) after insertion (iterating on m)", t.values().contains( o ) ); } /* * Now we check that inquiries about random data give the same answer in m and t. For m we * use the polymorphic method. */ for ( int i = 0; i < n; i++ ) { int T = genKey(); assertTrue( "Error: divergence in keys between t and m (polymorphic method)", m.containsKey( ( Integer.valueOf( T ) ) ) == t.containsKey( ( Integer.valueOf( T ) ) ) ); assertFalse( "Error: divergence between t and m (polymorphic method)", ( m.get( T ) != ( 0 ) ) != ( ( t.get( ( Integer.valueOf( T ) ) ) == null ? ( 0 ) : ( ( ( t.get( ( Integer.valueOf( T ) ) ) ).intValue() ) ) ) != ( 0 ) ) || t.get( ( Integer.valueOf( T ) ) ) != null && !m.get( ( Integer.valueOf( T ) ) ).equals( t.get( ( Integer.valueOf( T ) ) ) ) ); } /* * Again, we check that inquiries about random data give the same answer in m and t, but for * m we use the standard method. */ for ( int i = 0; i < n; i++ ) { int T = genKey(); assertTrue( "Error: divergence between t and m (standard method)", valEquals( m.get( ( Integer.valueOf( T ) ) ), t.get( ( Integer.valueOf( T ) ) ) ) ); } /* Now we put and remove random data in m and t, checking that the result is the same. */ for ( int i = 0; i < 20 * n; i++ ) { int T = genKey(); int U = genValue(); assertTrue( "Error: divergence in put() between t and m", valEquals( m.put( ( Integer.valueOf( T ) ), ( Integer.valueOf( U ) ) ), t.put( ( Integer.valueOf( T ) ), ( Integer.valueOf( U ) ) ) ) ); T = genKey(); assertTrue( "Error: divergence in remove() between t and m", valEquals( m.remove( ( Integer.valueOf( T ) ) ), t.remove( ( Integer.valueOf( T ) ) ) ) ); } assertTrue( "Error: !m.equals(t) after removal", m.equals( t ) ); assertTrue( "Error: !t.equals(m) after removal", t.equals( m ) ); /* Now we check that m actually holds the same data. */ for ( java.util.Iterator i = t.entrySet().iterator(); i.hasNext(); ) { java.util.Map.Entry e = (java.util.Map.Entry)i.next(); assertTrue( "Error: m and t differ on an entry (" + e + ") after removal (iterating on t)", valEquals( e.getValue(), m.get( e.getKey() ) ) ); } /* Now we check that m actually holds that data, but iterating on m. */ for ( java.util.Iterator i = m.entrySet().iterator(); i.hasNext(); ) { java.util.Map.Entry e = (java.util.Map.Entry)i.next(); assertTrue( "Error: m and t differ on an entry (" + e + ") after removal (iterating on m)", valEquals( e.getValue(), t.get( e.getKey() ) ) ); } /* Now we check that m actually holds the same keys. */ for ( java.util.Iterator i = t.keySet().iterator(); i.hasNext(); ) { Object o = i.next(); assertTrue( "Error: m and t differ on a key (" + o + ") after removal (iterating on t)", m.containsKey( o ) ); assertTrue( "Error: m and t differ on a key (" + o + ", in keySet()) after removal (iterating on t)", m.keySet().contains( o ) ); } /* Now we check that m actually holds the same keys, but iterating on m. */ for ( java.util.Iterator i = m.keySet().iterator(); i.hasNext(); ) { Object o = i.next(); assertTrue( "Error: m and t differ on a key after removal (iterating on m)", t.containsKey( o ) ); assertTrue( "Error: m and t differ on a key (in keySet()) after removal (iterating on m)", t.keySet().contains( o ) ); } /* Now we check that m actually hold the same values. 
*/ for ( java.util.Iterator i = t.values().iterator(); i.hasNext(); ) { Object o = i.next(); assertTrue( "Error: m and t differ on a value after removal (iterating on t)", m.containsValue( o ) ); assertTrue( "Error: m and t differ on a value (in values()) after removal (iterating on t)", m.values().contains( o ) ); } /* Now we check that m actually hold the same values, but iterating on m. */ for ( java.util.Iterator i = m.values().iterator(); i.hasNext(); ) { Object o = i.next(); assertTrue( "Error: m and t differ on a value after removal (iterating on m)", t.containsValue( o ) ); assertTrue( "Error: m and t differ on a value (in values()) after removal (iterating on m)", t.values().contains( o ) ); } int h = m.hashCode(); /* Now we save and read m. */ java.io.File ff = new java.io.File( "it.unimi.dsi.fastutil.test" ); java.io.OutputStream os = new java.io.FileOutputStream( ff ); java.io.ObjectOutputStream oos = new java.io.ObjectOutputStream( os ); oos.writeObject( m ); oos.close(); java.io.InputStream is = new java.io.FileInputStream( ff ); java.io.ObjectInputStream ois = new java.io.ObjectInputStream( is ); m = (Int2IntLinkedOpenHashMap)ois.readObject(); ois.close(); ff.delete(); assertEquals( "Error: hashCode() changed after save/read", h, m.hashCode() ); assertEquals( "Error: clone()", m, m.clone() ); /* Now we check that m actually holds that data. */ for ( java.util.Iterator i = t.keySet().iterator(); i.hasNext(); ) { Object o = i.next(); assertTrue( "Error: m and t differ on an entry after save/read", valEquals( m.get( o ), t.get( o ) ) ); } /* Now we put and remove random data in m and t, checking that the result is the same. */ for ( int i = 0; i < 20 * n; i++ ) { int T = genKey(); int U = genValue(); assertTrue( "Error: divergence in put() between t and m after save/read", valEquals( m.put( ( Integer.valueOf( T ) ), ( Integer.valueOf( U ) ) ), t.put( ( Integer.valueOf( T ) ), ( Integer.valueOf( U ) ) ) ) ); T = genKey(); assertTrue( "Error: divergence in remove() between t and m after save/read", valEquals( m.remove( ( Integer.valueOf( T ) ) ), t.remove( ( Integer.valueOf( T ) ) ) ) ); } assertTrue( "Error: !m.equals(t) after post-save/read removal", m.equals( t ) ); assertTrue( "Error: !t.equals(m) after post-save/read removal", t.equals( m ) ); /* Now we play with iterators. 
*/ { java.util.ListIterator i, j; Object J = null; Map.Entry E = null, F = null; i = (java.util.ListIterator)m.entrySet().iterator(); j = new java.util.LinkedList( t.entrySet() ).listIterator(); for ( int k = 0; k < 2 * n; k++ ) { assertTrue( "Error: divergence in hasNext()" , i.hasNext() == j.hasNext() ); assertTrue( "Error: divergence in hasPrevious()" , i.hasPrevious() == j.hasPrevious() ); if ( r.nextFloat() < .8 && i.hasNext() ) { assertTrue( "Error: divergence in next()" , ( E = (java.util.Map.Entry)i.next() ).getKey().equals( J = ( F = (Map.Entry)j.next() ).getKey() ) ); if ( r.nextFloat() < 0.3 ) { i.remove(); j.remove(); t.remove( J ); } else if ( r.nextFloat() < 0.3 ) { Integer U = Integer.valueOf( genValue() ); E.setValue( U ); t.put( F.getKey(), U ); } } else if ( r.nextFloat() < .2 && i.hasPrevious() ) { assertTrue( "Error: divergence in previous()" , ( E = (java.util.Map.Entry)i.previous() ).getKey().equals( J = ( F = (Map.Entry)j.previous() ).getKey() ) ); if ( r.nextFloat() < 0.3 ) { i.remove(); j.remove(); t.remove( J ); } else if ( r.nextFloat() < 0.3 ) { Integer U = Integer.valueOf( genValue() ); E.setValue( U ); t.put( F.getKey(), U ); } } assertTrue( "Error: divergence in nextIndex()" , i.nextIndex() == j.nextIndex() ); assertTrue( "Error: divergence in previousIndex()" , i.previousIndex() == j.previousIndex() ); } } if ( t.size() > 0 ) { java.util.ListIterator i, j; Object J = null; j = new java.util.LinkedList( t.keySet() ).listIterator(); int e = r.nextInt( t.size() ); Object from; do from = j.next(); while ( e-- != 0 ); i = (java.util.ListIterator)m.keySet().iterator( ( ( ( (Integer)( from ) ).intValue() ) ) ); for ( int k = 0; k < 2 * n; k++ ) { assertTrue( "Error: divergence in hasNext() (iterator with starting point " + from + ")" , i.hasNext() == j.hasNext() ); assertTrue( "Error: divergence in hasPrevious() (iterator with starting point " + from + ")" , i.hasPrevious() == j.hasPrevious() ); if ( r.nextFloat() < .8 && i.hasNext() ) { assertTrue( "Error: divergence in next() (iterator with starting point " + from + ")" , i.next().equals( J = j.next() ) ); if ( r.nextFloat() < 0.5 ) { i.remove(); j.remove(); t.remove( J ); } } else if ( r.nextFloat() < .2 && i.hasPrevious() ) { assertTrue( "Error: divergence in previous() (iterator with starting point " + from + ")" , i.previous().equals( J = j.previous() ) ); if ( r.nextFloat() < 0.5 ) { i.remove(); j.remove(); t.remove( J ); } } assertTrue( "Error: divergence in nextIndex() (iterator with starting point " + from + ")" , i.nextIndex() == j.nextIndex() ); assertTrue( "Error: divergence in previousIndex() (iterator with starting point " + from + ")" , i.previousIndex() == j.previousIndex() ); } } /* Now we check that m actually holds that data. */ assertTrue( "Error: ! m.equals( t ) after iteration" , m.equals( t ) ); assertTrue( "Error: ! t.equals( m ) after iteration" , t.equals( m ) ); /* Now we take out of m everything, and check that it is empty. */ for ( java.util.Iterator i = t.keySet().iterator(); i.hasNext(); ) m.remove( i.next() ); assertTrue( "Error: m is not empty (as it should be)", m.isEmpty() ); /* * Now we check that the iteration order of m is properly affected, using random movements */ { m.clear(); final java.util.Deque d = new java.util.ArrayDeque(); for ( int k = 0; k < 2 * n; k++ ) { int T = genKey(); int U = genValue(); boolean dr = d.remove( ( Integer.valueOf( T ) ) ); int rU = m.put( T, U ); assertTrue( "Error: deque reported previous key differently than map." 
, dr == ( m.defaultReturnValue() != rU ) ); if ( 1 == ( r.nextInt( 2 ) % 2 ) ) { d.addFirst( ( Integer.valueOf( T ) ) ); m.getAndMoveToFirst( T ); } else { d.addLast( ( Integer.valueOf( T ) ) ); m.getAndMoveToLast( T ); } } // Iteration order should be identical assertTrue( "Error: Iteration order of map different than iteration order of deque." , new java.util.ArrayList( m.keySet() ).equals( new java.util.ArrayList( d ) ) ); } m.clear(); t.clear(); m.trim(); assertTrue( "Error: !m.equals(t) after rehash()", m.equals( t ) ); assertTrue( "Error: !t.equals(m) after rehash()", t.equals( m ) ); m.trim(); assertTrue( "Error: !m.equals(t) after trim()", m.equals( t ) ); assertTrue( "Error: !t.equals(m) after trim()", t.equals( m ) ); return; } @Test public void test1() throws IOException, ClassNotFoundException { test( 1, Hash.DEFAULT_LOAD_FACTOR ); test( 1, Hash.FAST_LOAD_FACTOR ); test( 1, Hash.VERY_FAST_LOAD_FACTOR ); } @Test public void test10() throws IOException, ClassNotFoundException { test( 10, Hash.DEFAULT_LOAD_FACTOR ); test( 10, Hash.FAST_LOAD_FACTOR ); test( 10, Hash.VERY_FAST_LOAD_FACTOR ); } @Test public void test100() throws IOException, ClassNotFoundException { test( 100, Hash.DEFAULT_LOAD_FACTOR ); test( 100, Hash.FAST_LOAD_FACTOR ); test( 100, Hash.VERY_FAST_LOAD_FACTOR ); } @Ignore("Too long") @Test public void test1000() throws IOException, ClassNotFoundException { test( 1000, Hash.DEFAULT_LOAD_FACTOR ); test( 1000, Hash.FAST_LOAD_FACTOR ); test( 1000, Hash.VERY_FAST_LOAD_FACTOR ); } @Test public void testAddTo() { Int2IntLinkedOpenHashMap m = new Int2IntLinkedOpenHashMap( Hash.DEFAULT_INITIAL_SIZE ); assertEquals( 0, m.addTo( 0, 2 ) ); assertEquals( 2, m.get( 0 ) ); assertEquals( 2, m.addTo( 0, 3 ) ); assertEquals( 5, m.get( 0 ) ); ObjectIterator fastIterator = m.int2IntEntrySet().fastIterator(); Int2IntMap.Entry next = fastIterator.next(); assertEquals( 0, next.getIntKey() ); assertEquals( 5, next.getIntValue() ); assertFalse( fastIterator.hasNext() ); m.defaultReturnValue( -1 ); assertEquals( -1, m.addTo( 1, 1 ) ); assertEquals( 0, m.get( 1 ) ); assertEquals( 0, m.addTo( 1, 1 ) ); assertEquals( 1, m.get( 1 ) ); assertEquals( 1, m.addTo( 1, -2 ) ); assertEquals( -1, m.get( 1 ) ); fastIterator = m.int2IntEntrySet().fastIterator(); next = fastIterator.next(); assertEquals( 0, next.getIntKey() ); assertEquals( 5, next.getIntValue() ); next = fastIterator.next(); assertEquals( 1, next.getIntKey() ); assertEquals( -1, next.getIntValue() ); assertFalse( fastIterator.hasNext() ); for( int i = 0; i < 100; i++ ) m.addTo( i, 1 ); assertEquals( 0, m.firstIntKey() ); assertEquals( 99, m.lastIntKey() ); } @SuppressWarnings("deprecation") @Test public void testPut() { Int2IntLinkedOpenHashMap m = new Int2IntLinkedOpenHashMap( Hash.DEFAULT_INITIAL_SIZE ); assertEquals( 0, m.put( 0, 2 ) ); assertEquals( 2, m.put( 0, 3 ) ); assertEquals( 3, m.get( 0 ) ); assertEquals( null, m.put( Integer.valueOf( 1 ), Integer.valueOf( 2 ) ) ); assertEquals( Integer.valueOf( 2 ), m.put( Integer.valueOf( 1 ), Integer.valueOf( 3 ) ) ); assertEquals( Integer.valueOf( 3 ), m.get( Integer.valueOf( 0 ) ) ); } @Test public void testRemove() { Int2IntLinkedOpenHashMap m = new Int2IntLinkedOpenHashMap( Hash.DEFAULT_INITIAL_SIZE ); m.defaultReturnValue( -1 ); for( int i = 0; i < 100; i++ ) assertEquals( -1, m.put( i, i ) ); for( int i = 0; i < 100; i++ ) assertEquals( -1, m.remove( i + 100 ) ); for( int i = 50; i < 150; i++ ) assertEquals( i % 100, m.remove( i % 100 ) ); } @Test public void testRemove0() { 
Int2IntLinkedOpenHashMap s = new Int2IntLinkedOpenHashMap( Hash.DEFAULT_INITIAL_SIZE ); for( int i = -1; i <= 1; i++ ) assertEquals( 0, s.put( i, i ) ); s.remove( 0 ); IntIterator iterator = s.keySet().iterator(); IntOpenHashSet z = new IntOpenHashSet(); z.add( iterator.nextInt() ); z.add( iterator.nextInt() ); assertFalse( iterator.hasNext() ); assertEquals( new IntOpenHashSet( new int[] { -1, 1 } ), z ); s = new Int2IntLinkedOpenHashMap( Hash.DEFAULT_INITIAL_SIZE ); for( int i = -1; i <= 1; i++ ) assertEquals( 0, s.put( i, i ) ); iterator = s.keySet().iterator(); assertEquals( -1, iterator.nextInt() ); assertEquals( 0, iterator.nextInt() ); iterator.remove(); assertEquals( 1, iterator.nextInt() ); assertFalse( iterator.hasNext() ); assertFalse( s.containsKey( 0 ) ); iterator = s.keySet().iterator(); assertEquals( -1, iterator.nextInt() ); assertEquals( 1, iterator.nextInt() ); assertFalse( iterator.hasNext() ); } @Test public void testFirtLast0() { Int2IntLinkedOpenHashMap s; s = new Int2IntLinkedOpenHashMap( Hash.DEFAULT_INITIAL_SIZE ); for( int i = 1; i < 100; i++ ) assertEquals( 0, s.put( i, i ) ); for( int i = 1; i < 100; i++ ) assertEquals( i, s.removeFirstInt() ); assertTrue( s.isEmpty() ); s = new Int2IntLinkedOpenHashMap( Hash.DEFAULT_INITIAL_SIZE ); for( int i = 0; i < 100; i++ ) assertEquals( 0, s.put( i, i ) ); for( int i = 100; i-- != 0; ) assertEquals( i, s.removeLastInt() ); assertTrue( s.isEmpty() ); s = new Int2IntLinkedOpenHashMap( Hash.DEFAULT_INITIAL_SIZE ); for( int i = 100; i-- != 0; ) assertEquals( 0, s.put( i, i ) ); for( int i = 0; i < 100; i++ ) assertEquals( i, s.removeLastInt() ); assertTrue( s.isEmpty() ); s = new Int2IntLinkedOpenHashMap( Hash.DEFAULT_INITIAL_SIZE ); for( int i = 100; i-- != 0; ) assertEquals( 0, s.put( i, i ) ); for( int i = 100; i-- != 0; ) assertEquals( i, s.removeFirstInt() ); assertTrue( s.isEmpty() ); } @Test public void testContainsValue() { Int2IntLinkedOpenHashMap m = new Int2IntLinkedOpenHashMap( Hash.DEFAULT_INITIAL_SIZE ); assertEquals( 0, m.put( 0, 2 ) ); assertEquals( 0, m.put( 1, 3 ) ); assertTrue( m.containsValue( 2 ) ); assertTrue( m.containsValue( 3 ) ); assertFalse( m.containsValue( 4 ) ); assertTrue( m.containsKey( 0 ) ); assertTrue( m.containsKey( 1 ) ); assertFalse( m.containsKey( 2 ) ); } @Test public void testIterator() { Int2IntLinkedOpenHashMap m = new Int2IntLinkedOpenHashMap( Hash.DEFAULT_INITIAL_SIZE ); m.defaultReturnValue( -1 ); for( int i = 0; i < 100; i++ ) assertEquals( -1, m.put( i, i ) ); assertEquals( 0, m.firstIntKey() ); IntListIterator iterator = (IntListIterator)m.keySet().iterator(); for( int i = 0; i <= 100; i++ ) { assertEquals( Integer.toString( i ), i - 1, iterator.previousIndex() ); assertEquals( Integer.toString( i ), i, iterator.nextIndex() ); if ( i != 100 ) assertEquals( Integer.toString( i ), i, iterator.nextInt() ); } iterator = (IntListIterator)m.keySet().iterator( m.lastIntKey() ); for( int i = 100; i-- != 0; ) { assertEquals( Integer.toString( i ), i, iterator.previousIndex() ); assertEquals( Integer.toString( i ), i + 1, iterator.nextIndex() ); if ( i != 0 ) assertEquals( Integer.toString( i ), i, iterator.previousInt() ); } iterator = (IntListIterator)m.keySet().iterator( 50 ); for( int i = 50; i < 100; i++ ) { assertEquals( Integer.toString( i ), i, iterator.previousIndex() ); assertEquals( Integer.toString( i ), i + 1, iterator.nextIndex() ); if ( i != 99 ) assertEquals( Integer.toString( i ), i + 1, iterator.nextInt() ); } iterator = (IntListIterator)m.keySet().iterator( 50 ); for( 
int i = 50; i-- != -1; ) { assertEquals( Integer.toString( i ), i + 1, iterator.previousIndex() ); assertEquals( Integer.toString( i ), i + 2, iterator.nextIndex() ); if ( i != -1 ) assertEquals( Integer.toString( i ), i + 1, iterator.previousInt() ); } iterator = (IntListIterator)m.keySet().iterator( 50 ); for( int i = 50; i-- != -1; ) assertEquals( Integer.toString( i ), i + 1, iterator.previousInt() ); assertEquals( -1, iterator.previousIndex() ); assertEquals( 0, iterator.nextIndex() ); iterator = (IntListIterator)m.keySet().iterator( 50 ); for( int i = 50; i < 100 - 1; i++ ) assertEquals( Integer.toString( i ), i + 1, iterator.nextInt() ); assertEquals( 99, iterator.previousIndex() ); assertEquals( 100, iterator.nextIndex() ); iterator = (IntListIterator)m.keySet().iterator( 50 ); iterator.previousInt(); iterator.remove(); assertEquals( 49, iterator.previousIndex() ); assertEquals( 49, iterator.previousInt() ); iterator = (IntListIterator)m.keySet().iterator( 49 ); iterator.nextInt(); iterator.remove(); assertEquals( 50, iterator.nextIndex() ); assertEquals( 52, iterator.nextInt() ); } @Test(expected=NoSuchElementException.class) public void testIteratorMissingElement() { Int2IntLinkedOpenHashMap m = new Int2IntLinkedOpenHashMap( Hash.DEFAULT_INITIAL_SIZE ); m.defaultReturnValue( -1 ); for( int i = 0; i < 100; i++ ) assertEquals( -1, m.put( i, i ) ); m.keySet().iterator( 1000 ); } @Test public void testPutAndMove() { Int2IntLinkedOpenHashMap m = new Int2IntLinkedOpenHashMap( Hash.DEFAULT_INITIAL_SIZE ); m.defaultReturnValue( Integer.MIN_VALUE ); for( int i = 0; i < 100; i++ ) assertEquals( Integer.MIN_VALUE, m.putAndMoveToFirst( i, i ) ); m.clear(); for( int i = 0; i < 100; i++ ) assertEquals( Integer.MIN_VALUE, m.putAndMoveToLast( i, i ) ); assertEquals( Integer.MIN_VALUE, m.putAndMoveToFirst( -1, -1 ) ); assertEquals( -1, m.firstIntKey() ); assertEquals( Integer.MIN_VALUE, m.putAndMoveToFirst( -2, -2 ) ); assertEquals( -2, m.firstIntKey() ); assertEquals( -1, m.putAndMoveToFirst( -1, -1 ) ); assertEquals( -1, m.firstIntKey() ); assertEquals( -1, m.putAndMoveToFirst( -1, -1 ) ); assertEquals( -1, m.firstIntKey() ); assertEquals( -1, m.putAndMoveToLast( -1, -1 ) ); assertEquals( -1, m.lastIntKey() ); assertEquals( Integer.MIN_VALUE, m.putAndMoveToLast( 100, 100 ) ); assertEquals( 100, m.lastIntKey() ); assertEquals( Integer.MIN_VALUE, m.putAndMoveToLast( 101, 101 ) ); assertEquals( 101, m.lastIntKey() ); assertEquals( 100, m.putAndMoveToLast( 100, 100 ) ); assertEquals( 100, m.lastIntKey() ); assertEquals( 100, m.putAndMoveToLast( 100, 100 ) ); assertEquals( 100, m.lastIntKey() ); assertEquals( 100, m.putAndMoveToFirst( 100, 100 ) ); assertEquals( 100, m.firstIntKey() ); } @Test public void testRemoveFirstLast() { Int2IntLinkedOpenHashMap m = new Int2IntLinkedOpenHashMap( Hash.DEFAULT_INITIAL_SIZE ); m.defaultReturnValue( -1 ); for( int i = 0; i < 100; i++ ) assertEquals( -1, m.put( i, 1 + i ) ); assertEquals( 1, m.removeFirstInt() ); assertEquals( 2, m.removeFirstInt() ); assertEquals( 100, m.removeLastInt() ); } @Test(expected=NoSuchElementException.class) public void testRemoveFirstEmpty() { new Int2IntLinkedOpenHashMap( Hash.DEFAULT_INITIAL_SIZE ).removeFirstInt(); } @Test(expected=NoSuchElementException.class) public void testRemoveLastEmpty() { new Int2IntLinkedOpenHashMap( Hash.DEFAULT_INITIAL_SIZE ).removeLastInt(); } @Test public void testFastIterator() { Int2IntLinkedOpenHashMap s = new Int2IntLinkedOpenHashMap( Hash.DEFAULT_INITIAL_SIZE ); s.defaultReturnValue( -1 ); for( 
int i = 0; i < 100; i++ ) assertEquals( -1, s.put( i, i ) ); ObjectIterator fastIterator = s.int2IntEntrySet().fastIterator(); Entry entry = fastIterator.next(); int key = entry.getIntKey(); entry.setValue( -1000 ); assertEquals( s.get( key ), -1000 ); fastIterator.remove(); assertEquals( s.get( key ), -1 ); } @Test(expected=NoSuchElementException.class) public void testNextAtEnd() { Int2IntLinkedOpenHashMap m = new Int2IntLinkedOpenHashMap(); m.addTo( 1, 1 ); m.addTo( 2, 2 ); m.addTo( 3, 3 ); ObjectBidirectionalIterator> iterator = m.entrySet().iterator( m.entrySet().last() ); assertFalse( iterator.hasNext() ); iterator.next(); } @Test(expected=NoSuchElementException.class) public void testNextAtEndFast() { Int2IntLinkedOpenHashMap m = new Int2IntLinkedOpenHashMap(); m.addTo( 1, 1 ); m.addTo( 2, 2 ); m.addTo( 3, 3 ); ObjectBidirectionalIterator iterator = m.int2IntEntrySet().iterator( m.int2IntEntrySet().last() ); assertFalse( iterator.hasNext() ); iterator.next(); } @Test(expected=NoSuchElementException.class) public void testPreviousAtStart() { Int2IntLinkedOpenHashMap m = new Int2IntLinkedOpenHashMap(); m.addTo( 1, 1 ); m.addTo( 2, 2 ); m.addTo( 3, 3 ); ObjectBidirectionalIterator> iterator = m.entrySet().iterator(); assertFalse( iterator.hasPrevious() ); iterator.previous(); } @Test(expected=NoSuchElementException.class) public void testPreviousAtStartFast() { Int2IntLinkedOpenHashMap m = new Int2IntLinkedOpenHashMap(); m.addTo( 1, 1 ); m.addTo( 2, 2 ); m.addTo( 3, 3 ); ObjectBidirectionalIterator iterator = m.int2IntEntrySet().iterator(); assertFalse( iterator.hasPrevious() ); iterator.previous(); } } fastutil-7.1.0/test/it/unimi/dsi/fastutil/ints/Int2IntMapsTest.java0000664000000000000000000000150613050705451023772 0ustar rootrootpackage it.unimi.dsi.fastutil.ints; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNull; import java.util.Collections; import org.junit.Test; public class Int2IntMapsTest { @SuppressWarnings("boxing") @Test public void testSingletonMapEqualsShouldCheckTheTypeOfParamters() { Int2IntMap map = Int2IntMaps.singleton(1, 2); assertFalse(map.equals(Collections.singletonMap(null, 2))); assertFalse(map.equals(Collections.singletonMap(1, null))); assertFalse(map.equals(Collections.singletonMap("foo", 2))); assertFalse(map.equals(Collections.singletonMap(1, "foo"))); } @Test public void testToArrayShouldNullElementAfterLastEntry() { Int2IntMap map = Int2IntMaps.EMPTY_MAP; Object[] values = new Object[] { "test" }; map.entrySet().toArray(values); assertNull(values[0]); } } fastutil-7.1.0/test/it/unimi/dsi/fastutil/ints/Int2IntOpenCustomHashMapTest.java0000664000000000000000000002474513050705451026442 0ustar rootrootpackage it.unimi.dsi.fastutil.ints; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; import it.unimi.dsi.fastutil.Hash; import java.io.IOException; import java.io.Serializable; import java.util.HashMap; import java.util.Map.Entry; import org.junit.Ignore; import org.junit.Test; @SuppressWarnings("rawtypes") /** Not a particularly good test, but it will check that we use everywhere the same hashing strategy. 
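 * <p>For instance, {@code testGetNullKey()} installs a strategy under which keys congruent modulo 10
 * are equal, so the entry inserted for key 10 must be reachable (and removable) through key 0.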
*/ public class Int2IntOpenCustomHashMapTest { @Test public void testGetNullKey() { final Int2IntOpenCustomHashMap s = new Int2IntOpenCustomHashMap( new IntHash.Strategy() { @Override public int hashCode( int o ) { return o % 10; } @Override public boolean equals( int a, int b ) { return ( a - b ) % 10 == 0; } } ); s.put( 10, 10 ); assertTrue( s.containsKey( 0 ) ); Entry e = s.entrySet().iterator().next(); assertEquals( 10, e.getKey().intValue() ); assertEquals( 10, e.getValue().intValue() ); s.remove( 0 ); assertTrue( s.isEmpty() ); } private static final class Strategy implements IntHash.Strategy, Serializable { private static final long serialVersionUID = 1L; @Override public int hashCode( int e ) { return Integer.reverse( e ); } @Override public boolean equals( int a, int b ) { return a == b; } } private final static Strategy strategy = new Strategy(); private static java.util.Random r = new java.util.Random( 0 ); private static int genKey() { return r.nextInt( 10 ); } @SuppressWarnings("boxing") private static void checkTable( Int2IntOpenCustomHashMap s ) { final int[]key = s.key; assert ( s.n & -s.n ) == s.n : "Table length is not a power of two: " + s.n; assert s.n == s.key.length - 1; int n = s.n; while ( n-- != 0 ) if ( key[ n ] != 0 && !s.containsKey( key[ n ] ) ) throw new AssertionError( "Hash table has key " + key[ n ] + " marked as occupied, but the key does not belong to the table" ); java.util.HashMap t = new java.util.HashMap(); for ( int i = s.size(); i-- != 0; ) if ( key[ i ] != 0 && t.put( key[ i ], key[ i ] ) != null ) throw new AssertionError( "Key " + key[ i ] + " appears twice" ); } private static void printProbes( Int2IntOpenCustomHashMap m ) { long totProbes = 0; double totSquareProbes = 0; int maxProbes = 0; final int[] key = m.key; final double f = (double)m.size / m.n; for ( int i = 0, c = 0; i < m.n; i++ ) { if ( key[ i ] != 0 ) c++; else { if ( c != 0 ) { final long p = ( c + 1 ) * ( c + 2 ) / 2; totProbes += p; totSquareProbes += (double)p * p; } maxProbes = Math.max( c, maxProbes ); c = 0; totProbes++; totSquareProbes++; } } final double expected = (double)totProbes / m.n; System.err.println( "Expected probes: " + ( 3 * Math.sqrt( 3 ) * ( f / ( ( 1 - f ) * ( 1 - f ) ) ) + 4 / ( 9 * f ) - 1 ) + "; actual: " + expected + "; stddev: " + Math.sqrt( totSquareProbes / m.n - expected * expected ) + "; max probes: " + maxProbes ); } @SuppressWarnings("deprecation") private static void test( int n, float f ) throws IOException, ClassNotFoundException { int c; final Integer key[] = new Integer[ (int)Math.ceil( n * f ) ]; HashMap t = new HashMap(); /* First of all, we fill t with random data. */ for ( int i = 0; i < key.length; i++ ) t.put( key[ i ] = new Integer( genKey() ), key[ i ] ); Int2IntOpenCustomHashMap m = new Int2IntOpenCustomHashMap( Hash.DEFAULT_INITIAL_SIZE, f, strategy ); /* Now we add to m the same data */ m.putAll( t ); checkTable( m ); assertTrue( "Error: !m.equals(t) after insertion", m.equals( t ) ); assertTrue( "Error: !t.equals(m) after insertion", t.equals( m ) ); printProbes( m ); /* Now we check that m actually holds that data. */ for ( java.util.Iterator i = t.keySet().iterator(); i.hasNext(); ) { Object e = i.next(); assertTrue( "Error: m and t differ on a key (" + e + ") after insertion (iterating on t)", m.get( e ).equals( e ) ); } /* Now we check that m actually holds that data, but iterating on m. 
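   While iterating on m we also count the keys returned and compare the count with t.size(), which
   catches keys that the iterator silently drops or reports twice.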
*/ c = 0; for ( java.util.Iterator i = m.keySet().iterator(); i.hasNext(); ) { Object e = i.next(); c++; assertTrue( "Error: m and t differ on a key (" + e + ") after insertion (iterating on m)", t.get( e ).equals( e ) ); } assertEquals( "Error: m has only " + c + " keys instead of " + t.size() + " after insertion (iterating on m)", c, t.size() ); /* * Now we check that inquiries about random data give the same answer in m and t. For m we * use the polymorphic method. */ for ( int i = 0; i < n; i++ ) { int T = genKey(); if ( m.containsKey( T ) ) assertEquals( "Error: divergence in keys between t and m (polymorphic method)", Integer.valueOf( m.get( T ) ), t.get( Integer.valueOf( T ) ) ); else assertFalse( "Error: divergence in keys between t and m (polymorphic method)", t.containsKey( Integer.valueOf( T ) ) ); } /* * Again, we check that inquiries about random data give the same answer in m and t, but for * m we use the standard method. */ for ( int i = 0; i < n; i++ ) { int T = genKey(); assertEquals( "Error: divergence between t and m (standard method)", m.get( Integer.valueOf( T ) ), t.get( Integer.valueOf( T ) ) ); } /* Now we put and remove random data in m and t, checking that the result is the same. */ for ( int i = 0; i < 20 * n; i++ ) { int T = genKey(); assertEquals( "Error: divergence in add() between t and m", m.put( Integer.valueOf( T ), Integer.valueOf( T ) ), t.put( Integer.valueOf( T ), Integer.valueOf( T ) ) ); T = genKey(); assertEquals( "Error: divergence in remove() between t and m", m.remove( Integer.valueOf( T ) ), t.remove( Integer.valueOf( T ) ) ); } checkTable( m ); assertTrue( "Error: !m.equals(t) after removal", m.equals( t ) ); assertTrue( "Error: !t.equals(m) after removal", t.equals( m ) ); /* Now we check that m actually holds that data. */ for ( java.util.Iterator i = t.keySet().iterator(); i.hasNext(); ) { Object e = i.next(); assertFalse( "Error: m and t differ on a key (" + e + ") after removal (iterating on t)", !m.get( e ).equals( e ) ); } /* Now we check that m actually holds that data, but iterating on m. */ for ( java.util.Iterator i = m.keySet().iterator(); i.hasNext(); ) { Object e = i.next(); assertFalse( "Error: m and t differ on a key (" + e + ") after removal (iterating on m)", !t.get( e ).equals( e ) ); } /* Now we check cloning. */ assertTrue( "Error: m does not equal m.clone()", m.equals( m.clone() ) ); assertTrue( "Error: m.clone() does not equal m", m.clone().equals( m ) ); int h = m.hashCode(); /* Now we save and read m. */ java.io.File ff = new java.io.File( "it.unimi.dsi.fastutil.test" ); java.io.OutputStream os = new java.io.FileOutputStream( ff ); java.io.ObjectOutputStream oos = new java.io.ObjectOutputStream( os ); oos.writeObject( m ); oos.close(); java.io.InputStream is = new java.io.FileInputStream( ff ); java.io.ObjectInputStream ois = new java.io.ObjectInputStream( is ); m = (Int2IntOpenCustomHashMap)ois.readObject(); ois.close(); ff.delete(); assertEquals( "Error: hashCode() changed after save/read", h, m.hashCode() ); printProbes( m ); checkTable( m ); /* Now we check that m actually holds that data, but iterating on m. */ for ( java.util.Iterator i = m.keySet().iterator(); i.hasNext(); ) { Object e = i.next(); assertFalse( "Error: m and t differ on a key (" + e + ") after save/read", !t.get( e ).equals( e ) ); } /* Now we put and remove random data in m and t, checking that the result is the same. 
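   Both the values returned by put() and remove() and the overall contents are compared, so a
   divergence in what is returned for an overwritten or missing key is detected as well.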
*/ for ( int i = 0; i < 20 * n; i++ ) { int T = genKey(); assertEquals( "Error: divergence in add() between t and m after save/read", m.put( Integer.valueOf( T ), Integer.valueOf( T ) ), t.put( Integer.valueOf( T ), Integer.valueOf( T ) ) ); T = genKey(); assertEquals( "Error: divergence in remove() between t and m after save/read", m.remove( Integer.valueOf( T ) ), t.remove( Integer.valueOf( T ) ) ); } assertTrue( "Error: !m.equals(t) after post-save/read removal", m.equals( t ) ); assertTrue( "Error: !t.equals(m) after post-save/read removal", t.equals( m ) ); /* Now we take out of m everything, and check that it is empty. */ for ( java.util.Iterator i = m.keySet().iterator(); i.hasNext(); ) { i.next(); i.remove(); } assertFalse( "Error: m is not empty (as it should be)", !m.isEmpty() ); m = new Int2IntOpenCustomHashMap( n, f, strategy ); t.clear(); /* Now we torture-test the hash table. This part is implemented only for integers and longs. */ for( int i = n; i-- != 0; ) m.put( i, i ); t.putAll( m ); printProbes( m ); checkTable( m ); for( int i = n; i-- != 0; ) assertEquals( "Error: m and t differ on a key during torture-test insertion.", Integer.valueOf( m.put( i, i ) ), t.put( ( Integer.valueOf( i ) ), ( Integer.valueOf( i ) ) ) ); assertTrue( "Error: !m.equals(t) after torture-test insertion", m.equals( t ) ); assertTrue( "Error: !t.equals(m) after torture-test insertion", t.equals( m ) ); for( int i = n; i-- != 0; ) assertEquals( "Error: m and t differ on a key during torture-test insertion.", Integer.valueOf( m.remove( i ) ), t.remove( ( Integer.valueOf( i ) ) ) ); assertTrue( "Error: !m.equals(t) after torture-test removal", m.equals( t ) ); assertTrue( "Error: !t.equals(m) after torture-test removal", t.equals( m ) ); assertTrue( "Error: !m.equals(m.clone()) after torture-test removal", m.equals( m.clone() ) ); assertTrue( "Error: !m.clone().equals(m) after torture-test removal", m.clone().equals( m ) ); return; } @Test public void test1() throws IOException, ClassNotFoundException { test( 1, Hash.DEFAULT_LOAD_FACTOR ); test( 1, Hash.FAST_LOAD_FACTOR ); test( 1, Hash.VERY_FAST_LOAD_FACTOR ); } @Test public void test10() throws IOException, ClassNotFoundException { test( 10, Hash.DEFAULT_LOAD_FACTOR ); test( 10, Hash.FAST_LOAD_FACTOR ); test( 10, Hash.VERY_FAST_LOAD_FACTOR ); } @Test public void test100() throws IOException, ClassNotFoundException { test( 100, Hash.DEFAULT_LOAD_FACTOR ); test( 100, Hash.FAST_LOAD_FACTOR ); test( 100, Hash.VERY_FAST_LOAD_FACTOR ); } @Ignore("Too long") @Test public void test1000() throws IOException, ClassNotFoundException { test( 1000, Hash.DEFAULT_LOAD_FACTOR ); test( 1000, Hash.FAST_LOAD_FACTOR ); test( 1000, Hash.VERY_FAST_LOAD_FACTOR ); } } fastutil-7.1.0/test/it/unimi/dsi/fastutil/ints/Int2IntOpenHashMapTest.java0000664000000000000000000005347013050705451025244 0ustar rootrootpackage it.unimi.dsi.fastutil.ints; import static org.junit.Assert.assertArrayEquals; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotEquals; import static org.junit.Assert.assertTrue; import it.unimi.dsi.fastutil.Hash; import it.unimi.dsi.fastutil.HashCommon; import it.unimi.dsi.fastutil.ints.Int2IntMap.Entry; import it.unimi.dsi.fastutil.objects.Object2ObjectOpenHashMap; import it.unimi.dsi.fastutil.objects.ObjectIterator; import java.io.IOException; import java.util.AbstractMap; import java.util.Arrays; import java.util.Map; import org.junit.Ignore; import org.junit.Test; 
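/* Covers null-key handling, equals() against maps with mismatched key/value types, a retainAll()
   regression on the key set, entry-set contains()/remove() with foreign entry types, randomized
   consistency checks against java.util.HashMap (including serialization), and iterator wrap-around
   regressions in the open-addressing table. */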
@SuppressWarnings("rawtypes") public class Int2IntOpenHashMapTest { @SuppressWarnings("deprecation") @Test public void testContainsNull() { Int2IntOpenHashMap m = new Int2IntOpenHashMap( new int[] { 1, 2, 3 }, new int[] { 1, 2, 3 } ); assertFalse( m.containsKey( null ) ); assertTrue( m.get( null ) == null ); } @SuppressWarnings("boxing") @Test public void testEquals() { Int2IntOpenHashMap m = new Int2IntOpenHashMap( new int[] { 1, 2 }, new int[] { 1, 2 } ); assertFalse( m.equals( new Object2ObjectOpenHashMap( new Integer[] { 1, null }, new Integer[] { 1, 1 } ) ) ); } @Test public void testStrangeRetainAllCase() { IntArrayList initialElements = IntArrayList.wrap(new int[] { 586, 940, 1086, 1110, 1168, 1184, 1185, 1191, 1196, 1229, 1237, 1241, 1277, 1282, 1284, 1299, 1308, 1309, 1310, 1314, 1328, 1360, 1366, 1370, 1378, 1388, 1392, 1402, 1406, 1411, 1426, 1437, 1455, 1476, 1489, 1513, 1533, 1538, 1540, 1541, 1543, 1547, 1548, 1551, 1557, 1568, 1575, 1577, 1582, 1583, 1584, 1588, 1591, 1592, 1601, 1610, 1618, 1620, 1633, 1635, 1653, 1654, 1655, 1660, 1661, 1665, 1674, 1686, 1688, 1693, 1700, 1705, 1717, 1720, 1732, 1739, 1740, 1745, 1746, 1752, 1754, 1756, 1765, 1766, 1767, 1771, 1772, 1781, 1789, 1790, 1793, 1801, 1806, 1823, 1825, 1827, 1828, 1829, 1831, 1832, 1837, 1839, 1844, 2962, 2969, 2974, 2990, 3019, 3023, 3029, 3030, 3052, 3072, 3074, 3075, 3093, 3109, 3110, 3115, 3116, 3125, 3137, 3142, 3156, 3160, 3176, 3180, 3188, 3193, 3198, 3207, 3209, 3210, 3213, 3214, 3221, 3225, 3230, 3231, 3236, 3240, 3247, 3261, 4824, 4825, 4834, 4845, 4852, 4858, 4859, 4867, 4871, 4883, 4886, 4887, 4905, 4907, 4911, 4920, 4923, 4924, 4925, 4934, 4942, 4953, 4957, 4965, 4973, 4976, 4980, 4982, 4990, 4993, 6938, 6949, 6953, 7010, 7012, 7034, 7037, 7049, 7076, 7094, 7379, 7384, 7388, 7394, 7414, 7419, 7458, 7459, 7466, 7467 }); IntArrayList retainElements = IntArrayList.wrap(new int[] { 586 }); // Initialize both implementations with the same data Int2IntOpenHashMap instance = new Int2IntOpenHashMap(initialElements.elements(), new int[ initialElements.size() ]); IntRBTreeSet referenceInstance = new IntRBTreeSet(initialElements); instance.keySet().retainAll(retainElements); referenceInstance.retainAll(retainElements); // print the correct result {586} // System.out.println("ref: " + referenceInstance); // prints {586, 7379}, which is clearly wrong // System.out.println("ohm: " + instance); // Fails assertEquals( referenceInstance, instance.keySet() ); } @SuppressWarnings("unchecked") @Test public void entrySetContainsTest() { Int2IntOpenHashMap m = new Int2IntOpenHashMap(); m.put(0, 0); assertFalse(m.entrySet().contains(new AbstractMap.SimpleEntry(new Object(), null))); assertFalse(m.entrySet().contains(new AbstractMap.SimpleEntry(null, new Object()))); assertFalse(m.entrySet().contains(new AbstractMap.SimpleEntry(null, null))); assertFalse(m.entrySet().contains(new AbstractMap.SimpleEntry(new Object(), new Object()))); } @SuppressWarnings("unchecked") @Test public void entrySetRemoveTest() { Int2IntOpenHashMap m = new Int2IntOpenHashMap(); m.put(0, 0); assertFalse(m.entrySet().remove(new AbstractMap.SimpleEntry(new Object(), null))); assertFalse(m.entrySet().remove(new AbstractMap.SimpleEntry(null, new Object()))); assertFalse(m.entrySet().remove(new AbstractMap.SimpleEntry(null, null))); assertFalse(m.entrySet().remove(new AbstractMap.SimpleEntry(new Object(), new Object()))); } private static java.util.Random r = new java.util.Random( 0 ); private static int genKey() { return r.nextInt(); } private static 
int genValue() { return r.nextInt(); } private static boolean valEquals( Object o1, Object o2 ) { return o1 == null ? o2 == null : o1.equals( o2 ); } @SuppressWarnings({ "unchecked", "boxing", "deprecation" }) protected static void test( int n, float f ) throws IOException, ClassNotFoundException { Int2IntOpenHashMap m = new Int2IntOpenHashMap( Hash.DEFAULT_INITIAL_SIZE, f ); Map t = new java.util.HashMap(); /* First of all, we fill t with random data. */ for ( int i = 0; i < n; i++ ) t.put( ( Integer.valueOf( genKey() ) ), ( Integer.valueOf( genValue() ) ) ); /* Now we add to m the same data */ m.putAll( t ); assertTrue( "Error: !m.equals(t) after insertion", m.equals( t ) ); assertTrue( "Error: !t.equals(m) after insertion", t.equals( m ) ); /* * Now we check that m actually holds that data. */ for ( java.util.Iterator i = t.entrySet().iterator(); i.hasNext(); ) { java.util.Map.Entry e = (java.util.Map.Entry)i.next(); assertTrue( "Error: m and t differ on an entry (" + e + ") after insertion (iterating on t)", valEquals( e.getValue(), m.get( e.getKey() ) ) ); } /* Now we check that m actually holds that data, but iterating on m. */ for ( java.util.Iterator i = m.entrySet().iterator(); i.hasNext(); ) { java.util.Map.Entry e = (java.util.Map.Entry)i.next(); assertTrue( "Error: m and t differ on an entry (" + e + ") after insertion (iterating on m)", valEquals( e.getValue(), t.get( e.getKey() ) ) ); } /* Now we check that m actually holds the same keym. */ for ( java.util.Iterator i = t.keySet().iterator(); i.hasNext(); ) { Object o = i.next(); assertTrue( "Error: m and t differ on a key (" + o + ") after insertion (iterating on t)", m.containsKey( o ) ); assertTrue( "Error: m and t differ on a key (" + o + ", in keySet()) after insertion (iterating on t)", m.keySet().contains( o ) ); } /* Now we check that m actually holds the same keys, but iterating on m. */ for ( java.util.Iterator i = m.keySet().iterator(); i.hasNext(); ) { Object o = i.next(); assertTrue( "Error: m and t differ on a key after insertion (iterating on m)", t.containsKey( o ) ); assertTrue( "Error: m and t differ on a key (in keySet()) after insertion (iterating on m)", t.keySet().contains( o ) ); } /* Now we check that m actually hold the same valuem. */ for ( java.util.Iterator i = t.values().iterator(); i.hasNext(); ) { Object o = i.next(); assertTrue( "Error: m and t differ on a value after insertion (iterating on t)", m.containsValue( o ) ); assertTrue( "Error: m and t differ on a value (in values()) after insertion (iterating on t)", m.values().contains( o ) ); } /* Now we check that m actually hold the same values, but iterating on m. */ for ( java.util.Iterator i = m.values().iterator(); i.hasNext(); ) { Object o = i.next(); assertTrue( "Error: m and t differ on a value after insertion (iterating on m)", t.containsValue( o ) ); assertTrue( "Error: m and t differ on a value (in values()) after insertion (iterating on m)", t.values().contains( o ) ); } /* * Now we check that inquiries about random data give the same answer in m and t. For m we * use the polymorphic method. */ for ( int i = 0; i < n; i++ ) { int T = genKey(); assertTrue( "Error: divergence in keys between t and m (polymorphic method)", m.containsKey( ( Integer.valueOf( T ) ) ) == t.containsKey( ( Integer.valueOf( T ) ) ) ); assertTrue( "Error: divergence between t and m (polymorphic method)", !( m.get( T ) != ( 0 ) ) != ( ( t.get( ( Integer.valueOf( T ) ) ) == null ? 
( 0 ) : ( ( ( (Integer)( t.get( ( Integer.valueOf( T ) ) ) ) ).intValue() ) ) ) != ( 0 ) ) || t.get( ( Integer.valueOf( T ) ) ) != null && !m.get( ( Integer.valueOf( T ) ) ).equals( t.get( ( Integer.valueOf( T ) ) ) ) ); } /* * Again, we check that inquiries about random data give the same answer in m and t, but for * m we use the standard method. */ for ( int i = 0; i < n; i++ ) { int T = genKey(); assertTrue( "Error: divergence between t and m (standard method)", valEquals( m.get( ( Integer.valueOf( T ) ) ), t.get( ( Integer.valueOf( T ) ) ) ) ); } /* Now we put and remove random data in m and t, checking that the result is the same. */ for ( int i = 0; i < 20 * n; i++ ) { int T = genKey(); int U = genValue(); assertTrue( "Error: divergence in put() between t and m", valEquals( m.put( ( Integer.valueOf( T ) ), ( Integer.valueOf( U ) ) ), t.put( ( Integer.valueOf( T ) ), ( Integer.valueOf( U ) ) ) ) ); T = genKey(); assertTrue( "Error: divergence in remove() between t and m", valEquals( m.remove( ( Integer.valueOf( T ) ) ), t.remove( ( Integer.valueOf( T ) ) ) ) ); } assertTrue( "Error: !m.equals(t) after removal", m.equals( t ) ); assertTrue( "Error: !t.equals(m) after removal", t.equals( m ) ); /* * Now we check that m actually holds the same data. */ for ( java.util.Iterator i = t.entrySet().iterator(); i.hasNext(); ) { java.util.Map.Entry e = (java.util.Map.Entry)i.next(); assertTrue( "Error: m and t differ on an entry (" + e + ") after removal (iterating on t)", valEquals( e.getValue(), m.get( e.getKey() ) ) ); } /* Now we check that m actually holds that data, but iterating on m. */ for ( java.util.Iterator i = m.entrySet().iterator(); i.hasNext(); ) { java.util.Map.Entry e = (java.util.Map.Entry)i.next(); assertTrue( "Error: m and t differ on an entry (" + e + ") after removal (iterating on m)", valEquals( e.getValue(), t.get( e.getKey() ) ) ); } /* Now we check that m actually holds the same keym. */ for ( java.util.Iterator i = t.keySet().iterator(); i.hasNext(); ) { Object o = i.next(); assertTrue( "Error: m and t differ on a key (" + o + ") after removal (iterating on t)", m.containsKey( o ) ); assertTrue( "Error: m and t differ on a key (" + o + ", in keySet()) after removal (iterating on t)", m.keySet().contains( o ) ); } /* Now we check that m actually holds the same keys, but iterating on m. */ for ( java.util.Iterator i = m.keySet().iterator(); i.hasNext(); ) { Object o = i.next(); assertTrue( "Error: m and t differ on a key after removal (iterating on m)", t.containsKey( o ) ); assertTrue( "Error: m and t differ on a key (in keySet()) after removal (iterating on m)", t.keySet().contains( o ) ); } /* Now we check that m actually hold the same valuem. */ for ( java.util.Iterator i = t.values().iterator(); i.hasNext(); ) { Object o = i.next(); assertTrue( "Error: m and t differ on a value after removal (iterating on t)", m.containsValue( o ) ); assertTrue( "Error: m and t differ on a value (in values()) after removal (iterating on t)", m.values().contains( o ) ); } /* Now we check that m actually hold the same values, but iterating on m. */ for ( java.util.Iterator i = m.values().iterator(); i.hasNext(); ) { Object o = i.next(); assertTrue( "Error: m and t differ on a value after removal (iterating on m)", t.containsValue( o ) ); assertTrue( "Error: m and t differ on a value (in values()) after removal (iterating on m)", t.values().contains( o ) ); } int h = m.hashCode(); /* Now we save and read m. 
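   Serialization goes through a temporary file; after reading the map back its hash code must be
   unchanged, and the deserialized map must still agree with t under further put()/remove() calls.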
*/ java.io.File ff = new java.io.File( "it.unimi.dsi.fastutil.test" ); java.io.OutputStream os = new java.io.FileOutputStream( ff ); java.io.ObjectOutputStream oos = new java.io.ObjectOutputStream( os ); oos.writeObject( m ); oos.close(); java.io.InputStream is = new java.io.FileInputStream( ff ); java.io.ObjectInputStream ois = new java.io.ObjectInputStream( is ); m = (Int2IntOpenHashMap)ois.readObject(); ois.close(); ff.delete(); assertEquals( "Error: hashCode() changed after save/read", m.hashCode(), h ); /* Now we check that m actually holds that data. */ for ( java.util.Iterator i = t.keySet().iterator(); i.hasNext(); ) { Object o = i.next(); assertTrue( "Error: m and t differ on an entry after save/read", valEquals( m.get( o ), t.get( o ) ) ); } /* Now we put and remove random data in m and t, checking that the result is the same. */ for ( int i = 0; i < 20 * n; i++ ) { int T = genKey(); int U = genValue(); assertTrue( "Error: divergence in put() between t and m after save/read", valEquals( m.put( ( Integer.valueOf( T ) ), ( Integer.valueOf( U ) ) ), t.put( ( Integer.valueOf( T ) ), ( Integer.valueOf( U ) ) ) ) ); T = genKey(); Integer result; assertTrue( "Error: divergence in remove() between t and m after save/read", valEquals( m.remove( T ), ( result = (Integer)t.remove( ( Integer.valueOf( T ) ) ) ) != null ? result.intValue() : 0 ) ); } assertTrue( "Error: !m.equals(t) after post-save/read removal", m.equals( t ) ); assertTrue( "Error: !t.equals(m) after post-save/read removal", t.equals( m ) ); /* * Now we take out of m everything , and check that it is empty. */ for ( java.util.Iterator i = t.keySet().iterator(); i.hasNext(); ) m.remove( i.next() ); assertTrue( "Error: m is not empty (as it should be)", m.isEmpty() ); m = new Int2IntOpenHashMap( n, f ); t.clear(); for( int i = n; i-- != 0; ) m.put( i, 1 ); t.putAll( m ); for( int i = n; i-- != 0; ) assertEquals( "Error: m and t differ on a key during torture-test insertion.", m.put( i, 2 ), t.put( Integer.valueOf( i ), 2 ) ); assertTrue( "Error: !m.equals(t) after torture-test removal", m.equals( t ) ); assertTrue( "Error: !t.equals(m) after torture-test removal", t.equals( m ) ); assertTrue( "Error: !m.equals(m.clone()) after torture-test removal", m.equals( m.clone() ) ); assertTrue( "Error: !m.clone().equals(m) after torture-test removal", m.clone().equals( m ) ); m.trim(); assertTrue( "Error: !m.equals(t) after trim()", m.equals( t ) ); assertTrue( "Error: !t.equals(m) after trim()", t.equals( m ) ); return; } @Test public void test1() throws IOException, ClassNotFoundException { test( 1, Hash.DEFAULT_LOAD_FACTOR ); test( 1, Hash.FAST_LOAD_FACTOR ); test( 1, Hash.VERY_FAST_LOAD_FACTOR ); } @Test public void test10() throws IOException, ClassNotFoundException { test( 10, Hash.DEFAULT_LOAD_FACTOR ); test( 10, Hash.FAST_LOAD_FACTOR ); test( 10, Hash.VERY_FAST_LOAD_FACTOR ); } @Test public void test100() throws IOException, ClassNotFoundException { test( 100, Hash.DEFAULT_LOAD_FACTOR ); test( 100, Hash.FAST_LOAD_FACTOR ); test( 100, Hash.VERY_FAST_LOAD_FACTOR ); } @Ignore("Too long") @Test public void test1000() throws IOException, ClassNotFoundException { test( 1000, Hash.DEFAULT_LOAD_FACTOR ); test( 1000, Hash.FAST_LOAD_FACTOR ); test( 1000, Hash.VERY_FAST_LOAD_FACTOR ); } @Test public void testAddTo() { Int2IntOpenHashMap m = new Int2IntOpenHashMap( Hash.DEFAULT_INITIAL_SIZE ); assertEquals( 0, m.addTo( 0, 2 ) ); assertEquals( 2, m.get( 0 ) ); assertEquals( 2, m.addTo( 0, 3 ) ); assertEquals( 5, m.get( 0 ) ); 
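/* addTo() returns the previous value (the default return value if the key is absent) and adds the
   increment on top of it: with the default return value set to -1 below, addTo( 1, 1 ) leaves key 1
   mapped to 0, as the assertions check. */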
m.defaultReturnValue( -1 ); assertEquals( -1, m.addTo( 1, 1 ) ); assertEquals( 0, m.get( 1 ) ); assertEquals( 0, m.addTo( 1, 1 ) ); assertEquals( 1, m.get( 1 ) ); assertEquals( 1, m.addTo( 1, -2 ) ); assertEquals( -1, m.get( 1 ) ); } @Test public void testRemove() { Int2IntOpenHashMap m = new Int2IntOpenHashMap( Hash.DEFAULT_INITIAL_SIZE ); m.defaultReturnValue( -1 ); for( int i = 0; i < 100; i++ ) assertEquals( -1, m.put( i, i ) ); for( int i = 0; i < 100; i++ ) assertEquals( -1, m.remove( 100 + i ) ); for( int i = 50; i < 150; i++ ) assertEquals( Integer.toString( i % 100 ), i % 100, m.remove( i % 100 ) ); } @Test public void testRemove0() { Int2IntOpenHashMap m = new Int2IntOpenHashMap( Hash.DEFAULT_INITIAL_SIZE ); m.defaultReturnValue( -1 ); for( int i = -1; i <= 1; i++ ) assertEquals( -1, m.put( i, i ) ); assertEquals( 0, m.remove( 0 ) ); IntIterator iterator = m.keySet().iterator(); IntOpenHashSet z = new IntOpenHashSet(); z.add( iterator.nextInt() ); z.add( iterator.nextInt() ); assertFalse( iterator.hasNext() ); assertEquals( new IntOpenHashSet( new int[] { -1, 1 } ), z ); m = new Int2IntOpenHashMap( Hash.DEFAULT_INITIAL_SIZE ); m.defaultReturnValue( -1 ); for( int i = -1; i <= 1; i++ ) assertEquals( -1, m.put( i, i ) ); iterator = m.keySet().iterator(); while( iterator.hasNext() ) if ( iterator.nextInt() == 0 ) iterator.remove(); assertFalse( m.containsKey( 0 ) ); assertEquals( -1, m.get( 0 ) ); iterator = m.keySet().iterator(); int[] content = new int[ 2 ]; content[ 0 ] = iterator.nextInt(); content[ 1 ] = iterator.nextInt(); assertFalse( iterator.hasNext() ); Arrays.sort( content ); assertArrayEquals( new int[] { -1, 1 }, content ); } @Test public void testWrapAround() { Int2IntOpenHashMap m = new Int2IntOpenHashMap( 4, .5f ); assertEquals( 8, m.n ); // The following code inverts HashCommon.phiMix() and places strategically keys in slots 6, 7 and 0 m.put( HashCommon.invMix( 6 ), 0 ); m.put( HashCommon.invMix( 7 ), 0 ); m.put( HashCommon.invMix( 6 + 8 ), 0 ); assertNotEquals( 0, m.key[ 0 ] ); assertNotEquals( 0, m.key[ 6 ] ); assertNotEquals( 0, m.key[ 7 ] ); IntOpenHashSet keys = new IntOpenHashSet( m.keySet() ); IntIterator iterator = m.keySet().iterator(); IntOpenHashSet t = new IntOpenHashSet(); t.add( iterator.nextInt() ); t.add( iterator.nextInt() ); // Originally, this remove would move the entry in slot 0 in slot 6 and we would return the entry in 0 twice iterator.remove(); t.add( iterator.nextInt() ); assertEquals( keys, t ); } @Test public void testWrapAround2() { Int2IntOpenHashMap m = new Int2IntOpenHashMap( 4, .75f ); assertEquals( 8, m.n ); // The following code inverts HashCommon.phiMix() and places strategically keys in slots 4, 5, 6, 7 and 0 m.put( HashCommon.invMix( 4 ), 0 ); m.put( HashCommon.invMix( 5 ), 0 ); m.put( HashCommon.invMix( 4 + 8 ), 0 ); m.put( HashCommon.invMix( 5 + 8 ), 0 ); m.put( HashCommon.invMix( 4 + 16 ), 0 ); assertNotEquals( 0, m.key[ 0 ] ); assertNotEquals( 0, m.key[ 4 ] ); assertNotEquals( 0, m.key[ 5 ] ); assertNotEquals( 0, m.key[ 6 ] ); assertNotEquals( 0, m.key[ 7 ] ); //System.err.println(Arraym.toString( m.key )); IntOpenHashSet keys = new IntOpenHashSet( m.keySet() ); IntIterator iterator = m.keySet().iterator(); IntOpenHashSet t = new IntOpenHashSet(); assertTrue( t.add( iterator.nextInt() ) ); iterator.remove(); //System.err.println(Arraym.toString( m.key )); assertTrue( t.add( iterator.nextInt() ) ); //System.err.println(Arraym.toString( m.key )); // Originally, this remove would move the entry in slot 0 in slot 6 and we would 
return the entry in 0 twice assertTrue( t.add( iterator.nextInt() ) ); //System.err.println(Arraym.toString( m.key )); assertTrue( t.add( iterator.nextInt() ) ); iterator.remove(); //System.err.println(Arraym.toString( m.key )); assertTrue( t.add( iterator.nextInt() ) ); assertEquals( 3, m.size() ); assertEquals( keys, t ); } @Test public void testWrapAround3() { Int2IntOpenHashMap m = new Int2IntOpenHashMap( 4, .75f ); assertEquals( 8, m.n ); // The following code inverts HashCommon.phiMix() and places strategically keys in slots 5, 6, 7, 0 and 1 m.put( HashCommon.invMix( 5 ), 0 ); m.put( HashCommon.invMix( 5 + 8 ), 0 ); m.put( HashCommon.invMix( 5 + 16 ), 0 ); m.put( HashCommon.invMix( 5 + 32 ), 0 ); m.put( HashCommon.invMix( 5 + 64 ), 0 ); assertNotEquals( 0, m.key[ 5 ] ); assertNotEquals( 0, m.key[ 6 ] ); assertNotEquals( 0, m.key[ 7 ] ); assertNotEquals( 0, m.key[ 0 ] ); assertNotEquals( 0, m.key[ 1 ] ); //System.err.println(Arraym.toString( m.key )); IntOpenHashSet keys = new IntOpenHashSet( m.keySet() ); IntIterator iterator = m.keySet().iterator(); IntOpenHashSet t = new IntOpenHashSet(); assertTrue( t.add( iterator.nextInt() ) ); iterator.remove(); //System.err.println(Arraym.toString( m.key )); assertTrue( t.add( iterator.nextInt() ) ); iterator.remove(); //System.err.println(Arraym.toString( m.key )); // Originally, this remove would move the entry in slot 0 in slot 6 and we would return the entry in 0 twice assertTrue( t.add( iterator.nextInt() ) ); iterator.remove(); //System.err.println(Arraym.toString( m.key )); assertTrue( t.add( iterator.nextInt() ) ); iterator.remove(); //System.err.println(Arraym.toString( m.key )); assertTrue( t.add( iterator.nextInt() ) ); iterator.remove(); assertEquals( 0, m.size() ); assertEquals( keys, t ); } @Test public void testEntrySet() { Int2IntOpenHashMap m = new Int2IntOpenHashMap( Hash.DEFAULT_INITIAL_SIZE ); m.defaultReturnValue( -1 ); for( int i = 0; i < 100; i++ ) assertEquals( -1, m.put( i, i ) ); for( int i = 0; i < 100; i++ ) assertTrue( m.entrySet().contains( new AbstractInt2IntMap.BasicEntry( 0, 0 ) ) ); for( int i = 0; i < 100; i++ ) assertFalse( m.entrySet().contains( new AbstractInt2IntMap.BasicEntry( i, -1 ) ) ); for( int i = 0; i < 100; i++ ) assertTrue( m.entrySet().contains( new AbstractInt2IntMap.BasicEntry( i, i ) ) ); for( int i = 0; i < 100; i++ ) assertFalse( m.entrySet().remove( new AbstractInt2IntMap.BasicEntry( i, -1 ) ) ); for( int i = 0; i < 100; i++ ) assertTrue( m.entrySet().remove( new AbstractInt2IntMap.BasicEntry( i, i ) ) ); assertTrue( m.entrySet().isEmpty() ); } @Test public void testFastIterator() { Int2IntOpenHashMap m = new Int2IntOpenHashMap( Hash.DEFAULT_INITIAL_SIZE ); m.defaultReturnValue( -1 ); for( int i = 0; i < 100; i++ ) assertEquals( -1, m.put( i, i ) ); ObjectIterator fastIterator = m.int2IntEntrySet().fastIterator(); Entry entry = fastIterator.next(); int key = entry.getIntKey(); entry.setValue( -1000 ); assertEquals( m.get( key ), -1000 ); fastIterator.remove(); assertEquals( m.get( key ), -1 ); } } fastutil-7.1.0/test/it/unimi/dsi/fastutil/ints/Int2IntRBTreeMapTest.java0000664000000000000000000000367113050705451024660 0ustar rootrootpackage it.unimi.dsi.fastutil.ints; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; import it.unimi.dsi.fastutil.objects.Object2ObjectOpenHashMap; import java.util.AbstractMap; import org.junit.Test; public class Int2IntRBTreeMapTest { @SuppressWarnings("deprecation") @Test public void testContainsNull() { Int2IntRBTreeMap 
m = new Int2IntRBTreeMap( new int[] { 1, 2, 3 }, new int[] { 1, 2, 3 } ); assertFalse( m.containsKey( null ) ); assertTrue( m.get( null ) == null ); } @SuppressWarnings("boxing") @Test public void testEquals() { Int2IntRBTreeMap m = new Int2IntRBTreeMap( new int[] { 1, 2 }, new int[] { 1, 2 } ); assertFalse( m.equals( new Object2ObjectOpenHashMap( new Integer[] { 1, null }, new Integer[] { 1, 1 } ) ) ); } @SuppressWarnings({ "unchecked", "rawtypes" }) @Test public void entrySetContainsTest() { Int2IntRBTreeMap m = new Int2IntRBTreeMap(); m.put(0, 0); assertFalse(m.int2IntEntrySet().contains(new AbstractMap.SimpleEntry(new Object(), null))); assertFalse(m.entrySet().contains(new AbstractMap.SimpleEntry(null, new Object()))); assertFalse(m.entrySet().contains(new AbstractMap.SimpleEntry(null, null))); assertFalse(m.entrySet().contains(new AbstractMap.SimpleEntry(new Object(), new Object()))); } @SuppressWarnings({ "unchecked", "rawtypes" }) @Test public void entrySetRemoveTest() { Int2IntRBTreeMap m = new Int2IntRBTreeMap(); m.put(0, 0); assertFalse(m.entrySet().remove(new AbstractMap.SimpleEntry(new Object(), null))); assertFalse(m.entrySet().remove(new AbstractMap.SimpleEntry(null, new Object()))); assertFalse(m.entrySet().remove(new AbstractMap.SimpleEntry(null, null))); assertFalse(m.entrySet().remove(new AbstractMap.SimpleEntry(new Object(), new Object()))); } @Test public void removeFromKeySetTest() { Int2IntRBTreeMap m = new Int2IntRBTreeMap(); m.put(0, 0); assertTrue(m.keySet().remove(0)); } } fastutil-7.1.0/test/it/unimi/dsi/fastutil/ints/IntArrayFIFOQueueTest.java0000664000000000000000000001176013050705451025067 0ustar rootrootpackage it.unimi.dsi.fastutil.ints; import static org.junit.Assert.assertEquals; import it.unimi.dsi.fastutil.io.BinIO; import java.io.File; import java.io.IOException; import org.junit.Test; public class IntArrayFIFOQueueTest { @Test public void testEnqueueDequeue() { IntArrayFIFOQueue q = new IntArrayFIFOQueue(); for( int i = 0; i < 100; i++ ) { q.enqueue( i ); assertEquals( i, q.lastInt() ); } for( int i = 0; i < 100; i++ ) { assertEquals( i, q.firstInt() ); assertEquals( i, q.dequeueInt() ); if ( i != 99 ) assertEquals( 99, q.lastInt() ); } q = new IntArrayFIFOQueue( 10 ); for( int i = 0; i < 100; i++ ) { q.enqueue( i ); assertEquals( i, q.lastInt() ); } for( int i = 0; i < 100; i++ ) { assertEquals( i, q.firstInt() ); assertEquals( i, q.dequeueInt() ); if ( i != 99 ) assertEquals( 99, q.lastInt() ); } q = new IntArrayFIFOQueue( 200 ); for( int i = 0; i < 100; i++ ) { q.enqueue( i ); assertEquals( i, q.lastInt() ); } for( int i = 0; i < 100; i++ ) { assertEquals( i, q.firstInt() ); assertEquals( i, q.dequeueInt() ); if ( i != 99 ) assertEquals( 99, q.lastInt() ); } } @Test public void testMix() { IntArrayFIFOQueue q = new IntArrayFIFOQueue(); for( int i = 0, p = 0; i < 200; i++ ) { for( int j = 0; j < 20; j++ ) { q.enqueue( j + i * 20 ); assertEquals( j + i * 20, q.lastInt() ); } for( int j = 0; j < 10; j++ ) assertEquals( p++, q.dequeueInt() ); } q = new IntArrayFIFOQueue( 10 ); for( int i = 0, p = 0; i < 200; i++ ) { for( int j = 0; j < 20; j++ ) { q.enqueue( j + i * 20 ); assertEquals( j + i * 20, q.lastInt() ); } for( int j = 0; j < 10; j++ ) assertEquals( p++, q.dequeueInt() ); } q = new IntArrayFIFOQueue( 200 ); for( int i = 0, p = 0; i < 200; i++ ) { for( int j = 0; j < 20; j++ ) { q.enqueue( j + i * 20 ); assertEquals( j + i * 20, q.lastInt() ); } for( int j = 0; j < 10; j++ ) assertEquals( p++, q.dequeueInt() ); } } @Test public void testWrap() { 
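/* Presumably the queue is backed by a circular array: enqueueing 20 elements into a capacity-30
   queue, dequeueing 10 and then enqueueing 15 more forces the head/tail indices to wrap around,
   which is the case this test is after. */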
IntArrayFIFOQueue q = new IntArrayFIFOQueue( 30 ); for( int i = 0; i < 20; i++ ) { q.enqueue( i ); assertEquals( i, q.lastInt() ); } for( int j = 0; j < 10; j++ ) assertEquals( j, q.dequeueInt() ); for( int i = 0; i < 15; i++ ) { q.enqueue( i ); assertEquals( i, q.lastInt() ); } for( int j = 10; j < 20; j++ ) assertEquals( j, q.dequeueInt() ); for( int j = 0; j < 15; j++ ) assertEquals( j, q.dequeueInt() ); } @Test public void testTrim() { IntArrayFIFOQueue q = new IntArrayFIFOQueue( 30 ); for( int j = 0; j < 20; j++ ) q.enqueue( j ); for( int j = 0; j < 10; j++ ) assertEquals( j, q.dequeueInt() ); for( int j = 0; j < 15; j++ ) q.enqueue( j ); q.trim(); for( int j = 10; j < 20; j++ ) assertEquals( j, q.dequeueInt() ); for( int j = 0; j < 15; j++ ) assertEquals( j, q.dequeueInt() ); q = new IntArrayFIFOQueue( 30 ); for( int j = 0; j < 20; j++ ) q.enqueue( j ); q.trim(); for( int j = 0; j < 20; j++ ) assertEquals( j, q.dequeueInt() ); } @Test public void testDeque() { IntArrayFIFOQueue q = new IntArrayFIFOQueue( 4 ); q.enqueue( 0 ); q.enqueue( 1 ); q.enqueue( 2 ); assertEquals( q.dequeueInt(), 0 ); assertEquals( q.dequeueInt(), 1 ); q.enqueue( 3 ); assertEquals( q.dequeueLastInt(), 3 ); assertEquals( q.dequeueLastInt(), 2 ); q.enqueueFirst( 1 ); q.enqueueFirst( 0 ); assertEquals( 0, q.dequeueInt() ); assertEquals( 1, q.dequeueInt() ); q = new IntArrayFIFOQueue( 4 ); q.enqueueFirst( 0 ); q.enqueueFirst( 1 ); assertEquals( 1, q.dequeueInt() ); assertEquals( 0, q.dequeueInt() ); q.enqueueFirst( 0 ); q.enqueueFirst( 1 ); q.enqueueFirst( 2 ); q.enqueueFirst( 3 ); assertEquals( 3, q.dequeueInt() ); assertEquals( 2, q.dequeueInt() ); assertEquals( 1, q.dequeueInt() ); assertEquals( 0, q.dequeueInt() ); } @SuppressWarnings("deprecation") @Test public void testImmediateReduce() { IntArrayFIFOQueue q = new IntArrayFIFOQueue(); q.enqueue( 0 ); q.dequeue(); } @SuppressWarnings("deprecation") private final static void assertSameQueue( IntArrayFIFOQueue a, IntArrayFIFOQueue b ) { assertEquals( a.size(), b.size() ); while( ! a.isEmpty() && ! 
b.isEmpty() ) assertEquals( a.dequeue(), b.dequeue() ); assertEquals( Boolean.valueOf( a.isEmpty() ) , Boolean.valueOf( b.isEmpty() ) ); } @Test public void testSerialization() throws IOException, ClassNotFoundException { File temp = File.createTempFile( IntArrayFIFOQueueTest.class.getSimpleName() + "-", "-test" ); temp.deleteOnExit(); IntArrayFIFOQueue q = new IntArrayFIFOQueue(); BinIO.storeObject( q, temp ); assertSameQueue( q, (IntArrayFIFOQueue)BinIO.loadObject( temp ) ); for( int i = 0; i < 100; i++ ) q.enqueue( i ); BinIO.storeObject( q, temp ); assertSameQueue( q, (IntArrayFIFOQueue)BinIO.loadObject( temp ) ); q.trim(); for( int i = 0; i < 128; i++ ) q.enqueue( i ); BinIO.storeObject( q, temp ); assertSameQueue( q, (IntArrayFIFOQueue)BinIO.loadObject( temp ) ); temp.delete(); } } fastutil-7.1.0/test/it/unimi/dsi/fastutil/ints/IntArrayFrontCodedListTest.java0000664000000000000000000001100613050705451026213 0ustar rootrootpackage it.unimi.dsi.fastutil.ints; import it.unimi.dsi.fastutil.objects.ObjectListIterator; import java.io.IOException; import static org.junit.Assert.*; import org.junit.Test; @SuppressWarnings({ "rawtypes", "unchecked" }) public class IntArrayFrontCodedListTest { private static java.util.Random r = new java.util.Random( 0 ); private static int genKey() { return r.nextInt(); } private static boolean contentEquals( java.util.List x, java.util.List y ) { if ( x.size() != y.size() ) return false; for ( int i = 0; i < x.size(); i++ ) if ( !java.util.Arrays.equals( (int[])x.get( i ), (int[])y.get( i ) ) ) return false; return true; } private static int l[]; private static int[][] a; private static void test( int n ) throws IOException, ClassNotFoundException { l = new int[ n ]; a = new int[ n ][]; for ( int i = 0; i < n; i++ ) l[ i ] = (int)( Math.abs( r.nextGaussian() ) * 32 ); for ( int i = 0; i < n; i++ ) a[ i ] = new int[ l[ i ] ]; for ( int i = 0; i < n; i++ ) for ( int j = 0; j < l[ i ]; j++ ) a[ i ][ j ] = genKey(); IntArrayFrontCodedList m = new IntArrayFrontCodedList( it.unimi.dsi.fastutil.objects.ObjectIterators.wrap( a ), r.nextInt( 4 ) + 1 ); it.unimi.dsi.fastutil.objects.ObjectArrayList t = new it.unimi.dsi.fastutil.objects.ObjectArrayList( a ); // System.out.println(m); // for( i = 0; i < t.size(); i++ ) // System.out.println(ARRAY_LIST.wrap((KEY_TYPE[])t.get(i))); /* Now we check that m actually holds that data. */ assertTrue( "Error: m does not equal t at creation", contentEquals( m, t ) ); /* Now we check cloning. */ assertTrue( "Error: m does not equal m.clone()", contentEquals( m, m.clone() ) ); /* Now we play with iterators. 
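   The list iterator of the front-coded list is driven in lockstep with the iterator of an
   ObjectArrayList holding the same arrays, moving randomly forwards and backwards and comparing
   hasNext()/hasPrevious(), the returned arrays and the reported indices.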
*/ { ObjectListIterator i; java.util.ListIterator j; i = m.listIterator(); j = t.listIterator(); for ( int k = 0; k < 2 * n; k++ ) { assertTrue( "Error: divergence in hasNext()", i.hasNext() == j.hasNext() ); assertTrue( "Error: divergence in hasPrevious()", i.hasPrevious() == j.hasPrevious() ); if ( r.nextFloat() < .8 && i.hasNext() ) { assertTrue( "Error: divergence in next()", java.util.Arrays.equals( (int[])i.next(), (int[])j.next() ) ); } else if ( r.nextFloat() < .2 && i.hasPrevious() ) { assertTrue( "Error: divergence in previous()", java.util.Arrays.equals( (int[])i.previous(), (int[])j.previous() ) ); } assertTrue( "Error: divergence in nextIndex()", i.nextIndex() == j.nextIndex() ); assertTrue( "Error: divergence in previousIndex()", i.previousIndex() == j.previousIndex() ); } } { int from = r.nextInt( m.size() + 1 ); ObjectListIterator i; java.util.ListIterator j; i = m.listIterator( from ); j = t.listIterator( from ); for ( int k = 0; k < 2 * n; k++ ) { assertTrue( "Error: divergence in hasNext() (iterator with starting point " + from + ")", i.hasNext() == j.hasNext() ); assertTrue( "Error: divergence in hasPrevious() (iterator with starting point " + from + ")", i.hasPrevious() == j.hasPrevious() ); if ( r.nextFloat() < .8 && i.hasNext() ) { assertTrue( "Error: divergence in next() (iterator with starting point " + from + ")", java.util.Arrays.equals( (int[])i.next(), (int[])j.next() ) ); // System.err.println("Done next " + I + " " + J + " " + badPrevious); } else if ( r.nextFloat() < .2 && i.hasPrevious() ) { assertTrue( "Error: divergence in previous() (iterator with starting point " + from + ")", java.util.Arrays.equals( (int[])i.previous(), (int[])j.previous() ) ); } } } java.io.File ff = new java.io.File( "it.unimi.dsi.fastutil.test" ); java.io.OutputStream os = new java.io.FileOutputStream( ff ); java.io.ObjectOutputStream oos = new java.io.ObjectOutputStream( os ); oos.writeObject( m ); oos.close(); java.io.InputStream is = new java.io.FileInputStream( ff ); java.io.ObjectInputStream ois = new java.io.ObjectInputStream( is ); m = (IntArrayFrontCodedList)ois.readObject(); ois.close(); ff.delete(); assertTrue( "Error: m does not equal t after save/read", contentEquals( m, t ) ); return; } @Test public void test1() throws IOException, ClassNotFoundException { test( 1 ); } @Test public void test10() throws Exception, ClassNotFoundException { test( 10 ); } @Test public void test100() throws IOException, ClassNotFoundException { test( 100 ); } @Test public void test1000() throws IOException, ClassNotFoundException { test( 1000 ); } @Test public void test10000() throws IOException, ClassNotFoundException { test( 10000 ); } } fastutil-7.1.0/test/it/unimi/dsi/fastutil/ints/IntArrayIndirectPriorityQueueTest.java0000664000000000000000000002722413050705451027651 0ustar rootrootpackage it.unimi.dsi.fastutil.ints; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; import java.util.Random; import org.junit.Test; public class IntArrayIndirectPriorityQueueTest { @Test public void testFront() { int refArray[] = { 4, 3, 2, 1, 0, 3, 2, 1, 0, 2, 1, 0, 1, 0, 0 }; int tops[] = new int[ refArray.length ]; final IntArrayIndirectPriorityQueue queue = new IntArrayIndirectPriorityQueue( refArray ); for ( int i = refArray.length; i-- != 0; ) queue.enqueue( i ); assertEquals( 5, queue.front( tops ) ); assertEquals( new IntOpenHashSet( new int[] { 4, 8, 11, 13, 14 } ), new IntOpenHashSet( tops, 0, 5 ) ); for ( int i = 4; i-- != 0; ) { queue.dequeue(); 
assertEquals( i + 1, queue.front( tops ) ); } queue.dequeue(); assertEquals( 4, queue.front( tops ) ); assertEquals( new IntOpenHashSet( new int[] { 3, 7, 10, 12 } ), new IntOpenHashSet( tops, 0, 4 ) ); for ( int i = 3; i-- != 0; ) { queue.dequeue(); assertEquals( i + 1, queue.front( tops ) ); } queue.dequeue(); assertEquals( 3, queue.front( tops ) ); assertEquals( new IntOpenHashSet( new int[] { 2, 6, 9 } ), new IntOpenHashSet( tops, 0, 3 ) ); for ( int i = 2; i-- != 0; ) { queue.dequeue(); assertEquals( i + 1, queue.front( tops ) ); } queue.dequeue(); assertEquals( 2, queue.front( tops ) ); assertEquals( new IntOpenHashSet( new int[] { 1, 5 } ), new IntOpenHashSet( tops, 0, 2 ) ); queue.dequeue(); assertEquals( 1, queue.front( tops ) ); queue.dequeue(); assertEquals( 1, queue.front( tops ) ); } private int[] ref; private boolean heapEqual( int[] a, int[] b, int sizea, int sizeb ) { if ( sizea != sizeb ) return false; int[] aa = new int[ sizea ]; int[] bb = new int[ sizea ]; for ( int i = 0; i < sizea; i++ ) { aa[ i ] = ref[ a[ i ] ]; bb[ i ] = ref[ b[ i ] ]; } java.util.Arrays.sort( aa ); java.util.Arrays.sort( bb ); while ( sizea-- != 0 ) if ( !( ( aa[ sizea ] ) == ( bb[ sizea ] ) ) ) return false; return true; } public void test( int n, IntComparator comparator ) { Exception mThrowsIllegal, tThrowsIllegal, mThrowsOutOfBounds, tThrowsOutOfBounds, mThrowsNoElement, tThrowsNoElement; int rm = 0, rt = 0; Random r = new Random( 0 ); ref = new int[ n ]; for ( int i = 0; i < n; i++ ) ref[ i ] = r.nextInt(); IntArrayIndirectPriorityQueue m = new IntArrayIndirectPriorityQueue( ref, comparator ); IntHeapIndirectPriorityQueue t = new IntHeapIndirectPriorityQueue( ref, comparator ); /* We add pairs to t. */ for ( int i = 0; i < n / 2; i++ ) { t.enqueue( i ); m.enqueue( i ); } assertTrue( "Error: m and t differ after creation (" + m + ", " + t + ")", heapEqual( m.array, t.heap, m.size(), t.size() ) ); /* Now we add and remove random data in m and t, checking that the result is the same. 
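   Every enqueue/dequeue/remove/changed operation is mirrored on an IntHeapIndirectPriorityQueue;
   the two implementations must agree both on the exceptions they throw and on the reference values
   of the elements reported by first() and dequeue().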
*/ for ( int i = 0; i < 2 * n; i++ ) { if ( r.nextDouble() < 0.01 ) { t.clear(); m.clear(); for ( int j = 0; j < n / 2; j++ ) { t.enqueue( j ); m.enqueue( j ); } } int T = r.nextInt( 2 * n ); mThrowsNoElement = tThrowsNoElement = mThrowsOutOfBounds = tThrowsOutOfBounds = mThrowsIllegal = tThrowsIllegal = null; try { t.enqueue( T ); } catch ( IndexOutOfBoundsException e ) { tThrowsOutOfBounds = e; } catch ( IllegalArgumentException e ) { tThrowsIllegal = e; } if ( tThrowsIllegal == null ) { // To skip duplicates try { m.enqueue( T ); } catch ( IndexOutOfBoundsException e ) { mThrowsOutOfBounds = e; } catch ( IllegalArgumentException e ) { mThrowsIllegal = e; } } mThrowsIllegal = tThrowsIllegal = null; // To skip duplicates assertTrue( "Error: enqueue() divergence in IndexOutOfBoundsException for " + T + " (" + mThrowsOutOfBounds + ", " + tThrowsOutOfBounds + ")", ( mThrowsOutOfBounds == null ) == ( tThrowsOutOfBounds == null ) ); assertTrue( "Error: enqueue() divergence in IllegalArgumentException for " + T + " (" + mThrowsIllegal + ", " + tThrowsIllegal + ")", ( mThrowsIllegal == null ) == ( tThrowsIllegal == null ) ); assertTrue( "Error: m and t differ after enqueue (" + m + ", " + t + ")", heapEqual( m.array, t.heap, m.size(), t.size() ) ); if ( m.size() != 0 ) { assertTrue( "Error: m and t differ in first element after enqueue (" + m.first() + "->" + ref[ m.first() ] + ", " + t.first() + "->" + ref[ t.first() ] + ")", ( ( ref[ m.first() ] ) == ( ref[ t.first() ] ) ) ); } mThrowsNoElement = tThrowsNoElement = mThrowsOutOfBounds = tThrowsOutOfBounds = mThrowsIllegal = tThrowsIllegal = null; try { rm = m.dequeue(); while ( !m.isEmpty() && ( ( ref[ m.first() ] ) == ( ref[ rm ] ) ) ) m.dequeue(); } catch ( IndexOutOfBoundsException e ) { mThrowsOutOfBounds = e; } catch ( IllegalArgumentException e ) { mThrowsIllegal = e; } catch ( java.util.NoSuchElementException e ) { mThrowsNoElement = e; } try { rt = t.dequeue(); while ( !t.isEmpty() && ( ( ref[ t.first() ] ) == ( ref[ rt ] ) ) ) t.dequeue(); } catch ( IndexOutOfBoundsException e ) { tThrowsOutOfBounds = e; } catch ( IllegalArgumentException e ) { tThrowsIllegal = e; } catch ( java.util.NoSuchElementException e ) { tThrowsNoElement = e; } assertTrue( "Error: dequeue() divergence in IndexOutOfBoundsException (" + mThrowsOutOfBounds + ", " + tThrowsOutOfBounds + ")", ( mThrowsOutOfBounds == null ) == ( tThrowsOutOfBounds == null ) ); assertTrue( "Error: dequeue() divergence in IllegalArgumentException (" + mThrowsIllegal + ", " + tThrowsIllegal + ")", ( mThrowsIllegal == null ) == ( tThrowsIllegal == null ) ); assertTrue( "Error: dequeue() divergence in java.util.NoSuchElementException (" + mThrowsNoElement + ", " + tThrowsNoElement + ")", ( mThrowsNoElement == null ) == ( tThrowsNoElement == null ) ); if ( mThrowsOutOfBounds == null ) assertTrue( "Error: divergence in dequeue() between m and t (" + rm + "->" + ref[ rm ] + ", " + rt + "->" + ref[ rt ] + ")", ( ( ref[ rt ] ) == ( ref[ rm ] ) ) ); assertTrue( "Error: m and t differ after dequeue (" + m + ", " + t + ")", heapEqual( m.array, t.heap, m.size(), t.size() ) ); if ( m.size() != 0 ) { assertTrue( "Error: m and t differ in first element after dequeue (" + m.first() + "->" + ref[ m.first() ] + ", " + t.first() + "->" + ref[ t.first() ] + ")", ( ( ref[ m.first() ] ) == ( ref[ t.first() ] ) ) ); } mThrowsNoElement = tThrowsNoElement = mThrowsOutOfBounds = tThrowsOutOfBounds = mThrowsIllegal = tThrowsIllegal = null; int pos = r.nextInt( n * 2 ); try { m.remove( pos ); } catch ( 
IndexOutOfBoundsException e ) { mThrowsOutOfBounds = e; } catch ( IllegalArgumentException e ) { mThrowsIllegal = e; } catch ( java.util.NoSuchElementException e ) { mThrowsNoElement = e; } try { t.remove( pos ); } catch ( IndexOutOfBoundsException e ) { tThrowsOutOfBounds = e; } catch ( IllegalArgumentException e ) { tThrowsIllegal = e; } catch ( java.util.NoSuchElementException e ) { tThrowsNoElement = e; } assertTrue( "Error: remove(int) divergence in IndexOutOfBoundsException (" + mThrowsOutOfBounds + ", " + tThrowsOutOfBounds + ")", ( mThrowsOutOfBounds == null ) == ( tThrowsOutOfBounds == null ) ); assertTrue( "Error: remove(int) divergence in IllegalArgumentException (" + mThrowsIllegal + ", " + tThrowsIllegal + ")", ( mThrowsIllegal == null ) == ( tThrowsIllegal == null ) ); assertTrue( "Error: remove(int) divergence in java.util.NoSuchElementException (" + mThrowsNoElement + ", " + tThrowsNoElement + ")", ( mThrowsNoElement == null ) == ( tThrowsNoElement == null ) ); assertTrue( "Error: m and t differ after remove(int) (" + m + ", " + t + ")", heapEqual( m.array, t.heap, m.size(), t.size() ) ); if ( m.size() != 0 ) { assertTrue( "Error: m and t differ in first element after remove(int) (" + m.first() + "->" + ref[ m.first() ] + ", " + t.first() + "->" + ref[ t.first() ] + ")", ( ( ref[ m.first() ] ) == ( ref[ t.first() ] ) ) ); } mThrowsNoElement = tThrowsNoElement = mThrowsOutOfBounds = tThrowsOutOfBounds = mThrowsIllegal = tThrowsIllegal = null; pos = r.nextInt( n ); try { t.changed( pos ); } catch ( IndexOutOfBoundsException e ) { tThrowsOutOfBounds = e; } catch ( IllegalArgumentException e ) { tThrowsIllegal = e; } catch ( java.util.NoSuchElementException e ) { tThrowsNoElement = e; } if ( tThrowsIllegal == null ) { try { m.changed( pos ); } catch ( IndexOutOfBoundsException e ) { mThrowsOutOfBounds = e; } catch ( IllegalArgumentException e ) { mThrowsIllegal = e; } catch ( java.util.NoSuchElementException e ) { mThrowsNoElement = e; } } assertTrue( "Error: change(int) divergence in IndexOutOfBoundsException (" + mThrowsOutOfBounds + ", " + tThrowsOutOfBounds + ")", ( mThrowsOutOfBounds == null ) == ( tThrowsOutOfBounds == null ) ); // assertTrue( "Error: change(int) divergence in IllegalArgumentException (" + // mThrowsIllegal + ", " + tThrowsIllegal + ")" , ( mThrowsIllegal == null ) == ( // tThrowsIllegal == null ) ); assertTrue( "Error: change(int) divergence in java.util.NoSuchElementException (" + mThrowsNoElement + ", " + tThrowsNoElement + ")", ( mThrowsNoElement == null ) == ( tThrowsNoElement == null ) ); assertTrue( "Error: m and t differ after change(int) (" + m + ", " + t + ")", heapEqual( m.array, t.heap, m.size(), t.size() ) ); if ( m.size() != 0 ) { assertTrue( "Error: m and t differ in first element after change(int) (" + m.first() + "->" + ref[ m.first() ] + ", " + t.first() + "->" + ref[ t.first() ] + ")", ( ( ref[ m.first() ] ) == ( ref[ t.first() ] ) ) ); } int[] temp = t.heap.clone(); IntArrays.quickSort( temp, 0, t.size() ); // To scramble a bit m = new IntArrayIndirectPriorityQueue( m.refArray, temp, t.size(), comparator ); assertTrue( "Error: m and t differ after wrap (" + m + ", " + t + ")", heapEqual( m.array, t.heap, m.size(), t.size() ) ); if ( m.size() != 0 ) { assertTrue( "Error: m and t differ in first element after wrap (" + m.first() + "->" + ref[ m.first() ] + ", " + t.first() + "->" + ref[ t.first() ] + ")", ( ( ref[ m.first() ] ) == ( ref[ t.first() ] ) ) ); } if ( m.size() != 0 && ( ( new it.unimi.dsi.fastutil.ints.IntOpenHashSet( 
m.array, 0, m.size ) ).size() == m.size() ) ) { int first = m.first(); ref[ first ] = r.nextInt(); // System.err.println("Pre-change m: " +m ); // System.err.println("Pre-change t: " +t ); m.changed(); t.changed( first ); // System.err.println("Post-change m: " +m ); // System.err.println("Post-change t: " +t ); assertTrue( "Error: m and t differ after change (" + m + ", " + t + ")", heapEqual( m.array, t.heap, m.size(), t.size() ) ); if ( m.size() != 0 ) { assertTrue( "Error: m and t differ in first element after change (" + m.first() + "->" + ref[ m.first() ] + ", " + t.first() + "->" + ref[ t.first() ] + ")", ( ( ref[ m.first() ] ) == ( ref[ t.first() ] ) ) ); } } } /* Now we check that m actually holds the same data. */ m.clear(); assertTrue( "Error: m is not empty after clear()", m.isEmpty() ); } @Test public void test1() { test( 1, null ); test( 1, IntComparators.OPPOSITE_COMPARATOR ); } @Test public void test10() { test( 10, null ); test( 10, IntComparators.OPPOSITE_COMPARATOR ); } @Test public void test100() { test( 100, null ); test( 100, IntComparators.OPPOSITE_COMPARATOR ); } @Test public void test1000() { test( 1000, null ); test( 1000, IntComparators.OPPOSITE_COMPARATOR ); } } fastutil-7.1.0/test/it/unimi/dsi/fastutil/ints/IntArrayListTest.java0000664000000000000000000000227513050705451024253 0ustar rootrootpackage it.unimi.dsi.fastutil.ints; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; import java.util.Collections; import org.junit.Test; public class IntArrayListTest { @Test public void testEmptyListIsDifferentFromEmptySet() { assertFalse(IntLists.EMPTY_LIST.equals(IntSets.EMPTY_SET)); assertFalse(IntSets.EMPTY_SET.equals(IntLists.EMPTY_LIST)); } @Test public void testNullInContains() { assertFalse(new IntArrayList().contains(null)); } @Test public void testAddUsingIteratorToTheFirstPosition() { IntArrayList list = new IntArrayList(); list.add(24); IntListIterator it = list.listIterator(); it.add(42); assertTrue(it.hasNext()); assertEquals(IntArrayList.wrap( new int[] { 42, 24 } ), list); } @Test public void testRemoveAll() { IntArrayList l = IntArrayList.wrap( new int[] { 0, 1, 1, 2 } ); l.removeAll( IntSets.singleton( 1 ) ); assertEquals( IntArrayList.wrap( new int[] { 0, 2 } ), l ); l = IntArrayList.wrap( new int[] { 0, 1, 1, 2 } ); l.removeAll( Collections.singleton( Integer.valueOf( 1 ) ) ); assertEquals( IntArrayList.wrap( new int[] { 0, 2 } ), l ); } } fastutil-7.1.0/test/it/unimi/dsi/fastutil/ints/IntArrayPriorityQueueTest.java0000664000000000000000000001142213050705451026160 0ustar rootrootpackage it.unimi.dsi.fastutil.ints; import static org.junit.Assert.assertEquals; import it.unimi.dsi.fastutil.io.BinIO; import java.io.File; import java.io.IOException; import org.junit.Test; @SuppressWarnings("deprecation") public class IntArrayPriorityQueueTest { @Test public void testEnqueueDequeue() { IntArrayPriorityQueue q = new IntArrayPriorityQueue(); IntHeapPriorityQueue h = new IntHeapPriorityQueue(); for( int i = 0; i < 100; i++ ) { q.enqueue( i ); h.enqueue( i ); } for( int i = 0; i < 100; i++ ) { assertEquals( h.first(), q.first() ); assertEquals( h.dequeue(), q.dequeue() ); } q = new IntArrayPriorityQueue( 10 ); h.clear(); for( int i = 0; i < 100; i++ ) { q.enqueue( i ); h.enqueue( i ); } for( int i = 0; i < 100; i++ ) { assertEquals( h.first(), q.first() ); assertEquals( h.dequeue(), q.dequeue() ); } q = new IntArrayPriorityQueue( 200 ); h.clear(); for( int i = 0; i < 100; i++ ) { 
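// same enqueue/dequeue round as above, but with an initial capacity (200) larger than the number of enqueued elements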
q.enqueue( i ); h.enqueue( i ); } for( int i = 0; i < 100; i++ ) { assertEquals( h.first(), q.first() ); assertEquals( h.dequeue(), q.dequeue() ); } } @Test public void testEnqueueDequeueComp() { IntArrayPriorityQueue q = new IntArrayPriorityQueue( IntComparators.OPPOSITE_COMPARATOR ); IntHeapPriorityQueue h = new IntHeapPriorityQueue( IntComparators.OPPOSITE_COMPARATOR ); for( int i = 0; i < 100; i++ ) { q.enqueue( i ); h.enqueue( i ); } for( int i = 0; i < 100; i++ ) { assertEquals( h.first(), q.first() ); assertEquals( h.dequeue(), q.dequeue() ); } q = new IntArrayPriorityQueue( 10, IntComparators.OPPOSITE_COMPARATOR ); h.clear(); for( int i = 0; i < 100; i++ ) { q.enqueue( i ); h.enqueue( i ); } for( int i = 0; i < 100; i++ ) { assertEquals( h.first(), q.first() ); assertEquals( h.dequeue(), q.dequeue() ); } q = new IntArrayPriorityQueue( 200, IntComparators.OPPOSITE_COMPARATOR ); h.clear(); for( int i = 0; i < 100; i++ ) { q.enqueue( i ); h.enqueue( i ); } for( int i = 0; i < 100; i++ ) { assertEquals( h.first(), q.first() ); assertEquals( h.dequeue(), q.dequeue() ); } } @Test public void testMix() { IntArrayPriorityQueue q = new IntArrayPriorityQueue(); IntHeapPriorityQueue h = new IntHeapPriorityQueue(); for( int i = 0; i < 200; i++ ) { for( int j = 0; j < 20; j++ ) { q.enqueue( j + i * 20 ); h.enqueue( j + i * 20 ); } for( int j = 0; j < 10; j++ ) assertEquals( h.dequeueInt(), q.dequeueInt() ); } q = new IntArrayPriorityQueue( 10 ); h = new IntHeapPriorityQueue(); for( int i = 0; i < 200; i++ ) { for( int j = 0; j < 20; j++ ) { q.enqueue( j + i * -20 ); h.enqueue( j + i * -20 ); q.first(); } for( int j = 0; j < 10; j++ ) assertEquals( h.dequeueInt(), q.dequeueInt() ); } q = new IntArrayPriorityQueue( 200 ); h = new IntHeapPriorityQueue(); for( int i = 0; i < 200; i++ ) { for( int j = 0; j < 20; j++ ) { q.enqueue( j + i * 20 ); h.enqueue( j + i * 20 ); } for( int j = 0; j < 10; j++ ) assertEquals( h.dequeueInt(), q.dequeueInt() ); } } @Test public void testMixComp() { IntArrayPriorityQueue q = new IntArrayPriorityQueue( IntComparators.OPPOSITE_COMPARATOR ); IntHeapPriorityQueue h = new IntHeapPriorityQueue( IntComparators.OPPOSITE_COMPARATOR ); for( int i = 0; i < 200; i++ ) { for( int j = 0; j < 20; j++ ) { q.enqueue( j + i * 20 ); h.enqueue( j + i * 20 ); } for( int j = 0; j < 10; j++ ) assertEquals( h.dequeueInt(), q.dequeueInt() ); } q = new IntArrayPriorityQueue( 10, IntComparators.OPPOSITE_COMPARATOR ); h = new IntHeapPriorityQueue( IntComparators.OPPOSITE_COMPARATOR ); for( int i = 0; i < 200; i++ ) { for( int j = 0; j < 20; j++ ) { q.enqueue( j + i * -20 ); h.enqueue( j + i * -20 ); q.first(); } for( int j = 0; j < 10; j++ ) assertEquals( h.dequeueInt(), q.dequeueInt() ); } q = new IntArrayPriorityQueue( 200, IntComparators.OPPOSITE_COMPARATOR ); h = new IntHeapPriorityQueue( IntComparators.OPPOSITE_COMPARATOR ); for( int i = 0; i < 200; i++ ) { for( int j = 0; j < 20; j++ ) { q.enqueue( j + i * 20 ); h.enqueue( j + i * 20 ); } for( int j = 0; j < 10; j++ ) assertEquals( h.dequeueInt(), q.dequeueInt() ); } } @Test public void testSerialize() throws IOException, ClassNotFoundException { IntArrayPriorityQueue q = new IntArrayPriorityQueue(); for( int i = 0; i < 100; i++ ) q.enqueue( i ); File file = File.createTempFile( getClass().getPackage().getName() + "-", "-tmp" ); file.deleteOnExit(); BinIO.storeObject( q, file ); IntArrayPriorityQueue r = (IntArrayPriorityQueue)BinIO.loadObject( file ); file.delete(); for( int i = 0; i < 100; i++ ) { assertEquals( q.first(), r.first() 
); assertEquals( q.dequeue(), r.dequeue() ); } } } fastutil-7.1.0/test/it/unimi/dsi/fastutil/ints/IntArraySetTest.java0000664000000000000000000000613013050705451024065 0ustar rootrootpackage it.unimi.dsi.fastutil.ints; import it.unimi.dsi.fastutil.ints.IntArraySet; import it.unimi.dsi.fastutil.ints.IntOpenHashSet; import it.unimi.dsi.fastutil.io.BinIO; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.ObjectOutputStream; import java.util.Arrays; import java.util.Collections; import org.junit.Test; import static org.junit.Assert.*; public class IntArraySetTest { @SuppressWarnings("boxing") @Test public void testNullInEquals() { assertFalse( new IntArraySet( Arrays.asList( 42 ) ).equals( Collections.singleton( null ) ) ); } @Test public void testSet() { for( int i = 0; i <= 1; i++ ) { final IntArraySet s = i == 0 ? new IntArraySet() : new IntArraySet( new int[ i ] ); assertTrue( s.add( 1 ) ); assertEquals( 1 + i, s.size() ); assertTrue( s.contains( 1 ) ); assertTrue( s.add( 2 ) ); assertTrue( s.contains( 2 ) ); assertEquals( 2 + i, s.size() ); assertFalse( s.add( 1 ) ); assertFalse( s.remove( 3 ) ); assertTrue( s.add( 3 ) ); assertEquals( 3 + i, s.size() ); assertTrue( s.contains( 1 ) ); assertTrue( s.contains( 2 ) ); assertTrue( s.contains( 2 ) ); assertEquals( new IntOpenHashSet( i == 0 ? new int[] { 1, 2, 3 } : new int[] { 0, 1, 2, 3 } ), new IntOpenHashSet( s.iterator() ) ); assertTrue( s.remove( 3 ) ); assertEquals( 2 + i, s.size() ); assertTrue( s.remove( 1 ) ); assertEquals( 1 + i, s.size() ); assertFalse( s.contains( 1 ) ); assertTrue( s.remove( 2 ) ); assertEquals( 0 + i, s.size() ); assertFalse( s.contains( 1 ) ); } } @Test public void testClone() { IntArraySet s = new IntArraySet(); assertEquals( s, s.clone() ); s.add( 0 ); assertEquals( s, s.clone() ); s.add( 0 ); assertEquals( s, s.clone() ); s.add( 1 ); assertEquals( s, s.clone() ); s.add( 2 ); assertEquals( s, s.clone() ); s.remove( 0 ); assertEquals( s, s.clone() ); } @Test public void testSerialisation() throws IOException, ClassNotFoundException { IntArraySet s = new IntArraySet(); ByteArrayOutputStream baos = new ByteArrayOutputStream(); ObjectOutputStream oos = new ObjectOutputStream( baos ); oos.writeObject( s ); oos.close(); assertEquals( s, BinIO.loadObject( new ByteArrayInputStream( baos.toByteArray() ) ) ); s.add( 0 ); s.add( 1 ); baos.reset(); oos = new ObjectOutputStream( baos ); oos.writeObject( s ); oos.close(); assertEquals( s, BinIO.loadObject( new ByteArrayInputStream( baos.toByteArray() ) ) ); } @Test public void testRemove() { IntSet set = new IntArraySet( new int[] { 42 } ); IntIterator iterator = set.iterator(); assertTrue(iterator.hasNext()); iterator.next(); iterator.remove(); assertFalse( iterator.hasNext() ); assertEquals( 0, set.size() ); set = new IntArraySet( new int[] { 42, 43, 44 } ); iterator = set.iterator(); assertTrue(iterator.hasNext()); iterator.next(); iterator.next(); iterator.remove(); assertEquals( 44, iterator.nextInt () ); assertFalse( iterator.hasNext() ); assertEquals( new IntArraySet( new int[] { 42, 44 } ), set ); } } fastutil-7.1.0/test/it/unimi/dsi/fastutil/ints/IntArraysTest.java0000664000000000000000000016213713050705451023606 0ustar rootrootpackage it.unimi.dsi.fastutil.ints; import static org.junit.Assert.assertArrayEquals; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; import java.util.Arrays; import java.util.Random; import 
org.junit.Test; public class IntArraysTest { public static int[] identity( final int n ) { final int[] perm = new int[ n ]; for( int i = perm.length; i-- != 0; ) perm[ i ] = i; return perm; } @Test public void testMergeSort() { int[] a = { 2, 1, 5, 2, 1, 0, 9, 1, 4, 2, 4, 6, 8, 9, 10, 12, 1, 7 }, b = a.clone(), sorted = a.clone(); Arrays.sort( sorted ); IntArrays.mergeSort( b ); assertArrayEquals( sorted, b ); IntArrays.mergeSort( b ); assertArrayEquals( sorted, b ); final int[] d = a.clone(); IntArrays.mergeSort( d, new AbstractIntComparator() { private static final long serialVersionUID = 1L; @Override public int compare( int k1, int k2 ) { return k1 - k2; } }); assertArrayEquals( sorted, d ); IntArrays.mergeSort( d, new AbstractIntComparator() { private static final long serialVersionUID = 1L; @Override public int compare( int k1, int k2 ) { return k1 - k2; } }); assertArrayEquals( sorted, d ); } @Test public void testMergeSortSmallSupport() { int[] a = { 2, 1, 5, 2, 1, 0, 9, 1, 4, 2, 4, 6, 8, 9, 10, 12, 1, 7 }; for( int to = 1; to < a.length; to++ ) for( int from = 0; from <= to; from++ ) { final int[] support = new int[ to ]; System.arraycopy( a, 0, support, 0, to ); IntArrays.mergeSort( a, from, to, support ); if ( from < to ) for( int i = to - 1; i-- != from; ) assertTrue( a[ i ] <= a[ i + 1 ] ); } } @Test public void testQuickSort() { int[] a = { 2, 1, 5, 2, 1, 0, 9, 1, 4, 2, 4, 6, 8, 9, 10, 12, 1, 7 }, b = a.clone(), sorted = a.clone(); Arrays.sort( sorted ); IntArrays.quickSort( b ); assertArrayEquals( sorted, b ); IntArrays.quickSort( b ); assertArrayEquals( sorted, b ); final int[] d = a.clone(); IntArrays.quickSort( d, new AbstractIntComparator() { private static final long serialVersionUID = 1L; @Override public int compare( int k1, int k2 ) { return k1 - k2; } }); assertArrayEquals( sorted, d ); IntArrays.quickSort( d, new AbstractIntComparator() { private static final long serialVersionUID = 1L; @Override public int compare( int k1, int k2 ) { return k1 - k2; } }); assertArrayEquals( sorted, d ); } @Test public void testParallelQuickSort() { int[] a = { 2, 1, 5, 2, 1, 0, 9, 1, 4, 2, 4, 6, 8, 9, 10, 12, 1, 7 }, b = a.clone(), sorted = a.clone(); Arrays.sort( sorted ); IntArrays.parallelQuickSort( b ); assertArrayEquals( sorted, b ); IntArrays.parallelQuickSort( b ); assertArrayEquals( sorted, b ); final int[] d = a.clone(); IntArrays.parallelQuickSort( d, 0, d.length ); assertArrayEquals( sorted, d ); } @Test public void testQuickSort1() { int[] t = { 2, 1, 0, 4 }; IntArrays.quickSort( t ); for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ i ] <= t[ i + 1 ] ); t = new int[] { 2, -1, 0, -4 }; IntArrays.quickSort( t ); for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ i ] <= t[ i + 1 ] ); t = IntArrays.shuffle( identity( 100 ), new Random( 0 ) ); IntArrays.quickSort( t ); for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ i ] <= t[ i + 1 ] ); t = new int[ 100 ]; Random random = new Random( 0 ); for( int i = t.length; i-- != 0; ) t[ i ] = random.nextInt(); IntArrays.quickSort( t ); for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ i ] <= t[ i + 1 ] ); t = new int[ 100000 ]; random = new Random( 0 ); for( int i = t.length; i-- != 0; ) t[ i ] = random.nextInt(); IntArrays.quickSort( t ); for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ i ] <= t[ i + 1 ] ); for( int i = 100; i-- != 10; ) t[ i ] = random.nextInt(); IntArrays.quickSort( t, 10, 100 ); for( int i = 99; i-- != 10; ) assertTrue( t[ i ] <= t[ i + 1 ] ); t = new int[ 10000000 ]; random = new Random( 0 ); for( int i = t.length; i-- != 0; ) t[ i ] = 
random.nextInt(); IntArrays.quickSort( t ); for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ i ] <= t[ i + 1 ] ); } @Test public void testQuickSort1Undirect() { int[] t = { 2, 1, 0, 4 }; int[] perm = identity( t.length ); IntArrays.quickSortIndirect( perm, t ); for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ perm[ i ] ] <= t[ perm[ i + 1 ] ] ); t = new int[ t.length ]; perm = identity( t.length ); IntArrays.quickSortIndirect( perm, t ); for( int i = t.length - 1; i-- != 0; ) assertEquals( i, perm[ i ] ); t = new int[] { 2, -1, 0, -4 }; perm = identity( t.length ); IntArrays.quickSortIndirect( perm, t ); for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ perm[ i ] ] <= t[ perm[ i + 1 ] ] ); t = IntArrays.shuffle( identity( 100 ), new Random( 0 ) ); perm = identity( t.length ); IntArrays.quickSortIndirect( perm, t ); for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ perm[ i ] ] <= t[ perm[ i + 1 ] ] ); t = new int[ 100 ]; perm = identity( t.length ); Random random = new Random( 0 ); for( int i = t.length; i-- != 0; ) t[ i ] = random.nextInt(); IntArrays.quickSortIndirect( perm, t ); for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ perm[ i ] ] <= t[ perm[ i + 1 ] ] ); t = new int[ 100 ]; perm = identity( t.length ); random = new Random( 0 ); for( int i = t.length; i-- != 0; ) t[ i ] = random.nextInt(); IntArrays.quickSortIndirect( perm, t, 10, 90 ); for( int i = 10; i < 89; i++ ) assertTrue( Integer.toString( i ), t[ perm[ i ] ] <= t[ perm[ i + 1 ] ] ); for( int i = 0; i < 10; i++ ) assertEquals( i, perm[ i ] ); for( int i = 90; i < 100; i++ ) assertEquals( i, perm[ i ] ); t = new int[ 100000 ]; perm = identity( t.length ); random = new Random( 0 ); for( int i = t.length; i-- != 0; ) t[ i ] = random.nextInt(); IntArrays.quickSortIndirect( perm, t ); for( int i = t.length - 1; i-- != 0; ) assertTrue( Integer.toString( i ), t[ perm[ i ] ] <= t[ perm[ i + 1 ] ] ); for( int i = 100; i-- != 10; ) t[ i ] = random.nextInt(); IntArrays.quickSortIndirect( perm, t, 10, 100 ); for( int i = 99; i-- != 10; ) assertTrue( Integer.toString( i ), t[ perm[ i ] ] <= t[ perm[ i + 1 ] ] ); IntArrays.shuffle( perm, new Random( 0 ) ); IntArrays.quickSortIndirect( perm, t ); for( int i = t.length - 1; i-- != 0; ) assertTrue( Integer.toString( i ), t[ perm[ i ] ] <= t[ perm[ i + 1 ] ] ); t = new int[ 10000000 ]; perm = identity( t.length ); random = new Random( 0 ); for( int i = t.length; i-- != 0; ) t[ i ] = random.nextInt(); IntArrays.quickSortIndirect( perm, t ); for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ perm[ i ] ] <= t[ perm[ i + 1 ] ] ); t = new int[ t.length ]; perm = identity( t.length ); IntArrays.quickSortIndirect( perm, t ); for( int i = t.length - 1; i-- != 0; ) assertEquals( i, perm[ i ] ); } @Test public void testQuickSort1Comp() { int[] t = { 2, 1, 0, 4 }; IntArrays.quickSort( t, IntComparators.OPPOSITE_COMPARATOR ); for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ i ] >= t[ i + 1 ] ); t = new int[] { 2, -1, 0, -4 }; IntArrays.quickSort( t, IntComparators.OPPOSITE_COMPARATOR ); for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ i ] >= t[ i + 1 ] ); t = IntArrays.shuffle( identity( 100 ), new Random( 0 ) ); IntArrays.quickSort( t, IntComparators.OPPOSITE_COMPARATOR ); for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ i ] >= t[ i + 1 ] ); t = new int[ 100 ]; Random random = new Random( 0 ); for( int i = t.length; i-- != 0; ) t[ i ] = random.nextInt(); IntArrays.quickSort( t, IntComparators.OPPOSITE_COMPARATOR ); for( int i = t.length - 1; i-- != 0; ) 
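// OPPOSITE_COMPARATOR yields descending order, so each element must be no smaller than its successor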
assertTrue( t[ i ] >= t[ i + 1 ] ); t = new int[ 100000 ]; random = new Random( 0 ); for( int i = t.length; i-- != 0; ) t[ i ] = random.nextInt(); IntArrays.quickSort( t, IntComparators.OPPOSITE_COMPARATOR ); for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ i ] >= t[ i + 1 ] ); for( int i = 100; i-- != 10; ) t[ i ] = random.nextInt(); IntArrays.quickSort( t, 10, 100, IntComparators.OPPOSITE_COMPARATOR ); for( int i = 99; i-- != 10; ) assertTrue( t[ i ] >= t[ i + 1 ] ); t = new int[ 10000000 ]; random = new Random( 0 ); for( int i = t.length; i-- != 0; ) t[ i ] = random.nextInt(); IntArrays.quickSort( t, IntComparators.OPPOSITE_COMPARATOR ); for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ i ] >= t[ i + 1 ] ); } @Test public void testParallelQuickSort1Comp() { int[] t = { 2, 1, 0, 4 }; IntArrays.parallelQuickSort( t, IntComparators.OPPOSITE_COMPARATOR ); for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ i ] >= t[ i + 1 ] ); t = new int[] { 2, -1, 0, -4 }; IntArrays.parallelQuickSort( t, IntComparators.OPPOSITE_COMPARATOR ); for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ i ] >= t[ i + 1 ] ); t = IntArrays.shuffle( identity( 100 ), new Random( 0 ) ); IntArrays.parallelQuickSort( t, IntComparators.OPPOSITE_COMPARATOR ); for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ i ] >= t[ i + 1 ] ); t = new int[ 100 ]; Random random = new Random( 0 ); for( int i = t.length; i-- != 0; ) t[ i ] = random.nextInt(); IntArrays.parallelQuickSort( t, IntComparators.OPPOSITE_COMPARATOR ); for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ i ] >= t[ i + 1 ] ); t = new int[ 100000 ]; random = new Random( 0 ); for( int i = t.length; i-- != 0; ) t[ i ] = random.nextInt(); IntArrays.parallelQuickSort( t, IntComparators.OPPOSITE_COMPARATOR ); for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ i ] >= t[ i + 1 ] ); for( int i = 100; i-- != 10; ) t[ i ] = random.nextInt(); IntArrays.parallelQuickSort( t, 10, 100, IntComparators.OPPOSITE_COMPARATOR ); for( int i = 99; i-- != 10; ) assertTrue( t[ i ] >= t[ i + 1 ] ); t = new int[ 10000000 ]; random = new Random( 0 ); for( int i = t.length; i-- != 0; ) t[ i ] = random.nextInt(); IntArrays.parallelQuickSort( t, IntComparators.OPPOSITE_COMPARATOR ); for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ i ] >= t[ i + 1 ] ); } @Test public void testParallelQuickSort1() { int[] t = { 2, 1, 0, 4 }; IntArrays.parallelQuickSort( t ); for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ i ] <= t[ i + 1 ] ); t = new int[] { 2, -1, 0, -4 }; IntArrays.parallelQuickSort( t ); for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ i ] <= t[ i + 1 ] ); t = IntArrays.shuffle( identity( 100 ), new Random( 0 ) ); IntArrays.parallelQuickSort( t ); for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ i ] <= t[ i + 1 ] ); t = new int[ 100 ]; Random random = new Random( 0 ); for( int i = t.length; i-- != 0; ) t[ i ] = random.nextInt(); IntArrays.parallelQuickSort( t ); for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ i ] <= t[ i + 1 ] ); t = new int[ 100000 ]; random = new Random( 0 ); for( int i = t.length; i-- != 0; ) t[ i ] = random.nextInt(); IntArrays.parallelQuickSort( t ); for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ i ] <= t[ i + 1 ] ); for( int i = 100; i-- != 10; ) t[ i ] = random.nextInt(); IntArrays.parallelQuickSort( t, 10, 100 ); for( int i = 99; i-- != 10; ) assertTrue( t[ i ] <= t[ i + 1 ] ); t = new int[ 10000000 ]; random = new Random( 0 ); for( int i = t.length; i-- != 0; ) t[ i ] = random.nextInt(); IntArrays.parallelQuickSort( t 
); for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ i ] <= t[ i + 1 ] ); } @Test public void testParallelQuickSort1Undirect() { int[] t = { 2, 1, 0, 4 }; int[] perm = identity( t.length ); IntArrays.parallelQuickSortIndirect( perm, t ); for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ perm[ i ] ] <= t[ perm[ i + 1 ] ] ); t = new int[ t.length ]; perm = identity( t.length ); IntArrays.parallelQuickSortIndirect( perm, t ); for( int i = t.length - 1; i-- != 0; ) assertEquals( i, perm[ i ] ); t = new int[] { 2, -1, 0, -4 }; perm = identity( t.length ); IntArrays.parallelQuickSortIndirect( perm, t ); for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ perm[ i ] ] <= t[ perm[ i + 1 ] ] ); t = IntArrays.shuffle( identity( 100 ), new Random( 0 ) ); perm = identity( t.length ); IntArrays.parallelQuickSortIndirect( perm, t ); for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ perm[ i ] ] <= t[ perm[ i + 1 ] ] ); t = new int[ 100 ]; perm = identity( t.length ); Random random = new Random( 0 ); for( int i = t.length; i-- != 0; ) t[ i ] = random.nextInt(); IntArrays.parallelQuickSortIndirect( perm, t ); for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ perm[ i ] ] <= t[ perm[ i + 1 ] ] ); t = new int[ 100 ]; perm = identity( t.length ); random = new Random( 0 ); for( int i = t.length; i-- != 0; ) t[ i ] = random.nextInt(); IntArrays.parallelQuickSortIndirect( perm, t, 10, 90 ); for( int i = 10; i < 89; i++ ) assertTrue( Integer.toString( i ), t[ perm[ i ] ] <= t[ perm[ i + 1 ] ] ); for( int i = 0; i < 10; i++ ) assertEquals( i, perm[ i ] ); for( int i = 90; i < 100; i++ ) assertEquals( i, perm[ i ] ); t = new int[ 100000 ]; perm = identity( t.length ); random = new Random( 0 ); for( int i = t.length; i-- != 0; ) t[ i ] = random.nextInt(); IntArrays.parallelQuickSortIndirect( perm, t ); for( int i = t.length - 1; i-- != 0; ) assertTrue( Integer.toString( i ), t[ perm[ i ] ] <= t[ perm[ i + 1 ] ] ); for( int i = 100; i-- != 10; ) t[ i ] = random.nextInt(); IntArrays.parallelQuickSortIndirect( perm, t, 10, 100 ); for( int i = 99; i-- != 10; ) assertTrue( Integer.toString( i ), t[ perm[ i ] ] <= t[ perm[ i + 1 ] ] ); IntArrays.shuffle( perm, new Random( 0 ) ); IntArrays.parallelQuickSortIndirect( perm, t ); for( int i = t.length - 1; i-- != 0; ) assertTrue( Integer.toString( i ), t[ perm[ i ] ] <= t[ perm[ i + 1 ] ] ); t = new int[ 10000000 ]; perm = identity( t.length ); random = new Random( 0 ); for( int i = t.length; i-- != 0; ) t[ i ] = random.nextInt(); IntArrays.parallelQuickSortIndirect( perm, t ); for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ perm[ i ] ] <= t[ perm[ i + 1 ] ] ); t = new int[ t.length ]; perm = identity( t.length ); IntArrays.parallelQuickSortIndirect( perm, t ); for( int i = t.length - 1; i-- != 0; ) assertEquals( i, perm[ i ] ); } @Test public void testQuickSort2() { int[][] d = new int[ 2 ][]; d[ 0 ] = new int[ 10 ]; for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = 3 - i % 3; d[ 1 ] = IntArrays.shuffle( identity( 10 ), new Random( 0 ) ); IntArrays.quickSort( d[ 0 ], d[ 1 ] ); for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " + d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] ); d[ 0 ] = new int[ 100000 ]; for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = 100 - i % 100; d[ 1 ] = IntArrays.shuffle( identity( 100000 ), new Random( 6 ) ); IntArrays.quickSort( d[ 0 ], d[ 1 ] 
); for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " + d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] ); d[ 0 ] = new int[ 10 ]; for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = i % 3 - 2; Random random = new Random( 0 ); d[ 1 ] = new int[ d[ 0 ].length ]; for( int i = d[ 1 ].length; i-- != 0; ) d[ 1 ][ i ] = random.nextInt(); IntArrays.quickSort( d[ 0 ], d[ 1 ] ); for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " + d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] ); d[ 0 ] = new int[ 100000 ]; random = new Random( 0 ); for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = random.nextInt(); d[ 1 ] = new int[ d[ 0 ].length ]; for( int i = d[ 1 ].length; i-- != 0; ) d[ 1 ][ i ] = random.nextInt(); IntArrays.quickSort( d[ 0 ], d[ 1 ] ); for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " + d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] ); for( int i = 100; i-- != 10; ) d[ 0 ][ i ] = random.nextInt(); for( int i = 100; i-- != 10; ) d[ 1 ][ i ] = random.nextInt(); IntArrays.quickSort( d[ 0 ], d[ 1 ], 10, 100 ); for( int i = 99; i-- != 10; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " + d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] ); d[ 0 ] = new int[ 10000000 ]; random = new Random( 0 ); for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = random.nextInt(); d[ 1 ] = new int[ d[ 0 ].length ]; for( int i = d[ 1 ].length; i-- != 0; ) d[ 1 ][ i ] = random.nextInt(); IntArrays.quickSort( d[ 0 ], d[ 1 ] ); for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " + d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] ); } @Test public void testParallelQuickSort2() { int[][] d = new int[ 2 ][]; d[ 0 ] = new int[ 10 ]; for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = 3 - i % 3; d[ 1 ] = IntArrays.shuffle( identity( 10 ), new Random( 0 ) ); IntArrays.parallelQuickSort( d[ 0 ], d[ 1 ] ); for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " + d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] ); d[ 0 ] = new int[ 100000 ]; for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = 100 - i % 100; d[ 1 ] = IntArrays.shuffle( identity( 100000 ), new Random( 6 ) ); IntArrays.parallelQuickSort( d[ 0 ], d[ 1 ] ); for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " + d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] ); d[ 0 ] = new int[ 10 ]; for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = i % 3 - 2; Random random = new Random( 0 ); d[ 
1 ] = new int[ d[ 0 ].length ]; for( int i = d[ 1 ].length; i-- != 0; ) d[ 1 ][ i ] = random.nextInt(); IntArrays.parallelQuickSort( d[ 0 ], d[ 1 ] ); for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " + d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] ); d[ 0 ] = new int[ 100000 ]; random = new Random( 0 ); for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = random.nextInt(); d[ 1 ] = new int[ d[ 0 ].length ]; for( int i = d[ 1 ].length; i-- != 0; ) d[ 1 ][ i ] = random.nextInt(); IntArrays.parallelQuickSort( d[ 0 ], d[ 1 ] ); for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " + d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] ); for( int i = 100; i-- != 10; ) d[ 0 ][ i ] = random.nextInt(); for( int i = 100; i-- != 10; ) d[ 1 ][ i ] = random.nextInt(); IntArrays.parallelQuickSort( d[ 0 ], d[ 1 ], 10, 100 ); for( int i = 99; i-- != 10; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " + d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] ); d[ 0 ] = new int[ 10000000 ]; random = new Random( 0 ); for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = random.nextInt(); d[ 1 ] = new int[ d[ 0 ].length ]; for( int i = d[ 1 ].length; i-- != 0; ) d[ 1 ][ i ] = random.nextInt(); IntArrays.parallelQuickSort( d[ 0 ], d[ 1 ] ); for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " + d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] ); } @Test public void testShuffle() { int[] a = new int[ 100 ]; for( int i = a.length; i-- != 0; ) a[ i ] = i; IntArrays.shuffle( a, new Random() ); boolean[] b = new boolean[ a.length ]; for( int i = a.length; i-- != 0; ) { assertFalse( b[ a[ i ] ] ); b[ a[ i ] ] = true; } } @Test public void testShuffleFragment() { int[] a = new int[ 100 ]; for( int i = a.length; i-- != 0; ) a[ i ] = -1; for( int i = 10; i < 30; i++ ) a[ i ] = i - 10; IntArrays.shuffle( a, 10, 30, new Random() ); boolean[] b = new boolean[ 20 ]; for( int i = 20; i-- != 0; ) { assertFalse( b[ a[ i + 10 ] ] ); b[ a[ i + 10 ] ] = true; } } @Test public void testRadixSort1() { int[] t = { 2, 1, 0, 4 }; IntArrays.radixSort( t ); for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ i ] <= t[ i + 1 ] ); t = new int[] { 2, -1, 0, -4 }; IntArrays.radixSort( t ); for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ i ] <= t[ i + 1 ] ); t = IntArrays.shuffle( identity( 100 ), new Random( 0 ) ); IntArrays.radixSort( t ); for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ i ] <= t[ i + 1 ] ); t = new int[ 100 ]; Random random = new Random( 0 ); for( int i = t.length; i-- != 0; ) t[ i ] = random.nextInt(); IntArrays.radixSort( t ); for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ i ] <= t[ i + 1 ] ); t = new int[ 100000 ]; random = new Random( 0 ); for( int i = t.length; i-- != 0; ) t[ i ] = random.nextInt(); IntArrays.radixSort( t ); for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ i ] <= t[ i + 1 ] ); for( int i = 100; i-- != 10; ) t[ i ] = 
random.nextInt(); IntArrays.radixSort( t, 10, 100 ); for( int i = 99; i-- != 10; ) assertTrue( t[ i ] <= t[ i + 1 ] ); t = new int[ 10000000 ]; random = new Random( 0 ); for( int i = t.length; i-- != 0; ) t[ i ] = random.nextInt(); IntArrays.radixSort( t ); for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ i ] <= t[ i + 1 ] ); } @Test public void testParallelRadixSort1() { int[] t = { 2, 1, 0, 4 }; IntArrays.parallelRadixSort( t ); for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ i ] <= t[ i + 1 ] ); t = new int[] { 2, -1, 0, -4 }; IntArrays.parallelRadixSort( t ); for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ i ] <= t[ i + 1 ] ); t = IntArrays.shuffle( identity( 100 ), new Random( 0 ) ); IntArrays.parallelRadixSort( t ); for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ i ] <= t[ i + 1 ] ); t = new int[ 100 ]; Random random = new Random( 0 ); for( int i = t.length; i-- != 0; ) t[ i ] = random.nextInt(); IntArrays.parallelRadixSort( t ); for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ i ] <= t[ i + 1 ] ); t = new int[ 100000 ]; random = new Random( 0 ); for( int i = t.length; i-- != 0; ) t[ i ] = random.nextInt(); IntArrays.parallelRadixSort( t ); for( int i = t.length - 1; i-- != 0; ) assertTrue( "@" + i + ": " + t[ i ] + " > " + t[ i + 1 ], t[ i ] <= t[ i + 1 ] ); for( int i = 100; i-- != 10; ) t[ i ] = random.nextInt(); IntArrays.parallelRadixSort( t, 10, 100 ); for( int i = 99; i-- != 10; ) assertTrue( t[ i ] <= t[ i + 1 ] ); t = new int[ 10000000 ]; random = new Random( 0 ); for( int i = t.length; i-- != 0; ) t[ i ] = random.nextInt(); IntArrays.parallelRadixSort( t ); for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ i ] <= t[ i + 1 ] ); } @Test public void testRadixSort2() { int[][] d = new int[ 2 ][]; d[ 0 ] = new int[ 10 ]; for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = 3 - i % 3; d[ 1 ] = IntArrays.shuffle( identity( 10 ), new Random( 0 ) ); IntArrays.radixSort( d[ 0 ], d[ 1 ] ); for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " + d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] ); d[ 0 ] = new int[ 100000 ]; for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = 100 - i % 100; d[ 1 ] = IntArrays.shuffle( identity( 100000 ), new Random( 6 ) ); IntArrays.radixSort( d[ 0 ], d[ 1 ] ); for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " + d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] ); d[ 0 ] = new int[ 10 ]; for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = i % 3 - 2; Random random = new Random( 0 ); d[ 1 ] = new int[ d[ 0 ].length ]; for( int i = d[ 1 ].length; i-- != 0; ) d[ 1 ][ i ] = random.nextInt(); IntArrays.radixSort( d[ 0 ], d[ 1 ] ); for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " + d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] ); d[ 0 ] = new int[ 100000 ]; random = new Random( 0 ); for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = random.nextInt(); d[ 1 ] = new int[ d[ 0 ].length ]; for( int i = d[ 1 ].length; i-- != 0; ) d[ 1 ][ i ] = random.nextInt(); IntArrays.radixSort( d[ 0 ], d[ 1 ] ); for( 
int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " + d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] ); for( int i = 100; i-- != 10; ) d[ 0 ][ i ] = random.nextInt(); for( int i = 100; i-- != 10; ) d[ 1 ][ i ] = random.nextInt(); IntArrays.radixSort( d[ 0 ], d[ 1 ], 10, 100 ); for( int i = 99; i-- != 10; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " + d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] ); d[ 0 ] = new int[ 10000000 ]; random = new Random( 0 ); for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = random.nextInt(); d[ 1 ] = new int[ d[ 0 ].length ]; for( int i = d[ 1 ].length; i-- != 0; ) d[ 1 ][ i ] = random.nextInt(); IntArrays.radixSort( d[ 0 ], d[ 1 ] ); for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " + d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] ); } @Test public void testParallelRadixSort2() { int[][] d = new int[ 2 ][]; d[ 0 ] = new int[ 10 ]; for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = 3 - i % 3; d[ 1 ] = IntArrays.shuffle( identity( 10 ), new Random( 0 ) ); IntArrays.parallelRadixSort( d[ 0 ], d[ 1 ] ); for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " + d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] ); d[ 0 ] = new int[ 100000 ]; for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = 100 - i % 100; d[ 1 ] = IntArrays.shuffle( identity( 100000 ), new Random( 6 ) ); IntArrays.parallelRadixSort( d[ 0 ], d[ 1 ] ); for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " + d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] ); d[ 0 ] = new int[ 10 ]; for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = i % 3 - 2; Random random = new Random( 0 ); d[ 1 ] = new int[ d[ 0 ].length ]; for( int i = d[ 1 ].length; i-- != 0; ) d[ 1 ][ i ] = random.nextInt(); IntArrays.parallelRadixSort( d[ 0 ], d[ 1 ] ); for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " + d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] ); d[ 0 ] = new int[ 100000 ]; random = new Random( 0 ); for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = random.nextInt(); d[ 1 ] = new int[ d[ 0 ].length ]; for( int i = d[ 1 ].length; i-- != 0; ) d[ 1 ][ i ] = random.nextInt(); IntArrays.parallelRadixSort( d[ 0 ], d[ 1 ] ); for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " + d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] ); for( int i = 100; i-- != 10; ) d[ 0 ][ i ] = random.nextInt(); for( int i = 100; i-- != 10; ) d[ 1 ][ i ] = 
random.nextInt(); IntArrays.parallelRadixSort( d[ 0 ], d[ 1 ], 10, 100 ); for( int i = 99; i-- != 10; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " + d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] ); d[ 0 ] = new int[ 10000000 ]; random = new Random( 0 ); for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = random.nextInt(); d[ 1 ] = new int[ d[ 0 ].length ]; for( int i = d[ 1 ].length; i-- != 0; ) d[ 1 ][ i ] = random.nextInt(); IntArrays.parallelRadixSort( d[ 0 ], d[ 1 ] ); for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " + d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] ); } @Test public void testRadixSort() { int[][] t = { { 2, 1, 0, 4 } }; IntArrays.radixSort( t ); for( int i = t[ 0 ].length - 1; i-- != 0; ) assertTrue( t[ 0 ][ i ] <= t[ 0 ][ i + 1 ] ); t[ 0 ] = IntArrays.shuffle( identity( 100 ), new Random( 0 ) ); IntArrays.radixSort( t ); for( int i = t[ 0 ].length - 1; i-- != 0; ) assertTrue( t[ 0 ][ i ] <= t[ 0 ][ i + 1 ] ); int[][] d = new int[ 2 ][]; d[ 0 ] = new int[ 10 ]; for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = 3 - i % 3; d[ 1 ] = IntArrays.shuffle( identity( 10 ), new Random( 0 ) ); IntArrays.radixSort( d ); for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " + d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] ); d[ 0 ] = new int[ 100000 ]; for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = 100 - i % 100; d[ 1 ] = IntArrays.shuffle( identity( 100000 ), new Random( 6 ) ); IntArrays.radixSort( d ); for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " + d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] ); d[ 0 ] = new int[ 10 ]; Random random = new Random( 0 ); for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = random.nextInt(); d[ 1 ] = new int[ d[ 0 ].length ]; for( int i = d[ 1 ].length; i-- != 0; ) d[ 1 ][ i ] = random.nextInt(); IntArrays.radixSort( d ); for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " + d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] ); d[ 0 ] = new int[ 100000 ]; random = new Random( 0 ); for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = random.nextInt(); d[ 1 ] = new int[ d[ 0 ].length ]; for( int i = d[ 1 ].length; i-- != 0; ) d[ 1 ][ i ] = random.nextInt(); IntArrays.radixSort( d ); for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " + d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] ); for( int i = 100; i-- != 10; ) d[ 0 ][ i ] = random.nextInt(); for( int i = 100; i-- != 10; ) d[ 1 ][ i ] = random.nextInt(); IntArrays.radixSort( d, 10, 100 ); for( int i = 99; i-- != 10; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ 
i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " + d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] ); d[ 0 ] = new int[ 10000000 ]; random = new Random( 0 ); for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = random.nextInt(); d[ 1 ] = new int[ d[ 0 ].length ]; for( int i = d[ 1 ].length; i-- != 0; ) d[ 1 ][ i ] = random.nextInt(); IntArrays.radixSort( d ); for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " + d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] ); } @Test public void testRadixSortIndirectStable() { int[] t = { 2, 1, 0, 4 }; int[] perm = identity( t.length ); IntArrays.radixSortIndirect( perm, t, true ); for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ perm[ i ] ] <= t[ perm[ i + 1 ] ] ); t = new int[ t.length ]; perm = identity( t.length ); IntArrays.radixSortIndirect( perm, t, true ); for( int i = t.length - 1; i-- != 0; ) assertEquals( i, perm[ i ] ); t = new int[] { 2, -1, 0, -4 }; perm = identity( t.length ); IntArrays.radixSortIndirect( perm, t, true ); for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ perm[ i ] ] <= t[ perm[ i + 1 ] ] ); t = IntArrays.shuffle( identity( 100 ), new Random( 0 ) ); perm = identity( t.length ); IntArrays.radixSortIndirect( perm, t, true ); for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ perm[ i ] ] <= t[ perm[ i + 1 ] ] ); t = new int[ 100 ]; perm = identity( t.length ); Random random = new Random( 0 ); for( int i = t.length; i-- != 0; ) t[ i ] = random.nextInt(); IntArrays.radixSortIndirect( perm, t, true ); for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ perm[ i ] ] <= t[ perm[ i + 1 ] ] ); t = new int[ t.length ]; perm = identity( t.length ); IntArrays.radixSortIndirect( perm, t, true ); for( int i = t.length - 1; i-- != 0; ) assertEquals( i, perm[ i ] ); t = new int[ t.length ]; for( int i = 0; i < t.length; i++ ) t[ i ] = random.nextInt( 4 ); perm = identity( t.length ); IntArrays.radixSortIndirect( perm, t, true ); for( int i = t.length - 1; i-- != 0; ) if ( t[ perm[ i ] ] == t[ perm[ i + 1 ] ] ) assertTrue( perm[ i ] < perm[ i + 1 ] ); t = new int[ 100 ]; perm = identity( t.length ); random = new Random( 0 ); for( int i = t.length; i-- != 0; ) t[ i ] = random.nextInt(); IntArrays.radixSortIndirect( perm, t, 10, 90, true ); for( int i = 10; i < 89; i++ ) assertTrue( Integer.toString( i ), t[ perm[ i ] ] <= t[ perm[ i + 1 ] ] ); for( int i = 0; i < 10; i++ ) assertEquals( i, perm[ i ] ); for( int i = 90; i < 100; i++ ) assertEquals( i, perm[ i ] ); t = new int[ 100000 ]; perm = identity( t.length ); random = new Random( 0 ); for( int i = t.length; i-- != 0; ) t[ i ] = random.nextInt(); IntArrays.radixSortIndirect( perm, t, true ); for( int i = t.length - 1; i-- != 0; ) assertTrue( Integer.toString( i ), t[ perm[ i ] ] <= t[ perm[ i + 1 ] ] ); IntArrays.shuffle( perm, new Random( 0 ) ); IntArrays.radixSortIndirect( perm, t, true ); for( int i = t.length - 1; i-- != 0; ) assertTrue( Integer.toString( i ), t[ perm[ i ] ] <= t[ perm[ i + 1 ] ] ); for( int i = 100; i-- != 10; ) t[ i ] = random.nextInt(); IntArrays.radixSortIndirect( perm, t, 10, 100, true ); for( int i = 99; i-- != 10; ) assertTrue( Integer.toString( i ), t[ perm[ i ] ] <= t[ perm[ i + 1 ] ] ); t = new int[ 10000000 ]; perm = identity( t.length ); random = new Random( 0 ); for( int i = t.length; 
i-- != 0; ) t[ i ] = random.nextInt(); IntArrays.radixSortIndirect( perm, t, true ); for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ perm[ i ] ] <= t[ perm[ i + 1 ] ] ); t = new int[ t.length ]; perm = identity( t.length ); IntArrays.radixSortIndirect( perm, t, true ); for( int i = t.length - 1; i-- != 0; ) assertEquals( i, perm[ i ] ); t = new int[ t.length ]; for( int i = 0; i < t.length; i++ ) t[ i ] = random.nextInt( 8 ); perm = identity( t.length ); IntArrays.radixSortIndirect( perm, t, true ); for( int i = t.length - 1; i-- != 0; ) if ( t[ perm[ i ] ] == t[ perm[ i + 1 ] ] ) assertTrue( perm[ i ] < perm[ i + 1 ] ); } @Test public void testRadixSortIndirectUnstable() { int[] t = { 2, 1, 0, 4 }; int[] perm = identity( t.length ); IntArrays.radixSortIndirect( perm, t, false ); for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ perm[ i ] ] <= t[ perm[ i + 1 ] ] ); t = new int[ t.length ]; perm = identity( t.length ); IntArrays.radixSortIndirect( perm, t, false ); for( int i = t.length - 1; i-- != 0; ) assertEquals( i, perm[ i ] ); t = new int[] { 2, -1, 0, -4 }; perm = identity( t.length ); IntArrays.radixSortIndirect( perm, t, false ); for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ perm[ i ] ] <= t[ perm[ i + 1 ] ] ); t = IntArrays.shuffle( identity( 100 ), new Random( 0 ) ); perm = identity( t.length ); IntArrays.radixSortIndirect( perm, t, false ); for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ perm[ i ] ] <= t[ perm[ i + 1 ] ] ); t = new int[ 100 ]; perm = identity( t.length ); Random random = new Random( 0 ); for( int i = t.length; i-- != 0; ) t[ i ] = random.nextInt(); IntArrays.radixSortIndirect( perm, t, false ); for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ perm[ i ] ] <= t[ perm[ i + 1 ] ] ); t = new int[ 100 ]; perm = identity( t.length ); random = new Random( 0 ); for( int i = t.length; i-- != 0; ) t[ i ] = random.nextInt(); IntArrays.radixSortIndirect( perm, t, 10, 90, false ); for( int i = 10; i < 89; i++ ) assertTrue( Integer.toString( i ), t[ perm[ i ] ] <= t[ perm[ i + 1 ] ] ); for( int i = 0; i < 10; i++ ) assertEquals( i, perm[ i ] ); for( int i = 90; i < 100; i++ ) assertEquals( i, perm[ i ] ); t = new int[ 100000 ]; perm = identity( t.length ); random = new Random( 0 ); for( int i = t.length; i-- != 0; ) t[ i ] = random.nextInt(); IntArrays.radixSortIndirect( perm, t, false ); for( int i = t.length - 1; i-- != 0; ) assertTrue( Integer.toString( i ), t[ perm[ i ] ] <= t[ perm[ i + 1 ] ] ); for( int i = 100; i-- != 10; ) t[ i ] = random.nextInt(); IntArrays.radixSortIndirect( perm, t, 10, 100, false ); for( int i = 99; i-- != 10; ) assertTrue( Integer.toString( i ), t[ perm[ i ] ] <= t[ perm[ i + 1 ] ] ); IntArrays.shuffle( perm, new Random( 0 ) ); IntArrays.radixSortIndirect( perm, t, false ); for( int i = t.length - 1; i-- != 0; ) assertTrue( Integer.toString( i ), t[ perm[ i ] ] <= t[ perm[ i + 1 ] ] ); t = new int[ 10000000 ]; perm = identity( t.length ); random = new Random( 0 ); for( int i = t.length; i-- != 0; ) t[ i ] = random.nextInt(); IntArrays.radixSortIndirect( perm, t, false ); for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ perm[ i ] ] <= t[ perm[ i + 1 ] ] ); t = new int[ t.length ]; perm = identity( t.length ); IntArrays.radixSortIndirect( perm, t, false ); for( int i = t.length - 1; i-- != 0; ) assertEquals( i, perm[ i ] ); } @Test public void testParallelRadixSortIndirectStable() { int[] t = { 2, 1, 0, 4 }; int[] perm = identity( t.length ); IntArrays.parallelRadixSortIndirect( perm, t, true ); for( int i = 
t.length - 1; i-- != 0; ) assertTrue( t[ perm[ i ] ] <= t[ perm[ i + 1 ] ] ); t = new int[ t.length ]; perm = identity( t.length ); IntArrays.parallelRadixSortIndirect( perm, t, true ); for( int i = t.length - 1; i-- != 0; ) assertEquals( i, perm[ i ] ); t = new int[] { 2, -1, 0, -4 }; perm = identity( t.length ); IntArrays.parallelRadixSortIndirect( perm, t, true ); for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ perm[ i ] ] <= t[ perm[ i + 1 ] ] ); t = IntArrays.shuffle( identity( 100 ), new Random( 0 ) ); perm = identity( t.length ); IntArrays.parallelRadixSortIndirect( perm, t, true ); for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ perm[ i ] ] <= t[ perm[ i + 1 ] ] ); t = new int[ 100 ]; perm = identity( t.length ); Random random = new Random( 0 ); for( int i = t.length; i-- != 0; ) t[ i ] = random.nextInt(); IntArrays.parallelRadixSortIndirect( perm, t, true ); for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ perm[ i ] ] <= t[ perm[ i + 1 ] ] ); t = new int[ t.length ]; perm = identity( t.length ); IntArrays.parallelRadixSortIndirect( perm, t, true ); for( int i = t.length - 1; i-- != 0; ) assertEquals( i, perm[ i ] ); t = new int[ t.length ]; for( int i = 0; i < t.length; i++ ) t[ i ] = random.nextInt( 4 ); perm = identity( t.length ); IntArrays.parallelRadixSortIndirect( perm, t, true ); for( int i = t.length - 1; i-- != 0; ) if ( t[ perm[ i ] ] == t[ perm[ i + 1 ] ] ) assertTrue( perm[ i ] < perm[ i + 1 ] ); t = new int[ 100 ]; perm = identity( t.length ); random = new Random( 0 ); for( int i = t.length; i-- != 0; ) t[ i ] = random.nextInt(); IntArrays.parallelRadixSortIndirect( perm, t, 10, 90, true ); for( int i = 10; i < 89; i++ ) assertTrue( Integer.toString( i ), t[ perm[ i ] ] <= t[ perm[ i + 1 ] ] ); for( int i = 0; i < 10; i++ ) assertEquals( i, perm[ i ] ); for( int i = 90; i < 100; i++ ) assertEquals( i, perm[ i ] ); t = new int[ 100000 ]; perm = identity( t.length ); random = new Random( 0 ); for( int i = t.length; i-- != 0; ) t[ i ] = random.nextInt(); IntArrays.parallelRadixSortIndirect( perm, t, true ); for( int i = t.length - 1; i-- != 0; ) assertTrue( Integer.toString( i ), t[ perm[ i ] ] <= t[ perm[ i + 1 ] ] ); IntArrays.shuffle( perm, new Random( 0 ) ); IntArrays.parallelRadixSortIndirect( perm, t, true ); for( int i = t.length - 1; i-- != 0; ) assertTrue( Integer.toString( i ), t[ perm[ i ] ] <= t[ perm[ i + 1 ] ] ); for( int i = 100; i-- != 10; ) t[ i ] = random.nextInt(); IntArrays.parallelRadixSortIndirect( perm, t, 10, 100, true ); for( int i = 99; i-- != 10; ) assertTrue( Integer.toString( i ), t[ perm[ i ] ] <= t[ perm[ i + 1 ] ] ); t = new int[ 10000000 ]; perm = identity( t.length ); random = new Random( 0 ); for( int i = t.length; i-- != 0; ) t[ i ] = random.nextInt(); IntArrays.parallelRadixSortIndirect( perm, t, true ); for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ perm[ i ] ] <= t[ perm[ i + 1 ] ] ); t = new int[ t.length ]; perm = identity( t.length ); IntArrays.parallelRadixSortIndirect( perm, t, true ); for( int i = t.length - 1; i-- != 0; ) assertEquals( i, perm[ i ] ); t = new int[ t.length ]; for( int i = 0; i < t.length; i++ ) t[ i ] = random.nextInt( 8 ); perm = identity( t.length ); IntArrays.parallelRadixSortIndirect( perm, t, true ); for( int i = t.length - 1; i-- != 0; ) if ( t[ perm[ i ] ] == t[ perm[ i + 1 ] ] ) assertTrue( perm[ i ] < perm[ i + 1 ] ); } @Test public void testParallelRadixSortIndirectUnstable() { int[] t = { 2, 1, 0, 4 }; int[] perm = identity( t.length ); IntArrays.parallelRadixSortIndirect( 
perm, t, false ); for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ perm[ i ] ] <= t[ perm[ i + 1 ] ] ); t = new int[ t.length ]; perm = identity( t.length ); IntArrays.parallelRadixSortIndirect( perm, t, false ); for( int i = t.length - 1; i-- != 0; ) assertEquals( i, perm[ i ] ); t = new int[] { 2, -1, 0, -4 }; perm = identity( t.length ); IntArrays.parallelRadixSortIndirect( perm, t, false ); for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ perm[ i ] ] <= t[ perm[ i + 1 ] ] ); t = IntArrays.shuffle( identity( 100 ), new Random( 0 ) ); perm = identity( t.length ); IntArrays.parallelRadixSortIndirect( perm, t, false ); for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ perm[ i ] ] <= t[ perm[ i + 1 ] ] ); t = new int[ 100 ]; perm = identity( t.length ); Random random = new Random( 0 ); for( int i = t.length; i-- != 0; ) t[ i ] = random.nextInt(); IntArrays.parallelRadixSortIndirect( perm, t, false ); for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ perm[ i ] ] <= t[ perm[ i + 1 ] ] ); t = new int[ 100 ]; perm = identity( t.length ); random = new Random( 0 ); for( int i = t.length; i-- != 0; ) t[ i ] = random.nextInt(); IntArrays.parallelRadixSortIndirect( perm, t, 10, 90, false ); for( int i = 10; i < 89; i++ ) assertTrue( Integer.toString( i ), t[ perm[ i ] ] <= t[ perm[ i + 1 ] ] ); for( int i = 0; i < 10; i++ ) assertEquals( i, perm[ i ] ); for( int i = 90; i < 100; i++ ) assertEquals( i, perm[ i ] ); t = new int[ 100000 ]; perm = identity( t.length ); random = new Random( 0 ); for( int i = t.length; i-- != 0; ) t[ i ] = random.nextInt(); IntArrays.parallelRadixSortIndirect( perm, t, false ); for( int i = t.length - 1; i-- != 0; ) assertTrue( Integer.toString( i ), t[ perm[ i ] ] <= t[ perm[ i + 1 ] ] ); for( int i = 100; i-- != 10; ) t[ i ] = random.nextInt(); IntArrays.parallelRadixSortIndirect( perm, t, 10, 100, false ); for( int i = 99; i-- != 10; ) assertTrue( Integer.toString( i ), t[ perm[ i ] ] <= t[ perm[ i + 1 ] ] ); IntArrays.shuffle( perm, new Random( 0 ) ); IntArrays.parallelRadixSortIndirect( perm, t, false ); for( int i = t.length - 1; i-- != 0; ) assertTrue( Integer.toString( i ), t[ perm[ i ] ] <= t[ perm[ i + 1 ] ] ); t = new int[ 10000000 ]; perm = identity( t.length ); random = new Random( 0 ); for( int i = t.length; i-- != 0; ) t[ i ] = random.nextInt(); IntArrays.parallelRadixSortIndirect( perm, t, false ); for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ perm[ i ] ] <= t[ perm[ i + 1 ] ] ); t = new int[ t.length ]; perm = identity( t.length ); IntArrays.parallelRadixSortIndirect( perm, t, false ); for( int i = t.length - 1; i-- != 0; ) assertEquals( i, perm[ i ] ); } @Test public void testRadixSort2IndirectStable() { int[] t = { 2, 1, 0, 4 }; int[] u = { 3, 2, 1, 0 }; int[] perm = identity( t.length ); IntArrays.radixSortIndirect( perm, t, u, true ); for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ perm[ i ] ] <= t[ perm[ i + 1 ] ] ); t = new int[ t.length ]; perm = identity( t.length ); IntArrays.radixSortIndirect( perm, t, u, true ); for( int i = t.length - 1; i-- != 0; ) assertTrue( u[ perm[ i ] ] <= u[ perm[ i + 1 ] ] ); t = new int[ t.length ]; perm = identity( t.length ); IntArrays.radixSortIndirect( perm, t, t, true ); for( int i = t.length - 1; i-- != 0; ) assertEquals( i, perm[ i ] ); t = IntArrays.shuffle( identity( 100 ), new Random( 0 ) ); u = IntArrays.shuffle( identity( 100 ), new Random( 1 ) ); perm = identity( t.length ); IntArrays.radixSortIndirect( perm, t, u, true ); for( int i = t.length - 1; i-- != 0; ) assertTrue( 
t[ perm[ i ] ] < t[ perm[ i + 1 ] ] || t[ perm[ i ] ] == t[ perm[ i + 1 ] ] && u[ perm[ i ] ] <= u[ perm[ i + 1 ] ] ); t = new int[ 100 ]; u = new int[ 100 ]; perm = identity( t.length ); Random random = new Random( 0 ); for( int i = t.length; i-- != 0; ) t[ i ] = random.nextInt(); for( int i = t.length; i-- != 0; ) u[ i ] = random.nextInt(); IntArrays.radixSortIndirect( perm, t, u, true ); for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ perm[ i ] ] < t[ perm[ i + 1 ] ] || t[ perm[ i ] ] == t[ perm[ i + 1 ] ] && u[ perm[ i ] ] <= u[ perm[ i + 1 ] ] ); t = new int[ t.length ]; u = new int[ t.length ]; perm = identity( t.length ); IntArrays.radixSortIndirect( perm, t, u, true ); for( int i = t.length - 1; i-- != 0; ) assertEquals( i, perm[ i ] ); for( int i = 0; i < u.length; i++ ) t[ i ] = random.nextInt( 4 ); for( int i = 0; i < u.length; i++ ) u[ i ] = random.nextInt( 4 ); perm = identity( t.length ); IntArrays.radixSortIndirect( perm, t, u, true ); for( int i = t.length - 1; i-- != 0; ) if ( t[ perm[ i ] ] == t[ perm[ i + 1 ] ] && u[ perm[ i ] ] == u[ perm[ i + 1 ] ] ) assertTrue( perm[ i ] < perm[ i + 1 ] ); t = new int[ 100 ]; u = new int[ 100 ]; perm = identity( t.length ); random = new Random( 0 ); for( int i = u.length; i-- != 0; ) u[ i ] = random.nextInt(); IntArrays.radixSortIndirect( perm, t, u, 10, 90, true ); for( int i = 10; i < 89; i++ ) assertTrue( Integer.toString( i ), u[ perm[ i ] ] <= u[ perm[ i + 1 ] ] ); for( int i = 0; i < 10; i++ ) assertEquals( i, perm[ i ] ); for( int i = 90; i < 100; i++ ) assertEquals( i, perm[ i ] ); t = new int[ 100000 ]; u = new int[ 100000 ]; perm = identity( t.length ); random = new Random( 0 ); for( int i = t.length; i-- != 0; ) t[ i ] = random.nextInt(); IntArrays.radixSortIndirect( perm, t, u, true ); for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ perm[ i ] ] < t[ perm[ i + 1 ] ] || t[ perm[ i ] ] == t[ perm[ i + 1 ] ] && u[ perm[ i ] ] <= u[ perm[ i + 1 ] ] ); IntArrays.shuffle( perm, new Random( 0 ) ); IntArrays.radixSortIndirect( perm, t, u, true ); for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ perm[ i ] ] < t[ perm[ i + 1 ] ] || t[ perm[ i ] ] == t[ perm[ i + 1 ] ] && u[ perm[ i ] ] <= u[ perm[ i + 1 ] ] ); for( int i = 100; i-- != 10; ) t[ i ] = random.nextInt(); IntArrays.radixSortIndirect( perm, t, u, 10, 100, true ); for( int i = 99; i-- != 10; ) assertTrue( t[ perm[ i ] ] < t[ perm[ i + 1 ] ] || t[ perm[ i ] ] == t[ perm[ i + 1 ] ] && u[ perm[ i ] ] <= u[ perm[ i + 1 ] ] ); t = new int[ 10000000 ]; u = new int[ 10000000 ]; perm = identity( t.length ); random = new Random( 0 ); for( int i = t.length; i-- != 0; ) t[ i ] = random.nextInt(); for( int i = t.length; i-- != 0; ) u[ i ] = random.nextInt(); IntArrays.radixSortIndirect( perm, t, u, true ); for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ perm[ i ] ] < t[ perm[ i + 1 ] ] || t[ perm[ i ] ] == t[ perm[ i + 1 ] ] && u[ perm[ i ] ] <= u[ perm[ i + 1 ] ] ); t = new int[ t.length ]; u = new int[ t.length ]; perm = identity( t.length ); IntArrays.radixSortIndirect( perm, t, u, true ); for( int i = t.length - 1; i-- != 0; ) assertEquals( i, perm[ i ] ); t = new int[ t.length ]; for( int i = 0; i < t.length; i++ ) t[ i ] = random.nextInt( 8 ); for( int i = 0; i < t.length; i++ ) u[ i ] = random.nextInt( 8 ); perm = identity( t.length ); IntArrays.radixSortIndirect( perm, t, u, true ); for( int i = t.length - 1; i-- != 0; ) if ( t[ perm[ i ] ] == t[ perm[ i + 1 ] ] && u[ perm[ i ] ] == u[ perm[ i + 1 ] ] ) assertTrue( perm[ i ] < perm[ i + 1 ] ); } @Test 
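/* Same battery of checks as testRadixSort2IndirectStable(), but with the stable flag set to
 * false. The lexicographic-order assertions are identical; the only guarantee that is dropped
 * is the one on completely tied entries (the stable test additionally requires
 * perm[ i ] < perm[ i + 1 ] when both keys are equal). For instance, on the hypothetical
 * input t = { 1, 1 }, u = { 3, 3 } (not part of this test), a stable sort must leave perm as
 * { 0, 1 }, whereas an unstable sort may legitimately return { 1, 0 }. */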
public void testRadixSort2IndirectUnstable() { int[] t = { 2, 1, 0, 4 }; int[] u = { 3, 2, 1, 0 }; int[] perm = identity( t.length ); IntArrays.radixSortIndirect( perm, t, u, false ); for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ perm[ i ] ] <= t[ perm[ i + 1 ] ] ); t = new int[ t.length ]; perm = identity( t.length ); IntArrays.radixSortIndirect( perm, t, u, false ); for( int i = t.length - 1; i-- != 0; ) assertTrue( u[ perm[ i ] ] <= u[ perm[ i + 1 ] ] ); t = new int[ t.length ]; perm = identity( t.length ); IntArrays.radixSortIndirect( perm, t, t, false ); for( int i = t.length - 1; i-- != 0; ) assertEquals( i, perm[ i ] ); t = IntArrays.shuffle( identity( 100 ), new Random( 0 ) ); u = IntArrays.shuffle( identity( 100 ), new Random( 1 ) ); perm = identity( t.length ); IntArrays.radixSortIndirect( perm, t, u, false ); for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ perm[ i ] ] < t[ perm[ i + 1 ] ] || t[ perm[ i ] ] == t[ perm[ i + 1 ] ] && u[ perm[ i ] ] <= u[ perm[ i + 1 ] ] ); t = new int[ 100 ]; u = new int[ 100 ]; perm = identity( t.length ); Random random = new Random( 0 ); for( int i = t.length; i-- != 0; ) t[ i ] = random.nextInt(); for( int i = t.length; i-- != 0; ) u[ i ] = random.nextInt(); IntArrays.radixSortIndirect( perm, t, u, false ); for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ perm[ i ] ] < t[ perm[ i + 1 ] ] || t[ perm[ i ] ] == t[ perm[ i + 1 ] ] && u[ perm[ i ] ] <= u[ perm[ i + 1 ] ] ); t = new int[ t.length ]; u = new int[ t.length ]; perm = identity( t.length ); IntArrays.radixSortIndirect( perm, t, u, false ); for( int i = t.length - 1; i-- != 0; ) assertEquals( i, perm[ i ] ); for( int i = 0; i < u.length; i++ ) t[ i ] = random.nextInt( 4 ); for( int i = 0; i < u.length; i++ ) u[ i ] = random.nextInt( 4 ); perm = identity( t.length ); IntArrays.radixSortIndirect( perm, t, u, false ); for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ perm[ i ] ] < t[ perm[ i + 1 ] ] || t[ perm[ i ] ] == t[ perm[ i + 1 ] ]&& u[ perm[ i ] ] <= u[ perm[ i + 1 ] ] ); t = new int[ 100 ]; u = new int[ 100 ]; perm = identity( t.length ); random = new Random( 0 ); for( int i = u.length; i-- != 0; ) u[ i ] = random.nextInt(); IntArrays.radixSortIndirect( perm, t, u, 10, 90, false ); for( int i = 10; i < 89; i++ ) assertTrue( Integer.toString( i ), u[ perm[ i ] ] <= u[ perm[ i + 1 ] ] ); for( int i = 0; i < 10; i++ ) assertEquals( i, perm[ i ] ); for( int i = 90; i < 100; i++ ) assertEquals( i, perm[ i ] ); t = new int[ 100000 ]; u = new int[ 100000 ]; perm = identity( t.length ); random = new Random( 0 ); for( int i = t.length; i-- != 0; ) t[ i ] = random.nextInt(); IntArrays.radixSortIndirect( perm, t, u, false ); for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ perm[ i ] ] < t[ perm[ i + 1 ] ] || t[ perm[ i ] ] == t[ perm[ i + 1 ] ] && u[ perm[ i ] ] <= u[ perm[ i + 1 ] ] ); IntArrays.shuffle( perm, new Random( 0 ) ); IntArrays.radixSortIndirect( perm, t, u, false ); for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ perm[ i ] ] < t[ perm[ i + 1 ] ] || t[ perm[ i ] ] == t[ perm[ i + 1 ] ] && u[ perm[ i ] ] <= u[ perm[ i + 1 ] ] ); for( int i = 100; i-- != 10; ) t[ i ] = random.nextInt(); IntArrays.radixSortIndirect( perm, t, u, 10, 100, false ); for( int i = 99; i-- != 10; ) assertTrue( t[ perm[ i ] ] < t[ perm[ i + 1 ] ] || t[ perm[ i ] ] == t[ perm[ i + 1 ] ] && u[ perm[ i ] ] <= u[ perm[ i + 1 ] ] ); t = new int[ 10000000 ]; u = new int[ 10000000 ]; perm = identity( t.length ); random = new Random( 0 ); for( int i = t.length; i-- != 0; ) t[ i ] = 
random.nextInt(); for( int i = t.length; i-- != 0; ) u[ i ] = random.nextInt(); IntArrays.radixSortIndirect( perm, t, u, false ); for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ perm[ i ] ] < t[ perm[ i + 1 ] ] || t[ perm[ i ] ] == t[ perm[ i + 1 ] ] && u[ perm[ i ] ] <= u[ perm[ i + 1 ] ] ); t = new int[ t.length ]; u = new int[ t.length ]; perm = identity( t.length ); IntArrays.radixSortIndirect( perm, t, u, false ); for( int i = t.length - 1; i-- != 0; ) assertEquals( i, perm[ i ] ); t = new int[ t.length ]; for( int i = 0; i < t.length; i++ ) t[ i ] = random.nextInt( 8 ); for( int i = 0; i < t.length; i++ ) u[ i ] = random.nextInt( 8 ); perm = identity( t.length ); IntArrays.radixSortIndirect( perm, t, u, false ); for( int i = t.length - 1; i-- != 0; ) assertTrue(i + " " + t[perm[i]]+ " "+ t[perm[i+1]] + " " + u[perm[i]] + " " + u[perm[i+1]] + " " + perm[i]+ " " +perm[i+1], t[ perm[ i ] ] < t[ perm[ i + 1 ] ] || t[ perm[ i ] ] == t[ perm[ i + 1 ] ] && u[ perm[ i ] ] <= u[ perm[ i + 1 ] ] ); } @Test public void testBinarySearchLargeKey() { final int[] a = { 1, 2, 3 }; IntArrays.binarySearch( a, 4 ); } @Test public void testReverse() { assertArrayEquals( new int[] { 0, 1, 2, 3 }, IntArrays.reverse( new int[] { 3, 2, 1, 0 } ) ); assertArrayEquals( new int[] { 0, 1, 2, 3, 4 }, IntArrays.reverse( new int[] { 4, 3, 2, 1, 0 } ) ); assertArrayEquals( new int[] { 4, 1, 2, 3, 0 }, IntArrays.reverse( new int[] { 4, 3, 2, 1, 0 }, 1, 4 ) ); assertArrayEquals( new int[] { 4, 2, 3, 1, 0 }, IntArrays.reverse( new int[] { 4, 3, 2, 1, 0 }, 1, 3 ) ); assertArrayEquals( new int[] { 0, 1, 2, 3, 4 }, IntArrays.reverse( new int[] { 0, 1, 2, 3, 4 }, 1, 2 ) ); } @Test public void testStabilize() { int[] perm, val; perm = new int[] { 0, 1, 2, 3 }; val = new int[] { 0, 0, 0, 0 }; IntArrays.stabilize( perm, val ); assertArrayEquals( new int[] { 0, 1, 2, 3 }, perm ); perm = new int[] { 3, 1, 2, 0 }; val = new int[] { 0, 0, 0, 0 }; IntArrays.stabilize( perm, val ); assertArrayEquals( new int[] { 0, 1, 2, 3 }, perm ); perm = new int[] { 3, 2, 1, 0 }; val = new int[] { 0, 1, 1, 2 }; IntArrays.stabilize( perm, val ); assertArrayEquals( new int[] { 3, 1, 2, 0 }, perm ); perm = new int[] { 3, 2, 1, 0 }; val = new int[] { 0, 0, 1, 1 }; IntArrays.stabilize( perm, val ); assertArrayEquals( new int[] { 2, 3, 0, 1 }, perm ); perm = new int[] { 4, 3, 2, 1, 0 }; val = new int[] { 1, 1, 0, 0, 0 }; IntArrays.stabilize( perm, val, 1, 3 ); assertArrayEquals( new int[] { 4, 2, 3, 1, 0 }, perm ); } } fastutil-7.1.0/test/it/unimi/dsi/fastutil/ints/IntBigArrayBigListTest.java0000664000000000000000000005264313050705451025323 0ustar rootrootpackage it.unimi.dsi.fastutil.ints; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; import it.unimi.dsi.fastutil.BigArrays; import java.util.Collections; import java.util.Iterator; import org.junit.Ignore; import org.junit.Test; @SuppressWarnings("rawtypes") public class IntBigArrayBigListTest { @Test public void testRemoveAllModifiesCollection() { IntBigList list = new IntBigArrayBigList(); assertFalse( list.removeAll( Collections.emptySet() ) ); assertEquals( IntBigLists.EMPTY_BIG_LIST, list ); } @SuppressWarnings("boxing") @Test public void testRemoveAllSkipSegment() { IntBigList list = new IntBigArrayBigList(); for( long i = 0; i < BigArrays.SEGMENT_SIZE + 10; i++ ) list.add( (int)( i % 2 ) ); assertTrue( list.removeAll( IntSets.singleton( 1 ) ) ); assertEquals( BigArrays.SEGMENT_SIZE / 2 + 5, list.size64() 
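/* the list spans a segment boundary (SEGMENT_SIZE + 10 alternating 0/1 entries), so
   removing the singleton { 1 } must drop exactly SEGMENT_SIZE / 2 + 5 elements and
   leave the same number of zeroes behind */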
); for( long i = 0; i < BigArrays.SEGMENT_SIZE / 2 + 5; i++ ) assertEquals( 0, list.getInt( i ) ); list = new IntBigArrayBigList(); for( long i = 0; i < BigArrays.SEGMENT_SIZE + 10; i++ ) list.add( (int)( i % 2 ) ); assertTrue( list.removeAll( Collections.singleton( 1 ) ) ); assertEquals( BigArrays.SEGMENT_SIZE / 2 + 5, list.size64() ); for( long i = 0; i < BigArrays.SEGMENT_SIZE / 2 + 5; i++ ) assertEquals( 0, list.getInt( i ) ); } @Test(expected = IndexOutOfBoundsException.class) public void testListIteratorTooLow() { new IntBigArrayBigList().listIterator( -1L ); } @Test(expected = IndexOutOfBoundsException.class) public void testListIteratorTooHigh() { new IntBigArrayBigList().listIterator( 1L ); } @Test public void testAddWithIterator() { IntBigList list = new IntBigArrayBigList(); list.iterator().add( 1 ); assertEquals( IntBigLists.singleton( 1 ), list ); } @Test public void testRemoveAll() { IntBigArrayBigList l = IntBigArrayBigList.wrap( new int[][] { { 0, 1, 2 } } ); l.removeAll( IntSets.singleton( 1 ) ); assertEquals( IntBigArrayBigList.wrap( new int[][] { { 0, 2 } } ), l ); l = IntBigArrayBigList.wrap( new int[][] { { 0, 1, 1, 2 } } ); l.removeAll( Collections.singleton( Integer.valueOf( 1 ) ) ); assertEquals( IntBigArrayBigList.wrap( new int[][] { { 0, 2 } } ), l ); } private static java.util.Random r = new java.util.Random( 0 ); private static int genKey() { return r.nextInt(); } private static Object[] k, nk; private static int kt[]; private static int nkt[]; @SuppressWarnings("unchecked") protected static void testLists( IntBigList m, IntBigList t, int n, int level ) { Exception mThrowsOutOfBounds, tThrowsOutOfBounds; Object rt = null; int rm = ( 0 ); if ( level > 4 ) return; /* Now we check that both sets agree on random keys. For m we use the polymorphic method. */ for ( int i = 0; i < n; i++ ) { int p = r.nextInt() % ( n * 2 ); int T = genKey(); mThrowsOutOfBounds = tThrowsOutOfBounds = null; try { m.set( p, T ); } catch ( IndexOutOfBoundsException e ) { mThrowsOutOfBounds = e; } try { t.set( p, ( Integer.valueOf( T ) ) ); } catch ( IndexOutOfBoundsException e ) { tThrowsOutOfBounds = e; } if ( mThrowsOutOfBounds == null ) p = r.nextInt() % ( n * 2 ); mThrowsOutOfBounds = tThrowsOutOfBounds = null; try { m.getInt( p ); } catch ( IndexOutOfBoundsException e ) { mThrowsOutOfBounds = e; } try { t.get( p ); } catch ( IndexOutOfBoundsException e ) { tThrowsOutOfBounds = e; } assertTrue( "Error (" + level + "): get() divergence at start in IndexOutOfBoundsException for index " + p + " (" + mThrowsOutOfBounds + ", " + tThrowsOutOfBounds + ")" , ( mThrowsOutOfBounds == null ) == ( tThrowsOutOfBounds == null ) ); if ( mThrowsOutOfBounds == null ) assertTrue( "Error (" + level + "): m and t differ aftre get() on position " + p + " (" + m.getInt( p ) + ", " + t.get( p ) + ")" , t.get( p ).equals( ( Integer.valueOf( m.getInt( p ) ) ) ) ); } /* Now we check that both sets agree on random keys. For m we use the standard method. 
*/ for ( int i = 0; i < n; i++ ) { int p = r.nextInt() % ( n * 2 ); mThrowsOutOfBounds = tThrowsOutOfBounds = null; try { m.get( p ); } catch ( IndexOutOfBoundsException e ) { mThrowsOutOfBounds = e; } try { t.get( p ); } catch ( IndexOutOfBoundsException e ) { tThrowsOutOfBounds = e; } assertTrue( "Error (" + level + "): get() divergence at start in IndexOutOfBoundsException for index " + p+ " (" + mThrowsOutOfBounds + ", " + tThrowsOutOfBounds + ")" , ( mThrowsOutOfBounds == null ) == ( tThrowsOutOfBounds == null ) ); if ( mThrowsOutOfBounds == null ) assertTrue( "Error (" + level + "): m and t differ at start on position " + p + " (" + m.get( p ) + ", " + t.get( p ) + ")" , t.get( p ).equals( m.get( p ) ) ); } /* Now we check that m and t are equal. */ if ( !m.equals( t ) || !t.equals( m ) ) System.err.println( "m: " + m + " t: " + t ); assertTrue( "Error (" + level + "): ! m.equals( t ) at start" , m.equals( t ) ); assertTrue( "Error (" + level + "): ! t.equals( m ) at start" , t.equals( m ) ); /* Now we check that m actually holds that data. */ for ( Iterator i = t.iterator(); i.hasNext(); ) { assertTrue( "Error (" + level + "): m and t differ on an entry after insertion (iterating on t)" , m.contains( i.next() ) ); } /* Now we check that m actually holds that data, but iterating on m. */ for ( Iterator i = m.listIterator(); i.hasNext(); ) { assertTrue( "Error (" + level + "): m and t differ on an entry after insertion (iterating on m)" , t.contains( i.next() ) ); } /* * Now we check that inquiries about random data give the same answer in m and t. For m we * use the polymorphic method. */ for ( int i = 0; i < n; i++ ) { int T = genKey(); assertTrue( "Error (" + level + "): divergence in content between t and m (polymorphic method)" , m.contains( T ) == t.contains( ( Integer.valueOf( T ) ) ) ); } /* * Again, we check that inquiries about random data give the same answer in m and t, but for * m we use the standard method. */ for ( int i = 0; i < n; i++ ) { int T = genKey(); assertTrue( "Error (" + level + "): divergence in content between t and m (polymorphic method)" , m.contains( ( Integer.valueOf( T ) ) ) == t.contains( ( Integer.valueOf( T ) ) ) ); } /* Now we add and remove random data in m and t, checking that the result is the same. 
*/ for ( int i = 0; i < 2 * n; i++ ) { int T = genKey(); try { m.add( T ); } catch ( IndexOutOfBoundsException e ) { mThrowsOutOfBounds = e; } try { t.add( ( Integer.valueOf( T ) ) ); } catch ( IndexOutOfBoundsException e ) { tThrowsOutOfBounds = e; } T = genKey(); int p = r.nextInt() % ( 2 * n + 1 ); mThrowsOutOfBounds = tThrowsOutOfBounds = null; try { m.add( p, T ); } catch ( IndexOutOfBoundsException e ) { mThrowsOutOfBounds = e; } try { t.add( p, ( Integer.valueOf( T ) ) ); } catch ( IndexOutOfBoundsException e ) { tThrowsOutOfBounds = e; } assertTrue( "Error (" + level + "): add() divergence in IndexOutOfBoundsException for index " + p + " for " + T+ " (" + mThrowsOutOfBounds + ", " + tThrowsOutOfBounds + ")" , ( mThrowsOutOfBounds == null ) == ( tThrowsOutOfBounds == null ) ); p = r.nextInt() % ( 2 * n + 1 ); mThrowsOutOfBounds = tThrowsOutOfBounds = null; try { rm = m.removeInt( p ); } catch ( IndexOutOfBoundsException e ) { mThrowsOutOfBounds = e; } try { rt = t.remove( p ); } catch ( IndexOutOfBoundsException e ) { tThrowsOutOfBounds = e; } assertTrue( "Error (" + level + "): remove() divergence in IndexOutOfBoundsException for index " + p + " (" + mThrowsOutOfBounds + ", " + tThrowsOutOfBounds + ")" , ( mThrowsOutOfBounds == null ) == ( tThrowsOutOfBounds == null ) ); if ( mThrowsOutOfBounds == null ) assertTrue( "Error (" + level + "): divergence in remove() between t and m (" + rt + ", " + rm + ")" , rt.equals( ( Integer.valueOf( rm ) ) ) ); } assertTrue( "Error (" + level + "): ! m.equals( t ) after add/remove" , m.equals( t ) ); assertTrue( "Error (" + level + "): ! t.equals( m ) after add/remove" , t.equals( m ) ); /* * Now we add random data in m and t using addAll on a collection, checking that the result * is the same. */ for ( int i = 0; i < n; i++ ) { int p = r.nextInt() % ( 2 * n + 1 ); java.util.Collection m1 = new java.util.ArrayList(); int s = r.nextInt( n / 2 + 1 ); for ( int j = 0; j < s; j++ ) m1.add( ( Integer.valueOf( genKey() ) ) ); mThrowsOutOfBounds = tThrowsOutOfBounds = null; try { m.addAll( p, m1 ); } catch ( IndexOutOfBoundsException e ) { mThrowsOutOfBounds = e; } try { t.addAll( p, m1 ); } catch ( IndexOutOfBoundsException e ) { tThrowsOutOfBounds = e; } assertTrue( "Error (" + level + "): addAll() divergence in IndexOutOfBoundsException for index " + p + " for "+ m1 + " (" + mThrowsOutOfBounds + ", " + tThrowsOutOfBounds + ")" , ( mThrowsOutOfBounds == null ) == ( tThrowsOutOfBounds == null ) ); assertTrue( "Error (" + level + "," + m + t + "): ! m.equals( t ) after addAll" , m.equals( t ) ); assertTrue( "Error (" + level + "," + m + t + "): ! t.equals( m ) after addAll" , t.equals( m ) ); } if ( m.size64() > n ) { m.size( n ); while ( t.size() != n ) t.remove( t.size() - 1 ); } /* * Now we add random data in m and t using addAll on a type-specific collection, checking * that the result is the same. 
*/ for ( int i = 0; i < n; i++ ) { int p = r.nextInt() % ( 2 * n + 1 ); IntCollection m1 = new IntBigArrayBigList(); java.util.Collection t1 = new java.util.ArrayList(); int s = r.nextInt( n / 2 + 1 ); for ( int j = 0; j < s; j++ ) { int x = genKey(); m1.add( x ); t1.add( ( Integer.valueOf( x ) ) ); } mThrowsOutOfBounds = tThrowsOutOfBounds = null; try { m.addAll( p, m1 ); } catch ( IndexOutOfBoundsException e ) { mThrowsOutOfBounds = e; } try { t.addAll( p, t1 ); } catch ( IndexOutOfBoundsException e ) { tThrowsOutOfBounds = e; } assertTrue( "Error (" + level + "): polymorphic addAll() divergence in IndexOutOfBoundsException for index "+ p + " for " + m1 + " (" + mThrowsOutOfBounds + ", " + tThrowsOutOfBounds + ")" , ( mThrowsOutOfBounds == null ) == ( tThrowsOutOfBounds == null ) ); assertTrue( "Error (" + level + "," + m + t + "): ! m.equals( t ) after polymorphic addAll" , m.equals( t ) ); assertTrue( "Error (" + level + "," + m + t + "): ! t.equals( m ) after polymorphic addAll" , t.equals( m ) ); } if ( m.size64() > n ) { m.size( n ); while ( t.size() != n ) t.remove( t.size() - 1 ); } /* * Now we add random data in m and t using addAll on a list, checking that the result is the * same. */ for ( int i = 0; i < n; i++ ) { int p = r.nextInt() % ( 2 * n + 1 ); IntBigList m1 = new IntBigArrayBigList(); java.util.Collection t1 = new java.util.ArrayList(); int s = r.nextInt( n / 2 + 1 ); for ( int j = 0; j < s; j++ ) { int x = genKey(); m1.add( x ); t1.add( ( Integer.valueOf( x ) ) ); } mThrowsOutOfBounds = tThrowsOutOfBounds = null; try { m.addAll( p, m1 ); } catch ( IndexOutOfBoundsException e ) { mThrowsOutOfBounds = e; } try { t.addAll( p, t1 ); } catch ( IndexOutOfBoundsException e ) { tThrowsOutOfBounds = e; } assertTrue( "Error (" + level + "): list addAll() divergence in IndexOutOfBoundsException for index " + p+ " for " + m1 + " (" + mThrowsOutOfBounds + ", " + tThrowsOutOfBounds + ")" , ( mThrowsOutOfBounds == null ) == ( tThrowsOutOfBounds == null ) ); assertTrue( "Error (" + level + "): ! m.equals( t ) after list addAll" , m.equals( t ) ); assertTrue( "Error (" + level + "): ! t.equals( m ) after list addAll" , t.equals( m ) ); } /* Now we check that both sets agree on random keys. For m we use the standard method. */ for ( int i = 0; i < n; i++ ) { int p = r.nextInt() % ( n * 2 ); mThrowsOutOfBounds = tThrowsOutOfBounds = null; try { m.get( p ); } catch ( IndexOutOfBoundsException e ) { mThrowsOutOfBounds = e; } try { t.get( p ); } catch ( IndexOutOfBoundsException e ) { tThrowsOutOfBounds = e; } assertTrue( "Error (" + level + "): get() divergence in IndexOutOfBoundsException for index " + p + " (" + mThrowsOutOfBounds + ", " + tThrowsOutOfBounds + ")" , ( mThrowsOutOfBounds == null ) == ( tThrowsOutOfBounds == null ) ); if ( mThrowsOutOfBounds == null ) assertTrue( "Error (" + level + "): m and t differ on position " + p + " (" + m.get( p ) + ", " + t.get( p ) + ")" , t.get( p ).equals( m.get( p ) ) ); } /* Now we inquiry about the content with indexOf()/lastIndexOf(). 
*/ for ( int i = 0; i < 10 * n; i++ ) { int T = genKey(); assertTrue( "Error (" + level + "): indexOf() divergence for " + T + " (" + m.indexOf( ( Integer.valueOf( T ) ) ) + ", " + t.indexOf( ( Integer.valueOf( T ) ) ) + ")", m.indexOf( ( Integer.valueOf( T ) ) ) == t.indexOf( ( Integer.valueOf( T ) ) ) ); assertTrue( "Error (" + level + "): lastIndexOf() divergence for " + T + " (" + m.lastIndexOf( ( Integer.valueOf( T ) ) ) + ", " + t.lastIndexOf( ( Integer.valueOf( T ) ) ) + ")", m.lastIndexOf( ( Integer.valueOf( T ) ) ) == t.lastIndexOf( ( Integer.valueOf( T ) ) ) ); assertTrue( "Error (" + level + "): polymorphic indexOf() divergence for " + T + " (" + m.indexOf( T ) + ", " + t.indexOf( ( Integer.valueOf( T ) ) ) + ")" , m.indexOf( T ) == t.indexOf( ( Integer.valueOf( T ) ) ) ); assertTrue( "Error (" + level + "): polymorphic lastIndexOf() divergence for " + T + " (" + m.lastIndexOf( T ) + ", " + t.lastIndexOf( ( Integer.valueOf( T ) ) ) + ")" , m.lastIndexOf( T ) == t.lastIndexOf( ( Integer.valueOf( T ) ) ) ); } /* Now we check cloning. */ if ( level == 0 ) { assertTrue( "Error (" + level + "): m does not equal m.clone()" , m.equals( ( (IntBigArrayBigList)m ).clone() ) ); assertTrue( "Error (" + level + "): m.clone() does not equal m" , ( (IntBigArrayBigList)m ).clone().equals( m ) ); } /* Now we play with constructors. */ assertTrue( "Error (" + level + "): m does not equal new ( type-specific Collection m )" , m.equals( new IntBigArrayBigList( (IntCollection)m ) ) ); assertTrue( "Error (" + level + "): new ( type-specific nCollection m ) does not equal m" , ( new IntBigArrayBigList( (IntCollection)m ) ).equals( m ) ); assertTrue( "Error (" + level + "): m does not equal new ( type-specific List m )" , m.equals( new IntBigArrayBigList( m ) ) ); assertTrue( "Error (" + level + "): new ( type-specific List m ) does not equal m" , ( new IntBigArrayBigList( m ) ).equals( m ) ); assertTrue( "Error (" + level + "): m does not equal new ( m.listIterator() )" , m.equals( new IntBigArrayBigList( m.listIterator() ) ) ); assertTrue( "Error (" + level + "): new ( m.listIterator() ) does not equal m" , ( new IntBigArrayBigList( m.listIterator() ) ).equals( m ) ); assertTrue( "Error (" + level + "): m does not equal new ( m.type_specific_iterator() )" , m.equals( new IntBigArrayBigList( m.iterator() ) ) ); assertTrue( "Error (" + level + "): new ( m.type_specific_iterator() ) does not equal m" , ( new IntBigArrayBigList( m.iterator() ) ).equals( m ) ); int h = m.hashCode(); /* Now we save and read m. */ IntBigList m2 = null; try { java.io.File ff = new java.io.File( "it.unimi.dsi.fastutil.test" ); java.io.OutputStream os = new java.io.FileOutputStream( ff ); java.io.ObjectOutputStream oos = new java.io.ObjectOutputStream( os ); oos.writeObject( m ); oos.close(); java.io.InputStream is = new java.io.FileInputStream( ff ); java.io.ObjectInputStream ois = new java.io.ObjectInputStream( is ); m2 = (IntBigList)ois.readObject(); ois.close(); ff.delete(); } catch ( Exception e ) { e.printStackTrace(); System.exit( 1 ); } assertTrue( "Error (" + level + "): hashCode() changed after save/read" , m2.hashCode() == h ); /* Now we check that m2 actually holds that data. */ assertTrue( "Error (" + level + "): ! m2.equals( t ) after save/read" , m2.equals( t ) ); assertTrue( "Error (" + level + "): ! t.equals( m2 ) after save/read" , t.equals( m2 ) ); /* Now we take out of m everything, and check that it is empty. 
*/ for ( Iterator i = t.iterator(); i.hasNext(); ) m2.remove( i.next() ); assertTrue( "Error (" + level + "): m2 is not empty (as it should be)" , m2.isEmpty() ); /* Now we play with iterators. */ { IntBigListIterator i; IntBigListIterator j; i = m.listIterator(); j = t.listIterator(); for ( int k = 0; k < 2 * n; k++ ) { assertTrue( "Error (" + level + "): divergence in hasNext()" , i.hasNext() == j.hasNext() ); assertTrue( "Error (" + level + "): divergence in hasPrevious()" , i.hasPrevious() == j.hasPrevious() ); if ( r.nextFloat() < .8 && i.hasNext() ) { assertTrue( "Error (" + level + "): divergence in next()" , i.next().equals( j.next() ) ); if ( r.nextFloat() < 0.2 ) { i.remove(); j.remove(); } else if ( r.nextFloat() < 0.2 ) { int T = genKey(); i.set( T ); j.set( ( Integer.valueOf( T ) ) ); } else if ( r.nextFloat() < 0.2 ) { int T = genKey(); i.add( T ); j.add( ( Integer.valueOf( T ) ) ); } } else if ( r.nextFloat() < .2 && i.hasPrevious() ) { assertTrue( "Error (" + level + "): divergence in previous()" , i.previous().equals( j.previous() ) ); if ( r.nextFloat() < 0.2 ) { i.remove(); j.remove(); } else if ( r.nextFloat() < 0.2 ) { int T = genKey(); i.set( T ); j.set( ( Integer.valueOf( T ) ) ); } else if ( r.nextFloat() < 0.2 ) { int T = genKey(); i.add( T ); j.add( ( Integer.valueOf( T ) ) ); } } assertTrue( "Error (" + level + "): divergence in nextIndex()" , i.nextIndex() == j.nextIndex() ); assertTrue( "Error (" + level + "): divergence in previousIndex()" , i.previousIndex() == j.previousIndex() ); } } { Object I, J; int from = r.nextInt( m.size() + 1 ); IntBigListIterator i; IntBigListIterator j; i = m.listIterator( from ); j = t.listIterator( from ); for ( int k = 0; k < 2 * n; k++ ) { assertTrue( "Error (" + level + "): divergence in hasNext() (iterator with starting point " + from + ")" , i.hasNext() == j.hasNext() ); assertTrue( "Error (" + level + "): divergence in hasPrevious() (iterator with starting point " + from + ")" , i.hasPrevious() == j.hasPrevious() ); if ( r.nextFloat() < .8 && i.hasNext() ) { I = i.next(); J = j.next(); assertTrue( "Error (" + level + "): divergence in next() (" + I + ", " + J + ", iterator with starting point " + from + ")" , I.equals( J ) ); // System.err.println("Done next " + I + " " + J + " " + badPrevious); if ( r.nextFloat() < 0.2 ) { // System.err.println("Removing in next"); i.remove(); j.remove(); } else if ( r.nextFloat() < 0.2 ) { int T = genKey(); i.set( T ); j.set( ( Integer.valueOf( T ) ) ); } else if ( r.nextFloat() < 0.2 ) { int T = genKey(); i.add( T ); j.add( ( Integer.valueOf( T ) ) ); } } else if ( r.nextFloat() < .2 && i.hasPrevious() ) { I = i.previous(); J = j.previous(); assertTrue( "Error (" + level + "): divergence in previous() (" + I + ", " + J + ", iterator with starting point " + from + ")" , I.equals( J ) ); if ( r.nextFloat() < 0.2 ) { // System.err.println("Removing in prev"); i.remove(); j.remove(); } else if ( r.nextFloat() < 0.2 ) { int T = genKey(); i.set( T ); j.set( ( Integer.valueOf( T ) ) ); } else if ( r.nextFloat() < 0.2 ) { int T = genKey(); i.add( T ); j.add( ( Integer.valueOf( T ) ) ); } } } } /* Now we check that m actually holds that data. */ assertTrue( "Error (" + level + "): ! m.equals( t ) after iteration" , m.equals( t ) ); assertTrue( "Error (" + level + "): ! t.equals( m ) after iteration" , t.equals( m ) ); /* Now we select a pair of keys and create a subset. 
*/ if ( !m.isEmpty() ) { int start = r.nextInt( m.size() ); int end = start + r.nextInt( m.size() - start ); // System.err.println("Checking subList from " + start + " to " + end + " (level=" + // (level+1) + ")..." ); testLists( m.subList( start, end ), t.subList( start, end ), n, level + 1 ); assertTrue( "Error (" + level + "," + m + t + "): ! m.equals( t ) after subList" , m.equals( t ) ); assertTrue( "Error (" + level + "): ! t.equals( m ) after subList" , t.equals( m ) ); } m.clear(); t.clear(); assertTrue( "Error (" + level + "): m is not empty after clear()" , m.isEmpty() ); } protected static void test( int n ) { IntBigArrayBigList m = new IntBigArrayBigList(); IntBigList t = IntBigLists.asBigList( new IntArrayList() ); k = new Object[ n ]; nk = new Object[ n ]; kt = new int[ n ]; nkt = new int[ n ]; for ( int i = 0; i < n; i++ ) { k[ i ] = new Integer( kt[ i ] = genKey() ); nk[ i ] = new Integer( nkt[ i ] = genKey() ); } /* We add pairs to t. */ for ( int i = 0; i < n; i++ ) t.add( (Integer)k[ i ] ); /* We add to m the same data */ m.addAll( t ); testLists( m, t, n, 0 ); return; } @Test public void test1() { test( 1 ); } @Test public void test10() { test( 10 ); } @Test public void test100() { test( 100 ); } @Ignore("Too long") @Test public void test1000() { test( 1000 ); } } fastutil-7.1.0/test/it/unimi/dsi/fastutil/ints/IntBigArraysTest.java0000664000000000000000000002452113050705451024222 0ustar rootrootpackage it.unimi.dsi.fastutil.ints; import static org.junit.Assert.*; import static it.unimi.dsi.fastutil.ints.IntBigArrays.set; import static it.unimi.dsi.fastutil.ints.IntBigArrays.get; import java.util.Arrays; import java.util.Random; import org.junit.Test; public class IntBigArraysTest { public static int[][] identity( final int n ) { final int[][] perm = IntBigArrays.newBigArray( n ); for( int i = n; i-- != 0; ) IntBigArrays.set( perm, i , i ); return perm; } @Test public void testQuickSort() { int[] s = new int[] { 2, 1, 5, 2, 1, 0, 9, 1, 4, 2, 4, 6, 8, 9, 10, 12, 1, 7 }; Arrays.sort( s ); int[][] sorted = IntBigArrays.wrap( s.clone() ); int[][] a = IntBigArrays.wrap( s.clone() ); IntBigArrays.quickSort( a ); assertArrayEquals( sorted, a ); IntBigArrays.quickSort( a ); assertArrayEquals( sorted, a ); a = IntBigArrays.wrap( s.clone() ); IntBigArrays.quickSort( a, IntComparators.NATURAL_COMPARATOR ); assertArrayEquals( sorted, a ); IntBigArrays.quickSort( a, IntComparators.NATURAL_COMPARATOR ); assertArrayEquals( sorted, a ); } private void testCopy( int n ) { int[][] a = IntBigArrays.newBigArray( n ); for ( int i = 0; i < n; i++ ) set( a, i, i ); IntBigArrays.copy( a, 0, a, 1, n - 2 ); assertEquals( 0, a[ 0 ][ 0 ] ); for ( int i = 0; i < n - 2; i++ ) assertEquals( i, get( a, i + 1 ) ); for ( int i = 0; i < n; i++ ) set( a, i, i ); IntBigArrays.copy( a, 1, a, 0, n - 1 ); for ( int i = 0; i < n - 1; i++ ) assertEquals( i + 1, get( a, i ) ); for ( int i = 0; i < n; i++ ) set( a, i, i ); int[] b = new int[ n ]; for ( int i = 0; i < n; i++ ) b[ i ] = i; assertArrayEquals( a, IntBigArrays.wrap( b ) ); } @Test public void testCopy10() { testCopy( 10 ); } @Test public void testCopy1000() { testCopy( 1000 ); } @Test public void testCopy1000000() { testCopy( 1000000 ); } @Test public void testBinarySearch() { int[] a = new int[] { 25, 32, 1, 3, 2, 0, 40, 7, 13, 12, 11, 10, -1, -6, -18, 2000 }; Arrays.sort( a ); int[][] b = IntBigArrays.wrap( a.clone() ); for( int i = -1; i < 20; i++ ) { assertEquals( String.valueOf(i), Arrays.binarySearch( a, i ), IntBigArrays.binarySearch( b, i ) ); 
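// the comparator-based overload should agree with java.util.Arrays.binarySearch as well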
assertEquals( String.valueOf(i), Arrays.binarySearch( a, i ), IntBigArrays.binarySearch( b, i, IntComparators.NATURAL_COMPARATOR ) ); } for( int i = -1; i < 20; i++ ) { assertEquals( Arrays.binarySearch( a, 5, 13, i ), IntBigArrays.binarySearch( b, 5, 13, i ) ); assertEquals( Arrays.binarySearch( a, 5, 13, i ), IntBigArrays.binarySearch( b, 5, 13, i, IntComparators.NATURAL_COMPARATOR ) ); } } @Test public void testTrim() { int[] a = new int[] { 2, 1, 5, 2, 1, 0, 9, 1, 4, 2, 4, 6, 8, 9, 10, 12, 1, 7 }; int[][] b = IntBigArrays.wrap( a.clone() ); for( int i = a.length; i-- != 0; ) { int[][] t = IntBigArrays.trim( b, i ); final long l = IntBigArrays.length( t ); assertEquals( i, l ); for( int p = 0; p < l; p++ ) assertEquals( a[ p ], IntBigArrays.get( t, p ) ); } } @Test public void testEquals() { int[] a = new int[] { 2, 1, 5, 2, 1, 0, 9, 1, 4, 2, 4, 6, 8, 9, 10, 12, 1, 7 }; int[][] b = IntBigArrays.wrap( a.clone() ); int[][] c = IntBigArrays.wrap( a.clone() ); assertTrue( IntBigArrays.equals( b, c ) ); b[ 0 ][ 0 ] = 0; assertFalse( IntBigArrays.equals( b, c ) ); } @Test public void testRadixSort1() { int[][] t = IntBigArrays.wrap( new int[] { 2, 1, 0, 4 } ); IntBigArrays.radixSort( t ); for( long i = IntBigArrays.length( t ) - 1; i-- != 0; ) assertTrue( IntBigArrays.get( t, i ) <= IntBigArrays.get( t, i + 1 ) ); t = IntBigArrays.wrap( new int[] { 2, -1, 0, -4 } ); IntBigArrays.radixSort( t ); for( long i = IntBigArrays.length( t ) - 1; i-- != 0; ) assertTrue( IntBigArrays.get( t, i ) <= IntBigArrays.get( t, i + 1 ) ); t = IntBigArrays.shuffle( identity( 100 ), new Random( 0 ) ); IntBigArrays.radixSort( t ); for( long i = IntBigArrays.length( t ) - 1; i-- != 0; ) assertTrue( IntBigArrays.get( t, i ) <= IntBigArrays.get( t, i + 1 ) ); t = IntBigArrays.newBigArray( 100 ); Random random = new Random( 0 ); for( long i = IntBigArrays.length( t ); i-- != 0; ) IntBigArrays.set( t, i, random.nextInt() ); IntBigArrays.radixSort( t ); for( long i = IntBigArrays.length( t ) - 1; i-- != 0; ) assertTrue( IntBigArrays.get( t, i ) <= IntBigArrays.get( t, i + 1 ) ); t = IntBigArrays.newBigArray( 100000 ); random = new Random( 0 ); for( long i = IntBigArrays.length( t ); i-- != 0; ) IntBigArrays.set( t, i, random.nextInt() ); IntBigArrays.radixSort( t ); for( long i = IntBigArrays.length( t ) - 1; i-- != 0; ) assertTrue( IntBigArrays.get( t, i ) <= IntBigArrays.get( t, i + 1 ) ); for( long i = 100; i-- != 10; ) IntBigArrays.set( t, i, random.nextInt() ); IntBigArrays.radixSort( t, 10, 100 ); for( long i = 99; i-- != 10; ) assertTrue( IntBigArrays.get( t, i ) <= IntBigArrays.get( t, i + 1 ) ); t = IntBigArrays.newBigArray( 1000000 ); random = new Random( 0 ); for( long i = IntBigArrays.length( t ); i-- != 0; ) IntBigArrays.set( t, i, random.nextInt() ); IntBigArrays.radixSort( t ); for( long i = IntBigArrays.length( t ) - 1; i-- != 0; ) assertTrue( IntBigArrays.get( t, i ) <= IntBigArrays.get( t, i + 1 ) ); } @Test public void testRadixSort2() { int d[][], e[][]; d = IntBigArrays.newBigArray( 10 ); for( long i = IntBigArrays.length( d ); i-- != 0; ) IntBigArrays.set( d, i, (int)( 3 - i % 3 ) ); e = IntBigArrays.shuffle( identity( 10 ), new Random( 0 ) ); IntBigArrays.radixSort( d, e ); for( long i = IntBigArrays.length( d ) - 1; i-- != 0; ) assertTrue( Long.toString( i ) + ": <" + IntBigArrays.get( d, i ) + ", " + IntBigArrays.get( e, i ) + ">, <" + IntBigArrays.get( d, i + 1 ) + ", " + IntBigArrays.get( e, i + 1 ) + ">", IntBigArrays.get( d, i ) < IntBigArrays.get( d, i + 1 ) || IntBigArrays.get( d, i ) == 
IntBigArrays.get( d, i + 1 ) && IntBigArrays.get( e, i ) <= IntBigArrays.get( e, i + 1 ) ); d = IntBigArrays.newBigArray( 100000 ); for( long i = IntBigArrays.length( d ); i-- != 0; ) IntBigArrays.set( d, i, (int)( 100 - i % 100 ) ); e = IntBigArrays.shuffle( identity( 100000 ), new Random( 6 ) ); IntBigArrays.radixSort( d, e ); for( long i = IntBigArrays.length( d ) - 1; i-- != 0; ) assertTrue( Long.toString( i ) + ": <" + IntBigArrays.get( d, i ) + ", " + IntBigArrays.get( e, i ) + ">, <" + IntBigArrays.get( d, i + 1 ) + ", " + IntBigArrays.get( e, i + 1 ) + ">", IntBigArrays.get( d, i ) < IntBigArrays.get( d, i + 1 ) || IntBigArrays.get( d, i ) == IntBigArrays.get( d, i + 1 ) && IntBigArrays.get( e, i ) <= IntBigArrays.get( e, i + 1 ) ); d = IntBigArrays.newBigArray( 10 ); for( long i = IntBigArrays.length( d ); i-- != 0; ) IntBigArrays.set( d, i, (int)( i % 3 - 2 ) ); Random random = new Random( 0 ); e = IntBigArrays.newBigArray( IntBigArrays.length( d ) ); for( long i = IntBigArrays.length( d ); i-- != 0; ) IntBigArrays.set( e, i, random.nextInt() ); IntBigArrays.radixSort( d, e ); for( long i = IntBigArrays.length( d ) - 1; i-- != 0; ) assertTrue( Long.toString( i ) + ": <" + IntBigArrays.get( d, i ) + ", " + IntBigArrays.get( e, i ) + ">, <" + IntBigArrays.get( d, i + 1 ) + ", " + IntBigArrays.get( e, i + 1 ) + ">", IntBigArrays.get( d, i ) < IntBigArrays.get( d, i + 1 ) || IntBigArrays.get( d, i ) == IntBigArrays.get( d, i + 1 ) && IntBigArrays.get( e, i ) <= IntBigArrays.get( e, i + 1 ) ); d = IntBigArrays.newBigArray( 100000 ); random = new Random( 0 ); for( long i = IntBigArrays.length( d ); i-- != 0; ) IntBigArrays.set( d, i, random.nextInt() ); e = IntBigArrays.newBigArray( IntBigArrays.length( d ) ); for( long i = IntBigArrays.length( d ); i-- != 0; ) IntBigArrays.set( e, i, random.nextInt() ); IntBigArrays.radixSort( d, e ); for( long i = IntBigArrays.length( d ) - 1; i-- != 0; ) assertTrue( Long.toString( i ) + ": <" + IntBigArrays.get( d, i ) + ", " + IntBigArrays.get( e, i ) + ">, <" + IntBigArrays.get( d, i + 1 ) + ", " + IntBigArrays.get( e, i + 1 ) + ">", IntBigArrays.get( d, i ) < IntBigArrays.get( d, i + 1 ) || IntBigArrays.get( d, i ) == IntBigArrays.get( d, i + 1 ) && IntBigArrays.get( e, i ) <= IntBigArrays.get( e, i + 1 ) ); for( long i = 100; i-- != 10; ) IntBigArrays.set( e, i, random.nextInt() ); IntBigArrays.radixSort( d, e, 10, 100 ); for( long i = 99; i-- != 10; ) assertTrue( Long.toString( i ) + ": <" + IntBigArrays.get( d, i ) + ", " + IntBigArrays.get( e, i ) + ">, <" + IntBigArrays.get( d, i + 1 ) + ", " + IntBigArrays.get( e, i + 1 ) + ">", IntBigArrays.get( d, i ) < IntBigArrays.get( d, i + 1 ) || IntBigArrays.get( d, i ) == IntBigArrays.get( d, i + 1 ) && IntBigArrays.get( e, i ) <= IntBigArrays.get( e, i + 1 ) ); d = IntBigArrays.newBigArray( 1000000 ); random = new Random( 0 ); for( long i = IntBigArrays.length( d ); i-- != 0; ) IntBigArrays.set( d, i, random.nextInt() ); e = IntBigArrays.newBigArray( IntBigArrays.length( d ) ); for( long i = IntBigArrays.length( d ); i-- != 0; ) IntBigArrays.set( e, i, random.nextInt() ); IntBigArrays.radixSort( d, e ); for( long i = IntBigArrays.length( d ) - 1; i-- != 0; ) assertTrue( Long.toString( i ) + ": <" + IntBigArrays.get( d, i ) + ", " + IntBigArrays.get( e, i ) + ">, <" + IntBigArrays.get( d, i + 1 ) + ", " + IntBigArrays.get( e, i + 1 ) + ">", IntBigArrays.get( d, i ) < IntBigArrays.get( d, i + 1 ) || IntBigArrays.get( d, i ) == IntBigArrays.get( d, i + 1 ) && IntBigArrays.get( e, i ) <= 
IntBigArrays.get( e, i + 1 ) ); } @Test public void testShuffle() { int[] a = new int[ 100 ]; for( int i = a.length; i-- != 0; ) a[ i ] = i; int[][] b = IntBigArrays.wrap( a ); IntBigArrays.shuffle( b, new Random() ); boolean[] c = new boolean[ a.length ]; for( long i = IntBigArrays.length( b ); i-- != 0; ) { assertFalse( c[ IntBigArrays.get( b, i ) ] ); c[ IntBigArrays.get( b, i ) ] = true; } } @Test public void testShuffleFragment() { int[] a = new int[ 100 ]; for( int i = a.length; i-- != 0; ) a[ i ] = -1; for( int i = 10; i < 30; i++ ) a[ i ] = i - 10; int[][] b = IntBigArrays.wrap( a ); IntBigArrays.shuffle( b, 10, 30, new Random() ); boolean[] c = new boolean[ 20 ]; for( int i = 20; i-- != 0; ) { assertFalse( c[ IntBigArrays.get( b, i + 10 ) ] ); c[ IntBigArrays.get( b, i + 10 ) ] = true; } } @Test public void testBinarySearchLargeKey() { final int[][] a = IntBigArrays.wrap( new int[] { 1, 2, 3 } ); IntBigArrays.binarySearch( a, 4 ); } }fastutil-7.1.0/test/it/unimi/dsi/fastutil/ints/IntCollectionsTest.java0000664000000000000000000000124613050705451024614 0ustar rootrootpackage it.unimi.dsi.fastutil.ints; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; import org.junit.Test; public class IntCollectionsTest { @Test public void testIsNotEmpty() { IntCollection test = IntCollections.asCollection(new IntIterable() { @Override public IntIterator iterator() { return IntSets.singleton(0).iterator(); } }); assertFalse(test.isEmpty()); } @Test public void testEmpty() { IntCollection test = IntCollections.asCollection(new IntIterable() { @Override public IntIterator iterator() { return IntSets.EMPTY_SET.iterator(); } }); assertTrue(test.isEmpty()); } } fastutil-7.1.0/test/it/unimi/dsi/fastutil/ints/IntHeapPriorityQueueTest.java0000664000000000000000000000146613050705451025766 0ustar rootrootpackage it.unimi.dsi.fastutil.ints; import static org.junit.Assert.assertEquals; import it.unimi.dsi.fastutil.io.BinIO; import java.io.File; import java.io.IOException; import org.junit.Test; public class IntHeapPriorityQueueTest { @SuppressWarnings("deprecation") @Test public void testSerialize() throws IOException, ClassNotFoundException { IntHeapPriorityQueue q = new IntHeapPriorityQueue(); for( int i = 0; i < 100; i++ ) q.enqueue( i ); File file = File.createTempFile( getClass().getPackage().getName() + "-", "-tmp" ); file.deleteOnExit(); BinIO.storeObject( q, file ); IntHeapPriorityQueue r = (IntHeapPriorityQueue)BinIO.loadObject( file ); file.delete(); for( int i = 0; i < 100; i++ ) { assertEquals( q.first(), r.first() ); assertEquals( q.dequeue(), r.dequeue() ); } } } fastutil-7.1.0/test/it/unimi/dsi/fastutil/ints/IntHeapSemiIndirectPriorityQueueTest.java0000664000000000000000000000430113050705451030255 0ustar rootrootpackage it.unimi.dsi.fastutil.ints; import static org.junit.Assert.assertArrayEquals; import java.util.Arrays; import junit.framework.TestCase; import org.junit.Test; public class IntHeapSemiIndirectPriorityQueueTest extends TestCase { public void testTops() { int refArray[] = { 4, 3, 2, 1, 0, 3, 2, 1, 0, 2, 1, 0, 1, 0, 0 }; int tops[] = new int[ refArray.length ]; final IntHeapSemiIndirectPriorityQueue queue = new IntHeapSemiIndirectPriorityQueue( refArray ); for( int i = refArray.length; i-- != 0; ) queue.enqueue( i ); assertEquals( 5, queue.front( tops ) ); assertEquals( new IntOpenHashSet( new int[] { 4, 8, 11, 13, 14 } ), new IntOpenHashSet( tops, 0, 5 ) ); for( int i = 4; i-- != 0; ) { queue.dequeue(); assertEquals( i + 1, queue.front( tops ) 
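/* the test expects front() to fill tops with the indices of all reference-array entries
   tied with the current minimum and to return their number */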
); } queue.dequeue(); assertEquals( 4, queue.front( tops ) ); assertEquals( new IntOpenHashSet( new int[] { 3, 7, 10, 12 } ), new IntOpenHashSet( tops, 0, 4 ) ); for( int i = 3; i-- != 0; ) { queue.dequeue(); assertEquals( i + 1, queue.front( tops ) ); } queue.dequeue(); assertEquals( 3, queue.front( tops ) ); assertEquals( new IntOpenHashSet( new int[] { 2, 6, 9 } ), new IntOpenHashSet( tops, 0, 3 ) ); for( int i = 2; i-- != 0; ) { queue.dequeue(); assertEquals( i + 1, queue.front( tops ) ); } queue.dequeue(); assertEquals( 2, queue.front( tops ) ); assertEquals( new IntOpenHashSet( new int[] { 1, 5 } ), new IntOpenHashSet( tops, 0, 2 ) ); queue.dequeue(); assertEquals( 1, queue.front( tops ) ); queue.dequeue(); assertEquals( 1, queue.front( tops ) ); } @Test public void testFrontWithComparator() { final int[] refArray = { 8, 16, 9 }; IntComparator comparator = new AbstractIntComparator() { private static final long serialVersionUID = 1L; @Override public int compare( int k1, int k2 ) { return ( k1 & 3 ) - ( k2 & 3 ); } }; IntHeapSemiIndirectPriorityQueue queue = new IntHeapSemiIndirectPriorityQueue( refArray, comparator ); queue.enqueue( 0 ); queue.enqueue( 1 ); queue.enqueue( 2 ); final int[] front = new int[ 2 ]; assertEquals( 2, queue.front( front ) ); Arrays.sort( front ); assertArrayEquals( new int[] { 0, 1 }, front ); } } fastutil-7.1.0/test/it/unimi/dsi/fastutil/ints/IntLinkedOpenCustomHashSetTest.java0000664000000000000000000000174213050705451027042 0ustar rootrootpackage it.unimi.dsi.fastutil.ints; import static org.junit.Assert.*; import org.junit.Test; public class IntLinkedOpenCustomHashSetTest { @Test public void testGetNullKey() { final IntLinkedOpenCustomHashSet s = new IntLinkedOpenCustomHashSet( new IntHash.Strategy() { @Override public int hashCode( int o ) { return o % 10; } @Override public boolean equals( int a, int b ) { return ( a - b ) % 10 == 0; } } ); s.add( 3 ); s.add( 10 ); s.add( 0 ); assertTrue( s.contains( 0 ) ); assertTrue( s.contains( 10 ) ); assertTrue( s.contains( 3 ) ); assertFalse( s.contains( 1 ) ); IntListIterator i = s.iterator(); assertEquals( 3, i.nextInt() ); assertEquals( 10, i.nextInt() ); assertFalse( i.hasNext() ); s.remove( 0 ); assertFalse( s.contains( 0 ) ); assertFalse( s.contains( 10 ) ); s.add( 10 ); i = s.iterator(); assertEquals( 3, i.nextInt() ); assertEquals( 10, i.nextInt() ); assertFalse( i.hasNext() ); } } fastutil-7.1.0/test/it/unimi/dsi/fastutil/ints/IntLinkedOpenHashSetTest.java0000664000000000000000000004422213050705451025647 0ustar rootrootpackage it.unimi.dsi.fastutil.ints; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; import it.unimi.dsi.fastutil.Hash; import java.io.IOException; import java.util.NoSuchElementException; import org.junit.Ignore; import org.junit.Test; @SuppressWarnings("rawtypes") public class IntLinkedOpenHashSetTest { @Test public void testStrangeRetainAllCase() { IntArrayList initialElements = IntArrayList.wrap(new int[] { 586, 940, 1086, 1110, 1168, 1184, 1185, 1191, 1196, 1229, 1237, 1241, 1277, 1282, 1284, 1299, 1308, 1309, 1310, 1314, 1328, 1360, 1366, 1370, 1378, 1388, 1392, 1402, 1406, 1411, 1426, 1437, 1455, 1476, 1489, 1513, 1533, 1538, 1540, 1541, 1543, 1547, 1548, 1551, 1557, 1568, 1575, 1577, 1582, 1583, 1584, 1588, 1591, 1592, 1601, 1610, 1618, 1620, 1633, 1635, 1653, 1654, 1655, 1660, 1661, 1665, 1674, 1686, 1688, 1693, 1700, 1705, 1717, 1720, 1732, 1739, 1740, 1745, 1746, 1752, 1754, 1756, 1765, 1766, 
1767, 1771, 1772, 1781, 1789, 1790, 1793, 1801, 1806, 1823, 1825, 1827, 1828, 1829, 1831, 1832, 1837, 1839, 1844, 2962, 2969, 2974, 2990, 3019, 3023, 3029, 3030, 3052, 3072, 3074, 3075, 3093, 3109, 3110, 3115, 3116, 3125, 3137, 3142, 3156, 3160, 3176, 3180, 3188, 3193, 3198, 3207, 3209, 3210, 3213, 3214, 3221, 3225, 3230, 3231, 3236, 3240, 3247, 3261, 4824, 4825, 4834, 4845, 4852, 4858, 4859, 4867, 4871, 4883, 4886, 4887, 4905, 4907, 4911, 4920, 4923, 4924, 4925, 4934, 4942, 4953, 4957, 4965, 4973, 4976, 4980, 4982, 4990, 4993, 6938, 6949, 6953, 7010, 7012, 7034, 7037, 7049, 7076, 7094, 7379, 7384, 7388, 7394, 7414, 7419, 7458, 7459, 7466, 7467 }); IntArrayList retainElements = IntArrayList.wrap(new int[] { 586 }); // Initialize both implementations with the same data IntLinkedOpenHashSet instance = new IntLinkedOpenHashSet(initialElements); IntRBTreeSet referenceInstance = new IntRBTreeSet(initialElements); instance.retainAll(retainElements); referenceInstance.retainAll(retainElements); // print the correct result {586} // System.out.println("ref: " + referenceInstance); // prints {586, 7379}, which is clearly wrong // System.out.println("ohm: " + instance); // Fails assertEquals( referenceInstance, instance ); } private static java.util.Random r = new java.util.Random( 0 ); private static int genKey() { return r.nextInt(); } @SuppressWarnings("unchecked") private static void test( int n, float f ) throws IOException, ClassNotFoundException { int c; IntLinkedOpenHashSet s = new IntLinkedOpenHashSet( Hash.DEFAULT_INITIAL_SIZE, f ); java.util.Set t = new java.util.LinkedHashSet(); /* First of all, we fill t with random data. */ for ( int i = 0; i < f * n; i++ ) t.add( ( Integer.valueOf( genKey() ) ) ); /* Now we add to m the same data */ s.addAll( t ); assertTrue( "Error: !m.equals(t) after insertion", s.equals( t ) ); assertTrue( "Error: !t.equals(m) after insertion", t.equals( s ) ); /* Now we check that m actually holds that data. */ for ( java.util.Iterator i = t.iterator(); i.hasNext(); ) { Object e = i.next(); assertTrue( "Error: m and t differ on a key (" + e + ") after insertion (iterating on t)", s.contains( e ) ); } /* Now we check that m actually holds that data, but iterating on m. */ c = 0; for ( java.util.Iterator i = s.iterator(); i.hasNext(); ) { Object e = i.next(); c++; assertTrue( "Error: m and t differ on a key (" + e + ") after insertion (iterating on m)", t.contains( e ) ); } assertEquals( "Error: m has only " + c + " keys instead of " + t.size() + " after insertion (iterating on m)", t.size(), c ); /* * Now we check that inquiries about random data give the same answer in m and t. For m we * use the polymorphic method. */ for ( int i = 0; i < n; i++ ) { int T = genKey(); assertTrue( "Error: divergence in keys between t and m (polymorphic method)", s.contains( T ) == t.contains( ( Integer.valueOf( T ) ) ) ); } /* * Again, we check that inquiries about random data give the same answer in m and t, but for * m we use the standard method. */ for ( int i = 0; i < n; i++ ) { int T = genKey(); assertTrue( "Error: divergence between t and m (standard method)", s.contains( ( Integer.valueOf( T ) ) ) == t.contains( ( Integer.valueOf( T ) ) ) ); } /* Now we put and remove random data in m and t, checking that the result is the same. 
*/ for ( int i = 0; i < 20 * n; i++ ) { int T = genKey(); assertTrue( "Error: divergence in add() between t and m", s.add( ( Integer.valueOf( T ) ) ) == t.add( ( Integer.valueOf( T ) ) ) ); T = genKey(); assertTrue( "Error: divergence in remove() between t and m", s.remove( ( Integer.valueOf( T ) ) ) == t.remove( ( Integer.valueOf( T ) ) ) ); } assertTrue( "Error: !m.equals(t) after removal", s.equals( t ) ); assertTrue( "Error: !t.equals(m) after removal", t.equals( s ) ); /* Now we check that m actually holds that data. */ for ( java.util.Iterator i = t.iterator(); i.hasNext(); ) { Object e = i.next(); assertTrue( "Error: m and t differ on a key (" + e + ") after removal (iterating on t)", s.contains( e ) ); } /* Now we check that m actually holds that data, but iterating on m. */ for ( java.util.Iterator i = s.iterator(); i.hasNext(); ) { Object e = i.next(); assertTrue( "Error: m and t differ on a key (" + e + ") after removal (iterating on m)", t.contains( e ) ); } /* Now we make m into an array, make it again a set and check it is OK. */ int a[] = s.toIntArray(); assertEquals( "Error: toArray() output (or array-based constructor) is not OK", new IntLinkedOpenHashSet( a ), s ); /* Now we check cloning. */ assertTrue( "Error: m does not equal m.clone()", s.equals( s.clone() ) ); assertTrue( "Error: m.clone() does not equal m", s.clone().equals( s ) ); int h = s.hashCode(); /* Now we save and read m. */ java.io.File ff = new java.io.File( "it.unimi.dsi.fastutil.test" ); java.io.OutputStream os = new java.io.FileOutputStream( ff ); java.io.ObjectOutputStream oos = new java.io.ObjectOutputStream( os ); oos.writeObject( s ); oos.close(); java.io.InputStream is = new java.io.FileInputStream( ff ); java.io.ObjectInputStream ois = new java.io.ObjectInputStream( is ); s = (IntLinkedOpenHashSet)ois.readObject(); ois.close(); ff.delete(); assertEquals( "Error: hashCode() changed after save/read", h, s.hashCode() ); assertEquals( "Error: clone()", s, s.clone() ); /* Now we check that m actually holds that data, but iterating on m. */ for ( java.util.Iterator i = s.iterator(); i.hasNext(); ) { Object e = i.next(); assertTrue( "Error: m and t differ on a key (" + e + ") after save/read", t.contains( e ) ); } /* Now we put and remove random data in m and t, checking that the result is the same. */ for ( int i = 0; i < 20 * n; i++ ) { int T = genKey(); assertTrue( "Error: divergence in add() between t and m after save/read", s.add( ( Integer.valueOf( T ) ) ) == t.add( ( Integer.valueOf( T ) ) ) ); T = genKey(); assertTrue( "Error: divergence in remove() between t and m after save/read", s.remove( ( Integer.valueOf( T ) ) ) == t.remove( ( Integer.valueOf( T ) ) ) ); } assertTrue( "Error: !m.equals(t) after post-save/read removal", s.equals( t ) ); assertTrue( "Error: !t.equals(m) after post-save/read removal", t.equals( s ) ); /* Now we play with iterators, but only in the linked case. 
*/ { java.util.ListIterator i, j; Integer J = null; i = s.iterator(); j = new java.util.LinkedList( t ).listIterator(); for ( int k = 0; k < 2 * n; k++ ) { assertTrue( "Error: divergence in hasNext()", i.hasNext() == j.hasNext() ); assertTrue( "Error: divergence in hasPrevious()", i.hasPrevious() == j.hasPrevious() ); if ( r.nextFloat() < .8 && i.hasNext() ) { assertTrue( "Error: divergence in next()", i.next().equals( J = j.next() ) ); if ( r.nextFloat() < 0.5 ) { i.remove(); j.remove(); t.remove( J ); } } else if ( r.nextFloat() < .2 && i.hasPrevious() ) { assertTrue( "Error: divergence in previous()", i.previous().equals( J = j.previous() ) ); if ( r.nextFloat() < 0.5 ) { i.remove(); j.remove(); t.remove( J ); } } assertTrue( "Error: divergence in nextIndex()", i.nextIndex() == j.nextIndex() ); assertTrue( "Error: divergence in previousIndex()", i.previousIndex() == j.previousIndex() ); } } if ( t.size() > 0 ) { java.util.ListIterator i, j; Object J = null; j = new java.util.LinkedList( t ).listIterator(); int e = r.nextInt( t.size() ); Object from; do from = j.next(); while ( e-- != 0 ); i = s.iterator( ( ( ( (Integer)( from ) ).intValue() ) ) ); for ( int k = 0; k < 2 * n; k++ ) { assertTrue( "Error: divergence in hasNext() (iterator with starting point " + from + ")", i.hasNext() == j.hasNext() ); assertTrue( "Error: divergence in hasPrevious() (iterator with starting point " + from + ")", i.hasPrevious() == j.hasPrevious() ); if ( r.nextFloat() < .8 && i.hasNext() ) { assertTrue( "Error: divergence in next() (iterator with starting point " + from + ")", i.next().equals( J = j.next() ) ); if ( r.nextFloat() < 0.5 ) { i.remove(); j.remove(); t.remove( J ); } } else if ( r.nextFloat() < .2 && i.hasPrevious() ) { assertTrue( "Error: divergence in previous() (iterator with starting point " + from + ")", i.previous().equals( J = j.previous() ) ); if ( r.nextFloat() < 0.5 ) { i.remove(); j.remove(); t.remove( J ); } } assertTrue( "Error: divergence in nextIndex() (iterator with starting point " + from + ")", i.nextIndex() == j.nextIndex() ); assertTrue( "Error: divergence in previousIndex() (iterator with starting point " + from + ")", i.previousIndex() == j.previousIndex() ); } } /* Now we check that m actually holds that data. */ assertTrue( "Error: ! m.equals( t ) after iteration", s.equals( t ) ); assertTrue( "Error: ! t.equals( m ) after iteration", t.equals( s ) ); /* Now we take out of m everything, and check that it is empty. 
*/ for ( java.util.Iterator i = s.iterator(); i.hasNext(); ) { i.next(); i.remove(); } assertTrue( "Error: m is not empty (as it should be)", s.isEmpty() ); s.clear(); t.clear(); s.trim(); assertTrue( "Error: !m.equals(t) after rehash()", s.equals( t ) ); assertTrue( "Error: !t.equals(m) after rehash()", t.equals( s ) ); s.trim(); assertTrue( "Error: !m.equals(t) after trim()", s.equals( t ) ); assertTrue( "Error: !t.equals(m) after trim()", t.equals( s ) ); return; } @Test public void test1() throws IOException, ClassNotFoundException { test( 1, Hash.DEFAULT_LOAD_FACTOR ); test( 1, Hash.FAST_LOAD_FACTOR ); test( 1, Hash.VERY_FAST_LOAD_FACTOR ); } @Test public void test10() throws IOException, ClassNotFoundException { test( 10, Hash.DEFAULT_LOAD_FACTOR ); test( 10, Hash.FAST_LOAD_FACTOR ); test( 10, Hash.VERY_FAST_LOAD_FACTOR ); } @Test public void test100() throws IOException, ClassNotFoundException { test( 100, Hash.DEFAULT_LOAD_FACTOR ); test( 100, Hash.FAST_LOAD_FACTOR ); test( 100, Hash.VERY_FAST_LOAD_FACTOR ); } @Ignore("Too long") @Test public void test1000() throws IOException, ClassNotFoundException { test( 1000, Hash.DEFAULT_LOAD_FACTOR ); test( 1000, Hash.FAST_LOAD_FACTOR ); test( 1000, Hash.VERY_FAST_LOAD_FACTOR ); } @Test public void testAdd() { IntLinkedOpenHashSet s = new IntLinkedOpenHashSet( Hash.DEFAULT_INITIAL_SIZE ); assertTrue( s.add( 0 ) ); assertTrue( s.contains( 0 ) ); assertFalse( s.contains( 1 ) ); assertTrue( s.add( Integer.valueOf( 1 ) ) ); assertTrue( s.contains( Integer.valueOf( 1 ) ) ); assertFalse( s.contains( Integer.valueOf( 2 ) ) ); } @Test public void testRemove() { IntLinkedOpenHashSet s = new IntLinkedOpenHashSet( Hash.DEFAULT_INITIAL_SIZE ); for( int i = 0; i < 100; i++ ) assertTrue( s.add( i ) ); for( int i = 0; i < 100; i++ ) assertFalse( s.remove( 100 + i ) ); assertEquals( 0, s.firstInt() ); assertEquals( 99, s.lastInt() ); for( int i = 50; i < 150; i++ ) assertTrue( Integer.toString( i % 100 ), s.remove( i % 100 ) ); } @Test public void testRemove0() { IntLinkedOpenHashSet s = new IntLinkedOpenHashSet( Hash.DEFAULT_INITIAL_SIZE ); for( int i = -1; i <= 1; i++ ) assertTrue( s.add( i ) ); assertTrue( s.remove( 0 ) ); IntListIterator iterator = s.iterator(); IntOpenHashSet z = new IntOpenHashSet(); z.add( iterator.nextInt() ); z.add( iterator.nextInt() ); assertFalse( iterator.hasNext() ); assertEquals( new IntOpenHashSet( new int[] { -1, 1 } ), z ); s = new IntLinkedOpenHashSet( Hash.DEFAULT_INITIAL_SIZE ); for( int i = -1; i <= 1; i++ ) assertTrue( s.add( i ) ); iterator = s.iterator(); assertEquals( -1, iterator.nextInt() ); assertEquals( 0, iterator.nextInt() ); iterator.remove(); assertEquals( 1, iterator.nextInt() ); assertFalse( iterator.hasNext() ); assertFalse( s.contains( 0 ) ); iterator = s.iterator(); assertEquals( -1, iterator.nextInt() ); assertEquals( 1, iterator.nextInt() ); assertFalse( iterator.hasNext() ); } @Test public void testFirtLast0() { IntLinkedOpenHashSet s; s = new IntLinkedOpenHashSet( Hash.DEFAULT_INITIAL_SIZE ); for( int i = 0; i < 100; i++ ) assertTrue( s.add( i ) ); for( int i = 0; i < 100; i++ ) assertEquals( i, s.removeFirstInt() ); assertTrue( s.isEmpty() ); s = new IntLinkedOpenHashSet( Hash.DEFAULT_INITIAL_SIZE ); for( int i = 0; i < 100; i++ ) assertTrue( s.add( i ) ); for( int i = 100; i-- != 0; ) assertEquals( i, s.removeLastInt() ); assertTrue( s.isEmpty() ); s = new IntLinkedOpenHashSet( Hash.DEFAULT_INITIAL_SIZE ); for( int i = 100; i-- != 0; ) assertTrue( s.add( i ) ); for( int i = 0; i < 100; i++ ) 
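// keys were inserted in descending order, so removeLastInt() must return them in ascending order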
assertEquals( i, s.removeLastInt() ); assertTrue( s.isEmpty() ); s = new IntLinkedOpenHashSet( Hash.DEFAULT_INITIAL_SIZE ); for( int i = 100; i-- != 0; ) assertTrue( s.add( i ) ); for( int i = 100; i-- != 0; ) assertEquals( i, s.removeFirstInt() ); assertTrue( s.isEmpty() ); } @Test public void testIterator() { IntLinkedOpenHashSet s = new IntLinkedOpenHashSet( Hash.DEFAULT_INITIAL_SIZE ); for( int i = 0; i < 100; i++ ) assertTrue( s.add( i ) ); assertEquals( 0, s.firstInt() ); IntListIterator iterator = s.iterator(); for( int i = 0; i <= 100; i++ ) { assertEquals( Integer.toString( i ), i - 1, iterator.previousIndex() ); assertEquals( Integer.toString( i ), i, iterator.nextIndex() ); if ( i != 100 ) assertEquals( Integer.toString( i ), i, iterator.nextInt() ); } iterator = s.iterator( s.lastInt() ); for( int i = 100; i-- != 0; ) { assertEquals( Integer.toString( i ), i, iterator.previousIndex() ); assertEquals( Integer.toString( i ), i + 1, iterator.nextIndex() ); if ( i != 0 ) assertEquals( Integer.toString( i ), i, iterator.previousInt() ); } iterator = s.iterator( 50 ); for( int i = 50; i < 100; i++ ) { assertEquals( Integer.toString( i ), i, iterator.previousIndex() ); assertEquals( Integer.toString( i ), i + 1, iterator.nextIndex() ); if ( i != 99 ) assertEquals( Integer.toString( i ), i + 1, iterator.nextInt() ); } iterator = s.iterator( 50 ); for( int i = 50; i-- != -1; ) { assertEquals( Integer.toString( i ), i + 1, iterator.previousIndex() ); assertEquals( Integer.toString( i ), i + 2, iterator.nextIndex() ); if ( i != -1 ) assertEquals( Integer.toString( i ), i + 1, iterator.previousInt() ); } iterator = s.iterator( 50 ); for( int i = 50; i-- != -1; ) assertEquals( Integer.toString( i ), i + 1, iterator.previousInt() ); assertEquals( -1, iterator.previousIndex() ); assertEquals( 0, iterator.nextIndex() ); iterator = s.iterator( 50 ); for( int i = 50; i < 100 - 1; i++ ) assertEquals( Integer.toString( i ), i + 1, iterator.nextInt() ); assertEquals( 99, iterator.previousIndex() ); assertEquals( 100, iterator.nextIndex() ); iterator = s.iterator( 50 ); iterator.previousInt(); iterator.remove(); assertEquals( 49, iterator.previousIndex() ); assertEquals( 49, iterator.previousInt() ); iterator = s.iterator( 49 ); iterator.nextInt(); iterator.remove(); assertEquals( 50, iterator.nextIndex() ); assertEquals( 52, iterator.nextInt() ); } @Test(expected=NoSuchElementException.class) public void testIteratorMissingElement() { IntLinkedOpenHashSet s = new IntLinkedOpenHashSet( Hash.DEFAULT_INITIAL_SIZE ); for( int i = 0; i < 100; i++ ) assertTrue( s.add( i ) ); s.iterator( 1000 ); } @Test public void testPutAndMove() { IntLinkedOpenHashSet s = new IntLinkedOpenHashSet( Hash.DEFAULT_INITIAL_SIZE ); for( int i = 0; i < 100; i++ ) assertTrue( s.addAndMoveToFirst( i ) ); s.clear(); for( int i = 0; i < 100; i++ ) assertTrue( s.addAndMoveToLast( i ) ); assertTrue( s.addAndMoveToFirst( -1 ) ); assertEquals( -1, s.firstInt() ); assertTrue( s.addAndMoveToFirst( -2 ) ); assertEquals( -2, s.firstInt() ); assertFalse( s.addAndMoveToFirst( -1 ) ); assertEquals( -1, s.firstInt() ); assertFalse( s.addAndMoveToFirst( -1 ) ); assertEquals( -1, s.firstInt() ); assertFalse( s.addAndMoveToLast( -1 ) ); assertEquals( -1, s.lastInt() ); assertTrue( s.addAndMoveToLast( 100 ) ); assertEquals( 100, s.lastInt() ); assertTrue( s.addAndMoveToLast( 101 ) ); assertEquals( 101, s.lastInt() ); assertFalse( s.addAndMoveToLast( 100 ) ); assertEquals( 100, s.lastInt() ); assertFalse( s.addAndMoveToLast( 100 ) ); 
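/* addAndMoveToFirst()/addAndMoveToLast() return true only when the key was actually
 * inserted; for a key that is already present they return false but still move it to
 * the requested end of the link order, which is what the surrounding assertions
 * verify. A minimal sketch, assuming the standard fastutil linked-set API:
 *
 *   IntLinkedOpenHashSet demo = new IntLinkedOpenHashSet();
 *   demo.add( 1 ); demo.add( 2 );          // link order: 1, 2
 *   assert ! demo.addAndMoveToFirst( 2 );  // already present...
 *   assert demo.firstInt() == 2;           // ...but now at the front
 *   assert demo.addAndMoveToLast( 3 );     // new key, appended at the end
 *   assert demo.lastInt() == 3;
 */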
assertEquals( 100, s.lastInt() ); assertFalse( s.addAndMoveToFirst( 100 ) ); assertEquals( 100, s.firstInt() ); } @Test public void testRemoveFirstLast() { IntLinkedOpenHashSet s = new IntLinkedOpenHashSet( Hash.DEFAULT_INITIAL_SIZE ); for( int i = 0; i < 100; i++ ) assertTrue( s.add( i ) ); assertEquals( 0, s.removeFirstInt() ); assertEquals( 1, s.removeFirstInt() ); assertEquals( 99, s.removeLastInt() ); } @Test(expected=NoSuchElementException.class) public void testRemoveFirstEmpty() { new IntLinkedOpenHashSet( Hash.DEFAULT_INITIAL_SIZE ).firstInt(); } @Test(expected=NoSuchElementException.class) public void testRemoveLastEmpty() { new IntLinkedOpenHashSet( Hash.DEFAULT_INITIAL_SIZE ).lastInt(); } } fastutil-7.1.0/test/it/unimi/dsi/fastutil/ints/IntOpenCustomHashSetTest.java0000664000000000000000000002551713050705451025721 0ustar rootrootpackage it.unimi.dsi.fastutil.ints; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; import it.unimi.dsi.fastutil.Hash; import java.io.IOException; import java.io.Serializable; import java.util.HashSet; import org.junit.Ignore; import org.junit.Test; @SuppressWarnings("rawtypes") /** Not a particularly good test, but it will check that we use everywhere the same hashing strategy. */ public class IntOpenCustomHashSetTest { @Test public void testGetNullKey() { final IntOpenCustomHashSet s = new IntOpenCustomHashSet( new IntHash.Strategy() { @Override public int hashCode( int o ) { return o % 10; } @Override public boolean equals( int a, int b ) { return ( a - b ) % 10 == 0; } } ); s.add( 10 ); assertTrue( s.contains( 0 ) ); assertEquals( 10, s.iterator().nextInt() ); } @Test public void testCustomUsed() { IntOpenCustomHashSet set = new IntOpenCustomHashSet( new IntHash.Strategy() { @Override public int hashCode( int e ) { return Integer.hashCode( e & 0xFFFF ); } @Override public boolean equals( int a, int b ) { return ( a & 0xFFFF ) == ( b & 0xFFFF ); } } ); set.add( 1 << 16 | 1 ); set.add( 1 ); assertEquals( 1, set.size() ); assertTrue( set.contains( 1 ) ); assertTrue( set.contains( 1 << 16 | 1 ) ); } private static final class Strategy implements IntHash.Strategy, Serializable { private static final long serialVersionUID = 1L; @Override public int hashCode( int e ) { return Integer.reverse( e ); } @Override public boolean equals( int a, int b ) { return a == b; } } private final static Strategy strategy = new Strategy(); private static java.util.Random r = new java.util.Random( 0 ); private static int genKey() { return r.nextInt( 10 ); } @SuppressWarnings("boxing") private static void checkTable( IntOpenCustomHashSet s ) { final int[] key = s.key; assert ( s.n & -s.n ) == s.n : "Table length is not a power of two: " + s.n; assert s.n == s.key.length - 1; int n = s.n; while ( n-- != 0 ) if ( key[ n ] != 0 && !s.contains( key[ n ] ) ) throw new AssertionError( "Hash table has key " + key[ n ] + " marked as occupied, but the key does not belong to the table" ); if ( s.containsNull && !s.contains( 0 ) ) throw new AssertionError( "Hash table should contain zero by internal state, but it doesn't when queried" ); if ( !s.containsNull && s.contains( 0 ) ) throw new AssertionError( "Hash table should not contain zero by internal state, but it does when queried" ); java.util.HashSet t = new java.util.HashSet(); for ( int i = s.size(); i-- != 0; ) if ( key[ i ] != 0 && !t.add( key[ i ] ) ) throw new AssertionError( "Key " + key[ i ] + " appears twice" ); } private static void 
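/* The printProbes() helper that follows estimates the cost of unsuccessful lookups in
 * the open-addressing table: every maximal run of c consecutive occupied slots is
 * charged ( c + 1 ) * ( c + 2 ) / 2 probes, every empty slot one probe, and the
 * resulting average is compared with an analytic estimate that depends only on the
 * fill ratio f = size / n. A hedged sketch of the per-run accounting, with a
 * hypothetical helper name:
 *
 *   long probesForRun( int c ) {
 *     // searches can start anywhere in the run and must scan up to the empty
 *     // slot that terminates it, hence the triangular-number charge
 *     return (long)( c + 1 ) * ( c + 2 ) / 2;
 *   }
 */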
printProbes( IntOpenCustomHashSet m ) { long totProbes = 0; double totSquareProbes = 0; int maxProbes = 0; final int[] key = m.key; final double f = (double)m.size / m.n; for ( int i = 0, c = 0; i < m.n; i++ ) { if ( key[ i ] != 0 ) c++; else { if ( c != 0 ) { final long p = ( c + 1 ) * ( c + 2 ) / 2; totProbes += p; totSquareProbes += (double)p * p; } maxProbes = Math.max( c, maxProbes ); c = 0; totProbes++; totSquareProbes++; } } final double expected = (double)totProbes / m.n; System.err.println( "Expected probes: " + ( 3 * Math.sqrt( 3 ) * ( f / ( ( 1 - f ) * ( 1 - f ) ) ) + 4 / ( 9 * f ) - 1 ) + "; actual: " + expected + "; stddev: " + Math.sqrt( totSquareProbes / m.n - expected * expected ) + "; max probes: " + maxProbes ); } @SuppressWarnings({ "boxing" }) private static void test( int n, float f ) throws IOException, ClassNotFoundException { int c; final Integer key[] = new Integer[ (int)Math.ceil( n * f ) ]; HashSet t = new HashSet(); /* First of all, we fill t with random data. */ for ( int i = 0; i < key.length; i++ ) t.add( ( key[ i ] = new Integer( genKey() ) ) ); IntOpenCustomHashSet m = new IntOpenCustomHashSet( Hash.DEFAULT_INITIAL_SIZE, f, strategy ); /* Now we add to m the same data */ m.addAll( t ); checkTable( m ); assertTrue( "Error: !m.equals(t) after insertion", m.equals( t ) ); assertTrue( "Error: !t.equals(m) after insertion", t.equals( m ) ); printProbes( m ); /* Now we check that m actually holds that data. */ for ( java.util.Iterator i = t.iterator(); i.hasNext(); ) { Object e = i.next(); assertTrue( "Error: m and t differ on a key (" + e + ") after insertion (iterating on t)", m.contains( e ) ); } /* Now we check that m actually holds that data, but iterating on m. */ c = 0; for ( java.util.Iterator i = m.iterator(); i.hasNext(); ) { Object e = i.next(); c++; assertTrue( "Error: m and t differ on a key (" + e + ") after insertion (iterating on m)", t.contains( e ) ); } assertEquals( "Error: m has only " + c + " keys instead of " + t.size() + " after insertion (iterating on m)", c, t.size() ); /* Now we check that inquiries about random data give the same answer in m and t. For m we use the polymorphic method. */ for ( int i = 0; i < n; i++ ) { int T = genKey(); assertEquals( "Error: divergence in keys between t and m (polymorphic method)", m.contains( T ), t.contains( ( Integer.valueOf( T ) ) ) ); } /* Again, we check that inquiries about random data give the same answer in m and t, but for m we use the standard method. */ for ( int i = 0; i < n; i++ ) { int T = genKey(); assertFalse( "Error: divergence between t and m (standard method)", m.contains( ( Integer.valueOf( T ) ) ) != t.contains( ( Integer.valueOf( T ) ) ) ); } /* Now we put and remove random data in m and t, checking that the result is the same. */ for ( int i = 0; i < 20 * n; i++ ) { int T = genKey(); assertFalse( "Error: divergence in add() between t and m", m.add( ( Integer.valueOf( T ) ) ) != t.add( ( Integer.valueOf( T ) ) ) ); T = genKey(); assertFalse( "Error: divergence in remove() between t and m", m.remove( ( Integer.valueOf( T ) ) ) != t.remove( ( Integer.valueOf( T ) ) ) ); } checkTable( m ); assertTrue( "Error: !m.equals(t) after removal", m.equals( t ) ); assertTrue( "Error: !t.equals(m) after removal", t.equals( m ) ); /* Now we check that m actually holds that data. 
*/ for ( java.util.Iterator i = t.iterator(); i.hasNext(); ) { Object e = i.next(); assertFalse( "Error: m and t differ on a key (" + e + ") after removal (iterating on t)", !m.contains( e ) ); } /* Now we check that m actually holds that data, but iterating on m. */ for ( java.util.Iterator i = m.iterator(); i.hasNext(); ) { Object e = i.next(); assertFalse( "Error: m and t differ on a key (" + e + ") after removal (iterating on m)", !t.contains( e ) ); } /* Now we make m into an array, make it again a set and check it is OK. */ int a[] = m.toIntArray(); assertTrue( "Error: toArray() output (or array-based constructor) is not OK", new IntOpenHashSet( a ).equals( m ) ); /* Now we check cloning. */ assertTrue( "Error: m does not equal m.clone()", m.equals( m.clone() ) ); assertTrue( "Error: m.clone() does not equal m", m.clone().equals( m ) ); int h = m.hashCode(); /* Now we save and read m. */ java.io.File ff = new java.io.File( "it.unimi.dsi.fastutil.test" ); java.io.OutputStream os = new java.io.FileOutputStream( ff ); java.io.ObjectOutputStream oos = new java.io.ObjectOutputStream( os ); oos.writeObject( m ); oos.close(); java.io.InputStream is = new java.io.FileInputStream( ff ); java.io.ObjectInputStream ois = new java.io.ObjectInputStream( is ); m = (IntOpenCustomHashSet)ois.readObject(); ois.close(); ff.delete(); assertEquals( "Error: hashCode() changed after save/read", h, m.hashCode() ); printProbes( m ); checkTable( m ); /* Now we check that m actually holds that data, but iterating on m. */ for ( java.util.Iterator i = m.iterator(); i.hasNext(); ) { Object e = i.next(); assertFalse( "Error: m and t differ on a key (" + e + ") after save/read", !t.contains( e ) ); } /* Now we put and remove random data in m and t, checking that the result is the same. */ for ( int i = 0; i < 20 * n; i++ ) { int T = genKey(); assertFalse( "Error: divergence in add() between t and m after save/read", m.add( ( Integer.valueOf( T ) ) ) != t.add( ( Integer.valueOf( T ) ) ) ); T = genKey(); assertFalse( "Error: divergence in remove() between t and m after save/read", m.remove( ( Integer.valueOf( T ) ) ) != t.remove( ( Integer.valueOf( T ) ) ) ); } assertTrue( "Error: !m.equals(t) after post-save/read removal", m.equals( t ) ); assertTrue( "Error: !t.equals(m) after post-save/read removal", t.equals( m ) ); /* Now we take out of m everything, and check that it is empty. */ for ( java.util.Iterator i = m.iterator(); i.hasNext(); ) { i.next(); i.remove(); } assertFalse( "Error: m is not empty (as it should be)", !m.isEmpty() ); m = new IntOpenCustomHashSet( n, f, strategy ); t.clear(); /* Now we torture-test the hash table. This part is implemented only for integers and longs. 
*/ for ( int i = n; i-- != 0; ) m.add( i ); t.addAll( m ); printProbes( m ); checkTable( m ); for ( int i = n; i-- != 0; ) assertEquals( "Error: m and t differ on a key during torture-test insertion.", m.add( i ), t.add( ( Integer.valueOf( i ) ) ) ); assertTrue( "Error: !m.equals(t) after torture-test insertion", m.equals( t ) ); assertTrue( "Error: !t.equals(m) after torture-test insertion", t.equals( m ) ); for ( int i = n; i-- != 0; ) assertEquals( "Error: m and t differ on a key during torture-test insertion.", m.remove( i ), t.remove( ( Integer.valueOf( i ) ) ) ); assertTrue( "Error: !m.equals(t) after torture-test removal", m.equals( t ) ); assertTrue( "Error: !t.equals(m) after torture-test removal", t.equals( m ) ); assertTrue( "Error: !m.equals(m.clone()) after torture-test removal", m.equals( m.clone() ) ); assertTrue( "Error: !m.clone().equals(m) after torture-test removal", m.clone().equals( m ) ); return; } @Test public void test1() throws IOException, ClassNotFoundException { test( 1, Hash.DEFAULT_LOAD_FACTOR ); test( 1, Hash.FAST_LOAD_FACTOR ); test( 1, Hash.VERY_FAST_LOAD_FACTOR ); } @Test public void test10() throws IOException, ClassNotFoundException { test( 10, Hash.DEFAULT_LOAD_FACTOR ); test( 10, Hash.FAST_LOAD_FACTOR ); test( 10, Hash.VERY_FAST_LOAD_FACTOR ); } @Test public void test100() throws IOException, ClassNotFoundException { test( 100, Hash.DEFAULT_LOAD_FACTOR ); test( 100, Hash.FAST_LOAD_FACTOR ); test( 100, Hash.VERY_FAST_LOAD_FACTOR ); } @Ignore("Too long") @Test public void test1000() throws IOException, ClassNotFoundException { test( 1000, Hash.DEFAULT_LOAD_FACTOR ); test( 1000, Hash.FAST_LOAD_FACTOR ); test( 1000, Hash.VERY_FAST_LOAD_FACTOR ); } } fastutil-7.1.0/test/it/unimi/dsi/fastutil/ints/IntOpenHashBigSetTest.java0000664000000000000000000003050313050705451025137 0ustar rootrootpackage it.unimi.dsi.fastutil.ints; import static org.junit.Assert.assertArrayEquals; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; import it.unimi.dsi.fastutil.Hash; import java.io.IOException; import java.util.Arrays; import org.junit.Ignore; import org.junit.Test; @SuppressWarnings("rawtypes") public class IntOpenHashBigSetTest { @Test public void testStrangeRetainAllCase() { IntArrayList initialElements = IntArrayList.wrap(new int[] { 586, 940, 1086, 1110, 1168, 1184, 1185, 1191, 1196, 1229, 1237, 1241, 1277, 1282, 1284, 1299, 1308, 1309, 1310, 1314, 1328, 1360, 1366, 1370, 1378, 1388, 1392, 1402, 1406, 1411, 1426, 1437, 1455, 1476, 1489, 1513, 1533, 1538, 1540, 1541, 1543, 1547, 1548, 1551, 1557, 1568, 1575, 1577, 1582, 1583, 1584, 1588, 1591, 1592, 1601, 1610, 1618, 1620, 1633, 1635, 1653, 1654, 1655, 1660, 1661, 1665, 1674, 1686, 1688, 1693, 1700, 1705, 1717, 1720, 1732, 1739, 1740, 1745, 1746, 1752, 1754, 1756, 1765, 1766, 1767, 1771, 1772, 1781, 1789, 1790, 1793, 1801, 1806, 1823, 1825, 1827, 1828, 1829, 1831, 1832, 1837, 1839, 1844, 2962, 2969, 2974, 2990, 3019, 3023, 3029, 3030, 3052, 3072, 3074, 3075, 3093, 3109, 3110, 3115, 3116, 3125, 3137, 3142, 3156, 3160, 3176, 3180, 3188, 3193, 3198, 3207, 3209, 3210, 3213, 3214, 3221, 3225, 3230, 3231, 3236, 3240, 3247, 3261, 4824, 4825, 4834, 4845, 4852, 4858, 4859, 4867, 4871, 4883, 4886, 4887, 4905, 4907, 4911, 4920, 4923, 4924, 4925, 4934, 4942, 4953, 4957, 4965, 4973, 4976, 4980, 4982, 4990, 4993, 6938, 6949, 6953, 7010, 7012, 7034, 7037, 7049, 7076, 7094, 7379, 7384, 7388, 7394, 7414, 7419, 7458, 7459, 7466, 7467 }); IntArrayList 
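/* Regression data for a retainAll() bug: retainAll() is typically implemented by
 * iterating over the set and removing every key that is not in the argument, and in
 * an open-addressing table with shift-back deletion each removal can move later keys
 * backwards, so a faulty iterator may skip or revisit keys (the wrap-around tests in
 * IntOpenHashSetTest exercise the same failure mode). A minimal sketch of the
 * expected contract, with hypothetical data:
 *
 *   IntOpenHashBigSet big = new IntOpenHashBigSet( new int[] { 586, 7379, 940 } );
 *   big.retainAll( IntArrayList.wrap( new int[] { 586 } ) );
 *   // only 586 may survive, whatever the internal table layout was
 *   assert big.size64() == 1 && big.contains( 586 );
 */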
retainElements = IntArrayList.wrap(new int[] { 586 }); // Initialize both implementations with the same data IntOpenHashBigSet instance = new IntOpenHashBigSet(initialElements); IntRBTreeSet referenceInstance = new IntRBTreeSet(initialElements); instance.retainAll(retainElements); referenceInstance.retainAll(retainElements); // print the correct result {586} // System.out.println("ref: " + referenceInstance); // prints {586, 7379}, which is clearly wrong // System.out.println("ohm: " + instance); // Fails assertEquals( referenceInstance, instance ); } @Test public void testRemove0() { IntOpenHashBigSet s = new IntOpenHashBigSet( Hash.DEFAULT_INITIAL_SIZE ); for( int i = -1; i <= 1; i++ ) assertTrue( s.add( i ) ); assertTrue( s.remove( 0 ) ); IntIterator iterator = s.iterator(); IntOpenHashSet z = new IntOpenHashSet(); z.add( iterator.nextInt() ); z.add( iterator.nextInt() ); assertFalse( iterator.hasNext() ); assertEquals( new IntOpenHashSet( new int[] { -1, 1 } ), z ); s = new IntOpenHashBigSet( Hash.DEFAULT_INITIAL_SIZE ); for( int i = -1; i <= 1; i++ ) assertTrue( s.add( i ) ); iterator = s.iterator(); while( iterator.hasNext() ) if ( iterator.nextInt() == 0 ) iterator.remove(); assertFalse( s.contains( 0 ) ); iterator = s.iterator(); int[] content = new int[ 2 ]; content[ 0 ] = iterator.nextInt(); content[ 1 ] = iterator.nextInt(); assertFalse( iterator.hasNext() ); Arrays.sort( content ); assertArrayEquals( new int[] { -1, 1 }, content ); } private static java.util.Random r = new java.util.Random( 0 ); private static int genKey() { return r.nextInt(); } @SuppressWarnings("boxing") private static void checkTable( IntOpenHashBigSet s ) { final int[][] key = s.key; assert ( s.n & -s.n ) == s.n : "Table length is not a power of two: " + s.n; assert s.n == IntBigArrays.length( key ); long n = s.n; while ( n-- != 0 ) if ( IntBigArrays.get( key, n ) != 0 && !s.contains( IntBigArrays.get( key, n ) ) ) throw new AssertionError( "Hash table has key " + IntBigArrays.get( key, n ) + " marked as occupied, but the key does not belong to the table" ); java.util.HashSet t = new java.util.HashSet(); for ( long i = s.size64(); i-- != 0; ) if ( IntBigArrays.get( key, i ) != 0 && !t.add( IntBigArrays.get( key, i ) ) ) throw new AssertionError( "Key " + IntBigArrays.get( key, i ) + " appears twice" ); } private static void printProbes( IntOpenHashBigSet m ) { long totProbes = 0; double totSquareProbes = 0; long maxProbes = 0; final double f = (double)m.size / m.n; for ( long i = 0, c = 0; i < m.n; i++ ) { if ( IntBigArrays.get( m.key, i ) != 0 ) c++; else { if ( c != 0 ) { final long p = ( c + 1 ) * ( c + 2 ) / 2; totProbes += p; totSquareProbes += (double)p * p; } maxProbes = Math.max( c, maxProbes ); c = 0; totProbes++; totSquareProbes++; } } final double expected = (double)totProbes / m.n; System.err.println( "Expected probes: " + ( 3 * Math.sqrt( 3 ) * ( f / ( ( 1 - f ) * ( 1 - f ) ) ) + 4 / ( 9 * f ) - 1 ) + "; actual: " + expected + "; stddev: " + Math.sqrt( totSquareProbes / m.n - expected * expected ) + "; max probes: " + maxProbes ); } @SuppressWarnings({ "unchecked", "boxing" }) private static void test( int n, float f ) throws IOException, ClassNotFoundException { int c; IntOpenHashBigSet m = new IntOpenHashBigSet( Hash.DEFAULT_INITIAL_SIZE, f ); java.util.Set t = new java.util.HashSet(); /* First of all, we fill t with random data. 
*/ for ( int i = 0; i < f * n; i++ ) t.add( ( Integer.valueOf( genKey() ) ) ); /* Now we add to m the same data */ m.addAll( t ); checkTable( m ); assertTrue( "Error: !m.equals(t) after insertion", m.equals( t ) ); assertTrue( "Error: !t.equals(m) after insertion", t.equals( m ) ); printProbes( m ); /* Now we check that m actually holds that data. */ for ( java.util.Iterator i = t.iterator(); i.hasNext(); ) { Object e = i.next(); assertTrue( "Error: m and t differ on a key (" + e + ") after insertion (iterating on t)", m.contains( e ) ); } /* Now we check that m actually holds that data, but iterating on m. */ c = 0; for ( java.util.Iterator i = m.iterator(); i.hasNext(); ) { Object e = i.next(); c++; assertTrue( "Error: m and t differ on a key (" + e + ") after insertion (iterating on m)", t.contains( e ) ); } assertEquals( "Error: m has only " + c + " keys instead of " + t.size() + " after insertion (iterating on m)", c, t.size() ); /* * Now we check that inquiries about random data give the same answer in m and t. For m we * use the polymorphic method. */ for ( int i = 0; i < n; i++ ) { int T = genKey(); assertEquals( "Error: divergence in keys between t and m (polymorphic method)", m.contains( T ), t.contains( ( Integer.valueOf( T ) ) ) ); } /* * Again, we check that inquiries about random data give the same answer in m and t, but for * m we use the standard method. */ for ( int i = 0; i < n; i++ ) { int T = genKey(); assertFalse( "Error: divergence between t and m (standard method)", m.contains( ( Integer.valueOf( T ) ) ) != t.contains( ( Integer.valueOf( T ) ) ) ); } /* Now we put and remove random data in m and t, checking that the result is the same. */ for ( int i = 0; i < 20 * n; i++ ) { int T = genKey(); assertFalse( "Error: divergence in add() between t and m", m.add( ( Integer.valueOf( T ) ) ) != t.add( ( Integer.valueOf( T ) ) ) ); T = genKey(); assertFalse( "Error: divergence in remove() between t and m", m.remove( ( Integer.valueOf( T ) ) ) != t.remove( ( Integer.valueOf( T ) ) ) ); } checkTable( m ); assertTrue( "Error: !m.equals(t) after removal", m.equals( t ) ); assertTrue( "Error: !t.equals(m) after removal", t.equals( m ) ); /* Now we check that m actually holds that data. */ for ( java.util.Iterator i = t.iterator(); i.hasNext(); ) { Object e = i.next(); assertFalse( "Error: m and t differ on a key (" + e + ") after removal (iterating on t)", !m.contains( e ) ); } /* Now we check that m actually holds that data, but iterating on m. */ for ( java.util.Iterator i = m.iterator(); i.hasNext(); ) { Object e = i.next(); assertFalse( "Error: m and t differ on a key (" + e + ") after removal (iterating on m)", !t.contains( e ) ); } /* Now we make m into an array, make it again a set and check it is OK. */ int a[] = m.toIntArray(); assertTrue( "Error: toArray() output (or array-based constructor) is not OK", new IntOpenHashBigSet( a ).equals( m ) ); /* Now we check cloning. */ assertTrue( "Error: m does not equal m.clone()", m.equals( m.clone() ) ); assertTrue( "Error: m.clone() does not equal m", m.clone().equals( m ) ); int h = m.hashCode(); /* Now we save and read m. 
*/ java.io.File ff = new java.io.File( "it.unimi.dsi.fastutil.test" ); java.io.OutputStream os = new java.io.FileOutputStream( ff ); java.io.ObjectOutputStream oos = new java.io.ObjectOutputStream( os ); oos.writeObject( m ); oos.close(); java.io.InputStream is = new java.io.FileInputStream( ff ); java.io.ObjectInputStream ois = new java.io.ObjectInputStream( is ); m = (IntOpenHashBigSet)ois.readObject(); ois.close(); ff.delete(); assertEquals( "Error: hashCode() changed after save/read", h, m.hashCode() ); printProbes( m ); checkTable( m ); /* Now we check that m actually holds that data, but iterating on m. */ for ( java.util.Iterator i = m.iterator(); i.hasNext(); ) { Object e = i.next(); assertFalse( "Error: m and t differ on a key (" + e + ") after save/read", !t.contains( e ) ); } /* Now we put and remove random data in m and t, checking that the result is the same. */ for ( int i = 0; i < 20 * n; i++ ) { int T = genKey(); assertFalse( "Error: divergence in add() between t and m after save/read", m.add( ( Integer.valueOf( T ) ) ) != t.add( ( Integer.valueOf( T ) ) ) ); T = genKey(); assertFalse( "Error: divergence in remove() between t and m after save/read", m.remove( ( Integer.valueOf( T ) ) ) != t.remove( ( Integer.valueOf( T ) ) ) ); } assertTrue( "Error: !m.equals(t) after post-save/read removal", m.equals( t ) ); assertTrue( "Error: !t.equals(m) after post-save/read removal", t.equals( m ) ); /* Now we take out of m everything, and check that it is empty. */ for ( java.util.Iterator i = m.iterator(); i.hasNext(); ) { i.next(); i.remove(); } assertFalse( "Error: m is not empty (as it should be)", !m.isEmpty() ); m = new IntOpenHashBigSet( n, f ); t.clear(); /* Now we torture-test the hash table. This part is implemented only for integers and longs. */ for( int i = n; i-- != 0; ) m.add( i ); t.addAll( m ); printProbes( m ); checkTable( m ); /* Now all table entries are REMOVED. 
*/ for( int i = n; i-- != 0; ) assertEquals( "Error: m and t differ on a key during torture-test insertion.", m.add( i ), t.add( ( Integer.valueOf( i ) ) ) ); assertTrue( "Error: !m.equals(t) after torture-test insertion", m.equals( t ) ); assertTrue( "Error: !t.equals(m) after torture-test insertion", t.equals( m ) ); for( int i = n; i-- != 0; ) assertEquals( "Error: m and t differ on a key during torture-test insertion.", m.remove( i ), t.remove( ( Integer.valueOf( i ) ) ) ); assertTrue( "Error: !m.equals(t) after torture-test removal", m.equals( t ) ); assertTrue( "Error: !t.equals(m) after torture-test removal", t.equals( m ) ); assertTrue( "Error: !m.equals(m.clone()) after torture-test removal", m.equals( m.clone() ) ); assertTrue( "Error: !m.clone().equals(m) after torture-test removal", m.clone().equals( m ) ); m.trim(); assertTrue( "Error: !m.equals(t) after trim()", m.equals( t ) ); assertTrue( "Error: !t.equals(m) after trim()", t.equals( m ) ); return; } @Test public void test1() throws IOException, ClassNotFoundException { test( 1, Hash.DEFAULT_LOAD_FACTOR ); test( 1, Hash.FAST_LOAD_FACTOR ); test( 1, Hash.VERY_FAST_LOAD_FACTOR ); } @Test public void test10() throws IOException, ClassNotFoundException { test( 10, Hash.DEFAULT_LOAD_FACTOR ); test( 10, Hash.FAST_LOAD_FACTOR ); test( 10, Hash.VERY_FAST_LOAD_FACTOR ); } @Test public void test100() throws IOException, ClassNotFoundException { test( 100, Hash.DEFAULT_LOAD_FACTOR ); test( 100, Hash.FAST_LOAD_FACTOR ); test( 100, Hash.VERY_FAST_LOAD_FACTOR ); } @Ignore("Too long") @Test public void test1000() throws IOException, ClassNotFoundException { test( 1000, Hash.DEFAULT_LOAD_FACTOR ); test( 1000, Hash.FAST_LOAD_FACTOR ); test( 1000, Hash.VERY_FAST_LOAD_FACTOR ); } } fastutil-7.1.0/test/it/unimi/dsi/fastutil/ints/IntOpenHashSetTest.java0000664000000000000000000004360313050705451024522 0ustar rootrootpackage it.unimi.dsi.fastutil.ints; import static org.junit.Assert.assertArrayEquals; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotEquals; import static org.junit.Assert.assertTrue; import it.unimi.dsi.fastutil.Hash; import it.unimi.dsi.fastutil.HashCommon; import it.unimi.dsi.fastutil.objects.ObjectOpenHashSet; import java.io.IOException; import java.util.Arrays; import org.junit.Ignore; import org.junit.Test; @SuppressWarnings("rawtypes") public class IntOpenHashSetTest { @SuppressWarnings("boxing") @Test public void testToArrayNullAtEnd() { IntOpenHashSet s = new IntOpenHashSet( new int[] { 1, 2, 3 } ); assertEquals( 3, s.toArray( new Object[ 0 ] ).length ); assertEquals( 3, s.toArray( new Integer[ 0 ] ).length ); assertTrue( s.toArray( new Integer[] { -1, -1, -1, -1 } )[ 3 ] == null ); } @Test public void testContainsNull() { IntOpenHashSet s = new IntOpenHashSet( new int[] { 1, 2, 3 } ); assertFalse( s.contains( null ) ); } @SuppressWarnings("boxing") @Test public void testEquals() { IntOpenHashSet s = new IntOpenHashSet( new int[] { 1, 2, 3 } ); assertFalse( s.equals( new ObjectOpenHashSet( new Integer[] { 1, null } ) ) ); } @Test public void testInfiniteLoop0() { IntOpenHashSet set = new IntOpenHashSet(4, 1.0f); set.add(1); set.add(2); set.add(3); set.remove(2); set.trim(); set.remove(1); // Will hang inside this call } @Test public void testInfiniteLoop1() { IntOpenHashSet set = new IntOpenHashSet(); set.add(1); set.add(2); set.add(3); set.trim(1); } @Test public void testStrangeRetainAllCase() { IntArrayList initialElements = 
IntArrayList.wrap(new int[] { 586, 940, 1086, 1110, 1168, 1184, 1185, 1191, 1196, 1229, 1237, 1241, 1277, 1282, 1284, 1299, 1308, 1309, 1310, 1314, 1328, 1360, 1366, 1370, 1378, 1388, 1392, 1402, 1406, 1411, 1426, 1437, 1455, 1476, 1489, 1513, 1533, 1538, 1540, 1541, 1543, 1547, 1548, 1551, 1557, 1568, 1575, 1577, 1582, 1583, 1584, 1588, 1591, 1592, 1601, 1610, 1618, 1620, 1633, 1635, 1653, 1654, 1655, 1660, 1661, 1665, 1674, 1686, 1688, 1693, 1700, 1705, 1717, 1720, 1732, 1739, 1740, 1745, 1746, 1752, 1754, 1756, 1765, 1766, 1767, 1771, 1772, 1781, 1789, 1790, 1793, 1801, 1806, 1823, 1825, 1827, 1828, 1829, 1831, 1832, 1837, 1839, 1844, 2962, 2969, 2974, 2990, 3019, 3023, 3029, 3030, 3052, 3072, 3074, 3075, 3093, 3109, 3110, 3115, 3116, 3125, 3137, 3142, 3156, 3160, 3176, 3180, 3188, 3193, 3198, 3207, 3209, 3210, 3213, 3214, 3221, 3225, 3230, 3231, 3236, 3240, 3247, 3261, 4824, 4825, 4834, 4845, 4852, 4858, 4859, 4867, 4871, 4883, 4886, 4887, 4905, 4907, 4911, 4920, 4923, 4924, 4925, 4934, 4942, 4953, 4957, 4965, 4973, 4976, 4980, 4982, 4990, 4993, 6938, 6949, 6953, 7010, 7012, 7034, 7037, 7049, 7076, 7094, 7379, 7384, 7388, 7394, 7414, 7419, 7458, 7459, 7466, 7467 }); IntArrayList retainElements = IntArrayList.wrap(new int[] { 586 }); // Initialize both implementations with the same data IntOpenHashSet instance = new IntOpenHashSet(initialElements); IntRBTreeSet referenceInstance = new IntRBTreeSet(initialElements); instance.retainAll(retainElements); referenceInstance.retainAll(retainElements); // print the correct result {586} // System.out.println("ref: " + referenceInstance); // prints {586, 7379}, which is clearly wrong // System.out.println("ohm: " + instance); // Fails assertEquals( referenceInstance, instance ); } private static java.util.Random r = new java.util.Random( 0 ); private static int genKey() { return r.nextInt(); } @Test public void testSmallExpectedValuesWeirdLoadFactors() { for( int expected = 0; expected < 5; expected ++ ) for( float loadFactor: new float[] { Float.MIN_VALUE, .25f, .5f, .75f, 1 - Float.MIN_VALUE } ) { IntOpenHashSet s = new IntOpenHashSet( 0, loadFactor ); assertTrue( s.add( 2 ) ); assertTrue( s.add( 3 ) ); assertFalse( s.add( 2 ) ); assertFalse( s.add( 3 ) ); } } @Test public void testRemove() { IntOpenHashSet s = new IntOpenHashSet( Hash.DEFAULT_INITIAL_SIZE ); for( int i = 0; i < 100; i++ ) assertTrue( s.add( i ) ); for( int i = 0; i < 100; i++ ) assertFalse( s.remove( 100 + i ) ); for( int i = 50; i < 150; i++ ) assertTrue( Integer.toString( i % 100 ), s.remove( i % 100 ) ); } @Test public void testRemove0() { IntOpenHashSet s = new IntOpenHashSet( Hash.DEFAULT_INITIAL_SIZE ); for( int i = -1; i <= 1; i++ ) assertTrue( s.add( i ) ); assertTrue( s.remove( 0 ) ); IntIterator iterator = s.iterator(); IntOpenHashSet z = new IntOpenHashSet(); z.add( iterator.nextInt() ); z.add( iterator.nextInt() ); assertFalse( iterator.hasNext() ); assertEquals( new IntOpenHashSet( new int[] { -1, 1 } ), z ); s = new IntOpenHashSet( Hash.DEFAULT_INITIAL_SIZE ); for( int i = -1; i <= 1; i++ ) assertTrue( s.add( i ) ); iterator = s.iterator(); while( iterator.hasNext() ) if ( iterator.nextInt() == 0 ) iterator.remove(); assertFalse( s.contains( 0 ) ); iterator = s.iterator(); int[] content = new int[ 2 ]; content[ 0 ] = iterator.nextInt(); content[ 1 ] = iterator.nextInt(); assertFalse( iterator.hasNext() ); Arrays.sort( content ); assertArrayEquals( new int[] { -1, 1 }, content ); } @Test public void testWrapAround() { IntOpenHashSet s = new IntOpenHashSet( 4, .5f 
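/* The wrap-around tests in this class pick keys whose hash lands in chosen slots:
 * with a table of n = 8 slots a key k is expected to end up at slot
 * HashCommon.mix( k ) & ( n - 1 ), and HashCommon.invMix() inverts HashCommon.mix(),
 * so invMix( slot + c * n ) yields distinct keys that all collide on the same slot.
 * A hedged sketch of the idea (assuming that slot computation):
 *
 *   int n = 8;
 *   int k = HashCommon.invMix( 6 );        // key intended for slot 6
 *   assert ( HashCommon.mix( k ) & ( n - 1 ) ) == 6;
 *   int k2 = HashCommon.invMix( 6 + 8 );   // different key, same slot modulo n
 *   assert ( HashCommon.mix( k2 ) & ( n - 1 ) ) == 6;
 */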
); assertEquals( 8, s.n ); // The following code inverts HashCommon.phiMix() and places strategically keys in slots 6, 7 and 0 s.add( HashCommon.invMix( 6 ) ); s.add( HashCommon.invMix( 7 ) ); s.add( HashCommon.invMix( 6 + 8 ) ); assertNotEquals( 0, s.key[ 0 ] ); assertNotEquals( 0, s.key[ 6 ] ); assertNotEquals( 0, s.key[ 7 ] ); IntOpenHashSet keys = s.clone(); IntIterator iterator = s.iterator(); IntOpenHashSet t = new IntOpenHashSet(); t.add( iterator.nextInt() ); t.add( iterator.nextInt() ); // Originally, this remove would move the entry in slot 0 in slot 6 and we would return the entry in 0 twice iterator.remove(); t.add( iterator.nextInt() ); assertEquals( keys, t ); } @Test public void testWrapAround2() { IntOpenHashSet s = new IntOpenHashSet( 4, .75f ); assertEquals( 8, s.n ); // The following code inverts HashCommon.phiMix() and places strategically keys in slots 4, 5, 6, 7 and 0 s.add( HashCommon.invMix( 4 ) ); s.add( HashCommon.invMix( 5 ) ); s.add( HashCommon.invMix( 4 + 8 ) ); s.add( HashCommon.invMix( 5 + 8 ) ); s.add( HashCommon.invMix( 4 + 16 ) ); assertNotEquals( 0, s.key[ 0 ] ); assertNotEquals( 0, s.key[ 4 ] ); assertNotEquals( 0, s.key[ 5 ] ); assertNotEquals( 0, s.key[ 6 ] ); assertNotEquals( 0, s.key[ 7 ] ); //System.err.println(Arrays.toString( s.key )); IntOpenHashSet keys = s.clone(); IntIterator iterator = s.iterator(); IntOpenHashSet t = new IntOpenHashSet(); assertTrue( t.add( iterator.nextInt() ) ); iterator.remove(); //System.err.println(Arrays.toString( s.key )); assertTrue( t.add( iterator.nextInt() ) ); //System.err.println(Arrays.toString( s.key )); // Originally, this remove would move the entry in slot 0 in slot 6 and we would return the entry in 0 twice assertTrue( t.add( iterator.nextInt() ) ); //System.err.println(Arrays.toString( s.key )); assertTrue( t.add( iterator.nextInt() ) ); iterator.remove(); //System.err.println(Arrays.toString( s.key )); assertTrue( t.add( iterator.nextInt() ) ); assertEquals( 3, s.size() ); assertEquals( keys, t ); } @Test public void testWrapAround3() { IntOpenHashSet s = new IntOpenHashSet( 4, .75f ); assertEquals( 8, s.n ); // The following code inverts HashCommon.phiMix() and places strategically keys in slots 5, 6, 7, 0 and 1 s.add( HashCommon.invMix( 5 ) ); s.add( HashCommon.invMix( 5 + 8 ) ); s.add( HashCommon.invMix( 5 + 16 ) ); s.add( HashCommon.invMix( 5 + 32 ) ); s.add( HashCommon.invMix( 5 + 64 ) ); assertNotEquals( 0, s.key[ 5 ] ); assertNotEquals( 0, s.key[ 6 ] ); assertNotEquals( 0, s.key[ 7 ] ); assertNotEquals( 0, s.key[ 0 ] ); assertNotEquals( 0, s.key[ 1 ] ); //System.err.println(Arrays.toString( s.key )); IntOpenHashSet keys = s.clone(); IntIterator iterator = s.iterator(); IntOpenHashSet t = new IntOpenHashSet(); assertTrue( t.add( iterator.nextInt() ) ); iterator.remove(); //System.err.println(Arrays.toString( s.key )); assertTrue( t.add( iterator.nextInt() ) ); iterator.remove(); //System.err.println(Arrays.toString( s.key )); // Originally, this remove would move the entry in slot 0 in slot 6 and we would return the entry in 0 twice assertTrue( t.add( iterator.nextInt() ) ); iterator.remove(); //System.err.println(Arrays.toString( s.key )); assertTrue( t.add( iterator.nextInt() ) ); iterator.remove(); //System.err.println(Arrays.toString( s.key )); assertTrue( t.add( iterator.nextInt() ) ); iterator.remove(); assertEquals( 0, s.size() ); assertEquals( keys, t ); } @SuppressWarnings("boxing") private static void checkTable( IntOpenHashSet s ) { final int[] key = s.key; assert ( s.n & -s.n ) == s.n : 
"Table length is not a power of two: " + s.n; assert s.n == s.key.length - 1; int n = s.n; while ( n-- != 0 ) if ( key[ n ] != 0 && !s.contains( key[ n ] ) ) throw new AssertionError( "Hash table has key " + key[ n ] + " marked as occupied, but the key does not belong to the table" ); if ( s.containsNull && ! s.contains( 0 ) ) throw new AssertionError( "Hash table should contain zero by internal state, but it doesn't when queried" ); if ( ! s.containsNull && s.contains( 0 ) ) throw new AssertionError( "Hash table should not contain zero by internal state, but it does when queried" ); java.util.HashSet t = new java.util.HashSet(); for ( int i = s.size(); i-- != 0; ) if ( key[ i ] != 0 && !t.add( key[ i ] ) ) throw new AssertionError( "Key " + key[ i ] + " appears twice" ); } private static void printProbes( IntOpenHashSet m ) { long totProbes = 0; double totSquareProbes = 0; int maxProbes = 0; final int[] key = m.key; final double f = (double)m.size / m.n; for ( int i = 0, c = 0; i < m.n; i++ ) { if ( key[ i ] != 0 ) c++; else { if ( c != 0 ) { final long p = ( c + 1 ) * ( c + 2 ) / 2; totProbes += p; totSquareProbes += (double)p * p; } maxProbes = Math.max( c, maxProbes ); c = 0; totProbes++; totSquareProbes++; } } final double expected = (double)totProbes / m.n; System.err.println( "Expected probes: " + ( 3 * Math.sqrt( 3 ) * ( f / ( ( 1 - f ) * ( 1 - f ) ) ) + 4 / ( 9 * f ) - 1 ) + "; actual: " + expected + "; stddev: " + Math.sqrt( totSquareProbes / m.n - expected * expected ) + "; max probes: " + maxProbes ); } @SuppressWarnings({ "unchecked", "boxing" }) private static void test( int n, float f ) throws IOException, ClassNotFoundException { int c; IntOpenHashSet m = new IntOpenHashSet( Hash.DEFAULT_INITIAL_SIZE, f ); java.util.Set t = new java.util.HashSet(); /* First of all, we fill t with random data. */ for ( int i = 0; i < Math.ceil( f * n ); i++ ) t.add( ( Integer.valueOf( genKey() ) ) ); /* Now we add to m the same data */ m.addAll( t ); checkTable( m ); assertTrue( "Error: !m.equals(t) after insertion", m.equals( t ) ); assertTrue( "Error: !t.equals(m) after insertion", t.equals( m ) ); printProbes( m ); /* Now we check that m actually holds that data. */ for ( java.util.Iterator i = t.iterator(); i.hasNext(); ) { Object e = i.next(); assertTrue( "Error: m and t differ on a key (" + e + ") after insertion (iterating on t)", m.contains( e ) ); } /* Now we check that m actually holds that data, but iterating on m. */ c = 0; for ( java.util.Iterator i = m.iterator(); i.hasNext(); ) { Object e = i.next(); c++; assertTrue( "Error: m and t differ on a key (" + e + ") after insertion (iterating on m)", t.contains( e ) ); } assertEquals( "Error: m has only " + c + " keys instead of " + t.size() + " after insertion (iterating on m)", c, t.size() ); /* * Now we check that inquiries about random data give the same answer in m and t. For m we * use the polymorphic method. */ for ( int i = 0; i < n; i++ ) { int T = genKey(); assertEquals( "Error: divergence in keys between t and m (polymorphic method)", m.contains( T ), t.contains( ( Integer.valueOf( T ) ) ) ); } /* * Again, we check that inquiries about random data give the same answer in m and t, but for * m we use the standard method. */ for ( int i = 0; i < n; i++ ) { int T = genKey(); assertFalse( "Error: divergence between t and m (standard method)", m.contains( ( Integer.valueOf( T ) ) ) != t.contains( ( Integer.valueOf( T ) ) ) ); } /* Now we put and remove random data in m and t, checking that the result is the same. 
*/ for ( int i = 0; i < 20 * n; i++ ) { int T = genKey(); assertFalse( "Error: divergence in add() between t and m", m.add( ( Integer.valueOf( T ) ) ) != t.add( ( Integer.valueOf( T ) ) ) ); T = genKey(); assertFalse( "Error: divergence in remove() between t and m", m.remove( ( Integer.valueOf( T ) ) ) != t.remove( ( Integer.valueOf( T ) ) ) ); } checkTable( m ); assertTrue( "Error: !m.equals(t) after removal", m.equals( t ) ); assertTrue( "Error: !t.equals(m) after removal", t.equals( m ) ); /* Now we check that m actually holds that data. */ for ( java.util.Iterator i = t.iterator(); i.hasNext(); ) { Object e = i.next(); assertFalse( "Error: m and t differ on a key (" + e + ") after removal (iterating on t)", !m.contains( e ) ); } /* Now we check that m actually holds that data, but iterating on m. */ for ( java.util.Iterator i = m.iterator(); i.hasNext(); ) { Object e = i.next(); assertFalse( "Error: m and t differ on a key (" + e + ") after removal (iterating on m)", !t.contains( e ) ); } /* Now we make m into an array, make it again a set and check it is OK. */ int a[] = m.toIntArray(); assertTrue( "Error: toArray() output (or array-based constructor) is not OK", new IntOpenHashSet( a ).equals( m ) ); /* Now we check cloning. */ assertTrue( "Error: m does not equal m.clone()", m.equals( m.clone() ) ); assertTrue( "Error: m.clone() does not equal m", m.clone().equals( m ) ); int h = m.hashCode(); /* Now we save and read m. */ java.io.File ff = new java.io.File( "it.unimi.dsi.fastutil.test" ); java.io.OutputStream os = new java.io.FileOutputStream( ff ); java.io.ObjectOutputStream oos = new java.io.ObjectOutputStream( os ); oos.writeObject( m ); oos.close(); java.io.InputStream is = new java.io.FileInputStream( ff ); java.io.ObjectInputStream ois = new java.io.ObjectInputStream( is ); m = (IntOpenHashSet)ois.readObject(); ois.close(); ff.delete(); assertEquals( "Error: hashCode() changed after save/read", h, m.hashCode() ); printProbes( m ); checkTable( m ); /* Now we check that m actually holds that data, but iterating on m. */ for ( java.util.Iterator i = m.iterator(); i.hasNext(); ) { Object e = i.next(); assertFalse( "Error: m and t differ on a key (" + e + ") after save/read", !t.contains( e ) ); } /* Now we put and remove random data in m and t, checking that the result is the same. */ for ( int i = 0; i < 20 * n; i++ ) { int T = genKey(); assertFalse( "Error: divergence in add() between t and m after save/read", m.add( ( Integer.valueOf( T ) ) ) != t.add( ( Integer.valueOf( T ) ) ) ); T = genKey(); assertFalse( "Error: divergence in remove() between t and m after save/read", m.remove( ( Integer.valueOf( T ) ) ) != t.remove( ( Integer.valueOf( T ) ) ) ); } assertTrue( "Error: !m.equals(t) after post-save/read removal", m.equals( t ) ); assertTrue( "Error: !t.equals(m) after post-save/read removal", t.equals( m ) ); /* Now we take out of m everything, and check that it is empty. */ for ( java.util.Iterator i = m.iterator(); i.hasNext(); ) { i.next(); i.remove(); } assertFalse( "Error: m is not empty (as it should be)", !m.isEmpty() ); m = new IntOpenHashSet( n, f ); t.clear(); /* Now we torture-test the hash table. This part is implemented only for integers and longs. 
*/ for( int i = n; i-- != 0; ) m.add( i ); t.addAll( m ); printProbes( m ); checkTable( m ); for( int i = n; i-- != 0; ) assertEquals( "Error: m and t differ on a key during torture-test insertion.", m.add( i ), t.add( ( Integer.valueOf( i ) ) ) ); assertTrue( "Error: !m.equals(t) after torture-test insertion", m.equals( t ) ); assertTrue( "Error: !t.equals(m) after torture-test insertion", t.equals( m ) ); for( int i = n; i-- != 0; ) assertEquals( "Error: m and t differ on a key during torture-test insertion.", m.remove( i ), t.remove( ( Integer.valueOf( i ) ) ) ); assertTrue( "Error: !m.equals(t) after torture-test removal", m.equals( t ) ); assertTrue( "Error: !t.equals(m) after torture-test removal", t.equals( m ) ); assertTrue( "Error: !m.equals(m.clone()) after torture-test removal", m.equals( m.clone() ) ); assertTrue( "Error: !m.clone().equals(m) after torture-test removal", m.clone().equals( m ) ); return; } @Test public void test1() throws IOException, ClassNotFoundException { test( 1, Hash.DEFAULT_LOAD_FACTOR ); test( 1, Hash.FAST_LOAD_FACTOR ); test( 1, Hash.VERY_FAST_LOAD_FACTOR ); } @Test public void test10() throws IOException, ClassNotFoundException { test( 10, Hash.DEFAULT_LOAD_FACTOR ); test( 10, Hash.FAST_LOAD_FACTOR ); test( 10, Hash.VERY_FAST_LOAD_FACTOR ); } @Test public void test100() throws IOException, ClassNotFoundException { test( 100, Hash.DEFAULT_LOAD_FACTOR ); test( 100, Hash.FAST_LOAD_FACTOR ); test( 100, Hash.VERY_FAST_LOAD_FACTOR ); } @Ignore("Too long") @Test public void test1000() throws IOException, ClassNotFoundException { test( 1000, Hash.DEFAULT_LOAD_FACTOR ); test( 1000, Hash.FAST_LOAD_FACTOR ); test( 1000, Hash.VERY_FAST_LOAD_FACTOR ); } } fastutil-7.1.0/test/it/unimi/dsi/fastutil/ints/IntSemiIndirectHeapsTest.java0000664000000000000000000000251113050705451025672 0ustar rootrootpackage it.unimi.dsi.fastutil.ints; import java.util.Arrays; import org.junit.Test; import it.unimi.dsi.fastutil.ints.IntSemiIndirectHeaps; import static org.junit.Assert.*; public class IntSemiIndirectHeapsTest { @Test public void testFront() { final int numBits = 20; int[] refArray = new int[ 100 ], heap = new int[ 100 ], front = new int[ 100 ]; for( int i = ( 1 << numBits ) - 1; i-- != 0; ) { for( int j = 0; j < numBits; j++ ) { refArray[ j ] = ( i & ( 1 << j ) ); heap[ j ] = j; } IntSemiIndirectHeaps.makeHeap( refArray, heap, numBits, null ); assertEquals( "Heap " + Integer.toBinaryString( i ), numBits - Integer.bitCount( i ), IntSemiIndirectHeaps.front( refArray, heap, numBits, front ) ); } } @Test public void testFrontWithComparator() { final int[] refArray = { 8, 16, 9 }; final int[] heap = { 2, 1, 0 }; IntComparator comparator = new AbstractIntComparator() { private static final long serialVersionUID = 1L; @Override public int compare( int k1, int k2 ) { return ( k1 & 3 ) - ( k2 & 3 ); } }; IntSemiIndirectHeaps.makeHeap( refArray, heap, 3, comparator ); final int[] front = new int[ 2 ]; assertEquals( 2, IntSemiIndirectHeaps.front( refArray, heap, 3, front, comparator ) ); Arrays.sort( front ); assertArrayEquals( new int[] { 0, 1 }, front ); } } fastutil-7.1.0/test/it/unimi/dsi/fastutil/ints/IntSetsTest.java0000664000000000000000000000051313050705451023250 0ustar rootrootpackage it.unimi.dsi.fastutil.ints; import static org.junit.Assert.assertNull; import org.junit.Test; public class IntSetsTest { @Test public void testToArrayShouldNullElementAfterLastEntry() { IntSet set = IntSets.EMPTY_SET; Object[] values = new Object[] { "test" }; set.toArray(values); 
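/* java.util.Collection.toArray(T[]) requires that, when the collection fits into the
 * given array with room to spare, the slot immediately following the last element be
 * set to null. EMPTY_SET has no elements, so index 0 is "immediately after the last
 * element" and the pre-existing "test" entry must be overwritten, which the assertion
 * below checks. A minimal sketch of the same contract on a non-empty set (assuming
 * the implementation follows the java.util contract):
 *
 *   IntSet one = IntSets.singleton( 1 );
 *   Object[] buf = { "a", "b", "c" };
 *   one.toArray( buf );
 *   // buf[ 0 ] holds the element, buf[ 1 ] must have been nulled
 *   assert buf[ 1 ] == null;
 */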
assertNull(values[0]); } } fastutil-7.1.0/test/it/unimi/dsi/fastutil/ints/StripedInt2IntOpenHashMapTest.java0000664000000000000000000002610713050705451026574 0ustar rootrootpackage it.unimi.dsi.fastutil.ints; public class StripedInt2IntOpenHashMapTest { // // // private static java.util.Random r = new java.util.Random( 0 ); // // private static int genKey() { // return r.nextInt(); // } // // private static int genValue() { // return r.nextInt(); // } // // private static boolean valEquals( Object o1, Object o2 ) { // return o1 == null ? o2 == null : o1.equals( o2 ); // } // // @SuppressWarnings({ "unchecked", "boxing" }) // protected static void test( int n, float f ) throws IOException, ClassNotFoundException { // StripedInt2IntOpenHashMap m = new StripedInt2IntOpenHashMap(); // Map t = new java.util.HashMap(); // /* First of all, we fill t with random data. */ // for ( int i = 0; i < n; i++ ) // t.put( ( Integer.valueOf( genKey() ) ), ( Integer.valueOf( genValue() ) ) ); // /* Now we add to m the same data */ // m.putAll( t ); // assertTrue( "Error: !m.equals(t) after insertion", m.equals( t ) ); // assertTrue( "Error: !t.equals(m) after insertion", t.equals( m ) ); // /* // * Now we check that m actually holds that data. // */ // for ( java.util.Iterator i = t.entrySet().iterator(); i.hasNext(); ) { // java.util.Map.Entry e = (java.util.Map.Entry)i.next(); // assertTrue( "Error: m and t differ on an entry (" + e + ") after insertion (iterating on t)", valEquals( e.getValue(), m.get( e.getKey() ) ) ); // } // /* Now we check that m actually holds that data, but iterating on m. */ // for ( java.util.Iterator i = m.entrySet().iterator(); i.hasNext(); ) { // java.util.Map.Entry e = (java.util.Map.Entry)i.next(); // assertTrue( "Error: m and t differ on an entry (" + e + ") after insertion (iterating on m)", valEquals( e.getValue(), t.get( e.getKey() ) ) ); // } // /* Now we check that m actually holds the same keys. */ // for ( java.util.Iterator i = t.keySet().iterator(); i.hasNext(); ) { // Object o = i.next(); // assertTrue( "Error: m and t differ on a key (" + o + ") after insertion (iterating on t)", m.containsKey( o ) ); // assertTrue( "Error: m and t differ on a key (" + o + ", in keySet()) after insertion (iterating on t)", m.keySet().contains( o ) ); // } // /* Now we check that m actually holds the same keys, but iterating on m. */ // for ( java.util.Iterator i = m.keySet().iterator(); i.hasNext(); ) { // Object o = i.next(); // assertTrue( "Error: m and t differ on a key after insertion (iterating on m)", t.containsKey( o ) ); // assertTrue( "Error: m and t differ on a key (in keySet()) after insertion (iterating on m)", t.keySet().contains( o ) ); // } // /* Now we check that m actually hold the same values. */ // for ( java.util.Iterator i = t.values().iterator(); i.hasNext(); ) { // Object o = i.next(); // assertTrue( "Error: m and t differ on a value after insertion (iterating on t)", m.containsValue( o ) ); // assertTrue( "Error: m and t differ on a value (in values()) after insertion (iterating on t)", m.values().contains( o ) ); // } // /* Now we check that m actually hold the same values, but iterating on m. 
*/ // for ( java.util.Iterator i = m.values().iterator(); i.hasNext(); ) { // Object o = i.next(); // assertTrue( "Error: m and t differ on a value after insertion (iterating on m)", t.containsValue( o ) ); // assertTrue( "Error: m and t differ on a value (in values()) after insertion (iterating on m)", t.values().contains( o ) ); // } // /* // * Now we check that inquiries about random data give the same answer in m and t. For m we // * use the polymorphic method. // */ // for ( int i = 0; i < n; i++ ) { // int T = genKey(); // assertTrue( "Error: divergence in keys between t and m (polymorphic method)", m.containsKey( ( Integer.valueOf( T ) ) ) == t.containsKey( ( Integer.valueOf( T ) ) ) ); // assertTrue( "Error: divergence between t and m (polymorphic method)", // !( m.get( T ) != ( 0 ) ) != ( ( t.get( ( Integer.valueOf( T ) ) ) == null ? ( 0 ) : ( ( ( (Integer)( t.get( ( Integer.valueOf( T ) ) ) ) ).intValue() ) ) ) != ( 0 ) ) || // t.get( ( Integer.valueOf( T ) ) ) != null && // !m.get( ( Integer.valueOf( T ) ) ).equals( t.get( ( Integer.valueOf( T ) ) ) ) ); // } // /* // * Again, we check that inquiries about random data give the same answer in m and t, but for // * m we use the standard method. // */ // for ( int i = 0; i < n; i++ ) { // int T = genKey(); // assertTrue( "Error: divergence between t and m (standard method)", valEquals( m.get( ( Integer.valueOf( T ) ) ), t.get( ( Integer.valueOf( T ) ) ) ) ); // } // /* Now we put and remove random data in m and t, checking that the result is the same. */ // for ( int i = 0; i < 20 * n; i++ ) { // int T = genKey(); // int U = genValue(); // assertTrue( "Error: divergence in put() between t and m", // valEquals( m.put( ( Integer.valueOf( T ) ), ( Integer.valueOf( U ) ) ), t.put( ( Integer.valueOf( T ) ), ( Integer.valueOf( U ) ) ) ) ); // T = genKey(); // assertTrue( "Error: divergence in remove() between t and m", valEquals( m.remove( ( Integer.valueOf( T ) ) ), t.remove( ( Integer.valueOf( T ) ) ) ) ); // } // assertTrue( "Error: !m.equals(t) after removal", m.equals( t ) ); // assertTrue( "Error: !t.equals(m) after removal", t.equals( m ) ); // /* // * Now we check that m actually holds the same data. // */ // for ( java.util.Iterator i = t.entrySet().iterator(); i.hasNext(); ) { // java.util.Map.Entry e = (java.util.Map.Entry)i.next(); // assertTrue( "Error: m and t differ on an entry (" + e + ") after removal (iterating on t)", valEquals( e.getValue(), m.get( e.getKey() ) ) ); // } // /* Now we check that m actually holds that data, but iterating on m. */ // for ( java.util.Iterator i = m.entrySet().iterator(); i.hasNext(); ) { // java.util.Map.Entry e = (java.util.Map.Entry)i.next(); // assertTrue( "Error: m and t differ on an entry (" + e + ") after removal (iterating on m)", valEquals( e.getValue(), t.get( e.getKey() ) ) ); // } // /* Now we check that m actually holds the same keys. */ // for ( java.util.Iterator i = t.keySet().iterator(); i.hasNext(); ) { // Object o = i.next(); // assertTrue( "Error: m and t differ on a key (" + o + ") after removal (iterating on t)", m.containsKey( o ) ); // assertTrue( "Error: m and t differ on a key (" + o + ", in keySet()) after removal (iterating on t)", m.keySet().contains( o ) ); // } // /* Now we check that m actually holds the same keys, but iterating on m. 
*/ // for ( java.util.Iterator i = m.keySet().iterator(); i.hasNext(); ) { // Object o = i.next(); // assertTrue( "Error: m and t differ on a key after removal (iterating on m)", t.containsKey( o ) ); // assertTrue( "Error: m and t differ on a key (in keySet()) after removal (iterating on m)", t.keySet().contains( o ) ); // } // /* Now we check that m actually hold the same values. */ // for ( java.util.Iterator i = t.values().iterator(); i.hasNext(); ) { // Object o = i.next(); // assertTrue( "Error: m and t differ on a value after removal (iterating on t)", m.containsValue( o ) ); // assertTrue( "Error: m and t differ on a value (in values()) after removal (iterating on t)", m.values().contains( o ) ); // } // /* Now we check that m actually hold the same values, but iterating on m. */ // for ( java.util.Iterator i = m.values().iterator(); i.hasNext(); ) { // Object o = i.next(); // assertTrue( "Error: m and t differ on a value after removal (iterating on m)", t.containsValue( o ) ); // assertTrue( "Error: m and t differ on a value (in values()) after removal (iterating on m)", t.values().contains( o ) ); // } // int h = m.hashCode(); // /* Now we save and read m. */ // java.io.File ff = new java.io.File( "it.unimi.dsi.fastutil.test" ); // java.io.OutputStream os = new java.io.FileOutputStream( ff ); // java.io.ObjectOutputStream oos = new java.io.ObjectOutputStream( os ); // oos.writeObject( m ); // oos.close(); // java.io.InputStream is = new java.io.FileInputStream( ff ); // java.io.ObjectInputStream ois = new java.io.ObjectInputStream( is ); // m = (StripedInt2IntOpenHashMap)ois.readObject(); // ois.close(); // ff.delete(); // assertEquals( "Error: hashCode() changed after save/read", m.hashCode(), h ); // /* Now we check that m actually holds that data. */ // for ( java.util.Iterator i = t.keySet().iterator(); i.hasNext(); ) { // Object o = i.next(); // assertTrue( "Error: m and t differ on an entry after save/read", valEquals( m.get( o ), t.get( o ) ) ); // } // /* Now we put and remove random data in m and t, checking that the result is the same. */ // for ( int i = 0; i < 20 * n; i++ ) { // int T = genKey(); // int U = genValue(); // assertTrue( "Error: divergence in put() between t and m after save/read", // valEquals( m.put( ( Integer.valueOf( T ) ), ( Integer.valueOf( U ) ) ), t.put( ( Integer.valueOf( T ) ), ( Integer.valueOf( U ) ) ) ) ); // T = genKey(); // Integer result; // assertTrue( "Error: divergence in remove() between t and m after save/read", valEquals( m.remove( T ), ( result = (Integer)t.remove( ( Integer.valueOf( T ) ) ) ) != null ? result.intValue() : 0 ) ); // } // assertTrue( "Error: !m.equals(t) after post-save/read removal", m.equals( t ) ); // assertTrue( "Error: !t.equals(m) after post-save/read removal", t.equals( m ) ); // /* // * Now we take out of m everything , and check that it is empty. 
// */ // for ( java.util.Iterator i = t.keySet().iterator(); i.hasNext(); ) // m.remove( i.next() ); // assertTrue( "Error: m is not empty (as it should be)", m.isEmpty() ); // m = new StripedInt2IntOpenHashMap(); // t.clear(); // for( int i = n; i-- != 0; ) m.put( i, 1 ); // t.putAll( m ); // for( int i = n; i-- != 0; ) assertEquals( "Error: m and t differ on a key during torture-test insertion.", m.put( i, 2 ), t.put( Integer.valueOf( i ), 2 ) ); // // assertTrue( "Error: !m.equals(t) after torture-test removal", m.equals( t ) ); // assertTrue( "Error: !t.equals(m) after torture-test removal", t.equals( m ) ); // //assertTrue( "Error: !m.equals(m.clone()) after torture-test removal", m.equals( m.clone() ) ); // //assertTrue( "Error: !m.clone().equals(m) after torture-test removal", m.clone().equals( m ) ); // //m.trim(); // assertTrue( "Error: !m.equals(t) after trim()", m.equals( t ) ); // assertTrue( "Error: !t.equals(m) after trim()", t.equals( m ) ); // return; // } // // @Test // public void test1() throws IOException, ClassNotFoundException { // test( 1, Hash.DEFAULT_LOAD_FACTOR ); // test( 1, Hash.FAST_LOAD_FACTOR ); // test( 1, Hash.VERY_FAST_LOAD_FACTOR ); // } // // @Test // public void test10() throws IOException, ClassNotFoundException { // test( 10, Hash.DEFAULT_LOAD_FACTOR ); // test( 10, Hash.FAST_LOAD_FACTOR ); // test( 10, Hash.VERY_FAST_LOAD_FACTOR ); // } // // @Test // public void test100() throws IOException, ClassNotFoundException { // test( 100, Hash.DEFAULT_LOAD_FACTOR ); // test( 100, Hash.FAST_LOAD_FACTOR ); // test( 100, Hash.VERY_FAST_LOAD_FACTOR ); // } // // @Ignore("Too long") // @Test // public void test1000() throws IOException, ClassNotFoundException { // test( 1000, Hash.DEFAULT_LOAD_FACTOR ); // test( 1000, Hash.FAST_LOAD_FACTOR ); // test( 1000, Hash.VERY_FAST_LOAD_FACTOR ); // } } fastutil-7.1.0/test/it/unimi/dsi/fastutil/io/BinIOTest.java0000664000000000000000000001656613050705451022270 0ustar rootrootpackage it.unimi.dsi.fastutil.io; import static org.junit.Assert.assertArrayEquals; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; import it.unimi.dsi.fastutil.bytes.ByteBigArrays; import it.unimi.dsi.fastutil.doubles.DoubleIterator; import java.io.DataInput; import java.io.DataInputStream; import java.io.DataOutput; import java.io.DataOutputStream; import java.io.File; import java.io.FileInputStream; import java.io.FileOutputStream; import java.io.IOException; import org.junit.Test; public class BinIOTest { final static byte[] SMALL = new byte[ 1024 ]; final static byte[] LARGE = new byte[ 1024 * 1024 + 42 ]; static { for( int i = SMALL.length; i-- != 0; ) SMALL[ i ] = (byte)i; for( int i = LARGE.length; i-- != 0; ) LARGE[ i ] = (byte)i; } public void testBytes( byte[] a ) throws IOException { final File file = File.createTempFile( getClass().getSimpleName(), "dump" ); file.deleteOnExit(); final byte[] aShifted = new byte[ a.length + 1 ]; System.arraycopy( a, 0, aShifted, 1, a.length ); for( int i = 0; i < 6; i++ ) { file.delete(); switch(i) { case 0: BinIO.storeBytes( a, file ); break; case 1: BinIO.storeBytes( a, (DataOutput)new DataOutputStream( new FileOutputStream( file ) ) ); break; case 2: BinIO.storeBytes( a, new FileOutputStream( file ) ); break; case 3: BinIO.storeBytes( aShifted, 1, a.length, file ); break; case 4: BinIO.storeBytes( aShifted, 1, a.length, (DataOutput)new DataOutputStream( new FileOutputStream( file ) ) ); break; case 5: BinIO.storeBytes( 
aShifted, 1, a.length, new FileOutputStream( file ) ); break; } assertArrayEquals( a, BinIO.loadBytes( file ) ); byte[] b = new byte[ a.length ]; assertEquals( a.length, BinIO.loadBytes( file, b ) ); assertArrayEquals( a, b ); assertEquals( a.length, BinIO.loadBytes( file, b, 0, a.length ) ); assertArrayEquals( a, b ); assertEquals( a.length, BinIO.loadBytes( new FileInputStream( file ), b ) ); assertArrayEquals( a, b ); assertEquals( a.length, BinIO.loadBytes( new FileInputStream( file ), b, 0, a.length ) ); assertArrayEquals( a, b ); byte[] c = new byte[ a.length + 1 ]; assertEquals( a.length, BinIO.loadBytes( new FileInputStream( file ), c ) ); assertEquals( 0, c[ a.length ] ); System.arraycopy( c, 0, b, 0, b.length ); assertArrayEquals( a, b ); assertEquals( a.length, BinIO.loadBytes( new FileInputStream( file ), c, 1, a.length ) ); assertEquals( 0, c[ 0 ] ); System.arraycopy( c, 1, b, 0, b.length ); assertArrayEquals( a, b ); c[ a.length ] = 0; assertEquals( a.length, BinIO.loadBytes( (DataInput)new DataInputStream( new FileInputStream( file ) ), c ) ); assertEquals( 0, c[ a.length ] ); System.arraycopy( c, 0, b, 0, b.length ); assertArrayEquals( a, b ); assertEquals( a.length, BinIO.loadBytes( (DataInput)new DataInputStream( new FileInputStream( file ) ), c, 1, a.length ) ); assertEquals( 0, c[ 0 ] ); System.arraycopy( c, 1, b, 0, b.length ); assertArrayEquals( a, b ); } } @Test public void testBytes() throws IOException { testBytes( SMALL ); testBytes( LARGE ); } public void testBigBytes( byte[][] a ) throws IOException { final File file = File.createTempFile( getClass().getSimpleName(), "dump" ); file.deleteOnExit(); final long length = ByteBigArrays.length( a ); final byte[][] aShifted = ByteBigArrays.newBigArray( length + 1 ); ByteBigArrays.copy( a, 0, aShifted, 1, length ); for( int i = 0; i < 6; i++ ) { file.delete(); switch(i) { case 0: BinIO.storeBytes( a, file ); break; case 1: BinIO.storeBytes( a, (DataOutput)new DataOutputStream( new FileOutputStream( file ) ) ); break; case 2: BinIO.storeBytes( a, new FileOutputStream( file ) ); break; case 3: BinIO.storeBytes( aShifted, 1, length, file ); break; case 4: BinIO.storeBytes( aShifted, 1, length, (DataOutput)new DataOutputStream( new FileOutputStream( file ) ) ); break; case 5: BinIO.storeBytes( aShifted, 1, length, new FileOutputStream( file ) ); break; } assertArrayEquals( a, BinIO.loadBytesBig( file ) ); byte[][] b = ByteBigArrays.newBigArray( length ); assertEquals( length, BinIO.loadBytes( file, b ) ); assertArrayEquals( a, b ); assertEquals( length, BinIO.loadBytes( file, b, 0, length ) ); assertArrayEquals( a, b ); assertEquals( length, BinIO.loadBytes( new FileInputStream( file ), b ) ); assertArrayEquals( a, b ); assertEquals( length, BinIO.loadBytes( new FileInputStream( file ), b, 0, length ) ); assertArrayEquals( a, b ); byte[][] c = ByteBigArrays.newBigArray( length + 1 ); assertEquals( length, BinIO.loadBytes( new FileInputStream( file ), c ) ); assertEquals( 0, ByteBigArrays.get( c, length ) ); ByteBigArrays.copy( c, 0, b, 0, b.length ); assertArrayEquals( a, b ); assertEquals( length, BinIO.loadBytes( new FileInputStream( file ), c, 1, length ) ); assertEquals( 0, ByteBigArrays.get( c, 0 ) ); ByteBigArrays.copy( c, 1, b, 0, b.length ); assertArrayEquals( a, b ); ByteBigArrays.set( c, length, (byte)0 ); assertEquals( length, BinIO.loadBytes( (DataInput)new DataInputStream( new FileInputStream( file ) ), c ) ); assertEquals( 0, ByteBigArrays.get( c, length ) ); ByteBigArrays.copy( c, 0, b, 0, b.length ); 
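// The data loaded through the DataInput overloads, once copied back into b, must still match the original big array a.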
assertArrayEquals( a, b ); assertEquals( length, BinIO.loadBytes( (DataInput)new DataInputStream( new FileInputStream( file ) ), c, 1, length ) ); assertEquals( 0, ByteBigArrays.get( c, 0 ) ); ByteBigArrays.copy( c, 1, b, 0, b.length ); assertArrayEquals( a, b ); } } @Test public void testBigBytes() throws IOException { testBigBytes( ByteBigArrays.wrap( SMALL ) ); testBigBytes( ByteBigArrays.wrap( LARGE ) ); } public void testFileDataWrappers() throws IOException { final File file = File.createTempFile( getClass().getSimpleName(), "dump" ); file.deleteOnExit(); final DataOutputStream dos = new DataOutputStream( new FileOutputStream( file ) ); for( int i = 0; i < 100; i++ ) dos.writeDouble( i ); dos.close(); DoubleIterator di = BinIO.asDoubleIterator( file ); for( int i = 0; i < 100; i++ ) assertEquals( i, di.nextDouble(), 0. ); assertFalse( di.hasNext() ); di = BinIO.asDoubleIterator( file ); for( int i = 0; i < 100; i++ ) { assertTrue( di.hasNext() ); assertEquals( i, di.nextDouble(), 0. ); } di = BinIO.asDoubleIterator( file ); int s = 1; for( int i = 0; i < 100; i++ ) { assertEquals( Math.min( s, 100 - i ), di.skip( s ) ); i += s; if ( i >= 100 ) break; assertEquals( i, di.nextDouble(), 0. ); s *= 2; } di = BinIO.asDoubleIterator( file ); s = 1; for( int i = 0; i < 100; i++ ) { if ( s > 100 - i ) break; assertTrue( di.hasNext() ); assertEquals( Math.min( s, 100 - i ), di.skip( s ) ); i += s; if ( i >= 100 ) { assertFalse( di.hasNext() ); break; } assertTrue( di.hasNext() ); assertTrue( di.hasNext() ); // To increase coverage assertEquals( i, di.nextDouble(), 0. ); s *= 2; } } public void testInts(int[] a) throws IOException { final File file = File.createTempFile( getClass().getSimpleName(), "dump" ); file.deleteOnExit(); for(int i = 0; i < a.length; i++) a[i] = i; BinIO.storeInts(a, file); assertArrayEquals(a, BinIO.loadInts(file)); } @Test public void testInts() throws IOException { testInts(new int[1024]); testInts(new int[1024 * 1024]); } } fastutil-7.1.0/test/it/unimi/dsi/fastutil/io/FastBufferedInputStreamTest.java0000664000000000000000000002777013050705451026063 0ustar rootrootpackage it.unimi.dsi.fastutil.io; import it.unimi.dsi.fastutil.io.FastBufferedInputStream; import it.unimi.dsi.fastutil.io.FastBufferedInputStream.LineTerminator; import java.io.ByteArrayInputStream; import java.io.File; import java.io.FileInputStream; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.nio.channels.FileChannel; import java.util.Arrays; import java.util.EnumSet; import java.util.Random; import org.junit.Test; import static org.junit.Assert.*; public class FastBufferedInputStreamTest { private final static boolean DEBUG = false; /** A byte array input stream that will return its data in small chunks, * even it could actually return more data, and skips less bytes than it could. */ private static class BastardByteArrayInputStream extends ByteArrayInputStream { private final static long seed = System.currentTimeMillis(); private final static Random r = new Random( seed ); static { System.err.println( "Seed: " + seed ); } public BastardByteArrayInputStream( byte[] array ) { super( array ); } @Override public int read( byte[] buffer, int offset, int length ) { int k = r.nextInt( 2 ) + 1; return super.read( buffer, offset, length < k ? length : k ); } public long skip( long n ) { int k = r.nextInt( 2 ); return super.skip( n < k ? 
n : k ); } } @SuppressWarnings("resource") public void testReadline( int bufferSize ) throws IOException { FastBufferedInputStream stream; byte[] b; stream = new FastBufferedInputStream( new BastardByteArrayInputStream( new byte[] { 'A', 'B', 'C', '\r' } ), bufferSize ); b = new byte[ 4 ]; stream.readLine( b, 0, b.length, EnumSet.of( LineTerminator.CR ) ); assertTrue( Arrays.toString( b ), Arrays.equals( b, new byte[] { 'A', 'B', 'C', 0 } ) ); assertEquals( 4, stream.position() ); assertEquals( -1, stream.readLine( b, 0, b.length, EnumSet.of( LineTerminator.CR ) ) ); stream = new FastBufferedInputStream( new BastardByteArrayInputStream( new byte[] { 'A', 'B', 'C', '\r' } ), bufferSize ); assertEquals( 4, stream.readLine( b, 0, b.length, EnumSet.of( LineTerminator.LF ) ) ); assertEquals( 4, stream.position() ); stream = new FastBufferedInputStream( new BastardByteArrayInputStream( new byte[] { 'A', 'B', 'C', '\r' } ), bufferSize ); assertEquals( 4, stream.readLine( b, 0, b.length, EnumSet.of( LineTerminator.LF ) ) ); assertEquals( 4, stream.position() ); stream = new FastBufferedInputStream( new BastardByteArrayInputStream( new byte[] { 'A', 'B', 'C', '\r' } ), bufferSize ); assertEquals( 4, stream.readLine( b, 0, b.length, EnumSet.of( LineTerminator.CR_LF ) ) ); assertEquals( 4, stream.position() ); stream = new FastBufferedInputStream( new BastardByteArrayInputStream( new byte[] { 'A', 'B', 'C', '\r' } ), bufferSize ); assertEquals( 4, stream.readLine( b, 0, b.length, EnumSet.of( LineTerminator.CR_LF ) ) ); assertTrue( Arrays.equals( b, new byte[] { 'A', 'B', 'C', '\r' } ) ); assertEquals( 4, stream.position() ); b = new byte[ 4 ]; stream = new FastBufferedInputStream( new BastardByteArrayInputStream( new byte[] { 'A', 'B', 'C', '\r' } ), bufferSize ); stream.readLine( b, 0, 2, EnumSet.of( LineTerminator.CR ) ); assertTrue( Arrays.equals( b, new byte[] { 'A', 'B', 0, 0 } ) ); assertEquals( 2, stream.position() ); // Reads with only LF as terminator stream = new FastBufferedInputStream( new BastardByteArrayInputStream( new byte[] { 'A', 'B', 'C', '\r', '\n', 'D' } ), bufferSize ); assertEquals( 4, stream.readLine( b, 0, 4, EnumSet.of( LineTerminator.LF ) ) ); assertTrue( Arrays.equals( b, new byte[] { 'A', 'B', 'C', '\r' } ) ); assertEquals( 4, stream.position() ); assertEquals( 0, stream.readLine( b, 0, 4, EnumSet.of( LineTerminator.LF ) ) ); assertEquals( 5, stream.position() ); assertTrue( Arrays.equals( b, new byte[] { 'A', 'B', 'C', '\r' } ) ); assertEquals( 1, stream.readLine( b, 2, 2, EnumSet.of( LineTerminator.LF ) ) ); assertEquals( 6, stream.position() ); assertTrue( Arrays.equals( b, new byte[] { 'A', 'B', 'D', '\r' } ) ); // Reads with both LF and CR/LF as terminators b = new byte[ 4 ]; stream = new FastBufferedInputStream( new BastardByteArrayInputStream( new byte[] { 'A', 'B', 'C', '\r', '\n', 'D' } ), bufferSize ); assertEquals( 3, stream.readLine( b, 0, 4, EnumSet.of( LineTerminator.CR, LineTerminator.CR_LF ) ) ); assertEquals( 5, stream.position() ); assertTrue( Arrays.equals( b, new byte[] { 'A', 'B', 'C', 0 } ) ); assertEquals( 1, stream.readLine( b, 2, 2, EnumSet.of( LineTerminator.CR, LineTerminator.CR_LF ) ) ); assertEquals( 6, stream.position() ); assertTrue( Arrays.equals( b, new byte[] { 'A', 'B', 'D', 0 } ) ); // Reads with only CR as terminator b = new byte[ 4 ]; stream = new FastBufferedInputStream( new BastardByteArrayInputStream( new byte[] { 'A', 'B', 'C', '\r', '\n', 'D' } ), bufferSize ); assertEquals( 3, stream.readLine( b, 0, 4, EnumSet.of( 
LineTerminator.CR ) ) ); assertEquals( 4, stream.position() ); assertTrue( Arrays.equals( b, new byte[] { 'A', 'B', 'C', 0 } ) ); assertEquals( 2, stream.readLine( b, 2, 2, EnumSet.of( LineTerminator.CR ) ) ); assertEquals( 6, stream.position() ); assertTrue( Arrays.equals( b, new byte[] { 'A', 'B', '\n', 'D' } ) ); // Reads with only CR/LF as terminator stream = new FastBufferedInputStream( new BastardByteArrayInputStream( new byte[] { 'A', 'B', 'C', '\r', '\n', 'D' } ), bufferSize ); b = new byte[ 4 ]; assertEquals( 3, stream.readLine( b, 0, 4, EnumSet.of( LineTerminator.CR_LF ) ) ); assertEquals( 5, stream.position() ); assertTrue( Arrays.equals( b, new byte[] { 'A', 'B', 'C', 0 } ) ); assertEquals( 1, stream.readLine( b, 0, 4, EnumSet.of( LineTerminator.CR_LF ) ) ); assertEquals( 6, stream.position() ); assertTrue( Arrays.equals( b, new byte[] { 'D', 'B', 'C', 0 } ) ); assertEquals( -1, stream.readLine( b, 0, 4, EnumSet.of( LineTerminator.CR_LF ) ) ); // Reads with both CR and CR/LF as terminator // CR at end-of-file stream = new FastBufferedInputStream( new BastardByteArrayInputStream( new byte[] { 'A', 'B', 'C', '\r' } ), bufferSize ); b = new byte[ 4 ]; assertEquals( 3, stream.readLine( b, 0, 4, EnumSet.of( LineTerminator.CR_LF, LineTerminator.CR ) ) ); assertEquals( 4, stream.position() ); assertTrue( Arrays.equals( b, new byte[] { 'A', 'B', 'C', 0 } ) ); } @Test public void testReadLine() throws IOException { testReadline( 1 ); testReadline( 2 ); testReadline( 3 ); testReadline( 4 ); testReadline( 5 ); testReadline( 6 ); testReadline( 7 ); testReadline( 100 ); } @SuppressWarnings("resource") public void testSkip( int bufferSize ) throws IOException { FastBufferedInputStream stream; stream = new FastBufferedInputStream( new BastardByteArrayInputStream( new byte[] { 'A', 'B', 'C', '\r', '\n', 'D' } ), bufferSize ); assertEquals( 2, stream.skip( 2 ) ); assertEquals( 2, stream.position() ); assertEquals( 1, stream.skip( 1 ) ); assertEquals( 3, stream.position() ); assertEquals( 3, stream.skip( 4 ) ); assertEquals( 6, stream.position() ); assertEquals( 0, stream.skip( 1 ) ); assertEquals( 6, stream.position() ); } @Test public void testSkip() throws IOException { testSkip( 1 ); testSkip( 2 ); testSkip( 3 ); testSkip( 4 ); testSkip( 5 ); testSkip( 6 ); testSkip( 7 ); testSkip( 100 ); } @Test public void testPosition() throws IOException { File temp = File.createTempFile( this.getClass().getSimpleName(), ".tmp" ); temp.deleteOnExit(); FileOutputStream fos = new FileOutputStream( temp ); fos.write( new byte[] { 0, 1, 2, 3, 4 } ); fos.close(); FastBufferedInputStream stream = new FastBufferedInputStream( new FileInputStream( temp ), 2 ); byte[] b = new byte[ 2 ]; stream.read( b ); stream.flush(); stream.position( 0 ); assertEquals( 0, stream.read() ); stream.close(); stream = new FastBufferedInputStream( new FileInputStream( temp ) ); b = new byte[ 1 ]; stream.read( b ); stream.flush(); stream.position( 0 ); assertEquals( 0, stream.read() ); stream.close(); stream = new FastBufferedInputStream( new FileInputStream( temp ) ); b = new byte[ 5 ]; stream.read( b ); stream.flush(); assertEquals( -1, stream.read() ); stream.position( 5 ); assertEquals( -1, stream.read() ); stream.position( 0 ); assertEquals( 0, stream.read() ); stream.position( 1 ); assertEquals( 1, stream.read() ); stream.position( 3 ); assertEquals( 3, stream.read() ); stream.position( 1 ); assertEquals( 1, stream.read() ); stream.position( 0 ); assertEquals( 0, stream.read() ); stream.close(); } @Test public void testRead() 
throws IOException { // Reads with length larger than buffer size // No head, no stream InputStream stream = new FastBufferedInputStream( new ByteArrayInputStream( new byte[] {} ), 1 ); byte[] b = new byte[ 4 ]; assertEquals( -1, stream.read( b, 0, 2 ) ); // Some head, no stream stream = new FastBufferedInputStream( new ByteArrayInputStream( new byte[] { 'A', 'B' } ), 2 ); b = new byte[ 4 ]; assertEquals( 1, stream.read( b, 0, 1 ) ); assertEquals( 1, stream.read( b, 0, 3 ) ); // Some head, some stream stream = new FastBufferedInputStream( new ByteArrayInputStream( new byte[] { 'A', 'B', 'C', 'D' } ), 2 ); b = new byte[ 4 ]; assertEquals( 1, stream.read( b, 0, 1 ) ); assertEquals( 3, stream.read( b, 0, 3 ) ); // No head, some stream stream = new FastBufferedInputStream( new ByteArrayInputStream( new byte[] { 'A', 'B', 'C', 'D' } ), 2 ); b = new byte[ 4 ]; assertEquals( 3, stream.read( b, 0, 3 ) ); // Reads with length smaller than or equal to buffer size // No head, no stream stream = new FastBufferedInputStream( new ByteArrayInputStream( new byte[] {} ), 4 ); b = new byte[ 4 ]; assertEquals( -1, stream.read( b, 0, 2 ) ); } @SuppressWarnings("resource") public void testRandom( int bufferSize ) throws IOException { File temp = File.createTempFile( this.getClass().getSimpleName(), "tmp" ); temp.deleteOnExit(); // Create temp random file FileOutputStream out = new FileOutputStream( temp ); Random random = new Random(); int length = 100000 + random.nextInt( 10000 ); for( int i = 0; i < length; i++ ) out.write( random.nextInt() ); out.close(); FastBufferedInputStream bis = new FastBufferedInputStream( new FileInputStream( temp ), bufferSize ); FileInputStream test = new FileInputStream( temp ); FileChannel fc = test.getChannel(); int a1, a2, off, len, pos; byte b1[] = new byte[ 32768 ]; byte b2[] = new byte[ 32768 ]; while( true ) { switch( random.nextInt( 6 ) ) { case 0: if ( DEBUG ) System.err.println("read()"); a1 = bis.read(); a2 = test.read(); assertEquals( a1, a2 ); if ( a1 == -1 ) return; break; case 1: off = random.nextInt( b1.length ); len = random.nextInt( b1.length - off + 1 ); a1 = bis.read( b1, off, len ); a2 = test.read( b2, off, len ); if ( DEBUG ) System.err.println("read(b, " + off + ", " + len + ")"); assertEquals( a1, a2 ); for( int i = off; i < off + len; i++ ) assertEquals( "Position " + i, b1[ i ], b2[ i ] ); break; case 2: if ( DEBUG ) System.err.println("available()"); assertEquals( bis.available(), test.available() ); break; case 3: if ( DEBUG ) System.err.println("position()" ); pos = (int)bis.position(); assertEquals( (int)fc.position(), pos ); break; case 4: pos = random.nextInt( length ); bis.position( pos ); if ( DEBUG ) System.err.println("position(" + pos + ")" ); (test = new FileInputStream( temp )).skip( pos ); fc = test.getChannel(); break; case 5: pos = random.nextInt( (int)(length - bis.position() + 1) ); a1 = (int)bis.skip( pos ); a2 = (int)test.skip( pos ); if ( DEBUG ) System.err.println("skip(" + pos + ")" ); assertEquals( a1, a2 ); break; } } } @Test public void testRandom() throws IOException { testRandom( 1 ); testRandom( 2 ); testRandom( 3 ); testRandom( 100 ); testRandom( 2048 ); } } fastutil-7.1.0/test/it/unimi/dsi/fastutil/io/FastBufferedOutputStreamTest.java0000664000000000000000000000450113050705451026247 0ustar rootrootpackage it.unimi.dsi.fastutil.io; import java.io.ByteArrayOutputStream; import java.io.File; import java.io.FileNotFoundException; import java.io.FileOutputStream; import java.io.IOException; import java.nio.channels.FileChannel; 
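// Note: testRandom below mirrors every write on a plain FileOutputStream/FileChannel so that positions and the final file contents can be cross-checked.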
import java.util.Arrays; import java.util.Random; import org.junit.Test; import static org.junit.Assert.*; public class FastBufferedOutputStreamTest { private static final boolean DEBUG = false; @Test public void testWriteEqualToBufferSize() throws IOException { @SuppressWarnings("resource") final FastBufferedOutputStream fbos = new FastBufferedOutputStream( new ByteArrayOutputStream(), 4 ); fbos.write( 0 ); fbos.write( new byte[ 4 ] ); fbos.write( 0 ); } public void testRandom( int bufSize ) throws FileNotFoundException, IOException { File file = File.createTempFile( getClass().getSimpleName(), "test" ); file.deleteOnExit(); FastBufferedOutputStream fbos = new FastBufferedOutputStream( new FileOutputStream( file + "1" ), bufSize ); FileOutputStream bos = new FileOutputStream( file + "2" ); FileChannel fc = bos.getChannel(); Random r = new Random(); long pos, len; int j = r.nextInt( 10000 ); while( j-- != 0 ) { switch( r.nextInt( 6 ) ) { case 0: int x = (byte)r.nextInt(); fbos.write( x ); bos.write(x ); break; case 1: byte[] b = new byte[ r.nextInt( 32768 ) + 16 ]; for( int i = 0; i < b.length; i++ ) b[ i ] = (byte)r.nextInt(); int offset = r.nextInt( b.length / 4 ); int length = r.nextInt( b.length - offset ); fbos.write( b, offset, length ); bos.write( b, offset, length ); break; case 2: fbos.flush(); break; case 3: if ( DEBUG ) System.err.println("position()" ); pos = (int)fbos.position(); assertEquals( (int)fc.position(), pos ); break; case 4: assertEquals( fc.size(), len = fbos.length() ); pos = len != 0 ? r.nextInt( (int)len ) : 0; fbos.position( pos ); fc.position( pos ); if ( DEBUG ) System.err.println("position(" + pos + ")" ); break; } } fbos.close(); bos.close(); assertTrue( Arrays.equals( BinIO.loadBytes( file + "1" ), BinIO.loadBytes( file + "2" ) ) ); } @Test public void testRandom() throws FileNotFoundException, IOException { testRandom( 1 ); testRandom( 2 ); testRandom( 3 ); testRandom( 1024 ); } } fastutil-7.1.0/test/it/unimi/dsi/fastutil/io/FastByteArrayOutputStreamTest.java0000664000000000000000000000544713050705451026441 0ustar rootrootpackage it.unimi.dsi.fastutil.io; import java.io.IOException; import org.junit.Test; import it.unimi.dsi.fastutil.io.FastByteArrayOutputStream; import static org.junit.Assert.*; public class FastByteArrayOutputStreamTest { @SuppressWarnings("resource") @Test public void testWrite() { FastByteArrayOutputStream fbaos = new FastByteArrayOutputStream(); fbaos.write( 1 ); fbaos.write( 2 ); assertEquals( 1, fbaos.array[ 0 ] ); assertEquals( 2, fbaos.array[ 1 ] ); assertEquals( 2, fbaos.length ); assertEquals( 2, fbaos.position() ); fbaos.position( 1 ); fbaos.write( 3 ); assertEquals( 2, fbaos.position() ); assertEquals( 2, fbaos.length ); assertEquals( 3, fbaos.array[ 1 ] ); fbaos.write( 4 ); assertEquals( 3, fbaos.length ); assertEquals( 4, fbaos.array[ 2 ] ); for( int i = 0; i < 14; i++ ) fbaos.write( i + 10 ); assertEquals( 17, fbaos.length ); for( int i = 0; i < 14; i++ ) assertEquals( i + 10, fbaos.array[ 3 + i ] ); } @SuppressWarnings("resource") @Test public void testWriteArray() throws IOException { FastByteArrayOutputStream fbaos = new FastByteArrayOutputStream(); fbaos.write( 1 ); fbaos.write( 2 ); fbaos.write( 3 ); byte[] a = new byte[ 14 ]; for( int i = 0; i < 14; i++ ) a[ i ] = (byte)( i + 10 ); fbaos.write( a ); assertEquals( 17, fbaos.length ); assertEquals( 1, fbaos.array[ 0 ] ); assertEquals( 2, fbaos.array[ 1 ] ); assertEquals( 3, fbaos.array[ 2 ] ); for( int i = 0; i < 14; i++ ) assertEquals( i + 10, fbaos.array[ 3 + i ] ); 
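// Writing the 14-byte array a second time appends at the current position, growing the stream from 17 to 31 bytes.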
fbaos.write( a ); assertEquals( 31, fbaos.length ); for( int i = 0; i < 14; i++ ) assertEquals( i + 10, fbaos.array[ 17 + i ] ); fbaos = new FastByteArrayOutputStream(); fbaos.write( 1 ); fbaos.write( 2 ); fbaos.write( 3 ); fbaos.position( 2 ); fbaos.write( a ); assertEquals( 16, fbaos.length ); assertEquals( 1, fbaos.array[ 0 ] ); assertEquals( 2, fbaos.array[ 1 ] ); for( int i = 0; i < 14; i++ ) assertEquals( i + 10, fbaos.array[ 2 + i ] ); fbaos = new FastByteArrayOutputStream(); fbaos.write( 1 ); fbaos.write( 2 ); fbaos.write( 3 ); fbaos.write( 4 ); fbaos.position( 3 ); fbaos.write( a ); assertEquals( 17, fbaos.length ); assertEquals( 1, fbaos.array[ 0 ] ); assertEquals( 2, fbaos.array[ 1 ] ); assertEquals( 3, fbaos.array[ 2 ] ); for( int i = 0; i < 14; i++ ) assertEquals( i + 10, fbaos.array[ 3 + i ] ); } @SuppressWarnings("resource") @Test public void testPositionWrite() { FastByteArrayOutputStream fbaos = new FastByteArrayOutputStream(); fbaos.position( 1 ); fbaos.write( 1 ); assertEquals( 2, fbaos.length ); } @SuppressWarnings("resource") @Test public void testPositionWrite2() { FastByteArrayOutputStream fbaos = new FastByteArrayOutputStream(); fbaos.position( fbaos.array.length + 2 ); fbaos.write( 1 ); } } fastutil-7.1.0/test/it/unimi/dsi/fastutil/io/FastMultiByteArrayInputStreamTest.java0000664000000000000000000001523213050705451027244 0ustar rootrootpackage it.unimi.dsi.fastutil.io; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertNotNull; import java.io.File; import java.io.FileInputStream; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.nio.channels.FileChannel; import java.util.Random; import org.junit.Test; /** It is a good idea to run this test after lowering manually {@link FastMultiByteArrayInputStream#SLICE_BITS} to 10. 
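 * Smaller slices make the stream span many backing arrays even with modest amounts of data, so the slice-boundary logic is exercised much more thoroughly.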
*/ public class FastMultiByteArrayInputStreamTest { private final static boolean DEBUG = false; @Test public void testSkip() { FastMultiByteArrayInputStream stream; stream = new FastMultiByteArrayInputStream( new byte[] { 'A', 'B', 'C', '\r', '\n', 'D' } ); assertEquals( 2, stream.skip( 2 ) ); assertEquals( 2, stream.position() ); assertEquals( 1, stream.skip( 1 ) ); assertEquals( 3, stream.position() ); assertEquals( 3, stream.skip( 4 ) ); assertEquals( 6, stream.position() ); assertEquals( 0, stream.skip( 1 ) ); assertEquals( 6, stream.position() ); stream.close(); } @Test public void testPosition() throws IOException { File temp = File.createTempFile( this.getClass().getSimpleName(), ".tmp" ); temp.deleteOnExit(); FileOutputStream fos = new FileOutputStream( temp ); fos.write( new byte[] { 0, 1, 2, 3, 4 } ); fos.close(); FastMultiByteArrayInputStream stream = new FastMultiByteArrayInputStream( new FastBufferedInputStream( new FileInputStream( temp ) ) ); byte[] b = new byte[ 2 ]; stream.read( b ); stream.position( 0 ); assertEquals( 0, stream.read() ); stream.close(); stream = new FastMultiByteArrayInputStream( new FastBufferedInputStream( new FileInputStream( temp ) ) ); b = new byte[ 1 ]; stream.read( b ); stream.position( 0 ); assertEquals( 0, stream.read() ); stream.close(); stream = new FastMultiByteArrayInputStream( new FastBufferedInputStream ( new FileInputStream( temp ) ) ); b = new byte[ 5 ]; stream.read( b ); assertEquals( -1, stream.read() ); stream.position( 5 ); assertEquals( -1, stream.read() ); stream.position( 0 ); assertEquals( 0, stream.read() ); stream.position( 1 ); assertEquals( 1, stream.read() ); stream.position( 3 ); assertEquals( 3, stream.read() ); stream.position( 1 ); assertEquals( 1, stream.read() ); stream.position( 0 ); assertEquals( 0, stream.read() ); stream.close(); } @SuppressWarnings("resource") @Test public void testRead() throws IOException { // Reads with length larger than buffer size // No head, no stream InputStream stream = new FastMultiByteArrayInputStream( new FastByteArrayInputStream( new byte[] {} ) ); byte[] b = new byte[ 4 ]; assertEquals( -1, stream.read( b, 0, 2 ) ); // Some head, no stream stream = new FastMultiByteArrayInputStream( new FastByteArrayInputStream( new byte[] { 'A', 'B' } ) ); b = new byte[ 4 ]; assertEquals( 1, stream.read( b, 0, 1 ) ); assertEquals( 1, stream.read( b, 0, 3 ) ); // Some head, some stream stream = new FastMultiByteArrayInputStream( new FastByteArrayInputStream( new byte[] { 'A', 'B', 'C', 'D' } ) ); b = new byte[ 4 ]; assertEquals( 1, stream.read( b, 0, 1 ) ); assertEquals( 3, stream.read( b, 0, 3 ) ); // No head, some stream stream = new FastMultiByteArrayInputStream( new FastByteArrayInputStream( new byte[] { 'A', 'B', 'C', 'D' } ) ); b = new byte[ 4 ]; assertEquals( 3, stream.read( b, 0, 3 ) ); // Reads with length smaller than or equal to buffer size // No head, no stream stream = new FastMultiByteArrayInputStream( new FastByteArrayInputStream( new byte[] {} ) ); b = new byte[ 4 ]; assertEquals( -1, stream.read( b, 0, 2 ) ); } @SuppressWarnings("resource") @Test public void testRandom() throws IOException { File temp = File.createTempFile( this.getClass().getSimpleName(), "tmp" ); temp.deleteOnExit(); // Create temp random file FileOutputStream out = new FileOutputStream( temp ); Random random = new Random(); int length = 10000000 + random.nextInt( 1000000 ); for( int i = 0; i < length; i++ ) out.write( random.nextInt() ); out.close(); FastMultiByteArrayInputStream bis = new 
FastMultiByteArrayInputStream( new FastBufferedInputStream( new FileInputStream( temp ) ) ); FileInputStream test = new FileInputStream( temp ); FileChannel fc = test.getChannel(); int a1, a2, off, len, pos; byte b1[] = new byte[ 32768 ]; byte b2[] = new byte[ 32768 ]; while( true ) { switch( random.nextInt( 6 ) ) { case 0: if ( DEBUG ) System.err.println("read()"); a1 = bis.read(); a2 = test.read(); assertEquals( a1, a2 ); if ( a1 == -1 ) return; break; case 1: off = random.nextInt( b1.length ); len = random.nextInt( b1.length - off + 1 ); a1 = bis.read( b1, off, len ); a2 = test.read( b2, off, len ); if ( DEBUG ) System.err.println("read(b, " + off + ", " + len + ")"); assertEquals( a1, a2 ); for( int i = off; i < off + len; i++ ) assertEquals( "Position " + i, b1[ i ], b2[ i ] ); break; case 2: if ( DEBUG ) System.err.println("available()"); assertTrue( bis.available() <= test.available() ); break; case 3: if ( DEBUG ) System.err.println("position()" ); pos = (int)bis.position(); assertEquals( (int)fc.position(), pos ); break; case 4: pos = random.nextInt( length ); bis.position( pos ); if ( DEBUG ) System.err.println("position(" + pos + ")" ); (test = new FileInputStream( temp )).skip( pos ); fc = test.getChannel(); break; case 5: pos = random.nextInt( (int)(length - bis.position() + 1) ); a1 = (int)bis.skip( pos ); a2 = (int)test.skip( pos ); if ( DEBUG ) System.err.println("skip(" + pos + ")" ); assertEquals( a1, a2 ); break; } } } @SuppressWarnings("resource") @Test public void testPositionOnEnd() throws IOException { FastMultiByteArrayInputStream stream = new FastMultiByteArrayInputStream( new byte[ FastMultiByteArrayInputStream.SLICE_SIZE ] ); stream.position( stream.length() ); assertEquals( 0, stream.available() ); assertEquals( -1, stream.read() ); assertEquals( -1, stream.read() ); assertNull( stream.current ); stream.position( stream.length() - 1 ); assertEquals( 1, stream.available() ); assertEquals( 0, stream.read() ); assertEquals( -1, stream.read() ); assertNotNull( stream.current ); stream.position( stream.length() - 2 ); assertEquals( 2, stream.read( new byte[ 2 ] ) ); assertNotNull( stream.current ); stream.position( stream.length() - 2 ); assertEquals( 2, stream.skip( 3 ) ); assertNull( stream.current ); } } fastutil-7.1.0/test/it/unimi/dsi/fastutil/io/InspectableFileCachedInputStreamTest.java0000664000000000000000000001456613050705451027643 0ustar rootrootpackage it.unimi.dsi.fastutil.io; /* * fastutil: Fast & compact type-specific collections for Java * * Copyright (C) 2013 Sebastiano Vigna * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; either * version 2.1 of the License, or (at your option) any later version. * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. 
* * You should have received a copy of the GNU Lesser General Public * License along with this library; if not, write to the Free Software * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA * */ import static org.junit.Assert.assertArrayEquals; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; import it.unimi.dsi.fastutil.io.InspectableFileCachedInputStream; import java.io.File; import java.io.IOException; import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.Random; import org.junit.Test; public class InspectableFileCachedInputStreamTest { private static final Random r = new Random( 0 ); public static List byteArrays; static { byteArrays = new ArrayList(); byte[] b; // Now generates byte buffers from 1 byte up to 64KiB; we shuffle them so that they are not increasing in size... for ( int k = 0; k < 10; k++ ) { b = new byte[ 1 << k ]; r.nextBytes( b ); byteArrays.add( b ); } for ( int k = 16; k >= 10; k-- ) { b = new byte[ 1 << k ]; r.nextBytes( b ); byteArrays.add( b ); } byteArrays.add( new byte[] {} ); byteArrays.add( "This is a short\nnon empty and purely ASCII\nbyte sequence".getBytes() ); } @Test public void testSmall() throws IOException { InspectableFileCachedInputStream icis = new InspectableFileCachedInputStream( 4 ); assertTrue( icis.isOpen() ); byte[] data = new byte[] { 1, 2 }; icis.write( ByteBuffer.wrap( data ) ); assertEquals( 2, icis.length() ); assertEquals( 1, icis.read() ); assertEquals( 2, icis.read() ); assertEquals( -1, icis.read() ); icis.position( 0 ); byte b[] = new byte[ 2 ]; assertEquals( 2, icis.read( b ) ); assertArrayEquals( data, b ); assertEquals( -1, icis.read() ); assertEquals( -1, icis.read( b, 0, b.length ) ); assertEquals( 0, icis.read( b, 0, 0 ) ); icis.clear(); assertTrue( icis.isOpen() ); data = new byte[] { 1, 2, 3, 4, 5 }; icis.write( ByteBuffer.wrap( data ) ); assertEquals( 5, icis.length() ); assertEquals( 1, icis.read() ); assertEquals( 2, icis.read() ); assertEquals( 3, icis.read() ); assertEquals( 4, icis.read() ); assertEquals( 5, icis.read() ); assertEquals( -1, icis.read() ); icis.position( 0 ); assertEquals( 0, icis.position() ); b = new byte[ 5 ]; assertEquals( 5, icis.read( b ) ); assertArrayEquals( data, b ); icis.position( 2 ); b = new byte[ 4 ]; assertEquals( 3, icis.read( b ) ); assertArrayEquals( Arrays.copyOfRange( data, 2, 5 ), Arrays.copyOfRange( b, 0, 3 ) ); icis.position( 0 ); assertEquals( 1, icis.read() ); icis.position( 4 ); assertEquals( 1, icis.available() ); assertEquals( 5, icis.read() ); assertEquals( 5, icis.position() ); icis.position( 0 ); assertEquals( 2, icis.skip( 2 ) ); assertEquals( 2, icis.skip( 2 ) ); assertEquals( 5, icis.read() ); assertEquals( 5, icis.position() ); icis.position( 5 ); assertEquals( -1, icis.read() ); assertEquals( -1, icis.read( b, 0, b.length ) ); icis.close(); icis.dispose(); } @Test public void test() throws IOException { for( int bufferSize: new int[] { 1, 2, 1024, 16384, 1024 * 1024 } ) { InspectableFileCachedInputStream icis = new InspectableFileCachedInputStream( bufferSize ); for( byte[] a: byteArrays ) icis.write( ByteBuffer.wrap( a ) ); for( byte[] a: byteArrays ) { final byte[] buffer = new byte[ a.length ]; icis.read( buffer ); assertArrayEquals( a, buffer ); } icis.position( 0 ); icis.truncate( 0 ); for( byte[] a: byteArrays ) for( byte b: a ) assertEquals( b, (byte)icis.read() ); icis.close(); 
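// dispose() additionally releases the stream's backing overflow storage before the next buffer size is tried.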
icis.dispose(); } } @Test public void testWithSpecifiedFile() throws IOException { final InspectableFileCachedInputStream icis = new InspectableFileCachedInputStream( 4, File.createTempFile( getClass().getSimpleName(), "overflow" ) ); final byte[] data = new byte[] { 1, 2 }; icis.write( ByteBuffer.wrap( data ) ); assertEquals( 2, icis.length() ); assertEquals( 1, icis.read() ); assertEquals( 2, icis.read() ); assertEquals( -1, icis.read() ); icis.close(); icis.dispose(); } @Test(expected=IOException.class) public void testClosed() throws IOException { final InspectableFileCachedInputStream icis = new InspectableFileCachedInputStream( 4 ); final byte[] data = new byte[] { 1, 2 }; icis.write( ByteBuffer.wrap( data ) ); icis.close(); assertFalse( icis.isOpen() ); icis.read(); } @Test(expected=IOException.class) public void testDisposed() throws IOException { @SuppressWarnings("resource") final InspectableFileCachedInputStream icis = new InspectableFileCachedInputStream( 4 ); final byte[] data = new byte[] { 1, 2 }; icis.write( ByteBuffer.wrap( data ) ); icis.dispose(); assertFalse( icis.isOpen() ); icis.read(); } @Test(expected=IOException.class) public void testClearDisposed() throws IOException { @SuppressWarnings("resource") final InspectableFileCachedInputStream icis = new InspectableFileCachedInputStream(); final byte[] data = new byte[] { 1, 2 }; icis.write( ByteBuffer.wrap( data ) ); icis.dispose(); icis.clear(); } @Test(expected=IOException.class) public void testResetDisposed() throws IOException { @SuppressWarnings("resource") final InspectableFileCachedInputStream icis = new InspectableFileCachedInputStream(); final byte[] data = new byte[] { 1, 2 }; icis.write( ByteBuffer.wrap( data ) ); icis.dispose(); icis.reset(); } @SuppressWarnings("resource") @Test(expected=IllegalArgumentException.class) public void testNegativeBuffer() throws IOException { new InspectableFileCachedInputStream( -1 ); } }fastutil-7.1.0/test/it/unimi/dsi/fastutil/io/TestIOTest.java0000664000000000000000000001300313050705451022456 0ustar rootrootpackage it.unimi.dsi.fastutil.io; import static org.junit.Assert.assertArrayEquals; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; import it.unimi.dsi.fastutil.bytes.ByteBigArrays; import it.unimi.dsi.fastutil.doubles.DoubleIterator; import java.io.BufferedReader; import java.io.DataOutputStream; import java.io.File; import java.io.FileOutputStream; import java.io.FileReader; import java.io.IOException; import java.io.PrintStream; import org.junit.Test; public class TestIOTest { final static byte[] SMALL = new byte[ 1024 ]; final static byte[] LARGE = new byte[ 1024 * 1024 + 42 ]; static { for( int i = SMALL.length; i-- != 0; ) SMALL[ i ] = (byte)i; for( int i = LARGE.length; i-- != 0; ) LARGE[ i ] = (byte)i; } public void testBytes( byte[] a ) throws IOException { final File file = File.createTempFile( getClass().getSimpleName(), "dump" ); file.deleteOnExit(); final byte[] aShifted = new byte[ a.length + 1 ]; System.arraycopy( a, 0, aShifted, 1, a.length ); for( int i = 0; i < 4; i++ ) { file.delete(); switch(i) { case 0: TextIO.storeBytes( a, file ); break; case 1: TextIO.storeBytes( a, new PrintStream( file ) ); break; case 2: TextIO.storeBytes( aShifted, 1, a.length, file ); break; case 3: TextIO.storeBytes( aShifted, 1, a.length, new PrintStream( file ) ); break; } byte[] b = new byte[ a.length ]; assertEquals( a.length, TextIO.loadBytes( file, b ) ); assertArrayEquals( a, b ); 
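// Reload the same data through the offset/length and Reader-based overloads, checking the contents after each call.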
TextIO.loadBytes( file, b, 0, a.length ); assertArrayEquals( a, b ); assertEquals( a.length, TextIO.loadBytes( new BufferedReader( new FileReader( file ) ), b ) ); assertArrayEquals( a, b ); assertEquals( a.length, TextIO.loadBytes( new BufferedReader( new FileReader( file ) ), b, 0, a.length ) ); assertArrayEquals( a, b ); byte[] c = new byte[ a.length + 1 ]; assertEquals( a.length, TextIO.loadBytes( new BufferedReader( new FileReader( file ) ), c ) ); assertEquals( 0, c[ a.length ] ); System.arraycopy( c, 0, b, 0, b.length ); assertArrayEquals( a, b ); assertEquals( a.length, TextIO.loadBytes( new BufferedReader( new FileReader( file ) ), c, 1, a.length ) ); assertEquals( 0, c[ 0 ] ); System.arraycopy( c, 1, b, 0, b.length ); assertArrayEquals( a, b ); } } @Test public void testBytes() throws IOException { testBytes( SMALL ); testBytes( LARGE ); } public void testBigBytes( byte[][] a ) throws IOException { final File file = File.createTempFile( getClass().getSimpleName(), "dump" ); file.deleteOnExit(); final long length = ByteBigArrays.length( a ); final byte[][] aShifted = ByteBigArrays.newBigArray( length + 1 ); ByteBigArrays.copy( a, 0, aShifted, 1, length ); for( int i = 0; i < 4; i++ ) { file.delete(); switch(i) { case 0: TextIO.storeBytes( a, file ); break; case 1: TextIO.storeBytes( a, new PrintStream( file ) ); break; case 2: TextIO.storeBytes( aShifted, 1, length, file ); break; case 3: TextIO.storeBytes( aShifted, 1, length, new PrintStream( file ) ); break; } byte[][] b = ByteBigArrays.newBigArray( length ); assertEquals( length, TextIO.loadBytes( file, b ) ); assertArrayEquals( a, b ); assertEquals( length, TextIO.loadBytes( file, b, 0, length ) ); assertArrayEquals( a, b ); assertEquals( length, TextIO.loadBytes( new BufferedReader( new FileReader( file ) ), b ) ); assertArrayEquals( a, b ); assertEquals( length, TextIO.loadBytes( new BufferedReader( new FileReader( file ) ), b, 0, length ) ); assertArrayEquals( a, b ); byte[][] c = ByteBigArrays.newBigArray( length + 1 ); assertEquals( length, TextIO.loadBytes( new BufferedReader( new FileReader( file ) ), c ) ); assertEquals( 0, ByteBigArrays.get( c, length ) ); ByteBigArrays.copy( c, 0, b, 0, b.length ); assertArrayEquals( a, b ); assertEquals( length, TextIO.loadBytes( new BufferedReader( new FileReader( file ) ), c, 1, length ) ); assertEquals( 0, ByteBigArrays.get( c, 0 ) ); ByteBigArrays.copy( c, 1, b, 0, b.length ); assertArrayEquals( a, b ); } } @Test public void testBigBytes() throws IOException { testBigBytes( ByteBigArrays.wrap( SMALL ) ); testBigBytes( ByteBigArrays.wrap( LARGE ) ); } public void testFileDataWrappers() throws IOException { final File file = File.createTempFile( getClass().getSimpleName(), "dump" ); file.deleteOnExit(); final DataOutputStream dos = new DataOutputStream( new FileOutputStream( file ) ); for( int i = 0; i < 100; i++ ) dos.writeDouble( i ); dos.close(); DoubleIterator di = TextIO.asDoubleIterator( file ); for( int i = 0; i < 100; i++ ) assertEquals( i, di.nextDouble(), 0. ); assertFalse( di.hasNext() ); di = TextIO.asDoubleIterator( file ); for( int i = 0; i < 100; i++ ) { assertTrue( di.hasNext() ); assertEquals( i, di.nextDouble(), 0. ); } di = TextIO.asDoubleIterator( file ); int s = 1; for( int i = 0; i < 100; i++ ) { assertEquals( Math.min( s, 100 - i ), di.skip( s ) ); i += s; if ( i >= 100 ) break; assertEquals( i, di.nextDouble(), 0. 
); s *= 2; } di = TextIO.asDoubleIterator( file ); s = 1; for( int i = 0; i < 100; i++ ) { if ( s > 100 - i ) break; assertTrue( di.hasNext() ); assertEquals( Math.min( s, 100 - i ), di.skip( s ) ); i += s; if ( i >= 100 ) { assertFalse( di.hasNext() ); break; } assertTrue( di.hasNext() ); assertTrue( di.hasNext() ); // To increase coverage assertEquals( i, di.nextDouble(), 0. ); s *= 2; } } } fastutil-7.1.0/test/it/unimi/dsi/fastutil/longs/LongArrayFrontCodedListTest.java0000664000000000000000000001103213050705451026524 0ustar rootrootpackage it.unimi.dsi.fastutil.longs; import it.unimi.dsi.fastutil.objects.ObjectListIterator; import java.io.IOException; import static org.junit.Assert.*; import org.junit.Test; @SuppressWarnings({ "rawtypes", "unchecked" }) public class LongArrayFrontCodedListTest { private static java.util.Random r = new java.util.Random( 0 ); private static long genKey() { return r.nextLong(); } private static boolean contentEquals( java.util.List x, java.util.List y ) { if ( x.size() != y.size() ) return false; for ( int i = 0; i < x.size(); i++ ) if ( !java.util.Arrays.equals( (long[])x.get( i ), (long[])y.get( i ) ) ) return false; return true; } private static int l[]; private static long[][] a; private static void test( int n ) throws IOException, ClassNotFoundException { l = new int[ n ]; a = new long[ n ][]; for ( int i = 0; i < n; i++ ) l[ i ] = (int)( Math.abs( r.nextGaussian() ) * 32 ); for ( int i = 0; i < n; i++ ) a[ i ] = new long[ l[ i ] ]; for ( int i = 0; i < n; i++ ) for ( int j = 0; j < l[ i ]; j++ ) a[ i ][ j ] = genKey(); LongArrayFrontCodedList m = new LongArrayFrontCodedList( it.unimi.dsi.fastutil.objects.ObjectIterators.wrap( a ), r.nextInt( 4 ) + 1 ); it.unimi.dsi.fastutil.objects.ObjectArrayList t = new it.unimi.dsi.fastutil.objects.ObjectArrayList( a ); // System.out.println(m); // for( i = 0; i < t.size(); i++ ) // System.out.println(ARRAY_LIST.wrap((KEY_TYPE[])t.get(i))); /* Now we check that m actually holds that data. */ assertTrue( "Error: m does not equal t at creation", contentEquals( m, t ) ); /* Now we check cloning. */ assertTrue( "Error: m does not equal m.clone()", contentEquals( m, m.clone() ) ); /* Now we play with iterators. 
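We drive a list iterator on m and one on t through the same random walk, checking that next(), previous(), nextIndex() and previousIndex() always agree.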
*/ { ObjectListIterator i; java.util.ListIterator j; i = m.listIterator(); j = t.listIterator(); for ( int k = 0; k < 2 * n; k++ ) { assertTrue( "Error: divergence in hasNext()", i.hasNext() == j.hasNext() ); assertTrue( "Error: divergence in hasPrevious()", i.hasPrevious() == j.hasPrevious() ); if ( r.nextFloat() < .8 && i.hasNext() ) { assertTrue( "Error: divergence in next()", java.util.Arrays.equals( (long[])i.next(), (long[])j.next() ) ); } else if ( r.nextFloat() < .2 && i.hasPrevious() ) { assertTrue( "Error: divergence in previous()", java.util.Arrays.equals( (long[])i.previous(), (long[])j.previous() ) ); } assertTrue( "Error: divergence in nextIndex()", i.nextIndex() == j.nextIndex() ); assertTrue( "Error: divergence in previousIndex()", i.previousIndex() == j.previousIndex() ); } } { int from = r.nextInt( m.size() + 1 ); ObjectListIterator i; java.util.ListIterator j; i = m.listIterator( from ); j = t.listIterator( from ); for ( int k = 0; k < 2 * n; k++ ) { assertTrue( "Error: divergence in hasNext() (iterator with starting point " + from + ")", i.hasNext() == j.hasNext() ); assertTrue( "Error: divergence in hasPrevious() (iterator with starting point " + from + ")", i.hasPrevious() == j.hasPrevious() ); if ( r.nextFloat() < .8 && i.hasNext() ) { assertTrue( "Error: divergence in next() (iterator with starting point " + from + ")", java.util.Arrays.equals( (long[])i.next(), (long[])j.next() ) ); // System.err.println("Done next " + I + " " + J + " " + badPrevious); } else if ( r.nextFloat() < .2 && i.hasPrevious() ) { assertTrue( "Error: divergence in previous() (iterator with starting point " + from + ")", java.util.Arrays.equals( (long[])i.previous(), (long[])j.previous() ) ); } } } java.io.File ff = new java.io.File( "it.unimi.dsi.fastutil.test" ); java.io.OutputStream os = new java.io.FileOutputStream( ff ); java.io.ObjectOutputStream oos = new java.io.ObjectOutputStream( os ); oos.writeObject( m ); oos.close(); java.io.InputStream is = new java.io.FileInputStream( ff ); java.io.ObjectInputStream ois = new java.io.ObjectInputStream( is ); m = (LongArrayFrontCodedList)ois.readObject(); ois.close(); ff.delete(); assertTrue( "Error: m does not equal t after save/read", contentEquals( m, t ) ); return; } @Test public void test1() throws IOException, ClassNotFoundException { test( 1 ); } @Test public void test10() throws Exception, ClassNotFoundException { test( 10 ); } @Test public void test100() throws IOException, ClassNotFoundException { test( 100 ); } @Test public void test1000() throws IOException, ClassNotFoundException { test( 1000 ); } @Test public void test10000() throws IOException, ClassNotFoundException { test( 10000 ); } } fastutil-7.1.0/test/it/unimi/dsi/fastutil/longs/LongArraysTest.java0000664000000000000000000001571313050705451024115 0ustar rootrootpackage it.unimi.dsi.fastutil.longs; import static org.junit.Assert.assertArrayEquals; import static org.junit.Assert.assertTrue; import java.util.Random; import org.junit.Test; public class LongArraysTest { private static long[] identity( int n ) { final long[] a = new long[ n ]; while( n-- != 0 ) a[ n ] = n; return a; } @Test public void testRadixSort1() { long[] t = { 2, 1, 0, 4 }; LongArrays.radixSort( t ); for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ i ] <= t[ i + 1 ] ); t = new long[] { 2, -1, 0, -4 }; LongArrays.radixSort( t ); for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ i ] <= t[ i + 1 ] ); t = LongArrays.shuffle( identity( 100 ), new Random( 0 ) ); LongArrays.radixSort( t ); for( int i = 
t.length - 1; i-- != 0; ) assertTrue( t[ i ] <= t[ i + 1 ] ); t = new long[ 100 ]; Random random = new Random( 0 ); for( int i = t.length; i-- != 0; ) t[ i ] = random.nextLong(); LongArrays.radixSort( t ); for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ i ] <= t[ i + 1 ] ); t = new long[ 100000 ]; random = new Random( 0 ); for( int i = t.length; i-- != 0; ) t[ i ] = random.nextLong(); LongArrays.radixSort( t ); for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ i ] <= t[ i + 1 ] ); t = new long[ 10000000 ]; random = new Random( 0 ); for( int i = t.length; i-- != 0; ) t[ i ] = random.nextLong(); LongArrays.radixSort( t ); for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ i ] <= t[ i + 1 ] ); } @Test public void testRadixSort2() { long[][] d = new long[ 2 ][]; d[ 0 ] = new long[ 10 ]; for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = 3 - i % 3; d[ 1 ] = LongArrays.shuffle( identity( 10 ), new Random( 0 ) ); LongArrays.radixSort( d[ 0 ], d[ 1 ] ); for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Long.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " + d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] ); d[ 0 ] = new long[ 100000 ]; for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = 100 - i % 100; d[ 1 ] = LongArrays.shuffle( identity( 100000 ), new Random( 6 ) ); LongArrays.radixSort( d[ 0 ], d[ 1 ] ); for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Long.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " + d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] ); d[ 0 ] = new long[ 10 ]; for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = i % 3 - 2; Random random = new Random( 0 ); d[ 1 ] = new long[ d[ 0 ].length ]; for( int i = d.length; i-- != 0; ) d[ 1 ][ i ] = random.nextLong(); LongArrays.radixSort( d[ 0 ], d[ 1 ] ); for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Long.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " + d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] ); d[ 0 ] = new long[ 100000 ]; random = new Random( 0 ); for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = random.nextLong(); d[ 1 ] = new long[ d[ 0 ].length ]; for( int i = d.length; i-- != 0; ) d[ 1 ][ i ] = random.nextLong(); LongArrays.radixSort( d[ 0 ], d[ 1 ] ); for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Long.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " + d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] ); } @Test public void testRadixSort() { long[][] t = { { 2, 1, 0, 4 } }; LongArrays.radixSort( t ); for( int i = t[ 0 ].length - 1; i-- != 0; ) assertTrue( t[ 0 ][ i ] <= t[ 0 ][ i + 1 ] ); t[ 0 ] = LongArrays.shuffle( identity( 100 ), new Random( 0 ) ); LongArrays.radixSort( t ); for( int i = t[ 0 ].length - 1; i-- != 0; ) assertTrue( t[ 0 ][ i ] <= t[ 0 ][ i + 1 ] ); long[][] d = new long[ 2 ][]; d[ 0 ] = new long[ 10 ]; for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = 3 - i % 3; d[ 1 ] = LongArrays.shuffle( identity( 10 ), new Random( 0 ) ); LongArrays.radixSort( d ); for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Long.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 
1 ] + ", " + d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] ); d[ 0 ] = new long[ 100000 ]; for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = 100 - i % 100; d[ 1 ] = LongArrays.shuffle( identity( 100000 ), new Random( 6 ) ); LongArrays.radixSort( d ); for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Long.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " + d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] ); d[ 0 ] = new long[ 10 ]; Random random = new Random( 0 ); for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = random.nextLong(); d[ 1 ] = new long[ d[ 0 ].length ]; for( int i = d.length; i-- != 0; ) d[ 1 ][ i ] = random.nextLong(); LongArrays.radixSort( d ); for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Long.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " + d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] ); d[ 0 ] = new long[ 100000 ]; random = new Random( 0 ); for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = random.nextLong(); d[ 1 ] = new long[ d[ 0 ].length ]; for( int i = d.length; i-- != 0; ) d[ 1 ][ i ] = random.nextLong(); LongArrays.radixSort( d ); for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Long.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " + d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] ); } @Test public void testStabilize() { int[] perm; long[] val; perm = new int[] { 0, 1, 2, 3 }; val = new long[] { 0, 0, 0, 0 }; LongArrays.stabilize( perm, val ); assertArrayEquals( new int[] { 0, 1, 2, 3 }, perm ); perm = new int[] { 3, 1, 2, 0 }; val = new long[] { 0, 0, 0, 0 }; LongArrays.stabilize( perm, val ); assertArrayEquals( new int[] { 0, 1, 2, 3 }, perm ); perm = new int[] { 3, 2, 1, 0 }; val = new long[] { 0, 1, 1, 2 }; LongArrays.stabilize( perm, val ); assertArrayEquals( new int[] { 3, 1, 2, 0 }, perm ); perm = new int[] { 3, 2, 1, 0 }; val = new long[] { 0, 0, 1, 1 }; LongArrays.stabilize( perm, val ); assertArrayEquals( new int[] { 2, 3, 0, 1 }, perm ); perm = new int[] { 4, 3, 2, 1, 0 }; val = new long[] { 1, 1, 0, 0, 0 }; LongArrays.stabilize( perm, val, 1, 3 ); assertArrayEquals( new int[] { 4, 2, 3, 1, 0 }, perm ); } } fastutil-7.1.0/test/it/unimi/dsi/fastutil/longs/LongOpenHashBigSetTest.java0000664000000000000000000001227713050705451025461 0ustar rootrootpackage it.unimi.dsi.fastutil.longs; import static org.junit.Assert.assertArrayEquals; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotEquals; import static org.junit.Assert.assertTrue; import it.unimi.dsi.fastutil.Hash; import it.unimi.dsi.fastutil.HashCommon; import java.util.Arrays; import org.junit.Test; public class LongOpenHashBigSetTest { @Test public void testRemove0() { LongOpenHashBigSet s = new LongOpenHashBigSet( Hash.DEFAULT_INITIAL_SIZE ); for( int i = -1; i <= 1; i++ ) assertTrue( s.add( i ) ); assertTrue( s.remove( 0 ) ); LongIterator iterator = s.iterator(); LongOpenHashSet z = new LongOpenHashSet(); z.add( iterator.nextLong() ); z.add( iterator.nextLong() ); assertFalse( iterator.hasNext() ); assertEquals( new LongOpenHashSet( new long[] { -1, 1 } ), z ); s = new 
LongOpenHashBigSet( Hash.DEFAULT_INITIAL_SIZE ); for( int i = -1; i <= 1; i++ ) assertTrue( s.add( i ) ); iterator = s.iterator(); while( iterator.hasNext() ) if ( iterator.nextLong() == 0 ) iterator.remove(); assertFalse( s.contains( 0 ) ); iterator = s.iterator(); long[] content = new long[ 2 ]; content[ 0 ] = iterator.nextLong(); content[ 1 ] = iterator.nextLong(); assertFalse( iterator.hasNext() ); Arrays.sort( content ); assertArrayEquals( new long[] { -1, 1 }, content ); } @Test public void testWrapAround() { LongOpenHashBigSet s = new LongOpenHashBigSet( 4, .5f ); assertEquals( 8, s.n ); // The following code inverts HashCommon.phiMix() and places strategically keys in slots 6, 7 and 0 s.add( HashCommon.invMix( 6L ) ); s.add( HashCommon.invMix( 7L ) ); s.add( HashCommon.invMix( 6L + 8 ) ); assertNotEquals( 0, s.key[ 0 ][ 0 ] ); assertNotEquals( 0, s.key[ 0 ][ 6 ] ); assertNotEquals( 0, s.key[ 0 ][ 7 ] ); LongOpenHashBigSet keys = s.clone(); LongIterator iterator = s.iterator(); LongOpenHashBigSet t = new LongOpenHashBigSet(); t.add( iterator.nextLong() ); t.add( iterator.nextLong() ); // Originally, this remove would move the entry in slot 0 in slot 6 and we would return the entry in 0 twice iterator.remove(); t.add( iterator.nextLong() ); assertEquals( keys, t ); } @Test public void testWrapAround2() { LongOpenHashBigSet s = new LongOpenHashBigSet( 4, .75f ); assertEquals( 8, s.n ); // The following code inverts HashCommon.phiMix() and places strategically keys in slots 4, 5, 6, 7 and 0 s.add( HashCommon.invMix( 4L ) ); s.add( HashCommon.invMix( 5L ) ); s.add( HashCommon.invMix( 4L + 8 ) ); s.add( HashCommon.invMix( 5L + 8 ) ); s.add( HashCommon.invMix( 4L + 16 ) ); assertNotEquals( 0, s.key[ 0 ][ 0 ] ); assertNotEquals( 0, s.key[ 0 ][ 4 ] ); assertNotEquals( 0, s.key[ 0 ][ 5 ] ); assertNotEquals( 0, s.key[ 0 ][ 6 ] ); assertNotEquals( 0, s.key[ 0 ][ 7 ] ); //System.err.println(Arrays.toString( s.key[ 0 ] )); LongOpenHashBigSet keys = s.clone(); LongIterator iterator = s.iterator(); LongOpenHashBigSet t = new LongOpenHashBigSet(); assertTrue( t.add( iterator.nextLong() ) ); iterator.remove(); //System.err.println(Arrays.toString( s.key[ 0 ] )); assertTrue( t.add( iterator.nextLong() ) ); //System.err.println(Arrays.toString( s.key[ 0 ] )); // Originally, this remove would move the entry in slot 0 in slot 6 and we would return the entry in 0 twice assertTrue( t.add( iterator.nextLong() ) ); //System.err.println(Arrays.toString( s.key[ 0 ] )); assertTrue( t.add( iterator.nextLong() ) ); iterator.remove(); //System.err.println(Arrays.toString( s.key[ 0 ] )); assertTrue( t.add( iterator.nextLong() ) ); assertEquals( 3, s.size64() ); assertEquals( keys, t ); } @Test public void testWrapAround3() { LongOpenHashBigSet s = new LongOpenHashBigSet( 4, .75f ); assertEquals( 8, s.n ); // The following code inverts HashCommon.phiMix() and places strategically keys in slots 5, 6, 7, 0 and 1 s.add( HashCommon.invMix( 5L ) ); s.add( HashCommon.invMix( 5L + 8 ) ); s.add( HashCommon.invMix( 5L + 16 ) ); s.add( HashCommon.invMix( 5L + 32 ) ); s.add( HashCommon.invMix( 5L + 64 ) ); assertNotEquals( 0, s.key[ 0 ][ 5 ] ); assertNotEquals( 0, s.key[ 0 ][ 6 ] ); assertNotEquals( 0, s.key[ 0 ][ 7 ] ); assertNotEquals( 0, s.key[ 0 ][ 0 ] ); assertNotEquals( 0, s.key[ 0 ][ 1 ] ); //System.err.println(Arrays.toString( s.key[ 0 ] )); LongOpenHashBigSet keys = s.clone(); LongIterator iterator = s.iterator(); LongOpenHashBigSet t = new LongOpenHashBigSet(); assertTrue( t.add( iterator.nextLong() ) ); 
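// Alternate next() and remove() so that entries have to be relocated across the wrap-around point (slot 0) while the iteration is in progress.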
iterator.remove(); //System.err.println(Arrays.toString( s.key[ 0 ] )); assertTrue( t.add( iterator.nextLong() ) ); iterator.remove(); //System.err.println(Arrays.toString( s.key[ 0 ] )); // Originally, this remove would move the entry in slot 0 in slot 6 and we would return the entry in 0 twice assertTrue( t.add( iterator.nextLong() ) ); iterator.remove(); //System.err.println(Arrays.toString( s.key[ 0 ] )); assertTrue( t.add( iterator.nextLong() ) ); iterator.remove(); //System.err.println(Arrays.toString( s.key[ 0 ] )); assertTrue( t.add( iterator.nextLong() ) ); iterator.remove(); assertEquals( 0, s.size64() ); assertEquals( keys, t ); } } fastutil-7.1.0/test/it/unimi/dsi/fastutil/objects/AbstractObject2IntFunctionTest.java0000664000000000000000000000062413050705451027473 0ustar rootrootpackage it.unimi.dsi.fastutil.objects; import static org.junit.Assert.*; import org.junit.Test; public class AbstractObject2IntFunctionTest { @SuppressWarnings("deprecation") @Test public void testRemove() { final Object2IntArrayMap a = new Object2IntArrayMap(); final Object key = new Object(); a.put( key, 1 ); assertEquals( Integer.valueOf( 1 ), a.remove( key ) ); } } fastutil-7.1.0/test/it/unimi/dsi/fastutil/objects/Object2IntOpenHashMapTest.java0000664000000000000000000002337313050705451026373 0ustar rootrootpackage it.unimi.dsi.fastutil.objects; import it.unimi.dsi.fastutil.Hash; import java.io.IOException; import java.util.Map; import org.junit.Ignore; import org.junit.Test; import static org.junit.Assert.*; @SuppressWarnings({"rawtypes","deprecation"}) public class Object2IntOpenHashMapTest { private static java.util.Random r = new java.util.Random( 0 ); private static Object genKey() { return Integer.toBinaryString( r.nextInt() ); } private static int genValue() { return r.nextInt(); } private static boolean valEquals( Object o1, Object o2 ) { return o1 == null ? o2 == null : o1.equals( o2 ); } @SuppressWarnings("unchecked") protected static void test( int n, float f ) throws IOException, ClassNotFoundException { Object2IntOpenHashMap m = new Object2IntOpenHashMap( Hash.DEFAULT_INITIAL_SIZE, f ); Map t = new java.util.HashMap(); /* First of all, we fill t with random data. */ for ( int i = 0; i < n; i++ ) t.put( ( genKey() ), ( Integer.valueOf( genValue() ) ) ); /* Now we add to m the same data */ m.putAll( t ); assertTrue( "Error: !m.equals(t) after insertion", m.equals( t ) ); assertTrue( "Error: !t.equals(m) after insertion", t.equals( m ) ); /* Now we check that m actually holds that data. */ for ( java.util.Iterator i = t.entrySet().iterator(); i.hasNext(); ) { java.util.Map.Entry e = (java.util.Map.Entry)i.next(); assertTrue( "Error: m and t differ on an entry (" + e + ") after insertion (iterating on t)", valEquals( e.getValue(), m.get( e.getKey() ) ) ); } /* Now we check that m actually holds that data, but iterating on m. */ for ( java.util.Iterator i = m.entrySet().iterator(); i.hasNext(); ) { java.util.Map.Entry e = (java.util.Map.Entry)i.next(); assertTrue( "Error: m and t differ on an entry (" + e + ") after insertion (iterating on m)", valEquals( e.getValue(), t.get( e.getKey() ) ) ); } /* Now we check that m actually holds the same keys. 
*/ for ( java.util.Iterator i = t.keySet().iterator(); i.hasNext(); ) { Object o = i.next(); assertTrue( "Error: m and t differ on a key (" + o + ") after insertion (iterating on t)", m.containsKey( o ) ); assertTrue( "Error: m and t differ on a key (" + o + ", in keySet()) after insertion (iterating on t)", m.keySet().contains( o ) ); } /* Now we check that m actually holds the same keys, but iterating on m. */ for ( java.util.Iterator i = m.keySet().iterator(); i.hasNext(); ) { Object o = i.next(); assertTrue( "Error: m and t differ on a key after insertion (iterating on m)", t.containsKey( o ) ); assertTrue( "Error: m and t differ on a key (in keySet()) after insertion (iterating on m)", t.keySet().contains( o ) ); } /* Now we check that m actually hold the same values. */ for ( java.util.Iterator i = t.values().iterator(); i.hasNext(); ) { Object o = i.next(); assertTrue( "Error: m and t differ on a value after insertion (iterating on t)", m.containsValue( o ) ); assertTrue( "Error: m and t differ on a value (in values()) after insertion (iterating on t)", m.values().contains( o ) ); } /* Now we check that m actually hold the same values, but iterating on m. */ for ( java.util.Iterator i = m.values().iterator(); i.hasNext(); ) { Object o = i.next(); assertTrue( "Error: m and t differ on a value after insertion (iterating on m)", t.containsValue( o ) ); assertTrue( "Error: m and t differ on a value (in values()) after insertion (iterating on m)", t.values().contains( o ) ); } /* * Now we check that inquiries about random data give the same answer in m and t. For m we * use the polymorphic method. */ for ( int i = 0; i < n; i++ ) { Object T = genKey(); assertFalse( "Error: divergence in keys between t and m (polymorphic method)", m.containsKey( ( T ) ) != t.containsKey( ( T ) ) ); assertFalse( "Error: divergence between t and m (polymorphic method)", ( m.getInt( T ) != ( 0 ) ) != ( ( t.get( ( T ) ) == null ? ( 0 ) : ( ( ( (Integer)( t.get( ( T ) ) ) ).intValue() ) ) ) != ( 0 ) ) || t.get( ( T ) ) != null && !( Integer.valueOf( m.getInt( T ) ) ).equals( t.get( ( T ) ) ) ); } /* * Again, we check that inquiries about random data give the same answer in m and t, but for * m we use the standard method. */ for ( int i = 0; i < n; i++ ) { Object T = genKey(); assertTrue( "Error: divergence between t and m (standard method)", valEquals( m.get( ( T ) ), t.get( ( T ) ) ) ); } /* Now we put and remove random data in m and t, checking that the result is the same. */ for ( int i = 0; i < 20 * n; i++ ) { Object T = genKey(); int U = genValue(); assertTrue( "Error: divergence in put() between t and m", valEquals( m.put( ( T ), ( Integer.valueOf( U ) ) ), t.put( ( T ), ( Integer.valueOf( U ) ) ) ) ); T = genKey(); assertTrue( "Error: divergence in remove() between t and m", valEquals( m.remove( ( T ) ), t.remove( ( T ) ) ) ); } assertTrue( "Error: !m.equals(t) after removal", m.equals( t ) ); assertTrue( "Error: !t.equals(m) after removal", t.equals( m ) ); /* Now we check that m actually holds the same data. */ for ( java.util.Iterator i = t.entrySet().iterator(); i.hasNext(); ) { java.util.Map.Entry e = (java.util.Map.Entry)i.next(); assertTrue( "Error: m and t differ on an entry (" + e + ") after removal (iterating on t)", valEquals( e.getValue(), m.get( e.getKey() ) ) ); } /* Now we check that m actually holds that data, but iterating on m. 
*/ for ( java.util.Iterator i = m.entrySet().iterator(); i.hasNext(); ) { java.util.Map.Entry e = (java.util.Map.Entry)i.next(); assertTrue( "Error: m and t differ on an entry (" + e + ") after removal (iterating on m)", valEquals( e.getValue(), t.get( e.getKey() ) ) ); } /* Now we check that m actually holds the same keys. */ for ( java.util.Iterator i = t.keySet().iterator(); i.hasNext(); ) { Object o = i.next(); assertTrue( "Error: m and t differ on a key (" + o + ") after removal (iterating on t)", m.containsKey( o ) ); assertTrue( "Error: m and t differ on a key (" + o + ", in keySet()) after removal (iterating on t)", m.keySet().contains( o ) ); } /* Now we check that m actually holds the same keys, but iterating on m. */ for ( java.util.Iterator i = m.keySet().iterator(); i.hasNext(); ) { Object o = i.next(); assertTrue( "Error: m and t differ on a key after removal (iterating on m)", t.containsKey( o ) ); assertTrue( "Error: m and t differ on a key (in keySet()) after removal (iterating on m)", t.keySet().contains( o ) ); } /* Now we check that m actually hold the same values. */ for ( java.util.Iterator i = t.values().iterator(); i.hasNext(); ) { Object o = i.next(); assertTrue( "Error: m and t differ on a value after removal (iterating on t)", m.containsValue( o ) ); assertTrue( "Error: m and t differ on a value (in values()) after removal (iterating on t)", m.values().contains( o ) ); } /* Now we check that m actually hold the same values, but iterating on m. */ for ( java.util.Iterator i = m.values().iterator(); i.hasNext(); ) { Object o = i.next(); assertTrue( "Error: m and t differ on a value after removal (iterating on m)", t.containsValue( o ) ); assertTrue( "Error: m and t differ on a value (in values()) after removal (iterating on m)", t.values().contains( o ) ); } int h = m.hashCode(); /* Now we save and read m. */ java.io.File ff = new java.io.File( "it.unimi.dsi.fastutil.test" ); java.io.OutputStream os = new java.io.FileOutputStream( ff ); java.io.ObjectOutputStream oos = new java.io.ObjectOutputStream( os ); oos.writeObject( m ); oos.close(); java.io.InputStream is = new java.io.FileInputStream( ff ); java.io.ObjectInputStream ois = new java.io.ObjectInputStream( is ); m = (Object2IntOpenHashMap)ois.readObject(); ois.close(); ff.delete(); assertTrue( "Error: hashCode() changed after save/read", m.hashCode() == h ); /* Now we check that m actually holds that data. */ for ( java.util.Iterator i = t.keySet().iterator(); i.hasNext(); ) { Object o = i.next(); assertTrue( "Error: m and t differ on an entry after save/read", valEquals( m.get( o ), t.get( o ) ) ); } /* Now we put and remove random data in m and t, checking that the result is the same. */ for ( int i = 0; i < 20 * n; i++ ) { Object T = genKey(); int U = genValue(); assertTrue( "Error: divergence in put() between t and m after save/read", valEquals( m.put( ( T ), ( Integer.valueOf( U ) ) ), t.put( ( T ), ( Integer.valueOf( U ) ) ) ) ); T = genKey(); assertTrue( "Error: divergence in remove() between t and m after save/read", valEquals( m.remove( ( T ) ), t.remove( ( T ) ) ) ); } assertTrue( "Error: !m.equals(t) after post-save/read removal", m.equals( t ) ); assertTrue( "Error: !t.equals(m) after post-save/read removal", t.equals( m ) ); /* Now we take out of m everything, and check that it is empty. 
*/ for ( java.util.Iterator i = t.keySet().iterator(); i.hasNext(); ) m.remove( i.next() ); assertTrue( "Error: m is not empty (as it should be)", m.isEmpty() ); return; } @Test public void test1() throws IOException, ClassNotFoundException { test( 1, Hash.DEFAULT_LOAD_FACTOR ); test( 1, Hash.FAST_LOAD_FACTOR ); test( 1, Hash.VERY_FAST_LOAD_FACTOR ); } @Test public void test10() throws IOException, ClassNotFoundException { test( 10, Hash.DEFAULT_LOAD_FACTOR ); test( 10, Hash.FAST_LOAD_FACTOR ); test( 10, Hash.VERY_FAST_LOAD_FACTOR ); } @Test public void test100() throws IOException, ClassNotFoundException { test( 100, Hash.DEFAULT_LOAD_FACTOR ); test( 100, Hash.FAST_LOAD_FACTOR ); test( 100, Hash.VERY_FAST_LOAD_FACTOR ); } @Ignore("Too long") @Test public void test1000() throws IOException, ClassNotFoundException { test( 1000, Hash.DEFAULT_LOAD_FACTOR ); test( 1000, Hash.FAST_LOAD_FACTOR ); test( 1000, Hash.VERY_FAST_LOAD_FACTOR ); } } fastutil-7.1.0/test/it/unimi/dsi/fastutil/objects/Object2ObjectArrayMapTest.java0000664000000000000000000001264413050705451026417 0ustar rootrootpackage it.unimi.dsi.fastutil.objects; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; import it.unimi.dsi.fastutil.io.BinIO; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.ObjectOutputStream; import java.util.Map.Entry; import org.junit.Test; public class Object2ObjectArrayMapTest { @SuppressWarnings("boxing") @Test public void testContainsNull() { Object2ObjectArrayMap m = new Object2ObjectArrayMap( new Integer[] { 1, 2, 3 }, new Integer[] { 1, 2, 3 } ); assertFalse( m.containsKey( null ) ); assertTrue( m.get( null ) == null ); } @SuppressWarnings("boxing") @Test public void testEquals() { Object2ObjectArrayMap a1 = new Object2ObjectArrayMap(); a1.put(0, 1); a1.put(1000, -1); a1.put(2000, 3); Object2ObjectArrayMap a2 = new Object2ObjectArrayMap(); a2.put(0, 1); a2.put(1000, -1); a2.put(2000, 3); assertEquals(a1, a2); Object2ObjectArrayMap m = new Object2ObjectArrayMap( new Integer[] { 1, 2 }, new Integer[] { 1, 2 } ); assertFalse( m.equals( new Object2ObjectOpenHashMap( new Integer[] { 1, null }, new Integer[] { 1, 1 } ) ) ); } @SuppressWarnings({ "boxing" }) @Test public void testMap() { for( int i = 0; i <= 1; i++ ) { Object2ObjectArrayMap m = i == 0 ? new Object2ObjectArrayMap() : new Object2ObjectArrayMap( new Integer[] { 0 }, new Integer[] { 0 } ); assertEquals( null, m.put( 1, 1 ) ); assertEquals( 1 + i, m.size() ); assertTrue( m.containsKey( 1 ) ); assertTrue( m.containsValue( 1 ) ); assertEquals( null, m.put( 2, 2 ) ); assertTrue( m.containsKey( 2 ) ); assertTrue( m.containsValue( 2 ) ); assertEquals( 2 + i, m.size() ); assertEquals( Integer.valueOf( 1 ), m.put( 1, 3 ) ); assertTrue( m.containsValue( 3 ) ); assertEquals( null, m.remove( 3 ) ); assertEquals( null, m.put( 3, 3 ) ); assertTrue( m.containsKey( 3 ) ); assertTrue( m.containsValue( 3 ) ); assertEquals( 3 + i, m.size() ); assertEquals( Integer.valueOf( 3 ), m.get( 1 ) ); assertEquals( Integer.valueOf( 2 ), m.get( 2 ) ); assertEquals( Integer.valueOf( 3 ), m.get( 3 ) ); assertEquals( new ObjectOpenHashSet( i == 0 ? new Integer[] { 1, 2, 3 } : new Integer[] { 0, 1, 2, 3 } ), new ObjectOpenHashSet( m.keySet().iterator() ) ); assertEquals( new ObjectOpenHashSet( i == 0 ? 
new Integer[] { 3, 2, 3 } : new Integer[] { 0, 3, 2, 3 } ), new ObjectOpenHashSet( m.values().iterator() ) ); for( Entry e: m.entrySet() ) assertEquals( e.getValue(), m.get( e.getKey() ) ); assertTrue( i != 0 == m.entrySet().contains( new AbstractObject2ObjectMap.BasicEntry( 0, 0 ) ) ); assertTrue( m.entrySet().contains( new AbstractObject2ObjectMap.BasicEntry( 1, 3 ) ) ); assertTrue( m.entrySet().contains( new AbstractObject2ObjectMap.BasicEntry( 2, 2 ) ) ); assertTrue( m.entrySet().contains( new AbstractObject2ObjectMap.BasicEntry( 3, 3 ) ) ); assertFalse( m.entrySet().contains( new AbstractObject2ObjectMap.BasicEntry( 1, 2 ) ) ); assertFalse( m.entrySet().contains( new AbstractObject2ObjectMap.BasicEntry( 2, 1 ) ) ); assertEquals( Integer.valueOf( 3 ), m.remove( 3 ) ); assertEquals( 2 + i, m.size() ); assertEquals( Integer.valueOf( 3 ), m.remove( 1 ) ); assertEquals( 1 + i, m.size() ); assertFalse( m.containsKey( 1 ) ); assertEquals( Integer.valueOf( 2 ), m.remove( 2 ) ); assertEquals( 0 + i, m.size() ); assertFalse( m.containsKey( 1 ) ); } } @SuppressWarnings("boxing") @Test public void testClone() { Object2ObjectArrayMap m = new Object2ObjectArrayMap(); assertEquals( m, m.clone() ); m.put( 0, 1 ); assertEquals( m, m.clone() ); m.put( 0, 2 ); assertEquals( m, m.clone() ); m.put( 1, 2 ); assertEquals( m, m.clone() ); m.remove( 1 ); assertEquals( m, m.clone() ); } @SuppressWarnings("boxing") @Test public void testSerialisation() throws IOException, ClassNotFoundException { Object2ObjectArrayMap m = new Object2ObjectArrayMap(); ByteArrayOutputStream baos = new ByteArrayOutputStream(); ObjectOutputStream oos = new ObjectOutputStream( baos ); oos.writeObject( m ); oos.close(); assertEquals( m, BinIO.loadObject( new ByteArrayInputStream( baos.toByteArray() ) ) ); m.put( 0, 1 ); m.put( 1, 2 ); baos.reset(); oos = new ObjectOutputStream( baos ); oos.writeObject( m ); oos.close(); assertEquals( m, BinIO.loadObject( new ByteArrayInputStream( baos.toByteArray() ) ) ); } @SuppressWarnings("boxing") @Test public void testIteratorRemove() { Object2ObjectArrayMap m = new Object2ObjectArrayMap( new Integer[] { 1, 2, 3 }, new Integer[] { 1, 2, 3 } ); ObjectIterator> keySet = m.entrySet().iterator(); keySet.next(); keySet.next(); keySet.remove(); assertTrue( keySet.hasNext() ); Entry next = keySet.next(); assertEquals( Integer.valueOf( 3 ), next.getKey() ); assertEquals( Integer.valueOf( 3 ), next.getValue() ); } } fastutil-7.1.0/test/it/unimi/dsi/fastutil/objects/ObjectAVLTreeSetTest.java0000664000000000000000000000255713050705451025412 0ustar rootrootpackage it.unimi.dsi.fastutil.objects; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertSame; import it.unimi.dsi.fastutil.ints.Int2IntAVLTreeMap; import org.junit.Test; public class ObjectAVLTreeSetTest { @Test public void testGet() { ObjectAVLTreeSet s = new ObjectAVLTreeSet(); Integer o = new Integer( 0 ); s.add( o ); assertSame( o, s.get( new Integer( 0 ) ) ); } @Test public void testAddTo() { Int2IntAVLTreeMap a = new Int2IntAVLTreeMap(); Int2IntAVLTreeMap b = new Int2IntAVLTreeMap(); // test addTo with empty map a.addTo(0, 1); // 0 -> 1 assertEquals(1, a.get(0)); // test addTo with empty map and weird defaultReturnValue b.defaultReturnValue(100); a.addTo(0, 0); // 0 -> 100 assertEquals(100, b.get(0)); // test addTo with existing values a.addTo(0, 1); // 0 -> 2 b.addTo(0, -100); // 0 -> 0 assertEquals(2, a.get(0)); assertEquals(0, b.get(0)); // test addTo with overflow values a.put(0, Integer.MAX_VALUE); a.addTo(0, 
1); // 0 -> MIN_VALUE assertEquals(Integer.MIN_VALUE, a.get(0)); // test various addTo operations a.put(0, 0); a.put(1, 1); a.put(2, 2); a.addTo(0, 10); // 0 -> 10 a.addTo(1, 9); // 1 -> 10 a.addTo(2, 8); // 2 -> 10 assertEquals(10, a.get(0)); assertEquals(10, a.get(1)); assertEquals(10, a.get(2)); } } fastutil-7.1.0/test/it/unimi/dsi/fastutil/objects/ObjectArrayListTest.java0000664000000000000000000000104113050705451025371 0ustar rootrootpackage it.unimi.dsi.fastutil.objects; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; import org.junit.Test; public class ObjectArrayListTest { @SuppressWarnings("boxing") @Test public void testRemoveAll() { ObjectArrayList l = ObjectArrayList.wrap( new Integer[] { 0, 1, 1, 2 } ); l.removeAll( ObjectSets.singleton( 1 ) ); assertEquals( ObjectArrayList.wrap( new Integer[] { 0, 2 } ), l ); assertTrue( l.elements()[ 2 ] == null ); assertTrue( l.elements()[ 3 ] == null ); } } fastutil-7.1.0/test/it/unimi/dsi/fastutil/objects/ObjectArrayPriorityQueueTest.java0000664000000000000000000001223313050705451027311 0ustar rootrootpackage it.unimi.dsi.fastutil.objects; import static org.junit.Assert.assertEquals; import it.unimi.dsi.fastutil.io.BinIO; import java.io.File; import java.io.IOException; import org.junit.Test; @SuppressWarnings({"boxing","unchecked"}) public class ObjectArrayPriorityQueueTest { @Test public void testEnqueueDequeue() { ObjectArrayPriorityQueue q = new ObjectArrayPriorityQueue(); ObjectHeapPriorityQueue h = new ObjectHeapPriorityQueue(); for( int i = 0; i < 100; i++ ) { q.enqueue( i ); h.enqueue( i ); } for( int i = 0; i < 100; i++ ) { assertEquals( h.first(), q.first() ); assertEquals( h.dequeue(), q.dequeue() ); } q = new ObjectArrayPriorityQueue( 10 ); h.clear(); for( int i = 0; i < 100; i++ ) { q.enqueue( i ); h.enqueue( i ); } for( int i = 0; i < 100; i++ ) { assertEquals( h.first(), q.first() ); assertEquals( h.dequeue(), q.dequeue() ); } q = new ObjectArrayPriorityQueue( 200 ); h.clear(); for( int i = 0; i < 100; i++ ) { q.enqueue( i ); h.enqueue( i ); } for( int i = 0; i < 100; i++ ) { assertEquals( h.first(), q.first() ); assertEquals( h.dequeue(), q.dequeue() ); } } @Test public void testEnqueueDequeueComp() { ObjectArrayPriorityQueue q = new ObjectArrayPriorityQueue( ObjectComparators.OPPOSITE_COMPARATOR ); ObjectHeapPriorityQueue h = new ObjectHeapPriorityQueue( ObjectComparators.OPPOSITE_COMPARATOR ); for( int i = 0; i < 100; i++ ) { q.enqueue( i ); h.enqueue( i ); } for( int i = 0; i < 100; i++ ) { assertEquals( h.first(), q.first() ); assertEquals( h.dequeue(), q.dequeue() ); } q = new ObjectArrayPriorityQueue( 10, ObjectComparators.OPPOSITE_COMPARATOR ); h.clear(); for( int i = 0; i < 100; i++ ) { q.enqueue( i ); h.enqueue( i ); } for( int i = 0; i < 100; i++ ) { assertEquals( h.first(), q.first() ); assertEquals( h.dequeue(), q.dequeue() ); } q = new ObjectArrayPriorityQueue( 200, ObjectComparators.OPPOSITE_COMPARATOR ); h.clear(); for( int i = 0; i < 100; i++ ) { q.enqueue( i ); h.enqueue( i ); } for( int i = 0; i < 100; i++ ) { assertEquals( h.first(), q.first() ); assertEquals( h.dequeue(), q.dequeue() ); } } @Test public void testMix() { ObjectArrayPriorityQueue q = new ObjectArrayPriorityQueue(); ObjectHeapPriorityQueue h = new ObjectHeapPriorityQueue(); for( int i = 0; i < 200; i++ ) { for( int j = 0; j < 20; j++ ) { q.enqueue( j + i * 20 ); h.enqueue( j + i * 20 ); } for( int j = 0; j < 10; j++ ) assertEquals( h.dequeue(), q.dequeue() ); } q = new ObjectArrayPriorityQueue( 10 ); 
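		// Second round of the mixed workload: the array-backed queue now starts from a small initial
		// capacity (10), the keys (j + i * -20) decrease over time so new minima keep appearing, and
		// q.first() is called after every enqueue; results are again checked against a fresh
		// ObjectHeapPriorityQueue.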
h = new ObjectHeapPriorityQueue(); for( int i = 0; i < 200; i++ ) { for( int j = 0; j < 20; j++ ) { q.enqueue( j + i * -20 ); h.enqueue( j + i * -20 ); q.first(); } for( int j = 0; j < 10; j++ ) assertEquals( h.dequeue(), q.dequeue() ); } q = new ObjectArrayPriorityQueue( 200 ); h = new ObjectHeapPriorityQueue(); for( int i = 0; i < 200; i++ ) { for( int j = 0; j < 20; j++ ) { q.enqueue( j + i * 20 ); h.enqueue( j + i * 20 ); } for( int j = 0; j < 10; j++ ) assertEquals( h.dequeue(), q.dequeue() ); } } @Test public void testMixComp() { ObjectArrayPriorityQueue q = new ObjectArrayPriorityQueue( ObjectComparators.OPPOSITE_COMPARATOR ); ObjectHeapPriorityQueue h = new ObjectHeapPriorityQueue( ObjectComparators.OPPOSITE_COMPARATOR ); for( int i = 0; i < 200; i++ ) { for( int j = 0; j < 20; j++ ) { q.enqueue( j + i * 20 ); h.enqueue( j + i * 20 ); } for( int j = 0; j < 10; j++ ) assertEquals( h.dequeue(), q.dequeue() ); } q = new ObjectArrayPriorityQueue( 10, ObjectComparators.OPPOSITE_COMPARATOR ); h = new ObjectHeapPriorityQueue( ObjectComparators.OPPOSITE_COMPARATOR ); for( int i = 0; i < 200; i++ ) { for( int j = 0; j < 20; j++ ) { q.enqueue( j + i * -20 ); h.enqueue( j + i * -20 ); q.first(); } for( int j = 0; j < 10; j++ ) assertEquals( h.dequeue(), q.dequeue() ); } q = new ObjectArrayPriorityQueue( 200, ObjectComparators.OPPOSITE_COMPARATOR ); h = new ObjectHeapPriorityQueue( ObjectComparators.OPPOSITE_COMPARATOR ); for( int i = 0; i < 200; i++ ) { for( int j = 0; j < 20; j++ ) { q.enqueue( j + i * 20 ); h.enqueue( j + i * 20 ); } for( int j = 0; j < 10; j++ ) assertEquals( h.dequeue(), q.dequeue() ); } } @Test public void testSerialize() throws IOException, ClassNotFoundException { ObjectArrayPriorityQueue q = new ObjectArrayPriorityQueue(); for( int i = 0; i < 100; i++ ) q.enqueue( i ); File file = File.createTempFile( getClass().getPackage().getName() + "-", "-tmp" ); file.deleteOnExit(); BinIO.storeObject( q, file ); ObjectArrayPriorityQueue r = (ObjectArrayPriorityQueue)BinIO.loadObject( file ); file.delete(); for( int i = 0; i < 100; i++ ) { assertEquals( q.first(), r.first() ); assertEquals( q.dequeue(), r.dequeue() ); } } } fastutil-7.1.0/test/it/unimi/dsi/fastutil/objects/ObjectArraySetTest.java0000664000000000000000000000663413050705451025226 0ustar rootrootpackage it.unimi.dsi.fastutil.objects; import it.unimi.dsi.fastutil.objects.ObjectOpenHashSet; import it.unimi.dsi.fastutil.io.BinIO; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.ObjectOutputStream; import java.util.Arrays; import java.util.Collections; import java.util.Iterator; import org.junit.Test; import static org.junit.Assert.*; public class ObjectArraySetTest { @SuppressWarnings("boxing") @Test public void testNullInEquals() { assertFalse( new ObjectArraySet( Arrays.asList( 42 ) ).equals( Collections.singleton( null ) ) ); } @SuppressWarnings("boxing") @Test public void testSet() { for( int i = 0; i <= 1; i++ ) { final ObjectArraySet s = i == 0 ? new ObjectArraySet() : new ObjectArraySet( new Integer[] { 0 } ); assertTrue( s.add( 1 ) ); assertEquals( 1 + i, s.size() ); assertTrue( s.contains( 1 ) ); assertTrue( s.add( 2 ) ); assertTrue( s.contains( 2 ) ); assertEquals( 2 + i, s.size() ); assertFalse( s.add( 1 ) ); assertFalse( s.remove( 3 ) ); assertTrue( s.add( 3 ) ); assertEquals( 3 + i, s.size() ); assertTrue( s.contains( 1 ) ); assertTrue( s.contains( 2 ) ); assertTrue( s.contains( 2 ) ); assertEquals( new ObjectOpenHashSet( i == 0 ? 
new Integer[] { 1, 2, 3 } : new Integer[] { 0, 1, 2, 3 } ), new ObjectOpenHashSet( s.iterator() ) ); assertTrue( s.remove( 3 ) ); assertEquals( 2 + i, s.size() ); assertTrue( s.remove( 1 ) ); assertEquals( 1 + i, s.size() ); assertFalse( s.contains( 1 ) ); assertTrue( s.remove( 2 ) ); assertEquals( 0 + i, s.size() ); assertFalse( s.contains( 1 ) ); } } @SuppressWarnings("boxing") @Test public void testClone() { ObjectArraySet s = new ObjectArraySet(); assertEquals( s, s.clone() ); s.add( 0 ); assertEquals( s, s.clone() ); s.add( 0 ); assertEquals( s, s.clone() ); s.add( 1 ); assertEquals( s, s.clone() ); s.add( 2 ); assertEquals( s, s.clone() ); s.remove( 0 ); assertEquals( s, s.clone() ); } @SuppressWarnings("boxing") @Test public void testSerialisation() throws IOException, ClassNotFoundException { ObjectArraySet s = new ObjectArraySet(); ByteArrayOutputStream baos = new ByteArrayOutputStream(); ObjectOutputStream oos = new ObjectOutputStream( baos ); oos.writeObject( s ); oos.close(); assertEquals( s, BinIO.loadObject( new ByteArrayInputStream( baos.toByteArray() ) ) ); s.add( 0 ); s.add( 1 ); baos.reset(); oos = new ObjectOutputStream( baos ); oos.writeObject( s ); oos.close(); assertEquals( s, BinIO.loadObject( new ByteArrayInputStream( baos.toByteArray() ) ) ); } @Test @SuppressWarnings("boxing") public void testRemove() { ObjectSet set = new ObjectArraySet( new Integer[] { 42 } ); Iterator iterator = set.iterator(); assertTrue(iterator.hasNext()); iterator.next(); iterator.remove(); assertFalse( iterator.hasNext() ); assertEquals( 0, set.size() ); set = new ObjectArraySet( new Integer[] { 42, 43, 44 } ); iterator = set.iterator(); assertTrue(iterator.hasNext()); iterator.next(); iterator.next(); iterator.remove(); assertEquals( Integer.valueOf( 44 ), iterator.next() ); assertFalse( iterator.hasNext() ); assertEquals( new ObjectArraySet( new Integer[] { 42, 44 } ), set ); } } fastutil-7.1.0/test/it/unimi/dsi/fastutil/objects/ObjectArraysTest.java0000664000000000000000000004150313050705451024727 0ustar rootrootpackage it.unimi.dsi.fastutil.objects; import static org.junit.Assert.assertArrayEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; import java.util.Arrays; import java.util.Comparator; import java.util.Random; import org.junit.Test; @SuppressWarnings("boxing") public class ObjectArraysTest { public static Integer[] identity( final int n ) { final Integer[] perm = new Integer[ n ]; for( int i = perm.length; i-- != 0; ) perm[ i ] = i; return perm; } @Test public void testMergeSort() { Integer[] a = { 2, 1, 5, 2, 1, 0, 9, 1, 4, 2, 4, 6, 8, 9, 10, 12, 1, 7 }, b = a.clone(), sorted = a.clone(); Arrays.sort( sorted ); ObjectArrays.mergeSort( b ); assertArrayEquals( sorted, b ); ObjectArrays.mergeSort( b ); assertArrayEquals( sorted, b ); final Integer[] d = a.clone(); ObjectArrays.mergeSort( d, new Comparator() { @Override public int compare( Integer k1, Integer k2 ) { return k1.compareTo( k2 ); } }); assertArrayEquals( sorted, d ); ObjectArrays.mergeSort( d, new Comparator() { @Override public int compare( Integer k1, Integer k2 ) { return k1.compareTo( k2 ); } }); assertArrayEquals( sorted, d ); } @Test public void testMergeSortSmallSupport() { Integer[] a = { 2, 1, 5, 2, 1, 0, 9, 1, 4, 2, 4, 6, 8, 9, 10, 12, 1, 7 }; for( int to = 1; to < a.length; to++ ) for( int from = 0; from <= to; from++ ) { final Integer[] support = new Integer[ to ]; System.arraycopy( a, 0, support, 0, to ); ObjectArrays.mergeSort( a, from, to, support ); if ( from 
< to ) for( int i = to - 1; i-- != from; ) assertTrue( a[ i ] <= a[ i + 1 ] ); } } @Test public void testQuickSort() { Integer[] a = { 2, 1, 5, 2, 1, 0, 9, 1, 4, 2, 4, 6, 8, 9, 10, 12, 1, 7 }, b = a.clone(), sorted = a.clone(); Arrays.sort( sorted ); Arrays.sort( b ); assertArrayEquals( sorted, b ); Arrays.sort( b ); assertArrayEquals( sorted, b ); final Integer[] d = a.clone(); ObjectArrays.quickSort( d, new Comparator() { @Override public int compare( Integer k1, Integer k2 ) { return k1.compareTo( k2 ); } }); assertArrayEquals( sorted, d ); ObjectArrays.quickSort( d, new Comparator() { @Override public int compare( Integer k1, Integer k2 ) { return k1.compareTo( k2 ); } }); assertArrayEquals( sorted, d ); } @Test public void testParallelQuickSort() { Integer[] a = { 2, 1, 5, 2, 1, 0, 9, 1, 4, 2, 4, 6, 8, 9, 10, 12, 1, 7 }, b = a.clone(), sorted = a.clone(); Arrays.sort( sorted ); Arrays.sort( b ); assertArrayEquals( sorted, b ); Arrays.sort( b ); assertArrayEquals( sorted, b ); final Integer[] d = a.clone(); ObjectArrays.parallelQuickSort( d, 0, d.length ); assertArrayEquals( sorted, d ); } @Test public void testLargeParallelQuickSortWithComparator() { Object [] a = new Object[8192+1]; // PARALLEL_QUICKSORT_NO_FORK for (int i = 0; i < a.length; i++) { a[i] = new Object(); } ObjectArrays.parallelQuickSort(a, new Comparator(){ @Override public int compare(Object o1, Object o2) { return Integer.compare(System.identityHashCode(o1), System.identityHashCode(o2)); }}); } @Test public void testSmallParallelQuickSortWithComparator() { Object [] a = new Object[8]; for (int i = 0; i < a.length; i++) { a[i] = new Object(); } ObjectArrays.parallelQuickSort(a, new Comparator(){ @Override public int compare(Object o1, Object o2) { return Integer.compare(System.identityHashCode(o1), System.identityHashCode(o2)); }}); } @Test public void testQuickSort1() { Integer[] t = { 2, 1, 0, 4 }; ObjectArrays.quickSort( t ); for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ i ] <= t[ i + 1 ] ); t = new Integer[] { 2, -1, 0, -4 }; ObjectArrays.quickSort( t ); for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ i ] <= t[ i + 1 ] ); t = ObjectArrays.shuffle( identity( 100 ), new Random( 0 ) ); ObjectArrays.quickSort( t ); for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ i ] <= t[ i + 1 ] ); t = new Integer[ 100 ]; Random random = new Random( 0 ); for( int i = t.length; i-- != 0; ) t[ i ] = random.nextInt(); ObjectArrays.quickSort( t ); for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ i ] <= t[ i + 1 ] ); t = new Integer[ 100000 ]; random = new Random( 0 ); for( int i = t.length; i-- != 0; ) t[ i ] = random.nextInt(); ObjectArrays.quickSort( t ); for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ i ] <= t[ i + 1 ] ); for( int i = 100; i-- != 10; ) t[ i ] = random.nextInt(); ObjectArrays.quickSort( t, 10, 100 ); for( int i = 99; i-- != 10; ) assertTrue( t[ i ] <= t[ i + 1 ] ); t = new Integer[ 10000000 ]; random = new Random( 0 ); for( int i = t.length; i-- != 0; ) t[ i ] = random.nextInt(); ObjectArrays.quickSort( t ); for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ i ] <= t[ i + 1 ] ); } private final static Comparator OPPOSITE_COMPARATOR = new Comparator() { @Override public int compare( Integer o1, Integer o2 ) { return o2.compareTo( o1 ); }}; @Test public void testQuickSort1Comp() { Integer[] t = { 2, 1, 0, 4 }; ObjectArrays.quickSort( t, OPPOSITE_COMPARATOR ); for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ i ] >= t[ i + 1 ] ); t = new Integer[] { 2, -1, 0, -4 }; 
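		// The descending-order check is repeated on an array with negative values, on shuffled and
		// random arrays of increasing size (100, 100000 and 10000000 elements), and on a sort
		// restricted to the index range [10, 100).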
ObjectArrays.quickSort( t, OPPOSITE_COMPARATOR ); for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ i ] >= t[ i + 1 ] ); t = ObjectArrays.shuffle( identity( 100 ), new Random( 0 ) ); ObjectArrays.quickSort( t, OPPOSITE_COMPARATOR ); for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ i ] >= t[ i + 1 ] ); t = new Integer[ 100 ]; Random random = new Random( 0 ); for( int i = t.length; i-- != 0; ) t[ i ] = random.nextInt(); ObjectArrays.quickSort( t, OPPOSITE_COMPARATOR ); for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ i ] >= t[ i + 1 ] ); t = new Integer[ 100000 ]; random = new Random( 0 ); for( int i = t.length; i-- != 0; ) t[ i ] = random.nextInt(); ObjectArrays.quickSort( t, OPPOSITE_COMPARATOR ); for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ i ] >= t[ i + 1 ] ); for( int i = 100; i-- != 10; ) t[ i ] = random.nextInt(); ObjectArrays.quickSort( t, 10, 100, OPPOSITE_COMPARATOR ); for( int i = 99; i-- != 10; ) assertTrue( t[ i ] >= t[ i + 1 ] ); t = new Integer[ 10000000 ]; random = new Random( 0 ); for( int i = t.length; i-- != 0; ) t[ i ] = random.nextInt(); ObjectArrays.quickSort( t, OPPOSITE_COMPARATOR ); for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ i ] >= t[ i + 1 ] ); } @Test public void testParallelQuickSort1() { Integer[] t = { 2, 1, 0, 4 }; ObjectArrays.parallelQuickSort( t ); for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ i ] <= t[ i + 1 ] ); t = new Integer[] { 2, -1, 0, -4 }; ObjectArrays.parallelQuickSort( t ); for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ i ] <= t[ i + 1 ] ); t = ObjectArrays.shuffle( identity( 100 ), new Random( 0 ) ); ObjectArrays.parallelQuickSort( t ); for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ i ] <= t[ i + 1 ] ); t = new Integer[ 100 ]; Random random = new Random( 0 ); for( int i = t.length; i-- != 0; ) t[ i ] = random.nextInt(); ObjectArrays.parallelQuickSort( t ); for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ i ] <= t[ i + 1 ] ); t = new Integer[ 100000 ]; random = new Random( 0 ); for( int i = t.length; i-- != 0; ) t[ i ] = random.nextInt(); ObjectArrays.parallelQuickSort( t ); for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ i ] <= t[ i + 1 ] ); for( int i = 100; i-- != 10; ) t[ i ] = random.nextInt(); ObjectArrays.parallelQuickSort( t, 10, 100 ); for( int i = 99; i-- != 10; ) assertTrue( t[ i ] <= t[ i + 1 ] ); t = new Integer[ 10000000 ]; random = new Random( 0 ); for( int i = t.length; i-- != 0; ) t[ i ] = random.nextInt(); ObjectArrays.parallelQuickSort( t ); for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ i ] <= t[ i + 1 ] ); } @Test public void testQuickSort2() { Integer[][] d = new Integer[ 2 ][]; d[ 0 ] = new Integer[ 10 ]; for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = 3 - i % 3; d[ 1 ] = ObjectArrays.shuffle( identity( 10 ), new Random( 0 ) ); ObjectArrays.quickSort( d[ 0 ], d[ 1 ] ); for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " + d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ].equals( d[ 0 ][ i + 1 ] ) && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] ); d[ 0 ] = new Integer[ 100000 ]; for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = 100 - i % 100; d[ 1 ] = ObjectArrays.shuffle( identity( 100000 ), new Random( 6 ) ); ObjectArrays.quickSort( d[ 0 ], d[ 1 ] ); for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " + d[ 1 ][ i + 1 ] + ">", d[ 0 ][ 
i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ].equals( d[ 0 ][ i + 1 ] ) && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] ); d[ 0 ] = new Integer[ 10 ]; for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = i % 3 - 2; Random random = new Random( 0 ); d[ 1 ] = new Integer[ d[ 0 ].length ]; for( int i = d[ 1 ].length; i-- != 0; ) d[ 1 ][ i ] = random.nextInt(); ObjectArrays.quickSort( d[ 0 ], d[ 1 ] ); for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " + d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ].equals( d[ 0 ][ i + 1 ] ) && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] ); d[ 0 ] = new Integer[ 100000 ]; random = new Random( 0 ); for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = random.nextInt(); d[ 1 ] = new Integer[ d[ 0 ].length ]; for( int i = d[ 1 ].length; i-- != 0; ) d[ 1 ][ i ] = random.nextInt(); ObjectArrays.quickSort( d[ 0 ], d[ 1 ] ); for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " + d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ].equals( d[ 0 ][ i + 1 ] ) && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] ); for( int i = 100; i-- != 10; ) d[ 0 ][ i ] = random.nextInt(); for( int i = 100; i-- != 10; ) d[ 1 ][ i ] = random.nextInt(); ObjectArrays.quickSort( d[ 0 ], d[ 1 ], 10, 100 ); for( int i = 99; i-- != 10; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " + d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ].equals( d[ 0 ][ i + 1 ] ) && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] ); d[ 0 ] = new Integer[ 10000000 ]; random = new Random( 0 ); for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = random.nextInt(); d[ 1 ] = new Integer[ d[ 0 ].length ]; for( int i = d[ 1 ].length; i-- != 0; ) d[ 1 ][ i ] = random.nextInt(); ObjectArrays.quickSort( d[ 0 ], d[ 1 ] ); for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " + d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ].equals( d[ 0 ][ i + 1 ] ) && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] ); } @Test public void testParallelQuickSort2() { Integer[][] d = new Integer[ 2 ][]; d[ 0 ] = new Integer[ 10 ]; for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = 3 - i % 3; d[ 1 ] = ObjectArrays.shuffle( identity( 10 ), new Random( 0 ) ); ObjectArrays.parallelQuickSort( d[ 0 ], d[ 1 ] ); for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " + d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ].equals( d[ 0 ][ i + 1 ] ) && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] ); d[ 0 ] = new Integer[ 100000 ]; for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = 100 - i % 100; d[ 1 ] = ObjectArrays.shuffle( identity( 100000 ), new Random( 6 ) ); ObjectArrays.parallelQuickSort( d[ 0 ], d[ 1 ] ); for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " + d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ].equals( d[ 0 ][ i + 1 ] ) && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] ); d[ 0 ] = new Integer[ 10 ]; for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = i % 3 - 2; Random random = new Random( 0 ); d[ 1 ] = new Integer[ d[ 0 ].length ]; for( int i = d[ 1 ].length; i-- != 0; 
) d[ 1 ][ i ] = random.nextInt(); ObjectArrays.parallelQuickSort( d[ 0 ], d[ 1 ] ); for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " + d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ].equals( d[ 0 ][ i + 1 ] ) && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] ); d[ 0 ] = new Integer[ 100000 ]; random = new Random( 0 ); for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = random.nextInt(); d[ 1 ] = new Integer[ d[ 0 ].length ]; for( int i = d[ 1 ].length; i-- != 0; ) d[ 1 ][ i ] = random.nextInt(); ObjectArrays.parallelQuickSort( d[ 0 ], d[ 1 ] ); for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " + d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ].equals( d[ 0 ][ i + 1 ] ) && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] ); for( int i = 100; i-- != 10; ) d[ 0 ][ i ] = random.nextInt(); for( int i = 100; i-- != 10; ) d[ 1 ][ i ] = random.nextInt(); ObjectArrays.parallelQuickSort( d[ 0 ], d[ 1 ], 10, 100 ); for( int i = 99; i-- != 10; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " + d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ].equals( d[ 0 ][ i + 1 ] ) && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] ); d[ 0 ] = new Integer[ 10000000 ]; random = new Random( 0 ); for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = random.nextInt(); d[ 1 ] = new Integer[ d[ 0 ].length ]; for( int i = d[ 1 ].length; i-- != 0; ) d[ 1 ][ i ] = random.nextInt(); ObjectArrays.parallelQuickSort( d[ 0 ], d[ 1 ] ); for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " + d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ].equals( d[ 0 ][ i + 1 ] ) && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] ); } @Test public void testShuffle() { Integer[] a = new Integer[ 100 ]; for( int i = a.length; i-- != 0; ) a[ i ] = i; ObjectArrays.shuffle( a, new Random() ); boolean[] b = new boolean[ a.length ]; for( int i = a.length; i-- != 0; ) { assertFalse( b[ a[ i ] ] ); b[ a[ i ] ] = true; } } @Test public void testShuffleFragment() { Integer[] a = new Integer[ 100 ]; for( int i = a.length; i-- != 0; ) a[ i ] = -1; for( int i = 10; i < 30; i++ ) a[ i ] = i - 10; ObjectArrays.shuffle( a, 10, 30, new Random() ); boolean[] b = new boolean[ 20 ]; for( int i = 20; i-- != 0; ) { assertFalse( b[ a[ i + 10 ] ] ); b[ a[ i + 10 ] ] = true; } } @Test public void testBinarySearchLargeKey() { final Integer[] a = { 1, 2, 3 }; ObjectArrays.binarySearch( a, 4 ); } @Test public void testReverse() { assertArrayEquals( new Integer[] { 0, 1, 2, 3 }, ObjectArrays.reverse( new Integer[] { 3, 2, 1, 0 } ) ); assertArrayEquals( new Integer[] { 0, 1, 2, 3, 4 }, ObjectArrays.reverse( new Integer[] { 4, 3, 2, 1, 0 } ) ); assertArrayEquals( new Integer[] { 4, 1, 2, 3, 0 }, ObjectArrays.reverse( new Integer[] { 4, 3, 2, 1, 0 }, 1, 4 ) ); assertArrayEquals( new Integer[] { 4, 2, 3, 1, 0 }, ObjectArrays.reverse( new Integer[] { 4, 3, 2, 1, 0 }, 1, 3 ) ); assertArrayEquals( new Integer[] { 0, 1, 2, 3, 4 }, ObjectArrays.reverse( new Integer[] { 0, 1, 2, 3, 4 }, 1, 2 ) ); } @Test public void testStabilize() { int[] perm; Integer[] val; perm = new int[] { 0, 1, 2, 3 }; val = new Integer[] { 0, 0, 0, 0 }; ObjectArrays.stabilize( perm, val ); assertArrayEquals( new int[] { 0, 1, 2, 3 
}, perm ); perm = new int[] { 3, 1, 2, 0 }; val = new Integer[] { 0, 0, 0, 0 }; ObjectArrays.stabilize( perm, val ); assertArrayEquals( new int[] { 0, 1, 2, 3 }, perm ); perm = new int[] { 3, 2, 1, 0 }; val = new Integer[] { 0, 1, 1, 2 }; ObjectArrays.stabilize( perm, val ); assertArrayEquals( new int[] { 3, 1, 2, 0 }, perm ); perm = new int[] { 3, 2, 1, 0 }; val = new Integer[] { 0, 0, 1, 1 }; ObjectArrays.stabilize( perm, val ); assertArrayEquals( new int[] { 2, 3, 0, 1 }, perm ); perm = new int[] { 4, 3, 2, 1, 0 }; val = new Integer[] { 1, 1, 0, 0, 0 }; ObjectArrays.stabilize( perm, val, 1, 3 ); assertArrayEquals( new int[] { 4, 2, 3, 1, 0 }, perm ); } } fastutil-7.1.0/test/it/unimi/dsi/fastutil/objects/ObjectBigArrayBigListTest.java0000664000000000000000000005100613050705451026443 0ustar rootrootpackage it.unimi.dsi.fastutil.objects; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; import it.unimi.dsi.fastutil.BigArrays; import java.util.Collections; import java.util.Iterator; import org.junit.Ignore; import org.junit.Test; @SuppressWarnings("rawtypes") public class ObjectBigArrayBigListTest { @Test public void testRemoveAllModifiesCollection() { ObjectBigList list = new ObjectBigArrayBigList(); assertFalse( list.removeAll( Collections.emptySet() ) ); assertEquals( ObjectBigLists.EMPTY_BIG_LIST, list ); } @SuppressWarnings("boxing") @Test public void testRemoveAllSkipSegment() { ObjectBigList list = new ObjectBigArrayBigList(); for( long i = 0; i < BigArrays.SEGMENT_SIZE + 10; i++ ) list.add( Integer.valueOf( (int)( i % 2 ) ) ); assertTrue( list.removeAll( ObjectSets.singleton( 1 ) ) ); assertEquals( BigArrays.SEGMENT_SIZE / 2 + 5, list.size64() ); for( long i = 0; i < BigArrays.SEGMENT_SIZE / 2 + 5; i++ ) assertEquals( Integer.valueOf( 0 ), list.get( i ) ); } private static java.util.Random r = new java.util.Random( 0 ); private static int genKey() { return r.nextInt(); } private static Object[] k, nk; private static Object kt[]; private static Object nkt[]; @SuppressWarnings({ "unchecked", "boxing" }) protected static void testLists( ObjectBigList m, ObjectBigList t, int n, int level ) { Exception mThrowsOutOfBounds, tThrowsOutOfBounds; Object rt = null; Object rm = ( null ); if ( level > 4 ) return; /* Now we check that both sets agree on random keys. For m we use the polymorphic method. 
*/ for ( int i = 0; i < n; i++ ) { int p = r.nextInt() % ( n * 2 ); Object T = genKey(); mThrowsOutOfBounds = tThrowsOutOfBounds = null; try { m.set( p, T ); } catch ( IndexOutOfBoundsException e ) { mThrowsOutOfBounds = e; } try { t.set( p, ( T ) ); } catch ( IndexOutOfBoundsException e ) { tThrowsOutOfBounds = e; } assertTrue( "Error (" + level + "): set() divergence at start in IndexOutOfBoundsException for index " + p + " (" + mThrowsOutOfBounds + ", " + tThrowsOutOfBounds + ")", ( mThrowsOutOfBounds == null ) == ( tThrowsOutOfBounds == null ) ); if ( mThrowsOutOfBounds == null ) assertTrue( "Error (" + level + "): m and t differ after set() on position " + p + " (" + m.get( p ) + ", " + t.get( p ) + ")", t.get( p ).equals( ( m.get( p ) ) ) ); p = r.nextInt() % ( n * 2 ); mThrowsOutOfBounds = tThrowsOutOfBounds = null; try { m.get( p ); } catch ( IndexOutOfBoundsException e ) { mThrowsOutOfBounds = e; } try { t.get( p ); } catch ( IndexOutOfBoundsException e ) { tThrowsOutOfBounds = e; } assertTrue( "Error (" + level + "): get() divergence at start in IndexOutOfBoundsException for index " + p + " (" + mThrowsOutOfBounds + ", " + tThrowsOutOfBounds + ")", ( mThrowsOutOfBounds == null ) == ( tThrowsOutOfBounds == null ) ); if ( mThrowsOutOfBounds == null ) assertTrue( "Error (" + level + "): m and t differ aftre get() on position " + p + " (" + m.get( p ) + ", " + t.get( p ) + ")", t.get( p ).equals( ( m.get( p ) ) ) ); } /* Now we check that both sets agree on random keys. For m we use the standard method. */ for ( int i = 0; i < n; i++ ) { int p = r.nextInt() % ( n * 2 ); mThrowsOutOfBounds = tThrowsOutOfBounds = null; try { m.get( p ); } catch ( IndexOutOfBoundsException e ) { mThrowsOutOfBounds = e; } try { t.get( p ); } catch ( IndexOutOfBoundsException e ) { tThrowsOutOfBounds = e; } assertTrue( "Error (" + level + "): get() divergence at start in IndexOutOfBoundsException for index " + p + " (" + mThrowsOutOfBounds + ", " + tThrowsOutOfBounds + ")", ( mThrowsOutOfBounds == null ) == ( tThrowsOutOfBounds == null ) ); if ( mThrowsOutOfBounds == null ) assertTrue( "Error (" + level + "): m and t differ at start on position " + p + " (" + m.get( p ) + ", " + t.get( p ) + ")", t.get( p ) .equals( m.get( p ) ) ); } /* Now we check that m and t are equal. */ if ( !m.equals( t ) || !t.equals( m ) ) System.err.println( "m: " + m + " t: " + t ); assertTrue( "Error (" + level + "): ! m.equals( t ) at start", m.equals( t ) ); assertTrue( "Error (" + level + "): ! t.equals( m ) at start", t.equals( m ) ); /* Now we check that m actually holds that data. */ for ( Iterator i = t.iterator(); i.hasNext(); ) { assertTrue( "Error (" + level + "): m and t differ on an entry after insertion (iterating on t)", m.contains( i.next() ) ); } /* Now we check that m actually holds that data, but iterating on m. */ for ( Iterator i = m.listIterator(); i.hasNext(); ) { assertTrue( "Error (" + level + "): m and t differ on an entry after insertion (iterating on m)", t.contains( i.next() ) ); } /* * Now we check that inquiries about random data give the same answer in m and t. For m we * use the polymorphic method. */ for ( int i = 0; i < n; i++ ) { Object T = genKey(); assertTrue( "Error (" + level + "): divergence in content between t and m (polymorphic method)", m.contains( T ) == t.contains( ( T ) ) ); } /* * Again, we check that inquiries about random data give the same answer in m and t, but for * m we use the standard method. 
*/ for ( int i = 0; i < n; i++ ) { Object T = genKey(); assertTrue( "Error (" + level + "): divergence in content between t and m (polymorphic method)", m.contains( ( T ) ) == t.contains( ( T ) ) ); } /* Now we add and remove random data in m and t, checking that the result is the same. */ for ( int i = 0; i < 2 * n; i++ ) { Object T = genKey(); try { m.add( T ); } catch ( IndexOutOfBoundsException e ) { mThrowsOutOfBounds = e; } try { t.add( ( T ) ); } catch ( IndexOutOfBoundsException e ) { tThrowsOutOfBounds = e; } T = genKey(); int p = r.nextInt() % ( 2 * n + 1 ); mThrowsOutOfBounds = tThrowsOutOfBounds = null; try { m.add( p, T ); } catch ( IndexOutOfBoundsException e ) { mThrowsOutOfBounds = e; } try { t.add( p, ( T ) ); } catch ( IndexOutOfBoundsException e ) { tThrowsOutOfBounds = e; } assertTrue( "Error (" + level + "): add() divergence in IndexOutOfBoundsException for index " + p + " for " + T + " (" + mThrowsOutOfBounds + ", " + tThrowsOutOfBounds + ")", ( mThrowsOutOfBounds == null ) == ( tThrowsOutOfBounds == null ) ); p = r.nextInt() % ( 2 * n + 1 ); mThrowsOutOfBounds = tThrowsOutOfBounds = null; try { rm = m.remove( p ); } catch ( IndexOutOfBoundsException e ) { mThrowsOutOfBounds = e; } try { rt = t.remove( p ); } catch ( IndexOutOfBoundsException e ) { tThrowsOutOfBounds = e; } assertTrue( "Error (" + level + "): remove() divergence in IndexOutOfBoundsException for index " + p + " (" + mThrowsOutOfBounds + ", " + tThrowsOutOfBounds + ")", ( mThrowsOutOfBounds == null ) == ( tThrowsOutOfBounds == null ) ); if ( mThrowsOutOfBounds == null ) assertTrue( "Error (" + level + "): divergence in remove() between t and m (" + rt + ", " + rm + ")", rt.equals( ( rm ) ) ); } assertTrue( "Error (" + level + "): ! m.equals( t ) after add/remove", m.equals( t ) ); assertTrue( "Error (" + level + "): ! t.equals( m ) after add/remove", t.equals( m ) ); /* * Now we add random data in m and t using addAll on a collection, checking that the result * is the same. */ for ( int i = 0; i < n; i++ ) { int p = r.nextInt() % ( 2 * n + 1 ); java.util.Collection m1 = new java.util.ArrayList(); int s = r.nextInt( n / 2 + 1 ); for ( int j = 0; j < s; j++ ) m1.add( ( genKey() ) ); mThrowsOutOfBounds = tThrowsOutOfBounds = null; try { m.addAll( p, m1 ); } catch ( IndexOutOfBoundsException e ) { mThrowsOutOfBounds = e; } try { t.addAll( p, m1 ); } catch ( IndexOutOfBoundsException e ) { tThrowsOutOfBounds = e; } assertTrue( "Error (" + level + "): addAll() divergence in IndexOutOfBoundsException for index " + p + " for " + m1 + " (" + mThrowsOutOfBounds + ", " + tThrowsOutOfBounds + ")", ( mThrowsOutOfBounds == null ) == ( tThrowsOutOfBounds == null ) ); assertTrue( "Error (" + level + m + t + "): ! m.equals( t ) after addAll", m.equals( t ) ); assertTrue( "Error (" + level + m + t + "): ! t.equals( m ) after addAll", t.equals( m ) ); } if ( m.size64() > n ) { m.size( n ); while ( t.size() != n ) t.remove( t.size() - 1 ); } /* * Now we add random data in m and t using addAll on a type-specific collection, checking * that the result is the same. 
*/ for ( int i = 0; i < n; i++ ) { int p = r.nextInt() % ( 2 * n + 1 ); ObjectCollection m1 = new ObjectBigArrayBigList(); java.util.Collection t1 = new java.util.ArrayList(); int s = r.nextInt( n / 2 + 1 ); for ( int j = 0; j < s; j++ ) { Object x = genKey(); m1.add( x ); t1.add( ( x ) ); } mThrowsOutOfBounds = tThrowsOutOfBounds = null; try { m.addAll( p, m1 ); } catch ( IndexOutOfBoundsException e ) { mThrowsOutOfBounds = e; } try { t.addAll( p, t1 ); } catch ( IndexOutOfBoundsException e ) { tThrowsOutOfBounds = e; } assertTrue( "Error (" + level + "): polymorphic addAll() divergence in IndexOutOfBoundsException for index " + p + " for " + m1 + " (" + mThrowsOutOfBounds + ", " + tThrowsOutOfBounds + ")", ( mThrowsOutOfBounds == null ) == ( tThrowsOutOfBounds == null ) ); assertTrue( "Error (" + level + m + t + "): ! m.equals( t ) after polymorphic addAll", m.equals( t ) ); assertTrue( "Error (" + level + m + t + "): ! t.equals( m ) after polymorphic addAll", t.equals( m ) ); } if ( m.size64() > n ) { m.size( n ); while ( t.size() != n ) t.remove( t.size() - 1 ); } /* * Now we add random data in m and t using addAll on a list, checking that the result is the * same. */ for ( int i = 0; i < n; i++ ) { int p = r.nextInt() % ( 2 * n + 1 ); ObjectBigList m1 = new ObjectBigArrayBigList(); java.util.Collection t1 = new java.util.ArrayList(); int s = r.nextInt( n / 2 + 1 ); for ( int j = 0; j < s; j++ ) { Object x = genKey(); m1.add( x ); t1.add( ( x ) ); } mThrowsOutOfBounds = tThrowsOutOfBounds = null; try { m.addAll( p, m1 ); } catch ( IndexOutOfBoundsException e ) { mThrowsOutOfBounds = e; } try { t.addAll( p, t1 ); } catch ( IndexOutOfBoundsException e ) { tThrowsOutOfBounds = e; } assertTrue( "Error (" + level + "): list addAll() divergence in IndexOutOfBoundsException for index " + p + " for " + m1 + " (" + mThrowsOutOfBounds + ", " + tThrowsOutOfBounds + ")", ( mThrowsOutOfBounds == null ) == ( tThrowsOutOfBounds == null ) ); assertTrue( "Error (" + level + "): ! m.equals( t ) after list addAll", m.equals( t ) ); assertTrue( "Error (" + level + "): ! t.equals( m ) after list addAll", t.equals( m ) ); } /* Now we check that both sets agree on random keys. For m we use the standard method. */ for ( int i = 0; i < n; i++ ) { int p = r.nextInt() % ( n * 2 ); mThrowsOutOfBounds = tThrowsOutOfBounds = null; try { m.get( p ); } catch ( IndexOutOfBoundsException e ) { mThrowsOutOfBounds = e; } try { t.get( p ); } catch ( IndexOutOfBoundsException e ) { tThrowsOutOfBounds = e; } assertTrue( "Error (" + level + "): get() divergence in IndexOutOfBoundsException for index " + p + " (" + mThrowsOutOfBounds + ", " + tThrowsOutOfBounds + ")", ( mThrowsOutOfBounds == null ) == ( tThrowsOutOfBounds == null ) ); if ( mThrowsOutOfBounds == null ) assertTrue( "Error (" + level + "): m and t differ on position " + p + " (" + m.get( p ) + ", " + t.get( p ) + ")", t.get( p ).equals( m.get( p ) ) ); } /* Now we inquiry about the content with indexOf()/lastIndexOf(). 
*/ for ( int i = 0; i < 10 * n; i++ ) { Object T = genKey(); assertTrue( "Error (" + level + "): indexOf() divergence for " + T + " (" + m.indexOf( ( T ) ) + ", " + t.indexOf( ( T ) ) + ")", m.indexOf( ( T ) ) == t.indexOf( ( T ) ) ); assertTrue( "Error (" + level + "): lastIndexOf() divergence for " + T + " (" + m.lastIndexOf( ( T ) ) + ", " + t.lastIndexOf( ( T ) ) + ")", m.lastIndexOf( ( T ) ) == t.lastIndexOf( ( T ) ) ); assertTrue( "Error (" + level + "): polymorphic indexOf() divergence for " + T + " (" + m.indexOf( T ) + ", " + t.indexOf( ( T ) ) + ")", m.indexOf( T ) == t.indexOf( ( T ) ) ); assertTrue( "Error (" + level + "): polymorphic lastIndexOf() divergence for " + T + " (" + m.lastIndexOf( T ) + ", " + t.lastIndexOf( ( T ) ) + ")", m.lastIndexOf( T ) == t.lastIndexOf( ( T ) ) ); } /* Now we check cloning. */ if ( level == 0 ) { assertTrue( "Error (" + level + "): m does not equal m.clone()", m.equals( ( (ObjectBigArrayBigList)m ).clone() ) ); assertTrue( "Error (" + level + "): m.clone() does not equal m", ( (ObjectBigArrayBigList)m ).clone().equals( m ) ); } /* Now we play with constructors. */ assertTrue( "Error (" + level + "): m does not equal new ( type-specific Collection m )", m.equals( new ObjectBigArrayBigList( (ObjectCollection)m ) ) ); assertTrue( "Error (" + level + "): new ( type-specific nCollection m ) does not equal m", ( new ObjectBigArrayBigList( (ObjectCollection)m ) ).equals( m ) ); assertTrue( "Error (" + level + "): m does not equal new ( type-specific List m )", m.equals( new ObjectBigArrayBigList( m ) ) ); assertTrue( "Error (" + level + "): new ( type-specific List m ) does not equal m", ( new ObjectBigArrayBigList( m ) ).equals( m ) ); assertTrue( "Error (" + level + "): m does not equal new ( m.listIterator() )", m.equals( new ObjectBigArrayBigList( m.listIterator() ) ) ); assertTrue( "Error (" + level + "): new ( m.listIterator() ) does not equal m", ( new ObjectBigArrayBigList( m.listIterator() ) ).equals( m ) ); assertTrue( "Error (" + level + "): m does not equal new ( m.type_specific_iterator() )", m.equals( new ObjectBigArrayBigList( m.iterator() ) ) ); assertTrue( "Error (" + level + "): new ( m.type_specific_iterator() ) does not equal m", ( new ObjectBigArrayBigList( m.iterator() ) ).equals( m ) ); int h = m.hashCode(); /* Now we save and read m. */ ObjectBigList m2 = null; try { java.io.File ff = new java.io.File( "it.unimi.dsi.fastutil.test" ); java.io.OutputStream os = new java.io.FileOutputStream( ff ); java.io.ObjectOutputStream oos = new java.io.ObjectOutputStream( os ); oos.writeObject( m ); oos.close(); java.io.InputStream is = new java.io.FileInputStream( ff ); java.io.ObjectInputStream ois = new java.io.ObjectInputStream( is ); m2 = (ObjectBigList)ois.readObject(); ois.close(); ff.delete(); } catch ( Exception e ) { e.printStackTrace(); System.exit( 1 ); } assertTrue( "Error (" + level + "): hashCode() changed after save/read", m2.hashCode() == h ); /* Now we check that m2 actually holds that data. */ assertTrue( "Error (" + level + "): ! m2.equals( t ) after save/read", m2.equals( t ) ); assertTrue( "Error (" + level + "): ! t.equals( m2 ) after save/read", t.equals( m2 ) ); /* Now we take out of m everything, and check that it is empty. */ for ( Iterator i = t.iterator(); i.hasNext(); ) m2.remove( i.next() ); assertTrue( "Error (" + level + "): m2 is not empty (as it should be)", m2.isEmpty() ); /* Now we play with iterators. 
*/ { ObjectBigListIterator i; ObjectBigListIterator j; i = m.listIterator(); j = t.listIterator(); for ( int k = 0; k < 2 * n; k++ ) { assertTrue( "Error (" + level + "): divergence in hasNext()", i.hasNext() == j.hasNext() ); assertTrue( "Error (" + level + "): divergence in hasPrevious()", i.hasPrevious() == j.hasPrevious() ); if ( r.nextFloat() < .8 && i.hasNext() ) { assertTrue( "Error (" + level + "): divergence in next()", i.next().equals( j.next() ) ); if ( r.nextFloat() < 0.2 ) { i.remove(); j.remove(); } else if ( r.nextFloat() < 0.2 ) { Object T = genKey(); i.set( T ); j.set( ( T ) ); } else if ( r.nextFloat() < 0.2 ) { Object T = genKey(); i.add( T ); j.add( ( T ) ); } } else if ( r.nextFloat() < .2 && i.hasPrevious() ) { assertTrue( "Error (" + level + "): divergence in previous()", i.previous().equals( j.previous() ) ); if ( r.nextFloat() < 0.2 ) { i.remove(); j.remove(); } else if ( r.nextFloat() < 0.2 ) { Object T = genKey(); i.set( T ); j.set( ( T ) ); } else if ( r.nextFloat() < 0.2 ) { Object T = genKey(); i.add( T ); j.add( ( T ) ); } } assertTrue( "Error (" + level + "): divergence in nextIndex()", i.nextIndex() == j.nextIndex() ); assertTrue( "Error (" + level + "): divergence in previousIndex()", i.previousIndex() == j.previousIndex() ); } } { Object I, J; int from = r.nextInt( m.size() + 1 ); ObjectBigListIterator i; ObjectBigListIterator j; i = m.listIterator( from ); j = t.listIterator( from ); for ( int k = 0; k < 2 * n; k++ ) { assertTrue( "Error (" + level + "): divergence in hasNext() (iterator with starting point " + from + ")", i.hasNext() == j.hasNext() ); assertTrue( "Error (" + level + "): divergence in hasPrevious() (iterator with starting point " + from + ")", i.hasPrevious() == j.hasPrevious() ); if ( r.nextFloat() < .8 && i.hasNext() ) { I = i.next(); J = j.next(); assertTrue( "Error (" + level + "): divergence in next() (" + I + ", " + J + ", iterator with starting point " + from + ")", I.equals( J ) ); // System.err.println("Done next " + I + " " + J + " " + badPrevious); if ( r.nextFloat() < 0.2 ) { // System.err.println("Removing in next"); i.remove(); j.remove(); } else if ( r.nextFloat() < 0.2 ) { Object T = genKey(); i.set( T ); j.set( ( T ) ); } else if ( r.nextFloat() < 0.2 ) { Object T = genKey(); i.add( T ); j.add( ( T ) ); } } else if ( r.nextFloat() < .2 && i.hasPrevious() ) { I = i.previous(); J = j.previous(); assertTrue( "Error (" + level + "): divergence in previous() (" + I + ", " + J + ", iterator with starting point " + from + ")", I.equals( J ) ); if ( r.nextFloat() < 0.2 ) { // System.err.println("Removing in prev"); i.remove(); j.remove(); } else if ( r.nextFloat() < 0.2 ) { Object T = genKey(); i.set( T ); j.set( ( T ) ); } else if ( r.nextFloat() < 0.2 ) { Object T = genKey(); i.add( T ); j.add( ( T ) ); } } } } /* Now we check that m actually holds that data. */ assertTrue( "Error (" + level + "): ! m.equals( t ) after iteration", m.equals( t ) ); assertTrue( "Error (" + level + "): ! t.equals( m ) after iteration", t.equals( m ) ); /* Now we select a pair of keys and create a subset. */ if ( !m.isEmpty() ) { int start = r.nextInt( m.size() ); int end = start + r.nextInt( m.size() - start ); // System.err.println("Checking subList from " + start + " to " + end + " (level=" + // (level+1) + ")..." ); testLists( m.subList( start, end ), t.subList( start, end ), n, level + 1 ); assertTrue( "Error (" + level + m + t + "): ! m.equals( t ) after subList", m.equals( t ) ); assertTrue( "Error (" + level + "): ! 
t.equals( m ) after subList", t.equals( m ) ); } m.clear(); t.clear(); assertTrue( "Error (" + level + "): m is not empty after clear()", m.isEmpty() ); } @SuppressWarnings({ "boxing", "unchecked" }) protected static void test( int n ) { ObjectBigArrayBigList m = new ObjectBigArrayBigList(); ObjectBigList t = ObjectBigLists.asBigList( new ObjectArrayList() ); k = new Object[ n ]; nk = new Object[ n ]; kt = new Object[ n ]; nkt = new Object[ n ]; for ( int i = 0; i < n; i++ ) { k[ i ] = kt[ i ] = genKey(); nk[ i ] = nkt[ i ] = genKey(); } /* We add pairs to t. */ for ( int i = 0; i < n; i++ ) t.add( k[ i ] ); /* We add to m the same data */ m.addAll( t ); testLists( m, t, n, 0 ); // This tests all reflection-based methods. m = ObjectBigArrayBigList.wrap( ObjectBigArrays.EMPTY_BIG_ARRAY ); t = ObjectBigLists.asBigList( new ObjectArrayList() ); /* We add pairs to t. */ for ( int i = 0; i < n; i++ ) t.add( k[ i ] ); /* We add to m the same data */ m.addAll( t ); testLists( m, t, n, 0 ); return; } @Test public void test1() { test( 1 ); } @Test public void test10() { test( 10 ); } @Test public void test100() { test( 100 ); } @Ignore("Too long") @Test public void test1000() { test( 1000 ); } } fastutil-7.1.0/test/it/unimi/dsi/fastutil/objects/ObjectBigArraysTest.java0000664000000000000000000000671013050705451025352 0ustar rootrootpackage it.unimi.dsi.fastutil.objects; import static org.junit.Assert.*; import static it.unimi.dsi.fastutil.objects.ObjectBigArrays.set; import static it.unimi.dsi.fastutil.objects.ObjectBigArrays.get; import java.util.Arrays; import org.junit.Test; public class ObjectBigArraysTest { @SuppressWarnings({ "unchecked", "boxing" }) @Test public void testQuickSort() { Integer[] s = new Integer[] { 2, 1, 5, 2, 1, 0, 9, 1, 4, 2, 4, 6, 8, 9, 10, 12, 1, 7 }; Arrays.sort( s ); Integer[][] sorted = ObjectBigArrays.wrap( s.clone() ); Integer[][] a = ObjectBigArrays.wrap( s.clone() ); ObjectBigArrays.quickSort( a ); assertArrayEquals( sorted, a ); ObjectBigArrays.quickSort( a ); assertArrayEquals( sorted, a ); a = ObjectBigArrays.wrap( s.clone() ); ObjectBigArrays.quickSort( a, ObjectComparators.NATURAL_COMPARATOR ); assertArrayEquals( sorted, a ); ObjectBigArrays.quickSort( a, ObjectComparators.NATURAL_COMPARATOR ); assertArrayEquals( sorted, a ); } @SuppressWarnings("boxing") private void testCopy( int n ) { Object[][] a = ObjectBigArrays.newBigArray( n ); for ( int i = 0; i < n; i++ ) set( a, i, i ); ObjectBigArrays.copy( a, 0, a, 1, n - 2 ); assertEquals( 0, a[ 0 ][ 0 ] ); for ( int i = 0; i < n - 2; i++ ) assertEquals( i, get( a, i + 1 ) ); for ( int i = 0; i < n; i++ ) set( a, i, i ); ObjectBigArrays.copy( a, 1, a, 0, n - 1 ); for ( int i = 0; i < n - 1; i++ ) assertEquals( i + 1, get( a, i ) ); for ( int i = 0; i < n; i++ ) set( a, i, i ); Integer[] b = new Integer[ n ]; for ( int i = 0; i < n; i++ ) b[ i ] = i; assertArrayEquals( a, ObjectBigArrays.wrap( b ) ); } @Test public void testCopy10() { testCopy( 10 ); } @Test public void testCopy1000() { testCopy( 1000 ); } @Test public void testCopy1000000() { testCopy( 1000000 ); } @SuppressWarnings({ "boxing", "unchecked" }) @Test public void testBinarySearch() { Integer[] a = new Integer[] { 25, 32, 1, 3, 2, 0, 40, 7, 13, 12, 11, 10, -1, -6, -18, 2000 }; Arrays.sort( a ); Integer[][] b = ObjectBigArrays.wrap( a.clone() ); for( int i = -1; i < 20; i++ ) { assertEquals( String.valueOf(i), Arrays.binarySearch( a, i ), ObjectBigArrays.binarySearch( b, i ) ); assertEquals( String.valueOf(i), Arrays.binarySearch( a, i ), 
ObjectBigArrays.binarySearch( b, i, ObjectComparators.NATURAL_COMPARATOR ) ); } for( int i = -1; i < 20; i++ ) { assertEquals( Arrays.binarySearch( a, 5, 13, i ), ObjectBigArrays.binarySearch( b, 5, 13, i ) ); assertEquals( Arrays.binarySearch( a, 5, 13, i ), ObjectBigArrays.binarySearch( b, 5, 13, i, ObjectComparators.NATURAL_COMPARATOR ) ); } } @SuppressWarnings("boxing") @Test public void testTrim() { Integer[] a = new Integer[] { 2, 1, 5, 2, 1, 0, 9, 1, 4, 2, 4, 6, 8, 9, 10, 12, 1, 7 }; Integer[][] b = ObjectBigArrays.wrap( a.clone() ); for( int i = a.length; i-- != 0; ) { Integer[][] t = ObjectBigArrays.trim( b, i ); final long l = ObjectBigArrays.length( t ); assertEquals( i, l ); for( int p = 0; p < l; p++ ) assertEquals( a[ p ], ObjectBigArrays.get( t, p ) ); } } @SuppressWarnings("boxing") @Test public void testEquals() { Integer[] a = new Integer[] { 2, 1, 5, 2, 1, 0, 9, 1, 4, 2, 4, 6, 8, 9, 10, 12, 1, 7 }; Integer[][] b = ObjectBigArrays.wrap( a.clone() ); Integer[][] c = ObjectBigArrays.wrap( a.clone() ); assertTrue( ObjectBigArrays.equals( b, c ) ); b[ 0 ][ 0 ] = 0; assertFalse( ObjectBigArrays.equals( b, c ) ); } }fastutil-7.1.0/test/it/unimi/dsi/fastutil/objects/ObjectHeapPriorityQueueTest.java0000664000000000000000000000157213050705451027114 0ustar rootrootpackage it.unimi.dsi.fastutil.objects; import static org.junit.Assert.assertEquals; import it.unimi.dsi.fastutil.io.BinIO; import java.io.File; import java.io.IOException; import org.junit.Test; public class ObjectHeapPriorityQueueTest { @SuppressWarnings({ "unchecked", "boxing" }) @Test public void testSerialize() throws IOException, ClassNotFoundException { ObjectHeapPriorityQueue q = new ObjectHeapPriorityQueue(); for( int i = 0; i < 100; i++ ) q.enqueue( i ); File file = File.createTempFile( getClass().getPackage().getName() + "-", "-tmp" ); file.deleteOnExit(); BinIO.storeObject( q, file ); ObjectHeapPriorityQueue r = (ObjectHeapPriorityQueue)BinIO.loadObject( file ); file.delete(); for( int i = 0; i < 100; i++ ) { assertEquals( q.first(), r.first() ); assertEquals( q.dequeue(), r.dequeue() ); } } } fastutil-7.1.0/test/it/unimi/dsi/fastutil/objects/ObjectOpenCustomHashSetTest.java0000664000000000000000000000235213050705451027041 0ustar rootrootpackage it.unimi.dsi.fastutil.objects; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; import java.util.Random; import it.unimi.dsi.fastutil.Hash; import it.unimi.dsi.fastutil.bytes.ByteArrays; import org.junit.Test; public class ObjectOpenCustomHashSetTest { @Test public void testGetNullKey() { final ObjectOpenCustomHashSet s = new ObjectOpenCustomHashSet( new Hash.Strategy() { @Override public int hashCode( Integer o ) { return o == null ? 
0 : o.intValue() % 10; } @Override public boolean equals( Integer a, Integer b ) { if ( ( ( a == null ) != ( b == null ) ) || a == null ) return false; return ( (a.intValue() - b.intValue()) % 10 ) == 0; } }); s.add( Integer.valueOf( 10 ) ); assertTrue( s.contains( Integer.valueOf( 0 ) ) ); assertEquals( 10, s.iterator().next().intValue() ); } @Test public void testNullKey() { Random random = new Random(0); ObjectOpenCustomHashSet s = new ObjectOpenCustomHashSet(ByteArrays.HASH_STRATEGY); for(int i = 0; i < 1000000; i++) { byte[] a = new byte[random.nextInt(10)]; for(int j = a.length; j-- != 0; ) a[j] = (byte) random.nextInt(4); s.add(a); } } } fastutil-7.1.0/test/it/unimi/dsi/fastutil/objects/ObjectOpenHashBigSetTest.java0000664000000000000000000002544013050705451026273 0ustar rootrootpackage it.unimi.dsi.fastutil.objects; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertSame; import static org.junit.Assert.assertTrue; import it.unimi.dsi.fastutil.Hash; import java.io.IOException; import org.junit.Ignore; import org.junit.Test; @SuppressWarnings("rawtypes") public class ObjectOpenHashBigSetTest { @Test @SuppressWarnings("boxing") public void testStrangeRetainAllCase() { ObjectArrayList initialElements = ObjectArrayList.wrap(new Integer[] { 586, 940, 1086, 1110, 1168, 1184, 1185, 1191, 1196, 1229, 1237, 1241, 1277, 1282, 1284, 1299, 1308, 1309, 1310, 1314, 1328, 1360, 1366, 1370, 1378, 1388, 1392, 1402, 1406, 1411, 1426, 1437, 1455, 1476, 1489, 1513, 1533, 1538, 1540, 1541, 1543, 1547, 1548, 1551, 1557, 1568, 1575, 1577, 1582, 1583, 1584, 1588, 1591, 1592, 1601, 1610, 1618, 1620, 1633, 1635, 1653, 1654, 1655, 1660, 1661, 1665, 1674, 1686, 1688, 1693, 1700, 1705, 1717, 1720, 1732, 1739, 1740, 1745, 1746, 1752, 1754, 1756, 1765, 1766, 1767, 1771, 1772, 1781, 1789, 1790, 1793, 1801, 1806, 1823, 1825, 1827, 1828, 1829, 1831, 1832, 1837, 1839, 1844, 2962, 2969, 2974, 2990, 3019, 3023, 3029, 3030, 3052, 3072, 3074, 3075, 3093, 3109, 3110, 3115, 3116, 3125, 3137, 3142, 3156, 3160, 3176, 3180, 3188, 3193, 3198, 3207, 3209, 3210, 3213, 3214, 3221, 3225, 3230, 3231, 3236, 3240, 3247, 3261, 4824, 4825, 4834, 4845, 4852, 4858, 4859, 4867, 4871, 4883, 4886, 4887, 4905, 4907, 4911, 4920, 4923, 4924, 4925, 4934, 4942, 4953, 4957, 4965, 4973, 4976, 4980, 4982, 4990, 4993, 6938, 6949, 6953, 7010, 7012, 7034, 7037, 7049, 7076, 7094, 7379, 7384, 7388, 7394, 7414, 7419, 7458, 7459, 7466, 7467 }); ObjectArrayList retainElements = ObjectArrayList.wrap(new Integer[] { 586 }); // Initialize both implementations with the same data ObjectOpenHashBigSet instance = new ObjectOpenHashBigSet(initialElements); ObjectRBTreeSet referenceInstance = new ObjectRBTreeSet(initialElements); instance.retainAll(retainElements); referenceInstance.retainAll(retainElements); // print the correct result {586} // System.out.println("ref: " + referenceInstance); // prints {586, 7379}, which is clearly wrong // System.out.println("ohm: " + instance); // Fails assertEquals( referenceInstance, instance ); } private static java.util.Random r = new java.util.Random( 0 ); private static Object genKey() { return Integer.toBinaryString( r.nextInt() ); } private static void checkTable( ObjectOpenHashBigSet s ) { final Object[][] key = s.key; assert ( s.n & -s.n ) == s.n : "Table length is not a power of two: " + s.n; assert s.n == ObjectBigArrays.length( key ); long n = s.n; while ( n-- != 0 ) if ( ObjectBigArrays.get( key, n ) != null && !s.contains( 
ObjectBigArrays.get( key, n ) ) ) throw new AssertionError( "Hash table has key " + ObjectBigArrays.get( key, n ) + " marked as occupied, but the key does not belong to the table" ); java.util.HashSet t = new java.util.HashSet(); for ( long i = s.size64(); i-- != 0; ) if ( ObjectBigArrays.get( key, i ) != null && !t.add( ObjectBigArrays.get( key, i ) ) ) throw new AssertionError( "Key " + ObjectBigArrays.get( key, i ) + " appears twice" ); } private static void printProbes( ObjectOpenHashBigSet m ) { long totProbes = 0; double totSquareProbes = 0; long maxProbes = 0; final double f = (double)m.size / m.n; for ( long i = 0, c = 0; i < m.n; i++ ) { if ( ObjectBigArrays.get( m.key, i ) != null ) c++; else { if ( c != 0 ) { final long p = ( c + 1 ) * ( c + 2 ) / 2; totProbes += p; totSquareProbes += (double)p * p; } maxProbes = Math.max( c, maxProbes ); c = 0; totProbes++; totSquareProbes++; } } final double expected = (double)totProbes / m.n; System.err.println( "Expected probes: " + ( 3 * Math.sqrt( 3 ) * ( f / ( ( 1 - f ) * ( 1 - f ) ) ) + 4 / ( 9 * f ) - 1 ) + "; actual: " + expected + "; stddev: " + Math.sqrt( totSquareProbes / m.n - expected * expected ) + "; max probes: " + maxProbes ); } @SuppressWarnings("unchecked") private static void test( int n, float f ) throws IOException, ClassNotFoundException { int c; ObjectOpenHashBigSet m = new ObjectOpenHashBigSet( Hash.DEFAULT_INITIAL_SIZE, f ); java.util.Set t = new java.util.HashSet(); /* First of all, we fill t with random data. */ for ( int i = 0; i < f * n; i++ ) t.add( ( genKey() ) ); /* Now we add to m the same data */ m.addAll( t ); assertTrue( "Error: !m.equals(t) after insertion", m.equals( t ) ); assertTrue( "Error: !t.equals(m) after insertion", t.equals( m ) ); printProbes( m ); checkTable( m ); /* Now we check that m actually holds that data. */ for ( java.util.Iterator i = t.iterator(); i.hasNext(); ) { Object e = i.next(); assertTrue( "Error: m and t differ on a key (" + e + ") after insertion (iterating on t)", m.contains( e ) ); } /* Now we check that m actually holds that data, but iterating on m. */ c = 0; for ( java.util.Iterator i = m.iterator(); i.hasNext(); ) { Object e = i.next(); c++; assertTrue( "Error: m and t differ on a key (" + e + ") after insertion (iterating on m)", t.contains( e ) ); } assertEquals( "Error: m has only " + c + " keys instead of " + t.size() + " after insertion (iterating on m)", c, t.size() ); /* * Now we check that inquiries about random data give the same answer in m and t. For m we * use the polymorphic method. */ for ( int i = 0; i < n; i++ ) { Object T = genKey(); assertTrue( "Error: divergence in keys between t and m (polymorphic method)", m.contains( T ) == t.contains( ( T ) ) ); } /* * Again, we check that inquiries about random data give the same answer in m and t, but for * m we use the standard method. */ for ( int i = 0; i < n; i++ ) { Object T = genKey(); assertTrue( "Error: divergence between t and m (standard method)", m.contains( ( T ) ) == t.contains( ( T ) ) ); } /* * Check that addOrGet does indeed return the original instance, not a copy */ for ( java.util.Iterator i = m.iterator(); i.hasNext(); ) { Object e = i.next(); Object e2 = m.addOrGet( new StringBuilder((String)e).toString() ); // Make a new object! 
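/* The probe key is rebuilt through a StringBuilder so that it is equal to, but not the same instance as, the element already stored in the set: addOrGet() is expected to hand back the stored (canonical) instance rather than the probe, which is what the identity comparison below verifies. */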
assertTrue( "addOrGet does not return the same object", e == e2 /* NOT just equals, but identity */ ); } /* This should not have modified the table */ assertTrue( "Error: !m.equals(t) after addOrGet no-op", m.equals( t ) ); assertTrue( "Error: !t.equals(m) after addOrGet no-op", t.equals( m ) ); /* Now we put and remove random data in m and t, checking that the result is the same. */ for ( int i = 0; i < 20 * n; i++ ) { Object T = genKey(); assertTrue( "Error: divergence in add() between t and m", m.add( ( T ) ) == t.add( ( T ) ) ); T = genKey(); assertTrue( "Error: divergence in remove() between t and m", m.remove( ( T ) ) == t.remove( ( T ) ) ); } assertTrue( "Error: !m.equals(t) after removal", m.equals( t ) ); assertTrue( "Error: !t.equals(m) after removal", t.equals( m ) ); checkTable( m ); printProbes( m ); /* * Now we check that m actually holds that data. */ for ( java.util.Iterator i = t.iterator(); i.hasNext(); ) { Object e = i.next(); assertTrue( "Error: m and t differ on a key (" + e + ") after removal (iterating on t)", m.contains( e ) ); } /* Now we check that m actually holds that data, but iterating on m. */ for ( java.util.Iterator i = m.iterator(); i.hasNext(); ) { Object e = i.next(); assertTrue( "Error: m and t differ on a key (" + e + ") after removal (iterating on m)", t.contains( e ) ); } /* Now we make m into an array, make it again a set and check it is OK. */ Object a[] = m.toArray(); assertTrue( "Error: toArray() output (or array-based constructor) is not OK", new ObjectOpenHashBigSet( a ).equals( m ) ); /* Now we check cloning. */ assertTrue( "Error: m does not equal m.clone()", m.equals( m.clone() ) ); assertTrue( "Error: m.clone() does not equal m", m.clone().equals( m ) ); int h = m.hashCode(); /* Now we save and read m. */ java.io.File ff = new java.io.File( "it.unimi.dsi.fastutil.test" ); java.io.OutputStream os = new java.io.FileOutputStream( ff ); java.io.ObjectOutputStream oos = new java.io.ObjectOutputStream( os ); oos.writeObject( m ); oos.close(); java.io.InputStream is = new java.io.FileInputStream( ff ); java.io.ObjectInputStream ois = new java.io.ObjectInputStream( is ); m = (ObjectOpenHashBigSet)ois.readObject(); ois.close(); ff.delete(); assertEquals( "Error: hashCode() changed after save/read", h, m.hashCode() ); checkTable( m ); printProbes( m ); /* Now we check that m actually holds that data, but iterating on m. */ for ( java.util.Iterator i = m.iterator(); i.hasNext(); ) { Object e = i.next(); assertTrue( "Error: m and t differ on a key (" + e + ") after save/read", t.contains( e ) ); } /* Now we put and remove random data in m and t, checking that the result is the same. */ for ( int i = 0; i < 20 * n; i++ ) { Object T = genKey(); assertTrue( "Error: divergence in add() between t and m after save/read", m.add( ( T ) ) == t.add( ( T ) ) ); T = genKey(); assertTrue( "Error: divergence in remove() between t and m after save/read", m.remove( ( T ) ) == t.remove( ( T ) ) ); } assertTrue( "Error: !m.equals(t) after post-save/read removal", m.equals( t ) ); assertTrue( "Error: !t.equals(m) after post-save/read removal", t.equals( m ) ); /* * Now we take out of m everything , and check that it is empty. 
*/ for ( java.util.Iterator i = m.iterator(); i.hasNext(); ) { i.next(); i.remove(); } assertTrue( "Error: m is not empty (as it should be)", m.isEmpty() ); return; } @Test public void test1() throws IOException, ClassNotFoundException { test( 1, Hash.DEFAULT_LOAD_FACTOR ); test( 1, Hash.FAST_LOAD_FACTOR ); test( 1, Hash.VERY_FAST_LOAD_FACTOR ); } @Test public void test10() throws IOException, ClassNotFoundException { test( 10, Hash.DEFAULT_LOAD_FACTOR ); test( 10, Hash.FAST_LOAD_FACTOR ); test( 10, Hash.VERY_FAST_LOAD_FACTOR ); } @Test public void test100() throws IOException, ClassNotFoundException { test( 100, Hash.DEFAULT_LOAD_FACTOR ); test( 100, Hash.FAST_LOAD_FACTOR ); test( 100, Hash.VERY_FAST_LOAD_FACTOR ); } @Ignore("Too long") @Test public void test1000() throws IOException, ClassNotFoundException { test( 1000, Hash.DEFAULT_LOAD_FACTOR ); test( 1000, Hash.FAST_LOAD_FACTOR ); test( 1000, Hash.VERY_FAST_LOAD_FACTOR ); } @Test public void testGet() { final ObjectOpenHashBigSet s = new ObjectOpenHashBigSet(); String a = "a"; assertTrue( s.add( a ) ); assertSame( a, s.get( "a" ) ); assertNull( s.get( "b" ) ); } } fastutil-7.1.0/test/it/unimi/dsi/fastutil/objects/ObjectOpenHashSetTest.java0000664000000000000000000002540213050705451025647 0ustar rootrootpackage it.unimi.dsi.fastutil.objects; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertSame; import static org.junit.Assert.assertTrue; import it.unimi.dsi.fastutil.Hash; import org.junit.Ignore; import org.junit.Test; @SuppressWarnings("rawtypes") public class ObjectOpenHashSetTest { @Test @SuppressWarnings("boxing") public void testStrangeRetainAllCase() { ObjectArrayList initialElements = ObjectArrayList.wrap(new Integer[] { 586, 940, 1086, 1110, 1168, 1184, 1185, 1191, 1196, 1229, 1237, 1241, 1277, 1282, 1284, 1299, 1308, 1309, 1310, 1314, 1328, 1360, 1366, 1370, 1378, 1388, 1392, 1402, 1406, 1411, 1426, 1437, 1455, 1476, 1489, 1513, 1533, 1538, 1540, 1541, 1543, 1547, 1548, 1551, 1557, 1568, 1575, 1577, 1582, 1583, 1584, 1588, 1591, 1592, 1601, 1610, 1618, 1620, 1633, 1635, 1653, 1654, 1655, 1660, 1661, 1665, 1674, 1686, 1688, 1693, 1700, 1705, 1717, 1720, 1732, 1739, 1740, 1745, 1746, 1752, 1754, 1756, 1765, 1766, 1767, 1771, 1772, 1781, 1789, 1790, 1793, 1801, 1806, 1823, 1825, 1827, 1828, 1829, 1831, 1832, 1837, 1839, 1844, 2962, 2969, 2974, 2990, 3019, 3023, 3029, 3030, 3052, 3072, 3074, 3075, 3093, 3109, 3110, 3115, 3116, 3125, 3137, 3142, 3156, 3160, 3176, 3180, 3188, 3193, 3198, 3207, 3209, 3210, 3213, 3214, 3221, 3225, 3230, 3231, 3236, 3240, 3247, 3261, 4824, 4825, 4834, 4845, 4852, 4858, 4859, 4867, 4871, 4883, 4886, 4887, 4905, 4907, 4911, 4920, 4923, 4924, 4925, 4934, 4942, 4953, 4957, 4965, 4973, 4976, 4980, 4982, 4990, 4993, 6938, 6949, 6953, 7010, 7012, 7034, 7037, 7049, 7076, 7094, 7379, 7384, 7388, 7394, 7414, 7419, 7458, 7459, 7466, 7467 }); ObjectArrayList retainElements = ObjectArrayList.wrap(new Integer[] { 586 }); // Initialize both implementations with the same data ObjectOpenHashSet instance = new ObjectOpenHashSet(initialElements); ObjectRBTreeSet referenceInstance = new ObjectRBTreeSet(initialElements); instance.retainAll(retainElements); referenceInstance.retainAll(retainElements); // print the correct result {586} // System.out.println("ref: " + referenceInstance); // prints {586, 7379}, which is clearly wrong // System.out.println("ohm: " + instance); // Fails assertEquals( referenceInstance, instance ); } private static 
java.util.Random r = new java.util.Random( 0 ); private static Object genKey() { return Integer.toBinaryString( r.nextInt() ); } private static void checkTable( ObjectOpenHashSet s ) { final Object[] key = s.key; assert ( s.n & -s.n ) == s.n : "Table length is not a power of two: " + s.n; assert s.n == key.length - 1; int n = s.n; while ( n-- != 0 ) if ( key[ n ] != null && !s.contains( key[ n ] ) ) throw new AssertionError( "Hash table has key " + key[ n ] + " marked as occupied, but the key does not belong to the table" ); if ( s.containsNull && ! s.contains( null ) ) throw new AssertionError( "Hash table should contain null by internal state, but it doesn't when queried" ); if ( ! s.containsNull && s.contains( null ) ) throw new AssertionError( "Hash table should not contain null by internal state, but it does when queried" ); java.util.HashSet t = new java.util.HashSet(); for ( int i = s.size(); i-- != 0; ) if ( key[ i ] != null && !t.add( (String)key[ i ] ) ) throw new AssertionError( "Key " + key[ i ] + " appears twice" ); } private static void printProbes( ObjectOpenHashSet m ) { long totProbes = 0; double totSquareProbes = 0; int maxProbes = 0; final Object[] key = m.key; final double f = (double)m.size / m.n; for ( int i = 0, c = 0; i < m.n; i++ ) { if ( key[ i ] != null ) c++; else { if ( c != 0 ) { final long p = ( c + 1 ) * ( c + 2 ) / 2; totProbes += p; totSquareProbes += (double)p * p; } maxProbes = Math.max( c, maxProbes ); c = 0; totProbes++; totSquareProbes++; } } final double expected = (double)totProbes / m.n; System.err.println( "Expected probes: " + ( 3 * Math.sqrt( 3 ) * ( f / ( ( 1 - f ) * ( 1 - f ) ) ) + 4 / ( 9 * f ) - 1 ) + "; actual: " + expected + "; stddev: " + Math.sqrt( totSquareProbes / m.n - expected * expected ) + "; max probes: " + maxProbes ); } @SuppressWarnings("unchecked") private static void test( int n, float f ) { int c; ObjectOpenHashSet m = new ObjectOpenHashSet( Hash.DEFAULT_INITIAL_SIZE, f ); java.util.Set t = new java.util.HashSet(); /* First of all, we fill t with random data. */ for ( int i = 0; i < f * n; i++ ) t.add( ( genKey() ) ); /* Now we add to m the same data */ m.addAll( t ); assertTrue( "Error: !m.equals(t) after insertion", m.equals( t ) ); assertTrue( "Error: !t.equals(m) after insertion", t.equals( m ) ); printProbes( m ); checkTable( m ); /* Now we check that m actually holds that data. */ for ( java.util.Iterator i = t.iterator(); i.hasNext(); ) { Object e = i.next(); assertTrue( "Error: m and t differ on a key (" + e + ") after insertion (iterating on t)", m.contains( e ) ); } /* Now we check that m actually holds that data, but iterating on m. */ c = 0; for ( java.util.Iterator i = m.iterator(); i.hasNext(); ) { Object e = i.next(); c++; assertTrue( "Error: m and t differ on a key (" + e + ") after insertion (iterating on m)", t.contains( e ) ); } assertEquals( "Error: m has only " + c + " keys instead of " + t.size() + " after insertion (iterating on m)", c, t.size() ); /* * Now we check that inquiries about random data give the same answer in m and t. For m we * use the polymorphic method. */ for ( int i = 0; i < n; i++ ) { Object T = genKey(); assertTrue( "Error: divergence in keys between t and m (polymorphic method)", m.contains( T ) == t.contains( ( T ) ) ); } /* * Again, we check that inquiries about random data give the same answer in m and t, but for * m we use the standard method. 
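* (In this Object-keyed instantiation both variants resolve to the same contains(Object) implementation; the distinction between the polymorphic and the standard method matters in the primitive type-specific classes generated from the same code template, where the former takes an unboxed key.)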
*/ for ( int i = 0; i < n; i++ ) { Object T = genKey(); assertTrue( "Error: divergence between t and m (standard method)", m.contains( ( T ) ) == t.contains( ( T ) ) ); } /* * Check that addOrGet does indeed return the original instance, not a copy */ for ( java.util.Iterator i = m.iterator(); i.hasNext(); ) { Object e = i.next(); Object e2 = m.addOrGet( new StringBuilder((String)e).toString() ); // Make a new object! assertTrue( "addOrGet does not return the same object", e == e2 ); } /* This should not have modified the table */ assertTrue( "Error: !m.equals(t) after addOrGet no-op", m.equals( t ) ); assertTrue( "Error: !t.equals(m) after addOrGet no-op", t.equals( m ) ); /* Now we put and remove random data in m and t, checking that the result is the same. */ for ( int i = 0; i < 20 * n; i++ ) { Object T = genKey(); assertTrue( "Error: divergence in add() between t and m", m.add( ( T ) ) == t.add( ( T ) ) ); T = genKey(); assertTrue( "Error: divergence in remove() between t and m", m.remove( ( T ) ) == t.remove( ( T ) ) ); } assertTrue( "Error: !m.equals(t) after removal", m.equals( t ) ); assertTrue( "Error: !t.equals(m) after removal", t.equals( m ) ); checkTable( m ); printProbes( m ); /* * Now we check that m actually holds that data. */ for ( java.util.Iterator i = t.iterator(); i.hasNext(); ) { Object e = i.next(); assertTrue( "Error: m and t differ on a key (" + e + ") after removal (iterating on t)", m.contains( e ) ); } /* Now we check that m actually holds that data, but iterating on m. */ for ( java.util.Iterator i = m.iterator(); i.hasNext(); ) { Object e = i.next(); assertTrue( "Error: m and t differ on a key (" + e + ") after removal (iterating on m)", t.contains( e ) ); } /* Now we make m into an array, make it again a set and check it is OK. */ Object a[] = m.toArray(); assertTrue( "Error: toArray() output (or array-based constructor) is not OK", new ObjectOpenHashSet( a ).equals( m ) ); /* Now we check cloning. */ assertTrue( "Error: m does not equal m.clone()", m.equals( m.clone() ) ); assertTrue( "Error: m.clone() does not equal m", m.clone().equals( m ) ); int h = m.hashCode(); /* Now we save and read m. */ try { java.io.File ff = new java.io.File( "it.unimi.dsi.fastutil.test" ); java.io.OutputStream os = new java.io.FileOutputStream( ff ); java.io.ObjectOutputStream oos = new java.io.ObjectOutputStream( os ); oos.writeObject( m ); oos.close(); java.io.InputStream is = new java.io.FileInputStream( ff ); java.io.ObjectInputStream ois = new java.io.ObjectInputStream( is ); m = (ObjectOpenHashSet)ois.readObject(); ois.close(); ff.delete(); } catch ( Exception e ) { e.printStackTrace(); System.exit( 1 ); } assertEquals( "Error: hashCode() changed after save/read", h, m.hashCode() ); checkTable( m ); printProbes( m ); /* Now we check that m actually holds that data, but iterating on m. */ for ( java.util.Iterator i = m.iterator(); i.hasNext(); ) { Object e = i.next(); assertTrue( "Error: m and t differ on a key (" + e + ") after save/read", t.contains( e ) ); } /* Now we put and remove random data in m and t, checking that the result is the same. 
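* This second round of random add()/remove() churn runs on the deserialized instance, so it also verifies that the table rebuilt by readObject() keeps agreeing with the java.util.HashSet reference.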
*/ for ( int i = 0; i < 20 * n; i++ ) { Object T = genKey(); assertTrue( "Error: divergence in add() between t and m after save/read", m.add( ( T ) ) == t.add( ( T ) ) ); T = genKey(); assertTrue( "Error: divergence in remove() between t and m after save/read", m.remove( ( T ) ) == t.remove( ( T ) ) ); } assertTrue( "Error: !m.equals(t) after post-save/read removal", m.equals( t ) ); assertTrue( "Error: !t.equals(m) after post-save/read removal", t.equals( m ) ); /* * Now we take out of m everything , and check that it is empty. */ for ( java.util.Iterator i = m.iterator(); i.hasNext(); ) { i.next(); i.remove(); } assertTrue( "Error: m is not empty (as it should be)", m.isEmpty() ); return; } @Test public void test1() { test( 1, Hash.DEFAULT_LOAD_FACTOR ); test( 1, Hash.FAST_LOAD_FACTOR ); test( 1, Hash.VERY_FAST_LOAD_FACTOR ); } @Test public void test10() { test( 10, Hash.DEFAULT_LOAD_FACTOR ); test( 10, Hash.FAST_LOAD_FACTOR ); test( 10, Hash.VERY_FAST_LOAD_FACTOR ); } @Test public void test100() { test( 100, Hash.DEFAULT_LOAD_FACTOR ); test( 100, Hash.FAST_LOAD_FACTOR ); test( 100, Hash.VERY_FAST_LOAD_FACTOR ); } @Ignore("Too long") @Test public void test1000() { test( 1000, Hash.DEFAULT_LOAD_FACTOR ); test( 1000, Hash.FAST_LOAD_FACTOR ); test( 1000, Hash.VERY_FAST_LOAD_FACTOR ); } @Test public void testGet() { final ObjectOpenHashSet s = new ObjectOpenHashSet(); String a = "a"; assertTrue( s.add( a ) ); assertSame( a, s.get( "a" ) ); assertNull( s.get( "b" ) ); } } fastutil-7.1.0/test/it/unimi/dsi/fastutil/objects/ObjectRBTreeSetTest.java0000664000000000000000000000254713050705451025272 0ustar rootrootpackage it.unimi.dsi.fastutil.objects; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertSame; import it.unimi.dsi.fastutil.ints.Int2IntRBTreeMap; import org.junit.Test; public class ObjectRBTreeSetTest { @Test public void testGet() { ObjectRBTreeSet s = new ObjectRBTreeSet(); Integer o = new Integer( 0 ); s.add( o ); assertSame( o, s.get( new Integer( 0 ) ) ); } @Test public void testAddTo() { Int2IntRBTreeMap a = new Int2IntRBTreeMap(); Int2IntRBTreeMap b = new Int2IntRBTreeMap(); // test addTo with empty map a.addTo(0, 1); // 0 -> 1 assertEquals(1, a.get(0)); // test addTo with empty map and weird defaultReturnValue b.defaultReturnValue(100); a.addTo(0, 0); // 0 -> 100 assertEquals(100, b.get(0)); // test addTo with existing values a.addTo(0, 1); // 0 -> 2 b.addTo(0, -100); // 0 -> 0 assertEquals(2, a.get(0)); assertEquals(0, b.get(0)); // test addTo with overflow values a.put(0, Integer.MAX_VALUE); a.addTo(0, 1); // 0 -> MIN_VALUE assertEquals(Integer.MIN_VALUE, a.get(0)); // test various addTo operations a.put(0, 0); a.put(1, 1); a.put(2, 2); a.addTo(0, 10); // 0 -> 10 a.addTo(1, 9); // 1 -> 10 a.addTo(2, 8); // 2 -> 10 assertEquals(10, a.get(0)); assertEquals(10, a.get(1)); assertEquals(10, a.get(2)); } } fastutil-7.1.0/test/it/unimi/dsi/fastutil/objects/ObjectSetsTest.java0000664000000000000000000000053213050705451024401 0ustar rootrootpackage it.unimi.dsi.fastutil.objects; import static org.junit.Assert.assertNull; import org.junit.Test; public class ObjectSetsTest { @Test public void testToArrayShouldNullElementAfterLastEntry() { ObjectSet set = ObjectSets.EMPTY_SET; Object[] values = new Object[] { "test" }; set.toArray(values); assertNull(values[0]); } } fastutil-7.1.0/test/it/unimi/dsi/fastutil/objects/Reference2ReferenceArrayMapTest.java0000664000000000000000000001050413050705451027570 0ustar rootrootpackage it.unimi.dsi.fastutil.objects; 
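/* Reference2Reference maps compare keys and values by reference equality (==) rather than by equals(); the assertions in testMap() below rely on this distinction between the cached Integer instances and freshly allocated ones with the same value. A minimal illustration (not part of the original test, shown here only as a sketch):
 *
 *   Reference2ReferenceArrayMap m = new Reference2ReferenceArrayMap();
 *   Integer k = new Integer( 1 );
 *   m.put( k, k );
 *   m.containsKey( k );                // true: same instance
 *   m.containsKey( new Integer( 1 ) ); // false: equal value, different instance
 */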
import it.unimi.dsi.fastutil.io.BinIO; import it.unimi.dsi.fastutil.objects.AbstractReference2ReferenceMap; import it.unimi.dsi.fastutil.objects.Object2ObjectArrayMap; import it.unimi.dsi.fastutil.objects.Reference2ReferenceArrayMap; import it.unimi.dsi.fastutil.objects.ReferenceOpenHashSet; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.ObjectOutputStream; import java.util.Map.Entry; import org.junit.Test; import static org.junit.Assert.*; public class Reference2ReferenceArrayMapTest { @Test public void testMap() { for( int i = 0; i <= 2; i++ ) { final Reference2ReferenceArrayMap m = i == 0 ? new Reference2ReferenceArrayMap() : new Reference2ReferenceArrayMap( i ); Integer one = new Integer( 1 ), two = new Integer( 2 ), three = new Integer( 3 ); assertEquals( null, m.put( one, one ) ); assertEquals( 1, m.size() ); assertTrue( m.containsKey( one ) ); assertTrue( m.containsValue( one ) ); assertEquals( null, m.put( two, two ) ); assertTrue( m.containsKey( two ) ); assertTrue( m.containsValue( two ) ); assertEquals( 2, m.size() ); assertEquals( one, m.put( one, three ) ); assertTrue( m.containsValue( three ) ); assertEquals( null, m.remove( three ) ); assertEquals( null, m.put( three, three ) ); assertTrue( m.containsKey( three ) ); assertEquals( new ReferenceOpenHashSet( new Object[] { one, two, three } ), new ReferenceOpenHashSet( m.keySet().iterator() ) ); assertEquals( new ReferenceOpenHashSet( new Object[] { three, two, three } ), new ReferenceOpenHashSet( m.values().iterator() ) ); for( Entry e: m.entrySet() ) assertEquals( e.getValue(), m.get( e.getKey() ) ); assertTrue( m.entrySet().contains( new AbstractReference2ReferenceMap.BasicEntry( one, three ) ) ); assertFalse( m.entrySet().contains( new AbstractReference2ReferenceMap.BasicEntry( one, new Integer( 3 ) ) ) ); assertFalse( m.entrySet().contains( new AbstractReference2ReferenceMap.BasicEntry( new Integer( 1 ), three ) ) ); assertTrue( m.entrySet().contains( new AbstractReference2ReferenceMap.BasicEntry( two, two ) ) ); assertFalse( m.entrySet().contains( new AbstractReference2ReferenceMap.BasicEntry( one, two ) ) ); assertFalse( m.entrySet().contains( new AbstractReference2ReferenceMap.BasicEntry( two, one ) ) ); assertTrue( m.entrySet().contains( new AbstractReference2ReferenceMap.BasicEntry( three, three ) ) ); assertFalse( m.entrySet().contains( new AbstractReference2ReferenceMap.BasicEntry( new Integer( 3 ), two ) ) ); assertEquals( three, m.remove( three ) ); assertEquals( 2, m.size() ); assertEquals( three, m.remove( one ) ); assertEquals( 1, m.size() ); assertFalse( m.containsKey( one ) ); assertEquals( two, m.remove( two ) ); assertEquals( 0, m.size() ); assertFalse( m.containsKey( one ) ); } } @Test public void testClone() { Reference2ReferenceArrayMap m = new Reference2ReferenceArrayMap(); assertEquals( m, m.clone() ); m.put( new Integer( 0 ), new Integer( 1 ) ); assertEquals( m, m.clone() ); m.put( new Integer( 0 ), new Integer( 2 ) ); assertEquals( m, m.clone() ); Integer one; m.put( one = new Integer( 1 ), new Integer( 2 ) ); assertEquals( m, m.clone() ); m.remove( one ); assertEquals( m, m.clone() ); } @Test public void testSerialisation() throws IOException, ClassNotFoundException { // We can't really test reference maps as equals() doesnt' work Object2ObjectArrayMap m = new Object2ObjectArrayMap(); ByteArrayOutputStream baos = new ByteArrayOutputStream(); ObjectOutputStream oos = new ObjectOutputStream( baos ); oos.writeObject( m ); 
oos.close(); assertEquals( m, BinIO.loadObject( new ByteArrayInputStream( baos.toByteArray() ) ) ); m.put( new Integer( 0 ), new Integer( 1 ) ); m.put( new Integer( 1 ), new Integer( 2 ) ); baos.reset(); oos = new ObjectOutputStream( baos ); oos.writeObject( m ); oos.close(); assertEquals( m, BinIO.loadObject( new ByteArrayInputStream( baos.toByteArray() ) ) ); } } fastutil-7.1.0/test/it/unimi/dsi/fastutil/objects/ReferenceArraySetTest.java0000664000000000000000000000550413050705451025711 0ustar rootrootpackage it.unimi.dsi.fastutil.objects; import it.unimi.dsi.fastutil.io.BinIO; import it.unimi.dsi.fastutil.objects.ObjectArraySet; import it.unimi.dsi.fastutil.objects.ReferenceArraySet; import it.unimi.dsi.fastutil.objects.ReferenceOpenHashSet; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.ObjectOutputStream; import org.junit.Test; import static org.junit.Assert.*; public class ReferenceArraySetTest { @Test public void testSet() { for( int i = 0; i <= 2; i++ ) { final ReferenceArraySet s = i == 0 ? new ReferenceArraySet() : new ReferenceArraySet( 2 ); Integer one = new Integer( 1 ), two = new Integer( 2 ), three = new Integer( 3 ); assertTrue( s.add( one ) ); assertEquals( 1, s.size() ); assertTrue( s.contains( one ) ); assertFalse( s.contains( new Integer( 1 ) ) ); assertTrue( s.add( two ) ); assertTrue( s.contains( two ) ); assertFalse( s.contains( new Integer( 2 ) ) ); assertEquals( 2, s.size() ); assertFalse( s.add( one ) ); assertFalse( s.remove( three ) ); assertTrue( s.add( three ) ); assertEquals( 3, s.size() ); assertTrue( s.contains( one ) ); assertTrue( s.contains( two ) ); assertTrue( s.contains( three ) ); assertEquals( new ReferenceOpenHashSet( new Object[] { one, two, three } ), new ReferenceOpenHashSet( s.iterator() ) ); assertTrue( s.remove( three ) ); assertEquals( 2, s.size() ); assertTrue( s.remove( one ) ); assertEquals( 1, s.size() ); assertFalse( s.contains( one ) ); assertTrue( s.remove( two ) ); assertEquals( 0, s.size() ); assertFalse( s.contains( one ) ); } } @Test public void testClone() { ReferenceArraySet s = new ReferenceArraySet(); assertEquals( s, s.clone() ); Integer zero; s.add( zero = new Integer( 0 ) ); assertEquals( s, s.clone() ); s.add( new Integer( 0 ) ); assertEquals( s, s.clone() ); s.add( new Integer( 1 ) ); assertEquals( s, s.clone() ); s.add( new Integer( 2 ) ); assertEquals( s, s.clone() ); s.remove( zero ); assertEquals( s, s.clone() ); } @Test public void testSerialisation() throws IOException, ClassNotFoundException { // We can't really test reference maps as equals() doesnt' work ObjectArraySet s = new ObjectArraySet(); ByteArrayOutputStream baos = new ByteArrayOutputStream(); ObjectOutputStream oos = new ObjectOutputStream( baos ); oos.writeObject( s ); oos.close(); assertEquals( s, BinIO.loadObject( new ByteArrayInputStream( baos.toByteArray() ) ) ); s.add( new Integer( 0 ) ); s.add( new Integer( 1 ) ); baos.reset(); oos = new ObjectOutputStream( baos ); oos.writeObject( s ); oos.close(); assertEquals( s, BinIO.loadObject( new ByteArrayInputStream( baos.toByteArray() ) ) ); } } fastutil-7.1.0/test/it/unimi/dsi/fastutil/shorts/ShortArrayFrontCodedListTest.java0000664000000000000000000001110013050705451027120 0ustar rootrootpackage it.unimi.dsi.fastutil.shorts; import static org.junit.Assert.assertTrue; import it.unimi.dsi.fastutil.objects.ObjectListIterator; import java.io.IOException; import org.junit.Test; @SuppressWarnings({ "rawtypes", "unchecked" }) public class 
ShortArrayFrontCodedListTest { private static java.util.Random r = new java.util.Random( 0 ); private static short genKey() { return (short)( r.nextInt() ); } private static boolean contentEquals( java.util.List x, java.util.List y ) { if ( x.size() != y.size() ) return false; for ( int i = 0; i < x.size(); i++ ) if ( !java.util.Arrays.equals( (short[])x.get( i ), (short[])y.get( i ) ) ) return false; return true; } private static int l[]; private static short[][] a; private static void test( int n ) throws IOException, ClassNotFoundException { l = new int[ n ]; a = new short[ n ][]; for ( int i = 0; i < n; i++ ) l[ i ] = (int)( Math.abs( r.nextGaussian() ) * 32 ); for ( int i = 0; i < n; i++ ) a[ i ] = new short[ l[ i ] ]; for ( int i = 0; i < n; i++ ) for ( int j = 0; j < l[ i ]; j++ ) a[ i ][ j ] = genKey(); ShortArrayFrontCodedList m = new ShortArrayFrontCodedList( it.unimi.dsi.fastutil.objects.ObjectIterators.wrap( a ), r.nextInt( 4 ) + 1 ); it.unimi.dsi.fastutil.objects.ObjectArrayList t = new it.unimi.dsi.fastutil.objects.ObjectArrayList( a ); // System.out.println(m); // for( i = 0; i < t.size(); i++ ) // System.out.println(ARRAY_LIST.wrap((KEY_TYPE[])t.get(i))); /* Now we check that m actually holds that data. */ assertTrue( "Error: m does not equal t at creation", contentEquals( m, t ) ); /* Now we check cloning. */ assertTrue( "Error: m does not equal m.clone()", contentEquals( m, m.clone() ) ); /* Now we play with iterators. */ { ObjectListIterator i; java.util.ListIterator j; i = m.listIterator(); j = t.listIterator(); for ( int k = 0; k < 2 * n; k++ ) { assertTrue( "Error: divergence in hasNext()", i.hasNext() == j.hasNext() ); assertTrue( "Error: divergence in hasPrevious()", i.hasPrevious() == j.hasPrevious() ); if ( r.nextFloat() < .8 && i.hasNext() ) { assertTrue( "Error: divergence in next()", java.util.Arrays.equals( (short[])i.next(), (short[])j.next() ) ); } else if ( r.nextFloat() < .2 && i.hasPrevious() ) { assertTrue( "Error: divergence in previous()", java.util.Arrays.equals( (short[])i.previous(), (short[])j.previous() ) ); } assertTrue( "Error: divergence in nextIndex()", i.nextIndex() == j.nextIndex() ); assertTrue( "Error: divergence in previousIndex()", i.previousIndex() == j.previousIndex() ); } } { int from = r.nextInt( m.size() + 1 ); ObjectListIterator i; java.util.ListIterator j; i = m.listIterator( from ); j = t.listIterator( from ); for ( int k = 0; k < 2 * n; k++ ) { assertTrue( "Error: divergence in hasNext() (iterator with starting point " + from + ")", i.hasNext() == j.hasNext() ); assertTrue( "Error: divergence in hasPrevious() (iterator with starting point " + from + ")", i.hasPrevious() == j.hasPrevious() ); if ( r.nextFloat() < .8 && i.hasNext() ) { assertTrue( "Error: divergence in next() (iterator with starting point " + from + ")", java.util.Arrays.equals( (short[])i.next(), (short[])j.next() ) ); // System.err.println("Done next " + I + " " + J + " " + badPrevious); } else if ( r.nextFloat() < .2 && i.hasPrevious() ) { assertTrue( "Error: divergence in previous() (iterator with starting point " + from + ")", java.util.Arrays.equals( (short[])i.previous(), (short[])j.previous() ) ); } } } java.io.File ff = new java.io.File( "it.unimi.dsi.fastutil.test" ); java.io.OutputStream os = new java.io.FileOutputStream( ff ); java.io.ObjectOutputStream oos = new java.io.ObjectOutputStream( os ); oos.writeObject( m ); oos.close(); java.io.InputStream is = new java.io.FileInputStream( ff ); java.io.ObjectInputStream ois = new java.io.ObjectInputStream( 
is ); m = (ShortArrayFrontCodedList)ois.readObject(); ois.close(); ff.delete(); assertTrue( "Error: m does not equal t after save/read", contentEquals( m, t ) ); return; } @Test public void test1() throws IOException, ClassNotFoundException { test( 1 ); } @Test public void test10() throws Exception, ClassNotFoundException { test( 10 ); } @Test public void test100() throws IOException, ClassNotFoundException { test( 100 ); } @Test public void test1000() throws IOException, ClassNotFoundException { test( 1000 ); } @Test public void test10000() throws IOException, ClassNotFoundException { test( 10000 ); } } fastutil-7.1.0/test/it/unimi/dsi/fastutil/shorts/ShortArraysTest.java0000664000000000000000000001654613050705451024522 0ustar rootrootpackage it.unimi.dsi.fastutil.shorts; import static org.junit.Assert.assertTrue; import java.util.Random; import org.junit.Test; public class ShortArraysTest { private static short[] castIdentity( int n ) { final short[] a = new short[ n ]; while( n-- != 0 ) a[ n ] = (short)n; return a; } @Test public void testRadixSort1() { short[] t = { 2, 1, 0, 4 }; ShortArrays.radixSort( t ); for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ i ] <= t[ i + 1 ] ); t = new short[] { 2, -1, 0, -4 }; ShortArrays.radixSort( t ); for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ i ] <= t[ i + 1 ] ); t = ShortArrays.shuffle( castIdentity( 100 ), new Random( 0 ) ); ShortArrays.radixSort( t ); for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ i ] <= t[ i + 1 ] ); t = new short[ 100 ]; Random random = new Random( 0 ); for( int i = t.length; i-- != 0; ) t[ i ] = (short)random.nextInt(); ShortArrays.radixSort( t ); for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ i ] <= t[ i + 1 ] ); t = new short[ 100000 ]; random = new Random( 0 ); for( int i = t.length; i-- != 0; ) t[ i ] = (short)random.nextInt(); ShortArrays.radixSort( t ); for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ i ] <= t[ i + 1 ] ); t = new short[ 10000000 ]; random = new Random( 0 ); for( int i = t.length; i-- != 0; ) t[ i ] = (short)random.nextInt(); ShortArrays.radixSort( t ); for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ i ] <= t[ i + 1 ] ); } @Test public void testRadixSort2() { short[][] d = new short[ 2 ][]; d[ 0 ] = new short[ 10 ]; for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = (short)( 3 - i % 3 ); d[ 1 ] = ShortArrays.shuffle( castIdentity( 10 ), new Random( 0 ) ); ShortArrays.radixSort( d[ 0 ], d[ 1 ] ); for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " + d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] ); d[ 0 ] = new short[ 100000 ]; for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = (short)( 100 - i % 100 ); d[ 1 ] = ShortArrays.shuffle( castIdentity( 100000 ), new Random( 6 ) ); ShortArrays.radixSort( d[ 0 ], d[ 1 ] ); for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " + d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] ); d[ 0 ] = new short[ 10 ]; for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = (short)( i % 3 - 2 ); Random random = new Random( 0 ); d[ 1 ] = new short[ d[ 0 ].length ]; for( int i = d.length; i-- != 0; ) d[ 1 ][ i ] = (short)random.nextInt(); ShortArrays.radixSort( d[ 0 ], d[ 1 ] ); for( int i = 
d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " + d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] ); d[ 0 ] = new short[ 100000 ]; random = new Random( 0 ); for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = (short)random.nextInt(); d[ 1 ] = new short[ d[ 0 ].length ]; for( int i = d.length; i-- != 0; ) d[ 1 ][ i ] = (short)random.nextInt(); ShortArrays.radixSort( d[ 0 ], d[ 1 ] ); for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " + d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] ); d[ 0 ] = new short[ 10000000 ]; random = new Random( 0 ); for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = (short)random.nextInt(); d[ 1 ] = new short[ d[ 0 ].length ]; for( int i = d.length; i-- != 0; ) d[ 1 ][ i ] = (short)random.nextInt(); ShortArrays.radixSort( d[ 0 ], d[ 1 ] ); for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " + d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] ); } @Test public void testRadixSort() { short[][] t = { { 2, 1, 0, 4 } }; ShortArrays.radixSort( t ); for( int i = t[ 0 ].length - 1; i-- != 0; ) assertTrue( t[ 0 ][ i ] <= t[ 0 ][ i + 1 ] ); t[ 0 ] = ShortArrays.shuffle( castIdentity( 100 ), new Random( 0 ) ); ShortArrays.radixSort( t ); for( int i = t[ 0 ].length - 1; i-- != 0; ) assertTrue( t[ 0 ][ i ] <= t[ 0 ][ i + 1 ] ); short[][] d = new short[ 2 ][]; d[ 0 ] = new short[ 10 ]; for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = (short)( 3 - i % 3 ); d[ 1 ] = ShortArrays.shuffle( castIdentity( 10 ), new Random( 0 ) ); ShortArrays.radixSort( d ); for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " + d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] ); d[ 0 ] = new short[ 100000 ]; for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = (short)( 100 - i % 100 ); d[ 1 ] = ShortArrays.shuffle( castIdentity( 100000 ), new Random( 6 ) ); ShortArrays.radixSort( d ); for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " + d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] ); d[ 0 ] = new short[ 10 ]; Random random = new Random( 0 ); for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = (short)random.nextInt(); d[ 1 ] = new short[ d[ 0 ].length ]; for( int i = d.length; i-- != 0; ) d[ 1 ][ i ] = (short)random.nextInt(); ShortArrays.radixSort( d ); for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " + d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] ); d[ 0 ] = new short[ 100000 ]; random = new Random( 0 ); for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = (short)random.nextInt(); d[ 1 ] = new short[ d[ 0 ].length ]; for( int i = d.length; i-- != 0; ) 
d[ 1 ][ i ] = (short)random.nextInt(); ShortArrays.radixSort( d ); for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " + d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] ); d[ 0 ] = new short[ 10000000 ]; random = new Random( 0 ); for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = (short)random.nextInt(); d[ 1 ] = new short[ d[ 0 ].length ]; for( int i = d.length; i-- != 0; ) d[ 1 ][ i ] = (short)random.nextInt(); ShortArrays.radixSort( d ); for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " + d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] ); } }