/**
 * Constructs a <code>TByteHashSet</code> pre-populated with the
 * contents of the given primitive array.
 *
 * @param array the <code>byte</code> values to place in the new set
 */
public TByteHashSet( byte[] array ) {
    // Size for the larger of the array length and the default capacity.
    this( array.length > DEFAULT_CAPACITY ? array.length : DEFAULT_CAPACITY );
    addAll( array );
}
/**
 * Constructs a <code>TByteHashSet</code> containing every element of
 * the supplied boxed collection.
 *
 * @param collection the <tt>Collection</tt> whose elements are copied
 */
public TByteHashSet( Collection<? extends Byte> collection ) {
    // Size for the larger of the collection size and the default capacity.
    this( collection.size() > DEFAULT_CAPACITY ? collection.size() : DEFAULT_CAPACITY );
    addAll( collection );
}
/**
 * Creates a new <code>TByteHashSet</code> instance that is a copy
 * of the existing set.
 *
 * @param collection a <tt>TByteSet</tt> that will be duplicated.
 */
public TByteHashSet( TByteCollection collection ) {
    // Size the backing table for at least the source's element count.
    this( Math.max( collection.size(), DEFAULT_CAPACITY ) );
    if ( collection instanceof TByteHashSet ) {
        // Source is also a TByteHashSet: copy its tuning parameters so the
        // duplicate behaves identically (load factor, free-slot sentinel).
        TByteHashSet hashset = ( TByteHashSet ) collection;
        this._loadFactor = hashset._loadFactor;
        this.no_entry_value = hashset.no_entry_value;
        //noinspection RedundantCast
        if ( this.no_entry_value != ( byte ) 0 ) {
            // Free slots are marked with no_entry_value; a fresh byte[] is
            // zero-filled, so only a non-zero sentinel needs re-marking.
            Arrays.fill( _set, this.no_entry_value );
        }
        // NOTE(review): setUp() is sized from DEFAULT_CAPACITY, not from
        // collection.size(); if setUp reallocates the backing arrays it
        // would also discard the fill above — confirm setUp's semantics.
        setUp( (int) Math.ceil( DEFAULT_CAPACITY / _loadFactor ) );
    }
    addAll( collection );
}
/**
 * Builds a <code>TByteHashSet</code> initialized from a primitive
 * <code>byte</code> array.
 *
 * @param array source values for the new set
 */
public TByteHashSet( byte[] array ) {
    // Never size below the default capacity.
    this( array.length > DEFAULT_CAPACITY ? array.length : DEFAULT_CAPACITY );
    addAll( array );
}
/**
 * Creates a set seeded with the distinct values of <tt>array</tt>.
 *
 * @param array an array of <code>byte</code> primitives to insert
 */
public TByteHashSet( byte[] array ) {
    // Capacity is the array length, floored at DEFAULT_CAPACITY.
    this( array.length > DEFAULT_CAPACITY ? array.length : DEFAULT_CAPACITY );
    addAll( array );
}
/**
 * Array-copy constructor: the new set contains every value present in
 * <tt>array</tt>.
 *
 * @param array <code>byte</code> primitives to add
 */
public TByteHashSet( byte[] array ) {
    // Pick whichever is larger: input length or the default capacity.
    this( array.length > DEFAULT_CAPACITY ? array.length : DEFAULT_CAPACITY );
    addAll( array );
}
/**
 * Constructs a new set holding the elements of the given primitive
 * array.
 *
 * @param array the <code>byte</code> values to insert
 */
public TByteHashSet( byte[] array ) {
    // Initial capacity: array length, but no smaller than the default.
    this( array.length > DEFAULT_CAPACITY ? array.length : DEFAULT_CAPACITY );
    addAll( array );
}
/**
 * Copy constructor from a boxed <tt>Collection</tt> of <tt>Byte</tt>.
 *
 * @param collection elements to duplicate into the new set
 */
public TByteHashSet( Collection<? extends Byte> collection ) {
    // Capacity is the source size, floored at DEFAULT_CAPACITY.
    this( collection.size() > DEFAULT_CAPACITY ? collection.size() : DEFAULT_CAPACITY );
    addAll( collection );
}
/**
 * Builds a new set that duplicates the contents of an existing boxed
 * collection.
 *
 * @param collection the <tt>Collection</tt> to copy
 */
public TByteHashSet( Collection<? extends Byte> collection ) {
    // Never size the table below the default capacity.
    this( collection.size() > DEFAULT_CAPACITY ? collection.size() : DEFAULT_CAPACITY );
    addAll( collection );
}
/**
 * Creates a set initialized from a <tt>Collection</tt> of boxed bytes.
 *
 * @param collection source elements for the new set
 */
public TByteHashSet( Collection<? extends Byte> collection ) {
    // Use the larger of the collection size and the default capacity.
    this( collection.size() > DEFAULT_CAPACITY ? collection.size() : DEFAULT_CAPACITY );
    addAll( collection );
}
/**
 * Duplicates a boxed collection into a new primitive byte set.
 *
 * @param collection the <tt>Collection</tt> whose elements are added
 */
public TByteHashSet( Collection<? extends Byte> collection ) {
    // Size the table for the input, with DEFAULT_CAPACITY as the floor.
    this( collection.size() > DEFAULT_CAPACITY ? collection.size() : DEFAULT_CAPACITY );
    addAll( collection );
}
/**
 * Creates a new <code>TByteHashSet</code> instance that is a copy
 * of the existing set.
 *
 * @param collection a <tt>TByteSet</tt> that will be duplicated.
 */
public TByteHashSet( TByteCollection collection ) {
    // Size the backing table for at least the source's element count.
    this( Math.max( collection.size(), DEFAULT_CAPACITY ) );
    if ( collection instanceof TByteHashSet ) {
        // Same concrete type: mirror its load factor and free-slot sentinel.
        TByteHashSet hashset = ( TByteHashSet ) collection;
        this._loadFactor = hashset._loadFactor;
        this.no_entry_value = hashset.no_entry_value;
        //noinspection RedundantCast
        if ( this.no_entry_value != ( byte ) 0 ) {
            // A fresh byte[] is already zero; only a non-zero free-slot
            // sentinel requires explicitly re-marking the table.
            Arrays.fill( _set, this.no_entry_value );
        }
        // NOTE(review): capacity passed to setUp() derives from
        // DEFAULT_CAPACITY rather than collection.size(), and if setUp
        // reallocates the arrays the fill above is lost — verify setUp.
        setUp( (int) Math.ceil( DEFAULT_CAPACITY / _loadFactor ) );
    }
    addAll( collection );
}
/**
 * Creates a new <code>TByteHashSet</code> instance that is a copy
 * of the existing set.
 *
 * @param collection a <tt>TByteSet</tt> that will be duplicated.
 */
public TByteHashSet( TByteCollection collection ) {
    // Allocate for at least the source's element count.
    this( Math.max( collection.size(), DEFAULT_CAPACITY ) );
    if ( collection instanceof TByteHashSet ) {
        // Copy tuning parameters from the source hash set.
        TByteHashSet hashset = ( TByteHashSet ) collection;
        this._loadFactor = hashset._loadFactor;
        this.no_entry_value = hashset.no_entry_value;
        //noinspection RedundantCast
        if ( this.no_entry_value != ( byte ) 0 ) {
            // Mark all free slots with the non-zero sentinel value.
            Arrays.fill( _set, this.no_entry_value );
        }
        // NOTE(review): setUp() is sized from DEFAULT_CAPACITY, not the
        // source size; confirm it does not reallocate and discard the
        // sentinel fill performed just above.
        setUp( (int) Math.ceil( DEFAULT_CAPACITY / _loadFactor ) );
    }
    addAll( collection );
}
/**
 * Creates a new <code>TByteHashSet</code> instance that is a copy
 * of the existing set.
 *
 * @param collection a <tt>TByteSet</tt> that will be duplicated.
 */
public TByteHashSet( TByteCollection collection ) {
    // Size the table for at least the source's element count.
    this( Math.max( collection.size(), DEFAULT_CAPACITY ) );
    if ( collection instanceof TByteHashSet ) {
        // Mirror the source's load factor and free-slot sentinel.
        TByteHashSet hashset = ( TByteHashSet ) collection;
        this._loadFactor = hashset._loadFactor;
        this.no_entry_value = hashset.no_entry_value;
        //noinspection RedundantCast
        if ( this.no_entry_value != ( byte ) 0 ) {
            // Zero-filled by default; only non-zero sentinels need a fill.
            Arrays.fill( _set, this.no_entry_value );
        }
        // NOTE(review): setUp() uses DEFAULT_CAPACITY, not the source
        // size, and may reallocate the arrays filled above — verify.
        setUp( (int) Math.ceil( DEFAULT_CAPACITY / _loadFactor ) );
    }
    addAll( collection );
}
/**
 * Creates a new <code>TByteHashSet</code> instance that is a copy
 * of the existing set.
 *
 * @param collection a <tt>TByteSet</tt> that will be duplicated.
 */
public TByteHashSet( TByteCollection collection ) {
    // Size the table for at least the source's element count.
    this( Math.max( collection.size(), DEFAULT_CAPACITY ) );
    if ( collection instanceof TByteHashSet ) {
        // Mirror the source's load factor and free-slot sentinel.
        TByteHashSet hashset = ( TByteHashSet ) collection;
        this._loadFactor = hashset._loadFactor;
        this.no_entry_value = hashset.no_entry_value;
        //noinspection RedundantCast
        if ( this.no_entry_value != ( byte ) 0 ) {
            // A fresh byte[] is zero; only a non-zero sentinel needs a fill.
            Arrays.fill( _set, this.no_entry_value );
        }
        // saturatedCast/fastCeil guard against int overflow of the
        // capacity computation (overflow-safe variant of Math.ceil cast).
        // NOTE(review): still sized from DEFAULT_CAPACITY, not the source
        // size; confirm setUp does not reallocate away the fill above.
        setUp( saturatedCast( fastCeil( DEFAULT_CAPACITY / (double) _loadFactor ) ) );
    }
    addAll( collection );
}
@Override public double compare(final byte[] h1, final byte[] h2) { TByteHashSet union = new TByteHashSet(h1); union.addAll(h2); TByteHashSet intersection = new TByteHashSet(h1); intersection.retainAll(h2.clone()); //retainAll sorts the input, so we need to make a copy return 1.0 - (((double)intersection.size()) / (double)union.size()); } },
@Override public double compare(final byte[] h1, final byte[] h2) { TByteHashSet union = new TByteHashSet(h1); union.addAll(h2); TByteHashSet intersection = new TByteHashSet(h1); intersection.retainAll(h2.clone()); //retainAll sorts the input, so we need to make a copy return 1.0 - (((double)intersection.size()) / (double)union.size()); } },
/**
 * Jaccard distance over the distinct byte values of two sparse arrays:
 * 1 - |intersection| / |union|, accounting for implicit zero entries
 * that values() omits.
 */
@Override
public double compare(final SparseByteArray h1, final SparseByteArray h2) {
    byte[] h1v = h1.values();
    byte[] h2v = h2.values();
    TByteHashSet union = new TByteHashSet(h1v);
    union.addAll(h2v);
    // values() omits implicit zeros: if either array is logically longer
    // than its explicit values, it contains zeros, so zero is in the union.
    if (h1v.length != h1.length || h2v.length != h2.length)
        union.add((byte)0);
    TByteHashSet intersection = new TByteHashSet(h1v);
    // NOTE(review): retainAll sorts its argument in place; if values()
    // exposes the internal array this reorders h2 — the byte[] overload
    // clones for this reason. Confirm values() returns a copy.
    intersection.retainAll(h2v);
    // BUG FIX: when BOTH arrays contain implicit zeros, zero is common to
    // both and belongs in the INTERSECTION. The original added it to the
    // union again (a no-op, since && implies the || branch already fired),
    // undercounting the intersection and inflating the distance.
    if (h1v.length != h1.length && h2v.length != h2.length)
        intersection.add((byte)0);
    return 1.0 - (((double)intersection.size()) / (double)union.size());
}
},
/**
 * Jaccard distance over the distinct byte values of two sparse arrays:
 * 1 - |intersection| / |union|, accounting for implicit zero entries
 * that values() omits.
 */
@Override
public double compare(final SparseByteArray h1, final SparseByteArray h2) {
    byte[] h1v = h1.values();
    byte[] h2v = h2.values();
    TByteHashSet union = new TByteHashSet(h1v);
    union.addAll(h2v);
    // If either sparse array has implicit zero entries (explicit values
    // shorter than logical length), zero belongs in the union.
    if (h1v.length != h1.length || h2v.length != h2.length)
        union.add((byte)0);
    TByteHashSet intersection = new TByteHashSet(h1v);
    // NOTE(review): retainAll sorts its argument in place; confirm
    // values() returns a copy, otherwise h2's internal order is mutated
    // (the byte[] overload clones for exactly this reason).
    intersection.retainAll(h2v);
    // BUG FIX: when BOTH arrays have implicit zeros, zero is shared and
    // must be added to the INTERSECTION; the original re-added it to the
    // union (a no-op given the || branch above), undercounting overlap.
    if (h1v.length != h1.length && h2v.length != h2.length)
        intersection.add((byte)0);
    return 1.0 - (((double)intersection.size()) / (double)union.size());
}
},