/** * Constructor * @param vectorSize The vector size of <i>this</i> filter. * @param nbHash The number of hash function to consider. * @param hashType type of the hashing function (see * {@link org.apache.hadoop.util.hash.Hash}). */ public CountingBloomFilter(int vectorSize, int nbHash, int hashType) { super(vectorSize, nbHash, hashType); buckets = new long[buckets2words(vectorSize)]; }
/**
 * Replaces each bucket word of this filter with the bitwise AND of itself
 * and the corresponding word of {@code filter}.
 * <p>
 * NOTE(review): this is a raw bitwise AND of the packed counter words, not
 * a per-counter minimum — confirm this matches the intended intersection
 * semantics for counting filters.
 *
 * @param filter a {@code CountingBloomFilter} with the same vector size
 *   and hash count as this one.
 * @throws IllegalArgumentException if the filters are not compatible.
 */
@Override
public void and(Filter filter) {
  final boolean compatible = filter instanceof CountingBloomFilter
      && filter.vectorSize == this.vectorSize
      && filter.nbHash == this.nbHash;
  if (!compatible) {
    throw new IllegalArgumentException("filters cannot be and-ed");
  }
  CountingBloomFilter other = (CountingBloomFilter) filter;
  final int words = buckets2words(vectorSize);
  for (int w = 0; w < words; w++) {
    this.buckets[w] &= other.buckets[w];
  }
}
/**
 * Replaces each bucket word of this filter with the bitwise OR of itself
 * and the corresponding word of {@code filter}.
 * <p>
 * NOTE(review): this is a raw bitwise OR of the packed counter words, not
 * a per-counter addition — confirm this matches the intended union
 * semantics for counting filters.
 *
 * @param filter a {@code CountingBloomFilter} with the same vector size
 *   and hash count as this one.
 * @throws IllegalArgumentException if the filters are not compatible.
 */
@Override
public void or(Filter filter) {
  final boolean compatible = filter instanceof CountingBloomFilter
      && filter.vectorSize == this.vectorSize
      && filter.nbHash == this.nbHash;
  if (!compatible) {
    throw new IllegalArgumentException("filters cannot be or-ed");
  }
  CountingBloomFilter other = (CountingBloomFilter) filter;
  final int words = buckets2words(vectorSize);
  for (int w = 0; w < words; w++) {
    this.buckets[w] |= other.buckets[w];
  }
}
/**
 * Deserializes this filter: reads the superclass state, then reallocates
 * the bucket array and fills every word from the stream.
 *
 * @param in the {@link DataInput} to read from.
 * @throws IOException if the stream cannot be read.
 */
@Override
public void readFields(DataInput in) throws IOException {
  super.readFields(in);
  final int words = buckets2words(vectorSize);
  buckets = new long[words];
  for (int w = 0; w < words; w++) {
    buckets[w] = in.readLong();
  }
}
}
/**
 * Serializes this filter: writes the superclass state followed by every
 * bucket word in order.
 *
 * @param out the {@link DataOutput} to write to.
 * @throws IOException if the stream cannot be written.
 */
@Override
public void write(DataOutput out) throws IOException {
  super.write(out);
  final int words = buckets2words(vectorSize);
  for (int w = 0; w < words; w++) {
    out.writeLong(buckets[w]);
  }
}
/**
 * Constructs a counting Bloom filter and allocates its backing word array.
 *
 * @param vectorSize The vector size of <i>this</i> filter.
 * @param nbHash The number of hash functions to consider.
 * @param hashType type of the hashing function (see
 *   {@link org.apache.hadoop.util.hash.Hash}).
 */
public CountingBloomFilter(int vectorSize, int nbHash, int hashType) {
  super(vectorSize, nbHash, hashType);
  // One allocation sized to hold all packed buckets.
  buckets = new long[buckets2words(vectorSize)];
}
/** * Constructor * @param vectorSize The vector size of <i>this</i> filter. * @param nbHash The number of hash function to consider. * @param hashType type of the hashing function (see * {@link org.apache.hadoop.util.hash.Hash}). */ public CountingBloomFilter(int vectorSize, int nbHash, int hashType) { super(vectorSize, nbHash, hashType); buckets = new long[buckets2words(vectorSize)]; }
/**
 * Constructor.
 *
 * @param vectorSize The vector size of <i>this</i> filter.
 * @param nbHash The number of hash functions to consider.
 * @param hashType type of the hashing function (see
 *   {@link org.apache.hadoop.util.hash.Hash}).
 */
public CountingBloomFilter(int vectorSize, int nbHash, int hashType) {
  super(vectorSize, nbHash, hashType);
  // Allocate the packed bucket words for the requested vector size.
  buckets = new long[buckets2words(vectorSize)];
}
/** * Constructor * @param vectorSize The vector size of <i>this</i> filter. * @param nbHash The number of hash function to consider. * @param hashType type of the hashing function (see * {@link org.apache.hadoop.util.hash.Hash}). */ public CountingBloomFilter(int vectorSize, int nbHash, int hashType) { super(vectorSize, nbHash, hashType); buckets = new long[buckets2words(vectorSize)]; }
/** * Constructor * @param vectorSize The vector size of <i>this</i> filter. * @param nbHash The number of hash function to consider. * @param hashType type of the hashing function (see * {@link org.apache.hadoop.util.hash.Hash}). */ public CountingBloomFilter(int vectorSize, int nbHash, int hashType) { super(vectorSize, nbHash, hashType); buckets = new long[buckets2words(vectorSize)]; }
/**
 * Writes this filter to {@code out}: superclass state first, then each
 * bucket word as a {@code long}.
 *
 * @param out destination stream.
 * @throws IOException on write failure.
 */
@Override
public void write(DataOutput out) throws IOException {
  super.write(out);
  final int nWords = buckets2words(vectorSize);
  for (int idx = 0; idx < nWords; idx++) {
    out.writeLong(buckets[idx]);
  }
}
/**
 * Serializes the filter. The superclass header is emitted first so that
 * {@code readFields} can restore {@code vectorSize} before the buckets.
 *
 * @param out destination stream.
 * @throws IOException on write failure.
 */
@Override
public void write(DataOutput out) throws IOException {
  super.write(out);
  final int total = buckets2words(vectorSize);
  for (int pos = 0; pos < total; pos++) {
    out.writeLong(buckets[pos]);
  }
}
/**
 * Restores this filter from {@code in}: superclass state first, then a
 * fresh bucket array populated word by word.
 *
 * @param in source stream.
 * @throws IOException on read failure.
 */
@Override
public void readFields(DataInput in) throws IOException {
  super.readFields(in);
  final int nWords = buckets2words(vectorSize);
  buckets = new long[nWords];
  for (int idx = 0; idx < nWords; idx++) {
    buckets[idx] = in.readLong();
  }
}
}
/**
 * Streams out the filter: delegated superclass header, then all bucket
 * words in index order.
 *
 * @param out destination stream.
 * @throws IOException on write failure.
 */
@Override
public void write(DataOutput out) throws IOException {
  super.write(out);
  final int limit = buckets2words(vectorSize);
  for (int j = 0; j < limit; j++) {
    out.writeLong(buckets[j]);
  }
}
/**
 * Reads the filter back in. The superclass is deserialized first so the
 * bucket-array length can be derived from the restored vector size.
 *
 * @param in source stream.
 * @throws IOException on read failure.
 */
@Override
public void readFields(DataInput in) throws IOException {
  super.readFields(in);
  final int total = buckets2words(vectorSize);
  buckets = new long[total];
  for (int pos = 0; pos < total; pos++) {
    buckets[pos] = in.readLong();
  }
}
}
/**
 * Deserializes this counting Bloom filter from a stream, replacing any
 * existing bucket contents.
 *
 * @param in source stream.
 * @throws IOException on read failure.
 */
@Override
public void readFields(DataInput in) throws IOException {
  super.readFields(in);
  final int limit = buckets2words(vectorSize);
  buckets = new long[limit];
  for (int j = 0; j < limit; j++) {
    buckets[j] = in.readLong();
  }
}
}
/**
 * Reconstructs this filter from serialized form: superclass fields, then
 * one {@code long} per bucket word.
 *
 * @param in source stream.
 * @throws IOException on read failure.
 */
@Override
public void readFields(DataInput in) throws IOException {
  super.readFields(in);
  final int count = buckets2words(vectorSize);
  buckets = new long[count];
  for (int k = 0; k < count; k++) {
    buckets[k] = in.readLong();
  }
}
}
/**
 * Emits this filter's serialized form: superclass state followed by the
 * full bucket array.
 *
 * @param out destination stream.
 * @throws IOException on write failure.
 */
@Override
public void write(DataOutput out) throws IOException {
  super.write(out);
  final int count = buckets2words(vectorSize);
  for (int k = 0; k < count; k++) {
    out.writeLong(buckets[k]);
  }
}
/**
 * Loads this filter's state from {@code in}, allocating a new bucket
 * array sized from the just-restored {@code vectorSize}.
 *
 * @param in source stream.
 * @throws IOException on read failure.
 */
@Override
public void readFields(DataInput in) throws IOException {
  super.readFields(in);
  final int wordCount = buckets2words(vectorSize);
  buckets = new long[wordCount];
  for (int i = 0; i < wordCount; i++) {
    buckets[i] = in.readLong();
  }
}
}
/**
 * Writes out this filter: the superclass portion first, then every packed
 * bucket word.
 *
 * @param out destination stream.
 * @throws IOException on write failure.
 */
@Override
public void write(DataOutput out) throws IOException {
  super.write(out);
  final int wordCount = buckets2words(vectorSize);
  for (int i = 0; i < wordCount; i++) {
    out.writeLong(buckets[i]);
  }
}