/**
 * Determines whether {@code value} is an exact power of two.
 *
 * @param value the value to test
 * @return {@code true} if {@code value} is positive and has exactly one set bit
 */
public static boolean isPowerOf2(long value) {
    // A positive power of two has a single set bit, so clearing the lowest
    // set bit (value & (value - 1)) must leave zero.
    return value > 0 && (value & (value - 1)) == 0;
}
/**
 * Reports whether {@code value} is a power of two.
 *
 * @param value the candidate value
 * @return {@code true} for positive values with a population count of one
 */
public static boolean isPowerOf2(long value) {
    // Exactly one set bit combined with a positive sign characterises a
    // power of two; Long.MIN_VALUE has one bit but fails the sign test.
    return Long.bitCount(value) == 1 && value > 0;
}
/**
 * Attempts to write every value into {@code target} using a fixed width of
 * {@code maxBitsPerLabel} bits per value.
 *
 * @param values          the values to encode
 * @param maxBitsPerLabel the fixed bit width available for each value
 * @param target          the bit sink the values are written into
 * @return {@code true} if all values fit in the given width and were written;
 *         {@code false} as soon as one value is too wide (earlier values have
 *         already been written to {@code target} in that case)
 */
private static boolean inlineValues( long[] values, int maxBitsPerLabel, Bits target )
{
    // Any value whose highest set bit is below this limit fits in maxBitsPerLabel bits.
    // NOTE(review): assumes maxBitsPerLabel < 64 — 1L << 64 wraps to 1. Confirm callers.
    long limit = 1L << maxBitsPerLabel;
    for ( long value : values )
    {
        // NOTE(review): for a negative value, highestOneBit(value) is Long.MIN_VALUE,
        // which compares below limit, so negatives would be silently truncated —
        // presumably inputs are non-negative ids; verify against callers.
        if ( highestOneBit( value ) < limit )
        {
            target.put( value, maxBitsPerLabel );
        }
        else
        {
            // At least one value is too wide; caller must fall back to another encoding.
            return false;
        }
    }
    return true;
}
static int bitsForNumElementsWithLoadFactor(long numElements) { if (numElements == 0) { return 1; } int candidateBits = Long.bitCount(numElements) == 1 ? Math.max(1, Long.numberOfTrailingZeros(numElements)) : Long.numberOfTrailingZeros(Long.highestOneBit(numElements) << 1L); //May need an extra bit due to load factor if (((long) (LongMath.pow(2, candidateBits) * 0.75)) < numElements) { candidateBits++; } return candidateBits; }
/**
 * Derives the maximum page size (in KB) for the given pool size (in bytes):
 * one 32nd of the pool rounded down to a power of two, capped at
 * {@code MAX_PAGE_SIZE_IN_KB}.
 *
 * @param poolSize the pool size in bytes
 * @return the maximum size in kilobytes
 */
static long getMaxSize(long poolSize) {
    // Round the pool size down to a power of two before converting units.
    long roundedBytes = Long.highestOneBit(poolSize);
    long sizeInKb = KILOBYTES.convert(roundedBytes, BYTES);
    // Divide by 32, then clamp to the configured ceiling.
    return Math.min(sizeInKb >> 5, MAX_PAGE_SIZE_IN_KB);
}
/**
 * Writes {@code v} to this buffer as lowercase hexadecimal text, treating the
 * long as an unsigned 64-bit value (so a negative {@code v} produces 16 digits).
 *
 * @param v the value to write
 * @return this buffer
 */
@Override
public Buffer writeHexadecimalUnsignedLong(long v) {
    if (v == 0) {
        // Both a shortcut and required since the following code can't handle zero.
        return writeByte('0');
    }

    // Digit count: index of the highest set bit divided by 4 (bits per hex
    // digit), plus one.
    int width = Long.numberOfTrailingZeros(Long.highestOneBit(v)) / 4 + 1;

    Segment tail = writableSegment(width);
    byte[] data = tail.data;
    // Fill the digits back-to-front, consuming four bits of v per character.
    for (int pos = tail.limit + width - 1, start = tail.limit; pos >= start; pos--) {
        data[pos] = DIGITS[(int) (v & 0xF)];
        v >>>= 4;
    }
    tail.limit += width;
    size += width;
    return this;
}
/**
 * Appends {@code v} in big-endian packed (varint-style) form: seven payload
 * bits per byte, with the high bit set on every byte except the last.
 *
 * @param v the value to append, interpreted as an unsigned 64-bit integer
 * @return this builder
 */
public ByteStringBuilder appendPackedUnsignedBE(long v) {
    // Fix: test v != 0 rather than v > 0 so values with the sign bit set
    // (unsigned values >= 2^63) emit all of their high-order 7-bit groups
    // instead of being silently truncated to the low seven bits.
    if (v != 0) {
        // Index of the highest set bit plus one = number of significant bits.
        final int bits = Long.numberOfTrailingZeros(Long.highestOneBit(v)) + 1;
        // Byte count for this value (original (bits + 7) / 7 formula preserved).
        final int size = (bits + 7) / 7;
        // Emit all but the final group, high bit set as the continuation flag.
        for (int x = 0, b = (size - 1) * 7; x < size - 1; x ++, b -= 7) {
            doAppend((byte) (0x80L | v >>> b));
        }
    }
    // Final byte: low seven bits, continuation flag clear.
    doAppend((byte) (~0x80L & v));
    return this;
}
/**
 * Appends {@code v} in big-endian packed (varint-style) form: seven payload
 * bits per byte, with the high bit set on every byte except the last.
 *
 * @param v the value to append, interpreted as an unsigned 64-bit integer
 * @return this builder
 */
public ByteStringBuilder appendPackedUnsignedBE(long v) {
    // Fix: test v != 0 rather than v > 0 so values with the sign bit set
    // (unsigned values >= 2^63) emit all of their high-order 7-bit groups
    // instead of being silently truncated to the low seven bits.
    if (v != 0) {
        // Index of the highest set bit plus one = number of significant bits.
        final int bits = Long.numberOfTrailingZeros(Long.highestOneBit(v)) + 1;
        // Byte count for this value (original (bits + 7) / 7 formula preserved).
        final int size = (bits + 7) / 7;
        // Emit all but the final group, high bit set as the continuation flag.
        for (int x = 0, b = (size - 1) * 7; x < size - 1; x ++, b -= 7) {
            doAppend((byte) (0x80L | v >>> b));
        }
    }
    // Final byte: low seven bits, continuation flag clear.
    doAppend((byte) (~0x80L & v));
    return this;
}
/**
 * Deparse an action bit set, using the given function to map action bits to strings. If the bits are all clear,
 * the empty string {@code ""} is returned.
 *
 * @param actionBits the action bit set
 * @param mappingFunction the mapping function (must not be {@code null})
 * @return the actions string (not {@code null})
 */
public static String toActionsString(long actionBits, LongFunction<String> mappingFunction) {
    Assert.checkNotNullParam("mappingFunction", mappingFunction);
    if (actionBits == 0) return "";
    final StringBuilder result = new StringBuilder();
    // Walk the set bits from most to least significant, clearing each as we go,
    // joining the mapped names with commas.
    for (long remaining = actionBits; remaining != 0; ) {
        final long bit = Long.highestOneBit(remaining);
        if (result.length() > 0) result.append(',');
        result.append(mappingFunction.apply(bit));
        remaining &= ~bit;
    }
    return result.toString();
}
/**
 * Computes, for the given cuboid, the row-key column positions of its set
 * dimension bits (scanning the base cuboid's bit range from most significant
 * downwards) and appends that position array to {@code allCuboidsBitSet}.
 *
 * @param cuboidId         the cuboid whose dimension positions are collected
 * @param allCuboidsBitSet accumulator receiving one position array per cuboid
 */
private void addCuboidBitSet(long cuboidId, List<Integer[]> allCuboidsBitSet) {
    // One slot per set bit in the cuboid id.
    Integer[] indice = new Integer[Long.bitCount(cuboidId)];

    // Start probing at the base cuboid's most significant dimension bit.
    long mask = Long.highestOneBit(baseCuboidId);
    int position = 0;
    for (int i = 0; i < ROW_LENGTH; i++) {
        // NOTE(review): (mask & cuboidId) > 0 would misread bit 63 (the result
        // is negative); presumably cuboid ids never use the sign bit — confirm.
        if ((mask & cuboidId) > 0) {
            indice[position] = i;
            position++;
        }
        mask = mask >> 1;
    }
    allCuboidsBitSet.add(indice);
}
/**
 * Collects the row-key column offsets of every dimension present in
 * {@code cuboidId}, scanning from the base cuboid's highest bit downwards,
 * and appends the resulting array to {@code allCuboidsBitSet}.
 *
 * @param cuboidId         the cuboid whose dimension offsets are collected
 * @param allCuboidsBitSet accumulator receiving one offset array per cuboid
 */
private void addCuboidBitSet(long cuboidId, List<Integer[]> allCuboidsBitSet) {
    // One entry per dimension bit set in the cuboid id.
    Integer[] columns = new Integer[Long.bitCount(cuboidId)];
    long probe = Long.highestOneBit(baseCuboidId);
    int next = 0;
    // Test each of the ROW_LENGTH bit positions, most significant first.
    for (int offset = 0; offset < ROW_LENGTH; offset++, probe >>= 1) {
        if ((probe & cuboidId) > 0) {
            columns[next++] = offset;
        }
    }
    allCuboidsBitSet.add(columns);
}
// NOTE(review): fragment — the loop opened here continues beyond this chunk.
// Allocates one slot per dimension bit in the cuboid id, then scans the base
// cuboid's bits from most significant downwards over rowkeyLength positions.
Integer[] cuboidBitSet = new Integer[Long.bitCount(cuboidId)];
// Start probing at the base cuboid's most significant dimension bit.
long mask = Long.highestOneBit(baseCuboidId);
int position = 0;
for (int i = 0; i < rowkeyLength; i++) {
/**
 * Derives the dimension/metric column bit sets for a child cuboid from the
 * base cuboid's bit sets: every base dimension absent from the child is
 * cleared (it will be aggregated away); measure columns are inherited as-is.
 *
 * @param baseCuboidId  the parent (base) cuboid id
 * @param childCuboidId the child cuboid id (a subset of the parent's bits)
 * @param measureCount  the number of measure columns
 * @return a pair of (child dimension bit set, measure column bit set)
 */
public static final Pair<ImmutableBitSet, ImmutableBitSet> getDimensionAndMetricColumnBitSet(final long baseCuboidId, final long childCuboidId, final int measureCount) {
    final Pair<ImmutableBitSet, ImmutableBitSet> parentDimensionAndMetricColumnBitSet = getDimensionAndMetricColumnBitSet(baseCuboidId, measureCount);
    ImmutableBitSet parentDimensions = parentDimensionAndMetricColumnBitSet.getFirst();
    ImmutableBitSet measureColumns = parentDimensionAndMetricColumnBitSet.getSecond();
    ImmutableBitSet childDimensions = parentDimensions;

    // Walk the parent's bits from the most significant downwards.
    long mask = Long.highestOneBit(baseCuboidId);
    // Number of significant bits in the parent cuboid id.
    long parentCuboidIdActualLength = (long)Long.SIZE - Long.numberOfLeadingZeros(baseCuboidId);
    // index counts parent dimensions seen so far, i.e. the column position.
    int index = 0;
    for (int i = 0; i < parentCuboidIdActualLength; i++) {
        if ((mask & baseCuboidId) > 0) {
            if ((mask & childCuboidId) == 0) {
                // this dim will be aggregated
                childDimensions = childDimensions.set(index, false);
            }
            index++;
        }
        mask = mask >> 1;
    }

    return Pair.newPair(childDimensions, measureColumns);
}
}
/**
 * Inserts {@code value} into the tree-backed digest, growing the tree's
 * capacity on demand and compressing to keep its node count bounded.
 *
 * @param value the value to record; must lie in {@code 0..Long.MAX_VALUE/2}
 * @throws IllegalArgumentException if {@code value} is outside that range
 */
@Override
public void offer(long value) {
    if (value < 0 || value > Long.MAX_VALUE / 2) {
        throw new IllegalArgumentException("Can only accept values in the range 0.." + Long.MAX_VALUE / 2 + ", got " + value);
    }
    // Rebuild if the value is too large for the current tree height
    // (next power of two above value; safe because value <= MAX_VALUE/2,
    // so the shift cannot overflow).
    if (value >= capacity) {
        rebuildToCapacity(Long.highestOneBit(value) << 1);
    }

    long leaf = value2leaf(value);
    node2count.addTo(leaf, 1);
    size++;
    // Always compress at the inserted node, and recompress fully
    // if the tree becomes too large.
    // This is one sensible strategy which both is fast and keeps
    // the tree reasonably small (within the theoretical bound of 3k nodes)
    compressUpward(leaf);
    if (node2count.size() > 3 * compressionFactor) {
        compressFully();
    }
}
/**
 * Encodes the row-key body of {@code childCuboid} into {@code newKeyBodyBuf}
 * by copying, from the parent cuboid's split key parts, only the columns the
 * child cuboid retains, then fills in the row-key header.
 *
 * @param parentCuboid  the cuboid the split buffers were produced from
 * @param childCuboid   the cuboid whose key is being built (bit subset of parent)
 * @param splitBuffers  the parent row key split into per-column byte ranges
 * @param newKeyBodyBuf the output buffer receiving header plus retained columns
 */
private void buildKeyInternal(Cuboid parentCuboid, Cuboid childCuboid, ByteArray[] splitBuffers, ByteArray newKeyBodyBuf) {
    RowKeyEncoder rowkeyEncoder = rowKeyEncoderProvider.getRowkeyEncoder(childCuboid);

    // rowkey columns
    // Scan the parent's dimension bits from the most significant downwards.
    long mask = Long.highestOneBit(parentCuboid.getId());
    long parentCuboidId = parentCuboid.getId();
    long childCuboidId = childCuboid.getId();
    // Number of significant bits in the parent cuboid id.
    long parentCuboidIdActualLength = (long)Long.SIZE - Long.numberOfLeadingZeros(parentCuboid.getId());
    int index = rowKeySplitter.getBodySplitOffset(); // skip shard and cuboidId
    int offset = RowConstants.ROWKEY_SHARDID_LEN + RowConstants.ROWKEY_CUBOIDID_LEN; // skip shard and cuboidId
    for (int i = 0; i < parentCuboidIdActualLength; i++) {
        if ((mask & parentCuboidId) > 0) {// if the this bit position equals
            // 1
            if ((mask & childCuboidId) > 0) {// if the child cuboid has this
                // column
                System.arraycopy(splitBuffers[index].array(), splitBuffers[index].offset(), newKeyBodyBuf.array(), offset, splitBuffers[index].length());
                offset += splitBuffers[index].length();
            }
            // index advances only over columns the parent actually has.
            index++;
        }
        mask = mask >> 1;
    }

    rowkeyEncoder.fillHeader(newKeyBodyBuf.array());
}
// NOTE(review): fragment — the loop opened here continues beyond this chunk.
// Sets up a most-significant-first scan over the base cuboid's significant bits.
KylinConfig kylinConf = cubeSegment.getConfig();
// Start probing at the base cuboid's most significant dimension bit.
long mask = Long.highestOneBit(baseCuboidId);
// Number of significant bits in the base cuboid id.
long parentCuboidIdActualLength = (long) Long.SIZE - Long.numberOfLeadingZeros(baseCuboidId);
for (int i = 0; i < parentCuboidIdActualLength; i++) {
/**
 * Splits {@code groupAllBitMask} into an array of its individual set bits,
 * ordered from the most significant bit to the least significant.
 *
 * @param groupAllBitMask the combined bit mask
 * @return one single-bit mask per set bit, highest first
 */
private long[] bits(long groupAllBitMask) {
    long[] result = new long[Long.bitCount(groupAllBitMask)];
    long remaining = groupAllBitMask;
    // Peel off the highest remaining bit until the mask is exhausted.
    for (int idx = 0; idx < result.length; idx++) {
        long top = Long.highestOneBit(remaining);
        result[idx] = top;
        remaining ^= top;
    }
    return result;
}
/**
 * Creates a paged open-addressing hash map sized to hold {@code capacity}
 * entries without exceeding {@code maxLoadFactor}.
 *
 * @param capacity      the expected number of entries (>= 0)
 * @param maxLoadFactor the maximum load factor, strictly between 0 and 1
 * @param bigArrays     the array allocator backing the map's storage
 * @throws IllegalArgumentException if capacity or maxLoadFactor is out of range
 */
AbstractPagedHashMap(long capacity, float maxLoadFactor, BigArrays bigArrays) {
    if (capacity < 0) {
        throw new IllegalArgumentException("capacity must be >= 0");
    }
    if (maxLoadFactor <= 0 || maxLoadFactor >= 1) {
        throw new IllegalArgumentException("maxLoadFactor must be > 0 and < 1");
    }
    this.bigArrays = bigArrays;
    this.maxLoadFactor = maxLoadFactor;
    // Minimum bucket count that keeps the table at or under the load factor.
    long buckets = 1L + (long) (capacity / maxLoadFactor);
    buckets = Math.max(1, Long.highestOneBit(buckets - 1) << 1); // next power of two
    assert buckets == Long.highestOneBit(buckets);
    maxSize = (long) (buckets * maxLoadFactor);
    assert maxSize >= capacity;
    size = 0;
    // Power-of-two bucket count lets (hash & mask) stand in for modulo.
    mask = buckets - 1;
}
int findMaxValue() { // find the last index that is initialized int index = buckets.length() - 1; while (index >= 0) { if (buckets.get(index) != null) break; index--; } // if no buckets are initialized, then return -1 ( meaning set is empty) if (index < 0) return -1; // find the highest bit in indexAtLong to see which is last long init in bucket int highestBitSetInIndexAtLong = 63 - Long.numberOfLeadingZeros(Long.highestOneBit(buckets.get(index).idx)); long[] longs = buckets.get(index).longs; long value = longs[longs.length - 1]; long highestBitSetInLong = 63 - Long.numberOfLeadingZeros(Long.highestOneBit(value)); return (int) ((index << BUCKET_SHIFT) + (highestBitSetInIndexAtLong << 6) + highestBitSetInLong); }
/**
 * Builds the row key of {@code childCuboid} into {@code keyBuf} by writing the
 * child cuboid id and then copying, from the parent key's split parts, only
 * the columns the child cuboid retains.
 *
 * @param parentCuboid the cuboid the split buffers were produced from
 * @param childCuboid  the cuboid whose key is being built (bit subset of parent)
 * @param splitBuffers the parent row key split into per-column byte ranges
 * @return the total number of bytes written into {@code keyBuf}
 */
private int buildKey(Cuboid parentCuboid, Cuboid childCuboid, SplittedBytes[] splitBuffers) {
    int offset = 0;

    // cuboid id
    System.arraycopy(childCuboid.getBytes(), 0, keyBuf, offset, childCuboid.getBytes().length);
    offset += childCuboid.getBytes().length;

    // rowkey columns
    // Scan the parent's dimension bits from the most significant downwards.
    long mask = Long.highestOneBit(parentCuboid.getId());
    long parentCuboidId = parentCuboid.getId();
    long childCuboidId = childCuboid.getId();
    // Number of significant bits in the parent cuboid id.
    long parentCuboidIdActualLength = Long.SIZE - Long.numberOfLeadingZeros(parentCuboid.getId());
    int index = 1; // skip cuboidId
    for (int i = 0; i < parentCuboidIdActualLength; i++) {
        if ((mask & parentCuboidId) > 0) {// if the this bit position equals
            // 1
            if ((mask & childCuboidId) > 0) {// if the child cuboid has this
                // column
                System.arraycopy(splitBuffers[index].value, 0, keyBuf, offset, splitBuffers[index].length);
                offset += splitBuffers[index].length;
            }
            index++;
        }
        mask = mask >> 1;
    }
    return offset;
}