/**
 * Sorts this column's values into descending order in place.
 *
 * NOTE(review): this sorts the entire backing array returned by
 * {@code data.elements()}, which may be longer than {@code data.size()}
 * if there is unused capacity — confirm the list is kept trimmed.
 */
@Override
public void sortDescending() {
    int[] values = data.elements();
    IntArrays.parallelQuickSort(values, descendingComparator);
}
/**
 * Sorts this column's values into descending order in place.
 *
 * NOTE(review): sorts the whole backing array from {@code data.elements()};
 * if the list carries slack capacity beyond {@code data.size()}, that slack
 * is sorted too — confirm the list is kept trimmed.
 */
@Override
public void sortDescending() {
    int[] values = data.elements();
    IntArrays.parallelQuickSort(values, descendingIntComparator);
}
/**
 * Sorts this column's values into ascending order in place.
 *
 * NOTE(review): sorts the whole backing array from {@code data.elements()};
 * assumes there is no unused capacity beyond {@code data.size()} — confirm
 * the list is kept trimmed.
 */
@Override
public void sortAscending() {
    int[] values = data.elements();
    IntArrays.parallelQuickSort(values);
}
/**
 * Sorts this column's values into ascending order in place.
 *
 * NOTE(review): sorts the whole backing array from {@code data.elements()};
 * assumes there is no unused capacity beyond {@code data.size()} — confirm
 * the list is kept trimmed.
 */
@Override
public void sortAscending() {
    int[] values = data.elements();
    Arrays.parallelSort(values);
}
/**
 * Sorts this column's values into descending order in place by applying the
 * reverse comparator to the backing array.
 *
 * NOTE(review): the backing array from {@code data.elements()} may exceed
 * {@code data.size()} if there is slack capacity — confirm the list is trimmed.
 */
@Override
public void sortDescending() {
    int[] values = data.elements();
    IntArrays.parallelQuickSort(values, reverseIntComparator);
}
/**
 * Partitions {@code page} across the spill partitions, spilling the rows whose
 * partition matches {@code spillPartitionMask}, and returns the rows that were
 * not spilled together with a future tracking the flush of any full builders.
 *
 * @throws IllegalArgumentException if the page's channel count does not match this spiller's types
 * @throws IllegalStateException if reading has already started
 */
@Override
public synchronized PartitioningSpillResult partitionAndSpill(Page page, IntPredicate spillPartitionMask)
{
    requireNonNull(page, "page is null");
    requireNonNull(spillPartitionMask, "spillPartitionMask is null");
    checkArgument(page.getChannelCount() == types.size(), "Wrong page channel count, expected %s but got %s", types.size(), page.getChannelCount());
    checkState(!readingStarted, "reading already started");

    IntArrayList unspilled = partitionPage(page, spillPartitionMask);
    ListenableFuture<?> flushFuture = flushFullBuilders();
    Page unspilledPage = page.getPositions(unspilled.elements(), 0, unspilled.size());
    return new PartitioningSpillResult(flushFuture, unspilledPage);
}
@Override public void load(LazyBlock lazyBlock) { if (block == null) { return; } lazyBlock.setBlock(block.getPositions(rowsToKeep.elements(), 0, rowsToKeep.size())); // clear reference to loader to free resources, since load was successful block = null; } }
private static Block[] createKeyValueBlock(int positionCount, Block keys, Block values, int[] lengths) { if (!hasNull(keys)) { return new Block[] {keys, values}; } // // Map entries with a null key are skipped in the Hive ORC reader, so skip them here also // IntArrayList nonNullPositions = new IntArrayList(keys.getPositionCount()); int position = 0; for (int mapIndex = 0; mapIndex < positionCount; mapIndex++) { int length = lengths[mapIndex]; for (int entryIndex = 0; entryIndex < length; entryIndex++) { if (keys.isNull(position)) { // key is null, so remove this entry from the map lengths[mapIndex]--; } else { nonNullPositions.add(position); } position++; } } Block newKeys = keys.copyPositions(nonNullPositions.elements(), 0, nonNullPositions.size()); Block newValues = values.copyPositions(nonNullPositions.elements(), 0, nonNullPositions.size()); return new Block[] {newKeys, newValues}; }
/**
 * Returns a new IntColumn with each value of this column narrowed to an int,
 * truncating where necessary.
 *
 * This is a narrowing primitive conversion: overall magnitude, precision, and
 * range may be lost. A value too small to represent (a large-magnitude negative
 * or negative infinity) becomes the smallest int; a value too large (a
 * large-magnitude positive or positive infinity) becomes the largest int.
 * Despite any overflow or underflow, the conversion never throws at run time.
 *
 * A missing value in the receiver is converted to a missing value in the result.
 */
@Override
public IntColumn asIntColumn() {
    IntArrayList converted = new IntArrayList();
    for (double value : data) {
        converted.add((int) value);
    }
    converted.trim();
    return IntColumn.create(this.name(), converted.elements());
}
/**
 * Returns a new IntColumn with each value of this column narrowed to an int,
 * truncating where necessary.
 *
 * This is a narrowing primitive conversion: overall magnitude, precision, and
 * range may be lost. A value too small to represent (a large-magnitude negative
 * or negative infinity) becomes the smallest int; a value too large (a
 * large-magnitude positive or positive infinity) becomes the largest int.
 * Despite any overflow or underflow, the conversion never throws at run time.
 *
 * A missing value in the receiver is converted to a missing value in the result.
 */
@Override
public IntColumn asIntColumn() {
    IntArrayList converted = new IntArrayList();
    for (float value : data) {
        converted.add((int) value);
    }
    converted.trim();
    return IntColumn.create(this.name(), converted.elements());
}
/**
 * Returns a new IntColumn containing a value for each value in this column.
 *
 * This is a narrowing conversion of a signed integer: it simply discards all
 * but the 32 lowest-order bits, which may lose magnitude information and may
 * flip the sign of the result. In other words, values outside the
 * [Integer.MIN_VALUE, Integer.MAX_VALUE] range do not convert meaningfully.
 * Despite any overflow or underflow, the conversion never throws at run time.
 *
 * A missing value in the receiver is converted to a missing value in the result.
 */
@Override
public IntColumn asIntColumn() {
    IntArrayList converted = new IntArrayList();
    for (long value : data) {
        converted.add((int) value);
    }
    converted.trim();
    return IntColumn.create(this.name(), converted.elements());
}
// Materialize only the retained row positions for this channel's block.
// NOTE(review): assumes rowsToKeep holds positions valid for this block —
// confirm against the surrounding filtering logic (not visible here).
adaptedBlocks[i] = block.getPositions(rowsToKeep.elements(), 0, rowsToKeep.size());
/**
 * Routes each row of the incoming page to its partition and appends a copied
 * single-partition page to the matching buffer, charging its retained size to
 * the memory manager (credited back when the page reference is released).
 *
 * NOTE(review): outputBlocks is reused across partitions while each pageSplit
 * is handed to a buffer asynchronously — this is only safe if the Page
 * constructor snapshots (or defensively copies) the block array it receives;
 * confirm Page's constructor semantics.
 */
@Override public synchronized void accept(Page page) { // reset the assignment lists for (IntList partitionAssignment : partitionAssignments) { partitionAssignment.clear(); } // assign each row to a partition for (int position = 0; position < page.getPositionCount(); position++) { int partition = partitionGenerator.getPartition(page, position); partitionAssignments[partition].add(position); } // build a page for each partition Block[] outputBlocks = new Block[page.getChannelCount()]; for (int partition = 0; partition < buffers.size(); partition++) { IntArrayList positions = partitionAssignments[partition]; if (!positions.isEmpty()) { for (int i = 0; i < page.getChannelCount(); i++) { outputBlocks[i] = page.getBlock(i).copyPositions(positions.elements(), 0, positions.size()); } Page pageSplit = new Page(positions.size(), outputBlocks); memoryManager.updateMemoryUsage(pageSplit.getRetainedSizeInBytes()); buffers.get(partition).accept(new PageReference(pageSplit, 1, () -> memoryManager.updateMemoryUsage(-pageSplit.getRetainedSizeInBytes()))); } } }
// NOTE(review): exposes the selected positions minus 3 entries at each end
// (offset 3, length size - 6). The origin of these magic numbers is not
// visible here — confirm against the surrounding fixture/caller.
return SelectedPositions.positionsList(selectedPositions.elements(), 3, selectedPositions.size() - 6);
/**
 * Converts the compact int-based representation into the full long-based one,
 * then clears {@code compactImpl} so exactly one representation is live.
 */
private void decompact() {
    assert compactImpl != null && fullImpl == null;
    // Presize by the logical element count, not the (possibly larger) backing
    // array length, to avoid over-allocating the new list.
    fullImpl = new LongArrayList(compactImpl.size());
    for (IntIterator iter = compactImpl.iterator(); iter.hasNext();) {
        fullImpl.add(iter.nextInt());
    }
    compactImpl = null;
}
/**
 * Returns the successors of the current node as an array, or an empty array if
 * iteration over the current node's successors has not reached the expected point.
 *
 * NOTE(review): the returned backing array may be longer than the actual number
 * of successors — callers presumably use the node's outdegree for the valid
 * prefix length; confirm the interface contract.
 *
 * @throws IllegalStateException if there is no current node
 */
@Override
public int[] successorArray() {
    if (curr == -1) {
        throw new IllegalStateException();
    }
    if (curr == following) {
        return successors.elements();
    }
    return IntArrays.EMPTY_ARRAY;
}
/**
 * Estimates the memory footprint of the given map: tree-map overhead,
 * per-entry overhead, the key strings, and each int-array list (charged by its
 * backing-array capacity, 4 bytes per slot).
 */
private static long usage(SortedMap<String, IntArrayList> map) {
    long size = TREE_MAP_USAGE;
    for (final Map.Entry<String, IntArrayList> e : map.entrySet()) {
        size += TREE_MAP_ENTRY_USAGE;
        size += usage(e.getKey());
        // 4L forces long arithmetic: 4 * length would overflow int for
        // backing arrays larger than ~536M elements.
        size += INT_ARRAY_LIST_USAGE + 4L * e.getValue().elements().length;
    }
    return size;
}
/**
 * Partitions the given page, spills the rows selected by
 * {@code spillPartitionMask}, and returns the remaining (unspilled) rows along
 * with a future that completes when any full builders have been flushed.
 *
 * @throws IllegalArgumentException if the page's channel count does not match this spiller's types
 * @throws IllegalStateException if reading has already started
 */
@Override
public synchronized PartitioningSpillResult partitionAndSpill(Page page, IntPredicate spillPartitionMask)
{
    requireNonNull(page, "page is null");
    requireNonNull(spillPartitionMask, "spillPartitionMask is null");
    checkArgument(page.getChannelCount() == types.size(), "Wrong page channel count, expected %s but got %s", types.size(), page.getChannelCount());
    checkState(!readingStarted, "reading already started");

    IntArrayList retainedPositions = partitionPage(page, spillPartitionMask);
    ListenableFuture<?> spillFuture = flushFullBuilders();
    Page retained = page.getPositions(retainedPositions.elements(), 0, retainedPositions.size());
    return new PartitioningSpillResult(spillFuture, retained);
}
/**
 * Converts the compact int-based representation into the full long-based one,
 * then clears {@code compactImpl} so exactly one representation is live.
 */
private void decompact() {
    assert compactImpl != null && fullImpl == null;
    // Presize by the logical element count, not the (possibly larger) backing
    // array length, to avoid over-allocating the new list.
    fullImpl = new LongArrayList(compactImpl.size());
    for (IntIterator iter = compactImpl.iterator(); iter.hasNext();) {
        fullImpl.add(iter.nextInt());
    }
    compactImpl = null;
}
/**
 * Converts the compact int-based representation into the full long-based one,
 * then clears {@code compactImpl} so exactly one representation is live.
 */
private void decompact() {
    assert compactImpl != null && fullImpl == null;
    // Presize by the logical element count, not the (possibly larger) backing
    // array length, to avoid over-allocating the new list.
    fullImpl = new LongArrayList(compactImpl.size());
    for (IntIterator iter = compactImpl.iterator(); iter.hasNext();) {
        fullImpl.add(iter.nextInt());
    }
    compactImpl = null;
}