/**
 * Creates the lookup-source supplier from the fully built index, records its
 * hash-collision statistics, and stores it as this operator's supplier.
 * May only be called once; fails if a supplier was already set.
 */
private LookupSourceSupplier buildLookupSource()
{
    LookupSourceSupplier supplier = index.createLookupSourceSupplier(
            operatorContext.getSession(),
            hashChannels,
            preComputedHashChannel,
            filterFunctionFactory,
            sortChannel,
            searchFunctionFactories,
            Optional.of(outputChannels));
    hashCollisionsCounter.recordHashCollision(supplier.getHashCollisions(), supplier.getExpectedHashCollisions());
    checkState(lookupSourceSupplier == null, "lookupSourceSupplier is already set");
    this.lookupSourceSupplier = supplier;
    return supplier;
}
/**
 * Processes the incoming page and queues the processor's output into the
 * merging output buffer, then refreshes the output memory accounting.
 */
@Override
public final void addInput(Page page)
{
    checkState(!finishing, "Operator is already finishing");
    requireNonNull(page, "page is null");
    checkState(mergingOutput.needsInput(), "Page buffer is full");

    mergingOutput.addInput(processor.process(
            operatorContext.getSession().toConnectorSession(),
            operatorContext.getDriverContext().getYieldSignal(),
            pageProcessorMemoryContext,
            page));

    // account for both the buffered output and the processor's scratch memory
    long retainedBytes = mergingOutput.getRetainedSizeInBytes() + pageProcessorMemoryContext.getBytes();
    outputMemoryContext.setBytes(retainedBytes);
}
/**
 * Registers a new operator context under the given id.
 *
 * @throws IllegalArgumentException if the id is negative or already registered
 */
public OperatorContext addOperatorContext(int operatorId, PlanNodeId planNodeId, String operatorType)
{
    checkArgument(operatorId >= 0, "operatorId is negative");

    // reject duplicate registrations for the same operator id
    for (OperatorContext existingContext : operatorContexts) {
        checkArgument(operatorId != existingContext.getOperatorId(), "A context already exists for operatorId %s", operatorId);
    }

    OperatorContext newContext = new OperatorContext(
            operatorId,
            planNodeId,
            operatorType,
            this,
            notificationExecutor,
            driverMemoryContext.newMemoryTrackingContext());
    operatorContexts.add(newContext);
    return newContext;
}
/**
 * Creates the operator, building the distinct-marking hash over the types of
 * the channels listed in {@code markDistinctChannels}.
 */
public MarkDistinctOperator(OperatorContext operatorContext, List<Type> types, List<Integer> markDistinctChannels, Optional<Integer> hashChannel, JoinCompiler joinCompiler)
{
    this.operatorContext = requireNonNull(operatorContext, "operatorContext is null");
    requireNonNull(hashChannel, "hashChannel is null");
    requireNonNull(markDistinctChannels, "markDistinctChannels is null");

    // project out the types of the channels participating in distinct marking
    ImmutableList.Builder<Type> distinctTypeBuilder = ImmutableList.builder();
    markDistinctChannels.forEach(channel -> distinctTypeBuilder.add(types.get(channel)));

    this.markDistinctHash = new MarkDistinctHash(
            operatorContext.getSession(),
            distinctTypeBuilder.build(),
            Ints.toArray(markDistinctChannels),
            hashChannel,
            joinCompiler,
            this::updateMemoryReservation);
    this.localUserMemoryContext = operatorContext.localUserMemoryContext();
}
/**
 * Creates the merge-source operator: each local exchange source contributes a
 * page stream, and {@code mergeSortedPages} interleaves them in comparator order.
 */
public LocalMergeSourceOperator(OperatorContext operatorContext, List<LocalExchangeSource> sources, List<Type> types, PageWithPositionComparator comparator)
{
    this.operatorContext = requireNonNull(operatorContext, "operatorContext is null");
    this.sources = requireNonNull(sources, "sources is null");

    // one page producer per exchange source
    List<WorkProcessor<Page>> sourcePages = this.sources.stream()
            .map(LocalExchangeSource::pages)
            .collect(toImmutableList());

    mergedPages = mergeSortedPages(
            sourcePages,
            requireNonNull(comparator, "comparator is null"),
            types,
            operatorContext.aggregateUserMemoryContext(),
            operatorContext.getDriverContext().getYieldSignal());
}
private Page processColumnSource() { DriverYieldSignal yieldSignal = operatorContext.getDriverContext().getYieldSignal(); if (!finishing && !yieldSignal.isSet()) { CursorProcessorOutput output = cursorProcessor.process(operatorContext.getSession().toConnectorSession(), yieldSignal, cursor, pageBuilder); pageSourceMemoryContext.setBytes(cursor.getSystemMemoryUsage()); long bytesProcessed = cursor.getCompletedBytes() - completedBytes; long elapsedNanos = cursor.getReadTimeNanos() - readTimeNanos; operatorContext.recordRawInputWithTiming(bytesProcessed, elapsedNanos); // TODO: derive better values for cursors operatorContext.recordProcessedInput(bytesProcessed, output.getProcessedRows()); completedBytes = cursor.getCompletedBytes(); readTimeNanos = cursor.getReadTimeNanos(); if (output.isNoMoreRows()) { finishing = true; mergingOutput.finish(); } } // only return a page if buffer is full or we are finishing Page page = null; if (!pageBuilder.isEmpty() && (finishing || pageBuilder.isFull())) { page = pageBuilder.build(); pageBuilder.reset(); } outputMemoryContext.setBytes(pageBuilder.getRetainedSizeInBytes()); return page; }
hashChannel, expectedGroups, isDictionaryAggregationEnabled(operatorContext.getSession()), joinCompiler, updateMemory); this.partial = step.isOutputPartial(); this.maxPartialMemory = maxPartialMemory.map(dataSize -> OptionalLong.of(dataSize.toBytes())).orElseGet(OptionalLong::empty); this.systemMemoryContext = operatorContext.newLocalSystemMemoryContext(InMemoryHashAggregationBuilder.class.getSimpleName()); this.localUserMemoryContext = operatorContext.localUserMemoryContext(); this.useSystemMemory = useSystemMemory;
private Page processPageSource() { DriverYieldSignal yieldSignal = operatorContext.getDriverContext().getYieldSignal(); if (!finishing && mergingOutput.needsInput() && !yieldSignal.isSet()) { Page page = pageSource.getNextPage(); finishing = pageSource.isFinished(); pageSourceMemoryContext.setBytes(pageSource.getSystemMemoryUsage()); if (page != null) { page = recordProcessedInput(page); // update operator stats long endCompletedBytes = pageSource.getCompletedBytes(); long endReadTimeNanos = pageSource.getReadTimeNanos(); operatorContext.recordRawInputWithTiming(endCompletedBytes - completedBytes, endReadTimeNanos - readTimeNanos); completedBytes = endCompletedBytes; readTimeNanos = endReadTimeNanos; Iterator<Optional<Page>> output = pageProcessor.process(operatorContext.getSession().toConnectorSession(), yieldSignal, pageProcessorMemoryContext, page); mergingOutput.addInput(output); } if (finishing) { mergingOutput.finish(); } } Page result = mergingOutput.getOutput(); outputMemoryContext.setBytes(mergingOutput.getRetainedSizeInBytes() + pageProcessorMemoryContext.getBytes()); return result; }
/** * Update memory usage. * * @return true if the reservation is within the limit */ // TODO: update in the interface after the new memory tracking framework is landed (#9049) // Essentially we would love to have clean interfaces to support both pushing and pulling memory usage // The following implementation is a hybrid model, where the push model is going to call the pull model causing reentrancy private boolean updateMemoryReservation() { // Operator/driver will be blocked on memory after we call localUserMemoryContext.setBytes(). // If memory is not available, once we return, this operator will be blocked until memory is available. long memorySizeInBytes = groupByHash.map(GroupByHash::getEstimatedSize).orElse(0L) + partitionRowCount.sizeOf(); localUserMemoryContext.setBytes(memorySizeInBytes); // If memory is not available, inform the caller that we cannot proceed for allocation. return operatorContext.isWaitingForMemory().isDone(); }
/**
 * Returns the next loaded page from the underlying page source, or null when
 * no split has been assigned or the source has no page ready.
 * Also updates raw/processed input stats and system memory accounting.
 */
@Override
public Page getOutput()
{
    // no split assigned yet: nothing to read
    if (split == null) {
        return null;
    }
    // lazily open the page source on the first call after the split arrives
    if (source == null) {
        source = pageSourceProvider.createPageSource(operatorContext.getSession(), split, columns);
    }

    Page page = source.getNextPage();
    if (page != null) {
        // assure the page is in memory before handing to another operator
        page = page.getLoadedPage();

        // update operator stats
        long endCompletedBytes = source.getCompletedBytes();
        long endReadTimeNanos = source.getReadTimeNanos();
        operatorContext.recordRawInputWithTiming(endCompletedBytes - completedBytes, endReadTimeNanos - readTimeNanos);
        operatorContext.recordProcessedInput(page.getSizeInBytes(), page.getPositionCount());
        completedBytes = endCompletedBytes;
        readTimeNanos = endReadTimeNanos;
    }

    // updating system memory usage should happen after page is loaded.
    systemMemoryContext.setBytes(source.getSystemMemoryUsage());
    return page;
}
}
if (alwaysRevokeMemory) { driver.getDriverContext().getOperatorContexts().stream() .filter(operatorContext -> operatorContext.getOperatorStats().getRevocableMemoryReservation().getValue() > 0) .forEach(OperatorContext::requestMemoryRevoking);
/**
 * Merges all spilled page streams back into a single hash-sorted stream and
 * hands it to {@code mergeSortedPages} under the merge memory limit.
 * Requires that spilling actually occurred.
 */
private WorkProcessor<Page> mergeFromDisk()
{
    checkState(spiller.isPresent());

    mergeHashSort = Optional.of(new MergeHashSort(operatorContext.newAggregateSystemMemoryContext()));

    // one work processor per spill file
    List<WorkProcessor<Page>> spilledPages = spiller.get().getSpills().stream()
            .map(WorkProcessor::fromIterator)
            .collect(toImmutableList());

    WorkProcessor<Page> mergedSpilledPages = mergeHashSort.get().merge(
            groupByTypes,
            hashAggregationBuilder.buildIntermediateTypes(),
            spilledPages,
            operatorContext.getDriverContext().getYieldSignal());

    return mergeSortedPages(mergedSpilledPages, memoryLimitForMerge);
}
/**
 * Asserts that memory revoking is requested for exactly the given operator
 * contexts: requested for each listed context, and not requested for every
 * other context in {@code allOperatorContexts}.
 */
private void assertMemoryRevokingRequestedFor(OperatorContext... operatorContexts)
{
    ImmutableSet<OperatorContext> revokedSet = ImmutableSet.copyOf(operatorContexts);
    for (OperatorContext revokedContext : revokedSet) {
        assertTrue(revokedContext.isMemoryRevokingRequested(), "expected memory requested for operator " + revokedContext.getOperatorId());
    }
    for (OperatorContext otherContext : Sets.difference(allOperatorContexts, revokedSet)) {
        assertFalse(otherContext.isMemoryRevokingRequested(), "expected memory not requested for operator " + otherContext.getOperatorId());
    }
}
private Optional<ListenableFuture<?>> getBlockedFuture(Operator operator) { ListenableFuture<?> blocked = revokingOperators.get(operator); if (blocked != null) { // We mark operator as blocked regardless of blocked.isDone(), because finishMemoryRevoke has not been called yet. return Optional.of(blocked); } blocked = operator.isBlocked(); if (!blocked.isDone()) { return Optional.of(blocked); } blocked = operator.getOperatorContext().isWaitingForMemory(); if (!blocked.isDone()) { return Optional.of(blocked); } blocked = operator.getOperatorContext().isWaitingForRevocableMemory(); if (!blocked.isDone()) { return Optional.of(blocked); } return Optional.empty(); }
if (operator.getOperatorContext().getDriverContext().getPipelineContext().getPipelineStats().getSystemMemoryReservation().toBytes() > 0) { greaterThanZero = true; break; assertEquals(operator.getOperatorContext().getOperatorStats().getSystemMemoryReservation().toBytes(), 0);
/**
 * Returns true when every sub-stage of this operator's stage reports final
 * stage info (vacuously true when there are no sub-stages).
 */
private boolean isFinalStageInfo(StageInfo stageInfo)
{
    return getSubStagesOf(operatorContext.getDriverContext().getTaskId().getStageId(), stageInfo).stream()
            .allMatch(subStage -> subStage.isFinalStageInfo());
}
memoryPool.reserve(queryId, "test", reservedMemoryInBytes); long oldMemoryUsage = operator.getOperatorContext().getDriverContext().getMemoryUsage(); int oldCapacity = getHashCapacity.apply(operator); long newMemoryUsage = operator.getOperatorContext().getDriverContext().getMemoryUsage(); assertTrue(operator.getOperatorContext().isWaitingForMemory().isDone()); assertFalse(operator.getOperatorContext().isWaitingForMemory().isDone()); long rehashedMemoryUsage = operator.getOperatorContext().getDriverContext().getMemoryUsage(); assertBetweenInclusive(rehashedMemoryUsage * 1.0 / newMemoryUsage, 0.99, 1.01);
/**
 * Snapshots the build-side pages and transfers their estimated memory
 * reservation from the operator to the task context.
 */
NestedLoopJoinPages(List<Page> pages, DataSize estimatedSize, OperatorContext operatorContext)
{
    requireNonNull(pages, "pages is null");
    requireNonNull(operatorContext, "operatorContext is null");
    // defensive immutable copy of the build pages
    this.pages = ImmutableList.copyOf(pages);
    this.taskContext = operatorContext.getDriverContext().getPipelineContext().getTaskContext();
    this.estimatedSize = requireNonNull(estimatedSize, "estimatedSize is null");
    // NOTE(review): accounting moves to the task context here — presumably because
    // these pages are shared beyond this operator's lifetime; confirm with callers
    operatorContext.transferMemoryToTaskContext(estimatedSize.toBytes());
}
t, "Error closing operator %s for task %s", operator.getOperatorContext().getOperatorId(), driverContext.getTaskId()); operator.getOperatorContext().destroy(); t, "Error freeing all allocated memory for operator %s for task %s", operator.getOperatorContext().getOperatorId(), driverContext.getTaskId());
/**
 * Wires up the scan-filter-project operator: it reads from a page source (or
 * record cursor) provided by {@code pageSourceProvider}, runs rows through
 * the cursor/page processors, and builds output pages of the given types.
 *
 * @param operatorContext context for stats, memory, and session access
 * @param sourceId plan node id of the table scan this operator serves
 * @param columns column handles to read from the source
 * @param types output types used to size the page builder
 */
protected ScanFilterAndProjectOperator(
        OperatorContext operatorContext,
        PlanNodeId sourceId,
        PageSourceProvider pageSourceProvider,
        CursorProcessor cursorProcessor,
        PageProcessor pageProcessor,
        Iterable<ColumnHandle> columns,
        Iterable<Type> types)
{
    this.cursorProcessor = requireNonNull(cursorProcessor, "cursorProcessor is null");
    this.pageProcessor = requireNonNull(pageProcessor, "pageProcessor is null");
    this.operatorContext = requireNonNull(operatorContext, "operatorContext is null");
    this.planNodeId = requireNonNull(sourceId, "sourceId is null");
    this.pageSourceProvider = requireNonNull(pageSourceProvider, "pageSourceProvider is null");
    this.types = ImmutableList.copyOf(requireNonNull(types, "types is null"));
    this.columns = ImmutableList.copyOf(requireNonNull(columns, "columns is null"));
    // separate local memory contexts so source and builder usage are tracked independently
    this.pageSourceMemoryContext = operatorContext.getSystemMemoryContext().newLocalMemoryContext();
    this.pageBuilderMemoryContext = operatorContext.getSystemMemoryContext().newLocalMemoryContext();
    // columnar processing toggles are resolved once from session properties
    this.columnarProcessingEnabled = isColumnarProcessingEnabled(operatorContext.getSession());
    this.columnarProcessingDictionaryEnabled = isColumnarProcessingDictionaryEnabled(operatorContext.getSession());
    this.pageBuilder = new PageBuilder(getTypes());
}