/**
 * Delegates the value count straight to the wrapped vector.
 *
 * @param valueCount number of values the underlying vector should report
 */
public void setValueCount(int valueCount) {
  vector.setValueCount(valueCount);
}
/**
 * Propagates the batch record count to every vector this operator populated:
 * the allocation vectors, the flatten vector (when present), and any complex
 * writers created for complex output types.
 *
 * @param count number of records in the outgoing batch
 */
private void setValueCount(int count) {
  for (ValueVector vector : allocationVectors) {
    vector.setValueCount(count);
  }
  if (flattenVector != null) {
    flattenVector.setValueCount(count);
  }
  if (complexWriters != null) {
    for (ComplexWriter complexWriter : complexWriters) {
      complexWriter.setValueCount(count);
    }
  }
}
/**
 * Stamps the row count on every output vector: all column-family vectors
 * plus the optional row-key vector.
 *
 * @param count number of rows in the outgoing batch
 */
private void setOutputRowCount(int count) {
  for (ValueVector familyVector : familyVectorMap.values()) {
    familyVector.setValueCount(count);
  }
  if (rowKeyVector != null) {
    rowKeyVector.setValueCount(count);
  }
}
/**
 * Records how many values were written into each allocation vector and
 * complex writer for the current batch.
 *
 * @param numRecords record count of the outgoing batch
 */
private void setValueCount(int numRecords) {
  for (ValueVector vector : allocationVectors) {
    vector.setValueCount(numRecords);
  }
  for (final ComplexWriter complexWriter : complexWriters) {
    complexWriter.setValueCount(numRecords);
  }
}
/**
 * Applies the batch record count to all allocation vectors and complex
 * writers.
 *
 * @param count record count of the outgoing batch
 */
private void setValueCount(final int count) {
  for (final ValueVector vector : allocationVectors) {
    vector.setValueCount(count);
  }
  for (final ComplexWriter complexWriter : complexWriters) {
    complexWriter.setValueCount(count);
  }
}
/**
 * Sets the value count of every vector in the hash-table container to the
 * number of occupied slots (highest occupied index plus one).
 */
private void setValueCount() {
  for (VectorWrapper<?> wrapper : htContainer) {
    wrapper.getValueVector().setValueCount(maxOccupiedIdx + 1);
  }
}
/**
 * Applies the given record count to every vector in the outgoing container.
 *
 * @param count record count of the outgoing batch
 */
private void setValueCount(int count) {
  for (VectorWrapper<?> wrapper : outgoing) {
    wrapper.getValueVector().setValueCount(count);
  }
}
/**
 * Pushes the value count down to every child vector, then records it on the
 * enclosing struct vector itself.
 *
 * @param valueCount number of values in this struct vector
 */
@Override
public void setValueCount(int valueCount) {
  for (final ValueVector child : getChildren()) {
    child.setValueCount(valueCount);
  }
  NonNullableStructVector.this.valueCount = valueCount;
}
/**
 * Stamps the value count on every vector in this container (skipped entirely
 * for an empty batch) and records the container's record count.
 *
 * @param records record count to apply
 * @return the same {@code records} value, for caller convenience
 */
public int setAllCount(int records) {
  if (records != 0) {
    for (VectorWrapper<?> wrapper : this) {
      wrapper.getValueVector().setValueCount(records);
    }
  }
  setRecordCount(records);
  return records;
}
@Override public int outputBatch(int batchIndex) { assert batchIndex <= batchHolders.size(); final BatchHolder valueHolder = batchHolders.get(batchIndex); final int recordCount = valueHolder.getRecordCount(); allocateOutgoing(recordCount); valueHolder.outputValues(); htable.outputKeys(batchIndex, outContainer); // set the value count for outgoing batch value vectors for (VectorWrapper<?> v : outContainer) { v.getValueVector().setValueCount(recordCount); } return recordCount; }
/**
 * Reads pages until the requested number of records has been consumed or the
 * page reader is exhausted, then stamps the final count on the value vector.
 *
 * @param recordsToReadInThisPass target number of records for this pass
 * @throws IOException if the underlying page reader fails
 */
public void processPages(long recordsToReadInThisPass) throws IOException {
  reset();
  if (recordsToReadInThisPass > 0) {
    // At least one sizing pass runs before the stop conditions are checked.
    do {
      determineSize(recordsToReadInThisPass, 0);
    } while (valuesReadInCurrentPass < recordsToReadInThisPass && pageReader.hasPage());
  }
  valueVec.setValueCount(valuesReadInCurrentPass);
}
@Override protected void readRecords(int valuesToRead) { if (valuesToRead == 0) { return; } // TODO - validate that this works in all cases, it fixes a bug when reading from multiple pages into // a single vector dataReader.valuesReadInCurrentPass = 0; dataReader.readValues(valuesToRead); valuesReadInCurrentPass += valuesToRead; valueVec.setValueCount(repeatedGroupsReadInCurrentPass); valueVec.getDataVector().setValueCount(valuesReadInCurrentPass); }
/**
 * Invoked for a straight aggregate (no group-by expressions) whose input is
 * empty. Builds an outgoing batch with a record count of one: nullable
 * vectors are left untouched so they read as NULL, while required vectors
 * (only produced by count()) are explicitly set to zero because buffers are
 * not zeroed when allocated.
 */
private void constructSpecialBatch() {
  outgoing.allocateNew();
  final List<NamedExpression> exprs = config.getExprs();
  if (outgoing.getNumberOfColumns() != exprs.size()) {
    throw new IllegalStateException();
  }
  int exprIndex = 0;
  for (final VectorWrapper<?> vw : outgoing) {
    final ValueVector vv = vw.getValueVector();
    if (!exprs.isEmpty() && isCount(exprs.get(exprIndex))) {
      // count() produces a required BigInt; force zero rather than relying
      // on uninitialized buffer contents.
      ((BigIntVector) vv).setSafe(0, 0);
    }
    vv.setValueCount(SPECIAL_BATCH_COUNT);
    exprIndex++;
  }
  outgoing.setRecordCount(SPECIAL_BATCH_COUNT);
}
/**
 * Interprets the given expression row by row over the incoming batch,
 * writing each result into the output vector and finishing by stamping the
 * record count.
 *
 * @param recordCount     number of rows to evaluate
 * @param functionContext context supplied to functions during init and eval
 * @param incoming        batch providing the input columns
 * @param outVV           vector receiving the evaluated values
 * @param expr            expression to evaluate
 */
public static void evaluate(int recordCount, FunctionContext functionContext, VectorAccessible incoming,
    ValueVector outVV, LogicalExpression expr) {
  final InitVisitor initVisitor = new InitVisitor(functionContext);
  final EvalVisitor evalVisitor = new EvalVisitor(incoming, functionContext);
  expr.accept(initVisitor, incoming);
  for (int row = 0; row < recordCount; row++) {
    final ValueHolder result = expr.accept(evalVisitor, row);
    TypeHelper.setValueSafe(outVV, row, result);
  }
  outVV.setValueCount(recordCount);
}
/**
 * Produces one outgoing batch: runs every JSON converter over the incoming
 * records, transfers the converted vectors to the outgoing container, and
 * stamps counts. Flips the operator state back to CAN_CONSUME.
 *
 * @return number of records emitted
 * @throws Exception if a converter or transfer fails
 */
@Override
public int outputData() throws Exception {
  state.is(State.CAN_PRODUCE);
  final int records = incoming.getRecordCount();
  for (JsonConverter<?> converter : converters) {
    converter.convert(records);
  }
  for (TransferPair transfer : transfers) {
    transfer.transfer();
    transfer.getTo().setValueCount(records);
  }
  outgoing.setRecordCount(records);
  state = State.CAN_CONSUME;
  return records;
}
/**
 * Assembles a VectorContainer around the supplied vectors, stamping the
 * record count on each vector (skipped for an empty batch) and building a
 * schema with no selection vector.
 *
 * @param recordCount number of records in the batch
 * @param vv          vectors making up the batch
 * @return the populated container
 */
private static VectorContainer createBatch(int recordCount, ValueVector... vv) {
  final VectorContainer container = new VectorContainer();
  if (recordCount != 0) {
    for (ValueVector vector : vv) {
      vector.setValueCount(recordCount);
    }
  }
  container.addCollection(asList(vv));
  container.setRecordCount(recordCount);
  container.buildSchema(SelectionVectorMode.NONE);
  return container;
}
/**
 * Advances the wrapped reader one batch, propagating any schema change,
 * stamping counts on the incoming container, and — when a projector exists —
 * projecting the records into the allocation vectors.
 *
 * @return number of records read in this batch
 */
@Override
public int next() {
  final int recordCount = inner.next();
  if (mutator.isSchemaChanged()) {
    newSchema();
  }
  incoming.setAllCount(recordCount);
  if (DEBUG_PRINT) {
    final FragmentHandle h = context.getFragmentHandle();
    outgoing.buildSchema();
    final String op = String.format("CoercionReader:%d:%d:%d --> (%d), %s", h.getMajorFragmentId(),
        h.getMinorFragmentId(), context.getStats().getOperatorId(), recordCount, outgoing.getSchema());
    System.out.println(op);
    BatchPrinter.printBatch(mutator.getContainer());
  }
  if (projector != null) {
    projector.projectRecords(recordCount);
    for (final ValueVector vector : allocationVectors) {
      vector.setValueCount(recordCount);
    }
  }
  return recordCount;
}
public int next(int desiredCount){ final long termination = Math.min(startIndex + rowCount, index + desiredCount); final int recordsGenerated = (int) (termination - index); // System.out.println(String.format("[next] rowCount: %d, start: %d, termination: %d, records: %d", rowCount, index, termination, recordsGenerated)); if(recordsGenerated < 1){ return 0; } all.allocateNew(); int vectorIndex = 0; for(long i = index; i < termination; i++, vectorIndex++){ generateRecord(i, vectorIndex); for(AbstractRandomInt r : randoms){ r.rowFinished(); } } index += recordsGenerated; returned.setRecordCount(recordsGenerated); for(VectorWrapper<?> w : returned){ w.getValueVector().setValueCount(recordsGenerated); } return recordsGenerated; }
@Override public int outputData() { state.is(State.CAN_PRODUCE); if (!canCopy()) { consolidateIfNecessary(); updateStats(); return 0; } int copied = copier.copy(targetBatchSize); if (copied == 0) { state = State.DONE; return 0; } if (sortState == SortState.COPY_FROM_DISK) { // need to use the copierAllocator for the copy, because the copierAllocator is the one that reserves enough // memory to copy the data. This requires using an intermedate VectorContainer. Now, we need to transfer the // the output data to the output VectorContainer diskRuns.transferOut(output, copied); } for (VectorWrapper<?> w : output) { w.getValueVector().setValueCount(copied); } output.setRecordCount(copied); return copied; }
/**
 * Emits output either by straight transfer (when the whole incoming batch
 * can be passed through untouched) or by copying records in chunks, keeping
 * a copy offset across calls whenever the copier cannot take the full
 * remainder in one pass.
 *
 * @return number of records made available in the output container
 */
@Override
public int outputData() {
  // Fast path: pass the incoming buffers through without copying.
  if (checkForStraightCopy && incoming.getRecordCount() == randomVector.getValueCount()) {
    for (TransferPair pair : transferPairs) {
      pair.transfer();
    }
    output.setRecordCount(incoming.getRecordCount());
    state = State.CAN_CONSUME;
    return incoming.getRecordCount();
  }

  final int recordCount = incoming.getRecordCount() - this.copyOffset;
  final int copiedRecords = copier.copyRecords(copyOffset, recordCount);
  if (copiedRecords < recordCount) {
    // Partial copy: remember where to resume on the next call.
    copyOffset = copyOffset + copiedRecords;
  } else {
    copyOffset = 0;
    state = State.CAN_CONSUME;
  }
  if (copiedRecords > 0) {
    for (VectorWrapper<?> wrapper : output) {
      wrapper.getValueVector().setValueCount(copiedRecords);
    }
  }
  output.setRecordCount(copiedRecords);
  return copiedRecords;
}