/** Allocates fresh buffers for the single backing vector. */
@Override
public void allocate() {
  vector.allocateNew();
}
/**
 * Allocates new buffers for every vector in the map using each vector's
 * default sizing.
 *
 * @param vectorMap vectors keyed by field name
 * @throws OutOfMemoryException if the allocator cannot satisfy a request
 */
@Override
public void allocate(Map<String, ValueVector> vectorMap) throws OutOfMemoryException {
  for (final ValueVector vector : vectorMap.values()) {
    vector.allocateNew();
  }
}
/**
 * Default-sizes and allocates buffers for all vectors in {@code vectorMap}.
 *
 * @param vectorMap vectors keyed by field name
 * @throws OutOfMemoryException if any allocation fails
 */
@Override
public void allocate(Map<String, ValueVector> vectorMap) throws OutOfMemoryException {
  for (final ValueVector valueVector : vectorMap.values()) {
    valueVector.allocateNew();
  }
}
/**
 * Allocates each vector in the map with its default allocation size.
 *
 * @param vectorMap vectors keyed by field name
 * @throws OutOfMemoryException if the allocator runs out of memory
 */
@Override
public void allocate(Map<String, ValueVector> vectorMap) throws OutOfMemoryException {
  for (final ValueVector vv : vectorMap.values()) {
    vv.allocateNew();
  }
}
/**
 * Allocates new buffers for every vector in the supplied map.
 *
 * @param vectorMap vectors keyed by field name
 * @throws OutOfMemoryException if any allocation fails
 */
@Override
public void allocate(Map<String, ValueVector> vectorMap) throws OutOfMemoryException {
  for (final ValueVector vector : vectorMap.values()) {
    vector.allocateNew();
  }
}
}
/** Allocates fresh buffers for every vector wrapped by this container. */
public void allocateNew() {
  for (final VectorWrapper<?> wrapper : wrappers) {
    wrapper.getValueVector().allocateNew();
  }
}
private void doAlloc() { //Allocate vv in the allocationVectors. for (ValueVector v : this.allocationVectors) { v.allocateNew(); } //Allocate vv for complexWriters. if (complexWriters == null) { return; } for (ComplexWriter writer : complexWriters) { writer.allocate(); } }
/**
 * Allocates the exact amount if v is fixed width, otherwise falls back to dynamic allocation.
 *
 * @param v value vector we are trying to allocate
 * @param valueCount size we are trying to allocate
 * @throws org.apache.arrow.memory.OutOfMemoryException if it can't allocate the memory
 */
public static void allocateNew(ValueVector v, int valueCount) {
  if (v instanceof FixedWidthVector) {
    // Fixed-width vectors can be sized exactly for the requested count.
    ((FixedWidthVector) v).allocateNew(valueCount);
    return;
  }
  // Variable-width / complex vectors: use the vector's own default sizing.
  v.allocateNew();
}
}
/**
 * Allocates the map's vectors, sizing fixed-width vectors exactly to the
 * context's target batch size and using default sizing for the rest.
 *
 * @param vectorMap vectors keyed by field name
 * @throws OutOfMemoryException if any allocation fails
 */
@Override
public void allocate(Map<String, ValueVector> vectorMap) throws OutOfMemoryException {
  for (final ValueVector vector : vectorMap.values()) {
    if (vector instanceof FixedWidthVector) {
      ((FixedWidthVector) vector).allocateNew(context.getTargetBatchSize());
    } else {
      vector.allocateNew();
    }
  }
}
/**
 * Walks the map and allocates a fresh set of buffers for each vector.
 *
 * @param vectorMap vectors keyed by field name
 * @throws OutOfMemoryException if the allocator cannot satisfy a request
 */
@Override
public void allocate(Map<String, ValueVector> vectorMap) throws OutOfMemoryException {
  for (final ValueVector current : vectorMap.values()) {
    current.allocateNew();
  }
}
/**
 * Mirrors the outer container's fields into the internal container,
 * allocating a vector for each one.
 */
private void allocateInternal() {
  for (final VectorWrapper<?> wrapper : container) {
    internal.addOrGet(wrapper.getField()).allocateNew();
  }
}
/**
 * Returns the vector registered under {@code field}'s lower-cased name, creating,
 * allocating, and registering a new one when no vector of the requested class exists.
 *
 * @param field the field whose vector is requested
 * @param clazz the concrete vector class the caller expects
 * @return the existing or newly created vector, cast to {@code clazz}
 * @throws SchemaChangeException if the vector created for {@code field} is not
 *         assignable to {@code clazz}
 */
@Override
public <T extends ValueVector> T addField(Field field, Class<T> clazz) throws SchemaChangeException {
  // Lookup is case-insensitive via lower-casing the field name.
  ValueVector v = fieldVectorMap.get(field.getName().toLowerCase());
  if (v == null || v.getClass() != clazz) {
    // Field does not exist add it to the map
    v = TypeHelper.getNewVector(field, context.getAllocator());
    if (!clazz.isAssignableFrom(v.getClass())) {
      throw new SchemaChangeException(String.format(
          "Class %s was provided, expected %s.",
          clazz.getSimpleName(), v.getClass().getSimpleName()));
    }
    v.allocateNew();
    // NOTE(review): when an existing vector of a different class is replaced here,
    // the old vector is not closed/cleared in this method — confirm the caller
    // releases it, otherwise its buffers leak.
    fieldVectorMap.put(field.getName().toLowerCase(), v);
  }
  return clazz.cast(v);
}
/**
 * For every field visible in the outer container, adds (or fetches) the
 * matching vector in the internal container and allocates its buffers.
 */
private void allocateInternal() {
  for (final VectorWrapper<?> w : container) {
    final ValueVector internalVector = internal.addOrGet(w.getField());
    internalVector.allocateNew();
  }
}
private void allocateVectors(int targetRecordCount) { boolean memoryAllocated = false; double density = lastSuccessfulDensity; while (!memoryAllocated) { try { for (VectorWrapper<?> w : outgoing) { final ValueVector v = w.getValueVector(); if (v instanceof DensityAwareVector) { ((DensityAwareVector) v).setInitialCapacity(targetRecordCount, density); } else { v.setInitialCapacity(targetRecordCount); } v.allocateNew(); } memoryAllocated = true; lastSuccessfulDensity = density; } catch (OutOfMemoryException ex) { // halve the density and try again density = density / 2; if (density < 0.01) { logger.debug("PriorityQueueCopierTemplate ran out of memory to allocate outgoing batch. " + "Records: {}, density: {}", targetRecordCount, density); throw ex; } // try allocating again with lower density logger.debug("PriorityQueueCopierTemplate: Ran out of memory. Retrying allocation with lower density."); } } }
/**
 * Allocates the map's vectors with an initial capacity estimated from the
 * reader's input size and the configured per-record size estimates, capped at
 * {@code numRowsPerBatch}. Falls back to {@code numRowsPerBatch} when no reader
 * input or record-size estimate is available.
 *
 * @param vectorMap vectors keyed by field name
 * @throws OutOfMemoryException if any allocation fails
 */
@Override
public void allocate(Map<String, ValueVector> vectorMap) throws OutOfMemoryException {
  // Default: the configured rows-per-batch cap.
  int estimatedRecordCount = (int) numRowsPerBatch;
  if (reader != null && reader.getInput() != null && vectorMap.size() > 0) {
    final OptionManager options = context.getOptions();
    final int listSizeEstimate = (int) options.getOption(ExecConstants.BATCH_LIST_SIZE_ESTIMATE);
    final int varFieldSizeEstimate =
        (int) options.getOption(ExecConstants.BATCH_VARIABLE_FIELD_SIZE_ESTIMATE);
    final int estimatedRecordSize =
        BatchSchema.estimateRecordSize(vectorMap, listSizeEstimate, varFieldSizeEstimate);
    if (estimatedRecordSize > 0) {
      // Rows that fit in the input, but never more than the batch cap.
      estimatedRecordCount =
          (int) Math.min(reader.getInput().length / estimatedRecordSize, numRowsPerBatch);
    }
  }
  for (final ValueVector vector : vectorMap.values()) {
    vector.setInitialCapacity(estimatedRecordCount);
    vector.allocateNew();
  }
}
/**
 * Allocates {@code v} using precomputed sizing: exact value counts for
 * fixed-width vectors, byte budgets for variable-width vectors, and child
 * counts for repeated vectors; any other vector falls back to default sizing.
 *
 * @param v vector to allocate
 * @param valueCount number of top-level values
 * @param bytesPerValue estimated bytes per value (variable-width only)
 * @param childValCount number of child values (repeated vectors only)
 */
public static void allocatePrecomputedChildCount(
    ValueVector v, int valueCount, int bytesPerValue, int childValCount) {
  if (v instanceof FixedWidthVector) {
    ((FixedWidthVector) v).allocateNew(valueCount);
    return;
  }
  if (v instanceof VariableWidthVector) {
    ((VariableWidthVector) v).allocateNew(valueCount * bytesPerValue, valueCount);
    return;
  }
  if (v instanceof RepeatedFixedWidthVectorLike) {
    ((RepeatedFixedWidthVectorLike) v).allocateNew(valueCount, childValCount);
    return;
  }
  if (v instanceof RepeatedVariableWidthVectorLike) {
    ((RepeatedVariableWidthVectorLike) v)
        .allocateNew(childValCount * bytesPerValue, valueCount, childValCount);
    return;
  }
  v.allocateNew();
}
@Override boolean materialize(final NamedExpression ne, final VectorContainer batch, final ClassProducer producer) throws SchemaChangeException { final FunctionCall call = (FunctionCall) ne.getExpr(); final LogicalExpression input = producer.materialize(call.args.get(0), batch); if (input == null) { return false; } // add corresponding ValueVector to container final Field output = input.getCompleteType().toField(ne.getRef()); batch.addOrGet(output).allocateNew(); final TypedFieldId outputId = batch.getValueVectorId(ne.getRef()); writeInputToLead = new ValueVectorWriteExpression(outputId, input, true); return true; }
/**
 * Adds an output vector named after {@code ne}'s reference with this
 * expression's major type, allocates it, and records its field id.
 *
 * @return always true
 */
@Override
boolean materialize(final NamedExpression ne, final VectorContainer batch, ClassProducer producer)
    throws SchemaChangeException {
  final String outputName = ne.getRef().getAsNamePart().getName();
  final Field outputField = MajorTypeHelper.getFieldForNameAndMajorType(outputName, getMajorType());
  batch.addOrGet(outputField).allocateNew();
  fieldId = batch.getValueVectorId(ne.getRef());
  return true;
}
/**
 * Returns a writer for the requested minor type, adjusting this writer's state
 * as needed: delegates to the union writer when already in UNION state, creates
 * and binds a typed vector when still UNTYPED, and promotes to a union when the
 * requested type differs from the current one.
 *
 * @param type requested minor type (may be null when still untyped)
 * @param arrowType concrete Arrow type; derived from {@code type} when null
 * @return the writer to use, or null when untyped and no type was requested
 */
protected FieldWriter getWriter(MinorType type, ArrowType arrowType) {
  if (state == State.UNION) {
    ((UnionWriter) writer).getWriter(type);
  } else if (state == State.UNTYPED) {
    if (type == null) {
      // No concrete type requested while still untyped — nothing to bind.
      // NOTE(review): callers appear to tolerate a null writer here; confirm.
      return null;
    }
    if (arrowType == null) {
      arrowType = type.getType();
    }
    // First concrete type seen: create the list's data vector and bind a writer to it.
    ValueVector v = listVector.addOrGetVector(FieldType.nullable(arrowType)).getVector();
    v.allocateNew();
    setWriter(v, arrowType);
    writer.setPosition(position);
  } else if (type != this.type) {
    // A second, different type arrived: promote the single-type writer to a union.
    promoteToUnion();
    ((UnionWriter) writer).getWriter(type);
  }
  return writer;
}
/**
 * Materializes {@code expr} against the generated input, evaluates it through
 * the interpreter into a single-row output vector, and checks the result
 * against {@code expected}.
 */
private void testInterp(Project project, LogicalExpression expr, Table input, Table expected)
    throws Exception {
  final BufferAllocator allocator =
      getTestAllocator().newChildAllocator("interp", 0, Long.MAX_VALUE);
  try (OperatorContextImpl context = testContext.getNewOperatorContext(allocator, project, 1);
      Generator generator = input.toGenerator(context.getAllocator());
      VectorContainer output = new VectorContainer(context.getAllocator())) {
    final LogicalExpression materialized = ExpressionTreeMaterializer.materializeAndCheckErrors(
        expr, generator.getOutput().getSchema(), testContext.getFunctionLookupContext());
    final ValueVector resultVector = output.addOrGet(materialized.getCompleteType().toField("out"));
    resultVector.allocateNew();
    output.buildSchema();
    generator.next(1);
    InterpreterEvaluator.evaluate(
        generator.getOutput(), context.getFunctionContext(), resultVector, materialized);
    output.setAllCount(1);
    try (RecordBatchData batchData = new RecordBatchData(output, context.getAllocator())) {
      expected.checkValid(Collections.singletonList(batchData));
    }
  }
}