/**
 * Runs the given plan and flattens the values of the FIRST vector of every
 * returned batch into a single array.
 *
 * <p>Fixes over the previous version:
 * <ul>
 *   <li>{@code result.release()} is now called for every batch, including
 *       batches whose {@code getData()} is null — the original leaked those.</li>
 *   <li>VarChar bytes are decoded explicitly as UTF-8 instead of the platform
 *       default charset.</li>
 * </ul>
 *
 * @param queryType  type of the query (SQL, PHYSICAL, ...)
 * @param planString the plan/query text to execute
 * @return all values of the first column across all batches
 * @throws Exception if query execution or batch loading fails
 */
protected Object[] getRunResult(QueryType queryType, String planString) throws Exception {
  List<QueryDataBatch> resultList = testRunAndReturn(queryType, planString);
  List<Object> res = new ArrayList<Object>();
  RecordBatchLoader loader = new RecordBatchLoader(getAllocator());
  for (QueryDataBatch result : resultList) {
    try {
      if (result.getData() != null) {
        loader.load(result.getHeader().getDef(), result.getData());
        // NOTE(review): only the first vector of each batch is read; assumes a
        // single-column result set — confirm against callers.
        ValueVector v = loader.iterator().next().getValueVector();
        for (int j = 0; j < v.getAccessor().getValueCount(); j++) {
          if (v instanceof VarCharVector) {
            // Decode with an explicit charset; the no-arg String(byte[]) ctor
            // would silently use the platform default.
            res.add(new String(((VarCharVector) v).getAccessor().get(j),
                java.nio.charset.StandardCharsets.UTF_8));
          } else {
            res.add(v.getAccessor().getObject(j));
          }
        }
        loader.clear();
      }
    } finally {
      // Release unconditionally so null-data batches are not leaked.
      result.release();
    }
  }
  return res.toArray();
}
@Override public Object getObject(int index) { Map<String, Object> vv = new JsonStringHashMap<>(); for (String child:getChildFieldNames()) { ValueVector v = getChild(child); // TODO(DRILL-4001): Resolve this hack: // The index/value count check in the following if statement is a hack // to work around the current fact that RecordBatchLoader.load and // MapVector.load leave child vectors with a length of zero (as opposed // to matching the lengths of siblings and the parent map vector) // because they don't remove (or set the lengths of) vectors from // previous batches that aren't in the current batch. if (v != null && index < v.getAccessor().getValueCount()) { Object value = v.getAccessor().getObject(index); if (value != null) { vv.put(child, value); } } } return vv; }
/**
 * Returns the value of the implicit "FQN" column at row 0 for logging,
 * keeping only the last {@code maxLength} characters when it is longer.
 * Returns "NA" when the vector is absent, empty, or the value is null.
 *
 * @param maxLength maximum number of characters to keep (tail is preserved)
 * @return the possibly-truncated FQN, or "NA" if unavailable
 */
private String getFQNForLogging(int maxLength) {
  final String FQNKey = "FQN";
  final ValueVector v = mutator.implicitFieldVectorMap.get(FQNKey);
  final Object fqnObj;
  if (v == null
      || v.getAccessor().getValueCount() == 0
      || (fqnObj = ((NullableVarCharVector) v).getAccessor().getObject(0)) == null) {
    return "NA";
  }
  // fqnObj is non-null here, and Object.toString() is contractually non-null,
  // so the original's extra "fqn != null" guard was redundant.
  String fqn = fqnObj.toString();
  if (fqn.length() > maxLength) {
    // Keep the tail of the path (the most specific part) when truncating.
    fqn = fqn.substring(fqn.length() - maxLength);
  }
  return fqn;
}
// Fragment of an enclosing loop body (method signature not visible here).
// Resolve the field's minor type so the formatter can render it appropriately.
final TypeProtos.MinorType fieldMinorType = vw.getValueVector().getMetadata().getMajorType().getMinorType();
final Accessor accessor = vw.getValueVector().getAccessor();
// Guard against vectors shorter than the current row index i: out-of-range
// rows are treated as null rather than read past the value count.
final Object value = i < accessor.getValueCount() ? accessor.getObject(i) : null;
// Null values produce a null display string; otherwise delegate formatting
// to the type-aware formatter.
final String display = value == null ? null : formatter.format(value, fieldMinorType);
record.put(field, display);
// Captures size/width statistics for a single vector. NOTE(review): the
// constructor continues past this excerpt; only the visible prefix is
// documented here.
public ColumnSize(ValueVector v, String prefix) {
  this.prefix = prefix;
  // Number of values currently in the vector.
  valueCount = v.getAccessor().getValueCount();
  // Field metadata (name, type) as declared on the vector itself.
  metadata = v.getField();
  // Variable-width columns (e.g. VarChar) need per-value size accounting,
  // unlike fixed-width columns.
  isVariableWidth = (v instanceof VariableWidthVector || v instanceof RepeatedVariableWidthVectorLike);
// Coerces an incoming vector to match the target field's type, transferring
// ownership of its buffers to the given allocator.
//
// Cases:
//  - v == null: a fresh, null-filled vector of the target type is created and
//    sized to recordCount.
//  - minor types match: buffers are transferred; if the target is a UNION,
//    the target's subtypes are added to the transferred union vector.
//  - minor types differ: the target MUST be a UNION (checked), and the
//    transferred vector becomes the union's first type.
//
// The @SuppressWarnings("resource") reflects that vector ownership is handed
// to the caller rather than closed here.
@SuppressWarnings("resource")
private static ValueVector coerceVector(ValueVector v, VectorContainer c, MaterializedField field,
    int recordCount, BufferAllocator allocator) {
  if (v != null) {
    int valueCount = v.getAccessor().getValueCount();
    // Transfer buffers out of the source vector before any type adaptation.
    TransferPair tp = v.getTransferPair(allocator);
    tp.transfer();
    if (v.getField().getType().getMinorType().equals(field.getType().getMinorType())) {
      if (field.getType().getMinorType() == MinorType.UNION) {
        UnionVector u = (UnionVector) tp.getTo();
        // Make sure the union carries every subtype the target field declares.
        for (MinorType t : field.getType().getSubTypeList()) {
          u.addSubType(t);
        }
      }
      return tp.getTo();
    } else {
      ValueVector newVector = TypeHelper.getNewVector(field, allocator);
      // Only type promotion into a union is supported; anything else is a bug.
      Preconditions.checkState(field.getType().getMinorType() == MinorType.UNION,
          "Can only convert vector to Union vector");
      UnionVector u = (UnionVector) newVector;
      u.setFirstType(tp.getTo(), valueCount);
      return u;
    }
  } else {
    // Missing column: materialize an empty (all-null) vector of the right type.
    v = TypeHelper.getNewVector(field, allocator);
    v.allocateNew();
    v.getMutator().setValueCount(recordCount);
    return v;
  }
}
/**
 * Composes the array of partition values for the directories that are
 * referenced by filter.
 *
 * <p>E.g. suppose the dir hierarchy is year/quarter/month and the query is:
 * SELECT * FROM T WHERE dir0=2015 AND dir1 = 'Q1',
 * then for 2015/Q1/Feb this will have ['2015', 'Q1', null].
 * If the query filter condition is WHERE dir1 = 'Q2' (i.e. no dir0 condition)
 * then the array will have [null, 'Q2', null].
 *
 * @param referencedDirsBitSet bits set for each dirN referenced by the filter
 * @param partitionMap maps a referenced dir index to its partition column index
 * @param vectors per-partition-column value vectors
 * @param recordCount row index read from each vector
 * @return the partition value array paired with the largest populated index
 *         (-1 if none)
 */
private Pair<String[], Integer> composePartition(BitSet referencedDirsBitSet,
    Map<Integer, Integer> partitionMap, ValueVector[] vectors, int recordCount) {
  String[] partition = new String[vectors.length];
  int maxIndex = -1;
  for (int referencedDirsIndex : BitSets.toIter(referencedDirsBitSet)) {
    int partitionColumnIndex = partitionMap.get(referencedDirsIndex);
    ValueVector vv = vectors[partitionColumnIndex];
    if (vv.getAccessor().getValueCount() > 0) {
      // Read the value once; the original called getObject(recordCount) twice
      // per iteration (once in the guard, once for the value).
      Object partitionValue = vv.getAccessor().getObject(recordCount);
      if (partitionValue != null) {
        partition[partitionColumnIndex] = partitionValue.toString();
        maxIndex = Math.max(maxIndex, partitionColumnIndex);
      }
    }
  }
  return Pair.of(partition, maxIndex);
}
/**
 * Materializes this map row as a child-name to child-value map. Children that
 * are null, too short for {@code index}, or hold a null value are omitted.
 */
@Override
public Object getObject(int index) {
  Map<String, Object> vv = new JsonStringHashMap<>();
  for (String child : getChildFieldNames()) {
    ValueVector v = getChild(child);
    // TODO(DRILL-4001): Resolve this hack:
    // The index/value count check in the following if statement is a hack
    // to work around the current fact that RecordBatchLoader.load and
    // MapVector.load leave child vectors with a length of zero (as opposed
    // to matching the lengths of siblings and the parent map vector)
    // because they don't remove (or set the lengths of) vectors from
    // previous batches that aren't in the current batch.
    if (v != null && index < v.getAccessor().getValueCount()) {
      Object value = v.getAccessor().getObject(index);
      if (value != null) {
        // Only non-null values are surfaced in the materialized map.
        vv.put(child, value);
      }
    }
  }
  return vv;
}
/** * Returns an instance sitting at the given index if exists, null otherwise. * * @see org.apache.drill.exec.vector.accessor.SqlAccessor#getObject(int) */ @Override public Object getObject(int rowOffset) throws InvalidAccessException { // In case some vectors have fewer values than others, and callee invokes // this method with index >= getValueCount(), this should still yield null. final ValueVector.Accessor accessor = vector.getAccessor(); if (rowOffset < accessor.getValueCount()) { return delegate.getObject(rowOffset); } return null; } }
// Delegates to the wrapped vector: the inner value count is simply the
// wrapped vector's value count.
@Override
public int getInnerValueCount() {
  return vector.getAccessor().getValueCount();
}
/**
 * Returns true iff every value in the vector is null (also true for an empty
 * vector).
 *
 * @param vector the vector to scan
 * @return true when no non-null value exists
 */
private boolean nullFilled(ValueVector vector) {
  // Hoist the loop-invariant accessor: the original fetched it twice per
  // iteration (once for the bound, once for isNull).
  final ValueVector.Accessor accessor = vector.getAccessor();
  final int valueCount = accessor.getValueCount();
  for (int r = 0; r < valueCount; r++) {
    if (!accessor.isNull(r)) {
      return false;
    }
  }
  return true;
}
// Builds the serialized-field metadata (value count + buffer length) for this
// vector, left open as a Builder so subclasses can append further fields.
protected SerializedField.Builder getMetadataBuilder() {
  return getField().getAsBuilder()
      .setValueCount(getAccessor().getValueCount())
      .setBufferLength(getBufferSize());
}
/**
 * Returns the number of bytes currently used by this vector's data buffer;
 * an empty vector reports zero regardless of buffer capacity.
 */
@Override
public int getBufferSize() {
  final boolean empty = getAccessor().getValueCount() == 0;
  return empty ? 0 : data.writerIndex();
}
// Serializes this vector's metadata (field definition, buffer length in
// bytes, and value count) into the wire-format SerializedField message.
@Override
public UserBitShared.SerializedField getMetadata() {
  return getField()
      .getAsBuilder()
      .setBufferLength(getBufferSize())
      .setValueCount(getAccessor().getValueCount())
      .build();
}
// Delegates to the wrapped vector: the inner value count is simply the
// wrapped vector's value count.
@Override
public int getInnerValueCount() {
  return vector.getAccessor().getValueCount();
}
// Serializes this vector's metadata (field definition, buffer length in
// bytes, and value count) into the wire-format SerializedField message.
@Override
public UserBitShared.SerializedField getMetadata() {
  return getField()
      .getAsBuilder()
      .setBufferLength(getBufferSize())
      .setValueCount(getAccessor().getValueCount())
      .build();
}
// Builds the serialized-field metadata (value count + buffer length) for this
// vector, left open as a Builder so subclasses can append further fields.
protected SerializedField.Builder getMetadataBuilder() {
  return getField().getAsBuilder()
      .setValueCount(getAccessor().getValueCount())
      .setBufferLength(getBufferSize());
}
/**
 * Returns true iff every value in the vector is null (also true for an empty
 * vector).
 *
 * @param vector the vector to scan
 * @return true when no non-null value exists
 */
private boolean nullFilled(ValueVector vector) {
  // Hoist the loop-invariant accessor: the original fetched it twice per
  // iteration (once for the bound, once for isNull).
  final ValueVector.Accessor accessor = vector.getAccessor();
  final int valueCount = accessor.getValueCount();
  for (int r = 0; r < valueCount; r++) {
    if (!accessor.isNull(r)) {
      return false;
    }
  }
  return true;
}
/**
 * Returns the number of bytes currently used by this vector's data buffer;
 * an empty vector reports zero regardless of buffer capacity.
 */
@Override
public int getBufferSize() {
  final boolean empty = getAccessor().getValueCount() == 0;
  return empty ? 0 : data.writerIndex();
}