DataMode mode = field.getMajorType().getMode(); MinorType minor = field.getMajorType().getMinorType(); String name = field.getNamePart().getName(); boolean changed = false; if (logger.isDebugEnabled()) { || !"$bits$".equals(bitsField.getNamePart().getName()) || bitsField.getMajorType().getMode() != REQUIRED) { throw new IllegalStateException("bit vector should be called $bits$ and have type REQUIRED BIT." +
/**
 * Merges {@code value} into the {@code child} field using protobuf merge semantics:
 * if {@code child} is already set to a non-default message, the two are field-wise
 * merged; otherwise the incoming value simply replaces it.
 *
 * <code>optional .exec.shared.NamePart child = 3;</code>
 */
public Builder mergeChild(com.dremio.exec.proto.UserBitShared.NamePart value) {
  if (childBuilder_ == null) {
    // No nested builder: operate on the message field directly.
    // Bit 0x00000004 of bitField0_ tracks whether 'child' has been set.
    if (((bitField0_ & 0x00000004) == 0x00000004) &&
        child_ != com.dremio.exec.proto.UserBitShared.NamePart.getDefaultInstance()) {
      child_ =
        com.dremio.exec.proto.UserBitShared.NamePart.newBuilder(child_).mergeFrom(value).buildPartial();
    } else {
      child_ = value;
    }
    onChanged();
  } else {
    // A nested builder exists; delegate the merge to it.
    childBuilder_.mergeFrom(value);
  }
  bitField0_ |= 0x00000004; // mark 'child' as set
  return this;
}
/**
/**
 * Builds a {@link SchemaPath} rooted at the given protobuf {@link NamePart}.
 *
 * @param namePart root name part; must be of type {@code NAME} (an array part
 *                 cannot start a schema path)
 * @return the schema path corresponding to the NamePart chain
 */
public static SchemaPath create(NamePart namePart) {
  // Only NAME parts may form the root segment of a path.
  Preconditions.checkArgument(namePart.getType() == NamePart.Type.NAME);
  final NameSegment root = (NameSegment) getPathSegment(namePart);
  return new SchemaPath(root);
}
// Materialize one Field per value field id, preserving order.
for (TypedFieldId id : valueFieldIds) {
  if (id.getIntermediateType() == CompleteType.OBJECT) {
    // NOTE(review): both branches of this if/else construct a byte-identical Field,
    // so the OBJECT check currently has no effect — confirm whether the OBJECT case
    // was meant to build something different.
    materializedValueFields[i++] = new Field(ref.getAsNamePart().getName(), true, id.getIntermediateType().getType(), null);
  } else {
    materializedValueFields[i++] = new Field(ref.getAsNamePart().getName(), true, id.getIntermediateType().getType(), null);
DataMode mode = field.getMajorType().getMode(); MinorType minor = field.getMajorType().getMinorType(); String name = field.getNamePart().getName(); boolean changed = false; if (logger.isDebugEnabled()) {
// Name of the field, taken from the root NamePart of its serialized form.
final String fieldName = field.getNamePart().getName();
if (column.equals(ROW_KEY_PATH)) { if (sample) { Field field = CompleteType.VARBINARY.toField(column.getAsNamePart().getName()); rowKeyVector = outputMutator.addField(field, VarBinaryVector.class); } else {
public static Field getFieldForSerializedField(SerializedField serializedField) { String name = serializedField.getNamePart().getName(); org.apache.arrow.vector.types.Types.MinorType arrowMinorType = getArrowMinorType(serializedField.getMajorType().getMinorType()); switch(serializedField.getMajorType().getMinorType()) { Preconditions.checkState(childList.size() > 0, "children should start with validity vector buffer"); SerializedField bits = childList.get(0); Preconditions.checkState(bits.getNamePart().getName().equals("$bits$"), "children should start with validity vector buffer: %s", childList); for (int i = 1; i < childList.size(); i++) { SerializedField child = childList.get(i);
// Deserialize the nested NamePart sub-message via protostuff's MERGE schema
// and attach it to the builder.
builder.setNamePart(input.mergeObject(com.dremio.exec.proto.UserBitShared.NamePart.newBuilder(), com.dremio.exec.proto.SchemaUserBitShared.NamePart.MERGE));
/**
 * Record reader for Excel files.
 *
 * @param executionContext operator context supplied by the execution engine
 * @param dfs              file system wrapper used to open the file
 * @param path             path of the Excel file to read
 * @param pluginConfig     format plugin configuration
 * @param columns          columns requested by the query (may be a star query)
 */
public ExcelRecordReader(final OperatorContext executionContext, final FileSystemWrapper dfs, final Path path,
    final ExcelFormatPluginConfig pluginConfig, final List<SchemaPath> columns) {
  super(executionContext, columns);
  this.executionContext = executionContext;
  this.dfs = dfs;
  this.path = path;
  this.pluginConfig = pluginConfig;

  /* Get the list of columns to project, build a lookup table and pass it
   * to respective parsers for filtering the columns from excel sheets. */
  if (!isStarQuery() && !isSkipQuery()) {
    this.columnsToProject = new HashSet<>();
    for (SchemaPath column : getColumns()) {
      this.columnsToProject.add(column.getAsNamePart().getName());
    }
    // BUGFIX: the original format string had no "{}" placeholder, so SLF4J silently
    // dropped the size argument and never logged the count.
    logger.debug("number of projected columns: {}", columnsToProject.size());
  } else {
    // Star/skip query: project everything; null signals "no filtering" downstream.
    logger.debug("projected columns is null");
    this.columnsToProject = null;
  }
}
/**
 * Builds a {@link com.dremio.exec.proto.UserBitShared.NamePart} from this builder's
 * current state without requiring every field to be set (a "partial" build).
 */
public com.dremio.exec.proto.UserBitShared.NamePart buildPartial() {
  com.dremio.exec.proto.UserBitShared.NamePart result = new com.dremio.exec.proto.UserBitShared.NamePart(this);
  int from_bitField0_ = bitField0_;
  int to_bitField0_ = 0;
  // Propagate each has-bit from the builder to the message:
  // 0x01 = type, 0x02 = name, 0x04 = child.
  if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
    to_bitField0_ |= 0x00000001;
  }
  result.type_ = type_;
  if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
    to_bitField0_ |= 0x00000002;
  }
  result.name_ = name_;
  if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
    to_bitField0_ |= 0x00000004;
  }
  // 'child' may be held either directly or in a nested builder; build it if so.
  if (childBuilder_ == null) {
    result.child_ = child_;
  } else {
    result.child_ = childBuilder_.build();
  }
  result.bitField0_ = to_bitField0_;
  onBuilt();
  return result;
}
/**
 * Protostuff deserialization: reads numbered fields from {@code input} into
 * {@code builder} until field number 0 (end-of-message) is reached.
 *
 * @throws java.io.IOException on read failure
 */
public void mergeFrom(io.protostuff.Input input, com.dremio.exec.proto.UserBitShared.NamePart.Builder builder) throws java.io.IOException {
  for(int number = input.readFieldNumber(this);; number = input.readFieldNumber(this)) {
    switch(number) {
      case 0:
        return; // end of message
      case 1:
        builder.setType(com.dremio.exec.proto.UserBitShared.NamePart.Type.valueOf(input.readEnum()));
        break;
      case 2:
        builder.setName(input.readString());
        break;
      case 3:
        // Nested NamePart: recursively merged via the protostuff MERGE schema.
        builder.setChild(input.mergeObject(com.dremio.exec.proto.UserBitShared.NamePart.newBuilder(), com.dremio.exec.proto.SchemaUserBitShared.NamePart.MERGE));
        break;
      default:
        // Unknown field numbers are skipped/preserved by the framework.
        input.handleUnknownField(number, this);
    }
  }
}
public boolean isInitialized(com.dremio.exec.proto.UserBitShared.NamePart.Builder builder)
/**
 * Computes the number of buffers that make up a given serialized field.
 *
 * @param field        serialized field
 * @param buffers      total buffers in the batch
 * @param buffersStart starting buffer index for the passed field
 * @return number of buffers for the field
 * @throws IllegalStateException if the remaining buffers do not cover the
 *                               field's declared buffer length
 */
private static int fieldBuffersCount(SerializedField field, ByteBuf[] buffers, final int buffersStart) {
  int totalBufferWidth = 0;
  int lastIndex = buffersStart;
  // Accumulate readable bytes until the field's declared length is covered.
  // BUGFIX: bound the scan by buffers.length (not buffersStart + buffers.length) —
  // with buffersStart > 0 the original bound could index past the end of the array,
  // raising ArrayIndexOutOfBoundsException instead of the intended
  // IllegalStateException below on a truncated batch.
  while (totalBufferWidth < field.getBufferLength() && lastIndex < buffers.length) {
    ByteBuf buf = buffers[lastIndex];
    totalBufferWidth += buf.readableBytes();
    ++lastIndex;
  }
  if (totalBufferWidth != field.getBufferLength()) {
    throw new IllegalStateException("not enough buffers for field " + field.getNamePart().getName() + " of type " + field.getMajorType());
  }
  return lastIndex - buffersStart;
}
/**
 * Merges {@code value} into the {@code name_part} field using protobuf merge
 * semantics: if already set to a non-default message, field-wise merge; otherwise
 * the incoming value replaces it.
 *
 * <code>optional .exec.shared.NamePart name_part = 2;</code>
 */
public Builder mergeNamePart(com.dremio.exec.proto.UserBitShared.NamePart value) {
  if (namePartBuilder_ == null) {
    // No nested builder: operate on the message field directly.
    // Bit 0x00000002 of bitField0_ tracks whether 'name_part' has been set.
    if (((bitField0_ & 0x00000002) == 0x00000002) &&
        namePart_ != com.dremio.exec.proto.UserBitShared.NamePart.getDefaultInstance()) {
      namePart_ =
        com.dremio.exec.proto.UserBitShared.NamePart.newBuilder(namePart_).mergeFrom(value).buildPartial();
    } else {
      namePart_ = value;
    }
    onChanged();
  } else {
    // A nested builder exists; delegate the merge to it.
    namePartBuilder_.mergeFrom(value);
  }
  bitField0_ |= 0x00000002; // mark 'name_part' as set
  return this;
}
/**
/**
 * Recursively converts a {@link PathSegment} chain into an equivalent
 * {@link NamePart} chain.
 *
 * @param segment head of the path-segment chain; may be null
 * @return the corresponding NamePart chain, or null for a null segment
 * @throws IllegalStateException if any array segment carries a concrete index,
 *         since NameParts can only reference whole vectors
 */
private static NamePart getNamePart(PathSegment segment) {
  if (segment == null) {
    return null;
  }
  final NamePart.Builder part = NamePart.newBuilder();
  final PathSegment next = segment.getChild();
  if (next != null) {
    // Convert the rest of the chain first and hang it off this part.
    part.setChild(getNamePart(next));
  }
  if (!segment.isArray()) {
    part.setType(Type.NAME);
    part.setName(segment.getNameSegment().getPath());
  } else {
    if (segment.getArraySegment().hasIndex()) {
      throw new IllegalStateException("You cannot convert a indexed schema path to a NamePart. NameParts can only reference Vectors, not individual records or values.");
    }
    part.setType(Type.ARRAY);
  }
  return part.build();
}
/**
 * Returns the {@link StructVector} backing the given HBase column family,
 * creating and registering it (and recording the column) on first access.
 *
 * @param output           mutator used to look up an existing vector in the non-sample path
 * @param familyName       column-family name, used as both cache key and vector name
 * @param allocateOnCreate whether to allocate the vector's buffers immediately after creation
 * @return the cached or newly created family vector
 */
private StructVector getOrCreateFamilyVector(OutputMutator output, String familyName, boolean allocateOnCreate) {
  StructVector v = familyVectorMap.get(familyName);
  if(v == null) {
    SchemaPath column = SchemaPath.getSimplePath(familyName);
    Field field = getFieldForNameAndMajorType(column.getAsNamePart().getName(), COLUMN_FAMILY_TYPE);
    if (sample) {
      // Sampling: the field does not exist yet, so add it.
      // NOTE(review): this branch uses the outputMutator field rather than the
      // 'output' parameter — confirm both refer to the same mutator.
      v = outputMutator.addField(field, StructVector.class);
      if (allocateOnCreate) {
        v.allocateNew();
      }
    } else {
      // Non-sample path: the vector already exists on the output; just fetch it.
      v = (StructVector) output.getVector(column.getAsNamePart().getName());
    }
    getColumns().add(column);
    familyVectorMap.put(familyName, v);
  }
  return v;
}
/**
 * Builds the {@link SerializedField} metadata for this struct vector: its name,
 * an optional STRUCT major type, total buffer length and value count, followed by
 * one child entry for the validity bitmap and one per child vector.
 *
 * @return serialized metadata for the struct vector
 * @throws IllegalStateException if the children's buffer lengths (plus the
 *         validity bitmap) do not add up to the vector's own buffer size
 */
public SerializedField getMetadata() {
  final int actualBufferSize = structVector.getBufferSize();
  final SerializedField.Builder builder = SerializedField.newBuilder()
      .setNamePart(NamePart.newBuilder().setName(structVector.getField().getName()))
      .setMajorType(Types.optional(MinorType.STRUCT))
      .setBufferLength(actualBufferSize)
      .setValueCount(structVector.valueCount);

  // The validity (null) bitmap is always the first child entry.
  builder.addChild(buildValidityMetadata());
  int expectedSize = BaseValueVectorHelper.getValidityBufferSizeFromCount(structVector.valueCount);

  for (ValueVector childVector : structVector.getChildren()) {
    final SerializedField childMeta = TypeHelper.getMetadata(childVector);
    expectedSize += childMeta.getBufferLength();
    builder.addChild(childMeta);
  }

  // Sanity check: child buffers plus validity must exactly cover the vector's size.
  Preconditions.checkState(expectedSize == actualBufferSize, "Invalid buffer count: %s != %s", expectedSize, actualBufferSize);
  return builder.build();
}
/**
 * Creates a transfer pair from the inner (data) vector of the repeated field being
 * flattened to the output scalar vector named by {@code outputName}.
 * <p>
 * The data layout of the values inside a repeated field is identical to that of a
 * scalar vector of the same sql type — e.g. a repeated int vector is a vector of
 * offsets into a regular int vector. Because the layouts match, the flatten
 * operation can transfer the inner vector wholesale instead of copying values one
 * by one. The transfer is completed after determining how many records fit (we hit
 * either a batch boundary, or the end of one of the other vectors being copied
 * alongside each flattened value).
 *
 * @param outputName name of the resulting flattened output column
 * @return a transfer pair from the repeated field's data vector to the output vector
 */
private TransferPair getFlattenFieldTransferPair(FieldReference outputName) {
  final TypedFieldId fieldId = incoming.getSchema().getFieldId(config.getColumn());
  final Class<? extends ValueVector> vectorClass =
      TypeHelper.getValueVectorClass(incoming.getSchema().getColumn(fieldId.getFieldIds()[0]));
  final ValueVector flattenField =
      incoming.getValueAccessorById(vectorClass, fieldId.getFieldIds()).getValueVector();

  // Transfer straight out of the repeated vector's inner data vector.
  final ValueVector dataVector = RepeatedValueVector.class.cast(flattenField).getDataVector();
  return dataVector.getTransferPair(outputName.getAsNamePart().getName(), context.getAllocator());
}
/**
 * Verifies that column order is preserved in the batch definition when a union
 * vector is produced (type changes within a column).
 */
@Test
public void testColumnOrderingWithUnionVector() throws Exception {
  List<QueryDataBatch> results = null;
  try {
    results = testRunAndReturn(QueryType.SQL, "SELECT * FROM cp.\"type_changes.json\"");
    // Inspect the schema of the first batch: two fields, in query order.
    final RecordBatchDef def = results.get(0).getHeader().getDef();
    assertEquals(2, def.getFieldCount());
    assertEquals("a", def.getField(0).getNamePart().getName());
    assertEquals("b", def.getField(1).getNamePart().getName());
  } finally {
    // Always release batch buffers, even on assertion failure.
    if (results != null) {
      for (QueryDataBatch batch : results) {
        batch.release();
      }
    }
  }
}
/**
 * Creates a new {@link Builder} pre-populated with this message's current field values.
 */
public Builder toBuilder() { return newBuilder(this); }