/** Returns the Arrow type of the dictionary's underlying field; the vector's type mirrors it. */
public ArrowType getVectorType() { return dictionary.getField().getType(); }
/**
 * Serializes the given fields as a JSON array. Each element is written by
 * visiting the field's Arrow type with a FieldDescriber bound to the generator.
 *
 * @param fields fields to describe
 * @param jsonGenerator target generator the array is written to
 * @param serializerProvider unused; required by the serializer contract
 * @throws IOException if the generator fails to write
 */
@Override
public void serialize(List<Field> fields, JsonGenerator jsonGenerator, SerializerProvider serializerProvider)
    throws IOException, JsonProcessingException {
  jsonGenerator.writeStartArray();
  for (final Field current : fields) {
    // The describer emits the field's JSON representation through the generator.
    final APIFieldDescriber.FieldDescriber fieldDescriber =
        new APIFieldDescriber.FieldDescriber(jsonGenerator, current, false);
    current.getType().accept(fieldDescriber);
  }
  jsonGenerator.writeEndArray();
}
}
/**
 * Maps each subtype's Arrow type to the ordinal of its corresponding MinorType.
 *
 * @param subTypes fields whose types are translated
 * @return one MinorType ordinal per input field, in the same order
 */
public static int[] getTypeIds(List<Field> subTypes) {
  final int[] ids = new int[subTypes.size()];
  int index = 0;
  for (final Field subType : subTypes) {
    ids[index++] = Types.getMinorTypeForArrowType(subType.getType()).ordinal();
  }
  return ids;
}
/**
 * Transfers this wrapper's vector buffers into the destination wrapper.
 * The destination must be a SimpleVectorWrapper with an identical field type.
 */
@Override
public void transfer(VectorWrapper<?> destination) {
  Preconditions.checkArgument(destination instanceof SimpleVectorWrapper);
  Preconditions.checkArgument(getField().getType().equals(destination.getField().getType()));
  final SimpleVectorWrapper<?> target = (SimpleVectorWrapper<?>) destination;
  // makeTransferPair hands ownership of the underlying buffers to the target vector.
  vector.makeTransferPair(target.vector).transfer();
}
/**
 * Builds a CompleteType from an Arrow field, carrying over its type and children.
 * Nullability is intentionally not validated here (see the disabled check below).
 */
public static CompleteType fromField(Field field){ // IGNORE this until the NullableMapVector.getField() returns a nullable type. // Preconditions.checkArgument(field.isNullable(), "Dremio only supports nullable types."); return new CompleteType(field.getType(), field.getChildren()); }
/**
 * Fields are equal when name, nullability, type, dictionary encoding,
 * metadata and children all match.
 */
@Override
public boolean equals(Object obj) {
  if (!(obj instanceof Field)) {
    return false;
  }
  Field that = (Field) obj;
  // Compare the primitive boolean directly instead of Objects.equals, which
  // autoboxes both values; the remaining members may be null, so use
  // Objects.equals for null-safe comparison. All terms are side-effect free,
  // so short-circuit ordering does not change behavior.
  return this.isNullable() == that.isNullable()
      && Objects.equals(this.name, that.name)
      && Objects.equals(this.getType(), that.getType())
      && Objects.equals(this.getDictionary(), that.getDictionary())
      && Objects.equals(this.getMetadata(), that.getMetadata())
      && Objects.equals(this.children, that.children);
}
/**
 * Appends a depth-indented, one-line description of this field and then
 * recurses into its children, one level deeper each.
 * Format per line: name;nullable;type-description
 */
public static void toString(Field field, int depth, StringBuilder b) {
  b.append("\n");
  for (int level = 0; level < depth; level++) {
    b.append(" ");
  }
  b.append(field.getName())
      .append(";")
      .append(field.isNullable())
      .append(";")
      .append(Describer.describe(field.getType()));
  for (final Field child : field.getChildren()) {
    toString(child, depth + 1, b);
  }
}
// Writes the JSON description of a field by visiting its Arrow type.
// NOTE(review): the skipName parameter is never used -- the FieldDescriber is
// constructed with its third argument hard-coded to false. Confirm whether
// skipName should be forwarded here instead of the constant.
private void writeField(Field field, JsonGenerator generator, boolean skipName) throws IOException { APIFieldDescriber.FieldDescriber describer = new APIFieldDescriber.FieldDescriber(generator, field, false); field.getType().accept(describer); }
/** Describes a field by visiting its Arrow type, optionally including the field name. */
private static String describe(Field field, boolean includeName){
  final FieldDescriber visitor = new FieldDescriber(field, includeName);
  return field.getType().accept(visitor);
}
/** Describes a field, always including its name in the output. */
public static String describe(Field field){
  final FieldDescriber visitor = new FieldDescriber(field, true);
  return field.getType().accept(visitor);
}
private static Field mergeField(Field field, Cell c){ Preconditions.checkNotNull(field); Preconditions.checkNotNull(c); Field newField = c.toField(field.getName()); if(!newField.getType().equals(field.getType())){ throw new UnsupportedOperationException(String.format("Not supporting mixed types yet. Initial Field was %s but new field was %s", field.getType(), newField.getType())); } // they are the same. return field; }
// Must stay consistent with equals(): hashes exactly the members equals() compares.
@Override public int hashCode() { return Objects.hash(name, isNullable(), getType(), getDictionary(), getMetadata(), children); }
/**
 * Recursively collects the field node and buffers of a vector and all of its
 * children, in depth-first order.
 *
 * @param vector root vector to walk
 * @param nodes accumulator for one ArrowFieldNode per visited vector
 * @param buffers accumulator for each vector's backing buffers
 * @throws IllegalArgumentException if a vector's buffer count does not match
 *         the layout expected for its type
 */
private void appendNodes(FieldVector vector, List<ArrowFieldNode> nodes, List<ArrowBuf> buffers) {
  // A null count of -1 signals "not computed" when null counting is disabled.
  final int nullCount = includeNullCount ? vector.getNullCount() : -1;
  nodes.add(new ArrowFieldNode(vector.getValueCount(), nullCount));
  final List<ArrowBuf> actualBuffers = vector.getFieldBuffers();
  final List<BufferType> expectedBuffers =
      TypeLayout.getTypeLayout(vector.getField().getType()).getBufferTypes();
  if (actualBuffers.size() != expectedBuffers.size()) {
    throw new IllegalArgumentException(String.format(
        "wrong number of buffers for field %s in vector %s. found: %s",
        vector.getField(), vector.getClass().getSimpleName(), actualBuffers));
  }
  buffers.addAll(actualBuffers);
  for (final FieldVector child : vector.getChildrenFromFields()) {
    appendNodes(child, nodes, buffers);
  }
}
/**
 * For the field matching the target column path, replaces the list field with
 * its single element child (keeping the list's name); other fields pass through.
 */
@Override
public Field visit(ArrowType.List type) {
  // Only rewrite the field that matches the target column path.
  if (!field.getName().equals(column.getAsUnescapedPath())) {
    return field;
  }
  final Field element = field.getChildren().get(0);
  return new Field(field.getName(), element.isNullable(), element.getType(), element.getChildren());
}
/**
 * Resolves the configured hash expression to an IntVector in the incoming batch
 * and stores it as the partition index source.
 *
 * @throws IllegalArgumentException if the expression is not a SchemaPath or
 *         does not resolve to an Int-typed column
 */
private void initHashVector(VectorAccessible incoming) {
  Preconditions.checkArgument(config.getExpr() instanceof SchemaPath,
      "hash expression expected to be a SchemaPath but was : " + config.getExpr().getClass().getName());
  final SchemaPath hashExpr = (SchemaPath) config.getExpr();
  final TypedFieldId fieldId = incoming.getSchema().getFieldId(hashExpr);
  final int columnIndex = fieldId.getFieldIds()[0];
  final Field hashField = incoming.getSchema().getColumn(columnIndex);
  // The hash expression must resolve to an integer column.
  Preconditions.checkArgument(hashField.getType().getTypeID() == ArrowTypeID.Int);
  partitionIndices = incoming.getValueAccessorById(IntVector.class, columnIndex).getValueVector();
}
Converter groupConverterFromArrowSchema(String nameForChild, String fieldName, GroupType groupType, Collection<SchemaPath> c) { final Field arrowField = Schema.findField(arrowSchema, fieldName); final ArrowTypeID arrowTypeType = arrowField.getType().getTypeID(); final List<Field> arrowChildren = arrowField.getChildren(); if (arrowTypeType == ArrowTypeID.Union) { // if it's a union we will add the children directly to the parent return new UnionGroupConverter(mutator, getWriterProvider(), groupType, c, options, arrowChildren, nameForChild, schemaHelper); } else if (arrowTypeType == ArrowTypeID.List) { // make sure the parquet schema matches the arrow schema and delegate handling the logical list to defaultGroupConverter() Preconditions.checkState(groupType.getOriginalType() == OriginalType.LIST, "parquet schema doesn't match the arrow schema for LIST " + nameForChild); } return defaultGroupConverter(mutator, groupType, nameForChild, c, arrowChildren); }
/**
 * Builds this node's output schema by converting each field of the child's
 * schema through a SchemaConverter visitor.
 */
@Override
protected BatchSchema constructSchema(FunctionLookupContext context) {
  final SchemaBuilder builder = BatchSchema.newBuilder();
  for (final Field inputField : child.getSchema(context)) {
    final Field converted = inputField.getType().accept(new SchemaConverter(inputField));
    builder.addField(converted);
  }
  return builder.build();
}
/**
 * Returns the superclass's field with nullability forced to true, preserving
 * the original type, dictionary encoding, metadata, name and children.
 */
@Override
public Field getField() {
  final Field original = super.getField();
  final FieldType nullableType = new FieldType(true, original.getType(),
      original.getFieldType().getDictionary(), original.getFieldType().getMetadata());
  return new Field(original.getName(), nullableType, original.getChildren());
}
/**
 * Asserts that every dataset in the given keys has a persisted schema with
 * exactly one Utf8 column named "string".
 */
private void assertDatasetSchemasDefined(List<NamespaceKey> datasetKeys) throws Exception {
  for (NamespaceKey datasetKey : datasetKeys) {
    final DatasetConfig dsConfig = namespaceService.getDataset(datasetKey);
    final BatchSchema schema = BatchSchema.deserialize(dsConfig.getRecordSchema());
    // JUnit's assertEquals takes (expected, actual); the original calls had the
    // arguments reversed, which yields misleading "expected X but was Y"
    // failure messages.
    assertEquals(1, schema.getFieldCount());
    assertEquals("string", schema.getColumn(0).getName());
    assertEquals(ArrowType.Utf8.INSTANCE, schema.getColumn(0).getType());
  }
}