/**
 * Converts an Avro {@link IndexedRecord} into a Flink {@link Row} by converting
 * every Avro field value according to the matching Flink field type information.
 */
private Row convertAvroRecordToRow(Schema schema, RowTypeInfo typeInfo, IndexedRecord record) {
	final List<Schema.Field> avroFields = schema.getFields();
	final TypeInformation<?>[] targetTypes = typeInfo.getFieldTypes();
	final Row result = new Row(avroFields.size());
	int pos = 0;
	for (Schema.Field avroField : avroFields) {
		result.setField(pos, convertAvroType(avroField.schema(), targetTypes[pos], record.get(pos)));
		pos++;
	}
	return result;
}
private ObjectNode convertRow(ObjectNode reuse, RowTypeInfo info, Row row) { if (reuse == null) { reuse = mapper.createObjectNode(); } final String[] fieldNames = info.getFieldNames(); final TypeInformation<?>[] fieldTypes = info.getFieldTypes(); // validate the row if (row.getArity() != fieldNames.length) { throw new IllegalStateException(String.format( "Number of elements in the row '%s' is different from number of field names: %d", row, fieldNames.length)); } for (int i = 0; i < fieldNames.length; i++) { final String name = fieldNames[i]; final JsonNode fieldConverted = convert(reuse, reuse.get(name), fieldTypes[i], row.getField(i)); reuse.set(name, fieldConverted); } return reuse; }
/**
 * Deserializes a JSON object node into a {@link Row}, resolving each declared
 * field by name. A missing field either raises an exception (when
 * {@code failOnMissingField} is set) or maps to {@code null}.
 */
private Row convertRow(JsonNode node, RowTypeInfo info) {
	final String[] fieldNames = info.getFieldNames();
	final TypeInformation<?>[] fieldTypes = info.getFieldTypes();
	final Row result = new Row(fieldNames.length);
	for (int pos = 0; pos < fieldNames.length; pos++) {
		final JsonNode child = node.get(fieldNames[pos]);
		if (child != null) {
			result.setField(pos, convert(child, fieldTypes[pos]));
		} else if (failOnMissingField) {
			throw new IllegalStateException(
				"Could not find field with name '" + fieldNames[pos] + "'.");
		} else {
			result.setField(pos, null);
		}
	}
	return result;
}
// Narrow the generic type information to a RowTypeInfo so the individual field
// names and field types can be extracted. NOTE(review): assumes the caller has
// already verified that typeInfo is a RowTypeInfo — otherwise this cast throws.
final RowTypeInfo rt = (RowTypeInfo) typeInfo; final String[] fieldNames = rt.getFieldNames(); final TypeInformation<?>[] fieldTypes = rt.getFieldTypes();
private OrcTableSource(String path, TypeDescription orcSchema, Configuration orcConfig, int batchSize, boolean recursiveEnumeration, int[] selectedFields, Predicate[] predicates) { Preconditions.checkNotNull(path, "Path must not be null."); Preconditions.checkNotNull(orcSchema, "OrcSchema must not be null."); Preconditions.checkNotNull(path, "Configuration must not be null."); Preconditions.checkArgument(batchSize > 0, "Batch size must be larger than null."); this.path = path; this.orcSchema = orcSchema; this.orcConfig = orcConfig; this.batchSize = batchSize; this.recursiveEnumeration = recursiveEnumeration; this.selectedFields = selectedFields; this.predicates = predicates; // determine the type information from the ORC schema RowTypeInfo typeInfoFromSchema = (RowTypeInfo) OrcBatchReader.schemaToTypeInfo(this.orcSchema); // set return type info if (selectedFields == null) { this.typeInfo = typeInfoFromSchema; } else { this.typeInfo = RowTypeInfo.projectFields(typeInfoFromSchema, selectedFields); } // create a TableSchema that corresponds to the ORC schema this.tableSchema = new TableSchema( typeInfoFromSchema.getFieldNames(), typeInfoFromSchema.getFieldTypes() ); }
/**
 * Returns the field types of the table, delegating to the wrapped row type information.
 */
@Override
public TypeInformation<?>[] getFieldTypes() {
	final TypeInformation<?>[] fieldTypes = type.getFieldTypes();
	return fieldTypes;
}
MockTableSource(List<Row> data, RowTypeInfo type) { this.data = data; this.type = type; this.schema = new TableSchema(type.getFieldNames(), type.getFieldTypes()); }
/**
 * Turns this in-memory table into an {@link ExternalCatalogTable} whose connector
 * descriptor carries the table schema and the serialized row data as properties.
 */
ExternalCatalogTable toExternalCatalogTable() {
	final TableSchema tableSchema = new TableSchema(schema.getFieldNames(), schema.getFieldTypes());
	final ConnectorDescriptor connectorDescriptor = new ConnectorDescriptor(CONNECTOR_TYPE, CONNECTOR_VERSION, false) {
		@Override
		public void addConnectorProperties(DescriptorProperties properties) {
			// Embed the schema and the serialized rows so the table can be
			// reconstructed purely from its descriptor properties.
			properties.putTableSchema(TABLE_SCHEMA_CONNECTOR_PROPERTY, tableSchema);
			properties.putString(TABLE_DATA_CONNECTOR_PROPERTY, serializeRows());
		}
	};
	return new ExternalCatalogTable(connectorDescriptor, Option.empty(), Option.empty(), Option.empty(), Option.empty());
}
/**
 * Converts an Avro {@link IndexedRecord} into a Flink {@link Row} by converting
 * every Avro field value according to the matching Flink field type information.
 */
private Row convertAvroRecordToRow(Schema schema, RowTypeInfo typeInfo, IndexedRecord record) {
	final List<Schema.Field> avroFields = schema.getFields();
	final TypeInformation<?>[] targetTypes = typeInfo.getFieldTypes();
	final Row result = new Row(avroFields.size());
	int pos = 0;
	for (Schema.Field avroField : avroFields) {
		result.setField(pos, convertAvroType(avroField.schema(), targetTypes[pos], record.get(pos)));
		pos++;
	}
	return result;
}
/** * Creates a JSON deserialization schema for the given fields and types. * * @param typeInfo Type information describing the result type. The field names are used * to parse the JSON file and so are the types. */ public JsonRowDeserializationSchema(TypeInformation<Row> typeInfo) { Preconditions.checkNotNull(typeInfo, "Type information"); this.typeInfo = typeInfo; this.fieldNames = ((RowTypeInfo) typeInfo).getFieldNames(); this.fieldTypes = ((RowTypeInfo) typeInfo).getFieldTypes(); }
/**
 * Converts an Avro {@link IndexedRecord} into a Flink {@link Row} by converting
 * every Avro field value according to the matching Flink field type information.
 */
private Row convertAvroRecordToRow(Schema schema, RowTypeInfo typeInfo, IndexedRecord record) {
	final List<Schema.Field> avroFields = schema.getFields();
	final TypeInformation<?>[] targetTypes = typeInfo.getFieldTypes();
	final Row result = new Row(avroFields.size());
	int pos = 0;
	for (Schema.Field avroField : avroFields) {
		result.setField(pos, convertAvroType(avroField.schema(), targetTypes[pos], record.get(pos)));
		pos++;
	}
	return result;
}
/** * Creates a JSON deserialization schema for the given fields and types. * * @param typeInfo Type information describing the result type. The field names are used * to parse the JSON file and so are the types. */ public JsonRowDeserializationSchema(TypeInformation<Row> typeInfo) { Preconditions.checkNotNull(typeInfo, "Type information"); this.typeInfo = typeInfo; this.fieldNames = ((RowTypeInfo) typeInfo).getFieldNames(); this.fieldTypes = ((RowTypeInfo) typeInfo).getFieldTypes(); }
/**
 * Returns the produced type as a {@link BaseRowTypeInfo} built from the
 * field types and names of the configured row type information.
 */
@Override
public TypeInformation getProducedType() {
	final TypeInformation<?>[] fieldTypes = rowTypeInfo.getFieldTypes();
	final String[] fieldNames = rowTypeInfo.getFieldNames();
	return new BaseRowTypeInfo(fieldTypes, fieldNames);
}
/**
 * Returns the return type as an internal {@link DataType}, converted from a
 * {@link BaseRowTypeInfo} built from the configured row type information.
 */
@SuppressWarnings("unchecked")
@Override
public DataType getReturnType() {
	final BaseRowTypeInfo baseRowType =
		new BaseRowTypeInfo(rowTypeInfo.getFieldTypes(), rowTypeInfo.getFieldNames());
	return TypeConverters.createInternalTypeFromTypeInfo(baseRowType);
}
/**
 * Creates a JSON serialization schema for the given fields and types.
 *
 * @param rowSchema The schema of the rows to encode. Must not contain composite
 *                  (nested) field types.
 * @throws IllegalArgumentException if any field type is a {@link CompositeType}
 */
public JsonRowSerializationSchema(RowTypeInfo rowSchema) {
	Preconditions.checkNotNull(rowSchema, "rowSchema");
	final String[] fieldNames = rowSchema.getFieldNames();
	// Use the parameterized array type instead of the raw TypeInformation[].
	final TypeInformation<?>[] fieldTypes = rowSchema.getFieldTypes();
	// Nested (composite) field types cannot be encoded by this schema; reject them early.
	for (int i = 0; i < fieldTypes.length; i++) {
		if (fieldTypes[i] instanceof CompositeType) {
			throw new IllegalArgumentException("JsonRowSerializationSchema cannot encode rows with nested schema, " +
				"but field '" + fieldNames[i] + "' is nested: " + fieldTypes[i].toString());
		}
	}
	this.fieldNames = fieldNames;
}
private ObjectNode convertRow(ObjectNode reuse, RowTypeInfo info, Row row) { if (reuse == null) { reuse = mapper.createObjectNode(); } final String[] fieldNames = info.getFieldNames(); final TypeInformation<?>[] fieldTypes = info.getFieldTypes(); // validate the row if (row.getArity() != fieldNames.length) { throw new IllegalStateException(String.format( "Number of elements in the row '%s' is different from number of field names: %d", row, fieldNames.length)); } for (int i = 0; i < fieldNames.length; i++) { final String name = fieldNames[i]; final JsonNode fieldConverted = convert(reuse, reuse.get(name), fieldTypes[i], row.getField(i)); reuse.set(name, fieldConverted); } return reuse; }
private ObjectNode convertRow(ObjectNode reuse, RowTypeInfo info, Row row) { if (reuse == null) { reuse = mapper.createObjectNode(); } final String[] fieldNames = info.getFieldNames(); final TypeInformation<?>[] fieldTypes = info.getFieldTypes(); // validate the row if (row.getArity() != fieldNames.length) { throw new IllegalStateException(String.format( "Number of elements in the row '%s' is different from number of field names: %d", row, fieldNames.length)); } for (int i = 0; i < fieldNames.length; i++) { final String name = fieldNames[i]; final JsonNode fieldConverted = convert(reuse, reuse.get(name), fieldTypes[i], row.getField(i)); reuse.set(name, fieldConverted); } return reuse; }
/**
 * Deserializes a JSON object node into a {@link Row}, resolving each declared
 * field by name. A missing field either raises an exception (when
 * {@code failOnMissingField} is set) or maps to {@code null}.
 */
private Row convertRow(JsonNode node, RowTypeInfo info) {
	final String[] fieldNames = info.getFieldNames();
	final TypeInformation<?>[] fieldTypes = info.getFieldTypes();
	final Row result = new Row(fieldNames.length);
	for (int pos = 0; pos < fieldNames.length; pos++) {
		final JsonNode child = node.get(fieldNames[pos]);
		if (child != null) {
			result.setField(pos, convert(child, fieldTypes[pos]));
		} else if (failOnMissingField) {
			throw new IllegalStateException(
				"Could not find field with name '" + fieldNames[pos] + "'.");
		} else {
			result.setField(pos, null);
		}
	}
	return result;
}
/**
 * Collects the field metadata of every child table in the given join scope.
 *
 * @param scope the join scope whose children are inspected
 * @return one {@link FieldInfo} per (child, field) pair, carrying the child's
 *         alias, the field name, and the field's type information
 */
private static List<FieldInfo> getAllField(JoinScope scope) {
	final List<FieldInfo> fieldInfoList = Lists.newArrayList();
	for (Object childObj : scope.getChildren()) {
		final JoinScope.ScopeChild child = (JoinScope.ScopeChild) childObj;
		final RowTypeInfo rowType = child.getRowTypeInfo();
		final String[] fieldNames = rowType.getFieldNames();
		final TypeInformation<?>[] types = rowType.getFieldTypes();
		// BUG FIX: the original loop ran up to rowType.getTotalFields(), which counts
		// nested fields recursively and can exceed the length of fieldNames/types for
		// nested row types, causing an ArrayIndexOutOfBoundsException. Iterate over
		// the top-level fields only.
		for (int i = 0; i < fieldNames.length; i++) {
			FieldInfo fieldInfo = new FieldInfo();
			fieldInfo.setTable(child.getAlias());
			fieldInfo.setFieldName(fieldNames[i]);
			fieldInfo.setTypeInformation(types[i]);
			fieldInfoList.add(fieldInfo);
		}
	}
	return fieldInfoList;
}
/**
 * Deserializes a JSON object node into a {@link Row}, resolving each declared
 * field by name. A missing field either raises an exception (when
 * {@code failOnMissingField} is set) or maps to {@code null}.
 */
private Row convertRow(JsonNode node, RowTypeInfo info) {
	final String[] fieldNames = info.getFieldNames();
	final TypeInformation<?>[] fieldTypes = info.getFieldTypes();
	final Row result = new Row(fieldNames.length);
	for (int pos = 0; pos < fieldNames.length; pos++) {
		final JsonNode child = node.get(fieldNames[pos]);
		if (child != null) {
			result.setField(pos, convert(child, fieldTypes[pos]));
		} else if (failOnMissingField) {
			throw new IllegalStateException(
				"Could not find field with name '" + fieldNames[pos] + "'.");
		} else {
			result.setField(pos, null);
		}
	}
	return result;
}