@Override
public TableSchema getTableSchema() {
    return new TableSchema(
        new String[] {"key", "rowtime", "payload"},
        new TypeInformation[] {Types.INT, Types.SQL_TIMESTAMP, Types.STRING});
}
/**
 * Returns a {@link TableSchema} instance.
 */
public TableSchema build() {
    return new TableSchema(
        fieldNames.toArray(new String[0]),
        fieldTypes.toArray(new TypeInformation<?>[0]));
}
}
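A builder like this is normally driven by repeated field additions before build() is called. The sketch below is a minimal, hedged usage example; the TableSchema.builder() factory and the field(name, type) method are assumptions here, since only build() is shown above.

// Hypothetical usage: field(...) is assumed to append to the fieldNames/fieldTypes
// lists that build() converts to arrays.
TableSchema schema = TableSchema.builder()
    .field("id", Types.LONG)
    .field("name", Types.STRING)
    .field("ts", Types.SQL_TIMESTAMP)
    .build();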
/**
 * Creates a table schema from a {@link TypeInformation} instance. If the type information is
 * a {@link CompositeType}, the field names and types of the composite type are used to
 * construct the {@link TableSchema} instance. Otherwise, a table schema with a single field
 * is created. The field name is "f0" and the field type is the provided type.
 *
 * @param typeInfo The {@link TypeInformation} from which the table schema is generated.
 * @return The table schema that was generated from the given {@link TypeInformation}.
 */
public static TableSchema fromTypeInfo(TypeInformation<?> typeInfo) {
    if (typeInfo instanceof CompositeType<?>) {
        final CompositeType<?> compositeType = (CompositeType<?>) typeInfo;
        // get field names and types from the composite type
        final String[] fieldNames = compositeType.getFieldNames();
        final TypeInformation<?>[] fieldTypes = new TypeInformation[fieldNames.length];
        for (int i = 0; i < fieldTypes.length; i++) {
            fieldTypes[i] = compositeType.getTypeAt(i);
        }
        return new TableSchema(fieldNames, fieldTypes);
    } else {
        // create a table schema with a single field named "f0" of the given type
        return new TableSchema(
            new String[]{ATOMIC_TYPE_FIELD_NAME},
            new TypeInformation<?>[]{typeInfo});
    }
}
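A short usage sketch for fromTypeInfo, following the behavior described in the Javadoc above; the variable and field names are illustrative.

// Composite type: field names and types come from the RowTypeInfo.
RowTypeInfo rowType = new RowTypeInfo(
    new TypeInformation<?>[] {Types.LONG, Types.STRING},
    new String[] {"id", "name"});
TableSchema composite = TableSchema.fromTypeInfo(rowType);   // fields: id, name

// Atomic type: a single field named "f0" of the given type.
TableSchema atomic = TableSchema.fromTypeInfo(Types.STRING); // field: "f0"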
@Override
public TableSchema getTableSchema() {
    if (this.tableSchema == null) {
        return new TableSchema(getFieldNames(), getFieldTypes());
    } else {
        return this.tableSchema;
    }
}
/**
 * Returns a deep copy of the table schema.
 */
public TableSchema copy() {
    return new TableSchema(fieldNames.clone(), fieldTypes.clone());
}
private OrcTableSource(String path, TypeDescription orcSchema, Configuration orcConfig,
        int batchSize, boolean recursiveEnumeration, int[] selectedFields, Predicate[] predicates) {

    Preconditions.checkNotNull(path, "Path must not be null.");
    Preconditions.checkNotNull(orcSchema, "OrcSchema must not be null.");
    Preconditions.checkNotNull(orcConfig, "Configuration must not be null.");
    Preconditions.checkArgument(batchSize > 0, "Batch size must be larger than zero.");

    this.path = path;
    this.orcSchema = orcSchema;
    this.orcConfig = orcConfig;
    this.batchSize = batchSize;
    this.recursiveEnumeration = recursiveEnumeration;
    this.selectedFields = selectedFields;
    this.predicates = predicates;

    // determine the type information from the ORC schema
    RowTypeInfo typeInfoFromSchema = (RowTypeInfo) OrcBatchReader.schemaToTypeInfo(this.orcSchema);

    // set return type info
    if (selectedFields == null) {
        this.typeInfo = typeInfoFromSchema;
    } else {
        this.typeInfo = RowTypeInfo.projectFields(typeInfoFromSchema, selectedFields);
    }

    // create a TableSchema that corresponds to the ORC schema
    this.tableSchema = new TableSchema(
        typeInfoFromSchema.getFieldNames(),
        typeInfoFromSchema.getFieldTypes());
}
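This constructor is private; instances are normally obtained through the source's builder. A minimal sketch of that usage, assuming flink-orc's OrcTableSource.builder() API; the path and ORC schema string are placeholders.

// Illustrative only: path and schema string are placeholders.
OrcTableSource orcSource = OrcTableSource.builder()
    .path("file:///path/to/orc/files")
    .forOrcSchema("struct<id:bigint,name:string>")
    .build();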
MockTableSource(List<Row> data, RowTypeInfo type) {
    this.data = data;
    this.type = type;
    this.schema = new TableSchema(type.getFieldNames(), type.getFieldTypes());
}
private static ExternalCatalogTable mockExternalCatalogTable(String topic, String brokerAddress) {
    TableSchema schema = new TableSchema(new String[] {"foo"}, new TypeInformation[] {INT_TYPE_INFO});
    ConnectorDescriptor descriptor = new ConnectorDescriptor("kafka+json", 1, false) {
        @Override
        public void addConnectorProperties(DescriptorProperties properties) {
            properties.putTableSchema(TOPIC_SCHEMA_KEY, schema);
            properties.putString(TOPIC_NAME_KEY, topic);
            properties.putString(KAFKA_CONFIG_PREFIX + "." + ConsumerConfig.GROUP_ID_CONFIG, "foo");
            properties.putString(KAFKA_CONFIG_PREFIX + "." + ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, brokerAddress);
            properties.putString(KAFKA_CONFIG_PREFIX + "." + ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
        }
    };
    return new ExternalCatalogTable(descriptor, Option.empty(), Option.empty(), Option.empty(), Option.empty());
}
}
@Override
public TableSchema getTableSchema() {
    TypeInformation<?>[] types = new TypeInformation[] {
        Types.LONG,
        Types.LONG,
        Types.LONG,
        Types.STRING,
        Types.FLOAT,
        Types.FLOAT,
        Types.FLOAT,
        Types.SQL_TIMESTAMP
    };

    String[] names = new String[] {
        "rideId",
        "taxiId",
        "driverId",
        "paymentType",
        "tip",
        "tolls",
        "totalFare",
        "eventTime"
    };

    return new TableSchema(names, types);
}
@Override
public TableSchema getTableSchema() {
    TypeInformation<?>[] types = new TypeInformation[] {
        Types.LONG,
        Types.LONG,
        Types.LONG,
        Types.BOOLEAN,
        Types.FLOAT,
        Types.FLOAT,
        Types.FLOAT,
        Types.FLOAT,
        Types.SHORT,
        Types.SQL_TIMESTAMP
    };

    String[] names = new String[] {
        "rideId",
        "taxiId",
        "driverId",
        "isStart",
        "startLon",
        "startLat",
        "endLon",
        "endLat",
        "passengerCnt",
        "eventTime"
    };

    return new TableSchema(names, types);
}
ExternalCatalogTable toExternalCatalogTable() {
    TableSchema tableSchema = new TableSchema(schema.getFieldNames(), schema.getFieldTypes());
    ConnectorDescriptor descriptor = new ConnectorDescriptor(CONNECTOR_TYPE, CONNECTOR_VERSION, false) {
        @Override
        public void addConnectorProperties(DescriptorProperties properties) {
            properties.putTableSchema(TABLE_SCHEMA_CONNECTOR_PROPERTY, tableSchema);
            properties.putString(TABLE_DATA_CONNECTOR_PROPERTY, serializeRows());
        }
    };
    return new ExternalCatalogTable(descriptor, Option.empty(), Option.empty(), Option.empty(), Option.empty());
}
/**
 * Returns a {@link TableSchema} instance.
 */
public TableSchema build() {
    return new TableSchema(
        columns.toArray(new Column[0]),
        primaryKey.toArray(new String[0]),
        // List<List<String>> -> String[][]
        uniqueKeys
            .stream()
            .map(u -> u.toArray(new String[0])) // map each List to an array
            .collect(Collectors.toList())       // collect as a List<String[]>
            .toArray(new String[uniqueKeys.size()][]),
        // List<List<String>> -> String[][]
        indexes
            .stream()
            .map(u -> u.toArray(new String[0])) // map each List to an array
            .collect(Collectors.toList())       // collect as a List<String[]>
            .toArray(new String[indexes.size()][]),
        computedColumns.toArray(new ComputedColumn[0]),
        watermarks.toArray(new Watermark[0]));
}
}
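The unique-key and index arguments above are just a List<List<String>> reshaped into a String[][]. Below is a self-contained sketch of that conversion step in isolation; the example key names are illustrative.

import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;

public class NestedListToArray {
    public static void main(String[] args) {
        List<List<String>> uniqueKeys = Arrays.asList(
            Arrays.asList("user_id"),
            Arrays.asList("region", "ts"));

        // Same conversion as in build(): each inner List becomes a String[],
        // and the outer List becomes a String[][].
        String[][] asArrays = uniqueKeys
            .stream()
            .map(u -> u.toArray(new String[0]))
            .collect(Collectors.toList())
            .toArray(new String[uniqueKeys.size()][]);

        System.out.println(Arrays.deepToString(asArrays)); // [[user_id], [region, ts]]
    }
}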
final String sourceTopic = "foo";
final String sinkTopic = "bar";
final TableSchema schema = new TableSchema(new String[] {"foo"}, new TypeInformation[] {INT_TYPE_INFO});
/**
 * Creates Flink's TableSchema from Hive columns.
 */
public static TableSchema createTableSchema(List<FieldSchema> fieldSchemas, List<FieldSchema> partitionFields) {
    int colSize = fieldSchemas.size() + partitionFields.size();

    String[] colNames = new String[colSize];
    InternalType[] colTypes = new InternalType[colSize];

    for (int i = 0; i < fieldSchemas.size(); i++) {
        FieldSchema fs = fieldSchemas.get(i);
        colNames[i] = fs.getName();
        colTypes[i] = HiveMetadataUtil.convert(fs.getType());
    }

    // partition columns are appended after the regular columns
    for (int i = 0; i < partitionFields.size(); i++) {
        FieldSchema fs = partitionFields.get(i);
        colNames[i + fieldSchemas.size()] = fs.getName();
        colTypes[i + fieldSchemas.size()] = HiveMetadataUtil.convert(fs.getType());
    }

    return new TableSchema(colNames, colTypes);
}
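A usage sketch for createTableSchema, assuming the Hive metastore FieldSchema(name, type, comment) constructor; the column names and Hive type strings are illustrative.

// Illustrative input: two regular columns and one partition column.
List<FieldSchema> cols = Arrays.asList(
    new FieldSchema("id", "bigint", null),
    new FieldSchema("name", "string", null));
List<FieldSchema> partCols = Arrays.asList(
    new FieldSchema("dt", "string", null));

// The resulting schema lists partition columns last: id, name, dt.
TableSchema schema = createTableSchema(cols, partCols);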