/**
 * Returns the type information of the records produced by this source.
 *
 * @return the table schema converted to a row type
 */
@Override
public TypeInformation<Row> getRecordType() {
    return schema.toRowType();
}
/**
 * Returns the type information of the rows emitted downstream.
 *
 * @return the table schema converted to a row type
 */
@Override
public TypeInformation<Row> getOutputType() {
    return schema.toRowType();
}
private boolean checkForCustomFieldMapping(DescriptorProperties descriptorProperties, TableSchema schema) { final Map<String, String> fieldMapping = SchemaValidator.deriveFieldMapping( descriptorProperties, Optional.of(schema.toRowType())); // until FLINK-9870 is fixed we assume that the table schema is the output type return fieldMapping.size() != schema.getFieldNames().length || !fieldMapping.entrySet().stream().allMatch(mapping -> mapping.getKey().equals(mapping.getValue())); }
/**
 * Derives the JSON row type information from the given descriptor properties.
 *
 * <p>Precedence: an explicit Flink type string, then a JSON schema string,
 * then a schema derived from the full property map.
 *
 * @param descriptorProperties properties describing the format
 * @return the resolved row type information
 */
private TypeInformation<Row> createTypeInformation(DescriptorProperties descriptorProperties) {
    // An explicit Flink type definition takes precedence.
    if (descriptorProperties.containsKey(JsonValidator.FORMAT_SCHEMA)) {
        return (RowTypeInfo) descriptorProperties.getType(JsonValidator.FORMAT_SCHEMA);
    }
    // Next, try a standard JSON schema definition.
    if (descriptorProperties.containsKey(JsonValidator.FORMAT_JSON_SCHEMA)) {
        final String jsonSchema = descriptorProperties.getString(JsonValidator.FORMAT_JSON_SCHEMA);
        return JsonRowSchemaConverter.convert(jsonSchema);
    }
    // Fall back to deriving the schema from all remaining properties.
    return deriveSchema(descriptorProperties.asMap()).toRowType();
}
.field(TIME, Types.SQL_TIMESTAMP()) .build() .toRowType() );
KAFKA_PROPERTIES, Optional.of(new FlinkFixedPartitioner<>()), new TestSerializationSchema(schema.toRowType())); final DataStreamMock streamMock = new DataStreamMock(new StreamExecutionEnvironmentMock(), schema.toRowType()); actualKafkaSink.emitDataStream(streamMock); assertTrue(getExpectedFlinkKafkaProducer().isAssignableFrom(streamMock.sinkFunction.getClass()));
KEY_DELIMITER, KEY_NULL_LITERAL, new JsonRowSerializationSchema(schema.toRowType()), XContentType.JSON, new DummyFailureHandler(),
/**
 * Provides the record type produced by this table connector.
 *
 * @return row type information derived from the table schema
 */
@Override
public TypeInformation<Row> getRecordType() {
    return schema.toRowType();
}
private boolean checkForCustomFieldMapping(DescriptorProperties descriptorProperties, TableSchema schema) { final Map<String, String> fieldMapping = SchemaValidator.deriveFieldMapping( descriptorProperties, Optional.of(schema.toRowType())); // until FLINK-9870 is fixed we assume that the table schema is the output type return fieldMapping.size() != schema.getFieldNames().length || !fieldMapping.entrySet().stream().allMatch(mapping -> mapping.getKey().equals(mapping.getValue())); }
private boolean checkForCustomFieldMapping(DescriptorProperties descriptorProperties, TableSchema schema) { final Map<String, String> fieldMapping = SchemaValidator.deriveFieldMapping( descriptorProperties, Optional.of(schema.toRowType())); // until FLINK-9870 is fixed we assume that the table schema is the output type return fieldMapping.size() != schema.getFieldNames().length || !fieldMapping.entrySet().stream().allMatch(mapping -> mapping.getKey().equals(mapping.getValue())); }
/**
 * Creates a generic Kafka JSON {@link StreamTableSource}.
 *
 * <p>The JSON schema is converted into a row type and wrapped in a
 * {@link JsonRowDeserializationSchema} for decoding the Kafka messages.
 *
 * @param topic Kafka topic to consume.
 * @param properties Properties for the Kafka consumer.
 * @param tableSchema The schema of the table.
 * @param jsonSchema The schema of the JSON messages to decode from Kafka.
 * @deprecated Use table descriptors instead of implementation-specific builders.
 */
@Deprecated
protected KafkaJsonTableSource(
        String topic,
        Properties properties,
        TableSchema tableSchema,
        TableSchema jsonSchema) {
    super(
        tableSchema,
        topic,
        properties,
        new JsonRowDeserializationSchema(jsonSchema.toRowType()));
}
/**
 * Creates a generic Kafka JSON {@link StreamTableSource}.
 *
 * <p>Messages read from the given topic are decoded with a
 * {@link JsonRowDeserializationSchema} built from the supplied JSON schema.
 *
 * @param topic Kafka topic to consume.
 * @param properties Properties for the Kafka consumer.
 * @param tableSchema The schema of the table.
 * @param jsonSchema The schema of the JSON messages to decode from Kafka.
 * @deprecated Use table descriptors instead of implementation-specific builders.
 */
@Deprecated
protected KafkaJsonTableSource(
        String topic,
        Properties properties,
        TableSchema tableSchema,
        TableSchema jsonSchema) {
    super(
        tableSchema,
        topic,
        properties,
        new JsonRowDeserializationSchema(jsonSchema.toRowType()));
}
/**
 * Resolves the row type information for the JSON format from the
 * descriptor properties.
 *
 * <p>Resolution order: explicit type string via {@code FORMAT_SCHEMA},
 * JSON schema string via {@code FORMAT_JSON_SCHEMA}, otherwise a schema
 * derived from the complete property map.
 *
 * @param descriptorProperties properties describing the format
 * @return the resolved row type information
 */
private TypeInformation<Row> createTypeInformation(DescriptorProperties descriptorProperties) {
    final boolean hasTypeString = descriptorProperties.containsKey(JsonValidator.FORMAT_SCHEMA);
    if (hasTypeString) {
        return (RowTypeInfo) descriptorProperties.getType(JsonValidator.FORMAT_SCHEMA);
    }

    final boolean hasJsonSchema = descriptorProperties.containsKey(JsonValidator.FORMAT_JSON_SCHEMA);
    if (hasJsonSchema) {
        return JsonRowSchemaConverter.convert(
            descriptorProperties.getString(JsonValidator.FORMAT_JSON_SCHEMA));
    }

    // No explicit schema given: derive it from all properties.
    return deriveSchema(descriptorProperties.asMap()).toRowType();
}