@Override
public ISqlStreamsDataSource constructStreams(URI uri, String inputFormatClass, String outputFormatClass,
                                              Properties properties, List<FieldInfo> fields) {
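    // Gather the declared column names and locate the primary key column.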
    List<String> fieldNames = new ArrayList<>();
    int primaryIndex = -1;
    for (int i = 0; i < fields.size(); ++i) {
        FieldInfo f = fields.get(i);
        fieldNames.add(f.name());
        if (f.isPrimary()) {
            primaryIndex = i;
        }
    }
    Preconditions.checkState(primaryIndex != -1, "Kafka stream table must have a primary key");
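    // Resolve a Scheme that deserializes incoming Kafka messages into the declared tuple fields.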
    Scheme scheme = SerdeUtils.getScheme(inputFormatClass, properties, fieldNames);
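    // Connection details ride along on the table URI's query string, e.g. ?bootstrap-servers=host1:9092.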
    Map<String, String> values = parseUriParams(uri.getQuery());
    String bootstrapServers = values.get(URI_PARAMS_BOOTSTRAP_SERVERS);
    Preconditions.checkNotNull(bootstrapServers, "bootstrap-servers must be specified");
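    // The topic name is carried in the host portion of the table URI.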
    String topic = uri.getHost();
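    // Build the spout config: consume raw ByteBuffers and let the Scheme adapter translate records
    // into tuples. A fresh UUID-based group id keeps this spout's consumer group distinct, so it
    // reads the topic independently of any other consumer.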
    KafkaSpoutConfig<ByteBuffer, ByteBuffer> kafkaSpoutConfig =
            new KafkaSpoutConfig.Builder<ByteBuffer, ByteBuffer>(bootstrapServers, topic)
                    .setProp(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, ByteBufferDeserializer.class)
                    .setProp(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, ByteBufferDeserializer.class)
                    .setProp(ConsumerConfig.GROUP_ID_CONFIG, "storm-sql-kafka-" + UUID.randomUUID())
                    .setRecordTranslator(new RecordTranslatorSchemeAdapter(scheme))
                    .build();
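    // The serializer mirrors the read path: it converts outgoing tuples back into bytes for the Kafka sink.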
    IOutputSerializer serializer = SerdeUtils.getSerializer(outputFormatClass, properties, fieldNames);
    return new KafkaStreamsDataSource(kafkaSpoutConfig, bootstrapServers, topic, properties, serializer);
}
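
// Illustrative only (table and column names are hypothetical): a Storm SQL declaration that
// would reach this provider could look like
//
//     CREATE EXTERNAL TABLE ORDERS (ID INT PRIMARY KEY, UNIT_PRICE INT)
//         LOCATION 'kafka://orders?bootstrap-servers=localhost:9092'
//
// so that uri.getHost() above yields the topic "orders" and the bootstrap-servers query
// parameter supplies the broker list.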