public static KafkaSchemaRegistry getSchemaRegistry(Map<String, ?> config) {
  // Copy the config map into a Properties instance and delegate to the factory.
  Properties props = new Properties();
  props.putAll(config);
  return KafkaSchemaRegistryFactory.getSchemaRegistry(props);
}
/**
 * Configure this class.
 * @param configs configs in key/value pairs
 * @param isKey whether the deserializer is used for keys or values; must be false, since only value deserialization is supported
 */
public void configure(Map<String, ?> configs, boolean isKey) {
  Preconditions.checkArgument(!isKey, "LiAvroDeserializer only works for value fields");
  _datumReader = new GenericDatumReader<>();
  // Convert the config map into Properties so the schema registry factory can consume it.
  Properties props = new Properties();
  for (Map.Entry<String, ?> entry : configs.entrySet()) {
    String value = String.valueOf(entry.getValue());
    props.setProperty(entry.getKey(), value);
  }
  _schemaRegistry = KafkaSchemaRegistryFactory.getSchemaRegistry(props);
}
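// A minimal, hedged usage sketch for the configure() contract above. It assumes a no-arg
// LiAvroDeserializer constructor and an illustrative schema registry config key/URL; neither
// is taken from this code. Only value deserialization is supported, so isKey must be false.
Map<String, String> deserializerConfigs = new HashMap<>();
deserializerConfigs.put("kafka.schema.registry.url", "http://localhost:12250/schemaRegistry/schemas"); // assumed key and endpoint
LiAvroDeserializer valueDeserializer = new LiAvroDeserializer(); // assumed no-arg constructor
valueDeserializer.configure(deserializerConfigs, false); // passing true would fail the precondition above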
@Override
public Converter<S, Schema, byte[], GenericRecord> init(WorkUnitState workUnit) {
  // Build the schema registry from the work unit's properties and reuse it for deserialization.
  this.schemaRegistry = KafkaSchemaRegistryFactory.getSchemaRegistry(workUnit.getProperties());
  this.deserializer = new LiAvroDeserializerBase(this.schemaRegistry);
  return this;
}
public SimpleKafkaConsumer(Properties props, KafkaCheckpoint checkpoint) {
  Config config = ConfigFactory.parseProperties(props);
  topic = config.getString("topic");
  String zkConnect = config.getString("zookeeper.connect");

  // Build the LinkedIn Avro deserializer backed by the configured schema registry.
  schemaRegistry = KafkaSchemaRegistryFactory.getSchemaRegistry(props);
  deserializer = new LiAvroDeserializer(schemaRegistry);
  /* TODO: Make Confluent schema registry integration configurable
   * HashMap<String, String> avroSerDeConfig = new HashMap<>();
   * avroSerDeConfig.put("schema.registry.url", "http://localhost:8081");
   * deserializer = new io.confluent.kafka.serializers.KafkaAvroDeserializer();
   * deserializer.configure(avroSerDeConfig, false);
   */

  // Consumer settings: manual offset commits, start from the earliest available offset.
  Properties consumeProps = new Properties();
  consumeProps.put("zookeeper.connect", zkConnect);
  consumeProps.put("group.id", "gobblin-tool-" + System.nanoTime());
  consumeProps.put("zookeeper.session.timeout.ms", "10000");
  consumeProps.put("zookeeper.sync.time.ms", "10000");
  consumeProps.put("auto.commit.interval.ms", "10000");
  consumeProps.put("auto.offset.reset", "smallest");
  consumeProps.put("auto.commit.enable", "false");
  //consumeProps.put("consumer.timeout.ms", "10000");

  // Create a single stream for the configured topic and keep its iterator for polling.
  consumer = Consumer.createJavaConsumerConnector(new ConsumerConfig(consumeProps));
  Map<String, List<KafkaStream<byte[], byte[]>>> consumerMap =
      consumer.createMessageStreams(ImmutableMap.of(topic, 1));
  List<KafkaStream<byte[], byte[]>> streams = consumerMap.get(this.topic);
  stream = streams.get(0);
  iterator = stream.iterator();
}
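// A minimal, hedged construction sketch for SimpleKafkaConsumer above. The property values are
// placeholders, the schema registry settings expected by KafkaSchemaRegistryFactory are assumed
// to be present in the same Properties object, and `checkpoint` is an assumed pre-built
// KafkaCheckpoint instance.
Properties consumerProps = new Properties();
consumerProps.setProperty("topic", "example-topic");              // placeholder topic name
consumerProps.setProperty("zookeeper.connect", "localhost:2181"); // placeholder ZooKeeper quorum
SimpleKafkaConsumer simpleConsumer = new SimpleKafkaConsumer(consumerProps, checkpoint);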