/**
 * Builds the meetup RSVP stream: loads the Pinot schema from the given file
 * and wires up a legacy Kafka producer that publishes raw byte payloads.
 *
 * @param schemaFile file containing the Pinot schema definition
 * @throws IOException        if the schema file cannot be read
 * @throws URISyntaxException declared for callers; not thrown in this body
 */
public MeetupRsvpStream(File schemaFile) throws IOException, URISyntaxException {
  schema = Schema.fromFile(schemaFile);

  // Standard quickstart producer settings: default local broker, raw byte
  // payloads, leader-ack only.
  Properties producerProps = new Properties();
  producerProps.put("metadata.broker.list", KafkaStarterUtils.DEFAULT_KAFKA_BROKER);
  producerProps.put("serializer.class", "kafka.serializer.DefaultEncoder");
  producerProps.put("request.required.acks", "1");
  producer = new Producer<String, byte[]>(new ProducerConfig(producerProps));
}
/**
 * Builds the airline data stream: records the schema and Avro source file,
 * prepares the realtime stream, wires up a Kafka producer, and allocates the
 * single-threaded executor used to push events.
 *
 * @param pinotSchema Pinot schema describing the airline records
 * @param avroFile    Avro file the realtime events are read from
 * @throws FileNotFoundException if the Avro file cannot be opened
 * @throws IOException           on any other I/O failure while creating the stream
 */
public AirlineDataStream(Schema pinotSchema, File avroFile) throws FileNotFoundException, IOException {
  this.pinotSchema = pinotSchema;
  this.avroFile = avroFile;
  createStream();

  Properties properties = new Properties();
  properties.put("metadata.broker.list", KafkaStarterUtils.DEFAULT_KAFKA_BROKER);
  properties.put("serializer.class", "kafka.serializer.DefaultEncoder");
  properties.put("request.required.acks", "1");
  ProducerConfig producerConfig = new ProducerConfig(properties);
  producer = new Producer<String, byte[]>(producerConfig);

  service = Executors.newFixedThreadPool(1);
  // Bug fix: corrected typo "Offine" -> "Offline" in the user-facing status line.
  Quickstart.printStatus(Quickstart.Color.YELLOW,
      "***** Offline data has max time as 16101, realtime will start consuming from time 16102 and increment time every 3000 events *****");
}
// NOTE(review): fragment — builds a byte[]/byte[] producer from a config
// prepared outside this excerpt; the try block opened here is closed beyond
// the visible region, so it is left untouched.
Producer<byte[], byte[]> producer = new Producer<byte[], byte[]>(producerConfig); try {
// Fragment: constructs the byte[]/byte[] producer; the diamond operator infers
// the type arguments from the declared variable type.
Producer<byte[], byte[]> producer = new Producer<>(producerConfig);
// Fragment: same construction as elsewhere — a byte[]/byte[] producer created
// from a ProducerConfig built outside this excerpt.
Producer<byte[], byte[]> producer = new Producer<>(producerConfig);
/**
 * Factory hook that turns raw properties into a producer; protected so tests
 * or subclasses can substitute their own implementation.
 */
protected Producer createProducer(Properties props) {
  ProducerConfig config = new ProducerConfig(props);
  return new Producer(config);
}
// Constructs the Kafka-backed event store: derives the set of source fields,
// builds the legacy Kafka producer, ensures the ZooKeeper offset path exists,
// and registers this instance for leader election on that path.
@Inject public KafkaEventStore(@Named("event.store.kafka") KafkaConfig config, FieldDependencyBuilder.FieldDependency fieldDependency) {
  // Validates the injected config; note this reassigns the PARAMETER only —
  // no `this.config` field is set here (presumably stored elsewhere — TODO confirm).
  config = checkNotNull(config, "config is null");
  // Source fields = dependent-field keys plus the names of all constant fields.
  this.sourceFields = Sets.union(fieldDependency.dependentFields.keySet(), fieldDependency.constantFields.stream().map(SchemaField::getName) .collect(Collectors.toSet()));
  Properties props = new Properties();
  // Comma-joined broker list from the configured nodes.
  props.put("metadata.broker.list", config.getNodes().stream().map(HostAndPort::toString).collect(Collectors.joining(",")));
  props.put("serializer.class", config.SERIALIZER);
  ProducerConfig producerConfig = new ProducerConfig(props);
  this.producer = new Producer(producerConfig);
  // Curator client with exponential backoff (1s base, 3 retries).
  CuratorFramework client = CuratorFrameworkFactory.newClient(config.getZookeeperNode().toString(), new ExponentialBackoffRetry(1000, 3));
  client.start();
  try {
    // Create the offset path on first run only.
    if (client.checkExists().forPath(ZK_OFFSET_PATH) == null) client.create().forPath(ZK_OFFSET_PATH);
  } catch (Exception e) {
    // Best-effort: a failure to create the path is logged, not fatal —
    // presumably another node may have created it concurrently; verify.
    LOGGER.error(e, format("Couldn't create event offset path %s", ZK_OFFSET_PATH));
  }
  // Joins leader election; this class is the LeaderSelectorListener.
  new LeaderSelector(client, ZK_OFFSET_PATH, this).start();
}
/**
 * Publishes {@code numOfMessages} randomly generated messages to the given
 * topic, keyed by their index, and returns the generated messages so callers
 * can verify consumption.
 *
 * @param topic         Kafka topic to write to
 * @param numOfMessages number of messages to generate and send
 * @return the messages that were produced, in send order
 */
private static List<Message> writeKafka(String topic, int numOfMessages) {
  List<Message> generated = new ArrayList<Message>();
  List<KeyedMessage<String, String>> outgoing = new ArrayList<KeyedMessage<String, String>>();
  for (int index = 0; index < numOfMessages; index++) {
    Message message = new Message(RANDOM.nextInt());
    generated.add(message);
    // Key is the message index; value is the JSON-serialized message.
    outgoing.add(new KeyedMessage<String, String>(topic, Integer.toString(index), gson.toJson(message)));
  }
  Properties producerProps = cluster.getProps();
  producerProps.setProperty("serializer.class", StringEncoder.class.getName());
  producerProps.setProperty("key.serializer.class", StringEncoder.class.getName());
  Producer<String, String> producer = new Producer<String, String>(new ProducerConfig(producerProps));
  try {
    producer.send(outgoing);
  } finally {
    // Always release the producer's network resources, even if send fails.
    producer.close();
  }
  return generated;
}
// Test-oriented factory override: returns either a real producer or one of
// two mock producers depending on the configured producerType.
@Override protected Producer createProducer(Properties props) {
  switch (producerType) {
    // Real legacy producer built from the supplied properties.
    case REGULAR: return new Producer(new ProducerConfig(props));
    // Mock whose send() always throws, for failure-path tests.
    case SEND_THROWS_EXCEPTION: return mockProducerSendThrowsException();
    // Mock whose send() fails twice then succeeds, for retry tests.
    case SEND_SUCCEED_THIRD_TIME: return mockProducerThirdSendSucceed();
    default: throw new RuntimeException("producer type not found");
  }
}
// NOTE(review): fragment (loop body by the look of the per-iteration index i) —
// gives each producer a unique client.id, builds a legacy 0.8 producer, and
// starts a ProducerThread feeding from the shared data channel; thread
// shutdown is handled outside this excerpt.
kafkaProducerProperties_08.put("client.id", clientId + "-" + i); ProducerConfig producerConfig_08 = new ProducerConfig(kafkaProducerProperties_08); Producer producer = new Producer(producerConfig_08); ProducerThread producerThread = new ProducerThread(producerDataChannel, producer, i); producerThread.start();
/**
 * Returns the cached producer for the given type name, lazily creating and
 * caching one on first use. Synchronized so concurrent callers share a single
 * producer per type.
 *
 * @param typeName cache key identifying the producer
 * @param plugin   format plugin associated with the type (not used for creation here)
 * @return the cached or newly created producer for {@code typeName}
 */
private synchronized Producer<String, T> getProducerCreateIfNull(
    final String typeName, final GeoWaveAvroFormatPlugin<?, ?> plugin) {
  Producer<String, T> cached = cachedProducers.get(typeName);
  if (cached == null) {
    cached = new Producer<String, T>(new ProducerConfig(properties));
    cachedProducers.put(typeName, cached);
  }
  return cached;
}
/**
 * Wires a producer and a consumer to a shared work queue and starts both.
 */
public class SomeClient {
  public void start() {
    // NOTE(review): raw Queue/LinkedList kept as-is to match the (unseen)
    // Producer/Consumer constructor signatures.
    Queue workQueue = new LinkedList();
    producer = new Producer(workQueue);
    consumer = new Consumer(workQueue);
    producer.start();
    consumer.start();
  }
}
/**
 * Storm lifecycle hook: builds the Kafka producer from the broker properties
 * embedded in the topology config and captures the topic and collector.
 */
@Override
public void prepare(Map stormConf, TopologyContext context, OutputCollector collector) {
  // Broker settings travel inside the Storm config under a well-known key.
  Map brokerSettings = (Map) stormConf.get(KAFKA_BROKER_PROPERTIES);
  Properties producerProps = new Properties();
  producerProps.putAll(brokerSettings);
  producer = new Producer<K, V>(new ProducerConfig(producerProps));
  this.topic = (String) stormConf.get(TOPIC);
  this.collector = collector;
}
/**
 * Demo entry point: pushes 10 000 sequential integers through a Producer.
 */
public class MainApp {
  public static void main(String[] args) throws Exception {
    Producer producer = new Producer();
    for (int value = 0; value < 10000; value++) {
      producer.send(value);
    }
  }
}
/**
 * Starts the server: builds the Kafka producer from the configured properties,
 * then opens the listening socket on the configured port.
 *
 * @throws IOException if the server socket cannot be opened
 */
public void startServer() throws IOException {
  producer = new Producer<String, T>(new ProducerConfig(configProperties));
  serverSocket = new ServerSocket(port);
}
/**
 * Classic producer/consumer demo: a bounded queue shared between one producer
 * thread and one consumer thread.
 */
class Program {
  /** Capacity for the shared queue — ArrayBlockingQueue requires one. */
  private static final int QUEUE_CAPACITY = 16;

  // Bug fixes: (1) ArrayBlockingQueue has no no-arg constructor, so
  // `new ArrayBlockingQueue<>()` did not compile — a capacity is mandatory;
  // (2) main() took no String[] args and therefore was not a JVM entry point.
  public static void main(String[] args) {
    BlockingQueue<Double> queue = new ArrayBlockingQueue<>(QUEUE_CAPACITY);
    Producer producer = new Producer(queue);
    Consumer consumer = new Consumer(queue);
    new Thread(producer).start();
    new Thread(consumer).start();
  }
}
/**
 * Lazily builds and caches the async String/String producer pointing at the
 * configured Kafka quorum.
 *
 * @return the shared producer instance, created on first call
 */
protected Producer<String, String> getProducer() {
  if (producer != null) {
    return producer;
  }
  Properties props = new Properties();
  props.put("serializer.class", "kafka.serializer.StringEncoder");
  props.put("metadata.broker.list", kafkaQuorum);
  props.put("producer.type", "async");
  producer = new Producer<String, String>(new ProducerConfig(props));
  return producer;
}
/**
 * Spring bean for the Avro (byte[]) Kafka producer.
 *
 * Bug fix: this bean builds the LEGACY Scala producer
 * ({@code kafka.javaapi.producer.Producer} via {@code kafka.producer.ProducerConfig}),
 * which reads {@code serializer.class}/{@code key.serializer.class} — not the
 * new-client {@code ProducerConfig} constants that were being set here. Those
 * new-client keys were silently ignored, so the key serializer fell back to
 * the default byte encoder and String keys would fail at send time. Use the
 * legacy encoder properties instead, mirroring the String producer bean.
 */
@Bean
public Producer<String, byte[]> avroProducer() {
  Properties properties = new Properties();
  properties.put("metadata.broker.list", configurationContext.getKafkaHostsQuorum());
  // Legacy 0.8 producer properties: raw bytes for values, String keys.
  properties.put("serializer.class", "kafka.serializer.DefaultEncoder");
  properties.put("key.serializer.class", "kafka.serializer.StringEncoder");
  return new Producer<String, byte[]>(new ProducerConfig(properties));
}
/**
 * Spring bean for the async String/String Kafka producer used for plain-text
 * payloads on the configured quorum.
 */
@Bean
public Producer<String, String> producer() {
  Properties props = new Properties();
  props.put("serializer.class", "kafka.serializer.StringEncoder");
  props.put("metadata.broker.list", configurationContext.getKafkaHostsQuorum());
  props.put("producer.type", "async");
  ProducerConfig config = new ProducerConfig(props);
  return new Producer<String, String>(config);
}
@Override public synchronized void start() { // instantiate the producer ProducerConfig config = new ProducerConfig(producerProps); producer = new Producer<String, String>(config); super.start(); }