private void writeDemarcatedData(final ProcessSession session, final List<ConsumerRecord<byte[], byte[]>> records, final TopicPartition topicPartition) {
    // Group the records by bundle so that records destined for the same FlowFile are written together
    final Map<BundleInformation, List<ConsumerRecord<byte[], byte[]>>> map = records.stream()
        .collect(Collectors.groupingBy(rec -> new BundleInformation(topicPartition, null, getAttributes(rec))));
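// The grouping above collects this partition's records into per-bundle lists keyed by
// BundleInformation; each bundle is then written out as one FlowFile whose content is the
// record values joined by the configured demarcator. The standalone sketch below is an
// assumption-level illustration of that join step, not the processor's own write path;
// 'joinWithDemarcator' and 'demarcatorBytes' are hypothetical names.
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.List;

public class DemarcatedJoinSketch {

    /**
     * Concatenates the given record values, inserting the demarcator between consecutive
     * values. Null values are skipped, mirroring the null check used when writing a
     * single record's value.
     */
    static byte[] joinWithDemarcator(final List<byte[]> values, final byte[] demarcatorBytes) throws IOException {
        final ByteArrayOutputStream out = new ByteArrayOutputStream();
        boolean useDemarcator = false;
        for (final byte[] value : values) {
            if (value == null) {
                continue;
            }
            if (useDemarcator) {
                out.write(demarcatorBytes);
            }
            out.write(value);
            useDemarcator = true;
        }
        return out.toByteArray();
    }

    public static void main(final String[] args) throws IOException {
        final List<byte[]> values = List.of(
                "first".getBytes(StandardCharsets.UTF_8),
                "second".getBytes(StandardCharsets.UTF_8));
        final byte[] joined = joinWithDemarcator(values, "\n".getBytes(StandardCharsets.UTF_8));
        System.out.println(new String(joined, StandardCharsets.UTF_8)); // prints "first" and "second" on separate lines
    }
}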
private void writeData(final ProcessSession session, final ConsumerRecord<byte[], byte[]> record, final TopicPartition topicPartition) {
    FlowFile flowFile = session.create();
    final BundleTracker tracker = new BundleTracker(record, topicPartition, keyEncoding);
    tracker.incrementRecordCount(1);

    // Write the record value as the FlowFile content; a null value yields an empty FlowFile
    final byte[] value = record.value();
    if (value != null) {
        flowFile = session.write(flowFile, out -> out.write(value));
    }

    flowFile = session.putAllAttributes(flowFile, getAttributes(record));
    tracker.updateFlowFile(flowFile);
    populateAttributes(tracker);
    session.transfer(tracker.flowFile, REL_SUCCESS);
}
private void handleParseFailure(final ConsumerRecord<byte[], byte[]> consumerRecord, final ProcessSession session, final Exception cause, final String message) {
    // If we are unable to parse the data, we need to transfer it to 'parse failure' relationship
    final Map<String, String> attributes = getAttributes(consumerRecord);
    attributes.put(KafkaProcessorUtils.KAFKA_OFFSET, String.valueOf(consumerRecord.offset()));
    attributes.put(KafkaProcessorUtils.KAFKA_PARTITION, String.valueOf(consumerRecord.partition()));
    attributes.put(KafkaProcessorUtils.KAFKA_TOPIC, consumerRecord.topic());

    FlowFile failureFlowFile = session.create();
    final byte[] value = consumerRecord.value();
    if (value != null) {
        failureFlowFile = session.write(failureFlowFile, out -> out.write(value));
    }
    failureFlowFile = session.putAllAttributes(failureFlowFile, attributes);

    final String transitUri = KafkaProcessorUtils.buildTransitURI(securityProtocol, bootstrapServers, consumerRecord.topic());
    session.getProvenanceReporter().receive(failureFlowFile, transitUri);
    session.transfer(failureFlowFile, REL_PARSE_FAILURE);

    if (cause == null) {
        logger.error(message);
    } else {
        logger.error(message, cause);
    }

    session.adjustCounter("Parse Failures", 1, false);
}
try {
    for (final ConsumerRecord<byte[], byte[]> consumerRecord : records) {
        final Map<String, String> attributes = getAttributes(consumerRecord);
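// The truncated loop above belongs to the record-oriented write path: each record is handled
// individually inside the try block, and a record that cannot be parsed is routed through
// handleParseFailure while the rest of the batch continues. The standalone sketch below
// illustrates that per-record error-isolation pattern in plain Java; 'parseAll' and its
// functional parameters are hypothetical stand-ins, not the processor's reader API.
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;
import java.util.function.BiConsumer;
import java.util.function.Function;

public class PerRecordParseSketch {

    /**
     * Parses each raw value independently; a value that fails to parse is handed to the
     * failure handler (analogous to routing to the parse-failure relationship) while the
     * remaining values in the batch continue to be processed.
     */
    static <T> List<T> parseAll(final List<byte[]> rawValues,
                                final Function<byte[], T> parser,
                                final BiConsumer<byte[], Exception> onParseFailure) {
        final List<T> parsed = new ArrayList<>();
        for (final byte[] raw : rawValues) {
            try {
                parsed.add(parser.apply(raw));
            } catch (final Exception e) {
                onParseFailure.accept(raw, e);
            }
        }
        return parsed;
    }

    public static void main(final String[] args) {
        final List<byte[]> batch = List.of(
                "42".getBytes(StandardCharsets.UTF_8),
                "not-a-number".getBytes(StandardCharsets.UTF_8));

        final List<Integer> ok = parseAll(batch,
                raw -> Integer.parseInt(new String(raw, StandardCharsets.UTF_8)),
                (raw, e) -> System.err.println("parse failure: " + e.getMessage()));

        System.out.println(ok); // [42]
    }
}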