/**
 * Builds a {@link StandardEvent} from the raw payload, optional metadata, and the responder.
 * The sender is taken from metadata under {@link EventFactory#SENDER_KEY} when present,
 * otherwise it is left null.
 */
@Override
public StandardEvent create(final byte[] data, final Map<String, String> metadata, final ChannelResponder responder) {
    // Map.get already yields null for an absent key, so no containsKey pre-check is needed
    final String sender = (metadata == null) ? null : metadata.get(EventFactory.SENDER_KEY);
    return new StandardEvent(sender, data, responder);
}
/**
 * Derives the FlowFile attributes for a batch: the sender of the batch's first event
 * and the port this processor listens on.
 * NOTE(review): the attribute keys are hard-coded strings here; if the project defines
 * shared constants for them, consider using those — confirm before changing.
 */
@Override
protected Map<String, String> getAttributes(final FlowFileEventBatch batch) {
    final Map<String, String> attrs = new HashMap<>(3);
    attrs.put("tcp.sender", batch.getEvents().get(0).getSender());
    attrs.put("tcp.port", String.valueOf(port));
    return attrs;
}
private void handleParseFailure(final StandardEvent event, final ProcessSession session, final Exception cause, final String message) { // If we are unable to parse the data, we need to transfer it to 'parse failure' relationship final Map<String, String> attributes = getAttributes(event.getSender()); FlowFile failureFlowFile = session.create(); failureFlowFile = session.write(failureFlowFile, out -> out.write(event.getData())); failureFlowFile = session.putAllAttributes(failureFlowFile, attributes); final String transitUri = getTransitUri(event.getSender()); session.getProvenanceReporter().receive(failureFlowFile, transitUri); session.transfer(failureFlowFile, REL_PARSE_FAILURE); if (cause == null) { getLogger().error(message); } else { getLogger().error(message, cause); } session.adjustCounter("Parse Failures", 1, false); }
// NOTE(review): fragment of a larger loop — this span starts mid-method and ends at a bare
// 'break;', so the enclosing structure is not visible here.
// NOTE(review): 'flowFileRecordWriter' is declared inside the try block but referenced in
// the catch block below; as shown this would not compile, so the catch presumably relies on
// a declaration outside this visible span — confirm against the full method.
// NOTE(review): the writer fetched from 'flowFileRecordWriters' is put straight back without
// being created when absent — verify the creation logic was not lost in this excerpt.
try (final InputStream in = new ByteArrayInputStream(event.getData())) { reader = readerFactory.createRecordReader(Collections.emptyMap(), in, getLogger()); FlowFileRecordWriter flowFileRecordWriter = flowFileRecordWriters.get(event.getSender()); flowFileRecordWriters.put(event.getSender(), flowFileRecordWriter); } catch (final Exception ex) { getLogger().error("Failed to properly initialize record writer. Datagram will be queued for re-processing.", ex); IOUtils.closeQuietly(writer); session.remove(flowFileRecordWriter.getFlowFile()); flowFileRecordWriters.remove(event.getSender()); break;
/**
 * When the batch size exceeds the number of queued datagrams, all available records must be
 * combined into a single success FlowFile carrying the correct record count.
 */
@Test
public void testSuccessWithBatchSizeGreaterThanAvailableRecords() {
    final String sender = "foo";
    for (final String datagram : new String[] { DATAGRAM_1, DATAGRAM_2, DATAGRAM_3 }) {
        proc.addEvent(new StandardEvent(sender, datagram.getBytes(StandardCharsets.UTF_8), null));
    }

    runner.run();
    runner.assertAllFlowFilesTransferred(ListenUDPRecord.REL_SUCCESS, 1);

    final MockFlowFile flowFile = runner.getFlowFilesForRelationship(ListenUDPRecord.REL_SUCCESS).get(0);
    flowFile.assertAttributeEquals(ListenUDPRecord.RECORD_COUNT_ATTR, "3");
}
private void handleParseFailure(final StandardEvent event, final ProcessSession session, final Exception cause, final String message) { // If we are unable to parse the data, we need to transfer it to 'parse failure' relationship final Map<String, String> attributes = getAttributes(event.getSender()); FlowFile failureFlowFile = session.create(); failureFlowFile = session.write(failureFlowFile, out -> out.write(event.getData())); failureFlowFile = session.putAllAttributes(failureFlowFile, attributes); final String transitUri = getTransitUri(event.getSender()); session.getProvenanceReporter().receive(failureFlowFile, transitUri); session.transfer(failureFlowFile, REL_PARSE_FAILURE); if (cause == null) { getLogger().error(message); } else { getLogger().error(message, cause); } session.adjustCounter("Parse Failures", 1, false); }
/**
 * Builds the provenance transit URI (udp://host:port) for a batch, using the first event's
 * sender with any leading '/' stripped (InetAddress.toString prefixes one).
 */
@Override
protected String getTransitUri(FlowFileEventBatch batch) {
    final String sender = batch.getEvents().get(0).getSender();
    final String senderHost = (sender.startsWith("/") && sender.length() > 1)
            ? sender.substring(1)
            : sender;
    return "udp://" + senderHost + ":" + port;
}
@Test public void testBatchingWithDifferentSenders() throws IOException, InterruptedException { final String sender1 = "sender1"; final String sender2 = "sender2"; final ChannelResponder responder = Mockito.mock(ChannelResponder.class); final byte[] message = "test message".getBytes(StandardCharsets.UTF_8); final List<StandardEvent> mockEvents = new ArrayList<>(); mockEvents.add(new StandardEvent(sender1, message, responder)); mockEvents.add(new StandardEvent(sender1, message, responder)); mockEvents.add(new StandardEvent(sender2, message, responder)); mockEvents.add(new StandardEvent(sender2, message, responder)); MockListenUDP mockListenUDP = new MockListenUDP(mockEvents); runner = TestRunners.newTestRunner(mockListenUDP); runner.setProperty(ListenRELP.PORT, "1"); runner.setProperty(ListenRELP.MAX_BATCH_SIZE, "10"); // sending 4 messages with a batch size of 10, but should get 2 FlowFiles because of different senders runner.run(); runner.assertAllFlowFilesTransferred(ListenRELP.REL_SUCCESS, 2); verifyProvenance(2); }
// NOTE(review): fragment of a larger loop — this span starts mid-method and ends at a bare
// 'break;', so the enclosing structure is not visible here.
// NOTE(review): 'flowFileRecordWriter' is declared inside the try block but referenced in
// the catch block below; as shown this would not compile, so the catch presumably relies on
// a declaration outside this visible span — confirm against the full method.
// NOTE(review): the writer fetched from 'flowFileRecordWriters' is put straight back without
// being created when absent — verify the creation logic was not lost in this excerpt.
try (final InputStream in = new ByteArrayInputStream(event.getData())) { reader = readerFactory.createRecordReader(Collections.emptyMap(), in, getLogger()); FlowFileRecordWriter flowFileRecordWriter = flowFileRecordWriters.get(event.getSender()); flowFileRecordWriters.put(event.getSender(), flowFileRecordWriter); } catch (final Exception ex) { getLogger().error("Failed to properly initialize record writer. Datagram will be queued for re-processing.", ex); IOUtils.closeQuietly(writer); session.remove(flowFileRecordWriter.getFlowFile()); flowFileRecordWriters.remove(event.getSender()); break;
/**
 * Derives the FlowFile attributes for a batch: the sender of the batch's first event
 * and the UDP port this processor listens on.
 */
@Override
protected Map<String, String> getAttributes(final FlowFileEventBatch batch) {
    final Map<String, String> attrs = new HashMap<>(3);
    attrs.put(UDP_SENDER_ATTR, batch.getEvents().get(0).getSender());
    attrs.put(UDP_PORT_ATTR, String.valueOf(port));
    return attrs;
}
/**
 * Two datagrams that each contain multiple records must be merged into a single success
 * FlowFile whose record count reflects every parsed record.
 */
@Test
public void testMultipleRecordsPerDatagram() {
    final String sender = "foo";
    for (final String datagram : new String[] { MULTI_DATAGRAM_1, MULTI_DATAGRAM_2 }) {
        proc.addEvent(new StandardEvent(sender, datagram.getBytes(StandardCharsets.UTF_8), null));
    }

    runner.run();
    runner.assertAllFlowFilesTransferred(ListenUDPRecord.REL_SUCCESS, 1);

    final MockFlowFile flowFile = runner.getFlowFilesForRelationship(ListenUDPRecord.REL_SUCCESS).get(0);
    flowFile.assertAttributeEquals(ListenUDPRecord.RECORD_COUNT_ATTR, "6");
}
/**
 * Builds the provenance transit URI (tcp://host:port) for a batch, using the first event's
 * sender with any leading '/' stripped (InetAddress.toString prefixes one).
 */
@Override
protected String getTransitUri(FlowFileEventBatch batch) {
    final String sender = batch.getEvents().get(0).getSender();
    final String senderHost = (sender.startsWith("/") && sender.length() > 1)
            ? sender.substring(1)
            : sender;
    return "tcp://" + senderHost + ":" + port;
}
/**
 * One parseable datagram and one unparseable datagram: the good one is routed to success
 * while the bad one lands, unaltered, on the parse-failure relationship.
 */
@Test
public void testParseFailure() {
    final String sender = "foo";
    proc.addEvent(new StandardEvent(sender, DATAGRAM_1.getBytes(StandardCharsets.UTF_8), null));
    proc.addEvent(new StandardEvent(sender, "WILL NOT PARSE".getBytes(StandardCharsets.UTF_8), null));

    runner.run();
    runner.assertTransferCount(ListenUDPRecord.REL_SUCCESS, 1);
    runner.assertTransferCount(ListenUDPRecord.REL_PARSE_FAILURE, 1);

    final MockFlowFile flowFile = runner.getFlowFilesForRelationship(ListenUDPRecord.REL_PARSE_FAILURE).get(0);
    flowFile.assertContentEquals("WILL NOT PARSE");
}
/**
 * Builds the provenance transit URI (tcp://host:port) for a batch, using the first event's
 * sender with any leading '/' stripped (InetAddress.toString prefixes one).
 */
@Override
protected String getTransitUri(FlowFileEventBatch batch) {
    final String sender = batch.getEvents().get(0).getSender();
    final String senderHost = (sender.startsWith("/") && sender.length() > 1)
            ? sender.substring(1)
            : sender;
    return "tcp://" + senderHost + ":" + port;
}
@Test public void testWriterFailure() throws InitializationException { // re-create the writer to set fail-after 2 attempts final String writerId = "record-writer"; mockRecordWriter = new MockRecordWriter("timestamp, logsource, message", false, 2); runner.addControllerService(writerId, mockRecordWriter); runner.enableControllerService(mockRecordWriter); runner.setProperty(ListenUDPRecord.RECORD_WRITER, writerId); final String sender = "foo"; final StandardEvent event1 = new StandardEvent(sender, DATAGRAM_1.getBytes(StandardCharsets.UTF_8), null); proc.addEvent(event1); final StandardEvent event2 = new StandardEvent(sender, DATAGRAM_2.getBytes(StandardCharsets.UTF_8), null); proc.addEvent(event2); final StandardEvent event3 = new StandardEvent(sender, DATAGRAM_3.getBytes(StandardCharsets.UTF_8), null); proc.addEvent(event3); runner.run(); runner.assertAllFlowFilesTransferred(ListenUDPRecord.REL_SUCCESS, 0); runner.assertAllFlowFilesTransferred(ListenUDPRecord.REL_PARSE_FAILURE, 0); }
/**
 * Derives the FlowFile attributes for a batch: the sender of the batch's first event
 * and the UDP port this processor listens on.
 */
@Override
protected Map<String, String> getAttributes(final FlowFileEventBatch batch) {
    final Map<String, String> attrs = new HashMap<>(3);
    attrs.put(UDP_SENDER_ATTR, batch.getEvents().get(0).getSender());
    attrs.put(UDP_PORT_ATTR, String.valueOf(port));
    return attrs;
}
// NOTE(review): fragment — bare statements with no enclosing method visible in this span;
// presumably the arrange step of a test queuing three datagrams from a single sender.
// Confirm against the full file before treating as self-contained.
final String sender = "foo"; final StandardEvent event1 = new StandardEvent(sender, DATAGRAM_1.getBytes(StandardCharsets.UTF_8), null); proc.addEvent(event1); final StandardEvent event2 = new StandardEvent(sender, DATAGRAM_2.getBytes(StandardCharsets.UTF_8), null); proc.addEvent(event2); final StandardEvent event3 = new StandardEvent(sender, DATAGRAM_3.getBytes(StandardCharsets.UTF_8), null); proc.addEvent(event3);
/**
 * Builds the provenance transit URI (udp://host:port) for a batch, using the first event's
 * sender with any leading '/' stripped (InetAddress.toString prefixes one).
 */
@Override
protected String getTransitUri(FlowFileEventBatch batch) {
    final String sender = batch.getEvents().get(0).getSender();
    final String senderHost = (sender.startsWith("/") && sender.length() > 1)
            ? sender.substring(1)
            : sender;
    return "udp://" + senderHost + ":" + port;
}
/**
 * Derives the FlowFile attributes for a batch: the sender of the batch's first event
 * and the port this processor listens on.
 * NOTE(review): the attribute keys are hard-coded strings here; if the project defines
 * shared constants for them, consider using those — confirm before changing.
 */
@Override
protected Map<String, String> getAttributes(final FlowFileEventBatch batch) {
    final Map<String, String> attrs = new HashMap<>(3);
    attrs.put("tcp.sender", batch.getEvents().get(0).getSender());
    attrs.put("tcp.port", String.valueOf(port));
    return attrs;
}