public static Event withBody(byte[] body) {
  return withBody(body, null);
}
public static Event withBody(String body, Charset charset) {
  return withBody(body, charset, null);
}
@Override
public Event readEvent() throws IOException {
  String line = reader.readLine();
  if (line != null) {
    return EventBuilder.withBody(line, Charsets.UTF_8);
  } else {
    return null;
  }
}
public static Event withBody(String body, Charset charset, Map<String, String> headers) {
  return withBody(body.getBytes(charset), headers);
}
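// Illustrative sketch (not part of the original source): one way the three
// withBody overloads above could be exercised. The method name and header
// values here are hypothetical; StandardCharsets comes from java.nio.charset.
private static void withBodyOverloadSketch() {
  // raw byte array, no headers
  Event raw = EventBuilder.withBody(new byte[] { 0x01, 0x02, 0x03 });

  // String body encoded with an explicit charset, no headers
  Event text = EventBuilder.withBody("hello", StandardCharsets.UTF_8);

  // String body plus a header map
  Map<String, String> headers = new HashMap<String, String>();
  headers.put("origin", "sketch");
  Event withHeaders = EventBuilder.withBody("hello", StandardCharsets.UTF_8, headers);
}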
private void prepEventData(int bufferSize) {
  buffer = new byte[bufferSize];
  Arrays.fill(buffer, Byte.MAX_VALUE);
  if (batchSize > 1) {
    // Create event objects in case of batch test
    eventBatchList = new ArrayList<Event>();
    for (int i = 0; i < batchSize; i++) {
      eventBatchList.add(EventBuilder.withBody(buffer));
    }
  } else {
    // Create single event in case of non-batch test
    event = EventBuilder.withBody(buffer);
  }
}
private List<Event> readDeserializerEvents(int numEvents) throws IOException {
  EventDeserializer des = currentFile.get().getDeserializer();
  List<Event> events = des.readEvents(numEvents);
  // On the very first read, surface an empty-body event if the deserializer returned nothing
  if (events.isEmpty() && firstTimeRead) {
    events.add(EventBuilder.withBody(new byte[0]));
  }
  firstTimeRead = false;
  return events;
}
private List<Event> getSimpleEvents(List<Event> events) {
  List<Event> newEvents = new ArrayList<Event>(events.size());
  for (Event e : events) {
    newEvents.add(EventBuilder.withBody(e.getBody(), e.getHeaders()));
  }
  return newEvents;
}
private Event getEvent(int index) {
  return EventBuilder.withBody(("event: " + index).getBytes());
}
private List<Event> generateGenericEvent(LogSchema schema, List<LogEvent> logEvents) {
  List<Event> events = new ArrayList<>();
  for (LogEvent logEvent : logEvents) {
    Event event = EventBuilder.withBody(logEvent.getLogData());
    event.getHeaders().put(AVRO_SCHEMA_HEADER_LITERAL, schema.getSchema());
    events.add(event);
  }
  return events;
}
/**
 * Reads a line from a file and returns an event.
 * @return Event containing the parsed line, or null if no line could be read
 * @throws IOException
 */
@Override
public Event readEvent() throws IOException {
  ensureOpen();
  String line = readLine();
  if (line == null) {
    return null;
  } else {
    return EventBuilder.withBody(line, outputCharset);
  }
}
/**
 * Insert events 0..count-1.
 *
 * @param client
 * @param count
 * @throws Exception
 */
private static void insertEvents(RpcClient client, int count) throws Exception {
  for (int i = 0; i < count; i++) {
    Map<String, String> header = new HashMap<String, String>();
    header.put(SEQ, String.valueOf(i));
    client.append(EventBuilder.withBody(String.valueOf(i).getBytes(), header));
  }
}
/**
 * Append events as a batch with seq starting at start and ending at limit.
 *
 * @param client
 * @param start
 * @param limit
 * @throws Exception
 */
private static void insertAsBatch(RpcClient client, int start, int limit) throws Exception {
  List<Event> events = new ArrayList<Event>();
  for (int i = start; i <= limit; i++) {
    Map<String, String> header = new HashMap<String, String>();
    header.put(SEQ, String.valueOf(i));
    events.add(EventBuilder.withBody(String.valueOf(i).getBytes(), header));
  }
  client.appendBatch(events);
}
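// Illustrative sketch (not part of the original source): one way the two
// helpers above might be driven. The hostname and port are hypothetical;
// RpcClientFactory.getDefaultInstance also appears in the tests further down.
private static void insertSketch() throws Exception {
  RpcClient client = RpcClientFactory.getDefaultInstance("localhost", 41414);
  try {
    insertEvents(client, 10);      // appends events 0..9 one at a time
    insertAsBatch(client, 10, 19); // appends events 10..19 as a single batch
  } finally {
    client.close();
  }
}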
@Override
public Status appendBatch(List<ThriftFlumeEvent> events) throws TException {
  batchCount++;
  // batches smaller than the expected size of 10 are counted as incomplete
  if (events.size() < 10) {
    incompleteBatches++;
  }
  for (ThriftFlumeEvent event : events) {
    flumeEvents.add(EventBuilder.withBody(event.getBody(), event.getHeaders()));
  }
  return Status.OK;
}
@Override
public Status append(ThriftFlumeEvent event) throws TException {
  flumeEvents.add(EventBuilder.withBody(event.getBody(), event.getHeaders()));
  individualCount++;
  return Status.OK;
}
@Test
public void testBody() {
  Event e1 = EventBuilder.withBody("e1".getBytes());
  Assert.assertNotNull(e1);
  Assert.assertArrayEquals("body is correct", "e1".getBytes(), e1.getBody());

  Event e2 = EventBuilder.withBody(Long.valueOf(2).toString().getBytes());
  Assert.assertNotNull(e2);
  Assert.assertArrayEquals("body is correct", Long.valueOf(2L).toString().getBytes(), e2.getBody());
}
@Test
public void testHeaders() {
  Map<String, String> headers = new HashMap<String, String>();
  headers.put("one", "1");
  headers.put("two", "2");
  Event e1 = EventBuilder.withBody("e1".getBytes(), headers);
  Assert.assertNotNull(e1);
  Assert.assertArrayEquals("e1 has the proper body", "e1".getBytes(), e1.getBody());
  Assert.assertEquals("e1 has the proper headers", 2, e1.getHeaders().size());
  Assert.assertEquals("e1 has a one key", "1", e1.getHeaders().get("one"));
}
@Test
public void testTwoParamDeprecatedAppend() throws FlumeException, EventDeliveryException {
  RpcClient client = null;
  Server server = RpcTestUtils.startServer(new OKAvroHandler());
  try {
    client = RpcClientFactory.getInstance(localhost, server.getPort());
    client.append(EventBuilder.withBody("wheee!!!", Charset.forName("UTF8")));
  } finally {
    RpcTestUtils.stopServer(server);
    if (client != null) client.close();
  }
}
@Test
public void testTwoParamSimpleAppend() throws FlumeException, EventDeliveryException {
  RpcClient client = null;
  Server server = RpcTestUtils.startServer(new OKAvroHandler());
  try {
    client = RpcClientFactory.getDefaultInstance(localhost, server.getPort());
    client.append(EventBuilder.withBody("wheee!!!", Charset.forName("UTF8")));
  } finally {
    RpcTestUtils.stopServer(server);
    if (client != null) client.close();
  }
}
@Test
public void testThreeParamDeprecatedAppend() throws FlumeException, EventDeliveryException {
  RpcClient client = null;
  Server server = RpcTestUtils.startServer(new OKAvroHandler());
  try {
    client = RpcClientFactory.getInstance(localhost, server.getPort(), 3);
    Assert.assertEquals("Batch size was specified", 3, client.getBatchSize());
    client.append(EventBuilder.withBody("wheee!!!", Charset.forName("UTF8")));
  } finally {
    RpcTestUtils.stopServer(server);
    if (client != null) client.close();
  }
}
private Sink.Status prepareAndSend(Context context, String msg) throws EventDeliveryException {
  Sink kafkaSink = new KafkaSink();
  Configurables.configure(kafkaSink, context);
  Channel memoryChannel = new MemoryChannel();
  Configurables.configure(memoryChannel, context);
  kafkaSink.setChannel(memoryChannel);
  kafkaSink.start();

  Transaction tx = memoryChannel.getTransaction();
  tx.begin();
  Event event = EventBuilder.withBody(msg.getBytes());
  memoryChannel.put(event);
  tx.commit();
  tx.close();

  return kafkaSink.process();
}
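// Illustrative sketch (not part of the original source): one way the Context
// passed to prepareAndSend above might be populated. Property names follow the
// Flume KafkaSink and MemoryChannel documentation, but the broker address,
// topic, and capacities here are hypothetical test values.
private Sink.Status sendOneMessageSketch() throws EventDeliveryException {
  Context context = new Context();
  context.put("kafka.bootstrap.servers", "localhost:9092"); // hypothetical broker
  context.put("kafka.topic", "flume-test");                 // hypothetical topic
  context.put("capacity", "100");                           // MemoryChannel capacity
  context.put("transactionCapacity", "100");
  return prepareAndSend(context, "hello kafka");
}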