public void put(Event event) throws ChannelException {
  getChannelProcessor().processEvent(event);
}
if (batchSize == 1) {
  if (eventsSentTX < totalEvents) {
    getChannelProcessor().processEvent(
        EventBuilder.withBody(String.valueOf(eventsSentTX++).getBytes()));
    sourceCounter.incrementEventAcceptedCount();
public void onStatus(Status status) {
  Record doc = extractRecord("", avroSchema, status);
  if (doc == null) {
    return; // skip
  }
  docs.add(doc);
  if (docs.size() >= maxBatchSize
      || System.currentTimeMillis() >= batchEndTime) {
    batchEndTime = System.currentTimeMillis() + maxBatchDurationMillis;
    byte[] bytes;
    try {
      bytes = serializeToAvro(avroSchema, docs);
    } catch (IOException e) {
      LOGGER.error("Exception while serializing tweet", e);
      return; // skip
    }
    Event event = EventBuilder.withBody(bytes);
    getChannelProcessor().processEvent(event); // send event to the flume sink
    docs.clear();
  }
  docCount++;
  if ((docCount % REPORT_INTERVAL) == 0) {
    LOGGER.info(String.format("Processed %s docs", numFormatter.format(docCount)));
  }
  if ((docCount % STATS_INTERVAL) == 0) {
    logStats();
  }
}
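The snippet above folds many records into one Avro-serialized event before calling processEvent. When each record should stay a separate Flume event, ChannelProcessor also exposes processEventBatch, which writes the whole list to the configured channel(s) in a single transaction. A minimal sketch of that alternative (the BatchForwarder class, its buffer, and the maxBatchSize threshold are illustrative assumptions, not part of the snippet above):

import java.util.ArrayList;
import java.util.List;

import org.apache.flume.Event;
import org.apache.flume.channel.ChannelProcessor;
import org.apache.flume.event.EventBuilder;

// Illustrative helper (not from the snippet above): buffer event bodies and
// flush them to the channel(s) as one transaction via processEventBatch.
class BatchForwarder {
  private final ChannelProcessor processor;
  private final int maxBatchSize;                  // assumed flush threshold
  private final List<Event> buffer = new ArrayList<Event>();

  BatchForwarder(ChannelProcessor processor, int maxBatchSize) {
    this.processor = processor;
    this.maxBatchSize = maxBatchSize;
  }

  void add(byte[] body) {
    buffer.add(EventBuilder.withBody(body));
    if (buffer.size() >= maxBatchSize) {
      flush();
    }
  }

  void flush() {
    if (buffer.isEmpty()) {
      return;
    }
    // All buffered events go into the configured channel(s) in a single
    // transaction; a ChannelException from a full channel propagates to the caller.
    processor.processEventBatch(buffer);
    buffer.clear();
  }
}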
limiter.acquire();
getChannelProcessor().processEvent(event);
} else {
  long eventsLeft = maxTotalEvents - totalEventSent;
@Override
public Void append(AvroFlumeOGEvent evt) throws AvroRemoteException {
  counterGroup.incrementAndGet("rpc.received");
  Map<String, String> headers = new HashMap<String, String>();

  // extract Flume OG event headers
  headers.put(HOST, evt.getHost().toString());
  headers.put(TIMESTAMP, evt.getTimestamp().toString());
  headers.put(PRIORITY, evt.getPriority().toString());
  headers.put(NANOS, evt.getNanos().toString());
  for (Entry<CharSequence, ByteBuffer> entry : evt.getFields().entrySet()) {
    // note: ByteBuffer#toString() does not decode the payload; compare the
    // UTF_8.decode(...) used in the Thrift legacy variant below
    headers.put(entry.getKey().toString(), entry.getValue().toString());
  }
  headers.put(OG_EVENT, "yes");

  Event event = EventBuilder.withBody(evt.getBody().array(), headers);
  try {
    getChannelProcessor().processEvent(event);
    counterGroup.incrementAndGet("rpc.events");
  } catch (ChannelException ex) {
    return null;
  }
  counterGroup.incrementAndGet("rpc.successful");
  return null;
}
public void append(ThriftFlumeEvent evt) {
  if (evt == null) {
    return;
  }
  Map<String, String> headers = new HashMap<String, String>();

  // extract Flume event headers
  headers.put(HOST, evt.getHost());
  headers.put(TIMESTAMP, Long.toString(evt.getTimestamp()));
  headers.put(PRIORITY, evt.getPriority().toString());
  headers.put(NANOS, Long.toString(evt.getNanos()));
  for (Entry<String, ByteBuffer> entry : evt.getFields().entrySet()) {
    headers.put(entry.getKey().toString(), UTF_8.decode(entry.getValue()).toString());
  }
  headers.put(OG_EVENT, "yes");

  Event event = EventBuilder.withBody(evt.getBody(), headers);
  counterGroup.incrementAndGet("rpc.events");
  try {
    getChannelProcessor().processEvent(event);
  } catch (ChannelException ex) {
    LOG.warn("Failed to process event", ex);
    return;
  }
  counterGroup.incrementAndGet("rpc.successful");
}
source.getChannelProcessor().processEvent(event);
} catch (ChannelException chEx) {
  ex = chEx;
@Override
public void messageReceived(ChannelHandlerContext ctx, MessageEvent mEvent) {
  try {
    Event e = extractEvent((ChannelBuffer) mEvent.getMessage(), mEvent.getRemoteAddress());
    if (e == null) {
      return;
    }
    getChannelProcessor().processEvent(e);
    counterGroup.incrementAndGet("events.success");
  } catch (ChannelException ex) {
    counterGroup.incrementAndGet("events.dropped");
    logger.error("Error writing to channel", ex);
  } catch (RuntimeException ex) {
    counterGroup.incrementAndGet("events.dropped");
    logger.error("Error retrieving event from udp stream, event dropped", ex);
  }
}
}
@Override
public Status append(AvroFlumeEvent avroEvent) {
  if (logger.isDebugEnabled()) {
    if (LogPrivacyUtil.allowLogRawData()) {
      logger.debug("Avro source {}: Received avro event: {}", getName(), avroEvent);
    } else {
      logger.debug("Avro source {}: Received avro event", getName());
    }
  }

  sourceCounter.incrementAppendReceivedCount();
  sourceCounter.incrementEventReceivedCount();

  Event event = EventBuilder.withBody(avroEvent.getBody().array(),
      toStringMap(avroEvent.getHeaders()));

  try {
    getChannelProcessor().processEvent(event);
  } catch (ChannelException ex) {
    logger.warn("Avro source " + getName() + ": Unable to process event. "
        + "Exception follows.", ex);
    sourceCounter.incrementChannelWriteFail();
    return Status.FAILED;
  }

  sourceCounter.incrementAppendAcceptedCount();
  sourceCounter.incrementEventAcceptedCount();
  return Status.OK;
}
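For context, this append method is the server side of Flume's Avro RPC path. A minimal client-side sketch using the Flume SDK's RpcClient, which drives exactly this method; the host, port, and event body are placeholders:

import org.apache.flume.Event;
import org.apache.flume.EventDeliveryException;
import org.apache.flume.api.RpcClient;
import org.apache.flume.api.RpcClientFactory;
import org.apache.flume.event.EventBuilder;

public class AvroSourceClientSketch {
  public static void main(String[] args) throws EventDeliveryException {
    // "localhost" and 41414 are placeholders for the Avro source's bind host/port.
    RpcClient client = RpcClientFactory.getDefaultInstance("localhost", 41414);
    try {
      Event event = EventBuilder.withBody("hello flume".getBytes());
      // Each append() arrives at AvroSource.append(), which hands the event
      // to getChannelProcessor().processEvent() as shown above.
      client.append(event);
    } finally {
      client.close();
    }
  }
}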
getChannelProcessor().processEvent(e);
sourceCounter.incrementEventAcceptedCount();
} catch (ChannelException ex) {
@Override
public void messageReceived(ChannelHandlerContext ctx, MessageEvent mEvent) {
  try {
    syslogUtils.setEventSize(maxsize);
    Event e = syslogUtils.extractEvent((ChannelBuffer) mEvent.getMessage());
    if (e == null) {
      return;
    }
    if (clientIPHeader != null) {
      e.getHeaders().put(clientIPHeader, SyslogUtils.getIP(mEvent.getRemoteAddress()));
    }
    if (clientHostnameHeader != null) {
      e.getHeaders().put(clientHostnameHeader,
          SyslogUtils.getHostname(mEvent.getRemoteAddress()));
    }
    sourceCounter.incrementEventReceivedCount();
    getChannelProcessor().processEvent(e);
    sourceCounter.incrementEventAcceptedCount();
  } catch (ChannelException ex) {
    logger.error("Error writing to channel", ex);
    sourceCounter.incrementChannelWriteFail();
    return;
  } catch (RuntimeException ex) {
    logger.error("Error parsing event from syslog stream, event dropped", ex);
    sourceCounter.incrementEventReadFail();
    return;
  }
}
}
@Override
public Status append(ThriftFlumeEvent event) throws TException {
  Event flumeEvent = EventBuilder.withBody(event.getBody(), event.getHeaders());
  sourceCounter.incrementAppendReceivedCount();
  sourceCounter.incrementEventReceivedCount();
  try {
    getChannelProcessor().processEvent(flumeEvent);
  } catch (ChannelException ex) {
    logger.warn("Thrift source " + getName() + " could not append events "
        + "to the channel.", ex);
    sourceCounter.incrementChannelWriteFail();
    return Status.FAILED;
  }
  sourceCounter.incrementAppendAcceptedCount();
  sourceCounter.incrementEventAcceptedCount();
  return Status.OK;
}
@Override
public void onMessage(String channel, String message) {
  Event event = EventBuilder.withBody(message, Charset.forName(charset));
  channelProcessor.processEvent(event);
}
private void processLines(ChannelProcessor channelProcessor, InputStream input) throws IOException {
  // try-with-resources closes the reader even if processEvent throws a ChannelException
  try (BufferedReader reader = new BufferedReader(new InputStreamReader(input))) {
    String line;
    while ((line = reader.readLine()) != null) {
      channelProcessor.processEvent(createEvent(line));
    }
  }
}
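A hypothetical call site for processLines, assuming it lives in a source whose createEvent wraps one line in an Event and the enclosing method declares throws IOException; the command and log path are placeholders:

// Hypothetical call site: stream a child process's stdout into the channel line by line.
Process tail = new ProcessBuilder("tail", "-F", "/var/log/app.log").start();
try {
  processLines(getChannelProcessor(), tail.getInputStream());
} finally {
  tail.destroy();
}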
@Override
public void onPMessage(String pattern, String channel, String message) {
  Map<String, String> headers = Maps.newHashMap();
  headers.put("channel", channel);
  Event event = EventBuilder.withBody(message, Charset.forName(charset), headers);
  channelProcessor.processEvent(event);
}
public void onStatus(Status status) {
  // The EventBuilder is used to build an event using the headers and
  // the raw JSON of a tweet
  // shouldn't log possibly sensitive customer data
  logger.debug("tweet arrived");

  headers.put("timestamp", String.valueOf(status.getCreatedAt().getTime()));

  Event event = EventBuilder.withBody(
      DataObjectFactory.getRawJSON(status).getBytes(), headers);
  channel.processEvent(event);
}
@Override public void onWebSocketText(String s) { LOGGER.info("Received message: {}", s); channelProcessor.processEvent(createEvent(s)); long processed = processedEvent.incrementAndGet(); if (processed % LOG_PROCESS_THRESHOLD == 0) { try { session.getRemote().sendString("Processed: " + processedEvent.get()); } catch (IOException e) { LOGGER.warn("Cannot send process message"); } } }
@Override public void onWebSocketText(String s) { LOGGER.info("Received message: {}", s); channelProcessor.processEvent(createEvent(s)); long processed = processedEvent.incrementAndGet(); if (processed % 100 == 0) { try { session.getRemote().sendString("Processed: " + processedEvent.get()); } catch (IOException e) { LOGGER.warn("Cannot send process message"); } } }
private void sendEvent(FileSet fileSet) {
  if (fileSet.getBufferList().isEmpty()) {
    return;
  }
  synchronized (fileSet) {
    StringBuffer sb = fileSet.getAllLines();
    Event event = EventBuilder.withBody(String.valueOf(sb).getBytes(),
        fileSet.getHeaders());
    source.getChannelProcessor().processEvent(event);
    sourceCounter.incrementEventReceivedCount();
    fileSet.clear();
  }
}
public synchronized void processPdu(CommandResponderEvent e) {
  PDU command = e.getPDU();
  if (command != null) {
    ChannelProcessor channelProcessor = getChannelProcessor();
    sourceCounter.addToEventReceivedCount(1);
    sourceCounter.incrementAppendBatchReceivedCount();
    channelProcessor.processEvent(EventBuilder.withBody(command.toString(), Charsets.UTF_8));
    sourceCounter.addToEventAcceptedCount(1);
    sourceCounter.incrementAppendBatchAcceptedCount();
  }
}
};
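Most of the snippets above share one shape: a class extending AbstractSource gets its ChannelProcessor injected by the framework and pushes events through processEvent, counting successes and catching ChannelException. A minimal self-contained sketch of that pattern; the class name, the "origin" header, and the onMessage hook are illustrative, not taken from any snippet above:

import java.nio.charset.StandardCharsets;
import java.util.Collections;

import org.apache.flume.ChannelException;
import org.apache.flume.Context;
import org.apache.flume.Event;
import org.apache.flume.EventDrivenSource;
import org.apache.flume.conf.Configurable;
import org.apache.flume.event.EventBuilder;
import org.apache.flume.source.AbstractSource;

// Illustrative skeleton of an event-driven source that forwards messages
// handed to onMessage() into the configured channel(s).
public class SketchSource extends AbstractSource implements EventDrivenSource, Configurable {

  @Override
  public void configure(Context context) {
    // read source properties from the agent configuration here
  }

  // Called by whatever client or listener this source wraps.
  public void onMessage(String message) {
    Event event = EventBuilder.withBody(message.getBytes(StandardCharsets.UTF_8),
        Collections.singletonMap("origin", getName()));
    try {
      // getChannelProcessor() is wired up by the Flume framework before start().
      getChannelProcessor().processEvent(event);
    } catch (ChannelException ex) {
      // channel(s) full or transaction failed; drop, retry, or back off here
    }
  }
}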