/**
 * Reads this source's settings from the Flume {@link Context}.
 * <ul>
 * <li>{@code batchSize} - number of events per batch (int, default 1, must be positive)</li>
 * <li>{@code totalEvents} - total events to generate (long, default {@code Long.MAX_VALUE})</li>
 * </ul>
 *
 * @param context configuration supplied by the Flume framework
 * @throws FlumeException declared by the lifecycle contract
 */
@Override
protected void doConfigure(Context context) throws FlumeException {
  batchSize = context.getInteger("batchSize", 1);
  totalEvents = context.getLong("totalEvents", Long.MAX_VALUE);
  Preconditions.checkArgument(batchSize > 0, "batchSize was %s but expected positive", batchSize);
  // Lazily create the counter so reconfiguration keeps accumulated metrics.
  if (sourceCounter == null) {
    sourceCounter = new SourceCounter(getName());
  }
}
/**
 * Lifecycle start hook: activates the source counter and logs progress.
 *
 * @throws FlumeException declared by the lifecycle contract
 */
@Override
protected void doStart() throws FlumeException {
  logger.info("Sequence generator source do starting");
  sourceCounter.start();
  logger.debug("Sequence generator source do started");
}
/**
 * Thrift RPC endpoint for a batch of events: converts each
 * {@link ThriftFlumeEvent} into a Flume {@link Event} and delivers the whole
 * batch to the channel processor in one call.
 *
 * @param events batch received from the Thrift client
 * @return {@code Status.OK} when the channel accepted the batch,
 *         {@code Status.FAILED} when it was rejected
 * @throws TException declared by the Thrift service interface
 */
@Override
public Status appendBatch(List<ThriftFlumeEvent> events) throws TException {
  sourceCounter.incrementAppendBatchReceivedCount();
  sourceCounter.addToEventReceivedCount(events.size());

  List<Event> flumeEvents = Lists.newArrayList();
  for (ThriftFlumeEvent event : events) {
    flumeEvents.add(EventBuilder.withBody(event.getBody(), event.getHeaders()));
  }

  try {
    getChannelProcessor().processEventBatch(flumeEvents);
  } catch (ChannelException ex) {
    // Fix: SLF4J uses {} placeholders (the original "%s" was never
    // substituted), and the exception is now passed so the stack trace is
    // logged — consistent with the single-event append() path.
    logger.warn("Thrift source {} could not append events to the channel.", getName(), ex);
    sourceCounter.incrementChannelWriteFail();
    return Status.FAILED;
  }

  sourceCounter.incrementAppendBatchAcceptedCount();
  sourceCounter.addToEventAcceptedCount(events.size());
  return Status.OK;
}
}
/**
 * Thrift RPC endpoint for a single event: wraps the incoming payload and
 * headers as a Flume {@link Event} and pushes it to the channel, maintaining
 * the received/accepted counters.
 *
 * @param event event received from the Thrift client
 * @return {@code Status.OK} on success, {@code Status.FAILED} if the channel
 *         rejected the event
 * @throws TException declared by the Thrift service interface
 */
@Override
public Status append(ThriftFlumeEvent event) throws TException {
  Event flumeEvent = EventBuilder.withBody(event.getBody(), event.getHeaders());
  sourceCounter.incrementAppendReceivedCount();
  sourceCounter.incrementEventReceivedCount();
  try {
    getChannelProcessor().processEvent(flumeEvent);
  } catch (ChannelException ex) {
    logger.warn("Thrift source " + getName()
        + " could not append events to the channel.", ex);
    sourceCounter.incrementChannelWriteFail();
    return Status.FAILED;
  }
  sourceCounter.incrementAppendAcceptedCount();
  sourceCounter.incrementEventAcceptedCount();
  return Status.OK;
}
/**
 * Scribe Thrift endpoint: turns each {@link LogEntry} into a Flume event
 * (category carried in a header when present) and delivers the batch to the
 * channel in one transaction.
 *
 * @param list entries from the Scribe client; {@code null} yields TRY_LATER
 * @return {@code ResultCode.OK} on success, {@code ResultCode.TRY_LATER} on
 *         any failure so the client retries
 * @throws TException declared by the Thrift service interface
 */
public ResultCode Log(List<LogEntry> list) throws TException {
  if (list == null) {
    return ResultCode.TRY_LATER;
  }
  sourceCounter.addToEventReceivedCount(list.size());
  try {
    List<Event> events = new ArrayList<Event>(list.size());
    for (LogEntry entry : list) {
      Map<String, String> headers = new HashMap<String, String>(1, 1);
      String category = entry.getCategory();
      if (category != null) {
        headers.put(SCRIBE_CATEGORY, category);
      }
      // NOTE(review): getBytes() uses the platform default charset — confirm
      // this matches what Scribe clients send (likely UTF-8).
      events.add(EventBuilder.withBody(entry.getMessage().getBytes(), headers));
    }
    if (!events.isEmpty()) {
      getChannelProcessor().processEventBatch(events);
    }
    sourceCounter.addToEventAcceptedCount(list.size());
    return ResultCode.OK;
  } catch (Exception e) {
    LOG.warn("Scribe source handling failure", e);
    sourceCounter.incrementEventReadOrChannelFail(e);
    return ResultCode.TRY_LATER;
  }
}
}
/**
 * Lifecycle stop hook: closes and releases the consumer if one is open, then
 * stops the source counter. Synchronized to serialize with the start path.
 */
@Override
protected synchronized void doStop() {
  if (consumer != null) {
    consumer.close();
    consumer = null;
  }
  sourceCounter.stop();
}
// NOTE(review): fragment of a larger poll/process method — the opening
// if/try and the enclosing loop are outside the visible range, so the braces
// below are intentionally unbalanced from this view.
// Single-event path: emit the next sequence number as an event body.
getChannelProcessor().processEvent(
    EventBuilder.withBody(String.valueOf(eventsSentTX++).getBytes()));
sourceCounter.incrementEventAcceptedCount();
} else {
  // Presumably the event budget is exhausted — signal BACKOFF to the poller.
  status = Status.BACKOFF;
  // Batch path bookkeeping: the whole batch was accepted by the channel.
  sourceCounter.incrementAppendBatchAcceptedCount();
  sourceCounter.addToEventAcceptedCount(batchArrayList.size());
} catch (ChannelException ex) {
  // Channel rejected the write; log and record the failure for metrics.
  logger.error(
      getName() + " source could not write to channel.", ex);
  sourceCounter.incrementChannelWriteFail();
/**
 * Netty handler callback: extracts one syslog event from the inbound buffer,
 * optionally tags it with the client's IP/hostname headers, and delivers it
 * to the channel. Unparseable messages are dropped (read-fail counter);
 * channel rejections are recorded as write failures.
 *
 * @param ctx    Netty channel handler context (unused here)
 * @param mEvent inbound message event carrying the raw syslog bytes
 */
@Override
public void messageReceived(ChannelHandlerContext ctx, MessageEvent mEvent) {
  try {
    syslogUtils.setEventSize(maxsize);
    Event e = syslogUtils.extractEvent((ChannelBuffer) mEvent.getMessage());
    if (e == null) {
      // Parser buffered a partial message; nothing to deliver yet.
      return;
    }
    if (clientIPHeader != null) {
      e.getHeaders().put(clientIPHeader, SyslogUtils.getIP(mEvent.getRemoteAddress()));
    }
    if (clientHostnameHeader != null) {
      e.getHeaders().put(clientHostnameHeader,
          SyslogUtils.getHostname(mEvent.getRemoteAddress()));
    }
    sourceCounter.incrementEventReceivedCount();
    getChannelProcessor().processEvent(e);
    sourceCounter.incrementEventAcceptedCount();
  } catch (ChannelException ex) {
    // Fix: corrected log-message typo ("writting" -> "writing").
    logger.error("Error writing to channel", ex);
    sourceCounter.incrementChannelWriteFail();
  } catch (RuntimeException ex) {
    logger.error("Error parsing event from syslog stream, event dropped", ex);
    sourceCounter.incrementEventReadFail();
  }
}
}
/**
 * Starts the Avro source: binds a Netty-backed Avro IPC server on the
 * configured address/port, starts the counters, and schedules a periodic
 * refresh of the open-connection gauge.
 *
 * @throws FlumeException if the Netty server socket cannot be set up
 */
@Override
public void start() {
  logger.info("Starting {}...", this);
  try {
    Responder avroResponder = new SpecificResponder(AvroSourceProtocol.class, this);
    socketChannelFactory = initSocketChannelFactory();
    ChannelPipelineFactory pipeline = initChannelPipelineFactory();
    server = new NettyServer(avroResponder, new InetSocketAddress(bindAddress, port),
        socketChannelFactory, pipeline, null);
  } catch (org.jboss.netty.channel.ChannelException nce) {
    logger.error("Avro source {} startup failed. Cannot initialize Netty server", getName(), nce);
    stop();
    throw new FlumeException("Failed to set up server socket", nce);
  }
  connectionCountUpdater = Executors.newSingleThreadScheduledExecutor();
  server.start();
  sourceCounter.start();
  super.start();
  // Poll the server once a minute and publish its active-connection count.
  final NettyServer nettyServer = (NettyServer) server;
  connectionCountUpdater.scheduleWithFixedDelay(
      () -> sourceCounter.setOpenConnectionCount(
          Long.valueOf(nettyServer.getNumActiveConnections())),
      0, 60, TimeUnit.SECONDS);
  logger.info("Avro source {} started.", getName());
}
/**
 * Delivers the buffered events to the channel as one batch, records them as
 * accepted, empties the buffer, and stamps the time of this flush.
 *
 * @param batch events to flush; cleared in place on success
 */
private void flushEventBatch(List<Event> batch) {
  final int flushed = batch.size();
  channelProcessor.processEventBatch(batch);
  sourceCounter.addToEventAcceptedCount(flushed);
  batch.clear();
  lastPushToChannel = systemClock.currentTimeMillis();
}
/**
 * RabbitMQ consumer callback: parses the delivered message into a Flume
 * event, pushes it to the channel, and acks on success. On any failure the
 * message is rejected so the broker can redeliver or dead-letter it.
 *
 * @param consumerTag broker-assigned consumer tag (unused here)
 * @param envelope    delivery envelope carrying the delivery tag
 * @param properties  AMQP message properties
 * @param body        raw message payload
 * @throws IOException declared by the consumer interface
 */
@Override
public void handleDelivery(String consumerTag, Envelope envelope,
    AMQP.BasicProperties properties, byte[] body) throws IOException {
  sourceCounter.incrementEventReceivedCount();
  try {
    channelProcessor.processEvent(parseMessage(envelope, properties, body));
    sourceCounter.incrementEventAcceptedCount();
    ackMessage(envelope.getDeliveryTag());
  } catch (Exception ex) {
    // Fix: the exception was being consumed by the second {} placeholder,
    // which logs only ex.toString(). Passing it as the trailing argument
    // (with no placeholder) makes SLF4J log the full stack trace.
    logger.error("Error writing to channel for {}, message rejected", this, ex);
    rejectMessage(envelope.getDeliveryTag());
  }
}
};
// NOTE(review): fragment of a larger read loop — the method header and the
// close of this synchronized block are outside the visible range.
sourceCounter.incrementEventReceivedCount();
// eventList is presumably shared with a flushing thread — guard the append.
synchronized (eventList) {
  eventList.add(EventBuilder.withBody(line.getBytes(charset)));
/**
 * Lifecycle stop hook: stops the counter and logs the final metrics.
 *
 * @throws FlumeException declared by the lifecycle contract
 */
@Override
protected void doStop() throws FlumeException {
  logger.info("Sequence generator source do stopping");
  sourceCounter.stop();
  // Fix: the original had one {} placeholder but two arguments, so the
  // sourceCounter metrics were silently dropped from the log line.
  logger.info("Sequence generator source {} do stopped. Metrics: {}", getName(), sourceCounter);
}
// NOTE(review): tail of a larger try block — the try opening and the code
// that sets numEvents are outside the visible range.
sourceCounter.addToEventReceivedCount(numEvents);
sourceCounter.addToEventAcceptedCount(numEvents);
} catch (Throwable t) {
  logger.error("Error writing to channel, event dropped", t);
  sourceCounter.incrementEventReadOrChannelFail(t);
  // Errors (OOM, etc.) must not be swallowed — rethrow them.
  if (t instanceof Error) {
    Throwables.propagate(t);
// NOTE(review): mid-method fragment (duplicate of an earlier chunk) — the
// enclosing if/try structure is not visible, so braces are unbalanced here.
// Emits the next sequence number as a single event and counts it accepted.
getChannelProcessor().processEvent(
    EventBuilder.withBody(String.valueOf(eventsSentTX++).getBytes()));
sourceCounter.incrementEventAcceptedCount();
} else {
  // Presumably no more events to send this cycle — ask the runner to back off.
  status = Status.BACKOFF;
  sourceCounter.incrementAppendBatchAcceptedCount();
  sourceCounter.addToEventAcceptedCount(batchArrayList.size());
} catch (ChannelException ex) {
  // Record channel write failure in the source metrics.
  logger.error(
      getName() + " source could not write to channel.", ex);
  sourceCounter.incrementChannelWriteFail();
// NOTE(review): fragment — the try opening and the processEvent call that
// precedes these counters are outside the visible range.
sourceCounter.incrementEventReceivedCount();
sourceCounter.incrementEventAcceptedCount();
} catch (ChannelException ex) {
  // NOTE(review): "writting" is a typo in the log message; left byte-identical
  // here since this is a documentation-only pass.
  logger.error("Error writting to channel, event dropped", ex);
  sourceCounter.incrementChannelWriteFail();
} catch (RuntimeException ex) {
  logger.error("Error parsing event from syslog stream, event dropped", ex);
  sourceCounter.incrementEventReadFail();
  return;
/**
 * Avro IPC append: converts a single {@link AvroFlumeEvent} (body + headers)
 * into a Flume event and hands it to the channel processor, updating the
 * append/event counters.
 *
 * @param avroEvent event received over Avro IPC
 * @return {@code Status.OK} on success, {@code Status.FAILED} if the channel
 *         rejected the event
 */
@Override
public Status append(AvroFlumeEvent avroEvent) {
  // Only log raw payloads when the privacy policy allows it.
  if (logger.isDebugEnabled()) {
    if (LogPrivacyUtil.allowLogRawData()) {
      logger.debug("Avro source {}: Received avro event: {}", getName(), avroEvent);
    } else {
      logger.debug("Avro source {}: Received avro event", getName());
    }
  }

  sourceCounter.incrementAppendReceivedCount();
  sourceCounter.incrementEventReceivedCount();

  Event flumeEvent = EventBuilder.withBody(avroEvent.getBody().array(),
      toStringMap(avroEvent.getHeaders()));
  try {
    getChannelProcessor().processEvent(flumeEvent);
  } catch (ChannelException ex) {
    logger.warn("Avro source " + getName() + ": Unable to process event. "
        + "Exception follows.", ex);
    sourceCounter.incrementChannelWriteFail();
    return Status.FAILED;
  }

  sourceCounter.incrementAppendAcceptedCount();
  sourceCounter.incrementEventAcceptedCount();
  return Status.OK;
}
/**
 * Brings the Avro source online. Sets up the Netty transport and the Avro
 * responder, starts the server and counters, then arranges a once-a-minute
 * update of the open-connection metric.
 *
 * @throws FlumeException if the server socket cannot be bound
 */
@Override
public void start() {
  logger.info("Starting {}...", this);
  try {
    Responder responder = new SpecificResponder(AvroSourceProtocol.class, this);
    socketChannelFactory = initSocketChannelFactory();
    ChannelPipelineFactory channelPipelineFactory = initChannelPipelineFactory();
    InetSocketAddress bindTo = new InetSocketAddress(bindAddress, port);
    server = new NettyServer(responder, bindTo, socketChannelFactory,
        channelPipelineFactory, null);
  } catch (org.jboss.netty.channel.ChannelException nce) {
    logger.error("Avro source {} startup failed. Cannot initialize Netty server", getName(), nce);
    stop();
    throw new FlumeException("Failed to set up server socket", nce);
  }
  connectionCountUpdater = Executors.newSingleThreadScheduledExecutor();
  server.start();
  sourceCounter.start();
  super.start();
  // Refresh the connection-count gauge every 60 seconds, starting now.
  final NettyServer activeServer = (NettyServer) server;
  Runnable updateGauge = () -> sourceCounter.setOpenConnectionCount(
      Long.valueOf(activeServer.getNumActiveConnections()));
  connectionCountUpdater.scheduleWithFixedDelay(updateGauge, 0, 60, TimeUnit.SECONDS);
  logger.info("Avro source {} started.", getName());
}
private void flushEventBatch(List<Event> eventList){ channelProcessor.processEventBatch(eventList); sourceCounter.addToEventAcceptedCount(eventList.size()); eventList.clear(); lastPushToChannel = systemClock.currentTimeMillis(); }
/**
 * Flushes the buffered lines of a {@code FileSet} to the channel as one
 * Flume event carrying the file set's headers. No-op when the buffer is
 * empty. Synchronizes on the fileSet so the buffer cannot change between
 * reading and clearing it.
 *
 * @param fileSet buffered file data to flush; its buffer is cleared on success
 */
private void sendEvent(FileSet fileSet) {
  if (fileSet.getBufferList().isEmpty()) {
    return;
  }
  synchronized (fileSet) {
    StringBuffer sb = fileSet.getAllLines();
    // NOTE(review): getBytes() uses the platform default charset — confirm
    // this matches the files being tailed.
    Event event = EventBuilder.withBody(String.valueOf(sb).getBytes(),
        fileSet.getHeaders());
    // Fix: count the event as received before handing it to the channel, so
    // a channel failure no longer leaves the received counter understated.
    sourceCounter.incrementEventReceivedCount();
    source.getChannelProcessor().processEvent(event);
    fileSet.clear();
  }
}