/**
 * Bumps the failure counter matching the failure category: a
 * {@link ChannelException} counts as a channel-write failure, anything
 * else as an event-read failure.
 *
 * @param t the throwable that caused the failure
 * @return the new value of whichever counter was incremented
 */
public long incrementEventReadOrChannelFail(Throwable t) {
  return (t instanceof ChannelException)
      ? incrementChannelWriteFail()
      : incrementEventReadFail();
}
+ "Channel might be full. Consider increasing the channel " + "capacity or make sure the sinks perform faster.", ex); sourceCounter.incrementChannelWriteFail(); response.sendError(HttpServletResponse.SC_SERVICE_UNAVAILABLE, "Error appending event to channel. Channel might be full."
} catch (ChannelException ex) { logger.error( getName() + " source could not write to channel.", ex); sourceCounter.incrementChannelWriteFail();
/**
 * Receives a batch of Thrift events, converts each into a Flume event,
 * and submits the whole batch to the channel processor in one call.
 * Counters are updated for both the "received" and (on success)
 * "accepted" totals.
 *
 * @param events Thrift events to append; body and headers are copied as-is
 * @return {@code Status.OK} on success, {@code Status.FAILED} if the
 *     channel rejected the batch
 * @throws TException declared by the Thrift service interface; not thrown here
 */
@Override
public Status appendBatch(List<ThriftFlumeEvent> events) throws TException {
  sourceCounter.incrementAppendBatchReceivedCount();
  sourceCounter.addToEventReceivedCount(events.size());
  List<Event> flumeEvents = Lists.newArrayList();
  for (ThriftFlumeEvent event : events) {
    flumeEvents.add(EventBuilder.withBody(event.getBody(), event.getHeaders()));
  }
  try {
    getChannelProcessor().processEventBatch(flumeEvents);
  } catch (ChannelException ex) {
    // BUG FIX: original used a printf-style "%s" placeholder with an SLF4J
    // logger (which expects "{}") and dropped the caught exception entirely.
    logger.warn("Thrift source " + getName()
        + " could not append events to the channel.", ex);
    sourceCounter.incrementChannelWriteFail();
    return Status.FAILED;
  }
  sourceCounter.incrementAppendBatchAcceptedCount();
  sourceCounter.addToEventAcceptedCount(events.size());
  return Status.OK;
}
}
logger.warn("The channel is full or unexpected failure. " + "The source will try again after " + retryInterval + " ms"); sourceCounter.incrementChannelWriteFail(); TimeUnit.MILLISECONDS.sleep(retryInterval); retryInterval = retryInterval << 1;
/**
 * Handles a batch of Avro events: converts each one into a Flume event
 * and hands the whole collection to the channel processor as a single
 * batch. Any throwable from the channel marks the batch failed (Errors
 * are re-thrown after the counter update).
 *
 * @param events the incoming Avro events
 * @return {@code Status.OK} if the batch was accepted, otherwise
 *     {@code Status.FAILED}
 */
@Override
public Status appendBatch(List<AvroFlumeEvent> events) {
  logger.debug("Avro source {}: Received avro event batch of {} events.",
      getName(), events.size());
  sourceCounter.incrementAppendBatchReceivedCount();
  sourceCounter.addToEventReceivedCount(events.size());

  List<Event> converted = new ArrayList<Event>();
  for (AvroFlumeEvent incoming : events) {
    converted.add(EventBuilder.withBody(incoming.getBody().array(),
        toStringMap(incoming.getHeaders())));
  }

  try {
    getChannelProcessor().processEventBatch(converted);
  } catch (Throwable t) {
    logger.error("Avro source " + getName() + ": Unable to process event "
        + "batch. Exception follows.", t);
    sourceCounter.incrementChannelWriteFail();
    // Errors must not be swallowed; propagate after counting the failure.
    if (t instanceof Error) {
      throw (Error) t;
    }
    return Status.FAILED;
  }

  sourceCounter.incrementAppendBatchAcceptedCount();
  sourceCounter.addToEventAcceptedCount(events.size());
  return Status.OK;
}
"source will try again after " + backoffInterval + " milliseconds"); sourceCounter.incrementChannelWriteFail(); hitChannelFullException = true; backoffInterval = waitAndGetNewBackoffInterval(backoffInterval); "source will try again after " + backoffInterval + " milliseconds"); sourceCounter.incrementChannelWriteFail(); hitChannelException = true; backoffInterval = waitAndGetNewBackoffInterval(backoffInterval);
/**
 * Appends a single Avro event to the channel. Raw event contents are only
 * logged when the privacy policy permits it.
 *
 * @param avroEvent the incoming Avro event
 * @return {@code Status.OK} if the event reached the channel, otherwise
 *     {@code Status.FAILED}
 */
@Override
public Status append(AvroFlumeEvent avroEvent) {
  if (logger.isDebugEnabled()) {
    // Only include the raw payload in the log if privacy rules allow it.
    if (LogPrivacyUtil.allowLogRawData()) {
      logger.debug("Avro source {}: Received avro event: {}", getName(), avroEvent);
    } else {
      logger.debug("Avro source {}: Received avro event", getName());
    }
  }
  sourceCounter.incrementAppendReceivedCount();
  sourceCounter.incrementEventReceivedCount();

  Event flumeEvent = EventBuilder.withBody(avroEvent.getBody().array(),
      toStringMap(avroEvent.getHeaders()));
  try {
    getChannelProcessor().processEvent(flumeEvent);
  } catch (ChannelException ex) {
    logger.warn("Avro source " + getName() + ": Unable to process event. "
        + "Exception follows.", ex);
    sourceCounter.incrementChannelWriteFail();
    return Status.FAILED;
  }

  sourceCounter.incrementAppendAcceptedCount();
  sourceCounter.incrementEventAcceptedCount();
  return Status.OK;
}
/**
 * Netty callback invoked for each syslog message received on the wire.
 * Extracts a Flume event from the message buffer, optionally tags it with
 * the client IP / hostname headers, and writes it to the channel. Events
 * that fail parsing or the channel write are dropped after updating the
 * corresponding failure counter.
 */
@Override
public void messageReceived(ChannelHandlerContext ctx, MessageEvent mEvent) {
  try {
    syslogUtils.setEventSize(maxsize);
    Event e = syslogUtils.extractEvent((ChannelBuffer) mEvent.getMessage());
    if (e == null) {
      // Incomplete message; the extractor buffers until a full event arrives.
      return;
    }
    if (clientIPHeader != null) {
      e.getHeaders().put(clientIPHeader,
          SyslogUtils.getIP(mEvent.getRemoteAddress()));
    }
    if (clientHostnameHeader != null) {
      e.getHeaders().put(clientHostnameHeader,
          SyslogUtils.getHostname(mEvent.getRemoteAddress()));
    }
    sourceCounter.incrementEventReceivedCount();
    getChannelProcessor().processEvent(e);
    sourceCounter.incrementEventAcceptedCount();
  } catch (ChannelException ex) {
    // BUG FIX: corrected the misspelled log message ("writting" -> "writing").
    logger.error("Error writing to channel", ex);
    sourceCounter.incrementChannelWriteFail();
    return;
  } catch (RuntimeException ex) {
    logger.error("Error parsing event from syslog stream, event dropped", ex);
    sourceCounter.incrementEventReadFail();
    return;
  }
}
}
} catch (ChannelException ex) { logger.error("Error writting to channel, event dropped", ex); sourceCounter.incrementChannelWriteFail(); } catch (RuntimeException ex) { logger.error("Error parsing event from syslog stream, event dropped", ex);
/**
 * Appends a single Thrift event to the channel, tracking received and
 * accepted counts.
 *
 * @param event the Thrift event whose body and headers are copied into a
 *     Flume event
 * @return {@code Status.OK} if the event reached the channel, otherwise
 *     {@code Status.FAILED}
 * @throws TException declared by the Thrift service interface; not thrown here
 */
@Override
public Status append(ThriftFlumeEvent event) throws TException {
  sourceCounter.incrementAppendReceivedCount();
  sourceCounter.incrementEventReceivedCount();

  Event converted = EventBuilder.withBody(event.getBody(), event.getHeaders());
  try {
    getChannelProcessor().processEvent(converted);
  } catch (ChannelException ex) {
    logger.warn("Thrift source " + getName() + " could not append events "
        + "to the channel.", ex);
    sourceCounter.incrementChannelWriteFail();
    return Status.FAILED;
  }

  sourceCounter.incrementAppendAcceptedCount();
  sourceCounter.incrementEventAcceptedCount();
  return Status.OK;
}
+ "Channel might be full. Consider increasing the channel " + "capacity or make sure the sinks perform faster.", channelException); sourceCounter.incrementChannelWriteFail(); } catch (JMSException jmsException) { logger.warn("JMSException consuming events", jmsException);
/**
 * Bumps the failure counter matching the failure category: a
 * {@link ChannelException} counts as a channel-write failure, anything
 * else as an event-read failure.
 *
 * @param t the throwable that caused the failure
 * @return the new value of whichever counter was incremented
 */
public long incrementEventReadOrChannelFail(Throwable t) {
  return (t instanceof ChannelException)
      ? incrementChannelWriteFail()
      : incrementEventReadFail();
}
} catch (ChannelException ex) { logger.error( getName() + " source could not write to channel.", ex); sourceCounter.incrementChannelWriteFail();
/**
 * Receives a batch of Thrift events, converts each into a Flume event,
 * and submits the whole batch to the channel processor in one call.
 * Counters are updated for both the "received" and (on success)
 * "accepted" totals.
 *
 * @param events Thrift events to append; body and headers are copied as-is
 * @return {@code Status.OK} on success, {@code Status.FAILED} if the
 *     channel rejected the batch
 * @throws TException declared by the Thrift service interface; not thrown here
 */
@Override
public Status appendBatch(List<ThriftFlumeEvent> events) throws TException {
  sourceCounter.incrementAppendBatchReceivedCount();
  sourceCounter.addToEventReceivedCount(events.size());
  List<Event> flumeEvents = Lists.newArrayList();
  for (ThriftFlumeEvent event : events) {
    flumeEvents.add(EventBuilder.withBody(event.getBody(), event.getHeaders()));
  }
  try {
    getChannelProcessor().processEventBatch(flumeEvents);
  } catch (ChannelException ex) {
    // BUG FIX: original used a printf-style "%s" placeholder with an SLF4J
    // logger (which expects "{}") and dropped the caught exception entirely.
    logger.warn("Thrift source " + getName()
        + " could not append events to the channel.", ex);
    sourceCounter.incrementChannelWriteFail();
    return Status.FAILED;
  }
  sourceCounter.incrementAppendBatchAcceptedCount();
  sourceCounter.addToEventAcceptedCount(events.size());
  return Status.OK;
}
}
/**
 * Handles a batch of Avro events: converts each one into a Flume event
 * and hands the whole collection to the channel processor as a single
 * batch. Any throwable from the channel marks the batch failed (Errors
 * are re-thrown after the counter update).
 *
 * @param events the incoming Avro events
 * @return {@code Status.OK} if the batch was accepted, otherwise
 *     {@code Status.FAILED}
 */
@Override
public Status appendBatch(List<AvroFlumeEvent> events) {
  logger.debug("Avro source {}: Received avro event batch of {} events.",
      getName(), events.size());
  sourceCounter.incrementAppendBatchReceivedCount();
  sourceCounter.addToEventReceivedCount(events.size());

  List<Event> converted = new ArrayList<Event>();
  for (AvroFlumeEvent incoming : events) {
    converted.add(EventBuilder.withBody(incoming.getBody().array(),
        toStringMap(incoming.getHeaders())));
  }

  try {
    getChannelProcessor().processEventBatch(converted);
  } catch (Throwable t) {
    logger.error("Avro source " + getName() + ": Unable to process event "
        + "batch. Exception follows.", t);
    sourceCounter.incrementChannelWriteFail();
    // Errors must not be swallowed; propagate after counting the failure.
    if (t instanceof Error) {
      throw (Error) t;
    }
    return Status.FAILED;
  }

  sourceCounter.incrementAppendBatchAcceptedCount();
  sourceCounter.addToEventAcceptedCount(events.size());
  return Status.OK;
}
/**
 * Appends a single Avro event to the channel. Raw event contents are only
 * logged when the privacy policy permits it.
 *
 * @param avroEvent the incoming Avro event
 * @return {@code Status.OK} if the event reached the channel, otherwise
 *     {@code Status.FAILED}
 */
@Override
public Status append(AvroFlumeEvent avroEvent) {
  if (logger.isDebugEnabled()) {
    // Only include the raw payload in the log if privacy rules allow it.
    if (LogPrivacyUtil.allowLogRawData()) {
      logger.debug("Avro source {}: Received avro event: {}", getName(), avroEvent);
    } else {
      logger.debug("Avro source {}: Received avro event", getName());
    }
  }
  sourceCounter.incrementAppendReceivedCount();
  sourceCounter.incrementEventReceivedCount();

  Event flumeEvent = EventBuilder.withBody(avroEvent.getBody().array(),
      toStringMap(avroEvent.getHeaders()));
  try {
    getChannelProcessor().processEvent(flumeEvent);
  } catch (ChannelException ex) {
    logger.warn("Avro source " + getName() + ": Unable to process event. "
        + "Exception follows.", ex);
    sourceCounter.incrementChannelWriteFail();
    return Status.FAILED;
  }

  sourceCounter.incrementAppendAcceptedCount();
  sourceCounter.incrementEventAcceptedCount();
  return Status.OK;
}
} catch (ChannelException ex) { logger.error("Error writting to channel, event dropped", ex); sourceCounter.incrementChannelWriteFail(); } catch (RuntimeException ex) { logger.error("Error parsing event from syslog stream, event dropped", ex);
/**
 * Netty callback invoked for each syslog message received on the wire.
 * Extracts a Flume event from the message buffer, optionally tags it with
 * the client IP / hostname headers, and writes it to the channel. Events
 * that fail parsing or the channel write are dropped after updating the
 * corresponding failure counter.
 */
@Override
public void messageReceived(ChannelHandlerContext ctx, MessageEvent mEvent) {
  try {
    syslogUtils.setEventSize(maxsize);
    Event e = syslogUtils.extractEvent((ChannelBuffer) mEvent.getMessage());
    if (e == null) {
      // Incomplete message; the extractor buffers until a full event arrives.
      return;
    }
    if (clientIPHeader != null) {
      e.getHeaders().put(clientIPHeader,
          SyslogUtils.getIP(mEvent.getRemoteAddress()));
    }
    if (clientHostnameHeader != null) {
      e.getHeaders().put(clientHostnameHeader,
          SyslogUtils.getHostname(mEvent.getRemoteAddress()));
    }
    sourceCounter.incrementEventReceivedCount();
    getChannelProcessor().processEvent(e);
    sourceCounter.incrementEventAcceptedCount();
  } catch (ChannelException ex) {
    // BUG FIX: corrected the misspelled log message ("writting" -> "writing").
    logger.error("Error writing to channel", ex);
    sourceCounter.incrementChannelWriteFail();
    return;
  } catch (RuntimeException ex) {
    logger.error("Error parsing event from syslog stream, event dropped", ex);
    sourceCounter.incrementEventReadFail();
    return;
  }
}
}
/**
 * Appends a single Thrift event to the channel, tracking received and
 * accepted counts.
 *
 * @param event the Thrift event whose body and headers are copied into a
 *     Flume event
 * @return {@code Status.OK} if the event reached the channel, otherwise
 *     {@code Status.FAILED}
 * @throws TException declared by the Thrift service interface; not thrown here
 */
@Override
public Status append(ThriftFlumeEvent event) throws TException {
  sourceCounter.incrementAppendReceivedCount();
  sourceCounter.incrementEventReceivedCount();

  Event converted = EventBuilder.withBody(event.getBody(), event.getHeaders());
  try {
    getChannelProcessor().processEvent(converted);
  } catch (ChannelException ex) {
    logger.warn("Thrift source " + getName() + " could not append events "
        + "to the channel.", ex);
    sourceCounter.incrementChannelWriteFail();
    return Status.FAILED;
  }

  sourceCounter.incrementAppendAcceptedCount();
  sourceCounter.incrementEventAcceptedCount();
  return Status.OK;
}