/** Adds {@code ms} to the counter tracking time skipped due to back pressure. */
public void skippedBackPressureMs(long ms) {
    skippedBackPressureMs.incrBy(ms);
}
}
/** Increments the reported-error counter by {@code n}. */
public void incrReportedErrorCountBy(long n) {
    reportedErrorCount.incrBy(n);
}
/** Adds {@code ms} to the counter tracking time skipped at the max-spout-pending limit. */
public void skippedMaxSpoutMs(long ms) {
    skippedMaxSpoutMs.incrBy(ms);
}
/** Adds {@code ms} to the counter tracking time skipped while inactive. */
public void skippedInactiveMs(long ms) {
    skippedInactiveMs.incrBy(ms);
}
/** Counts every bulk request handed to the bulk processor under the "BulkRequest" scope. */
@Override
public void beforeBulk(long executionId, BulkRequest request) {
    eventCounter.scope("BulkRequest").incrBy(1);
}
/**
 * Logs the bulk about to be executed and bumps the "bulks_received" counter.
 */
@Override
public void beforeBulk(long executionId, BulkRequest request) {
    LOG.debug("beforeBulk {} with {} actions", executionId, request.numberOfActions());
    eventCounter.scope("bulks_received").incrBy(1);
}
/** Removes an acked message from the in-flight set and counts the ack. */
@Override
public void ack(Object msgId) {
    beingProcessed.remove(msgId);
    eventCounter.scope("acked").incrBy(1);
}
/** Removes a failed message from the in-flight set and counts the failure. */
@Override
public void fail(Object msgId) {
    beingProcessed.remove(msgId);
    eventCounter.scope("failed").incrBy(1);
}
/**
 * Increments the counter identified by {@code metric} (keyed with the given
 * parameters) by {@code incrementBy}.
 */
@Override
public void countBy(final MetricDefinition metric, final long incrementBy,
        final Object... metricParameters) {
    counters.scope(generateKey(metric, metricParameters)).incrBy(incrementBy);
}
@Override public void afterBulk(long executionId, BulkRequest request, Throwable throwable) { eventCounter.scope("bulks_received").incrBy(1); LOG.error("Exception with bulk {} - failing the whole lot ", executionId, throwable); synchronized (waitAck) { // WHOLE BULK FAILED // mark all the docs as fail Iterator<DocWriteRequest<?>> itreq = request.requests().iterator(); while (itreq.hasNext()) { DocWriteRequest bir = itreq.next(); String id = bir.id(); List<Tuple> xx = waitAck.getIfPresent(id); if (xx != null) { LOG.debug("Failed {} tuple(s) for ID {}", xx.size(), id); for (Tuple x : xx) { // fail it _collector.fail(x); } waitAck.invalidate(id); } else { LOG.warn("Could not find unacked tuple for {}", id); } } } }
private void handleException(String url, Throwable e, Metadata metadata, Tuple tuple, String errorSource, String errorMessage) { LOG.error(errorMessage); // send to status stream in case another component wants to update // its status metadata.setValue(Constants.STATUS_ERROR_SOURCE, errorSource); metadata.setValue(Constants.STATUS_ERROR_MESSAGE, errorMessage); collector.emit(StatusStreamName, tuple, new Values(url, metadata, Status.ERROR)); collector.ack(tuple); // Increment metric that is context specific String s = "error_" + errorSource.replaceAll(" ", "_") + "_"; eventCounter.scope(s + e.getClass().getSimpleName()).incrBy(1); // Increment general metric eventCounter.scope("parse exception").incrBy(1); }
/** * Internal helper to record the value of a timer. * @param key String representation of the key to record the timer under * @param elapsedTimeMs How long the timer ran for, in milliseconds. */ private void recordTimer(final String key, final long elapsedTimeMs) { // Update averaged timer key timers.scope(key).update(elapsedTimeMs); // Increment total time counter, this keeps a running count of total time spent in this timer counters.scope(key + "_totalTimeMs").incrBy(elapsedTimeMs); }
@Override public void nextTuple() { if (!active) return; // synchronize access to buffer needed in case of asynchronous // queries to the backend synchronized (buffer) { if (!buffer.isEmpty()) { List<Object> fields = buffer.remove(); String url = fields.get(0).toString(); this._collector.emit(fields, url); beingProcessed.put(url, null); eventCounter.scope("emitted").incrBy(1); return; } } if (isInQuery.get() || throttleQueries() > 0) { // sleep for a bit but not too much in order to give ack/fail a // chance Utils.sleep(10); return; } // re-populate the buffer populateBuffer(); timeLastQuery = System.currentTimeMillis(); }
// NOTE(review): fragment — the enclosing outlink-extraction method is not
// visible in this chunk; the statements below are cut from several branches
// (invalid source URL, per-outlink parse failure, URL-filter rejection,
// outlink accepted). Left byte-identical; presumably each counter scope maps
// to one branch outcome — confirm against the full method before editing.
eventCounter.scope("error_invalid_source_url").incrBy(1); return new LinkedList<Outlink>(); eventCounter .scope("error_outlink_parsing_" + e.getClass().getSimpleName()).incrBy(1); continue; urlOL = urlFilters.filter(url_, parentMetadata, urlOL); if (urlOL == null) { eventCounter.scope("outlink_filtered").incrBy(1); continue; eventCounter.scope("outlink_kept").incrBy(1);
// NOTE(review): fragment — duplicates the invalid-source-URL branch seen on
// the previous fragment; enclosing method not visible. Left byte-identical.
eventCounter.scope("error_invalid_source_url").incrBy(1); return new LinkedList<Outlink>();
// NOTE(review): fragment — excerpt of an S3-backed cache lookup; the try
// block matching the visible catch clauses is outside this chunk. Left
// byte-identical. NOTE(review): "result.exception" uses a dot whereas the
// sibling scopes use underscores ("result_hits", "result_misses") — looks
// like an inconsistent metric name; confirm with the metrics consumers
// before renaming, since the scope string is runtime behavior.
eventCounter.scope("result_keytoobig").incrBy(1); _collector.emit(tuple, new Values(url, metadata)); obj = client.getObject(bucketName, key); } catch (AmazonS3Exception e) { eventCounter.scope("result_misses").incrBy(1); try { byte[] content = IOUtils.toByteArray(obj.getObjectContent()); eventCounter.scope("result_hits").incrBy(1); eventCounter.scope("bytes_fetched").incrBy(content.length); return; } catch (Exception e) { eventCounter.scope("result.exception").incrBy(1); LOG.error("IOException when extracting byte array", e);
// NOTE(review): fragment — excerpt of the success-path afterBulk listener
// (records bulk latency and iterates item responses); the method header and
// the conditional around "doc_conflicts" are outside this chunk. Left
// byte-identical.
request.numberOfActions()); long msec = response.getTook().getMillis(); eventCounter.scope("bulks_received").incrBy(1); eventCounter.scope("bulk_msec").incrBy(msec); Iterator<BulkItemResponse> bulkitemiterator = response.iterator(); int itemcount = 0; eventCounter.scope("doc_conflicts").incrBy(1); } else { LOG.error("update ID {}, failure: {}", id, f);