@Override public void reportError(Throwable error) { _collector.reportError(error); }
// Delegates error reporting to the wrapped output collector.
public void reportError(Throwable t) { out.reportError(t); } }
/**
 * Surfaces an error message received from the shell subprocess to the topology.
 *
 * @param msg the raw error message reported by the shell process
 */
private void handleError(String msg) {
    Exception shellError = new Exception("Shell Process Exception: " + msg);
    _collector.reportError(shellError);
}
/**
 * Marks the tracked batch as failed and fails its delayed ack (if any) so the batch gets
 * replayed. Failures explicitly reported by user code ({@code ReportedFailedException})
 * are also surfaced to the collector.
 *
 * @param tracked the batch bookkeeping record to mark as failed
 * @param e       the failure cause; may be null
 */
private void failBatch(TrackedBatch tracked, FailedException e) {
    // instanceof is false for null, so the previous "e != null &&" guard was redundant.
    if (e instanceof ReportedFailedException) {
        _collector.reportError(e);
    }
    tracked.failed = true;
    if (tracked.delayedAck != null) {
        _collector.fail(tracked.delayedAck);
        tracked.delayedAck = null;
    }
}
// Async-send failure callback: log the cause, report it to the topology, and fail the
// input tuple so it can be replayed. A null throwable is ignored.
@Override public void onException(Throwable throwable) { if (throwable != null) { LOG.error("Async send messages failure!", throwable); collector.reportError(throwable); collector.fail(input); } } });
/**
 * Fails every buffered tuple after reporting the triggering exception, then resets the
 * batch state so a fresh batch can start accumulating.
 *
 * @param e the exception that caused the batch to fail
 */
public void fail(Exception e) {
    collector.reportError(e);
    for (Tuple buffered : tupleBatch) {
        collector.fail(buffered);
    }
    tupleBatch.clear();
    forceFlush = false;
}
/**
 * This method is called when one of the methods of the {@link BaseExecutionResultHandler} is not
 * overridden. It can be practical if you want to bundle some/all of the methods into a single method.
 *
 * @param e the exception thrown
 * @param collector the output collector
 * @param tuple the tuple that failed
 */
protected void onDriverException(DriverException e, OutputCollector collector, Tuple tuple) { LOG.error("An error occurred while executing cassandra statement", e); collector.fail(tuple); collector.reportError(e); }
private void doRotationAndRemoveAllWriters() { synchronized (writeLock) { for (final Writer writer : writers.values()) { try { rotateOutputFile(writer); } catch (IOException e) { this.collector.reportError(e); LOG.warn("IOException during scheduled file rotation.", e); } } //above for-loop has closed all the writers. It's safe to clear the map here. writers.clear(); } }
void doRotationAndRemoveWriter(String writerKey, Writer writer) { try { rotateOutputFile(writer); } catch (IOException e) { this.collector.reportError(e); LOG.error("File could not be rotated"); //At this point there is nothing to do. In all likelihood any filesystem operations will fail. //The next tuple will almost certainly fail to write and/or sync, which force a rotation. That //will give rotateAndReset() a chance to work which includes creating a fresh file handle. } finally { //rotateOutputFile(writer) has closed the writer. It's safe to remove the writer from the map here. writers.remove(writerKey); } }
// Records the fatal subprocess failure, reports it to the topology, and halts the worker.
// The JVM is only exited when not in local mode AND either the bolt is still running or the
// throwable is an Error (always fatal). Exit code 11 marks a ShellBolt death.
private void die(Throwable exception) { String processInfo = _process.getProcessInfoString() + _process.getProcessTerminationInfoString(); _exception = new RuntimeException(processInfo, exception); String message = String.format("Halting process: ShellBolt died. Command: %s, ProcessInfo %s", Arrays.toString(_command), processInfo); LOG.error(message, exception); _collector.reportError(exception); if (!_isLocalMode && (_running || (exception instanceof Error))) { //don't exit if not running, unless it is an Error System.exit(11); } }
@Override protected boolean removeEldestEntry(Map.Entry<String, Writer> eldest) { if (this.size() > this.maxWriters) { //The writer must be closed before removed from the map. //If it failed, we might lose some data. try { eldest.getValue().close(); } catch (IOException e) { collector.reportError(e); LOG.error("Failed to close the eldest Writer"); } return true; } else { return false; } } }
/**
 * Serializes the first value of the tuple and writes it to the socket as a
 * newline-terminated string. Acks on success; on I/O failure the error is reported and the
 * tuple is failed so it can be replayed.
 *
 * @param input the tuple whose first field holds the {@code Values} to send
 */
@Override
public void execute(Tuple input) {
    Values values = (Values) input.getValue(0);
    byte[] array = serializer.write(values, null).array();
    // Decode with an explicit charset: the no-arg String(byte[]) constructor uses the
    // platform default charset and could corrupt the serialized payload on some hosts.
    String data = new String(array, java.nio.charset.StandardCharsets.UTF_8);
    try {
        writer.write(data + "\n");
        writer.flush();
        collector.ack(input);
    } catch (IOException e) {
        LOG.error("Error while writing data to socket.", e);
        collector.reportError(e);
        collector.fail(input);
    }
}
// Emits the per-stream predicted scores anchored to the input tuple, then acks it.
// Any scoring exception is reported and the tuple is failed for replay.
// NOTE(review): when the runner returns null scores the tuple is neither acked nor failed,
// so it is only replayed via tuple timeout — confirm this is intentional.
@Override protected void process(Tuple input) { try { final Map<String, List<Object>> scoresPerStream = runner.scoredTuplePerStream(input); LOG.debug("Input tuple [{}] generated predicted scores [{}]", input, scoresPerStream); if (scoresPerStream != null) { for (Map.Entry<String, List<Object>> streamToTuple : scoresPerStream.entrySet()) { collector.emit(streamToTuple.getKey(), input, streamToTuple.getValue()); } collector.ack(input); } else { LOG.debug("Input tuple [{}] generated NULL scores", input); } } catch (Exception e) { collector.reportError(e); collector.fail(input); } }
/**
 * Looks up the tuple's values in Elasticsearch and emits/acks the results. Any failure is
 * reported to the topology and the tuple is failed so it can be retried.
 *
 * @param tuple the input tuple to look up
 */
@Override
public void process(Tuple tuple) {
    try {
        final Collection<Values> lookedUp = lookupValuesInEs(tuple);
        tryEmitAndAck(lookedUp, tuple);
    } catch (Exception e) {
        collector.reportError(e);
        collector.fail(tuple);
    }
}
/**
 * Reports the failure, then fails either the single tuple (when no commit strategy is
 * configured) or the entire queue of pending tuples (when one is).
 *
 * @param tuple the tuple whose processing failed
 * @param e     the cause of the failure
 */
private void fail(Tuple tuple, Exception e) {
    collector.reportError(e);
    if (commitStgy != null) {
        failQueuedTuples(getQueuedTuples());
    } else {
        collector.fail(tuple);
    }
}
@Override protected void process(Tuple input) { MqttMessage message = this.mapper.toMessage(input); try { this.publisher.publish(message); this.collector.ack(input); } catch (Exception e) { LOG.warn("Error publishing MQTT message. Failing tuple.", e); // should we fail the tuple or kill the worker? collector.reportError(e); collector.fail(input); } }
/**
 * Maps the tuple to JDBC columns and inserts it — into the configured table when a table
 * name is set, otherwise via the configured insert query. Acks on success; reports the error
 * and fails the tuple otherwise.
 *
 * @param tuple the input tuple to persist
 */
@Override
protected void process(Tuple tuple) {
    try {
        final List<List<Column>> rows = new ArrayList<List<Column>>();
        rows.add(jdbcMapper.getColumns(tuple));
        if (StringUtils.isBlank(tableName)) {
            this.jdbcClient.executeInsertQuery(this.insertQuery, rows);
        } else {
            this.jdbcClient.insert(this.tableName, rows);
        }
        this.collector.ack(tuple);
    } catch (Exception e) {
        this.collector.reportError(e);
        this.collector.fail(tuple);
    }
}
/**
 * {@inheritDoc}
 */
@Override
public void prepare(Map<String, Object> topoConfig, TopologyContext topologyContext, OutputCollector outputCollector) {
    this.outputCollector = outputCollector;
    this.topoConfig = topoConfig;
    // Prefer the component-level Cassandra config; fall back to the topology config.
    final Map<String, Object> clientConfig;
    if (cassandraConfig != null) {
        clientConfig = cassandraConfig;
    } else {
        clientConfig = topoConfig;
    }
    this.cassandraConf = new CassandraConf(clientConfig);
    this.client = clientProvider.getClient(clientConfig);
    try {
        session = client.connect();
    } catch (NoHostAvailableException e) {
        // Surface the connection failure to the topology before rethrowing to fail the worker.
        outputCollector.reportError(e);
        throw e;
    }
}
// Dispatches a Cassandra driver throwable to the matching typed handler method; anything
// not matched by the instanceof chain is reported to the topology and the tuple is failed
// for replay. The dispatch order follows the driver's exception hierarchy — do not reorder.
@Override public void onThrowable(Throwable t, OutputCollector collector, Tuple i) { if (t instanceof QueryValidationException) { this.onQueryValidationException((QueryValidationException) t, collector, i); } else if (t instanceof ReadTimeoutException) { this.onReadTimeoutException((ReadTimeoutException) t, collector, i); } else if (t instanceof WriteTimeoutException) { this.onWriteTimeoutException((WriteTimeoutException) t, collector, i); } else if (t instanceof UnavailableException) { this.onUnavailableException((UnavailableException) t, collector, i); } else { collector.reportError(t); collector.fail(i); } }
@Override public void execute(Tuple tuple) { if (TupleUtils.isTick(tuple)) { return; } try { //get document Document doc = mapper.toDocument(tuple); //get query filter Bson filter = queryCreator.createFilter(tuple); mongoClient.update(filter, doc, upsert, many); this.collector.ack(tuple); } catch (Exception e) { this.collector.reportError(e); this.collector.fail(tuple); } }