/**
 * Fails every queued tuple on the collector.
 *
 * @param failedTuples the tuples to mark as failed
 */
private void failQueuedTuples(List<Tuple> failedTuples) {
    failedTuples.forEach(collector::fail);
}
/**
 * Delegates the failure of the given tuple to the wrapped collector.
 *
 * @param tup the tuple to fail
 */
public void fail(Tuple tup) {
    _collector.fail(tup);
}
}
/**
 * Called when a pending group of tuples expires; fails each buffered tuple
 * on the collector.
 *
 * @param id     the key the expired tuples were grouped under
 * @param tuples the expired tuples, keyed by their originating stream
 */
@Override
public void expire(List<Object> id, Map<GlobalStreamId, Tuple> tuples) {
    tuples.values().forEach(_collector::fail);
}
}
/**
 * Handles a fail signal for the given message id by removing the pending
 * tuple and failing it on the collector.
 *
 * @param id the message id that failed
 * @throws RuntimeException if no pending tuple is tracked under {@code id}
 */
private void handleFail(Object id) {
    final Tuple pending = _inputs.remove(id);
    if (pending == null) {
        throw new RuntimeException("Failed a non-existent or already acked/failed id: " + id);
    }
    _collector.fail(pending);
}
/**
 * Reports the given exception, fails every tuple buffered in the current
 * batch, and resets the batch state.
 *
 * @param e the failure cause reported to the collector
 */
public void fail(Exception e) {
    collector.reportError(e);
    // Fail each buffered tuple before discarding the batch.
    tupleBatch.forEach(collector::fail);
    tupleBatch.clear();
    forceFlush = false;
}
/**
 * Marks the tracked batch as failed and fails its delayed-ack tuple if one
 * is pending.
 *
 * @param tracked the batch bookkeeping record to fail
 * @param e       the failure cause; only a {@code ReportedFailedException}
 *                is additionally reported to the collector
 */
private void failBatch(TrackedBatch tracked, FailedException e) {
    // instanceof is already false for null, so the former explicit null
    // check (e != null && ...) was redundant.
    if (e instanceof ReportedFailedException) {
        _collector.reportError(e);
    }
    tracked.failed = true;
    if (tracked.delayedAck != null) {
        _collector.fail(tracked.delayedAck);
        tracked.delayedAck = null;
    }
}
@Override
public void onException(Throwable throwable) {
    // Nothing to do when no actual error was delivered.
    if (throwable == null) {
        return;
    }
    LOG.error("Async send messages failure!", throwable);
    collector.reportError(throwable);
    collector.fail(input);
}
});
/**
 * This method is called when one of the methods of the {@link BaseExecutionResultHandler} is not
 * overridden. It can be practical if you want to bundle some or all of the methods into a single
 * method.
 *
 * @param e the exception that was thrown
 * @param collector the output collector
 * @param tuple the tuple that failed
 */
protected void onDriverException(DriverException e, OutputCollector collector, Tuple tuple) {
    LOG.error("An error occurred while executing cassandra statement", e);
    collector.fail(tuple);
    collector.reportError(e);
}
/**
 * Counts incoming tuples and deliberately fails the third one received;
 * every other tuple is acked.
 */
@Override
public void execute(Tuple tuple) {
    log.info("Received tuple : {}", tuple.getValue(0));
    count++;
    if (count != 3) {
        collector.ack(tuple);
    } else {
        collector.fail(tuple);
    }
}
/**
 * Serializes the first tuple value and writes it as a newline-terminated
 * line to the writer, acking on success and failing on I/O errors.
 */
@Override
public void execute(Tuple input) {
    final Values values = (Values) input.getValue(0);
    final byte[] bytes = serializer.write(values, null).array();
    // NOTE(review): new String(byte[]) uses the platform default charset —
    // confirm this matches what the receiving end expects.
    final String payload = new String(bytes);
    try {
        writer.write(payload + "\n");
        writer.flush();
        collector.ack(input);
    } catch (IOException e) {
        LOG.error("Error while writing data to socket.", e);
        collector.reportError(e);
        collector.fail(input);
    }
}
/**
 * Scores the input tuple and emits the resulting values on their per-stream
 * outputs, acking only when scores were produced. Any exception is reported
 * and the tuple failed.
 */
@Override
protected void process(Tuple input) {
    try {
        final Map<String, List<Object>> scoresPerStream = runner.scoredTuplePerStream(input);
        LOG.debug("Input tuple [{}] generated predicted scores [{}]", input, scoresPerStream);
        if (scoresPerStream == null) {
            // No scores: the tuple is neither acked nor failed here.
            LOG.debug("Input tuple [{}] generated NULL scores", input);
        } else {
            scoresPerStream.forEach((stream, values) -> collector.emit(stream, input, values));
            collector.ack(input);
        }
    } catch (Exception e) {
        collector.reportError(e);
        collector.fail(input);
    }
}
/**
 * Reports the exception, then fails either the single tuple (no commit
 * strategy) or every queued tuple (commit strategy configured).
 *
 * @param tuple the tuple being failed
 * @param e     the failure cause
 */
private void fail(Tuple tuple, Exception e) {
    collector.reportError(e);
    if (commitStgy != null) {
        // With a commit strategy in place, all queued tuples fail together.
        failQueuedTuples(getQueuedTuples());
    } else {
        collector.fail(tuple);
    }
}
/**
 * Looks up values for the tuple (via {@code lookupValuesInEs}) and emits and
 * acks them; any exception is reported and the tuple failed.
 */
@Override
public void process(Tuple tuple) {
    try {
        tryEmitAndAck(lookupValuesInEs(tuple), tuple);
    } catch (Exception e) {
        collector.reportError(e);
        collector.fail(tuple);
    }
}
@Override protected void process(Tuple input) { MqttMessage message = this.mapper.toMessage(input); try { this.publisher.publish(message); this.collector.ack(input); } catch (Exception e) { LOG.warn("Error publishing MQTT message. Failing tuple.", e); // should we fail the tuple or kill the worker? collector.reportError(e); collector.fail(input); } }
/**
 * Maps the tuple to a single row and inserts it, either into the configured
 * table or via the explicit insert query; acks on success, reports and
 * fails on any error.
 */
@Override
protected void process(Tuple tuple) {
    try {
        final List<List<Column>> rows = new ArrayList<List<Column>>();
        rows.add(jdbcMapper.getColumns(tuple));
        if (StringUtils.isBlank(tableName)) {
            this.jdbcClient.executeInsertQuery(this.insertQuery, rows);
        } else {
            this.jdbcClient.insert(this.tableName, rows);
        }
        this.collector.ack(tuple);
    } catch (Exception e) {
        this.collector.reportError(e);
        this.collector.fail(tuple);
    }
}
/**
 * Dispatches the throwable to the matching typed handler; anything not
 * specifically handled is reported and the tuple failed.
 */
@Override
public void onThrowable(Throwable t, OutputCollector collector, Tuple i) {
    if (t instanceof QueryValidationException) {
        this.onQueryValidationException((QueryValidationException) t, collector, i);
        return;
    }
    if (t instanceof ReadTimeoutException) {
        this.onReadTimeoutException((ReadTimeoutException) t, collector, i);
        return;
    }
    if (t instanceof WriteTimeoutException) {
        this.onWriteTimeoutException((WriteTimeoutException) t, collector, i);
        return;
    }
    if (t instanceof UnavailableException) {
        this.onUnavailableException((UnavailableException) t, collector, i);
        return;
    }
    collector.reportError(t);
    collector.fail(i);
}
/**
 * Runs the configured select with columns derived from the tuple, emits
 * every mapped result value anchored to the tuple, then acks it; any error
 * is reported and the tuple failed.
 */
@Override
protected void process(Tuple tuple) {
    try {
        final List<Column> columns = jdbcLookupMapper.getColumns(tuple);
        final List<List<Column>> rows = jdbcClient.select(this.selectQuery, columns);
        if (rows != null && !rows.isEmpty()) {
            for (List<Column> row : rows) {
                for (Values value : jdbcLookupMapper.toTuple(tuple, row)) {
                    collector.emit(tuple, value);
                }
            }
        }
        this.collector.ack(tuple);
    } catch (Exception e) {
        this.collector.reportError(e);
        this.collector.fail(tuple);
    }
}
/**
 * Verifies that a PREPARE checkpoint arriving before init-state is failed,
 * and that a subsequent ROLLBACK checkpoint triggers a state rollback.
 */
@Test
public void testPrepareAndRollbackBeforeInitstate() throws Exception {
    Mockito.when(mockTuple.getSourceStreamId()).thenReturn("default");
    executor.execute(mockTuple);

    Mockito.when(mockCheckpointTuple.getSourceStreamId()).thenReturn(CheckpointSpout.CHECKPOINT_STREAM_ID);
    Mockito.when(mockCheckpointTuple.getValueByField(CHECKPOINT_FIELD_ACTION)).thenReturn(PREPARE);
    // 100L replaces the deprecated new Long(100) boxing constructor.
    Mockito.when(mockCheckpointTuple.getLongByField(CHECKPOINT_FIELD_TXID)).thenReturn(100L);
    executor.execute(mockCheckpointTuple);
    Mockito.verify(mockOutputCollector, Mockito.times(1)).fail(mockCheckpointTuple);

    Mockito.when(mockCheckpointTuple.getValueByField(CHECKPOINT_FIELD_ACTION)).thenReturn(ROLLBACK);
    Mockito.when(mockCheckpointTuple.getLongByField(CHECKPOINT_FIELD_TXID)).thenReturn(100L);
    Mockito.doNothing().when(mockOutputCollector).ack(mockCheckpointTuple);
    executor.execute(mockCheckpointTuple);
    Mockito.verify(mockState, Mockito.times(1)).rollback();
}
/**
 * Emits the current counters, then acks the input only when the acked and
 * failed counts are equal; otherwise fails it.
 */
@Override
public void execute(Tuple input) {
    collector.emit(new Values(emitted, acked, failed, executed));
    if (acked - failed != 0) {
        collector.fail(input);
    } else {
        collector.ack(input);
    }
}
}
@Override public void execute(Tuple tuple) { if (TupleUtils.isTick(tuple)) { return; } try { //get document Document doc = mapper.toDocument(tuple); //get query filter Bson filter = queryCreator.createFilter(tuple); mongoClient.update(filter, doc, upsert, many); this.collector.ack(tuple); } catch (Exception e) { this.collector.reportError(e); this.collector.fail(tuple); } }