/**
 * Iterates over the given collection and invokes the given consumer for each element,
 * aborting early once this context is no longer active (i.e. the task was cancelled).
 *
 * @param iterable the collection to iterate through
 * @param consumer the processor invoked for each element
 * @param <T>      the type of elements being processed
 */
public <T> void iterateWhileActive(Iterable<T> iterable, Consumer<T> consumer) {
    for (T element : iterable) {
        // Re-check the cancellation state before processing each element...
        if (isActive()) {
            consumer.accept(element);
        } else {
            return;
        }
    }
}
/**
 * Iterates over the given result set and supplies each row to the given handler while
 * enforcing the given limit and the cancellation state of the surrounding task.
 *
 * @param handler        the handler invoked for each row; returning <tt>false</tt> aborts processing
 * @param effectiveLimit the limit which determines if a row is output and if processing continues
 * @param resultSet      the result set to iterate over
 * @param taskContext    the task context used to detect user cancellation
 * @throws SQLException in case of a database error while fetching rows
 */
protected void processResultSet(Function<Row, Boolean> handler, Limit effectiveLimit, ResultSet resultSet, TaskContext taskContext) throws SQLException {
    while (resultSet.next() && taskContext.isActive()) {
        Row currentRow = loadIntoRow(resultSet);
        // Only rows accepted by the limit are forwarded; a handler returning false aborts...
        if (effectiveLimit.nextRow() && !handler.apply(currentRow)) {
            return;
        }
        // Stop once the limit has been exhausted...
        if (!effectiveLimit.shouldContinue()) {
            return;
        }
    }
}
@Override
protected void processResultSet(Function<Row, Boolean> handler, Limit effectiveLimit, ResultSet resultSet, TaskContext taskContext) throws SQLException {
    // Fetch rows while available and the surrounding task has not been cancelled...
    while (resultSet.next() && taskContext.isActive()) {
        Row row = loadIntoRow(resultSet);
        // Only forward rows accepted by the limit (presumably nextRow() skips an initial
        // offset - TODO confirm against Limit)...
        if (effectiveLimit.nextRow()) {
            // A handler returning false signals that no further rows are required...
            if (!handler.apply(row)) {
                return;
            }
        }
        // Stop once the limit has been exhausted...
        if (!effectiveLimit.shouldContinue()) {
            return;
        }
    }
}
@Override
public void startElement(String uri, String localName, String name, Attributes attributes) throws SAXException {
    // Delegate to active handlers...
    for (SAX2DOMHandler handler : activeHandlers) {
        handler.createElement(name, attributes);
    }
    // Start a new handler if necessary (i.e. if a NodeHandler was registered for this element name)
    NodeHandler handler = handlers.get(name);
    if (handler != null) {
        SAX2DOMHandler saxHandler = new SAX2DOMHandler(handler, documentBuilder.newDocument());
        saxHandler.createElement(name, attributes);
        activeHandlers.add(saxHandler);
    }
    // Check if the user tried to interrupt parsing....
    if (!taskContext.isActive()) {
        throw new UserInterruptException();
    }
}
/**
 * Processes a single entity while enforcing de-duplication, the given limit and the
 * cancellation state of the surrounding task.
 *
 * @param consumer           the consumer to supply each accepted entity to; returning
 *                           <tt>false</tt> aborts processing
 * @param lim                the limit which determines if the entity is output and if
 *                           processing continues afterwards
 * @param ctx                the task context used to detect user cancellation
 * @param rateLimit          used to only check the cancellation flag every once in a while
 * @param entityDeDuplicator contains the ids of entities which have already been seen and
 *                           must be skipped
 * @param entity             the entity to process
 * @return <tt>true</tt> if processing should continue, <tt>false</tt> to abort
 */
private boolean processEntity(Function<? super E, Boolean> consumer, Limit lim, TaskContext ctx, RateLimit rateLimit, Set<String> entityDeDuplicator, E entity) {
    // Skip entities which have already been processed.
    // NOTE(review): nothing in this method adds to entityDeDuplicator - presumably the
    // caller records the ids; verify against the call site.
    if (!entityDeDuplicator.contains(entity.getId())) {
        if (lim.nextRow()) {
            if (!consumer.apply(entity)) {
                return false;
            }
            if (!lim.shouldContinue()) {
                return false;
            }
        }
        if (rateLimit.check()) {
            // Check if the user tried to cancel this task
            if (!ctx.isActive()) {
                return false;
            }
        }
    }
    return true;
}
/**
 * Iterates over the given result set, materializes each row into an entity and supplies it
 * to the given handler while enforcing the given limit and task cancellation.
 *
 * @param handler     the handler invoked per entity; returning <tt>false</tt> aborts processing
 * @param compiler    used to execute join-fetches for each materialized entity
 * @param limit       the limit applied when no native (database-side) limit is in place
 * @param nativeLimit <tt>true</tt> if the database already enforced the limit, <tt>false</tt> otherwise
 * @param rs          the result set to iterate over
 * @throws Exception in case of a database error or a failure while materializing an entity
 */
@SuppressWarnings("unchecked")
protected void execIterate(Function<E, Boolean> handler, Compiler compiler, Limit limit, boolean nativeLimit, ResultSet rs) throws Exception {
    TaskContext taskContext = TaskContext.get();
    Set<String> columns = dbs.readColumns(rs);
    while (rs.next() && taskContext.isActive()) {
        // With a native limit every fetched row is output; otherwise ask the limit
        // (note: nextRow() must not be invoked when nativeLimit is set)...
        boolean outputRow = nativeLimit || limit.nextRow();
        if (outputRow) {
            SQLEntity entity = makeEntity(descriptor, null, columns, rs);
            compiler.executeJoinFetches(entity, columns, rs);
            if (!handler.apply((E) entity)) {
                break;
            }
        }
        // Without a native limit, stop once the limit has been exhausted...
        if (!nativeLimit && !limit.shouldContinue()) {
            break;
        }
    }
}
/**
 * Executes the change against the given database.
 *
 * @param db the database to change
 */
public void execute(Database db) {
    error = null;
    for (String statement : getSql()) {
        // Statements are skipped (not aborted) once the task was cancelled...
        if (TaskContext.get().isActive()) {
            try {
                OMA.LOG.FINE("Executing Schema Update: %s", statement);
                db.createQuery(statement).executeUpdate();
            } catch (SQLException e) {
                // NOTE(review): only the message of the last failing statement is kept -
                // earlier errors are overwritten; confirm this is intended.
                error = e.getMessage();
            }
        }
    }
    // Mark as executed unless at least one statement failed...
    executed = !isFailed();
}
/**
 * Parses the previously supplied input and calls the given consumer for each row.
 * <p>
 * Note that this method will close the given input.
 *
 * @param consumer the consumer to call for each line
 * @throws IOException if an IO error occurs while reading from the given input
 */
public void execute(Consumer<Values> consumer) throws IOException {
    try {
        this.consumer = consumer;
        TaskContext tc = TaskContext.get();
        // Prime the reader before consuming rows...
        read();
        // Read row by row until the input is exhausted, the limit is reached or the
        // surrounding task was cancelled...
        while (tc.isActive() && !isEOF() && limit.shouldContinue()) {
            readRow();
            consumeNewLine();
        }
    } finally {
        // Always release the underlying input, even if parsing failed...
        input.close();
    }
}
scrollResponse = executeScroll(entity -> { if (rateLimit.check() && !ctx.isActive()) { return false;
if (!keepGoing || !ctx.isActive()) { return;
private void reIndexEntitiesOfDescriptor(EntityDescriptor ed) { try { SearchRequestBuilder srb = index.getClient().prepareSearch(index.getIndexName(ed.getIndex())).setTypes(ed.getType()); srb.addSort("_doc", SortOrder.ASC); // Limit to 10 per shard srb.setSize(10); srb.setScroll(org.elasticsearch.common.unit.TimeValue.timeValueSeconds(FIVE_MINUTES)); SearchResponse searchResponse = srb.execute().actionGet(); while (TaskContext.get().isActive()) { searchResponse = reindexBlock(ed, searchResponse.getScrollId()); //Break condition: No hits are returned if (searchResponse.getHits().getHits().length == 0) { return; } } } catch (Exception t) { throw Exceptions.handle(IndexAccess.LOG, t); } }
/**
 * Converts the given search hit into an entity and supplies it to the given handler while
 * enforcing the given limit and the cancellation state of the surrounding task.
 * <p>
 * Errors while materializing or handling a single hit are reported and swallowed so that
 * processing deliberately continues with the next hit.
 *
 * @param handler          the handler to supply each accepted entity to; returning
 *                         <tt>false</tt> aborts processing
 * @param entityDescriptor the descriptor used to read the source data into the entity
 * @param ctx              the task context used to detect user cancellation
 * @param rateLimit        used to only check the cancellation flag every once in a while
 * @param lim              the limit which determines if the entity is output and if
 *                         processing continues afterwards
 * @param hit              the search hit to process
 * @return <tt>true</tt> if processing should continue, <tt>false</tt> to abort
 */
private boolean processHit(ResultHandler<? super E> handler, EntityDescriptor entityDescriptor, TaskContext ctx, RateLimit rateLimit, Limit lim, SearchHit hit) {
    try {
        // Use the declared default constructor rather than the deprecated
        // Class.newInstance() (which bypasses compile-time exception checking).
        // Any reflection failure is still caught by the catch-all below...
        E entity = clazz.getDeclaredConstructor().newInstance();
        entity.setId(hit.getId());
        entity.initSourceTracing();
        entity.setVersion(hit.getVersion());
        entity.setMatchedNamedQueries(hit.getMatchedQueries());
        entityDescriptor.readSource(entity, hit.getSourceAsMap());
        if (lim.nextRow()) {
            if (!handler.handleRow(entity)) {
                return false;
            }
            if (!lim.shouldContinue()) {
                return false;
            }
        }
        if (rateLimit.check() && !ctx.isActive()) {
            return false;
        }
    } catch (Exception e) {
        Exceptions.handle().to(IndexAccess.LOG).error(e).handle();
    }
    return true;
}
int failed = 0; for (SchemaUpdateAction action : getSchemaUpdateActions()) { if (!ctx.isActive()) { break;