// Per-subtask execution vertices of the map stage and the sink stage.
// NOTE(review): execMapVertex / execSinkVertex are declared outside this fragment — confirm
// they are initialized (non-null) before this point in the enclosing method.
ExecutionVertex[] mapTaskVertices = execMapVertex.getTaskVertices();
ExecutionVertex[] sinkTaskVertices = execSinkVertex.getTaskVertices();
/** * Cancels all currently running vertex executions. * * @return A future that is complete once all tasks have canceled. */ public CompletableFuture<Void> cancelWithFuture() { // we collect all futures from the task cancellations CompletableFuture<ExecutionState>[] futures = Arrays.stream(getTaskVertices()) .map(ExecutionVertex::cancel) .<CompletableFuture<ExecutionState>>toArray(CompletableFuture[]::new); // return a conjunct future, which is complete once all individual tasks are canceled return CompletableFuture.allOf(futures); }
/** * Cancels all currently running vertex executions. * * @return A future that is complete once all tasks have canceled. */ public CompletableFuture<Void> cancelWithFuture() { // we collect all futures from the task cancellations CompletableFuture<ExecutionState>[] futures = Arrays.stream(getTaskVertices()) .map(ExecutionVertex::cancel) .<CompletableFuture<ExecutionState>>toArray(CompletableFuture[]::new); // return a conjunct future, which is complete once all individual tasks are canceled return CompletableFuture.allOf(futures); }
/** * Cancels all currently running vertex executions. * * @return A future that is complete once all tasks have canceled. */ public CompletableFuture<Void> cancelWithFuture() { // we collect all futures from the task cancellations CompletableFuture<ExecutionState>[] futures = Arrays.stream(getTaskVertices()) .map(ExecutionVertex::cancel) .<CompletableFuture<ExecutionState>>toArray(CompletableFuture[]::new); // return a conjunct future, which is complete once all individual tasks are canceled return CompletableFuture.allOf(futures); }
/**
 * Cancels all currently running vertex executions.
 */
public void cancel() {
	final ExecutionVertex[] vertices = getTaskVertices();
	for (int i = 0; i < vertices.length; i++) {
		vertices[i].cancel();
	}
}
/**
 * Cancels all currently running vertex executions.
 */
public void cancel() {
	final ExecutionVertex[] vertices = getTaskVertices();
	for (int i = 0; i < vertices.length; i++) {
		vertices[i].cancel();
	}
}
/**
 * Fails all currently running vertex executions with the given cause.
 */
public void fail(Throwable t) {
	final ExecutionVertex[] vertices = getTaskVertices();
	for (int i = 0; i < vertices.length; i++) {
		vertices[i].fail(t);
	}
}
/**
 * Fails all currently running vertex executions with the given cause.
 */
public void fail(Throwable t) {
	final ExecutionVertex[] vertices = getTaskVertices();
	for (int i = 0; i < vertices.length; i++) {
		vertices[i].fail(t);
	}
}
/**
 * Cancels all currently running vertex executions.
 */
public void cancel() {
	final ExecutionVertex[] vertices = getTaskVertices();
	for (int i = 0; i < vertices.length; i++) {
		vertices[i].cancel();
	}
}
/**
 * Fails all currently running vertex executions with the given cause.
 */
public void fail(Throwable t) {
	final ExecutionVertex[] vertices = getTaskVertices();
	for (int i = 0; i < vertices.length; i++) {
		vertices[i].fail(t);
	}
}
/**
 * Cancels all currently running vertex executions.
 */
public void cancel() {
	final ExecutionVertex[] vertices = getTaskVertices();
	for (int i = 0; i < vertices.length; i++) {
		vertices[i].cancel();
	}
}
/**
 * Fails all currently running vertex executions with the given cause.
 */
public void fail(Throwable t) {
	final ExecutionVertex[] vertices = getTaskVertices();
	for (int i = 0; i < vertices.length; i++) {
		vertices[i].fail(t);
	}
}
/**
 * Puts every vertex of the given job vertices into one single failover region.
 *
 * <p>Used when the graph cannot be decomposed into smaller regions (e.g. because of
 * co-location constraints from iterations), so any failure restarts the job as a whole.
 *
 * @param jobVertices all job vertices of the execution graph
 */
private void makeAllOneRegion(List<ExecutionJobVertex> jobVertices) {
	LOG.warn("Cannot decompose ExecutionGraph into individual failover regions due to use of " +
			"Co-Location constraints (iterations). Job will fail over as one holistic unit.");

	final ArrayList<ExecutionVertex> allVertices = new ArrayList<>();

	for (ExecutionJobVertex ejv : jobVertices) {
		// save some incremental size growing
		allVertices.ensureCapacity(allVertices.size() + ejv.getParallelism());

		for (ExecutionVertex ev : ejv.getTaskVertices()) {
			allVertices.add(ev);
		}
	}

	// one region spanning the whole graph; regionFailLimit caps its restart attempts
	final FailoverRegion singleRegion = new FailoverRegion(executionGraph, executor, allVertices, regionFailLimit);
	for (ExecutionVertex ev : allVertices) {
		vertexToRegion.put(ev, singleRegion);
	}
}
/**
 * Puts every vertex of the given job vertices into one single failover region.
 *
 * <p>Used when the graph cannot be decomposed into smaller regions (e.g. because of
 * co-location constraints from iterations), so any failure restarts the job as a whole.
 *
 * @param jobVertices all job vertices of the execution graph
 */
private void makeAllOneRegion(List<ExecutionJobVertex> jobVertices) {
	LOG.warn("Cannot decompose ExecutionGraph into individual failover regions due to use of " +
			"Co-Location constraints (iterations). Job will fail over as one holistic unit.");

	final ArrayList<ExecutionVertex> allVertices = new ArrayList<>();

	for (ExecutionJobVertex ejv : jobVertices) {
		// save some incremental size growing
		allVertices.ensureCapacity(allVertices.size() + ejv.getParallelism());

		for (ExecutionVertex ev : ejv.getTaskVertices()) {
			allVertices.add(ev);
		}
	}

	// one region spanning the whole graph, mapped in for every vertex
	final FailoverRegion singleRegion = new FailoverRegion(executionGraph, executor, allVertices);
	for (ExecutionVertex ev : allVertices) {
		vertexToRegion.put(ev, singleRegion);
	}
}
/** * Cancels all currently running vertex executions. * * @return A future that is complete once all tasks have canceled. */ public Future<Void> cancelWithFuture() { // we collect all futures from the task cancellations ArrayList<Future<ExecutionState>> futures = new ArrayList<>(parallelism); // cancel each vertex for (ExecutionVertex ev : getTaskVertices()) { futures.add(ev.cancel()); } // return a conjunct future, which is complete once all individual tasks are canceled return FutureUtils.waitForAll(futures); }
/**
 * Creates an archived (immutable) snapshot of the given job vertex.
 */
public ArchivedExecutionJobVertex(ExecutionJobVertex jobVertex) {
	this.id = jobVertex.getJobVertexId();
	this.name = jobVertex.getJobVertex().getName();
	this.parallelism = jobVertex.getParallelism();
	this.maxParallelism = jobVertex.getMaxParallelism();

	// archive every subtask of the vertex
	this.taskVertices = new ArchivedExecutionVertex[jobVertex.getTaskVertices().length];
	for (int i = 0; i < taskVertices.length; i++) {
		taskVertices[i] = jobVertex.getTaskVertices()[i].archive();
	}

	archivedUserAccumulators = jobVertex.getAggregatedUserAccumulatorsStringified();
}
/**
 * Creates an archived (immutable) snapshot of the given job vertex.
 */
public ArchivedExecutionJobVertex(ExecutionJobVertex jobVertex) {
	this.id = jobVertex.getJobVertexId();
	this.name = jobVertex.getJobVertex().getName();
	this.parallelism = jobVertex.getParallelism();
	this.maxParallelism = jobVertex.getMaxParallelism();

	// archive every subtask of the vertex
	this.taskVertices = new ArchivedExecutionVertex[jobVertex.getTaskVertices().length];
	for (int i = 0; i < taskVertices.length; i++) {
		taskVertices[i] = jobVertex.getTaskVertices()[i].archive();
	}

	archivedUserAccumulators = jobVertex.getAggregatedUserAccumulatorsStringified();
}
/**
 * Creates an archived (immutable) snapshot of the given job vertex.
 */
public ArchivedExecutionJobVertex(ExecutionJobVertex jobVertex) {
	this.id = jobVertex.getJobVertexId();
	this.name = jobVertex.getJobVertex().getName();
	this.parallelism = jobVertex.getParallelism();
	this.maxParallelism = jobVertex.getMaxParallelism();

	// archive every subtask of the vertex
	this.taskVertices = new ArchivedExecutionVertex[jobVertex.getTaskVertices().length];
	for (int i = 0; i < taskVertices.length; i++) {
		taskVertices[i] = jobVertex.getTaskVertices()[i].archive();
	}

	archivedUserAccumulators = jobVertex.getAggregatedUserAccumulatorsStringified();
}
/**
 * Creates an archived (immutable) snapshot of the given job vertex.
 */
public ArchivedExecutionJobVertex(ExecutionJobVertex jobVertex) {
	this.id = jobVertex.getJobVertexId();
	this.name = jobVertex.getJobVertex().getName();
	this.parallelism = jobVertex.getParallelism();
	this.maxParallelism = jobVertex.getMaxParallelism();
	this.operatorDescriptors = jobVertex.getOperatorDescriptors();

	// archive every subtask of the vertex
	this.taskVertices = new ArchivedExecutionVertex[jobVertex.getTaskVertices().length];
	for (int i = 0; i < taskVertices.length; i++) {
		taskVertices[i] = jobVertex.getTaskVertices()[i].archive();
	}

	archivedUserAccumulators = jobVertex.getAggregatedUserAccumulatorsStringified();
}
/**
 * Looks up the current status of the execution vertex identified by the given id.
 *
 * @param executionVertexID id of the execution vertex to query, must not be null
 * @return the current status of that execution vertex
 * @throws IllegalArgumentException if no job vertex with the given job vertex id exists
 */
@Override
public ExecutionVertexStatus getExecutionVertexStatus(ExecutionVertexID executionVertexID) {
	checkNotNull(executionVertexID);

	// resolve the job vertex this execution vertex belongs to
	final ExecutionJobVertex jobVertex = executionGraph.getJobVertex(executionVertexID.getJobVertexID());
	if (jobVertex == null) {
		throw new IllegalArgumentException("Cannot find any vertex with id " + executionVertexID.getJobVertexID());
	}

	return jobVertex.getTaskVertices()[executionVertexID.getSubTaskIndex()].getCurrentStatus();
}