// Collect source vertices (those with no inputs) — presumably used as the set of
// vertices that trigger checkpoints/savepoints; confirm against the enclosing method.
// NOTE(review): fragment — the closing brace of this if-block lies outside this chunk.
if (vertex.isInputVertex()) { triggerVertices.add(vertex.getID());
/**
 * Verifies that two structurally identical sources do not collide on their
 * generated node hashes / vertex IDs.
 *
 * <pre>
 * [ (src0) ] --\
 *              +--> [ (sink) ]
 * [ (src1) ] --/
 * </pre>
 */
@Test
public void testNodeHashIdenticalSources() throws Exception {
    StreamExecutionEnvironment env = StreamExecutionEnvironment.createLocalEnvironment();
    env.setParallelism(4);
    env.disableOperatorChaining();

    DataStream<String> firstSource = env.addSource(new NoOpSourceFunction());
    DataStream<String> secondSource = env.addSource(new NoOpSourceFunction());
    firstSource.union(secondSource).addSink(new NoOpSinkFunction());

    JobGraph jobGraph = env.getStreamGraph().getJobGraph();
    List<JobVertex> sortedVertices = jobGraph.getVerticesSortedTopologicallyFromSources();

    JobVertex firstVertex = sortedVertices.get(0);
    JobVertex secondVertex = sortedVertices.get(1);

    // Both leading vertices must be sources with distinct, non-null IDs.
    assertTrue(firstVertex.isInputVertex());
    assertTrue(secondVertex.isInputVertex());
    assertNotNull(firstVertex.getID());
    assertNotNull(secondVertex.getID());
    assertNotEquals(firstVertex.getID(), secondVertex.getID());
}
// Fixture sanity check: 'src' must be a source vertex (no inputs); if not,
// the test setup itself is broken rather than the behavior under test.
assertTrue("Unexpected vertex type. Test setup is broken.", src.isInputVertex());
private void scheduleLazy(SlotProvider slotProvider) throws NoResourceAvailableException { // simply take the vertices without inputs. for (ExecutionJobVertex ejv : verticesInCreationOrder) { if (ejv.getJobVertex().isInputVertex()) { ejv.scheduleAll(slotProvider, allowQueuedScheduling); } } }
private CompletableFuture<Void> scheduleLazy(SlotProvider slotProvider) { final ArrayList<CompletableFuture<Void>> schedulingFutures = new ArrayList<>(numVerticesTotal); // simply take the vertices without inputs. for (ExecutionJobVertex ejv : verticesInCreationOrder) { if (ejv.getJobVertex().isInputVertex()) { final CompletableFuture<Void> schedulingJobVertexFuture = ejv.scheduleAll( slotProvider, allowQueuedScheduling, LocationPreferenceConstraint.ALL, // since it is an input vertex, the input based location preferences should be empty Collections.emptySet()); schedulingFutures.add(schedulingJobVertexFuture); } } return FutureUtils.waitForAll(schedulingFutures); }
private CompletableFuture<Void> scheduleLazy(SlotProvider slotProvider) { final ArrayList<CompletableFuture<Void>> schedulingFutures = new ArrayList<>(numVerticesTotal); // simply take the vertices without inputs. for (ExecutionJobVertex ejv : verticesInCreationOrder) { if (ejv.getJobVertex().isInputVertex()) { final CompletableFuture<Void> schedulingJobVertexFuture = ejv.scheduleAll( slotProvider, allowQueuedScheduling, LocationPreferenceConstraint.ALL, // since it is an input vertex, the input based location preferences should be empty Collections.emptySet()); schedulingFutures.add(schedulingJobVertexFuture); } } return FutureUtils.waitForAll(schedulingFutures); }
/**
 * Kicks off scheduling: collects one {@link ExecutionVertexID} per parallel
 * subtask of every source vertex (topological order) and schedules them one
 * by one.
 */
@Override
public void onSchedulingStarted() {
    final List<ExecutionVertexID> sourceExecutionVertices = new ArrayList<>();

    for (JobVertex jobVertex : jobGraph.getVerticesSortedTopologicallyFromSources()) {
        if (!jobVertex.isInputVertex()) {
            continue;
        }
        // One execution vertex per parallel subtask of the source.
        for (int subtaskIndex = 0; subtaskIndex < jobVertex.getParallelism(); subtaskIndex++) {
            sourceExecutionVertices.add(new ExecutionVertexID(jobVertex.getID(), subtaskIndex));
        }
    }

    scheduleOneByOne(sourceExecutionVertices);
}
/**
 * Attaches the given topologically sorted job vertices to this execution graph,
 * creating the corresponding {@code ExecutionJobVertex} instances, wiring them
 * to their predecessors, and registering their produced intermediate results.
 *
 * @param topologicallySorted job vertices in topological order from the sources
 * @throws JobException if two produced intermediate data sets share the same ID
 */
public void attachJobGraph(List<JobVertex> topologicallySorted) throws JobException {
    LOG.debug("Attaching {} topologically sorted vertices to existing job graph with {} " +
            "vertices and {} intermediate results.",
        topologicallySorted.size(), tasks.size(), intermediateResults.size());

    final ArrayList<ExecutionJobVertex> newExecJobVertices = new ArrayList<>(topologicallySorted.size());

    createExecutionJobVertex(topologicallySorted);

    for (JobVertex jobVertex : topologicallySorted) {
        // A single non-stoppable source makes the whole job non-stoppable.
        if (jobVertex.isInputVertex() && !jobVertex.isStoppable()) {
            this.isStoppable = false;
        }

        ExecutionJobVertex ejv = tasks.get(jobVertex.getID());
        ejv.connectToPredecessors(this.intermediateResults);

        for (IntermediateResult res : ejv.getProducedDataSets()) {
            IntermediateResult previousDataSet = this.intermediateResults.putIfAbsent(res.getId(), res);
            if (previousDataSet != null) {
                // BUG FIX: the format arguments were swapped — 'previousDataSet' is the
                // already-registered result and 'res' is the newly produced one, but the
                // original passed (res, previousDataSet) into "previous=[%s] / new=[%s]".
                throw new JobException(String.format("Encountered two intermediate data set with ID %s : previous=[%s] / new=[%s]",
                    res.getId(), previousDataSet, res));
            }
        }

        this.verticesInCreationOrder.add(ejv);
        this.numVerticesTotal += ejv.getParallelism();
        newExecJobVertices.add(ejv);
    }

    terminationFuture = new CompletableFuture<>();
    failoverStrategy.notifyNewVertices(newExecJobVertices);
}
// A job stays stoppable only while every source vertex is stoppable; one
// non-stoppable source flips the flag for the whole graph.
// NOTE(review): fragment — the closing brace of this if-block lies outside this chunk.
if (jobVertex.isInputVertex() && !jobVertex.isStoppable()) { this.isStoppable = false;
// One non-stoppable source vertex marks the entire job as non-stoppable.
// NOTE(review): fragment — the closing brace of this if-block lies outside this chunk.
if (jobVertex.isInputVertex() && !jobVertex.isStoppable()) { this.isStoppable = false;
// The stoppable flag is cleared as soon as any source vertex is non-stoppable.
// NOTE(review): fragment — the closing brace of this if-block lies outside this chunk.
if (jobVertex.isInputVertex() && !jobVertex.isStoppable()) { this.isStoppable = false;
private boolean isReadyToSchedule(ExecutionVertexID vertexID) { ExecutionVertexStatus vertexStatus = scheduler.getExecutionVertexStatus(vertexID); // only CREATED vertices can be scheduled if (vertexStatus.getExecutionState() != ExecutionState.CREATED) { return false; } // source vertices can be scheduled at once if (jobGraph.findVertexByID(vertexID.getJobVertexID()).isInputVertex()) { return true; } // query whether the inputs are ready overall return inputTracker.areInputsReady(vertexID); }
// Emit the vertex description; non-source vertices then enter this branch —
// presumably to serialize their inputs/predecessors (body lies outside this chunk; confirm).
gen.writeStringField("description", description); if (!vertex.isInputVertex()) {
// Source vertices (no inputs) are added to 'triggerVertices' — presumably the
// checkpoint-trigger set; confirm against the enclosing method.
// NOTE(review): fragment — the closing brace of this if-block lies outside this chunk.
if (vertex.isInputVertex()) { triggerVertices.add(vertex.getID());
// Register every input (source) vertex ID in 'triggerVertices'.
// NOTE(review): fragment — the closing brace of this if-block lies outside this chunk.
if (vertex.isInputVertex()) { triggerVertices.add(vertex.getID());
// Only vertices without inputs contribute their IDs to 'triggerVertices'.
// NOTE(review): fragment — the closing brace of this if-block lies outside this chunk.
if (vertex.isInputVertex()) { triggerVertices.add(vertex.getID());
// Write the "description" field; sources skip the following branch, which
// presumably serializes input connections (body lies outside this chunk; confirm).
gen.writeStringField("description", description); if (!vertex.isInputVertex()) {
// After the description field, only non-source vertices take this branch —
// likely to emit their predecessor/input details (body outside this chunk; confirm).
gen.writeStringField("description", description); if (!vertex.isInputVertex()) {
// Serialize the description; the guarded block that follows runs only for
// vertices with inputs (its body lies outside this chunk).
gen.writeStringField("description", description); if (!vertex.isInputVertex()) {