/**
 * Looks up the {@link SpecProducer} associated with the given DAG node.
 *
 * <p>The producer is obtained from the node's {@code SpecExecutor}; since
 * {@code getProducer()} returns a future, {@code get()} may block until it completes.
 *
 * @param dagNode DAG node whose execution plan carries the target {@code SpecExecutor}
 * @return the producer resolved from the node's executor
 * @throws ExecutionException if the producer future completed exceptionally
 * @throws InterruptedException if the current thread is interrupted while waiting
 */
static SpecProducer getSpecProducer(DagNode<JobExecutionPlan> dagNode) throws ExecutionException, InterruptedException {
  JobExecutionPlan executionPlan = dagNode.getValue();
  return executionPlan.getSpecExecutor().getProducer().get();
}
public void remove(Spec spec, Properties headers) { // TODO: Evolve logic to cache and reuse previously compiled JobSpecs // .. this will work for Identity compiler but not always for multi-hop. // Note: Current logic assumes compilation is consistent between all executions if (spec instanceof FlowSpec) { Dag<JobExecutionPlan> jobExecutionPlanDag = specCompiler.compileFlow(spec); if (jobExecutionPlanDag.isEmpty()) { _log.warn("Cannot determine an executor to delete Spec: " + spec); return; } // Delete all compiled JobSpecs on their respective Executor for (Dag.DagNode<JobExecutionPlan> dagNode: jobExecutionPlanDag.getNodes()) { JobExecutionPlan jobExecutionPlan = dagNode.getValue(); // Delete this spec on selected executor SpecProducer producer = null; try { producer = jobExecutionPlan.getSpecExecutor().getProducer().get(); Spec jobSpec = jobExecutionPlan.getJobSpec(); _log.info(String.format("Going to delete JobSpec: %s on Executor: %s", jobSpec, producer)); producer.deleteSpec(jobSpec.getUri(), headers); } catch (Exception e) { _log.error("Cannot successfully delete spec: " + jobExecutionPlan.getJobSpec() + " on executor: " + producer + " for flow: " + spec, e); } } } else { throw new RuntimeException("Spec not of type FlowSpec, cannot delete: " + spec); } }
// NOTE(review): both assertions below check the identical condition — the producer's
// listSpecs() result is empty — yet their messages describe different phases
// ("before addition" vs "after addition"). Presumably a spec-addition step belongs
// between them, relying on the Orchestrator being a no-op listener — TODO confirm
// against the enclosing test method (not visible in this chunk).
// listSpecs() returns a future; the outer get() blocks for its resolved List.
Assert.assertTrue(((List)(sei.getProducer().get().listSpecs().get())).size() == 0, "SpecProducer should not know about " + "any Flow before addition"); Assert.assertTrue(((List)(sei.getProducer().get().listSpecs().get())).size() == 0, "SpecProducer should contain 0 " + "Spec after addition");
// Resolve the producer for this execution plan's executor; getProducer() returns a
// future, so get() may block and can throw InterruptedException/ExecutionException.
// The JobSpec fetched alongside it is the spec to be acted on by that producer.
producer = jobExecutionPlan.getSpecExecutor().getProducer().get(); Spec jobSpec = jobExecutionPlan.getJobSpec();
// Verify the SpecExecutorInstance's producer holds no specs after the add: per the
// assertion message, the Orchestrator acts as a no-op listener for new FlowSpecs,
// so nothing should have been forwarded to the producer.
int specsInSEI = ((List)(sei.getProducer().get().listSpecs().get())).size(); Assert.assertTrue(specsInSEI == 0, "SpecProducer should contain 0 " + "Spec after addition because Orchestrator is a no-op listener for any new FlowSpecs");
// Re-query after deletion; the producer should (still) be empty.
// NOTE(review): the deletion call itself is outside this chunk — TODO confirm it sits
// between these two checks in the full test.
specsInSEI = ((List)(sei.getProducer().get().listSpecs().get())).size(); Assert.assertTrue(specsInSEI == 0, "SpecProducer should not contain " + "Spec after deletion");
/**
 * Convenience accessor: resolves the producer behind a DAG node's executor.
 *
 * <p>Blocks on the executor's producer future via {@code get()}.
 *
 * @param dagNode node holding the {@link JobExecutionPlan} to query
 * @return the {@code SpecProducer} for that plan's executor
 * @throws ExecutionException if the underlying future failed
 * @throws InterruptedException if interrupted while waiting on the future
 */
static SpecProducer getSpecProducer(DagNode<JobExecutionPlan> dagNode) throws ExecutionException, InterruptedException {
  return dagNode.getValue()
      .getSpecExecutor()
      .getProducer()
      .get();
}
public void remove(Spec spec, Properties headers) { // TODO: Evolve logic to cache and reuse previously compiled JobSpecs // .. this will work for Identity compiler but not always for multi-hop. // Note: Current logic assumes compilation is consistent between all executions if (spec instanceof FlowSpec) { Dag<JobExecutionPlan> jobExecutionPlanDag = specCompiler.compileFlow(spec); if (jobExecutionPlanDag.isEmpty()) { _log.warn("Cannot determine an executor to delete Spec: " + spec); return; } // Delete all compiled JobSpecs on their respective Executor for (Dag.DagNode<JobExecutionPlan> dagNode: jobExecutionPlanDag.getNodes()) { JobExecutionPlan jobExecutionPlan = dagNode.getValue(); // Delete this spec on selected executor SpecProducer producer = null; try { producer = jobExecutionPlan.getSpecExecutor().getProducer().get(); Spec jobSpec = jobExecutionPlan.getJobSpec(); _log.info(String.format("Going to delete JobSpec: %s on Executor: %s", jobSpec, producer)); producer.deleteSpec(jobSpec.getUri(), headers); } catch (Exception e) { _log.error("Cannot successfully delete spec: " + jobExecutionPlan.getJobSpec() + " on executor: " + producer + " for flow: " + spec, e); } } } else { throw new RuntimeException("Spec not of type FlowSpec, cannot delete: " + spec); } }
// Obtain this plan's producer (blocking on the producer future) and the JobSpec it
// will operate on. InterruptedException/ExecutionException from get() are handled
// by the enclosing try/catch (not visible in this chunk).
producer = jobExecutionPlan.getSpecExecutor().getProducer().get(); Spec jobSpec = jobExecutionPlan.getJobSpec();