/**
 * Delete a {@link Spec} being executed on {@link SpecExecutor}.
 *
 * @param deletedSpecURI URI identifying the {@link Spec} to delete
 * @param headers properties forwarded with the delete request
 *                (NOTE(review): exact semantics are implementation-defined — confirm at implementors)
 * @return a {@link Future} for the asynchronous delete operation
 */
Future<?> deleteSpec(URI deletedSpecURI, Properties headers);
// Fragment (enclosing method not visible in this chunk).
// Blocks until the asynchronous addSpec submission completes; .get() rethrows
// submission failures as ExecutionException — NOTE(review): this call site
// appears to require synchronous completion, unlike the fire-and-forget
// submissions elsewhere in this file.
producer.addSpec(jobSpec).get();
// Test fragment (enclosing test method not visible in this chunk).
// Counts the specs visible to the SpecExecutorInstance's producer; expected to
// stay at 0 because the Orchestrator ignores new FlowSpecs (no-op listener).
int specsInSEI = ((List)(sei.getProducer().get().listSpecs().get())).size();
Assert.assertTrue(specsInSEI == 0, "SpecProducer should contain 0 " + "Spec after addition because Orchestrator is a no-op listener for any new FlowSpecs");
// Re-count after the deletion performed earlier in the (unseen) test body.
specsInSEI = ((List)(sei.getProducer().get().listSpecs().get())).size();
Assert.assertTrue(specsInSEI == 0, "SpecProducer should not contain " + "Spec after deletion");
public void remove(Spec spec, Properties headers) { // TODO: Evolve logic to cache and reuse previously compiled JobSpecs // .. this will work for Identity compiler but not always for multi-hop. // Note: Current logic assumes compilation is consistent between all executions if (spec instanceof FlowSpec) { Dag<JobExecutionPlan> jobExecutionPlanDag = specCompiler.compileFlow(spec); if (jobExecutionPlanDag.isEmpty()) { _log.warn("Cannot determine an executor to delete Spec: " + spec); return; } // Delete all compiled JobSpecs on their respective Executor for (Dag.DagNode<JobExecutionPlan> dagNode: jobExecutionPlanDag.getNodes()) { JobExecutionPlan jobExecutionPlan = dagNode.getValue(); // Delete this spec on selected executor SpecProducer producer = null; try { producer = jobExecutionPlan.getSpecExecutor().getProducer().get(); Spec jobSpec = jobExecutionPlan.getJobSpec(); _log.info(String.format("Going to delete JobSpec: %s on Executor: %s", jobSpec, producer)); producer.deleteSpec(jobSpec.getUri(), headers); } catch (Exception e) { _log.error("Cannot successfully delete spec: " + jobExecutionPlan.getJobSpec() + " on executor: " + producer + " for flow: " + spec, e); } } } else { throw new RuntimeException("Spec not of type FlowSpec, cannot delete: " + spec); } }
// Fragment (enclosing method not visible in this chunk): the tail of a ternary
// selecting a JOB_ORCHESTRATED timing event (or null when no event submitter is
// configured), followed by the asynchronous, non-blocking spec submission.
getTimingEvent(TimingEvent.LauncherTimings.JOB_ORCHESTRATED) : null;
producer.addSpec(jobSpec);
/**
 * Delete a {@link Spec} being executed on {@link SpecExecutor}.
 *
 * @param deletedSpecURI URI identifying the {@link Spec} to delete
 * @param headers properties forwarded with the delete request
 *                (NOTE(review): exact semantics are implementation-defined — confirm at implementors)
 * @return a {@link Future} for the asynchronous delete operation
 */
Future<?> deleteSpec(URI deletedSpecURI, Properties headers);
/** * Submits a {@link JobSpec} to a {@link org.apache.gobblin.runtime.api.SpecExecutor}. */ private void submitJob(DagNode<JobExecutionPlan> dagNode) { JobExecutionPlan jobExecutionPlan = DagManagerUtils.getJobExecutionPlan(dagNode); jobExecutionPlan.setExecutionStatus(RUNNING); JobSpec jobSpec = DagManagerUtils.getJobSpec(dagNode); // Run this spec on selected executor SpecProducer producer = null; try { producer = DagManagerUtils.getSpecProducer(dagNode); Config jobConfig = DagManagerUtils.getJobConfig(dagNode); if (!jobConfig.hasPath(ConfigurationKeys.FLOW_EXECUTION_ID_KEY)) { log.warn("JobSpec does not contain flowExecutionId."); } log.info("Submitting job: {} on executor: {}", jobSpec, producer); Map<String, String> jobMetadata = TimingEventUtils.getJobMetadata(Maps.newHashMap(), jobExecutionPlan); log.info("Going to orchestrate JobSpec: {} on Executor: {}", jobSpec, producer); TimingEvent jobOrchestrationTimer = this.eventSubmitter.isPresent() ? this.eventSubmitter.get(). getTimingEvent(TimingEvent.LauncherTimings.JOB_ORCHESTRATED) : null; producer.addSpec(jobSpec); if (jobOrchestrationTimer != null) { jobOrchestrationTimer.stop(jobMetadata); } log.info("Orchestrated JobSpec: {} on Executor: {}", jobSpec, producer); } catch (Exception e) { log.error("Cannot submit job: {} on executor: {}", jobSpec, producer, e); } }
/**
 * Deletes a previously-orchestrated {@link Spec}: the flow is re-compiled and a
 * delete is issued for every resulting JobSpec on its respective executor.
 *
 * @param spec the {@link FlowSpec} whose compiled jobs should be deleted
 * @param headers properties forwarded with each delete request
 * @throws RuntimeException if {@code spec} is not a {@link FlowSpec}
 */
public void remove(Spec spec, Properties headers) {
  // TODO: Evolve logic to cache and reuse previously compiled JobSpecs
  // .. this will work for Identity compiler but not always for multi-hop.
  // Note: Current logic assumes compilation is consistent between all executions
  if (spec instanceof FlowSpec) {
    Dag<JobExecutionPlan> jobExecutionPlanDag = specCompiler.compileFlow(spec);
    if (jobExecutionPlanDag.isEmpty()) {
      // Nothing compiled -> no executor to target; skip with a warning.
      _log.warn("Cannot determine an executor to delete Spec: " + spec);
      return;
    }
    // Delete all compiled JobSpecs on their respective Executor
    for (Dag.DagNode<JobExecutionPlan> dagNode: jobExecutionPlanDag.getNodes()) {
      JobExecutionPlan jobExecutionPlan = dagNode.getValue();
      // Delete this spec on selected executor
      SpecProducer producer = null;
      try {
        producer = jobExecutionPlan.getSpecExecutor().getProducer().get();
        Spec jobSpec = jobExecutionPlan.getJobSpec();
        _log.info(String.format("Going to delete JobSpec: %s on Executor: %s", jobSpec, producer));
        // NOTE(review): the returned Future is not awaited, so asynchronous
        // delete failures will NOT be caught by the catch below — confirm intended.
        producer.deleteSpec(jobSpec.getUri(), headers);
      } catch (Exception e) {
        // Best-effort per node: one failed deletion does not abort the rest.
        _log.error("Cannot successfully delete spec: " + jobExecutionPlan.getJobSpec() + " on executor: " + producer + " for flow: " + spec, e);
      }
    }
  } else {
    throw new RuntimeException("Spec not of type FlowSpec, cannot delete: " + spec);
  }
}
// Fragment (enclosing method not visible in this chunk): the tail of a ternary
// selecting a JOB_ORCHESTRATED timing event (or null when no event submitter is
// configured), followed by the asynchronous, non-blocking spec submission.
getTimingEvent(TimingEvent.LauncherTimings.JOB_ORCHESTRATED) : null;
producer.addSpec(jobSpec);