JsonObject jobSpecJson = new JsonObject(); JobSpec jobSpec = jobExecutionPlan.getJobSpec(); String uri = (jobSpec.getUri() != null) ? jobSpec.getUri().toString() : null; jobSpecJson.addProperty(SerializationConstants.JOB_SPEC_URI_KEY, uri); jobSpecJson.addProperty(SerializationConstants.JOB_SPEC_VERSION_KEY, jobSpec.getVersion()); jobSpecJson.addProperty(SerializationConstants.JOB_SPEC_DESCRIPTION_KEY, jobSpec.getDescription()); String jobSpecTemplateURI = (jobSpec.getTemplateURI().isPresent()) ? jobSpec.getTemplateURI().get().toString() : null; jobSpecJson.addProperty(SerializationConstants.JOB_SPEC_TEMPLATE_URI_KEY, jobSpecTemplateURI); jobSpecJson.addProperty(SerializationConstants.JOB_SPEC_CONFIG_KEY, jobSpec.getConfig().root().render(ConfigRenderOptions.concise())); jobExecutionPlanJson.add(SerializationConstants.JOB_SPEC_KEY, jobSpecJson); Config specExecutorConfig;
JobSpec.Builder jobSpecBuilder = JobSpec.builder(jobSpecURIGenerator(flowSpec)) .withConfig(flowSpec.getConfig()) .withDescription(flowSpec.getDescription()) try { jobSpec = new ResolvedJobSpec(jobSpecBuilder.build(), templateCatalog.get()); log.info("Resolved JobSpec properties are: " + jobSpec.getConfigAsProperties()); } catch (SpecNotFoundException | JobTemplate.TemplateException e) { throw new RuntimeException("Could not resolve template in JobSpec from TemplateCatalog", e); log.info("Unresolved JobSpec properties are: " + jobSpec.getConfigAsProperties()); jobSpec.setConfig(jobSpec.getConfig().withoutPath(ConfigurationKeys.JOB_SCHEDULE_KEY)); jobSpec.setConfig(jobSpec.getConfig() .withValue(ConfigurationKeys.JOB_NAME_KEY, flowSpec.getConfig().getValue(ConfigurationKeys.FLOW_NAME_KEY))); jobSpec.setConfig(jobSpec.getConfig() .withValue(ConfigurationKeys.JOB_GROUP_KEY, flowSpec.getConfig().getValue(ConfigurationKeys.FLOW_GROUP_KEY))); jobSpec.setConfig(jobSpec.getConfig().withValue(ConfigurationKeys.FLOW_EXECUTION_ID_KEY, ConfigValueFactory.fromAnyRef(flowExecutionId))); jobSpec.setConfigAsProperties(ConfigUtils.configToProperties(jobSpec.getConfig())); return jobSpec;
/** Full human-readable identity: {@code <uri>/<version>[<description>]}. */
public String toLongString() {
  return String.format("%s/%s[%s]", getUri(), getVersion(), getDescription());
}
/** Compact identity without the description: {@code <uri>/<version>}. */
public String toShortString() {
  return String.format("%s/%s", getUri(), getVersion());
}
/**
 * Resolve the job spec using classpath templates as well as any templates available in the input {@link JobCatalog}.
 *
 * @param other the unresolved {@link JobSpec} to resolve
 * @param catalog catalog whose templates may be used during resolution
 * @throws SpecNotFoundException if a referenced template cannot be found
 * @throws JobTemplate.TemplateException if template resolution fails
 */
public ResolvedJobSpec(JobSpec other, JobCatalog catalog)
    throws SpecNotFoundException, JobTemplate.TemplateException {
  // Resolve exactly once and delegate. The original called resolveConfig(other, catalog)
  // twice (once for the Config, once for the Properties), doing the potentially expensive
  // template-resolution work two times per construction.
  this(other, resolveConfig(other, catalog));
}

// NOTE(review): the this(...) delegation requires resolveConfig to be static — confirm.
private ResolvedJobSpec(JobSpec other, Config resolvedConfig) {
  super(other.getUri(), other.getVersion(), other.getDescription(), resolvedConfig,
      ConfigUtils.configToProperties(resolvedConfig), other.getTemplateURI(), other.getMetadata());
  this.originalJobSpec = other;
}
JobCatalogListenersList ll = new JobCatalogListenersList(); JobSpec js1_1 = JobSpec.builder("test:job1").build(); JobSpec js1_2 = JobSpec.builder("test:job1").withVersion("2").build(); JobSpec js2 = JobSpec.builder("test:job2").build(); .onDeleteJob(Mockito.eq(js2.getUri()), Mockito.eq(js2.getVersion())); ll.onAddJob(js2); ll.onUpdateJob(js1_2); ll.onDeleteJob(js2.getUri(), js2.getVersion()); Mockito.verify(l1).onDeleteJob(Mockito.eq(js2.getUri()), Mockito.eq(js2.getVersion())); Mockito.verify(l2).onDeleteJob(Mockito.eq(js2.getUri()), Mockito.eq(js2.getVersion())); Mockito.verify(l3).onDeleteJob(Mockito.eq(js2.getUri()), Mockito.eq(js2.getVersion()));
/**
 * Translate a {@link JobSpec} into its Avro wire form, tagging it with the given verb.
 * Only {@link JobSpec} instances are supported; any other {@link Spec} subtype is rejected.
 */
private AvroJobSpec convertToAvroJobSpec(Spec spec, SpecExecutor.Verb verb) {
  if (!(spec instanceof JobSpec)) {
    throw new RuntimeException("Unsupported spec type " + spec.getClass());
  }
  JobSpec jobSpec = (JobSpec) spec;

  AvroJobSpec.Builder builder = AvroJobSpec.newBuilder();
  builder.setUri(jobSpec.getUri().toString());
  builder.setVersion(jobSpec.getVersion());
  builder.setDescription(jobSpec.getDescription());
  builder.setProperties(Maps.fromProperties(jobSpec.getConfigAsProperties()));
  // The verb (ADD/UPDATE/DELETE) rides along in the metadata map.
  builder.setMetadata(ImmutableMap.of(VERB_KEY, verb.name()));
  if (jobSpec.getTemplateURI().isPresent()) {
    builder.setTemplateUri(jobSpec.getTemplateURI().get().toString());
  }
  return builder.build();
}
}
JobSpec js1Deserialized = SerializationUtils.deserialize(serializedBytes); Assert.assertEquals(js1Deserialized.getUri().toString(), js1.getUri().toString()); Assert.assertEquals(js1Deserialized.getVersion(), js1.getVersion()); Assert.assertNotNull(js1Deserialized.getDescription()); Assert.assertTrue(js1Deserialized.getDescription().contains(js1.getDescription())); Assert.assertEquals(js1Deserialized.getConfig().entrySet().size(), 0); Assert.assertEquals(js1Deserialized.getConfigAsProperties().size(), 0); JobSpec js2Deserialized = SerializationUtils.deserialize(serializedBytes); Assert.assertEquals(js2Deserialized.getUri().toString(), js2.getUri().toString()); Assert.assertEquals(js2Deserialized.getVersion(), js2.getVersion()); Assert.assertEquals(js2Deserialized.getDescription(), js2.getDescription()); Assert.assertEquals(js2Deserialized.getConfig().getString("a1"), "a_value"); Assert.assertEquals(js2Deserialized.getConfig().getLong("a2.b"), 1L); Assert.assertEquals(js2Deserialized.getConfig().getDouble("a2.c.d"), 12.34); Assert.assertTrue(js2Deserialized.getConfig().getBoolean("a2.c.d2"));
JobSpec js1_1 = JobSpec.builder("test_job1").withVersion("1").build(); JobSpec js1_2 = JobSpec.builder("test_job1").withVersion("2").build(); JobSpec js2 = JobSpec.builder("test_job2").withVersion("1").build(); JobSpec js3 = JobSpec.builder("test_job3").withVersion("1").withTemplate(new URI("FS:///job3.template")) .withConfig(ConfigBuilder.create().addPrimitive("job.template", "FS:///job3.template").build()).build(); Assert.assertTrue(specs.containsKey(js1_1.getUri())); JobSpec js1_1_notified = specs.get(js1_1.getUri()); Assert.assertTrue(ConfigUtils.verifySubset(js1_1_notified.getConfig(), js1_1.getConfig())); Assert.assertEquals(js1_1.getVersion(), js1_1_notified.getVersion()); Assert.assertTrue(specs.containsKey(js1_2.getUri())); JobSpec js1_2_notified = specs.get(js1_2.getUri()); Assert.assertTrue(ConfigUtils.verifySubset(js1_2_notified.getConfig(), js1_2.getConfig())); Assert.assertEquals(js1_2.getVersion(), js1_2_notified.getVersion()); Assert.assertTrue(specs.containsKey(js2.getUri())); JobSpec js2_notified = specs.get(js2.getUri()); Assert.assertTrue(ConfigUtils.verifySubset(js2_notified.getConfig(), js2.getConfig())); Assert.assertEquals(js2.getVersion(), js2_notified.getVersion()); cat.remove(js2.getUri()); Assert.assertFalse(specs.containsKey(js2.getUri())); Assert.assertTrue(specs.containsKey(js3.getUri())); JobSpec js3_notified = specs.get(js3.getUri()); Assert.assertTrue(ConfigUtils.verifySubset(js3_notified.getConfig(), js3.getConfig())); Assert.assertEquals(js3.getVersion(), js3_notified.getVersion()); ResolvedJobSpec js3_resolved = new ResolvedJobSpec(js3_notified, cat);
private void fetchJobSpecs() throws ExecutionException, InterruptedException { List<Pair<SpecExecutor.Verb, Spec>> changesSpecs = (List<Pair<SpecExecutor.Verb, Spec>>) this.specConsumer.changedSpecs().get(); // propagate thread interruption so that caller will exit from loop if (Thread.interrupted()) { throw new InterruptedException(); } for (Pair<SpecExecutor.Verb, Spec> entry : changesSpecs) { SpecExecutor.Verb verb = entry.getKey(); if (verb.equals(SpecExecutor.Verb.ADD)) { // Handle addition JobSpec jobSpec = (JobSpec) entry.getValue(); postNewJobConfigArrival(jobSpec.getUri().toString(), jobSpec.getConfigAsProperties()); } else if (verb.equals(SpecExecutor.Verb.UPDATE)) { // Handle update JobSpec jobSpec = (JobSpec) entry.getValue(); postUpdateJobConfigArrival(jobSpec.getUri().toString(), jobSpec.getConfigAsProperties()); } else if (verb.equals(SpecExecutor.Verb.DELETE)) { // Handle delete Spec anonymousSpec = (Spec) entry.getValue(); postDeleteJobConfigArrival(anonymousSpec.getUri().toString(), new Properties()); } } }
/**
 * Build a {@link JobExecutionUpdatable} snapshot from the given {@link JobSpec}, stamping
 * the current wall-clock time and a freshly generated job id derived from the spec's
 * job name property.
 */
public static JobExecutionUpdatable createFromJobSpec(JobSpec jobSpec) {
  long launchTime = System.currentTimeMillis();
  String jobName = JobState.getJobNameFromProps(jobSpec.getConfigAsProperties());
  String jobId = JobLauncherUtils.newJobId(jobName);
  return new JobExecutionUpdatable(jobSpec.getUri(), jobSpec.getVersion(), launchTime, jobId);
}
}
/** Store the spec keyed by its URI; returns the previously mapped spec, or {@code null} if none. */
@Override
protected JobSpec doPut(JobSpec jobSpec) {
  JobSpec previous = this.jobSpecs.put(jobSpec.getUri(), jobSpec);
  return previous;
}
/** * Suppose in the testing routine, each JobSpec will at least have either config or properties. * @param jobConfigs * @return */ private List<Properties> convertJobSpecList2PropList(List<JobSpec> jobConfigs) { List<Properties> result = Lists.newArrayList(); for (JobSpec js : jobConfigs) { Properties propToBeAdded; if (js.getConfigAsProperties() != null) { propToBeAdded = js.getConfigAsProperties(); } else { propToBeAdded = ConfigUtils.configToProperties(js.getConfig()); } // For the testing purpose, added it back when doing the comparison. propToBeAdded.setProperty(ConfigurationKeys.JOB_CONFIG_FILE_PATH_KEY, js.getUri().toString()); result.add(propToBeAdded); } return result; } }
/**
 * Forward a job-deletion notification to the spec queue as a DELETE verb, wrapped in a
 * tombstone {@link JobSpec} carrying the deleted job's URI/version and an empty config.
 */
@Override
public void onDeleteJob(URI deletedJobURI, String deletedJobVersion) {
  super.onDeleteJob(deletedJobURI, deletedJobVersion);
  try {
    JobSpec tombstone = JobSpec.builder(deletedJobURI)
        .withVersion(deletedJobVersion)
        .withConfigAsProperties(new Properties())
        .build();
    _jobSpecQueue.put(new ImmutablePair<SpecExecutor.Verb, Spec>(SpecExecutor.Verb.DELETE, tombstone));
    _metrics.jobSpecEnqCount.incrementAndGet();
  } catch (InterruptedException e) {
    // Restore the interrupt flag so callers can observe the interruption.
    Thread.currentThread().interrupt();
  }
}
/**
 * Verifies that an SLA {@link GobblinTrackingEvent} is parsed into exactly one JobSpec
 * whose URI combines the monitor's base URI with the dataset URI, and whose config picks
 * up values mapped from the event metadata.
 */
@Test
public void testParseJobSpec() throws Exception {
  // Monitor rooted at /base/URI; the last argument maps event metadata key "metadataKey1"
  // into the job config under the key "key1".
  SLAEventKafkaJobMonitor monitor = new SLAEventKafkaJobMonitor("topic", null, new URI("/base/URI"),
      HighLevelConsumerTest.getSimpleConfig(Optional.of(KafkaJobMonitor.KAFKA_JOB_MONITOR_PREFIX)),
      new NoopSchemaVersionWriter(), Optional.<Pattern>absent(), Optional.<Pattern>absent(), this.templateURI,
      ImmutableMap.of("metadataKey1", "key1"));
  // Metrics must be built before parseJobSpec and shut down at the end.
  monitor.buildMetricsContextAndMetrics();
  // Event carries both the mapped key ("metadataKey1") and a literal "key1" entry; the
  // mapped value ("value1") is the one expected to win in the parsed config below.
  GobblinTrackingEvent event = createSLAEvent("DatasetPublish", new URI("/data/myDataset"),
      ImmutableMap.of("metadataKey1","value1","key1","value2"));
  Collection<Either<JobSpec, URI>> jobSpecs = monitor.parseJobSpec(event);
  Assert.assertEquals(jobSpecs.size(), 1);
  JobSpec jobSpec = (JobSpec) jobSpecs.iterator().next().get();
  // Spec URI = monitor base URI + dataset URI path.
  Assert.assertEquals(jobSpec.getUri(), new URI("/base/URI/data/myDataset"));
  Assert.assertEquals(jobSpec.getTemplateURI().get(), templateURI);
  // should insert configuration from metadata
  Assert.assertEquals(jobSpec.getConfig().getString("key1"), "value1");
  monitor.shutdownMetrics();
}
for (JobTemplate jobTemplate: this.jobTemplates) { String jobSpecUri = Files.getNameWithoutExtension(new Path(jobTemplate.getUri()).getName()); jobExecutionPlans.add(new JobExecutionPlan(JobSpec.builder(jobSpecUri).withConfig(jobTemplate.getRawTemplateConfig()). withVersion("1").withTemplate(jobTemplate.getUri()).build(), specExecutor)); Assert.assertEquals(dag.getEndNodes().size(), 1); Assert.assertEquals(dag.getNodes().size(), 4); String startNodeName = new Path(dag.getStartNodes().get(0).getValue().getJobSpec().getUri()).getName(); Assert.assertEquals(startNodeName, "job1"); String templateUri = new Path(dag.getStartNodes().get(0).getValue().getJobSpec().getTemplateURI().get()).getName(); Assert.assertEquals(templateUri, "job1.job"); String endNodeName = new Path(dag.getEndNodes().get(0).getValue().getJobSpec().getUri()).getName(); Assert.assertEquals(endNodeName, "job4"); templateUri = new Path(dag.getEndNodes().get(0).getValue().getJobSpec().getTemplateURI().get()).getName(); Assert.assertEquals(templateUri, "job4.job"); Set<String> nodeSet = new HashSet<>(); for (Dag.DagNode<JobExecutionPlan> node: nextNodes) { nodeSet.add(new Path(node.getValue().getJobSpec().getUri()).getName()); Dag.DagNode<JobExecutionPlan> nextNode = dag.getChildren(node).get(0); Assert.assertEquals(new Path(nextNode.getValue().getJobSpec().getUri()).getName(), "job4");
/**
 * Assemble per-job event metadata: the flow-level metadata plus the flow/job identifiers
 * read from the job's config, and the canonical class name of its spec executor.
 */
static Map<String, String> getJobMetadata(Map<String, String> flowMetadata, JobExecutionPlan jobExecutionPlan) {
  Config jobConfig = jobExecutionPlan.getJobSpec().getConfig();
  Map<String, String> jobMetadata = Maps.newHashMap(flowMetadata);
  jobMetadata.put(TimingEvent.FlowEventConstants.FLOW_NAME_FIELD,
      jobConfig.getString(ConfigurationKeys.FLOW_NAME_KEY));
  jobMetadata.put(TimingEvent.FlowEventConstants.FLOW_GROUP_FIELD,
      jobConfig.getString(ConfigurationKeys.FLOW_GROUP_KEY));
  jobMetadata.put(TimingEvent.FlowEventConstants.FLOW_EXECUTION_ID_FIELD,
      jobConfig.getString(ConfigurationKeys.FLOW_EXECUTION_ID_KEY));
  jobMetadata.put(TimingEvent.FlowEventConstants.JOB_NAME_FIELD,
      jobConfig.getString(ConfigurationKeys.JOB_NAME_KEY));
  jobMetadata.put(TimingEvent.FlowEventConstants.JOB_GROUP_FIELD,
      jobConfig.getString(ConfigurationKeys.JOB_GROUP_KEY));
  jobMetadata.put(TimingEvent.FlowEventConstants.SPEC_EXECUTOR_FIELD,
      jobExecutionPlan.getSpecExecutor().getClass().getCanonicalName());
  return jobMetadata;
}
/** * Create a {@link Trigger} from the given {@link JobSpec} */ private Trigger createTrigger(JobKey jobKey, JobSpec jobSpec) { // Build a trigger for the job with the given cron-style schedule return TriggerBuilder.newTrigger() .withIdentity("Cron for " + jobSpec.getUri()) .forJob(jobKey) .withSchedule(CronScheduleBuilder.cronSchedule( jobSpec.getConfig().getString(ConfigurationKeys.JOB_SCHEDULE_KEY))) .build(); }
private static File[] writeAzkabanConfigFiles(String workDir, String flowName, AzkabanProjectConfig azkabanProjectConfig) throws IOException { // Determine final config file path String jobFilePath = String.format("%s/%s.job", workDir, flowName); File jobFile = new File(jobFilePath); if (jobFile.exists()) { if (jobFile.delete()) { log.info("JobFile existed and was deleted: " + jobFilePath); } else { log.warn("JobFile exists but was not deleted: " + jobFilePath); } } StringBuilder propertyFileContent = new StringBuilder(); for (Map.Entry entry : azkabanProjectConfig.getJobSpec().getConfigAsProperties().entrySet()) { propertyFileContent.append(String.format("%s=%s", entry.getKey(), entry.getValue())).append("\n"); } // Write the job file FileUtils.writeStringToFile(jobFile, propertyFileContent.toString(), Charset.forName("UTF-8"),true); return new File[] {jobFile}; }
/** {@inheritDoc} */ @Override protected JobSpecSchedule doScheduleJob(JobSpec jobSpec, Runnable jobRunnable) { // Build a data map that gets passed to the job JobDataMap jobDataMap = new JobDataMap(); jobDataMap.put(JOB_SPEC_KEY, jobSpec); jobDataMap.put(JOB_RUNNABLE_KEY, jobRunnable); // Build a Quartz job JobDetail job = JobBuilder.newJob(QuartzJob.class) .withIdentity(jobSpec.getUri().toString()) .withDescription(Strings.nullToEmpty(jobSpec.getDescription())) .usingJobData(jobDataMap) .build(); Trigger jobTrigger = createTrigger(job.getKey(), jobSpec); QuartzJobSchedule jobSchedule = new QuartzJobSchedule(jobSpec, jobRunnable, jobTrigger); try { _scheduler.getScheduler().scheduleJob(job, jobTrigger); getLog().info(String.format("Scheduled job %s next two fire times: %s , %s.", jobSpec, jobTrigger.getNextFireTime(), jobTrigger.getFireTimeAfter(jobTrigger.getNextFireTime()))); } catch (SchedulerException e) { throw new RuntimeException("Scheduling failed for " + jobSpec + ":" + e, e); } return jobSchedule; }