.build()); final JobSpec js1 = JobSpec.builder("test.job1").withConfig(jobCfg1).build(); final JobSpec js2 = JobSpec.builder("test.job2").withConfig(jobCfg2).build(); final JobSpec js1_2 = JobSpec.builder("test.job1").withConfig(jobCfg1).withVersion("2").build();
jobSpecBuilder.withJobCatalogURI(record.getUri()).withVersion(record.getVersion()) .withDescription(record.getDescription()).withConfigAsProperties(props); jobSpecBuilder.withTemplate(new URI(record.getTemplateUri())); SpecExecutor.Verb verb = SpecExecutor.Verb.valueOf(verbName); changesSpecs.add(new ImmutablePair<SpecExecutor.Verb, Spec>(verb, jobSpecBuilder.build())); } catch (Throwable t) { log.error("Could not decode record at partition " + this.currentPartitionIdx +
@Test public void testBuilder() throws URISyntaxException { JobSpec.Builder b = new JobSpec.Builder("test:job"); JobSpec js1 = b.build(); props.put("a2.c.d2", "true"); b = new JobSpec.Builder("test:job2") .withVersion("2") .withDescription("A test job") .withConfigAsProperties(props); JobSpec js2 = b.build(); b = new JobSpec.Builder("test:job") .withVersion("3") .withDescription("A test job") .withConfig(cfg); JobSpec js3 = b.build(); b = new JobSpec.Builder().withConfig(cfg2); JobSpec js4 = b.build(); b = new JobSpec.Builder().withConfig(cfg2).withJobCatalogURI("my-jobs:/"); JobSpec js5 = b.build(); Assert.assertEquals(js5.getUri(), new URI("my-jobs:/myGroup/myJob"));
jobSpec = this.specBuilder.withConfig(finalConfig).build();
jobSpecBuilder.withJobCatalogURI(record.getUri()).withVersion(record.getVersion()) .withDescription(record.getDescription()).withConfigAsProperties(props); jobSpecBuilder.withTemplate(new URI(record.getTemplateUri())); SpecExecutor.Verb verb = SpecExecutor.Verb.valueOf(verbName); changesSpecs.add(new ImmutablePair<SpecExecutor.Verb, Spec>(verb, jobSpecBuilder.build())); } catch (Throwable t) { log.error("Could not decode record at partition " + this.currentPartitionIdx +
try { JobSpec.Builder builder = (uri == null) ? JobSpec.builder() : JobSpec.builder(uri); builder = (templateURI == null) ? builder : builder.withTemplate(new URI(templateURI)); builder = (version == null) ? builder : builder.withVersion(version); builder = (description == null) ? builder : builder.withDescription(description); jobSpec = builder.withConfig(jobConfig).build(); } catch (URISyntaxException e) { log.error("Error deserializing JobSpec {}", config);
JobSpec.Builder jobSpecBuilder = JobSpec.builder(jobSpecURIGenerator(flowGroup, jobName, flowSpec)).withConfig(jobConfig) .withDescription(flowSpec.getDescription()).withVersion(flowSpec.getVersion()); JobSpec jobSpec = jobSpecBuilder.withTemplate(jobTemplateUri).build();
JobSpec jobSpec; JobSpec.Builder jobSpecBuilder = JobSpec.builder(jobSpecURIGenerator(flowSpec, sourceNode, targetNode)) .withConfig(flowSpec.getConfig()) .withDescription(flowSpec.getDescription()) .withVersion(flowSpec.getVersion()); if (templateURI != null) { jobSpecBuilder.withTemplate(templateURI); try { jobSpec = new ResolvedJobSpec(jobSpecBuilder.build(), templateCatalog.get()); log.info("Resolved JobSpec properties are: " + jobSpec.getConfigAsProperties()); } catch (SpecNotFoundException | JobTemplate.TemplateException e) { jobSpec = jobSpecBuilder.build(); log.info("Unresolved JobSpec properties are: " + jobSpec.getConfigAsProperties());
JobSpec.Builder builder = JobSpec.builder(jobConfigURI).withConfig(filteredConfig) .withDescription(description) .withVersion(version); builder.withTemplate(new URI(rawConfig.getString(ConfigurationKeys.JOB_TEMPLATE_PATH))); } catch (URISyntaxException e) { throw new RuntimeException("Bad job template URI " + e, e); return builder.build();
JobSpec jobSpec; JobSpec.Builder jobSpecBuilder = JobSpec.builder(jobSpecURIGenerator(flowSpec)) .withConfig(flowSpec.getConfig()) .withDescription(flowSpec.getDescription()) .withVersion(flowSpec.getVersion()); jobSpecBuilder = jobSpecBuilder.withTemplate(flowSpec.getTemplateURIs().get().iterator().next()); try { jobSpec = new ResolvedJobSpec(jobSpecBuilder.build(), templateCatalog.get()); log.info("Resolved JobSpec properties are: " + jobSpec.getConfigAsProperties()); } catch (SpecNotFoundException | JobTemplate.TemplateException e) { jobSpec = jobSpecBuilder.build(); log.info("Unresolved JobSpec properties are: " + jobSpec.getConfigAsProperties());
/**
 * Compiles the given flow {@link Spec} into a mock {@link Dag} of {@code NUMBER_OF_JOBS}
 * {@link JobExecutionPlan}s. Each job inherits the flow's name/group, gets an index-suffixed
 * job name/group, and shares a single flow execution id (the compile-time timestamp).
 *
 * @param spec the flow spec to compile; must be a {@link FlowSpec}
 * @return a dag containing one execution plan per generated job
 */
@Override
public Dag<JobExecutionPlan> compileFlow(Spec spec) {
  List<JobExecutionPlan> jobExecutionPlans = new ArrayList<>();
  long flowExecutionId = System.currentTimeMillis();
  // getProperty() guarantees String values; the raw get() used previously returns Object.
  Properties flowProps = ((FlowSpec) spec).getConfigAsProperties();
  String flowName = flowProps.getProperty(ConfigurationKeys.FLOW_NAME_KEY);
  String flowGroup = flowProps.getProperty(ConfigurationKeys.FLOW_GROUP_KEY);
  // Original while(i++ < NUMBER_OF_JOBS) saw i in 1..NUMBER_OF_JOBS inside the body.
  for (int i = 1; i <= NUMBER_OF_JOBS; i++) {
    String specUri = "/foo/bar/spec/" + i;
    Properties properties = new Properties();
    properties.put(ConfigurationKeys.FLOW_NAME_KEY, flowName);
    properties.put(ConfigurationKeys.FLOW_GROUP_KEY, flowGroup);
    properties.put(ConfigurationKeys.JOB_NAME_KEY, flowName + "_" + i);
    properties.put(ConfigurationKeys.JOB_GROUP_KEY, flowGroup + "_" + i);
    // BUG FIX: previously put(..., flowExecutionId) stored a Long. Properties is a
    // String-to-String table: non-String values are invisible to getProperty() and
    // stringPropertyNames(), so the execution id was silently dropped (or triggered a
    // ClassCastException) when converting to Config. Store it as a String instead.
    properties.put(ConfigurationKeys.FLOW_EXECUTION_ID_KEY, Long.toString(flowExecutionId));
    JobSpec jobSpec = JobSpec.builder(specUri)
        .withConfig(ConfigUtils.propertiesToConfig(properties))
        .withVersion("1")
        .withDescription("Spec Description")
        .build();
    jobExecutionPlans.add(new JobExecutionPlan(jobSpec, new InMemorySpecExecutor(ConfigFactory.empty())));
  }
  return new JobExecutionPlanDagFactory().createDag(jobExecutionPlans);
}
}
/**
 * Converts an {@link AvroJobSpec} record into a {@link JobSpec}.
 *
 * @param record the Avro-serialized job spec record
 * @return a singleton collection holding the parsed {@link JobSpec} as the left side of an {@link Either}
 */
@Override
public Collection<Either<JobSpec, URI>> parseJobSpec(AvroJobSpec record) {
  Properties recordProps = new Properties();
  recordProps.putAll(record.getProperties());

  JobSpec.Builder builder = JobSpec.builder(record.getUri())
      .withJobCatalogURI(record.getUri())
      .withVersion(record.getVersion())
      .withDescription(record.getDescription())
      .withConfigAsProperties(recordProps)
      .withMetadata(record.getMetadata());

  // A template URI is optional; an unparsable one is logged and ignored.
  String templateUri = record.getTemplateUri();
  if (!templateUri.isEmpty()) {
    try {
      builder.withTemplate(new URI(templateUri));
    } catch (URISyntaxException e) {
      log.error("could not parse template URI " + templateUri);
    }
  }

  JobSpec parsed = builder.build();
  log.info("Parsed job spec " + parsed.toString());
  return Lists.newArrayList(Either.<JobSpec, URI>left(parsed));
}
/**
 * Builds a {@link JobSpec} from a {@link GobblinTrackingEvent}: the spec URI is derived from the
 * event's dataset URN, and configured metadata keys are translated into job-config entries.
 * Events that fail {@code acceptEvent} are counted as rejected and produce an empty result.
 */
@Override
public Collection<Either<JobSpec, URI>> parseJobSpec(GobblinTrackingEvent event) {
  if (!acceptEvent(event)) {
    this.rejectedEvents.inc();
    return Lists.newArrayList();
  }

  Map<String, String> metadata = event.getMetadata();
  String datasetUrn = metadata.get(SlaEventKeys.DATASET_URN_KEY);
  URI specUri = PathUtils.mergePaths(new Path(this.baseURI), new Path(datasetUrn)).toUri();

  // extractKeys maps event-metadata keys to job-config keys; copy only those present.
  Map<String, String> configMap = Maps.newHashMap();
  for (Map.Entry<String, String> mapping : this.extractKeys.entrySet()) {
    if (metadata.containsKey(mapping.getKey())) {
      configMap.put(mapping.getValue(), metadata.get(mapping.getKey()));
    }
  }

  JobSpec spec = JobSpec.builder(specUri)
      .withTemplate(this.template)
      .withConfig(ConfigFactory.parseMap(configMap))
      .build();
  return Lists.newArrayList(Either.<JobSpec, URI>left(spec));
}
/**
 * Creates a {@link Builder} whose URI and configuration are derived from a job properties object.
 * The spec URI takes the form {@code <scheme>://<authority>/<group>/<name>}, with the group
 * defaulting to {@code "default"} when the properties define none.
 *
 * @param catalogURI the catalog URI supplying scheme and authority for the spec URI
 * @param jobProps job properties providing name, group, description and the spec config
 * @throws RuntimeException if the derived spec URI is syntactically invalid
 */
public static Builder builder(URI catalogURI, Properties jobProps) {
  String jobName = JobState.getJobNameFromProps(jobProps);
  String jobGroup = JobState.getJobGroupFromProps(jobProps);
  if (jobGroup == null) {
    jobGroup = "default";
  }
  try {
    URI jobURI = new URI(catalogURI.getScheme(), catalogURI.getAuthority(),
        "/" + jobGroup + "/" + jobName, null);
    Builder result = new Builder(jobURI).withConfigAsProperties(jobProps);
    String description = JobState.getJobDescriptionFromProps(jobProps);
    if (description != null) {
      result = result.withDescription(description);
    }
    return result;
  } catch (URISyntaxException e) {
    throw new RuntimeException("Unable to create a JobSpec URI: " + e, e);
  }
}
/**
 * Creates an embedded Gobblin launcher for a job with the given name.
 *
 * @param name the job name used to seed the underlying {@link JobSpec.Builder}
 */
@CliObjectSupport(argumentNames = {"jobName"})
public EmbeddedGobblin(String name) {
  // Load Gobblin site configuration into the Hadoop Configuration before anything else runs.
  HadoopUtils.addGobblinSite();
  this.specBuilder = new JobSpec.Builder(name);
  // Separate maps for user-provided config, internally built config, and system-config overrides.
  // NOTE(review): how these three are merged/prioritized is resolved elsewhere — not visible here.
  this.userConfigMap = Maps.newHashMap();
  this.builtConfigMap = Maps.newHashMap();
  this.sysConfigOverrides = Maps.newHashMap();
  this.defaultSysConfig = getDefaultSysConfig();
  this.distributedJars = Maps.newHashMap();
  // Pre-populate the jars that must accompany any distributed execution.
  loadCoreGobblinJarsToDistributedJars();
  // Jar distribution is a no-op by default; execution modes are expected to replace this hook.
  this.distributeJarsFunction = new Runnable() {
    @Override
    public void run() {
      // NOOP
    }
  };
}
/** Returns a new {@link Builder} with default settings (no URI supplied up front). */
public static Builder builder() { return new Builder(); }
/** Returns a new {@link Builder} for a JobSpec identified by the given URI. */
public static Builder builder(URI jobSpecUri) { return new Builder(jobSpecUri); }
/** Returns a new {@link Builder} for a JobSpec identified by the given URI string. */
public static Builder builder(String jobSpecUri) { return new Builder(jobSpecUri); }
/**
 * {@inheritDoc}
 *
 * <p>NOTE: only the URI and version are available here, so a minimal probe {@link JobSpec}
 * carrying just those two fields is built for the filter check; the delete notification is
 * forwarded only when that probe passes the filter.
 */
@Override
public void onDeleteJob(URI deletedJobURI, String deletedJobVersion) {
  JobSpec probeSpec = JobSpec.builder(deletedJobURI).withVersion(deletedJobVersion).build();
  if (!this.filter.apply(probeSpec)) {
    return;
  }
  this.delegate.onDeleteJob(deletedJobURI, deletedJobVersion);
}
/**
 * Deletes the Azkaban project backing the given spec URI.
 *
 * @param deletedSpecURI URI of the spec whose Azkaban project should be removed
 * @param headers request headers; not read in this implementation
 */
@Override
public Future<?> deleteSpec(URI deletedSpecURI, Properties headers) {
  // Delete project
  // A minimal JobSpec is built solely to derive the Azkaban project configuration from the URI.
  JobSpec jobSpec = new JobSpec.Builder(deletedSpecURI).build();
  try {
    AzkabanJobHelper.deleteAzkabanJob(_sessionId, new AzkabanProjectConfig(jobSpec));
  } catch (IOException e) {
    throw new RuntimeException("Issue in deleting Azkaban project.", e);
  }
  // NOTE(review): this throw is unconditional — the project IS deleted above, yet every caller
  // sees UnsupportedOperationException and no Future is ever returned. Confirm this is the
  // intended contract (delete, then signal "unsupported") rather than a leftover stub.
  throw new UnsupportedOperationException();
}