/**
 * Encrypts secure plugin-configured properties on every job belonging to this stage,
 * delegating to each {@link JobConfig}.
 */
public void encryptSecureProperties(CruiseConfig preprocessedConfig, PipelineTemplateConfig pipelineTemplateConfig) {
    getJobs().forEach(job -> job.encryptSecureProperties(preprocessedConfig, pipelineTemplateConfig));
}
/**
 * Collects the cached CCTray statuses for the given stage plus each of its jobs.
 * The stage status is looked up by its project name; each job status by the
 * stage-qualified job project name.
 */
private List<ProjectStatus> findStageAndStatusesFromCache(PipelineConfig pipelineConfig, StageConfig stageConfig) {
    List<ProjectStatus> projectStatuses = new ArrayList<>();
    String stageProjectName = stageProjectName(pipelineConfig, stageConfig);
    // NOTE(review): the stage status is added without a null check, unlike the job
    // statuses below — a cache miss would leave a null element in the returned list.
    // Confirm callers tolerate (or rely on) that before changing it.
    projectStatuses.add(cache.get(stageProjectName));
    for (JobConfig jobConfig : stageConfig.getJobs()) {
        // Job statuses that are absent from the cache are silently skipped.
        ProjectStatus jobStatus = cache.get(jobProjectName(stageProjectName, jobConfig));
        if (jobStatus != null) {
            projectStatuses.add(jobStatus);
        }
    }
    return projectStatuses;
}
/**
 * Builds a writer callback that serialises each job of the given stage as a child
 * JSON node via {@link JobRepresenter}.
 */
private static Consumer<OutputListWriter> getJobs(StageConfig stageConfig) {
    return jobsWriter -> {
        for (JobConfig job : stageConfig.getJobs()) {
            jobsWriter.addChild(jobWriter -> JobRepresenter.toJSON(jobWriter, job));
        }
    };
}
/**
 * Walks every job of the given pipeline and records, keyed by job identifier, the
 * pluggable artifact configs that reference this artifact store (matched by store id).
 */
private void populateReferences(List<Map<JobConfigIdentifier, List<PluggableArtifactConfig>>> usedByPipelines, PipelineConfig pipelineConfig) {
    for (StageConfig stage : pipelineConfig) {
        for (JobConfig job : stage.getJobs()) {
            List<PluggableArtifactConfig> artifactConfigs = job.artifactConfigs().findByStoreId(profile.getId());
            if (artifactConfigs.isEmpty()) {
                continue;
            }
            JobConfigIdentifier identifier = new JobConfigIdentifier(pipelineConfig.name(), stage.name(), job.name());
            usedByPipelines.add(Collections.singletonMap(identifier, artifactConfigs));
        }
    }
}
}
/**
 * Walks every job of the given pipeline and records the identifiers of jobs whose
 * elastic profile id matches this profile.
 */
private void populateDups(List<JobConfigIdentifier> usedByPipelines, PipelineConfig pipelineConfig) {
    for (StageConfig stage : pipelineConfig) {
        for (JobConfig job : stage.getJobs()) {
            if (profile.getId().equals(job.getElasticProfileId())) {
                usedByPipelines.add(new JobConfigIdentifier(pipelineConfig.name(), stage.name(), job.name()));
            }
        }
    }
}
/**
 * Creates a randomly named pipeline whose first stage contains exactly one elastic
 * job per supplied elastic profile id (any default jobs are removed first).
 */
public static PipelineConfig pipelineWithElasticJob(String... elasticProfileIds) {
    PipelineConfig pipelineConfig = pipelineConfig(UUID.randomUUID().toString());
    JobConfigs jobs = pipelineConfig.first().getJobs();
    jobs.clear();
    for (String elasticProfileId : elasticProfileIds) {
        jobs.add(JobConfigMother.elasticJob(elasticProfileId));
    }
    return pipelineConfig;
}
/**
 * Creates a template whose first job carries one environment variable per supplied
 * param name, each referencing that param via the {@code #{param}} syntax.
 */
public static PipelineTemplateConfig createTemplateWithParams(String templateName, String... paramNameAndValue) {
    PipelineTemplateConfig template = createTemplate(templateName);
    JobConfig firstJob = template.get(0).getJobs().get(0);
    for (String nameAndValue : paramNameAndValue) {
        firstJob.addVariable(String.format("name-%s", nameAndValue), String.format("value-#{%s}", nameAndValue));
    }
    return template;
}
}
@Test
public void shouldNotAttemptToEncryptPropertiesIfThereAreNoPluginConfigs() {
    // A job with no pluggable artifact configs must not trigger stage-level encryption.
    StageConfig stage = mock(StageConfig.class);
    when(stage.getJobs()).thenReturn(new JobConfigs(new JobConfig(new CaseInsensitiveString("job"))));
    when(stage.name()).thenReturn(new CaseInsensitiveString("stage"));

    PipelineConfig pipelineConfig = new PipelineConfig();
    pipelineConfig.add(stage);

    pipelineConfig.encryptSecureProperties(new BasicCruiseConfig(), pipelineConfig);

    verify(stage, never()).encryptSecureProperties(eq(new BasicCruiseConfig()), eq(pipelineConfig), ArgumentMatchers.any(StageConfig.class));
}
/**
 * Creates a pipeline with the given stage/jobs and assigns the supplied elastic
 * profile id to every job in that stage.
 */
public static PipelineConfig pipelineWithElasticJobs(String elasticProfileId, String pipelineName, String stageName, String... jobNames) {
    PipelineConfig pipelineConfig = createPipelineConfig(pipelineName, stageName, jobNames);
    pipelineConfig.getStage(stageName).getJobs().forEach(job -> job.setElasticProfileId(elasticProfileId));
    return pipelineConfig;
}
/**
 * Runs the stage- and job-level validations for every stage of the pipeline, pushing
 * a child validation context for each level so errors attach to the right node.
 */
private void validatePartsOfPipelineConfig(PipelineConfig pipelineConfig, PipelineConfigSaveValidationContext contextForStages) {
    for (StageConfig stageConfig : pipelineConfig.getStages()) {
        PipelineConfigSaveValidationContext stageContext = contextForStages.withParent(stageConfig);
        validateStageApprovalAuthorization(stageConfig, stageContext);
        for (JobConfig jobConfig : stageConfig.getJobs()) {
            PipelineConfigSaveValidationContext jobContext = stageContext.withParent(jobConfig);
            validateFetchTasks(jobConfig, jobContext);
            validateElasticProfileId(jobConfig, jobContext);
            validatePluggableArtifactConfig(jobConfig, jobContext);
        }
    }
}
@Test
public void shouldEncryptSecurePropertiesInStagesIfPipelineHasStagesDefined() {
    // A job carrying a pluggable artifact config must trigger stage-level encryption.
    JobConfig jobWithPluggableArtifact = new JobConfig(new CaseInsensitiveString("job"));
    jobWithPluggableArtifact.artifactConfigs().add(new PluggableArtifactConfig("foo", "bar"));

    StageConfig stage = mock(StageConfig.class);
    when(stage.getJobs()).thenReturn(new JobConfigs(jobWithPluggableArtifact));
    when(stage.name()).thenReturn(new CaseInsensitiveString("stage"));

    PipelineConfig pipelineConfig = new PipelineConfig();
    pipelineConfig.add(stage);

    pipelineConfig.encryptSecureProperties(new BasicCruiseConfig(), pipelineConfig);

    verify(stage).encryptSecureProperties(eq(new BasicCruiseConfig()), eq(pipelineConfig), ArgumentMatchers.any(StageConfig.class));
}
@Test
public void shouldPickupJobConfigDetailsFromAttributeMap() throws Exception {
    StageConfig config = new StageConfig();

    // Two job entries supplied through the generic attribute-map API.
    config.setConfigAttributes(m(StageConfig.JOBS, a(m(JobConfig.NAME, "con-job"), m(JobConfig.NAME, "boring-job"))));

    JobConfigs jobs = config.getJobs();
    assertThat(jobs.get(0).name(), is(new CaseInsensitiveString("con-job")));
    assertThat(jobs.get(1).name(), is(new CaseInsensitiveString("boring-job")));
}
@Test
public void shouldAllowEditingOfJobNameWhenItIsNotUsedAsFetchArtifact() throws Exception {
    PipelineTemplateConfig template = new PipelineTemplateConfig(new CaseInsensitiveString("template"), StageConfigMother.oneBuildPlanWithResourcesAndMaterials("stage", "job2"));
    BasicCruiseConfig cruiseConfig = GoConfigMother.defaultCruiseConfig();
    cruiseConfig.addTemplate(template);

    // Renaming a job nothing fetches artifacts from should validate cleanly.
    template.getStages().get(0).getJobs().get(0).setName(new CaseInsensitiveString("updatedJobName"));
    template.validateTree(ConfigSaveValidationContext.forChain(cruiseConfig), cruiseConfig, false);

    assertThat(template.errors().isEmpty(), is(true));
}
@Test
public void should_NOT_BeValidWhen_NO_pathFromAncestorIsGiven_butAncestorPipelineIsBeingFetchedFrom() {
    // Null pipeline path means "this pipeline" — so the ancestor stage cannot be found.
    FetchPluggableArtifactTask task = new FetchPluggableArtifactTask(null, new CaseInsensitiveString("uppest-stage3"), new CaseInsensitiveString("uppest-job3"), "s3");
    StageConfig stage = downstream.getStage(new CaseInsensitiveString("stage"));
    JobConfig job = stage.getJobs().get(0);

    task.validate(ConfigSaveValidationContext.forChain(config, new BasicPipelineConfigs(), downstream, stage, job));

    assertThat(task.errors().isEmpty(), is(false));
    assertThat(task.errors().on(FetchTask.STAGE), is("\"downstream :: stage :: job\" tries to fetch artifact from stage \"downstream :: uppest-stage3\" which does not exist."));
}
@Test
public void shouldFailWhenFetchArtifactIsFromAnyStage_AFTER_theDependencyStageOnTheUpstreamPipeline() {
    // up-stage2 runs after the stage downstream depends on, so its artifacts are not yet available.
    FetchTask task = new FetchTask(new CaseInsensitiveString("upstream"), new CaseInsensitiveString("up-stage2"), new CaseInsensitiveString("up-job2"), "src", "dest");
    StageConfig stage = downstream.getStage(new CaseInsensitiveString("stage"));

    task.validate(ConfigSaveValidationContext.forChain(config, new BasicPipelineConfigs(), downstream, stage, stage.getJobs().first()));

    assertThat(task.errors().isEmpty(), is(false));
    assertThat(task.errors().on(FetchTask.STAGE), is("\"downstream :: stage :: job\" tries to fetch artifact from stage \"upstream :: up-stage2\" which does not complete before \"downstream\" pipeline's dependencies."));
}
@Test
public void validate_shouldPopulateErrorOnSrcFileOrSrcDirOrDestIfIsNotAValidFilePathPattern() {
    // Neither srcfile nor srcdir supplied — attribute validation must flag SRC.
    FetchTask task = new FetchTask(new CaseInsensitiveString(""), new CaseInsensitiveString(""), new CaseInsensitiveString(""), "", "");
    StageConfig stage = downstream.getStage(new CaseInsensitiveString("stage"));

    task.validateAttributes(ConfigSaveValidationContext.forChain(config, new BasicPipelineConfigs(), downstream, stage, stage.getJobs().get(0)));

    assertThat(task.errors().isEmpty(), is(false));
    assertThat(task.errors().on(FetchTask.SRC), is("Should provide either srcdir or srcfile"));
}
@Test
public void shouldPopulateErrorIfFetchArtifactFromDependentPipelineButStageDoesNotExist() {
    FetchPluggableArtifactTask task = new FetchPluggableArtifactTask(new CaseInsensitiveString("upstream"), new CaseInsensitiveString("stage-does-not-exist"), new CaseInsensitiveString("job"), "s3");
    StageConfig stage = downstream.getStage(new CaseInsensitiveString("stage"));

    // NOTE(review): the stage appears twice in the validation chain here, unlike the
    // single-stage chains in sibling tests — preserved as-is; confirm it is intentional.
    task.validate(ConfigSaveValidationContext.forChain(config, new BasicPipelineConfigs(), downstream, stage, stage, stage.getJobs().get(0)));

    assertThat(task.errors().isEmpty(), is(false));
    assertThat(task.errors().on(FetchTask.STAGE), is("\"downstream :: stage :: job\" tries to fetch artifact from stage \"upstream :: stage-does-not-exist\" which does not exist."));
}
@Test
public void shouldPopulateErrorIfFetchArtifactFromDependentPipelineButStageDoesNotExist() {
    FetchTask task = new FetchTask(new CaseInsensitiveString("upstream"), new CaseInsensitiveString("stage-does-not-exist"), new CaseInsensitiveString("job"), "src", "dest");
    StageConfig stage = downstream.getStage(new CaseInsensitiveString("stage"));

    // NOTE(review): the stage appears twice in the validation chain here, unlike the
    // single-stage chains in sibling tests — preserved as-is; confirm it is intentional.
    task.validate(ConfigSaveValidationContext.forChain(config, new BasicPipelineConfigs(), downstream, stage, stage, stage.getJobs().get(0)));

    assertThat(task.errors().isEmpty(), is(false));
    assertThat(task.errors().on(FetchTask.STAGE), is("\"downstream :: stage :: job\" tries to fetch artifact from stage \"upstream :: stage-does-not-exist\" which does not exist."));
}
@Test
public void should_NOT_BeValidWhenFetchArtifactIsFromAnyAncestorStage_s_successorStage_onTheUpstreamPipeline() {
    // Give the ancestor's third stage a pluggable artifact, then try to fetch it:
    // that stage runs after the dependency chain, so the fetch must be rejected.
    uppestStream.getStage("uppest-stage3").jobConfigByConfigName("uppest-job3").artifactConfigs()
            .add(new PluggableArtifactConfig("s3", "cd.go.s3"));
    FetchPluggableArtifactTask task = new FetchPluggableArtifactTask(new CaseInsensitiveString("uppest_stream/upstream"), new CaseInsensitiveString("uppest-stage3"), new CaseInsensitiveString("uppest-job3"), "s3");
    StageConfig stage = downstream.getStage(new CaseInsensitiveString("stage"));

    task.validate(ConfigSaveValidationContext.forChain(config, new BasicPipelineConfigs(), downstream, stage, stage.getJobs().first()));

    assertThat(task.errors().isEmpty(), is(false));
    assertThat(task.errors().on(FetchTask.STAGE), is("\"downstream :: stage :: job\" tries to fetch artifact from stage \"uppest_stream :: uppest-stage3\" which does not complete before \"downstream\" pipeline's dependencies."));
}
@Test
public void validate_shouldErrorWhenReferencingConfigRepositoryPipelineFromFilePipeline() {
    // A file-defined pipeline must not fetch artifacts from a config-repo-defined job.
    uppestStream.setOrigin(new RepoConfigOrigin());
    downstream.setOrigin(new FileConfigOrigin());

    FetchTask task = new FetchTask(new CaseInsensitiveString("uppest_stream/upstream"), new CaseInsensitiveString("uppest-stage2"), new CaseInsensitiveString("uppest-job2"), "src", "dest");
    StageConfig stage = downstream.getStage(new CaseInsensitiveString("stage"));

    task.validate(ConfigSaveValidationContext.forChain(config, new BasicPipelineConfigs(), downstream, stage, stage.getJobs().first()));

    assertThat(task.errors().isEmpty(), is(false));
    assertThat(task.errors().on(FetchTask.ARTIFACT_ORIGIN), startsWith("\"downstream :: stage :: job\" tries to fetch artifact from job \"uppest_stream :: uppest-stage2 :: uppest-job2\" which is defined in"));
}