@Test
public void shouldUpdateStageOnAttributes() {
    PipelineConfig pipelineConfig = new PipelineConfig();
    // Parameterized maps instead of raw HashMap: keeps the compiler's type
    // checking without changing what setConfigAttributes receives at runtime.
    Map<Object, Object> stageMap = new HashMap<>();
    List jobList = a(m(JobConfig.NAME, "JobName"));
    stageMap.put(StageConfig.NAME, "someStage");
    stageMap.put(StageConfig.JOBS, jobList);
    Map<Object, Object> attributeMap = new HashMap<>();
    attributeMap.put(PipelineConfig.NAME, "startup");
    attributeMap.put(PipelineConfig.STAGE, stageMap);
    pipelineConfig.setConfigAttributes(attributeMap);
    // The attribute map should populate the pipeline name, its single stage, and that stage's job.
    assertThat(pipelineConfig.name(), is(new CaseInsensitiveString("startup")));
    assertThat(pipelineConfig.get(0).name(), is(new CaseInsensitiveString("someStage")));
    assertThat(pipelineConfig.get(0).getJobs().first().name(), is(new CaseInsensitiveString("JobName")));
}
@Test
public void shouldSupportCommandWithWhiteSpace() throws Exception {
    // An exec command whose path contains a space must survive the load/migrate round trip intact.
    String jobXml = "<job name=\"functional\">\n"
            + "  <tasks>\n"
            + "    <exec command=\"c:\\program files\\cmd.exe\" args=\"arguments\" />\n"
            + "  </tasks>\n"
            + "  </job>\n";
    CruiseConfig cruiseConfig = ConfigMigrator.loadWithMigration(withCommand(jobXml)).config;
    PipelineConfig pipeline = cruiseConfig.pipelineConfigByName(new CaseInsensitiveString("pipeline1"));
    Task firstTask = pipeline.first().allBuildPlans().first().tasks().first();
    assertThat(firstTask, is(instanceOf(ExecTask.class)));
    assertThat(firstTask, is(new ExecTask("c:\\program files\\cmd.exe", "arguments", (String) null)));
}
@Test
public void shouldWriteJobProperties() throws Exception {
    // Round trip: the writer must serialize the job back to exactly the XML it was loaded from.
    String jobXml = "<job name=\"dev\">\n"
            + "  <properties>\n"
            + "    <property name=\"coverage\" src=\"reports/emma.html\" xpath=\"//coverage/class\" />\n"
            + "    <property name=\"prop2\" src=\"test.xml\" xpath=\"//value\" />\n"
            + "  </properties>\n"
            + "</job>";
    CruiseConfig config = loader.loadConfigHolder(ConfigFileFixture.withJob(jobXml)).configForEdit;
    PipelineConfig pipeline = config.pipelineConfigByName(new CaseInsensitiveString("pipeline1"));
    JobConfig jobConfig = pipeline.get(1).allBuildPlans().first();
    assertThat(writer.toXmlPartial(jobConfig), is(jobXml));
}
/**
 * Loads a full config fixture wrapping {@code jobXml} and returns the first job
 * of the first stage of "pipeline1" from the editable config.
 */
private JobConfig loadJobConfig(String jobXml) throws Exception {
    CruiseConfig config = loader.loadConfigHolder(ConfigFileFixture.withJob(jobXml)).configForEdit;
    PipelineConfig pipeline = config.pipelineConfigByName(new CaseInsensitiveString("pipeline1"));
    return pipeline.first().allBuildPlans().first();
}
@Test
public void shouldErrorOutForTemplates_WhenItHasATaskWithInvalidWorkingDirectory() throws Exception {
    CruiseConfig cruiseConfig = GoConfigMother.configWithPipelines("some_pipeline");
    StageConfig templateStage = StageConfigMother.stageWithTasks("templateStage");
    // Absolute working directory "/" is outside the agent sandbox, so validation must flag it.
    ExecTask execTask = new ExecTask("ls", "-la", "/");
    templateStage.getJobs().first().addTask(execTask);
    PipelineTemplateConfig template = new PipelineTemplateConfig(new CaseInsensitiveString("template_name"), templateStage);
    cruiseConfig.addTemplate(template);
    // NOTE(review): the old try { ... } catch (Exception e) { fail(e.getMessage()) }
    // discarded the stack trace; letting the exception propagate gives JUnit the
    // full failure context and reads more simply.
    execTask.validateTask(ConfigSaveValidationContext.forChain(cruiseConfig, template, templateStage, templateStage.getJobs().first()));
    assertThat(execTask.errors().isEmpty(), is(false));
    assertThat(execTask.errors().on(ExecTask.WORKING_DIR), is("The path of the working directory for the custom command in job 'job' in stage 'templateStage' of template 'template_name' is outside the agent sandbox."));
}
/**
 * Builds a FetchTask targeting the first job of the first stage of the given
 * pipeline (a same-pipeline fetch, so no upstream pipeline is set).
 */
private FetchTask fetchTaskFromSamePipeline(PipelineConfig pipelineConfig) {
    StageConfig firstStage = pipelineConfig.first();
    FetchTask fetchTask = new FetchTask();
    fetchTask.setStage(firstStage.name());
    fetchTask.setJob(firstStage.getJobs().first().name());
    return fetchTask;
}
@Test
public void shouldLoadJobProperties() throws Exception {
    // Both <property> entries must be parsed, in document order.
    String jobXml = "<job name=\"dev\">\n"
            + "  <properties>\n"
            + "    <property name=\"coverage\" src=\"reports/emma.html\" xpath=\"//coverage/class\" />\n"
            + "    <property name=\"prop2\" src=\"test.xml\" xpath=\"//value\" />\n"
            + "  </properties>\n"
            + "</job>";
    CruiseConfig config = loader.loadConfigHolder(ConfigFileFixture.withJob(jobXml)).configForEdit;
    PipelineConfig pipeline = config.pipelineConfigByName(new CaseInsensitiveString("pipeline1"));
    JobConfig jobConfig = pipeline.get(1).allBuildPlans().first();
    assertThat(jobConfig.getProperties().first(), is(new ArtifactPropertyConfig("coverage", "reports/emma.html", "//coverage/class")));
    assertThat(jobConfig.getProperties().get(1), is(new ArtifactPropertyConfig("prop2", "test.xml", "//value")));
}
@Test
public void shouldCopyErrorsForFieldsOnPipelineConfig() {
    // Build a pipeline, deep-clone it, attach errors to the clone at three levels
    // (env var, stage, job), then verify copyErrors transfers all of them back.
    PipelineConfig pipelineConfig = PipelineConfigMother.pipelineConfig("pipeline", MaterialConfigsMother.defaultMaterialConfigs(), new JobConfigs(JobConfigMother.createJobConfigWithJobNameAndEmptyResources()));
    pipelineConfig.setVariables(new EnvironmentVariablesConfig(asList(new EnvironmentVariableConfig("name", "value"))));

    PipelineConfig pipelineWithErrors = new Cloner().deepClone(pipelineConfig);
    pipelineWithErrors.getVariables().get(0).addError("name", "error on environment variable");
    pipelineWithErrors.first().addError("name", "error on stage");
    pipelineWithErrors.first().getJobs().first().addError("name", "error on job");

    BasicCruiseConfig.copyErrors(pipelineWithErrors, pipelineConfig);

    assertThat(pipelineConfig.getVariables().get(0).errors().on("name"), is("error on environment variable"));
    assertThat(pipelineConfig.first().errors().on("name"), is("error on stage"));
    assertThat(pipelineConfig.first().getJobs().first().errors().on("name"), is("error on job"));
}
@Test
public void shouldPopulateErrorIfFetchArtifactFromDependentPipelineButJobNotExist() {
    // Fetching a pluggable artifact from a job name that the upstream stage does not define.
    FetchPluggableArtifactTask task = new FetchPluggableArtifactTask(new CaseInsensitiveString("upstream"), new CaseInsensitiveString("stage"), new CaseInsensitiveString("job-does-not-exist"), "s3");
    StageConfig stage = downstream.getStage(new CaseInsensitiveString("stage"));

    task.validate(ConfigSaveValidationContext.forChain(config, new BasicPipelineConfigs(), downstream, stage, stage.getJobs().first()));

    assertThat(task.errors().isEmpty(), is(false));
    assertThat(task.errors().on(FetchTask.JOB), is("\"downstream :: stage :: job\" tries to fetch artifact from job \"upstream :: stage :: job-does-not-exist\" which does not exist."));
}
@Test
public void should_NOT_BeValidWhenFetchArtifactIsFromAnyAncestorStage_s_successorStage_onTheUpstreamPipeline() {
    // uppest-stage3 runs after the stage the dependency chain goes through, so
    // its artifacts cannot be guaranteed to exist when "downstream" runs.
    FetchTask task = new FetchTask(new CaseInsensitiveString("uppest_stream/upstream"), new CaseInsensitiveString("uppest-stage3"), new CaseInsensitiveString("uppest-job3"), "src", "dest");
    StageConfig stage = downstream.getStage(new CaseInsensitiveString("stage"));

    task.validate(ConfigSaveValidationContext.forChain(config, new BasicPipelineConfigs(), downstream, stage, stage.getJobs().first()));

    assertThat(task.errors().isEmpty(), is(false));
    assertThat(task.errors().on(FetchTask.STAGE), is("\"downstream :: stage :: job\" tries to fetch artifact from stage \"uppest_stream :: uppest-stage3\" which does not complete before \"downstream\" pipeline's dependencies."));
}
@Test
public void shouldPopulateErrorIfFetchArtifactFromDependentPipelineButJobNotExist() {
    // Fetching from a job name that the upstream stage does not define must be rejected.
    FetchTask task = new FetchTask(new CaseInsensitiveString("upstream"), new CaseInsensitiveString("stage"), new CaseInsensitiveString("job-does-not-exist"), "src", "dest");
    StageConfig stage = downstream.getStage(new CaseInsensitiveString("stage"));

    task.validate(ConfigSaveValidationContext.forChain(config, new BasicPipelineConfigs(), downstream, stage, stage.getJobs().first()));

    assertThat(task.errors().isEmpty(), is(false));
    assertThat(task.errors().on(FetchTask.JOB), is("\"downstream :: stage :: job\" tries to fetch artifact from job \"upstream :: stage :: job-does-not-exist\" which does not exist."));
}
@Test
public void shouldPopulateErrorOnSrcFileOrSrcDirOrDestIfIsNotAValidFilePathPattern() {
    // ".." escapes the working directory for src file, src dir and dest alike.
    String srcError = "Task of job 'job' in stage 'stage' of pipeline 'upstream' has src path '..' which is outside the working directory.";
    String destError = "Task of job 'job' in stage 'stage' of pipeline 'upstream' has dest path '..' which is outside the working directory.";
    FetchTask task = new FetchTask(new CaseInsensitiveString("upstream"), new CaseInsensitiveString("stage"), new CaseInsensitiveString("job"), "..", "..");
    StageConfig stage = upstream.getStage(new CaseInsensitiveString("stage"));
    ValidationContext context = ConfigSaveValidationContext.forChain(config, new BasicPipelineConfigs(), upstream, stage, stage.getJobs().first());

    task.validate(context);
    assertThat(task.errors().isEmpty(), is(false));
    assertThat(task.errors().on(FetchTask.SRC), is(srcError));
    assertThat(task.errors().on(FetchTask.DEST), is(destError));

    // Same complaint when the escaping path is given as a src directory instead of a src file.
    task.setSrcfile(null);
    task.setSrcdir("..");
    task.validate(context);
    assertThat(task.errors().on(FetchTask.SRC), is(srcError));
}
@Test
public void shouldFailWhenFetchArtifactIsFromAnyStage_AFTER_theDependencyStageOnTheUpstreamPipeline() {
    // up-stage2 runs after the dependency stage, so its artifacts are not yet available.
    FetchTask task = new FetchTask(new CaseInsensitiveString("upstream"), new CaseInsensitiveString("up-stage2"), new CaseInsensitiveString("up-job2"), "src", "dest");
    StageConfig stage = downstream.getStage(new CaseInsensitiveString("stage"));

    task.validate(ConfigSaveValidationContext.forChain(config, new BasicPipelineConfigs(), downstream, stage, stage.getJobs().first()));

    assertThat(task.errors().isEmpty(), is(false));
    assertThat(task.errors().on(FetchTask.STAGE), is("\"downstream :: stage :: job\" tries to fetch artifact from stage \"upstream :: up-stage2\" which does not complete before \"downstream\" pipeline's dependencies."));
}
@Test
public void shouldFailWhenFetchArtifactIsFromAnyStage_AFTER_theDependencyStageOnTheUpstreamPipeline() {
    // Pluggable-artifact variant: up-stage2 runs after the dependency stage.
    FetchPluggableArtifactTask task = new FetchPluggableArtifactTask(new CaseInsensitiveString("upstream"), new CaseInsensitiveString("up-stage2"), new CaseInsensitiveString("up-job2"), "s3");
    StageConfig stage = downstream.getStage(new CaseInsensitiveString("stage"));

    task.validate(ConfigSaveValidationContext.forChain(config, new BasicPipelineConfigs(), downstream, stage, stage.getJobs().first()));

    assertThat(task.errors().isEmpty(), is(false));
    assertThat(task.errors().on(FetchTask.STAGE), is("\"downstream :: stage :: job\" tries to fetch artifact from stage \"upstream :: up-stage2\" which does not complete before \"downstream\" pipeline's dependencies."));
}
@Test
public void should_NOT_BeValidWhenFetchArtifactIsFromAnyAncestorStage_s_successorStage_onTheUpstreamPipeline() {
    // Publish an "s3" pluggable artifact on a stage that runs after the one the
    // dependency path goes through; fetching it must still be rejected.
    uppestStream.getStage("uppest-stage3").jobConfigByConfigName("uppest-job3").artifactConfigs()
            .add(new PluggableArtifactConfig("s3", "cd.go.s3"));
    FetchPluggableArtifactTask task = new FetchPluggableArtifactTask(new CaseInsensitiveString("uppest_stream/upstream"), new CaseInsensitiveString("uppest-stage3"), new CaseInsensitiveString("uppest-job3"), "s3");
    StageConfig stage = downstream.getStage(new CaseInsensitiveString("stage"));

    task.validate(ConfigSaveValidationContext.forChain(config, new BasicPipelineConfigs(), downstream, stage, stage.getJobs().first()));

    assertThat(task.errors().isEmpty(), is(false));
    assertThat(task.errors().on(FetchTask.STAGE), is("\"downstream :: stage :: job\" tries to fetch artifact from stage \"uppest_stream :: uppest-stage3\" which does not complete before \"downstream\" pipeline's dependencies."));
}
@Test
public void validate_shouldErrorWhenReferencingConfigRepositoryPipelineFromFilePipeline() {
    // A file-defined pipeline may not fetch from a config-repo-defined pipeline.
    uppestStream.setOrigin(new RepoConfigOrigin());
    downstream.setOrigin(new FileConfigOrigin());
    FetchTask task = new FetchTask(new CaseInsensitiveString("uppest_stream/upstream"), new CaseInsensitiveString("uppest-stage2"), new CaseInsensitiveString("uppest-job2"), "src", "dest");
    StageConfig stage = downstream.getStage(new CaseInsensitiveString("stage"));

    task.validate(ConfigSaveValidationContext.forChain(config, new BasicPipelineConfigs(), downstream, stage, stage.getJobs().first()));

    assertThat(task.errors().isEmpty(), is(false));
    // Only the message prefix is pinned; the tail names the config repository origin.
    assertThat(task.errors().on(FetchTask.ARTIFACT_ORIGIN), startsWith("\"downstream :: stage :: job\" tries to fetch artifact from job \"uppest_stream :: uppest-stage2 :: uppest-job2\" which is defined in"));
}
@Test
public void validate_shouldErrorWhenReferencingConfigRepositoryPipelineFromFilePipeline() {
    // Pluggable-artifact variant: file-defined pipelines may not fetch from config-repo pipelines.
    uppestStream.setOrigin(new RepoConfigOrigin());
    downstream.setOrigin(new FileConfigOrigin());
    FetchPluggableArtifactTask task = new FetchPluggableArtifactTask(new CaseInsensitiveString("uppest_stream/upstream"), new CaseInsensitiveString("uppest-stage2"), new CaseInsensitiveString("uppest-job2"), "s3");
    StageConfig stage = downstream.getStage(new CaseInsensitiveString("stage"));

    task.validate(ConfigSaveValidationContext.forChain(config, new BasicPipelineConfigs(), downstream, stage, stage.getJobs().first()));

    assertThat(task.errors().isEmpty(), is(false));
    // Only the message prefix is pinned; the tail names the config repository origin.
    assertThat(task.errors().on(FetchTask.ARTIFACT_ORIGIN), startsWith("\"downstream :: stage :: job\" tries to fetch artifact from job \"uppest_stream :: uppest-stage2 :: uppest-job2\" which is defined in"));
}
@Test public void should_NOT_BeValidWhen_stageMayNotHaveRunViaTheGivenPath_evenThoughItMayHaveActuallyRunAccordingToAnAlternatePath() {//TODO: Please fix this if someone cares about this corner case working -jj PipelineConfig upstreamsPeer = config.pipelineConfigByName(new CaseInsensitiveString("upstreams_peer")); upstreamsPeer.setMaterialConfigs(new MaterialConfigs(MaterialConfigsMother.dependencyMaterialConfig("uppest_stream", "uppest-stage1"))); upstreamsPeer.add(StageConfigMother.stageConfig("peer-stage", new JobConfigs(new JobConfig("peer-job")))); downstream = config.pipelineConfigByName(new CaseInsensitiveString("downstream")); downstream.setMaterialConfigs(new MaterialConfigs(MaterialConfigsMother.dependencyMaterialConfig("upstream", "up-stage1"), MaterialConfigsMother.dependencyMaterialConfig("upstreams_peer", "peer-stage"))); FetchTask task = new FetchTask(new CaseInsensitiveString("uppest_stream/upstreams_peer"), new CaseInsensitiveString("uppest-stage1"), new CaseInsensitiveString("uppest-job1"), "src", "dest"); task.validate(ConfigSaveValidationContext.forChain(config, new BasicPipelineConfigs(), downstream, downstream.getStage(new CaseInsensitiveString("stage")), downstream.getStage(new CaseInsensitiveString("stage")).getJobs().first())); assertThat(task.errors().isEmpty(), is(true)); task = new FetchTask(new CaseInsensitiveString("uppest_stream/upstreams_peer"), new CaseInsensitiveString("uppest-stage2"), new CaseInsensitiveString("uppest-job2"), "src", "dest"); task.validate(ConfigSaveValidationContext.forChain(config, new BasicPipelineConfigs(), downstream, downstream.getStage(new CaseInsensitiveString("stage")), downstream.getStage(new CaseInsensitiveString("stage")).getJobs().first())); assertThat(task.errors().isEmpty(), is(false)); assertThat(task.errors().on(FetchTask.STAGE), is("\"downstream :: stage :: job\" tries to fetch artifact from stage \"uppest_stream :: uppest-stage2\" which does not complete before \"downstream\" pipeline's 
dependencies.")); }
@Test public void should_NOT_BeValidWhen_stageMayNotHaveRunViaTheGivenPath_evenThoughItMayHaveActuallyRunAccordingToAnAlternatePath() { uppestStream.getStage("uppest-stage1").jobConfigByConfigName("uppest-job1").artifactConfigs() .add(new PluggableArtifactConfig("s3", "cd.go.s3")); PipelineConfig upstreamsPeer = config.pipelineConfigByName(new CaseInsensitiveString("upstreams_peer")); upstreamsPeer.setMaterialConfigs(new MaterialConfigs(MaterialConfigsMother.dependencyMaterialConfig("uppest_stream", "uppest-stage1"))); upstreamsPeer.add(StageConfigMother.stageConfig("peer-stage", new JobConfigs(new JobConfig("peer-job")))); downstream = config.pipelineConfigByName(new CaseInsensitiveString("downstream")); downstream.setMaterialConfigs(new MaterialConfigs(MaterialConfigsMother.dependencyMaterialConfig("upstream", "up-stage1"), MaterialConfigsMother.dependencyMaterialConfig("upstreams_peer", "peer-stage"))); FetchPluggableArtifactTask task = new FetchPluggableArtifactTask(new CaseInsensitiveString("uppest_stream/upstreams_peer"), new CaseInsensitiveString("uppest-stage1"), new CaseInsensitiveString("uppest-job1"), "s3"); task.validate(ConfigSaveValidationContext.forChain(config, new BasicPipelineConfigs(), downstream, downstream.getStage(new CaseInsensitiveString("stage")), downstream.getStage(new CaseInsensitiveString("stage")).getJobs().first())); assertThat(task.errors().isEmpty(), is(true)); task = new FetchPluggableArtifactTask(new CaseInsensitiveString("uppest_stream/upstreams_peer"), new CaseInsensitiveString("uppest-stage2"), new CaseInsensitiveString("uppest-job2"), "s3"); task.validate(ConfigSaveValidationContext.forChain(config, new BasicPipelineConfigs(), downstream, downstream.getStage(new CaseInsensitiveString("stage")), downstream.getStage(new CaseInsensitiveString("stage")).getJobs().first())); assertThat(task.errors().isEmpty(), is(false)); assertThat(task.errors().on(FetchTask.STAGE), is("\"downstream :: stage :: job\" tries to fetch artifact from 
stage \"uppest_stream :: uppest-stage2\" which does not complete before \"downstream\" pipeline's dependencies.")); }
@Test
public void shouldNotFailValidationIfUpstreamExists_PipelineConfigSave() {
    // Upstream pipeline with the stage/job the fetch task will reference.
    PipelineConfig upstream = new PipelineConfig(new CaseInsensitiveString("upstream-pipeline"),
            new MaterialConfigs(),
            new StageConfig(new CaseInsensitiveString("upstream-stage"), new JobConfigs(new JobConfig(new CaseInsensitiveString("upstream-job")))));
    FetchTask fetchTask = new FetchTask(new CaseInsensitiveString("upstream-pipeline"), new CaseInsensitiveString("upstream-stage"), new CaseInsensitiveString("upstream-job"), "quux.c", "bang-file");
    JobConfig job = new JobConfig(new CaseInsensitiveString("downstream-job"));
    job.addTask(fetchTask);
    // Downstream declares the dependency material, so the fetch target is reachable.
    PipelineConfig downstream = new PipelineConfig(new CaseInsensitiveString("downstream-pipeline"),
            new MaterialConfigs(new DependencyMaterialConfig(upstream.name(), upstream.getFirstStageConfig().name())),
            new StageConfig(new CaseInsensitiveString("downstream-stage"), new JobConfigs(job)));

    fetchTask.validateTree(PipelineConfigSaveValidationContext.forChain(true, "group", new BasicCruiseConfig(new BasicPipelineConfigs(upstream, downstream)), downstream, downstream.getFirstStageConfig(), downstream.getFirstStageConfig().getJobs().first()));

    assertThat(fetchTask.errors().isEmpty(), is(true));
}