/**
 * Wraps the given pipeline in a brand-new group named {@code sanitizedGroupName}
 * and inserts that group at the head of this collection.
 */
private void createNewGroup(String sanitizedGroupName, PipelineConfig pipeline) {
    PipelineConfigs newGroup = new BasicPipelineConfigs(pipeline);
    newGroup.setGroup(sanitizedGroupName);
    this.add(0, newGroup);
}
@Test
public void walkedObject_shouldWalkMergePipelineConfigs() {
    // A merged pipeline-group container is a config node the graph walker must descend into.
    GoConfigGraphWalker.WalkedObject walked =
            new GoConfigGraphWalker.WalkedObject(new MergePipelineConfigs(new BasicPipelineConfigs()));
    assertThat(walked.shouldWalk(), is(true));
}
/**
 * Renaming a merged (multi-part) pipeline group via config attributes is not
 * supported; {@code setConfigAttributes} is expected to throw.
 *
 * Fix: the original ended with {@code assertThat(group.getGroup(), is("my-new-group"))},
 * which is unreachable — the test only passes when the preceding call throws the
 * expected RuntimeException, so control never reaches the assertion. The dead,
 * misleading assertion has been removed.
 */
@Test(expected = RuntimeException.class)
public void shouldFailToUpdateName() {
    PipelineConfigs group = new MergePipelineConfigs(
            new BasicPipelineConfigs(PipelineConfigMother.pipelineConfig("pipeline1")),
            new BasicPipelineConfigs(PipelineConfigMother.pipelineConfig("pipeline2")));
    group.setConfigAttributes(m(BasicPipelineConfigs.GROUP, "my-new-group"));
}
@Test
public void shouldReturnTrueForOperatePermissionIfAuthorizationIsNotDefined_When2ConfigParts() {
    BasicPipelineConfigs fileOriginPart = new BasicPipelineConfigs();
    fileOriginPart.setOrigin(new FileConfigOrigin());
    MergePipelineConfigs merged = new MergePipelineConfigs(fileOriginPart, new BasicPipelineConfigs());
    // No authorization block defined on either part => everyone may operate.
    assertThat(merged.hasOperatePermission(new CaseInsensitiveString("anyone"), null), is(true));
}
@Test
public void shouldReturnFirstEditablePartWhenExists() {
    PipelineConfig pipeline = PipelineConfigMother.pipelineConfig("pipeline1");
    BasicPipelineConfigs editablePart = new BasicPipelineConfigs(pipeline);
    // File origin marks the part as locally editable.
    editablePart.setOrigin(new FileConfigOrigin());
    MergePipelineConfigs merged = new MergePipelineConfigs(editablePart, new BasicPipelineConfigs());
    assertThat(merged.getFirstEditablePartOrNull(), Matchers.<PipelineConfigs>is(editablePart));
}
/**
 * Builds a cruise config whose single group, "defaultGroup", contains all the
 * supplied pipelines.
 */
public static CruiseConfig configWith(PipelineConfig... pipelineConfigs) {
    BasicPipelineConfigs defaultGroup = new BasicPipelineConfigs();
    defaultGroup.setGroup("defaultGroup");
    defaultGroup.addAll(Arrays.asList(pipelineConfigs));
    return new BasicCruiseConfig(defaultGroup);
}
/** Creates an empty merged group with one file-originated (hence editable) part. */
@Override
protected PipelineConfigs createEmpty() {
    BasicPipelineConfigs filePart = new BasicPipelineConfigs();
    filePart.setOrigin(new FileConfigOrigin());
    return new MergePipelineConfigs(filePart);
}
@Test
public void shouldReturnFilePartForGetLocalWhenHasRemoteAndFilePart() {
    BasicPipelineConfigs localPart = new BasicPipelineConfigs();
    localPart.setOrigin(new FileConfigOrigin());
    BasicPipelineConfigs remotePart = new BasicPipelineConfigs();
    remotePart.setOrigin(new RepoConfigOrigin());
    MergePipelineConfigs merged = new MergePipelineConfigs(localPart, remotePart);
    // getLocal() must select the file-backed part, not the config-repo part.
    assertThat(merged.getLocal(), Matchers.<PipelineConfigs>is(localPart));
}
// Fresh fixture per test: an empty group named "existing_group" inside a basic cruise config.
// NOTE(review): the trailing @After annotates a teardown method that continues beyond this view.
@Before
public void setup() throws Exception {
    pipelines = new BasicPipelineConfigs("existing_group", new Authorization());
    cruiseConfig = new BasicCruiseConfig(pipelines);
    goConfigMother = new GoConfigMother();
}
@After
@Test
public void getAllLocalPipelines_shouldReturnPipelinesOnlyFromMainPart() {
    PipelineConfig localPipeline = PipelineConfigMother.pipelineConfig("pipe1");
    pipelines = new BasicPipelineConfigs("group_main", new Authorization(), localPipeline);
    BasicCruiseConfig mainCruiseConfig = new BasicCruiseConfig(pipelines);
    // Merge in a remote partial contributing "pipe2"; only the main part counts as local.
    cruiseConfig = new BasicCruiseConfig(mainCruiseConfig, PartialConfigMother.withPipeline("pipe2"));
    assertThat(cruiseConfig.getAllLocalPipelineConfigs(false).size(), is(1));
    assertThat(cruiseConfig.getAllLocalPipelineConfigs(false), hasItem(localPipeline));
}
/**
 * Builds a partial config containing a single template-based pipeline in a
 * group called "group", stamped with the given config-repo origin.
 */
public static PartialConfig withPipelineAssociatedWithTemplate(String name, String templateName, RepoConfigOrigin repoConfigOrigin) {
    PipelineConfig templatedPipeline = PipelineConfigMother.pipelineConfigWithTemplate(name, templateName);
    BasicPipelineConfigs group = new BasicPipelineConfigs(templatedPipeline);
    group.setGroup("group");
    PartialConfig partial = new PartialConfig(new PipelineGroups(group));
    partial.setOrigins(repoConfigOrigin);
    return partial;
}
@Test
public void shouldCheckForPipelineNameUniqueness() {
    BasicCruiseConfig cruiseConfig = GoConfigMother.configWithPipelines("p1");
    String group = "group";
    cruiseConfig.getGroups().add(new BasicPipelineConfigs(group, new Authorization()));
    // Add a second pipeline reusing the existing name "p1".
    PipelineConfig duplicate = GoConfigMother.createPipelineConfigWithMaterialConfig("p1", new GitMaterialConfig("url"));
    cruiseConfig.addPipeline(group, duplicate);
    PipelineConfigSaveValidationContext context =
            PipelineConfigSaveValidationContext.forChain(true, group, cruiseConfig, duplicate);
    duplicate.validateTree(context);
    // Validation must flag the name clash on the NAME attribute.
    assertThat(duplicate.errors().on(PipelineConfig.NAME),
            is(String.format("You have defined multiple pipelines named '%s'. Pipeline names must be unique. Source(s): [cruise-config.xml]", duplicate.name())));
}
@Test
public void shouldPassValidationWhenFetchingFromAnInstanceOfRunOnAllJob() {
    StageConfig upstreamStage = upstream.getFirstStageConfig();
    JobConfig runOnAllJob = upstreamStage.getJobs().get(0);
    runOnAllJob.setRunOnAllAgents(true);
    // Run-on-all jobs get a "-runOnAll-<n>" suffix per agent instance; fetching
    // from such an instance name must validate cleanly.
    FetchTask task = new FetchTask(upstream.name(), upstreamStage.name(),
            new CaseInsensitiveString(runOnAllJob.name() + "-runOnAll-1"), "src", "dest");
    task.validate(ConfigSaveValidationContext.forChain(config, new BasicPipelineConfigs(), upstream, upstreamStage, runOnAllJob));
    assertThat(task.errors().on(FetchTask.JOB), is(Matchers.nullValue()));
}
@Test
public void shouldAddPipelineToMain() {
    pipelines = new BasicPipelineConfigs("group_main", new Authorization(), PipelineConfigMother.pipelineConfig("pipe1"));
    pipelines.setOrigin(new FileConfigOrigin());
    BasicCruiseConfig mainCruiseConfig = new BasicCruiseConfig(pipelines);
    cruiseConfig = new BasicCruiseConfig(mainCruiseConfig, PartialConfigMother.withPipeline("pipe2"));
    cruiseConfig.addPipeline("group_main", PipelineConfigMother.pipelineConfig("pipe3"));
    // The new pipeline lands in the editable main part and is visible via the merged view.
    assertThat(mainCruiseConfig.hasPipelineNamed(new CaseInsensitiveString("pipe3")), is(true));
    assertThat(cruiseConfig.hasPipelineNamed(new CaseInsensitiveString("pipe3")), is(true));
}
@Test
public void validate_shouldValidatePresenceOfartifactId() {
    // A blank artifactId must be rejected with a clear error message.
    FetchPluggableArtifactTask task = new FetchPluggableArtifactTask(
            new CaseInsensitiveString("dummy"), new CaseInsensitiveString("stage"),
            new CaseInsensitiveString("job"), "");
    task.validate(ConfigSaveValidationContext.forChain(config, new BasicPipelineConfigs(), downstream,
            downstream.getStage(new CaseInsensitiveString("stage"))));
    assertFalse(task.errors().on("artifactId").isEmpty());
    assertThat(task.errors().on("artifactId"), is("Artifact Id cannot be blank."));
}
@Test
public void should_NOT_BeValidWhen_pathFromAncestor_isInvalid_becauseRefferedPipelineIsNotAnAncestor() {
    // "random_pipeline" exists but is not upstream of "downstream", so the
    // ancestor path "random_pipeline/upstream" must be rejected.
    FetchTask task = new FetchTask(new CaseInsensitiveString("random_pipeline/upstream"),
            new CaseInsensitiveString("random-stage1"), new CaseInsensitiveString("random-job1"), "src", "dest");
    task.validate(ConfigSaveValidationContext.forChain(config, new BasicPipelineConfigs(), downstream,
            downstream.getStage(new CaseInsensitiveString("stage"))));
    assertThat(task.errors().isEmpty(), is(false));
    assertThat(task.errors().on(FetchTask.PIPELINE_NAME),
            is("Pipeline named 'random_pipeline' exists, but is not an ancestor of 'downstream' as declared in 'random_pipeline/upstream'."));
}
@Test
public void shouldPopulateErrorIfFetchArtifactFromDependentPipelineButJobNotExist() {
    // Fetch references a job that is absent from the upstream stage.
    FetchPluggableArtifactTask task = new FetchPluggableArtifactTask(
            new CaseInsensitiveString("upstream"), new CaseInsensitiveString("stage"),
            new CaseInsensitiveString("job-does-not-exist"), "s3");
    StageConfig downstreamStage = downstream.getStage(new CaseInsensitiveString("stage"));
    task.validate(ConfigSaveValidationContext.forChain(config, new BasicPipelineConfigs(), downstream,
            downstreamStage, downstreamStage.getJobs().first()));
    assertThat(task.errors().isEmpty(), is(false));
    assertThat(task.errors().on(FetchTask.JOB),
            is("\"downstream :: stage :: job\" tries to fetch artifact from job " +
                    "\"upstream :: stage :: job-does-not-exist\" which does not exist."));
}
@Test
public void validate_shouldNotErrorWhenReferencingFilePipelineFromConfigRepositoryPipeline() {
    // A config-repo pipeline may legitimately fetch from a file-defined one.
    uppestStream.setOrigin(new FileConfigOrigin());
    downstream.setOrigin(new RepoConfigOrigin());
    FetchTask task = new FetchTask(new CaseInsensitiveString("uppest_stream/upstream"),
            new CaseInsensitiveString("uppest-stage2"), new CaseInsensitiveString("uppest-job2"), "src", "dest");
    task.validate(ConfigSaveValidationContext.forChain(config, new BasicPipelineConfigs(), downstream,
            downstream.getStage(new CaseInsensitiveString("stage"))));
    assertThat(task.errors().isEmpty(), is(true));
}
@Test
public void validate_shouldNotErrorWhenReferencingFilePipelineFromFilePipeline() {
    // Give the upstream job a pluggable artifact so the fetch target exists.
    uppestStream.getStage("uppest-stage2").jobConfigByConfigName("uppest-job2").artifactConfigs()
            .add(new PluggableArtifactConfig("s3", "cd.go.s3"));
    uppestStream.setOrigin(new FileConfigOrigin());
    downstream.setOrigin(new FileConfigOrigin());
    // File-to-file references are always permitted.
    FetchPluggableArtifactTask task = new FetchPluggableArtifactTask(
            new CaseInsensitiveString("uppest_stream/upstream"), new CaseInsensitiveString("uppest-stage2"),
            new CaseInsensitiveString("uppest-job2"), "s3");
    task.validate(ConfigSaveValidationContext.forChain(config, new BasicPipelineConfigs(), downstream,
            downstream.getStage(new CaseInsensitiveString("stage"))));
    assertThat(task.errors().isEmpty(), is(true));
}
@Test
public void shouldNotThrowExceptionIfRoleNameExist() {
    AdminRole role = new AdminRole(new CaseInsensitiveString("role1"));
    StageConfig stage = StageConfigMother.custom("ft", new AuthConfig(role));
    PipelineConfigs group = new BasicPipelineConfigs(
            new PipelineConfig(new CaseInsensitiveString("pipeline"), new MaterialConfigs(), stage));
    CruiseConfig config = new BasicCruiseConfig(group);
    // Register "role1" in server security so the stage's admin role resolves.
    config.server().security().addRole(new RoleConfig(new CaseInsensitiveString("role1")));
    role.validate(ConfigSaveValidationContext.forChain(config));
    assertThat(role.errors().isEmpty(), is(true));
}