FlowExecution execution = owner.getOrNull(); if (execution != null) { DepthFirstScanner scanner = new DepthFirstScanner(); FlowNode stageId = scanner.findFirstMatch(execution, new StageNamePredicate(stageName)); if (stageId != null) { tr = ((hudson.tasks.junit.TestResult) tr).getResultForPipelineBlock(stageId.getId());
return; DepthFirstScanner scanner = new DepthFirstScanner(); List<FlowNode> sorted = scanner.filteredNodes(exec.getCurrentHeads(), (Predicate) Predicates.alwaysTrue()); Collections.sort(sorted, new Comparator<FlowNode>() { @Override
/**
 * Verify we didn't lose TimingAction on any node of the flow graph.
 * The overall {@link FlowStartNode}/{@link FlowEndNode} sentinels carry no TimingAction and are skipped.
 *
 * @param exec flow execution whose entire node graph is scanned
 * @throws Exception wrapping any failure together with the offending node's id for easier diagnosis
 */
static void assertHasTimingAction(FlowExecution exec) throws Exception {
    DepthFirstScanner scan = new DepthFirstScanner();
    for (FlowNode node : scan.allNodes(exec)) {
        try {
            if (!(node instanceof FlowStartNode) && !(node instanceof FlowEndNode)) {
                Assert.assertNotNull("Missing TimingAction on node", node.getPersistentAction(TimingAction.class));
            }
        } catch (Exception | AssertionError ex) {
            // Also catch AssertionError: Assert.assertNotNull throws an Error, not an Exception,
            // so the original catch (Exception) never attached the node id to assertion failures.
            throw new Exception("Error with node: " + node.getId(), ex);
        }
    }
}
/**
 * Linear scan of the whole flow graph (from the current heads backwards) to find the
 * {@link BlockEndNode} matching {@code start}, caching every start→end pairing seen
 * along the way in {@code blockStartToEnd} so later lookups can avoid re-scanning.
 *
 * @param start block start node whose matching end is sought
 * @return the matching end node, or null if the block has not completed yet
 */
BlockEndNode bruteForceScanForEnd(@Nonnull BlockStartNode start) {
    DepthFirstScanner scan = new DepthFirstScanner();
    scan.setup(start.getExecution().getCurrentHeads());
    for (FlowNode f : scan) {
        if (f instanceof BlockEndNode) {
            BlockEndNode end = (BlockEndNode)f;
            BlockStartNode maybeStart = end.getStartNode();
            // Cache start in case we need to scan again in the future
            blockStartToEnd.put(maybeStart.getId(), end.getId());
            if (start.equals(maybeStart)) {
                return end;
            }
        } else if (f instanceof BlockStartNode) {
            BlockStartNode maybeThis = (BlockStartNode) f;
            // We're walking from the end to the start and see the start without finding the end first, block is incomplete
            String previousEnd = blockStartToEnd.get(maybeThis.getId());
            if (previousEnd == null) {
                // Only mark incomplete if no end was cached earlier in this (or a prior) scan
                blockStartToEnd.put(maybeThis.getId(), INCOMPLETE);
            }
            if (start.equals(maybeThis)) {
                // Early exit, the end can't be encountered before the start
                return null;
            }
        }
    }
    return null;
}
static void verifySucceededCleanly(Jenkins j, WorkflowRun run) throws Exception { Assert.assertEquals(Result.SUCCESS, run.getResult()); int outputHash = run.getLog().hashCode(); FlowExecution exec = run.getExecution(); verifyCompletedCleanly(j, run); // Confirm the flow graph is fully navigable and contains the heads with appropriate ending DepthFirstScanner scan = new DepthFirstScanner(); List<FlowNode> allNodes = scan.allNodes(exec); FlowNode endNode = exec.getCurrentHeads().get(0); Assert.assertEquals(FlowEndNode.class, endNode.getClass()); assert allNodes.contains(endNode); Assert.assertEquals(8, allNodes.size()); // Graph structure assertions Assert.assertEquals(2, scan.filteredNodes(endNode, (Predicate)(Predicates.instanceOf(StepStartNode.class))).size()); Assert.assertEquals(2, scan.filteredNodes(endNode, (Predicate)(Predicates.instanceOf(StepEndNode.class))).size()); Assert.assertEquals(1, scan.filteredNodes(endNode, (Predicate)(Predicates.instanceOf(FlowStartNode.class))).size()); Predicate<FlowNode> sleepOrSemaphoreMatch = Predicates.or( new NodeStepNamePredicate(StepDescriptor.byFunctionName("semaphore").getId()), new NodeStepNamePredicate(StepDescriptor.byFunctionName("sleep").getId()) ); Assert.assertEquals(1, scan.filteredNodes(endNode, sleepOrSemaphoreMatch).size()); Assert.assertEquals(1, scan.filteredNodes(endNode, new NodeStepNamePredicate(StepDescriptor.byFunctionName("echo").getId())).size()); for (FlowNode node : (List<FlowNode>)(scan.filteredNodes(endNode, (Predicate)(Predicates.instanceOf(StepNode.class))))) { Assert.assertNotNull("Node: "+node.toString()+" does not have a TimingAction", node.getAction(TimingAction.class)); } assertHasTimingAction(run.getExecution()); }
/** A node is a viable candidate unless blacklisted or an already-visited potential parallel start. */
protected boolean testCandidate(FlowNode f, Collection<FlowNode> blackList) {
    // Explicitly blacklisted nodes are never candidates.
    if (blackList.contains(f)) {
        return false;
    }
    // Re-encountering a possible parallel start we already visited means we should not revisit it.
    boolean revisitedParallelStart = possibleParallelStart(f) && visited.contains(f);
    return !revisitedParallelStart;
}
/**
 * Assert the JUnit results attributed to the named stage match the expected
 * suite, test, and failure counts.
 */
public static void assertStageResults(WorkflowRun run, int suiteCount, int testCount, int failCount, String stageName) {
    // Locate the stage node by name within the run's flow graph.
    FlowNode stageNode = new DepthFirstScanner().findFirstMatch(run.getExecution(), stageForName(stageName));
    assertNotNull(stageNode);
    // Delegate the count checks to the shared block-level assertion.
    assertBlockResults(run, suiteCount, testCount, failCount, stageNode);
}
// Verifies console annotation of metasteps when one step argument exceeds the retention
// limit and is therefore dropped from ArgumentsAction (NotStoredReason).
@Ignore("TODO ArgumentsAction.getResolvedArguments does not yet handle NotStoredReason sensibly")
@Test
public void metastepConsoleNotStoredArgument() throws Exception {
    WorkflowJob p = r.jenkins.createProject(WorkflowJob.class, "p");
    // Pad the junit pattern past the argument-retention limit so it is not stored
    String spaces = StringUtils.repeat(" ", 1025); // cf. ArgumentsAction.MAX_RETAINED_LENGTH
    p.setDefinition(new CpsFlowDefinition(
            "node {\n" +
            " configFileProvider([]) {\n" +
            " writeFile text: '''<testsuite name='a'><testcase name='c'><error>failed</error></testcase></testsuite>''', file: 'x.xml'\n" +
            " junit 'x.xml," + spaces + "'\n" +
            " }\n" +
            "}", true));
    WorkflowRun b = r.assertBuildStatus(Result.UNSTABLE, p.scheduleBuild2(0));
    // The junit metastep should surface as a single "step" node with its own display names
    List<FlowNode> coreStepNodes = new DepthFirstScanner().filteredNodes(b.getExecution(), new NodeStepTypePredicate("step"));
    assertThat(coreStepNodes, hasSize(1));
    assertEquals("junit", coreStepNodes.get(0).getDisplayFunctionName());
    assertEquals(r.jenkins.getDescriptor(JUnitResultArchiver.class).getDisplayName(), coreStepNodes.get(0).getDisplayName());
    // The wrapper metastep: only the non-body StepStartNode counts
    List<FlowNode> coreWrapperStepNodes = new DepthFirstScanner().filteredNodes(b.getExecution(), Predicates.and(new NodeStepTypePredicate("wrap"), new Predicate<FlowNode>() {
        @Override
        public boolean apply(FlowNode n) {
            return n instanceof StepStartNode && !((StepStartNode) n).isBody();
        }
    }));
    assertThat(coreWrapperStepNodes, hasSize(1));
    assertEquals("configFileProvider", coreWrapperStepNodes.get(0).getDisplayFunctionName());
    assertEquals(r.jenkins.getDescriptor(ConfigFileBuildWrapper.class).getDisplayName() + " : Start", coreWrapperStepNodes.get(0).getDisplayName());
    // Console log shows the metastep function names, not the raw step/wrap forms
    r.assertLogContains("[Pipeline] junit", b);
    r.assertLogContains("[Pipeline] configFileProvider", b);
    r.assertLogContains("[Pipeline] // configFileProvider", b);
}
/** Verify that we retain and flowgraph start with the included nodes, which must be in sorted order */ void assertIncludesNodes(List<FlowNode> prefixNodes, WorkflowRun run) throws Exception { List<FlowNode> nodes = new DepthFirstScanner().allNodes(run.getExecution()); nodes.sort(FlowScanningUtils.ID_ORDER_COMPARATOR); // Make sure we have the starting nodes at least assert prefixNodes.size() <= nodes.size(); for (int i=0; i<prefixNodes.size(); i++) { try { FlowNode match = prefixNodes.get(i); FlowNode after = nodes.get(i); Assert.assertEquals(match.getDisplayFunctionName(), after.getDisplayFunctionName()); } catch (Exception ex) { throw new Exception("Error with flownode at index="+i, ex); } } }
/**
 * Assert that the named stage exists in the flow graph but was marked NOT_EXECUTED
 * (i.e. it was skipped rather than run).
 */
private void assertStageIsNotExecuted(@Nonnull String stageName, @Nonnull WorkflowRun run, @Nonnull FlowExecution execution) {
    List<FlowNode> heads = execution.getCurrentHeads();
    DepthFirstScanner scanner = new DepthFirstScanner();
    // Stage block must exist and open with a BlockStartNode
    FlowNode startStage = scanner.findFirstMatch(heads, null, Utils.isStageWithOptionalName(stageName));
    assertNotNull(startStage);
    assertTrue(startStage instanceof BlockStartNode);
    // Its matching end node must exist too (the stage block closed)
    FlowNode endStage = scanner.findFirstMatch(heads, null, Utils.endNodeForStage((BlockStartNode)startStage));
    assertNotNull(endStage);
    // The whole chunk between start and end must compute as NOT_EXECUTED
    assertEquals(GenericStatus.NOT_EXECUTED, StatusAndTiming.computeChunkStatus(run, null, startStage, endStage, null));
}
}
// Verifies metastep console annotation when steps are invoked with raw object instances
// (requires sandbox off, hence the "false" CpsFlowDefinition flag).
@Test
public void metastepConsoleRaw() throws Exception {
    WorkflowJob p = r.jenkins.createProject(WorkflowJob.class, "p");
    p.setDefinition(new CpsFlowDefinition(
            "node {\n" +
            " wrap(new org.jenkinsci.plugins.configfiles.buildwrapper.ConfigFileBuildWrapper([])) {\n" +
            " writeFile text: '''<testsuite name='a'><testcase name='c'><error>failed</error></testcase></testsuite>''', file: 'x.xml'\n" +
            " step(new hudson.tasks.ArtifactArchiver('x.xml'))\n" +
            " }\n" +
            "}", false));
    WorkflowRun b = r.buildAndAssertSuccess(p);
    // The step() metastep should surface as a single node named for the wrapped builder
    List<FlowNode> coreStepNodes = new DepthFirstScanner().filteredNodes(b.getExecution(), new NodeStepTypePredicate("step"));
    assertThat(coreStepNodes, hasSize(1));
    assertEquals("archiveArtifacts", coreStepNodes.get(0).getDisplayFunctionName());
    assertEquals(r.jenkins.getDescriptor(ArtifactArchiver.class).getDisplayName(), coreStepNodes.get(0).getDisplayName());
    // The wrap() metastep: only the non-body StepStartNode counts
    List<FlowNode> coreWrapperStepNodes = new DepthFirstScanner().filteredNodes(b.getExecution(), Predicates.and(new NodeStepTypePredicate("wrap"), new Predicate<FlowNode>() {
        @Override
        public boolean apply(FlowNode n) {
            return n instanceof StepStartNode && !((StepStartNode) n).isBody();
        }
    }));
    assertThat(coreWrapperStepNodes, hasSize(1));
    assertEquals("configFileProvider", coreWrapperStepNodes.get(0).getDisplayFunctionName());
    assertEquals(r.jenkins.getDescriptor(ConfigFileBuildWrapper.class).getDisplayName() + " : Start", coreWrapperStepNodes.get(0).getDisplayName());
    // Console log shows the metastep function names, not the raw step/wrap forms
    r.assertLogContains("[Pipeline] archiveArtifacts", b);
    r.assertLogContains("[Pipeline] configFileProvider", b);
    r.assertLogContains("[Pipeline] // configFileProvider", b);
}
/**
 * Start a sleeper job at MAX_SURVIVABILITY with resume blocked, confirm the flow
 * storage persisted fully, and capture the log plus the id-sorted node list for
 * comparison after restart.
 */
@Override
public void evaluate() throws Throwable {
    WorkflowRun run = createAndRunSleeperJob(story.j.jenkins, jobName, FlowDurabilityHint.MAX_SURVIVABILITY);
    run.getParent().setResumeBlocked(true);
    FlowExecution exec = run.getExecution();
    if (exec instanceof CpsFlowExecution) {
        // MAX_SURVIVABILITY should persist every node immediately
        assert ((CpsFlowExecution) exec).getStorage().isPersistedFully();
    }
    // Capture pre-restart log and node list (sorted by id) for later comparison
    logStart[0] = JenkinsRule.getLog(run);
    nodesOut.addAll(new DepthFirstScanner().allNodes(run.getExecution()));
    nodesOut.sort(FlowScanningUtils.ID_ORDER_COMPARATOR);
    // NOTE: removed dead local `Jenkins jenkins = story.j.jenkins;` — it was never used.
} });
/**
 * Assert the JUnit results attributed to the named parallel branch match the expected
 * suite, test, and failure counts, and that passed tests carry the
 * "stage / branch[ / innerStage] / testName" display-name prefix.
 */
public static void assertBranchResults(WorkflowRun run, int suiteCount, int testCount, int failCount, String branchName, String stageName, String innerStageName) {
    // Locate the branch node by name within the run's flow graph.
    FlowNode branchNode = new DepthFirstScanner().findFirstMatch(run.getExecution(), branchForName(branchName));
    assertNotNull(branchNode);
    TestResult branchResult = assertBlockResults(run, suiteCount, testCount, failCount, branchNode);

    // Build the expected display-name prefix, appending the inner stage only when present.
    String namePrefix = innerStageName == null
            ? stageName + " / " + branchName
            : stageName + " / " + branchName + " / " + innerStageName;
    for (CaseResult passed : branchResult.getPassedTests()) {
        assertEquals(namePrefix + " / " + passed.getTransformedTestName(), passed.getDisplayName());
    }
}
// Verifies metastep console annotation for the symbol-based forms
// (archiveArtifacts / configFileProvider) in a sandboxed pipeline.
@Issue("JENKINS-45109")
@Test
public void metastepConsole() throws Exception {
    WorkflowJob p = r.jenkins.createProject(WorkflowJob.class, "p");
    p.setDefinition(new CpsFlowDefinition(
            "node {\n" +
            " configFileProvider([]) {\n" +
            " writeFile text: '''<testsuite name='a'><testcase name='c'><error>failed</error></testcase></testsuite>''', file: 'x.xml'\n" +
            " archiveArtifacts 'x.xml'\n" +
            " }\n" +
            "}", true));
    WorkflowRun b = r.buildAndAssertSuccess(p);
    // The archiveArtifacts metastep should surface as a single "step" node
    List<FlowNode> coreStepNodes = new DepthFirstScanner().filteredNodes(b.getExecution(), new NodeStepTypePredicate("step"));
    assertThat(coreStepNodes, hasSize(1));
    assertEquals("archiveArtifacts", coreStepNodes.get(0).getDisplayFunctionName());
    assertEquals(r.jenkins.getDescriptor(ArtifactArchiver.class).getDisplayName(), coreStepNodes.get(0).getDisplayName());
    // The wrapper metastep: only the non-body StepStartNode counts
    List<FlowNode> coreWrapperStepNodes = new DepthFirstScanner().filteredNodes(b.getExecution(), Predicates.and(new NodeStepTypePredicate("wrap"), new Predicate<FlowNode>() {
        @Override
        public boolean apply(FlowNode n) {
            return n instanceof StepStartNode && !((StepStartNode) n).isBody();
        }
    }));
    assertThat(coreWrapperStepNodes, hasSize(1));
    assertEquals("configFileProvider", coreWrapperStepNodes.get(0).getDisplayFunctionName());
    assertEquals(r.jenkins.getDescriptor(ConfigFileBuildWrapper.class).getDisplayName() + " : Start", coreWrapperStepNodes.get(0).getDisplayName());
    // Console log shows the metastep function names
    r.assertLogContains("[Pipeline] archiveArtifacts", b);
    r.assertLogContains("[Pipeline] configFileProvider", b);
    r.assertLogContains("[Pipeline] // configFileProvider", b);
}
/**
 * Start a sleeper job at PERFORMANCE_OPTIMIZED durability, force persistence via a
 * pause/unpause cycle, capture pre-restart log and node list, then mark the execution
 * as NOT persisted cleanly so the restart path under test is exercised.
 */
@Override
public void evaluate() throws Throwable {
    WorkflowRun run = createAndRunSleeperJob(story.j.jenkins, "durableAgainstClean", FlowDurabilityHint.PERFORMANCE_OPTIMIZED);
    Assert.assertEquals(FlowDurabilityHint.PERFORMANCE_OPTIMIZED, run.getExecution().getDurabilityHint());
    logStart[0] = JenkinsRule.getLog(run);
    if (run.getExecution() instanceof CpsFlowExecution) {
        // Pause and unPause to force persistence
        CpsFlowExecution cpsFlow = (CpsFlowExecution)(run.getExecution());
        cpsFlow.pause(true);
        // Wait up to 5s for the pause to take effect
        long timeout = System.nanoTime()+TimeUnit.NANOSECONDS.convert(5, TimeUnit.SECONDS);
        while(System.nanoTime() < timeout && !cpsFlow.isPaused()) {
            Thread.sleep(100L);
        }
        // Snapshot the node list (sorted by id) while persisted
        nodesOut.addAll(new DepthFirstScanner().allNodes(run.getExecution()));
        nodesOut.sort(FlowScanningUtils.ID_ORDER_COMPARATOR);
        cpsFlow.pause(false);
        // Wait up to 5s for the unpause to take effect
        timeout = System.nanoTime()+TimeUnit.NANOSECONDS.convert(5, TimeUnit.SECONDS);
        while(System.nanoTime() < timeout && cpsFlow.isPaused()) {
            Thread.sleep(100L);
        }

        // Ensures we're marked as can-not-resume
        cpsFlow.persistedClean = false;
        cpsFlow.saveOwner();
    }
} });
// After stage 'foo' fails, subsequent stages 'bar' and 'baz' must be marked
// skipped-for-failure while 'foo' itself is failed-and-continued.
@Test
public void skippedStagesForFailure() throws Exception {
    WorkflowRun b = expect(Result.FAILURE, "skippedStagesForFailure")
            .logContains("[Pipeline] { (foo)", "hello", "I have failed")
            .logNotContains("I will be skipped", "I also will be skipped", "I have succeeded")
            .go();
    FlowExecution execution = b.getExecution();
    assertNotNull(execution);
    assertNotNull(execution.getCauseOfFailure());
    Collection<FlowNode> heads = execution.getCurrentHeads();
    DepthFirstScanner scanner = new DepthFirstScanner();
    // 'foo' failed and continued — it must NOT be tagged as skipped
    assertNull(scanner.findFirstMatch(heads, stageStatusPredicate("foo", Utils.getStageStatusMetadata().getSkippedForFailure())));
    assertNotNull(scanner.findFirstMatch(heads, stageStatusPredicate("foo", Utils.getStageStatusMetadata().getFailedAndContinued())));
    // Both later stages must be tagged skipped-for-failure
    assertNotNull(scanner.findFirstMatch(heads, stageStatusPredicate("bar", Utils.getStageStatusMetadata().getSkippedForFailure())));
    assertNotNull(scanner.findFirstMatch(heads, stageStatusPredicate("baz", Utils.getStageStatusMetadata().getSkippedForFailure())));
}
// Verifies metastep console annotation when steps are invoked via the $class map syntax.
@Test
public void metastepConsoleShellClass() throws Exception {
    WorkflowJob p = r.jenkins.createProject(WorkflowJob.class, "p");
    p.setDefinition(new CpsFlowDefinition(
            "node {\n" +
            " wrap([$class: 'ConfigFileBuildWrapper', managedFiles: []]) {\n" +
            " writeFile text: '''<testsuite name='a'><testcase name='c'><error>failed</error></testcase></testsuite>''', file: 'x.xml'\n" +
            " step([$class: 'ArtifactArchiver', artifacts: 'x.xml'])\n" +
            " }\n" +
            "}", true));
    WorkflowRun b = r.buildAndAssertSuccess(p);
    // The step() metastep should surface as a single node named for the wrapped builder
    List<FlowNode> coreStepNodes = new DepthFirstScanner().filteredNodes(b.getExecution(), new NodeStepTypePredicate("step"));
    assertThat(coreStepNodes, hasSize(1));
    assertEquals("archiveArtifacts", coreStepNodes.get(0).getDisplayFunctionName());
    assertEquals(r.jenkins.getDescriptor(ArtifactArchiver.class).getDisplayName(), coreStepNodes.get(0).getDisplayName());
    // The wrap() metastep: only the non-body StepStartNode counts
    List<FlowNode> coreWrapperStepNodes = new DepthFirstScanner().filteredNodes(b.getExecution(), Predicates.and(new NodeStepTypePredicate("wrap"), new Predicate<FlowNode>() {
        @Override
        public boolean apply(FlowNode n) {
            return n instanceof StepStartNode && !((StepStartNode) n).isBody();
        }
    }));
    assertThat(coreWrapperStepNodes, hasSize(1));
    assertEquals("configFileProvider", coreWrapperStepNodes.get(0).getDisplayFunctionName());
    assertEquals(r.jenkins.getDescriptor(ConfigFileBuildWrapper.class).getDisplayName() + " : Start", coreWrapperStepNodes.get(0).getDisplayName());
    // Console log shows the metastep function names, not the raw step/wrap forms
    r.assertLogContains("[Pipeline] archiveArtifacts", b);
    r.assertLogContains("[Pipeline] configFileProvider", b);
    r.assertLogContains("[Pipeline] // configFileProvider", b);
}
/**
 * Start a sleeper job at MAX_SURVIVABILITY with resume initially allowed, confirm the
 * flow storage persisted fully, capture log plus id-sorted node list, then block resume
 * so the restart path under test is exercised.
 */
@Override
public void evaluate() throws Throwable {
    WorkflowRun run = createAndRunSleeperJob(story.j.jenkins, jobName, FlowDurabilityHint.MAX_SURVIVABILITY);
    run.getParent().setResumeBlocked(false);
    FlowExecution exec = run.getExecution();
    if (exec instanceof CpsFlowExecution) {
        // MAX_SURVIVABILITY should persist every node immediately
        assert ((CpsFlowExecution) exec).getStorage().isPersistedFully();
    }
    // Capture pre-restart log and node list (sorted by id) for later comparison
    logStart[0] = JenkinsRule.getLog(run);
    nodesOut.addAll(new DepthFirstScanner().allNodes(run.getExecution()));
    nodesOut.sort(FlowScanningUtils.ID_ORDER_COMPARATOR);
    // Block resume only after the snapshot, so the restart must cope with it
    run.getParent().setResumeBlocked(true);
    // NOTE: removed dead local `Jenkins jenkins = story.j.jenkins;` — it was never used.
} });
FlowExecution execution = owner.get(); if (execution != null) { previousHadStep = new DepthFirstScanner().findFirstMatch(execution, new NodeStepTypePredicate(step.getDescriptor())) != null;
getFileM.setAccessible(true); List<FlowNode> nodes = new DepthFirstScanner().allNodes(execution.getCurrentHeads()); Collections.sort(nodes, FlowScanningUtils.ID_ORDER_COMPARATOR);