diff --git a/pom.xml b/pom.xml
index e470981..a8d6b1f 100644
--- a/pom.xml
+++ b/pom.xml
@@ -56,6 +56,11 @@
<dependency>
<groupId>org.jenkins-ci.plugins</groupId>
<artifactId>parameterized-trigger</artifactId>
+ <optional>true</optional>
+ </dependency>
+ <dependency>
+ <groupId>org.jenkins-ci.plugins</groupId>
+ <artifactId>variant</artifactId>
</dependency>
<dependency>
<groupId>org.jenkins-ci.plugins</groupId>
diff --git a/src/main/java/org/jenkinsci/plugins/parallel_test_executor/MultipleBinaryFileParameterFactory.java b/src/main/java/org/jenkinsci/plugins/parallel_test_executor/MultipleBinaryFileParameterFactory.java
index 5e2a689..8cfd731 100644
--- a/src/main/java/org/jenkinsci/plugins/parallel_test_executor/MultipleBinaryFileParameterFactory.java
+++ b/src/main/java/org/jenkinsci/plugins/parallel_test_executor/MultipleBinaryFileParameterFactory.java
@@ -2,7 +2,6 @@
import com.google.common.collect.Lists;
import hudson.AbortException;
-import hudson.Extension;
import hudson.FilePath;
import hudson.model.AbstractBuild;
import hudson.model.Action;
@@ -19,6 +18,7 @@
import java.io.IOException;
import java.util.List;
import java.util.logging.Logger;
+import org.jenkinsci.plugins.variant.OptionalExtension;
/**
* Essentially a copy-paste of {@link hudson.plugins.parameterizedtrigger.BinaryFileParameterFactory} that takes a
@@ -87,7 +87,7 @@ public Action getAction(AbstractBuild<?, ?> build, TaskListener listener) throws
}
- @Extension
+ @OptionalExtension(requirePlugins = "parameterized-trigger")
public static class DescriptorImpl extends AbstractBuildParameterFactoryDescriptor {
@Override
public String getDisplayName() {
diff --git a/src/main/java/org/jenkinsci/plugins/parallel_test_executor/ParallelTestExecutor.java b/src/main/java/org/jenkinsci/plugins/parallel_test_executor/ParallelTestExecutor.java
index 546f8bc..8db2e17 100644
--- a/src/main/java/org/jenkinsci/plugins/parallel_test_executor/ParallelTestExecutor.java
+++ b/src/main/java/org/jenkinsci/plugins/parallel_test_executor/ParallelTestExecutor.java
@@ -1,17 +1,12 @@
package org.jenkinsci.plugins.parallel_test_executor;
-import com.google.common.base.Predicate;
import com.google.common.collect.ImmutableSet;
import edu.umd.cs.findbugs.annotations.CheckForNull;
-import edu.umd.cs.findbugs.annotations.NonNull;
-import edu.umd.cs.findbugs.annotations.Nullable;
import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
import hudson.AbortException;
-import hudson.Extension;
import hudson.FilePath;
import hudson.Launcher;
import hudson.Util;
-import hudson.console.ModelHyperlinkNote;
import hudson.model.AbstractBuild;
import hudson.model.AbstractProject;
import hudson.model.Action;
@@ -19,9 +14,7 @@
import hudson.model.BuildListener;
import hudson.model.Item;
import hudson.model.ItemGroup;
-import hudson.model.Job;
import hudson.model.Result;
-import hudson.model.Run;
import hudson.model.TaskListener;
import hudson.plugins.parameterizedtrigger.AbstractBuildParameterFactory;
import hudson.plugins.parameterizedtrigger.AbstractBuildParameters;
@@ -30,35 +23,19 @@
import hudson.plugins.parameterizedtrigger.TriggerBuilder;
import hudson.tasks.BuildStepDescriptor;
import hudson.tasks.Builder;
-import hudson.tasks.junit.ClassResult;
import hudson.tasks.junit.JUnitResultArchiver;
-import hudson.tasks.test.AbstractTestResultAction;
-import hudson.tasks.test.TabulatedResult;
-import hudson.tasks.test.TestResult;
import java.io.IOException;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.PrintWriter;
import java.nio.charset.StandardCharsets;
-import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
-import java.util.Map;
-import java.util.PriorityQueue;
-import java.util.TreeMap;
import java.util.concurrent.atomic.AtomicInteger;
-import java.util.logging.Level;
import java.util.logging.Logger;
-import java.util.stream.Collectors;
-import jenkins.scm.api.SCMHead;
-import jenkins.scm.api.mixin.ChangeRequestSCMHead;
import org.jenkinsci.plugins.parallel_test_executor.testmode.TestMode;
-import org.jenkinsci.plugins.workflow.actions.LabelAction;
-import org.jenkinsci.plugins.workflow.flow.FlowExecution;
-import org.jenkinsci.plugins.workflow.flow.FlowExecutionOwner;
-import org.jenkinsci.plugins.workflow.graph.FlowNode;
-import org.jenkinsci.plugins.workflow.graphanalysis.DepthFirstScanner;
+import org.jenkinsci.plugins.variant.OptionalExtension;
import org.kohsuke.stapler.AncestorInPath;
import org.kohsuke.stapler.DataBoundConstructor;
import org.kohsuke.stapler.DataBoundSetter;
@@ -167,7 +144,7 @@ public boolean perform(AbstractBuild<?, ?> build, Launcher launcher, BuildListen
}
FilePath dir = workspace.child("test-splits");
dir.deleteRecursive();
- List<InclusionExclusionPattern> splits = findTestSplits(parallelism, testMode, build, listener, includesPatternFile != null,
+ List<InclusionExclusionPattern> splits = Splitter.findTestSplits(parallelism, testMode, build, listener, includesPatternFile != null,
null, build.getWorkspace());
for (int i = 0; i < splits.size(); i++) {
InclusionExclusionPattern pattern = splits.get(i);
@@ -189,91 +166,6 @@ public boolean perform(AbstractBuild<?, ?> build, Launcher launcher, BuildListen
return true;
}
- static List<InclusionExclusionPattern> findTestSplits(Parallelism parallelism, @CheckForNull TestMode inputTestMode, Run<?,?> build, TaskListener listener,
- boolean generateInclusions,
- @CheckForNull final String stageName, @CheckForNull FilePath workspace) throws InterruptedException {
- TestMode testMode = inputTestMode == null ? TestMode.getDefault() : inputTestMode;
- TestResult tr = findPreviousTestResult(build, listener);
- Map<String, TestEntity> data = new TreeMap<>();
- if (tr != null) {
- Run<?,?> prevRun = tr.getRun();
- if (prevRun instanceof FlowExecutionOwner.Executable && stageName != null) {
- FlowExecutionOwner owner = ((FlowExecutionOwner.Executable)prevRun).asFlowExecutionOwner();
- if (owner != null) {
- FlowExecution execution = owner.getOrNull();
- if (execution != null) {
- DepthFirstScanner scanner = new DepthFirstScanner();
- FlowNode stageId = scanner.findFirstMatch(execution, new StageNamePredicate(stageName));
- if (stageId != null) {
- listener.getLogger().println("Found stage \"" + stageName + "\" in " + prevRun.getFullDisplayName());
- tr = ((hudson.tasks.junit.TestResult) tr).getResultForPipelineBlock(stageId.getId());
- } else {
- listener.getLogger().println("No stage \"" + stageName + "\" found in " + prevRun.getFullDisplayName());
- }
- }
- }
- }
- collect(tr, data, testMode);
- } else {
- listener.getLogger().println("No record available, try to find test classes");
- data = testMode.estimate(workspace, listener);
- if(data.isEmpty()) {
- listener.getLogger().println("No test classes was found, so executing everything in one place");
- return Collections.singletonList(new InclusionExclusionPattern(Collections.emptyList(), false));
- }
- }
-
- // sort in the descending order of the duration
- List<TestEntity> sorted = new ArrayList<>(data.values());
- Collections.sort(sorted);
-
- // degree of the parallelism. we need minimum 1
- final int n = Math.max(1, parallelism.calculate(sorted));
-
- List<Knapsack> knapsacks = new ArrayList<>(n);
- for (int i = 0; i < n; i++)
- knapsacks.add(new Knapsack());
-
- /*
- This packing problem is a NP-complete problem, so we solve
- this simply by a greedy algorithm. We pack heavier items first,
- and the result should be of roughly equal size
- */
- PriorityQueue<Knapsack> q = new PriorityQueue<>(knapsacks);
- for (var testEntity : sorted) {
- Knapsack k = q.poll();
- k.add(testEntity);
- q.add(k);
- }
-
- long total = 0, min = Long.MAX_VALUE, max = Long.MIN_VALUE;
- for (Knapsack k : knapsacks) {
- total += k.total;
- max = Math.max(max, k.total);
- min = Math.min(min, k.total);
- }
- long average = total / n;
- long variance = 0;
- for (Knapsack k : knapsacks) {
- variance += pow(k.total - average);
- }
- variance /= n;
- long stddev = (long) Math.sqrt(variance);
- listener.getLogger().printf("%d test %s (%dms) divided into %d sets. Min=%dms, Average=%dms, Max=%dms, stddev=%dms%n",
- data.size(), testMode.getWord(), total, n, min, average, max, stddev);
-
- List<InclusionExclusionPattern> r = new ArrayList<>();
- for (int i = 0; i < n; i++) {
- Knapsack k = knapsacks.get(i);
- boolean shouldIncludeElements = generateInclusions && i != 0;
- List<String> elements = sorted.stream().filter(testEntity -> shouldIncludeElements == (testEntity.knapsack == k))
- .flatMap(testEntity -> testEntity.getElements().stream())
- .collect(Collectors.toList());
- r.add(new InclusionExclusionPattern(elements, shouldIncludeElements));
- }
- return r;
- }
-
/**
* Collects all the test reports
*/
@@ -320,82 +212,7 @@ public Action getAction(AbstractBuild<?, ?> build, TaskListener listener) throws
return new TriggerBuilder(config);
}
-
- private static long pow(long l) {
- return l * l;
- }
-
- /**
- * Visits the structure inside {@link hudson.tasks.test.TestResult}.
- */
- private static void collect(TestResult r, Map<String, TestEntity> data, TestMode testMode) {
- var queue = new ArrayDeque<TestResult>();
- queue.push(r);
- while (!queue.isEmpty()) {
- var current = queue.pop();
- if (current instanceof ClassResult) {
- var classResult = (ClassResult) current;
- LOGGER.log(Level.FINE, () -> "Retrieving test entities from " + classResult.getFullName());
- data.putAll(testMode.getTestEntitiesMap(classResult));
- } else if (current instanceof TabulatedResult) {
- LOGGER.log(Level.FINE, () -> "Considering children of " + current.getFullName());
- queue.addAll(((TabulatedResult) current).getChildren());
- } else {
- LOGGER.log(Level.FINE, () -> "Ignoring " + current.getFullName());
- }
- }
- }
-
- private static TestResult findPreviousTestResult(Run<?, ?> b, TaskListener listener) {
- Job<?, ?> project = b.getParent();
- // Look for test results starting with the previous build
- TestResult result = getTestResult(project, b.getPreviousBuild(), listener);
- if (result == null) {
- // Look for test results from the target branch builds if this is a change request.
- SCMHead head = SCMHead.HeadByItem.findHead(project);
- if (head instanceof ChangeRequestSCMHead) {
- SCMHead target = ((ChangeRequestSCMHead) head).getTarget();
- Item targetBranch = project.getParent().getItem(target.getName());
- if (targetBranch != null && targetBranch instanceof Job) {
- result = getTestResult(project, ((Job<?, ?>) targetBranch).getLastBuild(), listener);
- }
- }
- }
- return result;
- }
-
-
- static TestResult getTestResult(Job<?, ?> originProject, Run<?, ?> b, TaskListener listener) {
- TestResult result = null;
- for (int i = 0; i < NUMBER_OF_BUILDS_TO_SEARCH; i++) {// limit the search to a small number to avoid loading too much
- if (b == null) break;
- if (RESULTS_OF_BUILDS_TO_CONSIDER.contains(b.getResult()) && !b.isBuilding()) {
- String hyperlink = ModelHyperlinkNote.encodeTo('/' + b.getUrl(), originProject != b.getParent() ? b.getFullDisplayName() : b.getDisplayName());
- try {
- AbstractTestResultAction tra = b.getAction(AbstractTestResultAction.class);
- if (tra != null) {
- Object o = tra.getResult();
- if (o instanceof TestResult) {
- TestResult tr = (TestResult) o;
- if (tr.getTotalCount() == 0) {
- listener.getLogger().printf("Build %s has no loadable test results (supposed count %d), skipping%n", hyperlink, tra.getTotalCount());
- } else {
- listener.getLogger().printf("Using build %s as reference%n", hyperlink);
- result = tr;
- break;
- }
- }
- }
- } catch (RuntimeException e) {
- e.printStackTrace(listener.error("Failed to load (corrupt?) build %s, skipping%n", hyperlink));
- }
- }
- b = b.getPreviousBuild();
- }
- return result;
- }
-
- @Extension
+ @OptionalExtension(requirePlugins = "parameterized-trigger")
public static class DescriptorImpl extends BuildStepDescriptor<Builder> {
@Override
public boolean isApplicable(Class<? extends AbstractProject> aClass) {
@@ -412,18 +229,4 @@ public String getDisplayName() {
}
}
- private static class StageNamePredicate implements Predicate<FlowNode> {
- private final String stageName;
- public StageNamePredicate(@NonNull String stageName) {
- this.stageName = stageName;
- }
- @Override
- public boolean apply(@Nullable FlowNode input) {
- if (input != null) {
- LabelAction labelAction = input.getPersistentAction(LabelAction.class);
- return labelAction != null && stageName.equals(labelAction.getDisplayName());
- }
- return false;
- }
- }
}
diff --git a/src/main/java/org/jenkinsci/plugins/parallel_test_executor/RunListenerImpl.java b/src/main/java/org/jenkinsci/plugins/parallel_test_executor/RunListenerImpl.java
index 9bb467e..ad91c12 100644
--- a/src/main/java/org/jenkinsci/plugins/parallel_test_executor/RunListenerImpl.java
+++ b/src/main/java/org/jenkinsci/plugins/parallel_test_executor/RunListenerImpl.java
@@ -1,18 +1,18 @@
package org.jenkinsci.plugins.parallel_test_executor;
-import hudson.Extension;
import hudson.model.AbstractBuild;
import hudson.model.TaskListener;
import hudson.model.listeners.RunListener;
import edu.umd.cs.findbugs.annotations.NonNull;
+import org.jenkinsci.plugins.variant.OptionalExtension;
/**
* Looks for {@link TestCollector} in the build and collects the test reports.
*
* @author Kohsuke Kawaguchi
*/
-@Extension
+@OptionalExtension(requirePlugins = "parameterized-trigger")
public class RunListenerImpl extends RunListener<AbstractBuild<?,?>> {
@Override
public void onCompleted(AbstractBuild<?,?> build, @NonNull TaskListener listener) {
diff --git a/src/main/java/org/jenkinsci/plugins/parallel_test_executor/SplitStep.java b/src/main/java/org/jenkinsci/plugins/parallel_test_executor/SplitStep.java
index 86cca2c..d39ae6c 100644
--- a/src/main/java/org/jenkinsci/plugins/parallel_test_executor/SplitStep.java
+++ b/src/main/java/org/jenkinsci/plugins/parallel_test_executor/SplitStep.java
@@ -132,11 +132,11 @@ protected List<?> run() throws Exception {
FilePath path = context.get(FilePath.class);
if (step.generateInclusions) {
- return ParallelTestExecutor.findTestSplits(step.parallelism, step.testMode, build, listener, step.generateInclusions,
+ return Splitter.findTestSplits(step.parallelism, step.testMode, build, listener, step.generateInclusions,
step.stage, path);
} else {
List<List<String>> result = new ArrayList<>();
- for (InclusionExclusionPattern pattern : ParallelTestExecutor.findTestSplits(step.parallelism, step.testMode, build, listener,
+ for (InclusionExclusionPattern pattern : Splitter.findTestSplits(step.parallelism, step.testMode, build, listener,
step.generateInclusions, step.stage, path)) {
result.add(pattern.getList());
}
diff --git a/src/main/java/org/jenkinsci/plugins/parallel_test_executor/Splitter.java b/src/main/java/org/jenkinsci/plugins/parallel_test_executor/Splitter.java
new file mode 100644
index 0000000..1299d84
--- /dev/null
+++ b/src/main/java/org/jenkinsci/plugins/parallel_test_executor/Splitter.java
@@ -0,0 +1,242 @@
+/*
+ * The MIT License
+ *
+ * Copyright 2024 CloudBees, Inc.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in
+ * all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+ * THE SOFTWARE.
+ */
+
+package org.jenkinsci.plugins.parallel_test_executor;
+
+import com.google.common.base.Predicate;
+import edu.umd.cs.findbugs.annotations.CheckForNull;
+import edu.umd.cs.findbugs.annotations.NonNull;
+import edu.umd.cs.findbugs.annotations.Nullable;
+import hudson.FilePath;
+import hudson.console.ModelHyperlinkNote;
+import hudson.model.Item;
+import hudson.model.Job;
+import hudson.model.Run;
+import hudson.model.TaskListener;
+import hudson.tasks.junit.ClassResult;
+import hudson.tasks.test.AbstractTestResultAction;
+import hudson.tasks.test.TabulatedResult;
+import hudson.tasks.test.TestResult;
+import java.util.ArrayDeque;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+import java.util.Map;
+import java.util.PriorityQueue;
+import java.util.TreeMap;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+import java.util.stream.Collectors;
+import jenkins.scm.api.SCMHead;
+import jenkins.scm.api.mixin.ChangeRequestSCMHead;
+import static org.jenkinsci.plugins.parallel_test_executor.ParallelTestExecutor.NUMBER_OF_BUILDS_TO_SEARCH;
+import static org.jenkinsci.plugins.parallel_test_executor.ParallelTestExecutor.RESULTS_OF_BUILDS_TO_CONSIDER;
+import org.jenkinsci.plugins.parallel_test_executor.testmode.TestMode;
+import org.jenkinsci.plugins.workflow.actions.LabelAction;
+import org.jenkinsci.plugins.workflow.flow.FlowExecution;
+import org.jenkinsci.plugins.workflow.flow.FlowExecutionOwner;
+import org.jenkinsci.plugins.workflow.graph.FlowNode;
+import org.jenkinsci.plugins.workflow.graphanalysis.DepthFirstScanner;
+
+class Splitter {
+
+ private static final Logger LOGGER = Logger.getLogger(Splitter.class.getName());
+
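+ /**
+  * Splits the known test entities into sets of roughly equal expected duration; the number of
+  * sets is derived from {@code parallelism}, and timings come from a previous test result when
+  * one can be found, otherwise from {@code testMode.estimate} applied to the workspace.
+  */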
+ static List<InclusionExclusionPattern> findTestSplits(Parallelism parallelism, @CheckForNull TestMode inputTestMode, Run<?,?> build, TaskListener listener,
+ boolean generateInclusions,
+ @CheckForNull final String stageName, @CheckForNull FilePath workspace) throws InterruptedException {
+ TestMode testMode = inputTestMode == null ? TestMode.getDefault() : inputTestMode;
+ TestResult tr = findPreviousTestResult(build, listener);
+ Map<String, TestEntity> data = new TreeMap<>();
+ if (tr != null) {
+ Run<?,?> prevRun = tr.getRun();
+ if (prevRun instanceof FlowExecutionOwner.Executable && stageName != null) {
+ FlowExecutionOwner owner = ((FlowExecutionOwner.Executable)prevRun).asFlowExecutionOwner();
+ if (owner != null) {
+ FlowExecution execution = owner.getOrNull();
+ if (execution != null) {
+ DepthFirstScanner scanner = new DepthFirstScanner();
+ FlowNode stageId = scanner.findFirstMatch(execution, new StageNamePredicate(stageName));
+ if (stageId != null) {
+ listener.getLogger().println("Found stage \"" + stageName + "\" in " + prevRun.getFullDisplayName());
+ tr = ((hudson.tasks.junit.TestResult) tr).getResultForPipelineBlock(stageId.getId());
+ } else {
+ listener.getLogger().println("No stage \"" + stageName + "\" found in " + prevRun.getFullDisplayName());
+ }
+ }
+ }
+ }
+ collect(tr, data, testMode);
+ } else {
+ listener.getLogger().println("No record available, try to find test classes");
+ data = testMode.estimate(workspace, listener);
+ if(data.isEmpty()) {
+ listener.getLogger().println("No test classes was found, so executing everything in one place");
+ return Collections.singletonList(new InclusionExclusionPattern(Collections.emptyList(), false));
+ }
+ }
+
+ // sort in the descending order of the duration
+ List<TestEntity> sorted = new ArrayList<>(data.values());
+ Collections.sort(sorted);
+
+ // degree of the parallelism. we need minimum 1
+ final int n = Math.max(1, parallelism.calculate(sorted));
+
+ List<ParallelTestExecutor.Knapsack> knapsacks = new ArrayList<>(n);
+ for (int i = 0; i < n; i++)
+ knapsacks.add(new ParallelTestExecutor.Knapsack());
+
+ /*
+ This packing problem is a NP-complete problem, so we solve
+ this simply by a greedy algorithm. We pack heavier items first,
+ and the result should be of roughly equal size
+ */
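+ // Note: this assumes Knapsack's natural ordering ranks knapsacks by accumulated duration,
+ // so q.poll() hands back the currently lightest split and each test entity lands in the
+ // emptiest set at that point in the iteration.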
+ PriorityQueue<ParallelTestExecutor.Knapsack> q = new PriorityQueue<>(knapsacks);
+ for (var testEntity : sorted) {
+ ParallelTestExecutor.Knapsack k = q.poll();
+ k.add(testEntity);
+ q.add(k);
+ }
+
+ long total = 0, min = Long.MAX_VALUE, max = Long.MIN_VALUE;
+ for (ParallelTestExecutor.Knapsack k : knapsacks) {
+ total += k.total;
+ max = Math.max(max, k.total);
+ min = Math.min(min, k.total);
+ }
+ long average = total / n;
+ long variance = 0;
+ for (ParallelTestExecutor.Knapsack k : knapsacks) {
+ variance += pow(k.total - average);
+ }
+ variance /= n;
+ long stddev = (long) Math.sqrt(variance);
+ listener.getLogger().printf("%d test %s (%dms) divided into %d sets. Min=%dms, Average=%dms, Max=%dms, stddev=%dms%n",
+ data.size(), testMode.getWord(), total, n, min, average, max, stddev);
+
+ List<InclusionExclusionPattern> r = new ArrayList<>();
+ for (int i = 0; i < n; i++) {
+ ParallelTestExecutor.Knapsack k = knapsacks.get(i);
+ boolean shouldIncludeElements = generateInclusions && i != 0;
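+ // Split 0 is always expressed as an exclusion list covering everything assigned to the other
+ // knapsacks; the remaining splits become explicit inclusion lists only when generateInclusions
+ // is requested, otherwise they are exclusion lists as well.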
+ List<String> elements = sorted.stream().filter(testEntity -> shouldIncludeElements == (testEntity.knapsack == k))
+ .flatMap(testEntity -> testEntity.getElements().stream())
+ .collect(Collectors.toList());
+ r.add(new InclusionExclusionPattern(elements, shouldIncludeElements));
+ }
+ return r;
+ }
+
+ private static long pow(long l) {
+ return l * l;
+ }
+
+ /**
+ * Visits the structure inside {@link hudson.tasks.test.TestResult}.
+ */
+ private static void collect(TestResult r, Map<String, TestEntity> data, TestMode testMode) {
+ var queue = new ArrayDeque<TestResult>();
+ queue.push(r);
+ while (!queue.isEmpty()) {
+ var current = queue.pop();
+ if (current instanceof ClassResult) {
+ var classResult = (ClassResult) current;
+ LOGGER.log(Level.FINE, () -> "Retrieving test entities from " + classResult.getFullName());
+ data.putAll(testMode.getTestEntitiesMap(classResult));
+ } else if (current instanceof TabulatedResult) {
+ LOGGER.log(Level.FINE, () -> "Considering children of " + current.getFullName());
+ queue.addAll(((TabulatedResult) current).getChildren());
+ } else {
+ LOGGER.log(Level.FINE, () -> "Ignoring " + current.getFullName());
+ }
+ }
+ }
+
+ private static TestResult findPreviousTestResult(Run<?, ?> b, TaskListener listener) {
+ Job<?, ?> project = b.getParent();
+ // Look for test results starting with the previous build
+ TestResult result = getTestResult(project, b.getPreviousBuild(), listener);
+ if (result == null) {
+ // Look for test results from the target branch builds if this is a change request.
+ SCMHead head = SCMHead.HeadByItem.findHead(project);
+ if (head instanceof ChangeRequestSCMHead) {
+ SCMHead target = ((ChangeRequestSCMHead) head).getTarget();
+ Item targetBranch = project.getParent().getItem(target.getName());
+ if (targetBranch != null && targetBranch instanceof Job) {
+ result = getTestResult(project, ((Job<?, ?>) targetBranch).getLastBuild(), listener);
+ }
+ }
+ }
+ return result;
+ }
+
+
+ static TestResult getTestResult(Job<?, ?> originProject, Run<?, ?> b, TaskListener listener) {
+ TestResult result = null;
+ for (int i = 0; i < NUMBER_OF_BUILDS_TO_SEARCH; i++) {// limit the search to a small number to avoid loading too much
+ if (b == null) break;
+ if (RESULTS_OF_BUILDS_TO_CONSIDER.contains(b.getResult()) && !b.isBuilding()) {
+ String hyperlink = ModelHyperlinkNote.encodeTo('/' + b.getUrl(), originProject != b.getParent() ? b.getFullDisplayName() : b.getDisplayName());
+ try {
+ AbstractTestResultAction<?> tra = b.getAction(AbstractTestResultAction.class);
+ if (tra != null) {
+ Object o = tra.getResult();
+ if (o instanceof TestResult) {
+ TestResult tr = (TestResult) o;
+ if (tr.getTotalCount() == 0) {
+ listener.getLogger().printf("Build %s has no loadable test results (supposed count %d), skipping%n", hyperlink, tra.getTotalCount());
+ } else {
+ listener.getLogger().printf("Using build %s as reference%n", hyperlink);
+ result = tr;
+ break;
+ }
+ }
+ }
+ } catch (RuntimeException e) {
+ e.printStackTrace(listener.error("Failed to load (corrupt?) build %s, skipping%n", hyperlink));
+ }
+ }
+ b = b.getPreviousBuild();
+ }
+ return result;
+ }
+
+ private static class StageNamePredicate implements Predicate<FlowNode> {
+ private final String stageName;
+ StageNamePredicate(@NonNull String stageName) {
+ this.stageName = stageName;
+ }
+ @Override
+ public boolean apply(@Nullable FlowNode input) {
+ if (input != null) {
+ LabelAction labelAction = input.getPersistentAction(LabelAction.class);
+ return labelAction != null && stageName.equals(labelAction.getDisplayName());
+ }
+ return false;
+ }
+ }
+
+ private Splitter() {}
+
+}
diff --git a/src/test/java/org/jenkinsci/plugins/parallel_test_executor/ParallelTestExecutorUnitTest.java b/src/test/java/org/jenkinsci/plugins/parallel_test_executor/ParallelTestExecutorUnitTest.java
index 0982bf2..d6a60c4 100644
--- a/src/test/java/org/jenkinsci/plugins/parallel_test_executor/ParallelTestExecutorUnitTest.java
+++ b/src/test/java/org/jenkinsci/plugins/parallel_test_executor/ParallelTestExecutorUnitTest.java
@@ -117,7 +117,7 @@ public void checkTestSplits(Parallelism parallelism, int expectedSplitSize, Test
testResult.tally();
when(action.getResult()).thenReturn(testResult);
- List<InclusionExclusionPattern> splits = ParallelTestExecutor.findTestSplits(parallelism, testMode, build, listener, false, null, null);
+ List<InclusionExclusionPattern> splits = Splitter.findTestSplits(parallelism, testMode, build, listener, false, null, null);
assertEquals(expectedSplitSize, splits.size());
for (InclusionExclusionPattern split : splits) {
assertFalse(split.isIncludes());
@@ -132,7 +132,7 @@ public void testWeDoNotCreateMoreSplitsThanThereAreTests() throws Exception {
when(action.getResult()).thenReturn(testResult);
CountDrivenParallelism parallelism = new CountDrivenParallelism(5);
- List<InclusionExclusionPattern> splits = ParallelTestExecutor.findTestSplits(parallelism, null, build, listener, false, null, null);
+ List<InclusionExclusionPattern> splits = Splitter.findTestSplits(parallelism, null, build, listener, false, null, null);
assertEquals(2, splits.size());
for (InclusionExclusionPattern split : splits) {
assertFalse(split.isIncludes());
@@ -145,7 +145,7 @@ public void findTestCasesWithParameters() throws Exception {
testResult.tally();
when(action.getResult()).thenReturn(testResult);
CountDrivenParallelism parallelism = new CountDrivenParallelism(3);
- List<InclusionExclusionPattern> splits = ParallelTestExecutor.findTestSplits(parallelism, new JavaTestCaseName(), build, listener, false, null, null);
+ List<InclusionExclusionPattern> splits = Splitter.findTestSplits(parallelism, new JavaTestCaseName(), build, listener, false, null, null);
assertEquals(3, splits.size());
var allSplits = splits.stream().flatMap(s -> s.getList().stream()).collect(Collectors.toSet());
assertThat(allSplits, hasSize(20));
@@ -169,7 +169,7 @@ private void checkTestSplitsInclusions(Parallelism parallelism, int expectedSpli
testResult.tally();
when(action.getResult()).thenReturn(testResult);
- List<InclusionExclusionPattern> splits = ParallelTestExecutor.findTestSplits(parallelism, testMode, build, listener, true, null, null);
+ List<InclusionExclusionPattern> splits = Splitter.findTestSplits(parallelism, testMode, build, listener, true, null, null);
assertEquals(expectedSplitSize, splits.size());
List<String> exclusions = new ArrayList<>(splits.get(0).getList());
List<String> inclusions = new ArrayList<>();
@@ -189,7 +189,7 @@ private void checkTestSplitsInclusions(Parallelism parallelism, int expectedSpli
@Test
public void findTestInJavaProjectDirectory() throws InterruptedException {
CountDrivenParallelism parallelism = new CountDrivenParallelism(5);
- List<InclusionExclusionPattern> splits = ParallelTestExecutor.findTestSplits(parallelism, null, build, listener, true, null, new FilePath(scanner.getBasedir()));
+ List<InclusionExclusionPattern> splits = Splitter.findTestSplits(parallelism, null, build, listener, true, null, new FilePath(scanner.getBasedir()));
assertEquals(5, splits.size());
}
@@ -207,7 +207,7 @@ public void findTestOfJavaProjectDirectoryInWorkspace() throws InterruptedExcept
expectedTests.add("FourthTest");
expectedTests.add("FifthTest");
assertEquals("Result does not contains expected tests.", expectedTests, data.keySet());
- List<InclusionExclusionPattern> splits = ParallelTestExecutor.findTestSplits(parallelism, null, build, listener, true, null, new FilePath(scanner.getBasedir()));
+ List<InclusionExclusionPattern> splits = Splitter.findTestSplits(parallelism, null, build, listener, true, null, new FilePath(scanner.getBasedir()));
assertEquals(5, splits.size());
}
@@ -226,6 +226,6 @@ public void previousBuildIsOngoing() throws IOException {
testResult.tally();
when(action.getResult()).thenReturn(testResult);
- assertNotNull(ParallelTestExecutor.getTestResult(project, previousBuild, listener));
+ assertNotNull(Splitter.getTestResult(project, previousBuild, listener));
}
}