Skip to content

Commit

Permalink
[JENKINS-46313] Avoid excessive triggering of downstream jobs #109
Browse files Browse the repository at this point in the history
  • Loading branch information
Markus Dlugi authored and Cyrille Le Clerc committed Dec 22, 2017
1 parent 277404a commit d68ce7b
Show file tree
Hide file tree
Showing 5 changed files with 211 additions and 3 deletions.
Expand Up @@ -29,6 +29,7 @@
import org.jenkinsci.plugins.pipeline.maven.publishers.PipelineGraphPublisher;

import java.util.List;
import java.util.Map;

import javax.annotation.Nonnull;

Expand Down Expand Up @@ -121,6 +122,31 @@ void recordGeneratedArtifact(@Nonnull String jobFullName, int buildNumber,
*/
@Nonnull
List<String> listDownstreamJobs(@Nonnull String jobFullName, int buildNumber);

/**
 * List the upstream jobs that generate an artifact that the given build depends on
 * (build identified by the given {@code jobFullName}, {@code buildNumber})
 *
 * @param jobFullName see {@link Item#getFullName()}
 * @param buildNumber see {@link Run#getNumber()}
 * @return map of upstream job full names (see {@link Item#getFullName()}) to
 *         upstream build numbers (see {@link Run#getNumber()})
 * @see Item#getFullName()
 */
@Nonnull
Map<String, Integer> listUpstreamJobs(@Nonnull String jobFullName, int buildNumber);

/**
 * List the upstream jobs that generate an artifact that the given build depends
 * on, including transitive dependencies (build identified by the given
 * {@code jobFullName}, {@code buildNumber})
 *
 * @param jobFullName see {@link Item#getFullName()}
 * @param buildNumber see {@link Run#getNumber()}
 * @return map of upstream job full names (see {@link Item#getFullName()}) to
 *         upstream build numbers (see {@link Run#getNumber()})
 * @see Item#getFullName()
 */
@Nonnull
Map<String, Integer> listTransitiveUpstreamJobs(@Nonnull String jobFullName, int buildNumber);

/**
* Routine task to cleanup the database and reclaim disk space (if possible in the underlying database).
Expand Down
Expand Up @@ -46,13 +46,25 @@
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.logging.Level;
import java.util.logging.Logger;

import javax.annotation.Nonnull;

import org.apache.commons.io.IOUtils;
import org.h2.api.ErrorCode;
import org.h2.jdbcx.JdbcConnectionPool;
import org.jenkinsci.plugins.pipeline.maven.util.RuntimeIoException;
import org.jenkinsci.plugins.pipeline.maven.util.RuntimeSqlException;

import hudson.model.Item;
import hudson.model.Run;

/**
* @author <a href="mailto:cleclerc@cloudbees.com">Cyrille Le Clerc</a>
*/
Expand Down Expand Up @@ -525,6 +537,113 @@ protected List<String> listDownstreamPipelinesBasedOnParentProjectDependencies(@
return downstreamJobsFullNames;
}


@Nonnull
@Override
public Map<String, Integer> listUpstreamJobs(@Nonnull String jobFullName, int buildNumber) {
    // Combine upstream pipelines discovered through direct Maven dependencies
    // with those discovered through parent-project (parent POM) relationships.
    Map<String, Integer> result = new HashMap<>(listUpstreamPipelinesBasedOnMavenDependencies(jobFullName, buildNumber));
    Map<String, Integer> parentProjectUpstreams = listUpstreamPipelinesBasedOnParentProjectDependencies(jobFullName, buildNumber);
    result.putAll(parentProjectUpstreams);
    return result;
}

/**
 * Lists, for each artifact the given build depends on, the upstream pipeline
 * that generated that artifact, restricted to the last recorded build of each
 * upstream pipeline.
 *
 * @param jobFullName full name of the downstream job (see {@code Item#getFullName()})
 * @param buildNumber build number of the downstream build (see {@code Run#getNumber()})
 * @return map of upstream job full names to the matching upstream build number
 */
protected Map<String, Integer> listUpstreamPipelinesBasedOnMavenDependencies(@Nonnull String jobFullName, int buildNumber) {
    // Fix: log this method's own name (was "listUpstreamJobs"), consistent with
    // listUpstreamPipelinesBasedOnParentProjectDependencies which logs its own name.
    LOGGER.log(Level.FINER, "listUpstreamPipelinesBasedOnMavenDependencies({0}, {1})", new Object[]{jobFullName, buildNumber});
    // Sub-select: artifact ids of every Maven dependency recorded for the given (job, build).
    // NOTE(review): matches on ARTIFACT_ID only, without group id — could cross-match
    // artifacts sharing an artifactId across different groups; confirm this is intended.
    String dependenciesSql = "SELECT DISTINCT MAVEN_DEPENDENCY.ARTIFACT_ID " +
            " FROM MAVEN_DEPENDENCY " +
            " INNER JOIN JENKINS_BUILD AS DOWNSTREAM_BUILD ON MAVEN_DEPENDENCY.BUILD_ID = DOWNSTREAM_BUILD.ID " +
            " INNER JOIN JENKINS_JOB AS DOWNSTREAM_JOB ON DOWNSTREAM_BUILD.JOB_ID = DOWNSTREAM_JOB.ID " +
            " WHERE " +
            "   DOWNSTREAM_JOB.FULL_NAME = ? AND " +
            "   DOWNSTREAM_BUILD.NUMBER = ?";

    // Outer select: jobs whose generated artifacts appear in the dependency set above,
    // excluding artifacts flagged SKIP_DOWNSTREAM_TRIGGERS, and keeping only each
    // upstream job's most recent recorded build (MAX(NUMBER) sub-select).
    String sql = "SELECT DISTINCT UPSTREAM_JOB.FULL_NAME, UPSTREAM_BUILD.NUMBER " +
            " FROM JENKINS_JOB AS UPSTREAM_JOB" +
            " INNER JOIN JENKINS_BUILD AS UPSTREAM_BUILD ON UPSTREAM_JOB.ID = UPSTREAM_BUILD.JOB_ID " +
            " INNER JOIN GENERATED_MAVEN_ARTIFACT ON UPSTREAM_BUILD.ID = GENERATED_MAVEN_ARTIFACT.BUILD_ID" +
            " WHERE " +
            "   GENERATED_MAVEN_ARTIFACT.ARTIFACT_ID IN (" + dependenciesSql + ") AND " +
            "   GENERATED_MAVEN_ARTIFACT.SKIP_DOWNSTREAM_TRIGGERS = FALSE AND " +
            "   UPSTREAM_BUILD.NUMBER in (SELECT MAX(JENKINS_BUILD.NUMBER) FROM JENKINS_BUILD WHERE UPSTREAM_JOB.ID = JENKINS_BUILD.JOB_ID)" +
            " ORDER BY UPSTREAM_JOB.FULL_NAME";

    Map<String, Integer> upstreamJobsFullNames = new HashMap<>();
    LOGGER.log(Level.FINER, "sql: {0}, jobFullName:{1}, buildNumber: {2}", new Object[]{sql, jobFullName, buildNumber});

    try (Connection cnn = jdbcConnectionPool.getConnection()) {
        try (PreparedStatement stmt = cnn.prepareStatement(sql)) {
            // Parameters bind into the nested dependenciesSql sub-select.
            stmt.setString(1, jobFullName);
            stmt.setInt(2, buildNumber);
            try (ResultSet rst = stmt.executeQuery()) {
                while (rst.next()) {
                    upstreamJobsFullNames.put(rst.getString(1), rst.getInt(2));
                }
            }
        }
    } catch (SQLException e) {
        throw new RuntimeSqlException(e);
    }
    LOGGER.log(Level.FINE, "listUpstreamPipelinesBasedOnMavenDependencies({0}, {1}): {2}", new Object[]{jobFullName, buildNumber, upstreamJobsFullNames});

    return upstreamJobsFullNames;
}

/**
 * Lists the upstream pipelines that generated a Maven parent project (parent POM)
 * that the given build depends on, restricted to the last recorded build of each
 * upstream pipeline.
 *
 * @param jobFullName full name of the downstream job (see {@code Item#getFullName()})
 * @param buildNumber build number of the downstream build (see {@code Run#getNumber()})
 * @return map of upstream job full names to the matching upstream build number
 */
protected Map<String, Integer> listUpstreamPipelinesBasedOnParentProjectDependencies(@Nonnull String jobFullName, int buildNumber) {
    LOGGER.log(Level.FINER, "listUpstreamPipelinesBasedOnParentProjectDependencies({0}, {1})", new Object[]{jobFullName, buildNumber});
    // Sub-select: artifact ids of every Maven dependency recorded for the given (job, build).
    // NOTE(review): matches on ARTIFACT_ID only, without group id — could cross-match
    // artifacts sharing an artifactId across different groups; confirm this is intended.
    String dependenciesSql = "SELECT DISTINCT MAVEN_DEPENDENCY.ARTIFACT_ID " +
            " FROM MAVEN_DEPENDENCY " +
            " INNER JOIN JENKINS_BUILD AS DOWNSTREAM_BUILD ON MAVEN_DEPENDENCY.BUILD_ID = DOWNSTREAM_BUILD.ID " +
            " INNER JOIN JENKINS_JOB AS DOWNSTREAM_JOB ON DOWNSTREAM_BUILD.JOB_ID = DOWNSTREAM_JOB.ID " +
            " WHERE " +
            "   DOWNSTREAM_JOB.FULL_NAME = ? AND " +
            "   DOWNSTREAM_BUILD.NUMBER = ?";

    // Outer select: jobs whose recorded parent projects appear in the dependency set
    // above, keeping only each upstream job's most recent recorded build.
    // NOTE(review): unlike the Maven-dependency variant, this query applies no
    // SKIP_DOWNSTREAM_TRIGGERS filter — confirm whether that is intentional.
    String sql = "SELECT DISTINCT UPSTREAM_JOB.FULL_NAME, UPSTREAM_BUILD.NUMBER " +
            " FROM JENKINS_JOB AS UPSTREAM_JOB" +
            " INNER JOIN JENKINS_BUILD AS UPSTREAM_BUILD ON UPSTREAM_JOB.ID = UPSTREAM_BUILD.JOB_ID " +
            " INNER JOIN MAVEN_PARENT_PROJECT ON UPSTREAM_BUILD.ID = MAVEN_PARENT_PROJECT.BUILD_ID" +
            " WHERE " +
            "   MAVEN_PARENT_PROJECT.ARTIFACT_ID IN (" + dependenciesSql + ") AND " +
            "   UPSTREAM_BUILD.NUMBER in (SELECT MAX(JENKINS_BUILD.NUMBER) FROM JENKINS_BUILD WHERE UPSTREAM_JOB.ID = JENKINS_BUILD.JOB_ID)" +
            " ORDER BY UPSTREAM_JOB.FULL_NAME";

    Map<String, Integer> upstreamJobsFullNames = new HashMap<>();
    LOGGER.log(Level.FINER, "sql: {0}, jobFullName:{1}, buildNumber: {2}", new Object[]{sql, jobFullName, buildNumber});

    try (Connection cnn = jdbcConnectionPool.getConnection()) {
        try (PreparedStatement stmt = cnn.prepareStatement(sql)) {
            // Parameters bind into the nested dependenciesSql sub-select.
            stmt.setString(1, jobFullName);
            stmt.setInt(2, buildNumber);
            try (ResultSet rst = stmt.executeQuery()) {
                while (rst.next()) {
                    upstreamJobsFullNames.put(rst.getString(1), rst.getInt(2));
                }
            }
        }
    } catch (SQLException e) {
        throw new RuntimeSqlException(e);
    }
    LOGGER.log(Level.FINE, "listUpstreamPipelinesBasedOnParentProjectDependencies({0}, {1}): {2}", new Object[]{jobFullName, buildNumber, upstreamJobsFullNames});

    return upstreamJobsFullNames;
}

/**
 * Lists all upstream jobs of the given build, following upstream relationships
 * transitively via the private recursive overload.
 *
 * @param jobFullName full name of the downstream job (see {@code Item#getFullName()})
 * @param buildNumber build number of the downstream build (see {@code Run#getNumber()})
 * @return map of upstream job full names to upstream build numbers
 */
@Nonnull
@Override // fix: implements the PipelineMavenPluginDao interface method, like listUpstreamJobs above
public Map<String, Integer> listTransitiveUpstreamJobs(@Nonnull String jobFullName, int buildNumber) {
    // Delegate to the recursive overload with a fresh accumulator map.
    return listTransitiveUpstreamJobs(jobFullName, buildNumber, new HashMap<String, Integer>());
}

/**
 * Depth-first accumulation of upstream jobs: adds each direct upstream of the
 * given build to {@code result}, then recurses into it. A job already present
 * in {@code result} is skipped, which also protects against dependency cycles.
 *
 * @param jobFullName full name of the job whose upstreams are collected
 * @param buildNumber build number of that job's build
 * @param result      accumulator of job full name to build number; mutated in place
 * @return the {@code result} accumulator, for caller convenience
 */
private Map<String, Integer> listTransitiveUpstreamJobs(@Nonnull String jobFullName, int buildNumber, Map<String, Integer> result) {
    Map<String, Integer> directUpstreams = listUpstreamJobs(jobFullName, buildNumber);
    for (Map.Entry<String, Integer> upstream : directUpstreams.entrySet()) {
        String upstreamName = upstream.getKey();
        if (result.containsKey(upstreamName)) {
            continue; // already visited: avoids re-walking shared ancestors and cycles
        }
        Integer upstreamBuild = upstream.getValue();
        result.put(upstreamName, upstreamBuild);
        listTransitiveUpstreamJobs(upstreamName, upstreamBuild, result);
    }
    return result;
}

/**
* List the artifacts generated by the given build
*
Expand Down
Expand Up @@ -26,6 +26,7 @@

import java.util.Collections;
import java.util.List;
import java.util.Map;

import javax.annotation.Nonnull;

Expand Down Expand Up @@ -68,6 +69,18 @@ public void deleteBuild(String jobFullName, int buildNumber) {
// Null-object implementation: nothing is ever recorded, so no downstream jobs exist.
public List<String> listDownstreamJobs(@Nonnull String jobFullName, int buildNumber) {
    return Collections.emptyList();
}

/**
 * Null-object implementation: nothing is ever recorded, so no upstream jobs exist.
 */
@Nonnull
@Override
// fix: add @Nonnull on the parameter for consistency with the interface contract
// and with listDownstreamJobs above
public Map<String, Integer> listUpstreamJobs(@Nonnull String jobFullName, int buildNumber) {
    return Collections.emptyMap();
}

/**
 * Null-object implementation: nothing is ever recorded, so no transitive
 * upstream jobs exist.
 */
@Nonnull
@Override
// fix: add @Nonnull on the parameter for consistency with the interface contract
// and with listDownstreamJobs above
public Map<String, Integer> listTransitiveUpstreamJobs(@Nonnull String jobFullName, int buildNumber) {
    return Collections.emptyMap();
}

@Override
public void cleanup() {
Expand Down
Expand Up @@ -58,6 +58,10 @@ public void onCompleted(WorkflowRun upstreamBuild, @Nonnull TaskListener listene
WorkflowJob upstreamPipeline = upstreamBuild.getParent();
List<String> downstreamPipelines = GlobalPipelineMavenConfig.getDao().listDownstreamJobs(upstreamPipeline.getFullName(), upstreamBuild.getNumber());

// Don't trigger myself
downstreamPipelines.remove(upstreamPipeline.getFullName());

outer:
for (String downstreamPipelineFullName : downstreamPipelines) {
final WorkflowJob downstreamPipeline = Jenkins.getInstance().getItemByFullName(downstreamPipelineFullName, WorkflowJob.class);
if (downstreamPipeline == null) {
Expand All @@ -66,9 +70,28 @@ public void onCompleted(WorkflowRun upstreamBuild, @Nonnull TaskListener listene
// job not found, the database was probably out of sync
continue;
}
if (downstreamPipeline.equals(upstreamPipeline)) {
// don't trigger myself
continue;

// Avoid excessive triggering
// See #46313
if(downstreamPipeline.getLastBuild() != null) {
int downstreamBuildNumber = downstreamPipeline.getLastBuild().getNumber();
Map<String, Integer> transitiveUpstreamPipelines = GlobalPipelineMavenConfig.getDao().listTransitiveUpstreamJobs(downstreamPipelineFullName, downstreamBuildNumber);
for(String transitiveUpstream : transitiveUpstreamPipelines.keySet()) {
// Skip if one of the downstream's upstream is already building or in queue
// Then it will get triggered anyway by that upstream, we don't need to trigger it again
WorkflowJob tup = Jenkins.getInstance().getItemByFullName(transitiveUpstream, WorkflowJob.class);
if(tup != null && !tup.equals(upstreamPipeline) && (tup.isBuilding() || tup.isInQueue())) {
listener.getLogger().println("[withMaven] Not triggering " + ModelHyperlinkNote.encodeTo(downstreamPipeline) + " because it has a dependency " + ModelHyperlinkNote.encodeTo(tup) + " already building or in queue");
continue outer;
}

// Skip if this downstream pipeline will be triggered by another one of our downstream pipelines
// That's the case when one of the downstream's transitive upstream is our own downstream
if(downstreamPipelines.contains(transitiveUpstream)) {
listener.getLogger().println("[withMaven] Not triggering " + ModelHyperlinkNote.encodeTo(downstreamPipeline) + " because it has dependencies in the downstream project list");
continue outer;
}
}
}

if (!downstreamPipeline.isBuildable()) {
Expand Down
Expand Up @@ -10,6 +10,7 @@
import org.junit.Test;

import java.util.List;
import java.util.Map;

/**
* @author <a href="mailto:cleclerc@cloudbees.com">Cyrille Le Clerc</a>
Expand Down Expand Up @@ -450,4 +451,30 @@ public void list_downstream_jobs_timestamped_snapshot_version() {
List<String> downstreamPipelinesForBuild2 = dao.listDownstreamJobs("my-upstream-pipeline-1", 2);
assertThat(downstreamPipelinesForBuild2, Matchers.containsInAnyOrder("my-downstream-pipeline-1"));
}

@Test
public void list_upstream_jobs() {

    // Two upstream pipelines each generate one artifact: "core" and "service".
    dao.recordGeneratedArtifact("my-upstream-pipeline-1", 1, "com.mycompany", "core", "1.0-SNAPSHOT", "jar", "1.0-SNAPSHOT", false);
    dao.recordGeneratedArtifact("my-upstream-pipeline-2", 1, "com.mycompany", "service", "1.0-SNAPSHOT", "war", "1.0-SNAPSHOT", false);

    // The downstream pipeline depends only on "core".
    dao.recordDependency("my-downstream-pipeline-1", 1, "com.mycompany", "core", "1.0-SNAPSHOT", "jar", "compile", false);


    // Only the producer of "core" should be reported as upstream;
    // "my-upstream-pipeline-2" must not appear.
    Map<String, Integer> upstreamPipelinesForBuild1 = dao.listUpstreamJobs("my-downstream-pipeline-1", 1);
    assertThat(upstreamPipelinesForBuild1.keySet(), Matchers.containsInAnyOrder("my-upstream-pipeline-1"));
}

@Test
public void list_transitive_upstream_jobs() {

    // Chain: my-upstream-pipeline-1 produces "core";
    // my-downstream-pipeline-1 produces "service".
    dao.recordGeneratedArtifact("my-upstream-pipeline-1", 1, "com.mycompany", "core", "1.0-SNAPSHOT", "jar", "1.0-SNAPSHOT", false);
    dao.recordGeneratedArtifact("my-downstream-pipeline-1", 1, "com.mycompany", "service", "1.0-SNAPSHOT", "war", "1.0-SNAPSHOT", false);

    // my-downstream-pipeline-1 depends on "core";
    // my-downstream-downstream-pipeline-1 depends on "service".
    dao.recordDependency("my-downstream-pipeline-1", 1, "com.mycompany", "core", "1.0-SNAPSHOT", "jar", "compile", false);
    dao.recordDependency("my-downstream-downstream-pipeline-1", 1, "com.mycompany", "service", "1.0-SNAPSHOT", "war", "compile", false);

    // Transitive resolution must surface both the direct upstream
    // (my-downstream-pipeline-1) and its own upstream (my-upstream-pipeline-1).
    Map<String, Integer> upstreamPipelinesForBuild1 = dao.listTransitiveUpstreamJobs("my-downstream-downstream-pipeline-1", 1);
    assertThat(upstreamPipelinesForBuild1.keySet(), Matchers.containsInAnyOrder("my-upstream-pipeline-1","my-downstream-pipeline-1"));
}
}

0 comments on commit d68ce7b

Please sign in to comment.