Merge pull request #92 from infosiftr/combine-loop
Unify the Trigger build loop for GHA and non-GHA builds
yosifkit authored Nov 7, 2024
2 parents 468bf7d + 95ffa53 commit 4b78106
Showing 1 changed file with 57 additions and 73 deletions.
Jenkinsfile.trigger (130 changes: 57 additions & 73 deletions)
@@ -57,6 +57,7 @@ node {
queueJson = sh(returnStdout: true, script: '''
jq -L.scripts '
include "meta";
include "jenkins";
(env.pastFailedJobsJson | fromjson) as $pastFailedJobs
| [
.[]
@@ -70,6 +71,9 @@ node {
| index($arch)
)
)
| if env.BASHBREW_ARCH == "gha" then
.gha_payload = (gha_payload | @json)
else . end
]
# this Jenkins job exports a JSON file that includes the number of attempts so far per failing buildId so that this can sort by attempts which means failing builds always live at the bottom of the queue (sorted by the number of times they have failed, so the most failing is always last)
| sort_by($pastFailedJobs[.buildId].count // 0)
@@ -85,36 +89,41 @@ node {
breakEarly = true
return
}
}
}

// for GHA builds, we still need a node (to curl GHA API), so we'll handle those here
if (env.BASHBREW_ARCH == 'gha') {
withCredentials([
string(
variable: 'GH_TOKEN',
credentialsId: 'github-access-token-docker-library-bot-meta',
),
]) {
for (buildObj in queue) {
def identifier = buildObj.source.arches[buildObj.build.arch].tags[0] + ' (' + buildObj.build.arch + ')'
def json = writeJSON(json: buildObj, returnText: true)
if (breakEarly) { return } // thanks Jenkins...

// now that we have our parsed queue, we can release the node we're holding up (since we handle GHA builds above)
def pastFailedJobs = readJSON(text: pastFailedJobsJson)
def newFailedJobs = [:]

for (buildObj in queue) {
def identifier = buildObj.source.arches[buildObj.build.arch].tags[0]
if (buildObj.build.arch != env.BASHBREW_ARCH) {
identifier += ' (' + buildObj.build.arch + ')'
}
stage(identifier) {
def json = writeJSON(json: buildObj, returnText: true)
echo(json) // for debugging/data purposes

// "catchError" to set "stageResult" :(
catchError(message: 'Build of "' + identifier + '" failed', buildResult: 'UNSTABLE', stageResult: 'FAILURE') {
if (buildObj.gha_payload) {
node {
withEnv([
'json=' + json,
'payload=' + buildObj.gha_payload,
]) {
stage(identifier) {
echo(json) // for debugging/data purposes

sh '''#!/usr/bin/env bash
set -Eeuo pipefail -x
withCredentials([
string(
variable: 'GH_TOKEN',
credentialsId: 'github-access-token-docker-library-bot-meta',
),
]) {
sh '''
set -u +x
# https://docs.github.com/en/free-pro-team@latest/rest/actions/workflows?apiVersion=2022-11-28#create-a-workflow-dispatch-event
payload="$(
jq <<<"$json" -L.scripts '
include "jenkins";
gha_payload
'
)"
set +x
curl -fL \
-X POST \
-H 'Accept: application/vnd.github+json' \
@@ -126,55 +135,30 @@ node {
}
}
}
}
// we're done triggering GHA, so we're completely done with this job
breakEarly = true
return
}
}
}

if (breakEarly) { return } // thanks Jenkins...

// now that we have our parsed queue, we can release the node we're holding up (since we handle GHA builds above)
def pastFailedJobs = readJSON(text: pastFailedJobsJson)
def newFailedJobs = [:]

for (buildObj in queue) {
def identifier = buildObj.source.arches[buildObj.build.arch].tags[0]
def json = writeJSON(json: buildObj, returnText: true)
withEnv([
'json=' + json,
]) {
stage(identifier) {
echo(json) // for debugging/data purposes

def res = build(
job: 'build-' + env.BASHBREW_ARCH,
parameters: [
string(name: 'buildId', value: buildObj.buildId),
],
propagate: false,
quietPeriod: 5, // seconds
)
// TODO do something useful with "res.result" (especially "res.result != 'SUCCESS'")
echo(res.result)
if (res.result != 'SUCCESS') {
def c = 1
if (pastFailedJobs[buildObj.buildId]) {
// TODO more defensive access of .count? (it is created just below, so it should be safe)
c += pastFailedJobs[buildObj.buildId].count
} else {
def res = build(
job: 'build-' + env.BASHBREW_ARCH,
parameters: [
string(name: 'buildId', value: buildObj.buildId),
],
propagate: false,
quietPeriod: 5, // seconds
)
if (res.result != 'SUCCESS') {
def c = 1
if (pastFailedJobs[buildObj.buildId]) {
// TODO more defensive access of .count? (it is created just below, so it should be safe)
c += pastFailedJobs[buildObj.buildId].count
}
// TODO maybe implement some amount of backoff? keep first url/endTime?
newFailedJobs[buildObj.buildId] = [
count: c,
identifier: identifier,
url: res.absoluteUrl,
endTime: (res.startTimeInMillis + res.duration) / 1000.0, // convert to seconds
]
error(res.result)
}
// TODO maybe implement some amount of backoff? keep first url/endTime?
newFailedJobs[buildObj.buildId] = [
count: c,
identifier: identifier,
url: res.absoluteUrl,
endTime: (res.startTimeInMillis + res.duration) / 1000.0, // convert to seconds
]

// "catchError" is the only way to set "stageResult" :(
catchError(message: 'Build of "' + identifier + '" failed', buildResult: 'UNSTABLE', stageResult: 'FAILURE') { error() }
}
}
}
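
For orientation, here is a minimal sketch of the unified loop this commit lands, reconstructed from the hunks above rather than copied from the resulting file: one queue and one loop, where entries for the "gha" arch carry a pre-rendered gha_payload (added in the jq step) and are dispatched to GitHub Actions over the REST API, while all other entries trigger the per-arch downstream "build-<arch>" Jenkins job. The queue, pastFailedJobs, and newFailedJobs variables come from earlier in the file; the OWNER/REPO/WORKFLOW segments of the dispatch URL and the Authorization header are placeholders following the GitHub docs linked in the diff, since the real curl invocation is truncated in the hunk above.

// Sketch only -- reconstructed from the diff hunks, not the actual Jenkinsfile.trigger.
for (buildObj in queue) {
    def identifier = buildObj.source.arches[buildObj.build.arch].tags[0]
    if (buildObj.build.arch != env.BASHBREW_ARCH) {
        identifier += ' (' + buildObj.build.arch + ')'
    }
    stage(identifier) {
        def json = writeJSON(json: buildObj, returnText: true)
        echo(json) // for debugging/data purposes

        // "catchError" marks the stage as failed without aborting the whole job
        catchError(message: 'Build of "' + identifier + '" failed', buildResult: 'UNSTABLE', stageResult: 'FAILURE') {
            if (buildObj.gha_payload) {
                // GHA build: grab a node only long enough to curl the GitHub API
                node {
                    withEnv(['payload=' + buildObj.gha_payload]) {
                        withCredentials([
                            string(
                                variable: 'GH_TOKEN',
                                credentialsId: 'github-access-token-docker-library-bot-meta',
                            ),
                        ]) {
                            sh '''
                                set -u +x
                                # https://docs.github.com/en/free-pro-team@latest/rest/actions/workflows?apiVersion=2022-11-28#create-a-workflow-dispatch-event
                                # placeholder URL/headers; the real invocation is truncated in the hunk above
                                curl -fL \
                                    -X POST \
                                    -H 'Accept: application/vnd.github+json' \
                                    -H "Authorization: Bearer $GH_TOKEN" \
                                    'https://api.github.com/repos/OWNER/REPO/actions/workflows/WORKFLOW/dispatches' \
                                    -d "$payload"
                            '''
                        }
                    }
                }
            } else {
                // non-GHA build: trigger the per-arch downstream job and record failures
                // so the next run can sort repeat offenders to the bottom of the queue
                def res = build(
                    job: 'build-' + env.BASHBREW_ARCH,
                    parameters: [
                        string(name: 'buildId', value: buildObj.buildId),
                    ],
                    propagate: false,
                    quietPeriod: 5, // seconds
                )
                if (res.result != 'SUCCESS') {
                    newFailedJobs[buildObj.buildId] = [
                        count: (pastFailedJobs[buildObj.buildId]?.count ?: 0) + 1,
                        identifier: identifier,
                        url: res.absoluteUrl,
                        endTime: (res.startTimeInMillis + res.duration) / 1000.0, // convert to seconds
                    ]
                    error(res.result)
                }
            }
        }
    }
}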