Commit f7fab721 authored by Arkadiusz Hiler

tools/patchwork_gitlab_igt: Support retries

A pipeline can have multiple instances of a single job executed due to
automatic retries. Let's account for them by taking only the latest
one into account.
Signed-off-by: Arkadiusz Hiler
parent 2bbd2fe9
......@@ -9,6 +9,7 @@ import os
import io
from zipfile import ZipFile
from itertools import groupby
# this is a bridge that posts gitlab pipeline results to patchwork
# the repositories and patchwork's test name (i.e. result name) are hardcoded
......@@ -115,13 +116,28 @@ def has_failed(x):
return x['status'] == 'failed'
def groupsort(iterable, key=None):
    """Sort *iterable* by *key*, then group consecutive items sharing that key.

    itertools.groupby only merges adjacent equal-key elements, so sorting by
    the same key first yields one group per distinct key. Returns the lazy
    (key, group-iterator) pairs produced by groupby.
    """
    ordered = sorted(iterable, key=key)
    return groupby(ordered, key=key)
def get_details(pipeline):
jobs_url = '{}/pipelines/{}/jobs?scope=failed'.format(GITLAB_PROJECT_URL, pipeline['id'])
jobs_url = '{}/pipelines/{}/jobs?per_page=100'.format(GITLAB_PROJECT_URL, pipeline['id'])
resp = requests.get(jobs_url, headers=GITLAB_HEADERS)
details = ""
for job in resp.json():
jobs = resp.json()
if len(jobs) == 100:
print("hit the limit of 100 jobs per page, please implement pagination support")
grouped_jobs = groupsort(jobs, key=lambda job: job['name'])
latest_jobs = [ sorted(jobs, key=lambda job: job['id'])[-1] for name, jobs in grouped_jobs ]
for job in latest_jobs:
if job['status'] != 'failed':
details += "\n{} has {} ({}):\n".format(job['name'], job['status'], job['web_url'])
log_url = '{}/jobs/{}/trace'.format(GITLAB_PROJECT_URL, job['id'])
log_resp = requests.get(log_url, headers=GITLAB_HEADERS)
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment