
Format-convert the collated failure information

Merged: Sergi Blanch Torné requested to merge sergi/ci-uprev:review_collate_subcommand into main
+ 15 − 8
@@ -48,6 +48,7 @@ from gitlab import GitlabGetError, GitlabUpdateError
from gitlab.v4.objects import (Project, ProjectBranch, ProjectCommit,
ProjectIssue, ProjectJob, ProjectMergeRequest,
ProjectNote, ProjectPipeline, ProjectPipelineJob)
+import json
from pprint import pformat
from tenacity import retry, stop_after_attempt, wait_exponential
from uprev_exceptions import NothingToUprev
@@ -91,6 +92,10 @@ class UpdateRevision(Enum):
uprev_pair: UpdateRevision
+def defaultdictdeque() -> defaultdict:
+    return defaultdict(deque)
def prepare_environment(
default_target_namespace: str = 'virgl',
default_target: str = 'virglrenderer',
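Moving `defaultdictdeque` to module scope makes it reusable as a `defaultdict` factory outside `collate_results`. A minimal sketch of how such a factory behaves; the job and test names are invented purely for illustration:

```python
from collections import defaultdict, deque


def defaultdictdeque() -> defaultdict:
    # Factory usable as a defaultdict default_factory.
    return defaultdict(deque)


# Hypothetical two-level structure keyed by job, then test.
failures = defaultdict(defaultdictdeque)
failures["job-a"]["test-1"].append("Crash")
failures["job-a"]["test-1"].append("Fail")
print(failures["job-a"]["test-1"])  # deque(['Crash', 'Fail'])
```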
@@ -740,10 +745,6 @@ def collate_results(
:return: failures, artifacts, without_artifacts
"""
print(f"Looking for failed jobs in pipeline {pipeline.web_url}")
-def defaultdictdeque() -> defaultdict:
-    return defaultdict(deque)
if artifacts is None:
artifacts = dict()
if failures is None:
@@ -802,7 +803,7 @@ def __is_fundamental_job(
def __classify_the_job(
job: ProjectJob
-) -> str | Tuple:
+) -> Union[str, Tuple]:
"""
Different uprevs classify jobs in different ways, so this method is
specialized depending on the pair to uprev.
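The return annotation switches from the PEP 604 form `str | Tuple` to `typing.Union`, which keeps the annotation working on Python versions older than 3.10. A hypothetical classifier, only to illustrate the annotation; the real classification rules depend on the uprev pair:

```python
from typing import Tuple, Union


def classify_the_job(job_name: str) -> Union[str, Tuple]:
    # Union[str, Tuple] stays valid on Python < 3.10, which does not
    # support the PEP 604 "str | Tuple" union syntax.
    if "sanity" in job_name:
        return "fundamental"
    return ("test", job_name)
```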
@@ -850,8 +851,7 @@ def __download_artifacts(
return True
except Exception as outer_exception:
print(f"An exception ({type(outer_exception)}) occurred "
f"when downloading results for '{job.name}' "
f"from '{failures_path}' at {job.web_url}: "
f"when downloading results for '{job.name}' at {job.web_url}: "
f"{outer_exception}!")
return False
@@ -1523,7 +1523,14 @@ def __cmd_collate_results(
print(f"These jobs failed to produce artifacts: "
f"{', '.join(job_names)}")
if failures:
print(f"Failures:\n{pformat(failures)}")
copy_failures = defaultdict(defaultdict)
for job in failures:
for test in failures[job]:
# remove the deque
copy_failures[str(job)][test] = list(failures[job][test])
# remove the defaultdict
copy_failures = json.loads(json.dumps(copy_failures))
print(f"Failures:\n{pformat(copy_failures)}")
def __cmd_retry_pipeline(
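The added block converts the collated failures from nested `defaultdict`/`deque` containers into plain dicts and lists before pretty-printing, so the output is not cluttered with `defaultdict(...)` and `deque(...)` wrappers. A self-contained sketch of the same conversion, with invented sample data:

```python
import json
from collections import defaultdict, deque
from pprint import pformat

# Hypothetical data shaped like the collated failures: job -> test -> deque.
failures = defaultdict(lambda: defaultdict(deque))
failures["job-a"]["test-1"].extend(["Crash", "Fail"])

copy_failures = defaultdict(defaultdict)
for job in failures:
    for test in failures[job]:
        # deque -> list
        copy_failures[str(job)][test] = list(failures[job][test])
# defaultdict -> plain dict, via a JSON round trip
copy_failures = json.loads(json.dumps(copy_failures))
print(pformat(copy_failures))  # {'job-a': {'test-1': ['Crash', 'Fail']}}
```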