Commit 4f3589c0 authored by Marco Trevisan

build: List single unit python tests allowing to run them separately

We currently run a number of tests without being able to easily
run them separately or to check which one failed.

So add a script that inspects all the available unit tests in each python
script, and use it to figure out the tests we can run in meson.

To support this, define a global 'python_tests' variable in meson that allows
registering new python tests easily, without having to repeat the settings
for every test.

For each test we have, we check whether we can fetch a list of unit tests
and, if possible, we create a meson test for each one.
Otherwise we just fall back to the normal behavior.

This is something that can hopefully be implemented in upstream meson [1].

[1] https://github.com/mesonbuild/meson/issues/6851
parent eccd790d
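The unittest_inspector.py script referenced by the meson changes below is added by this commit but is not shown in this excerpt. As a hedged sketch only (not the actual script), an inspector compatible with the meson logic would load a test file and print one 'Class.test_method' line per discovered unit test, returning non-zero when none are found; the module name 'inspected_tests' and the helper list_tests() are illustrative:

#!/usr/bin/env python3
# Hypothetical sketch of unittest_inspector.py -- the real script added by
# this commit is not included in this excerpt.
import importlib.util
import sys
import unittest

def list_tests(suite):
    # Flatten a (possibly nested) unittest.TestSuite into individual cases.
    for test in suite:
        if isinstance(test, unittest.TestSuite):
            yield from list_tests(test)
        else:
            yield test

def main():
    if len(sys.argv) != 2:
        print('Usage: {} <python test file>'.format(sys.argv[0]), file=sys.stderr)
        return 1

    # Import the test file under a throwaway module name so that its
    # "if __name__ == '__main__'" block does not run here.
    spec = importlib.util.spec_from_file_location('inspected_tests', sys.argv[1])
    module = importlib.util.module_from_spec(spec)
    spec.loader.exec_module(module)

    tests = list(list_tests(unittest.TestLoader().loadTestsFromModule(module)))
    if not tests:
        return 1

    for test in tests:
        # test.id() is e.g. 'inspected_tests.SomeTest.test_foo'; print only
        # 'SomeTest.test_foo', which can be passed back to the test file as a
        # standard unittest selector.
        print('.'.join(test.id().split('.')[-2:]))
    return 0

if __name__ == '__main__':
    sys.exit(main())

Each printed name can be handed straight back to the test file as a unittest selector argument, which is what the per-unit test() calls below rely on.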
# Add a way to discover and run python unit tests separately
# https://github.com/mesonbuild/meson/issues/6851
python_tests = [
    # List all the python tests, must be in the form:
    #   {
    #     'name': 'test name',
    #     'file': 'full test file path, use files('path')[0]',
    #     Fields below are optional:
    #     'workdir': '',
    #     'env': [],
    #     'depends': [],
    #     'suite': [],
    #     'extra_args': [],
    #     'timeout': 30,
    #     'is_parallel': true,
    #   }
]
tests = [
    'fprintd',
    'test_fprintd_utils',
]
foreach t: tests
    test(t,
        python3,
        args: meson.current_source_dir() / t + '.py',
        suite: ['daemon'],
        depends: [
            fprintd,
            fprintd_utils,
        ],
        env: [
            'G_DEBUG=fatal-criticals',
            'G_MESSAGES_DEBUG=all',
            'FPRINT_BUILD_DIR=' + meson.build_root() / 'src',
            'TOPSRCDIR=' + meson.source_root(),
        ],
        timeout: t == 'fprintd' ? 60 : 30,
    )

    python_tests += [
        {
            'name': t,
            'file': files(meson.current_source_dir() / t + '.py')[0],
            'env': [
                'G_DEBUG=fatal-criticals',
                'G_MESSAGES_DEBUG=all',
                'FPRINT_BUILD_DIR=' + meson.build_root() / 'src',
                'TOPSRCDIR=' + meson.source_root(),
            ],
            'depends': [
                fprintd,
                fprintd_utils,
            ],
            'suite': ['daemon'],
        }
    ]
endforeach
if get_option('pam')
    subdir('pam')
endif
# Add a way to discover and run python unit tests separately
# https://github.com/mesonbuild/meson/issues/6851
unittest_inspector = find_program('unittest_inspector.py')
foreach pt: python_tests
    r = run_command(unittest_inspector, pt.get('file'))
    unit_tests = r.stdout().strip().split('\n')
    base_args = [ pt.get('file') ] + pt.get('extra_args', [])
    suite = pt.get('suite', [])

    if r.returncode() == 0 and unit_tests.length() > 0
        suite += pt.get('name')
    else
        unit_tests = [pt.get('name')]
    endif

    foreach ut: unit_tests
        ut_suite = suite
        ut_args = base_args
        if unit_tests.length() > 1
            ut_args += ut
            ut_suite += ut.split('.')[0]
        endif

        test(ut,
            python3,
            args: ut_args,
            suite: ut_suite,
            depends: pt.get('depends', []),
            workdir: pt.get('workdir', meson.build_root()),
            env: pt.get('env', []),
            timeout: pt.get('timeout', 30),
            is_parallel: pt.get('is_parallel', true),
        )
    endforeach
endforeach
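With each unit registered as its own meson test, a single case can now be run directly, e.g. (test name illustrative) 'meson test -C <builddir> SomeTestCase.test_name', and related cases can be filtered through the suites set above, e.g. 'meson test -C <builddir> --suite fprintd'.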
add_test_setup('default_setup',
@@ -43,6 +102,3 @@ if find_program('valgrind', required: false).found()
)
endif
if get_option('pam')
    subdir('pam')
endif
@@ -5,23 +5,24 @@ tests = [
]
foreach t: tests
    test(t,
        python3,
        args: meson.current_source_dir() / t + '.py',
        suite: ['PAM'],
        depends: [
            pam_fprintd,
            pam_service_file,
        ],
        env: [
            'TOPBUILDDIR=' + meson.build_root(),
            'TOPSRCDIR=' + meson.source_root(),
            'LD_PRELOAD=libpam_wrapper.so',
            'PAM_WRAPPER=1',
            'PAM_WRAPPER_DEBUGLEVEL=2',
            'PAM_WRAPPER_SERVICE_DIR=' + meson.current_build_dir() / 'services',
            'G_DEBUG=fatal-warnings',
        ],
        timeout: 60,
    )

    python_tests += [
        {
            'name': t,
            'file': files(meson.current_source_dir() / t + '.py')[0],
            'env': [
                'TOPBUILDDIR=' + meson.build_root(),
                'TOPSRCDIR=' + meson.source_root(),
                'LD_PRELOAD=libpam_wrapper.so',
                'PAM_WRAPPER=1',
                'PAM_WRAPPER_DEBUGLEVEL=2',
                'PAM_WRAPPER_SERVICE_DIR=' + meson.current_build_dir() / 'services',
                'G_DEBUG=fatal-warnings',
            ],
            'depends': [
                pam_fprintd,
                pam_service_file,
            ],
            'suite': ['PAM'],
        }
    ]
endforeach