[glib/wip/3v1n0/support-can-fail-tests: 1/9] meson, ci: Support tests that can fail under certain conditions
- From: Marco Trevisan <marcotrevi src gnome org>
- To: commits-list gnome org
- Cc:
- Subject: [glib/wip/3v1n0/support-can-fail-tests: 1/9] meson, ci: Support tests that can fail under certain conditions
- Date: Thu, 20 Oct 2022 00:56:56 +0000 (UTC)
commit 6d82239295303d84b072d9d237ec70c9c3684c53
Author: Marco Trevisan (Treviño) <mail 3v1n0 net>
Date: Wed Oct 19 20:08:15 2022 +0200
meson, ci: Support tests that can fail under certain conditions
We have tests that fail in some environments, and handling them is
difficult because:
- in some environments we simply allow all the tests to fail: DANGEROUS
- where we don't allow failures, flaky tests are a CI pain
So, to avoid this and ensure that:
- new failing tests are tracked on all platforms
- GitLab integration for test reports keeps working
- coverage is also reported for failing tests
add support for a `can_fail` keyword on tests that marks the test as
part of the `failing` test suite.
We don't add the suite directly when defining the tests, as the keyword
is simpler and allows the conditions to be expressed more clearly (see the
next commits).
Also add a default test setup that skips the failing and flaky tests by
default (so distributors are not bothered by testing well-known issues),
while CI still runs all the tests:
- non-flaky tests must not fail on any platform
- failing and flaky tests are allowed to fail
In both cases we save the test reports so that GitLab integration is
preserved.
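As a rough illustration of how these pieces fit together (a minimal sketch,
not part of the commit: the project name, test names, source files and the
Windows condition are made up for the example), a meson.build could look
like this:

  project('can-fail-sketch', 'c')

  # Hypothetical test table; the real tree lists its tests in per-directory
  # dictionaries such as gio_tests, glib_tests, gobject_tests.
  sketch_tests = {
    'stable-test' : {},
    'fragile-test' : {'can_fail' : host_machine.system() == 'windows'},
  }

  # The default setup hides known-bad tests from a plain `meson test`, while
  # an (empty) extra setup lets CI opt back in to them explicitly.
  add_test_setup('default',
    is_default : true,
    exclude_suites : ['flaky', 'failing'],
  )
  add_test_setup('unstable_tests')

  foreach test_name, extra_args : sketch_tests
    # Assumes a matching <test_name>.c source exists for each entry.
    exe = executable(test_name, test_name + '.c')
    suite = ['sketch'] + extra_args.get('suite', [])

    # Tests flagged with `can_fail` end up in the `failing` suite instead of
    # being skipped or silently allowed to fail everywhere.
    if extra_args.get('can_fail', false)
      suite += 'failing'
    endif

    test(test_name, exe, suite : suite)
  endforeach

CI then runs the excluded suites in a second pass, as the run-tests.sh change
below does with `meson test --setup=unstable_tests --suite=failing
--suite=flaky || true`, so the results of known-bad tests are still logged
and reported without failing the pipeline.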
.gitlab-ci.yml | 40 ++++++++++++++++++++++++++---------
.gitlab-ci/run-tests.sh | 7 +++++-
.gitlab-ci/test-msvc.bat | 3 ++-
.gitlab-ci/test-msys2.sh | 4 +++-
gio/tests/meson.build | 20 +++++++++++++-----
glib/tests/meson.build | 21 +++++++++++++-----
gmodule/tests/meson.build | 4 ++++
gobject/tests/meson.build | 24 +++++++++++++++------
gobject/tests/performance/meson.build | 4 ++++
meson.build | 14 +++++++++++-
10 files changed, 110 insertions(+), 31 deletions(-)
---
diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 3846535e4b..d9d1244551 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -88,7 +88,9 @@ fedora-x86_64:
- lcov --config-file .lcovrc --directory _build --capture --output-file "_coverage/${CI_JOB_NAME}.lcov"
artifacts:
reports:
- junit: "_build/meson-logs/testlog.junit.xml"
+ junit:
+ - _build/meson-logs/testlog.junit.xml
+ - _build/meson-logs/testlog-*.junit.xml
name: "glib-${CI_JOB_NAME}-${CI_COMMIT_REF_NAME}"
when: always
expire_in: 1 week
@@ -130,7 +132,9 @@ debian-stable-x86_64:
- .gitlab-ci/run-tests.sh
artifacts:
reports:
- junit: "_build/meson-logs/testlog.junit.xml"
+ junit:
+ - _build/meson-logs/testlog.junit.xml
+ - _build/meson-logs/testlog-*.junit.xml
name: "glib-${CI_JOB_NAME}-${CI_COMMIT_REF_NAME}"
when: always
expire_in: 1 week
@@ -196,7 +200,9 @@ G_DISABLE_ASSERT:
- bash -x ./.gitlab-ci/run-tests.sh
artifacts:
reports:
- junit: "_build/meson-logs/testlog.junit.xml"
+ junit:
+ - _build/meson-logs/testlog.junit.xml
+ - _build/meson-logs/testlog-*.junit.xml
name: "glib-${CI_JOB_NAME}-${CI_COMMIT_REF_NAME}"
when: always
expire_in: 1 week
@@ -271,7 +277,9 @@ cross-mingw64:
- ninja -C _build
artifacts:
reports:
- junit: "_build/meson-logs/testlog.junit.xml"
+ junit:
+ - _build/meson-logs/testlog.junit.xml
+ - _build/meson-logs/testlog-*.junit.xml
name: "glib-${env:CI_JOB_NAME}-${env:CI_COMMIT_REF_NAME}"
when: always
expire_in: 1 week
@@ -298,7 +306,9 @@ msys2-mingw32:
- C:\msys64\usr\bin\bash -lc "bash -x ./.gitlab-ci/test-msys2.sh"
artifacts:
reports:
- junit: "_build/meson-logs/testlog.junit.xml"
+ junit:
+ - _build/meson-logs/testlog.junit.xml
+ - _build/meson-logs/testlog-*.junit.xml
name: "glib-${env:CI_JOB_NAME}-${env:CI_COMMIT_REF_NAME}"
when: always
expire_in: 1 week
@@ -321,7 +331,9 @@ vs2017-x64:
--python.purelibdir=C:\Python37\site-packages
artifacts:
reports:
- junit: "_build/meson-logs/testlog.junit.xml"
+ junit:
+ - _build/meson-logs/testlog.junit.xml
+ - _build/meson-logs/testlog-*.junit.xml
name: "glib-${env:CI_JOB_NAME}-${env:CI_COMMIT_REF_NAME}"
when: always
expire_in: 1 week
@@ -349,7 +361,9 @@ vs2017-x64-static:
--python.purelibdir=C:\Python37\site-packages
artifacts:
reports:
- junit: "_build/meson-logs/testlog.junit.xml"
+ junit:
+ - _build/meson-logs/testlog.junit.xml
+ - _build/meson-logs/testlog-*.junit.xml
name: "glib-${env:CI_JOB_NAME}-${env:CI_COMMIT_REF_NAME}"
when: always
expire_in: 1 week
@@ -391,7 +405,9 @@ freebsd-12-x86_64:
- bash -x ./.gitlab-ci/run-tests.sh
artifacts:
reports:
- junit: "_build/meson-logs/testlog.junit.xml"
+ junit:
+ - _build/meson-logs/testlog.junit.xml
+ - _build/meson-logs/testlog-*.junit.xml
name: "glib-${CI_JOB_NAME}-${CI_COMMIT_REF_NAME}"
when: always
expire_in: 1 week
@@ -418,7 +434,9 @@ freebsd-13-x86_64:
- bash -x ./.gitlab-ci/run-tests.sh
artifacts:
reports:
- junit: "_build/meson-logs/testlog.junit.xml"
+ junit:
+ - _build/meson-logs/testlog.junit.xml
+ - _build/meson-logs/testlog-*.junit.xml
name: "glib-${CI_JOB_NAME}-${CI_COMMIT_REF_NAME}"
when: always
expire_in: 1 week
@@ -458,7 +476,9 @@ macos:
- .gitlab-ci/run-tests.sh
artifacts:
reports:
- junit: "_build/meson-logs/testlog.junit.xml"
+ junit:
+ - _build/meson-logs/testlog.junit.xml
+ - _build/meson-logs/testlog-*.junit.xml
name: "glib-${CI_JOB_NAME}-${CI_COMMIT_REF_NAME}"
when: always
expire_in: 1 week
diff --git a/.gitlab-ci/run-tests.sh b/.gitlab-ci/run-tests.sh
index 634f861eb8..b63130a97c 100755
--- a/.gitlab-ci/run-tests.sh
+++ b/.gitlab-ci/run-tests.sh
@@ -5,5 +5,10 @@
meson test \
-C _build \
--timeout-multiplier "${MESON_TEST_TIMEOUT_MULTIPLIER}" \
- --no-suite flaky \
"$@"
+
+# Run only the flaky tests, so we can log the failures but without hard failing
+meson test \
+ -C _build \
+ --timeout-multiplier "${MESON_TEST_TIMEOUT_MULTIPLIER}" \
+ "$@" --setup=unstable_tests --suite=failing --suite=flaky || true
diff --git a/.gitlab-ci/test-msvc.bat b/.gitlab-ci/test-msvc.bat
index 82e445e00a..524b6be303 100644
--- a/.gitlab-ci/test-msvc.bat
+++ b/.gitlab-ci/test-msvc.bat
@@ -18,7 +18,8 @@ python .gitlab-ci/check-missing-install-tag.py _build || goto :error
ninja -C _build || goto :error
:: FIXME: dont ignore test errors
-meson test -C _build --timeout-multiplier %MESON_TEST_TIMEOUT_MULTIPLIER% --no-suite flaky
+meson test -C _build --timeout-multiplier %MESON_TEST_TIMEOUT_MULTIPLIER%
+meson test -C _build --timeout-multiplier %MESON_TEST_TIMEOUT_MULTIPLIER% --setup=unstable_tests --suite=failing --suite=flaky
:: FIXME: can we get code coverage support?
diff --git a/.gitlab-ci/test-msys2.sh b/.gitlab-ci/test-msys2.sh
index a345fda6a7..43708636a5 100755
--- a/.gitlab-ci/test-msys2.sh
+++ b/.gitlab-ci/test-msys2.sh
@@ -51,7 +51,9 @@ lcov \
--output-file "${DIR}/_coverage/${CI_JOB_NAME}-baseline.lcov"
# FIXME: fix the test suite
-meson test --timeout-multiplier "${MESON_TEST_TIMEOUT_MULTIPLIER}" --no-suite flaky || true
+meson test --timeout-multiplier "${MESON_TEST_TIMEOUT_MULTIPLIER}" || true
+meson test --timeout-multiplier "${MESON_TEST_TIMEOUT_MULTIPLIER}" \
+ --setup=unstable_tests --suite=failing --suite=flaky || true
lcov \
--quiet \
diff --git a/gio/tests/meson.build b/gio/tests/meson.build
index 57fc7d3cc6..414d4bfa40 100644
--- a/gio/tests/meson.build
+++ b/gio/tests/meson.build
@@ -154,9 +154,9 @@ test_extra_programs = {
'gsubprocess-testprog' : {},
}
-python_tests = [
- 'codegen.py',
-]
+python_tests = {
+ 'codegen.py' : {},
+}
test_env = environment(common_test_env)
test_env.set('G_TEST_SRCDIR', meson.current_source_dir())
@@ -914,6 +914,10 @@ foreach test_name, extra_args : gio_tests
local_test_env.append(var, value)
endforeach
+ if extra_args.get('can_fail', false)
+ suite += 'failing'
+ endif
+
test(test_name, exe,
env : local_test_env,
timeout : timeout,
@@ -937,13 +941,19 @@ foreach program_name, extra_args : test_extra_programs
)
endforeach
-foreach test_name : python_tests
+foreach test_name, extra_args : python_tests
+ suite = ['gio', 'no-valgrind']
+
+ if extra_args.get('can_fail', false)
+ suite += 'failing'
+ endif
+
test(
test_name,
python,
args: ['-B', files(test_name)],
env: test_env,
- suite: ['gio', 'no-valgrind'],
+ suite: suite,
)
if installed_tests_enabled
diff --git a/glib/tests/meson.build b/glib/tests/meson.build
index 8b5c58b8c5..981eea459d 100644
--- a/glib/tests/meson.build
+++ b/glib/tests/meson.build
@@ -296,6 +296,11 @@ foreach test_name, extra_args : glib_tests
suite = ['glib'] + extra_args.get('suite', [])
timeout = suite.contains('slow') ? test_timeout_slow : test_timeout
+
+ if extra_args.get('can_fail', false)
+ suite += 'failing'
+ endif
+
test(test_name, exe,
env : test_env,
timeout : timeout,
@@ -312,9 +317,9 @@ if installed_tests_enabled
)
endif
-python_tests = [
- 'assert-msg-test.py',
-]
+python_tests = {
+ 'assert-msg-test.py' : {},
+}
executable('assert-msg-test', ['assert-msg-test.c'],
c_args : test_cargs,
@@ -325,13 +330,19 @@ executable('assert-msg-test', ['assert-msg-test.c'],
win_subsystem : extra_args.get('win_subsystem', 'console'),
)
-foreach test_name : python_tests
+foreach test_name, extra_args : python_tests
+ suite = ['glib', 'no-valgrind']
+
+ if extra_args.get('can_fail', false)
+ suite += 'failing'
+ endif
+
test(
test_name,
python,
args: ['-B', files(test_name)],
env: test_env,
- suite: ['glib', 'no-valgrind'],
+ suite: suite,
)
if installed_tests_enabled
diff --git a/gmodule/tests/meson.build b/gmodule/tests/meson.build
index a751f3185b..c7aa8058d5 100644
--- a/gmodule/tests/meson.build
+++ b/gmodule/tests/meson.build
@@ -93,6 +93,10 @@ foreach test_name, extra_args : gmodule_tests
install: install,
)
+ if extra_args.get('can_fail', false)
+ suite += 'failing'
+ endif
+
suite = ['gmodule'] + extra_args.get('suite', [])
timeout = suite.contains('slow') ? test_timeout_slow : test_timeout
test(test_name, exe, env : test_env, timeout : timeout, suite : suite)
diff --git a/gobject/tests/meson.build b/gobject/tests/meson.build
index 09f23e8bfb..8ae1bf652e 100644
--- a/gobject/tests/meson.build
+++ b/gobject/tests/meson.build
@@ -122,11 +122,11 @@ if cc.get_id() != 'msvc'
gobject_tests += {'autoptr' : {}}
endif
-python_tests = [
- 'genmarshal.py',
- 'gobject-query.py',
- 'mkenums.py',
-]
+python_tests = {
+ 'genmarshal.py' : {},
+ 'gobject-query.py' : {},
+ 'mkenums.py' : {},
+}
test_env = environment(common_test_env)
test_env.set('G_TEST_SRCDIR', meson.current_source_dir())
@@ -166,6 +166,10 @@ foreach test_name, extra_args : gobject_tests
suite = ['gobject'] + extra_args.get('suite', [])
timeout = suite.contains('slow') ? test_timeout_slow : test_timeout
+ if extra_args.get('can_fail', false)
+ suite += 'failing'
+ endif
+
# FIXME: https://gitlab.gnome.org/GNOME/glib/issues/1316
# aka https://bugs.debian.org/880883
if test_name == 'closure-refcount' and ['arm', 'aarch64'].contains(host_machine.cpu_family())
@@ -175,13 +179,19 @@ foreach test_name, extra_args : gobject_tests
test(test_name, exe, env : test_env, timeout : timeout, suite : suite)
endforeach
-foreach test_name : python_tests
+foreach test_name, extra_args : python_tests
+ suite = ['gobject', 'no-valgrind']
+
+ if extra_args.get('can_fail', false)
+ suite += 'failing'
+ endif
+
test(
test_name,
python,
args: ['-B', files(test_name)],
env: test_env,
- suite: ['gobject', 'no-valgrind'],
+ suite: suite,
)
if installed_tests_enabled
diff --git a/gobject/tests/performance/meson.build b/gobject/tests/performance/meson.build
index 01b6af534f..f3a37ef4dc 100644
--- a/gobject/tests/performance/meson.build
+++ b/gobject/tests/performance/meson.build
@@ -40,6 +40,10 @@ foreach test_name, extra_args : gobject_tests
timeout = suite.contains('slow') ? test_timeout_slow : test_timeout
args = extra_args.get('args', [])
+ if extra_args.get('can_fail', false)
+ suite += 'failing'
+ endif
+
test(test_name, exe,
env : test_env,
timeout : timeout,
diff --git a/meson.build b/meson.build
index ddcdc028d4..3de90206e1 100644
--- a/meson.build
+++ b/meson.build
@@ -129,13 +129,24 @@ installed_tests_template_tap = files('tests/template-tap.test.in')
# Don’t build the tests unless we can run them (either natively, in an exe wrapper, or by installing them for later use)
build_tests = get_option('tests') and (meson.can_run_host_binaries() or installed_tests_enabled)
+add_test_setup('default',
+ is_default: true,
+ exclude_suites: ['flaky', 'failing'],
+)
+
+add_test_setup('unstable_tests',
+ # Empty test setup, used for having different results set for flaky tests
+ # Sadly we can't use (https://github.com/mesonbuild/meson/issues/10934):
+ #suites: ['flaky', 'unstable']
+)
+
# Allow the tests to be easily run under valgrind using --setup=valgrind
valgrind = find_program('valgrind', required: false)
if valgrind.found()
suppression_file = files('tools' / 'glib.supp')
add_test_setup('valgrind',
- exclude_suites: [ 'no-valgrind' ],
+ exclude_suites: [ 'no-valgrind', 'flaky' ],
exe_wrapper: [
valgrind,
'--tool=memcheck',
@@ -2335,6 +2346,7 @@ common_test_env = [
'G_ENABLE_DIAGNOSTIC=1',
'MALLOC_CHECK_=2',
]
+
test_timeout = 60
test_timeout_slow = 180