From d887a8faff9d2110d34c135a89ef3b43c19630b6 Mon Sep 17 00:00:00 2001 From: Dylan Baker Date: Tue, 14 Apr 2020 12:34:21 -0700 Subject: [PATCH 1/6] run_project_tests: Add an option to put temporary files in /tmp I have an NVMe drive, and I really don't want to be thrashing it with temporary files. It's also annoying to watch vscode thrash about with files that are quickly created and deleted. I'd rather put them in /tmp, so I've added a non-default option to do so. --- run_project_tests.py | 43 ++++++++++++++++++++++--------------------- 1 file changed, 22 insertions(+), 21 deletions(-) diff --git a/run_project_tests.py b/run_project_tests.py index 2ab6284f6..15ee21c47 100755 --- a/run_project_tests.py +++ b/run_project_tests.py @@ -404,21 +404,21 @@ def run_test_inprocess(testdir): # Build directory name must be the same so Ccache works over # consecutive invocations. -def create_deterministic_builddir(test: TestDef) -> str: +def create_deterministic_builddir(test: TestDef, use_tmpdir: bool) -> str: import hashlib src_dir = test.path.as_posix() if test.name: src_dir += test.name rel_dirname = 'b ' + hashlib.sha256(src_dir.encode(errors='ignore')).hexdigest()[0:10] - os.mkdir(rel_dirname) - abs_pathname = os.path.join(os.getcwd(), rel_dirname) + abs_pathname = os.path.join(tempfile.gettempdir() if use_tmpdir else os.getcwd(), rel_dirname) + os.mkdir(abs_pathname) return abs_pathname -def run_test(test: TestDef, extra_args, compiler, backend, flags, commands, should_fail): +def run_test(test: TestDef, extra_args, compiler, backend, flags, commands, should_fail, use_tmp: bool): if test.skip: return None - with AutoDeletedDir(create_deterministic_builddir(test)) as build_dir: - with AutoDeletedDir(tempfile.mkdtemp(prefix='i ', dir=os.getcwd())) as install_dir: + with AutoDeletedDir(create_deterministic_builddir(test, use_tmp)) as build_dir: + with AutoDeletedDir(tempfile.mkdtemp(prefix='i ', dir=None if use_tmp else os.getcwd())) as install_dir: try: return _run_test(test, build_dir, install_dir, extra_args, compiler, backend, flags, commands, should_fail) except TestResult as r: @@ -666,8 +666,8 @@ def have_d_compiler(): return True return False -def have_objc_compiler(): - with AutoDeletedDir(tempfile.mkdtemp(prefix='b ', dir='.')) as build_dir: +def have_objc_compiler(use_tmp: bool) -> bool: + with AutoDeletedDir(tempfile.mkdtemp(prefix='b ', dir=None if use_tmp else '.')) as build_dir: env = environment.Environment(None, build_dir, get_fake_options('/')) try: objc_comp = env.detect_objc_compiler(MachineChoice.HOST) @@ -682,8 +682,8 @@ def have_objc_compiler(): return False return True -def have_objcpp_compiler(): - with AutoDeletedDir(tempfile.mkdtemp(prefix='b ', dir='.')) as build_dir: +def have_objcpp_compiler(use_tmp: bool) -> bool: + with AutoDeletedDir(tempfile.mkdtemp(prefix='b ', dir=None if use_tmp else '.')) as build_dir: env = environment.Environment(None, build_dir, get_fake_options('/')) try: objcpp_comp = env.detect_objcpp_compiler(MachineChoice.HOST) @@ -805,7 +805,7 @@ def should_skip_rust(backend: Backend) -> bool: return True return False -def detect_tests_to_run(only: T.List[str]) -> T.List[T.Tuple[str, T.List[TestDef], bool]]: +def detect_tests_to_run(only: T.List[str], use_tmp: bool) -> T.List[T.Tuple[str, T.List[TestDef], bool]]: """ Parameters ---------- @@ -842,8 +842,8 @@ def detect_tests_to_run(only: T.List[str]) -> T.List[T.Tuple[str, T.List[TestDef ('vala', 'vala', backend is not Backend.ninja or not shutil.which(os.environ.get('VALAC', 'valac'))), ('rust', 
'rust', should_skip_rust(backend)), ('d', 'd', backend is not Backend.ninja or not have_d_compiler()), - ('objective c', 'objc', backend not in (Backend.ninja, Backend.xcode) or not have_objc_compiler()), - ('objective c++', 'objcpp', backend not in (Backend.ninja, Backend.xcode) or not have_objcpp_compiler()), + ('objective c', 'objc', backend not in (Backend.ninja, Backend.xcode) or not have_objc_compiler(options.use_tmpdir)), + ('objective c++', 'objcpp', backend not in (Backend.ninja, Backend.xcode) or not have_objcpp_compiler(options.use_tmpdir)), ('fortran', 'fortran', skip_fortran or backend != Backend.ninja), ('swift', 'swift', backend not in (Backend.ninja, Backend.xcode) or not shutil.which('swiftc')), # CUDA tests on Windows: use Ninja backend: python run_project_tests.py --only cuda --backend ninja @@ -866,16 +866,16 @@ def detect_tests_to_run(only: T.List[str]) -> T.List[T.Tuple[str, T.List[TestDef def run_tests(all_tests: T.List[T.Tuple[str, T.List[TestDef], bool]], log_name_base: str, failfast: bool, - extra_args: T.List[str]) -> T.Tuple[int, int, int]: + extra_args: T.List[str], use_tmp: bool) -> T.Tuple[int, int, int]: global logfile txtname = log_name_base + '.txt' with open(txtname, 'w', encoding='utf-8', errors='ignore') as lf: logfile = lf - return _run_tests(all_tests, log_name_base, failfast, extra_args) + return _run_tests(all_tests, log_name_base, failfast, extra_args, use_tmp) def _run_tests(all_tests: T.List[T.Tuple[str, T.List[TestDef], bool]], log_name_base: str, failfast: bool, - extra_args: T.List[str]) -> T.Tuple[int, int, int]: + extra_args: T.List[str], use_tmp: bool) -> T.Tuple[int, int, int]: global stop, executor, futures, system_compiler xmlname = log_name_base + '.xml' junit_root = ET.Element('testsuites') @@ -929,7 +929,7 @@ def _run_tests(all_tests: T.List[T.Tuple[str, T.List[TestDef], bool]], t.skip = skipped or t.skip result = executor.submit(run_test, t, extra_args + suite_args + t.args, - system_compiler, backend, backend_flags, commands, should_fail) + system_compiler, backend, backend_flags, commands, should_fail, use_tmp) futures.append((testname, t, result)) for (testname, t, result) in futures: sys.stdout.flush() @@ -1047,7 +1047,7 @@ def check_meson_commands_work(options): global backend, compile_commands, test_commands, install_commands testdir = PurePath('test cases', 'common', '1 trivial').as_posix() meson_commands = mesonlib.python_command + [get_meson_script()] - with AutoDeletedDir(tempfile.mkdtemp(prefix='b ', dir='.')) as build_dir: + with AutoDeletedDir(tempfile.mkdtemp(prefix='b ', dir=None if options.use_tmpdir else '.')) as build_dir: print('Checking that configuring works...') gen_cmd = meson_commands + [testdir, build_dir] + backend_flags + options.extra_args pc, o, e = Popen_safe(gen_cmd) @@ -1072,7 +1072,7 @@ def check_meson_commands_work(options): def detect_system_compiler(options): global system_compiler, compiler_id_map - with AutoDeletedDir(tempfile.mkdtemp(prefix='b ', dir='.')) as build_dir: + with AutoDeletedDir(tempfile.mkdtemp(prefix='b ', dir=None if options.use_tmpdir else '.')) as build_dir: fake_opts = get_fake_options('/') if options.cross_file: fake_opts.cross_file = [options.cross_file] @@ -1139,6 +1139,7 @@ if __name__ == '__main__': help='Not used, only here to simplify run_tests.py') parser.add_argument('--only', help='name of test(s) to run', nargs='+', choices=ALL_TESTS) parser.add_argument('--cross-file', action='store', help='File describing cross compilation environment.') + 
parser.add_argument('--use-tmpdir', action='store_true', help='Use tmp directory for temporary files.') options = parser.parse_args() if options.cross_file: options.extra_args += ['--cross-file', options.cross_file] @@ -1152,8 +1153,8 @@ if __name__ == '__main__': check_format() check_meson_commands_work(options) try: - all_tests = detect_tests_to_run(options.only) - (passing_tests, failing_tests, skipped_tests) = run_tests(all_tests, 'meson-test-run', options.failfast, options.extra_args) + all_tests = detect_tests_to_run(options.only, options.use_tmpdir) + (passing_tests, failing_tests, skipped_tests) = run_tests(all_tests, 'meson-test-run', options.failfast, options.extra_args, options.use_tmpdir) except StopException: pass print('\nTotal passed tests:', green(str(passing_tests))) From a3f39fde1bca9993450535fa738b5c1820522c0f Mon Sep 17 00:00:00 2001 From: Dylan Baker Date: Tue, 14 Apr 2020 12:45:14 -0700 Subject: [PATCH 2/6] run_project_tests: Allow matrix tests to skip Since matrix entries can generate tests that are only valid on specific operating systems, those generated tests need to be skippable. --- run_project_tests.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/run_project_tests.py b/run_project_tests.py index 15ee21c47..875a5223e 100755 --- a/run_project_tests.py +++ b/run_project_tests.py @@ -734,7 +734,11 @@ def skippable(suite, test): # Scientific libraries are skippable on certain systems # See the discussion here: https://github.com/mesonbuild/meson/pull/6562 - if any([test.endswith(x) for x in ['17 mpi', '25 hdf5', '30 scalapack']]) and skip_scientific: + if any([x in test for x in ['17 mpi', '25 hdf5', '30 scalapack']]) and skip_scientific: return True + + # These create OS specific tests, and need to be skippable + if any([x in test for x in ['16 sdl', '17 mpi']]): return True # No frameworks test should be skipped on linux CI, as we expect all From 91050e0c7c4920d9e793e0b911f8f3255b4d0e3e Mon Sep 17 00:00:00 2001 From: Dylan Baker Date: Fri, 20 Mar 2020 14:45:14 -0700 Subject: [PATCH 3/6] ci: Add python-jsonschema It is used by a unit test to validate the test.json files in the various test directories against a JSON schema.
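For reference, the check this enables looks roughly like the following. This is a minimal sketch; the actual unit test added later in this series follows the same pattern but collects every failure before reporting.

```python
import json
from pathlib import Path

from jsonschema import validate  # provided by the python-jsonschema package

# Load the schema (added as data/test.schema.json later in this series).
with Path('data/test.schema.json').open() as f:
    schema = json.load(f)

# Validate every test.json under the project test directories; validate()
# raises jsonschema.ValidationError on the first mismatch it finds.
for path in Path('test cases').glob('**/test.json'):
    with path.open() as f:
        validate(json.load(f), schema=schema)
```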
--- .github/workflows/os_comp.yml | 2 +- azure-pipelines.yml | 6 +++--- ci/ciimage/arch/install.sh | 1 + ci/run.ps1 | 2 +- ci/travis_install.sh | 1 + 5 files changed, 7 insertions(+), 5 deletions(-) diff --git a/.github/workflows/os_comp.yml b/.github/workflows/os_comp.yml index 839727c9f..7f3437e31 100644 --- a/.github/workflows/os_comp.yml +++ b/.github/workflows/os_comp.yml @@ -15,7 +15,7 @@ jobs: - name: Install Dependencies run: | sudo apt update -yq - sudo apt install -yq --no-install-recommends python3-setuptools python3-pip g++ gfortran gobjc gobjc++ zlib1g-dev python-dev python3-dev + sudo apt install -yq --no-install-recommends python3-setuptools python3-pip g++ gfortran gobjc gobjc++ zlib1g-dev python-dev python3-dev python3-jsonschema - name: Install ninja-build tool uses: seanmiddleditch/gha-setup-ninja@v1 - name: Python version diff --git a/azure-pipelines.yml b/azure-pipelines.yml index ad0b6ae89..066f1a586 100644 --- a/azure-pipelines.yml +++ b/azure-pipelines.yml @@ -100,8 +100,8 @@ jobs: displayName: Install Dependencies - script: | set PATH=%CYGWIN_ROOT%\bin;%SYSTEMROOT%\system32 - env.exe -- python3 -m pip --disable-pip-version-check install pefile pytest-xdist - displayName: pip install pefile pytest-xdist + env.exe -- python3 -m pip --disable-pip-version-check install pefile pytest-xdist jsonschema + displayName: pip install pefile pytest-xdist jsonschema - script: | set BOOST_ROOT= set PATH=%CYGWIN_ROOT%\bin;%SYSTEMROOT%\system32 @@ -169,7 +169,7 @@ jobs: mingw-w64-$(MSYS2_ARCH)-python3-setuptools ^ mingw-w64-$(MSYS2_ARCH)-python3-pip ^ %TOOLCHAIN% - %MSYS2_ROOT%\usr\bin\bash -lc "python3 -m pip --disable-pip-version-check install pefile" + %MSYS2_ROOT%\usr\bin\bash -lc "python3 -m pip --disable-pip-version-check install pefile jsonschema" displayName: Install Dependencies - script: | set BOOST_ROOT= diff --git a/ci/ciimage/arch/install.sh b/ci/ciimage/arch/install.sh index 7b6eda9b3..7fe139edc 100755 --- a/ci/ciimage/arch/install.sh +++ b/ci/ciimage/arch/install.sh @@ -12,6 +12,7 @@ pkgs=( itstool gtk3 java-environment=8 gtk-doc llvm clang sdl2 graphviz doxygen vulkan-validation-layers openssh mercurial gtk-sharp-2 qt5-tools libwmf valgrind cmake netcdf-fortran openmpi nasm gnustep-base gettext + python-jsonschema # cuda ) diff --git a/ci/run.ps1 b/ci/run.ps1 index 9811febc9..34856c0cd 100644 --- a/ci/run.ps1 +++ b/ci/run.ps1 @@ -67,7 +67,7 @@ python --version # Needed for running unit tests in parallel. echo "" -python -m pip --disable-pip-version-check install --upgrade pefile pytest-xdist +python -m pip --disable-pip-version-check install --upgrade pefile pytest-xdist jsonschema echo "" echo "=== Start running tests ===" diff --git a/ci/travis_install.sh b/ci/travis_install.sh index 346dcb6b5..5d191f1ca 100755 --- a/ci/travis_install.sh +++ b/ci/travis_install.sh @@ -11,6 +11,7 @@ if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then if [[ "$MESON_ARGS" =~ .*unity=on.* ]]; then which pkg-config || brew install pkg-config fi + python3 -m pip install jsonschema elif [[ "$TRAVIS_OS_NAME" == "linux" ]]; then msg "Running Linux setup" docker pull mesonbuild/eoan From e0c9259e79f21c6ee19f996c1f5d817d510de663 Mon Sep 17 00:00:00 2001 From: Dylan Baker Date: Fri, 20 Mar 2020 13:22:43 -0700 Subject: [PATCH 4/6] Add a json schema for the test.json used in tests This does a couple of nice things. One is that editors like vscode can be configured to use this schema to provide auto-completion and error highlighting if invalid values are added or required values are missing.
It also allows us to test that the format of the test matrix works, in a unit test which I've added. It does require that the python jsonschema package is installed. --- .editorconfig | 2 + data/test.schema.json | 105 ++++++++++++++++++++++++++++++++++ docs/markdown/Contributing.md | 12 +++- run_unittests.py | 26 +++++++++ 4 files changed, 143 insertions(+), 2 deletions(-) create mode 100644 data/test.schema.json diff --git a/.editorconfig b/.editorconfig index d84862726..c2dd5d06a 100644 --- a/.editorconfig +++ b/.editorconfig @@ -23,3 +23,5 @@ indent_size = 2 [meson.build] indent_size = 2 +[*.json] +indent_size = 2 diff --git a/data/test.schema.json b/data/test.schema.json new file mode 100644 index 000000000..72f160fd2 --- /dev/null +++ b/data/test.schema.json @@ -0,0 +1,105 @@ +{ +  "type": "object", +  "properties": { +    "env": { +      "type": "object", +      "additionalProperties": { +        "type": "string" +      } +    }, +    "installed": { +      "type": "array", +      "items": { +        "type": "object", +        "properties": { +          "file": { +            "type": "string" +          }, +          "type": { +            "type": "string", +            "enum": [ +              "file", +              "exe", +              "shared_lib", +              "pdb", +              "implib", +              "implibempty", +              "expr" +            ] +          }, +          "platform": { +            "type": "string", +            "enum": [ +              "msvc", +              "gcc", +              "cygwin", +              "!cygwin" +            ] +          }, +          "version": { +            "type": "string" +          }, +          "language": { +            "type": "string" +          } +        }, +        "required": [ +          "file", +          "type" +        ] +      } +    }, +    "matrix": { +      "type": "object", +      "additionalProperties": { +        "properties": { +          "options": { +            "type": "array", +            "items": { +              "type": "object", +              "properties": { +                "val": { +                  "type": "string" +                }, +                "compilers": { +                  "type": "object", +                  "additionalProperties": { +                    "type": "string" +                  } +                }, +                "skip_on_env": { +                  "type": "array", +                  "items": { +                    "type": "string" +                  } +                } +              }, +              "required": [ +                "val" +              ] +            } +          }, +          "exclude": { +            "type": "array", +            "items": { +              "type": "object", +              "additionalProperties": { +                "type": "string" +              } +            } +          } +        } +      } +    }, +    "do_not_set_opts": { +      "type": "array", +      "items": { +        "type": "string", +        "enum": [ +          "libdir", +          "prefix" +        ] +      } +    } +  } +} diff --git a/docs/markdown/Contributing.md b/docs/markdown/Contributing.md index 6f4c397b8..53329389b 100644 --- a/docs/markdown/Contributing.md +++ b/docs/markdown/Contributing.md @@ -294,9 +294,17 @@ Additionally, the `skip_on_env` key can be used to specify a list of environment variables. If at least one environment variable in `skip_on_env` is present, all matrix entries containing this value are skipped. -Similarly, the `compilers` key can be used to define a set of compilers required -for this value. +Similarly, the `compilers` key can be used to define a mapping of languages to compilers that are required for this value. +```json +{ +  "compilers": { +    "c": "gcc", +    "cpp": "gcc", +    "d": "gdc" +  } +} +``` Specific option combinations can be excluded with the `exclude` section. It should be noted that `exclude` does not require exact matches. 
Instead, any matrix entry diff --git a/run_unittests.py b/run_unittests.py index 4cd6e1787..cc294dd69 100755 --- a/run_unittests.py +++ b/run_unittests.py @@ -1227,6 +1227,32 @@ class InternalTests(unittest.TestCase): actual = [m() for m in f(env, MachineChoice.HOST, {'required': False})] self.assertListEqual([m.type_name for m in actual], ['cmake', 'pkgconfig']) + def test_validate_json(self) -> None: + """Validate the json schema for the test cases.""" + try: + from jsonschema import validate, ValidationError + except ImportError: + if is_ci(): + raise + raise unittest.SkipTest('Python jsonschema module not found.') + + with Path('data/test.schema.json').open() as f: + schema = json.load(f) + + errors = [] # type: T.Tuple[str, Exception] + for p in Path('test cases').glob('**/test.json'): + with p.open() as f: + try: + validate(json.load(f), schema=schema) + except ValidationError as e: + errors.append((p.resolve(), e)) + + for f, e in errors: + print('Failed to validate: "{}"'.format(f)) + print(str(e)) + + self.assertFalse(errors) + @unittest.skipIf(is_tarball(), 'Skipping because this is a tarball release') class DataTests(unittest.TestCase): From ea34c666acba392552eb55ddc782417a5d604f50 Mon Sep 17 00:00:00 2001 From: Dylan Baker Date: Fri, 20 Mar 2020 13:28:54 -0700 Subject: [PATCH 5/6] tests: use a json matrix for MPI This is a nice way to ensure that all of the methods continue to work --- test cases/frameworks/17 mpi/test.json | 15 +++++++++++++++ 1 file changed, 15 insertions(+) create mode 100644 test cases/frameworks/17 mpi/test.json diff --git a/test cases/frameworks/17 mpi/test.json b/test cases/frameworks/17 mpi/test.json new file mode 100644 index 000000000..b3194ed81 --- /dev/null +++ b/test cases/frameworks/17 mpi/test.json @@ -0,0 +1,15 @@ +{ + "matrix": { + "options": { + "method": [ + { "val": "auto" }, + { "val": "pkg-config" }, + { "val": "config-tool" }, + { + "val": "system", + "compilers": { "c" :"msvc", "cpp": "msvc" } + } + ] + } + } +} From eb45ce6189870181b9d8a8eb07cfeab55a0ed012 Mon Sep 17 00:00:00 2001 From: Dylan Baker Date: Fri, 20 Mar 2020 14:31:59 -0700 Subject: [PATCH 6/6] tests: Convert sdl2 to use a matrix option --- test cases/frameworks/16 sdl2/meson.build | 20 +++---------------- .../frameworks/16 sdl2/meson_options.txt | 6 ++++++ test cases/frameworks/16 sdl2/test.json | 13 ++++++++++++ 3 files changed, 22 insertions(+), 17 deletions(-) create mode 100644 test cases/frameworks/16 sdl2/meson_options.txt create mode 100644 test cases/frameworks/16 sdl2/test.json diff --git a/test cases/frameworks/16 sdl2/meson.build b/test cases/frameworks/16 sdl2/meson.build index 662f9b521..fc98010ba 100644 --- a/test cases/frameworks/16 sdl2/meson.build +++ b/test cases/frameworks/16 sdl2/meson.build @@ -1,6 +1,8 @@ project('sdl2 test', 'c') -sdl2_dep = dependency('sdl2', version : '>=2.0.0', required: false) +method = get_option('method') + +sdl2_dep = dependency('sdl2', version : '>=2.0.0', required : false, method : method) if not sdl2_dep.found() error('MESON_SKIP_TEST sdl2 not found.') @@ -9,19 +11,3 @@ endif e = executable('sdl2prog', 'sdl2prog.c', dependencies : sdl2_dep) test('sdl2test', e) - -if sdl2_dep.type_name() == 'extraframeworks' - # The SDL OSX framework does not ship with detection executables - # so skip the remaining tests. 
- subdir_done() -endif - -# Ensure that we can find it with sdl2-config too, using the legacy method name -configdep = dependency('sdl2', method : 'sdlconfig') - -# And the modern method name -configdep = dependency('sdl2', method : 'config-tool') - -# Check we can apply a version constraint -dependency('sdl2', version: '>=@0@'.format(sdl2_dep.version()), method: 'pkg-config') -dependency('sdl2', version: '>=@0@'.format(sdl2_dep.version()), method: 'config-tool') diff --git a/test cases/frameworks/16 sdl2/meson_options.txt b/test cases/frameworks/16 sdl2/meson_options.txt new file mode 100644 index 000000000..176af1775 --- /dev/null +++ b/test cases/frameworks/16 sdl2/meson_options.txt @@ -0,0 +1,6 @@ +option( + 'method', + type : 'combo', + choices : ['auto', 'pkg-config', 'config-tool', 'sdlconfig', 'extraframework'], + value : 'auto', +) diff --git a/test cases/frameworks/16 sdl2/test.json b/test cases/frameworks/16 sdl2/test.json new file mode 100644 index 000000000..8cf8543c8 --- /dev/null +++ b/test cases/frameworks/16 sdl2/test.json @@ -0,0 +1,13 @@ +{ + "matrix": { + "options": { + "method": [ + { "val": "auto" }, + { "val": "pkg-config" }, + { "val": "config-tool" }, + { "val": "sdlconfig" }, + { "val": "extraframework" } + ] + } + } +}
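As a closing illustration of what the matrix files above encode: every combination of the listed option values is intended to become one configuration of the test. The sketch below is illustrative only; it is not the expansion code in run_project_tests.py, and the `-Doption=value` form is an assumption based on the `method` option declared in the test's meson_options.txt.

```python
import itertools
import json

# Illustrative sketch: expand a test.json matrix into one meson option list
# per combination. The real logic lives in run_project_tests.py.
with open('test cases/frameworks/16 sdl2/test.json') as f:
    matrix_options = json.load(f)['matrix']['options']

names = sorted(matrix_options)
for combo in itertools.product(*(matrix_options[name] for name in names)):
    # Each entry is a dict such as {"val": "pkg-config"}, optionally carrying
    # "compilers" or "skip_on_env" keys that the runner can use to skip it.
    args = ['-D{}={}'.format(name, entry['val']) for name, entry in zip(names, combo)]
    print(args)
```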