Merge pull request #6816 from dcbaker/framework-matrix
project test junit schema + a few more uses
commit 84f28fc3f1
@@ -23,3 +23,5 @@ indent_size = 2
 [meson.build]
 indent_size = 2
 
+[*.json]
+indent_size = 2
@@ -15,7 +15,7 @@ jobs:
     - name: Install Dependencies
       run: |
         sudo apt update -yq
-        sudo apt install -yq --no-install-recommends python3-setuptools python3-pip g++ gfortran gobjc gobjc++ zlib1g-dev python-dev python3-dev
+        sudo apt install -yq --no-install-recommends python3-setuptools python3-pip g++ gfortran gobjc gobjc++ zlib1g-dev python-dev python3-dev python3-jsonschema
     - name: Install ninja-build tool
       uses: seanmiddleditch/gha-setup-ninja@v1
     - name: Python version
@@ -100,8 +100,8 @@ jobs:
     displayName: Install Dependencies
   - script: |
       set PATH=%CYGWIN_ROOT%\bin;%SYSTEMROOT%\system32
-      env.exe -- python3 -m pip --disable-pip-version-check install pefile pytest-xdist
-    displayName: pip install pefile pytest-xdist
+      env.exe -- python3 -m pip --disable-pip-version-check install pefile pytest-xdist jsonschema
+    displayName: pip install pefile pytest-xdist jsonschema
   - script: |
       set BOOST_ROOT=
      set PATH=%CYGWIN_ROOT%\bin;%SYSTEMROOT%\system32
@@ -169,7 +169,7 @@ jobs:
         mingw-w64-$(MSYS2_ARCH)-python3-setuptools ^
         mingw-w64-$(MSYS2_ARCH)-python3-pip ^
         %TOOLCHAIN%
-      %MSYS2_ROOT%\usr\bin\bash -lc "python3 -m pip --disable-pip-version-check install pefile"
+      %MSYS2_ROOT%\usr\bin\bash -lc "python3 -m pip --disable-pip-version-check install pefile jsonschema"
     displayName: Install Dependencies
   - script: |
       set BOOST_ROOT=
@@ -12,6 +12,7 @@ pkgs=(
   itstool gtk3 java-environment=8 gtk-doc llvm clang sdl2 graphviz
   doxygen vulkan-validation-layers openssh mercurial gtk-sharp-2 qt5-tools
   libwmf valgrind cmake netcdf-fortran openmpi nasm gnustep-base gettext
+  python-jsonschema
   # cuda
 )
 
@@ -67,7 +67,7 @@ python --version
 
 # Needed for running unit tests in parallel.
 echo ""
-python -m pip --disable-pip-version-check install --upgrade pefile pytest-xdist
+python -m pip --disable-pip-version-check install --upgrade pefile pytest-xdist jsonschema
 
 echo ""
 echo "=== Start running tests ==="
@@ -11,6 +11,7 @@ if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then
     if [[ "$MESON_ARGS" =~ .*unity=on.* ]]; then
         which pkg-config || brew install pkg-config
     fi
+    python3 -m pip install jsonschema
 elif [[ "$TRAVIS_OS_NAME" == "linux" ]]; then
     msg "Running Linux setup"
     docker pull mesonbuild/eoan
@@ -0,0 +1,105 @@
+{
+  "type": "object",
+  "properties": {
+    "env": {
+      "type": "object",
+      "additionalProperties": {
+        "type": "string"
+      }
+    },
+    "installed": {
+      "type": "array",
+      "items": {
+        "type": "object",
+        "properties": {
+          "file": {
+            "type": "string"
+          },
+          "type": {
+            "type": "string",
+            "enum": [
+              "file",
+              "exe",
+              "shared_lib",
+              "pdb",
+              "implib",
+              "implibempty",
+              "expr"
+            ]
+          },
+          "platform": {
+            "type": "string",
+            "enum": [
+              "msvc",
+              "gcc",
+              "cygwin",
+              "!cygwin"
+            ]
+          },
+          "version": {
+            "type": "string"
+          },
+          "language": {
+            "type": "string"
+          }
+        },
+        "required": [
+          "file",
+          "type"
+        ]
+      }
+    },
+    "matrix": {
+      "type": "object",
+      "additionalProperties": {
+        "properties": {
+          "options": {
+            "type": "array",
+            "items": {
+              "type": "object",
+              "properties": {
+                "val": {
+                  "type": "string"
+                },
+                "compilers": {
+                  "type": "object",
+                  "additionalProperties": {
+                    "type": "string"
+                  }
+                },
+                "skip_on_env": {
+                  "type": "array",
+                  "items": {
+                    "type": "string"
+                  }
+                }
+              },
+              "required": [
+                "val"
+              ]
+            }
+          },
+          "exclude": {
+            "type": "array",
+            "items": {
+              "type": "object",
+              "additionalProperties": {
+                "type": "string"
+              }
+            }
+          }
+        }
+      }
+    },
+    "do_not_set_opts": {
+      "type": "array",
+      "items": {
+        "type": "string",
+        "enum": [
+          "libdir",
+          "prefix"
+        ]
+      }
+    }
+  }
+}
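For orientation, here is a small example of a `test.json` that should validate against the schema above. It is not part of this commit; the file paths, the option name `opt1`, and its values are made up for illustration, following the layout of the real `test.json` files added at the end of this diff.

```json
{
  "installed": [
    { "file": "usr/bin/testprog", "type": "exe" },
    { "file": "usr/bin/testprog.pdb", "type": "pdb", "platform": "msvc" }
  ],
  "matrix": {
    "options": {
      "opt1": [
        { "val": "abc" },
        { "val": "qwert", "skip_on_env": ["SKIP_QWERT"] },
        { "val": "xyz", "compilers": { "c": "gcc", "cpp": "gcc" } }
      ]
    }
  },
  "do_not_set_opts": ["libdir"]
}
```

Each `installed` entry carries at least `file` and `type`, each matrix value carries at least `val`, and `do_not_set_opts` may only contain `libdir` and `prefix`, matching the `required` and `enum` constraints in the schema.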
@@ -294,9 +294,17 @@ Additionally, the `skip_on_env` key can be used to specify a list of environment
 variables. If at least one environment variable in `skip_on_env` is present, all
 matrix entries containing this value are skipped.
 
-Similarly, the `compilers` key can be used to define a set of compilers required
-for this value.
+Similarly, the `compilers` key can be used to define a mapping of compilers to languages that are required for this value.
+
+```json
+{
+  "compilers": {
+    "c": "gcc",
+    "cpp": "gcc",
+    "d": "gdc"
+  }
+}
+```
 
 Specific option combinations can be excluded with the `exclude` section. It should
 be noted that `exclude` does not require exact matches. Instead, any matrix entry
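A minimal sketch of the `exclude` behaviour described above, using hypothetical option names and values that are not taken from this commit: the single `exclude` entry drops every matrix combination in which `opt1` is `qwert` and `opt2` is `false`, regardless of what other options are set.

```json
{
  "matrix": {
    "options": {
      "opt1": [{ "val": "abc" }, { "val": "qwert" }],
      "opt2": [{ "val": "true" }, { "val": "false" }]
    },
    "exclude": [
      { "opt1": "qwert", "opt2": "false" }
    ]
  }
}
```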
@@ -404,21 +404,21 @@ def run_test_inprocess(testdir):
 
 # Build directory name must be the same so Ccache works over
 # consecutive invocations.
-def create_deterministic_builddir(test: TestDef) -> str:
+def create_deterministic_builddir(test: TestDef, use_tmpdir: bool) -> str:
     import hashlib
     src_dir = test.path.as_posix()
     if test.name:
         src_dir += test.name
     rel_dirname = 'b ' + hashlib.sha256(src_dir.encode(errors='ignore')).hexdigest()[0:10]
-    os.mkdir(rel_dirname)
-    abs_pathname = os.path.join(os.getcwd(), rel_dirname)
+    abs_pathname = os.path.join(tempfile.gettempdir() if use_tmpdir else os.getcwd(), rel_dirname)
+    os.mkdir(abs_pathname)
     return abs_pathname
 
-def run_test(test: TestDef, extra_args, compiler, backend, flags, commands, should_fail):
+def run_test(test: TestDef, extra_args, compiler, backend, flags, commands, should_fail, use_tmp: bool):
     if test.skip:
         return None
-    with AutoDeletedDir(create_deterministic_builddir(test)) as build_dir:
-        with AutoDeletedDir(tempfile.mkdtemp(prefix='i ', dir=os.getcwd())) as install_dir:
+    with AutoDeletedDir(create_deterministic_builddir(test, use_tmp)) as build_dir:
+        with AutoDeletedDir(tempfile.mkdtemp(prefix='i ', dir=None if use_tmp else os.getcwd())) as install_dir:
             try:
                 return _run_test(test, build_dir, install_dir, extra_args, compiler, backend, flags, commands, should_fail)
             except TestResult as r:
@@ -666,8 +666,8 @@ def have_d_compiler():
         return True
     return False
 
-def have_objc_compiler():
-    with AutoDeletedDir(tempfile.mkdtemp(prefix='b ', dir='.')) as build_dir:
+def have_objc_compiler(use_tmp: bool) -> bool:
+    with AutoDeletedDir(tempfile.mkdtemp(prefix='b ', dir=None if use_tmp else '.')) as build_dir:
         env = environment.Environment(None, build_dir, get_fake_options('/'))
         try:
             objc_comp = env.detect_objc_compiler(MachineChoice.HOST)
@@ -682,8 +682,8 @@ def have_objc_compiler():
             return False
         return True
 
-def have_objcpp_compiler():
-    with AutoDeletedDir(tempfile.mkdtemp(prefix='b ', dir='.')) as build_dir:
+def have_objcpp_compiler(use_tmp: bool) -> bool:
+    with AutoDeletedDir(tempfile.mkdtemp(prefix='b ', dir=None if use_tmp else '.')) as build_dir:
         env = environment.Environment(None, build_dir, get_fake_options('/'))
         try:
             objcpp_comp = env.detect_objcpp_compiler(MachineChoice.HOST)
@@ -734,7 +734,11 @@ def skippable(suite, test):
 
     # Scientific libraries are skippable on certain systems
    # See the discussion here: https://github.com/mesonbuild/meson/pull/6562
-    if any([test.endswith(x) for x in ['17 mpi', '25 hdf5', '30 scalapack']]) and skip_scientific:
+    if any([x in test for x in ['17 mpi', '25 hdf5', '30 scalapack']]) and skip_scientific:
         return True
 
+    # These create OS specific tests, and need to be skippable
+    if any([x in test for x in ['16 sdl', '17 mpi']]):
+        return True
+
     # No frameworks test should be skipped on linux CI, as we expect all
@@ -805,7 +809,7 @@ def should_skip_rust(backend: Backend) -> bool:
         return True
     return False
 
-def detect_tests_to_run(only: T.List[str]) -> T.List[T.Tuple[str, T.List[TestDef], bool]]:
+def detect_tests_to_run(only: T.List[str], use_tmp: bool) -> T.List[T.Tuple[str, T.List[TestDef], bool]]:
     """
     Parameters
     ----------
@@ -842,8 +846,8 @@ def detect_tests_to_run(only: T.List[str]) -> T.List[T.Tuple[str, T.List[TestDef
         ('vala', 'vala', backend is not Backend.ninja or not shutil.which(os.environ.get('VALAC', 'valac'))),
         ('rust', 'rust', should_skip_rust(backend)),
         ('d', 'd', backend is not Backend.ninja or not have_d_compiler()),
-        ('objective c', 'objc', backend not in (Backend.ninja, Backend.xcode) or not have_objc_compiler()),
-        ('objective c++', 'objcpp', backend not in (Backend.ninja, Backend.xcode) or not have_objcpp_compiler()),
+        ('objective c', 'objc', backend not in (Backend.ninja, Backend.xcode) or not have_objc_compiler(options.use_tmpdir)),
+        ('objective c++', 'objcpp', backend not in (Backend.ninja, Backend.xcode) or not have_objcpp_compiler(options.use_tmpdir)),
         ('fortran', 'fortran', skip_fortran or backend != Backend.ninja),
         ('swift', 'swift', backend not in (Backend.ninja, Backend.xcode) or not shutil.which('swiftc')),
         # CUDA tests on Windows: use Ninja backend: python run_project_tests.py --only cuda --backend ninja
@@ -866,16 +870,16 @@ def detect_tests_to_run(only: T.List[str]) -> T.List[T.Tuple[str, T.List[TestDef
 
 def run_tests(all_tests: T.List[T.Tuple[str, T.List[TestDef], bool]],
               log_name_base: str, failfast: bool,
-              extra_args: T.List[str]) -> T.Tuple[int, int, int]:
+              extra_args: T.List[str], use_tmp: bool) -> T.Tuple[int, int, int]:
     global logfile
     txtname = log_name_base + '.txt'
     with open(txtname, 'w', encoding='utf-8', errors='ignore') as lf:
         logfile = lf
-        return _run_tests(all_tests, log_name_base, failfast, extra_args)
+        return _run_tests(all_tests, log_name_base, failfast, extra_args, use_tmp)
 
 def _run_tests(all_tests: T.List[T.Tuple[str, T.List[TestDef], bool]],
                log_name_base: str, failfast: bool,
-               extra_args: T.List[str]) -> T.Tuple[int, int, int]:
+               extra_args: T.List[str], use_tmp: bool) -> T.Tuple[int, int, int]:
     global stop, executor, futures, system_compiler
     xmlname = log_name_base + '.xml'
     junit_root = ET.Element('testsuites')
@@ -929,7 +933,7 @@ def _run_tests(all_tests: T.List[T.Tuple[str, T.List[TestDef], bool]],
 
             t.skip = skipped or t.skip
             result = executor.submit(run_test, t, extra_args + suite_args + t.args,
-                                     system_compiler, backend, backend_flags, commands, should_fail)
+                                     system_compiler, backend, backend_flags, commands, should_fail, use_tmp)
             futures.append((testname, t, result))
         for (testname, t, result) in futures:
             sys.stdout.flush()
@@ -1047,7 +1051,7 @@ def check_meson_commands_work(options):
     global backend, compile_commands, test_commands, install_commands
     testdir = PurePath('test cases', 'common', '1 trivial').as_posix()
     meson_commands = mesonlib.python_command + [get_meson_script()]
-    with AutoDeletedDir(tempfile.mkdtemp(prefix='b ', dir='.')) as build_dir:
+    with AutoDeletedDir(tempfile.mkdtemp(prefix='b ', dir=None if options.use_tmpdir else '.')) as build_dir:
         print('Checking that configuring works...')
         gen_cmd = meson_commands + [testdir, build_dir] + backend_flags + options.extra_args
         pc, o, e = Popen_safe(gen_cmd)
@@ -1072,7 +1076,7 @@ def check_meson_commands_work(options):
 def detect_system_compiler(options):
     global system_compiler, compiler_id_map
 
-    with AutoDeletedDir(tempfile.mkdtemp(prefix='b ', dir='.')) as build_dir:
+    with AutoDeletedDir(tempfile.mkdtemp(prefix='b ', dir=None if options.use_tmpdir else '.')) as build_dir:
         fake_opts = get_fake_options('/')
         if options.cross_file:
             fake_opts.cross_file = [options.cross_file]
@@ -1139,6 +1143,7 @@ if __name__ == '__main__':
                         help='Not used, only here to simplify run_tests.py')
     parser.add_argument('--only', help='name of test(s) to run', nargs='+', choices=ALL_TESTS)
     parser.add_argument('--cross-file', action='store', help='File describing cross compilation environment.')
+    parser.add_argument('--use-tmpdir', action='store_true', help='Use tmp directory for temporary files.')
     options = parser.parse_args()
     if options.cross_file:
         options.extra_args += ['--cross-file', options.cross_file]
@@ -1152,8 +1157,8 @@ if __name__ == '__main__':
     check_format()
     check_meson_commands_work(options)
     try:
-        all_tests = detect_tests_to_run(options.only)
-        (passing_tests, failing_tests, skipped_tests) = run_tests(all_tests, 'meson-test-run', options.failfast, options.extra_args)
+        all_tests = detect_tests_to_run(options.only, options.use_tmpdir)
+        (passing_tests, failing_tests, skipped_tests) = run_tests(all_tests, 'meson-test-run', options.failfast, options.extra_args, options.use_tmpdir)
     except StopException:
         pass
     print('\nTotal passed tests:', green(str(passing_tests)))
@@ -1227,6 +1227,32 @@ class InternalTests(unittest.TestCase):
         actual = [m() for m in f(env, MachineChoice.HOST, {'required': False})]
         self.assertListEqual([m.type_name for m in actual], ['cmake', 'pkgconfig'])
 
+    def test_validate_json(self) -> None:
+        """Validate the json schema for the test cases."""
+        try:
+            from jsonschema import validate, ValidationError
+        except ImportError:
+            if is_ci():
+                raise
+            raise unittest.SkipTest('Python jsonschema module not found.')
+
+        with Path('data/test.schema.json').open() as f:
+            schema = json.load(f)
+
+        errors = []  # type: T.Tuple[str, Exception]
+        for p in Path('test cases').glob('**/test.json'):
+            with p.open() as f:
+                try:
+                    validate(json.load(f), schema=schema)
+                except ValidationError as e:
+                    errors.append((p.resolve(), e))
+
+        for f, e in errors:
+            print('Failed to validate: "{}"'.format(f))
+            print(str(e))
+
+        self.assertFalse(errors)
+
 
 @unittest.skipIf(is_tarball(), 'Skipping because this is a tarball release')
 class DataTests(unittest.TestCase):
@@ -1,6 +1,8 @@
 project('sdl2 test', 'c')
 
-sdl2_dep = dependency('sdl2', version : '>=2.0.0', required: false)
+method = get_option('method')
+
+sdl2_dep = dependency('sdl2', version : '>=2.0.0', required : false, method : method)
 
 if not sdl2_dep.found()
   error('MESON_SKIP_TEST sdl2 not found.')
@@ -9,19 +11,3 @@ endif
 e = executable('sdl2prog', 'sdl2prog.c', dependencies : sdl2_dep)
 
 test('sdl2test', e)
-
-if sdl2_dep.type_name() == 'extraframeworks'
-  # The SDL OSX framework does not ship with detection executables
-  # so skip the remaining tests.
-  subdir_done()
-endif
-
-# Ensure that we can find it with sdl2-config too, using the legacy method name
-configdep = dependency('sdl2', method : 'sdlconfig')
-
-# And the modern method name
-configdep = dependency('sdl2', method : 'config-tool')
-
-# Check we can apply a version constraint
-dependency('sdl2', version: '>=@0@'.format(sdl2_dep.version()), method: 'pkg-config')
-dependency('sdl2', version: '>=@0@'.format(sdl2_dep.version()), method: 'config-tool')
@@ -0,0 +1,6 @@
+option(
+  'method',
+  type : 'combo',
+  choices : ['auto', 'pkg-config', 'config-tool', 'sdlconfig', 'extraframework'],
+  value : 'auto',
+)
@@ -0,0 +1,13 @@
+{
+  "matrix": {
+    "options": {
+      "method": [
+        { "val": "auto" },
+        { "val": "pkg-config" },
+        { "val": "config-tool" },
+        { "val": "sdlconfig" },
+        { "val": "extraframework" }
+      ]
+    }
+  }
+}
@@ -0,0 +1,15 @@
+{
+  "matrix": {
+    "options": {
+      "method": [
+        { "val": "auto" },
+        { "val": "pkg-config" },
+        { "val": "config-tool" },
+        {
+          "val": "system",
+          "compilers": { "c": "msvc", "cpp": "msvc" }
+        }
+      ]
+    }
+  }
+}