Can define benchmarks and run them.

Jussi Pakkanen 2015-11-25 23:29:06 +02:00
parent 3cbe9d32fa
commit 26873801bc
9 changed files with 75 additions and 8 deletions

View File

@@ -127,6 +127,10 @@ class Backend():
         datafile = open(test_data, 'wb')
         self.write_test_file(datafile)
         datafile.close()
+        benchmark_data = os.path.join(self.environment.get_scratch_dir(), 'meson_benchmark_setup.dat')
+        datafile = open(benchmark_data, 'wb')
+        self.write_benchmark_file(datafile)
+        datafile.close()

    def has_vala(self, target):
        for s in target.get_sources():
@@ -269,9 +273,15 @@ class Backend():
             result.append(dirseg)
         return result

+    def write_benchmark_file(self, datafile):
+        self.write_test_serialisation(self.build.get_benchmarks(), datafile)
+
     def write_test_file(self, datafile):
+        self.write_test_serialisation(self.build.get_tests(), datafile)
+
+    def write_test_serialisation(self, tests, datafile):
         arr = []
-        for t in self.build.get_tests():
+        for t in tests:
             exe = t.get_exe()
             if isinstance(exe, dependencies.ExternalProgram):
                 fname = exe.fullpath
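For orientation, the refactor above funnels tests and benchmarks through one serialisation path: a list of test records is pickled to a .dat file that the runner scripts load later. A minimal, self-contained sketch of that pattern (TestRecord and the file name here are illustrative stand-ins, not Meson's actual serialisation type):

import pickle

class TestRecord:
    # Stand-in for the serialised test/benchmark description.
    def __init__(self, name, cmd, timeout):
        self.name = name
        self.cmd = cmd
        self.timeout = timeout

def write_serialisation(records, datafile):
    # Both write_test_file and write_benchmark_file boil down to one
    # pickle dump; only the source list differs.
    pickle.dump(records, datafile)

with open('meson_benchmark_setup.dat', 'wb') as f:
    write_serialisation([TestRecord('delayer', ['./delayer'], 30)], f)

with open('meson_benchmark_setup.dat', 'rb') as f:
    print([t.name for t in pickle.load(f)])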

View File

@@ -65,6 +65,7 @@ class Build:
         self.cross_compilers = []
         self.global_args = {}
         self.tests = []
+        self.benchmarks = []
         self.headers = []
         self.man = []
         self.data = []
@@ -108,6 +109,9 @@ class Build:
     def get_tests(self):
         return self.tests

+    def get_benchmarks(self):
+        return self.benchmarks
+
     def get_headers(self):
         return self.headers


View File

@@ -215,6 +215,7 @@ forbidden_target_names = {'clean': None,
                           'all': None,
                           'test': None,
                           'test-valgrind': None,
+                          'benchmark': None,
                           'install': None,
                           'build.ninja': None,
                           }

View File

@@ -944,6 +944,7 @@ class Interpreter():
                      'run_target' : self.func_run_target,
                      'generator' : self.func_generator,
                      'test' : self.func_test,
+                     'benchmark' : self.func_benchmark,
                      'install_headers' : self.func_install_headers,
                      'install_man' : self.func_install_man,
                      'subdir' : self.func_subdir,
@@ -1676,7 +1677,13 @@ class Interpreter():
         self.generators.append(gen)
         return gen

+    def func_benchmark(self, node, args, kwargs):
+        self.add_test(node, args, kwargs, False)
+
     def func_test(self, node, args, kwargs):
+        self.add_test(node, args, kwargs, True)
+
+    def add_test(self, node, args, kwargs, is_base_test):
         if len(args) != 2:
             raise InterpreterException('Incorrect number of arguments')
         if not isinstance(args[0], str):
@@ -1718,8 +1725,12 @@ class Interpreter():
         if not isinstance(timeout, int):
             raise InterpreterException('Timeout must be an integer.')
         t = Test(args[0], args[1].held_object, par, cmd_args, env, should_fail, valgrind_args, timeout)
-        self.build.tests.append(t)
-        mlog.debug('Adding test "', mlog.bold(args[0]), '".', sep='')
+        if is_base_test:
+            self.build.tests.append(t)
+            mlog.debug('Adding test "', mlog.bold(args[0]), '".', sep='')
+        else:
+            self.build.benchmarks.append(t)
+            mlog.debug('Adding benchmark "', mlog.bold(args[0]), '".', sep='')

     @stringArgs
     def func_install_headers(self, node, args, kwargs):

View File

@@ -54,6 +54,9 @@ def run_benchmarks(options, datafile):
    jsonlogfile = open(jsonlogfilename, 'w')
    tests = pickle.load(open(datafile, 'rb'))
    num_tests = len(tests)
+   if num_tests == 0:
+       print('No benchmarks defined.')
+       return 0
    iteration_count = 5
    wrap = [] # Benchmarks on cross builds are pointless so don't support them.
    for i, test in enumerate(tests):
@@ -75,6 +78,7 @@ def run_benchmarks(options, datafile):
            resultstr = 'OK'
        print_stats(3, num_tests, test.name, resultstr, i, mean, stddev)
        print_json_log(jsonlogfile, runs, test.name, i)
+   print('\nFull log written to meson-logs/benchmarklog.json.')
    return failed_tests

def run(args):
def run(args):
@@ -87,7 +91,6 @@ def run(args):
    os.chdir(options.wd)
    datafile = options.args[0]
    returncode = run_benchmarks(options, datafile)
-   print('\nFull log written to meson-logs/benchmarklog.json.')
    return returncode

if __name__ == '__main__':
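The surrounding runner (not shown in full here) executes each benchmark iteration_count times and reports the mean and standard deviation that print_stats consumes. A rough sketch of that measurement loop, assuming one plain subprocess invocation per run (helper names are illustrative, not the ones in meson_benchmark.py):

import subprocess, time, statistics

def time_one_run(cmd):
    # Wall-clock duration of a single benchmark execution.
    start = time.time()
    subprocess.call(cmd)
    return time.time() - start

def measure(cmd, iteration_count=5):
    runs = [time_one_run(cmd) for _ in range(iteration_count)]
    return statistics.mean(runs), statistics.stdev(runs)

mean, stddev = measure(['./delayer'])
print('mean %.3fs, stddev %.3fs' % (mean, stddev))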

View File

@@ -550,6 +550,17 @@ class NinjaBackend(backends.Backend):
         velem.write(outfile)
         self.check_outputs(velem)

+        # And then benchmarks.
+        benchmark_script = os.path.join(script_root, 'meson_benchmark.py')
+        benchmark_data = os.path.join(self.environment.get_scratch_dir(), 'meson_benchmark_setup.dat')
+        cmd = [sys.executable, benchmark_script, benchmark_data]
+        elem = NinjaBuildElement('benchmark', 'CUSTOM_COMMAND', ['all', 'PHONY'])
+        elem.add_item('COMMAND', cmd)
+        elem.add_item('DESC', 'Running benchmark suite.')
+        elem.add_item('pool', 'console')
+        elem.write(outfile)
+        self.check_outputs(elem)
+
     def generate_rules(self, outfile):
         outfile.write('# Rules for compiling.\n\n')
         self.generate_compile_rules(outfile)
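For reference, the element constructed above should serialise to roughly the following stanza in build.ninja. The exact layout comes from NinjaBuildElement.write(), so treat this printout as an approximation; the paths are illustrative:

import os, sys

script_root = 'meson/scripts'   # illustrative install location
scratch_dir = 'meson-private'   # illustrative scratch dir
cmd = ' '.join([sys.executable,
                os.path.join(script_root, 'meson_benchmark.py'),
                os.path.join(scratch_dir, 'meson_benchmark_setup.dat')])
print('build benchmark: CUSTOM_COMMAND all PHONY')
print(' COMMAND = ' + cmd)
print(' DESC = Running benchmark suite.')
print(' pool = console')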

View File

@@ -21,7 +21,7 @@ import sys
import environment
import mesonlib
import mlog
-import meson, meson_test
+import meson, meson_test, meson_benchmark
import argparse
import xml.etree.ElementTree as ET
import time
@@ -87,7 +87,7 @@ def setup_commands(backend):
        compile_commands = [ninja_command, '-v']
    else:
        compile_commands = [ninja_command]
-   test_commands = [ninja_command, 'test']
+   test_commands = [ninja_command, 'test', 'benchmark']
    install_commands = [ninja_command, 'install']

def platform_fix_filename(fname):
@@ -165,11 +165,12 @@ def run_test_inprocess(testdir):
    sys.stderr = mystderr = StringIO()
    old_cwd = os.getcwd()
    os.chdir(testdir)
-   returncode = meson_test.run(['meson-private/meson_test_setup.dat'])
+   returncode_test = meson_test.run(['meson-private/meson_test_setup.dat'])
+   returncode_benchmark = meson_benchmark.run(['meson-private/meson_benchmark_setup.dat'])
    sys.stdout = old_stdout
    sys.stderr = old_stderr
    os.chdir(old_cwd)
-   return (returncode, mystdout.getvalue(), mystderr.getvalue())
+   return (max(returncode_test, returncode_benchmark), mystdout.getvalue(), mystderr.getvalue())

def run_test(testdir, extra_args, should_succeed):

View File

@@ -0,0 +1,21 @@
+/* Simple prog that sleeps for a random time. */
+#include<stdlib.h>
+#include<time.h> /* needed on every platform: srand(time(NULL)) below */
+#if defined(_MSC_VER)
+#include<windows.h>
+#endif
+
+int main(int argc, char **argv) {
+    srand(time(NULL));
+#if !defined(_MSC_VER)
+    /* Sleep for up to ~0.2 seconds. */
+    struct timespec t;
+    t.tv_sec = 0;
+    t.tv_nsec = 199999999.0*rand()/RAND_MAX;
+    nanosleep(&t, NULL);
+#else
+    /* Sleep for up to 500 ms. */
+    Sleep(500.0*rand()/RAND_MAX);
+#endif
+    return 0;
+}

View File

@@ -0,0 +1,5 @@
+project('benchmark', 'c',
+  default_options : ['c_std=gnu99'])
+
+delayer = executable('delayer', 'delayer.c')
+benchmark('delayer', delayer)
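With the phony 'benchmark' target wired into the ninja backend, this test project can be exercised end to end the same way run_tests.py drives it. A minimal sketch of that flow, assuming meson and ninja are on PATH and the directory names below are placeholders:

import subprocess

subprocess.check_call(['meson', 'sourcedir', 'builddir'])
subprocess.check_call(['ninja', '-C', 'builddir'])
# Runs the suite via the 'benchmark' target added in the ninja backend.
subprocess.check_call(['ninja', '-C', 'builddir', 'benchmark'])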