# Copyright 2012 Jussi Pakkanen

# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at

# http://www.apache.org/licenses/LICENSE-2.0

# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import os, sys, re, pickle
import interpreter, nodes
import environment, mlog
from meson_install import InstallData
from build import InvalidArguments
import build
import shutil
from coredata import MesonException

if environment.is_windows():
    quote_char = '"'
    execute_wrapper = 'cmd /c'
else:
    quote_char = "'"
    execute_wrapper = ''

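# Ninja escapes spaces and colons in paths with '$', so a source file such as
# "my file.c" must be written into build.ninja as "my$ file.c". ninja_quote()
# below performs only that escaping; shell quoting of command arguments is
# handled separately with quote_char when rule commands are written out.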
def ninja_quote(text):
    return text.replace(' ', '$ ').replace(':', '$:')

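# The next three helpers implement configure-file substitution: plain @varname@
# tokens are replaced with values from the configuration data, and lines of the
# form "#mesondefine FOO" become "#define FOO ...", "#define FOO" or
# "#undef FOO" depending on the stored value. For example, with version set to
# '1.0', the template line '#define VERSION "@version@"' comes out as
# '#define VERSION "1.0"' in the generated file.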
def do_replacement(regex, line, confdata):
    match = re.search(regex, line)
    while match:
        varname = match.group(1)
        if varname in confdata.keys():
            var = confdata.get(varname)
            if isinstance(var, str):
                pass
            elif isinstance(var, nodes.StringStatement):
                var = var.get_value()
            elif isinstance(var, int):
                var = str(var)
            else:
                raise RuntimeError('Tried to replace a variable with something other than a string or int.')
        else:
            var = ''
        line = line.replace('@' + varname + '@', var)
        match = re.search(regex, line)
    return line

def do_mesondefine(line, confdata):
    arr = line.split()
    if len(arr) != 2:
        raise build.InvalidArguments('#mesondefine does not contain exactly two tokens: %s' % line.strip())
    varname = arr[1]
    try:
        v = confdata.get(varname)
    except KeyError:
        return '/* undef %s */\n' % varname
    if isinstance(v, nodes.BoolStatement):
        v = v.get_value()
    if isinstance(v, bool):
        if v:
            return '#define %s\n' % varname
        else:
            return '#undef %s\n' % varname
    elif isinstance(v, int):
        return '#define %s %d\n' % (varname, v)
    elif isinstance(v, str):
        return '#define %s %s\n' % (varname, v)
    else:
        raise build.InvalidArguments('#mesondefine argument "%s" is of unknown type.' % varname)

def do_conf_file(src, dst, confdata):
    data = open(src).readlines()
    regex = re.compile('@(.*?)@')
    result = []
    for line in data:
        if line.startswith('#mesondefine'):
            line = do_mesondefine(line, confdata)
        else:
            line = do_replacement(regex, line, confdata)
        result.append(line)
    dst_tmp = dst + '~'
    open(dst_tmp, 'w').writelines(result)
    # If contents are identical, don't touch the file to prevent
    # unnecessary rebuilds.
    try:
        if open(dst, 'r').read() == open(dst_tmp, 'r').read():
            return
    except FileNotFoundError:
        pass
    os.replace(dst_tmp, dst)

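# A flat, pickle-friendly description of a single test. generate_tests() below
# dumps a list of these into meson_test_setup.dat, which the standalone
# meson_test.py script loads when the "test" target is run.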
class TestSerialisation:
    def __init__(self, name, fname, is_cross, exe_wrapper, is_parallel, cmd_args, env):
        self.name = name
        self.fname = fname
        self.is_cross = is_cross
        self.exe_runner = exe_wrapper
        self.is_parallel = is_parallel
        self.cmd_args = cmd_args
        self.env = env

# It may seem a bit silly that this Backend class exists on its own
# rather than being a part of NinjaBackend, which is the only class
# that uses Backend. The point is that common functionality that will
# be used by the Xcode, Visual Studio etc. backends will live in this
# one. Once work on those backends starts, the exact division of labor
# between the classes will be determined.
class Backend():
    def __init__(self, build, interp):
        self.build = build
        self.environment = build.environment
        self.interpreter = interp
        self.processed_targets = {}
        self.dep_rules = {}
        self.build_to_src = os.path.relpath(self.environment.get_source_dir(),
                                            self.environment.get_build_dir())

    def get_compiler_for_lang(self, lang):
        for i in self.build.compilers:
            if i.language == lang:
                return i
        raise RuntimeError('No compiler for language ' + lang)

    def get_compiler_for_source(self, src):
        for i in self.build.compilers:
            if i.can_compile(src):
                return i
        raise RuntimeError('No specified compiler can handle file ' + src)

    def get_target_filename(self, target):
        targetdir = self.get_target_dir(target)
        filename = os.path.join(targetdir, target.get_filename())
        return filename

    def get_target_dir(self, target):
        dirname = target.get_subdir()
        os.makedirs(os.path.join(self.environment.get_build_dir(), dirname), exist_ok=True)
        return dirname

    def get_target_private_dir(self, target):
        dirname = os.path.join(self.get_target_dir(target), target.get_basename() + '.dir')
        os.makedirs(os.path.join(self.environment.get_build_dir(), dirname), exist_ok=True)
        return dirname

    def generate_target(self, target, outfile):
        name = target.get_basename()
        if name in self.processed_targets:
            return
        (gen_src_deps, gen_other_deps) = self.process_dep_gens(outfile, target)
        self.process_target_dependencies(target, outfile)
        self.generate_custom_generator_rules(target, outfile)
        outname = self.get_target_filename(target)
        obj_list = []
        if self.environment.coredata.use_pch and target.has_pch():
            self.generate_pch(target, outfile)
        header_deps = gen_other_deps
        for genlist in target.get_generated_sources():
            for src in genlist.get_outfilelist():
                if not self.environment.is_header(src):
                    obj_list.append(self.generate_single_compile(target, outfile, src, True))
                else:
                    header_deps.append(src)
        for src in gen_src_deps:
            obj_list.append(self.generate_single_compile(target, outfile, src, True))
        for src in target.get_sources():
            if not self.environment.is_header(src):
                obj_list.append(self.generate_single_compile(target, outfile, src, False, header_deps))
        for obj in target.get_objects():
            if isinstance(obj, str):
                o = os.path.join(self.build_to_src, target.get_subdir(), obj)
            else:
                raise MesonException('Unknown data type in object list.')
            obj_list.append(o)
        elem = self.generate_link(target, outfile, outname, obj_list)
        self.generate_shlib_aliases(target, self.get_target_dir(target), outfile, elem)
        self.processed_targets[name] = True

    def process_target_dependencies(self, target, outfile):
        for t in target.get_dependencies():
            tname = t.get_basename()
            if not tname in self.processed_targets:
                self.generate_target(t, outfile)

    def get_pch_include_args(self, compiler, target):
        args = []
        pchpath = self.get_target_private_dir(target)
        includearg = compiler.get_include_arg(pchpath)
        for lang in ['c', 'cpp']:
            p = target.get_pch(lang)
            if len(p) == 0:
                continue
            if compiler.can_compile(p[-1]):
                header = p[0]
                args += compiler.get_pch_use_args(pchpath, header)
        if len(args) > 0:
            args = [includearg] + args
        return args

    def generate_basic_compiler_flags(self, target, compiler):
        commands = []
        commands += compiler.get_always_flags()
        commands += self.build.get_global_flags(compiler)
        commands += target.get_extra_args(compiler.get_language())
        if self.environment.coredata.buildtype != 'plain':
            commands += compiler.get_debug_flags()
            commands += compiler.get_std_warn_flags()
        if self.environment.coredata.buildtype == 'optimized':
            commands += compiler.get_std_opt_flags()
        if self.environment.coredata.coverage:
            commands += compiler.get_coverage_flags()
        if isinstance(target, build.SharedLibrary):
            commands += compiler.get_pic_flags()
        for dep in target.get_external_deps():
            commands += dep.get_compile_flags()
            if isinstance(target, build.Executable):
                commands += dep.get_exe_flags()

        return commands

    def build_target_link_arguments(self, compiler, deps):
        args = []
        for d in deps:
            if not isinstance(d, build.StaticLibrary) and\
               not isinstance(d, build.SharedLibrary):
                raise RuntimeError('Tried to link with a non-library target "%s".' % d.get_basename())
            fname = self.get_target_filename(d)
            if compiler.id == 'msvc':
                if fname.endswith('dll'):
                    fname = fname[:-3] + 'lib'
            args.append(fname)
        return args

    def generate_configure_files(self):
        for cf in self.build.get_configure_files():
            infile = os.path.join(self.environment.get_source_dir(),
                                  cf.get_subdir(),
                                  cf.get_source_name())
            outdir = os.path.join(self.environment.get_build_dir(),
                                  cf.get_subdir())
            os.makedirs(outdir, exist_ok=True)
            outfile = os.path.join(outdir, cf.get_target_name())
            confdata = cf.get_configuration_data()
            do_conf_file(infile, outfile, confdata)

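# NinjaBuildElement models one Ninja build statement. write() emits it in the
# standard Ninja syntax, roughly:
#
#   build <outputs>: <rule> <inputs> | <implicit deps> || <order-only deps>
#    VAR = value
#
# with per-edge variables such as FLAGS, LINK_FLAGS, COMMAND, DESC and DEPFILE
# given as indented assignments on the following lines.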
class NinjaBuildElement():
    def __init__(self, outfilenames, rule, infilenames):
        if isinstance(outfilenames, str):
            self.outfilenames = [outfilenames]
        else:
            self.outfilenames = outfilenames
        self.rule = rule
        if isinstance(infilenames, str):
            self.infilenames = [infilenames]
        else:
            self.infilenames = infilenames
        self.deps = []
        self.orderdeps = []
        self.elems = []

    def add_dep(self, dep):
        if isinstance(dep, list):
            self.deps += dep
        else:
            self.deps.append(dep)

    def add_orderdep(self, dep):
        if isinstance(dep, list):
            self.orderdeps += dep
        else:
            self.orderdeps.append(dep)

    def add_item(self, name, elems):
        if isinstance(elems, str):
            elems = [elems]
        self.elems.append((name, elems))

    def write(self, outfile):
        line = 'build %s: %s %s' % (' '.join([ninja_quote(i) for i in self.outfilenames]),\
            self.rule,
            ' '.join([ninja_quote(i) for i in self.infilenames]))
        if len(self.deps) > 0:
            line += ' | ' + ' '.join([ninja_quote(x) for x in self.deps])
        if len(self.orderdeps) > 0:
            line += ' || ' + ' '.join([ninja_quote(x) for x in self.orderdeps])
        line += '\n'
        outfile.write(line)

        for e in self.elems:
            (name, elems) = e
            should_quote = True
            if name == 'DEPFILE' or name == 'DESC':
                should_quote = False
            line = ' %s = ' % name
            q_templ = quote_char + "%s" + quote_char
            noq_templ = "%s"
            newelems = []
            for i in elems:
                if not should_quote or i == '&&': # Hackety hack hack
                    templ = noq_templ
                else:
                    templ = q_templ
                newelems.append(templ % ninja_quote(i))
            line += ' '.join(newelems)
            line += '\n'
            outfile.write(line)
        outfile.write('\n')

class NinjaBackend(Backend):

    def __init__(self, build, interp):
        Backend.__init__(self, build, interp)
        self.ninja_filename = 'build.ninja'

    def generate(self):
        outfilename = os.path.join(self.environment.get_build_dir(), self.ninja_filename)
        tempfilename = outfilename + '~'
        outfile = open(tempfilename, 'w')
        self.generate_configure_files()
        outfile.write('# This is the build file for project "%s"\n' % self.build.get_project())
        outfile.write('# It is autogenerated by the Meson build system.\n')
        outfile.write('# Do not edit by hand.\n\n')
        outfile.write('ninja_required_version = 1.3.4\n\n')
        self.generate_rules(outfile)
        outfile.write('# Build rules for targets\n\n')
        [self.generate_target(t, outfile) for t in self.build.get_targets().values()]
        if len(self.build.pot) > 0:
            outfile.write('# Build rules for localisation.\n\n')
            self.generate_po(outfile)
        outfile.write('# Test rules\n\n')
        self.generate_tests(outfile)
        outfile.write('# Install rules\n\n')
        self.generate_install(outfile)
        if self.environment.coredata.coverage:
            outfile.write('# Coverage rules\n\n')
            self.generate_coverage_rules(outfile)
        outfile.write('# Suffix\n\n')
        self.generate_ending(outfile)
        # Only overwrite the old build file after the new one has been
        # fully created.
        outfile.close()
        os.replace(tempfilename, outfilename)

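    # Localisation support: for every pot entry declared in the build,
    # generate_po() writes a GEN_POT edge (xgettext collects the translatable
    # strings listed in POTFILES into <package>.pot) plus one GEN_GMO edge per
    # language (msgfmt compiles <lang>.po into the binary <lang>.gmo). The
    # rules themselves are emitted by generate_gettext_rules() further down.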
    def generate_po(self, outfile):
        for p in self.build.pot:
            (packagename, languages, subdir) = p
            input_file = os.path.join(subdir, 'POTFILES')
            elem = NinjaBuildElement('pot', 'GEN_POT', [])
            elem.add_item('PACKAGENAME', packagename)
            elem.add_item('OUTFILE', packagename + '.pot')
            elem.add_item('FILELIST', os.path.join(self.environment.get_source_dir(), input_file))
            elem.add_item('OUTDIR', os.path.join(self.environment.get_source_dir(), subdir))
            elem.write(outfile)
            for l in languages:
                infile = os.path.join(self.environment.get_source_dir(), subdir, l + '.po')
                outfilename = os.path.join(subdir, l + '.gmo')
                lelem = NinjaBuildElement(outfilename, 'GEN_GMO', infile)
                lelem.add_item('INFILE', infile)
                lelem.add_item('OUTFILE', outfilename)
                lelem.write(outfile)

    def generate_coverage_rules(self, outfile):
        (gcovr_exe, lcov_exe, genhtml_exe) = environment.find_coverage_tools()
        added_rule = False
        if gcovr_exe:
            added_rule = True
            elem = NinjaBuildElement('coverage-xml', 'CUSTOM_COMMAND', '')
            elem.add_item('COMMAND', [gcovr_exe, '-x', '-r', self.environment.get_build_dir(),\
                                      '-o', os.path.join(self.environment.get_log_dir(), 'coverage.xml')])
            elem.add_item('DESC', 'Generating XML coverage report.')
            elem.write(outfile)
            elem = NinjaBuildElement('coverage-text', 'CUSTOM_COMMAND', '')
            elem.add_item('COMMAND', [gcovr_exe, '-r', self.environment.get_build_dir(),\
                                      '-o', os.path.join(self.environment.get_log_dir(), 'coverage.txt')])
            elem.add_item('DESC', 'Generating text coverage report.')
            elem.write(outfile)
        if lcov_exe and genhtml_exe:
            added_rule = True
            phony_elem = NinjaBuildElement('coverage-html', 'phony', 'coveragereport/index.html')
            phony_elem.write(outfile)

            elem = NinjaBuildElement('coveragereport/index.html', 'CUSTOM_COMMAND', '')
            command = [lcov_exe, '--directory', self.environment.get_build_dir(),\
                       '--capture', '--output-file', 'coverage.info', '--no-checksum',\
                       '&&', genhtml_exe, '--prefix', self.environment.get_build_dir(),\
                       '--output-directory', self.environment.get_log_dir(), '--title', 'Code coverage',\
                       '--legend', '--show-details', 'coverage.info']
            elem.add_item('COMMAND', command)
            elem.add_item('DESC', 'Generating HTML coverage report.')
            elem.write(outfile)
        if not added_rule:
            mlog.log(mlog.red('Warning:'), 'coverage requested but neither gcovr nor lcov/genhtml found.')

    def generate_install(self, outfile):
        script_root = self.environment.get_script_dir()
        install_script = os.path.join(script_root, 'meson_install.py')
        install_data_file = os.path.join(self.environment.get_scratch_dir(), 'install.dat')
        depfixer = os.path.join(script_root, 'depfixer.py')
        d = InstallData(self.environment.get_prefix(), depfixer, './') # Fixme
        elem = NinjaBuildElement('install', 'CUSTOM_COMMAND', '')
        elem.add_dep('all')
        elem.add_item('DESC', 'Installing files.')
        elem.add_item('COMMAND', [sys.executable, install_script, install_data_file])
        self.generate_target_install(d)
        self.generate_header_install(d)
        self.generate_man_install(d)
        self.generate_data_install(d)
        self.generate_po_install(d, elem)
        elem.write(outfile)

        ofile = open(install_data_file, 'wb')
        pickle.dump(d, ofile)

    def generate_po_install(self, d, elem):
        for p in self.build.pot:
            (package_name, languages, subdir) = p
            # FIXME: assumes only one po package per source
            d.po_package_name = package_name
            for lang in languages:
                rel_src = os.path.join(subdir, lang + '.gmo')
                src_file = os.path.join(self.environment.get_build_dir(), rel_src)
                d.po.append((src_file, self.environment.coredata.localedir, lang))
                elem.add_dep(rel_src)

    def generate_target_install(self, d):
        libdir = self.environment.get_libdir()
        bindir = self.environment.get_bindir()

        should_strip = self.environment.coredata.strip
        for t in self.build.get_targets().values():
            if t.should_install():
                if isinstance(t, build.Executable):
                    outdir = bindir
                else:
                    outdir = libdir
                i = [self.get_target_filename(t), outdir, t.get_aliaslist(), should_strip]
                d.targets.append(i)

    def generate_header_install(self, d):
        incroot = self.environment.get_includedir()
        headers = self.build.get_headers()

        for h in headers:
            outdir = os.path.join(incroot, h.get_subdir())
            for f in h.get_sources():
                abspath = os.path.join(self.environment.get_source_dir(), f) # FIXME
                i = [abspath, outdir]
                d.headers.append(i)

    def generate_man_install(self, d):
        manroot = self.environment.get_mandir()
        man = self.build.get_man()
        for m in man:
            for f in m.get_sources():
                num = f.split('.')[-1]
                subdir = 'man' + num
                srcabs = os.path.join(self.environment.get_source_dir(), f)
                dstabs = os.path.join(manroot,
                                      os.path.join(subdir, f + '.gz'))
                i = [srcabs, dstabs]
                d.man.append(i)

    def generate_data_install(self, d):
        dataroot = self.environment.get_datadir()
        data = self.build.get_data()
        for de in data:
            subdir = os.path.join(dataroot, de.get_subdir())
            for f in de.get_sources():
                srcabs = os.path.join(self.environment.get_source_dir(), f)
                dstabs = os.path.join(subdir, f)
                i = [srcabs, dstabs]
                d.data.append(i)

    def generate_tests(self, outfile):
        valgrind = environment.find_valgrind()
        script_root = self.environment.get_script_dir()
        test_script = os.path.join(script_root, 'meson_test.py')
        test_data = os.path.join(self.environment.get_scratch_dir(), 'meson_test_setup.dat')
        cmd = [sys.executable, test_script, test_data]
        elem = NinjaBuildElement('test', 'CUSTOM_COMMAND', 'all')
        elem.add_item('COMMAND', cmd)
        elem.add_item('DESC', 'Running test suite.')
        elem.write(outfile)

        if valgrind:
            velem = NinjaBuildElement('test-valgrind', 'CUSTOM_COMMAND', 'all')
            velem.add_item('COMMAND', cmd + ['--wrapper=' + valgrind])
            velem.add_item('DESC', 'Running test suite under Valgrind.')
            velem.write(outfile)

        datafile = open(test_data, 'wb')
        self.write_test_file(datafile)
        datafile.close()

    def write_test_file(self, datafile):
        arr = []
        for t in self.build.get_tests():
            fname = os.path.join(self.environment.get_build_dir(), self.get_target_filename(t.get_exe()))
            is_cross = self.environment.is_cross_build()
            if is_cross:
                exe_wrapper = self.environment.cross_info.get('exe_wrapper', None)
            else:
                exe_wrapper = None
            ts = TestSerialisation(t.get_name(), fname, is_cross, exe_wrapper,
                                   t.is_parallel, t.cmd_args, t.env)
            arr.append(ts)
        pickle.dump(arr, datafile)

    def generate_dep_gen_rules(self, outfile):
        outfile.write('# Rules for external dependency generators.\n\n')
        processed = {}
        for dep in self.environment.coredata.deps.values():
            name = dep.get_name()
            if name in processed:
                continue
            processed[name] = True
            for rule in dep.get_generate_rules():
                outfile.write('rule %s\n' % rule.name)
                command = ' '.join([ninja_quote(x) for x in rule.cmd_list])
                command = command.replace('@INFILE@', '$in').replace('@OUTFILE@', '$out')
                command = command.replace('@SOURCE_ROOT@', self.environment.get_source_dir())
                command = command.replace('@BUILD_ROOT@', self.environment.get_build_dir())
                outfile.write(' command = %s\n' % command)
                desc = rule.description.replace('@INFILE@', '$in')
                outfile.write(' description = %s\n' % desc)
                if rule.src_keyword in self.dep_rules:
                    raise InvalidArguments('Multiple rules for keyword %s.' % rule.src_keyword)
                self.dep_rules[rule.src_keyword] = rule
        outfile.write('\n')

    def generate_rules(self, outfile):
        outfile.write('# Rules for compiling.\n\n')
        self.generate_compile_rules(outfile)
        outfile.write('# Rules for linking.\n\n')
        if self.environment.is_cross_build():
            self.generate_static_link_rules(True, outfile)
        self.generate_static_link_rules(False, outfile)
        self.generate_dynamic_link_rules(outfile)
        self.generate_dep_gen_rules(outfile)
        outfile.write('# Other rules\n\n')
        outfile.write('rule CUSTOM_COMMAND\n')
        outfile.write(' command = $COMMAND\n')
        outfile.write(' description = $DESC\n')
        outfile.write(' restat = 1\n\n')
        outfile.write('rule REGENERATE_BUILD\n')
        c = (quote_char + ninja_quote(sys.executable) + quote_char,
             quote_char + ninja_quote(self.environment.get_build_command()) + quote_char,
             quote_char + ninja_quote(self.environment.get_source_dir()) + quote_char,
             quote_char + ninja_quote(self.environment.get_build_dir()) + quote_char)
        outfile.write(" command = %s %s %s %s --backend ninja secret-handshake\n" % c)
        outfile.write(' description = Regenerating build files\n')
        outfile.write(' generator = 1\n\n')
        if len(self.build.pot) > 0:
            self.generate_gettext_rules(outfile)
        outfile.write('\n')

    def generate_gettext_rules(self, outfile):
        rule = 'rule GEN_POT\n'
        command = " command = xgettext --package-name=$PACKAGENAME -p $OUTDIR -f $FILELIST -D '%s' -k_ -o $OUTFILE\n" % \
            self.environment.get_source_dir()
        desc = " description = Creating pot file for package $PACKAGENAME.\n"
        outfile.write(rule)
        outfile.write(command)
        outfile.write(desc)
        outfile.write('\n')
        rule = 'rule GEN_GMO\n'
        command = ' command = msgfmt $INFILE -o $OUTFILE\n'
        desc = ' description = Generating gmo file $OUTFILE\n'
        outfile.write(rule)
        outfile.write(command)
        outfile.write(desc)
        outfile.write('\n')

    def generate_static_link_rules(self, is_cross, outfile):
        if is_cross:
            static_linker = self.build.static_cross_linker
            crstr = '_CROSS'
        else:
            static_linker = self.build.static_linker
            crstr = ''
        rule = 'rule STATIC%s_LINKER\n' % crstr
        command = ' command = %s $LINK_FLAGS %s $in\n' % \
            (' '.join(static_linker.get_exelist()),
             ' '.join(static_linker.get_output_flags('$out')))
        description = ' description = Static linking library $out\n\n'
        outfile.write(rule)
        outfile.write(command)
        outfile.write(description)

    def generate_dynamic_link_rules(self, outfile):
        ctypes = [(self.build.compilers, False), (self.build.cross_compilers, True)]
        for (complist, is_cross) in ctypes:
            for compiler in complist:
                langname = compiler.get_language()
                crstr = ''
                if is_cross:
                    crstr = '_CROSS'
                rule = 'rule %s%s_LINKER\n' % (langname, crstr)
                command = ' command = %s %s $FLAGS %s $in $LINK_FLAGS $aliasing\n' % \
                    (execute_wrapper,
                     ' '.join(compiler.get_linker_exelist()),\
                     ' '.join(compiler.get_linker_output_flags('$out')))
                description = ' description = Linking target $out'
                outfile.write(rule)
                outfile.write(command)
                outfile.write(description)
                outfile.write('\n')
        scriptdir = self.environment.get_script_dir()
        outfile.write('\n')
        symrule = 'rule SHSYM\n'
        symcmd = ' command = "%s" "%s" "%s" "%s" $CROSS\n' % (ninja_quote(sys.executable),
                                                              ninja_quote(os.path.join(scriptdir, 'symbolextractor.py')),
                                                              '$in', '$out')
        synstat = ' restat = 1\n'
        syndesc = ' description = Generating symbol file $out.\n'
        outfile.write(symrule)
        outfile.write(symcmd)
        outfile.write(synstat)
        outfile.write(syndesc)
        outfile.write('\n')

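    # The two helpers below write one Ninja rule per language (and a _CROSS
    # variant when cross compiling): <lang>_COMPILER for ordinary objects and
    # <lang>_PCH for precompiled headers. Each rule's command is assembled from
    # the compiler's own flag lists, and header dependency tracking is wired up
    # through "deps = gcc" or "deps = msvc" plus "depfile = $DEPFILE".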
    def generate_compile_rule_for(self, langname, compiler, qstr, is_cross, outfile):
        if is_cross:
            crstr = '_CROSS'
        else:
            crstr = ''
        rule = 'rule %s%s_COMPILER\n' % (langname, crstr)
        depflags = compiler.get_dependency_gen_flags('$out', '$DEPFILE')
        command = " command = %s $FLAGS %s %s %s $in\n" % \
            (' '.join(compiler.get_exelist()),\
             ' '.join([qstr % d for d in depflags]),\
             ' '.join(compiler.get_output_flags('$out')),\
             ' '.join(compiler.get_compile_only_flags()))
        description = ' description = Compiling %s object $out\n' % langname
        if compiler.get_id() == 'msvc':
            deps = ' deps = msvc\n'
        else:
            deps = ' deps = gcc\n'
            deps += ' depfile = $DEPFILE\n'
        outfile.write(rule)
        outfile.write(command)
        outfile.write(deps)
        outfile.write(description)
        outfile.write('\n')

    def generate_pch_rule_for(self, langname, compiler, qstr, is_cross, outfile):
        if is_cross:
            crstr = '_CROSS'
        else:
            crstr = ''
        rule = 'rule %s%s_PCH\n' % (langname, crstr)
        depflags = compiler.get_dependency_gen_flags('$out', '$DEPFILE')
        if compiler.get_id() == 'msvc':
            output = ''
        else:
            output = ' '.join(compiler.get_output_flags('$out'))
        command = " command = %s $FLAGS %s %s %s $in\n" % \
            (' '.join(compiler.get_exelist()),\
             ' '.join([qstr % d for d in depflags]),\
             output,\
             ' '.join(compiler.get_compile_only_flags()))
        description = ' description = Precompiling header %s\n' % '$in'
        if compiler.get_id() == 'msvc':
            deps = ' deps = msvc\n'
        else:
            deps = ' deps = gcc\n'
            deps += ' depfile = $DEPFILE\n'
        outfile.write(rule)
        outfile.write(command)
        outfile.write(deps)
        outfile.write(description)
        outfile.write('\n')

    def generate_compile_rules(self, outfile):
        qstr = quote_char + "%s" + quote_char
        for compiler in self.build.compilers:
            langname = compiler.get_language()
            self.generate_compile_rule_for(langname, compiler, qstr, False, outfile)
            self.generate_pch_rule_for(langname, compiler, qstr, False, outfile)
        if self.environment.is_cross_build():
            for compiler in self.build.cross_compilers:
                langname = compiler.get_language()
                self.generate_compile_rule_for(langname, compiler, qstr, True, outfile)
                self.generate_pch_rule_for(langname, compiler, qstr, True, outfile)
        outfile.write('\n')

    def generate_custom_generator_rules(self, target, outfile):
        for genlist in target.get_generated_sources():
            generator = genlist.get_generator()
            exe = generator.get_exe()
            if self.environment.is_cross_build() and \
               isinstance(exe, build.BuildTarget) and exe.is_cross:
                if 'exe_wrapper' not in self.environment.cross_info:
                    s = 'Cannot use target %s as a generator because it is cross-built\n'
                    s += 'and no exe wrapper is defined. You might want to set it to native instead.'
                    s = s % exe.name
                    raise MesonException(s)
            infilelist = genlist.get_infilelist()
            outfilelist = genlist.get_outfilelist()
            if isinstance(exe, build.BuildTarget):
                exe_file = os.path.join(self.environment.get_build_dir(), self.get_target_filename(exe))
            else:
                exe_file = exe.get_command()
            base_args = generator.get_arglist()
            for i in range(len(infilelist)):
                if len(infilelist) == len(outfilelist):
                    sole_output = os.path.join(self.get_target_private_dir(target), outfilelist[i])
                else:
                    for x in base_args:
                        if '@OUTPUT@' in x:
                            raise MesonException('Tried to use @OUTPUT@ in a rule with more than one output.')
                    sole_output = ''
                curfile = infilelist[i]
                infilename = os.path.join(self.environment.get_source_dir(), curfile)
                outfiles = genlist.get_outputs_for(curfile)
                outfiles = [os.path.join(self.get_target_private_dir(target), of) for of in outfiles]
                args = [x.replace("@INPUT@", infilename).replace('@OUTPUT@', sole_output)\
                        for x in base_args]
                args = [x.replace("@SOURCE_DIR@", self.environment.get_source_dir()).replace("@BUILD_DIR@", self.get_target_private_dir(target))
                        for x in args]
                cmdlist = [exe_file] + args
                elem = NinjaBuildElement(outfiles, 'CUSTOM_COMMAND', infilename)
                elem.add_item('DESC', 'Generating $out')
                if isinstance(exe, build.BuildTarget):
                    elem.add_dep(self.get_target_filename(exe))
                elem.add_item('COMMAND', cmdlist)
                elem.write(outfile)

    def generate_single_compile(self, target, outfile, src, is_generated=False, header_deps=[]):
        compiler = self.get_compiler_for_source(src)
        commands = self.generate_basic_compiler_flags(target, compiler)
        commands.append(compiler.get_include_arg(self.get_target_private_dir(target)))
        if is_generated:
            if '/' in src:
                rel_src = src
            else:
                rel_src = os.path.join(self.get_target_private_dir(target), src)
        else:
            rel_src = os.path.join(self.build_to_src, target.get_source_subdir(), src)
        if os.path.isabs(src):
            src_filename = os.path.basename(src)
        else:
            src_filename = src
        rel_obj = os.path.join(self.get_target_private_dir(target), os.path.basename(src_filename))
        rel_obj += '.' + self.environment.get_object_suffix()
        dep_file = rel_obj + '.' + compiler.get_depfile_suffix()
        if self.environment.coredata.use_pch:
            pchlist = target.get_pch(compiler.language)
        else:
            pchlist = []
        if len(pchlist) == 0:
            pch_dep = []
        else:
            arr = []
            i = os.path.join(self.get_target_private_dir(target), compiler.get_pch_name(pchlist[0]))
            arr.append(i)
            pch_dep = arr
        for i in target.get_include_dirs():
            basedir = i.get_curdir()
            for d in i.get_incdirs():
                expdir = os.path.join(basedir, d)
                fulldir = os.path.join(self.environment.get_source_dir(), expdir)
                barg = compiler.get_include_arg(expdir)
                sarg = compiler.get_include_arg(fulldir)
                commands.append(barg)
                commands.append(sarg)
        if self.environment.coredata.use_pch:
            commands += self.get_pch_include_args(compiler, target)
        crstr = ''
        if target.is_cross:
            crstr = '_CROSS'
        compiler_name = '%s%s_COMPILER' % (compiler.get_language(), crstr)

        element = NinjaBuildElement(rel_obj, compiler_name, rel_src)
        for d in header_deps:
            if not '/' in d:
                d = os.path.join(self.get_target_private_dir(target), d)
            element.add_dep(d)
        element.add_orderdep(pch_dep)
        element.add_item('DEPFILE', dep_file)
        element.add_item('FLAGS', commands)
        element.write(outfile)
        return rel_obj

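    # Precompiled header handling differs per compiler family: MSVC builds a
    # PCH from a header/source pair (generate_msvc_pch_command requires exactly
    # two entries), while GCC-style compilers compile the header itself into a
    # <header>.<pch suffix> file in the target's private directory
    # (generate_gcc_pch_command). generate_pch() picks the right variant and
    # emits the corresponding <lang>_PCH build edge.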
    def generate_msvc_pch_command(self, target, compiler, pch):
        if len(pch) != 2:
            raise RuntimeError('MSVC requires one header and one source to produce precompiled headers.')
        header = pch[0]
        source = pch[1]
        pchname = compiler.get_pch_name(header)
        dst = os.path.join(self.get_target_private_dir(target), pchname)

        commands = []
        commands += self.generate_basic_compiler_flags(target, compiler)
        just_name = os.path.split(header)[1]
        commands += compiler.gen_pch_args(just_name, source, dst)

        dep = dst + '.' + compiler.get_depfile_suffix()
        return (commands, dep, dst)

    def generate_gcc_pch_command(self, target, compiler, pch):
        commands = []
        commands += self.generate_basic_compiler_flags(target, compiler)

        dst = os.path.join(self.get_target_private_dir(target),
                           os.path.split(pch)[-1] + '.' + compiler.get_pch_suffix())
        dep = dst + '.' + compiler.get_depfile_suffix()
        return (commands, dep, dst)

    def generate_pch(self, target, outfile):
        cstr = ''
        if target.is_cross:
            cstr = '_CROSS'
        for lang in ['c', 'cpp']:
            pch = target.get_pch(lang)
            if len(pch) == 0:
                continue
            if '/' not in pch[0] or '/' not in pch[-1]:
                raise build.InvalidArguments('Precompiled header of "%s" must not be in the same directory as source, please put it in a subdirectory.' % target.get_basename())
            compiler = self.get_compiler_for_lang(lang)
            if compiler.id == 'msvc':
                src = os.path.join(self.build_to_src, target.get_source_subdir(), pch[-1])
                (commands, dep, dst) = self.generate_msvc_pch_command(target, compiler, pch)
                extradep = os.path.join(self.build_to_src, target.get_source_subdir(), pch[0])
            else:
                src = os.path.join(self.build_to_src, target.get_source_subdir(), pch[0])
                (commands, dep, dst) = self.generate_gcc_pch_command(target, compiler, pch[0])
                extradep = None
            rulename = compiler.get_language() + cstr + '_PCH'
            elem = NinjaBuildElement(dst, rulename, src)
            if extradep is not None:
                elem.add_dep(extradep)
            elem.add_item('FLAGS', commands)
            elem.add_item('DEPFILE', dep)
            elem.write(outfile)

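    # For shared libraries a small ".symbols" file is generated with
    # symbolextractor.py (the SHSYM rule defined above uses restat = 1).
    # Targets that link against the library depend on that symbol file instead
    # of the library itself (see get_dependency_filename); the intent is that
    # dependents only need relinking when the exported symbol list changes.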
    def generate_shsym(self, outfile, target):
        target_name = self.get_target_filename(target)
        targetdir = self.get_target_private_dir(target)
        symname = os.path.join(targetdir, target_name + '.symbols')
        elem = NinjaBuildElement(symname, 'SHSYM', target_name)
        if self.environment.is_cross_build():
            elem.add_item('CROSS', '--cross-host=' + self.environment.cross_info['name'])
        elem.write(outfile)

    def generate_link(self, target, outfile, outname, obj_list):
        if isinstance(target, build.StaticLibrary):
            linker = self.build.static_linker
            linker_base = 'STATIC'
        else:
            linker = self.build.compilers[0]
            linker_base = linker.get_language() # Fixme.
        if isinstance(target, build.SharedLibrary):
            self.generate_shsym(outfile, target)
        crstr = ''
        if target.is_cross:
            crstr = '_CROSS'
        linker_rule = linker_base + crstr + '_LINKER'
        abspath = os.path.join(self.environment.get_build_dir(), target.subdir)
        commands = []
        if isinstance(target, build.Executable):
            commands += linker.get_std_exe_link_flags()
        elif isinstance(target, build.SharedLibrary):
            commands += linker.get_std_shared_lib_link_flags()
            commands += linker.get_pic_flags()
            commands += linker.get_soname_flags(target.name, abspath)
        elif isinstance(target, build.StaticLibrary):
            commands += linker.get_std_link_flags()
        else:
            raise RuntimeError('Unknown build target type.')
        for dep in target.get_external_deps():
            commands += dep.get_link_flags()
        dependencies = target.get_dependencies()
        commands += self.build_target_link_arguments(linker, dependencies)
        commands += linker.build_rpath_args(self.environment.get_build_dir(), target.get_rpaths())
        if self.environment.coredata.coverage:
            commands += linker.get_coverage_link_flags()
        dep_targets = [self.get_dependency_filename(t) for t in dependencies]
        elem = NinjaBuildElement(outname, linker_rule, obj_list)
        elem.add_dep(dep_targets)
        elem.add_item('LINK_FLAGS', commands)
        return elem

    def get_dependency_filename(self, t):
        if isinstance(t, build.SharedLibrary):
            return os.path.join(self.get_target_private_dir(t), self.get_target_filename(t) + '.symbols')
        return self.get_target_filename(t)

    def generate_shlib_aliases(self, target, outdir, outfile, elem):
        basename = target.get_filename()
        aliases = target.get_aliaslist()
        aliascmd = []
        if shutil.which('ln'):
            for alias in aliases:
                aliasfile = os.path.join(outdir, alias)
                cmd = ["&&", 'ln', '-s', '-f', basename, aliasfile]
                aliascmd += cmd
        else:
            mlog.log("Library versioning disabled because host does not support symlinks.")
        elem.add_item('aliasing', aliascmd)
        elem.write(outfile)

    def generate_gcov_clean(self, outfile):
        gcno_elem = NinjaBuildElement('clean-gcno', 'CUSTOM_COMMAND', '')
        script_root = self.environment.get_script_dir()
        clean_script = os.path.join(script_root, 'delwithsuffix.py')
        gcno_elem.add_item('COMMAND', [sys.executable, clean_script, '.', 'gcno'])
        gcno_elem.add_item('description', 'Deleting gcno files')
        gcno_elem.write(outfile)

        gcda_elem = NinjaBuildElement('clean-gcda', 'CUSTOM_COMMAND', '')
        script_root = self.environment.get_script_dir()
        clean_script = os.path.join(script_root, 'delwithsuffix.py')
        gcda_elem.add_item('COMMAND', [sys.executable, clean_script, '.', 'gcda'])
        gcda_elem.add_item('description', 'Deleting gcda files')
        gcda_elem.write(outfile)

    def is_compilable_file(self, filename):
        if filename.endswith('.cpp') or\
           filename.endswith('.c') or\
           filename.endswith('.cxx') or\
           filename.endswith('.cc') or\
           filename.endswith('.C'):
            return True
        return False

    def process_dep_gens(self, outfile, target):
        src_deps = []
        other_deps = []
        for rule in self.dep_rules.values():
            srcs = target.get_original_kwargs().get(rule.src_keyword, [])
            if isinstance(srcs, str):
                srcs = [srcs]
            for src in srcs:
                plainname = os.path.split(src)[1]
                basename = plainname.split('.')[0]
                outname = rule.name_templ.replace('@BASENAME@', basename).replace('@PLAINNAME@', plainname)
                outfilename = os.path.join(self.get_target_private_dir(target), outname)
                infilename = os.path.join(self.build_to_src, target.get_source_subdir(), src)
                # Don't rebind the loop variable "rule" here; doing so would
                # break the next iteration when a keyword has several sources.
                rulename = rule.name
                elem = NinjaBuildElement(outfilename, rulename, infilename)
                elem.write(outfile)
                if self.is_compilable_file(outfilename):
                    src_deps.append(outfilename)
                else:
                    other_deps.append(outfilename)
        return (src_deps, other_deps)

    def generate_cppcheck_target(self, outfile):
        cppcheck_exe = environment.find_cppcheck()
        if not cppcheck_exe:
            return
        elem = NinjaBuildElement('cppcheck', 'CUSTOM_COMMAND', [])
        elem.add_item('COMMAND', [cppcheck_exe, self.environment.get_source_dir()])
        elem.add_item('description', 'Running cppchecker')
        elem.write(outfile)

    def generate_ending(self, outfile):
        targetlist = [self.get_target_filename(t) for t in self.build.get_targets().values()]
        elem = NinjaBuildElement('all', 'phony', targetlist)
        elem.write(outfile)

        default = 'default all\n\n'
        outfile.write(default)

        ninja_command = environment.detect_ninja()
        if ninja_command is None:
            raise RuntimeError('Could not detect ninja command')
        elem = NinjaBuildElement('clean', 'CUSTOM_COMMAND', '')
        elem.add_item('COMMAND', [ninja_command, '-t', 'clean'])
        elem.add_item('description', 'Cleaning')
        if self.environment.coredata.coverage:
            self.generate_gcov_clean(outfile)
            elem.add_dep('clean-gcda')
            elem.add_dep('clean-gcno')
        elem.write(outfile)

        deps = [os.path.join(self.build_to_src, df) \
                for df in self.interpreter.get_build_def_files()]
        if self.environment.is_cross_build():
            deps.append(os.path.join(self.build_to_src,
                                     self.environment.coredata.cross_file))
        deps.append('meson-private/coredata.dat')
        if os.path.exists(os.path.join(self.environment.get_source_dir(), 'meson_options.txt')):
            deps.append(os.path.join(self.build_to_src, 'meson_options.txt'))
        elem = NinjaBuildElement('build.ninja', 'REGENERATE_BUILD', deps)
        elem.write(outfile)

        elem = NinjaBuildElement(deps, 'phony', '')
        elem.write(outfile)

        self.generate_cppcheck_target(outfile)