typing: fully annotate scripts

commit a4f4379c44
parent 0d57e307b2
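
The pattern applied across these scripts is the same in every hunk below: import typing as T, annotate each function's parameters and return type, and add a trailing "# type:" comment where an empty container's element type cannot be inferred from its initializer. A minimal sketch of that convention (the module and names here are illustrative only, not part of the commit):

    import typing as T

    def run(args: T.List[str]) -> int:
        # Empty containers get a trailing type comment, matching the hunks below.
        found = []  # type: T.List[str]
        for a in args:
            found.append(a)
        return 0
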
@@ -12,7 +12,8 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

-def destdir_join(d1, d2):
+# TODO: consider switching to pathlib for this
+def destdir_join(d1: str, d2: str) -> str:
     # c:\destdir + c:\prefix must produce c:\destdir\prefix
     if len(d1) > 1 and d1[1] == ':' \
             and len(d2) > 1 and d2[1] == ':':

@@ -18,8 +18,9 @@ from concurrent.futures import ThreadPoolExecutor

 from ..environment import detect_clangformat
 from ..compilers import lang_suffixes
+import typing as T

-def clangformat(exelist, srcdir_name, builddir_name):
+def clangformat(exelist: T.List[str], srcdir_name: str, builddir_name: str) -> int:
     srcdir = pathlib.Path(srcdir_name)
     suffixes = set(lang_suffixes['c']).union(set(lang_suffixes['cpp']))
     suffixes.add('h')
@@ -33,7 +34,7 @@ def clangformat(exelist, srcdir_name, builddir_name)
     [x.result() for x in futures]
     return 0

-def run(args):
+def run(args: T.List[str]) -> int:
     srcdir_name = args[0]
     builddir_name = args[1]

@@ -16,10 +16,11 @@ import pathlib
 import subprocess
 import shutil
 from concurrent.futures import ThreadPoolExecutor
+import typing as T

 from ..compilers import lang_suffixes

-def manual_clangformat(srcdir_name, builddir_name):
+def manual_clangformat(srcdir_name: str, builddir_name: str) -> int:
     srcdir = pathlib.Path(srcdir_name)
     suffixes = set(lang_suffixes['c']).union(set(lang_suffixes['cpp']))
     suffixes.add('h')
@@ -34,7 +35,7 @@ def manual_clangformat(srcdir_name, builddir_name)
     [max(returncode, x.result().returncode) for x in futures]
     return returncode

-def clangformat(srcdir_name, builddir_name):
+def clangformat(srcdir_name: str, builddir_name: str) -> int:
     run_clang_tidy = None
     for rct in ('run-clang-tidy', 'run-clang-tidy.py'):
         if shutil.which(rct):
@@ -45,8 +46,9 @@ def clangformat(srcdir_name, builddir_name)
     else:
         print('Could not find run-clang-tidy, running checks manually.')
         manual_clangformat(srcdir_name, builddir_name)
+    return 0

-def run(args):
+def run(args: T.List[str]) -> int:
     srcdir_name = args[0]
     builddir_name = args[1]
     return clangformat(srcdir_name, builddir_name)

@@ -16,8 +16,9 @@ import os
 import sys
 import shutil
 import pickle
+import typing as T

-def rmtrees(build_dir, trees):
+def rmtrees(build_dir: str, trees: T.List[str]) -> None:
     for t in trees:
         # Never delete trees outside of the builddir
         if os.path.isabs(t):
@@ -28,7 +29,7 @@ def rmtrees(build_dir, trees)
         if os.path.isdir(bt):
             shutil.rmtree(bt, ignore_errors=True)

-def run(args):
+def run(args: T.List[str]) -> int:
     if len(args) != 1:
         print('Cleaner script for Meson. Do not run on your own please.')
         print('cleantrees.py <data-file>')

@@ -3,12 +3,12 @@
 import argparse
 import subprocess
 import shutil
 import os
 import sys
 from pathlib import Path
+import typing as T

-def run(argsv):
-    commands = [[]]
+def run(argsv: T.List[str]) -> int:
+    commands = [[]]  # type: T.List[T.List[str]]
     SEPARATOR = ';;;'

     # Generate CMD parameters
@@ -20,13 +20,14 @@ def run(argsv)
     # Parse
     args = parser.parse_args(argsv)
+    directory = Path(args.directory)

     dummy_target = None
     if len(args.outputs) == 1 and len(args.original_outputs) == 0:
-        dummy_target = args.outputs[0]
+        dummy_target = Path(args.outputs[0])
     elif len(args.outputs) != len(args.original_outputs):
         print('Length of output list and original output list differ')
-        sys.exit(1)
+        return 1

     for i in args.commands:
         if i == SEPARATOR:
@@ -62,39 +63,40 @@ def run(argsv)
                 cmd += [j]

         try:
-            os.makedirs(args.directory, exist_ok=True)
+            directory.mkdir(parents=True, exist_ok=True)

-            res = subprocess.run(cmd, stdout=stdout, stderr=stderr, cwd=args.directory, check=True)
+            res = subprocess.run(cmd, stdout=stdout, stderr=stderr, cwd=str(directory), check=True)
             if capture_file:
-                out_file = Path(args.directory) / capture_file
+                out_file = directory / capture_file
                 out_file.write_bytes(res.stdout)
         except subprocess.CalledProcessError:
-            sys.exit(1)
+            return 1

     if dummy_target:
-        with open(dummy_target, 'a'):
-            os.utime(dummy_target, None)
-        sys.exit(0)
+        dummy_target.touch()
+        return 0

     # Copy outputs
-    zipped_outputs = zip(args.outputs, args.original_outputs)
+    zipped_outputs = zip([Path(x) for x in args.outputs], [Path(x) for x in args.original_outputs])
     for expected, generated in zipped_outputs:
         do_copy = False
-        if not os.path.exists(expected):
-            if not os.path.exists(generated):
+        if not expected.exists():
+            if not generated.exists():
                 print('Unable to find generated file. This can cause the build to fail:')
                 print(generated)
                 do_copy = False
             else:
                 do_copy = True
-        elif os.path.exists(generated):
-            if os.path.getmtime(generated) > os.path.getmtime(expected):
+        elif generated.exists():
+            if generated.stat().st_mtime > expected.stat().st_mtime:
                 do_copy = True

         if do_copy:
-            if os.path.exists(expected):
-                os.remove(expected)
-            shutil.copyfile(generated, expected)
+            if expected.exists():
+                expected.unlink()
+            shutil.copyfile(str(generated), str(expected))

     return 0

 if __name__ == '__main__':
-    sys.run(sys.argv[1:])
+    sys.exit(run(sys.argv[1:]))

@@ -17,8 +17,9 @@ what to run, sets up the environment and executes the command."""

 import sys, os, subprocess, shutil, shlex
 import re
+import typing as T

-def run_command(source_dir, build_dir, subdir, meson_command, command, arguments):
+def run_command(source_dir: str, build_dir: str, subdir: str, meson_command: T.List[str], command: str, arguments: T.List[str]) -> subprocess.Popen:
     env = {'MESON_SOURCE_ROOT': source_dir,
            'MESON_BUILD_ROOT': build_dir,
            'MESON_SUBDIR': subdir,
@@ -50,24 +51,24 @@ def run_command(source_dir, build_dir, subdir, meson_command, command, arguments
         print('Could not execute command "{}": {}'.format(command, err))
         sys.exit(1)

-def is_python_command(cmdname):
+def is_python_command(cmdname: str) -> bool:
     end_py_regex = r'python(3|3\.\d+)?(\.exe)?$'
     return re.search(end_py_regex, cmdname) is not None

-def run(args):
+def run(args: T.List[str]) -> int:
     if len(args) < 4:
         print('commandrunner.py <source dir> <build dir> <subdir> <command> [arguments]')
         return 1
     src_dir = args[0]
     build_dir = args[1]
     subdir = args[2]
-    meson_command = args[3]
-    if is_python_command(meson_command):
-        meson_command = [meson_command, args[4]]
+    meson_bin = args[3]
+    if is_python_command(meson_bin):
+        meson_command = [meson_bin, args[4]]
         command = args[5]
         arguments = args[6:]
     else:
-        meson_command = [meson_command]
+        meson_command = [meson_bin]
         command = args[4]
         arguments = args[5:]
     pc = run_command(src_dir, build_dir, subdir, meson_command, command, arguments)

@@ -15,8 +15,9 @@
 from mesonbuild import environment, mesonlib

 import argparse, sys, os, subprocess, pathlib, stat
+import typing as T

-def coverage(outputs, source_root, subproject_root, build_root, log_dir, use_llvm_cov):
+def coverage(outputs: T.List[str], source_root: str, subproject_root: str, build_root: str, log_dir: str, use_llvm_cov: bool) -> int:
     outfiles = []
     exitcode = 0

@@ -146,7 +147,7 @@ def coverage(outputs, source_root, subproject_root, build_root, log_dir, use_llv

     return exitcode

-def run(args):
+def run(args: T.List[str]) -> int:
     if not os.path.isfile('build.ninja'):
         print('Coverage currently only works with the Ninja backend.')
         return 1

@@ -13,8 +13,9 @@
 # limitations under the License.

 import os, sys
+import typing as T

-def run(args):
+def run(args: T.List[str]) -> int:
     if len(args) != 2:
         print('delwithsuffix.py <root of subdir to process> <suffix to delete>')
         sys.exit(1)

@@ -15,6 +15,7 @@

 import sys, struct
 import shutil, subprocess
+import typing as T

 from ..mesonlib import OrderedSet

@@ -30,7 +31,7 @@ DT_MIPS_RLD_MAP_REL = 1879048245
 INSTALL_NAME_TOOL = False

 class DataSizes:
-    def __init__(self, ptrsize, is_le):
+    def __init__(self, ptrsize: int, is_le: bool) -> None:
         if is_le:
             p = '<'
         else:
@@ -57,7 +58,7 @@ class DataSizes:
             self.OffSize = 4

 class DynamicEntry(DataSizes):
-    def __init__(self, ifile, ptrsize, is_le):
+    def __init__(self, ifile: T.BinaryIO, ptrsize: int, is_le: bool) -> None:
         super().__init__(ptrsize, is_le)
         self.ptrsize = ptrsize
         if ptrsize == 64:
@@ -67,7 +68,7 @@ class DynamicEntry(DataSizes):
             self.d_tag = struct.unpack(self.Sword, ifile.read(self.SwordSize))[0]
             self.val = struct.unpack(self.Word, ifile.read(self.WordSize))[0]

-    def write(self, ofile):
+    def write(self, ofile: T.BinaryIO) -> None:
         if self.ptrsize == 64:
             ofile.write(struct.pack(self.Sxword, self.d_tag))
             ofile.write(struct.pack(self.XWord, self.val))
@@ -76,7 +77,7 @@ class DynamicEntry(DataSizes):
             ofile.write(struct.pack(self.Word, self.val))

 class SectionHeader(DataSizes):
-    def __init__(self, ifile, ptrsize, is_le):
+    def __init__(self, ifile: T.BinaryIO, ptrsize: int, is_le: bool) -> None:
         super().__init__(ptrsize, is_le)
         if ptrsize == 64:
             is_64 = True
@@ -116,10 +117,12 @@ class SectionHeader(DataSizes):
             self.sh_entsize = struct.unpack(self.Word, ifile.read(self.WordSize))[0]

 class Elf(DataSizes):
-    def __init__(self, bfile, verbose=True):
+    def __init__(self, bfile: str, verbose: bool = True) -> None:
         self.bfile = bfile
         self.verbose = verbose
         self.bf = open(bfile, 'r+b')
+        self.sections = []  # type: T.List[SectionHeader]
+        self.dynamic = []  # type: T.List[DynamicEntry]
         try:
             (self.ptrsize, self.is_le) = self.detect_elf_type()
             super().__init__(self.ptrsize, self.is_le)
@@ -130,18 +133,18 @@ class Elf(DataSizes):
             self.bf.close()
             raise

-    def __enter__(self):
+    def __enter__(self) -> 'Elf':
         return self

-    def __del__(self):
+    def __del__(self) -> None:
         if self.bf:
             self.bf.close()

-    def __exit__(self, exc_type, exc_value, traceback):
+    def __exit__(self, exc_type: T.Any, exc_value: T.Any, traceback: T.Any) -> None:
         self.bf.close()
         self.bf = None

-    def detect_elf_type(self):
+    def detect_elf_type(self) -> T.Tuple[int, bool]:
         data = self.bf.read(6)
         if data[1:4] != b'ELF':
             # This script gets called to non-elf targets too
@@ -163,7 +166,7 @@ class Elf(DataSizes):
             sys.exit('File "%s" has unknown ELF endianness.' % self.bfile)
         return ptrsize, is_le

-    def parse_header(self):
+    def parse_header(self) -> None:
         self.bf.seek(0)
         self.e_ident = struct.unpack('16s', self.bf.read(16))[0]
         self.e_type = struct.unpack(self.Half, self.bf.read(self.HalfSize))[0]
@@ -180,13 +183,12 @@ class Elf(DataSizes):
         self.e_shnum = struct.unpack(self.Half, self.bf.read(self.HalfSize))[0]
         self.e_shstrndx = struct.unpack(self.Half, self.bf.read(self.HalfSize))[0]

-    def parse_sections(self):
+    def parse_sections(self) -> None:
         self.bf.seek(self.e_shoff)
-        self.sections = []
         for _ in range(self.e_shnum):
             self.sections.append(SectionHeader(self.bf, self.ptrsize, self.is_le))

-    def read_str(self):
+    def read_str(self) -> bytes:
         arr = []
         x = self.bf.read(1)
         while x != b'\0':
@@ -196,17 +198,17 @@ class Elf(DataSizes):
                 raise RuntimeError('Tried to read past the end of the file')
         return b''.join(arr)

-    def find_section(self, target_name):
+    def find_section(self, target_name: bytes) -> T.Optional[SectionHeader]:
         section_names = self.sections[self.e_shstrndx]
         for i in self.sections:
             self.bf.seek(section_names.sh_offset + i.sh_name)
             name = self.read_str()
             if name == target_name:
                 return i
         return None

-    def parse_dynamic(self):
+    def parse_dynamic(self) -> None:
         sec = self.find_section(b'.dynamic')
         self.dynamic = []
         if sec is None:
             return
         self.bf.seek(sec.sh_offset)
@@ -216,14 +218,14 @@ class Elf(DataSizes):
             if e.d_tag == 0:
                 break

-    def print_section_names(self):
+    def print_section_names(self) -> None:
         section_names = self.sections[self.e_shstrndx]
         for i in self.sections:
             self.bf.seek(section_names.sh_offset + i.sh_name)
             name = self.read_str()
             print(name.decode())

-    def print_soname(self):
+    def print_soname(self) -> None:
         soname = None
         strtab = None
         for i in self.dynamic:
@@ -237,14 +239,16 @@ class Elf(DataSizes):
         self.bf.seek(strtab.val + soname.val)
         print(self.read_str())

-    def get_entry_offset(self, entrynum):
+    def get_entry_offset(self, entrynum: int) -> T.Optional[int]:
         sec = self.find_section(b'.dynstr')
         for i in self.dynamic:
             if i.d_tag == entrynum:
-                return sec.sh_offset + i.val
+                res = sec.sh_offset + i.val
+                assert isinstance(res, int)
+                return res
         return None

-    def print_rpath(self):
+    def print_rpath(self) -> None:
         offset = self.get_entry_offset(DT_RPATH)
         if offset is None:
             print("This file does not have an rpath.")
@@ -252,7 +256,7 @@ class Elf(DataSizes):
             self.bf.seek(offset)
             print(self.read_str())

-    def print_runpath(self):
+    def print_runpath(self) -> None:
         offset = self.get_entry_offset(DT_RUNPATH)
         if offset is None:
             print("This file does not have a runpath.")
@@ -260,7 +264,7 @@ class Elf(DataSizes):
             self.bf.seek(offset)
             print(self.read_str())

-    def print_deps(self):
+    def print_deps(self) -> None:
         sec = self.find_section(b'.dynstr')
         deps = []
         for i in self.dynamic:
@@ -272,7 +276,7 @@ class Elf(DataSizes):
             name = self.read_str()
             print(name)

-    def fix_deps(self, prefix):
+    def fix_deps(self, prefix: bytes) -> None:
         sec = self.find_section(b'.dynstr')
         deps = []
         for i in self.dynamic:
@@ -290,15 +294,13 @@ class Elf(DataSizes):
             self.bf.seek(offset)
             self.bf.write(newname)

-    def fix_rpath(self, rpath_dirs_to_remove, new_rpath):
+    def fix_rpath(self, rpath_dirs_to_remove: T.List[bytes], new_rpath: bytes) -> None:
         # The path to search for can be either rpath or runpath.
         # Fix both of them to be sure.
         self.fix_rpathtype_entry(rpath_dirs_to_remove, new_rpath, DT_RPATH)
         self.fix_rpathtype_entry(rpath_dirs_to_remove, new_rpath, DT_RUNPATH)

-    def fix_rpathtype_entry(self, rpath_dirs_to_remove, new_rpath, entrynum):
-        if isinstance(new_rpath, str):
-            new_rpath = new_rpath.encode('utf8')
+    def fix_rpathtype_entry(self, rpath_dirs_to_remove: T.List[bytes], new_rpath: bytes, entrynum: int) -> None:
         rp_off = self.get_entry_offset(entrynum)
         if rp_off is None:
             if self.verbose:
@@ -326,7 +328,7 @@ class Elf(DataSizes):
         new_rpath = b':'.join(new_rpaths)

         if len(old_rpath) < len(new_rpath):
-            msg = "New rpath must not be longer than the old one.\n Old: {}\n New: {}".format(old_rpath, new_rpath)
+            msg = "New rpath must not be longer than the old one.\n Old: {!r}\n New: {!r}".format(old_rpath, new_rpath)
             sys.exit(msg)
         # The linker does read-only string deduplication. If there is a
         # string that shares a suffix with the rpath, they might get
@@ -343,7 +345,7 @@ class Elf(DataSizes):
         self.bf.write(new_rpath)
         self.bf.write(b'\0')

-    def remove_rpath_entry(self, entrynum):
+    def remove_rpath_entry(self, entrynum: int) -> None:
         sec = self.find_section(b'.dynamic')
         if sec is None:
             return None
@@ -363,7 +365,7 @@ class Elf(DataSizes):
             entry.write(self.bf)
         return None

-def fix_elf(fname, rpath_dirs_to_remove, new_rpath, verbose=True):
+def fix_elf(fname: str, rpath_dirs_to_remove: T.List[bytes], new_rpath: T.Optional[bytes], verbose: bool = True) -> None:
     with Elf(fname, verbose) as e:
         if new_rpath is None:
             e.print_rpath()
@@ -371,7 +373,7 @@ def fix_elf(fname, rpath_dirs_to_remove, new_rpath, verbose=True):
         else:
             e.fix_rpath(rpath_dirs_to_remove, new_rpath)

-def get_darwin_rpaths_to_remove(fname):
+def get_darwin_rpaths_to_remove(fname: str) -> T.List[str]:
     out = subprocess.check_output(['otool', '-l', fname],
                                   universal_newlines=True,
                                   stderr=subprocess.DEVNULL)
@@ -389,7 +391,7 @@ def get_darwin_rpaths_to_remove(fname):
             result.append(rp)
     return result

-def fix_darwin(fname, new_rpath, final_path, install_name_mappings):
+def fix_darwin(fname: str, new_rpath: str, final_path: str, install_name_mappings: T.Dict[str, str]) -> None:
     try:
         rpaths = get_darwin_rpaths_to_remove(fname)
     except subprocess.CalledProcessError:
@@ -439,7 +441,7 @@ def fix_darwin(fname, new_rpath, final_path, install_name_mappings):
     except Exception as err:
         raise SystemExit(err)

-def fix_jar(fname):
+def fix_jar(fname: str) -> None:
     subprocess.check_call(['jar', 'xfv', fname, 'META-INF/MANIFEST.MF'])
     with open('META-INF/MANIFEST.MF', 'r+') as f:
         lines = f.readlines()
@@ -450,7 +452,7 @@ def fix_jar(fname):
         f.truncate()
     subprocess.check_call(['jar', 'ufm', fname, 'META-INF/MANIFEST.MF'])

-def fix_rpath(fname, rpath_dirs_to_remove, new_rpath, final_path, install_name_mappings, verbose=True):
+def fix_rpath(fname: str, rpath_dirs_to_remove: T.List[bytes], new_rpath: T.Union[str, bytes], final_path: str, install_name_mappings: T.Dict[str, str], verbose: bool = True) -> None:
     global INSTALL_NAME_TOOL
     # Static libraries, import libraries, debug information, headers, etc
     # never have rpaths
@@ -461,6 +463,8 @@ def fix_rpath(fname, rpath_dirs_to_remove, new_rpath, final_path, install_name_m
         if fname.endswith('.jar'):
             fix_jar(fname)
             return
+        if isinstance(new_rpath, str):
+            new_rpath = new_rpath.encode('utf8')
         fix_elf(fname, rpath_dirs_to_remove, new_rpath, verbose)
         return
     except SystemExit as e:
@@ -473,6 +477,8 @@ def fix_rpath(fname, rpath_dirs_to_remove, new_rpath, final_path, install_name_m
     # (upto 30ms), which is significant with --only-changed. For details, see:
     # https://github.com/mesonbuild/meson/pull/6612#discussion_r378581401
     if INSTALL_NAME_TOOL is False:
-        INSTALL_NAME_TOOL = shutil.which('install_name_tool')
+        INSTALL_NAME_TOOL = bool(shutil.which('install_name_tool'))
     if INSTALL_NAME_TOOL:
+        if isinstance(new_rpath, bytes):
+            new_rpath = new_rpath.decode('utf8')
         fix_darwin(fname, new_rpath, final_path, install_name_mappings)

@@ -16,8 +16,9 @@
 the command given in the rest of the arguments.'''

 import os, subprocess, sys
+import typing as T

-def run(args):
+def run(args: T.List[str]) -> int:
     dirname = args[0]
     command = args[1:]

@@ -17,6 +17,7 @@ import shutil
 import argparse
 import subprocess
 from . import destdir_join
+import typing as T

 parser = argparse.ArgumentParser()
 parser.add_argument('command')
@@ -27,7 +28,7 @@ parser.add_argument('--localedir', default='')
 parser.add_argument('--subdir', default='')
 parser.add_argument('--extra-args', default='')

-def read_linguas(src_sub):
+def read_linguas(src_sub: str) -> T.List[str]:
     # Syntax of this file is documented here:
     # https://www.gnu.org/software/gettext/manual/html_node/po_002fLINGUAS.html
     linguas = os.path.join(src_sub, 'LINGUAS')
@@ -43,7 +44,7 @@ def read_linguas(src_sub):
         print('Could not find file LINGUAS in {}'.format(src_sub))
         return []

-def run_potgen(src_sub, pkgname, datadirs, args):
+def run_potgen(src_sub: str, pkgname: str, datadirs: str, args: T.List[str]) -> int:
     listfile = os.path.join(src_sub, 'POTFILES.in')
     if not os.path.exists(listfile):
         listfile = os.path.join(src_sub, 'POTFILES')
@@ -60,13 +61,13 @@ def run_potgen(src_sub, pkgname, datadirs, args):
                             '-D', os.environ['MESON_SOURCE_ROOT'], '-k_', '-o', ofile] + args,
                            env=child_env)

-def gen_gmo(src_sub, bld_sub, langs):
+def gen_gmo(src_sub: str, bld_sub: str, langs: T.List[str]) -> int:
     for l in langs:
         subprocess.check_call(['msgfmt', os.path.join(src_sub, l + '.po'),
                                '-o', os.path.join(bld_sub, l + '.gmo')])
     return 0

-def update_po(src_sub, pkgname, langs):
+def update_po(src_sub: str, pkgname: str, langs: T.List[str]) -> int:
     potfile = os.path.join(src_sub, pkgname + '.pot')
     for l in langs:
         pofile = os.path.join(src_sub, l + '.po')
@@ -76,7 +77,7 @@ def update_po(src_sub, pkgname, langs):
             subprocess.check_call(['msginit', '--input', potfile, '--output-file', pofile, '--locale', l, '--no-translator'])
     return 0

-def do_install(src_sub, bld_sub, dest, pkgname, langs):
+def do_install(src_sub: str, bld_sub: str, dest: str, pkgname: str, langs: T.List[str]) -> int:
     for l in langs:
         srcfile = os.path.join(bld_sub, l + '.gmo')
         outfile = os.path.join(dest, l, 'LC_MESSAGES',
@@ -88,7 +89,7 @@ def do_install(src_sub, bld_sub, dest, pkgname, langs):
         print('Installing %s to %s' % (srcfile, outfile))
     return 0

-def run(args):
+def run(args: T.List[str]) -> int:
     options = parser.parse_args(args)
     subcmd = options.command
     langs = options.langs.split('@@') if options.langs else None
@@ -120,3 +121,4 @@ def run(args):
     else:
         print('Unknown subcommand.')
         return 1
+    return 0

@@ -18,6 +18,7 @@ import shutil
 import argparse
 from ..mesonlib import MesonException, Popen_safe, is_windows, is_cygwin, split_args
 from . import destdir_join
+import typing as T

 parser = argparse.ArgumentParser()

@@ -50,7 +51,7 @@ for tool in ['scan', 'scangobj', 'mkdb', 'mkhtml', 'fixxref']:
     program_name = 'gtkdoc-' + tool
     parser.add_argument('--' + program_name, dest=program_name.replace('-', '_'))

-def gtkdoc_run_check(cmd, cwd, library_paths=None):
+def gtkdoc_run_check(cmd: T.List[str], cwd: str, library_paths: T.Optional[T.List[str]] = None) -> None:
     if library_paths is None:
         library_paths = []

@@ -85,12 +86,12 @@ def gtkdoc_run_check(cmd, cwd, library_paths=None):
     except UnicodeEncodeError:
         pass

-def build_gtkdoc(source_root, build_root, doc_subdir, src_subdirs,
-                 main_file, module, module_version,
-                 html_args, scan_args, fixxref_args, mkdb_args,
-                 gobject_typesfile, scanobjs_args, run, ld, cc, ldflags, cflags,
-                 html_assets, content_files, ignore_headers, namespace,
-                 expand_content_files, mode, options):
+def build_gtkdoc(source_root: str, build_root: str, doc_subdir: str, src_subdirs: T.List[str],
+                 main_file: str, module: str, module_version: str,
+                 html_args: T.List[str], scan_args: T.List[str], fixxref_args: T.List[str], mkdb_args: T.List[str],
+                 gobject_typesfile: str, scanobjs_args: T.List[str], run: str, ld: str, cc: str, ldflags: str, cflags: str,
+                 html_assets: T.List[str], content_files: T.List[str], ignore_headers: T.List[str], namespace: str,
+                 expand_content_files: T.List[str], mode: str, options: argparse.Namespace) -> None:
     print("Building documentation for %s" % module)

     src_dir_args = []
@@ -217,13 +218,13 @@ def build_gtkdoc(source_root, build_root, doc_subdir, src_subdirs,
         shutil.move(os.path.join(htmldir, '{}.devhelp2'.format(module)),
                     os.path.join(htmldir, '{}-{}.devhelp2'.format(module, module_version)))

-def install_gtkdoc(build_root, doc_subdir, install_prefix, datadir, module):
+def install_gtkdoc(build_root: str, doc_subdir: str, install_prefix: str, datadir: str, module: str) -> None:
     source = os.path.join(build_root, doc_subdir, 'html')
     final_destination = os.path.join(install_prefix, datadir, module)
     shutil.rmtree(final_destination, ignore_errors=True)
     shutil.copytree(source, final_destination)

-def run(args):
+def run(args: T.List[str]) -> int:
     options = parser.parse_args(args)
     if options.htmlargs:
         htmlargs = options.htmlargs.split('@@')

@@ -5,6 +5,7 @@ import subprocess
 from . import destdir_join

 import argparse
+import typing as T

 parser = argparse.ArgumentParser()
 parser.add_argument('--install')
@@ -14,7 +15,7 @@ parser.add_argument('--builddir')
 parser.add_argument('--project-version')


-def run(argv):
+def run(argv: T.List[str]) -> int:
     options, args = parser.parse_known_args(argv)
     subenv = os.environ.copy()

@@ -23,7 +24,7 @@ def run(argv):

     res = subprocess.call(args, cwd=options.builddir, env=subenv)
     if res != 0:
-        exit(res)
+        return res

     if options.install:
         source_dir = os.path.join(options.builddir, options.install)
@@ -34,3 +35,4 @@ def run(argv):

         shutil.rmtree(installdir, ignore_errors=True)
         shutil.copytree(source_dir, installdir)
+    return 0

@@ -17,19 +17,20 @@ import sys
 import argparse
 import pickle
 import subprocess
+import typing as T

 from .. import mesonlib
 from ..backend.backends import ExecutableSerialisation

 options = None

-def buildparser():
+def buildparser() -> argparse.ArgumentParser:
     parser = argparse.ArgumentParser(description='Custom executable wrapper for Meson. Do not run on your own, mmm\'kay?')
     parser.add_argument('--unpickle')
     parser.add_argument('--capture')
     return parser

-def run_exe(exe):
+def run_exe(exe: ExecutableSerialisation) -> int:
     if exe.exe_runner:
         if not exe.exe_runner.found():
             raise AssertionError('BUG: Can\'t run cross-compiled exe {!r} with not-found '
@@ -74,7 +75,7 @@ def run_exe(exe):
             sys.stderr.buffer.write(stderr)
     return p.returncode

-def run(args):
+def run(args: T.List[str]) -> int:
     global options
     parser = buildparser()
     options, cmd_args = parser.parse_known_args(args)

@@ -15,6 +15,7 @@
 import argparse
 import subprocess
 import os
+import typing as T

 parser = argparse.ArgumentParser()
 parser.add_argument('input')
@@ -25,7 +26,7 @@ parser.add_argument('--datadirs', default='')
 parser.add_argument('args', default=[], metavar='extra msgfmt argument', nargs='*')


-def run(args):
+def run(args: T.List[str]) -> int:
     options = parser.parse_args(args)
     env = None
     if options.datadirs:

@@ -14,10 +14,14 @@

 import sys, os
 import pickle, subprocess
+import typing as T
+
+if T.TYPE_CHECKING:
+    from ..backend.vs2010backend import RegenInfo

 # This could also be used for XCode.

-def need_regen(regeninfo, regen_timestamp):
+def need_regen(regeninfo: 'RegenInfo', regen_timestamp: float) -> bool:
     for i in regeninfo.depfiles:
         curfile = os.path.join(regeninfo.build_dir, i)
         curtime = os.stat(curfile).st_mtime
@@ -31,7 +35,7 @@ def need_regen(regeninfo, regen_timestamp):
     Vs2010Backend.touch_regen_timestamp(regeninfo.build_dir)
     return False

-def regen(regeninfo, meson_command, backend):
+def regen(regeninfo: 'RegenInfo', meson_command: T.List[str], backend: str) -> None:
     cmd = meson_command + ['--internal',
                            'regenerate',
                            regeninfo.build_dir,
@@ -39,19 +43,19 @@ def regen(regeninfo, meson_command, backend):
                            '--backend=' + backend]
     subprocess.check_call(cmd)

-def run(args):
+def run(args: T.List[str]) -> int:
     private_dir = args[0]
     dumpfile = os.path.join(private_dir, 'regeninfo.dump')
-    coredata = os.path.join(private_dir, 'coredata.dat')
+    coredata_file = os.path.join(private_dir, 'coredata.dat')
     with open(dumpfile, 'rb') as f:
-        regeninfo = pickle.load(f)
-    with open(coredata, 'rb') as f:
+        regeninfo = T.cast('RegenInfo', pickle.load(f))
+    with open(coredata_file, 'rb') as f:
         coredata = pickle.load(f)
     backend = coredata.get_builtin_option('backend')
     regen_timestamp = os.stat(dumpfile).st_mtime
     if need_regen(regeninfo, regen_timestamp):
         regen(regeninfo, coredata.meson_command, backend)
-    sys.exit(0)
+    return 0

 if __name__ == '__main__':
-    run(sys.argv[1:])
+    sys.exit(run(sys.argv[1:]))

@@ -12,30 +12,31 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

-import os
 import subprocess
 import shutil
 import tempfile
 from ..environment import detect_ninja, detect_scanbuild
+from pathlib import Path
+import typing as T


-def scanbuild(exelist, srcdir, blddir, privdir, logdir, args):
-    with tempfile.TemporaryDirectory(dir=privdir) as scandir:
+def scanbuild(exelist: T.List[str], srcdir: Path, blddir: Path, privdir: Path, logdir: Path, args: T.List[str]) -> int:
+    with tempfile.TemporaryDirectory(dir=str(privdir)) as scandir:
         meson_cmd = exelist + args
-        build_cmd = exelist + ['-o', logdir] + detect_ninja() + ['-C', scandir]
-        rc = subprocess.call(meson_cmd + [srcdir, scandir])
+        build_cmd = exelist + ['-o', str(logdir)] + detect_ninja() + ['-C', scandir]
+        rc = subprocess.call(meson_cmd + [str(srcdir), scandir])
         if rc != 0:
             return rc
         return subprocess.call(build_cmd)


-def run(args):
-    srcdir = args[0]
-    blddir = args[1]
+def run(args: T.List[str]) -> int:
+    srcdir = Path(args[0])
+    blddir = Path(args[1])
     meson_cmd = args[2:]
-    privdir = os.path.join(blddir, 'meson-private')
-    logdir = os.path.join(blddir, 'meson-logs/scanbuild')
-    shutil.rmtree(logdir, ignore_errors=True)
+    privdir = blddir / 'meson-private'
+    logdir = blddir / 'meson-logs' / 'scanbuild'
+    shutil.rmtree(str(logdir), ignore_errors=True)

     exelist = detect_scanbuild()
     if not exelist:

@@ -36,12 +36,12 @@ parser.add_argument('args', nargs='+')
 TOOL_WARNING_FILE = None
 RELINKING_WARNING = 'Relinking will always happen on source changes.'

-def dummy_syms(outfilename: str):
+def dummy_syms(outfilename: str) -> None:
     """Just touch it so relinking happens always."""
     with open(outfilename, 'w'):
         pass

-def write_if_changed(text: str, outfilename: str):
+def write_if_changed(text: str, outfilename: str) -> None:
     try:
         with open(outfilename, 'r') as f:
             oldtext = f.read()
@@ -52,13 +52,11 @@ def write_if_changed(text: str, outfilename: str):
         with open(outfilename, 'w') as f:
             f.write(text)

-def print_tool_warning(tool: list, msg: str, stderr: str = None):
+def print_tool_warning(tools: T.List[str], msg: str, stderr: T.Optional[str] = None) -> None:
     global TOOL_WARNING_FILE
     if os.path.exists(TOOL_WARNING_FILE):
         return
-    if len(tool) == 1:
-        tool = tool[0]
-    m = '{!r} {}. {}'.format(tool, msg, RELINKING_WARNING)
+    m = '{!r} {}. {}'.format(tools, msg, RELINKING_WARNING)
     if stderr:
         m += '\n' + stderr
     mlog.warning(m)
@@ -73,7 +71,7 @@ def get_tool(name: str) -> T.List[str]:
         return shlex.split(os.environ[evar])
     return [name]

-def call_tool(name: str, args: T.List[str], **kwargs) -> str:
+def call_tool(name: str, args: T.List[str], **kwargs: T.Any) -> str:
     tool = get_tool(name)
     try:
         p, output, e = Popen_safe(tool + args, **kwargs)
@@ -88,7 +86,7 @@ def call_tool(name: str, args: T.List[str], **kwargs) -> str:
         return None
     return output

-def call_tool_nowarn(tool: T.List[str], **kwargs) -> T.Tuple[str, str]:
+def call_tool_nowarn(tool: T.List[str], **kwargs: T.Any) -> T.Tuple[str, str]:
     try:
         p, output, e = Popen_safe(tool, **kwargs)
     except FileNotFoundError:
@@ -99,7 +97,7 @@ def call_tool_nowarn(tool: T.List[str], **kwargs) -> T.Tuple[str, str]:
         return None, e
     return output, None

-def gnu_syms(libfilename: str, outfilename: str):
+def gnu_syms(libfilename: str, outfilename: str) -> None:
     # Get the name of the library
     output = call_tool('readelf', ['-d', libfilename])
     if not output:
@@ -126,7 +124,7 @@ def gnu_syms(libfilename: str, outfilename: str):
        result += [' '.join(entry)]
     write_if_changed('\n'.join(result) + '\n', outfilename)

-def solaris_syms(libfilename: str, outfilename: str):
+def solaris_syms(libfilename: str, outfilename: str) -> None:
     # gnu_syms() works with GNU nm & readelf, not Solaris nm & elfdump
     origpath = os.environ['PATH']
     try:
@@ -135,7 +133,7 @@ def solaris_syms(libfilename: str, outfilename: str):
     finally:
         os.environ['PATH'] = origpath

-def osx_syms(libfilename: str, outfilename: str):
+def osx_syms(libfilename: str, outfilename: str) -> None:
     # Get the name of the library
     output = call_tool('otool', ['-l', libfilename])
     if not output:
@@ -156,7 +154,7 @@ def osx_syms(libfilename: str, outfilename: str):
     result += [' '.join(x.split()[0:2]) for x in output.split('\n')]
     write_if_changed('\n'.join(result) + '\n', outfilename)

-def openbsd_syms(libfilename: str, outfilename: str):
+def openbsd_syms(libfilename: str, outfilename: str) -> None:
     # Get the name of the library
     output = call_tool('readelf', ['-d', libfilename])
     if not output:
@@ -173,7 +171,7 @@ def openbsd_syms(libfilename: str, outfilename: str):
     result += [' '.join(x.split()[0:2]) for x in output.split('\n') if x and not x.endswith('U ')]
     write_if_changed('\n'.join(result) + '\n', outfilename)

-def cygwin_syms(impfilename: str, outfilename: str):
+def cygwin_syms(impfilename: str, outfilename: str) -> None:
     # Get the name of the library
     output = call_tool('dlltool', ['-I', impfilename])
     if not output:
@@ -242,23 +240,23 @@ def _get_implib_exports(impfilename: str) -> T.Tuple[T.List[str], str]:
         all_stderr += e
     return ([], all_stderr)

-def windows_syms(impfilename: str, outfilename: str):
+def windows_syms(impfilename: str, outfilename: str) -> None:
     # Get the name of the library
     result, e = _get_implib_dllname(impfilename)
     if not result:
-        print_tool_warning('lib, llvm-lib, dlltool', 'do not work or were not found', e)
+        print_tool_warning(['lib', 'llvm-lib', 'dlltool'], 'do not work or were not found', e)
         dummy_syms(outfilename)
         return
     # Get a list of all symbols exported
     symbols, e = _get_implib_exports(impfilename)
     if not symbols:
-        print_tool_warning('dumpbin, llvm-nm, nm', 'do not work or were not found', e)
+        print_tool_warning(['dumpbin', 'llvm-nm', 'nm'], 'do not work or were not found', e)
         dummy_syms(outfilename)
         return
     result += symbols
     write_if_changed('\n'.join(result) + '\n', outfilename)

-def gen_symbols(libfilename: str, impfilename: str, outfilename: str, cross_host: str):
+def gen_symbols(libfilename: str, impfilename: str, outfilename: str, cross_host: str) -> None:
     if cross_host is not None:
         # In case of cross builds just always relink. In theory we could
         # determine the correct toolset, but we would need to use the correct
@@ -295,7 +293,7 @@ def gen_symbols(libfilename: str, impfilename: str, outfilename: str, cross_host
                 pass
         dummy_syms(outfilename)

-def run(args):
+def run(args: T.List[str]) -> int:
     global TOOL_WARNING_FILE
     options = parser.parse_args(args)
     if len(options.args) != 4:

@@ -15,9 +15,9 @@
 import os
 import subprocess
 from pathlib import Path
+import typing as T

-
-def ls_as_bytestream():
+def ls_as_bytestream() -> bytes:
     if os.path.exists('.git'):
         return subprocess.run(['git', 'ls-tree', '-r', '--name-only', 'HEAD'],
                               stdout=subprocess.PIPE).stdout
@@ -28,24 +28,24 @@ def ls_as_bytestream():
     return '\n'.join(files).encode()


-def cscope():
+def cscope() -> int:
     ls = b'\n'.join([b'"%s"' % f for f in ls_as_bytestream().split()])
     return subprocess.run(['cscope', '-v', '-b', '-i-'], input=ls).returncode


-def ctags():
+def ctags() -> int:
     ls = ls_as_bytestream()
     return subprocess.run(['ctags', '-L-'], input=ls).returncode


-def etags():
+def etags() -> int:
     ls = ls_as_bytestream()
     return subprocess.run(['etags', '-'], input=ls).returncode


-def run(args):
+def run(args: T.List[str]) -> int:
     tool_name = args[0]
     srcdir_name = args[1]
     os.chdir(srcdir_name)
     assert tool_name in ['cscope', 'ctags', 'etags']
-    return globals()[tool_name]()
+    return T.cast(int, globals()[tool_name]())

@@ -13,10 +13,11 @@
 # limitations under the License.

 import os
+import typing as T

 logfile = 'meson-logs/install-log.txt'

-def do_uninstall(log):
+def do_uninstall(log: str) -> None:
     failures = 0
     successes = 0
     for line in open(log):
@@ -38,7 +39,7 @@ def do_uninstall(log):
     print('Failed:', failures)
     print('\nRemember that files created by custom scripts have not been removed.')

-def run(args):
+def run(args: T.List[str]) -> int:
     if args:
         print('Weird error.')
         return 1

@@ -13,9 +13,9 @@
 # limitations under the License.

 import sys, os, subprocess, re
+import typing as T

-
-def config_vcs_tag(infile, outfile, fallback, source_dir, replace_string, regex_selector, cmd):
+def config_vcs_tag(infile: str, outfile: str, fallback: str, source_dir: str, replace_string: str, regex_selector: str, cmd: T.List[str]) -> None:
     try:
         output = subprocess.check_output(cmd, cwd=source_dir)
         new_string = re.search(regex_selector, output.decode()).group(1).strip()
@@ -34,7 +34,7 @@ def config_vcs_tag(infile, outfile, fallback, source_dir, replace_string, regex_
             f.write(new_data)


-def run(args):
+def run(args: T.List[str]) -> int:
     infile, outfile, fallback, source_dir, replace_string, regex_selector = args[0:6]
     command = args[6:]
     config_vcs_tag(infile, outfile, fallback, source_dir, replace_string, regex_selector, command)

@@ -20,6 +20,7 @@ from .. import mlog
 from ..mesonlib import has_path_sep
 from . import destdir_join
 from .gettext import read_linguas
+import typing as T

 parser = argparse.ArgumentParser()
 parser.add_argument('command')
@@ -31,19 +32,19 @@ parser.add_argument('--media', dest='media', default='')
 parser.add_argument('--langs', dest='langs', default='')
 parser.add_argument('--symlinks', type=bool, dest='symlinks', default=False)

-def build_pot(srcdir, project_id, sources):
+def build_pot(srcdir: str, project_id: str, sources: T.List[str]) -> None:
     # Must be relative paths
     sources = [os.path.join('C', source) for source in sources]
     outfile = os.path.join(srcdir, project_id + '.pot')
     subprocess.call(['itstool', '-o', outfile] + sources)

-def update_po(srcdir, project_id, langs):
+def update_po(srcdir: str, project_id: str, langs: T.List[str]) -> None:
     potfile = os.path.join(srcdir, project_id + '.pot')
     for lang in langs:
         pofile = os.path.join(srcdir, lang, lang + '.po')
         subprocess.call(['msgmerge', '-q', '-o', pofile, pofile, potfile])

-def build_translations(srcdir, blddir, langs):
+def build_translations(srcdir: str, blddir: str, langs: T.List[str]) -> None:
     for lang in langs:
         outdir = os.path.join(blddir, lang)
         os.makedirs(outdir, exist_ok=True)
@@ -52,14 +53,14 @@ def build_translations(srcdir, blddir, langs):
             '-o', os.path.join(outdir, lang + '.gmo')
         ])

-def merge_translations(blddir, sources, langs):
+def merge_translations(blddir: str, sources: T.List[str], langs: T.List[str]) -> None:
     for lang in langs:
         subprocess.call([
             'itstool', '-m', os.path.join(blddir, lang, lang + '.gmo'),
             '-o', os.path.join(blddir, lang)
         ] + sources)

-def install_help(srcdir, blddir, sources, media, langs, install_dir, destdir, project_id, symlinks):
+def install_help(srcdir: str, blddir: str, sources: T.List[str], media: T.List[str], langs: T.List[str], install_dir: str, destdir: str, project_id: str, symlinks: bool) -> None:
     c_install_dir = os.path.join(install_dir, 'C', project_id)
     for lang in langs + ['C']:
         indir = destdir_join(destdir, os.path.join(install_dir, lang, project_id))
@@ -101,7 +102,7 @@ def install_help(srcdir, blddir, sources, media, langs, install_dir, destdir, pr
                 shutil.copyfile(infile, outfile)
                 shutil.copystat(infile, outfile)

-def run(args):
+def run(args: T.List[str]) -> int:
     options = parser.parse_args(args)
     langs = options.langs.split('@@') if options.langs else []
     media = options.media.split('@@') if options.media else []
@@ -129,3 +130,4 @@ def run(args):
         merge_translations(build_subdir, abs_sources, langs)
         install_help(src_subdir, build_subdir, sources, media, langs, install_dir,
                      destdir, options.project_id, options.symlinks)
+    return 0

@@ -15,6 +15,7 @@ normal_modules = [
     'mesonbuild/msetup.py',
     'mesonbuild/ast',
     'mesonbuild/wrap',
+    'mesonbuild/scripts',
     'tools',
     'mesonbuild/modules/fs.py',
     'mesonbuild/dependencies/boost.py',
@@ -35,6 +36,7 @@ strict_modules = [
     'mesonbuild/mlog.py',
     'mesonbuild/ast',
     'mesonbuild/wrap',
+    'mesonbuild/scripts',
     'run_mypy.py',
     'tools',
 ]
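
With mesonbuild/scripts added to both module lists above, the newly annotated scripts are covered by the project's own type-checking wrapper, run_mypy.py. As a rough illustration only (not part of the commit), one of the listed paths can also be checked directly through mypy's programmatic API; whether the check passes depends on the installed mypy version:

    import sys
    from mypy import api  # mypy's documented programmatic entry point

    # Type-check one of the paths added to the lists above and mirror mypy's exit status.
    stdout, stderr, status = api.run(['mesonbuild/scripts'])
    print(stdout, end='')
    print(stderr, end='', file=sys.stderr)
    sys.exit(status)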