various python neatness cleanups
All changes were created by running "pyupgrade --py3-only" and committing the results. Although this has been done in the past, newer versions of pyupgrade can automatically catch more opportunities, notably that list comprehensions can be replaced by generator expressions in the following cases (a minimal sketch of these patterns follows below):

- unpacking into function arguments, as function(*generator)
- unpacking into assignments of the form x, y = generator
- as the argument to some builtin functions such as min/max/sorted

Also catch a few creeping cases of new code added using older styles.
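As a rough illustration only, the hypothetical snippet below (the names values, lo, hi, and ordered are invented for this sketch and do not appear in the commit) shows the before/after shape of the three generator rewrites; the diff below also contains a few related cleanups, such as dropping redundant 'r' open modes, using set comprehensions, and using f-strings:

    # Hypothetical sketch of the rewrite patterns described above.
    values = ['3', '1', '2']

    # 1. Unpacking into function arguments: function(*generator)
    #    before: print(*[int(v) for v in values])
    print(*(int(v) for v in values))

    # 2. Unpacking into assignments of the form x, y = generator
    #    before: lo, hi = [f(values) for f in (min, max)]
    lo, hi = (f(values) for f in (min, max))

    # 3. As the argument to builtins such as min/max/sorted
    #    before: ordered = sorted([int(v) for v in values])
    ordered = sorted(int(v) for v in values)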
This commit is contained in:
parent 2d65472c72
commit e8a85fa8a2
@@ -1035,7 +1035,7 @@ class BuildTarget(Target):
 self.link_whole(linktarget)

 c_pchlist, cpp_pchlist, clist, cpplist, cudalist, cslist, valalist, objclist, objcpplist, fortranlist, rustlist \
-= [extract_as_list(kwargs, c) for c in ['c_pch', 'cpp_pch', 'c_args', 'cpp_args', 'cuda_args', 'cs_args', 'vala_args', 'objc_args', 'objcpp_args', 'fortran_args', 'rust_args']]
+= (extract_as_list(kwargs, c) for c in ['c_pch', 'cpp_pch', 'c_args', 'cpp_args', 'cuda_args', 'cs_args', 'vala_args', 'objc_args', 'objcpp_args', 'fortran_args', 'rust_args'])

 self.add_pch('c', c_pchlist)
 self.add_pch('cpp', cpp_pchlist)
@@ -2456,7 +2456,7 @@ class CustomTarget(Target, CommandBase):
 self.build_always_stale = kwargs['build_always_stale']
 if not isinstance(self.build_always_stale, bool):
 raise InvalidArguments('Argument build_always_stale must be a boolean.')
-extra_deps, depend_files = [extract_as_list(kwargs, c, pop=False) for c in ['depends', 'depend_files']]
+extra_deps, depend_files = (extract_as_list(kwargs, c, pop=False) for c in ['depends', 'depend_files'])
 for ed in extra_deps:
 if not isinstance(ed, (CustomTarget, BuildTarget)):
 raise InvalidArguments('Can only depend on toplevel targets: custom_target or build_target '
@@ -127,7 +127,7 @@ class Dependency(HoldableObject):
 def get_all_compile_args(self) -> T.List[str]:
 """Get the compile arguments from this dependency and it's sub dependencies."""
 return list(itertools.chain(self.get_compile_args(),
-*[d.get_all_compile_args() for d in self.ext_deps]))
+*(d.get_all_compile_args() for d in self.ext_deps)))

 def get_link_args(self, language: T.Optional[str] = None, raw: bool = False) -> T.List[str]:
 if raw and self.raw_link_args is not None:
@@ -137,7 +137,7 @@ class Dependency(HoldableObject):
 def get_all_link_args(self) -> T.List[str]:
 """Get the link arguments from this dependency and it's sub dependencies."""
 return list(itertools.chain(self.get_link_args(),
-*[d.get_all_link_args() for d in self.ext_deps]))
+*(d.get_all_link_args() for d in self.ext_deps)))

 def found(self) -> bool:
 return self.is_found
@@ -574,7 +574,7 @@ class CompilerHolder(ObjectHolder['Compiler']):
 KwargInfo('static', (bool, NoneType), since='0.51.0'),
 KwargInfo('disabler', bool, default=False, since='0.49.0'),
 KwargInfo('dirs', ContainerTypeInfo(list, str), listify=True, default=[]),
-*[k.evolve(name=f'header_{k.name}') for k in _HEADER_KWS]
+*(k.evolve(name=f'header_{k.name}') for k in _HEADER_KWS)
 )
 def find_library_method(self, args: T.Tuple[str], kwargs: 'FindLibraryKW') -> 'dependencies.ExternalLibrary':
 # TODO add dependencies support?
@@ -90,7 +90,7 @@ def _install_mode_convertor(mode: T.Optional[T.List[T.Union[str, bool, int]]]) -
 emtpy FileMode.
 """
 # this has already been validated by the validator
-return FileMode(*[m if isinstance(m, str) else None for m in mode])
+return FileMode(*(m if isinstance(m, str) else None for m in mode))


 def _lower_strlist(input: T.List[str]) -> T.List[str]:
@@ -184,7 +184,7 @@ class GnomeModule(ExtensionModule):
 glib_compile_resources = state.find_program('glib-compile-resources')
 cmd = [glib_compile_resources, '@INPUT@']

-source_dirs, dependencies = [mesonlib.extract_as_list(kwargs, c, pop=True) for c in ['source_dir', 'dependencies']]
+source_dirs, dependencies = (mesonlib.extract_as_list(kwargs, c, pop=True) for c in ['source_dir', 'dependencies'])

 if len(args) < 2:
 raise MesonException('Not enough arguments; the name of the resource '
@@ -264,7 +264,7 @@ class CudaModule(NewExtensionModule):
 elif isinstance(cuda_arch_list, str):
 cuda_arch_list = self._break_arch_string(cuda_arch_list)

-cuda_arch_list = sorted([x for x in set(cuda_arch_list) if x])
+cuda_arch_list = sorted(x for x in set(cuda_arch_list) if x)

 cuda_arch_bin = []
 cuda_arch_ptx = []
@@ -1660,7 +1660,7 @@ class TestHarness:
 # wrapper script.
 sys.exit(125)

-self.name_max_len = max([uniwidth(self.get_pretty_suite(test)) for test in tests])
+self.name_max_len = max(uniwidth(self.get_pretty_suite(test)) for test in tests)
 startdir = os.getcwd()
 try:
 os.chdir(self.options.wd)
@@ -1668,8 +1668,8 @@ class TestHarness:
 for i in range(self.options.repeat):
 runners.extend(self.get_test_runner(test) for test in tests)
 if i == 0:
-self.duration_max_len = max([len(str(int(runner.timeout or 99)))
-for runner in runners])
+self.duration_max_len = max(len(str(int(runner.timeout or 99)))
+for runner in runners)
 # Disable the progress report if it gets in the way
 self.need_console = any(runner.console_mode is not ConsoleUser.LOGGER
 for runner in runners)
@@ -70,7 +70,7 @@ def clangformat(exelist: T.List[str], srcdir: Path, builddir: Path, check: bool)
 any(fnmatch.fnmatch(strf, i) for i in ignore):
 continue
 futures.append(e.submit(run_clang_format, exelist, f, check))
-returncode = max([x.result().returncode for x in futures])
+returncode = max(x.result().returncode for x in futures)
 return returncode

 def run(args: T.List[str]) -> int:
@@ -36,7 +36,7 @@ def manual_clangtidy(srcdir_name: str, builddir_name: str) -> int:
 if strf.startswith(builddir_name):
 continue
 futures.append(e.submit(subprocess.run, ['clang-tidy', '-p', builddir_name, strf]))
-returncode = max([x.result().returncode for x in futures])
+returncode = max(x.result().returncode for x in futures)
 return returncode

 def clangtidy(srcdir_name: str, builddir_name: str) -> int:
@@ -197,7 +197,7 @@ class DependencyScanner:
 def run(args: T.List[str]) -> int:
 assert len(args) == 3, 'got wrong number of arguments!'
 pickle_file, outfile, jsonfile = args
-with open(jsonfile, 'r', encoding='utf-8') as f:
+with open(jsonfile, encoding='utf-8') as f:
 sources = json.load(f)
 scanner = DependencyScanner(pickle_file, outfile, sources)
 return scanner.scan()
@@ -1253,11 +1253,11 @@ def _run_tests(all_tests: T.List[T.Tuple[str, T.List[TestDef], bool]],
 f.status = TestStatus.CANCELED

 if stop and not tests_canceled:
-num_running = sum([1 if f2.status is TestStatus.RUNNING else 0 for f2 in futures])
+num_running = sum(1 if f2.status is TestStatus.RUNNING else 0 for f2 in futures)
 for f2 in futures:
 f2.cancel()
 executor.shutdown()
-num_canceled = sum([1 if f2.status is TestStatus.CANCELED else 0 for f2 in futures])
+num_canceled = sum(1 if f2.status is TestStatus.CANCELED else 0 for f2 in futures)
 safe_print(f'\nCanceled {num_canceled} out of {num_running} running tests.')
 safe_print(f'Finishing the remaining {num_running - num_canceled} tests.\n')
 tests_canceled = True
@@ -1297,7 +1297,7 @@ def _run_tests(all_tests: T.List[T.Tuple[str, T.List[TestDef], bool]],
 else:
 skip_msg = 'Test ran, but was expected to be skipped'
 status = TestStatus.UNEXRUN
-result.msg = "%s for MESON_CI_JOBNAME '%s'" % (skip_msg, ci_jobname)
+result.msg = f"{skip_msg} for MESON_CI_JOBNAME '{ci_jobname}'"

 f.update_log(status)
 current_test = ET.SubElement(current_suite, 'testcase', {'name': testname, 'classname': t.category})
@@ -1479,7 +1479,7 @@ def print_tool_versions() -> None:
 args = [t.tool] + t.args
 pc, o, e = Popen_safe(args)
 if pc.returncode != 0:
-return '{} (invalid {} executable)'.format(exe, t.tool)
+return f'{exe} (invalid {t.tool} executable)'
 for i in o.split('\n'):
 i = i.strip('\n\r\t ')
 m = t.regex.match(i)
@@ -9,9 +9,9 @@ output = None

 # Only the ninja backend produces compile_commands.json
 if sys.argv[1] == 'ninja':
-with open('compile_commands.json', 'r') as f:
+with open('compile_commands.json') as f:
 cc = json.load(f)
-output = set((x['output'] for x in cc))
+output = {x['output'] for x in cc}

 for obj in sys.argv[2:]:
 if not os.path.exists(obj):
@@ -8,5 +8,5 @@ parser.add_argument('input')
 parser.add_argument('output')
 args = parser.parse_args()

-with open(args.input, 'r') as i, open(args.output, 'w') as o:
+with open(args.input) as i, open(args.output, 'w') as o:
 o.write(i.read())
@@ -1339,7 +1339,7 @@ class AllPlatformTests(BasePlatformTests):
 tar = tarfile.open(xz_distfile, "r:xz") # [ignore encoding]
 self.assertEqual(sorted(['samerepo-1.0',
 'samerepo-1.0/meson.build']),
-sorted([i.name for i in tar]))
+sorted(i.name for i in tar))

 def test_rpath_uses_ORIGIN(self):
 '''
@@ -2625,7 +2625,7 @@ class AllPlatformTests(BasePlatformTests):
 def assertKeyTypes(key_type_list, obj, strict: bool = True):
 for i in key_type_list:
 if isinstance(i[1], (list, tuple)) and None in i[1]:
-i = (i[0], tuple([x for x in i[1] if x is not None]))
+i = (i[0], tuple(x for x in i[1] if x is not None))
 if i[0] not in obj or obj[i[0]] is None:
 continue
 self.assertIn(i[0], obj)
@@ -145,8 +145,8 @@ class DataTests(unittest.TestCase):
 found_entries |= options

 self.assertEqual(found_entries, {
-*[str(k) for k in mesonbuild.coredata.BUILTIN_OPTIONS],
-*[str(k) for k in mesonbuild.coredata.BUILTIN_OPTIONS_PER_MACHINE],
+*(str(k) for k in mesonbuild.coredata.BUILTIN_OPTIONS),
+*(str(k) for k in mesonbuild.coredata.BUILTIN_OPTIONS_PER_MACHINE),
 })

 # Check that `buildtype` table inside `Core options` matches how
@@ -509,8 +509,8 @@ class InternalTests(unittest.TestCase):
 def _test_all_naming(self, cc, env, patterns, platform):
 shr = patterns[platform]['shared']
 stc = patterns[platform]['static']
-shrstc = shr + tuple([x for x in stc if x not in shr])
-stcshr = stc + tuple([x for x in shr if x not in stc])
+shrstc = shr + tuple(x for x in stc if x not in shr)
+stcshr = stc + tuple(x for x in shr if x not in stc)
 p = cc.get_library_naming(env, LibType.SHARED)
 self.assertEqual(p, shr)
 p = cc.get_library_naming(env, LibType.STATIC)