Fix DUB dependencies
- fix the search for targets built by DUB - explicitly state that DUB dynamic libraries and source libraries are not supported (yet) (mesonbuild#6581) - fix the build settings of recipes having sub-dependencies (mesonbuild#7560) - fix winlibs added from dub recipe - sanitization, comments, explanations...
This commit is contained in:
parent
7b78c6b41b
commit
04fca24355
|
@ -14,15 +14,15 @@
|
|||
|
||||
from .base import ExternalDependency, DependencyException, DependencyTypeName
|
||||
from .pkgconfig import PkgConfigDependency
|
||||
from ..mesonlib import Popen_safe
|
||||
from ..mesonlib import (Popen_safe, OptionKey)
|
||||
from ..mesonlib.universal import join_args
|
||||
from ..programs import ExternalProgram
|
||||
from ..compilers import DCompiler
|
||||
from ..compilers.d import d_feature_args
|
||||
from .. import mlog
|
||||
import re
|
||||
import os
|
||||
import copy
|
||||
import json
|
||||
import platform
|
||||
import typing as T
|
||||
|
||||
if T.TYPE_CHECKING:
|
||||
|
@ -34,7 +34,6 @@ class DubDependency(ExternalDependency):
|
|||
def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any]):
|
||||
super().__init__(DependencyTypeName('dub'), environment, kwargs, language='d')
|
||||
self.name = name
|
||||
self.module_path: T.Optional[str] = None
|
||||
|
||||
_temp_comp = super().get_compiler()
|
||||
assert isinstance(_temp_comp, DCompiler)
|
||||
|
@ -59,158 +58,328 @@ class DubDependency(ExternalDependency):
|
|||
mlog.debug('Determining dependency {!r} with DUB executable '
|
||||
'{!r}'.format(name, self.dubbin.get_path()))
|
||||
|
||||
# if an explicit version spec was stated, use this when querying Dub
|
||||
main_pack_spec = name
|
||||
if 'version' in kwargs:
|
||||
version_spec = kwargs['version']
|
||||
if isinstance(version_spec, list):
|
||||
version_spec = " ".join(version_spec)
|
||||
main_pack_spec = f'{name}@{version_spec}'
|
||||
|
||||
# we need to know the target architecture
|
||||
arch = self.compiler.arch
|
||||
dub_arch = self.compiler.arch
|
||||
|
||||
# we need to know the build type as well
|
||||
dub_buildtype = str(environment.coredata.get_option(OptionKey('buildtype')))
|
||||
# MESON types: choices=['plain', 'debug', 'debugoptimized', 'release', 'minsize', 'custom'])),
|
||||
# DUB types: debug (default), plain, release, release-debug, release-nobounds, unittest, profile, profile-gc,
|
||||
# docs, ddox, cov, unittest-cov, syntax and custom
|
||||
if dub_buildtype == 'debugoptimized':
|
||||
dub_buildtype = 'release-debug'
|
||||
elif dub_buildtype == 'minsize':
|
||||
dub_buildtype = 'release'
|
||||
|
||||
# Ask dub for the package
|
||||
ret, res = self._call_dubbin(['describe', name, '--arch=' + arch])
|
||||
describe_cmd = [
|
||||
'describe', main_pack_spec, '--arch=' + dub_arch,
|
||||
'--build=' + dub_buildtype, '--compiler=' + self.compiler.get_exelist()[-1]
|
||||
]
|
||||
ret, res, err = self._call_dubbin(describe_cmd)
|
||||
|
||||
if ret != 0:
|
||||
mlog.debug('DUB describe failed: ' + err)
|
||||
if 'locally' in err:
|
||||
fetch_cmd = ['dub', 'fetch', main_pack_spec]
|
||||
mlog.error(mlog.bold(main_pack_spec), 'is not present locally. You may try the following command:')
|
||||
mlog.log(mlog.bold(join_args(fetch_cmd)))
|
||||
self.is_found = False
|
||||
return
|
||||
|
||||
comp = self.compiler.get_id().replace('llvm', 'ldc').replace('gcc', 'gdc')
|
||||
packages = []
|
||||
# A command that might be useful in case of missing DUB package
|
||||
def dub_build_deep_command() -> str:
|
||||
cmd = ['dub', 'run', 'dub-build-deep', '--yes', '--', main_pack_spec,
|
||||
'--arch=' + dub_arch, '--compiler=' + self.compiler.get_exelist()[-1],
|
||||
'--build=' + dub_buildtype
|
||||
]
|
||||
return join_args(cmd)
|
||||
|
||||
dub_comp_id = self.compiler.get_id().replace('llvm', 'ldc').replace('gcc', 'gdc')
|
||||
description = json.loads(res)
|
||||
for package in description['packages']:
|
||||
packages.append(package['name'])
|
||||
if package['name'] == name:
|
||||
|
||||
self.compile_args = []
|
||||
self.link_args = self.raw_link_args = []
|
||||
|
||||
show_buildtype_warning = False
|
||||
|
||||
def find_package_target(pkg: T.Dict[str, str]) -> bool:
|
||||
nonlocal show_buildtype_warning
|
||||
# try to find a static library in a DUB folder corresponding to
|
||||
# version, configuration, compiler, arch and build-type
|
||||
# if can find, add to link_args.
|
||||
# link_args order is meaningful, so this function MUST be called in the right order
|
||||
pack_id = f'{pkg["name"]}@{pkg["version"]}'
|
||||
(tgt_file, compatibilities) = self._find_compatible_package_target(description, pkg, dub_comp_id)
|
||||
if tgt_file is None:
|
||||
if not compatibilities:
|
||||
mlog.error(mlog.bold(pack_id), 'not found')
|
||||
elif 'compiler' not in compatibilities:
|
||||
mlog.error(mlog.bold(pack_id), 'found but not compiled with ', mlog.bold(dub_comp_id))
|
||||
elif dub_comp_id != 'gdc' and 'compiler_version' not in compatibilities:
|
||||
mlog.error(mlog.bold(pack_id), 'found but not compiled with', mlog.bold(f'{dub_comp_id}-{self.compiler.version}'))
|
||||
elif 'arch' not in compatibilities:
|
||||
mlog.error(mlog.bold(pack_id), 'found but not compiled for', mlog.bold(dub_arch))
|
||||
elif 'platform' not in compatibilities:
|
||||
mlog.error(mlog.bold(pack_id), 'found but not compiled for', mlog.bold(description['platform'].join('.')))
|
||||
elif 'configuration' not in compatibilities:
|
||||
mlog.error(mlog.bold(pack_id), 'found but not compiled for the', mlog.bold(pkg['configuration']), 'configuration')
|
||||
else:
|
||||
mlog.error(mlog.bold(pack_id), 'not found')
|
||||
|
||||
mlog.log('You may try the following command to install the necessary DUB libraries:')
|
||||
mlog.log(mlog.bold(dub_build_deep_command()))
|
||||
|
||||
return False
|
||||
|
||||
if 'build_type' not in compatibilities:
|
||||
mlog.warning(mlog.bold(pack_id), 'found but not compiled as', mlog.bold(dub_buildtype))
|
||||
show_buildtype_warning = True
|
||||
|
||||
self.link_args.append(tgt_file)
|
||||
return True
|
||||
|
||||
# Main algorithm:
|
||||
# 1. Ensure that the target is a compatible library type (not dynamic)
|
||||
# 2. Find a compatible built library for the main dependency
|
||||
# 3. Do the same for each sub-dependency.
|
||||
# link_args MUST be in the same order than the "linkDependencies" of the main target
|
||||
# 4. Add other build settings (imports, versions etc.)
|
||||
|
||||
# 1
|
||||
self.is_found = False
|
||||
packages = {}
|
||||
for pkg in description['packages']:
|
||||
packages[pkg['name']] = pkg
|
||||
|
||||
if not pkg['active']:
|
||||
continue
|
||||
|
||||
if pkg['targetType'] == 'dynamicLibrary':
|
||||
mlog.error('DUB dynamic library dependencies are not supported.')
|
||||
self.is_found = False
|
||||
return
|
||||
|
||||
## check that the main dependency is indeed a library
|
||||
if pkg['name'] == name:
|
||||
self.is_found = True
|
||||
|
||||
not_lib = True
|
||||
if 'targetType' in package:
|
||||
if package['targetType'] in ['library', 'sourceLibrary', 'staticLibrary', 'dynamicLibrary']:
|
||||
not_lib = False
|
||||
|
||||
if not_lib:
|
||||
if pkg['targetType'] not in ['library', 'sourceLibrary', 'staticLibrary']:
|
||||
mlog.error(mlog.bold(name), "found but it isn't a library")
|
||||
self.is_found = False
|
||||
return
|
||||
|
||||
self.module_path = self._find_right_lib_path(package['path'], comp, description, True, package['targetFileName'])
|
||||
if not os.path.exists(self.module_path):
|
||||
# check if the dependency was built for other archs
|
||||
archs = [['x86_64'], ['x86'], ['x86', 'x86_mscoff']]
|
||||
for a in archs:
|
||||
description_a = copy.deepcopy(description)
|
||||
description_a['architecture'] = a
|
||||
arch_module_path = self._find_right_lib_path(package['path'], comp, description_a, True, package['targetFileName'])
|
||||
if arch_module_path:
|
||||
mlog.error(mlog.bold(name), "found but it wasn't compiled for", mlog.bold(arch))
|
||||
self.is_found = False
|
||||
return
|
||||
self.version = pkg['version']
|
||||
self.pkg = pkg
|
||||
|
||||
mlog.error(mlog.bold(name), "found but it wasn't compiled with", mlog.bold(comp))
|
||||
self.is_found = False
|
||||
return
|
||||
# collect all targets
|
||||
targets = {}
|
||||
for tgt in description['targets']:
|
||||
targets[tgt['rootPackage']] = tgt
|
||||
|
||||
self.version = package['version']
|
||||
self.pkg = package
|
||||
|
||||
if self.pkg['targetFileName'].endswith('.a'):
|
||||
self.static = True
|
||||
|
||||
self.compile_args = []
|
||||
for flag in self.pkg['dflags']:
|
||||
self.link_args.append(flag)
|
||||
for path in self.pkg['importPaths']:
|
||||
self.compile_args.append('-I' + os.path.join(self.pkg['path'], path))
|
||||
|
||||
self.link_args = self.raw_link_args = []
|
||||
for flag in self.pkg['lflags']:
|
||||
self.link_args.append(flag)
|
||||
|
||||
self.link_args.append(os.path.join(self.module_path, self.pkg['targetFileName']))
|
||||
|
||||
# Handle dependencies
|
||||
libs = []
|
||||
|
||||
def add_lib_args(field_name: str, target: T.Dict[str, T.Dict[str, str]]) -> None:
|
||||
if field_name in target['buildSettings']:
|
||||
for lib in target['buildSettings'][field_name]:
|
||||
if lib not in libs:
|
||||
libs.append(lib)
|
||||
if os.name != 'nt':
|
||||
pkgdep = PkgConfigDependency(lib, environment, {'required': 'true', 'silent': 'true'})
|
||||
for arg in pkgdep.get_compile_args():
|
||||
self.compile_args.append(arg)
|
||||
for arg in pkgdep.get_link_args():
|
||||
self.link_args.append(arg)
|
||||
for arg in pkgdep.get_link_args(raw=True):
|
||||
self.raw_link_args.append(arg)
|
||||
|
||||
for target in description['targets']:
|
||||
if target['rootPackage'] in packages:
|
||||
add_lib_args('libs', target)
|
||||
add_lib_args(f'libs-{platform.machine()}', target)
|
||||
for file in target['buildSettings']['linkerFiles']:
|
||||
lib_path = self._find_right_lib_path(file, comp, description)
|
||||
if lib_path:
|
||||
self.link_args.append(lib_path)
|
||||
else:
|
||||
self.is_found = False
|
||||
|
||||
def _find_right_lib_path(self,
|
||||
default_path: str,
|
||||
comp: str,
|
||||
description: T.Dict[str, str],
|
||||
folder_only: bool = False,
|
||||
file_name: str = '') -> T.Optional[str]:
|
||||
module_path = lib_file_name = ''
|
||||
if folder_only:
|
||||
module_path = default_path
|
||||
lib_file_name = file_name
|
||||
else:
|
||||
module_path = os.path.dirname(default_path)
|
||||
lib_file_name = os.path.basename(default_path)
|
||||
module_build_path = os.path.join(module_path, '.dub', 'build')
|
||||
|
||||
# If default_path is a path to lib file and
|
||||
# directory of lib don't have subdir '.dub/build'
|
||||
if not os.path.isdir(module_build_path) and os.path.isfile(default_path):
|
||||
if folder_only:
|
||||
return module_path
|
||||
if not name in targets:
|
||||
self.is_found = False
|
||||
if self.pkg['targetType'] == 'sourceLibrary':
|
||||
# source libraries have no associated targets,
|
||||
# but some build settings like import folders must be found from the package object.
|
||||
# Current algo only get these from "buildSettings" in the target object.
|
||||
# Let's save this for a future PR.
|
||||
# (See openssl DUB package for example of sourceLibrary)
|
||||
mlog.error('DUB targets of type', mlog.bold('sourceLibrary'), 'are not supported.')
|
||||
else:
|
||||
return default_path
|
||||
mlog.error('Could not find target description for', mlog.bold(main_pack_spec))
|
||||
|
||||
# Get D version implemented in the compiler
|
||||
if not self.is_found:
|
||||
mlog.error(f'Could not find {name} in DUB description')
|
||||
return
|
||||
|
||||
# Current impl only supports static libraries
|
||||
self.static = True
|
||||
|
||||
# 2
|
||||
if not find_package_target(self.pkg):
|
||||
self.is_found = False
|
||||
return
|
||||
|
||||
# 3
|
||||
for link_dep in targets[name]['linkDependencies']:
|
||||
pkg = packages[link_dep]
|
||||
if not find_package_target(pkg):
|
||||
self.is_found = False
|
||||
return
|
||||
|
||||
if show_buildtype_warning:
|
||||
mlog.log('If it is not suitable, try the following command and reconfigure Meson with', mlog.bold('--clearcache'))
|
||||
mlog.log(mlog.bold(dub_build_deep_command()))
|
||||
|
||||
# 4
|
||||
bs = targets[name]['buildSettings']
|
||||
|
||||
for flag in bs['dflags']:
|
||||
self.compile_args.append(flag)
|
||||
|
||||
for path in bs['importPaths']:
|
||||
self.compile_args.append('-I' + path)
|
||||
|
||||
for path in bs['stringImportPaths']:
|
||||
if not 'import_dir' in d_feature_args[self.compiler.id]:
|
||||
break
|
||||
flag = d_feature_args[self.compiler.id]['import_dir']
|
||||
self.compile_args.append(f'{flag}={path}')
|
||||
|
||||
for ver in bs['versions']:
|
||||
if not 'version' in d_feature_args[self.compiler.id]:
|
||||
break
|
||||
flag = d_feature_args[self.compiler.id]['version']
|
||||
self.compile_args.append(f'{flag}={ver}')
|
||||
|
||||
if bs['mainSourceFile']:
|
||||
self.compile_args.append(bs['mainSourceFile'])
|
||||
|
||||
# pass static libraries
|
||||
# linkerFiles are added during step 3
|
||||
# for file in bs['linkerFiles']:
|
||||
# self.link_args.append(file)
|
||||
|
||||
for file in bs['sourceFiles']:
|
||||
# sourceFiles may contain static libraries
|
||||
if file.endswith('.lib') or file.endswith('.a'):
|
||||
self.link_args.append(file)
|
||||
|
||||
for flag in bs['lflags']:
|
||||
self.link_args.append(flag)
|
||||
|
||||
is_windows = self.env.machines.host.is_windows()
|
||||
if is_windows:
|
||||
winlibs = ['kernel32', 'user32', 'gdi32', 'winspool', 'shell32', 'ole32',
|
||||
'oleaut32', 'uuid', 'comdlg32', 'advapi32', 'ws2_32']
|
||||
|
||||
for lib in bs['libs']:
|
||||
if os.name != 'nt':
|
||||
# trying to add system libraries by pkg-config
|
||||
pkgdep = PkgConfigDependency(lib, environment, {'required': 'true', 'silent': 'true'})
|
||||
if pkgdep.is_found:
|
||||
for arg in pkgdep.get_compile_args():
|
||||
self.compile_args.append(arg)
|
||||
for arg in pkgdep.get_link_args():
|
||||
self.link_args.append(arg)
|
||||
for arg in pkgdep.get_link_args(raw=True):
|
||||
self.raw_link_args.append(arg)
|
||||
continue
|
||||
|
||||
if is_windows and lib in winlibs:
|
||||
self.link_args.append(lib + '.lib')
|
||||
continue
|
||||
|
||||
# fallback
|
||||
self.link_args.append('-l'+lib)
|
||||
|
||||
# This function finds the target of the provided JSON package, built for the right
|
||||
# compiler, architecture, configuration...
|
||||
# It returns (target|None, {compatibilities})
|
||||
# If None is returned for target, compatibilities will list what other targets were found without full compatibility
|
||||
def _find_compatible_package_target(self, jdesc: T.Dict[str, str], jpack: T.Dict[str, str], dub_comp_id: str) -> T.Tuple[str, T.Set[str]]:
|
||||
dub_build_path = os.path.join(jpack['path'], '.dub', 'build')
|
||||
|
||||
if not os.path.exists(dub_build_path):
|
||||
return (None, None)
|
||||
|
||||
# try to find a dir like library-debug-linux.posix-x86_64-ldc_2081-EF934983A3319F8F8FF2F0E107A363BA
|
||||
|
||||
# fields are:
|
||||
# - configuration
|
||||
# - build type
|
||||
# - platform
|
||||
# - architecture
|
||||
# - compiler id (dmd, ldc, gdc)
|
||||
# - compiler version or frontend id or frontend version?
|
||||
|
||||
conf = jpack['configuration']
|
||||
build_type = jdesc['buildType']
|
||||
platforms = jdesc['platform']
|
||||
archs = jdesc['architecture']
|
||||
|
||||
# Get D frontend version implemented in the compiler, or the compiler version itself
|
||||
# gdc doesn't support this
|
||||
ret, res = self._call_dubbin(['--version'])
|
||||
comp_versions = []
|
||||
|
||||
if ret != 0:
|
||||
mlog.error('Failed to run {!r}', mlog.bold(comp))
|
||||
return None
|
||||
if dub_comp_id != 'gdc':
|
||||
comp_versions.append(self.compiler.version)
|
||||
|
||||
d_ver_reg = re.search('v[0-9].[0-9][0-9][0-9].[0-9]', res) # Ex.: v2.081.2
|
||||
if d_ver_reg is not None:
|
||||
d_ver = d_ver_reg.group().rsplit('.', 1)[0].replace('v', '').replace('.', '') # Fix structure. Ex.: 2081
|
||||
else:
|
||||
d_ver = '' # gdc
|
||||
ret, res = self._call_compbin(['--version'])[0:2]
|
||||
if ret != 0:
|
||||
mlog.error('Failed to run {!r}', mlog.bold(dub_comp_id))
|
||||
return (None, None)
|
||||
d_ver_reg = re.search('v[0-9].[0-9][0-9][0-9].[0-9]', res) # Ex.: v2.081.2
|
||||
|
||||
if not os.path.isdir(module_build_path):
|
||||
return ''
|
||||
if d_ver_reg is not None:
|
||||
frontend_version = d_ver_reg.group()
|
||||
frontend_id = frontend_version.rsplit('.', 1)[0].replace('v', '').replace('.', '') # Fix structure. Ex.: 2081
|
||||
comp_versions.extend([frontend_version, frontend_id])
|
||||
|
||||
# Ex.: library-debug-linux.posix-x86_64-ldc_2081-EF934983A3319F8F8FF2F0E107A363BA
|
||||
build_name = '-{}-{}-{}-{}_{}'.format(description['buildType'], '.'.join(description['platform']), '.'.join(description['architecture']), comp, d_ver)
|
||||
for entry in os.listdir(module_build_path):
|
||||
if build_name in entry:
|
||||
for file in os.listdir(os.path.join(module_build_path, entry)):
|
||||
if file == lib_file_name:
|
||||
if folder_only:
|
||||
return os.path.join(module_build_path, entry)
|
||||
else:
|
||||
return os.path.join(module_build_path, entry, lib_file_name)
|
||||
compatibilities: T.Set[str] = set()
|
||||
|
||||
return ''
|
||||
# build_type is not in check_list because different build types might be compatible.
|
||||
# We do show a WARNING that the build type is not the same.
|
||||
# It might be critical in release builds, and acceptable otherwise
|
||||
check_list = ('configuration', 'platform', 'arch', 'compiler', 'compiler_version')
|
||||
|
||||
def _call_dubbin(self, args: T.List[str], env: T.Optional[T.Dict[str, str]] = None) -> T.Tuple[int, str]:
|
||||
for entry in os.listdir(dub_build_path):
|
||||
|
||||
target = os.path.join(dub_build_path, entry, jpack['targetFileName'])
|
||||
if not os.path.exists(target):
|
||||
# unless Dub and Meson are racing, the target file should be present
|
||||
# when the directory is present
|
||||
mlog.debug("WARNING: Could not find a Dub target: " + target)
|
||||
continue
|
||||
|
||||
# we build a new set for each entry, because if this target is returned
|
||||
# we want to return only the compatibilities associated to this target
|
||||
# otherwise we could miss the WARNING about build_type
|
||||
comps = set()
|
||||
|
||||
if conf in entry:
|
||||
comps.add('configuration')
|
||||
|
||||
if build_type in entry:
|
||||
comps.add('build_type')
|
||||
|
||||
if all(platform in entry for platform in platforms):
|
||||
comps.add('platform')
|
||||
|
||||
if all(arch in entry for arch in archs):
|
||||
comps.add('arch')
|
||||
|
||||
if dub_comp_id in entry:
|
||||
comps.add('compiler')
|
||||
|
||||
if dub_comp_id == 'gdc' or any(cv in entry for cv in comp_versions):
|
||||
comps.add('compiler_version')
|
||||
|
||||
if all(key in comps for key in check_list):
|
||||
return (target, comps)
|
||||
else:
|
||||
compatibilities = set.union(compatibilities, comps)
|
||||
|
||||
return (None, compatibilities)
|
||||
|
||||
|
||||
def _call_dubbin(self, args: T.List[str], env: T.Optional[T.Dict[str, str]] = None) -> T.Tuple[int, str, str]:
|
||||
assert isinstance(self.dubbin, ExternalProgram)
|
||||
p, out = Popen_safe(self.dubbin.get_command() + args, env=env)[0:2]
|
||||
return p.returncode, out.strip()
|
||||
p, out, err = Popen_safe(self.dubbin.get_command() + args, env=env)
|
||||
return p.returncode, out.strip(), err.strip()
|
||||
|
||||
def _call_copmbin(self, args: T.List[str], env: T.Optional[T.Dict[str, str]] = None) -> T.Tuple[int, str]:
|
||||
p, out = Popen_safe(self.compiler.get_exelist() + args, env=env)[0:2]
|
||||
return p.returncode, out.strip()
|
||||
def _call_compbin(self, args: T.List[str], env: T.Optional[T.Dict[str, str]] = None) -> T.Tuple[int, str, str]:
|
||||
p, out, err = Popen_safe(self.compiler.get_exelist() + args, env=env)
|
||||
return p.returncode, out.strip(), err.strip()
|
||||
|
||||
def _check_dub(self) -> T.Union[bool, ExternalProgram]:
|
||||
dubbin: T.Union[bool, ExternalProgram] = ExternalProgram('dub', silent=True)
|
||||
|
|
Loading…
Reference in New Issue