Merge pull request #4327 from xclaesse/wrap
wrap: Support using local files instead of downloading
Commit 066060e8c9
@@ -45,6 +45,11 @@ If you then use this subproject in your build, Meson will
automatically download and extract it during build. This makes
subproject embedding extremely easy.

Since *0.49.0* if `source_filename` is found in project's
`subprojects/packagecache` directory, it will be used instead of downloading the
source, even if `wrap-mode` option is set to `nodownload`. The file's hash will
be checked.
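For illustration, a wrap file that could take advantage of this cache might look like the sketch below; the project name, URL, and hash are made-up placeholders, not values taken from this change:

    [wrap-file]
    directory = foolib-1.2.3
    source_url = https://example.com/releases/foolib-1.2.3.tar.gz
    source_filename = foolib-1.2.3.tar.gz
    source_hash = <sha256 of foolib-1.2.3.tar.gz>

With such a wrap file, dropping a tarball named foolib-1.2.3.tar.gz into `subprojects/packagecache/` should let the subproject be set up without network access, as long as its SHA-256 digest matches `source_hash`.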

Unfortunately most software projects in the world do not build with
Meson. Because of this Meson allows you to specify a patch URL. This
works in much the same way as Debian's distro patches. That is, they
@@ -76,6 +81,11 @@ thousands of lines of code. Once you have a working build definition,
just zip up the Meson build files (and others you have changed) and
put them somewhere where you can download them.

Since *0.49.0* if `patch_filename` is found in project's
`subprojects/packagecache` directory, it will be used instead of downloading the
patch, even if `wrap-mode` option is set to `nodownload`. The file's hash will
be checked.
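As a rough sketch of how the hash recorded in a wrap file can be computed, the following standalone Python snippet performs the same kind of SHA-256 check as the new `check_hash()` helper in this change; the file path is only an example:

    import hashlib

    def sha256_of(path, blocksize=64 * 1024):
        """Return the hex SHA-256 digest of a file, reading it in chunks."""
        h = hashlib.sha256()
        with open(path, 'rb') as f:
            for chunk in iter(lambda: f.read(blocksize), b''):
                h.update(chunk)
        return h.hexdigest()

    # Value to paste into source_hash / patch_hash in the .wrap file:
    print(sha256_of('subprojects/packagecache/foolib-1.2.3.tar.gz'))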

## Branching subprojects directly from git

The above mentioned scheme assumes that your subproject is working off
@@ -2261,19 +2261,21 @@ external dependencies (including libraries) must go to "dependencies".''')
        r = wrap.Resolver(subproject_dir_abs, self.coredata.get_builtin_option('wrap_mode'))
        try:
            resolved = r.resolve(dirname)
        except RuntimeError as e:
            # if the reason subproject execution failed was because
            # the directory doesn't exist, try to give some helpful
            # advice if it's a nested subproject that needs
            # promotion...
            self.print_nested_info(dirname)
        except wrap.WrapException as e:
            subprojdir = os.path.join(self.subproject_dir, r.directory)
            if not required:
                mlog.log('\nSubproject ', mlog.bold(subprojdir), 'is buildable:', mlog.red('NO'), '(disabling)\n')
                return self.disabled_subproject(dirname)

            if required:
                msg = 'Subproject directory {!r} does not exist and cannot be downloaded:\n{}'
                raise InterpreterException(msg.format(os.path.join(self.subproject_dir, dirname), e))
            if isinstance(e, wrap.WrapNotFoundException):
                # if the reason subproject execution failed was because
                # the directory doesn't exist, try to give some helpful
                # advice if it's a nested subproject that needs
                # promotion...
                self.print_nested_info(dirname)

            mlog.log('\nSubproject ', mlog.bold(dirname), 'is buildable:', mlog.red('NO'), '(disabling)\n')
            return self.disabled_subproject(dirname)
            msg = 'Failed to initialize {!r}:\n{}'
            raise InterpreterException(msg.format(subprojdir, e))

        subdir = os.path.join(self.subproject_dir, resolved)
        os.makedirs(os.path.join(self.build.environment.get_build_dir(), subdir), exist_ok=True)
@@ -2979,26 +2981,21 @@ external dependencies (including libraries) must go to "dependencies".''')
            return Disabler()

    def print_nested_info(self, dependency_name):
        message_templ = '''\nDependency %s not found but it is available in a sub-subproject.
To use it in the current project, promote it by going in the project source
root and issuing %s.

'''
        message = ['Dependency', mlog.bold(dependency_name), 'not found but it is available in a sub-subproject.\n' +
                   'To use it in the current project, promote it by going in the project source\n'
                   'root and issuing']
        sprojs = mesonlib.detect_subprojects('subprojects', self.source_root)
        if dependency_name not in sprojs:
            return
        found = sprojs[dependency_name]
        if len(found) > 1:
            suffix = 'one of the following commands'
            message.append('one of the following commands:')
        else:
            suffix = 'the following command'
        message = message_templ % (dependency_name, suffix)
        cmds = []
        command_templ = 'meson wrap promote '
            message.append('the following command:')
        command_templ = '\nmeson wrap promote {}'
        for l in found:
            cmds.append(command_templ + l[len(self.source_root) + 1:])
        final_message = message + '\n'.join(cmds)
        print(final_message)
            message.append(mlog.bold(command_templ.format(l[len(self.source_root) + 1:])))
        mlog.warning(*message)

    def get_subproject_infos(self, kwargs):
        fbinfo = kwargs['fallback']
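The hunk above reroutes the promotion hint from a bare `print()` to `mlog.warning()` and builds the suggested command with the `'\nmeson wrap promote {}'` template. For context, `meson wrap promote` is Meson's existing helper for copying a nested wrap up into the top-level `subprojects/` directory; a hypothetical invocation (the path is a placeholder, not taken from the diff) would look like:

    meson wrap promote subprojects/foo/subprojects/bar.wrap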
@@ -24,7 +24,6 @@ from . import environment, interpreter, mesonlib
from . import build
from . import mlog, coredata
from .mesonlib import MesonException
from .wrap import WrapMode

def add_arguments(parser):
    coredata.register_builtin_arguments(parser)
@@ -43,14 +42,6 @@ def add_arguments(parser):
    parser.add_argument('builddir', nargs='?', default=None)
    parser.add_argument('sourcedir', nargs='?', default=None)

def wrapmodetype(string):
    try:
        return getattr(WrapMode, string)
    except AttributeError:
        msg = ', '.join([t.name.lower() for t in WrapMode])
        msg = 'invalid argument {!r}, use one of {}'.format(string, msg)
        raise argparse.ArgumentTypeError(msg)

class MesonApp:
    def __init__(self, options):
        (self.source_dir, self.build_dir) = self.validate_dirs(options.builddir,
@@ -17,9 +17,9 @@ import contextlib
import urllib.request, os, hashlib, shutil, tempfile, stat
import subprocess
import sys
from pathlib import Path
import configparser
from . import WrapMode
from ..mesonlib import Popen_safe
from ..mesonlib import MesonException

try:
    import ssl
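The new `configparser` import supports the rewrite in the following hunk, where the hand-rolled wrap parsing in `PackageDefinition` is replaced by ordinary INI parsing. A minimal standalone sketch of that idea, with a made-up wrap file path:

    import configparser

    # Parse a wrap file in the same general way the new PackageDefinition does
    # (the path here is a placeholder, not taken from the diff).
    config = configparser.ConfigParser(interpolation=None)
    config.read('subprojects/foolib.wrap')
    section = config.sections()[0]        # e.g. 'wrap-file' or 'wrap-git'
    values = dict(config[section])
    print(section[5:], values.get('source_filename'))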
@@ -67,206 +67,179 @@ def open_wrapdburl(urlstring):
        urlstring = 'http' + urlstring[5:]
    return urllib.request.urlopen(urlstring, timeout=req_timeout)

class WrapException(MesonException):
    pass

class WrapNotFoundException(WrapException):
    pass

class PackageDefinition:
    def __init__(self, fname):
        self.values = {}
        with open(fname) as ifile:
            first = ifile.readline().strip()

            if first == '[wrap-file]':
                self.type = 'file'
            elif first == '[wrap-git]':
                self.type = 'git'
            elif first == '[wrap-hg]':
                self.type = 'hg'
            elif first == '[wrap-svn]':
                self.type = 'svn'
            else:
                raise RuntimeError('Invalid format of package file')
            for line in ifile:
                line = line.strip()
                if line == '':
                    continue
                (k, v) = line.split('=', 1)
                k = k.strip()
                v = v.strip()
                self.values[k] = v
        self.basename = os.path.basename(fname)
        try:
            self.config = configparser.ConfigParser(interpolation=None)
            self.config.read(fname)
        except:
            raise WrapException('Failed to parse {}'.format(self.basename))
        if len(self.config.sections()) < 1:
            raise WrapException('Missing sections in {}'.format(self.basename))
        self.wrap_section = self.config.sections()[0]
        if not self.wrap_section.startswith('wrap-'):
            m = '{!r} is not a valid first section in {}'
            raise WrapException(m.format(self.wrap_section, self.basename))
        self.type = self.wrap_section[5:]
        self.values = dict(self.config[self.wrap_section])

    def get(self, key):
        return self.values[key]
        try:
            return self.values[key]
        except KeyError:
            m = 'Missing key {!r} in {}'
            raise WrapException(m.format(key, self.basename))

    def has_patch(self):
        return 'patch_url' in self.values

class Resolver:
    def __init__(self, subdir_root, wrap_mode=WrapMode(1)):
    def __init__(self, subdir_root, wrap_mode=WrapMode.default):
        self.wrap_mode = wrap_mode
        self.subdir_root = subdir_root
        self.cachedir = os.path.join(self.subdir_root, 'packagecache')

    def resolve(self, packagename):
        # Check if the directory is already resolved
        dirname = Path(os.path.join(self.subdir_root, packagename))
        subprojdir = os.path.join(*dirname.parts[-2:])
        if dirname.is_dir():
            if (dirname / 'meson.build').is_file():
                # The directory is there and has meson.build? Great, use it.
                return packagename
            # Is the dir not empty and also not a git submodule dir that is
            # not checkout properly? Can't do anything, exception!
            elif next(dirname.iterdir(), None) and not (dirname / '.git').is_file():
                m = '{!r} is not empty and has no meson.build files'
                raise RuntimeError(m.format(subprojdir))
        elif dirname.exists():
            m = '{!r} already exists and is not a dir; cannot use as subproject'
            raise RuntimeError(m.format(subprojdir))
        self.packagename = packagename
        self.directory = packagename
        # We always have to load the wrap file, if it exists, because it could
        # override the default directory name.
        self.wrap = self.load_wrap()
        if self.wrap and 'directory' in self.wrap.values:
            self.directory = self.wrap.get('directory')
        self.dirname = os.path.join(self.subdir_root, self.directory)
        meson_file = os.path.join(self.dirname, 'meson.build')

        # The directory is there and has meson.build? Great, use it.
        if os.path.exists(meson_file):
            return self.directory

        dirname = str(dirname)
        # Check if the subproject is a git submodule
        if self.resolve_git_submodule(dirname):
            return packagename
        self.resolve_git_submodule()

        if os.path.exists(self.dirname):
            if not os.path.isdir(self.dirname):
                raise WrapException('Path already exists but is not a directory')
        else:
            # A wrap file is required to download
            if not self.wrap:
                m = 'Subproject directory not found and {}.wrap file not found'
                raise WrapNotFoundException(m.format(self.packagename))

            if self.wrap.type == 'file':
                self.get_file()
            else:
                self.check_can_download()
                if self.wrap.type == 'git':
                    self.get_git()
                elif self.wrap.type == "hg":
                    self.get_hg()
                elif self.wrap.type == "svn":
                    self.get_svn()
                else:
                    raise WrapException('Unknown wrap type {!r}'.format(self.wrap.type))

        # A meson.build file is required in the directory
        if not os.path.exists(meson_file):
            raise WrapException('Subproject exists but has no meson.build file')

        return self.directory

    def load_wrap(self):
        fname = os.path.join(self.subdir_root, self.packagename + '.wrap')
        if os.path.isfile(fname):
            return PackageDefinition(fname)
        return None

    def check_can_download(self):
        # Don't download subproject data based on wrap file if requested.
        # Git submodules are ok (see above)!
        if self.wrap_mode is WrapMode.nodownload:
            m = 'Automatic wrap-based subproject downloading is disabled'
            raise RuntimeError(m)
            raise WrapException(m)

        # Check if there's a .wrap file for this subproject
        fname = os.path.join(self.subdir_root, packagename + '.wrap')
        if not os.path.isfile(fname):
            # No wrap file with this name? Give up.
            m = 'No {}.wrap found for {!r}'
            raise RuntimeError(m.format(packagename, subprojdir))
        p = PackageDefinition(fname)
        if p.type == 'file':
            if not os.path.isdir(self.cachedir):
                os.mkdir(self.cachedir)
            self.download(p, packagename)
            self.extract_package(p)
        elif p.type == 'git':
            self.get_git(p)
        elif p.type == "hg":
            self.get_hg(p)
        elif p.type == "svn":
            self.get_svn(p)
        else:
            raise AssertionError('Unreachable code.')
        return p.get('directory')

    def resolve_git_submodule(self, dirname):
    def resolve_git_submodule(self):
        # Are we in a git repository?
        ret, out = quiet_git(['rev-parse'], self.subdir_root)
        if not ret:
            return False
        # Is `dirname` a submodule?
        ret, out = quiet_git(['submodule', 'status', dirname], self.subdir_root)
        ret, out = quiet_git(['submodule', 'status', self.dirname], self.subdir_root)
        if not ret:
            return False
        # Submodule has not been added, add it
        if out.startswith(b'+'):
            mlog.warning('git submodule {} might be out of date'.format(dirname))
            mlog.warning('git submodule might be out of date')
            return True
        elif out.startswith(b'U'):
            raise RuntimeError('submodule {} has merge conflicts'.format(dirname))
            raise WrapException('git submodule has merge conflicts')
        # Submodule exists, but is deinitialized or wasn't initialized
        elif out.startswith(b'-'):
            if subprocess.call(['git', '-C', self.subdir_root, 'submodule', 'update', '--init', dirname]) == 0:
            if subprocess.call(['git', '-C', self.subdir_root, 'submodule', 'update', '--init', self.dirname]) == 0:
                return True
            raise RuntimeError('Failed to git submodule init {!r}'.format(dirname))
            raise WrapException('git submodule failed to init')
        # Submodule looks fine, but maybe it wasn't populated properly. Do a checkout.
        elif out.startswith(b' '):
            subprocess.call(['git', 'checkout', '.'], cwd=dirname)
            subprocess.call(['git', 'checkout', '.'], cwd=self.dirname)
            # Even if checkout failed, try building it anyway and let the user
            # handle any problems manually.
            return True
        elif out == b'':
            # It is not a submodule, just a folder that exists in the main repository.
            return False
        m = 'Unknown git submodule output: {!r}'
        raise RuntimeError(m.format(out))
        raise WrapException(m.format(out))

    def get_git(self, p):
        checkoutdir = os.path.join(self.subdir_root, p.get('directory'))
        revno = p.get('revision')
        is_there = os.path.isdir(checkoutdir)
        if is_there:
            try:
                subprocess.check_call(['git', 'rev-parse'], cwd=checkoutdir)
            except subprocess.CalledProcessError:
                raise RuntimeError('%s is not empty but is not a valid '
                                   'git repository, we can not work with it'
                                   ' as a subproject directory.' % (
                                       checkoutdir))
    def get_file(self):
        path = self.get_file_internal(self.wrap, 'source')
        extract_dir = self.subdir_root
        # Some upstreams ship packages that do not have a leading directory.
        # Create one for them.
        if 'lead_directory_missing' in self.wrap.values:
            os.mkdir(self.dirname)
            extract_dir = self.dirname
        shutil.unpack_archive(path, extract_dir)
        if self.wrap.has_patch():
            self.apply_patch()

            if revno.lower() == 'head':
                # Failure to do pull is not a fatal error,
                # because otherwise you can't develop without
                # a working net connection.
                subprocess.call(['git', 'pull'], cwd=checkoutdir)
            else:
                if subprocess.call(['git', 'checkout', revno], cwd=checkoutdir) != 0:
                    subprocess.check_call(['git', 'fetch', p.get('url'), revno], cwd=checkoutdir)
                    subprocess.check_call(['git', 'checkout', revno],
                                          cwd=checkoutdir)
    def get_git(self):
        revno = self.wrap.get('revision')
        if self.wrap.values.get('clone-recursive', '').lower() == 'true':
            subprocess.check_call(['git', 'clone', '--recursive', self.wrap.get('url'),
                                   self.directory], cwd=self.subdir_root)
        else:
            if p.values.get('clone-recursive', '').lower() == 'true':
                subprocess.check_call(['git', 'clone', '--recursive', p.get('url'),
                                       p.get('directory')], cwd=self.subdir_root)
            else:
                subprocess.check_call(['git', 'clone', p.get('url'),
                                       p.get('directory')], cwd=self.subdir_root)
            if revno.lower() != 'head':
                if subprocess.call(['git', 'checkout', revno], cwd=checkoutdir) != 0:
                    subprocess.check_call(['git', 'fetch', p.get('url'), revno], cwd=checkoutdir)
                    subprocess.check_call(['git', 'checkout', revno],
                                          cwd=checkoutdir)
            push_url = p.values.get('push-url')
            if push_url:
                subprocess.check_call(['git', 'remote', 'set-url',
                                       '--push', 'origin', push_url],
                                      cwd=checkoutdir)
            subprocess.check_call(['git', 'clone', self.wrap.get('url'),
                                   self.directory], cwd=self.subdir_root)
        if revno.lower() != 'head':
            if subprocess.call(['git', 'checkout', revno], cwd=self.dirname) != 0:
                subprocess.check_call(['git', 'fetch', self.wrap.get('url'), revno], cwd=self.dirname)
                subprocess.check_call(['git', 'checkout', revno],
                                      cwd=self.dirname)
        push_url = self.wrap.values.get('push-url')
        if push_url:
            subprocess.check_call(['git', 'remote', 'set-url',
                                   '--push', 'origin', push_url],
                                  cwd=self.dirname)

    def get_hg(self, p):
        checkoutdir = os.path.join(self.subdir_root, p.get('directory'))
        revno = p.get('revision')
        is_there = os.path.isdir(checkoutdir)
        if is_there:
            if revno.lower() == 'tip':
                # Failure to do pull is not a fatal error,
                # because otherwise you can't develop without
                # a working net connection.
                subprocess.call(['hg', 'pull'], cwd=checkoutdir)
            else:
                if subprocess.call(['hg', 'checkout', revno], cwd=checkoutdir) != 0:
                    subprocess.check_call(['hg', 'pull'], cwd=checkoutdir)
                    subprocess.check_call(['hg', 'checkout', revno],
                                          cwd=checkoutdir)
        else:
            subprocess.check_call(['hg', 'clone', p.get('url'),
                                   p.get('directory')], cwd=self.subdir_root)
            if revno.lower() != 'tip':
                subprocess.check_call(['hg', 'checkout', revno],
                                      cwd=checkoutdir)
    def get_hg(self):
        revno = self.wrap.get('revision')
        subprocess.check_call(['hg', 'clone', self.wrap.get('url'),
                               self.directory], cwd=self.subdir_root)
        if revno.lower() != 'tip':
            subprocess.check_call(['hg', 'checkout', revno],
                                  cwd=self.dirname)

    def get_svn(self, p):
        checkoutdir = os.path.join(self.subdir_root, p.get('directory'))
        revno = p.get('revision')
        is_there = os.path.isdir(checkoutdir)
        if is_there:
            p, out = Popen_safe(['svn', 'info', '--show-item', 'revision', checkoutdir])
            current_revno = out
            if current_revno == revno:
                return

            if revno.lower() == 'head':
                # Failure to do pull is not a fatal error,
                # because otherwise you can't develop without
                # a working net connection.
                subprocess.call(['svn', 'update'], cwd=checkoutdir)
            else:
                subprocess.check_call(['svn', 'update', '-r', revno], cwd=checkoutdir)
        else:
            subprocess.check_call(['svn', 'checkout', '-r', revno, p.get('url'),
                                   p.get('directory')], cwd=self.subdir_root)
    def get_svn(self):
        revno = self.wrap.get('revision')
        subprocess.check_call(['svn', 'checkout', '-r', revno, self.wrap.get('url'),
                               self.directory], cwd=self.subdir_root)

    def get_data(self, url):
        blocksize = 10 * 1024
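To make the refactored control flow concrete, here is a minimal, hypothetical driver for the new `Resolver`; it assumes a checkout of Meson at this revision is importable and that a `subprojects/` directory containing a `foolib.wrap` exists, neither of which comes from the diff itself:

    import os
    from mesonbuild.wrap import wrap, WrapMode

    # Resolve (or download and extract) the 'foolib' subproject under ./subprojects.
    resolver = wrap.Resolver(os.path.abspath('subprojects'), WrapMode.default)
    try:
        subdir = resolver.resolve('foolib')   # returns the subproject directory name
        print('subproject ready in subprojects/' + subdir)
    except wrap.WrapNotFoundException:
        print('no foolib directory and no foolib.wrap file')
    except wrap.WrapException as e:
        print('could not set up foolib:', e)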
@@ -312,41 +285,48 @@ class Resolver:
        hashvalue = h.hexdigest()
        return hashvalue, tmpfile.name

    def get_hash(self, data):
    def check_hash(self, what, path):
        expected = self.wrap.get(what + '_hash')
        h = hashlib.sha256()
        h.update(data)
        hashvalue = h.hexdigest()
        return hashvalue
        with open(path, 'rb') as f:
            h.update(f.read())
        dhash = h.hexdigest()
        if dhash != expected:
            raise WrapException('Incorrect hash for %s:\n %s expected\n %s actual.' % (what, expected, dhash))

    def download(self, p, packagename):
        ofname = os.path.join(self.cachedir, p.get('source_filename'))
        if os.path.exists(ofname):
            mlog.log('Using', mlog.bold(packagename), 'from cache.')
        else:
            srcurl = p.get('source_url')
            mlog.log('Downloading', mlog.bold(packagename), 'from', mlog.bold(srcurl))
            dhash, tmpfile = self.get_data(srcurl)
            expected = p.get('source_hash')
            if dhash != expected:
                os.remove(tmpfile)
                raise RuntimeError('Incorrect hash for source %s:\n %s expected\n %s actual.' % (packagename, expected, dhash))
            os.rename(tmpfile, ofname)
        if p.has_patch():
            patch_filename = p.get('patch_filename')
            filename = os.path.join(self.cachedir, patch_filename)
            if os.path.exists(filename):
                mlog.log('Using', mlog.bold(patch_filename), 'from cache.')
            else:
                purl = p.get('patch_url')
                mlog.log('Downloading patch from', mlog.bold(purl))
                phash, tmpfile = self.get_data(purl)
                expected = p.get('patch_hash')
                if phash != expected:
                    os.remove(tmpfile)
                    raise RuntimeError('Incorrect hash for patch %s:\n %s expected\n %s actual' % (packagename, expected, phash))
                os.rename(tmpfile, filename)
        else:
            mlog.log('Package does not require patch.')
    def download(self, what, ofname):
        self.check_can_download()
        srcurl = self.wrap.get(what + '_url')
        mlog.log('Downloading', mlog.bold(self.packagename), what, 'from', mlog.bold(srcurl))
        dhash, tmpfile = self.get_data(srcurl)
        expected = self.wrap.get(what + '_hash')
        if dhash != expected:
            os.remove(tmpfile)
            raise WrapException('Incorrect hash for %s:\n %s expected\n %s actual.' % (what, expected, dhash))
        os.rename(tmpfile, ofname)

    def get_file_internal(self, what):
        filename = self.wrap.get(what + '_filename')
        cache_path = os.path.join(self.cachedir, filename)

        if os.path.exists(cache_path):
            self.check_hash(what, cache_path)
            mlog.log('Using', mlog.bold(self.packagename), what, 'from cache.')
            return cache_path

        if not os.path.isdir(self.cachedir):
            os.mkdir(self.cachedir)
        self.download(what, cache_path)
        return cache_path

    def apply_patch(self, p):
        path = self.get_file_internal(p, 'patch')
        try:
            shutil.unpack_archive(path, self.subdir_root)
        except Exception:
            with tempfile.TemporaryDirectory() as workdir:
                shutil.unpack_archive(path, workdir)
                self.copy_tree(workdir, self.subdir_root)

    def copy_tree(self, root_src_dir, root_dst_dir):
        """
@@ -366,36 +346,3 @@ class Resolver:
                        os.chmod(dst_file, stat.S_IWUSR)
                        os.remove(dst_file)
                shutil.copy2(src_file, dst_dir)

    def extract_package(self, package):
        if sys.version_info < (3, 5):
            try:
                import lzma  # noqa: F401
                del lzma
            except ImportError:
                pass
            else:
                try:
                    shutil.register_unpack_format('xztar', ['.tar.xz', '.txz'], shutil._unpack_tarfile, [], "xz'ed tar-file")
                except shutil.RegistryError:
                    pass
        target_dir = os.path.join(self.subdir_root, package.get('directory'))
        if os.path.isdir(target_dir):
            return
        extract_dir = self.subdir_root
        # Some upstreams ship packages that do not have a leading directory.
        # Create one for them.
        try:
            package.get('lead_directory_missing')
            os.mkdir(target_dir)
            extract_dir = target_dir
        except KeyError:
            pass
        shutil.unpack_archive(os.path.join(self.cachedir, package.get('source_filename')), extract_dir)
        if package.has_patch():
            try:
                shutil.unpack_archive(os.path.join(self.cachedir, package.get('patch_filename')), self.subdir_root)
            except Exception:
                with tempfile.TemporaryDirectory() as workdir:
                    shutil.unpack_archive(os.path.join(self.cachedir, package.get('patch_filename')), workdir)
                    self.copy_tree(workdir, self.subdir_root)
@@ -1,4 +1,6 @@
project('mainproj', 'c')
project('mainproj', 'c',
  default_options : ['wrap_mode=nodownload'],
)

subproject('zlib')
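The updated test project forces `wrap_mode=nodownload`, so it can only succeed when the zlib source archive is already present in `subprojects/packagecache`. A rough sketch of reproducing that situation by hand, with illustrative file names and paths rather than the ones the test actually uses, might be:

    # Pre-seed the cache, then configure; wrap_mode=nodownload comes from default_options.
    mkdir -p subprojects/packagecache
    cp ~/Downloads/zlib-1.2.11.tar.gz subprojects/packagecache/
    meson setup builddir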