Merge pull request #7607 from bonzini/speedup
Various speedups from profiling QEMU's meson.build
commit e9a71ebf60
@@ -119,7 +119,7 @@ class CompilerArgs(collections.abc.MutableSequence):
    # This correctly deduplicates the entries after _can_dedup definition
    # Note: This function is designed to work without delete operations, as deletions are worsening the performance a lot.
    def flush_pre_post(self) -> None:
        pre_flush = collections.deque() # type: T.Deque[str]
        new = list() # type: T.List[str]
        pre_flush_set = set() # type: T.Set[str]
        post_flush = collections.deque() # type: T.Deque[str]
        post_flush_set = set() # type: T.Set[str]
@@ -128,7 +128,7 @@ class CompilerArgs(collections.abc.MutableSequence):
        for a in self.pre:
            dedup = self._can_dedup(a)
            if a not in pre_flush_set:
                pre_flush.append(a)
                new.append(a)
                if dedup is Dedup.OVERRIDDEN:
                    pre_flush_set.add(a)
        for a in reversed(self.post):
@@ -140,12 +140,15 @@ class CompilerArgs(collections.abc.MutableSequence):

        #pre and post will overwrite every element that is in the container
        #only copy over args that are in _container but not in the post flush or pre flush set

        if pre_flush_set or post_flush_set:
            for a in self._container:
                if a not in post_flush_set and a not in pre_flush_set:
                    pre_flush.append(a)
                    new.append(a)
        else:
            new.extend(self._container)
        new.extend(post_flush)

        self._container = list(pre_flush) + list(post_flush)
        self._container = new
        self.pre.clear()
        self.post.clear()
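The hunks above replace in-place deletion of duplicate arguments with a single forward pass that appends into a fresh list and records overriding entries in a set. A minimal standalone sketch of that pattern (the names below are illustrative, not Meson's):

# Sketch: deduplicate by appending into a new list with a membership set,
# which is O(n) overall, instead of deleting from the existing list, which is O(n^2).
def dedup_keep_first(args):
    seen = set()      # arguments that override any later duplicates
    result = []
    for a in args:
        if a not in seen:
            result.append(a)
            seen.add(a)
    return result

assert dedup_keep_first(['-O2', '-g', '-O2']) == ['-O2', '-g']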
@@ -114,13 +114,17 @@ rsp_threshold = get_rsp_threshold()
# from, etc.), so it must not be shell quoted.
raw_names = {'DEPFILE_UNQUOTED', 'DESC', 'pool', 'description', 'targetdep'}

NINJA_QUOTE_BUILD_PAT = re.compile(r"[$ :\n]")
NINJA_QUOTE_VAR_PAT = re.compile(r"[$ \n]")

def ninja_quote(text, is_build_line=False):
    if is_build_line:
        qcs = ('$', ' ', ':')
        quote_re = NINJA_QUOTE_BUILD_PAT
    else:
        qcs = ('$', ' ')
    for char in qcs:
        text = text.replace(char, '$' + char)
        quote_re = NINJA_QUOTE_VAR_PAT
    # Fast path for when no quoting is necessary
    if not quote_re.search(text):
        return text
    if '\n' in text:
        errmsg = '''Ninja does not support newlines in rules. The content was:
@@ -128,7 +132,7 @@ def ninja_quote(text, is_build_line=False):

Please report this error with a test case to the Meson bug tracker.'''.format(text)
        raise MesonException(errmsg)
    return text
    return quote_re.sub(r'$\g<0>', text)

@unique
class Quoting(Enum):
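The ninja_quote changes hoist the two regexes to module level and return early when no special character is present, so the common case does a single search() instead of several replace() calls. A rough standalone sketch of the same fast-path idea (QUOTE_PAT and quote below are stand-ins, not Meson's API):

import re

QUOTE_PAT = re.compile(r"[$ :\n]")   # compiled once at import time

def quote(text):
    if not QUOTE_PAT.search(text):   # fast path: nothing to escape
        return text
    if '\n' in text:
        raise ValueError('newlines cannot be represented')
    return QUOTE_PAT.sub(r'$\g<0>', text)   # prefix each special char with '$'

assert quote('plain.o') == 'plain.o'
assert quote('a b:c') == 'a$ b$:c'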
@@ -261,18 +265,20 @@ class NinjaRule:

        # expand variables in command
        command = ' '.join([self._quoter(x) for x in self.command + self.args])
        expanded_command = ''
        for m in re.finditer(r'(\${\w*})|(\$\w*)|([^$]*)', command):
            chunk = m.group()
            if chunk.startswith('$'):
        estimate = len(command)
        for m in re.finditer(r'(\${\w*}|\$\w*)?[^$]*', command):
            if m.start(1) != -1:
                estimate -= m.end(1) - m.start(1) + 1
                chunk = m.group(1)
                if chunk[1] == '{':
                    chunk = chunk[2:-1]
                else:
                    chunk = chunk[1:]
                chunk = re.sub(r'{(.*)}', r'\1', chunk)
                chunk = ninja_vars.get(chunk, []) # undefined ninja variables are empty
                chunk = ' '.join(chunk)
            expanded_command += chunk
                estimate += len(' '.join(chunk))

        # determine command length
        return len(expanded_command)
        return estimate

class NinjaBuildElement:
    def __init__(self, all_outputs, outfilenames, rulename, infilenames, implicit_outs=None):
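The NinjaRule hunk replaces building the fully expanded command string with an integer estimate of its length, adjusting the length of the quoted template for each variable reference it finds. A hedged sketch of that estimation idea under simplified assumptions (estimate_expanded_length and its arguments are illustrative, not the NinjaRule API):

import re

def estimate_expanded_length(command, ninja_vars):
    estimate = len(command)
    # each match is an optional '$var' / '${var}' reference plus trailing literal text
    for m in re.finditer(r'(\${\w*}|\$\w*)?[^$]*', command):
        if m.group(1):
            name = m.group(1).strip('${}')
            value = ' '.join(ninja_vars.get(name, []))   # undefined variables are empty
            # swap the reference's length for the length of its expansion
            estimate += len(value) - (m.end(1) - m.start(1))
    return estimate

cmd = 'gcc $ARGS -o ${out}'
assert estimate_expanded_length(cmd, {'ARGS': ['-O2', '-g'], 'out': ['prog']}) == len('gcc -O2 -g -o prog')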
@@ -380,10 +386,9 @@ class NinjaBuildElement:
        newelems = []
        for i in elems:
            if not should_quote or i == '&&': # Hackety hack hack
                quoter = ninja_quote
                newelems.append(ninja_quote(i))
            else:
                quoter = lambda x: ninja_quote(qf(x))
            newelems.append(quoter(i))
                newelems.append(ninja_quote(qf(i)))
        line += ' '.join(newelems)
        line += '\n'
        outfile.write(line)
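The write() hunk drops the per-element quoter lambda and calls ninja_quote directly in each branch, which avoids allocating a closure and an extra Python call for every token on a hot path. A toy sketch of the refactored loop shape (quote_elems and the lambdas below are stand-ins for the real quoting helpers):

def quote_elems(elems, qf, nq, should_quote=True):
    out = []
    for i in elems:
        if not should_quote or i == '&&':   # '&&' must stay unquoted
            out.append(nq(i))               # direct call, no lambda per element
        else:
            out.append(nq(qf(i)))
    return out

assert quote_elems(['a b', '&&'], qf=lambda s: "'" + s + "'",
                   nq=lambda s: s.replace(' ', '$ ')) == ["'a$ b'", '&&']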
@@ -774,6 +774,7 @@ class BuildTarget(Target):

    def extract_objects(self, srclist):
        obj_src = []
        sources_set = set(self.sources)
        for src in srclist:
            if isinstance(src, str):
                src = File(False, self.subdir, src)
@@ -782,7 +783,7 @@ class BuildTarget(Target):
            else:
                raise MesonException('Object extraction arguments must be strings or Files.')
            # FIXME: It could be a generated source
            if src not in self.sources:
            if src not in sources_set:
                raise MesonException('Tried to extract unknown source {}.'.format(src))
            obj_src.append(src)
        return ExtractedObjects(self, obj_src)
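extract_objects now checks membership against sources_set, built once from self.sources, so each lookup is an average O(1) hash probe rather than an O(n) scan of the list. The pattern in isolation (names are illustrative):

def extract_known(srclist, sources):
    sources_set = set(sources)        # built once, outside the loop
    found = []
    for src in srclist:
        if src not in sources_set:    # constant-time membership test
            raise ValueError('Tried to extract unknown source {}.'.format(src))
        found.append(src)
    return found

assert extract_known(['a.c'], ['a.c', 'b.c']) == ['a.c']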
@@ -40,7 +40,9 @@ from .visualstudio import VisualStudioLikeCompiler
if T.TYPE_CHECKING:
    from ...environment import Environment

SOREGEX = re.compile(r'.*\.so(\.[0-9]+)?(\.[0-9]+)?(\.[0-9]+)?$')
GROUP_FLAGS = re.compile(r'''\.so (?:\.[0-9]+)? (?:\.[0-9]+)? (?:\.[0-9]+)?$ |
                             ^(?:-Wl,)?-l |
                             \.a$''', re.X)

class CLikeCompilerArgs(arglist.CompilerArgs):
    prepend_prefixes = ('-I', '-L')
@@ -69,8 +71,7 @@ class CLikeCompilerArgs(arglist.CompilerArgs):
        group_start = -1
        group_end = -1
        for i, each in enumerate(new):
            if not each.startswith(('-Wl,-l', '-l')) and not each.endswith('.a') and \
               not SOREGEX.match(each):
            if not GROUP_FLAGS.search(each):
                continue
            group_end = i
            if group_start < 0:
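The clike.py hunks fold the three per-argument tests (a prefix check, a suffix check, and the SOREGEX match) into the single precompiled GROUP_FLAGS regex, so the loop body does one search() per argument. A standalone sketch of combining such checks with a verbose-mode pattern (LINK_INPUT and is_link_input are illustrative names; the real pattern is GROUP_FLAGS above):

import re

LINK_INPUT = re.compile(r'''\.so (\.[0-9]+)? (\.[0-9]+)? (\.[0-9]+)? $ |
                            ^ (-Wl,)? -l |
                            \.a $''', re.X)

def is_link_input(arg):
    return LINK_INPUT.search(arg) is not None

assert is_link_input('-lfoo')
assert is_link_input('libfoo.so.1.2.3')
assert is_link_input('libbar.a')
assert not is_link_input('-O2')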
@@ -85,6 +86,9 @@ class CLikeCompilerArgs(arglist.CompilerArgs):
        default_dirs = self.compiler.get_default_include_dirs()
        bad_idx_list = [] # type: T.List[int]
        for i, each in enumerate(new):
            if not each.startswith('-isystem'):
                continue

            # Remove the -isystem and the path if the path is a default path
            if (each == '-isystem' and
                    i < (len(new) - 1) and
@@ -92,7 +96,7 @@ class CLikeCompilerArgs(arglist.CompilerArgs):
                bad_idx_list += [i, i + 1]
            elif each.startswith('-isystem=') and each[9:] in default_dirs:
                bad_idx_list += [i]
            elif each.startswith('-isystem') and each[8:] in default_dirs:
            elif each[8:] in default_dirs:
                bad_idx_list += [i]
        for i in reversed(bad_idx_list):
            new.pop(i)
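In the -isystem hunk, the loop already skips anything that does not start with '-isystem', so the inner branches can drop the repeated prefix test; offending indices are collected and popped in reverse so earlier removals do not shift the remaining ones. A compact sketch of that filtering, with an illustrative default_dirs value (Meson queries the compiler for the real list):

def strip_default_isystem(args, default_dirs):
    bad = []
    for i, each in enumerate(args):
        if not each.startswith('-isystem'):
            continue                  # every branch below can assume the prefix
        if each == '-isystem' and i + 1 < len(args) and args[i + 1] in default_dirs:
            bad += [i, i + 1]         # separate flag + path
        elif each.startswith('-isystem=') and each[9:] in default_dirs:
            bad.append(i)             # '-isystem=<dir>'
        elif each[8:] in default_dirs:
            bad.append(i)             # '-isystem<dir>'
    for i in reversed(bad):           # pop from the back so indices stay valid
        args.pop(i)
    return args

assert strip_default_isystem(['-isystem', '/usr/include', '-Ifoo'], {'/usr/include'}) == ['-Ifoo']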
@@ -242,6 +242,7 @@ class File:
        self.is_built = is_built
        self.subdir = subdir
        self.fname = fname
        self.hash = hash((is_built, subdir, fname))

    def __str__(self) -> str:
        return self.relative_name()
@@ -291,10 +292,12 @@ class File:
    def __eq__(self, other) -> bool:
        if not isinstance(other, File):
            return NotImplemented
        if self.hash != other.hash:
            return False
        return (self.fname, self.subdir, self.is_built) == (other.fname, other.subdir, other.is_built)

    def __hash__(self) -> int:
        return hash((self.fname, self.subdir, self.is_built))
        return self.hash

    @lru_cache(maxsize=None)
    def relative_name(self) -> str:
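The File hunks compute the hash tuple once in __init__, return the cached value from __hash__, and use a hash mismatch as a cheap negative fast path in __eq__ before comparing the fields. The cached-hash pattern on a generic value object (Key is an illustrative class, not mesonbuild's File):

class Key:
    def __init__(self, subdir, fname, is_built=False):
        self.subdir = subdir
        self.fname = fname
        self.is_built = is_built
        self.hash = hash((is_built, subdir, fname))   # computed once

    def __hash__(self):
        return self.hash                              # no re-hashing on every lookup

    def __eq__(self, other):
        if not isinstance(other, Key):
            return NotImplemented
        if self.hash != other.hash:                   # cheap negative fast path
            return False
        return (self.fname, self.subdir, self.is_built) == (other.fname, other.subdir, other.is_built)

assert Key('src', 'a.c') == Key('src', 'a.c')
assert Key('src', 'a.c') != Key('src', 'b.c')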