[NFC][Py Reformat] Reformat python files in the rest of the dirs
This is part of an ongoing series of commits reformatting our Python code. It catches the last of the Python files that still needed reformatting; since there were so few, I bunched them together. Reformatting is done with `black`. If this commit causes merge conflicts because you have local changes to a Python file, the easiest way to resolve them is to run `git checkout --ours <yourfile>` and then reformat the file with `black`; a sketch of that workflow is shown below. If you run into any problems, post to Discourse and we will try to help.

RFC thread: https://discourse.llvm.org/t/rfc-document-and-standardize-python-code-style

Reviewed By: jhenderson, #libc, Mordante, sivachandra

Differential Revision: https://reviews.llvm.org/D150784
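For example, assuming a hypothetical conflicted file bolt/test/link_fdata.py, the recovery described above would look roughly like this (a sketch of the suggested workflow, not an exact recipe; adjust the path to your own file):

    # keep your side of the conflicted file, as the commit message suggests
    git checkout --ours bolt/test/link_fdata.py
    # re-apply the standard formatting
    black bolt/test/link_fdata.py
    # stage the result and continue the merge as usual
    git add bolt/test/link_fdata.py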
@@ -16,92 +16,92 @@ from datetime import date
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# sys.path.insert(0, os.path.abspath('.'))

# -- General configuration -----------------------------------------------------

# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# needs_sphinx = '1.0'

# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.intersphinx', 'sphinx.ext.todo']
extensions = ["sphinx.ext.intersphinx", "sphinx.ext.todo"]

# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
templates_path = ["_templates"]

# The suffix of source filenames.
source_suffix = '.rst'
source_suffix = ".rst"

# The encoding of source files.
#source_encoding = 'utf-8-sig'
# source_encoding = 'utf-8-sig'

# The master toctree document.
master_doc = 'index'
master_doc = "index"

# General information about the project.
project = u'BOLT'
copyright = u'2015-%d, BOLT team' % date.today().year
project = "BOLT"
copyright = "2015-%d, BOLT team" % date.today().year

# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# language = None

# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# today = ''
# Else, today_fmt is used as the format for a strftime call.
today_fmt = '%Y-%m-%d'
today_fmt = "%Y-%m-%d"

# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
exclude_patterns = ["_build"]

# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# default_role = None

# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# add_function_parentheses = True

# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# add_module_names = True

# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
show_authors = True

# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'friendly'
pygments_style = "friendly"

# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# modindex_common_prefix = []


# -- Options for HTML output ---------------------------------------------------

# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'haiku'
html_theme = "haiku"

# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# html_theme_options = {}

# Add any paths that contain custom themes here, relative to this directory.
html_theme_path = ["."]

# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# html_title = None

# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# html_short_title = None

# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# html_logo = None

# If given, this must be the name of an image file (path relative to the
# configuration directory) that is the favicon of the docs. Modern browsers use
@@ -109,110 +109,104 @@ html_theme_path = ["."]
# icon file (.ico), which is 16x16 or 32x32 pixels large. Default: None. The
# image file will be copied to the _static directory of the output HTML, but
# only if the file does not already exist there.
html_favicon = '_static/favicon.ico'
html_favicon = "_static/favicon.ico"

# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
html_static_path = ["_static"]

# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
html_last_updated_fmt = '%Y-%m-%d'
html_last_updated_fmt = "%Y-%m-%d"

# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# html_use_smartypants = True

# Custom sidebar templates, maps document names to template names.
html_sidebars = {'index': ['indexsidebar.html']}
html_sidebars = {"index": ["indexsidebar.html"]}

# Additional templates that should be rendered to pages, maps page names to
# template names.
# html_additional_pages = {'index': 'index.html'}

# If false, no module index is generated.
#html_domain_indices = True
# html_domain_indices = True

# If false, no index is generated.
#html_use_index = True
# html_use_index = True

# If true, the index is split into individual pages for each letter.
#html_split_index = False
# html_split_index = False

# If true, links to the reST sources are added to the pages.
html_show_sourcelink = True

# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# html_show_sphinx = True

# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# html_show_copyright = True

# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# html_use_opensearch = ''

# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# html_file_suffix = None

# Output file base name for HTML help builder.
htmlhelp_basename = 'boltdoc'
htmlhelp_basename = "boltdoc"


# -- Options for LaTeX output --------------------------------------------------

latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',

# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',

# Additional stuff for the LaTeX preamble.
#'preamble': '',
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}

# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('contents', 'bolt.tex', u'BOLT Documentation',
u'LLVM project', 'manual'),
("contents", "bolt.tex", "BOLT Documentation", "LLVM project", "manual"),
]

# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# latex_logo = None

# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# latex_use_parts = False

# If true, show page references after internal links.
#latex_show_pagerefs = False
# latex_show_pagerefs = False

# If true, show URL addresses after external links.
#latex_show_urls = False
# latex_show_urls = False

# Documents to append as an appendix to all manuals.
#latex_appendices = []
# latex_appendices = []

# If false, no module index is generated.
#latex_domain_indices = True
# latex_domain_indices = True


# -- Options for manual page output --------------------------------------------

# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('contents', 'bolt', u'BOLT Documentation',
[u'LLVM project'], 1)
]
man_pages = [("contents", "bolt", "BOLT Documentation", ["LLVM project"], 1)]

# If true, show URL addresses after external links.
#man_show_urls = False
# man_show_urls = False


# -- Options for Texinfo output ------------------------------------------------
@@ -221,19 +215,25 @@ man_pages = [
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('contents', 'BOLT', u'BOLT Documentation',
u'LLVM project', 'BOLT', 'Binary Optimization and Layout Tool',
'Miscellaneous'),
(
"contents",
"BOLT",
"BOLT Documentation",
"LLVM project",
"BOLT",
"Binary Optimization and Layout Tool",
"Miscellaneous",
),
]

# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# texinfo_appendices = []

# If false, no module index is generated.
#texinfo_domain_indices = True
# texinfo_domain_indices = True

# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# texinfo_show_urls = 'footnote'


# FIXME: Define intersphinx configuration.

@@ -1,7 +1,7 @@
if 'AArch64' not in config.root.targets:
if "AArch64" not in config.root.targets:
config.unsupported = True

flags = '--target=aarch64-pc-linux -nostartfiles -nostdlib -ffreestanding'
flags = "--target=aarch64-pc-linux -nostartfiles -nostdlib -ffreestanding"

config.substitutions.insert(0, ('%cflags', f'%cflags {flags}'))
config.substitutions.insert(0, ('%cxxflags', f'%cxxflags {flags}'))
config.substitutions.insert(0, ("%cflags", f"%cflags {flags}"))
config.substitutions.insert(0, ("%cxxflags", f"%cxxflags {flags}"))

@@ -8,15 +8,15 @@ import subprocess
import lit.formats

# name: The name of this test suite.
config.name = 'BOLT-Unit'
config.name = "BOLT-Unit"

# suffixes: A list of file extensions to treat as test files.
config.suffixes = []

# test_source_root: The root path where tests are located.
# test_exec_root: The root path where tests should be run.
config.test_exec_root = os.path.join(config.bolt_obj_root, 'unittests')
config.test_exec_root = os.path.join(config.bolt_obj_root, "unittests")
config.test_source_root = config.test_exec_root

# testFormat: The test format to use to interpret tests.
config.test_format = lit.formats.GoogleTest(config.llvm_build_mode, 'Tests')
config.test_format = lit.formats.GoogleTest(config.llvm_build_mode, "Tests")

@@ -1,7 +1,7 @@
if not 'X86' in config.root.targets:
if not "X86" in config.root.targets:
config.unsupported = True

flags = '--target=x86_64-pc-linux -nostdlib'
flags = "--target=x86_64-pc-linux -nostdlib"

config.substitutions.insert(0, ('%cflags', f'%cflags {flags}'))
config.substitutions.insert(0, ('%cxxflags', f'%cxxflags {flags}'))
config.substitutions.insert(0, ("%cflags", f"%cflags {flags}"))
config.substitutions.insert(0, ("%cxxflags", f"%cxxflags {flags}"))

@@ -18,7 +18,7 @@ parser.add_argument("objfile", help="Object file to extract symbol values from")
parser.add_argument("output")
parser.add_argument("prefix", nargs="?", default="FDATA", help="Custom FDATA prefix")
parser.add_argument("--nmtool", default="nm", help="Path to nm tool")
parser.add_argument("--no-lbr", action='store_true')
parser.add_argument("--no-lbr", action="store_true")

args = parser.parse_args()

@@ -48,7 +48,7 @@ replace_pat = re.compile(r"#(?P<symname>[^#]+)#")
# as (src_tuple, dst_tuple, mispred_count, exec_count) tuples, where src and dst
# are represented as (is_sym, anchor, offset) tuples
exprs = []
with open(args.input, 'r') as f:
with open(args.input, "r") as f:
for line in f.readlines():
prefix_match = prefix_pat.match(line)
if not prefix_match:
@@ -60,43 +60,49 @@ with open(args.input, 'r') as f:
if fdata_match:
src_dst, execnt, mispred = fdata_match.groups()
# Split by whitespaces not preceded by a backslash (negative lookbehind)
chunks = re.split(r'(?<!\\) +', src_dst)
chunks = re.split(r"(?<!\\) +", src_dst)
# Check if the number of records separated by non-escaped whitespace
# exactly matches the format.
assert len(chunks) == 6, f"ERROR: wrong format/whitespaces must be escaped:\n{line}"
exprs.append(('FDATA', (*chunks, execnt, mispred)))
assert (
len(chunks) == 6
), f"ERROR: wrong format/whitespaces must be escaped:\n{line}"
exprs.append(("FDATA", (*chunks, execnt, mispred)))
elif nolbr_match:
loc, count = nolbr_match.groups()
# Split by whitespaces not preceded by a backslash (negative lookbehind)
chunks = re.split(r'(?<!\\) +', loc)
chunks = re.split(r"(?<!\\) +", loc)
# Check if the number of records separated by non-escaped whitespace
# exactly matches the format.
assert len(chunks) == 3, f"ERROR: wrong format/whitespaces must be escaped:\n{line}"
exprs.append(('NOLBR', (*chunks, count)))
assert (
len(chunks) == 3
), f"ERROR: wrong format/whitespaces must be escaped:\n{line}"
exprs.append(("NOLBR", (*chunks, count)))
elif preagg_match:
exprs.append(('PREAGG', preagg_match.groups()))
exprs.append(("PREAGG", preagg_match.groups()))
else:
exit("ERROR: unexpected input:\n%s" % line)

# Read nm output: <symbol value> <symbol type> <symbol name>
nm_output = subprocess.run([args.nmtool, '--defined-only', args.objfile],
text = True, capture_output = True).stdout
nm_output = subprocess.run(
[args.nmtool, "--defined-only", args.objfile], text=True, capture_output=True
).stdout
# Populate symbol map
symbols = {}
for symline in nm_output.splitlines():
symval, _, symname = symline.split(maxsplit=2)
symbols[symname] = symval


def evaluate_symbol(issym, anchor, offsym):
sym_match = replace_pat.match(offsym)
if not sym_match:
# No need to evaluate symbol value, return as is
return f'{issym} {anchor} {offsym}'
symname = sym_match.group('symname')
return f"{issym} {anchor} {offsym}"
symname = sym_match.group("symname")
assert symname in symbols, f"ERROR: symbol {symname} is not defined in binary"
# Evaluate to an absolute offset if issym is false
if issym == '0':
return f'{issym} {anchor} {symbols[symname]}'
if issym == "0":
return f"{issym} {anchor} {symbols[symname]}"
# Evaluate symbol against its anchor if issym is true
assert anchor in symbols, f"ERROR: symbol {anchor} is not defined in binary"
anchor_value = int(symbols[anchor], 16)
@@ -104,29 +110,34 @@ def evaluate_symbol(issym, anchor, offsym):
sym_offset = symbol_value - anchor_value
return f'{issym} {anchor} {format(sym_offset, "x")}'


def replace_symbol(matchobj):
'''
"""
Expects matchobj to only capture one group which contains the symbol name.
'''
symname = matchobj.group('symname')
"""
symname = matchobj.group("symname")
assert symname in symbols, f"ERROR: symbol {symname} is not defined in binary"
return symbols[symname]

with open(args.output, 'w', newline='\n') as f:

with open(args.output, "w", newline="\n") as f:
if args.no_lbr:
print('no_lbr', file = f)
print("no_lbr", file=f)
for etype, expr in exprs:
if etype == 'FDATA':
if etype == "FDATA":
issym1, anchor1, offsym1, issym2, anchor2, offsym2, execnt, mispred = expr
print(evaluate_symbol(issym1, anchor1, offsym1),
evaluate_symbol(issym2, anchor2, offsym2),
execnt, mispred, file = f)
elif etype == 'NOLBR':
print(
evaluate_symbol(issym1, anchor1, offsym1),
evaluate_symbol(issym2, anchor2, offsym2),
execnt,
mispred,
file=f,
)
elif etype == "NOLBR":
issym, anchor, offsym, count = expr
print(evaluate_symbol(issym, anchor, offsym), count, file = f)
elif etype == 'PREAGG':
print(evaluate_symbol(issym, anchor, offsym), count, file=f)
elif etype == "PREAGG":
# Replace all symbols enclosed in ##
print(expr[0], re.sub(replace_pat, replace_symbol, expr[1]),
file = f)
print(expr[0], re.sub(replace_pat, replace_symbol, expr[1]), file=f)
else:
exit("ERROR: unhandled expression type:\n%s" % etype)

@@ -16,7 +16,7 @@ from lit.llvm.subst import FindTool
# Configuration file for the 'lit' test runner.

# name: The name of this test suite.
config.name = 'BOLT'
config.name = "BOLT"

# testFormat: The test format to use to interpret tests.
#
@@ -25,19 +25,32 @@ config.name = 'BOLT'
config.test_format = lit.formats.ShTest(not llvm_config.use_lit_shell)

# suffixes: A list of file extensions to treat as test files.
config.suffixes = ['.c', '.cpp', '.cppm', '.m', '.mm', '.cu',
'.ll', '.cl', '.s', '.S', '.modulemap', '.test', '.rs']
config.suffixes = [
".c",
".cpp",
".cppm",
".m",
".mm",
".cu",
".ll",
".cl",
".s",
".S",
".modulemap",
".test",
".rs",
]

# excludes: A list of directories to exclude from the testsuite. The 'Inputs'
# subdirectories contain auxiliary inputs for various tests in their parent
# directories.
config.excludes = ['Inputs', 'CMakeLists.txt', 'README.txt', 'LICENSE.txt']
config.excludes = ["Inputs", "CMakeLists.txt", "README.txt", "LICENSE.txt"]

# test_source_root: The root path where tests are located.
config.test_source_root = os.path.dirname(__file__)

# test_exec_root: The root path where tests should be run.
config.test_exec_root = os.path.join(config.bolt_obj_root, 'test')
config.test_exec_root = os.path.join(config.bolt_obj_root, "test")

# checking if maxIndividualTestTime is available on the platform and sets
# it to 60sec if so, declares lit-max-individual-test-time feature for
@@ -47,8 +60,11 @@ if supported:
config.available_features.add("lit-max-individual-test-time")
lit_config.maxIndividualTestTime = 60
else:
lit_config.warning('Setting a timeout per test not supported. ' + errormsg
+ ' Some tests will be skipped.')
lit_config.warning(
"Setting a timeout per test not supported. "
+ errormsg
+ " Some tests will be skipped."
)

if config.bolt_enable_runtime:
config.available_features.add("bolt-runtime")
@@ -58,57 +74,64 @@ if config.gnu_ld:

llvm_config.use_default_substitutions()

llvm_config.config.environment['CLANG'] = config.bolt_clang
llvm_config.config.environment["CLANG"] = config.bolt_clang
llvm_config.use_clang()

llvm_config.config.environment['LD_LLD'] = config.bolt_lld
ld_lld = llvm_config.use_llvm_tool('ld.lld', required=True, search_env='LD_LLD')
llvm_config.config.available_features.add('ld.lld')
llvm_config.add_tool_substitutions([ToolSubst(r'ld\.lld', command=ld_lld)])
llvm_config.config.environment["LD_LLD"] = config.bolt_lld
ld_lld = llvm_config.use_llvm_tool("ld.lld", required=True, search_env="LD_LLD")
llvm_config.config.available_features.add("ld.lld")
llvm_config.add_tool_substitutions([ToolSubst(r"ld\.lld", command=ld_lld)])

config.substitutions.append(('%cflags', ''))
config.substitutions.append(('%cxxflags', ''))
config.substitutions.append(("%cflags", ""))
config.substitutions.append(("%cxxflags", ""))

link_fdata_cmd = os.path.join(config.test_source_root, 'link_fdata.py')
link_fdata_cmd = os.path.join(config.test_source_root, "link_fdata.py")

tool_dirs = [config.llvm_tools_dir,
config.test_source_root]
tool_dirs = [config.llvm_tools_dir, config.test_source_root]

tools = [
ToolSubst('llc', unresolved='fatal'),
ToolSubst('llvm-dwarfdump', unresolved='fatal'),
ToolSubst('llvm-bolt', unresolved='fatal'),
ToolSubst('llvm-boltdiff', unresolved='fatal'),
ToolSubst('llvm-bolt-heatmap', unresolved='fatal'),
ToolSubst('llvm-bat-dump', unresolved='fatal'),
ToolSubst('perf2bolt', unresolved='fatal'),
ToolSubst('yaml2obj', unresolved='fatal'),
ToolSubst('llvm-mc', unresolved='fatal'),
ToolSubst('llvm-nm', unresolved='fatal'),
ToolSubst('llvm-objdump', unresolved='fatal'),
ToolSubst('llvm-objcopy', unresolved='fatal'),
ToolSubst('llvm-strings', unresolved='fatal'),
ToolSubst('llvm-strip', unresolved='fatal'),
ToolSubst('llvm-readelf', unresolved='fatal'),
ToolSubst('link_fdata', command=sys.executable, unresolved='fatal', extra_args=[link_fdata_cmd]),
ToolSubst('merge-fdata', unresolved='fatal'),
ToolSubst('llvm-readobj', unresolved='fatal'),
ToolSubst('llvm-dwp', unresolved='fatal'),
ToolSubst('split-file', unresolved='fatal'),
ToolSubst("llc", unresolved="fatal"),
ToolSubst("llvm-dwarfdump", unresolved="fatal"),
ToolSubst("llvm-bolt", unresolved="fatal"),
ToolSubst("llvm-boltdiff", unresolved="fatal"),
ToolSubst("llvm-bolt-heatmap", unresolved="fatal"),
ToolSubst("llvm-bat-dump", unresolved="fatal"),
ToolSubst("perf2bolt", unresolved="fatal"),
ToolSubst("yaml2obj", unresolved="fatal"),
ToolSubst("llvm-mc", unresolved="fatal"),
ToolSubst("llvm-nm", unresolved="fatal"),
ToolSubst("llvm-objdump", unresolved="fatal"),
ToolSubst("llvm-objcopy", unresolved="fatal"),
ToolSubst("llvm-strings", unresolved="fatal"),
ToolSubst("llvm-strip", unresolved="fatal"),
ToolSubst("llvm-readelf", unresolved="fatal"),
ToolSubst(
"link_fdata",
command=sys.executable,
unresolved="fatal",
extra_args=[link_fdata_cmd],
),
ToolSubst("merge-fdata", unresolved="fatal"),
ToolSubst("llvm-readobj", unresolved="fatal"),
ToolSubst("llvm-dwp", unresolved="fatal"),
ToolSubst("split-file", unresolved="fatal"),
]
llvm_config.add_tool_substitutions(tools, tool_dirs)


def calculate_arch_features(arch_string):
features = []
for arch in arch_string.split():
features.append(arch.lower() + '-registered-target')
features.append(arch.lower() + "-registered-target")
return features


llvm_config.feature_config(
[('--assertion-mode', {'ON': 'asserts'}),
('--cxxflags', {r'-D_GLIBCXX_DEBUG\b': 'libstdcxx-safe-mode'}),
('--targets-built', calculate_arch_features)
])
[
("--assertion-mode", {"ON": "asserts"}),
("--cxxflags", {r"-D_GLIBCXX_DEBUG\b": "libstdcxx-safe-mode"}),
("--targets-built", calculate_arch_features),
]
)

config.targets = frozenset(config.targets_to_build.split(';'))
config.targets = frozenset(config.targets_to_build.split(";"))

@@ -1,6 +1,6 @@
host_linux_triple = config.target_triple.split('-')[0]+'-linux'
common_linker_flags = '-fuse-ld=lld -Wl,--unresolved-symbols=ignore-all'
flags = f'--target={host_linux_triple} {common_linker_flags}'
host_linux_triple = config.target_triple.split("-")[0] + "-linux"
common_linker_flags = "-fuse-ld=lld -Wl,--unresolved-symbols=ignore-all"
flags = f"--target={host_linux_triple} {common_linker_flags}"

config.substitutions.insert(0, ('%cflags', f'%cflags {flags}'))
config.substitutions.insert(0, ('%cxxflags', f'%cxxflags {flags}'))
config.substitutions.insert(0, ("%cflags", f"%cflags {flags}"))
config.substitutions.insert(0, ("%cxxflags", f"%cxxflags {flags}"))

@@ -1,2 +1,2 @@
if config.host_arch not in ['aarch64']:
if config.host_arch not in ["aarch64"]:
config.unsupported = True

@@ -1,2 +1,2 @@
if config.host_arch not in ['x86', 'X86', 'x86_64']:
if config.host_arch not in ["x86", "X86", "x86_64"]:
config.unsupported = True

@@ -1,3 +1,3 @@
# Tests are not expected to pass in a cross-compilation setup.
if not {'native', 'system-linux'}.issubset(config.available_features):
config.unsupported = True
if not {"native", "system-linux"}.issubset(config.available_features):
config.unsupported = True

@@ -4,26 +4,36 @@ import os
|
||||
import sys
|
||||
|
||||
BASE_PATH = os.path.dirname(os.path.abspath(__file__))
|
||||
HTML_TEMPLATE_NAME = 'd3-graphviz-template.html'
|
||||
HTML_TEMPLATE_NAME = "d3-graphviz-template.html"
|
||||
HTML_TEMPLATE_PATH = os.path.join(BASE_PATH, HTML_TEMPLATE_NAME)
|
||||
|
||||
|
||||
def main():
|
||||
parser = argparse.ArgumentParser()
|
||||
parser.add_argument('dotfile', nargs='?', type=argparse.FileType('r'),
|
||||
default=sys.stdin,
|
||||
help='Input .dot file, reads from stdin if not set')
|
||||
parser.add_argument('htmlfile', nargs='?', type=argparse.FileType('w'),
|
||||
default=sys.stdout,
|
||||
help='Output .html file, writes to stdout if not set')
|
||||
parser.add_argument(
|
||||
"dotfile",
|
||||
nargs="?",
|
||||
type=argparse.FileType("r"),
|
||||
default=sys.stdin,
|
||||
help="Input .dot file, reads from stdin if not set",
|
||||
)
|
||||
parser.add_argument(
|
||||
"htmlfile",
|
||||
nargs="?",
|
||||
type=argparse.FileType("w"),
|
||||
default=sys.stdout,
|
||||
help="Output .html file, writes to stdout if not set",
|
||||
)
|
||||
args = parser.parse_args()
|
||||
|
||||
template = open(HTML_TEMPLATE_PATH, 'r')
|
||||
template = open(HTML_TEMPLATE_PATH, "r")
|
||||
|
||||
for line in template:
|
||||
if "<INSERT_DOT>" in line:
|
||||
print(args.dotfile.read(), file=args.htmlfile, end='')
|
||||
print(args.dotfile.read(), file=args.htmlfile, end="")
|
||||
else:
|
||||
print(line, file=args.htmlfile, end='')
|
||||
print(line, file=args.htmlfile, end="")
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
|
||||
@@ -41,9 +41,10 @@ from textwrap import dedent
|
||||
# # optional, defaults to timing.log in CWD
|
||||
# timing_file = timing1.log
|
||||
|
||||
|
||||
def read_cfg():
|
||||
src_dir = os.path.dirname(os.path.abspath(__file__))
|
||||
cfg = configparser.ConfigParser(allow_no_value = True)
|
||||
cfg = configparser.ConfigParser(allow_no_value=True)
|
||||
cfgs = cfg.read("llvm-bolt-wrapper.ini")
|
||||
if not cfgs:
|
||||
cfgs = cfg.read(os.path.join(src_dir, "llvm-bolt-wrapper.ini"))
|
||||
@@ -51,69 +52,72 @@ def read_cfg():
|
||||
|
||||
def get_cfg(key):
|
||||
# if key is not present in config, assume False
|
||||
if key not in cfg['config']:
|
||||
if key not in cfg["config"]:
|
||||
return False
|
||||
# if key is present, but has no value, assume True
|
||||
if not cfg['config'][key]:
|
||||
if not cfg["config"][key]:
|
||||
return True
|
||||
# if key has associated value, interpret the value
|
||||
return cfg['config'].getboolean(key)
|
||||
return cfg["config"].getboolean(key)
|
||||
|
||||
d = {
|
||||
# BOLT binary locations
|
||||
'BASE_BOLT': cfg['config']['base_bolt'],
|
||||
'CMP_BOLT': cfg['config']['cmp_bolt'],
|
||||
"BASE_BOLT": cfg["config"]["base_bolt"],
|
||||
"CMP_BOLT": cfg["config"]["cmp_bolt"],
|
||||
# optional
|
||||
'VERBOSE': get_cfg('verbose'),
|
||||
'KEEP_TMP': get_cfg('keep_tmp'),
|
||||
'NO_MINIMIZE': get_cfg('no_minimize'),
|
||||
'RUN_SEQUENTIALLY': get_cfg('run_sequentially'),
|
||||
'COMPARE_OUTPUT': get_cfg('compare_output'),
|
||||
'SKIP_BINARY_CMP': get_cfg('skip_binary_cmp'),
|
||||
'TIMING_FILE': cfg['config'].get('timing_file', 'timing.log'),
|
||||
"VERBOSE": get_cfg("verbose"),
|
||||
"KEEP_TMP": get_cfg("keep_tmp"),
|
||||
"NO_MINIMIZE": get_cfg("no_minimize"),
|
||||
"RUN_SEQUENTIALLY": get_cfg("run_sequentially"),
|
||||
"COMPARE_OUTPUT": get_cfg("compare_output"),
|
||||
"SKIP_BINARY_CMP": get_cfg("skip_binary_cmp"),
|
||||
"TIMING_FILE": cfg["config"].get("timing_file", "timing.log"),
|
||||
}
|
||||
if d['VERBOSE']:
|
||||
if d["VERBOSE"]:
|
||||
print(f"Using config {os.path.abspath(cfgs[0])}")
|
||||
return SimpleNamespace(**d)
|
||||
|
||||
|
||||
# perf2bolt mode
|
||||
PERF2BOLT_MODE = ['-aggregate-only', '-ignore-build-id']
|
||||
PERF2BOLT_MODE = ["-aggregate-only", "-ignore-build-id"]
|
||||
|
||||
# boltdiff mode
|
||||
BOLTDIFF_MODE = ['-diff-only', '-o', '/dev/null']
|
||||
BOLTDIFF_MODE = ["-diff-only", "-o", "/dev/null"]
|
||||
|
||||
# options to suppress binary differences as much as possible
|
||||
MINIMIZE_DIFFS = ['-bolt-info=0']
|
||||
MINIMIZE_DIFFS = ["-bolt-info=0"]
|
||||
|
||||
# bolt output options that need to be intercepted
|
||||
BOLT_OUTPUT_OPTS = {
|
||||
'-o': 'BOLT output binary',
|
||||
'-w': 'BOLT recorded profile',
|
||||
"-o": "BOLT output binary",
|
||||
"-w": "BOLT recorded profile",
|
||||
}
|
||||
|
||||
# regex patterns to exclude the line from log comparison
|
||||
SKIP_MATCH = [
|
||||
'BOLT-INFO: BOLT version',
|
||||
r'^Args: ',
|
||||
r'^BOLT-DEBUG:',
|
||||
r'BOLT-INFO:.*data.*output data',
|
||||
'WARNING: reading perf data directly',
|
||||
"BOLT-INFO: BOLT version",
|
||||
r"^Args: ",
|
||||
r"^BOLT-DEBUG:",
|
||||
r"BOLT-INFO:.*data.*output data",
|
||||
"WARNING: reading perf data directly",
|
||||
]
|
||||
|
||||
|
||||
def run_cmd(cmd, out_f, cfg):
|
||||
if cfg.VERBOSE:
|
||||
print(' '.join(cmd))
|
||||
print(" ".join(cmd))
|
||||
return subprocess.Popen(cmd, stdout=out_f, stderr=subprocess.STDOUT)
|
||||
|
||||
|
||||
def run_bolt(bolt_path, bolt_args, out_f, cfg):
|
||||
p2b = os.path.basename(sys.argv[0]) == 'perf2bolt' # perf2bolt mode
|
||||
bd = os.path.basename(sys.argv[0]) == 'llvm-boltdiff' # boltdiff mode
|
||||
hm = sys.argv[1] == 'heatmap' # heatmap mode
|
||||
cmd = ['/usr/bin/time', '-f', '%e %M', bolt_path] + bolt_args
|
||||
p2b = os.path.basename(sys.argv[0]) == "perf2bolt" # perf2bolt mode
|
||||
bd = os.path.basename(sys.argv[0]) == "llvm-boltdiff" # boltdiff mode
|
||||
hm = sys.argv[1] == "heatmap" # heatmap mode
|
||||
cmd = ["/usr/bin/time", "-f", "%e %M", bolt_path] + bolt_args
|
||||
if p2b:
|
||||
# -ignore-build-id can occur at most once, hence remove it from cmd
|
||||
if '-ignore-build-id' in cmd:
|
||||
cmd.remove('-ignore-build-id')
|
||||
if "-ignore-build-id" in cmd:
|
||||
cmd.remove("-ignore-build-id")
|
||||
cmd += PERF2BOLT_MODE
|
||||
elif bd:
|
||||
cmd += BOLTDIFF_MODE
|
||||
@@ -121,55 +125,65 @@ def run_bolt(bolt_path, bolt_args, out_f, cfg):
|
||||
cmd += MINIMIZE_DIFFS
|
||||
return run_cmd(cmd, out_f, cfg)
|
||||
|
||||
|
||||
def prepend_dash(args: Mapping[AnyStr, AnyStr]) -> Sequence[AnyStr]:
|
||||
'''
|
||||
"""
|
||||
Accepts parsed arguments and returns flat list with dash prepended to
|
||||
the option.
|
||||
Example: Namespace(o='test.tmp') -> ['-o', 'test.tmp']
|
||||
'''
|
||||
dashed = [('-'+key,value) for (key,value) in args.items()]
|
||||
"""
|
||||
dashed = [("-" + key, value) for (key, value) in args.items()]
|
||||
flattened = list(sum(dashed, ()))
|
||||
return flattened
|
||||
|
||||
|
||||
def replace_cmp_path(tmp: AnyStr, args: Mapping[AnyStr, AnyStr]) -> Sequence[AnyStr]:
|
||||
'''
|
||||
"""
|
||||
Keeps file names, but replaces the path to a temp folder.
|
||||
Example: Namespace(o='abc/test.tmp') -> Namespace(o='/tmp/tmpf9un/test.tmp')
|
||||
Except preserve /dev/null.
|
||||
'''
|
||||
replace_path = lambda x: os.path.join(tmp, os.path.basename(x)) if x != '/dev/null' else '/dev/null'
|
||||
"""
|
||||
replace_path = (
|
||||
lambda x: os.path.join(tmp, os.path.basename(x))
|
||||
if x != "/dev/null"
|
||||
else "/dev/null"
|
||||
)
|
||||
new_args = {key: replace_path(value) for key, value in args.items()}
|
||||
return prepend_dash(new_args)
|
||||
|
||||
|
||||
def preprocess_args(args: argparse.Namespace) -> Mapping[AnyStr, AnyStr]:
|
||||
'''
|
||||
"""
|
||||
Drop options that weren't parsed (e.g. -w), convert to a dict
|
||||
'''
|
||||
"""
|
||||
return {key: value for key, value in vars(args).items() if value}
|
||||
|
||||
def write_to(txt, filename, mode='w'):
|
||||
|
||||
def write_to(txt, filename, mode="w"):
|
||||
with open(filename, mode) as f:
|
||||
f.write(txt)
|
||||
|
||||
|
||||
def wait(proc, fdesc):
|
||||
proc.wait()
|
||||
fdesc.close()
|
||||
return open(fdesc.name)
|
||||
|
||||
|
||||
def compare_logs(main, cmp, skip_begin=0, skip_end=0, str_input=True):
|
||||
'''
|
||||
"""
|
||||
Compares logs but allows for certain lines to be excluded from comparison.
|
||||
If str_input is True (default), the input it assumed to be a string,
|
||||
which is split into lines. Otherwise the input is assumed to be a file.
|
||||
Returns None on success, mismatch otherwise.
|
||||
'''
|
||||
"""
|
||||
main_inp = main.splitlines() if str_input else main.readlines()
|
||||
cmp_inp = cmp.splitlines() if str_input else cmp.readlines()
|
||||
# rewind logs after consumption
|
||||
if not str_input:
|
||||
main.seek(0)
|
||||
cmp.seek(0)
|
||||
for lhs, rhs in list(zip(main_inp, cmp_inp))[skip_begin:-skip_end or None]:
|
||||
for lhs, rhs in list(zip(main_inp, cmp_inp))[skip_begin : -skip_end or None]:
|
||||
if lhs != rhs:
|
||||
# check skip patterns
|
||||
for skip in SKIP_MATCH:
|
||||
@@ -181,52 +195,59 @@ def compare_logs(main, cmp, skip_begin=0, skip_end=0, str_input=True):
|
||||
return (lhs, rhs)
|
||||
return None
|
||||
|
||||
|
||||
def fmt_cmp(cmp_tuple):
|
||||
if not cmp_tuple:
|
||||
return ''
|
||||
return f'main:\n{cmp_tuple[0]}\ncmp:\n{cmp_tuple[1]}\n'
|
||||
return ""
|
||||
return f"main:\n{cmp_tuple[0]}\ncmp:\n{cmp_tuple[1]}\n"
|
||||
|
||||
|
||||
def compare_with(lhs, rhs, cmd, skip_begin=0, skip_end=0):
|
||||
'''
|
||||
"""
|
||||
Runs cmd on both lhs and rhs and compares stdout.
|
||||
Returns tuple (mismatch, lhs_stdout):
|
||||
- if stdout matches between two files, mismatch is None,
|
||||
- otherwise mismatch is a tuple of mismatching lines.
|
||||
'''
|
||||
run = lambda binary: subprocess.run(cmd.split() + [binary],
|
||||
text=True, check=True,
|
||||
capture_output=True).stdout
|
||||
"""
|
||||
run = lambda binary: subprocess.run(
|
||||
cmd.split() + [binary], text=True, check=True, capture_output=True
|
||||
).stdout
|
||||
run_lhs = run(lhs)
|
||||
run_rhs = run(rhs)
|
||||
cmp = compare_logs(run_lhs, run_rhs, skip_begin, skip_end)
|
||||
return cmp, run_lhs
|
||||
|
||||
|
||||
def parse_cmp_offset(cmp_out):
|
||||
'''
|
||||
"""
|
||||
Extracts byte number from cmp output:
|
||||
file1 file2 differ: byte X, line Y
|
||||
'''
|
||||
"""
|
||||
# NOTE: cmp counts bytes starting from 1!
|
||||
return int(re.search(r'byte (\d+),', cmp_out).groups()[0]) - 1
|
||||
return int(re.search(r"byte (\d+),", cmp_out).groups()[0]) - 1
|
||||
|
||||
|
||||
def report_real_time(binary, main_err, cmp_err, cfg):
|
||||
'''
|
||||
"""
|
||||
Extracts real time from stderr and appends it to TIMING FILE it as csv:
|
||||
"output binary; base bolt; cmp bolt"
|
||||
'''
|
||||
"""
|
||||
|
||||
def get_real_from_stderr(logline):
|
||||
return '; '.join(logline.split())
|
||||
return "; ".join(logline.split())
|
||||
|
||||
for line in main_err:
|
||||
pass
|
||||
main = get_real_from_stderr(line)
|
||||
for line in cmp_err:
|
||||
pass
|
||||
cmp = get_real_from_stderr(line)
|
||||
write_to(f"{binary}; {main}; {cmp}\n", cfg.TIMING_FILE, 'a')
|
||||
write_to(f"{binary}; {main}; {cmp}\n", cfg.TIMING_FILE, "a")
|
||||
# rewind logs after consumption
|
||||
main_err.seek(0)
|
||||
cmp_err.seek(0)
|
||||
|
||||
|
||||
def clean_exit(tmp, out, exitcode, cfg):
|
||||
# temp files are only cleaned on success
|
||||
if not cfg.KEEP_TMP:
|
||||
@@ -236,8 +257,9 @@ def clean_exit(tmp, out, exitcode, cfg):
|
||||
shutil.copyfileobj(out, sys.stdout)
|
||||
sys.exit(exitcode)
|
||||
|
||||
|
||||
def find_section(offset, readelf_hdr):
|
||||
hdr = readelf_hdr.split('\n')
|
||||
hdr = readelf_hdr.split("\n")
|
||||
section = None
|
||||
# extract sections table (parse objdump -hw output)
|
||||
for line in hdr[5:-1]:
|
||||
@@ -247,7 +269,7 @@ def find_section(offset, readelf_hdr):
|
||||
# section size
|
||||
size = int(cols[2], 16)
|
||||
if offset >= file_offset and offset < file_offset + size:
|
||||
if sys.stdout.isatty(): # terminal supports colors
|
||||
if sys.stdout.isatty(): # terminal supports colors
|
||||
print(f"\033[1m{line}\033[0m")
|
||||
else:
|
||||
print(f">{line}")
|
||||
@@ -256,34 +278,57 @@ def find_section(offset, readelf_hdr):
|
||||
print(line)
|
||||
return section
|
||||
|
||||
|
||||
def main_config_generator():
|
||||
parser = argparse.ArgumentParser()
|
||||
parser.add_argument('base_bolt', help='Full path to base llvm-bolt binary')
|
||||
parser.add_argument('cmp_bolt', help='Full path to cmp llvm-bolt binary')
|
||||
parser.add_argument('--verbose', action='store_true',
|
||||
help='Print subprocess invocation cmdline (default False)')
|
||||
parser.add_argument('--keep_tmp', action='store_true',
|
||||
help = 'Preserve tmp folder on a clean exit '
|
||||
'(tmp directory is preserved on crash by default)')
|
||||
parser.add_argument('--no_minimize', action='store_true',
|
||||
help=f'Do not add `{MINIMIZE_DIFFS}` that is used '
|
||||
'by default to reduce binary differences')
|
||||
parser.add_argument('--run_sequentially', action='store_true',
|
||||
help='Run both binaries sequentially (default '
|
||||
'in parallel). Use for timing comparison')
|
||||
parser.add_argument('--compare_output', action='store_true',
|
||||
help = 'Compare bolt stdout/stderr (disabled by default)')
|
||||
parser.add_argument('--skip_binary_cmp', action='store_true',
|
||||
help = 'Disable output comparison')
|
||||
parser.add_argument('--timing_file', help = 'Override path to timing log '
|
||||
'file (default `timing.log` in CWD)')
|
||||
parser.add_argument("base_bolt", help="Full path to base llvm-bolt binary")
|
||||
parser.add_argument("cmp_bolt", help="Full path to cmp llvm-bolt binary")
|
||||
parser.add_argument(
|
||||
"--verbose",
|
||||
action="store_true",
|
||||
help="Print subprocess invocation cmdline (default False)",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--keep_tmp",
|
||||
action="store_true",
|
||||
help="Preserve tmp folder on a clean exit "
|
||||
"(tmp directory is preserved on crash by default)",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--no_minimize",
|
||||
action="store_true",
|
||||
help=f"Do not add `{MINIMIZE_DIFFS}` that is used "
|
||||
"by default to reduce binary differences",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--run_sequentially",
|
||||
action="store_true",
|
||||
help="Run both binaries sequentially (default "
|
||||
"in parallel). Use for timing comparison",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--compare_output",
|
||||
action="store_true",
|
||||
help="Compare bolt stdout/stderr (disabled by default)",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--skip_binary_cmp", action="store_true", help="Disable output comparison"
|
||||
)
|
||||
parser.add_argument(
|
||||
"--timing_file",
|
||||
help="Override path to timing log " "file (default `timing.log` in CWD)",
|
||||
)
|
||||
args = parser.parse_args()
|
||||
|
||||
print(dedent(f'''\
|
||||
print(
|
||||
dedent(
|
||||
f"""\
|
||||
[config]
|
||||
# mandatory
|
||||
base_bolt = {args.base_bolt}
|
||||
cmp_bolt = {args.cmp_bolt}'''))
|
||||
cmp_bolt = {args.cmp_bolt}"""
|
||||
)
|
||||
)
|
||||
del args.base_bolt
|
||||
del args.cmp_bolt
|
||||
d = vars(args)
|
||||
@@ -293,6 +338,7 @@ def main_config_generator():
|
||||
if value:
|
||||
print(key)
|
||||
|
||||
|
||||
def main():
|
||||
cfg = read_cfg()
|
||||
# intercept output arguments
|
||||
@@ -309,8 +355,8 @@ def main():
|
||||
args = prepend_dash(args)
|
||||
|
||||
# run both BOLT binaries
|
||||
main_f = open(os.path.join(tmp, 'main_bolt.stdout'), 'w')
|
||||
cmp_f = open(os.path.join(tmp, 'cmp_bolt.stdout'), 'w')
|
||||
main_f = open(os.path.join(tmp, "main_bolt.stdout"), "w")
|
||||
cmp_f = open(os.path.join(tmp, "cmp_bolt.stdout"), "w")
|
||||
main_bolt = run_bolt(cfg.BASE_BOLT, unknownargs + args, main_f, cfg)
|
||||
if cfg.RUN_SEQUENTIALLY:
|
||||
main_out = wait(main_bolt, main_f)
|
||||
@@ -330,22 +376,26 @@ def main():
|
||||
cfg.SKIP_BINARY_CMP = True
|
||||
|
||||
# compare logs, skip_end=1 skips the line with time
|
||||
out = compare_logs(main_out, cmp_out, skip_end=1, str_input=False) if cfg.COMPARE_OUTPUT else None
|
||||
out = (
|
||||
compare_logs(main_out, cmp_out, skip_end=1, str_input=False)
|
||||
if cfg.COMPARE_OUTPUT
|
||||
else None
|
||||
)
|
||||
if out:
|
||||
print(tmp)
|
||||
print(fmt_cmp(out))
|
||||
write_to(fmt_cmp(out), os.path.join(tmp, 'summary.txt'))
|
||||
write_to(fmt_cmp(out), os.path.join(tmp, "summary.txt"))
|
||||
exit("logs mismatch")
|
||||
|
||||
if os.path.basename(sys.argv[0]) == 'llvm-boltdiff': # boltdiff mode
|
||||
if os.path.basename(sys.argv[0]) == "llvm-boltdiff": # boltdiff mode
|
||||
# no output binary to compare, so just exit
|
||||
clean_exit(tmp, main_out, main_bolt.returncode, cfg)
|
||||
|
||||
# compare binaries (using cmp)
|
||||
main_binary = args[args.index('-o')+1]
|
||||
cmp_binary = cmp_args[cmp_args.index('-o')+1]
|
||||
if main_binary == '/dev/null':
|
||||
assert cmp_binary == '/dev/null'
|
||||
main_binary = args[args.index("-o") + 1]
|
||||
cmp_binary = cmp_args[cmp_args.index("-o") + 1]
|
||||
if main_binary == "/dev/null":
|
||||
assert cmp_binary == "/dev/null"
|
||||
cfg.SKIP_BINARY_CMP = True
|
||||
|
||||
# report binary timing as csv: output binary; base bolt real; cmp bolt real
|
||||
@@ -368,23 +418,25 @@ def main():
|
||||
assert not main_exists
|
||||
exit(f"{main_binary} doesn't exist")
|
||||
|
||||
cmp_proc = subprocess.run(['cmp', '-b', main_binary, cmp_binary],
|
||||
capture_output=True, text=True)
|
||||
cmp_proc = subprocess.run(
|
||||
["cmp", "-b", main_binary, cmp_binary], capture_output=True, text=True
|
||||
)
|
||||
if cmp_proc.returncode:
|
||||
# check if output is an ELF file (magic bytes)
|
||||
with open(main_binary, 'rb') as f:
|
||||
with open(main_binary, "rb") as f:
|
||||
magic = f.read(4)
|
||||
if magic != b'\x7fELF':
|
||||
if magic != b"\x7fELF":
|
||||
exit("output mismatch")
|
||||
# check if ELF headers match
|
||||
mismatch, _ = compare_with(main_binary, cmp_binary, 'readelf -We')
|
||||
mismatch, _ = compare_with(main_binary, cmp_binary, "readelf -We")
|
||||
if mismatch:
|
||||
print(fmt_cmp(mismatch))
|
||||
write_to(fmt_cmp(mismatch), os.path.join(tmp, 'headers.txt'))
|
||||
write_to(fmt_cmp(mismatch), os.path.join(tmp, "headers.txt"))
|
||||
exit("headers mismatch")
|
||||
# if headers match, compare sections (skip line with filename)
|
||||
mismatch, hdr = compare_with(main_binary, cmp_binary, 'objdump -hw',
|
||||
skip_begin=2)
|
||||
mismatch, hdr = compare_with(
|
||||
main_binary, cmp_binary, "objdump -hw", skip_begin=2
|
||||
)
|
||||
assert not mismatch
|
||||
# check which section has the first mismatch
|
||||
mismatch_offset = parse_cmp_offset(cmp_proc.stdout)
|
||||
@@ -393,6 +445,7 @@ def main():
|
||||
|
||||
clean_exit(tmp, main_out, main_bolt.returncode, cfg)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
# config generator mode if the script is launched as is
|
||||
if os.path.basename(__file__) == "llvm-bolt-wrapper.py":
|
||||
|
||||
@@ -7,56 +7,73 @@ import subprocess
|
||||
import sys
|
||||
import textwrap
|
||||
|
||||
|
||||
def get_git_ref_or_rev(dir: str) -> str:
|
||||
# Run 'git symbolic-ref -q --short HEAD || git rev-parse --short HEAD'
|
||||
cmd_ref = 'git symbolic-ref -q --short HEAD'
|
||||
ref = subprocess.run(shlex.split(cmd_ref), cwd=dir, text=True,
|
||||
stdout=subprocess.PIPE)
|
||||
cmd_ref = "git symbolic-ref -q --short HEAD"
|
||||
ref = subprocess.run(
|
||||
shlex.split(cmd_ref), cwd=dir, text=True, stdout=subprocess.PIPE
|
||||
)
|
||||
if not ref.returncode:
|
||||
return ref.stdout.strip()
|
||||
cmd_rev = 'git rev-parse --short HEAD'
|
||||
return subprocess.check_output(shlex.split(cmd_rev), cwd=dir,
|
||||
text=True).strip()
|
||||
cmd_rev = "git rev-parse --short HEAD"
|
||||
return subprocess.check_output(shlex.split(cmd_rev), cwd=dir, text=True).strip()
|
||||
|
||||
|
||||
def main():
|
||||
parser = argparse.ArgumentParser(description=textwrap.dedent('''
|
||||
parser = argparse.ArgumentParser(
|
||||
description=textwrap.dedent(
|
||||
"""
|
||||
This script builds two versions of BOLT (with the current and
|
||||
previous revision) and sets up symlink for llvm-bolt-wrapper.
|
||||
Passes the options through to llvm-bolt-wrapper.
|
||||
'''))
|
||||
parser.add_argument('build_dir', nargs='?', default=os.getcwd(),
|
||||
help='Path to BOLT build directory, default is current '
|
||||
'directory')
|
||||
parser.add_argument('--switch-back', default=False, action='store_true',
|
||||
help='Checkout back to the starting revision')
|
||||
"""
|
||||
)
|
||||
)
|
||||
parser.add_argument(
|
||||
"build_dir",
|
||||
nargs="?",
|
||||
default=os.getcwd(),
|
||||
help="Path to BOLT build directory, default is current " "directory",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--switch-back",
|
||||
default=False,
|
||||
action="store_true",
|
||||
help="Checkout back to the starting revision",
|
||||
)
|
||||
args, wrapper_args = parser.parse_known_args()
|
||||
bolt_path = f'{args.build_dir}/bin/llvm-bolt'
|
||||
bolt_path = f"{args.build_dir}/bin/llvm-bolt"
|
||||
|
||||
source_dir = None
|
||||
# find the repo directory
|
||||
with open(f'{args.build_dir}/CMakeCache.txt') as f:
|
||||
with open(f"{args.build_dir}/CMakeCache.txt") as f:
|
||||
for line in f:
|
||||
m = re.match(r'LLVM_SOURCE_DIR:STATIC=(.*)', line)
|
||||
m = re.match(r"LLVM_SOURCE_DIR:STATIC=(.*)", line)
|
||||
if m:
|
||||
source_dir = m.groups()[0]
|
||||
if not source_dir:
|
||||
sys.exit("Source directory is not found")
|
||||
|
||||
script_dir = os.path.dirname(os.path.abspath(__file__))
|
||||
wrapper_path = f'{script_dir}/llvm-bolt-wrapper.py'
|
||||
wrapper_path = f"{script_dir}/llvm-bolt-wrapper.py"
|
||||
# build the current commit
|
||||
subprocess.run(shlex.split("cmake --build . --target llvm-bolt"),
|
||||
cwd=args.build_dir)
|
||||
subprocess.run(
|
||||
shlex.split("cmake --build . --target llvm-bolt"), cwd=args.build_dir
|
||||
)
|
||||
# rename llvm-bolt
|
||||
os.replace(bolt_path, f'{bolt_path}.new')
|
||||
os.replace(bolt_path, f"{bolt_path}.new")
|
||||
# memorize the old hash for logging
|
||||
old_ref = get_git_ref_or_rev(source_dir)
|
||||
|
||||
# determine whether a stash is needed
|
||||
stash = subprocess.run(shlex.split("git status --porcelain"), cwd=source_dir,
|
||||
stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
|
||||
text=True).stdout
|
||||
stash = subprocess.run(
|
||||
shlex.split("git status --porcelain"),
|
||||
cwd=source_dir,
|
||||
stdout=subprocess.PIPE,
|
||||
stderr=subprocess.STDOUT,
|
||||
text=True,
|
||||
).stdout
|
||||
if stash:
|
||||
# save local changes before checkout
|
||||
subprocess.run(shlex.split("git stash push -u"), cwd=source_dir)
|
||||
@@ -65,16 +82,17 @@ def main():
|
||||
# get the parent commit hash for logging
|
||||
new_ref = get_git_ref_or_rev(source_dir)
|
||||
# build the previous commit
|
||||
subprocess.run(shlex.split("cmake --build . --target llvm-bolt"),
|
||||
cwd=args.build_dir)
|
||||
subprocess.run(
|
||||
shlex.split("cmake --build . --target llvm-bolt"), cwd=args.build_dir
|
||||
)
|
||||
# rename llvm-bolt
|
||||
os.replace(bolt_path, f'{bolt_path}.old')
|
||||
os.replace(bolt_path, f"{bolt_path}.old")
|
||||
# set up llvm-bolt-wrapper.ini
|
||||
ini = subprocess.check_output(
|
||||
shlex.split(
|
||||
f"{wrapper_path} {bolt_path}.old {bolt_path}.new") + wrapper_args,
|
||||
text=True)
|
||||
with open(f'{args.build_dir}/bin/llvm-bolt-wrapper.ini', 'w') as f:
|
||||
shlex.split(f"{wrapper_path} {bolt_path}.old {bolt_path}.new") + wrapper_args,
|
||||
text=True,
|
||||
)
|
||||
with open(f"{args.build_dir}/bin/llvm-bolt-wrapper.ini", "w") as f:
|
||||
f.write(ini)
|
||||
# symlink llvm-bolt-wrapper
|
||||
os.symlink(wrapper_path, bolt_path)
|
||||
@@ -83,12 +101,16 @@ def main():
|
||||
subprocess.run(shlex.split("git stash pop"), cwd=source_dir)
|
||||
subprocess.run(shlex.split(f"git checkout {old_ref}"), cwd=source_dir)
|
||||
else:
|
||||
print(f"The repository {source_dir} has been switched from {old_ref} "
|
||||
f"to {new_ref}. Local changes were stashed. Switch back using\n\t"
|
||||
f"git checkout {old_ref}\n")
|
||||
print(f"Build directory {args.build_dir} is ready to run BOLT tests, e.g.\n"
|
||||
"\tbin/llvm-lit -sv tools/bolt/test\nor\n"
|
||||
"\tbin/llvm-lit -sv tools/bolttests")
|
||||
print(
|
||||
f"The repository {source_dir} has been switched from {old_ref} "
|
||||
f"to {new_ref}. Local changes were stashed. Switch back using\n\t"
|
||||
f"git checkout {old_ref}\n"
|
||||
)
|
||||
print(
|
||||
f"Build directory {args.build_dir} is ready to run BOLT tests, e.g.\n"
|
||||
"\tbin/llvm-lit -sv tools/bolt/test\nor\n"
|
||||
"\tbin/llvm-lit -sv tools/bolttests"
|
||||
)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
|
||||
File diff suppressed because it is too large
@@ -1,11 +1,11 @@
|
||||
#!/usr/bin/env python3
|
||||
#===- lib/dfsan/scripts/build-libc-list.py ---------------------------------===#
|
||||
# ===- lib/dfsan/scripts/build-libc-list.py ---------------------------------===#
|
||||
#
|
||||
# Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
|
||||
# See https://llvm.org/LICENSE.txt for license information.
|
||||
# SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
|
||||
#
|
||||
#===------------------------------------------------------------------------===#
|
||||
# ===------------------------------------------------------------------------===#
|
||||
# The purpose of this script is to identify every function symbol in a set of
|
||||
# libraries (in this case, libc and libgcc) so that they can be marked as
|
||||
# uninstrumented, thus allowing the instrumentation pass to treat calls to those
|
||||
@@ -42,50 +42,62 @@ from optparse import OptionParser
|
||||
|
||||
|
||||
def defined_function_list(lib):
|
||||
"""Get non-local function symbols from lib."""
|
||||
functions = []
|
||||
readelf_proc = subprocess.Popen(['readelf', '-s', '-W', lib],
|
||||
stdout=subprocess.PIPE)
|
||||
readelf = readelf_proc.communicate()[0].decode().split('\n')
|
||||
if readelf_proc.returncode != 0:
|
||||
raise subprocess.CalledProcessError(readelf_proc.returncode, 'readelf')
|
||||
for line in readelf:
|
||||
if (line[31:35] == 'FUNC' or line[31:36] == 'IFUNC') and \
|
||||
line[39:44] != 'LOCAL' and \
|
||||
line[55:58] != 'UND':
|
||||
function_name = line[59:].split('@')[0]
|
||||
functions.append(function_name)
|
||||
return functions
|
||||
"""Get non-local function symbols from lib."""
|
||||
functions = []
|
||||
readelf_proc = subprocess.Popen(
|
||||
["readelf", "-s", "-W", lib], stdout=subprocess.PIPE
|
||||
)
|
||||
readelf = readelf_proc.communicate()[0].decode().split("\n")
|
||||
if readelf_proc.returncode != 0:
|
||||
raise subprocess.CalledProcessError(readelf_proc.returncode, "readelf")
|
||||
for line in readelf:
|
||||
if (
|
||||
(line[31:35] == "FUNC" or line[31:36] == "IFUNC")
|
||||
and line[39:44] != "LOCAL"
|
||||
and line[55:58] != "UND"
|
||||
):
|
||||
function_name = line[59:].split("@")[0]
|
||||
functions.append(function_name)
|
||||
return functions
|
||||
|
||||
|
||||
p = OptionParser()
|
||||
|
||||
p.add_option('--lib-file', action='append', metavar='PATH',
|
||||
help='Specific library files to add.',
|
||||
default=[])
|
||||
p.add_option(
|
||||
"--lib-file",
|
||||
action="append",
|
||||
metavar="PATH",
|
||||
help="Specific library files to add.",
|
||||
default=[],
|
||||
)
|
||||
|
||||
p.add_option('--error-missing-lib', action='store_true',
|
||||
help='Make this script exit with an error code if any library is missing.',
|
||||
dest='error_missing_lib', default=False)
|
||||
p.add_option(
|
||||
"--error-missing-lib",
|
||||
action="store_true",
|
||||
help="Make this script exit with an error code if any library is missing.",
|
||||
dest="error_missing_lib",
|
||||
default=False,
|
||||
)
|
||||
|
||||
(options, args) = p.parse_args()
|
||||
|
||||
libs = options.lib_file
|
||||
if not libs:
|
||||
print('No libraries provided.', file=sys.stderr)
|
||||
print("No libraries provided.", file=sys.stderr)
|
||||
exit(1)
|
||||
|
||||
missing_lib = False
|
||||
functions = []
|
||||
for l in libs:
|
||||
if os.path.exists(l):
|
||||
functions += defined_function_list(l)
|
||||
else:
|
||||
missing_lib = True
|
||||
print('warning: library %s not found' % l, file=sys.stderr)
|
||||
if os.path.exists(l):
|
||||
functions += defined_function_list(l)
|
||||
else:
|
||||
missing_lib = True
|
||||
print("warning: library %s not found" % l, file=sys.stderr)
|
||||
|
||||
if options.error_missing_lib and missing_lib:
|
||||
print('Exiting with failure code due to missing library.', file=sys.stderr)
|
||||
print("Exiting with failure code due to missing library.", file=sys.stderr)
|
||||
exit(1)
|
||||
|
||||
for f in sorted(set(functions)):
|
||||
print('fun:%s=uninstrumented' % f)
|
||||
print("fun:%s=uninstrumented" % f)
|
||||
|
||||
@@ -1,92 +1,100 @@
|
||||
#!/usr/bin/env python
|
||||
#===- lib/fuzzer/scripts/unbalanced_allocs.py ------------------------------===#
|
||||
# ===- lib/fuzzer/scripts/unbalanced_allocs.py ------------------------------===#
|
||||
#
|
||||
# Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
|
||||
# See https://llvm.org/LICENSE.txt for license information.
|
||||
# SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
|
||||
#
|
||||
#===------------------------------------------------------------------------===#
|
||||
# ===------------------------------------------------------------------------===#
|
||||
#
|
||||
# Post-process -trace_malloc=2 output and print out only allocations and frees
|
||||
# unbalanced inside of fuzzer runs.
|
||||
# Usage:
|
||||
# my_fuzzer -trace_malloc=2 -runs=10 2>&1 | unbalanced_allocs.py -skip=5
|
||||
#
|
||||
#===------------------------------------------------------------------------===#
|
||||
# ===------------------------------------------------------------------------===#
|
||||
|
||||
import argparse
|
||||
import sys
|
||||
|
||||
_skip = 0
|
||||
|
||||
|
||||
def PrintStack(line, stack):
|
||||
global _skip
|
||||
if _skip > 0:
|
||||
return
|
||||
print('Unbalanced ' + line.rstrip());
|
||||
for l in stack:
|
||||
print(l.rstrip())
|
||||
global _skip
|
||||
if _skip > 0:
|
||||
return
|
||||
print("Unbalanced " + line.rstrip())
|
||||
for l in stack:
|
||||
print(l.rstrip())
|
||||
|
||||
|
||||
def ProcessStack(line, f):
|
||||
stack = []
|
||||
while line and line.startswith(' #'):
|
||||
stack += [line]
|
||||
line = f.readline()
|
||||
return line, stack
|
||||
stack = []
|
||||
while line and line.startswith(" #"):
|
||||
stack += [line]
|
||||
line = f.readline()
|
||||
return line, stack
|
||||
|
||||
|
||||
def ProcessFree(line, f, allocs):
|
||||
if not line.startswith('FREE['):
|
||||
return f.readline()
|
||||
if not line.startswith("FREE["):
|
||||
return f.readline()
|
||||
|
||||
addr = int(line.split()[1], 16)
|
||||
next_line, stack = ProcessStack(f.readline(), f)
|
||||
if addr in allocs:
|
||||
del allocs[addr]
|
||||
else:
|
||||
PrintStack(line, stack)
|
||||
return next_line
|
||||
|
||||
addr = int(line.split()[1], 16)
|
||||
next_line, stack = ProcessStack(f.readline(), f)
|
||||
if addr in allocs:
|
||||
del allocs[addr]
|
||||
else:
|
||||
PrintStack(line, stack)
|
||||
return next_line
|
||||
|
||||
def ProcessMalloc(line, f, allocs):
|
||||
if not line.startswith('MALLOC['):
|
||||
return ProcessFree(line, f, allocs)
|
||||
if not line.startswith("MALLOC["):
|
||||
return ProcessFree(line, f, allocs)
|
||||
|
||||
addr = int(line.split()[1], 16)
|
||||
assert not addr in allocs
|
||||
addr = int(line.split()[1], 16)
|
||||
assert not addr in allocs
|
||||
|
||||
next_line, stack = ProcessStack(f.readline(), f)
|
||||
allocs[addr] = (line, stack)
|
||||
return next_line
|
||||
|
||||
next_line, stack = ProcessStack(f.readline(), f)
|
||||
allocs[addr] = (line, stack)
|
||||
return next_line
|
||||
|
||||
def ProcessRun(line, f):
|
||||
if not line.startswith('MallocFreeTracer: START'):
|
||||
return ProcessMalloc(line, f, {})
|
||||
if not line.startswith("MallocFreeTracer: START"):
|
||||
return ProcessMalloc(line, f, {})
|
||||
|
||||
allocs = {}
|
||||
print(line.rstrip())
|
||||
line = f.readline()
|
||||
while line:
|
||||
if line.startswith("MallocFreeTracer: STOP"):
|
||||
global _skip
|
||||
_skip = _skip - 1
|
||||
for _, (l, s) in allocs.items():
|
||||
PrintStack(l, s)
|
||||
print(line.rstrip())
|
||||
return f.readline()
|
||||
line = ProcessMalloc(line, f, allocs)
|
||||
return line
|
||||
|
||||
allocs = {}
|
||||
print(line.rstrip())
|
||||
line = f.readline()
|
||||
while line:
|
||||
if line.startswith('MallocFreeTracer: STOP'):
|
||||
global _skip
|
||||
_skip = _skip - 1
|
||||
for _, (l, s) in allocs.items():
|
||||
PrintStack(l, s)
|
||||
print(line.rstrip())
|
||||
return f.readline()
|
||||
line = ProcessMalloc(line, f, allocs)
|
||||
return line
|
||||
|
||||
def ProcessFile(f):
|
||||
line = f.readline()
|
||||
while line:
|
||||
line = ProcessRun(line, f);
|
||||
line = f.readline()
|
||||
while line:
|
||||
line = ProcessRun(line, f)
|
||||
|
||||
|
||||
def main(argv):
|
||||
parser = argparse.ArgumentParser()
|
||||
parser.add_argument('--skip', default=0, help='number of runs to ignore')
|
||||
args = parser.parse_args()
|
||||
global _skip
|
||||
_skip = int(args.skip) + 1
|
||||
ProcessFile(sys.stdin)
|
||||
parser = argparse.ArgumentParser()
|
||||
parser.add_argument("--skip", default=0, help="number of runs to ignore")
|
||||
args = parser.parse_args()
|
||||
global _skip
|
||||
_skip = int(args.skip) + 1
|
||||
ProcessFile(sys.stdin)
|
||||
|
||||
if __name__ == '__main__':
|
||||
main(sys.argv)
|
||||
|
||||
if __name__ == "__main__":
|
||||
main(sys.argv)
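(Not part of the diff: unbalanced_allocs.py pairs MALLOC[...] and FREE[...] records from -trace_malloc=2 output by address and reports whatever is left unmatched at the end of a run. A rough standalone sketch of that pairing, using made-up trace lines shaped like the ones the script parses; real output also carries stack frames.)

# Hypothetical trace lines in the shape the script expects.
trace = [
    "MALLOC[0] 0x7f0000001000 32",
    "MALLOC[1] 0x7f0000002000 64",
    "FREE[0] 0x7f0000001000",
]

live = {}
for line in trace:
    addr = int(line.split()[1], 16)
    if line.startswith("MALLOC["):
        live[addr] = line
    elif line.startswith("FREE["):
        live.pop(addr, None)

for line in live.values():
    print("Unbalanced " + line)  # reports the allocation at 0x7f0000002000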
|
||||
|
||||
@@ -1,18 +1,18 @@
|
||||
#!/usr/bin/env python
|
||||
#===- lib/sanitizer_common/scripts/gen_dynamic_list.py ---------------------===#
|
||||
# ===- lib/sanitizer_common/scripts/gen_dynamic_list.py ---------------------===#
|
||||
#
|
||||
# Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
|
||||
# See https://llvm.org/LICENSE.txt for license information.
|
||||
# SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
|
||||
#
|
||||
#===------------------------------------------------------------------------===#
|
||||
# ===------------------------------------------------------------------------===#
|
||||
#
|
||||
# Generates the list of functions that should be exported from sanitizer
|
||||
# runtimes. The output format is recognized by --dynamic-list linker option.
|
||||
# Usage:
|
||||
# gen_dynamic_list.py libclang_rt.*san*.a [ files ... ]
|
||||
#
|
||||
#===------------------------------------------------------------------------===#
|
||||
# ===------------------------------------------------------------------------===#
|
||||
from __future__ import print_function
|
||||
import argparse
|
||||
import os
|
||||
@@ -21,115 +21,143 @@ import subprocess
|
||||
import sys
|
||||
import platform
|
||||
|
||||
new_delete = set([
|
||||
'_Znam', '_ZnamRKSt9nothrow_t', # operator new[](unsigned long)
|
||||
'_Znwm', '_ZnwmRKSt9nothrow_t', # operator new(unsigned long)
|
||||
'_Znaj', '_ZnajRKSt9nothrow_t', # operator new[](unsigned int)
|
||||
'_Znwj', '_ZnwjRKSt9nothrow_t', # operator new(unsigned int)
|
||||
# operator new(unsigned long, std::align_val_t)
|
||||
'_ZnwmSt11align_val_t', '_ZnwmSt11align_val_tRKSt9nothrow_t',
|
||||
# operator new(unsigned int, std::align_val_t)
|
||||
'_ZnwjSt11align_val_t', '_ZnwjSt11align_val_tRKSt9nothrow_t',
|
||||
# operator new[](unsigned long, std::align_val_t)
|
||||
'_ZnamSt11align_val_t', '_ZnamSt11align_val_tRKSt9nothrow_t',
|
||||
# operator new[](unsigned int, std::align_val_t)
|
||||
'_ZnajSt11align_val_t', '_ZnajSt11align_val_tRKSt9nothrow_t',
|
||||
'_ZdaPv', '_ZdaPvRKSt9nothrow_t', # operator delete[](void *)
|
||||
'_ZdlPv', '_ZdlPvRKSt9nothrow_t', # operator delete(void *)
|
||||
'_ZdaPvm', # operator delete[](void*, unsigned long)
|
||||
'_ZdlPvm', # operator delete(void*, unsigned long)
|
||||
'_ZdaPvj', # operator delete[](void*, unsigned int)
|
||||
'_ZdlPvj', # operator delete(void*, unsigned int)
|
||||
# operator delete(void*, std::align_val_t)
|
||||
'_ZdlPvSt11align_val_t', '_ZdlPvSt11align_val_tRKSt9nothrow_t',
|
||||
# operator delete[](void*, std::align_val_t)
|
||||
'_ZdaPvSt11align_val_t', '_ZdaPvSt11align_val_tRKSt9nothrow_t',
|
||||
# operator delete(void*, unsigned long, std::align_val_t)
|
||||
'_ZdlPvmSt11align_val_t',
|
||||
# operator delete[](void*, unsigned long, std::align_val_t)
|
||||
'_ZdaPvmSt11align_val_t',
|
||||
# operator delete(void*, unsigned int, std::align_val_t)
|
||||
'_ZdlPvjSt11align_val_t',
|
||||
# operator delete[](void*, unsigned int, std::align_val_t)
|
||||
'_ZdaPvjSt11align_val_t',
|
||||
])
|
||||
new_delete = set(
|
||||
[
|
||||
"_Znam",
|
||||
"_ZnamRKSt9nothrow_t", # operator new[](unsigned long)
|
||||
"_Znwm",
|
||||
"_ZnwmRKSt9nothrow_t", # operator new(unsigned long)
|
||||
"_Znaj",
|
||||
"_ZnajRKSt9nothrow_t", # operator new[](unsigned int)
|
||||
"_Znwj",
|
||||
"_ZnwjRKSt9nothrow_t", # operator new(unsigned int)
|
||||
# operator new(unsigned long, std::align_val_t)
|
||||
"_ZnwmSt11align_val_t",
|
||||
"_ZnwmSt11align_val_tRKSt9nothrow_t",
|
||||
# operator new(unsigned int, std::align_val_t)
|
||||
"_ZnwjSt11align_val_t",
|
||||
"_ZnwjSt11align_val_tRKSt9nothrow_t",
|
||||
# operator new[](unsigned long, std::align_val_t)
|
||||
"_ZnamSt11align_val_t",
|
||||
"_ZnamSt11align_val_tRKSt9nothrow_t",
|
||||
# operator new[](unsigned int, std::align_val_t)
|
||||
"_ZnajSt11align_val_t",
|
||||
"_ZnajSt11align_val_tRKSt9nothrow_t",
|
||||
"_ZdaPv",
|
||||
"_ZdaPvRKSt9nothrow_t", # operator delete[](void *)
|
||||
"_ZdlPv",
|
||||
"_ZdlPvRKSt9nothrow_t", # operator delete(void *)
|
||||
"_ZdaPvm", # operator delete[](void*, unsigned long)
|
||||
"_ZdlPvm", # operator delete(void*, unsigned long)
|
||||
"_ZdaPvj", # operator delete[](void*, unsigned int)
|
||||
"_ZdlPvj", # operator delete(void*, unsigned int)
|
||||
# operator delete(void*, std::align_val_t)
|
||||
"_ZdlPvSt11align_val_t",
|
||||
"_ZdlPvSt11align_val_tRKSt9nothrow_t",
|
||||
# operator delete[](void*, std::align_val_t)
|
||||
"_ZdaPvSt11align_val_t",
|
||||
"_ZdaPvSt11align_val_tRKSt9nothrow_t",
|
||||
# operator delete(void*, unsigned long, std::align_val_t)
|
||||
"_ZdlPvmSt11align_val_t",
|
||||
# operator delete[](void*, unsigned long, std::align_val_t)
|
||||
"_ZdaPvmSt11align_val_t",
|
||||
# operator delete(void*, unsigned int, std::align_val_t)
|
||||
"_ZdlPvjSt11align_val_t",
|
||||
# operator delete[](void*, unsigned int, std::align_val_t)
|
||||
"_ZdaPvjSt11align_val_t",
|
||||
]
|
||||
)
|
||||
|
||||
versioned_functions = set(
|
||||
[
|
||||
"memcpy",
|
||||
"pthread_attr_getaffinity_np",
|
||||
"pthread_cond_broadcast",
|
||||
"pthread_cond_destroy",
|
||||
"pthread_cond_init",
|
||||
"pthread_cond_signal",
|
||||
"pthread_cond_timedwait",
|
||||
"pthread_cond_wait",
|
||||
"realpath",
|
||||
"sched_getaffinity",
|
||||
]
|
||||
)
|
||||
|
||||
versioned_functions = set(['memcpy', 'pthread_attr_getaffinity_np',
|
||||
'pthread_cond_broadcast',
|
||||
'pthread_cond_destroy', 'pthread_cond_init',
|
||||
'pthread_cond_signal', 'pthread_cond_timedwait',
|
||||
'pthread_cond_wait', 'realpath',
|
||||
'sched_getaffinity'])
|
||||
|
||||
def get_global_functions(nm_executable, library):
|
||||
functions = []
|
||||
nm = os.environ.get('NM', nm_executable)
|
||||
nm_proc = subprocess.Popen([nm, library], stdout=subprocess.PIPE,
|
||||
stderr=subprocess.PIPE)
|
||||
nm_out = nm_proc.communicate()[0].decode().split('\n')
|
||||
if nm_proc.returncode != 0:
|
||||
raise subprocess.CalledProcessError(nm_proc.returncode, nm)
|
||||
func_symbols = ['T', 'W']
|
||||
# On PowerPC, nm prints function descriptors from .data section.
|
||||
if platform.uname()[4] in ["powerpc", "ppc64"]:
|
||||
func_symbols += ['D']
|
||||
for line in nm_out:
|
||||
cols = line.split(' ')
|
||||
if len(cols) == 3 and cols[1] in func_symbols :
|
||||
functions.append(cols[2])
|
||||
return functions
|
||||
functions = []
|
||||
nm = os.environ.get("NM", nm_executable)
|
||||
nm_proc = subprocess.Popen(
|
||||
[nm, library], stdout=subprocess.PIPE, stderr=subprocess.PIPE
|
||||
)
|
||||
nm_out = nm_proc.communicate()[0].decode().split("\n")
|
||||
if nm_proc.returncode != 0:
|
||||
raise subprocess.CalledProcessError(nm_proc.returncode, nm)
|
||||
func_symbols = ["T", "W"]
|
||||
# On PowerPC, nm prints function descriptors from .data section.
|
||||
if platform.uname()[4] in ["powerpc", "ppc64"]:
|
||||
func_symbols += ["D"]
|
||||
for line in nm_out:
|
||||
cols = line.split(" ")
|
||||
if len(cols) == 3 and cols[1] in func_symbols:
|
||||
functions.append(cols[2])
|
||||
return functions
|
||||
|
||||
|
||||
def main(argv):
|
||||
parser = argparse.ArgumentParser()
|
||||
parser.add_argument('--version-list', action='store_true')
|
||||
parser.add_argument('--extra', default=[], action='append')
|
||||
parser.add_argument('libraries', default=[], nargs='+')
|
||||
parser.add_argument('--nm-executable', required=True)
|
||||
parser.add_argument('-o', '--output', required=True)
|
||||
args = parser.parse_args()
|
||||
parser = argparse.ArgumentParser()
|
||||
parser.add_argument("--version-list", action="store_true")
|
||||
parser.add_argument("--extra", default=[], action="append")
|
||||
parser.add_argument("libraries", default=[], nargs="+")
|
||||
parser.add_argument("--nm-executable", required=True)
|
||||
parser.add_argument("-o", "--output", required=True)
|
||||
args = parser.parse_args()
|
||||
|
||||
result = []
|
||||
result = []
|
||||
|
||||
all_functions = []
|
||||
for library in args.libraries:
|
||||
all_functions.extend(get_global_functions(args.nm_executable, library))
|
||||
function_set = set(all_functions)
|
||||
for func in all_functions:
|
||||
# Export new/delete operators.
|
||||
if func in new_delete:
|
||||
result.append(func)
|
||||
continue
|
||||
# Export interceptors.
|
||||
match = re.match('__interceptor_(.*)', func)
|
||||
if match:
|
||||
result.append(func)
|
||||
# We have to avoid exporting the interceptors for versioned library
|
||||
# functions due to gold internal error.
|
||||
orig_name = match.group(1)
|
||||
if orig_name in function_set and (args.version_list or orig_name not in versioned_functions):
|
||||
result.append(orig_name)
|
||||
continue
|
||||
# Export sanitizer interface functions.
|
||||
if re.match('__sanitizer_(.*)', func):
|
||||
result.append(func)
|
||||
all_functions = []
|
||||
for library in args.libraries:
|
||||
all_functions.extend(get_global_functions(args.nm_executable, library))
|
||||
function_set = set(all_functions)
|
||||
for func in all_functions:
|
||||
# Export new/delete operators.
|
||||
if func in new_delete:
|
||||
result.append(func)
|
||||
continue
|
||||
# Export interceptors.
|
||||
match = re.match("__interceptor_(.*)", func)
|
||||
if match:
|
||||
result.append(func)
|
||||
# We have to avoid exporting the interceptors for versioned library
|
||||
# functions due to gold internal error.
|
||||
orig_name = match.group(1)
|
||||
if orig_name in function_set and (
|
||||
args.version_list or orig_name not in versioned_functions
|
||||
):
|
||||
result.append(orig_name)
|
||||
continue
|
||||
# Export sanitizer interface functions.
|
||||
if re.match("__sanitizer_(.*)", func):
|
||||
result.append(func)
|
||||
|
||||
# Additional exported functions from files.
|
||||
for fname in args.extra:
|
||||
f = open(fname, 'r')
|
||||
for line in f:
|
||||
result.append(line.rstrip())
|
||||
# Print the resulting list in the format recognized by ld.
|
||||
with open(args.output, 'w') as f:
|
||||
print('{', file=f)
|
||||
if args.version_list:
|
||||
print('global:', file=f)
|
||||
result.sort()
|
||||
for sym in result:
|
||||
print(u' %s;' % sym, file=f)
|
||||
if args.version_list:
|
||||
print('local:', file=f)
|
||||
print(' *;', file=f)
|
||||
print('};', file=f)
|
||||
# Additional exported functions from files.
|
||||
for fname in args.extra:
|
||||
f = open(fname, "r")
|
||||
for line in f:
|
||||
result.append(line.rstrip())
|
||||
# Print the resulting list in the format recognized by ld.
|
||||
with open(args.output, "w") as f:
|
||||
print("{", file=f)
|
||||
if args.version_list:
|
||||
print("global:", file=f)
|
||||
result.sort()
|
||||
for sym in result:
|
||||
print(" %s;" % sym, file=f)
|
||||
if args.version_list:
|
||||
print("local:", file=f)
|
||||
print(" *;", file=f)
|
||||
print("};", file=f)
|
||||
|
||||
if __name__ == '__main__':
|
||||
main(sys.argv)
|
||||
|
||||
if __name__ == "__main__":
|
||||
main(sys.argv)
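(Not part of the diff: the file gen_dynamic_list.py writes is an ld dynamic list, or a version script when --version-list is given. A hedged sketch of the non-version-list output shape, with placeholder symbol names, mirroring the tail of the script.)

# Sketch of the output format only; symbol names are placeholders.
syms = ["__sanitizer_cov_dump", "malloc"]
with open("exported.syms", "w") as out:
    print("{", file=out)
    for s in sorted(syms):
        print(" %s;" % s, file=out)
    print("};", file=out)
# exported.syms now holds:
# {
#  __sanitizer_cov_dump;
#  malloc;
# };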
|
||||
|
||||
@@ -13,237 +13,276 @@ import sys
|
||||
|
||||
prog_name = ""
|
||||
|
||||
|
||||
def Usage():
|
||||
sys.stderr.write(
|
||||
"Usage: \n" + \
|
||||
" " + prog_name + " merge FILE [FILE...] > OUTPUT\n" \
|
||||
" " + prog_name + " print FILE [FILE...]\n" \
|
||||
" " + prog_name + " unpack FILE [FILE...]\n" \
|
||||
" " + prog_name + " rawunpack FILE [FILE ...]\n" \
|
||||
" " + prog_name + " missing BINARY < LIST_OF_PCS\n" \
|
||||
"\n")
|
||||
exit(1)
|
||||
sys.stderr.write(
|
||||
"Usage: \n" + " " + prog_name + " merge FILE [FILE...] > OUTPUT\n"
|
||||
" " + prog_name + " print FILE [FILE...]\n"
|
||||
" " + prog_name + " unpack FILE [FILE...]\n"
|
||||
" " + prog_name + " rawunpack FILE [FILE ...]\n"
|
||||
" " + prog_name + " missing BINARY < LIST_OF_PCS\n"
|
||||
"\n"
|
||||
)
|
||||
exit(1)
|
||||
|
||||
|
||||
def CheckBits(bits):
|
||||
if bits != 32 and bits != 64:
|
||||
raise Exception("Wrong bitness: %d" % bits)
|
||||
if bits != 32 and bits != 64:
|
||||
raise Exception("Wrong bitness: %d" % bits)
|
||||
|
||||
|
||||
def TypeCodeForBits(bits):
|
||||
CheckBits(bits)
|
||||
return 'L' if bits == 64 else 'I'
|
||||
CheckBits(bits)
|
||||
return "L" if bits == 64 else "I"
|
||||
|
||||
|
||||
def TypeCodeForStruct(bits):
|
||||
CheckBits(bits)
|
||||
return 'Q' if bits == 64 else 'I'
|
||||
CheckBits(bits)
|
||||
return "Q" if bits == 64 else "I"
|
||||
|
||||
|
||||
kMagic32SecondHalf = 0xFFFFFF32
|
||||
kMagic64SecondHalf = 0xFFFFFF64
|
||||
kMagicFirstHalf = 0xC0BFFFFF
|
||||
|
||||
kMagic32SecondHalf = 0xFFFFFF32;
|
||||
kMagic64SecondHalf = 0xFFFFFF64;
|
||||
kMagicFirstHalf = 0xC0BFFFFF;
|
||||
|
||||
def MagicForBits(bits):
|
||||
CheckBits(bits)
|
||||
if sys.byteorder == 'little':
|
||||
return [kMagic64SecondHalf if bits == 64 else kMagic32SecondHalf, kMagicFirstHalf]
|
||||
else:
|
||||
return [kMagicFirstHalf, kMagic64SecondHalf if bits == 64 else kMagic32SecondHalf]
|
||||
CheckBits(bits)
|
||||
if sys.byteorder == "little":
|
||||
return [
|
||||
kMagic64SecondHalf if bits == 64 else kMagic32SecondHalf,
|
||||
kMagicFirstHalf,
|
||||
]
|
||||
else:
|
||||
return [
|
||||
kMagicFirstHalf,
|
||||
kMagic64SecondHalf if bits == 64 else kMagic32SecondHalf,
|
||||
]
|
||||
|
||||
|
||||
def ReadMagicAndReturnBitness(f, path):
|
||||
magic_bytes = f.read(8)
|
||||
magic_words = struct.unpack('II', magic_bytes);
|
||||
bits = 0
|
||||
idx = 1 if sys.byteorder == 'little' else 0
|
||||
if magic_words[idx] == kMagicFirstHalf:
|
||||
if magic_words[1-idx] == kMagic64SecondHalf:
|
||||
bits = 64
|
||||
elif magic_words[1-idx] == kMagic32SecondHalf:
|
||||
bits = 32
|
||||
if bits == 0:
|
||||
raise Exception('Bad magic word in %s' % path)
|
||||
return bits
|
||||
magic_bytes = f.read(8)
|
||||
magic_words = struct.unpack("II", magic_bytes)
|
||||
bits = 0
|
||||
idx = 1 if sys.byteorder == "little" else 0
|
||||
if magic_words[idx] == kMagicFirstHalf:
|
||||
if magic_words[1 - idx] == kMagic64SecondHalf:
|
||||
bits = 64
|
||||
elif magic_words[1 - idx] == kMagic32SecondHalf:
|
||||
bits = 32
|
||||
if bits == 0:
|
||||
raise Exception("Bad magic word in %s" % path)
|
||||
return bits
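(Not part of the diff: the sancov file header is two 32-bit words whose order depends on host endianness. A small sketch reusing the constants above, packing a 64-bit header the way MagicForBits(64) lays it out and decoding it the way ReadMagicAndReturnBitness does.)

import struct
import sys

kMagic32SecondHalf = 0xFFFFFF32
kMagic64SecondHalf = 0xFFFFFF64
kMagicFirstHalf = 0xC0BFFFFF

# Write the 8-byte header in MagicForBits(64) order for this host ...
if sys.byteorder == "little":
    header = struct.pack("II", kMagic64SecondHalf, kMagicFirstHalf)
else:
    header = struct.pack("II", kMagicFirstHalf, kMagic64SecondHalf)

# ... and decode it with the same logic as ReadMagicAndReturnBitness.
magic_words = struct.unpack("II", header)
idx = 1 if sys.byteorder == "little" else 0
assert magic_words[idx] == kMagicFirstHalf
bits = 64 if magic_words[1 - idx] == kMagic64SecondHalf else 32
print(bits)  # 64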
|
||||
|
||||
|
||||
def ReadOneFile(path):
|
||||
with open(path, mode="rb") as f:
|
||||
f.seek(0, 2)
|
||||
size = f.tell()
|
||||
f.seek(0, 0)
|
||||
if size < 8:
|
||||
raise Exception('File %s is short (< 8 bytes)' % path)
|
||||
bits = ReadMagicAndReturnBitness(f, path)
|
||||
size -= 8
|
||||
w = size * 8 // bits
|
||||
s = struct.unpack_from(TypeCodeForStruct(bits) * (w), f.read(size))
|
||||
sys.stderr.write(
|
||||
"%s: read %d %d-bit PCs from %s\n" % (prog_name, w, bits, path))
|
||||
return s
|
||||
with open(path, mode="rb") as f:
|
||||
f.seek(0, 2)
|
||||
size = f.tell()
|
||||
f.seek(0, 0)
|
||||
if size < 8:
|
||||
raise Exception("File %s is short (< 8 bytes)" % path)
|
||||
bits = ReadMagicAndReturnBitness(f, path)
|
||||
size -= 8
|
||||
w = size * 8 // bits
|
||||
s = struct.unpack_from(TypeCodeForStruct(bits) * (w), f.read(size))
|
||||
sys.stderr.write("%s: read %d %d-bit PCs from %s\n" % (prog_name, w, bits, path))
|
||||
return s
|
||||
|
||||
|
||||
def Merge(files):
|
||||
s = set()
|
||||
for f in files:
|
||||
s = s.union(set(ReadOneFile(f)))
|
||||
sys.stderr.write(
|
||||
"%s: %d files merged; %d PCs total\n" % (prog_name, len(files), len(s))
|
||||
)
|
||||
return sorted(s)
|
||||
s = set()
|
||||
for f in files:
|
||||
s = s.union(set(ReadOneFile(f)))
|
||||
sys.stderr.write(
|
||||
"%s: %d files merged; %d PCs total\n" % (prog_name, len(files), len(s))
|
||||
)
|
||||
return sorted(s)
|
||||
|
||||
|
||||
def PrintFiles(files):
|
||||
if len(files) > 1:
|
||||
s = Merge(files)
|
||||
else: # If there is just one file, print the PCs in order.
|
||||
s = ReadOneFile(files[0])
|
||||
sys.stderr.write("%s: 1 file merged; %d PCs total\n" % (prog_name, len(s)))
|
||||
for i in s:
|
||||
print("0x%x" % i)
|
||||
if len(files) > 1:
|
||||
s = Merge(files)
|
||||
else: # If there is just one file, print the PCs in order.
|
||||
s = ReadOneFile(files[0])
|
||||
sys.stderr.write("%s: 1 file merged; %d PCs total\n" % (prog_name, len(s)))
|
||||
for i in s:
|
||||
print("0x%x" % i)
|
||||
|
||||
|
||||
def MergeAndPrint(files):
|
||||
if sys.stdout.isatty():
|
||||
Usage()
|
||||
s = Merge(files)
|
||||
bits = 32
|
||||
if max(s) > 0xFFFFFFFF:
|
||||
bits = 64
|
||||
stdout_buf = getattr(sys.stdout, 'buffer', sys.stdout)
|
||||
array.array('I', MagicForBits(bits)).tofile(stdout_buf)
|
||||
a = struct.pack(TypeCodeForStruct(bits) * len(s), *s)
|
||||
stdout_buf.write(a)
|
||||
if sys.stdout.isatty():
|
||||
Usage()
|
||||
s = Merge(files)
|
||||
bits = 32
|
||||
if max(s) > 0xFFFFFFFF:
|
||||
bits = 64
|
||||
stdout_buf = getattr(sys.stdout, "buffer", sys.stdout)
|
||||
array.array("I", MagicForBits(bits)).tofile(stdout_buf)
|
||||
a = struct.pack(TypeCodeForStruct(bits) * len(s), *s)
|
||||
stdout_buf.write(a)
|
||||
|
||||
|
||||
def UnpackOneFile(path):
|
||||
with open(path, mode="rb") as f:
|
||||
sys.stderr.write("%s: unpacking %s\n" % (prog_name, path))
|
||||
while True:
|
||||
header = f.read(12)
|
||||
if not header: return
|
||||
if len(header) < 12:
|
||||
break
|
||||
pid, module_length, blob_size = struct.unpack('iII', header)
|
||||
module = f.read(module_length).decode('utf-8')
|
||||
blob = f.read(blob_size)
|
||||
assert(len(module) == module_length)
|
||||
assert(len(blob) == blob_size)
|
||||
extracted_file = "%s.%d.sancov" % (module, pid)
|
||||
sys.stderr.write("%s: extracting %s\n" % (prog_name, extracted_file))
|
||||
# The packed file may contain multiple blobs for the same pid/module
|
||||
# pair. Append to the end of the file instead of overwriting.
|
||||
with open(extracted_file, 'ab') as f2:
|
||||
f2.write(blob)
|
||||
# fail
|
||||
raise Exception('Error reading file %s' % path)
|
||||
with open(path, mode="rb") as f:
|
||||
sys.stderr.write("%s: unpacking %s\n" % (prog_name, path))
|
||||
while True:
|
||||
header = f.read(12)
|
||||
if not header:
|
||||
return
|
||||
if len(header) < 12:
|
||||
break
|
||||
pid, module_length, blob_size = struct.unpack("iII", header)
|
||||
module = f.read(module_length).decode("utf-8")
|
||||
blob = f.read(blob_size)
|
||||
assert len(module) == module_length
|
||||
assert len(blob) == blob_size
|
||||
extracted_file = "%s.%d.sancov" % (module, pid)
|
||||
sys.stderr.write("%s: extracting %s\n" % (prog_name, extracted_file))
|
||||
# The packed file may contain multiple blobs for the same pid/module
|
||||
# pair. Append to the end of the file instead of overwriting.
|
||||
with open(extracted_file, "ab") as f2:
|
||||
f2.write(blob)
|
||||
# fail
|
||||
raise Exception("Error reading file %s" % path)
|
||||
|
||||
|
||||
def Unpack(files):
|
||||
for f in files:
|
||||
UnpackOneFile(f)
|
||||
for f in files:
|
||||
UnpackOneFile(f)
|
||||
|
||||
|
||||
def UnpackOneRawFile(path, map_path):
|
||||
mem_map = []
|
||||
with open(map_path, mode="rt") as f_map:
|
||||
sys.stderr.write("%s: reading map %s\n" % (prog_name, map_path))
|
||||
bits = int(f_map.readline())
|
||||
if bits != 32 and bits != 64:
|
||||
raise Exception('Wrong bits size in the map')
|
||||
for line in f_map:
|
||||
parts = line.rstrip().split()
|
||||
mem_map.append((int(parts[0], 16),
|
||||
int(parts[1], 16),
|
||||
int(parts[2], 16),
|
||||
' '.join(parts[3:])))
|
||||
mem_map.sort(key=lambda m : m[0])
|
||||
mem_map_keys = [m[0] for m in mem_map]
|
||||
mem_map = []
|
||||
with open(map_path, mode="rt") as f_map:
|
||||
sys.stderr.write("%s: reading map %s\n" % (prog_name, map_path))
|
||||
bits = int(f_map.readline())
|
||||
if bits != 32 and bits != 64:
|
||||
raise Exception("Wrong bits size in the map")
|
||||
for line in f_map:
|
||||
parts = line.rstrip().split()
|
||||
mem_map.append(
|
||||
(
|
||||
int(parts[0], 16),
|
||||
int(parts[1], 16),
|
||||
int(parts[2], 16),
|
||||
" ".join(parts[3:]),
|
||||
)
|
||||
)
|
||||
mem_map.sort(key=lambda m: m[0])
|
||||
mem_map_keys = [m[0] for m in mem_map]
|
||||
|
||||
with open(path, mode="rb") as f:
|
||||
sys.stderr.write("%s: unpacking %s\n" % (prog_name, path))
|
||||
with open(path, mode="rb") as f:
|
||||
sys.stderr.write("%s: unpacking %s\n" % (prog_name, path))
|
||||
|
||||
f.seek(0, 2)
|
||||
size = f.tell()
|
||||
f.seek(0, 0)
|
||||
pcs = struct.unpack_from(TypeCodeForStruct(bits) * (size * 8 // bits), f.read(size))
|
||||
mem_map_pcs = [[] for i in range(0, len(mem_map))]
|
||||
f.seek(0, 2)
|
||||
size = f.tell()
|
||||
f.seek(0, 0)
|
||||
pcs = struct.unpack_from(
|
||||
TypeCodeForStruct(bits) * (size * 8 // bits), f.read(size)
|
||||
)
|
||||
mem_map_pcs = [[] for i in range(0, len(mem_map))]
|
||||
|
||||
for pc in pcs:
|
||||
if pc == 0: continue
|
||||
map_idx = bisect.bisect(mem_map_keys, pc) - 1
|
||||
(start, end, base, module_path) = mem_map[map_idx]
|
||||
assert pc >= start
|
||||
if pc >= end:
|
||||
sys.stderr.write("warning: %s: pc %x outside of any known mapping\n" % (prog_name, pc))
|
||||
continue
|
||||
mem_map_pcs[map_idx].append(pc - base)
|
||||
for pc in pcs:
|
||||
if pc == 0:
|
||||
continue
|
||||
map_idx = bisect.bisect(mem_map_keys, pc) - 1
|
||||
(start, end, base, module_path) = mem_map[map_idx]
|
||||
assert pc >= start
|
||||
if pc >= end:
|
||||
sys.stderr.write(
|
||||
"warning: %s: pc %x outside of any known mapping\n"
|
||||
% (prog_name, pc)
|
||||
)
|
||||
continue
|
||||
mem_map_pcs[map_idx].append(pc - base)
|
||||
|
||||
for ((start, end, base, module_path), pc_list) in zip(mem_map, mem_map_pcs):
|
||||
if len(pc_list) == 0:
|
||||
continue
|
||||
assert path.endswith(".sancov.raw")
|
||||
dst_path = module_path + "." + os.path.basename(path)[:-4]
|
||||
sys.stderr.write(
|
||||
"%s: writing %d PCs to %s\n" % (prog_name, len(pc_list), dst_path)
|
||||
)
|
||||
sorted_pc_list = sorted(pc_list)
|
||||
pc_buffer = struct.pack(
|
||||
TypeCodeForStruct(bits) * len(pc_list), *sorted_pc_list
|
||||
)
|
||||
with open(dst_path, "ab+") as f2:
|
||||
array.array("I", MagicForBits(bits)).tofile(f2)
|
||||
f2.seek(0, 2)
|
||||
f2.write(pc_buffer)
|
||||
|
||||
for ((start, end, base, module_path), pc_list) in zip(mem_map, mem_map_pcs):
|
||||
if len(pc_list) == 0: continue
|
||||
assert path.endswith('.sancov.raw')
|
||||
dst_path = module_path + '.' + os.path.basename(path)[:-4]
|
||||
sys.stderr.write("%s: writing %d PCs to %s\n" % (prog_name, len(pc_list), dst_path))
|
||||
sorted_pc_list = sorted(pc_list)
|
||||
pc_buffer = struct.pack(TypeCodeForStruct(bits) * len(pc_list), *sorted_pc_list)
|
||||
with open(dst_path, 'ab+') as f2:
|
||||
array.array('I', MagicForBits(bits)).tofile(f2)
|
||||
f2.seek(0, 2)
|
||||
f2.write(pc_buffer)
|
||||
|
||||
def RawUnpack(files):
|
||||
for f in files:
|
||||
if not f.endswith('.sancov.raw'):
|
||||
raise Exception('Unexpected raw file name %s' % f)
|
||||
f_map = f[:-3] + 'map'
|
||||
UnpackOneRawFile(f, f_map)
|
||||
for f in files:
|
||||
if not f.endswith(".sancov.raw"):
|
||||
raise Exception("Unexpected raw file name %s" % f)
|
||||
f_map = f[:-3] + "map"
|
||||
UnpackOneRawFile(f, f_map)
|
||||
|
||||
|
||||
def GetInstrumentedPCs(binary):
|
||||
# This looks scary, but all it does is extract all offsets where we call:
|
||||
# - __sanitizer_cov() or __sanitizer_cov_with_check(),
|
||||
# - with call or callq,
|
||||
# - directly or via PLT.
|
||||
cmd = r"objdump --no-show-raw-insn -d %s | " \
|
||||
r"grep '^\s\+[0-9a-f]\+:\s\+call\(q\|\)\s\+\(0x\|\)[0-9a-f]\+ <__sanitizer_cov\(_with_check\|\|_trace_pc_guard\)\(@plt\|\)>' | " \
|
||||
# This looks scary, but all it does is extract all offsets where we call:
|
||||
# - __sanitizer_cov() or __sanitizer_cov_with_check(),
|
||||
# - with call or callq,
|
||||
# - directly or via PLT.
|
||||
cmd = (
|
||||
r"objdump --no-show-raw-insn -d %s | "
|
||||
r"grep '^\s\+[0-9a-f]\+:\s\+call\(q\|\)\s\+\(0x\|\)[0-9a-f]\+ <__sanitizer_cov\(_with_check\|\|_trace_pc_guard\)\(@plt\|\)>' | "
|
||||
r"grep -o '^\s\+[0-9a-f]\+'" % binary
|
||||
lines = subprocess.check_output(cmd, stdin=subprocess.PIPE, shell=True).splitlines()
|
||||
# The PCs we get from objdump are off by 4 bytes, as they point to the
|
||||
# beginning of the callq instruction. Empirically this is true on x86 and
|
||||
# x86_64.
|
||||
return set(int(line.strip(), 16) + 4 for line in lines)
|
||||
)
|
||||
lines = subprocess.check_output(cmd, stdin=subprocess.PIPE, shell=True).splitlines()
|
||||
# The PCs we get from objdump are off by 4 bytes, as they point to the
|
||||
# beginning of the callq instruction. Empirically this is true on x86 and
|
||||
# x86_64.
|
||||
return set(int(line.strip(), 16) + 4 for line in lines)
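(Not part of the diff: GetInstrumentedPCs shells out to objdump/grep and then nudges every matched call-site offset forward by 4 bytes. The address arithmetic alone, on made-up objdump-style offsets after the grep -o step has stripped everything but the leading hex field.)

# Hypothetical call-site offsets as left by the grep pipeline (illustrative).
call_sites = ["  401234:", "  401301:"]
pcs = set(int(s.strip().rstrip(":"), 16) + 4 for s in call_sites)
print(sorted(hex(pc) for pc in pcs))  # ['0x401238', '0x401305']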
|
||||
|
||||
|
||||
def PrintMissing(binary):
|
||||
if not os.path.isfile(binary):
|
||||
raise Exception('File not found: %s' % binary)
|
||||
instrumented = GetInstrumentedPCs(binary)
|
||||
sys.stderr.write("%s: found %d instrumented PCs in %s\n" % (prog_name,
|
||||
len(instrumented),
|
||||
binary))
|
||||
covered = set(int(line, 16) for line in sys.stdin)
|
||||
sys.stderr.write("%s: read %d PCs from stdin\n" % (prog_name, len(covered)))
|
||||
missing = instrumented - covered
|
||||
sys.stderr.write("%s: %d PCs missing from coverage\n" % (prog_name, len(missing)))
|
||||
if (len(missing) > len(instrumented) - len(covered)):
|
||||
if not os.path.isfile(binary):
|
||||
raise Exception("File not found: %s" % binary)
|
||||
instrumented = GetInstrumentedPCs(binary)
|
||||
sys.stderr.write(
|
||||
"%s: WARNING: stdin contains PCs not found in binary\n" % prog_name
|
||||
"%s: found %d instrumented PCs in %s\n" % (prog_name, len(instrumented), binary)
|
||||
)
|
||||
for pc in sorted(missing):
|
||||
print("0x%x" % pc)
|
||||
covered = set(int(line, 16) for line in sys.stdin)
|
||||
sys.stderr.write("%s: read %d PCs from stdin\n" % (prog_name, len(covered)))
|
||||
missing = instrumented - covered
|
||||
sys.stderr.write("%s: %d PCs missing from coverage\n" % (prog_name, len(missing)))
|
||||
if len(missing) > len(instrumented) - len(covered):
|
||||
sys.stderr.write(
|
||||
"%s: WARNING: stdin contains PCs not found in binary\n" % prog_name
|
||||
)
|
||||
for pc in sorted(missing):
|
||||
print("0x%x" % pc)
|
||||
|
||||
if __name__ == '__main__':
|
||||
prog_name = sys.argv[0]
|
||||
if len(sys.argv) <= 2:
|
||||
Usage();
|
||||
|
||||
if sys.argv[1] == "missing":
|
||||
if len(sys.argv) != 3:
|
||||
Usage()
|
||||
PrintMissing(sys.argv[2])
|
||||
exit(0)
|
||||
if __name__ == "__main__":
|
||||
prog_name = sys.argv[0]
|
||||
if len(sys.argv) <= 2:
|
||||
Usage()
|
||||
|
||||
file_list = []
|
||||
for f in sys.argv[2:]:
|
||||
file_list += glob.glob(f)
|
||||
if not file_list:
|
||||
Usage()
|
||||
if sys.argv[1] == "missing":
|
||||
if len(sys.argv) != 3:
|
||||
Usage()
|
||||
PrintMissing(sys.argv[2])
|
||||
exit(0)
|
||||
|
||||
if sys.argv[1] == "print":
|
||||
PrintFiles(file_list)
|
||||
elif sys.argv[1] == "merge":
|
||||
MergeAndPrint(file_list)
|
||||
elif sys.argv[1] == "unpack":
|
||||
Unpack(file_list)
|
||||
elif sys.argv[1] == "rawunpack":
|
||||
RawUnpack(file_list)
|
||||
else:
|
||||
Usage()
|
||||
file_list = []
|
||||
for f in sys.argv[2:]:
|
||||
file_list += glob.glob(f)
|
||||
if not file_list:
|
||||
Usage()
|
||||
|
||||
if sys.argv[1] == "print":
|
||||
PrintFiles(file_list)
|
||||
elif sys.argv[1] == "merge":
|
||||
MergeAndPrint(file_list)
|
||||
elif sys.argv[1] == "unpack":
|
||||
Unpack(file_list)
|
||||
elif sys.argv[1] == "rawunpack":
|
||||
RawUnpack(file_list)
|
||||
else:
|
||||
Usage()
|
||||
|
||||
@@ -1,11 +1,12 @@
def getRoot(config):
if not config.parent:
return config
return getRoot(config.parent)
if not config.parent:
return config
return getRoot(config.parent)


root = getRoot(config)

if root.android != "1":
config.unsupported = True
config.unsupported = True

config.substitutions.append( ("%device", "/data/local/tmp/Output") )
config.substitutions.append(("%device", "/data/local/tmp/Output"))

||||
@@ -1,9 +1,10 @@
|
||||
def getRoot(config):
|
||||
if not config.parent:
|
||||
return config
|
||||
return getRoot(config.parent)
|
||||
if not config.parent:
|
||||
return config
|
||||
return getRoot(config.parent)
|
||||
|
||||
|
||||
root = getRoot(config)
|
||||
|
||||
if root.host_os not in ['Darwin']:
|
||||
config.unsupported = True
|
||||
if root.host_os not in ["Darwin"]:
|
||||
config.unsupported = True
|
||||
|
||||
@@ -1,9 +1,10 @@
|
||||
def getRoot(config):
|
||||
if not config.parent:
|
||||
return config
|
||||
return getRoot(config.parent)
|
||||
if not config.parent:
|
||||
return config
|
||||
return getRoot(config.parent)
|
||||
|
||||
|
||||
root = getRoot(config)
|
||||
|
||||
if root.host_os not in ['Linux']:
|
||||
config.unsupported = True
|
||||
if root.host_os not in ["Linux"]:
|
||||
config.unsupported = True
|
||||
|
||||
@@ -1,17 +1,20 @@
|
||||
class NoOpPlugin(AsanSymbolizerPlugIn):
|
||||
def register_cmdline_args(self, parser):
|
||||
logging.info('Adding --unlikely-option-name-XXX option')
|
||||
parser.add_argument('--unlikely-option-name-XXX', type=int, default=0)
|
||||
def register_cmdline_args(self, parser):
|
||||
logging.info("Adding --unlikely-option-name-XXX option")
|
||||
parser.add_argument("--unlikely-option-name-XXX", type=int, default=0)
|
||||
|
||||
def process_cmdline_args(self, pargs):
|
||||
logging.info('GOT --unlikely-option-name-XXX=%d', pargs.unlikely_option_name_XXX)
|
||||
return True
|
||||
def process_cmdline_args(self, pargs):
|
||||
logging.info(
|
||||
"GOT --unlikely-option-name-XXX=%d", pargs.unlikely_option_name_XXX
|
||||
)
|
||||
return True
|
||||
|
||||
def destroy(self):
|
||||
logging.info('destroy() called on NoOpPlugin')
|
||||
def destroy(self):
|
||||
logging.info("destroy() called on NoOpPlugin")
|
||||
|
||||
def filter_binary_path(self, path):
|
||||
logging.info("filter_binary_path called in NoOpPlugin")
|
||||
return path
|
||||
|
||||
def filter_binary_path(self, path):
|
||||
logging.info('filter_binary_path called in NoOpPlugin')
|
||||
return path
|
||||
|
||||
register_plugin(NoOpPlugin())
|
||||
|
||||
@@ -1,7 +1,8 @@
|
||||
import logging
|
||||
|
||||
|
||||
class FailOncePlugin(AsanSymbolizerPlugIn):
|
||||
"""
|
||||
"""
|
||||
This is a simple plug-in that always claims
|
||||
that a binary can't be symbolized on the first
|
||||
call but succeeds for all subsequent calls.
|
||||
@@ -14,18 +15,20 @@ class FailOncePlugin(AsanSymbolizerPlugIn):
|
||||
that didn't increment the frame counter which
|
||||
caused subsequent symbolization attempts to
|
||||
print the wrong frame number.
|
||||
"""
|
||||
def __init__(self):
|
||||
self.should_fail = True
|
||||
pass
|
||||
"""
|
||||
|
||||
def __init__(self):
|
||||
self.should_fail = True
|
||||
pass
|
||||
|
||||
def filter_binary_path(self, path):
|
||||
logging.info("filter_binary_path called in NoOpPlugin")
|
||||
if self.should_fail:
|
||||
logging.info("Doing first fail")
|
||||
self.should_fail = False
|
||||
return None
|
||||
logging.info("Doing succeed")
|
||||
return path
|
||||
|
||||
def filter_binary_path(self, path):
|
||||
logging.info('filter_binary_path called in NoOpPlugin')
|
||||
if self.should_fail:
|
||||
logging.info('Doing first fail')
|
||||
self.should_fail = False
|
||||
return None
|
||||
logging.info('Doing succeed')
|
||||
return path
|
||||
|
||||
register_plugin(FailOncePlugin())
|
||||
|
||||
@@ -1,9 +1,10 @@
|
||||
def getRoot(config):
|
||||
if not config.parent:
|
||||
return config
|
||||
return getRoot(config.parent)
|
||||
if not config.parent:
|
||||
return config
|
||||
return getRoot(config.parent)
|
||||
|
||||
|
||||
root = getRoot(config)
|
||||
|
||||
if root.host_os in ['Windows']:
|
||||
config.unsupported = True
|
||||
if root.host_os in ["Windows"]:
|
||||
config.unsupported = True
|
||||
|
||||
@@ -1,9 +1,10 @@
|
||||
def getRoot(config):
|
||||
if not config.parent:
|
||||
return config
|
||||
return getRoot(config.parent)
|
||||
if not config.parent:
|
||||
return config
|
||||
return getRoot(config.parent)
|
||||
|
||||
|
||||
root = getRoot(config)
|
||||
|
||||
if root.host_os not in ['Windows']:
|
||||
config.unsupported = True
|
||||
if root.host_os not in ["Windows"]:
|
||||
config.unsupported = True
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import re
|
||||
|
||||
if not re.match(r'.*-windows-msvc$', config.target_triple):
|
||||
if not re.match(r".*-windows-msvc$", config.target_triple):
|
||||
config.unsupported = True
|
||||
|
||||
@@ -7,257 +7,321 @@ import shlex
|
||||
|
||||
import lit.formats
|
||||
|
||||
|
||||
def get_required_attr(config, attr_name):
|
||||
attr_value = getattr(config, attr_name, None)
|
||||
if attr_value == None:
|
||||
lit_config.fatal(
|
||||
"No attribute %r in test configuration! You may need to run "
|
||||
"tests from your build directory or add this attribute "
|
||||
"to lit.site.cfg.py " % attr_name)
|
||||
return attr_value
|
||||
attr_value = getattr(config, attr_name, None)
|
||||
if attr_value == None:
|
||||
lit_config.fatal(
|
||||
"No attribute %r in test configuration! You may need to run "
|
||||
"tests from your build directory or add this attribute "
|
||||
"to lit.site.cfg.py " % attr_name
|
||||
)
|
||||
return attr_value
|
||||
|
||||
|
||||
def push_dynamic_library_lookup_path(config, new_path):
|
||||
if platform.system() == 'Windows':
|
||||
dynamic_library_lookup_var = 'PATH'
|
||||
elif platform.system() == 'Darwin':
|
||||
dynamic_library_lookup_var = 'DYLD_LIBRARY_PATH'
|
||||
else:
|
||||
dynamic_library_lookup_var = 'LD_LIBRARY_PATH'
|
||||
if platform.system() == "Windows":
|
||||
dynamic_library_lookup_var = "PATH"
|
||||
elif platform.system() == "Darwin":
|
||||
dynamic_library_lookup_var = "DYLD_LIBRARY_PATH"
|
||||
else:
|
||||
dynamic_library_lookup_var = "LD_LIBRARY_PATH"
|
||||
|
||||
new_ld_library_path = os.path.pathsep.join(
|
||||
(new_path, config.environment.get(dynamic_library_lookup_var, '')))
|
||||
config.environment[dynamic_library_lookup_var] = new_ld_library_path
|
||||
new_ld_library_path = os.path.pathsep.join(
|
||||
(new_path, config.environment.get(dynamic_library_lookup_var, ""))
|
||||
)
|
||||
config.environment[dynamic_library_lookup_var] = new_ld_library_path
|
||||
|
||||
if platform.system() == 'FreeBSD':
|
||||
dynamic_library_lookup_var = 'LD_32_LIBRARY_PATH'
|
||||
new_ld_32_library_path = os.path.pathsep.join(
|
||||
(new_path, config.environment.get(dynamic_library_lookup_var, '')))
|
||||
config.environment[dynamic_library_lookup_var] = new_ld_32_library_path
|
||||
if platform.system() == "FreeBSD":
|
||||
dynamic_library_lookup_var = "LD_32_LIBRARY_PATH"
|
||||
new_ld_32_library_path = os.path.pathsep.join(
|
||||
(new_path, config.environment.get(dynamic_library_lookup_var, ""))
|
||||
)
|
||||
config.environment[dynamic_library_lookup_var] = new_ld_32_library_path
|
||||
|
||||
if platform.system() == 'SunOS':
|
||||
dynamic_library_lookup_var = 'LD_LIBRARY_PATH_32'
|
||||
new_ld_library_path_32 = os.path.pathsep.join(
|
||||
(new_path, config.environment.get(dynamic_library_lookup_var, '')))
|
||||
config.environment[dynamic_library_lookup_var] = new_ld_library_path_32
|
||||
if platform.system() == "SunOS":
|
||||
dynamic_library_lookup_var = "LD_LIBRARY_PATH_32"
|
||||
new_ld_library_path_32 = os.path.pathsep.join(
|
||||
(new_path, config.environment.get(dynamic_library_lookup_var, ""))
|
||||
)
|
||||
config.environment[dynamic_library_lookup_var] = new_ld_library_path_32
|
||||
|
||||
dynamic_library_lookup_var = "LD_LIBRARY_PATH_64"
|
||||
new_ld_library_path_64 = os.path.pathsep.join(
|
||||
(new_path, config.environment.get(dynamic_library_lookup_var, ""))
|
||||
)
|
||||
config.environment[dynamic_library_lookup_var] = new_ld_library_path_64
|
||||
|
||||
dynamic_library_lookup_var = 'LD_LIBRARY_PATH_64'
|
||||
new_ld_library_path_64 = os.path.pathsep.join(
|
||||
(new_path, config.environment.get(dynamic_library_lookup_var, '')))
|
||||
config.environment[dynamic_library_lookup_var] = new_ld_library_path_64
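(Not part of the diff: push_dynamic_library_lookup_path prepends a directory to whichever loader search-path variable the host uses — PATH, DYLD_LIBRARY_PATH, LD_LIBRARY_PATH, plus the 32/64-bit variants on FreeBSD and SunOS. The prepend itself is just os.path.pathsep.join; a tiny sketch with a stand-in environment dict and an illustrative path.)

import os
import platform

# Stand-in for the lit config.environment mapping; values are illustrative.
environment = {"LD_LIBRARY_PATH": "/usr/lib"}

if platform.system() == "Windows":
    var = "PATH"
elif platform.system() == "Darwin":
    var = "DYLD_LIBRARY_PATH"
else:
    var = "LD_LIBRARY_PATH"

environment[var] = os.path.pathsep.join(
    ("/tmp/compiler-rt/lib", environment.get(var, ""))
)
print(environment[var])  # e.g. /tmp/compiler-rt/lib:/usr/lib on Linux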
|
||||
|
||||
# Setup config name.
|
||||
config.name = 'AddressSanitizer' + config.name_suffix
|
||||
config.name = "AddressSanitizer" + config.name_suffix
|
||||
|
||||
# Platform-specific default ASAN_OPTIONS for lit tests.
|
||||
default_asan_opts = list(config.default_sanitizer_opts)
|
||||
|
||||
# On Darwin, leak checking is not enabled by default. Enable on macOS
|
||||
# tests to prevent regressions
|
||||
if config.host_os == 'Darwin' and config.apple_platform == 'osx':
|
||||
default_asan_opts += ['detect_leaks=1']
|
||||
if config.host_os == "Darwin" and config.apple_platform == "osx":
|
||||
default_asan_opts += ["detect_leaks=1"]
|
||||
|
||||
default_asan_opts_str = ':'.join(default_asan_opts)
|
||||
default_asan_opts_str = ":".join(default_asan_opts)
|
||||
if default_asan_opts_str:
|
||||
config.environment['ASAN_OPTIONS'] = default_asan_opts_str
|
||||
default_asan_opts_str += ':'
|
||||
config.substitutions.append(('%env_asan_opts=',
|
||||
'env ASAN_OPTIONS=' + default_asan_opts_str))
|
||||
config.environment["ASAN_OPTIONS"] = default_asan_opts_str
|
||||
default_asan_opts_str += ":"
|
||||
config.substitutions.append(
|
||||
("%env_asan_opts=", "env ASAN_OPTIONS=" + default_asan_opts_str)
|
||||
)
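(Not part of the diff: the net effect of this substitution is that a test RUN line can write %env_asan_opts=foo=1 and get an env invocation carrying the platform defaults plus the per-test options. lit's real substitution machinery is more involved; this only shows the string effect, with an illustrative default.)

default_asan_opts_str = "detect_leaks=1:"  # what macOS tests would see
substitution = ("%env_asan_opts=", "env ASAN_OPTIONS=" + default_asan_opts_str)
run_line = "%env_asan_opts=halt_on_error=0 %run %t"
print(run_line.replace(*substitution))
# env ASAN_OPTIONS=detect_leaks=1:halt_on_error=0 %run %t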
|
||||
|
||||
# Setup source root.
|
||||
config.test_source_root = os.path.dirname(__file__)
|
||||
|
||||
if config.host_os not in ['FreeBSD', 'NetBSD']:
|
||||
libdl_flag = "-ldl"
|
||||
if config.host_os not in ["FreeBSD", "NetBSD"]:
|
||||
libdl_flag = "-ldl"
|
||||
else:
|
||||
libdl_flag = ""
|
||||
libdl_flag = ""
|
||||
|
||||
# GCC-ASan doesn't link in all the necessary libraries automatically, so
|
||||
# we have to do it ourselves.
|
||||
if config.compiler_id == 'GNU':
|
||||
extra_link_flags = ["-pthread", "-lstdc++", libdl_flag]
|
||||
if config.compiler_id == "GNU":
|
||||
extra_link_flags = ["-pthread", "-lstdc++", libdl_flag]
|
||||
else:
|
||||
extra_link_flags = []
|
||||
extra_link_flags = []
|
||||
|
||||
# Setup default compiler flags used with -fsanitize=address option.
|
||||
# FIXME: Review the set of required flags and check if it can be reduced.
|
||||
target_cflags = [get_required_attr(config, "target_cflags")] + extra_link_flags
|
||||
target_cxxflags = config.cxx_mode_flags + target_cflags
|
||||
clang_asan_static_cflags = (["-fsanitize=address",
|
||||
"-mno-omit-leaf-frame-pointer",
|
||||
"-fno-omit-frame-pointer",
|
||||
"-fno-optimize-sibling-calls"] +
|
||||
config.debug_info_flags + target_cflags)
|
||||
if config.target_arch == 's390x':
|
||||
clang_asan_static_cflags.append("-mbackchain")
|
||||
clang_asan_static_cflags = (
|
||||
[
|
||||
"-fsanitize=address",
|
||||
"-mno-omit-leaf-frame-pointer",
|
||||
"-fno-omit-frame-pointer",
|
||||
"-fno-optimize-sibling-calls",
|
||||
]
|
||||
+ config.debug_info_flags
|
||||
+ target_cflags
|
||||
)
|
||||
if config.target_arch == "s390x":
|
||||
clang_asan_static_cflags.append("-mbackchain")
|
||||
clang_asan_static_cxxflags = config.cxx_mode_flags + clang_asan_static_cflags
|
||||
|
||||
target_is_msvc = bool(re.match(r'.*-windows-msvc$', config.target_triple))
|
||||
target_is_msvc = bool(re.match(r".*-windows-msvc$", config.target_triple))
|
||||
|
||||
asan_dynamic_flags = []
|
||||
if config.asan_dynamic:
|
||||
asan_dynamic_flags = ["-shared-libasan"]
|
||||
if platform.system() == 'Windows' and target_is_msvc:
|
||||
# On MSVC target, we need to simulate "clang-cl /MD" on the clang driver side.
|
||||
asan_dynamic_flags += ["-D_MT", "-D_DLL", "-Wl,-nodefaultlib:libcmt,-defaultlib:msvcrt,-defaultlib:oldnames"]
|
||||
elif platform.system() == 'FreeBSD':
|
||||
# On FreeBSD, we need to add -pthread to ensure pthread functions are available.
|
||||
asan_dynamic_flags += ['-pthread']
|
||||
config.available_features.add("asan-dynamic-runtime")
|
||||
asan_dynamic_flags = ["-shared-libasan"]
|
||||
if platform.system() == "Windows" and target_is_msvc:
|
||||
# On MSVC target, we need to simulate "clang-cl /MD" on the clang driver side.
|
||||
asan_dynamic_flags += [
|
||||
"-D_MT",
|
||||
"-D_DLL",
|
||||
"-Wl,-nodefaultlib:libcmt,-defaultlib:msvcrt,-defaultlib:oldnames",
|
||||
]
|
||||
elif platform.system() == "FreeBSD":
|
||||
# On FreeBSD, we need to add -pthread to ensure pthread functions are available.
|
||||
asan_dynamic_flags += ["-pthread"]
|
||||
config.available_features.add("asan-dynamic-runtime")
|
||||
else:
|
||||
config.available_features.add("asan-static-runtime")
|
||||
config.available_features.add("asan-static-runtime")
|
||||
clang_asan_cflags = clang_asan_static_cflags + asan_dynamic_flags
|
||||
clang_asan_cxxflags = clang_asan_static_cxxflags + asan_dynamic_flags
|
||||
|
||||
# Add win32-(static|dynamic)-asan features to mark tests as passing or failing
|
||||
# in those modes. lit doesn't support logical feature test combinations.
|
||||
if platform.system() == 'Windows':
|
||||
if config.asan_dynamic:
|
||||
win_runtime_feature = "win32-dynamic-asan"
|
||||
else:
|
||||
win_runtime_feature = "win32-static-asan"
|
||||
config.available_features.add(win_runtime_feature)
|
||||
if platform.system() == "Windows":
|
||||
if config.asan_dynamic:
|
||||
win_runtime_feature = "win32-dynamic-asan"
|
||||
else:
|
||||
win_runtime_feature = "win32-static-asan"
|
||||
config.available_features.add(win_runtime_feature)
|
||||
|
||||
|
||||
def build_invocation(compile_flags):
|
||||
return " " + " ".join([config.clang] + compile_flags) + " "
|
||||
return " " + " ".join([config.clang] + compile_flags) + " "
|
||||
|
||||
config.substitutions.append( ("%clang ", build_invocation(target_cflags)) )
|
||||
config.substitutions.append( ("%clangxx ", build_invocation(target_cxxflags)) )
|
||||
config.substitutions.append( ("%clang_asan ", build_invocation(clang_asan_cflags)) )
|
||||
config.substitutions.append( ("%clangxx_asan ", build_invocation(clang_asan_cxxflags)) )
|
||||
|
||||
config.substitutions.append(("%clang ", build_invocation(target_cflags)))
|
||||
config.substitutions.append(("%clangxx ", build_invocation(target_cxxflags)))
|
||||
config.substitutions.append(("%clang_asan ", build_invocation(clang_asan_cflags)))
|
||||
config.substitutions.append(("%clangxx_asan ", build_invocation(clang_asan_cxxflags)))
|
||||
if config.asan_dynamic:
|
||||
if config.host_os in ['Linux', 'FreeBSD', 'NetBSD', 'SunOS']:
|
||||
shared_libasan_path = os.path.join(config.compiler_rt_libdir, "libclang_rt.asan{}.so".format(config.target_suffix))
|
||||
elif config.host_os == 'Darwin':
|
||||
shared_libasan_path = os.path.join(config.compiler_rt_libdir, 'libclang_rt.asan_{}_dynamic.dylib'.format(config.apple_platform))
|
||||
else:
|
||||
lit_config.warning('%shared_libasan substitution not set but dynamic ASan is available.')
|
||||
shared_libasan_path = None
|
||||
if config.host_os in ["Linux", "FreeBSD", "NetBSD", "SunOS"]:
|
||||
shared_libasan_path = os.path.join(
|
||||
config.compiler_rt_libdir,
|
||||
"libclang_rt.asan{}.so".format(config.target_suffix),
|
||||
)
|
||||
elif config.host_os == "Darwin":
|
||||
shared_libasan_path = os.path.join(
|
||||
config.compiler_rt_libdir,
|
||||
"libclang_rt.asan_{}_dynamic.dylib".format(config.apple_platform),
|
||||
)
|
||||
else:
|
||||
lit_config.warning(
|
||||
"%shared_libasan substitution not set but dynamic ASan is available."
|
||||
)
|
||||
shared_libasan_path = None
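(Not part of the diff: the dynamic runtime's file name is assembled from the compiler-rt libdir plus a per-target suffix. A sketch with made-up values for both:)

import os

compiler_rt_libdir = "/opt/llvm/lib/clang/17/lib/linux"  # illustrative
target_suffix = "-x86_64"  # illustrative
print(os.path.join(compiler_rt_libdir, "libclang_rt.asan{}.so".format(target_suffix)))
# /opt/llvm/lib/clang/17/lib/linux/libclang_rt.asan-x86_64.so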
|
||||
|
||||
if shared_libasan_path is not None:
|
||||
config.substitutions.append( ("%shared_libasan", shared_libasan_path) )
|
||||
config.substitutions.append( ("%clang_asan_static ", build_invocation(clang_asan_static_cflags)) )
|
||||
config.substitutions.append( ("%clangxx_asan_static ", build_invocation(clang_asan_static_cxxflags)) )
|
||||
if shared_libasan_path is not None:
|
||||
config.substitutions.append(("%shared_libasan", shared_libasan_path))
|
||||
config.substitutions.append(
|
||||
("%clang_asan_static ", build_invocation(clang_asan_static_cflags))
|
||||
)
|
||||
config.substitutions.append(
|
||||
("%clangxx_asan_static ", build_invocation(clang_asan_static_cxxflags))
|
||||
)
|
||||
|
||||
if platform.system() == 'Windows':
|
||||
# MSVC-specific tests might also use the clang-cl.exe driver.
|
||||
if target_is_msvc:
|
||||
clang_cl_cxxflags = ["-Wno-deprecated-declarations",
|
||||
"-WX",
|
||||
"-D_HAS_EXCEPTIONS=0",
|
||||
"-Zi"] + target_cflags
|
||||
clang_cl_asan_cxxflags = ["-fsanitize=address"] + clang_cl_cxxflags
|
||||
if config.asan_dynamic:
|
||||
clang_cl_asan_cxxflags.append("-MD")
|
||||
if platform.system() == "Windows":
|
||||
# MSVC-specific tests might also use the clang-cl.exe driver.
|
||||
if target_is_msvc:
|
||||
clang_cl_cxxflags = [
|
||||
"-Wno-deprecated-declarations",
|
||||
"-WX",
|
||||
"-D_HAS_EXCEPTIONS=0",
|
||||
"-Zi",
|
||||
] + target_cflags
|
||||
clang_cl_asan_cxxflags = ["-fsanitize=address"] + clang_cl_cxxflags
|
||||
if config.asan_dynamic:
|
||||
clang_cl_asan_cxxflags.append("-MD")
|
||||
|
||||
clang_cl_invocation = build_invocation(clang_cl_cxxflags)
|
||||
clang_cl_invocation = clang_cl_invocation.replace("clang.exe","clang-cl.exe")
|
||||
config.substitutions.append( ("%clang_cl ", clang_cl_invocation) )
|
||||
clang_cl_invocation = build_invocation(clang_cl_cxxflags)
|
||||
clang_cl_invocation = clang_cl_invocation.replace("clang.exe", "clang-cl.exe")
|
||||
config.substitutions.append(("%clang_cl ", clang_cl_invocation))
|
||||
|
||||
clang_cl_asan_invocation = build_invocation(clang_cl_asan_cxxflags)
|
||||
clang_cl_asan_invocation = clang_cl_asan_invocation.replace("clang.exe","clang-cl.exe")
|
||||
config.substitutions.append( ("%clang_cl_asan ", clang_cl_asan_invocation) )
|
||||
config.substitutions.append( ("%clang_cl_nocxx_asan ", clang_cl_asan_invocation) )
|
||||
config.substitutions.append( ("%Od", "-Od") )
|
||||
config.substitutions.append( ("%Fe", "-Fe") )
|
||||
config.substitutions.append( ("%LD", "-LD") )
|
||||
config.substitutions.append( ("%MD", "-MD") )
|
||||
config.substitutions.append( ("%MT", "-MT") )
|
||||
config.substitutions.append( ("%Gw", "-Gw") )
|
||||
clang_cl_asan_invocation = build_invocation(clang_cl_asan_cxxflags)
|
||||
clang_cl_asan_invocation = clang_cl_asan_invocation.replace(
|
||||
"clang.exe", "clang-cl.exe"
|
||||
)
|
||||
config.substitutions.append(("%clang_cl_asan ", clang_cl_asan_invocation))
|
||||
config.substitutions.append(("%clang_cl_nocxx_asan ", clang_cl_asan_invocation))
|
||||
config.substitutions.append(("%Od", "-Od"))
|
||||
config.substitutions.append(("%Fe", "-Fe"))
|
||||
config.substitutions.append(("%LD", "-LD"))
|
||||
config.substitutions.append(("%MD", "-MD"))
|
||||
config.substitutions.append(("%MT", "-MT"))
|
||||
config.substitutions.append(("%Gw", "-Gw"))
|
||||
|
||||
base_lib = os.path.join(config.compiler_rt_libdir, "clang_rt.asan%%s%s.lib" % config.target_suffix)
|
||||
config.substitutions.append( ("%asan_lib", base_lib % "") )
|
||||
config.substitutions.append( ("%asan_cxx_lib", base_lib % "_cxx") )
|
||||
config.substitutions.append( ("%asan_dll_thunk", base_lib % "_dll_thunk") )
|
||||
else:
|
||||
# To make some of these tests work on MinGW target without changing their
|
||||
# behaviour for MSVC target, substitute clang-cl flags with gcc-like ones.
|
||||
config.substitutions.append( ("%clang_cl ", build_invocation(target_cxxflags)) )
|
||||
config.substitutions.append( ("%clang_cl_asan ", build_invocation(clang_asan_cxxflags)) )
|
||||
config.substitutions.append( ("%clang_cl_nocxx_asan ", build_invocation(clang_asan_cflags)) )
|
||||
config.substitutions.append( ("%Od", "-O0") )
|
||||
config.substitutions.append( ("%Fe", "-o") )
|
||||
config.substitutions.append( ("%LD", "-shared") )
|
||||
config.substitutions.append( ("%MD", "") )
|
||||
config.substitutions.append( ("%MT", "") )
|
||||
config.substitutions.append( ("%Gw", "-fdata-sections") )
|
||||
base_lib = os.path.join(
|
||||
config.compiler_rt_libdir, "clang_rt.asan%%s%s.lib" % config.target_suffix
|
||||
)
|
||||
config.substitutions.append(("%asan_lib", base_lib % ""))
|
||||
config.substitutions.append(("%asan_cxx_lib", base_lib % "_cxx"))
|
||||
config.substitutions.append(("%asan_dll_thunk", base_lib % "_dll_thunk"))
|
||||
else:
|
||||
# To make some of these tests work on MinGW target without changing their
|
||||
# behaviour for MSVC target, substitute clang-cl flags with gcc-like ones.
|
||||
config.substitutions.append(("%clang_cl ", build_invocation(target_cxxflags)))
|
||||
config.substitutions.append(
|
||||
("%clang_cl_asan ", build_invocation(clang_asan_cxxflags))
|
||||
)
|
||||
config.substitutions.append(
|
||||
("%clang_cl_nocxx_asan ", build_invocation(clang_asan_cflags))
|
||||
)
|
||||
config.substitutions.append(("%Od", "-O0"))
|
||||
config.substitutions.append(("%Fe", "-o"))
|
||||
config.substitutions.append(("%LD", "-shared"))
|
||||
config.substitutions.append(("%MD", ""))
|
||||
config.substitutions.append(("%MT", ""))
|
||||
config.substitutions.append(("%Gw", "-fdata-sections"))
|
||||
|
||||
# FIXME: De-hardcode this path.
|
||||
asan_source_dir = os.path.join(
|
||||
get_required_attr(config, "compiler_rt_src_root"), "lib", "asan")
|
||||
get_required_attr(config, "compiler_rt_src_root"), "lib", "asan"
|
||||
)
|
||||
python_exec = shlex.quote(get_required_attr(config, "python_executable"))
|
||||
# Setup path to asan_symbolize.py script.
|
||||
asan_symbolize = os.path.join(asan_source_dir, "scripts", "asan_symbolize.py")
|
||||
if not os.path.exists(asan_symbolize):
|
||||
lit_config.fatal("Can't find script on path %r" % asan_symbolize)
|
||||
config.substitutions.append( ("%asan_symbolize", python_exec + " " + asan_symbolize + " ") )
|
||||
lit_config.fatal("Can't find script on path %r" % asan_symbolize)
|
||||
config.substitutions.append(
|
||||
("%asan_symbolize", python_exec + " " + asan_symbolize + " ")
|
||||
)
|
||||
# Setup path to sancov.py script.
|
||||
sanitizer_common_source_dir = os.path.join(
|
||||
get_required_attr(config, "compiler_rt_src_root"), "lib", "sanitizer_common")
|
||||
get_required_attr(config, "compiler_rt_src_root"), "lib", "sanitizer_common"
|
||||
)
|
||||
sancov = os.path.join(sanitizer_common_source_dir, "scripts", "sancov.py")
|
||||
if not os.path.exists(sancov):
|
||||
lit_config.fatal("Can't find script on path %r" % sancov)
|
||||
config.substitutions.append( ("%sancov ", python_exec + " " + sancov + " ") )
|
||||
lit_config.fatal("Can't find script on path %r" % sancov)
|
||||
config.substitutions.append(("%sancov ", python_exec + " " + sancov + " "))
|
||||
|
||||
# Determine kernel bitness
|
||||
if config.host_arch.find('64') != -1 and not config.android:
|
||||
kernel_bits = '64'
|
||||
if config.host_arch.find("64") != -1 and not config.android:
|
||||
kernel_bits = "64"
|
||||
else:
|
||||
kernel_bits = '32'
|
||||
kernel_bits = "32"
|
||||
|
||||
config.substitutions.append( ('CHECK-%kernel_bits', ("CHECK-kernel-" + kernel_bits + "-bits")))
|
||||
config.substitutions.append(
|
||||
("CHECK-%kernel_bits", ("CHECK-kernel-" + kernel_bits + "-bits"))
|
||||
)
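(Not part of the diff: this substitution lets a test write CHECK-%kernel_bits: lines and have FileCheck run with a prefix matching the detected kernel bitness. The string assembly is simply, with an illustrative host arch:)

host_arch = "x86_64"  # illustrative; the config also excludes Android here
kernel_bits = "64" if "64" in host_arch else "32"
print("CHECK-%kernel_bits" + " -> " + "CHECK-kernel-" + kernel_bits + "-bits")
# CHECK-%kernel_bits -> CHECK-kernel-64-bits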
|
||||
|
||||
config.substitutions.append( ("%libdl", libdl_flag) )
|
||||
config.substitutions.append(("%libdl", libdl_flag))
|
||||
|
||||
config.available_features.add("asan-" + config.bits + "-bits")
|
||||
|
||||
# Fast unwinder doesn't work with Thumb
|
||||
if not config.arm_thumb:
|
||||
config.available_features.add('fast-unwinder-works')
|
||||
config.available_features.add("fast-unwinder-works")
|
||||
|
||||
# Turn on leak detection on 64-bit Linux.
|
||||
leak_detection_android = config.android and 'android-thread-properties-api' in config.available_features and (config.target_arch in ['x86_64', 'i386', 'i686', 'aarch64'])
|
||||
leak_detection_linux = (config.host_os == 'Linux') and (not config.android) and (config.target_arch in ['x86_64', 'i386', 'riscv64', 'loongarch64'])
|
||||
leak_detection_mac = (config.host_os == 'Darwin') and (config.apple_platform == 'osx')
|
||||
leak_detection_netbsd = (config.host_os == 'NetBSD') and (config.target_arch in ['x86_64', 'i386'])
|
||||
if leak_detection_android or leak_detection_linux or leak_detection_mac or leak_detection_netbsd:
|
||||
config.available_features.add('leak-detection')
|
||||
leak_detection_android = (
|
||||
config.android
|
||||
and "android-thread-properties-api" in config.available_features
|
||||
and (config.target_arch in ["x86_64", "i386", "i686", "aarch64"])
|
||||
)
|
||||
leak_detection_linux = (
|
||||
(config.host_os == "Linux")
|
||||
and (not config.android)
|
||||
and (config.target_arch in ["x86_64", "i386", "riscv64", "loongarch64"])
|
||||
)
|
||||
leak_detection_mac = (config.host_os == "Darwin") and (config.apple_platform == "osx")
|
||||
leak_detection_netbsd = (config.host_os == "NetBSD") and (
|
||||
config.target_arch in ["x86_64", "i386"]
|
||||
)
|
||||
if (
|
||||
leak_detection_android
|
||||
or leak_detection_linux
|
||||
or leak_detection_mac
|
||||
or leak_detection_netbsd
|
||||
):
|
||||
config.available_features.add("leak-detection")
|
||||
|
||||
# Set LD_LIBRARY_PATH to pick dynamic runtime up properly.
|
||||
push_dynamic_library_lookup_path(config, config.compiler_rt_libdir)
|
||||
|
||||
# GCC-ASan uses dynamic runtime by default.
|
||||
if config.compiler_id == 'GNU':
|
||||
gcc_dir = os.path.dirname(config.clang)
|
||||
libasan_dir = os.path.join(gcc_dir, "..", "lib" + config.bits)
|
||||
push_dynamic_library_lookup_path(config, libasan_dir)
|
||||
if config.compiler_id == "GNU":
|
||||
gcc_dir = os.path.dirname(config.clang)
|
||||
libasan_dir = os.path.join(gcc_dir, "..", "lib" + config.bits)
|
||||
push_dynamic_library_lookup_path(config, libasan_dir)
|
||||
|
||||
# Add the RT libdir to PATH directly so that we can successfully run the gtest
|
||||
# binary to list its tests.
|
||||
if config.host_os == 'Windows' and config.asan_dynamic:
|
||||
os.environ['PATH'] = os.path.pathsep.join([config.compiler_rt_libdir,
|
||||
os.environ.get('PATH', '')])
|
||||
if config.host_os == "Windows" and config.asan_dynamic:
|
||||
os.environ["PATH"] = os.path.pathsep.join(
|
||||
[config.compiler_rt_libdir, os.environ.get("PATH", "")]
|
||||
)
|
||||
|
||||
# Default test suffixes.
|
||||
config.suffixes = ['.c', '.cpp']
|
||||
config.suffixes = [".c", ".cpp"]
|
||||
|
||||
if config.host_os == 'Darwin':
|
||||
config.suffixes.append('.mm')
|
||||
if config.host_os == "Darwin":
|
||||
config.suffixes.append(".mm")
|
||||
|
||||
if config.host_os == 'Windows':
|
||||
config.substitutions.append(('%fPIC', ''))
|
||||
config.substitutions.append(('%fPIE', ''))
|
||||
config.substitutions.append(('%pie', ''))
|
||||
if config.host_os == "Windows":
|
||||
config.substitutions.append(("%fPIC", ""))
|
||||
config.substitutions.append(("%fPIE", ""))
|
||||
config.substitutions.append(("%pie", ""))
|
||||
else:
|
||||
config.substitutions.append(('%fPIC', '-fPIC'))
|
||||
config.substitutions.append(('%fPIE', '-fPIE'))
|
||||
config.substitutions.append(('%pie', '-pie'))
|
||||
config.substitutions.append(("%fPIC", "-fPIC"))
|
||||
config.substitutions.append(("%fPIE", "-fPIE"))
|
||||
config.substitutions.append(("%pie", "-pie"))
|
||||
|
||||
# Only run the tests on supported OSs.
|
||||
if config.host_os not in ['Linux', 'Darwin', 'FreeBSD', 'SunOS', 'Windows', 'NetBSD']:
|
||||
config.unsupported = True
|
||||
if config.host_os not in ["Linux", "Darwin", "FreeBSD", "SunOS", "Windows", "NetBSD"]:
|
||||
config.unsupported = True
|
||||
|
||||
if not config.parallelism_group:
|
||||
config.parallelism_group = 'shadow-memory'
|
||||
config.parallelism_group = "shadow-memory"
|
||||
|
||||
if config.host_os == 'NetBSD':
|
||||
config.substitutions.insert(0, ('%run', config.netbsd_noaslr_prefix))
|
||||
if config.host_os == "NetBSD":
|
||||
config.substitutions.insert(0, ("%run", config.netbsd_noaslr_prefix))
|
||||
|
||||
@@ -1,9 +1,10 @@
|
||||
def getRoot(config):
|
||||
if not config.parent:
|
||||
return config
|
||||
return getRoot(config.parent)
|
||||
if not config.parent:
|
||||
return config
|
||||
return getRoot(config.parent)
|
||||
|
||||
|
||||
root = getRoot(config)
|
||||
|
||||
if root.host_os not in ['Darwin']:
|
||||
config.unsupported = True
|
||||
if root.host_os not in ["Darwin"]:
|
||||
config.unsupported = True
|
||||
|
||||
@@ -10,26 +10,29 @@ import lit.formats
|
||||
use_lit_shell = os.environ.get("LIT_USE_INTERNAL_SHELL")
|
||||
if use_lit_shell:
|
||||
# 0 is external, "" is default, and everything else is internal.
|
||||
execute_external = (use_lit_shell == "0")
|
||||
execute_external = use_lit_shell == "0"
|
||||
else:
|
||||
# Otherwise we default to internal on Windows and external elsewhere, as
|
||||
# bash on Windows is usually very slow.
|
||||
execute_external = (not sys.platform in ['win32'])
|
||||
execute_external = not sys.platform in ["win32"]
|
||||
|
||||
|
||||
def get_required_attr(config, attr_name):
|
||||
attr_value = getattr(config, attr_name, None)
|
||||
if attr_value == None:
|
||||
lit_config.fatal(
|
||||
"No attribute %r in test configuration! You may need to run "
|
||||
"tests from your build directory or add this attribute "
|
||||
"to lit.site.cfg.py " % attr_name)
|
||||
return attr_value
|
||||
attr_value = getattr(config, attr_name, None)
|
||||
if attr_value == None:
|
||||
lit_config.fatal(
|
||||
"No attribute %r in test configuration! You may need to run "
|
||||
"tests from your build directory or add this attribute "
|
||||
"to lit.site.cfg.py " % attr_name
|
||||
)
|
||||
return attr_value
|
||||
|
||||
|
||||
# Setup config name.
|
||||
config.name = 'Builtins' + config.name_suffix
|
||||
config.name = "Builtins" + config.name_suffix
|
||||
|
||||
# Platform-specific default Builtins_OPTIONS for lit tests.
|
||||
default_builtins_opts = ''
|
||||
default_builtins_opts = ""
|
||||
|
||||
# Setup source root.
|
||||
config.test_source_root = os.path.dirname(__file__)
|
||||
@@ -37,44 +40,52 @@ config.test_source_root = os.path.dirname(__file__)
|
||||
# Path to the static library
|
||||
is_msvc = get_required_attr(config, "is_msvc")
|
||||
if is_msvc:
|
||||
base_lib = os.path.join(config.compiler_rt_libdir, "clang_rt.builtins%s.lib "
|
||||
% config.target_suffix)
|
||||
config.substitutions.append( ("%librt ", base_lib) )
|
||||
elif config.host_os == 'Darwin':
|
||||
base_lib = os.path.join(config.compiler_rt_libdir, "libclang_rt.osx.a ")
|
||||
config.substitutions.append( ("%librt ", base_lib + ' -lSystem ') )
|
||||
elif config.host_os == 'Windows':
|
||||
base_lib = os.path.join(config.compiler_rt_libdir, "libclang_rt.builtins%s.a"
|
||||
% config.target_suffix)
|
||||
if sys.platform in ['win32'] and execute_external:
|
||||
# Don't pass dosish path separator to msys bash.exe.
|
||||
base_lib = base_lib.replace('\\', '/')
|
||||
config.substitutions.append( ("%librt ", base_lib + ' -lmingw32 -lmoldname -lmingwex -lmsvcrt -ladvapi32 -lshell32 -luser32 -lkernel32 ') )
|
||||
base_lib = os.path.join(
|
||||
config.compiler_rt_libdir, "clang_rt.builtins%s.lib " % config.target_suffix
|
||||
)
|
||||
config.substitutions.append(("%librt ", base_lib))
|
||||
elif config.host_os == "Darwin":
|
||||
base_lib = os.path.join(config.compiler_rt_libdir, "libclang_rt.osx.a ")
|
||||
config.substitutions.append(("%librt ", base_lib + " -lSystem "))
|
||||
elif config.host_os == "Windows":
|
||||
base_lib = os.path.join(
|
||||
config.compiler_rt_libdir, "libclang_rt.builtins%s.a" % config.target_suffix
|
||||
)
|
||||
if sys.platform in ["win32"] and execute_external:
|
||||
# Don't pass dosish path separator to msys bash.exe.
|
||||
base_lib = base_lib.replace("\\", "/")
|
||||
config.substitutions.append(
|
||||
(
|
||||
"%librt ",
|
||||
base_lib
|
||||
+ " -lmingw32 -lmoldname -lmingwex -lmsvcrt -ladvapi32 -lshell32 -luser32 -lkernel32 ",
|
||||
)
|
||||
)
|
||||
else:
|
||||
base_lib = os.path.join(config.compiler_rt_libdir, "libclang_rt.builtins%s.a"
|
||||
% config.target_suffix)
|
||||
if sys.platform in ['win32'] and execute_external:
|
||||
# Don't pass dosish path separator to msys bash.exe.
|
||||
base_lib = base_lib.replace('\\', '/')
|
||||
config.substitutions.append( ("%librt ", base_lib + ' -lc -lm ') )
|
||||
base_lib = os.path.join(
|
||||
config.compiler_rt_libdir, "libclang_rt.builtins%s.a" % config.target_suffix
|
||||
)
|
||||
if sys.platform in ["win32"] and execute_external:
|
||||
# Don't pass dosish path separator to msys bash.exe.
|
||||
base_lib = base_lib.replace("\\", "/")
|
||||
config.substitutions.append(("%librt ", base_lib + " -lc -lm "))
|
||||
|
||||
builtins_source_dir = os.path.join(
|
||||
get_required_attr(config, "compiler_rt_src_root"), "lib", "builtins")
|
||||
if sys.platform in ['win32'] and execute_external:
|
||||
# Don't pass dosish path separator to msys bash.exe.
|
||||
builtins_source_dir = builtins_source_dir.replace('\\', '/')
|
||||
get_required_attr(config, "compiler_rt_src_root"), "lib", "builtins"
|
||||
)
|
||||
if sys.platform in ["win32"] and execute_external:
|
||||
# Don't pass dosish path separator to msys bash.exe.
|
||||
builtins_source_dir = builtins_source_dir.replace("\\", "/")
|
||||
builtins_lit_source_dir = get_required_attr(config, "builtins_lit_source_dir")
|
||||
|
||||
extra_link_flags = ["-nodefaultlibs"]
|
||||
|
||||
target_cflags = [get_required_attr(config, "target_cflags")]
|
||||
target_cflags += ['-fno-builtin', '-I', builtins_source_dir]
|
||||
target_cflags += ["-fno-builtin", "-I", builtins_source_dir]
|
||||
target_cflags += extra_link_flags
|
||||
target_cxxflags = config.cxx_mode_flags + target_cflags
|
||||
clang_builtins_static_cflags = ([""] +
|
||||
config.debug_info_flags + target_cflags)
|
||||
clang_builtins_static_cxxflags = config.cxx_mode_flags + \
|
||||
clang_builtins_static_cflags
|
||||
clang_builtins_static_cflags = [""] + config.debug_info_flags + target_cflags
|
||||
clang_builtins_static_cxxflags = config.cxx_mode_flags + clang_builtins_static_cflags
|
||||
|
||||
clang_builtins_cflags = clang_builtins_static_cflags
|
||||
clang_builtins_cxxflags = clang_builtins_static_cxxflags
|
||||
@@ -82,49 +93,53 @@ clang_builtins_cxxflags = clang_builtins_static_cxxflags
|
||||
# FIXME: Right now we don't compile the C99 complex builtins when using
|
||||
# clang-cl. Fix that.
|
||||
if not is_msvc:
|
||||
config.available_features.add('c99-complex')
|
||||
config.available_features.add("c99-complex")
|
||||
|
||||
builtins_is_msvc = get_required_attr(config, "builtins_is_msvc")
|
||||
if not builtins_is_msvc:
|
||||
config.available_features.add('int128')
|
||||
config.available_features.add("int128")
|
||||
|
||||
clang_wrapper = ""
|
||||
|
||||
|
||||
def build_invocation(compile_flags):
|
||||
return " " + " ".join([clang_wrapper, config.clang] + compile_flags) + " "
|
||||
return " " + " ".join([clang_wrapper, config.clang] + compile_flags) + " "
|
||||
|
||||
|
||||
config.substitutions.append( ("%clang ", build_invocation(target_cflags)) )
|
||||
config.substitutions.append( ("%clangxx ", build_invocation(target_cxxflags)) )
|
||||
config.substitutions.append( ("%clang_builtins ", \
|
||||
build_invocation(clang_builtins_cflags)))
|
||||
config.substitutions.append( ("%clangxx_builtins ", \
|
||||
build_invocation(clang_builtins_cxxflags)))
|
||||
config.substitutions.append(("%clang ", build_invocation(target_cflags)))
|
||||
config.substitutions.append(("%clangxx ", build_invocation(target_cxxflags)))
|
||||
config.substitutions.append(
|
||||
("%clang_builtins ", build_invocation(clang_builtins_cflags))
|
||||
)
|
||||
config.substitutions.append(
|
||||
("%clangxx_builtins ", build_invocation(clang_builtins_cxxflags))
|
||||
)
|
||||
|
||||
# Default test suffixes.
|
||||
config.suffixes = ['.c', '.cpp']
|
||||
config.suffixes = [".c", ".cpp"]
|
||||
|
||||
if not config.emulator:
|
||||
config.available_features.add('native-run')
|
||||
config.available_features.add("native-run")
|
||||
|
||||
# Add features for available sources
|
||||
builtins_source_features = config.builtins_lit_source_features.split(';')
|
||||
builtins_source_features = config.builtins_lit_source_features.split(";")
|
||||
# Sanity checks
|
||||
if not builtins_source_features:
|
||||
lit_config.fatal('builtins_source_features cannot be empty')
|
||||
lit_config.fatal("builtins_source_features cannot be empty")
|
||||
builtins_source_features_set = set()
|
||||
builtins_source_feature_duplicates = []
|
||||
for builtin_source_feature in builtins_source_features:
|
||||
if len(builtin_source_feature) == 0:
|
||||
lit_config.fatal('builtins_source_feature cannot contain empty features')
|
||||
if builtin_source_feature not in builtins_source_features_set:
|
||||
builtins_source_features_set.add(builtin_source_feature)
|
||||
else:
|
||||
builtins_source_feature_duplicates.append(builtin_source_feature)
|
||||
if len(builtin_source_feature) == 0:
|
||||
lit_config.fatal("builtins_source_feature cannot contain empty features")
|
||||
if builtin_source_feature not in builtins_source_features_set:
|
||||
builtins_source_features_set.add(builtin_source_feature)
|
||||
else:
|
||||
builtins_source_feature_duplicates.append(builtin_source_feature)
|
||||
|
||||
if len(builtins_source_feature_duplicates) > 0:
|
||||
lit_config.fatal(
|
||||
'builtins_source_features contains duplicates: {}'.format(
|
||||
builtins_source_feature_duplicates)
|
||||
)
|
||||
lit_config.fatal(
|
||||
"builtins_source_features contains duplicates: {}".format(
|
||||
builtins_source_feature_duplicates
|
||||
)
|
||||
)
|
||||
config.available_features.update(builtins_source_features)
|
||||
|
||||
@@ -3,18 +3,24 @@
|
||||
import os
|
||||
|
||||
# Setup config name.
|
||||
config.name = 'Builtins'
|
||||
config.name = "Builtins"
|
||||
|
||||
# Setup source root.
|
||||
config.test_source_root = os.path.dirname(__file__)
|
||||
|
||||
# Test suffixes.
|
||||
config.suffixes = ['.c', '.cpp', '.m', '.mm']
|
||||
config.suffixes = [".c", ".cpp", ".m", ".mm"]
|
||||
|
||||
# Define %clang and %clangxx substitutions to use in test RUN lines.
|
||||
config.substitutions.append( ("%clang ", " " + config.clang + " ") )
|
||||
config.substitutions.append(("%clang ", " " + config.clang + " "))
|
||||
|
||||
if config.host_os == 'Darwin':
|
||||
config.substitutions.append( ("%macos_version_major", str(config.darwin_osx_version[0])) )
|
||||
config.substitutions.append( ("%macos_version_minor", str(config.darwin_osx_version[1])) )
|
||||
config.substitutions.append( ("%macos_version_subminor", str(config.darwin_osx_version[2])) )
|
||||
if config.host_os == "Darwin":
|
||||
config.substitutions.append(
|
||||
("%macos_version_major", str(config.darwin_osx_version[0]))
|
||||
)
|
||||
config.substitutions.append(
|
||||
("%macos_version_minor", str(config.darwin_osx_version[1]))
|
||||
)
|
||||
config.substitutions.append(
|
||||
("%macos_version_subminor", str(config.darwin_osx_version[2]))
|
||||
)
|
||||
|
||||
@@ -1,3 +1,3 @@
|
||||
# The cfi-icall checker is only supported on x86 and x86_64 for now.
|
||||
if config.root.host_arch not in ['x86', 'x86_64']:
|
||||
config.unsupported = True
|
||||
if config.root.host_arch not in ["x86", "x86_64"]:
|
||||
config.unsupported = True
|
||||
|
||||
@@ -1,13 +1,14 @@
|
||||
def getRoot(config):
|
||||
if not config.parent:
|
||||
return config
|
||||
return getRoot(config.parent)
|
||||
if not config.parent:
|
||||
return config
|
||||
return getRoot(config.parent)
|
||||
|
||||
|
||||
root = getRoot(config)
|
||||
|
||||
if root.host_os not in ['Linux', 'FreeBSD', 'NetBSD']:
|
||||
config.unsupported = True
|
||||
if root.host_os not in ["Linux", "FreeBSD", "NetBSD"]:
|
||||
config.unsupported = True
|
||||
|
||||
# Android O (API level 26) has support for cross-dso cfi in libdl.so.
|
||||
if config.android and 'android-26' not in config.available_features:
|
||||
config.unsupported = True
|
||||
if config.android and "android-26" not in config.available_features:
|
||||
config.unsupported = True
|
||||
|
||||
@@ -1,3 +1,3 @@
|
||||
# The cfi-icall checker is only supported on x86 and x86_64 for now.
|
||||
if config.root.host_arch not in ['x86', 'x86_64']:
|
||||
config.unsupported = True
|
||||
if config.root.host_arch not in ["x86", "x86_64"]:
|
||||
config.unsupported = True
|
||||
|
||||
@@ -1,50 +1,60 @@
|
||||
import lit.formats
|
||||
import os
|
||||
|
||||
config.name = 'cfi' + config.name_suffix
|
||||
config.suffixes = ['.c', '.cpp', '.test']
|
||||
config.name = "cfi" + config.name_suffix
|
||||
config.suffixes = [".c", ".cpp", ".test"]
|
||||
config.test_source_root = os.path.dirname(__file__)
|
||||
|
||||
|
||||
def build_invocation(compile_flags):
|
||||
return " " + " ".join([config.clang] + compile_flags) + " "
|
||||
return " " + " ".join([config.clang] + compile_flags) + " "
|
||||
|
||||
|
||||
clang = build_invocation([config.target_cflags])
|
||||
clangxx = build_invocation([config.target_cflags] + config.cxx_mode_flags)
|
||||
|
||||
config.substitutions.append((r"%clang ", clang + ' '))
|
||||
config.substitutions.append((r"%clangxx ", clangxx + ' '))
|
||||
config.substitutions.append((r"%clang ", clang + " "))
|
||||
config.substitutions.append((r"%clangxx ", clangxx + " "))
|
||||
|
||||
if 'darwin' in config.available_features:
|
||||
# -fsanitize=cfi is not supported on Darwin hosts
|
||||
config.unsupported = True
|
||||
if "darwin" in config.available_features:
|
||||
# -fsanitize=cfi is not supported on Darwin hosts
|
||||
config.unsupported = True
|
||||
elif config.lto_supported:
|
||||
clang_cfi = clang + '-fsanitize=cfi '
|
||||
clang_cfi = clang + "-fsanitize=cfi "
|
||||
|
||||
if config.cfi_lit_test_mode == "Devirt":
|
||||
config.available_features.add('devirt')
|
||||
clang_cfi += '-fwhole-program-vtables '
|
||||
config.substitutions.append((r"%expect_crash_unless_devirt ", ""))
|
||||
else:
|
||||
config.substitutions.append((r"%expect_crash_unless_devirt ", config.expect_crash))
|
||||
if config.cfi_lit_test_mode == "Devirt":
|
||||
config.available_features.add("devirt")
|
||||
clang_cfi += "-fwhole-program-vtables "
|
||||
config.substitutions.append((r"%expect_crash_unless_devirt ", ""))
|
||||
else:
|
||||
config.substitutions.append(
|
||||
(r"%expect_crash_unless_devirt ", config.expect_crash)
|
||||
)
|
||||
|
||||
cxx = ' '.join(config.cxx_mode_flags) + ' '
|
||||
diag = '-fno-sanitize-trap=cfi -fsanitize-recover=cfi '
|
||||
non_dso = '-fvisibility=hidden '
|
||||
dso = '-fsanitize-cfi-cross-dso -fvisibility=default '
|
||||
if config.android:
|
||||
dso += '-include ' + config.test_source_root + '/cross-dso/util/cfi_stubs.h '
|
||||
config.substitutions.append((r"%clang_cfi ", clang_cfi + non_dso))
|
||||
config.substitutions.append((r"%clangxx_cfi ", clang_cfi + cxx + non_dso))
|
||||
config.substitutions.append((r"%clang_cfi_diag ", clang_cfi + non_dso + diag))
|
||||
config.substitutions.append((r"%clangxx_cfi_diag ", clang_cfi + cxx + non_dso + diag))
|
||||
config.substitutions.append((r"%clangxx_cfi_dso ", clang_cfi + cxx + dso))
|
||||
config.substitutions.append((r"%clangxx_cfi_dso_diag ", clang_cfi + cxx + dso + diag))
|
||||
config.substitutions.append((r"%debug_info_flags", ' '.join(config.debug_info_flags)))
|
||||
cxx = " ".join(config.cxx_mode_flags) + " "
|
||||
diag = "-fno-sanitize-trap=cfi -fsanitize-recover=cfi "
|
||||
non_dso = "-fvisibility=hidden "
|
||||
dso = "-fsanitize-cfi-cross-dso -fvisibility=default "
|
||||
if config.android:
|
||||
dso += "-include " + config.test_source_root + "/cross-dso/util/cfi_stubs.h "
|
||||
config.substitutions.append((r"%clang_cfi ", clang_cfi + non_dso))
|
||||
config.substitutions.append((r"%clangxx_cfi ", clang_cfi + cxx + non_dso))
|
||||
config.substitutions.append((r"%clang_cfi_diag ", clang_cfi + non_dso + diag))
|
||||
config.substitutions.append(
|
||||
(r"%clangxx_cfi_diag ", clang_cfi + cxx + non_dso + diag)
|
||||
)
|
||||
config.substitutions.append((r"%clangxx_cfi_dso ", clang_cfi + cxx + dso))
|
||||
config.substitutions.append(
|
||||
(r"%clangxx_cfi_dso_diag ", clang_cfi + cxx + dso + diag)
|
||||
)
|
||||
config.substitutions.append(
|
||||
(r"%debug_info_flags", " ".join(config.debug_info_flags))
|
||||
)
|
||||
else:
|
||||
config.unsupported = True
|
||||
config.unsupported = True
|
||||
|
||||
if config.default_sanitizer_opts:
|
||||
config.environment['UBSAN_OPTIONS'] = ':'.join(config.default_sanitizer_opts)
|
||||
config.environment["UBSAN_OPTIONS"] = ":".join(config.default_sanitizer_opts)
|
||||
|
||||
if lit_config.params.get('check_supported', None) and config.unsupported:
|
||||
raise BaseException("Tests unsupported")
|
||||
if lit_config.params.get("check_supported", None) and config.unsupported:
|
||||
raise BaseException("Tests unsupported")
|
||||
|
||||
@@ -5,7 +5,7 @@ import subprocess
|
||||
import shlex
|
||||
|
||||
# Setup config name.
|
||||
config.name = 'CRT' + config.name_suffix
|
||||
config.name = "CRT" + config.name_suffix
|
||||
|
||||
# Setup source root.
|
||||
config.test_source_root = os.path.dirname(__file__)
|
||||
@@ -16,80 +16,80 @@ config.test_source_root = os.path.dirname(__file__)
|
||||
use_lit_shell = os.environ.get("LIT_USE_INTERNAL_SHELL")
|
||||
if use_lit_shell:
|
||||
# 0 is external, "" is default, and everything else is internal.
|
||||
execute_external = (use_lit_shell == "0")
|
||||
execute_external = use_lit_shell == "0"
|
||||
else:
|
||||
# Otherwise we default to internal on Windows and external elsewhere, as
|
||||
# bash on Windows is usually very slow.
|
||||
execute_external = (not sys.platform in ['win32'])
|
||||
execute_external = not sys.platform in ["win32"]
|
||||
|
||||
|
||||
def get_library_path(file):
|
||||
cmd = subprocess.Popen([config.clang.strip(),
|
||||
'-print-file-name=%s' % file] +
|
||||
shlex.split(config.target_cflags),
|
||||
stdout=subprocess.PIPE,
|
||||
env=config.environment,
|
||||
universal_newlines=True)
|
||||
cmd = subprocess.Popen(
|
||||
[config.clang.strip(), "-print-file-name=%s" % file]
|
||||
+ shlex.split(config.target_cflags),
|
||||
stdout=subprocess.PIPE,
|
||||
env=config.environment,
|
||||
universal_newlines=True,
|
||||
)
|
||||
if not cmd.stdout:
|
||||
lit_config.fatal("Couldn't find the library path for '%s'" % file)
|
||||
lit_config.fatal("Couldn't find the library path for '%s'" % file)
|
||||
dir = cmd.stdout.read().strip()
|
||||
if sys.platform in ['win32'] and execute_external:
|
||||
if sys.platform in ["win32"] and execute_external:
|
||||
# Don't pass dosish path separator to msys bash.exe.
|
||||
dir = dir.replace('\\', '/')
|
||||
dir = dir.replace("\\", "/")
|
||||
return dir
|
||||
|
||||
|
||||
def get_libgcc_file_name():
|
||||
cmd = subprocess.Popen([config.clang.strip(),
|
||||
'-print-libgcc-file-name'] +
|
||||
shlex.split(config.target_cflags),
|
||||
stdout=subprocess.PIPE,
|
||||
env=config.environment,
|
||||
universal_newlines=True)
|
||||
cmd = subprocess.Popen(
|
||||
[config.clang.strip(), "-print-libgcc-file-name"]
|
||||
+ shlex.split(config.target_cflags),
|
||||
stdout=subprocess.PIPE,
|
||||
env=config.environment,
|
||||
universal_newlines=True,
|
||||
)
|
||||
if not cmd.stdout:
|
||||
lit_config.fatal("Couldn't find the library path for '%s'" % file)
|
||||
lit_config.fatal("Couldn't find the library path for '%s'" % file)
|
||||
dir = cmd.stdout.read().strip()
|
||||
if sys.platform in ['win32'] and execute_external:
|
||||
if sys.platform in ["win32"] and execute_external:
|
||||
# Don't pass dosish path separator to msys bash.exe.
|
||||
dir = dir.replace('\\', '/')
|
||||
dir = dir.replace("\\", "/")
|
||||
return dir
|
||||
|
||||
|
||||
def build_invocation(compile_flags):
|
||||
return ' ' + ' '.join([config.clang] + compile_flags) + ' '
|
||||
return " " + " ".join([config.clang] + compile_flags) + " "
|
||||
|
||||
|
||||
# Setup substitutions.
|
||||
config.substitutions.append(("%clang ", build_invocation([config.target_cflags])))
|
||||
config.substitutions.append(
|
||||
('%clang ', build_invocation([config.target_cflags])))
|
||||
config.substitutions.append(
|
||||
('%clangxx ',
|
||||
build_invocation(config.cxx_mode_flags + [config.target_cflags])))
|
||||
("%clangxx ", build_invocation(config.cxx_mode_flags + [config.target_cflags]))
|
||||
)
|
||||
|
||||
base_lib = os.path.join(
|
||||
config.compiler_rt_libdir, "clang_rt.%%s%s.o" % config.target_suffix)
|
||||
config.compiler_rt_libdir, "clang_rt.%%s%s.o" % config.target_suffix
|
||||
)
|
||||
|
||||
if sys.platform in ['win32'] and execute_external:
|
||||
if sys.platform in ["win32"] and execute_external:
|
||||
# Don't pass dosish path separator to msys bash.exe.
|
||||
base_lib = base_lib.replace('\\', '/')
|
||||
base_lib = base_lib.replace("\\", "/")
|
||||
|
||||
config.substitutions.append(('%crtbegin', base_lib % "crtbegin"))
|
||||
config.substitutions.append(('%crtend', base_lib % "crtend"))
|
||||
config.substitutions.append(("%crtbegin", base_lib % "crtbegin"))
|
||||
config.substitutions.append(("%crtend", base_lib % "crtend"))
|
||||
|
||||
config.substitutions.append(("%crt1", get_library_path("crt1.o")))
|
||||
config.substitutions.append(("%crti", get_library_path("crti.o")))
|
||||
config.substitutions.append(("%crtn", get_library_path("crtn.o")))
|
||||
|
||||
config.substitutions.append(("%libgcc", get_libgcc_file_name()))
|
||||
|
||||
config.substitutions.append(
|
||||
('%crt1', get_library_path('crt1.o')))
|
||||
config.substitutions.append(
|
||||
('%crti', get_library_path('crti.o')))
|
||||
config.substitutions.append(
|
||||
('%crtn', get_library_path('crtn.o')))
|
||||
|
||||
config.substitutions.append(
|
||||
('%libgcc', get_libgcc_file_name()))
|
||||
|
||||
config.substitutions.append(
|
||||
('%libstdcxx', '-l' + config.sanitizer_cxx_lib.lstrip('lib')))
|
||||
("%libstdcxx", "-l" + config.sanitizer_cxx_lib.lstrip("lib"))
|
||||
)
|
||||
|
||||
# Default test suffixes.
|
||||
config.suffixes = ['.c', '.cpp']
|
||||
config.suffixes = [".c", ".cpp"]
|
||||
|
||||
if config.host_os not in ['Linux']:
|
||||
if config.host_os not in ["Linux"]:
|
||||
config.unsupported = True
|
||||
|
||||
@@ -3,26 +3,27 @@
|
||||
import os
|
||||
|
||||
# Setup config name.
|
||||
config.name = 'DataFlowSanitizer' + config.name_suffix
|
||||
config.name = "DataFlowSanitizer" + config.name_suffix
|
||||
|
||||
# Setup source root.
|
||||
config.test_source_root = os.path.dirname(__file__)
|
||||
|
||||
# Setup default compiler flags used with -fsanitize=dataflow option.
|
||||
clang_dfsan_cflags = (["-fsanitize=dataflow"] +
|
||||
[config.target_cflags])
|
||||
clang_dfsan_cflags = ["-fsanitize=dataflow"] + [config.target_cflags]
|
||||
|
||||
clang_dfsan_cxxflags = config.cxx_mode_flags + clang_dfsan_cflags
|
||||
|
||||
def build_invocation(compile_flags):
|
||||
return " " + " ".join([config.clang] + compile_flags) + " "
|
||||
|
||||
config.substitutions.append( ("%clang_dfsan ", build_invocation(clang_dfsan_cflags)) )
|
||||
config.substitutions.append( ("%clangxx_dfsan ", build_invocation(clang_dfsan_cxxflags)) )
|
||||
def build_invocation(compile_flags):
|
||||
return " " + " ".join([config.clang] + compile_flags) + " "
|
||||
|
||||
|
||||
config.substitutions.append(("%clang_dfsan ", build_invocation(clang_dfsan_cflags)))
|
||||
config.substitutions.append(("%clangxx_dfsan ", build_invocation(clang_dfsan_cxxflags)))
|
||||
|
||||
# Default test suffixes.
|
||||
config.suffixes = ['.c', '.cpp']
|
||||
config.suffixes = [".c", ".cpp"]
|
||||
|
||||
# DataFlowSanitizer tests are currently supported on Linux only.
|
||||
if not (config.host_os in ['Linux'] and config.target_arch in ['aarch64', 'x86_64']):
|
||||
config.unsupported = True
|
||||
if not (config.host_os in ["Linux"] and config.target_arch in ["aarch64", "x86_64"]):
|
||||
config.unsupported = True
|
||||
|
||||
@@ -4,7 +4,7 @@ import os
|
||||
|
||||
config.name = "libFuzzer" + config.name_suffix
|
||||
config.test_format = lit.formats.ShTest(True)
|
||||
config.suffixes = ['.test']
|
||||
config.suffixes = [".test"]
|
||||
config.test_source_root = os.path.dirname(__file__)
|
||||
config.available_features.add(config.target_arch)
|
||||
lit_config.note(f'arch feature "{config.target_arch}" available')
|
||||
@@ -14,11 +14,11 @@ lit_config.note(f'arch feature "{config.target_arch}" available')
|
||||
use_lit_shell = os.environ.get("LIT_USE_INTERNAL_SHELL")
|
||||
if use_lit_shell:
|
||||
# 0 is external, "" is default, and everything else is internal.
|
||||
execute_external = (use_lit_shell == "0")
|
||||
execute_external = use_lit_shell == "0"
|
||||
else:
|
||||
# Otherwise we default to internal on Windows and external elsewhere, as
|
||||
# bash on Windows is usually very slow.
|
||||
execute_external = (not sys.platform in ['win32'])
|
||||
execute_external = not sys.platform in ["win32"]
|
||||
|
||||
# testFormat: The test format to use to interpret tests.
|
||||
#
|
||||
@@ -27,100 +27,116 @@ else:
|
||||
config.test_format = lit.formats.ShTest(execute_external)
|
||||
|
||||
# LeakSanitizer is not supported on OSX or Windows right now.
|
||||
if (sys.platform.startswith('darwin') or
|
||||
sys.platform.startswith('freebsd') or
|
||||
sys.platform.startswith('win')):
|
||||
lit_config.note('lsan feature unavailable')
|
||||
if (
|
||||
sys.platform.startswith("darwin")
|
||||
or sys.platform.startswith("freebsd")
|
||||
or sys.platform.startswith("win")
|
||||
):
|
||||
lit_config.note("lsan feature unavailable")
|
||||
else:
|
||||
lit_config.note('lsan feature available')
|
||||
config.available_features.add('lsan')
|
||||
lit_config.note("lsan feature available")
|
||||
config.available_features.add("lsan")
|
||||
|
||||
# MemorySanitizer is not supported on OSX or Windows right now
|
||||
if (sys.platform.startswith('darwin') or sys.platform.startswith('win') or
|
||||
config.target_arch == 'i386'):
|
||||
lit_config.note('msan feature unavailable')
|
||||
assert 'msan' not in config.available_features
|
||||
if (
|
||||
sys.platform.startswith("darwin")
|
||||
or sys.platform.startswith("win")
|
||||
or config.target_arch == "i386"
|
||||
):
|
||||
lit_config.note("msan feature unavailable")
|
||||
assert "msan" not in config.available_features
|
||||
else:
|
||||
lit_config.note('msan feature available')
|
||||
config.available_features.add('msan')
|
||||
lit_config.note("msan feature available")
|
||||
config.available_features.add("msan")
|
||||
|
||||
if sys.platform.startswith('win') or sys.platform.startswith('cygwin'):
|
||||
config.available_features.add('windows')
|
||||
if sys.platform.startswith("win") or sys.platform.startswith("cygwin"):
|
||||
config.available_features.add("windows")
|
||||
|
||||
if sys.platform.startswith('darwin'):
|
||||
config.available_features.add('darwin')
|
||||
if sys.platform.startswith("darwin"):
|
||||
config.available_features.add("darwin")
|
||||
|
||||
if sys.platform.startswith('linux'):
# Note the value of ``sys.platform`` is not consistent
# between python 2 and 3, hence the use of ``.startswith()``.
lit_config.note('linux feature available')
config.available_features.add('linux')
if sys.platform.startswith("linux"):
# Note the value of ``sys.platform`` is not consistent
# between python 2 and 3, hence the use of ``.startswith()``.
lit_config.note("linux feature available")
config.available_features.add("linux")
else:
lit_config.note('linux feature unavailable')
lit_config.note("linux feature unavailable")

if config.arm_thumb:
|
||||
config.available_features.add('thumb')
|
||||
config.available_features.add("thumb")
|
||||
|
||||
config.substitutions.append(('%build_dir', config.cmake_binary_dir))
|
||||
config.substitutions.append(("%build_dir", config.cmake_binary_dir))
|
||||
libfuzzer_src_root = os.path.join(config.compiler_rt_src_root, "lib", "fuzzer")
|
||||
config.substitutions.append(('%libfuzzer_src', libfuzzer_src_root))
|
||||
config.substitutions.append(("%libfuzzer_src", libfuzzer_src_root))
|
||||
|
||||
config.substitutions.append(("%python", '"%s"' % (sys.executable)))
|
||||
|
||||
config.substitutions.append(('%python', '"%s"' % (sys.executable)))
|
||||
|
||||
def generate_compiler_cmd(is_cpp=True, fuzzer_enabled=True, msan_enabled=False):
|
||||
compiler_cmd = config.clang
|
||||
extra_cmd = config.target_flags
|
||||
compiler_cmd = config.clang
|
||||
extra_cmd = config.target_flags
|
||||
|
||||
if is_cpp:
|
||||
std_cmd = '--driver-mode=g++'
|
||||
else:
|
||||
std_cmd = ''
|
||||
if is_cpp:
|
||||
std_cmd = "--driver-mode=g++"
|
||||
else:
|
||||
std_cmd = ""
|
||||
|
||||
if msan_enabled:
|
||||
sanitizers = ['memory']
|
||||
else:
|
||||
sanitizers = ['address']
|
||||
if fuzzer_enabled:
|
||||
sanitizers.append('fuzzer')
|
||||
sanitizers_cmd = ('-fsanitize=%s' % ','.join(sanitizers))
|
||||
return " ".join([
|
||||
compiler_cmd,
|
||||
std_cmd,
|
||||
"-O2 -gline-tables-only",
|
||||
sanitizers_cmd,
|
||||
"-I%s" % libfuzzer_src_root,
|
||||
extra_cmd
|
||||
])
|
||||
if msan_enabled:
|
||||
sanitizers = ["memory"]
|
||||
else:
|
||||
sanitizers = ["address"]
|
||||
if fuzzer_enabled:
|
||||
sanitizers.append("fuzzer")
|
||||
sanitizers_cmd = "-fsanitize=%s" % ",".join(sanitizers)
|
||||
return " ".join(
|
||||
[
|
||||
compiler_cmd,
|
||||
std_cmd,
|
||||
"-O2 -gline-tables-only",
|
||||
sanitizers_cmd,
|
||||
"-I%s" % libfuzzer_src_root,
|
||||
extra_cmd,
|
||||
]
|
||||
)
|
||||
|
||||
config.substitutions.append(('%cpp_compiler',
|
||||
generate_compiler_cmd(is_cpp=True, fuzzer_enabled=True)
|
||||
))
|
||||
|
||||
config.substitutions.append(('%c_compiler',
|
||||
generate_compiler_cmd(is_cpp=False, fuzzer_enabled=True)
|
||||
))
|
||||
config.substitutions.append(
|
||||
("%cpp_compiler", generate_compiler_cmd(is_cpp=True, fuzzer_enabled=True))
|
||||
)
|
||||
|
||||
config.substitutions.append(('%no_fuzzer_cpp_compiler',
|
||||
generate_compiler_cmd(is_cpp=True, fuzzer_enabled=False)
|
||||
))
|
||||
config.substitutions.append(
|
||||
("%c_compiler", generate_compiler_cmd(is_cpp=False, fuzzer_enabled=True))
|
||||
)
|
||||
|
||||
config.substitutions.append(('%no_fuzzer_c_compiler',
|
||||
generate_compiler_cmd(is_cpp=False, fuzzer_enabled=False)
|
||||
))
|
||||
config.substitutions.append(
|
||||
(
|
||||
"%no_fuzzer_cpp_compiler",
|
||||
generate_compiler_cmd(is_cpp=True, fuzzer_enabled=False),
|
||||
)
|
||||
)
|
||||
|
||||
config.substitutions.append(('%msan_compiler',
|
||||
generate_compiler_cmd(is_cpp=True, fuzzer_enabled=True, msan_enabled=True)
|
||||
))
|
||||
config.substitutions.append(
|
||||
("%no_fuzzer_c_compiler", generate_compiler_cmd(is_cpp=False, fuzzer_enabled=False))
|
||||
)
|
||||
|
||||
default_asan_opts_str = ':'.join(config.default_sanitizer_opts)
|
||||
config.substitutions.append(
|
||||
(
|
||||
"%msan_compiler",
|
||||
generate_compiler_cmd(is_cpp=True, fuzzer_enabled=True, msan_enabled=True),
|
||||
)
|
||||
)
|
||||
|
||||
default_asan_opts_str = ":".join(config.default_sanitizer_opts)
|
||||
if default_asan_opts_str:
|
||||
config.environment['ASAN_OPTIONS'] = default_asan_opts_str
|
||||
default_asan_opts_str += ':'
|
||||
config.substitutions.append(('%env_asan_opts=',
|
||||
'env ASAN_OPTIONS=' + default_asan_opts_str))
|
||||
config.environment["ASAN_OPTIONS"] = default_asan_opts_str
|
||||
default_asan_opts_str += ":"
|
||||
config.substitutions.append(
|
||||
("%env_asan_opts=", "env ASAN_OPTIONS=" + default_asan_opts_str)
|
||||
)
|
||||
|
||||
if not config.parallelism_group:
|
||||
config.parallelism_group = 'shadow-memory'
|
||||
config.parallelism_group = "shadow-memory"
|
||||
|
||||
if config.host_os == 'NetBSD':
|
||||
config.substitutions.insert(0, ('%run', config.netbsd_noaslr_prefix))
|
||||
if config.host_os == "NetBSD":
|
||||
config.substitutions.insert(0, ("%run", config.netbsd_noaslr_prefix))
|
||||
|
||||
@@ -3,54 +3,69 @@
|
||||
import os
|
||||
|
||||
# Setup config name.
|
||||
config.name = 'GWP-ASan' + config.name_suffix
|
||||
config.name = "GWP-ASan" + config.name_suffix
|
||||
|
||||
# Setup source root.
|
||||
config.test_source_root = os.path.dirname(__file__)
|
||||
|
||||
# Test suffixes.
|
||||
config.suffixes = ['.c', '.cpp', '.test']
|
||||
config.suffixes = [".c", ".cpp", ".test"]
|
||||
|
||||
# C & CXX flags.
|
||||
c_flags = ([config.target_cflags])
|
||||
c_flags = [config.target_cflags]
|
||||
|
||||
cxx_flags = (c_flags + config.cxx_mode_flags + ["-std=c++14"])
|
||||
cxx_flags = c_flags + config.cxx_mode_flags + ["-std=c++14"]
|
||||
|
||||
libscudo_standalone = os.path.join(
|
||||
config.compiler_rt_libdir,
|
||||
"libclang_rt.scudo_standalone%s.a" % config.target_suffix)
|
||||
config.compiler_rt_libdir, "libclang_rt.scudo_standalone%s.a" % config.target_suffix
|
||||
)
|
||||
libscudo_standalone_cxx = os.path.join(
|
||||
config.compiler_rt_libdir,
|
||||
"libclang_rt.scudo_standalone_cxx%s.a" % config.target_suffix)
|
||||
"libclang_rt.scudo_standalone_cxx%s.a" % config.target_suffix,
|
||||
)
|
||||
|
||||
scudo_link_flags = ["-pthread", "-Wl,--whole-archive", libscudo_standalone,
|
||||
"-Wl,--no-whole-archive"]
|
||||
scudo_link_cxx_flags = ["-Wl,--whole-archive", libscudo_standalone_cxx,
|
||||
"-Wl,--no-whole-archive"]
|
||||
scudo_link_flags = [
|
||||
"-pthread",
|
||||
"-Wl,--whole-archive",
|
||||
libscudo_standalone,
|
||||
"-Wl,--no-whole-archive",
|
||||
]
|
||||
scudo_link_cxx_flags = [
|
||||
"-Wl,--whole-archive",
|
||||
libscudo_standalone_cxx,
|
||||
"-Wl,--no-whole-archive",
|
||||
]
|
||||
|
||||
# -rdynamic is necessary for online function symbolization.
|
||||
gwp_asan_flags = ["-rdynamic"] + scudo_link_flags
|
||||
|
||||
|
||||
def build_invocation(compile_flags):
|
||||
return " " + " ".join([config.clang] + compile_flags) + " "
|
||||
return " " + " ".join([config.clang] + compile_flags) + " "
|
||||
|
||||
|
||||
# Add substitutions.
|
||||
config.substitutions.append(("%clang ", build_invocation(c_flags)))
|
||||
config.substitutions.append(
|
||||
("%clang_gwp_asan ", build_invocation(c_flags + gwp_asan_flags)))
|
||||
config.substitutions.append((
|
||||
"%clangxx_gwp_asan ",
|
||||
build_invocation(cxx_flags + gwp_asan_flags + scudo_link_cxx_flags)))
|
||||
("%clang_gwp_asan ", build_invocation(c_flags + gwp_asan_flags))
|
||||
)
|
||||
config.substitutions.append(
|
||||
(
|
||||
"%clangxx_gwp_asan ",
|
||||
build_invocation(cxx_flags + gwp_asan_flags + scudo_link_cxx_flags),
|
||||
)
|
||||
)
|
||||
|
||||
# Platform-specific default GWP_ASAN for lit tests. Ensure that GWP-ASan is
|
||||
# enabled and that it samples every allocation.
|
||||
default_gwp_asan_options = 'GWP_ASAN_Enabled=1:GWP_ASAN_SampleRate=1'
|
||||
default_gwp_asan_options = "GWP_ASAN_Enabled=1:GWP_ASAN_SampleRate=1"
|
||||
|
||||
config.environment['SCUDO_OPTIONS'] = default_gwp_asan_options
|
||||
default_gwp_asan_options += ':'
|
||||
config.substitutions.append(('%env_scudo_options=',
|
||||
'env SCUDO_OPTIONS=' + default_gwp_asan_options))
|
||||
config.environment["SCUDO_OPTIONS"] = default_gwp_asan_options
|
||||
default_gwp_asan_options += ":"
|
||||
config.substitutions.append(
|
||||
("%env_scudo_options=", "env SCUDO_OPTIONS=" + default_gwp_asan_options)
|
||||
)
|
||||
|
||||
# GWP-ASan tests are currently supported on Linux only.
|
||||
if config.host_os not in ['Linux']:
|
||||
config.unsupported = True
|
||||
if config.host_os not in ["Linux"]:
|
||||
config.unsupported = True
|
||||
|
||||
@@ -1,9 +1,10 @@
|
||||
def getRoot(config):
|
||||
if not config.parent:
|
||||
return config
|
||||
return getRoot(config.parent)
|
||||
if not config.parent:
|
||||
return config
|
||||
return getRoot(config.parent)
|
||||
|
||||
|
||||
root = getRoot(config)
|
||||
|
||||
if root.host_os not in ['Linux']:
|
||||
config.unsupported = True
|
||||
if root.host_os not in ["Linux"]:
|
||||
config.unsupported = True
|
||||
|
||||
@@ -1,9 +1,10 @@
|
||||
def getRoot(config):
|
||||
if not config.parent:
|
||||
return config
|
||||
return getRoot(config.parent)
|
||||
if not config.parent:
|
||||
return config
|
||||
return getRoot(config.parent)
|
||||
|
||||
|
||||
root = getRoot(config)
|
||||
|
||||
if root.host_os in ['Windows']:
|
||||
config.unsupported = True
|
||||
if root.host_os in ["Windows"]:
|
||||
config.unsupported = True
|
||||
|
||||
@@ -3,7 +3,7 @@
|
||||
import os
|
||||
|
||||
# Setup config name.
|
||||
config.name = 'HWAddressSanitizer' + getattr(config, 'name_suffix', 'default')
|
||||
config.name = "HWAddressSanitizer" + getattr(config, "name_suffix", "default")
|
||||
|
||||
# Setup source root.
|
||||
config.test_source_root = os.path.dirname(__file__)
|
||||
@@ -13,46 +13,69 @@ clang_cflags = [config.target_cflags] + config.debug_info_flags
|
||||
clang_cxxflags = config.cxx_mode_flags + clang_cflags
|
||||
clang_hwasan_common_cflags = clang_cflags + ["-fsanitize=hwaddress", "-fuse-ld=lld"]
|
||||
|
||||
if config.target_arch == 'x86_64' and config.enable_aliases == '1':
|
||||
clang_hwasan_common_cflags += ["-fsanitize-hwaddress-experimental-aliasing"]
|
||||
if config.target_arch == "x86_64" and config.enable_aliases == "1":
|
||||
clang_hwasan_common_cflags += ["-fsanitize-hwaddress-experimental-aliasing"]
|
||||
else:
|
||||
config.available_features.add('pointer-tagging')
|
||||
if config.target_arch == 'x86_64':
|
||||
# The callback instrumentation used on x86_64 has a 1/64 chance of choosing a
|
||||
# stack tag of 0. This causes stack tests to become flaky, so we force tags
|
||||
# to be generated via calls to __hwasan_generate_tag, which never returns 0.
|
||||
# TODO: See if we can remove this once we use the outlined instrumentation.
|
||||
clang_hwasan_common_cflags += ["-mllvm", "-hwasan-generate-tags-with-calls=1"]
|
||||
clang_hwasan_cflags = clang_hwasan_common_cflags + ["-mllvm", "-hwasan-globals",
|
||||
"-mllvm", "-hwasan-use-short-granules",
|
||||
"-mllvm", "-hwasan-instrument-landing-pads=0",
|
||||
"-mllvm", "-hwasan-instrument-personality-functions"]
|
||||
clang_hwasan_oldrt_cflags = clang_hwasan_common_cflags + ["-mllvm", "-hwasan-use-short-granules=0",
|
||||
"-mllvm", "-hwasan-instrument-landing-pads=1",
|
||||
"-mllvm", "-hwasan-instrument-personality-functions=0"]
|
||||
config.available_features.add("pointer-tagging")
|
||||
if config.target_arch == "x86_64":
|
||||
# The callback instrumentation used on x86_64 has a 1/64 chance of choosing a
|
||||
# stack tag of 0. This causes stack tests to become flaky, so we force tags
|
||||
# to be generated via calls to __hwasan_generate_tag, which never returns 0.
|
||||
# TODO: See if we can remove this once we use the outlined instrumentation.
|
||||
clang_hwasan_common_cflags += ["-mllvm", "-hwasan-generate-tags-with-calls=1"]
|
||||
clang_hwasan_cflags = clang_hwasan_common_cflags + [
|
||||
"-mllvm",
|
||||
"-hwasan-globals",
|
||||
"-mllvm",
|
||||
"-hwasan-use-short-granules",
|
||||
"-mllvm",
|
||||
"-hwasan-instrument-landing-pads=0",
|
||||
"-mllvm",
|
||||
"-hwasan-instrument-personality-functions",
|
||||
]
|
||||
clang_hwasan_oldrt_cflags = clang_hwasan_common_cflags + [
|
||||
"-mllvm",
|
||||
"-hwasan-use-short-granules=0",
|
||||
"-mllvm",
|
||||
"-hwasan-instrument-landing-pads=1",
|
||||
"-mllvm",
|
||||
"-hwasan-instrument-personality-functions=0",
|
||||
]
|
||||
|
||||
clang_hwasan_cxxflags = config.cxx_mode_flags + clang_hwasan_cflags
|
||||
clang_hwasan_oldrt_cxxflags = config.cxx_mode_flags + clang_hwasan_oldrt_cflags
|
||||
|
||||
|
||||
def build_invocation(compile_flags):
|
||||
return " " + " ".join([config.clang] + compile_flags) + " "
|
||||
return " " + " ".join([config.clang] + compile_flags) + " "
|
||||
|
||||
config.substitutions.append( ("%clangxx ", build_invocation(clang_cxxflags)) )
|
||||
config.substitutions.append( ("%clang_hwasan ", build_invocation(clang_hwasan_cflags)) )
|
||||
config.substitutions.append( ("%clang_hwasan_oldrt ", build_invocation(clang_hwasan_oldrt_cflags)) )
|
||||
config.substitutions.append( ("%clangxx_hwasan ", build_invocation(clang_hwasan_cxxflags)) )
|
||||
config.substitutions.append( ("%clangxx_hwasan_oldrt ", build_invocation(clang_hwasan_oldrt_cxxflags)) )
|
||||
config.substitutions.append( ("%compiler_rt_libdir", config.compiler_rt_libdir) )
|
||||
|
||||
default_hwasan_opts_str = ':'.join(['disable_allocator_tagging=1', 'random_tags=0', 'fail_without_syscall_abi=0'] + config.default_sanitizer_opts)
|
||||
config.substitutions.append(("%clangxx ", build_invocation(clang_cxxflags)))
|
||||
config.substitutions.append(("%clang_hwasan ", build_invocation(clang_hwasan_cflags)))
|
||||
config.substitutions.append(
|
||||
("%clang_hwasan_oldrt ", build_invocation(clang_hwasan_oldrt_cflags))
|
||||
)
|
||||
config.substitutions.append(
|
||||
("%clangxx_hwasan ", build_invocation(clang_hwasan_cxxflags))
|
||||
)
|
||||
config.substitutions.append(
|
||||
("%clangxx_hwasan_oldrt ", build_invocation(clang_hwasan_oldrt_cxxflags))
|
||||
)
|
||||
config.substitutions.append(("%compiler_rt_libdir", config.compiler_rt_libdir))
|
||||
|
||||
default_hwasan_opts_str = ":".join(
|
||||
["disable_allocator_tagging=1", "random_tags=0", "fail_without_syscall_abi=0"]
|
||||
+ config.default_sanitizer_opts
|
||||
)
|
||||
if default_hwasan_opts_str:
|
||||
config.environment['HWASAN_OPTIONS'] = default_hwasan_opts_str
|
||||
default_hwasan_opts_str += ':'
|
||||
config.substitutions.append(('%env_hwasan_opts=',
|
||||
'env HWASAN_OPTIONS=' + default_hwasan_opts_str))
|
||||
config.environment["HWASAN_OPTIONS"] = default_hwasan_opts_str
|
||||
default_hwasan_opts_str += ":"
|
||||
config.substitutions.append(
|
||||
("%env_hwasan_opts=", "env HWASAN_OPTIONS=" + default_hwasan_opts_str)
|
||||
)
|
||||
|
||||
# Default test suffixes.
|
||||
config.suffixes = ['.c', '.cpp']
|
||||
config.suffixes = [".c", ".cpp"]
|
||||
|
||||
if config.host_os not in ['Linux', 'Android'] or not config.has_lld:
|
||||
config.unsupported = True
|
||||
if config.host_os not in ["Linux", "Android"] or not config.has_lld:
|
||||
config.unsupported = True
|
||||
|
||||
File diff suppressed because it is too large
@@ -1,9 +1,10 @@
|
||||
def getRoot(config):
|
||||
if not config.parent:
|
||||
return config
|
||||
return getRoot(config.parent)
|
||||
if not config.parent:
|
||||
return config
|
||||
return getRoot(config.parent)
|
||||
|
||||
|
||||
root = getRoot(config)
|
||||
|
||||
if root.host_os not in ['Darwin']:
|
||||
config.unsupported = True
|
||||
if root.host_os not in ["Darwin"]:
|
||||
config.unsupported = True
|
||||
|
||||
@@ -1,9 +1,10 @@
|
||||
def getRoot(config):
|
||||
if not config.parent:
|
||||
return config
|
||||
return getRoot(config.parent)
|
||||
if not config.parent:
|
||||
return config
|
||||
return getRoot(config.parent)
|
||||
|
||||
|
||||
root = getRoot(config)
|
||||
|
||||
if root.host_os not in ['Linux']:
|
||||
config.unsupported = True
|
||||
if root.host_os not in ["Linux"]:
|
||||
config.unsupported = True
|
||||
|
||||
@@ -1,9 +1,10 @@
|
||||
def getRoot(config):
|
||||
if not config.parent:
|
||||
return config
|
||||
return getRoot(config.parent)
|
||||
if not config.parent:
|
||||
return config
|
||||
return getRoot(config.parent)
|
||||
|
||||
|
||||
root = getRoot(config)
|
||||
|
||||
if root.host_os in ['Windows']:
|
||||
config.unsupported = True
|
||||
if root.host_os in ["Windows"]:
|
||||
config.unsupported = True
|
||||
|
||||
@@ -7,66 +7,70 @@ import re
|
||||
|
||||
import lit.util
|
||||
|
||||
|
||||
def get_required_attr(config, attr_name):
|
||||
attr_value = getattr(config, attr_name, None)
|
||||
if attr_value == None:
|
||||
lit_config.fatal(
|
||||
"No attribute %r in test configuration! You may need to run "
|
||||
"tests from your build directory or add this attribute "
|
||||
"to lit.site.cfg.py " % attr_name)
|
||||
return attr_value
|
||||
attr_value = getattr(config, attr_name, None)
|
||||
if attr_value == None:
|
||||
lit_config.fatal(
|
||||
"No attribute %r in test configuration! You may need to run "
|
||||
"tests from your build directory or add this attribute "
|
||||
"to lit.site.cfg.py " % attr_name
|
||||
)
|
||||
return attr_value
|
||||
|
||||
|
||||
# Setup source root.
|
||||
config.test_source_root = os.path.dirname(__file__)
|
||||
|
||||
# Choose between standalone and LSan+(ASan|HWAsan) modes.
|
||||
lsan_lit_test_mode = get_required_attr(config, 'lsan_lit_test_mode')
|
||||
target_arch = getattr(config, 'target_arch', None)
|
||||
lsan_lit_test_mode = get_required_attr(config, "lsan_lit_test_mode")
|
||||
target_arch = getattr(config, "target_arch", None)
|
||||
|
||||
if lsan_lit_test_mode == "Standalone":
|
||||
config.name = "LeakSanitizer-Standalone"
|
||||
lsan_cflags = ["-fsanitize=leak"]
|
||||
config.available_features.add('lsan-standalone')
|
||||
config.name = "LeakSanitizer-Standalone"
|
||||
lsan_cflags = ["-fsanitize=leak"]
|
||||
config.available_features.add("lsan-standalone")
|
||||
elif lsan_lit_test_mode == "AddressSanitizer":
|
||||
config.name = "LeakSanitizer-AddressSanitizer"
|
||||
lsan_cflags = ["-fsanitize=address"]
|
||||
config.available_features.add('asan')
|
||||
if config.host_os == 'NetBSD':
|
||||
config.substitutions.insert(0, ('%run', config.netbsd_noaslr_prefix))
|
||||
config.name = "LeakSanitizer-AddressSanitizer"
|
||||
lsan_cflags = ["-fsanitize=address"]
|
||||
config.available_features.add("asan")
|
||||
if config.host_os == "NetBSD":
|
||||
config.substitutions.insert(0, ("%run", config.netbsd_noaslr_prefix))
|
||||
elif lsan_lit_test_mode == "HWAddressSanitizer":
|
||||
config.name = "LeakSanitizer-HWAddressSanitizer"
|
||||
lsan_cflags = ["-fsanitize=hwaddress", "-fuse-ld=lld"]
|
||||
if target_arch == "x86_64":
|
||||
lsan_cflags = lsan_cflags + [ '-fsanitize-hwaddress-experimental-aliasing']
|
||||
config.available_features.add('hwasan')
|
||||
if config.host_os == 'NetBSD':
|
||||
config.substitutions.insert(0, ('%run', config.netbsd_noaslr_prefix))
|
||||
config.name = "LeakSanitizer-HWAddressSanitizer"
|
||||
lsan_cflags = ["-fsanitize=hwaddress", "-fuse-ld=lld"]
|
||||
if target_arch == "x86_64":
|
||||
lsan_cflags = lsan_cflags + ["-fsanitize-hwaddress-experimental-aliasing"]
|
||||
config.available_features.add("hwasan")
|
||||
if config.host_os == "NetBSD":
|
||||
config.substitutions.insert(0, ("%run", config.netbsd_noaslr_prefix))
|
||||
else:
|
||||
lit_config.fatal("Unknown LSan test mode: %r" % lsan_lit_test_mode)
|
||||
lit_config.fatal("Unknown LSan test mode: %r" % lsan_lit_test_mode)
|
||||
config.name += config.name_suffix
|
||||
|
||||
# Platform-specific default LSAN_OPTIONS for lit tests.
|
||||
default_common_opts_str = ':'.join(list(config.default_sanitizer_opts))
|
||||
default_lsan_opts = default_common_opts_str + ':detect_leaks=1'
|
||||
if config.host_os == 'Darwin':
|
||||
# On Darwin, we default to `abort_on_error=1`, which would make tests run
|
||||
# much slower. Let's override this and run lit tests with 'abort_on_error=0'.
|
||||
# Also, make sure we do not overwhelm the syslog while testing.
|
||||
default_lsan_opts += ':abort_on_error=0'
|
||||
default_lsan_opts += ':log_to_syslog=0'
|
||||
default_common_opts_str = ":".join(list(config.default_sanitizer_opts))
|
||||
default_lsan_opts = default_common_opts_str + ":detect_leaks=1"
|
||||
if config.host_os == "Darwin":
|
||||
# On Darwin, we default to `abort_on_error=1`, which would make tests run
|
||||
# much slower. Let's override this and run lit tests with 'abort_on_error=0'.
|
||||
# Also, make sure we do not overwhelm the syslog while testing.
|
||||
default_lsan_opts += ":abort_on_error=0"
|
||||
default_lsan_opts += ":log_to_syslog=0"
|
||||
|
||||
if default_lsan_opts:
|
||||
config.environment['LSAN_OPTIONS'] = default_lsan_opts
|
||||
default_lsan_opts += ':'
|
||||
config.substitutions.append(('%env_lsan_opts=',
|
||||
'env LSAN_OPTIONS=' + default_lsan_opts))
|
||||
config.environment["LSAN_OPTIONS"] = default_lsan_opts
|
||||
default_lsan_opts += ":"
|
||||
config.substitutions.append(
|
||||
("%env_lsan_opts=", "env LSAN_OPTIONS=" + default_lsan_opts)
|
||||
)
|
||||
|
||||
if lit.util.which('strace'):
|
||||
config.available_features.add('strace')
|
||||
if lit.util.which("strace"):
|
||||
config.available_features.add("strace")
|
||||
|
||||
clang_cflags = ["-O0", config.target_cflags] + config.debug_info_flags
|
||||
if config.android:
|
||||
clang_cflags = clang_cflags + ["-fno-emulated-tls"]
|
||||
clang_cflags = clang_cflags + ["-fno-emulated-tls"]
|
||||
clang_cxxflags = config.cxx_mode_flags + clang_cflags
|
||||
lsan_incdir = config.test_source_root + "/../"
|
||||
clang_lsan_cflags = clang_cflags + lsan_cflags + ["-I%s" % lsan_incdir]
|
||||
@@ -75,33 +79,59 @@ clang_lsan_cxxflags = clang_cxxflags + lsan_cflags + ["-I%s" % lsan_incdir]
|
||||
config.clang_cflags = clang_cflags
|
||||
config.clang_cxxflags = clang_cxxflags

def build_invocation(compile_flags):
    return " " + " ".join([config.clang] + compile_flags) + " "

config.substitutions.append( ("%clang ", build_invocation(clang_cflags)) )
config.substitutions.append( ("%clangxx ", build_invocation(clang_cxxflags)) )
config.substitutions.append( ("%clang_lsan ", build_invocation(clang_lsan_cflags)) )
config.substitutions.append( ("%clangxx_lsan ", build_invocation(clang_lsan_cxxflags)) )
config.substitutions.append( ("%clang_hwasan ", build_invocation(clang_lsan_cflags)) )
config.substitutions.append( ("%clangxx_hwasan ", build_invocation(clang_lsan_cxxflags)) )
def build_invocation(compile_flags):
    return " " + " ".join([config.clang] + compile_flags) + " "


config.substitutions.append(("%clang ", build_invocation(clang_cflags)))
config.substitutions.append(("%clangxx ", build_invocation(clang_cxxflags)))
config.substitutions.append(("%clang_lsan ", build_invocation(clang_lsan_cflags)))
config.substitutions.append(("%clangxx_lsan ", build_invocation(clang_lsan_cxxflags)))
config.substitutions.append(("%clang_hwasan ", build_invocation(clang_lsan_cflags)))
config.substitutions.append(("%clangxx_hwasan ", build_invocation(clang_lsan_cxxflags)))


# LeakSanitizer tests are currently supported on
# Android{aarch64, x86, x86_64}, x86-64 Linux, PowerPC64 Linux, arm Linux, mips64 Linux, s390x Linux, loongarch64 Linux and x86_64 Darwin.
supported_android = config.android and config.target_arch in ['x86_64', 'i386', 'aarch64'] and 'android-thread-properties-api' in config.available_features
supported_linux = (not config.android) and config.host_os == 'Linux' and config.host_arch in ['aarch64', 'x86_64', 'ppc64', 'ppc64le', 'mips64', 'riscv64', 'arm', 'armhf', 'armv7l', 's390x', 'loongarch64']
supported_darwin = config.host_os == 'Darwin' and config.target_arch in ['x86_64']
supported_netbsd = config.host_os == 'NetBSD' and config.target_arch in ['x86_64', 'i386']
supported_android = (
    config.android
    and config.target_arch in ["x86_64", "i386", "aarch64"]
    and "android-thread-properties-api" in config.available_features
)
supported_linux = (
    (not config.android)
    and config.host_os == "Linux"
    and config.host_arch
    in [
        "aarch64",
        "x86_64",
        "ppc64",
        "ppc64le",
        "mips64",
        "riscv64",
        "arm",
        "armhf",
        "armv7l",
        "s390x",
        "loongarch64",
    ]
)
supported_darwin = config.host_os == "Darwin" and config.target_arch in ["x86_64"]
supported_netbsd = config.host_os == "NetBSD" and config.target_arch in [
    "x86_64",
    "i386",
]
if not (supported_android or supported_linux or supported_darwin or supported_netbsd):
    config.unsupported = True
    config.unsupported = True

# Don't support Thumb due to broken fast unwinder
if re.search('mthumb', config.target_cflags) is not None:
    config.unsupported = True
if re.search("mthumb", config.target_cflags) is not None:
    config.unsupported = True

# HWASAN tests require lld because without D65857, ld.bfd and ld.gold would
# generate a corrupted binary. Mark them unsupported if lld is not available.
if 'hwasan' in config.available_features and not config.has_lld:
    config.unsupported = True
if "hwasan" in config.available_features and not config.has_lld:
    config.unsupported = True

config.suffixes = ['.c', '.cpp', '.mm']
config.suffixes = [".c", ".cpp", ".mm"]

@@ -6,87 +6,108 @@ import re

import lit.formats


def get_required_attr(config, attr_name):
    attr_value = getattr(config, attr_name, None)
    if attr_value == None:
        lit_config.fatal(
            "No attribute %r in test configuration! You may need to run "
            "tests from your build directory or add this attribute "
            "to lit.site.cfg.py " % attr_name)
    return attr_value
    attr_value = getattr(config, attr_name, None)
    if attr_value == None:
        lit_config.fatal(
            "No attribute %r in test configuration! You may need to run "
            "tests from your build directory or add this attribute "
            "to lit.site.cfg.py " % attr_name
        )
    return attr_value


# Setup config name.
config.name = 'MemProfiler' + config.name_suffix
config.name = "MemProfiler" + config.name_suffix

# Platform-specific default MEMPROF_OPTIONS for lit tests.
default_memprof_opts = list(config.default_sanitizer_opts)

default_memprof_opts_str = ':'.join(default_memprof_opts)
default_memprof_opts_str = ":".join(default_memprof_opts)
if default_memprof_opts_str:
    config.environment['MEMPROF_OPTIONS'] = default_memprof_opts_str
    default_memprof_opts_str += ':'
config.substitutions.append(('%env_memprof_opts=',
                             'env MEMPROF_OPTIONS=' + default_memprof_opts_str))
    config.environment["MEMPROF_OPTIONS"] = default_memprof_opts_str
    default_memprof_opts_str += ":"
config.substitutions.append(
    ("%env_memprof_opts=", "env MEMPROF_OPTIONS=" + default_memprof_opts_str)
)

# Setup source root.
config.test_source_root = os.path.dirname(__file__)

libdl_flag = '-ldl'
libdl_flag = "-ldl"

# Setup default compiler flags used with -fmemory-profile option.
# FIXME: Review the set of required flags and check if it can be reduced.
target_cflags = [get_required_attr(config, 'target_cflags')]
target_cflags = [get_required_attr(config, "target_cflags")]
target_cxxflags = config.cxx_mode_flags + target_cflags
clang_memprof_static_cflags = (['-fmemory-profile',
                                '-mno-omit-leaf-frame-pointer',
                                '-fno-omit-frame-pointer',
                                '-fno-optimize-sibling-calls'] +
                               config.debug_info_flags + target_cflags)
clang_memprof_static_cflags = (
    [
        "-fmemory-profile",
        "-mno-omit-leaf-frame-pointer",
        "-fno-omit-frame-pointer",
        "-fno-optimize-sibling-calls",
    ]
    + config.debug_info_flags
    + target_cflags
)
clang_memprof_static_cxxflags = config.cxx_mode_flags + clang_memprof_static_cflags

memprof_dynamic_flags = []
if config.memprof_dynamic:
    memprof_dynamic_flags = ['-shared-libsan']
    config.available_features.add('memprof-dynamic-runtime')
    memprof_dynamic_flags = ["-shared-libsan"]
    config.available_features.add("memprof-dynamic-runtime")
else:
    config.available_features.add('memprof-static-runtime')
    config.available_features.add("memprof-static-runtime")
clang_memprof_cflags = clang_memprof_static_cflags + memprof_dynamic_flags
clang_memprof_cxxflags = clang_memprof_static_cxxflags + memprof_dynamic_flags


def build_invocation(compile_flags):
    return ' ' + ' '.join([config.clang] + compile_flags) + ' '
    return " " + " ".join([config.clang] + compile_flags) + " "

config.substitutions.append( ("%clang ", build_invocation(target_cflags)) )
config.substitutions.append( ("%clangxx ", build_invocation(target_cxxflags)) )
config.substitutions.append( ("%clang_memprof ", build_invocation(clang_memprof_cflags)) )
config.substitutions.append( ("%clangxx_memprof ", build_invocation(clang_memprof_cxxflags)) )

config.substitutions.append(("%clang ", build_invocation(target_cflags)))
config.substitutions.append(("%clangxx ", build_invocation(target_cxxflags)))
config.substitutions.append(("%clang_memprof ", build_invocation(clang_memprof_cflags)))
config.substitutions.append(
    ("%clangxx_memprof ", build_invocation(clang_memprof_cxxflags))
)
if config.memprof_dynamic:
    shared_libmemprof_path = os.path.join(config.compiler_rt_libdir, 'libclang_rt.memprof{}.so'.format(config.target_suffix))
    config.substitutions.append( ("%shared_libmemprof", shared_libmemprof_path) )
    config.substitutions.append( ("%clang_memprof_static ", build_invocation(clang_memprof_static_cflags)) )
    config.substitutions.append( ("%clangxx_memprof_static ", build_invocation(clang_memprof_static_cxxflags)) )
    shared_libmemprof_path = os.path.join(
        config.compiler_rt_libdir,
        "libclang_rt.memprof{}.so".format(config.target_suffix),
    )
    config.substitutions.append(("%shared_libmemprof", shared_libmemprof_path))
    config.substitutions.append(
        ("%clang_memprof_static ", build_invocation(clang_memprof_static_cflags))
    )
    config.substitutions.append(
        ("%clangxx_memprof_static ", build_invocation(clang_memprof_static_cxxflags))
    )

config.substitutions.append( ("%libdl", libdl_flag) )
config.substitutions.append(("%libdl", libdl_flag))

config.available_features.add('memprof-' + config.bits + '-bits')
config.available_features.add("memprof-" + config.bits + "-bits")

config.available_features.add('fast-unwinder-works')
config.available_features.add("fast-unwinder-works")

# Set LD_LIBRARY_PATH to pick dynamic runtime up properly.
new_ld_library_path = os.path.pathsep.join(
    (config.compiler_rt_libdir, config.environment.get('LD_LIBRARY_PATH', '')))
config.environment['LD_LIBRARY_PATH'] = new_ld_library_path
    (config.compiler_rt_libdir, config.environment.get("LD_LIBRARY_PATH", ""))
)
config.environment["LD_LIBRARY_PATH"] = new_ld_library_path

# Default test suffixes.
config.suffixes = ['.c', '.cpp']
config.suffixes = [".c", ".cpp"]

config.substitutions.append(('%fPIC', '-fPIC'))
config.substitutions.append(('%fPIE', '-fPIE'))
config.substitutions.append(('%pie', '-pie'))
config.substitutions.append(("%fPIC", "-fPIC"))
config.substitutions.append(("%fPIE", "-fPIE"))
config.substitutions.append(("%pie", "-pie"))

# Only run the tests on supported OSs.
if config.host_os not in ['Linux']:
    config.unsupported = True
if config.host_os not in ["Linux"]:
    config.unsupported = True

if not config.parallelism_group:
    config.parallelism_group = 'shadow-memory'
    config.parallelism_group = "shadow-memory"

@@ -1,9 +1,9 @@
import os

config.name = 'SanitizerBinaryMetadata'
config.name = "SanitizerBinaryMetadata"
config.test_source_root = os.path.dirname(__file__)
config.suffixes = ['.cpp']
config.suffixes = [".cpp"]
# Binary metadata is currently emited only for ELF binaries
# and sizes of stack arguments depend on the arch.
if config.host_os not in ['Linux'] or config.target_arch not in ['x86_64']:
    config.unsupported = True
if config.host_os not in ["Linux"] or config.target_arch not in ["x86_64"]:
    config.unsupported = True

@@ -1,9 +1,10 @@
def getRoot(config):
    if not config.parent:
        return config
    return getRoot(config.parent)
    if not config.parent:
        return config
    return getRoot(config.parent)


root = getRoot(config)

if root.host_os not in ['Linux']:
    config.unsupported = True
if root.host_os not in ["Linux"]:
    config.unsupported = True

@@ -3,51 +3,57 @@
import os

# Setup config name.
config.name = 'MemorySanitizer' + getattr(config, 'name_suffix', 'default')
config.name = "MemorySanitizer" + getattr(config, "name_suffix", "default")

# Setup source root.
config.test_source_root = os.path.dirname(__file__)

# Setup default compiler flags used with -fsanitize=memory option.
clang_msan_cflags = (["-fsanitize=memory",
                      "-mno-omit-leaf-frame-pointer",
                      "-fno-omit-frame-pointer",
                      "-fno-optimize-sibling-calls"] +
                     [config.target_cflags] +
                     config.debug_info_flags)
clang_msan_cflags = (
    [
        "-fsanitize=memory",
        "-mno-omit-leaf-frame-pointer",
        "-fno-omit-frame-pointer",
        "-fno-optimize-sibling-calls",
    ]
    + [config.target_cflags]
    + config.debug_info_flags
)
# Some Msan tests leverage backtrace() which requires libexecinfo on FreeBSD.
if config.host_os == 'FreeBSD':
    clang_msan_cflags += ["-lexecinfo", "-fPIC"]
if config.host_os == "FreeBSD":
    clang_msan_cflags += ["-lexecinfo", "-fPIC"]
# On SystemZ we need -mbackchain to make the fast unwinder work.
if config.target_arch == 's390x':
    clang_msan_cflags.append("-mbackchain")
if config.target_arch == "s390x":
    clang_msan_cflags.append("-mbackchain")
clang_msan_cxxflags = config.cxx_mode_flags + clang_msan_cflags

# Flags for KMSAN invocation. This is C-only, we're not interested in C++.
clang_kmsan_cflags = (["-fsanitize=kernel-memory"] +
                      [config.target_cflags] +
                      config.debug_info_flags)
clang_kmsan_cflags = (
    ["-fsanitize=kernel-memory"] + [config.target_cflags] + config.debug_info_flags
)


def build_invocation(compile_flags):
    return " " + " ".join([config.clang] + compile_flags) + " "
    return " " + " ".join([config.clang] + compile_flags) + " "

config.substitutions.append( ("%clang_msan ", build_invocation(clang_msan_cflags)) )
config.substitutions.append( ("%clangxx_msan ", build_invocation(clang_msan_cxxflags)) )
config.substitutions.append( ("%clang_kmsan ", build_invocation(clang_kmsan_cflags)) )

config.substitutions.append(("%clang_msan ", build_invocation(clang_msan_cflags)))
config.substitutions.append(("%clangxx_msan ", build_invocation(clang_msan_cxxflags)))
config.substitutions.append(("%clang_kmsan ", build_invocation(clang_kmsan_cflags)))

# Default test suffixes.
config.suffixes = ['.c', '.cpp']
config.suffixes = [".c", ".cpp"]

if config.host_os not in ['Linux', 'NetBSD', 'FreeBSD']:
    config.unsupported = True
if config.host_os not in ["Linux", "NetBSD", "FreeBSD"]:
    config.unsupported = True

# For mips64, mips64el we have forced store_context_size to 1 because these
# archs use slow unwinder which is not async signal safe. Therefore we only
# check the first frame since store_context size is 1.
if config.host_arch in ['mips64', 'mips64el']:
    config.substitutions.append( ('CHECK-%short-stack', 'CHECK-SHORT-STACK'))
if config.host_arch in ["mips64", "mips64el"]:
    config.substitutions.append(("CHECK-%short-stack", "CHECK-SHORT-STACK"))
else:
    config.substitutions.append( ('CHECK-%short-stack', 'CHECK-FULL-STACK'))
    config.substitutions.append(("CHECK-%short-stack", "CHECK-FULL-STACK"))

if config.host_os == 'NetBSD':
    config.substitutions.insert(0, ('%run', config.netbsd_noaslr_prefix))
if config.host_os == "NetBSD":
    config.substitutions.insert(0, ("%run", config.netbsd_noaslr_prefix))

@@ -1,5 +1,5 @@
if config.root.host_arch not in ['aarch64', 'arm64']:
    config.unsupported = True
if config.root.host_arch not in ["aarch64", "arm64"]:
    config.unsupported = True

if config.target_arch not in ['aarch64', 'arm64']:
    config.unsupported = True
if config.target_arch not in ["aarch64", "arm64"]:
    config.unsupported = True

@@ -1,2 +1,2 @@
if config.root.host_os != 'Darwin':
    config.unsupported = True
if config.root.host_os != "Darwin":
    config.unsupported = True

@@ -1,5 +1,5 @@
if config.root.host_arch != 'x86_64':
    config.unsupported = True
if config.root.host_arch != "x86_64":
    config.unsupported = True

if config.target_arch != 'x86_64':
    config.unsupported = True
if config.target_arch != "x86_64":
    config.unsupported = True

@@ -1,2 +1,2 @@
if config.root.host_os != 'FreeBSD':
    config.unsupported = True
if config.root.host_os != "FreeBSD":
    config.unsupported = True

@@ -1,5 +1,5 @@
if config.root.host_arch not in ['x86_64', 'amd64']:
    config.unsupported = True
if config.root.host_arch not in ["x86_64", "amd64"]:
    config.unsupported = True

if config.target_arch not in ['x86_64', 'amd64']:
    config.unsupported = True
if config.target_arch not in ["x86_64", "amd64"]:
    config.unsupported = True

@@ -1,2 +1,2 @@
if not config.test_target_is_host_executable:
    config.unsupported = True
    config.unsupported = True

@@ -1,5 +1,5 @@
if config.root.host_arch != 'aarch64':
    config.unsupported = True
if config.root.host_arch != "aarch64":
    config.unsupported = True

if config.target_arch != 'aarch64':
    config.unsupported = True
if config.target_arch != "aarch64":
    config.unsupported = True

@@ -1,2 +1,2 @@
if config.root.host_os != 'Linux':
    config.unsupported = True
if config.root.host_os != "Linux":
    config.unsupported = True

@@ -1,5 +1,5 @@
if config.root.host_arch != 'x86_64':
    config.unsupported = True
if config.root.host_arch != "x86_64":
    config.unsupported = True

if config.target_arch != 'x86_64':
    config.unsupported = True
if config.target_arch != "x86_64":
    config.unsupported = True

@@ -1,2 +1,2 @@
if config.root.host_os != 'Windows':
    config.unsupported = True
if config.root.host_os != "Windows":
    config.unsupported = True

@@ -1,5 +1,5 @@
if config.root.host_arch not in ['AMD64','x86_64']:
    config.unsupported = True
if config.root.host_arch not in ["AMD64", "x86_64"]:
    config.unsupported = True

if config.target_arch not in ['AMD64','x86_64']:
    config.unsupported = True
if config.target_arch not in ["AMD64", "x86_64"]:
    config.unsupported = True

@@ -3,7 +3,7 @@
|
||||
import os
|
||||
|
||||
# Setup config name.
|
||||
config.name = 'ORC' + config.name_suffix
|
||||
config.name = "ORC" + config.name_suffix
|
||||
|
||||
# Setup source root.
|
||||
config.test_source_root = os.path.dirname(__file__)
|
||||
@@ -12,51 +12,70 @@ config.test_source_root = os.path.dirname(__file__)
|
||||
host_arch_compatible = config.target_arch == config.host_arch
|
||||
|
||||
if config.host_arch == "x86_64h" and config.target_arch == "x86_64":
|
||||
host_arch_compatible = True
|
||||
config.test_target_is_host_executable = config.target_os == config.host_os and host_arch_compatible
|
||||
host_arch_compatible = True
|
||||
config.test_target_is_host_executable = (
|
||||
config.target_os == config.host_os and host_arch_compatible
|
||||
)
|
||||
|
||||
# Assume that llvm-jitlink is in the config.llvm_tools_dir.
|
||||
llvm_jitlink = os.path.join(config.llvm_tools_dir, 'llvm-jitlink')
|
||||
orc_rt_executor_stem = os.path.join(config.compiler_rt_obj_root, 'lib/orc/tests/tools/orc-rt-executor')
|
||||
lli = os.path.join(config.llvm_tools_dir, 'lli')
|
||||
if config.host_os == 'Darwin':
|
||||
orc_rt_path = '%s/liborc_rt_osx.a' % config.compiler_rt_libdir
|
||||
llvm_jitlink = os.path.join(config.llvm_tools_dir, "llvm-jitlink")
|
||||
orc_rt_executor_stem = os.path.join(
|
||||
config.compiler_rt_obj_root, "lib/orc/tests/tools/orc-rt-executor"
|
||||
)
|
||||
lli = os.path.join(config.llvm_tools_dir, "lli")
|
||||
if config.host_os == "Darwin":
|
||||
orc_rt_path = "%s/liborc_rt_osx.a" % config.compiler_rt_libdir
|
||||
else:
|
||||
orc_rt_path = '%s/liborc_rt%s.a' % (config.compiler_rt_libdir, config.target_suffix)
|
||||
orc_rt_path = "%s/liborc_rt%s.a" % (config.compiler_rt_libdir, config.target_suffix)
|
||||
|
||||
if config.libunwind_shared:
|
||||
config.available_features.add('libunwind-available')
|
||||
shared_libunwind_path = os.path.join(config.libunwind_install_dir, 'libunwind.so')
|
||||
config.substitutions.append( ("%shared_libunwind", shared_libunwind_path) )
|
||||
config.available_features.add("libunwind-available")
|
||||
shared_libunwind_path = os.path.join(config.libunwind_install_dir, "libunwind.so")
|
||||
config.substitutions.append(("%shared_libunwind", shared_libunwind_path))
|
||||
|
||||
|
||||
def build_invocation(compile_flags):
|
||||
return ' ' + ' '.join([config.clang] + compile_flags) + ' '
|
||||
return " " + " ".join([config.clang] + compile_flags) + " "
|
||||
|
||||
|
||||
config.substitutions.append(("%clang ", build_invocation([config.target_cflags])))
|
||||
config.substitutions.append(
|
||||
('%clang ', build_invocation([config.target_cflags])))
|
||||
("%clangxx ", build_invocation(config.cxx_mode_flags + [config.target_cflags]))
|
||||
)
|
||||
config.substitutions.append(
|
||||
('%clangxx ',
|
||||
build_invocation(config.cxx_mode_flags + [config.target_cflags])))
|
||||
config.substitutions.append(
|
||||
('%clang_cl ',
|
||||
build_invocation(['--driver-mode=cl'] + [config.target_cflags])))
|
||||
if config.host_os == 'Windows':
|
||||
config.substitutions.append(
|
||||
('%llvm_jitlink', (llvm_jitlink + ' -orc-runtime=' +
|
||||
orc_rt_path + ' -no-process-syms=true -slab-allocate=64MB')))
|
||||
("%clang_cl ", build_invocation(["--driver-mode=cl"] + [config.target_cflags]))
|
||||
)
|
||||
if config.host_os == "Windows":
|
||||
config.substitutions.append(
|
||||
(
|
||||
"%llvm_jitlink",
|
||||
(
|
||||
llvm_jitlink
|
||||
+ " -orc-runtime="
|
||||
+ orc_rt_path
|
||||
+ " -no-process-syms=true -slab-allocate=64MB"
|
||||
),
|
||||
)
|
||||
)
|
||||
else:
|
||||
config.substitutions.append(
|
||||
('%llvm_jitlink', (llvm_jitlink + ' -orc-runtime=' + orc_rt_path)))
|
||||
config.substitutions.append(
|
||||
("%llvm_jitlink", (llvm_jitlink + " -orc-runtime=" + orc_rt_path))
|
||||
)
|
||||
config.substitutions.append(
|
||||
('%orc_rt_executor', orc_rt_executor_stem + "-" + config.host_arch))
|
||||
("%orc_rt_executor", orc_rt_executor_stem + "-" + config.host_arch)
|
||||
)
|
||||
config.substitutions.append(
|
||||
('%lli_orc_jitlink', (lli + ' -jit-kind=orc -jit-linker=jitlink -orc-runtime=' + orc_rt_path)))
|
||||
(
|
||||
"%lli_orc_jitlink",
|
||||
(lli + " -jit-kind=orc -jit-linker=jitlink -orc-runtime=" + orc_rt_path),
|
||||
)
|
||||
)
|
||||
|
||||
# Default test suffixes.
|
||||
config.suffixes = ['.c', '.cpp', '.S', '.ll', '.test']
|
||||
config.suffixes = [".c", ".cpp", ".S", ".ll", ".test"]
|
||||
|
||||
# Exclude Inputs directories.
|
||||
config.excludes = ['Inputs']
|
||||
config.excludes = ["Inputs"]
|
||||
|
||||
if config.host_os not in ['Darwin', 'FreeBSD', 'Linux', 'Windows']:
|
||||
config.unsupported = True
|
||||
if config.host_os not in ["Darwin", "FreeBSD", "Linux", "Windows"]:
|
||||
config.unsupported = True
|
||||
|
||||
@@ -1,9 +1,10 @@
|
||||
def getRoot(config):
|
||||
if not config.parent:
|
||||
return config
|
||||
return getRoot(config.parent)
|
||||
if not config.parent:
|
||||
return config
|
||||
return getRoot(config.parent)
|
||||
|
||||
|
||||
root = getRoot(config)
|
||||
|
||||
if root.host_os not in ['AIX']:
|
||||
config.unsupported = True
|
||||
if root.host_os not in ["AIX"]:
|
||||
config.unsupported = True
|
||||
|
||||
@@ -1,9 +1,10 @@
|
||||
def getRoot(config):
|
||||
if not config.parent:
|
||||
return config
|
||||
return getRoot(config.parent)
|
||||
if not config.parent:
|
||||
return config
|
||||
return getRoot(config.parent)
|
||||
|
||||
|
||||
root = getRoot(config)
|
||||
|
||||
if root.host_os not in ['Darwin']:
|
||||
config.unsupported = True
|
||||
if root.host_os not in ["Darwin"]:
|
||||
config.unsupported = True
|
||||
|
||||
@@ -1,43 +1,49 @@
|
||||
import subprocess
|
||||
|
||||
|
||||
def getRoot(config):
|
||||
if not config.parent:
|
||||
return config
|
||||
return getRoot(config.parent)
|
||||
if not config.parent:
|
||||
return config
|
||||
return getRoot(config.parent)
|
||||
|
||||
|
||||
def is_gold_linker_available():
|
||||
|
||||
if not config.gold_executable:
|
||||
return False
|
||||
try:
|
||||
ld_cmd = subprocess.Popen([config.gold_executable, '--help'], stdout = subprocess.PIPE)
|
||||
ld_out = ld_cmd.stdout.read().decode()
|
||||
ld_cmd.wait()
|
||||
except:
|
||||
if not config.gold_executable:
|
||||
return False
|
||||
try:
|
||||
ld_cmd = subprocess.Popen(
|
||||
[config.gold_executable, "--help"], stdout=subprocess.PIPE
|
||||
)
|
||||
ld_out = ld_cmd.stdout.read().decode()
|
||||
ld_cmd.wait()
|
||||
except:
|
||||
return False
|
||||
|
||||
if not "-plugin" in ld_out:
|
||||
return False
|
||||
|
||||
# config.clang is not guaranteed to be just the executable!
|
||||
clang_cmd = subprocess.Popen(
|
||||
" ".join([config.clang, "-fuse-ld=gold", "-xc", "-"]),
|
||||
shell=True,
|
||||
universal_newlines=True,
|
||||
stdin=subprocess.PIPE,
|
||||
stdout=subprocess.PIPE,
|
||||
stderr=subprocess.PIPE,
|
||||
)
|
||||
clang_err = clang_cmd.communicate("int main() { return 0; }")[1]
|
||||
|
||||
if not "invalid linker" in clang_err:
|
||||
return True
|
||||
|
||||
return False
|
||||
|
||||
if not '-plugin' in ld_out:
|
||||
return False
|
||||
|
||||
# config.clang is not guaranteed to be just the executable!
|
||||
clang_cmd = subprocess.Popen(" ".join([config.clang, '-fuse-ld=gold', '-xc', '-']),
|
||||
shell=True,
|
||||
universal_newlines = True,
|
||||
stdin = subprocess.PIPE,
|
||||
stdout = subprocess.PIPE,
|
||||
stderr = subprocess.PIPE)
|
||||
clang_err = clang_cmd.communicate('int main() { return 0; }')[1]
|
||||
|
||||
if not 'invalid linker' in clang_err:
|
||||
return True
|
||||
|
||||
return False
|
||||
|
||||
root = getRoot(config)
|
||||
|
||||
if root.host_os not in ['Linux'] or not is_gold_linker_available():
|
||||
config.unsupported = True
|
||||
if root.host_os not in ["Linux"] or not is_gold_linker_available():
|
||||
config.unsupported = True
|
||||
|
||||
if config.have_curl:
|
||||
config.available_features.add('curl')
|
||||
config.available_features.add("curl")
|
||||
|
||||
@@ -1,16 +1,17 @@
|
||||
def getRoot(config):
|
||||
if not config.parent:
|
||||
return config
|
||||
return getRoot(config.parent)
|
||||
if not config.parent:
|
||||
return config
|
||||
return getRoot(config.parent)
|
||||
|
||||
|
||||
root = getRoot(config)
|
||||
|
||||
if root.host_os in ['Windows']:
|
||||
config.unsupported = True
|
||||
if root.host_os in ["Windows"]:
|
||||
config.unsupported = True
|
||||
|
||||
# AIX usually usually makes use of an explicit export list when linking a shared
|
||||
# object, since the linker doesn't export anything by default.
|
||||
if root.host_os in ['AIX']:
|
||||
config.substitutions.append(('%shared_linker_xopts', '-Wl,-bE:shr.exp'))
|
||||
if root.host_os in ["AIX"]:
|
||||
config.substitutions.append(("%shared_linker_xopts", "-Wl,-bE:shr.exp"))
|
||||
else:
|
||||
config.substitutions.append(('%shared_linker_xopts', ''))
|
||||
config.substitutions.append(("%shared_linker_xopts", ""))
|
||||
|
||||
@@ -1,9 +1,10 @@
|
||||
def getRoot(config):
|
||||
if not config.parent:
|
||||
return config
|
||||
return getRoot(config.parent)
|
||||
if not config.parent:
|
||||
return config
|
||||
return getRoot(config.parent)
|
||||
|
||||
|
||||
root = getRoot(config)
|
||||
|
||||
if root.host_os not in ['Windows']:
|
||||
config.unsupported = True
|
||||
if root.host_os not in ["Windows"]:
|
||||
config.unsupported = True
|
||||
|
||||
@@ -3,45 +3,50 @@
|
||||
import os
|
||||
import re
|
||||
|
||||
|
||||
def get_required_attr(config, attr_name):
|
||||
attr_value = getattr(config, attr_name, None)
|
||||
if attr_value == None:
|
||||
lit_config.fatal(
|
||||
"No attribute %r in test configuration! You may need to run "
|
||||
"tests from your build directory or add this attribute "
|
||||
"to lit.site.cfg.py " % attr_name)
|
||||
return attr_value
|
||||
attr_value = getattr(config, attr_name, None)
|
||||
if attr_value == None:
|
||||
lit_config.fatal(
|
||||
"No attribute %r in test configuration! You may need to run "
|
||||
"tests from your build directory or add this attribute "
|
||||
"to lit.site.cfg.py " % attr_name
|
||||
)
|
||||
return attr_value
|
||||
|
||||
|
||||
# Setup config name.
|
||||
config.name = 'Profile-' + config.target_arch
|
||||
config.name = "Profile-" + config.target_arch
|
||||
|
||||
# Setup source root.
|
||||
config.test_source_root = os.path.dirname(__file__)
|
||||
|
||||
# Setup executable root.
|
||||
if hasattr(config, 'profile_lit_binary_dir') and \
|
||||
config.profile_lit_binary_dir is not None:
|
||||
if (
|
||||
hasattr(config, "profile_lit_binary_dir")
|
||||
and config.profile_lit_binary_dir is not None
|
||||
):
|
||||
config.test_exec_root = os.path.join(config.profile_lit_binary_dir, config.name)
|
||||
|
||||
target_is_msvc = bool(re.match(r'.*-windows-msvc$', config.target_triple))
|
||||
target_is_msvc = bool(re.match(r".*-windows-msvc$", config.target_triple))
|
||||
|
||||
if config.host_os in ['Linux']:
|
||||
extra_link_flags = ["-ldl"]
|
||||
if config.host_os in ["Linux"]:
|
||||
extra_link_flags = ["-ldl"]
|
||||
elif target_is_msvc:
|
||||
# InstrProf is incompatible with incremental linking. Disable it as a
|
||||
# workaround.
|
||||
extra_link_flags = ["-Wl,-incremental:no"]
|
||||
# InstrProf is incompatible with incremental linking. Disable it as a
|
||||
# workaround.
|
||||
extra_link_flags = ["-Wl,-incremental:no"]
|
||||
else:
|
||||
extra_link_flags = []
|
||||
extra_link_flags = []
|
||||
|
||||
# Test suffixes.
|
||||
config.suffixes = ['.c', '.cpp', '.m', '.mm', '.ll', '.test']
|
||||
config.suffixes = [".c", ".cpp", ".m", ".mm", ".ll", ".test"]
|
||||
|
||||
# What to exclude.
|
||||
config.excludes = ['Inputs']
|
||||
config.excludes = ["Inputs"]
|
||||
|
||||
# Clang flags.
|
||||
target_cflags=[get_required_attr(config, "target_cflags")]
|
||||
target_cflags = [get_required_attr(config, "target_cflags")]
|
||||
clang_cflags = target_cflags + extra_link_flags
|
||||
clang_cxxflags = config.cxx_mode_flags + clang_cflags
|
||||
|
||||
@@ -51,65 +56,126 @@ clang_cxxflags = config.cxx_mode_flags + clang_cflags
|
||||
# We remove -stdlib= from the cflags here to avoid problems, but the interaction between
|
||||
# CMake and compiler-rt's tests should be reworked so that cflags don't contain C++ only
|
||||
# flags.
|
||||
clang_cflags = [flag.replace('-stdlib=libc++', '').replace('-stdlib=libstdc++', '') for flag in clang_cflags]
|
||||
clang_cflags = [
|
||||
flag.replace("-stdlib=libc++", "").replace("-stdlib=libstdc++", "")
|
||||
for flag in clang_cflags
|
||||
]
|
||||
|
||||
|
||||
def build_invocation(compile_flags, with_lto=False):
|
||||
lto_flags = []
|
||||
if with_lto and config.lto_supported:
|
||||
lto_flags += config.lto_flags
|
||||
return " " + " ".join([config.clang] + lto_flags + compile_flags) + " "
|
||||
|
||||
def build_invocation(compile_flags, with_lto = False):
|
||||
lto_flags = []
|
||||
if with_lto and config.lto_supported:
|
||||
lto_flags += config.lto_flags
|
||||
return " " + " ".join([config.clang] + lto_flags + compile_flags) + " "
|
||||
|
||||
def exclude_unsupported_files_for_aix(dirname):
|
||||
for filename in os.listdir(dirname):
|
||||
source_path = os.path.join( dirname, filename)
|
||||
if os.path.isdir(source_path):
|
||||
continue
|
||||
f = open(source_path, 'r')
|
||||
try:
|
||||
data = f.read()
|
||||
# -fprofile-instr-generate and rpath are not supported on AIX, exclude all tests with them.
|
||||
if ("%clang_profgen" in data or "%clangxx_profgen" in data or "-rpath" in data):
|
||||
config.excludes += [ filename ]
|
||||
finally:
|
||||
f.close()
|
||||
for filename in os.listdir(dirname):
|
||||
source_path = os.path.join(dirname, filename)
|
||||
if os.path.isdir(source_path):
|
||||
continue
|
||||
f = open(source_path, "r")
|
||||
try:
|
||||
data = f.read()
|
||||
# -fprofile-instr-generate and rpath are not supported on AIX, exclude all tests with them.
|
||||
if (
|
||||
"%clang_profgen" in data
|
||||
or "%clangxx_profgen" in data
|
||||
or "-rpath" in data
|
||||
):
|
||||
config.excludes += [filename]
|
||||
finally:
|
||||
f.close()
|
||||
|
||||
|
||||
# Add clang substitutions.
|
||||
config.substitutions.append( ("%clang ", build_invocation(clang_cflags)) )
|
||||
config.substitutions.append( ("%clangxx ", build_invocation(clang_cxxflags)) )
|
||||
config.substitutions.append(("%clang ", build_invocation(clang_cflags)))
|
||||
config.substitutions.append(("%clangxx ", build_invocation(clang_cxxflags)))
|
||||
|
||||
config.substitutions.append( ("%clang_profgen ", build_invocation(clang_cflags) + " -fprofile-instr-generate ") )
|
||||
config.substitutions.append( ("%clang_profgen=", build_invocation(clang_cflags) + " -fprofile-instr-generate=") )
|
||||
config.substitutions.append( ("%clangxx_profgen ", build_invocation(clang_cxxflags) + " -fprofile-instr-generate ") )
|
||||
config.substitutions.append( ("%clangxx_profgen=", build_invocation(clang_cxxflags) + " -fprofile-instr-generate=") )
|
||||
config.substitutions.append(
|
||||
("%clang_profgen ", build_invocation(clang_cflags) + " -fprofile-instr-generate ")
|
||||
)
|
||||
config.substitutions.append(
|
||||
("%clang_profgen=", build_invocation(clang_cflags) + " -fprofile-instr-generate=")
|
||||
)
|
||||
config.substitutions.append(
|
||||
(
|
||||
"%clangxx_profgen ",
|
||||
build_invocation(clang_cxxflags) + " -fprofile-instr-generate ",
|
||||
)
|
||||
)
|
||||
config.substitutions.append(
|
||||
(
|
||||
"%clangxx_profgen=",
|
||||
build_invocation(clang_cxxflags) + " -fprofile-instr-generate=",
|
||||
)
|
||||
)
|
||||
|
||||
config.substitutions.append( ("%clang_pgogen ", build_invocation(clang_cflags) + " -fprofile-generate ") )
|
||||
config.substitutions.append( ("%clang_pgogen=", build_invocation(clang_cflags) + " -fprofile-generate=") )
|
||||
config.substitutions.append( ("%clangxx_pgogen ", build_invocation(clang_cxxflags) + " -fprofile-generate ") )
|
||||
config.substitutions.append( ("%clangxx_pgogen=", build_invocation(clang_cxxflags) + " -fprofile-generate=") )
|
||||
config.substitutions.append(
|
||||
("%clang_pgogen ", build_invocation(clang_cflags) + " -fprofile-generate ")
|
||||
)
|
||||
config.substitutions.append(
|
||||
("%clang_pgogen=", build_invocation(clang_cflags) + " -fprofile-generate=")
|
||||
)
|
||||
config.substitutions.append(
|
||||
("%clangxx_pgogen ", build_invocation(clang_cxxflags) + " -fprofile-generate ")
|
||||
)
|
||||
config.substitutions.append(
|
||||
("%clangxx_pgogen=", build_invocation(clang_cxxflags) + " -fprofile-generate=")
|
||||
)
|
||||
|
||||
config.substitutions.append( ("%clang_cspgogen ", build_invocation(clang_cflags) + " -fcs-profile-generate ") )
|
||||
config.substitutions.append( ("%clang_cspgogen=", build_invocation(clang_cflags) + " -fcs-profile-generate=") )
|
||||
config.substitutions.append( ("%clangxx_cspgogen ", build_invocation(clang_cxxflags) + " -fcs-profile-generate ") )
|
||||
config.substitutions.append( ("%clangxx_cspgogen=", build_invocation(clang_cxxflags) + " -fcs-profile-generate=") )
|
||||
config.substitutions.append(
|
||||
("%clang_cspgogen ", build_invocation(clang_cflags) + " -fcs-profile-generate ")
|
||||
)
|
||||
config.substitutions.append(
|
||||
("%clang_cspgogen=", build_invocation(clang_cflags) + " -fcs-profile-generate=")
|
||||
)
|
||||
config.substitutions.append(
|
||||
("%clangxx_cspgogen ", build_invocation(clang_cxxflags) + " -fcs-profile-generate ")
|
||||
)
|
||||
config.substitutions.append(
|
||||
("%clangxx_cspgogen=", build_invocation(clang_cxxflags) + " -fcs-profile-generate=")
|
||||
)
|
||||
|
||||
config.substitutions.append( ("%clang_profuse=", build_invocation(clang_cflags) + " -fprofile-instr-use=") )
|
||||
config.substitutions.append( ("%clangxx_profuse=", build_invocation(clang_cxxflags) + " -fprofile-instr-use=") )
|
||||
config.substitutions.append(
|
||||
("%clang_profuse=", build_invocation(clang_cflags) + " -fprofile-instr-use=")
|
||||
)
|
||||
config.substitutions.append(
|
||||
("%clangxx_profuse=", build_invocation(clang_cxxflags) + " -fprofile-instr-use=")
|
||||
)
|
||||
|
||||
config.substitutions.append( ("%clang_pgouse=", build_invocation(clang_cflags) + " -fprofile-use=") )
|
||||
config.substitutions.append( ("%clangxx_profuse=", build_invocation(clang_cxxflags) + " -fprofile-instr-use=") )
|
||||
config.substitutions.append(
|
||||
("%clang_pgouse=", build_invocation(clang_cflags) + " -fprofile-use=")
|
||||
)
|
||||
config.substitutions.append(
|
||||
("%clangxx_profuse=", build_invocation(clang_cxxflags) + " -fprofile-instr-use=")
|
||||
)
|
||||
|
||||
config.substitutions.append( ("%clang_lto_profgen=", build_invocation(clang_cflags, True) + " -fprofile-instr-generate=") )
|
||||
config.substitutions.append(
|
||||
(
|
||||
"%clang_lto_profgen=",
|
||||
build_invocation(clang_cflags, True) + " -fprofile-instr-generate=",
|
||||
)
|
||||
)
|
||||
|
||||
if config.host_os not in ['Windows', 'Darwin', 'FreeBSD', 'Linux', 'NetBSD', 'SunOS', 'AIX']:
|
||||
config.unsupported = True
|
||||
if config.host_os not in [
|
||||
"Windows",
|
||||
"Darwin",
|
||||
"FreeBSD",
|
||||
"Linux",
|
||||
"NetBSD",
|
||||
"SunOS",
|
||||
"AIX",
|
||||
]:
|
||||
config.unsupported = True
|
||||
|
||||
if config.host_os in ['AIX']:
|
||||
config.available_features.add('system-aix')
|
||||
exclude_unsupported_files_for_aix(config.test_source_root)
|
||||
exclude_unsupported_files_for_aix(config.test_source_root + "/Posix")
|
||||
if config.host_os in ["AIX"]:
|
||||
config.available_features.add("system-aix")
|
||||
exclude_unsupported_files_for_aix(config.test_source_root)
|
||||
exclude_unsupported_files_for_aix(config.test_source_root + "/Posix")
|
||||
|
||||
if config.target_arch in ['armv7l']:
|
||||
config.unsupported = True
|
||||
if config.target_arch in ["armv7l"]:
|
||||
config.unsupported = True
|
||||
|
||||
if config.android:
|
||||
config.unsupported = True
|
||||
config.unsupported = True
|
||||
|
||||
@@ -3,20 +3,29 @@
|
||||
import os
|
||||
|
||||
# Setup config name.
|
||||
config.name = 'SafeStack'
|
||||
config.name = "SafeStack"
|
||||
|
||||
# Setup source root.
|
||||
config.test_source_root = os.path.dirname(__file__)
|
||||
|
||||
# Test suffixes.
|
||||
config.suffixes = ['.c', '.cpp', '.m', '.mm', '.ll', '.test']
|
||||
config.suffixes = [".c", ".cpp", ".m", ".mm", ".ll", ".test"]
|
||||
|
||||
# Add clang substitutions.
|
||||
config.substitutions.append( ("%clang_nosafestack ", config.clang + " -O0 -fno-sanitize=safe-stack ") )
|
||||
config.substitutions.append( ("%clang_safestack ", config.clang + " -O0 -fsanitize=safe-stack ") )
|
||||
config.substitutions.append(
|
||||
("%clang_nosafestack ", config.clang + " -O0 -fno-sanitize=safe-stack ")
|
||||
)
|
||||
config.substitutions.append(
|
||||
("%clang_safestack ", config.clang + " -O0 -fsanitize=safe-stack ")
|
||||
)
|
||||
|
||||
if config.lto_supported:
|
||||
config.substitutions.append((r"%clang_lto_safestack ", ' '.join([config.clang] + config.lto_flags + ['-fsanitize=safe-stack '])))
|
||||
config.substitutions.append(
|
||||
(
|
||||
r"%clang_lto_safestack ",
|
||||
" ".join([config.clang] + config.lto_flags + ["-fsanitize=safe-stack "]),
|
||||
)
|
||||
)
|
||||
|
||||
if config.host_os not in ['Linux', 'FreeBSD', 'NetBSD']:
|
||||
config.unsupported = True
|
||||
if config.host_os not in ["Linux", "FreeBSD", "NetBSD"]:
|
||||
config.unsupported = True
|
||||
|
||||
@@ -1,9 +1,10 @@
|
||||
def getRoot(config):
|
||||
if not config.parent:
|
||||
return config
|
||||
return getRoot(config.parent)
|
||||
if not config.parent:
|
||||
return config
|
||||
return getRoot(config.parent)
|
||||
|
||||
|
||||
root = getRoot(config)
|
||||
|
||||
if root.host_os not in ['Darwin']:
|
||||
config.unsupported = True
|
||||
if root.host_os not in ["Darwin"]:
|
||||
config.unsupported = True
|
||||
|
||||
@@ -1,9 +1,10 @@
|
||||
def getRoot(config):
|
||||
if not config.parent:
|
||||
return config
|
||||
return getRoot(config.parent)
|
||||
if not config.parent:
|
||||
return config
|
||||
return getRoot(config.parent)
|
||||
|
||||
|
||||
root = getRoot(config)
|
||||
|
||||
if root.host_os not in ['FreeBSD']:
|
||||
config.unsupported = True
|
||||
if root.host_os not in ["FreeBSD"]:
|
||||
config.unsupported = True
|
||||
|
||||
@@ -1,9 +1,10 @@
|
||||
def getRoot(config):
|
||||
if not config.parent:
|
||||
return config
|
||||
return getRoot(config.parent)
|
||||
if not config.parent:
|
||||
return config
|
||||
return getRoot(config.parent)
|
||||
|
||||
|
||||
root = getRoot(config)
|
||||
|
||||
if root.host_os not in ['Linux']:
|
||||
config.unsupported = True
|
||||
if root.host_os not in ["Linux"]:
|
||||
config.unsupported = True
|
||||
|
||||
@@ -1,9 +1,10 @@
|
||||
def getRoot(config):
|
||||
if not config.parent:
|
||||
return config
|
||||
return getRoot(config.parent)
|
||||
if not config.parent:
|
||||
return config
|
||||
return getRoot(config.parent)
|
||||
|
||||
|
||||
root = getRoot(config)
|
||||
|
||||
if root.host_os not in ['NetBSD']:
|
||||
config.unsupported = True
|
||||
if root.host_os not in ["NetBSD"]:
|
||||
config.unsupported = True
|
||||
|
||||
@@ -1,9 +1,10 @@
|
||||
def getRoot(config):
|
||||
if not config.parent:
|
||||
return config
|
||||
return getRoot(config.parent)
|
||||
if not config.parent:
|
||||
return config
|
||||
return getRoot(config.parent)
|
||||
|
||||
|
||||
root = getRoot(config)
|
||||
|
||||
if root.host_os in ['Windows']:
|
||||
config.unsupported = True
|
||||
if root.host_os in ["Windows"]:
|
||||
config.unsupported = True
|
||||
|
||||
@@ -1,44 +1,52 @@
|
||||
import os, sys, subprocess, tempfile
|
||||
import time
|
||||
|
||||
ANDROID_TMPDIR = '/data/local/tmp/Output'
|
||||
ADB = os.environ.get('ADB', 'adb')
|
||||
ANDROID_TMPDIR = "/data/local/tmp/Output"
|
||||
ADB = os.environ.get("ADB", "adb")
|
||||
|
||||
verbose = False
|
||||
if os.environ.get('ANDROID_RUN_VERBOSE') == '1':
|
||||
if os.environ.get("ANDROID_RUN_VERBOSE") == "1":
|
||||
verbose = True
|
||||
|
||||
|
||||
def host_to_device_path(path):
|
||||
rel = os.path.relpath(path, "/")
|
||||
dev = os.path.join(ANDROID_TMPDIR, rel)
|
||||
return dev
|
||||
|
||||
def adb(args, attempts = 1, timeout_sec = 600):
|
||||
|
||||
def adb(args, attempts=1, timeout_sec=600):
|
||||
if verbose:
|
||||
print(args)
|
||||
tmpname = tempfile.mktemp()
|
||||
out = open(tmpname, 'w')
|
||||
out = open(tmpname, "w")
|
||||
ret = 255
|
||||
while attempts > 0 and ret != 0:
|
||||
attempts -= 1
|
||||
ret = subprocess.call(['timeout', str(timeout_sec), ADB] + args, stdout=out, stderr=subprocess.STDOUT)
|
||||
attempts -= 1
|
||||
ret = subprocess.call(
|
||||
["timeout", str(timeout_sec), ADB] + args,
|
||||
stdout=out,
|
||||
stderr=subprocess.STDOUT,
|
||||
)
|
||||
if ret != 0:
|
||||
print("adb command failed", args)
|
||||
print(tmpname)
|
||||
out.close()
|
||||
out = open(tmpname, 'r')
|
||||
print(out.read())
|
||||
print("adb command failed", args)
|
||||
print(tmpname)
|
||||
out.close()
|
||||
out = open(tmpname, "r")
|
||||
print(out.read())
|
||||
out.close()
|
||||
os.unlink(tmpname)
|
||||
return ret
|
||||
|
||||
|
||||
def pull_from_device(path):
|
||||
tmp = tempfile.mktemp()
|
||||
adb(['pull', path, tmp], 5, 60)
|
||||
text = open(tmp, 'r').read()
|
||||
adb(["pull", path, tmp], 5, 60)
|
||||
text = open(tmp, "r").read()
|
||||
os.unlink(tmp)
|
||||
return text
|
||||
|
||||
|
||||
def push_to_device(path):
|
||||
dst_path = host_to_device_path(path)
|
||||
adb(['push', path, dst_path], 5, 60)
|
||||
adb(["push", path, dst_path], 5, 60)
|
||||
|
||||
@@ -5,19 +5,19 @@ from android_common import *
|
||||
|
||||
|
||||
here = os.path.abspath(os.path.dirname(sys.argv[0]))
|
||||
android_run = os.path.join(here, 'android_run.py')
|
||||
android_run = os.path.join(here, "android_run.py")
|
||||
|
||||
output = None
|
||||
output_type = 'executable'
|
||||
output_type = "executable"
|
||||
|
||||
args = sys.argv[1:]
|
||||
while args:
|
||||
arg = args.pop(0)
|
||||
if arg == '-shared':
|
||||
output_type = 'shared'
|
||||
elif arg == '-c':
|
||||
output_type = 'object'
|
||||
elif arg == '-o':
|
||||
if arg == "-shared":
|
||||
output_type = "shared"
|
||||
elif arg == "-c":
|
||||
output_type = "object"
|
||||
elif arg == "-o":
|
||||
output = args.pop(0)
|
||||
|
||||
if output == None:
|
||||
@@ -28,9 +28,9 @@ ret = subprocess.call(sys.argv[1:])
|
||||
if ret != 0:
|
||||
sys.exit(ret)
|
||||
|
||||
if output_type in ['executable', 'shared']:
|
||||
if output_type in ["executable", "shared"]:
|
||||
push_to_device(output)
|
||||
|
||||
if output_type == 'executable':
|
||||
os.rename(output, output + '.real')
|
||||
if output_type == "executable":
|
||||
os.rename(output, output + ".real")
|
||||
os.symlink(android_run, output)
|
||||
|
||||
@@ -3,29 +3,47 @@
|
||||
import os, signal, sys, subprocess, tempfile
|
||||
from android_common import *
|
||||
|
||||
ANDROID_TMPDIR = '/data/local/tmp/Output'
|
||||
ANDROID_TMPDIR = "/data/local/tmp/Output"
|
||||
|
||||
device_binary = host_to_device_path(sys.argv[0])
|
||||
|
||||
|
||||
def build_env():
|
||||
args = []
|
||||
# Android linker ignores RPATH. Set LD_LIBRARY_PATH to Output dir.
|
||||
args.append('LD_LIBRARY_PATH=%s' % (ANDROID_TMPDIR,))
|
||||
args.append("LD_LIBRARY_PATH=%s" % (ANDROID_TMPDIR,))
|
||||
for (key, value) in list(os.environ.items()):
|
||||
if key in ['ASAN_ACTIVATION_OPTIONS', 'SCUDO_OPTIONS'] or key.endswith('SAN_OPTIONS'):
|
||||
if key in ["ASAN_ACTIVATION_OPTIONS", "SCUDO_OPTIONS"] or key.endswith(
|
||||
"SAN_OPTIONS"
|
||||
):
|
||||
args.append('%s="%s"' % (key, value.replace('"', '\\"')))
|
||||
return ' '.join(args)
|
||||
return " ".join(args)
|
||||
|
||||
is_64bit = str(subprocess.check_output(['file', sys.argv[0] + '.real'])).find('64-bit') != -1
|
||||
|
||||
is_64bit = (
|
||||
str(subprocess.check_output(["file", sys.argv[0] + ".real"])).find("64-bit") != -1
|
||||
)
|
||||
|
||||
device_env = build_env()
|
||||
device_args = ' '.join(sys.argv[1:]) # FIXME: escape?
|
||||
device_stdout = device_binary + '.stdout'
|
||||
device_stderr = device_binary + '.stderr'
|
||||
device_exitcode = device_binary + '.exitcode'
|
||||
ret = adb(['shell', 'cd %s && %s %s %s >%s 2>%s ; echo $? >%s' %
|
||||
(ANDROID_TMPDIR, device_env, device_binary, device_args,
|
||||
device_stdout, device_stderr, device_exitcode)])
|
||||
device_args = " ".join(sys.argv[1:]) # FIXME: escape?
|
||||
device_stdout = device_binary + ".stdout"
|
||||
device_stderr = device_binary + ".stderr"
|
||||
device_exitcode = device_binary + ".exitcode"
|
||||
ret = adb(
|
||||
[
|
||||
"shell",
|
||||
"cd %s && %s %s %s >%s 2>%s ; echo $? >%s"
|
||||
% (
|
||||
ANDROID_TMPDIR,
|
||||
device_env,
|
||||
device_binary,
|
||||
device_args,
|
||||
device_stdout,
|
||||
device_stderr,
|
||||
device_exitcode,
|
||||
),
|
||||
]
|
||||
)
|
||||
if ret != 0:
|
||||
sys.exit(ret)
|
||||
|
||||
@@ -35,5 +53,5 @@ retcode = int(pull_from_device(device_exitcode))
|
||||
# If the device process died with a signal, do abort().
|
||||
# Not exactly the same, but good enough to fool "not --crash".
|
||||
if retcode > 128:
|
||||
os.kill(os.getpid(), signal.SIGABRT)
|
||||
os.kill(os.getpid(), signal.SIGABRT)
|
||||
sys.exit(retcode)
|
||||
|
||||
@@ -3,10 +3,23 @@ Parses the id of the process that ran with ASAN from the output logs.
|
||||
"""
|
||||
import sys, argparse, re
|
||||
|
||||
|
||||
def main():
|
||||
parser = argparse.ArgumentParser()
|
||||
parser.add_argument('--infile', nargs='?', type=argparse.FileType('r'), default=sys.stdin, help='The sanitizer output to get the pid from')
|
||||
parser.add_argument('--outfile', nargs='?', type=argparse.FileType('r'), default=sys.stdout, help='Where to write the result')
|
||||
parser.add_argument(
|
||||
"--infile",
|
||||
nargs="?",
|
||||
type=argparse.FileType("r"),
|
||||
default=sys.stdin,
|
||||
help="The sanitizer output to get the pid from",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--outfile",
|
||||
nargs="?",
|
||||
type=argparse.FileType("r"),
|
||||
default=sys.stdout,
|
||||
help="Where to write the result",
|
||||
)
|
||||
args = parser.parse_args()
|
||||
|
||||
pid = process_file(args.infile)
|
||||
@@ -15,15 +28,14 @@ def main():
|
||||
args.outfile.close()
|
||||
|
||||
|
||||
|
||||
def process_file(infile):
|
||||
# check first line is just ==== divider
|
||||
first_line_pattern = re.compile(r'=*')
|
||||
first_line_pattern = re.compile(r"=*")
|
||||
assert first_line_pattern.match(infile.readline())
|
||||
|
||||
# parse out pid from 2nd line
|
||||
# parse out pid from 2nd line
|
||||
# `==PID==ERROR: SanitizerName: error-type on address...`
|
||||
pid_pattern = re.compile(r'==([0-9]*)==ERROR:')
|
||||
pid_pattern = re.compile(r"==([0-9]*)==ERROR:")
|
||||
pid = pid_pattern.search(infile.readline()).group(1)
|
||||
|
||||
# ignore the rest
|
||||
@@ -32,5 +44,6 @@ def process_file(infile):
|
||||
|
||||
return pid
|
||||
|
||||
if __name__ == '__main__':
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
|
||||
@@ -3,20 +3,20 @@
|
||||
import os, sys, subprocess
|
||||
|
||||
output = None
|
||||
output_type = 'executable'
|
||||
output_type = "executable"
|
||||
|
||||
args = sys.argv[1:]
|
||||
while args:
|
||||
arg = args.pop(0)
|
||||
if arg == '-shared':
|
||||
output_type = 'shared'
|
||||
elif arg == '-dynamiclib':
|
||||
output_type = 'dylib'
|
||||
elif arg == '-c':
|
||||
output_type = 'object'
|
||||
elif arg == '-S':
|
||||
output_type = 'assembly'
|
||||
elif arg == '-o':
|
||||
if arg == "-shared":
|
||||
output_type = "shared"
|
||||
elif arg == "-dynamiclib":
|
||||
output_type = "dylib"
|
||||
elif arg == "-c":
|
||||
output_type = "object"
|
||||
elif arg == "-S":
|
||||
output_type = "assembly"
|
||||
elif arg == "-o":
|
||||
output = args.pop(0)
|
||||
|
||||
if output == None:
|
||||
@@ -28,5 +28,5 @@ if ret != 0:
|
||||
sys.exit(ret)
|
||||
|
||||
# If we produce a dylib, ad-hoc sign it.
|
||||
if output_type in ['shared', 'dylib']:
|
||||
if output_type in ["shared", "dylib"]:
|
||||
ret = subprocess.call(["codesign", "-s", "-", output])
|
||||
|
||||
@@ -3,65 +3,76 @@
|
||||
import glob, os, pipes, sys, subprocess
|
||||
|
||||
|
||||
device_id = os.environ.get('SANITIZER_IOSSIM_TEST_DEVICE_IDENTIFIER')
|
||||
iossim_run_verbose = os.environ.get('SANITIZER_IOSSIM_RUN_VERBOSE')
|
||||
wait_for_debug = os.environ.get('SANITIZER_IOSSIM_RUN_WAIT_FOR_DEBUGGER')
|
||||
device_id = os.environ.get("SANITIZER_IOSSIM_TEST_DEVICE_IDENTIFIER")
|
||||
iossim_run_verbose = os.environ.get("SANITIZER_IOSSIM_RUN_VERBOSE")
|
||||
wait_for_debug = os.environ.get("SANITIZER_IOSSIM_RUN_WAIT_FOR_DEBUGGER")
|
||||
|
||||
if not device_id:
|
||||
raise EnvironmentError("Specify SANITIZER_IOSSIM_TEST_DEVICE_IDENTIFIER to select which simulator to use.")
|
||||
raise EnvironmentError(
|
||||
"Specify SANITIZER_IOSSIM_TEST_DEVICE_IDENTIFIER to select which simulator to use."
|
||||
)
|
||||
|
||||
for e in [
|
||||
"ASAN_OPTIONS",
|
||||
"TSAN_OPTIONS",
|
||||
"UBSAN_OPTIONS",
|
||||
"LSAN_OPTIONS",
|
||||
"APPLE_ASAN_INIT_FOR_DLOPEN",
|
||||
"ASAN_ACTIVATION_OPTIONS",
|
||||
"MallocNanoZone",
|
||||
"ASAN_OPTIONS",
|
||||
"TSAN_OPTIONS",
|
||||
"UBSAN_OPTIONS",
|
||||
"LSAN_OPTIONS",
|
||||
"APPLE_ASAN_INIT_FOR_DLOPEN",
|
||||
"ASAN_ACTIVATION_OPTIONS",
|
||||
"MallocNanoZone",
|
||||
]:
|
||||
if e in os.environ:
|
||||
os.environ["SIMCTL_CHILD_" + e] = os.environ[e]
|
||||
if e in os.environ:
|
||||
os.environ["SIMCTL_CHILD_" + e] = os.environ[e]
|
||||
|
||||
find_atos_cmd = 'xcrun -sdk iphonesimulator -f atos'
|
||||
atos_path = subprocess.run(find_atos_cmd.split(), stdout=subprocess.PIPE, stderr=subprocess.PIPE, check=True).stdout.decode().strip()
|
||||
for san in ['ASAN', 'TSAN', 'UBSAN', 'LSAN']:
|
||||
os.environ[f'SIMCTL_CHILD_{san}_SYMBOLIZER_PATH'] = atos_path
|
||||
find_atos_cmd = "xcrun -sdk iphonesimulator -f atos"
|
||||
atos_path = (
|
||||
subprocess.run(
|
||||
find_atos_cmd.split(),
|
||||
stdout=subprocess.PIPE,
|
||||
stderr=subprocess.PIPE,
|
||||
check=True,
|
||||
)
|
||||
.stdout.decode()
|
||||
.strip()
|
||||
)
|
||||
for san in ["ASAN", "TSAN", "UBSAN", "LSAN"]:
|
||||
os.environ[f"SIMCTL_CHILD_{san}_SYMBOLIZER_PATH"] = atos_path
|
||||
|
||||
prog = sys.argv[1]
|
||||
exit_code = None
|
||||
if prog == 'rm':
|
||||
# The simulator and host actually share the same file system so we can just
|
||||
# execute directly on the host.
|
||||
rm_args = []
|
||||
for arg in sys.argv[2:]:
|
||||
if '*' in arg or '?' in arg:
|
||||
# Don't quote glob pattern
|
||||
rm_args.append(arg)
|
||||
else:
|
||||
# FIXME(dliew): pipes.quote() is deprecated
|
||||
rm_args.append(pipes.quote(arg))
|
||||
rm_cmd_line = ["/bin/rm"] + rm_args
|
||||
rm_cmd_line_str = ' '.join(rm_cmd_line)
|
||||
# We use `shell=True` so that any wildcard globs get expanded by the shell.
|
||||
if prog == "rm":
|
||||
# The simulator and host actually share the same file system so we can just
|
||||
# execute directly on the host.
|
||||
rm_args = []
|
||||
for arg in sys.argv[2:]:
|
||||
if "*" in arg or "?" in arg:
|
||||
# Don't quote glob pattern
|
||||
rm_args.append(arg)
|
||||
else:
|
||||
# FIXME(dliew): pipes.quote() is deprecated
|
||||
rm_args.append(pipes.quote(arg))
|
||||
rm_cmd_line = ["/bin/rm"] + rm_args
|
||||
rm_cmd_line_str = " ".join(rm_cmd_line)
|
||||
# We use `shell=True` so that any wildcard globs get expanded by the shell.
|
||||
|
||||
if iossim_run_verbose:
|
||||
print("RUNNING: \t{}".format(rm_cmd_line_str), flush=True)
|
||||
if iossim_run_verbose:
|
||||
print("RUNNING: \t{}".format(rm_cmd_line_str), flush=True)
|
||||
|
||||
exitcode = subprocess.call(rm_cmd_line_str, shell=True)
|
||||
exitcode = subprocess.call(rm_cmd_line_str, shell=True)
|
||||
|
||||
else:
|
||||
cmd = ["xcrun", "simctl", "spawn", "--standalone"]
|
||||
cmd = ["xcrun", "simctl", "spawn", "--standalone"]
|
||||
|
||||
if wait_for_debug:
|
||||
cmd.append("--wait-for-debugger")
|
||||
if wait_for_debug:
|
||||
cmd.append("--wait-for-debugger")
|
||||
|
||||
cmd.append(device_id)
|
||||
cmd += sys.argv[1:]
|
||||
cmd.append(device_id)
|
||||
cmd += sys.argv[1:]
|
||||
|
||||
if iossim_run_verbose:
|
||||
print("RUNNING: \t{}".format(" ".join(cmd)), flush=True)
|
||||
if iossim_run_verbose:
|
||||
print("RUNNING: \t{}".format(" ".join(cmd)), flush=True)
|
||||
|
||||
exitcode = subprocess.call(cmd)
|
||||
exitcode = subprocess.call(cmd)
|
||||
if exitcode > 125:
|
||||
exitcode = 126
|
||||
exitcode = 126
|
||||
sys.exit(exitcode)
|
||||
|
||||
@@ -6,44 +6,91 @@ Errors if the report cannot be found after `retry_count` retries.
|
||||
"""
|
||||
import sys, os, argparse, re, glob, shutil, time
|
||||
|
||||
|
||||
def main():
|
||||
parser = argparse.ArgumentParser()
|
||||
parser.add_argument('--pid', type=str, required=True, help='The process id of the process that crashed')
|
||||
parser.add_argument('--binary-filename', type=str, required=True, help='The name of the file that crashed')
|
||||
parser.add_argument('--retry-count', type=int, nargs='?', default=10, help='The number of retries to make')
|
||||
parser.add_argument('--max-wait-time', type=float, nargs='?', default=5.0, help='The max amount of seconds to wait between tries')
|
||||
parser.add_argument(
|
||||
"--pid",
|
||||
type=str,
|
||||
required=True,
|
||||
help="The process id of the process that crashed",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--binary-filename",
|
||||
type=str,
|
||||
required=True,
|
||||
help="The name of the file that crashed",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--retry-count",
|
||||
type=int,
|
||||
nargs="?",
|
||||
default=10,
|
||||
help="The number of retries to make",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--max-wait-time",
|
||||
type=float,
|
||||
nargs="?",
|
||||
default=5.0,
|
||||
help="The max amount of seconds to wait between tries",
|
||||
)
|
||||
|
||||
parser.add_argument('--dir', nargs='?', type=str, default="~/Library/Logs/DiagnosticReports", help='The directory to look for the crash report')
|
||||
parser.add_argument('--outfile', nargs='?', type=argparse.FileType('r'), default=sys.stdout, help='Where to write the result')
|
||||
parser.add_argument(
|
||||
"--dir",
|
||||
nargs="?",
|
||||
type=str,
|
||||
default="~/Library/Logs/DiagnosticReports",
|
||||
help="The directory to look for the crash report",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--outfile",
|
||||
nargs="?",
|
||||
type=argparse.FileType("r"),
|
||||
default=sys.stdout,
|
||||
help="Where to write the result",
|
||||
)
|
||||
args = parser.parse_args()
|
||||
|
||||
assert args.pid, "pid can't be empty"
|
||||
assert args.binary_filename, "binary-filename can't be empty"
|
||||
|
||||
os.chdir(os.path.expanduser(args.dir))
|
||||
output_report_with_retries(args.outfile, args.pid.strip(), args.binary_filename, args.retry_count, args.max_wait_time)
|
||||
output_report_with_retries(
|
||||
args.outfile,
|
||||
args.pid.strip(),
|
||||
args.binary_filename,
|
||||
args.retry_count,
|
||||
args.max_wait_time,
|
||||
)
|
||||
|
||||
def output_report_with_retries(outfile, pid, filename, attempts_remaining, max_wait_time):
|
||||
|
||||
def output_report_with_retries(
|
||||
outfile, pid, filename, attempts_remaining, max_wait_time
|
||||
):
|
||||
report_name = find_report_in_cur_dir(pid, filename)
|
||||
if report_name:
|
||||
with open(report_name, "r") as f:
|
||||
shutil.copyfileobj(f, outfile)
|
||||
return
|
||||
elif(attempts_remaining > 0):
|
||||
elif attempts_remaining > 0:
|
||||
# As the number of attempts remaining decreases, increase the number of seconds waited
|
||||
# if the max wait time is 2s and there are 10 attempts remaining, wait .2 seconds.
|
||||
# if the max wait time is 2s and there are 2 attempts remaining, wait 1 second.
|
||||
# if the max wait time is 2s and there are 2 attempts remaining, wait 1 second.
|
||||
time.sleep(max_wait_time / attempts_remaining)
|
||||
output_report_with_retries(outfile, pid, filename, attempts_remaining - 1, max_wait_time)
|
||||
output_report_with_retries(
|
||||
outfile, pid, filename, attempts_remaining - 1, max_wait_time
|
||||
)
|
||||
else:
|
||||
raise RuntimeError("Report not found for ({}, {}).".format(filename, pid))
|
||||
|
||||
|
||||
def find_report_in_cur_dir(pid, filename):
|
||||
for report_name in sorted(glob.glob("{}_*.crash".format(filename)), reverse=True):
|
||||
# parse out pid from first line of report
|
||||
# `Process: filename [pid]``
|
||||
with open(report_name) as cur_report:
|
||||
pattern = re.compile(r'Process: *{} \[([0-9]*)\]'.format(filename))
|
||||
pattern = re.compile(r"Process: *{} \[([0-9]*)\]".format(filename))
|
||||
cur_report_pid = pattern.search(cur_report.readline()).group(1)
|
||||
|
||||
assert cur_report_pid and cur_report_pid.isdigit()
|
||||
@@ -52,7 +99,7 @@ def find_report_in_cur_dir(pid, filename):
|
||||
|
||||
# did not find the crash report
|
||||
return None
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
|
||||
@@ -8,85 +8,92 @@ config.name = "SanitizerCommon-" + config.name_suffix
default_tool_options = []
collect_stack_traces = ""
if config.tool_name == "asan":
  tool_cflags = ["-fsanitize=address"]
  tool_options = "ASAN_OPTIONS"
    tool_cflags = ["-fsanitize=address"]
    tool_options = "ASAN_OPTIONS"
elif config.tool_name == "hwasan":
  tool_cflags = ["-fsanitize=hwaddress", "-fuse-ld=lld"]
  if config.target_arch == "x86_64":
    tool_cflags += ["-fsanitize-hwaddress-experimental-aliasing"]
    config.available_features.add("hwasan-aliasing")
  tool_options = "HWASAN_OPTIONS"
  if not config.has_lld:
    config.unsupported = True
    tool_cflags = ["-fsanitize=hwaddress", "-fuse-ld=lld"]
    if config.target_arch == "x86_64":
        tool_cflags += ["-fsanitize-hwaddress-experimental-aliasing"]
        config.available_features.add("hwasan-aliasing")
    tool_options = "HWASAN_OPTIONS"
    if not config.has_lld:
        config.unsupported = True
elif config.tool_name == "tsan":
  tool_cflags = ["-fsanitize=thread"]
  tool_options = "TSAN_OPTIONS"
    tool_cflags = ["-fsanitize=thread"]
    tool_options = "TSAN_OPTIONS"
elif config.tool_name == "msan":
  tool_cflags = ["-fsanitize=memory"]
  tool_options = "MSAN_OPTIONS"
  collect_stack_traces = "-fsanitize-memory-track-origins"
    tool_cflags = ["-fsanitize=memory"]
    tool_options = "MSAN_OPTIONS"
    collect_stack_traces = "-fsanitize-memory-track-origins"
elif config.tool_name == "lsan":
  tool_cflags = ["-fsanitize=leak"]
  tool_options = "LSAN_OPTIONS"
    tool_cflags = ["-fsanitize=leak"]
    tool_options = "LSAN_OPTIONS"
elif config.tool_name == "ubsan":
  tool_cflags = ["-fsanitize=undefined"]
  tool_options = "UBSAN_OPTIONS"
    tool_cflags = ["-fsanitize=undefined"]
    tool_options = "UBSAN_OPTIONS"
else:
  lit_config.fatal("Unknown tool for sanitizer_common tests: %r" % config.tool_name)
    lit_config.fatal("Unknown tool for sanitizer_common tests: %r" % config.tool_name)

config.available_features.add(config.tool_name)

if config.host_os == 'Linux' and config.tool_name == "lsan" and config.target_arch == 'i386':
  config.available_features.add("lsan-x86")
if (
    config.host_os == "Linux"
    and config.tool_name == "lsan"
    and config.target_arch == "i386"
):
    config.available_features.add("lsan-x86")

if config.arm_thumb:
  config.available_features.add('thumb')
    config.available_features.add("thumb")

if config.host_os == 'Darwin':
  # On Darwin, we default to `abort_on_error=1`, which would make tests run
  # much slower. Let's override this and run lit tests with 'abort_on_error=0'.
  default_tool_options += ['abort_on_error=0']
  if config.tool_name == "tsan":
    default_tool_options += ['ignore_interceptors_accesses=0']
if config.host_os == "Darwin":
    # On Darwin, we default to `abort_on_error=1`, which would make tests run
    # much slower. Let's override this and run lit tests with 'abort_on_error=0'.
    default_tool_options += ["abort_on_error=0"]
    if config.tool_name == "tsan":
        default_tool_options += ["ignore_interceptors_accesses=0"]
elif config.android:
  # The same as on Darwin, we default to "abort_on_error=1" which slows down
  # testing. Also, all existing tests are using "not" instead of "not --crash"
  # which does not work for abort()-terminated programs.
  default_tool_options += ['abort_on_error=0']
    # The same as on Darwin, we default to "abort_on_error=1" which slows down
    # testing. Also, all existing tests are using "not" instead of "not --crash"
    # which does not work for abort()-terminated programs.
    default_tool_options += ["abort_on_error=0"]

default_tool_options_str = ':'.join(default_tool_options)
default_tool_options_str = ":".join(default_tool_options)
if default_tool_options_str:
  config.environment[tool_options] = default_tool_options_str
  default_tool_options_str += ':'
    config.environment[tool_options] = default_tool_options_str
    default_tool_options_str += ":"

extra_link_flags = []

if config.host_os in ['Linux']:
  extra_link_flags += ["-ldl"]
if config.host_os in ["Linux"]:
    extra_link_flags += ["-ldl"]

clang_cflags = config.debug_info_flags + tool_cflags + [config.target_cflags]
clang_cflags += ["-I%s" % os.path.dirname(os.path.dirname(__file__))]
clang_cflags += extra_link_flags
clang_cxxflags = config.cxx_mode_flags + clang_cflags


def build_invocation(compile_flags):
  return " " + " ".join([config.clang] + compile_flags) + " "
    return " " + " ".join([config.clang] + compile_flags) + " "

config.substitutions.append( ("%clang ", build_invocation(clang_cflags)) )
config.substitutions.append( ("%clangxx ", build_invocation(clang_cxxflags)) )
config.substitutions.append( ("%collect_stack_traces", collect_stack_traces) )
config.substitutions.append( ("%tool_name", config.tool_name) )
config.substitutions.append( ("%tool_options", tool_options) )
config.substitutions.append( ('%env_tool_opts=',
                              'env ' + tool_options + '=' + default_tool_options_str))

config.suffixes = ['.c', '.cpp']
config.substitutions.append(("%clang ", build_invocation(clang_cflags)))
config.substitutions.append(("%clangxx ", build_invocation(clang_cxxflags)))
config.substitutions.append(("%collect_stack_traces", collect_stack_traces))
config.substitutions.append(("%tool_name", config.tool_name))
config.substitutions.append(("%tool_options", tool_options))
config.substitutions.append(
    ("%env_tool_opts=", "env " + tool_options + "=" + default_tool_options_str)
)

if config.host_os not in ['Linux', 'Darwin', 'NetBSD', 'FreeBSD', 'SunOS']:
  config.unsupported = True
config.suffixes = [".c", ".cpp"]

if config.host_os not in ["Linux", "Darwin", "NetBSD", "FreeBSD", "SunOS"]:
    config.unsupported = True

if not config.parallelism_group:
  config.parallelism_group = 'shadow-memory'
    config.parallelism_group = "shadow-memory"

if config.host_os == 'NetBSD':
  config.substitutions.insert(0, ('%run', config.netbsd_noaslr_prefix))
if config.host_os == "NetBSD":
    config.substitutions.insert(0, ("%run", config.netbsd_noaslr_prefix))

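These lit configs communicate with the tests through substitutions: each (pattern, replacement) pair appended to config.substitutions is expanded by lit in every RUN line. A rough standalone sketch of that expansion with made-up values (lit's real implementation is more careful about ordering and escaping):

    # Hypothetical pairs; the real ones are built from config.clang and the tool cflags above.
    substitutions = [
        ("%clang ", " clang -fsanitize=address -g "),
        ("%env_tool_opts=", "env ASAN_OPTIONS=abort_on_error=0:"),
    ]
    run_line = "%clang %s -o %t && %env_tool_opts=verbosity=1 %t"
    for pattern, replacement in substitutions:
        run_line = run_line.replace(pattern, replacement)
    print(run_line)
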
@@ -3,62 +3,72 @@
import os

# Setup config name.
config.name = 'Scudo' + config.name_suffix
config.name = "Scudo" + config.name_suffix

# Setup source root.
config.test_source_root = os.path.dirname(__file__)

# Path to the shared library
shared_libscudo = os.path.join(config.compiler_rt_libdir, "libclang_rt.scudo%s.so" % config.target_suffix)
shared_minlibscudo = os.path.join(config.compiler_rt_libdir, "libclang_rt.scudo_minimal%s.so" % config.target_suffix)
shared_libscudo = os.path.join(
    config.compiler_rt_libdir, "libclang_rt.scudo%s.so" % config.target_suffix
)
shared_minlibscudo = os.path.join(
    config.compiler_rt_libdir, "libclang_rt.scudo_minimal%s.so" % config.target_suffix
)

# Test suffixes.
config.suffixes = ['.c', '.cpp', '.test']
config.suffixes = [".c", ".cpp", ".test"]

# C & CXX flags.
c_flags = ([config.target_cflags] +
           ["-pthread",
            "-fPIE",
            "-pie",
            "-O0",
            "-UNDEBUG",
            "-ldl",
            "-Wl,--gc-sections"])
c_flags = [config.target_cflags] + [
    "-pthread",
    "-fPIE",
    "-pie",
    "-O0",
    "-UNDEBUG",
    "-ldl",
    "-Wl,--gc-sections",
]

# Android doesn't want -lrt.
if not config.android:
  c_flags += ["-lrt"]
    c_flags += ["-lrt"]

cxx_flags = (c_flags + config.cxx_mode_flags + ["-std=c++11"])
cxx_flags = c_flags + config.cxx_mode_flags + ["-std=c++11"]

scudo_flags = ["-fsanitize=scudo"]


def build_invocation(compile_flags):
  return " " + " ".join([config.clang] + compile_flags) + " "
    return " " + " ".join([config.clang] + compile_flags) + " "


# Add substitutions.
config.substitutions.append(("%clang ", build_invocation(c_flags)))
config.substitutions.append(("%clang_scudo ", build_invocation(c_flags + scudo_flags)))
config.substitutions.append(("%clangxx_scudo ", build_invocation(cxx_flags + scudo_flags)))
config.substitutions.append(
    ("%clangxx_scudo ", build_invocation(cxx_flags + scudo_flags))
)
config.substitutions.append(("%shared_libscudo", shared_libscudo))
config.substitutions.append(("%shared_minlibscudo", shared_minlibscudo))

# Platform-specific default SCUDO_OPTIONS for lit tests.
default_scudo_opts = ''
default_scudo_opts = ""
if config.android:
  # Android defaults to abort_on_error=1, which doesn't work for us.
  default_scudo_opts = 'abort_on_error=0'
    # Android defaults to abort_on_error=1, which doesn't work for us.
    default_scudo_opts = "abort_on_error=0"

# Disable GWP-ASan for scudo internal tests.
if config.gwp_asan:
  config.environment['GWP_ASAN_OPTIONS'] = 'Enabled=0'
    config.environment["GWP_ASAN_OPTIONS"] = "Enabled=0"

if default_scudo_opts:
  config.environment['SCUDO_OPTIONS'] = default_scudo_opts
  default_scudo_opts += ':'
config.substitutions.append(('%env_scudo_opts=',
                             'env SCUDO_OPTIONS=' + default_scudo_opts))
    config.environment["SCUDO_OPTIONS"] = default_scudo_opts
    default_scudo_opts += ":"
config.substitutions.append(
    ("%env_scudo_opts=", "env SCUDO_OPTIONS=" + default_scudo_opts)
)

# Hardened Allocator tests are currently supported on Linux only.
if config.host_os not in ['Linux']:
  config.unsupported = True
if config.host_os not in ["Linux"]:
    config.unsupported = True

@@ -3,21 +3,34 @@
import os

# Setup config name.
config.name = 'ShadowCallStack'
config.name = "ShadowCallStack"

# Setup source root.
config.test_source_root = os.path.dirname(__file__)

# Test suffixes.
config.suffixes = ['.c', '.cpp', '.m', '.mm', '.ll', '.test']
config.suffixes = [".c", ".cpp", ".m", ".mm", ".ll", ".test"]

# Add clang substitutions.
config.substitutions.append( ("%clang_noscs ", config.clang + ' -O0 -fno-sanitize=shadow-call-stack ' + config.target_cflags + ' ') )
config.substitutions.append(
    (
        "%clang_noscs ",
        config.clang
        + " -O0 -fno-sanitize=shadow-call-stack "
        + config.target_cflags
        + " ",
    )
)

scs_arch_cflags = config.target_cflags
if config.target_arch == 'aarch64':
  scs_arch_cflags += ' -ffixed-x18 '
config.substitutions.append( ("%clang_scs ", config.clang + ' -O0 -fsanitize=shadow-call-stack ' + scs_arch_cflags + ' ') )
if config.target_arch == "aarch64":
    scs_arch_cflags += " -ffixed-x18 "
config.substitutions.append(
    (
        "%clang_scs ",
        config.clang + " -O0 -fsanitize=shadow-call-stack " + scs_arch_cflags + " ",
    )
)

if config.host_os not in ['Linux'] or config.target_arch not in ['aarch64','riscv64']:
  config.unsupported = True
if config.host_os not in ["Linux"] or config.target_arch not in ["aarch64", "riscv64"]:
    config.unsupported = True

@@ -1,11 +1,12 @@
def getRoot(config):
  if not config.parent:
    return config
  return getRoot(config.parent)
    if not config.parent:
        return config
    return getRoot(config.parent)


root = getRoot(config)

if root.host_os not in ['Darwin']:
  config.unsupported = True
if root.host_os not in ["Darwin"]:
    config.unsupported = True

config.environment['TSAN_OPTIONS'] += ':ignore_noninstrumented_modules=1'
config.environment["TSAN_OPTIONS"] += ":ignore_noninstrumented_modules=1"

@@ -1,9 +1,10 @@
def getRoot(config):
  if not config.parent:
    return config
  return getRoot(config.parent)
    if not config.parent:
        return config
    return getRoot(config.parent)


root = getRoot(config)

if root.host_os not in ['Linux']:
  config.unsupported = True
if root.host_os not in ["Linux"]:
    config.unsupported = True

@@ -1,12 +1,12 @@
def getRoot(config):
  if not config.parent:
    return config
  return getRoot(config.parent)
    if not config.parent:
        return config
    return getRoot(config.parent)


root = getRoot(config)

# Only run if we have an instrumented libcxx. On Darwin, run always (we have
# interceptors to support the system-provided libcxx).
if not root.has_libcxx and root.host_os != 'Darwin':
  config.unsupported = True

if not root.has_libcxx and root.host_os != "Darwin":
    config.unsupported = True

@@ -1,17 +1,18 @@
def getRoot(config):
  if not config.parent:
    return config
  return getRoot(config.parent)
    if not config.parent:
        return config
    return getRoot(config.parent)


root = getRoot(config)

if 'libdispatch' in root.available_features:
  additional_cflags = ' -fblocks '
  for index, (template, replacement) in enumerate(config.substitutions):
    if template in ['%clang_tsan ', '%clangxx_tsan ']:
      config.substitutions[index] = (template, replacement + additional_cflags)
if "libdispatch" in root.available_features:
    additional_cflags = " -fblocks "
    for index, (template, replacement) in enumerate(config.substitutions):
        if template in ["%clang_tsan ", "%clangxx_tsan "]:
            config.substitutions[index] = (template, replacement + additional_cflags)
else:
  config.unsupported = True
    config.unsupported = True

if config.host_os == 'Darwin':
  config.environment['TSAN_OPTIONS'] += ':ignore_noninstrumented_modules=1'
if config.host_os == "Darwin":
    config.environment["TSAN_OPTIONS"] += ":ignore_noninstrumented_modules=1"

@@ -2,17 +2,20 @@

import os


def get_required_attr(config, attr_name):
  attr_value = getattr(config, attr_name, None)
  if not attr_value:
    lit_config.fatal(
      "No attribute %r in test configuration! You may need to run "
      "tests from your build directory or add this attribute "
      "to lit.site.cfg.py " % attr_name)
  return attr_value
    attr_value = getattr(config, attr_name, None)
    if not attr_value:
        lit_config.fatal(
            "No attribute %r in test configuration! You may need to run "
            "tests from your build directory or add this attribute "
            "to lit.site.cfg.py " % attr_name
        )
    return attr_value


# Setup config name.
config.name = 'ThreadSanitizer' + config.name_suffix
config.name = "ThreadSanitizer" + config.name_suffix

# Setup source root.
config.test_source_root = os.path.dirname(__file__)
@@ -20,75 +23,92 @@ config.test_source_root = os.path.dirname(__file__)
# Setup environment variables for running ThreadSanitizer.
default_tsan_opts = "atexit_sleep_ms=0"

if config.host_os == 'Darwin':
  # On Darwin, we default to `abort_on_error=1`, which would make tests run
  # much slower. Let's override this and run lit tests with 'abort_on_error=0'.
  default_tsan_opts += ':abort_on_error=0'
  # On Darwin, we default to ignore_noninstrumented_modules=1, which also
  # suppresses some races the tests are supposed to find. Let's run without this
  # setting, but turn it back on for Darwin tests (see Darwin/lit.local.cfg.py).
  default_tsan_opts += ':ignore_noninstrumented_modules=0'
  default_tsan_opts += ':ignore_interceptors_accesses=0'
if config.host_os == "Darwin":
    # On Darwin, we default to `abort_on_error=1`, which would make tests run
    # much slower. Let's override this and run lit tests with 'abort_on_error=0'.
    default_tsan_opts += ":abort_on_error=0"
    # On Darwin, we default to ignore_noninstrumented_modules=1, which also
    # suppresses some races the tests are supposed to find. Let's run without this
    # setting, but turn it back on for Darwin tests (see Darwin/lit.local.cfg.py).
    default_tsan_opts += ":ignore_noninstrumented_modules=0"
    default_tsan_opts += ":ignore_interceptors_accesses=0"

# Platform-specific default TSAN_OPTIONS for lit tests.
if default_tsan_opts:
  config.environment['TSAN_OPTIONS'] = default_tsan_opts
  default_tsan_opts += ':'
config.substitutions.append(('%env_tsan_opts=',
                             'env TSAN_OPTIONS=' + default_tsan_opts))
    config.environment["TSAN_OPTIONS"] = default_tsan_opts
    default_tsan_opts += ":"
config.substitutions.append(
    ("%env_tsan_opts=", "env TSAN_OPTIONS=" + default_tsan_opts)
)

# GCC driver doesn't add necessary compile/link flags with -fsanitize=thread.
if config.compiler_id == 'GNU':
  extra_cflags = ["-fPIE", "-pthread", "-ldl", "-lrt", "-pie"]
if config.compiler_id == "GNU":
    extra_cflags = ["-fPIE", "-pthread", "-ldl", "-lrt", "-pie"]
else:
  extra_cflags = []
    extra_cflags = []

tsan_incdir = config.test_source_root + "/../"
# Setup default compiler flags used with -fsanitize=thread option.
clang_tsan_cflags = (["-fsanitize=thread",
                      "-Wall"] +
                     [config.target_cflags] +
                     config.debug_info_flags +
                     extra_cflags +
                     ["-I%s" % tsan_incdir])
clang_tsan_cxxflags = config.cxx_mode_flags + clang_tsan_cflags + ["-std=c++11"] + ["-I%s" % tsan_incdir]
clang_tsan_cflags = (
    ["-fsanitize=thread", "-Wall"]
    + [config.target_cflags]
    + config.debug_info_flags
    + extra_cflags
    + ["-I%s" % tsan_incdir]
)
clang_tsan_cxxflags = (
    config.cxx_mode_flags + clang_tsan_cflags + ["-std=c++11"] + ["-I%s" % tsan_incdir]
)
# Add additional flags if we're using instrumented libc++.
# Instrumented libcxx currently not supported on Darwin.
if config.has_libcxx and config.host_os != 'Darwin':
  # FIXME: Dehardcode this path somehow.
  libcxx_path = os.path.join(config.compiler_rt_obj_root, "lib",
                             "tsan", "libcxx_tsan_%s" % config.target_arch)
  libcxx_incdir = os.path.join(libcxx_path, "include", "c++", "v1")
  libcxx_libdir = os.path.join(libcxx_path, "lib")
  libcxx_a = os.path.join(libcxx_libdir, "libc++.a")
  clang_tsan_cxxflags += ["-nostdinc++",
                          "-I%s" % libcxx_incdir]
  config.substitutions.append( ("%link_libcxx_tsan", libcxx_a) )
if config.has_libcxx and config.host_os != "Darwin":
    # FIXME: Dehardcode this path somehow.
    libcxx_path = os.path.join(
        config.compiler_rt_obj_root,
        "lib",
        "tsan",
        "libcxx_tsan_%s" % config.target_arch,
    )
    libcxx_incdir = os.path.join(libcxx_path, "include", "c++", "v1")
    libcxx_libdir = os.path.join(libcxx_path, "lib")
    libcxx_a = os.path.join(libcxx_libdir, "libc++.a")
    clang_tsan_cxxflags += ["-nostdinc++", "-I%s" % libcxx_incdir]
    config.substitutions.append(("%link_libcxx_tsan", libcxx_a))
else:
  config.substitutions.append( ("%link_libcxx_tsan", "") )
    config.substitutions.append(("%link_libcxx_tsan", ""))


def build_invocation(compile_flags):
  return " " + " ".join([config.clang] + compile_flags) + " "
    return " " + " ".join([config.clang] + compile_flags) + " "

config.substitutions.append( ("%clang_tsan ", build_invocation(clang_tsan_cflags)) )
config.substitutions.append( ("%clangxx_tsan ", build_invocation(clang_tsan_cxxflags)) )

config.substitutions.append(("%clang_tsan ", build_invocation(clang_tsan_cflags)))
config.substitutions.append(("%clangxx_tsan ", build_invocation(clang_tsan_cxxflags)))

# Define CHECK-%os to check for OS-dependent output.
config.substitutions.append( ('CHECK-%os', ("CHECK-" + config.host_os)))
config.substitutions.append(("CHECK-%os", ("CHECK-" + config.host_os)))

config.substitutions.append( ("%deflake ", os.path.join(os.path.dirname(__file__), "deflake.bash") + " " + config.deflake_threshold + " "))
config.substitutions.append(
    (
        "%deflake ",
        os.path.join(os.path.dirname(__file__), "deflake.bash")
        + " "
        + config.deflake_threshold
        + " ",
    )
)

# Default test suffixes.
config.suffixes = ['.c', '.cpp', '.m', '.mm']
config.suffixes = [".c", ".cpp", ".m", ".mm"]

if config.host_os not in ['FreeBSD', 'Linux', 'Darwin', 'NetBSD']:
  config.unsupported = True
if config.host_os not in ["FreeBSD", "Linux", "Darwin", "NetBSD"]:
    config.unsupported = True

if config.android:
  config.unsupported = True
    config.unsupported = True

if not config.parallelism_group:
  config.parallelism_group = 'shadow-memory'
    config.parallelism_group = "shadow-memory"

if config.host_os == 'NetBSD':
  config.substitutions.insert(0, ('%run', config.netbsd_noaslr_prefix))
if config.host_os == "NetBSD":
    config.substitutions.insert(0, ("%run", config.netbsd_noaslr_prefix))

@@ -1,9 +1,10 @@
def getRoot(config):
  if not config.parent:
    return config
  return getRoot(config.parent)
    if not config.parent:
        return config
    return getRoot(config.parent)


root = getRoot(config)

if root.host_os not in ['Linux']:
  config.unsupported = True
if root.host_os not in ["Linux"]:
    config.unsupported = True

@@ -1,8 +1,8 @@
if config.host_os not in ['Darwin', 'FreeBSD', 'Linux', 'NetBSD']:
  config.unsupported = True
if config.host_os not in ["Darwin", "FreeBSD", "Linux", "NetBSD"]:
    config.unsupported = True
# Work around "Cannot represent a difference across sections"
if config.target_arch == 'powerpc64':
  config.unsupported = True
if config.target_arch == "powerpc64":
    config.unsupported = True
# Work around "library ... not found: needed by main executable" in qemu.
if config.android and config.target_arch not in ['x86', 'x86_64']:
  config.unsupported = True
if config.android and config.target_arch not in ["x86", "x86_64"]:
    config.unsupported = True

@@ -1,9 +1,10 @@
def getRoot(config):
  if not config.parent:
    return config
  return getRoot(config.parent)
    if not config.parent:
        return config
    return getRoot(config.parent)


root = getRoot(config)

if root.host_os not in ['Linux']:
  config.unsupported = True
if root.host_os not in ["Linux"]:
    config.unsupported = True

@@ -2,81 +2,95 @@

import os


def get_required_attr(config, attr_name):
  attr_value = getattr(config, attr_name, None)
  if attr_value == None:
    lit_config.fatal(
      "No attribute %r in test configuration! You may need to run "
      "tests from your build directory or add this attribute "
      "to lit.site.cfg.py " % attr_name)
  return attr_value
    attr_value = getattr(config, attr_name, None)
    if attr_value == None:
        lit_config.fatal(
            "No attribute %r in test configuration! You may need to run "
            "tests from your build directory or add this attribute "
            "to lit.site.cfg.py " % attr_name
        )
    return attr_value


# Setup config name.
config.name = 'UBSan-' + config.name_suffix
config.name = "UBSan-" + config.name_suffix

# Setup source root.
config.test_source_root = os.path.dirname(__file__)

default_ubsan_opts = list(config.default_sanitizer_opts)
# Choose between standalone and UBSan+ASan modes.
ubsan_lit_test_mode = get_required_attr(config, 'ubsan_lit_test_mode')
ubsan_lit_test_mode = get_required_attr(config, "ubsan_lit_test_mode")
if ubsan_lit_test_mode == "Standalone":
  config.available_features.add("ubsan-standalone")
  clang_ubsan_cflags = []
    config.available_features.add("ubsan-standalone")
    clang_ubsan_cflags = []
elif ubsan_lit_test_mode == "StandaloneStatic":
  config.available_features.add("ubsan-standalone-static")
  clang_ubsan_cflags = ['-static-libsan']
    config.available_features.add("ubsan-standalone-static")
    clang_ubsan_cflags = ["-static-libsan"]
elif ubsan_lit_test_mode == "AddressSanitizer":
  config.available_features.add("ubsan-asan")
  clang_ubsan_cflags = ["-fsanitize=address"]
  default_ubsan_opts += ['detect_leaks=0']
    config.available_features.add("ubsan-asan")
    clang_ubsan_cflags = ["-fsanitize=address"]
    default_ubsan_opts += ["detect_leaks=0"]
elif ubsan_lit_test_mode == "MemorySanitizer":
  config.available_features.add("ubsan-msan")
  clang_ubsan_cflags = ["-fsanitize=memory"]
    config.available_features.add("ubsan-msan")
    clang_ubsan_cflags = ["-fsanitize=memory"]
elif ubsan_lit_test_mode == "ThreadSanitizer":
  config.available_features.add("ubsan-tsan")
  clang_ubsan_cflags = ["-fsanitize=thread"]
    config.available_features.add("ubsan-tsan")
    clang_ubsan_cflags = ["-fsanitize=thread"]
else:
  lit_config.fatal("Unknown UBSan test mode: %r" % ubsan_lit_test_mode)
    lit_config.fatal("Unknown UBSan test mode: %r" % ubsan_lit_test_mode)

# Platform-specific default for lit tests.
if config.target_arch == 's390x':
  # On SystemZ we need -mbackchain to make the fast unwinder work.
  clang_ubsan_cflags.append("-mbackchain")
if config.target_arch == "s390x":
    # On SystemZ we need -mbackchain to make the fast unwinder work.
    clang_ubsan_cflags.append("-mbackchain")

default_ubsan_opts_str = ':'.join(default_ubsan_opts)
default_ubsan_opts_str = ":".join(default_ubsan_opts)
if default_ubsan_opts_str:
  config.environment['UBSAN_OPTIONS'] = default_ubsan_opts_str
  default_ubsan_opts_str += ':'
    config.environment["UBSAN_OPTIONS"] = default_ubsan_opts_str
    default_ubsan_opts_str += ":"
# Substitution to setup UBSAN_OPTIONS in portable way.
config.substitutions.append(('%env_ubsan_opts=',
                             'env UBSAN_OPTIONS=' + default_ubsan_opts_str))
config.substitutions.append(
    ("%env_ubsan_opts=", "env UBSAN_OPTIONS=" + default_ubsan_opts_str)
)


def build_invocation(compile_flags):
  return " " + " ".join([config.clang] + compile_flags) + " "
    return " " + " ".join([config.clang] + compile_flags) + " "


target_cflags = [get_required_attr(config, "target_cflags")]
clang_ubsan_cflags += target_cflags
clang_ubsan_cxxflags = config.cxx_mode_flags + clang_ubsan_cflags

# Define %clang and %clangxx substitutions to use in test RUN lines.
config.substitutions.append( ("%clang ", build_invocation(clang_ubsan_cflags)) )
config.substitutions.append( ("%clangxx ", build_invocation(clang_ubsan_cxxflags)) )
config.substitutions.append( ("%gmlt ", " ".join(config.debug_info_flags) + " ") )
config.substitutions.append(("%clang ", build_invocation(clang_ubsan_cflags)))
config.substitutions.append(("%clangxx ", build_invocation(clang_ubsan_cxxflags)))
config.substitutions.append(("%gmlt ", " ".join(config.debug_info_flags) + " "))

# Default test suffixes.
config.suffixes = ['.c', '.cpp', '.m']
config.suffixes = [".c", ".cpp", ".m"]

# Check that the host supports UndefinedBehaviorSanitizer tests
if config.host_os not in ['Linux', 'Darwin', 'FreeBSD', 'Windows', 'NetBSD', 'SunOS', 'OpenBSD']:
  config.unsupported = True
if config.host_os not in [
    "Linux",
    "Darwin",
    "FreeBSD",
    "Windows",
    "NetBSD",
    "SunOS",
    "OpenBSD",
]:
    config.unsupported = True

config.available_features.add('arch=' + config.target_arch)
config.available_features.add("arch=" + config.target_arch)

config.excludes = ['Inputs']
config.excludes = ["Inputs"]

if ubsan_lit_test_mode in ['AddressSanitizer', 'MemorySanitizer', 'ThreadSanitizer']:
  if not config.parallelism_group:
    config.parallelism_group = 'shadow-memory'
  if config.host_os == 'NetBSD':
    config.substitutions.insert(0, ('%run', config.netbsd_noaslr_prefix))
if ubsan_lit_test_mode in ["AddressSanitizer", "MemorySanitizer", "ThreadSanitizer"]:
    if not config.parallelism_group:
        config.parallelism_group = "shadow-memory"
    if config.host_os == "NetBSD":
        config.substitutions.insert(0, ("%run", config.netbsd_noaslr_prefix))

@@ -2,39 +2,51 @@

import os


def get_required_attr(config, attr_name):
  attr_value = getattr(config, attr_name, None)
  if attr_value == None:
    lit_config.fatal(
      "No attribute %r in test configuration! You may need to run "
      "tests from your build directory or add this attribute "
      "to lit.site.cfg.py " % attr_name)
  return attr_value
    attr_value = getattr(config, attr_name, None)
    if attr_value == None:
        lit_config.fatal(
            "No attribute %r in test configuration! You may need to run "
            "tests from your build directory or add this attribute "
            "to lit.site.cfg.py " % attr_name
        )
    return attr_value


# Setup source root.
config.test_source_root = os.path.dirname(__file__)
config.name = 'UBSan-Minimal-' + config.target_arch
config.name = "UBSan-Minimal-" + config.target_arch


def build_invocation(compile_flags):
  return " " + " ".join([config.clang] + compile_flags) + " "
    return " " + " ".join([config.clang] + compile_flags) + " "


target_cflags = [get_required_attr(config, "target_cflags")]
clang_ubsan_cflags = ["-fsanitize-minimal-runtime"] + target_cflags
clang_ubsan_cxxflags = config.cxx_mode_flags + clang_ubsan_cflags

# Define %clang and %clangxx substitutions to use in test RUN lines.
config.substitutions.append( ("%clang ", build_invocation(clang_ubsan_cflags)) )
config.substitutions.append( ("%clangxx ", build_invocation(clang_ubsan_cxxflags)) )
config.substitutions.append(("%clang ", build_invocation(clang_ubsan_cflags)))
config.substitutions.append(("%clangxx ", build_invocation(clang_ubsan_cxxflags)))

# Default test suffixes.
config.suffixes = ['.c', '.cpp']
config.suffixes = [".c", ".cpp"]

# Check that the host supports UndefinedBehaviorSanitizerMinimal tests
if config.host_os not in ['Linux', 'FreeBSD', 'NetBSD', 'Darwin', 'OpenBSD', 'SunOS']: # TODO: Windows
  config.unsupported = True
if config.host_os not in [
    "Linux",
    "FreeBSD",
    "NetBSD",
    "Darwin",
    "OpenBSD",
    "SunOS",
]:  # TODO: Windows
    config.unsupported = True

# Don't target x86_64h if the test machine can't execute x86_64h binaries.
if '-arch x86_64h' in target_cflags and 'x86_64h' not in config.available_features:
  config.unsupported = True
if "-arch x86_64h" in target_cflags and "x86_64h" not in config.available_features:
    config.unsupported = True

config.available_features.add('arch=' + config.target_arch)
config.available_features.add("arch=" + config.target_arch)

@@ -3,64 +3,67 @@
import os

# Setup config name.
config.name = 'XRay' + config.name_suffix
config.name = "XRay" + config.name_suffix

# Setup source root.
config.test_source_root = os.path.dirname(__file__)

# Setup default compiler flags use with -fxray-instrument option.
clang_xray_cflags = (['-fxray-instrument', config.target_cflags])
clang_xray_cflags = ["-fxray-instrument", config.target_cflags]

# If libc++ was used to build XRAY libraries, libc++ is needed. Fix applied
# to Linux only since -rpath may not be portable. This can be extended to
# other platforms.
if config.libcxx_used == "1" and config.host_os == "Linux":
  clang_xray_cflags = clang_xray_cflags + (['-L%s -lc++ -Wl,-rpath=%s'
                                            % (config.llvm_shlib_dir,
                                               config.llvm_shlib_dir)])
    clang_xray_cflags = clang_xray_cflags + (
        ["-L%s -lc++ -Wl,-rpath=%s" % (config.llvm_shlib_dir, config.llvm_shlib_dir)]
    )

clang_xray_cxxflags = config.cxx_mode_flags + clang_xray_cflags


def build_invocation(compile_flags):
  return ' ' + ' '.join([config.clang] + compile_flags) + ' '
    return " " + " ".join([config.clang] + compile_flags) + " "


# Assume that llvm-xray is in the config.llvm_tools_dir.
llvm_xray = os.path.join(config.llvm_tools_dir, 'llvm-xray')
llvm_xray = os.path.join(config.llvm_tools_dir, "llvm-xray")

# Setup substitutions.
if config.host_os == "Linux":
  libdl_flag = "-ldl"
    libdl_flag = "-ldl"
else:
  libdl_flag = ""
    libdl_flag = ""

config.substitutions.append(("%clang ", build_invocation([config.target_cflags])))
config.substitutions.append(
    ('%clang ', build_invocation([config.target_cflags])))
    ("%clangxx ", build_invocation(config.cxx_mode_flags + [config.target_cflags]))
)
config.substitutions.append(("%clang_xray ", build_invocation(clang_xray_cflags)))
config.substitutions.append(("%clangxx_xray", build_invocation(clang_xray_cxxflags)))
config.substitutions.append(("%llvm_xray", llvm_xray))
config.substitutions.append(
    ('%clangxx ',
     build_invocation(config.cxx_mode_flags + [config.target_cflags])))
config.substitutions.append(
    ('%clang_xray ', build_invocation(clang_xray_cflags)))
config.substitutions.append(
    ('%clangxx_xray', build_invocation(clang_xray_cxxflags)))
config.substitutions.append(
    ('%llvm_xray', llvm_xray))
config.substitutions.append(
    ('%xraylib',
     ('-lm -lpthread %s -lrt -L%s '
      '-Wl,-whole-archive -lclang_rt.xray%s -Wl,-no-whole-archive')
     % (libdl_flag, config.compiler_rt_libdir, config.target_suffix)))
    (
        "%xraylib",
        (
            "-lm -lpthread %s -lrt -L%s "
            "-Wl,-whole-archive -lclang_rt.xray%s -Wl,-no-whole-archive"
        )
        % (libdl_flag, config.compiler_rt_libdir, config.target_suffix),
    )
)

# Default test suffixes.
config.suffixes = ['.c', '.cpp']
config.suffixes = [".c", ".cpp"]

if config.host_os not in ['FreeBSD', 'Linux', 'NetBSD', 'OpenBSD']:
  config.unsupported = True
elif '64' not in config.host_arch:
  if 'arm' in config.host_arch:
    if '-mthumb' in config.target_cflags:
      config.unsupported = True
  else:
if config.host_os not in ["FreeBSD", "Linux", "NetBSD", "OpenBSD"]:
    config.unsupported = True
elif "64" not in config.host_arch:
    if "arm" in config.host_arch:
        if "-mthumb" in config.target_cflags:
            config.unsupported = True
    else:
        config.unsupported = True

if config.host_os == 'NetBSD':
  config.substitutions.insert(0, ('%run', config.netbsd_nomprotect_prefix))
if config.host_os == "NetBSD":
    config.substitutions.insert(0, ("%run", config.netbsd_nomprotect_prefix))

@@ -18,7 +18,8 @@ def get_lit_conf(name, default=None):
        val = default
    return val

emulator = get_lit_conf('emulator', None)

emulator = get_lit_conf("emulator", None)

# Setup test format
llvm_build_mode = getattr(config, "llvm_build_mode", "Debug")
@@ -30,28 +31,28 @@ config.suffixes = []
# Tweak PATH to include llvm tools dir.
llvm_tools_dir = config.llvm_tools_dir
if (not llvm_tools_dir) or (not os.path.exists(llvm_tools_dir)):
  lit_config.fatal("Invalid llvm_tools_dir config attribute: %r" % llvm_tools_dir)
path = os.path.pathsep.join((llvm_tools_dir, config.environment['PATH']))
config.environment['PATH'] = path
    lit_config.fatal("Invalid llvm_tools_dir config attribute: %r" % llvm_tools_dir)
path = os.path.pathsep.join((llvm_tools_dir, config.environment["PATH"]))
config.environment["PATH"] = path

# Propagate the temp directory. Windows requires this because it uses \Windows\
# if none of these are present.
if 'TMP' in os.environ:
  config.environment['TMP'] = os.environ['TMP']
if 'TEMP' in os.environ:
  config.environment['TEMP'] = os.environ['TEMP']
if "TMP" in os.environ:
    config.environment["TMP"] = os.environ["TMP"]
if "TEMP" in os.environ:
    config.environment["TEMP"] = os.environ["TEMP"]

if config.host_os == 'Darwin':
  # Only run up to 3 processes that require shadow memory simultaneously on
  # 64-bit Darwin. Using more scales badly and hogs the system due to
  # inefficient handling of large mmap'd regions (terabytes) by the kernel.
  lit_config.parallelism_groups["shadow-memory"] = 3
if config.host_os == "Darwin":
    # Only run up to 3 processes that require shadow memory simultaneously on
    # 64-bit Darwin. Using more scales badly and hogs the system due to
    # inefficient handling of large mmap'd regions (terabytes) by the kernel.
    lit_config.parallelism_groups["shadow-memory"] = 3

  # Disable libmalloc nano allocator due to crashes running on macOS 12.0.
  # rdar://80086125
  config.environment['MallocNanoZone'] = '0'
    # Disable libmalloc nano allocator due to crashes running on macOS 12.0.
    # rdar://80086125
    config.environment["MallocNanoZone"] = "0"

  # We crash when we set DYLD_INSERT_LIBRARIES for unit tests, so interceptors
  # don't work.
  config.environment['ASAN_OPTIONS'] = 'verify_interceptors=0'
  config.environment['TSAN_OPTIONS'] = 'verify_interceptors=0'
    # We crash when we set DYLD_INSERT_LIBRARIES for unit tests, so interceptors
    # don't work.
    config.environment["ASAN_OPTIONS"] = "verify_interceptors=0"
    config.environment["TSAN_OPTIONS"] = "verify_interceptors=0"

@@ -1,2 +1,2 @@
if 'clang' not in config.available_features or 'AMDGPU' not in config.targets_to_build:
if "clang" not in config.available_features or "AMDGPU" not in config.targets_to_build:
    config.unsupported = True

@@ -1,3 +1,3 @@
# In MSVC mode DWARF isn't produced & is needed for these tests
if 'native' not in config.available_features or config.is_msvc:
if "native" not in config.available_features or config.is_msvc:
    config.unsupported = True

@@ -1,2 +1,2 @@
if 'dexter' not in config.available_features:
if "dexter" not in config.available_features:
    config.unsupported = True

@@ -5,4 +5,4 @@
# See https://llvm.org/LICENSE.txt for license information.
# SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception

__version__ = '1.0.0'
__version__ = "1.0.0"

Some files were not shown because too many files have changed in this diff.