Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
118 changes: 52 additions & 66 deletions mesonbuild/backend/ninjabackend.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,7 @@
from .. import build
from .. import mlog
from .. import compilers
from ..compilers.cpp import CPPCompiler
from .. import tooldetect
from ..arglist import CompilerArgs
from ..compilers import Compiler, is_library
Expand All @@ -48,7 +49,6 @@
from ..compilers.rust import RustCompiler
from ..mesonlib import FileOrString
from .backends import TargetIntrospectionData

CommandArgOrStr = T.List[T.Union['NinjaCommandArg', str]]
RUST_EDITIONS = Literal['2015', '2018', '2021']

Expand Down Expand Up @@ -493,6 +493,8 @@ def __init__(self, build: T.Optional[build.Build]):
self.implicit_meson_outs: T.List[str] = []
self._uses_dyndeps = False
self._generated_header_cache: T.Dict[str, T.List[FileOrString]] = {}
self._first_deps_dd_rule_generated = False
self._all_scan_sources = []
# nvcc chokes on thin archives:
# nvlink fatal : Could not open input file 'libfoo.a.p'
# nvlink fatal : elfLink internal error
Expand Down Expand Up @@ -624,10 +626,7 @@ def generate(self, capture: bool = False, vslite_ctx: T.Optional[T.Dict] = None)

num_pools = self.environment.coredata.optstore.get_value_for('backend_max_links')
if num_pools > 0:
outfile.write(f'''pool link_pool
depth = {num_pools}

''')
outfile.write(f'pool link_pool\n depth = {num_pools}\n\n')

with self.detect_vs_dep_prefix(tempfilename) as outfile:
self.generate_rules()
Expand All @@ -645,6 +644,7 @@ def generate(self, capture: bool = False, vslite_ctx: T.Optional[T.Dict] = None)

for t in ProgressBar(self.build.get_targets().values(), desc='Generating targets'):
self.generate_target(t)
self.generate_global_dependency_scan_target()
mlog.log_timestamp("Targets generated")
self.add_build_comment(NinjaComment('Test rules'))
self.generate_tests()
Expand Down Expand Up @@ -1089,9 +1089,6 @@ def generate_target(self, target: T.Union[build.BuildTarget, build.CustomTarget,
final_obj_list = self.generate_prelink(target, obj_list)
else:
final_obj_list = obj_list

self.generate_dependency_scan_target(target, compiled_sources, source2object, fortran_order_deps)

if target.uses_rust():
self.generate_rust_target(target, outname, final_obj_list, fortran_order_deps)
return
Expand All @@ -1112,10 +1109,14 @@ def should_use_dyndeps_for_target(self, target: 'build.BuildTarget') -> bool:
return True
if 'cpp' not in target.compilers:
return False
if '-fmodules-ts' in target.extra_args['cpp']:
if '-fmodules' in target.extra_args['cpp']:
return True
# Currently only the preview version of Visual Studio is supported.
cpp = target.compilers['cpp']
if cpp.get_id() == 'clang':
if not mesonlib.version_compare(cpp.version, '>=17'):
raise MesonException('C++20 modules require Clang 17 or newer.')
return True
if cpp.get_id() != 'msvc':
return False
cppversion = self.get_target_option(target, OptionKey('cpp_std',
Expand All @@ -1136,47 +1137,31 @@ def generate_dependency_scan_target(self, target: build.BuildTarget,
if not self.should_use_dyndeps_for_target(target):
return
self._uses_dyndeps = True
json_file, depscan_file = self.get_dep_scan_file_for(target)
pickle_base = target.name + '.dat'
pickle_file = os.path.join(self.get_target_private_dir(target), pickle_base).replace('\\', '/')
pickle_abs = os.path.join(self.get_target_private_dir_abs(target), pickle_base).replace('\\', '/')
rule_name = 'depscan'
scan_sources = list(self.select_sources_to_scan(compiled_sources))

scaninfo = TargetDependencyScannerInfo(
self.get_target_private_dir(target), source2object, scan_sources)

write = True
if os.path.exists(pickle_abs):
with open(pickle_abs, 'rb') as p:
old = pickle.load(p)
write = old != scaninfo

if write:
with open(pickle_abs, 'wb') as p:
pickle.dump(scaninfo, p)

elem = NinjaBuildElement(self.all_outputs, json_file, rule_name, pickle_file)
# A full dependency is required on all scanned sources, if any of them
# are updated we need to rescan, as they may have changed the modules
# they use or export.
for s in scan_sources:
elem.deps.add(s[0])
elem.orderdeps.update(object_deps)
elem.add_item('name', target.name)
self.add_build(elem)

infiles: T.Set[str] = set()
for t in target.get_all_linked_targets():
if self.should_use_dyndeps_for_target(t):
infiles.add(self.get_dep_scan_file_for(t)[0])
_, od = self.flatten_object_list(target)
infiles.update({self.get_dep_scan_file_for(t)[0] for t in od if t.uses_fortran()})

elem = NinjaBuildElement(self.all_outputs, depscan_file, 'depaccumulate', [json_file] + sorted(infiles))
elem.add_item('name', target.name)
self.add_build(elem)
if not self._first_deps_dd_rule_generated:
self._first_deps_dd_rule_generated = True
self.generate_project_wide_cpp_scanner_rules()
rule_name = 'depscanaccumulate'
elem = NinjaBuildElement(self.all_outputs, "deps.dd", rule_name, "compile_commands.json")
self.add_build(elem)
def generate_project_wide_cpp_scanner_rules(self) -> None:
    """Register the ninja rule that runs the internal 'depscanaccumulate'
    script to scan the whole project for C++ module dependencies.

    The rule is emitted at most once: the scanning command is identical
    for native and cross compilation.
    """
    rulename = 'depscanaccumulate'
    if rulename in self.ruledict:
        # Already registered by an earlier caller.
        return
    scan_command = self.environment.get_build_command() + ['--internal', 'depscanaccumulate']
    scan_args = ['$in', 'deps.json', '$out']
    self.add_rule(NinjaRule(rulename, scan_command, scan_args,
                            'Scanning project for modules'))
def generate_global_dependency_scan_target(self) -> None:
    """Emit the single project-wide build statement that produces deps.dd,
    the dynamic-dependency file consumed by module-using compile edges.

    NOTE(review): this unconditionally flags the build as using dyndeps
    and scans from compile_commands.json — confirm this is intended even
    for projects with no C++ modules.
    """
    self._uses_dyndeps = True
    self.generate_project_wide_cpp_scanner_rules()
    scan_elem = NinjaBuildElement(self.all_outputs, "deps.dd",
                                  'depscanaccumulate', "compile_commands.json")
    # Rescan whenever any collected scan source changes, since the set of
    # modules it imports or exports may have changed.
    scan_elem.add_dep(self._all_scan_sources)
    self.add_build(scan_elem)
def select_sources_to_scan(self, compiled_sources: T.List[str],
) -> T.Iterable[T.Tuple[str, Literal['cpp', 'fortran']]]:
# in practice pick up C++ and Fortran files. If some other language
Expand Down Expand Up @@ -2712,21 +2697,7 @@ def generate_scanner_rules(self) -> None:
if rulename in self.ruledict:
# Scanning command is the same for native and cross compilation.
return

command = self.environment.get_build_command() + \
['--internal', 'depscan']
args = ['$picklefile', '$out', '$in']
description = 'Scanning target $name for modules'
rule = NinjaRule(rulename, command, args, description)
self.add_rule(rule)

rulename = 'depaccumulate'
command = self.environment.get_build_command() + \
['--internal', 'depaccumulate']
args = ['$out', '$in']
description = 'Generating dynamic dependency information for target $name'
rule = NinjaRule(rulename, command, args, description)
self.add_rule(rule)
self.generate_project_wide_cpp_scanner_rules()

def generate_compile_rules(self) -> None:
for for_machine in MachineChoice:
Expand Down Expand Up @@ -3121,7 +3092,12 @@ def generate_common_compile_args_per_src_type(self, target: build.BuildTarget) -

src_type_to_args[src_type_str] = commands.to_native()
return src_type_to_args

def _get_cpp_module_output_name(self, src_basename: str,
compiler: CPPCompiler,
target: build.BuildTarget):
if not src_basename.endswith('.cppm'):
return 'dummy'
return src_basename.replace('.cppm', '.pcm')
def generate_single_compile(self, target: build.BuildTarget, src,
is_generated: bool = False, header_deps=None,
order_deps: T.Optional[T.List[FileOrString]] = None,
Expand Down Expand Up @@ -3270,6 +3246,16 @@ def quote_make_target(targetName: str) -> str:
result += c
return result
element.add_item('CUDA_ESCAPED_TARGET', quote_make_target(rel_obj))
if self.should_use_dyndeps_for_target(target) and compiler.get_language() == 'cpp' and compiler.get_id() == 'clang':
src_basename = os.path.basename(src.fname)
mod_output = self._get_cpp_module_output_name(src_basename, compiler, target)
build_dir = self.environment.get_build_dir()
commands.extend([
'--start-no-unused-arguments',
f'-fmodule-output={mod_output}',
f'-fprebuilt-module-path={build_dir}',
'--end-no-unused-arguments'
])
element.add_item('ARGS', commands)

self.add_dependency_scanner_entries_to_element(target, compiler, element, src)
Expand All @@ -3288,7 +3274,7 @@ def add_dependency_scanner_entries_to_element(self, target: build.BuildTarget, c
extension = extension.lower()
if not (extension in compilers.lang_suffixes['fortran'] or extension in compilers.lang_suffixes['cpp']):
return
dep_scan_file = self.get_dep_scan_file_for(target)[1]
dep_scan_file = 'deps.dd'
element.add_item('dyndep', dep_scan_file)
element.add_orderdep(dep_scan_file)

Expand Down
3 changes: 2 additions & 1 deletion mesonbuild/build.py
Original file line number Diff line number Diff line change
Expand Up @@ -1745,7 +1745,8 @@ def get_used_stdlib_args(self, link_language: str) -> T.List[str]:
# subproject
stdlib_args.extend(all_compilers[dl].language_stdlib_only_link_flags(self.environment))
return stdlib_args

def uses_cpp(self) -> bool:
    """Return True if this target has a C++ compiler among its compilers."""
    return 'cpp' in self.compilers
def uses_rust(self) -> bool:
return 'rust' in self.compilers

Expand Down
36 changes: 31 additions & 5 deletions mesonbuild/scripts/depscan.py
Original file line number Diff line number Diff line change
Expand Up @@ -200,9 +200,35 @@ def scan(self) -> int:
json.dump(description, f)

return 0

class CppDependenciesScanner:
    """Base class for compiler-specific C++ module dependency scanners."""
    pass

class ClangDependencyScanner(CppDependenciesScanner):
    """Scan C++ sources for module dependencies with clang-scan-deps.

    Runs clang-scan-deps over a compilation database and stores its
    p1689-format dependency report in ``json_output_file``.
    """

    def __init__(self, compilation_db_file: str, json_output_file: str,
                 dd_output_file: T.Optional[str] = None) -> None:
        # Path to the compile_commands.json handed to clang-scan-deps.
        self.compilation_db_file = compilation_db_file
        # Where the raw p1689 JSON report is written.
        self.json_output_file = json_output_file
        # Optional dyndep output path — currently unused here; the
        # depscanaccumulate implementation consumes it. TODO confirm intent.
        self.dd_output_file = dd_output_file

    def scan(self) -> int:
        """Run the scan; return 0 on success, 2 on timeout, 1 on other failure."""
        try:
            result = sp.run(
                ["clang-scan-deps",
                 "-format=p1689",
                 "-compilation-database", self.compilation_db_file],
                capture_output=True,
                check=True
            )
        # TimeoutExpired subclasses SubprocessError, so it must be caught
        # first or this branch is unreachable.
        except sp.TimeoutExpired:
            return 2
        except (sp.SubprocessError, FileNotFoundError):
            # Non-zero exit from the tool, or clang-scan-deps not installed.
            return 1
        # Persist the raw report instead of printing the bytes repr, so
        # json_output_file is actually produced.
        with open(self.json_output_file, 'wb') as f:
            f.write(result.stdout)
        return 0
def run(args: T.List[str]) -> int:
    """Meson '--internal depscan' entry point.

    args: <compilation_db> <json_output_file> [dd_output_file]
    Returns the scanner's exit status (0 on success).
    """
    assert len(args) >= 2, 'At least <compilation_db> and <json_output-file> arguments'
    comp_db = args[0]
    json_output = args[1]
    # The dyndep output path is optional.
    dd_output = args[2] if len(args) > 2 else None
    scanner = ClangDependencyScanner(comp_db, json_output, dd_output)
    return scanner.scan()
119 changes: 119 additions & 0 deletions mesonbuild/scripts/depscanaccumulate.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,119 @@
#!/usr/bin/env python3

from collections import defaultdict
from dataclasses import dataclass
import json
import subprocess as sp
import sys
import typing as T

ModuleName: T.TypeAlias = str
ObjectFile: T.TypeAlias = str


@dataclass(frozen=True)
class ModuleProviderInfo:
    """One C++ module provided by a scanned translation unit."""
    # Exported module name as reported by the scanner's 'logical-name'.
    logical_name: ModuleName
    # Source file that defines the module ('source-path' in the report).
    source_path: str
    # Populated from the p1689 'is-interface' field; defaults to False.
    is_interface: bool = False


class CppDependenciesScanner:
    """Base class for compiler-specific C++ module dependency scanners."""
    pass


def normalize_filename(fname):
    """Replace ':' (e.g. in C++ module partition names) with '-' so the
    name is usable as a filename in a ninja statement."""
    return '-'.join(fname.split(':'))


class DynDepRule:
    """Renders a single ninja dyndep 'build' statement for one object
    file: its implicit outputs (provided modules) and implicit inputs
    (required modules)."""

    def __init__(self, out: str, imp_outs: T.Optional[T.List[str]], imp_ins: T.List[str]):
        fragments = [f'build {out}']
        if imp_outs:
            # ':' is not usable in these names; map it to '-'.
            fragments.append(' | ' + ' '.join(o.replace(':', '-') for o in imp_outs))
        fragments.append(': dyndep')
        if imp_ins:
            fragments.append(' | ' + ' '.join(i.replace(':', '-') for i in imp_ins))
        self.output = fragments
        self.output_str = ''.join(fragments) + '\n'

    def __str__(self):
        return self.output_str


class ClangDependencyScanner(CppDependenciesScanner):
    """Scans a project for C++ module dependencies with clang-scan-deps
    and emits a ninja dyndep file.

    Reads a compilation database, obtains a p1689-format dependency
    report from clang-scan-deps, stores the raw report in
    ``json_output_file`` and renders the accumulated per-object rules
    into the dyndep file.
    """

    def __init__(self, compilation_db_file, json_output_file, dd_output_file=None):
        # Path to compile_commands.json handed to clang-scan-deps.
        self.compilation_db_file = compilation_db_file
        # Where the raw p1689 JSON report is written.
        self.json_output_file = json_output_file
        # Dyndep output path; previously ignored — honour it, keeping the
        # old 'deps.dd' as the default.
        self.dd_output_file = dd_output_file or 'deps.dd'

    def scan(self) -> int:
        """Run the scan; return 0 on success, 2 on timeout, 1 on any
        other failure."""
        try:
            result = sp.run(
                ["clang-scan-deps",
                 "-format=p1689",
                 "-compilation-database", self.compilation_db_file],
                capture_output=True,
                check=False
            )

            if result.returncode != 0:
                print(result.stderr.decode())
                raise sp.SubprocessError("Failed to run clang-scan-deps")

            with open(self.json_output_file, 'wb') as f:
                f.write(result.stdout)

            dependencies_info = json.loads(result.stdout)
            all_deps_per_objfile = self.generate_dependencies(dependencies_info["rules"])
            self.generate_dd_file(all_deps_per_objfile)
            return 0

        # TimeoutExpired subclasses SubprocessError, so it must be caught
        # first or this branch is unreachable.
        except sp.TimeoutExpired:
            return 2
        except (sp.SubprocessError, FileNotFoundError):
            # Tool failure, or clang-scan-deps not installed at all.
            return 1

    def generate_dd_file(self, deps_per_object_file) -> None:
        """Write the ninja dyndep file mapping each object file to the
        module files it provides and requires."""
        with open(self.dd_output_file, "w") as f:
            f.write('ninja_dyndep_version = 1\n')
            for obj, (requires, provides) in deps_per_object_file.items():
                # Sort so the dyndep file is deterministic (inputs are sets).
                dd = DynDepRule(
                    obj,
                    sorted(p.logical_name + ".pcm" for p in provides),
                    sorted(r + '.pcm' for r in requires)
                )
                f.write(str(dd))

    def generate_dependencies(self, rules: T.List):
        """Fold p1689 rules into a mapping from object file to
        (required module names, provided ModuleProviderInfo set)."""
        all_entries: T.Mapping[ObjectFile, T.Tuple[T.Set[ModuleName], T.Set[ModuleProviderInfo]]] = \
            defaultdict(lambda: (set(), set()))

        for r in rules:
            obj_processed = r["primary-output"]
            # The defaultdict creates the entry on first access; do NOT
            # reassign it here, or a duplicate primary-output would drop
            # previously accumulated dependencies.
            requires, provides = all_entries[obj_processed]

            for req in r.get("requires", []):
                requires.add(req["logical-name"])

            for prov in r.get("provides", []):
                provides.add(ModuleProviderInfo(
                    logical_name=prov["logical-name"],
                    source_path=prov["source-path"],
                    is_interface=prov.get('is-interface', False)
                ))

        return all_entries


def run(args: T.List[str]) -> int:
    """Entry point: <compilation_db> <json_output_file> [dd_output_file].

    Returns the scanner's exit status (0 on success). The previous
    3-way unpack raised ValueError on exactly two arguments even though
    the assert allowed them, and dropped dd_output entirely.
    """
    assert len(args) >= 2, 'At least <compilation_db> and <json_output_file> arguments required'
    comp_db_path = args[0]
    json_output_path = args[1]
    # The dyndep output path is optional; pass it through when given.
    dd_output = args[2] if len(args) > 2 else None
    scanner = ClangDependencyScanner(comp_db_path, json_output_path, dd_output)
    return scanner.scan()


if __name__ == '__main__':
    # Propagate the scanner's status code to the shell instead of
    # discarding it.
    sys.exit(run(sys.argv[1:]))
Loading