Skip to content

Commit

Permalink
Fix subprojects logic in cross build scenarios
Browse files Browse the repository at this point in the history
Where we cannot use the same subproject state for both machines.

We may not even need to use a subproject for both machines, for example
if the build machine has a dependency installed, and we only need to
fall back to a subproject for the host machine.

Fixes: mesonbuild#10947
  • Loading branch information
oleavr committed Feb 8, 2024
1 parent 9ad2639 commit d9c30f8
Show file tree
Hide file tree
Showing 16 changed files with 284 additions and 77 deletions.
34 changes: 28 additions & 6 deletions mesonbuild/build.py
Original file line number Diff line number Diff line change
Expand Up @@ -291,23 +291,43 @@ def get_custom_targets(self):

def copy(self) -> Build:
    """Return a duplicate of this Build bound to the same environment."""
    duplicate = Build(self.environment)
    self._copy_to(duplicate)
    return duplicate

def copy_to_native(self) -> Build:
    """Return a duplicate of this Build whose environment targets the build machine."""
    native_build = Build(self.environment.copy_to_native())
    self._copy_to(native_build)
    return native_build

def _copy_to(self, other: Build) -> None:
for k, v in self.__dict__.items():
if k == 'environment':
continue
if isinstance(v, (list, dict, set, OrderedDict)):
other.__dict__[k] = v.copy()
else:
other.__dict__[k] = v
return other

def merge(self, other: Build) -> None:
    """Adopt every attribute of *other* except its environment."""
    ours = self.__dict__
    for key, value in other.__dict__.items():
        if key != 'environment':
            ours[key] = value

def ensure_static_linker(self, compiler: Compiler) -> None:
    """Detect and cache a static linker for *compiler*'s machine if one is needed."""
    machine = compiler.for_machine
    if self.static_linker[machine] is None and compiler.needs_static_linker():
        self.static_linker[machine] = detect_static_linker(self.environment, compiler)

def get_project(self):
    """Return the top-level project.

    Projects are keyed by (subproject name, machine); the top-level
    project lives under the empty name for the host machine.  The stale
    string-keyed lookup that shadowed this (making it unreachable) was
    removed — it matched the pre-tuple-key scheme used elsewhere in this
    file (see find_subproject_descriptive_name).
    """
    return self.projects[('', MachineChoice.HOST)]

def find_subproject_descriptive_name(self, name: str) -> T.Optional[str]:
    """Return the stored project entry for *name*, checking each machine in turn.

    Returns None when no machine has a subproject under that name.
    """
    for machine in MachineChoice:
        entry = self.projects.get((name, machine))
        if entry is not None:
            return entry
    return None

def get_subproject_dir(self):
    """Return the directory configured to hold subprojects."""
    return self.subproject_dir
Expand Down Expand Up @@ -596,7 +616,7 @@ def get_basename(self) -> str:
return self.name

def get_subdir(self) -> str:
    """Return this target's output subdirectory within the build tree.

    Routed through the environment so a '-native' suffix is appended when
    this target belongs to a native-clone configuration.  The stale plain
    'return self.subdir' that shadowed this call (dead code from the old
    version) was removed.
    """
    return self.environment.build_output_rpath(self.subdir)

def get_typename(self) -> str:
    """Return the human-readable type name of this target."""
    return self.typename
Expand All @@ -612,7 +632,7 @@ def _get_id_hash(target_id: str) -> str:
return h.hexdigest()[:7]

@staticmethod
def construct_id_from_path(subdir: str, name: str, type_suffix: str) -> str:
def construct_id_from_path(subdir: str, name: str, type_suffix: str, extra_suffix: str = '') -> str:
"""Construct target ID from subdir, name and type suffix.
This helper function is made public mostly for tests."""
Expand All @@ -623,7 +643,7 @@ def construct_id_from_path(subdir: str, name: str, type_suffix: str) -> str:
# FIXME replace with assert when slash in names is prohibited
name_part = name.replace('/', '@').replace('\\', '@')
assert not has_path_sep(type_suffix)
my_id = name_part + type_suffix
my_id = name_part + type_suffix + extra_suffix
if subdir:
subdir_part = Target._get_id_hash(subdir)
# preserve myid for better debuggability
Expand All @@ -635,7 +655,7 @@ def get_id(self) -> str:
if getattr(self, 'name_suffix_set', False):
name += '.' + self.suffix
return self.construct_id_from_path(
self.subdir, name, self.type_suffix())
self.subdir, name, self.type_suffix(), '@native' if self.environment.coredata.is_native_clone else '')

def process_kwargs_base(self, kwargs: T.Dict[str, T.Any]) -> None:
if 'build_by_default' in kwargs:
Expand Down Expand Up @@ -1481,6 +1501,8 @@ def check_can_link_together(self, t: BuildTargetTypes) -> None:
links_with_rust_abi = isinstance(t, BuildTarget) and t.uses_rust_abi()
if not self.uses_rust() and links_with_rust_abi:
raise InvalidArguments(f'Try to link Rust ABI library {t.name!r} with a non-Rust target {self.name!r}')
if isinstance(t, Target) and t.subproject and not self.environment.is_cross_build():
return
if self.for_machine is not t.for_machine and (not links_with_rust_abi or t.rust_crate_type != 'proc-macro'):
msg = f'Tried to mix libraries for machines {self.for_machine} and {t.for_machine} in target {self.name!r}'
if self.environment.is_cross_build():
Expand Down
30 changes: 28 additions & 2 deletions mesonbuild/coredata.py
Original file line number Diff line number Diff line change
Expand Up @@ -586,6 +586,30 @@ def __init__(self, options: argparse.Namespace, scratch_dir: str, meson_command:
self.builtin_options_libdir_cross_fixup()
self.init_builtins('')

self.is_native_clone = False

def copy_to_native(self) -> CoreData:
    """Clone this CoreData for a native (build-machine-only) configuration.

    The clone shares most state with the original, but drops cross files,
    keeps only the build-machine compilers, rebuilds the dependency cache
    as non-cross, and flags itself as a native clone.
    """
    clone = CoreData.__new__(CoreData)
    clone.__dict__.update(self.__dict__)

    # A native clone is by definition not a cross build.
    clone.cross_files = []

    # Only the build-machine compilers carry over.
    clone.compilers = PerMachine(OrderedDict(), OrderedDict())
    clone.compilers.build = self.compilers.build

    clone.deps = PerMachineDefaultable.default(
        is_cross=False,
        build=self.deps.build,
        host=self.deps.host)

    clone.is_native_clone = True

    # NOTE(review): these cache objects are shared with the original by the
    # shallow copy above, so clearing them also empties the original's caches.
    clone.compiler_check_cache.clear()
    clone.run_check_cache.clear()

    return clone

@staticmethod
def __load_config_files(options: argparse.Namespace, scratch_dir: str, ftype: str) -> T.List[str]:
# Need to try and make the passed filenames absolute because when the
Expand Down Expand Up @@ -927,7 +951,9 @@ def copy_build_options_from_regular_ones(self) -> bool:
def set_options(self, options: T.Dict[OptionKey, T.Any], subproject: str = '', first_invocation: bool = False) -> bool:
dirty = False
if not self.is_cross_build():
options = {k: v for k, v in options.items() if k.machine is not MachineChoice.BUILD}
other_machine = MachineChoice.HOST if self.is_native_clone else MachineChoice.BUILD
options = {k: v for k, v in options.items() if k.machine is not other_machine}

# Set prefix first because it's needed to sanitize other options
pfk = OptionKey('prefix')
if pfk in options:
Expand All @@ -950,7 +976,7 @@ def set_options(self, options: T.Dict[OptionKey, T.Any], subproject: str = '', f
sub = f'In subproject {subproject}: ' if subproject else ''
raise MesonException(f'{sub}Unknown options: "{unknown_options_str}"')

if not self.is_cross_build():
if not self.is_cross_build() and not self.is_native_clone:
dirty |= self.copy_build_options_from_regular_ones()

return dirty
Expand Down
33 changes: 33 additions & 0 deletions mesonbuild/environment.py
Original file line number Diff line number Diff line change
Expand Up @@ -620,6 +620,33 @@ def __init__(self, source_dir: str, build_dir: str, options: 'argparse.Namespace
self.default_pkgconfig = ['pkg-config']
self.wrap_resolver: T.Optional['Resolver'] = None

def copy_to_native(self) -> Environment:
    """Clone this Environment so that only the build machine remains relevant.

    Each per-machine table is rebuilt from its build-machine half via
    default_missing(), so the clone behaves like a plain native
    configuration; coredata is cloned through CoreData.copy_to_native().
    """
    def rebuilt_from_build(defaultable_cls, source):
        # Seed a fresh defaultable with the build half and fill the rest.
        table = defaultable_cls()
        table.build = source.build
        return table.default_missing()

    clone = Environment.__new__(Environment)
    clone.__dict__.update(self.__dict__)

    clone.coredata = self.coredata.copy_to_native()

    clone.machines = rebuilt_from_build(PerThreeMachineDefaultable, self.machines)
    clone.binaries = rebuilt_from_build(PerMachineDefaultable, self.binaries)
    clone.properties = rebuilt_from_build(PerMachineDefaultable, self.properties)
    clone.cmakevars = rebuilt_from_build(PerMachineDefaultable, self.cmakevars)

    # Native binaries run directly; no exe wrapper is needed.
    clone.exe_wrapper = None

    return clone

def _load_machine_file_options(self, config: 'ConfigParser', properties: Properties, machine: MachineChoice) -> None:
"""Read the contents of a Machine file and put it in the options store."""

Expand Down Expand Up @@ -841,6 +868,12 @@ def get_source_dir(self) -> str:
def get_build_dir(self) -> str:
    """Return the build directory path."""
    return self.build_dir

def build_output_rpath(self, subdir: str, *parts: str) -> str:
    """Return *subdir* (joined with optional *parts*) inside the build output.

    For a native-clone configuration the top-level directory gets a
    '-native' suffix so build-machine artifacts cannot collide with the
    host machine's outputs.

    Note: the varargs annotation is ``*parts: str`` — each positional
    argument is a single path component (as os.path.join requires); the
    previous ``T.Sequence[str]`` annotation claimed each part was itself
    a sequence, which was incorrect.
    """
    base = subdir
    if self.coredata.is_native_clone:
        base += '-native'
    return os.path.join(base, *parts)

def get_import_lib_dir(self) -> str:
    "Install dir for the import library (library used for linking)"
    # Import libraries live alongside regular libraries.
    return self.get_libdir()
Expand Down
24 changes: 15 additions & 9 deletions mesonbuild/interpreter/dependencyfallbacks.py
Original file line number Diff line number Diff line change
Expand Up @@ -74,7 +74,7 @@ def _do_dependency_cache(self, kwargs: TYPE_nkwargs, func_args: TYPE_nvar, func_
name = func_args[0]
cached_dep = self._get_cached_dep(name, kwargs)
if cached_dep:
self._verify_fallback_consistency(cached_dep)
self._verify_fallback_consistency(cached_dep, kwargs.get('native', False))
return cached_dep

def _do_dependency(self, kwargs: TYPE_nkwargs, func_args: TYPE_nvar, func_kwargs: TYPE_nkwargs) -> T.Optional[Dependency]:
Expand All @@ -95,7 +95,7 @@ def _do_dependency(self, kwargs: TYPE_nkwargs, func_args: TYPE_nvar, func_kwargs
def _do_existing_subproject(self, kwargs: TYPE_nkwargs, func_args: TYPE_nvar, func_kwargs: TYPE_nkwargs) -> T.Optional[Dependency]:
subp_name = func_args[0]
varname = self.subproject_varname
if subp_name and self._get_subproject(subp_name):
if subp_name and self._get_subproject(subp_name, kwargs.get('native', False)):
return self._get_subproject_dep(subp_name, varname, kwargs)
return None

Expand Down Expand Up @@ -127,18 +127,21 @@ def _do_subproject(self, kwargs: TYPE_nkwargs, func_args: TYPE_nvar, func_kwargs
func_kwargs.setdefault('version', [])
if 'default_options' in kwargs and isinstance(kwargs['default_options'], str):
func_kwargs['default_options'] = listify(kwargs['default_options'])
func_kwargs.setdefault('native', kwargs.get('native', False))
self.interpreter.do_subproject(subp_name, func_kwargs)
return self._get_subproject_dep(subp_name, varname, kwargs)

def _get_subproject(self, subp_name: str) -> T.Optional[SubprojectHolder]:
sub = self.interpreter.subprojects.get(subp_name)
def _get_subproject(self, subp_name: str, native: bool) -> T.Optional[SubprojectHolder]:
sub = self.interpreter.find_subproject(subp_name, native)
if sub and sub.found():
return sub
return None

def _get_subproject_dep(self, subp_name: str, varname: str, kwargs: TYPE_nkwargs) -> T.Optional[Dependency]:
native = kwargs.get('native', False)

# Verify the subproject is found
subproject = self._get_subproject(subp_name)
subproject = self._get_subproject(subp_name, native)
if not subproject:
mlog.log('Dependency', mlog.bold(self._display_name), 'from subproject',
mlog.bold(subp_name), 'found:', mlog.red('NO'),
Expand All @@ -160,7 +163,7 @@ def _get_subproject_dep(self, subp_name: str, varname: str, kwargs: TYPE_nkwargs
# If we have cached_dep we did all the checks and logging already in
# self._get_cached_dep().
if cached_dep:
self._verify_fallback_consistency(cached_dep)
self._verify_fallback_consistency(cached_dep, native)
return cached_dep

# Legacy: Use the variable name if provided instead of relying on the
Expand Down Expand Up @@ -256,10 +259,12 @@ def _get_subproject_variable(self, subproject: SubprojectHolder, varname: str) -
return None
return var_dep

def _verify_fallback_consistency(self, cached_dep: Dependency) -> None:
def _verify_fallback_consistency(self, cached_dep: Dependency, native: bool) -> None:
subp_name = self.subproject_name
if subp_name is None:
return
varname = self.subproject_varname
subproject = self._get_subproject(subp_name)
subproject = self._get_subproject(subp_name, native)
if subproject and varname:
var_dep = self._get_subproject_variable(subproject, varname)
if var_dep and cached_dep.found() and var_dep != cached_dep:
Expand Down Expand Up @@ -336,7 +341,8 @@ def lookup(self, kwargs: TYPE_nkwargs, force_fallback: bool = False) -> Dependen
subp_name, varname = self.wrap_resolver.find_dep_provider(name)
if subp_name:
self.forcefallback |= subp_name in force_fallback_for
if self.forcefallback or self.allow_fallback is True or required or self._get_subproject(subp_name):
if self.forcefallback or self.allow_fallback is True or required \
or self._get_subproject(subp_name, kwargs.get('native', False)):
self._subproject_impl(subp_name, varname)
break

Expand Down
Loading

0 comments on commit d9c30f8

Please sign in to comment.