167 changes: 64 additions & 103 deletions mesonbuild/cargo/cfg.py
@@ -4,6 +4,7 @@
"""Rust CFG parser.

Rust uses its `cfg()` format in cargo.
https://doc.rust-lang.org/reference/conditional-compilation.html

This may have the following functions:
- all()
@@ -22,18 +23,15 @@
from __future__ import annotations
import dataclasses
import enum
import functools
import typing as T


from . import builder
from .. import mparser
from ..mesonlib import MesonBugException

if T.TYPE_CHECKING:
_T = T.TypeVar('_T')
_LEX_TOKEN = T.Tuple['TokenType', T.Optional[str]]
_LEX_STREAM = T.Iterable[_LEX_TOKEN]
_LEX_STREAM = T.Iterator[_LEX_TOKEN]
_LEX_STREAM_AH = T.Iterator[T.Tuple[_LEX_TOKEN, T.Optional[_LEX_TOKEN]]]


@@ -48,6 +46,7 @@ class TokenType(enum.Enum):
NOT = enum.auto()
COMMA = enum.auto()
EQUAL = enum.auto()
CFG = enum.auto()


def lexer(raw: str) -> _LEX_STREAM:
@@ -56,45 +55,41 @@ def lexer(raw: str) -> _LEX_STREAM:
:param raw: The raw cfg() expression
:return: An iterable of tokens
"""
buffer: T.List[str] = []
start: int = 0
is_string: bool = False
for s in raw:
if s.isspace() or s in {')', '(', ',', '='} or (s == '"' and buffer):
val = ''.join(buffer)
buffer.clear()
if is_string:
for i, s in enumerate(raw):
if s.isspace() or s in {')', '(', ',', '=', '"'}:
val = raw[start:i]
start = i + 1
if s == '"' and is_string:
yield (TokenType.STRING, val)
is_string = False
continue
elif val == 'any':
yield (TokenType.ANY, None)
elif val == 'all':
yield (TokenType.ALL, None)
elif val == 'not':
yield (TokenType.NOT, None)
elif val == 'cfg':
yield (TokenType.CFG, None)
elif val:
yield (TokenType.IDENTIFIER, val)

if s == '(':
yield (TokenType.LPAREN, None)
continue
elif s == ')':
yield (TokenType.RPAREN, None)
continue
elif s == ',':
yield (TokenType.COMMA, None)
continue
elif s == '=':
yield (TokenType.EQUAL, None)
continue
elif s.isspace():
continue

if s == '"':
is_string = not is_string
else:
buffer.append(s)
if buffer:
elif s == '"':
is_string = True
val = raw[start:]
if val:
# This should always be an identifier
yield (TokenType.IDENTIFIER, ''.join(buffer))
yield (TokenType.IDENTIFIER, val)


def lookahead(iter: T.Iterator[_T]) -> T.Iterator[T.Tuple[_T, T.Optional[_T]]]:
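As a quick orientation aid, here is a hedged sketch of the token stream the rewritten lexer yields for a representative expression; the import path and the exact tuple shapes are assumptions based on the code above, not part of the diff.

```python
# Illustrative only: assumes this module is importable as mesonbuild.cargo.cfg.
from mesonbuild.cargo.cfg import TokenType, lexer

tokens = list(lexer('all(target_arch = "x86_64", unix)'))
# Tracing the loop above, this should yield:
# [(TokenType.ALL, None), (TokenType.LPAREN, None),
#  (TokenType.IDENTIFIER, 'target_arch'), (TokenType.EQUAL, None),
#  (TokenType.STRING, 'x86_64'), (TokenType.COMMA, None),
#  (TokenType.IDENTIFIER, 'unix'), (TokenType.RPAREN, None)]
```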
@@ -146,8 +141,8 @@ class Identifier(IR):
@dataclasses.dataclass
class Equal(IR):

lhs: IR
rhs: IR
lhs: Identifier
rhs: String


@dataclasses.dataclass
@@ -175,41 +170,40 @@ def _parse(ast: _LEX_STREAM_AH) -> IR:
else:
ntoken, _ = (None, None)

stream: T.List[_LEX_TOKEN]
if token is TokenType.IDENTIFIER:
assert value
id_ = Identifier(value)
if ntoken is TokenType.EQUAL:
return Equal(Identifier(value), _parse(ast))
if token is TokenType.STRING:
return String(value)
if token is TokenType.EQUAL:
# In this case the previous caller already has handled the equal
return _parse(ast)
if token in {TokenType.ANY, TokenType.ALL}:
next(ast)
(token, value), _ = next(ast)
assert token is TokenType.STRING
assert value is not None
return Equal(id_, String(value))
return id_
elif token in {TokenType.ANY, TokenType.ALL}:
type_ = All if token is TokenType.ALL else Any
assert ntoken is TokenType.LPAREN
next(ast) # advance the iterator to get rid of the LPAREN
stream = []
args: T.List[IR] = []
while token is not TokenType.RPAREN:
(token, value), n_stream = next(ast)
assert token is TokenType.LPAREN
if n_stream and n_stream[0] == TokenType.RPAREN:
return type_(args)
while True:
args.append(_parse(ast))
(token, value), _ = next(ast)
if token is TokenType.COMMA:
args.append(_parse(lookahead(iter(stream))))
stream.clear()
else:
stream.append((token, value))
if stream:
args.append(_parse(lookahead(iter(stream))))
if token is TokenType.RPAREN:
break
assert token is TokenType.COMMA
return type_(args)
if token is TokenType.NOT:
next(ast) # advance the iterator to get rid of the LPAREN
stream = []
# Mypy can't figure out that token is overridden inside the while loop
while token is not TokenType.RPAREN: # type: ignore
(token, value), _ = next(ast)
stream.append((token, value))
return Not(_parse(lookahead(iter(stream))))

raise MesonBugException(f'Unhandled Cargo token: {token}')
elif token in {TokenType.NOT, TokenType.CFG}:
is_not = token is TokenType.NOT
(token, value), _ = next(ast)
assert token is TokenType.LPAREN
arg = _parse(ast)
(token, value), _ = next(ast)
assert token is TokenType.RPAREN
return Not(arg) if is_not else arg
else:
raise MesonBugException(f'Unhandled Cargo token:{token} {value}')


def parse(ast: _LEX_STREAM) -> IR:
@@ -218,57 +212,24 @@ def parse(ast: _LEX_STREAM) -> IR:
:param ast: An iterable of Tokens
:return: An mparser Node to be used as a conditional
"""
ast_i: _LEX_STREAM_AH = lookahead(iter(ast))
ast_i: _LEX_STREAM_AH = lookahead(ast)
return _parse(ast_i)
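A hedged sketch of the IR that parse() builds from such a token stream; the dataclass reprs are approximated from the definitions above.

```python
# Illustrative only: assumes the module path used in the earlier sketch.
from mesonbuild.cargo.cfg import lexer, parse

ir = parse(lexer('all(unix, target_os = "linux")'))
# Roughly: All(args=[Identifier(value='unix'),
#                    Equal(lhs=Identifier(value='target_os'),
#                          rhs=String(value='linux'))])
```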


@functools.singledispatch
def ir_to_meson(ir: T.Any, build: builder.Builder) -> mparser.BaseNode:
raise NotImplementedError


@ir_to_meson.register
def _(ir: String, build: builder.Builder) -> mparser.BaseNode:
return build.string(ir.value)


@ir_to_meson.register
def _(ir: Identifier, build: builder.Builder) -> mparser.BaseNode:
host_machine = build.identifier('host_machine')
if ir.value == "target_arch":
return build.method('cpu_family', host_machine)
elif ir.value in {"target_os", "target_family"}:
return build.method('system', host_machine)
elif ir.value == "target_endian":
return build.method('endian', host_machine)
raise MesonBugException(f"Unhandled Cargo identifier: {ir.value}")


@ir_to_meson.register
def _(ir: Equal, build: builder.Builder) -> mparser.BaseNode:
return build.equal(ir_to_meson(ir.lhs, build), ir_to_meson(ir.rhs, build))


@ir_to_meson.register
def _(ir: Not, build: builder.Builder) -> mparser.BaseNode:
return build.not_(ir_to_meson(ir.value, build))


@ir_to_meson.register
def _(ir: Any, build: builder.Builder) -> mparser.BaseNode:
args = iter(reversed(ir.args))
last = next(args)
cur = build.or_(ir_to_meson(next(args), build), ir_to_meson(last, build))
for a in args:
cur = build.or_(ir_to_meson(a, build), cur)
return cur
def _eval_cfg(ir: IR, cfgs: T.Dict[str, str]) -> bool:
if isinstance(ir, Identifier):
return ir.value in cfgs
elif isinstance(ir, Equal):
return cfgs.get(ir.lhs.value) == ir.rhs.value
elif isinstance(ir, Not):
return not _eval_cfg(ir.value, cfgs)
elif isinstance(ir, Any):
return any(_eval_cfg(i, cfgs) for i in ir.args)
elif isinstance(ir, All):
return all(_eval_cfg(i, cfgs) for i in ir.args)
else:
raise MesonBugException(f'Unhandled Cargo cfg IR: {ir}')


@ir_to_meson.register
def _(ir: All, build: builder.Builder) -> mparser.BaseNode:
args = iter(reversed(ir.args))
last = next(args)
cur = build.and_(ir_to_meson(next(args), build), ir_to_meson(last, build))
for a in args:
cur = build.and_(ir_to_meson(a, build), cur)
return cur
def eval_cfg(raw: str, cfgs: T.Dict[str, str]) -> bool:
return _eval_cfg(parse(lexer(raw)), cfgs)
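A hedged usage sketch for the new evaluator. The cfg table mirrors the shape produced by the interpreter changes below: bare cfg names map to empty strings, key/value cfgs to their unquoted values.

```python
# Illustrative only.
from mesonbuild.cargo.cfg import eval_cfg

cfgs = {'unix': '', 'target_os': 'linux', 'target_arch': 'x86_64'}

eval_cfg('cfg(unix)', cfgs)                          # True; cfg(...) just unwraps its argument
eval_cfg('any(windows, target_os = "linux")', cfgs)  # True
eval_cfg('not(target_arch = "x86_64")', cfgs)        # False
eval_cfg('all(unix, target_os = "windows")', cfgs)   # False
```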
30 changes: 27 additions & 3 deletions mesonbuild/cargo/interpreter.py
@@ -20,9 +20,8 @@
import itertools
import typing as T

from . import builder
from . import version
from ..mesonlib import MesonException, Popen_safe
from . import builder, version, cfg
from ..mesonlib import MesonException, Popen_safe, MachineChoice
from .. import coredata, mlog
from ..wrap.wrap import PackageDefinition

@@ -35,6 +34,7 @@
from .. import mparser
from ..environment import Environment
from ..interpreterbase import SubProject
from ..compilers.rust import RustCompiler

Check notice (Code scanning / CodeQL): Unused import. Import of 'RustCompiler' is not used.

# Copied from typeshed. Blarg that they don't expose this
class DataclassInstance(Protocol):
@@ -476,10 +476,13 @@ class PackageKey:
class Interpreter:
def __init__(self, env: Environment) -> None:
self.environment = env
self.host_rustc = T.cast('RustCompiler', self.environment.coredata.compilers[MachineChoice.HOST]['rust'])
# Map Cargo.toml's subdir to loaded manifest.
self.manifests: T.Dict[str, Manifest] = {}
# Map of cargo package (name + api) to its state
self.packages: T.Dict[PackageKey, PackageState] = {}
# Rustc's config
self.cfgs = self._get_cfgs()

def interpret(self, subdir: str) -> mparser.CodeBlockNode:
manifest = self._load_manifest(subdir)
@@ -526,6 +529,10 @@ def _fetch_package(self, package_name: str, api: str) -> T.Tuple[PackageState, b
self.environment.wrap_resolver.wraps[meson_depname].type is not None
pkg = PackageState(manifest, downloaded)
self.packages[key] = pkg
# Merge target specific dependencies that are enabled
for condition, dependencies in manifest.target.items():
if cfg.eval_cfg(condition, self.cfgs):
manifest.dependencies.update(dependencies)
# Fetch required dependencies recursively.
for depname, dep in manifest.dependencies.items():
if not dep.optional:
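For context, a minimal stand-in for what this merge does with the `[target."cfg(unix)".dependencies]` test case added further down; plain dicts are used here instead of the real Manifest and dependency objects, so treat it as a sketch only.

```python
# Illustrative only: plain dicts stand in for Manifest.dependencies,
# Manifest.target and the dependency entries.
from mesonbuild.cargo import cfg

cfgs = {'unix': '', 'target_os': 'linux'}
dependencies = {'libname': {'version': '1'}}
target = {'cfg(unix)': {'unixdep': {'version': '0.1'}}}

for condition, deps in target.items():
    if cfg.eval_cfg(condition, cfgs):
        dependencies.update(deps)

# On a unix-like host 'unixdep' is now in dependencies and gets fetched
# like any unconditional dependency; on other hosts it is skipped.
```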
@@ -599,6 +606,23 @@ def _enable_feature(self, pkg: PackageState, feature: str) -> None:
else:
self._enable_feature(pkg, f)

def _get_cfgs(self) -> T.Dict[str, str]:
cfgs = self.host_rustc.get_cfgs().copy()
rustflags = self.environment.coredata.get_external_args(MachineChoice.HOST, 'rust')
rustflags_i = iter(rustflags)
for i in rustflags_i:
if i == '--cfg':
cfgs.append(next(rustflags_i))
return dict(self._split_cfg(i) for i in cfgs)

@staticmethod
def _split_cfg(cfg: str) -> T.Tuple[str, str]:
pair = cfg.split('=', maxsplit=1)
value = pair[1] if len(pair) > 1 else ''
if value and value[0] == '"':
value = value[1:-1]
return pair[0], value
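A hedged sketch of how _split_cfg normalizes individual rustc cfg strings into the key/value pairs that eval_cfg consumes; expected results are traced from the code above.

```python
# Illustrative only: assumes the class above is importable from
# mesonbuild.cargo.interpreter.
from mesonbuild.cargo.interpreter import Interpreter

Interpreter._split_cfg('unix')                   # ('unix', '')
Interpreter._split_cfg('target_os="linux"')      # ('target_os', 'linux')
Interpreter._split_cfg('target_feature="sse2"')  # ('target_feature', 'sse2')
```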

def _create_project(self, pkg: PackageState, build: builder.Builder) -> T.List[mparser.BaseNode]:
"""Create the project() function call

8 changes: 6 additions & 2 deletions mesonbuild/compilers/rust.py
@@ -182,10 +182,14 @@ def get_target_libdir(self) -> str:
return stdo.split('\n', maxsplit=1)[0]

@functools.lru_cache(maxsize=None)
def get_crt_static(self) -> bool:
def get_cfgs(self) -> T.List[str]:
cmd = self.get_exelist(ccache=False) + ['--print', 'cfg']
p, stdo, stde = Popen_safe_logged(cmd)
return bool(re.search('^target_feature="crt-static"$', stdo, re.MULTILINE))
return stdo.splitlines()

@functools.lru_cache(maxsize=None)
def get_crt_static(self) -> bool:
return 'target_feature="crt-static"' in self.get_cfgs()
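For reference, a hedged sketch of the kind of list get_cfgs() returns; each entry is one `rustc --print cfg` output line, and the concrete values below are illustrative for an x86_64 Linux host rather than taken from this PR.

```python
# Illustrative only; the real list comes from invoking rustc.
cfgs = [
    'debug_assertions',
    'panic="unwind"',
    'target_arch="x86_64"',
    'target_endian="little"',
    'target_env="gnu"',
    'target_family="unix"',
    'target_os="linux"',
    'target_vendor="unknown"',
    'unix',
]
# get_crt_static() then only needs to check whether
# 'target_feature="crt-static"' appears in that list.
```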

def get_debug_args(self, is_debug: bool) -> T.List[str]:
return clike_debug_args[is_debug]
1 change: 1 addition & 0 deletions mesonbuild/interpreter/interpreter.py
@@ -1050,6 +1050,7 @@ def _do_subproject_cargo(self, subp_name: str, subdir: str,
mlog.warning('Cargo subproject is an experimental feature and has no backwards compatibility guarantees.',
once=True, location=self.current_node)
if self.environment.cargo is None:
self.add_languages(['rust'], True, MachineChoice.HOST)
self.environment.cargo = cargo.Interpreter(self.environment)
with mlog.nested(subp_name):
ast = self.environment.cargo.interpret(subdir)
@@ -27,6 +27,9 @@ features = ["f1"]
[dependencies.libname]
version = "1"

[target."cfg(unix)".dependencies.unixdep]
version = "0.1"

[features]
default = ["f1"]
f1 = ["f2", "f3"]
@@ -8,6 +8,11 @@ extern "C" {
#[cfg(feature = "foo")]
#[no_mangle]
pub extern "C" fn rust_func() -> i32 {
#[cfg(unix)]
{
extern crate unixdep;
assert!(unixdep::only_on_unix() == 0);
}
assert!(common::common_func() == 0);
assert!(libothername::stuff() == 42);
let v: i32;
@@ -0,0 +1,2 @@
[wrap-file]
method = cargo
@@ -0,0 +1,7 @@
[package]
name = "unixdep"
version = "0.1"
edition = "2021"

[lib]
path = "lib.rs"
@@ -0,0 +1,8 @@
pub fn only_on_unix() -> i32 {
0
}

#[cfg(not(unix))]
pub fn broken() -> i32 {
plop
}