about summary refs log tree commit diff
path: root/mesonbuild/cargo/cfg.py
diff options
context:
space:
mode:
Diffstat (limited to 'mesonbuild/cargo/cfg.py')
-rw-r--r-- mesonbuild/cargo/cfg.py 167
1 file changed, 64 insertions, 103 deletions
diff --git a/mesonbuild/cargo/cfg.py b/mesonbuild/cargo/cfg.py
index 0d49527..a0ee6e2 100644
--- a/mesonbuild/cargo/cfg.py
+++ b/mesonbuild/cargo/cfg.py
@@ -4,6 +4,7 @@
"""Rust CFG parser.
Rust uses its `cfg()` format in cargo.
+https://doc.rust-lang.org/reference/conditional-compilation.html
This may have the following functions:
- all()
@@ -22,18 +23,15 @@ so you could have examples like:
from __future__ import annotations
import dataclasses
import enum
-import functools
import typing as T
-from . import builder
-from .. import mparser
from ..mesonlib import MesonBugException
if T.TYPE_CHECKING:
_T = T.TypeVar('_T')
_LEX_TOKEN = T.Tuple['TokenType', T.Optional[str]]
- _LEX_STREAM = T.Iterable[_LEX_TOKEN]
+ _LEX_STREAM = T.Iterator[_LEX_TOKEN]
_LEX_STREAM_AH = T.Iterator[T.Tuple[_LEX_TOKEN, T.Optional[_LEX_TOKEN]]]
@@ -48,6 +46,7 @@ class TokenType(enum.Enum):
NOT = enum.auto()
COMMA = enum.auto()
EQUAL = enum.auto()
+ CFG = enum.auto()
def lexer(raw: str) -> _LEX_STREAM:
@@ -56,45 +55,41 @@ def lexer(raw: str) -> _LEX_STREAM:
:param raw: The raw cfg() expression
:return: An iterable of tokens
"""
- buffer: T.List[str] = []
+ start: int = 0
is_string: bool = False
- for s in raw:
- if s.isspace() or s in {')', '(', ',', '='} or (s == '"' and buffer):
- val = ''.join(buffer)
- buffer.clear()
- if is_string:
+ for i, s in enumerate(raw):
+ if s.isspace() or s in {')', '(', ',', '=', '"'}:
+ val = raw[start:i]
+ start = i + 1
+ if s == '"' and is_string:
yield (TokenType.STRING, val)
+ is_string = False
+ continue
elif val == 'any':
yield (TokenType.ANY, None)
elif val == 'all':
yield (TokenType.ALL, None)
elif val == 'not':
yield (TokenType.NOT, None)
+ elif val == 'cfg':
+ yield (TokenType.CFG, None)
elif val:
yield (TokenType.IDENTIFIER, val)
if s == '(':
yield (TokenType.LPAREN, None)
- continue
elif s == ')':
yield (TokenType.RPAREN, None)
- continue
elif s == ',':
yield (TokenType.COMMA, None)
- continue
elif s == '=':
yield (TokenType.EQUAL, None)
- continue
- elif s.isspace():
- continue
-
- if s == '"':
- is_string = not is_string
- else:
- buffer.append(s)
- if buffer:
+ elif s == '"':
+ is_string = True
+ val = raw[start:]
+ if val:
# This should always be an identifier
- yield (TokenType.IDENTIFIER, ''.join(buffer))
+ yield (TokenType.IDENTIFIER, val)
def lookahead(iter: T.Iterator[_T]) -> T.Iterator[T.Tuple[_T, T.Optional[_T]]]:
@@ -146,8 +141,8 @@ class Identifier(IR):
@dataclasses.dataclass
class Equal(IR):
- lhs: IR
- rhs: IR
+ lhs: Identifier
+ rhs: String
@dataclasses.dataclass
@@ -175,41 +170,40 @@ def _parse(ast: _LEX_STREAM_AH) -> IR:
else:
ntoken, _ = (None, None)
- stream: T.List[_LEX_TOKEN]
if token is TokenType.IDENTIFIER:
+ assert value
+ id_ = Identifier(value)
if ntoken is TokenType.EQUAL:
- return Equal(Identifier(value), _parse(ast))
- if token is TokenType.STRING:
- return String(value)
- if token is TokenType.EQUAL:
- # In this case the previous caller already has handled the equal
- return _parse(ast)
- if token in {TokenType.ANY, TokenType.ALL}:
+ next(ast)
+ (token, value), _ = next(ast)
+ assert token is TokenType.STRING
+ assert value is not None
+ return Equal(id_, String(value))
+ return id_
+ elif token in {TokenType.ANY, TokenType.ALL}:
type_ = All if token is TokenType.ALL else Any
- assert ntoken is TokenType.LPAREN
- next(ast) # advance the iterator to get rid of the LPAREN
- stream = []
args: T.List[IR] = []
- while token is not TokenType.RPAREN:
+ (token, value), n_stream = next(ast)
+ assert token is TokenType.LPAREN
+ if n_stream and n_stream[0] == TokenType.RPAREN:
+ return type_(args)
+ while True:
+ args.append(_parse(ast))
(token, value), _ = next(ast)
- if token is TokenType.COMMA:
- args.append(_parse(lookahead(iter(stream))))
- stream.clear()
- else:
- stream.append((token, value))
- if stream:
- args.append(_parse(lookahead(iter(stream))))
+ if token is TokenType.RPAREN:
+ break
+ assert token is TokenType.COMMA
return type_(args)
- if token is TokenType.NOT:
- next(ast) # advance the iterator to get rid of the LPAREN
- stream = []
- # Mypy can't figure out that token is overridden inside the while loop
- while token is not TokenType.RPAREN: # type: ignore
- (token, value), _ = next(ast)
- stream.append((token, value))
- return Not(_parse(lookahead(iter(stream))))
-
- raise MesonBugException(f'Unhandled Cargo token: {token}')
+ elif token in {TokenType.NOT, TokenType.CFG}:
+ is_not = token is TokenType.NOT
+ (token, value), _ = next(ast)
+ assert token is TokenType.LPAREN
+ arg = _parse(ast)
+ (token, value), _ = next(ast)
+ assert token is TokenType.RPAREN
+ return Not(arg) if is_not else arg
+ else:
+ raise MesonBugException(f'Unhandled Cargo token:{token} {value}')
def parse(ast: _LEX_STREAM) -> IR:
@@ -218,57 +212,24 @@ def parse(ast: _LEX_STREAM) -> IR:
:param ast: An iterable of Tokens
:return: An mparser Node to be used as a conditional
"""
- ast_i: _LEX_STREAM_AH = lookahead(iter(ast))
+ ast_i: _LEX_STREAM_AH = lookahead(ast)
return _parse(ast_i)
-@functools.singledispatch
-def ir_to_meson(ir: T.Any, build: builder.Builder) -> mparser.BaseNode:
- raise NotImplementedError
-
-
-@ir_to_meson.register
-def _(ir: String, build: builder.Builder) -> mparser.BaseNode:
- return build.string(ir.value)
-
-
-@ir_to_meson.register
-def _(ir: Identifier, build: builder.Builder) -> mparser.BaseNode:
- host_machine = build.identifier('host_machine')
- if ir.value == "target_arch":
- return build.method('cpu_family', host_machine)
- elif ir.value in {"target_os", "target_family"}:
- return build.method('system', host_machine)
- elif ir.value == "target_endian":
- return build.method('endian', host_machine)
- raise MesonBugException(f"Unhandled Cargo identifier: {ir.value}")
-
-
-@ir_to_meson.register
-def _(ir: Equal, build: builder.Builder) -> mparser.BaseNode:
- return build.equal(ir_to_meson(ir.lhs, build), ir_to_meson(ir.rhs, build))
-
-
-@ir_to_meson.register
-def _(ir: Not, build: builder.Builder) -> mparser.BaseNode:
- return build.not_(ir_to_meson(ir.value, build))
-
-
-@ir_to_meson.register
-def _(ir: Any, build: builder.Builder) -> mparser.BaseNode:
- args = iter(reversed(ir.args))
- last = next(args)
- cur = build.or_(ir_to_meson(next(args), build), ir_to_meson(last, build))
- for a in args:
- cur = build.or_(ir_to_meson(a, build), cur)
- return cur
+def _eval_cfg(ir: IR, cfgs: T.Dict[str, str]) -> bool:
+ if isinstance(ir, Identifier):
+ return ir.value in cfgs
+ elif isinstance(ir, Equal):
+ return cfgs.get(ir.lhs.value) == ir.rhs.value
+ elif isinstance(ir, Not):
+ return not _eval_cfg(ir.value, cfgs)
+ elif isinstance(ir, Any):
+ return any(_eval_cfg(i, cfgs) for i in ir.args)
+ elif isinstance(ir, All):
+ return all(_eval_cfg(i, cfgs) for i in ir.args)
+ else:
+ raise MesonBugException(f'Unhandled Cargo cfg IR: {ir}')
-@ir_to_meson.register
-def _(ir: All, build: builder.Builder) -> mparser.BaseNode:
- args = iter(reversed(ir.args))
- last = next(args)
- cur = build.and_(ir_to_meson(next(args), build), ir_to_meson(last, build))
- for a in args:
- cur = build.and_(ir_to_meson(a, build), cur)
- return cur
+def eval_cfg(raw: str, cfgs: T.Dict[str, str]) -> bool:
+ return _eval_cfg(parse(lexer(raw)), cfgs)