Diffstat (limited to 'mesonbuild')
-rw-r--r--  mesonbuild/ast/interpreter.py | 718
-rw-r--r--  mesonbuild/ast/introspection.py | 217
-rw-r--r--  mesonbuild/ast/printer.py | 50
-rw-r--r--  mesonbuild/backend/backends.py | 211
-rw-r--r--  mesonbuild/backend/ninjabackend.py | 193
-rw-r--r--  mesonbuild/backend/vs2010backend.py | 102
-rw-r--r--  mesonbuild/backend/vs2012backend.py | 2
-rw-r--r--  mesonbuild/backend/vs2013backend.py | 2
-rw-r--r--  mesonbuild/backend/vs2015backend.py | 2
-rw-r--r--  mesonbuild/backend/vs2017backend.py | 2
-rw-r--r--  mesonbuild/backend/vs2019backend.py | 2
-rw-r--r--  mesonbuild/backend/vs2022backend.py | 2
-rw-r--r--  mesonbuild/backend/xcodebackend.py | 57
-rw-r--r--  mesonbuild/build.py | 223
-rw-r--r--  mesonbuild/cargo/cfg.py | 167
-rw-r--r--  mesonbuild/cargo/interpreter.py | 530
-rw-r--r--  mesonbuild/cargo/manifest.py | 645
-rw-r--r--  mesonbuild/cargo/raw.py | 192
-rw-r--r--  mesonbuild/cargo/toml.py | 49
-rw-r--r--  mesonbuild/cargo/version.py | 14
-rw-r--r--  mesonbuild/cmake/common.py | 1
-rw-r--r--  mesonbuild/cmake/interpreter.py | 21
-rw-r--r--  mesonbuild/cmake/toolchain.py | 9
-rw-r--r--  mesonbuild/cmake/tracetargets.py | 10
-rw-r--r--  mesonbuild/compilers/__init__.py | 2
-rw-r--r--  mesonbuild/compilers/c.py | 2
-rw-r--r--  mesonbuild/compilers/compilers.py | 73
-rw-r--r--  mesonbuild/compilers/cpp.py | 15
-rw-r--r--  mesonbuild/compilers/cs.py | 5
-rw-r--r--  mesonbuild/compilers/cuda.py | 42
-rw-r--r--  mesonbuild/compilers/d.py | 21
-rw-r--r--  mesonbuild/compilers/detect.py | 62
-rw-r--r--  mesonbuild/compilers/fortran.py | 15
-rw-r--r--  mesonbuild/compilers/java.py | 5
-rw-r--r--  mesonbuild/compilers/mixins/clang.py | 5
-rw-r--r--  mesonbuild/compilers/mixins/clike.py | 48
-rw-r--r--  mesonbuild/compilers/mixins/emscripten.py | 2
-rw-r--r--  mesonbuild/compilers/mixins/gnu.py | 2
-rw-r--r--  mesonbuild/compilers/mixins/islinker.py | 5
-rw-r--r--  mesonbuild/compilers/mixins/pgi.py | 6
-rw-r--r--  mesonbuild/compilers/rust.py | 67
-rw-r--r--  mesonbuild/compilers/swift.py | 32
-rw-r--r--  mesonbuild/compilers/vala.py | 2
-rw-r--r--  mesonbuild/coredata.py | 107
-rw-r--r--  mesonbuild/dependencies/base.py | 5
-rw-r--r--  mesonbuild/dependencies/cuda.py | 74
-rw-r--r--  mesonbuild/dependencies/detect.py | 10
-rw-r--r--  mesonbuild/dependencies/python.py | 16
-rw-r--r--  mesonbuild/dependencies/qt.py | 5
-rw-r--r--  mesonbuild/dependencies/scalapack.py | 11
-rw-r--r--  mesonbuild/dependencies/ui.py | 11
-rw-r--r--  mesonbuild/environment.py | 116
-rw-r--r--  mesonbuild/interpreter/compiler.py | 82
-rw-r--r--  mesonbuild/interpreter/dependencyfallbacks.py | 16
-rw-r--r--  mesonbuild/interpreter/interpreter.py | 101
-rw-r--r--  mesonbuild/interpreter/interpreterobjects.py | 168
-rw-r--r--  mesonbuild/interpreter/kwargs.py | 9
-rw-r--r--  mesonbuild/interpreter/mesonmain.py | 67
-rw-r--r--  mesonbuild/interpreter/primitives/array.py | 51
-rw-r--r--  mesonbuild/interpreter/primitives/boolean.py | 26
-rw-r--r--  mesonbuild/interpreter/primitives/dict.py | 43
-rw-r--r--  mesonbuild/interpreter/primitives/integer.py | 53
-rw-r--r--  mesonbuild/interpreter/primitives/range.py | 7
-rw-r--r--  mesonbuild/interpreter/primitives/string.py | 79
-rw-r--r--  mesonbuild/interpreter/type_checking.py | 32
-rw-r--r--  mesonbuild/interpreterbase/__init__.py | 6
-rw-r--r--  mesonbuild/interpreterbase/baseobjects.py | 123
-rw-r--r--  mesonbuild/interpreterbase/decorators.py | 2
-rw-r--r--  mesonbuild/linkers/detect.py | 7
-rw-r--r--  mesonbuild/linkers/linkers.py | 118
-rw-r--r--  mesonbuild/mconf.py | 57
-rw-r--r--  mesonbuild/mdevenv.py | 11
-rw-r--r--  mesonbuild/mdist.py | 2
-rw-r--r--  mesonbuild/mformat.py | 26
-rw-r--r--  mesonbuild/mintro.py | 118
-rw-r--r--  mesonbuild/modules/__init__.py | 20
-rw-r--r--  mesonbuild/modules/cmake.py | 5
-rw-r--r--  mesonbuild/modules/fs.py | 121
-rw-r--r--  mesonbuild/modules/gnome.py | 162
-rw-r--r--  mesonbuild/modules/hotdoc.py | 7
-rw-r--r--  mesonbuild/modules/pkgconfig.py | 32
-rw-r--r--  mesonbuild/modules/python.py | 28
-rw-r--r--  mesonbuild/modules/rust.py | 8
-rw-r--r--  mesonbuild/mparser.py | 63
-rw-r--r--  mesonbuild/msetup.py | 49
-rwxr-xr-x  mesonbuild/msubprojects.py | 17
-rw-r--r--  mesonbuild/options.py | 522
-rw-r--r--  mesonbuild/rewriter.py | 794
-rw-r--r--  mesonbuild/scripts/clangtidy.py | 4
-rw-r--r--  mesonbuild/scripts/run_tool.py | 20
-rw-r--r--  mesonbuild/templates/cpptemplates.py | 2
-rw-r--r--  mesonbuild/utils/platform.py | 22
-rw-r--r--  mesonbuild/utils/posix.py | 26
-rw-r--r--  mesonbuild/utils/universal.py | 184
-rw-r--r--  mesonbuild/utils/win32.py | 24
-rw-r--r--  mesonbuild/wrap/wrap.py | 131
-rw-r--r--  mesonbuild/wrap/wraptool.py | 8
97 files changed, 4533 insertions, 3107 deletions
diff --git a/mesonbuild/ast/interpreter.py b/mesonbuild/ast/interpreter.py
index cd8156a..62c4839 100644
--- a/mesonbuild/ast/interpreter.py
+++ b/mesonbuild/ast/interpreter.py
@@ -8,8 +8,12 @@ from __future__ import annotations
import os
import sys
import typing as T
+from collections import defaultdict
+from dataclasses import dataclass
+import itertools
+from pathlib import Path
-from .. import mparser, mesonlib
+from .. import mparser, mesonlib, mlog
from .. import environment
from ..interpreterbase import (
@@ -20,8 +24,14 @@ from ..interpreterbase import (
ContinueRequest,
Disabler,
default_resolve_key,
+ is_disabled,
+ UnknownValue,
+ UndefinedVariable,
+ InterpreterObject,
)
+from ..interpreterbase.helpers import flatten
+
from ..interpreter import (
StringHolder,
BooleanHolder,
@@ -36,19 +46,21 @@ from ..mparser import (
ArrayNode,
AssignmentNode,
BaseNode,
- ElementaryNode,
EmptyNode,
IdNode,
MethodNode,
NotNode,
PlusAssignmentNode,
TernaryNode,
+ SymbolNode,
+ Token,
+ FunctionNode,
)
if T.TYPE_CHECKING:
from .visitor import AstVisitor
from ..interpreter import Interpreter
- from ..interpreterbase import SubProject, TYPE_nkwargs, TYPE_var
+ from ..interpreterbase import SubProject, TYPE_var, TYPE_nvar
from ..mparser import (
AndNode,
ComparisonNode,
@@ -60,38 +72,122 @@ if T.TYPE_CHECKING:
UMinusNode,
)
-class DontCareObject(MesonInterpreterObject):
- pass
-
-class MockExecutable(MesonInterpreterObject):
- pass
-
-class MockStaticLibrary(MesonInterpreterObject):
- pass
-
-class MockSharedLibrary(MesonInterpreterObject):
- pass
-
-class MockCustomTarget(MesonInterpreterObject):
- pass
-
-class MockRunTarget(MesonInterpreterObject):
- pass
-
-ADD_SOURCE = 0
-REMOVE_SOURCE = 1
-
_T = T.TypeVar('_T')
_V = T.TypeVar('_V')
+def _symbol(val: str) -> SymbolNode:
+ return SymbolNode(Token('', '', 0, 0, 0, (0, 0), val))
+
+# `IntrospectionFile` is to the `IntrospectionInterpreter` what `File` is to the normal `Interpreter`.
+@dataclass
+class IntrospectionFile:
+ subdir: str
+ rel: str
+
+ def to_abs_path(self, root_dir: Path) -> Path:
+ return (root_dir / self.subdir / self.rel).resolve()
+
+ def __hash__(self) -> int:
+ return hash((self.__class__.__name__, self.subdir, self.rel))
+
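+# A minimal usage sketch (the subdir and file name here are hypothetical,
+# purely for illustration):
+#
+#   f = IntrospectionFile('sub', 'main.c')
+#   f.to_abs_path(Path('/proj'))  # == Path('/proj/sub/main.c').resolve()
+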
+# `IntrospectionDependency` is to the `IntrospectionInterpreter` what `Dependency` is to the normal `Interpreter`.
+@dataclass
+class IntrospectionDependency(MesonInterpreterObject):
+ name: T.Union[str, UnknownValue]
+ required: T.Union[bool, UnknownValue]
+ version: T.Union[T.List[str], UnknownValue]
+ has_fallback: bool
+ conditional: bool
+ node: FunctionNode
+
+# `IntrospectionBuildTarget` is to the `IntrospectionInterpreter` what `BuildTarget` is to the normal `Interpreter`.
+@dataclass
+class IntrospectionBuildTarget(MesonInterpreterObject):
+ name: str
+ machine: str
+ id: str
+ typename: str
+ defined_in: str
+ subdir: str
+ build_by_default: bool
+ installed: bool
+ outputs: T.List[str]
+ source_nodes: T.List[BaseNode]
+ extra_files: BaseNode
+ kwargs: T.Dict[str, TYPE_var]
+ node: FunctionNode
+
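+# Data does not flow transparently through most function and method calls, so
+# edges leaving such nodes are skipped when walking the DAG; `files()` and
+# `get_variable()` are the exceptions, since their results are their inputs.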
+def is_ignored_edge(src: T.Union[BaseNode, UnknownValue]) -> bool:
+ return (isinstance(src, FunctionNode) and src.func_name.value not in {'files', 'get_variable'}) or isinstance(src, MethodNode)
+
+class DataflowDAG:
+ src_to_tgts: T.DefaultDict[T.Union[BaseNode, UnknownValue], T.Set[T.Union[BaseNode, UnknownValue]]]
+ tgt_to_srcs: T.DefaultDict[T.Union[BaseNode, UnknownValue], T.Set[T.Union[BaseNode, UnknownValue]]]
+
+ def __init__(self) -> None:
+ self.src_to_tgts = defaultdict(set)
+ self.tgt_to_srcs = defaultdict(set)
+
+ def add_edge(self, source: T.Union[BaseNode, UnknownValue], target: T.Union[BaseNode, UnknownValue]) -> None:
+ self.src_to_tgts[source].add(target)
+ self.tgt_to_srcs[target].add(source)
+
+ # Returns all nodes in the DAG that are reachable from a node in `srcs`.
+    # In other words, a node `a` is part of the returned set exactly if data
+ # from `srcs` flows into `a`, directly or indirectly.
+ # Certain edges are ignored.
+ def reachable(self, srcs: T.Set[T.Union[BaseNode, UnknownValue]], reverse: bool) -> T.Set[T.Union[BaseNode, UnknownValue]]:
+ reachable = srcs.copy()
+ active = srcs.copy()
+ while active:
+ new: T.Set[T.Union[BaseNode, UnknownValue]] = set()
+ if reverse:
+ for tgt in active:
+ new.update(src for src in self.tgt_to_srcs[tgt] if not is_ignored_edge(src))
+ else:
+ for src in active:
+ if is_ignored_edge(src):
+ continue
+ new.update(tgt for tgt in self.src_to_tgts[src])
+ reachable.update(new)
+ active = new
+ return reachable
+
+ # Returns all paths from src to target.
+ # Certain edges are ignored.
+ def find_all_paths(self, src: T.Union[BaseNode, UnknownValue], target: T.Union[BaseNode, UnknownValue]) -> T.List[T.List[T.Union[BaseNode, UnknownValue]]]:
+ queue = [(src, [src])]
+ paths = []
+ while queue:
+ cur, path = queue.pop()
+ if cur == target:
+ paths.append(path)
+ if is_ignored_edge(cur):
+ continue
+ queue.extend((tgt, path + [tgt]) for tgt in self.src_to_tgts[cur])
+ return paths
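+
+# A minimal sketch of how the DAG is queried (`a`, `b`, `c` stand in for
+# arbitrary non-call nodes; illustration only, not part of the change):
+#
+#   dag = DataflowDAG()
+#   dag.add_edge(a, b)  # data flows from a into b
+#   dag.add_edge(b, c)
+#   dag.reachable({a}, reverse=False)  # {a, b, c}: everything a flows into
+#   dag.reachable({c}, reverse=True)   # {c, b, a}: everything flowing into c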
class AstInterpreter(InterpreterBase):
def __init__(self, source_root: str, subdir: str, subproject: SubProject, subproject_dir: str, env: environment.Environment, visitors: T.Optional[T.List[AstVisitor]] = None):
super().__init__(source_root, subdir, subproject, subproject_dir, env)
self.visitors = visitors if visitors is not None else []
- self.assignments: T.Dict[str, BaseNode] = {}
- self.assign_vals: T.Dict[str, T.Any] = {}
- self.reverse_assignment: T.Dict[str, BaseNode] = {}
+ self.nesting: T.List[int] = []
+ self.cur_assignments: T.DefaultDict[str, T.List[T.Tuple[T.List[int], T.Union[BaseNode, UnknownValue]]]] = defaultdict(list)
+ self.all_assignment_nodes: T.DefaultDict[str, T.List[AssignmentNode]] = defaultdict(list)
+ # dataflow_dag is an acyclic directed graph that contains an edge
+ # from one instance of `BaseNode` to another instance of `BaseNode` if
+ # data flows directly from one to the other. Example: If meson.build
+ # contains this:
+ # var = 'foo' + '123'
+ # executable(var, 'src.c')
+ # var = 'bar'
+ # dataflow_dag will contain an edge from the IdNode corresponding to
+ # 'var' in line 2 to the ArithmeticNode corresponding to 'foo' + '123'.
+ # This graph is crucial for e.g. node_to_runtime_value because we have
+    # to know that 'var' in line 2 is 'foo123' and not 'bar'.
+ self.dataflow_dag = DataflowDAG()
+ self.funcvals: T.Dict[BaseNode, T.Any] = {}
+ self.tainted = False
self.funcs.update({'project': self.func_do_nothing,
'test': self.func_do_nothing,
'benchmark': self.func_do_nothing,
@@ -124,7 +220,7 @@ class AstInterpreter(InterpreterBase):
'vcs_tag': self.func_do_nothing,
'add_languages': self.func_do_nothing,
'declare_dependency': self.func_do_nothing,
- 'files': self.func_do_nothing,
+ 'files': self.func_files,
'executable': self.func_do_nothing,
'static_library': self.func_do_nothing,
'shared_library': self.func_do_nothing,
@@ -133,9 +229,9 @@ class AstInterpreter(InterpreterBase):
'custom_target': self.func_do_nothing,
'run_target': self.func_do_nothing,
'subdir': self.func_subdir,
- 'set_variable': self.func_do_nothing,
- 'get_variable': self.func_do_nothing,
- 'unset_variable': self.func_do_nothing,
+ 'set_variable': self.func_set_variable,
+ 'get_variable': self.func_get_variable,
+ 'unset_variable': self.func_unset_variable,
'is_disabler': self.func_do_nothing,
'is_variable': self.func_do_nothing,
'disabler': self.func_do_nothing,
@@ -153,14 +249,14 @@ class AstInterpreter(InterpreterBase):
'debug': self.func_do_nothing,
})
- def _unholder_args(self, args: _T, kwargs: _V) -> T.Tuple[_T, _V]:
+ def _unholder_args(self, args: T.Any, kwargs: T.Any) -> T.Tuple[T.Any, T.Any]:
return args, kwargs
- def _holderify(self, res: _T) -> _T:
+ def _holderify(self, res: T.Any) -> T.Any:
return res
- def func_do_nothing(self, node: BaseNode, args: T.List[TYPE_var], kwargs: T.Dict[str, TYPE_var]) -> bool:
- return True
+ def func_do_nothing(self, node: BaseNode, args: T.List[TYPE_var], kwargs: T.Dict[str, TYPE_var]) -> UnknownValue:
+ return UnknownValue()
def load_root_meson_file(self) -> None:
super().load_root_meson_file()
@@ -182,24 +278,50 @@ class AstInterpreter(InterpreterBase):
buildfilename = os.path.join(subdir, environment.build_filename)
sys.stderr.write(f'Unable to find build file {buildfilename} --> Skipping\n')
- def method_call(self, node: BaseNode) -> bool:
- return True
+ def inner_method_call(self, obj: BaseNode, method_name: str, args: T.List[TYPE_var], kwargs: T.Dict[str, TYPE_var]) -> T.Any:
+ for arg in itertools.chain(args, kwargs.values()):
+ if isinstance(arg, UnknownValue):
+ return UnknownValue()
+
+ if isinstance(obj, str):
+ result = StringHolder(obj, T.cast('Interpreter', self)).method_call(method_name, args, kwargs)
+ elif isinstance(obj, bool):
+ result = BooleanHolder(obj, T.cast('Interpreter', self)).method_call(method_name, args, kwargs)
+ elif isinstance(obj, int):
+ result = IntegerHolder(obj, T.cast('Interpreter', self)).method_call(method_name, args, kwargs)
+ elif isinstance(obj, list):
+ result = ArrayHolder(obj, T.cast('Interpreter', self)).method_call(method_name, args, kwargs)
+ elif isinstance(obj, dict):
+ result = DictHolder(obj, T.cast('Interpreter', self)).method_call(method_name, args, kwargs)
+ else:
+ return UnknownValue()
+ return result
- def evaluate_fstring(self, node: mparser.StringNode) -> str:
- assert isinstance(node, mparser.StringNode)
- return node.value
+ def method_call(self, node: mparser.MethodNode) -> None:
+ invocable = node.source_object
+ self.evaluate_statement(invocable)
+ obj = self.node_to_runtime_value(invocable)
+ method_name = node.name.value
+ (args, kwargs) = self.reduce_arguments(node.args)
+ if is_disabled(args, kwargs):
+ res = Disabler()
+ else:
+ res = self.inner_method_call(obj, method_name, args, kwargs)
+ self.funcvals[node] = res
+
+ def evaluate_fstring(self, node: mparser.StringNode) -> None:
+ pass
- def evaluate_arraystatement(self, cur: mparser.ArrayNode) -> TYPE_var:
- return self.reduce_arguments(cur.args)[0]
+ def evaluate_arraystatement(self, cur: mparser.ArrayNode) -> None:
+ for arg in cur.args.arguments:
+ self.evaluate_statement(arg)
- def evaluate_arithmeticstatement(self, cur: ArithmeticNode) -> int:
+ def evaluate_arithmeticstatement(self, cur: ArithmeticNode) -> None:
self.evaluate_statement(cur.left)
self.evaluate_statement(cur.right)
- return 0
- def evaluate_uminusstatement(self, cur: UMinusNode) -> int:
+ def evaluate_uminusstatement(self, cur: UMinusNode) -> None:
self.evaluate_statement(cur.value)
- return 0
def evaluate_ternary(self, node: TernaryNode) -> None:
assert isinstance(node, TernaryNode)
@@ -207,42 +329,27 @@ class AstInterpreter(InterpreterBase):
self.evaluate_statement(node.trueblock)
self.evaluate_statement(node.falseblock)
- def evaluate_dictstatement(self, node: mparser.DictNode) -> TYPE_nkwargs:
- def resolve_key(node: mparser.BaseNode) -> str:
- if isinstance(node, mparser.StringNode):
- return node.value
- return '__AST_UNKNOWN__'
- arguments, kwargs = self.reduce_arguments(node.args, key_resolver=resolve_key)
- assert not arguments
- self.argument_depth += 1
- for key, value in kwargs.items():
- if isinstance(key, BaseNode):
- self.evaluate_statement(key)
- self.argument_depth -= 1
- return {}
-
- def evaluate_plusassign(self, node: PlusAssignmentNode) -> None:
- assert isinstance(node, PlusAssignmentNode)
- # Cheat by doing a reassignment
- self.assignments[node.var_name.value] = node.value # Save a reference to the value node
- if node.value.ast_id:
- self.reverse_assignment[node.value.ast_id] = node
- self.assign_vals[node.var_name.value] = self.evaluate_statement(node.value)
+ def evaluate_dictstatement(self, node: mparser.DictNode) -> None:
+ for k, v in node.args.kwargs.items():
+ self.evaluate_statement(k)
+ self.evaluate_statement(v)
- def evaluate_indexing(self, node: IndexNode) -> int:
- return 0
-
- def unknown_function_called(self, func_name: str) -> None:
- pass
+ def evaluate_indexing(self, node: IndexNode) -> None:
+ self.evaluate_statement(node.iobject)
+ self.evaluate_statement(node.index)
def reduce_arguments(
self,
args: mparser.ArgumentNode,
key_resolver: T.Callable[[mparser.BaseNode], str] = default_resolve_key,
duplicate_key_error: T.Optional[str] = None,
- ) -> T.Tuple[T.List[TYPE_var], TYPE_nkwargs]:
+ ) -> T.Tuple[T.List[T.Any], T.Any]:
+ for arg in args.arguments:
+ self.evaluate_statement(arg)
+ for value in args.kwargs.values():
+ self.evaluate_statement(value)
if isinstance(args, ArgumentNode):
- kwargs: T.Dict[str, TYPE_var] = {}
+ kwargs = {}
for key, val in args.kwargs.items():
kwargs[key_resolver(key)] = val
if args.incorrect_order():
@@ -251,139 +358,370 @@ class AstInterpreter(InterpreterBase):
else:
return self.flatten_args(args), {}
- def evaluate_comparison(self, node: ComparisonNode) -> bool:
+ def evaluate_comparison(self, node: ComparisonNode) -> None:
self.evaluate_statement(node.left)
self.evaluate_statement(node.right)
- return False
- def evaluate_andstatement(self, cur: AndNode) -> bool:
+ def evaluate_andstatement(self, cur: AndNode) -> None:
self.evaluate_statement(cur.left)
self.evaluate_statement(cur.right)
- return False
- def evaluate_orstatement(self, cur: OrNode) -> bool:
+ def evaluate_orstatement(self, cur: OrNode) -> None:
self.evaluate_statement(cur.left)
self.evaluate_statement(cur.right)
- return False
- def evaluate_notstatement(self, cur: NotNode) -> bool:
+ def evaluate_notstatement(self, cur: NotNode) -> None:
self.evaluate_statement(cur.value)
- return False
+
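+    # Returns the names of all variables that executing `node` could assign
+    # to. This is a purely syntactic scan; nothing is evaluated.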
+ def find_potential_writes(self, node: BaseNode) -> T.Set[str]:
+ if isinstance(node, mparser.ForeachClauseNode):
+ return {el.value for el in node.varnames} | self.find_potential_writes(node.block)
+ elif isinstance(node, mparser.CodeBlockNode):
+ ret = set()
+ for line in node.lines:
+ ret.update(self.find_potential_writes(line))
+ return ret
+ elif isinstance(node, (AssignmentNode, PlusAssignmentNode)):
+ return set([node.var_name.value]) | self.find_potential_writes(node.value)
+ elif isinstance(node, IdNode):
+ return set()
+ elif isinstance(node, ArrayNode):
+ ret = set()
+ for arg in node.args.arguments:
+ ret.update(self.find_potential_writes(arg))
+ return ret
+ elif isinstance(node, mparser.DictNode):
+ ret = set()
+ for k, v in node.args.kwargs.items():
+ ret.update(self.find_potential_writes(k))
+ ret.update(self.find_potential_writes(v))
+ return ret
+ elif isinstance(node, FunctionNode):
+ ret = set()
+ for arg in node.args.arguments:
+ ret.update(self.find_potential_writes(arg))
+ for arg in node.args.kwargs.values():
+ ret.update(self.find_potential_writes(arg))
+ return ret
+ elif isinstance(node, MethodNode):
+ ret = self.find_potential_writes(node.source_object)
+ for arg in node.args.arguments:
+ ret.update(self.find_potential_writes(arg))
+ for arg in node.args.kwargs.values():
+ ret.update(self.find_potential_writes(arg))
+ return ret
+ elif isinstance(node, ArithmeticNode):
+ return self.find_potential_writes(node.left) | self.find_potential_writes(node.right)
+ elif isinstance(node, (mparser.NumberNode, mparser.StringNode, mparser.BreakNode, mparser.BooleanNode, mparser.ContinueNode)):
+ return set()
+ elif isinstance(node, mparser.IfClauseNode):
+ if isinstance(node.elseblock, EmptyNode):
+ ret = set()
+ else:
+ ret = self.find_potential_writes(node.elseblock.block)
+ for i in node.ifs:
+ ret.update(self.find_potential_writes(i))
+ return ret
+ elif isinstance(node, mparser.IndexNode):
+ return self.find_potential_writes(node.iobject) | self.find_potential_writes(node.index)
+ elif isinstance(node, mparser.IfNode):
+ return self.find_potential_writes(node.condition) | self.find_potential_writes(node.block)
+ elif isinstance(node, (mparser.ComparisonNode, mparser.OrNode, mparser.AndNode)):
+ return self.find_potential_writes(node.left) | self.find_potential_writes(node.right)
+ elif isinstance(node, mparser.NotNode):
+ return self.find_potential_writes(node.value)
+ elif isinstance(node, mparser.TernaryNode):
+ return self.find_potential_writes(node.condition) | self.find_potential_writes(node.trueblock) | self.find_potential_writes(node.falseblock)
+ elif isinstance(node, mparser.UMinusNode):
+ return self.find_potential_writes(node.value)
+ elif isinstance(node, mparser.ParenthesizedNode):
+ return self.find_potential_writes(node.inner)
+ raise mesonlib.MesonBugException('Unhandled node type')
def evaluate_foreach(self, node: ForeachClauseNode) -> None:
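+        # Anything written inside the loop body may hold any value afterwards,
+        # since the loop may run zero or more times, so record every such
+        # variable as unknown both before and after evaluating the block.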
+ asses = self.find_potential_writes(node)
+ for ass in asses:
+ self.cur_assignments[ass].append((self.nesting.copy(), UnknownValue()))
try:
self.evaluate_codeblock(node.block)
except ContinueRequest:
pass
except BreakRequest:
pass
+ for ass in asses:
+ self.cur_assignments[ass].append((self.nesting.copy(), UnknownValue())) # In case the foreach loops 0 times.
def evaluate_if(self, node: IfClauseNode) -> None:
+ self.nesting.append(0)
for i in node.ifs:
self.evaluate_codeblock(i.block)
+ self.nesting[-1] += 1
if not isinstance(node.elseblock, EmptyNode):
self.evaluate_codeblock(node.elseblock.block)
-
- def get_variable(self, varname: str) -> int:
- return 0
-
- def assignment(self, node: AssignmentNode) -> None:
- assert isinstance(node, AssignmentNode)
- self.assignments[node.var_name.value] = node.value # Save a reference to the value node
- if node.value.ast_id:
- self.reverse_assignment[node.value.ast_id] = node
- self.assign_vals[node.var_name.value] = self.evaluate_statement(node.value) # Evaluate the value just in case
-
- def resolve_node(self, node: BaseNode, include_unknown_args: bool = False, id_loop_detect: T.Optional[T.List[str]] = None) -> T.Optional[T.Any]:
- def quick_resolve(n: BaseNode, loop_detect: T.Optional[T.List[str]] = None) -> T.Any:
- if loop_detect is None:
- loop_detect = []
- if isinstance(n, IdNode):
- assert isinstance(n.value, str)
- if n.value in loop_detect or n.value not in self.assignments:
- return []
- return quick_resolve(self.assignments[n.value], loop_detect = loop_detect + [n.value])
- elif isinstance(n, ElementaryNode):
- return n.value
+ self.nesting.pop()
+ for var_name in self.cur_assignments:
+ potential_values = []
+ oldval = self.get_cur_value_if_defined(var_name)
+ if not isinstance(oldval, UndefinedVariable):
+ potential_values.append(oldval)
+ for nesting, value in self.cur_assignments[var_name]:
+ if len(nesting) > len(self.nesting):
+ potential_values.append(value)
+ self.cur_assignments[var_name] = [(nesting, v) for (nesting, v) in self.cur_assignments[var_name] if len(nesting) <= len(self.nesting)]
+ if len(potential_values) > 1 or (len(potential_values) > 0 and isinstance(oldval, UndefinedVariable)):
+ uv = UnknownValue()
+ for pv in potential_values:
+ self.dataflow_dag.add_edge(pv, uv)
+ self.cur_assignments[var_name].append((self.nesting.copy(), uv))
+
+ def func_files(self, node: BaseNode, args: T.List[TYPE_var], kwargs: T.Dict[str, TYPE_var]) -> T.Any:
+ ret: T.List[T.Union[IntrospectionFile, UnknownValue]] = []
+ for arg in args:
+ if isinstance(arg, str):
+ ret.append(IntrospectionFile(self.subdir, arg))
+ elif isinstance(arg, UnknownValue):
+ ret.append(UnknownValue())
else:
- return n
-
- if id_loop_detect is None:
- id_loop_detect = []
- result = None
-
- if not isinstance(node, BaseNode):
- return None
-
- assert node.ast_id
- if node.ast_id in id_loop_detect:
- return None # Loop detected
- id_loop_detect += [node.ast_id]
-
- # Try to evaluate the value of the node
- if isinstance(node, IdNode):
- result = quick_resolve(node)
-
- elif isinstance(node, ElementaryNode):
- result = node.value
-
- elif isinstance(node, NotNode):
- result = self.resolve_node(node.value, include_unknown_args, id_loop_detect)
- if isinstance(result, bool):
- result = not result
-
+ raise TypeError
+ return ret
+
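+    # An assignment recorded with nesting `n` is still visible iff `n` is a
+    # prefix of the current nesting, i.e. we are still inside the same branch
+    # of every if/else that enclosed the assignment.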
+ def get_cur_value_if_defined(self, var_name: str) -> T.Union[BaseNode, UnknownValue, UndefinedVariable]:
+ if var_name in {'meson', 'host_machine', 'build_machine', 'target_machine'}:
+ return UnknownValue()
+ ret: T.Union[BaseNode, UnknownValue, UndefinedVariable] = UndefinedVariable()
+ for nesting, value in reversed(self.cur_assignments[var_name]):
+ if len(self.nesting) >= len(nesting) and self.nesting[:len(nesting)] == nesting:
+ ret = value
+ break
+ if isinstance(ret, UndefinedVariable) and self.tainted:
+ return UnknownValue()
+ return ret
+
+ def get_cur_value(self, var_name: str) -> T.Union[BaseNode, UnknownValue]:
+ ret = self.get_cur_value_if_defined(var_name)
+ if isinstance(ret, UndefinedVariable):
+ path = mlog.get_relative_path(Path(self.current_node.filename), Path(os.getcwd()))
+ mlog.warning(f"{path}:{self.current_node.lineno}:{self.current_node.colno} will always crash if executed, since a variable named `{var_name}` is not defined")
+ # We could add more advanced analysis of code referencing undefined
+ # variables, but it is probably not worth the effort and the
+ # complexity. So we do the simplest thing, returning an
+ # UnknownValue.
+ return UnknownValue()
+ return ret
+
+    # The function `node_to_runtime_value` takes a node of the AST as an
+ # argument and tries to return the same thing that would be passed to e.g.
+ # `func_message` if you put `message(node)` in your `meson.build` file and
+ # run `meson setup`. If this is not possible, `UnknownValue()` is returned.
+    # There are three reasons why this is sometimes impossible:
+ # 1. Because the meson rewriter is imperfect and has not implemented everything yet
+ # 2. Because the value is different on different machines, example:
+ # ```meson
+ # node = somedep.found()
+ # message(node)
+ # ```
+ # will print `true` on some machines and `false` on others, so
+ # `node_to_runtime_value` does not know whether to return `true` or
+ # `false` and will return `UnknownValue()`.
+ # 3. Here:
+ # ```meson
+ # foreach x : [1, 2]
+ # node = x
+ # message(node)
+ # endforeach
+ # ```
+ # `node_to_runtime_value` does not know whether to return `1` or `2` and
+ # will return `UnknownValue()`.
+ #
+ # If you have something like
+ # ```
+ # node = [123, somedep.found()]
+ # ```
+ # `node_to_runtime_value` will return `[123, UnknownValue()]`.
+ def node_to_runtime_value(self, node: T.Union[UnknownValue, BaseNode, TYPE_var]) -> T.Any:
+        if isinstance(node, (mparser.BooleanNode, mparser.NumberNode)):
+ return node.value
+ elif isinstance(node, mparser.StringNode):
+ if node.is_fstring:
+ return UnknownValue()
+ else:
+ return node.value
+ elif isinstance(node, list):
+ return [self.node_to_runtime_value(x) for x in node]
elif isinstance(node, ArrayNode):
- result = node.args.arguments.copy()
+ return [self.node_to_runtime_value(x) for x in node.args.arguments]
+ elif isinstance(node, mparser.DictNode):
+ return {self.node_to_runtime_value(k): self.node_to_runtime_value(v) for k, v in node.args.kwargs.items()}
+ elif isinstance(node, IdNode):
+ assert len(self.dataflow_dag.tgt_to_srcs[node]) == 1
+ val = next(iter(self.dataflow_dag.tgt_to_srcs[node]))
+ return self.node_to_runtime_value(val)
+ elif isinstance(node, (MethodNode, FunctionNode)):
+ funcval = self.funcvals[node]
+ if isinstance(funcval, (dict, str)):
+ return funcval
+ else:
+ return self.node_to_runtime_value(funcval)
+ elif isinstance(node, ArithmeticNode):
+ left = self.node_to_runtime_value(node.left)
+ right = self.node_to_runtime_value(node.right)
+ if isinstance(left, list) and isinstance(right, UnknownValue):
+ return left + [right]
+ if isinstance(right, list) and isinstance(left, UnknownValue):
+ return [left] + right
+ if isinstance(left, UnknownValue) or isinstance(right, UnknownValue):
+ return UnknownValue()
+ if node.operation == 'add':
+ if isinstance(left, dict) and isinstance(right, dict):
+ ret = left.copy()
+ for k, v in right.items():
+ ret[k] = v
+ return ret
+ if isinstance(left, list):
+ if not isinstance(right, list):
+ right = [right]
+ return left + right
+ return left + right
+ elif node.operation == 'sub':
+ return left - right
+ elif node.operation == 'mul':
+ return left * right
+ elif node.operation == 'div':
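+                # In the Meson language, `/` on two strings joins paths.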
+ if isinstance(left, int) and isinstance(right, int):
+ return left // right
+ elif isinstance(left, str) and isinstance(right, str):
+ return os.path.join(left, right).replace('\\', '/')
+ elif node.operation == 'mod':
+ if isinstance(left, int) and isinstance(right, int):
+ return left % right
+ elif isinstance(node, (UnknownValue, IntrospectionBuildTarget, IntrospectionFile, IntrospectionDependency, str, bool, int)):
+ return node
+ elif isinstance(node, mparser.IndexNode):
+ iobject = self.node_to_runtime_value(node.iobject)
+ index = self.node_to_runtime_value(node.index)
+ if isinstance(iobject, UnknownValue) or isinstance(index, UnknownValue):
+ return UnknownValue()
+ return iobject[index]
+ elif isinstance(node, mparser.ComparisonNode):
+ left = self.node_to_runtime_value(node.left)
+ right = self.node_to_runtime_value(node.right)
+ if isinstance(left, UnknownValue) or isinstance(right, UnknownValue):
+ return UnknownValue()
+ if node.ctype == '==':
+ return left == right
+ elif node.ctype == '!=':
+ return left != right
+ elif node.ctype == 'in':
+ return left in right
+ elif node.ctype == 'notin':
+ return left not in right
+ elif isinstance(node, mparser.TernaryNode):
+ cond = self.node_to_runtime_value(node.condition)
+ if isinstance(cond, UnknownValue):
+ return UnknownValue()
+ if cond is True:
+ return self.node_to_runtime_value(node.trueblock)
+ if cond is False:
+ return self.node_to_runtime_value(node.falseblock)
+ elif isinstance(node, mparser.OrNode):
+ left = self.node_to_runtime_value(node.left)
+ right = self.node_to_runtime_value(node.right)
+ if isinstance(left, UnknownValue) or isinstance(right, UnknownValue):
+ return UnknownValue()
+ return left or right
+ elif isinstance(node, mparser.AndNode):
+ left = self.node_to_runtime_value(node.left)
+ right = self.node_to_runtime_value(node.right)
+ if isinstance(left, UnknownValue) or isinstance(right, UnknownValue):
+ return UnknownValue()
+ return left and right
+ elif isinstance(node, mparser.UMinusNode):
+ val = self.node_to_runtime_value(node.value)
+ if isinstance(val, UnknownValue):
+ return val
+ if isinstance(val, (int, float)):
+ return -val
+ elif isinstance(node, mparser.NotNode):
+ val = self.node_to_runtime_value(node.value)
+ if isinstance(val, UnknownValue):
+ return val
+ if isinstance(val, bool):
+ return not val
+ elif isinstance(node, mparser.ParenthesizedNode):
+ return self.node_to_runtime_value(node.inner)
+ raise mesonlib.MesonBugException('Unhandled node type')
- elif isinstance(node, ArgumentNode):
- result = node.arguments.copy()
+ def assignment(self, node: AssignmentNode) -> None:
+ assert isinstance(node, AssignmentNode)
+ self.evaluate_statement(node.value)
+ self.cur_assignments[node.var_name.value].append((self.nesting.copy(), node.value))
+ self.all_assignment_nodes[node.var_name.value].append(node)
- elif isinstance(node, ArithmeticNode):
- if node.operation != 'add':
- return None # Only handle string and array concats
- l = self.resolve_node(node.left, include_unknown_args, id_loop_detect)
- r = self.resolve_node(node.right, include_unknown_args, id_loop_detect)
- if isinstance(l, str) and isinstance(r, str):
- result = l + r # String concatenation detected
+ def evaluate_plusassign(self, node: PlusAssignmentNode) -> None:
+ assert isinstance(node, PlusAssignmentNode)
+ self.evaluate_statement(node.value)
+ lhs = self.get_cur_value(node.var_name.value)
+ newval: T.Union[UnknownValue, ArithmeticNode]
+ if isinstance(lhs, UnknownValue):
+ newval = UnknownValue()
+ else:
+ newval = mparser.ArithmeticNode(operation='add', left=lhs, operator=_symbol('+'), right=node.value)
+ self.cur_assignments[node.var_name.value].append((self.nesting.copy(), newval))
+ self.all_assignment_nodes[node.var_name.value].append(node)
+
+ self.dataflow_dag.add_edge(lhs, newval)
+ self.dataflow_dag.add_edge(node.value, newval)
+
+ def func_set_variable(self, node: BaseNode, args: T.List[TYPE_var], kwargs: T.Dict[str, TYPE_var]) -> None:
+ assert isinstance(node, FunctionNode)
+ if bool(node.args.kwargs):
+ raise InvalidArguments('set_variable accepts no keyword arguments')
+ if len(node.args.arguments) != 2:
+ raise InvalidArguments('set_variable requires exactly two positional arguments')
+ var_name = args[0]
+ value = node.args.arguments[1]
+ if isinstance(var_name, UnknownValue):
+ self.evaluate_statement(value)
+ self.tainted = True
+ return
+ assert isinstance(var_name, str)
+ equiv = AssignmentNode(var_name=IdNode(Token('', '', 0, 0, 0, (0, 0), var_name)), value=value, operator=_symbol('='))
+ equiv.ast_id = str(id(equiv))
+ self.evaluate_statement(equiv)
+
+ def func_get_variable(self, node: BaseNode, args: T.List[TYPE_var], kwargs: T.Dict[str, TYPE_var]) -> T.Any:
+ assert isinstance(node, FunctionNode)
+ var_name = args[0]
+ assert isinstance(var_name, str)
+ val = self.get_cur_value(var_name)
+ self.dataflow_dag.add_edge(val, node)
+ return val
+
+ def func_unset_variable(self, node: BaseNode, args: T.List[TYPE_var], kwargs: T.Dict[str, TYPE_var]) -> None:
+ assert isinstance(node, FunctionNode)
+ if bool(node.args.kwargs):
+ raise InvalidArguments('unset_variable accepts no keyword arguments')
+ if len(node.args.arguments) != 1:
+            raise InvalidArguments('unset_variable requires exactly one positional argument')
+ var_name = args[0]
+ assert isinstance(var_name, str)
+ self.cur_assignments[var_name].append((self.nesting.copy(), node))
+
+ def nodes_to_pretty_filelist(self, root_path: Path, subdir: str, nodes: T.List[BaseNode]) -> T.List[T.Union[str, UnknownValue]]:
+ def src_to_abs(src: T.Union[str, IntrospectionFile, UnknownValue]) -> T.Union[str, UnknownValue]:
+ if isinstance(src, str):
+ return os.path.normpath(os.path.join(root_path, subdir, src))
+ elif isinstance(src, IntrospectionFile):
+ return str(src.to_abs_path(root_path))
+ elif isinstance(src, UnknownValue):
+ return src
else:
- result = self.flatten_args(l, include_unknown_args, id_loop_detect) + self.flatten_args(r, include_unknown_args, id_loop_detect)
-
- elif isinstance(node, MethodNode):
- src = quick_resolve(node.source_object)
- margs = self.flatten_args(node.args.arguments, include_unknown_args, id_loop_detect)
- mkwargs: T.Dict[str, TYPE_var] = {}
- method_name = node.name.value
- try:
- if isinstance(src, str):
- result = StringHolder(src, T.cast('Interpreter', self)).method_call(method_name, margs, mkwargs)
- elif isinstance(src, bool):
- result = BooleanHolder(src, T.cast('Interpreter', self)).method_call(method_name, margs, mkwargs)
- elif isinstance(src, int):
- result = IntegerHolder(src, T.cast('Interpreter', self)).method_call(method_name, margs, mkwargs)
- elif isinstance(src, list):
- result = ArrayHolder(src, T.cast('Interpreter', self)).method_call(method_name, margs, mkwargs)
- elif isinstance(src, dict):
- result = DictHolder(src, T.cast('Interpreter', self)).method_call(method_name, margs, mkwargs)
- except mesonlib.MesonException:
- return None
-
- # Ensure that the result is fully resolved (no more nodes)
- if isinstance(result, BaseNode):
- result = self.resolve_node(result, include_unknown_args, id_loop_detect)
- elif isinstance(result, list):
- new_res: T.List[TYPE_var] = []
- for i in result:
- if isinstance(i, BaseNode):
- resolved = self.resolve_node(i, include_unknown_args, id_loop_detect)
- if resolved is not None:
- new_res += self.flatten_args(resolved, include_unknown_args, id_loop_detect)
- else:
- new_res += [i]
- result = new_res
+ raise TypeError
- return result
+ rtvals: T.List[T.Any] = flatten([self.node_to_runtime_value(sn) for sn in nodes])
+ return [src_to_abs(x) for x in rtvals]
- def flatten_args(self, args_raw: T.Union[TYPE_var, T.Sequence[TYPE_var]], include_unknown_args: bool = False, id_loop_detect: T.Optional[T.List[str]] = None) -> T.List[TYPE_var]:
+ def flatten_args(self, args_raw: T.Union[TYPE_nvar, T.Sequence[TYPE_nvar]], include_unknown_args: bool = False) -> T.List[TYPE_var]:
# Make sure we are always dealing with lists
if isinstance(args_raw, list):
args = args_raw
@@ -395,14 +733,38 @@ class AstInterpreter(InterpreterBase):
# Resolve the contents of args
for i in args:
if isinstance(i, BaseNode):
- resolved = self.resolve_node(i, include_unknown_args, id_loop_detect)
+ resolved = self.node_to_runtime_value(i)
if resolved is not None:
if not isinstance(resolved, list):
resolved = [resolved]
flattened_args += resolved
- elif isinstance(i, (str, bool, int, float)) or include_unknown_args:
+ elif isinstance(i, (str, bool, int, float, UnknownValue, IntrospectionFile)) or include_unknown_args:
flattened_args += [i]
+ else:
+ raise NotImplementedError
return flattened_args
def evaluate_testcase(self, node: TestCaseClauseNode) -> Disabler | None:
return Disabler(subproject=self.subproject)
+
+ def evaluate_statement(self, cur: mparser.BaseNode) -> T.Optional[InterpreterObject]:
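+        # Before evaluating `cur`, record a dataflow edge from every child
+        # node into `cur`, so node_to_runtime_value can later walk the DAG.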
+ if hasattr(cur, 'args'):
+ for arg in cur.args.arguments:
+ self.dataflow_dag.add_edge(arg, cur)
+ for k, v in cur.args.kwargs.items():
+ self.dataflow_dag.add_edge(v, cur)
+ for attr in ['source_object', 'left', 'right', 'items', 'iobject', 'index', 'condition']:
+ if hasattr(cur, attr):
+ assert isinstance(getattr(cur, attr), mparser.BaseNode)
+ self.dataflow_dag.add_edge(getattr(cur, attr), cur)
+ if isinstance(cur, mparser.IdNode):
+ self.dataflow_dag.add_edge(self.get_cur_value(cur.value), cur)
+ return None
+ else:
+ return super().evaluate_statement(cur)
+
+ def function_call(self, node: mparser.FunctionNode) -> T.Any:
+ ret = super().function_call(node)
+ if ret is not None:
+ self.funcvals[node] = ret
+ return ret
diff --git a/mesonbuild/ast/introspection.py b/mesonbuild/ast/introspection.py
index 4eb3fec..decce4b 100644
--- a/mesonbuild/ast/introspection.py
+++ b/mesonbuild/ast/introspection.py
@@ -6,19 +6,17 @@
# or an interpreter-based tool
from __future__ import annotations
-import copy
import os
import typing as T
from .. import compilers, environment, mesonlib, options
-from .. import coredata as cdata
from ..build import Executable, Jar, SharedLibrary, SharedModule, StaticLibrary
from ..compilers import detect_compiler_for
-from ..interpreterbase import InvalidArguments, SubProject
+from ..interpreterbase import InvalidArguments, SubProject, UnknownValue
from ..mesonlib import MachineChoice
from ..options import OptionKey
-from ..mparser import BaseNode, ArithmeticNode, ArrayNode, ElementaryNode, IdNode, FunctionNode, StringNode
-from .interpreter import AstInterpreter
+from ..mparser import BaseNode, ArrayNode, ElementaryNode, IdNode, FunctionNode, StringNode
+from .interpreter import AstInterpreter, IntrospectionBuildTarget, IntrospectionDependency
if T.TYPE_CHECKING:
from ..build import BuildTarget
@@ -44,8 +42,11 @@ class IntrospectionHelper:
return NotImplemented
class IntrospectionInterpreter(AstInterpreter):
- # Interpreter to detect the options without a build directory
- # Most of the code is stolen from interpreter.Interpreter
+    # If you run `meson setup ...`, the `Interpreter` class walks over the AST.
+    # If you run `meson rewrite ...` or `meson introspect meson.build ...`,
+    # the `AstInterpreter` class walks over the AST.
+    # It works without a build directory.
+    # Most of the code is stolen from interpreter.Interpreter.
def __init__(self,
source_root: str,
subdir: str,
@@ -61,11 +62,10 @@ class IntrospectionInterpreter(AstInterpreter):
self.cross_file = cross_file
self.backend = backend
- self.default_options = {OptionKey('backend'): self.backend}
self.project_data: T.Dict[str, T.Any] = {}
- self.targets: T.List[T.Dict[str, T.Any]] = []
- self.dependencies: T.List[T.Dict[str, T.Any]] = []
- self.project_node: BaseNode = None
+ self.targets: T.List[IntrospectionBuildTarget] = []
+ self.dependencies: T.List[IntrospectionDependency] = []
+ self.project_node: FunctionNode = None
self.funcs.update({
'add_languages': self.func_add_languages,
@@ -83,6 +83,7 @@ class IntrospectionInterpreter(AstInterpreter):
def func_project(self, node: BaseNode, args: T.List[TYPE_var], kwargs: T.Dict[str, TYPE_var]) -> None:
if self.project_node:
raise InvalidArguments('Second call to project()')
+ assert isinstance(node, FunctionNode)
self.project_node = node
if len(args) < 1:
raise InvalidArguments('Not enough arguments to project(). Needs at least the project name.')
@@ -99,6 +100,16 @@ class IntrospectionInterpreter(AstInterpreter):
return [node.value]
return None
+ def create_options_dict(options: T.List[str], subproject: str = '') -> T.Mapping[OptionKey, str]:
+ result: T.MutableMapping[OptionKey, str] = {}
+ for o in options:
+ try:
+ (key, value) = o.split('=', 1)
+ except ValueError:
+                raise mesonlib.MesonException(f'Option {o!r} must have a value separated by an equals sign.')
+ result[OptionKey(key)] = value
+ return result
+
proj_name = args[0]
proj_vers = kwargs.get('version', 'undefined')
if isinstance(proj_vers, ElementaryNode):
@@ -114,25 +125,6 @@ class IntrospectionInterpreter(AstInterpreter):
self._load_option_file()
- def_opts = self.flatten_args(kwargs.get('default_options', []))
- _project_default_options = mesonlib.stringlistify(def_opts)
- string_dict = cdata.create_options_dict(_project_default_options, self.subproject)
- self.project_default_options = {OptionKey(s): v for s, v in string_dict.items()}
- self.default_options.update(self.project_default_options)
- if self.environment.first_invocation or (self.subproject != '' and self.subproject not in self.coredata.initialized_subprojects):
- if self.subproject == '':
- self.coredata.optstore.initialize_from_top_level_project_call(
- T.cast('T.Dict[T.Union[OptionKey, str], str]', string_dict),
- {}, # TODO: not handled by this Interpreter.
- self.environment.options)
- else:
- self.coredata.optstore.initialize_from_subproject_call(
- self.subproject,
- {}, # TODO: this isn't handled by the introspection interpreter...
- T.cast('T.Dict[T.Union[OptionKey, str], str]', string_dict),
- {}) # TODO: this isn't handled by the introspection interpreter...
- self.coredata.initialized_subprojects.add(self.subproject)
-
if not self.is_subproject() and 'subproject_dir' in kwargs:
spdirname = kwargs['subproject_dir']
if isinstance(spdirname, StringNode):
@@ -164,10 +156,10 @@ class IntrospectionInterpreter(AstInterpreter):
except (mesonlib.MesonException, RuntimeError):
pass
- def func_add_languages(self, node: BaseNode, args: T.List[TYPE_var], kwargs: T.Dict[str, TYPE_var]) -> None:
+ def func_add_languages(self, node: BaseNode, args: T.List[TYPE_var], kwargs: T.Dict[str, TYPE_var]) -> UnknownValue:
kwargs = self.flatten_kwargs(kwargs)
required = kwargs.get('required', True)
- assert isinstance(required, (bool, options.UserFeatureOption)), 'for mypy'
+ assert isinstance(required, (bool, options.UserFeatureOption, UnknownValue)), 'for mypy'
if isinstance(required, options.UserFeatureOption):
required = required.is_enabled()
if 'native' in kwargs:
@@ -176,8 +168,9 @@ class IntrospectionInterpreter(AstInterpreter):
else:
for for_machine in [MachineChoice.BUILD, MachineChoice.HOST]:
self._add_languages(args, required, for_machine)
+ return UnknownValue()
- def _add_languages(self, raw_langs: T.List[TYPE_var], required: bool, for_machine: MachineChoice) -> None:
+ def _add_languages(self, raw_langs: T.List[TYPE_var], required: T.Union[bool, UnknownValue], for_machine: MachineChoice) -> None:
langs: T.List[str] = []
for l in self.flatten_args(raw_langs):
if isinstance(l, str):
@@ -192,48 +185,47 @@ class IntrospectionInterpreter(AstInterpreter):
comp = detect_compiler_for(self.environment, lang, for_machine, True, self.subproject)
except mesonlib.MesonException:
# do we even care about introspecting this language?
- if required:
+ if isinstance(required, UnknownValue) or required:
raise
else:
continue
- if self.subproject:
- options = {}
- for k in comp.get_options():
- v = copy.copy(self.coredata.optstore.get_value_object(k))
- k = k.evolve(subproject=self.subproject)
- options[k] = v
- self.coredata.add_compiler_options(options, lang, for_machine, self.environment, self.subproject)
-
- def func_dependency(self, node: BaseNode, args: T.List[TYPE_var], kwargs: T.Dict[str, TYPE_var]) -> None:
+ if comp:
+ self.coredata.process_compiler_options(lang, comp, self.subproject)
+
+ def func_dependency(self, node: BaseNode, args: T.List[TYPE_var], kwargs: T.Dict[str, TYPE_var]) -> T.Optional[IntrospectionDependency]:
+ assert isinstance(node, FunctionNode)
args = self.flatten_args(args)
kwargs = self.flatten_kwargs(kwargs)
if not args:
- return
+ return None
name = args[0]
+ assert isinstance(name, (str, UnknownValue))
has_fallback = 'fallback' in kwargs
required = kwargs.get('required', True)
version = kwargs.get('version', [])
- if not isinstance(version, list):
- version = [version]
- if isinstance(required, ElementaryNode):
- required = required.value
- if not isinstance(required, bool):
- required = False
- self.dependencies += [{
- 'name': name,
- 'required': required,
- 'version': version,
- 'has_fallback': has_fallback,
- 'conditional': node.condition_level > 0,
- 'node': node
- }]
-
- def build_target(self, node: BaseNode, args: T.List[TYPE_var], kwargs_raw: T.Dict[str, TYPE_var], targetclass: T.Type[BuildTarget]) -> T.Optional[T.Dict[str, T.Any]]:
+ if not isinstance(version, UnknownValue):
+ if not isinstance(version, list):
+ version = [version]
+ assert all(isinstance(el, str) for el in version)
+ version = T.cast(T.List[str], version)
+ assert isinstance(required, (bool, UnknownValue))
+ newdep = IntrospectionDependency(
+ name=name,
+ required=required,
+ version=version,
+ has_fallback=has_fallback,
+ conditional=node.condition_level > 0,
+ node=node)
+ self.dependencies += [newdep]
+ return newdep
+
+ def build_target(self, node: BaseNode, args: T.List[TYPE_var], kwargs_raw: T.Dict[str, TYPE_var], targetclass: T.Type[BuildTarget]) -> T.Union[IntrospectionBuildTarget, UnknownValue]:
+ assert isinstance(node, FunctionNode)
args = self.flatten_args(args)
if not args or not isinstance(args[0], str):
- return None
+ return UnknownValue()
name = args[0]
- srcqueue = [node]
+ srcqueue: T.List[BaseNode] = [node]
extra_queue = []
# Process the sources BEFORE flattening the kwargs, to preserve the original nodes
@@ -245,43 +237,23 @@ class IntrospectionInterpreter(AstInterpreter):
kwargs = self.flatten_kwargs(kwargs_raw, True)
- def traverse_nodes(inqueue: T.List[BaseNode]) -> T.List[BaseNode]:
- res: T.List[BaseNode] = []
- while inqueue:
- curr = inqueue.pop(0)
- arg_node = None
- assert isinstance(curr, BaseNode)
- if isinstance(curr, FunctionNode):
- arg_node = curr.args
- elif isinstance(curr, ArrayNode):
- arg_node = curr.args
- elif isinstance(curr, IdNode):
- # Try to resolve the ID and append the node to the queue
- assert isinstance(curr.value, str)
- var_name = curr.value
- if var_name in self.assignments:
- tmp_node = self.assignments[var_name]
- if isinstance(tmp_node, (ArrayNode, IdNode, FunctionNode)):
- inqueue += [tmp_node]
- elif isinstance(curr, ArithmeticNode):
- inqueue += [curr.left, curr.right]
- if arg_node is None:
- continue
- arg_nodes = arg_node.arguments.copy()
- # Pop the first element if the function is a build target function
- if isinstance(curr, FunctionNode) and curr.func_name.value in BUILD_TARGET_FUNCTIONS:
- arg_nodes.pop(0)
- elementary_nodes = [x for x in arg_nodes if isinstance(x, (str, StringNode))]
- inqueue += [x for x in arg_nodes if isinstance(x, (FunctionNode, ArrayNode, IdNode, ArithmeticNode))]
- if elementary_nodes:
- res += [curr]
- return res
-
- source_nodes = traverse_nodes(srcqueue)
- extraf_nodes = traverse_nodes(extra_queue)
+ oldlen = len(node.args.arguments)
+ source_nodes = node.args.arguments[1:]
+ for k, v in node.args.kwargs.items():
+ assert isinstance(k, IdNode)
+ if k.value == 'sources':
+ source_nodes.append(v)
+ assert oldlen == len(node.args.arguments)
+
+ extraf_nodes = None
+ for k, v in node.args.kwargs.items():
+ assert isinstance(k, IdNode)
+ if k.value == 'extra_files':
+ assert extraf_nodes is None
+ extraf_nodes = v
# Make sure nothing can crash when creating the build class
- kwargs_reduced = {k: v for k, v in kwargs.items() if k in targetclass.known_kwargs and k in {'install', 'build_by_default', 'build_always'}}
+ kwargs_reduced = {k: v for k, v in kwargs.items() if k in targetclass.known_kwargs and k in {'install', 'build_by_default', 'build_always', 'name_prefix'}}
kwargs_reduced = {k: v.value if isinstance(v, ElementaryNode) else v for k, v in kwargs_reduced.items()}
kwargs_reduced = {k: v for k, v in kwargs_reduced.items() if not isinstance(v, BaseNode)}
for_machine = MachineChoice.BUILD if kwargs.get('native', False) else MachineChoice.HOST
@@ -293,27 +265,26 @@ class IntrospectionInterpreter(AstInterpreter):
self.environment, self.coredata.compilers[for_machine], kwargs_reduced)
target.process_compilers_late()
- new_target = {
- 'name': target.get_basename(),
- 'machine': target.for_machine.get_lower_case_name(),
- 'id': target.get_id(),
- 'type': target.get_typename(),
- 'defined_in': os.path.normpath(os.path.join(self.source_root, self.subdir, environment.build_filename)),
- 'subdir': self.subdir,
- 'build_by_default': target.build_by_default,
- 'installed': target.should_install(),
- 'outputs': target.get_outputs(),
- 'sources': source_nodes,
- 'extra_files': extraf_nodes,
- 'kwargs': kwargs,
- 'node': node,
- }
+ new_target = IntrospectionBuildTarget(
+ name=target.get_basename(),
+ machine=target.for_machine.get_lower_case_name(),
+ id=target.get_id(),
+ typename=target.get_typename(),
+ defined_in=os.path.normpath(os.path.join(self.source_root, self.subdir, environment.build_filename)),
+ subdir=self.subdir,
+ build_by_default=target.build_by_default,
+ installed=target.should_install(),
+ outputs=target.get_outputs(),
+ source_nodes=source_nodes,
+ extra_files=extraf_nodes,
+ kwargs=kwargs,
+ node=node)
self.targets += [new_target]
return new_target
- def build_library(self, node: BaseNode, args: T.List[TYPE_var], kwargs: T.Dict[str, TYPE_var]) -> T.Optional[T.Dict[str, T.Any]]:
- default_library = self.coredata.optstore.get_value_for(OptionKey('default_library'))
+ def build_library(self, node: BaseNode, args: T.List[TYPE_var], kwargs: T.Dict[str, TYPE_var]) -> T.Union[IntrospectionBuildTarget, UnknownValue]:
+ default_library = self.coredata.optstore.get_value_for(OptionKey('default_library', subproject=self.subproject))
if default_library == 'shared':
return self.build_target(node, args, kwargs, SharedLibrary)
elif default_library == 'static':
@@ -322,28 +293,28 @@ class IntrospectionInterpreter(AstInterpreter):
return self.build_target(node, args, kwargs, SharedLibrary)
return None
- def func_executable(self, node: BaseNode, args: T.List[TYPE_var], kwargs: T.Dict[str, TYPE_var]) -> T.Optional[T.Dict[str, T.Any]]:
+ def func_executable(self, node: BaseNode, args: T.List[TYPE_var], kwargs: T.Dict[str, TYPE_var]) -> T.Union[IntrospectionBuildTarget, UnknownValue]:
return self.build_target(node, args, kwargs, Executable)
- def func_static_lib(self, node: BaseNode, args: T.List[TYPE_var], kwargs: T.Dict[str, TYPE_var]) -> T.Optional[T.Dict[str, T.Any]]:
+ def func_static_lib(self, node: BaseNode, args: T.List[TYPE_var], kwargs: T.Dict[str, TYPE_var]) -> T.Union[IntrospectionBuildTarget, UnknownValue]:
return self.build_target(node, args, kwargs, StaticLibrary)
- def func_shared_lib(self, node: BaseNode, args: T.List[TYPE_var], kwargs: T.Dict[str, TYPE_var]) -> T.Optional[T.Dict[str, T.Any]]:
+ def func_shared_lib(self, node: BaseNode, args: T.List[TYPE_var], kwargs: T.Dict[str, TYPE_var]) -> T.Union[IntrospectionBuildTarget, UnknownValue]:
return self.build_target(node, args, kwargs, SharedLibrary)
- def func_both_lib(self, node: BaseNode, args: T.List[TYPE_var], kwargs: T.Dict[str, TYPE_var]) -> T.Optional[T.Dict[str, T.Any]]:
+ def func_both_lib(self, node: BaseNode, args: T.List[TYPE_var], kwargs: T.Dict[str, TYPE_var]) -> T.Union[IntrospectionBuildTarget, UnknownValue]:
return self.build_target(node, args, kwargs, SharedLibrary)
- def func_shared_module(self, node: BaseNode, args: T.List[TYPE_var], kwargs: T.Dict[str, TYPE_var]) -> T.Optional[T.Dict[str, T.Any]]:
+ def func_shared_module(self, node: BaseNode, args: T.List[TYPE_var], kwargs: T.Dict[str, TYPE_var]) -> T.Union[IntrospectionBuildTarget, UnknownValue]:
return self.build_target(node, args, kwargs, SharedModule)
- def func_library(self, node: BaseNode, args: T.List[TYPE_var], kwargs: T.Dict[str, TYPE_var]) -> T.Optional[T.Dict[str, T.Any]]:
+ def func_library(self, node: BaseNode, args: T.List[TYPE_var], kwargs: T.Dict[str, TYPE_var]) -> T.Union[IntrospectionBuildTarget, UnknownValue]:
return self.build_library(node, args, kwargs)
- def func_jar(self, node: BaseNode, args: T.List[TYPE_var], kwargs: T.Dict[str, TYPE_var]) -> T.Optional[T.Dict[str, T.Any]]:
+ def func_jar(self, node: BaseNode, args: T.List[TYPE_var], kwargs: T.Dict[str, TYPE_var]) -> T.Union[IntrospectionBuildTarget, UnknownValue]:
return self.build_target(node, args, kwargs, Jar)
- def func_build_target(self, node: BaseNode, args: T.List[TYPE_var], kwargs: T.Dict[str, TYPE_var]) -> T.Optional[T.Dict[str, T.Any]]:
+ def func_build_target(self, node: BaseNode, args: T.List[TYPE_var], kwargs: T.Dict[str, TYPE_var]) -> T.Union[IntrospectionBuildTarget, UnknownValue]:
if 'target_type' not in kwargs:
return None
target_type = kwargs.pop('target_type')
@@ -395,7 +366,7 @@ class IntrospectionInterpreter(AstInterpreter):
flattened_kwargs = {}
for key, val in kwargs.items():
if isinstance(val, BaseNode):
- resolved = self.resolve_node(val, include_unknown_args)
+ resolved = self.node_to_runtime_value(val)
if resolved is not None:
flattened_kwargs[key] = resolved
elif isinstance(val, (str, bool, int, float)) or include_unknown_args:
diff --git a/mesonbuild/ast/printer.py b/mesonbuild/ast/printer.py
index 4ce3b3f..0d0c821 100644
--- a/mesonbuild/ast/printer.py
+++ b/mesonbuild/ast/printer.py
@@ -7,12 +7,46 @@ from __future__ import annotations
from .. import mparser
from .visitor import AstVisitor, FullAstVisitor
+from ..mesonlib import MesonBugException
import re
import typing as T
+# Also known as "order of operations" or "binding power".
+# This is the counterpart to Parser.e1, Parser.e2, Parser.e3, Parser.e4, Parser.e5, Parser.e6, Parser.e7, Parser.e8, Parser.e9, Parser.e10
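+# For example, in `a + b * c` the node for `*` gets level 6 and the node for
+# `+` gets level 5, telling the printer that `b * c` binds tighter and needs
+# no parentheses.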
+def precedence_level(node: mparser.BaseNode) -> int:
+ if isinstance(node, (mparser.PlusAssignmentNode, mparser.AssignmentNode, mparser.TernaryNode)):
+ return 1
+ elif isinstance(node, mparser.OrNode):
+ return 2
+ elif isinstance(node, mparser.AndNode):
+ return 3
+ elif isinstance(node, mparser.ComparisonNode):
+ return 4
+ elif isinstance(node, mparser.ArithmeticNode):
+ if node.operation in {'add', 'sub'}:
+ return 5
+ elif node.operation in {'mod', 'mul', 'div'}:
+ return 6
+ elif isinstance(node, (mparser.NotNode, mparser.UMinusNode)):
+ return 7
+ elif isinstance(node, mparser.FunctionNode):
+ return 8
+ elif isinstance(node, (mparser.ArrayNode, mparser.DictNode)):
+ return 9
+ elif isinstance(node, (mparser.BooleanNode, mparser.IdNode, mparser.NumberNode, mparser.StringNode, mparser.EmptyNode)):
+ return 10
+ elif isinstance(node, mparser.ParenthesizedNode):
+        # Parentheses have the highest binding power, but since the AstPrinter
+        # ignores the ParenthesizedNode itself, the binding power of the inner
+        # node is what matters.
+ return precedence_level(node.inner)
+ raise MesonBugException('Unhandled node type')
+
class AstPrinter(AstVisitor):
+ escape_trans: T.Dict[int, str] = str.maketrans({'\\': '\\\\', "'": "\'"})
+
def __init__(self, indent: int = 2, arg_newline_cutoff: int = 5, update_ast_line_nos: bool = False):
self.result = ''
self.indent = indent
@@ -57,7 +91,7 @@ class AstPrinter(AstVisitor):
node.lineno = self.curr_line or node.lineno
def escape(self, val: str) -> str:
- return val.replace('\\', '\\\\').replace("'", "\'")
+ return val.translate(self.escape_trans)
def visit_StringNode(self, node: mparser.StringNode) -> None:
assert isinstance(node.value, str)
@@ -108,11 +142,21 @@ class AstPrinter(AstVisitor):
node.lineno = self.curr_line or node.lineno
node.right.accept(self)
+ def maybe_parentheses(self, outer: mparser.BaseNode, inner: mparser.BaseNode, parens: bool) -> None:
+ if parens:
+ self.append('(', inner)
+ inner.accept(self)
+ if parens:
+ self.append(')', inner)
+
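+    # Parentheses are required when a child binds less tightly than its
+    # parent, or when the right operand of a left-associative operator has
+    # equal binding power (e.g. `a - (b - c)` must keep its parentheses).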
def visit_ArithmeticNode(self, node: mparser.ArithmeticNode) -> None:
- node.left.accept(self)
+ prec = precedence_level(node)
+ prec_left = precedence_level(node.left)
+ prec_right = precedence_level(node.right)
+ self.maybe_parentheses(node, node.left, prec > prec_left)
self.append_padded(node.operator.value, node)
node.lineno = self.curr_line or node.lineno
- node.right.accept(self)
+ self.maybe_parentheses(node, node.right, prec > prec_right or (prec == prec_right and node.operation in {'sub', 'div', 'mod'}))
def visit_NotNode(self, node: mparser.NotNode) -> None:
node.lineno = self.curr_line or node.lineno
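[Editor's note: the precedence table drives when the printer must re-insert parentheses that the parser discarded. A self-contained sketch of the same decision, using the binding powers defined above (this is an illustration, not the AstPrinter API):]

    # Binding powers as in precedence_level(): add/sub = 5, mul/div/mod = 6.
    PREC = {'add': 5, 'sub': 5, 'mul': 6, 'div': 6, 'mod': 6}

    def needs_parens(parent_op: str, child_op: str, is_right_child: bool) -> bool:
        # Parenthesize a child with lower binding power, or a right-hand child
        # of equal power under a non-associative operator (sub, div, mod).
        if PREC[child_op] < PREC[parent_op]:
            return True
        return is_right_child and PREC[child_op] == PREC[parent_op] \
            and parent_op in {'sub', 'div', 'mod'}

    # 'a - (b - c)' keeps its parentheses; 'a - b - c' gains none:
    assert needs_parens('sub', 'sub', is_right_child=True)
    assert not needs_parens('sub', 'sub', is_right_child=False)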
diff --git a/mesonbuild/backend/backends.py b/mesonbuild/backend/backends.py
index 3dfa2fb..e3d6c60 100644
--- a/mesonbuild/backend/backends.py
+++ b/mesonbuild/backend/backends.py
@@ -24,12 +24,12 @@ from .. import dependencies
from .. import programs
from .. import mesonlib
from .. import mlog
-from ..compilers import LANGUAGES_USING_LDFLAGS, detect, lang_suffixes
+from ..compilers import detect, lang_suffixes
from ..mesonlib import (
File, MachineChoice, MesonException, MesonBugException, OrderedSet,
ExecutableSerialisation, EnvironmentException,
classify_unity_sources, get_compiler_for_source,
- is_parent_path,
+ get_rsp_threshold,
)
from ..options import OptionKey
@@ -61,7 +61,7 @@ if T.TYPE_CHECKING:
# Languages that can mix with C or C++ but don't support unity builds yet
# because the syntax we use for unity builds is specific to C/++/ObjC/++.
# Assembly files cannot be unitified and neither can LLVM IR files
-LANGS_CANT_UNITY = ('d', 'fortran', 'vala')
+LANGS_CANT_UNITY = ('d', 'fortran', 'vala', 'rust')
@dataclass(eq=False)
class RegenInfo:
@@ -150,7 +150,7 @@ class TargetInstallData:
def __post_init__(self, outdir_name: T.Optional[str]) -> None:
if outdir_name is None:
outdir_name = os.path.join('{prefix}', self.outdir)
- self.out_name = os.path.join(outdir_name, os.path.basename(self.fname))
+ self.out_name = Path(outdir_name, os.path.basename(self.fname)).as_posix()
@dataclass(eq=False)
class InstallEmptyDir:
@@ -307,16 +307,16 @@ class Backend:
else:
assert isinstance(t, build.BuildTarget), t
filename = t.get_filename()
- return os.path.join(self.get_target_dir(t), filename)
+ return Path(self.get_target_dir(t), filename).as_posix()
def get_target_filename_abs(self, target: T.Union[build.Target, build.CustomTargetIndex]) -> str:
- return os.path.join(self.environment.get_build_dir(), self.get_target_filename(target))
+ return Path(self.environment.get_build_dir(), self.get_target_filename(target)).as_posix()
def get_target_debug_filename(self, target: build.BuildTarget) -> T.Optional[str]:
assert isinstance(target, build.BuildTarget), target
if target.get_debug_filename():
debug_filename = target.get_debug_filename()
- return os.path.join(self.get_target_dir(target), debug_filename)
+ return Path(self.get_target_dir(target), debug_filename).as_posix()
else:
return None
@@ -324,7 +324,7 @@ class Backend:
assert isinstance(target, build.BuildTarget), target
if not target.get_debug_filename():
return None
- return os.path.join(self.environment.get_build_dir(), self.get_target_debug_filename(target))
+ return Path(self.environment.get_build_dir(), self.get_target_debug_filename(target)).as_posix()
def get_source_dir_include_args(self, target: build.BuildTarget, compiler: 'Compiler', *, absolute_path: bool = False) -> T.List[str]:
curdir = target.get_subdir()
@@ -533,6 +533,7 @@ class Backend:
capture: T.Optional[str] = None,
feed: T.Optional[str] = None,
env: T.Optional[mesonlib.EnvironmentVariables] = None,
+ can_use_rsp_file: bool = False,
tag: T.Optional[str] = None,
verbose: bool = False,
installdir_map: T.Optional[T.Dict[str, str]] = None) -> 'ExecutableSerialisation':
@@ -563,9 +564,7 @@ class Backend:
cmd_args: T.List[str] = []
for c in raw_cmd_args:
if isinstance(c, programs.ExternalProgram):
- p = c.get_path()
- assert isinstance(p, str)
- cmd_args.append(p)
+ cmd_args += c.get_command()
elif isinstance(c, (build.BuildTarget, build.CustomTarget)):
cmd_args.append(self.get_target_filename_abs(c))
elif isinstance(c, mesonlib.File):
@@ -594,6 +593,21 @@ class Backend:
exe_wrapper = None
workdir = workdir or self.environment.get_build_dir()
+
+ # The total command-line length must count the separator after each argument as well
+ needs_rsp_file = can_use_rsp_file and sum(len(i) + 1 for i in cmd_args) >= get_rsp_threshold()
+
+ if needs_rsp_file:
+ hasher = hashlib.sha1()
+ args = ' '.join(mesonlib.quote_arg(arg) for arg in cmd_args)
+ hasher.update(args.encode(encoding='utf-8', errors='ignore'))
+ digest = hasher.hexdigest()
+ scratch_file = f'meson_rsp_{digest}.rsp'
+ rsp_file = os.path.join(self.environment.get_scratch_dir(), scratch_file)
+ with open(rsp_file, 'w', encoding='utf-8', newline='\n') as f:
+ f.write(args)
+ cmd_args = [f'@{rsp_file}']
+
return ExecutableSerialisation(exe_cmd + cmd_args, env,
exe_wrapper, workdir,
extra_paths, capture, feed, tag, verbose, installdir_map)
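[Editor's note: the new response-file path kicks in when the flattened command line would exceed the platform limit reported by get_rsp_threshold(). A minimal sketch of the mechanism under a hypothetical fixed threshold, with shlex.quote standing in for mesonlib.quote_arg:]

    import hashlib
    import os
    import shlex
    import typing as T

    RSP_THRESHOLD = 32 * 1024  # hypothetical limit; Meson asks get_rsp_threshold()

    def maybe_use_rsp_file(cmd_args: T.List[str], scratch_dir: str) -> T.List[str]:
        # Each argument contributes its own length plus one separator.
        if sum(len(a) + 1 for a in cmd_args) < RSP_THRESHOLD:
            return cmd_args
        quoted = ' '.join(shlex.quote(a) for a in cmd_args)
        digest = hashlib.sha1(quoted.encode('utf-8', errors='ignore')).hexdigest()
        rsp_file = os.path.join(scratch_dir, f'meson_rsp_{digest}.rsp')
        with open(rsp_file, 'w', encoding='utf-8', newline='\n') as f:
            f.write(quoted)
        # Tools that support response files expand '@file' to its contents.
        return [f'@{rsp_file}']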
@@ -606,6 +620,7 @@ class Backend:
feed: T.Optional[str] = None,
force_serialize: bool = False,
env: T.Optional[mesonlib.EnvironmentVariables] = None,
+ can_use_rsp_file: bool = False,
verbose: bool = False) -> T.Tuple[T.List[str], str]:
'''
Serialize an executable for running with a generator or a custom target
@@ -613,7 +628,7 @@ class Backend:
cmd: T.List[T.Union[str, mesonlib.File, build.BuildTarget, build.CustomTarget, programs.ExternalProgram]] = []
cmd.append(exe)
cmd.extend(cmd_args)
- es = self.get_executable_serialisation(cmd, workdir, extra_bdeps, capture, feed, env, verbose=verbose)
+ es = self.get_executable_serialisation(cmd, workdir, extra_bdeps, capture, feed, env, can_use_rsp_file, verbose=verbose)
reasons: T.List[str] = []
if es.extra_paths:
reasons.append('to set PATH')
@@ -653,6 +668,9 @@ class Backend:
envlist.append(f'{k}={v}')
return ['env'] + envlist + es.cmd_args, ', '.join(reasons)
+ if any(a.startswith('@') for a in es.cmd_args):
+ reasons.append('because command is too long')
+
if not force_serialize:
if not capture and not feed:
return es.cmd_args, ''
@@ -715,118 +733,6 @@ class Backend:
return l, stdlib_args
@staticmethod
- def _libdir_is_system(libdir: str, compilers: T.Mapping[str, 'Compiler'], env: 'Environment') -> bool:
- libdir = os.path.normpath(libdir)
- for cc in compilers.values():
- if libdir in cc.get_library_dirs(env):
- return True
- return False
-
- def get_external_rpath_dirs(self, target: build.BuildTarget) -> T.Set[str]:
- args: T.List[str] = []
- for lang in LANGUAGES_USING_LDFLAGS:
- try:
- e = self.environment.coredata.get_external_link_args(target.for_machine, lang)
- if isinstance(e, str):
- args.append(e)
- else:
- args.extend(e)
- except Exception:
- pass
- return self.get_rpath_dirs_from_link_args(args)
-
- @staticmethod
- def get_rpath_dirs_from_link_args(args: T.List[str]) -> T.Set[str]:
- dirs: T.Set[str] = set()
- # Match rpath formats:
- # -Wl,-rpath=
- # -Wl,-rpath,
- rpath_regex = re.compile(r'-Wl,-rpath[=,]([^,]+)')
- # Match solaris style compat runpath formats:
- # -Wl,-R
- # -Wl,-R,
- runpath_regex = re.compile(r'-Wl,-R[,]?([^,]+)')
- # Match symbols formats:
- # -Wl,--just-symbols=
- # -Wl,--just-symbols,
- symbols_regex = re.compile(r'-Wl,--just-symbols[=,]([^,]+)')
- for arg in args:
- rpath_match = rpath_regex.match(arg)
- if rpath_match:
- for dir in rpath_match.group(1).split(':'):
- dirs.add(dir)
- runpath_match = runpath_regex.match(arg)
- if runpath_match:
- for dir in runpath_match.group(1).split(':'):
- # The symbols arg is an rpath if the path is a directory
- if Path(dir).is_dir():
- dirs.add(dir)
- symbols_match = symbols_regex.match(arg)
- if symbols_match:
- for dir in symbols_match.group(1).split(':'):
- # Prevent usage of --just-symbols to specify rpath
- if Path(dir).is_dir():
- raise MesonException(f'Invalid arg for --just-symbols, {dir} is a directory.')
- return dirs
-
- @lru_cache(maxsize=None)
- def rpaths_for_non_system_absolute_shared_libraries(self, target: build.BuildTarget, exclude_system: bool = True) -> 'ImmutableListProtocol[str]':
- paths: OrderedSet[str] = OrderedSet()
- srcdir = self.environment.get_source_dir()
-
- for dep in target.external_deps:
- if dep.type_name not in {'library', 'pkgconfig', 'cmake'}:
- continue
- for libpath in dep.link_args:
- # For all link args that are absolute paths to a library file, add RPATH args
- if not os.path.isabs(libpath):
- continue
- libdir = os.path.dirname(libpath)
- if exclude_system and self._libdir_is_system(libdir, target.compilers, self.environment):
- # No point in adding system paths.
- continue
- # Don't remove rpaths specified in LDFLAGS.
- if libdir in self.get_external_rpath_dirs(target):
- continue
- # Windows doesn't support rpaths, but we use this function to
- # emulate rpaths by setting PATH
- # .dll is there for mingw gcc
- # .so's may be extended with version information, e.g. libxyz.so.1.2.3
- if not (
- os.path.splitext(libpath)[1] in {'.dll', '.lib', '.so', '.dylib'}
- or re.match(r'.+\.so(\.|$)', os.path.basename(libpath))
- ):
- continue
-
- if is_parent_path(srcdir, libdir):
- rel_to_src = libdir[len(srcdir) + 1:]
- assert not os.path.isabs(rel_to_src), f'rel_to_src: {rel_to_src} is absolute'
- paths.add(os.path.join(self.build_to_src, rel_to_src))
- else:
- paths.add(libdir)
- # Don't remove rpaths specified by the dependency
- paths.difference_update(self.get_rpath_dirs_from_link_args(dep.link_args))
- for i in chain(target.link_targets, target.link_whole_targets):
- if isinstance(i, build.BuildTarget):
- paths.update(self.rpaths_for_non_system_absolute_shared_libraries(i, exclude_system))
- return list(paths)
-
- # This may take other types
- def determine_rpath_dirs(self, target: T.Union[build.BuildTarget, build.CustomTarget, build.CustomTargetIndex]
- ) -> T.Tuple[str, ...]:
- result: OrderedSet[str]
- if self.environment.coredata.optstore.get_value_for(OptionKey('layout')) == 'mirror':
- # Need a copy here
- result = OrderedSet(target.get_link_dep_subdirs())
- else:
- result = OrderedSet()
- result.add('meson-out')
- if isinstance(target, build.BuildTarget):
- result.update(self.rpaths_for_non_system_absolute_shared_libraries(target))
- target.rpath_dirs_to_remove.update([d.encode('utf-8') for d in result])
- return tuple(result)
-
- @staticmethod
@lru_cache(maxsize=None)
def canonicalize_filename(fname: str) -> str:
if os.path.altsep is not None:
@@ -1067,11 +973,6 @@ class Backend:
if compiler.language == 'vala':
if dep.type_name == 'pkgconfig':
assert isinstance(dep, dependencies.ExternalDependency)
- if dep.name == 'glib-2.0' and dep.version_reqs is not None:
- for req in dep.version_reqs:
- if req.startswith(('>=', '==')):
- commands += ['--target-glib', req[2:]]
- break
commands += ['--pkg', dep.name]
elif isinstance(dep, dependencies.ExternalLibrary):
commands += dep.get_link_args('vala')
@@ -1083,6 +984,32 @@ class Backend:
commands += dep.get_exe_args(compiler)
# For 'automagic' deps: Boost and GTest. Also dependency('threads').
# pkg-config puts the thread flags itself via `Cflags:`
+ if compiler.language == 'vala':
+ # Vala wants to know the minimum glib version
+ for dep in target.added_deps:
+ if dep.name == 'glib-2.0':
+ if dep.type_name == 'pkgconfig':
+ assert isinstance(dep, dependencies.ExternalDependency)
+ if dep.version_reqs is not None:
+ for req in dep.version_reqs:
+ if req.startswith(('>=', '==')):
+ commands += ['--target-glib', req[2:]]
+ break
+ elif isinstance(dep, dependencies.InternalDependency) and dep.version is not None:
+ glib_version = dep.version.split('.')
+ if len(glib_version) != 3:
+ mlog.warning(f'GLib version has unexpected format: {dep.version}')
+ break
+ try:
+ # If GLib version is a development version, downgrade
+ # --target-glib to the previous version, as valac will
+ # complain about non-even minor versions
+ glib_version[1] = str((int(glib_version[1]) // 2) * 2)
+ except ValueError:
+ mlog.warning(f'GLib version has unexpected format: {dep.version}')
+ break
+ commands += ['--target-glib', f'{glib_version[0]}.{glib_version[1]}']
+
# Fortran requires extra include directives.
if compiler.language == 'fortran':
for lt in chain(target.link_targets, target.link_whole_targets):
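[Editor's note: the --target-glib downgrade above rounds a development (odd) minor version down to the previous even, stable minor, since valac rejects non-even minors. A worked example of the arithmetic:]

    def target_glib(version: str) -> str:
        # '2.83.1' is a development series; (83 // 2) * 2 == 82, the last stable minor.
        major, minor, _patch = version.split('.')
        return f'{major}.{(int(minor) // 2) * 2}'

    assert target_glib('2.83.1') == '2.82'  # odd (development) minor rounds down
    assert target_glib('2.82.0') == '2.82'  # even (stable) minor is unchanged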
@@ -1258,12 +1185,9 @@ class Backend:
extra_bdeps: T.List[T.Union[build.BuildTarget, build.CustomTarget, build.CustomTargetIndex]] = []
if isinstance(exe, build.CustomTarget):
extra_bdeps = list(exe.get_transitive_build_target_deps())
+ extra_bdeps.extend(t.depends)
+ extra_bdeps.extend(a for a in t.cmd_args if isinstance(a, build.BuildTarget))
extra_paths = self.determine_windows_extra_paths(exe, extra_bdeps)
- for a in t.cmd_args:
- if isinstance(a, build.BuildTarget):
- for p in self.determine_windows_extra_paths(a, []):
- if p not in extra_paths:
- extra_paths.append(p)
else:
extra_paths = []
@@ -1289,8 +1213,12 @@ class Backend:
else:
raise MesonException('Bad object in test command.')
+ # set LD_LIBRARY_PATH for
+ # a) dependencies, as relying on rpath is not very safe:
+ # https://github.com/mesonbuild/meson/pull/11119
+ # b) depends and targets passed via args.
t_env = copy.deepcopy(t.env)
- if not machine.is_windows() and not machine.is_cygwin() and not machine.is_darwin():
+ if not machine.is_windows() and not machine.is_cygwin():
ld_lib_path_libs: T.Set[build.SharedLibrary] = set()
for d in depends:
if isinstance(d, build.BuildTarget):
@@ -1303,6 +1231,8 @@ class Backend:
if ld_lib_path:
t_env.prepend('LD_LIBRARY_PATH', list(ld_lib_path), ':')
+ if machine.is_darwin():
+ t_env.prepend('DYLD_LIBRARY_PATH', list(ld_lib_path), ':')
ts = TestSerialisation(t.get_name(), t.project_name, t.suite, cmd, is_cross,
exe_wrapper, self.environment.need_exe_wrapper(),
@@ -1562,7 +1492,7 @@ class Backend:
def eval_custom_target_command(
self, target: build.CustomTarget, absolute_outputs: bool = False) -> \
- T.Tuple[T.List[str], T.List[str], T.List[str]]:
+ T.Tuple[T.List[str], T.List[str], T.List[str | programs.ExternalProgram]]:
# We want the outputs to be absolute only when using the VS backend
# XXX: Maybe allow the vs backend to use relative paths too?
source_root = self.build_to_src
@@ -1575,7 +1505,7 @@ class Backend:
outputs = [os.path.join(outdir, i) for i in target.get_outputs()]
inputs = self.get_custom_target_sources(target)
# Evaluate the command list
- cmd: T.List[str] = []
+ cmd: T.List[str | programs.ExternalProgram] = []
for i in target.command:
if isinstance(i, build.BuildTarget):
cmd += self.build_target_to_cmd_array(i)
@@ -1611,6 +1541,9 @@ class Backend:
if not target.absolute_paths:
pdir = self.get_target_private_dir(target)
i = i.replace('@PRIVATE_DIR@', pdir)
+ elif isinstance(i, programs.ExternalProgram):
+ # Let it pass and be extended elsewhere
+ pass
else:
raise RuntimeError(f'Argument {i} is of unknown type {type(i)}')
cmd.append(i)
@@ -1635,7 +1568,7 @@ class Backend:
# fixed.
#
# https://github.com/mesonbuild/meson/pull/737
- cmd = [i.replace('\\', '/') for i in cmd]
+ cmd = [i.replace('\\', '/') if isinstance(i, str) else i for i in cmd]
return inputs, outputs, cmd
def get_introspect_command(self) -> str:
@@ -1996,6 +1929,8 @@ class Backend:
compiler += [j]
elif isinstance(j, (build.BuildTarget, build.CustomTarget)):
compiler += j.get_outputs()
+ elif isinstance(j, programs.ExternalProgram):
+ compiler += j.get_command()
else:
raise RuntimeError(f'Type "{type(j).__name__}" is not supported in get_introspection_data. This is a bug')
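[Editor's note: eval_custom_target_command now carries ExternalProgram objects through the command list untouched, so consumers can expand each one into a full argv via get_command(). A small sketch of that expansion step, with a minimal stand-in class instead of the real programs.ExternalProgram:]

    import typing as T

    class ExternalProgram:
        # Minimal stand-in: real ExternalPrograms may be multi-element
        # commands, e.g. ['python3', 'script.py'].
        def __init__(self, command: T.List[str]) -> None:
            self._command = command

        def get_command(self) -> T.List[str]:
            return list(self._command)

    def expand_cmd(cmd: T.List[T.Union[str, ExternalProgram]]) -> T.List[str]:
        argv: T.List[str] = []
        for c in cmd:
            if isinstance(c, ExternalProgram):
                argv += c.get_command()   # expand to the full command line
            else:
                argv.append(c)
        return argv

    assert expand_cmd([ExternalProgram(['python3', 'gen.py']), '-o', 'out']) \
        == ['python3', 'gen.py', '-o', 'out']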
diff --git a/mesonbuild/backend/ninjabackend.py b/mesonbuild/backend/ninjabackend.py
index d7de987..595a27a 100644
--- a/mesonbuild/backend/ninjabackend.py
+++ b/mesonbuild/backend/ninjabackend.py
@@ -500,11 +500,6 @@ class NinjaBackend(backends.Backend):
# - https://github.com/mesonbuild/meson/pull/9453
# - https://github.com/mesonbuild/meson/issues/9479#issuecomment-953485040
self.allow_thin_archives = PerMachine[bool](True, True)
- if self.environment:
- for for_machine in MachineChoice:
- if 'cuda' in self.environment.coredata.compilers[for_machine]:
- mlog.debug('cuda enabled globally, disabling thin archives for {}, since nvcc/nvlink cannot handle thin archives natively'.format(for_machine))
- self.allow_thin_archives[for_machine] = False
def create_phony_target(self, dummy_outfile: str, rulename: str, phony_infilename: str) -> NinjaBuildElement:
'''
@@ -595,6 +590,12 @@ class NinjaBackend(backends.Backend):
# We don't yet have a use case where we'd expect to make use of this,
# so no harm in catching and reporting something unexpected.
raise MesonBugException('We do not expect the ninja backend to be given a valid \'vslite_ctx\'')
+ if self.environment:
+ for for_machine in MachineChoice:
+ if 'cuda' in self.environment.coredata.compilers[for_machine]:
+ mlog.debug('cuda enabled globally, disabling thin archives for {}, since nvcc/nvlink cannot handle thin archives natively'.format(for_machine))
+ self.allow_thin_archives[for_machine] = False
+
ninja = environment.detect_ninja_command_and_version(log=True)
if self.environment.coredata.optstore.get_value_for(OptionKey('vsenv')):
builddir = Path(self.environment.get_build_dir())
@@ -890,14 +891,14 @@ class NinjaBackend(backends.Backend):
self.generate_shlib_aliases(target, self.get_target_dir(target))
+ # Generate rules for GeneratedLists
+ self.generate_generator_list_rules(target)
+
# If target uses a language that cannot link to C objects,
# just generate for that language and return.
if isinstance(target, build.Jar):
self.generate_jar_target(target)
return
- if target.uses_rust():
- self.generate_rust_target(target)
- return
if 'cs' in target.compilers:
self.generate_cs_target(target)
return
@@ -934,8 +935,6 @@ class NinjaBackend(backends.Backend):
generated_sources = self.get_target_generated_sources(target)
transpiled_sources = []
self.scan_fortran_module_outputs(target)
- # Generate rules for GeneratedLists
- self.generate_generator_list_rules(target)
# Generate rules for building the remaining source files in this target
outname = self.get_target_filename(target)
@@ -991,6 +990,8 @@ class NinjaBackend(backends.Backend):
# this target. We create the Ninja build file elements for this here
# because we need `header_deps` to be fully generated in the above loop.
for src in generated_source_files:
+ if not self.environment.is_separate_compile(src):
+ continue
if self.environment.is_llvm_ir(src):
o, s = self.generate_llvm_ir_compile(target, src)
else:
@@ -1049,21 +1050,24 @@ class NinjaBackend(backends.Backend):
# Generate compile targets for all the preexisting sources for this target
for src in target_sources.values():
- if not self.environment.is_header(src) or is_compile_target:
- if self.environment.is_llvm_ir(src):
- o, s = self.generate_llvm_ir_compile(target, src)
- obj_list.append(o)
- elif is_unity and self.get_target_source_can_unity(target, src):
- abs_src = os.path.join(self.environment.get_build_dir(),
- src.rel_to_builddir(self.build_to_src))
- unity_src.append(abs_src)
- else:
- o, s = self.generate_single_compile(target, src, False, [],
- header_deps + d_generated_deps + fortran_order_deps,
- fortran_inc_args)
- obj_list.append(o)
- compiled_sources.append(s)
- source2object[s] = o
+ if not self.environment.is_separate_compile(src):
+ continue
+ if self.environment.is_header(src) and not is_compile_target:
+ continue
+ if self.environment.is_llvm_ir(src):
+ o, s = self.generate_llvm_ir_compile(target, src)
+ obj_list.append(o)
+ elif is_unity and self.get_target_source_can_unity(target, src):
+ abs_src = os.path.join(self.environment.get_build_dir(),
+ src.rel_to_builddir(self.build_to_src))
+ unity_src.append(abs_src)
+ else:
+ o, s = self.generate_single_compile(target, src, False, [],
+ header_deps + d_generated_deps + fortran_order_deps,
+ fortran_inc_args)
+ obj_list.append(o)
+ compiled_sources.append(s)
+ source2object[s] = o
if is_unity:
for src in self.generate_unity_files(target, unity_src):
@@ -1083,8 +1087,14 @@ class NinjaBackend(backends.Backend):
final_obj_list = self.generate_prelink(target, obj_list)
else:
final_obj_list = obj_list
- elem = self.generate_link(target, outname, final_obj_list, linker, pch_objects, stdlib_args=stdlib_args)
+
self.generate_dependency_scan_target(target, compiled_sources, source2object, fortran_order_deps)
+
+ if target.uses_rust():
+ self.generate_rust_target(target, outname, final_obj_list, fortran_order_deps)
+ return
+
+ elem = self.generate_link(target, outname, final_obj_list, linker, pch_objects, stdlib_args=stdlib_args)
self.add_build(elem)
#In AIX, we archive shared libraries. If the instance is a shared library, we add a command to archive the shared library
#object and create the build element.
@@ -1223,6 +1233,7 @@ class NinjaBackend(backends.Backend):
capture=ofilenames[0] if target.capture else None,
feed=srcs[0] if target.feed else None,
env=target.env,
+ can_use_rsp_file=target.rspable,
verbose=target.console)
if reason:
cmd_type = f' (wrapped by meson {reason})'
@@ -1554,7 +1565,6 @@ class NinjaBackend(backends.Backend):
elem.add_item('ARGS', commands)
self.add_build(elem)
- self.generate_generator_list_rules(target)
self.create_target_source_introspection(target, compiler, commands, rel_srcs, generated_rel_srcs)
def determine_java_compile_args(self, target, compiler) -> T.List[str]:
@@ -1765,6 +1775,9 @@ class NinjaBackend(backends.Backend):
girname = os.path.join(self.get_target_dir(target), target.vala_gir)
args += ['--gir', os.path.join('..', target.vala_gir)]
valac_outputs.append(girname)
+ shared_target = target.get('shared')
+ if isinstance(shared_target, build.SharedLibrary):
+ args += ['--shared-library', shared_target.get_filename()]
# Install GIR to default location if requested by user
if len(target.install_dir) > 3 and target.install_dir[3] is True:
target.install_dir[3] = os.path.join(self.environment.get_datadir(), 'gir-1.0')
@@ -1775,7 +1788,7 @@ class NinjaBackend(backends.Backend):
gres_xml, = self.get_custom_target_sources(gensrc)
args += ['--gresources=' + gres_xml]
for source_dir in gensrc.source_dirs:
- gres_dirs += [os.path.join(self.get_target_dir(gensrc), source_dir)]
+ gres_dirs += [source_dir]
# Ensure that resources are built before vala sources
# This is required since vala code using [GtkTemplate] effectively depends on .ui files
# GResourceHeaderTarget is not suitable due to lacking depfile
@@ -1967,6 +1980,7 @@ class NinjaBackend(backends.Backend):
for s in f.get_outputs()])
self.all_structured_sources.update(_ods)
orderdeps.extend(_ods)
+ return orderdeps, main_rust_file
for i in target.get_sources():
if main_rust_file is None:
@@ -2005,7 +2019,8 @@ class NinjaBackend(backends.Backend):
args += target.get_extra_args('rust')
return args
- def get_rust_compiler_deps_and_args(self, target: build.BuildTarget, rustc: Compiler) -> T.Tuple[T.List[str], T.List[str], T.List[RustDep], T.List[str]]:
+ def get_rust_compiler_deps_and_args(self, target: build.BuildTarget, rustc: Compiler,
+ obj_list: T.List[str]) -> T.Tuple[T.List[str], T.List[RustDep], T.List[str]]:
deps: T.List[str] = []
project_deps: T.List[RustDep] = []
args: T.List[str] = []
@@ -2037,11 +2052,9 @@ class NinjaBackend(backends.Backend):
type_ += ':' + ','.join(modifiers)
args.append(f'-l{type_}={libname}')
- objs, od = self.flatten_object_list(target)
- for o in objs:
+ for o in obj_list:
args.append(f'-Clink-arg={o}')
deps.append(o)
- fortran_order_deps = self.get_fortran_order_deps(od)
linkdirs = mesonlib.OrderedSet()
external_deps = target.external_deps.copy()
@@ -2091,20 +2104,24 @@ class NinjaBackend(backends.Backend):
for a in e.get_link_args():
if a in rustc.native_static_libs:
# Exclude link args that rustc already add by default
- pass
+ continue
elif a.startswith('-L'):
args.append(a)
- elif a.endswith(('.dll', '.so', '.dylib', '.a', '.lib')) and isinstance(target, build.StaticLibrary):
+ continue
+ elif a.endswith(('.dll', '.so', '.dylib', '.a', '.lib')):
dir_, lib = os.path.split(a)
linkdirs.add(dir_)
- if not verbatim:
- lib, ext = os.path.splitext(lib)
- if lib.startswith('lib'):
- lib = lib[3:]
- static = a.endswith(('.a', '.lib'))
- _link_library(lib, static)
- else:
- args.append(f'-Clink-arg={a}')
+
+ if isinstance(target, build.StaticLibrary):
+ if not verbatim:
+ lib, ext = os.path.splitext(lib)
+ if lib.startswith('lib'):
+ lib = lib[3:]
+ static = a.endswith(('.a', '.lib'))
+ _link_library(lib, static)
+ continue
+
+ args.append(f'-Clink-arg={a}')
for d in linkdirs:
d = d or '.'
@@ -2119,40 +2136,44 @@ class NinjaBackend(backends.Backend):
and dep.rust_crate_type == 'dylib'
for dep in target_deps)
- if target.rust_crate_type in {'dylib', 'proc-macro'} or has_rust_shared_deps:
- # add prefer-dynamic if any of the Rust libraries we link
+ if target.rust_crate_type in {'dylib', 'proc-macro'}:
+ # also add prefer-dynamic if any of the Rust libraries we link
# against are dynamic or this is a dynamic library itself,
# otherwise we'll end up with multiple implementations of libstd.
+ has_rust_shared_deps = True
+ elif self.get_target_option(target, 'rust_dynamic_std'):
+ if target.rust_crate_type == 'staticlib':
+ # staticlib crates always include a copy of the Rust libstd,
+ # therefore it is not possible to also link it dynamically.
+ # The options to avoid this (-Z staticlib-allow-rdylib-deps and
+ # -Z staticlib-prefer-dynamic) are not yet stable; alternatively,
+ # one could use "--emit obj" (implemented in the pull request at
+ # https://github.com/mesonbuild/meson/pull/11213) or "--emit rlib"
+ # (officially not recommended for linking with C programs).
+ raise MesonException('rust_dynamic_std does not support staticlib crates yet')
+ # want libstd as a shared dep
+ has_rust_shared_deps = True
+
+ if has_rust_shared_deps:
args += ['-C', 'prefer-dynamic']
-
- if isinstance(target, build.SharedLibrary) or has_shared_deps:
+ if has_shared_deps or has_rust_shared_deps:
args += self.get_build_rpath_args(target, rustc)
- return deps, fortran_order_deps, project_deps, args
-
- def generate_rust_target(self, target: build.BuildTarget) -> None:
- rustc = T.cast('RustCompiler', target.compilers['rust'])
- self.generate_generator_list_rules(target)
-
- for i in target.get_sources():
- if not rustc.can_compile(i):
- raise InvalidArguments(f'Rust target {target.get_basename()} contains a non-rust source file.')
- for g in target.get_generated_sources():
- for i in g.get_outputs():
- if not rustc.can_compile(i):
- raise InvalidArguments(f'Rust target {target.get_basename()} contains a non-rust source file.')
+ return deps, project_deps, args
+ def generate_rust_target(self, target: build.BuildTarget, target_name: str, obj_list: T.List[str],
+ fortran_order_deps: T.List[str]) -> None:
orderdeps, main_rust_file = self.generate_rust_sources(target)
- target_name = self.get_target_filename(target)
if main_rust_file is None:
raise RuntimeError('A Rust target has no Rust sources. This is weird. Also a bug. Please report')
+ rustc = T.cast('RustCompiler', target.compilers['rust'])
args = rustc.compiler_args()
depfile = os.path.join(self.get_target_private_dir(target), target.name + '.d')
args += self.get_rust_compiler_args(target, rustc, target.rust_crate_type, depfile)
- deps, fortran_order_deps, project_deps, deps_args = self.get_rust_compiler_deps_and_args(target, rustc)
+ deps, project_deps, deps_args = self.get_rust_compiler_deps_and_args(target, rustc, obj_list)
args += deps_args
proc_macro_dylib_path = None
@@ -2187,7 +2208,10 @@ class NinjaBackend(backends.Backend):
rustdoc = rustc.get_rustdoc(self.environment)
args = rustdoc.get_exe_args()
args += self.get_rust_compiler_args(target.doctests.target, rustdoc, target.rust_crate_type)
- _, _, _, deps_args = self.get_rust_compiler_deps_and_args(target.doctests.target, rustdoc)
+ # There can be no non-Rust objects: the doctests are gathered from Rust
+ # sources and the tests are linked with the target (which is where the
+ # obj_list was linked into)
+ _, _, deps_args = self.get_rust_compiler_deps_and_args(target.doctests.target, rustdoc, [])
args += deps_args
target.doctests.cmd_args = args.to_native() + [main_rust_file] + target.doctests.cmd_args
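[Editor's note: the restructured logic above separates when a crate must prefer dynamic linking of the Rust runtime: dylib/proc-macro crates always do, rust_dynamic_std requests it (but is incompatible with staticlib crates), and otherwise it depends on whether any linked Rust dependency is a dylib. A condensed sketch of that decision, with rust_dynamic_std as a plain boolean for illustration:]

    def prefer_dynamic(crate_type: str, rust_dynamic_std: bool,
                       deps_have_rust_dylib: bool) -> bool:
        # dylib and proc-macro crates always link libstd dynamically; otherwise
        # multiple copies of libstd could end up in one process.
        if crate_type in {'dylib', 'proc-macro'}:
            return True
        if rust_dynamic_std:
            if crate_type == 'staticlib':
                # staticlib crates bundle libstd; a dynamic libstd is contradictory.
                raise ValueError('rust_dynamic_std does not support staticlib crates yet')
            return True
        return deps_have_rust_dylib

    assert prefer_dynamic('proc-macro', False, False)
    assert prefer_dynamic('rlib', False, True)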
@@ -2209,10 +2233,7 @@ class NinjaBackend(backends.Backend):
def swift_module_file_name(self, target):
return os.path.join(self.get_target_private_dir(target),
- self.target_swift_modulename(target) + '.swiftmodule')
-
- def target_swift_modulename(self, target):
- return target.name
+ target.swift_module_name + '.swiftmodule')
def determine_swift_dep_modules(self, target):
result = []
@@ -2239,12 +2260,26 @@ class NinjaBackend(backends.Backend):
return srcs, others
def generate_swift_target(self, target) -> None:
- module_name = self.target_swift_modulename(target)
+ module_name = target.swift_module_name
swiftc = target.compilers['swift']
abssrc = []
relsrc = []
abs_headers = []
header_imports = []
+
+ if not target.uses_swift_cpp_interop():
+ cpp_targets = [t for t in target.link_targets if t.uses_swift_cpp_interop()]
+ if cpp_targets != []:
+ target_word = 'targets' if len(cpp_targets) > 1 else 'target'
+ first = ', '.join(repr(t.name) for t in cpp_targets[:-1])
+ and_word = ' and ' if len(cpp_targets) > 1 else ''
+ last = repr(cpp_targets[-1].name)
+ enable_word = 'enable' if len(cpp_targets) > 1 else 'enables'
+ raise MesonException('Swift target {0} links against {1} {2}{3}{4} which {5} C++ interoperability. '
+ 'This requires {0} to also have it enabled. '
+ 'Add "swift_interoperability_mode: \'cpp\'" to the definition of {0}.'
+ .format(repr(target.name), target_word, first, and_word, last, enable_word))
+
for i in target.get_sources():
if swiftc.can_compile(i):
rels = i.rel_to_builddir(self.build_to_src)
@@ -2261,6 +2296,16 @@ class NinjaBackend(backends.Backend):
os.makedirs(self.get_target_private_dir_abs(target), exist_ok=True)
compile_args = self.generate_basic_compiler_args(target, swiftc)
compile_args += swiftc.get_module_args(module_name)
+ compile_args += swiftc.get_cxx_interoperability_args(target)
+ compile_args += self.build.get_project_args(swiftc, target.subproject, target.for_machine)
+ compile_args += self.build.get_global_args(swiftc, target.for_machine)
+ if isinstance(target, (build.StaticLibrary, build.SharedLibrary)):
+ # swiftc treats modules with a single source file, and the main.swift file in multi-source file modules
+ # as top-level code. This is undesirable in library targets since it emits a main function. Add the
+ # -parse-as-library option as necessary to prevent emitting the main function while keeping files explicitly
+ # named main.swift treated as the entrypoint of the module in case this is desired.
+ if len(abssrc) == 1 and os.path.basename(abssrc[0]) != 'main.swift':
+ compile_args += swiftc.get_library_args()
for i in reversed(target.get_include_dirs()):
basedir = i.get_curdir()
for d in i.get_incdirs():
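[Editor's note: the -parse-as-library heuristic above suppresses swiftc's implicit main function for library targets, while still letting an explicit main.swift act as the module entrypoint. A standalone sketch of the condition (function name is hypothetical; get_library_args() is assumed to yield the flag):]

    import os
    import typing as T

    def extra_swift_library_args(is_library: bool, sources: T.List[str]) -> T.List[str]:
        # swiftc treats a single-file module (and main.swift) as top-level code,
        # which would emit a main function; libraries must suppress that.
        if is_library and len(sources) == 1 \
                and os.path.basename(sources[0]) != 'main.swift':
            return ['-parse-as-library']
        return []

    assert extra_swift_library_args(True, ['src/Foo.swift']) == ['-parse-as-library']
    assert extra_swift_library_args(True, ['src/main.swift']) == []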
@@ -3127,9 +3172,9 @@ https://gcc.gnu.org/bugzilla/show_bug.cgi?id=47485'''))
# If TASKING compiler family is used and MIL linking is enabled for the target,
# then compilation rule name is a special one to output MIL files
# instead of object files for .c files
- key = OptionKey('b_lto')
if compiler.get_id() == 'tasking':
- if ((isinstance(target, build.StaticLibrary) and target.prelink) or target.get_option(key)) and src.rsplit('.', 1)[1] in compilers.lang_suffixes['c']:
+ target_lto = self.get_target_option(target, OptionKey('b_lto', machine=target.for_machine, subproject=target.subproject))
+ if ((isinstance(target, build.StaticLibrary) and target.prelink) or target_lto) and src.rsplit('.', 1)[1] in compilers.lang_suffixes['c']:
compiler_name = self.get_compiler_rule_name('tasking_mil_compile', compiler.for_machine)
else:
compiler_name = self.compiler_to_rule_name(compiler)
@@ -3344,7 +3389,7 @@ https://gcc.gnu.org/bugzilla/show_bug.cgi?id=47485'''))
def get_target_shsym_filename(self, target):
# Always name the .symbols file after the primary build output because it always exists
targetdir = self.get_target_private_dir(target)
- return os.path.join(targetdir, target.get_filename() + '.symbols')
+ return Path(targetdir, target.get_filename() + '.symbols').as_posix()
def generate_shsym(self, target) -> None:
target_file = self.get_target_filename(target)
@@ -3363,7 +3408,7 @@ https://gcc.gnu.org/bugzilla/show_bug.cgi?id=47485'''))
self.add_build(elem)
def get_import_filename(self, target) -> str:
- return os.path.join(self.get_target_dir(target), target.import_filename)
+ return Path(self.get_target_dir(target), target.import_filename).as_posix()
def get_target_type_link_args(self, target, linker):
commands = []
@@ -3551,9 +3596,7 @@ https://gcc.gnu.org/bugzilla/show_bug.cgi?id=47485'''))
linker.build_rpath_args(self.environment,
self.environment.get_build_dir(),
target_slashname_workaround_dir,
- self.determine_rpath_dirs(target),
- target.build_rpath,
- target.install_rpath))
+ target))
return rpath_args
def generate_link(self, target: build.BuildTarget, outname, obj_list, linker: T.Union['Compiler', 'StaticLinker'], extra_args=None, stdlib_args=None):
@@ -3688,7 +3731,7 @@ https://gcc.gnu.org/bugzilla/show_bug.cgi?id=47485'''))
elem = NinjaBuildElement(self.all_outputs, outname, linker_rule, obj_list, implicit_outs=implicit_outs)
elem.add_dep(dep_targets + custom_target_libraries)
if linker.get_id() == 'tasking':
- if len([x for x in dep_targets + custom_target_libraries if x.endswith('.ma')]) > 0 and not target.get_option(OptionKey('b_lto')):
+ if len([x for x in dep_targets + custom_target_libraries if x.endswith('.ma')]) > 0 and not self.get_target_option(target, OptionKey('b_lto', target.subproject, target.for_machine)):
raise MesonException(f'Tried to link the target named \'{target.name}\' with a MIL archive without LTO enabled! This causes the compiler to ignore the archive.')
# Compiler args must be included in TI C28x linker commands.
diff --git a/mesonbuild/backend/vs2010backend.py b/mesonbuild/backend/vs2010backend.py
index 283f9f0..deb3dfb 100644
--- a/mesonbuild/backend/vs2010backend.py
+++ b/mesonbuild/backend/vs2010backend.py
@@ -147,6 +147,9 @@ class Vs2010Backend(backends.Backend):
self.handled_target_deps = {}
self.gen_lite = gen_lite # Synonymous with generating the simpler makefile-style multi-config projects that invoke 'meson compile' builds, avoiding native MSBuild complications
+ def detect_toolset(self) -> None:
+ pass
+
def get_target_private_dir(self, target):
return os.path.join(self.get_target_dir(target), target.get_id())
@@ -227,6 +230,7 @@ class Vs2010Backend(backends.Backend):
# Check for (currently) unexpected capture arg use cases -
if capture:
raise MesonBugException('We do not expect any vs backend to generate with \'capture = True\'')
+ self.detect_toolset()
host_machine = self.environment.machines.host.cpu_family
if host_machine in {'64', 'x86_64'}:
# amd64 or x86_64
@@ -619,7 +623,8 @@ class Vs2010Backend(backends.Backend):
conftype='Utility',
target_ext=None,
target_platform=None,
- gen_manifest=True) -> T.Tuple[ET.Element, ET.Element]:
+ gen_manifest=True,
+ masm_type: T.Optional[T.Literal['masm', 'marmasm']] = None) -> T.Tuple[ET.Element, ET.Element]:
root = ET.Element('Project', {'DefaultTargets': "Build",
'ToolsVersion': '4.0',
'xmlns': 'http://schemas.microsoft.com/developer/msbuild/2003'})
@@ -657,6 +662,13 @@ class Vs2010Backend(backends.Backend):
# "The build tools for v142 (Platform Toolset = 'v142') cannot be found. ... please install v142 build tools."
# This is extremely unhelpful and misleading since the v14x build tools ARE installed.
ET.SubElement(root, 'Import', Project=r'$(VCTargetsPath)\Microsoft.Cpp.props')
+ ext_settings_grp = ET.SubElement(root, 'ImportGroup', Label='ExtensionSettings')
+ if masm_type:
+ ET.SubElement(
+ ext_settings_grp,
+ 'Import',
+ Project=rf'$(VCTargetsPath)\BuildCustomizations\{masm_type}.props',
+ )
# This attribute makes sure project names are displayed as expected in solution files even when their project file names differ
pname = ET.SubElement(globalgroup, 'ProjectName')
@@ -692,9 +704,11 @@ class Vs2010Backend(backends.Backend):
if target_ext:
ET.SubElement(direlem, 'TargetExt').text = target_ext
- ET.SubElement(direlem, 'EmbedManifest').text = 'false'
- if not gen_manifest:
- ET.SubElement(direlem, 'GenerateManifest').text = 'false'
+ # Work around a spurious mt.exe error:
+ # mt.exe tries to compile a non-existent .generated.manifest file and link it
+ # with the target. This only happens when the masm props are imported.
+ ET.SubElement(direlem, 'EmbedManifest').text = 'true' if masm_type or gen_manifest == 'embed' else 'false'
+ ET.SubElement(direlem, 'GenerateManifest').text = 'true' if gen_manifest else 'false'
return (root, type_config)
@@ -775,12 +789,19 @@ class Vs2010Backend(backends.Backend):
platform = self.build_platform
else:
platform = self.platform
+
+ masm = self.get_masm_type(target)
+
(root, type_config) = self.create_basic_project(target.name,
temp_dir=target.get_id(),
guid=guid,
target_platform=platform,
- gen_manifest=self.get_gen_manifest(target))
+ gen_manifest=self.get_gen_manifest(target),
+ masm_type=masm)
ET.SubElement(root, 'Import', Project=r'$(VCTargetsPath)\Microsoft.Cpp.targets')
+ ext_tgt_grp = ET.SubElement(root, 'ImportGroup', Label='ExtensionTargets')
+ if masm:
+ ET.SubElement(ext_tgt_grp, 'Import', Project=rf'$(VCTargetsPath)\BuildCustomizations\{masm}.targets')
target.generated = [self.compile_target_to_generator(target)]
target.sources = []
self.generate_custom_generator_commands(target, root)
@@ -795,6 +816,8 @@ class Vs2010Backend(backends.Backend):
return 'c'
if ext in compilers.cpp_suffixes:
return 'cpp'
+ if ext in compilers.lang_suffixes['masm']:
+ return 'masm'
raise MesonException(f'Could not guess language from source file {src}.')
def add_pch(self, pch_sources, lang, inc_cl):
@@ -956,13 +979,13 @@ class Vs2010Backend(backends.Backend):
other.append(arg)
return lpaths, libs, other
- def _get_cl_compiler(self, target):
+ def _get_cl_compiler(self, target: build.BuildTarget):
for lang, c in target.compilers.items():
if lang in {'c', 'cpp'}:
return c
- # No source files, only objects, but we still need a compiler, so
+ # No C/C++ source files, only objects/assembly source, but we still need a compiler, so
# return a found compiler
- if len(target.objects) > 0:
+ if len(target.objects) > 0 or len(target.sources) > 0:
for lang, c in self.environment.coredata.compilers[target.for_machine].items():
if lang in {'c', 'cpp'}:
return c
@@ -1493,8 +1516,9 @@ class Vs2010Backend(backends.Backend):
additional_links.append(self.relpath(lib, self.get_target_dir(target)))
if len(extra_link_args) > 0:
- extra_link_args.append('%(AdditionalOptions)')
- ET.SubElement(link, "AdditionalOptions").text = ' '.join(extra_link_args)
+ args = [self.escape_additional_option(arg) for arg in extra_link_args]
+ args.append('%(AdditionalOptions)')
+ ET.SubElement(link, "AdditionalOptions").text = ' '.join(args)
if len(additional_libpaths) > 0:
additional_libpaths.insert(0, '%(AdditionalLibraryDirectories)')
ET.SubElement(link, 'AdditionalLibraryDirectories').text = ';'.join(additional_libpaths)
@@ -1607,6 +1631,8 @@ class Vs2010Backend(backends.Backend):
else:
platform = self.platform
+ masm = self.get_masm_type(target)
+
tfilename = os.path.splitext(target.get_filename())
(root, type_config) = self.create_basic_project(tfilename[0],
@@ -1615,7 +1641,8 @@ class Vs2010Backend(backends.Backend):
conftype=conftype,
target_ext=tfilename[1],
target_platform=platform,
- gen_manifest=self.get_gen_manifest(target))
+ gen_manifest=self.get_gen_manifest(target),
+ masm_type=masm)
generated_files, custom_target_output_files, generated_files_include_dirs = self.generate_custom_generator_commands(
target, root)
@@ -1719,12 +1746,17 @@ class Vs2010Backend(backends.Backend):
for s in sources:
relpath = os.path.join(proj_to_build_root, s.rel_to_builddir(self.build_to_src))
if path_normalize_add(relpath, previous_sources):
- inc_cl = ET.SubElement(inc_src, 'CLCompile', Include=relpath)
+ lang = Vs2010Backend.lang_from_source_file(s)
+ if lang == 'masm' and masm:
+ inc_cl = ET.SubElement(inc_src, masm.upper(), Include=relpath)
+ else:
+ inc_cl = ET.SubElement(inc_src, 'CLCompile', Include=relpath)
+
if self.gen_lite:
self.add_project_nmake_defs_incs_and_opts(inc_cl, relpath, defs_paths_opts_per_lang_and_buildtype, platform)
else:
- lang = Vs2010Backend.lang_from_source_file(s)
- self.add_pch(pch_sources, lang, inc_cl)
+ if lang != 'masm':
+ self.add_pch(pch_sources, lang, inc_cl)
self.add_additional_options(lang, inc_cl, file_args)
self.add_preprocessor_defines(lang, inc_cl, file_defines)
self.add_include_dirs(lang, inc_cl, file_inc_dirs)
@@ -1732,12 +1764,17 @@ class Vs2010Backend(backends.Backend):
self.object_filename_from_source(target, compiler, s)
for s in gen_src:
if path_normalize_add(s, previous_sources):
- inc_cl = ET.SubElement(inc_src, 'CLCompile', Include=s)
+ lang = Vs2010Backend.lang_from_source_file(s)
+ if lang == 'masm' and masm:
+ inc_cl = ET.SubElement(inc_src, masm.upper(), Include=s)
+ else:
+ inc_cl = ET.SubElement(inc_src, 'CLCompile', Include=s)
+
if self.gen_lite:
self.add_project_nmake_defs_incs_and_opts(inc_cl, s, defs_paths_opts_per_lang_and_buildtype, platform)
else:
- lang = Vs2010Backend.lang_from_source_file(s)
- self.add_pch(pch_sources, lang, inc_cl)
+ if lang != 'masm':
+ self.add_pch(pch_sources, lang, inc_cl)
self.add_additional_options(lang, inc_cl, file_args)
self.add_preprocessor_defines(lang, inc_cl, file_defines)
self.add_include_dirs(lang, inc_cl, file_inc_dirs)
@@ -1786,6 +1823,9 @@ class Vs2010Backend(backends.Backend):
ET.SubElement(inc_objs, 'Object', Include=s)
ET.SubElement(root, 'Import', Project=r'$(VCTargetsPath)\Microsoft.Cpp.targets')
+ ext_tgt_grp = ET.SubElement(root, 'ImportGroup', Label='ExtensionTargets')
+ if masm:
+ ET.SubElement(ext_tgt_grp, 'Import', Project=rf'$(VCTargetsPath)\BuildCustomizations\{masm}.targets')
self.add_regen_dependency(root)
if not self.gen_lite:
# Injecting further target dependencies into this vcxproj implies and forces a Visual Studio BUILD dependency,
@@ -2096,7 +2136,8 @@ class Vs2010Backend(backends.Backend):
pass
# Returns if a target generates a manifest or not.
- def get_gen_manifest(self, target):
+ # Returns 'embed' if the generated manifest is embedded.
+ def get_gen_manifest(self, target: T.Optional[build.BuildTarget]):
if not isinstance(target, build.BuildTarget):
return True
@@ -2113,6 +2154,31 @@ class Vs2010Backend(backends.Backend):
arg = arg.upper()
if arg == '/MANIFEST:NO':
return False
+ if arg.startswith('/MANIFEST:EMBED'):
+ return 'embed'
if arg == '/MANIFEST' or arg.startswith('/MANIFEST:'):
break
return True
+
+ # FIXME: add a way to distinguish between arm64ec+marmasm (written in ARM assembly)
+ # and arm64ec+masm (written in x64 assembly).
+ #
+ # For now, assume the native flavor. (same behavior as ninja backend)
+ def get_masm_type(self, target: build.BuildTarget):
+ if not isinstance(target, build.BuildTarget):
+ return None
+
+ if 'masm' not in target.compilers:
+ return None
+
+ if target.for_machine == MachineChoice.BUILD:
+ platform = self.build_platform
+ elif target.for_machine == MachineChoice.HOST:
+ platform = self.platform
+ else:
+ return None
+
+ if platform in {'ARM', 'arm64', 'arm64ec'}:
+ return 'marmasm'
+ else:
+ return 'masm'
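[Editor's note: get_masm_type() boils down to a platform-to-build-customization mapping: ARM platforms use the marmasm props/targets, x86 and x64 use masm. A tiny sketch of that mapping in isolation:]

    import typing as T

    def masm_props_for(platform: T.Optional[str]) -> T.Optional[str]:
        # ARM targets use the marmasm build customization, x86/x64 use masm.
        # (arm64ec is assumed to use the native ARM assembler for now.)
        if platform is None:
            return None
        return 'marmasm' if platform in {'ARM', 'arm64', 'arm64ec'} else 'masm'

    assert masm_props_for('x64') == 'masm'
    assert masm_props_for('arm64') == 'marmasm'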
diff --git a/mesonbuild/backend/vs2012backend.py b/mesonbuild/backend/vs2012backend.py
index 307964b..922cd60 100644
--- a/mesonbuild/backend/vs2012backend.py
+++ b/mesonbuild/backend/vs2012backend.py
@@ -21,6 +21,8 @@ class Vs2012Backend(Vs2010Backend):
self.vs_version = '2012'
self.sln_file_version = '12.00'
self.sln_version_comment = '2012'
+
+ def detect_toolset(self) -> None:
if self.environment is not None:
# TODO: we assume host == build
comps = self.environment.coredata.compilers.host
diff --git a/mesonbuild/backend/vs2013backend.py b/mesonbuild/backend/vs2013backend.py
index ae0b68b..cf5d598 100644
--- a/mesonbuild/backend/vs2013backend.py
+++ b/mesonbuild/backend/vs2013backend.py
@@ -20,6 +20,8 @@ class Vs2013Backend(Vs2010Backend):
self.vs_version = '2013'
self.sln_file_version = '12.00'
self.sln_version_comment = '2013'
+
+ def detect_toolset(self) -> None:
if self.environment is not None:
# TODO: we assume host == build
comps = self.environment.coredata.compilers.host
diff --git a/mesonbuild/backend/vs2015backend.py b/mesonbuild/backend/vs2015backend.py
index 4c515cc..1862def 100644
--- a/mesonbuild/backend/vs2015backend.py
+++ b/mesonbuild/backend/vs2015backend.py
@@ -21,6 +21,8 @@ class Vs2015Backend(Vs2010Backend):
self.vs_version = '2015'
self.sln_file_version = '12.00'
self.sln_version_comment = '14'
+
+ def detect_toolset(self) -> None:
if self.environment is not None:
# TODO: we assume host == build
comps = self.environment.coredata.compilers.host
diff --git a/mesonbuild/backend/vs2017backend.py b/mesonbuild/backend/vs2017backend.py
index 393544f..372e1ce 100644
--- a/mesonbuild/backend/vs2017backend.py
+++ b/mesonbuild/backend/vs2017backend.py
@@ -24,6 +24,8 @@ class Vs2017Backend(Vs2010Backend):
self.vs_version = '2017'
self.sln_file_version = '12.00'
self.sln_version_comment = '15'
+
+ def detect_toolset(self) -> None:
# We assume that host == build
if self.environment is not None:
comps = self.environment.coredata.compilers.host
diff --git a/mesonbuild/backend/vs2019backend.py b/mesonbuild/backend/vs2019backend.py
index 4d6e226..61ad75d 100644
--- a/mesonbuild/backend/vs2019backend.py
+++ b/mesonbuild/backend/vs2019backend.py
@@ -22,6 +22,8 @@ class Vs2019Backend(Vs2010Backend):
super().__init__(build, interpreter)
self.sln_file_version = '12.00'
self.sln_version_comment = 'Version 16'
+
+ def detect_toolset(self) -> None:
if self.environment is not None:
comps = self.environment.coredata.compilers.host
if comps and all(c.id == 'clang-cl' for c in comps.values()):
diff --git a/mesonbuild/backend/vs2022backend.py b/mesonbuild/backend/vs2022backend.py
index 27e0438..ca449a4 100644
--- a/mesonbuild/backend/vs2022backend.py
+++ b/mesonbuild/backend/vs2022backend.py
@@ -22,6 +22,8 @@ class Vs2022Backend(Vs2010Backend):
super().__init__(build, interpreter, gen_lite=gen_lite)
self.sln_file_version = '12.00'
self.sln_version_comment = 'Version 17'
+
+ def detect_toolset(self) -> None:
if self.environment is not None:
comps = self.environment.coredata.compilers.host
if comps and all(c.id == 'clang-cl' for c in comps.values()):
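[Editor's note: across all the VS backends, compiler-dependent toolset selection moves out of __init__ into a detect_toolset() hook that Vs2010Backend.generate() calls once the environment is usable. A minimal sketch of this template-method pattern (class names here are hypothetical, not the real backend classes):]

    class BaseBackend:
        def __init__(self) -> None:
            # Compilers are not reliably known yet at construction time.
            self.platform_toolset = ''

        def detect_toolset(self) -> None:
            # Default hook: nothing to detect; version-specific subclasses override.
            pass

        def generate(self) -> None:
            self.detect_toolset()   # environment/compilers are available here
            ...

    class Vs2022LikeBackend(BaseBackend):
        def detect_toolset(self) -> None:
            # e.g. choose 'ClangCL' when every host compiler is clang-cl,
            # otherwise fall back to the default toolset for this VS version.
            self.platform_toolset = 'v143'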
diff --git a/mesonbuild/backend/xcodebackend.py b/mesonbuild/backend/xcodebackend.py
index 587404a..e7bd487 100644
--- a/mesonbuild/backend/xcodebackend.py
+++ b/mesonbuild/backend/xcodebackend.py
@@ -176,6 +176,15 @@ class PbxDict:
self.keys.add(key)
self.items.append(item)
+ def get_item(self, key: str) -> T.Optional[PbxDictItem]:
+ assert key in self.keys
+ for item in self.items:
+ if not isinstance(item, PbxDictItem):
+ continue
+ if item.key == key:
+ return item
+ return None
+
def has_item(self, key: str) -> bool:
return key in self.keys
@@ -396,10 +405,23 @@ class XCodeBackend(backends.Backend):
def generate_filemap(self) -> None:
self.filemap = {} # Key is source file relative to src root.
+ self.foldermap = {}
self.target_filemap = {}
for name, t in self.build_targets.items():
for s in t.sources:
if isinstance(s, mesonlib.File):
+ if '/' in s.fname:
+ # From the top level down, add the folders containing the source file.
+ folder = os.path.split(os.path.dirname(s.fname))
+ while folder:
+ fpath = os.path.join(*folder)
+ # Multiple targets might use the same folders, so store their targets with them.
+ # Otherwise, folders and their source files will appear in the wrong places in Xcode.
+ if (fpath, t) not in self.foldermap:
+ self.foldermap[(fpath, t)] = self.gen_id()
+ else:
+ break
+ folder = folder[:-1]
s = os.path.join(s.subdir, s.fname)
self.filemap[s] = self.gen_id()
for o in t.objects:
@@ -1052,6 +1074,24 @@ class XCodeBackend(backends.Backend):
main_children.add_item(frameworks_id, 'Frameworks')
main_dict.add_item('sourceTree', '<group>')
+ # Define each folder as a group in Xcode. That way, it can build the file tree correctly.
+ # This must be done before the project tree group is generated, as source files are added during that phase.
+ for (path, target), id in self.foldermap.items():
+ folder_dict = PbxDict()
+ objects_dict.add_item(id, folder_dict, path)
+ folder_dict.add_item('isa', 'PBXGroup')
+ folder_children = PbxArray()
+ folder_dict.add_item('children', folder_children)
+ folder_dict.add_item('name', '"{}"'.format(path.rsplit('/', 1)[-1]))
+ folder_dict.add_item('path', f'"{path}"')
+ folder_dict.add_item('sourceTree', 'SOURCE_ROOT')
+
+ # Add any detected subdirectories (not declared via subdir()) here, but only one level deeper.
+ # Example: In "root", add "root/sub", but not "root/sub/subtwo".
+ for path_dep, target_dep in self.foldermap:
+ if path_dep.startswith(path) and path_dep.split('/', 1)[0] == path.split('/', 1)[0] and path_dep != path and path_dep.count('/') == path.count('/') + 1 and target == target_dep:
+ folder_children.add_item(self.foldermap[(path_dep, target)], path_dep)
+
self.add_projecttree(objects_dict, projecttree_id)
resource_dict = PbxDict()
@@ -1121,6 +1161,7 @@ class XCodeBackend(backends.Backend):
tid = t.get_id()
group_id = self.gen_id()
target_dict = PbxDict()
+ folder_ids = set()
objects_dict.add_item(group_id, target_dict, tid)
target_dict.add_item('isa', 'PBXGroup')
target_children = PbxArray()
@@ -1130,6 +1171,18 @@ class XCodeBackend(backends.Backend):
source_files_dict = PbxDict()
for s in t.sources:
if isinstance(s, mesonlib.File):
+ # If the file is in a folder, add it to the group representing that folder.
+ if '/' in s.fname:
+ folder = '/'.join(s.fname.split('/')[:-1])
+ folder_dict = objects_dict.get_item(self.foldermap[(folder, t)]).value.get_item('children').value
+ temp = os.path.join(s.subdir, s.fname)
+ folder_dict.add_item(self.fileref_ids[(tid, temp)], temp)
+ if self.foldermap[(folder, t)] in folder_ids:
+ continue
+ if len(folder.split('/')) == 1:
+ target_children.add_item(self.foldermap[(folder, t)], folder)
+ folder_ids.add(self.foldermap[(folder, t)])
+ continue
s = os.path.join(s.subdir, s.fname)
elif isinstance(s, str):
s = os.path.join(t.subdir, s)
@@ -1596,6 +1649,7 @@ class XCodeBackend(backends.Backend):
headerdirs = []
bridging_header = ""
is_swift = self.is_swift_target(target)
+ langs = set()
for d in target.include_dirs:
for sd in d.incdirs:
cd = os.path.join(d.curdir, sd)
@@ -1715,6 +1769,7 @@ class XCodeBackend(backends.Backend):
lang = 'c'
elif lang == 'objcpp':
lang = 'cpp'
+ langs.add(lang)
langname = LANGNAMEMAP[lang]
langargs.setdefault(langname, [])
langargs[langname] = cargs + cti_args + args
@@ -1776,6 +1831,8 @@ class XCodeBackend(backends.Backend):
settings_dict.add_item('SECTORDER_FLAGS', '')
if is_swift and bridging_header:
settings_dict.add_item('SWIFT_OBJC_BRIDGING_HEADER', bridging_header)
+ if self.objversion >= 60 and target.uses_swift_cpp_interop():
+ settings_dict.add_item('SWIFT_OBJC_INTEROP_MODE', 'objcxx')
settings_dict.add_item('BUILD_DIR', symroot)
settings_dict.add_item('OBJROOT', f'{symroot}/build')
sysheader_arr = PbxArray()
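[Editor's note: the foldermap logic above registers each folder that contains a source file, walking upwards through its ancestors and stopping early once an already-registered folder is reached. A simplified per-target sketch of that walk, with integer counters instead of Xcode object ids and plain paths instead of (path, target) keys:]

    import itertools
    import os
    import typing as T

    def build_foldermap(sources: T.List[str]) -> T.Dict[str, int]:
        ids = itertools.count()
        foldermap: T.Dict[str, int] = {}
        for fname in sources:
            folder = os.path.dirname(fname)
            # Register the folder and each of its ancestors, stopping once
            # an already-registered folder is reached.
            while folder and folder not in foldermap:
                foldermap[folder] = next(ids)
                folder = os.path.dirname(folder)
        return foldermap

    assert set(build_foldermap(['a/b/c.c', 'a/d.c'])) == {'a/b', 'a'}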
diff --git a/mesonbuild/build.py b/mesonbuild/build.py
index 7320b88..2adfb98 100644
--- a/mesonbuild/build.py
+++ b/mesonbuild/build.py
@@ -24,16 +24,16 @@ from .mesonlib import (
File, MesonException, MachineChoice, PerMachine, OrderedSet, listify,
extract_as_list, typeslistify, stringlistify, classify_unity_sources,
get_filenames_templates_dict, substitute_values, has_path_sep,
- is_parent_path, PerMachineDefaultable,
+ is_parent_path, relpath, PerMachineDefaultable,
MesonBugException, EnvironmentVariables, pickle_load, lazy_property,
)
from .options import OptionKey
from .compilers import (
is_header, is_object, is_source, clink_langs, sort_clink, all_languages,
- is_known_suffix, detect_static_linker
+ is_known_suffix, detect_static_linker, LANGUAGES_USING_LDFLAGS
)
-from .interpreterbase import FeatureNew, FeatureDeprecated
+from .interpreterbase import FeatureNew, FeatureDeprecated, UnknownValue
if T.TYPE_CHECKING:
from typing_extensions import Literal, TypedDict
@@ -75,6 +75,7 @@ lang_arg_kwargs |= {
vala_kwargs = {'vala_header', 'vala_gir', 'vala_vapi'}
rust_kwargs = {'rust_crate_type', 'rust_dependency_map'}
cs_kwargs = {'resources', 'cs_args'}
+swift_kwargs = {'swift_interoperability_mode', 'swift_module_name'}
buildtarget_kwargs = {
'build_by_default',
@@ -110,7 +111,8 @@ known_build_target_kwargs = (
pch_kwargs |
vala_kwargs |
rust_kwargs |
- cs_kwargs)
+ cs_kwargs |
+ swift_kwargs)
known_exe_kwargs = known_build_target_kwargs | {'implib', 'export_dynamic', 'pie', 'vs_module_defs', 'android_exe_type'}
known_shlib_kwargs = known_build_target_kwargs | {'version', 'soversion', 'vs_module_defs', 'darwin_versions', 'rust_abi'}
@@ -275,7 +277,7 @@ class Build:
self.stdlibs = PerMachine({}, {})
self.test_setups: T.Dict[str, TestSetup] = {}
self.test_setup_default_name = None
- self.find_overrides: T.Dict[str, T.Union['Executable', programs.ExternalProgram, programs.OverrideProgram]] = {}
+ self.find_overrides: T.Dict[str, T.Union['OverrideExecutable', programs.ExternalProgram, programs.OverrideProgram]] = {}
self.searched_programs: T.Set[str] = set() # The list of all programs that have been searched for.
# If we are doing a cross build we need two caches, if we're doing a
@@ -648,7 +650,7 @@ class Target(HoldableObject, metaclass=abc.ABCMeta):
def process_kwargs_base(self, kwargs: T.Dict[str, T.Any]) -> None:
if 'build_by_default' in kwargs:
self.build_by_default = kwargs['build_by_default']
- if not isinstance(self.build_by_default, bool):
+ if not isinstance(self.build_by_default, (bool, UnknownValue)):
raise InvalidArguments('build_by_default must be a boolean value.')
if not self.build_by_default and kwargs.get('install', False):
@@ -656,29 +658,11 @@ class Target(HoldableObject, metaclass=abc.ABCMeta):
# set, use the value of 'install' if it's enabled.
self.build_by_default = True
- self.raw_overrides = self.parse_overrides(kwargs)
+ self.raw_overrides = kwargs.get('override_options', {})
def get_override(self, name: str) -> T.Optional[str]:
return self.raw_overrides.get(name, None)
- @staticmethod
- def parse_overrides(kwargs: T.Dict[str, T.Any]) -> T.Dict[str, str]:
- opts = kwargs.get('override_options', [])
-
- # In this case we have an already parsed and ready to go dictionary
- # provided by typed_kwargs
- if isinstance(opts, dict):
- return T.cast('T.Dict[OptionKey, str]', opts)
-
- result: T.Dict[str, str] = {}
- overrides = stringlistify(opts)
- for o in overrides:
- if '=' not in o:
- raise InvalidArguments('Overrides must be of form "key=value"')
- k, v = o.split('=', 1)
- result[k] = v
- return result
-
def is_linkable_target(self) -> bool:
return False
@@ -787,14 +771,23 @@ class BuildTarget(Target):
''' Initialisations and checks requiring the final list of compilers to be known
'''
self.validate_sources()
- if self.structured_sources and any([self.sources, self.generated]):
- raise MesonException('cannot mix structured sources and unstructured sources')
- if self.structured_sources and 'rust' not in self.compilers:
- raise MesonException('structured sources are only supported in Rust targets')
if self.uses_rust():
+ if self.link_language and self.link_language != 'rust':
+ raise MesonException('cannot build Rust sources with a different link_language')
+ if self.structured_sources:
+ # TODO: the interpreter should be able to generate a better error message?
+ if any((s.endswith('.rs') for s in self.sources)) or \
+ any(any((s.endswith('.rs') for s in g.get_outputs())) for g in self.generated):
+ raise MesonException('cannot mix Rust structured sources and unstructured sources')
+
# relocation-model=pic is rustc's default and Meson does not
# currently have a way to disable PIC.
self.pic = True
+ self.pie = True
+ else:
+ if self.structured_sources:
+ raise MesonException('structured sources are only supported in Rust targets')
+
if 'vala' in self.compilers and self.is_linkable_target():
self.outputs += [self.vala_header, self.vala_vapi]
self.install_tag += ['devel', 'devel']
@@ -896,6 +889,10 @@ class BuildTarget(Target):
if isinstance(t, (CustomTarget, CustomTargetIndex)):
continue # We can't know anything about these.
for name, compiler in t.compilers.items():
+ if name == 'rust':
+ # Rust is always linked through a C-ABI target, so do not add
+ # the compiler here
+ continue
if name in link_langs and name not in self.compilers:
self.compilers[name] = compiler
@@ -981,7 +978,7 @@ class BuildTarget(Target):
self.compilers[lang] = compiler
break
else:
- if is_known_suffix(s):
+ if is_known_suffix(s) and not is_header(s):
path = pathlib.Path(str(s)).as_posix()
m = f'No {self.for_machine.get_lower_case_name()} machine compiler for {path!r}'
raise MesonException(m)
@@ -1278,6 +1275,12 @@ class BuildTarget(Target):
raise InvalidArguments(f'Invalid rust_dependency_map "{rust_dependency_map}": must be a dictionary with string values.')
self.rust_dependency_map = rust_dependency_map
+ self.swift_interoperability_mode = kwargs.get('swift_interoperability_mode')
+
+ self.swift_module_name = kwargs.get('swift_module_name')
+ if self.swift_module_name == '':
+ self.swift_module_name = self.name
+
def _extract_pic_pie(self, kwargs: T.Dict[str, T.Any], arg: str, option: str) -> bool:
# Check if we have -fPIC, -fpic, -fPIE, or -fpie in cflags
all_flags = self.extra_args['c'] + self.extra_args['cpp']
@@ -1375,6 +1378,10 @@ class BuildTarget(Target):
deps = listify(deps)
for dep in deps:
if dep in self.added_deps:
+ # An equal dep may already be stored; when this one is named, prefer it, so swap it in
+ if dep.is_named():
+ self.added_deps.remove(dep)
+ self.added_deps.add(dep)
continue
if isinstance(dep, dependencies.InternalDependency):
@@ -1603,6 +1610,9 @@ class BuildTarget(Target):
if isinstance(link_target, (CustomTarget, CustomTargetIndex)):
continue
for language in link_target.compilers:
+ if language == 'rust' and not link_target.uses_rust_abi():
+ # All Rust dependencies must go through a C-ABI dependency, so ignore it
+ continue
if language not in langs:
langs.append(language)
@@ -1694,6 +1704,9 @@ class BuildTarget(Target):
def uses_fortran(self) -> bool:
return 'fortran' in self.compilers
+ def uses_swift_cpp_interop(self) -> bool:
+ return self.swift_interoperability_mode == 'cpp' and 'swift' in self.compilers
+
def get_using_msvc(self) -> bool:
'''
Check if the dynamic linker is MSVC. Used by Executable, StaticLibrary,
@@ -1788,6 +1801,121 @@ class BuildTarget(Target):
"""Base case used by BothLibraries"""
return self
+ def determine_rpath_dirs(self) -> T.Tuple[str, ...]:
+ result: OrderedSet[str]
+ if self.environment.coredata.optstore.get_value_for(OptionKey('layout')) == 'mirror':
+ # Need a copy here
+ result = OrderedSet(self.get_link_dep_subdirs())
+ else:
+ result = OrderedSet()
+ result.add('meson-out')
+ result.update(self.rpaths_for_non_system_absolute_shared_libraries())
+ self.rpath_dirs_to_remove.update([d.encode('utf-8') for d in result])
+ return tuple(result)
+
+ @lru_cache(maxsize=None)
+ def rpaths_for_non_system_absolute_shared_libraries(self, exclude_system: bool = True) -> ImmutableListProtocol[str]:
+ paths: OrderedSet[str] = OrderedSet()
+ srcdir = self.environment.get_source_dir()
+
+ system_dirs = set()
+ if exclude_system:
+ for cc in self.compilers.values():
+ system_dirs.update(cc.get_library_dirs(self.environment))
+
+ external_rpaths = self.get_external_rpath_dirs()
+ build_to_src = relpath(self.environment.get_source_dir(),
+ self.environment.get_build_dir())
+
+ for dep in self.external_deps:
+ if dep.type_name not in {'library', 'pkgconfig', 'cmake'}:
+ continue
+ for libpath in dep.link_args:
+ if libpath.startswith('-'):
+ continue
+ # For all link args that are absolute paths to a library file, add RPATH args
+ if not os.path.isabs(libpath):
+ continue
+ libdir, libname = os.path.split(libpath)
+ # Windows doesn't support rpaths, but we use this function to
+ # emulate rpaths by setting PATH
+ # .dll is there for mingw gcc
+ # .so's may be extended with version information, e.g. libxyz.so.1.2.3
+ if not (
+ libname.endswith(('.dll', '.lib', '.so', '.dylib'))
+ or '.so.' in libname
+ ):
+ continue
+
+ # Don't remove rpaths specified in LDFLAGS.
+ if libdir in external_rpaths:
+ continue
+ if system_dirs and os.path.normpath(libdir) in system_dirs:
+ # No point in adding system paths.
+ continue
+
+ if is_parent_path(srcdir, libdir):
+ rel_to_src = libdir[len(srcdir) + 1:]
+ assert not os.path.isabs(rel_to_src), f'rel_to_src: {rel_to_src} is absolute'
+ paths.add(os.path.join(build_to_src, rel_to_src))
+ else:
+ paths.add(libdir)
+ # Don't remove rpaths specified by the dependency
+ paths.difference_update(self.get_rpath_dirs_from_link_args(dep.link_args))
+ for i in itertools.chain(self.link_targets, self.link_whole_targets):
+ if isinstance(i, BuildTarget):
+ paths.update(i.rpaths_for_non_system_absolute_shared_libraries(exclude_system))
+ return list(paths)
+
+ def get_external_rpath_dirs(self) -> T.Set[str]:
+ args: T.List[str] = []
+ for lang in LANGUAGES_USING_LDFLAGS:
+ try:
+ args += self.environment.coredata.get_external_link_args(self.for_machine, lang)
+ except KeyError:
+ pass
+ return self.get_rpath_dirs_from_link_args(args)
+
+ # Match rpath formats:
+ # -Wl,-rpath=
+ # -Wl,-rpath,
+ _rpath_regex = re.compile(r'-Wl,-rpath[=,]([^,]+)')
+ # Match Solaris-style compat runpath formats:
+ # -Wl,-R
+ # -Wl,-R,
+ _runpath_regex = re.compile(r'-Wl,-R[,]?([^,]+)')
+ # Match symbols formats:
+ # -Wl,--just-symbols=
+ # -Wl,--just-symbols,
+ _symbols_regex = re.compile(r'-Wl,--just-symbols[=,]([^,]+)')
+
+ @classmethod
+ def get_rpath_dirs_from_link_args(cls, args: T.List[str]) -> T.Set[str]:
+ dirs: T.Set[str] = set()
+
+ for arg in args:
+ if not arg.startswith('-Wl,'):
+ continue
+
+ rpath_match = cls._rpath_regex.match(arg)
+ if rpath_match:
+ for dir in rpath_match.group(1).split(':'):
+ dirs.add(dir)
+ runpath_match = cls._runpath_regex.match(arg)
+ if runpath_match:
+ for dir in runpath_match.group(1).split(':'):
+ # -Wl,-R is an rpath only if the path is a directory (a plain file means --just-symbols)
+ if os.path.isdir(dir):
+ dirs.add(dir)
+ symbols_match = cls._symbols_regex.match(arg)
+ if symbols_match:
+ for dir in symbols_match.group(1).split(':'):
+ # Prevent usage of --just-symbols to specify rpath
+ if os.path.isdir(dir):
+ raise MesonException(f'Invalid arg for --just-symbols, {dir} is a directory.')
+ return dirs
+
+
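
For illustration, a standalone sketch of how the first two patterns pull directories out of linker args (it reuses the same regexes; the sample paths are hypothetical):

    import re

    _rpath_regex = re.compile(r'-Wl,-rpath[=,]([^,]+)')
    _runpath_regex = re.compile(r'-Wl,-R[,]?([^,]+)')

    for arg in ('-Wl,-rpath=/opt/lib:/usr/local/lib', '-Wl,-R/opt/lib'):
        m = _rpath_regex.match(arg) or _runpath_regex.match(arg)
        if m:
            # several directories may be packed into one arg, ':'-separated
            print(arg, '->', m.group(1).split(':'))
    # -Wl,-rpath=/opt/lib:/usr/local/lib -> ['/opt/lib', '/usr/local/lib']
    # -Wl,-R/opt/lib -> ['/opt/lib']

The real method additionally checks os.path.isdir() for -Wl,-R, since a plain file there means --just-symbols, and raises when --just-symbols itself points at a directory.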
class FileInTargetPrivateDir:
"""Represents a file with the path '/path/to/build/target_private_dir/fname'.
target_private_dir is the return value of get_target_private_dir which is e.g. 'subdir/target.p'.
@@ -2199,10 +2327,16 @@ class StaticLibrary(BuildTarget):
elif self.rust_crate_type == 'staticlib':
self.suffix = 'a'
else:
- if 'c' in self.compilers and self.compilers['c'].get_id() == 'tasking':
- self.suffix = 'ma' if self.options.get_value('b_lto') and not self.prelink else 'a'
- else:
- self.suffix = 'a'
+ self.suffix = 'a'
+ if 'c' in self.compilers and self.compilers['c'].get_id() == 'tasking' and not self.prelink:
+ key = OptionKey('b_lto', self.subproject, self.for_machine)
+ try:
+ v = self.environment.coredata.get_option_for_target(self, key)
+ except KeyError:
+ v = self.environment.coredata.optstore.get_value_for(key)
+ assert isinstance(v, bool), 'for mypy'
+ if v:
+ self.suffix = 'ma'
self.filename = self.prefix + self.name + '.' + self.suffix
self.outputs[0] = self.filename
@@ -2594,7 +2728,7 @@ class CommandBase:
subproject: str
def flatten_command(self, cmd: T.Sequence[T.Union[str, File, programs.ExternalProgram, BuildTargetTypes]]) -> \
- T.List[T.Union[str, File, BuildTarget, 'CustomTarget']]:
+ T.List[T.Union[str, File, BuildTarget, CustomTarget, programs.ExternalProgram]]:
cmd = listify(cmd)
final_cmd: T.List[T.Union[str, File, BuildTarget, 'CustomTarget']] = []
for c in cmd:
@@ -2611,7 +2745,8 @@ class CommandBase:
# Can only add a dependency on an external program which we
# know the absolute path of
self.depend_files.append(File.from_absolute_file(path))
- final_cmd += c.get_command()
+ # Do NOT flatten -- it is needed for later parsing
+ final_cmd.append(c)
elif isinstance(c, (BuildTarget, CustomTarget)):
self.dependencies.append(c)
final_cmd.append(c)
@@ -2681,6 +2816,7 @@ class CustomTarget(Target, CustomTargetBase, CommandBase):
install_dir: T.Optional[T.List[T.Union[str, Literal[False]]]] = None,
install_mode: T.Optional[FileMode] = None,
install_tag: T.Optional[T.List[T.Optional[str]]] = None,
+ rspable: bool = False,
absolute_paths: bool = False,
backend: T.Optional['Backend'] = None,
description: str = 'Generating {} with a custom command',
@@ -2713,6 +2849,9 @@ class CustomTarget(Target, CustomTargetBase, CommandBase):
# Whether to use absolute paths for all files on the commandline
self.absolute_paths = absolute_paths
+ # Whether to enable using response files for the underlying tool
+ self.rspable = rspable
+
def get_default_install_dir(self) -> T.Union[T.Tuple[str, str], T.Tuple[None, None]]:
return None, None
@@ -3129,6 +3268,18 @@ class ConfigurationData(HoldableObject):
def keys(self) -> T.Iterator[str]:
return self.values.keys()
+class OverrideExecutable(Executable):
+ def __init__(self, executable: Executable, version: str):
+ self._executable = executable
+ self._version = version
+
+ def __getattr__(self, name: str) -> T.Any:
+ _executable = object.__getattribute__(self, '_executable')
+ return getattr(_executable, name)
+
+ def get_version(self, interpreter: T.Optional[Interpreter] = None) -> str:
+ return self._version
+
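
OverrideExecutable forwards every attribute it does not define itself to the wrapped Executable; only get_version() is shadowed. A minimal standalone sketch of the delegation pattern (the class and names here are hypothetical):

    class VersionOverride:
        def __init__(self, inner: object, version: str) -> None:
            self._inner = inner
            self._version = version

        def __getattr__(self, name: str) -> object:
            # Only called for attributes not found by normal lookup, so
            # get_version() below wins over the inner object's method.
            return getattr(object.__getattribute__(self, '_inner'), name)

        def get_version(self) -> str:
            return self._version

    class Tool:
        name = 'mytool'

    exe = VersionOverride(Tool(), '1.2.3')
    print(exe.name, exe.get_version())  # mytool 1.2.3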
# A bit poorly named, but this represents plain data files to copy
# during install.
@dataclass(eq=False)
diff --git a/mesonbuild/cargo/cfg.py b/mesonbuild/cargo/cfg.py
index 0d49527..a0ee6e2 100644
--- a/mesonbuild/cargo/cfg.py
+++ b/mesonbuild/cargo/cfg.py
@@ -4,6 +4,7 @@
"""Rust CFG parser.
Rust uses its `cfg()` format in cargo.
+https://doc.rust-lang.org/reference/conditional-compilation.html
This may have the following functions:
- all()
@@ -22,18 +23,15 @@ so you could have examples like:
from __future__ import annotations
import dataclasses
import enum
-import functools
import typing as T
-from . import builder
-from .. import mparser
from ..mesonlib import MesonBugException
if T.TYPE_CHECKING:
_T = T.TypeVar('_T')
_LEX_TOKEN = T.Tuple['TokenType', T.Optional[str]]
- _LEX_STREAM = T.Iterable[_LEX_TOKEN]
+ _LEX_STREAM = T.Iterator[_LEX_TOKEN]
_LEX_STREAM_AH = T.Iterator[T.Tuple[_LEX_TOKEN, T.Optional[_LEX_TOKEN]]]
@@ -48,6 +46,7 @@ class TokenType(enum.Enum):
NOT = enum.auto()
COMMA = enum.auto()
EQUAL = enum.auto()
+ CFG = enum.auto()
def lexer(raw: str) -> _LEX_STREAM:
@@ -56,45 +55,41 @@ def lexer(raw: str) -> _LEX_STREAM:
:param raw: The raw cfg() expression
:return: An iterable of tokens
"""
- buffer: T.List[str] = []
+ start: int = 0
is_string: bool = False
- for s in raw:
- if s.isspace() or s in {')', '(', ',', '='} or (s == '"' and buffer):
- val = ''.join(buffer)
- buffer.clear()
- if is_string:
+ for i, s in enumerate(raw):
+ if s.isspace() or s in {')', '(', ',', '=', '"'}:
+ val = raw[start:i]
+ start = i + 1
+ if s == '"' and is_string:
yield (TokenType.STRING, val)
+ is_string = False
+ continue
elif val == 'any':
yield (TokenType.ANY, None)
elif val == 'all':
yield (TokenType.ALL, None)
elif val == 'not':
yield (TokenType.NOT, None)
+ elif val == 'cfg':
+ yield (TokenType.CFG, None)
elif val:
yield (TokenType.IDENTIFIER, val)
if s == '(':
yield (TokenType.LPAREN, None)
- continue
elif s == ')':
yield (TokenType.RPAREN, None)
- continue
elif s == ',':
yield (TokenType.COMMA, None)
- continue
elif s == '=':
yield (TokenType.EQUAL, None)
- continue
- elif s.isspace():
- continue
-
- if s == '"':
- is_string = not is_string
- else:
- buffer.append(s)
- if buffer:
+ elif s == '"':
+ is_string = True
+ val = raw[start:]
+ if val:
# This should always be an identifier
- yield (TokenType.IDENTIFIER, ''.join(buffer))
+ yield (TokenType.IDENTIFIER, val)
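
The rewritten lexer walks the input once and slices tokens straight out of the raw string instead of accumulating a character buffer, and a quoted value always lexes as a single STRING token. For example (assuming a Meson checkout on PYTHONPATH):

    from mesonbuild.cargo.cfg import lexer

    for token, value in lexer('any(unix, target_arch = "x86_64")'):
        print(token, value)
    # TokenType.ANY None
    # TokenType.LPAREN None
    # TokenType.IDENTIFIER unix
    # TokenType.COMMA None
    # TokenType.IDENTIFIER target_arch
    # TokenType.EQUAL None
    # TokenType.STRING x86_64
    # TokenType.RPAREN None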
def lookahead(iter: T.Iterator[_T]) -> T.Iterator[T.Tuple[_T, T.Optional[_T]]]:
@@ -146,8 +141,8 @@ class Identifier(IR):
@dataclasses.dataclass
class Equal(IR):
- lhs: IR
- rhs: IR
+ lhs: Identifier
+ rhs: String
@dataclasses.dataclass
@@ -175,41 +170,40 @@ def _parse(ast: _LEX_STREAM_AH) -> IR:
else:
ntoken, _ = (None, None)
- stream: T.List[_LEX_TOKEN]
if token is TokenType.IDENTIFIER:
+ assert value
+ id_ = Identifier(value)
if ntoken is TokenType.EQUAL:
- return Equal(Identifier(value), _parse(ast))
- if token is TokenType.STRING:
- return String(value)
- if token is TokenType.EQUAL:
- # In this case the previous caller already has handled the equal
- return _parse(ast)
- if token in {TokenType.ANY, TokenType.ALL}:
+ next(ast)
+ (token, value), _ = next(ast)
+ assert token is TokenType.STRING
+ assert value is not None
+ return Equal(id_, String(value))
+ return id_
+ elif token in {TokenType.ANY, TokenType.ALL}:
type_ = All if token is TokenType.ALL else Any
- assert ntoken is TokenType.LPAREN
- next(ast) # advance the iterator to get rid of the LPAREN
- stream = []
args: T.List[IR] = []
- while token is not TokenType.RPAREN:
+ (token, value), n_stream = next(ast)
+ assert token is TokenType.LPAREN
+ if n_stream and n_stream[0] == TokenType.RPAREN:
+ return type_(args)
+ while True:
+ args.append(_parse(ast))
(token, value), _ = next(ast)
- if token is TokenType.COMMA:
- args.append(_parse(lookahead(iter(stream))))
- stream.clear()
- else:
- stream.append((token, value))
- if stream:
- args.append(_parse(lookahead(iter(stream))))
+ if token is TokenType.RPAREN:
+ break
+ assert token is TokenType.COMMA
return type_(args)
- if token is TokenType.NOT:
- next(ast) # advance the iterator to get rid of the LPAREN
- stream = []
- # Mypy can't figure out that token is overridden inside the while loop
- while token is not TokenType.RPAREN: # type: ignore
- (token, value), _ = next(ast)
- stream.append((token, value))
- return Not(_parse(lookahead(iter(stream))))
-
- raise MesonBugException(f'Unhandled Cargo token: {token}')
+ elif token in {TokenType.NOT, TokenType.CFG}:
+ is_not = token is TokenType.NOT
+ (token, value), _ = next(ast)
+ assert token is TokenType.LPAREN
+ arg = _parse(ast)
+ (token, value), _ = next(ast)
+ assert token is TokenType.RPAREN
+ return Not(arg) if is_not else arg
+ else:
+ raise MesonBugException(f'Unhandled Cargo token: {token} {value}')
def parse(ast: _LEX_STREAM) -> IR:
@@ -218,57 +212,24 @@ def parse(ast: _LEX_STREAM) -> IR:
:param ast: An iterable of Tokens
:return: An mparser Node to be used as a conditional
"""
- ast_i: _LEX_STREAM_AH = lookahead(iter(ast))
+ ast_i: _LEX_STREAM_AH = lookahead(ast)
return _parse(ast_i)
-@functools.singledispatch
-def ir_to_meson(ir: T.Any, build: builder.Builder) -> mparser.BaseNode:
- raise NotImplementedError
-
-
-@ir_to_meson.register
-def _(ir: String, build: builder.Builder) -> mparser.BaseNode:
- return build.string(ir.value)
-
-
-@ir_to_meson.register
-def _(ir: Identifier, build: builder.Builder) -> mparser.BaseNode:
- host_machine = build.identifier('host_machine')
- if ir.value == "target_arch":
- return build.method('cpu_family', host_machine)
- elif ir.value in {"target_os", "target_family"}:
- return build.method('system', host_machine)
- elif ir.value == "target_endian":
- return build.method('endian', host_machine)
- raise MesonBugException(f"Unhandled Cargo identifier: {ir.value}")
-
-
-@ir_to_meson.register
-def _(ir: Equal, build: builder.Builder) -> mparser.BaseNode:
- return build.equal(ir_to_meson(ir.lhs, build), ir_to_meson(ir.rhs, build))
-
-
-@ir_to_meson.register
-def _(ir: Not, build: builder.Builder) -> mparser.BaseNode:
- return build.not_(ir_to_meson(ir.value, build))
-
-
-@ir_to_meson.register
-def _(ir: Any, build: builder.Builder) -> mparser.BaseNode:
- args = iter(reversed(ir.args))
- last = next(args)
- cur = build.or_(ir_to_meson(next(args), build), ir_to_meson(last, build))
- for a in args:
- cur = build.or_(ir_to_meson(a, build), cur)
- return cur
+def _eval_cfg(ir: IR, cfgs: T.Dict[str, str]) -> bool:
+ if isinstance(ir, Identifier):
+ return ir.value in cfgs
+ elif isinstance(ir, Equal):
+ return cfgs.get(ir.lhs.value) == ir.rhs.value
+ elif isinstance(ir, Not):
+ return not _eval_cfg(ir.value, cfgs)
+ elif isinstance(ir, Any):
+ return any(_eval_cfg(i, cfgs) for i in ir.args)
+ elif isinstance(ir, All):
+ return all(_eval_cfg(i, cfgs) for i in ir.args)
+ else:
+ raise MesonBugException(f'Unhandled Cargo cfg IR: {ir}')
-@ir_to_meson.register
-def _(ir: All, build: builder.Builder) -> mparser.BaseNode:
- args = iter(reversed(ir.args))
- last = next(args)
- cur = build.and_(ir_to_meson(next(args), build), ir_to_meson(last, build))
- for a in args:
- cur = build.and_(ir_to_meson(a, build), cur)
- return cur
+def eval_cfg(raw: str, cfgs: T.Dict[str, str]) -> bool:
+ return _eval_cfg(parse(lexer(raw)), cfgs)
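
Rather than translating cfg() expressions into Meson AST nodes, the parsed IR is now evaluated directly against the dictionary of rustc cfgs. A usage sketch (the cfg table here is made up; real values come from rustc --print cfg):

    from mesonbuild.cargo.cfg import eval_cfg

    cfgs = {'unix': '', 'target_os': 'linux', 'target_arch': 'x86_64'}
    print(eval_cfg('all(unix, target_arch = "x86_64")', cfgs))  # True
    print(eval_cfg('not(target_os = "windows")', cfgs))         # True
    print(eval_cfg('any(windows, target_env = "musl")', cfgs))  # False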
diff --git a/mesonbuild/cargo/interpreter.py b/mesonbuild/cargo/interpreter.py
index af272a8..a0d4371 100644
--- a/mesonbuild/cargo/interpreter.py
+++ b/mesonbuild/cargo/interpreter.py
@@ -11,439 +11,30 @@ port will be required.
from __future__ import annotations
import dataclasses
-import importlib
-import json
import os
-import shutil
import collections
import urllib.parse
import itertools
import typing as T
-from . import builder
-from . import version
-from ..mesonlib import MesonException, Popen_safe
+from . import builder, version, cfg
+from .toml import load_toml, TomlImplementationMissing
+from .manifest import Manifest, CargoLock, fixup_meson_varname
+from ..mesonlib import MesonException, MachineChoice
from .. import coredata, mlog
from ..wrap.wrap import PackageDefinition
if T.TYPE_CHECKING:
- from types import ModuleType
-
- from typing_extensions import Protocol, Self
-
- from . import manifest
+ from . import raw
from .. import mparser
+ from .manifest import Dependency, SystemDependency
from ..environment import Environment
from ..interpreterbase import SubProject
+ from ..compilers.rust import RustCompiler
- # Copied from typeshed. Blarg that they don't expose this
- class DataclassInstance(Protocol):
- __dataclass_fields__: T.ClassVar[dict[str, dataclasses.Field[T.Any]]]
-
- _UnknownKeysT = T.TypeVar('_UnknownKeysT', manifest.FixedPackage,
- manifest.FixedDependency, manifest.FixedLibTarget,
- manifest.FixedBuildTarget)
-
-
-# tomllib is present in python 3.11, before that it is a pypi module called tomli,
-# we try to import tomllib, then tomli,
-# TODO: add a fallback to toml2json?
-tomllib: T.Optional[ModuleType] = None
-toml2json: T.Optional[str] = None
-for t in ['tomllib', 'tomli']:
- try:
- tomllib = importlib.import_module(t)
- break
- except ImportError:
- pass
-else:
- # TODO: it would be better to use an Executable here, which could be looked
- # up in the cross file or provided by a wrap. However, that will have to be
- # passed in externally, since we don't have (and I don't think we should),
- # have access to the `Environment` for that in this module.
- toml2json = shutil.which('toml2json')
-
-
-_EXTRA_KEYS_WARNING = (
- "This may (unlikely) be an error in the cargo manifest, or may be a missing "
- "implementation in Meson. If this issue can be reproduced with the latest "
- "version of Meson, please help us by opening an issue at "
- "https://github.com/mesonbuild/meson/issues. Please include the crate and "
- "version that is generating this warning if possible."
-)
-
-class TomlImplementationMissing(MesonException):
- pass
-
-
-def load_toml(filename: str) -> T.Dict[object, object]:
- if tomllib:
- with open(filename, 'rb') as f:
- raw = tomllib.load(f)
- else:
- if toml2json is None:
- raise TomlImplementationMissing('Could not find an implementation of tomllib, nor toml2json')
-
- p, out, err = Popen_safe([toml2json, filename])
- if p.returncode != 0:
- raise MesonException('toml2json failed to decode output\n', err)
-
- raw = json.loads(out)
-
- if not isinstance(raw, dict):
- raise MesonException("Cargo.toml isn't a dictionary? How did that happen?")
-
- return raw
-
-
-def fixup_meson_varname(name: str) -> str:
- """Fixup a meson variable name
-
- :param name: The name to fix
- :return: the fixed name
- """
- return name.replace('-', '_')
-
-
-# Pylance can figure out that these do not, in fact, overlap, but mypy can't
-@T.overload
-def _fixup_raw_mappings(d: manifest.BuildTarget) -> manifest.FixedBuildTarget: ... # type: ignore
-
-@T.overload
-def _fixup_raw_mappings(d: manifest.LibTarget) -> manifest.FixedLibTarget: ... # type: ignore
-
-@T.overload
-def _fixup_raw_mappings(d: manifest.Dependency) -> manifest.FixedDependency: ...
-
-def _fixup_raw_mappings(d: T.Union[manifest.BuildTarget, manifest.LibTarget, manifest.Dependency]
- ) -> T.Union[manifest.FixedBuildTarget, manifest.FixedLibTarget,
- manifest.FixedDependency]:
- """Fixup raw cargo mappings to ones more suitable for python to consume.
-
- This does the following:
- * replaces any `-` with `_`, cargo likes the former, but python dicts make
- keys with `-` in them awkward to work with
- * Convert Dependency versions from the cargo format to something meson
- understands
-
- :param d: The mapping to fix
- :return: the fixed string
- """
- raw = {fixup_meson_varname(k): v for k, v in d.items()}
- if 'version' in raw:
- assert isinstance(raw['version'], str), 'for mypy'
- raw['version'] = version.convert(raw['version'])
- return T.cast('T.Union[manifest.FixedBuildTarget, manifest.FixedLibTarget, manifest.FixedDependency]', raw)
-
-
-def _handle_unknown_keys(data: _UnknownKeysT, cls: T.Union[DataclassInstance, T.Type[DataclassInstance]],
- msg: str) -> _UnknownKeysT:
- """Remove and warn on keys that are coming from cargo, but are unknown to
- our representations.
-
- This is intended to give users the possibility of things proceeding when a
- new key is added to Cargo.toml that we don't yet handle, but to still warn
- them that things might not work.
-
- :param data: The raw data to look at
- :param cls: The Dataclass derived type that will be created
- :param msg: the header for the error message. Usually something like "In N structure".
- :return: The original data structure, but with all unknown keys removed.
- """
- unexpected = set(data) - {x.name for x in dataclasses.fields(cls)}
- if unexpected:
- mlog.warning(msg, 'has unexpected keys', '"{}".'.format(', '.join(sorted(unexpected))),
- _EXTRA_KEYS_WARNING)
- for k in unexpected:
- # Mypy and Pyright can't prove that this is okay
- del data[k] # type: ignore[misc]
- return data
-
-
-@dataclasses.dataclass
-class Package:
-
- """Representation of a Cargo Package entry, with defaults filled in."""
-
- name: str
- version: str
- description: T.Optional[str] = None
- resolver: T.Optional[str] = None
- authors: T.List[str] = dataclasses.field(default_factory=list)
- edition: manifest.EDITION = '2015'
- rust_version: T.Optional[str] = None
- documentation: T.Optional[str] = None
- readme: T.Optional[str] = None
- homepage: T.Optional[str] = None
- repository: T.Optional[str] = None
- license: T.Optional[str] = None
- license_file: T.Optional[str] = None
- keywords: T.List[str] = dataclasses.field(default_factory=list)
- categories: T.List[str] = dataclasses.field(default_factory=list)
- workspace: T.Optional[str] = None
- build: T.Optional[str] = None
- links: T.Optional[str] = None
- exclude: T.List[str] = dataclasses.field(default_factory=list)
- include: T.List[str] = dataclasses.field(default_factory=list)
- publish: bool = True
- metadata: T.Dict[str, T.Any] = dataclasses.field(default_factory=dict)
- default_run: T.Optional[str] = None
- autolib: bool = True
- autobins: bool = True
- autoexamples: bool = True
- autotests: bool = True
- autobenches: bool = True
- api: str = dataclasses.field(init=False)
-
- def __post_init__(self) -> None:
- self.api = _version_to_api(self.version)
-
- @classmethod
- def from_raw(cls, raw: manifest.Package) -> Self:
- pkg = T.cast('manifest.FixedPackage',
- {fixup_meson_varname(k): v for k, v in raw.items()})
- pkg = _handle_unknown_keys(pkg, cls, f'Package entry {pkg["name"]}')
- return cls(**pkg)
-
-@dataclasses.dataclass
-class SystemDependency:
-
- """ Representation of a Cargo system-deps entry
- https://docs.rs/system-deps/latest/system_deps
- """
-
- name: str
- version: T.List[str]
- optional: bool = False
- feature: T.Optional[str] = None
- feature_overrides: T.Dict[str, T.Dict[str, str]] = dataclasses.field(default_factory=dict)
-
- @classmethod
- def from_raw(cls, name: str, raw: T.Any) -> SystemDependency:
- if isinstance(raw, str):
- return cls(name, SystemDependency.convert_version(raw))
- name = raw.get('name', name)
- version = SystemDependency.convert_version(raw.get('version'))
- optional = raw.get('optional', False)
- feature = raw.get('feature')
- # Everything else are overrides when certain features are enabled.
- feature_overrides = {k: v for k, v in raw.items() if k not in {'name', 'version', 'optional', 'feature'}}
- return cls(name, version, optional, feature, feature_overrides)
-
- @staticmethod
- def convert_version(version: T.Optional[str]) -> T.List[str]:
- vers = version.split(',') if version is not None else []
- result: T.List[str] = []
- for v in vers:
- v = v.strip()
- if v[0] not in '><=':
- v = f'>={v}'
- result.append(v)
- return result
-
- def enabled(self, features: T.Set[str]) -> bool:
- return self.feature is None or self.feature in features
-
-@dataclasses.dataclass
-class Dependency:
-
- """Representation of a Cargo Dependency Entry."""
-
- name: dataclasses.InitVar[str]
- version: T.List[str]
- registry: T.Optional[str] = None
- git: T.Optional[str] = None
- branch: T.Optional[str] = None
- rev: T.Optional[str] = None
- path: T.Optional[str] = None
- optional: bool = False
- package: str = ''
- default_features: bool = True
- features: T.List[str] = dataclasses.field(default_factory=list)
- api: str = dataclasses.field(init=False)
-
- def __post_init__(self, name: str) -> None:
- self.package = self.package or name
- # Extract wanted API version from version constraints.
- api = set()
- for v in self.version:
- if v.startswith(('>=', '==')):
- api.add(_version_to_api(v[2:].strip()))
- elif v.startswith('='):
- api.add(_version_to_api(v[1:].strip()))
- if not api:
- self.api = '0'
- elif len(api) == 1:
- self.api = api.pop()
- else:
- raise MesonException(f'Cannot determine minimum API version from {self.version}.')
-
- @classmethod
- def from_raw(cls, name: str, raw: manifest.DependencyV) -> Dependency:
- """Create a dependency from a raw cargo dictionary"""
- if isinstance(raw, str):
- return cls(name, version.convert(raw))
- fixed = _handle_unknown_keys(_fixup_raw_mappings(raw), cls, f'Dependency entry {name}')
- return cls(name, **fixed)
-
-
-@dataclasses.dataclass
-class BuildTarget:
-
- name: str
- crate_type: T.List[manifest.CRATE_TYPE] = dataclasses.field(default_factory=lambda: ['lib'])
- path: dataclasses.InitVar[T.Optional[str]] = None
-
- # https://doc.rust-lang.org/cargo/reference/cargo-targets.html#the-test-field
- # True for lib, bin, test
- test: bool = True
-
- # https://doc.rust-lang.org/cargo/reference/cargo-targets.html#the-doctest-field
- # True for lib
- doctest: bool = False
-
- # https://doc.rust-lang.org/cargo/reference/cargo-targets.html#the-bench-field
- # True for lib, bin, benchmark
- bench: bool = True
-
- # https://doc.rust-lang.org/cargo/reference/cargo-targets.html#the-doc-field
- # True for libraries and binaries
- doc: bool = False
-
- harness: bool = True
- edition: manifest.EDITION = '2015'
- required_features: T.List[str] = dataclasses.field(default_factory=list)
- plugin: bool = False
-
- @classmethod
- def from_raw(cls, raw: manifest.BuildTarget) -> Self:
- name = raw.get('name', '<anonymous>')
- build = _handle_unknown_keys(_fixup_raw_mappings(raw), cls, f'Binary entry {name}')
- return cls(**build)
-
-@dataclasses.dataclass
-class Library(BuildTarget):
-
- """Representation of a Cargo Library Entry."""
-
- doctest: bool = True
- doc: bool = True
- path: str = os.path.join('src', 'lib.rs')
- proc_macro: bool = False
- crate_type: T.List[manifest.CRATE_TYPE] = dataclasses.field(default_factory=lambda: ['lib'])
- doc_scrape_examples: bool = True
-
- @classmethod
- def from_raw(cls, raw: manifest.LibTarget, fallback_name: str) -> Self: # type: ignore[override]
- fixed = _fixup_raw_mappings(raw)
-
- # We need to set the name field if it's not set manually, including if
- # other fields are set in the lib section
- if 'name' not in fixed:
- fixed['name'] = fallback_name
- fixed = _handle_unknown_keys(fixed, cls, f'Library entry {fixed["name"]}')
-
- return cls(**fixed)
-
-
-@dataclasses.dataclass
-class Binary(BuildTarget):
-
- """Representation of a Cargo Bin Entry."""
-
- doc: bool = True
-
-
-@dataclasses.dataclass
-class Test(BuildTarget):
-
- """Representation of a Cargo Test Entry."""
-
- bench: bool = True
-
-
-@dataclasses.dataclass
-class Benchmark(BuildTarget):
-
- """Representation of a Cargo Benchmark Entry."""
-
- test: bool = True
-
-
-@dataclasses.dataclass
-class Example(BuildTarget):
-
- """Representation of a Cargo Example Entry."""
-
- crate_type: T.List[manifest.CRATE_TYPE] = dataclasses.field(default_factory=lambda: ['bin'])
-
-
-@dataclasses.dataclass
-class Manifest:
-
- """Cargo Manifest definition.
-
- Most of these values map up to the Cargo Manifest, but with default values
- if not provided.
-
- Cargo subprojects can contain what Meson wants to treat as multiple,
- interdependent, subprojects.
-
- :param path: the path within the cargo subproject.
- """
-
- package: Package
- dependencies: T.Dict[str, Dependency]
- dev_dependencies: T.Dict[str, Dependency]
- build_dependencies: T.Dict[str, Dependency]
- system_dependencies: T.Dict[str, SystemDependency] = dataclasses.field(init=False)
- lib: Library
- bin: T.List[Binary]
- test: T.List[Test]
- bench: T.List[Benchmark]
- example: T.List[Example]
- features: T.Dict[str, T.List[str]]
- target: T.Dict[str, T.Dict[str, Dependency]]
- path: str = ''
-
- def __post_init__(self) -> None:
- self.features.setdefault('default', [])
- self.system_dependencies = {k: SystemDependency.from_raw(k, v) for k, v in self.package.metadata.get('system-deps', {}).items()}
-
-
-def _convert_manifest(raw_manifest: manifest.Manifest, subdir: str, path: str = '') -> Manifest:
- return Manifest(
- Package.from_raw(raw_manifest['package']),
- {k: Dependency.from_raw(k, v) for k, v in raw_manifest.get('dependencies', {}).items()},
- {k: Dependency.from_raw(k, v) for k, v in raw_manifest.get('dev-dependencies', {}).items()},
- {k: Dependency.from_raw(k, v) for k, v in raw_manifest.get('build-dependencies', {}).items()},
- Library.from_raw(raw_manifest.get('lib', {}), raw_manifest['package']['name']),
- [Binary.from_raw(b) for b in raw_manifest.get('bin', {})],
- [Test.from_raw(b) for b in raw_manifest.get('test', {})],
- [Benchmark.from_raw(b) for b in raw_manifest.get('bench', {})],
- [Example.from_raw(b) for b in raw_manifest.get('example', {})],
- raw_manifest.get('features', {}),
- {k: {k2: Dependency.from_raw(k2, v2) for k2, v2 in v.get('dependencies', {}).items()}
- for k, v in raw_manifest.get('target', {}).items()},
- path,
- )
-
-
-def _version_to_api(version: str) -> str:
- # x.y.z -> x
- # 0.x.y -> 0.x
- # 0.0.x -> 0
- vers = version.split('.')
- if int(vers[0]) != 0:
- return vers[0]
- elif len(vers) >= 2 and int(vers[1]) != 0:
- return f'0.{vers[1]}'
- return '0'
-
-
-def _dependency_name(package_name: str, api: str) -> str:
- basename = package_name[:-3] if package_name.endswith('-rs') else package_name
- return f'{basename}-{api}-rs'
+def _dependency_name(package_name: str, api: str, suffix: str = '-rs') -> str:
+ basename = package_name[:-len(suffix)] if package_name.endswith(suffix) else package_name
+ return f'{basename}-{api}{suffix}'
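
The new suffix parameter lets each C-ABI crate type map to its own subproject dependency name while keeping the historical -rs convention for rlibs. Calling the helper directly (crate names hypothetical):

    from mesonbuild.cargo.interpreter import _dependency_name

    print(_dependency_name('foo', '1'))                    # foo-1-rs
    print(_dependency_name('foo-rs', '0.2'))               # foo-0.2-rs
    print(_dependency_name('foo', '1', suffix='-cdylib'))  # foo-1-cdylib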
def _dependency_varname(package_name: str) -> str:
@@ -458,13 +49,13 @@ def _extra_deps_varname() -> str:
return 'extra_deps'
+@dataclasses.dataclass
class PackageState:
- def __init__(self, manifest: Manifest, downloaded: bool) -> None:
- self.manifest = manifest
- self.downloaded = downloaded
- self.features: T.Set[str] = set()
- self.required_deps: T.Set[str] = set()
- self.optional_deps_features: T.Dict[str, T.Set[str]] = collections.defaultdict(set)
+ manifest: Manifest
+ downloaded: bool = False
+ features: T.Set[str] = dataclasses.field(default_factory=set)
+ required_deps: T.Set[str] = dataclasses.field(default_factory=set)
+ optional_deps_features: T.Dict[str, T.Set[str]] = dataclasses.field(default_factory=lambda: collections.defaultdict(set))
@dataclasses.dataclass(frozen=True)
@@ -476,10 +67,16 @@ class PackageKey:
class Interpreter:
def __init__(self, env: Environment) -> None:
self.environment = env
+ self.host_rustc = T.cast('RustCompiler', self.environment.coredata.compilers[MachineChoice.HOST]['rust'])
# Map Cargo.toml's subdir to loaded manifest.
self.manifests: T.Dict[str, Manifest] = {}
# Map of cargo package (name + api) to its state
self.packages: T.Dict[PackageKey, PackageState] = {}
+ # Rustc's config
+ self.cfgs = self._get_cfgs()
+
+ def get_build_def_files(self) -> T.List[str]:
+ return [os.path.join(subdir, 'Cargo.toml') for subdir in self.manifests]
def interpret(self, subdir: str) -> mparser.CodeBlockNode:
manifest = self._load_manifest(subdir)
@@ -503,9 +100,7 @@ class Interpreter:
ast += self._create_dependencies(pkg, build)
ast += self._create_meson_subdir(build)
- # Libs are always auto-discovered and there's no other way to handle them,
- # which is unfortunate for reproducability
- if os.path.exists(os.path.join(self.environment.source_dir, subdir, pkg.manifest.path, pkg.manifest.lib.path)):
+ if pkg.manifest.lib:
for crate_type in pkg.manifest.lib.crate_type:
ast.extend(self._create_lib(pkg, build, crate_type))
@@ -526,6 +121,10 @@ class Interpreter:
self.environment.wrap_resolver.wraps[meson_depname].type is not None
pkg = PackageState(manifest, downloaded)
self.packages[key] = pkg
+ # Merge target specific dependencies that are enabled
+ for condition, dependencies in manifest.target.items():
+ if cfg.eval_cfg(condition, self.cfgs):
+ manifest.dependencies.update(dependencies)
# Fetch required dependencies recursively.
for depname, dep in manifest.dependencies.items():
if not dep.optional:
@@ -538,11 +137,12 @@ class Interpreter:
def _load_manifest(self, subdir: str) -> Manifest:
manifest_ = self.manifests.get(subdir)
if not manifest_:
- filename = os.path.join(self.environment.source_dir, subdir, 'Cargo.toml')
- raw = load_toml(filename)
- if 'package' in raw:
- raw_manifest = T.cast('manifest.Manifest', raw)
- manifest_ = _convert_manifest(raw_manifest, subdir)
+ path = os.path.join(self.environment.source_dir, subdir)
+ filename = os.path.join(path, 'Cargo.toml')
+ toml = load_toml(filename)
+ if 'package' in toml:
+ raw_manifest = T.cast('raw.Manifest', toml)
+ manifest_ = Manifest.from_raw(raw_manifest, path)
self.manifests[subdir] = manifest_
else:
raise MesonException(f'{subdir}/Cargo.toml does not have [package] section')
@@ -599,6 +199,23 @@ class Interpreter:
else:
self._enable_feature(pkg, f)
+ def _get_cfgs(self) -> T.Dict[str, str]:
+ cfgs = self.host_rustc.get_cfgs().copy()
+ rustflags = self.environment.coredata.get_external_args(MachineChoice.HOST, 'rust')
+ rustflags_i = iter(rustflags)
+ for i in rustflags_i:
+ if i == '--cfg':
+ cfgs.append(next(rustflags_i))
+ return dict(self._split_cfg(i) for i in cfgs)
+
+ @staticmethod
+ def _split_cfg(cfg: str) -> T.Tuple[str, str]:
+ pair = cfg.split('=', maxsplit=1)
+ value = pair[1] if len(pair) > 1 else ''
+ if value and value[0] == '"':
+ value = value[1:-1]
+ return pair[0], value
+
def _create_project(self, pkg: PackageState, build: builder.Builder) -> T.List[mparser.BaseNode]:
"""Create the project() function call
@@ -608,6 +225,7 @@ class Interpreter:
"""
default_options: T.List[mparser.BaseNode] = []
default_options.append(build.string(f'rust_std={pkg.manifest.package.edition}'))
+ default_options.append(build.string(f'build.rust_std={pkg.manifest.package.edition}'))
if pkg.downloaded:
default_options.append(build.string('warning_level=0'))
@@ -643,8 +261,9 @@ class Interpreter:
return ast
def _create_system_dependency(self, name: str, dep: SystemDependency, build: builder.Builder) -> T.List[mparser.BaseNode]:
+ # TODO: handle feature_overrides
kw = {
- 'version': build.array([build.string(s) for s in dep.version]),
+ 'version': build.array([build.string(s) for s in dep.meson_version]),
'required': build.bool(not dep.optional),
}
varname = f'{fixup_meson_varname(name)}_system_dep'
@@ -671,7 +290,7 @@ class Interpreter:
def _create_dependency(self, dep: Dependency, build: builder.Builder) -> T.List[mparser.BaseNode]:
pkg = self._dep_package(dep)
kw = {
- 'version': build.array([build.string(s) for s in dep.version]),
+ 'version': build.array([build.string(s) for s in dep.meson_version]),
}
# Lookup for this dependency with the features we want in default_options kwarg.
#
@@ -747,7 +366,7 @@ class Interpreter:
build.block([build.function('subdir', [build.string('meson')])]))
]
- def _create_lib(self, pkg: PackageState, build: builder.Builder, crate_type: manifest.CRATE_TYPE) -> T.List[mparser.BaseNode]:
+ def _create_lib(self, pkg: PackageState, build: builder.Builder, crate_type: raw.CRATE_TYPE) -> T.List[mparser.BaseNode]:
dependencies: T.List[mparser.BaseNode] = []
dependency_map: T.Dict[mparser.BaseNode, mparser.BaseNode] = {}
for name in pkg.required_deps:
@@ -780,6 +399,9 @@ class Interpreter:
'rust_args': build.array(rust_args),
}
+ depname_suffix = '-rs' if crate_type in {'lib', 'rlib', 'proc-macro'} else f'-{crate_type}'
+ depname = _dependency_name(pkg.manifest.package.name, pkg.manifest.package.api, depname_suffix)
+
lib: mparser.BaseNode
if pkg.manifest.lib.proc_macro or crate_type == 'proc-macro':
lib = build.method('proc_macro', build.identifier('rust'), posargs, kwargs)
@@ -812,7 +434,8 @@ class Interpreter:
'link_with': build.identifier('lib'),
'variables': build.dict({
build.string('features'): build.string(','.join(pkg.features)),
- })
+ }),
+ 'version': build.string(pkg.manifest.package.version),
},
),
'dep'
@@ -821,7 +444,7 @@ class Interpreter:
'override_dependency',
build.identifier('meson'),
[
- build.string(_dependency_name(pkg.manifest.package.name, pkg.manifest.package.api)),
+ build.string(depname),
build.identifier('dep'),
],
),
@@ -835,24 +458,23 @@ def load_wraps(source_dir: str, subproject_dir: str) -> T.List[PackageDefinition
filename = os.path.join(source_dir, 'Cargo.lock')
if os.path.exists(filename):
try:
- cargolock = T.cast('manifest.CargoLock', load_toml(filename))
+ toml = load_toml(filename)
except TomlImplementationMissing as e:
mlog.warning('Failed to load Cargo.lock:', str(e), fatal=False)
return wraps
- for package in cargolock['package']:
- name = package['name']
- version = package['version']
- subp_name = _dependency_name(name, _version_to_api(version))
- source = package.get('source')
- if source is None:
+ raw_cargolock = T.cast('raw.CargoLock', toml)
+ cargolock = CargoLock.from_raw(raw_cargolock)
+ for package in cargolock.package:
+ subp_name = _dependency_name(package.name, version.api(package.version))
+ if package.source is None:
# This is project's package, or one of its workspace members.
pass
- elif source == 'registry+https://github.com/rust-lang/crates.io-index':
- checksum = package.get('checksum')
+ elif package.source == 'registry+https://github.com/rust-lang/crates.io-index':
+ checksum = package.checksum
if checksum is None:
- checksum = cargolock['metadata'][f'checksum {name} {version} ({source})']
- url = f'https://crates.io/api/v1/crates/{name}/{version}/download'
- directory = f'{name}-{version}'
+ checksum = cargolock.metadata[f'checksum {package.name} {package.version} ({package.source})']
+ url = f'https://crates.io/api/v1/crates/{package.name}/{package.version}/download'
+ directory = f'{package.name}-{package.version}'
wraps.append(PackageDefinition.from_values(subp_name, subproject_dir, 'file', {
'directory': directory,
'source_url': url,
@@ -860,18 +482,18 @@ def load_wraps(source_dir: str, subproject_dir: str) -> T.List[PackageDefinition
'source_hash': checksum,
'method': 'cargo',
}))
- elif source.startswith('git+'):
- parts = urllib.parse.urlparse(source[4:])
+ elif package.source.startswith('git+'):
+ parts = urllib.parse.urlparse(package.source[4:])
query = urllib.parse.parse_qs(parts.query)
branch = query['branch'][0] if 'branch' in query else ''
revision = parts.fragment or branch
url = urllib.parse.urlunparse(parts._replace(params='', query='', fragment=''))
wraps.append(PackageDefinition.from_values(subp_name, subproject_dir, 'git', {
- 'directory': name,
+ 'directory': package.name,
'url': url,
'revision': revision,
'method': 'cargo',
}))
else:
- mlog.warning(f'Unsupported source URL in {filename}: {source}')
+ mlog.warning(f'Unsupported source URL in {filename}: {package.source}')
return wraps
diff --git a/mesonbuild/cargo/manifest.py b/mesonbuild/cargo/manifest.py
index d95df7f..ab059b0 100644
--- a/mesonbuild/cargo/manifest.py
+++ b/mesonbuild/cargo/manifest.py
@@ -4,244 +4,505 @@
"""Type definitions for cargo manifest files."""
from __future__ import annotations
+
+import dataclasses
+import os
import typing as T
-from typing_extensions import Literal, TypedDict, Required
-
-EDITION = Literal['2015', '2018', '2021']
-CRATE_TYPE = Literal['bin', 'lib', 'dylib', 'staticlib', 'cdylib', 'rlib', 'proc-macro']
-
-Package = TypedDict(
- 'Package',
- {
- 'name': Required[str],
- 'version': Required[str],
- 'authors': T.List[str],
- 'edition': EDITION,
- 'rust-version': str,
- 'description': str,
- 'readme': str,
- 'license': str,
- 'license-file': str,
- 'keywords': T.List[str],
- 'categories': T.List[str],
- 'workspace': str,
- 'build': str,
- 'links': str,
- 'include': T.List[str],
- 'exclude': T.List[str],
- 'publish': bool,
- 'metadata': T.Dict[str, T.Dict[str, str]],
- 'default-run': str,
- 'autolib': bool,
- 'autobins': bool,
- 'autoexamples': bool,
- 'autotests': bool,
- 'autobenches': bool,
- },
- total=False,
-)
-"""A description of the Package Dictionary."""
-
-class FixedPackage(TypedDict, total=False):
-
- """A description of the Package Dictionary, fixed up."""
-
- name: Required[str]
- version: Required[str]
- authors: T.List[str]
- edition: EDITION
- rust_version: str
- description: str
- readme: str
- license: str
- license_file: str
- keywords: T.List[str]
- categories: T.List[str]
- workspace: str
- build: str
- links: str
- include: T.List[str]
- exclude: T.List[str]
- publish: bool
- metadata: T.Dict[str, T.Dict[str, str]]
- default_run: str
- autolib: bool
- autobins: bool
- autoexamples: bool
- autotests: bool
- autobenches: bool
-
-
-class Badge(TypedDict):
-
- """An entry in the badge section."""
-
- status: Literal['actively-developed', 'passively-developed', 'as-is', 'experimental', 'deprecated', 'none']
-
-
-Dependency = TypedDict(
- 'Dependency',
- {
- 'version': str,
- 'registry': str,
- 'git': str,
- 'branch': str,
- 'rev': str,
- 'path': str,
- 'optional': bool,
- 'package': str,
- 'default-features': bool,
- 'features': T.List[str],
- },
- total=False,
-)
-"""An entry in the *dependencies sections."""
+from . import version
+from ..mesonlib import MesonException, lazy_property
+from .. import mlog
+if T.TYPE_CHECKING:
+ from typing_extensions import Protocol, Self
-class FixedDependency(TypedDict, total=False):
+ from . import raw
+ from .raw import EDITION, CRATE_TYPE
- """An entry in the *dependencies sections, fixed up."""
+ # Copied from typeshed. Blarg that they don't expose this
+ class DataclassInstance(Protocol):
+ __dataclass_fields__: T.ClassVar[dict[str, dataclasses.Field[T.Any]]]
- version: T.List[str]
- registry: str
- git: str
- branch: str
- rev: str
- path: str
- optional: bool
- package: str
- default_features: bool
- features: T.List[str]
-
-
-DependencyV = T.Union[Dependency, str]
-"""A Dependency entry, either a string or a Dependency Dict."""
-
-
-_BaseBuildTarget = TypedDict(
- '_BaseBuildTarget',
- {
- 'path': str,
- 'test': bool,
- 'doctest': bool,
- 'bench': bool,
- 'doc': bool,
- 'plugin': bool,
- 'proc-macro': bool,
- 'harness': bool,
- 'edition': EDITION,
- 'crate-type': T.List[CRATE_TYPE],
- 'required-features': T.List[str],
- },
- total=False,
+_DI = T.TypeVar('_DI', bound='DataclassInstance')
+_R = T.TypeVar('_R', bound='raw._BaseBuildTarget')
+
+_EXTRA_KEYS_WARNING = (
+ "This may (unlikely) be an error in the cargo manifest, or may be a missing "
+ "implementation in Meson. If this issue can be reproduced with the latest "
+ "version of Meson, please help us by opening an issue at "
+ "https://github.com/mesonbuild/meson/issues. Please include the crate and "
+ "version that is generating this warning if possible."
)
-class BuildTarget(_BaseBuildTarget, total=False):
+def fixup_meson_varname(name: str) -> str:
+ """Fixup a meson variable name
+
+ :param name: The name to fix
+ :return: the fixed name
+ """
+ return name.replace('-', '_')
+
+
+@T.overload
+def _depv_to_dep(depv: raw.FromWorkspace) -> raw.FromWorkspace: ...
+
+@T.overload
+def _depv_to_dep(depv: raw.DependencyV) -> raw.Dependency: ...
+
+def _depv_to_dep(depv: T.Union[raw.FromWorkspace, raw.DependencyV]) -> T.Union[raw.FromWorkspace, raw.Dependency]:
+ return {'version': depv} if isinstance(depv, str) else depv
+
- name: Required[str]
+def _raw_to_dataclass(raw: T.Mapping[str, object], cls: T.Type[_DI],
+ msg: str, **kwargs: T.Callable[[T.Any], object]) -> _DI:
+ """Fixup raw cargo mappings to ones more suitable for python to consume as dataclass.
-class LibTarget(_BaseBuildTarget, total=False):
+ * Replaces any `-` with `_` in the keys.
+ * Optionally pass values through the functions in kwargs, in order to do
+ recursive conversions.
+ * Remove and warn on keys that are coming from cargo, but are unknown to
+ our representations.
+
+ This is intended to give users the possibility of things proceeding when a
+ new key is added to Cargo.toml that we don't yet handle, but to still warn
+ them that things might not work.
+
+ :param data: The raw data to look at
+ :param cls: The Dataclass derived type that will be created
+ :param msg: the header for the error message. Usually something like "In N structure".
+ :return: The original data structure, but with all unknown keys removed.
+ """
+ new_dict = {}
+ unexpected = set()
+ fields = {x.name for x in dataclasses.fields(cls)}
+ for orig_k, v in raw.items():
+ k = fixup_meson_varname(orig_k)
+ if k not in fields:
+ unexpected.add(orig_k)
+ continue
+ if k in kwargs:
+ v = kwargs[k](v)
+ new_dict[k] = v
+
+ if unexpected:
+ mlog.warning(msg, 'has unexpected keys', '"{}".'.format(', '.join(sorted(unexpected))),
+ _EXTRA_KEYS_WARNING)
+ return cls(**new_dict)
+
+
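
A self-contained sketch of what _raw_to_dataclass does, using a toy dataclass instead of Meson's own types (the key names are hypothetical):

    import dataclasses

    @dataclasses.dataclass
    class Demo:
        name: str
        default_features: bool = True

    raw = {'name': 'foo', 'default-features': False, 'brand-new-key': 1}
    fields = {f.name for f in dataclasses.fields(Demo)}
    new_dict = {}
    for orig_k, v in raw.items():
        k = orig_k.replace('-', '_')
        if k in fields:
            new_dict[k] = v
        else:
            print('unexpected key:', orig_k)  # stands in for mlog.warning
    print(Demo(**new_dict))  # Demo(name='foo', default_features=False)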
+@T.overload
+def _inherit_from_workspace(raw: raw.Package,
+ raw_from_workspace: T.Optional[T.Mapping[str, object]],
+ msg: str,
+ **kwargs: T.Callable[[T.Any, T.Any], object]) -> raw.Package: ...
+
+@T.overload
+def _inherit_from_workspace(raw: T.Union[raw.FromWorkspace, raw.Dependency],
+ raw_from_workspace: T.Optional[T.Mapping[str, object]],
+ msg: str,
+ **kwargs: T.Callable[[T.Any, T.Any], object]) -> raw.Dependency: ...
+
+def _inherit_from_workspace(raw_: T.Union[raw.FromWorkspace, raw.Package, raw.Dependency], # type: ignore[misc]
+ raw_from_workspace: T.Optional[T.Mapping[str, object]],
+ msg: str,
+ **kwargs: T.Callable[[T.Any, T.Any], object]) -> T.Mapping[str, object]:
+ # allow accesses by non-literal key below
+ raw = T.cast('T.Mapping[str, object]', raw_)
+
+ if not raw_from_workspace:
+ if raw.get('workspace', False) or \
+ any(isinstance(v, dict) and v.get('workspace', False) for v in raw.values()):
+ raise MesonException(f'Cargo.toml file requests {msg} from workspace')
+
+ return raw
+
+ result = {k: v for k, v in raw.items() if k != 'workspace'}
+ for k, v in raw.items():
+ if isinstance(v, dict) and v.get('workspace', False):
+ if k in raw_from_workspace:
+ result[k] = raw_from_workspace[k]
+ if k in kwargs:
+ result[k] = kwargs[k](v, result[k])
+ else:
+ del result[k]
+
+ if raw.get('workspace', False):
+ for k, v in raw_from_workspace.items():
+ if k not in result or k in kwargs:
+ if k in kwargs:
+ v = kwargs[k](raw.get(k), v)
+ result[k] = v
+ return result
+
+
+@dataclasses.dataclass
+class Package:
+
+ """Representation of a Cargo Package entry, with defaults filled in."""
+
+ name: str
+ version: str
+ description: T.Optional[str] = None
+ resolver: T.Optional[str] = None
+ authors: T.List[str] = dataclasses.field(default_factory=list)
+ edition: EDITION = '2015'
+ rust_version: T.Optional[str] = None
+ documentation: T.Optional[str] = None
+ readme: T.Optional[str] = None
+ homepage: T.Optional[str] = None
+ repository: T.Optional[str] = None
+ license: T.Optional[str] = None
+ license_file: T.Optional[str] = None
+ keywords: T.List[str] = dataclasses.field(default_factory=list)
+ categories: T.List[str] = dataclasses.field(default_factory=list)
+ workspace: T.Optional[str] = None
+ build: T.Optional[str] = None
+ links: T.Optional[str] = None
+ exclude: T.List[str] = dataclasses.field(default_factory=list)
+ include: T.List[str] = dataclasses.field(default_factory=list)
+ publish: bool = True
+ metadata: T.Dict[str, T.Any] = dataclasses.field(default_factory=dict)
+ default_run: T.Optional[str] = None
+ autolib: bool = True
+ autobins: bool = True
+ autoexamples: bool = True
+ autotests: bool = True
+ autobenches: bool = True
+
+ @lazy_property
+ def api(self) -> str:
+ return version.api(self.version)
+
+ @classmethod
+ def from_raw(cls, raw_pkg: raw.Package, workspace: T.Optional[Workspace] = None) -> Self:
+ raw_ws_pkg = None
+ if workspace is not None:
+ raw_ws_pkg = workspace.package
+
+ raw_pkg = _inherit_from_workspace(raw_pkg, raw_ws_pkg, f'Package entry {raw_pkg["name"]}')
+ return _raw_to_dataclass(raw_pkg, cls, f'Package entry {raw_pkg["name"]}')
+
+@dataclasses.dataclass
+class SystemDependency:
+
+ """ Representation of a Cargo system-deps entry
+ https://docs.rs/system-deps/latest/system_deps
+ """
name: str
+ version: str = ''
+ optional: bool = False
+ feature: T.Optional[str] = None
+ # TODO: convert values to dataclass
+ feature_overrides: T.Dict[str, T.Dict[str, str]] = dataclasses.field(default_factory=dict)
+
+ @classmethod
+ def from_raw(cls, name: str, raw: T.Union[T.Dict[str, T.Any], str]) -> SystemDependency:
+ if isinstance(raw, str):
+ raw = {'version': raw}
+ name = raw.get('name', name)
+ version = raw.get('version', '')
+ optional = raw.get('optional', False)
+ feature = raw.get('feature')
+ # Everything else are overrides when certain features are enabled.
+ feature_overrides = {k: v for k, v in raw.items() if k not in {'name', 'version', 'optional', 'feature'}}
+ return cls(name, version, optional, feature, feature_overrides)
+
+ @lazy_property
+ def meson_version(self) -> T.List[str]:
+ vers = self.version.split(',') if self.version else []
+ result: T.List[str] = []
+ for v in vers:
+ v = v.strip()
+ if v[0] not in '><=':
+ v = f'>={v}'
+ result.append(v)
+ return result
+
+ def enabled(self, features: T.Set[str]) -> bool:
+ return self.feature is None or self.feature in features
+
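
meson_version converts the comma-separated cargo constraint string into a list of Meson version constraints, defaulting bare versions to >=. For example (the package name is hypothetical):

    from mesonbuild.cargo.manifest import SystemDependency

    dep = SystemDependency.from_raw('glib-2.0', '2.70, <3')
    print(dep.meson_version)  # ['>=2.70', '<3']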
+@dataclasses.dataclass
+class Dependency:
+
+ """Representation of a Cargo Dependency Entry."""
+ package: str
+ version: str = ''
+ registry: T.Optional[str] = None
+ git: T.Optional[str] = None
+ branch: T.Optional[str] = None
+ rev: T.Optional[str] = None
+ path: T.Optional[str] = None
+ optional: bool = False
+ default_features: bool = True
+ features: T.List[str] = dataclasses.field(default_factory=list)
+
+ @lazy_property
+ def meson_version(self) -> T.List[str]:
+ return version.convert(self.version)
+
+ @lazy_property
+ def api(self) -> str:
+ # Extract wanted API version from version constraints.
+ api = set()
+ for v in self.meson_version:
+ if v.startswith(('>=', '==')):
+ api.add(version.api(v[2:].strip()))
+ elif v.startswith('='):
+ api.add(version.api(v[1:].strip()))
+ if not api:
+ return '0'
+ elif len(api) == 1:
+ return api.pop()
+ else:
+ raise MesonException(f'Cannot determine minimum API version from {self.version}.')
+
+ @classmethod
+ def from_raw_dict(cls, name: str, raw_dep: T.Union[raw.FromWorkspace, raw.Dependency], member_path: str = '', raw_ws_dep: T.Optional[raw.Dependency] = None) -> Dependency:
+ raw_dep = _inherit_from_workspace(raw_dep, raw_ws_dep,
+ f'Dependency entry {name}',
+ path=lambda pkg_path, ws_path: os.path.relpath(ws_path, member_path),
+ features=lambda pkg_path, ws_path: (pkg_path or []) + (ws_path or []))
+ raw_dep.setdefault('package', name)
+ return _raw_to_dataclass(raw_dep, cls, f'Dependency entry {name}')
+
+ @classmethod
+ def from_raw(cls, name: str, raw_depv: T.Union[raw.FromWorkspace, raw.DependencyV], member_path: str = '', workspace: T.Optional[Workspace] = None) -> Dependency:
+ """Create a dependency from a raw cargo dictionary or string"""
+ raw_ws_dep: T.Optional[raw.Dependency] = None
+ if workspace is not None:
+ raw_ws_depv = workspace.dependencies.get(name, {})
+ raw_ws_dep = _depv_to_dep(raw_ws_depv)
+
+ raw_dep = _depv_to_dep(raw_depv)
+ return cls.from_raw_dict(name, raw_dep, member_path, raw_ws_dep)
+
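
With the workspace plumbing, a member's { workspace = true } entry is resolved against the [workspace.dependencies] table, and feature lists are concatenated (the member's own features first) via the callback passed to _inherit_from_workspace. A sketch using from_raw_dict directly (crate name and versions hypothetical):

    from mesonbuild.cargo.manifest import Dependency

    dep = Dependency.from_raw_dict(
        'serde',
        {'workspace': True, 'features': ['derive']},         # member entry
        raw_ws_dep={'version': '1.0', 'features': ['std']})  # [workspace.dependencies]
    print(dep.version)   # 1.0
    print(dep.features)  # ['derive', 'std']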
+
+@dataclasses.dataclass
+class BuildTarget(T.Generic[_R]):
-class _BaseFixedBuildTarget(TypedDict, total=False):
+ name: str
path: str
- test: bool
- doctest: bool
- bench: bool
- doc: bool
- plugin: bool
- harness: bool
- edition: EDITION
crate_type: T.List[CRATE_TYPE]
- required_features: T.List[str]
+ # https://doc.rust-lang.org/cargo/reference/cargo-targets.html#the-test-field
+ # True for lib, bin, test
+ test: bool = True
-class FixedBuildTarget(_BaseFixedBuildTarget, total=False):
+ # https://doc.rust-lang.org/cargo/reference/cargo-targets.html#the-doctest-field
+ # True for lib
+ doctest: bool = False
- name: str
+ # https://doc.rust-lang.org/cargo/reference/cargo-targets.html#the-bench-field
+ # True for lib, bin, benchmark
+ bench: bool = True
-class FixedLibTarget(_BaseFixedBuildTarget, total=False):
+ # https://doc.rust-lang.org/cargo/reference/cargo-targets.html#the-doc-field
+ # True for libraries and binaries
+ doc: bool = False
- name: Required[str]
- proc_macro: bool
+ harness: bool = True
+ edition: EDITION = '2015'
+ required_features: T.List[str] = dataclasses.field(default_factory=list)
+ plugin: bool = False
+ @classmethod
+ def from_raw(cls, raw: _R) -> Self:
+ name = raw.get('name', '<anonymous>')
+ return _raw_to_dataclass(raw, cls, f'Binary entry {name}')
-class Target(TypedDict):
+@dataclasses.dataclass
+class Library(BuildTarget['raw.LibTarget']):
- """Target entry in the Manifest File."""
+ """Representation of a Cargo Library Entry."""
- dependencies: T.Dict[str, DependencyV]
+ doctest: bool = True
+ doc: bool = True
+ path: str = os.path.join('src', 'lib.rs')
+ proc_macro: bool = False
+ crate_type: T.List[CRATE_TYPE] = dataclasses.field(default_factory=lambda: ['lib'])
+ doc_scrape_examples: bool = True
+ @classmethod
+ def from_raw(cls, raw: raw.LibTarget, fallback_name: str) -> Self: # type: ignore[override]
+ # We need to set the name field if it's not set manually, including if
+ # other fields are set in the lib section
+ raw.setdefault('name', fallback_name)
+ return _raw_to_dataclass(raw, cls, f'Library entry {raw["name"]}')
-class Workspace(TypedDict):
- """The representation of a workspace.
+@dataclasses.dataclass
+class Binary(BuildTarget['raw.BuildTarget']):
- In a vritual manifest the :attribute:`members` is always present, but in a
- project manifest, an empty workspace may be provided, in which case the
- workspace is implicitly filled in by values from the path based dependencies.
+ """Representation of a Cargo Bin Entry."""
- the :attribute:`exclude` is always optional
- """
+ doc: bool = True
+ crate_type: T.List[CRATE_TYPE] = dataclasses.field(default_factory=lambda: ['bin'])
- members: T.List[str]
- exclude: T.List[str]
-
-
-Manifest = TypedDict(
- 'Manifest',
- {
- 'package': Required[Package],
- 'badges': T.Dict[str, Badge],
- 'dependencies': T.Dict[str, DependencyV],
- 'dev-dependencies': T.Dict[str, DependencyV],
- 'build-dependencies': T.Dict[str, DependencyV],
- 'lib': LibTarget,
- 'bin': T.List[BuildTarget],
- 'test': T.List[BuildTarget],
- 'bench': T.List[BuildTarget],
- 'example': T.List[BuildTarget],
- 'features': T.Dict[str, T.List[str]],
- 'target': T.Dict[str, Target],
- 'workspace': Workspace,
-
- # TODO: patch?
- # TODO: replace?
- },
- total=False,
-)
-"""The Cargo Manifest format."""
+ @classmethod
+ def from_raw(cls, raw: raw.BuildTarget) -> Self:
+ if 'path' not in raw:
+ raw['path'] = os.path.join('bin', raw['name'] + '.rs')
+ return super().from_raw(raw)
-class VirtualManifest(TypedDict):
+@dataclasses.dataclass
+class Test(BuildTarget['raw.BuildTarget']):
- """The Representation of a virtual manifest.
+ """Representation of a Cargo Test Entry."""
- Cargo allows a root manifest that contains only a workspace, this is called
- a virtual manifest. This doesn't really map 1:1 with any meson concept,
- except perhaps the proposed "meta project".
+ bench: bool = True
+ crate_type: T.List[CRATE_TYPE] = dataclasses.field(default_factory=lambda: ['bin'])
+
+ @classmethod
+ def from_raw(cls, raw: raw.BuildTarget) -> Self:
+ if 'path' not in raw:
+ raw['path'] = os.path.join('tests', raw['name'] + '.rs')
+ return super().from_raw(raw)
+
+@dataclasses.dataclass
+class Benchmark(BuildTarget['raw.BuildTarget']):
+
+ """Representation of a Cargo Benchmark Entry."""
+
+ test: bool = True
+ crate_type: T.List[CRATE_TYPE] = dataclasses.field(default_factory=lambda: ['bin'])
+
+ @classmethod
+ def from_raw(cls, raw: raw.BuildTarget) -> Self:
+ if 'path' not in raw:
+ raw['path'] = os.path.join('benches', raw['name'] + '.rs')
+ return super().from_raw(raw)
+
+
+@dataclasses.dataclass
+class Example(BuildTarget['raw.BuildTarget']):
+
+ """Representation of a Cargo Example Entry."""
+
+ crate_type: T.List[CRATE_TYPE] = dataclasses.field(default_factory=lambda: ['bin'])
+
+ @classmethod
+ def from_raw(cls, raw: raw.BuildTarget) -> Self:
+ if 'path' not in raw:
+ raw['path'] = os.path.join('examples', raw['name'] + '.rs')
+ return super().from_raw(raw)
+
+
+@dataclasses.dataclass
+class Manifest:
+
+ """Cargo Manifest definition.
+
+ Most of these values map directly to the Cargo Manifest, with default values
+ filled in when not provided.
+
+ Cargo subprojects can contain what Meson wants to treat as multiple,
+ interdependent subprojects.
+
+ :param path: the path within the Cargo subproject.
"""
- workspace: Workspace
+ package: Package
+ dependencies: T.Dict[str, Dependency] = dataclasses.field(default_factory=dict)
+ dev_dependencies: T.Dict[str, Dependency] = dataclasses.field(default_factory=dict)
+ build_dependencies: T.Dict[str, Dependency] = dataclasses.field(default_factory=dict)
+ lib: T.Optional[Library] = None
+ bin: T.List[Binary] = dataclasses.field(default_factory=list)
+ test: T.List[Test] = dataclasses.field(default_factory=list)
+ bench: T.List[Benchmark] = dataclasses.field(default_factory=list)
+ example: T.List[Example] = dataclasses.field(default_factory=list)
+ features: T.Dict[str, T.List[str]] = dataclasses.field(default_factory=dict)
+ target: T.Dict[str, T.Dict[str, Dependency]] = dataclasses.field(default_factory=dict)
+
+ path: str = ''
+
+ def __post_init__(self) -> None:
+ self.features.setdefault('default', [])
+
+ @lazy_property
+ def system_dependencies(self) -> T.Dict[str, SystemDependency]:
+ return {k: SystemDependency.from_raw(k, v) for k, v in self.package.metadata.get('system-deps', {}).items()}
+
+ @classmethod
+ def from_raw(cls, raw: raw.Manifest, path: str = '', workspace: T.Optional[Workspace] = None, member_path: str = '') -> Self:
+ # Libs are always auto-discovered and there's no other way to handle them,
+ # which is unfortunate for reproducibility
+ pkg = Package.from_raw(raw['package'], workspace)
+ if pkg.autolib and 'lib' not in raw and \
+ os.path.exists(os.path.join(path, 'src/lib.rs')):
+ raw['lib'] = {}
+ fixed = _raw_to_dataclass(raw, cls, f'Cargo.toml package {raw["package"]["name"]}',
+ package=lambda x: pkg,
+ dependencies=lambda x: {k: Dependency.from_raw(k, v, member_path, workspace) for k, v in x.items()},
+ dev_dependencies=lambda x: {k: Dependency.from_raw(k, v, member_path, workspace) for k, v in x.items()},
+ build_dependencies=lambda x: {k: Dependency.from_raw(k, v, member_path, workspace) for k, v in x.items()},
+ lib=lambda x: Library.from_raw(x, raw['package']['name']),
+ bin=lambda x: [Binary.from_raw(b) for b in x],
+ test=lambda x: [Test.from_raw(b) for b in x],
+ bench=lambda x: [Benchmark.from_raw(b) for b in x],
+ example=lambda x: [Example.from_raw(b) for b in x],
+ target=lambda x: {k: {k2: Dependency.from_raw(k2, v2, member_path, workspace) for k2, v2 in v.get('dependencies', {}).items()}
+ for k, v in x.items()})
+ fixed.path = path
+ return fixed
+
+
+@dataclasses.dataclass
+class Workspace:
+
+ """Cargo Workspace definition.
+ """
+
+ resolver: str = dataclasses.field(default_factory=lambda: '2')
+ members: T.List[str] = dataclasses.field(default_factory=list)
+ exclude: T.List[str] = dataclasses.field(default_factory=list)
+ default_members: T.List[str] = dataclasses.field(default_factory=list)
+
+ # inheritable settings are kept in raw format, for use with _inherit_from_workspace
+ package: T.Optional[raw.Package] = None
+ dependencies: T.Dict[str, raw.Dependency] = dataclasses.field(default_factory=dict)
+ lints: T.Dict[str, T.Any] = dataclasses.field(default_factory=dict)
+ metadata: T.Dict[str, T.Any] = dataclasses.field(default_factory=dict)
-class CargoLockPackage(TypedDict, total=False):
+ # A workspace can also have a root package.
+ root_package: T.Optional[Manifest] = dataclasses.field(init=False)
+
+ @classmethod
+ def from_raw(cls, raw: raw.VirtualManifest) -> Workspace:
+ ws_raw = raw['workspace']
+ fixed = _raw_to_dataclass(ws_raw, cls, 'Workspace')
+ return fixed
+
+
+@dataclasses.dataclass
+class CargoLockPackage:
"""A description of a package in the Cargo.lock file format."""
name: str
version: str
- source: str
- checksum: str
+ source: T.Optional[str] = None
+ checksum: T.Optional[str] = None
+ dependencies: T.List[str] = dataclasses.field(default_factory=list)
+
+ @classmethod
+ def from_raw(cls, raw: raw.CargoLockPackage) -> CargoLockPackage:
+ return _raw_to_dataclass(raw, cls, 'Cargo.lock package')
-class CargoLock(TypedDict, total=False):
+
+@dataclasses.dataclass
+class CargoLock:
"""A description of the Cargo.lock file format."""
- version: str
- package: T.List[CargoLockPackage]
- metadata: T.Dict[str, str]
+ version: int = 1
+ package: T.List[CargoLockPackage] = dataclasses.field(default_factory=list)
+ metadata: T.Dict[str, str] = dataclasses.field(default_factory=dict)
+
+ @classmethod
+ def from_raw(cls, raw: raw.CargoLock) -> CargoLock:
+ return _raw_to_dataclass(raw, cls, 'Cargo.lock',
+ package=lambda x: [CargoLockPackage.from_raw(p) for p in x])
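Taken together, the dataclass layer above replaces the old TypedDict-based manifest model with objects that apply Cargo's documented defaults as they are built. A minimal usage sketch, assuming only that `_raw_to_dataclass` matches raw dictionary keys to dataclass fields; the input dict and the subproject path are invented for illustration:

import os
from mesonbuild.cargo.manifest import Manifest

raw_manifest = {
    'package': {'name': 'foo', 'version': '0.1.0'},
    'bin': [{'name': 'foo-cli'}],
}

manifest = Manifest.from_raw(raw_manifest, path='subprojects/foo')
# Binary.from_raw fills in the conventional source path when none is given:
assert manifest.bin[0].path == os.path.join('bin', 'foo-cli.rs')
# __post_init__ guarantees that a 'default' feature set always exists:
assert manifest.features['default'] == []
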
diff --git a/mesonbuild/cargo/raw.py b/mesonbuild/cargo/raw.py
new file mode 100644
index 0000000..67dd58a
--- /dev/null
+++ b/mesonbuild/cargo/raw.py
@@ -0,0 +1,192 @@
+# SPDX-License-Identifier: Apache-2.0
+# Copyright © 2022-2024 Intel Corporation
+
+"""Type definitions for cargo manifest files."""
+
+from __future__ import annotations
+import typing as T
+
+from typing_extensions import Literal, TypedDict, Required
+
+EDITION = Literal['2015', '2018', '2021']
+CRATE_TYPE = Literal['bin', 'lib', 'dylib', 'staticlib', 'cdylib', 'rlib', 'proc-macro']
+
+
+class FromWorkspace(TypedDict):
+
+ """An entry or section that is copied from the workspace."""
+
+ workspace: bool
+
+
+Package = TypedDict(
+ 'Package',
+ {
+ 'name': Required[str],
+ 'version': Required[T.Union[FromWorkspace, str]],
+ 'authors': T.Union[FromWorkspace, T.List[str]],
+ 'edition': T.Union[FromWorkspace, EDITION],
+ 'rust-version': T.Union[FromWorkspace, str],
+ 'description': T.Union[FromWorkspace, str],
+ 'readme': T.Union[FromWorkspace, str],
+ 'license': T.Union[FromWorkspace, str],
+ 'license-file': T.Union[FromWorkspace, str],
+ 'keywords': T.Union[FromWorkspace, T.List[str]],
+ 'categories': T.Union[FromWorkspace, T.List[str]],
+ 'homepage': T.Union[FromWorkspace, str],
+ 'repository': T.Union[FromWorkspace, str],
+ 'documentation': T.Union[FromWorkspace, str],
+ 'workspace': str,
+ 'build': str,
+ 'links': str,
+ 'include': T.Union[FromWorkspace, T.List[str]],
+ 'exclude': T.Union[FromWorkspace, T.List[str]],
+ 'publish': T.Union[FromWorkspace, bool],
+ 'metadata': T.Dict[str, T.Dict[str, str]],
+ 'default-run': str,
+ 'autolib': bool,
+ 'autobins': bool,
+ 'autoexamples': bool,
+ 'autotests': bool,
+ 'autobenches': bool,
+ },
+ total=False,
+)
+"""A description of the Package Dictionary."""
+
+class Badge(TypedDict):
+
+ """An entry in the badge section."""
+
+ status: Literal['actively-developed', 'passively-developed', 'as-is', 'experimental', 'deprecated', 'none']
+
+
+Dependency = TypedDict(
+ 'Dependency',
+ {
+ 'version': str,
+ 'registry': str,
+ 'git': str,
+ 'branch': str,
+ 'rev': str,
+ 'path': str,
+ 'optional': bool,
+ 'package': str,
+ 'default-features': bool,
+ 'features': T.List[str],
+ },
+ total=False,
+)
+"""An entry in the *dependencies sections."""
+
+
+DependencyV = T.Union[Dependency, str]
+"""A Dependency entry, either a string or a Dependency Dict."""
+
+
+_BaseBuildTarget = TypedDict(
+ '_BaseBuildTarget',
+ {
+ 'path': str,
+ 'test': bool,
+ 'doctest': bool,
+ 'bench': bool,
+ 'doc': bool,
+ 'plugin': bool,
+ 'proc-macro': bool,
+ 'harness': bool,
+ 'edition': EDITION,
+ 'crate-type': T.List[CRATE_TYPE],
+ 'required-features': T.List[str],
+ },
+ total=False,
+)
+
+
+class BuildTarget(_BaseBuildTarget, total=False):
+
+ name: Required[str]
+
+
+class LibTarget(_BaseBuildTarget, total=False):
+
+ name: str
+
+
+class Target(TypedDict):
+
+ """Target entry in the Manifest File."""
+
+ dependencies: T.Dict[str, T.Union[FromWorkspace, DependencyV]]
+
+
+class Workspace(TypedDict):
+
+ """The representation of a workspace.
+
+ In a virtual manifest the :attribute:`members` is always present, but in a
+ project manifest, an empty workspace may be provided, in which case the
+ workspace is implicitly filled in by values from the path-based dependencies.
+
+ The :attribute:`exclude` is always optional.
+ """
+
+ members: T.List[str]
+ exclude: T.List[str]
+ package: Package
+ dependencies: T.Dict[str, DependencyV]
+
+
+Manifest = TypedDict(
+ 'Manifest',
+ {
+ 'package': Required[Package],
+ 'badges': T.Dict[str, Badge],
+ 'dependencies': T.Dict[str, T.Union[FromWorkspace, DependencyV]],
+ 'dev-dependencies': T.Dict[str, T.Union[FromWorkspace, DependencyV]],
+ 'build-dependencies': T.Dict[str, T.Union[FromWorkspace, DependencyV]],
+ 'lib': LibTarget,
+ 'bin': T.List[BuildTarget],
+ 'test': T.List[BuildTarget],
+ 'bench': T.List[BuildTarget],
+ 'example': T.List[BuildTarget],
+ 'features': T.Dict[str, T.List[str]],
+ 'target': T.Dict[str, Target],
+ 'workspace': Workspace,
+
+ # TODO: patch?
+ # TODO: replace?
+ },
+ total=False,
+)
+"""The Cargo Manifest format."""
+
+
+class VirtualManifest(TypedDict, total=False):
+
+ """The Representation of a virtual manifest.
+
+ Cargo allows a root manifest that contains only a workspace; this is called
+ a virtual manifest. This doesn't really map 1:1 with any Meson concept,
+ except perhaps the proposed "meta project".
+ """
+
+ workspace: Workspace
+
+class CargoLockPackage(TypedDict, total=False):
+
+ """A description of a package in the Cargo.lock file format."""
+
+ name: str
+ version: str
+ source: str
+ checksum: str
+
+
+class CargoLock(TypedDict, total=False):
+
+ """A description of the Cargo.lock file format."""
+
+ version: int
+ package: T.List[CargoLockPackage]
+ metadata: T.Dict[str, str]
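The `FromWorkspace` marker encodes Cargo's `{ workspace = true }` inheritance syntax: every field typed `T.Union[FromWorkspace, X]` holds either a concrete value or a request to inherit one from the workspace. A hedged illustration of a member manifest under these types (the dict literal is invented; only its shape follows `raw.Manifest`):

member = {
    'package': {
        'name': 'foo-core',
        'version': {'workspace': True},  # FromWorkspace marker
        'edition': '2021',
    },
    'dependencies': {
        'serde': {'workspace': True},    # inherited from [workspace.dependencies]
        'log': '0.4',                    # plain string form of DependencyV
    },
}
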
diff --git a/mesonbuild/cargo/toml.py b/mesonbuild/cargo/toml.py
new file mode 100644
index 0000000..601510e
--- /dev/null
+++ b/mesonbuild/cargo/toml.py
@@ -0,0 +1,49 @@
+from __future__ import annotations
+
+import importlib
+import shutil
+import json
+import typing as T
+
+from ..mesonlib import MesonException, Popen_safe
+if T.TYPE_CHECKING:
+ from types import ModuleType
+
+
+# tomllib is present in Python 3.11 and later; before that it is a PyPI module
+# called tomli. We try to import tomllib first, then fall back to tomli.
+tomllib: T.Optional[ModuleType] = None
+toml2json: T.Optional[str] = None
+for t in ['tomllib', 'tomli']:
+ try:
+ tomllib = importlib.import_module(t)
+ break
+ except ImportError:
+ pass
+else:
+ # TODO: it would be better to use an Executable here, which could be looked
+ # up in the cross file or provided by a wrap. However, that will have to be
+# passed in externally, since we don't have (and I don't think we should
+# have) access to the `Environment` for that in this module.
+ toml2json = shutil.which('toml2json')
+
+class TomlImplementationMissing(MesonException):
+ pass
+
+
+def load_toml(filename: str) -> T.Dict[str, object]:
+ if tomllib:
+ with open(filename, 'rb') as f:
+ raw = tomllib.load(f)
+ else:
+ if toml2json is None:
+ raise TomlImplementationMissing('Could not find an implementation of tomllib, nor toml2json')
+
+ p, out, err = Popen_safe([toml2json, filename])
+ if p.returncode != 0:
+ raise MesonException('toml2json failed to decode output\n', err)
+
+ raw = json.loads(out)
+
+ # tomllib.load() returns T.Dict[str, T.Any], but other implementations may not.
+ return T.cast('T.Dict[str, object]', raw)
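Callers never see which backend was chosen: `load_toml()` returns a plain dictionary either way, and raises `TomlImplementationMissing` only when neither a Python TOML module nor a `toml2json` binary is available. A usage sketch (the file path is invented):

from mesonbuild.cargo.toml import TomlImplementationMissing, load_toml

try:
    data = load_toml('subprojects/foo/Cargo.toml')
except TomlImplementationMissing:
    data = {}  # no TOML backend available on this machine

package = data.get('package', {})
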
diff --git a/mesonbuild/cargo/version.py b/mesonbuild/cargo/version.py
index cde7a83..ce58945 100644
--- a/mesonbuild/cargo/version.py
+++ b/mesonbuild/cargo/version.py
@@ -7,6 +7,18 @@ from __future__ import annotations
import typing as T
+def api(version: str) -> str:
+ # x.y.z -> x
+ # 0.x.y -> 0.x
+ # 0.0.x -> 0
+ vers = version.split('.')
+ if int(vers[0]) != 0:
+ return vers[0]
+ elif len(vers) >= 2 and int(vers[1]) != 0:
+ return f'0.{vers[1]}'
+ return '0'
+
+
def convert(cargo_ver: str) -> T.List[str]:
"""Convert a Cargo compatible version into a Meson compatible one.
@@ -15,6 +27,8 @@ def convert(cargo_ver: str) -> T.List[str]:
"""
# Cleanup, just for safety
cargo_ver = cargo_ver.strip()
+ if not cargo_ver:
+ return []
cargo_vers = [c.strip() for c in cargo_ver.split(',')]
out: T.List[str] = []
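The new `api()` helper buckets a version by Cargo's caret semantics, where the leftmost non-zero component marks the compatibility boundary (with all `0.0.x` versions collapsed into the `0` bucket here). The behaviour follows directly from the code above:

from mesonbuild.cargo.version import api, convert

assert api('1.2.3') == '1'    # x.y.z -> x
assert api('0.2.3') == '0.2'  # 0.x.y -> 0.x
assert api('0.0.3') == '0'    # 0.0.x -> 0
assert api('2') == '2'        # a bare major version works too
assert convert('') == []      # the new guard: an empty requirement adds no constraints
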
diff --git a/mesonbuild/cmake/common.py b/mesonbuild/cmake/common.py
index 7644c0b..b7ab1ba 100644
--- a/mesonbuild/cmake/common.py
+++ b/mesonbuild/cmake/common.py
@@ -19,6 +19,7 @@ language_map = {
'cuda': 'CUDA',
'objc': 'OBJC',
'objcpp': 'OBJCXX',
+ 'nasm': 'ASM_NASM',
'cs': 'CSharp',
'java': 'Java',
'fortran': 'Fortran',
diff --git a/mesonbuild/cmake/interpreter.py b/mesonbuild/cmake/interpreter.py
index 9296276..c68cb60 100644
--- a/mesonbuild/cmake/interpreter.py
+++ b/mesonbuild/cmake/interpreter.py
@@ -125,7 +125,7 @@ TRANSFER_DEPENDENCIES_FROM: T.Collection[str] = ['header_only']
_cmake_name_regex = re.compile(r'[^_a-zA-Z0-9]')
def _sanitize_cmake_name(name: str) -> str:
name = _cmake_name_regex.sub('_', name)
- if name in FORBIDDEN_TARGET_NAMES or name.startswith('meson'):
+ if name in FORBIDDEN_TARGET_NAMES or name.startswith('meson') or name[0].isdigit():
name = 'cm_' + name
return name
@@ -223,6 +223,7 @@ class ConverterTarget:
self.install = target.install
self.install_dir: T.Optional[Path] = None
self.link_libraries = target.link_libraries
+ self.link_targets: T.List[str] = []
self.link_flags = target.link_flags + target.link_lang_flags
self.public_link_flags: T.List[str] = []
self.depends_raw: T.List[str] = []
@@ -363,6 +364,8 @@ class ConverterTarget:
self.public_link_flags += rtgt.public_link_flags
self.public_compile_opts += rtgt.public_compile_opts
self.link_libraries += rtgt.libraries
+ self.depends_raw += rtgt.target_dependencies
+ self.link_targets += rtgt.target_dependencies
elif self.type.upper() not in ['EXECUTABLE', 'OBJECT_LIBRARY']:
mlog.warning('CMake: Target', mlog.bold(self.cmake_name), 'not found in CMake trace. This can lead to build errors')
@@ -957,17 +960,27 @@ class CMakeInterpreter:
object_libs += [tgt]
self.languages += [x for x in tgt.languages if x not in self.languages]
- # Second pass: Detect object library dependencies
+ # Second pass: Populate link_with project internal targets
+ for tgt in self.targets:
+ for i in tgt.link_targets:
+ # Handle target-based link libraries
+ link_with = self.output_target_map.target(i)
+ if not link_with or isinstance(link_with, ConverterCustomTarget):
+ # Generated file etc.
+ continue
+ tgt.link_with.append(link_with)
+
+ # Third pass: Detect object library dependencies
for tgt in self.targets:
tgt.process_object_libs(object_libs, self._object_lib_workaround)
- # Third pass: Reassign dependencies to avoid some loops
+ # Fourth pass: Reassign dependencies to avoid some loops
for tgt in self.targets:
tgt.process_inter_target_dependencies()
for ctgt in self.custom_targets:
ctgt.process_inter_target_dependencies()
- # Fourth pass: Remove rassigned dependencies
+ # Fifth pass: Remove reassigned dependencies
for tgt in self.targets:
tgt.cleanup_dependencies()
diff --git a/mesonbuild/cmake/toolchain.py b/mesonbuild/cmake/toolchain.py
index d410886..11a00be 100644
--- a/mesonbuild/cmake/toolchain.py
+++ b/mesonbuild/cmake/toolchain.py
@@ -175,7 +175,12 @@ class CMakeToolchain:
# Set the compiler variables
for lang, comp_obj in self.compilers.items():
- prefix = 'CMAKE_{}_'.format(language_map.get(lang, lang.upper()))
+ language = language_map.get(lang, None)
+
+ if not language:
+ continue # unsupported language
+
+ prefix = 'CMAKE_{}_'.format(language)
exe_list = comp_obj.get_exelist()
if not exe_list:
@@ -211,7 +216,7 @@ class CMakeToolchain:
# Generate the CMakeLists.txt
mlog.debug('CMake Toolchain: Calling CMake once to generate the compiler state')
languages = list(self.compilers.keys())
- lang_ids = [language_map.get(x, x.upper()) for x in languages]
+ lang_ids = [language_map.get(x) for x in languages if x in language_map]
cmake_content = dedent(f'''
cmake_minimum_required(VERSION 3.10)
project(CompInfo {' '.join(lang_ids)})
diff --git a/mesonbuild/cmake/tracetargets.py b/mesonbuild/cmake/tracetargets.py
index 2cc0c17..2b2b93d 100644
--- a/mesonbuild/cmake/tracetargets.py
+++ b/mesonbuild/cmake/tracetargets.py
@@ -45,6 +45,7 @@ class ResolvedTarget:
self.public_link_flags: T.List[str] = []
self.public_compile_opts: T.List[str] = []
self.libraries: T.List[str] = []
+ self.target_dependencies: T.List[str] = []
def resolve_cmake_trace_targets(target_name: str,
trace: 'CMakeTraceParser',
@@ -86,6 +87,7 @@ def resolve_cmake_trace_targets(target_name: str,
curr_path = Path(*path_to_framework)
framework_path = curr_path.parent
framework_name = curr_path.stem
+ res.public_compile_opts += [f"-F{framework_path}"]
res.libraries += [f'-F{framework_path}', '-framework', framework_name]
else:
res.libraries += [curr]
@@ -144,9 +146,13 @@ def resolve_cmake_trace_targets(target_name: str,
targets += [x for x in tgt.properties['IMPORTED_LOCATION'] if x]
if 'LINK_LIBRARIES' in tgt.properties:
- targets += [x for x in tgt.properties['LINK_LIBRARIES'] if x]
+ link_libraries = [x for x in tgt.properties['LINK_LIBRARIES'] if x]
+ targets += link_libraries
+ res.target_dependencies += link_libraries
if 'INTERFACE_LINK_LIBRARIES' in tgt.properties:
- targets += [x for x in tgt.properties['INTERFACE_LINK_LIBRARIES'] if x]
+ link_libraries = [x for x in tgt.properties['INTERFACE_LINK_LIBRARIES'] if x]
+ targets += link_libraries
+ res.target_dependencies += link_libraries
if f'IMPORTED_LINK_DEPENDENT_LIBRARIES_{cfg}' in tgt.properties:
targets += [x for x in tgt.properties[f'IMPORTED_LINK_DEPENDENT_LIBRARIES_{cfg}'] if x]
diff --git a/mesonbuild/compilers/__init__.py b/mesonbuild/compilers/__init__.py
index aab761a..f645090 100644
--- a/mesonbuild/compilers/__init__.py
+++ b/mesonbuild/compilers/__init__.py
@@ -18,6 +18,7 @@ __all__ = [
'is_library',
'is_llvm_ir',
'is_object',
+ 'is_separate_compile',
'is_source',
'is_java',
'is_known_suffix',
@@ -62,6 +63,7 @@ from .compilers import (
is_object,
is_library,
is_known_suffix,
+ is_separate_compile,
lang_suffixes,
LANGUAGES_USING_LDFLAGS,
sort_clink,
diff --git a/mesonbuild/compilers/c.py b/mesonbuild/compilers/c.py
index 7a2fec5..424b612 100644
--- a/mesonbuild/compilers/c.py
+++ b/mesonbuild/compilers/c.py
@@ -504,7 +504,7 @@ class IntelClCCompiler(IntelVisualStudioLikeCompiler, VisualStudioLikeCCompilerM
def get_option_std_args(self, target: BuildTarget, env: Environment, subproject: T.Optional[str] = None) -> T.List[str]:
args: T.List[str] = []
- std = self.get_compileropt_value('winlibs', env, target, subproject)
+ std = self.get_compileropt_value('std', env, target, subproject)
assert isinstance(std, str)
if std == 'c89':
mlog.log("ICL doesn't explicitly implement c89, setting the standard to 'none', which is close.", once=True)
diff --git a/mesonbuild/compilers/compilers.py b/mesonbuild/compilers/compilers.py
index 3c1d58b..a823aeb 100644
--- a/mesonbuild/compilers/compilers.py
+++ b/mesonbuild/compilers/compilers.py
@@ -42,7 +42,7 @@ _T = T.TypeVar('_T')
about. To support a new compiler, add its information below.
Also add corresponding autodetection code in detect.py."""
-header_suffixes = {'h', 'hh', 'hpp', 'hxx', 'H', 'ipp', 'moc', 'vapi', 'di'}
+header_suffixes = {'h', 'hh', 'hpp', 'hxx', 'H', 'ipp', 'moc', 'vapi', 'di', 'pxd', 'pxi'}
obj_suffixes = {'o', 'obj', 'res'}
# To the emscripten compiler, .js files are libraries
lib_suffixes = {'a', 'lib', 'dll', 'dll.a', 'dylib', 'so', 'js'}
@@ -84,7 +84,7 @@ clib_langs = ('objcpp', 'cpp', 'objc', 'c', 'nasm', 'fortran')
# List of languages that can be linked with C code directly by the linker
# used in build.py:process_compilers() and build.py:get_dynamic_linker()
# This must be sorted, see sort_clink().
-clink_langs = ('d', 'cuda') + clib_langs
+clink_langs = ('rust', 'd', 'cuda') + clib_langs
SUFFIX_TO_LANG = dict(itertools.chain(*(
[(suffix, lang) for suffix in v] for lang, v in lang_suffixes.items())))
@@ -154,6 +154,9 @@ def is_java(fname: mesonlib.FileOrString) -> bool:
suffix = fname.split('.')[-1]
return suffix in lang_suffixes['java']
+def is_separate_compile(fname: mesonlib.FileOrString) -> bool:
+ return not fname.endswith('.rs')
+
def is_llvm_ir(fname: 'mesonlib.FileOrString') -> bool:
if isinstance(fname, mesonlib.File):
fname = fname.fname
@@ -749,7 +752,7 @@ class Compiler(HoldableObject, metaclass=abc.ABCMeta):
return args.copy()
def find_library(self, libname: str, env: 'Environment', extra_dirs: T.List[str],
- libtype: LibType = LibType.PREFER_SHARED, lib_prefix_warning: bool = True) -> T.Optional[T.List[str]]:
+ libtype: LibType = LibType.PREFER_SHARED, lib_prefix_warning: bool = True, ignore_system_dirs: bool = False) -> T.Optional[T.List[str]]:
raise EnvironmentException(f'Language {self.get_display_language()} does not support library finding.')
def get_library_naming(self, env: 'Environment', libtype: LibType,
@@ -933,11 +936,10 @@ class Compiler(HoldableObject, metaclass=abc.ABCMeta):
"""
return None
- def build_rpath_args(self, env: 'Environment', build_dir: str, from_dir: str,
- rpath_paths: T.Tuple[str, ...], build_rpath: str,
- install_rpath: str) -> T.Tuple[T.List[str], T.Set[bytes]]:
+ def build_rpath_args(self, env: Environment, build_dir: str, from_dir: str,
+ target: BuildTarget, extra_paths: T.Optional[T.List[str]] = None) -> T.Tuple[T.List[str], T.Set[bytes]]:
return self.linker.build_rpath_args(
- env, build_dir, from_dir, rpath_paths, build_rpath, install_rpath)
+ env, build_dir, from_dir, target, extra_paths)
def get_archive_name(self, filename: str) -> str:
return self.linker.get_archive_name(filename)
@@ -1200,6 +1202,23 @@ class Compiler(HoldableObject, metaclass=abc.ABCMeta):
is good enough here.
"""
+ def run_sanity_check(self, environment: Environment, cmdlist: T.List[str], work_dir: str, use_exe_wrapper_for_cross: bool = True) -> T.Tuple[str, str]:
+ # Run sanity check
+ if self.is_cross and use_exe_wrapper_for_cross:
+ if not environment.has_exe_wrapper():
+ # Can't check if the binaries run so we have to assume they do
+ return ('', '')
+ cmdlist = environment.exe_wrapper.get_command() + cmdlist
+ mlog.debug('Running test binary command: ', mesonlib.join_args(cmdlist))
+ try:
+ pe, stdo, stde = Popen_safe_logged(cmdlist, 'Sanity check', cwd=work_dir)
+ except Exception as e:
+ raise mesonlib.EnvironmentException(f'Could not invoke sanity check executable: {e!s}.')
+
+ if pe.returncode != 0:
+ raise mesonlib.EnvironmentException(f'Executables created by {self.language} compiler {self.name_string()} are not runnable.')
+ return stdo, stde
+
def split_shlib_to_parts(self, fname: str) -> T.Tuple[T.Optional[str], str]:
return None, fname
@@ -1397,43 +1416,3 @@ class Compiler(HoldableObject, metaclass=abc.ABCMeta):
if 'none' not in value:
value = ['none'] + value
std.choices = value
-
-
-def get_global_options(lang: str,
- comp: T.Type[Compiler],
- for_machine: MachineChoice,
- env: 'Environment') -> dict[OptionKey, options.AnyOptionType]:
- """Retrieve options that apply to all compilers for a given language."""
- description = f'Extra arguments passed to the {lang}'
- argkey = OptionKey(f'{lang}_args', machine=for_machine)
- largkey = OptionKey(f'{lang}_link_args', machine=for_machine)
- envkey = OptionKey(f'{lang}_env_args', machine=for_machine)
-
- comp_key = argkey if argkey in env.options else envkey
-
- comp_options = env.options.get(comp_key, [])
- link_options = env.options.get(largkey, [])
- assert isinstance(comp_options, (str, list)), 'for mypy'
- assert isinstance(link_options, (str, list)), 'for mypy'
-
- cargs = options.UserStringArrayOption(
- argkey.name,
- description + ' compiler',
- comp_options, split_args=True, allow_dups=True)
-
- largs = options.UserStringArrayOption(
- largkey.name,
- description + ' linker',
- link_options, split_args=True, allow_dups=True)
-
- if comp.INVOKES_LINKER and comp_key == envkey:
- # If the compiler acts as a linker driver, and we're using the
- # environment variable flags for both the compiler and linker
- # arguments, then put the compiler flags in the linker flags as well.
- # This is how autotools works, and the env vars feature is for
- # autotools compatibility.
- largs.extend_value(comp_options)
-
- opts: dict[OptionKey, options.AnyOptionType] = {argkey: cargs, largkey: largs}
-
- return opts
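The new `Compiler.run_sanity_check()` helper centralizes logic that half a dozen compilers duplicated, as the call-site hunks below show: when cross compiling without an exe wrapper it assumes the binary runs and returns ('', ''); otherwise it prepends the wrapper command and raises on a non-zero exit status. A sketch of the pattern a language's `sanity_check()` now follows (the standalone function is illustrative, not a real compiler class):

import os

def sanity_check_sketch(compiler, environment, work_dir: str) -> None:
    binary_name = os.path.join(work_dir, 'sanitycheck')
    # ... language-specific code compiles a trivial program to binary_name ...
    # run_sanity_check() handles the cross/exe-wrapper cases and raises
    # EnvironmentException if the produced binary does not run.
    stdo, stde = compiler.run_sanity_check(environment, [binary_name], work_dir)
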
diff --git a/mesonbuild/compilers/cpp.py b/mesonbuild/compilers/cpp.py
index 01b9bb9..ed8d1cf 100644
--- a/mesonbuild/compilers/cpp.py
+++ b/mesonbuild/compilers/cpp.py
@@ -311,6 +311,9 @@ class ClangCPPCompiler(_StdCPPLibMixin, ClangCPPStds, ClangCompiler, CPPCompiler
return libs
return []
+ def is_libcpp_enable_assertions_deprecated(self) -> bool:
+ return version_compare(self.version, ">=18")
+
def get_assert_args(self, disable: bool, env: 'Environment') -> T.List[str]:
if disable:
return ['-DNDEBUG']
@@ -323,7 +326,7 @@ class ClangCPPCompiler(_StdCPPLibMixin, ClangCPPStds, ClangCompiler, CPPCompiler
if self.language_stdlib_provider(env) == 'stdc++':
return ['-D_GLIBCXX_ASSERTIONS=1']
else:
- if version_compare(self.version, '>=18'):
+ if self.is_libcpp_enable_assertions_deprecated():
return ['-D_LIBCPP_HARDENING_MODE=_LIBCPP_HARDENING_MODE_FAST']
elif version_compare(self.version, '>=15'):
return ['-D_LIBCPP_ENABLE_ASSERTIONS=1']
@@ -343,7 +346,12 @@ class ArmLtdClangCPPCompiler(ClangCPPCompiler):
class AppleClangCPPCompiler(AppleCompilerMixin, AppleCPPStdsMixin, ClangCPPCompiler):
- pass
+ def is_libcpp_enable_assertions_deprecated(self) -> bool:
+ # Upstream libc++ deprecated _LIBCPP_ENABLE_ASSERTIONS
+ # in favor of _LIBCPP_HARDENING_MODE from version 18 onwards,
+ # but Apple Clang 17's libc++ has back-ported that change.
+ # See: https://github.com/mesonbuild/meson/issues/14440
+ return version_compare(self.version, ">=17")
class EmscriptenCPPCompiler(EmscriptenMixin, ClangCPPCompiler):
@@ -872,8 +880,7 @@ class CPP11AsCPP14Mixin(CompilerMixinBase):
'attempting best effort; setting the standard to C++14',
once=True, fatal=False)
original_args = super().get_option_std_args(target, env, subproject)
- std_mapping = {'/std:c++11': '/std:c++14',
- '/std:c++14': '/std:vc++14'}
+ std_mapping = {'/std:c++11': '/std:c++14'}
processed_args = [std_mapping.get(x, x) for x in original_args]
return processed_args
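Factoring the version check into `is_libcpp_enable_assertions_deprecated()` lets `AppleClangCPPCompiler` override only the cutoff (17 instead of 18) while reusing the shared macro selection. A hedged restatement of the libc++ branch of `get_assert_args()` (the standalone function is illustrative; behaviour below Clang 15 is not shown in this hunk):

from mesonbuild.mesonlib import version_compare

def libcxx_assert_macro(clang_version: str, apple: bool) -> str:
    cutoff = '>=17' if apple else '>=18'
    if version_compare(clang_version, cutoff):
        return '-D_LIBCPP_HARDENING_MODE=_LIBCPP_HARDENING_MODE_FAST'
    if version_compare(clang_version, '>=15'):
        return '-D_LIBCPP_ENABLE_ASSERTIONS=1'
    return ''  # older toolchains: not covered by this hunk
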
diff --git a/mesonbuild/compilers/cs.py b/mesonbuild/compilers/cs.py
index 38bb338..4bbddeb 100644
--- a/mesonbuild/compilers/cs.py
+++ b/mesonbuild/compilers/cs.py
@@ -102,10 +102,7 @@ class CsCompiler(BasicLinkerIsCompilerMixin, Compiler):
cmdlist = [self.runner, obj]
else:
cmdlist = [os.path.join(work_dir, obj)]
- pe = subprocess.Popen(cmdlist, cwd=work_dir)
- pe.wait()
- if pe.returncode != 0:
- raise EnvironmentException('Executables created by Mono compiler %s are not runnable.' % self.name_string())
+ self.run_sanity_check(environment, cmdlist, work_dir, use_exe_wrapper_for_cross=False)
def needs_static_linker(self) -> bool:
return False
diff --git a/mesonbuild/compilers/cuda.py b/mesonbuild/compilers/cuda.py
index 6cc6f96..7e050f1 100644
--- a/mesonbuild/compilers/cuda.py
+++ b/mesonbuild/compilers/cuda.py
@@ -198,6 +198,7 @@ class CudaCompiler(Compiler):
for level, flags in host_compiler.warn_args.items()
}
self.host_werror_args = ['-Xcompiler=' + x for x in self.host_compiler.get_werror_args()]
+ self.debug_macros_available = version_compare(self.version, '>=12.9')
@classmethod
def _shield_nvcc_list_arg(cls, arg: str, listmode: bool = True) -> str:
@@ -577,21 +578,12 @@ class CudaCompiler(Compiler):
# Run sanity check (if possible)
if self.is_cross:
- if not env.has_exe_wrapper():
- return
- else:
- cmdlist = env.exe_wrapper.get_command() + [binary_name]
- else:
- cmdlist = self.exelist + ['--run', '"' + binary_name + '"']
- mlog.debug('Sanity check run command line: ', ' '.join(cmdlist))
- pe, stdo, stde = Popen_safe(cmdlist, cwd=work_dir)
- mlog.debug('Sanity check run stdout: ')
- mlog.debug(stdo)
- mlog.debug('-----\nSanity check run stderr:')
- mlog.debug(stde)
- mlog.debug('-----')
- pe.wait()
- if pe.returncode != 0:
+ return
+
+ cmdlist = self.exelist + ['--run', f'"{binary_name}"']
+ try:
+ stdo, stde = self.run_sanity_check(env, cmdlist, work_dir)
+ except EnvironmentException:
raise EnvironmentException(f'Executables created by {self.language} compiler {self.name_string()} are not runnable.')
# Interpret the result of the sanity test.
@@ -599,8 +591,6 @@ class CudaCompiler(Compiler):
# architecture detection test.
if stde == '':
self.detected_cc = stdo
- else:
- mlog.debug('cudaGetDeviceCount() returned ' + stde)
def has_header_symbol(self, hname: str, symbol: str, prefix: str,
env: 'Environment', *,
@@ -741,11 +731,10 @@ class CudaCompiler(Compiler):
def get_optimization_link_args(self, optimization_level: str) -> T.List[str]:
return self._to_host_flags(self.host_compiler.get_optimization_link_args(optimization_level), Phase.LINKER)
- def build_rpath_args(self, env: 'Environment', build_dir: str, from_dir: str,
- rpath_paths: T.Tuple[str, ...], build_rpath: str,
- install_rpath: str) -> T.Tuple[T.List[str], T.Set[bytes]]:
+ def build_rpath_args(self, env: Environment, build_dir: str, from_dir: str,
+ target: BuildTarget, extra_paths: T.Optional[T.List[str]] = None) -> T.Tuple[T.List[str], T.Set[bytes]]:
(rpath_args, rpath_dirs_to_remove) = self.host_compiler.build_rpath_args(
- env, build_dir, from_dir, rpath_paths, build_rpath, install_rpath)
+ env, build_dir, from_dir, target, extra_paths)
return (self._to_host_flags(rpath_args, Phase.LINKER), rpath_dirs_to_remove)
def linker_to_compiler_args(self, args: T.List[str]) -> T.List[str]:
@@ -774,8 +763,8 @@ class CudaCompiler(Compiler):
return self._to_host_flags(self.host_compiler.get_std_exe_link_args(), Phase.LINKER)
def find_library(self, libname: str, env: 'Environment', extra_dirs: T.List[str],
- libtype: LibType = LibType.PREFER_SHARED, lib_prefix_warning: bool = True) -> T.Optional[T.List[str]]:
- return self.host_compiler.find_library(libname, env, extra_dirs, libtype, lib_prefix_warning)
+ libtype: LibType = LibType.PREFER_SHARED, lib_prefix_warning: bool = True, ignore_system_dirs: bool = False) -> T.Optional[T.List[str]]:
+ return self.host_compiler.find_library(libname, env, extra_dirs, libtype, lib_prefix_warning, ignore_system_dirs)
def get_crt_compile_args(self, crt_val: str, buildtype: str) -> T.List[str]:
return self._to_host_flags(self.host_compiler.get_crt_compile_args(crt_val, buildtype))
@@ -819,7 +808,12 @@ class CudaCompiler(Compiler):
return ['-Xcompiler=' + x for x in self.host_compiler.get_profile_use_args()]
def get_assert_args(self, disable: bool, env: 'Environment') -> T.List[str]:
- return self.host_compiler.get_assert_args(disable, env)
+ cccl_macros = []
+ if not disable and self.debug_macros_available:
+ # https://github.com/NVIDIA/cccl/pull/2382
+ cccl_macros = ['-DCCCL_ENABLE_ASSERTIONS=1']
+
+ return self.host_compiler.get_assert_args(disable, env) + cccl_macros
def has_multi_arguments(self, args: T.List[str], env: Environment) -> T.Tuple[bool, bool]:
args = self._to_host_flags(args)
diff --git a/mesonbuild/compilers/d.py b/mesonbuild/compilers/d.py
index 8ee6ebf..9f662ad 100644
--- a/mesonbuild/compilers/d.py
+++ b/mesonbuild/compilers/d.py
@@ -26,7 +26,7 @@ from .mixins.gnu import gnu_common_warning_args
if T.TYPE_CHECKING:
from . import compilers
- from ..build import DFeatures
+ from ..build import BuildTarget, DFeatures
from ..dependencies import Dependency
from ..envconfig import MachineInfo
from ..environment import Environment
@@ -175,9 +175,8 @@ class DmdLikeCompilerMixin(CompilerMixinBase):
def gen_import_library_args(self, implibname: str) -> T.List[str]:
return self.linker.import_library_args(implibname)
- def build_rpath_args(self, env: 'Environment', build_dir: str, from_dir: str,
- rpath_paths: T.Tuple[str, ...], build_rpath: str,
- install_rpath: str) -> T.Tuple[T.List[str], T.Set[bytes]]:
+ def build_rpath_args(self, env: Environment, build_dir: str, from_dir: str,
+ target: BuildTarget, extra_paths: T.Optional[T.List[str]] = None) -> T.Tuple[T.List[str], T.Set[bytes]]:
if self.info.is_windows():
return ([], set())
@@ -188,7 +187,7 @@ class DmdLikeCompilerMixin(CompilerMixinBase):
# split into two separate arguments both prefaced with the -L=.
args: T.List[str] = []
(rpath_args, rpath_dirs_to_remove) = super().build_rpath_args(
- env, build_dir, from_dir, rpath_paths, build_rpath, install_rpath)
+ env, build_dir, from_dir, target)
for r in rpath_args:
if ',' in r:
a, b = r.split(',', maxsplit=1)
@@ -199,7 +198,7 @@ class DmdLikeCompilerMixin(CompilerMixinBase):
return (args, rpath_dirs_to_remove)
return super().build_rpath_args(
- env, build_dir, from_dir, rpath_paths, build_rpath, install_rpath)
+ env, build_dir, from_dir, target)
@classmethod
def _translate_args_to_nongnu(cls, args: T.List[str], info: MachineInfo, link_id: str) -> T.List[str]:
@@ -456,15 +455,7 @@ class DCompiler(Compiler):
if pc.returncode != 0:
raise EnvironmentException('D compiler %s cannot compile programs.' % self.name_string())
- if self.is_cross:
- if not environment.has_exe_wrapper():
- # Can't check if the binaries run so we have to assume they do
- return
- cmdlist = environment.exe_wrapper.get_command() + [output_name]
- else:
- cmdlist = [output_name]
- if subprocess.call(cmdlist) != 0:
- raise EnvironmentException('Executables created by D compiler %s are not runnable.' % self.name_string())
+ stdo, stde = self.run_sanity_check(environment, [output_name], work_dir)
def needs_static_linker(self) -> bool:
return True
diff --git a/mesonbuild/compilers/detect.py b/mesonbuild/compilers/detect.py
index 53bdd85..f57957f 100644
--- a/mesonbuild/compilers/detect.py
+++ b/mesonbuild/compilers/detect.py
@@ -107,7 +107,7 @@ def detect_compiler_for(env: 'Environment', lang: str, for_machine: MachineChoic
if comp is None:
return comp
assert comp.for_machine == for_machine
- env.coredata.process_compiler_options(lang, comp, env, subproject)
+ env.coredata.process_compiler_options(lang, comp, subproject)
if not skip_sanity_check:
comp.sanity_check(env.get_scratch_dir(), env)
env.coredata.compilers[comp.for_machine][lang] = comp
@@ -366,7 +366,7 @@ def _detect_c_or_cpp_compiler(env: 'Environment', lang: str, for_machine: Machin
if 'Emscripten' in out:
cls = c.EmscriptenCCompiler if lang == 'c' else cpp.EmscriptenCPPCompiler
- env.coredata.add_lang_args(cls.language, cls, for_machine, env)
+ env.add_lang_args(cls.language, cls, for_machine)
# emcc requires a file input in order to pass arguments to the
# linker. It'll exit with an error code, but still print the
@@ -410,7 +410,7 @@ def _detect_c_or_cpp_compiler(env: 'Environment', lang: str, for_machine: Machin
full_version = arm_ver_str
cls = c.ArmclangCCompiler if lang == 'c' else cpp.ArmclangCPPCompiler
linker = linkers.ArmClangDynamicLinker(for_machine, version=version)
- env.coredata.add_lang_args(cls.language, cls, for_machine, env)
+ env.add_lang_args(cls.language, cls, for_machine)
return cls(
ccache, compiler, version, for_machine, is_cross, info,
full_version=full_version, linker=linker)
@@ -445,7 +445,7 @@ def _detect_c_or_cpp_compiler(env: 'Environment', lang: str, for_machine: Machin
if identifier in out:
cls = compiler_classes[0] if lang == 'c' else compiler_classes[1]
lnk = compiler_classes[2]
- env.coredata.add_lang_args(cls.language, cls, for_machine, env)
+ env.add_lang_args(cls.language, cls, for_machine)
linker = lnk(compiler, for_machine, version=version)
return cls(
ccache, compiler, version, for_machine, is_cross, info,
@@ -482,7 +482,7 @@ def _detect_c_or_cpp_compiler(env: 'Environment', lang: str, for_machine: Machin
version = search_version(err)
target = 'x86' if 'IA-32' in err else 'x86_64'
cls = c.IntelClCCompiler if lang == 'c' else cpp.IntelClCPPCompiler
- env.coredata.add_lang_args(cls.language, cls, for_machine, env)
+ env.add_lang_args(cls.language, cls, for_machine)
linker = linkers.XilinkDynamicLinker(for_machine, [], version=version)
return cls(
compiler, version, for_machine, is_cross, info, target,
@@ -491,7 +491,7 @@ def _detect_c_or_cpp_compiler(env: 'Environment', lang: str, for_machine: Machin
version = search_version(err)
target = 'x86' if 'IA-32' in err else 'x86_64'
cls = c.IntelLLVMClCCompiler if lang == 'c' else cpp.IntelLLVMClCPPCompiler
- env.coredata.add_lang_args(cls.language, cls, for_machine, env)
+ env.add_lang_args(cls.language, cls, for_machine)
linker = linkers.XilinkDynamicLinker(for_machine, [], version=version)
return cls(
compiler, version, for_machine, is_cross, info, target,
@@ -524,14 +524,14 @@ def _detect_c_or_cpp_compiler(env: 'Environment', lang: str, for_machine: Machin
full_version=cl_signature, linker=linker)
if 'PGI Compilers' in out:
cls = c.PGICCompiler if lang == 'c' else cpp.PGICPPCompiler
- env.coredata.add_lang_args(cls.language, cls, for_machine, env)
+ env.add_lang_args(cls.language, cls, for_machine)
linker = linkers.PGIDynamicLinker(compiler, for_machine, cls.LINKER_PREFIX, [], version=version)
return cls(
ccache, compiler, version, for_machine, is_cross,
info, linker=linker)
if 'NVIDIA Compilers and Tools' in out:
cls = c.NvidiaHPC_CCompiler if lang == 'c' else cpp.NvidiaHPC_CPPCompiler
- env.coredata.add_lang_args(cls.language, cls, for_machine, env)
+ env.add_lang_args(cls.language, cls, for_machine)
linker = linkers.NvidiaHPC_DynamicLinker(compiler, for_machine, cls.LINKER_PREFIX, [], version=version)
return cls(
ccache, compiler, version, for_machine, is_cross,
@@ -550,14 +550,14 @@ def _detect_c_or_cpp_compiler(env: 'Environment', lang: str, for_machine: Machin
full_version=full_version, linker=l)
if 'ARM' in out and not ('Metrowerks' in out or 'Freescale' in out):
cls = c.ArmCCompiler if lang == 'c' else cpp.ArmCPPCompiler
- env.coredata.add_lang_args(cls.language, cls, for_machine, env)
+ env.add_lang_args(cls.language, cls, for_machine)
linker = linkers.ArmDynamicLinker(for_machine, version=version)
return cls(
ccache, compiler, version, for_machine, is_cross,
info, full_version=full_version, linker=linker)
if 'RX Family' in out:
cls = c.CcrxCCompiler if lang == 'c' else cpp.CcrxCPPCompiler
- env.coredata.add_lang_args(cls.language, cls, for_machine, env)
+ env.add_lang_args(cls.language, cls, for_machine)
linker = linkers.CcrxDynamicLinker(for_machine, version=version)
return cls(
ccache, compiler, version, for_machine, is_cross, info,
@@ -565,7 +565,7 @@ def _detect_c_or_cpp_compiler(env: 'Environment', lang: str, for_machine: Machin
if 'Microchip Technology' in out:
cls = c.Xc16CCompiler
- env.coredata.add_lang_args(cls.language, cls, for_machine, env)
+ env.add_lang_args(cls.language, cls, for_machine)
linker = linkers.Xc16DynamicLinker(for_machine, version=version)
return cls(
ccache, compiler, version, for_machine, is_cross, info,
@@ -573,7 +573,7 @@ def _detect_c_or_cpp_compiler(env: 'Environment', lang: str, for_machine: Machin
if 'CompCert' in out:
cls = c.CompCertCCompiler
- env.coredata.add_lang_args(cls.language, cls, for_machine, env)
+ env.add_lang_args(cls.language, cls, for_machine)
linker = linkers.CompCertDynamicLinker(for_machine, version=version)
return cls(
ccache, compiler, version, for_machine, is_cross, info,
@@ -591,7 +591,7 @@ def _detect_c_or_cpp_compiler(env: 'Environment', lang: str, for_machine: Machin
assert mwcc_ver_match is not None, 'for mypy' # because mypy *should* be complaining that this could be None
compiler_version = '.'.join(x for x in mwcc_ver_match.groups() if x is not None)
- env.coredata.add_lang_args(cls.language, cls, for_machine, env)
+ env.add_lang_args(cls.language, cls, for_machine)
ld = env.lookup_binary_entry(for_machine, cls.language + '_ld')
if ld is not None:
@@ -616,7 +616,7 @@ def _detect_c_or_cpp_compiler(env: 'Environment', lang: str, for_machine: Machin
assert tasking_ver_match is not None, 'for mypy'
tasking_version = '.'.join(x for x in tasking_ver_match.groups() if x is not None)
- env.coredata.add_lang_args(cls.language, cls, for_machine, env)
+ env.add_lang_args(cls.language, cls, for_machine)
ld = env.lookup_binary_entry(for_machine, cls.language + '_ld')
if ld is None:
raise MesonException(f'{cls.language}_ld was not properly defined in your cross file')
@@ -668,7 +668,7 @@ def detect_cuda_compiler(env: 'Environment', for_machine: MachineChoice) -> Comp
version = out.strip().rsplit('V', maxsplit=1)[-1]
cpp_compiler = detect_cpp_compiler(env, for_machine)
cls = CudaCompiler
- env.coredata.add_lang_args(cls.language, cls, for_machine, env)
+ env.add_lang_args(cls.language, cls, for_machine)
key = OptionKey('cuda_link_args', machine=for_machine)
if key in env.options:
# To fix LDFLAGS issue
@@ -759,7 +759,7 @@ def detect_fortran_compiler(env: 'Environment', for_machine: MachineChoice) -> C
version = search_version(err)
target = 'x86' if 'IA-32' in err else 'x86_64'
cls = fortran.IntelLLVMClFortranCompiler
- env.coredata.add_lang_args(cls.language, cls, for_machine, env)
+ env.add_lang_args(cls.language, cls, for_machine)
linker = linkers.XilinkDynamicLinker(for_machine, [], version=version)
return cls(
compiler, version, for_machine, is_cross, info,
@@ -769,7 +769,7 @@ def detect_fortran_compiler(env: 'Environment', for_machine: MachineChoice) -> C
version = search_version(err)
target = 'x86' if 'IA-32' in err else 'x86_64'
cls = fortran.IntelClFortranCompiler
- env.coredata.add_lang_args(cls.language, cls, for_machine, env)
+ env.add_lang_args(cls.language, cls, for_machine)
linker = linkers.XilinkDynamicLinker(for_machine, [], version=version)
return cls(
compiler, version, for_machine, is_cross, info,
@@ -796,7 +796,7 @@ def detect_fortran_compiler(env: 'Environment', for_machine: MachineChoice) -> C
if 'PGI Compilers' in out:
cls = fortran.PGIFortranCompiler
- env.coredata.add_lang_args(cls.language, cls, for_machine, env)
+ env.add_lang_args(cls.language, cls, for_machine)
linker = linkers.PGIDynamicLinker(compiler, for_machine,
cls.LINKER_PREFIX, [], version=version)
return cls(
@@ -805,7 +805,7 @@ def detect_fortran_compiler(env: 'Environment', for_machine: MachineChoice) -> C
if 'NVIDIA Compilers and Tools' in out:
cls = fortran.NvidiaHPC_FortranCompiler
- env.coredata.add_lang_args(cls.language, cls, for_machine, env)
+ env.add_lang_args(cls.language, cls, for_machine)
linker = linkers.PGIDynamicLinker(compiler, for_machine,
cls.LINKER_PREFIX, [], version=version)
return cls(
@@ -856,7 +856,7 @@ def detect_fortran_compiler(env: 'Environment', for_machine: MachineChoice) -> C
full_version = err.split('\n', 1)[0]
version = full_version.split()[-1]
cls = fortran.NAGFortranCompiler
- env.coredata.add_lang_args(cls.language, cls, for_machine, env)
+ env.add_lang_args(cls.language, cls, for_machine)
linker = linkers.NAGDynamicLinker(
compiler, for_machine, cls.LINKER_PREFIX, [],
version=version)
@@ -948,7 +948,7 @@ def detect_java_compiler(env: 'Environment', for_machine: MachineChoice) -> Comp
if len(parts) > 1:
version = parts[1]
comp_class = JavaCompiler
- env.coredata.add_lang_args(comp_class.language, comp_class, for_machine, env)
+ env.add_lang_args(comp_class.language, comp_class, for_machine)
return comp_class(exelist, version, for_machine, info)
raise EnvironmentException('Unknown compiler: ' + join_args(exelist))
@@ -972,7 +972,7 @@ def detect_cs_compiler(env: 'Environment', for_machine: MachineChoice) -> Compil
cls = cs.VisualStudioCsCompiler
else:
continue
- env.coredata.add_lang_args(cls.language, cls, for_machine, env)
+ env.add_lang_args(cls.language, cls, for_machine)
return cls(comp, version, for_machine, info)
_handle_exceptions(popen_exceptions, compilers)
@@ -1002,7 +1002,7 @@ def detect_cython_compiler(env: 'Environment', for_machine: MachineChoice) -> Co
version = search_version(err)
if version is not None:
comp_class = CythonCompiler
- env.coredata.add_lang_args(comp_class.language, comp_class, for_machine, env)
+ env.add_lang_args(comp_class.language, comp_class, for_machine)
return comp_class([], comp, version, for_machine, info, is_cross=is_cross)
_handle_exceptions(popen_exceptions, compilers)
raise EnvironmentException('Unreachable code (exception to make mypy happy)')
@@ -1023,7 +1023,7 @@ def detect_vala_compiler(env: 'Environment', for_machine: MachineChoice) -> Comp
version = search_version(out)
if 'Vala' in out:
comp_class = ValaCompiler
- env.coredata.add_lang_args(comp_class.language, comp_class, for_machine, env)
+ env.add_lang_args(comp_class.language, comp_class, for_machine)
return comp_class(exelist, version, for_machine, is_cross, info)
raise EnvironmentException('Unknown compiler: ' + join_args(exelist))
@@ -1145,7 +1145,7 @@ def detect_rust_compiler(env: 'Environment', for_machine: MachineChoice) -> Rust
c = linker.exelist[1] if linker.exelist[0].endswith('ccache') else linker.exelist[0]
compiler.extend(cls.use_linker_args(c, ''))
- env.coredata.add_lang_args(cls.language, cls, for_machine, env)
+ env.add_lang_args(cls.language, cls, for_machine)
return cls(
compiler, version, for_machine, is_cross, info,
linker=linker, full_version=full_version)
@@ -1329,20 +1329,20 @@ def detect_nasm_compiler(env: 'Environment', for_machine: MachineChoice) -> Comp
version = search_version(output)
if 'NASM' in output:
comp_class = NasmCompiler
- env.coredata.add_lang_args(comp_class.language, comp_class, for_machine, env)
+ env.add_lang_args(comp_class.language, comp_class, for_machine)
return comp_class([], comp, version, for_machine, info, cc.linker, is_cross=is_cross)
elif 'yasm' in output:
comp_class = YasmCompiler
- env.coredata.add_lang_args(comp_class.language, comp_class, for_machine, env)
+ env.add_lang_args(comp_class.language, comp_class, for_machine)
return comp_class([], comp, version, for_machine, info, cc.linker, is_cross=is_cross)
elif 'Metrowerks' in output or 'Freescale' in output:
if 'ARM' in output:
comp_class_mwasmarm = MetrowerksAsmCompilerARM
- env.coredata.add_lang_args(comp_class_mwasmarm.language, comp_class_mwasmarm, for_machine, env)
+ env.add_lang_args(comp_class_mwasmarm.language, comp_class_mwasmarm, for_machine)
return comp_class_mwasmarm([], comp, version, for_machine, info, cc.linker, is_cross=is_cross)
else:
comp_class_mwasmeppc = MetrowerksAsmCompilerEmbeddedPowerPC
- env.coredata.add_lang_args(comp_class_mwasmeppc.language, comp_class_mwasmeppc, for_machine, env)
+ env.add_lang_args(comp_class_mwasmeppc.language, comp_class_mwasmeppc, for_machine)
return comp_class_mwasmeppc([], comp, version, for_machine, info, cc.linker, is_cross=is_cross)
_handle_exceptions(popen_exceptions, compilers)
@@ -1383,7 +1383,7 @@ def detect_masm_compiler(env: 'Environment', for_machine: MachineChoice) -> Comp
try:
output = Popen_safe(comp + [arg])[2]
version = search_version(output)
- env.coredata.add_lang_args(comp_class.language, comp_class, for_machine, env)
+ env.add_lang_args(comp_class.language, comp_class, for_machine)
return comp_class([], comp, version, for_machine, info, cc.linker, is_cross=is_cross)
except OSError as e:
popen_exceptions[' '.join(comp + [arg])] = e
@@ -1403,7 +1403,7 @@ def detect_linearasm_compiler(env: Environment, for_machine: MachineChoice) -> C
try:
output = Popen_safe(comp + [arg])[2]
version = search_version(output)
- env.coredata.add_lang_args(comp_class.language, comp_class, for_machine, env)
+ env.add_lang_args(comp_class.language, comp_class, for_machine)
return comp_class([], comp, version, for_machine, info, cc.linker, is_cross=is_cross)
except OSError as e:
popen_exceptions[' '.join(comp + [arg])] = e
diff --git a/mesonbuild/compilers/fortran.py b/mesonbuild/compilers/fortran.py
index 5794db0..6f4f3d2 100644
--- a/mesonbuild/compilers/fortran.py
+++ b/mesonbuild/compilers/fortran.py
@@ -104,9 +104,9 @@ class FortranCompiler(CLikeCompiler, Compiler):
return filename
def find_library(self, libname: str, env: 'Environment', extra_dirs: T.List[str],
- libtype: LibType = LibType.PREFER_SHARED, lib_prefix_warning: bool = True) -> T.Optional[T.List[str]]:
+ libtype: LibType = LibType.PREFER_SHARED, lib_prefix_warning: bool = True, ignore_system_dirs: bool = False) -> T.Optional[T.List[str]]:
code = 'stop; end program'
- return self._find_library_impl(libname, env, extra_dirs, code, libtype, lib_prefix_warning)
+ return self._find_library_impl(libname, env, extra_dirs, code, libtype, lib_prefix_warning, ignore_system_dirs)
def has_multi_arguments(self, args: T.List[str], env: 'Environment') -> T.Tuple[bool, bool]:
return self._has_multi_arguments(args, env, 'stop; end program')
@@ -446,6 +446,11 @@ class IntelLLVMFortranCompiler(IntelFortranCompiler):
id = 'intel-llvm'
+ def get_preprocess_only_args(self) -> T.List[str]:
+ return ['-preprocess-only']
+
+ def get_dependency_gen_args(self, outtarget: str, outfile: str) -> T.List[str]:
+ return []
class IntelClFortranCompiler(IntelVisualStudioLikeCompiler, FortranCompiler):
@@ -643,7 +648,11 @@ class LlvmFlangFortranCompiler(ClangCompiler, FortranCompiler):
# https://github.com/llvm/llvm-project/commit/8d5386669ed63548daf1bee415596582d6d78d7d;
# it seems flang 18 doesn't work if something accidentally includes a program unit, see
# https://github.com/llvm/llvm-project/issues/92496
- return search_dirs + ['-lFortranRuntime', '-lFortranDecimal']
+ # Only link FortranRuntime and FortranDecimal for flang < 19, see
+ # https://github.com/scipy/scipy/issues/21562#issuecomment-2942938509
+ if version_compare(self.version, '<19'):
+ search_dirs += ['-lFortranRuntime', '-lFortranDecimal']
+ return search_dirs
class Open64FortranCompiler(FortranCompiler):
diff --git a/mesonbuild/compilers/java.py b/mesonbuild/compilers/java.py
index 540e2aa..47d2ac9 100644
--- a/mesonbuild/compilers/java.py
+++ b/mesonbuild/compilers/java.py
@@ -91,10 +91,7 @@ class JavaCompiler(BasicLinkerIsCompilerMixin, Compiler):
runner = shutil.which(self.javarunner)
if runner:
cmdlist = [runner, '-cp', '.', obj]
- pe = subprocess.Popen(cmdlist, cwd=work_dir)
- pe.wait()
- if pe.returncode != 0:
- raise EnvironmentException(f'Executables created by Java compiler {self.name_string()} are not runnable.')
+ self.run_sanity_check(environment, cmdlist, work_dir, use_exe_wrapper_for_cross=False)
else:
m = "Java Virtual Machine wasn't found, but it's needed by Meson. " \
"Please install a JRE.\nIf you have specific needs where this " \
diff --git a/mesonbuild/compilers/mixins/clang.py b/mesonbuild/compilers/mixins/clang.py
index ae5ab63..72b987a 100644
--- a/mesonbuild/compilers/mixins/clang.py
+++ b/mesonbuild/compilers/mixins/clang.py
@@ -155,7 +155,10 @@ class ClangCompiler(GnuLikeCompiler):
# llvm based) is retargetable, while GCC is not.
#
- # qcld: Qualcomm Snapdragon linker, based on LLVM
+ # eld: Qualcomm's open-source embedded linker
+ if linker == 'eld':
+ return ['-fuse-ld=eld']
+ # qcld: Qualcomm's deprecated linker
if linker == 'qcld':
return ['-fuse-ld=qcld']
if linker == 'mold':
diff --git a/mesonbuild/compilers/mixins/clike.py b/mesonbuild/compilers/mixins/clike.py
index b163407..d2eb611 100644
--- a/mesonbuild/compilers/mixins/clike.py
+++ b/mesonbuild/compilers/mixins/clike.py
@@ -128,7 +128,7 @@ class CLikeCompiler(Compiler):
warn_args: T.Dict[str, T.List[str]] = {}
# TODO: Replace this manual cache with functools.lru_cache
- find_library_cache: T.Dict[T.Tuple[T.Tuple[str, ...], str, T.Tuple[str, ...], str, LibType], T.Optional[T.List[str]]] = {}
+ find_library_cache: T.Dict[T.Tuple[T.Tuple[str, ...], str, T.Tuple[str, ...], str, LibType, bool], T.Optional[T.List[str]]] = {}
find_framework_cache: T.Dict[T.Tuple[T.Tuple[str, ...], str, T.Tuple[str, ...], bool], T.Optional[T.List[str]]] = {}
internal_libs = arglist.UNIXY_COMPILER_INTERNAL_LIBS
@@ -307,22 +307,7 @@ class CLikeCompiler(Compiler):
mlog.debug('-----')
if pc.returncode != 0:
raise mesonlib.EnvironmentException(f'Compiler {self.name_string()} cannot compile programs.')
- # Run sanity check
- if self.is_cross:
- if not environment.has_exe_wrapper():
- # Can't check if the binaries run so we have to assume they do
- return
- cmdlist = environment.exe_wrapper.get_command() + [binary_name]
- else:
- cmdlist = [binary_name]
- mlog.debug('Running test binary command: ', mesonlib.join_args(cmdlist))
- try:
- # fortran code writes to stdout
- pe = subprocess.run(cmdlist, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
- except Exception as e:
- raise mesonlib.EnvironmentException(f'Could not invoke sanity test executable: {e!s}.')
- if pe.returncode != 0:
- raise mesonlib.EnvironmentException(f'Executables created by {self.language} compiler {self.name_string()} are not runnable.')
+ self.run_sanity_check(environment, [binary_name], work_dir)
def sanity_check(self, work_dir: str, environment: 'Environment') -> None:
code = 'int main(void) { int class=0; return class; }\n'
@@ -1128,7 +1113,7 @@ class CLikeCompiler(Compiler):
'''
return self.sizeof('void *', '', env)[0] == 8
- def _find_library_real(self, libname: str, env: 'Environment', extra_dirs: T.List[str], code: str, libtype: LibType, lib_prefix_warning: bool) -> T.Optional[T.List[str]]:
+ def _find_library_real(self, libname: str, env: 'Environment', extra_dirs: T.List[str], code: str, libtype: LibType, lib_prefix_warning: bool, ignore_system_dirs: bool) -> T.Optional[T.List[str]]:
# First try if we can just add the library as -l.
# Gcc + co seem to prefer builtin lib dirs to -L dirs.
# Only try to find std libs if no extra dirs specified.
@@ -1159,7 +1144,7 @@ class CLikeCompiler(Compiler):
except (mesonlib.MesonException, KeyError): # TODO evaluate if catching KeyError is wanted here
elf_class = 0
# Search in the specified dirs, and then in the system libraries
- for d in itertools.chain(extra_dirs, self.get_library_dirs(env, elf_class)):
+ for d in itertools.chain(extra_dirs, [] if ignore_system_dirs else self.get_library_dirs(env, elf_class)):
for p in patterns:
trials = self._get_trials_from_pattern(p, d, libname)
if not trials:
@@ -1173,15 +1158,15 @@ class CLikeCompiler(Compiler):
return None
def _find_library_impl(self, libname: str, env: 'Environment', extra_dirs: T.List[str],
- code: str, libtype: LibType, lib_prefix_warning: bool) -> T.Optional[T.List[str]]:
+ code: str, libtype: LibType, lib_prefix_warning: bool, ignore_system_dirs: bool) -> T.Optional[T.List[str]]:
# These libraries are either built-in or invalid
if libname in self.ignore_libs:
return []
if isinstance(extra_dirs, str):
extra_dirs = [extra_dirs]
- key = (tuple(self.exelist), libname, tuple(extra_dirs), code, libtype)
+ key = (tuple(self.exelist), libname, tuple(extra_dirs), code, libtype, ignore_system_dirs)
if key not in self.find_library_cache:
- value = self._find_library_real(libname, env, extra_dirs, code, libtype, lib_prefix_warning)
+ value = self._find_library_real(libname, env, extra_dirs, code, libtype, lib_prefix_warning, ignore_system_dirs)
self.find_library_cache[key] = value
else:
value = self.find_library_cache[key]
@@ -1190,9 +1175,9 @@ class CLikeCompiler(Compiler):
return value.copy()
def find_library(self, libname: str, env: 'Environment', extra_dirs: T.List[str],
- libtype: LibType = LibType.PREFER_SHARED, lib_prefix_warning: bool = True) -> T.Optional[T.List[str]]:
+ libtype: LibType = LibType.PREFER_SHARED, lib_prefix_warning: bool = True, ignore_system_dirs: bool = False) -> T.Optional[T.List[str]]:
code = 'int main(void) { return 0; }\n'
- return self._find_library_impl(libname, env, extra_dirs, code, libtype, lib_prefix_warning)
+ return self._find_library_impl(libname, env, extra_dirs, code, libtype, lib_prefix_warning, ignore_system_dirs)
def find_framework_paths(self, env: 'Environment') -> T.List[str]:
'''
@@ -1287,12 +1272,19 @@ class CLikeCompiler(Compiler):
# check the equivalent enable flag too "-Wforgotten-towel".
if arg.startswith('-Wno-'):
# Make an exception for -Wno-attributes=x as -Wattributes=x is invalid
- # for GCC at least. Also, the opposite of -Wno-vla-larger-than is
- # -Wvla-larger-than=N
+ # for GCC at least. Also, the positive form of some flags requires a
+ # value to be specified, i.e. we need to pass -Wfoo=N rather than just
+ # -Wfoo.
if arg.startswith('-Wno-attributes='):
pass
- elif arg == '-Wno-vla-larger-than':
- new_args.append('-Wvla-larger-than=1000')
+ elif arg in {'-Wno-alloc-size-larger-than',
+ '-Wno-alloca-larger-than',
+ '-Wno-frame-larger-than',
+ '-Wno-stack-usage',
+ '-Wno-vla-larger-than'}:
+ # Pass an arbitrary value to the enabling flag; since the test program
+ # is trivial, it is unlikely to provoke any of these warnings.
+ new_args.append('-W' + arg[5:] + '=1000')
else:
new_args.append('-W' + arg[5:])
if arg.startswith('-Wl,'):
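Because GCC accepts unknown `-Wno-foo` flags without complaint, support has to be probed through the positive form, and the hunk above grows the list of warnings whose positive form needs an explicit value. A standalone restatement of the rewrite rule (illustrative only; the real logic lives in the CLike mixin shown above):

import typing as T

VALUE_REQUIRED = {'-Wno-alloc-size-larger-than', '-Wno-alloca-larger-than',
                  '-Wno-frame-larger-than', '-Wno-stack-usage',
                  '-Wno-vla-larger-than'}

def positive_form(arg: str) -> T.Optional[str]:
    if arg.startswith('-Wno-attributes='):
        return None                      # -Wattributes=x would be invalid
    if arg in VALUE_REQUIRED:
        return '-W' + arg[5:] + '=1000'  # the positive flag requires a value
    return '-W' + arg[5:]

assert positive_form('-Wno-vla-larger-than') == '-Wvla-larger-than=1000'
assert positive_form('-Wno-unused') == '-Wunused'
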
diff --git a/mesonbuild/compilers/mixins/emscripten.py b/mesonbuild/compilers/mixins/emscripten.py
index 91b25e8..83534e1 100644
--- a/mesonbuild/compilers/mixins/emscripten.py
+++ b/mesonbuild/compilers/mixins/emscripten.py
@@ -76,7 +76,7 @@ class EmscriptenMixin(Compiler):
return wrap_js_includes(super().get_dependency_link_args(dep))
def find_library(self, libname: str, env: 'Environment', extra_dirs: T.List[str],
- libtype: LibType = LibType.PREFER_SHARED, lib_prefix_warning: bool = True) -> T.Optional[T.List[str]]:
+ libtype: LibType = LibType.PREFER_SHARED, lib_prefix_warning: bool = True, ignore_system_dirs: bool = False) -> T.Optional[T.List[str]]:
if not libname.endswith('.js'):
return super().find_library(libname, env, extra_dirs, libtype, lib_prefix_warning)
if os.path.isabs(libname):
diff --git a/mesonbuild/compilers/mixins/gnu.py b/mesonbuild/compilers/mixins/gnu.py
index 9ea591e..ddcd14a 100644
--- a/mesonbuild/compilers/mixins/gnu.py
+++ b/mesonbuild/compilers/mixins/gnu.py
@@ -534,6 +534,8 @@ class GnuLikeCompiler(Compiler, metaclass=abc.ABCMeta):
# We want to allow preprocessing files with any extension, such as
# foo.c.in. In that case we need to tell GCC/CLANG to treat them as
# assembly file.
+ if self.language == 'fortran':
+ return self.get_preprocess_only_args()
lang = gnu_lang_map.get(self.language, 'assembler-with-cpp')
return self.get_preprocess_only_args() + [f'-x{lang}']
diff --git a/mesonbuild/compilers/mixins/islinker.py b/mesonbuild/compilers/mixins/islinker.py
index 3f35619..e359fb3 100644
--- a/mesonbuild/compilers/mixins/islinker.py
+++ b/mesonbuild/compilers/mixins/islinker.py
@@ -101,9 +101,8 @@ class BasicLinkerIsCompilerMixin(Compiler):
darwin_versions: T.Tuple[str, str]) -> T.List[str]:
raise MesonException("This linker doesn't support soname args")
- def build_rpath_args(self, env: 'Environment', build_dir: str, from_dir: str,
- rpath_paths: T.Tuple[str, ...], build_rpath: str,
- install_rpath: str) -> T.Tuple[T.List[str], T.Set[bytes]]:
+ def build_rpath_args(self, env: Environment, build_dir: str, from_dir: str,
+ target: BuildTarget, extra_paths: T.Optional[T.List[str]] = None) -> T.Tuple[T.List[str], T.Set[bytes]]:
return ([], set())
def get_asneeded_args(self) -> T.List[str]:
diff --git a/mesonbuild/compilers/mixins/pgi.py b/mesonbuild/compilers/mixins/pgi.py
index 50335c8..fddc837 100644
--- a/mesonbuild/compilers/mixins/pgi.py
+++ b/mesonbuild/compilers/mixins/pgi.py
@@ -54,6 +54,12 @@ class PGICompiler(Compiler):
def openmp_flags(self, env: Environment) -> T.List[str]:
return ['-mp']
+ def get_preprocess_only_args(self) -> T.List[str]:
+ return ['-E', '-P', '-o', '-']
+
+ def get_preprocess_to_file_args(self) -> T.List[str]:
+ return ['-E', '-P']
+
def get_optimization_args(self, optimization_level: str) -> T.List[str]:
return clike_optimization_args[optimization_level]
diff --git a/mesonbuild/compilers/rust.py b/mesonbuild/compilers/rust.py
index d0d2e69..bc27779 100644
--- a/mesonbuild/compilers/rust.py
+++ b/mesonbuild/compilers/rust.py
@@ -5,7 +5,7 @@
from __future__ import annotations
import functools
-import subprocess, os.path
+import os.path
import textwrap
import re
import typing as T
@@ -141,17 +141,7 @@ class RustCompiler(Compiler):
if pc.returncode != 0:
raise EnvironmentException(f'Rust compiler {self.name_string()} cannot compile programs.')
self._native_static_libs(work_dir, source_name)
- if self.is_cross:
- if not environment.has_exe_wrapper():
- # Can't check if the binaries run so we have to assume they do
- return
- cmdlist = environment.exe_wrapper.get_command() + [output_name]
- else:
- cmdlist = [output_name]
- pe = subprocess.Popen(cmdlist, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
- pe.wait()
- if pe.returncode != 0:
- raise EnvironmentException(f'Executables created by Rust compiler {self.name_string()} are not runnable.')
+ self.run_sanity_check(environment, [output_name], work_dir)
def _native_static_libs(self, work_dir: str, source_name: str) -> None:
# Get libraries needed to link with a Rust staticlib
@@ -192,10 +182,14 @@ class RustCompiler(Compiler):
return stdo.split('\n', maxsplit=1)[0]
@functools.lru_cache(maxsize=None)
- def get_crt_static(self) -> bool:
+ def get_cfgs(self) -> T.List[str]:
cmd = self.get_exelist(ccache=False) + ['--print', 'cfg']
p, stdo, stde = Popen_safe_logged(cmd)
- return bool(re.search('^target_feature="crt-static"$', stdo, re.MULTILINE))
+ return stdo.splitlines()
+
+ @functools.lru_cache(maxsize=None)
+ def get_crt_static(self) -> bool:
+ return 'target_feature="crt-static"' in self.get_cfgs()
def get_debug_args(self, is_debug: bool) -> T.List[str]:
return clike_debug_args[is_debug]
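For reference, rustc --print cfg emits one configuration predicate per line, so
get_cfgs() only has to split on newlines and get_crt_static() reduces to a
membership test. An illustration with canned output (the sample lines are
typical, not exhaustive):

    cfgs = ['debug_assertions',              # what `rustc --print cfg` might
            'target_arch="x86_64"',          # print, one predicate per line
            'target_env="gnu"',
            'target_feature="crt-static"',
            'target_os="linux"']

    assert 'target_feature="crt-static"' in cfgs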
@@ -203,18 +197,15 @@ class RustCompiler(Compiler):
def get_optimization_args(self, optimization_level: str) -> T.List[str]:
return rust_optimization_args[optimization_level]
- def build_rpath_args(self, env: 'Environment', build_dir: str, from_dir: str,
- rpath_paths: T.Tuple[str, ...], build_rpath: str,
- install_rpath: str) -> T.Tuple[T.List[str], T.Set[bytes]]:
- args, to_remove = super().build_rpath_args(env, build_dir, from_dir, rpath_paths,
- build_rpath, install_rpath)
+ def build_rpath_args(self, env: Environment, build_dir: str, from_dir: str,
+ target: BuildTarget, extra_paths: T.Optional[T.List[str]] = None) -> T.Tuple[T.List[str], T.Set[bytes]]:
+ # add rustc's sysroot to account for rustup installations
+ args, to_remove = super().build_rpath_args(env, build_dir, from_dir, target, [self.get_target_libdir()])
- # ... but then add rustc's sysroot to account for rustup
- # installations
rustc_rpath_args = []
for arg in args:
rustc_rpath_args.append('-C')
- rustc_rpath_args.append(f'link-arg={arg}:{self.get_target_libdir()}')
+ rustc_rpath_args.append(f'link-arg={arg}')
return rustc_rpath_args, to_remove
def compute_parameters_with_absolute_paths(self, parameter_list: T.List[str],
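In the rpath hunk above, the base class now receives the sysroot through
extra_paths and hands back finished linker arguments, so rustc only needs each
argument wrapped in -C link-arg=. A toy version of the wrapping, assuming the
base class returned the single argument below:

    base_args = ['-Wl,-rpath,$ORIGIN/../lib']  # hypothetical result of super().build_rpath_args()

    rustc_rpath_args = []
    for arg in base_args:
        # rustc forwards raw linker arguments via -C link-arg=<arg>
        rustc_rpath_args += ['-C', 'link-arg=' + arg]

    assert rustc_rpath_args == ['-C', 'link-arg=-Wl,-rpath,$ORIGIN/../lib']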
@@ -247,6 +238,12 @@ class RustCompiler(Compiler):
'none',
choices=['none', '2015', '2018', '2021', '2024'])
+ key = self.form_compileropt_key('dynamic_std')
+ opts[key] = options.UserBooleanOption(
+ self.make_option_name(key),
+ 'Whether to link Rust build targets to a dynamic libstd',
+ False)
+
return opts
def get_dependency_compile_args(self, dep: 'Dependency') -> T.List[str]:
@@ -327,11 +324,11 @@ class RustCompiler(Compiler):
return exelist + args
def has_multi_arguments(self, args: T.List[str], env: Environment) -> T.Tuple[bool, bool]:
- return self.compiles('fn main { std::process::exit(0) };\n', env, extra_args=args, mode=CompileCheckMode.COMPILE)
+ return self.compiles('fn main() { std::process::exit(0) }\n', env, extra_args=args, mode=CompileCheckMode.COMPILE)
def has_multi_link_arguments(self, args: T.List[str], env: Environment) -> T.Tuple[bool, bool]:
args = self.linker.fatal_warnings() + args
- return self.compiles('fn main { std::process::exit(0) };\n', env, extra_args=args, mode=CompileCheckMode.LINK)
+ return self.compiles('fn main() { std::process::exit(0) }\n', env, extra_args=args, mode=CompileCheckMode.LINK)
@functools.lru_cache(maxsize=None)
def get_rustdoc(self, env: 'Environment') -> T.Optional[RustdocTestCompiler]:
@@ -341,7 +338,7 @@ class RustCompiler(Compiler):
return RustdocTestCompiler(exelist, self.version, self.for_machine,
self.is_cross, self.info, full_version=self.full_version,
- linker=self.linker)
+ linker=self.linker, rustc=self)
class ClippyRustCompiler(RustCompiler):
@@ -361,6 +358,26 @@ class RustdocTestCompiler(RustCompiler):
id = 'rustdoc --test'
+ def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice,
+ is_cross: bool, info: 'MachineInfo',
+ full_version: T.Optional[str],
+ linker: T.Optional['DynamicLinker'], rustc: RustCompiler):
+ super().__init__(exelist, version, for_machine,
+ is_cross, info, full_version, linker)
+ self.rustc = rustc
+
+ @functools.lru_cache(maxsize=None)
+ def get_sysroot(self) -> str:
+ return self.rustc.get_sysroot()
+
+ @functools.lru_cache(maxsize=None)
+ def get_target_libdir(self) -> str:
+ return self.rustc.get_target_libdir()
+
+ @functools.lru_cache(maxsize=None)
+ def get_cfgs(self) -> T.List[str]:
+ return self.rustc.get_cfgs()
+
def get_debug_args(self, is_debug: bool) -> T.List[str]:
return []
diff --git a/mesonbuild/compilers/swift.py b/mesonbuild/compilers/swift.py
index 8410fbb..4ad3aff 100644
--- a/mesonbuild/compilers/swift.py
+++ b/mesonbuild/compilers/swift.py
@@ -8,7 +8,7 @@ import subprocess, os.path
import typing as T
from .. import mlog, options
-from ..mesonlib import EnvironmentException, MesonException, version_compare
+from ..mesonlib import first, MesonException, version_compare
from .compilers import Compiler, clike_debug_args
@@ -139,6 +139,16 @@ class SwiftCompiler(Compiler):
if std != 'none':
args += ['-swift-version', std]
+ # Pass C compiler -std=... arg to swiftc
+ c_langs = ['objc', 'c']
+ if target.uses_swift_cpp_interop():
+ c_langs = ['objcpp', 'cpp', *c_langs]
+
+ c_lang = first(c_langs, lambda x: x in target.compilers)
+ if c_lang is not None:
+ cc = target.compilers[c_lang]
+ args.extend(arg for c_arg in cc.get_option_std_args(target, env, subproject) for arg in ['-Xcc', c_arg])
+
return args
def get_working_directory_args(self, path: str) -> T.Optional[T.List[str]]:
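The -Xcc interleaving above is how swiftc forwards flags to its embedded Clang:
every C-family argument must be preceded by its own -Xcc. A small sketch with a
made-up argument list:

    cc_std_args = ['-std=gnu17']  # hypothetical result of cc.get_option_std_args(...)

    args = [arg for c_arg in cc_std_args for arg in ('-Xcc', c_arg)]
    assert args == ['-Xcc', '-std=gnu17']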
@@ -147,6 +157,18 @@ class SwiftCompiler(Compiler):
return ['-working-directory', path]
+ def get_cxx_interoperability_args(self, target: T.Optional[build.BuildTarget] = None) -> T.List[str]:
+ if target is not None and not target.uses_swift_cpp_interop():
+ return []
+
+ if version_compare(self.version, '<5.9'):
+ raise MesonException(f'Compiler {self} does not support C++ interoperability')
+
+ return ['-cxx-interoperability-mode=default']
+
+ def get_library_args(self) -> T.List[str]:
+ return ['-parse-as-library']
+
def compute_parameters_with_absolute_paths(self, parameter_list: T.List[str],
build_dir: str) -> T.List[str]:
for idx, i in enumerate(parameter_list):
@@ -170,13 +192,7 @@ class SwiftCompiler(Compiler):
''')
pc = subprocess.Popen(self.exelist + extra_flags + ['-emit-executable', '-o', output_name, src], cwd=work_dir)
pc.wait()
- if pc.returncode != 0:
- raise EnvironmentException('Swift compiler %s cannot compile programs.' % self.name_string())
- if self.is_cross:
- # Can't check if the binaries run so we have to assume they do
- return
- if subprocess.call(output_name) != 0:
- raise EnvironmentException('Executables created by Swift compiler %s are not runnable.' % self.name_string())
+ self.run_sanity_check(environment, [output_name], work_dir)
def get_debug_args(self, is_debug: bool) -> T.List[str]:
return clike_debug_args[is_debug]
diff --git a/mesonbuild/compilers/vala.py b/mesonbuild/compilers/vala.py
index 28861a6..bbaefed 100644
--- a/mesonbuild/compilers/vala.py
+++ b/mesonbuild/compilers/vala.py
@@ -113,7 +113,7 @@ class ValaCompiler(Compiler):
raise EnvironmentException(msg)
def find_library(self, libname: str, env: 'Environment', extra_dirs: T.List[str],
- libtype: LibType = LibType.PREFER_SHARED, lib_prefix_warning: bool = True) -> T.Optional[T.List[str]]:
+ libtype: LibType = LibType.PREFER_SHARED, lib_prefix_warning: bool = True, ignore_system_dirs: bool = False) -> T.Optional[T.List[str]]:
if extra_dirs and isinstance(extra_dirs, str):
extra_dirs = [extra_dirs]
# Valac always looks in the default vapi dir, so only search there if
diff --git a/mesonbuild/coredata.py b/mesonbuild/coredata.py
index 90157df..27795b0 100644
--- a/mesonbuild/coredata.py
+++ b/mesonbuild/coredata.py
@@ -7,6 +7,7 @@ from __future__ import annotations
import copy
from . import mlog, options
+import argparse
import pickle, os, uuid
import sys
from functools import lru_cache
@@ -18,7 +19,6 @@ from .mesonlib import (
MesonException, MachineChoice, PerMachine,
PerMachineDefaultable,
default_prefix,
- stringlistify,
pickle_load
)
@@ -32,13 +32,11 @@ import shlex
import typing as T
if T.TYPE_CHECKING:
- import argparse
from typing_extensions import Protocol
from . import dependencies
from .compilers.compilers import Compiler, CompileResult, RunResult, CompileCheckMode
from .dependencies.detect import TV_DepID
- from .environment import Environment
from .mesonlib import FileOrString
from .cmake.traceparser import CMakeCacheEntry
from .interpreterbase import SubProject
@@ -50,13 +48,11 @@ if T.TYPE_CHECKING:
"""Representation of command line options from Meson setup, configure,
and dist.
- :param projectoptions: The raw list of command line options given
:param cmd_line_options: command line options parsed into an OptionKey:
str mapping
"""
- cmd_line_options: T.Dict[OptionKey, str]
- projectoptions: T.List[str]
+ cmd_line_options: T.Dict[OptionKey, T.Optional[str]]
cross_file: T.List[str]
native_file: T.List[str]
@@ -72,7 +68,7 @@ if T.TYPE_CHECKING:
#
# Pip requires that RCs are named like this: '0.1.0.rc1'
# But the corresponding Git tag needs to be '0.1.0rc1'
-version = '1.8.99'
+version = '1.9.0.rc1'
# The next stable version when we are in dev. This is used to allow projects to
# require meson version >=1.2.0 when using 1.1.99. FeatureNew won't warn when
@@ -147,13 +143,13 @@ class DependencyCache:
def __init__(self, builtins: options.OptionStore, for_machine: MachineChoice):
self.__cache: T.MutableMapping[TV_DepID, DependencySubCache] = OrderedDict()
self.__builtins = builtins
- self.__pkg_conf_key = options.OptionKey('pkg_config_path')
- self.__cmake_key = options.OptionKey('cmake_prefix_path')
+ self.__pkg_conf_key = options.OptionKey('pkg_config_path', machine=for_machine)
+ self.__cmake_key = options.OptionKey('cmake_prefix_path', machine=for_machine)
def __calculate_subkey(self, type_: DependencyCacheType) -> T.Tuple[str, ...]:
data: T.Dict[DependencyCacheType, T.List[str]] = {
- DependencyCacheType.PKG_CONFIG: stringlistify(self.__builtins.get_value_for(self.__pkg_conf_key)),
- DependencyCacheType.CMAKE: stringlistify(self.__builtins.get_value_for(self.__cmake_key)),
+ DependencyCacheType.PKG_CONFIG: T.cast('T.List[str]', self.__builtins.get_value_for(self.__pkg_conf_key)),
+ DependencyCacheType.CMAKE: T.cast('T.List[str]', self.__builtins.get_value_for(self.__cmake_key)),
DependencyCacheType.OTHER: [],
}
assert type_ in data, 'Someone forgot to update subkey calculations for a new type'
@@ -248,6 +244,7 @@ class CoreData:
'default': '8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942',
'c': '8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942',
'cpp': '8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942',
+ 'masm': '8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942',
'test': '3AC096D0-A1C2-E12C-1390-A8335801FDAB',
'directory': '2150E333-8FDC-42A3-9474-1A3956D46DE8',
}
@@ -412,6 +409,9 @@ class CoreData:
return option_object.validate_value(override)
return value
+ def set_from_configure_command(self, options: SharedCMDOptions) -> bool:
+ return self.optstore.set_from_configure_command(options.cmd_line_options)
+
def set_option(self, key: OptionKey, value, first_invocation: bool = False) -> bool:
dirty = False
try:
@@ -565,30 +565,19 @@ class CoreData:
return dirty
- def add_compiler_options(self, c_options: MutableKeyedOptionDictType, lang: str, for_machine: MachineChoice,
- env: Environment, subproject: str) -> None:
+ def add_compiler_options(self, c_options: MutableKeyedOptionDictType, lang: str, for_machine: MachineChoice) -> None:
for k, o in c_options.items():
- comp_key = OptionKey(f'{k.name}', None, for_machine)
+ assert k.subproject is None and k.machine is for_machine
if lang == 'objc' and k.name == 'c_std':
# For objective C, always fall back to c_std.
- self.optstore.add_compiler_option('c', comp_key, o)
+ self.optstore.add_compiler_option('c', k, o)
elif lang == 'objcpp' and k.name == 'cpp_std':
- self.optstore.add_compiler_option('cpp', comp_key, o)
+ self.optstore.add_compiler_option('cpp', k, o)
else:
- self.optstore.add_compiler_option(lang, comp_key, o)
-
- def add_lang_args(self, lang: str, comp: T.Type['Compiler'],
- for_machine: MachineChoice, env: 'Environment') -> None:
- """Add global language arguments that are needed before compiler/linker detection."""
- from .compilers import compilers
- # These options are all new at this point, because the compiler is
- # responsible for adding its own options, thus calling
- # `self.optstore.update()`` is perfectly safe.
- for gopt_key, gopt_valobj in compilers.get_global_options(lang, comp, for_machine, env).items():
- self.optstore.add_compiler_option(lang, gopt_key, gopt_valobj)
+ self.optstore.add_compiler_option(lang, k, o)
- def process_compiler_options(self, lang: str, comp: Compiler, env: Environment, subproject: str) -> None:
- self.add_compiler_options(comp.get_options(), lang, comp.for_machine, env, subproject)
+ def process_compiler_options(self, lang: str, comp: Compiler, subproject: str) -> None:
+ self.add_compiler_options(comp.get_options(), lang, comp.for_machine)
for key in comp.base_options:
if subproject:
@@ -692,28 +681,60 @@ def save(obj: CoreData, build_dir: str) -> str:
return filename
+class KeyNoneAction(argparse.Action):
+ """
+ Custom argparse Action that stores each argument as a dictionary key mapped to None.
+ """
+
+ def __init__(self, option_strings, dest, nargs=None, **kwargs: object) -> None:
+ assert nargs is None or nargs == 1
+ super().__init__(option_strings, dest, nargs=1, **kwargs)
+
+ def __call__(self, parser: argparse.ArgumentParser, namespace: argparse.Namespace,
+ arg: T.List[str], option_string: T.Optional[str] = None) -> None:
+ current_dict = getattr(namespace, self.dest)
+ if current_dict is None:
+ current_dict = {}
+ setattr(namespace, self.dest, current_dict)
+
+ key = OptionKey.from_string(arg[0])
+ current_dict[key] = None
+
+
+class KeyValueAction(argparse.Action):
+ """
+ Custom argparse Action that parses KEY=VAL arguments and stores them in a dictionary.
+ """
+
+ def __init__(self, option_strings, dest, nargs=None, **kwargs: object) -> None:
+ assert nargs is None or nargs == 1
+ super().__init__(option_strings, dest, nargs=1, **kwargs)
+
+ def __call__(self, parser: argparse.ArgumentParser, namespace: argparse.Namespace,
+ arg: T.List[str], option_string: T.Optional[str] = None) -> None:
+ current_dict = getattr(namespace, self.dest)
+ if current_dict is None:
+ current_dict = {}
+ setattr(namespace, self.dest, current_dict)
+
+ try:
+ keystr, value = arg[0].split('=', 1)
+ key = OptionKey.from_string(keystr)
+ current_dict[key] = value
+ except ValueError:
+ parser.error(f'The argument for option {option_string!r} must be in OPTION=VALUE format.')
+
+
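Together these actions let -D arguments accumulate straight into an
OptionKey-keyed dict during parsing. A self-contained sketch of the
KeyValueAction behavior on a toy parser (plain strings stand in for OptionKey
to keep it runnable on its own):

    import argparse

    class KVAction(argparse.Action):
        def __call__(self, parser, namespace, values, option_string=None):
            d = getattr(namespace, self.dest) or {}
            setattr(namespace, self.dest, d)
            try:
                key, value = values[0].split('=', 1)
                d[key] = value
            except ValueError:
                parser.error(f'The argument for option {option_string!r} must be in OPTION=VALUE format.')

    p = argparse.ArgumentParser()
    p.add_argument('-D', action=KVAction, nargs=1, dest='cmd_line_options', default={})
    ns = p.parse_args(['-Dbuildtype=debug', '-Dcpp_std=c++20'])
    assert ns.cmd_line_options == {'buildtype': 'debug', 'cpp_std': 'c++20'}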
def register_builtin_arguments(parser: argparse.ArgumentParser) -> None:
for n, b in options.BUILTIN_OPTIONS.items():
options.option_to_argparse(b, n, parser, '')
for n, b in options.BUILTIN_OPTIONS_PER_MACHINE.items():
options.option_to_argparse(b, n, parser, ' (just for host machine)')
options.option_to_argparse(b, n.as_build(), parser, ' (just for build machine)')
- parser.add_argument('-D', action='append', dest='projectoptions', default=[], metavar="option",
+ parser.add_argument('-D', action=KeyValueAction, dest='cmd_line_options', default={}, metavar="option=value",
help='Set the value of an option, can be used several times to set multiple options.')
-def create_options_dict(options: T.List[str], subproject: str = '') -> T.Dict[str, str]:
- result: T.OrderedDict[OptionKey, str] = OrderedDict()
- for o in options:
- try:
- (key, value) = o.split('=', 1)
- except ValueError:
- raise MesonException(f'Option {o!r} must have a value separated by equals sign.')
- result[key] = value
- return result
-
def parse_cmd_line_options(args: SharedCMDOptions) -> None:
- args.cmd_line_options = create_options_dict(args.projectoptions)
-
# Merge builtin options set with --option into the dict.
for key in chain(
options.BUILTIN_OPTIONS.keys(),
@@ -727,7 +748,7 @@ def parse_cmd_line_options(args: SharedCMDOptions) -> None:
cmdline_name = options.argparse_name_to_arg(name)
raise MesonException(
f'Got argument {name} as both -D{name} and {cmdline_name}. Pick one.')
- args.cmd_line_options[key.name] = value
+ args.cmd_line_options[key] = value
delattr(args, name)
diff --git a/mesonbuild/dependencies/base.py b/mesonbuild/dependencies/base.py
index 38bfc08..732bae5 100644
--- a/mesonbuild/dependencies/base.py
+++ b/mesonbuild/dependencies/base.py
@@ -143,6 +143,11 @@ class Dependency(HoldableObject):
def is_built(self) -> bool:
return False
+ def is_named(self) -> bool:
+ if self.name is None:
+ return False
+ return self.name != f'dep{self._id}'
+
def summary_value(self) -> T.Union[str, mlog.AnsiDecorator, mlog.AnsiText]:
if not self.found():
return mlog.red('NO')
diff --git a/mesonbuild/dependencies/cuda.py b/mesonbuild/dependencies/cuda.py
index 82bf5ad..cd97127 100644
--- a/mesonbuild/dependencies/cuda.py
+++ b/mesonbuild/dependencies/cuda.py
@@ -11,9 +11,9 @@ from pathlib import Path
from .. import mesonlib
from .. import mlog
-from ..environment import detect_cpu_family
from .base import DependencyException, SystemDependency
from .detect import packages
+from ..mesonlib import LibType
if T.TYPE_CHECKING:
@@ -27,8 +27,11 @@ class CudaDependency(SystemDependency):
supported_languages = ['cpp', 'c', 'cuda'] # see also _default_language
def __init__(self, environment: 'Environment', kwargs: T.Dict[str, T.Any]) -> None:
- compilers = environment.coredata.compilers[self.get_for_machine_from_kwargs(kwargs)]
+ for_machine = self.get_for_machine_from_kwargs(kwargs)
+ compilers = environment.coredata.compilers[for_machine]
+ machine = environment.machines[for_machine]
language = self._detect_language(compilers)
+
if language not in self.supported_languages:
raise DependencyException(f'Language \'{language}\' is not supported by the CUDA Toolkit. Supported languages are {self.supported_languages}.')
@@ -50,16 +53,26 @@ class CudaDependency(SystemDependency):
if not os.path.isabs(self.cuda_path):
raise DependencyException(f'CUDA Toolkit path must be absolute, got \'{self.cuda_path}\'.')
+ # Cuda target directory relative to cuda path.
+ if machine.is_linux():
+ # E.g. targets/x86_64-linux
+ self.target_path = os.path.join('targets', f'{machine.cpu_family}-{machine.system}')
+ else:
+ self.target_path = '.'
+
# nvcc already knows where to find the CUDA Toolkit, but if we're compiling
# a mixed C/C++/CUDA project, we still need to make the include dir searchable
if self.language != 'cuda' or len(compilers) > 1:
- self.incdir = os.path.join(self.cuda_path, 'include')
+ self.incdir = os.path.join(self.cuda_path, self.target_path, 'include')
self.compile_args += [f'-I{self.incdir}']
arch_libdir = self._detect_arch_libdir()
- self.libdir = os.path.join(self.cuda_path, arch_libdir)
+ self.libdir = os.path.join(self.cuda_path, self.target_path, arch_libdir)
mlog.debug('CUDA library directory is', mlog.bold(self.libdir))
+ if 'static' not in kwargs:
+ self.libtype = LibType.PREFER_STATIC
+
self.is_found = self._find_requested_libraries()
@classmethod
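Concretely, on a Linux x86_64 host with the toolkit in /usr/local/cuda, the
paths computed above resolve as follows (a worked example, not Meson output):

    import os

    cuda_path = '/usr/local/cuda'
    target_path = os.path.join('targets', 'x86_64-linux')   # {cpu_family}-{system}

    incdir = os.path.join(cuda_path, target_path, 'include')
    libdir = os.path.join(cuda_path, target_path, 'lib')    # _detect_arch_libdir() on Linux

    assert incdir == '/usr/local/cuda/targets/x86_64-linux/include'
    assert libdir == '/usr/local/cuda/targets/x86_64-linux/lib'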
@@ -211,8 +224,8 @@ class CudaDependency(SystemDependency):
return '.'.join(version.split('.')[:2])
def _detect_arch_libdir(self) -> str:
- arch = detect_cpu_family(self.env.coredata.compilers.host)
machine = self.env.machines[self.for_machine]
+ arch = machine.cpu_family
msg = '{} architecture is not supported in {} version of the CUDA Toolkit.'
if machine.is_windows():
libdirs = {'x86': 'Win32', 'x86_64': 'x64'}
@@ -220,10 +233,7 @@ class CudaDependency(SystemDependency):
raise DependencyException(msg.format(arch, 'Windows'))
return os.path.join('lib', libdirs[arch])
elif machine.is_linux():
- libdirs = {'x86_64': 'lib64', 'ppc64': 'lib', 'aarch64': 'lib64', 'loongarch64': 'lib64'}
- if arch not in libdirs:
- raise DependencyException(msg.format(arch, 'Linux'))
- return libdirs[arch]
+ return 'lib'
elif machine.is_darwin():
libdirs = {'x86_64': 'lib64'}
if arch not in libdirs:
@@ -236,13 +246,14 @@ class CudaDependency(SystemDependency):
all_found = True
for module in self.requested_modules:
- args = self.clib_compiler.find_library(module, self.env, [self.libdir])
- if module == 'cudart_static' and self.language != 'cuda':
- machine = self.env.machines[self.for_machine]
- if machine.is_linux():
- # extracted by running
- # nvcc -v foo.o
- args += ['-lrt', '-lpthread', '-ldl']
+ # You should only ever link to libraries inside the CUDA tree, nothing outside of it.
+ # For instance, there is a
+ #
+ # - libnvidia-ml.so in stubs/ of the CUDA tree
+ # - libnvidia-ml.so in /usr/lib/ that is provided by the nvidia drivers
+ #
+ # Users should never link to the latter, since its ABI may change.
+ args = self.clib_compiler.find_library(module, self.env, [self.libdir, os.path.join(self.libdir, 'stubs')], self.libtype, ignore_system_dirs=True)
if args is None:
self._report_dependency_error(f'Couldn\'t find requested CUDA module \'{module}\'')
@@ -284,23 +295,26 @@ class CudaDependency(SystemDependency):
return candidates
def get_link_args(self, language: T.Optional[str] = None, raw: bool = False) -> T.List[str]:
+ # when using nvcc to link, we should instead use the native driver options
+ REWRITE_MODULES = {
+ 'cudart': ['-cudart', 'shared'],
+ 'cudart_static': ['-cudart', 'static'],
+ 'cudadevrt': ['-cudadevrt'],
+ }
+
args: T.List[str] = []
for lib in self.requested_modules:
link_args = self.lib_modules[lib]
- # Turn canonical arguments like
- # /opt/cuda/lib64/libcublas.so
- # back into
- # -lcublas
- # since this is how CUDA modules were passed to nvcc since time immemorial
- if language == 'cuda':
- if lib in frozenset(['cudart', 'cudart_static']):
- # nvcc always links these unconditionally
- mlog.debug(f'Not adding \'{lib}\' to dependency, since nvcc will link it implicitly')
- link_args = []
- elif link_args and link_args[0].startswith(self.libdir):
- # module included with CUDA, nvcc knows how to find these itself
- mlog.debug(f'CUDA module \'{lib}\' found in CUDA libdir')
- link_args = ['-l' + lib]
+ if language == 'cuda' and lib in REWRITE_MODULES:
+ link_args = REWRITE_MODULES[lib]
+ mlog.debug(f'Rewriting module \'{lib}\' to \'{link_args}\'')
+ elif lib == 'cudart_static':
+ machine = self.env.machines[self.for_machine]
+ if machine.is_linux():
+ # extracted by running
+ # nvcc -v foo.o
+ link_args += ['-lrt', '-lpthread', '-ldl']
+
args += link_args
return args
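The rewrite table makes the link line language-dependent: when nvcc drives the
link, modules shipped with the toolkit map to nvcc's native driver switches,
while a C/C++ link keeps whatever find_library() resolved plus the extra system
libraries for the static runtime. A hedged sketch of that branching:

    REWRITE_MODULES = {
        'cudart': ['-cudart', 'shared'],
        'cudart_static': ['-cudart', 'static'],
        'cudadevrt': ['-cudadevrt'],
    }

    def link_args_for(lib, resolved, language, is_linux=True):
        # resolved: the arguments find_library() returned for this module
        if language == 'cuda' and lib in REWRITE_MODULES:
            return REWRITE_MODULES[lib]
        args = list(resolved)
        if lib == 'cudart_static' and is_linux:
            # extracted by running `nvcc -v foo.o`
            args += ['-lrt', '-lpthread', '-ldl']
        return args

    assert link_args_for('cudart_static', ['/l/libcudart_static.a'], 'cuda') == ['-cudart', 'static']
    assert link_args_for('cudart_static', ['/l/libcudart_static.a'], 'cpp')[-3:] == ['-lrt', '-lpthread', '-ldl']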
diff --git a/mesonbuild/dependencies/detect.py b/mesonbuild/dependencies/detect.py
index aa62c66..4cdf16d 100644
--- a/mesonbuild/dependencies/detect.py
+++ b/mesonbuild/dependencies/detect.py
@@ -15,7 +15,7 @@ if T.TYPE_CHECKING:
from ..environment import Environment
from .factory import DependencyFactory, WrappedFactoryFunc, DependencyGenerator
- TV_DepIDEntry = T.Union[str, bool, int, T.Tuple[str, ...]]
+ TV_DepIDEntry = T.Union[str, bool, int, None, T.Tuple[str, ...]]
TV_DepID = T.Tuple[T.Tuple[str, TV_DepIDEntry], ...]
PackageTypes = T.Union[T.Type[ExternalDependency], DependencyFactory, WrappedFactoryFunc]
@@ -40,10 +40,14 @@ _packages_accept_language: T.Set[str] = set()
def get_dep_identifier(name: str, kwargs: T.Dict[str, T.Any]) -> 'TV_DepID':
identifier: 'TV_DepID' = (('name', name), )
+ from ..interpreter.type_checking import DEPENDENCY_KWS
+ nkwargs = {k.name: k.default for k in DEPENDENCY_KWS}
+ nkwargs.update(kwargs)
+
from ..interpreter import permitted_dependency_kwargs
assert len(permitted_dependency_kwargs) == 19, \
'Extra kwargs have been added to dependency(), please review if it makes sense to handle it here'
- for key, value in kwargs.items():
+ for key, value in nkwargs.items():
# 'version' is irrelevant for caching; the caller must check version matches
# 'native' is handled above with `for_machine`
# 'required' is irrelevant for caching; the caller handles it separately
@@ -62,7 +66,7 @@ def get_dep_identifier(name: str, kwargs: T.Dict[str, T.Any]) -> 'TV_DepID':
assert isinstance(i, str), i
value = tuple(frozenset(listify(value)))
else:
- assert isinstance(value, (str, bool, int)), value
+ assert value is None or isinstance(value, (str, bool, int)), value
identifier = (*identifier, (key, value),)
return identifier
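Seeding the kwargs with the declared defaults means a call that spells out a
default value now produces the same cache identifier as one that omits it. In
miniature (the default table is an abridged, hypothetical stand-in for
DEPENDENCY_KWS):

    defaults = {'static': None, 'include_type': 'preserve'}

    def identifier(name, kwargs):
        nkwargs = dict(defaults)
        nkwargs.update(kwargs)
        return (('name', name), *sorted(nkwargs.items()))

    # omitting a kwarg and passing its default collide, as intended
    assert identifier('zlib', {}) == identifier('zlib', {'static': None})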
diff --git a/mesonbuild/dependencies/python.py b/mesonbuild/dependencies/python.py
index 3dab31c..b028d9f 100644
--- a/mesonbuild/dependencies/python.py
+++ b/mesonbuild/dependencies/python.py
@@ -330,10 +330,12 @@ class PythonPkgConfigDependency(PkgConfigDependency, _PythonDependencyBase):
# But not Apple, because it's a framework
if self.env.machines.host.is_darwin() and 'PYTHONFRAMEWORKPREFIX' in self.variables:
framework_prefix = self.variables['PYTHONFRAMEWORKPREFIX']
- # Add rpath, will be de-duplicated if necessary
+ # Add rpath, will be de-duplicated if necessary
if framework_prefix.startswith('/Applications/Xcode.app/'):
self.link_args += ['-Wl,-rpath,' + framework_prefix]
- self.raw_link_args += ['-Wl,-rpath,' + framework_prefix]
+ if self.raw_link_args is not None:
+ # When None, self.link_args is used
+ self.raw_link_args += ['-Wl,-rpath,' + framework_prefix]
class PythonFrameworkDependency(ExtraFrameworkDependency, _PythonDependencyBase):
@@ -350,8 +352,14 @@ class PythonSystemDependency(SystemDependency, _PythonDependencyBase):
SystemDependency.__init__(self, name, environment, kwargs)
_PythonDependencyBase.__init__(self, installation, kwargs.get('embed', False))
- # match pkg-config behavior
- if self.link_libpython:
+ # For most platforms, match pkg-config behavior. iOS is a special case;
+ # check for that first, so that check takes priority over
+ # `link_libpython` (which *shouldn't* be set, but just in case)
+ if self.platform.startswith('ios-'):
+ # iOS doesn't use link_libpython - it links with the *framework*.
+ self.link_args = ['-framework', 'Python', '-F', self.variables.get('prefix')]
+ self.is_found = True
+ elif self.link_libpython:
# link args
if mesonlib.is_windows():
self.find_libpy_windows(environment, limited_api=False)
diff --git a/mesonbuild/dependencies/qt.py b/mesonbuild/dependencies/qt.py
index a3a9388..8bb269e 100644
--- a/mesonbuild/dependencies/qt.py
+++ b/mesonbuild/dependencies/qt.py
@@ -9,6 +9,7 @@ from __future__ import annotations
import abc
import re
import os
+from pathlib import Path
import typing as T
from .base import DependencyException, DependencyMethods
@@ -50,7 +51,7 @@ def _qt_get_private_includes(mod_inc_dir: str, module: str, mod_version: str) ->
if len(dirname.split('.')) == 3:
private_dir = dirname
break
- return [private_dir, os.path.join(private_dir, 'Qt' + module)]
+ return [private_dir, Path(private_dir, f'Qt{module}').as_posix()]
def get_qmake_host_bins(qvars: T.Dict[str, str]) -> str:
@@ -303,7 +304,7 @@ class QmakeQtDependency(_QtBase, ConfigToolDependency, metaclass=abc.ABCMeta):
modules_lib_suffix = _get_modules_lib_suffix(self.version, self.env.machines[self.for_machine], is_debug)
for module in self.requested_modules:
- mincdir = os.path.join(incdir, 'Qt' + module)
+ mincdir = Path(incdir, f'Qt{module}').as_posix()
self.compile_args.append('-I' + mincdir)
if module == 'QuickTest':
diff --git a/mesonbuild/dependencies/scalapack.py b/mesonbuild/dependencies/scalapack.py
index c04d1f5..f34692c 100644
--- a/mesonbuild/dependencies/scalapack.py
+++ b/mesonbuild/dependencies/scalapack.py
@@ -9,7 +9,7 @@ import os
import typing as T
from ..options import OptionKey
-from .base import DependencyMethods
+from .base import DependencyException, DependencyMethods
from .cmake import CMakeDependency
from .detect import packages
from .pkgconfig import PkgConfigDependency
@@ -65,8 +65,7 @@ class MKLPkgConfigDependency(PkgConfigDependency):
super().__init__(name, env, kwargs, language=language)
# Doesn't work with gcc on windows, but does on Linux
- if (not self.__mklroot or (env.machines[self.for_machine].is_windows()
- and self.clib_compiler.id == 'gcc')):
+ if env.machines[self.for_machine].is_windows() and self.clib_compiler.id == 'gcc':
self.is_found = False
# This can happen either because we're using GCC, we couldn't find the
@@ -96,6 +95,9 @@ class MKLPkgConfigDependency(PkgConfigDependency):
self.version = v
def _set_libs(self) -> None:
+ if self.__mklroot is None:
+ raise DependencyException('MKLROOT not set')
+
super()._set_libs()
if self.env.machines[self.for_machine].is_windows():
@@ -133,6 +135,9 @@ class MKLPkgConfigDependency(PkgConfigDependency):
self.link_args.insert(i + 1, '-lmkl_blacs_intelmpi_lp64')
def _set_cargs(self) -> None:
+ if self.__mklroot is None:
+ raise DependencyException('MKLROOT not set')
+
allow_system = False
if self.language == 'fortran':
# gfortran doesn't appear to look in system paths for INCLUDE files,
diff --git a/mesonbuild/dependencies/ui.py b/mesonbuild/dependencies/ui.py
index fc44037..1e80a77 100644
--- a/mesonbuild/dependencies/ui.py
+++ b/mesonbuild/dependencies/ui.py
@@ -187,12 +187,9 @@ class VulkanDependencySystem(SystemDependency):
def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any], language: T.Optional[str] = None) -> None:
super().__init__(name, environment, kwargs, language=language)
- try:
- self.vulkan_sdk = os.environ.get('VULKAN_SDK', os.environ['VK_SDK_PATH'])
- if not os.path.isabs(self.vulkan_sdk):
- raise DependencyException('VULKAN_SDK must be an absolute path.')
- except KeyError:
- self.vulkan_sdk = None
+ self.vulkan_sdk = os.environ.get('VULKAN_SDK', os.environ.get('VK_SDK_PATH'))
+ if self.vulkan_sdk and not os.path.isabs(self.vulkan_sdk):
+ raise DependencyException('VULKAN_SDK must be an absolute path.')
if self.vulkan_sdk:
# TODO: this config might not work on some platforms, fix bugs as reported
@@ -242,7 +239,7 @@ class VulkanDependencySystem(SystemDependency):
low=0, high=None, guess=e,
prefix='#include <vulkan/vulkan.h>',
env=environment,
- extra_args=None,
+ extra_args=self.compile_args,
dependencies=None))
# list containing vulkan version components and their expected value
for c, e in [('MAJOR', 1), ('MINOR', 3), ('PATCH', None)]]
diff --git a/mesonbuild/environment.py b/mesonbuild/environment.py
index f322cda..489ef50 100644
--- a/mesonbuild/environment.py
+++ b/mesonbuild/environment.py
@@ -12,6 +12,7 @@ import collections
from . import coredata
from . import mesonlib
from . import machinefile
+from . import options
CmdLineFileParser = machinefile.CmdLineFileParser
@@ -34,6 +35,7 @@ from .compilers import (
is_library,
is_llvm_ir,
is_object,
+ is_separate_compile,
is_source,
)
@@ -43,13 +45,20 @@ from mesonbuild import envconfig
if T.TYPE_CHECKING:
from .compilers import Compiler
from .compilers.mixins.visualstudio import VisualStudioLikeCompiler
- from .options import ElementaryOptionValues
+ from .options import OptionDict, ElementaryOptionValues
from .wrap.wrap import Resolver
from . import cargo
CompilersDict = T.Dict[str, Compiler]
+NON_LANG_ENV_OPTIONS = [
+ ('PKG_CONFIG_PATH', 'pkg_config_path'),
+ ('CMAKE_PREFIX_PATH', 'cmake_prefix_path'),
+ ('LDFLAGS', 'ldflags'),
+ ('CPPFLAGS', 'cppflags'),
+]
+
build_filename = 'meson.build'
@@ -639,7 +648,12 @@ class Environment:
#
# Note that order matters because of 'buildtype', if it is after
# 'optimization' and 'debug' keys, it overrides them.
- self.options: T.MutableMapping[OptionKey, ElementaryOptionValues] = collections.OrderedDict()
+ self.options: OptionDict = collections.OrderedDict()
+
+ # Environment variables with the name converted into an OptionKey type.
+ # These have subtly different behavior compared to machine files, so do
+ # not store them in self.options. See _set_default_options_from_env.
+ self.env_opts: OptionDict = {}
self.machinestore = machinefile.MachineFileStore(self.coredata.config_files, self.coredata.cross_files, self.source_dir)
@@ -716,13 +730,14 @@ class Environment:
def mfilestr2key(self, machine_file_string: str, section: T.Optional[str], section_subproject: T.Optional[str], machine: MachineChoice) -> OptionKey:
key = OptionKey.from_string(machine_file_string)
- assert key.machine == MachineChoice.HOST
if key.subproject:
suggestion = section if section == 'project options' else 'built-in options'
raise MesonException(f'Do not set subproject options in [{section}] section, use [subproject:{suggestion}] instead.')
if section_subproject:
key = key.evolve(subproject=section_subproject)
if machine == MachineChoice.BUILD:
+ if key.machine == MachineChoice.BUILD:
+ mlog.deprecation('Setting build machine options in the native file does not need the "build." prefix', once=True)
return key.evolve(machine=machine)
return key
@@ -777,12 +792,7 @@ class Environment:
def _set_default_options_from_env(self) -> None:
opts: T.List[T.Tuple[str, str]] = (
[(v, f'{k}_args') for k, v in compilers.compilers.CFLAGS_MAPPING.items()] +
- [
- ('PKG_CONFIG_PATH', 'pkg_config_path'),
- ('CMAKE_PREFIX_PATH', 'cmake_prefix_path'),
- ('LDFLAGS', 'ldflags'),
- ('CPPFLAGS', 'cppflags'),
- ]
+ NON_LANG_ENV_OPTIONS
)
env_opts: T.DefaultDict[OptionKey, T.List[str]] = collections.defaultdict(list)
@@ -817,35 +827,35 @@ class Environment:
env_opts[key].extend(p_list)
elif keyname == 'cppflags':
for lang in compilers.compilers.LANGUAGES_USING_CPPFLAGS:
- key = OptionKey(f'{lang}_env_args', machine=for_machine)
+ key = OptionKey(f'{lang}_args', machine=for_machine)
env_opts[key].extend(p_list)
else:
key = OptionKey.from_string(keyname).evolve(machine=for_machine)
if evar in compilers.compilers.CFLAGS_MAPPING.values():
- # If this is an environment variable, we have to
- # store it separately until the compiler is
- # instantiated, as we don't know whether the
- # compiler will want to use these arguments at link
- # time and compile time (instead of just at compile
- # time) until we're instantiating that `Compiler`
- # object. This is required so that passing
- # `-Dc_args=` on the command line and `$CFLAGS`
- # have subtly different behavior. `$CFLAGS` will be
- # added to the linker command line if the compiler
- # acts as a linker driver, `-Dc_args` will not.
- #
- # We still use the original key as the base here, as
- # we want to inherit the machine and the compiler
- # language
lang = key.name.split('_', 1)[0]
- key = key.evolve(f'{lang}_env_args')
+ key = key.evolve(f'{lang}_args')
env_opts[key].extend(p_list)
- # Only store options that are not already in self.options,
- # otherwise we'd override the machine files
- for k, v in env_opts.items():
- if k not in self.options:
- self.options[k] = v
+ # If this is an environment variable, we have to
+ # store it separately until the compiler is
+ # instantiated, as we don't know whether the
+ # compiler will want to use these arguments at link
+ # time and compile time (instead of just at compile
+ # time) until we're instantiating that `Compiler`
+ # object. This is required so that passing
+ # `-Dc_args=` on the command line and `$CFLAGS`
+ # have subtly different behavior. `$CFLAGS` will be
+ # added to the linker command line if the compiler
+ # acts as a linker driver, `-Dc_args` will not.
+ for (_, keyname), for_machine in itertools.product(NON_LANG_ENV_OPTIONS, MachineChoice):
+ key = OptionKey.from_string(keyname).evolve(machine=for_machine)
+ # Only store options that are not already in self.options,
+ # otherwise we'd override the machine files
+ if key in env_opts and key not in self.options:
+ self.options[key] = env_opts[key]
+ del env_opts[key]
+
+ self.env_opts.update(env_opts)
def _set_default_binaries_from_env(self) -> None:
"""Set default binaries from the environment.
@@ -928,6 +938,9 @@ class Environment:
def is_assembly(self, fname: 'mesonlib.FileOrString') -> bool:
return is_assembly(fname)
+ def is_separate_compile(self, fname: 'mesonlib.FileOrString') -> bool:
+ return is_separate_compile(fname)
+
def is_llvm_ir(self, fname: 'mesonlib.FileOrString') -> bool:
return is_llvm_ir(fname)
@@ -1064,3 +1077,44 @@ class Environment:
if extra_paths:
env.prepend('PATH', list(extra_paths))
return env
+
+ def add_lang_args(self, lang: str, comp: T.Type['Compiler'],
+ for_machine: MachineChoice) -> None:
+ """Add global language arguments that are needed before compiler/linker detection."""
+ description = f'Extra arguments passed to the {lang}'
+ argkey = OptionKey(f'{lang}_args', machine=for_machine)
+ largkey = OptionKey(f'{lang}_link_args', machine=for_machine)
+
+ comp_args_from_envvar = False
+ comp_options = self.coredata.optstore.get_pending_value(argkey)
+ if comp_options is None:
+ comp_args_from_envvar = True
+ comp_options = self.env_opts.get(argkey, [])
+
+ link_options = self.coredata.optstore.get_pending_value(largkey)
+ if link_options is None:
+ link_options = self.env_opts.get(largkey, [])
+
+ assert isinstance(comp_options, (str, list)), 'for mypy'
+ assert isinstance(link_options, (str, list)), 'for mypy'
+
+ cargs = options.UserStringArrayOption(
+ argkey.name,
+ description + ' compiler',
+ comp_options, split_args=True, allow_dups=True)
+
+ largs = options.UserStringArrayOption(
+ largkey.name,
+ description + ' linker',
+ link_options, split_args=True, allow_dups=True)
+
+ self.coredata.optstore.add_compiler_option(lang, argkey, cargs)
+ self.coredata.optstore.add_compiler_option(lang, largkey, largs)
+
+ if comp.INVOKES_LINKER and comp_args_from_envvar:
+ # If the compiler acts as a linker driver, and we're using the
+ # environment variable flags for both the compiler and linker
+ # arguments, then put the compiler flags in the linker flags as well.
+ # This is how autotools works, and the env vars feature is for
+ # autotools compatibility.
+ largs.extend_value(comp_options)
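This tail encodes the autotools convention: $CFLAGS reaches the link line when
the compiler acts as the linker driver, but only when the compile args actually
came from the environment rather than from -Dc_args. A stripped-down model of
the decision (function and parameter names are illustrative):

    def compile_and_link_args(cmdline_args, env_cflags, env_ldflags, invokes_linker):
        # cmdline_args: value from -Dc_args=... / machine files, or None if unset
        from_env = cmdline_args is None
        comp = env_cflags if from_env else cmdline_args
        link = list(env_ldflags)
        if invokes_linker and from_env:
            # $CFLAGS lands on the link line too, matching autotools
            link += comp
        return comp, link

    assert compile_and_link_args(None, ['-O2'], [], True) == (['-O2'], ['-O2'])
    assert compile_and_link_args(['-O3'], ['-O2'], [], True) == (['-O3'], [])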
diff --git a/mesonbuild/interpreter/compiler.py b/mesonbuild/interpreter/compiler.py
index 8aeac8a..57e9499 100644
--- a/mesonbuild/interpreter/compiler.py
+++ b/mesonbuild/interpreter/compiler.py
@@ -19,7 +19,7 @@ from ..compilers import SUFFIX_TO_LANG, RunResult
from ..compilers.compilers import CompileCheckMode
from ..interpreterbase import (ObjectHolder, noPosargs, noKwargs,
FeatureNew, FeatureNewKwargs, disablerIfNotFound,
- InterpreterException)
+ InterpreterException, InterpreterObject)
from ..interpreterbase.decorators import ContainerTypeInfo, typed_kwargs, KwargInfo, typed_pos_args
from ..options import OptionKey
from .interpreterobjects import (extract_required_kwarg, extract_search_dirs)
@@ -110,29 +110,28 @@ class _TestMode(enum.Enum):
class TryRunResultHolder(ObjectHolder['RunResult']):
def __init__(self, res: 'RunResult', interpreter: 'Interpreter'):
super().__init__(res, interpreter)
- self.methods.update({'returncode': self.returncode_method,
- 'compiled': self.compiled_method,
- 'stdout': self.stdout_method,
- 'stderr': self.stderr_method,
- })
@noPosargs
@noKwargs
+ @InterpreterObject.method('returncode')
def returncode_method(self, args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> int:
return self.held_object.returncode
@noPosargs
@noKwargs
+ @InterpreterObject.method('compiled')
def compiled_method(self, args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> bool:
return self.held_object.compiled
@noPosargs
@noKwargs
+ @InterpreterObject.method('stdout')
def stdout_method(self, args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> str:
return self.held_object.stdout
@noPosargs
@noKwargs
+ @InterpreterObject.method('stderr')
def stderr_method(self, args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> str:
return self.held_object.stderr
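These hunks drop the hand-maintained self.methods.update({...}) tables in
favor of an InterpreterObject.method decorator. One plausible minimal mechanism
for such a decorator (an illustration, not Meson's actual implementation):

    import typing as T

    def method(name: str) -> T.Callable:
        # Tag the function; __init_subclass__ below collects tagged
        # functions into the class's method table.
        def wrapper(f: T.Callable) -> T.Callable:
            f.interp_method_name = name  # type: ignore[attr-defined]
            return f
        return wrapper

    class Holder:
        methods: T.Dict[str, T.Callable] = {}

        def __init_subclass__(cls, **kwargs: object) -> None:
            super().__init_subclass__(**kwargs)
            tagged = {f.interp_method_name: f for f in vars(cls).values()
                      if hasattr(f, 'interp_method_name')}
            cls.methods = {**cls.methods, **tagged}

    class RunResultHolder(Holder):
        @method('returncode')
        def returncode_method(self) -> int:
            return 0

    assert 'returncode' in RunResultHolder.methods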
@@ -190,40 +189,6 @@ class CompilerHolder(ObjectHolder['Compiler']):
def __init__(self, compiler: 'Compiler', interpreter: 'Interpreter'):
super().__init__(compiler, interpreter)
self.environment = self.env
- self.methods.update({'compiles': self.compiles_method,
- 'links': self.links_method,
- 'get_id': self.get_id_method,
- 'get_linker_id': self.get_linker_id_method,
- 'compute_int': self.compute_int_method,
- 'sizeof': self.sizeof_method,
- 'get_define': self.get_define_method,
- 'has_define': self.has_define_method,
- 'check_header': self.check_header_method,
- 'has_header': self.has_header_method,
- 'has_header_symbol': self.has_header_symbol_method,
- 'run': self.run_method,
- 'has_function': self.has_function_method,
- 'has_member': self.has_member_method,
- 'has_members': self.has_members_method,
- 'has_type': self.has_type_method,
- 'alignment': self.alignment_method,
- 'version': self.version_method,
- 'cmd_array': self.cmd_array_method,
- 'find_library': self.find_library_method,
- 'has_argument': self.has_argument_method,
- 'has_function_attribute': self.has_func_attribute_method,
- 'get_supported_function_attributes': self.get_supported_function_attributes_method,
- 'has_multi_arguments': self.has_multi_arguments_method,
- 'get_supported_arguments': self.get_supported_arguments_method,
- 'first_supported_argument': self.first_supported_argument_method,
- 'has_link_argument': self.has_link_argument_method,
- 'has_multi_link_arguments': self.has_multi_link_arguments_method,
- 'get_supported_link_arguments': self.get_supported_link_arguments_method,
- 'first_supported_link_argument': self.first_supported_link_argument_method,
- 'symbols_have_underscore_prefix': self.symbols_have_underscore_prefix_method,
- 'get_argument_syntax': self.get_argument_syntax_method,
- 'preprocess': self.preprocess_method,
- })
@property
def compiler(self) -> 'Compiler':
@@ -254,11 +219,13 @@ class CompilerHolder(ObjectHolder['Compiler']):
@noPosargs
@noKwargs
+ @InterpreterObject.method('version')
def version_method(self, args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> str:
return self.compiler.version
@noPosargs
@noKwargs
+ @InterpreterObject.method('cmd_array')
def cmd_array_method(self, args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> T.List[str]:
return self.compiler.exelist
@@ -289,6 +256,7 @@ class CompilerHolder(ObjectHolder['Compiler']):
_ARGS_KW,
_DEPENDENCIES_KW,
)
+ @InterpreterObject.method('alignment')
def alignment_method(self, args: T.Tuple[str], kwargs: 'AlignmentKw') -> int:
typename = args[0]
deps, msg = self._determine_dependencies(kwargs['dependencies'], compile_only=self.compiler.is_cross)
@@ -302,6 +270,7 @@ class CompilerHolder(ObjectHolder['Compiler']):
@typed_pos_args('compiler.run', (str, mesonlib.File))
@typed_kwargs('compiler.run', *_COMPILES_KWS)
+ @InterpreterObject.method('run')
def run_method(self, args: T.Tuple['mesonlib.FileOrString'], kwargs: 'CompileKW') -> 'RunResult':
if self.compiler.language not in {'d', 'c', 'cpp', 'objc', 'objcpp', 'fortran'}:
FeatureNew.single_use(f'compiler.run for {self.compiler.get_display_language()} language',
@@ -338,17 +307,20 @@ class CompilerHolder(ObjectHolder['Compiler']):
@noPosargs
@noKwargs
+ @InterpreterObject.method('get_id')
def get_id_method(self, args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> str:
return self.compiler.get_id()
@noPosargs
@noKwargs
@FeatureNew('compiler.get_linker_id', '0.53.0')
+ @InterpreterObject.method('get_linker_id')
def get_linker_id_method(self, args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> str:
return self.compiler.get_linker_id()
@noPosargs
@noKwargs
+ @InterpreterObject.method('symbols_have_underscore_prefix')
def symbols_have_underscore_prefix_method(self, args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> bool:
'''
Check if the compiler prefixes _ (underscore) to global C symbols
@@ -358,6 +330,7 @@ class CompilerHolder(ObjectHolder['Compiler']):
@typed_pos_args('compiler.has_member', str, str)
@typed_kwargs('compiler.has_member', _HAS_REQUIRED_KW, *_COMMON_KWS)
+ @InterpreterObject.method('has_member')
def has_member_method(self, args: T.Tuple[str, str], kwargs: 'HasKW') -> bool:
typename, membername = args
disabled, required, feature = extract_required_kwarg(kwargs, self.subproject, default=False)
@@ -383,6 +356,7 @@ class CompilerHolder(ObjectHolder['Compiler']):
@typed_pos_args('compiler.has_members', str, varargs=str, min_varargs=1)
@typed_kwargs('compiler.has_members', _HAS_REQUIRED_KW, *_COMMON_KWS)
+ @InterpreterObject.method('has_members')
def has_members_method(self, args: T.Tuple[str, T.List[str]], kwargs: 'HasKW') -> bool:
typename, membernames = args
members = mlog.bold(', '.join([f'"{m}"' for m in membernames]))
@@ -410,6 +384,7 @@ class CompilerHolder(ObjectHolder['Compiler']):
@typed_pos_args('compiler.has_function', str)
@typed_kwargs('compiler.has_function', _HAS_REQUIRED_KW, *_COMMON_KWS)
+ @InterpreterObject.method('has_function')
def has_function_method(self, args: T.Tuple[str], kwargs: 'HasKW') -> bool:
funcname = args[0]
disabled, required, feature = extract_required_kwarg(kwargs, self.subproject, default=False)
@@ -433,6 +408,7 @@ class CompilerHolder(ObjectHolder['Compiler']):
@typed_pos_args('compiler.has_type', str)
@typed_kwargs('compiler.has_type', _HAS_REQUIRED_KW, *_COMMON_KWS)
+ @InterpreterObject.method('has_type')
def has_type_method(self, args: T.Tuple[str], kwargs: 'HasKW') -> bool:
typename = args[0]
disabled, required, feature = extract_required_kwarg(kwargs, self.subproject, default=False)
@@ -462,6 +438,7 @@ class CompilerHolder(ObjectHolder['Compiler']):
KwargInfo('guess', (int, NoneType)),
*_COMMON_KWS,
)
+ @InterpreterObject.method('compute_int')
def compute_int_method(self, args: T.Tuple[str], kwargs: 'ComputeIntKW') -> int:
expression = args[0]
extra_args = functools.partial(self._determine_args, kwargs)
@@ -475,6 +452,7 @@ class CompilerHolder(ObjectHolder['Compiler']):
@typed_pos_args('compiler.sizeof', str)
@typed_kwargs('compiler.sizeof', *_COMMON_KWS)
+ @InterpreterObject.method('sizeof')
def sizeof_method(self, args: T.Tuple[str], kwargs: 'CommonKW') -> int:
element = args[0]
extra_args = functools.partial(self._determine_args, kwargs)
@@ -489,6 +467,7 @@ class CompilerHolder(ObjectHolder['Compiler']):
@FeatureNew('compiler.get_define', '0.40.0')
@typed_pos_args('compiler.get_define', str)
@typed_kwargs('compiler.get_define', *_COMMON_KWS)
+ @InterpreterObject.method('get_define')
def get_define_method(self, args: T.Tuple[str], kwargs: 'CommonKW') -> str:
element = args[0]
extra_args = functools.partial(self._determine_args, kwargs)
@@ -504,6 +483,7 @@ class CompilerHolder(ObjectHolder['Compiler']):
@FeatureNew('compiler.has_define', '1.3.0')
@typed_pos_args('compiler.has_define', str)
@typed_kwargs('compiler.has_define', *_COMMON_KWS)
+ @InterpreterObject.method('has_define')
def has_define_method(self, args: T.Tuple[str], kwargs: 'CommonKW') -> bool:
define_name = args[0]
extra_args = functools.partial(self._determine_args, kwargs)
@@ -519,6 +499,7 @@ class CompilerHolder(ObjectHolder['Compiler']):
@typed_pos_args('compiler.compiles', (str, mesonlib.File))
@typed_kwargs('compiler.compiles', *_COMPILES_KWS)
+ @InterpreterObject.method('compiles')
def compiles_method(self, args: T.Tuple['mesonlib.FileOrString'], kwargs: 'CompileKW') -> bool:
code = args[0]
testname = kwargs['name']
@@ -555,6 +536,7 @@ class CompilerHolder(ObjectHolder['Compiler']):
@typed_pos_args('compiler.links', (str, mesonlib.File))
@typed_kwargs('compiler.links', *_COMPILES_KWS)
+ @InterpreterObject.method('links')
def links_method(self, args: T.Tuple['mesonlib.FileOrString'], kwargs: 'CompileKW') -> bool:
code = args[0]
testname = kwargs['name']
@@ -606,6 +588,7 @@ class CompilerHolder(ObjectHolder['Compiler']):
@FeatureNew('compiler.check_header', '0.47.0')
@typed_pos_args('compiler.check_header', str)
@typed_kwargs('compiler.check_header', *_HEADER_KWS)
+ @InterpreterObject.method('check_header')
def check_header_method(self, args: T.Tuple[str], kwargs: 'HeaderKW') -> bool:
hname = args[0]
disabled, required, feature = extract_required_kwarg(kwargs, self.subproject, default=False)
@@ -648,11 +631,13 @@ class CompilerHolder(ObjectHolder['Compiler']):
@typed_pos_args('compiler.has_header', str)
@typed_kwargs('compiler.has_header', *_HEADER_KWS)
+ @InterpreterObject.method('has_header')
def has_header_method(self, args: T.Tuple[str], kwargs: 'HeaderKW') -> bool:
return self._has_header_impl(args[0], kwargs)
@typed_pos_args('compiler.has_header_symbol', str, str)
@typed_kwargs('compiler.has_header_symbol', *_HEADER_KWS)
+ @InterpreterObject.method('has_header_symbol')
def has_header_symbol_method(self, args: T.Tuple[str, str], kwargs: 'HeaderKW') -> bool:
hname, symbol = args
disabled, required, feature = extract_required_kwarg(kwargs, self.subproject, default=False)
@@ -692,6 +677,7 @@ class CompilerHolder(ObjectHolder['Compiler']):
KwargInfo('dirs', ContainerTypeInfo(list, str), listify=True, default=[]),
*(k.evolve(name=f'header_{k.name}') for k in _HEADER_KWS)
)
+ @InterpreterObject.method('find_library')
def find_library_method(self, args: T.Tuple[str], kwargs: 'FindLibraryKW') -> 'dependencies.ExternalLibrary':
# TODO add dependencies support?
libname = args[0]
@@ -772,12 +758,14 @@ class CompilerHolder(ObjectHolder['Compiler']):
@typed_pos_args('compiler.has_argument', str)
@typed_kwargs('compiler.has_argument', _HAS_REQUIRED_KW)
+ @InterpreterObject.method('has_argument')
def has_argument_method(self, args: T.Tuple[str], kwargs: 'HasArgumentKW') -> bool:
return self._has_argument_impl([args[0]], kwargs=kwargs)
@typed_pos_args('compiler.has_multi_arguments', varargs=str)
@typed_kwargs('compiler.has_multi_arguments', _HAS_REQUIRED_KW)
@FeatureNew('compiler.has_multi_arguments', '0.37.0')
+ @InterpreterObject.method('has_multi_arguments')
def has_multi_arguments_method(self, args: T.Tuple[T.List[str]], kwargs: 'HasArgumentKW') -> bool:
return self._has_argument_impl(args[0], kwargs=kwargs)
@@ -788,6 +776,7 @@ class CompilerHolder(ObjectHolder['Compiler']):
KwargInfo('checked', str, default='off', since='0.59.0',
validator=in_set_validator({'warn', 'require', 'off'})),
)
+ @InterpreterObject.method('get_supported_arguments')
def get_supported_arguments_method(self, args: T.Tuple[T.List[str]], kwargs: 'GetSupportedArgumentKw') -> T.List[str]:
supported_args: T.List[str] = []
checked = kwargs['checked']
@@ -805,6 +794,7 @@ class CompilerHolder(ObjectHolder['Compiler']):
@noKwargs
@typed_pos_args('compiler.first_supported_argument', varargs=str)
+ @InterpreterObject.method('first_supported_argument')
def first_supported_argument_method(self, args: T.Tuple[T.List[str]], kwargs: 'TYPE_kwargs') -> T.List[str]:
for arg in args[0]:
if self._has_argument_impl([arg]):
@@ -816,18 +806,21 @@ class CompilerHolder(ObjectHolder['Compiler']):
@FeatureNew('compiler.has_link_argument', '0.46.0')
@typed_pos_args('compiler.has_link_argument', str)
@typed_kwargs('compiler.has_link_argument', _HAS_REQUIRED_KW)
+ @InterpreterObject.method('has_link_argument')
def has_link_argument_method(self, args: T.Tuple[str], kwargs: 'HasArgumentKW') -> bool:
return self._has_argument_impl([args[0]], mode=_TestMode.LINKER, kwargs=kwargs)
@FeatureNew('compiler.has_multi_link_argument', '0.46.0')
@typed_pos_args('compiler.has_multi_link_argument', varargs=str)
@typed_kwargs('compiler.has_multi_link_argument', _HAS_REQUIRED_KW)
+ @InterpreterObject.method('has_multi_link_arguments')
def has_multi_link_arguments_method(self, args: T.Tuple[T.List[str]], kwargs: 'HasArgumentKW') -> bool:
return self._has_argument_impl(args[0], mode=_TestMode.LINKER, kwargs=kwargs)
@FeatureNew('compiler.get_supported_link_arguments', '0.46.0')
@noKwargs
@typed_pos_args('compiler.get_supported_link_arguments', varargs=str)
+ @InterpreterObject.method('get_supported_link_arguments')
def get_supported_link_arguments_method(self, args: T.Tuple[T.List[str]], kwargs: 'TYPE_kwargs') -> T.List[str]:
supported_args: T.List[str] = []
for arg in args[0]:
@@ -835,9 +828,10 @@ class CompilerHolder(ObjectHolder['Compiler']):
supported_args.append(arg)
return supported_args
- @FeatureNew('compiler.first_supported_link_argument_method', '0.46.0')
+ @FeatureNew('compiler.first_supported_link_argument', '0.46.0')
@noKwargs
@typed_pos_args('compiler.first_supported_link_argument', varargs=str)
+ @InterpreterObject.method('first_supported_link_argument')
def first_supported_link_argument_method(self, args: T.Tuple[T.List[str]], kwargs: 'TYPE_kwargs') -> T.List[str]:
for arg in args[0]:
if self._has_argument_impl([arg], mode=_TestMode.LINKER):
@@ -871,18 +865,21 @@ class CompilerHolder(ObjectHolder['Compiler']):
@FeatureNew('compiler.has_function_attribute', '0.48.0')
@typed_pos_args('compiler.has_function_attribute', str)
@typed_kwargs('compiler.has_function_attribute', _HAS_REQUIRED_KW)
+ @InterpreterObject.method('has_function_attribute')
def has_func_attribute_method(self, args: T.Tuple[str], kwargs: 'HasArgumentKW') -> bool:
return self._has_function_attribute_impl(args[0], kwargs)
@FeatureNew('compiler.get_supported_function_attributes', '0.48.0')
@noKwargs
@typed_pos_args('compiler.get_supported_function_attributes', varargs=str)
+ @InterpreterObject.method('get_supported_function_attributes')
def get_supported_function_attributes_method(self, args: T.Tuple[T.List[str]], kwargs: 'TYPE_kwargs') -> T.List[str]:
return [a for a in args[0] if self._has_function_attribute_impl(a)]
- @FeatureNew('compiler.get_argument_syntax_method', '0.49.0')
+ @FeatureNew('compiler.get_argument_syntax', '0.49.0')
@noPosargs
@noKwargs
+ @InterpreterObject.method('get_argument_syntax')
def get_argument_syntax_method(self, args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> str:
return self.compiler.get_argument_syntax()
@@ -897,6 +894,7 @@ class CompilerHolder(ObjectHolder['Compiler']):
_DEPENDENCIES_KW.evolve(since='1.1.0'),
_DEPENDS_KW.evolve(since='1.4.0'),
)
+ @InterpreterObject.method('preprocess')
def preprocess_method(self, args: T.Tuple[T.List['mesonlib.FileOrString']], kwargs: 'PreprocessKW') -> T.List[build.CustomTargetIndex]:
compiler = self.compiler.get_preprocessor()
_sources: T.List[mesonlib.File] = self.interpreter.source_strings_to_files(args[0])
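Throughout this patch, the @InterpreterObject.method(...) decorator replaces the per-instance self.methods.update({...}) registration that later hunks delete from each holder's __init__. A minimal sketch of how such a tag-and-scan decorator can work, assuming the decorator only tags the function and a base-class scan rebuilds the dispatch table (the real implementation lives in interpreterbase/baseobjects.py, changed elsewhere in this diff):

    import typing as T

    class InterpreterObject:
        @staticmethod
        def method(name: str) -> T.Callable[[T.Callable], T.Callable]:
            # Tag the function with its script-visible name. Outer decorators
            # such as typed_pos_args are assumed to use functools.wraps, which
            # copies __dict__, so the tag survives further wrapping.
            def wrapper(f: T.Callable) -> T.Callable:
                f.interpreter_method_name = name
                return f
            return wrapper

        def __init__(self) -> None:
            # Rebuild the old self.methods table by scanning the MRO, so
            # decorated methods on base classes are inherited automatically.
            self.methods: T.Dict[str, T.Callable] = {}
            for klass in reversed(type(self).__mro__):
                for value in vars(klass).values():
                    tagged = getattr(value, 'interpreter_method_name', None)
                    if tagged is not None:
                        self.methods[tagged] = value.__get__(self)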
diff --git a/mesonbuild/interpreter/dependencyfallbacks.py b/mesonbuild/interpreter/dependencyfallbacks.py
index 0ebfe3b..f415026 100644
--- a/mesonbuild/interpreter/dependencyfallbacks.py
+++ b/mesonbuild/interpreter/dependencyfallbacks.py
@@ -4,14 +4,12 @@
from __future__ import annotations
-import copy
-
from .interpreterobjects import extract_required_kwarg
from .. import mlog
from .. import dependencies
from .. import build
from ..wrap import WrapMode
-from ..mesonlib import extract_as_list, stringlistify, version_compare_many, listify
+from ..mesonlib import extract_as_list, stringlistify, version_compare_many
from ..options import OptionKey
from ..dependencies import Dependency, DependencyException, NotFoundDependency
from ..interpreterbase import (MesonInterpreterObject, FeatureNew,
@@ -124,21 +122,17 @@ class DependencyFallbacksHolder(MesonInterpreterObject):
# dependency('foo', static: true) should implicitly add
# default_options: ['default_library=static']
static = kwargs.get('static')
- default_options = func_kwargs.get('default_options', {})
- if static is not None and 'default_library' not in default_options:
+ forced_options = {}
+ if static is not None:
default_library = 'static' if static else 'shared'
mlog.log(f'Building fallback subproject with default_library={default_library}')
- default_options = copy.copy(default_options)
- default_options['default_library'] = default_library
- func_kwargs['default_options'] = default_options
+ forced_options[OptionKey('default_library')] = default_library
# Configure the subproject
subp_name = self.subproject_name
varname = self.subproject_varname
func_kwargs.setdefault('version', [])
- if 'default_options' in kwargs and isinstance(kwargs['default_options'], str):
- func_kwargs['default_options'] = listify(kwargs['default_options'])
- self.interpreter.do_subproject(subp_name, func_kwargs)
+ self.interpreter.do_subproject(subp_name, func_kwargs, forced_options=forced_options)
return self._get_subproject_dep(subp_name, varname, kwargs)
def _get_subproject(self, subp_name: str) -> T.Optional[SubprojectHolder]:
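The net effect of the hunk above: dependency('foo', static: true) now forces the fallback subproject's default_library without copying or mutating the caller's default_options. A condensed sketch of the new flow, using the same names as the hunk:

    static = kwargs.get('static')          # from dependency('foo', static: true)
    forced_options = {}
    if static is not None:
        forced_options[OptionKey('default_library')] = 'static' if static else 'shared'
    # Caller-supplied default_options stay in func_kwargs untouched; the
    # forced key travels separately so do_subproject() can give it priority
    # over the machine file and the command line.
    self.interpreter.do_subproject(subp_name, func_kwargs, forced_options=forced_options)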
diff --git a/mesonbuild/interpreter/interpreter.py b/mesonbuild/interpreter/interpreter.py
index bf41bfb..2cf5b7a 100644
--- a/mesonbuild/interpreter/interpreter.py
+++ b/mesonbuild/interpreter/interpreter.py
@@ -62,6 +62,7 @@ from .type_checking import (
OUTPUT_KW,
DEFAULT_OPTIONS,
DEPENDENCIES_KW,
+ DEPENDENCY_KWS,
DEPENDS_KW,
DEPEND_FILES_KW,
DEPFILE_KW,
@@ -115,6 +116,7 @@ if T.TYPE_CHECKING:
from . import kwargs as kwtypes
from ..backend.backends import Backend
from ..interpreterbase.baseobjects import InterpreterObject, TYPE_var, TYPE_kwargs
+ from ..options import OptionDict
from ..programs import OverrideProgram
from .type_checking import SourcesVarargsType
@@ -270,7 +272,7 @@ class Interpreter(InterpreterBase, HoldableObject):
subproject: str = '',
subdir: str = '',
subproject_dir: str = 'subprojects',
- default_project_options: T.Optional[T.Dict[OptionKey, str]] = None,
+ invoker_method_default_options: T.Optional[OptionDict] = None,
ast: T.Optional[mparser.CodeBlockNode] = None,
relaxations: T.Optional[T.Set[InterpreterRuleRelaxation]] = None,
user_defined_options: T.Optional[coredata.SharedCMDOptions] = None,
@@ -295,13 +297,12 @@ class Interpreter(InterpreterBase, HoldableObject):
self.subproject_stack: T.List[str] = []
self.configure_file_outputs: T.Dict[str, int] = {}
# Passed from the outside, only used in subprojects.
- if default_project_options:
- self.default_project_options = default_project_options.copy()
+ if invoker_method_default_options:
+ assert isinstance(invoker_method_default_options, dict)
+ self.invoker_method_default_options = invoker_method_default_options
else:
- self.default_project_options = {}
- self.project_default_options: T.List[str] = []
+ self.invoker_method_default_options = {}
+ self.project_default_options: OptionDict = {}
self.build_func_dict()
self.build_holder_map()
self.user_defined_options = user_defined_options
@@ -426,6 +427,7 @@ class Interpreter(InterpreterBase, HoldableObject):
build.Generator: OBJ.GeneratorHolder,
build.GeneratedList: OBJ.GeneratedListHolder,
build.ExtractedObjects: OBJ.GeneratedObjectsHolder,
+ build.OverrideExecutable: OBJ.OverrideExecutableHolder,
build.RunTarget: OBJ.RunTargetHolder,
build.AliasTarget: OBJ.AliasTargetHolder,
build.Headers: OBJ.HeadersHolder,
@@ -522,6 +524,8 @@ class Interpreter(InterpreterBase, HoldableObject):
self.handle_meson_version(val.value, val)
def get_build_def_files(self) -> mesonlib.OrderedSet[str]:
+ if self.environment.cargo:
+ self.build_def_files.update(self.environment.cargo.get_build_def_files())
return self.build_def_files
def add_build_def_file(self, f: mesonlib.FileOrString) -> None:
@@ -868,7 +872,8 @@ class Interpreter(InterpreterBase, HoldableObject):
self.subprojects[subp_name] = sub
return sub
- def do_subproject(self, subp_name: str, kwargs: kwtypes.DoSubproject, force_method: T.Optional[wrap.Method] = None) -> SubprojectHolder:
+ def do_subproject(self, subp_name: str, kwargs: kwtypes.DoSubproject, force_method: T.Optional[wrap.Method] = None,
+ forced_options: T.Optional[OptionDict] = None) -> SubprojectHolder:
disabled, required, feature = extract_required_kwarg(kwargs, self.subproject)
@@ -879,6 +884,16 @@ class Interpreter(InterpreterBase, HoldableObject):
default_options = kwargs['default_options']
+ # This in practice is only used for default_library. forced_options is the
+ # only case in which a meson.build file overrides the machine file or the
+ # command line.
+ if forced_options:
+ for k, v in forced_options.items():
+ # FIXME: this should have no business poking at augments[],
+ # but set_option() does not do what we want
+ self.coredata.optstore.augments[k.evolve(subproject=subp_name)] = v
+ default_options = {**forced_options, **default_options}
+
if subp_name == '':
raise InterpreterException('Subproject name must not be empty.')
if subp_name[0] == '.':
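The merge added above is order-sensitive; a small worked example of the precedence (string keys used for brevity, the real keys are OptionKey instances):

    forced = {'default_library': 'static'}
    explicit = {'default_library': 'shared', 'warning_level': '3'}
    merged = {**forced, **explicit}
    # merged == {'default_library': 'shared', 'warning_level': '3'}
    # Later keys win in a {**a, **b} merge, so an explicit default_options
    # entry still beats the forced value in the merged dict, while the
    # augments[] write above records the forced value in the option store
    # regardless.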
@@ -930,7 +945,8 @@ class Interpreter(InterpreterBase, HoldableObject):
m += ['method', mlog.bold(method)]
mlog.log(*m, '\n', nested=False)
- methods_map: T.Dict[wrap.Method, T.Callable[[str, str, T.List[str], kwtypes.DoSubproject], SubprojectHolder]] = {
+ methods_map: T.Dict[wrap.Method, T.Callable[[str, str, OptionDict, kwtypes.DoSubproject],
+ SubprojectHolder]] = {
'meson': self._do_subproject_meson,
'cmake': self._do_subproject_cmake,
'cargo': self._do_subproject_cargo,
@@ -952,7 +968,7 @@ class Interpreter(InterpreterBase, HoldableObject):
raise e
def _do_subproject_meson(self, subp_name: str, subdir: str,
- default_options: T.List[str],
+ default_options: OptionDict,
kwargs: kwtypes.DoSubproject,
ast: T.Optional[mparser.CodeBlockNode] = None,
build_def_files: T.Optional[T.List[str]] = None,
@@ -1012,7 +1028,7 @@ class Interpreter(InterpreterBase, HoldableObject):
return self.subprojects[subp_name]
def _do_subproject_cmake(self, subp_name: str, subdir: str,
- default_options: T.List[str],
+ default_options: OptionDict,
kwargs: kwtypes.DoSubproject) -> SubprojectHolder:
from ..cmake import CMakeInterpreter
with mlog.nested(subp_name):
@@ -1039,13 +1055,14 @@ class Interpreter(InterpreterBase, HoldableObject):
return result
def _do_subproject_cargo(self, subp_name: str, subdir: str,
- default_options: T.List[str],
+ default_options: OptionDict,
kwargs: kwtypes.DoSubproject) -> SubprojectHolder:
from .. import cargo
FeatureNew.single_use('Cargo subproject', '1.3.0', self.subproject, location=self.current_node)
mlog.warning('Cargo subproject is an experimental feature and has no backwards compatibility guarantees.',
once=True, location=self.current_node)
if self.environment.cargo is None:
+ self.add_languages(['rust'], True, MachineChoice.HOST)
self.environment.cargo = cargo.Interpreter(self.environment)
with mlog.nested(subp_name):
ast = self.environment.cargo.interpret(subdir)
@@ -1071,7 +1088,7 @@ class Interpreter(InterpreterBase, HoldableObject):
value_object: T.Optional[options.AnyOptionType]
try:
- optkey = options.OptionKey(optname, self.subproject)
+ optkey = options.OptionKey.from_string(optname).evolve(subproject=self.subproject)
value_object, value = self.coredata.optstore.get_value_object_and_value_for(optkey)
except KeyError:
if self.coredata.optstore.is_base_option(optkey):
@@ -1184,23 +1201,34 @@ class Interpreter(InterpreterBase, HoldableObject):
self._load_option_file()
self.project_default_options = kwargs['default_options']
- if isinstance(self.project_default_options, str):
- self.project_default_options = [self.project_default_options]
- assert isinstance(self.project_default_options, (list, dict))
if self.environment.first_invocation or (self.subproject != '' and self.subproject not in self.coredata.initialized_subprojects):
if self.subproject == '':
self.coredata.optstore.initialize_from_top_level_project_call(self.project_default_options,
self.user_defined_options.cmd_line_options,
self.environment.options)
else:
- invoker_method_default_options = self.default_project_options
self.coredata.optstore.initialize_from_subproject_call(self.subproject,
- invoker_method_default_options,
+ self.invoker_method_default_options,
self.project_default_options,
- self.user_defined_options.cmd_line_options)
+ self.user_defined_options.cmd_line_options,
+ self.environment.options)
self.coredata.initialized_subprojects.add(self.subproject)
if not self.is_subproject():
+ # We have to activate VS before adding languages and before calling
+ # self.set_backend() otherwise it wouldn't be able to detect which
+ # vs backend version we need. But after setting default_options in case
+ # the project sets vs backend by default.
+ backend = self.coredata.optstore.get_value_for(OptionKey('backend'))
+ assert backend is None or isinstance(backend, str), 'for mypy'
+ vsenv = self.coredata.optstore.get_value_for(OptionKey('vsenv'))
+ assert isinstance(vsenv, bool), 'for mypy'
+ force_vsenv = vsenv or backend.startswith('vs')
+ mesonlib.setup_vsenv(force_vsenv)
+ self.set_backend()
+
+ if not self.is_subproject():
+ self.coredata.optstore.validate_cmd_line_options(self.user_defined_options.cmd_line_options)
self.build.project_name = proj_name
self.active_projectname = proj_name
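The block moved into this hunk encodes an ordering constraint between options, the VS environment, the backend, and compiler detection. A simplified sketch of the assumed top-level sequence (optstore and interp are stand-in names):

    backend = optstore.get_value_for(OptionKey('backend'))      # may be a 'vs*' backend
    vsenv = optstore.get_value_for(OptionKey('vsenv'))
    mesonlib.setup_vsenv(vsenv or backend.startswith('vs'))     # 1. activate VS first
    interp.set_backend()                                        # 2. can now detect the VS version
    interp.add_languages(proj_langs, True, MachineChoice.HOST)  # 3. compilers found inside the VS env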
@@ -1270,22 +1298,9 @@ class Interpreter(InterpreterBase, HoldableObject):
mlog.log('Project name:', mlog.bold(proj_name))
mlog.log('Project version:', mlog.bold(self.project_version))
- if not self.is_subproject():
- # We have to activate VS before adding languages and before calling
- # self.set_backend() otherwise it wouldn't be able to detect which
- # vs backend version we need. But after setting default_options in case
- # the project sets vs backend by default.
- backend = self.coredata.optstore.get_value_for(OptionKey('backend'))
- assert backend is None or isinstance(backend, str), 'for mypy'
- vsenv = self.coredata.optstore.get_value_for(OptionKey('vsenv'))
- assert isinstance(vsenv, bool), 'for mypy'
- force_vsenv = vsenv or backend.startswith('vs')
- mesonlib.setup_vsenv(force_vsenv)
-
self.add_languages(proj_langs, True, MachineChoice.HOST)
self.add_languages(proj_langs, False, MachineChoice.BUILD)
- self.set_backend()
if not self.is_subproject():
self.check_stdlibs()
@@ -1528,7 +1543,7 @@ class Interpreter(InterpreterBase, HoldableObject):
self.backend.allow_thin_archives[for_machine] = False
else:
# update new values from commandline, if it applies
- self.coredata.process_compiler_options(lang, comp, self.environment, self.subproject)
+ self.coredata.process_compiler_options(lang, comp, self.subproject)
if for_machine == MachineChoice.HOST or self.environment.is_cross_build():
logger_fun = mlog.log
@@ -1589,7 +1604,7 @@ class Interpreter(InterpreterBase, HoldableObject):
def program_from_overrides(self, command_names: T.List[mesonlib.FileOrString],
extra_info: T.List['mlog.TV_Loggable']
- ) -> T.Optional[T.Union[ExternalProgram, OverrideProgram, build.Executable]]:
+ ) -> T.Optional[T.Union[ExternalProgram, OverrideProgram, build.OverrideExecutable]]:
for name in command_names:
if not isinstance(name, str):
continue
@@ -1604,7 +1619,7 @@ class Interpreter(InterpreterBase, HoldableObject):
if isinstance(name, str):
self.build.searched_programs.add(name)
- def add_find_program_override(self, name: str, exe: T.Union[build.Executable, ExternalProgram, 'OverrideProgram']) -> None:
+ def add_find_program_override(self, name: str, exe: T.Union[build.OverrideExecutable, ExternalProgram, 'OverrideProgram']) -> None:
if name in self.build.searched_programs:
raise InterpreterException(f'Tried to override finding of executable "{name}" which has already been found.')
if name in self.build.find_overrides:
@@ -1623,13 +1638,13 @@ class Interpreter(InterpreterBase, HoldableObject):
# the host machine.
def find_program_impl(self, args: T.List[mesonlib.FileOrString],
for_machine: MachineChoice = MachineChoice.HOST,
- default_options: T.Optional[T.Dict[OptionKey, options.ElementaryOptionValues]] = None,
+ default_options: T.Optional[OptionDict] = None,
required: bool = True, silent: bool = True,
wanted: T.Union[str, T.List[str]] = '',
search_dirs: T.Optional[T.List[str]] = None,
version_arg: T.Optional[str] = '',
version_func: T.Optional[ProgramVersionFunc] = None
- ) -> T.Union['ExternalProgram', 'build.Executable', 'OverrideProgram']:
+ ) -> T.Union['ExternalProgram', 'build.OverrideExecutable', 'OverrideProgram']:
args = mesonlib.listify(args)
extra_info: T.List[mlog.TV_Loggable] = []
@@ -1654,7 +1669,7 @@ class Interpreter(InterpreterBase, HoldableObject):
return progobj
def program_lookup(self, args: T.List[mesonlib.FileOrString], for_machine: MachineChoice,
- default_options: T.Optional[T.Dict[OptionKey, options.ElementaryOptionValues]],
+ default_options: T.Optional[OptionDict],
required: bool,
search_dirs: T.Optional[T.List[str]],
wanted: T.Union[str, T.List[str]],
@@ -1722,7 +1737,7 @@ class Interpreter(InterpreterBase, HoldableObject):
return True
def find_program_fallback(self, fallback: str, args: T.List[mesonlib.FileOrString],
- default_options: T.Dict[OptionKey, options.ElementaryOptionValues],
+ default_options: OptionDict,
required: bool, extra_info: T.List[mlog.TV_Loggable]
) -> T.Optional[T.Union[ExternalProgram, build.Executable, OverrideProgram]]:
mlog.log('Fallback to subproject', mlog.bold(fallback), 'which provides program',
@@ -1775,8 +1790,8 @@ class Interpreter(InterpreterBase, HoldableObject):
@disablerIfNotFound
@permittedKwargs(permitted_dependency_kwargs)
@typed_pos_args('dependency', varargs=str, min_varargs=1)
- @typed_kwargs('dependency', DEFAULT_OPTIONS.evolve(since='0.38.0'), allow_unknown=True)
- def func_dependency(self, node: mparser.BaseNode, args: T.Tuple[T.List[str]], kwargs) -> Dependency:
+ @typed_kwargs('dependency', *DEPENDENCY_KWS, allow_unknown=True)
+ def func_dependency(self, node: mparser.BaseNode, args: T.Tuple[T.List[str]], kwargs: kwtypes.FuncDependency) -> Dependency:
# Replace '' by empty list of names
names = [n for n in args[0] if n]
if len(names) > 1:
@@ -3243,9 +3258,9 @@ class Interpreter(InterpreterBase, HoldableObject):
def build_both_libraries(self, node: mparser.BaseNode, args: T.Tuple[str, SourcesVarargsType], kwargs: kwtypes.Library) -> build.BothLibraries:
shared_lib = self.build_target(node, args, kwargs, build.SharedLibrary)
static_lib = self.build_target(node, args, kwargs, build.StaticLibrary)
- preferred_library = self.coredata.optstore.get_value_for(OptionKey('default_both_libraries'))
+ preferred_library = self.coredata.optstore.get_value_for(OptionKey('default_both_libraries', subproject=self.subproject))
if preferred_library == 'auto':
- preferred_library = self.coredata.optstore.get_value_for(OptionKey('default_library'))
+ preferred_library = self.coredata.optstore.get_value_for(OptionKey('default_library', subproject=self.subproject))
if preferred_library == 'both':
preferred_library = 'shared'
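A worked example of the per-subproject resolution chain above, with assumed option values and get_value_for standing in for self.coredata.optstore.get_value_for:

    # Subproject 'zlib' configured with default_both_libraries=auto and
    # default_library=both (illustrative values).
    preferred = get_value_for(OptionKey('default_both_libraries', subproject='zlib'))  # 'auto'
    if preferred == 'auto':
        preferred = get_value_for(OptionKey('default_library', subproject='zlib'))     # 'both'
    if preferred == 'both':
        preferred = 'shared'  # build_both_libraries() links against the shared variant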
diff --git a/mesonbuild/interpreter/interpreterobjects.py b/mesonbuild/interpreter/interpreterobjects.py
index a2fadbe..17ba989 100644
--- a/mesonbuild/interpreter/interpreterobjects.py
+++ b/mesonbuild/interpreter/interpreterobjects.py
@@ -15,7 +15,7 @@ from .. import mlog
from ..modules import ModuleReturnValue, ModuleObject, ModuleState, ExtensionModule, NewExtensionModule
from ..backend.backends import TestProtocol
from ..interpreterbase import (
- ContainerTypeInfo, KwargInfo, MesonOperator,
+ ContainerTypeInfo, KwargInfo, InterpreterObject, MesonOperator,
MesonInterpreterObject, ObjectHolder, MutableInterpreterObject,
FeatureNew, FeatureDeprecated,
typed_pos_args, typed_kwargs, typed_operator,
@@ -32,7 +32,7 @@ if T.TYPE_CHECKING:
from . import kwargs
from ..cmake.interpreter import CMakeInterpreter
from ..envconfig import MachineInfo
- from ..interpreterbase import FeatureCheckBase, InterpreterObject, SubProject, TYPE_var, TYPE_kwargs, TYPE_nvar, TYPE_nkwargs
+ from ..interpreterbase import FeatureCheckBase, SubProject, TYPE_var, TYPE_kwargs, TYPE_nvar, TYPE_nkwargs
from .interpreter import Interpreter
from typing_extensions import TypedDict
@@ -97,16 +97,6 @@ class FeatureOptionHolder(ObjectHolder[options.UserFeatureOption]):
auto = T.cast('options.UserFeatureOption', self.env.coredata.optstore.get_value_object_for('auto_features'))
self.held_object = copy.copy(auto)
self.held_object.name = option.name
- self.methods.update({'enabled': self.enabled_method,
- 'disabled': self.disabled_method,
- 'allowed': self.allowed_method,
- 'auto': self.auto_method,
- 'require': self.require_method,
- 'disable_auto_if': self.disable_auto_if_method,
- 'enable_auto_if': self.enable_auto_if_method,
- 'disable_if': self.disable_if_method,
- 'enable_if': self.enable_if_method,
- })
@property
def value(self) -> str:
@@ -124,22 +114,26 @@ class FeatureOptionHolder(ObjectHolder[options.UserFeatureOption]):
@noPosargs
@noKwargs
+ @InterpreterObject.method('enabled')
def enabled_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> bool:
return self.value == 'enabled'
@noPosargs
@noKwargs
+ @InterpreterObject.method('disabled')
def disabled_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> bool:
return self.value == 'disabled'
@noPosargs
@noKwargs
@FeatureNew('feature_option.allowed()', '0.59.0')
+ @InterpreterObject.method('allowed')
def allowed_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> bool:
return self.value != 'disabled'
@noPosargs
@noKwargs
+ @InterpreterObject.method('auto')
def auto_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> bool:
return self.value == 'auto'
@@ -160,6 +154,7 @@ class FeatureOptionHolder(ObjectHolder[options.UserFeatureOption]):
'feature_option.require',
_ERROR_MSG_KW,
)
+ @InterpreterObject.method('require')
def require_method(self, args: T.Tuple[bool], kwargs: 'kwargs.FeatureOptionRequire') -> options.UserFeatureOption:
return self._disable_if(not args[0], kwargs['error_message'])
@@ -169,6 +164,7 @@ class FeatureOptionHolder(ObjectHolder[options.UserFeatureOption]):
'feature_option.disable_if',
_ERROR_MSG_KW,
)
+ @InterpreterObject.method('disable_if')
def disable_if_method(self, args: T.Tuple[bool], kwargs: 'kwargs.FeatureOptionRequire') -> options.UserFeatureOption:
return self._disable_if(args[0], kwargs['error_message'])
@@ -178,6 +174,7 @@ class FeatureOptionHolder(ObjectHolder[options.UserFeatureOption]):
'feature_option.enable_if',
_ERROR_MSG_KW,
)
+ @InterpreterObject.method('enable_if')
def enable_if_method(self, args: T.Tuple[bool], kwargs: 'kwargs.FeatureOptionRequire') -> options.UserFeatureOption:
if not args[0]:
return copy.deepcopy(self.held_object)
@@ -192,12 +189,14 @@ class FeatureOptionHolder(ObjectHolder[options.UserFeatureOption]):
@FeatureNew('feature_option.disable_auto_if()', '0.59.0')
@noKwargs
@typed_pos_args('feature_option.disable_auto_if', bool)
+ @InterpreterObject.method('disable_auto_if')
def disable_auto_if_method(self, args: T.Tuple[bool], kwargs: TYPE_kwargs) -> options.UserFeatureOption:
return copy.deepcopy(self.held_object) if self.value != 'auto' or not args[0] else self.as_disabled()
@FeatureNew('feature_option.enable_auto_if()', '1.1.0')
@noKwargs
@typed_pos_args('feature_option.enable_auto_if', bool)
+ @InterpreterObject.method('enable_auto_if')
def enable_auto_if_method(self, args: T.Tuple[bool], kwargs: TYPE_kwargs) -> options.UserFeatureOption:
return self.as_enabled() if self.value == 'auto' and args[0] else copy.deepcopy(self.held_object)
@@ -220,10 +219,6 @@ class RunProcess(MesonInterpreterObject):
raise AssertionError('BUG: RunProcess must be passed an ExternalProgram')
self.capture = capture
self.returncode, self.stdout, self.stderr = self.run_command(cmd, args, env, source_dir, build_dir, subdir, mesonintrospect, in_builddir, check)
- self.methods.update({'returncode': self.returncode_method,
- 'stdout': self.stdout_method,
- 'stderr': self.stderr_method,
- })
def run_command(self,
cmd: ExternalProgram,
@@ -271,16 +266,19 @@ class RunProcess(MesonInterpreterObject):
@noPosargs
@noKwargs
+ @InterpreterObject.method('returncode')
def returncode_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> int:
return self.returncode
@noPosargs
@noKwargs
+ @InterpreterObject.method('stdout')
def stdout_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str:
return self.stdout
@noPosargs
@noKwargs
+ @InterpreterObject.method('stderr')
def stderr_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str:
return self.stderr
@@ -288,11 +286,6 @@ class EnvironmentVariablesHolder(ObjectHolder[mesonlib.EnvironmentVariables], Mu
def __init__(self, obj: mesonlib.EnvironmentVariables, interpreter: 'Interpreter'):
super().__init__(obj, interpreter)
- self.methods.update({'set': self.set_method,
- 'unset': self.unset_method,
- 'append': self.append_method,
- 'prepend': self.prepend_method,
- })
def __repr__(self) -> str:
repr_str = "<{0}: {1}>"
@@ -310,6 +303,7 @@ class EnvironmentVariablesHolder(ObjectHolder[mesonlib.EnvironmentVariables], Mu
@typed_pos_args('environment.set', str, varargs=str, min_varargs=1)
@typed_kwargs('environment.set', ENV_SEPARATOR_KW)
+ @InterpreterObject.method('set')
def set_method(self, args: T.Tuple[str, T.List[str]], kwargs: 'EnvironmentSeparatorKW') -> None:
name, values = args
self.held_object.set(name, values, kwargs['separator'])
@@ -317,11 +311,13 @@ class EnvironmentVariablesHolder(ObjectHolder[mesonlib.EnvironmentVariables], Mu
@FeatureNew('environment.unset', '1.4.0')
@typed_pos_args('environment.unset', str)
@noKwargs
+ @InterpreterObject.method('unset')
def unset_method(self, args: T.Tuple[str], kwargs: TYPE_kwargs) -> None:
self.held_object.unset(args[0])
@typed_pos_args('environment.append', str, varargs=str, min_varargs=1)
@typed_kwargs('environment.append', ENV_SEPARATOR_KW)
+ @InterpreterObject.method('append')
def append_method(self, args: T.Tuple[str, T.List[str]], kwargs: 'EnvironmentSeparatorKW') -> None:
name, values = args
self.warn_if_has_name(name)
@@ -329,6 +325,7 @@ class EnvironmentVariablesHolder(ObjectHolder[mesonlib.EnvironmentVariables], Mu
@typed_pos_args('environment.prepend', str, varargs=str, min_varargs=1)
@typed_kwargs('environment.prepend', ENV_SEPARATOR_KW)
+ @InterpreterObject.method('prepend')
def prepend_method(self, args: T.Tuple[str, T.List[str]], kwargs: 'EnvironmentSeparatorKW') -> None:
name, values = args
self.warn_if_has_name(name)
@@ -342,15 +339,6 @@ class ConfigurationDataHolder(ObjectHolder[build.ConfigurationData], MutableInte
def __init__(self, obj: build.ConfigurationData, interpreter: 'Interpreter'):
super().__init__(obj, interpreter)
- self.methods.update({'set': self.set_method,
- 'set10': self.set10_method,
- 'set_quoted': self.set_quoted_method,
- 'has': self.has_method,
- 'get': self.get_method,
- 'keys': self.keys_method,
- 'get_unquoted': self.get_unquoted_method,
- 'merge_from': self.merge_from_method,
- })
def __deepcopy__(self, memo: T.Dict) -> 'ConfigurationDataHolder':
return ConfigurationDataHolder(copy.deepcopy(self.held_object), self.interpreter)
@@ -364,12 +352,14 @@ class ConfigurationDataHolder(ObjectHolder[build.ConfigurationData], MutableInte
@typed_pos_args('configuration_data.set', str, (str, int, bool))
@typed_kwargs('configuration_data.set', _CONF_DATA_SET_KWS)
+ @InterpreterObject.method('set')
def set_method(self, args: T.Tuple[str, T.Union[str, int, bool]], kwargs: 'kwargs.ConfigurationDataSet') -> None:
self.__check_used()
self.held_object.values[args[0]] = (args[1], kwargs['description'])
@typed_pos_args('configuration_data.set_quoted', str, str)
@typed_kwargs('configuration_data.set_quoted', _CONF_DATA_SET_KWS)
+ @InterpreterObject.method('set_quoted')
def set_quoted_method(self, args: T.Tuple[str, str], kwargs: 'kwargs.ConfigurationDataSet') -> None:
self.__check_used()
escaped_val = '\\"'.join(args[1].split('"'))
@@ -377,6 +367,7 @@ class ConfigurationDataHolder(ObjectHolder[build.ConfigurationData], MutableInte
@typed_pos_args('configuration_data.set10', str, (int, bool))
@typed_kwargs('configuration_data.set10', _CONF_DATA_SET_KWS)
+ @InterpreterObject.method('set10')
def set10_method(self, args: T.Tuple[str, T.Union[int, bool]], kwargs: 'kwargs.ConfigurationDataSet') -> None:
self.__check_used()
# bool is a subclass of int, so we need to check for bool explicitly.
@@ -394,12 +385,14 @@ class ConfigurationDataHolder(ObjectHolder[build.ConfigurationData], MutableInte
@typed_pos_args('configuration_data.has', (str, int, bool))
@noKwargs
+ @InterpreterObject.method('has')
def has_method(self, args: T.Tuple[T.Union[str, int, bool]], kwargs: TYPE_kwargs) -> bool:
return args[0] in self.held_object.values
@FeatureNew('configuration_data.get()', '0.38.0')
@typed_pos_args('configuration_data.get', str, optargs=[(str, int, bool)])
@noKwargs
+ @InterpreterObject.method('get')
def get_method(self, args: T.Tuple[str, T.Optional[T.Union[str, int, bool]]],
kwargs: TYPE_kwargs) -> T.Union[str, int, bool]:
name = args[0]
@@ -412,6 +405,7 @@ class ConfigurationDataHolder(ObjectHolder[build.ConfigurationData], MutableInte
@FeatureNew('configuration_data.get_unquoted()', '0.44.0')
@typed_pos_args('configuration_data.get_unquoted', str, optargs=[(str, int, bool)])
@noKwargs
+ @InterpreterObject.method('get_unquoted')
def get_unquoted_method(self, args: T.Tuple[str, T.Optional[T.Union[str, int, bool]]],
kwargs: TYPE_kwargs) -> T.Union[str, int, bool]:
name = args[0]
@@ -431,6 +425,7 @@ class ConfigurationDataHolder(ObjectHolder[build.ConfigurationData], MutableInte
@FeatureNew('configuration_data.keys()', '0.57.0')
@noPosargs
@noKwargs
+ @InterpreterObject.method('keys')
def keys_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> T.List[str]:
return sorted(self.keys())
@@ -439,6 +434,7 @@ class ConfigurationDataHolder(ObjectHolder[build.ConfigurationData], MutableInte
@typed_pos_args('configuration_data.merge_from', build.ConfigurationData)
@noKwargs
+ @InterpreterObject.method('merge_from')
def merge_from_method(self, args: T.Tuple[build.ConfigurationData], kwargs: TYPE_kwargs) -> None:
from_object = args[0]
self.held_object.values.update(from_object.values)
@@ -455,31 +451,19 @@ _PARTIAL_DEP_KWARGS = [
class DependencyHolder(ObjectHolder[Dependency]):
def __init__(self, dep: Dependency, interpreter: 'Interpreter'):
super().__init__(dep, interpreter)
- self.methods.update({'found': self.found_method,
- 'type_name': self.type_name_method,
- 'version': self.version_method,
- 'name': self.name_method,
- 'get_pkgconfig_variable': self.pkgconfig_method,
- 'get_configtool_variable': self.configtool_method,
- 'get_variable': self.variable_method,
- 'partial_dependency': self.partial_dependency_method,
- 'include_type': self.include_type_method,
- 'as_system': self.as_system_method,
- 'as_link_whole': self.as_link_whole_method,
- 'as_static': self.as_static_method,
- 'as_shared': self.as_shared_method,
- })
def found(self) -> bool:
return self.found_method([], {})
@noPosargs
@noKwargs
+ @InterpreterObject.method('type_name')
def type_name_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str:
return self.held_object.type_name
@noPosargs
@noKwargs
+ @InterpreterObject.method('found')
def found_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> bool:
if self.held_object.type_name == 'internal':
return True
@@ -487,11 +471,13 @@ class DependencyHolder(ObjectHolder[Dependency]):
@noPosargs
@noKwargs
+ @InterpreterObject.method('version')
def version_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str:
return self.held_object.get_version()
@noPosargs
@noKwargs
+ @InterpreterObject.method('name')
def name_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str:
return self.held_object.get_name()
@@ -503,6 +489,7 @@ class DependencyHolder(ObjectHolder[Dependency]):
KwargInfo('default', str, default=''),
PKGCONFIG_DEFINE_KW.evolve(name='define_variable')
)
+ @InterpreterObject.method('get_pkgconfig_variable')
def pkgconfig_method(self, args: T.Tuple[str], kwargs: 'kwargs.DependencyPkgConfigVar') -> str:
from ..dependencies.pkgconfig import PkgConfigDependency
if not isinstance(self.held_object, PkgConfigDependency):
@@ -521,6 +508,7 @@ class DependencyHolder(ObjectHolder[Dependency]):
'use dependency.get_variable(configtool : ...) instead')
@noKwargs
@typed_pos_args('dependency.get_configtool_variable', str)
+ @InterpreterObject.method('get_configtool_variable')
def configtool_method(self, args: T.Tuple[str], kwargs: TYPE_kwargs) -> str:
from ..dependencies.configtool import ConfigToolDependency
if not isinstance(self.held_object, ConfigToolDependency):
@@ -533,6 +521,7 @@ class DependencyHolder(ObjectHolder[Dependency]):
@FeatureNew('dependency.partial_dependency', '0.46.0')
@noPosargs
@typed_kwargs('dependency.partial_dependency', *_PARTIAL_DEP_KWARGS)
+ @InterpreterObject.method('partial_dependency')
def partial_dependency_method(self, args: T.List[TYPE_nvar], kwargs: 'kwargs.DependencyMethodPartialDependency') -> Dependency:
pdep = self.held_object.get_partial_dependency(**kwargs)
return pdep
@@ -549,6 +538,7 @@ class DependencyHolder(ObjectHolder[Dependency]):
KwargInfo('default_value', (str, NoneType)),
PKGCONFIG_DEFINE_KW,
)
+ @InterpreterObject.method('get_variable')
def variable_method(self, args: T.Tuple[T.Optional[str]], kwargs: 'kwargs.DependencyGetVariable') -> str:
default_varname = args[0]
if default_varname is not None:
@@ -570,18 +560,21 @@ class DependencyHolder(ObjectHolder[Dependency]):
@FeatureNew('dependency.include_type', '0.52.0')
@noPosargs
@noKwargs
+ @InterpreterObject.method('include_type')
def include_type_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str:
return self.held_object.get_include_type()
@FeatureNew('dependency.as_system', '0.52.0')
@noKwargs
@typed_pos_args('dependency.as_system', optargs=[str])
+ @InterpreterObject.method('as_system')
def as_system_method(self, args: T.Tuple[T.Optional[str]], kwargs: TYPE_kwargs) -> Dependency:
return self.held_object.generate_system_dependency(args[0] or 'system')
@FeatureNew('dependency.as_link_whole', '0.56.0')
@noKwargs
@noPosargs
+ @InterpreterObject.method('as_link_whole')
def as_link_whole_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> Dependency:
if not isinstance(self.held_object, InternalDependency):
raise InterpreterException('as_link_whole method is only supported on declare_dependency() objects')
@@ -594,6 +587,7 @@ class DependencyHolder(ObjectHolder[Dependency]):
'dependency.as_static',
KwargInfo('recursive', bool, default=False),
)
+ @InterpreterObject.method('as_static')
def as_static_method(self, args: T.List[TYPE_var], kwargs: InternalDependencyAsKW) -> Dependency:
if not isinstance(self.held_object, InternalDependency):
raise InterpreterException('as_static method is only supported on declare_dependency() objects')
@@ -605,6 +599,7 @@ class DependencyHolder(ObjectHolder[Dependency]):
'dependency.as_shared',
KwargInfo('recursive', bool, default=False),
)
+ @InterpreterObject.method('as_shared')
def as_shared_method(self, args: T.List[TYPE_var], kwargs: InternalDependencyAsKW) -> Dependency:
if not isinstance(self.held_object, InternalDependency):
raise InterpreterException('as_shared method is only supported on declare_dependency() objects')
@@ -615,13 +610,10 @@ _EXTPROG = T.TypeVar('_EXTPROG', bound=ExternalProgram)
class _ExternalProgramHolder(ObjectHolder[_EXTPROG]):
def __init__(self, ep: _EXTPROG, interpreter: 'Interpreter') -> None:
super().__init__(ep, interpreter)
- self.methods.update({'found': self.found_method,
- 'path': self.path_method,
- 'version': self.version_method,
- 'full_path': self.full_path_method})
@noPosargs
@noKwargs
+ @InterpreterObject.method('found')
def found_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> bool:
return self.found()
@@ -629,12 +621,14 @@ class _ExternalProgramHolder(ObjectHolder[_EXTPROG]):
@noKwargs
@FeatureDeprecated('ExternalProgram.path', '0.55.0',
'use ExternalProgram.full_path() instead')
+ @InterpreterObject.method('path')
def path_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str:
return self._full_path()
@noPosargs
@noKwargs
@FeatureNew('ExternalProgram.full_path', '0.55.0')
+ @InterpreterObject.method('full_path')
def full_path_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str:
return self._full_path()
@@ -648,6 +642,7 @@ class _ExternalProgramHolder(ObjectHolder[_EXTPROG]):
@noPosargs
@noKwargs
@FeatureNew('ExternalProgram.version', '0.62.0')
+ @InterpreterObject.method('version')
def version_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str:
if not self.found():
raise InterpreterException('Unable to get the version of a not-found external program')
@@ -665,25 +660,23 @@ class ExternalProgramHolder(_ExternalProgramHolder[ExternalProgram]):
class ExternalLibraryHolder(ObjectHolder[ExternalLibrary]):
def __init__(self, el: ExternalLibrary, interpreter: 'Interpreter'):
super().__init__(el, interpreter)
- self.methods.update({'found': self.found_method,
- 'type_name': self.type_name_method,
- 'partial_dependency': self.partial_dependency_method,
- 'name': self.name_method,
- })
@noPosargs
@noKwargs
+ @InterpreterObject.method('type_name')
def type_name_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str:
return self.held_object.type_name
@noPosargs
@noKwargs
+ @InterpreterObject.method('found')
def found_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> bool:
return self.held_object.found()
@FeatureNew('dependency.partial_dependency', '0.46.0')
@noPosargs
@typed_kwargs('dependency.partial_dependency', *_PARTIAL_DEP_KWARGS)
+ @InterpreterObject.method('partial_dependency')
def partial_dependency_method(self, args: T.List[TYPE_nvar], kwargs: 'kwargs.DependencyMethodPartialDependency') -> Dependency:
pdep = self.held_object.get_partial_dependency(**kwargs)
return pdep
@@ -691,6 +684,7 @@ class ExternalLibraryHolder(ObjectHolder[ExternalLibrary]):
@FeatureNew('dependency.name', '1.5.0')
@noPosargs
@noKwargs
+ @InterpreterObject.method('name')
def name_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str:
return self.held_object.name
@@ -699,36 +693,34 @@ class ExternalLibraryHolder(ObjectHolder[ExternalLibrary]):
class MachineHolder(ObjectHolder['MachineInfo']):
def __init__(self, machine_info: 'MachineInfo', interpreter: 'Interpreter'):
super().__init__(machine_info, interpreter)
- self.methods.update({'system': self.system_method,
- 'cpu': self.cpu_method,
- 'cpu_family': self.cpu_family_method,
- 'endian': self.endian_method,
- 'kernel': self.kernel_method,
- 'subsystem': self.subsystem_method,
- })
@noPosargs
@noKwargs
+ @InterpreterObject.method('cpu_family')
def cpu_family_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str:
return self.held_object.cpu_family
@noPosargs
@noKwargs
+ @InterpreterObject.method('cpu')
def cpu_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str:
return self.held_object.cpu
@noPosargs
@noKwargs
+ @InterpreterObject.method('system')
def system_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str:
return self.held_object.system
@noPosargs
@noKwargs
+ @InterpreterObject.method('endian')
def endian_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str:
return self.held_object.endian
@noPosargs
@noKwargs
+ @InterpreterObject.method('kernel')
def kernel_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str:
if self.held_object.kernel is not None:
return self.held_object.kernel
@@ -736,6 +728,7 @@ class MachineHolder(ObjectHolder['MachineInfo']):
@noPosargs
@noKwargs
+ @InterpreterObject.method('subsystem')
def subsystem_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str:
if self.held_object.subsystem is not None:
return self.held_object.subsystem
@@ -748,12 +741,11 @@ class IncludeDirsHolder(ObjectHolder[build.IncludeDirs]):
class FileHolder(ObjectHolder[mesonlib.File]):
def __init__(self, file: mesonlib.File, interpreter: 'Interpreter'):
super().__init__(file, interpreter)
- self.methods.update({'full_path': self.full_path_method,
- })
@noPosargs
@noKwargs
@FeatureNew('file.full_path', '1.4.0')
+ @InterpreterObject.method('full_path')
def full_path_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str:
return self.held_object.absolute_path(self.env.source_dir, self.env.build_dir)
@@ -836,12 +828,10 @@ class SubprojectHolder(MesonInterpreterObject):
self.subdir = PurePath(subdir).as_posix()
self.cm_interpreter: T.Optional[CMakeInterpreter] = None
self.callstack = callstack
- self.methods.update({'get_variable': self.get_variable_method,
- 'found': self.found_method,
- })
@noPosargs
@noKwargs
+ @InterpreterObject.method('found')
def found_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> bool:
return self.found()
@@ -863,6 +853,7 @@ class SubprojectHolder(MesonInterpreterObject):
@noKwargs
@typed_pos_args('subproject.get_variable', str, optargs=[object])
@noArgsFlattening
+ @InterpreterObject.method('get_variable')
def get_variable_method(self, args: T.Tuple[str, T.Optional[str]], kwargs: TYPE_kwargs) -> T.Union[TYPE_var, InterpreterObject]:
return self.get_variable(args, kwargs)
@@ -905,16 +896,6 @@ _BuildTarget = T.TypeVar('_BuildTarget', bound=T.Union[build.BuildTarget, build.
class BuildTargetHolder(ObjectHolder[_BuildTarget]):
def __init__(self, target: _BuildTarget, interp: 'Interpreter'):
super().__init__(target, interp)
- self.methods.update({'extract_objects': self.extract_objects_method,
- 'extract_all_objects': self.extract_all_objects_method,
- 'name': self.name_method,
- 'get_id': self.get_id_method,
- 'outdir': self.outdir_method,
- 'full_path': self.full_path_method,
- 'path': self.path_method,
- 'found': self.found_method,
- 'private_dir_include': self.private_dir_include_method,
- })
def __repr__(self) -> str:
r = '<{} {}: {}>'
@@ -934,6 +915,7 @@ class BuildTargetHolder(ObjectHolder[_BuildTarget]):
@noPosargs
@noKwargs
+ @InterpreterObject.method('found')
def found_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> bool:
if not (isinstance(self.held_object, build.Executable) and self.held_object.was_returned_by_find_program):
FeatureNew.single_use('BuildTarget.found', '0.59.0', subproject=self.held_object.subproject)
@@ -941,27 +923,32 @@ class BuildTargetHolder(ObjectHolder[_BuildTarget]):
@noPosargs
@noKwargs
+ @InterpreterObject.method('private_dir_include')
def private_dir_include_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> build.IncludeDirs:
return build.IncludeDirs('', [], False, [self.interpreter.backend.get_target_private_dir(self._target_object)])
@noPosargs
@noKwargs
+ @InterpreterObject.method('full_path')
def full_path_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str:
return self.interpreter.backend.get_target_filename_abs(self._target_object)
@noPosargs
@noKwargs
@FeatureDeprecated('BuildTarget.path', '0.55.0', 'Use BuildTarget.full_path instead')
+ @InterpreterObject.method('path')
def path_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str:
return self.interpreter.backend.get_target_filename_abs(self._target_object)
@noPosargs
@noKwargs
+ @InterpreterObject.method('outdir')
def outdir_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str:
return self.interpreter.backend.get_target_dir(self._target_object)
@noKwargs
@typed_pos_args('extract_objects', varargs=(mesonlib.File, str, build.CustomTarget, build.CustomTargetIndex, build.GeneratedList))
+ @InterpreterObject.method('extract_objects')
def extract_objects_method(self, args: T.Tuple[T.List[T.Union[mesonlib.FileOrString, 'build.GeneratedTypes']]], kwargs: TYPE_nkwargs) -> build.ExtractedObjects:
tobj = self._target_object
unity_value = self.interpreter.coredata.get_option_for_target(tobj, "unity")
@@ -981,6 +968,7 @@ class BuildTargetHolder(ObjectHolder[_BuildTarget]):
''')
)
)
+ @InterpreterObject.method('extract_all_objects')
def extract_all_objects_method(self, args: T.List[TYPE_nvar], kwargs: 'kwargs.BuildTargeMethodExtractAllObjects') -> build.ExtractedObjects:
return self._target_object.extract_all_objects(kwargs['recursive'])
@@ -989,12 +977,14 @@ class BuildTargetHolder(ObjectHolder[_BuildTarget]):
@FeatureDeprecated('BuildTarget.get_id', '1.2.0',
'This was never formally documented and does not seem to have a real world use. ' +
'See https://github.com/mesonbuild/meson/pull/6061')
+ @InterpreterObject.method('get_id')
def get_id_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str:
return self._target_object.get_id()
@FeatureNew('name', '0.54.0')
@noPosargs
@noKwargs
+ @InterpreterObject.method('name')
def name_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str:
return self._target_object.name
@@ -1010,9 +1000,6 @@ class SharedLibraryHolder(BuildTargetHolder[build.SharedLibrary]):
class BothLibrariesHolder(BuildTargetHolder[build.BothLibraries]):
def __init__(self, libs: build.BothLibraries, interp: 'Interpreter'):
super().__init__(libs, interp)
- self.methods.update({'get_shared_lib': self.get_shared_lib_method,
- 'get_static_lib': self.get_static_lib_method,
- })
def __repr__(self) -> str:
r = '<{} {}: {}, {}: {}>'
@@ -1022,6 +1009,7 @@ class BothLibrariesHolder(BuildTargetHolder[build.BothLibraries]):
@noPosargs
@noKwargs
+ @InterpreterObject.method('get_shared_lib')
def get_shared_lib_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> build.SharedLibrary:
lib = copy.copy(self.held_object.shared)
lib.both_lib = None
@@ -1029,6 +1017,7 @@ class BothLibrariesHolder(BuildTargetHolder[build.BothLibraries]):
@noPosargs
@noKwargs
+ @InterpreterObject.method('get_static_lib')
def get_static_lib_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> build.StaticLibrary:
lib = copy.copy(self.held_object.static)
lib.both_lib = None
@@ -1043,12 +1032,11 @@ class JarHolder(BuildTargetHolder[build.Jar]):
class CustomTargetIndexHolder(ObjectHolder[build.CustomTargetIndex]):
def __init__(self, target: build.CustomTargetIndex, interp: 'Interpreter'):
super().__init__(target, interp)
- self.methods.update({'full_path': self.full_path_method,
- })
@FeatureNew('custom_target[i].full_path', '0.54.0')
@noPosargs
@noKwargs
+ @InterpreterObject.method('full_path')
def full_path_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str:
assert self.interpreter.backend is not None
return self.interpreter.backend.get_target_filename_abs(self.held_object)
@@ -1058,13 +1046,6 @@ _CT = T.TypeVar('_CT', bound=build.CustomTarget)
class _CustomTargetHolder(ObjectHolder[_CT]):
def __init__(self, target: _CT, interp: 'Interpreter'):
super().__init__(target, interp)
- self.methods.update({'full_path': self.full_path_method,
- 'to_list': self.to_list_method,
- })
-
- self.operators.update({
- MesonOperator.INDEX: self.op_index,
- })
def __repr__(self) -> str:
r = '<{} {}: {}>'
@@ -1073,12 +1054,14 @@ class _CustomTargetHolder(ObjectHolder[_CT]):
@noPosargs
@noKwargs
+ @InterpreterObject.method('full_path')
def full_path_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str:
return self.interpreter.backend.get_target_filename_abs(self.held_object)
@FeatureNew('custom_target.to_list', '0.54.0')
@noPosargs
@noKwargs
+ @InterpreterObject.method('to_list')
def to_list_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> T.List[build.CustomTargetIndex]:
result = []
for i in self.held_object:
@@ -1087,6 +1070,7 @@ class _CustomTargetHolder(ObjectHolder[_CT]):
@noKwargs
@typed_operator(MesonOperator.INDEX, int)
+ @InterpreterObject.operator(MesonOperator.INDEX)
def op_index(self, other: int) -> build.CustomTargetIndex:
try:
return self.held_object[other]
@@ -1108,7 +1092,6 @@ class GeneratedListHolder(ObjectHolder[build.GeneratedList]):
class GeneratorHolder(ObjectHolder[build.Generator]):
def __init__(self, gen: build.Generator, interpreter: 'Interpreter'):
super().__init__(gen, interpreter)
- self.methods.update({'process': self.process_method})
@typed_pos_args('generator.process', min_varargs=1, varargs=(str, mesonlib.File, build.CustomTarget, build.CustomTargetIndex, build.GeneratedList))
@typed_kwargs(
@@ -1117,6 +1100,7 @@ class GeneratorHolder(ObjectHolder[build.Generator]):
KwargInfo('extra_args', ContainerTypeInfo(list, str), listify=True, default=[]),
ENV_KW.evolve(since='1.3.0')
)
+ @InterpreterObject.method('process')
def process_method(self,
args: T.Tuple[T.List[T.Union[str, mesonlib.File, 'build.GeneratedTypes']]],
kwargs: 'kwargs.GeneratorProcess') -> build.GeneratedList:
@@ -1142,3 +1126,11 @@ class StructuredSourcesHolder(ObjectHolder[build.StructuredSources]):
def __init__(self, sources: build.StructuredSources, interp: 'Interpreter'):
super().__init__(sources, interp)
+
+class OverrideExecutableHolder(BuildTargetHolder[build.OverrideExecutable]):
+ @noPosargs
+ @noKwargs
+ @FeatureNew('OverrideExecutable.version', '1.9.0')
+ @InterpreterObject.method('version')
+ def version_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str:
+ return self.held_object.get_version(self.interpreter)
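build.OverrideExecutable itself is introduced in the mesonbuild/build.py part of this diff; for context, a hedged sketch of the shape it plausibly has, assuming it pairs an Executable with the overriding project's version so that version_method() above has something to return:

    class OverrideExecutable:
        def __init__(self, exe: 'Executable', version: str) -> None:
            self._exe = exe
            self._version = version

        def get_version(self, interpreter: 'Interpreter') -> str:
            return self._version

        def __getattr__(self, name: str) -> object:
            # Delegate everything else to the wrapped Executable so the
            # override behaves like the target it replaces.
            return getattr(self._exe, name)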
diff --git a/mesonbuild/interpreter/kwargs.py b/mesonbuild/interpreter/kwargs.py
index fb34bbb..7dd49a1 100644
--- a/mesonbuild/interpreter/kwargs.py
+++ b/mesonbuild/interpreter/kwargs.py
@@ -321,7 +321,7 @@ class Subproject(ExtractRequired):
class DoSubproject(ExtractRequired):
- default_options: T.List[str]
+ default_options: T.Union[T.List[str], T.Dict[str, options.ElementaryOptionValues], str]
version: T.List[str]
cmake_options: T.List[str]
options: T.Optional[CMakeSubprojectOptions]
@@ -363,6 +363,8 @@ class _BuildTarget(_BaseBuildTarget):
d_module_versions: T.List[T.Union[str, int]]
d_unittest: bool
rust_dependency_map: T.Dict[str, str]
+ swift_interoperability_mode: Literal['c', 'cpp']
+ swift_module_name: str
sources: SourcesVarargsType
c_args: T.List[str]
cpp_args: T.List[str]
@@ -486,3 +488,8 @@ class FuncDeclareDependency(TypedDict):
sources: T.List[T.Union[FileOrString, build.GeneratedTypes]]
variables: T.Dict[str, str]
version: T.Optional[str]
+
+
+class FuncDependency(TypedDict):
+
+ default_options: T.Dict[OptionKey, T.Union[str, int, bool, T.List[str]]]
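FuncDependency pairs with the *DEPENDENCY_KWS change to func_dependency() in interpreter.py above: typed_kwargs validates and converts the raw keyword arguments, and the TypedDict describes the post-validation shape for mypy. A simplified sketch of the pattern:

    @typed_kwargs('dependency', *DEPENDENCY_KWS, allow_unknown=True)
    def func_dependency(self, node, args, kwargs: kwtypes.FuncDependency) -> Dependency:
        # By the time the body runs, default_options is already an
        # OptionKey-keyed dict rather than the raw list/str/dict input.
        opts = kwargs['default_options']
        ...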
diff --git a/mesonbuild/interpreter/mesonmain.py b/mesonbuild/interpreter/mesonmain.py
index 8ede691..602575c 100644
--- a/mesonbuild/interpreter/mesonmain.py
+++ b/mesonbuild/interpreter/mesonmain.py
@@ -18,7 +18,7 @@ from ..programs import OverrideProgram, ExternalProgram
from ..interpreter.type_checking import ENV_KW, ENV_METHOD_KW, ENV_SEPARATOR_KW, env_convertor_with_method
from ..interpreterbase import (MesonInterpreterObject, FeatureNew, FeatureDeprecated,
typed_pos_args, noArgsFlattening, noPosargs, noKwargs,
- typed_kwargs, KwargInfo, InterpreterException)
+ typed_kwargs, KwargInfo, InterpreterException, InterpreterObject)
from .primitives import MesonVersionString
from .type_checking import NATIVE_KW, NoneType
@@ -55,38 +55,6 @@ class MesonMain(MesonInterpreterObject):
super().__init__(subproject=interpreter.subproject)
self.build = build
self.interpreter = interpreter
- self.methods.update({'add_devenv': self.add_devenv_method,
- 'add_dist_script': self.add_dist_script_method,
- 'add_install_script': self.add_install_script_method,
- 'add_postconf_script': self.add_postconf_script_method,
- 'backend': self.backend_method,
- 'build_options': self.build_options_method,
- 'build_root': self.build_root_method,
- 'can_run_host_binaries': self.can_run_host_binaries_method,
- 'current_source_dir': self.current_source_dir_method,
- 'current_build_dir': self.current_build_dir_method,
- 'get_compiler': self.get_compiler_method,
- 'get_cross_property': self.get_cross_property_method,
- 'get_external_property': self.get_external_property_method,
- 'global_build_root': self.global_build_root_method,
- 'global_source_root': self.global_source_root_method,
- 'has_exe_wrapper': self.has_exe_wrapper_method,
- 'has_external_property': self.has_external_property_method,
- 'install_dependency_manifest': self.install_dependency_manifest_method,
- 'is_cross_build': self.is_cross_build_method,
- 'is_subproject': self.is_subproject_method,
- 'is_unity': self.is_unity_method,
- 'override_dependency': self.override_dependency_method,
- 'override_find_program': self.override_find_program_method,
- 'project_build_root': self.project_build_root_method,
- 'project_license': self.project_license_method,
- 'project_license_files': self.project_license_files_method,
- 'project_name': self.project_name_method,
- 'project_source_root': self.project_source_root_method,
- 'project_version': self.project_version_method,
- 'source_root': self.source_root_method,
- 'version': self.version_method,
- })
def _find_source_script(
self, name: str, prog: T.Union[str, mesonlib.File, build.Executable, ExternalProgram],
@@ -157,6 +125,7 @@ class MesonMain(MesonInterpreterObject):
KwargInfo('install_tag', (str, NoneType), since='0.60.0'),
KwargInfo('dry_run', bool, default=False, since='1.1.0'),
)
+ @InterpreterObject.method('add_install_script')
def add_install_script_method(
self,
args: T.Tuple[T.Union[str, mesonlib.File, build.Executable, ExternalProgram],
@@ -175,6 +144,7 @@ class MesonMain(MesonInterpreterObject):
varargs=(str, mesonlib.File, ExternalProgram)
)
@noKwargs
+ @InterpreterObject.method('add_postconf_script')
def add_postconf_script_method(
self,
args: T.Tuple[T.Union[str, mesonlib.File, ExternalProgram],
@@ -191,6 +161,7 @@ class MesonMain(MesonInterpreterObject):
)
@noKwargs
@FeatureNew('meson.add_dist_script', '0.48.0')
+ @InterpreterObject.method('add_dist_script')
def add_dist_script_method(
self,
args: T.Tuple[T.Union[str, mesonlib.File, ExternalProgram],
@@ -208,6 +179,7 @@ class MesonMain(MesonInterpreterObject):
@noPosargs
@noKwargs
+ @InterpreterObject.method('current_source_dir')
def current_source_dir_method(self, args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> str:
src = self.interpreter.environment.source_dir
sub = self.interpreter.subdir
@@ -217,6 +189,7 @@ class MesonMain(MesonInterpreterObject):
@noPosargs
@noKwargs
+ @InterpreterObject.method('current_build_dir')
def current_build_dir_method(self, args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> str:
src = self.interpreter.environment.build_dir
sub = self.interpreter.subdir
@@ -226,24 +199,28 @@ class MesonMain(MesonInterpreterObject):
@noPosargs
@noKwargs
+ @InterpreterObject.method('backend')
def backend_method(self, args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> str:
return self.interpreter.backend.name
@noPosargs
@noKwargs
@FeatureDeprecated('meson.source_root', '0.56.0', 'use meson.project_source_root() or meson.global_source_root() instead.')
+ @InterpreterObject.method('source_root')
def source_root_method(self, args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> str:
return self.interpreter.environment.source_dir
@noPosargs
@noKwargs
@FeatureDeprecated('meson.build_root', '0.56.0', 'use meson.project_build_root() or meson.global_build_root() instead.')
+ @InterpreterObject.method('build_root')
def build_root_method(self, args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> str:
return self.interpreter.environment.build_dir
@noPosargs
@noKwargs
@FeatureNew('meson.project_source_root', '0.56.0')
+ @InterpreterObject.method('project_source_root')
def project_source_root_method(self, args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> str:
src = self.interpreter.environment.source_dir
sub = self.interpreter.root_subdir
@@ -254,6 +231,7 @@ class MesonMain(MesonInterpreterObject):
@noPosargs
@noKwargs
@FeatureNew('meson.project_build_root', '0.56.0')
+ @InterpreterObject.method('project_build_root')
def project_build_root_method(self, args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> str:
src = self.interpreter.environment.build_dir
sub = self.interpreter.root_subdir
@@ -264,24 +242,28 @@ class MesonMain(MesonInterpreterObject):
@noPosargs
@noKwargs
@FeatureNew('meson.global_source_root', '0.58.0')
+ @InterpreterObject.method('global_source_root')
def global_source_root_method(self, args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> str:
return self.interpreter.environment.source_dir
@noPosargs
@noKwargs
@FeatureNew('meson.global_build_root', '0.58.0')
+ @InterpreterObject.method('global_build_root')
def global_build_root_method(self, args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> str:
return self.interpreter.environment.build_dir
@noPosargs
@noKwargs
@FeatureDeprecated('meson.has_exe_wrapper', '0.55.0', 'use meson.can_run_host_binaries instead.')
+ @InterpreterObject.method('has_exe_wrapper')
def has_exe_wrapper_method(self, args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> bool:
return self._can_run_host_binaries_impl()
@noPosargs
@noKwargs
@FeatureNew('meson.can_run_host_binaries', '0.55.0')
+ @InterpreterObject.method('can_run_host_binaries')
def can_run_host_binaries_method(self, args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> bool:
return self._can_run_host_binaries_impl()
@@ -294,11 +276,13 @@ class MesonMain(MesonInterpreterObject):
@noPosargs
@noKwargs
+ @InterpreterObject.method('is_cross_build')
def is_cross_build_method(self, args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> bool:
return self.build.environment.is_cross_build()
@typed_pos_args('meson.get_compiler', str)
@typed_kwargs('meson.get_compiler', NATIVE_KW)
+ @InterpreterObject.method('get_compiler')
def get_compiler_method(self, args: T.Tuple[str], kwargs: 'NativeKW') -> 'Compiler':
cname = args[0]
for_machine = kwargs['native']
@@ -310,23 +294,27 @@ class MesonMain(MesonInterpreterObject):
@noPosargs
@noKwargs
+ @InterpreterObject.method('is_unity')
def is_unity_method(self, args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> bool:
optval = self.interpreter.environment.coredata.optstore.get_value_for(OptionKey('unity'))
return optval == 'on' or (optval == 'subprojects' and self.interpreter.is_subproject())
@noPosargs
@noKwargs
+ @InterpreterObject.method('is_subproject')
def is_subproject_method(self, args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> bool:
return self.interpreter.is_subproject()
@typed_pos_args('meson.install_dependency_manifest', str)
@noKwargs
+ @InterpreterObject.method('install_dependency_manifest')
def install_dependency_manifest_method(self, args: T.Tuple[str], kwargs: 'TYPE_kwargs') -> None:
self.build.dep_manifest_name = args[0]
@FeatureNew('meson.override_find_program', '0.46.0')
@typed_pos_args('meson.override_find_program', str, (mesonlib.File, ExternalProgram, build.Executable))
@noKwargs
+ @InterpreterObject.method('override_find_program')
def override_find_program_method(self, args: T.Tuple[str, T.Union[mesonlib.File, ExternalProgram, build.Executable]], kwargs: 'TYPE_kwargs') -> None:
name, exe = args
if isinstance(exe, mesonlib.File):
@@ -335,6 +323,8 @@ class MesonMain(MesonInterpreterObject):
if not os.path.exists(abspath):
raise InterpreterException(f'Tried to override {name} with a file that does not exist.')
exe = OverrideProgram(name, self.interpreter.project_version, command=[abspath])
+ elif isinstance(exe, build.Executable):
+ exe = build.OverrideExecutable(exe, self.interpreter.project_version)
self.interpreter.add_find_program_override(name, exe)
@typed_kwargs(
@@ -344,6 +334,7 @@ class MesonMain(MesonInterpreterObject):
)
@typed_pos_args('meson.override_dependency', str, dependencies.Dependency)
@FeatureNew('meson.override_dependency', '0.54.0')
+ @InterpreterObject.method('override_dependency')
def override_dependency_method(self, args: T.Tuple[str, dependencies.Dependency], kwargs: 'FuncOverrideDependency') -> None:
name, dep = args
if not name:
@@ -409,28 +400,33 @@ class MesonMain(MesonInterpreterObject):
@noPosargs
@noKwargs
+ @InterpreterObject.method('project_version')
def project_version_method(self, args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> str:
return self.build.dep_manifest[self.interpreter.active_projectname].version
@FeatureNew('meson.project_license()', '0.45.0')
@noPosargs
@noKwargs
+ @InterpreterObject.method('project_license')
def project_license_method(self, args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> T.List[str]:
return self.build.dep_manifest[self.interpreter.active_projectname].license
@FeatureNew('meson.project_license_files()', '1.1.0')
@noPosargs
@noKwargs
+ @InterpreterObject.method('project_license_files')
def project_license_files_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> T.List[mesonlib.File]:
return [l[1] for l in self.build.dep_manifest[self.interpreter.active_projectname].license_files]
@noPosargs
@noKwargs
+ @InterpreterObject.method('version')
def version_method(self, args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> MesonVersionString:
return MesonVersionString(self.interpreter.coredata.version)
@noPosargs
@noKwargs
+ @InterpreterObject.method('project_name')
def project_name_method(self, args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> str:
return self.interpreter.active_projectname
@@ -447,6 +443,7 @@ class MesonMain(MesonInterpreterObject):
@FeatureDeprecated('meson.get_cross_property', '0.58.0', 'Use meson.get_external_property() instead')
@typed_pos_args('meson.get_cross_property', str, optargs=[object])
@noKwargs
+ @InterpreterObject.method('get_cross_property')
def get_cross_property_method(self, args: T.Tuple[str, T.Optional[object]], kwargs: 'TYPE_kwargs') -> object:
propname, fallback = args
return self.__get_external_property_impl(propname, fallback, MachineChoice.HOST)
@@ -455,6 +452,7 @@ class MesonMain(MesonInterpreterObject):
@FeatureNew('meson.get_external_property', '0.54.0')
@typed_pos_args('meson.get_external_property', str, optargs=[object])
@typed_kwargs('meson.get_external_property', NATIVE_KW)
+ @InterpreterObject.method('get_external_property')
def get_external_property_method(self, args: T.Tuple[str, T.Optional[object]], kwargs: 'NativeKW') -> object:
propname, fallback = args
return self.__get_external_property_impl(propname, fallback, kwargs['native'])
@@ -462,6 +460,7 @@ class MesonMain(MesonInterpreterObject):
@FeatureNew('meson.has_external_property', '0.58.0')
@typed_pos_args('meson.has_external_property', str)
@typed_kwargs('meson.has_external_property', NATIVE_KW)
+ @InterpreterObject.method('has_external_property')
def has_external_property_method(self, args: T.Tuple[str], kwargs: 'NativeKW') -> bool:
prop_name = args[0]
return prop_name in self.interpreter.environment.properties[kwargs['native']]
@@ -469,6 +468,7 @@ class MesonMain(MesonInterpreterObject):
@FeatureNew('add_devenv', '0.58.0')
@typed_kwargs('environment', ENV_METHOD_KW, ENV_SEPARATOR_KW.evolve(since='0.62.0'))
@typed_pos_args('add_devenv', (str, list, dict, mesonlib.EnvironmentVariables))
+ @InterpreterObject.method('add_devenv')
def add_devenv_method(self, args: T.Tuple[T.Union[str, list, dict, mesonlib.EnvironmentVariables]],
kwargs: 'AddDevenvKW') -> None:
env = args[0]
@@ -482,6 +482,7 @@ class MesonMain(MesonInterpreterObject):
@noPosargs
@noKwargs
@FeatureNew('meson.build_options', '1.1.0')
+ @InterpreterObject.method('build_options')
def build_options_method(self, args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> str:
options = self.interpreter.user_defined_options
if options is None:
diff --git a/mesonbuild/interpreter/primitives/array.py b/mesonbuild/interpreter/primitives/array.py
index b42ddea..d0a2441 100644
--- a/mesonbuild/interpreter/primitives/array.py
+++ b/mesonbuild/interpreter/primitives/array.py
@@ -5,9 +5,10 @@ from __future__ import annotations
import typing as T
from ...interpreterbase import (
- ObjectHolder,
+ InterpreterObject,
IterableObject,
MesonOperator,
+ ObjectHolder,
typed_operator,
noKwargs,
noPosargs,
@@ -22,31 +23,16 @@ from ...interpreterbase import (
from ...mparser import PlusAssignmentNode
if T.TYPE_CHECKING:
- # Object holders need the actual interpreter
- from ...interpreter import Interpreter
from ...interpreterbase import TYPE_kwargs
class ArrayHolder(ObjectHolder[T.List[TYPE_var]], IterableObject):
- def __init__(self, obj: T.List[TYPE_var], interpreter: 'Interpreter') -> None:
- super().__init__(obj, interpreter)
- self.methods.update({
- 'contains': self.contains_method,
- 'length': self.length_method,
- 'get': self.get_method,
- })
-
- self.trivial_operators.update({
- MesonOperator.EQUALS: (list, lambda x: self.held_object == x),
- MesonOperator.NOT_EQUALS: (list, lambda x: self.held_object != x),
- MesonOperator.IN: (object, lambda x: x in self.held_object),
- MesonOperator.NOT_IN: (object, lambda x: x not in self.held_object),
- })
-
- # Use actual methods for functions that require additional checks
- self.operators.update({
- MesonOperator.PLUS: self.op_plus,
- MesonOperator.INDEX: self.op_index,
- })
+ # Operators that only require type checks
+ TRIVIAL_OPERATORS = {
+ MesonOperator.EQUALS: (list, lambda obj, x: obj.held_object == x),
+ MesonOperator.NOT_EQUALS: (list, lambda obj, x: obj.held_object != x),
+ MesonOperator.IN: (object, lambda obj, x: x in obj.held_object),
+ MesonOperator.NOT_IN: (object, lambda obj, x: x not in obj.held_object),
+ }
def display_name(self) -> str:
return 'array'
@@ -63,6 +49,7 @@ class ArrayHolder(ObjectHolder[T.List[TYPE_var]], IterableObject):
@noArgsFlattening
@noKwargs
@typed_pos_args('array.contains', object)
+ @InterpreterObject.method('contains')
def contains_method(self, args: T.Tuple[object], kwargs: TYPE_kwargs) -> bool:
def check_contains(el: T.List[TYPE_var]) -> bool:
for element in el:
@@ -77,12 +64,14 @@ class ArrayHolder(ObjectHolder[T.List[TYPE_var]], IterableObject):
@noKwargs
@noPosargs
+ @InterpreterObject.method('length')
def length_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> int:
return len(self.held_object)
@noArgsFlattening
@noKwargs
@typed_pos_args('array.get', int, optargs=[object])
+ @InterpreterObject.method('get')
def get_method(self, args: T.Tuple[int, T.Optional[TYPE_var]], kwargs: TYPE_kwargs) -> TYPE_var:
index = args[0]
if index < -len(self.held_object) or index >= len(self.held_object):
@@ -92,6 +81,7 @@ class ArrayHolder(ObjectHolder[T.List[TYPE_var]], IterableObject):
return self.held_object[index]
@typed_operator(MesonOperator.PLUS, object)
+ @InterpreterObject.operator(MesonOperator.PLUS)
def op_plus(self, other: TYPE_var) -> T.List[TYPE_var]:
if not isinstance(other, list):
if not isinstance(self.current_node, PlusAssignmentNode):
@@ -101,8 +91,23 @@ class ArrayHolder(ObjectHolder[T.List[TYPE_var]], IterableObject):
return self.held_object + other
@typed_operator(MesonOperator.INDEX, int)
+ @InterpreterObject.operator(MesonOperator.INDEX)
def op_index(self, other: int) -> TYPE_var:
try:
return self.held_object[other]
except IndexError:
raise InvalidArguments(f'Index {other} out of bounds of array of size {len(self.held_object)}.')
+
+ @noPosargs
+ @noKwargs
+ @FeatureNew('array.flatten', '1.9.0')
+ @InterpreterObject.method('flatten')
+ def flatten_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> TYPE_var:
+ def flatten(obj: TYPE_var) -> T.Iterable[TYPE_var]:
+ if isinstance(obj, list):
+ for o in obj:
+ yield from flatten(o)
+ else:
+ yield obj
+
+ return list(flatten(self.held_object))
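
The class-level TRIVIAL_OPERATORS table above replaces the per-instance trivial_operators dict, so the callbacks are stored unbound and the dispatcher (see the interpreterbase/baseobjects.py hunk below) passes the holder explicitly. The new array.flatten() method recursively yields scalars, so ['a', ['b', ['c']]] becomes ['a', 'b', 'c'] in Meson code. A minimal sketch of how one table entry is evaluated, assuming an ArrayHolder instance named holder:

    # Each entry maps an operator to (type guard, unbound callback); the
    # callback takes the holder as its first argument because the table is
    # built at class-definition time, before any instance exists.
    guard, func = ArrayHolder.TRIVIAL_OPERATORS[MesonOperator.IN]
    assert guard is object        # the operand may be of any type
    func(holder, 'x')             # same as: 'x' in holder.held_object
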
diff --git a/mesonbuild/interpreter/primitives/boolean.py b/mesonbuild/interpreter/primitives/boolean.py
index 4b49caf..eb01b9f 100644
--- a/mesonbuild/interpreter/primitives/boolean.py
+++ b/mesonbuild/interpreter/primitives/boolean.py
@@ -3,8 +3,9 @@
from __future__ import annotations
from ...interpreterbase import (
- ObjectHolder,
+ InterpreterObject,
MesonOperator,
+ ObjectHolder,
typed_pos_args,
noKwargs,
noPosargs,
@@ -15,35 +16,28 @@ from ...interpreterbase import (
import typing as T
if T.TYPE_CHECKING:
- # Object holders need the actual interpreter
- from ...interpreter import Interpreter
from ...interpreterbase import TYPE_var, TYPE_kwargs
class BooleanHolder(ObjectHolder[bool]):
- def __init__(self, obj: bool, interpreter: 'Interpreter') -> None:
- super().__init__(obj, interpreter)
- self.methods.update({
- 'to_int': self.to_int_method,
- 'to_string': self.to_string_method,
- })
-
- self.trivial_operators.update({
- MesonOperator.BOOL: (None, lambda x: self.held_object),
- MesonOperator.NOT: (None, lambda x: not self.held_object),
- MesonOperator.EQUALS: (bool, lambda x: self.held_object == x),
- MesonOperator.NOT_EQUALS: (bool, lambda x: self.held_object != x),
- })
+ TRIVIAL_OPERATORS = {
+ MesonOperator.BOOL: (None, lambda obj, x: obj.held_object),
+ MesonOperator.NOT: (None, lambda obj, x: not obj.held_object),
+ MesonOperator.EQUALS: (bool, lambda obj, x: obj.held_object == x),
+ MesonOperator.NOT_EQUALS: (bool, lambda obj, x: obj.held_object != x),
+ }
def display_name(self) -> str:
return 'bool'
@noKwargs
@noPosargs
+ @InterpreterObject.method('to_int')
def to_int_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> int:
return 1 if self.held_object else 0
@noKwargs
@typed_pos_args('bool.to_string', optargs=[str, str])
+ @InterpreterObject.method('to_string')
def to_string_method(self, args: T.Tuple[T.Optional[str], T.Optional[str]], kwargs: TYPE_kwargs) -> str:
true_str = args[0] or 'true'
false_str = args[1] or 'false'
diff --git a/mesonbuild/interpreter/primitives/dict.py b/mesonbuild/interpreter/primitives/dict.py
index ab4c15f..d641fa8 100644
--- a/mesonbuild/interpreter/primitives/dict.py
+++ b/mesonbuild/interpreter/primitives/dict.py
@@ -5,9 +5,10 @@ from __future__ import annotations
import typing as T
from ...interpreterbase import (
- ObjectHolder,
+ InterpreterObject,
IterableObject,
MesonOperator,
+ ObjectHolder,
typed_operator,
noKwargs,
noPosargs,
@@ -20,34 +21,20 @@ from ...interpreterbase import (
)
if T.TYPE_CHECKING:
- # Object holders need the actual interpreter
- from ...interpreter import Interpreter
from ...interpreterbase import TYPE_kwargs
class DictHolder(ObjectHolder[T.Dict[str, TYPE_var]], IterableObject):
- def __init__(self, obj: T.Dict[str, TYPE_var], interpreter: 'Interpreter') -> None:
- super().__init__(obj, interpreter)
- self.methods.update({
- 'has_key': self.has_key_method,
- 'keys': self.keys_method,
- 'get': self.get_method,
- })
-
- self.trivial_operators.update({
- # Arithmetic
- MesonOperator.PLUS: (dict, lambda x: {**self.held_object, **x}),
-
- # Comparison
- MesonOperator.EQUALS: (dict, lambda x: self.held_object == x),
- MesonOperator.NOT_EQUALS: (dict, lambda x: self.held_object != x),
- MesonOperator.IN: (str, lambda x: x in self.held_object),
- MesonOperator.NOT_IN: (str, lambda x: x not in self.held_object),
- })
-
- # Use actual methods for functions that require additional checks
- self.operators.update({
- MesonOperator.INDEX: self.op_index,
- })
+ # Operators that only require type checks
+ TRIVIAL_OPERATORS = {
+ # Arithmetic
+ MesonOperator.PLUS: (dict, lambda obj, x: {**obj.held_object, **x}),
+
+ # Comparison
+ MesonOperator.EQUALS: (dict, lambda obj, x: obj.held_object == x),
+ MesonOperator.NOT_EQUALS: (dict, lambda obj, x: obj.held_object != x),
+ MesonOperator.IN: (str, lambda obj, x: x in obj.held_object),
+ MesonOperator.NOT_IN: (str, lambda obj, x: x not in obj.held_object),
+ }
def display_name(self) -> str:
return 'dict'
@@ -63,17 +50,20 @@ class DictHolder(ObjectHolder[T.Dict[str, TYPE_var]], IterableObject):
@noKwargs
@typed_pos_args('dict.has_key', str)
+ @InterpreterObject.method('has_key')
def has_key_method(self, args: T.Tuple[str], kwargs: TYPE_kwargs) -> bool:
return args[0] in self.held_object
@noKwargs
@noPosargs
+ @InterpreterObject.method('keys')
def keys_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> T.List[str]:
return sorted(self.held_object)
@noArgsFlattening
@noKwargs
@typed_pos_args('dict.get', str, optargs=[object])
+ @InterpreterObject.method('get')
def get_method(self, args: T.Tuple[str, T.Optional[TYPE_var]], kwargs: TYPE_kwargs) -> TYPE_var:
if args[0] in self.held_object:
return self.held_object[args[0]]
@@ -82,6 +72,7 @@ class DictHolder(ObjectHolder[T.Dict[str, TYPE_var]], IterableObject):
raise InvalidArguments(f'Key {args[0]!r} is not in the dictionary.')
@typed_operator(MesonOperator.INDEX, str)
+ @InterpreterObject.operator(MesonOperator.INDEX)
def op_index(self, other: str) -> TYPE_var:
if other not in self.held_object:
raise InvalidArguments(f'Key {other} is not in the dictionary.')
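
One behavioral detail worth noting in the table above: the PLUS entry merges with right-hand precedence, matching Meson's existing d1 + d2 semantics. A sketch, assuming a hypothetical helper holder_of() that wraps a plain dict in a DictHolder:

    merge = DictHolder.TRIVIAL_OPERATORS[MesonOperator.PLUS][1]
    merge(holder_of({'a': 1, 'b': 1}), {'b': 2})   # -> {'a': 1, 'b': 2}; keys on the right win
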
diff --git a/mesonbuild/interpreter/primitives/integer.py b/mesonbuild/interpreter/primitives/integer.py
index cdf2355..c59ea6e 100644
--- a/mesonbuild/interpreter/primitives/integer.py
+++ b/mesonbuild/interpreter/primitives/integer.py
@@ -3,47 +3,33 @@
from __future__ import annotations
from ...interpreterbase import (
- FeatureBroken, InvalidArguments, MesonOperator, ObjectHolder, KwargInfo,
+ InterpreterObject, MesonOperator, ObjectHolder,
+ FeatureBroken, InvalidArguments, KwargInfo,
noKwargs, noPosargs, typed_operator, typed_kwargs
)
import typing as T
if T.TYPE_CHECKING:
- # Object holders need the actual interpreter
- from ...interpreter import Interpreter
from ...interpreterbase import TYPE_var, TYPE_kwargs
class IntegerHolder(ObjectHolder[int]):
- def __init__(self, obj: int, interpreter: 'Interpreter') -> None:
- super().__init__(obj, interpreter)
- self.methods.update({
- 'is_even': self.is_even_method,
- 'is_odd': self.is_odd_method,
- 'to_string': self.to_string_method,
- })
+ # Operators that only require type checks
+ TRIVIAL_OPERATORS = {
+ # Arithmetic
+ MesonOperator.UMINUS: (None, lambda obj, x: -obj.held_object),
+ MesonOperator.PLUS: (int, lambda obj, x: obj.held_object + x),
+ MesonOperator.MINUS: (int, lambda obj, x: obj.held_object - x),
+ MesonOperator.TIMES: (int, lambda obj, x: obj.held_object * x),
- self.trivial_operators.update({
- # Arithmetic
- MesonOperator.UMINUS: (None, lambda x: -self.held_object),
- MesonOperator.PLUS: (int, lambda x: self.held_object + x),
- MesonOperator.MINUS: (int, lambda x: self.held_object - x),
- MesonOperator.TIMES: (int, lambda x: self.held_object * x),
-
- # Comparison
- MesonOperator.EQUALS: (int, lambda x: self.held_object == x),
- MesonOperator.NOT_EQUALS: (int, lambda x: self.held_object != x),
- MesonOperator.GREATER: (int, lambda x: self.held_object > x),
- MesonOperator.LESS: (int, lambda x: self.held_object < x),
- MesonOperator.GREATER_EQUALS: (int, lambda x: self.held_object >= x),
- MesonOperator.LESS_EQUALS: (int, lambda x: self.held_object <= x),
- })
-
- # Use actual methods for functions that require additional checks
- self.operators.update({
- MesonOperator.DIV: self.op_div,
- MesonOperator.MOD: self.op_mod,
- })
+ # Comparison
+ MesonOperator.EQUALS: (int, lambda obj, x: obj.held_object == x),
+ MesonOperator.NOT_EQUALS: (int, lambda obj, x: obj.held_object != x),
+ MesonOperator.GREATER: (int, lambda obj, x: obj.held_object > x),
+ MesonOperator.LESS: (int, lambda obj, x: obj.held_object < x),
+ MesonOperator.GREATER_EQUALS: (int, lambda obj, x: obj.held_object >= x),
+ MesonOperator.LESS_EQUALS: (int, lambda obj, x: obj.held_object <= x),
+ }
def display_name(self) -> str:
return 'int'
@@ -57,11 +43,13 @@ class IntegerHolder(ObjectHolder[int]):
@noKwargs
@noPosargs
+ @InterpreterObject.method('is_even')
def is_even_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> bool:
return self.held_object % 2 == 0
@noKwargs
@noPosargs
+ @InterpreterObject.method('is_odd')
def is_odd_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> bool:
return self.held_object % 2 != 0
@@ -70,16 +58,19 @@ class IntegerHolder(ObjectHolder[int]):
KwargInfo('fill', int, default=0, since='1.3.0')
)
@noPosargs
+ @InterpreterObject.method('to_string')
def to_string_method(self, args: T.List[TYPE_var], kwargs: T.Dict[str, T.Any]) -> str:
return str(self.held_object).zfill(kwargs['fill'])
@typed_operator(MesonOperator.DIV, int)
+ @InterpreterObject.operator(MesonOperator.DIV)
def op_div(self, other: int) -> int:
if other == 0:
raise InvalidArguments('Tried to divide by 0')
return self.held_object // other
@typed_operator(MesonOperator.MOD, int)
+ @InterpreterObject.operator(MesonOperator.MOD)
def op_mod(self, other: int) -> int:
if other == 0:
raise InvalidArguments('Tried to divide by 0')
diff --git a/mesonbuild/interpreter/primitives/range.py b/mesonbuild/interpreter/primitives/range.py
index 23d5617..1aceb68 100644
--- a/mesonbuild/interpreter/primitives/range.py
+++ b/mesonbuild/interpreter/primitives/range.py
@@ -5,8 +5,9 @@ from __future__ import annotations
import typing as T
from ...interpreterbase import (
- MesonInterpreterObject,
+ InterpreterObject,
IterableObject,
+ MesonInterpreterObject,
MesonOperator,
InvalidArguments,
)
@@ -18,10 +19,8 @@ class RangeHolder(MesonInterpreterObject, IterableObject):
def __init__(self, start: int, stop: int, step: int, *, subproject: 'SubProject') -> None:
super().__init__(subproject=subproject)
self.range = range(start, stop, step)
- self.operators.update({
- MesonOperator.INDEX: self.op_index,
- })
+ @InterpreterObject.operator(MesonOperator.INDEX)
def op_index(self, other: int) -> int:
try:
return self.range[other]
diff --git a/mesonbuild/interpreter/primitives/string.py b/mesonbuild/interpreter/primitives/string.py
index a224dfa..49dd716 100644
--- a/mesonbuild/interpreter/primitives/string.py
+++ b/mesonbuild/interpreter/primitives/string.py
@@ -9,8 +9,9 @@ import typing as T
from ...mesonlib import version_compare, version_compare_many
from ...interpreterbase import (
- ObjectHolder,
+ InterpreterObject,
MesonOperator,
+ ObjectHolder,
FeatureNew,
typed_operator,
noArgsFlattening,
@@ -24,73 +25,47 @@ from ...interpreterbase import (
if T.TYPE_CHECKING:
- # Object holders need the actual interpreter
- from ...interpreter import Interpreter
from ...interpreterbase import TYPE_var, TYPE_kwargs
class StringHolder(ObjectHolder[str]):
- def __init__(self, obj: str, interpreter: 'Interpreter') -> None:
- super().__init__(obj, interpreter)
- self.methods.update({
- 'contains': self.contains_method,
- 'startswith': self.startswith_method,
- 'endswith': self.endswith_method,
- 'format': self.format_method,
- 'join': self.join_method,
- 'replace': self.replace_method,
- 'split': self.split_method,
- 'splitlines': self.splitlines_method,
- 'strip': self.strip_method,
- 'substring': self.substring_method,
- 'to_int': self.to_int_method,
- 'to_lower': self.to_lower_method,
- 'to_upper': self.to_upper_method,
- 'underscorify': self.underscorify_method,
- 'version_compare': self.version_compare_method,
- })
-
- self.trivial_operators.update({
- # Arithmetic
- MesonOperator.PLUS: (str, lambda x: self.held_object + x),
-
- # Comparison
- MesonOperator.EQUALS: (str, lambda x: self.held_object == x),
- MesonOperator.NOT_EQUALS: (str, lambda x: self.held_object != x),
- MesonOperator.GREATER: (str, lambda x: self.held_object > x),
- MesonOperator.LESS: (str, lambda x: self.held_object < x),
- MesonOperator.GREATER_EQUALS: (str, lambda x: self.held_object >= x),
- MesonOperator.LESS_EQUALS: (str, lambda x: self.held_object <= x),
- })
-
- # Use actual methods for functions that require additional checks
- self.operators.update({
- MesonOperator.DIV: self.op_div,
- MesonOperator.INDEX: self.op_index,
- MesonOperator.IN: self.op_in,
- MesonOperator.NOT_IN: self.op_notin,
- })
+ TRIVIAL_OPERATORS = {
+ # Arithmetic
+ MesonOperator.PLUS: (str, lambda obj, x: obj.held_object + x),
+
+ # Comparison
+ MesonOperator.EQUALS: (str, lambda obj, x: obj.held_object == x),
+ MesonOperator.NOT_EQUALS: (str, lambda obj, x: obj.held_object != x),
+ MesonOperator.GREATER: (str, lambda obj, x: obj.held_object > x),
+ MesonOperator.LESS: (str, lambda obj, x: obj.held_object < x),
+ MesonOperator.GREATER_EQUALS: (str, lambda obj, x: obj.held_object >= x),
+ MesonOperator.LESS_EQUALS: (str, lambda obj, x: obj.held_object <= x),
+ }
def display_name(self) -> str:
return 'str'
@noKwargs
@typed_pos_args('str.contains', str)
+ @InterpreterObject.method('contains')
def contains_method(self, args: T.Tuple[str], kwargs: TYPE_kwargs) -> bool:
return self.held_object.find(args[0]) >= 0
@noKwargs
@typed_pos_args('str.startswith', str)
+ @InterpreterObject.method('startswith')
def startswith_method(self, args: T.Tuple[str], kwargs: TYPE_kwargs) -> bool:
return self.held_object.startswith(args[0])
@noKwargs
@typed_pos_args('str.endswith', str)
+ @InterpreterObject.method('endswith')
def endswith_method(self, args: T.Tuple[str], kwargs: TYPE_kwargs) -> bool:
return self.held_object.endswith(args[0])
@noArgsFlattening
@noKwargs
@typed_pos_args('str.format', varargs=object)
+ @InterpreterObject.method('format')
def format_method(self, args: T.Tuple[T.List[TYPE_var]], kwargs: TYPE_kwargs) -> str:
arg_strings: T.List[str] = []
for arg in args[0]:
@@ -111,27 +86,32 @@ class StringHolder(ObjectHolder[str]):
@noKwargs
@noPosargs
@FeatureNew('str.splitlines', '1.2.0')
+ @InterpreterObject.method('splitlines')
def splitlines_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> T.List[str]:
return self.held_object.splitlines()
@noKwargs
@typed_pos_args('str.join', varargs=str)
+ @InterpreterObject.method('join')
def join_method(self, args: T.Tuple[T.List[str]], kwargs: TYPE_kwargs) -> str:
return self.held_object.join(args[0])
@noKwargs
@FeatureNew('str.replace', '0.58.0')
@typed_pos_args('str.replace', str, str)
+ @InterpreterObject.method('replace')
def replace_method(self, args: T.Tuple[str, str], kwargs: TYPE_kwargs) -> str:
return self.held_object.replace(args[0], args[1])
@noKwargs
@typed_pos_args('str.split', optargs=[str])
+ @InterpreterObject.method('split')
def split_method(self, args: T.Tuple[T.Optional[str]], kwargs: TYPE_kwargs) -> T.List[str]:
return self.held_object.split(args[0])
@noKwargs
@typed_pos_args('str.strip', optargs=[str])
+ @InterpreterObject.method('strip')
def strip_method(self, args: T.Tuple[T.Optional[str]], kwargs: TYPE_kwargs) -> str:
if args[0]:
FeatureNew.single_use('str.strip with a positional argument', '0.43.0', self.subproject, location=self.current_node)
@@ -140,6 +120,7 @@ class StringHolder(ObjectHolder[str]):
@noKwargs
@FeatureNew('str.substring', '0.56.0')
@typed_pos_args('str.substring', optargs=[int, int])
+ @InterpreterObject.method('substring')
def substring_method(self, args: T.Tuple[T.Optional[int], T.Optional[int]], kwargs: TYPE_kwargs) -> str:
start = args[0] if args[0] is not None else 0
end = args[1] if args[1] is not None else len(self.held_object)
@@ -147,6 +128,7 @@ class StringHolder(ObjectHolder[str]):
@noKwargs
@noPosargs
+ @InterpreterObject.method('to_int')
def to_int_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> int:
try:
return int(self.held_object)
@@ -155,20 +137,24 @@ class StringHolder(ObjectHolder[str]):
@noKwargs
@noPosargs
+ @InterpreterObject.method('to_lower')
def to_lower_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str:
return self.held_object.lower()
@noKwargs
@noPosargs
+ @InterpreterObject.method('to_upper')
def to_upper_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str:
return self.held_object.upper()
@noKwargs
@noPosargs
+ @InterpreterObject.method('underscorify')
def underscorify_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str:
return re.sub(r'[^a-zA-Z0-9]', '_', self.held_object)
@noKwargs
+ @InterpreterObject.method('version_compare')
@typed_pos_args('str.version_compare', varargs=str, min_varargs=1)
def version_compare_method(self, args: T.Tuple[T.List[str]], kwargs: TYPE_kwargs) -> bool:
if len(args[0]) > 1:
@@ -181,10 +167,12 @@ class StringHolder(ObjectHolder[str]):
@FeatureNew('/ with string arguments', '0.49.0')
@typed_operator(MesonOperator.DIV, str)
+ @InterpreterObject.operator(MesonOperator.DIV)
def op_div(self, other: str) -> str:
return self._op_div(self.held_object, other)
@typed_operator(MesonOperator.INDEX, int)
+ @InterpreterObject.operator(MesonOperator.INDEX)
def op_index(self, other: int) -> str:
try:
return self.held_object[other]
@@ -193,11 +181,13 @@ class StringHolder(ObjectHolder[str]):
@FeatureNew('"in" string operator', '1.0.0')
@typed_operator(MesonOperator.IN, str)
+ @InterpreterObject.operator(MesonOperator.IN)
def op_in(self, other: str) -> bool:
return other in self.held_object
@FeatureNew('"not in" string operator', '1.0.0')
@typed_operator(MesonOperator.NOT_IN, str)
+ @InterpreterObject.operator(MesonOperator.NOT_IN)
def op_notin(self, other: str) -> bool:
return other not in self.held_object
@@ -208,6 +198,7 @@ class MesonVersionString(str):
class MesonVersionStringHolder(StringHolder):
@noKwargs
@typed_pos_args('str.version_compare', str)
+ @InterpreterObject.method('version_compare')
def version_compare_method(self, args: T.Tuple[str], kwargs: TYPE_kwargs) -> bool:
self.interpreter.tmp_meson_version = args[0]
return version_compare(self.held_object, args[0])
@@ -221,6 +212,7 @@ class DependencyVariableString(str):
pass
class DependencyVariableStringHolder(StringHolder):
+ @InterpreterObject.operator(MesonOperator.DIV)
def op_div(self, other: str) -> T.Union[str, DependencyVariableString]:
ret = super().op_div(other)
if '..' in other:
@@ -243,6 +235,7 @@ class OptionString(str):
class OptionStringHolder(StringHolder):
held_object: OptionString
+ @InterpreterObject.operator(MesonOperator.DIV)
def op_div(self, other: str) -> T.Union[str, OptionString]:
ret = super().op_div(other)
name = self._op_div(self.held_object.optname, other)
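
DependencyVariableStringHolder and OptionStringHolder keep their specialized division because the subclass tags op_div again: during __init_subclass__ (see the baseobjects.py hunk below) the inherited OPERATORS table is copied first and the class's own tagged methods are scanned afterwards, so the subclass entry wins. A sketch of the expected state after class creation, under that assumption:

    # Inherited entry in the base class...
    StringHolder.OPERATORS[MesonOperator.DIV] is StringHolder.op_div                # True
    # ...overridden in the subclass by re-tagging the method:
    OptionStringHolder.OPERATORS[MesonOperator.DIV] is OptionStringHolder.op_div    # True
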
diff --git a/mesonbuild/interpreter/type_checking.py b/mesonbuild/interpreter/type_checking.py
index 78938ba..a551d0f 100644
--- a/mesonbuild/interpreter/type_checking.py
+++ b/mesonbuild/interpreter/type_checking.py
@@ -11,10 +11,10 @@ from .. import compilers
from ..build import (CustomTarget, BuildTarget,
CustomTargetIndex, ExtractedObjects, GeneratedList, IncludeDirs,
BothLibraries, SharedLibrary, StaticLibrary, Jar, Executable, StructuredSources)
-from ..options import UserFeatureOption
+from ..options import OptionKey, UserFeatureOption
from ..dependencies import Dependency, InternalDependency
from ..interpreterbase.decorators import KwargInfo, ContainerTypeInfo
-from ..mesonlib import (File, FileMode, MachineChoice, listify, has_path_sep,
+from ..mesonlib import (File, FileMode, MachineChoice, has_path_sep, listify, stringlistify,
EnvironmentVariables)
from ..programs import ExternalProgram
@@ -293,11 +293,22 @@ COMMAND_KW: KwargInfo[T.List[T.Union[str, BuildTarget, CustomTarget, CustomTarge
)
-OVERRIDE_OPTIONS_KW: KwargInfo[T.Union[str, T.Dict[str, ElementaryOptionValues], T.List[str]]] = KwargInfo(
+def _override_options_convertor(raw: T.Union[str, T.List[str], T.Dict[str, ElementaryOptionValues]]) -> T.Dict[str, ElementaryOptionValues]:
+ if isinstance(raw, dict):
+ return raw
+ raw = stringlistify(raw)
+ output: T.Dict[str, ElementaryOptionValues] = {}
+ for each in raw:
+ k, v = split_equal_string(each)
+ output[k] = v
+ return output
+
+OVERRIDE_OPTIONS_KW: KwargInfo[T.Union[str, T.List[str], T.Dict[str, ElementaryOptionValues]]] = KwargInfo(
'override_options',
(str, ContainerTypeInfo(list, str), ContainerTypeInfo(dict, (str, int, bool, list))),
default={},
validator=_options_validator,
+ convertor=_override_options_convertor,
since_values={dict: '1.2.0'},
)
@@ -394,7 +405,13 @@ INCLUDE_DIRECTORIES: KwargInfo[T.List[T.Union[str, IncludeDirs]]] = KwargInfo(
default=[],
)
-DEFAULT_OPTIONS = OVERRIDE_OPTIONS_KW.evolve(name='default_options')
+def _default_options_convertor(raw: T.Union[str, T.List[str], T.Dict[str, ElementaryOptionValues]]) -> T.Dict[OptionKey, ElementaryOptionValues]:
+ d = _override_options_convertor(raw)
+ return {OptionKey.from_string(k): v for k, v in d.items()}
+
+DEFAULT_OPTIONS = OVERRIDE_OPTIONS_KW.evolve(
+ name='default_options',
+ convertor=_default_options_convertor)
ENV_METHOD_KW = KwargInfo('method', str, default='set', since='0.62.0',
validator=in_set_validator({'set', 'prepend', 'append'}))
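
Both convertors normalize the user-facing forms before the interpreter sees them: override_options keeps plain string keys, while default_options parses each key into an OptionKey. A sketch of the two shapes, assuming split_equal_string splits on the first '=' and keeps the value as a string, and that OptionKey.from_string understands the 'subproject:name' form:

    _override_options_convertor(['b_lto=true', 'cpp_std=c++17'])
    # -> {'b_lto': 'true', 'cpp_std': 'c++17'}   (dict input is passed through unchanged)
    _default_options_convertor(['sub:warning_level=3'])
    # -> {OptionKey.from_string('sub:warning_level'): '3'}
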
@@ -616,6 +633,8 @@ _BUILD_TARGET_KWS: T.List[KwargInfo] = [
default={},
since='1.2.0',
),
+ KwargInfo('swift_interoperability_mode', str, default='c', validator=in_set_validator({'c', 'cpp'}), since='1.9.0'),
+ KwargInfo('swift_module_name', str, default='', since='1.9.0'),
KwargInfo('build_rpath', str, default='', since='0.42.0'),
KwargInfo(
'gnu_symbol_visibility',
@@ -848,3 +867,8 @@ PKGCONFIG_DEFINE_KW: KwargInfo = KwargInfo(
default=[],
convertor=_pkgconfig_define_convertor,
)
+
+
+DEPENDENCY_KWS: T.List[KwargInfo] = [
+ DEFAULT_OPTIONS.evolve(since='0.38.0'),
+]
diff --git a/mesonbuild/interpreterbase/__init__.py b/mesonbuild/interpreterbase/__init__.py
index aa38e94..88fa706 100644
--- a/mesonbuild/interpreterbase/__init__.py
+++ b/mesonbuild/interpreterbase/__init__.py
@@ -59,6 +59,9 @@ __all__ = [
'TYPE_HoldableTypes',
'HoldableTypes',
+
+ 'UnknownValue',
+ 'UndefinedVariable',
]
from .baseobjects import (
@@ -81,6 +84,9 @@ from .baseobjects import (
SubProject,
HoldableTypes,
+
+ UnknownValue,
+ UndefinedVariable,
)
from .decorators import (
diff --git a/mesonbuild/interpreterbase/baseobjects.py b/mesonbuild/interpreterbase/baseobjects.py
index a5cccce..c756761 100644
--- a/mesonbuild/interpreterbase/baseobjects.py
+++ b/mesonbuild/interpreterbase/baseobjects.py
@@ -15,16 +15,11 @@ from abc import ABCMeta
from contextlib import AbstractContextManager
if T.TYPE_CHECKING:
- from typing_extensions import Protocol, TypeAlias
+ from typing_extensions import TypeAlias
# Object holders need the actual interpreter
from ..interpreter import Interpreter
- __T = T.TypeVar('__T', bound='TYPE_var', contravariant=True)
-
- class OperatorCall(Protocol[__T]):
- def __call__(self, other: __T) -> 'TYPE_var': ...
-
TV_func = T.TypeVar('TV_func', bound=T.Callable[..., T.Any])
@@ -34,34 +29,85 @@ TYPE_nvar = T.Union[TYPE_var, mparser.BaseNode]
TYPE_kwargs = T.Dict[str, TYPE_var]
TYPE_nkwargs = T.Dict[str, TYPE_nvar]
TYPE_key_resolver = T.Callable[[mparser.BaseNode], str]
+TYPE_op_arg = T.TypeVar('TYPE_op_arg', bound='TYPE_var', contravariant=True)
+TYPE_op_func = T.Callable[[TYPE_op_arg, TYPE_op_arg], TYPE_var]
+TYPE_method_func = T.Callable[['InterpreterObject', T.List[TYPE_var], TYPE_kwargs], TYPE_var]
+
SubProject = T.NewType('SubProject', str)
class InterpreterObject:
+ TRIVIAL_OPERATORS: T.Dict[
+ MesonOperator,
+ T.Tuple[
+ T.Union[T.Type, T.Tuple[T.Type, ...]],
+ TYPE_op_func
+ ]
+ ] = {}
+
+ OPERATORS: T.Dict[MesonOperator, TYPE_op_func] = {}
+
+ METHODS: T.Dict[
+ str,
+ TYPE_method_func,
+ ] = {}
+
+ def __init_subclass__(cls: T.Type[InterpreterObject], **kwargs: T.Any) -> None:
+ super().__init_subclass__(**kwargs)
+ saved_trivial_operators = cls.TRIVIAL_OPERATORS
+
+ cls.METHODS = {}
+ cls.OPERATORS = {}
+ cls.TRIVIAL_OPERATORS = {}
+
+ # Compute inherited operators and methods according to Python's method
+ # resolution order. Reverse the result of mro() so that update() overwrites
+ # entries set by a superclass with those set by the subclass.
+ for superclass in reversed(cls.mro()[1:]):
+ if superclass is InterpreterObject:
+ # InterpreterObject cannot use @InterpreterObject.operator because
+ # __init_subclass__ does not operate on InterpreterObject itself
+ cls.OPERATORS.update({
+ MesonOperator.EQUALS: InterpreterObject.op_equals,
+ MesonOperator.NOT_EQUALS: InterpreterObject.op_not_equals
+ })
+
+ elif issubclass(superclass, InterpreterObject):
+ cls.METHODS.update(superclass.METHODS)
+ cls.OPERATORS.update(superclass.OPERATORS)
+ cls.TRIVIAL_OPERATORS.update(superclass.TRIVIAL_OPERATORS)
+
+ for name, method in cls.__dict__.items():
+ if hasattr(method, 'meson_method'):
+ cls.METHODS[method.meson_method] = method
+ if hasattr(method, 'meson_operator'):
+ cls.OPERATORS[method.meson_operator] = method
+ cls.TRIVIAL_OPERATORS.update(saved_trivial_operators)
+
+ @staticmethod
+ def method(name: str) -> T.Callable[[TV_func], TV_func]:
+ '''Decorator that tags a Python method as the implementation of a method
+ for the Meson interpreter'''
+ def decorator(f: TV_func) -> TV_func:
+ f.meson_method = name # type: ignore[attr-defined]
+ return f
+ return decorator
+
+ @staticmethod
+ def operator(op: MesonOperator) -> T.Callable[[TV_func], TV_func]:
+ '''Decorator that tags a method as the implementation of an operator
+ for the Meson interpreter'''
+ def decorator(f: TV_func) -> TV_func:
+ f.meson_operator = op # type: ignore[attr-defined]
+ return f
+ return decorator
+
def __init__(self, *, subproject: T.Optional['SubProject'] = None) -> None:
- self.methods: T.Dict[
- str,
- T.Callable[[T.List[TYPE_var], TYPE_kwargs], TYPE_var]
- ] = {}
- self.operators: T.Dict[MesonOperator, 'OperatorCall'] = {}
- self.trivial_operators: T.Dict[
- MesonOperator,
- T.Tuple[
- T.Union[T.Type, T.Tuple[T.Type, ...]],
- 'OperatorCall'
- ]
- ] = {}
# Current node set during a method call. This can be used as location
# when printing a warning message during a method call.
self.current_node: mparser.BaseNode = None
self.subproject = subproject or SubProject('')
- # Some default operators supported by all objects
- self.operators.update({
- MesonOperator.EQUALS: self.op_equals,
- MesonOperator.NOT_EQUALS: self.op_not_equals,
- })
-
# The type of the object that can be printed to the user
def display_name(self) -> str:
return type(self).__name__
@@ -72,25 +118,26 @@ class InterpreterObject:
args: T.List[TYPE_var],
kwargs: TYPE_kwargs
) -> TYPE_var:
- if method_name in self.methods:
- method = self.methods[method_name]
+ if method_name in self.METHODS:
+ method = self.METHODS[method_name]
if not getattr(method, 'no-args-flattening', False):
args = flatten(args)
if not getattr(method, 'no-second-level-holder-flattening', False):
args, kwargs = resolve_second_level_holders(args, kwargs)
- return method(args, kwargs)
+ return method(self, args, kwargs)
raise InvalidCode(f'Unknown method "{method_name}" in object {self} of type {type(self).__name__}.')
def operator_call(self, operator: MesonOperator, other: TYPE_var) -> TYPE_var:
- if operator in self.trivial_operators:
- op = self.trivial_operators[operator]
+ if operator in self.TRIVIAL_OPERATORS:
+ op = self.TRIVIAL_OPERATORS[operator]
if op[0] is None and other is not None:
raise MesonBugException(f'The unary operator `{operator.value}` of {self.display_name()} was passed the object {other} of type {type(other).__name__}')
if op[0] is not None and not isinstance(other, op[0]):
raise InvalidArguments(f'The `{operator.value}` operator of {self.display_name()} does not accept objects of type {type(other).__name__} ({other})')
- return op[1](other)
- if operator in self.operators:
- return self.operators[operator](other)
+ return op[1](self, other)
+ if operator in self.OPERATORS:
+ return self.OPERATORS[operator](self, other)
+
raise InvalidCode(f'Object {self} of type {self.display_name()} does not support the `{operator.value}` operator.')
# Default comparison operator support
@@ -121,6 +168,16 @@ class MesonInterpreterObject(InterpreterObject):
class MutableInterpreterObject:
''' Dummy class to mark the object type as mutable '''
+class UnknownValue(MesonInterpreterObject):
+ '''This class is only used for the rewriter/static introspection tool and
+ indicates that a value cannot be determined statically, either because of
+ limitations in our code or because the value differs from machine to
+ machine.'''
+
+class UndefinedVariable(MesonInterpreterObject):
+ '''This class is only used for the rewriter/static introspection tool and
+ represents the `value` a Meson variable has if it was never written to.'''
+
HoldableTypes = (HoldableObject, int, bool, str, list, dict)
TYPE_HoldableTypes = T.Union[TYPE_var, HoldableObject]
InterpreterObjectTypeVar = T.TypeVar('InterpreterObjectTypeVar', bound=TYPE_HoldableTypes)
@@ -142,12 +199,14 @@ class ObjectHolder(InterpreterObject, T.Generic[InterpreterObjectTypeVar]):
return type(self.held_object).__name__
# Override default comparison operators for the held object
+ @InterpreterObject.operator(MesonOperator.EQUALS)
def op_equals(self, other: TYPE_var) -> bool:
# See the comment from InterpreterObject why we are using `type()` here.
if type(self.held_object) is not type(other):
self._throw_comp_exception(other, '==')
return self.held_object == other
+ @InterpreterObject.operator(MesonOperator.NOT_EQUALS)
def op_not_equals(self, other: TYPE_var) -> bool:
if type(self.held_object) is not type(other):
self._throw_comp_exception(other, '!=')
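
The whole refactor hinges on this file: @InterpreterObject.method and @InterpreterObject.operator only tag a function, and __init_subclass__ rebuilds each subclass's METHODS/OPERATORS/TRIVIAL_OPERATORS from the MRO plus the class's own tagged members. A minimal, self-contained sketch of the same pattern (simplified names, not the real Meson classes):

    import typing as T

    class Obj:
        METHODS: T.Dict[str, T.Callable[..., object]] = {}

        def __init_subclass__(cls, **kwargs: T.Any) -> None:
            super().__init_subclass__(**kwargs)
            cls.METHODS = {}
            # Walk the MRO from most-generic to most-derived so that
            # subclass entries overwrite superclass ones.
            for superclass in reversed(cls.mro()[1:]):
                if issubclass(superclass, Obj) and superclass is not Obj:
                    cls.METHODS.update(superclass.METHODS)
            for member in cls.__dict__.values():
                if hasattr(member, 'meson_method'):
                    cls.METHODS[member.meson_method] = member

        @staticmethod
        def method(name: str):
            def decorator(f):
                f.meson_method = name   # tag only; collection happens above
                return f
            return decorator

        def method_call(self, name: str, args: list) -> object:
            # Entries are plain (unbound) functions, hence the explicit self.
            return self.METHODS[name](self, args)

    class Str(Obj):
        def __init__(self, s: str) -> None:
            self.s = s

        @Obj.method('upper')
        def upper_method(self, args: list) -> str:
            return self.s.upper()

    assert Str('meson').method_call('upper', []) == 'MESON'
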
diff --git a/mesonbuild/interpreterbase/decorators.py b/mesonbuild/interpreterbase/decorators.py
index 06cac52..a847689 100644
--- a/mesonbuild/interpreterbase/decorators.py
+++ b/mesonbuild/interpreterbase/decorators.py
@@ -393,7 +393,7 @@ class KwargInfo(T.Generic[_T]):
deprecated_message: T.Union[str, None, _NULL_T] = _NULL,
deprecated_values: T.Union[T.Dict[T.Union[_T, ContainerTypeInfo, type], T.Union[str, T.Tuple[str, str]]], None, _NULL_T] = _NULL,
validator: T.Union[T.Callable[[_T], T.Optional[str]], None, _NULL_T] = _NULL,
- convertor: T.Union[T.Callable[[_T], TYPE_var], None, _NULL_T] = _NULL) -> 'KwargInfo':
+ convertor: T.Union[T.Callable[[_T], object], None, _NULL_T] = _NULL) -> 'KwargInfo':
"""Create a shallow copy of this KwargInfo, with modifications.
This allows us to create a new copy of a KwargInfo with modifications.
diff --git a/mesonbuild/linkers/detect.py b/mesonbuild/linkers/detect.py
index ee9bb08..6fbe6e4 100644
--- a/mesonbuild/linkers/detect.py
+++ b/mesonbuild/linkers/detect.py
@@ -39,7 +39,7 @@ def guess_win_linker(env: 'Environment', compiler: T.List[str], comp_class: T.Ty
use_linker_prefix: bool = True, invoked_directly: bool = True,
extra_args: T.Optional[T.List[str]] = None) -> 'DynamicLinker':
from . import linkers
- env.coredata.add_lang_args(comp_class.language, comp_class, for_machine, env)
+ env.add_lang_args(comp_class.language, comp_class, for_machine)
if invoked_directly or comp_class.get_argument_syntax() == 'msvc':
rsp_syntax = RSPFileSyntax.MSVC
@@ -128,7 +128,7 @@ def guess_nix_linker(env: 'Environment', compiler: T.List[str], comp_class: T.Ty
:extra_args: Any additional arguments required (such as a source file)
"""
from . import linkers
- env.coredata.add_lang_args(comp_class.language, comp_class, for_machine, env)
+ env.add_lang_args(comp_class.language, comp_class, for_machine)
extra_args = extra_args or []
system = env.machines[for_machine].system
@@ -166,6 +166,9 @@ def guess_nix_linker(env: 'Environment', compiler: T.List[str], comp_class: T.Ty
linker = lld_cls(
compiler, for_machine, comp_class.LINKER_PREFIX, override, system=system, version=v)
+ elif o.startswith("eld"):
+ linker = linkers.ELDDynamicLinker(
+ compiler, for_machine, comp_class.LINKER_PREFIX, override, version=v)
elif 'Snapdragon' in e and 'LLVM' in e:
linker = linkers.QualcommLLVMDynamicLinker(
compiler, for_machine, comp_class.LINKER_PREFIX, override, version=v)
diff --git a/mesonbuild/linkers/linkers.py b/mesonbuild/linkers/linkers.py
index 59f60e0..c528db7 100644
--- a/mesonbuild/linkers/linkers.py
+++ b/mesonbuild/linkers/linkers.py
@@ -65,9 +65,8 @@ class StaticLinker:
def get_coverage_link_args(self) -> T.List[str]:
return []
- def build_rpath_args(self, env: 'Environment', build_dir: str, from_dir: str,
- rpath_paths: T.Tuple[str, ...], build_rpath: str,
- install_rpath: str) -> T.Tuple[T.List[str], T.Set[bytes]]:
+ def build_rpath_args(self, env: Environment, build_dir: str, from_dir: str,
+ target: BuildTarget, extra_paths: T.Optional[T.List[str]] = None) -> T.Tuple[T.List[str], T.Set[bytes]]:
return ([], set())
def thread_link_flags(self, env: 'Environment') -> T.List[str]:
@@ -297,9 +296,8 @@ class DynamicLinker(metaclass=abc.ABCMeta):
def bitcode_args(self) -> T.List[str]:
raise MesonException('This linker does not support bitcode bundles')
- def build_rpath_args(self, env: 'Environment', build_dir: str, from_dir: str,
- rpath_paths: T.Tuple[str, ...], build_rpath: str,
- install_rpath: str) -> T.Tuple[T.List[str], T.Set[bytes]]:
+ def build_rpath_args(self, env: Environment, build_dir: str, from_dir: str,
+ target: BuildTarget, extra_paths: T.Optional[T.List[str]] = None) -> T.Tuple[T.List[str], T.Set[bytes]]:
return ([], set())
def get_soname_args(self, env: 'Environment', prefix: str, shlib_name: str,
@@ -703,13 +701,13 @@ class GnuLikeDynamicLinkerMixin(DynamicLinkerBase):
sostr = '' if soversion is None else '.' + soversion
return self._apply_prefix(f'-soname,{prefix}{shlib_name}.{suffix}{sostr}')
- def build_rpath_args(self, env: 'Environment', build_dir: str, from_dir: str,
- rpath_paths: T.Tuple[str, ...], build_rpath: str,
- install_rpath: str) -> T.Tuple[T.List[str], T.Set[bytes]]:
+ def build_rpath_args(self, env: Environment, build_dir: str, from_dir: str,
+ target: BuildTarget, extra_paths: T.Optional[T.List[str]] = None) -> T.Tuple[T.List[str], T.Set[bytes]]:
m = env.machines[self.for_machine]
if m.is_windows() or m.is_cygwin():
return ([], set())
- if not rpath_paths and not install_rpath and not build_rpath:
+ rpath_paths = target.determine_rpath_dirs()
+ if not rpath_paths and not target.install_rpath and not target.build_rpath and not extra_paths:
return ([], set())
args: T.List[str] = []
origin_placeholder = '$ORIGIN'
@@ -722,10 +720,12 @@ class GnuLikeDynamicLinkerMixin(DynamicLinkerBase):
for p in all_paths:
rpath_dirs_to_remove.add(p.encode('utf8'))
# Build_rpath is used as-is (it is usually absolute).
- if build_rpath != '':
- all_paths.add(build_rpath)
- for p in build_rpath.split(':'):
+ if target.build_rpath != '':
+ all_paths.add(target.build_rpath)
+ for p in target.build_rpath.split(':'):
rpath_dirs_to_remove.add(p.encode('utf8'))
+ if extra_paths:
+ all_paths.update(extra_paths)
# TODO: should this actually be "for (dragonfly|open)bsd"?
if mesonlib.is_dragonflybsd() or mesonlib.is_openbsd():
@@ -740,7 +740,7 @@ class GnuLikeDynamicLinkerMixin(DynamicLinkerBase):
# enough space in the ELF header to hold the final installation RPATH.
paths = ':'.join(all_paths)
paths_length = len(paths.encode('utf-8'))
- install_rpath_length = len(install_rpath.encode('utf-8'))
+ install_rpath_length = len(target.install_rpath.encode('utf-8'))
if paths_length < install_rpath_length:
padding = 'X' * (install_rpath_length - paths_length)
if not paths:
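
The padding dance itself is unchanged; it now just reads install_rpath from the target. A worked example of the arithmetic, with illustrative values:

    # Build-time RPATH '$ORIGIN/sub' is 11 bytes; install_rpath
    # '/usr/lib/myapp' is 14. Three 'X' bytes (plus the ':' separator) are
    # appended so the ELF header reserves at least enough room for the
    # install-time RPATH to be patched in without relinking.
    paths = '$ORIGIN/sub'
    padding = 'X' * (len('/usr/lib/myapp'.encode('utf-8')) - len(paths.encode('utf-8')))
    paths = paths + ':' + padding      # '$ORIGIN/sub:XXX' -> 15 bytes >= 14
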
@@ -873,10 +873,10 @@ class AppleDynamicLinker(PosixDynamicLinkerMixin, DynamicLinker):
'-current_version', darwin_versions[1]])
return args
- def build_rpath_args(self, env: 'Environment', build_dir: str, from_dir: str,
- rpath_paths: T.Tuple[str, ...], build_rpath: str,
- install_rpath: str) -> T.Tuple[T.List[str], T.Set[bytes]]:
- if not rpath_paths and not install_rpath and not build_rpath:
+ def build_rpath_args(self, env: Environment, build_dir: str, from_dir: str,
+ target: BuildTarget, extra_paths: T.Optional[T.List[str]] = None) -> T.Tuple[T.List[str], T.Set[bytes]]:
+ rpath_paths = target.determine_rpath_dirs()
+ if not rpath_paths and not target.install_rpath and not target.build_rpath and not extra_paths:
return ([], set())
args: T.List[str] = []
rpath_dirs_to_remove: T.Set[bytes] = set()
@@ -885,8 +885,10 @@ class AppleDynamicLinker(PosixDynamicLinkerMixin, DynamicLinker):
origin_placeholder = '@loader_path'
processed_rpaths = prepare_rpaths(rpath_paths, build_dir, from_dir)
all_paths = mesonlib.OrderedSet([os.path.join(origin_placeholder, p) for p in processed_rpaths])
- if build_rpath != '':
- all_paths.update(build_rpath.split(':'))
+ if target.build_rpath != '':
+ all_paths.update(target.build_rpath.split(':'))
+ if extra_paths:
+ all_paths.update(extra_paths)
for rp in all_paths:
rpath_dirs_to_remove.add(rp.encode('utf8'))
args.extend(self._apply_prefix('-rpath,' + rp))
@@ -1022,9 +1024,8 @@ class WASMDynamicLinker(GnuLikeDynamicLinkerMixin, PosixDynamicLinkerMixin, Dyna
def get_asneeded_args(self) -> T.List[str]:
return []
- def build_rpath_args(self, env: 'Environment', build_dir: str, from_dir: str,
- rpath_paths: T.Tuple[str, ...], build_rpath: str,
- install_rpath: str) -> T.Tuple[T.List[str], T.Set[bytes]]:
+ def build_rpath_args(self, env: Environment, build_dir: str, from_dir: str,
+ target: BuildTarget, extra_paths: T.Optional[T.List[str]] = None) -> T.Tuple[T.List[str], T.Set[bytes]]:
return ([], set())
@@ -1100,9 +1101,8 @@ class Xc16DynamicLinker(DynamicLinker):
suffix: str, soversion: str, darwin_versions: T.Tuple[str, str]) -> T.List[str]:
return []
- def build_rpath_args(self, env: 'Environment', build_dir: str, from_dir: str,
- rpath_paths: T.Tuple[str, ...], build_rpath: str,
- install_rpath: str) -> T.Tuple[T.List[str], T.Set[bytes]]:
+ def build_rpath_args(self, env: Environment, build_dir: str, from_dir: str,
+ target: BuildTarget, extra_paths: T.Optional[T.List[str]] = None) -> T.Tuple[T.List[str], T.Set[bytes]]:
return ([], set())
class CompCertDynamicLinker(DynamicLinker):
@@ -1143,9 +1143,8 @@ class CompCertDynamicLinker(DynamicLinker):
suffix: str, soversion: str, darwin_versions: T.Tuple[str, str]) -> T.List[str]:
raise MesonException(f'{self.id} does not support shared libraries.')
- def build_rpath_args(self, env: 'Environment', build_dir: str, from_dir: str,
- rpath_paths: T.Tuple[str, ...], build_rpath: str,
- install_rpath: str) -> T.Tuple[T.List[str], T.Set[bytes]]:
+ def build_rpath_args(self, env: Environment, build_dir: str, from_dir: str,
+ target: BuildTarget, extra_paths: T.Optional[T.List[str]] = None) -> T.Tuple[T.List[str], T.Set[bytes]]:
return ([], set())
class TIDynamicLinker(DynamicLinker):
@@ -1235,6 +1234,12 @@ class QualcommLLVMDynamicLinker(LLVMDynamicLinker):
id = 'ld.qcld'
+class ELDDynamicLinker(GnuLikeDynamicLinkerMixin, PosixDynamicLinkerMixin, DynamicLinker):
+
+ """Qualcomm's opensource embedded linker"""
+
+ id = 'ld.eld'
+
class NAGDynamicLinker(PosixDynamicLinkerMixin, DynamicLinker):
@@ -1249,17 +1254,19 @@ class NAGDynamicLinker(PosixDynamicLinkerMixin, DynamicLinker):
id = 'nag'
- def build_rpath_args(self, env: 'Environment', build_dir: str, from_dir: str,
- rpath_paths: T.Tuple[str, ...], build_rpath: str,
- install_rpath: str) -> T.Tuple[T.List[str], T.Set[bytes]]:
- if not rpath_paths and not install_rpath and not build_rpath:
+ def build_rpath_args(self, env: Environment, build_dir: str, from_dir: str,
+ target: BuildTarget, extra_paths: T.Optional[T.List[str]] = None) -> T.Tuple[T.List[str], T.Set[bytes]]:
+ rpath_paths = target.determine_rpath_dirs()
+ if not rpath_paths and not target.install_rpath and not target.build_rpath and not extra_paths:
return ([], set())
args: T.List[str] = []
origin_placeholder = '$ORIGIN'
processed_rpaths = prepare_rpaths(rpath_paths, build_dir, from_dir)
all_paths = mesonlib.OrderedSet([os.path.join(origin_placeholder, p) for p in processed_rpaths])
- if build_rpath != '':
- all_paths.add(build_rpath)
+ if target.build_rpath != '':
+ all_paths.add(target.build_rpath)
+ if extra_paths:
+ all_paths.update(extra_paths)
for rp in all_paths:
args.extend(self._apply_prefix('-Wl,-Wl,,-rpath,,' + rp))
@@ -1294,10 +1301,10 @@ class PGIDynamicLinker(PosixDynamicLinkerMixin, DynamicLinker):
return ['-shared']
return []
- def build_rpath_args(self, env: 'Environment', build_dir: str, from_dir: str,
- rpath_paths: T.Tuple[str, ...], build_rpath: str,
- install_rpath: str) -> T.Tuple[T.List[str], T.Set[bytes]]:
+ def build_rpath_args(self, env: Environment, build_dir: str, from_dir: str,
+ target: BuildTarget, extra_paths: T.Optional[T.List[str]] = None) -> T.Tuple[T.List[str], T.Set[bytes]]:
if not env.machines[self.for_machine].is_windows():
+ rpath_paths = target.determine_rpath_dirs()
return (['-R' + os.path.join(build_dir, p) for p in rpath_paths], set())
return ([], set())
@@ -1505,26 +1512,28 @@ class SolarisDynamicLinker(PosixDynamicLinkerMixin, DynamicLinker):
def fatal_warnings(self) -> T.List[str]:
return ['-z', 'fatal-warnings']
- def build_rpath_args(self, env: 'Environment', build_dir: str, from_dir: str,
- rpath_paths: T.Tuple[str, ...], build_rpath: str,
- install_rpath: str) -> T.Tuple[T.List[str], T.Set[bytes]]:
- if not rpath_paths and not install_rpath and not build_rpath:
+ def build_rpath_args(self, env: Environment, build_dir: str, from_dir: str,
+ target: BuildTarget, extra_paths: T.Optional[T.List[str]] = None) -> T.Tuple[T.List[str], T.Set[bytes]]:
+ rpath_paths = target.determine_rpath_dirs()
+ if not rpath_paths and not target.install_rpath and not target.build_rpath and not extra_paths:
return ([], set())
processed_rpaths = prepare_rpaths(rpath_paths, build_dir, from_dir)
all_paths = mesonlib.OrderedSet([os.path.join('$ORIGIN', p) for p in processed_rpaths])
rpath_dirs_to_remove: T.Set[bytes] = set()
for p in all_paths:
rpath_dirs_to_remove.add(p.encode('utf8'))
- if build_rpath != '':
- all_paths.add(build_rpath)
- for p in build_rpath.split(':'):
+ if target.build_rpath != '':
+ all_paths.add(target.build_rpath)
+ for p in target.build_rpath.split(':'):
rpath_dirs_to_remove.add(p.encode('utf8'))
+ if extra_paths:
+ all_paths.update(extra_paths)
# In order to avoid relinking for RPATH removal, the binary needs to contain just
# enough space in the ELF header to hold the final installation RPATH.
paths = ':'.join(all_paths)
paths_length = len(paths.encode('utf-8'))
- install_rpath_length = len(install_rpath.encode('utf-8'))
+ install_rpath_length = len(target.install_rpath.encode('utf-8'))
if paths_length < install_rpath_length:
padding = 'X' * (install_rpath_length - paths_length)
if not paths:
@@ -1575,16 +1584,15 @@ class AIXDynamicLinker(PosixDynamicLinkerMixin, DynamicLinker):
# archives or not."
return args
- def build_rpath_args(self, env: 'Environment', build_dir: str, from_dir: str,
- rpath_paths: T.Tuple[str, ...], build_rpath: str,
- install_rpath: str) -> T.Tuple[T.List[str], T.Set[bytes]]:
+ def build_rpath_args(self, env: Environment, build_dir: str, from_dir: str,
+ target: BuildTarget, extra_paths: T.Optional[T.List[str]] = None) -> T.Tuple[T.List[str], T.Set[bytes]]:
all_paths: mesonlib.OrderedSet[str] = mesonlib.OrderedSet()
# install_rpath first, followed by other paths, and the system path last
- if install_rpath != '':
- all_paths.add(install_rpath)
- if build_rpath != '':
- all_paths.add(build_rpath)
- for p in rpath_paths:
+ if target.install_rpath != '':
+ all_paths.add(target.install_rpath)
+ if target.build_rpath != '':
+ all_paths.add(target.build_rpath)
+ for p in target.determine_rpath_dirs():
all_paths.add(os.path.join(build_dir, p))
# We should consider allowing the $LIBPATH environment variable
# to override sys_path.
@@ -1598,6 +1606,8 @@ class AIXDynamicLinker(PosixDynamicLinkerMixin, DynamicLinker):
for p in sys_path:
if os.path.isdir(p):
all_paths.add(p)
+ if extra_paths:
+ all_paths.update(extra_paths)
return (self._apply_prefix('-blibpath:' + ':'.join(all_paths)), set())
def thread_flags(self, env: 'Environment') -> T.List[str]:
diff --git a/mesonbuild/mconf.py b/mesonbuild/mconf.py
index 416caf1..7f62ba0 100644
--- a/mesonbuild/mconf.py
+++ b/mesonbuild/mconf.py
@@ -33,7 +33,6 @@ if T.TYPE_CHECKING:
builddir: str
clearcache: bool
pager: bool
- unset_opts: T.List[str]
# cannot be TV_Loggable, because non-ansidecorators do direct string concat
LOGLINE = T.Union[str, mlog.AnsiDecorator]
@@ -47,7 +46,7 @@ def add_arguments(parser: 'argparse.ArgumentParser') -> None:
help='Clear cached state (e.g. found dependencies)')
parser.add_argument('--no-pager', action='store_false', dest='pager',
help='Do not redirect output to a pager')
- parser.add_argument('-U', action='append', dest='unset_opts', default=[],
+ parser.add_argument('-U', action=coredata.KeyNoneAction, dest='cmd_line_options', default={},
help='Remove a subproject option.')
def stringify(val: T.Any) -> str:
@@ -73,6 +72,7 @@ class Conf:
self.build_dir = os.path.dirname(self.build_dir)
self.build = None
self.max_choices_line_length = 60
+ self.pending_section: T.Optional[str] = None
self.name_col: T.List[LOGLINE] = []
self.value_col: T.List[LOGLINE] = []
self.choices_col: T.List[LOGLINE] = []
@@ -125,9 +125,6 @@ class Conf:
def clear_cache(self) -> None:
self.coredata.clear_cache()
- def set_options(self, options: T.Dict[OptionKey, str]) -> bool:
- return self.coredata.set_options(options)
-
def save(self) -> None:
# Do nothing when using introspection
if self.default_values_only:
@@ -149,7 +146,7 @@ class Conf:
Each column will have a specific width, and will be line wrapped.
"""
total_width = shutil.get_terminal_size(fallback=(160, 0))[0]
- _col = max(total_width // 5, 20)
+ _col = max(total_width // 5, 24)
last_column = total_width - (3 * _col) - 3
four_column = (_col, _col, _col, last_column if last_column > 1 else _col)
@@ -194,7 +191,7 @@ class Conf:
) -> T.Dict[str, options.MutableKeyedOptionDictType]:
result: T.Dict[str, options.MutableKeyedOptionDictType] = {}
for k, o in opts.items():
- if k.subproject:
+ if k.subproject is not None:
self.all_subprojects.add(k.subproject)
result.setdefault(k.subproject, {})[k] = o
return result
@@ -209,12 +206,15 @@ class Conf:
self.choices_col.append(choices)
self.descr_col.append(descr)
- def add_option(self, name: str, descr: str, value: T.Any, choices: T.Any) -> None:
+ def add_option(self, key: OptionKey, descr: str, value: T.Any, choices: T.Any) -> None:
+ self._add_section()
value = stringify(value)
choices = stringify(choices)
- self._add_line(mlog.green(name), mlog.yellow(value), mlog.blue(choices), descr)
+ self._add_line(mlog.green(str(key.evolve(subproject=None))), mlog.yellow(value),
+ mlog.blue(choices), descr)
def add_title(self, title: str) -> None:
+ self._add_section()
newtitle = mlog.cyan(title)
descr = mlog.cyan('Description')
value = mlog.cyan('Default Value' if self.default_values_only else 'Current Value')
@@ -223,11 +223,17 @@ class Conf:
self._add_line(newtitle, value, choices, descr)
self._add_line('-' * len(newtitle), '-' * len(value), '-' * len(choices), '-' * len(descr))
- def add_section(self, section: str) -> None:
+ def _add_section(self) -> None:
+ if not self.pending_section:
+ return
self.print_margin = 0
self._add_line('', '', '', '')
- self._add_line(mlog.normal_yellow(section + ':'), '', '', '')
+ self._add_line(mlog.normal_yellow(self.pending_section + ':'), '', '', '')
self.print_margin = 2
+ self.pending_section = None
+
+ def add_section(self, section: str) -> None:
+ self.pending_section = section
def print_options(self, title: str, opts: T.Union[options.MutableKeyedOptionDictType, options.OptionStore]) -> None:
if not opts:
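
Deferring the header means a section is only printed once the first option line under it arrives, so headings with nothing to show disappear from the output. A sketch of the observable behavior, assuming a configured Conf instance named conf and some OptionKey k (illustrative values):

    conf.add_section('Subproject foo')               # only records pending_section
    conf.print_options('Core options', {})           # empty dict: returns early, header never flushed
    conf.add_option(k, 'description', 'value', 'choices')  # _add_section() flushes 'Subproject foo:' first
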
@@ -242,7 +248,7 @@ class Conf:
# printable_value = '<inherited from main project>'
#if isinstance(o, options.UserFeatureOption) and o.is_auto():
# printable_value = auto.printable_value()
- self.add_option(k.name, o.description, printable_value, o.printable_choices())
+ self.add_option(k, o.description, printable_value, o.printable_choices())
def print_conf(self, pager: bool) -> None:
if pager:
@@ -291,15 +297,15 @@ class Conf:
project_options = self.split_options_per_subproject({k: v for k, v in self.coredata.optstore.items() if self.coredata.optstore.is_project_option(k)})
show_build_options = self.default_values_only or self.build.environment.is_cross_build()
- self.add_section('Main project options')
+ self.add_section('Global build options')
self.print_options('Core options', host_core_options[None])
if show_build_options and build_core_options:
self.print_options('', build_core_options[None])
self.print_options('Backend options', {k: v for k, v in self.coredata.optstore.items() if self.coredata.optstore.is_backend_option(k)})
self.print_options('Base options', {k: v for k, v in self.coredata.optstore.items() if self.coredata.optstore.is_base_option(k)})
- self.print_options('Compiler options', host_compiler_options.get('', {}))
+ self.print_options('Compiler options', host_compiler_options.get(None, {}))
if show_build_options:
- self.print_options('', build_compiler_options.get('', {}))
+ self.print_options('', build_compiler_options.get(None, {}))
for mod, mod_options in module_options.items():
self.print_options(f'{mod} module options', mod_options)
self.print_options('Directories', dir_options)
@@ -307,8 +313,9 @@ class Conf:
self.print_options('Project options', project_options.get('', {}))
for subproject in sorted(self.all_subprojects):
if subproject == '':
- continue
- self.add_section('Subproject ' + subproject)
+ self.add_section('Main project')
+ else:
+ self.add_section('Subproject ' + subproject)
if subproject in host_core_options:
self.print_options('Core options', host_core_options[subproject])
if subproject in build_core_options and show_build_options:
@@ -317,7 +324,7 @@ class Conf:
self.print_options('Compiler options', host_compiler_options[subproject])
if subproject in build_compiler_options and show_build_options:
self.print_options('', build_compiler_options[subproject])
- if subproject in project_options:
+ if subproject != '' and subproject in project_options:
self.print_options('Project options', project_options[subproject])
self.print_aligned()
@@ -342,16 +349,12 @@ class Conf:
if self.coredata.optstore.augments:
mlog.log('\nCurrently set option augments:')
for k, v in self.coredata.optstore.augments.items():
- mlog.log(f'{k:21}{v:10}')
+ mlog.log(f'{k!s:21}{v:10}')
else:
mlog.log('\nThere are no option augments.')
def has_option_flags(options: CMDOptions) -> bool:
- if options.cmd_line_options:
- return True
- if options.unset_opts:
- return True
- return False
+ return bool(options.cmd_line_options)
def is_print_only(options: CMDOptions) -> bool:
if has_option_flags(options):
@@ -373,11 +376,7 @@ def run_impl(options: CMDOptions, builddir: str) -> int:
save = False
if has_option_flags(options):
- unset_opts = getattr(options, 'unset_opts', [])
- all_D = options.projectoptions[:]
- for keystr, valstr in options.cmd_line_options.items():
- all_D.append(f'{keystr}={valstr}')
- save |= c.coredata.optstore.set_from_configure_command(all_D, unset_opts)
+ save |= c.coredata.set_from_configure_command(options)
coredata.update_cmd_line_file(builddir, options)
if options.clearcache:
c.clear_cache()
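
The mconf hunks above defer printing a section header until the section receives its first option, so sections with nothing to show (for example a subproject without visible options) vanish from the output. A minimal standalone sketch of that pattern; the Printer class and its line format are illustrative, not Meson's actual Conf:

    import typing as T

    class Printer:
        def __init__(self) -> None:
            self.pending_section: T.Optional[str] = None
            self.lines: T.List[str] = []

        def add_section(self, section: str) -> None:
            # Nothing is printed yet; just remember the name.
            self.pending_section = section

        def _flush_section(self) -> None:
            # Emit the header lazily, the first time content arrives.
            if self.pending_section is None:
                return
            self.lines.append('')
            self.lines.append(self.pending_section + ':')
            self.pending_section = None

        def add_option(self, name: str, value: str) -> None:
            self._flush_section()
            self.lines.append(f'  {name:<20} {value}')

    p = Printer()
    p.add_section('Empty section')   # swallowed: no options follow
    p.add_section('Main project')
    p.add_option('buildtype', 'debug')
    print('\n'.join(p.lines))
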
diff --git a/mesonbuild/mdevenv.py b/mesonbuild/mdevenv.py
index 4962d96..e9974fe 100644
--- a/mesonbuild/mdevenv.py
+++ b/mesonbuild/mdevenv.py
@@ -4,6 +4,7 @@ import os, subprocess
import argparse
import tempfile
import shutil
+import sys
import itertools
import typing as T
@@ -226,8 +227,14 @@ def run(options: argparse.Namespace) -> int:
args[0] = abs_path or args[0]
try:
- os.chdir(workdir)
- os.execvpe(args[0], args, env=devenv)
+ if is_windows():
+ # execvpe doesn't return the exit code on Windows
+ # see https://github.com/python/cpython/issues/63323
+ result = subprocess.run(args, env=devenv, cwd=workdir)
+ sys.exit(result.returncode)
+ else:
+ os.chdir(workdir)
+ os.execvpe(args[0], args, env=devenv)
except FileNotFoundError:
raise MesonException(f'Command not found: {args[0]}')
except OSError as e:
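
On Windows the os.exec* family spawns a new process while the parent exits immediately, so the child's exit status is lost (python/cpython#63323); that is why mdevenv now branches. A condensed sketch of the pattern, with run_in_env as an illustrative name (the real code uses Meson's is_windows() helper rather than sys.platform):

    import os
    import subprocess
    import sys

    def run_in_env(args, env, workdir):
        if sys.platform == 'win32':
            # os.execvpe() can't propagate the child's exit code on
            # Windows: spawn a subprocess and forward it explicitly.
            result = subprocess.run(args, env=env, cwd=workdir)
            sys.exit(result.returncode)
        # POSIX: replace the current process; never returns on success.
        os.chdir(workdir)
        os.execvpe(args[0], args, env=env)
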
diff --git a/mesonbuild/mdist.py b/mesonbuild/mdist.py
index 0361606..6e1bfd0 100644
--- a/mesonbuild/mdist.py
+++ b/mesonbuild/mdist.py
@@ -7,7 +7,6 @@ from __future__ import annotations
import abc
import argparse
-import gzip
import os
import sys
import shlex
@@ -294,6 +293,7 @@ class HgDist(Dist):
shutil.copyfileobj(tf, bf)
output_names.append(bz2name)
if 'gztar' in archives:
+ import gzip
with gzip.open(gzname, 'wb') as zf, open(tarname, 'rb') as tf:
shutil.copyfileobj(tf, zf)
output_names.append(gzname)
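
The gzip import in mdist moves into the only branch that uses it, a standard deferred-import pattern that keeps module import time down when no gztar archive is requested. A minimal illustration (write_gzip is a made-up helper, not part of mdist):

    import shutil

    def write_gzip(src: str, dest: str) -> None:
        # Deferred import: gzip is only loaded when this path runs.
        import gzip
        with open(src, 'rb') as fin, gzip.open(dest, 'wb') as fout:
            shutil.copyfileobj(fin, fout)
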
diff --git a/mesonbuild/mformat.py b/mesonbuild/mformat.py
index 92729a0..2131ff7 100644
--- a/mesonbuild/mformat.py
+++ b/mesonbuild/mformat.py
@@ -536,7 +536,7 @@ class TrimWhitespaces(FullAstVisitor):
def visit_ParenthesizedNode(self, node: mparser.ParenthesizedNode) -> None:
self.enter_node(node)
- is_multiline = node.lpar.whitespaces and '#' in node.lpar.whitespaces.value
+ is_multiline = node.lpar.lineno != node.rpar.lineno
if is_multiline:
self.indent_comments += self.config.indent_by
@@ -546,7 +546,8 @@ class TrimWhitespaces(FullAstVisitor):
if is_multiline:
node.inner.whitespaces.value = self.dedent(node.inner.whitespaces.value)
self.indent_comments = self.dedent(self.indent_comments)
- self.add_nl_after(node.inner)
+ if node.lpar.whitespaces and '\n' in node.lpar.whitespaces.value:
+ self.add_nl_after(node.inner)
node.rpar.accept(self)
self.move_whitespaces(node.rpar, node)
@@ -836,7 +837,15 @@ class Formatter:
# See https://editorconfig.org/
config = EditorConfig()
- for p in source_file.parents:
+ if source_file == Path('STDIN'):
+ raise MesonException('Using editorconfig with stdin requires --source-file-path argument')
+
+ try:
+ source_file_path = source_file.resolve()
+ except FileNotFoundError:
+ raise MesonException(f'Unable to resolve path for "{source_file}"')
+
+ for p in source_file_path.parents:
editorconfig_file = p / '.editorconfig'
if not editorconfig_file.exists():
continue
@@ -955,6 +964,11 @@ def add_arguments(parser: argparse.ArgumentParser) -> None:
help='output file (implies having exactly one input)'
)
parser.add_argument(
+ '--source-file-path',
+ type=Path,
+ help='path to use when reading from stdin'
+ )
+ parser.add_argument(
'sources',
nargs='*',
type=Path,
@@ -980,6 +994,10 @@ def run(options: argparse.Namespace) -> int:
raise MesonException('--recursive argument is not compatible with stdin input')
if options.inplace and from_stdin:
raise MesonException('--inplace argument is not compatible with stdin input')
+ if options.source_file_path and not from_stdin:
+ raise MesonException('--source-file-path argument is only compatible with stdin input')
+ if from_stdin and options.editor_config and not options.source_file_path:
+ raise MesonException('using --editor-config with stdin input requires --source-file-path argument')
sources: T.List[Path] = options.sources.copy() or [Path(build_filename)]
@@ -995,7 +1013,7 @@ def run(options: argparse.Namespace) -> int:
try:
if from_stdin:
- src_file = Path('STDIN') # used for error messages and introspection
+ src_file = options.source_file_path or Path('STDIN') # used for error messages and introspection
code = sys.stdin.read()
else:
code = src_file.read_text(encoding='utf-8')
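
For stdin input there is no on-disk path from which to anchor the .editorconfig search, hence the new --source-file-path argument. The lookup itself walks every parent directory of the resolved source path, as in this simplified sketch (find_editorconfigs is an illustrative name; the real formatter also parses and merges the files, nearest-first):

    import typing as T
    from pathlib import Path

    def find_editorconfigs(source_file: Path) -> T.List[Path]:
        # Collect every .editorconfig from the file's directory up to
        # the filesystem root; callers apply them nearest-first.
        found = []
        for parent in source_file.resolve().parents:
            candidate = parent / '.editorconfig'
            if candidate.exists():
                found.append(candidate)
        return found

    print(find_editorconfigs(Path('subdir/meson.build')))
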
diff --git a/mesonbuild/mintro.py b/mesonbuild/mintro.py
index 462ee2f..e19e528 100644
--- a/mesonbuild/mintro.py
+++ b/mesonbuild/mintro.py
@@ -19,20 +19,23 @@ from pathlib import Path, PurePath
import sys
import typing as T
-from . import build, mesonlib, options, coredata as cdata
-from .ast import IntrospectionInterpreter, BUILD_TARGET_FUNCTIONS, AstConditionLevel, AstIDGenerator, AstIndentationGenerator, AstJSONPrinter
+from . import build, environment, mesonlib, options, coredata as cdata
+from .ast import IntrospectionInterpreter, AstConditionLevel, AstIDGenerator, AstIndentationGenerator, AstJSONPrinter
from .backend import backends
from .dependencies import Dependency
-from . import environment
-from .interpreterbase import ObjectHolder
+from .interpreterbase import ObjectHolder, UnknownValue
from .options import OptionKey
-from .mparser import FunctionNode, ArrayNode, ArgumentNode, StringNode
if T.TYPE_CHECKING:
import argparse
from .interpreter import Interpreter
- from .mparser import BaseNode
+
+class IntrospectionEncoder(json.JSONEncoder):
+ def default(self, obj: T.Any) -> T.Any:
+ if isinstance(obj, UnknownValue):
+ return 'unknown'
+ return json.JSONEncoder.default(self, obj)
def get_meson_info_file(info_dir: str) -> str:
return os.path.join(info_dir, 'meson-info.json')
@@ -54,7 +57,7 @@ class IntroCommand:
def get_meson_introspection_types(coredata: T.Optional[cdata.CoreData] = None,
builddata: T.Optional[build.Build] = None,
- backend: T.Optional[backends.Backend] = None) -> 'T.Mapping[str, IntroCommand]':
+ backend: T.Optional[backends.Backend] = None) -> T.Mapping[str, IntroCommand]:
if backend and builddata:
benchmarkdata = backend.create_test_serialisation(builddata.get_benchmarks())
testdata = backend.create_test_serialisation(builddata.get_tests())
@@ -122,14 +125,15 @@ def list_installed(installdata: backends.InstallData) -> T.Dict[str, str]:
res[basename] = os.path.join(installdata.prefix, s.install_path, basename)
return res
-def list_install_plan(installdata: backends.InstallData) -> T.Dict[str, T.Dict[str, T.Dict[str, T.Optional[str]]]]:
- plan: T.Dict[str, T.Dict[str, T.Dict[str, T.Optional[str]]]] = {
+def list_install_plan(installdata: backends.InstallData) -> T.Dict[str, T.Dict[str, T.Dict[str, T.Union[str, T.List[str], None]]]]:
+ plan: T.Dict[str, T.Dict[str, T.Dict[str, T.Union[str, T.List[str], None]]]] = {
'targets': {
- os.path.join(installdata.build_dir, target.fname): {
+ Path(installdata.build_dir, target.fname).as_posix(): {
'destination': target.out_name,
'tag': target.tag or None,
'subproject': target.subproject or None,
- 'install_rpath': target.install_rpath or None
+ 'install_rpath': target.install_rpath or None,
+ 'build_rpaths': sorted(x.decode('utf8') for x in target.rpath_dirs_to_remove),
}
for target in installdata.targets
},
@@ -142,13 +146,14 @@ def list_install_plan(installdata: backends.InstallData) -> T.Dict[str, T.Dict[s
}.items():
# Mypy doesn't recognize SubdirInstallData as a subclass of InstallDataBase
for data in data_list: # type: ignore[attr-defined]
+ data_path = Path(data.path).as_posix()
data_type = data.data_type or key
- install_path_name = data.install_path_name
+ install_path_name = Path(data.install_path_name)
if key == 'headers': # in the headers, install_path_name is the directory
- install_path_name = os.path.join(install_path_name, os.path.basename(data.path))
+ install_path_name = install_path_name / os.path.basename(data.path)
entry = {
- 'destination': install_path_name,
+ 'destination': install_path_name.as_posix(),
'tag': data.tag or None,
'subproject': data.subproject or None,
}
@@ -159,7 +164,7 @@ def list_install_plan(installdata: backends.InstallData) -> T.Dict[str, T.Dict[s
entry['exclude_files'] = list(exclude_files)
plan[data_type] = plan.get(data_type, {})
- plan[data_type][data.path] = entry
+ plan[data_type][data_path] = entry
return plan
@@ -169,56 +174,35 @@ def get_target_dir(coredata: cdata.CoreData, subdir: str) -> str:
else:
return subdir
-def list_targets_from_source(intr: IntrospectionInterpreter) -> T.List[T.Dict[str, T.Union[bool, str, T.List[T.Union[str, T.Dict[str, T.Union[str, T.List[str], bool]]]]]]]:
- tlist: T.List[T.Dict[str, T.Union[bool, str, T.List[T.Union[str, T.Dict[str, T.Union[str, T.List[str], bool]]]]]]] = []
- root_dir = Path(intr.source_root)
-
- def nodes_to_paths(node_list: T.List[BaseNode]) -> T.List[Path]:
- res: T.List[Path] = []
- for n in node_list:
- args: T.List[BaseNode] = []
- if isinstance(n, FunctionNode):
- args = list(n.args.arguments)
- if n.func_name.value in BUILD_TARGET_FUNCTIONS:
- args.pop(0)
- elif isinstance(n, ArrayNode):
- args = n.args.arguments
- elif isinstance(n, ArgumentNode):
- args = n.arguments
- for j in args:
- if isinstance(j, StringNode):
- assert isinstance(j.value, str)
- res += [Path(j.value)]
- elif isinstance(j, str):
- res += [Path(j)]
- res = [root_dir / i['subdir'] / x for x in res]
- res = [x.resolve() for x in res]
- return res
+def list_targets_from_source(intr: IntrospectionInterpreter) -> T.List[T.Dict[str, object]]:
+ tlist = []
+ root_dir = Path(intr.source_root).resolve()
for i in intr.targets:
- sources = nodes_to_paths(i['sources'])
- extra_f = nodes_to_paths(i['extra_files'])
- outdir = get_target_dir(intr.coredata, i['subdir'])
+ sources = intr.nodes_to_pretty_filelist(root_dir, i.subdir, i.source_nodes)
+ extra_files = intr.nodes_to_pretty_filelist(root_dir, i.subdir, [i.extra_files] if i.extra_files else [])
+
+ outdir = get_target_dir(intr.coredata, i.subdir)
tlist += [{
- 'name': i['name'],
- 'id': i['id'],
- 'type': i['type'],
- 'defined_in': i['defined_in'],
- 'filename': [os.path.join(outdir, x) for x in i['outputs']],
- 'build_by_default': i['build_by_default'],
+ 'name': i.name,
+ 'id': i.id,
+ 'type': i.typename,
+ 'defined_in': i.defined_in,
+ 'filename': [os.path.join(outdir, x) for x in i.outputs],
+ 'build_by_default': i.build_by_default,
'target_sources': [{
'language': 'unknown',
- 'machine': i['machine'],
+ 'machine': i.machine,
'compiler': [],
'parameters': [],
- 'sources': [str(x) for x in sources],
+ 'sources': sources,
'generated_sources': []
}],
'depends': [],
- 'extra_files': [str(x) for x in extra_f],
+ 'extra_files': extra_files,
'subproject': None, # Subprojects are not supported
- 'installed': i['installed']
+ 'installed': i.installed
}]
return tlist
@@ -380,17 +364,16 @@ def list_compilers(coredata: cdata.CoreData) -> T.Dict[str, T.Dict[str, T.Dict[s
}
return compilers
-def list_deps_from_source(intr: IntrospectionInterpreter) -> T.List[T.Dict[str, T.Union[str, bool]]]:
- result: T.List[T.Dict[str, T.Union[str, bool]]] = []
+def list_deps_from_source(intr: IntrospectionInterpreter) -> T.List[T.Dict[str, T.Union[str, bool, T.List[str], UnknownValue]]]:
+ result: T.List[T.Dict[str, T.Union[str, bool, T.List[str], UnknownValue]]] = []
for i in intr.dependencies:
- keys = [
- 'name',
- 'required',
- 'version',
- 'has_fallback',
- 'conditional',
- ]
- result += [{k: v for k, v in i.items() if k in keys}]
+ result += [{
+ 'name': i.name,
+ 'required': i.required,
+ 'version': i.version,
+ 'has_fallback': i.has_fallback,
+ 'conditional': i.conditional,
+ }]
return result
def list_deps(coredata: cdata.CoreData, backend: backends.Backend) -> T.List[T.Dict[str, T.Union[str, T.List[str]]]]:
@@ -517,12 +500,12 @@ def print_results(options: argparse.Namespace, results: T.Sequence[T.Tuple[str,
return 1
elif len(results) == 1 and not options.force_dict:
# Make sure to keep the existing output format for a single option
- print(json.dumps(results[0][1], indent=indent))
+ print(json.dumps(results[0][1], indent=indent, cls=IntrospectionEncoder))
else:
out = {}
for i in results:
out[i[0]] = i[1]
- print(json.dumps(out, indent=indent))
+ print(json.dumps(out, indent=indent, cls=IntrospectionEncoder))
return 0
def get_infodir(builddir: T.Optional[str] = None) -> str:
@@ -546,10 +529,11 @@ def run(options: argparse.Namespace) -> int:
datadir = os.path.join(options.builddir, datadir)
indent = 4 if options.indent else None
results: T.List[T.Tuple[str, T.Union[dict, T.List[T.Any]]]] = []
- sourcedir = '.' if options.builddir == 'meson.build' else options.builddir[:-11]
intro_types = get_meson_introspection_types()
- if 'meson.build' in [os.path.basename(options.builddir), options.builddir]:
+ # TODO: This if clause is undocumented.
+ if os.path.basename(options.builddir) == environment.build_filename:
+ sourcedir = '.' if options.builddir == environment.build_filename else options.builddir[:-len(environment.build_filename)]
# Make sure that log entries in other parts of meson don't interfere with the JSON output
with redirect_stdout(sys.stderr):
backend = backends.get_backend_from_name(options.backend)
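
UnknownValue markers produced by the AST interpreter are not JSON-serializable, so mintro now routes all output through a json.JSONEncoder subclass that renders them as the string 'unknown'. A self-contained sketch with a stand-in UnknownValue class:

    import json

    class UnknownValue:
        """Stand-in for meson's interpreterbase.UnknownValue marker."""

    class IntrospectionEncoder(json.JSONEncoder):
        def default(self, obj):
            # Values the AST interpreter could not evaluate statically
            # are rendered as the literal string 'unknown'.
            if isinstance(obj, UnknownValue):
                return 'unknown'
            return super().default(obj)

    print(json.dumps({'sources': [UnknownValue(), 'main.c']},
                     cls=IntrospectionEncoder))
    # {"sources": ["unknown", "main.c"]}
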
diff --git a/mesonbuild/modules/__init__.py b/mesonbuild/modules/__init__.py
index 67d1666..3938101 100644
--- a/mesonbuild/modules/__init__.py
+++ b/mesonbuild/modules/__init__.py
@@ -75,26 +75,28 @@ class ModuleState:
required: bool = True,
version_func: T.Optional[ProgramVersionFunc] = None,
wanted: T.Union[str, T.List[str]] = '', silent: bool = False,
- for_machine: MachineChoice = MachineChoice.HOST) -> T.Union[ExternalProgram, build.Executable, OverrideProgram]:
+ for_machine: MachineChoice = MachineChoice.HOST) -> T.Union[ExternalProgram, build.OverrideExecutable, OverrideProgram]:
if not isinstance(prog, list):
prog = [prog]
return self._interpreter.find_program_impl(prog, required=required, version_func=version_func,
wanted=wanted, silent=silent, for_machine=for_machine)
def find_tool(self, name: str, depname: str, varname: str, required: bool = True,
- wanted: T.Optional[str] = None) -> T.Union['build.Executable', ExternalProgram, 'OverrideProgram']:
- # Look in overrides in case it's built as subproject
- progobj = self._interpreter.program_from_overrides([name], [])
- if progobj is not None:
- return progobj
+ wanted: T.Optional[str] = None, for_machine: MachineChoice = MachineChoice.HOST) -> T.Union[build.OverrideExecutable, ExternalProgram, 'OverrideProgram']:
+ if for_machine is MachineChoice.HOST:
+ # Look in overrides in case it's built as subproject
+ progobj = self._interpreter.program_from_overrides([name], [])
+ if progobj is not None:
+ return progobj
# Look in machine file
- prog_list = self.environment.lookup_binary_entry(MachineChoice.HOST, name)
+ prog_list = self.environment.lookup_binary_entry(for_machine, name)
if prog_list is not None:
return ExternalProgram.from_entry(name, prog_list)
# Check if pkgconfig has a variable
- dep = self.dependency(depname, native=True, required=False, wanted=wanted)
+ dep = self.dependency(depname, native=for_machine is MachineChoice.BUILD,
+ required=False, wanted=wanted)
if dep.found() and dep.type_name == 'pkgconfig':
value = dep.get_variable(pkgconfig=varname)
if value:
@@ -106,7 +108,7 @@ class ModuleState:
return progobj
# Normal program lookup
- return self.find_program(name, required=required, wanted=wanted)
+ return self.find_program(name, required=required, wanted=wanted, for_machine=for_machine)
def dependency(self, depname: str, native: bool = False, required: bool = True,
wanted: T.Optional[str] = None) -> 'Dependency':
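
find_tool now threads for_machine through its whole lookup chain: subproject overrides are consulted only for the host machine, then the machine file, then a pkg-config variable from the owning dependency, and finally a plain program search. A schematic of that order, with the lookup callables passed in as parameters purely for illustration:

    from enum import Enum

    class MachineChoice(Enum):
        BUILD = 0
        HOST = 1

    def find_tool(name, lookup_override, lookup_machine_file,
                  lookup_pkgconfig_var, find_program,
                  for_machine=MachineChoice.HOST):
        # 1. Subproject overrides are host-only: a tool built as part
        #    of the project targets the host machine.
        if for_machine is MachineChoice.HOST:
            prog = lookup_override(name)
            if prog is not None:
                return prog
        # 2. An explicit binary entry in the cross/native machine file.
        prog = lookup_machine_file(for_machine, name)
        if prog is not None:
            return prog
        # 3. A pkg-config variable exported by the owning dependency.
        prog = lookup_pkgconfig_var(name, for_machine)
        if prog is not None:
            return prog
        # 4. Ordinary program search for the requested machine.
        return find_program(name, for_machine)

    print(find_tool('g-ir-scanner',
                    lookup_override=lambda n: None,
                    lookup_machine_file=lambda m, n: None,
                    lookup_pkgconfig_var=lambda n, m: None,
                    find_program=lambda n, m: '/usr/bin/' + n,
                    for_machine=MachineChoice.BUILD))
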
diff --git a/mesonbuild/modules/cmake.py b/mesonbuild/modules/cmake.py
index e3154b0..f12cc51 100644
--- a/mesonbuild/modules/cmake.py
+++ b/mesonbuild/modules/cmake.py
@@ -154,10 +154,11 @@ class CMakeSubproject(ModuleObject):
@noKwargs
@typed_pos_args('cmake.subproject.include_directories', str)
- def include_directories(self, state: ModuleState, args: T.Tuple[str], kwargs: TYPE_kwargs) -> build.IncludeDirs:
+ def include_directories(self, state: ModuleState, args: T.Tuple[str], kwargs: TYPE_kwargs) -> T.List[build.IncludeDirs]:
info = self._args_to_info(args[0])
inc = self.get_variable(state, [info['inc']], kwargs)
- assert isinstance(inc, build.IncludeDirs), 'for mypy'
+ assert isinstance(inc, list), 'for mypy'
+ assert isinstance(inc[0], build.IncludeDirs), 'for mypy'
return inc
@noKwargs
diff --git a/mesonbuild/modules/fs.py b/mesonbuild/modules/fs.py
index 1fa368e..57a6b6d 100644
--- a/mesonbuild/modules/fs.py
+++ b/mesonbuild/modules/fs.py
@@ -2,7 +2,9 @@
# Copyright 2019 The Meson development team
from __future__ import annotations
-from pathlib import Path, PurePath, PureWindowsPath
+from ntpath import sep as ntsep
+from pathlib import Path
+from posixpath import sep as posixsep
import hashlib
import os
import typing as T
@@ -12,7 +14,7 @@ from .. import mlog
from ..build import BuildTarget, CustomTarget, CustomTargetIndex, InvalidArguments
from ..interpreter.type_checking import INSTALL_KW, INSTALL_MODE_KW, INSTALL_TAG_KW, NoneType
from ..interpreterbase import FeatureNew, KwargInfo, typed_kwargs, typed_pos_args, noKwargs
-from ..mesonlib import File, MesonException, has_path_sep, path_is_in_root, relpath
+from ..mesonlib import File, MesonException, has_path_sep, is_windows, path_is_in_root, relpath
if T.TYPE_CHECKING:
from . import ModuleState
@@ -42,7 +44,7 @@ class FSModule(ExtensionModule):
INFO = ModuleInfo('fs', '0.53.0')
- def __init__(self, interpreter: 'Interpreter') -> None:
+ def __init__(self, interpreter: Interpreter) -> None:
super().__init__(interpreter)
self.methods.update({
'as_posix': self.as_posix,
@@ -62,29 +64,30 @@ class FSModule(ExtensionModule):
'replace_suffix': self.replace_suffix,
'size': self.size,
'stem': self.stem,
+ 'suffix': self.suffix,
})
- def _absolute_dir(self, state: 'ModuleState', arg: 'FileOrString') -> Path:
+ def _absolute_dir(self, state: ModuleState, arg: FileOrString) -> str:
"""
make an absolute path from a relative path, WITHOUT resolving symlinks
"""
if isinstance(arg, File):
- return Path(arg.absolute_path(state.source_root, state.environment.get_build_dir()))
- return Path(state.source_root) / Path(state.subdir) / Path(arg).expanduser()
+ return arg.absolute_path(state.source_root, state.environment.get_build_dir())
+ return os.path.join(state.source_root, state.subdir, os.path.expanduser(arg))
@staticmethod
- def _obj_to_path(feature_new_prefix: str, obj: T.Union[FileOrString, BuildTargetTypes], state: ModuleState) -> PurePath:
+ def _obj_to_pathstr(feature_new_prefix: str, obj: T.Union[FileOrString, BuildTargetTypes], state: ModuleState) -> str:
if isinstance(obj, str):
- return PurePath(obj)
+ return obj
if isinstance(obj, File):
FeatureNew(f'{feature_new_prefix} with file', '0.59.0').use(state.subproject, location=state.current_node)
- return PurePath(str(obj))
+ return str(obj)
FeatureNew(f'{feature_new_prefix} with build_tgt, custom_tgt, and custom_idx', '1.4.0').use(state.subproject, location=state.current_node)
- return PurePath(state.backend.get_target_filename(obj))
+ return state.backend.get_target_filename(obj)
- def _resolve_dir(self, state: 'ModuleState', arg: 'FileOrString') -> Path:
+ def _resolve_dir(self, state: ModuleState, arg: FileOrString) -> str:
"""
resolves symlinks and makes a directory path absolute relative to the calling meson.build,
if it is not already absolute
@@ -92,7 +95,7 @@ class FSModule(ExtensionModule):
path = self._absolute_dir(state, arg)
try:
# accommodate unresolvable paths e.g. symlink loops
- path = path.resolve()
+ path = os.path.realpath(path)
except Exception:
# return the best we could do
pass
@@ -101,123 +104,139 @@ class FSModule(ExtensionModule):
@noKwargs
@FeatureNew('fs.expanduser', '0.54.0')
@typed_pos_args('fs.expanduser', str)
- def expanduser(self, state: 'ModuleState', args: T.Tuple[str], kwargs: T.Dict[str, T.Any]) -> str:
- return str(Path(args[0]).expanduser())
+ def expanduser(self, state: ModuleState, args: T.Tuple[str], kwargs: T.Dict[str, T.Any]) -> str:
+ return os.path.expanduser(args[0])
@noKwargs
@FeatureNew('fs.is_absolute', '0.54.0')
@typed_pos_args('fs.is_absolute', (str, File))
- def is_absolute(self, state: 'ModuleState', args: T.Tuple['FileOrString'], kwargs: T.Dict[str, T.Any]) -> bool:
- if isinstance(args[0], File):
+ def is_absolute(self, state: ModuleState, args: T.Tuple[FileOrString], kwargs: T.Dict[str, T.Any]) -> bool:
+ path = args[0]
+ if isinstance(path, File):
FeatureNew('fs.is_absolute with file', '0.59.0').use(state.subproject, location=state.current_node)
- return PurePath(str(args[0])).is_absolute()
+ path = str(path)
+ if is_windows():
+ # os.path.isabs was broken for Windows before Python 3.13, so we implement it ourselves
+ path = path[:3].replace(posixsep, ntsep)
+ return path.startswith(ntsep * 2) or path.startswith(':' + ntsep, 1)
+ return path.startswith(posixsep)
@noKwargs
@FeatureNew('fs.as_posix', '0.54.0')
@typed_pos_args('fs.as_posix', str)
- def as_posix(self, state: 'ModuleState', args: T.Tuple[str], kwargs: T.Dict[str, T.Any]) -> str:
+ def as_posix(self, state: ModuleState, args: T.Tuple[str], kwargs: T.Dict[str, T.Any]) -> str:
r"""
this function assumes you are passing a Windows path, even if on a Unix-like system
and so ALL '\' are turned to '/', even if you meant to escape a character
"""
- return PureWindowsPath(args[0]).as_posix()
+ return args[0].replace(ntsep, posixsep)
@noKwargs
@typed_pos_args('fs.exists', str)
- def exists(self, state: 'ModuleState', args: T.Tuple[str], kwargs: T.Dict[str, T.Any]) -> bool:
- return self._resolve_dir(state, args[0]).exists()
+ def exists(self, state: ModuleState, args: T.Tuple[str], kwargs: T.Dict[str, T.Any]) -> bool:
+ return os.path.exists(self._resolve_dir(state, args[0]))
@noKwargs
@typed_pos_args('fs.is_symlink', (str, File))
- def is_symlink(self, state: 'ModuleState', args: T.Tuple['FileOrString'], kwargs: T.Dict[str, T.Any]) -> bool:
+ def is_symlink(self, state: ModuleState, args: T.Tuple[FileOrString], kwargs: T.Dict[str, T.Any]) -> bool:
if isinstance(args[0], File):
FeatureNew('fs.is_symlink with file', '0.59.0').use(state.subproject, location=state.current_node)
- return self._absolute_dir(state, args[0]).is_symlink()
+ return os.path.islink(self._absolute_dir(state, args[0]))
@noKwargs
@typed_pos_args('fs.is_file', str)
- def is_file(self, state: 'ModuleState', args: T.Tuple[str], kwargs: T.Dict[str, T.Any]) -> bool:
- return self._resolve_dir(state, args[0]).is_file()
+ def is_file(self, state: ModuleState, args: T.Tuple[str], kwargs: T.Dict[str, T.Any]) -> bool:
+ return os.path.isfile(self._resolve_dir(state, args[0]))
@noKwargs
@typed_pos_args('fs.is_dir', str)
- def is_dir(self, state: 'ModuleState', args: T.Tuple[str], kwargs: T.Dict[str, T.Any]) -> bool:
- return self._resolve_dir(state, args[0]).is_dir()
+ def is_dir(self, state: ModuleState, args: T.Tuple[str], kwargs: T.Dict[str, T.Any]) -> bool:
+ return os.path.isdir(self._resolve_dir(state, args[0]))
@noKwargs
@typed_pos_args('fs.hash', (str, File), str)
- def hash(self, state: 'ModuleState', args: T.Tuple['FileOrString', str], kwargs: T.Dict[str, T.Any]) -> str:
+ def hash(self, state: ModuleState, args: T.Tuple[FileOrString, str], kwargs: T.Dict[str, T.Any]) -> str:
if isinstance(args[0], File):
FeatureNew('fs.hash with file', '0.59.0').use(state.subproject, location=state.current_node)
file = self._resolve_dir(state, args[0])
- if not file.is_file():
+ if not os.path.isfile(file):
raise MesonException(f'{file} is not a file and therefore cannot be hashed')
try:
h = hashlib.new(args[1])
except ValueError:
raise MesonException('hash algorithm {} is not available'.format(args[1]))
- mlog.debug('computing {} sum of {} size {} bytes'.format(args[1], file, file.stat().st_size))
- h.update(file.read_bytes())
+ mlog.debug('computing {} sum of {} size {} bytes'.format(args[1], file, os.stat(file).st_size))
+ with open(file, mode='rb', buffering=0) as f:
+ h.update(f.read())
return h.hexdigest()
@noKwargs
@typed_pos_args('fs.size', (str, File))
- def size(self, state: 'ModuleState', args: T.Tuple['FileOrString'], kwargs: T.Dict[str, T.Any]) -> int:
+ def size(self, state: ModuleState, args: T.Tuple[FileOrString], kwargs: T.Dict[str, T.Any]) -> int:
if isinstance(args[0], File):
FeatureNew('fs.size with file', '0.59.0').use(state.subproject, location=state.current_node)
file = self._resolve_dir(state, args[0])
- if not file.is_file():
+ if not os.path.isfile(file):
raise MesonException(f'{file} is not a file and therefore cannot be sized')
try:
- return file.stat().st_size
+ return os.stat(file).st_size
except ValueError:
raise MesonException('{} size could not be determined'.format(args[0]))
@noKwargs
@typed_pos_args('fs.is_samepath', (str, File), (str, File))
- def is_samepath(self, state: 'ModuleState', args: T.Tuple['FileOrString', 'FileOrString'], kwargs: T.Dict[str, T.Any]) -> bool:
+ def is_samepath(self, state: ModuleState, args: T.Tuple[FileOrString, FileOrString], kwargs: T.Dict[str, T.Any]) -> bool:
if isinstance(args[0], File) or isinstance(args[1], File):
FeatureNew('fs.is_samepath with file', '0.59.0').use(state.subproject, location=state.current_node)
file1 = self._resolve_dir(state, args[0])
file2 = self._resolve_dir(state, args[1])
- if not file1.exists():
+ if not os.path.exists(file1):
return False
- if not file2.exists():
+ if not os.path.exists(file2):
return False
try:
- return file1.samefile(file2)
+ return os.path.samefile(file1, file2)
except OSError:
return False
@noKwargs
@typed_pos_args('fs.replace_suffix', (str, File, CustomTarget, CustomTargetIndex, BuildTarget), str)
- def replace_suffix(self, state: 'ModuleState', args: T.Tuple[T.Union[FileOrString, BuildTargetTypes], str], kwargs: T.Dict[str, T.Any]) -> str:
- path = self._obj_to_path('fs.replace_suffix', args[0], state)
- return str(path.with_suffix(args[1]))
+ def replace_suffix(self, state: ModuleState, args: T.Tuple[T.Union[FileOrString, BuildTargetTypes], str], kwargs: T.Dict[str, T.Any]) -> str:
+ if args[1] and not args[1].startswith('.'):
+ raise ValueError(f"Invalid suffix {args[1]!r}")
+ path = self._obj_to_pathstr('fs.replace_suffix', args[0], state)
+ return os.path.splitext(path)[0] + args[1]
@noKwargs
@typed_pos_args('fs.parent', (str, File, CustomTarget, CustomTargetIndex, BuildTarget))
- def parent(self, state: 'ModuleState', args: T.Tuple[T.Union[FileOrString, BuildTargetTypes]], kwargs: T.Dict[str, T.Any]) -> str:
- path = self._obj_to_path('fs.parent', args[0], state)
- return str(path.parent)
+ def parent(self, state: ModuleState, args: T.Tuple[T.Union[FileOrString, BuildTargetTypes]], kwargs: T.Dict[str, T.Any]) -> str:
+ path = self._obj_to_pathstr('fs.parent', args[0], state)
+ return os.path.split(path)[0] or '.'
@noKwargs
@typed_pos_args('fs.name', (str, File, CustomTarget, CustomTargetIndex, BuildTarget))
- def name(self, state: 'ModuleState', args: T.Tuple[T.Union[FileOrString, BuildTargetTypes]], kwargs: T.Dict[str, T.Any]) -> str:
- path = self._obj_to_path('fs.name', args[0], state)
- return str(path.name)
+ def name(self, state: ModuleState, args: T.Tuple[T.Union[FileOrString, BuildTargetTypes]], kwargs: T.Dict[str, T.Any]) -> str:
+ path = self._obj_to_pathstr('fs.name', args[0], state)
+ return os.path.basename(path)
@noKwargs
@typed_pos_args('fs.stem', (str, File, CustomTarget, CustomTargetIndex, BuildTarget))
@FeatureNew('fs.stem', '0.54.0')
- def stem(self, state: 'ModuleState', args: T.Tuple[T.Union[FileOrString, BuildTargetTypes]], kwargs: T.Dict[str, T.Any]) -> str:
- path = self._obj_to_path('fs.stem', args[0], state)
- return str(path.stem)
+ def stem(self, state: ModuleState, args: T.Tuple[T.Union[FileOrString, BuildTargetTypes]], kwargs: T.Dict[str, T.Any]) -> str:
+ path = self._obj_to_pathstr('fs.stem', args[0], state)
+ return os.path.splitext(os.path.basename(path))[0]
+
+ @noKwargs
+ @typed_pos_args('fs.suffix', (str, File, CustomTarget, CustomTargetIndex, BuildTarget))
+ @FeatureNew('fs.suffix', '1.9.0')
+ def suffix(self, state: ModuleState, args: T.Tuple[T.Union[FileOrString, BuildTargetTypes]], kwargs: T.Dict[str, T.Any]) -> str:
+ path = self._obj_to_pathstr('fs.suffix', args[0], state)
+ return os.path.splitext(path)[1]
@FeatureNew('fs.read', '0.57.0')
@typed_pos_args('fs.read', (str, File))
@typed_kwargs('fs.read', KwargInfo('encoding', str, default='utf-8'))
- def read(self, state: 'ModuleState', args: T.Tuple['FileOrString'], kwargs: 'ReadKwArgs') -> str:
+ def read(self, state: ModuleState, args: T.Tuple[FileOrString], kwargs: ReadKwArgs) -> str:
"""Read a file from the source tree and return its value as a decoded
string.
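
The fs module now operates on plain strings via os.path rather than pathlib, and the new fs.suffix (since 1.9.0), together with the reworked stem and replace_suffix, all reduce to os.path.splitext. A quick demonstration of the semantics the patch relies on; note that splitext only strips the final extension:

    import os.path

    path = 'src/foo/bar.tar.gz'

    suffix = os.path.splitext(path)[1]                  # fs.suffix
    stem = os.path.splitext(os.path.basename(path))[0]  # fs.stem
    replaced = os.path.splitext(path)[0] + '.xz'        # fs.replace_suffix

    print(suffix)    # .gz
    print(stem)      # bar.tar
    print(replaced)  # src/foo/bar.tar.xz
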
diff --git a/mesonbuild/modules/gnome.py b/mesonbuild/modules/gnome.py
index 6764133..53919bc 100644
--- a/mesonbuild/modules/gnome.py
+++ b/mesonbuild/modules/gnome.py
@@ -22,7 +22,7 @@ from .. import build
from .. import interpreter
from .. import mesonlib
from .. import mlog
-from ..build import CustomTarget, CustomTargetIndex, Executable, GeneratedList, InvalidArguments
+from ..build import CustomTarget, CustomTargetIndex, Executable, GeneratedList, InvalidArguments, OverrideExecutable
from ..dependencies import Dependency, InternalDependency
from ..dependencies.pkgconfig import PkgConfigDependency, PkgConfigInterface
from ..interpreter.type_checking import DEPENDS_KW, DEPEND_FILES_KW, ENV_KW, INSTALL_DIR_KW, INSTALL_KW, NoneType, DEPENDENCY_SOURCES_KW, in_set_validator
@@ -33,11 +33,11 @@ from ..mesonlib import (
MachineChoice, MesonException, OrderedSet, Popen_safe, join_args, quote_arg
)
from ..options import OptionKey
-from ..programs import OverrideProgram
+from ..programs import ExternalProgram, OverrideProgram
from ..scripts.gettext import read_linguas
if T.TYPE_CHECKING:
- from typing_extensions import Literal, TypedDict
+ from typing_extensions import Literal, TypeAlias, TypedDict
from . import ModuleState
from ..build import BuildTarget
@@ -45,7 +45,6 @@ if T.TYPE_CHECKING:
from ..interpreter import Interpreter
from ..interpreterbase import TYPE_var, TYPE_kwargs
from ..mesonlib import FileOrString
- from ..programs import ExternalProgram
class PostInstall(TypedDict):
glib_compile_schemas: bool
@@ -137,6 +136,8 @@ if T.TYPE_CHECKING:
install_header: bool
install_dir: T.Optional[str]
docbook: T.Optional[str]
+ rst: T.Optional[str]
+ markdown: T.Optional[str]
autocleanup: Literal['all', 'none', 'objects', 'default']
class GenMarshal(TypedDict):
@@ -196,7 +197,7 @@ if T.TYPE_CHECKING:
vtail: T.Optional[str]
depends: T.List[T.Union[BuildTarget, CustomTarget, CustomTargetIndex]]
- ToolType = T.Union[Executable, ExternalProgram, OverrideProgram]
+ ToolType: TypeAlias = T.Union[OverrideExecutable, ExternalProgram, OverrideProgram]
# Differs from the CustomTarget version in that it straight defaults to True
@@ -253,9 +254,8 @@ class GnomeModule(ExtensionModule):
def __init__(self, interpreter: 'Interpreter') -> None:
super().__init__(interpreter)
- self.gir_dep: T.Optional[Dependency] = None
- self.giscanner: T.Optional[T.Union[ExternalProgram, Executable, OverrideProgram]] = None
- self.gicompiler: T.Optional[T.Union[ExternalProgram, Executable, OverrideProgram]] = None
+ self.giscanner: T.Optional[ToolType] = None
+ self.gicompiler: T.Optional[ToolType] = None
self.install_glib_compile_schemas = False
self.install_gio_querymodules: T.List[str] = []
self.install_gtk_update_icon_cache = False
@@ -307,7 +307,7 @@ class GnomeModule(ExtensionModule):
once=True, fatal=False)
@staticmethod
- def _find_tool(state: 'ModuleState', tool: str) -> 'ToolType':
+ def _find_tool(state: 'ModuleState', tool: str, for_machine: MachineChoice = MachineChoice.HOST) -> 'ToolType':
tool_map = {
'gio-querymodules': 'gio-2.0',
'glib-compile-schemas': 'gio-2.0',
@@ -320,7 +320,7 @@ class GnomeModule(ExtensionModule):
}
depname = tool_map[tool]
varname = tool.replace('-', '_')
- return state.find_tool(tool, depname, varname)
+ return state.find_tool(tool, depname, varname, for_machine=for_machine)
@typed_kwargs(
'gnome.post_install',
@@ -634,7 +634,7 @@ class GnomeModule(ExtensionModule):
# https://github.com/mesonbuild/meson/issues/1911
# However, g-ir-scanner does not understand -Wl,-rpath
# so we need to use -L instead
- for d in state.backend.determine_rpath_dirs(lib):
+ for d in lib.determine_rpath_dirs():
d = os.path.join(state.environment.get_build_dir(), d)
link_command.append('-L' + d)
if include_rpath:
@@ -773,9 +773,7 @@ class GnomeModule(ExtensionModule):
STATIC_BUILD_REQUIRED_VERSION = ">=1.58.1"
if isinstance(girtarget, (build.StaticLibrary)) and \
- not mesonlib.version_compare(
- self._get_gir_dep(state)[0].get_version(),
- STATIC_BUILD_REQUIRED_VERSION):
+ not self._giscanner_version_compare(state, STATIC_BUILD_REQUIRED_VERSION):
raise MesonException('Static libraries can only be introspected with GObject-Introspection ' + STATIC_BUILD_REQUIRED_VERSION)
return girtarget
@@ -789,18 +787,26 @@ class GnomeModule(ExtensionModule):
if self.devenv is not None:
b.devenv.append(self.devenv)
- def _get_gir_dep(self, state: 'ModuleState') -> T.Tuple[Dependency, T.Union[Executable, 'ExternalProgram', 'OverrideProgram'],
- T.Union[Executable, 'ExternalProgram', 'OverrideProgram']]:
- if not self.gir_dep:
- self.gir_dep = state.dependency('gobject-introspection-1.0')
- self.giscanner = self._find_tool(state, 'g-ir-scanner')
- self.gicompiler = self._find_tool(state, 'g-ir-compiler')
- return self.gir_dep, self.giscanner, self.gicompiler
+ def _get_gi(self, state: 'ModuleState') -> T.Tuple[ToolType, ToolType]:
+ if not self.giscanner:
+ self.giscanner = self._find_tool(state, 'g-ir-scanner', for_machine=MachineChoice.BUILD)
+ self.gicompiler = self._find_tool(state, 'g-ir-compiler', for_machine=MachineChoice.HOST)
+ return self.giscanner, self.gicompiler
+
+ def _giscanner_version_compare(self, state: 'ModuleState', cmp: str) -> bool:
+ # Support for --version was introduced in g-i 1.58, but Ubuntu
+ # Bionic shipped 1.56.1. As all our version checks are greater
+ # than 1.58, we can just return False if get_version fails.
+ try:
+ giscanner, _ = self._get_gi(state)
+ return mesonlib.version_compare(giscanner.get_version(), cmp)
+ except MesonException:
+ return False
@functools.lru_cache(maxsize=None)
def _gir_has_option(self, option: str) -> bool:
exe = self.giscanner
- if isinstance(exe, OverrideProgram):
+ if isinstance(exe, (Executable, OverrideProgram)):
# Handle overridden g-ir-scanner
assert option in {'--extra-library', '--sources-top-dirs'}
return True
@@ -865,7 +871,7 @@ class GnomeModule(ExtensionModule):
# https://github.com/mesonbuild/meson/issues/1911
# However, g-ir-scanner does not understand -Wl,-rpath
# so we need to use -L instead
- for d in state.backend.determine_rpath_dirs(girtarget):
+ for d in girtarget.determine_rpath_dirs():
d = os.path.join(state.environment.get_build_dir(), d)
ret.append('-L' + d)
@@ -957,8 +963,8 @@ class GnomeModule(ExtensionModule):
return gir_filelist_filename
- @staticmethod
def _make_gir_target(
+ self,
state: 'ModuleState',
girfile: str,
scan_command: T.Sequence[T.Union['FileOrString', Executable, ExternalProgram, OverrideProgram]],
@@ -988,6 +994,11 @@ class GnomeModule(ExtensionModule):
run_env.set('CFLAGS', [quote_arg(x) for x in env_flags], ' ')
run_env.merge(kwargs['env'])
+ giscanner, _ = self._get_gi(state)
+
+ # response file supported?
+ rspable = self._giscanner_version_compare(state, '>= 1.85.0')
+
return GirTarget(
girfile,
state.subdir,
@@ -1002,6 +1013,7 @@ class GnomeModule(ExtensionModule):
install_dir=[install_dir],
install_tag=['devel'],
env=run_env,
+ rspable=rspable,
)
@staticmethod
@@ -1137,7 +1149,7 @@ class GnomeModule(ExtensionModule):
if len(girtargets) > 1 and any(isinstance(el, Executable) for el in girtargets):
raise MesonException('generate_gir only accepts a single argument when one of the arguments is an executable')
- gir_dep, giscanner, gicompiler = self._get_gir_dep(state)
+ giscanner, gicompiler = self._get_gi(state)
ns = kwargs['namespace']
nsversion = kwargs['nsversion']
@@ -1148,14 +1160,13 @@ class GnomeModule(ExtensionModule):
builddir = os.path.join(state.environment.get_build_dir(), state.subdir)
depends: T.List[T.Union['FileOrString', 'build.GeneratedTypes', build.BuildTarget, build.StructuredSources]] = []
- depends.extend(gir_dep.sources)
depends.extend(girtargets)
langs_compilers = self._get_girtargets_langs_compilers(girtargets)
cflags, internal_ldflags, external_ldflags = self._get_langs_compilers_flags(state, langs_compilers)
deps = self._get_gir_targets_deps(girtargets)
deps += kwargs['dependencies']
- deps += [gir_dep]
+ deps += [state.dependency('glib-2.0'), state.dependency('gobject-2.0'), state.dependency('gmodule-2.0'), state.dependency('gio-2.0')]
typelib_includes, depends = self._gather_typelib_includes_and_update_depends(state, deps, depends)
# ldflags will be misinterpreted by gir scanner (showing
# spurious dependencies) but building GStreamer fails if they
@@ -1168,13 +1179,13 @@ class GnomeModule(ExtensionModule):
scan_cflags += list(self._get_scanner_cflags(self._get_external_args_for_langs(state, [lc[0] for lc in langs_compilers])))
scan_internal_ldflags = []
scan_external_ldflags = []
- scan_env_ldflags = []
+ scan_env_ldflags = state.environment.coredata.get_external_link_args(MachineChoice.HOST, 'c')
for cli_flags, env_flags in (self._get_scanner_ldflags(internal_ldflags), self._get_scanner_ldflags(dep_internal_ldflags)):
scan_internal_ldflags += cli_flags
- scan_env_ldflags = env_flags
+ scan_env_ldflags += env_flags
for cli_flags, env_flags in (self._get_scanner_ldflags(external_ldflags), self._get_scanner_ldflags(dep_external_ldflags)):
scan_external_ldflags += cli_flags
- scan_env_ldflags = env_flags
+ scan_env_ldflags += env_flags
girtargets_inc_dirs = self._get_gir_targets_inc_dirs(girtargets)
inc_dirs = kwargs['include_directories']
@@ -1182,6 +1193,32 @@ class GnomeModule(ExtensionModule):
scan_command: T.List[T.Union[str, Executable, 'ExternalProgram', 'OverrideProgram']] = [giscanner]
scan_command += ['--quiet']
+
+ if state.environment.is_cross_build() and state.environment.need_exe_wrapper():
+ if not state.environment.has_exe_wrapper():
+ mlog.error('generate_gir requires exe_wrapper')
+
+ binary_wrapper = state.environment.get_exe_wrapper().get_command()
+ ldd = state.environment.lookup_binary_entry(MachineChoice.HOST, 'ldd')
+ if ldd is None:
+ ldd_wrapper = ['ldd']
+ else:
+ ldd_wrapper = ExternalProgram.from_bin_list(state.environment, MachineChoice.HOST, 'ldd').get_command()
+
+ WRAPPER_ARGS_REQUIRED_VERSION = ">=1.85.0"
+ if not self._giscanner_version_compare(state, WRAPPER_ARGS_REQUIRED_VERSION):
+ msg = ('Use of gnome.generate_gir during cross compilation requires '
+ f'g-ir-scanner {WRAPPER_ARGS_REQUIRED_VERSION}')
+ raise MesonException(msg)
+ else:
+ scan_command += ['--use-binary-wrapper', binary_wrapper[0]]
+ if len(binary_wrapper) > 1:
+ scan_command += ['--binary-wrapper-args-begin', *binary_wrapper[1:], '--binary-wrapper-args-end']
+
+ scan_command += ['--use-ldd-wrapper', ldd_wrapper[0]]
+ if len(ldd_wrapper) > 1:
+ scan_command += ['--ldd-wrapper-args-begin', *ldd_wrapper[1:], '--ldd-wrapper-args-end']
+
scan_command += ['--no-libtool']
scan_command += ['--namespace=' + ns, '--nsversion=' + nsversion]
scan_command += ['--warn-all']
@@ -1619,6 +1656,8 @@ class GnomeModule(ExtensionModule):
),
KwargInfo('install_header', bool, default=False, since='0.46.0'),
KwargInfo('docbook', (str, NoneType)),
+ KwargInfo('rst', (str, NoneType), since='1.9.0'),
+ KwargInfo('markdown', (str, NoneType), since='1.9.0'),
KwargInfo(
'autocleanup', str, default='default', since='0.47.0',
validator=in_set_validator({'all', 'none', 'objects'})),
@@ -1675,6 +1714,26 @@ class GnomeModule(ExtensionModule):
cmd += ['--generate-docbook', docbook]
+ if kwargs['rst'] is not None:
+ if not mesonlib.version_compare(glib_version, '>= 2.71.1'):
+ mlog.error(f'Glib version ({glib_version}) is too old to '
+ 'support the \'rst\' kwarg, need 2.71.1 or '
+ 'newer')
+
+ rst = kwargs['rst']
+
+ cmd += ['--generate-rst', rst]
+
+ if kwargs['markdown'] is not None:
+ if not mesonlib.version_compare(glib_version, '>= 2.75.2'):
+ mlog.error(f'Glib version ({glib_version}) is too old to '
+ 'support the \'markdown\' kwarg, need 2.75.2 '
+ 'or newer')
+
+ markdown = kwargs['markdown']
+
+ cmd += ['--generate-md', markdown]
+
# https://git.gnome.org/browse/glib/commit/?id=ee09bb704fe9ccb24d92dd86696a0e6bb8f0dc1a
if mesonlib.version_compare(glib_version, '>= 2.51.3'):
cmd += ['--output-directory', '@OUTDIR@', '--generate-c-code', namebase, '@INPUT@']
@@ -1750,6 +1809,48 @@ class GnomeModule(ExtensionModule):
)
targets.append(docbook_custom_target)
+ if kwargs['rst'] is not None:
+ rst = kwargs['rst']
+ # The rst output is always ${rst}-${name_of_xml_file}
+ output = namebase + '-rst'
+ outputs = []
+ for f in xml_files:
+ outputs.append('{}-{}'.format(rst, os.path.basename(str(f))))
+
+ rst_custom_target = CustomTarget(
+ output,
+ state.subdir,
+ state.subproject,
+ state.environment,
+ cmd + ['--output-directory', '@OUTDIR@', '--generate-rst', rst, '@INPUT@'],
+ xml_files,
+ outputs,
+ build_by_default=build_by_default,
+ description='Generating gdbus reStructuredText {}',
+ )
+ targets.append(rst_custom_target)
+
+ if kwargs['markdown'] is not None:
+ markdown = kwargs['markdown']
+ # The markdown output is always ${markdown}-${name_of_xml_file}
+ output = namebase + '-markdown'
+ outputs = []
+ for f in xml_files:
+ outputs.append('{}-{}'.format(markdown, os.path.basename(str(f))))
+
+ markdown_custom_target = CustomTarget(
+ output,
+ state.subdir,
+ state.subproject,
+ state.environment,
+ cmd + ['--output-directory', '@OUTDIR@', '--generate-md', markdown, '@INPUT@'],
+ xml_files,
+ outputs,
+ build_by_default=build_by_default,
+ description='Generating gdbus markdown {}',
+ )
+ targets.append(markdown_custom_target)
+
return ModuleReturnValue(targets, targets)
@typed_pos_args('gnome.mkenums', str)
@@ -1991,6 +2092,7 @@ class GnomeModule(ExtensionModule):
extra_depends=depends,
# https://github.com/mesonbuild/meson/issues/973
absolute_paths=True,
+ rspable=mesonlib.is_windows() or mesonlib.is_cygwin(),
description='Generating GObject enum file {}',
)
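
_giscanner_version_compare treats a failed --version probe as "too old": g-ir-scanner only gained --version in 1.58, and every check in this module is against newer versions, so False is the safe answer. A reduced sketch of that fallback; version_at_least and the tuple comparison are illustrative stand-ins for mesonlib.version_compare, and the real code catches MesonException rather than RuntimeError:

    def _parse(version: str):
        return tuple(int(part) for part in version.split('.'))

    def version_at_least(get_version, wanted: str) -> bool:
        # A scanner too old to support --version is, by definition,
        # older than anything we check for, so a failed probe is False.
        try:
            current = get_version()
        except RuntimeError:
            return False
        return _parse(current) >= _parse(wanted)

    def broken_probe() -> str:
        raise RuntimeError('--version unsupported')

    print(version_at_least(lambda: '1.85.0', '1.58.1'))  # True
    print(version_at_least(broken_probe, '1.58.1'))      # False
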
diff --git a/mesonbuild/modules/hotdoc.py b/mesonbuild/modules/hotdoc.py
index 5099b41..b66a0bf 100644
--- a/mesonbuild/modules/hotdoc.py
+++ b/mesonbuild/modules/hotdoc.py
@@ -14,7 +14,7 @@ from ..build import CustomTarget, CustomTargetIndex
from ..dependencies import Dependency, InternalDependency
from ..interpreterbase import (
InvalidArguments, noPosargs, noKwargs, typed_kwargs, FeatureDeprecated,
- ContainerTypeInfo, KwargInfo, typed_pos_args
+ ContainerTypeInfo, KwargInfo, typed_pos_args, InterpreterObject
)
from ..interpreter.interpreterobjects import _CustomTargetHolder
from ..interpreter.type_checking import NoneType
@@ -383,12 +383,9 @@ class HotdocTargetBuilder:
class HotdocTargetHolder(_CustomTargetHolder['HotdocTarget']):
- def __init__(self, target: HotdocTarget, interp: Interpreter):
- super().__init__(target, interp)
- self.methods.update({'config_path': self.config_path_method})
-
@noPosargs
@noKwargs
+ @InterpreterObject.method('config_path')
def config_path_method(self, *args: T.Any, **kwargs: T.Any) -> str:
conf = self.held_object.hotdoc_conf.absolute_path(self.interpreter.environment.source_dir,
self.interpreter.environment.build_dir)
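
The hotdoc change replaces per-instance self.methods.update(...) boilerplate with an InterpreterObject.method('name') decorator that registers methods when the class is defined. One way such a decorator can be built, sketched from scratch rather than copied from Meson's actual implementation:

    import typing as T

    class InterpreterObject:
        methods: T.Dict[str, str] = {}

        @staticmethod
        def method(name: str):
            # Tag the function with its Meson-visible name; it is
            # collected below when the subclass body is executed.
            def wrapper(func):
                func._meson_name = name
                return func
            return wrapper

        def __init_subclass__(cls, **kwargs):
            super().__init_subclass__(**kwargs)
            # Copy so each subclass accumulates its own table.
            cls.methods = dict(cls.methods)
            for attr in vars(cls).values():
                name = getattr(attr, '_meson_name', None)
                if name is not None:
                    cls.methods[name] = attr.__name__

    class HotdocTargetHolder(InterpreterObject):
        @InterpreterObject.method('config_path')
        def config_path_method(self) -> str:
            return '/path/to/hotdoc.json'

    h = HotdocTargetHolder()
    print(HotdocTargetHolder.methods)  # {'config_path': 'config_path_method'}
    print(getattr(h, h.methods['config_path'])())
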
diff --git a/mesonbuild/modules/pkgconfig.py b/mesonbuild/modules/pkgconfig.py
index cc0450a..bef14e9 100644
--- a/mesonbuild/modules/pkgconfig.py
+++ b/mesonbuild/modules/pkgconfig.py
@@ -38,6 +38,7 @@ if T.TYPE_CHECKING:
filebase: T.Optional[str]
description: T.Optional[str]
url: str
+ license: str
subdirs: T.List[str]
conflicts: T.List[str]
dataonly: bool
@@ -155,6 +156,14 @@ class DependenciesHelper:
pass
elif isinstance(obj, dependencies.ExternalDependency) and obj.name == 'threads':
pass
+ elif isinstance(obj, dependencies.InternalDependency) and all(lib.get_id() in self.metadata for lib in obj.libraries):
+ # Ensure BothLibraries are resolved:
+ if self.pub_libs and isinstance(self.pub_libs[0], build.StaticLibrary):
+ obj = obj.get_as_static(recursive=True)
+ else:
+ obj = obj.get_as_shared(recursive=True)
+ for lib in obj.libraries:
+ processed_reqs.append(self.metadata[lib.get_id()].filebase)
else:
raise mesonlib.MesonException('requires argument not a string, '
'library with pkgconfig-generated file '
@@ -441,6 +450,7 @@ class PkgConfigModule(NewExtensionModule):
def _generate_pkgconfig_file(self, state: ModuleState, deps: DependenciesHelper,
subdirs: T.List[str], name: str,
description: str, url: str, version: str,
+ license: str,
pcfile: str, conflicts: T.List[str],
variables: T.List[T.Tuple[str, str]],
unescaped_variables: T.List[T.Tuple[str, str]],
@@ -519,18 +529,20 @@ class PkgConfigModule(NewExtensionModule):
ofile.write(f'{k}={v}\n')
ofile.write('\n')
ofile.write(f'Name: {name}\n')
- if len(description) > 0:
+ if description:
ofile.write(f'Description: {description}\n')
- if len(url) > 0:
+ if url:
ofile.write(f'URL: {url}\n')
+ if license:
+ ofile.write(f'License: {license}\n')
ofile.write(f'Version: {version}\n')
reqs_str = deps.format_reqs(deps.pub_reqs)
- if len(reqs_str) > 0:
+ if reqs_str:
ofile.write(f'Requires: {reqs_str}\n')
reqs_str = deps.format_reqs(deps.priv_reqs)
- if len(reqs_str) > 0:
+ if reqs_str:
ofile.write(f'Requires.private: {reqs_str}\n')
- if len(conflicts) > 0:
+ if conflicts:
ofile.write('Conflicts: {}\n'.format(' '.join(conflicts)))
def generate_libs_flags(libs: T.List[LIBS]) -> T.Iterable[str]:
@@ -571,9 +583,9 @@ class PkgConfigModule(NewExtensionModule):
if isinstance(l, (build.CustomTarget, build.CustomTargetIndex)) or 'cs' not in l.compilers:
yield f'-l{lname}'
- if len(deps.pub_libs) > 0:
+ if deps.pub_libs:
ofile.write('Libs: {}\n'.format(' '.join(generate_libs_flags(deps.pub_libs))))
- if len(deps.priv_libs) > 0:
+ if deps.priv_libs:
ofile.write('Libs.private: {}\n'.format(' '.join(generate_libs_flags(deps.priv_libs))))
cflags: T.List[str] = []
@@ -605,6 +617,7 @@ class PkgConfigModule(NewExtensionModule):
KwargInfo('name', (str, NoneType), validator=lambda x: 'must not be an empty string' if x == '' else None),
KwargInfo('subdirs', ContainerTypeInfo(list, str), default=[], listify=True),
KwargInfo('url', str, default=''),
+ KwargInfo('license', str, default='', since='1.9.0'),
KwargInfo('version', (str, NoneType)),
VARIABLES_KW.evolve(name="unescaped_uninstalled_variables", since='0.59.0'),
VARIABLES_KW.evolve(name="unescaped_variables", since='0.59.0'),
@@ -659,6 +672,7 @@ class PkgConfigModule(NewExtensionModule):
filebase = kwargs['filebase'] if kwargs['filebase'] is not None else name
description = kwargs['description'] if kwargs['description'] is not None else default_description
url = kwargs['url']
+ license = kwargs['license']
conflicts = kwargs['conflicts']
# Prepend the main library to public libraries list. This is required
@@ -713,7 +727,7 @@ class PkgConfigModule(NewExtensionModule):
pkgroot_name = os.path.join('{libdir}', 'pkgconfig')
relocatable = state.get_option('pkgconfig.relocatable')
self._generate_pkgconfig_file(state, deps, subdirs, name, description, url,
- version, pcfile, conflicts, variables,
+ version, license, pcfile, conflicts, variables,
unescaped_variables, False, dataonly,
pkgroot=pkgroot if relocatable else None)
res = build.Data([mesonlib.File(True, state.environment.get_scratch_dir(), pcfile)], pkgroot, pkgroot_name, None, state.subproject, install_tag='devel')
@@ -722,7 +736,7 @@ class PkgConfigModule(NewExtensionModule):
pcfile = filebase + '-uninstalled.pc'
self._generate_pkgconfig_file(state, deps, subdirs, name, description, url,
- version, pcfile, conflicts, variables,
+ version, license, pcfile, conflicts, variables,
unescaped_variables, uninstalled=True, dataonly=dataonly)
# Associate the main library with this generated pc file. If the library
# is used in any subsequent call to the generated, it will generate a
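
pkgconfig.generate grows a license kwarg (since 1.9.0) that is emitted as a License: line between URL: and Version:, and only when non-empty, matching the truthiness style (`if description:` instead of `if len(description) > 0:`) the rest of the hunk switches to. A small sketch of the resulting field layout; write_pc is a made-up helper, not Meson's generator:

    import io
    import sys

    def write_pc(f, name: str, version: str, description: str = '',
                 url: str = '', license: str = '') -> None:
        # Optional fields are only emitted when non-empty.
        f.write(f'Name: {name}\n')
        if description:
            f.write(f'Description: {description}\n')
        if url:
            f.write(f'URL: {url}\n')
        if license:
            f.write(f'License: {license}\n')
        f.write(f'Version: {version}\n')

    buf = io.StringIO()
    write_pc(buf, 'foo', '1.2.3', description='Foo library', license='MIT')
    sys.stdout.write(buf.getvalue())
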
diff --git a/mesonbuild/modules/python.py b/mesonbuild/modules/python.py
index 2a7e685..3c07960 100644
--- a/mesonbuild/modules/python.py
+++ b/mesonbuild/modules/python.py
@@ -16,11 +16,11 @@ from ..dependencies.detect import get_dep_identifier, find_external_dependency
from ..dependencies.python import BasicPythonExternalProgram, python_factory, _PythonDependencyBase
from ..interpreter import extract_required_kwarg, permitted_dependency_kwargs, primitives as P_OBJ
from ..interpreter.interpreterobjects import _ExternalProgramHolder
-from ..interpreter.type_checking import NoneType, PRESERVE_PATH_KW, SHARED_MOD_KWS
+from ..interpreter.type_checking import NoneType, DEPENDENCY_KWS, PRESERVE_PATH_KW, SHARED_MOD_KWS
from ..interpreterbase import (
noPosargs, noKwargs, permittedKwargs, ContainerTypeInfo,
InvalidArguments, typed_pos_args, typed_kwargs, KwargInfo,
- FeatureNew, FeatureNewKwargs, disablerIfNotFound
+ FeatureNew, FeatureNewKwargs, disablerIfNotFound, InterpreterObject
)
from ..mesonlib import MachineChoice
from ..options import OptionKey
@@ -126,23 +126,11 @@ class PythonInstallation(_ExternalProgramHolder['PythonExternalProgram']):
self.platform = info['platform']
self.is_pypy = info['is_pypy']
self.link_libpython = info['link_libpython']
- self.methods.update({
- 'extension_module': self.extension_module_method,
- 'dependency': self.dependency_method,
- 'install_sources': self.install_sources_method,
- 'get_install_dir': self.get_install_dir_method,
- 'language_version': self.language_version_method,
- 'found': self.found_method,
- 'has_path': self.has_path_method,
- 'get_path': self.get_path_method,
- 'has_variable': self.has_variable_method,
- 'get_variable': self.get_variable_method,
- 'path': self.path_method,
- })
@permittedKwargs(mod_kwargs)
@typed_pos_args('python.extension_module', str, varargs=(str, mesonlib.File, CustomTarget, CustomTargetIndex, GeneratedList, StructuredSources, ExtractedObjects, BuildTarget))
@typed_kwargs('python.extension_module', *_MOD_KWARGS, _DEFAULTABLE_SUBDIR_KW, _LIMITED_API_KW, allow_unknown=True)
+ @InterpreterObject.method('extension_module')
def extension_module_method(self, args: T.Tuple[str, T.List[BuildTargetSource]], kwargs: ExtensionModuleKw) -> 'SharedModule':
if 'install_dir' in kwargs:
if kwargs['subdir'] is not None:
@@ -268,6 +256,8 @@ class PythonInstallation(_ExternalProgramHolder['PythonExternalProgram']):
@permittedKwargs(permitted_dependency_kwargs | {'embed'})
@FeatureNewKwargs('python_installation.dependency', '0.53.0', ['embed'])
@noPosargs
+ @typed_kwargs('python_installation.dependency', *DEPENDENCY_KWS, allow_unknown=True)
+ @InterpreterObject.method('dependency')
def dependency_method(self, args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> 'Dependency':
disabled, required, feature = extract_required_kwarg(kwargs, self.subproject)
if disabled:
@@ -287,6 +277,7 @@ class PythonInstallation(_ExternalProgramHolder['PythonExternalProgram']):
PRESERVE_PATH_KW,
KwargInfo('install_tag', (str, NoneType), since='0.60.0')
)
+ @InterpreterObject.method('install_sources')
def install_sources_method(self, args: T.Tuple[T.List[T.Union[str, mesonlib.File]]],
kwargs: 'PyInstallKw') -> 'Data':
self.held_object.run_bytecompile[self.version] = True
@@ -301,6 +292,7 @@ class PythonInstallation(_ExternalProgramHolder['PythonExternalProgram']):
@noPosargs
@typed_kwargs('python_installation.install_dir', _PURE_KW, _SUBDIR_KW)
+ @InterpreterObject.method('get_install_dir')
def get_install_dir_method(self, args: T.List['TYPE_var'], kwargs: 'PyInstallKw') -> str:
self.held_object.run_bytecompile[self.version] = True
pure = kwargs['pure'] if kwargs['pure'] is not None else self.pure
@@ -318,16 +310,19 @@ class PythonInstallation(_ExternalProgramHolder['PythonExternalProgram']):
@noPosargs
@noKwargs
+ @InterpreterObject.method('language_version')
def language_version_method(self, args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> str:
return self.version
@typed_pos_args('python_installation.has_path', str)
@noKwargs
+ @InterpreterObject.method('has_path')
def has_path_method(self, args: T.Tuple[str], kwargs: 'TYPE_kwargs') -> bool:
return args[0] in self.paths
@typed_pos_args('python_installation.get_path', str, optargs=[object])
@noKwargs
+ @InterpreterObject.method('get_path')
def get_path_method(self, args: T.Tuple[str, T.Optional['TYPE_var']], kwargs: 'TYPE_kwargs') -> 'TYPE_var':
path_name, fallback = args
try:
@@ -339,11 +334,13 @@ class PythonInstallation(_ExternalProgramHolder['PythonExternalProgram']):
@typed_pos_args('python_installation.has_variable', str)
@noKwargs
+ @InterpreterObject.method('has_variable')
def has_variable_method(self, args: T.Tuple[str], kwargs: 'TYPE_kwargs') -> bool:
return args[0] in self.variables
@typed_pos_args('python_installation.get_variable', str, optargs=[object])
@noKwargs
+ @InterpreterObject.method('get_variable')
def get_variable_method(self, args: T.Tuple[str, T.Optional['TYPE_var']], kwargs: 'TYPE_kwargs') -> 'TYPE_var':
var_name, fallback = args
try:
@@ -356,6 +353,7 @@ class PythonInstallation(_ExternalProgramHolder['PythonExternalProgram']):
@noPosargs
@noKwargs
@FeatureNew('Python module path method', '0.50.0')
+ @InterpreterObject.method('path')
def path_method(self, args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> str:
return super().path_method(args, kwargs)
diff --git a/mesonbuild/modules/rust.py b/mesonbuild/modules/rust.py
index f43a0ed..d0e8091 100644
--- a/mesonbuild/modules/rust.py
+++ b/mesonbuild/modules/rust.py
@@ -242,6 +242,14 @@ class RustModule(ExtensionModule):
def doctest(self, state: ModuleState, args: T.Tuple[str, T.Union[SharedLibrary, StaticLibrary]], kwargs: FuncDoctest) -> ModuleReturnValue:
name, base_target = args
+ if not base_target.uses_rust():
+ raise MesonException('doc tests are only supported for Rust targets')
+ if not base_target.uses_rust_abi():
+ raise MesonException("doc tests are not supported for rust_abi: 'c'")
+ if state.environment.is_cross_build() and state.environment.need_exe_wrapper(base_target.for_machine):
+ mlog.notice('skipping Rust doctests due to cross compilation', once=True)
+ return ModuleReturnValue(None, [])
+
# Link the base target's crate into the tests
kwargs['link_with'].append(base_target)
kwargs['depends'].append(base_target)
diff --git a/mesonbuild/mparser.py b/mesonbuild/mparser.py
index f1c6071..3dd8f0a 100644
--- a/mesonbuild/mparser.py
+++ b/mesonbuild/mparser.py
@@ -201,7 +201,7 @@ class Lexer:
lines = value.split('\n')
if len(lines) > 1:
lineno += len(lines) - 1
- line_start = mo.end() - len(lines[-1])
+ line_start = mo.end() - len(lines[-1]) - 3
elif tid == 'eol_cont':
lineno += 1
line_start = loc
@@ -221,7 +221,7 @@ class Lexer:
yield Token(tid, filename, curline_start, curline, col, bytespan, value)
break
if not matched:
- raise ParseException('lexer', self.getline(line_start), lineno, col)
+ raise ParseException(f'lexer: unrecognized token {self.code[loc]!r}', self.getline(line_start), lineno, loc - line_start)
@dataclass
class BaseNode:
@@ -369,6 +369,13 @@ class ArgumentNode(BaseNode):
mlog.warning('This will be an error in Meson 2.0.')
self.kwargs[name] = value
+ def get_kwarg_or_default(self, name: str, default: BaseNode) -> BaseNode:
+ for k, v in self.kwargs.items():
+ assert isinstance(k, IdNode)
+ if k.value == name:
+ return v
+ return default
+
def set_kwarg_no_check(self, name: BaseNode, value: BaseNode) -> None:
self.kwargs[name] = value
@@ -676,15 +683,16 @@ comparison_map: T.Mapping[str, COMPARISONS] = {
# levels so there are not enough words to describe them all.
# Enter numbering:
#
-# 1 assignment
-# 2 or
-# 3 and
-# 4 comparison
-# 5 arithmetic
-# 6 negation
-# 7 funcall, method call
-# 8 parentheses
-# 9 plain token
+# 1 assignment
+# 2 or
+# 3 and
+# 4 comparison
+# 5 addition and subtraction
+# 6 multiplication, division and modulus
+# 7 negation
+# 8 funcall, method call
+# 9 parentheses
+# 10 plain token
class Parser:
def __init__(self, code: str, filename: str):
@@ -831,50 +839,47 @@ class Parser:
return left
def e5(self) -> BaseNode:
- return self.e5addsub()
-
- def e5addsub(self) -> BaseNode:
op_map = {
'plus': 'add',
'dash': 'sub',
}
- left = self.e5muldiv()
+ left = self.e6()
while True:
op = self.accept_any(tuple(op_map.keys()))
if op:
operator = self.create_node(SymbolNode, self.previous)
- left = self.create_node(ArithmeticNode, op_map[op], left, operator, self.e5muldiv())
+ left = self.create_node(ArithmeticNode, op_map[op], left, operator, self.e6())
else:
break
return left
- def e5muldiv(self) -> BaseNode:
+ def e6(self) -> BaseNode:
op_map = {
'percent': 'mod',
'star': 'mul',
'fslash': 'div',
}
- left = self.e6()
+ left = self.e7()
while True:
op = self.accept_any(tuple(op_map.keys()))
if op:
operator = self.create_node(SymbolNode, self.previous)
- left = self.create_node(ArithmeticNode, op_map[op], left, operator, self.e6())
+ left = self.create_node(ArithmeticNode, op_map[op], left, operator, self.e7())
else:
break
return left
- def e6(self) -> BaseNode:
+ def e7(self) -> BaseNode:
if self.accept('not'):
operator = self.create_node(SymbolNode, self.previous)
- return self.create_node(NotNode, self.current, operator, self.e7())
+ return self.create_node(NotNode, self.current, operator, self.e8())
if self.accept('dash'):
operator = self.create_node(SymbolNode, self.previous)
- return self.create_node(UMinusNode, self.current, operator, self.e7())
- return self.e7()
+ return self.create_node(UMinusNode, self.current, operator, self.e8())
+ return self.e8()
- def e7(self) -> BaseNode:
- left = self.e8()
+ def e8(self) -> BaseNode:
+ left = self.e9()
block_start = self.current
if self.accept('lparen'):
lpar = self.create_node(SymbolNode, block_start)
@@ -897,7 +902,7 @@ class Parser:
left = self.index_call(left)
return left
- def e8(self) -> BaseNode:
+ def e9(self) -> BaseNode:
block_start = self.current
if self.accept('lparen'):
lpar = self.create_node(SymbolNode, block_start)
@@ -918,9 +923,9 @@ class Parser:
rcurl = self.create_node(SymbolNode, self.previous)
return self.create_node(DictNode, lcurl, key_values, rcurl)
else:
- return self.e9()
+ return self.e10()
- def e9(self) -> BaseNode:
+ def e10(self) -> BaseNode:
t = self.current
if self.accept('true'):
t.value = True
@@ -978,7 +983,7 @@ class Parser:
def method_call(self, source_object: BaseNode) -> MethodNode:
dot = self.create_node(SymbolNode, self.previous)
- methodname = self.e9()
+ methodname = self.e10()
if not isinstance(methodname, IdNode):
if isinstance(source_object, NumberNode) and isinstance(methodname, NumberNode):
raise ParseException('meson does not support float numbers',
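
The renumbering falls out of splitting the old single arithmetic level into separate addition/subtraction (e5) and multiplication/division/modulus (e6) levels, which is what gives `*` higher precedence than `+`. A toy recursive-descent chain in the same shape (not Meson's Parser; it evaluates integers instead of building AST nodes):

    import re
    import typing as T

    class ToyParser:
        # Same level shape as the patch: e5 (add/sub) -> e6 (mul/div/mod) -> e10 (token).
        def __init__(self, code: str) -> None:
            self.toks = re.findall(r'\d+|[+\-*/%]', code)
            self.pos = 0

        def accept(self, ops: str) -> T.Optional[str]:
            if self.pos < len(self.toks) and self.toks[self.pos] in ops:
                self.pos += 1
                return self.toks[self.pos - 1]
            return None

        def e5(self) -> int:                    # addition and subtraction
            left = self.e6()
            while op := self.accept('+-'):
                left = left + self.e6() if op == '+' else left - self.e6()
            return left

        def e6(self) -> int:                    # multiplication, division, modulus
            left = self.e10()
            while op := self.accept('*/%'):
                right = self.e10()
                if op == '*':
                    left *= right
                elif op == '/':
                    left //= right
                else:
                    left %= right
            return left

        def e10(self) -> int:                   # plain token
            tok = self.toks[self.pos]
            self.pos += 1
            return int(tok)

    print(ToyParser('1 + 2 * 3').e5())  # 7: '*' binds tighter because e5 takes its operands from e6
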
diff --git a/mesonbuild/msetup.py b/mesonbuild/msetup.py
index 81dd183..8d7dd0b 100644
--- a/mesonbuild/msetup.py
+++ b/mesonbuild/msetup.py
@@ -11,7 +11,7 @@ import typing as T
from . import build, coredata, environment, interpreter, mesonlib, mintro, mlog
from .mesonlib import MesonException
-from .options import COMPILER_BASE_OPTIONS, OptionKey
+from .options import OptionKey
if T.TYPE_CHECKING:
from typing_extensions import Protocol
@@ -27,7 +27,6 @@ if T.TYPE_CHECKING:
builddir: str
sourcedir: str
pager: bool
- unset_opts: T.List[str]
git_ignore_file = '''# This file is autogenerated by Meson. If you change or delete it, it won't be recreated.
*
@@ -180,36 +179,39 @@ class MesonApp:
# See class Backend's 'generate' for comments on capture args and returned dictionary.
def generate(self, capture: bool = False, vslite_ctx: T.Optional[dict] = None) -> T.Optional[dict]:
env = environment.Environment(self.source_dir, self.build_dir, self.options)
+ if not env.first_invocation:
+ assert self.options.reconfigure
+ env.coredata.set_from_configure_command(self.options)
mlog.initialize(env.get_log_dir(), self.options.fatal_warnings)
if self.options.profile:
mlog.set_timestamp_start(time.monotonic())
if self.options.clearcache:
env.coredata.clear_cache()
- with mesonlib.BuildDirLock(self.build_dir):
+ with mesonlib.DirectoryLock(self.build_dir, 'meson-private/meson.lock',
+ mesonlib.DirectoryLockAction.FAIL,
+ 'Some other Meson process is already using this build directory. Exiting.'):
return self._generate(env, capture, vslite_ctx)
- def check_unused_options(self, coredata: 'coredata.CoreData', cmd_line_options: T.Any, all_subprojects: T.Any) -> None:
- pending = coredata.optstore.pending_options
+ def check_unused_options(self, coredata: 'coredata.CoreData', cmd_line_options: T.Dict[OptionKey, str], all_subprojects: T.Mapping[str, object]) -> None:
errlist: T.List[str] = []
- for opt in pending:
- # It is not an error to set wrong option for unknown subprojects or
- # language because we don't have control on which one will be selected.
- if opt.subproject and opt.subproject not in all_subprojects:
+ known_subprojects = all_subprojects.keys()
+ for opt in cmd_line_options:
+ # Accept options that exist or could appear in subsequent reconfigurations,
+ # including options for subprojects that were not used
+ if opt in coredata.optstore or \
+ opt.evolve(subproject=None) in coredata.optstore or \
+ coredata.optstore.accept_as_pending_option(opt):
continue
- if coredata.optstore.is_compiler_option(opt):
+ if opt.subproject and opt.subproject not in known_subprojects:
continue
- if (coredata.optstore.is_base_option(opt) and
- opt.evolve(subproject=None, machine=mesonlib.MachineChoice.HOST) in COMPILER_BASE_OPTIONS):
+ # "foo=true" may also refer to toplevel project option ":foo"
+ if opt.subproject is None and coredata.optstore.is_project_option(opt.as_root()):
continue
- keystr = str(opt)
- if keystr in cmd_line_options:
- errlist.append(f'"{keystr}"')
+ errlist.append(f'"{opt}"')
if errlist:
errstr = ', '.join(errlist)
raise MesonException(f'Unknown options: {errstr}')
- coredata.optstore.clear_pending()
-
def _generate(self, env: environment.Environment, capture: bool, vslite_ctx: T.Optional[dict]) -> T.Optional[dict]:
# Get all user defined options, including options that have been defined
# during a previous invocation or using meson configure.
@@ -347,17 +349,18 @@ def run_genvslite_setup(options: CMDOptions) -> None:
# invoke the appropriate 'meson compile ...' build commands upon the normal visual studio build/rebuild/clean actions, instead of using
# the native VS/msbuild system.
builddir_prefix = options.builddir
- genvsliteval = options.cmd_line_options.pop('genvslite') # type: ignore [call-overload]
+ k_genvslite = OptionKey('genvslite')
+ genvsliteval = options.cmd_line_options.pop(k_genvslite)
# The command line may specify a '--backend' option, which doesn't make sense in conjunction with
# '--genvslite', where we always want to use a ninja back end -
- k_backend = 'backend'
+ k_backend = OptionKey('backend')
if k_backend in options.cmd_line_options.keys():
- if options.cmd_line_options[k_backend] != 'ninja': # type: ignore [index]
+ if options.cmd_line_options[k_backend] != 'ninja':
raise MesonException('Explicitly specifying a backend option with \'genvslite\' is not necessary '
'(the ninja backend is always used) but specifying a non-ninja backend '
'conflicts with a \'genvslite\' setup')
else:
- options.cmd_line_options[k_backend] = 'ninja' # type: ignore [index]
+ options.cmd_line_options[k_backend] = 'ninja'
buildtypes_list = coredata.get_genvs_default_buildtype_list()
vslite_ctx = {}
@@ -368,7 +371,7 @@ def run_genvslite_setup(options: CMDOptions) -> None:
vslite_ctx[buildtypestr] = app.generate(capture=True)
#Now for generating the 'lite' solution and project files, which will use these builds we've just set up, above.
options.builddir = f'{builddir_prefix}_vs'
- options.cmd_line_options[OptionKey('genvslite')] = genvsliteval
+ options.cmd_line_options[k_genvslite] = genvsliteval
app = MesonApp(options)
app.generate(capture=False, vslite_ctx=vslite_ctx)
@@ -384,7 +387,7 @@ def run(options: T.Union[CMDOptions, T.List[str]]) -> int:
# lie
options.pager = False
- if 'genvslite' in options.cmd_line_options.keys():
+ if OptionKey('genvslite') in options.cmd_line_options.keys():
run_genvslite_setup(options)
else:
app = MesonApp(options)
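
Keying cmd_line_options by OptionKey removes the string/OptionKey mixing that previously forced the type: ignore comments. A reduced model of the genvslite handling, assuming a stripped-down stand-in for mesonbuild.options.OptionKey:

    import typing as T
    from dataclasses import dataclass

    @dataclass(frozen=True)
    class OptionKey:
        name: str
        subproject: T.Optional[str] = None

    cmd_line_options: T.Dict[OptionKey, str] = {
        OptionKey('genvslite'): 'vs2022',
        OptionKey('buildtype'): 'debug',
    }

    k_genvslite = OptionKey('genvslite')
    k_backend = OptionKey('backend')

    genvsliteval = cmd_line_options.pop(k_genvslite)
    # Force the ninja backend unless it was already requested explicitly;
    # any other explicit backend conflicts with genvslite.
    if cmd_line_options.get(k_backend, 'ninja') != 'ninja':
        raise SystemExit('a non-ninja backend conflicts with genvslite')
    cmd_line_options[k_backend] = 'ninja'
    print(genvsliteval, cmd_line_options)
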
diff --git a/mesonbuild/msubprojects.py b/mesonbuild/msubprojects.py
index c74283c..d4549c0 100755
--- a/mesonbuild/msubprojects.py
+++ b/mesonbuild/msubprojects.py
@@ -4,6 +4,7 @@ from dataclasses import dataclass, InitVar
import os, subprocess
import argparse
import asyncio
+import fnmatch
import threading
import copy
import shutil
@@ -640,9 +641,14 @@ def add_common_arguments(p: argparse.ArgumentParser) -> None:
p.add_argument('--allow-insecure', default=False, action='store_true',
help='Allow insecure server connections.')
-def add_subprojects_argument(p: argparse.ArgumentParser) -> None:
- p.add_argument('subprojects', nargs='*',
- help='List of subprojects (default: all)')
+def add_subprojects_argument(p: argparse.ArgumentParser, name: T.Optional[str] = None) -> None:
+ helpstr = 'Patterns of subprojects to operate on (default: all)'
+ if name:
+        p.add_argument(name, dest='subprojects', metavar='pattern', action='append',
+ default=[], help=helpstr)
+ else:
+ p.add_argument('subprojects', metavar='pattern', nargs='*', default=[],
+ help=helpstr)
def add_wrap_update_parser(subparsers: 'SubParsers') -> argparse.ArgumentParser:
p = subparsers.add_parser('update', help='Update wrap files from WrapDB (Since 0.63.0)')
@@ -692,7 +698,7 @@ def add_arguments(parser: argparse.ArgumentParser) -> None:
p.add_argument('args', nargs=argparse.REMAINDER,
help=argparse.SUPPRESS)
add_common_arguments(p)
- p.set_defaults(subprojects=[])
+ add_subprojects_argument(p, '--filter')
p.set_defaults(subprojects_func=Runner.foreach)
p = subparsers.add_parser('purge', help='Remove all wrap-based subproject artifacts')
@@ -724,7 +730,8 @@ def run(options: 'Arguments') -> int:
return 0
r = Resolver(source_dir, subproject_dir, wrap_frontend=True, allow_insecure=options.allow_insecure, silent=True)
if options.subprojects:
- wraps = [wrap for name, wrap in r.wraps.items() if name in options.subprojects]
+ wraps = [wrap for name, wrap in r.wraps.items()
+ if any(fnmatch.fnmatch(name, pat) for pat in options.subprojects)]
else:
wraps = list(r.wraps.values())
types = [t.strip() for t in options.types.split(',')] if options.types else []
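
The effect of the new --filter handling is that subprojects are selected by shell-style globbing rather than exact names. A self-contained sketch of the selection step (the wrap values are placeholder strings):

    import fnmatch
    import typing as T

    wraps: T.Dict[str, str] = {'zlib': 'file', 'libpng': 'git', 'libjpeg': 'git'}
    patterns = ['lib*']  # e.g. from: meson subprojects foreach --filter 'lib*'

    selected = [name for name, _wrap in wraps.items()
                if any(fnmatch.fnmatch(name, pat) for pat in patterns)]
    print(selected)  # ['libpng', 'libjpeg']; zlib does not match
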
diff --git a/mesonbuild/options.py b/mesonbuild/options.py
index 62413b1..988b4f3 100644
--- a/mesonbuild/options.py
+++ b/mesonbuild/options.py
@@ -310,7 +310,7 @@ class OptionKey:
return self.machine is MachineChoice.BUILD
if T.TYPE_CHECKING:
- OptionStringLikeDict: TypeAlias = T.Dict[T.Union[OptionKey, str], str]
+ OptionDict: TypeAlias = T.Dict[OptionKey, ElementaryOptionValues]
@dataclasses.dataclass
class UserOption(T.Generic[_T], HoldableObject):
@@ -321,13 +321,20 @@ class UserOption(T.Generic[_T], HoldableObject):
yielding: bool = DEFAULT_YIELDING
deprecated: DeprecatedType = False
readonly: bool = dataclasses.field(default=False)
+ parent: T.Optional[UserOption] = None
def __post_init__(self, value_: _T) -> None:
self.value = self.validate_value(value_)
# Final isn't technically allowed in a __post_init__ method
self.default: Final[_T] = self.value # type: ignore[misc]
- def listify(self, value: T.Any) -> T.List[T.Any]:
+ def listify(self, value: ElementaryOptionValues) -> T.List[str]:
+ if isinstance(value, list):
+ return value
+ if isinstance(value, bool):
+ return ['true'] if value else ['false']
+ if isinstance(value, int):
+ return [str(value)]
return [value]
def printable_value(self) -> ElementaryOptionValues:
@@ -340,10 +347,10 @@ class UserOption(T.Generic[_T], HoldableObject):
# Check that the input is a valid value and return the
# "cleaned" or "native" version. For example the Boolean
# option could take the string "true" and return True.
- def validate_value(self, value: T.Any) -> _T:
+ def validate_value(self, value: object) -> _T:
raise RuntimeError('Derived option class did not override validate_value.')
- def set_value(self, newvalue: T.Any) -> bool:
+ def set_value(self, newvalue: object) -> bool:
oldvalue = self.value
self.value = self.validate_value(newvalue)
return self.value != oldvalue
@@ -361,7 +368,7 @@ class EnumeratedUserOption(UserOption[_T]):
class UserStringOption(UserOption[str]):
- def validate_value(self, value: T.Any) -> str:
+ def validate_value(self, value: object) -> str:
if not isinstance(value, str):
raise MesonException(f'The value of option "{self.name}" is "{value}", which is not a string.')
return value
@@ -374,7 +381,7 @@ class UserBooleanOption(EnumeratedUserOption[bool]):
def __bool__(self) -> bool:
return self.value
- def validate_value(self, value: T.Any) -> bool:
+ def validate_value(self, value: object) -> bool:
if isinstance(value, bool):
return value
if not isinstance(value, str):
@@ -406,7 +413,7 @@ class _UserIntegerBase(UserOption[_T]):
def printable_choices(self) -> T.Optional[T.List[str]]:
return [self.__choices]
- def validate_value(self, value: T.Any) -> _T:
+ def validate_value(self, value: object) -> _T:
if isinstance(value, str):
value = T.cast('_T', self.toint(value))
if not isinstance(value, int):
@@ -450,7 +457,7 @@ class UserUmaskOption(_UserIntegerBase[T.Union["Literal['preserve']", OctalInt]]
return format(self.value, '04o')
return self.value
- def validate_value(self, value: T.Any) -> T.Union[Literal['preserve'], OctalInt]:
+ def validate_value(self, value: object) -> T.Union[Literal['preserve'], OctalInt]:
if value == 'preserve':
return 'preserve'
return OctalInt(super().validate_value(value))
@@ -465,7 +472,7 @@ class UserUmaskOption(_UserIntegerBase[T.Union["Literal['preserve']", OctalInt]]
@dataclasses.dataclass
class UserComboOption(EnumeratedUserOption[str]):
- def validate_value(self, value: T.Any) -> str:
+ def validate_value(self, value: object) -> str:
if value not in self.choices:
if isinstance(value, bool):
_type = 'boolean'
@@ -503,13 +510,13 @@ class UserArrayOption(UserOption[T.List[_T]]):
@dataclasses.dataclass
class UserStringArrayOption(UserArrayOption[str]):
- def listify(self, value: T.Any) -> T.List[T.Any]:
+ def listify(self, value: object) -> T.List[str]:
try:
return listify_array_value(value, self.split_args)
except MesonException as e:
raise MesonException(f'error in option "{self.name}": {e!s}')
- def validate_value(self, value: T.Union[str, T.List[str]]) -> T.List[str]:
+ def validate_value(self, value: object) -> T.List[str]:
newvalue = self.listify(value)
if not self.allow_dups and len(set(newvalue)) != len(newvalue):
@@ -606,11 +613,14 @@ class UserStdOption(UserComboOption):
else:
self.choices += gnu_stds_map.keys()
- def validate_value(self, value: T.Union[str, T.List[str]]) -> str:
+ def validate_value(self, value: object) -> str:
try:
candidates = listify_array_value(value)
except MesonException as e:
raise MesonException(f'error in option "{self.name}": {e!s}')
+ for std in candidates:
+ if not isinstance(std, str):
+ raise MesonException(f'String array element "{candidates!s}" for option "{self.name}" is not a string.')
unknown = ','.join(std for std in candidates if std not in self.all_stds)
if unknown:
raise MesonException(f'Unknown option "{self.name}" value {unknown}. Possible values are {self.all_stds}.')
@@ -796,21 +806,19 @@ class OptionStore:
def __init__(self, is_cross: bool) -> None:
self.options: T.Dict['OptionKey', 'AnyOptionType'] = {}
+ self.subprojects: T.Set[str] = set()
self.project_options: T.Set[OptionKey] = set()
self.module_options: T.Set[OptionKey] = set()
from .compilers import all_languages
self.all_languages = set(all_languages)
- self.project_options = set()
- self.augments: T.Dict[str, str] = {}
+ self.augments: OptionDict = {}
self.is_cross = is_cross
- # Pending options are options that need to be initialized later, either
- # configuration dependent options like compiler options, or options for
- # a different subproject
- self.pending_options: T.Dict[OptionKey, ElementaryOptionValues] = {}
-
- def clear_pending(self) -> None:
- self.pending_options = {}
+ # Pending options are configuration dependent options that could be
+ # initialized later, such as compiler options
+ self.pending_options: OptionDict = {}
+ # Subproject options from toplevel project()
+ self.pending_subproject_options: OptionDict = {}
def ensure_and_validate_key(self, key: T.Union[OptionKey, str]) -> OptionKey:
if isinstance(key, str):
@@ -829,6 +837,12 @@ class OptionStore:
key = key.as_host()
return key
+ def get_pending_value(self, key: T.Union[OptionKey, str], default: T.Optional[ElementaryOptionValues] = None) -> ElementaryOptionValues:
+ key = self.ensure_and_validate_key(key)
+ if key in self.options:
+ return self.options[key].value
+ return self.pending_options.get(key, default)
+
def get_value(self, key: T.Union[OptionKey, str]) -> ElementaryOptionValues:
return self.get_value_object(key).value
@@ -840,41 +854,33 @@ class OptionStore:
potential = self.options.get(key, None)
if self.is_project_option(key):
assert key.subproject is not None
- if potential is not None and potential.yielding:
- parent_key = key.as_root()
- try:
- parent_option = self.options[parent_key]
- except KeyError:
- # Subproject is set to yield, but top level
- # project does not have an option of the same
- # name. Return the subproject option.
- return potential
- # If parent object has different type, do not yield.
- # This should probably be an error.
- if type(parent_option) is type(potential):
- return parent_option
- return potential
if potential is None:
                raise KeyError(f'Tried to access nonexistent project option {key}.')
- return potential
else:
if potential is None:
parent_key = OptionKey(key.name, subproject=None, machine=key.machine)
if parent_key not in self.options:
                    raise KeyError(f'Tried to access nonexistent project parent option {parent_key}.')
+ # This is a global option but it can still have per-project
+ # augment, so return the subproject key.
return self.options[parent_key]
- return potential
+ return potential
def get_value_object_and_value_for(self, key: OptionKey) -> T.Tuple[AnyOptionType, ElementaryOptionValues]:
assert isinstance(key, OptionKey)
vobject = self.get_value_object_for(key)
computed_value = vobject.value
- if key.subproject is not None:
- keystr = str(key)
- if keystr in self.augments:
- computed_value = vobject.validate_value(self.augments[keystr])
+ if key in self.augments:
+ assert key.subproject is not None
+ computed_value = self.augments[key]
+ elif vobject.yielding:
+ computed_value = vobject.parent.value
return (vobject, computed_value)
+ def option_has_value(self, key: OptionKey, value: ElementaryOptionValues) -> bool:
+ vobject, current_value = self.get_value_object_and_value_for(key)
+ return vobject.validate_value(value) == current_value
+
def get_value_for(self, name: 'T.Union[OptionKey, str]', subproject: T.Optional[str] = None) -> ElementaryOptionValues:
if isinstance(name, str):
key = OptionKey(name, subproject)
@@ -897,16 +903,16 @@ class OptionStore:
if key in self.options:
return
- self.options[key] = valobj
pval = self.pending_options.pop(key, None)
if key.subproject:
proj_key = key.evolve(subproject=None)
self.add_system_option_internal(proj_key, valobj)
- if pval is None:
- pval = self.options[proj_key].value
-
- if pval is not None:
- self.set_option(key, pval)
+ if pval is not None:
+ self.augments[key] = pval
+ else:
+ self.options[key] = valobj
+ if pval is not None:
+ self.set_option(key, pval)
def add_compiler_option(self, language: str, key: T.Union[OptionKey, str], valobj: AnyOptionType) -> None:
key = self.ensure_and_validate_key(key)
@@ -919,6 +925,19 @@ class OptionStore:
assert key.subproject is not None
if key in self.options:
raise MesonException(f'Internal error: tried to add a project option {key} that already exists.')
+ if valobj.yielding and key.subproject:
+ parent_key = key.as_root()
+ try:
+ parent_option = self.options[parent_key]
+ # If parent object has different type, do not yield.
+ # This should probably be an error.
+ if type(parent_option) is type(valobj):
+ valobj.parent = parent_option
+ except KeyError:
+ # Subproject is set to yield, but top level
+            # project does not have an option of the same name.
+ pass
+ valobj.yielding = bool(valobj.parent)
self.options[key] = valobj
self.project_options.add(key)
@@ -985,6 +1004,11 @@ class OptionStore:
return value.as_posix()
def set_option(self, key: OptionKey, new_value: ElementaryOptionValues, first_invocation: bool = False) -> bool:
+ changed = False
+ error_key = key
+ if error_key.subproject == '':
+ error_key = error_key.evolve(subproject=None)
+
if key.name == 'prefix':
assert isinstance(new_value, str), 'for mypy'
new_value = self.sanitize_prefix(new_value)
@@ -996,83 +1020,114 @@ class OptionStore:
try:
opt = self.get_value_object_for(key)
except KeyError:
- raise MesonException(f'Unknown options: "{key!s}" not found.')
+ raise MesonException(f'Unknown option: "{error_key}".')
if opt.deprecated is True:
- mlog.deprecation(f'Option {key.name!r} is deprecated')
+ mlog.deprecation(f'Option "{error_key}" is deprecated')
elif isinstance(opt.deprecated, list):
for v in opt.listify(new_value):
if v in opt.deprecated:
- mlog.deprecation(f'Option {key.name!r} value {v!r} is deprecated')
+ mlog.deprecation(f'Option "{error_key}" value {v!r} is deprecated')
elif isinstance(opt.deprecated, dict):
- def replace(v: T.Any) -> T.Any:
+ def replace(v: str) -> str:
assert isinstance(opt.deprecated, dict) # No, Mypy can not tell this from two lines above
newvalue = opt.deprecated.get(v)
if newvalue is not None:
- mlog.deprecation(f'Option {key.name!r} value {v!r} is replaced by {newvalue!r}')
+ mlog.deprecation(f'Option "{error_key}" value {v!r} is replaced by {newvalue!r}')
return newvalue
return v
valarr = [replace(v) for v in opt.listify(new_value)]
new_value = ','.join(valarr)
elif isinstance(opt.deprecated, str):
- mlog.deprecation(f'Option {key.name!r} is replaced by {opt.deprecated!r}')
+ mlog.deprecation(f'Option "{error_key}" is replaced by {opt.deprecated!r}')
            # Change both this option and the new one pointed to.
- dirty = self.set_option(key.evolve(name=opt.deprecated), new_value)
- dirty |= opt.set_value(new_value)
- return dirty
+ changed |= self.set_option(key.evolve(name=opt.deprecated), new_value, first_invocation)
- old_value = opt.value
- changed = opt.set_value(new_value)
+ new_value = opt.validate_value(new_value)
+ if key in self.options:
+ old_value = opt.value
+ opt.set_value(new_value)
+ opt.yielding = False
+ else:
+ assert key.subproject is not None
+ old_value = self.augments.get(key, opt.value)
+ self.augments[key] = new_value
+ changed |= old_value != new_value
if opt.readonly and changed and not first_invocation:
- raise MesonException(f'Tried to modify read only option {str(key)!r}')
+ raise MesonException(f'Tried to modify read only option "{error_key}"')
if key.name == 'prefix' and first_invocation and changed:
assert isinstance(old_value, str), 'for mypy'
assert isinstance(new_value, str), 'for mypy'
self.reset_prefixed_options(old_value, new_value)
- if changed and key.name == 'buildtype':
+ if changed and key.name == 'buildtype' and new_value != 'custom':
assert isinstance(new_value, str), 'for mypy'
optimization, debug = self.DEFAULT_DEPENDENTS[new_value]
dkey = key.evolve(name='debug')
optkey = key.evolve(name='optimization')
- self.options[dkey].set_value(debug)
- self.options[optkey].set_value(optimization)
+ self.set_option(dkey, debug, first_invocation)
+ self.set_option(optkey, optimization, first_invocation)
return changed
- def set_option_from_string(self, keystr: T.Union[OptionKey, str], new_value: str) -> bool:
- if isinstance(keystr, OptionKey):
- o = keystr
- else:
- o = OptionKey.from_string(keystr)
+ def set_user_option(self, o: OptionKey, new_value: ElementaryOptionValues, first_invocation: bool = False) -> bool:
+ if not self.is_cross and o.is_for_build():
+ return False
+
+ # This is complicated by the fact that a string can have two meanings:
+ #
+ # default_options: 'foo=bar'
+ #
+ # can be either
+ #
+ # A) a system option in which case the subproject is None
+ # B) a project option, in which case the subproject is ''
+ #
+        # The key parsing function cannot handle the difference between the two
+ # and defaults to A.
if o in self.options:
- return self.set_option(o, new_value)
- o = o.as_root()
- return self.set_option(o, new_value)
+ return self.set_option(o, new_value, first_invocation)
+
+ # could also be an augment...
+ global_option = o.evolve(subproject=None)
+ if o.subproject is not None and global_option in self.options:
+ return self.set_option(o, new_value, first_invocation)
+
+ if self.accept_as_pending_option(o, first_invocation=first_invocation):
+ old_value = self.pending_options.get(o, None)
+ self.pending_options[o] = new_value
+ return old_value is None or str(old_value) != new_value
+ elif o.subproject is None:
+ o = o.as_root()
+ return self.set_option(o, new_value, first_invocation)
+ else:
+ raise MesonException(f'Unknown option: "{o}".')
- def set_from_configure_command(self, D_args: T.List[str], U_args: T.List[str]) -> bool:
+ def set_from_configure_command(self, D_args: T.Dict[OptionKey, T.Optional[str]]) -> bool:
dirty = False
- D_args = [] if D_args is None else D_args
- (global_options, perproject_global_options, project_options) = self.classify_D_arguments(D_args)
- U_args = [] if U_args is None else U_args
- for key, valstr in global_options:
- dirty |= self.set_option_from_string(key, valstr)
- for key, valstr in project_options:
- dirty |= self.set_option_from_string(key, valstr)
- for keystr, valstr in perproject_global_options:
- if keystr in self.augments:
- if self.augments[keystr] != valstr:
- self.augments[keystr] = valstr
- dirty = True
- else:
- self.augments[keystr] = valstr
- dirty = True
- for delete in U_args:
- if delete in self.augments:
- del self.augments[delete]
+ for key, valstr in D_args.items():
+ if valstr is not None:
+ dirty |= self.set_user_option(key, valstr)
+ continue
+
+ if key in self.augments:
+ del self.augments[key]
dirty = True
+ else:
+ # TODO: For project options, "dropping an augment" means going
+ # back to the superproject's value. However, it's confusing
+ # that -U does not simply remove the option from the stored
+ # cmd_line_options. This may cause "meson setup --wipe" to
+ # have surprising behavior. For this to work, UserOption
+ # should only store the default value and the option values
+ # should be stored with their source (project(), subproject(),
+ # machine file, command line). This way the effective value
+ # can be easily recomputed.
+ opt = self.get_value_object(key)
+ dirty |= not opt.yielding and bool(opt.parent)
+ opt.yielding = bool(opt.parent)
return dirty
def reset_prefixed_options(self, old_prefix: str, new_prefix: str) -> None:
@@ -1193,64 +1248,25 @@ class OptionStore:
def is_module_option(self, key: OptionKey) -> bool:
return key in self.module_options
- def classify_D_arguments(self, D: T.List[str]) -> T.Tuple[T.List[T.Tuple[OptionKey, str]],
- T.List[T.Tuple[str, str]],
- T.List[T.Tuple[OptionKey, str]]]:
- global_options = []
- project_options = []
- perproject_global_options = []
- for setval in D:
- keystr, valstr = setval.split('=', 1)
- key = OptionKey.from_string(keystr)
- valuetuple = (key, valstr)
- if self.is_project_option(key):
- project_options.append(valuetuple)
- elif key.subproject is None:
- global_options.append(valuetuple)
- else:
- # FIXME, augments are currently stored as strings, not OptionKeys
- strvaluetuple = (keystr, valstr)
- perproject_global_options.append(strvaluetuple)
- return (global_options, perproject_global_options, project_options)
-
- def optlist2optdict(self, optlist: T.List[str]) -> T.Dict[str, str]:
- optdict = {}
- for p in optlist:
- k, v = p.split('=', 1)
- optdict[k] = v
- return optdict
-
- def prefix_split_options(self, coll: T.Union[T.List[str], OptionStringLikeDict]) -> T.Tuple[str, T.Union[T.List[str], OptionStringLikeDict]]:
+ def prefix_split_options(self, coll: OptionDict) -> T.Tuple[T.Optional[str], OptionDict]:
prefix = None
- if isinstance(coll, list):
- others: T.List[str] = []
- for e in coll:
- if e.startswith('prefix='):
- prefix = e.split('=', 1)[1]
- else:
- others.append(e)
- return (prefix, others)
- else:
- others_d: OptionStringLikeDict = {}
- for k, v in coll.items():
- if isinstance(k, OptionKey) and k.name == 'prefix':
- prefix = v
- elif k == 'prefix':
- prefix = v
- else:
- others_d[k] = v
- return (prefix, others_d)
+ others_d: OptionDict = {}
+ for k, v in coll.items():
+ if k.name == 'prefix':
+ if not isinstance(v, str):
+ raise MesonException('Incorrect type for prefix option (expected string)')
+ prefix = v
+ else:
+ others_d[k] = v
+ return (prefix, others_d)
def first_handle_prefix(self,
- project_default_options: T.Union[T.List[str], OptionStringLikeDict],
- cmd_line_options: T.Union[T.List[str], OptionStringLikeDict],
- machine_file_options: T.Mapping[OptionKey, ElementaryOptionValues]) \
- -> T.Tuple[T.Union[T.List[str], OptionStringLikeDict],
- T.Union[T.List[str], OptionStringLikeDict],
- T.MutableMapping[OptionKey, ElementaryOptionValues]]:
+ project_default_options: OptionDict,
+ cmd_line_options: OptionDict,
+ machine_file_options: OptionDict) \
+ -> T.Tuple[OptionDict, OptionDict, OptionDict]:
# Copy to avoid later mutation
- nopref_machine_file_options = T.cast(
- 'T.MutableMapping[OptionKey, ElementaryOptionValues]', copy.copy(machine_file_options))
+ nopref_machine_file_options = copy.copy(machine_file_options)
prefix = None
(possible_prefix, nopref_project_default_options) = self.prefix_split_options(project_default_options)
@@ -1281,145 +1297,129 @@ class OptionStore:
self.options[OptionKey('prefix')].set_value(prefix)
def initialize_from_top_level_project_call(self,
- project_default_options_in: T.Union[T.List[str], OptionStringLikeDict],
- cmd_line_options_in: T.Union[T.List[str], OptionStringLikeDict],
- machine_file_options_in: T.Mapping[OptionKey, ElementaryOptionValues]) -> None:
- first_invocation = True
+ project_default_options_in: OptionDict,
+ cmd_line_options_in: OptionDict,
+ machine_file_options_in: OptionDict) -> None:
(project_default_options, cmd_line_options, machine_file_options) = self.first_handle_prefix(project_default_options_in,
cmd_line_options_in,
machine_file_options_in)
- if isinstance(project_default_options, str):
- project_default_options = [project_default_options]
- if isinstance(project_default_options, list):
- project_default_options = self.optlist2optdict(project_default_options) # type: ignore [assignment]
- if project_default_options is None:
- project_default_options = {}
- assert isinstance(machine_file_options, dict)
- for keystr, valstr in machine_file_options.items():
- if isinstance(keystr, str):
- # FIXME, standardise on Key or string.
- key = OptionKey.from_string(keystr)
- else:
- key = keystr
- # Due to backwards compatibility we ignore all build-machine options
- # when building natively.
- if not self.is_cross and key.is_for_build():
- continue
- if key.subproject:
- augstr = str(key)
- self.augments[augstr] = valstr
- elif key in self.options:
- self.set_option(key, valstr, first_invocation)
- else:
- proj_key = key.as_root()
- if proj_key in self.options:
- self.set_option(proj_key, valstr, first_invocation)
- else:
- self.pending_options[key] = valstr
- assert isinstance(project_default_options, dict)
- for keystr, valstr in project_default_options.items():
- # Ths is complicated by the fact that a string can have two meanings:
- #
- # default_options: 'foo=bar'
- #
- # can be either
- #
- # A) a system option in which case the subproject is None
- # B) a project option, in which case the subproject is '' (this method is only called from top level)
- #
- # The key parsing function can not handle the difference between the two
- # and defaults to A.
- if isinstance(keystr, str):
- key = OptionKey.from_string(keystr)
- else:
- key = keystr
+ for key, valstr in project_default_options.items():
# Due to backwards compatibility we ignore build-machine options
# when building natively.
if not self.is_cross and key.is_for_build():
continue
if key.subproject:
- self.pending_options[key] = valstr
- elif key in self.options:
- self.set_option(key, valstr, first_invocation)
+ # Subproject options from toplevel project() have low priority
+ # and will be processed when the subproject is found
+ self.pending_subproject_options[key] = valstr
else:
- # Setting a project option with default_options.
- # Argubly this should be a hard error, the default
+ # Setting a project option with default_options
+ # should arguably be a hard error; the default
                # value of a project option should be set in the option
# file, not in the project call.
- proj_key = key.as_root()
- if self.is_project_option(proj_key):
- self.set_option(proj_key, valstr)
- else:
- self.pending_options[key] = valstr
- assert isinstance(cmd_line_options, dict)
- for keystr, valstr in cmd_line_options.items():
- if isinstance(keystr, str):
- key = OptionKey.from_string(keystr)
- else:
- key = keystr
+ self.set_user_option(key, valstr, True)
+
+ # ignore subprojects for now for machine file and command line
+ # options; they are applied later
+ for key, valstr in machine_file_options.items():
# Due to backwards compatibility we ignore all build-machine options
# when building natively.
if not self.is_cross and key.is_for_build():
continue
- if key.subproject:
- self.pending_options[key] = valstr
- elif key in self.options:
- self.set_option(key, valstr, True)
- else:
- proj_key = key.as_root()
- if proj_key in self.options:
- self.set_option(proj_key, valstr, True)
- else:
- # Fail on unknown options that we can know must
- # exist at this point in time. Subproject and compiler
- # options are resolved later.
- #
- # Some base options (sanitizers etc) might get added later.
- # Permitting them all is not strictly correct.
- if key.subproject is None and not self.is_compiler_option(key) and not self.is_base_option(key):
- raise MesonException(f'Unknown options: "{keystr}"')
- self.pending_options[key] = valstr
-
- def hacky_mchackface_back_to_list(self, optdict: T.Dict[str, str]) -> T.List[str]:
- if isinstance(optdict, dict):
- return [f'{k}={v}' for k, v in optdict.items()]
- return optdict
+ if not key.subproject:
+ self.set_user_option(key, valstr, True)
+ for key, valstr in cmd_line_options.items():
+ # Due to backwards compatibility we ignore all build-machine options
+ # when building natively.
+ if not self.is_cross and key.is_for_build():
+ continue
+ if not key.subproject:
+ self.set_user_option(key, valstr, True)
+
+ def accept_as_pending_option(self, key: OptionKey, first_invocation: bool = False) -> bool:
+ # Some base options (sanitizers etc) might get added later.
+ # Permitting them all is not strictly correct.
+ if self.is_compiler_option(key):
+ return True
+ if first_invocation and self.is_backend_option(key):
+ return True
+ return (self.is_base_option(key) and
+ key.evolve(subproject=None, machine=MachineChoice.HOST) in COMPILER_BASE_OPTIONS)
+
+ def validate_cmd_line_options(self, cmd_line_options: OptionDict) -> None:
+ unknown_options = []
+ for key, valstr in cmd_line_options.items():
+ if key in self.pending_options and not self.accept_as_pending_option(key):
+ unknown_options.append(f'"{key}"')
+
+ if unknown_options:
+ keys = ', '.join(unknown_options)
+ raise MesonException(f'Unknown options: {keys}')
def initialize_from_subproject_call(self,
subproject: str,
- spcall_default_options: T.Union[T.List[str], OptionStringLikeDict],
- project_default_options: T.Union[T.List[str], OptionStringLikeDict],
- cmd_line_options: T.Union[T.List[str], OptionStringLikeDict]) -> None:
- is_first_invocation = True
- spcall_default_options = self.hacky_mchackface_back_to_list(spcall_default_options) # type: ignore [arg-type]
- project_default_options = self.hacky_mchackface_back_to_list(project_default_options) # type: ignore [arg-type]
- if isinstance(spcall_default_options, str):
- spcall_default_options = [spcall_default_options]
- for o in itertools.chain(project_default_options, spcall_default_options):
- keystr, valstr = o.split('=', 1)
- key = OptionKey.from_string(keystr)
- assert key.subproject is None
- key = key.evolve(subproject=subproject)
- # If the key points to a project option, set the value from that.
- # Otherwise set an augment.
- if key in self.project_options:
- self.set_option(key, valstr, is_first_invocation)
- else:
- self.pending_options.pop(key, None)
- aug_str = f'{subproject}:{keystr}'
- self.augments[aug_str] = valstr
- # Check for pending options
- assert isinstance(cmd_line_options, dict)
- for key, valstr in cmd_line_options.items(): # type: ignore [assignment]
- if not isinstance(key, OptionKey):
- key = OptionKey.from_string(key)
+ spcall_default_options: OptionDict,
+ project_default_options: OptionDict,
+ cmd_line_options: OptionDict,
+ machine_file_options: OptionDict) -> None:
+
+ options: OptionDict = {}
+
+ # project() default_options
+ for key, valstr in project_default_options.items():
+ if key.subproject == subproject:
+ without_subp = key.evolve(subproject=None)
+ raise MesonException(f'subproject name not needed in default_options; use "{without_subp}" instead of "{key}"')
+
+ if key.subproject is None:
+ key = key.evolve(subproject=subproject)
+ options[key] = valstr
+
+ # augments from the toplevel project() default_options
+ for key, valstr in self.pending_subproject_options.items():
+ if key.subproject == subproject:
+ options[key] = valstr
+
+ # subproject() default_options
+ for key, valstr in spcall_default_options.items():
+ if key.subproject == subproject:
+ without_subp = key.evolve(subproject=None)
+ raise MesonException(f'subproject name not needed in default_options; use "{without_subp}" instead of "{key}"')
+
+ if key.subproject is None:
+ key = key.evolve(subproject=subproject)
+ options[key] = valstr
+
+ # then global settings from machine file and command line
+ # **but not if they are toplevel project options**
+ for key, valstr in itertools.chain(machine_file_options.items(), cmd_line_options.items()):
+ if key.subproject is None and not self.is_project_option(key.as_root()):
+ subp_key = key.evolve(subproject=subproject)
+ self.pending_subproject_options.pop(subp_key, None)
+ options.pop(subp_key, None)
+
+ # then finally per project augments from machine file and command line
+ for key, valstr in itertools.chain(machine_file_options.items(), cmd_line_options.items()):
+ if key.subproject == subproject:
+ options[key] = valstr
+
+ # merge everything that has been computed above, while giving self.augments priority
+ for key, valstr in options.items():
if key.subproject != subproject:
+ if key.subproject in self.subprojects and not self.option_has_value(key, valstr):
+                    mlog.warning(f'option {key} is set in subproject {subproject} but has already been processed')
+ continue
+
+ # Subproject options from project() will be processed when the subproject is found
+ self.pending_subproject_options[key] = valstr
continue
+
+ self.pending_subproject_options.pop(key, None)
self.pending_options.pop(key, None)
- if key in self.options:
- self.set_option(key, valstr, is_first_invocation)
- else:
- self.augments[str(key)] = valstr
+ if key not in self.augments:
+ self.set_user_option(key, valstr, True)
+
+ self.subprojects.add(subproject)
def update_project_options(self, project_options: MutableKeyedOptionDictType, subproject: SubProject) -> None:
for key, value in project_options.items():
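
The net effect of the options.py changes is that yielding is resolved through a parent link established when the subproject option is added, while per-subproject overrides live in augments keyed by OptionKey. A compressed, illustrative model of the read path (field and key names are simplified; the real store keeps far more state):

    import typing as T
    from dataclasses import dataclass

    @dataclass
    class Opt:
        value: str
        yielding: bool = False
        parent: T.Optional['Opt'] = None

    options: T.Dict[str, Opt] = {}
    augments: T.Dict[str, str] = {}

    top = Opt('from-top')
    options['warning_level'] = top
    # The parent link is set once, when the subproject option is added.
    options['sub:warning_level'] = Opt('from-sub', yielding=True, parent=top)

    def value_for(key: str) -> str:
        opt = options[key]
        if key in augments:                     # per-subproject override wins
            return augments[key]
        if opt.yielding and opt.parent is not None:
            return opt.parent.value             # yield to the toplevel option
        return opt.value

    print(value_for('sub:warning_level'))  # from-top
    augments['sub:warning_level'] = '3'    # e.g. -Dsub:warning_level=3
    print(value_for('sub:warning_level'))  # 3
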
diff --git a/mesonbuild/rewriter.py b/mesonbuild/rewriter.py
index 919bd38..4c2fb11 100644
--- a/mesonbuild/rewriter.py
+++ b/mesonbuild/rewriter.py
@@ -10,23 +10,29 @@
from __future__ import annotations
from .ast import IntrospectionInterpreter, BUILD_TARGET_FUNCTIONS, AstConditionLevel, AstIDGenerator, AstIndentationGenerator, AstPrinter
-from mesonbuild.mesonlib import MesonException, setup_vsenv
+from .ast.interpreter import IntrospectionBuildTarget, IntrospectionDependency, _symbol
+from .interpreterbase import UnknownValue, TV_func
+from .interpreterbase.helpers import flatten
+from mesonbuild.mesonlib import MesonException, setup_vsenv, relpath
from . import mlog, environment
from functools import wraps
-from .mparser import Token, ArrayNode, ArgumentNode, AssignmentNode, StringNode, BooleanNode, ElementaryNode, IdNode, FunctionNode, SymbolNode
-import json, os, re, sys
+from .mparser import Token, ArrayNode, ArgumentNode, ArithmeticNode, AssignmentNode, BaseNode, StringNode, BooleanNode, ElementaryNode, IdNode, FunctionNode, PlusAssignmentNode
+from .mintro import IntrospectionEncoder
+import json, os, re, sys, codecs
import typing as T
+from pathlib import Path
if T.TYPE_CHECKING:
- from argparse import ArgumentParser, HelpFormatter
- from .mparser import BaseNode
+ import argparse
+ from argparse import ArgumentParser, _FormatterClass
+ from .mlog import AnsiDecorator
class RewriterException(MesonException):
pass
# Note: when adding arguments, please also add them to the completion
# scripts in $MESONSRC/data/shell-completions/
-def add_arguments(parser: ArgumentParser, formatter: T.Callable[[str], HelpFormatter]) -> None:
+def add_arguments(parser: ArgumentParser, formatter: _FormatterClass) -> None:
parser.add_argument('-s', '--sourcedir', type=str, default='.', metavar='SRCDIR', help='Path to source directory.')
parser.add_argument('-V', '--verbose', action='store_true', default=False, help='Enable verbose output')
parser.add_argument('-S', '--skip-errors', dest='skip', action='store_true', default=False, help='Skip errors instead of aborting')
@@ -62,12 +68,14 @@ def add_arguments(parser: ArgumentParser, formatter: T.Callable[[str], HelpForma
cmd_parser.add_argument('json', help='JSON string or file to execute')
class RequiredKeys:
- def __init__(self, keys):
+ keys: T.Dict[str, T.Any]
+
+ def __init__(self, keys: T.Dict[str, T.Any]):
self.keys = keys
- def __call__(self, f):
+ def __call__(self, f: TV_func) -> TV_func:
@wraps(f)
- def wrapped(*wrapped_args, **wrapped_kwargs):
+ def wrapped(*wrapped_args: T.Any, **wrapped_kwargs: T.Any) -> T.Any:
assert len(wrapped_args) >= 2
cmd = wrapped_args[1]
for key, val in self.keys.items():
@@ -90,12 +98,11 @@ class RequiredKeys:
.format(key, choices, cmd[key]))
return f(*wrapped_args, **wrapped_kwargs)
- return wrapped
-
-def _symbol(val: str) -> SymbolNode:
- return SymbolNode(Token('', '', 0, 0, 0, (0, 0), val))
+ return T.cast('TV_func', wrapped)
class MTypeBase:
+ node: BaseNode
+
def __init__(self, node: T.Optional[BaseNode] = None):
if node is None:
self.node = self.new_node()
@@ -107,30 +114,30 @@ class MTypeBase:
self.node_type = i
@classmethod
- def new_node(cls, value=None):
+ def new_node(cls, value: T.Any = None) -> BaseNode:
# Overwrite in derived class
raise RewriterException('Internal error: new_node of MTypeBase was called')
@classmethod
- def supported_nodes(cls):
+ def supported_nodes(cls) -> T.List[type]:
# Overwrite in derived class
return []
- def can_modify(self):
+ def can_modify(self) -> bool:
return self.node_type is not None
- def get_node(self):
+ def get_node(self) -> BaseNode:
return self.node
- def add_value(self, value):
+ def add_value(self, value: T.Any) -> None:
# Overwrite in derived class
mlog.warning('Cannot add a value of type', mlog.bold(type(self).__name__), '--> skipping')
- def remove_value(self, value):
+ def remove_value(self, value: T.Any) -> None:
# Overwrite in derived class
mlog.warning('Cannot remove a value of type', mlog.bold(type(self).__name__), '--> skipping')
- def remove_regex(self, value):
+ def remove_regex(self, value: T.Any) -> None:
# Overwrite in derived class
mlog.warning('Cannot remove a regex in type', mlog.bold(type(self).__name__), '--> skipping')
@@ -139,13 +146,13 @@ class MTypeStr(MTypeBase):
super().__init__(node)
@classmethod
- def new_node(cls, value=None):
+ def new_node(cls, value: T.Optional[str] = None) -> BaseNode:
if value is None:
value = ''
return StringNode(Token('string', '', 0, 0, 0, None, str(value)))
@classmethod
- def supported_nodes(cls):
+ def supported_nodes(cls) -> T.List[type]:
return [StringNode]
class MTypeBool(MTypeBase):
@@ -153,11 +160,11 @@ class MTypeBool(MTypeBase):
super().__init__(node)
@classmethod
- def new_node(cls, value=None):
+ def new_node(cls, value: T.Optional[str] = None) -> BaseNode:
return BooleanNode(Token('', '', 0, 0, 0, None, bool(value)))
@classmethod
- def supported_nodes(cls):
+ def supported_nodes(cls) -> T.List[type]:
return [BooleanNode]
class MTypeID(MTypeBase):
@@ -165,21 +172,23 @@ class MTypeID(MTypeBase):
super().__init__(node)
@classmethod
- def new_node(cls, value=None):
+ def new_node(cls, value: T.Optional[str] = None) -> BaseNode:
if value is None:
value = ''
return IdNode(Token('', '', 0, 0, 0, None, str(value)))
@classmethod
- def supported_nodes(cls):
+ def supported_nodes(cls) -> T.List[type]:
return [IdNode]
class MTypeList(MTypeBase):
+ node: ArrayNode
+
def __init__(self, node: T.Optional[BaseNode] = None):
super().__init__(node)
@classmethod
- def new_node(cls, value=None):
+ def new_node(cls, value: T.Optional[T.List[T.Any]] = None) -> ArrayNode:
if value is None:
value = []
elif not isinstance(value, list):
@@ -189,50 +198,52 @@ class MTypeList(MTypeBase):
return ArrayNode(_symbol('['), args, _symbol(']'))
@classmethod
- def _new_element_node(cls, value):
+ def _new_element_node(cls, value: T.Any) -> BaseNode:
# Overwrite in derived class
raise RewriterException('Internal error: _new_element_node of MTypeList was called')
- def _ensure_array_node(self):
+ def _ensure_array_node(self) -> None:
if not isinstance(self.node, ArrayNode):
tmp = self.node
self.node = self.new_node()
self.node.args.arguments = [tmp]
@staticmethod
- def _check_is_equal(node, value) -> bool:
+ def _check_is_equal(node: BaseNode, value: str) -> bool:
# Overwrite in derived class
return False
@staticmethod
- def _check_regex_matches(node, regex: str) -> bool:
+ def _check_regex_matches(node: BaseNode, regex: str) -> bool:
# Overwrite in derived class
return False
- def get_node(self):
+ def get_node(self) -> BaseNode:
if isinstance(self.node, ArrayNode):
if len(self.node.args.arguments) == 1:
return self.node.args.arguments[0]
return self.node
@classmethod
- def supported_element_nodes(cls):
+ def supported_element_nodes(cls) -> T.List[T.Type]:
# Overwrite in derived class
return []
@classmethod
- def supported_nodes(cls):
+ def supported_nodes(cls) -> T.List[T.Type]:
return [ArrayNode] + cls.supported_element_nodes()
- def add_value(self, value):
+ def add_value(self, value: T.Any) -> None:
if not isinstance(value, list):
value = [value]
self._ensure_array_node()
for i in value:
+ assert hasattr(self.node, 'args') # For mypy
+ assert isinstance(self.node.args, ArgumentNode) # For mypy
self.node.args.arguments += [self._new_element_node(i)]
- def _remove_helper(self, value, equal_func):
- def check_remove_node(node):
+ def _remove_helper(self, value: T.Any, equal_func: T.Callable[[T.Any, T.Any], bool]) -> None:
+ def check_remove_node(node: BaseNode) -> bool:
for j in value:
if equal_func(i, j):
return True
@@ -241,16 +252,18 @@ class MTypeList(MTypeBase):
if not isinstance(value, list):
value = [value]
self._ensure_array_node()
+ assert hasattr(self.node, 'args') # For mypy
+ assert isinstance(self.node.args, ArgumentNode) # For mypy
removed_list = []
for i in self.node.args.arguments:
if not check_remove_node(i):
removed_list += [i]
self.node.args.arguments = removed_list
- def remove_value(self, value):
+ def remove_value(self, value: T.Any) -> None:
self._remove_helper(value, self._check_is_equal)
- def remove_regex(self, regex: str):
+ def remove_regex(self, regex: str) -> None:
self._remove_helper(regex, self._check_regex_matches)
class MTypeStrList(MTypeList):
@@ -258,23 +271,23 @@ class MTypeStrList(MTypeList):
super().__init__(node)
@classmethod
- def _new_element_node(cls, value):
+ def _new_element_node(cls, value: str) -> StringNode:
return StringNode(Token('string', '', 0, 0, 0, None, str(value)))
@staticmethod
- def _check_is_equal(node, value) -> bool:
+ def _check_is_equal(node: BaseNode, value: str) -> bool:
if isinstance(node, StringNode):
- return node.value == value
+ return bool(node.value == value)
return False
@staticmethod
- def _check_regex_matches(node, regex: str) -> bool:
+ def _check_regex_matches(node: BaseNode, regex: str) -> bool:
if isinstance(node, StringNode):
return re.match(regex, node.value) is not None
return False
@classmethod
- def supported_element_nodes(cls):
+ def supported_element_nodes(cls) -> T.List[T.Type]:
return [StringNode]
class MTypeIDList(MTypeList):
@@ -282,26 +295,26 @@ class MTypeIDList(MTypeList):
super().__init__(node)
@classmethod
- def _new_element_node(cls, value):
+ def _new_element_node(cls, value: str) -> IdNode:
return IdNode(Token('', '', 0, 0, 0, None, str(value)))
@staticmethod
- def _check_is_equal(node, value) -> bool:
+ def _check_is_equal(node: BaseNode, value: str) -> bool:
if isinstance(node, IdNode):
- return node.value == value
+ return bool(node.value == value)
return False
@staticmethod
- def _check_regex_matches(node, regex: str) -> bool:
+ def _check_regex_matches(node: BaseNode, regex: str) -> bool:
if isinstance(node, StringNode):
return re.match(regex, node.value) is not None
return False
@classmethod
- def supported_element_nodes(cls):
+ def supported_element_nodes(cls) -> T.List[T.Type]:
return [IdNode]
-rewriter_keys = {
+rewriter_keys: T.Dict[str, T.Dict[str, T.Any]] = {
'default_options': {
'operation': (str, None, ['set', 'delete']),
'options': (dict, {}, None)
@@ -355,13 +368,15 @@ rewriter_func_kwargs = {
}
class Rewriter:
+ info_dump: T.Optional[T.Dict[str, T.Dict[str, T.Any]]]
+
def __init__(self, sourcedir: str, generator: str = 'ninja', skip_errors: bool = False):
self.sourcedir = sourcedir
self.interpreter = IntrospectionInterpreter(sourcedir, '', generator, visitors = [AstIDGenerator(), AstIndentationGenerator(), AstConditionLevel()])
self.skip_errors = skip_errors
- self.modified_nodes = []
- self.to_remove_nodes = []
- self.to_add_nodes = []
+ self.modified_nodes: T.List[BaseNode] = []
+ self.to_remove_nodes: T.List[BaseNode] = []
+ self.to_add_nodes: T.List[BaseNode] = []
self.functions = {
'default_options': self.process_default_options,
'kwargs': self.process_kwargs,
@@ -369,89 +384,99 @@ class Rewriter:
}
self.info_dump = None
- def analyze_meson(self):
+ def analyze_meson(self) -> None:
mlog.log('Analyzing meson file:', mlog.bold(os.path.join(self.sourcedir, environment.build_filename)))
self.interpreter.analyze()
mlog.log(' -- Project:', mlog.bold(self.interpreter.project_data['descriptive_name']))
mlog.log(' -- Version:', mlog.cyan(self.interpreter.project_data['version']))
- def add_info(self, cmd_type: str, cmd_id: str, data: dict):
+ def add_info(self, cmd_type: str, cmd_id: str, data: dict) -> None:
if self.info_dump is None:
self.info_dump = {}
if cmd_type not in self.info_dump:
self.info_dump[cmd_type] = {}
self.info_dump[cmd_type][cmd_id] = data
- def print_info(self):
+ def print_info(self) -> None:
if self.info_dump is None:
return
- sys.stdout.write(json.dumps(self.info_dump, indent=2))
+ sys.stdout.write(json.dumps(self.info_dump, indent=2, cls=IntrospectionEncoder))
- def on_error(self):
+ def on_error(self) -> T.Tuple[AnsiDecorator, AnsiDecorator]:
if self.skip_errors:
return mlog.cyan('-->'), mlog.yellow('skipping')
return mlog.cyan('-->'), mlog.red('aborting')
- def handle_error(self):
+ def handle_error(self) -> None:
if self.skip_errors:
return None
raise MesonException('Rewriting the meson.build failed')
- def find_target(self, target: str):
- def check_list(name: str) -> T.List[BaseNode]:
- result = []
- for i in self.interpreter.targets:
- if name in {i['name'], i['id']}:
- result += [i]
- return result
-
- targets = check_list(target)
- if targets:
- if len(targets) == 1:
- return targets[0]
- else:
- mlog.error('There are multiple targets matching', mlog.bold(target))
- for i in targets:
- mlog.error(' -- Target name', mlog.bold(i['name']), 'with ID', mlog.bold(i['id']))
- mlog.error('Please try again with the unique ID of the target', *self.on_error())
- self.handle_error()
- return None
-
- # Check the assignments
- tgt = None
- if target in self.interpreter.assignments:
- node = self.interpreter.assignments[target]
- if isinstance(node, FunctionNode):
- if node.func_name.value in {'executable', 'jar', 'library', 'shared_library', 'shared_module', 'static_library', 'both_libraries'}:
- tgt = self.interpreter.assign_vals[target]
-
- return tgt
-
- def find_dependency(self, dependency: str):
- def check_list(name: str):
- for i in self.interpreter.dependencies:
- if name == i['name']:
- return i
+ def all_assignments(self, varname: str) -> T.List[BaseNode]:
+ assigned_values = []
+ for ass in self.interpreter.all_assignment_nodes[varname]:
+ if isinstance(ass, PlusAssignmentNode):
+ continue
+ assert isinstance(ass, AssignmentNode)
+ assigned_values.append(ass.value)
+ return assigned_values
+
+ def find_target(self, target: str) -> T.Optional[IntrospectionBuildTarget]:
+ for i in self.interpreter.targets:
+ if target == i.id:
+ return i
+
+ potential_tgts = []
+ for i in self.interpreter.targets:
+ if target == i.name:
+ potential_tgts.append(i)
+
+ if not potential_tgts:
+            potential_tgts_1 = self.all_assignments(target)
+            potential_tgts_1 = [self.interpreter.node_to_runtime_value(el) for el in potential_tgts_1]
+            potential_tgts = [el for el in potential_tgts_1 if isinstance(el, IntrospectionBuildTarget)]
+
+ if not potential_tgts:
return None
+ elif len(potential_tgts) == 1:
+ return potential_tgts[0]
+ else:
+ mlog.error('There are multiple targets matching', mlog.bold(target))
+ for i in potential_tgts:
+ mlog.error(' -- Target name', mlog.bold(i.name), 'with ID', mlog.bold(i.id))
+ mlog.error('Please try again with the unique ID of the target', *self.on_error())
+ self.handle_error()
+ return None
+
+ def find_dependency(self, dependency: str) -> T.Optional[IntrospectionDependency]:
+ potential_deps = []
+ for i in self.interpreter.dependencies:
+ if i.name == dependency:
+ potential_deps.append(i)
- dep = check_list(dependency)
- if dep is not None:
- return dep
+ checking_varnames = len(potential_deps) == 0
- # Check the assignments
- if dependency in self.interpreter.assignments:
- node = self.interpreter.assignments[dependency]
- if isinstance(node, FunctionNode):
- if node.func_name.value == 'dependency':
- name = self.interpreter.flatten_args(node.args)[0]
- dep = check_list(name)
+ if checking_varnames:
+ potential_deps1 = self.all_assignments(dependency)
+ potential_deps = [self.interpreter.node_to_runtime_value(el) for el in potential_deps1 if isinstance(el, FunctionNode) and el.func_name.value == 'dependency']
- return dep
+ if not potential_deps:
+ return None
+ elif len(potential_deps) == 1:
+ return potential_deps[0]
+ else:
+ mlog.error('There are multiple dependencies matching', mlog.bold(dependency))
+ for i in potential_deps:
+ mlog.error(' -- Dependency name', i)
+ if checking_varnames:
+ mlog.error('Please try again with the name of the dependency', *self.on_error())
+ self.handle_error()
+ return None
@RequiredKeys(rewriter_keys['default_options'])
- def process_default_options(self, cmd):
+ def process_default_options(self, cmd: T.Dict[str, T.Any]) -> None:
# First, remove the old values
- kwargs_cmd = {
+ kwargs_cmd: T.Dict[str, T.Any] = {
'function': 'project',
'id': "/",
'operation': 'remove_regex',
@@ -495,7 +520,7 @@ class Rewriter:
self.process_kwargs(kwargs_cmd)
@RequiredKeys(rewriter_keys['kwargs'])
- def process_kwargs(self, cmd):
+ def process_kwargs(self, cmd: T.Dict[str, T.Any]) -> None:
mlog.log('Processing function type', mlog.bold(cmd['function']), 'with id', mlog.cyan("'" + cmd['id'] + "'"))
if cmd['function'] not in rewriter_func_kwargs:
mlog.error('Unknown function type', cmd['function'], *self.on_error())
@@ -516,26 +541,26 @@ class Rewriter:
node = self.interpreter.project_node
arg_node = node.args
elif cmd['function'] == 'target':
- tmp = self.find_target(cmd['id'])
- if tmp:
- node = tmp['node']
+ tmp_tgt = self.find_target(cmd['id'])
+ if tmp_tgt:
+ node = tmp_tgt.node
arg_node = node.args
elif cmd['function'] == 'dependency':
- tmp = self.find_dependency(cmd['id'])
- if tmp:
- node = tmp['node']
+ tmp_dep = self.find_dependency(cmd['id'])
+ if tmp_dep:
+ node = tmp_dep.node
arg_node = node.args
if not node:
mlog.error('Unable to find the function node')
assert isinstance(node, FunctionNode)
assert isinstance(arg_node, ArgumentNode)
# Transform the key nodes to plain strings
- arg_node.kwargs = {k.value: v for k, v in arg_node.kwargs.items()}
+ kwargs = {T.cast(IdNode, k).value: v for k, v in arg_node.kwargs.items()}
# Print kwargs info
if cmd['operation'] == 'info':
- info_data = {}
- for key, val in sorted(arg_node.kwargs.items()):
+ info_data: T.Dict[str, T.Any] = {}
+ for key, val in sorted(kwargs.items()):
info_data[key] = None
if isinstance(val, ElementaryNode):
info_data[key] = val.value
@@ -561,21 +586,21 @@ class Rewriter:
if cmd['operation'] == 'delete':
# Remove the key from the kwargs
- if key not in arg_node.kwargs:
+ if key not in kwargs:
mlog.log(' -- Key', mlog.bold(key), 'is already deleted')
continue
mlog.log(' -- Deleting', mlog.bold(key), 'from the kwargs')
- del arg_node.kwargs[key]
+ del kwargs[key]
elif cmd['operation'] == 'set':
# Replace the key from the kwargs
mlog.log(' -- Setting', mlog.bold(key), 'to', mlog.yellow(str(val)))
- arg_node.kwargs[key] = kwargs_def[key].new_node(val)
+ kwargs[key] = kwargs_def[key].new_node(val)
else:
# Modify the value from the kwargs
- if key not in arg_node.kwargs:
- arg_node.kwargs[key] = None
- modifier = kwargs_def[key](arg_node.kwargs[key])
+ if key not in kwargs:
+ kwargs[key] = None
+ modifier = kwargs_def[key](kwargs[key])
if not modifier.can_modify():
mlog.log(' -- Skipping', mlog.bold(key), 'because it is too complex to modify')
continue
@@ -593,24 +618,251 @@ class Rewriter:
modifier.remove_regex(val)
# Write back the result
- arg_node.kwargs[key] = modifier.get_node()
+ kwargs[key] = modifier.get_node()
num_changed += 1
# Convert the keys back to IdNode's
- arg_node.kwargs = {IdNode(Token('', '', 0, 0, 0, None, k)): v for k, v in arg_node.kwargs.items()}
+ arg_node.kwargs = {IdNode(Token('', '', 0, 0, 0, None, k)): v for k, v in kwargs.items()}
for k, v in arg_node.kwargs.items():
k.level = v.level
if num_changed > 0 and node not in self.modified_nodes:
self.modified_nodes += [node]
- def find_assignment_node(self, node: BaseNode) -> AssignmentNode:
- if node.ast_id and node.ast_id in self.interpreter.reverse_assignment:
- return self.interpreter.reverse_assignment[node.ast_id]
+ def find_assignment_node(self, node: BaseNode) -> T.Optional[AssignmentNode]:
+ for k, v in self.interpreter.all_assignment_nodes.items():
+ for ass in v:
+ if ass.value == node:
+ return ass
return None
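+ # It is only safe to modify a node if the data in it flows into exactly
+ # one build target; otherwise an edit intended for one target would leak
+ # into others.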
+ def affects_no_other_targets(self, candidate: BaseNode) -> bool:
+ affected = self.interpreter.dataflow_dag.reachable({candidate}, False)
+ affected_targets = [x for x in affected if isinstance(x, FunctionNode) and x.func_name.value in BUILD_TARGET_FUNCTIONS]
+ return len(affected_targets) == 1
+
+ def get_relto(self, target_node: BaseNode, node: BaseNode) -> T.Optional[Path]:
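+ # Returns the directory that relative source strings flowing from `node`
+ # into `target_node` are resolved against, or None if there is more than
+ # one dataflow path and the base directory is therefore ambiguous.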
+ cwd = Path(os.getcwd())
+ all_paths = self.interpreter.dataflow_dag.find_all_paths(node, target_node)
+ # len(all_paths) == 0 would imply that data does not flow from node to
+ # target_node. This would imply that adding sources to node would not
+ # add the source to the target.
+ assert all_paths
+ if len(all_paths) > 1:
+ return None
+ return (cwd / next(x for x in all_paths[0] if isinstance(x, FunctionNode)).filename).parent
+
+ def add_src_or_extra(self, op: str, target: IntrospectionBuildTarget, newfiles: T.List[str], to_sort_nodes: T.List[T.Union[FunctionNode, ArrayNode]]) -> None:
+ assert op in {'src_add', 'extra_files_add'}
+
+ if op == 'src_add':
+ old: T.Set[T.Union[BaseNode, UnknownValue]] = set(target.source_nodes)
+ elif op == 'extra_files_add':
+ if target.extra_files is None:
+ old = set()
+ else:
+ old = {target.extra_files}
+ tgt_function: FunctionNode = target.node
+
+ cwd = Path(os.getcwd())
+ target_dir_abs = cwd / os.path.dirname(target.node.filename)
+ source_root_abs = cwd / self.interpreter.source_root
+
+ candidates1 = self.interpreter.dataflow_dag.reachable(old, True)
+ # A node is a member of the set `candidates1` exactly if data from that node
+ # flows into one of the nodes in `old`. We assume that this implies that if we
+ # add `foo.c` to such a node, then 'foo.c' will be added to one of the nodes
+ # in `old`. This assumption is not always true:
+ # ar = ['a.c', 'b.c']
+ # srcs = ar[1]
+ # executable('name', srcs)
+ # Data flows from `ar` to `srcs`, but if we add 'foo.c':
+ # ar = ['a.c', 'b.c', 'foo.c']
+ # srcs = ar[1]
+ # executable('name', srcs)
+ # this does not add 'foo.c' to `srcs`. This is a known bug/limitation of
+ # the meson rewriter that could be fixed by replacing `reachable` with a
+ # more advanced analysis. But this is a lot of work and I think e.g.
+ # `srcs = ar[1]` is rare in real-world projects, so I will just leave
+ # this for now.
+
+ candidates2 = {x for x in candidates1 if isinstance(x, (FunctionNode, ArrayNode))}
+
+ # If we have this meson.build file:
+ # shared = ['shared.c']
+ # executable('foo', shared + ['foo.c'])
+ # executable('bar', shared + ['bar.c'])
+ # and we are tasked with adding 'new.c' to 'foo', we should do e.g this:
+ # shared = ['shared.c']
+ # executable('foo', shared + ['foo.c', 'new.c'])
+ # executable('bar', shared + ['bar.c'])
+ # but never this:
+ # shared = ['shared.c', 'new.c']
+ # executable('foo', shared + ['foo.c'])
+ # executable('bar', shared + ['bar.c'])
+ # We do this by removing the `['shared.c']`-node from `candidates2`.
+ candidates2 = {x for x in candidates2 if self.affects_no_other_targets(x)}
+
+ def path_contains_unknowns(candidate: BaseNode) -> bool:
+ all_paths = self.interpreter.dataflow_dag.find_all_paths(candidate, target.node)
+ for path in all_paths:
+ for el in path:
+ if isinstance(el, UnknownValue):
+ return True
+ return False
+
+ candidates2 = {x for x in candidates2 if not path_contains_unknowns(x)}
+
+ candidates2 = {x for x in candidates2 if self.get_relto(target.node, x) is not None}
+
+ chosen: T.Optional[T.Union[FunctionNode, ArrayNode]] = None
+ new_kwarg_flag = False
+ if len(candidates2) > 0:
+ # So that files(['a', 'b']) gets modified to files(['a', 'b', 'c']) instead of files(['a', 'b'], 'c')
+ if len({x for x in candidates2 if isinstance(x, ArrayNode)}) > 0:
+ candidates2 = {x for x in candidates2 if isinstance(x, ArrayNode)}
+
+ # We choose one more or less arbitrary candidate
+ chosen = min(candidates2, key=lambda x: (x.lineno, x.colno))
+ elif op == 'src_add':
+ chosen = target.node
+ elif op == 'extra_files_add':
+ chosen = ArrayNode(_symbol('['), ArgumentNode(Token('', tgt_function.filename, 0, 0, 0, None, '[]')), _symbol(']'))
+
+ # Adding this edge to the dataflow DAG by hand is fundamentally error-prone.
+ self.interpreter.dataflow_dag.add_edge(chosen, target.node)
+
+ extra_files_idnode = IdNode(Token('string', tgt_function.filename, 0, 0, 0, None, 'extra_files'))
+ if tgt_function not in self.modified_nodes:
+ self.modified_nodes += [tgt_function]
+ new_extra_files_node: BaseNode
+ if target.node.args.get_kwarg_or_default('extra_files', None) is None:
+ # Target has no extra_files kwarg, create one
+ new_kwarg_flag = True
+ new_extra_files_node = chosen
+ else:
+ new_kwarg_flag = True
+ old_extra_files = target.node.args.get_kwarg_or_default('extra_files', None)
+ target.node.args.kwargs = {k: v for k, v in target.node.args.kwargs.items() if not (isinstance(k, IdNode) and k.value == 'extra_files')}
+ new_extra_files_node = ArithmeticNode('add', old_extra_files, _symbol('+'), chosen)
+
+ tgt_function.args.kwargs[extra_files_idnode] = new_extra_files_node
+
+ newfiles_relto = self.get_relto(target.node, chosen)
+ old_src_list: T.List[T.Any] = flatten([self.interpreter.node_to_runtime_value(sn) for sn in old])
+
+ if op == 'src_add':
+ name = 'Source'
+ elif op == 'extra_files_add':
+ name = 'Extra file'
+ # Generate the new String nodes
+ to_append = []
+ added = []
+
+ old_src_list = [(target_dir_abs / x).resolve() if isinstance(x, str) else x.to_abs_path(source_root_abs) for x in old_src_list if not isinstance(x, UnknownValue)]
+ for _newf in sorted(set(newfiles)):
+ newf = Path(_newf)
+ if not os.path.isabs(newf):
+ newf = source_root_abs / newf
+ if newf in old_src_list:
+ mlog.log(' -- ', name, mlog.green(str(newf)), 'is already defined for the target --> skipping')
+ continue
+
+ mlog.log(' -- Adding ', name.lower(), mlog.green(str(newf)), 'at',
+ mlog.yellow(f'{chosen.filename}:{chosen.lineno}'))
+ added.append(newf)
+ mocktarget = self.interpreter.funcvals[target.node]
+ assert isinstance(mocktarget, IntrospectionBuildTarget)
+ # print("adding ", str(newf), 'to', mocktarget.name) todo: should we write something to stderr?
+
+ path = relpath(newf, newfiles_relto)
+ path = codecs.encode(path, 'unicode_escape').decode() # Because the StringNode constructor does the inverse
+ token = Token('string', chosen.filename, 0, 0, 0, None, path)
+ to_append += [StringNode(token)]
+
+ assert isinstance(chosen, (FunctionNode, ArrayNode))
+ arg_node = chosen.args
+ # Append to the AST at the right place
+ arg_node.arguments += to_append
+
+ # Mark the node as modified
+ if chosen not in to_sort_nodes:
+ to_sort_nodes += [chosen]
+ # If the extra_files array is newly created, i.e. if new_kwarg_flag is
+ # True, don't mark it as its parent function node already is, otherwise
+ # this would cause double modification.
+ if chosen not in self.modified_nodes and not new_kwarg_flag:
+ self.modified_nodes += [chosen]
+
+ # Utility function to get a list of the sources from a node
+ def arg_list_from_node(self, n: BaseNode) -> T.List[BaseNode]:
+ args = []
+ if isinstance(n, FunctionNode):
+ args = list(n.args.arguments)
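+ # The first positional argument of a build-target call is the target
+ # name, not a source, so it is dropped below.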
+ if n.func_name.value in BUILD_TARGET_FUNCTIONS:
+ args.pop(0)
+ elif isinstance(n, ArrayNode):
+ args = n.args.arguments
+ elif isinstance(n, ArgumentNode):
+ args = n.arguments
+ return args
+
+ def rm_src_or_extra(self, op: str, target: IntrospectionBuildTarget, to_be_removed: T.List[str], to_sort_nodes: T.List[T.Union[FunctionNode, ArrayNode]]) -> None:
+ assert op in {'src_rm', 'extra_files_rm'}
+ cwd = Path(os.getcwd())
+ source_root_abs = cwd / self.interpreter.source_root
+
+ # Helper to find the exact string node and its parent
+ def find_node(src: str) -> T.Tuple[T.Optional[BaseNode], T.Optional[StringNode]]:
+ if op == 'src_rm':
+ nodes = self.interpreter.dataflow_dag.reachable(set(target.source_nodes), True).union({target.node})
+ elif op == 'extra_files_rm':
+ nodes = self.interpreter.dataflow_dag.reachable({target.extra_files}, True)
+ for i in nodes:
+ if isinstance(i, UnknownValue):
+ continue
+ relto = self.get_relto(target.node, i)
+ if relto is not None:
+ for j in self.arg_list_from_node(i):
+ if isinstance(j, StringNode):
+ if os.path.normpath(relto / j.value) == os.path.normpath(source_root_abs / src):
+ return i, j
+ return None, None
+
+ if op == 'src_rm':
+ name = 'source'
+ elif op == 'extra_files_rm':
+ name = 'extra file'
+
+ for i in to_be_removed:
+ # Try to find the node with the source string
+ root, string_node = find_node(i)
+ if root is None:
+ mlog.warning(' -- Unable to find', name, mlog.green(i), 'in the target')
+ continue
+ if not self.affects_no_other_targets(string_node):
+ mlog.warning(' -- Removing the', name, mlog.green(i), 'is too complicated')
+ continue
+
+ if not isinstance(root, (FunctionNode, ArrayNode)):
+ raise NotImplementedError # Removing sources from this node type is not supported yet.
+
+ # Remove the found string node from the argument list
+ arg_node = root.args
+ mlog.log(' -- Removing', name, mlog.green(i), 'from',
+ mlog.yellow(f'{string_node.filename}:{string_node.lineno}'))
+ arg_node.arguments.remove(string_node)
+
+ # Mark the node as modified
+ if root not in to_sort_nodes:
+ to_sort_nodes += [root]
+ if root not in self.modified_nodes:
+ self.modified_nodes += [root]
+
@RequiredKeys(rewriter_keys['target'])
- def process_target(self, cmd):
+ def process_target(self, cmd: T.Dict[str, T.Any]) -> None:
mlog.log('Processing target', mlog.bold(cmd['target']), 'operation', mlog.cyan(cmd['operation']))
target = self.find_target(cmd['target'])
if target is None and cmd['operation'] != 'target_add':
@@ -619,7 +871,7 @@ class Rewriter:
# Make source paths relative to the current subdir
def rel_source(src: str) -> str:
- subdir = os.path.abspath(os.path.join(self.sourcedir, target['subdir']))
+ subdir = os.path.abspath(os.path.join(self.sourcedir, target.subdir))
if os.path.isabs(src):
return os.path.relpath(src, subdir)
elif not os.path.exists(src):
@@ -630,180 +882,13 @@ class Rewriter:
if target is not None:
cmd['sources'] = [rel_source(x) for x in cmd['sources']]
- # Utility function to get a list of the sources from a node
- def arg_list_from_node(n):
- args = []
- if isinstance(n, FunctionNode):
- args = list(n.args.arguments)
- if n.func_name.value in BUILD_TARGET_FUNCTIONS:
- args.pop(0)
- elif isinstance(n, ArrayNode):
- args = n.args.arguments
- elif isinstance(n, ArgumentNode):
- args = n.arguments
- return args
-
- to_sort_nodes = []
-
- if cmd['operation'] == 'src_add':
- node = None
- if target['sources']:
- node = target['sources'][0]
- else:
- node = target['node']
- assert node is not None
-
- # Generate the current source list
- src_list = []
- for i in target['sources']:
- for j in arg_list_from_node(i):
- if isinstance(j, StringNode):
- src_list += [j.value]
-
- # Generate the new String nodes
- to_append = []
- for i in sorted(set(cmd['sources'])):
- if i in src_list:
- mlog.log(' -- Source', mlog.green(i), 'is already defined for the target --> skipping')
- continue
- mlog.log(' -- Adding source', mlog.green(i), 'at',
- mlog.yellow(f'{node.filename}:{node.lineno}'))
- token = Token('string', node.filename, 0, 0, 0, None, i)
- to_append += [StringNode(token)]
-
- # Append to the AST at the right place
- arg_node = None
- if isinstance(node, (FunctionNode, ArrayNode)):
- arg_node = node.args
- elif isinstance(node, ArgumentNode):
- arg_node = node
- assert arg_node is not None
- arg_node.arguments += to_append
-
- # Mark the node as modified
- if arg_node not in to_sort_nodes and not isinstance(node, FunctionNode):
- to_sort_nodes += [arg_node]
- if node not in self.modified_nodes:
- self.modified_nodes += [node]
-
- elif cmd['operation'] == 'src_rm':
- # Helper to find the exact string node and its parent
- def find_node(src):
- for i in target['sources']:
- for j in arg_list_from_node(i):
- if isinstance(j, StringNode):
- if j.value == src:
- return i, j
- return None, None
-
- for i in cmd['sources']:
- # Try to find the node with the source string
- root, string_node = find_node(i)
- if root is None:
- mlog.warning(' -- Unable to find source', mlog.green(i), 'in the target')
- continue
-
- # Remove the found string node from the argument list
- arg_node = None
- if isinstance(root, (FunctionNode, ArrayNode)):
- arg_node = root.args
- elif isinstance(root, ArgumentNode):
- arg_node = root
- assert arg_node is not None
- mlog.log(' -- Removing source', mlog.green(i), 'from',
- mlog.yellow(f'{string_node.filename}:{string_node.lineno}'))
- arg_node.arguments.remove(string_node)
-
- # Mark the node as modified
- if arg_node not in to_sort_nodes and not isinstance(root, FunctionNode):
- to_sort_nodes += [arg_node]
- if root not in self.modified_nodes:
- self.modified_nodes += [root]
-
- elif cmd['operation'] == 'extra_files_add':
- tgt_function: FunctionNode = target['node']
- mark_array = True
- try:
- node = target['extra_files'][0]
- except IndexError:
- # Specifying `extra_files` with a list that flattens to empty gives an empty
- # target['extra_files'] list, account for that.
- try:
- extra_files_key = next(k for k in tgt_function.args.kwargs.keys() if isinstance(k, IdNode) and k.value == 'extra_files')
- node = tgt_function.args.kwargs[extra_files_key]
- except StopIteration:
- # Target has no extra_files kwarg, create one
- node = ArrayNode(_symbol('['), ArgumentNode(Token('', tgt_function.filename, 0, 0, 0, None, '[]')), _symbol(']'))
- tgt_function.args.kwargs[IdNode(Token('string', tgt_function.filename, 0, 0, 0, None, 'extra_files'))] = node
- mark_array = False
- if tgt_function not in self.modified_nodes:
- self.modified_nodes += [tgt_function]
- target['extra_files'] = [node]
- if isinstance(node, IdNode):
- node = self.interpreter.assignments[node.value]
- target['extra_files'] = [node]
- if not isinstance(node, ArrayNode):
- mlog.error('Target', mlog.bold(cmd['target']), 'extra_files argument must be a list', *self.on_error())
- return self.handle_error()
-
- # Generate the current extra files list
- extra_files_list = []
- for i in target['extra_files']:
- for j in arg_list_from_node(i):
- if isinstance(j, StringNode):
- extra_files_list += [j.value]
-
- # Generate the new String nodes
- to_append = []
- for i in sorted(set(cmd['sources'])):
- if i in extra_files_list:
- mlog.log(' -- Extra file', mlog.green(i), 'is already defined for the target --> skipping')
- continue
- mlog.log(' -- Adding extra file', mlog.green(i), 'at',
- mlog.yellow(f'{node.filename}:{node.lineno}'))
- token = Token('string', node.filename, 0, 0, 0, None, i)
- to_append += [StringNode(token)]
-
- # Append to the AST at the right place
- arg_node = node.args
- arg_node.arguments += to_append
-
- # Mark the node as modified
- if arg_node not in to_sort_nodes:
- to_sort_nodes += [arg_node]
- # If the extra_files array is newly created, don't mark it as its parent function node already is,
- # otherwise this would cause double modification.
- if mark_array and node not in self.modified_nodes:
- self.modified_nodes += [node]
-
- elif cmd['operation'] == 'extra_files_rm':
- # Helper to find the exact string node and its parent
- def find_node(src):
- for i in target['extra_files']:
- for j in arg_list_from_node(i):
- if isinstance(j, StringNode):
- if j.value == src:
- return i, j
- return None, None
-
- for i in cmd['sources']:
- # Try to find the node with the source string
- root, string_node = find_node(i)
- if root is None:
- mlog.warning(' -- Unable to find extra file', mlog.green(i), 'in the target')
- continue
+ to_sort_nodes: T.List[T.Union[FunctionNode, ArrayNode]] = []
- # Remove the found string node from the argument list
- arg_node = root.args
- mlog.log(' -- Removing extra file', mlog.green(i), 'from',
- mlog.yellow(f'{string_node.filename}:{string_node.lineno}'))
- arg_node.arguments.remove(string_node)
+ if cmd['operation'] in {'src_add', 'extra_files_add'}:
+ self.add_src_or_extra(cmd['operation'], target, cmd['sources'], to_sort_nodes)
- # Mark the node as modified
- if arg_node not in to_sort_nodes and not isinstance(root, FunctionNode):
- to_sort_nodes += [arg_node]
- if root not in self.modified_nodes:
- self.modified_nodes += [root]
+ elif cmd['operation'] in {'src_rm', 'extra_files_rm'}:
+ self.rm_src_or_extra(cmd['operation'], target, cmd['sources'], to_sort_nodes)
elif cmd['operation'] == 'target_add':
if target is not None:
@@ -813,7 +898,7 @@ class Rewriter:
id_base = re.sub(r'[- ]', '_', cmd['target'])
target_id = id_base + ('_exe' if cmd['target_type'] == 'executable' else '_lib')
source_id = id_base + '_sources'
- filename = os.path.join(cmd['subdir'], environment.build_filename)
+ filename = os.path.join(os.getcwd(), self.interpreter.source_root, cmd['subdir'], environment.build_filename)
# Build src list
src_arg_node = ArgumentNode(Token('string', filename, 0, 0, 0, None, ''))
@@ -838,44 +923,55 @@ class Rewriter:
self.to_add_nodes += [src_ass_node, tgt_ass_node]
elif cmd['operation'] == 'target_rm':
- to_remove = self.find_assignment_node(target['node'])
+ to_remove: T.Optional[BaseNode] = self.find_assignment_node(target.node)
if to_remove is None:
- to_remove = target['node']
+ to_remove = target.node
self.to_remove_nodes += [to_remove]
mlog.log(' -- Removing target', mlog.green(cmd['target']), 'at',
mlog.yellow(f'{to_remove.filename}:{to_remove.lineno}'))
elif cmd['operation'] == 'info':
# List all sources in the target
- src_list = []
- for i in target['sources']:
- for j in arg_list_from_node(i):
- if isinstance(j, StringNode):
- src_list += [j.value]
- extra_files_list = []
- for i in target['extra_files']:
- for j in arg_list_from_node(i):
- if isinstance(j, StringNode):
- extra_files_list += [j.value]
+
+ cwd = Path(os.getcwd())
+ source_root_abs = cwd / self.interpreter.source_root
+
+ src_list = self.interpreter.nodes_to_pretty_filelist(source_root_abs, target.subdir, target.source_nodes)
+ extra_files_list = self.interpreter.nodes_to_pretty_filelist(source_root_abs, target.subdir, [target.extra_files] if target.extra_files else [])
+
+ src_list = ['unknown' if isinstance(x, UnknownValue) else relpath(x, source_root_abs) for x in src_list]
+ extra_files_list = ['unknown' if isinstance(x, UnknownValue) else relpath(x, source_root_abs) for x in extra_files_list]
+
test_data = {
- 'name': target['name'],
+ 'name': target.name,
'sources': src_list,
'extra_files': extra_files_list
}
- self.add_info('target', target['id'], test_data)
+ self.add_info('target', target.id, test_data)
# Sort files
for i in to_sort_nodes:
- convert = lambda text: int(text) if text.isdigit() else text.lower()
- alphanum_key = lambda key: [convert(c) for c in re.split('([0-9]+)', key)]
- path_sorter = lambda key: ([(key.count('/') <= idx, alphanum_key(x)) for idx, x in enumerate(key.split('/'))])
+ def convert(text: str) -> T.Union[int, str]:
+ return int(text) if text.isdigit() else text.lower()
+
+ def alphanum_key(key: str) -> T.List[T.Union[int, str]]:
+ return [convert(c) for c in re.split('([0-9]+)', key)]
- unknown = [x for x in i.arguments if not isinstance(x, StringNode)]
- sources = [x for x in i.arguments if isinstance(x, StringNode)]
+ def path_sorter(key: str) -> T.List[T.Tuple[bool, T.List[T.Union[int, str]]]]:
+ return [(key.count('/') <= idx, alphanum_key(x)) for idx, x in enumerate(key.split('/'))]
+
+ if isinstance(i, FunctionNode) and i.func_name.value in BUILD_TARGET_FUNCTIONS:
+ src_args = i.args.arguments[1:]
+ target_name = [i.args.arguments[0]]
+ else:
+ src_args = i.args.arguments
+ target_name = []
+ unknown: T.List[BaseNode] = [x for x in src_args if not isinstance(x, StringNode)]
+ sources: T.List[StringNode] = [x for x in src_args if isinstance(x, StringNode)]
sources = sorted(sources, key=lambda x: path_sorter(x.value))
- i.arguments = unknown + sources
+ i.args.arguments = target_name + unknown + T.cast(T.List[BaseNode], sources)
- def process(self, cmd):
+ def process(self, cmd: T.Dict[str, T.Any]) -> None:
if 'type' not in cmd:
raise RewriterException('Command has no key "type"')
if cmd['type'] not in self.functions:
@@ -883,7 +979,7 @@ class Rewriter:
.format(cmd['type'], list(self.functions.keys())))
self.functions[cmd['type']](cmd)
- def apply_changes(self):
+ def apply_changes(self) -> None:
assert all(hasattr(x, 'lineno') and hasattr(x, 'colno') and hasattr(x, 'filename') for x in self.modified_nodes)
assert all(hasattr(x, 'lineno') and hasattr(x, 'colno') and hasattr(x, 'filename') for x in self.to_remove_nodes)
assert all(isinstance(x, (ArrayNode, FunctionNode)) for x in self.modified_nodes)
@@ -891,7 +987,7 @@ class Rewriter:
# Sort based on line and column in reversed order
work_nodes = [{'node': x, 'action': 'modify'} for x in self.modified_nodes]
work_nodes += [{'node': x, 'action': 'rm'} for x in self.to_remove_nodes]
- work_nodes = sorted(work_nodes, key=lambda x: (x['node'].lineno, x['node'].colno), reverse=True)
+ work_nodes = sorted(work_nodes, key=lambda x: (T.cast(BaseNode, x['node']).lineno, T.cast(BaseNode, x['node']).colno), reverse=True)
work_nodes += [{'node': x, 'action': 'add'} for x in self.to_add_nodes]
# Generating the new replacement string
@@ -900,11 +996,11 @@ class Rewriter:
new_data = ''
if i['action'] == 'modify' or i['action'] == 'add':
printer = AstPrinter()
- i['node'].accept(printer)
+ T.cast(BaseNode, i['node']).accept(printer)
printer.post_process()
new_data = printer.result.strip()
data = {
- 'file': i['node'].filename,
+ 'file': T.cast(BaseNode, i['node']).filename,
'str': new_data,
'node': i['node'],
'action': i['action']
@@ -912,11 +1008,11 @@ class Rewriter:
str_list += [data]
# Load build files
- files = {}
+ files: T.Dict[str, T.Any] = {}
for i in str_list:
if i['file'] in files:
continue
- fpath = os.path.realpath(os.path.join(self.sourcedir, i['file']))
+ fpath = os.path.realpath(T.cast(str, i['file']))
fdata = ''
# Create an empty file if it does not exist
if not os.path.exists(fpath):
@@ -933,14 +1029,14 @@ class Rewriter:
line_offsets += [offset]
offset += len(j)
- files[i['file']] = {
+ files[T.cast(str, i['file'])] = {
'path': fpath,
'raw': fdata,
'offsets': line_offsets
}
# Replace in source code
- def remove_node(i):
+ def remove_node(i: T.Dict[str, T.Any]) -> None:
offsets = files[i['file']]['offsets']
raw = files[i['file']]['raw']
node = i['node']
@@ -968,7 +1064,7 @@ class Rewriter:
if i['action'] in {'modify', 'rm'}:
remove_node(i)
elif i['action'] == 'add':
- files[i['file']]['raw'] += i['str'] + '\n'
+ files[T.cast(str, i['file'])]['raw'] += T.cast(str, i['str']) + '\n'
# Write the files back
for key, val in files.items():
@@ -999,7 +1095,7 @@ def list_to_dict(in_list: T.List[str]) -> T.Dict[str, str]:
raise TypeError('in_list parameter of list_to_dict must have an even length.')
return result
-def generate_target(options) -> T.List[dict]:
+def generate_target(options: argparse.Namespace) -> T.List[T.Dict[str, T.Any]]:
return [{
'type': 'target',
'target': options.target,
@@ -1009,7 +1105,7 @@ def generate_target(options) -> T.List[dict]:
'target_type': options.tgt_type,
}]
-def generate_kwargs(options) -> T.List[dict]:
+def generate_kwargs(options: argparse.Namespace) -> T.List[T.Dict[str, T.Any]]:
return [{
'type': 'kwargs',
'function': options.function,
@@ -1018,19 +1114,19 @@ def generate_kwargs(options) -> T.List[dict]:
'kwargs': list_to_dict(options.kwargs),
}]
-def generate_def_opts(options) -> T.List[dict]:
+def generate_def_opts(options: argparse.Namespace) -> T.List[T.Dict[str, T.Any]]:
return [{
'type': 'default_options',
'operation': options.operation,
'options': list_to_dict(options.options),
}]
-def generate_cmd(options) -> T.List[dict]:
+def generate_cmd(options: argparse.Namespace) -> T.List[T.Dict[str, T.Any]]:
if os.path.exists(options.json):
with open(options.json, encoding='utf-8') as fp:
- return json.load(fp)
+ return T.cast(T.List[T.Dict[str, T.Any]], json.load(fp))
else:
- return json.loads(options.json)
+ return T.cast(T.List[T.Dict[str, T.Any]], json.loads(options.json))
# Map options.type to the actual type name
cli_type_map = {
@@ -1043,7 +1139,7 @@ cli_type_map = {
'cmd': generate_cmd,
}
-def run(options):
+def run(options: argparse.Namespace) -> int:
mlog.redirect(True)
if not options.verbose:
mlog.set_quiet()
@@ -1062,12 +1158,22 @@ def run(options):
if not isinstance(commands, list):
raise TypeError('Command is not a list')
- for i in commands:
- if not isinstance(i, object):
+ for i, cmd in enumerate(commands):
+ if not isinstance(cmd, object):
raise TypeError('Command is not an object')
- rewriter.process(i)
+ rewriter.process(cmd)
+ rewriter.apply_changes()
+
+ if i == len(commands) - 1: # Skipping the reload after the last command improves performance; it is not needed for correctness.
+ break
+
+ rewriter.modified_nodes = []
+ rewriter.to_remove_nodes = []
+ rewriter.to_add_nodes = []
+ # The AST changed, so we need to recompute all information that was derived from the AST
+ rewriter.interpreter = IntrospectionInterpreter(rewriter.sourcedir, '', rewriter.interpreter.backend, visitors = [AstIDGenerator(), AstIndentationGenerator(), AstConditionLevel()])
+ rewriter.analyze_meson()
- rewriter.apply_changes()
rewriter.print_info()
return 0
except Exception as e:
diff --git a/mesonbuild/scripts/clangtidy.py b/mesonbuild/scripts/clangtidy.py
index 550faee..e5f7024 100644
--- a/mesonbuild/scripts/clangtidy.py
+++ b/mesonbuild/scripts/clangtidy.py
@@ -11,7 +11,7 @@ import os
import shutil
import sys
-from .run_tool import run_clang_tool, run_with_buffered_output
+from .run_tool import run_with_buffered_output, run_clang_tool_on_sources
from ..environment import detect_clangtidy, detect_clangapply
import typing as T
@@ -56,7 +56,7 @@ def run(args: T.List[str]) -> int:
fixesdir.unlink()
fixesdir.mkdir(parents=True)
- tidyret = run_clang_tool('clang-tidy', srcdir, builddir, run_clang_tidy, tidyexe, builddir, fixesdir)
+ tidyret = run_clang_tool_on_sources('clang-tidy', srcdir, builddir, run_clang_tidy, tidyexe, builddir, fixesdir)
if fixesdir is not None:
print('Applying fix-its...')
applyret = subprocess.run(applyexe + ['-format', '-style=file', '-ignore-insert-conflict', fixesdir]).returncode
diff --git a/mesonbuild/scripts/run_tool.py b/mesonbuild/scripts/run_tool.py
index e206ff7..6181c6d 100644
--- a/mesonbuild/scripts/run_tool.py
+++ b/mesonbuild/scripts/run_tool.py
@@ -128,6 +128,26 @@ def run_clang_tool(name: str, srcdir: Path, builddir: Path, fn: T.Callable[...,
yield fn(path, *args)
return asyncio.run(_run_workers(all_clike_files(name, srcdir, builddir), wrapper))
+def run_clang_tool_on_sources(name: str, srcdir: Path, builddir: Path, fn: T.Callable[..., T.Coroutine[None, None, int]], *args: T.Any) -> int:
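+ # Unlike run_clang_tool, which visits every C-like file in the tree, this
+ # variant only visits files listed as target sources in the introspection
+ # data (meson-info/intro-targets.json).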
+ if sys.platform == 'win32':
+ asyncio.set_event_loop_policy(asyncio.WindowsProactorEventLoopPolicy())
+
+ source_files = set()
+ with open('meson-info/intro-targets.json', encoding='utf-8') as fp:
+ targets = json.load(fp)
+
+ for target in targets:
+ for target_source in target.get('target_sources') or []:
+ for source in target_source.get('sources') or []:
+ source_files.add(Path(source))
+
+ clike_files = set(all_clike_files(name, srcdir, builddir))
+ source_files = source_files.intersection(clike_files)
+
+ def wrapper(path: Path) -> T.Iterable[T.Coroutine[None, None, int]]:
+ yield fn(path, *args)
+ return asyncio.run(_run_workers(source_files, wrapper))
+
def run_tool_on_targets(fn: T.Callable[[T.Dict[str, T.Any]],
T.Iterable[T.Coroutine[None, None, int]]]) -> int:
if sys.platform == 'win32':
diff --git a/mesonbuild/templates/cpptemplates.py b/mesonbuild/templates/cpptemplates.py
index 1bfa2ae..cdfbbf8 100644
--- a/mesonbuild/templates/cpptemplates.py
+++ b/mesonbuild/templates/cpptemplates.py
@@ -16,7 +16,7 @@ hello_cpp_template = '''#include <iostream>
int main(int argc, char **argv) {{
if (argc != 1) {{
- std::cout << argv[0] << "takes no arguments.\\n";
+ std::cout << argv[0] << " takes no arguments.\\n";
return 1;
}}
std::cout << "This is project " << PROJECT_NAME << ".\\n";
diff --git a/mesonbuild/utils/platform.py b/mesonbuild/utils/platform.py
index 8e762b6..8fdfee6 100644
--- a/mesonbuild/utils/platform.py
+++ b/mesonbuild/utils/platform.py
@@ -6,22 +6,30 @@ from __future__ import annotations
"""base classes providing no-op functionality.."""
+import enum
import os
import typing as T
from .. import mlog
-__all__ = ['BuildDirLock']
+__all__ = ['DirectoryLock', 'DirectoryLockAction', 'DirectoryLockBase']
-# This needs to be inherited by the specific implementations to make type
-# checking happy
-class BuildDirLock:
+class DirectoryLockAction(enum.Enum):
+ IGNORE = 0
+ WAIT = 1
+ FAIL = 2
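+ # Semantics (as implemented by the POSIX and Windows backends below): WAIT
+ # blocks until the lock is acquired, IGNORE continues without the lock if
+ # it is already held, and FAIL raises a MesonException immediately.
+ # Usage sketch: DirectoryLock(dir, '.wraplock', DirectoryLockAction.WAIT, 'msg')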
- def __init__(self, builddir: str) -> None:
- self.lockfilename = os.path.join(builddir, 'meson-private/meson.lock')
+class DirectoryLockBase:
+ def __init__(self, directory: str, lockfile: str, action: DirectoryLockAction, err: str) -> None:
+ self.action = action
+ self.err = err
+ self.lockpath = os.path.join(directory, lockfile)
def __enter__(self) -> None:
- mlog.debug('Calling the no-op version of BuildDirLock')
+ mlog.debug('Calling the no-op version of DirectoryLock')
def __exit__(self, *args: T.Any) -> None:
pass
+
+class DirectoryLock(DirectoryLockBase):
+ pass
diff --git a/mesonbuild/utils/posix.py b/mesonbuild/utils/posix.py
index e8387ba..a601dee 100644
--- a/mesonbuild/utils/posix.py
+++ b/mesonbuild/utils/posix.py
@@ -10,23 +10,33 @@ import fcntl
import typing as T
from .core import MesonException
-from .platform import BuildDirLock as BuildDirLockBase
+from .platform import DirectoryLockBase, DirectoryLockAction
-__all__ = ['BuildDirLock']
+__all__ = ['DirectoryLock', 'DirectoryLockAction']
-class BuildDirLock(BuildDirLockBase):
+class DirectoryLock(DirectoryLockBase):
def __enter__(self) -> None:
- self.lockfile = open(self.lockfilename, 'w', encoding='utf-8')
+ self.lockfile = open(self.lockpath, 'w+', encoding='utf-8')
try:
- fcntl.flock(self.lockfile, fcntl.LOCK_EX | fcntl.LOCK_NB)
- except (BlockingIOError, PermissionError):
+ flags = fcntl.LOCK_EX
+ if self.action != DirectoryLockAction.WAIT:
+ flags = flags | fcntl.LOCK_NB
+ fcntl.flock(self.lockfile, flags)
+ except BlockingIOError:
self.lockfile.close()
- raise MesonException('Some other Meson process is already using this build directory. Exiting.')
+ if self.action == DirectoryLockAction.IGNORE:
+ return
+ raise MesonException(self.err)
+ except PermissionError:
+ self.lockfile.close()
+ raise MesonException(self.err)
except OSError as e:
self.lockfile.close()
- raise MesonException(f'Failed to lock the build directory: {e.strerror}')
+ raise MesonException(f'Failed to lock directory {self.lockpath}: {e.strerror}')
def __exit__(self, *args: T.Any) -> None:
+ if self.lockfile is None or self.lockfile.closed:
+ return
fcntl.flock(self.lockfile, fcntl.LOCK_UN)
self.lockfile.close()
diff --git a/mesonbuild/utils/universal.py b/mesonbuild/utils/universal.py
index 5b3f131..0628310 100644
--- a/mesonbuild/utils/universal.py
+++ b/mesonbuild/utils/universal.py
@@ -38,6 +38,7 @@ if T.TYPE_CHECKING:
from ..environment import Environment
from ..compilers.compilers import Compiler
from ..interpreterbase.baseobjects import SubProject
+ from .. import programs
class _EnvPickleLoadable(Protocol):
@@ -432,7 +433,7 @@ class File(HoldableObject):
absdir = srcdir
if self.is_built:
absdir = builddir
- return os.path.join(absdir, self.relative_name())
+ return os.path.normpath(os.path.join(absdir, self.relative_name()))
@property
def suffix(self) -> str:
@@ -756,6 +757,20 @@ class VcsData:
rev_regex: str
dep: str
wc_dir: T.Optional[str] = None
+ repo_can_be_file: bool = False
+
+ def repo_exists(self, curdir: Path) -> bool:
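+ # The repository marker may be a file rather than a directory, e.g. the
+ # .git file used by git worktrees and submodules, hence repo_can_be_file.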
+ if not shutil.which(self.cmd):
+ return False
+
+ repo = curdir / self.repo_dir
+ if repo.is_dir():
+ return True
+ if repo.is_file() and self.repo_can_be_file:
+ return True
+
+ return False
+
def detect_vcs(source_dir: T.Union[str, Path]) -> T.Optional[VcsData]:
vcs_systems = [
@@ -766,6 +781,7 @@ def detect_vcs(source_dir: T.Union[str, Path]) -> T.Optional[VcsData]:
get_rev = ['git', 'describe', '--dirty=+', '--always'],
rev_regex = '(.*)',
dep = '.git/logs/HEAD',
+ repo_can_be_file=True,
),
VcsData(
name = 'mercurial',
@@ -801,9 +817,7 @@ def detect_vcs(source_dir: T.Union[str, Path]) -> T.Optional[VcsData]:
parent_paths_and_self.appendleft(source_dir)
for curdir in parent_paths_and_self:
for vcs in vcs_systems:
- repodir = vcs.repo_dir
- cmd = vcs.cmd
- if curdir.joinpath(repodir).is_dir() and shutil.which(cmd):
+ if vcs.repo_exists(curdir):
vcs.wc_dir = str(curdir)
return vcs
return None
@@ -1226,7 +1240,7 @@ def do_replacement(regex: T.Pattern[str], line: str,
if variable_format == 'meson':
return do_replacement_meson(regex, line, confdata)
elif variable_format in {'cmake', 'cmake@'}:
- return do_replacement_cmake(regex, line, variable_format == 'cmake@', confdata)
+ return do_replacement_cmake(line, variable_format == 'cmake@', confdata)
else:
raise MesonException('Invalid variable format')
@@ -1261,44 +1275,92 @@ def do_replacement_meson(regex: T.Pattern[str], line: str,
return var_str
return re.sub(regex, variable_replace, line), missing_variables
-def do_replacement_cmake(regex: T.Pattern[str], line: str, at_only: bool,
+def do_replacement_cmake(line: str, at_only: bool,
confdata: T.Union[T.Dict[str, T.Tuple[str, T.Optional[str]]], 'ConfigurationData']) -> T.Tuple[str, T.Set[str]]:
missing_variables: T.Set[str] = set()
- def variable_replace(match: T.Match[str]) -> str:
- # Pairs of escape characters before '@', '\@', '${' or '\${'
- if match.group(0).endswith('\\'):
- num_escapes = match.end(0) - match.start(0)
- return '\\' * (num_escapes // 2)
- # Handle cmake escaped \${} tags
- elif not at_only and match.group(0) == '\\${':
- return '${'
- # \@escaped\@ variables
- elif match.groupdict().get('escaped') is not None:
- return match.group('escaped')[1:-2]+'@'
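+ # Matches any character that is not valid in a CMake variable name.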
+ character_regex = re.compile(r'''
+ [^a-zA-Z0-9_/.+\-]
+ ''', re.VERBOSE)
+
+ def variable_get(varname: str) -> str:
+ var_str = ''
+ if varname in confdata:
+ var, _ = confdata.get(varname)
+ if isinstance(var, str):
+ var_str = var
+ elif isinstance(var, bool):
+ var_str = str(int(var))
+ elif isinstance(var, int):
+ var_str = str(var)
+ else:
+ msg = f'Tried to replace variable {varname!r} value with ' \
+ f'something other than a string or int: {var!r}'
+ raise MesonException(msg)
else:
- # Template variable to be replaced
- varname = match.group('variable')
- if not varname:
- varname = match.group('cmake_variable')
-
- var_str = ''
- if varname in confdata:
- var, _ = confdata.get(varname)
- if isinstance(var, str):
- var_str = var
- elif isinstance(var, bool):
- var_str = str(int(var))
- elif isinstance(var, int):
- var_str = str(var)
- else:
- msg = f'Tried to replace variable {varname!r} value with ' \
- f'something other than a string or int: {var!r}'
+ missing_variables.add(varname)
+ return var_str
+
+ def parse_line(line: str) -> str:
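+ # Scan the line left to right, substituting @var@ references and, unless
+ # at_only is set, ${var} references, which may be nested and are handled
+ # by the bracket counting below.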
+ index = 0
+ while len(line) > index:
+ if line[index] == '@':
+ next_at = line.find("@", index+1)
+ if next_at > index+1:
+ varname = line[index+1:next_at]
+ match = character_regex.search(varname)
+
+ # @-substitution doesn't occur if the key isn't valid;
+ # however, it also doesn't raise an error
+ if not match:
+ value = variable_get(varname)
+ line = line[:index] + value + line[next_at+1:]
+
+ elif not at_only and line[index:index+2] == '${':
+ bracket_count = 1
+ end_bracket = index + 2
+ try:
+ while bracket_count > 0:
+ if line[end_bracket:end_bracket+2] == "${":
+ end_bracket += 2
+ bracket_count += 1
+ elif line[end_bracket] == "}":
+ end_bracket += 1
+ bracket_count -= 1
+ elif line[end_bracket] in {"@", "\n"}:
+ # these aren't valid variable characters
+ # but they are inconsequential at this point
+ end_bracket += 1
+ elif character_regex.search(line[end_bracket]):
+ invalid_character = line[end_bracket]
+ variable = line[index+2:end_bracket]
+ msg = f'Found invalid character {invalid_character!r}' \
+ f' in variable {variable!r}'
+ raise MesonException(msg)
+ else:
+ end_bracket += 1
+ except IndexError:
+ msg = f'Found incomplete variable {line[index:-1]!r}'
raise MesonException(msg)
- else:
- missing_variables.add(varname)
- return var_str
- return re.sub(regex, variable_replace, line), missing_variables
+
+ if bracket_count == 0:
+ varname = parse_line(line[index+2:end_bracket-1])
+ match = character_regex.search(varname)
+ if match:
+ invalid_character = line[end_bracket-2]
+ variable = line[index+2:end_bracket-3]
+ msg = f'Found invalid character {invalid_character!r}' \
+ f' in variable {variable!r}'
+ raise MesonException(msg)
+
+ value = variable_get(varname)
+ line = line[:index] + value + line[end_bracket:]
+
+ index += 1
+
+ return line
+
+ return parse_line(line), missing_variables
def do_define_meson(regex: T.Pattern[str], line: str, confdata: 'ConfigurationData',
subproject: T.Optional[SubProject] = None) -> str:
@@ -1327,12 +1389,12 @@ def do_define_meson(regex: T.Pattern[str], line: str, confdata: 'ConfigurationDa
else:
raise MesonException('#mesondefine argument "%s" is of unknown type.' % varname)
-def do_define_cmake(regex: T.Pattern[str], line: str, confdata: 'ConfigurationData', at_only: bool,
+def do_define_cmake(line: str, confdata: 'ConfigurationData', at_only: bool,
subproject: T.Optional[SubProject] = None) -> str:
cmake_bool_define = 'cmakedefine01' in line
def get_cmake_define(line: str, confdata: 'ConfigurationData') -> str:
- arr = line.split()
+ arr = line[1:].split()
if cmake_bool_define:
(v, desc) = confdata.get(arr[1])
@@ -1347,7 +1409,7 @@ def do_define_cmake(regex: T.Pattern[str], line: str, confdata: 'ConfigurationDa
define_value += [token]
return ' '.join(define_value)
- arr = line.split()
+ arr = line[1:].split()
if len(arr) != 2 and subproject is not None:
from ..interpreterbase.decorators import FeatureNew
@@ -1367,12 +1429,12 @@ def do_define_cmake(regex: T.Pattern[str], line: str, confdata: 'ConfigurationDa
result = get_cmake_define(line, confdata)
result = f'#define {varname} {result}'.strip() + '\n'
- result, _ = do_replacement_cmake(regex, result, at_only, confdata)
+ result, _ = do_replacement_cmake(result, at_only, confdata)
return result
def get_variable_regex(variable_format: Literal['meson', 'cmake', 'cmake@'] = 'meson') -> T.Pattern[str]:
# Only allow (a-z, A-Z, 0-9, _, -) as valid characters for a define
- if variable_format in {'meson', 'cmake@'}:
+ if variable_format == 'meson':
# Also allow escaping pairs of '@' with '\@'
regex = re.compile(r'''
(?:\\\\)+(?=\\?@) # Matches multiple backslashes followed by an @ symbol
@@ -1381,17 +1443,13 @@ def get_variable_regex(variable_format: Literal['meson', 'cmake', 'cmake@'] = 'm
| # OR
(?P<escaped>\\@[-a-zA-Z0-9_]+\\@) # Match an escaped variable enclosed in @ symbols
''', re.VERBOSE)
- else:
+ elif variable_format == 'cmake@':
regex = re.compile(r'''
- (?:\\\\)+(?=\\?(\$|@)) # Match multiple backslashes followed by a dollar sign or an @ symbol
- | # OR
- \\\${ # Match a backslash followed by a dollar sign and an opening curly brace
- | # OR
- \${(?P<cmake_variable>[-a-zA-Z0-9_]+)} # Match a variable enclosed in curly braces and capture the variable name
- | # OR
(?<!\\)@(?P<variable>[-a-zA-Z0-9_]+)@ # Match a variable enclosed in @ symbols and capture the variable name; no matches beginning with '\@'
- | # OR
- (?P<escaped>\\@[-a-zA-Z0-9_]+\\@) # Match an escaped variable enclosed in @ symbols
+ ''', re.VERBOSE)
+ elif variable_format == "cmake":
+ regex = re.compile(r'''
+ \${(?P<variable>[-a-zA-Z0-9_]*)} # Match a variable enclosed in curly braces and capture the variable name
''', re.VERBOSE)
return regex
@@ -1439,9 +1497,7 @@ def do_conf_str_cmake(src: str, data: T.List[str], confdata: 'ConfigurationData'
if at_only:
variable_format = 'cmake@'
- regex = get_variable_regex(variable_format)
-
- search_token = '#cmakedefine'
+ search_token = 'cmakedefine'
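+ # CMake allows whitespace between the '#' and 'cmakedefine', so the '#'
+ # is checked separately from the search token below.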
result: T.List[str] = []
missing_variables: T.Set[str] = set()
@@ -1449,13 +1505,15 @@ def do_conf_str_cmake(src: str, data: T.List[str], confdata: 'ConfigurationData'
# during substitution so we can warn the user to use the `copy:` kwarg.
confdata_useless = not confdata.keys()
for line in data:
- if line.lstrip().startswith(search_token):
+ stripped_line = line.lstrip()
+ if len(stripped_line) >= 2 and stripped_line[0] == '#' and stripped_line[1:].lstrip().startswith(search_token):
confdata_useless = False
- line = do_define_cmake(regex, line, confdata, at_only, subproject)
+
+ line = do_define_cmake(line, confdata, at_only, subproject)
else:
if '#mesondefine' in line:
raise MesonException(f'Format error in {src}: saw "{line.strip()}" when format set to "{variable_format}"')
- line, missing = do_replacement_cmake(regex, line, at_only, confdata)
+ line, missing = do_replacement_cmake(line, at_only, confdata)
missing_variables.update(missing)
if missing:
confdata_useless = False
@@ -1578,7 +1636,7 @@ def listify(item: T.Any, flatten: bool = True) -> T.List[T.Any]:
result.append(i)
return result
-def listify_array_value(value: T.Union[str, T.List[str]], shlex_split_args: bool = False) -> T.List[str]:
+def listify_array_value(value: object, shlex_split_args: bool = False) -> T.List[str]:
if isinstance(value, str):
if value.startswith('['):
try:
@@ -1738,7 +1796,7 @@ def Popen_safe_logged(args: T.List[str], msg: str = 'Called', **kwargs: T.Any) -
return p, o, e
-def iter_regexin_iter(regexiter: T.Iterable[str], initer: T.Iterable[str]) -> T.Optional[str]:
+def iter_regexin_iter(regexiter: T.Iterable[str], initer: T.Iterable[str | programs.ExternalProgram]) -> T.Optional[str]:
'''
Takes each regular expression in @regexiter and tries to search for it in
every item in @initer. If there is a match, returns that match.
@@ -1754,7 +1812,7 @@ def iter_regexin_iter(regexiter: T.Iterable[str], initer: T.Iterable[str]) -> T.
return None
-def _substitute_values_check_errors(command: T.List[str], values: T.Dict[str, T.Union[str, T.List[str]]]) -> None:
+def _substitute_values_check_errors(command: T.List[str | programs.ExternalProgram], values: T.Dict[str, T.Union[str, T.List[str]]]) -> None:
# Error checking
inregex: T.List[str] = ['@INPUT([0-9]+)?@', '@PLAINNAME@', '@BASENAME@']
outregex: T.List[str] = ['@OUTPUT([0-9]+)?@', '@OUTDIR@']
@@ -1794,7 +1852,7 @@ def _substitute_values_check_errors(command: T.List[str], values: T.Dict[str, T.
raise MesonException(m.format(match2.group(), len(values['@OUTPUT@'])))
-def substitute_values(command: T.List[str], values: T.Dict[str, T.Union[str, T.List[str]]]) -> T.List[str]:
+def substitute_values(command: T.List[str | programs.ExternalProgram], values: T.Dict[str, T.Union[str, T.List[str]]]) -> T.List[str | programs.ExternalProgram]:
'''
Substitute the template strings in the @values dict into the list of
strings @command and return a new list. For a full list of the templates,
@@ -1821,7 +1879,7 @@ def substitute_values(command: T.List[str], values: T.Dict[str, T.Union[str, T.L
_substitute_values_check_errors(command, values)
# Substitution
- outcmd: T.List[str] = []
+ outcmd: T.List[str | programs.ExternalProgram] = []
rx_keys = [re.escape(key) for key in values if key not in ('@INPUT@', '@OUTPUT@')]
value_rx = re.compile('|'.join(rx_keys)) if rx_keys else None
for vv in command:
diff --git a/mesonbuild/utils/win32.py b/mesonbuild/utils/win32.py
index 4fcb8ed..22aea86 100644
--- a/mesonbuild/utils/win32.py
+++ b/mesonbuild/utils/win32.py
@@ -10,20 +10,30 @@ import msvcrt
import typing as T
from .core import MesonException
-from .platform import BuildDirLock as BuildDirLockBase
+from .platform import DirectoryLockBase, DirectoryLockAction
-__all__ = ['BuildDirLock']
+__all__ = ['DirectoryLock', 'DirectoryLockAction']
-class BuildDirLock(BuildDirLockBase):
+class DirectoryLock(DirectoryLockBase):
def __enter__(self) -> None:
- self.lockfile = open(self.lockfilename, 'w', encoding='utf-8')
+ self.lockfile = open(self.lockpath, 'w+', encoding='utf-8')
try:
- msvcrt.locking(self.lockfile.fileno(), msvcrt.LK_NBLCK, 1)
- except (BlockingIOError, PermissionError):
+ mode = msvcrt.LK_LOCK
+ if self.action != DirectoryLockAction.WAIT:
+ mode = msvcrt.LK_NBLCK
+ msvcrt.locking(self.lockfile.fileno(), mode, 1)
+ except BlockingIOError:
self.lockfile.close()
- raise MesonException('Some other Meson process is already using this build directory. Exiting.')
+ if self.action == DirectoryLockAction.IGNORE:
+ return
+ raise MesonException(self.err)
+ except PermissionError:
+ self.lockfile.close()
+ raise MesonException(self.err)
def __exit__(self, *args: T.Any) -> None:
+ if self.lockfile is None or self.lockfile.closed:
+ return
msvcrt.locking(self.lockfile.fileno(), msvcrt.LK_UNLCK, 1)
self.lockfile.close()
diff --git a/mesonbuild/wrap/wrap.py b/mesonbuild/wrap/wrap.py
index 9af1f39..1cc2cee 100644
--- a/mesonbuild/wrap/wrap.py
+++ b/mesonbuild/wrap/wrap.py
@@ -21,6 +21,7 @@ import time
import typing as T
import textwrap
import json
+import gzip
from base64 import b64encode
from netrc import netrc
@@ -29,7 +30,10 @@ from functools import lru_cache
from . import WrapMode
from .. import coredata
-from ..mesonlib import quiet_git, GIT, ProgressBar, MesonException, windows_proof_rmtree, Popen_safe
+from ..mesonlib import (
+ DirectoryLock, DirectoryLockAction, quiet_git, GIT, ProgressBar, MesonException,
+ windows_proof_rmtree, Popen_safe
+)
from ..interpreterbase import FeatureNew
from ..interpreterbase import SubProject
from .. import mesonlib
@@ -53,7 +57,21 @@ WHITELIST_SUBDOMAIN = 'wrapdb.mesonbuild.com'
ALL_TYPES = ['file', 'git', 'hg', 'svn', 'redirect']
-PATCH = shutil.which('patch')
+if mesonlib.is_windows():
+ from ..programs import ExternalProgram
+ from ..mesonlib import version_compare
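+ # Walk through PATH, excluding directories whose 'patch' is older than
+ # 2.6.1, until a sufficiently new copy is found or none is left.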
+ _exclude_paths: T.List[str] = []
+ while True:
+ _patch = ExternalProgram('patch', silent=True, exclude_paths=_exclude_paths)
+ if not _patch.found():
+ break
+ if version_compare(_patch.get_version(), '>=2.6.1'):
+ break
+ _exclude_paths.append(os.path.dirname(_patch.get_path()))
+ PATCH = _patch.get_path() if _patch.found() else None
+else:
+ PATCH = shutil.which('patch')
+
def whitelist_wrapdb(urlstr: str) -> urllib.parse.ParseResult:
""" raises WrapException if not whitelisted subdomain """
@@ -66,16 +84,23 @@ def whitelist_wrapdb(urlstr: str) -> urllib.parse.ParseResult:
raise WrapException(f'WrapDB did not have expected SSL https url, instead got {urlstr}')
return url
-def open_wrapdburl(urlstring: str, allow_insecure: bool = False, have_opt: bool = False) -> 'http.client.HTTPResponse':
+def open_wrapdburl(urlstring: str, allow_insecure: bool = False, have_opt: bool = False, allow_compression: bool = False) -> http.client.HTTPResponse:
if have_opt:
insecure_msg = '\n\n To allow connecting anyway, pass `--allow-insecure`.'
else:
insecure_msg = ''
+ def do_urlopen(url: urllib.parse.ParseResult) -> http.client.HTTPResponse:
+ headers = {}
+ if allow_compression:
+ headers['Accept-Encoding'] = 'gzip'
+ req = urllib.request.Request(urllib.parse.urlunparse(url), headers=headers)
+ return T.cast('http.client.HTTPResponse', urllib.request.urlopen(req, timeout=REQ_TIMEOUT))
+
url = whitelist_wrapdb(urlstring)
if has_ssl:
try:
- return T.cast('http.client.HTTPResponse', urllib.request.urlopen(urllib.parse.urlunparse(url), timeout=REQ_TIMEOUT))
+ return do_urlopen(url)
except OSError as excp:
msg = f'WrapDB connection failed to {urlstring} with error {excp}.'
if isinstance(excp, urllib.error.URLError) and isinstance(excp.reason, ssl.SSLCertVerificationError):
@@ -92,15 +117,24 @@ def open_wrapdburl(urlstring: str, allow_insecure: bool = False, have_opt: bool
mlog.warning(f'SSL module not available in {sys.executable}: WrapDB traffic not authenticated.', once=True)
# If we got this far, allow_insecure was manually passed
- nossl_url = url._replace(scheme='http')
try:
- return T.cast('http.client.HTTPResponse', urllib.request.urlopen(urllib.parse.urlunparse(nossl_url), timeout=REQ_TIMEOUT))
+ return do_urlopen(url._replace(scheme='http'))
except OSError as excp:
raise WrapException(f'WrapDB connection failed to {urlstring} with error {excp}')
+def read_and_decompress(resp: http.client.HTTPResponse) -> bytes:
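+ # Responses may be gzip-compressed because open_wrapdburl sends
+ # 'Accept-Encoding: gzip' when allow_compression is set.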
+ data = resp.read()
+ encoding = resp.headers['Content-Encoding']
+ if encoding == 'gzip':
+ return gzip.decompress(data)
+ elif encoding:
+ raise WrapException(f'Unexpected Content-Encoding for {resp.url}: {encoding}')
+ else:
+ return data
+
def get_releases_data(allow_insecure: bool) -> bytes:
- url = open_wrapdburl('https://wrapdb.mesonbuild.com/v2/releases.json', allow_insecure, True)
- return url.read()
+ url = open_wrapdburl('https://wrapdb.mesonbuild.com/v2/releases.json', allow_insecure, True, True)
+ return read_and_decompress(url)
@lru_cache(maxsize=None)
def get_releases(allow_insecure: bool) -> T.Dict[str, T.Any]:
@@ -109,9 +143,9 @@ def get_releases(allow_insecure: bool) -> T.Dict[str, T.Any]:
def update_wrap_file(wrapfile: str, name: str, new_version: str, new_revision: str, allow_insecure: bool) -> None:
url = open_wrapdburl(f'https://wrapdb.mesonbuild.com/v2/{name}_{new_version}-{new_revision}/{name}.wrap',
- allow_insecure, True)
+ allow_insecure, True, True)
with open(wrapfile, 'wb') as f:
- f.write(url.read())
+ f.write(read_and_decompress(url))
def parse_patch_url(patch_url: str) -> T.Tuple[str, str]:
u = urllib.parse.urlparse(patch_url)
@@ -213,6 +247,15 @@ class PackageDefinition:
wrap.original_filename = filename
wrap.parse_provide_section(config)
+ patch_url = values.get('patch_url')
+ if patch_url and patch_url.startswith('https://wrapdb.mesonbuild.com/v1'):
+ if name == 'sqlite':
+ mlog.deprecation('sqlite wrap has been renamed to sqlite3, update using `meson wrap install sqlite3`')
+ elif name == 'libjpeg':
+ mlog.deprecation('libjpeg wrap has been renamed to libjpeg-turbo, update using `meson wrap install libjpeg-turbo`')
+ else:
+ mlog.deprecation(f'WrapDB v1 is deprecated, update using `meson wrap update {name}`')
+
with open(filename, 'r', encoding='utf-8') as file:
wrap.wrapfile_hash = hashlib.sha256(file.read().encode('utf-8')).hexdigest()
@@ -311,6 +354,7 @@ class Resolver:
self.wrapdb: T.Dict[str, T.Any] = {}
self.wrapdb_provided_deps: T.Dict[str, str] = {}
self.wrapdb_provided_programs: T.Dict[str, str] = {}
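+ # Subdirectories whose wrap files have already been loaded, so that
+ # load_and_merge() does not process the same directory twice.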
+ self.loaded_dirs: T.Set[str] = set()
self.load_wraps()
self.load_netrc()
self.load_wrapdb()
@@ -352,6 +396,7 @@ class Resolver:
# Add provided deps and programs into our lookup tables
for wrap in self.wraps.values():
self.add_wrap(wrap)
+ self.loaded_dirs.add(self.subdir)
def add_wrap(self, wrap: PackageDefinition) -> None:
for k in wrap.provided_deps.keys():
@@ -384,10 +429,10 @@ class Resolver:
self.check_can_download()
latest_version = info['versions'][0]
version, revision = latest_version.rsplit('-', 1)
- url = urllib.request.urlopen(f'https://wrapdb.mesonbuild.com/v2/{subp_name}_{version}-{revision}/{subp_name}.wrap')
+ url = open_wrapdburl(f'https://wrapdb.mesonbuild.com/v2/{subp_name}_{version}-{revision}/{subp_name}.wrap', allow_compression=True)
fname = Path(self.subdir_root, f'{subp_name}.wrap')
with fname.open('wb') as f:
- f.write(url.read())
+ f.write(read_and_decompress(url))
mlog.log(f'Installed {subp_name} version {version} revision {revision}')
wrap = PackageDefinition.from_wrap_file(str(fname))
self.wraps[wrap.name] = wrap
@@ -396,16 +441,25 @@ class Resolver:
def _merge_wraps(self, other_resolver: 'Resolver') -> None:
for k, v in other_resolver.wraps.items():
- self.wraps.setdefault(k, v)
- for k, v in other_resolver.provided_deps.items():
- self.provided_deps.setdefault(k, v)
- for k, v in other_resolver.provided_programs.items():
- self.provided_programs.setdefault(k, v)
+ prev_wrap = self.wraps.get(v.directory)
+ if prev_wrap and prev_wrap.type is None and v.type is not None:
+ # This happens when a subproject has been previously downloaded
+ # using a wrap from another subproject and the wrap-redirect got
+ # deleted. In that case, the main project created a bare wrap
+ # for the download directory, but now we have a proper wrap.
+ # It also happens for wraps coming from Cargo.lock files, which
+ # don't create wrap-redirect.
+ del self.wraps[v.directory]
+ del self.provided_deps[v.directory]
+ if k not in self.wraps:
+ self.wraps[k] = v
+ self.add_wrap(v)
def load_and_merge(self, subdir: str, subproject: SubProject) -> None:
- if self.wrap_mode != WrapMode.nopromote:
+ if self.wrap_mode != WrapMode.nopromote and subdir not in self.loaded_dirs:
other_resolver = Resolver(self.source_dir, subdir, subproject, self.wrap_mode, self.wrap_frontend, self.allow_insecure, self.silent)
self._merge_wraps(other_resolver)
+ self.loaded_dirs.add(subdir)
def find_dep_provider(self, packagename: str) -> T.Tuple[T.Optional[str], T.Optional[str]]:
# Python's ini parser converts all key values to lowercase.
@@ -432,7 +486,7 @@ class Resolver:
return wrap_name
return None
- def resolve(self, packagename: str, force_method: T.Optional[Method] = None) -> T.Tuple[str, Method]:
+ def _resolve(self, packagename: str, force_method: T.Optional[Method] = None) -> T.Tuple[str, Method]:
wrap = self.wraps.get(packagename)
if wrap is None:
wrap = self.get_from_wrapdb(packagename)
@@ -530,6 +584,15 @@ class Resolver:
self.wrap.update_hash_cache(self.dirname)
return rel_path, method
+ def resolve(self, packagename: str, force_method: T.Optional[Method] = None) -> T.Tuple[str, Method]:
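+ # Hold a lock on the subprojects directory so that concurrent Meson
+ # processes do not resolve and download into it at the same time.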
+ try:
+ with DirectoryLock(self.subdir_root, '.wraplock',
+ DirectoryLockAction.WAIT,
+ 'Failed to lock subprojects directory'):
+ return self._resolve(packagename, force_method)
+ except FileNotFoundError:
+ raise WrapNotFoundException('Attempted to resolve subproject without subprojects directory present.')
+
def check_can_download(self) -> None:
# Don't download subproject data based on wrap file if requested.
# Git submodules are ok (see above)!
@@ -691,6 +754,23 @@ class Resolver:
resp = open_wrapdburl(urlstring, allow_insecure=self.allow_insecure, have_opt=self.wrap_frontend)
elif WHITELIST_SUBDOMAIN in urlstring:
raise WrapException(f'{urlstring} may be a WrapDB-impersonating URL')
+ elif url.scheme == 'sftp':
+ sftp = shutil.which('sftp')
+ if sftp is None:
+ raise WrapException('Scheme sftp is not available. Install sftp to enable it.')
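+ # Download into a scratch directory first, then move the result into the
+ # cache directory and hash it like any other downloaded file.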
+ with tempfile.TemporaryDirectory() as workdir, \
+ tempfile.NamedTemporaryFile(mode='wb', dir=self.cachedir, delete=False) as tmpfile:
+ args = []
+ # Older versions of the sftp client cannot handle URLs, hence the URL is split below
+ if url.port:
+ args += ['-P', f'{url.port}']
+ user = f'{url.username}@' if url.username else ''
+ command = [sftp, '-o', 'KbdInteractiveAuthentication=no', *args, f'{user}{url.hostname}:{url.path[1:]}']
+ subprocess.run(command, cwd=workdir, check=True)
+ downloaded = os.path.join(workdir, os.path.basename(url.path))
+ tmpfile.close()
+ shutil.move(downloaded, tmpfile.name)
+ return self.hash_file(tmpfile.name), tmpfile.name
else:
headers = {
'User-Agent': f'mesonbuild/{coredata.version}',
@@ -715,7 +795,7 @@ class Resolver:
resp = urllib.request.urlopen(req, timeout=REQ_TIMEOUT)
except OSError as e:
mlog.log(str(e))
- raise WrapException(f'could not get {urlstring} is the internet available?')
+ raise WrapException(f'could not get {urlstring}; is the internet available?')
with contextlib.closing(resp) as resp, tmpfile as tmpfile:
try:
dlsize = int(resp.info()['Content-Length'])
@@ -746,14 +826,17 @@ class Resolver:
hashvalue = h.hexdigest()
return hashvalue, tmpfile.name
+ def hash_file(self, path: str) -> str:
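+ # Shared by check_hash() and the sftp download path above.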
+ h = hashlib.sha256()
+ with open(path, 'rb') as f:
+ h.update(f.read())
+ return h.hexdigest()
+
def check_hash(self, what: str, path: str, hash_required: bool = True) -> None:
if what + '_hash' not in self.wrap.values and not hash_required:
return
expected = self.wrap.get(what + '_hash').lower()
- h = hashlib.sha256()
- with open(path, 'rb') as f:
- h.update(f.read())
- dhash = h.hexdigest()
+ dhash = self.hash_file(path)
if dhash != expected:
raise WrapException(f'Incorrect hash for {what}:\n {expected} expected\n {dhash} actual.')
diff --git a/mesonbuild/wrap/wraptool.py b/mesonbuild/wrap/wraptool.py
index 5486a26..6f97fe2 100644
--- a/mesonbuild/wrap/wraptool.py
+++ b/mesonbuild/wrap/wraptool.py
@@ -9,8 +9,8 @@ import shutil
import typing as T
from glob import glob
-from .wrap import (open_wrapdburl, WrapException, get_releases, get_releases_data,
- parse_patch_url)
+from .wrap import (open_wrapdburl, read_and_decompress, WrapException, get_releases,
+ get_releases_data, parse_patch_url)
from pathlib import Path
from .. import mesonlib, msubprojects
@@ -99,9 +99,9 @@ def install(options: 'argparse.Namespace') -> None:
if os.path.exists(wrapfile):
raise SystemExit('Wrap file already exists.')
(version, revision) = get_latest_version(name, options.allow_insecure)
- url = open_wrapdburl(f'https://wrapdb.mesonbuild.com/v2/{name}_{version}-{revision}/{name}.wrap', options.allow_insecure, True)
+ url = open_wrapdburl(f'https://wrapdb.mesonbuild.com/v2/{name}_{version}-{revision}/{name}.wrap', options.allow_insecure, True, True)
with open(wrapfile, 'wb') as f:
- f.write(url.read())
+ f.write(read_and_decompress(url))
print(f'Installed {name} version {version} revision {revision}')
def get_current_version(wrapfile: str) -> T.Tuple[str, str, str, str, T.Optional[str]]: