aboutsummaryrefslogtreecommitdiff
path: root/mesonbuild
diff options
context:
space:
mode:
Diffstat (limited to 'mesonbuild')
-rw-r--r--mesonbuild/arglist.py34
-rw-r--r--mesonbuild/ast/interpreter.py16
-rw-r--r--mesonbuild/ast/introspection.py22
-rw-r--r--mesonbuild/ast/postprocess.py2
-rw-r--r--mesonbuild/ast/printer.py12
-rw-r--r--mesonbuild/backend/ninjabackend.py14
-rw-r--r--mesonbuild/build.py198
-rw-r--r--mesonbuild/cmake/common.py66
-rw-r--r--mesonbuild/cmake/executor.py10
-rw-r--r--mesonbuild/cmake/fileapi.py4
-rw-r--r--mesonbuild/cmake/generator.py16
-rw-r--r--mesonbuild/cmake/toolchain.py6
-rw-r--r--mesonbuild/cmake/traceparser.py42
-rw-r--r--mesonbuild/compilers/cs.py4
-rw-r--r--mesonbuild/compilers/detect.py2
-rw-r--r--mesonbuild/compilers/mixins/arm.py20
-rw-r--r--mesonbuild/compilers/mixins/clang.py6
-rw-r--r--mesonbuild/compilers/mixins/clike.py28
-rw-r--r--mesonbuild/compilers/mixins/pgi.py4
-rw-r--r--mesonbuild/compilers/mixins/xc16.py16
-rw-r--r--mesonbuild/compilers/rust.py4
-rw-r--r--mesonbuild/compilers/swift.py4
-rw-r--r--mesonbuild/coredata.py36
-rw-r--r--mesonbuild/dependencies/base.py11
-rw-r--r--mesonbuild/dependencies/boost.py40
-rw-r--r--mesonbuild/dependencies/cmake.py4
-rw-r--r--mesonbuild/dependencies/cuda.py18
-rw-r--r--mesonbuild/dependencies/hdf5.py4
-rw-r--r--mesonbuild/dependencies/mpi.py2
-rw-r--r--mesonbuild/dependencies/pkgconfig.py4
-rw-r--r--mesonbuild/dependencies/python.py12
-rw-r--r--mesonbuild/dependencies/ui.py34
-rw-r--r--mesonbuild/envconfig.py4
-rw-r--r--mesonbuild/environment.py15
-rw-r--r--mesonbuild/interpreter/compiler.py58
-rw-r--r--mesonbuild/interpreter/interpreter.py82
-rw-r--r--mesonbuild/interpreter/interpreterobjects.py18
-rw-r--r--mesonbuild/interpreter/kwargs.py29
-rw-r--r--mesonbuild/interpreter/type_checking.py56
-rw-r--r--mesonbuild/interpreterbase/decorators.py6
-rw-r--r--mesonbuild/interpreterbase/interpreterbase.py4
-rw-r--r--mesonbuild/linkers/detect.py4
-rw-r--r--mesonbuild/mcompile.py10
-rw-r--r--mesonbuild/mconf.py9
-rw-r--r--mesonbuild/minstall.py14
-rw-r--r--mesonbuild/mintro.py44
-rw-r--r--mesonbuild/mlog.py8
-rw-r--r--mesonbuild/modules/__init__.py5
-rw-r--r--mesonbuild/modules/cmake.py2
-rw-r--r--mesonbuild/modules/external_project.py1
-rw-r--r--mesonbuild/modules/fs.py34
-rw-r--r--mesonbuild/modules/gnome.py16
-rw-r--r--mesonbuild/modules/hotdoc.py146
-rw-r--r--mesonbuild/modules/i18n.py3
-rw-r--r--mesonbuild/modules/python.py104
-rw-r--r--mesonbuild/modules/qt.py3
-rw-r--r--mesonbuild/modules/rust.py1
-rw-r--r--mesonbuild/modules/windows.py1
-rw-r--r--mesonbuild/mparser.py10
-rw-r--r--mesonbuild/msetup.py34
-rw-r--r--mesonbuild/mtest.py92
-rwxr-xr-xmesonbuild/scripts/cmake_run_ctgt.py2
-rw-r--r--mesonbuild/scripts/depfixer.py6
-rwxr-xr-xmesonbuild/scripts/python_info.py15
-rw-r--r--mesonbuild/utils/universal.py66
-rw-r--r--mesonbuild/utils/vsenv.py4
-rw-r--r--mesonbuild/wrap/wrap.py18
67 files changed, 947 insertions, 672 deletions
diff --git a/mesonbuild/arglist.py b/mesonbuild/arglist.py
index c44728a..f50d54e 100644
--- a/mesonbuild/arglist.py
+++ b/mesonbuild/arglist.py
@@ -26,7 +26,7 @@ if T.TYPE_CHECKING:
from .compilers import Compiler
# execinfo is a compiler lib on BSD
-UNIXY_COMPILER_INTERNAL_LIBS = ['m', 'c', 'pthread', 'dl', 'rt', 'execinfo'] # type: T.List[str]
+UNIXY_COMPILER_INTERNAL_LIBS = ['m', 'c', 'pthread', 'dl', 'rt', 'execinfo']
class Dedup(enum.Enum):
@@ -82,44 +82,44 @@ class CompilerArgs(T.MutableSequence[str]):
'''
# Arg prefixes that override by prepending instead of appending
- prepend_prefixes = () # type: T.Tuple[str, ...]
+ prepend_prefixes: T.Tuple[str, ...] = ()
# Arg prefixes and args that must be de-duped by returning 2
- dedup2_prefixes = () # type: T.Tuple[str, ...]
- dedup2_suffixes = () # type: T.Tuple[str, ...]
- dedup2_args = () # type: T.Tuple[str, ...]
+ dedup2_prefixes: T.Tuple[str, ...] = ()
+ dedup2_suffixes: T.Tuple[str, ...] = ()
+ dedup2_args: T.Tuple[str, ...] = ()
# Arg prefixes and args that must be de-duped by returning 1
#
# NOTE: not thorough. A list of potential corner cases can be found in
# https://github.com/mesonbuild/meson/pull/4593#pullrequestreview-182016038
- dedup1_prefixes = () # type: T.Tuple[str, ...]
- dedup1_suffixes = ('.lib', '.dll', '.so', '.dylib', '.a') # type: T.Tuple[str, ...]
+ dedup1_prefixes: T.Tuple[str, ...] = ()
+ dedup1_suffixes = ('.lib', '.dll', '.so', '.dylib', '.a')
# Match a .so of the form path/to/libfoo.so.0.1.0
# Only UNIX shared libraries require this. Others have a fixed extension.
dedup1_regex = re.compile(r'([\/\\]|\A)lib.*\.so(\.[0-9]+)?(\.[0-9]+)?(\.[0-9]+)?$')
- dedup1_args = () # type: T.Tuple[str, ...]
+ dedup1_args: T.Tuple[str, ...] = ()
# In generate_link() we add external libs without de-dup, but we must
# *always* de-dup these because they're special arguments to the linker
# TODO: these should probably move too
- always_dedup_args = tuple('-l' + lib for lib in UNIXY_COMPILER_INTERNAL_LIBS) # type : T.Tuple[str, ...]
+ always_dedup_args = tuple('-l' + lib for lib in UNIXY_COMPILER_INTERNAL_LIBS)
def __init__(self, compiler: T.Union['Compiler', 'StaticLinker'],
iterable: T.Optional[T.Iterable[str]] = None):
self.compiler = compiler
- self._container = list(iterable) if iterable is not None else [] # type: T.List[str]
- self.pre = collections.deque() # type: T.Deque[str]
- self.post = collections.deque() # type: T.Deque[str]
+ self._container: T.List[str] = list(iterable) if iterable is not None else []
+ self.pre: T.Deque[str] = collections.deque()
+ self.post: T.Deque[str] = collections.deque()
# Flush the saved pre and post list into the _container list
#
# This correctly deduplicates the entries after _can_dedup definition
# Note: This function is designed to work without delete operations, as deletions are worsening the performance a lot.
def flush_pre_post(self) -> None:
- new = [] # type: T.List[str]
- pre_flush_set = set() # type: T.Set[str]
- post_flush = collections.deque() # type: T.Deque[str]
- post_flush_set = set() # type: T.Set[str]
+ new: T.List[str] = []
+ pre_flush_set: T.Set[str] = set()
+ post_flush: T.Deque[str] = collections.deque()
+ post_flush_set: T.Set[str] = set()
#The two lists are here walked from the front to the back, in order to not need removals for deduplication
for a in self.pre:
@@ -285,7 +285,7 @@ class CompilerArgs(T.MutableSequence[str]):
Add two CompilerArgs while taking into account overriding of arguments
and while preserving the order of arguments as much as possible
'''
- tmp_pre = collections.deque() # type: T.Deque[str]
+ tmp_pre: T.Deque[str] = collections.deque()
if not isinstance(args, collections.abc.Iterable):
raise TypeError(f'can only concatenate Iterable[str] (not "{args}") to CompilerArgs')
for arg in args:
diff --git a/mesonbuild/ast/interpreter.py b/mesonbuild/ast/interpreter.py
index 68e2b6e..70a4f1f 100644
--- a/mesonbuild/ast/interpreter.py
+++ b/mesonbuild/ast/interpreter.py
@@ -97,10 +97,10 @@ class AstInterpreter(InterpreterBase):
def __init__(self, source_root: str, subdir: str, subproject: str, visitors: T.Optional[T.List[AstVisitor]] = None):
super().__init__(source_root, subdir, subproject)
self.visitors = visitors if visitors is not None else []
- self.processed_buildfiles = set() # type: T.Set[str]
- self.assignments = {} # type: T.Dict[str, BaseNode]
- self.assign_vals = {} # type: T.Dict[str, T.Any]
- self.reverse_assignment = {} # type: T.Dict[str, BaseNode]
+ self.processed_buildfiles: T.Set[str] = set()
+ self.assignments: T.Dict[str, BaseNode] = {}
+ self.assign_vals: T.Dict[str, T.Any] = {}
+ self.reverse_assignment: T.Dict[str, BaseNode] = {}
self.funcs.update({'project': self.func_do_nothing,
'test': self.func_do_nothing,
'benchmark': self.func_do_nothing,
@@ -274,7 +274,7 @@ class AstInterpreter(InterpreterBase):
duplicate_key_error: T.Optional[str] = None,
) -> T.Tuple[T.List[TYPE_nvar], TYPE_nkwargs]:
if isinstance(args, ArgumentNode):
- kwargs = {} # type: T.Dict[str, TYPE_nvar]
+ kwargs: T.Dict[str, TYPE_nvar] = {}
for key, val in args.kwargs.items():
kwargs[key_resolver(key)] = val
if args.incorrect_order():
@@ -383,7 +383,7 @@ class AstInterpreter(InterpreterBase):
elif isinstance(node, MethodNode):
src = quick_resolve(node.source_object)
margs = self.flatten_args(node.args.arguments, include_unknown_args, id_loop_detect)
- mkwargs = {} # type: T.Dict[str, TYPE_nvar]
+ mkwargs: T.Dict[str, TYPE_nvar] = {}
try:
if isinstance(src, str):
result = StringHolder(src, T.cast('Interpreter', self)).method_call(node.name, margs, mkwargs)
@@ -402,7 +402,7 @@ class AstInterpreter(InterpreterBase):
if isinstance(result, BaseNode):
result = self.resolve_node(result, include_unknown_args, id_loop_detect)
elif isinstance(result, list):
- new_res = [] # type: T.List[TYPE_nvar]
+ new_res: T.List[TYPE_nvar] = []
for i in result:
if isinstance(i, BaseNode):
resolved = self.resolve_node(i, include_unknown_args, id_loop_detect)
@@ -421,7 +421,7 @@ class AstInterpreter(InterpreterBase):
else:
args = [args_raw]
- flattened_args = [] # type: T.List[TYPE_nvar]
+ flattened_args: T.List[TYPE_nvar] = []
# Resolve the contents of args
for i in args:
diff --git a/mesonbuild/ast/introspection.py b/mesonbuild/ast/introspection.py
index d66e73f..e8055c5 100644
--- a/mesonbuild/ast/introspection.py
+++ b/mesonbuild/ast/introspection.py
@@ -46,9 +46,9 @@ class IntrospectionHelper(argparse.Namespace):
# mimic an argparse namespace
def __init__(self, cross_file: str):
super().__init__()
- self.cross_file = cross_file # type: str
- self.native_file = None # type: str
- self.cmd_line_options = {} # type: T.Dict[str, str]
+ self.cross_file = cross_file
+ self.native_file: str = None
+ self.cmd_line_options: T.Dict[str, str] = {}
def __eq__(self, other: object) -> bool:
return NotImplemented
@@ -78,10 +78,10 @@ class IntrospectionInterpreter(AstInterpreter):
self.coredata = self.environment.get_coredata()
self.backend = backend
self.default_options = {OptionKey('backend'): self.backend}
- self.project_data = {} # type: T.Dict[str, T.Any]
- self.targets = [] # type: T.List[T.Dict[str, T.Any]]
- self.dependencies = [] # type: T.List[T.Dict[str, T.Any]]
- self.project_node = None # type: BaseNode
+ self.project_data: T.Dict[str, T.Any] = {}
+ self.targets: T.List[T.Dict[str, T.Any]] = []
+ self.dependencies: T.List[T.Dict[str, T.Any]] = []
+ self.project_node: BaseNode = None
self.funcs.update({
'add_languages': self.func_add_languages,
@@ -170,7 +170,7 @@ class IntrospectionInterpreter(AstInterpreter):
self._add_languages(args, required, for_machine)
def _add_languages(self, raw_langs: T.List[TYPE_nvar], required: bool, for_machine: MachineChoice) -> None:
- langs = [] # type: T.List[str]
+ langs: T.List[str] = []
for l in self.flatten_args(raw_langs):
if isinstance(l, str):
langs.append(l)
@@ -238,7 +238,7 @@ class IntrospectionInterpreter(AstInterpreter):
kwargs = self.flatten_kwargs(kwargs_raw, True)
def traverse_nodes(inqueue: T.List[BaseNode]) -> T.List[BaseNode]:
- res = [] # type: T.List[BaseNode]
+ res: T.List[BaseNode] = []
while inqueue:
curr = inqueue.pop(0)
arg_node = None
@@ -277,8 +277,8 @@ class IntrospectionInterpreter(AstInterpreter):
kwargs_reduced = {k: v.value if isinstance(v, ElementaryNode) else v for k, v in kwargs_reduced.items()}
kwargs_reduced = {k: v for k, v in kwargs_reduced.items() if not isinstance(v, BaseNode)}
for_machine = MachineChoice.HOST
- objects = [] # type: T.List[T.Any]
- empty_sources = [] # type: T.List[T.Any]
+ objects: T.List[T.Any] = []
+ empty_sources: T.List[T.Any] = []
# Passing the unresolved sources list causes errors
kwargs_reduced['_allow_no_sources'] = True
target = targetclass(name, self.subdir, self.subproject, for_machine, empty_sources, [], objects,
diff --git a/mesonbuild/ast/postprocess.py b/mesonbuild/ast/postprocess.py
index 09c339d..7d2036e 100644
--- a/mesonbuild/ast/postprocess.py
+++ b/mesonbuild/ast/postprocess.py
@@ -80,7 +80,7 @@ class AstIndentationGenerator(AstVisitor):
class AstIDGenerator(AstVisitor):
def __init__(self) -> None:
- self.counter = {} # type: T.Dict[str, int]
+ self.counter: T.Dict[str, int] = {}
def visit_default_func(self, node: mparser.BaseNode) -> None:
name = type(node).__name__
diff --git a/mesonbuild/ast/printer.py b/mesonbuild/ast/printer.py
index 579a83d..ebf63af 100644
--- a/mesonbuild/ast/printer.py
+++ b/mesonbuild/ast/printer.py
@@ -240,12 +240,12 @@ class AstPrinter(AstVisitor):
class AstJSONPrinter(AstVisitor):
def __init__(self) -> None:
- self.result = {} # type: T.Dict[str, T.Any]
+ self.result: T.Dict[str, T.Any] = {}
self.current = self.result
def _accept(self, key: str, node: mparser.BaseNode) -> None:
old = self.current
- data = {} # type: T.Dict[str, T.Any]
+ data: T.Dict[str, T.Any] = {}
self.current = data
node.accept(self)
self.current = old
@@ -253,7 +253,7 @@ class AstJSONPrinter(AstVisitor):
def _accept_list(self, key: str, nodes: T.Sequence[mparser.BaseNode]) -> None:
old = self.current
- datalist = [] # type: T.List[T.Dict[str, T.Any]]
+ datalist: T.List[T.Dict[str, T.Any]] = []
for i in nodes:
self.current = {}
i.accept(self)
@@ -388,10 +388,10 @@ class AstJSONPrinter(AstVisitor):
def visit_ArgumentNode(self, node: mparser.ArgumentNode) -> None:
self._accept_list('positional', node.arguments)
- kwargs_list = [] # type: T.List[T.Dict[str, T.Dict[str, T.Any]]]
+ kwargs_list: T.List[T.Dict[str, T.Dict[str, T.Any]]] = []
for key, val in node.kwargs.items():
- key_res = {} # type: T.Dict[str, T.Any]
- val_res = {} # type: T.Dict[str, T.Any]
+ key_res: T.Dict[str, T.Any] = {}
+ val_res: T.Dict[str, T.Any] = {}
self._raw_accept(key, key_res)
self._raw_accept(val, val_res)
kwargs_list += [{'key': key_res, 'val': val_res}]
diff --git a/mesonbuild/backend/ninjabackend.py b/mesonbuild/backend/ninjabackend.py
index 3ddc197..03f33d6 100644
--- a/mesonbuild/backend/ninjabackend.py
+++ b/mesonbuild/backend/ninjabackend.py
@@ -664,9 +664,9 @@ class NinjaBackend(backends.Backend):
os.replace(tempfilename, outfilename)
mlog.cmd_ci_include(outfilename) # For CI debugging
# Refresh Ninja's caches. https://github.com/ninja-build/ninja/pull/1685
- if mesonlib.version_compare(self.ninja_version, '>=1.10.0') and os.path.exists('.ninja_deps'):
- subprocess.call(self.ninja_command + ['-t', 'restat'])
- subprocess.call(self.ninja_command + ['-t', 'cleandead'])
+ if mesonlib.version_compare(self.ninja_version, '>=1.10.0') and os.path.exists(os.path.join(self.environment.build_dir, '.ninja_log')):
+ subprocess.call(self.ninja_command + ['-t', 'restat'], cwd=self.environment.build_dir)
+ subprocess.call(self.ninja_command + ['-t', 'cleandead'], cwd=self.environment.build_dir)
self.generate_compdb()
self.generate_rust_project_json()
@@ -1147,7 +1147,7 @@ class NinjaBackend(backends.Backend):
deps.append(os.path.join(self.get_target_dir(i), output))
return deps
- def generate_custom_target(self, target):
+ def generate_custom_target(self, target: build.CustomTarget):
self.custom_target_generator_inputs(target)
(srcs, ofilenames, cmd) = self.eval_custom_target_command(target)
deps = self.unwrap_dep_list(target)
@@ -1185,7 +1185,7 @@ class NinjaBackend(backends.Backend):
elem.add_item('pool', 'console')
full_name = Path(target.subdir, target.name).as_posix()
elem.add_item('COMMAND', cmd)
- elem.add_item('description', f'Generating {full_name} with a custom command{cmd_type}')
+ elem.add_item('description', target.description.format(full_name) + cmd_type)
self.add_build(elem)
self.processed_targets.add(target.get_id())
@@ -3815,7 +3815,7 @@ def _scan_fortran_file_deps(src: Path, srcdir: Path, dirname: Path, tdeps, compi
# a common occurrence, which would lead to lots of
# distracting noise.
continue
- srcfile = srcdir / tdeps[usename].fname # type: Path
+ srcfile = srcdir / tdeps[usename].fname
if not srcfile.is_file():
if srcfile.name != src.name: # generated source file
pass
@@ -3837,7 +3837,7 @@ def _scan_fortran_file_deps(src: Path, srcdir: Path, dirname: Path, tdeps, compi
ancestor_child = '_'.join(parents)
if ancestor_child not in tdeps:
raise MesonException("submodule {} relies on ancestor module {} that was not found.".format(submodmatch.group(2).lower(), ancestor_child.split('_', maxsplit=1)[0]))
- submodsrcfile = srcdir / tdeps[ancestor_child].fname # type: Path
+ submodsrcfile = srcdir / tdeps[ancestor_child].fname
if not submodsrcfile.is_file():
if submodsrcfile.name != src.name: # generated source file
pass
diff --git a/mesonbuild/build.py b/mesonbuild/build.py
index 16bf412..8fed785 100644
--- a/mesonbuild/build.py
+++ b/mesonbuild/build.py
@@ -389,7 +389,7 @@ class IncludeDirs(HoldableObject):
def get_extra_build_dirs(self) -> T.List[str]:
return self.extra_build_dirs
- def to_string_list(self, sourcedir: str, builddir: T.Optional[str] = None) -> T.List[str]:
+ def to_string_list(self, sourcedir: str, builddir: str) -> T.List[str]:
"""Convert IncludeDirs object to a list of strings.
:param sourcedir: The absolute source directory
@@ -400,8 +400,7 @@ class IncludeDirs(HoldableObject):
strlist: T.List[str] = []
for idir in self.incdirs:
strlist.append(os.path.join(sourcedir, self.curdir, idir))
- if builddir:
- strlist.append(os.path.join(builddir, self.curdir, idir))
+ strlist.append(os.path.join(builddir, self.curdir, idir))
return strlist
@dataclass(eq=False)
@@ -723,7 +722,7 @@ class BuildTarget(Target):
kwargs):
super().__init__(name, subdir, subproject, True, for_machine, environment, install=kwargs.get('install', False))
self.all_compilers = compilers
- self.compilers = OrderedDict() # type: OrderedDict[str, Compiler]
+ self.compilers: OrderedDict[str, Compiler] = OrderedDict()
self.objects: T.List[ObjectTypes] = []
self.structured_sources = structured_sources
self.external_deps: T.List[dependencies.Dependency] = []
@@ -1346,8 +1345,8 @@ class BuildTarget(Target):
self.process_sourcelist(dep.get_sources())
self.add_deps(dep.ext_deps)
elif isinstance(dep, BuildTarget):
- raise InvalidArguments('''Tried to use a build target as a dependency.
-You probably should put it in link_with instead.''')
+ raise InvalidArguments(f'Tried to use a build target {dep.name} as a dependency of target {self.name}.\n'
+ 'You probably should put it in link_with instead.')
else:
# This is a bit of a hack. We do not want Build to know anything
# about the interpreter so we can't import it and use isinstance.
@@ -1380,17 +1379,6 @@ You probably should put it in link_with instead.''')
def link(self, targets):
for t in targets:
- if isinstance(self, StaticLibrary) and self.install:
- if isinstance(t, (CustomTarget, CustomTargetIndex)):
- if not t.should_install():
- mlog.warning(f'Try to link an installed static library target {self.name} with a'
- 'custom target that is not installed, this might cause problems'
- 'when you try to use this static library')
- elif t.is_internal():
- # When we're a static library and we link_with to an
- # internal/convenience library, promote to link_whole.
- self.link_whole([t])
- continue
if not isinstance(t, (Target, CustomTargetIndex)):
if isinstance(t, dependencies.ExternalLibrary):
raise MesonException(textwrap.dedent('''\
@@ -1403,6 +1391,11 @@ You probably should put it in link_with instead.''')
raise InvalidArguments(f'{t!r} is not a target.')
if not t.is_linkable_target():
raise InvalidArguments(f"Link target '{t!s}' is not linkable.")
+ if isinstance(self, StaticLibrary) and self.install and t.is_internal():
+ # When we're a static library and we link_with to an
+ # internal/convenience library, promote to link_whole.
+ self.link_whole([t], promoted=True)
+ continue
if isinstance(self, SharedLibrary) and isinstance(t, StaticLibrary) and not t.pic:
msg = f"Can't link non-PIC static library {t.name!r} into shared library {self.name!r}. "
msg += "Use the 'pic' option to static_library to build with PIC."
@@ -1415,7 +1408,7 @@ You probably should put it in link_with instead.''')
mlog.warning(msg + ' This will fail in cross build.')
self.link_targets.append(t)
- def link_whole(self, targets):
+ def link_whole(self, targets, promoted: bool = False):
for t in targets:
if isinstance(t, (CustomTarget, CustomTargetIndex)):
if not t.is_linkable_target():
@@ -1435,40 +1428,49 @@ You probably should put it in link_with instead.''')
else:
mlog.warning(msg + ' This will fail in cross build.')
if isinstance(self, StaticLibrary) and not self.uses_rust():
- if isinstance(t, (CustomTarget, CustomTargetIndex)) or t.uses_rust():
- # There are cases we cannot do this, however. In Rust, for
- # example, this can't be done with Rust ABI libraries, though
- # it could be done with C ABI libraries, though there are
- # several meson issues that need to be fixed:
- # https://github.com/mesonbuild/meson/issues/10722
- # https://github.com/mesonbuild/meson/issues/10723
- # https://github.com/mesonbuild/meson/issues/10724
- # FIXME: We could extract the .a archive to get object files
- raise InvalidArguments('Cannot link_whole a custom or Rust target into a static library')
# When we're a static library and we link_whole: to another static
# library, we need to add that target's objects to ourselves.
+ self.check_can_extract_objects(t, origin=self, promoted=promoted)
self.objects += [t.extract_all_objects()]
# If we install this static library we also need to include objects
# from all uninstalled static libraries it depends on.
if self.install:
- for lib in t.get_internal_static_libraries():
+ for lib in t.get_internal_static_libraries(origin=self):
self.objects += [lib.extract_all_objects()]
self.link_whole_targets.append(t)
@lru_cache(maxsize=None)
- def get_internal_static_libraries(self) -> OrderedSet[Target]:
+ def get_internal_static_libraries(self, origin: StaticLibrary) -> OrderedSet[Target]:
result: OrderedSet[Target] = OrderedSet()
- self.get_internal_static_libraries_recurse(result)
+ self.get_internal_static_libraries_recurse(result, origin)
return result
- def get_internal_static_libraries_recurse(self, result: OrderedSet[Target]) -> None:
+ def get_internal_static_libraries_recurse(self, result: OrderedSet[Target], origin: StaticLibrary) -> None:
for t in self.link_targets:
if t.is_internal() and t not in result:
+ self.check_can_extract_objects(t, origin, promoted=True)
result.add(t)
- t.get_internal_static_libraries_recurse(result)
+ t.get_internal_static_libraries_recurse(result, origin)
for t in self.link_whole_targets:
if t.is_internal():
- t.get_internal_static_libraries_recurse(result)
+ t.get_internal_static_libraries_recurse(result, origin)
+
+ def check_can_extract_objects(self, t: T.Union[Target, CustomTargetIndex], origin: StaticLibrary, promoted: bool = False) -> None:
+ if isinstance(t, (CustomTarget, CustomTargetIndex)) or t.uses_rust():
+ # To extract objects from a custom target we would have to extract
+ # the archive, WIP implementation can be found in
+ # https://github.com/mesonbuild/meson/pull/9218.
+ # For Rust C ABI we could in theory have access to objects, but there
+ # are several meson issues that need to be fixed:
+ # https://github.com/mesonbuild/meson/issues/10722
+ # https://github.com/mesonbuild/meson/issues/10723
+ # https://github.com/mesonbuild/meson/issues/10724
+ m = (f'Cannot link_whole a custom or Rust target {t.name!r} into a static library {origin.name!r}. '
+ 'Instead, pass individual object files with the "objects:" keyword argument if possible.')
+ if promoted:
+ m += (f' Meson had to promote link to link_whole because {origin.name!r} is installed but not {t.name!r},'
+ f' and thus has to include objects from {t.name!r} to be usable.')
+ raise InvalidArguments(m)
def add_pch(self, language: str, pchlist: T.List[str]) -> None:
if not pchlist:
@@ -1536,7 +1538,7 @@ You probably should put it in link_with instead.''')
See: https://github.com/mesonbuild/meson/issues/1653
'''
- langs = [] # type: T.List[str]
+ langs: T.List[str] = []
# Check if any of the external libraries were written in this language
for dep in self.external_deps:
@@ -1947,10 +1949,6 @@ class Executable(BuildTarget):
# The import library this target will generate
self.import_filename = None
- # The import library that Visual Studio would generate (and accept)
- self.vs_import_filename = None
- # The import library that GCC would generate (and prefer)
- self.gcc_import_filename = None
# The debugging information file this target will generate
self.debug_filename = None
@@ -1960,12 +1958,10 @@ class Executable(BuildTarget):
if isinstance(self.implib, str):
implib_basename = self.implib
if machine.is_windows() or machine.is_cygwin():
- self.vs_import_filename = f'{implib_basename}.lib'
- self.gcc_import_filename = f'lib{implib_basename}.a'
if self.get_using_msvc():
- self.import_filename = self.vs_import_filename
+ self.import_filename = f'{implib_basename}.lib'
else:
- self.import_filename = self.gcc_import_filename
+ self.import_filename = f'lib{implib_basename}.a'
create_debug_file = (
machine.is_windows()
@@ -1994,11 +1990,6 @@ class Executable(BuildTarget):
"""
return self.import_filename
- def get_import_filenameslist(self):
- if self.import_filename:
- return [self.vs_import_filename, self.gcc_import_filename]
- return []
-
def get_debug_filename(self) -> T.Optional[str]:
"""
The name of debuginfo file that will be created by the compiler
@@ -2080,6 +2071,9 @@ class StaticLibrary(BuildTarget):
# libfoo.a. However, we cannot use foo.lib because that's the same as
# the import library. Using libfoo.a is ok because people using MSVC
# always pass the library filename while linking anyway.
+ #
+ # See our FAQ for more detailed rationale:
+ # https://mesonbuild.com/FAQ.html#why-does-building-my-project-with-msvc-output-static-libraries-called-libfooa
if not hasattr(self, 'prefix'):
self.prefix = 'lib'
if not hasattr(self, 'suffix'):
@@ -2135,17 +2129,13 @@ class SharedLibrary(BuildTarget):
environment: environment.Environment,
compilers: T.Dict[str, 'Compiler'],
kwargs):
- self.soversion = None
- self.ltversion = None
+ self.soversion: T.Optional[str] = None
+ self.ltversion: T.Optional[str] = None
# Max length 2, first element is compatibility_version, second is current_version
- self.darwin_versions = []
+ self.darwin_versions: T.Optional[T.Tuple[str, str]] = None
self.vs_module_defs = None
# The import library this target will generate
self.import_filename = None
- # The import library that Visual Studio would generate (and accept)
- self.vs_import_filename = None
- # The import library that GCC would generate (and prefer)
- self.gcc_import_filename = None
# The debugging information file this target will generate
self.debug_filename = None
# Use by the pkgconfig module
@@ -2200,21 +2190,16 @@ class SharedLibrary(BuildTarget):
The template is needed while creating aliases (self.get_aliases),
which are needed while generating .so shared libraries for Linux.
- Besides this, there's also the import library name, which is only used
- on Windows since on that platform the linker uses a separate library
- called the "import library" during linking instead of the shared
- library (DLL). The toolchain will output an import library in one of
- two formats: GCC or Visual Studio.
-
- When we're building with Visual Studio, the import library that will be
- generated by the toolchain is self.vs_import_filename, and with
- MinGW/GCC, it's self.gcc_import_filename. self.import_filename will
- always contain the import library name this target will generate.
+ Besides this, there's also the import library name (self.import_filename),
+ which is only used on Windows since on that platform the linker uses a
+ separate library called the "import library" during linking instead of
+ the shared library (DLL).
"""
prefix = ''
suffix = ''
create_debug_file = False
self.filename_tpl = self.basic_filename_tpl
+ import_filename_tpl = None
# NOTE: manual prefix/suffix override is currently only tested for C/C++
# C# and Mono
if 'cs' in self.compilers:
@@ -2227,20 +2212,18 @@ class SharedLibrary(BuildTarget):
# For all other targets/platforms import_filename stays None
elif self.environment.machines[self.for_machine].is_windows():
suffix = 'dll'
- self.vs_import_filename = '{}{}.lib'.format(self.prefix if self.prefix is not None else '', self.name)
- self.gcc_import_filename = '{}{}.dll.a'.format(self.prefix if self.prefix is not None else 'lib', self.name)
if self.uses_rust():
# Shared library is of the form foo.dll
prefix = ''
# Import library is called foo.dll.lib
- self.import_filename = f'{self.name}.dll.lib'
+ import_filename_tpl = '{0.prefix}{0.name}.dll.lib'
# .pdb file is only created when debug symbols are enabled
create_debug_file = self.environment.coredata.get_option(OptionKey("debug"))
elif self.get_using_msvc():
# Shared library is of the form foo.dll
prefix = ''
# Import library is called foo.lib
- self.import_filename = self.vs_import_filename
+ import_filename_tpl = '{0.prefix}{0.name}.lib'
# .pdb file is only created when debug symbols are enabled
create_debug_file = self.environment.coredata.get_option(OptionKey("debug"))
# Assume GCC-compatible naming
@@ -2248,7 +2231,7 @@ class SharedLibrary(BuildTarget):
# Shared library is of the form libfoo.dll
prefix = 'lib'
# Import library is called libfoo.dll.a
- self.import_filename = self.gcc_import_filename
+ import_filename_tpl = '{0.prefix}{0.name}.dll.a'
# Shared library has the soversion if it is defined
if self.soversion:
self.filename_tpl = '{0.prefix}{0.name}-{0.soversion}.{0.suffix}'
@@ -2256,12 +2239,12 @@ class SharedLibrary(BuildTarget):
self.filename_tpl = '{0.prefix}{0.name}.{0.suffix}'
elif self.environment.machines[self.for_machine].is_cygwin():
suffix = 'dll'
- self.gcc_import_filename = '{}{}.dll.a'.format(self.prefix if self.prefix is not None else 'lib', self.name)
# Shared library is of the form cygfoo.dll
# (ld --dll-search-prefix=cyg is the default)
prefix = 'cyg'
# Import library is called libfoo.dll.a
- self.import_filename = self.gcc_import_filename
+ import_prefix = self.prefix if self.prefix is not None else 'lib'
+ import_filename_tpl = import_prefix + '{0.name}.dll.a'
if self.soversion:
self.filename_tpl = '{0.prefix}{0.name}-{0.soversion}.{0.suffix}'
else:
@@ -2298,79 +2281,31 @@ class SharedLibrary(BuildTarget):
if self.suffix is None:
self.suffix = suffix
self.filename = self.filename_tpl.format(self)
+ if import_filename_tpl:
+ self.import_filename = import_filename_tpl.format(self)
# There may have been more outputs added by the time we get here, so
# only replace the first entry
self.outputs[0] = self.filename
if create_debug_file:
self.debug_filename = os.path.splitext(self.filename)[0] + '.pdb'
- @staticmethod
- def _validate_darwin_versions(darwin_versions):
- try:
- if isinstance(darwin_versions, int):
- darwin_versions = str(darwin_versions)
- if isinstance(darwin_versions, str):
- darwin_versions = 2 * [darwin_versions]
- if not isinstance(darwin_versions, list):
- raise InvalidArguments('Shared library darwin_versions: must be a string, integer,'
- f'or a list, not {darwin_versions!r}')
- if len(darwin_versions) > 2:
- raise InvalidArguments('Shared library darwin_versions: list must contain 2 or fewer elements')
- if len(darwin_versions) == 1:
- darwin_versions = 2 * darwin_versions
- for i, v in enumerate(darwin_versions[:]):
- if isinstance(v, int):
- v = str(v)
- if not isinstance(v, str):
- raise InvalidArguments('Shared library darwin_versions: list elements '
- f'must be strings or integers, not {v!r}')
- if not re.fullmatch(r'[0-9]+(\.[0-9]+){0,2}', v):
- raise InvalidArguments('Shared library darwin_versions: must be X.Y.Z where '
- 'X, Y, Z are numbers, and Y and Z are optional')
- parts = v.split('.')
- if len(parts) in {1, 2, 3} and int(parts[0]) > 65535:
- raise InvalidArguments('Shared library darwin_versions: must be X.Y.Z '
- 'where X is [0, 65535] and Y, Z are optional')
- if len(parts) in {2, 3} and int(parts[1]) > 255:
- raise InvalidArguments('Shared library darwin_versions: must be X.Y.Z '
- 'where Y is [0, 255] and Y, Z are optional')
- if len(parts) == 3 and int(parts[2]) > 255:
- raise InvalidArguments('Shared library darwin_versions: must be X.Y.Z '
- 'where Z is [0, 255] and Y, Z are optional')
- darwin_versions[i] = v
- except ValueError:
- raise InvalidArguments('Shared library darwin_versions: value is invalid')
- return darwin_versions
-
def process_kwargs(self, kwargs):
super().process_kwargs(kwargs)
if not self.environment.machines[self.for_machine].is_android():
# Shared library version
- if 'version' in kwargs:
- self.ltversion = kwargs['version']
- if not isinstance(self.ltversion, str):
- raise InvalidArguments('Shared library version needs to be a string, not ' + type(self.ltversion).__name__)
- if not re.fullmatch(r'[0-9]+(\.[0-9]+){0,2}', self.ltversion):
- raise InvalidArguments(f'Invalid Shared library version "{self.ltversion}". Must be of the form X.Y.Z where all three are numbers. Y and Z are optional.')
- # Try to extract/deduce the soversion
- if 'soversion' in kwargs:
- self.soversion = kwargs['soversion']
- if isinstance(self.soversion, int):
- self.soversion = str(self.soversion)
- if not isinstance(self.soversion, str):
- raise InvalidArguments('Shared library soversion is not a string or integer.')
- elif self.ltversion:
+ self.ltversion = T.cast('T.Optional[str]', kwargs.get('version'))
+ self.soversion = T.cast('T.Optional[str]', kwargs.get('soversion'))
+ if self.soversion is None and self.ltversion is not None:
# library version is defined, get the soversion from that
# We replicate what Autotools does here and take the first
# number of the version by default.
self.soversion = self.ltversion.split('.')[0]
# macOS, iOS and tvOS dylib compatibility_version and current_version
- if 'darwin_versions' in kwargs:
- self.darwin_versions = self._validate_darwin_versions(kwargs['darwin_versions'])
- elif self.soversion:
+ self.darwin_versions = T.cast('T.Optional[T.Tuple[str, str]]', kwargs.get('darwin_versions'))
+ if self.darwin_versions is None and self.soversion is not None:
# If unspecified, pick the soversion
- self.darwin_versions = 2 * [self.soversion]
+ self.darwin_versions = (self.soversion, self.soversion)
# Visual Studio module-definitions file
if 'vs_module_defs' in kwargs:
@@ -2417,11 +2352,6 @@ class SharedLibrary(BuildTarget):
"""
return self.debug_filename
- def get_import_filenameslist(self):
- if self.import_filename:
- return [self.vs_import_filename, self.gcc_import_filename]
- return []
-
def get_all_link_deps(self):
return [self] + self.get_transitive_link_deps()
@@ -2582,6 +2512,7 @@ class CustomTarget(Target, CommandBase):
install_tag: T.Optional[T.List[T.Optional[str]]] = None,
absolute_paths: bool = False,
backend: T.Optional['Backend'] = None,
+ description: str = 'Generating {} with a custom command',
):
# TODO expose keyword arg to make MachineChoice.HOST configurable
super().__init__(name, subdir, subproject, False, MachineChoice.HOST, environment,
@@ -2606,6 +2537,7 @@ class CustomTarget(Target, CommandBase):
self.install_mode = install_mode
self.install_tag = _process_install_tag(install_tag, len(self.outputs))
self.name = name if name else self.outputs[0]
+ self.description = description
# Whether to use absolute paths for all files on the commandline
self.absolute_paths = absolute_paths
diff --git a/mesonbuild/cmake/common.py b/mesonbuild/cmake/common.py
index 3de6c16..415937e 100644
--- a/mesonbuild/cmake/common.py
+++ b/mesonbuild/cmake/common.py
@@ -123,7 +123,7 @@ def cmake_get_generator_args(env: 'Environment') -> T.List[str]:
return ['-G', backend_generator_map[backend_name]]
def cmake_defines_to_args(raw: T.List[T.Dict[str, TYPE_var]], permissive: bool = False) -> T.List[str]:
- res = [] # type: T.List[str]
+ res: T.List[str] = []
for i in raw:
for key, val in i.items():
@@ -144,7 +144,7 @@ def cmake_defines_to_args(raw: T.List[T.Dict[str, TYPE_var]], permissive: bool =
# TODO: this function will become obsolete once the `cmake_args` kwarg is dropped
def check_cmake_args(args: T.List[str]) -> T.List[str]:
- res = [] # type: T.List[str]
+ res: T.List[str] = []
dis = ['-D' + x for x in blacklist_cmake_defs]
assert dis # Ensure that dis is not empty.
for i in args:
@@ -166,14 +166,14 @@ class CMakeInclude:
class CMakeFileGroup:
def __init__(self, data: T.Dict[str, T.Any]) -> None:
- self.defines = data.get('defines', '') # type: str
- self.flags = _flags_to_list(data.get('compileFlags', '')) # type: T.List[str]
- self.is_generated = data.get('isGenerated', False) # type: bool
- self.language = data.get('language', 'C') # type: str
- self.sources = [Path(x) for x in data.get('sources', [])] # type: T.List[Path]
+ self.defines: str = data.get('defines', '')
+ self.flags = _flags_to_list(data.get('compileFlags', ''))
+ self.is_generated: bool = data.get('isGenerated', False)
+ self.language: str = data.get('language', 'C')
+ self.sources = [Path(x) for x in data.get('sources', [])]
# Fix the include directories
- self.includes = [] # type: T.List[CMakeInclude]
+ self.includes: T.List[CMakeInclude] = []
for i in data.get('includePath', []):
if isinstance(i, dict) and 'path' in i:
isSystem = i.get('isSystem', False)
@@ -196,21 +196,21 @@ class CMakeFileGroup:
class CMakeTarget:
def __init__(self, data: T.Dict[str, T.Any]) -> None:
- self.artifacts = [Path(x) for x in data.get('artifacts', [])] # type: T.List[Path]
- self.src_dir = Path(data.get('sourceDirectory', '')) # type: Path
- self.build_dir = Path(data.get('buildDirectory', '')) # type: Path
- self.name = data.get('name', '') # type: str
- self.full_name = data.get('fullName', '') # type: str
- self.install = data.get('hasInstallRule', False) # type: bool
- self.install_paths = [Path(x) for x in set(data.get('installPaths', []))] # type: T.List[Path]
- self.link_lang = data.get('linkerLanguage', '') # type: str
- self.link_libraries = _flags_to_list(data.get('linkLibraries', '')) # type: T.List[str]
- self.link_flags = _flags_to_list(data.get('linkFlags', '')) # type: T.List[str]
- self.link_lang_flags = _flags_to_list(data.get('linkLanguageFlags', '')) # type: T.List[str]
- # self.link_path = Path(data.get('linkPath', '')) # type: Path
- self.type = data.get('type', 'EXECUTABLE') # type: str
+ self.artifacts = [Path(x) for x in data.get('artifacts', [])]
+ self.src_dir = Path(data.get('sourceDirectory', ''))
+ self.build_dir = Path(data.get('buildDirectory', ''))
+ self.name: str = data.get('name', '')
+ self.full_name: str = data.get('fullName', '')
+ self.install: bool = data.get('hasInstallRule', False)
+ self.install_paths = [Path(x) for x in set(data.get('installPaths', []))]
+ self.link_lang: str = data.get('linkerLanguage', '')
+ self.link_libraries = _flags_to_list(data.get('linkLibraries', ''))
+ self.link_flags = _flags_to_list(data.get('linkFlags', ''))
+ self.link_lang_flags = _flags_to_list(data.get('linkLanguageFlags', ''))
+ # self.link_path = Path(data.get('linkPath', ''))
+ self.type: str = data.get('type', 'EXECUTABLE')
# self.is_generator_provided = data.get('isGeneratorProvided', False) # type: bool
- self.files = [] # type: T.List[CMakeFileGroup]
+ self.files: T.List[CMakeFileGroup] = []
for i in data.get('fileGroups', []):
self.files += [CMakeFileGroup(i)]
@@ -237,10 +237,10 @@ class CMakeTarget:
class CMakeProject:
def __init__(self, data: T.Dict[str, T.Any]) -> None:
- self.src_dir = Path(data.get('sourceDirectory', '')) # type: Path
- self.build_dir = Path(data.get('buildDirectory', '')) # type: Path
- self.name = data.get('name', '') # type: str
- self.targets = [] # type: T.List[CMakeTarget]
+ self.src_dir = Path(data.get('sourceDirectory', ''))
+ self.build_dir = Path(data.get('buildDirectory', ''))
+ self.name: str = data.get('name', '')
+ self.targets: T.List[CMakeTarget] = []
for i in data.get('targets', []):
self.targets += [CMakeTarget(i)]
@@ -256,8 +256,8 @@ class CMakeProject:
class CMakeConfiguration:
def __init__(self, data: T.Dict[str, T.Any]) -> None:
- self.name = data.get('name', '') # type: str
- self.projects = [] # type: T.List[CMakeProject]
+ self.name: str = data.get('name', '')
+ self.projects: T.List[CMakeProject] = []
for i in data.get('projects', []):
self.projects += [CMakeProject(i)]
@@ -270,9 +270,9 @@ class CMakeConfiguration:
class SingleTargetOptions:
def __init__(self) -> None:
- self.opts = {} # type: T.Dict[str, str]
- self.lang_args = {} # type: T.Dict[str, T.List[str]]
- self.link_args = [] # type: T.List[str]
+ self.opts: T.Dict[str, str] = {}
+ self.lang_args: T.Dict[str, T.List[str]] = {}
+ self.link_args: T.List[str] = []
self.install = 'preserve'
def set_opt(self, opt: str, val: str) -> None:
@@ -290,7 +290,7 @@ class SingleTargetOptions:
self.install = 'true' if install else 'false'
def get_override_options(self, initial: T.List[str]) -> T.List[str]:
- res = [] # type: T.List[str]
+ res: T.List[str] = []
for i in initial:
opt = i[:i.find('=')]
if opt not in self.opts:
@@ -312,7 +312,7 @@ class SingleTargetOptions:
class TargetOptions:
def __init__(self) -> None:
self.global_options = SingleTargetOptions()
- self.target_options = {} # type: T.Dict[str, SingleTargetOptions]
+ self.target_options: T.Dict[str, SingleTargetOptions] = {}
def __getitem__(self, tgt: str) -> SingleTargetOptions:
if tgt not in self.target_options:
diff --git a/mesonbuild/cmake/executor.py b/mesonbuild/cmake/executor.py
index c22c0ca..7958baf 100644
--- a/mesonbuild/cmake/executor.py
+++ b/mesonbuild/cmake/executor.py
@@ -39,9 +39,9 @@ if T.TYPE_CHECKING:
class CMakeExecutor:
# The class's copy of the CMake path. Avoids having to search for it
# multiple times in the same Meson invocation.
- class_cmakebin = PerMachine(None, None) # type: PerMachine[T.Optional[ExternalProgram]]
- class_cmakevers = PerMachine(None, None) # type: PerMachine[T.Optional[str]]
- class_cmake_cache = {} # type: T.Dict[T.Any, TYPE_result]
+ class_cmakebin: PerMachine[T.Optional[ExternalProgram]] = PerMachine(None, None)
+ class_cmakevers: PerMachine[T.Optional[str]] = PerMachine(None, None)
+ class_cmake_cache: T.Dict[T.Any, TYPE_result] = {}
def __init__(self, environment: 'Environment', version: str, for_machine: MachineChoice, silent: bool = False):
self.min_version = version
@@ -50,8 +50,8 @@ class CMakeExecutor:
self.cmakebin, self.cmakevers = self.find_cmake_binary(self.environment, silent=silent)
self.always_capture_stderr = True
self.print_cmout = False
- self.prefix_paths = [] # type: T.List[str]
- self.extra_cmake_args = [] # type: T.List[str]
+ self.prefix_paths: T.List[str] = []
+ self.extra_cmake_args: T.List[str] = []
if self.cmakebin is None:
return
diff --git a/mesonbuild/cmake/fileapi.py b/mesonbuild/cmake/fileapi.py
index 9605f92..baf499f 100644
--- a/mesonbuild/cmake/fileapi.py
+++ b/mesonbuild/cmake/fileapi.py
@@ -28,8 +28,8 @@ class CMakeFileAPI:
self.api_base_dir = self.build_dir / '.cmake' / 'api' / 'v1'
self.request_dir = self.api_base_dir / 'query' / 'client-meson'
self.reply_dir = self.api_base_dir / 'reply'
- self.cmake_sources = [] # type: T.List[CMakeBuildFile]
- self.cmake_configurations = [] # type: T.List[CMakeConfiguration]
+ self.cmake_sources: T.List[CMakeBuildFile] = []
+ self.cmake_configurations: T.List[CMakeConfiguration] = []
self.kind_resolver_map = {
'codemodel': self._parse_codemodel,
'cmakeFiles': self._parse_cmakeFiles,
diff --git a/mesonbuild/cmake/generator.py b/mesonbuild/cmake/generator.py
index 5b83479..750e4c2 100644
--- a/mesonbuild/cmake/generator.py
+++ b/mesonbuild/cmake/generator.py
@@ -38,8 +38,8 @@ def parse_generator_expressions(
if '$<' not in raw:
return raw
- out = '' # type: str
- i = 0 # type: int
+ out = ''
+ i = 0
def equal(arg: str) -> str:
col_pos = arg.find(',')
@@ -98,7 +98,7 @@ def parse_generator_expressions(
return ';'.join([x for x in tgt.properties['IMPORTED_LOCATION'] if x])
return ''
- supported = {
+ supported: T.Dict[str, T.Callable[[str], str]] = {
# Boolean functions
'BOOL': lambda x: '0' if x.upper() in {'', '0', 'FALSE', 'OFF', 'N', 'NO', 'IGNORE', 'NOTFOUND'} or x.endswith('-NOTFOUND') else '1',
'AND': lambda x: '1' if all(y == '1' for y in x.split(',')) else '0',
@@ -140,17 +140,17 @@ def parse_generator_expressions(
'TARGET_NAME_IF_EXISTS': lambda x: x if x in trace.targets else '',
'TARGET_PROPERTY': target_property,
'TARGET_FILE': target_file,
- } # type: T.Dict[str, T.Callable[[str], str]]
+ }
# Recursively evaluate generator expressions
def eval_generator_expressions() -> str:
nonlocal i
i += 2
- func = '' # type: str
- args = '' # type: str
- res = '' # type: str
- exp = '' # type: str
+ func = ''
+ args = ''
+ res = ''
+ exp = ''
# Determine the body of the expression
while i < len(raw):
diff --git a/mesonbuild/cmake/toolchain.py b/mesonbuild/cmake/toolchain.py
index 477629e..be5bd66 100644
--- a/mesonbuild/cmake/toolchain.py
+++ b/mesonbuild/cmake/toolchain.py
@@ -144,7 +144,7 @@ class CMakeToolchain:
return res
def get_defaults(self) -> T.Dict[str, T.List[str]]:
- defaults = {} # type: T.Dict[str, T.List[str]]
+ defaults: T.Dict[str, T.List[str]] = {}
# Do nothing if the user does not want automatic defaults
if not self.properties.get_cmake_defaults():
@@ -153,13 +153,13 @@ class CMakeToolchain:
# Best effort to map the meson system name to CMAKE_SYSTEM_NAME, which
# is not trivial since CMake lacks a list of all supported
# CMAKE_SYSTEM_NAME values.
- SYSTEM_MAP = {
+ SYSTEM_MAP: T.Dict[str, str] = {
'android': 'Android',
'linux': 'Linux',
'windows': 'Windows',
'freebsd': 'FreeBSD',
'darwin': 'Darwin',
- } # type: T.Dict[str, str]
+ }
# Only set these in a cross build. Otherwise CMake will trip up in native
# builds and thing they are cross (which causes TRY_RUN() to break)
diff --git a/mesonbuild/cmake/traceparser.py b/mesonbuild/cmake/traceparser.py
index 7f31f13..dd0dfb5 100644
--- a/mesonbuild/cmake/traceparser.py
+++ b/mesonbuild/cmake/traceparser.py
@@ -67,9 +67,9 @@ class CMakeTarget:
self.properties = properties
self.imported = imported
self.tline = tline
- self.depends = [] # type: T.List[str]
- self.current_bin_dir = None # type: T.Optional[Path]
- self.current_src_dir = None # type: T.Optional[Path]
+ self.depends: T.List[str] = []
+ self.current_bin_dir: T.Optional[Path] = None
+ self.current_src_dir: T.Optional[Path] = None
def __repr__(self) -> str:
s = 'CMake TARGET:\n -- name: {}\n -- type: {}\n -- imported: {}\n -- properties: {{\n{} }}\n -- tline: {}'
@@ -89,10 +89,10 @@ class CMakeTarget:
class CMakeGeneratorTarget(CMakeTarget):
def __init__(self, name: str) -> None:
super().__init__(name, 'CUSTOM', {})
- self.outputs = [] # type: T.List[Path]
- self._outputs_str = [] # type: T.List[str]
- self.command = [] # type: T.List[T.List[str]]
- self.working_dir = None # type: T.Optional[Path]
+ self.outputs: T.List[Path] = []
+ self._outputs_str: T.List[str] = []
+ self.command: T.List[T.List[str]] = []
+ self.working_dir: T.Optional[Path] = None
class CMakeTraceParser:
def __init__(self, cmake_version: str, build_dir: Path, env: 'Environment', permissive: bool = True) -> None:
@@ -101,14 +101,14 @@ class CMakeTraceParser:
self.targets: T.Dict[str, CMakeTarget] = {}
self.cache: T.Dict[str, CMakeCacheEntry] = {}
- self.explicit_headers = set() # type: T.Set[Path]
+ self.explicit_headers: T.Set[Path] = set()
 # T.List of targets that were added with add_custom_command to generate files
- self.custom_targets = [] # type: T.List[CMakeGeneratorTarget]
+ self.custom_targets: T.List[CMakeGeneratorTarget] = []
self.env = env
- self.permissive = permissive # type: bool
- self.cmake_version = cmake_version # type: str
+ self.permissive = permissive
+ self.cmake_version = cmake_version
self.trace_file = 'cmake_trace.txt'
self.trace_file_path = build_dir / self.trace_file
self.trace_format = 'json-v1' if version_compare(cmake_version, '>=3.17') else 'human'
@@ -118,11 +118,11 @@ class CMakeTraceParser:
# State for delayed command execution. Delayed command execution is realised
# with a custom CMake file that overrides some functions and adds some
# introspection information to the trace.
- self.delayed_commands = [] # type: T.List[str]
- self.stored_commands = [] # type: T.List[CMakeTraceLine]
+ self.delayed_commands: T.List[str] = []
+ self.stored_commands: T.List[CMakeTraceLine] = []
# All supported functions
- self.functions = {
+ self.functions: T.Dict[str, T.Callable[[CMakeTraceLine], None]] = {
'set': self._cmake_set,
'unset': self._cmake_unset,
'add_executable': self._cmake_add_executable,
@@ -145,7 +145,7 @@ class CMakeTraceParser:
'meson_ps_execute_delayed_calls': self._meson_ps_execute_delayed_calls,
'meson_ps_reload_vars': self._meson_ps_reload_vars,
'meson_ps_disabled_function': self._meson_ps_disabled_function,
- } # type: T.Dict[str, T.Callable[[CMakeTraceLine], None]]
+ }
if version_compare(self.cmake_version, '<3.17.0'):
mlog.deprecation(textwrap.dedent(f'''\
@@ -591,10 +591,10 @@ class CMakeTraceParser:
# With the JSON output format, introduced in CMake 3.17, spaces are
 # handled properly and we don't have to do either option
- arglist = [] # type: T.List[T.Tuple[str, T.List[str]]]
+ arglist: T.List[T.Tuple[str, T.List[str]]] = []
if self.trace_format == 'human':
name = args.pop(0)
- values = [] # type: T.List[str]
+ values: T.List[str] = []
prop_regex = re.compile(r'^[A-Z_]+$')
for a in args:
if prop_regex.match(a):
@@ -768,7 +768,7 @@ class CMakeTraceParser:
def _flatten_args(self, args: T.List[str]) -> T.List[str]:
# Split lists in arguments
- res = [] # type: T.List[str]
+ res: T.List[str] = []
for i in args:
res += i.split(';')
return res
@@ -783,9 +783,9 @@ class CMakeTraceParser:
reg_start = re.compile(r'^([A-Za-z]:)?/(.*/)*[^./]+$')
reg_end = re.compile(r'^.*\.[a-zA-Z]+$')
- fixed_list = [] # type: T.List[str]
- curr_str = None # type: T.Optional[str]
- path_found = False # type: bool
+ fixed_list: T.List[str] = []
+ curr_str: T.Optional[str] = None
+ path_found = False
for i in broken_list:
if curr_str is None:
diff --git a/mesonbuild/compilers/cs.py b/mesonbuild/compilers/cs.py
index f0bed5f..cd99c81 100644
--- a/mesonbuild/compilers/cs.py
+++ b/mesonbuild/compilers/cs.py
@@ -28,7 +28,7 @@ if T.TYPE_CHECKING:
from ..environment import Environment
from ..mesonlib import MachineChoice
-cs_optimization_args = {
+cs_optimization_args: T.Dict[str, T.List[str]] = {
'plain': [],
'0': [],
'g': [],
@@ -36,7 +36,7 @@ cs_optimization_args = {
'2': ['-optimize+'],
'3': ['-optimize+'],
's': ['-optimize+'],
- } # type: T.Dict[str, T.List[str]]
+ }
class CsCompiler(BasicLinkerIsCompilerMixin, Compiler):
diff --git a/mesonbuild/compilers/detect.py b/mesonbuild/compilers/detect.py
index 210ec4d..f997247 100644
--- a/mesonbuild/compilers/detect.py
+++ b/mesonbuild/compilers/detect.py
@@ -969,7 +969,7 @@ def detect_vala_compiler(env: 'Environment', for_machine: MachineChoice) -> Comp
def detect_rust_compiler(env: 'Environment', for_machine: MachineChoice) -> RustCompiler:
from . import rust
from ..linkers import linkers
- popen_exceptions = {} # type: T.Dict[str, Exception]
+ popen_exceptions: T.Dict[str, Exception] = {}
compilers, _, exe_wrap = _get_compilers(env, 'rust', for_machine)
is_cross = env.is_cross_build(for_machine)
info = env.machines[for_machine]
diff --git a/mesonbuild/compilers/mixins/arm.py b/mesonbuild/compilers/mixins/arm.py
index 7c53327..3abc0c8 100644
--- a/mesonbuild/compilers/mixins/arm.py
+++ b/mesonbuild/compilers/mixins/arm.py
@@ -34,16 +34,16 @@ else:
# do). This gives up DRYer type checking, with no runtime impact
Compiler = object
-arm_buildtype_args = {
+arm_buildtype_args: T.Dict[str, T.List[str]] = {
'plain': [],
'debug': [],
'debugoptimized': [],
'release': [],
'minsize': [],
'custom': [],
-} # type: T.Dict[str, T.List[str]]
+}
-arm_optimization_args = {
+arm_optimization_args: T.Dict[str, T.List[str]] = {
'plain': [],
'0': ['-O0'],
'g': ['-g'],
@@ -51,18 +51,18 @@ arm_optimization_args = {
'2': [], # Compiler defaults to -O2
'3': ['-O3', '-Otime'],
's': ['-O3'], # Compiler defaults to -Ospace
-} # type: T.Dict[str, T.List[str]]
+}
-armclang_buildtype_args = {
+armclang_buildtype_args: T.Dict[str, T.List[str]] = {
'plain': [],
'debug': [],
'debugoptimized': [],
'release': [],
'minsize': [],
'custom': [],
-} # type: T.Dict[str, T.List[str]]
+}
-armclang_optimization_args = {
+armclang_optimization_args: T.Dict[str, T.List[str]] = {
'plain': [],
'0': [], # Compiler defaults to -O0
'g': ['-g'],
@@ -70,7 +70,7 @@ armclang_optimization_args = {
'2': ['-O2'],
'3': ['-O3'],
's': ['-Oz']
-} # type: T.Dict[str, T.List[str]]
+}
class ArmCompiler(Compiler):
@@ -82,12 +82,12 @@ class ArmCompiler(Compiler):
def __init__(self) -> None:
if not self.is_cross:
raise mesonlib.EnvironmentException('armcc supports only cross-compilation.')
- default_warn_args = [] # type: T.List[str]
+ default_warn_args: T.List[str] = []
self.warn_args = {'0': [],
'1': default_warn_args,
'2': default_warn_args + [],
'3': default_warn_args + [],
- 'everything': default_warn_args + []} # type: T.Dict[str, T.List[str]]
+ 'everything': default_warn_args + []}
# Assembly
self.can_compile_suffixes.add('s')
self.can_compile_suffixes.add('sx')
diff --git a/mesonbuild/compilers/mixins/clang.py b/mesonbuild/compilers/mixins/clang.py
index 24f24a8..6a9c79b 100644
--- a/mesonbuild/compilers/mixins/clang.py
+++ b/mesonbuild/compilers/mixins/clang.py
@@ -31,9 +31,9 @@ if T.TYPE_CHECKING:
from ...dependencies import Dependency # noqa: F401
clang_color_args: T.Dict[str, T.List[str]] = {
- 'auto': ['-fcolor-diagnostics'],
- 'always': ['-fcolor-diagnostics'],
- 'never': ['-fno-color-diagnostics'],
+ 'auto': ['-fdiagnostics-color=auto'],
+ 'always': ['-fdiagnostics-color=always'],
+ 'never': ['-fdiagnostics-color=never'],
}
clang_optimization_args: T.Dict[str, T.List[str]] = {
diff --git a/mesonbuild/compilers/mixins/clike.py b/mesonbuild/compilers/mixins/clike.py
index 251a7bf..4999d60 100644
--- a/mesonbuild/compilers/mixins/clike.py
+++ b/mesonbuild/compilers/mixins/clike.py
@@ -450,6 +450,10 @@ class CLikeCompiler(Compiler):
for d in dependencies:
# Add compile flags needed by dependencies
cargs += d.get_compile_args()
+ system_incdir = d.get_include_type() == 'system'
+ for i in d.get_include_dirs():
+ for idir in i.to_string_list(env.get_source_dir(), env.get_build_dir()):
+ cargs.extend(self.get_include_args(idir, system_incdir))
if mode is CompileCheckMode.LINK:
# Add link flags needed to find dependencies
largs += d.get_link_args()
@@ -1090,27 +1094,25 @@ class CLikeCompiler(Compiler):
return [f]
@staticmethod
- def _get_file_from_list(env: 'Environment', paths: T.List[Path]) -> Path:
+ def _get_file_from_list(env: Environment, paths: T.List[Path]) -> T.Optional[Path]:
'''
We just check whether the library exists. We can't do a link check
because the library might have unresolved symbols that require other
libraries. On macOS we check if the library matches our target
architecture.
'''
- # If not building on macOS for Darwin, do a simple file check
- if not env.machines.host.is_darwin() or not env.machines.build.is_darwin():
- for p in paths:
- if p.is_file():
- return p
- # Run `lipo` and check if the library supports the arch we want
for p in paths:
- if not p.is_file():
- continue
- archs = mesonlib.darwin_get_object_archs(str(p))
- if archs and env.machines.host.cpu_family in archs:
+ if p.is_file():
+
+ if env.machines.host.is_darwin() and env.machines.build.is_darwin():
+ # Run `lipo` and check if the library supports the arch we want
+ archs = mesonlib.darwin_get_object_archs(str(p))
+ if not archs or env.machines.host.cpu_family not in archs:
+ mlog.debug(f'Rejected {p}, supports {archs} but need {env.machines.host.cpu_family}')
+ continue
+
return p
- else:
- mlog.debug(f'Rejected {p}, supports {archs} but need {env.machines.host.cpu_family}')
+
return None
@functools.lru_cache()
diff --git a/mesonbuild/compilers/mixins/pgi.py b/mesonbuild/compilers/mixins/pgi.py
index 2fa736c..6362b46 100644
--- a/mesonbuild/compilers/mixins/pgi.py
+++ b/mesonbuild/compilers/mixins/pgi.py
@@ -32,14 +32,14 @@ else:
# do). This gives up DRYer type checking, with no runtime impact
Compiler = object
-pgi_buildtype_args = {
+pgi_buildtype_args: T.Dict[str, T.List[str]] = {
'plain': [],
'debug': [],
'debugoptimized': [],
'release': [],
'minsize': [],
'custom': [],
-} # type: T.Dict[str, T.List[str]]
+}
class PGICompiler(Compiler):
diff --git a/mesonbuild/compilers/mixins/xc16.py b/mesonbuild/compilers/mixins/xc16.py
index 36c2c10..2b39046 100644
--- a/mesonbuild/compilers/mixins/xc16.py
+++ b/mesonbuild/compilers/mixins/xc16.py
@@ -31,16 +31,16 @@ else:
# do). This gives up DRYer type checking, with no runtime impact
Compiler = object
-xc16_buildtype_args = {
+xc16_buildtype_args: T.Dict[str, T.List[str]] = {
'plain': [],
'debug': [],
'debugoptimized': [],
'release': [],
'minsize': [],
'custom': [],
-} # type: T.Dict[str, T.List[str]]
+}
-xc16_optimization_args = {
+xc16_optimization_args: T.Dict[str, T.List[str]] = {
'plain': [],
'0': ['-O0'],
'g': ['-O0'],
@@ -48,12 +48,12 @@ xc16_optimization_args = {
'2': ['-O2'],
'3': ['-O3'],
's': ['-Os']
-} # type: T.Dict[str, T.List[str]]
+}
-xc16_debug_args = {
+xc16_debug_args: T.Dict[bool, T.List[str]] = {
False: [],
True: []
-} # type: T.Dict[bool, T.List[str]]
+}
class Xc16Compiler(Compiler):
@@ -66,12 +66,12 @@ class Xc16Compiler(Compiler):
# Assembly
self.can_compile_suffixes.add('s')
self.can_compile_suffixes.add('sx')
- default_warn_args = [] # type: T.List[str]
+ default_warn_args: T.List[str] = []
self.warn_args = {'0': [],
'1': default_warn_args,
'2': default_warn_args + [],
'3': default_warn_args + [],
- 'everything': default_warn_args + []} # type: T.Dict[str, T.List[str]]
+ 'everything': default_warn_args + []}
def get_always_args(self) -> T.List[str]:
return []
diff --git a/mesonbuild/compilers/rust.py b/mesonbuild/compilers/rust.py
index ef0390e..d722039 100644
--- a/mesonbuild/compilers/rust.py
+++ b/mesonbuild/compilers/rust.py
@@ -32,7 +32,7 @@ if T.TYPE_CHECKING:
from ..dependencies import Dependency
-rust_optimization_args = {
+rust_optimization_args: T.Dict[str, T.List[str]] = {
'plain': [],
'0': [],
'g': ['-C', 'opt-level=0'],
@@ -40,7 +40,7 @@ rust_optimization_args = {
'2': ['-C', 'opt-level=2'],
'3': ['-C', 'opt-level=3'],
's': ['-C', 'opt-level=s'],
-} # type: T.Dict[str, T.List[str]]
+}
class RustCompiler(Compiler):
diff --git a/mesonbuild/compilers/swift.py b/mesonbuild/compilers/swift.py
index 19866e2..68ef992 100644
--- a/mesonbuild/compilers/swift.py
+++ b/mesonbuild/compilers/swift.py
@@ -26,7 +26,7 @@ if T.TYPE_CHECKING:
from ..linkers.linkers import DynamicLinker
from ..mesonlib import MachineChoice
-swift_optimization_args = {
+swift_optimization_args: T.Dict[str, T.List[str]] = {
'plain': [],
'0': [],
'g': [],
@@ -34,7 +34,7 @@ swift_optimization_args = {
'2': ['-O'],
'3': ['-O'],
's': ['-O'],
-} # type: T.Dict[str, T.List[str]]
+}
class SwiftCompiler(Compiler):
diff --git a/mesonbuild/coredata.py b/mesonbuild/coredata.py
index 1184866..4b0f9af 100644
--- a/mesonbuild/coredata.py
+++ b/mesonbuild/coredata.py
@@ -89,9 +89,10 @@ def get_genvs_default_buildtype_list() -> list[str]:
class MesonVersionMismatchException(MesonException):
'''Build directory generated with Meson version is incompatible with current version'''
- def __init__(self, old_version: str, current_version: str) -> None:
+ def __init__(self, old_version: str, current_version: str, extra_msg: str = '') -> None:
super().__init__(f'Build directory has been generated with Meson version {old_version}, '
- f'which is incompatible with the current version {current_version}.')
+ f'which is incompatible with the current version {current_version}.'
+ + extra_msg)
self.old_version = old_version
self.current_version = current_version
@@ -1013,15 +1014,20 @@ class CmdLineFileParser(configparser.ConfigParser):
return optionstr
class MachineFileParser():
- def __init__(self, filenames: T.List[str]) -> None:
+ def __init__(self, filenames: T.List[str], sourcedir: str) -> None:
self.parser = CmdLineFileParser()
self.constants: T.Dict[str, T.Union[str, bool, int, T.List[str]]] = {'True': True, 'False': False}
self.sections: T.Dict[str, T.Dict[str, T.Union[str, bool, int, T.List[str]]]] = {}
- try:
- self.parser.read(filenames)
- except configparser.Error as e:
- raise EnvironmentException(f'Malformed cross or native file: {e}')
+ for fname in filenames:
+ with open(fname, encoding='utf-8') as f:
+ content = f.read()
+ content = content.replace('@GLOBAL_SOURCE_ROOT@', sourcedir)
+ content = content.replace('@DIRNAME@', os.path.dirname(fname))
+ try:
+ self.parser.read_string(content, fname)
+ except configparser.Error as e:
+ raise EnvironmentException(f'Malformed machine file: {e}')
# Parse [constants] first so they can be used in other sections
if self.parser.has_section('constants'):
@@ -1042,9 +1048,11 @@ class MachineFileParser():
value = value.replace('\\', '\\\\')
try:
ast = mparser.Parser(value, 'machinefile').parse()
+ if not ast.lines:
+ raise EnvironmentException('value cannot be empty')
res = self._evaluate_statement(ast.lines[0])
- except MesonException:
- raise EnvironmentException(f'Malformed value in machine file variable {entry!r}.')
+ except MesonException as e:
+ raise EnvironmentException(f'Malformed value in machine file variable {entry!r}: {str(e)}.')
except KeyError as e:
raise EnvironmentException(f'Undefined constant {e.args[0]!r} in machine file variable {entry!r}.')
section[entry] = res
@@ -1075,8 +1083,8 @@ class MachineFileParser():
return os.path.join(l, r)
raise EnvironmentException('Unsupported node type')
-def parse_machine_files(filenames: T.List[str]):
- parser = MachineFileParser(filenames)
+def parse_machine_files(filenames: T.List[str], sourcedir: str):
+ parser = MachineFileParser(filenames, sourcedir)
return parser.sections
def get_cmd_line_file(build_dir: str) -> str:
@@ -1141,9 +1149,9 @@ def major_versions_differ(v1: str, v2: str) -> bool:
# Major version differ, or one is development version but not the other.
return v1_major != v2_major or ('99' in {v1_minor, v2_minor} and v1_minor != v2_minor)
-def load(build_dir: str) -> CoreData:
+def load(build_dir: str, suggest_reconfigure: bool = True) -> CoreData:
filename = os.path.join(build_dir, 'meson-private', 'coredata.dat')
- return pickle_load(filename, 'Coredata', CoreData)
+ return pickle_load(filename, 'Coredata', CoreData, suggest_reconfigure)
def save(obj: CoreData, build_dir: str) -> str:
@@ -1348,6 +1356,8 @@ BUILTIN_CORE_OPTIONS: 'MutableKeyedOptionDictType' = OrderedDict([
BuiltinOption(UserStringOption, 'Directory for site-specific, platform-specific files.', '')),
(OptionKey('purelibdir', module='python'),
BuiltinOption(UserStringOption, 'Directory for site-specific, non-platform-specific files.', '')),
+ (OptionKey('allow_limited_api', module='python'),
+ BuiltinOption(UserBooleanOption, 'Whether to allow use of the Python Limited API', True)),
])
BUILTIN_OPTIONS = OrderedDict(chain(BUILTIN_DIR_OPTIONS.items(), BUILTIN_CORE_OPTIONS.items()))
diff --git a/mesonbuild/dependencies/base.py b/mesonbuild/dependencies/base.py
index ce206b6..fa94f87 100644
--- a/mesonbuild/dependencies/base.py
+++ b/mesonbuild/dependencies/base.py
@@ -36,9 +36,8 @@ if T.TYPE_CHECKING:
from ..interpreterbase import FeatureCheckBase
from ..build import (
CustomTarget, IncludeDirs, CustomTargetIndex, LibTypes,
- StaticLibrary, StructuredSources, ExtractedObjects
+ StaticLibrary, StructuredSources, ExtractedObjects, GeneratedTypes
)
- from ..mesonlib import FileOrString
class DependencyException(MesonException):
@@ -109,7 +108,7 @@ class Dependency(HoldableObject):
# Raw -L and -l arguments without manual library searching
# If None, self.link_args will be used
self.raw_link_args: T.Optional[T.List[str]] = None
- self.sources: T.List[T.Union['FileOrString', 'CustomTarget', 'StructuredSources']] = []
+ self.sources: T.List[T.Union[mesonlib.File, GeneratedTypes, 'StructuredSources']] = []
self.extra_files: T.List[mesonlib.File] = []
self.include_type = self._process_include_type_kw(kwargs)
self.ext_deps: T.List[Dependency] = []
@@ -167,7 +166,7 @@ class Dependency(HoldableObject):
def found(self) -> bool:
return self.is_found
- def get_sources(self) -> T.List[T.Union['FileOrString', 'CustomTarget', 'StructuredSources']]:
+ def get_sources(self) -> T.List[T.Union[mesonlib.File, GeneratedTypes, 'StructuredSources']]:
"""Source files that need to be added to the target.
As an example, gtest-all.cc when using GTest."""
return self.sources
@@ -254,7 +253,7 @@ class InternalDependency(Dependency):
link_args: T.List[str],
libraries: T.List[LibTypes],
whole_libraries: T.List[T.Union[StaticLibrary, CustomTarget, CustomTargetIndex]],
- sources: T.Sequence[T.Union[FileOrString, CustomTarget, StructuredSources]],
+ sources: T.Sequence[T.Union[mesonlib.File, GeneratedTypes, StructuredSources]],
extra_files: T.Sequence[mesonlib.File],
ext_deps: T.List[Dependency], variables: T.Dict[str, str],
d_module_versions: T.List[T.Union[str, int]], d_import_dirs: T.List['IncludeDirs'],
@@ -567,7 +566,7 @@ def strip_system_includedirs(environment: 'Environment', for_machine: MachineCho
return [i for i in include_args if i not in exclude]
def process_method_kw(possible: T.Iterable[DependencyMethods], kwargs: T.Dict[str, T.Any]) -> T.List[DependencyMethods]:
- method = kwargs.get('method', 'auto') # type: T.Union[DependencyMethods, str]
+ method: T.Union[DependencyMethods, str] = kwargs.get('method', 'auto')
if isinstance(method, DependencyMethods):
return [method]
# TODO: try/except?
diff --git a/mesonbuild/dependencies/boost.py b/mesonbuild/dependencies/boost.py
index 0a936e6..788ccbb 100644
--- a/mesonbuild/dependencies/boost.py
+++ b/mesonbuild/dependencies/boost.py
@@ -248,7 +248,7 @@ class BoostLibraryFile():
# Handle the boost_python naming madness.
# See https://github.com/mesonbuild/meson/issues/4788 for some distro
# specific naming variations.
- other_tags = [] # type: T.List[str]
+ other_tags: T.List[str] = []
# Split the current modname into the base name and the version
m_cur = BoostLibraryFile.reg_python_mod_split.match(self.mod_name)
@@ -331,9 +331,9 @@ class BoostLibraryFile():
return True
def get_compiler_args(self) -> T.List[str]:
- args = [] # type: T.List[str]
+ args: T.List[str] = []
if self.mod_name in boost_libraries:
- libdef = boost_libraries[self.mod_name] # type: BoostLibrary
+ libdef = boost_libraries[self.mod_name]
if self.static:
args += libdef.static
else:
@@ -355,19 +355,19 @@ class BoostDependency(SystemDependency):
self.debug = buildtype.startswith('debug')
self.multithreading = kwargs.get('threading', 'multi') == 'multi'
- self.boost_root = None # type: T.Optional[Path]
+ self.boost_root: T.Optional[Path] = None
self.explicit_static = 'static' in kwargs
# Extract and validate modules
- self.modules = mesonlib.extract_as_list(kwargs, 'modules') # type: T.List[str]
+ self.modules: T.List[str] = mesonlib.extract_as_list(kwargs, 'modules')
for i in self.modules:
if not isinstance(i, str):
raise DependencyException('Boost module argument is not a string.')
if i.startswith('boost_'):
raise DependencyException('Boost modules must be passed without the boost_ prefix')
- self.modules_found = [] # type: T.List[str]
- self.modules_missing = [] # type: T.List[str]
+ self.modules_found: T.List[str] = []
+ self.modules_missing: T.List[str] = []
# Do we need threads?
if 'thread' in self.modules:
@@ -450,7 +450,7 @@ class BoostDependency(SystemDependency):
mlog.debug(' - potential include dirs: {}'.format([x.path.as_posix() for x in inc_dirs]))
# 2. Find all boost libraries
- libs = [] # type: T.List[BoostLibraryFile]
+ libs: T.List[BoostLibraryFile] = []
for i in lib_dirs:
libs = self.detect_libraries(i)
if libs:
@@ -471,8 +471,8 @@ class BoostDependency(SystemDependency):
mlog.debug(f' - {j}')
# 3. Select the libraries matching the requested modules
- not_found = [] # type: T.List[str]
- selected_modules = [] # type: T.List[BoostLibraryFile]
+ not_found: T.List[str] = []
+ selected_modules: T.List[BoostLibraryFile] = []
for mod in modules:
found = False
for l in f_libs:
@@ -485,8 +485,8 @@ class BoostDependency(SystemDependency):
# log the result
mlog.debug(' - found:')
- comp_args = [] # type: T.List[str]
- link_args = [] # type: T.List[str]
+ comp_args: T.List[str] = []
+ link_args: T.List[str] = []
for j in selected_modules:
c_args = j.get_compiler_args()
l_args = j.get_link_args()
@@ -524,7 +524,7 @@ class BoostDependency(SystemDependency):
return False
def detect_inc_dirs(self, root: Path) -> T.List[BoostIncludeDir]:
- candidates = [] # type: T.List[Path]
+ candidates: T.List[Path] = []
inc_root = root / 'include'
candidates += [root / 'boost']
@@ -555,8 +555,8 @@ class BoostDependency(SystemDependency):
# No system include paths were found --> fall back to manually looking
# for library dirs in root
- dirs = [] # type: T.List[Path]
- subdirs = [] # type: T.List[Path]
+ dirs: T.List[Path] = []
+ subdirs: T.List[Path] = []
for i in root.iterdir():
if i.is_dir() and i.name.startswith('lib'):
dirs += [i]
@@ -578,7 +578,7 @@ class BoostDependency(SystemDependency):
raw_list = dirs + subdirs
no_arch = [x for x in raw_list if not any(y in x.name for y in arch_list_32 + arch_list_64)]
- matching_arch = [] # type: T.List[Path]
+ matching_arch: T.List[Path] = []
if '32' in self.arch:
matching_arch = [x for x in raw_list if any(y in x.name for y in arch_list_32)]
elif '64' in self.arch:
@@ -624,7 +624,7 @@ class BoostDependency(SystemDependency):
return libs
def detect_libraries(self, libdir: Path) -> T.List[BoostLibraryFile]:
- libs = set() # type: T.Set[BoostLibraryFile]
+ libs: T.Set[BoostLibraryFile] = set()
for i in libdir.iterdir():
if not i.is_file():
continue
@@ -655,7 +655,7 @@ class BoostDependency(SystemDependency):
self.is_found = self.run_check([boost_inc_dir], [lib_dir])
def detect_roots(self) -> None:
- roots = [] # type: T.List[Path]
+ roots: T.List[Path] = []
# Try getting the BOOST_ROOT from a boost.pc if it exists. This primarily
# allows BoostDependency to find boost from Conan. See #5438
@@ -686,7 +686,7 @@ class BoostDependency(SystemDependency):
# Where boost prebuilt binaries are
local_boost = Path('C:/local')
- candidates = [] # type: T.List[Path]
+ candidates: T.List[Path] = []
if prog_files.is_dir():
candidates += [*prog_files.iterdir()]
if local_boost.is_dir():
@@ -694,7 +694,7 @@ class BoostDependency(SystemDependency):
roots += [x for x in candidates if x.name.lower().startswith('boost') and x.is_dir()]
else:
- tmp = [] # type: T.List[Path]
+ tmp: T.List[Path] = []
# Add some default system paths
tmp += [Path('/opt/local')]
diff --git a/mesonbuild/dependencies/cmake.py b/mesonbuild/dependencies/cmake.py
index 8827c9a..11d3564 100644
--- a/mesonbuild/dependencies/cmake.py
+++ b/mesonbuild/dependencies/cmake.py
@@ -80,7 +80,7 @@ class CMakeDependency(ExternalDependency):
def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any], language: T.Optional[str] = None, force_use_global_compilers: bool = False) -> None:
# Gather a list of all languages to support
- self.language_list = [] # type: T.List[str]
+ self.language_list: T.List[str] = []
if language is None or force_use_global_compilers:
compilers = None
if kwargs.get('native', False):
@@ -312,7 +312,7 @@ class CMakeDependency(ExternalDependency):
return True
# Check PATH
- system_env = [] # type: T.List[str]
+ system_env: T.List[str] = []
for i in os.environ.get('PATH', '').split(os.pathsep):
if i.endswith('/bin') or i.endswith('\\bin'):
i = i[:-4]
diff --git a/mesonbuild/dependencies/cuda.py b/mesonbuild/dependencies/cuda.py
index af0ae4b..aaed6b3 100644
--- a/mesonbuild/dependencies/cuda.py
+++ b/mesonbuild/dependencies/cuda.py
@@ -45,8 +45,18 @@ class CudaDependency(SystemDependency):
super().__init__('cuda', environment, kwargs, language=language)
self.lib_modules: T.Dict[str, T.List[str]] = {}
self.requested_modules = self.get_requested(kwargs)
- if 'cudart' not in self.requested_modules:
- self.requested_modules = ['cudart'] + self.requested_modules
+ if not any(runtime in self.requested_modules for runtime in ['cudart', 'cudart_static']):
+ # By default, we prefer to link the static CUDA runtime, since this is what nvcc also does by default:
+ # https://docs.nvidia.com/cuda/cuda-compiler-driver-nvcc/index.html#cudart-none-shared-static-cudart
+ req_modules = ['cudart']
+ if kwargs.get('static', True):
+ req_modules = ['cudart_static']
+ machine = self.env.machines[self.for_machine]
+ if machine.is_linux():
+ # extracted by running
+ # nvcc -v foo.o
+ req_modules += ['rt', 'pthread', 'dl']
+ self.requested_modules = req_modules + self.requested_modules
(self.cuda_path, self.version, self.is_found) = self._detect_cuda_path_and_version()
if not self.is_found:
@@ -195,8 +205,8 @@ class CudaDependency(SystemDependency):
except ValueError:
continue
# use // for floor instead of / which produces a float
- major = vers_int // 1000 # type: int
- minor = (vers_int - major * 1000) // 10 # type: int
+ major = vers_int // 1000
+ minor = (vers_int - major * 1000) // 10
return f'{major}.{minor}'
return None
diff --git a/mesonbuild/dependencies/hdf5.py b/mesonbuild/dependencies/hdf5.py
index 501e89d..a437e84 100644
--- a/mesonbuild/dependencies/hdf5.py
+++ b/mesonbuild/dependencies/hdf5.py
@@ -48,7 +48,7 @@ class HDF5PkgConfigDependency(PkgConfigDependency):
return
# some broken pkgconfig don't actually list the full path to the needed includes
- newinc = [] # type: T.List[str]
+ newinc: T.List[str] = []
for arg in self.compile_args:
if arg.startswith('-I'):
stem = 'static' if self.static else 'shared'
@@ -56,7 +56,7 @@ class HDF5PkgConfigDependency(PkgConfigDependency):
newinc.append('-I' + str(Path(arg[2:]) / stem))
self.compile_args += newinc
- link_args = [] # type: T.List[str]
+ link_args: T.List[str] = []
for larg in self.get_link_args():
lpath = Path(larg)
# some pkg-config hdf5.pc (e.g. Ubuntu) don't include the commonly-used HL HDF5 libraries,
diff --git a/mesonbuild/dependencies/mpi.py b/mesonbuild/dependencies/mpi.py
index 240e6fd..d9a1585 100644
--- a/mesonbuild/dependencies/mpi.py
+++ b/mesonbuild/dependencies/mpi.py
@@ -74,7 +74,7 @@ def mpi_factory(env: 'Environment',
elif language == 'fortran':
tool_names = [os.environ.get('I_MPI_F90'), 'mpiifort']
- cls = IntelMPIConfigToolDependency # type: T.Type[ConfigToolDependency]
+ cls: T.Type[ConfigToolDependency] = IntelMPIConfigToolDependency
else: # OpenMPI, which doesn't work with intel
#
# We try the environment variables for the tools first, but then
diff --git a/mesonbuild/dependencies/pkgconfig.py b/mesonbuild/dependencies/pkgconfig.py
index cfe9cbb..e8f349e 100644
--- a/mesonbuild/dependencies/pkgconfig.py
+++ b/mesonbuild/dependencies/pkgconfig.py
@@ -26,6 +26,8 @@ import shlex
import typing as T
if T.TYPE_CHECKING:
+ from typing_extensions import Literal
+
from ..environment import Environment
from ..mesonlib import MachineChoice
from ..utils.core import EnvironOrDict
@@ -78,7 +80,7 @@ class PkgConfigCLI(PkgConfigInterface):
# The class's copy of the pkg-config path. Avoids having to search for it
# multiple times in the same Meson invocation.
- class_pkgbin: PerMachine[T.Union[None, T.Literal[False], ExternalProgram]] = PerMachine(None, None)
+ class_pkgbin: PerMachine[T.Union[None, Literal[False], ExternalProgram]] = PerMachine(None, None)
# We cache all pkg-config subprocess invocations to avoid redundant calls
pkgbin_cache: T.Dict[
T.Tuple[ExternalProgram, T.Tuple[str, ...], T.FrozenSet[T.Tuple[str, str]]],
diff --git a/mesonbuild/dependencies/python.py b/mesonbuild/dependencies/python.py
index 1607728..efb904e 100644
--- a/mesonbuild/dependencies/python.py
+++ b/mesonbuild/dependencies/python.py
@@ -44,6 +44,7 @@ if T.TYPE_CHECKING:
paths: T.Dict[str, str]
platform: str
suffix: str
+ limited_api_suffix: str
variables: T.Dict[str, str]
version: str
@@ -94,6 +95,7 @@ class BasicPythonExternalProgram(ExternalProgram):
'paths': {},
'platform': 'sentinel',
'suffix': 'sentinel',
+ 'limited_api_suffix': 'sentinel',
'variables': {},
'version': '0.0',
}
@@ -197,7 +199,7 @@ class PythonSystemDependency(SystemDependency, _PythonDependencyBase):
if self.link_libpython:
# link args
if mesonlib.is_windows():
- self.find_libpy_windows(environment)
+ self.find_libpy_windows(environment, limited_api=False)
else:
self.find_libpy(environment)
else:
@@ -259,7 +261,7 @@ class PythonSystemDependency(SystemDependency, _PythonDependencyBase):
mlog.log(f'Unknown Windows Python platform {self.platform!r}')
return None
- def get_windows_link_args(self) -> T.Optional[T.List[str]]:
+ def get_windows_link_args(self, limited_api: bool) -> T.Optional[T.List[str]]:
if self.platform.startswith('win'):
vernum = self.variables.get('py_version_nodot')
verdot = self.variables.get('py_version_short')
@@ -277,6 +279,8 @@ class PythonSystemDependency(SystemDependency, _PythonDependencyBase):
else:
libpath = Path(f'python{vernum}.dll')
else:
+ if limited_api:
+ vernum = vernum[0]
libpath = Path('libs') / f'python{vernum}.lib'
# For a debug build, pyconfig.h may force linking with
# pythonX_d.lib (see meson#10776). This cannot be avoided
@@ -317,7 +321,7 @@ class PythonSystemDependency(SystemDependency, _PythonDependencyBase):
return None
return [str(lib)]
- def find_libpy_windows(self, env: 'Environment') -> None:
+ def find_libpy_windows(self, env: 'Environment', limited_api: bool = False) -> None:
'''
Find python3 libraries on Windows and also verify that the arch matches
what we are building for.
@@ -332,7 +336,7 @@ class PythonSystemDependency(SystemDependency, _PythonDependencyBase):
self.is_found = False
return
# This can fail if the library is not found
- largs = self.get_windows_link_args()
+ largs = self.get_windows_link_args(limited_api)
if largs is None:
self.is_found = False
return
diff --git a/mesonbuild/dependencies/ui.py b/mesonbuild/dependencies/ui.py
index 1dffa1f..6de5534 100644
--- a/mesonbuild/dependencies/ui.py
+++ b/mesonbuild/dependencies/ui.py
@@ -17,11 +17,13 @@
from __future__ import annotations
import os
+import re
import subprocess
import typing as T
from .. import mlog
from .. import mesonlib
+from ..compilers.compilers import CrossNoRunException
from ..mesonlib import (
Popen_safe, extract_as_list, version_compare_many
)
@@ -235,10 +237,6 @@ class VulkanDependencySystem(SystemDependency):
self.compile_args.append('-I' + inc_path)
self.link_args.append('-L' + lib_path)
self.link_args.append('-l' + lib_name)
-
- # TODO: find a way to retrieve the version from the sdk?
- # Usually it is a part of the path to it (but does not have to be)
- return
else:
# simply try to guess it, usually works on linux
libs = self.clib_compiler.find_library('vulkan', environment, [])
@@ -246,7 +244,33 @@ class VulkanDependencySystem(SystemDependency):
self.is_found = True
for lib in libs:
self.link_args.append(lib)
- return
+
+ if self.is_found:
+ get_version = '''\
+#include <stdio.h>
+#include <vulkan/vulkan.h>
+
+int main() {
+ printf("%i.%i.%i", VK_VERSION_MAJOR(VK_HEADER_VERSION_COMPLETE),
+ VK_VERSION_MINOR(VK_HEADER_VERSION_COMPLETE),
+ VK_VERSION_PATCH(VK_HEADER_VERSION_COMPLETE));
+ return 0;
+}
+'''
+ try:
+ run = self.clib_compiler.run(get_version, environment, extra_args=self.compile_args)
+ except CrossNoRunException:
+ run = None
+ if run and run.compiled and run.returncode == 0:
+ self.version = run.stdout
+ elif self.vulkan_sdk:
+ # fall back to heuristics: detect version number in path
+ # matches the default install path on Windows
+ match = re.search(rf'VulkanSDK{re.escape(os.path.sep)}([0-9]+(?:\.[0-9]+)+)', self.vulkan_sdk)
+ if match:
+ self.version = match.group(1)
+ else:
+ mlog.warning(f'Environment variable VULKAN_SDK={self.vulkan_sdk} is present, but Vulkan version could not be extracted.')
packages['gl'] = gl_factory = DependencyFactory(
'gl',
diff --git a/mesonbuild/envconfig.py b/mesonbuild/envconfig.py
index 7e0c567..5340521 100644
--- a/mesonbuild/envconfig.py
+++ b/mesonbuild/envconfig.py
@@ -161,7 +161,7 @@ class Properties:
self,
properties: T.Optional[T.Dict[str, T.Optional[T.Union[str, bool, int, T.List[str]]]]] = None,
):
- self.properties = properties or {} # type: T.Dict[str, T.Optional[T.Union[str, bool, int, T.List[str]]]]
+ self.properties = properties or {}
def has_stdlib(self, language: str) -> bool:
return language + '_stdlib' in self.properties
@@ -460,7 +460,7 @@ class BinaryTable:
class CMakeVariables:
def __init__(self, variables: T.Optional[T.Dict[str, T.Any]] = None) -> None:
variables = variables or {}
- self.variables = {} # type: T.Dict[str, T.List[str]]
+ self.variables: T.Dict[str, T.List[str]] = {}
for key, value in variables.items():
value = mesonlib.listify(value)
diff --git a/mesonbuild/environment.py b/mesonbuild/environment.py
index 3ec7713..7590931 100644
--- a/mesonbuild/environment.py
+++ b/mesonbuild/environment.py
@@ -343,7 +343,7 @@ def detect_cpu_family(compilers: CompilersDict) -> str:
# MIPS64 is able to run MIPS32 code natively, so there is a chance that
# such mixture mentioned above exists.
elif trial == 'mips64':
- if not any_compiler_has_define(compilers, '__mips64'):
+ if compilers and not any_compiler_has_define(compilers, '__mips64'):
trial = 'mips'
if trial not in known_cpu_families:
@@ -383,7 +383,7 @@ def detect_cpu(compilers: CompilersDict) -> str:
if '64' not in trial:
trial = 'mips'
else:
- if not any_compiler_has_define(compilers, '__mips64'):
+ if compilers and not any_compiler_has_define(compilers, '__mips64'):
trial = 'mips'
else:
trial = 'mips64'
@@ -469,6 +469,7 @@ def machine_info_can_run(machine_info: MachineInfo):
return \
(machine_info.cpu_family == true_build_cpu_family) or \
((true_build_cpu_family == 'x86_64') and (machine_info.cpu_family == 'x86')) or \
+ ((true_build_cpu_family == 'mips64') and (machine_info.cpu_family == 'mips')) or \
((true_build_cpu_family == 'aarch64') and (machine_info.cpu_family == 'arm'))
class Environment:
@@ -476,7 +477,7 @@ class Environment:
log_dir = 'meson-logs'
info_dir = 'meson-info'
- def __init__(self, source_dir: T.Optional[str], build_dir: T.Optional[str], options: 'argparse.Namespace') -> None:
+ def __init__(self, source_dir: str, build_dir: str, options: 'argparse.Namespace') -> None:
self.source_dir = source_dir
self.build_dir = build_dir
# Do not try to create build directories when build_dir is none.
@@ -489,7 +490,7 @@ class Environment:
os.makedirs(self.log_dir, exist_ok=True)
os.makedirs(self.info_dir, exist_ok=True)
try:
- self.coredata: coredata.CoreData = coredata.load(self.get_build_dir())
+ self.coredata: coredata.CoreData = coredata.load(self.get_build_dir(), suggest_reconfigure=False)
self.first_invocation = False
except FileNotFoundError:
self.create_new_coredata(options)
@@ -507,7 +508,7 @@ class Environment:
coredata.read_cmd_line_file(self.build_dir, options)
self.create_new_coredata(options)
else:
- raise e
+ raise MesonException(f'{str(e)} Try regenerating using "meson setup --wipe".')
else:
# Just create a fresh coredata in this case
self.scratch_dir = ''
@@ -549,7 +550,7 @@ class Environment:
## Read in native file(s) to override build machine configuration
if self.coredata.config_files is not None:
- config = coredata.parse_machine_files(self.coredata.config_files)
+ config = coredata.parse_machine_files(self.coredata.config_files, self.source_dir)
binaries.build = BinaryTable(config.get('binaries', {}))
properties.build = Properties(config.get('properties', {}))
cmakevars.build = CMakeVariables(config.get('cmake', {}))
@@ -560,7 +561,7 @@ class Environment:
## Read in cross file(s) to override host machine configuration
if self.coredata.cross_files:
- config = coredata.parse_machine_files(self.coredata.cross_files)
+ config = coredata.parse_machine_files(self.coredata.cross_files, self.source_dir)
properties.host = Properties(config.get('properties', {}))
binaries.host = BinaryTable(config.get('binaries', {}))
cmakevars.host = CMakeVariables(config.get('cmake', {}))
diff --git a/mesonbuild/interpreter/compiler.py b/mesonbuild/interpreter/compiler.py
index fe30195..b85aa37 100644
--- a/mesonbuild/interpreter/compiler.py
+++ b/mesonbuild/interpreter/compiler.py
@@ -45,20 +45,20 @@ if T.TYPE_CHECKING:
args: T.List[str]
dependencies: T.List[dependencies.Dependency]
- class CompileKW(TypedDict):
-
- name: str
+ class BaseCompileKW(TypedDict):
no_builtin_args: bool
include_directories: T.List[build.IncludeDirs]
args: T.List[str]
+
+ class CompileKW(BaseCompileKW):
+
+ name: str
dependencies: T.List[dependencies.Dependency]
+ werror: bool
- class CommonKW(TypedDict):
+ class CommonKW(BaseCompileKW):
prefix: str
- no_builtin_args: bool
- include_directories: T.List[build.IncludeDirs]
- args: T.List[str]
dependencies: T.List[dependencies.Dependency]
class ComputeIntKW(CommonKW):
@@ -163,13 +163,15 @@ _PREFIX_KW: KwargInfo[str] = KwargInfo(
_NO_BUILTIN_ARGS_KW = KwargInfo('no_builtin_args', bool, default=False)
_NAME_KW = KwargInfo('name', str, default='')
+_WERROR_KW = KwargInfo('werror', bool, default=False, since='1.3.0')
# Many of the compiler methods take this kwarg signature exactly, this allows
# simplifying the `typed_kwargs` calls
_COMMON_KWS: T.List[KwargInfo] = [_ARGS_KW, _DEPENDENCIES_KW, _INCLUDE_DIRS_KW, _PREFIX_KW, _NO_BUILTIN_ARGS_KW]
# Common methods of compiles, links, runs, and similar
-_COMPILES_KWS: T.List[KwargInfo] = [_NAME_KW, _ARGS_KW, _DEPENDENCIES_KW, _INCLUDE_DIRS_KW, _NO_BUILTIN_ARGS_KW]
+_COMPILES_KWS: T.List[KwargInfo] = [_NAME_KW, _ARGS_KW, _DEPENDENCIES_KW, _INCLUDE_DIRS_KW, _NO_BUILTIN_ARGS_KW,
+ _WERROR_KW]
_HEADER_KWS: T.List[KwargInfo] = [REQUIRED_KW.evolve(since='0.50.0', default=False), *_COMMON_KWS]
_HAS_REQUIRED_KW = REQUIRED_KW.evolve(since='1.3.0', default=False)
@@ -251,20 +253,20 @@ class CompilerHolder(ObjectHolder['Compiler']):
def cmd_array_method(self, args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> T.List[str]:
return self.compiler.exelist
- def _determine_args(self, nobuiltins: bool,
- incdirs: T.List[build.IncludeDirs],
- extra_args: T.List[str],
+ def _determine_args(self, kwargs: BaseCompileKW,
mode: CompileCheckMode = CompileCheckMode.LINK) -> T.List[str]:
args: T.List[str] = []
- for i in incdirs:
- for idir in i.to_string_list(self.environment.get_source_dir()):
+ for i in kwargs['include_directories']:
+ for idir in i.to_string_list(self.environment.get_source_dir(), self.environment.get_build_dir()):
args.extend(self.compiler.get_include_args(idir, False))
- if not nobuiltins:
+ if not kwargs['no_builtin_args']:
opts = self.environment.coredata.options
args += self.compiler.get_option_compile_args(opts)
if mode is CompileCheckMode.LINK:
args.extend(self.compiler.get_option_link_args(opts))
- args.extend(extra_args)
+ if kwargs.get('werror', False):
+ args.extend(self.compiler.get_werror_args())
+ args.extend(kwargs['args'])
return args
def _determine_dependencies(self, deps: T.List['dependencies.Dependency'], compile_only: bool = False, endl: str = ':') -> T.Tuple[T.List['dependencies.Dependency'], str]:
@@ -298,7 +300,7 @@ class CompilerHolder(ObjectHolder['Compiler']):
code = mesonlib.File.from_absolute_file(
code.rel_to_builddir(self.environment.source_dir))
testname = kwargs['name']
- extra_args = functools.partial(self._determine_args, kwargs['no_builtin_args'], kwargs['include_directories'], kwargs['args'])
+ extra_args = functools.partial(self._determine_args, kwargs)
deps, msg = self._determine_dependencies(kwargs['dependencies'], compile_only=False, endl=None)
result = self.compiler.run(code, self.environment, extra_args=extra_args,
dependencies=deps)
@@ -340,7 +342,7 @@ class CompilerHolder(ObjectHolder['Compiler']):
if disabled:
mlog.log('Type', mlog.bold(typename, True), 'has member', mlog.bold(membername, True), 'skipped: feature', mlog.bold(feature), 'disabled')
return False
- extra_args = functools.partial(self._determine_args, kwargs['no_builtin_args'], kwargs['include_directories'], kwargs['args'])
+ extra_args = functools.partial(self._determine_args, kwargs)
deps, msg = self._determine_dependencies(kwargs['dependencies'])
had, cached = self.compiler.has_members(typename, [membername], kwargs['prefix'],
self.environment,
@@ -366,7 +368,7 @@ class CompilerHolder(ObjectHolder['Compiler']):
if disabled:
mlog.log('Type', mlog.bold(typename, True), 'has members', members, 'skipped: feature', mlog.bold(feature), 'disabled')
return False
- extra_args = functools.partial(self._determine_args, kwargs['no_builtin_args'], kwargs['include_directories'], kwargs['args'])
+ extra_args = functools.partial(self._determine_args, kwargs)
deps, msg = self._determine_dependencies(kwargs['dependencies'])
had, cached = self.compiler.has_members(typename, membernames, kwargs['prefix'],
self.environment,
@@ -392,7 +394,7 @@ class CompilerHolder(ObjectHolder['Compiler']):
if disabled:
mlog.log('Has function', mlog.bold(funcname, True), 'skipped: feature', mlog.bold(feature), 'disabled')
return False
- extra_args = self._determine_args(kwargs['no_builtin_args'], kwargs['include_directories'], kwargs['args'])
+ extra_args = self._determine_args(kwargs)
deps, msg = self._determine_dependencies(kwargs['dependencies'], compile_only=False)
had, cached = self.compiler.has_function(funcname, kwargs['prefix'], self.environment,
extra_args=extra_args,
@@ -415,7 +417,7 @@ class CompilerHolder(ObjectHolder['Compiler']):
if disabled:
mlog.log('Has type', mlog.bold(typename, True), 'skipped: feature', mlog.bold(feature), 'disabled')
return False
- extra_args = functools.partial(self._determine_args, kwargs['no_builtin_args'], kwargs['include_directories'], kwargs['args'])
+ extra_args = functools.partial(self._determine_args, kwargs)
deps, msg = self._determine_dependencies(kwargs['dependencies'])
had, cached = self.compiler.has_type(typename, kwargs['prefix'], self.environment,
extra_args=extra_args, dependencies=deps)
@@ -440,7 +442,7 @@ class CompilerHolder(ObjectHolder['Compiler']):
)
def compute_int_method(self, args: T.Tuple[str], kwargs: 'ComputeIntKW') -> int:
expression = args[0]
- extra_args = functools.partial(self._determine_args, kwargs['no_builtin_args'], kwargs['include_directories'], kwargs['args'])
+ extra_args = functools.partial(self._determine_args, kwargs)
deps, msg = self._determine_dependencies(kwargs['dependencies'], compile_only=self.compiler.is_cross)
res = self.compiler.compute_int(expression, kwargs['low'], kwargs['high'],
kwargs['guess'], kwargs['prefix'],
@@ -453,7 +455,7 @@ class CompilerHolder(ObjectHolder['Compiler']):
@typed_kwargs('compiler.sizeof', *_COMMON_KWS)
def sizeof_method(self, args: T.Tuple[str], kwargs: 'CommonKW') -> int:
element = args[0]
- extra_args = functools.partial(self._determine_args, kwargs['no_builtin_args'], kwargs['include_directories'], kwargs['args'])
+ extra_args = functools.partial(self._determine_args, kwargs)
deps, msg = self._determine_dependencies(kwargs['dependencies'], compile_only=self.compiler.is_cross)
esize, cached = self.compiler.sizeof(element, kwargs['prefix'], self.environment,
extra_args=extra_args, dependencies=deps)
@@ -467,7 +469,7 @@ class CompilerHolder(ObjectHolder['Compiler']):
@typed_kwargs('compiler.get_define', *_COMMON_KWS)
def get_define_method(self, args: T.Tuple[str], kwargs: 'CommonKW') -> str:
element = args[0]
- extra_args = functools.partial(self._determine_args, kwargs['no_builtin_args'], kwargs['include_directories'], kwargs['args'])
+ extra_args = functools.partial(self._determine_args, kwargs)
deps, msg = self._determine_dependencies(kwargs['dependencies'])
value, cached = self.compiler.get_define(element, kwargs['prefix'], self.environment,
extra_args=extra_args,
@@ -488,7 +490,7 @@ class CompilerHolder(ObjectHolder['Compiler']):
code = mesonlib.File.from_absolute_file(
code.absolute_path(self.environment.source_dir, self.environment.build_dir))
testname = kwargs['name']
- extra_args = functools.partial(self._determine_args, kwargs['no_builtin_args'], kwargs['include_directories'], kwargs['args'])
+ extra_args = functools.partial(self._determine_args, kwargs)
deps, msg = self._determine_dependencies(kwargs['dependencies'], endl=None)
result, cached = self.compiler.compiles(code, self.environment,
extra_args=extra_args,
@@ -527,7 +529,7 @@ class CompilerHolder(ObjectHolder['Compiler']):
compiler = clist[SUFFIX_TO_LANG[suffix]]
testname = kwargs['name']
- extra_args = functools.partial(self._determine_args, kwargs['no_builtin_args'], kwargs['include_directories'], kwargs['args'])
+ extra_args = functools.partial(self._determine_args, kwargs)
deps, msg = self._determine_dependencies(kwargs['dependencies'], compile_only=False)
result, cached = self.compiler.links(code, self.environment,
compiler=compiler,
@@ -551,7 +553,7 @@ class CompilerHolder(ObjectHolder['Compiler']):
if disabled:
mlog.log('Check usable header', mlog.bold(hname, True), 'skipped: feature', mlog.bold(feature), 'disabled')
return False
- extra_args = functools.partial(self._determine_args, kwargs['no_builtin_args'], kwargs['include_directories'], kwargs['args'])
+ extra_args = functools.partial(self._determine_args, kwargs)
deps, msg = self._determine_dependencies(kwargs['dependencies'])
haz, cached = self.compiler.check_header(hname, kwargs['prefix'], self.environment,
extra_args=extra_args,
@@ -571,7 +573,7 @@ class CompilerHolder(ObjectHolder['Compiler']):
if disabled:
mlog.log('Has header', mlog.bold(hname, True), 'skipped: feature', mlog.bold(feature), 'disabled')
return False
- extra_args = functools.partial(self._determine_args, kwargs['no_builtin_args'], kwargs['include_directories'], kwargs['args'])
+ extra_args = functools.partial(self._determine_args, kwargs)
deps, msg = self._determine_dependencies(kwargs['dependencies'])
haz, cached = self.compiler.has_header(hname, kwargs['prefix'], self.environment,
extra_args=extra_args, dependencies=deps)
@@ -598,7 +600,7 @@ class CompilerHolder(ObjectHolder['Compiler']):
if disabled:
mlog.log('Header', mlog.bold(hname, True), 'has symbol', mlog.bold(symbol, True), 'skipped: feature', mlog.bold(feature), 'disabled')
return False
- extra_args = functools.partial(self._determine_args, kwargs['no_builtin_args'], kwargs['include_directories'], kwargs['args'])
+ extra_args = functools.partial(self._determine_args, kwargs)
deps, msg = self._determine_dependencies(kwargs['dependencies'])
haz, cached = self.compiler.has_header_symbol(hname, symbol, kwargs['prefix'], self.environment,
extra_args=extra_args,
diff --git a/mesonbuild/interpreter/interpreter.py b/mesonbuild/interpreter/interpreter.py
index 95a6e1d..4751af9 100644
--- a/mesonbuild/interpreter/interpreter.py
+++ b/mesonbuild/interpreter/interpreter.py
@@ -131,6 +131,8 @@ if T.TYPE_CHECKING:
BuildTargetSource = T.Union[mesonlib.FileOrString, build.GeneratedTypes, build.StructuredSources]
+ ProgramVersionFunc = T.Callable[[T.Union[ExternalProgram, build.Executable, OverrideProgram]], str]
+
def _project_version_validator(value: T.Union[T.List, str, mesonlib.File, None]) -> T.Optional[str]:
if isinstance(value, list):
@@ -295,7 +297,7 @@ class Interpreter(InterpreterBase, HoldableObject):
self.sanity_check_ast()
self.builtin.update({'meson': MesonMain(self.build, self)})
self.generators: T.List[build.Generator] = []
- self.processed_buildfiles = set() # type: T.Set[str]
+ self.processed_buildfiles: T.Set[str] = set()
self.project_args_frozen = False
self.global_args_frozen = False # implies self.project_args_frozen
self.subprojects: T.Dict[str, SubprojectHolder] = {}
@@ -691,7 +693,8 @@ class Interpreter(InterpreterBase, HoldableObject):
KwargInfo('version', (str, NoneType)),
KwargInfo('objects', ContainerTypeInfo(list, build.ExtractedObjects), listify=True, default=[], since='1.1.0'),
)
- def func_declare_dependency(self, node, args, kwargs):
+ def func_declare_dependency(self, node: mparser.BaseNode, args: T.List[TYPE_var],
+ kwargs: kwtypes.FuncDeclareDependency) -> dependencies.Dependency:
deps = kwargs['dependencies']
incs = self.extract_incdirs(kwargs)
libs = kwargs['link_with']
@@ -719,9 +722,6 @@ class Interpreter(InterpreterBase, HoldableObject):
continue
if p.is_absolute() and p.is_dir() and srcdir / self.root_subdir in [p] + list(Path(os.path.abspath(p)).parents):
variables[k] = P_OBJ.DependencyVariableString(v)
- for d in deps:
- if not isinstance(d, dependencies.Dependency):
- raise InterpreterException('Invalid dependency')
dep = dependencies.InternalDependency(version, incs, compile_args,
link_args, libs, libs_whole, sources, extra_files,
@@ -1623,46 +1623,23 @@ class Interpreter(InterpreterBase, HoldableObject):
required: bool = True, silent: bool = True,
wanted: T.Union[str, T.List[str]] = '',
search_dirs: T.Optional[T.List[str]] = None,
- version_func: T.Optional[T.Callable[[T.Union['ExternalProgram', 'build.Executable', 'OverrideProgram']], str]] = None
+ version_func: T.Optional[ProgramVersionFunc] = None
) -> T.Union['ExternalProgram', 'build.Executable', 'OverrideProgram']:
args = mesonlib.listify(args)
extra_info: T.List[mlog.TV_Loggable] = []
- progobj = self.program_lookup(args, for_machine, default_options, required, search_dirs, extra_info)
- if progobj is None:
+ progobj = self.program_lookup(args, for_machine, default_options, required, search_dirs, wanted, version_func, extra_info)
+ if progobj is None or not self.check_program_version(progobj, wanted, version_func, extra_info):
progobj = self.notfound_program(args)
if isinstance(progobj, ExternalProgram) and not progobj.found():
if not silent:
- mlog.log('Program', mlog.bold(progobj.get_name()), 'found:', mlog.red('NO'))
+ mlog.log('Program', mlog.bold(progobj.get_name()), 'found:', mlog.red('NO'), *extra_info)
if required:
m = 'Program {!r} not found or not executable'
raise InterpreterException(m.format(progobj.get_name()))
return progobj
- if wanted:
- if version_func:
- version = version_func(progobj)
- elif isinstance(progobj, build.Executable):
- if progobj.subproject:
- interp = self.subprojects[progobj.subproject].held_object
- else:
- interp = self
- assert isinstance(interp, Interpreter)
- version = interp.project_version
- else:
- version = progobj.get_version(self)
- is_found, not_found, _ = mesonlib.version_compare_many(version, wanted)
- if not is_found:
- mlog.log('Program', mlog.bold(progobj.name), 'found:', mlog.red('NO'),
- 'found', mlog.normal_cyan(version), 'but need:',
- mlog.bold(', '.join([f"'{e}'" for e in not_found])), *extra_info)
- if required:
- m = 'Invalid version of program, need {!r} {!r} found {!r}.'
- raise InterpreterException(m.format(progobj.name, not_found, version))
- return self.notfound_program(args)
- extra_info.insert(0, mlog.normal_cyan(version))
-
# Only store successful lookups
self.store_name_lookups(args)
if not silent:
@@ -1673,7 +1650,11 @@ class Interpreter(InterpreterBase, HoldableObject):
def program_lookup(self, args: T.List[mesonlib.FileOrString], for_machine: MachineChoice,
default_options: T.Optional[T.Dict[OptionKey, T.Union[str, int, bool, T.List[str]]]],
- required: bool, search_dirs: T.List[str], extra_info: T.List[mlog.TV_Loggable]
+ required: bool,
+ search_dirs: T.List[str],
+ wanted: T.Union[str, T.List[str]],
+ version_func: T.Optional[ProgramVersionFunc],
+ extra_info: T.List[mlog.TV_Loggable]
) -> T.Optional[T.Union[ExternalProgram, build.Executable, OverrideProgram]]:
progobj = self.program_from_overrides(args, extra_info)
if progobj:
@@ -1696,11 +1677,42 @@ class Interpreter(InterpreterBase, HoldableObject):
if progobj is None and args[0].endswith('python3'):
prog = ExternalProgram('python3', mesonlib.python_command, silent=True)
progobj = prog if prog.found() else None
+
+ if progobj and not self.check_program_version(progobj, wanted, version_func, extra_info):
+ progobj = None
+
if progobj is None and fallback and required:
+ progobj = self.notfound_program(args)
+ mlog.log('Program', mlog.bold(progobj.get_name()), 'found:', mlog.red('NO'), *extra_info)
+ extra_info.clear()
progobj = self.find_program_fallback(fallback, args, default_options, required, extra_info)
return progobj
+ def check_program_version(self, progobj: T.Union[ExternalProgram, build.Executable, OverrideProgram],
+ wanted: T.Union[str, T.List[str]],
+ version_func: T.Optional[ProgramVersionFunc],
+ extra_info: T.List[mlog.TV_Loggable]) -> bool:
+ if wanted:
+ if version_func:
+ version = version_func(progobj)
+ elif isinstance(progobj, build.Executable):
+ if progobj.subproject:
+ interp = self.subprojects[progobj.subproject].held_object
+ else:
+ interp = self
+ assert isinstance(interp, Interpreter)
+ version = interp.project_version
+ else:
+ version = progobj.get_version(self)
+ is_found, not_found, _ = mesonlib.version_compare_many(version, wanted)
+ if not is_found:
+ extra_info[:0] = ['found', mlog.normal_cyan(version), 'but need:',
+ mlog.bold(', '.join([f"'{e}'" for e in not_found]))]
+ return False
+ extra_info.insert(0, mlog.normal_cyan(version))
+ return True
+
def find_program_fallback(self, fallback: str, args: T.List[mesonlib.FileOrString],
default_options: T.Dict[OptionKey, T.Union[str, int, bool, T.List[str]]],
required: bool, extra_info: T.List[mlog.TV_Loggable]
@@ -2738,7 +2750,7 @@ class Interpreter(InterpreterBase, HoldableObject):
install_tag=install_tag, data_type='configure'))
return mesonlib.File.from_built_file(self.subdir, output)
- def extract_incdirs(self, kwargs, key: str = 'include_directories'):
+ def extract_incdirs(self, kwargs, key: str = 'include_directories') -> T.List[build.IncludeDirs]:
prospectives = extract_as_list(kwargs, key)
if key == 'include_directories':
for i in prospectives:
@@ -2747,7 +2759,7 @@ class Interpreter(InterpreterBase, HoldableObject):
f'Use include_directories({i!r}) instead', location=self.current_node)
break
- result = []
+ result: T.List[build.IncludeDirs] = []
for p in prospectives:
if isinstance(p, build.IncludeDirs):
result.append(p)
diff --git a/mesonbuild/interpreter/interpreterobjects.py b/mesonbuild/interpreter/interpreterobjects.py
index 8648b48..f1f8ea8 100644
--- a/mesonbuild/interpreter/interpreterobjects.py
+++ b/mesonbuild/interpreter/interpreterobjects.py
@@ -559,8 +559,10 @@ class DependencyHolder(ObjectHolder[Dependency]):
new_dep = self.held_object.generate_link_whole_dependency()
return new_dep
-class ExternalProgramHolder(ObjectHolder[ExternalProgram]):
- def __init__(self, ep: ExternalProgram, interpreter: 'Interpreter') -> None:
+_EXTPROG = T.TypeVar('_EXTPROG', bound=ExternalProgram)
+
+class _ExternalProgramHolder(ObjectHolder[_EXTPROG]):
+ def __init__(self, ep: _EXTPROG, interpreter: 'Interpreter') -> None:
super().__init__(ep, interpreter)
self.methods.update({'found': self.found_method,
'path': self.path_method,
@@ -606,6 +608,9 @@ class ExternalProgramHolder(ObjectHolder[ExternalProgram]):
def found(self) -> bool:
return self.held_object.found()
+class ExternalProgramHolder(_ExternalProgramHolder[ExternalProgram]):
+ pass
+
class ExternalLibraryHolder(ObjectHolder[ExternalLibrary]):
def __init__(self, el: ExternalLibrary, interpreter: 'Interpreter'):
super().__init__(el, interpreter)
@@ -969,8 +974,10 @@ class CustomTargetIndexHolder(ObjectHolder[build.CustomTargetIndex]):
assert self.interpreter.backend is not None
return self.interpreter.backend.get_target_filename_abs(self.held_object)
-class CustomTargetHolder(ObjectHolder[build.CustomTarget]):
- def __init__(self, target: 'build.CustomTarget', interp: 'Interpreter'):
+_CT = T.TypeVar('_CT', bound=build.CustomTarget)
+
+class _CustomTargetHolder(ObjectHolder[_CT]):
+ def __init__(self, target: _CT, interp: 'Interpreter'):
super().__init__(target, interp)
self.methods.update({'full_path': self.full_path_method,
'to_list': self.to_list_method,
@@ -1007,6 +1014,9 @@ class CustomTargetHolder(ObjectHolder[build.CustomTarget]):
except IndexError:
raise InvalidArguments(f'Index {other} out of bounds of custom target {self.held_object.name} output of size {len(self.held_object)}.')
+class CustomTargetHolder(_CustomTargetHolder[build.CustomTarget]):
+ pass
+
class RunTargetHolder(ObjectHolder[build.RunTarget]):
pass
diff --git a/mesonbuild/interpreter/kwargs.py b/mesonbuild/interpreter/kwargs.py
index 2a1cd61..af5733f 100644
--- a/mesonbuild/interpreter/kwargs.py
+++ b/mesonbuild/interpreter/kwargs.py
@@ -12,6 +12,7 @@ from typing_extensions import TypedDict, Literal, Protocol
from .. import build
from .. import coredata
from ..compilers import Compiler
+from ..dependencies.base import Dependency
from ..mesonlib import EnvironmentVariables, MachineChoice, File, FileMode, FileOrString, OptionKey
from ..modules.cmake import CMakeSubprojectOptions
from ..programs import ExternalProgram
@@ -337,7 +338,14 @@ class StaticLibrary(_BuildTarget):
pass
-class SharedLibrary(_BuildTarget):
+class _SharedLibMixin(TypedDict):
+
+ darwin_versions: T.Optional[T.Tuple[str, str]]
+ soversion: T.Optional[str]
+ version: T.Optional[str]
+
+
+class SharedLibrary(_BuildTarget, _SharedLibMixin):
pass
@@ -345,7 +353,7 @@ class SharedModule(_BuildTarget):
pass
-class Library(_BuildTarget):
+class Library(_BuildTarget, _SharedLibMixin):
"""For library, both_library, and as a base for build_target"""
@@ -360,3 +368,20 @@ class Jar(_BaseBuildTarget):
main_class: str
java_resources: T.Optional[build.StructuredSources]
+
+
+class FuncDeclareDependency(TypedDict):
+
+ compile_args: T.List[str]
+ d_import_dirs: T.List[T.Union[build.IncludeDirs, str]]
+ d_module_versions: T.List[T.Union[str, int]]
+ dependencies: T.List[Dependency]
+ extra_files: T.List[FileOrString]
+ include_directories: T.List[T.Union[build.IncludeDirs, str]]
+ link_args: T.List[str]
+ link_whole: T.List[T.Union[build.StaticLibrary, build.CustomTarget, build.CustomTargetIndex]]
+ link_with: T.List[build.LibTypes]
+ objects: T.List[build.ExtractedObjects]
+ sources: T.List[T.Union[FileOrString, build.GeneratedTypes]]
+ variables: T.Dict[str, str]
+ version: T.Optional[str]
diff --git a/mesonbuild/interpreter/type_checking.py b/mesonbuild/interpreter/type_checking.py
index b919d6a..28c9152 100644
--- a/mesonbuild/interpreter/type_checking.py
+++ b/mesonbuild/interpreter/type_checking.py
@@ -119,6 +119,13 @@ def _lower_strlist(input: T.List[str]) -> T.List[str]:
return [i.lower() for i in input]
+def _validate_shlib_version(val: T.Optional[str]) -> T.Optional[str]:
+ if val is not None and not re.fullmatch(r'[0-9]+(\.[0-9]+){0,2}', val):
+ return (f'Invalid Shared library version "{val}". '
+ 'Must be of the form X.Y.Z where all three are numbers. Y and Z are optional.')
+ return None
+
+
def variables_validator(contents: T.Union[str, T.List[str], T.Dict[str, str]]) -> T.Optional[str]:
if isinstance(contents, str):
contents = [contents]
@@ -497,6 +504,49 @@ def _validate_win_subsystem(value: T.Optional[str]) -> T.Optional[str]:
return f'Invalid value for win_subsystem: {value}.'
return None
+
+def _validate_darwin_versions(darwin_versions: T.List[T.Union[str, int]]) -> T.Optional[str]:
+ if len(darwin_versions) > 2:
+ return f"Must contain between 0 and 2 elements, not {len(darwin_versions)}"
+ if len(darwin_versions) == 1:
+ darwin_versions = 2 * darwin_versions
+ for v in darwin_versions:
+ if isinstance(v, int):
+ v = str(v)
+ if not re.fullmatch(r'[0-9]+(\.[0-9]+){0,2}', v):
+ return 'must be X.Y.Z where X, Y, Z are numbers, and Y and Z are optional'
+ try:
+ parts = v.split('.')
+ except ValueError:
+            return f'badly formed value: "{v}", not in X.Y.Z form'
+ if len(parts) in {1, 2, 3} and int(parts[0]) > 65535:
+ return 'must be X.Y.Z where X is [0, 65535] and Y, Z are optional'
+ if len(parts) in {2, 3} and int(parts[1]) > 255:
+ return 'must be X.Y.Z where Y is [0, 255] and Y, Z are optional'
+ if len(parts) == 3 and int(parts[2]) > 255:
+ return 'must be X.Y.Z where Z is [0, 255] and Y, Z are optional'
+ return None
+
+
+def _convert_darwin_versions(val: T.List[T.Union[str, int]]) -> T.Optional[T.Tuple[str, str]]:
+ if not val:
+ return None
+ elif len(val) == 1:
+ v = str(val[0])
+ return (v, v)
+ return (str(val[0]), str(val[1]))
+
+
+_DARWIN_VERSIONS_KW: KwargInfo[T.List[T.Union[str, int]]] = KwargInfo(
+ 'darwin_versions',
+ ContainerTypeInfo(list, (str, int)),
+ default=[],
+ listify=True,
+ validator=_validate_darwin_versions,
+ convertor=_convert_darwin_versions,
+ since='0.48.0',
+)
+
# Arguments exclusive to Executable. These are separated to make integrating
# them into build_target easier
_EXCLUSIVE_EXECUTABLE_KWS: T.List[KwargInfo] = [
@@ -527,7 +577,11 @@ STATIC_LIB_KWS = [
# Arguments exclusive to SharedLibrary. These are separated to make integrating
# them into build_target easier
-_EXCLUSIVE_SHARED_LIB_KWS: T.List[KwargInfo] = []
+_EXCLUSIVE_SHARED_LIB_KWS: T.List[KwargInfo] = [
+ _DARWIN_VERSIONS_KW,
+ KwargInfo('soversion', (str, int, NoneType), convertor=lambda x: str(x) if x is not None else None),
+ KwargInfo('version', (str, NoneType), validator=_validate_shlib_version)
+]
# The total list of arguments used by SharedLibrary
SHARED_LIB_KWS = [
diff --git a/mesonbuild/interpreterbase/decorators.py b/mesonbuild/interpreterbase/decorators.py
index cecdbfd..5bb8306 100644
--- a/mesonbuild/interpreterbase/decorators.py
+++ b/mesonbuild/interpreterbase/decorators.py
@@ -606,9 +606,9 @@ class FeatureCheckBase(metaclass=abc.ABCMeta):
unconditional = False
def __init__(self, feature_name: str, feature_version: str, extra_message: str = ''):
- self.feature_name = feature_name # type: str
- self.feature_version = feature_version # type: str
- self.extra_message = extra_message # type: str
+ self.feature_name = feature_name
+ self.feature_version = feature_version
+ self.extra_message = extra_message
@staticmethod
def get_target_version(subproject: str) -> str:
diff --git a/mesonbuild/interpreterbase/interpreterbase.py b/mesonbuild/interpreterbase/interpreterbase.py
index d23a23d..902f84a 100644
--- a/mesonbuild/interpreterbase/interpreterbase.py
+++ b/mesonbuild/interpreterbase/interpreterbase.py
@@ -93,12 +93,12 @@ class InterpreterBase:
self.current_lineno = -1
# Current node set during a function call. This can be used as location
# when printing a warning message during a method call.
- self.current_node = None # type: mparser.BaseNode
+ self.current_node: mparser.BaseNode = None
# This is set to `version_string` when this statement is evaluated:
# meson.version().compare_version(version_string)
# If it was part of a if-clause, it is used to temporally override the
# current meson version target within that if-block.
- self.tmp_meson_version = None # type: T.Optional[str]
+ self.tmp_meson_version: T.Optional[str] = None
def handle_meson_version_from_ast(self, strict: bool = True) -> None:
# do nothing in an AST interpreter
diff --git a/mesonbuild/linkers/detect.py b/mesonbuild/linkers/detect.py
index e09a28e..4261144 100644
--- a/mesonbuild/linkers/detect.py
+++ b/mesonbuild/linkers/detect.py
@@ -61,7 +61,7 @@ def guess_win_linker(env: 'Environment', compiler: T.List[str], comp_class: T.Ty
check_args += env.coredata.get_external_link_args(for_machine, comp_class.language)
- override = [] # type: T.List[str]
+ override: T.List[str] = []
value = env.lookup_binary_entry(for_machine, comp_class.language + '_ld')
if value is not None:
override = comp_class.use_linker_args(value[0], comp_version)
@@ -138,7 +138,7 @@ def guess_nix_linker(env: 'Environment', compiler: T.List[str], comp_class: T.Ty
else:
check_args = comp_class.LINKER_PREFIX + ['--version'] + extra_args
- override = [] # type: T.List[str]
+ override: T.List[str] = []
value = env.lookup_binary_entry(for_machine, comp_class.language + '_ld')
if value is not None:
override = comp_class.use_linker_args(value[0], comp_version)
diff --git a/mesonbuild/mcompile.py b/mesonbuild/mcompile.py
index b9bd71b..19875c2 100644
--- a/mesonbuild/mcompile.py
+++ b/mesonbuild/mcompile.py
@@ -54,7 +54,7 @@ def parse_introspect_data(builddir: Path) -> T.Dict[str, T.List[dict]]:
with path_to_intro.open(encoding='utf-8') as f:
schema = json.load(f)
- parsed_data = defaultdict(list) # type: T.Dict[str, T.List[dict]]
+ parsed_data: T.Dict[str, T.List[dict]] = defaultdict(list)
for target in schema:
parsed_data[target['name']] += [target]
return parsed_data
@@ -100,7 +100,7 @@ def get_target_from_intro_data(target: ParsedTargetName, builddir: Path, introsp
raise MesonException(f'Can\'t invoke target `{target.full_name}`: target not found')
intro_targets = introspect_data[target.name]
- found_targets = [] # type: T.List[T.Dict[str, T.Any]]
+ found_targets: T.List[T.Dict[str, T.Any]] = []
resolved_bdir = builddir.resolve()
@@ -174,7 +174,7 @@ def generate_target_name_vs(target: ParsedTargetName, builddir: Path, introspect
# Normalize project name
# Source: https://docs.microsoft.com/en-us/visualstudio/msbuild/how-to-build-specific-targets-in-solutions-by-using-msbuild-exe
- target_name = re.sub(r"[\%\$\@\;\.\(\)']", '_', intro_target['id']) # type: str
+ target_name = re.sub(r"[\%\$\@\;\.\(\)']", '_', intro_target['id'])
rel_path = Path(intro_target['filename'][0]).relative_to(builddir.resolve()).parent
if rel_path != Path('.'):
target_name = str(rel_path / target_name)
@@ -337,8 +337,8 @@ def run(options: 'argparse.Namespace') -> int:
if setup_vsenv(need_vsenv):
mlog.log(mlog.green('INFO:'), 'automatically activated MSVC compiler environment')
- cmd = [] # type: T.List[str]
- env = None # type: T.Optional[T.Dict[str, str]]
+ cmd: T.List[str] = []
+ env: T.Optional[T.Dict[str, str]] = None
backend = cdata.get_option(mesonlib.OptionKey('backend'))
assert isinstance(backend, str)
diff --git a/mesonbuild/mconf.py b/mesonbuild/mconf.py
index d4ecce8..9f6c685 100644
--- a/mesonbuild/mconf.py
+++ b/mesonbuild/mconf.py
@@ -301,9 +301,7 @@ class Conf:
for m in mismatching:
mlog.log(f'{m[0]:21}{m[1]:10}{m[2]:10}')
-def run(options: argparse.Namespace) -> int:
- coredata.parse_cmd_line_options(options)
- builddir = os.path.abspath(os.path.realpath(options.builddir))
+def run_impl(options: argparse.Namespace, builddir: str) -> int:
print_only = not options.cmd_line_options and not options.clearcache
c = None
try:
@@ -334,3 +332,8 @@ def run(options: argparse.Namespace) -> int:
# Pager quit before we wrote everything.
pass
return 0
+
+def run(options: argparse.Namespace) -> int:
+ coredata.parse_cmd_line_options(options)
+ builddir = os.path.abspath(os.path.realpath(options.builddir))
+ return run_impl(options, builddir)
diff --git a/mesonbuild/minstall.py b/mesonbuild/minstall.py
index a9c561f..0d397b2 100644
--- a/mesonbuild/minstall.py
+++ b/mesonbuild/minstall.py
@@ -403,13 +403,15 @@ class Installer:
append_to_log(self.lf, f'# Preserving old file {to_file}\n')
self.preserved_file_count += 1
return False
+ self.log(f'Installing {from_file} to {outdir}')
self.remove(to_file)
- elif makedirs:
- # Unpack tuple
- dirmaker, outdir = makedirs
- # Create dirs if needed
- dirmaker.makedirs(outdir, exist_ok=True)
- self.log(f'Installing {from_file} to {outdir}')
+ else:
+ self.log(f'Installing {from_file} to {outdir}')
+ if makedirs:
+ # Unpack tuple
+ dirmaker, outdir = makedirs
+ # Create dirs if needed
+ dirmaker.makedirs(outdir, exist_ok=True)
if os.path.islink(from_file):
if not os.path.exists(from_file):
# Dangling symlink. Replicate as is.
diff --git a/mesonbuild/mintro.py b/mesonbuild/mintro.py
index 9657da7..0391535 100644
--- a/mesonbuild/mintro.py
+++ b/mesonbuild/mintro.py
@@ -132,7 +132,7 @@ def list_installed(installdata: backends.InstallData) -> T.Dict[str, str]:
return res
def list_install_plan(installdata: backends.InstallData) -> T.Dict[str, T.Dict[str, T.Dict[str, T.Optional[str]]]]:
- plan = {
+ plan: T.Dict[str, T.Dict[str, T.Dict[str, T.Optional[str]]]] = {
'targets': {
os.path.join(installdata.build_dir, target.fname): {
'destination': target.out_name,
@@ -141,7 +141,7 @@ def list_install_plan(installdata: backends.InstallData) -> T.Dict[str, T.Dict[s
}
for target in installdata.targets
},
- } # type: T.Dict[str, T.Dict[str, T.Dict[str, T.Optional[str]]]]
+ }
for key, data_list in {
'data': installdata.data,
'man': installdata.man,
@@ -178,13 +178,13 @@ def get_target_dir(coredata: cdata.CoreData, subdir: str) -> str:
return subdir
def list_targets_from_source(intr: IntrospectionInterpreter) -> T.List[T.Dict[str, T.Union[bool, str, T.List[T.Union[str, T.Dict[str, T.Union[str, T.List[str], bool]]]]]]]:
- tlist = [] # type: T.List[T.Dict[str, T.Union[bool, str, T.List[T.Union[str, T.Dict[str, T.Union[str, T.List[str], bool]]]]]]]
+ tlist: T.List[T.Dict[str, T.Union[bool, str, T.List[T.Union[str, T.Dict[str, T.Union[str, T.List[str], bool]]]]]]] = []
root_dir = Path(intr.source_root)
def nodes_to_paths(node_list: T.List[BaseNode]) -> T.List[Path]:
- res = [] # type: T.List[Path]
+ res: T.List[Path] = []
for n in node_list:
- args = [] # type: T.List[BaseNode]
+ args: T.List[BaseNode] = []
if isinstance(n, FunctionNode):
args = list(n.args.arguments)
if n.func_name in BUILD_TARGET_FUNCTIONS:
@@ -231,7 +231,7 @@ def list_targets_from_source(intr: IntrospectionInterpreter) -> T.List[T.Dict[st
return tlist
def list_targets(builddata: build.Build, installdata: backends.InstallData, backend: backends.Backend) -> T.List[T.Any]:
- tlist = [] # type: T.List[T.Any]
+ tlist: T.List[T.Any] = []
build_dir = builddata.environment.get_build_dir()
src_dir = builddata.environment.get_source_dir()
@@ -290,7 +290,7 @@ def list_buildoptions_from_source(intr: IntrospectionInterpreter) -> T.List[T.Di
return list_buildoptions(intr.coredata, subprojects)
def list_buildoptions(coredata: cdata.CoreData, subprojects: T.Optional[T.List[str]] = None) -> T.List[T.Dict[str, T.Union[str, bool, int, T.List[str]]]]:
- optlist = [] # type: T.List[T.Dict[str, T.Union[str, bool, int, T.List[str]]]]
+ optlist: T.List[T.Dict[str, T.Union[str, bool, int, T.List[str]]]] = []
subprojects = subprojects or []
dir_option_names = set(cdata.BUILTIN_DIR_OPTIONS)
@@ -379,7 +379,7 @@ def list_compilers(coredata: cdata.CoreData) -> T.Dict[str, T.Dict[str, T.Dict[s
return compilers
def list_deps_from_source(intr: IntrospectionInterpreter) -> T.List[T.Dict[str, T.Union[str, bool]]]:
- result = [] # type: T.List[T.Dict[str, T.Union[str, bool]]]
+ result: T.List[T.Dict[str, T.Union[str, bool]]] = []
for i in intr.dependencies:
keys = [
'name',
@@ -412,7 +412,7 @@ def list_deps(coredata: cdata.CoreData, backend: backends.Backend) -> T.List[T.D
'version': d.get_version(),
'compile_args': d.get_compile_args(),
'link_args': d.get_link_args(),
- 'include_directories': [i for idirs in d.get_include_dirs() for i in idirs.to_string_list(backend.source_dir)],
+ 'include_directories': [i for idirs in d.get_include_dirs() for i in idirs.to_string_list(backend.source_dir, backend.build_dir)],
'sources': [f for s in d.get_sources() for f in _src_to_str(s)],
'extra_files': [f for s in d.get_extra_files() for f in _src_to_str(s)],
'dependencies': [e.name for e in d.ext_deps],
@@ -436,9 +436,9 @@ def list_deps(coredata: cdata.CoreData, backend: backends.Backend) -> T.List[T.D
return list(result.values())
def get_test_list(testdata: T.List[backends.TestSerialisation]) -> T.List[T.Dict[str, T.Union[str, int, T.List[str], T.Dict[str, str]]]]:
- result = [] # type: T.List[T.Dict[str, T.Union[str, int, T.List[str], T.Dict[str, str]]]]
+ result: T.List[T.Dict[str, T.Union[str, int, T.List[str], T.Dict[str, str]]]] = []
for t in testdata:
- to = {} # type: T.Dict[str, T.Union[str, int, T.List[str], T.Dict[str, str]]]
+ to: T.Dict[str, T.Union[str, int, T.List[str], T.Dict[str, str]]] = {}
if isinstance(t.fname, str):
fname = [t.fname]
else:
@@ -477,14 +477,18 @@ def list_machines(builddata: build.Build) -> T.Dict[str, T.Dict[str, T.Union[str
return machines
def list_projinfo(builddata: build.Build) -> T.Dict[str, T.Union[str, T.List[T.Dict[str, str]]]]:
- result = {'version': builddata.project_version,
- 'descriptive_name': builddata.project_name,
- 'subproject_dir': builddata.subproject_dir} # type: T.Dict[str, T.Union[str, T.List[T.Dict[str, str]]]]
+ result: T.Dict[str, T.Union[str, T.List[T.Dict[str, str]]]] = {
+ 'version': builddata.project_version,
+ 'descriptive_name': builddata.project_name,
+ 'subproject_dir': builddata.subproject_dir,
+ }
subprojects = []
for k, v in builddata.subprojects.items():
- c = {'name': k,
- 'version': v,
- 'descriptive_name': builddata.projects.get(k)} # type: T.Dict[str, str]
+ c: T.Dict[str, str] = {
+ 'name': k,
+ 'version': v,
+ 'descriptive_name': builddata.projects.get(k),
+ }
subprojects.append(c)
result['subprojects'] = subprojects
return result
@@ -537,7 +541,7 @@ def run(options: argparse.Namespace) -> int:
if options.builddir is not None:
datadir = os.path.join(options.builddir, datadir)
indent = 4 if options.indent else None
- results = [] # type: T.List[T.Tuple[str, T.Union[dict, T.List[T.Any]]]]
+ results: T.List[T.Tuple[str, T.Union[dict, T.List[T.Any]]]] = []
sourcedir = '.' if options.builddir == 'meson.build' else options.builddir[:-11]
intro_types = get_meson_introspection_types(sourcedir=sourcedir)
@@ -590,7 +594,7 @@ def run(options: argparse.Namespace) -> int:
return print_results(options, results, indent)
-updated_introspection_files = [] # type: T.List[str]
+updated_introspection_files: T.List[str] = []
def write_intro_info(intro_info: T.Sequence[T.Tuple[str, T.Union[dict, T.List[T.Any]]]], info_dir: str) -> None:
for kind, data in intro_info:
@@ -605,7 +609,7 @@ def write_intro_info(intro_info: T.Sequence[T.Tuple[str, T.Union[dict, T.List[T.
def generate_introspection_file(builddata: build.Build, backend: backends.Backend) -> None:
coredata = builddata.environment.get_coredata()
intro_types = get_meson_introspection_types(coredata=coredata, builddata=builddata, backend=backend)
- intro_info = [] # type: T.List[T.Tuple[str, T.Union[dict, T.List[T.Any]]]]
+ intro_info: T.List[T.Tuple[str, T.Union[dict, T.List[T.Any]]]] = []
for key, val in intro_types.items():
if not val.func:
diff --git a/mesonbuild/mlog.py b/mesonbuild/mlog.py
index e51399b..0e62a57 100644
--- a/mesonbuild/mlog.py
+++ b/mesonbuild/mlog.py
@@ -60,7 +60,7 @@ def _windows_ansi() -> bool:
return bool(kernel.SetConsoleMode(stdout, mode.value | 0x4) or os.environ.get('ANSICON'))
def colorize_console() -> bool:
- _colorize_console = getattr(sys.stdout, 'colorize_console', None) # type: bool
+ _colorize_console: bool = getattr(sys.stdout, 'colorize_console', None)
if _colorize_console is not None:
return _colorize_console
@@ -201,7 +201,7 @@ class _Logger:
self.log_fatal_warnings = fatal_warnings
def process_markup(self, args: T.Sequence[TV_Loggable], keep: bool, display_timestamp: bool = True) -> T.List[str]:
- arr = [] # type: T.List[str]
+ arr: T.List[str] = []
if self.log_timestamp_start is not None and display_timestamp:
arr = ['[{:.3f}]'.format(time.monotonic() - self.log_timestamp_start)]
for arg in args:
@@ -312,7 +312,7 @@ class _Logger:
# The typing requirements here are non-obvious. Lists are invariant,
# therefore T.List[A] and T.List[T.Union[A, B]] are not able to be joined
if severity is _Severity.NOTICE:
- label = [bold('NOTICE:')] # type: TV_LoggableList
+ label: TV_LoggableList = [bold('NOTICE:')]
elif severity is _Severity.WARNING:
label = [yellow('WARNING:')]
elif severity is _Severity.ERROR:
@@ -373,7 +373,7 @@ class _Logger:
if prefix is None:
prefix = red('ERROR:')
self.log()
- args = [] # type: T.List[T.Union[AnsiDecorator, str]]
+ args: T.List[T.Union[AnsiDecorator, str]] = []
if all(getattr(e, a, None) is not None for a in ['file', 'lineno', 'colno']):
# Mypy doesn't follow hasattr, and it's pretty easy to visually inspect
# that this is correct, so we'll just ignore it.
diff --git a/mesonbuild/modules/__init__.py b/mesonbuild/modules/__init__.py
index b46b300..eef67a1 100644
--- a/mesonbuild/modules/__init__.py
+++ b/mesonbuild/modules/__init__.py
@@ -26,6 +26,7 @@ from ..programs import ExternalProgram
if T.TYPE_CHECKING:
from ..interpreter import Interpreter
+ from ..interpreter.interpreter import ProgramVersionFunc
from ..interpreter.interpreterobjects import MachineHolder
from ..interpreterbase import TYPE_var, TYPE_kwargs
from ..programs import OverrideProgram
@@ -86,7 +87,7 @@ class ModuleState:
def find_program(self, prog: T.Union[mesonlib.FileOrString, T.List[mesonlib.FileOrString]],
required: bool = True,
- version_func: T.Optional[T.Callable[[T.Union[ExternalProgram, build.Executable, OverrideProgram]], str]] = None,
+ version_func: T.Optional[ProgramVersionFunc] = None,
wanted: T.Optional[str] = None, silent: bool = False,
for_machine: MachineChoice = MachineChoice.HOST) -> T.Union[ExternalProgram, build.Executable, OverrideProgram]:
if not isinstance(prog, list):
@@ -166,6 +167,8 @@ class ModuleState:
else:
yield self._interpreter.build_incdir_object([d])
+ def add_language(self, lang: str, for_machine: MachineChoice) -> None:
+ self._interpreter.add_languages([lang], True, for_machine)
class ModuleObject(HoldableObject):
"""Base class for all objects returned by modules
diff --git a/mesonbuild/modules/cmake.py b/mesonbuild/modules/cmake.py
index c6048f9..bec1b2a 100644
--- a/mesonbuild/modules/cmake.py
+++ b/mesonbuild/modules/cmake.py
@@ -188,7 +188,7 @@ class CMakeSubproject(ModuleObject):
class CMakeSubprojectOptions(ModuleObject):
def __init__(self) -> None:
super().__init__()
- self.cmake_options = [] # type: T.List[str]
+ self.cmake_options: T.List[str] = []
self.target_options = TargetOptions()
self.methods.update(
diff --git a/mesonbuild/modules/external_project.py b/mesonbuild/modules/external_project.py
index f7a72bc..e7b7c43 100644
--- a/mesonbuild/modules/external_project.py
+++ b/mesonbuild/modules/external_project.py
@@ -247,6 +247,7 @@ class ExternalProject(NewExtensionModule):
depfile=f'{self.name}.d',
console=True,
extra_depends=extra_depends,
+ description='Generating external project {}',
)
idir = build.InstallDir(self.subdir.as_posix(),
diff --git a/mesonbuild/modules/fs.py b/mesonbuild/modules/fs.py
index 7d96995..5a9533c 100644
--- a/mesonbuild/modules/fs.py
+++ b/mesonbuild/modules/fs.py
@@ -20,19 +20,16 @@ import typing as T
from . import ExtensionModule, ModuleReturnValue, ModuleInfo
from .. import mlog
-from ..build import CustomTarget, InvalidArguments
+from ..build import BuildTarget, CustomTarget, CustomTargetIndex, InvalidArguments
from ..interpreter.type_checking import INSTALL_KW, INSTALL_MODE_KW, INSTALL_TAG_KW, NoneType
from ..interpreterbase import FeatureNew, KwargInfo, typed_kwargs, typed_pos_args, noKwargs
-from ..mesonlib import (
- File,
- MesonException,
- has_path_sep,
- path_is_in_root,
-)
+from ..mesonlib import File, MesonException, has_path_sep, path_is_in_root, relpath
if T.TYPE_CHECKING:
from . import ModuleState
+ from ..build import BuildTargetTypes
from ..interpreter import Interpreter
+ from ..interpreterbase import TYPE_kwargs
from ..mesonlib import FileOrString, FileMode
from typing_extensions import TypedDict
@@ -75,6 +72,7 @@ class FSModule(ExtensionModule):
'stem': self.stem,
'read': self.read,
'copyfile': self.copyfile,
+ 'relative_to': self.relative_to,
})
def _absolute_dir(self, state: 'ModuleState', arg: 'FileOrString') -> Path:
@@ -261,8 +259,10 @@ class FSModule(ExtensionModule):
try:
with open(path, encoding=encoding) as f:
data = f.read()
+ except FileNotFoundError:
+ raise MesonException(f'File {args[0]} does not exist.')
except UnicodeDecodeError:
- raise MesonException(f'decoding failed for {path}')
+ raise MesonException(f'decoding failed for {args[0]}')
# Reconfigure when this file changes as it can contain data used by any
# part of the build configuration (e.g. `project(..., version:
# fs.read_file('VERSION')` or `configure_file(...)`
@@ -306,10 +306,28 @@ class FSModule(ExtensionModule):
install_mode=kwargs['install_mode'],
install_tag=[kwargs['install_tag']],
backend=state.backend,
+ description='Copying file {}',
)
return ModuleReturnValue(ct, [ct])
+ @FeatureNew('fs.relative_to', '1.3.0')
+ @typed_pos_args('fs.relative_to', (str, File, CustomTarget, CustomTargetIndex, BuildTarget), (str, File, CustomTarget, CustomTargetIndex, BuildTarget))
+ @noKwargs
+ def relative_to(self, state: ModuleState, args: T.Tuple[T.Union[FileOrString, BuildTargetTypes], T.Union[FileOrString, BuildTargetTypes]], kwargs: TYPE_kwargs) -> str:
+ def to_path(arg: T.Union[FileOrString, CustomTarget, CustomTargetIndex, BuildTarget]) -> str:
+ if isinstance(arg, File):
+ return arg.absolute_path(state.environment.source_dir, state.environment.build_dir)
+ elif isinstance(arg, (CustomTarget, CustomTargetIndex, BuildTarget)):
+ return state.backend.get_target_filename_abs(arg)
+ else:
+ return os.path.join(state.environment.source_dir, state.subdir, arg)
+
+ t = to_path(args[0])
+ f = to_path(args[1])
+
+ return relpath(t, f)
+
def initialize(*args: T.Any, **kwargs: T.Any) -> FSModule:
return FSModule(*args, **kwargs)
diff --git a/mesonbuild/modules/gnome.py b/mesonbuild/modules/gnome.py
index 18862e7..039e122 100644
--- a/mesonbuild/modules/gnome.py
+++ b/mesonbuild/modules/gnome.py
@@ -1113,6 +1113,9 @@ class GnomeModule(ExtensionModule):
)
def generate_gir(self, state: 'ModuleState', args: T.Tuple[T.List[T.Union[Executable, build.SharedLibrary, build.StaticLibrary]]],
kwargs: 'GenerateGir') -> ModuleReturnValue:
+ # Ensure we have a C compiler even in C++ projects.
+ state.add_language('c', MachineChoice.HOST)
+
girtargets = [self._unwrap_gir_target(arg, state) for arg in args[0]]
if len(girtargets) > 1 and any(isinstance(el, Executable) for el in girtargets):
raise MesonException('generate_gir only accepts a single argument when one of the arguments is an executable')
@@ -1242,6 +1245,7 @@ class GnomeModule(ExtensionModule):
['gschemas.compiled'],
build_by_default=kwargs['build_by_default'],
depend_files=kwargs['depend_files'],
+ description='Compiling gschemas {}',
)
self._devenv_prepend('GSETTINGS_SCHEMA_DIR', os.path.join(state.environment.get_build_dir(), state.subdir))
return ModuleReturnValue(target_g, [target_g])
@@ -1355,6 +1359,7 @@ class GnomeModule(ExtensionModule):
[po_file],
[gmo_file],
install_tag=['doc'],
+ description='Generating yelp doc {}',
)
targets.append(gmotarget)
@@ -1370,6 +1375,7 @@ class GnomeModule(ExtensionModule):
install=True,
install_dir=[l_install_dir],
install_tag=['doc'],
+ description='Generating yelp doc {}',
)
targets.append(mergetarget)
@@ -1427,6 +1433,9 @@ class GnomeModule(ExtensionModule):
namespace = kwargs['namespace']
+ # Ensure we have a C compiler even in C++ projects.
+ state.add_language('c', MachineChoice.HOST)
+
def abs_filenames(files: T.Iterable['FileOrString']) -> T.Iterator[str]:
for f in files:
if isinstance(f, mesonlib.File):
@@ -1513,6 +1522,7 @@ class GnomeModule(ExtensionModule):
[f'{modulename}-decl.txt'],
build_always_stale=True,
extra_depends=new_depends,
+ description='Generating gtkdoc {}',
)
alias_target = build.AliasTarget(targetname, [custom_target], state.subdir, state.subproject, state.environment)
if kwargs['check']:
@@ -1656,6 +1666,7 @@ class GnomeModule(ExtensionModule):
xml_files,
[output],
build_by_default=build_by_default,
+ description='Generating gdbus source {}',
)
targets.append(cfile_custom_target)
@@ -1680,6 +1691,7 @@ class GnomeModule(ExtensionModule):
install=install_header,
install_dir=[install_dir],
install_tag=['devel'],
+ description='Generating gdbus header {}',
)
targets.append(hfile_custom_target)
@@ -1708,6 +1720,7 @@ class GnomeModule(ExtensionModule):
outputs,
build_by_default=build_by_default,
extra_depends=depends,
+ description='Generating gdbus docbook {}',
)
targets.append(docbook_custom_target)
@@ -1930,6 +1943,7 @@ class GnomeModule(ExtensionModule):
extra_depends=depends,
# https://github.com/mesonbuild/meson/issues/973
absolute_paths=True,
+ description='Generating GObject enum file {}',
)
@typed_pos_args('gnome.genmarshal', str)
@@ -1996,6 +2010,7 @@ class GnomeModule(ExtensionModule):
install_tag=['devel'],
capture=capture,
depend_files=kwargs['depend_files'],
+ description='Generating glib marshaller header {}',
)
c_cmd = cmd + ['--body', '@INPUT@']
@@ -2015,6 +2030,7 @@ class GnomeModule(ExtensionModule):
capture=capture,
depend_files=kwargs['depend_files'],
extra_depends=extra_deps,
+ description='Generating glib marshaller source {}',
)
rv = [body, header]
diff --git a/mesonbuild/modules/hotdoc.py b/mesonbuild/modules/hotdoc.py
index cf1a247..f113549 100644
--- a/mesonbuild/modules/hotdoc.py
+++ b/mesonbuild/modules/hotdoc.py
@@ -15,25 +15,47 @@ from __future__ import annotations
'''This module provides helper functions for generating documentation using hotdoc'''
-import os
-import subprocess
-
-from mesonbuild import mesonlib
-from mesonbuild import mlog, build
-from mesonbuild.mesonlib import MesonException
-from . import ModuleReturnValue, ModuleInfo
-from . import ExtensionModule
+import os, subprocess
+import typing as T
+
+from . import ExtensionModule, ModuleReturnValue, ModuleInfo
+from .. import build, mesonlib, mlog
+from ..build import CustomTarget, CustomTargetIndex
from ..dependencies import Dependency, InternalDependency
from ..interpreterbase import (
InvalidArguments, noPosargs, noKwargs, typed_kwargs, FeatureDeprecated,
ContainerTypeInfo, KwargInfo, typed_pos_args
)
-from ..interpreter import CustomTargetHolder
+from ..interpreter.interpreterobjects import _CustomTargetHolder
from ..interpreter.type_checking import NoneType
+from ..mesonlib import File, MesonException
from ..programs import ExternalProgram
-
-def ensure_list(value):
+if T.TYPE_CHECKING:
+ from typing_extensions import TypedDict
+
+ from . import ModuleState
+ from ..environment import Environment
+ from ..interpreter import Interpreter
+ from ..interpreterbase import TYPE_kwargs, TYPE_var
+
+ _T = T.TypeVar('_T')
+
+ class GenerateDocKwargs(TypedDict):
+ sitemap: T.Union[str, File, CustomTarget, CustomTargetIndex]
+ index: T.Union[str, File, CustomTarget, CustomTargetIndex]
+ project_version: str
+ html_extra_theme: T.Optional[str]
+ include_paths: T.List[str]
+ dependencies: T.List[T.Union[Dependency, build.StaticLibrary, build.SharedLibrary, CustomTarget, CustomTargetIndex]]
+ depends: T.List[T.Union[CustomTarget, CustomTargetIndex]]
+ gi_c_source_roots: T.List[str]
+ extra_assets: T.List[str]
+ extra_extension_paths: T.List[str]
+ subprojects: T.List['HotdocTarget']
+ install: bool
+
+def ensure_list(value: T.Union[_T, T.List[_T]]) -> T.List[_T]:
if not isinstance(value, list):
return [value]
return value
@@ -41,34 +63,39 @@ def ensure_list(value):
MIN_HOTDOC_VERSION = '0.8.100'
-file_types = (str, mesonlib.File, build.CustomTarget, build.CustomTargetIndex)
+file_types = (str, File, CustomTarget, CustomTargetIndex)
+
+
+class HotdocExternalProgram(ExternalProgram):
+ def run_hotdoc(self, cmd: T.List[str]) -> int:
+ return subprocess.run(self.get_command() + cmd, stdout=subprocess.DEVNULL).returncode
class HotdocTargetBuilder:
- def __init__(self, name, state, hotdoc, interpreter, kwargs):
+ def __init__(self, name: str, state: ModuleState, hotdoc: HotdocExternalProgram, interpreter: Interpreter, kwargs):
self.hotdoc = hotdoc
self.build_by_default = kwargs.pop('build_by_default', False)
self.kwargs = kwargs
self.name = name
self.state = state
self.interpreter = interpreter
- self.include_paths = mesonlib.OrderedSet()
+ self.include_paths: mesonlib.OrderedSet[str] = mesonlib.OrderedSet()
self.builddir = state.environment.get_build_dir()
self.sourcedir = state.environment.get_source_dir()
self.subdir = state.subdir
self.build_command = state.environment.get_build_command()
- self.cmd = ['conf', '--project-name', name, "--disable-incremental-build",
- '--output', os.path.join(self.builddir, self.subdir, self.name + '-doc')]
+ self.cmd: T.List[TYPE_var] = ['conf', '--project-name', name, "--disable-incremental-build",
+ '--output', os.path.join(self.builddir, self.subdir, self.name + '-doc')]
self._extra_extension_paths = set()
self.extra_assets = set()
self.extra_depends = []
self._subprojects = []
- def process_known_arg(self, option, argname=None, value_processor=None):
+ def process_known_arg(self, option: str, argname: T.Optional[str] = None, value_processor: T.Optional[T.Callable] = None) -> None:
if not argname:
argname = option.strip("-").replace("-", "_")
@@ -78,7 +105,7 @@ class HotdocTargetBuilder:
self.set_arg_value(option, value)
- def set_arg_value(self, option, value):
+ def set_arg_value(self, option: str, value: TYPE_var) -> None:
if value is None:
return
@@ -111,18 +138,18 @@ class HotdocTargetBuilder:
else:
self.cmd.extend([option, value])
- def check_extra_arg_type(self, arg, value):
+ def check_extra_arg_type(self, arg: str, value: TYPE_var) -> None:
if isinstance(value, list):
for v in value:
self.check_extra_arg_type(arg, v)
return
- valid_types = (str, bool, mesonlib.File, build.IncludeDirs, build.CustomTarget, build.CustomTargetIndex, build.BuildTarget)
+ valid_types = (str, bool, File, build.IncludeDirs, CustomTarget, CustomTargetIndex, build.BuildTarget)
if not isinstance(value, valid_types):
raise InvalidArguments('Argument "{}={}" should be of type: {}.'.format(
arg, value, [t.__name__ for t in valid_types]))
- def process_extra_args(self):
+ def process_extra_args(self) -> None:
for arg, value in self.kwargs.items():
option = "--" + arg.replace("_", "-")
self.check_extra_arg_type(arg, value)
@@ -153,7 +180,7 @@ class HotdocTargetBuilder:
return None, None
- def add_extension_paths(self, paths):
+ def add_extension_paths(self, paths: T.Union[T.List[str], T.Set[str]]) -> None:
for path in paths:
if path in self._extra_extension_paths:
continue
@@ -161,10 +188,10 @@ class HotdocTargetBuilder:
self._extra_extension_paths.add(path)
self.cmd.extend(["--extra-extension-path", path])
- def replace_dirs_in_string(self, string):
+ def replace_dirs_in_string(self, string: str) -> str:
return string.replace("@SOURCE_ROOT@", self.sourcedir).replace("@BUILD_ROOT@", self.builddir)
- def process_gi_c_source_roots(self):
+ def process_gi_c_source_roots(self) -> None:
if self.hotdoc.run_hotdoc(['--has-extension=gi-extension']) != 0:
return
@@ -176,7 +203,7 @@ class HotdocTargetBuilder:
self.cmd += ['--gi-c-source-roots'] + value
- def process_dependencies(self, deps):
+ def process_dependencies(self, deps: T.List[T.Union[Dependency, build.StaticLibrary, build.SharedLibrary, CustomTarget, CustomTargetIndex]]) -> T.List[str]:
cflags = set()
for dep in mesonlib.listify(ensure_list(deps)):
if isinstance(dep, InternalDependency):
@@ -200,29 +227,29 @@ class HotdocTargetBuilder:
self.include_paths.add(os.path.join(self.builddir, dep.hotdoc_conf.subdir))
self.cmd += ['--extra-assets=' + p for p in dep.extra_assets]
self.add_extension_paths(dep.extra_extension_paths)
- elif isinstance(dep, (build.CustomTarget, build.BuildTarget)):
+ elif isinstance(dep, (CustomTarget, build.BuildTarget)):
self.extra_depends.append(dep)
- elif isinstance(dep, build.CustomTargetIndex):
+ elif isinstance(dep, CustomTargetIndex):
self.extra_depends.append(dep.target)
return [f.strip('-I') for f in cflags]
- def process_extra_assets(self):
+ def process_extra_assets(self) -> None:
self._extra_assets = self.kwargs.pop('extra_assets')
for assets_path in self._extra_assets:
self.cmd.extend(["--extra-assets", assets_path])
- def process_subprojects(self):
+ def process_subprojects(self) -> None:
value = self.kwargs.pop('subprojects')
self.process_dependencies(value)
self._subprojects.extend(value)
- def flatten_config_command(self):
+ def flatten_config_command(self) -> T.List[str]:
cmd = []
for arg in mesonlib.listify(self.cmd, flatten=True):
- if isinstance(arg, mesonlib.File):
+ if isinstance(arg, File):
arg = arg.absolute_path(self.state.environment.get_source_dir(),
self.state.environment.get_build_dir())
elif isinstance(arg, build.IncludeDirs):
@@ -231,10 +258,10 @@ class HotdocTargetBuilder:
cmd.append(os.path.join(self.builddir, arg.get_curdir(), inc_dir))
continue
- elif isinstance(arg, (build.BuildTarget, build.CustomTarget)):
+ elif isinstance(arg, (build.BuildTarget, CustomTarget)):
self.extra_depends.append(arg)
arg = self.interpreter.backend.get_target_filename_abs(arg)
- elif isinstance(arg, build.CustomTargetIndex):
+ elif isinstance(arg, CustomTargetIndex):
self.extra_depends.append(arg.target)
arg = self.interpreter.backend.get_target_filename_abs(arg)
@@ -242,7 +269,7 @@ class HotdocTargetBuilder:
return cmd
- def generate_hotdoc_config(self):
+ def generate_hotdoc_config(self) -> None:
cwd = os.path.abspath(os.curdir)
ncwd = os.path.join(self.sourcedir, self.subdir)
mlog.log('Generating Hotdoc configuration for: ', mlog.bold(self.name))
@@ -251,7 +278,7 @@ class HotdocTargetBuilder:
raise MesonException('hotdoc failed to configure')
os.chdir(cwd)
- def ensure_file(self, value):
+ def ensure_file(self, value: T.Union[str, File, CustomTarget, CustomTargetIndex]) -> T.Union[File, CustomTarget, CustomTargetIndex]:
if isinstance(value, list):
res = []
for val in value:
@@ -259,11 +286,11 @@ class HotdocTargetBuilder:
return res
if isinstance(value, str):
- return mesonlib.File.from_source_file(self.sourcedir, self.subdir, value)
+ return File.from_source_file(self.sourcedir, self.subdir, value)
return value
- def ensure_dir(self, value):
+ def ensure_dir(self, value: str) -> str:
if os.path.isabs(value):
_dir = value
else:
@@ -274,12 +301,12 @@ class HotdocTargetBuilder:
return os.path.relpath(_dir, os.path.join(self.builddir, self.subdir))
- def check_forbidden_args(self):
+ def check_forbidden_args(self) -> None:
for arg in ['conf_file']:
if arg in self.kwargs:
raise InvalidArguments(f'Argument "{arg}" is forbidden.')
- def make_targets(self):
+ def make_targets(self) -> T.Tuple[HotdocTarget, mesonlib.ExecutableSerialisation]:
self.check_forbidden_args()
self.process_known_arg("--index", value_processor=self.ensure_file)
self.process_known_arg("--project-version")
@@ -325,7 +352,7 @@ class HotdocTargetBuilder:
subdir=self.subdir,
subproject=self.state.subproject,
environment=self.state.environment,
- hotdoc_conf=mesonlib.File.from_built_file(
+ hotdoc_conf=File.from_built_file(
self.subdir, hotdoc_config_name),
extra_extension_paths=self._extra_extension_paths,
extra_assets=self._extra_assets,
@@ -364,29 +391,30 @@ class HotdocTargetBuilder:
return (target, install_script)
-class HotdocTargetHolder(CustomTargetHolder):
- def __init__(self, target, interp):
+class HotdocTargetHolder(_CustomTargetHolder['HotdocTarget']):
+ def __init__(self, target: HotdocTarget, interp: Interpreter):
super().__init__(target, interp)
self.methods.update({'config_path': self.config_path_method})
@noPosargs
@noKwargs
- def config_path_method(self, *args, **kwargs):
+ def config_path_method(self, *args: T.Any, **kwargs: T.Any) -> str:
conf = self.held_object.hotdoc_conf.absolute_path(self.interpreter.environment.source_dir,
self.interpreter.environment.build_dir)
return conf
-class HotdocTarget(build.CustomTarget):
- def __init__(self, name, subdir, subproject, hotdoc_conf, extra_extension_paths, extra_assets,
- subprojects, environment, **kwargs):
+class HotdocTarget(CustomTarget):
+ def __init__(self, name: str, subdir: str, subproject: str, hotdoc_conf: File,
+ extra_extension_paths: T.Set[str], extra_assets: T.List[str],
+ subprojects: T.List['HotdocTarget'], environment: Environment, **kwargs: T.Any):
super().__init__(name, subdir, subproject, environment, **kwargs, absolute_paths=True)
self.hotdoc_conf = hotdoc_conf
self.extra_extension_paths = extra_extension_paths
self.extra_assets = extra_assets
self.subprojects = subprojects
- def __getstate__(self):
+ def __getstate__(self) -> dict:
# Make sure we do not try to pickle subprojects
res = self.__dict__.copy()
res['subprojects'] = []
@@ -398,19 +426,15 @@ class HotDocModule(ExtensionModule):
INFO = ModuleInfo('hotdoc', '0.48.0')
- def __init__(self, interpreter):
+ def __init__(self, interpreter: Interpreter):
super().__init__(interpreter)
- self.hotdoc = ExternalProgram('hotdoc')
+ self.hotdoc = HotdocExternalProgram('hotdoc')
if not self.hotdoc.found():
raise MesonException('hotdoc executable not found')
version = self.hotdoc.get_version(interpreter)
if not mesonlib.version_compare(version, f'>={MIN_HOTDOC_VERSION}'):
            raise MesonException(f'hotdoc {MIN_HOTDOC_VERSION} required but not found.')
- def run_hotdoc(cmd):
- return subprocess.run(self.hotdoc.get_command() + cmd, stdout=subprocess.DEVNULL).returncode
-
- self.hotdoc.run_hotdoc = run_hotdoc
self.methods.update({
'has_extensions': self.has_extensions,
'generate_doc': self.generate_doc,
@@ -418,7 +442,7 @@ class HotDocModule(ExtensionModule):
@noKwargs
@typed_pos_args('hotdoc.has_extensions', varargs=str, min_varargs=1)
- def has_extensions(self, state, args, kwargs):
+ def has_extensions(self, state: ModuleState, args: T.Tuple[T.List[str]], kwargs: TYPE_kwargs) -> bool:
return self.hotdoc.run_hotdoc([f'--has-extension={extension}' for extension in args[0]]) == 0
@typed_pos_args('hotdoc.generate_doc', str)
@@ -433,13 +457,13 @@ class HotDocModule(ExtensionModule):
KwargInfo(
'dependencies',
ContainerTypeInfo(list, (Dependency, build.StaticLibrary, build.SharedLibrary,
- build.CustomTarget, build.CustomTargetIndex)),
+ CustomTarget, CustomTargetIndex)),
listify=True,
default=[],
),
KwargInfo(
'depends',
- ContainerTypeInfo(list, (build.CustomTarget, build.CustomTargetIndex)),
+ ContainerTypeInfo(list, (CustomTarget, CustomTargetIndex)),
listify=True,
default=[],
since='0.64.1',
@@ -451,21 +475,21 @@ class HotDocModule(ExtensionModule):
KwargInfo('install', bool, default=False),
allow_unknown=True
)
- def generate_doc(self, state, args, kwargs):
+ def generate_doc(self, state: ModuleState, args: T.Tuple[str], kwargs: GenerateDocKwargs) -> ModuleReturnValue:
project_name = args[0]
- if any(isinstance(x, (build.CustomTarget, build.CustomTargetIndex)) for x in kwargs['dependencies']):
+ if any(isinstance(x, (CustomTarget, CustomTargetIndex)) for x in kwargs['dependencies']):
FeatureDeprecated.single_use('hotdoc.generate_doc dependencies argument with custom_target',
'0.64.1', state.subproject, 'use `depends`', state.current_node)
builder = HotdocTargetBuilder(project_name, state, self.hotdoc, self.interpreter, kwargs)
target, install_script = builder.make_targets()
- targets = [target]
+ targets: T.List[T.Union[HotdocTarget, mesonlib.ExecutableSerialisation]] = [target]
if install_script:
targets.append(install_script)
- return ModuleReturnValue(targets[0], targets)
+ return ModuleReturnValue(target, targets)
-def initialize(interpreter):
+def initialize(interpreter: Interpreter) -> HotDocModule:
mod = HotDocModule(interpreter)
mod.interpreter.append_holder_map(HotdocTarget, HotdocTargetHolder)
return mod
diff --git a/mesonbuild/modules/i18n.py b/mesonbuild/modules/i18n.py
index 11dd9ef..c82e580 100644
--- a/mesonbuild/modules/i18n.py
+++ b/mesonbuild/modules/i18n.py
@@ -212,6 +212,7 @@ class I18nModule(ExtensionModule):
install=kwargs['install'],
install_dir=[kwargs['install_dir']] if kwargs['install_dir'] is not None else None,
install_tag=install_tag,
+ description='Merging translations for {}',
)
return ModuleReturnValue(ct, [ct])
@@ -304,6 +305,7 @@ class I18nModule(ExtensionModule):
# Bonus: the build tree has something usable as an uninstalled bindtextdomain() target dir.
install_dir=[path.join(install_dir, l, 'LC_MESSAGES')],
install_tag=['i18n'],
+ description='Building translation {}',
)
targets.append(gmotarget)
gmotargets.append(gmotarget)
@@ -390,6 +392,7 @@ class I18nModule(ExtensionModule):
install=kwargs['install'],
install_dir=[kwargs['install_dir']] if kwargs['install_dir'] is not None else None,
install_tag=install_tag,
+ description='Merging translations for {}',
)
return ModuleReturnValue(ct, [ct])
diff --git a/mesonbuild/modules/python.py b/mesonbuild/modules/python.py
index 879c548..c8af224 100644
--- a/mesonbuild/modules/python.py
+++ b/mesonbuild/modules/python.py
@@ -13,7 +13,7 @@
# limitations under the License.
from __future__ import annotations
-import copy, json, os, shutil
+import copy, json, os, shutil, re
import typing as T
from . import ExtensionModule, ModuleInfo
@@ -24,15 +24,15 @@ from ..build import known_shmod_kwargs, CustomTarget, CustomTargetIndex, BuildTa
from ..dependencies import NotFoundDependency
from ..dependencies.detect import get_dep_identifier, find_external_dependency
from ..dependencies.python import BasicPythonExternalProgram, python_factory, _PythonDependencyBase
-from ..interpreter import ExternalProgramHolder, extract_required_kwarg, permitted_dependency_kwargs
-from ..interpreter import primitives as P_OBJ
+from ..interpreter import extract_required_kwarg, permitted_dependency_kwargs, primitives as P_OBJ
+from ..interpreter.interpreterobjects import _ExternalProgramHolder
from ..interpreter.type_checking import NoneType, PRESERVE_PATH_KW, SHARED_MOD_KWS
from ..interpreterbase import (
noPosargs, noKwargs, permittedKwargs, ContainerTypeInfo,
InvalidArguments, typed_pos_args, typed_kwargs, KwargInfo,
FeatureNew, FeatureNewKwargs, disablerIfNotFound
)
-from ..mesonlib import MachineChoice
+from ..mesonlib import MachineChoice, OptionKey
from ..programs import ExternalProgram, NonExistingExternalProgram
if T.TYPE_CHECKING:
@@ -62,8 +62,10 @@ if T.TYPE_CHECKING:
subdir: NotRequired[T.Optional[str]]
+ MaybePythonProg = T.Union[NonExistingExternalProgram, 'PythonExternalProgram']
-mod_kwargs = {'subdir'}
+
+mod_kwargs = {'subdir', 'limited_api'}
mod_kwargs.update(known_shmod_kwargs)
mod_kwargs -= {'name_prefix', 'name_suffix'}
@@ -84,12 +86,12 @@ class PythonExternalProgram(BasicPythonExternalProgram):
self.purelib = self._get_path(state, 'purelib')
return ret
- def _get_path(self, state: T.Optional['ModuleState'], key: str) -> None:
+ def _get_path(self, state: T.Optional['ModuleState'], key: str) -> str:
rel_path = self.info['install_paths'][key][1:]
if not state:
# This happens only from run_project_tests.py
return rel_path
- value = state.get_option(f'{key}dir', module='python')
+ value = T.cast('str', state.get_option(f'{key}dir', module='python'))
if value:
if state.is_user_defined_option('install_env', module='python'):
raise mesonlib.MesonException(f'python.{key}dir and python.install_env are mutually exclusive')
@@ -112,16 +114,18 @@ class PythonExternalProgram(BasicPythonExternalProgram):
_PURE_KW = KwargInfo('pure', (bool, NoneType))
_SUBDIR_KW = KwargInfo('subdir', str, default='')
+_LIMITED_API_KW = KwargInfo('limited_api', str, default='', since='1.3.0')
_DEFAULTABLE_SUBDIR_KW = KwargInfo('subdir', (str, NoneType))
-class PythonInstallation(ExternalProgramHolder):
+class PythonInstallation(_ExternalProgramHolder['PythonExternalProgram']):
def __init__(self, python: 'PythonExternalProgram', interpreter: 'Interpreter'):
- ExternalProgramHolder.__init__(self, python, interpreter)
+ _ExternalProgramHolder.__init__(self, python, interpreter)
info = python.info
prefix = self.interpreter.environment.coredata.get_option(mesonlib.OptionKey('prefix'))
assert isinstance(prefix, str), 'for mypy'
self.variables = info['variables']
self.suffix = info['suffix']
+ self.limited_api_suffix = info['limited_api_suffix']
self.paths = info['paths']
self.pure = python.pure
self.platlib_install_path = os.path.join(prefix, python.platlib)
@@ -146,7 +150,7 @@ class PythonInstallation(ExternalProgramHolder):
@permittedKwargs(mod_kwargs)
@typed_pos_args('python.extension_module', str, varargs=(str, mesonlib.File, CustomTarget, CustomTargetIndex, GeneratedList, StructuredSources, ExtractedObjects, BuildTarget))
- @typed_kwargs('python.extension_module', *_MOD_KWARGS, _DEFAULTABLE_SUBDIR_KW, allow_unknown=True)
+ @typed_kwargs('python.extension_module', *_MOD_KWARGS, _DEFAULTABLE_SUBDIR_KW, _LIMITED_API_KW, allow_unknown=True)
def extension_module_method(self, args: T.Tuple[str, T.List[BuildTargetSource]], kwargs: ExtensionModuleKw) -> 'SharedModule':
if 'install_dir' in kwargs:
if kwargs['subdir'] is not None:
@@ -159,9 +163,11 @@ class PythonInstallation(ExternalProgramHolder):
kwargs['install_dir'] = self._get_install_dir_impl(False, subdir)
+ target_suffix = self.suffix
+
new_deps = mesonlib.extract_as_list(kwargs, 'dependencies')
- has_pydep = any(isinstance(dep, _PythonDependencyBase) for dep in new_deps)
- if not has_pydep:
+ pydep = next((dep for dep in new_deps if isinstance(dep, _PythonDependencyBase)), None)
+ if pydep is None:
pydep = self._dependency_method_impl({})
if not pydep.found():
raise mesonlib.MesonException('Python dependency not found')
@@ -169,15 +175,62 @@ class PythonInstallation(ExternalProgramHolder):
FeatureNew.single_use('python_installation.extension_module with implicit dependency on python',
'0.63.0', self.subproject, 'use python_installation.dependency()',
self.current_node)
+
+ limited_api_version = kwargs.pop('limited_api')
+ allow_limited_api = self.interpreter.environment.coredata.get_option(OptionKey('allow_limited_api', module='python'))
+ if limited_api_version != '' and allow_limited_api:
+
+ target_suffix = self.limited_api_suffix
+
+ limited_api_version_hex = self._convert_api_version_to_py_version_hex(limited_api_version, pydep.version)
+ limited_api_definition = f'-DPy_LIMITED_API={limited_api_version_hex}'
+
+ new_c_args = mesonlib.extract_as_list(kwargs, 'c_args')
+ new_c_args.append(limited_api_definition)
+ kwargs['c_args'] = new_c_args
+
+ new_cpp_args = mesonlib.extract_as_list(kwargs, 'cpp_args')
+ new_cpp_args.append(limited_api_definition)
+ kwargs['cpp_args'] = new_cpp_args
+
+ # When compiled under MSVC, Python's PC/pyconfig.h forcibly inserts pythonMAJOR.MINOR.lib
+ # into the linker path when not running in debug mode via a series #pragma comment(lib, "")
+ # directives. We manually override these here as this interferes with the intended
+ # use of the 'limited_api' kwarg
+ for_machine = self.interpreter.machine_from_native_kwarg(kwargs)
+ compilers = self.interpreter.environment.coredata.compilers[for_machine]
+ if any(compiler.get_id() == 'msvc' for compiler in compilers.values()):
+ pydep_copy = copy.copy(pydep)
+ pydep_copy.find_libpy_windows(self.env, limited_api=True)
+ if not pydep_copy.found():
+ raise mesonlib.MesonException('Python dependency supporting limited API not found')
+
+ new_deps.remove(pydep)
+ new_deps.append(pydep_copy)
+
+ pyver = pydep.version.replace('.', '')
+ python_windows_debug_link_exception = f'/NODEFAULTLIB:python{pyver}_d.lib'
+ python_windows_release_link_exception = f'/NODEFAULTLIB:python{pyver}.lib'
+
+ new_link_args = mesonlib.extract_as_list(kwargs, 'link_args')
+
+ is_debug = self.interpreter.environment.coredata.options[OptionKey('debug')].value
+ if is_debug:
+ new_link_args.append(python_windows_debug_link_exception)
+ else:
+ new_link_args.append(python_windows_release_link_exception)
+
+ kwargs['link_args'] = new_link_args
+
kwargs['dependencies'] = new_deps
# msys2's python3 has "-cpython-36m.dll", we have to be clever
# FIXME: explain what the specific cleverness is here
- split, suffix = self.suffix.rsplit('.', 1)
+ split, target_suffix = target_suffix.rsplit('.', 1)
args = (args[0] + split, args[1])
kwargs['name_prefix'] = ''
- kwargs['name_suffix'] = suffix
+ kwargs['name_suffix'] = target_suffix
if 'gnu_symbol_visibility' not in kwargs and \
(self.is_pypy or mesonlib.version_compare(self.version, '>=3.9')):
@@ -185,6 +238,22 @@ class PythonInstallation(ExternalProgramHolder):
return self.interpreter.build_target(self.current_node, args, kwargs, SharedModule)
+ def _convert_api_version_to_py_version_hex(self, api_version: str, detected_version: str) -> str:
+ python_api_version_format = re.compile(r'[0-9]\.[0-9]{1,2}')
+ decimal_match = python_api_version_format.fullmatch(api_version)
+ if not decimal_match:
+ raise InvalidArguments(f'Python API version invalid: "{api_version}".')
+ if mesonlib.version_compare(api_version, '<3.2'):
+ raise InvalidArguments(f'Python Limited API version invalid: {api_version} (must be at least 3.2)')
+ if mesonlib.version_compare(api_version, '>' + detected_version):
+ raise InvalidArguments(f'Python Limited API version too high: {api_version} (detected {detected_version})')
+
+ version_components = api_version.split('.')
+ major = int(version_components[0])
+ minor = int(version_components[1])
+
+ return '0x{:02x}{:02x}0000'.format(major, minor)
+
def _dependency_method_impl(self, kwargs: TYPE_kwargs) -> Dependency:
for_machine = self.interpreter.machine_from_native_kwarg(kwargs)
identifier = get_dep_identifier(self._full_path(), kwargs)
@@ -303,7 +372,7 @@ class PythonModule(ExtensionModule):
def __init__(self, interpreter: 'Interpreter') -> None:
super().__init__(interpreter)
- self.installations: T.Dict[str, ExternalProgram] = {}
+ self.installations: T.Dict[str, MaybePythonProg] = {}
self.methods.update({
'find_installation': self.find_installation,
})
@@ -377,7 +446,7 @@ class PythonModule(ExtensionModule):
else:
return None
- def _find_installation_impl(self, state: 'ModuleState', display_name: str, name_or_path: str, required: bool) -> ExternalProgram:
+ def _find_installation_impl(self, state: 'ModuleState', display_name: str, name_or_path: str, required: bool) -> MaybePythonProg:
if not name_or_path:
python = PythonExternalProgram('python3', mesonlib.python_command)
else:
@@ -420,7 +489,7 @@ class PythonModule(ExtensionModule):
_PURE_KW.evolve(default=True, since='0.64.0'),
)
def find_installation(self, state: 'ModuleState', args: T.Tuple[T.Optional[str]],
- kwargs: 'FindInstallationKw') -> ExternalProgram:
+ kwargs: 'FindInstallationKw') -> MaybePythonProg:
feature_check = FeatureNew('Passing "feature" option to find_installation', '0.48.0')
disabled, required, feature = extract_required_kwarg(kwargs, state.subproject, feature_check)
@@ -482,6 +551,7 @@ class PythonModule(ExtensionModule):
raise mesonlib.MesonException('{} is missing modules: {}'.format(name_or_path or 'python', ', '.join(missing_modules)))
return NonExistingExternalProgram(python.name)
else:
+ assert isinstance(python, PythonExternalProgram), 'for mypy'
python = copy.copy(python)
python.pure = kwargs['pure']
python.run_bytecompile.setdefault(python.info['version'], False)
diff --git a/mesonbuild/modules/qt.py b/mesonbuild/modules/qt.py
index 8555888..20ff111 100644
--- a/mesonbuild/modules/qt.py
+++ b/mesonbuild/modules/qt.py
@@ -348,6 +348,7 @@ class QtBaseModule(ExtensionModule):
[f'{name}.cpp'],
depend_files=qrc_deps,
depfile=f'{name}.d',
+ description='Compiling Qt resources {}',
)
targets.append(res_target)
else:
@@ -368,6 +369,7 @@ class QtBaseModule(ExtensionModule):
[f'{name}.cpp'],
depend_files=qrc_deps,
depfile=f'{name}.d',
+ description='Compiling Qt resources {}',
)
targets.append(res_target)
@@ -600,6 +602,7 @@ class QtBaseModule(ExtensionModule):
install_dir=[kwargs['install_dir']],
install_tag=['i18n'],
build_by_default=kwargs['build_by_default'],
+ description='Compiling Qt translations {}',
)
translations.append(lrelease_target)
if qresource:
diff --git a/mesonbuild/modules/rust.py b/mesonbuild/modules/rust.py
index 4f5494a..0bda2c2 100644
--- a/mesonbuild/modules/rust.py
+++ b/mesonbuild/modules/rust.py
@@ -263,6 +263,7 @@ class RustModule(ExtensionModule):
extra_depends=depends,
depend_files=depend_files,
backend=state.backend,
+ description='Generating bindings for Rust {}',
)
return ModuleReturnValue([target], [target])
diff --git a/mesonbuild/modules/windows.py b/mesonbuild/modules/windows.py
index f9c7c57..b7cdeb3 100644
--- a/mesonbuild/modules/windows.py
+++ b/mesonbuild/modules/windows.py
@@ -206,6 +206,7 @@ class WindowsModule(ExtensionModule):
depfile=depfile,
depend_files=wrc_depend_files,
extra_depends=wrc_depends,
+ description='Compiling Windows resource {}',
))
return ModuleReturnValue(res_targets, [res_targets])
diff --git a/mesonbuild/mparser.py b/mesonbuild/mparser.py
index 85f1ef3..fb4e433 100644
--- a/mesonbuild/mparser.py
+++ b/mesonbuild/mparser.py
@@ -163,7 +163,7 @@ class Lexer:
col = 0
while loc < len(self.code):
matched = False
- value = None # type: T.Union[str, bool, int]
+ value: T.Union[str, bool, int] = None
for (tid, reg) in self.token_specification:
mo = reg.match(self.code, loc)
if mo:
@@ -610,7 +610,7 @@ class Parser:
def __init__(self, code: str, filename: str):
self.lexer = Lexer(code)
self.stream = self.lexer.lex(filename)
- self.current = Token('eof', '', 0, 0, 0, (0, 0), None) # type: Token
+ self.current: Token = Token('eof', '', 0, 0, 0, (0, 0), None)
self.getsym()
self.in_ternary = False
@@ -811,7 +811,7 @@ class Parser:
return EmptyNode(self.current.lineno, self.current.colno, self.current.filename)
def key_values(self) -> ArgumentNode:
- s = self.statement() # type: BaseNode
+ s = self.statement()
a = ArgumentNode(self.current)
while not isinstance(s, EmptyNode):
@@ -828,7 +828,7 @@ class Parser:
return a
def args(self) -> ArgumentNode:
- s = self.statement() # type: BaseNode
+ s = self.statement()
a = ArgumentNode(self.current)
while not isinstance(s, EmptyNode):
@@ -875,7 +875,7 @@ class Parser:
self.expect('id')
assert isinstance(t.value, str)
varname = t
- varnames = [t.value] # type: T.List[str]
+ varnames = [t.value]
if self.accept('comma'):
t = self.current
diff --git a/mesonbuild/msetup.py b/mesonbuild/msetup.py
index 73c9fb3..4ff6000 100644
--- a/mesonbuild/msetup.py
+++ b/mesonbuild/msetup.py
@@ -55,15 +55,15 @@ def add_arguments(parser: argparse.ArgumentParser) -> None:
help='Wipe build directory and reconfigure using previous command line options. ' +
'Useful when build directory got corrupted, or when rebuilding with a ' +
'newer version of meson.')
+ parser.add_argument('--clearcache', action='store_true', default=False,
+ help='Clear cached state (e.g. found dependencies). Since 1.3.0.')
parser.add_argument('builddir', nargs='?', default=None)
parser.add_argument('sourcedir', nargs='?', default=None)
class MesonApp:
def __init__(self, options: argparse.Namespace) -> None:
- (self.source_dir, self.build_dir) = self.validate_dirs(options.builddir,
- options.sourcedir,
- options.reconfigure,
- options.wipe)
+ self.options = options
+ (self.source_dir, self.build_dir) = self.validate_dirs()
if options.wipe:
# Make a copy of the cmd line file to make sure we can always
# restore that file if anything bad happens. For example if
@@ -96,8 +96,6 @@ class MesonApp:
os.makedirs(os.path.dirname(f), exist_ok=True)
shutil.move(b, f)
- self.options = options
-
def has_build_file(self, dirname: str) -> bool:
fname = os.path.join(dirname, environment.build_filename)
return os.path.exists(fname)
@@ -144,8 +142,8 @@ class MesonApp:
with open(os.path.join(build_dir, '.hgignore'), 'w', encoding='utf-8') as ofile:
ofile.write(hg_ignore_file)
- def validate_dirs(self, dir1: T.Optional[str], dir2: T.Optional[str], reconfigure: bool, wipe: bool) -> T.Tuple[str, str]:
- (src_dir, build_dir) = self.validate_core_dirs(dir1, dir2)
+ def validate_dirs(self) -> T.Tuple[str, str]:
+ (src_dir, build_dir) = self.validate_core_dirs(self.options.builddir, self.options.sourcedir)
if Path(build_dir) in Path(src_dir).parents:
raise MesonException(f'Build directory {build_dir} cannot be a parent of source directory {src_dir}')
if not os.listdir(build_dir):
@@ -155,21 +153,17 @@ class MesonApp:
has_valid_build = os.path.exists(os.path.join(priv_dir, 'coredata.dat'))
has_partial_build = os.path.isdir(priv_dir)
if has_valid_build:
- if not reconfigure and not wipe:
+ if not self.options.reconfigure and not self.options.wipe:
print('Directory already configured.\n\n'
'Just run your build command (e.g. ninja) and Meson will regenerate as necessary.\n'
- 'If ninja fails, run "ninja reconfigure" or "meson setup --reconfigure"\n'
- 'to force Meson to regenerate.\n\n'
+ 'Run "meson setup --reconfigure to force Meson to regenerate.\n\n'
'If build failures persist, run "meson setup --wipe" to rebuild from scratch\n'
- 'using the same options as passed when configuring the build.\n'
- 'To change option values, run "meson configure" instead.')
- # FIXME: This returns success and ignores new option values from CLI.
- # We should either make this a hard error, or update options and
- # return success.
- # Note that making this an error would not be backward compatible (and also isn't
- # universally agreed on): https://github.com/mesonbuild/meson/pull/4249.
+ 'using the same options as passed when configuring the build.')
+ if self.options.cmd_line_options:
+ from . import mconf
+ raise SystemExit(mconf.run_impl(self.options, build_dir))
raise SystemExit(0)
- elif not has_partial_build and wipe:
+ elif not has_partial_build and self.options.wipe:
raise MesonException(f'Directory is not empty and does not contain a previous build:\n{build_dir}')
return src_dir, build_dir
@@ -179,6 +173,8 @@ class MesonApp:
mlog.initialize(env.get_log_dir(), self.options.fatal_warnings)
if self.options.profile:
mlog.set_timestamp_start(time.monotonic())
+ if self.options.clearcache:
+ env.coredata.clear_cache()
with mesonlib.BuildDirLock(self.build_dir):
return self._generate(env, capture, vslite_ctx)
diff --git a/mesonbuild/mtest.py b/mesonbuild/mtest.py
index 0e1086c..1298cc0 100644
--- a/mesonbuild/mtest.py
+++ b/mesonbuild/mtest.py
@@ -521,10 +521,10 @@ class ConsoleLogger(TestLogger):
RTRI = "\u25B6 "
def __init__(self) -> None:
- self.running_tests = OrderedSet() # type: OrderedSet['TestRun']
- self.progress_test = None # type: T.Optional['TestRun']
- self.progress_task = None # type: T.Optional[asyncio.Future]
- self.max_left_width = 0 # type: int
+ self.running_tests: OrderedSet['TestRun'] = OrderedSet()
+ self.progress_test: T.Optional['TestRun'] = None
+ self.progress_task: T.Optional[asyncio.Future] = None
+ self.max_left_width = 0
self.stop = False
# TODO: before 3.10 this cannot be created immediately, because
# it will create a new event loop
@@ -770,14 +770,16 @@ class TextLogfileBuilder(TestFileLogger):
class JsonLogfileBuilder(TestFileLogger):
def log(self, harness: 'TestHarness', result: 'TestRun') -> None:
- jresult = {'name': result.name,
- 'stdout': result.stdo,
- 'result': result.res.value,
- 'starttime': result.starttime,
- 'duration': result.duration,
- 'returncode': result.returncode,
- 'env': result.env,
- 'command': result.cmd} # type: T.Dict[str, T.Any]
+ jresult: T.Dict[str, T.Any] = {
+ 'name': result.name,
+ 'stdout': result.stdo,
+ 'result': result.res.value,
+ 'starttime': result.starttime,
+ 'duration': result.duration,
+ 'returncode': result.returncode,
+ 'env': result.env,
+ 'command': result.cmd,
+ }
if result.stde:
jresult['stderr'] = result.stde
self.file.write(json.dumps(jresult) + '\n')
@@ -804,7 +806,7 @@ class JunitBuilder(TestLogger):
self.filename = filename
self.root = et.Element(
'testsuites', tests='0', errors='0', failures='0')
- self.suites = {} # type: T.Dict[str, et.Element]
+ self.suites: T.Dict[str, et.Element] = {}
def log(self, harness: 'TestHarness', test: 'TestRun') -> None:
"""Log a single test case."""
@@ -867,10 +869,10 @@ class JunitBuilder(TestLogger):
et.SubElement(testcase, 'system-out').text = subtest.explanation
if test.stdo:
out = et.SubElement(suite, 'system-out')
- out.text = test.stdo.rstrip()
+ out.text = replace_unencodable_xml_chars(test.stdo.rstrip())
if test.stde:
err = et.SubElement(suite, 'system-err')
- err.text = test.stde.rstrip()
+ err.text = replace_unencodable_xml_chars(test.stde.rstrip())
else:
if test.project not in self.suites:
suite = self.suites[test.project] = et.Element(
@@ -893,10 +895,10 @@ class JunitBuilder(TestLogger):
suite.attrib['failures'] = str(int(suite.attrib['failures']) + 1)
if test.stdo:
out = et.SubElement(testcase, 'system-out')
- out.text = test.stdo.rstrip()
+ out.text = replace_unencodable_xml_chars(test.stdo.rstrip())
if test.stde:
err = et.SubElement(testcase, 'system-err')
- err.text = test.stde.rstrip()
+ err.text = replace_unencodable_xml_chars(test.stde.rstrip())
async def finish(self, harness: 'TestHarness') -> None:
"""Calculate total test counts and write out the xml result."""
@@ -922,24 +924,24 @@ class TestRun:
name: str, timeout: T.Optional[int], is_parallel: bool, verbose: bool):
self.res = TestResult.PENDING
self.test = test
- self._num = None # type: T.Optional[int]
+ self._num: T.Optional[int] = None
self.name = name
self.timeout = timeout
- self.results = [] # type: T.List[TAPParser.Test]
- self.returncode = None # type: T.Optional[int]
- self.starttime = None # type: T.Optional[float]
- self.duration = None # type: T.Optional[float]
+ self.results: T.List[TAPParser.Test] = []
+ self.returncode: T.Optional[int] = None
+ self.starttime: T.Optional[float] = None
+ self.duration: T.Optional[float] = None
self.stdo = ''
self.stde = ''
self.additional_error = ''
- self.cmd = None # type: T.Optional[T.List[str]]
- self.env = test_env # type: T.Dict[str, str]
+ self.cmd: T.Optional[T.List[str]] = None
+ self.env = test_env
self.should_fail = test.should_fail
self.project = test.project_name
- self.junit = None # type: T.Optional[et.ElementTree]
+ self.junit: T.Optional[et.ElementTree] = None
self.is_parallel = is_parallel
self.verbose = verbose
- self.warnings = [] # type: T.List[str]
+ self.warnings: T.List[str] = []
def start(self, cmd: T.List[str]) -> None:
self.res = TestResult.RUNNING
@@ -1086,7 +1088,7 @@ class TestRunTAP(TestRun):
async def parse(self, harness: 'TestHarness', lines: T.AsyncIterator[str]) -> None:
res = None
- warnings = [] # type: T.List[TAPParser.UnknownLine]
+ warnings: T.List[TAPParser.UnknownLine] = []
version = 12
async for i in TAPParser().parse_async(lines):
@@ -1180,9 +1182,9 @@ def decode(stream: T.Union[None, bytes]) -> str:
if stream is None:
return ''
try:
- return replace_unencodable_xml_chars(stream.decode('utf-8'))
+ return stream.decode('utf-8')
except UnicodeDecodeError:
- return replace_unencodable_xml_chars(stream.decode('iso-8859-1', errors='ignore'))
+ return stream.decode('iso-8859-1', errors='ignore')
async def read_decode(reader: asyncio.StreamReader,
queue: T.Optional['asyncio.Queue[T.Optional[str]]'],
@@ -1285,9 +1287,9 @@ class TestSubprocess:
self.stderr = stderr
self.stdo_task: T.Optional[asyncio.Task[None]] = None
self.stde_task: T.Optional[asyncio.Task[None]] = None
- self.postwait_fn = postwait_fn # type: T.Callable[[], None]
- self.all_futures = [] # type: T.List[asyncio.Future]
- self.queue = None # type: T.Optional[asyncio.Queue[T.Optional[str]]]
+ self.postwait_fn = postwait_fn
+ self.all_futures: T.List[asyncio.Future] = []
+ self.queue: T.Optional[asyncio.Queue[T.Optional[str]]] = None
def stdout_lines(self) -> T.AsyncIterator[str]:
self.queue = asyncio.Queue()
@@ -1533,7 +1535,7 @@ class SingleTestRunner:
if not self.options.split and not self.runobj.needs_parsing \
else asyncio.subprocess.PIPE
- extra_cmd = [] # type: T.List[str]
+ extra_cmd: T.List[str] = []
if self.test.protocol is TestProtocol.GTEST:
gtestname = self.test.name
if self.test.workdir:
@@ -1568,7 +1570,7 @@ class SingleTestRunner:
class TestHarness:
def __init__(self, options: argparse.Namespace):
self.options = options
- self.collected_failures = [] # type: T.List[TestRun]
+ self.collected_failures: T.List[TestRun] = []
self.fail_count = 0
self.expectedfail_count = 0
self.unexpectedpass_count = 0
@@ -1578,13 +1580,13 @@ class TestHarness:
self.test_count = 0
self.name_max_len = 0
self.is_run = False
- self.loggers = [] # type: T.List[TestLogger]
+ self.loggers: T.List[TestLogger] = []
self.console_logger = ConsoleLogger()
self.loggers.append(self.console_logger)
self.need_console = False
- self.ninja = None # type: T.List[str]
+ self.ninja: T.Optional[T.List[str]] = None
- self.logfile_base = None # type: T.Optional[str]
+ self.logfile_base: T.Optional[str] = None
if self.options.logbase and not self.options.gdb:
namebase = None
self.logfile_base = os.path.join(self.options.wd, 'meson-logs', self.options.logbase)
@@ -1807,7 +1809,7 @@ class TestHarness:
startdir = os.getcwd()
try:
os.chdir(self.options.wd)
- runners = [] # type: T.List[SingleTestRunner]
+ runners: T.List[SingleTestRunner] = []
for i in range(self.options.repeat):
runners.extend(self.get_test_runner(test) for test in tests)
if i == 0:
@@ -1962,7 +1964,7 @@ class TestHarness:
@staticmethod
def get_wrapper(options: argparse.Namespace) -> T.List[str]:
- wrap = [] # type: T.List[str]
+ wrap: T.List[str] = []
if options.gdb:
wrap = [options.gdb_path, '--quiet']
if options.repeat > 1:
@@ -2005,10 +2007,10 @@ class TestHarness:
async def _run_tests(self, runners: T.List[SingleTestRunner]) -> None:
semaphore = asyncio.Semaphore(self.options.num_processes)
- futures = deque() # type: T.Deque[asyncio.Future]
- running_tests = {} # type: T.Dict[asyncio.Future, str]
+ futures: T.Deque[asyncio.Future] = deque()
+ running_tests: T.Dict[asyncio.Future, str] = {}
interrupted = False
- ctrlc_times = deque(maxlen=MAX_CTRLC) # type: T.Deque[float]
+ ctrlc_times: T.Deque[float] = deque(maxlen=MAX_CTRLC)
loop = asyncio.get_running_loop()
async def run_test(test: SingleTestRunner) -> None:
@@ -2115,9 +2117,9 @@ def rebuild_deps(ninja: T.List[str], wd: str, tests: T.List[TestSerialisation])
assert len(ninja) > 0
- depends = set() # type: T.Set[str]
- targets = set() # type: T.Set[str]
- intro_targets = {} # type: T.Dict[str, T.List[str]]
+ depends: T.Set[str] = set()
+ targets: T.Set[str] = set()
+ intro_targets: T.Dict[str, T.List[str]] = {}
for target in load_info_file(get_infodir(wd), kind='targets'):
intro_targets[target['id']] = [
convert_path_to_target(f)
diff --git a/mesonbuild/scripts/cmake_run_ctgt.py b/mesonbuild/scripts/cmake_run_ctgt.py
index 755530a..df3f361 100755
--- a/mesonbuild/scripts/cmake_run_ctgt.py
+++ b/mesonbuild/scripts/cmake_run_ctgt.py
@@ -9,7 +9,7 @@ from pathlib import Path
import typing as T
def run(argsv: T.List[str]) -> int:
- commands = [[]] # type: T.List[T.List[str]]
+ commands: T.List[T.List[str]] = [[]]
SEPARATOR = ';;;'
# Generate CMD parameters
diff --git a/mesonbuild/scripts/depfixer.py b/mesonbuild/scripts/depfixer.py
index b9c58fe..593efd9 100644
--- a/mesonbuild/scripts/depfixer.py
+++ b/mesonbuild/scripts/depfixer.py
@@ -123,8 +123,8 @@ class Elf(DataSizes):
def __init__(self, bfile: str, verbose: bool = True) -> None:
self.bfile = bfile
self.verbose = verbose
- self.sections = [] # type: T.List[SectionHeader]
- self.dynamic = [] # type: T.List[DynamicEntry]
+ self.sections: T.List[SectionHeader] = []
+ self.dynamic: T.List[DynamicEntry] = []
self.open_bf(bfile)
try:
(self.ptrsize, self.is_le) = self.detect_elf_type()
@@ -329,7 +329,7 @@ class Elf(DataSizes):
old_rpath = self.read_str()
# Some rpath entries may come from multiple sources.
# Only add each one once.
- new_rpaths = OrderedSet() # type: OrderedSet[bytes]
+ new_rpaths: OrderedSet[bytes] = OrderedSet()
if new_rpath:
new_rpaths.update(new_rpath.split(b':'))
if old_rpath:
diff --git a/mesonbuild/scripts/python_info.py b/mesonbuild/scripts/python_info.py
index 9c3a079..0f7787c 100755
--- a/mesonbuild/scripts/python_info.py
+++ b/mesonbuild/scripts/python_info.py
@@ -65,6 +65,20 @@ elif sys.version_info < (3, 8, 7):
else:
suffix = variables.get('EXT_SUFFIX')
+limited_api_suffix = None
+if sys.version_info >= (3, 2):
+ try:
+ from importlib.machinery import EXTENSION_SUFFIXES
+ limited_api_suffix = EXTENSION_SUFFIXES[1]
+ except Exception:
+ pass
+
+# pypy supports modules targeting the limited API but
+# does not use a special suffix to distinguish them:
+# https://doc.pypy.org/en/latest/cpython_differences.html#permitted-abi-tags-in-extensions
+if '__pypy__' in sys.builtin_module_names:
+ limited_api_suffix = suffix
+
print(json.dumps({
'variables': variables,
'paths': paths,
@@ -76,4 +90,5 @@ print(json.dumps({
'is_venv': sys.prefix != variables['base_prefix'],
'link_libpython': links_against_libpython(),
'suffix': suffix,
+ 'limited_api_suffix': limited_api_suffix,
}))
diff --git a/mesonbuild/utils/universal.py b/mesonbuild/utils/universal.py
index 5b7750c..a39825f 100644
--- a/mesonbuild/utils/universal.py
+++ b/mesonbuild/utils/universal.py
@@ -1207,6 +1207,8 @@ def do_replacement(regex: T.Pattern[str], line: str,
var, _ = confdata.get(varname)
if isinstance(var, str):
var_str = var
+ elif isinstance(var, bool):
+ var_str = str(int(var))
elif isinstance(var, int):
var_str = str(var)
else:
@@ -1220,8 +1222,17 @@ def do_replacement(regex: T.Pattern[str], line: str,
def do_define(regex: T.Pattern[str], line: str, confdata: 'ConfigurationData',
variable_format: Literal['meson', 'cmake', 'cmake@'], subproject: T.Optional[SubProject] = None) -> str:
+ cmake_bool_define = False
+ if variable_format != "meson":
+ cmake_bool_define = "cmakedefine01" in line
+
def get_cmake_define(line: str, confdata: 'ConfigurationData') -> str:
arr = line.split()
+
+ if cmake_bool_define:
+ (v, desc) = confdata.get(arr[1])
+ return str(int(bool(v)))
+
define_value: T.List[str] = []
for token in arr[2:]:
try:
@@ -1243,22 +1254,29 @@ def do_define(regex: T.Pattern[str], line: str, confdata: 'ConfigurationData',
try:
v, _ = confdata.get(varname)
except KeyError:
- return '/* #undef %s */\n' % varname
- if isinstance(v, bool):
- if v:
- return '#define %s\n' % varname
+ if cmake_bool_define:
+ return '#define %s 0\n' % varname
else:
- return '#undef %s\n' % varname
- elif isinstance(v, int):
- return '#define %s %d\n' % (varname, v)
- elif isinstance(v, str):
+ return '/* #undef %s */\n' % varname
+
+ if isinstance(v, str) or variable_format != "meson":
if variable_format == 'meson':
result = v
else:
+ if not cmake_bool_define and not v:
+ return '/* #undef %s */\n' % varname
+
result = get_cmake_define(line, confdata)
- result = f'#define {varname} {result}\n'
+ result = f'#define {varname} {result}'.strip() + '\n'
result, _ = do_replacement(regex, result, variable_format, confdata)
return result
+ elif isinstance(v, bool):
+ if v:
+ return '#define %s\n' % varname
+ else:
+ return '#undef %s\n' % varname
+ elif isinstance(v, int):
+ return '#define %s %d\n' % (varname, v)
else:
raise MesonException('#mesondefine argument "%s" is of unknown type.' % varname)
@@ -1295,7 +1313,7 @@ def do_conf_str(src: str, data: T.List[str], confdata: 'ConfigurationData',
# during substitution so we can warn the user to use the `copy:` kwarg.
confdata_useless = not confdata.keys()
for line in data:
- if line.startswith(search_token):
+ if line.lstrip().startswith(search_token):
confdata_useless = False
line = do_define(regex, line, confdata, variable_format, subproject)
else:
@@ -1343,7 +1361,7 @@ CONF_NASM_PRELUDE = '''; Autogenerated by the Meson build system.
'''
-def dump_conf_header(ofilename: str, cdata: 'ConfigurationData', output_format: T.Literal['c', 'nasm']) -> None:
+def dump_conf_header(ofilename: str, cdata: 'ConfigurationData', output_format: Literal['c', 'nasm']) -> None:
if output_format == 'c':
prelude = CONF_C_PRELUDE
prefix = '#'
@@ -1508,7 +1526,7 @@ def Popen_safe_legacy(args: T.List[str], write: T.Optional[str] = None,
**kwargs: T.Any) -> T.Tuple['subprocess.Popen[str]', str, str]:
p = subprocess.Popen(args, universal_newlines=False, close_fds=False,
stdin=stdin, stdout=stdout, stderr=stderr, **kwargs)
- input_ = None # type: T.Optional[bytes]
+ input_: T.Optional[bytes] = None
if write is not None:
input_ = write.encode('utf-8')
o, e = p.communicate(input_)
@@ -1529,7 +1547,13 @@ def Popen_safe_logged(args: T.List[str], msg: str = 'Called', **kwargs: T.Any) -
'''
Wrapper around Popen_safe that assumes standard piped o/e and logs this to the meson log.
'''
- p, o, e = Popen_safe(args, **kwargs)
+ try:
+ p, o, e = Popen_safe(args, **kwargs)
+ except Exception as excp:
+ mlog.debug('-----------')
+ mlog.debug(f'{msg}: `{join_args(args)}` -> {excp}')
+ raise
+
rc, out, err = p.returncode, o.strip(), e.strip()
mlog.debug('-----------')
mlog.debug(f'{msg}: `{join_args(args)}` -> {rc}')
@@ -2387,22 +2411,22 @@ class OptionKey:
return self.type is OptionType.BASE
-def pickle_load(filename: str, object_name: str, object_type: T.Type[_PL]) -> _PL:
- load_fail_msg = f'{object_name} file {filename!r} is corrupted. Try with a fresh build tree.'
+def pickle_load(filename: str, object_name: str, object_type: T.Type[_PL], suggest_reconfigure: bool = True) -> _PL:
+ load_fail_msg = f'{object_name} file {filename!r} is corrupted.'
+ extra_msg = ' Consider reconfiguring the directory with "meson setup --reconfigure".' if suggest_reconfigure else ''
try:
with open(filename, 'rb') as f:
obj = pickle.load(f)
except (pickle.UnpicklingError, EOFError):
- raise MesonException(load_fail_msg)
+ raise MesonException(load_fail_msg + extra_msg)
except (TypeError, ModuleNotFoundError, AttributeError):
- build_dir = os.path.dirname(os.path.dirname(filename))
raise MesonException(
f"{object_name} file {filename!r} references functions or classes that don't "
"exist. This probably means that it was generated with an old "
- "version of meson. Try running from the source directory "
- f'meson setup {build_dir} --wipe')
+ "version of meson." + extra_msg)
+
if not isinstance(obj, object_type):
- raise MesonException(load_fail_msg)
+ raise MesonException(load_fail_msg + extra_msg)
# Because these Protocols are not available at runtime (and cannot be made
# available at runtime until we drop support for Python < 3.8), we have to
@@ -2416,7 +2440,7 @@ def pickle_load(filename: str, object_name: str, object_type: T.Type[_PL]) -> _P
from ..coredata import version as coredata_version
from ..coredata import major_versions_differ, MesonVersionMismatchException
if major_versions_differ(version, coredata_version):
- raise MesonVersionMismatchException(version, coredata_version)
+ raise MesonVersionMismatchException(version, coredata_version, extra_msg)
return obj
diff --git a/mesonbuild/utils/vsenv.py b/mesonbuild/utils/vsenv.py
index 550a8cf..5a02379 100644
--- a/mesonbuild/utils/vsenv.py
+++ b/mesonbuild/utils/vsenv.py
@@ -6,6 +6,7 @@ import json
import pathlib
import shutil
import tempfile
+import locale
from .. import mlog
from .core import MesonException
@@ -93,7 +94,8 @@ def _setup_vsenv(force: bool) -> bool:
bat_file.write(bat_contents)
bat_file.flush()
bat_file.close()
- bat_output = subprocess.check_output(bat_file.name, universal_newlines=True)
+ bat_output = subprocess.check_output(bat_file.name, universal_newlines=True,
+ encoding=locale.getpreferredencoding(False))
os.unlink(bat_file.name)
bat_lines = bat_output.split('\n')
bat_separator_seen = False
diff --git a/mesonbuild/wrap/wrap.py b/mesonbuild/wrap/wrap.py
index 2b0a0ba..c0dd01c 100644
--- a/mesonbuild/wrap/wrap.py
+++ b/mesonbuild/wrap/wrap.py
@@ -145,11 +145,11 @@ class PackageDefinition:
def __init__(self, fname: str, subproject: str = ''):
self.filename = fname
self.subproject = SubProject(subproject)
- self.type = None # type: T.Optional[str]
- self.values = {} # type: T.Dict[str, str]
- self.provided_deps = {} # type: T.Dict[str, T.Optional[str]]
- self.provided_programs = [] # type: T.List[str]
- self.diff_files = [] # type: T.List[Path]
+ self.type: T.Optional[str] = None
+ self.values: T.Dict[str, str] = {}
+ self.provided_deps: T.Dict[str, T.Optional[str]] = {}
+ self.provided_programs: T.List[str] = []
+ self.diff_files: T.List[Path] = []
self.basename = os.path.basename(fname)
self.has_wrap = self.basename.endswith('.wrap')
self.name = self.basename[:-5] if self.has_wrap else self.basename
@@ -290,10 +290,10 @@ class Resolver:
def __post_init__(self) -> None:
self.subdir_root = os.path.join(self.source_dir, self.subdir)
self.cachedir = os.path.join(self.subdir_root, 'packagecache')
- self.wraps = {} # type: T.Dict[str, PackageDefinition]
+ self.wraps: T.Dict[str, PackageDefinition] = {}
self.netrc: T.Optional[netrc] = None
- self.provided_deps = {} # type: T.Dict[str, PackageDefinition]
- self.provided_programs = {} # type: T.Dict[str, PackageDefinition]
+ self.provided_deps: T.Dict[str, PackageDefinition] = {}
+ self.provided_programs: T.Dict[str, PackageDefinition] = {}
self.wrapdb: T.Dict[str, T.Any] = {}
self.wrapdb_provided_deps: T.Dict[str, str] = {}
self.wrapdb_provided_programs: T.Dict[str, str] = {}
@@ -555,7 +555,7 @@ class Resolver:
revno = self.wrap.get('revision')
checkout_cmd = ['-c', 'advice.detachedHead=false', 'checkout', revno, '--']
is_shallow = False
- depth_option = [] # type: T.List[str]
+ depth_option: T.List[str] = []
if self.wrap.values.get('depth', '') != '':
is_shallow = True
depth_option = ['--depth', self.wrap.values.get('depth')]