Diffstat (limited to 'mesonbuild')
-rw-r--r--  mesonbuild/arglist.py | 331
-rw-r--r--  mesonbuild/ast/__init__.py | 3
-rw-r--r--  mesonbuild/ast/interpreter.py | 5
-rw-r--r--  mesonbuild/ast/introspection.py | 2
-rw-r--r--  mesonbuild/ast/printer.py | 160
-rw-r--r--  mesonbuild/ast/visitor.py | 3
-rw-r--r--  mesonbuild/backend/backends.py | 253
-rw-r--r--  mesonbuild/backend/ninjabackend.py | 476
-rw-r--r--  mesonbuild/backend/vs2010backend.py | 79
-rw-r--r--  mesonbuild/build.py | 57
-rw-r--r--  mesonbuild/cmake/__init__.py | 5
-rw-r--r--  mesonbuild/cmake/common.py | 95
-rwxr-xr-x  mesonbuild/cmake/data/run_ctgt.py | 96
-rw-r--r--  mesonbuild/cmake/executor.py | 51
-rw-r--r--  mesonbuild/cmake/interpreter.py | 77
-rw-r--r--  mesonbuild/cmake/traceparser.py | 26
-rw-r--r--  mesonbuild/compilers/__init__.py | 2
-rw-r--r--  mesonbuild/compilers/c.py | 5
-rw-r--r--  mesonbuild/compilers/c_function_attributes.py | 2
-rw-r--r--  mesonbuild/compilers/compilers.py | 394
-rw-r--r--  mesonbuild/compilers/cpp.py | 11
-rw-r--r--  mesonbuild/compilers/cuda.py | 10
-rw-r--r--  mesonbuild/compilers/d.py | 28
-rw-r--r--  mesonbuild/compilers/fortran.py | 14
-rw-r--r--  mesonbuild/compilers/mixins/arm.py | 28
-rw-r--r--  mesonbuild/compilers/mixins/clang.py | 17
-rw-r--r--  mesonbuild/compilers/mixins/clike.py | 185
-rw-r--r--  mesonbuild/compilers/mixins/gnu.py | 2
-rw-r--r--  mesonbuild/compilers/mixins/islinker.py | 7
-rw-r--r--  mesonbuild/compilers/mixins/visualstudio.py | 5
-rw-r--r--  mesonbuild/compilers/objc.py | 2
-rw-r--r--  mesonbuild/compilers/objcpp.py | 2
-rw-r--r--  mesonbuild/coredata.py | 291
-rw-r--r--  mesonbuild/dependencies/base.py | 164
-rw-r--r--  mesonbuild/dependencies/boost.py | 319
-rw-r--r--  mesonbuild/dependencies/cuda.py | 16
-rw-r--r--  mesonbuild/dependencies/misc.py | 54
-rw-r--r--  mesonbuild/dependencies/ui.py | 61
-rw-r--r--  mesonbuild/envconfig.py | 83
-rw-r--r--  mesonbuild/environment.py | 272
-rw-r--r--  mesonbuild/interpreter.py | 486
-rw-r--r--  mesonbuild/interpreterbase.py | 106
-rw-r--r--  mesonbuild/linkers.py | 105
-rw-r--r--  mesonbuild/mcompile.py | 308
-rw-r--r--  mesonbuild/mconf.py | 31
-rw-r--r--  mesonbuild/mdist.py | 4
-rw-r--r--  mesonbuild/mesondata.py | 374
-rw-r--r--  mesonbuild/mesonlib.py | 48
-rw-r--r--  mesonbuild/minit.py | 6
-rw-r--r--  mesonbuild/minstall.py | 2
-rw-r--r--  mesonbuild/mintro.py | 52
-rw-r--r--  mesonbuild/mlog.py | 33
-rw-r--r--  mesonbuild/modules/__init__.py | 11
-rw-r--r--  mesonbuild/modules/cmake.py | 113
-rw-r--r--  mesonbuild/modules/gnome.py | 134
-rw-r--r--  mesonbuild/modules/keyval.py (renamed from mesonbuild/modules/unstable_kconfig.py) | 10
-rw-r--r--  mesonbuild/modules/pkgconfig.py | 126
-rw-r--r--  mesonbuild/modules/python.py | 12
-rw-r--r--  mesonbuild/modules/qt.py | 58
-rw-r--r--  mesonbuild/modules/qt4.py | 3
-rw-r--r--  mesonbuild/modules/qt5.py | 3
-rw-r--r--  mesonbuild/modules/windows.py | 2
-rw-r--r--  mesonbuild/mparser.py | 12
-rw-r--r--  mesonbuild/msetup.py | 2
-rw-r--r--  mesonbuild/mtest.py | 281
-rw-r--r--  mesonbuild/optinterpreter.py | 21
-rwxr-xr-x  mesonbuild/scripts/cmake_run_ctgt.py | 100
-rw-r--r--  mesonbuild/scripts/coverage.py | 46
-rw-r--r--  mesonbuild/scripts/depfixer.py | 32
-rw-r--r--  mesonbuild/scripts/gtkdochelper.py | 8
-rw-r--r--  mesonbuild/scripts/symbolextractor.py | 35
-rw-r--r--  mesonbuild/wrap/wrap.py | 207
72 files changed, 4470 insertions(+), 1994 deletions(-)
diff --git a/mesonbuild/arglist.py b/mesonbuild/arglist.py
new file mode 100644
index 0000000..fd4de96
--- /dev/null
+++ b/mesonbuild/arglist.py
@@ -0,0 +1,331 @@
+# Copyright 2012-2020 The Meson development team
+# Copyright © 2020 Intel Corporation
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from functools import lru_cache
+import collections
+import enum
+import os
+import re
+import typing as T
+
+from . import mesonlib
+
+if T.TYPE_CHECKING:
+ from .linkers import StaticLinker
+ from .compilers import Compiler
+
+UNIXY_COMPILER_INTERNAL_LIBS = ['m', 'c', 'pthread', 'dl', 'rt'] # type: T.List[str]
+# execinfo is a compiler lib on FreeBSD and NetBSD
+if mesonlib.is_freebsd() or mesonlib.is_netbsd():
+ UNIXY_COMPILER_INTERNAL_LIBS.append('execinfo')
+
+
+class Dedup(enum.Enum):
+
+ """What kind of deduplication can be done to compiler args.
+
+ OVERRIDEN - Whether an argument can be 'overridden' by a later argument.
+ For example, -DFOO defines FOO and -UFOO undefines FOO. In this case,
+ we can safely remove the previous occurrence and add a new one. The
+ same is true for include paths and library paths with -I and -L.
+ UNIQUE - Arguments that once specified cannot be undone, such as `-c` or
+ `-pipe`. New instances of these can be completely skipped.
+ NO_DEDUP - Whether it matters where or how many times on the command-line
+ a particular argument is present. This can matter for symbol
+ resolution in static or shared libraries, so we cannot de-dup or
+ reorder them.
+ """
+
+ NO_DEDUP = 0
+ UNIQUE = 1
+ OVERRIDEN = 2
+
+
+class CompilerArgs(collections.abc.MutableSequence):
+ '''
+ List-like class that manages a list of compiler arguments. Should be used
+ while constructing compiler arguments from various sources. Can be
+ operated with ordinary lists, so this does not need to be used
+ everywhere.
+
+ All arguments must be inserted and stored in GCC-style (-lfoo, -Idir, etc)
+ and can be converted to the native type of each compiler by using the
+ .to_native() method to which you must pass an instance of the compiler or
+ the compiler class.
+
+ New arguments added to this class (either with .append(), .extend(), or +=)
+ are added in a way that ensures that they override previous arguments.
+ For example:
+
+ >>> a = ['-Lfoo', '-lbar']
+ >>> a += ['-Lpho', '-lbaz']
+ >>> print(a)
+ ['-Lpho', '-Lfoo', '-lbar', '-lbaz']
+
+ Arguments will also be de-duped if they can be de-duped safely.
+
+ Note that because of all this, this class is not commutative and does not
+ preserve the order of arguments when it is safe not to. For example:
+ >>> ['-Ifoo', '-Ibar'] + ['-Ifez', '-Ibaz', '-Werror']
+ ['-Ifez', '-Ibaz', '-Ifoo', '-Ibar', '-Werror']
+ >>> ['-Ifez', '-Ibaz', '-Werror'] + ['-Ifoo', '-Ibar']
+ ['-Ifoo', '-Ibar', '-Ifez', '-Ibaz', '-Werror']
+
+ '''
+ # Arg prefixes that override by prepending instead of appending
+ prepend_prefixes = () # type: T.Tuple[str, ...]
+
+ # Arg prefixes and args that must be de-duped by returning Dedup.OVERRIDEN
+ dedup2_prefixes = () # type: T.Tuple[str, ...]
+ dedup2_suffixes = () # type: T.Tuple[str, ...]
+ dedup2_args = () # type: T.Tuple[str, ...]
+
+ # Arg prefixes and args that must be de-duped by returning Dedup.UNIQUE
+ #
+ # NOTE: not thorough. A list of potential corner cases can be found in
+ # https://github.com/mesonbuild/meson/pull/4593#pullrequestreview-182016038
+ dedup1_prefixes = () # type: T.Tuple[str, ...]
+ dedup1_suffixes = ('.lib', '.dll', '.so', '.dylib', '.a') # type: T.Tuple[str, ...]
+ # Match a .so of the form path/to/libfoo.so.0.1.0
+ # Only UNIX shared libraries require this. Others have a fixed extension.
+ dedup1_regex = re.compile(r'([\/\\]|\A)lib.*\.so(\.[0-9]+)?(\.[0-9]+)?(\.[0-9]+)?$')
+ dedup1_args = () # type: T.Tuple[str, ...]
+ # In generate_link() we add external libs without de-dup, but we must
+ # *always* de-dup these because they're special arguments to the linker
+ # TODO: these should probably move too
+ always_dedup_args = tuple('-l' + lib for lib in UNIXY_COMPILER_INTERNAL_LIBS) # type: T.Tuple[str, ...]
+
+ def __init__(self, compiler: T.Union['Compiler', 'StaticLinker'],
+ iterable: T.Optional[T.Iterable[str]] = None):
+ self.compiler = compiler
+ self._container = list(iterable) if iterable is not None else [] # type: T.List[str]
+ self.pre = collections.deque() # type: T.Deque[str]
+ self.post = collections.deque() # type: T.Deque[str]
+
+ # Flush the saved pre and post lists into the _container list
+ #
+ # This correctly deduplicates the entries according to _can_dedup
+ # Note: This function is designed to work without delete operations, as deletions significantly worsen performance.
+ def flush_pre_post(self) -> None:
+ pre_flush = collections.deque() # type: T.Deque[str]
+ pre_flush_set = set() # type: T.Set[str]
+ post_flush = collections.deque() # type: T.Deque[str]
+ post_flush_set = set() # type: T.Set[str]
+
+ # The two lists are walked from front to back here, so that deduplication does not require any removals
+ for a in self.pre:
+ dedup = self._can_dedup(a)
+ if a not in pre_flush_set:
+ pre_flush.append(a)
+ if dedup is Dedup.OVERRIDEN:
+ pre_flush_set.add(a)
+ for a in reversed(self.post):
+ dedup = self._can_dedup(a)
+ if a not in post_flush_set:
+ post_flush.appendleft(a)
+ if dedup is Dedup.OVERRIDEN:
+ post_flush_set.add(a)
+
+ # pre and post will overwrite every element that is in the container;
+ # only copy over args that are in _container but not in the post-flush or pre-flush set
+
+ for a in self._container:
+ if a not in post_flush_set and a not in pre_flush_set:
+ pre_flush.append(a)
+
+ self._container = list(pre_flush) + list(post_flush)
+ self.pre.clear()
+ self.post.clear()
+
+ def __iter__(self) -> T.Iterator[str]:
+ self.flush_pre_post()
+ return iter(self._container)
+
+ @T.overload # noqa: F811
+ def __getitem__(self, index: int) -> str: # noqa: F811
+ pass
+
+ @T.overload # noqa: F811
+ def __getitem__(self, index: slice) -> T.MutableSequence[str]: # noqa: F811
+ pass
+
+ def __getitem__(self, index): # noqa: F811
+ self.flush_pre_post()
+ return self._container[index]
+
+ @T.overload # noqa: F811
+ def __setitem__(self, index: int, value: str) -> None: # noqa: F811
+ pass
+
+ @T.overload # noqa: F811
+ def __setitem__(self, index: slice, value: T.Iterable[str]) -> None: # noqa: F811
+ pass
+
+ def __setitem__(self, index, value) -> None: # noqa: F811
+ self.flush_pre_post()
+ self._container[index] = value
+
+ def __delitem__(self, index: T.Union[int, slice]) -> None:
+ self.flush_pre_post()
+ del self._container[index]
+
+ def __len__(self) -> int:
+ return len(self._container) + len(self.pre) + len(self.post)
+
+ def insert(self, index: int, value: str) -> None:
+ self.flush_pre_post()
+ self._container.insert(index, value)
+
+ def copy(self) -> 'CompilerArgs':
+ self.flush_pre_post()
+ return type(self)(self.compiler, self._container.copy())
+
+ @classmethod
+ @lru_cache(maxsize=None)
+ def _can_dedup(cls, arg: str) -> Dedup:
+ """Returns whether the argument can be safely de-duped.
+
+ In addition to these, we handle library arguments specially.
+ With GNU ld, we surround library arguments with -Wl,--start/end-group
+ to recursively search for symbols in the libraries. This is not needed
+ with other linkers.
+ """
+
+ # A standalone argument must never be deduplicated because it is
+ # defined by what comes _after_ it. Thus dedupping this:
+ # -D FOO -D BAR
+ # would yield either
+ # -D FOO BAR
+ # or
+ # FOO -D BAR
+ # both of which are invalid.
+ if arg in cls.dedup2_prefixes:
+ return Dedup.NO_DEDUP
+ if arg in cls.dedup2_args or \
+ arg.startswith(cls.dedup2_prefixes) or \
+ arg.endswith(cls.dedup2_suffixes):
+ return Dedup.OVERRIDEN
+ if arg in cls.dedup1_args or \
+ arg.startswith(cls.dedup1_prefixes) or \
+ arg.endswith(cls.dedup1_suffixes) or \
+ re.search(cls.dedup1_regex, arg):
+ return Dedup.UNIQUE
+ return Dedup.NO_DEDUP
+
+ @classmethod
+ @lru_cache(maxsize=None)
+ def _should_prepend(cls, arg: str) -> bool:
+ return arg.startswith(cls.prepend_prefixes)
+
+ def to_native(self, copy: bool = False) -> T.List[str]:
+ # Check if we need to add --start/end-group for circular dependencies
+ # between static libraries, and for recursively searching for symbols
+ # needed by static libraries that are provided by object files or
+ # shared libraries.
+ self.flush_pre_post()
+ if copy:
+ new = self.copy()
+ else:
+ new = self
+ return self.compiler.unix_args_to_native(new._container)
+
+ def append_direct(self, arg: str) -> None:
+ '''
+ Append the specified argument without any reordering or de-dup except
+ for absolute paths to libraries, etc, which can always be de-duped
+ safely.
+ '''
+ self.flush_pre_post()
+ if os.path.isabs(arg):
+ self.append(arg)
+ else:
+ self._container.append(arg)
+
+ def extend_direct(self, iterable: T.Iterable[str]) -> None:
+ '''
+ Extend using the elements in the specified iterable without any
+ reordering or de-dup except for absolute paths where the order of
+ include search directories is not relevant
+ '''
+ self.flush_pre_post()
+ for elem in iterable:
+ self.append_direct(elem)
+
+ def extend_preserving_lflags(self, iterable: T.Iterable[str]) -> None:
+ normal_flags = []
+ lflags = []
+ for i in iterable:
+ if i not in self.always_dedup_args and (i.startswith('-l') or i.startswith('-L')):
+ lflags.append(i)
+ else:
+ normal_flags.append(i)
+ self.extend(normal_flags)
+ self.extend_direct(lflags)
+
+ def __add__(self, args: T.Iterable[str]) -> 'CompilerArgs':
+ self.flush_pre_post()
+ new = self.copy()
+ new += args
+ return new
+
+ def __iadd__(self, args: T.Iterable[str]) -> 'CompilerArgs':
+ '''
+ Add two CompilerArgs while taking into account overriding of arguments
+ and while preserving the order of arguments as much as possible
+ '''
+ tmp_pre = collections.deque() # type: T.Deque[str]
+ if not isinstance(args, collections.abc.Iterable):
+ raise TypeError('can only concatenate Iterable[str] (not "{}") to CompilerArgs'.format(args))
+ for arg in args:
+ # If the argument can be de-duped, do it either by removing the
+ # previous occurrence of it and adding a new one, or not adding the
+ # new occurrence.
+ dedup = self._can_dedup(arg)
+ if dedup is Dedup.UNIQUE:
+ # Argument already exists and adding a new instance is useless
+ if arg in self._container or arg in self.pre or arg in self.post:
+ continue
+ if self._should_prepend(arg):
+ tmp_pre.appendleft(arg)
+ else:
+ self.post.append(arg)
+ self.pre.extendleft(tmp_pre)
+ # pre and post are going to be merged later, before an iteration call
+ return self
+
+ def __radd__(self, args: T.Iterable[str]) -> 'CompilerArgs':
+ self.flush_pre_post()
+ new = type(self)(self.compiler, args)
+ new += self
+ return new
+
+ def __eq__(self, other: T.Any) -> bool:
+ self.flush_pre_post()
+ # Only allow equality checks against other CompilerArgs and list instances
+ if isinstance(other, CompilerArgs):
+ return self.compiler == other.compiler and self._container == other._container
+ elif isinstance(other, list):
+ return self._container == other
+ return NotImplemented
+
+ def append(self, arg: str) -> None:
+ self.__iadd__([arg])
+
+ def extend(self, args: T.Iterable[str]) -> None:
+ self.__iadd__(args)
+
+ def __repr__(self) -> str:
+ self.flush_pre_post()
+ return 'CompilerArgs({!r}, {!r})'.format(self.compiler, self._container)
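For reviewers, a minimal sketch of the override/dedup semantics documented above. The SketchArgs subclass and its prefix tuples are hypothetical (the real prefixes live on per-compiler subclasses obtained via compiler.compiler_args()), and None stands in for a real Compiler instance:

```python
from mesonbuild.arglist import CompilerArgs

class SketchArgs(CompilerArgs):
    # hypothetical prefixes; real values come from compiler-specific subclasses
    prepend_prefixes = ('-I', '-L')
    dedup2_prefixes = ('-I', '-L', '-D', '-U')
    dedup1_prefixes = ('-l',)

args = SketchArgs(None, ['-Ifoo', '-Ibar'])  # None stands in for a Compiler
args += ['-Ifez', '-Ibaz', '-Werror']        # -I args override by prepending
print(list(args))
# ['-Ifez', '-Ibaz', '-Ifoo', '-Ibar', '-Werror']
```

Note that the new arguments only land in the pre/post deques at += time; the deduplicated order is materialized lazily by flush_pre_post() on the first iteration, which is the performance point the comments above make.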
diff --git a/mesonbuild/ast/__init__.py b/mesonbuild/ast/__init__.py
index 48de523..4fb56cb 100644
--- a/mesonbuild/ast/__init__.py
+++ b/mesonbuild/ast/__init__.py
@@ -20,6 +20,7 @@ __all__ = [
'AstInterpreter',
'AstIDGenerator',
'AstIndentationGenerator',
+ 'AstJSONPrinter',
'AstVisitor',
'AstPrinter',
'IntrospectionInterpreter',
@@ -30,4 +31,4 @@ from .interpreter import AstInterpreter
from .introspection import IntrospectionInterpreter, build_target_functions
from .visitor import AstVisitor
from .postprocess import AstConditionLevel, AstIDGenerator, AstIndentationGenerator
-from .printer import AstPrinter
+from .printer import AstPrinter, AstJSONPrinter
diff --git a/mesonbuild/ast/interpreter.py b/mesonbuild/ast/interpreter.py
index cc5c94c..6a826ef 100644
--- a/mesonbuild/ast/interpreter.py
+++ b/mesonbuild/ast/interpreter.py
@@ -297,6 +297,11 @@ class AstInterpreter(interpreterbase.InterpreterBase):
elif isinstance(node, ElementaryNode):
result = node.value
+ elif isinstance(node, NotNode):
+ result = self.resolve_node(node.value, include_unknown_args, id_loop_detect)
+ if isinstance(result, bool):
+ result = not result
+
elif isinstance(node, ArrayNode):
result = [x for x in node.args.arguments]
diff --git a/mesonbuild/ast/introspection.py b/mesonbuild/ast/introspection.py
index 142c219..6e6927f 100644
--- a/mesonbuild/ast/introspection.py
+++ b/mesonbuild/ast/introspection.py
@@ -120,7 +120,7 @@ class IntrospectionInterpreter(AstInterpreter):
self.do_subproject(i)
self.coredata.init_backend_options(self.backend)
- options = {k: v for k, v in self.environment.cmd_line_options.items() if k.startswith('backend_')}
+ options = {k: v for k, v in self.environment.meson_options.host[''].items() if k.startswith('backend_')}
self.coredata.set_options(options)
self.func_add_languages(None, proj_langs, None)
diff --git a/mesonbuild/ast/printer.py b/mesonbuild/ast/printer.py
index 39e2cca..a57ba20 100644
--- a/mesonbuild/ast/printer.py
+++ b/mesonbuild/ast/printer.py
@@ -18,6 +18,7 @@
from .. import mparser
from . import AstVisitor
import re
+import typing as T
arithmic_map = {
'add': '+',
@@ -155,7 +156,7 @@ class AstPrinter(AstVisitor):
self.append_padded(prefix + 'if', node)
prefix = 'el'
i.accept(self)
- if node.elseblock:
+ if not isinstance(node.elseblock, mparser.EmptyNode):
self.append('else', node)
node.elseblock.accept(self)
self.append('endif', node)
@@ -199,3 +200,160 @@ class AstPrinter(AstVisitor):
self.result = re.sub(r', \n$', '\n', self.result)
else:
self.result = re.sub(r', $', '', self.result)
+
+class AstJSONPrinter(AstVisitor):
+ def __init__(self) -> None:
+ self.result = {} # type: T.Dict[str, T.Any]
+ self.current = self.result
+
+ def _accept(self, key: str, node: mparser.BaseNode) -> None:
+ old = self.current
+ data = {} # type: T.Dict[str, T.Any]
+ self.current = data
+ node.accept(self)
+ self.current = old
+ self.current[key] = data
+
+ def _accept_list(self, key: str, nodes: T.Sequence[mparser.BaseNode]) -> None:
+ old = self.current
+ datalist = [] # type: T.List[T.Dict[str, T.Any]]
+ for i in nodes:
+ self.current = {}
+ i.accept(self)
+ datalist += [self.current]
+ self.current = old
+ self.current[key] = datalist
+
+ def _raw_accept(self, node: mparser.BaseNode, data: T.Dict[str, T.Any]) -> None:
+ old = self.current
+ self.current = data
+ node.accept(self)
+ self.current = old
+
+ def setbase(self, node: mparser.BaseNode) -> None:
+ self.current['node'] = type(node).__name__
+ self.current['lineno'] = node.lineno
+ self.current['colno'] = node.colno
+ self.current['end_lineno'] = node.end_lineno
+ self.current['end_colno'] = node.end_colno
+
+ def visit_default_func(self, node: mparser.BaseNode) -> None:
+ self.setbase(node)
+
+ def gen_ElementaryNode(self, node: mparser.ElementaryNode) -> None:
+ self.current['value'] = node.value
+ self.setbase(node)
+
+ def visit_BooleanNode(self, node: mparser.BooleanNode) -> None:
+ self.gen_ElementaryNode(node)
+
+ def visit_IdNode(self, node: mparser.IdNode) -> None:
+ self.gen_ElementaryNode(node)
+
+ def visit_NumberNode(self, node: mparser.NumberNode) -> None:
+ self.gen_ElementaryNode(node)
+
+ def visit_StringNode(self, node: mparser.StringNode) -> None:
+ self.gen_ElementaryNode(node)
+
+ def visit_ArrayNode(self, node: mparser.ArrayNode) -> None:
+ self._accept('args', node.args)
+ self.setbase(node)
+
+ def visit_DictNode(self, node: mparser.DictNode) -> None:
+ self._accept('args', node.args)
+ self.setbase(node)
+
+ def visit_OrNode(self, node: mparser.OrNode) -> None:
+ self._accept('left', node.left)
+ self._accept('right', node.right)
+ self.setbase(node)
+
+ def visit_AndNode(self, node: mparser.AndNode) -> None:
+ self._accept('left', node.left)
+ self._accept('right', node.right)
+ self.setbase(node)
+
+ def visit_ComparisonNode(self, node: mparser.ComparisonNode) -> None:
+ self._accept('left', node.left)
+ self._accept('right', node.right)
+ self.current['ctype'] = node.ctype
+ self.setbase(node)
+
+ def visit_ArithmeticNode(self, node: mparser.ArithmeticNode) -> None:
+ self._accept('left', node.left)
+ self._accept('right', node.right)
+ self.current['op'] = arithmic_map[node.operation]
+ self.setbase(node)
+
+ def visit_NotNode(self, node: mparser.NotNode) -> None:
+ self._accept('right', node.value)
+ self.setbase(node)
+
+ def visit_CodeBlockNode(self, node: mparser.CodeBlockNode) -> None:
+ self._accept_list('lines', node.lines)
+ self.setbase(node)
+
+ def visit_IndexNode(self, node: mparser.IndexNode) -> None:
+ self._accept('object', node.iobject)
+ self._accept('index', node.index)
+ self.setbase(node)
+
+ def visit_MethodNode(self, node: mparser.MethodNode) -> None:
+ self._accept('object', node.source_object)
+ self._accept('args', node.args)
+ self.current['name'] = node.name
+ self.setbase(node)
+
+ def visit_FunctionNode(self, node: mparser.FunctionNode) -> None:
+ self._accept('args', node.args)
+ self.current['name'] = node.func_name
+ self.setbase(node)
+
+ def visit_AssignmentNode(self, node: mparser.AssignmentNode) -> None:
+ self._accept('value', node.value)
+ self.current['var_name'] = node.var_name
+ self.setbase(node)
+
+ def visit_PlusAssignmentNode(self, node: mparser.PlusAssignmentNode) -> None:
+ self._accept('value', node.value)
+ self.current['var_name'] = node.var_name
+ self.setbase(node)
+
+ def visit_ForeachClauseNode(self, node: mparser.ForeachClauseNode) -> None:
+ self._accept('items', node.items)
+ self._accept('block', node.block)
+ self.current['varnames'] = node.varnames
+ self.setbase(node)
+
+ def visit_IfClauseNode(self, node: mparser.IfClauseNode) -> None:
+ self._accept_list('ifs', node.ifs)
+ self._accept('else', node.elseblock)
+ self.setbase(node)
+
+ def visit_UMinusNode(self, node: mparser.UMinusNode) -> None:
+ self._accept('right', node.value)
+ self.setbase(node)
+
+ def visit_IfNode(self, node: mparser.IfNode) -> None:
+ self._accept('condition', node.condition)
+ self._accept('block', node.block)
+ self.setbase(node)
+
+ def visit_TernaryNode(self, node: mparser.TernaryNode) -> None:
+ self._accept('condition', node.condition)
+ self._accept('true', node.trueblock)
+ self._accept('false', node.falseblock)
+ self.setbase(node)
+
+ def visit_ArgumentNode(self, node: mparser.ArgumentNode) -> None:
+ self._accept_list('positional', node.arguments)
+ kwargs_list = [] # type: T.List[T.Dict[str, T.Dict[str, T.Any]]]
+ for key, val in node.kwargs.items():
+ key_res = {} # type: T.Dict[str, T.Any]
+ val_res = {} # type: T.Dict[str, T.Any]
+ self._raw_accept(key, key_res)
+ self._raw_accept(val, val_res)
+ kwargs_list += [{'key': key_res, 'val': val_res}]
+ self.current['kwargs'] = kwargs_list
+ self.setbase(node)
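A quick way to exercise the new AstJSONPrinter is to parse a snippet and dump the visitor's result; this sketch assumes the Parser(code, filename) constructor used in this tree:

```python
import json
from mesonbuild import mparser
from mesonbuild.ast import AstJSONPrinter

ast = mparser.Parser("project('demo', 'c')\n", 'meson.build').parse()
printer = AstJSONPrinter()
ast.accept(printer)  # fills printer.result with plain dicts and lists
print(json.dumps(printer.result, indent=2))
```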
diff --git a/mesonbuild/ast/visitor.py b/mesonbuild/ast/visitor.py
index 37be463..451020d 100644
--- a/mesonbuild/ast/visitor.py
+++ b/mesonbuild/ast/visitor.py
@@ -113,8 +113,7 @@ class AstVisitor:
self.visit_default_func(node)
for i in node.ifs:
i.accept(self)
- if node.elseblock:
- node.elseblock.accept(self)
+ node.elseblock.accept(self)
def visit_UMinusNode(self, node: mparser.UMinusNode) -> None:
self.visit_default_func(node)
diff --git a/mesonbuild/backend/backends.py b/mesonbuild/backend/backends.py
index 926a07d..86d20f7 100644
--- a/mesonbuild/backend/backends.py
+++ b/mesonbuild/backend/backends.py
@@ -12,24 +12,54 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-import os, pickle, re
+from collections import OrderedDict
+from functools import lru_cache
+from pathlib import Path
+import enum
+import json
+import os
+import pickle
+import re
+import shlex
+import subprocess
import textwrap
+import typing as T
+
from .. import build
from .. import dependencies
from .. import mesonlib
from .. import mlog
-import json
-import subprocess
from ..mesonlib import (
File, Language, MachineChoice, MesonException, OrderedSet,
OptionOverrideProxy, classify_unity_sources, unholder,
)
-from ..compilers import CompilerArgs, VisualStudioLikeCompiler
-from ..interpreter import Interpreter
-from collections import OrderedDict
-import shlex
-from functools import lru_cache
-import typing as T
+
+if T.TYPE_CHECKING:
+ from ..interpreter import Interpreter
+
+
+class TestProtocol(enum.Enum):
+
+ EXITCODE = 0
+ TAP = 1
+ GTEST = 2
+
+ @classmethod
+ def from_str(cls, string: str) -> 'TestProtocol':
+ if string == 'exitcode':
+ return cls.EXITCODE
+ elif string == 'tap':
+ return cls.TAP
+ elif string == 'gtest':
+ return cls.GTEST
+ raise MesonException('unknown test format {}'.format(string))
+
+ def __str__(self) -> str:
+ if self is self.EXITCODE:
+ return 'exitcode'
+ elif self is self.GTEST:
+ return 'gtest'
+ return 'tap'
class CleanTrees:
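The TestProtocol enum added above replaces the raw protocol strings that used to be passed around; a small round-trip illustration:

```python
from mesonbuild.backend.backends import TestProtocol

p = TestProtocol.from_str('gtest')
assert p is TestProtocol.GTEST and str(p) == 'gtest'
# Unrecognized names raise MesonException, e.g. TestProtocol.from_str('junit')
```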
@@ -60,12 +90,13 @@ class InstallData:
self.mesonintrospect = mesonintrospect
class TargetInstallData:
- def __init__(self, fname, outdir, aliases, strip, install_name_mappings, install_rpath, install_mode, optional=False):
+ def __init__(self, fname, outdir, aliases, strip, install_name_mappings, rpath_dirs_to_remove, install_rpath, install_mode, optional=False):
self.fname = fname
self.outdir = outdir
self.aliases = aliases
self.strip = strip
self.install_name_mappings = install_name_mappings
+ self.rpath_dirs_to_remove = rpath_dirs_to_remove
self.install_rpath = install_rpath
self.install_mode = install_mode
self.optional = optional
@@ -84,11 +115,12 @@ class ExecutableSerialisation:
class TestSerialisation:
def __init__(self, name: str, project: str, suite: str, fname: T.List[str],
- is_cross_built: bool, exe_wrapper: T.Optional[build.Executable],
+ is_cross_built: bool, exe_wrapper: T.Optional[dependencies.ExternalProgram],
needs_exe_wrapper: bool, is_parallel: bool, cmd_args: T.List[str],
env: build.EnvironmentVariables, should_fail: bool,
timeout: T.Optional[int], workdir: T.Optional[str],
- extra_paths: T.List[str], protocol: str, priority: int):
+ extra_paths: T.List[str], protocol: TestProtocol, priority: int,
+ cmd_is_built: bool):
self.name = name
self.project_name = project
self.suite = suite
@@ -107,8 +139,10 @@ class TestSerialisation:
self.protocol = protocol
self.priority = priority
self.needs_exe_wrapper = needs_exe_wrapper
+ self.cmd_is_built = cmd_is_built
+
-def get_backend_from_name(backend: str, build: T.Optional[build.Build] = None, interpreter: T.Optional[Interpreter] = None) -> T.Optional['Backend']:
+def get_backend_from_name(backend: str, build: T.Optional[build.Build] = None, interpreter: T.Optional['Interpreter'] = None) -> T.Optional['Backend']:
if backend == 'ninja':
from . import ninjabackend
return ninjabackend.NinjaBackend(build, interpreter)
@@ -135,7 +169,7 @@ def get_backend_from_name(backend: str, build: T.Optional[build.Build] = None, i
# This class contains the basic functionality that is needed by all backends.
# Feel free to move stuff in and out of it as you see fit.
class Backend:
- def __init__(self, build: T.Optional[build.Build], interpreter: T.Optional[Interpreter]):
+ def __init__(self, build: T.Optional[build.Build], interpreter: T.Optional['Interpreter']):
# Make it possible to construct a dummy backend
# This is used for introspection without a build directory
if build is None:
@@ -150,9 +184,9 @@ class Backend:
self.build_to_src = mesonlib.relpath(self.environment.get_source_dir(),
self.environment.get_build_dir())
- def get_target_filename(self, t):
+ def get_target_filename(self, t, *, warn_multi_output: bool = True):
if isinstance(t, build.CustomTarget):
- if len(t.get_outputs()) != 1:
+ if warn_multi_output and len(t.get_outputs()) != 1:
mlog.warning('custom_target {!r} has more than one output! '
'Using the first one.'.format(t.name))
filename = t.get_outputs()[0]
@@ -197,7 +231,7 @@ class Backend:
return os.path.join(self.get_target_dir(target), target.get_filename())
elif isinstance(target, (build.CustomTarget, build.CustomTargetIndex)):
if not target.is_linkable_target():
- raise MesonException('Tried to link against custom target "%s", which is not linkable.' % target.name)
+ raise MesonException('Tried to link against custom target "{}", which is not linkable.'.format(target.name))
return os.path.join(self.get_target_dir(target), target.get_filename())
elif isinstance(target, build.Executable):
if target.import_filename:
@@ -228,7 +262,7 @@ class Backend:
return self.build_to_src
def get_target_private_dir(self, target):
- return os.path.join(self.get_target_dir(target), target.get_id())
+ return os.path.join(self.get_target_filename(target, warn_multi_output=False) + '.p')
def get_target_private_dir_abs(self, target):
return os.path.join(self.environment.get_build_dir(), self.get_target_private_dir(target))
@@ -283,7 +317,7 @@ class Backend:
ofile = init_language_file(comp.get_default_suffix(), unity_file_number)
unity_file_number += 1
files_in_current = 0
- ofile.write('#include<%s>\n' % src)
+ ofile.write('#include<{}>\n'.format(src))
files_in_current += 1
if ofile:
ofile.close()
@@ -413,6 +447,46 @@ class Backend:
return True
return False
+ def get_external_rpath_dirs(self, target):
+ dirs = set()
+ args = []
+ # FIXME: is there a better way?
+ for lang in ['c', 'cpp']:
+ try:
+ args.extend(self.environment.coredata.get_external_link_args(target.for_machine, lang))
+ except Exception:
+ pass
+ # Match rpath formats:
+ # -Wl,-rpath=
+ # -Wl,-rpath,
+ rpath_regex = re.compile(r'-Wl,-rpath[=,]([^,]+)')
+ # Match Solaris-style compat runpath formats:
+ # -Wl,-R
+ # -Wl,-R,
+ runpath_regex = re.compile(r'-Wl,-R[,]?([^,]+)')
+ # Match symbols formats:
+ # -Wl,--just-symbols=
+ # -Wl,--just-symbols,
+ symbols_regex = re.compile(r'-Wl,--just-symbols[=,]([^,]+)')
+ for arg in args:
+ rpath_match = rpath_regex.match(arg)
+ if rpath_match:
+ for dir in rpath_match.group(1).split(':'):
+ dirs.add(dir)
+ runpath_match = runpath_regex.match(arg)
+ if runpath_match:
+ for dir in runpath_match.group(1).split(':'):
+ # A -Wl,-R argument is a runpath only if the path is a directory
+ if Path(dir).is_dir():
+ dirs.add(dir)
+ symbols_match = symbols_regex.match(arg)
+ if symbols_match:
+ for dir in symbols_match.group(1).split(':'):
+ # Prevent usage of --just-symbols to specify rpath
+ if Path(dir).is_dir():
+ raise MesonException('Invalid arg for --just-symbols, {} is a directory.'.format(dir))
+ return dirs
+
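To make the new regexes concrete: both accepted -Wl,-rpath spellings are captured, and the payload may carry several colon-separated directories. A standalone probe (the paths are hypothetical):

```python
import re

rpath_regex = re.compile(r'-Wl,-rpath[=,]([^,]+)')
for arg in ('-Wl,-rpath=/opt/lib', '-Wl,-rpath,/usr/lib:/opt/lib'):
    print(rpath_regex.match(arg).group(1).split(':'))
# ['/opt/lib']
# ['/usr/lib', '/opt/lib']
```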
def rpaths_for_bundled_shared_libraries(self, target, exclude_system=True):
paths = []
for dep in target.external_deps:
@@ -427,6 +501,9 @@ class Backend:
if exclude_system and self._libdir_is_system(libdir, target.compilers, self.environment):
# No point in adding system paths.
continue
+ # Don't remove rpaths specified in LDFLAGS.
+ if libdir in self.get_external_rpath_dirs(target):
+ continue
# Windows doesn't support rpaths, but we use this function to
# emulate rpaths by setting PATH, so also accept DLLs here
if os.path.splitext(libpath)[1] not in ['.dll', '.lib', '.so', '.dylib']:
@@ -446,8 +523,15 @@ class Backend:
result = OrderedSet()
result.add('meson-out')
result.update(self.rpaths_for_bundled_shared_libraries(target))
+ target.rpath_dirs_to_remove.update([d.encode('utf8') for d in result])
return tuple(result)
+ @staticmethod
+ def canonicalize_filename(fname):
+ for ch in ('/', '\\', ':'):
+ fname = fname.replace(ch, '_')
+ return fname
+
def object_filename_from_source(self, target, source):
assert isinstance(source, mesonlib.File)
build_dir = self.environment.get_build_dir()
@@ -478,7 +562,7 @@ class Backend:
source = os.path.relpath(os.path.join(build_dir, rel_src),
os.path.join(self.environment.get_source_dir(), target.get_subdir()))
machine = self.environment.machines[target.for_machine]
- return source.replace('/', '_').replace('\\', '_') + '.' + machine.get_object_suffix()
+ return self.canonicalize_filename(source) + '.' + machine.get_object_suffix()
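canonicalize_filename simply maps path separators and drive colons to underscores, so object file names stay unique and filesystem-safe; for example:

```python
def canonicalize_filename(fname):
    # same logic as the static method added above
    for ch in ('/', '\\', ':'):
        fname = fname.replace(ch, '_')
    return fname

print(canonicalize_filename('sub/dir\\file.c'))  # sub_dir_file.c
print(canonicalize_filename('C:/src/main.c'))    # C__src_main.c
```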
def determine_ext_objs(self, extobj, proj_dir_to_build_root):
result = []
@@ -538,14 +622,14 @@ class Backend:
def create_msvc_pch_implementation(self, target, lang, pch_header):
# We have to include the language in the file name, otherwise
# pch.c and pch.cpp will both end up as pch.obj in VS backends.
- impl_name = 'meson_pch-%s.%s' % (lang, lang)
+ impl_name = 'meson_pch-{}.{}'.format(lang, lang)
pch_rel_to_build = os.path.join(self.get_target_private_dir(target), impl_name)
# Make sure to prepend the build dir, since the working directory is
# not defined. Otherwise, we might create the file in the wrong path.
pch_file = os.path.join(self.build_dir, pch_rel_to_build)
os.makedirs(os.path.dirname(pch_file), exist_ok=True)
- content = '#include "%s"' % os.path.basename(pch_header)
+ content = '#include "{}"'.format(os.path.basename(pch_header))
pch_file_tmp = pch_file + '.tmp'
with open(pch_file_tmp, 'w') as f:
f.write(content)
@@ -554,36 +638,20 @@ class Backend:
@staticmethod
def escape_extra_args(compiler, args):
- # No extra escaping/quoting needed when not running on Windows
- if not mesonlib.is_windows():
- return args
+ # All backslashes in define arguments are doubly-escaped
extra_args = []
- # Compiler-specific escaping is needed for -D args but not for any others
- if isinstance(compiler, VisualStudioLikeCompiler):
- # MSVC needs escaping when a -D argument ends in \ or \"
- for arg in args:
- if arg.startswith('-D') or arg.startswith('/D'):
- # Without extra escaping for these two, the next character
- # gets eaten
- if arg.endswith('\\'):
- arg += '\\'
- elif arg.endswith('\\"'):
- arg = arg[:-2] + '\\\\"'
- extra_args.append(arg)
- else:
- # MinGW GCC needs all backslashes in defines to be doubly-escaped
- # FIXME: Not sure about Cygwin or Clang
- for arg in args:
- if arg.startswith('-D') or arg.startswith('/D'):
- arg = arg.replace('\\', '\\\\')
- extra_args.append(arg)
+ for arg in args:
+ if arg.startswith('-D') or arg.startswith('/D'):
+ arg = arg.replace('\\', '\\\\')
+ extra_args.append(arg)
+
return extra_args
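The rewritten escape_extra_args now unconditionally doubles backslashes in define arguments instead of special-casing Windows and MSVC. A standalone reduction of the new logic:

```python
def escape_extra_args(args):
    # double every backslash in -D//D arguments, leave the rest untouched
    out = []
    for arg in args:
        if arg.startswith('-D') or arg.startswith('/D'):
            arg = arg.replace('\\', '\\\\')
        out.append(arg)
    return out

# -DPATH=c:\tmp becomes -DPATH=c:\\tmp; -O2 passes through unchanged
print(escape_extra_args(['-DPATH=c:\\tmp', '-O2']))
```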
def generate_basic_compiler_args(self, target, compiler, no_warn_args=False):
# Create an empty commands list, and start adding arguments from
# various sources in the order in which they must override each other
# starting from hard-coded defaults followed by build options and so on.
- commands = CompilerArgs(compiler)
+ commands = compiler.compiler_args()
copt_proxy = self.get_compiler_options_for_target(target)[compiler.language]
# First, the trivial ones that are impossible to override.
@@ -665,7 +733,7 @@ class Backend:
args = []
for d in deps:
if not (d.is_linkable_target()):
- raise RuntimeError('Tried to link with a non-library target "%s".' % d.get_basename())
+ raise RuntimeError('Tried to link with a non-library target "{}".'.format(d.get_basename()))
arg = self.get_target_filename_for_linking(d)
if not arg:
continue
@@ -706,6 +774,7 @@ class Backend:
for deppath in self.rpaths_for_bundled_shared_libraries(target, exclude_system=False):
result.add(os.path.normpath(os.path.join(self.environment.get_build_dir(), deppath)))
for bdep in extra_bdeps:
+ prospectives.add(bdep)
prospectives.update(bdep.get_transitive_link_deps())
# Internal deps
for ld in prospectives:
@@ -738,7 +807,16 @@ class Backend:
# E.g. an external verifier or simulator program run on a generated executable.
# Can always be run without a wrapper.
test_for_machine = MachineChoice.BUILD
- is_cross = not self.environment.machines.matches_build_machine(test_for_machine)
+
+ # We allow passing compiled executables to tests, which may be cross built;
+ # these must also be considered when deciding whether the test is cross or not.
+ for a in t.cmd_args:
+ if isinstance(a, build.BuildTarget):
+ if a.for_machine is MachineChoice.HOST:
+ test_for_machine = MachineChoice.HOST
+ break
+
+ is_cross = self.environment.is_cross_build(test_for_machine)
if is_cross and self.environment.need_exe_wrapper():
exe_wrapper = self.environment.get_exe_wrapper()
else:
@@ -751,6 +829,7 @@ class Backend:
extra_paths = self.determine_windows_extra_paths(exe, extra_bdeps)
else:
extra_paths = []
+
cmd_args = []
for a in unholder(t.cmd_args):
if isinstance(a, build.BuildTarget):
@@ -760,6 +839,11 @@ class Backend:
cmd_args.append(a)
elif isinstance(a, str):
cmd_args.append(a)
+ elif isinstance(a, build.Executable):
+ p = self.construct_target_rel_path(a, t.workdir)
+ if p == a.get_filename():
+ p = './' + p
+ cmd_args.append(p)
elif isinstance(a, build.Target):
cmd_args.append(self.construct_target_rel_path(a, t.workdir))
else:
@@ -768,7 +852,8 @@ class Backend:
exe_wrapper, self.environment.need_exe_wrapper(),
t.is_parallel, cmd_args, t.env,
t.should_fail, t.timeout, t.workdir,
- extra_paths, t.protocol, t.priority)
+ extra_paths, t.protocol, t.priority,
+ isinstance(exe, build.Executable))
arr.append(ts)
return arr
@@ -854,7 +939,7 @@ class Backend:
m = regex.search(arg)
while m is not None:
index = int(m.group(1))
- src = '@OUTPUT%d@' % index
+ src = '@OUTPUT{}@'.format(index)
arg = arg.replace(src, os.path.join(private_dir, output_list[index]))
m = regex.search(arg)
newargs.append(arg)
@@ -981,35 +1066,36 @@ class Backend:
elif not isinstance(i, str):
err_msg = 'Argument {0} is of unknown type {1}'
raise RuntimeError(err_msg.format(str(i), str(type(i))))
- elif '@SOURCE_ROOT@' in i:
- i = i.replace('@SOURCE_ROOT@', source_root)
- elif '@BUILD_ROOT@' in i:
- i = i.replace('@BUILD_ROOT@', build_root)
- elif '@DEPFILE@' in i:
- if target.depfile is None:
- msg = 'Custom target {!r} has @DEPFILE@ but no depfile ' \
- 'keyword argument.'.format(target.name)
- raise MesonException(msg)
- dfilename = os.path.join(outdir, target.depfile)
- i = i.replace('@DEPFILE@', dfilename)
- elif '@PRIVATE_DIR@' in i:
- if target.absolute_paths:
- pdir = self.get_target_private_dir_abs(target)
- else:
- pdir = self.get_target_private_dir(target)
- i = i.replace('@PRIVATE_DIR@', pdir)
- elif '@PRIVATE_OUTDIR_' in i:
- match = re.search(r'@PRIVATE_OUTDIR_(ABS_)?([^/\s*]*)@', i)
- if not match:
- msg = 'Custom target {!r} has an invalid argument {!r}' \
- ''.format(target.name, i)
- raise MesonException(msg)
- source = match.group(0)
- if match.group(1) is None and not target.absolute_paths:
- lead_dir = ''
- else:
- lead_dir = self.environment.get_build_dir()
- i = i.replace(source, os.path.join(lead_dir, outdir))
+ else:
+ if '@SOURCE_ROOT@' in i:
+ i = i.replace('@SOURCE_ROOT@', source_root)
+ if '@BUILD_ROOT@' in i:
+ i = i.replace('@BUILD_ROOT@', build_root)
+ if '@DEPFILE@' in i:
+ if target.depfile is None:
+ msg = 'Custom target {!r} has @DEPFILE@ but no depfile ' \
+ 'keyword argument.'.format(target.name)
+ raise MesonException(msg)
+ dfilename = os.path.join(outdir, target.depfile)
+ i = i.replace('@DEPFILE@', dfilename)
+ if '@PRIVATE_DIR@' in i:
+ if target.absolute_paths:
+ pdir = self.get_target_private_dir_abs(target)
+ else:
+ pdir = self.get_target_private_dir(target)
+ i = i.replace('@PRIVATE_DIR@', pdir)
+ if '@PRIVATE_OUTDIR_' in i:
+ match = re.search(r'@PRIVATE_OUTDIR_(ABS_)?([^/\s*]*)@', i)
+ if not match:
+ msg = 'Custom target {!r} has an invalid argument {!r}' \
+ ''.format(target.name, i)
+ raise MesonException(msg)
+ source = match.group(0)
+ if match.group(1) is None and not target.absolute_paths:
+ lead_dir = ''
+ else:
+ lead_dir = self.environment.get_build_dir()
+ i = i.replace(source, os.path.join(lead_dir, outdir))
cmd.append(i)
# Substitute the rest of the template strings
values = mesonlib.get_filenames_templates_dict(inputs, outputs)
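The switch from an elif chain to independent if checks matters when one argument carries several placeholders at once; previously only the first matching placeholder was substituted. A toy illustration (paths hypothetical):

```python
arg = '@SOURCE_ROOT@/tool --out @BUILD_ROOT@/cfg'
source_root, build_root = '/src', '/build'
if '@SOURCE_ROOT@' in arg:
    arg = arg.replace('@SOURCE_ROOT@', source_root)
if '@BUILD_ROOT@' in arg:
    arg = arg.replace('@BUILD_ROOT@', build_root)
print(arg)  # /src/tool --out /build/cfg
```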
@@ -1110,6 +1196,7 @@ class Backend:
mappings = t.get_link_deps_mapping(d.prefix, self.environment)
i = TargetInstallData(self.get_target_filename(t), outdirs[0],
t.get_aliases(), should_strip, mappings,
+ t.rpath_dirs_to_remove,
t.install_rpath, install_mode)
d.targets.append(i)
@@ -1127,14 +1214,14 @@ class Backend:
implib_install_dir = self.environment.get_import_lib_dir()
# Install the import library; may not exist for shared modules
i = TargetInstallData(self.get_target_filename_for_linking(t),
- implib_install_dir, {}, False, {}, '', install_mode,
+ implib_install_dir, {}, False, {}, set(), '', install_mode,
optional=isinstance(t, build.SharedModule))
d.targets.append(i)
if not should_strip and t.get_debug_filename():
debug_file = os.path.join(self.get_target_dir(t), t.get_debug_filename())
i = TargetInstallData(debug_file, outdirs[0],
- {}, False, {}, '',
+ {}, False, {}, set(), '',
install_mode, optional=True)
d.targets.append(i)
# Install secondary outputs. Only used for Vala right now.
@@ -1144,7 +1231,7 @@ class Backend:
if outdir is False:
continue
f = os.path.join(self.get_target_dir(t), output)
- i = TargetInstallData(f, outdir, {}, False, {}, None, install_mode)
+ i = TargetInstallData(f, outdir, {}, False, {}, set(), None, install_mode)
d.targets.append(i)
elif isinstance(t, build.CustomTarget):
# If only one install_dir is specified, assume that all
@@ -1157,7 +1244,7 @@ class Backend:
if num_outdirs == 1 and num_out > 1:
for output in t.get_outputs():
f = os.path.join(self.get_target_dir(t), output)
- i = TargetInstallData(f, outdirs[0], {}, False, {}, None, install_mode,
+ i = TargetInstallData(f, outdirs[0], {}, False, {}, set(), None, install_mode,
optional=not t.build_by_default)
d.targets.append(i)
else:
@@ -1166,7 +1253,7 @@ class Backend:
if outdir is False:
continue
f = os.path.join(self.get_target_dir(t), output)
- i = TargetInstallData(f, outdir, {}, False, {}, None, install_mode,
+ i = TargetInstallData(f, outdir, {}, False, {}, set(), None, install_mode,
optional=not t.build_by_default)
d.targets.append(i)
diff --git a/mesonbuild/backend/ninjabackend.py b/mesonbuild/backend/ninjabackend.py
index ef9b809..968ad7c 100644
--- a/mesonbuild/backend/ninjabackend.py
+++ b/mesonbuild/backend/ninjabackend.py
@@ -15,8 +15,10 @@ import typing as T
import os
import re
import pickle
+import shlex
import subprocess
from collections import OrderedDict
+from enum import Enum, unique
import itertools
from pathlib import PurePath, Path
from functools import lru_cache
@@ -28,9 +30,15 @@ from .. import build
from .. import mlog
from .. import dependencies
from .. import compilers
-from ..compilers import (Compiler, CompilerArgs, CCompiler, FortranCompiler,
- PGICCompiler, VisualStudioLikeCompiler)
-from ..linkers import ArLinker
+from ..arglist import CompilerArgs
+from ..compilers import (
+ Compiler, CCompiler,
+ DmdDCompiler,
+ FortranCompiler, PGICCompiler,
+ VisualStudioCsCompiler,
+ VisualStudioLikeCompiler,
+)
+from ..linkers import ArLinker, VisualStudioLinker
from ..mesonlib import (
File, LibType, Language, MachineChoice, MesonException, OrderedSet, PerMachine,
ProgressBar, quote_arg, unholder,
@@ -45,18 +53,67 @@ FORTRAN_MODULE_PAT = r"^\s*\bmodule\b\s+(\w+)\s*(?:!+.*)*$"
FORTRAN_SUBMOD_PAT = r"^\s*\bsubmodule\b\s*\((\w+:?\w+)\)\s*(\w+)"
FORTRAN_USE_PAT = r"^\s*use,?\s*(?:non_intrinsic)?\s*(?:::)?\s*(\w+)"
+def cmd_quote(s):
+ # see: https://docs.microsoft.com/en-us/windows/desktop/api/shellapi/nf-shellapi-commandlinetoargvw#remarks
+
+ # backslash escape any existing double quotes
+ # any existing backslashes preceding a quote are doubled
+ s = re.sub(r'(\\*)"', lambda m: '\\' * (len(m.group(1)) * 2 + 1) + '"', s)
+ # any terminal backslashes likewise need doubling
+ s = re.sub(r'(\\*)$', lambda m: '\\' * (len(m.group(1)) * 2), s)
+ # and double quote
+ s = '"{}"'.format(s)
+
+ return s
+
+def gcc_rsp_quote(s):
+ # see: the function buildargv() in libiberty
+ #
+ # this differs from sh-quoting in that a backslash *always* escapes the
+ # following character, even inside single quotes.
+
+ s = s.replace('\\', '\\\\')
+
+ return shlex.quote(s)
+
+# How ninja executes command lines differs between Unix and Windows
+# (see https://ninja-build.org/manual.html#ref_rule_command)
if mesonlib.is_windows():
- # FIXME: can't use quote_arg on Windows just yet; there are a number of existing workarounds
- # throughout the codebase that cumulatively make the current code work (see, e.g. Backend.escape_extra_args
- # and NinjaBuildElement.write below) and need to be properly untangled before attempting this
- quote_func = lambda s: '"{}"'.format(s)
- execute_wrapper = ['cmd', '/c']
+ quote_func = cmd_quote
+ execute_wrapper = ['cmd', '/c'] # unused
rmfile_prefix = ['del', '/f', '/s', '/q', '{}', '&&']
else:
quote_func = quote_arg
execute_wrapper = []
rmfile_prefix = ['rm', '-f', '{}', '&&']
+def get_rsp_threshold():
+ '''Return a conservative estimate of the commandline size in bytes
+ above which a response file should be used. May be overridden for
+ debugging by setting environment variable MESON_RSP_THRESHOLD.'''
+
+ if mesonlib.is_windows():
+ # Usually 32k, but some projects might use cmd.exe,
+ # and that has a limit of 8k.
+ limit = 8192
+ else:
+ # On Linux, ninja always passes the commandline as a single
+ # big string to /bin/sh, and the kernel limits the size of a
+ # single argument; see MAX_ARG_STRLEN
+ limit = 131072
+ # Be conservative
+ limit = limit / 2
+ return int(os.environ.get('MESON_RSP_THRESHOLD', limit))
+
+# a conservative estimate of the command-line length limit
+rsp_threshold = get_rsp_threshold()
+
+# ninja variables whose value should remain unquoted. The value of these ninja
+# variables (or variables we use them in) is interpreted directly by ninja
+# (e.g. the value of the depfile variable is a pathname that ninja will read
+# from, etc.), so it must not be shell quoted.
+raw_names = {'DEPFILE_UNQUOTED', 'DESC', 'pool', 'description', 'targetdep'}
+
def ninja_quote(text, is_build_line=False):
if is_build_line:
qcs = ('$', ' ', ':')
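The two response-file quoting styles added above differ in how quotes and backslashes are treated; here are the helpers copied out with a few probes:

```python
import re
import shlex

def cmd_quote(s):
    # backslash-escape quotes, doubling backslashes before quotes and at the end
    s = re.sub(r'(\\*)"', lambda m: '\\' * (len(m.group(1)) * 2 + 1) + '"', s)
    s = re.sub(r'(\\*)$', lambda m: '\\' * (len(m.group(1)) * 2), s)
    return '"{}"'.format(s)

def gcc_rsp_quote(s):
    # a backslash always escapes, even inside single quotes
    return shlex.quote(s.replace('\\', '\\\\'))

print(cmd_quote('say "hi"'))   # "say \"hi\""
print(cmd_quote('a b\\'))      # "a b\\"   (trailing backslash doubled)
print(gcc_rsp_quote('a b\\'))  # 'a b\\'
```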
@@ -67,12 +124,31 @@ def ninja_quote(text, is_build_line=False):
if '\n' in text:
errmsg = '''Ninja does not support newlines in rules. The content was:
 
-%s
+{}
 
-Please report this error with a test case to the Meson bug tracker.''' % text
+Please report this error with a test case to the Meson bug tracker.'''.format(text)
raise MesonException(errmsg)
return text
+@unique
+class Quoting(Enum):
+ both = 0
+ notShell = 1
+ notNinja = 2
+ none = 3
+
+class NinjaCommandArg:
+ def __init__(self, s, quoting = Quoting.both):
+ self.s = s
+ self.quoting = quoting
+
+ def __str__(self):
+ return self.s
+
+ @staticmethod
+ def list(l, q):
+ return [NinjaCommandArg(i, q) for i in l]
+
class NinjaComment:
def __init__(self, comment):
self.comment = comment
@@ -86,49 +162,127 @@ class NinjaComment:
class NinjaRule:
def __init__(self, rule, command, args, description,
- rspable = False, deps = None, depfile = None, extra = None):
+ rspable = False, deps = None, depfile = None, extra = None,
+ rspfile_quote_style = 'gcc'):
+
+ def strToCommandArg(c):
+ if isinstance(c, NinjaCommandArg):
+ return c
+
+ # deal with common cases here, so we don't have to explicitly
+ # annotate the required quoting everywhere
+ if c == '&&':
+ # shell constructs shouldn't be shell quoted
+ return NinjaCommandArg(c, Quoting.notShell)
+ if c.startswith('$'):
+ var = re.search(r'\$\{?(\w*)\}?', c).group(1)
+ if var not in raw_names:
+ # ninja variables shouldn't be ninja quoted, and their value
+ # is already shell quoted
+ return NinjaCommandArg(c, Quoting.none)
+ else:
+ # shell quote the use of ninja variables whose value must
+ # not be shell quoted (as it is also used by ninja)
+ return NinjaCommandArg(c, Quoting.notNinja)
+
+ return NinjaCommandArg(c)
+
self.name = rule
- self.command = command # includes args which never go into a rspfile
- self.args = args # args which will go into a rspfile, if used
+ self.command = list(map(strToCommandArg, command)) # includes args which never go into a rspfile
+ self.args = list(map(strToCommandArg, args)) # args which will go into a rspfile, if used
self.description = description
self.deps = deps # depstyle 'gcc' or 'msvc'
self.depfile = depfile
self.extra = extra
self.rspable = rspable # if a rspfile can be used
self.refcount = 0
+ self.rsprefcount = 0
+ self.rspfile_quote_style = rspfile_quote_style # rspfile quoting style is 'gcc' or 'cl'
- def write(self, outfile):
- if not self.refcount:
- return
+ if self.depfile == '$DEPFILE':
+ self.depfile += '_UNQUOTED'
+
+ @staticmethod
+ def _quoter(x, qf = quote_func):
+ if isinstance(x, NinjaCommandArg):
+ if x.quoting == Quoting.none:
+ return x.s
+ elif x.quoting == Quoting.notNinja:
+ return qf(x.s)
+ elif x.quoting == Quoting.notShell:
+ return ninja_quote(x.s)
+ # fallthrough
+ return ninja_quote(qf(str(x)))
- outfile.write('rule %s\n' % self.name)
- if self.rspable:
- outfile.write(' command = %s @$out.rsp\n' % ' '.join(self.command))
- outfile.write(' rspfile = $out.rsp\n')
- outfile.write(' rspfile_content = %s\n' % ' '.join(self.args))
+ def write(self, outfile):
+ if self.rspfile_quote_style == 'cl':
+ rspfile_quote_func = cmd_quote
else:
- outfile.write(' command = %s\n' % ' '.join(self.command + self.args))
- if self.deps:
- outfile.write(' deps = %s\n' % self.deps)
- if self.depfile:
- outfile.write(' depfile = %s\n' % self.depfile)
- outfile.write(' description = %s\n' % self.description)
- if self.extra:
- for l in self.extra.split('\n'):
- outfile.write(' ')
- outfile.write(l)
- outfile.write('\n')
- outfile.write('\n')
+ rspfile_quote_func = gcc_rsp_quote
+
+ def rule_iter():
+ if self.refcount:
+ yield ''
+ if self.rsprefcount:
+ yield '_RSP'
+
+ for rsp in rule_iter():
+ outfile.write('rule {}{}\n'.format(self.name, rsp))
+ if rsp == '_RSP':
+ outfile.write(' command = {} @$out.rsp\n'.format(' '.join([self._quoter(x) for x in self.command])))
+ outfile.write(' rspfile = $out.rsp\n')
+ outfile.write(' rspfile_content = {}\n'.format(' '.join([self._quoter(x, rspfile_quote_func) for x in self.args])))
+ else:
+ outfile.write(' command = {}\n'.format(' '.join([self._quoter(x) for x in (self.command + self.args)])))
+ if self.deps:
+ outfile.write(' deps = {}\n'.format(self.deps))
+ if self.depfile:
+ outfile.write(' depfile = {}\n'.format(self.depfile))
+ outfile.write(' description = {}\n'.format(self.description))
+ if self.extra:
+ for l in self.extra.split('\n'):
+ outfile.write(' ')
+ outfile.write(l)
+ outfile.write('\n')
+ outfile.write('\n')
+
+ def length_estimate(self, infiles, outfiles, elems):
+ # determine variables
+ # this order of actions only approximates ninja's scoping rules, as
+ # documented at: https://ninja-build.org/manual.html#ref_scope
+ ninja_vars = {}
+ for e in elems:
+ (name, value) = e
+ ninja_vars[name] = value
+ ninja_vars['deps'] = self.deps
+ ninja_vars['depfile'] = self.depfile
+ ninja_vars['in'] = infiles
+ ninja_vars['out'] = outfiles
+
+ # expand variables in command
+ command = ' '.join([self._quoter(x) for x in self.command + self.args])
+ expanded_command = ''
+ for m in re.finditer(r'(\${\w*})|(\$\w*)|([^$]*)', command):
+ chunk = m.group()
+ if chunk.startswith('$'):
+ chunk = chunk[1:]
+ chunk = re.sub(r'{(.*)}', r'\1', chunk)
+ chunk = ninja_vars.get(chunk, []) # undefined ninja variables are empty
+ chunk = ' '.join(chunk)
+ expanded_command += chunk
+
+ # determine command length
+ return len(expanded_command)
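length_estimate drives the per-build-line decision to use a response file once the estimate crosses rsp_threshold; a toy reduction of its variable-expansion loop (the variable names here are hypothetical):

```python
import re

ninja_vars = {'in': ['a.c'], 'out': ['a.o'], 'ARGS': ['-O2', '-Wall']}
command = 'cc $ARGS -c $in -o $out'
expanded = ''
for m in re.finditer(r'(\${\w*})|(\$\w*)|([^$]*)', command):
    chunk = m.group()
    if chunk.startswith('$'):
        name = re.sub(r'^\$\{?(\w*)\}?$', r'\1', chunk)
        chunk = ' '.join(ninja_vars.get(name, []))  # undefined vars are empty
    expanded += chunk
print(expanded, len(expanded))  # cc -O2 -Wall -c a.c -o a.o 26
```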
class NinjaBuildElement:
- def __init__(self, all_outputs, outfilenames, rule, infilenames, implicit_outs=None):
+ def __init__(self, all_outputs, outfilenames, rulename, infilenames, implicit_outs=None):
self.implicit_outfilenames = implicit_outs or []
if isinstance(outfilenames, str):
self.outfilenames = [outfilenames]
else:
self.outfilenames = outfilenames
- assert(isinstance(rule, str))
- self.rule = rule
+ assert(isinstance(rulename, str))
+ self.rulename = rulename
if isinstance(infilenames, str):
self.infilenames = [infilenames]
else:
@@ -151,10 +305,39 @@ class NinjaBuildElement:
self.orderdeps.add(dep)
def add_item(self, name, elems):
+ # Always convert from GCC-style argument naming to the naming used by the
+ # current compiler. Also filter system include paths, deduplicate, etc.
+ if isinstance(elems, CompilerArgs):
+ elems = elems.to_native()
if isinstance(elems, str):
elems = [elems]
self.elems.append((name, elems))
+ if name == 'DEPFILE':
+ self.elems.append((name + '_UNQUOTED', elems))
+
+ def _should_use_rspfile(self):
+ # 'phony' is a rule built-in to ninja
+ if self.rulename == 'phony':
+ return False
+
+ if not self.rule.rspable:
+ return False
+
+ infilenames = ' '.join([ninja_quote(i, True) for i in self.infilenames])
+ outfilenames = ' '.join([ninja_quote(i, True) for i in self.outfilenames])
+
+ return self.rule.length_estimate(infilenames,
+ outfilenames,
+ self.elems) >= rsp_threshold
+
+ def count_rule_references(self):
+ if self.rulename != 'phony':
+ if self._should_use_rspfile():
+ self.rule.rsprefcount += 1
+ else:
+ self.rule.refcount += 1
+
def write(self, outfile):
self.check_outputs()
ins = ' '.join([ninja_quote(i, True) for i in self.infilenames])
@@ -162,7 +345,13 @@ class NinjaBuildElement:
implicit_outs = ' '.join([ninja_quote(i, True) for i in self.implicit_outfilenames])
if implicit_outs:
implicit_outs = ' | ' + implicit_outs
- line = 'build {}{}: {} {}'.format(outs, implicit_outs, self.rule, ins)
+ use_rspfile = self._should_use_rspfile()
+ if use_rspfile:
+ rulename = self.rulename + '_RSP'
+ mlog.debug("Command line for building %s is long, using a response file" % self.outfilenames)
+ else:
+ rulename = self.rulename
+ line = 'build {}{}: {} {}'.format(outs, implicit_outs, rulename, ins)
if len(self.deps) > 0:
line += ' | ' + ' '.join([ninja_quote(x, True) for x in self.deps])
if len(self.orderdeps) > 0:
@@ -176,25 +365,24 @@ class NinjaBuildElement:
line = line.replace('\\', '/')
outfile.write(line)
- # ninja variables whose value should remain unquoted. The value of these
- # ninja variables (or variables we use them in) is interpreted directly
- # by ninja (e.g. the value of the depfile variable is a pathname that
- # ninja will read from, etc.), so it must not be shell quoted.
- raw_names = {'DEPFILE', 'DESC', 'pool', 'description', 'targetdep'}
+ if use_rspfile:
+ if self.rule.rspfile_quote_style == 'cl':
+ qf = cmd_quote
+ else:
+ qf = gcc_rsp_quote
+ else:
+ qf = quote_func
for e in self.elems:
(name, elems) = e
should_quote = name not in raw_names
- line = ' %s = ' % name
+ line = ' {} = '.format(name)
newelems = []
for i in elems:
if not should_quote or i == '&&': # Hackety hack hack
quoter = ninja_quote
else:
- quoter = lambda x: ninja_quote(quote_func(x))
- i = i.replace('\\', '\\\\')
- if quote_func('') == '""':
- i = i.replace('"', '\\"')
+ quoter = lambda x: ninja_quote(qf(x))
newelems.append(quoter(i))
line += ' '.join(newelems)
line += '\n'
@@ -204,7 +392,7 @@ class NinjaBuildElement:
def check_outputs(self):
for n in self.outfilenames:
if n in self.all_outputs:
- raise MesonException('Multiple producers for Ninja target "%s". Please rename your targets.' % n)
+ raise MesonException('Multiple producers for Ninja target "{}". Please rename your targets.'.format(n))
self.all_outputs[n] = True
class NinjaBackend(backends.Backend):
@@ -271,7 +459,7 @@ int dummy;
# different locales have different messages with a different
# number of colons. Match up to the drive name 'd:\'.
# When used in cross compilation, the path separator is a
- # backslash rather than a forward slash so handle both.
+ # forward slash rather than a backslash so handle both.
matchre = re.compile(rb"^(.*\s)([a-zA-Z]:\\|\/).*stdio.h$")
def detect_prefix(out):
@@ -299,8 +487,7 @@ int dummy;
outfilename = os.path.join(self.environment.get_build_dir(), self.ninja_filename)
tempfilename = outfilename + '~'
with open(tempfilename, 'w', encoding='utf-8') as outfile:
- outfile.write('# This is the build file for project "%s"\n' %
- self.build.get_project())
+ outfile.write('# This is the build file for project "{}"\n'.format(self.build.get_project()))
outfile.write('# It is autogenerated by the Meson build system.\n')
outfile.write('# Do not edit by hand.\n\n')
outfile.write('ninja_required_version = 1.7.1\n\n')
@@ -308,9 +495,9 @@ int dummy;
num_pools = self.environment.coredata.backend_options['backend_max_links'].value
if num_pools > 0:
outfile.write('''pool link_pool
- depth = %d
+ depth = {}
 
-''' % num_pools)
+'''.format(num_pools))
with self.detect_vs_dep_prefix(tempfilename) as outfile:
self.generate_rules()
@@ -347,10 +534,14 @@ int dummy;
# http://clang.llvm.org/docs/JSONCompilationDatabase.html
def generate_compdb(self):
rules = []
+ # TODO: Rather than an explicit list here, rules could be marked in the
+ # rule store as being wanted in compdb
for for_machine in MachineChoice:
for lang in self.environment.coredata.compilers[for_machine]:
- rules += [self.get_compiler_rule_name(lang, for_machine)]
- rules += [self.get_pch_rule_name(lang, for_machine)]
+ rules += [ "%s%s" % (rule, ext) for rule in [self.get_compiler_rule_name(lang, for_machine)]
+ for ext in ['', '_RSP']]
+ rules += [ "%s%s" % (rule, ext) for rule in [self.get_pch_rule_name(lang, for_machine)]
+ for ext in ['', '_RSP']]
compdb_options = ['-x'] if mesonlib.version_compare(self.ninja_version, '>=1.9') else []
ninja_compdb = [self.ninja_command, '-t', 'compdb'] + compdb_options + rules
builddir = self.environment.get_build_dir()
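
The comprehensions above register both the plain and the response-file variant of every compiler and PCH rule, since either form can appear in build.ninja. For a host C toolchain the expansion looks like this:

    rules = []
    for base in ['c_COMPILER', 'c_PCH']:
        rules += ['{}{}'.format(base, ext) for ext in ['', '_RSP']]
    # rules == ['c_COMPILER', 'c_COMPILER_RSP', 'c_PCH', 'c_PCH_RSP']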
@@ -571,7 +762,7 @@ int dummy;
generated_source_files.append(raw_src)
elif self.environment.is_object(rel_src):
obj_list.append(rel_src)
- elif self.environment.is_library(rel_src):
+ elif self.environment.is_library(rel_src) or modules.is_module_library(rel_src):
pass
else:
# Assume anything not specifically a source file is a header. This is because
@@ -586,7 +777,7 @@ int dummy;
o = self.generate_llvm_ir_compile(target, src)
else:
o = self.generate_single_compile(target, src, True,
- header_deps=header_deps)
+ order_deps=header_deps)
obj_list.append(o)
use_pch = self.environment.coredata.base_options.get('b_pch', False)
@@ -765,7 +956,7 @@ int dummy;
target_name = 'meson-{}'.format(self.build_run_target_name(target))
elem = NinjaBuildElement(self.all_outputs, target_name, 'CUSTOM_COMMAND', [])
elem.add_item('COMMAND', cmd)
- elem.add_item('description', 'Running external command %s' % target.name)
+ elem.add_item('description', 'Running external command {}'.format(target.name))
elem.add_item('pool', 'console')
# Alias that runs the target defined above with the name the user specified
self.create_target_alias(target_name)
@@ -778,6 +969,15 @@ int dummy;
self.processed_targets[target.get_id()] = True
def generate_coverage_command(self, elem, outputs):
+ targets = self.build.get_targets().values()
+ use_llvm_cov = False
+ for target in targets:
+ if not hasattr(target, 'compilers'):
+ continue
+ for compiler in target.compilers.values():
+ if compiler.get_id() == 'clang' and not compiler.info.is_darwin():
+ use_llvm_cov = True
+ break
elem.add_item('COMMAND', self.environment.get_build_command() +
['--internal', 'coverage'] +
outputs +
@@ -785,7 +985,8 @@ int dummy;
os.path.join(self.environment.get_source_dir(),
self.build.get_subproject_dir()),
self.environment.get_build_dir(),
- self.environment.get_log_dir()])
+                                   self.environment.get_log_dir()] +
+                                  (['--use_llvm_cov'] if use_llvm_cov else []))
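
The parentheses around the trailing conditional are required: Python's ternary binds more loosely than +, so without them a False use_llvm_cov would collapse the whole COMMAND into an empty list. A two-line illustration:

    base = ['coverage', '--log-dir', 'logs']
    wrong = base + ['--use_llvm_cov'] if False else []    # []
    right = base + (['--use_llvm_cov'] if False else [])  # ['coverage', '--log-dir', 'logs']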
def generate_coverage_rules(self):
e = NinjaBuildElement(self.all_outputs, 'meson-coverage', 'CUSTOM_COMMAND', 'PHONY')
@@ -874,13 +1075,15 @@ int dummy;
deps='gcc', depfile='$DEPFILE',
extra='restat = 1'))
- c = [ninja_quote(quote_func(x)) for x in self.environment.get_build_command()] + \
+ c = self.environment.get_build_command() + \
['--internal',
'regenerate',
- ninja_quote(quote_func(self.environment.get_source_dir())),
- ninja_quote(quote_func(self.environment.get_build_dir()))]
+ self.environment.get_source_dir(),
+ self.environment.get_build_dir(),
+ '--backend',
+ 'ninja']
self.add_rule(NinjaRule('REGENERATE_BUILD',
- c + ['--backend', 'ninja'], [],
+ c, [],
'Regenerating build files.',
extra='generator = 1'))
@@ -897,11 +1100,15 @@ int dummy;
def add_build(self, build):
self.build_elements.append(build)
- # increment rule refcount
- if build.rule != 'phony':
- self.ruledict[build.rule].refcount += 1
+ if build.rulename != 'phony':
+ # reference rule
+ build.rule = self.ruledict[build.rulename]
def write_rules(self, outfile):
+ for b in self.build_elements:
+ if isinstance(b, NinjaBuildElement):
+ b.count_rule_references()
+
for r in self.rules:
r.write(outfile)
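
With add_build resolving the rule object up front, reference counting moves from add time to write time. A sketch of what count_rule_references could look like on NinjaBuildElement, assuming the refcount field the deleted lines incremented (the method body is not shown in this hunk):

    def count_rule_references(self):
        # Bump the rule's refcount so write_rules() can omit rules
        # that no build element references. Assumed implementation.
        if self.rulename != 'phony':
            self.rule.refcount += 1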
@@ -980,12 +1187,12 @@ int dummy;
ofilename = os.path.join(self.get_target_private_dir(target), ofilebase)
elem = NinjaBuildElement(self.all_outputs, ofilename, "CUSTOM_COMMAND", rel_sourcefile)
elem.add_item('COMMAND', ['resgen', rel_sourcefile, ofilename])
- elem.add_item('DESC', 'Compiling resource %s' % rel_sourcefile)
+ elem.add_item('DESC', 'Compiling resource {}'.format(rel_sourcefile))
self.add_build(elem)
deps.append(ofilename)
a = '-resource:' + ofilename
else:
- raise InvalidArguments('Unknown resource file %s.' % r)
+ raise InvalidArguments('Unknown resource file {}.'.format(r))
args.append(a)
return args, deps
@@ -997,7 +1204,7 @@ int dummy;
compiler = target.compilers[Language.CS]
rel_srcs = [os.path.normpath(s.rel_to_builddir(self.build_to_src)) for s in src_list]
deps = []
- commands = CompilerArgs(compiler, target.extra_args.get(Language.CS, []))
+ commands = compiler.compiler_args(target.extra_args.get(Language.CS, []))
commands += compiler.get_buildtype_args(buildtype)
commands += compiler.get_optimization_args(self.get_option_for_target('optimization', target))
commands += compiler.get_debug_args(self.get_option_for_target('debug', target))
@@ -1278,7 +1485,7 @@ int dummy;
main_rust_file = None
for i in target.get_sources():
if not rustc.can_compile(i):
- raise InvalidArguments('Rust target %s contains a non-rust source file.' % target.get_basename())
+ raise InvalidArguments('Rust target {} contains a non-rust source file.'.format(target.get_basename()))
if main_rust_file is None:
main_rust_file = i.rel_to_builddir(self.build_to_src)
if main_rust_file is None:
@@ -1349,7 +1556,8 @@ int dummy;
self.get_target_dir(target))
else:
target_slashname_workaround_dir = self.get_target_dir(target)
- rpath_args = rustc.build_rpath_args(self.environment,
+ (rpath_args, target.rpath_dirs_to_remove) = \
+ rustc.build_rpath_args(self.environment,
self.environment.get_build_dir(),
target_slashname_workaround_dir,
self.determine_rpath_dirs(target),
@@ -1376,12 +1584,12 @@ int dummy;
return PerMachine('_FOR_BUILD', '')[for_machine]
@classmethod
- def get_compiler_rule_name(cls, lang: Language, for_machine: MachineChoice) -> str:
- return '%s_COMPILER%s' % (lang.get_lower_case_name(), cls.get_rule_suffix(for_machine))
+ def get_compiler_rule_name(cls, lang: str, for_machine: MachineChoice) -> str:
+ return '{}_COMPILER{}'.format(lang.get_lower_case_name(), cls.get_rule_suffix(for_machine))
@classmethod
- def get_pch_rule_name(cls, lang: Language, for_machine: MachineChoice) -> str:
- return '%s_PCH%s' % (lang.get_lower_case_name(), cls.get_rule_suffix(for_machine))
+ def get_pch_rule_name(cls, lang: str, for_machine: MachineChoice) -> str:
+ return '{}_PCH{}'.format(lang.get_lower_case_name(), cls.get_rule_suffix(for_machine))
@classmethod
def compiler_to_rule_name(cls, compiler: Compiler) -> str:
@@ -1453,7 +1661,7 @@ int dummy;
abs_headers.append(absh)
header_imports += swiftc.get_header_import_args(absh)
else:
- raise InvalidArguments('Swift target %s contains a non-swift source file.' % target.get_basename())
+ raise InvalidArguments('Swift target {} contains a non-swift source file.'.format(target.get_basename()))
os.makedirs(self.get_target_private_dir_abs(target), exist_ok=True)
compile_args = swiftc.get_compile_only_args()
compile_args += swiftc.get_optimization_args(self.get_option_for_target('optimization', target))
@@ -1540,7 +1748,7 @@ int dummy;
static_linker = self.build.static_linker[for_machine]
if static_linker is None:
return
- rule = 'STATIC_LINKER%s' % self.get_rule_suffix(for_machine)
+ rule = 'STATIC_LINKER{}'.format(self.get_rule_suffix(for_machine))
cmdlist = []
args = ['$in']
# FIXME: Must normalize file names with pathlib.Path before writing
@@ -1554,7 +1762,7 @@ int dummy;
cmdlist = execute_wrapper + [c.format('$out') for c in rmfile_prefix]
cmdlist += static_linker.get_exelist()
cmdlist += ['$LINK_ARGS']
- cmdlist += static_linker.get_output_args('$out')
+ cmdlist += NinjaCommandArg.list(static_linker.get_output_args('$out'), Quoting.none)
description = 'Linking static target $out'
if num_pools > 0:
pool = 'pool = link_pool'
@@ -1562,6 +1770,7 @@ int dummy;
pool = None
self.add_rule(NinjaRule(rule, cmdlist, args, description,
rspable=static_linker.can_linker_accept_rsp(),
+ rspfile_quote_style='cl' if isinstance(static_linker, VisualStudioLinker) else 'gcc',
extra=pool))
def generate_dynamic_link_rules(self):
@@ -1574,9 +1783,9 @@ int dummy;
or langname == Language.RUST \
or langname == Language.CS:
continue
- rule = '%s_LINKER%s' % (langname.get_lower_case_name(), self.get_rule_suffix(for_machine))
+ rule = '{}_LINKER{}'.format(langname.get_lower_case_name(), self.get_rule_suffix(for_machine))
command = compiler.get_linker_exelist()
- args = ['$ARGS'] + compiler.get_linker_output_args('$out') + ['$in', '$LINK_ARGS']
+ args = ['$ARGS'] + NinjaCommandArg.list(compiler.get_linker_output_args('$out'), Quoting.none) + ['$in', '$LINK_ARGS']
description = 'Linking target $out'
if num_pools > 0:
pool = 'pool = link_pool'
@@ -1584,12 +1793,14 @@ int dummy;
pool = None
self.add_rule(NinjaRule(rule, command, args, description,
rspable=compiler.can_linker_accept_rsp(),
+ rspfile_quote_style='cl' if (compiler.get_argument_syntax() == 'msvc' or
+ isinstance(compiler, DmdDCompiler)) else 'gcc',
extra=pool))
- args = [ninja_quote(quote_func(x)) for x in self.environment.get_build_command()] + \
+ args = self.environment.get_build_command() + \
['--internal',
'symbolextractor',
- ninja_quote(quote_func(self.environment.get_build_dir())),
+ self.environment.get_build_dir(),
'$in',
'$IMPLIB',
'$out']
@@ -1601,31 +1812,28 @@ int dummy;
def generate_java_compile_rule(self, compiler):
rule = self.compiler_to_rule_name(compiler)
- invoc = [ninja_quote(i) for i in compiler.get_exelist()]
- command = invoc + ['$ARGS', '$in']
+ command = compiler.get_exelist() + ['$ARGS', '$in']
description = 'Compiling Java object $in'
self.add_rule(NinjaRule(rule, command, [], description))
def generate_cs_compile_rule(self, compiler):
rule = self.compiler_to_rule_name(compiler)
- invoc = [ninja_quote(i) for i in compiler.get_exelist()]
- command = invoc
+ command = compiler.get_exelist()
args = ['$ARGS', '$in']
description = 'Compiling C Sharp target $out'
self.add_rule(NinjaRule(rule, command, args, description,
- rspable=mesonlib.is_windows()))
+ rspable=mesonlib.is_windows(),
+ rspfile_quote_style='cl' if isinstance(compiler, VisualStudioCsCompiler) else 'gcc'))
def generate_vala_compile_rules(self, compiler):
rule = self.compiler_to_rule_name(compiler)
- invoc = [ninja_quote(i) for i in compiler.get_exelist()]
- command = invoc + ['$ARGS', '$in']
+ command = compiler.get_exelist() + ['$ARGS', '$in']
description = 'Compiling Vala source $in'
self.add_rule(NinjaRule(rule, command, [], description, extra='restat = 1'))
def generate_rust_compile_rules(self, compiler):
rule = self.compiler_to_rule_name(compiler)
- invoc = [ninja_quote(i) for i in compiler.get_exelist()]
- command = invoc + ['$ARGS', '$in']
+ command = compiler.get_exelist() + ['$ARGS', '$in']
description = 'Compiling Rust source $in'
depfile = '$targetdep'
depstyle = 'gcc'
@@ -1634,18 +1842,18 @@ int dummy;
def generate_swift_compile_rules(self, compiler):
rule = self.compiler_to_rule_name(compiler)
- full_exe = [ninja_quote(x) for x in self.environment.get_build_command()] + [
+ full_exe = self.environment.get_build_command() + [
'--internal',
'dirchanger',
'$RUNDIR',
]
- invoc = full_exe + [ninja_quote(i) for i in compiler.get_exelist()]
+ invoc = full_exe + compiler.get_exelist()
command = invoc + ['$ARGS', '$in']
description = 'Compiling Swift source $in'
self.add_rule(NinjaRule(rule, command, [], description))
def generate_fortran_dep_hack(self, crstr):
- rule = 'FORTRAN_DEP_HACK%s' % (crstr)
+ rule = 'FORTRAN_DEP_HACK{}'.format(crstr)
if mesonlib.is_windows():
cmd = ['cmd', '/C']
else:
@@ -1659,8 +1867,8 @@ https://gcc.gnu.org/bugzilla/show_bug.cgi?id=47485'''))
if self.created_llvm_ir_rule[compiler.for_machine]:
return
rule = self.get_compiler_rule_name('llvm_ir', compiler.for_machine)
- command = [ninja_quote(i) for i in compiler.get_exelist()]
- args = ['$ARGS'] + compiler.get_output_args('$out') + compiler.get_compile_only_args() + ['$in']
+ command = compiler.get_exelist()
+ args = ['$ARGS'] + NinjaCommandArg.list(compiler.get_output_args('$out'), Quoting.none) + compiler.get_compile_only_args() + ['$in']
description = 'Compiling LLVM IR object $in'
self.add_rule(NinjaRule(rule, command, args, description,
rspable=compiler.can_linker_accept_rsp()))
@@ -1689,16 +1897,10 @@ https://gcc.gnu.org/bugzilla/show_bug.cgi?id=47485'''))
if langname == Language.FORTRAN:
self.generate_fortran_dep_hack(crstr)
rule = self.get_compiler_rule_name(langname, compiler.for_machine)
- depargs = compiler.get_dependency_gen_args('$out', '$DEPFILE')
- quoted_depargs = []
- for d in depargs:
- if d != '$out' and d != '$in':
- d = quote_func(d)
- quoted_depargs.append(d)
-
- command = [ninja_quote(i) for i in compiler.get_exelist()]
- args = ['$ARGS'] + quoted_depargs + compiler.get_output_args('$out') + compiler.get_compile_only_args() + ['$in']
- description = 'Compiling %s object $out' % compiler.get_display_language()
+ depargs = NinjaCommandArg.list(compiler.get_dependency_gen_args('$out', '$DEPFILE'), Quoting.none)
+ command = compiler.get_exelist()
+ args = ['$ARGS'] + depargs + NinjaCommandArg.list(compiler.get_output_args('$out'), Quoting.none) + compiler.get_compile_only_args() + ['$in']
+ description = 'Compiling {} object $out'.format(compiler.get_display_language())
if isinstance(compiler, VisualStudioLikeCompiler):
deps = 'msvc'
depfile = None
@@ -1707,6 +1909,8 @@ https://gcc.gnu.org/bugzilla/show_bug.cgi?id=47485'''))
depfile = '$DEPFILE'
self.add_rule(NinjaRule(rule, command, args, description,
rspable=compiler.can_linker_accept_rsp(),
+ rspfile_quote_style='cl' if (compiler.get_argument_syntax() == 'msvc' or
+ isinstance(compiler, DmdDCompiler)) else 'gcc',
deps=deps, depfile=depfile))
def generate_pch_rule_for(self, langname, compiler):
@@ -1715,16 +1919,11 @@ https://gcc.gnu.org/bugzilla/show_bug.cgi?id=47485'''))
rule = self.compiler_to_pch_rule_name(compiler)
depargs = compiler.get_dependency_gen_args('$out', '$DEPFILE')
- quoted_depargs = []
- for d in depargs:
- if d != '$out' and d != '$in':
- d = quote_func(d)
- quoted_depargs.append(d)
if isinstance(compiler, VisualStudioLikeCompiler):
output = []
else:
- output = compiler.get_output_args('$out')
- command = compiler.get_exelist() + ['$ARGS'] + quoted_depargs + output + compiler.get_compile_only_args() + ['$in']
+ output = NinjaCommandArg.list(compiler.get_output_args('$out'), Quoting.none)
+ command = compiler.get_exelist() + ['$ARGS'] + depargs + output + compiler.get_compile_only_args() + ['$in']
description = 'Precompiling header $in'
if isinstance(compiler, VisualStudioLikeCompiler):
deps = 'msvc'
@@ -1859,9 +2058,8 @@ https://gcc.gnu.org/bugzilla/show_bug.cgi?id=47485'''))
modname = modmatch.group(1).lower()
if modname in module_files:
raise InvalidArguments(
- 'Namespace collision: module %s defined in '
- 'two files %s and %s.' %
- (modname, module_files[modname], s))
+ 'Namespace collision: module {} defined in '
+ 'two files {} and {}.'.format(modname, module_files[modname], s))
module_files[modname] = s
else:
submodmatch = submodre.match(line)
@@ -1872,9 +2070,8 @@ https://gcc.gnu.org/bugzilla/show_bug.cgi?id=47485'''))
if submodname in submodule_files:
raise InvalidArguments(
- 'Namespace collision: submodule %s defined in '
- 'two files %s and %s.' %
- (submodname, submodule_files[submodname], s))
+ 'Namespace collision: submodule {} defined in '
+ 'two files {} and {}.'.format(submodname, submodule_files[submodname], s))
submodule_files[submodname] = s
self.fortran_deps[target.get_basename()] = {**module_files, **submodule_files}
@@ -1960,11 +2157,11 @@ https://gcc.gnu.org/bugzilla/show_bug.cgi?id=47485'''))
return linker.get_link_debugfile_args(outname)
def generate_llvm_ir_compile(self, target, src):
+ base_proxy = self.get_base_options_for_target(target)
compiler = get_compiler_for_source(target.compilers.values(), src)
- commands = CompilerArgs(compiler)
+ commands = compiler.compiler_args()
# Compiler args for compiling this target
- commands += compilers.get_base_compile_args(self.environment.coredata.base_options,
- compiler)
+ commands += compilers.get_base_compile_args(base_proxy, compiler)
if isinstance(src, File):
if src.is_built:
src_filename = os.path.join(src.subdir, src.fname)
@@ -1974,7 +2171,7 @@ https://gcc.gnu.org/bugzilla/show_bug.cgi?id=47485'''))
src_filename = os.path.basename(src)
else:
src_filename = src
- obj_basename = src_filename.replace('/', '_').replace('\\', '_')
+ obj_basename = self.canonicalize_filename(src_filename)
rel_obj = os.path.join(self.get_target_private_dir(target), obj_basename)
rel_obj += '.' + self.environment.machines[target.for_machine].get_object_suffix()
commands += self.get_compile_debugfile_args(compiler, target, rel_obj)
@@ -1987,9 +2184,6 @@ https://gcc.gnu.org/bugzilla/show_bug.cgi?id=47485'''))
# Write the Ninja build command
compiler_name = self.get_compiler_rule_name('llvm_ir', compiler.for_machine)
element = NinjaBuildElement(self.all_outputs, rel_obj, compiler_name, rel_src)
- # Convert from GCC-style link argument naming to the naming used by the
- # current compiler.
- commands = commands.to_native()
element.add_item('ARGS', commands)
self.add_build(element)
return rel_obj
@@ -2005,6 +2199,10 @@ https://gcc.gnu.org/bugzilla/show_bug.cgi?id=47485'''))
curdir = '.'
return compiler.get_include_args(curdir, False)
+ @lru_cache(maxsize=None)
+ def get_normpath_target(self, source) -> str:
+ return os.path.normpath(source)
+
def get_custom_target_dir_include_args(self, target, compiler):
custom_target_include_dirs = []
for i in target.get_generated_sources():
@@ -2013,7 +2211,7 @@ https://gcc.gnu.org/bugzilla/show_bug.cgi?id=47485'''))
# own target build dir.
if not isinstance(i, (build.CustomTarget, build.CustomTargetIndex)):
continue
- idir = os.path.normpath(self.get_target_dir(i))
+ idir = self.get_normpath_target(self.get_target_dir(i))
if not idir:
idir = '.'
if idir not in custom_target_include_dirs:
@@ -2049,7 +2247,7 @@ https://gcc.gnu.org/bugzilla/show_bug.cgi?id=47485'''))
base_proxy = self.get_base_options_for_target(target)
# Create an empty commands list, and start adding arguments from
# various sources in the order in which they must override each other
- commands = CompilerArgs(compiler)
+ commands = compiler.compiler_args()
# Start with symbol visibility.
commands += compiler.gnu_symbol_visibility_args(target.gnu_symbol_visibility)
# Add compiler args for compiling this target derived from 'base' build
@@ -2129,7 +2327,7 @@ https://gcc.gnu.org/bugzilla/show_bug.cgi?id=47485'''))
compiler = get_compiler_for_source(target.compilers.values(), src)
commands = self._generate_single_compile(target, compiler, is_generated)
- commands = CompilerArgs(commands.compiler, commands)
+ commands = commands.compiler.compiler_args(commands)
# Create introspection information
if is_generated is False:
@@ -2206,9 +2404,6 @@ https://gcc.gnu.org/bugzilla/show_bug.cgi?id=47485'''))
d = os.path.join(self.get_target_private_dir(target), d)
element.add_orderdep(d)
element.add_dep(pch_dep)
- # Convert from GCC-style link argument naming to the naming used by the
- # current compiler.
- commands = commands.to_native()
for i in self.get_fortran_orderdeps(target, compiler):
element.add_orderdep(i)
element.add_item('DEPFILE', dep_file)
@@ -2481,7 +2676,7 @@ https://gcc.gnu.org/bugzilla/show_bug.cgi?id=47485'''))
#
# Once all the linker options have been passed, we will start passing
# libraries and library paths from internal and external sources.
- commands = CompilerArgs(linker)
+ commands = linker.compiler_args()
# First, the trivial ones that are impossible to override.
#
# Add linker args for linking this target derived from 'base' build
@@ -2583,20 +2778,19 @@ https://gcc.gnu.org/bugzilla/show_bug.cgi?id=47485'''))
self.get_target_dir(target))
else:
target_slashname_workaround_dir = self.get_target_dir(target)
- commands += linker.build_rpath_args(self.environment,
+ (rpath_args, target.rpath_dirs_to_remove) = \
+ linker.build_rpath_args(self.environment,
self.environment.get_build_dir(),
target_slashname_workaround_dir,
self.determine_rpath_dirs(target),
target.build_rpath,
target.install_rpath)
+ commands += rpath_args
# Add libraries generated by custom targets
custom_target_libraries = self.get_custom_target_provided_libraries(target)
commands += extra_args
commands += custom_target_libraries
commands += stdlib_args # Standard library arguments go last, because they never depend on anything.
- # Convert from GCC-style link argument naming to the naming used by the
- # current compiler.
- commands = commands.to_native()
dep_targets.extend([self.get_dependency_filename(t) for t in dependencies])
dep_targets.extend([self.get_dependency_filename(t)
for t in target.link_depends])
@@ -2647,18 +2841,14 @@ https://gcc.gnu.org/bugzilla/show_bug.cgi?id=47485'''))
def generate_gcov_clean(self):
gcno_elem = NinjaBuildElement(self.all_outputs, 'meson-clean-gcno', 'CUSTOM_COMMAND', 'PHONY')
- script_root = self.environment.get_script_dir()
- clean_script = os.path.join(script_root, 'delwithsuffix.py')
- gcno_elem.add_item('COMMAND', mesonlib.python_command + [clean_script, '.', 'gcno'])
+ gcno_elem.add_item('COMMAND', mesonlib.meson_command + ['--internal', 'delwithsuffix', '.', 'gcno'])
gcno_elem.add_item('description', 'Deleting gcno files')
self.add_build(gcno_elem)
# Alias that runs the target defined above
self.create_target_alias('meson-clean-gcno')
gcda_elem = NinjaBuildElement(self.all_outputs, 'meson-clean-gcda', 'CUSTOM_COMMAND', 'PHONY')
- script_root = self.environment.get_script_dir()
- clean_script = os.path.join(script_root, 'delwithsuffix.py')
- gcda_elem.add_item('COMMAND', mesonlib.python_command + [clean_script, '.', 'gcda'])
+ gcda_elem.add_item('COMMAND', mesonlib.meson_command + ['--internal', 'delwithsuffix', '.', 'gcda'])
gcda_elem.add_item('description', 'Deleting gcda files')
self.add_build(gcda_elem)
# Alias that runs the target defined above
diff --git a/mesonbuild/backend/vs2010backend.py b/mesonbuild/backend/vs2010backend.py
index b776d7a..7e28cfb 100644
--- a/mesonbuild/backend/vs2010backend.py
+++ b/mesonbuild/backend/vs2010backend.py
@@ -26,7 +26,6 @@ from .. import build
from .. import dependencies
from .. import mlog
from .. import compilers
-from ..compilers import CompilerArgs
from ..interpreter import Interpreter
from ..mesonlib import (
MesonException, File, python_command, replace_if_different
@@ -98,6 +97,9 @@ class Vs2010Backend(backends.Backend):
self.subdirs = {}
self.handled_target_deps = {}
+ def get_target_private_dir(self, target):
+ return os.path.join(self.get_target_dir(target), target.get_id())
+
def generate_custom_generator_commands(self, target, parent_node):
generator_output_files = []
custom_target_include_dirs = []
@@ -591,10 +593,8 @@ class Vs2010Backend(backends.Backend):
raise MesonException('Could not guess language from source file %s.' % src)
def add_pch(self, pch_sources, lang, inc_cl):
- if len(pch_sources) <= 1:
- # We only need per file precompiled headers if we have more than 1 language.
- return
- self.use_pch(pch_sources, lang, inc_cl)
+ if lang in pch_sources:
+ self.use_pch(pch_sources, lang, inc_cl)
def create_pch(self, pch_sources, lang, inc_cl):
pch = ET.SubElement(inc_cl, 'PrecompiledHeader')
@@ -602,6 +602,8 @@ class Vs2010Backend(backends.Backend):
self.add_pch_files(pch_sources, lang, inc_cl)
def use_pch(self, pch_sources, lang, inc_cl):
+ pch = ET.SubElement(inc_cl, 'PrecompiledHeader')
+ pch.text = 'Use'
header = self.add_pch_files(pch_sources, lang, inc_cl)
pch_include = ET.SubElement(inc_cl, 'ForcedIncludeFiles')
pch_include.text = header + ';%(ForcedIncludeFiles)'
@@ -821,12 +823,12 @@ class Vs2010Backend(backends.Backend):
clconf = ET.SubElement(compiles, 'ClCompile')
# CRT type; debug or release
if vscrt_type.value == 'from_buildtype':
- if self.buildtype == 'debug' or self.buildtype == 'debugoptimized':
+ if self.buildtype == 'debug':
ET.SubElement(type_config, 'UseDebugLibraries').text = 'true'
ET.SubElement(clconf, 'RuntimeLibrary').text = 'MultiThreadedDebugDLL'
else:
ET.SubElement(type_config, 'UseDebugLibraries').text = 'false'
- ET.SubElement(clconf, 'RuntimeLibrary').text = 'MultiThreaded'
+ ET.SubElement(clconf, 'RuntimeLibrary').text = 'MultiThreadedDLL'
elif vscrt_type.value == 'mdd':
ET.SubElement(type_config, 'UseDebugLibraries').text = 'true'
ET.SubElement(clconf, 'RuntimeLibrary').text = 'MultiThreadedDebugDLL'
@@ -855,6 +857,18 @@ class Vs2010Backend(backends.Backend):
ET.SubElement(clconf, 'BasicRuntimeChecks').text = 'UninitializedLocalUsageCheck'
elif '/RTCs' in buildtype_args:
ET.SubElement(clconf, 'BasicRuntimeChecks').text = 'StackFrameRuntimeCheck'
+            # Exception handling has to be set in the XML in addition to the "AdditionalOptions" because otherwise
+            # cl will give warning D9025: overriding '/EHs' with cpp_eh value
+ if 'cpp' in target.compilers:
+ eh = self.environment.coredata.compiler_options[target.for_machine]['cpp']['eh']
+ if eh.value == 'a':
+ ET.SubElement(clconf, 'ExceptionHandling').text = 'Async'
+ elif eh.value == 's':
+ ET.SubElement(clconf, 'ExceptionHandling').text = 'SyncCThrow'
+ elif eh.value == 'none':
+ ET.SubElement(clconf, 'ExceptionHandling').text = 'false'
+ else: # 'sc' or 'default'
+ ET.SubElement(clconf, 'ExceptionHandling').text = 'Sync'
# End configuration
ET.SubElement(root, 'Import', Project=r'$(VCTargetsPath)\Microsoft.Cpp.props')
generated_files, custom_target_output_files, generated_files_include_dirs = self.generate_custom_generator_commands(target, root)
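
For reference, the branch above amounts to the following mapping from Meson's cpp_eh values to the cl.exe /EH switch they imply and the vcxproj tag text (the /EH column is standard MSVC behaviour, listed here for context):

    # cpp_eh value -> (cl.exe switch, <ExceptionHandling> text)
    EH_MAP = {
        'a':       ('/EHa',  'Async'),
        's':       ('/EHs',  'SyncCThrow'),
        'none':    (None,    'false'),
        'sc':      ('/EHsc', 'Sync'),
        'default': ('/EHsc', 'Sync'),
    }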
@@ -884,9 +898,9 @@ class Vs2010Backend(backends.Backend):
#
# file_args is also later split out into defines and include_dirs in
# case someone passed those in there
- file_args = dict((lang, CompilerArgs(comp)) for lang, comp in target.compilers.items())
- file_defines = dict((lang, []) for lang in target.compilers)
- file_inc_dirs = dict((lang, []) for lang in target.compilers)
+ file_args = {l: c.compiler_args() for l, c in target.compilers.items()}
+ file_defines = {l: [] for l in target.compilers}
+ file_inc_dirs = {l: [] for l in target.compilers}
# The order in which these compile args are added must match
# generate_single_compile() and generate_basic_compiler_args()
for l, comp in target.compilers.items():
@@ -989,23 +1003,23 @@ class Vs2010Backend(backends.Backend):
# Cflags required by external deps might have UNIX-specific flags,
# so filter them out if needed
if isinstance(d, dependencies.OpenMPDependency):
- d_compile_args = compiler.openmp_flags()
+ ET.SubElement(clconf, 'OpenMPSupport').text = 'true'
else:
d_compile_args = compiler.unix_args_to_native(d.get_compile_args())
- for arg in d_compile_args:
- if arg.startswith(('-D', '/D')):
- define = arg[2:]
- # De-dup
- if define in target_defines:
- target_defines.remove(define)
- target_defines.append(define)
- elif arg.startswith(('-I', '/I')):
- inc_dir = arg[2:]
- # De-dup
- if inc_dir not in target_inc_dirs:
- target_inc_dirs.append(inc_dir)
- else:
- target_args.append(arg)
+ for arg in d_compile_args:
+ if arg.startswith(('-D', '/D')):
+ define = arg[2:]
+ # De-dup
+ if define in target_defines:
+ target_defines.remove(define)
+ target_defines.append(define)
+ elif arg.startswith(('-I', '/I')):
+ inc_dir = arg[2:]
+ # De-dup
+ if inc_dir not in target_inc_dirs:
+ target_inc_dirs.append(inc_dir)
+ else:
+ target_args.append(arg)
languages += gen_langs
if len(target_args) > 0:
@@ -1046,12 +1060,10 @@ class Vs2010Backend(backends.Backend):
# Note: SuppressStartupBanner is /NOLOGO and is 'true' by default
pch_sources = {}
if self.environment.coredata.base_options.get('b_pch', False):
- pch_node = ET.SubElement(clconf, 'PrecompiledHeader')
for lang in [Language.C, Language.CPP]:
pch = target.get_pch(lang)
if not pch:
continue
- pch_node.text = 'Use'
if compiler.id == 'msvc':
if len(pch) == 1:
# Auto generate PCH.
@@ -1065,17 +1077,13 @@ class Vs2010Backend(backends.Backend):
# I don't know whether its relevant but let's handle other compilers
# used with a vs backend
pch_sources[lang] = [pch[0], None, lang, None]
- if len(pch_sources) == 1:
- # If there is only 1 language with precompiled headers, we can use it for the entire project, which
- # is cleaner than specifying it for each source file.
- self.use_pch(pch_sources, list(pch_sources)[0], clconf)
resourcecompile = ET.SubElement(compiles, 'ResourceCompile')
ET.SubElement(resourcecompile, 'PreprocessorDefinitions')
# Linker options
link = ET.SubElement(compiles, 'Link')
- extra_link_args = CompilerArgs(compiler)
+ extra_link_args = compiler.compiler_args()
# FIXME: Can these buildtype linker args be added as tags in the
# vcxproj file (similar to buildtype compiler args) instead of in
# AdditionalOptions?
@@ -1103,14 +1111,14 @@ class Vs2010Backend(backends.Backend):
# Extend without reordering or de-dup to preserve `-L -l` sets
# https://github.com/mesonbuild/meson/issues/1718
if isinstance(dep, dependencies.OpenMPDependency):
- extra_link_args.extend_direct(compiler.openmp_flags())
+                        ET.SubElement(clconf, 'OpenMPSupport').text = 'true'
else:
extra_link_args.extend_direct(dep.get_link_args())
for d in target.get_dependencies():
if isinstance(d, build.StaticLibrary):
for dep in d.get_external_deps():
if isinstance(dep, dependencies.OpenMPDependency):
- extra_link_args.extend_direct(compiler.openmp_flags())
+                            ET.SubElement(clconf, 'OpenMPSupport').text = 'true'
else:
extra_link_args.extend_direct(dep.get_link_args())
# Add link args for c_* or cpp_* build options. Currently this only
@@ -1198,7 +1206,8 @@ class Vs2010Backend(backends.Backend):
# /nologo
ET.SubElement(link, 'SuppressStartupBanner').text = 'true'
# /release
- ET.SubElement(link, 'SetChecksum').text = 'true'
+ if not self.environment.coredata.get_builtin_option('debug'):
+ ET.SubElement(link, 'SetChecksum').text = 'true'
meson_file_group = ET.SubElement(root, 'ItemGroup')
ET.SubElement(meson_file_group, 'None', Include=os.path.join(proj_to_src_dir, build_filename))
diff --git a/mesonbuild/build.py b/mesonbuild/build.py
index ddecb6e..e3b67de 100644
--- a/mesonbuild/build.py
+++ b/mesonbuild/build.py
@@ -12,12 +12,14 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-import copy, os, re
from collections import OrderedDict, defaultdict
-import itertools, pathlib
+from functools import lru_cache
+import copy
import hashlib
+import itertools, pathlib
+import os
import pickle
-from functools import lru_cache
+import re
import typing as T
from . import environment
@@ -82,6 +84,7 @@ buildtarget_kwargs = set([
'override_options',
'sources',
'gnu_symbol_visibility',
+ 'link_language',
])
known_build_target_kwargs = (
@@ -92,7 +95,7 @@ known_build_target_kwargs = (
rust_kwargs |
cs_kwargs)
-known_exe_kwargs = known_build_target_kwargs | {'implib', 'export_dynamic', 'link_language', 'pie'}
+known_exe_kwargs = known_build_target_kwargs | {'implib', 'export_dynamic', 'pie'}
known_shlib_kwargs = known_build_target_kwargs | {'version', 'soversion', 'vs_module_defs', 'darwin_versions'}
known_shmod_kwargs = known_build_target_kwargs | {'vs_module_defs'}
known_stlib_kwargs = known_build_target_kwargs | {'pic'}
@@ -495,6 +498,7 @@ class BuildTarget(Target):
self.link_targets = []
self.link_whole_targets = []
self.link_depends = []
+ self.added_deps = set()
self.name_prefix_set = False
self.name_suffix_set = False
self.filename = 'no_name'
@@ -509,6 +513,8 @@ class BuildTarget(Target):
self.d_features = {}
self.pic = False
self.pie = False
+ # Track build_rpath entries so we can remove them at install time
+ self.rpath_dirs_to_remove = set()
# Sources can be:
# 1. Pre-existing source files in the source tree
# 2. Pre-existing sources generated by configure_file in the build tree
@@ -532,6 +538,9 @@ class BuildTarget(Target):
repr_str = "<{0} {1}: {2}>"
return repr_str.format(self.__class__.__name__, self.get_id(), self.filename)
+ def __str__(self):
+ return "{}".format(self.name)
+
def validate_install(self, environment):
if self.for_machine is MachineChoice.BUILD and self.need_install:
if environment.is_cross_build():
@@ -729,7 +738,7 @@ class BuildTarget(Target):
File.from_source_file(environment.source_dir, self.subdir, s))
elif hasattr(s, 'get_outputs'):
self.link_depends.extend(
- [File.from_built_file(s.subdir, p) for p in s.get_outputs()])
+ [File.from_built_file(s.get_subdir(), p) for p in s.get_outputs()])
else:
raise InvalidArguments(
'Link_depends arguments must be strings, Files, '
@@ -772,7 +781,7 @@ class BuildTarget(Target):
if isinstance(src, str):
src = File(False, self.subdir, src)
elif isinstance(src, File):
- FeatureNew('File argument for extract_objects', '0.50.0').use(self.subproject)
+ FeatureNew.single_use('File argument for extract_objects', '0.50.0', self.subproject)
else:
raise MesonException('Object extraction arguments must be strings or Files.')
# FIXME: It could be a generated source
@@ -813,7 +822,8 @@ class BuildTarget(Target):
def get_link_dep_subdirs(self):
result = OrderedSet()
for i in self.link_targets:
- result.add(i.get_subdir())
+ if not isinstance(i, StaticLibrary):
+ result.add(i.get_subdir())
result.update(i.get_link_dep_subdirs())
return result
@@ -1012,23 +1022,16 @@ This will become a hard error in a future Meson release.''')
def get_extra_args(self, language):
return self.extra_args.get(language, [])
- def get_dependencies(self, exclude=None, for_pkgconfig=False):
+ def get_dependencies(self, exclude=None):
transitive_deps = []
if exclude is None:
exclude = []
for t in itertools.chain(self.link_targets, self.link_whole_targets):
if t in transitive_deps or t in exclude:
continue
- # When generating `Libs:` and `Libs.private:` lists in pkg-config
- # files we don't want to include static libraries that we link_whole
- # or are uninstalled (they're implicitly promoted to link_whole).
- # But we still need to include their transitive dependencies,
- # a static library we link_whole would itself link to a shared
- # library or an installed static library.
- if not for_pkgconfig or (not t.is_internal() and t not in self.link_whole_targets):
- transitive_deps.append(t)
+ transitive_deps.append(t)
if isinstance(t, StaticLibrary):
- transitive_deps += t.get_dependencies(transitive_deps + exclude, for_pkgconfig)
+ transitive_deps += t.get_dependencies(transitive_deps + exclude)
return transitive_deps
def get_source_subdir(self):
@@ -1061,6 +1064,8 @@ This will become a hard error in a future Meson release.''')
def add_deps(self, deps):
deps = listify(deps)
for dep in unholder(deps):
+ if dep in self.added_deps:
+ continue
if isinstance(dep, dependencies.InternalDependency):
# Those parts that are internal.
self.process_sourcelist(dep.sources)
@@ -1099,6 +1104,7 @@ You probably should put it in link_with instead.''')
'either an external dependency (returned by find_library() or '
'dependency()) or an internal dependency (returned by '
'declare_dependency()).'.format(type(dep).__name__))
+ self.added_deps.add(dep)
def get_external_deps(self):
return self.external_deps
@@ -1115,7 +1121,7 @@ You probably should put it in link_with instead.''')
if not isinstance(t, (Target, CustomTargetIndex)):
raise InvalidArguments('{!r} is not a target.'.format(t))
if not t.is_linkable_target():
- raise InvalidArguments('Link target {!r} is not linkable.'.format(t))
+ raise InvalidArguments("Link target '{!s}' is not linkable.".format(t))
if isinstance(self, SharedLibrary) and isinstance(t, StaticLibrary) and not t.pic:
msg = "Can't link non-PIC static library {!r} into shared library {!r}. ".format(t.name, self.name)
msg += "Use the 'pic' option to static_library to build with PIC."
@@ -1228,11 +1234,7 @@ You probably should put it in link_with instead.''')
See: https://github.com/mesonbuild/meson/issues/1653
'''
- langs = []
-
- # User specified link_language of target (for multi-language targets)
- if self.link_language:
- return [self.link_language]
+ langs = [] # type: T.List[str]
# Check if any of the external libraries were written in this language
for dep in self.external_deps:
@@ -1264,6 +1266,12 @@ You probably should put it in link_with instead.''')
# Populate list of all compilers, not just those being used to compile
# sources in this target
all_compilers = self.environment.coredata.compilers[self.for_machine]
+
+ # If the user set the link_language, just return that.
+ if self.link_language:
+ comp = all_compilers[self.link_language]
+ return comp, comp.language_stdlib_only_link_flags()
+
# Languages used by dependencies
dep_langs = self.get_langs_used_by_deps()
# Pick a compiler based on the language priority-order
@@ -2159,7 +2167,7 @@ class CustomTarget(Target):
'when installing a target')
if isinstance(kwargs['install_dir'], list):
- FeatureNew('multiple install_dir for custom_target', '0.40.0').use(self.subproject)
+ FeatureNew.single_use('multiple install_dir for custom_target', '0.40.0', self.subproject)
# If an item in this list is False, the output corresponding to
# the list index of that item will not be installed
self.install_dir = typeslistify(kwargs['install_dir'], (str, bool))
@@ -2171,7 +2179,6 @@ class CustomTarget(Target):
if 'build_always' in kwargs and 'build_always_stale' in kwargs:
raise InvalidArguments('build_always and build_always_stale are mutually exclusive. Combine build_by_default and build_always_stale.')
elif 'build_always' in kwargs:
- mlog.deprecation('build_always is deprecated. Combine build_by_default and build_always_stale instead.')
if 'build_by_default' not in kwargs:
self.build_by_default = kwargs['build_always']
self.build_always_stale = kwargs['build_always']
diff --git a/mesonbuild/cmake/__init__.py b/mesonbuild/cmake/__init__.py
index 01cc3f9..db7aefd 100644
--- a/mesonbuild/cmake/__init__.py
+++ b/mesonbuild/cmake/__init__.py
@@ -24,11 +24,14 @@ __all__ = [
'CMakeTarget',
'CMakeTraceLine',
'CMakeTraceParser',
+ 'SingleTargetOptions',
+ 'TargetOptions',
'parse_generator_expressions',
'language_map',
+ 'cmake_defines_to_args',
]
-from .common import CMakeException
+from .common import CMakeException, SingleTargetOptions, TargetOptions, cmake_defines_to_args
from .client import CMakeClient
from .executor import CMakeExecutor
from .fileapi import CMakeFileAPI
diff --git a/mesonbuild/cmake/common.py b/mesonbuild/cmake/common.py
index e7da0d7..4510b5d 100644
--- a/mesonbuild/cmake/common.py
+++ b/mesonbuild/cmake/common.py
@@ -60,6 +60,26 @@ def _flags_to_list(raw: str) -> T.List[str]:
res = list(filter(lambda x: len(x) > 0, res))
return res
+def cmake_defines_to_args(raw: T.Any, permissive: bool = False) -> T.List[str]:
+ res = [] # type: T.List[str]
+ if not isinstance(raw, list):
+ raw = [raw]
+
+ for i in raw:
+ if not isinstance(i, dict):
+ raise MesonException('Invalid CMake defines. Expected a dict, but got a {}'.format(type(i).__name__))
+ for key, val in i.items():
+ assert isinstance(key, str)
+            # bool must be checked before (str, int, float): bool is a
+            # subclass of int, so the numeric branch would otherwise
+            # swallow True/False and emit 'True'/'False' instead of ON/OFF.
+            if isinstance(val, bool):
+                val_str = 'ON' if val else 'OFF'
+                res += ['-D{}={}'.format(key, val_str)]
+            elif isinstance(val, (str, int, float)):
+                res += ['-D{}={}'.format(key, val)]
+            else:
+                raise MesonException('Type "{}" of "{}" is not supported as a CMake define value'.format(type(val).__name__, key))
+
+ return res
+
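With bool tested ahead of the numeric branch, the helper renders booleans as CMake's ON/OFF; a usage example:

    cmake_defines_to_args([{'ENABLE_FOO': True, 'FOO_VERSION': '1.2'}])
    # ['-DENABLE_FOO=ON', '-DFOO_VERSION=1.2']
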
class CMakeFileGroup:
def __init__(self, data: dict):
self.defines = data.get('defines', '')
@@ -163,3 +183,78 @@ class CMakeConfiguration:
mlog.log('Project {}:'.format(idx))
with mlog.nested():
i.log()
+
+class SingleTargetOptions:
+ def __init__(self) -> None:
+ self.opts = {} # type: T.Dict[str, str]
+ self.lang_args = {} # type: T.Dict[str, T.List[str]]
+ self.link_args = [] # type: T.List[str]
+ self.install = 'preserve'
+
+ def set_opt(self, opt: str, val: str) -> None:
+ self.opts[opt] = val
+
+ def append_args(self, lang: str, args: T.List[str]) -> None:
+ if lang not in self.lang_args:
+ self.lang_args[lang] = []
+ self.lang_args[lang] += args
+
+ def append_link_args(self, args: T.List[str]) -> None:
+ self.link_args += args
+
+ def set_install(self, install: bool) -> None:
+ self.install = 'true' if install else 'false'
+
+ def get_override_options(self, initial: T.List[str]) -> T.List[str]:
+ res = [] # type: T.List[str]
+ for i in initial:
+ opt = i[:i.find('=')]
+ if opt not in self.opts:
+ res += [i]
+ res += ['{}={}'.format(k, v) for k, v in self.opts.items()]
+ return res
+
+ def get_compile_args(self, lang: str, initial: T.List[str]) -> T.List[str]:
+ if lang in self.lang_args:
+ return initial + self.lang_args[lang]
+ return initial
+
+ def get_link_args(self, initial: T.List[str]) -> T.List[str]:
+ return initial + self.link_args
+
+ def get_install(self, initial: bool) -> bool:
+ return {'preserve': initial, 'true': True, 'false': False}[self.install]
+
+class TargetOptions:
+ def __init__(self) -> None:
+ self.global_options = SingleTargetOptions()
+ self.target_options = {} # type: T.Dict[str, SingleTargetOptions]
+
+ def __getitem__(self, tgt: str) -> SingleTargetOptions:
+ if tgt not in self.target_options:
+ self.target_options[tgt] = SingleTargetOptions()
+ return self.target_options[tgt]
+
+ def get_override_options(self, tgt: str, initial: T.List[str]) -> T.List[str]:
+ initial = self.global_options.get_override_options(initial)
+ if tgt in self.target_options:
+ initial = self.target_options[tgt].get_override_options(initial)
+ return initial
+
+ def get_compile_args(self, tgt: str, lang: str, initial: T.List[str]) -> T.List[str]:
+ initial = self.global_options.get_compile_args(lang, initial)
+ if tgt in self.target_options:
+ initial = self.target_options[tgt].get_compile_args(lang, initial)
+ return initial
+
+ def get_link_args(self, tgt: str, initial: T.List[str]) -> T.List[str]:
+ initial = self.global_options.get_link_args(initial)
+ if tgt in self.target_options:
+ initial = self.target_options[tgt].get_link_args(initial)
+ return initial
+
+ def get_install(self, tgt: str, initial: bool) -> bool:
+ initial = self.global_options.get_install(initial)
+ if tgt in self.target_options:
+ initial = self.target_options[tgt].get_install(initial)
+ return initial
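
A short usage sketch of the two classes above; the target name is hypothetical. Per-target settings are layered on top of the global ones:

    opts = TargetOptions()
    opts.global_options.append_args('c', ['-DGLOBAL'])
    opts['mylib'].set_opt('cpp_std', 'c++17')
    opts['mylib'].set_install(True)

    opts.get_compile_args('mylib', 'c', ['-O2'])            # ['-O2', '-DGLOBAL']
    opts.get_override_options('mylib', ['cpp_std=c++14'])   # ['cpp_std=c++17']
    opts.get_install('mylib', False)                        # True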
diff --git a/mesonbuild/cmake/data/run_ctgt.py b/mesonbuild/cmake/data/run_ctgt.py
deleted file mode 100755
index 9d5d437..0000000
--- a/mesonbuild/cmake/data/run_ctgt.py
+++ /dev/null
@@ -1,96 +0,0 @@
-#!/usr/bin/env python3
-
-import argparse
-import subprocess
-import shutil
-import os
-import sys
-from pathlib import Path
-
-commands = [[]]
-SEPARATOR = ';;;'
-
-# Generate CMD parameters
-parser = argparse.ArgumentParser(description='Wrapper for add_custom_command')
-parser.add_argument('-d', '--directory', type=str, metavar='D', required=True, help='Working directory to cwd to')
-parser.add_argument('-o', '--outputs', nargs='+', metavar='O', required=True, help='Expected output files')
-parser.add_argument('-O', '--original-outputs', nargs='*', metavar='O', default=[], help='Output files expected by CMake')
-parser.add_argument('commands', nargs=argparse.REMAINDER, help='A "{}" seperated list of commands'.format(SEPARATOR))
-
-# Parse
-args = parser.parse_args()
-
-dummy_target = None
-if len(args.outputs) == 1 and len(args.original_outputs) == 0:
- dummy_target = args.outputs[0]
-elif len(args.outputs) != len(args.original_outputs):
- print('Length of output list and original output list differ')
- sys.exit(1)
-
-for i in args.commands:
- if i == SEPARATOR:
- commands += [[]]
- continue
-
- i = i.replace('"', '') # Remove lefover quotes
- commands[-1] += [i]
-
-# Execute
-for i in commands:
- # Skip empty lists
- if not i:
- continue
-
- cmd = []
- stdout = None
- stderr = None
- capture_file = ''
-
- for j in i:
- if j in ['>', '>>']:
- stdout = subprocess.PIPE
- continue
- elif j in ['&>', '&>>']:
- stdout = subprocess.PIPE
- stderr = subprocess.STDOUT
- continue
-
- if stdout is not None or stderr is not None:
- capture_file += j
- else:
- cmd += [j]
-
- try:
- os.makedirs(args.directory, exist_ok=True)
-
- res = subprocess.run(cmd, stdout=stdout, stderr=stderr, cwd=args.directory, check=True)
- if capture_file:
- out_file = Path(args.directory) / capture_file
- out_file.write_bytes(res.stdout)
- except subprocess.CalledProcessError:
- exit(1)
-
-if dummy_target:
- with open(dummy_target, 'a'):
- os.utime(dummy_target, None)
- exit(0)
-
-# Copy outputs
-zipped_outputs = zip(args.outputs, args.original_outputs)
-for expected, generated in zipped_outputs:
- do_copy = False
- if not os.path.exists(expected):
- if not os.path.exists(generated):
- print('Unable to find generated file. This can cause the build to fail:')
- print(generated)
- do_copy = False
- else:
- do_copy = True
- elif os.path.exists(generated):
- if os.path.getmtime(generated) > os.path.getmtime(expected):
- do_copy = True
-
- if do_copy:
- if os.path.exists(expected):
- os.remove(expected)
- shutil.copyfile(generated, expected)
diff --git a/mesonbuild/cmake/executor.py b/mesonbuild/cmake/executor.py
index 349c8ec..d41cd22 100644
--- a/mesonbuild/cmake/executor.py
+++ b/mesonbuild/cmake/executor.py
@@ -28,6 +28,7 @@ import textwrap
from .. import mlog, mesonlib
from ..mesonlib import PerMachine, Popen_safe, version_compare, MachineChoice
from ..environment import Environment
+from ..envconfig import get_env_var
if T.TYPE_CHECKING:
from ..dependencies.base import ExternalProgram
@@ -48,6 +49,8 @@ class CMakeExecutor:
self.cmakebin, self.cmakevers = self.find_cmake_binary(self.environment, silent=silent)
self.always_capture_stderr = True
self.print_cmout = False
+ self.prefix_paths = [] # type: T.List[str]
+ self.extra_cmake_args = [] # type: T.List[str]
if self.cmakebin is False:
self.cmakebin = None
return
@@ -60,26 +63,23 @@ class CMakeExecutor:
self.cmakebin = None
return
+ self.prefix_paths = self.environment.coredata.builtins_per_machine[self.for_machine]['cmake_prefix_path'].value
+ env_pref_path = get_env_var(
+ self.for_machine,
+ self.environment.is_cross_build(),
+ 'CMAKE_PREFIX_PATH')
+ if env_pref_path is not None:
+ env_pref_path = re.split(r':|;', env_pref_path)
+ env_pref_path = [x for x in env_pref_path if x] # Filter out empty strings
+ if not self.prefix_paths:
+ self.prefix_paths = []
+ self.prefix_paths += env_pref_path
+
+ if self.prefix_paths:
+ self.extra_cmake_args += ['-DCMAKE_PREFIX_PATH={}'.format(';'.join(self.prefix_paths))]
+
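The environment value is accepted with either colon or semicolon separators, and empty entries are filtered out; for instance:

    import re
    env_val = '/opt/foo;/opt/bar::/opt/baz'
    [x for x in re.split(r':|;', env_val) if x]
    # ['/opt/foo', '/opt/bar', '/opt/baz']
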
def find_cmake_binary(self, environment: Environment, silent: bool = False) -> T.Tuple['ExternalProgram', str]:
- from ..dependencies.base import ExternalProgram
-
- # Create an iterator of options
- def search():
- # Lookup in cross or machine file.
- potential_cmakepath = environment.lookup_binary_entry(self.for_machine, 'cmake')
- if potential_cmakepath is not None:
- mlog.debug('CMake binary for %s specified from cross file, native file, or env var as %s.', self.for_machine, potential_cmakepath)
- yield ExternalProgram.from_entry('cmake', potential_cmakepath)
- # We never fallback if the user-specified option is no good, so
- # stop returning options.
- return
- mlog.debug('CMake binary missing from cross or native file, or env var undefined.')
- # Fallback on hard-coded defaults.
- # TODO prefix this for the cross case instead of ignoring thing.
- if environment.machines.matches_build_machine(self.for_machine):
- for potential_cmakepath in environment.default_cmake:
- mlog.debug('Trying a default CMake fallback at', potential_cmakepath)
- yield ExternalProgram(potential_cmakepath, silent=True)
+ from ..dependencies.base import find_external_program
# Only search for CMake the first time and store the result in the class
# definition
@@ -89,10 +89,11 @@ class CMakeExecutor:
mlog.debug('CMake binary for %s is cached.' % self.for_machine)
else:
assert CMakeExecutor.class_cmakebin[self.for_machine] is None
+
mlog.debug('CMake binary for %s is not cached' % self.for_machine)
- for potential_cmakebin in search():
- mlog.debug('Trying CMake binary {} for machine {} at {}'
- .format(potential_cmakebin.name, self.for_machine, potential_cmakebin.command))
+ for potential_cmakebin in find_external_program(
+ environment, self.for_machine, 'cmake', 'CMake',
+ environment.default_cmake, allow_default_for_cross=False):
version_if_ok = self.check_cmake(potential_cmakebin)
if not version_if_ok:
continue
@@ -132,7 +133,7 @@ class CMakeExecutor:
msg += '\n\nOn Unix-like systems this is often caused by scripts that are not executable.'
mlog.warning(msg)
return None
- cmvers = re.sub(r'\s*cmake version\s*', '', out.split('\n')[0]).strip()
+ cmvers = re.search(r'(cmake|cmake3)\s*version\s*([\d.]+)', out).group(2)
return cmvers
def set_exec_mode(self, print_cmout: T.Optional[bool] = None, always_capture_stderr: T.Optional[bool] = None) -> None:
@@ -226,6 +227,7 @@ class CMakeExecutor:
if env is None:
env = os.environ
+ args = args + self.extra_cmake_args
if disable_cache:
return self._call_impl(args, build_dir, env)
@@ -362,5 +364,8 @@ class CMakeExecutor:
def get_command(self) -> T.List[str]:
return self.cmakebin.get_command()
+ def get_cmake_prefix_paths(self) -> T.List[str]:
+ return self.prefix_paths
+
def machine_choice(self) -> MachineChoice:
return self.for_machine
diff --git a/mesonbuild/cmake/interpreter.py b/mesonbuild/cmake/interpreter.py
index 6208696..91700c7 100644
--- a/mesonbuild/cmake/interpreter.py
+++ b/mesonbuild/cmake/interpreter.py
@@ -15,16 +15,15 @@
# This class contains the basic functionality needed to run any interpreter
# or an interpreter-based tool.
-import pkg_resources
-
-from .common import CMakeException, CMakeTarget
+from .common import CMakeException, CMakeTarget, TargetOptions
from .client import CMakeClient, RequestCMakeInputs, RequestConfigure, RequestCompute, RequestCodeModel
from .fileapi import CMakeFileAPI
from .executor import CMakeExecutor
from .traceparser import CMakeTraceParser, CMakeGeneratorTarget
-from .. import mlog
+from .. import mlog, mesonlib
from ..environment import Environment
from ..mesonlib import Language, MachineChoice, OrderedSet, version_compare
+from ..mesondata import mesondata
from ..compilers.compilers import lang_suffixes, header_suffixes, obj_suffixes, lib_suffixes, is_header
from enum import Enum
from functools import lru_cache
@@ -289,7 +288,17 @@ class ConverterTarget:
for j in self.compile_opts[i]:
m = ConverterTarget.std_regex.match(j)
if m:
- self.override_options += ['{}_std={}'.format(i, m.group(2))]
+ std = m.group(2)
+ supported = self._all_lang_stds(i)
+ if std not in supported:
+ mlog.warning(
+ 'Unknown {0}_std "{1}" -> Ignoring. Try setting the project-'
+ 'level {0}_std if build errors occur. Known '
+ '{0}_stds are: {2}'.format(i, std, ' '.join(supported)),
+ once=True
+ )
+ continue
+ self.override_options += ['{}_std={}'.format(i, std)]
elif j in ['-fPIC', '-fpic', '-fPIE', '-fpie']:
self.pie = True
elif j in blacklist_compiler_flags:
@@ -307,13 +316,6 @@ class ConverterTarget:
tgt = trace.targets.get(self.cmake_name)
if tgt:
self.depends_raw = trace.targets[self.cmake_name].depends
- if self.type.upper() == 'INTERFACE_LIBRARY':
- props = tgt.properties
-
- self.includes += props.get('INTERFACE_INCLUDE_DIRECTORIES', [])
- self.public_compile_opts += props.get('INTERFACE_COMPILE_DEFINITIONS', [])
- self.public_compile_opts += props.get('INTERFACE_COMPILE_OPTIONS', [])
- self.link_flags += props.get('INTERFACE_LINK_OPTIONS', [])
# TODO refactor this copy paste from CMakeDependency for future releases
reg_is_lib = re.compile(r'^(-l[a-zA-Z0-9_]+|-l?pthread)$')
@@ -332,6 +334,12 @@ class ConverterTarget:
libraries = []
mlog.debug(tgt)
+ if 'INTERFACE_INCLUDE_DIRECTORIES' in tgt.properties:
+ self.includes += [x for x in tgt.properties['INTERFACE_INCLUDE_DIRECTORIES'] if x]
+
+ if 'INTERFACE_LINK_OPTIONS' in tgt.properties:
+ self.link_flags += [x for x in tgt.properties['INTERFACE_LINK_OPTIONS'] if x]
+
if 'INTERFACE_COMPILE_DEFINITIONS' in tgt.properties:
self.public_compile_opts += ['-D' + re.sub('^-D', '', x) for x in tgt.properties['INTERFACE_COMPILE_DEFINITIONS'] if x]
@@ -346,8 +354,15 @@ class ConverterTarget:
cfgs += [x for x in tgt.properties['CONFIGURATIONS'] if x]
cfg = cfgs[0]
- if 'RELEASE' in cfgs:
- cfg = 'RELEASE'
+            is_debug = self.env.coredata.get_builtin_option('debug')
+ if is_debug:
+ if 'DEBUG' in cfgs:
+ cfg = 'DEBUG'
+ elif 'RELEASE' in cfgs:
+ cfg = 'RELEASE'
+ else:
+ if 'RELEASE' in cfgs:
+ cfg = 'RELEASE'
if 'IMPORTED_IMPLIB_{}'.format(cfg) in tgt.properties:
libraries += [x for x in tgt.properties['IMPORTED_IMPLIB_{}'.format(cfg)] if x]
@@ -539,6 +554,13 @@ class ConverterTarget:
suffixes += [x for x in exts]
return suffixes
+ @lru_cache(maxsize=None)
+ def _all_lang_stds(self, lang: str) -> T.List[str]:
+ lang_opts = self.env.coredata.compiler_options.build.get(lang, None)
+ if not lang_opts or 'std' not in lang_opts:
+ return []
+ return lang_opts['std'].choices
+
def process_inter_target_dependencies(self):
# Move the dependencies from all transfer_dependencies_from to the target
to_process = list(self.depends)
@@ -791,7 +813,7 @@ class CMakeInterpreter:
raise CMakeException('Unable to find CMake')
self.trace = CMakeTraceParser(cmake_exe.version(), self.build_dir, permissive=True)
- preload_file = pkg_resources.resource_filename('mesonbuild', 'cmake/data/preload.cmake')
+ preload_file = mesondata['cmake/data/preload.cmake'].write_to_private(self.env)
        # Prefer CMAKE_PROJECT_INCLUDE over CMAKE_TOOLCHAIN_FILE if possible,
# since CMAKE_PROJECT_INCLUDE was actually designed for code injection.
@@ -970,7 +992,7 @@ class CMakeInterpreter:
mlog.log('CMake project', mlog.bold(self.project_name), 'has', mlog.bold(str(len(self.targets) + len(self.custom_targets))), 'build targets.')
- def pretend_to_be_meson(self) -> CodeBlockNode:
+ def pretend_to_be_meson(self, options: TargetOptions) -> CodeBlockNode:
if not self.project_name:
raise CMakeException('CMakeInterpreter was not analysed')
@@ -1036,9 +1058,6 @@ class CMakeInterpreter:
root_cb.lines += [function('project', [self.project_name] + self.languages)]
# Add the run script for custom commands
- run_script = pkg_resources.resource_filename('mesonbuild', 'cmake/data/run_ctgt.py')
- run_script_var = 'ctgt_run_script'
- root_cb.lines += [assign(run_script_var, function('find_program', [[run_script]], {'required': True}))]
# Add the targets
processing = []
@@ -1134,21 +1153,26 @@ class CMakeInterpreter:
dep_var = '{}_dep'.format(tgt.name)
tgt_var = tgt.name
+ install_tgt = options.get_install(tgt.cmake_name, tgt.install)
+
# Generate target kwargs
tgt_kwargs = {
- 'build_by_default': tgt.install,
- 'link_args': tgt.link_flags + tgt.link_libraries,
+ 'build_by_default': install_tgt,
+ 'link_args': options.get_link_args(tgt.cmake_name, tgt.link_flags + tgt.link_libraries),
'link_with': link_with,
'include_directories': id_node(inc_var),
- 'install': tgt.install,
- 'install_dir': tgt.install_dir,
- 'override_options': tgt.override_options,
+ 'install': install_tgt,
+ 'override_options': options.get_override_options(tgt.cmake_name, tgt.override_options),
'objects': [method(x, 'extract_all_objects') for x in objec_libs],
}
+ # Only set if installed and only override if it is set
+ if install_tgt and tgt.install_dir:
+ tgt_kwargs['install_dir'] = tgt.install_dir
+
# Handle compiler args
for key, val in tgt.compile_opts.items():
- tgt_kwargs['{}_args'.format(key)] = val
+ tgt_kwargs['{}_args'.format(key)] = options.get_compile_args(tgt.cmake_name, key, val)
        # Handle -fPIC, etc.
if tgt_func == 'executable':
@@ -1220,7 +1244,8 @@ class CMakeInterpreter:
# Generate the command list
command = []
- command += [id_node(run_script_var)]
+ command += mesonlib.meson_command
+ command += ['--internal', 'cmake_run_ctgt']
command += ['-o', '@OUTPUT@']
if tgt.original_outputs:
command += ['-O'] + tgt.original_outputs
diff --git a/mesonbuild/cmake/traceparser.py b/mesonbuild/cmake/traceparser.py
index 432cd21..a241360 100644
--- a/mesonbuild/cmake/traceparser.py
+++ b/mesonbuild/cmake/traceparser.py
@@ -64,6 +64,7 @@ class CMakeTarget:
return
for key, val in self.properties.items():
self.properties[key] = [x.strip() for x in val]
+ assert all([';' not in x for x in self.properties[key]])
class CMakeGeneratorTarget(CMakeTarget):
def __init__(self, name):
@@ -138,7 +139,7 @@ class CMakeTraceParser:
if not self.requires_stderr():
            if not self.trace_file_path.exists() or not self.trace_file_path.is_file():
raise CMakeException('CMake: Trace file "{}" not found'.format(str(self.trace_file_path)))
- trace = self.trace_file_path.read_text()
+ trace = self.trace_file_path.read_text(errors='ignore')
if not trace:
raise CMakeException('CMake: The CMake trace was not provided or is empty')
@@ -574,10 +575,10 @@ class CMakeTraceParser:
continue
if mode in ['INTERFACE', 'LINK_INTERFACE_LIBRARIES', 'PUBLIC', 'LINK_PUBLIC']:
- interface += [i]
+ interface += i.split(';')
if mode in ['PUBLIC', 'PRIVATE', 'LINK_PRIVATE']:
- private += [i]
+ private += i.split(';')
if paths:
interface = self._guess_files(interface)
@@ -655,30 +656,45 @@ class CMakeTraceParser:
# Try joining file paths that contain spaces
- reg_start = re.compile(r'^([A-Za-z]:)?/.*/[^./]+$')
+ reg_start = re.compile(r'^([A-Za-z]:)?/(.*/)*[^./]+$')
reg_end = re.compile(r'^.*\.[a-zA-Z]+$')
fixed_list = [] # type: T.List[str]
curr_str = None # type: T.Optional[str]
+ path_found = False # type: bool
for i in broken_list:
if curr_str is None:
curr_str = i
+ path_found = False
elif os.path.isfile(curr_str):
# Abort concatenation if curr_str is an existing file
fixed_list += [curr_str]
curr_str = i
+ path_found = False
elif not reg_start.match(curr_str):
# Abort concatenation if curr_str no longer matches the regex
fixed_list += [curr_str]
curr_str = i
- elif reg_end.match(i) or os.path.exists('{} {}'.format(curr_str, i)):
+ path_found = False
+ elif reg_end.match(i):
# File detected
curr_str = '{} {}'.format(curr_str, i)
fixed_list += [curr_str]
curr_str = None
+ path_found = False
+ elif os.path.exists('{} {}'.format(curr_str, i)):
+ # Path detected
+ curr_str = '{} {}'.format(curr_str, i)
+ path_found = True
+ elif path_found:
+ # Add path to fixed_list after ensuring the whole path is in curr_str
+ fixed_list += [curr_str]
+ curr_str = i
+ path_found = False
else:
curr_str = '{} {}'.format(curr_str, i)
+ path_found = False
if curr_str:
fixed_list += [curr_str]
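
Two trace-parser behaviours change here: `;`-separated CMake list values are now split into individual libraries, and the heuristic that rejoins absolute paths broken on spaces learned to keep growing a path until it stops existing on disk. A simplified, self-contained sketch of that joining heuristic (the real code additionally tracks a `path_found` flag for extension-less paths):

```python
import os
import re

reg_start = re.compile(r'^([A-Za-z]:)?/(.*/)*[^./]+$')  # looks like a path start
reg_end = re.compile(r'^.*\.[a-zA-Z]+$')                # ends in an extension

def join_broken_paths(broken):
    fixed, curr = [], None
    for tok in broken:
        if curr is None:
            curr = tok
        elif os.path.isfile(curr) or not reg_start.match(curr):
            fixed.append(curr)              # complete file / not path-like: stop
            curr = tok
        elif reg_end.match(tok):
            fixed.append(curr + ' ' + tok)  # extension seen: the path is complete
            curr = None
        else:
            curr = curr + ' ' + tok         # keep accumulating the broken path
    if curr is not None:
        fixed.append(curr)
    return fixed

# Tokens CMake produced for '/tmp/dir with spaces/lib.a':
print(join_broken_paths(['/tmp/dir', 'with', 'spaces/lib.a']))
```
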
diff --git a/mesonbuild/compilers/__init__.py b/mesonbuild/compilers/__init__.py
index af7e519..fd47545 100644
--- a/mesonbuild/compilers/__init__.py
+++ b/mesonbuild/compilers/__init__.py
@@ -48,7 +48,6 @@ __all__ = [
'ClangObjCPPCompiler',
'ClangClCCompiler',
'ClangClCPPCompiler',
- 'CompilerArgs',
'CPPCompiler',
'DCompiler',
'DmdDCompiler',
@@ -123,7 +122,6 @@ from .compilers import (
is_known_suffix,
lang_suffixes,
sort_clink,
- CompilerArgs,
)
from .c import (
CCompiler,
diff --git a/mesonbuild/compilers/c.py b/mesonbuild/compilers/c.py
index 52a5157..feae8ac 100644
--- a/mesonbuild/compilers/c.py
+++ b/mesonbuild/compilers/c.py
@@ -86,9 +86,10 @@ class ClangCCompiler(ClangCompiler, CCompiler):
_C18_VERSION = '>=8.0.0'
def __init__(self, exelist, version, for_machine: MachineChoice,
- is_cross, info: 'MachineInfo', exe_wrapper=None, **kwargs):
+ is_cross, info: 'MachineInfo', exe_wrapper=None,
+ defines: T.Optional[T.List[str]] = None, **kwargs):
CCompiler.__init__(self, exelist, version, for_machine, is_cross, info, exe_wrapper, **kwargs)
- ClangCompiler.__init__(self)
+ ClangCompiler.__init__(self, defines)
default_warn_args = ['-Wall', '-Winvalid-pch']
self.warn_args = {'0': [],
'1': default_warn_args,
diff --git a/mesonbuild/compilers/c_function_attributes.py b/mesonbuild/compilers/c_function_attributes.py
index e5de485..f31229e 100644
--- a/mesonbuild/compilers/c_function_attributes.py
+++ b/mesonbuild/compilers/c_function_attributes.py
@@ -56,6 +56,8 @@ C_FUNC_ATTRIBUTES = {
'int foo(const char * p, ...) __attribute__((format(printf, 1, 2)));',
'format_arg':
'char * foo(const char * p) __attribute__((format_arg(1)));',
+ 'force_align_arg_pointer':
+ '__attribute__((force_align_arg_pointer)) int foo(void) { return 0; }',
'gnu_inline':
'inline __attribute__((gnu_inline)) int foo(void) { return 0; }',
'hot':
diff --git a/mesonbuild/compilers/compilers.py b/mesonbuild/compilers/compilers.py
index ecdb70d..c31439c 100644
--- a/mesonbuild/compilers/compilers.py
+++ b/mesonbuild/compilers/compilers.py
@@ -12,19 +12,16 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+import abc
import contextlib, os.path, re, tempfile
-import collections.abc
import itertools
import typing as T
from functools import lru_cache
-from ..linkers import (
- GnuLikeDynamicLinkerMixin, LinkerEnvVarsMixin, SolarisDynamicLinker,
- StaticLinker,
-)
from .. import coredata
from .. import mlog
from .. import mesonlib
+from ..linkers import LinkerEnvVarsMixin
from ..mesonlib import (
EnvironmentException, Language, MachineChoice, MesonException,
Popen_safe, split_args
@@ -32,6 +29,7 @@ from ..mesonlib import (
from ..envconfig import (
Properties, get_env_var
)
+from ..arglist import CompilerArgs
if T.TYPE_CHECKING:
from ..coredata import OptionDictType
@@ -52,7 +50,7 @@ lib_suffixes = ('a', 'lib', 'dll', 'dll.a', 'dylib', 'so')
# This means we can't include .h headers here since they could be C, C++, ObjC, etc.
lang_suffixes = {
Language.C: ('c',),
- Language.CPP: ('cpp', 'cc', 'cxx', 'c++', 'hh', 'hpp', 'ipp', 'hxx'),
+ Language.CPP: ('cpp', 'cc', 'cxx', 'c++', 'hh', 'hpp', 'ipp', 'hxx', 'ino'),
Language.CUDA: ('cu',),
# f90, f95, f03, f08 are for free-form fortran ('f90' recommended)
# f, for, ftn, fpp are for fixed-form fortran ('f' or 'for' recommended)
@@ -113,11 +111,6 @@ cflags_mapping = {
Language.RUST: 'RUSTFLAGS',
}
-unixy_compiler_internal_libs = ('m', 'c', 'pthread', 'dl', 'rt')
-# execinfo is a compiler lib on FreeBSD and NetBSD
-if mesonlib.is_freebsd() or mesonlib.is_netbsd():
- unixy_compiler_internal_libs += ('execinfo',)
-
# All these are only for C-linkable languages; see `clink_langs` above.
def sort_clink(lang):
@@ -153,11 +146,15 @@ def is_llvm_ir(fname):
fname = fname.fname
return fname.split('.')[-1] == 'll'
+@lru_cache(maxsize=None)
+def cached_by_name(fname):
+ suffix = fname.split('.')[-1]
+ return suffix in obj_suffixes
+
def is_object(fname):
if hasattr(fname, 'fname'):
fname = fname.fname
- suffix = fname.split('.')[-1]
- return suffix in obj_suffixes
+ return cached_by_name(fname)
def is_library(fname):
if hasattr(fname, 'fname'):
@@ -201,7 +198,7 @@ rust_buildtype_args = {'plain': [],
d_gdc_buildtype_args = {'plain': [],
'debug': [],
'debugoptimized': ['-finline-functions'],
- 'release': ['-frelease', '-finline-functions'],
+ 'release': ['-finline-functions'],
'minsize': [],
'custom': [],
}
@@ -209,7 +206,7 @@ d_gdc_buildtype_args = {'plain': [],
d_ldc_buildtype_args = {'plain': [],
'debug': [],
'debugoptimized': ['-enable-inlining', '-Hkeep-all-bodies'],
- 'release': ['-release', '-enable-inlining', '-Hkeep-all-bodies'],
+ 'release': ['-enable-inlining', '-Hkeep-all-bodies'],
'minsize': [],
'custom': [],
}
@@ -217,7 +214,7 @@ d_ldc_buildtype_args = {'plain': [],
d_dmd_buildtype_args = {'plain': [],
'debug': [],
'debugoptimized': ['-inline'],
- 'release': ['-release', '-inline'],
+ 'release': ['-inline'],
'minsize': [],
'custom': [],
}
@@ -335,7 +332,7 @@ def get_base_compile_args(options, compiler):
if (options['b_ndebug'].value == 'true' or
(options['b_ndebug'].value == 'if-release' and
options['buildtype'].value in {'release', 'plain'})):
- args += ['-DNDEBUG']
+ args += compiler.get_disable_assert_args()
except KeyError:
pass
# This does not need a try...except
@@ -387,9 +384,10 @@ def get_base_link_args(options, linker, is_shared_module):
# -Wl,-dead_strip_dylibs is incompatible with bitcode
args.extend(linker.get_asneeded_args())
- # Apple's ld (the only one that supports bitcode) does not like any
- # -undefined arguments at all, so don't pass these when using bitcode
+ # Apple's ld (the only one that supports bitcode) does not like -undefined
+ # arguments or -headerpad_max_install_names when bitcode is enabled
if not bitcode:
+ args.extend(linker.headerpad_args())
if (not is_shared_module and
option_enabled(linker.base_options, options, 'b_lundef')):
args.extend(linker.no_undefined_link_args())
@@ -418,334 +416,8 @@ class RunResult:
self.stdout = stdout
self.stderr = stderr
-class CompilerArgs(collections.abc.MutableSequence):
- '''
- List-like class that manages a list of compiler arguments. Should be used
- while constructing compiler arguments from various sources. Can be
- operated with ordinary lists, so this does not need to be used
- everywhere.
-
- All arguments must be inserted and stored in GCC-style (-lfoo, -Idir, etc)
- and can converted to the native type of each compiler by using the
- .to_native() method to which you must pass an instance of the compiler or
- the compiler class.
-
- New arguments added to this class (either with .append(), .extend(), or +=)
- are added in a way that ensures that they override previous arguments.
- For example:
-
- >>> a = ['-Lfoo', '-lbar']
- >>> a += ['-Lpho', '-lbaz']
- >>> print(a)
- ['-Lpho', '-Lfoo', '-lbar', '-lbaz']
-
- Arguments will also be de-duped if they can be de-duped safely.
-
- Note that because of all this, this class is not commutative and does not
- preserve the order of arguments if it is safe to not. For example:
- >>> ['-Ifoo', '-Ibar'] + ['-Ifez', '-Ibaz', '-Werror']
- ['-Ifez', '-Ibaz', '-Ifoo', '-Ibar', '-Werror']
- >>> ['-Ifez', '-Ibaz', '-Werror'] + ['-Ifoo', '-Ibar']
- ['-Ifoo', '-Ibar', '-Ifez', '-Ibaz', '-Werror']
-
- '''
- # NOTE: currently this class is only for C-like compilers, but it can be
- # extended to other languages easily. Just move the following to the
- # compiler class and initialize when self.compiler is set.
-
- # Arg prefixes that override by prepending instead of appending
- prepend_prefixes = ('-I', '-L')
- # Arg prefixes and args that must be de-duped by returning 2
- dedup2_prefixes = ('-I', '-isystem', '-L', '-D', '-U')
- dedup2_suffixes = ()
- dedup2_args = ()
- # Arg prefixes and args that must be de-duped by returning 1
- #
- # NOTE: not thorough. A list of potential corner cases can be found in
- # https://github.com/mesonbuild/meson/pull/4593#pullrequestreview-182016038
- dedup1_prefixes = ('-l', '-Wl,-l', '-Wl,--export-dynamic')
- dedup1_suffixes = ('.lib', '.dll', '.so', '.dylib', '.a')
- # Match a .so of the form path/to/libfoo.so.0.1.0
- # Only UNIX shared libraries require this. Others have a fixed extension.
- dedup1_regex = re.compile(r'([\/\\]|\A)lib.*\.so(\.[0-9]+)?(\.[0-9]+)?(\.[0-9]+)?$')
- dedup1_args = ('-c', '-S', '-E', '-pipe', '-pthread')
- # In generate_link() we add external libs without de-dup, but we must
- # *always* de-dup these because they're special arguments to the linker
- always_dedup_args = tuple('-l' + lib for lib in unixy_compiler_internal_libs)
-
- def __init__(self, compiler: T.Union['Compiler', StaticLinker],
- iterable: T.Optional[T.Iterable[str]] = None):
- self.compiler = compiler
- self.__container = list(iterable) if iterable is not None else [] # type: T.List[str]
- self.__seen_args = set()
- for arg in self.__container:
- self.__seen_args.add(arg)
-
- @T.overload # noqa: F811
- def __getitem__(self, index: int) -> str: # noqa: F811
- pass
-
- @T.overload # noqa: F811
- def __getitem__(self, index: slice) -> T.List[str]: # noqa: F811
- pass
-
- def __getitem__(self, index): # noqa: F811
- return self.__container[index]
-
- @T.overload # noqa: F811
- def __setitem__(self, index: int, value: str) -> None: # noqa: F811
- pass
-
- @T.overload # noqa: F811
- def __setitem__(self, index: slice, value: T.List[str]) -> None: # noqa: F811
- pass
-
- def __setitem__(self, index, value) -> None: # noqa: F811
- self.__container[index] = value
- for v in value:
- self.__seen_args.add(v)
-
- def __delitem__(self, index: T.Union[int, slice]) -> None:
- value = self.__container[index]
- del self.__container[index]
- if value in self.__seen_args and value in self.__container: # this is also honoring that you can have duplicated entries
- self.__seen_args.remove(value)
- def __len__(self) -> int:
- return len(self.__container)
-
- def insert(self, index: int, value: str) -> None:
- self.__container.insert(index, value)
- self.__seen_args.add(value)
-
- def copy(self) -> 'CompilerArgs':
- return CompilerArgs(self.compiler, self.__container.copy())
-
- @classmethod
- @lru_cache(maxsize=None)
- def _can_dedup(cls, arg):
- '''
- Returns whether the argument can be safely de-duped. This is dependent
- on three things:
-
- a) Whether an argument can be 'overridden' by a later argument. For
- example, -DFOO defines FOO and -UFOO undefines FOO. In this case, we
- can safely remove the previous occurrence and add a new one. The same
- is true for include paths and library paths with -I and -L. For
- these we return `2`. See `dedup2_prefixes` and `dedup2_args`.
- b) Arguments that once specified cannot be undone, such as `-c` or
- `-pipe`. New instances of these can be completely skipped. For these
- we return `1`. See `dedup1_prefixes` and `dedup1_args`.
- c) Whether it matters where or how many times on the command-line
- a particular argument is present. This can matter for symbol
- resolution in static or shared libraries, so we cannot de-dup or
- reorder them. For these we return `0`. This is the default.
-
- In addition to these, we handle library arguments specially.
- With GNU ld, we surround library arguments with -Wl,--start/end-group
- to recursively search for symbols in the libraries. This is not needed
- with other linkers.
- '''
- # A standalone argument must never be deduplicated because it is
- # defined by what comes _after_ it. Thus dedupping this:
- # -D FOO -D BAR
- # would yield either
- # -D FOO BAR
- # or
- # FOO -D BAR
- # both of which are invalid.
- if arg in cls.dedup2_prefixes:
- return 0
- if arg.startswith('-L='):
- # DMD and LDC proxy all linker arguments using -L=; in conjunction
- # with ld64 on macOS this can lead to command line arguments such
- # as: `-L=-compatibility_version -L=0 -L=current_version -L=0`.
- # These cannot be combined, ld64 insists they must be passed with
- # spaces and quoting does not work. if we deduplicate these then
- # one of the -L=0 arguments will be removed and the version
- # argument will consume the next argument instead.
- return 0
- if arg in cls.dedup2_args or \
- arg.startswith(cls.dedup2_prefixes) or \
- arg.endswith(cls.dedup2_suffixes):
- return 2
- if arg in cls.dedup1_args or \
- arg.startswith(cls.dedup1_prefixes) or \
- arg.endswith(cls.dedup1_suffixes) or \
- re.search(cls.dedup1_regex, arg):
- return 1
- return 0
-
- @classmethod
- @lru_cache(maxsize=None)
- def _should_prepend(cls, arg):
- if arg.startswith(cls.prepend_prefixes):
- return True
- return False
-
- def need_to_split_linker_args(self):
- return isinstance(self.compiler, Compiler) and self.compiler.get_language() == Language.D
-
- def to_native(self, copy: bool = False) -> T.List[str]:
- # Check if we need to add --start/end-group for circular dependencies
- # between static libraries, and for recursively searching for symbols
- # needed by static libraries that are provided by object files or
- # shared libraries.
- if copy:
- new = self.copy()
- else:
- new = self
- # To proxy these arguments with D you need to split the
- # arguments, thus you get `-L=-soname -L=lib.so` we don't
- # want to put the lib in a link -roup
- split_linker_args = self.need_to_split_linker_args()
- # This covers all ld.bfd, ld.gold, ld.gold, and xild on Linux, which
- # all act like (or are) gnu ld
- # TODO: this could probably be added to the DynamicLinker instead
- if (isinstance(self.compiler, Compiler) and
- self.compiler.linker is not None and
- isinstance(self.compiler.linker, (GnuLikeDynamicLinkerMixin, SolarisDynamicLinker))):
- group_start = -1
- group_end = -1
- is_soname = False
- for i, each in enumerate(new):
- if is_soname:
- is_soname = False
- continue
- elif split_linker_args and '-soname' in each:
- is_soname = True
- continue
- if not each.startswith(('-Wl,-l', '-l')) and not each.endswith('.a') and \
- not soregex.match(each):
- continue
- group_end = i
- if group_start < 0:
- # First occurrence of a library
- group_start = i
- if group_start >= 0:
- # Last occurrence of a library
- new.insert(group_end + 1, '-Wl,--end-group')
- new.insert(group_start, '-Wl,--start-group')
- # Remove system/default include paths added with -isystem
- if hasattr(self.compiler, 'get_default_include_dirs'):
- default_dirs = self.compiler.get_default_include_dirs()
- bad_idx_list = [] # type: T.List[int]
- for i, each in enumerate(new):
- # Remove the -isystem and the path if the path is a default path
- if (each == '-isystem' and
- i < (len(new) - 1) and
- new[i + 1] in default_dirs):
- bad_idx_list += [i, i + 1]
- elif each.startswith('-isystem=') and each[9:] in default_dirs:
- bad_idx_list += [i]
- elif each.startswith('-isystem') and each[8:] in default_dirs:
- bad_idx_list += [i]
- for i in reversed(bad_idx_list):
- new.pop(i)
- return self.compiler.unix_args_to_native(new.__container)
-
- def append_direct(self, arg: str) -> None:
- '''
- Append the specified argument without any reordering or de-dup except
- for absolute paths to libraries, etc, which can always be de-duped
- safely.
- '''
- if os.path.isabs(arg):
- self.append(arg)
- else:
- self.__container.append(arg)
- self.__seen_args.add(arg)
-
- def extend_direct(self, iterable: T.Iterable[str]) -> None:
- '''
- Extend using the elements in the specified iterable without any
- reordering or de-dup except for absolute paths where the order of
- include search directories is not relevant
- '''
- for elem in iterable:
- self.append_direct(elem)
-
- def extend_preserving_lflags(self, iterable: T.Iterable[str]) -> None:
- normal_flags = []
- lflags = []
- for i in iterable:
- if i not in self.always_dedup_args and (i.startswith('-l') or i.startswith('-L')):
- lflags.append(i)
- else:
- normal_flags.append(i)
- self.extend(normal_flags)
- self.extend_direct(lflags)
-
- def __add__(self, args: T.Iterable[str]) -> 'CompilerArgs':
- new = self.copy()
- new += args
- return new
-
- def __iadd__(self, args: T.Iterable[str]) -> 'CompilerArgs':
- '''
- Add two CompilerArgs while taking into account overriding of arguments
- and while preserving the order of arguments as much as possible
- '''
- this_round_added = set() # a dict that contains a value, when the value was added this round
- pre = [] # type: T.List[str]
- post = [] # type: T.List[str]
- if not isinstance(args, collections.abc.Iterable):
- raise TypeError('can only concatenate Iterable[str] (not "{}") to CompilerArgs'.format(args))
- for arg in args:
- # If the argument can be de-duped, do it either by removing the
- # previous occurrence of it and adding a new one, or not adding the
- # new occurrence.
- dedup = self._can_dedup(arg)
- if dedup == 1:
- # Argument already exists and adding a new instance is useless
- if arg in self.__seen_args or arg in pre or arg in post:
- continue
- should_prepend = self._should_prepend(arg)
- if dedup == 2:
- # Remove all previous occurrences of the arg and add it anew
- if arg in self.__seen_args and arg not in this_round_added: #if __seen_args contains arg as well as this_round_added, then its not yet part in self.
- self.remove(arg)
- if should_prepend:
- if arg in pre:
- pre.remove(arg)
- else:
- if arg in post:
- post.remove(arg)
- if should_prepend:
- pre.append(arg)
- else:
- post.append(arg)
- self.__seen_args.add(arg)
- this_round_added.add(arg)
- # Insert at the beginning
- self[:0] = pre
- # Append to the end
- self.__container += post
- return self
-
- def __radd__(self, args: T.Iterable[str]):
- new = CompilerArgs(self.compiler, args)
- new += self
- return new
-
- def __eq__(self, other: T.Any) -> T.Union[bool, type(NotImplemented)]:
- # Only allow equality checks against other CompilerArgs and lists instances
- if isinstance(other, CompilerArgs):
- return self.compiler == other.compiler and self.__container == other.__container
- elif isinstance(other, list):
- return self.__container == other
- return NotImplemented
-
- def append(self, arg: str) -> None:
- self.__iadd__([arg])
-
- def extend(self, args: T.Iterable[str]) -> None:
- self.__iadd__(args)
-
- def __repr__(self) -> str:
- return 'CompilerArgs({!r}, {!r})'.format(self.compiler, self.__container)
-
-class Compiler:
+class Compiler(metaclass=abc.ABCMeta):
# Libraries to ignore in find_library() since they are provided by the
# compiler or the C library. Currently only used for MSVC.
ignore_libs = ()
@@ -968,8 +640,12 @@ class Compiler:
args += self.get_preprocess_only_args()
return args
+ def compiler_args(self, args: T.Optional[T.Iterable[str]] = None) -> CompilerArgs:
+ """Return an appropriate CompilerArgs instance for this class."""
+ return CompilerArgs(self, args)
+
@contextlib.contextmanager
- def compile(self, code, extra_args=None, *, mode='link', want_output=False, temp_dir=None):
+ def compile(self, code: str, extra_args: list = None, *, mode: str = 'link', want_output: bool = False, temp_dir: str = None):
if extra_args is None:
extra_args = []
try:
@@ -986,7 +662,7 @@ class Compiler:
srcname = code.fname
# Construct the compiler command-line
- commands = CompilerArgs(self)
+ commands = self.compiler_args()
commands.append(srcname)
# Preprocess mode outputs to stdout, so no output args
if mode != 'preprocess':
@@ -1092,7 +768,7 @@ class Compiler:
def build_rpath_args(self, env: 'Environment', build_dir: str, from_dir: str,
rpath_paths: str, build_rpath: str,
- install_rpath: str) -> T.List[str]:
+ install_rpath: str) -> T.Tuple[T.List[str], T.Set[bytes]]:
return self.linker.build_rpath_args(
env, build_dir, from_dir, rpath_paths, build_rpath, install_rpath)
@@ -1102,6 +778,9 @@ class Compiler:
def openmp_flags(self):
raise EnvironmentException('Language %s does not support OpenMP flags.' % self.get_display_language())
+ def openmp_link_flags(self):
+ return self.openmp_flags()
+
def language_stdlib_only_link_flags(self):
return []
@@ -1151,7 +830,7 @@ class Compiler:
def remove_linkerlike_args(self, args):
rm_exact = ('-headerpad_max_install_names',)
rm_prefixes = ('-Wl,', '-L',)
- rm_next = ('-L',)
+ rm_next = ('-L', '-framework',)
ret = []
iargs = iter(args)
for arg in iargs:
@@ -1184,12 +863,12 @@ class Compiler:
def get_asneeded_args(self) -> T.List[str]:
return self.linker.get_asneeded_args()
+ def headerpad_args(self) -> T.List[str]:
+ return self.linker.headerpad_args()
+
def bitcode_args(self) -> T.List[str]:
return self.linker.bitcode_args()
- def get_linker_debug_crt_args(self) -> T.List[str]:
- return self.linker.get_debug_crt_args()
-
def get_buildtype_linker_args(self, buildtype: str) -> T.List[str]:
return self.linker.get_buildtype_args(buildtype)
@@ -1219,15 +898,18 @@ class Compiler:
def get_coverage_link_args(self) -> T.List[str]:
return self.linker.get_coverage_args()
+ def get_disable_assert_args(self) -> T.List[str]:
+ return []
+
def get_largefile_args(compiler):
'''
Enable transparent large-file-support for 32-bit UNIX systems
'''
- if not (compiler.info.is_windows() or compiler.info.is_darwin()):
+ if not (compiler.get_argument_syntax() == 'msvc' or compiler.info.is_darwin()):
# Enable large-file support unconditionally on all platforms other
- # than macOS and Windows. macOS is now 64-bit-only so it doesn't
- # need anything special, and Windows doesn't have automatic LFS.
+ # than macOS and MSVC. macOS is now 64-bit-only so it doesn't
+ # need anything special, and MSVC doesn't have automatic LFS.
# You must use the 64-bit counterparts explicitly.
# glibc, musl, and uclibc, and all BSD libcs support this. On Android,
# support for transparent LFS is available depending on the version of
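
With `CompilerArgs` moved out to `mesonbuild/arglist.py`, call sites stop constructing it directly and instead ask the compiler via the new `compiler_args()` factory, which lets C-like compilers hand back their `CLikeCompilerArgs` subclass. A rough sketch, assuming `cc` is an already-detected `Compiler` instance (not constructed here):

```python
# Before: commands = CompilerArgs(cc); after:
commands = cc.compiler_args(['-I/usr/include/foo'])
commands += ['-I/usr/include/foo', '-DBAR']  # override/dedup rules still apply
print(commands.to_native())                  # renders in the compiler's own dialect
```
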
diff --git a/mesonbuild/compilers/cpp.py b/mesonbuild/compilers/cpp.py
index c4c7da5..71a300a 100644
--- a/mesonbuild/compilers/cpp.py
+++ b/mesonbuild/compilers/cpp.py
@@ -155,10 +155,11 @@ class CPPCompiler(CLikeCompiler, Compiler):
class ClangCPPCompiler(ClangCompiler, CPPCompiler):
def __init__(self, exelist, version, for_machine: MachineChoice,
- is_cross, info: 'MachineInfo', exe_wrapper=None, **kwargs):
+ is_cross, info: 'MachineInfo', exe_wrapper=None,
+ defines : T.Optional[T.List[str]] = None, **kwargs):
CPPCompiler.__init__(self, exelist, version, for_machine, is_cross,
info, exe_wrapper, **kwargs)
- ClangCompiler.__init__(self)
+ ClangCompiler.__init__(self, defines)
default_warn_args = ['-Wall', '-Winvalid-pch', '-Wnon-virtual-dtor']
self.warn_args = {'0': [],
'1': default_warn_args,
@@ -238,7 +239,9 @@ class EmscriptenCPPCompiler(EmscriptenMixin, LinkerEnvVarsMixin, ClangCPPCompile
class ArmclangCPPCompiler(ArmclangCompiler, CPPCompiler):
def __init__(self, exelist, version, for_machine: MachineChoice,
is_cross, info: 'MachineInfo', exe_wrapper=None, **kwargs):
- CPPCompiler.__init__(self, exelist, version, for_machine, is_cross, exe_wrapper, **kwargs)
+ CPPCompiler.__init__(self, exelist=exelist, version=version,
+ for_machine=for_machine, is_cross=is_cross,
+ info=info, exe_wrapper=exe_wrapper, **kwargs)
ArmclangCompiler.__init__(self)
default_warn_args = ['-Wall', '-Winvalid-pch', '-Wnon-virtual-dtor']
self.warn_args = {'0': [],
@@ -574,7 +577,7 @@ class VisualStudioCPPCompiler(CPP11AsCPP14Mixin, VisualStudioLikeCPPCompilerMixi
is_cross: bool, info: 'MachineInfo', exe_wrap, target, **kwargs):
CPPCompiler.__init__(self, exelist, version, for_machine, is_cross, info, exe_wrap, **kwargs)
MSVCCompiler.__init__(self, target)
- self.base_options = ['b_pch', 'b_vscrt'] # FIXME add lto, pgo and the like
+ self.base_options = ['b_pch', 'b_vscrt', 'b_ndebug'] # FIXME add lto, pgo and the like
self.id = 'msvc'
def get_options(self):
diff --git a/mesonbuild/compilers/cuda.py b/mesonbuild/compilers/cuda.py
index 00233b0..b109572 100644
--- a/mesonbuild/compilers/cuda.py
+++ b/mesonbuild/compilers/cuda.py
@@ -268,17 +268,15 @@ class CudaCompiler(Compiler):
def get_depfile_suffix(self):
return 'd'
- def get_linker_debug_crt_args(self) -> T.List[str]:
- return self._cook_link_args(self.host_compiler.get_linker_debug_crt_args())
-
def get_buildtype_linker_args(self, buildtype):
return self._cook_link_args(self.host_compiler.get_buildtype_linker_args(buildtype))
def build_rpath_args(self, env: 'Environment', build_dir: str, from_dir: str,
rpath_paths: str, build_rpath: str,
- install_rpath: str) -> T.List[str]:
- return self._cook_link_args(self.host_compiler.build_rpath_args(
- env, build_dir, from_dir, rpath_paths, build_rpath, install_rpath))
+ install_rpath: str) -> T.Tuple[T.List[str], T.Set[bytes]]:
+ (rpath_args, rpath_dirs_to_remove) = self.host_compiler.build_rpath_args(
+ env, build_dir, from_dir, rpath_paths, build_rpath, install_rpath)
+ return (self._cook_link_args(rpath_args), rpath_dirs_to_remove)
def linker_to_compiler_args(self, args):
return args
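
`build_rpath_args` now returns a pair everywhere (compilers, the CUDA wrapper, and `BasicLinkerIsCompilerMixin`): the rpath arguments plus a set of build-tree rpath directories that the install step can strip again. A small stand-in showing the new calling convention (`FakeCompiler` and its values are illustrative only):

```python
import typing as T

class FakeCompiler:
    # Follows the new (args, dirs_to_remove) contract from this diff.
    def build_rpath_args(self, *_ignored) -> T.Tuple[T.List[str], T.Set[bytes]]:
        return (['-Wl,-rpath,$ORIGIN/../lib'], {b'/build/lib'})

cc = FakeCompiler()
rpath_args, dirs_to_remove = cc.build_rpath_args()
link_cmd = ['cc', 'main.o'] + rpath_args
print(link_cmd, dirs_to_remove)  # b'/build/lib' can be dropped at install time
```
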
diff --git a/mesonbuild/compilers/d.py b/mesonbuild/compilers/d.py
index caa8e44..d233713 100644
--- a/mesonbuild/compilers/d.py
+++ b/mesonbuild/compilers/d.py
@@ -25,7 +25,6 @@ from .compilers import (
d_ldc_buildtype_args,
clike_debug_args,
Compiler,
- CompilerArgs,
)
from .mixins.gnu import GnuCompiler
@@ -220,7 +219,7 @@ class DmdLikeCompilerMixin:
def build_rpath_args(self, env, build_dir, from_dir, rpath_paths, build_rpath, install_rpath):
if self.info.is_windows():
- return []
+ return ([], set())
# GNU ld, solaris ld, and lld acting like GNU ld
if self.linker.id.startswith('ld'):
@@ -228,15 +227,16 @@ class DmdLikeCompilerMixin:
# do directly, each argument -rpath and the value to rpath, need to be
# split into two separate arguments both prefaced with the -L=.
args = []
- for r in super().build_rpath_args(
- env, build_dir, from_dir, rpath_paths, build_rpath, install_rpath):
+ (rpath_args, rpath_dirs_to_remove) = super().build_rpath_args(
+ env, build_dir, from_dir, rpath_paths, build_rpath, install_rpath)
+ for r in rpath_args:
if ',' in r:
a, b = r.split(',', maxsplit=1)
args.append(a)
args.append(self.LINKER_PREFIX + b)
else:
args.append(r)
- return args
+ return (args, rpath_dirs_to_remove)
return super().build_rpath_args(
env, build_dir, from_dir, rpath_paths, build_rpath, install_rpath)
@@ -581,7 +581,7 @@ class DCompiler(Compiler):
elif not isinstance(dependencies, list):
dependencies = [dependencies]
# Collect compiler arguments
- args = CompilerArgs(self)
+ args = self.compiler_args()
for d in dependencies:
# Add compile flags needed by dependencies
args += d.get_compile_args()
@@ -645,7 +645,8 @@ class GnuDCompiler(GnuCompiler, DCompiler):
'1': default_warn_args,
'2': default_warn_args + ['-Wextra'],
'3': default_warn_args + ['-Wextra', '-Wpedantic']}
- self.base_options = ['b_colorout', 'b_sanitize', 'b_staticpic', 'b_vscrt', 'b_coverage']
+ self.base_options = ['b_colorout', 'b_sanitize', 'b_staticpic',
+ 'b_vscrt', 'b_coverage', 'b_pgo', 'b_ndebug']
self._has_color_support = version_compare(self.version, '>=4.9')
# dependencies were implemented before, but broken - support was fixed in GCC 7.1+
@@ -684,6 +685,9 @@ class GnuDCompiler(GnuCompiler, DCompiler):
return args
return args + ['-shared-libphobos']
+ def get_disable_assert_args(self):
+ return ['-frelease']
+
class LLVMDCompiler(DmdLikeCompilerMixin, DCompiler):
@@ -691,7 +695,7 @@ class LLVMDCompiler(DmdLikeCompilerMixin, DCompiler):
info: 'MachineInfo', arch, **kwargs):
DCompiler.__init__(self, exelist, version, for_machine, info, arch, False, None, **kwargs)
self.id = 'llvm'
- self.base_options = ['b_coverage', 'b_colorout', 'b_vscrt']
+ self.base_options = ['b_coverage', 'b_colorout', 'b_vscrt', 'b_ndebug']
def get_colorout_args(self, colortype):
if colortype == 'always':
@@ -733,6 +737,9 @@ class LLVMDCompiler(DmdLikeCompilerMixin, DCompiler):
return args
return args + ['-link-defaultlib-shared']
+ def get_disable_assert_args(self) -> T.List[str]:
+ return ['--release']
+
class DmdDCompiler(DmdLikeCompilerMixin, DCompiler):
@@ -740,7 +747,7 @@ class DmdDCompiler(DmdLikeCompilerMixin, DCompiler):
info: 'MachineInfo', arch, **kwargs):
DCompiler.__init__(self, exelist, version, for_machine, info, arch, False, None, **kwargs)
self.id = 'dmd'
- self.base_options = ['b_coverage', 'b_colorout', 'b_vscrt']
+ self.base_options = ['b_coverage', 'b_colorout', 'b_vscrt', 'b_ndebug']
def get_colorout_args(self, colortype):
if colortype == 'always':
@@ -803,3 +810,6 @@ class DmdDCompiler(DmdLikeCompilerMixin, DCompiler):
if self.info.is_windows():
return args
return args + ['-defaultlib=phobos2', '-debuglib=phobos2']
+
+ def get_disable_assert_args(self) -> T.List[str]:
+ return ['-release']
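
Together with the `get_base_compile_args` change above, `b_ndebug` now asks each compiler for its own assert-disabling flags instead of hardcoding `-DNDEBUG`. The mapping implied by this diff:

```python
# C-like compilers keep -DNDEBUG (mixins/clike.py); each D compiler
# uses its own release switch.
disable_assert_flags = {
    'clike': ['-DNDEBUG'],
    'gdc':   ['-frelease'],
    'ldc':   ['--release'],
    'dmd':   ['-release'],
}
for compiler_id, flags in disable_assert_flags.items():
    print(compiler_id, flags)
```
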
diff --git a/mesonbuild/compilers/fortran.py b/mesonbuild/compilers/fortran.py
index 2b20aa4..31b8558 100644
--- a/mesonbuild/compilers/fortran.py
+++ b/mesonbuild/compilers/fortran.py
@@ -214,6 +214,18 @@ class GnuFortranCompiler(GnuCompiler, FortranCompiler):
def language_stdlib_only_link_flags(self) -> T.List[str]:
return ['-lgfortran', '-lm']
+ def has_header(self, hname, prefix, env, *, extra_args=None, dependencies=None, disable_cache=False):
+ '''
+ Derived from mixins/clike.py:has_header, but without C-style usage of
+ __has_include which breaks with GCC-Fortran 10:
+ https://github.com/mesonbuild/meson/issues/7017
+ '''
+ fargs = {'prefix': prefix, 'header': hname}
+ code = '{prefix}\n#include <{header}>'
+ return self.compiles(code.format(**fargs), env, extra_args=extra_args,
+ dependencies=dependencies, mode='preprocess', disable_cache=disable_cache)
+
+
class ElbrusFortranCompiler(GnuFortranCompiler, ElbrusCompiler):
def __init__(self, exelist, version, for_machine: MachineChoice,
is_cross, info: 'MachineInfo', exe_wrapper=None,
@@ -412,7 +424,7 @@ class FlangFortranCompiler(ClangCompiler, FortranCompiler):
**kwargs):
FortranCompiler.__init__(self, exelist, version, for_machine,
is_cross, info, exe_wrapper, **kwargs)
- ClangCompiler.__init__(self)
+ ClangCompiler.__init__(self, [])
self.id = 'flang'
default_warn_args = ['-Minform=inform']
self.warn_args = {'0': [],
diff --git a/mesonbuild/compilers/mixins/arm.py b/mesonbuild/compilers/mixins/arm.py
index aa5d15d..b331d8f 100644
--- a/mesonbuild/compilers/mixins/arm.py
+++ b/mesonbuild/compilers/mixins/arm.py
@@ -27,10 +27,10 @@ if T.TYPE_CHECKING:
arm_buildtype_args = {
'plain': [],
- 'debug': ['-O0', '--debug'],
- 'debugoptimized': ['-O1', '--debug'],
- 'release': ['-O3', '-Otime'],
- 'minsize': ['-O3', '-Ospace'],
+ 'debug': [],
+ 'debugoptimized': [],
+ 'release': [],
+ 'minsize': [],
'custom': [],
} # type: T.Dict[str, T.List[str]]
@@ -38,27 +38,27 @@ arm_optimization_args = {
'0': ['-O0'],
'g': ['-g'],
'1': ['-O1'],
- '2': ['-O2'],
- '3': ['-O3'],
- 's': [],
+ '2': [], # Compiler defaults to -O2
+ '3': ['-O3', '-Otime'],
+ 's': ['-O3'], # Compiler defaults to -Ospace
} # type: T.Dict[str, T.List[str]]
armclang_buildtype_args = {
'plain': [],
- 'debug': ['-O0', '-g'],
- 'debugoptimized': ['-O1', '-g'],
- 'release': ['-Os'],
- 'minsize': ['-Oz'],
+ 'debug': [],
+ 'debugoptimized': [],
+ 'release': [],
+ 'minsize': [],
'custom': [],
} # type: T.Dict[str, T.List[str]]
armclang_optimization_args = {
- '0': ['-O0'],
+ '0': [], # Compiler defaults to -O0
'g': ['-g'],
'1': ['-O1'],
'2': ['-O2'],
'3': ['-O3'],
- 's': ['-Os']
+ 's': ['-Oz']
} # type: T.Dict[str, T.List[str]]
@@ -181,7 +181,7 @@ class ArmclangCompiler:
# Override CCompiler.get_dependency_gen_args
def get_dependency_gen_args(self, outtarget: str, outfile: str) -> T.List[str]:
- return []
+ return ['-MD', '-MT', outtarget, '-MF', outfile]
def get_optimization_args(self, optimization_level: str) -> T.List[str]:
return armclang_optimization_args[optimization_level]
diff --git a/mesonbuild/compilers/mixins/clang.py b/mesonbuild/compilers/mixins/clang.py
index 1c0ee45..7525c12 100644
--- a/mesonbuild/compilers/mixins/clang.py
+++ b/mesonbuild/compilers/mixins/clang.py
@@ -42,9 +42,10 @@ clang_optimization_args = {
} # type: T.Dict[str, T.List[str]]
class ClangCompiler(GnuLikeCompiler):
- def __init__(self):
+ def __init__(self, defines: T.Optional[T.Dict[str, str]]):
super().__init__()
self.id = 'clang'
+ self.defines = defines or {}
self.base_options.append('b_colorout')
# TODO: this really should be part of the linker base_options, but
# linkers don't have base_options.
@@ -56,6 +57,12 @@ class ClangCompiler(GnuLikeCompiler):
def get_colorout_args(self, colortype: str) -> T.List[str]:
return clang_color_args[colortype][:]
+ def has_builtin_define(self, define: str) -> bool:
+ return define in self.defines
+
+ def get_builtin_define(self, define: str) -> T.Optional[str]:
+ return self.defines.get(define)
+
def get_optimization_args(self, optimization_level: str) -> T.List[str]:
return clang_optimization_args[optimization_level]
@@ -106,6 +113,11 @@ class ClangCompiler(GnuLikeCompiler):
 # (and other gcc-like compilers) cannot. This is because clang (being
# llvm based) is retargetable, while GCC is not.
#
+
+ # qcld: Qualcomm Snapdragon linker, based on LLVM
+ if linker == 'qcld':
+ return ['-fuse-ld=qcld']
+
if shutil.which(linker):
if not shutil.which(linker):
raise mesonlib.MesonException(
@@ -117,3 +129,6 @@ class ClangCompiler(GnuLikeCompiler):
# Clang only warns about unknown or ignored attributes, so force an
# error.
return ['-Werror=attributes']
+
+ def get_coverage_link_args(self) -> T.List[str]:
+ return ['--coverage']
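
`ClangCompiler` now receives a pre-scanned dict of built-in defines so that `has_builtin_define`/`get_builtin_define` work as they already do for GCC. A hedged, standalone sketch of how such a dict can be produced with clang's `-E -dM` preprocessing mode (Meson's own detection lives elsewhere; `/dev/null` assumes a POSIX system):

```python
import subprocess

def scan_clang_defines(exe: str = 'clang') -> dict:
    # Dump the predefined macros, e.g. '#define __clang_major__ 10'.
    out = subprocess.run([exe, '-E', '-dM', '-x', 'c', '/dev/null'],
                         capture_output=True, text=True, check=True).stdout
    defines = {}
    for line in out.splitlines():
        _, _, rest = line.partition('#define ')
        if rest:
            name, _, value = rest.partition(' ')
            defines[name] = value
    return defines

print(scan_clang_defines().get('__clang_major__'))
```
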
diff --git a/mesonbuild/compilers/mixins/clike.py b/mesonbuild/compilers/mixins/clike.py
index 260342e..a42b050 100644
--- a/mesonbuild/compilers/mixins/clike.py
+++ b/mesonbuild/compilers/mixins/clike.py
@@ -29,15 +29,79 @@ import subprocess
import typing as T
from pathlib import Path
+from ... import arglist
from ... import mesonlib
-from ...mesonlib import LibType
from ... import mlog
+from ...linkers import GnuLikeDynamicLinkerMixin, SolarisDynamicLinker
+from ...mesonlib import LibType
from .. import compilers
from .visualstudio import VisualStudioLikeCompiler
if T.TYPE_CHECKING:
from ...environment import Environment
+SOREGEX = re.compile(r'.*\.so(\.[0-9]+)?(\.[0-9]+)?(\.[0-9]+)?$')
+
+class CLikeCompilerArgs(arglist.CompilerArgs):
+ prepend_prefixes = ('-I', '-L')
+ dedup2_prefixes = ('-I', '-isystem', '-L', '-D', '-U')
+
+ # NOTE: not thorough. A list of potential corner cases can be found in
+ # https://github.com/mesonbuild/meson/pull/4593#pullrequestreview-182016038
+ dedup1_prefixes = ('-l', '-Wl,-l', '-Wl,--export-dynamic')
+ dedup1_suffixes = ('.lib', '.dll', '.so', '.dylib', '.a')
+ dedup1_args = ('-c', '-S', '-E', '-pipe', '-pthread')
+
+ def to_native(self, copy: bool = False) -> T.List[str]:
+ # Check if we need to add --start/end-group for circular dependencies
+ # between static libraries, and for recursively searching for symbols
+ # needed by static libraries that are provided by object files or
+ # shared libraries.
+ self.flush_pre_post()
+ if copy:
+ new = self.copy()
+ else:
+ new = self
+        # This covers all ld.bfd, ld.gold, and xild on Linux, which
+ # all act like (or are) gnu ld
+ # TODO: this could probably be added to the DynamicLinker instead
+ if isinstance(self.compiler.linker, (GnuLikeDynamicLinkerMixin, SolarisDynamicLinker)):
+ group_start = -1
+ group_end = -1
+ for i, each in enumerate(new):
+ if not each.startswith(('-Wl,-l', '-l')) and not each.endswith('.a') and \
+ not SOREGEX.match(each):
+ continue
+ group_end = i
+ if group_start < 0:
+ # First occurrence of a library
+ group_start = i
+ if group_start >= 0:
+ # Last occurrence of a library
+ new.insert(group_end + 1, '-Wl,--end-group')
+ new.insert(group_start, '-Wl,--start-group')
+ # Remove system/default include paths added with -isystem
+ if hasattr(self.compiler, 'get_default_include_dirs'):
+ default_dirs = self.compiler.get_default_include_dirs()
+ bad_idx_list = [] # type: T.List[int]
+ for i, each in enumerate(new):
+ # Remove the -isystem and the path if the path is a default path
+ if (each == '-isystem' and
+ i < (len(new) - 1) and
+ new[i + 1] in default_dirs):
+ bad_idx_list += [i, i + 1]
+ elif each.startswith('-isystem=') and each[9:] in default_dirs:
+ bad_idx_list += [i]
+ elif each.startswith('-isystem') and each[8:] in default_dirs:
+ bad_idx_list += [i]
+ for i in reversed(bad_idx_list):
+ new.pop(i)
+ return self.compiler.unix_args_to_native(new._container)
+
+ def __repr__(self) -> str:
+ self.flush_pre_post()
+ return 'CLikeCompilerArgs({!r}, {!r})'.format(self.compiler, self._container)
+
class CLikeCompiler:
@@ -48,7 +112,7 @@ class CLikeCompiler:
program_dirs_cache = {}
find_library_cache = {}
find_framework_cache = {}
- internal_libs = compilers.unixy_compiler_internal_libs
+ internal_libs = arglist.UNIXY_COMPILER_INTERNAL_LIBS
def __init__(self, is_cross: bool, exe_wrapper: T.Optional[str] = None):
# If a child ObjC or CPP class has already set it, don't set it ourselves
@@ -61,6 +125,9 @@ class CLikeCompiler:
else:
self.exe_wrapper = exe_wrapper.get_command()
+ def compiler_args(self, args: T.Optional[T.Iterable[str]] = None) -> CLikeCompilerArgs:
+ return CLikeCompilerArgs(self, args)
+
def needs_static_linker(self):
return True # When compiling static libraries, so yes.
@@ -152,15 +219,24 @@ class CLikeCompiler:
if not files:
retval.append(d)
continue
- file_to_check = os.path.join(d, files[0])
- with open(file_to_check, 'rb') as fd:
- header = fd.read(5)
- # if file is not an ELF file, it's weird, but accept dir
- # if it is elf, and the class matches, accept dir
- if header[1:4] != b'ELF' or int(header[4]) == elf_class:
- retval.append(d)
- # at this point, it's an ELF file which doesn't match the
- # appropriate elf_class, so skip this one
+
+ for f in files:
+ file_to_check = os.path.join(d, f)
+ try:
+ with open(file_to_check, 'rb') as fd:
+ header = fd.read(5)
+ # if file is not an ELF file, it's weird, but accept dir
+ # if it is elf, and the class matches, accept dir
+ if header[1:4] != b'ELF' or int(header[4]) == elf_class:
+ retval.append(d)
+ # at this point, it's an ELF file which doesn't match the
+ # appropriate elf_class, so skip this one
+                    # stop scanning after the first successful read
+ break
+ except OSError:
+ # Skip the file if we can't read it
+ pass
+
return tuple(retval)
@functools.lru_cache()
@@ -254,14 +330,14 @@ class CLikeCompiler:
code = 'int main(void) { int class=0; return class; }\n'
return self.sanity_check_impl(work_dir, environment, 'sanitycheckc.c', code)
- def check_header(self, hname, prefix, env, *, extra_args=None, dependencies=None):
+ def check_header(self, hname: str, prefix: str, env, *, extra_args=None, dependencies=None):
fargs = {'prefix': prefix, 'header': hname}
code = '''{prefix}
#include <{header}>'''
return self.compiles(code.format(**fargs), env, extra_args=extra_args,
dependencies=dependencies)
- def has_header(self, hname, prefix, env, *, extra_args=None, dependencies=None, disable_cache=False):
+ def has_header(self, hname: str, prefix: str, env, *, extra_args=None, dependencies=None, disable_cache: bool = False):
fargs = {'prefix': prefix, 'header': hname}
code = '''{prefix}
#ifdef __has_include
@@ -274,7 +350,7 @@ class CLikeCompiler:
return self.compiles(code.format(**fargs), env, extra_args=extra_args,
dependencies=dependencies, mode='preprocess', disable_cache=disable_cache)
- def has_header_symbol(self, hname, symbol, prefix, env, *, extra_args=None, dependencies=None):
+ def has_header_symbol(self, hname: str, symbol: str, prefix: str, env, *, extra_args=None, dependencies=None):
fargs = {'prefix': prefix, 'header': hname, 'symbol': symbol}
t = '''{prefix}
#include <{header}>
@@ -288,11 +364,19 @@ class CLikeCompiler:
return self.compiles(t.format(**fargs), env, extra_args=extra_args,
dependencies=dependencies)
- def _get_basic_compiler_args(self, env, mode):
+ def _get_basic_compiler_args(self, env, mode: str):
cargs, largs = [], []
- # Select a CRT if needed since we're linking
if mode == 'link':
- cargs += self.get_linker_debug_crt_args()
+ # Sometimes we need to manually select the CRT to use with MSVC.
+ # One example is when trying to do a compiler check that involves
+ # linking with static libraries since MSVC won't select a CRT for
+ # us in that case and will error out asking us to pick one.
+ try:
+ crt_val = env.coredata.base_options['b_vscrt'].value
+ buildtype = env.coredata.base_options['buildtype'].value
+ cargs += self.get_crt_compile_args(crt_val, buildtype)
+ except (KeyError, AttributeError):
+ pass
# Add CFLAGS/CXXFLAGS/OBJCFLAGS/OBJCXXFLAGS and CPPFLAGS from the env
sys_args = env.coredata.get_external_args(self.for_machine, self.language)
@@ -329,7 +413,7 @@ class CLikeCompiler:
elif not isinstance(dependencies, list):
dependencies = [dependencies]
# Collect compiler arguments
- cargs = compilers.CompilerArgs(self)
+ cargs = self.compiler_args()
largs = []
for d in dependencies:
# Add compile flags needed by dependencies
@@ -354,11 +438,11 @@ class CLikeCompiler:
def compiles(self, code: str, env, *,
extra_args: T.Sequence[T.Union[T.Sequence[str], str]] = None,
- dependencies=None, mode: str = 'compile', disable_cache=False) -> T.Tuple[bool, bool]:
+ dependencies=None, mode: str = 'compile', disable_cache: bool = False) -> T.Tuple[bool, bool]:
with self._build_wrapper(code, env, extra_args, dependencies, mode, disable_cache=disable_cache) as p:
return p.returncode == 0, p.cached
- def _build_wrapper(self, code: str, env, extra_args, dependencies=None, mode: str = 'compile', want_output: bool = False, disable_cache: bool = False, temp_dir=None) -> T.Tuple[bool, bool]:
+ def _build_wrapper(self, code: str, env, extra_args, dependencies=None, mode: str = 'compile', want_output: bool = False, disable_cache: bool = False, temp_dir: str = None) -> T.Tuple[bool, bool]:
args = self._get_compiler_check_args(env, extra_args, dependencies, mode)
if disable_cache or want_output:
return self.compile(code, extra_args=args, mode=mode, want_output=want_output, temp_dir=env.scratch_dir)
@@ -369,7 +453,8 @@ class CLikeCompiler:
dependencies=dependencies, mode='link', disable_cache=disable_cache)
def run(self, code: str, env, *, extra_args=None, dependencies=None):
- if self.is_cross and self.exe_wrapper is None:
+ need_exe_wrapper = env.need_exe_wrapper(self.for_machine)
+ if need_exe_wrapper and self.exe_wrapper is None:
raise compilers.CrossNoRunException('Can not run test applications in this cross environment.')
with self._build_wrapper(code, env, extra_args, dependencies, mode='link', want_output=True) as p:
if p.returncode != 0:
@@ -377,7 +462,7 @@ class CLikeCompiler:
p.input_name,
p.returncode))
return compilers.RunResult(False)
- if self.is_cross:
+ if need_exe_wrapper:
cmdlist = self.exe_wrapper + [p.output_name]
else:
cmdlist = p.output_name
@@ -658,7 +743,7 @@ class CLikeCompiler:
# is not run so we don't care what the return value is.
main = '''\nint main(void) {{
void *a = (void*) &{func};
- long b = (long) a;
+ long long b = (long long) a;
return (int) b;
}}'''
return head, main
@@ -727,24 +812,29 @@ class CLikeCompiler:
# need to look for them differently. On nice compilers like clang, we
# can just directly use the __has_builtin() macro.
fargs['no_includes'] = '#include' not in prefix
- fargs['__builtin_'] = '' if funcname.startswith('__builtin_') else '__builtin_'
+ is_builtin = funcname.startswith('__builtin_')
+ fargs['is_builtin'] = is_builtin
+ fargs['__builtin_'] = '' if is_builtin else '__builtin_'
t = '''{prefix}
int main(void) {{
+
+        /* With some toolchains (MSYS2/mingw for example) the compiler
+         * provides various builtins that are not really implemented but
+         * merely fall back to the stdlib, where they may be missing and
+         * then fail at build/link time. If the user provided a header,
+         * including it did not define the function, and the function we
+         * are checking isn't a builtin itself, we assume the builtin is
+         * not functional and just error out. */
+ #if !{no_includes:d} && !defined({func}) && !{is_builtin:d}
+ #error "No definition for {__builtin_}{func} found in the prefix"
+ #endif
+
#ifdef __has_builtin
#if !__has_builtin({__builtin_}{func})
#error "{__builtin_}{func} not found"
#endif
#elif ! defined({func})
- /* Check for {__builtin_}{func} only if no includes were added to the
- * prefix above, which means no definition of {func} can be found.
- * We would always check for this, but we get false positives on
- * MSYS2 if we do. Their toolchain is broken, but we can at least
- * give them a workaround. */
- #if {no_includes:d}
- {__builtin_}{func};
- #else
- #error "No definition for {__builtin_}{func} found in the prefix"
- #endif
+ {__builtin_}{func};
#endif
return 0;
}}'''
@@ -910,21 +1000,21 @@ class CLikeCompiler:
architecture.
'''
# If not building on macOS for Darwin, do a simple file check
- files = [Path(f) for f in files]
+ paths = [Path(f) for f in files]
if not env.machines.host.is_darwin() or not env.machines.build.is_darwin():
- for f in files:
- if f.is_file():
- return f
+ for p in paths:
+ if p.is_file():
+ return p
# Run `lipo` and check if the library supports the arch we want
- for f in files:
- if not f.is_file():
+ for p in paths:
+ if not p.is_file():
continue
- archs = mesonlib.darwin_get_object_archs(f)
+ archs = mesonlib.darwin_get_object_archs(str(p))
if archs and env.machines.host.cpu_family in archs:
- return f
+ return p
else:
mlog.debug('Rejected {}, supports {} but need {}'
- .format(f, archs, env.machines.host.cpu_family))
+ .format(p, archs, env.machines.host.cpu_family))
return None
@functools.lru_cache()
@@ -993,7 +1083,7 @@ class CLikeCompiler:
return value[:]
def find_library(self, libname, env, extra_dirs, libtype: LibType = LibType.PREFER_SHARED):
- code = 'int main(void) { return 0; }'
+ code = 'int main(void) { return 0; }\n'
return self.find_library_impl(libname, env, extra_dirs, code, libtype)
def find_framework_paths(self, env):
@@ -1093,7 +1183,7 @@ class CLikeCompiler:
'the compiler you are using. has_link_argument or '
'other similar method can be used instead.'
.format(arg))
- code = 'int i;\n'
+ code = 'extern int i;\nint i;\n'
return self.has_arguments(args, env, code, mode='compile')
def has_multi_link_arguments(self, args, env):
@@ -1102,7 +1192,7 @@ class CLikeCompiler:
# false positive.
args = self.linker.fatal_warnings() + args
args = self.linker_to_compiler_args(args)
- code = 'int main(void) { return 0; }'
+ code = 'int main(void) { return 0; }\n'
return self.has_arguments(args, env, code, mode='link')
@staticmethod
@@ -1131,3 +1221,6 @@ class CLikeCompiler:
return self.compiles(self.attribute_check_func(name), env,
extra_args=self.get_has_func_attribute_extra_args(name))
+
+ def get_disable_assert_args(self) -> T.List[str]:
+ return ['-DNDEBUG']
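
The ordering and dedup rules documented on the removed `CompilerArgs` class carry over to `CLikeCompilerArgs`: `-I`/`-L` prepend so later search paths win, `-D`/`-U` replace earlier occurrences, and one-shot flags such as `-pthread` are dropped on repeat. Reusing the removed docstring's example, with `cc` assumed to be a configured C-like compiler instance:

```python
a = cc.compiler_args(['-Lfoo', '-lbar'])
a += ['-Lpho', '-lbaz']
print(list(a))  # expected: ['-Lpho', '-Lfoo', '-lbar', '-lbaz']
# '-Lpho' was prepended (prepend_prefixes); a repeated '-lbar' would
# have been dropped entirely (dedup1_prefixes).
```
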
diff --git a/mesonbuild/compilers/mixins/gnu.py b/mesonbuild/compilers/mixins/gnu.py
index 7cfb2c2..f5fd7ef 100644
--- a/mesonbuild/compilers/mixins/gnu.py
+++ b/mesonbuild/compilers/mixins/gnu.py
@@ -363,7 +363,7 @@ class GnuCompiler(GnuLikeCompiler):
# For some compiler command line arguments, the GNU compilers will
 # emit a warning on stderr indicating that an option is valid for
 # another language, but still complete with exit_success
- with self._build_wrapper(code, env, args, None, mode, disable_cache=False, want_output=True) as p:
+ with self._build_wrapper(code, env, args, None, mode) as p:
result = p.returncode == 0
if self.language in {Language.CPP, Language.OBJCPP} and 'is valid for C/ObjC' in p.stde:
result = False
diff --git a/mesonbuild/compilers/mixins/islinker.py b/mesonbuild/compilers/mixins/islinker.py
index 681c816..a9967d6 100644
--- a/mesonbuild/compilers/mixins/islinker.py
+++ b/mesonbuild/compilers/mixins/islinker.py
@@ -107,11 +107,8 @@ class BasicLinkerIsCompilerMixin:
def build_rpath_args(self, env: 'Environment', build_dir: str, from_dir: str,
rpath_paths: str, build_rpath: str,
- install_rpath: str) -> T.List[str]:
- return []
-
- def get_linker_debug_crt_args(self) -> T.List[str]:
- return []
+ install_rpath: str) -> T.Tuple[T.List[str], T.Set[bytes]]:
+ return ([], set())
def get_asneeded_args(self) -> T.List[str]:
return []
diff --git a/mesonbuild/compilers/mixins/visualstudio.py b/mesonbuild/compilers/mixins/visualstudio.py
index d0004ce..93101b5 100644
--- a/mesonbuild/compilers/mixins/visualstudio.py
+++ b/mesonbuild/compilers/mixins/visualstudio.py
@@ -114,7 +114,7 @@ class VisualStudioLikeCompiler(metaclass=abc.ABCMeta):
# See: https://ninja-build.org/manual.html#_deps
always_args = ['/nologo', '/showIncludes']
warn_args = {
- '0': ['/W1'],
+ '0': [],
'1': ['/W2'],
'2': ['/W3'],
'3': ['/W4'],
@@ -208,6 +208,9 @@ class VisualStudioLikeCompiler(metaclass=abc.ABCMeta):
def openmp_flags(self) -> T.List[str]:
return ['/openmp']
+ def openmp_link_flags(self) -> T.List[str]:
+ return []
+
# FIXME, no idea what these should be.
def thread_flags(self, env: 'Environment') -> T.List[str]:
return []
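
Splitting `openmp_link_flags` out of `openmp_flags` matters mostly for MSVC: `/openmp` is a compile-only switch and the linker needs nothing extra, while GCC-style compilers pass the same flag in both phases (the base `Compiler` falls back to `openmp_flags`). A tiny illustration of the split:

```python
class GnuLike:
    def openmp_flags(self): return ['-fopenmp']
    def openmp_link_flags(self): return self.openmp_flags()  # base-class fallback

class MsvcLike:
    def openmp_flags(self): return ['/openmp']
    def openmp_link_flags(self): return []                   # linker needs nothing

for c in (GnuLike(), MsvcLike()):
    print(type(c).__name__, c.openmp_flags(), c.openmp_link_flags())
```
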
diff --git a/mesonbuild/compilers/objc.py b/mesonbuild/compilers/objc.py
index b042bc5..f642a1f 100644
--- a/mesonbuild/compilers/objc.py
+++ b/mesonbuild/compilers/objc.py
@@ -86,7 +86,7 @@ class ClangObjCCompiler(ClangCompiler, ObjCCompiler):
**kwargs):
ObjCCompiler.__init__(self, exelist, version, for_machine, is_cross,
info, exe_wrapper, **kwargs)
- ClangCompiler.__init__(self)
+ ClangCompiler.__init__(self, [])
default_warn_args = ['-Wall', '-Winvalid-pch']
self.warn_args = {'0': [],
'1': default_warn_args,
diff --git a/mesonbuild/compilers/objcpp.py b/mesonbuild/compilers/objcpp.py
index 4cfb1ab..871a0a6 100644
--- a/mesonbuild/compilers/objcpp.py
+++ b/mesonbuild/compilers/objcpp.py
@@ -84,7 +84,7 @@ class ClangObjCPPCompiler(ClangCompiler, ObjCPPCompiler):
is_cross, info: 'MachineInfo', exe_wrapper=None,
**kwargs):
ObjCPPCompiler.__init__(self, exelist, version, for_machine, is_cross, info, exe_wrapper, **kwargs)
- ClangCompiler.__init__(self)
+ ClangCompiler.__init__(self, [])
default_warn_args = ['-Wall', '-Winvalid-pch', '-Wnon-virtual-dtor']
self.warn_args = {'0': [],
'1': default_warn_args,
diff --git a/mesonbuild/coredata.py b/mesonbuild/coredata.py
index 06f07f3..f2f092b 100644
--- a/mesonbuild/coredata.py
+++ b/mesonbuild/coredata.py
@@ -12,17 +12,16 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-from . import mlog
+from . import mlog, mparser
import pickle, os, uuid
import sys
from itertools import chain
from pathlib import PurePath
from collections import OrderedDict, defaultdict
from .mesonlib import (
- Language, MesonException, MachineChoice, PerMachine, OrderedSet,
+ Language, EnvironmentException, MesonException, MachineChoice, PerMachine, OrderedSet,
default_libdir, default_libexecdir, default_prefix, split_args
)
-from .envconfig import get_env_var_pair
from .wrap import WrapMode
import ast
import argparse
@@ -38,7 +37,7 @@ if T.TYPE_CHECKING:
OptionDictType = T.Dict[str, 'UserOption[T.Any]']
-version = '0.54.999'
+version = '0.55.999'
backendlist = ['ninja', 'vs', 'vs2010', 'vs2015', 'vs2017', 'vs2019', 'xcode']
default_yielding = False
@@ -99,16 +98,16 @@ class UserBooleanOption(UserOption[bool]):
class UserIntegerOption(UserOption[int]):
def __init__(self, description, value, yielding=None):
min_value, max_value, default_value = value
- super().__init__(description, [True, False], yielding)
self.min_value = min_value
self.max_value = max_value
- self.set_value(default_value)
c = []
if min_value is not None:
c.append('>=' + str(min_value))
if max_value is not None:
c.append('<=' + str(max_value))
- self.choices = ', '.join(c)
+ choices = ', '.join(c)
+ super().__init__(description, choices, yielding)
+ self.set_value(default_value)
def validate_value(self, value) -> int:
if isinstance(value, str):
@@ -160,8 +159,16 @@ class UserComboOption(UserOption[str]):
def validate_value(self, value):
if value not in self.choices:
+ if isinstance(value, bool):
+ _type = 'boolean'
+ elif isinstance(value, (int, float)):
+ _type = 'number'
+ else:
+ _type = 'string'
optionsstring = ', '.join(['"%s"' % (item,) for item in self.choices])
- raise MesonException('Value "%s" for combo option is not one of the choices. Possible choices are: %s.' % (value, optionsstring))
+ raise MesonException('Value "{}" (of type "{}") for combo option "{}" is not one of the choices.'
+ ' Possible choices are (as string): {}.'.format(
+ value, _type, self.description, optionsstring))
return value
class UserArrayOption(UserOption[T.List[str]]):
@@ -227,14 +234,6 @@ class UserFeatureOption(UserComboOption):
def is_auto(self):
return self.value == 'auto'
-
-def load_configs(filenames: T.List[str]) -> configparser.ConfigParser:
- """Load configuration files from a named subdirectory."""
- config = configparser.ConfigParser(interpolation=None)
- config.read(filenames)
- return config
-
-
if T.TYPE_CHECKING:
CacheKeyType = T.Tuple[T.Tuple[T.Any, ...], ...]
SubCacheKeyType = T.Tuple[T.Any, ...]
@@ -366,14 +365,14 @@ class CoreData:
self.install_guid = str(uuid.uuid4()).upper()
self.target_guids = {}
self.version = version
- self.builtins = {} # : OptionDictType
+ self.builtins = {} # type: OptionDictType
self.builtins_per_machine = PerMachine({}, {})
- self.backend_options = {} # : OptionDictType
- self.user_options = {} # : OptionDictType
+ self.backend_options = {} # type: OptionDictType
+ self.user_options = {} # type: OptionDictType
self.compiler_options = PerMachine(
defaultdict(dict),
defaultdict(dict),
- ) # : PerMachine[T.defaultdict[Language, OptionDictType]]
+ ) # type: PerMachine[T.defaultdict[Language, OptionDictType]]
self.base_options = {} # : OptionDictType
self.cross_files = self.__load_config_files(options, scratch_dir, 'cross')
self.compilers = PerMachine(OrderedDict(), OrderedDict())
@@ -382,8 +381,10 @@ class CoreData:
host_cache = DependencyCache(self.builtins_per_machine, MachineChoice.BUILD)
self.deps = PerMachine(build_cache, host_cache) # type: PerMachine[DependencyCache]
self.compiler_check_cache = OrderedDict()
+
# Only to print a warning if it changes between Meson invocations.
self.config_files = self.__load_config_files(options, scratch_dir, 'native')
+ self.builtin_options_libdir_cross_fixup()
self.init_builtins('')
@staticmethod
@@ -444,12 +445,12 @@ class CoreData:
raise MesonException('Cannot find specified {} file: {}'.format(ftype, f))
return real
- def libdir_cross_fixup(self):
+ def builtin_options_libdir_cross_fixup(self):
# By default set libdir to "lib" when cross compiling since
# getting the "system default" is always wrong on multiarch
# platforms as it gets a value like lib/x86_64-linux-gnu.
if self.cross_files:
- self.builtins['libdir'].value = 'lib'
+ builtin_options['libdir'].default = 'lib'
def sanitize_prefix(self, prefix):
prefix = os.path.expanduser(prefix)
@@ -490,7 +491,7 @@ class CoreData:
# commonpath will always return a path in the native format, so we
# must use pathlib.PurePath to do the same conversion before
# comparing.
- msg = ('The value of the {!r} option is {!r} which must be a '
+ msg = ('The value of the {!r} option is \'{!s}\' which must be a '
'subdir of the prefix {!r}.\nNote that if you pass a '
'relative path, it is assumed to be a subdir of prefix.')
# os.path.commonpath doesn't understand case-insensitive filesystems,
@@ -510,7 +511,6 @@ class CoreData:
for for_machine in iter(MachineChoice):
for key, opt in builtin_options_per_machine.items():
self.add_builtin_option(self.builtins_per_machine[for_machine], key, opt, subproject)
- self.libdir_cross_fixup()
def add_builtin_option(self, opts_map, key, opt, subproject):
if subproject:
@@ -694,7 +694,9 @@ class CoreData:
if type(oldval) != type(value):
self.user_options[name] = value
- def is_cross_build(self) -> bool:
+ def is_cross_build(self, when_building_for: MachineChoice = MachineChoice.HOST) -> bool:
+ if when_building_for == MachineChoice.BUILD:
+ return False
return len(self.cross_files) > 0
def strip_build_option_names(self, options):
@@ -749,87 +751,54 @@ class CoreData:
if not self.is_cross_build():
self.copy_build_options_from_regular_ones()
- def set_default_options(self, default_options, subproject, env):
- # Warn if the user is using two different ways of setting build-type
- # options that override each other
- if 'buildtype' in env.cmd_line_options and \
- ('optimization' in env.cmd_line_options or 'debug' in env.cmd_line_options):
- mlog.warning('Recommend using either -Dbuildtype or -Doptimization + -Ddebug. '
- 'Using both is redundant since they override each other. '
- 'See: https://mesonbuild.com/Builtin-options.html#build-type-options')
- cmd_line_options = OrderedDict()
- # Set project default_options as if they were passed to the cmdline.
- # Subprojects can only define default for user options and not yielding
- # builtin option.
- from . import optinterpreter
- for k, v in default_options.items():
+ def set_default_options(self, default_options: 'T.OrderedDict[str, str]', subproject: str, env: 'Environment') -> None:
+ def make_key(key: str) -> str:
if subproject:
- if (k not in builtin_options or builtin_options[k].yielding) \
- and optinterpreter.is_invalid_name(k, log=False):
- continue
- k = subproject + ':' + k
- cmd_line_options[k] = v
-
- # Override project default_options using conf files (cross or native)
- for k, v in env.paths.host:
- if v is not None:
- cmd_line_options[k] = v
-
- # Override all the above defaults using the command-line arguments
- # actually passed to us
- cmd_line_options.update(env.cmd_line_options)
- env.cmd_line_options = cmd_line_options
-
- # Create a subset of cmd_line_options, keeping only options for this
- # subproject. Also take builtin options if it's the main project.
- # Language and backend specific options will be set later when adding
- # languages and setting the backend (builtin options must be set first
- # to know which backend we'll use).
+ return '{}:{}'.format(subproject, key)
+ return key
+
options = OrderedDict()
- # Some options default to environment variables if they are
- # unset, set those now. These will either be overwritten
- # below, or they won't. These should only be set on the first run.
- for for_machine in MachineChoice:
- p_env_pair = get_env_var_pair(for_machine, self.is_cross_build(), 'PKG_CONFIG_PATH')
- if p_env_pair is not None:
- p_env_var, p_env = p_env_pair
-
- # PKG_CONFIG_PATH may contain duplicates, which must be
- # removed, else a duplicates-in-array-option warning arises.
- p_list = list(OrderedSet(p_env.split(':')))
-
- key = 'pkg_config_path'
- if for_machine == MachineChoice.BUILD:
- key = 'build.' + key
-
- if env.first_invocation:
- options[key] = p_list
- elif options.get(key, []) != p_list:
- mlog.warning(
- p_env_var +
- ' environment variable has changed '
- 'between configurations, meson ignores this. '
- 'Use -Dpkg_config_path to change pkg-config search '
- 'path instead.'
- )
-
- def remove_prefix(text, prefix):
- if text.startswith(prefix):
- return text[len(prefix):]
- return text
-
- for k, v in env.cmd_line_options.items():
- if subproject:
- if not k.startswith(subproject + ':'):
- continue
- elif k not in builtin_options.keys() \
- and remove_prefix(k, 'build.') not in builtin_options_per_machine.keys():
- if ':' in k:
- continue
- if optinterpreter.is_invalid_name(k, log=False):
+ # TODO: validate these
+ from .compilers import all_languages, base_options
+ lang_prefixes = tuple('{}_'.format(l) for l in all_languages)
+    # Split arguments into those that can be set now and those that cannot
+    # be set until the options they belong to have been initialized.
+ for k, v in default_options.items():
+ if k.startswith(lang_prefixes):
+ lang, key = k.split('_', 1)
+ for machine in MachineChoice:
+ if key not in env.compiler_options[machine][lang]:
+ env.compiler_options[machine][lang][key] = v
+ elif k in base_options:
+ if not subproject and k not in env.base_options:
+ env.base_options[k] = v
+ else:
+ options[make_key(k)] = v
+
+ for k, v in chain(env.meson_options.host.get('', {}).items(),
+ env.meson_options.host.get(subproject, {}).items()):
+ options[make_key(k)] = v
+
+ for k, v in chain(env.meson_options.build.get('', {}).items(),
+ env.meson_options.build.get(subproject, {}).items()):
+ if k in builtin_options_per_machine:
+ options[make_key('build.{}'.format(k))] = v
+
+ options.update({make_key(k): v for k, v in env.user_options.get(subproject, {}).items()})
+
+    # Some options (namely the compiler options) are not present in
+ # coredata until the compiler is fully initialized. As such, we need to
+ # put those options into env.meson_options, only if they're not already
+    # in there, as the machine files and command line have precedence.
+ for k, v in default_options.items():
+ if k in builtin_options and not builtin_options[k].yielding:
+ continue
+ for machine in MachineChoice:
+ if machine is MachineChoice.BUILD and not self.is_cross_build():
continue
- options[k] = v
+ if k not in env.meson_options[machine][subproject]:
+ env.meson_options[machine][subproject][k] = v
self.set_options(options, subproject=subproject)
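The net effect of the rewritten method is a fixed precedence per option: project default_options are weakest, machine-file sections override them, and values already set on the command line win. A rough sketch of the merge order, using hypothetical plain dicts in place of the Environment fields:

project_defaults = {'warning_level': '3'}   # from default_options in meson.build
machine_file = {'warning_level': '2'}       # from a native or cross file
command_line = {'warning_level': '1'}       # from -Dwarning_level=1

options = {}
options.update(project_defaults)  # applied first, weakest
options.update(machine_file)      # overrides project defaults
options.update(command_line)      # strongest
assert options['warning_level'] == '1'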
@@ -845,24 +814,19 @@ class CoreData:
env.is_cross_build(),
env.properties[for_machine]).items():
# prefixed compiler options affect just this machine
- opt_prefix = for_machine.get_prefix()
- user_k = opt_prefix + lang.get_lower_case_name() + '_' + k
- if user_k in env.cmd_line_options:
- o.set_value(env.cmd_line_options[user_k])
+ if k in env.compiler_options[for_machine].get(lang, {}):
+ o.set_value(env.compiler_options[for_machine][lang][k])
self.compiler_options[for_machine][lang].setdefault(k, o)
- def process_new_compiler(self, lang: Language, comp: T.Type['Compiler'], env: 'Environment') -> None:
+ def process_new_compiler(self, lang: Language, comp: 'Compiler', env: 'Environment') -> None:
from . import compilers
self.compilers[comp.for_machine][lang] = comp
- enabled_opts = []
for k, o in comp.get_options().items():
# prefixed compiler options affect just this machine
- opt_prefix = comp.for_machine.get_prefix()
- user_k = opt_prefix + lang.get_lower_case_name() + '_' + k
- if user_k in env.cmd_line_options:
- o.set_value(env.cmd_line_options[user_k])
+ if k in env.compiler_options[comp.for_machine].get(lang, {}):
+ o.set_value(env.compiler_options[comp.for_machine][lang][k])
self.compiler_options[comp.for_machine][lang].setdefault(k, o)
enabled_opts = []
@@ -870,16 +834,16 @@ class CoreData:
if optname in self.base_options:
continue
oobj = compilers.base_options[optname]
- if optname in env.cmd_line_options:
- oobj.set_value(env.cmd_line_options[optname])
+ if optname in env.base_options:
+ oobj.set_value(env.base_options[optname])
enabled_opts.append(optname)
self.base_options[optname] = oobj
self.emit_base_options_warnings(enabled_opts)
def emit_base_options_warnings(self, enabled_opts: list):
if 'b_bitcode' in enabled_opts:
- mlog.warning('Base option \'b_bitcode\' is enabled, which is incompatible with many linker options. Incompatible options such as such as \'b_asneeded\' have been disabled.')
- mlog.warning('Please see https://mesonbuild.com/Builtin-options.html#Notes_about_Apple_Bitcode_support for more details.')
+ mlog.warning('Base option \'b_bitcode\' is enabled, which is incompatible with many linker options. Incompatible options such as \'b_asneeded\' have been disabled.', fatal=False)
+ mlog.warning('Please see https://mesonbuild.com/Builtin-options.html#Notes_about_Apple_Bitcode_support for more details.', fatal=False)
class CmdLineFileParser(configparser.ConfigParser):
def __init__(self):
@@ -887,6 +851,69 @@ class CmdLineFileParser(configparser.ConfigParser):
# storing subproject options like "subproject:option=value"
super().__init__(delimiters=['='], interpolation=None)
+class MachineFileParser():
+ def __init__(self, filenames: T.List[str]):
+ self.parser = CmdLineFileParser()
+ self.constants = {'True': True, 'False': False}
+ self.sections = {}
+
+ self.parser.read(filenames)
+
+ # Parse [constants] first so they can be used in other sections
+ if self.parser.has_section('constants'):
+ self.constants.update(self._parse_section('constants'))
+
+ for s in self.parser.sections():
+ if s == 'constants':
+ continue
+ self.sections[s] = self._parse_section(s)
+
+ def _parse_section(self, s):
+ self.scope = self.constants.copy()
+ section = {}
+ for entry, value in self.parser.items(s):
+ if ' ' in entry or '\t' in entry or "'" in entry or '"' in entry:
+ raise EnvironmentException('Malformed variable name {!r} in machine file.'.format(entry))
+ # Windows paths...
+ value = value.replace('\\', '\\\\')
+ try:
+ ast = mparser.Parser(value, 'machinefile').parse()
+ res = self._evaluate_statement(ast.lines[0])
+ except MesonException:
+ raise EnvironmentException('Malformed value in machine file variable {!r}.'.format(entry))
+ except KeyError as e:
+ raise EnvironmentException('Undefined constant {!r} in machine file variable {!r}.'.format(e.args[0], entry))
+ section[entry] = res
+ self.scope[entry] = res
+ return section
+
+ def _evaluate_statement(self, node):
+ if isinstance(node, (mparser.StringNode)):
+ return node.value
+ elif isinstance(node, mparser.BooleanNode):
+ return node.value
+ elif isinstance(node, mparser.NumberNode):
+ return node.value
+ elif isinstance(node, mparser.ArrayNode):
+ return [self._evaluate_statement(arg) for arg in node.args.arguments]
+ elif isinstance(node, mparser.IdNode):
+ return self.scope[node.value]
+ elif isinstance(node, mparser.ArithmeticNode):
+ l = self._evaluate_statement(node.left)
+ r = self._evaluate_statement(node.right)
+ if node.operation == 'add':
+ if (isinstance(l, str) and isinstance(r, str)) or \
+ (isinstance(l, list) and isinstance(r, list)):
+ return l + r
+ elif node.operation == 'div':
+ if isinstance(l, str) and isinstance(r, str):
+ return os.path.join(l, r)
+ raise EnvironmentException('Unsupported node type')
+
+def parse_machine_files(filenames):
+ parser = MachineFileParser(filenames)
+ return parser.sections
+
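Because values are parsed with Meson's own mparser, machine files support strings, booleans, numbers, arrays, '+' for concatenating strings and arrays, and '/' for joining path components, with names from [constants] usable in every later section. A hypothetical machine file exercising these features:

[constants]
arch = 'aarch64-linux-gnu'
sysroot = '/opt' / arch            # '/' joins path components

[binaries]
c = arch + '-gcc'                  # '+' concatenates strings
strip = [arch + '-strip', '-s']    # arrays of strings are supported

[properties]
needs_exe_wrapper = true
sys_root = sysroot

Such a file would be read with parse_machine_files(['cross.ini']), returning the evaluated sections as plain dicts (the file name is made up).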
def get_cmd_line_file(build_dir):
return os.path.join(build_dir, 'meson-private', 'cmd_line.txt')
@@ -1102,23 +1129,25 @@ class BuiltinOption(T.Generic[_T, _U]):
cmdline_name = self.argparse_name_to_arg(prefix + name)
parser.add_argument(cmdline_name, help=h + help_suffix, **kwargs)
+
# Update `docs/markdown/Builtin-options.md` after changing the options below
-builtin_options = OrderedDict([
- # Directories
- ('prefix', BuiltinOption(UserStringOption, 'Installation prefix', default_prefix())),
- ('bindir', BuiltinOption(UserStringOption, 'Executable directory', 'bin')),
- ('datadir', BuiltinOption(UserStringOption, 'Data file directory', 'share')),
- ('includedir', BuiltinOption(UserStringOption, 'Header file directory', 'include')),
- ('infodir', BuiltinOption(UserStringOption, 'Info page directory', 'share/info')),
- ('libdir', BuiltinOption(UserStringOption, 'Library directory', default_libdir())),
- ('libexecdir', BuiltinOption(UserStringOption, 'Library executable directory', default_libexecdir())),
- ('localedir', BuiltinOption(UserStringOption, 'Locale data directory', 'share/locale')),
+BUILTIN_DIR_OPTIONS = OrderedDict([
+ ('prefix', BuiltinOption(UserStringOption, 'Installation prefix', default_prefix())),
+ ('bindir', BuiltinOption(UserStringOption, 'Executable directory', 'bin')),
+ ('datadir', BuiltinOption(UserStringOption, 'Data file directory', 'share')),
+ ('includedir', BuiltinOption(UserStringOption, 'Header file directory', 'include')),
+ ('infodir', BuiltinOption(UserStringOption, 'Info page directory', 'share/info')),
+ ('libdir', BuiltinOption(UserStringOption, 'Library directory', default_libdir())),
+ ('libexecdir', BuiltinOption(UserStringOption, 'Library executable directory', default_libexecdir())),
+ ('localedir', BuiltinOption(UserStringOption, 'Locale data directory', 'share/locale')),
('localstatedir', BuiltinOption(UserStringOption, 'Localstate data directory', 'var')),
('mandir', BuiltinOption(UserStringOption, 'Manual page directory', 'share/man')),
('sbindir', BuiltinOption(UserStringOption, 'System executable directory', 'sbin')),
('sharedstatedir', BuiltinOption(UserStringOption, 'Architecture-independent data directory', 'com')),
('sysconfdir', BuiltinOption(UserStringOption, 'Sysconf data directory', 'etc')),
- # Core options
+]) # type: OptionDictType
+
+BUILTIN_CORE_OPTIONS = OrderedDict([
('auto_features', BuiltinOption(UserFeatureOption, "Override value of all 'auto' features", 'auto')),
('backend', BuiltinOption(UserComboOption, 'Backend to use', 'ninja', choices=backendlist)),
('buildtype', BuiltinOption(UserComboOption, 'Build type to use', 'debug',
@@ -1134,10 +1163,13 @@ builtin_options = OrderedDict([
('strip', BuiltinOption(UserBooleanOption, 'Strip targets on install', False)),
('unity', BuiltinOption(UserComboOption, 'Unity build', 'off', choices=['on', 'off', 'subprojects'])),
('unity_size', BuiltinOption(UserIntegerOption, 'Unity block size', (2, None, 4))),
- ('warning_level', BuiltinOption(UserComboOption, 'Compiler warning level to use', '1', choices=['0', '1', '2', '3'])),
+ ('warning_level', BuiltinOption(UserComboOption, 'Compiler warning level to use', '1', choices=['0', '1', '2', '3'], yielding=False)),
('werror', BuiltinOption(UserBooleanOption, 'Treat warnings as errors', False, yielding=False)),
('wrap_mode', BuiltinOption(UserComboOption, 'Wrap mode', 'default', choices=['default', 'nofallback', 'nodownload', 'forcefallback'])),
-])
+ ('force_fallback_for', BuiltinOption(UserArrayOption, 'Force fallback for those subprojects', [])),
+]) # type: OptionDictType
+
+builtin_options = OrderedDict(chain(BUILTIN_DIR_OPTIONS.items(), BUILTIN_CORE_OPTIONS.items()))
builtin_options_per_machine = OrderedDict([
('pkg_config_path', BuiltinOption(UserArrayOption, 'List of additional paths for pkg-config to search', [])),
@@ -1173,3 +1205,4 @@ forbidden_target_names = {'clean': None,
'dist': None,
'distcheck': None,
}
+
diff --git a/mesonbuild/dependencies/base.py b/mesonbuild/dependencies/base.py
index 8cee491..3c204b9 100644
--- a/mesonbuild/dependencies/base.py
+++ b/mesonbuild/dependencies/base.py
@@ -22,14 +22,13 @@ import json
import shlex
import shutil
import stat
+import sys
import textwrap
import platform
import typing as T
from enum import Enum
from pathlib import Path, PurePath
-import pkg_resources
-
from .. import mlog
from .. import mesonlib
from ..compilers import clib_langs
@@ -41,6 +40,7 @@ from ..mesonlib import (
Popen_safe, version_compare_many, version_compare, listify, stringlistify, extract_as_list, split_args,
Version, LibType,
)
+from ..mesondata import mesondata
if T.TYPE_CHECKING:
from ..compilers.compilers import CompilerType # noqa: F401
@@ -78,6 +78,30 @@ class DependencyMethods(Enum):
DUB = 'dub'
+def find_external_program(env: Environment, for_machine: MachineChoice, name: str,
+ display_name: str, default_names: T.List[str],
+ allow_default_for_cross: bool = True) -> T.Generator['ExternalProgram', None, None]:
+ """Find an external program, chcking the cross file plus any default options."""
+ # Lookup in cross or machine file.
+ potential_path = env.lookup_binary_entry(for_machine, name)
+ if potential_path is not None:
+ mlog.debug('{} binary for {} specified from cross file, native file, '
+ 'or env var as {}'.format(display_name, for_machine, potential_path))
+ yield ExternalProgram.from_entry(name, potential_path)
+ # We never fallback if the user-specified option is no good, so
+ # stop returning options.
+ return
+ mlog.debug('{} binary missing from cross or native file, or env var undefined.'.format(display_name))
+ # Fallback on hard-coded defaults, if a default binary is allowed for use
+ # with cross targets, or if this is not a cross target
+ if allow_default_for_cross or not (for_machine is MachineChoice.HOST and env.is_cross_build(for_machine)):
+ for potential_path in default_names:
+ mlog.debug('Trying a default {} fallback at'.format(display_name), potential_path)
+ yield ExternalProgram(potential_path, silent=True)
+ else:
+ mlog.debug('Default target is not allowed for cross use')
+
+
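Call sites consume the generator lazily and stop at the first candidate that actually exists, as the pkg-config lookup further down in this diff does. A sketch of the idiom, assuming env is a configured Environment:

pkgbin = None
for prog in find_external_program(env, MachineChoice.HOST, 'pkgconfig',
                                  'Pkg-config', ['pkg-config'],
                                  allow_default_for_cross=False):
    if prog.found():
        pkgbin = prog
        break
# pkgbin is None here only if neither the machine files nor the
# hard-coded defaults produced a usable binary.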
class Dependency:
@classmethod
@@ -229,6 +253,16 @@ class InternalDependency(Dependency):
self.ext_deps = ext_deps
self.variables = variables
+ def __deepcopy__(self, memo: dict) -> 'InternalDependency':
+ result = self.__class__.__new__(self.__class__)
+ memo[id(self)] = result
+ for k, v in self.__dict__.items():
+ if k in ['libraries', 'whole_libraries']:
+ setattr(result, k, copy.copy(v))
+ else:
+ setattr(result, k, copy.deepcopy(v, memo))
+ return result
+
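The custom __deepcopy__ shallow-copies only the 'libraries' and 'whole_libraries' lists, presumably so the build targets they reference stay shared instead of being cloned along with the dependency. A self-contained sketch of the pattern:

import copy

class Holder:
    def __init__(self):
        self.libraries = [object()]  # stand-ins for build targets: share them
        self.version = ['1.0']       # ordinary data: clone it

    def __deepcopy__(self, memo):
        result = self.__class__.__new__(self.__class__)
        memo[id(self)] = result
        for k, v in self.__dict__.items():
            if k in ['libraries']:
                setattr(result, k, copy.copy(v))           # new list, same elements
            else:
                setattr(result, k, copy.deepcopy(v, memo))
        return result

a = Holder()
b = copy.deepcopy(a)
assert a.libraries[0] is b.libraries[0]  # elements shared
assert a.version == b.version and a.version is not b.version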
def get_pkgconfig_variable(self, variable_name, kwargs):
raise DependencyException('Method "get_pkgconfig_variable()" is '
'invalid for an internal dependency')
@@ -354,25 +388,6 @@ class ExternalDependency(Dependency, HasNativeKwarg):
raise DependencyException(m.format(self.name, not_found, self.version))
return
- # Create an iterator of options
- def search_tool(self, name, display_name, default_names):
- # Lookup in cross or machine file.
- potential_path = self.env.lookup_binary_entry(self.for_machine, name)
- if potential_path is not None:
- mlog.debug('{} binary for {} specified from cross file, native file, '
- 'or env var as {}'.format(display_name, self.for_machine, potential_path))
- yield ExternalProgram.from_entry(name, potential_path)
- # We never fallback if the user-specified option is no good, so
- # stop returning options.
- return
- mlog.debug('{} binary missing from cross or native file, or env var undefined.'.format(display_name))
- # Fallback on hard-coded defaults.
- # TODO prefix this for the cross case instead of ignoring thing.
- if self.env.machines.matches_build_machine(self.for_machine):
- for potential_path in default_names:
- mlog.debug('Trying a default {} fallback at'.format(display_name), potential_path)
- yield ExternalProgram(potential_path, silent=True)
-
class NotFoundDependency(Dependency):
def __init__(self, environment):
@@ -421,8 +436,6 @@ class ConfigToolDependency(ExternalDependency):
self.config = None
return
self.version = version
- if getattr(self, 'finish_init', None):
- self.finish_init(self)
def _sanitize_version(self, version):
"""Remove any non-numeric, non-point version suffixes."""
@@ -433,34 +446,20 @@ class ConfigToolDependency(ExternalDependency):
return m.group(0).rstrip('.')
return version
- def find_config(self, versions=None, returncode: int = 0):
+ def find_config(self, versions: T.Optional[T.List[str]] = None, returncode: int = 0) \
+ -> T.Tuple[T.Optional[str], T.Optional[str]]:
"""Helper method that searches for config tool binaries in PATH and
returns the one that best matches the given version requirements.
"""
if not isinstance(versions, list) and versions is not None:
versions = listify(versions)
-
- tool = self.env.lookup_binary_entry(self.for_machine, self.tool_name)
- if tool is not None:
- tools = [tool]
- else:
- if not self.env.machines.matches_build_machine(self.for_machine):
- mlog.deprecation('No entry for {0} specified in your cross file. '
- 'Falling back to searching PATH. This may find a '
- 'native version of {0}! This will become a hard '
- 'error in a future version of meson'.format(self.tool_name))
- tools = [[t] for t in self.tools]
-
- best_match = (None, None)
- for tool in tools:
- if len(tool) == 1:
- # In some situations the command can't be directly executed.
- # For example Shell scripts need to be called through sh on
- # Windows (see issue #1423).
- potential_bin = ExternalProgram(tool[0], silent=True)
- if not potential_bin.found():
- continue
- tool = potential_bin.get_command()
+ best_match = (None, None) # type: T.Tuple[T.Optional[str], T.Optional[str]]
+ for potential_bin in find_external_program(
+ self.env, self.for_machine, self.tool_name,
+ self.tool_name, self.tools, allow_default_for_cross=False):
+ if not potential_bin.found():
+ continue
+ tool = potential_bin.get_command()
try:
p, out = Popen_safe(tool + [self.version_arg])[:2]
except (FileNotFoundError, PermissionError):
@@ -581,9 +580,9 @@ class PkgConfigDependency(ExternalDependency):
else:
assert PkgConfigDependency.class_pkgbin[self.for_machine] is None
mlog.debug('Pkg-config binary for %s is not cached.' % self.for_machine)
- for potential_pkgbin in self.search_tool('pkgconfig', 'Pkg-config', environment.default_pkgconfig):
- mlog.debug('Trying pkg-config binary {} for machine {} at {}'
- .format(potential_pkgbin.name, self.for_machine, potential_pkgbin.command))
+ for potential_pkgbin in find_external_program(
+ self.env, self.for_machine, 'pkgconfig', 'Pkg-config',
+ environment.default_pkgconfig, allow_default_for_cross=False):
version_if_ok = self.check_pkgconfig(potential_pkgbin)
if not version_if_ok:
continue
@@ -1090,8 +1089,9 @@ class CMakeDependency(ExternalDependency):
# Setup the trace parser
self.traceparser = CMakeTraceParser(self.cmakebin.version(), self._get_build_dir())
+ cm_args = stringlistify(extract_as_list(kwargs, 'cmake_args'))
if CMakeDependency.class_cmakeinfo[self.for_machine] is None:
- CMakeDependency.class_cmakeinfo[self.for_machine] = self._get_cmake_info()
+ CMakeDependency.class_cmakeinfo[self.for_machine] = self._get_cmake_info(cm_args)
self.cmakeinfo = CMakeDependency.class_cmakeinfo[self.for_machine]
if self.cmakeinfo is None:
raise self._gen_exception('Unable to obtain CMake system information')
@@ -1101,25 +1101,9 @@ class CMakeDependency(ExternalDependency):
modules += [(x, False) for x in stringlistify(extract_as_list(kwargs, 'optional_modules'))]
cm_path = stringlistify(extract_as_list(kwargs, 'cmake_module_path'))
cm_path = [x if os.path.isabs(x) else os.path.join(environment.get_source_dir(), x) for x in cm_path]
- cm_args = stringlistify(extract_as_list(kwargs, 'cmake_args'))
if cm_path:
cm_args.append('-DCMAKE_MODULE_PATH=' + ';'.join(cm_path))
-
- pref_path = self.env.coredata.builtins_per_machine[self.for_machine]['cmake_prefix_path'].value
- env_pref_path = get_env_var(
- self.for_machine,
- self.env.is_cross_build(),
- 'CMAKE_PREFIX_PATH')
- if env_pref_path is not None:
- env_pref_path = env_pref_path.split(os.pathsep)
- env_pref_path = [x for x in env_pref_path if x] # Filter out empty strings
- if not pref_path:
- pref_path = []
- pref_path += env_pref_path
- if pref_path:
- cm_args.append('-DCMAKE_PREFIX_PATH={}'.format(';'.join(pref_path)))
-
- if not self._preliminary_find_check(name, cm_path, pref_path, environment.machines[self.for_machine]):
+ if not self._preliminary_find_check(name, cm_path, self.cmakebin.get_cmake_prefix_paths(), environment.machines[self.for_machine]):
mlog.debug('Preliminary CMake check failed. Aborting.')
return
self._detect_dep(name, modules, components, cm_args)
@@ -1129,7 +1113,7 @@ class CMakeDependency(ExternalDependency):
return s.format(self.__class__.__name__, self.name, self.is_found,
self.version_reqs)
- def _get_cmake_info(self):
+ def _get_cmake_info(self, cm_args):
mlog.debug("Extracting basic cmake information")
res = {}
@@ -1148,6 +1132,7 @@ class CMakeDependency(ExternalDependency):
# Prepare options
cmake_opts = temp_parser.trace_args() + ['.']
+ cmake_opts += cm_args
if len(i) > 0:
cmake_opts = ['-G', i] + cmake_opts
@@ -1171,12 +1156,17 @@ class CMakeDependency(ExternalDependency):
except MesonException:
return None
+ def process_paths(l: T.List[str]) -> T.Set[str]:
+ l = [x.split(':') for x in l]
+ l = [x for sublist in l for x in sublist]
+ return set(l)
+
# Extract the variables and sanity check them
- root_paths = set(temp_parser.get_cmake_var('MESON_FIND_ROOT_PATH'))
- root_paths.update(set(temp_parser.get_cmake_var('MESON_CMAKE_SYSROOT')))
+ root_paths = process_paths(temp_parser.get_cmake_var('MESON_FIND_ROOT_PATH'))
+ root_paths.update(process_paths(temp_parser.get_cmake_var('MESON_CMAKE_SYSROOT')))
root_paths = sorted(root_paths)
root_paths = list(filter(lambda x: os.path.isdir(x), root_paths))
- module_paths = set(temp_parser.get_cmake_var('MESON_PATHS_LIST'))
+ module_paths = process_paths(temp_parser.get_cmake_var('MESON_PATHS_LIST'))
rooted_paths = []
for j in [Path(x) for x in root_paths]:
for i in [Path(x) for x in module_paths]:
@@ -1462,8 +1452,15 @@ class CMakeDependency(ExternalDependency):
cfgs = [x for x in tgt.properties['IMPORTED_CONFIGURATIONS'] if x]
cfg = cfgs[0]
- if 'RELEASE' in cfgs:
- cfg = 'RELEASE'
+            is_debug = self.env.coredata.get_builtin_option('debug')
+ if is_debug:
+ if 'DEBUG' in cfgs:
+ cfg = 'DEBUG'
+ elif 'RELEASE' in cfgs:
+ cfg = 'RELEASE'
+ else:
+ if 'RELEASE' in cfgs:
+ cfg = 'RELEASE'
if 'IMPORTED_IMPLIB_{}'.format(cfg) in tgt.properties:
libraries += [x for x in tgt.properties['IMPORTED_IMPLIB_{}'.format(cfg)] if x]
@@ -1526,8 +1523,7 @@ class CMakeDependency(ExternalDependency):
build_dir = self._get_build_dir()
# Insert language parameters into the CMakeLists.txt and write new CMakeLists.txt
- # Per the warning in pkg_resources, this is *not* a path and os.path and Pathlib are *not* safe to use here.
- cmake_txt = pkg_resources.resource_string('mesonbuild', 'dependencies/data/' + cmake_file).decode()
+ cmake_txt = mesondata['dependencies/data/' + cmake_file].data
# In general, some Fortran CMake find_package() also require C language enabled,
# even if nothing from C is directly used. An easy Fortran example that fails
@@ -1802,6 +1798,10 @@ class ExternalProgram:
self.name = name
if command is not None:
self.command = listify(command)
+ if mesonlib.is_windows():
+ cmd = self.command[0]
+ args = self.command[1:]
+ self.command = self._search_windows_special_cases(name, cmd) + args
else:
all_search_dirs = [search_dir]
if extra_search_dirs:
@@ -1855,14 +1855,22 @@ class ExternalProgram:
# Ensure that we use USERPROFILE even when inside MSYS, MSYS2, Cygwin, etc.
if 'USERPROFILE' not in os.environ:
return path
- # Ignore executables in the WindowsApps directory which are
- # zero-sized wrappers that magically open the Windows Store to
- # install the application.
+ # The WindowsApps directory is a bit of a problem. It contains
+ # some zero-sized .exe files which have "reparse points", that
+ # might either launch an installed application, or might open
+ # a page in the Windows Store to download the application.
+ #
+ # To handle the case where the python interpreter we're
+ # running on came from the Windows Store, if we see the
+ # WindowsApps path in the search path, replace it with
+ # dirname(sys.executable).
appstore_dir = Path(os.environ['USERPROFILE']) / 'AppData' / 'Local' / 'Microsoft' / 'WindowsApps'
paths = []
for each in path.split(os.pathsep):
if Path(each) != appstore_dir:
paths.append(each)
+ elif 'WindowsApps' in sys.executable:
+ paths.append(os.path.dirname(sys.executable))
return os.pathsep.join(paths)
@staticmethod
diff --git a/mesonbuild/dependencies/boost.py b/mesonbuild/dependencies/boost.py
index 3341f3e..3dd0fd6 100644
--- a/mesonbuild/dependencies/boost.py
+++ b/mesonbuild/dependencies/boost.py
@@ -20,9 +20,10 @@ from pathlib import Path
from .. import mlog
from .. import mesonlib
+from ..envconfig import get_env_var
from ..environment import Environment
-from .base import (DependencyException, ExternalDependency)
+from .base import DependencyException, ExternalDependency, PkgConfigDependency
from .misc import threads_factory
# On windows 3 directory layouts are supported:
@@ -163,8 +164,8 @@ class BoostLibraryFile():
if not tags:
return
- # Without any tags mt is assumed, however, an absents of mt in the name
- # with tags present indicates that the lib was build without mt support
+    # Without any tags, mt is assumed; however, an absence of mt in the name
+ # with tags present indicates that the lib was built without mt support
self.mt = False
for i in tags:
if i == 'mt':
@@ -189,13 +190,13 @@ class BoostLibraryFile():
def __lt__(self, other: T.Any) -> bool:
if isinstance(other, BoostLibraryFile):
return (
- self.mod_name, self.version_lib, self.arch, self.static,
+ self.mod_name, self.static, self.version_lib, self.arch,
not self.mt, not self.runtime_static,
not self.debug, self.runtime_debug, self.python_debug,
self.stlport, self.deprecated_iostreams,
self.name,
) < (
- other.mod_name, other.version_lib, other.arch, other.static,
+ other.mod_name, other.static, other.version_lib, other.arch,
not other.mt, not other.runtime_static,
not other.debug, other.runtime_debug, other.python_debug,
other.stlport, other.deprecated_iostreams,
@@ -344,6 +345,7 @@ class BoostDependency(ExternalDependency):
self.multithreading = kwargs.get('threading', 'multi') == 'multi'
self.boost_root = None
+ self.explicit_static = 'static' in kwargs
# Extract and validate modules
self.modules = mesonlib.extract_as_list(kwargs, 'modules') # type: T.List[str]
@@ -366,36 +368,27 @@ class BoostDependency(ExternalDependency):
self.arch = environment.machines[self.for_machine].cpu_family
self.arch = boost_arch_map.get(self.arch, None)
- # Prefere BOOST_INCLUDEDIR and BOOST_LIBRARYDIR if preset
- boost_manual_env = [x in os.environ for x in ['BOOST_INCLUDEDIR', 'BOOST_LIBRARYDIR']]
- if all(boost_manual_env):
- inc_dir = Path(os.environ['BOOST_INCLUDEDIR'])
- lib_dir = Path(os.environ['BOOST_LIBRARYDIR'])
- mlog.debug('Trying to find boost with:')
- mlog.debug(' - BOOST_INCLUDEDIR = {}'.format(inc_dir))
- mlog.debug(' - BOOST_LIBRARYDIR = {}'.format(lib_dir))
-
- boost_inc_dir = None
- for j in [inc_dir / 'version.hpp', inc_dir / 'boost' / 'version.hpp']:
- if j.is_file():
- boost_inc_dir = self._include_dir_from_version_header(j)
- break
- if not boost_inc_dir:
- self.is_found = False
- return
+ # First, look for paths specified in a machine file
+ props = self.env.properties[self.for_machine]
+ boost_property_env = [props.get('boost_includedir'), props.get('boost_librarydir'), props.get('boost_root')]
+ if any(boost_property_env):
+ self.detect_boost_machine_file(props)
+ return
- self.is_found = self.run_check([boost_inc_dir], [lib_dir])
+ # Next, look for paths in the environment
+ boost_manual_env_list = ['BOOST_INCLUDEDIR', 'BOOST_LIBRARYDIR', 'BOOST_ROOT', 'BOOSTROOT']
+        boost_manual_env = [get_env_var(self.for_machine, self.env.is_cross_build(), x) for x in boost_manual_env_list]
+ if any(boost_manual_env):
+ self.detect_boost_env()
return
- elif any(boost_manual_env):
- mlog.warning('Both BOOST_INCLUDEDIR *and* BOOST_LIBRARYDIR have to be set (one is not enough). Ignoring.')
- # A) Detect potential boost root directories (uses also BOOST_ROOT env var)
- roots = self.detect_roots()
- roots = list(mesonlib.OrderedSet(roots))
+ # Finally, look for paths from .pc files and from searching the filesystem
+ self.detect_roots()
- # B) Foreach candidate
+ def check_and_set_roots(self, roots) -> None:
+ roots = list(mesonlib.OrderedSet(roots))
for j in roots:
- # 1. Look for the boost headers (boost/version.pp)
+ # 1. Look for the boost headers (boost/version.hpp)
mlog.debug('Checking potential boost root {}'.format(j.as_posix()))
inc_dirs = self.detect_inc_dirs(j)
inc_dirs = sorted(inc_dirs, reverse=True) # Prefer the newer versions
@@ -410,11 +403,88 @@ class BoostDependency(ExternalDependency):
self.boost_root = j
break
+ def detect_boost_machine_file(self, props) -> None:
+ incdir = props.get('boost_includedir')
+ libdir = props.get('boost_librarydir')
+
+ if incdir and libdir:
+ inc_dir = Path(props['boost_includedir'])
+ lib_dir = Path(props['boost_librarydir'])
+
+ if not inc_dir.is_absolute() or not lib_dir.is_absolute():
+ raise DependencyException('Paths given for boost_includedir and boost_librarydir in machine file must be absolute')
+
+ mlog.debug('Trying to find boost with:')
+ mlog.debug(' - boost_includedir = {}'.format(inc_dir))
+ mlog.debug(' - boost_librarydir = {}'.format(lib_dir))
+
+ return self.detect_split_root(inc_dir, lib_dir)
+
+ elif incdir or libdir:
+ raise DependencyException('Both boost_includedir *and* boost_librarydir have to be set in your machine file (one is not enough)')
+
+ rootdir = props.get('boost_root')
+ # It shouldn't be possible to get here without something in boost_root
+ assert(rootdir)
+
+ raw_paths = mesonlib.stringlistify(rootdir)
+ paths = [Path(x) for x in raw_paths]
+ if paths and any([not x.is_absolute() for x in paths]):
+ raise DependencyException('boost_root path given in machine file must be absolute')
+
+ self.check_and_set_roots(paths)
+
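A hypothetical machine-file [properties] section consumed by this method; boost_root may list one or more absolute roots, while boost_includedir and boost_librarydir must be given together and be absolute:

[properties]
boost_root = '/opt/boost_1_73_0'

# alternatively, for a split layout (both required, both absolute):
# boost_includedir = '/opt/boost/include'
# boost_librarydir = '/opt/boost/lib'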
+ def detect_boost_env(self):
+        boost_includedir = get_env_var(self.for_machine, self.env.is_cross_build(), 'BOOST_INCLUDEDIR')
+        boost_librarydir = get_env_var(self.for_machine, self.env.is_cross_build(), 'BOOST_LIBRARYDIR')
+
+ boost_manual_env = [boost_includedir, boost_librarydir]
+ if all(boost_manual_env):
+ inc_dir = Path(boost_includedir)
+ lib_dir = Path(boost_librarydir)
+
+ if not inc_dir.is_absolute() or not lib_dir.is_absolute():
+ raise DependencyException('Paths given in BOOST_INCLUDEDIR and BOOST_LIBRARYDIR must be absolute')
+
+ mlog.debug('Trying to find boost with:')
+ mlog.debug(' - BOOST_INCLUDEDIR = {}'.format(inc_dir))
+ mlog.debug(' - BOOST_LIBRARYDIR = {}'.format(lib_dir))
+
+ return self.detect_split_root(inc_dir, lib_dir)
+
+ elif any(boost_manual_env):
+            raise DependencyException('Both BOOST_INCLUDEDIR *and* BOOST_LIBRARYDIR have to be set (one is not enough)')
+
+        boost_root = get_env_var(self.for_machine, self.env.is_cross_build(), 'BOOST_ROOT')
+        boostroot = get_env_var(self.for_machine, self.env.is_cross_build(), 'BOOSTROOT')
+
+ # It shouldn't be possible to get here without something in BOOST_ROOT or BOOSTROOT
+ assert(boost_root or boostroot)
+
+ for path, name in [(boost_root, 'BOOST_ROOT'), (boostroot, 'BOOSTROOT')]:
+ if path:
+ raw_paths = path.split(os.pathsep)
+ paths = [Path(x) for x in raw_paths]
+ if paths and any([not x.is_absolute() for x in paths]):
+ raise DependencyException('Paths in {} must be absolute'.format(name))
+ break
+
+ self.check_and_set_roots(paths)
+
def run_check(self, inc_dirs: T.List[BoostIncludeDir], lib_dirs: T.List[Path]) -> bool:
+ mlog.debug(' - potential library dirs: {}'.format([x.as_posix() for x in lib_dirs]))
+ mlog.debug(' - potential include dirs: {}'.format([x.path.as_posix() for x in inc_dirs]))
+
# 2. Find all boost libraries
libs = [] # type: T.List[BoostLibraryFile]
for i in lib_dirs:
- libs += self.detect_libraries(i)
+ libs = self.detect_libraries(i)
+ if libs:
+ mlog.debug(' - found boost library dir: {}'.format(i))
+ # mlog.debug(' - raw library list:')
+ # for j in libs:
+ # mlog.debug(' - {}'.format(j))
+ break
libs = sorted(set(libs))
modules = ['boost_' + x for x in self.modules]
@@ -422,9 +492,6 @@ class BoostDependency(ExternalDependency):
mlog.debug(' - found boost {} include dir: {}'.format(inc.version, inc.path))
f_libs = self.filter_libraries(libs, inc.version_lib)
- # mlog.debug(' - raw library list:')
- # for j in libs:
- # mlog.debug(' - {}'.format(j))
mlog.debug(' - filtered library list:')
for j in f_libs:
mlog.debug(' - {}'.format(j))
@@ -499,6 +566,19 @@ class BoostDependency(ExternalDependency):
return [self._include_dir_from_version_header(x) for x in candidates]
def detect_lib_dirs(self, root: Path) -> T.List[Path]:
+ # First check the system include paths. Only consider those within the
+ # given root path
+ system_dirs_t = self.clib_compiler.get_library_dirs(self.env)
+ system_dirs = [Path(x) for x in system_dirs_t]
+ system_dirs = [x.resolve() for x in system_dirs if x.exists()]
+ system_dirs = [x for x in system_dirs if mesonlib.path_is_in_root(x, root)]
+ system_dirs = list(mesonlib.OrderedSet(system_dirs))
+
+ if system_dirs:
+ return system_dirs
+
+ # No system include paths were found --> fall back to manually looking
+ # for library dirs in root
dirs = [] # type: T.List[Path]
subdirs = [] # type: T.List[Path]
for i in root.iterdir():
@@ -510,7 +590,25 @@ class BoostDependency(ExternalDependency):
for j in i.iterdir():
if j.is_dir() and j.name.endswith('-linux-gnu'):
subdirs += [j]
- return dirs + subdirs
+
+ # Filter out paths that don't match the target arch to avoid finding
+ # the wrong libraries. See https://github.com/mesonbuild/meson/issues/7110
+ if not self.arch:
+ return dirs + subdirs
+
+ arch_list_32 = ['32', 'i386']
+ arch_list_64 = ['64']
+
+ raw_list = dirs + subdirs
+ no_arch = [x for x in raw_list if not any([y in x.name for y in arch_list_32 + arch_list_64])]
+
+ matching_arch = [] # type: T.List[Path]
+ if '32' in self.arch:
+ matching_arch = [x for x in raw_list if any([y in x.name for y in arch_list_32])]
+ elif '64' in self.arch:
+ matching_arch = [x for x in raw_list if any([y in x.name for y in arch_list_64])]
+
+ return sorted(matching_arch) + sorted(no_arch)
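A minimal sketch of that selection for a 64-bit target: directories whose name carries a matching bitness tag are preferred, untagged ones are kept as a fallback, and mismatching ones are dropped:

from pathlib import Path

raw_list = [Path('lib'), Path('lib64'), Path('lib32')]
arch_list_32 = ['32', 'i386']
arch_list_64 = ['64']

no_arch = [x for x in raw_list
           if not any(y in x.name for y in arch_list_32 + arch_list_64)]
matching_arch = [x for x in raw_list
                 if any(y in x.name for y in arch_list_64)]
assert sorted(matching_arch) + sorted(no_arch) == [Path('lib64'), Path('lib')]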
def filter_libraries(self, libs: T.List[BoostLibraryFile], lib_vers: str) -> T.List[BoostLibraryFile]:
# MSVC is very picky with the library tags
@@ -522,7 +620,13 @@ class BoostDependency(ExternalDependency):
except (KeyError, IndexError, AttributeError):
pass
- libs = [x for x in libs if x.static == self.static]
+ # mlog.debug(' - static: {}'.format(self.static))
+ # mlog.debug(' - not explicit static: {}'.format(not self.explicit_static))
+ # mlog.debug(' - mt: {}'.format(self.multithreading))
+ # mlog.debug(' - version: {}'.format(lib_vers))
+ # mlog.debug(' - arch: {}'.format(self.arch))
+ # mlog.debug(' - vscrt: {}'.format(vscrt))
+ libs = [x for x in libs if x.static == self.static or not self.explicit_static]
libs = [x for x in libs if x.mt == self.multithreading]
libs = [x for x in libs if x.version_matches(lib_vers)]
libs = [x for x in libs if x.arch_matches(self.arch)]
@@ -554,18 +658,37 @@ class BoostDependency(ExternalDependency):
libs += [BoostLibraryFile(i)]
return [x for x in libs if x.is_boost()] # Filter out no boost libraries
- def detect_roots(self) -> T.List[Path]:
+ def detect_split_root(self, inc_dir, lib_dir) -> None:
+ boost_inc_dir = None
+ for j in [inc_dir / 'version.hpp', inc_dir / 'boost' / 'version.hpp']:
+ if j.is_file():
+ boost_inc_dir = self._include_dir_from_version_header(j)
+ break
+ if not boost_inc_dir:
+ self.is_found = False
+ return
+
+ self.is_found = self.run_check([boost_inc_dir], [lib_dir])
+
+ def detect_roots(self) -> None:
roots = [] # type: T.List[Path]
- # Add roots from the environment
- for i in ['BOOST_ROOT', 'BOOSTROOT']:
- if i in os.environ:
- raw_paths = os.environ[i].split(os.pathsep)
- paths = [Path(x) for x in raw_paths]
- if paths and any([not x.is_absolute() for x in paths]):
- raise DependencyException('Paths in {} must be absolute'.format(i))
- roots += paths
- return roots # Do not add system paths if BOOST_ROOT is present
+ # Try getting the BOOST_ROOT from a boost.pc if it exists. This primarily
+ # allows BoostDependency to find boost from Conan. See #5438
+ try:
+ boost_pc = PkgConfigDependency('boost', self.env, {'required': False})
+ if boost_pc.found():
+ boost_root = boost_pc.get_pkgconfig_variable('prefix', {'default': None})
+ if boost_root:
+ roots += [Path(boost_root)]
+ except DependencyException:
+ pass
+
+ # Add roots from system paths
+ inc_paths = [Path(x) for x in self.clib_compiler.get_default_include_dirs()]
+ inc_paths = [x.parent for x in inc_paths if x.exists()]
+ inc_paths = [x.resolve() for x in inc_paths]
+ roots += inc_paths
# Add system paths
if self.env.machines[self.for_machine].is_windows():
@@ -588,8 +711,6 @@ class BoostDependency(ExternalDependency):
roots += [x for x in candidates if x.name.lower().startswith('boost') and x.is_dir()]
else:
tmp = [] # type: T.List[Path]
- # Add unix paths
- tmp += [Path(x).parent for x in self.clib_compiler.get_default_include_dirs()]
# Homebrew
brew_boost = Path('/usr/local/Cellar/boost')
@@ -607,7 +728,7 @@ class BoostDependency(ExternalDependency):
tmp = [x.resolve() for x in tmp]
roots += tmp
- return roots
+ self.check_and_set_roots(roots)
def log_details(self) -> str:
res = ''
@@ -637,11 +758,8 @@ class BoostDependency(ExternalDependency):
return BoostIncludeDir(hfile.parents[1], int(m.group(1)))
def _extra_compile_args(self) -> T.List[str]:
- args = [] # type: T.List[str]
- args += ['-DBOOST_ALL_NO_LIB'] # Disable automatic linking
- if not self.static:
- args += ['-DBOOST_ALL_DYN_LINK']
- return args
+ # BOOST_ALL_DYN_LINK should not be required with the known defines below
+ return ['-DBOOST_ALL_NO_LIB'] # Disable automatic linking
# See https://www.boost.org/doc/libs/1_72_0/more/getting_started/unix-variants.html#library-naming
@@ -665,9 +783,9 @@ boost_arch_map = {
#### ---- BEGIN GENERATED ---- ####
# #
# Generated with tools/boost_names.py:
-# - boost version: 1.72.0
-# - modules found: 158
-# - libraries found: 42
+# - boost version: 1.73.0
+# - modules found: 159
+# - libraries found: 43
#
class BoostLibrary():
@@ -690,16 +808,16 @@ class BoostModule():
boost_libraries = {
'boost_atomic': BoostLibrary(
name='boost_atomic',
- shared=[],
- static=[],
+ shared=['-DBOOST_ATOMIC_DYN_LINK=1'],
+ static=['-DBOOST_ATOMIC_STATIC_LINK=1'],
single=[],
multi=[],
),
'boost_chrono': BoostLibrary(
name='boost_chrono',
- shared=['-DBOOST_ALL_DYN_LINK=1'],
- static=['-DBOOST_All_STATIC_LINK=1'],
- single=[],
+ shared=['-DBOOST_CHRONO_DYN_LINK=1'],
+ static=['-DBOOST_CHRONO_STATIC_LINK=1'],
+ single=['-DBOOST_CHRONO_THREAD_DISABLED'],
multi=[],
),
'boost_container': BoostLibrary(
@@ -711,28 +829,28 @@ boost_libraries = {
),
'boost_context': BoostLibrary(
name='boost_context',
- shared=[],
+ shared=['-DBOOST_CONTEXT_DYN_LINK=1'],
static=[],
single=[],
multi=[],
),
'boost_contract': BoostLibrary(
name='boost_contract',
- shared=[],
- static=[],
- single=[],
+ shared=['-DBOOST_CONTRACT_DYN_LINK'],
+ static=['-DBOOST_CONTRACT_STATIC_LINK'],
+ single=['-DBOOST_CONTRACT_DISABLE_THREADS'],
multi=[],
),
'boost_coroutine': BoostLibrary(
name='boost_coroutine',
- shared=[],
+ shared=['-DBOOST_COROUTINES_DYN_LINK=1'],
static=[],
single=[],
multi=[],
),
'boost_date_time': BoostLibrary(
name='boost_date_time',
- shared=[],
+ shared=['-DBOOST_DATE_TIME_DYN_LINK=1'],
static=[],
single=[],
multi=[],
@@ -746,14 +864,14 @@ boost_libraries = {
),
'boost_fiber': BoostLibrary(
name='boost_fiber',
- shared=[],
+ shared=['-DBOOST_FIBERS_DYN_LINK=1'],
static=[],
single=[],
multi=[],
),
'boost_fiber_numa': BoostLibrary(
name='boost_fiber_numa',
- shared=[],
+ shared=['-DBOOST_FIBERS_DYN_LINK=1'],
static=[],
single=[],
multi=[],
@@ -767,84 +885,91 @@ boost_libraries = {
),
'boost_graph': BoostLibrary(
name='boost_graph',
- shared=['-DBOOST_GRAPH_DYN_LINK=1'],
+ shared=[],
static=[],
single=[],
multi=[],
),
'boost_iostreams': BoostLibrary(
name='boost_iostreams',
- shared=['-DBOOST_IOSTREAMS_DYN_LINK=1', '-DBOOST_IOSTREAMS_DYN_LINK=1'],
+ shared=['-DBOOST_IOSTREAMS_DYN_LINK=1'],
static=[],
single=[],
multi=[],
),
'boost_locale': BoostLibrary(
name='boost_locale',
- shared=['-DBOOST_LOCALE_DYN_LINK=1'],
+ shared=[],
static=[],
single=[],
multi=[],
),
'boost_log': BoostLibrary(
name='boost_log',
- shared=['-DBOOST_LOG_DLL', '-DBOOST_LOG_DYN_LINK=1'],
+ shared=['-DBOOST_LOG_DYN_LINK=1'],
static=[],
- single=['BOOST_LOG_NO_THREADS'],
+ single=['-DBOOST_LOG_NO_THREADS'],
multi=[],
),
'boost_log_setup': BoostLibrary(
name='boost_log_setup',
- shared=['-DBOOST_LOG_DYN_LINK=1', '-DBOOST_LOG_SETUP_DLL', '-DBOOST_LOG_SETUP_DYN_LINK=1'],
+ shared=['-DBOOST_LOG_SETUP_DYN_LINK=1'],
static=[],
- single=['BOOST_LOG_NO_THREADS'],
+ single=['-DBOOST_LOG_NO_THREADS'],
multi=[],
),
'boost_math_c99': BoostLibrary(
name='boost_math_c99',
- shared=['-DBOOST_MATH_TR1_DYN_LINK=1'],
+ shared=[],
static=[],
single=[],
multi=[],
),
'boost_math_c99f': BoostLibrary(
name='boost_math_c99f',
- shared=['-DBOOST_MATH_TR1_DYN_LINK=1'],
+ shared=[],
static=[],
single=[],
multi=[],
),
'boost_math_c99l': BoostLibrary(
name='boost_math_c99l',
- shared=['-DBOOST_MATH_TR1_DYN_LINK=1'],
+ shared=[],
static=[],
single=[],
multi=[],
),
'boost_math_tr1': BoostLibrary(
name='boost_math_tr1',
- shared=['-DBOOST_MATH_TR1_DYN_LINK=1'],
+ shared=[],
static=[],
single=[],
multi=[],
),
'boost_math_tr1f': BoostLibrary(
name='boost_math_tr1f',
- shared=['-DBOOST_MATH_TR1_DYN_LINK=1'],
+ shared=[],
static=[],
single=[],
multi=[],
),
'boost_math_tr1l': BoostLibrary(
name='boost_math_tr1l',
- shared=['-DBOOST_MATH_TR1_DYN_LINK=1'],
+ shared=[],
static=[],
single=[],
multi=[],
),
'boost_mpi': BoostLibrary(
name='boost_mpi',
- shared=['-DBOOST_MPI_DYN_LINK=1'],
+ shared=[],
+ static=[],
+ single=[],
+ multi=[],
+ ),
+ 'boost_nowide': BoostLibrary(
+ name='boost_nowide',
+ shared=['-DBOOST_NOWIDE_DYN_LINK=1'],
static=[],
single=[],
multi=[],
@@ -865,63 +990,63 @@ boost_libraries = {
),
'boost_random': BoostLibrary(
name='boost_random',
- shared=[],
+ shared=['-DBOOST_RANDOM_DYN_LINK'],
static=[],
single=[],
multi=[],
),
'boost_regex': BoostLibrary(
name='boost_regex',
- shared=['-DBOOST_REGEX_DYN_LINK=1'],
+ shared=[],
static=[],
single=[],
multi=[],
),
'boost_serialization': BoostLibrary(
name='boost_serialization',
- shared=['-DBOOST_SERIALIZATION_DYN_LINK=1'],
+ shared=[],
static=[],
single=[],
multi=[],
),
'boost_stacktrace_addr2line': BoostLibrary(
name='boost_stacktrace_addr2line',
- shared=['-DBOOST_STACKTRACE_DYN_LINK=1'],
+ shared=[],
static=[],
single=[],
multi=[],
),
'boost_stacktrace_backtrace': BoostLibrary(
name='boost_stacktrace_backtrace',
- shared=['-DBOOST_STACKTRACE_DYN_LINK=1'],
+ shared=[],
static=[],
single=[],
multi=[],
),
'boost_stacktrace_basic': BoostLibrary(
name='boost_stacktrace_basic',
- shared=['-DBOOST_STACKTRACE_DYN_LINK=1'],
+ shared=[],
static=[],
single=[],
multi=[],
),
'boost_stacktrace_noop': BoostLibrary(
name='boost_stacktrace_noop',
- shared=['-DBOOST_STACKTRACE_DYN_LINK=1'],
+ shared=[],
static=[],
single=[],
multi=[],
),
'boost_stacktrace_windbg': BoostLibrary(
name='boost_stacktrace_windbg',
- shared=['-DBOOST_STACKTRACE_DYN_LINK=1'],
+ shared=[],
static=[],
single=[],
multi=[],
),
'boost_stacktrace_windbg_cached': BoostLibrary(
name='boost_stacktrace_windbg_cached',
- shared=['-DBOOST_STACKTRACE_DYN_LINK=1'],
+ shared=[],
static=[],
single=[],
multi=[],
@@ -942,8 +1067,8 @@ boost_libraries = {
),
'boost_thread': BoostLibrary(
name='boost_thread',
- shared=['-DBOOST_THREAD_USE_DLL=1'],
- static=['-DBOOST_THREAD_USE_LIB=1'],
+ shared=['-DBOOST_THREAD_BUILD_DLL=1', '-DBOOST_THREAD_USE_DLL=1'],
+ static=['-DBOOST_THREAD_BUILD_LIB=1', '-DBOOST_THREAD_USE_LIB=1'],
single=[],
multi=[],
),
@@ -956,7 +1081,7 @@ boost_libraries = {
),
'boost_type_erasure': BoostLibrary(
name='boost_type_erasure',
- shared=[],
+ shared=['-DBOOST_TYPE_ERASURE_DYN_LINK'],
static=[],
single=[],
multi=[],
@@ -977,7 +1102,7 @@ boost_libraries = {
),
'boost_wserialization': BoostLibrary(
name='boost_wserialization',
- shared=['-DBOOST_SERIALIZATION_DYN_LINK=1'],
+ shared=[],
static=[],
single=[],
multi=[],
diff --git a/mesonbuild/dependencies/cuda.py b/mesonbuild/dependencies/cuda.py
index 063fa6d..d197f8c 100644
--- a/mesonbuild/dependencies/cuda.py
+++ b/mesonbuild/dependencies/cuda.py
@@ -158,11 +158,15 @@ class CudaDependency(ExternalDependency):
mlog.debug('Falling back to extracting version from path')
path_version_regex = self.path_version_win_regex if self._is_windows() else self.path_version_nix_regex
- m = path_version_regex.match(os.path.basename(path))
- if m:
- return m[1]
+ try:
+ m = path_version_regex.match(os.path.basename(path))
+ if m:
+ return m.group(1)
+ else:
+ mlog.warning('Could not detect CUDA Toolkit version for {}'.format(path))
+ except Exception as e:
+ mlog.warning('Could not detect CUDA Toolkit version for {}: {}'.format(path, str(e)))
- mlog.warning('Could not detect CUDA Toolkit version for {}'.format(path))
return '0.0'
def _read_toolkit_version_txt(self, path):
@@ -173,7 +177,7 @@ class CudaDependency(ExternalDependency):
version_str = version_file.readline() # e.g. 'CUDA Version 10.1.168'
m = self.toolkit_version_regex.match(version_str)
if m:
- return self._strip_patch_version(m[1])
+ return self._strip_patch_version(m.group(1))
except Exception as e:
mlog.debug('Could not read CUDA Toolkit\'s version file {}: {}'.format(version_file_path, str(e)))
@@ -193,7 +197,7 @@ class CudaDependency(ExternalDependency):
raise DependencyException(msg.format(arch, 'Windows'))
return os.path.join('lib', libdirs[arch])
elif machine.is_linux():
- libdirs = {'x86_64': 'lib64', 'ppc64': 'lib'}
+ libdirs = {'x86_64': 'lib64', 'ppc64': 'lib', 'aarch64': 'lib64'}
if arch not in libdirs:
raise DependencyException(msg.format(arch, 'Linux'))
return libdirs[arch]
diff --git a/mesonbuild/dependencies/misc.py b/mesonbuild/dependencies/misc.py
index de05a79..f19566b 100644
--- a/mesonbuild/dependencies/misc.py
+++ b/mesonbuild/dependencies/misc.py
@@ -97,7 +97,8 @@ class OpenMPDependency(ExternalDependency):
for name in header_names:
if self.clib_compiler.has_header(name, '', self.env, dependencies=[self], disable_cache=True)[0]:
self.is_found = True
- self.compile_args = self.link_args = self.clib_compiler.openmp_flags()
+ self.compile_args = self.clib_compiler.openmp_flags()
+ self.link_args = self.clib_compiler.openmp_link_flags()
break
if not self.is_found:
mlog.log(mlog.yellow('WARNING:'), 'OpenMP found but omp.h missing.')
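Splitting compile_args from link_args matters because a compiler may need a different flag set at link time than at compile time. For GCC-style compilers both are -fopenmp, so a plausible default is for openmp_link_flags to reuse openmp_flags; a sketch under that assumption, not the exact mixin code:

class GnuLikeOpenMP:
    def openmp_flags(self):
        return ['-fopenmp']

    def openmp_link_flags(self):
        # Same flag at link time for GCC/Clang; compilers that link
        # against a separate OpenMP runtime can override this instead.
        return self.openmp_flags()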
@@ -271,8 +272,10 @@ class PcapDependencyConfigTool(ConfigToolDependency):
tools = ['pcap-config']
tool_name = 'pcap-config'
- @staticmethod
- def finish_init(self) -> None:
+ def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any]):
+ super().__init__(name, environment, kwargs)
+ if not self.is_found:
+ return
self.compile_args = self.get_config_value(['--cflags'], 'compile_args')
self.link_args = self.get_config_value(['--libs'], 'link_args')
self.version = self.get_pcap_lib_version()
@@ -284,6 +287,7 @@ class PcapDependencyConfigTool(ConfigToolDependency):
def get_pcap_lib_version(self):
# Since we seem to need to run a program to discover the pcap version,
# we can't do that when cross-compiling
+ # FIXME: this should be handled if we have an exe_wrapper
if not self.env.machines.matches_build_machine(self.for_machine):
return None
@@ -299,10 +303,12 @@ class CupsDependencyConfigTool(ConfigToolDependency):
tools = ['cups-config']
tool_name = 'cups-config'
- @staticmethod
- def finish_init(ctdep):
- ctdep.compile_args = ctdep.get_config_value(['--cflags'], 'compile_args')
- ctdep.link_args = ctdep.get_config_value(['--ldflags', '--libs'], 'link_args')
+ def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any]):
+ super().__init__(name, environment, kwargs)
+ if not self.is_found:
+ return
+ self.compile_args = self.get_config_value(['--cflags'], 'compile_args')
+ self.link_args = self.get_config_value(['--ldflags', '--libs'], 'link_args')
@staticmethod
def get_methods():
@@ -317,10 +323,12 @@ class LibWmfDependencyConfigTool(ConfigToolDependency):
tools = ['libwmf-config']
tool_name = 'libwmf-config'
- @staticmethod
- def finish_init(ctdep):
- ctdep.compile_args = ctdep.get_config_value(['--cflags'], 'compile_args')
- ctdep.link_args = ctdep.get_config_value(['--libs'], 'link_args')
+ def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any]):
+ super().__init__(name, environment, kwargs)
+ if not self.is_found:
+ return
+ self.compile_args = self.get_config_value(['--cflags'], 'compile_args')
+ self.link_args = self.get_config_value(['--libs'], 'link_args')
@staticmethod
def get_methods():
@@ -332,11 +340,13 @@ class LibGCryptDependencyConfigTool(ConfigToolDependency):
tools = ['libgcrypt-config']
tool_name = 'libgcrypt-config'
- @staticmethod
- def finish_init(ctdep):
- ctdep.compile_args = ctdep.get_config_value(['--cflags'], 'compile_args')
- ctdep.link_args = ctdep.get_config_value(['--libs'], 'link_args')
- ctdep.version = ctdep.get_config_value(['--version'], 'version')[0]
+ def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any]):
+ super().__init__(name, environment, kwargs)
+ if not self.is_found:
+ return
+ self.compile_args = self.get_config_value(['--cflags'], 'compile_args')
+ self.link_args = self.get_config_value(['--libs'], 'link_args')
+ self.version = self.get_config_value(['--version'], 'version')[0]
@staticmethod
def get_methods():
@@ -348,11 +358,13 @@ class GpgmeDependencyConfigTool(ConfigToolDependency):
tools = ['gpgme-config']
tool_name = 'gpg-config'
- @staticmethod
- def finish_init(ctdep):
- ctdep.compile_args = ctdep.get_config_value(['--cflags'], 'compile_args')
- ctdep.link_args = ctdep.get_config_value(['--libs'], 'link_args')
- ctdep.version = ctdep.get_config_value(['--version'], 'version')[0]
+ def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any]):
+ super().__init__(name, environment, kwargs)
+ if not self.is_found:
+ return
+ self.compile_args = self.get_config_value(['--cflags'], 'compile_args')
+ self.link_args = self.get_config_value(['--libs'], 'link_args')
+ self.version = self.get_config_value(['--version'], 'version')[0]
@staticmethod
def get_methods():
diff --git a/mesonbuild/dependencies/ui.py b/mesonbuild/dependencies/ui.py
index 6e54e8e..fc0824c 100644
--- a/mesonbuild/dependencies/ui.py
+++ b/mesonbuild/dependencies/ui.py
@@ -31,9 +31,11 @@ from .base import DependencyException, DependencyMethods
from .base import ExternalDependency, NonExistingExternalProgram
from .base import ExtraFrameworkDependency, PkgConfigDependency
from .base import ConfigToolDependency, DependencyFactory
+from .base import find_external_program
if T.TYPE_CHECKING:
from ..environment import Environment
+ from .base import ExternalProgram
class GLDependencySystem(ExternalDependency):
@@ -227,11 +229,14 @@ class QtBaseDependency(ExternalDependency):
bins = ['moc', 'uic', 'rcc', 'lrelease']
found = {b: NonExistingExternalProgram(name='{}-{}'.format(b, self.name))
for b in bins}
+ wanted = '== {}'.format(self.version)
def gen_bins():
for b in bins:
if self.bindir:
yield os.path.join(self.bindir, b), b, False
+ # prefer the <tool>-qt<version> of the tool to the plain one, as we
+ # don't know what the unsuffixed one points to without calling it.
yield '{}-{}'.format(b, self.name), b, False
yield b, b, self.required if b != 'lrelease' else False
@@ -239,12 +244,6 @@ class QtBaseDependency(ExternalDependency):
if found[name].found():
continue
- # prefer the <tool>-qt<version> of the tool to the plain one, as we
- # don't know what the unsuffixed one points to without calling it.
- p = interp_obj.find_program_impl([b], silent=True, required=required).held_object
- if not p.found():
- continue
-
if name == 'lrelease':
arg = ['-version']
elif mesonlib.version_compare(self.version, '>= 5'):
@@ -253,12 +252,18 @@ class QtBaseDependency(ExternalDependency):
arg = ['-v']
# Ensure that the version of qt and each tool are the same
- _, out, err = mesonlib.Popen_safe(p.get_command() + arg)
- if b.startswith('lrelease') or not self.version.startswith('4'):
- care = out
- else:
- care = err
- if mesonlib.version_compare(self.version, '== {}'.format(care.split(' ')[-1])):
+ def get_version(p):
+ _, out, err = mesonlib.Popen_safe(p.get_command() + arg)
+ if b.startswith('lrelease') or not self.version.startswith('4'):
+ care = out
+ else:
+ care = err
+ return care.split(' ')[-1].replace(')', '')
+
+ p = interp_obj.find_program_impl([b], required=required,
+ version_func=get_version,
+ wanted=wanted).held_object
+ if p.found():
found[name] = p
return tuple([found[b] for b in bins])
@@ -324,10 +329,9 @@ class QtBaseDependency(ExternalDependency):
if prefix:
self.bindir = os.path.join(prefix, 'bin')
- def search_qmake(self):
+ def search_qmake(self) -> T.Generator['ExternalProgram', None, None]:
for qmake in ('qmake-' + self.name, 'qmake'):
- for potential_qmake in self.search_tool(qmake, 'QMake', [qmake]):
- yield potential_qmake
+ yield from find_external_program(self.env, self.for_machine, qmake, 'QMake', [qmake])
def _qmake_detect(self, mods, kwargs):
for qmake in self.search_qmake():
@@ -406,6 +410,9 @@ class QtBaseDependency(ExternalDependency):
if libfile:
libfile = libfile[0]
else:
+ mlog.log("Could not find:", module,
+ self.qtpkgname + module + modules_lib_suffix,
+ 'in', libdir)
self.is_found = False
break
self.link_args.append(libfile)
@@ -426,6 +433,20 @@ class QtBaseDependency(ExternalDependency):
if self.env.machines[self.for_machine].is_darwin():
if is_debug:
suffix += '_debug'
+ if mesonlib.version_compare(self.version, '>= 5.14.0'):
+ if self.env.machines[self.for_machine].is_android():
+ cpu_family = self.env.machines[self.for_machine].cpu_family
+ if cpu_family == 'x86':
+ suffix += '_x86'
+ elif cpu_family == 'x86_64':
+ suffix += '_x86_64'
+ elif cpu_family == 'arm':
+ suffix += '_armeabi-v7a'
+ elif cpu_family == 'aarch64':
+ suffix += '_arm64-v8a'
+ else:
+ mlog.warning('Android target arch {!r} for Qt5 is unknown, '
+ 'module detection may not work'.format(cpu_family))
return suffix
def _link_with_qtmain(self, is_debug, libdir):
@@ -528,10 +549,12 @@ class SDL2DependencyConfigTool(ConfigToolDependency):
tools = ['sdl2-config']
tool_name = 'sdl2-config'
- @staticmethod
- def finish_init(ctdep):
- ctdep.compile_args = ctdep.get_config_value(['--cflags'], 'compile_args')
- ctdep.link_args = ctdep.get_config_value(['--libs'], 'link_args')
+ def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any]):
+ super().__init__(name, environment, kwargs)
+ if not self.is_found:
+ return
+ self.compile_args = self.get_config_value(['--cflags'], 'compile_args')
+ self.link_args = self.get_config_value(['--libs'], 'link_args')
@staticmethod
def get_methods():
diff --git a/mesonbuild/envconfig.py b/mesonbuild/envconfig.py
index 327d3fa..7d87fb7 100644
--- a/mesonbuild/envconfig.py
+++ b/mesonbuild/envconfig.py
@@ -12,7 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-import configparser, os, subprocess
+import os, subprocess
import typing as T
from . import mesonlib
@@ -40,7 +40,9 @@ known_cpu_families = (
'alpha',
'arc',
'arm',
+ 'avr',
'c2000',
+ 'dspic',
'e2k',
'ia64',
'm68k',
@@ -48,6 +50,7 @@ known_cpu_families = (
'mips',
'mips64',
'parisc',
+ 'pic24',
'ppc',
'ppc64',
'riscv32',
@@ -56,14 +59,13 @@ known_cpu_families = (
'rx',
's390',
's390x',
+ 'sh4',
'sparc',
'sparc64',
- 'pic24',
- 'dspic',
'wasm32',
'wasm64',
'x86',
- 'x86_64'
+ 'x86_64',
)
# It would feel more natural to call this "64_BIT_CPU_FAMILES", but
@@ -81,33 +83,6 @@ CPU_FAMILES_64_BIT = [
'x86_64',
]
-class MesonConfigFile:
- @classmethod
- def from_config_parser(cls, parser: configparser.ConfigParser) -> T.Dict[str, T.Dict[str, T.Dict[str, str]]]:
- out = {}
- # This is a bit hackish at the moment.
- for s in parser.sections():
- section = {}
- for entry in parser[s]:
- value = parser[s][entry]
- # Windows paths...
- value = value.replace('\\', '\\\\')
- if ' ' in entry or '\t' in entry or "'" in entry or '"' in entry:
- raise EnvironmentException('Malformed variable name {} in cross file..'.format(entry))
- try:
- res = eval(value, {'__builtins__': None}, {'true': True, 'false': False})
- except Exception:
- raise EnvironmentException('Malformed value in cross file variable {}.'.format(entry))
-
- for i in (res if isinstance(res, list) else [res]):
- if not isinstance(i, (str, int, bool)):
- raise EnvironmentException('Malformed value in cross file variable {}.'.format(entry))
-
- section[entry] = res
-
- out[s] = section
- return out
-
def get_env_var_pair(for_machine: MachineChoice,
is_cross: bool,
var_name: str) -> T.Tuple[T.Optional[str], T.Optional[str]]:
@@ -121,7 +96,7 @@ def get_env_var_pair(for_machine: MachineChoice,
# ones.
([var_name + '_FOR_BUILD'] if is_cross else [var_name]),
# Always just the unprefixed host versions
- ([] if is_cross else [var_name]),
+ [var_name]
)[for_machine]
for var in candidates:
value = os.environ.get(var)
@@ -298,6 +273,10 @@ class MachineInfo:
"""
return self.system == 'gnu'
+ def is_irix(self) -> bool:
+ """Machine is IRIX?"""
+ return self.system.startswith('irix')
+
# Various prefixes and suffixes for import libraries, shared libraries,
# static libraries, and executables.
# Versioning is added to these names in the backends as-needed.
@@ -428,43 +407,3 @@ class BinaryTable:
if command is not None and (len(command) == 0 or len(command[0].strip()) == 0):
command = None
return command
-
-class Directories:
-
- """Data class that holds information about directories for native and cross
- builds.
- """
-
- def __init__(self, bindir: T.Optional[str] = None, datadir: T.Optional[str] = None,
- includedir: T.Optional[str] = None, infodir: T.Optional[str] = None,
- libdir: T.Optional[str] = None, libexecdir: T.Optional[str] = None,
- localedir: T.Optional[str] = None, localstatedir: T.Optional[str] = None,
- mandir: T.Optional[str] = None, prefix: T.Optional[str] = None,
- sbindir: T.Optional[str] = None, sharedstatedir: T.Optional[str] = None,
- sysconfdir: T.Optional[str] = None):
- self.bindir = bindir
- self.datadir = datadir
- self.includedir = includedir
- self.infodir = infodir
- self.libdir = libdir
- self.libexecdir = libexecdir
- self.localedir = localedir
- self.localstatedir = localstatedir
- self.mandir = mandir
- self.prefix = prefix
- self.sbindir = sbindir
- self.sharedstatedir = sharedstatedir
- self.sysconfdir = sysconfdir
-
- def __contains__(self, key: str) -> bool:
- return hasattr(self, key)
-
- def __getitem__(self, key: str) -> T.Optional[str]:
- # Mypy can't figure out what to do with getattr here, so we'll case for it
- return T.cast(T.Optional[str], getattr(self, key))
-
- def __setitem__(self, key: str, value: T.Optional[str]) -> None:
- setattr(self, key, value)
-
- def __iter__(self) -> T.Iterator[T.Tuple[str, str]]:
- return iter(self.__dict__.items())
diff --git a/mesonbuild/environment.py b/mesonbuild/environment.py
index 0e3ae8c..7cb7286 100644
--- a/mesonbuild/environment.py
+++ b/mesonbuild/environment.py
@@ -16,6 +16,7 @@ import os, platform, re, sys, shutil, subprocess
import tempfile
import shlex
import typing as T
+import collections
from . import coredata
from .linkers import ArLinker, ArmarLinker, VisualStudioLinker, DLinker, CcrxLinker, Xc16Linker, C2000Linker, IntelVisualStudioLinker
@@ -27,12 +28,14 @@ from .mesonlib import (
from . import mlog
from .envconfig import (
- BinaryTable, Directories, MachineInfo, MesonConfigFile,
- Properties, known_cpu_families,
+ BinaryTable, MachineInfo,
+ Properties, known_cpu_families, get_env_var_pair,
)
from . import compilers
from .compilers import (
Compiler,
+ all_languages,
+ base_options,
is_assembly,
is_header,
is_library,
@@ -52,6 +55,7 @@ from .linkers import (
GnuBFDDynamicLinker,
GnuGoldDynamicLinker,
LLVMDynamicLinker,
+ QualcommLLVMDynamicLinker,
MSVCDynamicLinker,
OptlinkDynamicLinker,
PGIDynamicLinker,
@@ -134,9 +138,18 @@ def detect_gcovr(min_version='3.3', new_rootdir_version='4.2', log=False):
return gcovr_exe, mesonlib.version_compare(found, '>=' + new_rootdir_version)
return None, None
+def detect_llvm_cov():
+ tools = get_llvm_tool_names('llvm-cov')
+ for tool in tools:
+ if mesonlib.exe_exists([tool, '--version']):
+ return tool
+ return None
+
def find_coverage_tools():
gcovr_exe, gcovr_new_rootdir = detect_gcovr()
+ llvm_cov_exe = detect_llvm_cov()
+
lcov_exe = 'lcov'
genhtml_exe = 'genhtml'
@@ -145,7 +158,7 @@ def find_coverage_tools():
if not mesonlib.exe_exists([genhtml_exe, '--version']):
genhtml_exe = None
- return gcovr_exe, gcovr_new_rootdir, lcov_exe, genhtml_exe
+ return gcovr_exe, gcovr_new_rootdir, lcov_exe, genhtml_exe, llvm_cov_exe
def detect_ninja(version: str = '1.7', log: bool = False) -> str:
r = detect_ninja_command_and_version(version, log)
@@ -332,6 +345,8 @@ def detect_cpu_family(compilers: CompilersDict) -> str:
trial = 'x86'
elif trial == 'bepc':
trial = 'x86'
+ elif trial == 'arm64':
+ trial = 'aarch64'
elif trial.startswith('arm') or trial.startswith('earm'):
trial = 'arm'
elif trial.startswith(('powerpc64', 'ppc64')):
@@ -344,6 +359,8 @@ def detect_cpu_family(compilers: CompilersDict) -> str:
trial = 'sparc64'
elif trial in {'mipsel', 'mips64el'}:
trial = trial.rstrip('el')
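+ # IP30/IP35 are SGI IRIX hardware designations for MIPS64 systems.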
+ elif trial in {'ip30', 'ip35'}:
+ trial = 'mips64'
# On Linux (and maybe others) there can be any mixture of 32/64 bit code in
# the kernel, Python, system, 32-bit chroot on 64-bit host, etc. The only
@@ -438,9 +455,10 @@ def machine_info_can_run(machine_info: MachineInfo):
true_build_cpu_family = detect_cpu_family({})
return \
(machine_info.cpu_family == true_build_cpu_family) or \
- ((true_build_cpu_family == 'x86_64') and (machine_info.cpu_family == 'x86'))
+ ((true_build_cpu_family == 'x86_64') and (machine_info.cpu_family == 'x86')) or \
+ ((true_build_cpu_family == 'aarch64') and (machine_info.cpu_family == 'arm'))
-def search_version(text):
+def search_version(text: str) -> str:
# Usually of the type 4.1.4 but compiler output may contain
# stuff like this:
# (Sourcery CodeBench Lite 2014.05-29) 4.8.3 20140320 (prerelease)
@@ -474,6 +492,13 @@ def search_version(text):
match = version_regex.search(text)
if match:
return match.group(0)
+
+ # try a simpler regex that matches strings like "blah 2020.01.100 foo" or "blah 2020.01 foo"
+ version_regex = re.compile(r"(\d{1,4}\.\d{1,4}\.?\d{0,4})")
+ match = version_regex.search(text)
+ if match:
+ return match.group(0)
+
return 'unknown version'
class Environment:
@@ -527,10 +552,11 @@ class Environment:
# Misc other properties about each machine.
properties = PerMachineDefaultable()
- # Store paths for native and cross build files. There is no target
- # machine information here because nothing is installed for the target
- # architecture, just the build and host architectures
- paths = PerMachineDefaultable()
+ # We only need one of these as project options are not per machine
+ user_options = collections.defaultdict(dict) # type: T.DefaultDict[str, T.Dict[str, object]]
+
+ # meson builtin options, as passed through cross or native files
+ meson_options = PerMachineDefaultable() # type: PerMachineDefaultable[T.DefaultDict[str, T.Dict[str, object]]]
## Setup build machine defaults
@@ -542,34 +568,169 @@ class Environment:
binaries.build = BinaryTable()
properties.build = Properties()
+ # meson base options
+ _base_options = {} # type: T.Dict[str, object]
+
+ # Per language compiler arguments
+ compiler_options = PerMachineDefaultable() # type: PerMachineDefaultable[T.DefaultDict[str, T.Dict[str, object]]]
+ compiler_options.build = collections.defaultdict(dict)
+
## Read in native file(s) to override build machine configuration
+ def load_options(tag: str, store: T.Dict[str, T.Any]) -> None:
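+ # Machine-file section names are either plain, e.g. [project options],
+ # or prefixed with a subproject name, e.g. [zlib:project options]
+ # (illustrative subproject name).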
+ for section in config.keys():
+ if section.endswith(tag):
+ if ':' in section:
+ project = section.split(':')[0]
+ else:
+ project = ''
+ store[project].update(config.get(section, {}))
+
+ def split_base_options(mopts: T.DefaultDict[str, T.Dict[str, object]]) -> None:
+ for k, v in list(mopts.get('', {}).items()):
+ if k in base_options:
+ _base_options[k] = v
+ del mopts[''][k]
+
+ lang_prefixes = tuple('{}_'.format(l) for l in all_languages)
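+ # lang_prefixes is e.g. ('c_', 'cpp_', ...); keys such as 'c_args'
+ # or 'cpp_std' are per-language compiler options rather than global
+ # meson options.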
+ def split_compiler_options(mopts: T.DefaultDict[str, T.Dict[str, object]], machine: MachineChoice) -> None:
+ for k, v in list(mopts.get('', {}).items()):
+ if k.startswith(lang_prefixes):
+ lang, key = k.split('_', 1)
+ if compiler_options[machine] is None:
+ compiler_options[machine] = collections.defaultdict(dict)
+ if lang not in compiler_options[machine]:
+ compiler_options[machine][lang] = collections.defaultdict(dict)
+ compiler_options[machine][lang][key] = v
+ del mopts[''][k]
+
+ def move_compiler_options(properties: Properties, compopts: T.Dict[str, T.DefaultDict[str, object]]) -> None:
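+ # Migrate deprecated [properties] entries such as 'c_args' and
+ # 'c_link_args' into the per-language compiler options, warning when
+ # both the old and the new location are set.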
+ for k, v in properties.properties.copy().items():
+ for lang in all_languages:
+ if k == '{}_args'.format(lang):
+ if 'args' not in compopts[lang]:
+ compopts[lang]['args'] = v
+ else:
+ mlog.warning('Ignoring {}_args in [properties] section in favor of the [built-in options] section.'.format(lang))
+ elif k == '{}_link_args'.format(lang):
+ if 'link_args' not in compopts[lang]:
+ compopts[lang]['link_args'] = v
+ else:
+ mlog.warning('Ignoring {}_link_args in [properties] section in favor of the [built-in options] section.'.format(lang))
+ else:
+ continue
+ mlog.deprecation('{} in the [properties] section of the machine file is deprecated, use the [built-in options] section.'.format(k))
+ del properties.properties[k]
+ break
+
if self.coredata.config_files is not None:
- config = MesonConfigFile.from_config_parser(
- coredata.load_configs(self.coredata.config_files))
+ config = coredata.parse_machine_files(self.coredata.config_files)
binaries.build = BinaryTable(config.get('binaries', {}))
- paths.build = Directories(**config.get('paths', {}))
properties.build = Properties(config.get('properties', {}))
+ # Don't run this if there are any cross files, we don't want to use
+ # the native values if we're doing a cross build
+ if not self.coredata.cross_files:
+ load_options('project options', user_options)
+ meson_options.build = collections.defaultdict(dict)
+ if config.get('paths') is not None:
+ mlog.deprecation('The [paths] section is deprecated, use the [built-in options] section instead.')
+ load_options('paths', meson_options.build)
+ load_options('built-in options', meson_options.build)
+ if not self.coredata.cross_files:
+ split_base_options(meson_options.build)
+ split_compiler_options(meson_options.build, MachineChoice.BUILD)
+ move_compiler_options(properties.build, compiler_options.build)
+
## Read in cross file(s) to override host machine configuration
if self.coredata.cross_files:
- config = MesonConfigFile.from_config_parser(
- coredata.load_configs(self.coredata.cross_files))
+ config = coredata.parse_machine_files(self.coredata.cross_files)
properties.host = Properties(config.get('properties', {}))
binaries.host = BinaryTable(config.get('binaries', {}))
if 'host_machine' in config:
machines.host = MachineInfo.from_literal(config['host_machine'])
if 'target_machine' in config:
machines.target = MachineInfo.from_literal(config['target_machine'])
- paths.host = Directories(**config.get('paths', {}))
+ load_options('project options', user_options)
+ meson_options.host = collections.defaultdict(dict)
+ compiler_options.host = collections.defaultdict(dict)
+ if config.get('paths') is not None:
+ mlog.deprecation('The [paths] section is deprecated, use the [built-in options] section instead.')
+ load_options('paths', meson_options.host)
+ load_options('built-in options', meson_options.host)
+ split_base_options(meson_options.host)
+ split_compiler_options(meson_options.host, MachineChoice.HOST)
+ move_compiler_options(properties.host, compiler_options.host)
## "freeze" now initialized configuration, and "save" to the class.
self.machines = machines.default_missing()
self.binaries = binaries.default_missing()
self.properties = properties.default_missing()
- self.paths = paths.default_missing()
+ self.user_options = user_options
+ self.meson_options = meson_options.default_missing()
+ self.base_options = _base_options
+ self.compiler_options = compiler_options.default_missing()
+
+ # Some options default to environment variables if they are
+ # unset; set those now.
+
+ for for_machine in MachineChoice:
+ p_env_pair = get_env_var_pair(for_machine, self.coredata.is_cross_build(), 'PKG_CONFIG_PATH')
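+ # Returns the matched (variable name, value) pair, e.g.
+ # PKG_CONFIG_PATH_FOR_BUILD for the build machine of a cross build.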
+ if p_env_pair is not None:
+ p_env_var, p_env = p_env_pair
+
+ # PKG_CONFIG_PATH may contain duplicates, which must be
+ # removed, else a duplicates-in-array-option warning arises.
+ p_list = list(mesonlib.OrderedSet(p_env.split(':')))
+
+ key = 'pkg_config_path'
+
+ if self.first_invocation:
+ # Environment variables override config
+ self.meson_options[for_machine][''][key] = p_list
+ elif self.meson_options[for_machine][''].get(key, []) != p_list:
+ mlog.warning(
+ p_env_var,
+ 'environment variable has changed',
+ 'between configurations; meson ignores this.',
+ 'Use -Dpkg_config_path to change pkg-config search',
+ 'path instead.'
+ )
+
+ # Read in command line and populate options
+ # TODO: validate all of this
+ all_builtins = set(coredata.builtin_options) | set(coredata.builtin_options_per_machine) | set(coredata.builtin_dir_noprefix_options)
+ for k, v in options.cmd_line_options.items():
+ try:
+ subproject, k = k.split(':')
+ except ValueError:
+ subproject = ''
+ if k in base_options:
+ self.base_options[k] = v
+ elif k.startswith(lang_prefixes):
+ lang, key = k.split('_', 1)
+ self.compiler_options.host[lang][key] = v
+ elif k in all_builtins or k.startswith('backend_'):
+ self.meson_options.host[subproject][k] = v
+ elif k.startswith('build.'):
+ k = k[len('build.'):]  # str.lstrip() strips a character set, not a prefix
+ if k in coredata.builtin_options_per_machine:
+ if self.meson_options.build is None:
+ self.meson_options.build = collections.defaultdict(dict)
+ self.meson_options.build[subproject][k] = v
+ else:
+ assert not k.startswith('build.')
+ self.user_options[subproject][k] = v
+
+ # Warn if the user is using two different ways of setting build-type
+ # options that override each other
+ if meson_options.build and 'buildtype' in meson_options.build[''] and \
+ ('optimization' in meson_options.build[''] or 'debug' in meson_options.build['']):
+ mlog.warning('Recommend using either -Dbuildtype or -Doptimization + -Ddebug. '
+ 'Using both is redundant since they override each other. '
+ 'See: https://mesonbuild.com/Builtin-options.html#build-type-options')
exe_wrapper = self.lookup_binary_entry(MachineChoice.HOST, 'exe_wrapper')
if exe_wrapper is not None:
@@ -578,8 +739,6 @@ class Environment:
else:
self.exe_wrapper = None
- self.cmd_line_options = options.cmd_line_options.copy()
-
# List of potential compilers.
if mesonlib.is_windows():
# Intel C and C++ compiler is icl on Windows, but icc and icpc elsewhere.
@@ -625,6 +784,7 @@ class Environment:
self.clang_static_linker = ['llvm-ar']
self.default_cmake = ['cmake']
self.default_pkgconfig = ['pkg-config']
+ self.wrap_resolver = None
def create_new_coredata(self, options):
# WARNING: Don't use any values from coredata in __init__. It gets
@@ -635,8 +795,8 @@ class Environment:
self.coredata.meson_command = mesonlib.meson_command
self.first_invocation = True
- def is_cross_build(self) -> bool:
- return self.coredata.is_cross_build()
+ def is_cross_build(self, when_building_for: MachineChoice = MachineChoice.HOST) -> bool:
+ return self.coredata.is_cross_build(when_building_for)
def dump_coredata(self):
return coredata.save(self.coredata, self.get_build_dir())
@@ -726,6 +886,28 @@ class Environment:
minor = defines.get('__LCC_MINOR__', '0')
return dot.join((generation, major, minor))
+ @staticmethod
+ def get_clang_compiler_defines(compiler):
+ """
+ Get the list of Clang pre-processor defines
+ """
+ args = compiler + ['-E', '-dM', '-']
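+ # '-E -dM' makes clang preprocess empty input and dump its
+ # predefined macros, one '#define NAME [VALUE]' line per macro.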
+ p, output, error = Popen_safe(args, write='', stdin=subprocess.PIPE)
+ if p.returncode != 0:
+ raise EnvironmentException('Unable to get clang pre-processor defines:\n' + output + error)
+ defines = {}
+ for line in output.split('\n'):
+ if not line:
+ continue
+ d, *rest = line.split(' ', 2)
+ if d != '#define':
+ continue
+ if len(rest) == 1:
+ defines[rest[0]] = True
+ if len(rest) == 2:
+ defines[rest[0]] = rest[1]
+ return defines
+
def _get_compilers(self, lang, for_machine):
'''
The list of compilers is detected in the exact same way for
@@ -847,10 +1029,13 @@ class Environment:
check_args += override
_, o, e = Popen_safe(compiler + check_args)
- v = search_version(o)
+ v = search_version(o + e)
if o.startswith('LLD'):
linker = LLVMDynamicLinker(
compiler, for_machine, comp_class.LINKER_PREFIX, override, version=v) # type: DynamicLinker
+ elif 'Snapdragon' in e and 'LLVM' in e:
+ linker = QualcommLLVMDynamicLinker(
+ compiler, for_machine, comp_class.LINKER_PREFIX, override, version=v) # type: DynamicLinker
elif e.startswith('lld-link: '):
# The LLD MinGW frontend didn't respond to --version before version 9.0.0,
# and produced an error message about failing to link (when no object
@@ -889,9 +1074,15 @@ class Environment:
cls = GnuBFDDynamicLinker
linker = cls(compiler, for_machine, comp_class.LINKER_PREFIX, override, version=v)
elif 'Solaris' in e or 'Solaris' in o:
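+ # The Solaris linker reports its version on a line such as
+ # "ld: Software Generation Utilities - Solaris Link Editors: 5.11-1.2329"
+ # (illustrative output); the token after the second ':' is the version.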
+ for line in (o+e).split('\n'):
+ if 'ld: Software Generation Utilities' in line:
+ v = line.split(':')[2].lstrip()
+ break
+ else:
+ v = 'unknown version'
linker = SolarisDynamicLinker(
compiler, for_machine, comp_class.LINKER_PREFIX, override,
- version=search_version(e))
+ version=v)
else:
raise EnvironmentException('Unable to determine dynamic linker')
return linker
@@ -899,7 +1090,7 @@ class Environment:
def _detect_c_or_cpp_compiler(self, lang: Language, for_machine: MachineChoice) -> Compiler:
popen_exceptions = {}
compilers, ccache, exe_wrap = self._get_compilers(lang, for_machine)
- is_cross = not self.machines.matches_build_machine(for_machine)
+ is_cross = self.is_cross_build(for_machine)
info = self.machines[for_machine]
for compiler in compilers:
@@ -985,12 +1176,15 @@ class Environment:
if 'Emscripten' in out:
cls = EmscriptenCCompiler if lang == Language.C else EmscriptenCPPCompiler
self.coredata.add_lang_args(cls.language, cls, for_machine, self)
- # emcc cannot be queried to get the version out of it (it
- # ignores -Wl,--version and doesn't have an alternative).
- # Further, wasm-id *is* lld and will return `LLD X.Y.Z` if you
- # call `wasm-ld --version`, but a special version of lld that
- # takes different options.
- p, o, _ = Popen_safe(['wasm-ld', '--version'])
+
+ # emcc requires a file input in order to pass arguments to the
+ # linker. It'll exit with an error code, but still print the
+ # linker version. Old emcc versions ignore -Wl,--version completely,
+ # however. We'll report "unknown version" in that case.
+ with tempfile.NamedTemporaryFile(suffix='.c') as f:
+ cmd = compiler + [cls.LINKER_PREFIX + "--version", f.name]
+ _, o, _ = Popen_safe(cmd)
+
linker = WASMDynamicLinker(
compiler, for_machine, cls.LINKER_PREFIX,
[], version=search_version(o))
@@ -1037,9 +1231,11 @@ class Environment:
return cls(
compiler, version, for_machine, is_cross, info, exe_wrap,
target, linker=linker)
- if 'clang' in out:
+ if 'clang' in out or 'Clang' in out:
linker = None
+ defines = self.get_clang_compiler_defines(compiler)
+
# Even if the for_machine is darwin, we could be using vanilla
# clang.
if 'Apple' in out:
@@ -1060,7 +1256,7 @@ class Environment:
return cls(
ccache + compiler, version, for_machine, is_cross, info,
- exe_wrap, full_version=full_version, linker=linker)
+ exe_wrap, defines, full_version=full_version, linker=linker)
if 'Intel(R) C++ Intel(R)' in err:
version = search_version(err)
@@ -1149,7 +1345,7 @@ class Environment:
def detect_cuda_compiler(self, for_machine):
popen_exceptions = {}
- is_cross = not self.machines.matches_build_machine(for_machine)
+ is_cross = self.is_cross_build(for_machine)
compilers, ccache, exe_wrap = self._get_compilers(Language.CUDA, for_machine)
info = self.machines[for_machine]
for compiler in compilers:
@@ -1189,7 +1385,7 @@ class Environment:
def detect_fortran_compiler(self, for_machine: MachineChoice):
popen_exceptions = {}
compilers, ccache, exe_wrap = self._get_compilers(Language.FORTRAN, for_machine)
- is_cross = not self.machines.matches_build_machine(for_machine)
+ is_cross = self.is_cross_build(for_machine)
info = self.machines[for_machine]
for compiler in compilers:
if isinstance(compiler, str):
@@ -1308,7 +1504,7 @@ class Environment:
def _detect_objc_or_objcpp_compiler(self, for_machine: MachineInfo, objc: bool) -> 'Compiler':
popen_exceptions = {}
compilers, ccache, exe_wrap = self._get_compilers(Language.OBJC if objc else Language.OBJCPP, for_machine)
- is_cross = not self.machines.matches_build_machine(for_machine)
+ is_cross = self.is_cross_build(for_machine)
info = self.machines[for_machine]
for compiler in compilers:
@@ -1399,7 +1595,7 @@ class Environment:
def detect_vala_compiler(self, for_machine):
exelist = self.lookup_binary_entry(for_machine, Language.VALA)
- is_cross = not self.machines.matches_build_machine(for_machine)
+ is_cross = self.is_cross_build(for_machine)
info = self.machines[for_machine]
if exelist is None:
# TODO support fallback
@@ -1419,7 +1615,7 @@ class Environment:
def detect_rust_compiler(self, for_machine):
popen_exceptions = {}
compilers, ccache, exe_wrap = self._get_compilers(Language.RUST, for_machine)
- is_cross = not self.machines.matches_build_machine(for_machine)
+ is_cross = self.is_cross_build(for_machine)
info = self.machines[for_machine]
cc = self.detect_c_compiler(for_machine)
@@ -1510,7 +1706,7 @@ class Environment:
arch = 'x86_mscoff'
popen_exceptions = {}
- is_cross = not self.machines.matches_build_machine(for_machine)
+ is_cross = self.is_cross_build(for_machine)
results, ccache, exe_wrap = self._get_compilers(Language.D, for_machine)
for exelist in results:
# Search for a D compiler.
@@ -1601,7 +1797,7 @@ class Environment:
def detect_swift_compiler(self, for_machine):
exelist = self.lookup_binary_entry(for_machine, Language.SWIFT)
- is_cross = not self.machines.matches_build_machine(for_machine)
+ is_cross = self.is_cross_build(for_machine)
info = self.machines[for_machine]
if exelist is None:
# TODO support fallback
diff --git a/mesonbuild/interpreter.py b/mesonbuild/interpreter.py
index bd783c7..4d541bf 100644
--- a/mesonbuild/interpreter.py
+++ b/mesonbuild/interpreter.py
@@ -32,10 +32,11 @@ from .interpreterbase import InterpreterBase
from .interpreterbase import check_stringlist, flatten, noPosargs, noKwargs, stringArgs, permittedKwargs, noArgsFlattening
from .interpreterbase import InterpreterException, InvalidArguments, InvalidCode, SubdirDoneRequest
from .interpreterbase import InterpreterObject, MutableInterpreterObject, Disabler, disablerIfNotFound
-from .interpreterbase import FeatureNew, FeatureDeprecated, FeatureNewKwargs
+from .interpreterbase import FeatureNew, FeatureDeprecated, FeatureNewKwargs, FeatureDeprecatedKwargs
from .interpreterbase import ObjectHolder
from .modules import ModuleReturnValue
from .cmake import CMakeInterpreter
+from .backend.backends import TestProtocol
from pathlib import Path, PurePath
import os
@@ -43,6 +44,7 @@ import shutil
import uuid
import re
import shlex
+import stat
import subprocess
import collections
import functools
@@ -512,11 +514,14 @@ class DependencyHolder(InterpreterObject, ObjectHolder):
return DependencyHolder(new_dep, self.subproject)
class ExternalProgramHolder(InterpreterObject, ObjectHolder):
- def __init__(self, ep):
+ def __init__(self, ep, subproject, backend=None):
InterpreterObject.__init__(self)
ObjectHolder.__init__(self, ep)
+ self.subproject = subproject
+ self.backend = backend
self.methods.update({'found': self.found_method,
- 'path': self.path_method})
+ 'path': self.path_method,
+ 'full_path': self.full_path_method})
self.cached_version = None
@noPosargs
@@ -526,8 +531,22 @@ class ExternalProgramHolder(InterpreterObject, ObjectHolder):
@noPosargs
@permittedKwargs({})
+ @FeatureDeprecated('ExternalProgram.path', '0.55.0',
+ 'use ExternalProgram.full_path() instead')
def path_method(self, args, kwargs):
- return self.held_object.get_path()
+ return self._full_path()
+
+ @noPosargs
+ @permittedKwargs({})
+ @FeatureNew('ExternalProgram.full_path', '0.55.0')
+ def full_path_method(self, args, kwargs):
+ return self._full_path()
+
+ def _full_path(self):
+ exe = self.held_object
+ if isinstance(exe, build.Executable):
+ return self.backend.get_target_filename_abs(exe)
+ return exe.get_path()
def found(self):
return isinstance(self.held_object, build.Executable) or self.held_object.found()
@@ -536,9 +555,14 @@ class ExternalProgramHolder(InterpreterObject, ObjectHolder):
return self.held_object.get_command()
def get_name(self):
- return self.held_object.get_name()
+ exe = self.held_object
+ if isinstance(exe, build.Executable):
+ return exe.name
+ return exe.get_name()
def get_version(self, interpreter):
+ if isinstance(self.held_object, build.Executable):
+ return self.held_object.project_version
if not self.cached_version:
raw_cmd = self.get_command() + ['--version']
cmd = [self, '--version']
@@ -961,7 +985,7 @@ class Test(InterpreterObject):
self.should_fail = should_fail
self.timeout = timeout
self.workdir = workdir
- self.protocol = protocol
+ self.protocol = TestProtocol.from_str(protocol)
self.priority = priority
def get_exe(self):
@@ -1783,6 +1807,11 @@ class ModuleHolder(InterpreterObject, ObjectHolder):
target_machine=self.interpreter.builtin['target_machine'].held_object,
current_node=self.current_node
)
+ # Many modules do for example self.interpreter.find_program_impl(),
+ # so we have to ensure they use the current interpreter and not the one
+ # that first imported that module, otherwise it will use outdated
+ # overrides.
+ self.held_object.interpreter = self.interpreter
if self.held_object.is_snippet(method_name):
value = fn(self.interpreter, state, args, kwargs)
return self.interpreter.holderify(value)
@@ -1818,10 +1847,18 @@ class Summary:
if bool_yn and isinstance(i, bool):
formatted_values.append(mlog.green('YES') if i else mlog.red('NO'))
else:
- formatted_values.append(i)
+ formatted_values.append(str(i))
self.sections[section][k] = (formatted_values, list_sep)
self.max_key_len = max(self.max_key_len, len(k))
+ def text_len(self, v):
+ if isinstance(v, str):
+ return len(v)
+ elif isinstance(v, mlog.AnsiDecorator):
+ return len(v.text)
+ else:
+ raise RuntimeError('Expecting only strings or AnsiDecorator')
+
def dump(self):
mlog.log(self.project_name, mlog.normal_cyan(self.project_version))
for section, values in self.sections.items():
@@ -1833,12 +1870,28 @@ class Summary:
indent = self.max_key_len - len(k) + 3
end = ' ' if v else ''
mlog.log(' ' * indent, k + ':', end=end)
- if list_sep is None:
- indent = self.max_key_len + 6
- list_sep = '\n' + ' ' * indent
- mlog.log(*v, sep=list_sep)
+ indent = self.max_key_len + 6
+ self.dump_value(v, list_sep, indent)
mlog.log('') # newline
+ def dump_value(self, arr, list_sep, indent):
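+ # Greedily pack the values onto lines, wrapping before the terminal
+ # width is exceeded; continuation lines are indented to line up with
+ # the first value.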
+ lines_sep = '\n' + ' ' * indent
+ if list_sep is None:
+ mlog.log(*arr, sep=lines_sep)
+ return
+ max_len = shutil.get_terminal_size().columns
+ line = []
+ line_len = indent
+ lines_sep = list_sep.rstrip() + lines_sep
+ for v in arr:
+ v_len = self.text_len(v) + len(list_sep)
+ if line and line_len + v_len > max_len:
+ mlog.log(*line, sep=list_sep, end=lines_sep)
+ line_len = indent
+ line = []
+ line.append(v)
+ line_len += v_len
+ mlog.log(*line, sep=list_sep)
class MesonMain(InterpreterObject):
def __init__(self, build, interpreter):
@@ -1849,6 +1902,7 @@ class MesonMain(InterpreterObject):
self.methods.update({'get_compiler': self.get_compiler_method,
'is_cross_build': self.is_cross_build_method,
'has_exe_wrapper': self.has_exe_wrapper_method,
+ 'can_run_host_binaries': self.can_run_host_binaries_method,
'is_unity': self.is_unity_method,
'is_subproject': self.is_subproject_method,
'current_source_dir': self.current_source_dir_method,
@@ -1870,48 +1924,101 @@ class MesonMain(InterpreterObject):
'backend': self.backend_method,
})
- def _find_source_script(self, name, args):
+ def _find_source_script(self, prog: T.Union[str, ExecutableHolder], args):
+ if isinstance(prog, ExecutableHolder):
+ prog_path = self.interpreter.backend.get_target_filename(prog.held_object)
+ return build.RunScript([prog_path], args)
+ elif isinstance(prog, ExternalProgramHolder):
+ return build.RunScript(prog.get_command(), args)
+
# Prefer scripts in the current source directory
search_dir = os.path.join(self.interpreter.environment.source_dir,
self.interpreter.subdir)
- key = (name, search_dir)
+ key = (prog, search_dir)
if key in self._found_source_scripts:
found = self._found_source_scripts[key]
else:
- found = dependencies.ExternalProgram(name, search_dir=search_dir)
+ found = dependencies.ExternalProgram(prog, search_dir=search_dir)
if found.found():
self._found_source_scripts[key] = found
else:
m = 'Script or command {!r} not found or not executable'
- raise InterpreterException(m.format(name))
+ raise InterpreterException(m.format(prog))
return build.RunScript(found.get_command(), args)
- @permittedKwargs({})
- def add_install_script_method(self, args, kwargs):
+ def _process_script_args(
+ self, name: str, args: T.List[T.Union[
+ str, mesonlib.File, CustomTargetHolder,
+ CustomTargetIndexHolder, ConfigureFileHolder,
+ ExternalProgramHolder, ExecutableHolder,
+ ]], allow_built: bool = False) -> T.List[str]:
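+ # Flatten the mixed holder arguments into plain string arguments;
+ # 'new' records whether any argument type requires meson >= 0.55.0.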
+ script_args = []  # type: T.List[str]
+ new = False
+ for a in args:
+ a = unholder(a)
+ if isinstance(a, str):
+ script_args.append(a)
+ elif isinstance(a, mesonlib.File):
+ new = True
+ script_args.append(a.rel_to_builddir(self.interpreter.environment.source_dir))
+ elif isinstance(a, (build.BuildTarget, build.CustomTarget, build.CustomTargetIndex)):
+ if not allow_built:
+ raise InterpreterException('Arguments to {} cannot be built'.format(name))
+ new = True
+ script_args.extend([os.path.join(a.get_subdir(), o) for o in a.get_outputs()])
+
+ # This feels really hacky, but I'm not sure how else to fix
+ # this without completely rewriting install script handling.
+ # This is complicated by the fact that the install target
+ # depends on all.
+ if isinstance(a, build.CustomTargetIndex):
+ a.target.build_by_default = True
+ else:
+ a.build_by_default = True
+ elif isinstance(a, build.ConfigureFile):
+ new = True
+ script_args.append(os.path.join(a.subdir, a.targetname))
+ elif isinstance(a, dependencies.ExternalProgram):
+ script_args.extend(a.command)
+ new = True
+ else:
+ raise InterpreterException(
+ 'Arguments to {} must be strings, Files, CustomTargets, '
+ 'Indexes of CustomTargets, or ConfigureFiles'.format(name))
+ if new:
+ FeatureNew.single_use(
+ 'Calling "{}" with File, CustomTaget, Index of CustomTarget, '
+ 'ConfigureFile, Executable, or ExternalProgram'.format(name),
+ '0.55.0', self.interpreter.subproject)
+ return script_args
+
+ @permittedKwargs(set())
+ def add_install_script_method(self, args: 'T.Tuple[T.Union[str, ExecutableHolder], T.Union[str, mesonlib.File, CustomTargetHolder, CustomTargetIndexHolder, ConfigureFileHolder], ...]', kwargs):
if len(args) < 1:
raise InterpreterException('add_install_script takes one or more arguments')
- check_stringlist(args, 'add_install_script args must be strings')
- script = self._find_source_script(args[0], args[1:])
+ script_args = self._process_script_args('add_install_script', args[1:], allow_built=True)
+ script = self._find_source_script(args[0], script_args)
self.build.install_scripts.append(script)
- @permittedKwargs({})
+ @permittedKwargs(set())
def add_postconf_script_method(self, args, kwargs):
if len(args) < 1:
raise InterpreterException('add_postconf_script takes one or more arguments')
- check_stringlist(args, 'add_postconf_script arguments must be strings')
- script = self._find_source_script(args[0], args[1:])
+ script_args = self._process_script_args('add_postconf_script', args[1:], allow_built=True)
+ script = self._find_source_script(args[0], script_args)
self.build.postconf_scripts.append(script)
- @permittedKwargs({})
+ @permittedKwargs(set())
def add_dist_script_method(self, args, kwargs):
if len(args) < 1:
raise InterpreterException('add_dist_script takes one or more arguments')
if len(args) > 1:
- FeatureNew('Calling "add_dist_script" with multiple arguments', '0.49.0').use(self.interpreter.subproject)
- check_stringlist(args, 'add_dist_script argument must be a string')
+ FeatureNew.single_use('Calling "add_dist_script" with multiple arguments',
+ '0.49.0', self.interpreter.subproject)
if self.interpreter.subproject != '':
raise InterpreterException('add_dist_script may not be used in a subproject.')
- script = self._find_source_script(args[0], args[1:])
+ script_args = self._process_script_args('add_dist_script', args[1:], allow_built=True)
+ script = self._find_source_script(args[0], script_args)
self.build.dist_scripts.append(script)
@noPosargs
@@ -1949,9 +2056,19 @@ class MesonMain(InterpreterObject):
@noPosargs
@permittedKwargs({})
- def has_exe_wrapper_method(self, args, kwargs):
- if self.is_cross_build_method(None, None) and \
- self.build.environment.need_exe_wrapper():
+ @FeatureDeprecated('meson.has_exe_wrapper', '0.55.0', 'use meson.can_run_host_binaries instead.')
+ def has_exe_wrapper_method(self, args: T.Tuple[object, ...], kwargs: T.Dict[str, object]) -> bool:
+ return self.can_run_host_binaries_impl(args, kwargs)
+
+ @noPosargs
+ @permittedKwargs({})
+ @FeatureNew('meson.can_run_host_binaries', '0.55.0')
+ def can_run_host_binaries_method(self, args: T.Tuple[object, ...], kwargs: T.Dict[str, object]) -> bool:
+ return self.can_run_host_binaries_impl(args, kwargs)
+
+ def can_run_host_binaries_impl(self, args, kwargs):
+ if (self.is_cross_build_method(None, None) and
+ self.build.environment.need_exe_wrapper()):
if self.build.environment.exe_wrapper is None:
return False
# We return True when exe_wrap is defined, when it's not needed, and
@@ -2355,7 +2472,7 @@ class Interpreter(InterpreterBase):
if isinstance(item, build.CustomTarget):
return CustomTargetHolder(item, self)
- elif isinstance(item, (int, str, bool, Disabler)) or item is None:
+ elif isinstance(item, (int, str, bool, Disabler, InterpreterObject)) or item is None:
return item
elif isinstance(item, build.Executable):
return ExecutableHolder(item, self)
@@ -2370,7 +2487,7 @@ class Interpreter(InterpreterBase):
elif isinstance(item, dependencies.Dependency):
return DependencyHolder(item, self.subproject)
elif isinstance(item, dependencies.ExternalProgram):
- return ExternalProgramHolder(item)
+ return ExternalProgramHolder(item, self.subproject)
elif hasattr(item, 'held_object'):
return item
else:
@@ -2393,7 +2510,7 @@ class Interpreter(InterpreterBase):
elif isinstance(v, build.Data):
self.build.data.append(v)
elif isinstance(v, dependencies.ExternalProgram):
- return ExternalProgramHolder(v)
+ return ExternalProgramHolder(v, self.subproject)
elif isinstance(v, dependencies.InternalDependency):
# FIXME: This is special cased and not ideal:
# The first source is our new VapiTarget, the rest are deps
@@ -2427,7 +2544,19 @@ class Interpreter(InterpreterBase):
elif os.path.isfile(f) and not f.startswith('/dev'):
srcdir = Path(self.environment.get_source_dir())
builddir = Path(self.environment.get_build_dir())
- f = Path(f).resolve()
+ try:
+ f = Path(f).resolve()
+ except OSError:
+ f = Path(f)
+ s = f.stat()
+ if (hasattr(s, 'st_file_attributes') and
+ s.st_file_attributes & stat.FILE_ATTRIBUTE_REPARSE_POINT != 0 and
+ s.st_reparse_tag == stat.IO_REPARSE_TAG_APPEXECLINK):
+ # This is a Windows Store link which we can't
+ # resolve, so just do our best otherwise.
+ f = f.parent.resolve() / f.name
+ else:
+ raise
if builddir in f.parents:
return
if srcdir in f.parents:
@@ -2458,6 +2587,15 @@ class Interpreter(InterpreterBase):
except InvalidArguments:
pass
+ def import_module(self, modname):
+ if modname in self.modules:
+ return
+ try:
+ module = importlib.import_module('mesonbuild.modules.' + modname)
+ except ImportError:
+ raise InvalidArguments('Module "%s" does not exist' % (modname, ))
+ self.modules[modname] = module.initialize(self)
+
@stringArgs
@noKwargs
def func_import(self, node, args, kwargs):
@@ -2466,14 +2604,15 @@ class Interpreter(InterpreterBase):
modname = args[0]
if modname.startswith('unstable-'):
plainname = modname.split('-', 1)[1]
- mlog.warning('Module %s has no backwards or forwards compatibility and might not exist in future releases.' % modname, location=node)
- modname = 'unstable_' + plainname
- if modname not in self.modules:
try:
- module = importlib.import_module('mesonbuild.modules.' + modname)
- except ImportError:
- raise InvalidArguments('Module "%s" does not exist' % (modname, ))
- self.modules[modname] = module.initialize(self)
+ # check if stable module exists
+ self.import_module(plainname)
+ mlog.warning('Module %s is now stable, please use the %s module instead.' % (modname, plainname))
+ modname = plainname
+ except InvalidArguments:
+ mlog.warning('Module %s has no backwards or forwards compatibility and might not exist in future releases.' % modname, location=node)
+ modname = 'unstable_' + plainname
+ self.import_module(modname)
return ModuleHolder(modname, self.modules[modname], self)
@stringArgs
@@ -2524,7 +2663,7 @@ external dependencies (including libraries) must go to "dependencies".''')
@noKwargs
def func_assert(self, node, args, kwargs):
if len(args) == 1:
- FeatureNew('assert function without message argument', '0.53.0').use(self.subproject)
+ FeatureNew.single_use('assert function without message argument', '0.53.0', self.subproject)
value = args[0]
message = None
elif len(args) == 2:
@@ -2656,6 +2795,7 @@ external dependencies (including libraries) must go to "dependencies".''')
default_options = mesonlib.stringlistify(kwargs.get('default_options', []))
default_options = coredata.create_options_dict(default_options)
+
if dirname == '':
raise InterpreterException('Subproject dir name must not be empty.')
if dirname[0] == '.':
@@ -2678,10 +2818,9 @@ external dependencies (including libraries) must go to "dependencies".''')
self.subproject_dir, dirname))
return subproject
- subproject_dir_abs = os.path.join(self.environment.get_source_dir(), self.subproject_dir)
- r = wrap.Resolver(subproject_dir_abs, self.coredata.get_builtin_option('wrap_mode'))
+ r = self.environment.wrap_resolver
try:
- resolved = r.resolve(dirname, method)
+ resolved = r.resolve(dirname, method, self.subproject)
except wrap.WrapException as e:
subprojdir = os.path.join(self.subproject_dir, r.directory)
if isinstance(e, wrap.WrapNotFoundException):
@@ -2697,7 +2836,7 @@ external dependencies (including libraries) must go to "dependencies".''')
raise e
subdir = os.path.join(self.subproject_dir, resolved)
- subdir_abs = os.path.join(subproject_dir_abs, resolved)
+ subdir_abs = os.path.join(self.environment.get_source_dir(), subdir)
os.makedirs(os.path.join(self.build.environment.get_build_dir(), subdir), exist_ok=True)
self.global_args_frozen = True
@@ -2766,13 +2905,21 @@ external dependencies (including libraries) must go to "dependencies".''')
with mlog.nested():
new_build = self.build.copy()
prefix = self.coredata.builtins['prefix'].value
+
+ from .modules.cmake import CMakeSubprojectOptions
+ options = kwargs.get('options', CMakeSubprojectOptions())
+ if not isinstance(options, CMakeSubprojectOptions):
+ raise InterpreterException('"options" kwarg must be CMakeSubprojectOptions'
+ ' object (created by cmake.subproject_options())')
+
cmake_options = mesonlib.stringlistify(kwargs.get('cmake_options', []))
+ cmake_options += options.cmake_options
cm_int = CMakeInterpreter(new_build, subdir, subdir_abs, prefix, new_build.environment, self.backend)
cm_int.initialise(cmake_options)
cm_int.analyse()
# Generate a meson ast and execute it with the normal do_subproject_meson
- ast = cm_int.pretend_to_be_meson()
+ ast = cm_int.pretend_to_be_meson(options.target_options)
mlog.log()
with mlog.nested():
@@ -2803,6 +2950,7 @@ external dependencies (including libraries) must go to "dependencies".''')
if self.is_subproject():
optname = self.subproject + ':' + optname
+
for opts in [
self.coredata.base_options, compilers.base_options, self.coredata.builtins,
dict(self.coredata.get_prefixed_options_per_machine(self.coredata.builtins_per_machine)),
@@ -2859,7 +3007,7 @@ external dependencies (including libraries) must go to "dependencies".''')
if len(args) > 1:
raise InterpreterException('configuration_data takes only one optional positional argument')
elif len(args) == 1:
- FeatureNew('configuration_data dictionary', '0.49.0').use(self.subproject)
+ FeatureNew.single_use('configuration_data dictionary', '0.49.0', self.subproject)
initial_values = args[0]
if not isinstance(initial_values, dict):
raise InterpreterException('configuration_data first argument must be a dictionary')
@@ -2887,8 +3035,9 @@ external dependencies (including libraries) must go to "dependencies".''')
if self.environment.first_invocation:
self.coredata.init_backend_options(backend)
- options = {k: v for k, v in self.environment.cmd_line_options.items() if k.startswith('backend_')}
- self.coredata.set_options(options)
+ if '' in self.environment.meson_options.host:
+ options = {k: v for k, v in self.environment.meson_options.host[''].items() if k.startswith('backend_')}
+ self.coredata.set_options(options)
@stringArgs
@permittedKwargs(permitted_kwargs['project'])
@@ -2899,11 +3048,14 @@ external dependencies (including libraries) must go to "dependencies".''')
if ':' in proj_name:
raise InvalidArguments("Project name {!r} must not contain ':'".format(proj_name))
+ # This needs to be evaluated as early as possible, as meson uses this
+ # for things like deprecation testing.
if 'meson_version' in kwargs:
cv = coredata.version
pv = kwargs['meson_version']
if not mesonlib.version_compare(cv, pv):
raise InterpreterException('Meson version is %s but project requires %s' % (cv, pv))
+ mesonlib.project_meson_versions[self.subproject] = kwargs['meson_version']
if os.path.exists(self.option_file):
oi = optinterpreter.OptionInterpreter(self.subproject)
@@ -2918,7 +3070,7 @@ external dependencies (including libraries) must go to "dependencies".''')
self.project_default_options = mesonlib.stringlistify(kwargs.get('default_options', []))
self.project_default_options = coredata.create_options_dict(self.project_default_options)
if self.environment.first_invocation:
- default_options = self.project_default_options
+ default_options = self.project_default_options.copy()
default_options.update(self.default_project_options)
self.coredata.init_builtins(self.subproject)
else:
@@ -2949,10 +3101,10 @@ external dependencies (including libraries) must go to "dependencies".''')
self.subproject_dir = spdirname
self.build.subproject_dir = self.subproject_dir
-
- mesonlib.project_meson_versions[self.subproject] = ''
- if 'meson_version' in kwargs:
- mesonlib.project_meson_versions[self.subproject] = kwargs['meson_version']
+ if not self.is_subproject():
+ wrap_mode = self.coredata.get_builtin_option('wrap_mode')
+ subproject_dir_abs = os.path.join(self.environment.get_source_dir(), self.subproject_dir)
+ self.environment.wrap_resolver = wrap.Resolver(subproject_dir_abs, wrap_mode)
self.build.projects[self.subproject] = proj_name
mlog.log('Project name:', mlog.bold(proj_name))
@@ -2976,8 +3128,11 @@ external dependencies (including libraries) must go to "dependencies".''')
return self.add_languages(args, required, self.machine_from_native_kwarg(kwargs))
else:
# absent 'native' means 'both' for backwards compatibility
- mlog.warning('add_languages is missing native:, assuming languages are wanted for both host and build.',
- location=self.current_node)
+ tv = FeatureNew.get_target_version(self.subproject)
+ if FeatureNew.check_version(tv, '0.54.0'):
+ mlog.warning('add_languages is missing native:, assuming languages are wanted for both host and build.',
+ location=self.current_node)
+
success = self.add_languages(args, False, MachineChoice.BUILD)
success &= self.add_languages(args, required, MachineChoice.HOST)
return success
@@ -3000,7 +3155,7 @@ external dependencies (including libraries) must go to "dependencies".''')
@noKwargs
def func_message(self, node, args, kwargs):
if len(args) > 1:
- FeatureNew('message with more than one argument', '0.54.0').use(self.subproject)
+ FeatureNew.single_use('message with more than one argument', '0.54.0', self.subproject)
args_str = [self.get_message_string_arg(i) for i in args]
self.message_impl(args_str)
@@ -3062,7 +3217,7 @@ external dependencies (including libraries) must go to "dependencies".''')
@noKwargs
def func_warning(self, node, args, kwargs):
if len(args) > 1:
- FeatureNew('warning with more than one argument', '0.54.0').use(self.subproject)
+ FeatureNew.single_use('warning with more than one argument', '0.54.0', self.subproject)
args_str = [self.get_message_string_arg(i) for i in args]
mlog.warning(*args_str, location=node)
@@ -3084,16 +3239,22 @@ external dependencies (including libraries) must go to "dependencies".''')
return success
def should_skip_sanity_check(self, for_machine: MachineChoice) -> bool:
- if for_machine != MachineChoice.HOST:
- return False
- if not self.environment.is_cross_build():
- return False
should = self.environment.properties.host.get('skip_sanity_check', False)
if not isinstance(should, bool):
raise InterpreterException('Option skip_sanity_check must be a boolean.')
+ if for_machine != MachineChoice.HOST and not should:
+ return False
+ if not self.environment.is_cross_build() and not should:
+ return False
return should
def add_languages_for(self, args, required, for_machine: MachineChoice):
+ langs = set(self.coredata.compilers[for_machine].keys())
+ langs.update(args)
+ if 'vala' in langs:
+ if 'c' not in langs:
+ raise InterpreterException('Compiling Vala requires C. Add C to your project languages and rerun Meson.')
+
success = True
for lang_str in sorted(args, key=compilers.sort_clink):
lang_str = lang_str.lower()
@@ -3134,14 +3295,9 @@ external dependencies (including libraries) must go to "dependencies".''')
mlog.bold(' '.join(comp.linker.get_exelist())), comp.linker.id, comp.linker.version)
self.build.ensure_static_linker(comp)
- langs = self.coredata.compilers[for_machine].keys()
- if Language.VALA in langs:
- if Language.C not in langs:
- raise InterpreterException('Compiling Vala requires C. Add C to your project languages and rerun Meson.')
-
return success
- def program_from_file_for(self, for_machine, prognames, silent):
+ def program_from_file_for(self, for_machine, prognames):
for p in unholder(prognames):
if isinstance(p, mesonlib.File):
continue # Always points to a local (i.e. self generated) file.
@@ -3149,7 +3305,7 @@ external dependencies (including libraries) must go to "dependencies".''')
raise InterpreterException('Executable name must be a string')
prog = ExternalProgram.from_bin_list(self.environment, for_machine, p)
if prog.found():
- return ExternalProgramHolder(prog)
+ return ExternalProgramHolder(prog, self.subproject)
return None
def program_from_system(self, args, search_dirs, silent=False):
@@ -3176,20 +3332,18 @@ external dependencies (including libraries) must go to "dependencies".''')
extprog = dependencies.ExternalProgram(exename, search_dir=search_dir,
extra_search_dirs=extra_search_dirs,
silent=silent)
- progobj = ExternalProgramHolder(extprog)
+ progobj = ExternalProgramHolder(extprog, self.subproject)
if progobj.found():
return progobj
- def program_from_overrides(self, command_names, silent=False):
+ def program_from_overrides(self, command_names, extra_info):
for name in command_names:
if not isinstance(name, str):
continue
if name in self.build.find_overrides:
exe = self.build.find_overrides[name]
- if not silent:
- mlog.log('Program', mlog.bold(name), 'found:', mlog.green('YES'),
- '(overridden: %s)' % exe.description())
- return ExternalProgramHolder(exe)
+ extra_info.append(mlog.blue('(overridden)'))
+ return ExternalProgramHolder(exe, self.subproject, self.backend)
return None
def store_name_lookups(self, command_names):
@@ -3206,40 +3360,79 @@ external dependencies (including libraries) must go to "dependencies".''')
% name)
self.build.find_overrides[name] = exe
+ def notfound_program(self, args):
+ return ExternalProgramHolder(dependencies.NonExistingExternalProgram(' '.join(args)), self.subproject)
+
# TODO update modules to always pass `for_machine`. It is bad-form to assume
# the host machine.
def find_program_impl(self, args, for_machine: MachineChoice = MachineChoice.HOST,
- required=True, silent=True, wanted='', search_dirs=None):
- if not isinstance(args, list):
- args = [args]
+ required=True, silent=True, wanted='', search_dirs=None,
+ version_func=None):
+ args = mesonlib.listify(args)
- progobj = self.program_from_overrides(args, silent=silent)
+ extra_info = []
+ progobj = self.program_lookup(args, for_machine, required, search_dirs, extra_info)
if progobj is None:
- progobj = self.program_from_file_for(for_machine, args, silent=silent)
- if progobj is None:
- progobj = self.program_from_system(args, search_dirs, silent=silent)
- if progobj is None and args[0].endswith('python3'):
- prog = dependencies.ExternalProgram('python3', mesonlib.python_command, silent=True)
- progobj = ExternalProgramHolder(prog)
- if required and (progobj is None or not progobj.found()):
- raise InvalidArguments('Program(s) {!r} not found or not executable'.format(args))
- if progobj is None:
- return ExternalProgramHolder(dependencies.NonExistingExternalProgram(' '.join(args)))
- # Only store successful lookups
- self.store_name_lookups(args)
+ progobj = self.notfound_program(args)
+
+ if not progobj.found():
+ mlog.log('Program', mlog.bold(progobj.get_name()), 'found:', mlog.red('NO'))
+ if required:
+ m = 'Program {!r} not found'
+ raise InterpreterException(m.format(progobj.get_name()))
+ return progobj
+
if wanted:
- version = progobj.get_version(self)
+ if version_func:
+ version = version_func(progobj)
+ else:
+ version = progobj.get_version(self)
is_found, not_found, found = mesonlib.version_compare_many(version, wanted)
if not is_found:
mlog.log('Program', mlog.bold(progobj.get_name()), 'found:', mlog.red('NO'),
- 'found {!r} but need:'.format(version),
- ', '.join(["'{}'".format(e) for e in not_found]))
+ 'found', mlog.normal_cyan(version), 'but need:',
+ mlog.bold(', '.join(["'{}'".format(e) for e in not_found])))
if required:
m = 'Invalid version of program, need {!r} {!r} found {!r}.'
- raise InvalidArguments(m.format(progobj.get_name(), not_found, version))
- return ExternalProgramHolder(dependencies.NonExistingExternalProgram(' '.join(args)))
+ raise InterpreterException(m.format(progobj.get_name(), not_found, version))
+ return self.notfound_program(args)
+ extra_info.insert(0, mlog.normal_cyan(version))
+
+ # Only store successful lookups
+ self.store_name_lookups(args)
+ mlog.log('Program', mlog.bold(progobj.get_name()), 'found:', mlog.green('YES'), *extra_info)
return progobj
+ def program_lookup(self, args, for_machine, required, search_dirs, extra_info):
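+ # Lookup order: meson.override_find_program() overrides first, then a
+ # forced wrap fallback, machine-file binaries, the system PATH, the
+ # python3 special case, and finally an implicit wrap fallback when the
+ # program is required.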
+ progobj = self.program_from_overrides(args, extra_info)
+ if progobj:
+ return progobj
+
+ fallback = None
+ wrap_mode = self.coredata.get_builtin_option('wrap_mode')
+ if wrap_mode != WrapMode.nofallback and self.environment.wrap_resolver:
+ fallback = self.environment.wrap_resolver.find_program_provider(args)
+ if fallback and wrap_mode == WrapMode.forcefallback:
+ return self.find_program_fallback(fallback, args, required, extra_info)
+
+ progobj = self.program_from_file_for(for_machine, args)
+ if progobj is None:
+ progobj = self.program_from_system(args, search_dirs, silent=True)
+ if progobj is None and args[0].endswith('python3'):
+ prog = dependencies.ExternalProgram('python3', mesonlib.python_command, silent=True)
+ progobj = ExternalProgramHolder(prog, self.subproject) if prog.found() else None
+ if progobj is None and fallback and required:
+ progobj = self.find_program_fallback(fallback, args, required, extra_info)
+
+ return progobj
+
+ def find_program_fallback(self, fallback, args, required, extra_info):
+ mlog.log('Fallback to subproject', mlog.bold(fallback), 'which provides program',
+ mlog.bold(' '.join(args)))
+ sp_kwargs = { 'required': required }
+ self.do_subproject(fallback, 'meson', sp_kwargs)
+ return self.program_from_overrides(args, extra_info)
+
@FeatureNewKwargs('find_program', '0.53.0', ['dirs'])
@FeatureNewKwargs('find_program', '0.52.0', ['version'])
@FeatureNewKwargs('find_program', '0.49.0', ['disabler'])
@@ -3252,7 +3445,7 @@ external dependencies (including libraries) must go to "dependencies".''')
disabled, required, feature = extract_required_kwarg(kwargs, self.subproject)
if disabled:
mlog.log('Program', mlog.bold(' '.join(args)), 'skipped: feature', mlog.bold(feature), 'disabled')
- return ExternalProgramHolder(dependencies.NonExistingExternalProgram(' '.join(args)))
+ return self.notfound_program(args)
search_dirs = extract_search_dirs(kwargs)
wanted = mesonlib.stringlistify(kwargs.get('version', []))
@@ -3267,7 +3460,7 @@ external dependencies (including libraries) must go to "dependencies".''')
'Look here for example: http://mesonbuild.com/howtox.html#add-math-library-lm-portably\n'
)
- def _find_cached_dep(self, name, kwargs):
+ def _find_cached_dep(self, name, display_name, kwargs):
# Check if we want this as a build-time / build machine or runt-time /
# host machine dep.
for_machine = self.machine_from_native_kwarg(kwargs)
@@ -3282,7 +3475,7 @@ external dependencies (including libraries) must go to "dependencies".''')
# have explicitly called meson.override_dependency() with a not-found
# dep.
if not cached_dep.found():
- mlog.log('Dependency', mlog.bold(name),
+ mlog.log('Dependency', mlog.bold(display_name),
'found:', mlog.red('NO'), *info)
return identifier, cached_dep
found_vers = cached_dep.get_version()
@@ -3304,7 +3497,7 @@ external dependencies (including libraries) must go to "dependencies".''')
if cached_dep:
if found_vers:
info = [mlog.normal_cyan(found_vers), *info]
- mlog.log('Dependency', mlog.bold(name),
+ mlog.log('Dependency', mlog.bold(display_name),
'found:', mlog.green('YES'), *info)
return identifier, cached_dep
@@ -3327,7 +3520,7 @@ external dependencies (including libraries) must go to "dependencies".''')
return
dep = subi.get_variable_method([varname], {})
if dep.held_object != cached_dep:
- m = 'Inconsistency: Subproject has overriden the dependency with another variable than {!r}'
+ m = 'Inconsistency: Subproject has overridden the dependency with another variable than {!r}'
raise DependencyException(m.format(varname))
def get_subproject_dep(self, name, display_name, dirname, varname, kwargs):
@@ -3337,17 +3530,21 @@ external dependencies (including libraries) must go to "dependencies".''')
dep = self.notfound_dependency()
try:
subproject = self.subprojects[dirname]
- _, cached_dep = self._find_cached_dep(name, kwargs)
+ _, cached_dep = self._find_cached_dep(name, display_name, kwargs)
if varname is None:
- # Assuming the subproject overriden the dependency we want
+ # Assuming the subproject has overridden the dependency we want
if cached_dep:
if required and not cached_dep.found():
m = 'Dependency {!r} is not satisfied'
raise DependencyException(m.format(display_name))
return DependencyHolder(cached_dep, self.subproject)
else:
- m = 'Subproject {} did not override dependency {}'
- raise DependencyException(m.format(subproj_path, display_name))
+ if required:
+ m = 'Subproject {} did not override dependency {}'
+ raise DependencyException(m.format(subproj_path, display_name))
+ mlog.log('Dependency', mlog.bold(display_name), 'from subproject',
+ mlog.bold(subproj_path), 'found:', mlog.red('NO'))
+ return self.notfound_dependency()
if subproject.found():
self.verify_fallback_consistency(dirname, varname, cached_dep)
dep = self.subprojects[dirname].get_variable_method([varname], {})
@@ -3388,15 +3585,15 @@ external dependencies (including libraries) must go to "dependencies".''')
def _handle_featurenew_dependencies(self, name):
'Do a feature check on dependencies used by this subproject'
if name == 'mpi':
- FeatureNew('MPI Dependency', '0.42.0').use(self.subproject)
+ FeatureNew.single_use('MPI Dependency', '0.42.0', self.subproject)
elif name == 'pcap':
- FeatureNew('Pcap Dependency', '0.42.0').use(self.subproject)
+ FeatureNew.single_use('Pcap Dependency', '0.42.0', self.subproject)
elif name == 'vulkan':
- FeatureNew('Vulkan Dependency', '0.42.0').use(self.subproject)
+ FeatureNew.single_use('Vulkan Dependency', '0.42.0', self.subproject)
elif name == 'libwmf':
- FeatureNew('LibWMF Dependency', '0.44.0').use(self.subproject)
+ FeatureNew.single_use('LibWMF Dependency', '0.44.0', self.subproject)
elif name == 'openmp':
- FeatureNew('OpenMP Dependency', '0.46.0').use(self.subproject)
+ FeatureNew.single_use('OpenMP Dependency', '0.46.0', self.subproject)
@FeatureNewKwargs('dependency', '0.54.0', ['components'])
@FeatureNewKwargs('dependency', '0.52.0', ['include_type'])
@@ -3410,6 +3607,9 @@ external dependencies (including libraries) must go to "dependencies".''')
self.validate_arguments(args, 1, [str])
name = args[0]
display_name = name if name else '(anonymous)'
+ mods = extract_as_list(kwargs, 'modules')
+ if mods:
+ display_name += ' (modules: {})'.format(', '.join(str(i) for i in mods))
not_found_message = kwargs.get('not_found_message', '')
if not isinstance(not_found_message, str):
raise InvalidArguments('The not_found_message must be a string.')
@@ -3439,6 +3639,18 @@ external dependencies (including libraries) must go to "dependencies".''')
return self.notfound_dependency()
has_fallback = 'fallback' in kwargs
+ if not has_fallback and name:
+ # Add an implicit fallback if we have a wrap file or a directory with the same name,
+ # but only if this dependency is required. It is common to first check for a
+ # pkg-config dependency, then fall back to find_library(), and only afterwards
+ # check the dependency again with a fallback. If the fallback has already been
+ # configured, we have to use it even if the dependency is not required.
+ provider = self.environment.wrap_resolver.find_dep_provider(name)
+ dirname = mesonlib.listify(provider)[0]
+ if provider and (required or dirname in self.subprojects):
+ kwargs['fallback'] = provider
+ has_fallback = True
+
if 'default_options' in kwargs and not has_fallback:
mlog.warning('The "default_options" keyworg argument does nothing without a "fallback" keyword argument.',
location=self.current_node)
@@ -3451,7 +3663,7 @@ external dependencies (including libraries) must go to "dependencies".''')
raise InvalidArguments('Characters <, > and = are forbidden in dependency names. To specify\n'
'version requirements use the \'version\' keyword argument instead.')
- identifier, cached_dep = self._find_cached_dep(name, kwargs)
+ identifier, cached_dep = self._find_cached_dep(name, display_name, kwargs)
if cached_dep:
if has_fallback:
dirname, varname = self.get_subproject_infos(kwargs)
@@ -3469,7 +3681,10 @@ external dependencies (including libraries) must go to "dependencies".''')
return self.get_subproject_dep(name, display_name, dirname, varname, kwargs)
wrap_mode = self.coredata.get_builtin_option('wrap_mode')
- forcefallback = wrap_mode == WrapMode.forcefallback and has_fallback
+ force_fallback_for = self.coredata.get_builtin_option('force_fallback_for')
+ forcefallback = has_fallback and (wrap_mode == WrapMode.forcefallback or
+ name in force_fallback_for or
+ dirname in force_fallback_for)
if name != '' and not forcefallback:
self._handle_featurenew_dependencies(name)
kwargs['required'] = required and not has_fallback
@@ -3515,15 +3730,23 @@ external dependencies (including libraries) must go to "dependencies".''')
def get_subproject_infos(self, kwargs):
fbinfo = mesonlib.stringlistify(kwargs['fallback'])
if len(fbinfo) == 1:
- FeatureNew('Fallback without variable name', '0.53.0').use(self.subproject)
+ FeatureNew.single_use('Fallback without variable name', '0.53.0', self.subproject)
return fbinfo[0], None
elif len(fbinfo) != 2:
raise InterpreterException('Fallback info must have one or two items.')
return fbinfo
def dependency_fallback(self, name, display_name, kwargs):
+ dirname, varname = self.get_subproject_infos(kwargs)
required = kwargs.get('required', True)
- if self.coredata.get_builtin_option('wrap_mode') == WrapMode.nofallback:
+
+ # Explicitly listed fallback preferences for specific subprojects
+ # take precedence over wrap-mode
+ force_fallback_for = self.coredata.get_builtin_option('force_fallback_for')
+ if name in force_fallback_for or dirname in force_fallback_for:
+ mlog.log('Looking for a fallback subproject for the dependency',
+ mlog.bold(display_name), 'because:\nUse of fallback was forced for that specific subproject')
+ elif self.coredata.get_builtin_option('wrap_mode') == WrapMode.nofallback:
mlog.log('Not looking for a fallback subproject for the dependency',
mlog.bold(display_name), 'because:\nUse of fallback '
'dependencies is disabled.')
@@ -3537,7 +3760,6 @@ external dependencies (including libraries) must go to "dependencies".''')
else:
mlog.log('Looking for a fallback subproject for the dependency',
mlog.bold(display_name))
- dirname, varname = self.get_subproject_infos(kwargs)
sp_kwargs = {
'default_options': kwargs.get('default_options', []),
'required': required,
@@ -3603,11 +3825,13 @@ external dependencies (including libraries) must go to "dependencies".''')
raise InterpreterException('Unknown target_type.')
@permittedKwargs(permitted_kwargs['vcs_tag'])
+ @FeatureDeprecatedKwargs('custom_target', '0.47.0', ['build_always'],
+ 'combine build_by_default and build_always_stale instead.')
def func_vcs_tag(self, node, args, kwargs):
if 'input' not in kwargs or 'output' not in kwargs:
raise InterpreterException('Keyword arguments input and output must exist')
if 'fallback' not in kwargs:
- FeatureNew('Optional fallback in vcs_tag', '0.41.0').use(self.subproject)
+ FeatureNew.single_use('Optional fallback in vcs_tag', '0.41.0', self.subproject)
fallback = kwargs.pop('fallback', self.project_version)
if not isinstance(fallback, str):
raise InterpreterException('Keyword argument fallback must be a string.')
@@ -3660,7 +3884,7 @@ external dependencies (including libraries) must go to "dependencies".''')
if len(args) != 1:
raise InterpreterException('custom_target: Only one positional argument is allowed, and it must be a string name')
if 'depfile' in kwargs and ('@BASENAME@' in kwargs['depfile'] or '@PLAINNAME@' in kwargs['depfile']):
- FeatureNew('substitutions in custom_target depfile', '0.47.0').use(self.subproject)
+ FeatureNew.single_use('substitutions in custom_target depfile', '0.47.0', self.subproject)
return self._func_custom_target_impl(node, args, kwargs)
def _func_custom_target_impl(self, node, args, kwargs):
@@ -3748,6 +3972,8 @@ This will become a hard error in the future.''' % kwargs['input'], location=self
@FeatureNewKwargs('test', '0.52.0', ['priority'])
@permittedKwargs(permitted_kwargs['test'])
def func_test(self, node, args, kwargs):
+ if kwargs.get('protocol') == 'gtest':
+ FeatureNew.single_use('"gtest" protocol for tests', '0.55.0', self.subproject)
self.add_test(node, args, kwargs, True)
def unpack_env_kwarg(self, kwargs) -> build.EnvironmentVariables:
@@ -3755,7 +3981,7 @@ This will become a hard error in the future.''' % kwargs['input'], location=self
if isinstance(envlist, EnvironmentVariablesHolder):
env = envlist.held_object
elif isinstance(envlist, dict):
- FeatureNew('environment dictionary', '0.52.0').use(self.subproject)
+ FeatureNew.single_use('environment dictionary', '0.52.0', self.subproject)
env = EnvironmentVariablesHolder(envlist)
env = env.held_object
else:
@@ -3767,7 +3993,7 @@ This will become a hard error in the future.''' % kwargs['input'], location=self
def add_test(self, node, args, kwargs, is_base_test):
if len(args) != 2:
- raise InterpreterException('Incorrect number of arguments')
+ raise InterpreterException('test expects 2 arguments, {} given'.format(len(args)))
if not isinstance(args[0], str):
raise InterpreterException('First argument of test must be a string.')
exe = args[1]
@@ -3799,8 +4025,8 @@ This will become a hard error in the future.''' % kwargs['input'], location=self
if not isinstance(timeout, int):
raise InterpreterException('Timeout must be an integer.')
protocol = kwargs.get('protocol', 'exitcode')
- if protocol not in ('exitcode', 'tap'):
- raise InterpreterException('Protocol must be "exitcode" or "tap".')
+ if protocol not in {'exitcode', 'tap', 'gtest'}:
+ raise InterpreterException('Protocol must be "exitcode", "tap", or "gtest".')
suite = []
prj = self.subproject if self.is_subproject() else self.build.project_name
for s in mesonlib.stringlistify(kwargs.get('suite', '')):
@@ -3874,7 +4100,7 @@ This will become a hard error in the future.''' % kwargs['input'], location=self
absname = os.path.join(self.environment.get_source_dir(), buildfilename)
if not os.path.isfile(absname):
self.subdir = prev_subdir
- raise InterpreterException('Non-existent build file {!r}'.format(buildfilename))
+ raise InterpreterException("Non-existent build file '{!s}'".format(buildfilename))
with open(absname, encoding='utf8') as f:
code = f.read()
assert(isinstance(code, str))
@@ -3922,7 +4148,7 @@ This will become a hard error in the future.''' % kwargs['input'], location=self
elif isinstance(s, str):
source_strings.append(s)
else:
- raise InvalidArguments('Argument {!r} must be string or file.'.format(s))
+ raise InvalidArguments('Argument must be string or file.')
sources += self.source_strings_to_files(source_strings)
install_dir = kwargs.get('install_dir', None)
if not isinstance(install_dir, (str, type(None))):
@@ -4071,7 +4297,7 @@ This will become a hard error in the future.''' % kwargs['input'], location=self
if 'configuration' in kwargs:
conf = kwargs['configuration']
if isinstance(conf, dict):
- FeatureNew('configure_file.configuration dictionary', '0.49.0').use(self.subproject)
+ FeatureNew.single_use('configure_file.configuration dictionary', '0.49.0', self.subproject)
conf = ConfigurationDataHolder(self.subproject, conf)
elif not isinstance(conf, ConfigurationDataHolder):
raise InterpreterException('Argument "configuration" is not of type configuration_data')
@@ -4101,7 +4327,7 @@ This will become a hard error in the future.''' % kwargs['input'], location=self
conf.mark_used()
elif 'command' in kwargs:
if len(inputs) > 1:
- FeatureNew('multiple inputs in configure_file()', '0.52.0').use(self.subproject)
+ FeatureNew.single_use('multiple inputs in configure_file()', '0.52.0', self.subproject)
# We use absolute paths for input and output here because the cwd
# that the command is run from is 'unspecified', so it could change.
# Currently it's builddir/subdir for in_builddir else srcdir/subdir.
@@ -4196,8 +4422,9 @@ This will become a hard error in the future.''' % kwargs['input'], location=self
for a in incdir_strings:
if a.startswith(src_root):
- raise InvalidArguments('''Tried to form an absolute path to a source dir. You should not do that but use
-relative paths instead.
+ raise InvalidArguments('Tried to form an absolute path to a source dir. '
+ 'You should not do that but use relative paths instead.'
+ '''
To get include path to any directory relative to the current dir do
@@ -4348,7 +4575,7 @@ different subdirectory.
if len(args) > 1:
raise InterpreterException('environment takes only one optional positional argument')
elif len(args) == 1:
- FeatureNew('environment positional arguments', '0.52.0').use(self.subproject)
+ FeatureNew.single_use('environment positional arguments', '0.52.0', self.subproject)
initial_values = args[0]
if not isinstance(initial_values, dict) and not isinstance(initial_values, list):
raise InterpreterException('environment first argument must be a dictionary or a list')
@@ -4557,6 +4784,7 @@ Try setting b_lundef to false instead.'''.format(self.coredata.base_options['b_s
kwargs['include_directories'] = self.extract_incdirs(kwargs)
target = targetclass(name, self.subdir, self.subproject, for_machine, sources, objs, self.environment, kwargs)
+ target.project_version = self.project_version
if not self.environment.machines.matches_build_machine(for_machine):
self.add_cross_stdlib_info(target)
@@ -4637,6 +4865,8 @@ This will become a hard error in the future.''', location=self.current_node)
if len(args) < 1 or len(args) > 2:
raise InvalidCode('Get_variable takes one or two arguments.')
varname = args[0]
+ if isinstance(varname, Disabler):
+ return varname
if not isinstance(varname, str):
raise InterpreterException('First argument must be a string.')
try:
diff --git a/mesonbuild/interpreterbase.py b/mesonbuild/interpreterbase.py
index 1a7aa38..822167c 100644
--- a/mesonbuild/interpreterbase.py
+++ b/mesonbuild/interpreterbase.py
@@ -18,6 +18,7 @@
from . import mparser, mesonlib, mlog
from . import environment, dependencies
+import abc
import os, copy, re
import collections.abc
from functools import wraps
@@ -212,17 +213,17 @@ class permittedKwargs:
return f(*wrapped_args, **wrapped_kwargs)
return wrapped
-class FeatureCheckBase:
+class FeatureCheckBase(metaclass=abc.ABCMeta):
"Base class for feature version checks"
- # Class variable, shared across all instances
- #
- # Format: {subproject: {feature_version: set(feature_names)}}
+ # In python 3.6 we can just forward declare this, but in 3.5 we can't
+ # This will be overwritten by the subclasses by necessity
feature_registry = {} # type: T.ClassVar[T.Dict[str, T.Dict[str, T.Set[str]]]]
- def __init__(self, feature_name: str, version: str):
+ def __init__(self, feature_name: str, version: str, extra_message: T.Optional[str] = None):
self.feature_name = feature_name # type: str
self.feature_version = version # type: str
+ self.extra_message = extra_message or '' # type: str
@staticmethod
def get_target_version(subproject: str) -> str:
@@ -231,13 +232,18 @@ class FeatureCheckBase:
return ''
return mesonlib.project_meson_versions[subproject]
+ @staticmethod
+ @abc.abstractmethod
+ def check_version(target_version: str, feature_version: str) -> bool:
+ pass
+
def use(self, subproject: str) -> None:
tv = self.get_target_version(subproject)
# No target version
if tv == '':
return
# Target version is new enough
- if mesonlib.version_compare_condition_with_min(tv, self.feature_version):
+ if self.check_version(tv, self.feature_version):
return
# Feature is too new for target version, register it
if subproject not in self.feature_registry:
@@ -280,41 +286,86 @@ class FeatureCheckBase:
return f(*wrapped_args, **wrapped_kwargs)
return wrapped
+ @classmethod
+ def single_use(cls, feature_name: str, version: str, subproject: str,
+ extra_message: T.Optional[str] = None) -> None:
+ """Oneline version that instantiates and calls use()."""
+ cls(feature_name, version, extra_message).use(subproject)
+
+
class FeatureNew(FeatureCheckBase):
"""Checks for new features"""
+ # Class variable, shared across all instances
+ #
+ # Format: {subproject: {feature_version: set(feature_names)}}
+ feature_registry = {} # type: T.ClassVar[T.Dict[str, T.Dict[str, T.Set[str]]]]
+
+ @staticmethod
+ def check_version(target_version: str, feature_version: str) -> bool:
+ return mesonlib.version_compare_condition_with_min(target_version, feature_version)
+
@staticmethod
def get_warning_str_prefix(tv: str) -> str:
return 'Project specifies a minimum meson_version \'{}\' but uses features which were added in newer versions:'.format(tv)
def log_usage_warning(self, tv: str) -> None:
- mlog.warning('Project targeting \'{}\' but tried to use feature introduced '
- 'in \'{}\': {}'.format(tv, self.feature_version, self.feature_name))
+ args = [
+ 'Project targeting', "'{}'".format(tv),
+ 'but tried to use feature introduced in',
+ "'{}':".format(self.feature_version),
+ '{}.'.format(self.feature_name),
+ ]
+ if self.extra_message:
+ args.append(self.extra_message)
+ mlog.warning(*args)
class FeatureDeprecated(FeatureCheckBase):
"""Checks for deprecated features"""
+ # Class variable, shared across all instances
+ #
+ # Format: {subproject: {feature_version: set(feature_names)}}
+ feature_registry = {} # type: T.ClassVar[T.Dict[str, T.Dict[str, T.Set[str]]]]
+
+ @staticmethod
+ def check_version(target_version: str, feature_version: str) -> bool:
+ # For deprecation checks we need to return the inverse of FeatureNew checks
+ return not mesonlib.version_compare_condition_with_min(target_version, feature_version)
+
@staticmethod
def get_warning_str_prefix(tv: str) -> str:
return 'Deprecated features used:'
def log_usage_warning(self, tv: str) -> None:
- mlog.deprecation('Project targeting \'{}\' but tried to use feature '
- 'deprecated since \'{}\': {}'
- ''.format(tv, self.feature_version, self.feature_name))
-
-
-class FeatureCheckKwargsBase:
- def __init__(self, feature_name: str, feature_version: str, kwargs: T.List[str]):
+ args = [
+ 'Project targeting', "'{}'".format(tv),
+ 'but tried to use feature deprecated since',
+ "'{}':".format(self.feature_version),
+ '{}.'.format(self.feature_name),
+ ]
+ if self.extra_message:
+ args.append(self.extra_message)
+ mlog.deprecation(*args)
+
+
+class FeatureCheckKwargsBase(metaclass=abc.ABCMeta):
+
+ @property
+ @abc.abstractmethod
+ def feature_check_class(self) -> T.Type[FeatureCheckBase]:
+ pass
+
+ def __init__(self, feature_name: str, feature_version: str,
+ kwargs: T.List[str], extra_message: T.Optional[str] = None):
self.feature_name = feature_name
self.feature_version = feature_version
self.kwargs = kwargs
+ self.extra_message = extra_message
def __call__(self, f):
@wraps(f)
def wrapped(*wrapped_args, **wrapped_kwargs):
- # Which FeatureCheck class to invoke
- FeatureCheckClass = self.feature_check_class
kwargs, subproject = _get_callee_args(wrapped_args, want_subproject=True)[3:5]
if subproject is None:
raise AssertionError('{!r}'.format(wrapped_args))
@@ -322,7 +373,8 @@ class FeatureCheckKwargsBase:
if arg not in kwargs:
continue
name = arg + ' arg in ' + self.feature_name
- FeatureCheckClass(name, self.feature_version).use(subproject)
+ self.feature_check_class.single_use(
+ name, self.feature_version, subproject, self.extra_message)
return f(*wrapped_args, **wrapped_kwargs)
return wrapped
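
A minimal sketch of the two call styles this refactor unifies (plain Python;
the feature name, version, and subproject value below are made up):

    from mesonbuild.interpreterbase import FeatureNew
    subproject = ''  # '' denotes the top-level project
    FeatureNew('some feature', '0.55.0').use(subproject)         # old spelling
    FeatureNew.single_use('some feature', '0.55.0', subproject)  # new one-liner
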
@@ -532,7 +584,7 @@ class InterpreterBase:
self.argument_depth += 1
for key, value in kwargs.items():
if not isinstance(key, mparser.StringNode):
- FeatureNew('Dictionary entry using non literal key', '0.53.0').use(self.subproject)
+ FeatureNew.single_use('Dictionary entry using non literal key', '0.53.0', self.subproject)
assert isinstance(key, mparser.BaseNode) # All keys must be nodes due to resolve_key_nodes=False
str_key = self.evaluate_statement(key)
if not isinstance(str_key, str):
@@ -819,7 +871,7 @@ The result of this is undefined and will become a hard error in a future Meson r
def function_call(self, node: mparser.FunctionNode) -> T.Optional[TYPE_var]:
func_name = node.func_name
(posargs, kwargs) = self.reduce_arguments(node.args)
- if is_disabled(posargs, kwargs) and func_name != 'set_variable' and func_name != 'is_disabler':
+ if is_disabled(posargs, kwargs) and func_name not in {'get_variable', 'set_variable', 'is_disabler'}:
return Disabler()
if func_name in self.funcs:
func = self.funcs[func_name]
@@ -974,6 +1026,20 @@ The result of this is undefined and will become a hard error in a future Meson r
if not isinstance(cmpr, str):
raise InterpreterException('Version_compare() argument must be a string.')
return mesonlib.version_compare(obj, cmpr)
+ elif method_name == 'substring':
+ if len(posargs) > 2:
+ raise InterpreterException('substring() takes at most two arguments.')
+ start = 0
+ end = len(obj)
+ if len(posargs) > 0:
+ if not isinstance(posargs[0], int):
+ raise InterpreterException('substring() argument must be an int')
+ start = posargs[0]
+ if len(posargs) > 1:
+ if not isinstance(posargs[1], int):
+ raise InterpreterException('substring() argument must be an int')
+ end = posargs[1]
+ return obj[start:end]
raise InterpreterException('Unknown method "%s" for a string.' % method_name)
def format_string(self, templ: str, args: T.List[TYPE_nvar]) -> str:
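
The new substring() method maps straight onto Python slicing, so negative
indices behave exactly as they do in Python. An illustrative sketch of the
equivalences (plain Python, inputs made up):

    s = 'foobar'
    assert s[0:len(s)] == 'foobar'  # 'foobar'.substring()
    assert s[3:len(s)] == 'bar'     # 'foobar'.substring(3)
    assert s[0:3] == 'foo'          # 'foobar'.substring(0, 3)
    assert s[-3:-1] == 'ba'         # 'foobar'.substring(-3, -1)
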
diff --git a/mesonbuild/linkers.py b/mesonbuild/linkers.py
index 44c720f..3ce7111 100644
--- a/mesonbuild/linkers.py
+++ b/mesonbuild/linkers.py
@@ -17,6 +17,7 @@ import os
import typing as T
from . import mesonlib
+from .arglist import CompilerArgs
from .envconfig import get_env_var
if T.TYPE_CHECKING:
@@ -29,6 +30,9 @@ class StaticLinker:
def __init__(self, exelist: T.List[str]):
self.exelist = exelist
+ def compiler_args(self, args: T.Optional[T.Iterable[str]] = None) -> CompilerArgs:
+ return CompilerArgs(self, args)
+
def can_linker_accept_rsp(self) -> bool:
"""
Determines whether the linker can accept arguments using the @rsp syntax.
@@ -56,8 +60,8 @@ class StaticLinker:
def build_rpath_args(self, env: 'Environment', build_dir: str, from_dir: str,
rpath_paths: str, build_rpath: str,
- install_rpath: str) -> T.List[str]:
- return []
+ install_rpath: str) -> T.Tuple[T.List[str], T.Set[bytes]]:
+ return ([], set())
def thread_link_flags(self, env: 'Environment') -> T.List[str]:
return []
@@ -149,6 +153,10 @@ class ArLinker(StaticLinker):
self.std_args = ['csrD']
else:
self.std_args = ['csr']
+ self.can_rsp = '@<' in stdo
+
+ def can_linker_accept_rsp(self) -> bool:
+ return self.can_rsp
def get_std_link_args(self) -> T.List[str]:
return self.std_args
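
The new can_rsp probe keys off ar's own help text, which advertises
response-file support as '@<file>'. A sketch of the detection (the help
excerpt is invented):

    stdo = 'Usage: ar ... @<file>  read options from <file>'  # made-up excerpt
    can_rsp = '@<' in stdo  # this is what ArLinker stores at detection time
    assert can_rsp
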
@@ -436,16 +444,17 @@ class DynamicLinker(LinkerEnvVarsMixin, metaclass=abc.ABCMeta):
"""Arguments to make all warnings errors."""
return []
+ def headerpad_args(self) -> T.List[str]:
+ # Only used by the Apple linker
+ return []
+
def bitcode_args(self) -> T.List[str]:
raise mesonlib.MesonException('This linker does not support bitcode bundles')
- def get_debug_crt_args(self) -> T.List[str]:
- return []
-
def build_rpath_args(self, env: 'Environment', build_dir: str, from_dir: str,
rpath_paths: str, build_rpath: str,
- install_rpath: str) -> T.List[str]:
- return []
+ install_rpath: str) -> T.Tuple[T.List[str], T.Set[bytes]]:
+ return ([], set())
def get_soname_args(self, env: 'Environment', prefix: str, shlib_name: str,
suffix: str, soversion: str, darwin_versions: T.Tuple[str, str],
@@ -551,12 +560,12 @@ class GnuLikeDynamicLinkerMixin:
def build_rpath_args(self, env: 'Environment', build_dir: str, from_dir: str,
rpath_paths: str, build_rpath: str,
- install_rpath: str) -> T.List[str]:
+ install_rpath: str) -> T.Tuple[T.List[str], T.Set[bytes]]:
m = env.machines[self.for_machine]
if m.is_windows() or m.is_cygwin():
- return []
+ return ([], set())
if not rpath_paths and not install_rpath and not build_rpath:
- return []
+ return ([], set())
args = []
origin_placeholder = '$ORIGIN'
processed_rpaths = prepare_rpaths(rpath_paths, build_dir, from_dir)
@@ -564,9 +573,14 @@ class GnuLikeDynamicLinkerMixin:
# is *very* allergic to duplicate -delete_rpath arguments
# when calling depfixer on installation.
all_paths = mesonlib.OrderedSet([os.path.join(origin_placeholder, p) for p in processed_rpaths])
+ rpath_dirs_to_remove = set()
+ for p in all_paths:
+ rpath_dirs_to_remove.add(p.encode('utf8'))
# Build_rpath is used as-is (it is usually absolute).
if build_rpath != '':
all_paths.add(build_rpath)
+ for p in build_rpath.split(':'):
+ rpath_dirs_to_remove.add(p.encode('utf8'))
# TODO: should this actually be "for (dragonfly|open)bsd"?
if mesonlib.is_dragonflybsd() or mesonlib.is_openbsd():
@@ -590,7 +604,7 @@ class GnuLikeDynamicLinkerMixin:
# TODO: should this actually be "for solaris/sunos"?
if mesonlib.is_sunos():
- return args
+ return (args, rpath_dirs_to_remove)
# Rpaths to use while linking must be absolute. These are not
# written to the binary. Needed only with GNU ld:
@@ -610,7 +624,7 @@ class GnuLikeDynamicLinkerMixin:
for p in rpath_paths:
args.extend(self._apply_prefix('-rpath-link,' + os.path.join(build_dir, p)))
- return args
+ return (args, rpath_dirs_to_remove)
class AppleDynamicLinker(PosixDynamicLinkerMixin, DynamicLinker):
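
Callers of build_rpath_args() now unpack a tuple instead of a bare list. A
hedged sketch (the variable names are illustrative, not taken from the
backend code, and 'linker'/'env' are assumed to exist):

    rpath_args, rpath_dirs_to_remove = linker.build_rpath_args(
        env, build_dir, from_dir, rpath_paths, build_rpath, install_rpath)
    # rpath_args goes on the link command line; rpath_dirs_to_remove is a set
    # of bytes paths for depfixer to strip from installed binaries later.
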
@@ -650,8 +664,8 @@ class AppleDynamicLinker(PosixDynamicLinkerMixin, DynamicLinker):
def no_undefined_args(self) -> T.List[str]:
return self._apply_prefix('-undefined,error')
- def get_always_args(self) -> T.List[str]:
- return self._apply_prefix('-headerpad_max_install_names') + super().get_always_args()
+ def headerpad_args(self) -> T.List[str]:
+ return self._apply_prefix('-headerpad_max_install_names')
def bitcode_args(self) -> T.List[str]:
return self._apply_prefix('-bitcode_bundle')
@@ -676,12 +690,10 @@ class AppleDynamicLinker(PosixDynamicLinkerMixin, DynamicLinker):
def build_rpath_args(self, env: 'Environment', build_dir: str, from_dir: str,
rpath_paths: str, build_rpath: str,
- install_rpath: str) -> T.List[str]:
+ install_rpath: str) -> T.Tuple[T.List[str], T.Set[bytes]]:
if not rpath_paths and not install_rpath and not build_rpath:
- return []
- # Ensure that there is enough space for install_name_tool in-place
- # editing of large RPATHs
- args = self._apply_prefix('-headerpad_max_install_names')
+ return ([], set())
+ args = []
# @loader_path is the equivalent of $ORIGIN on macOS
# https://stackoverflow.com/q/26280738
origin_placeholder = '@loader_path'
@@ -692,13 +704,16 @@ class AppleDynamicLinker(PosixDynamicLinkerMixin, DynamicLinker):
for rp in all_paths:
args.extend(self._apply_prefix('-rpath,' + rp))
- return args
+ return (args, set())
class GnuDynamicLinker(GnuLikeDynamicLinkerMixin, PosixDynamicLinkerMixin, DynamicLinker):
"""Representation of GNU ld.bfd and ld.gold."""
+ def get_accepts_rsp(self) -> bool:
+ return True
+
class GnuGoldDynamicLinker(GnuDynamicLinker):
@@ -761,6 +776,11 @@ class WASMDynamicLinker(GnuLikeDynamicLinkerMixin, PosixDynamicLinkerMixin, Dyna
def get_asneeded_args(self) -> T.List[str]:
return []
+ def build_rpath_args(self, env: 'Environment', build_dir: str, from_dir: str,
+ rpath_paths: str, build_rpath: str,
+ install_rpath: str) -> T.Tuple[T.List[str], T.Set[bytes]]:
+ return ([], set())
+
class CcrxDynamicLinker(DynamicLinker):
@@ -834,8 +854,8 @@ class Xc16DynamicLinker(DynamicLinker):
def build_rpath_args(self, env: 'Environment', build_dir: str, from_dir: str,
rpath_paths: str, build_rpath: str,
- install_rpath: str) -> T.List[str]:
- return []
+ install_rpath: str) -> T.Tuple[T.List[str], T.Set[bytes]]:
+ return ([], set())
class C2000DynamicLinker(DynamicLinker):
@@ -907,6 +927,12 @@ class ArmClangDynamicLinker(ArmDynamicLinker):
def import_library_args(self, implibname: str) -> T.List[str]:
return ['--symdefs=' + implibname]
+class QualcommLLVMDynamicLinker(LLVMDynamicLinker):
+ def __init__(self, *args, **kwargs):
+ super().__init__(*args, **kwargs)
+
+ # ARM Linker from Snapdragon LLVM ARM Compiler
+ self.id = 'ld.qcld'
class PGIDynamicLinker(PosixDynamicLinkerMixin, DynamicLinker):
@@ -933,10 +959,10 @@ class PGIDynamicLinker(PosixDynamicLinkerMixin, DynamicLinker):
def build_rpath_args(self, env: 'Environment', build_dir: str, from_dir: str,
rpath_paths: str, build_rpath: str,
- install_rpath: str) -> T.List[str]:
+ install_rpath: str) -> T.Tuple[T.List[str], T.Set[bytes]]:
if not env.machines[self.for_machine].is_windows():
- return ['-R' + os.path.join(build_dir, p) for p in rpath_paths]
- return []
+ return (['-R' + os.path.join(build_dir, p) for p in rpath_paths], set())
+ return ([], set())
class PGIStaticLinker(StaticLinker):
@@ -975,16 +1001,6 @@ class VisualStudioLikeLinkerMixin:
def invoked_by_compiler(self) -> bool:
return not self.direct
- def get_debug_crt_args(self) -> T.List[str]:
- """Arguments needed to select a debug crt for the linker.
-
- Sometimes we need to manually select the CRT (C runtime) to use with
- MSVC. One example is when trying to link with static libraries since
- MSVC won't auto-select a CRT for us in that case and will error out
- asking us to select one.
- """
- return self._apply_prefix('/MDd')
-
def get_output_args(self, outputname: str) -> T.List[str]:
return self._apply_prefix(['/MACHINE:' + self.machine, '/OUT:' + outputname])
@@ -1075,6 +1091,19 @@ class SolarisDynamicLinker(PosixDynamicLinkerMixin, DynamicLinker):
return args
return self._apply_prefix('--whole-archive') + args + self._apply_prefix('--no-whole-archive')
+ def get_pie_args(self) -> T.List[str]:
+ # Available in Solaris 11.2 and later
+ pc, stdo, stde = mesonlib.Popen_safe(self.exelist + self._apply_prefix('-zhelp'))
+ for line in (stdo + stde).split('\n'):
+ if '-z type' in line:
+ if 'pie' in line:
+ return ['-z', 'type=pie']
+ break
+ return []
+
+ def get_asneeded_args(self) -> T.List[str]:
+ return self._apply_prefix(['-z', 'ignore'])
+
def no_undefined_args(self) -> T.List[str]:
return ['-z', 'defs']
@@ -1086,9 +1115,9 @@ class SolarisDynamicLinker(PosixDynamicLinkerMixin, DynamicLinker):
def build_rpath_args(self, env: 'Environment', build_dir: str, from_dir: str,
rpath_paths: str, build_rpath: str,
- install_rpath: str) -> T.List[str]:
+ install_rpath: str) -> T.Tuple[T.List[str], T.Set[bytes]]:
if not rpath_paths and not install_rpath and not build_rpath:
- return []
+ return ([], set())
processed_rpaths = prepare_rpaths(rpath_paths, build_dir, from_dir)
all_paths = mesonlib.OrderedSet([os.path.join('$ORIGIN', p) for p in processed_rpaths])
if build_rpath != '':
@@ -1103,7 +1132,7 @@ class SolarisDynamicLinker(PosixDynamicLinkerMixin, DynamicLinker):
paths = padding
else:
paths = paths + ':' + padding
- return self._apply_prefix('-rpath,{}'.format(paths))
+ return (self._apply_prefix('-rpath,{}'.format(paths)), set())
def get_soname_args(self, env: 'Environment', prefix: str, shlib_name: str,
suffix: str, soversion: str, darwin_versions: T.Tuple[str, str],
diff --git a/mesonbuild/mcompile.py b/mesonbuild/mcompile.py
index 7829ffc..9fe3a65 100644
--- a/mesonbuild/mcompile.py
+++ b/mesonbuild/mcompile.py
@@ -14,23 +14,241 @@
"""Entrypoint script for backend agnostic compile."""
-import os
-import pathlib
-import shutil
+import json
+import re
import sys
import typing as T
+from collections import defaultdict
+from pathlib import Path
from . import mlog
from . import mesonlib
+from . import coredata
from .mesonlib import MesonException
+from mesonbuild.environment import detect_ninja
+from mesonbuild.coredata import UserArrayOption
if T.TYPE_CHECKING:
import argparse
+def array_arg(value: str) -> T.List[str]:
+ return UserArrayOption(None, value, allow_dups=True, user_input=True).value
+
+def validate_builddir(builddir: Path) -> None:
+ if not (builddir / 'meson-private' / 'coredata.dat').is_file():
+ raise MesonException('Current directory is not a meson build directory: `{}`.\n'
+ 'Please specify a valid build dir or change the working directory to it.\n'
+ 'It is also possible that the build directory was generated with an old\n'
+ 'meson version. Please regenerate it in this case.'.format(builddir))
+
+def get_backend_from_coredata(builddir: Path) -> str:
+ """
+ Gets `backend` option value from coredata
+ """
+ return coredata.load(str(builddir)).get_builtin_option('backend')
+
+def parse_introspect_data(builddir: Path) -> T.Dict[str, T.List[dict]]:
+ """
+ Converts a List of name-to-dict to a dict of name-to-dicts (since names are not unique)
+ """
+ path_to_intro = builddir / 'meson-info' / 'intro-targets.json'
+ if not path_to_intro.exists():
+ raise MesonException('`{}` is missing! Directory is not configured yet?'.format(path_to_intro.name))
+ with path_to_intro.open() as f:
+ schema = json.load(f)
+
+ parsed_data = defaultdict(list) # type: T.Dict[str, T.List[dict]]
+ for target in schema:
+ parsed_data[target['name']] += [target]
+ return parsed_data
+
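+# A sketch of the reshaping this helper performs (target entries made up);
+# grouping is needed because target names are not unique across directories:
+#
+#     from collections import defaultdict
+#     schema = [{'name': 'foo', 'type': 'executable'},
+#               {'name': 'foo', 'type': 'custom'}]
+#     parsed = defaultdict(list)
+#     for target in schema:
+#         parsed[target['name']] += [target]
+#     assert len(parsed['foo']) == 2
+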
+class ParsedTargetName:
+ full_name = ''
+ name = ''
+ type = ''
+ path = ''
+
+ def __init__(self, target: str):
+ self.full_name = target
+ split = target.rsplit(':', 1)
+ if len(split) > 1:
+ self.type = split[1]
+ if not self._is_valid_type(self.type):
+ raise MesonException('Can\'t invoke target `{}`: unknown target type: `{}`'.format(target, self.type))
+
+ split = split[0].rsplit('/', 1)
+ if len(split) > 1:
+ self.path = split[0]
+ self.name = split[1]
+ else:
+ self.name = split[0]
+
+ @staticmethod
+ def _is_valid_type(type: str) -> bool:
+ # Amend docs in Commands.md when editing this list
+ allowed_types = {
+ 'executable',
+ 'static_library',
+ 'shared_library',
+ 'shared_module',
+ 'custom',
+ 'run',
+ 'jar',
+ }
+ return type in allowed_types
+
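+# Illustrative parses of the accepted TARGET syntax (inputs invented; the
+# format is [PATH_TO_TARGET/]TARGET_NAME[:TARGET_TYPE]):
+#
+#     t = ParsedTargetName('sub/dir/mylib:shared_library')
+#     assert (t.path, t.name, t.type) == ('sub/dir', 'mylib', 'shared_library')
+#     t = ParsedTargetName('mylib')
+#     assert (t.path, t.name, t.type) == ('', 'mylib', '')
+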
+def get_target_from_intro_data(target: ParsedTargetName, builddir: Path, introspect_data: dict) -> dict:
+ if target.name not in introspect_data:
+ raise MesonException('Can\'t invoke target `{}`: target not found'.format(target.full_name))
+
+ intro_targets = introspect_data[target.name]
+ found_targets = []
+
+ resolved_bdir = builddir.resolve()
+
+ if not target.type and not target.path:
+ found_targets = intro_targets
+ else:
+ for intro_target in intro_targets:
+ if (intro_target['subproject'] or
+ (target.type and target.type != intro_target['type'].replace(' ', '_')) or
+ (target.path
+ and intro_target['filename'] != 'no_name'
+ and Path(target.path) != Path(intro_target['filename'][0]).relative_to(resolved_bdir).parent)):
+ continue
+ found_targets += [intro_target]
+
+ if not found_targets:
+ raise MesonException('Can\'t invoke target `{}`: target not found'.format(target.full_name))
+ elif len(found_targets) > 1:
+ raise MesonException('Can\'t invoke target `{}`: ambiguous name. Add target type and/or path: `PATH/NAME:TYPE`'.format(target.full_name))
+
+ return found_targets[0]
+
+def generate_target_names_ninja(target: ParsedTargetName, builddir: Path, introspect_data: dict) -> T.List[str]:
+ intro_target = get_target_from_intro_data(target, builddir, introspect_data)
+
+ if intro_target['type'] == 'run':
+ return [target.name]
+ else:
+ return [str(Path(out_file).relative_to(builddir.resolve())) for out_file in intro_target['filename']]
+
+def get_parsed_args_ninja(options: 'argparse.Namespace', builddir: Path) -> T.List[str]:
+ runner = detect_ninja()
+ if runner is None:
+ raise MesonException('Cannot find ninja.')
+ mlog.log('Found runner:', runner)
+
+ cmd = [runner, '-C', builddir.as_posix()]
+
+ if options.targets:
+ intro_data = parse_introspect_data(builddir)
+ for t in options.targets:
+ cmd.extend(generate_target_names_ninja(ParsedTargetName(t), builddir, intro_data))
+ if options.clean:
+ cmd.append('clean')
+
+ # If the value is set to < 1 then don't set anything, which lets
+ # ninja/samu decide what to do.
+ if options.jobs > 0:
+ cmd.extend(['-j', str(options.jobs)])
+ if options.load_average > 0:
+ cmd.extend(['-l', str(options.load_average)])
+
+ if options.verbose:
+ cmd.append('--verbose')
+
+ cmd += options.ninja_args
+
+ return cmd
+
+def generate_target_name_vs(target: ParsedTargetName, builddir: Path, introspect_data: dict) -> str:
+ intro_target = get_target_from_intro_data(target, builddir, introspect_data)
+
+ assert intro_target['type'] != 'run', 'Should not reach here: `run` targets must be handled above'
+
+ # Normalize project name
+ # Source: https://docs.microsoft.com/en-us/visualstudio/msbuild/how-to-build-specific-targets-in-solutions-by-using-msbuild-exe
+ target_name = re.sub(r'[\%\$\@\;\.\(\)\']', '_', intro_target['id'])
+ rel_path = Path(intro_target['filename'][0]).relative_to(builddir.resolve()).parent
+ if rel_path != Path('.'):
+ target_name = str(rel_path / target_name)
+ return target_name
+
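+# An example of the normalization above on a hypothetical target id; every
+# character msbuild treats specially becomes an underscore:
+#
+#     import re
+#     assert re.sub(r'[\%\$\@\;\.\(\)\']', '_', 'my.target(x)') == 'my_target_x_'
+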
+def get_parsed_args_vs(options: 'argparse.Namespace', builddir: Path) -> T.List[str]:
+ slns = list(builddir.glob('*.sln'))
+ assert len(slns) == 1, 'More than one solution in a project?'
+ sln = slns[0]
+
+ cmd = ['msbuild']
+
+ if options.targets:
+ intro_data = parse_introspect_data(builddir)
+ has_run_target = any(map(
+ lambda t:
+ get_target_from_intro_data(ParsedTargetName(t), builddir, intro_data)['type'] == 'run',
+ options.targets
+ ))
+
+ if has_run_target:
+ # `run` target can't be used the same way as other targets on `vs` backend.
+ # They are defined as disabled projects, which can't be invoked as `.sln`
+ # target and have to be invoked directly as project instead.
+ # Issue: https://github.com/microsoft/msbuild/issues/4772
+
+ if len(options.targets) > 1:
+ raise MesonException('Only one target may be specified when `run` target type is used on this backend.')
+ intro_target = get_target_from_intro_data(ParsedTargetName(options.targets[0]), builddir, intro_data)
+ proj_dir = Path(intro_target['filename'][0]).parent
+ proj = proj_dir/'{}.vcxproj'.format(intro_target['id'])
+ cmd += [str(proj.resolve())]
+ else:
+ cmd += [str(sln.resolve())]
+ cmd.extend(['-target:{}'.format(generate_target_name_vs(ParsedTargetName(t), builddir, intro_data)) for t in options.targets])
+ else:
+ cmd += [str(sln.resolve())]
+
+ if options.clean:
+ cmd.extend(['-target:Clean'])
+
+ # In msbuild `-maxCpuCount` with no number means "detect cpus"; the default is `-maxCpuCount:1`
+ if options.jobs > 0:
+ cmd.append('-maxCpuCount:{}'.format(options.jobs))
+ else:
+ cmd.append('-maxCpuCount')
+
+ if options.load_average:
+ mlog.warning('Msbuild does not have a load-average switch, ignoring.')
+
+ if not options.verbose:
+ cmd.append('-verbosity:minimal')
+
+ cmd += options.vs_args
+
+ return cmd
def add_arguments(parser: 'argparse.ArgumentParser') -> None:
"""Add compile specific arguments."""
parser.add_argument(
+ 'targets',
+ metavar='TARGET',
+ nargs='*',
+ default=None,
+ help='Targets to build. Target has the following format: [PATH_TO_TARGET/]TARGET_NAME[:TARGET_TYPE].')
+ parser.add_argument(
+ '--clean',
+ action='store_true',
+ help='Clean the build directory.'
+ )
+ parser.add_argument(
+ '-C',
+ action='store',
+ dest='builddir',
+ type=Path,
+ default='.',
+ help='The directory containing build files to be built.'
+ )
+ parser.add_argument(
'-j', '--jobs',
action='store',
default=0,
@@ -42,80 +260,44 @@ def add_arguments(parser: 'argparse.ArgumentParser') -> None:
action='store',
default=0,
type=int,
- help='The system load average to try to maintain (if supported)'
+ help='The system load average to try to maintain (if supported).'
)
parser.add_argument(
- '--clean',
+ '--verbose',
action='store_true',
- help='Clean the build directory.'
+ help='Show more verbose output.'
)
parser.add_argument(
- '-C',
- action='store',
- dest='builddir',
- type=pathlib.Path,
- default='.',
- help='The directory containing build files to be built.'
+ '--ninja-args',
+ type=array_arg,
+ default=[],
+ help='Arguments to pass to `ninja` (applied only on `ninja` backend).'
+ )
+ parser.add_argument(
+ '--vs-args',
+ type=array_arg,
+ default=[],
+ help='Arguments to pass to `msbuild` (applied only on `vs` backend).'
)
-
def run(options: 'argparse.Namespace') -> int:
- bdir = options.builddir # type: pathlib.Path
- if not bdir.exists():
- raise MesonException('Path to builddir {} does not exist!'.format(str(bdir.resolve())))
- if not bdir.is_dir():
- raise MesonException('builddir path should be a directory.')
+ bdir = options.builddir # type: Path
+ validate_builddir(bdir.resolve())
cmd = [] # type: T.List[str]
- runner = None # type T.Optional[str]
- slns = list(bdir.glob('*.sln'))
-
- if (bdir / 'build.ninja').exists():
- runner = os.environ.get('NINJA')
- if not runner:
- if shutil.which('ninja'):
- runner = 'ninja'
- elif shutil.which('samu'):
- runner = 'samu'
-
- if runner is None:
- raise MesonException('Cannot find either ninja or samu.')
-
- cmd = [runner, '-C', bdir.as_posix()]
-
- # If the value is set to < 1 then don't set anything, which let's
- # ninja/samu decide what to do.
- if options.jobs > 0:
- cmd.extend(['-j', str(options.jobs)])
- if options.load_average > 0:
- cmd.extend(['-l', str(options.load_average)])
- if options.clean:
- cmd.append('clean')
-
- # TODO: with python 3.8 this could be `elif slns := bdir.glob('*.sln'):`
- elif slns:
- assert len(slns) == 1, 'More than one solution in a project?'
-
- sln = slns[0]
- cmd = ['msbuild', str(sln.resolve())]
-
- # In msbuild `-m` with no number means "detect cpus", the default is `-m1`
- if options.jobs > 0:
- cmd.append('-m{}'.format(options.jobs))
- else:
- cmd.append('-m')
- if options.load_average:
- mlog.warning('Msbuild does not have a load-average switch, ignoring.')
- if options.clean:
- cmd.extend(['/t:Clean'])
+ if options.targets and options.clean:
+ raise MesonException('`TARGET` and `--clean` can\'t be used simultaneously')
- # TODO: xcode?
+ backend = get_backend_from_coredata(bdir)
+ if backend == 'ninja':
+ cmd = get_parsed_args_ninja(options, bdir)
+ elif backend.startswith('vs'):
+ cmd = get_parsed_args_vs(options, bdir)
else:
+ # TODO: xcode?
raise MesonException(
- 'Could not find any runner or backend for directory {}'.format(bdir.resolve().as_posix()))
-
- mlog.log('Found runner:', runner)
+ 'Backend `{}` is not yet supported by `compile`. Use generated project files directly instead.'.format(backend))
p, *_ = mesonlib.Popen_safe(cmd, stdout=sys.stdout.buffer, stderr=sys.stderr.buffer)
diff --git a/mesonbuild/mconf.py b/mesonbuild/mconf.py
index 6c450da..b38df85 100644
--- a/mesonbuild/mconf.py
+++ b/mesonbuild/mconf.py
@@ -97,9 +97,9 @@ class Conf:
else:
print('{0:{width[0]}} {1:{width[1]}} {3}'.format(*line, width=col_widths))
- def split_options_per_subproject(self, options_iter):
+ def split_options_per_subproject(self, options):
result = {}
- for k, o in options_iter:
+ for k, o in options.items():
subproject = ''
if ':' in k:
subproject, optname = k.split(':')
@@ -184,19 +184,7 @@ class Conf:
if not self.default_values_only:
print(' Build dir ', self.build_dir)
- dir_option_names = ['bindir',
- 'datadir',
- 'includedir',
- 'infodir',
- 'libdir',
- 'libexecdir',
- 'localedir',
- 'localstatedir',
- 'mandir',
- 'prefix',
- 'sbindir',
- 'sharedstatedir',
- 'sysconfdir']
+ dir_option_names = list(coredata.BUILTIN_DIR_OPTIONS)
test_option_names = ['errorlogs',
'stdsplit']
core_option_names = [k for k in self.coredata.builtins if k not in dir_option_names + test_option_names]
@@ -211,15 +199,12 @@ class Conf:
return 'build.' + k
return k[:idx + 1] + 'build.' + k[idx + 1:]
- core_options = self.split_options_per_subproject(core_options.items())
- host_compiler_options = self.split_options_per_subproject(
- self.coredata.flatten_lang_iterator(
- self.coredata.compiler_options.host.items()))
+ core_options = self.split_options_per_subproject(core_options)
build_compiler_options = self.split_options_per_subproject(
- (insert_build_prefix(k), o)
- for k, o in self.coredata.flatten_lang_iterator(
- self.coredata.compiler_options.build.items()))
- project_options = self.split_options_per_subproject(self.coredata.user_options.items())
+ dict((insert_build_prefix(k), o)
+ for k, o in self.coredata.flatten_lang_iterator(
+ self.coredata.compiler_options.build.items())))
+ project_options = self.split_options_per_subproject(self.coredata.user_options)
show_build_options = self.default_values_only or self.build.environment.is_cross_build()
self.add_section('Main project options')
diff --git a/mesonbuild/mdist.py b/mesonbuild/mdist.py
index b324f76..9d94ace 100644
--- a/mesonbuild/mdist.py
+++ b/mesonbuild/mdist.py
@@ -213,7 +213,7 @@ def check_dist(packagename, meson_command, extra_meson_args, bld_root, privdir):
unpacked_src_dir = unpacked_files[0]
with open(os.path.join(bld_root, 'meson-info', 'intro-buildoptions.json')) as boptions:
meson_command += ['-D{name}={value}'.format(**o) for o in json.load(boptions)
- if o['name'] not in ['backend', 'install_umask']]
+ if o['name'] not in ['backend', 'install_umask', 'buildtype']]
meson_command += extra_meson_args
ret = run_dist_steps(meson_command, unpacked_src_dir, builddir, installdir, ninja_bin)
@@ -259,7 +259,7 @@ def run(options):
if options.include_subprojects:
subproject_dir = os.path.join(src_root, b.subproject_dir)
for sub in b.subprojects:
- _, directory = wrap.get_directory(subproject_dir, sub)
+ directory = wrap.get_directory(subproject_dir, sub)
subprojects.append(os.path.join(b.subproject_dir, directory))
extra_meson_args.append('-Dwrap_mode=nodownload')
diff --git a/mesonbuild/mesondata.py b/mesonbuild/mesondata.py
new file mode 100644
index 0000000..1f223c2
--- /dev/null
+++ b/mesonbuild/mesondata.py
@@ -0,0 +1,374 @@
+# Copyright 2020 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+####
+#### WARNING: This is an automatically generated file! Do not edit!
+#### Generated by tools/gen_data.py
+####
+
+
+from pathlib import Path
+import typing as T
+
+if T.TYPE_CHECKING:
+ from .environment import Environment
+
+######################
+# BEGIN Data section #
+######################
+
+file_0_data_CMakeListsLLVM_txt = '''\
+cmake_minimum_required(VERSION ${CMAKE_MAJOR_VERSION}.${CMAKE_MINOR_VERSION}.${CMAKE_PATCH_VERSION} )
+
+set(PACKAGE_FOUND FALSE)
+
+while(TRUE)
+ find_package(LLVM REQUIRED CONFIG QUIET)
+
+ # ARCHS has to be set via the CMD interface
+ if(LLVM_FOUND OR "${ARCHS}" STREQUAL "")
+ break()
+ endif()
+
+ list(GET ARCHS 0 CMAKE_LIBRARY_ARCHITECTURE)
+ list(REMOVE_AT ARCHS 0)
+endwhile()
+
+if(LLVM_FOUND)
+ set(PACKAGE_FOUND TRUE)
+
+ foreach(mod IN LISTS LLVM_MESON_MODULES)
+ # Reset variables
+ set(out_mods)
+ set(real_mods)
+
+ # Generate a lower and upper case version
+ string(TOLOWER "${mod}" mod_L)
+ string(TOUPPER "${mod}" mod_U)
+
+ # Get the mapped components
+ llvm_map_components_to_libnames(out_mods ${mod} ${mod_L} ${mod_U})
+ list(SORT out_mods)
+ list(REMOVE_DUPLICATES out_mods)
+
+ # Make sure that the modules exist
+ foreach(i IN LISTS out_mods)
+ if(TARGET ${i})
+ list(APPEND real_mods ${i})
+ endif()
+ endforeach()
+
+ # Set the output variables
+ set(MESON_LLVM_TARGETS_${mod} ${real_mods})
+ foreach(i IN LISTS real_mods)
+ set(MESON_TARGET_TO_LLVM_${i} ${mod})
+ endforeach()
+ endforeach()
+
+ # Check the following variables:
+ # LLVM_PACKAGE_VERSION
+ # LLVM_VERSION
+ # LLVM_VERSION_STRING
+ if(NOT DEFINED PACKAGE_VERSION)
+ if(DEFINED LLVM_PACKAGE_VERSION)
+ set(PACKAGE_VERSION "${LLVM_PACKAGE_VERSION}")
+ elseif(DEFINED LLVM_VERSION)
+ set(PACKAGE_VERSION "${LLVM_VERSION}")
+ elseif(DEFINED LLVM_VERSION_STRING)
+ set(PACKAGE_VERSION "${LLVM_VERSION_STRING}")
+ endif()
+ endif()
+
+ # Check the following variables:
+ # LLVM_LIBRARIES
+ # LLVM_LIBS
+ set(libs)
+ if(DEFINED LLVM_LIBRARIES)
+ set(libs LLVM_LIBRARIES)
+ elseif(DEFINED LLVM_LIBS)
+ set(libs LLVM_LIBS)
+ endif()
+
+ # Check the following variables:
+ # LLVM_INCLUDE_DIRS
+ # LLVM_INCLUDES
+ # LLVM_INCLUDE_DIR
+ set(includes)
+ if(DEFINED LLVM_INCLUDE_DIRS)
+ set(includes LLVM_INCLUDE_DIRS)
+ elseif(DEFINED LLVM_INCLUDES)
+ set(includes LLVM_INCLUDES)
+ elseif(DEFINED LLVM_INCLUDE_DIR)
+ set(includes LLVM_INCLUDE_DIR)
+ endif()
+
+ # Check the following variables:
+ # LLVM_DEFINITIONS
+ set(definitions)
+ if(DEFINED LLVM_DEFINITIONS)
+ set(definitions LLVM_DEFINITIONS)
+ endif()
+
+ set(PACKAGE_INCLUDE_DIRS "${${includes}}")
+ set(PACKAGE_DEFINITIONS "${${definitions}}")
+ set(PACKAGE_LIBRARIES "${${libs}}")
+endif()
+'''
+
+file_1_data_CMakePathInfo_txt = '''\
+cmake_minimum_required(VERSION ${CMAKE_MAJOR_VERSION}.${CMAKE_MINOR_VERSION}.${CMAKE_PATCH_VERSION})
+
+set(TMP_PATHS_LIST)
+list(APPEND TMP_PATHS_LIST ${CMAKE_PREFIX_PATH})
+list(APPEND TMP_PATHS_LIST ${CMAKE_FRAMEWORK_PATH})
+list(APPEND TMP_PATHS_LIST ${CMAKE_APPBUNDLE_PATH})
+list(APPEND TMP_PATHS_LIST $ENV{CMAKE_PREFIX_PATH})
+list(APPEND TMP_PATHS_LIST $ENV{CMAKE_FRAMEWORK_PATH})
+list(APPEND TMP_PATHS_LIST $ENV{CMAKE_APPBUNDLE_PATH})
+list(APPEND TMP_PATHS_LIST ${CMAKE_SYSTEM_PREFIX_PATH})
+list(APPEND TMP_PATHS_LIST ${CMAKE_SYSTEM_FRAMEWORK_PATH})
+list(APPEND TMP_PATHS_LIST ${CMAKE_SYSTEM_APPBUNDLE_PATH})
+
+set(LIB_ARCH_LIST)
+if(CMAKE_LIBRARY_ARCHITECTURE_REGEX)
+ file(GLOB implicit_dirs RELATIVE /lib /lib/*-linux-gnu* )
+ foreach(dir ${implicit_dirs})
+ if("${dir}" MATCHES "${CMAKE_LIBRARY_ARCHITECTURE_REGEX}")
+ list(APPEND LIB_ARCH_LIST "${dir}")
+ endif()
+ endforeach()
+endif()
+
+# "Export" these variables:
+set(MESON_ARCH_LIST ${LIB_ARCH_LIST})
+set(MESON_PATHS_LIST ${TMP_PATHS_LIST})
+set(MESON_CMAKE_ROOT ${CMAKE_ROOT})
+set(MESON_CMAKE_SYSROOT ${CMAKE_SYSROOT})
+set(MESON_FIND_ROOT_PATH ${CMAKE_FIND_ROOT_PATH})
+
+message(STATUS ${TMP_PATHS_LIST})
+'''
+
+file_2_data_CMakeLists_txt = '''\
+# fail noisily if attempt to use this file without setting:
+# cmake_minimum_required(VERSION ${CMAKE_VERSION})
+# project(... LANGUAGES ...)
+
+cmake_policy(SET CMP0000 NEW)
+
+set(PACKAGE_FOUND FALSE)
+set(_packageName "${NAME}")
+string(TOUPPER "${_packageName}" PACKAGE_NAME)
+
+while(TRUE)
+ find_package("${NAME}" QUIET COMPONENTS ${COMPS})
+
+ # ARCHS has to be set via the CMD interface
+ if(${_packageName}_FOUND OR ${PACKAGE_NAME}_FOUND OR "${ARCHS}" STREQUAL "")
+ break()
+ endif()
+
+ list(GET ARCHS 0 CMAKE_LIBRARY_ARCHITECTURE)
+ list(REMOVE_AT ARCHS 0)
+endwhile()
+
+if(${_packageName}_FOUND OR ${PACKAGE_NAME}_FOUND)
+ set(PACKAGE_FOUND TRUE)
+
+ # Check the following variables:
+ # FOO_VERSION
+ # Foo_VERSION
+ # FOO_VERSION_STRING
+ # Foo_VERSION_STRING
+ if(NOT DEFINED PACKAGE_VERSION)
+ if(DEFINED ${_packageName}_VERSION)
+ set(PACKAGE_VERSION "${${_packageName}_VERSION}")
+ elseif(DEFINED ${PACKAGE_NAME}_VERSION)
+ set(PACKAGE_VERSION "${${PACKAGE_NAME}_VERSION}")
+ elseif(DEFINED ${_packageName}_VERSION_STRING)
+ set(PACKAGE_VERSION "${${_packageName}_VERSION_STRING}")
+ elseif(DEFINED ${PACKAGE_NAME}_VERSION_STRING)
+ set(PACKAGE_VERSION "${${PACKAGE_NAME}_VERSION_STRING}")
+ endif()
+ endif()
+
+ # Check the following variables:
+ # FOO_LIBRARIES
+ # Foo_LIBRARIES
+ # FOO_LIBS
+ # Foo_LIBS
+ set(libs)
+ if(DEFINED ${_packageName}_LIBRARIES)
+ set(libs ${_packageName}_LIBRARIES)
+ elseif(DEFINED ${PACKAGE_NAME}_LIBRARIES)
+ set(libs ${PACKAGE_NAME}_LIBRARIES)
+ elseif(DEFINED ${_packageName}_LIBS)
+ set(libs ${_packageName}_LIBS)
+ elseif(DEFINED ${PACKAGE_NAME}_LIBS)
+ set(libs ${PACKAGE_NAME}_LIBS)
+ endif()
+
+ # Check the following variables:
+ # FOO_INCLUDE_DIRS
+ # Foo_INCLUDE_DIRS
+ # FOO_INCLUDES
+ # Foo_INCLUDES
+ # FOO_INCLUDE_DIR
+ # Foo_INCLUDE_DIR
+ set(includes)
+ if(DEFINED ${_packageName}_INCLUDE_DIRS)
+ set(includes ${_packageName}_INCLUDE_DIRS)
+ elseif(DEFINED ${PACKAGE_NAME}_INCLUDE_DIRS)
+ set(includes ${PACKAGE_NAME}_INCLUDE_DIRS)
+ elseif(DEFINED ${_packageName}_INCLUDES)
+ set(includes ${_packageName}_INCLUDES)
+ elseif(DEFINED ${PACKAGE_NAME}_INCLUDES)
+ set(includes ${PACKAGE_NAME}_INCLUDES)
+ elseif(DEFINED ${_packageName}_INCLUDE_DIR)
+ set(includes ${_packageName}_INCLUDE_DIR)
+ elseif(DEFINED ${PACKAGE_NAME}_INCLUDE_DIR)
+ set(includes ${PACKAGE_NAME}_INCLUDE_DIR)
+ endif()
+
+ # Check the following variables:
+ # FOO_DEFINITIONS
+ # Foo_DEFINITIONS
+ set(definitions)
+ if(DEFINED ${_packageName}_DEFINITIONS)
+ set(definitions ${_packageName}_DEFINITIONS)
+ elseif(DEFINED ${PACKAGE_NAME}_DEFINITIONS)
+ set(definitions ${PACKAGE_NAME}_DEFINITIONS)
+ endif()
+
+ set(PACKAGE_INCLUDE_DIRS "${${includes}}")
+ set(PACKAGE_DEFINITIONS "${${definitions}}")
+ set(PACKAGE_LIBRARIES "${${libs}}")
+endif()
+'''
+
+file_3_data_preload_cmake = '''\
+if(MESON_PS_LOADED)
+ return()
+endif()
+
+set(MESON_PS_LOADED ON)
+
+# Dummy macros that have a special meaning in the meson code
+macro(meson_ps_execute_delayed_calls)
+endmacro()
+
+macro(meson_ps_reload_vars)
+endmacro()
+
+# Helper macro to inspect the current CMake state
+macro(meson_ps_inspect_vars)
+ set(MESON_PS_CMAKE_CURRENT_BINARY_DIR "${CMAKE_CURRENT_BINARY_DIR}")
+ set(MESON_PS_CMAKE_CURRENT_SOURCE_DIR "${CMAKE_CURRENT_SOURCE_DIR}")
+ meson_ps_execute_delayed_calls()
+endmacro()
+
+
+# Override some system functions with custom code and forward the args
+# to the original function
+macro(add_custom_command)
+ meson_ps_inspect_vars()
+ _add_custom_command(${ARGV})
+endmacro()
+
+macro(add_custom_target)
+ meson_ps_inspect_vars()
+ _add_custom_target(${ARGV})
+endmacro()
+
+macro(set_property)
+ meson_ps_inspect_vars()
+ _set_property(${ARGV})
+endmacro()
+
+function(set_source_files_properties)
+ set(FILES)
+ set(I 0)
+ set(PROPERTIES OFF)
+
+ while(I LESS ARGC)
+ if(NOT PROPERTIES)
+ if("${ARGV${I}}" STREQUAL "PROPERTIES")
+ set(PROPERTIES ON)
+ else()
+ list(APPEND FILES "${ARGV${I}}")
+ endif()
+
+ math(EXPR I "${I} + 1")
+ else()
+ set(ID_IDX ${I})
+ math(EXPR PROP_IDX "${ID_IDX} + 1")
+
+ set(ID "${ARGV${ID_IDX}}")
+ set(PROP "${ARGV${PROP_IDX}}")
+
+ set_property(SOURCE ${FILES} PROPERTY "${ID}" "${PROP}")
+ math(EXPR I "${I} + 2")
+ endif()
+ endwhile()
+endfunction()
+
+set(MESON_PS_DELAYED_CALLS add_custom_command;add_custom_target;set_property)
+meson_ps_reload_vars()
+'''
+
+
+####################
+# END Data section #
+####################
+
+class DataFile:
+ def __init__(self, path: Path, sha256sum: str, data: str) -> None:
+ self.path = path
+ self.sha256sum = sha256sum
+ self.data = data
+
+ def write_once(self, path: Path) -> None:
+ if not path.exists():
+ path.write_text(self.data)
+
+ def write_to_private(self, env: 'Environment') -> Path:
+ out_file = Path(env.scratch_dir) / 'data' / self.path.name
+ out_file.parent.mkdir(exist_ok=True)
+ self.write_once(out_file)
+ return out_file
+
+
+mesondata = {
+ 'dependencies/data/CMakeListsLLVM.txt': DataFile(
+ Path('dependencies/data/CMakeListsLLVM.txt'),
+ '412cec3315597041a978d018cdaca282dcd47693793540da88ae2f80d0cbd7cd',
+ file_0_data_CMakeListsLLVM_txt,
+ ),
+ 'dependencies/data/CMakePathInfo.txt': DataFile(
+ Path('dependencies/data/CMakePathInfo.txt'),
+ '90da8b443982d9c87139b7dc84228eb58cab4315764949637208f25e2bda7db2',
+ file_1_data_CMakePathInfo_txt,
+ ),
+ 'dependencies/data/CMakeLists.txt': DataFile(
+ Path('dependencies/data/CMakeLists.txt'),
+ '71a2d58381f912bbfb1c8709884d34d721f682edf2fca001e1f582f0bffd0da7',
+ file_2_data_CMakeLists_txt,
+ ),
+ 'cmake/data/preload.cmake': DataFile(
+ Path('cmake/data/preload.cmake'),
+ '064d047b18a5c919ad016b838bed50c5d40aebe9e53da0e70eff9d52a2c1ca1f',
+ file_3_data_preload_cmake,
+ ),
+}
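+
+# A hedged usage sketch for the DataFile table above ('env' stands for an
+# already-configured Environment; the key is one of the entries in mesondata):
+#
+#     from mesonbuild.mesondata import mesondata
+#     out = mesondata['cmake/data/preload.cmake'].write_to_private(env)
+#     # 'out' is <scratch_dir>/data/preload.cmake; write_once() only writes
+#     # the file if it does not already exist.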
diff --git a/mesonbuild/mesonlib.py b/mesonbuild/mesonlib.py
index 73a9139..e0f48f5 100644
--- a/mesonbuild/mesonlib.py
+++ b/mesonbuild/mesonlib.py
@@ -41,8 +41,10 @@ _U = T.TypeVar('_U')
have_fcntl = False
have_msvcrt = False
+# TODO: this is such a hack; this really should live either in coredata or in
+# the interpreter
# {subproject: project_meson_version}
-project_meson_versions = {} # type: T.Dict[str, str]
+project_meson_versions = collections.defaultdict(str) # type: T.DefaultDict[str, str]
try:
import fcntl
@@ -395,6 +397,9 @@ class PerMachine(T.Generic[_T]):
unfreeze.host = None
return unfreeze
+ def __repr__(self) -> str:
+ return 'PerMachine({!r}, {!r})'.format(self.build, self.host)
+
class PerThreeMachine(PerMachine[_T]):
"""Like `PerMachine` but includes `target` too.
@@ -427,6 +432,9 @@ class PerThreeMachine(PerMachine[_T]):
def matches_build_machine(self, machine: MachineChoice) -> bool:
return self.build == self[machine]
+ def __repr__(self) -> str:
+ return 'PerThreeMachine({!r}, {!r}, {!r})'.format(self.build, self.host, self.target)
+
class PerMachineDefaultable(PerMachine[T.Optional[_T]]):
"""Extends `PerMachine` with the ability to default from `None`s.
@@ -445,6 +453,9 @@ class PerMachineDefaultable(PerMachine[T.Optional[_T]]):
freeze.host = freeze.build
return freeze
+ def __repr__(self) -> str:
+ return 'PerMachineDefaultable({!r}, {!r})'.format(self.build, self.host)
+
class PerThreeMachineDefaultable(PerMachineDefaultable, PerThreeMachine[T.Optional[_T]]):
"""Extends `PerThreeMachine` with the ability to default from `None`s.
@@ -466,6 +477,9 @@ class PerThreeMachineDefaultable(PerMachineDefaultable, PerThreeMachine[T.Option
freeze.target = freeze.host
return freeze
+ def __repr__(self) -> str:
+ return 'PerThreeMachineDefaultable({!r}, {!r}, {!r})'.format(self.build, self.host, self.target)
+
class Language(Enum):
@@ -584,6 +598,8 @@ def is_netbsd() -> bool:
def is_freebsd() -> bool:
return platform.system().lower() == 'freebsd'
+def is_irix() -> bool:
+ return platform.system().lower().startswith('irix')
def is_hurd() -> bool:
return platform.system().lower() == 'gnu'
@@ -619,20 +635,24 @@ def darwin_get_object_archs(objpath: str) -> T.List[str]:
return stdo.split()
-def detect_vcs(source_dir: str) -> T.Optional[T.Dict[str, str]]:
+def detect_vcs(source_dir: T.Union[str, Path]) -> T.Optional[T.Dict[str, str]]:
vcs_systems = [
dict(name = 'git', cmd = 'git', repo_dir = '.git', get_rev = 'git describe --dirty=+', rev_regex = '(.*)', dep = '.git/logs/HEAD'),
dict(name = 'mercurial', cmd = 'hg', repo_dir = '.hg', get_rev = 'hg id -i', rev_regex = '(.*)', dep = '.hg/dirstate'),
dict(name = 'subversion', cmd = 'svn', repo_dir = '.svn', get_rev = 'svn info', rev_regex = 'Revision: (.*)', dep = '.svn/wc.db'),
dict(name = 'bazaar', cmd = 'bzr', repo_dir = '.bzr', get_rev = 'bzr revno', rev_regex = '(.*)', dep = '.bzr'),
]
- # FIXME: this is much cleaner with pathlib.Path
- segs = source_dir.replace('\\', '/').split('/')
- for i in range(len(segs), -1, -1):
- curdir = '/'.join(segs[:i])
+ if isinstance(source_dir, str):
+ source_dir = Path(source_dir)
+
+ parent_paths_and_self = collections.deque(source_dir.parents)
+ # Prepend the source directory itself so it is checked too;
+ # source_dir.parents does not include source_dir
+ parent_paths_and_self.appendleft(source_dir)
+ for curdir in parent_paths_and_self:
for vcs in vcs_systems:
- if os.path.isdir(os.path.join(curdir, vcs['repo_dir'])) and shutil.which(vcs['cmd']):
- vcs['wc_dir'] = curdir
+ if Path.is_dir(curdir.joinpath(vcs['repo_dir'])) and shutil.which(vcs['cmd']):
+ vcs['wc_dir'] = str(curdir)
return vcs
return None
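
Note: the pathlib rewrite checks the source directory itself and then every ancestor, since Path.parents excludes the path itself. The traversal pattern in isolation (paths here are illustrative):

    from pathlib import Path

    d = Path('/home/user/project/subdir')
    for curdir in [d, *d.parents]:       # subdir, project, user, home, /
        if (curdir / '.git').is_dir():
            print('git checkout rooted at', curdir)
            break
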
@@ -804,7 +824,7 @@ def default_libdir() -> str:
return 'lib/' + archpath
except Exception:
pass
- if is_freebsd():
+ if is_freebsd() or is_irix():
return 'lib'
if os.path.isdir('/usr/lib64') and not os.path.islink('/usr/lib64'):
return 'lib64'
@@ -1610,6 +1630,16 @@ def relpath(path: str, start: str) -> str:
except (TypeError, ValueError):
return path
+def path_is_in_root(path: Path, root: Path, resolve: bool = False) -> bool:
+ # Check whether a path is within the given root directory
+ try:
+ if resolve:
+ path.resolve().relative_to(root.resolve())
+ else:
+ path.relative_to(root)
+ except ValueError:
+ return False
+ return True
class LibType(Enum):
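
Note: Path.relative_to raises ValueError when the path is not under the given root; path_is_in_root just converts that into a boolean. Quick usage sketch (assumes the helper is importable from mesonlib):

    from pathlib import Path
    from mesonbuild.mesonlib import path_is_in_root

    print(path_is_in_root(Path('/srv/build/sub/a.c'), Path('/srv/build')))  # True
    print(path_is_in_root(Path('/etc/passwd'), Path('/srv/build')))         # False
    # resolve=True first resolves symlinks and '..' on both sides.
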
diff --git a/mesonbuild/minit.py b/mesonbuild/minit.py
index c636053..191c735 100644
--- a/mesonbuild/minit.py
+++ b/mesonbuild/minit.py
@@ -56,8 +56,8 @@ class DEFAULT_TYPES(Enum):
INFO_MESSAGE = '''Sample project created. To build it run the
following commands:
-meson builddir
-ninja -C builddir
+meson setup builddir
+meson compile -C builddir
'''
@@ -151,7 +151,7 @@ def add_arguments(parser):
parser.add_argument("-n", "--name", help="project name. default: name of current directory")
parser.add_argument("-e", "--executable", help="executable name. default: project name")
parser.add_argument("-d", "--deps", help="dependencies, comma-separated")
- parser.add_argument("-l", "--language", choices=LANG_SUPPORTED, help="project language. default: autodetected based on source files")
+ parser.add_argument("-l", "--language", choices=sorted(LANG_SUPPORTED), help="project language. default: autodetected based on source files")
parser.add_argument("-b", "--build", action='store_true', help="build after generation")
parser.add_argument("--builddir", default='build', help="directory for build")
parser.add_argument("-f", "--force", action="store_true", help="force overwrite of existing files and directories.")
diff --git a/mesonbuild/minstall.py b/mesonbuild/minstall.py
index 9c64429..0be01fe 100644
--- a/mesonbuild/minstall.py
+++ b/mesonbuild/minstall.py
@@ -512,7 +512,7 @@ class Installer:
if file_copied:
self.did_install_something = True
try:
- depfixer.fix_rpath(outname, install_rpath, final_path,
+ depfixer.fix_rpath(outname, t.rpath_dirs_to_remove, install_rpath, final_path,
install_name_mappings, verbose=False)
except SystemExit as e:
if isinstance(e.code, int) and e.code == 0:
diff --git a/mesonbuild/mintro.py b/mesonbuild/mintro.py
index d5516d4..0049bbd 100644
--- a/mesonbuild/mintro.py
+++ b/mesonbuild/mintro.py
@@ -19,10 +19,11 @@ tests and so on. All output is in JSON for simple parsing.
Currently only works for the Ninja backend. Others use generated
project files and don't need this info."""
+import collections
import json
from . import build, coredata as cdata
from . import mesonlib
-from .ast import IntrospectionInterpreter, build_target_functions, AstConditionLevel, AstIDGenerator, AstIndentationGenerator
+from .ast import IntrospectionInterpreter, build_target_functions, AstConditionLevel, AstIDGenerator, AstIndentationGenerator, AstJSONPrinter
from . import mlog
from .backend import backends
from .mparser import BaseNode, FunctionNode, ArrayNode, ArgumentNode, StringNode
@@ -52,7 +53,7 @@ class IntroCommand:
def get_meson_introspection_types(coredata: T.Optional[cdata.CoreData] = None,
builddata: T.Optional[build.Build] = None,
backend: T.Optional[backends.Backend] = None,
- sourcedir: T.Optional[str] = None) -> T.Dict[str, IntroCommand]:
+ sourcedir: T.Optional[str] = None) -> 'T.Mapping[str, IntroCommand]':
if backend and builddata:
benchmarkdata = backend.create_test_serialisation(builddata.get_benchmarks())
testdata = backend.create_test_serialisation(builddata.get_tests())
@@ -61,17 +62,19 @@ def get_meson_introspection_types(coredata: T.Optional[cdata.CoreData] = None,
else:
benchmarkdata = testdata = installdata = None
- return {
- 'benchmarks': IntroCommand('List all benchmarks', func=lambda: list_benchmarks(benchmarkdata)),
- 'buildoptions': IntroCommand('List all build options', func=lambda: list_buildoptions(coredata), no_bd=list_buildoptions_from_source),
- 'buildsystem_files': IntroCommand('List files that make up the build system', func=lambda: list_buildsystem_files(builddata, interpreter)),
- 'dependencies': IntroCommand('List external dependencies', func=lambda: list_deps(coredata), no_bd=list_deps_from_source),
- 'scan_dependencies': IntroCommand('Scan for dependencies used in the meson.build file', no_bd=list_deps_from_source),
- 'installed': IntroCommand('List all installed files and directories', func=lambda: list_installed(installdata)),
- 'projectinfo': IntroCommand('Information about projects', func=lambda: list_projinfo(builddata), no_bd=list_projinfo_from_source),
- 'targets': IntroCommand('List top level targets', func=lambda: list_targets(builddata, installdata, backend), no_bd=list_targets_from_source),
- 'tests': IntroCommand('List all unit tests', func=lambda: list_tests(testdata)),
- }
+ # Enforce key order for argparse
+ return collections.OrderedDict([
+ ('ast', IntroCommand('Dump the AST of the meson file', no_bd=dump_ast)),
+ ('benchmarks', IntroCommand('List all benchmarks', func=lambda: list_benchmarks(benchmarkdata))),
+ ('buildoptions', IntroCommand('List all build options', func=lambda: list_buildoptions(coredata), no_bd=list_buildoptions_from_source)),
+ ('buildsystem_files', IntroCommand('List files that make up the build system', func=lambda: list_buildsystem_files(builddata, interpreter))),
+ ('dependencies', IntroCommand('List external dependencies', func=lambda: list_deps(coredata), no_bd=list_deps_from_source)),
+ ('scan_dependencies', IntroCommand('Scan for dependencies used in the meson.build file', no_bd=list_deps_from_source)),
+ ('installed', IntroCommand('List all installed files and directories', func=lambda: list_installed(installdata))),
+ ('projectinfo', IntroCommand('Information about projects', func=lambda: list_projinfo(builddata), no_bd=list_projinfo_from_source)),
+ ('targets', IntroCommand('List top level targets', func=lambda: list_targets(builddata, installdata, backend), no_bd=list_targets_from_source)),
+ ('tests', IntroCommand('List all unit tests', func=lambda: list_tests(testdata))),
+ ])
def add_arguments(parser):
intro_types = get_meson_introspection_types()
@@ -79,7 +82,7 @@ def add_arguments(parser):
flag = '--' + key.replace('_', '-')
parser.add_argument(flag, action='store_true', dest=key, default=False, help=val.desc)
- parser.add_argument('--backend', choices=cdata.backendlist, dest='backend', default='ninja',
+ parser.add_argument('--backend', choices=sorted(cdata.backendlist), dest='backend', default='ninja',
help='The backend to use for the --buildoptions introspection.')
parser.add_argument('-a', '--all', action='store_true', dest='all', default=False,
help='Print all available information.')
@@ -89,6 +92,11 @@ def add_arguments(parser):
help='Always use the new JSON format for multiple entries (even for 0 and 1 introspection commands)')
parser.add_argument('builddir', nargs='?', default='.', help='The build directory')
+def dump_ast(intr: IntrospectionInterpreter) -> T.Dict[str, T.Any]:
+ printer = AstJSONPrinter()
+ intr.ast.accept(printer)
+ return printer.result
+
def list_installed(installdata):
res = {}
if installdata is not None:
@@ -192,19 +200,7 @@ def list_buildoptions_from_source(intr: IntrospectionInterpreter) -> T.List[T.Di
def list_buildoptions(coredata: cdata.CoreData, subprojects: T.Optional[T.List[str]] = None) -> T.List[T.Dict[str, T.Union[str, bool, int, T.List[str]]]]:
optlist = [] # type: T.List[T.Dict[str, T.Union[str, bool, int, T.List[str]]]]
- dir_option_names = ['bindir',
- 'datadir',
- 'includedir',
- 'infodir',
- 'libdir',
- 'libexecdir',
- 'localedir',
- 'localstatedir',
- 'mandir',
- 'prefix',
- 'sbindir',
- 'sharedstatedir',
- 'sysconfdir']
+ dir_option_names = list(cdata.BUILTIN_DIR_OPTIONS)
test_option_names = ['errorlogs',
'stdsplit']
core_option_names = [k for k in coredata.builtins if k not in dir_option_names + test_option_names]
@@ -328,7 +324,7 @@ def get_test_list(testdata) -> T.List[T.Dict[str, T.Union[str, int, T.List[str],
to['suite'] = t.suite
to['is_parallel'] = t.is_parallel
to['priority'] = t.priority
- to['protocol'] = t.protocol
+ to['protocol'] = str(t.protocol)
result.append(to)
return result
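
Note: the new 'ast' entry means a --ast flag is generated for `meson introspect`, and dump_ast serialises the parsed meson.build without needing a configured build directory. A rough programmatic equivalent (intr stands for an IntrospectionInterpreter that has already run; illustrative only):

    import json

    printer = AstJSONPrinter()
    intr.ast.accept(printer)
    print(json.dumps(printer.result, indent=2))
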
diff --git a/mesonbuild/mlog.py b/mesonbuild/mlog.py
index 8cbd248..1e5a105 100644
--- a/mesonbuild/mlog.py
+++ b/mesonbuild/mlog.py
@@ -40,15 +40,32 @@ def _windows_ansi() -> bool:
# original behavior
return bool(kernel.SetConsoleMode(stdout, mode.value | 0x4) or os.environ.get('ANSICON'))
-def setup_console() -> bool:
+def colorize_console() -> bool:
+ _colorize_console = getattr(sys.stdout, 'colorize_console', None) # type: T.Optional[bool]
+ if _colorize_console is not None:
+ return _colorize_console
+
try:
if platform.system().lower() == 'windows':
- return os.isatty(sys.stdout.fileno()) and _windows_ansi()
- return os.isatty(sys.stdout.fileno()) and os.environ.get('TERM') != 'dumb'
+ _colorize_console = os.isatty(sys.stdout.fileno()) and _windows_ansi()
+ else:
+ _colorize_console = os.isatty(sys.stdout.fileno()) and os.environ.get('TERM', 'dumb') != 'dumb'
except Exception:
- return False
+ _colorize_console = False
+
+ sys.stdout.colorize_console = _colorize_console # type: ignore[attr-defined]
+ return _colorize_console
+
+def setup_console() -> None:
+ # on Windows, a subprocess might call SetConsoleMode() on the console
+ # connected to stdout and turn off ANSI escape processing. Call this after
+ # running a subprocess to ensure we turn it on again.
+ if platform.system().lower() == 'windows':
+ try:
+ delattr(sys.stdout, 'colorize_console')
+ except AttributeError:
+ pass
-colorize_console = setup_console()
log_dir = None # type: T.Optional[str]
log_file = None # type: T.Optional[T.TextIO]
log_fname = 'meson-log.txt' # type: str
@@ -204,7 +221,7 @@ def log(*args: T.Union[str, AnsiDecorator], is_error: bool = False,
if log_file is not None:
print(*arr, file=log_file, **kwargs)
log_file.flush()
- if colorize_console:
+ if colorize_console():
arr = process_markup(args, True)
if not log_errors_only or is_error:
force_print(*arr, **kwargs)
@@ -233,7 +250,7 @@ def get_error_location_string(fname: str, lineno: str) -> str:
return '{}:{}:'.format(fname, lineno)
def _log_error(severity: str, *rargs: T.Union[str, AnsiDecorator],
- once: bool = False, **kwargs: T.Any) -> None:
+ once: bool = False, fatal: bool = True, **kwargs: T.Any) -> None:
from .mesonlib import MesonException, relpath
# The typing requirements here are non-obvious. Lists are invariant,
@@ -266,7 +283,7 @@ def _log_error(severity: str, *rargs: T.Union[str, AnsiDecorator],
global log_warnings_counter
log_warnings_counter += 1
- if log_fatal_warnings:
+ if log_fatal_warnings and fatal:
raise MesonException("Fatal warnings enabled, aborting")
def error(*args: T.Union[str, AnsiDecorator], **kwargs: T.Any) -> None:
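
Note: the colour decision is now computed lazily and cached as an attribute on sys.stdout, so setup_console() can drop the cache after a subprocess may have reset the Windows console mode. The cache-on-the-stream pattern in isolation:

    import sys

    def cached_isatty() -> bool:
        val = getattr(sys.stdout, '_cached_isatty', None)
        if val is not None:
            return val
        val = sys.stdout.isatty()              # the "expensive" probe
        sys.stdout._cached_isatty = val        # cache on the stream object
        return val

    def invalidate() -> None:
        try:
            delattr(sys.stdout, '_cached_isatty')
        except AttributeError:
            pass
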
diff --git a/mesonbuild/modules/__init__.py b/mesonbuild/modules/__init__.py
index dc86a1b..47be039 100644
--- a/mesonbuild/modules/__init__.py
+++ b/mesonbuild/modules/__init__.py
@@ -57,6 +57,17 @@ def get_include_args(include_dirs, prefix='-I'):
return dirs_str
+def is_module_library(fname):
+ '''
+ Check if the file is a library-like file generated by a module-specific
+ target, such as GirTarget or TypelibTarget
+ '''
+ if hasattr(fname, 'fname'):
+ fname = fname.fname
+ suffix = fname.split('.')[-1]
+ return suffix in ('gir', 'typelib')
+
+
class ModuleReturnValue:
def __init__(self, return_value, new_objects):
self.return_value = return_value
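
Note: is_module_library also accepts file objects, via the hasattr check on fname. Quick sketch:

    print(is_module_library('Foo-1.0.gir'))      # True
    print(is_module_library('Foo-1.0.typelib'))  # True
    print(is_module_library('libfoo.so'))        # False
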
diff --git a/mesonbuild/modules/cmake.py b/mesonbuild/modules/cmake.py
index 0283d11..e6587e4 100644
--- a/mesonbuild/modules/cmake.py
+++ b/mesonbuild/modules/cmake.py
@@ -14,12 +14,28 @@
import re
import os, os.path, pathlib
import shutil
+import typing as T
from . import ExtensionModule, ModuleReturnValue
from .. import build, dependencies, mesonlib, mlog
-from ..interpreterbase import permittedKwargs, FeatureNew, stringArgs, InterpreterObject, ObjectHolder, noPosargs
+from ..cmake import SingleTargetOptions, TargetOptions, cmake_defines_to_args
from ..interpreter import ConfigurationDataHolder, InterpreterException, SubprojectHolder
+from ..interpreterbase import (
+ InterpreterObject,
+ ObjectHolder,
+
+ FeatureNew,
+ FeatureNewKwargs,
+ FeatureDeprecatedKwargs,
+
+ stringArgs,
+ permittedKwargs,
+ noPosargs,
+ noKwargs,
+
+ InvalidArguments,
+)
COMPATIBILITIES = ['AnyNewerVersion', 'SameMajorVersion', 'SameMinorVersion', 'ExactVersion']
@@ -82,42 +98,107 @@ class CMakeSubprojectHolder(InterpreterObject, ObjectHolder):
assert(all([x in res for x in ['inc', 'src', 'dep', 'tgt', 'func']]))
return res
- @permittedKwargs({})
+ @noKwargs
+ @stringArgs
def get_variable(self, args, kwargs):
return self.held_object.get_variable_method(args, kwargs)
- @permittedKwargs({})
+ @noKwargs
+ @stringArgs
def dependency(self, args, kwargs):
info = self._args_to_info(args)
return self.get_variable([info['dep']], kwargs)
- @permittedKwargs({})
+ @noKwargs
+ @stringArgs
def include_directories(self, args, kwargs):
info = self._args_to_info(args)
return self.get_variable([info['inc']], kwargs)
- @permittedKwargs({})
+ @noKwargs
+ @stringArgs
def target(self, args, kwargs):
info = self._args_to_info(args)
return self.get_variable([info['tgt']], kwargs)
- @permittedKwargs({})
+ @noKwargs
+ @stringArgs
def target_type(self, args, kwargs):
info = self._args_to_info(args)
return info['func']
@noPosargs
- @permittedKwargs({})
+ @noKwargs
def target_list(self, args, kwargs):
return self.held_object.cm_interpreter.target_list()
@noPosargs
- @permittedKwargs({})
+ @noKwargs
@FeatureNew('CMakeSubproject.found()', '0.53.2')
def found_method(self, args, kwargs):
return self.held_object is not None
+class CMakeSubprojectOptions(InterpreterObject):
+ def __init__(self) -> None:
+ super().__init__()
+ self.cmake_options = [] # type: T.List[str]
+ self.target_options = TargetOptions()
+
+ self.methods.update(
+ {
+ 'add_cmake_defines': self.add_cmake_defines,
+ 'set_override_option': self.set_override_option,
+ 'set_install': self.set_install,
+ 'append_compile_args': self.append_compile_args,
+ 'append_link_args': self.append_link_args,
+ 'clear': self.clear,
+ }
+ )
+
+ def _get_opts(self, kwargs: dict) -> SingleTargetOptions:
+ if 'target' in kwargs:
+ return self.target_options[kwargs['target']]
+ return self.target_options.global_options
+
+ @noKwargs
+ def add_cmake_defines(self, args, kwargs) -> None:
+ self.cmake_options += cmake_defines_to_args(args)
+
+ @stringArgs
+ @permittedKwargs({'target'})
+ def set_override_option(self, args, kwargs) -> None:
+ if len(args) != 2:
+ raise InvalidArguments('set_override_option takes exactly 2 positional arguments')
+ self._get_opts(kwargs).set_opt(args[0], args[1])
+
+ @permittedKwargs({'target'})
+ def set_install(self, args, kwargs) -> None:
+ if len(args) != 1 or not isinstance(args[0], bool):
+ raise InvalidArguments('set_install takes exactly 1 boolean argument')
+ self._get_opts(kwargs).set_install(args[0])
+
+ @stringArgs
+ @permittedKwargs({'target'})
+ def append_compile_args(self, args, kwargs) -> None:
+ if len(args) < 2:
+ raise InvalidArguments('append_compile_args takes at least 2 positional arguments')
+ self._get_opts(kwargs).append_args(args[0], args[1:])
+
+ @stringArgs
+ @permittedKwargs({'target'})
+ def append_link_args(self, args, kwargs) -> None:
+ if not args:
+ raise InvalidArguments('append_link_args takes at least 1 positional argument')
+ self._get_opts(kwargs).append_link_args(args)
+
+ @noPosargs
+ @noKwargs
+ def clear(self, args, kwargs) -> None:
+ self.cmake_options.clear()
+ self.target_options = TargetOptions()
+
+
class CmakeModule(ExtensionModule):
cmake_detected = False
cmake_root = None
@@ -252,8 +333,7 @@ class CmakeModule(ExtensionModule):
(ofile_path, ofile_fname) = os.path.split(os.path.join(state.subdir, '{}Config.cmake'.format(name)))
ofile_abs = os.path.join(state.environment.build_dir, ofile_path, ofile_fname)
- if 'install_dir' not in kwargs:
- install_dir = os.path.join(state.environment.coredata.get_builtin_option('libdir'), 'cmake', name)
+ install_dir = kwargs.get('install_dir', os.path.join(state.environment.coredata.get_builtin_option('libdir'), 'cmake', name))
if not isinstance(install_dir, str):
raise mesonlib.MesonException('"install_dir" must be a string.')
@@ -287,16 +367,27 @@ class CmakeModule(ExtensionModule):
return res
@FeatureNew('subproject', '0.51.0')
- @permittedKwargs({'cmake_options', 'required'})
+ @FeatureNewKwargs('subproject', '0.55.0', ['options'])
+ @FeatureDeprecatedKwargs('subproject', '0.55.0', ['cmake_options'])
+ @permittedKwargs({'cmake_options', 'required', 'options'})
@stringArgs
def subproject(self, interpreter, state, args, kwargs):
if len(args) != 1:
raise InterpreterException('Subproject takes exactly one argument')
+ if 'cmake_options' in kwargs and 'options' in kwargs:
+ raise InterpreterException('"options" cannot be used together with "cmake_options"')
dirname = args[0]
subp = interpreter.do_subproject(dirname, 'cmake', kwargs)
if not subp.held_object:
return subp
return CMakeSubprojectHolder(subp, dirname)
+ @FeatureNew('subproject_options', '0.55.0')
+ @noKwargs
+ @noPosargs
+ def subproject_options(self, state, args, kwargs) -> ModuleReturnValue:
+ opts = CMakeSubprojectOptions()
+ return ModuleReturnValue(opts, [])
+
def initialize(*args, **kwargs):
return CmakeModule(*args, **kwargs)
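
Note: CMakeSubprojectOptions only accumulates state; the CMake interpreter consumes cmake_options and target_options later. A rough Python-level sketch of the accumulation (direct calls shown for illustration only; in practice the meson interpreter dispatches these methods and the decorators validate args/kwargs):

    opts = CMakeSubprojectOptions()
    opts.add_cmake_defines([{'SOME_DEFINE': 'ON'}], {})         # -> -DSOME_DEFINE=ON
    opts.set_override_option(['cpp_std', 'c++17'], {})          # all targets
    opts.set_install([False], {'target': 'some_tgt'})           # one target only
    opts.append_compile_args(['c', '-DFOO'], {})                # language, then args
    opts.clear([], {})                                          # reset everything
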
diff --git a/mesonbuild/modules/gnome.py b/mesonbuild/modules/gnome.py
index 21360a2..de674db 100644
--- a/mesonbuild/modules/gnome.py
+++ b/mesonbuild/modules/gnome.py
@@ -32,8 +32,8 @@ from ..mesonlib import (
MachineChoice, MesonException, OrderedSet, Popen_safe, extract_as_list,
join_args, unholder,
)
-from ..dependencies import Dependency, PkgConfigDependency, InternalDependency
-from ..interpreterbase import noKwargs, permittedKwargs, FeatureNew, FeatureNewKwargs
+from ..dependencies import Dependency, PkgConfigDependency, InternalDependency, ExternalProgram
+from ..interpreterbase import noKwargs, permittedKwargs, FeatureNew, FeatureNewKwargs, FeatureDeprecatedKwargs
# gresource compilation is broken due to the way
# the resource compiler and Ninja clash about it
@@ -44,20 +44,6 @@ gresource_dep_needed_version = '>= 2.51.1'
native_glib_version = None
-@functools.lru_cache(maxsize=None)
-def gir_has_option(intr_obj, option):
- try:
- g_ir_scanner = intr_obj.find_program_impl('g-ir-scanner')
- # Handle overridden g-ir-scanner
- if isinstance(getattr(g_ir_scanner, "held_object", g_ir_scanner), interpreter.OverrideProgram):
- assert option in ['--extra-library', '--sources-top-dirs']
- return True
-
- opts = Popen_safe(g_ir_scanner.get_command() + ['--help'], stderr=subprocess.STDOUT)[1]
- return option in opts
- except (MesonException, FileNotFoundError, subprocess.CalledProcessError):
- return False
-
class GnomeModule(ExtensionModule):
gir_dep = None
@@ -303,7 +289,7 @@ class GnomeModule(ExtensionModule):
link_command.append('-L' + d)
if include_rpath:
link_command.append('-Wl,-rpath,' + d)
- if gir_has_option(self.interpreter, '--extra-library') and use_gir_args:
+ if use_gir_args and self._gir_has_option('--extra-library'):
link_command.append('--extra-library=' + lib.name)
else:
link_command.append('-l' + lib.name)
@@ -321,6 +307,10 @@ class GnomeModule(ExtensionModule):
deps = mesonlib.unholder(mesonlib.listify(deps))
for dep in deps:
+ if isinstance(dep, Dependency):
+ girdir = dep.get_variable(pkgconfig='girdir', internal='girdir', default_value='')
+ if girdir:
+ gi_includes.update([girdir])
if isinstance(dep, InternalDependency):
cflags.update(dep.get_compile_args())
cflags.update(get_include_args(dep.include_directories))
@@ -371,11 +361,6 @@ class GnomeModule(ExtensionModule):
external_ldflags_nodedup += [lib, next(ldflags)]
else:
external_ldflags.update([lib])
-
- if isinstance(dep, PkgConfigDependency):
- girdir = dep.get_pkgconfig_variable("girdir", {'default': ''})
- if girdir:
- gi_includes.update([girdir])
elif isinstance(dep, (build.StaticLibrary, build.SharedLibrary)):
cflags.update(get_include_args(dep.get_include_dirs()))
depends.append(dep)
@@ -383,7 +368,7 @@ class GnomeModule(ExtensionModule):
mlog.log('dependency {!r} not handled to build gir files'.format(dep))
continue
- if gir_has_option(self.interpreter, '--extra-library') and use_gir_args:
+ if use_gir_args and self._gir_has_option('--extra-library'):
def fix_ldflags(ldflags):
fixed_ldflags = OrderedSet()
for ldflag in ldflags:
@@ -417,15 +402,37 @@ class GnomeModule(ExtensionModule):
return girtarget
def _get_gir_dep(self, state):
- try:
- gir_dep = self.gir_dep or PkgConfigDependency('gobject-introspection-1.0',
- state.environment,
- {'native': True})
- pkgargs = gir_dep.get_compile_args()
- except Exception:
- raise MesonException('gobject-introspection dependency was not found, gir cannot be generated.')
-
- return gir_dep, pkgargs
+ if not self.gir_dep:
+ kwargs = {'native': True, 'required': True}
+ holder = self.interpreter.func_dependency(state.current_node, ['gobject-introspection-1.0'], kwargs)
+ self.gir_dep = holder.held_object
+ giscanner = state.environment.lookup_binary_entry(MachineChoice.HOST, 'g-ir-scanner')
+ if giscanner is not None:
+ self.giscanner = ExternalProgram.from_entry('g-ir-scanner', giscanner)
+ elif self.gir_dep.type_name == 'pkgconfig':
+ self.giscanner = ExternalProgram('g_ir_scanner', self.gir_dep.get_pkgconfig_variable('g_ir_scanner', {}))
+ else:
+ self.giscanner = self.interpreter.find_program_impl('g-ir-scanner')
+ gicompiler = state.environment.lookup_binary_entry(MachineChoice.HOST, 'g-ir-compiler')
+ if gicompiler is not None:
+ self.gicompiler = ExternalProgram.from_entry('g-ir-compiler', gicompiler)
+ elif self.gir_dep.type_name == 'pkgconfig':
+ self.gicompiler = ExternalProgram('g_ir_compiler', self.gir_dep.get_pkgconfig_variable('g_ir_compiler', {}))
+ else:
+ self.gicompiler = self.interpreter.find_program_impl('g-ir-compiler')
+ return self.gir_dep, self.giscanner, self.gicompiler
+
+ @functools.lru_cache(maxsize=None)
+ def _gir_has_option(self, option):
+ exe = self.giscanner
+ if hasattr(exe, 'held_object'):
+ exe = exe.held_object
+ if isinstance(exe, interpreter.OverrideProgram):
+ # Handle overridden g-ir-scanner
+ assert option in ['--extra-library', '--sources-top-dirs']
+ return True
+ p, o, e = Popen_safe(exe.get_command() + ['--help'], stderr=subprocess.STDOUT)
+ return p.returncode == 0 and option in o
def _scan_header(self, kwargs):
ret = []
@@ -688,11 +695,10 @@ class GnomeModule(ExtensionModule):
source.get_subdir())
if subdir not in typelib_includes:
typelib_includes.append(subdir)
- elif isinstance(dep, PkgConfigDependency):
- girdir = dep.get_pkgconfig_variable("girdir", {'default': ''})
+ if isinstance(dep, Dependency):
+ girdir = dep.get_variable(pkgconfig='girdir', internal='girdir', default_value='')
if girdir and girdir not in typelib_includes:
typelib_includes.append(girdir)
-
return typelib_includes
def _get_external_args_for_langs(self, state, langs):
@@ -715,11 +721,12 @@ class GnomeModule(ExtensionModule):
if f.startswith(('-L', '-l', '--extra-library')):
yield f
- @FeatureNewKwargs('build target', '0.40.0', ['build_by_default'])
+ @FeatureNewKwargs('generate_gir', '0.55.0', ['fatal_warnings'])
+ @FeatureNewKwargs('generate_gir', '0.40.0', ['build_by_default'])
@permittedKwargs({'sources', 'nsversion', 'namespace', 'symbol_prefix', 'identifier_prefix',
'export_packages', 'includes', 'dependencies', 'link_with', 'include_directories',
'install', 'install_dir_gir', 'install_dir_typelib', 'extra_args',
- 'packages', 'header', 'build_by_default'})
+ 'packages', 'header', 'build_by_default', 'fatal_warnings'})
def generate_gir(self, state, args, kwargs):
if not args:
raise MesonException('generate_gir takes at least one argument')
@@ -731,42 +738,25 @@ class GnomeModule(ExtensionModule):
if len(girtargets) > 1 and any([isinstance(el, build.Executable) for el in girtargets]):
raise MesonException('generate_gir only accepts a single argument when one of the arguments is an executable')
- self.gir_dep, pkgargs = self._get_gir_dep(state)
- # find_program is needed in the case g-i is built as subproject.
- # In that case it uses override_find_program so the gobject utilities
- # can be used from the build dir instead of from the system.
- # However, GObject-introspection provides the appropriate paths to
- # these utilities via pkg-config, so it would be best to use the
- # results from pkg-config when possible.
- gi_util_dirs_check = [state.environment.get_build_dir(), state.environment.get_source_dir()]
- giscanner = self.interpreter.find_program_impl('g-ir-scanner')
- if giscanner.found():
- giscanner_path = giscanner.get_command()[0]
- if not any(x in giscanner_path for x in gi_util_dirs_check):
- giscanner = self.gir_dep.get_pkgconfig_variable('g_ir_scanner', {})
- else:
- giscanner = self.gir_dep.get_pkgconfig_variable('g_ir_scanner', {})
+ gir_dep, giscanner, gicompiler = self._get_gir_dep(state)
- gicompiler = self.interpreter.find_program_impl('g-ir-compiler')
- if gicompiler.found():
- gicompiler_path = gicompiler.get_command()[0]
- if not any(x in gicompiler_path for x in gi_util_dirs_check):
- gicompiler = self.gir_dep.get_pkgconfig_variable('g_ir_compiler', {})
- else:
- gicompiler = self.gir_dep.get_pkgconfig_variable('g_ir_compiler', {})
-
- ns = kwargs.pop('namespace')
- nsversion = kwargs.pop('nsversion')
+ ns = kwargs.get('namespace')
+ if not ns:
+ raise MesonException('Missing "namespace" keyword argument')
+ nsversion = kwargs.get('nsversion')
+ if not nsversion:
+ raise MesonException('Missing "nsversion" keyword argument')
libsources = mesonlib.extract_as_list(kwargs, 'sources', pop=True)
girfile = '%s-%s.gir' % (ns, nsversion)
srcdir = os.path.join(state.environment.get_source_dir(), state.subdir)
builddir = os.path.join(state.environment.get_build_dir(), state.subdir)
- depends = [] + girtargets
+ depends = gir_dep.sources + girtargets
gir_inc_dirs = []
langs_compilers = self._get_girtargets_langs_compilers(girtargets)
cflags, internal_ldflags, external_ldflags = self._get_langs_compilers_flags(state, langs_compilers)
deps = self._get_gir_targets_deps(girtargets)
deps += mesonlib.unholder(extract_as_list(kwargs, 'dependencies', pop=True))
+ deps += [gir_dep]
typelib_includes = self._gather_typelib_includes_and_update_depends(state, deps, depends)
# ldflags will be misinterpreted by gir scanner (showing
# spurious dependencies) but building GStreamer fails if they
@@ -781,7 +771,6 @@ class GnomeModule(ExtensionModule):
inc_dirs = self._scan_inc_dirs(kwargs)
scan_command = [giscanner]
- scan_command += pkgargs
scan_command += ['--no-libtool']
scan_command += ['--namespace=' + ns, '--nsversion=' + nsversion]
scan_command += ['--warn-all']
@@ -806,10 +795,18 @@ class GnomeModule(ExtensionModule):
scan_command += self._scan_langs(state, [lc[0] for lc in langs_compilers])
scan_command += list(external_ldflags)
- if gir_has_option(self.interpreter, '--sources-top-dirs'):
+ if self._gir_has_option('--sources-top-dirs'):
scan_command += ['--sources-top-dirs', os.path.join(state.environment.get_source_dir(), self.interpreter.subproject_dir, state.subproject)]
scan_command += ['--sources-top-dirs', os.path.join(state.environment.get_build_dir(), self.interpreter.subproject_dir, state.subproject)]
+ if '--warn-error' in scan_command:
+ mlog.deprecation('Passing --warn-error is deprecated in favor of the "fatal_warnings" keyword argument since v0.55')
+ fatal_warnings = kwargs.get('fatal_warnings', False)
+ if not isinstance(fatal_warnings, bool):
+ raise MesonException('fatal_warnings keyword argument must be a boolean')
+ if fatal_warnings:
+ scan_command.append('--warn-error')
+
scan_target = self._make_gir_target(state, girfile, scan_command, depends, kwargs)
typelib_output = '%s-%s.typelib' % (ns, nsversion)
@@ -846,6 +843,8 @@ class GnomeModule(ExtensionModule):
return ModuleReturnValue(target_g, [target_g])
@permittedKwargs({'sources', 'media', 'symlink_media', 'languages'})
+ @FeatureDeprecatedKwargs('gnome.yelp', '0.43.0', ['languages'],
+ 'Use a LINGUAS file in the source directory instead')
def yelp(self, state, args, kwargs):
if len(args) < 1:
raise MesonException('Yelp requires a project id')
@@ -860,11 +859,6 @@ class GnomeModule(ExtensionModule):
source_str = '@@'.join(sources)
langs = mesonlib.stringlistify(kwargs.pop('languages', []))
- if langs:
- mlog.deprecation('''The "languages" argument of gnome.yelp() is deprecated.
-Use a LINGUAS file in the sources directory instead.
-This will become a hard error in the future.''')
-
media = mesonlib.stringlistify(kwargs.pop('media', []))
symlinks = kwargs.pop('symlink_media', True)
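
Note: _gir_has_option is memoised with functools.lru_cache, so each option is probed at most once per module instance. A tiny demonstration of lru_cache on a method (the cache key includes self):

    import functools

    class Probe:
        calls = 0

        @functools.lru_cache(maxsize=None)
        def has_option(self, option: str) -> bool:
            self.calls += 1                 # the expensive probe goes here
            return option.startswith('--')

    p = Probe()
    p.has_option('--extra-library')
    p.has_option('--extra-library')         # second call served from cache
    print(p.calls)                          # 1
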
diff --git a/mesonbuild/modules/unstable_kconfig.py b/mesonbuild/modules/keyval.py
index 6685710..3da2992 100644
--- a/mesonbuild/modules/unstable_kconfig.py
+++ b/mesonbuild/modules/keyval.py
@@ -21,9 +21,9 @@ from ..interpreter import InvalidCode
import os
-class KconfigModule(ExtensionModule):
+class KeyvalModule(ExtensionModule):
- @FeatureNew('Kconfig Module', '0.51.0')
+ @FeatureNew('Keyval Module', '0.55.0')
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.snippets.add('load')
@@ -56,9 +56,7 @@ class KconfigModule(ExtensionModule):
s = sources[0]
is_built = False
if isinstance(s, mesonlib.File):
- if s.is_built:
- FeatureNew('kconfig.load() of built files', '0.52.0').use(state.subproject)
- is_built = True
+ is_built = is_built or s.is_built
s = s.absolute_path(interpreter.environment.source_dir, interpreter.environment.build_dir)
else:
s = os.path.join(interpreter.environment.source_dir, s)
@@ -70,4 +68,4 @@ class KconfigModule(ExtensionModule):
def initialize(*args, **kwargs):
- return KconfigModule(*args, **kwargs)
+ return KeyvalModule(*args, **kwargs)
diff --git a/mesonbuild/modules/pkgconfig.py b/mesonbuild/modules/pkgconfig.py
index ac51e36..f81ee2f 100644
--- a/mesonbuild/modules/pkgconfig.py
+++ b/mesonbuild/modules/pkgconfig.py
@@ -36,6 +36,7 @@ class DependenciesHelper:
self.priv_reqs = []
self.cflags = []
self.version_reqs = {}
+ self.link_whole_targets = []
def add_pub_libs(self, libs):
libs, reqs, cflags = self._process_libs(libs, True)
@@ -76,7 +77,7 @@ class DependenciesHelper:
processed_reqs = []
for obj in mesonlib.unholder(mesonlib.listify(reqs)):
if not isinstance(obj, str):
- FeatureNew('pkgconfig.generate requirement from non-string object', '0.46.0').use(self.state.subproject)
+ FeatureNew.single_use('pkgconfig.generate requirement from non-string object', '0.46.0', self.state.subproject)
if hasattr(obj, 'generated_pc'):
self._check_generated_pc_deprecation(obj)
processed_reqs.append(obj.generated_pc)
@@ -130,10 +131,7 @@ class DependenciesHelper:
if obj.found():
processed_libs += obj.get_link_args()
processed_cflags += obj.get_compile_args()
- if public:
- self.add_pub_libs(obj.libraries)
- else:
- self.add_priv_libs(obj.libraries)
+ self._add_lib_dependencies(obj.libraries, obj.whole_libraries, obj.ext_deps, public)
elif isinstance(obj, dependencies.Dependency):
if obj.found():
processed_libs += obj.get_link_args()
@@ -148,12 +146,13 @@ class DependenciesHelper:
processed_libs.append(obj)
elif isinstance(obj, (build.SharedLibrary, build.StaticLibrary)):
processed_libs.append(obj)
- if isinstance(obj, build.StaticLibrary) and public:
- self.add_pub_libs(obj.get_dependencies(for_pkgconfig=True))
- self.add_pub_libs(obj.get_external_deps())
- else:
- self.add_priv_libs(obj.get_dependencies(for_pkgconfig=True))
- self.add_priv_libs(obj.get_external_deps())
+ # If there is a static library in `Libs:` all its deps must be
+ # public too, otherwise the generated pc file will never be
+ # usable without --static.
+ self._add_lib_dependencies(obj.link_targets,
+ obj.link_whole_targets,
+ obj.external_deps,
+ isinstance(obj, build.StaticLibrary) and public)
elif isinstance(obj, str):
processed_libs.append(obj)
else:
@@ -161,6 +160,31 @@ class DependenciesHelper:
return processed_libs, processed_reqs, processed_cflags
+ def _add_lib_dependencies(self, link_targets, link_whole_targets, external_deps, public):
+ add_libs = self.add_pub_libs if public else self.add_priv_libs
+ # Recursively add all linked libraries
+ for t in link_targets:
+ # Internal libraries (uninstalled static library) will be promoted
+ # to link_whole, treat them as such here.
+ if t.is_internal():
+ self._add_link_whole(t, public)
+ else:
+ add_libs([t])
+ for t in link_whole_targets:
+ self._add_link_whole(t, public)
+ # And finally its external dependencies
+ add_libs(external_deps)
+
+ def _add_link_whole(self, t, public):
+ # Don't include static libraries that we link_whole. But we still need to
+ # include their dependencies: a static library we link_whole
+ # could itself link to a shared library or an installed static library.
+ # Keep track of link_whole_targets so we can remove them from our
+ # lists in case a library is link_with and link_whole at the same time.
+ # See remove_dups() below.
+ self.link_whole_targets.append(t)
+ self._add_lib_dependencies(t.link_targets, t.link_whole_targets, t.external_deps, public)
+
def add_version_reqs(self, name, version_reqs):
if version_reqs:
if name not in self.version_reqs:
@@ -196,6 +220,32 @@ class DependenciesHelper:
return ', '.join(result)
def remove_dups(self):
+ # Set of ids that have already been handled and should not be added any more
+ exclude = set()
+
+ # We can't just check if 'x' is excluded because we could have copies of
+ # the same SharedLibrary object for example.
+ def _ids(x):
+ if hasattr(x, 'generated_pc'):
+ yield x.generated_pc
+ if isinstance(x, build.Target):
+ yield x.get_id()
+ yield x
+
+ # Exclude 'x' in all its forms and return if it was already excluded
+ def _add_exclude(x):
+ was_excluded = False
+ for i in _ids(x):
+ if i in exclude:
+ was_excluded = True
+ else:
+ exclude.add(i)
+ return was_excluded
+
+ # link_whole targets are already part of other targets, exclude them all.
+ for t in self.link_whole_targets:
+ _add_exclude(t)
+
def _fn(xs, libs=False):
# Remove duplicates whilst preserving original order
result = []
@@ -206,19 +256,21 @@ class DependenciesHelper:
cannot_dedup = libs and isinstance(x, str) and \
not x.startswith(('-l', '-L')) and \
x not in known_flags
- if x not in result or cannot_dedup:
- result.append(x)
+ if not cannot_dedup and _add_exclude(x):
+ continue
+ result.append(x)
return result
- self.pub_libs = _fn(self.pub_libs, True)
+
+ # Handle lists in priority order: public items can be excluded from
+ # private, and Requires can be excluded from Libs.
self.pub_reqs = _fn(self.pub_reqs)
- self.priv_libs = _fn(self.priv_libs, True)
+ self.pub_libs = _fn(self.pub_libs, True)
self.priv_reqs = _fn(self.priv_reqs)
+ self.priv_libs = _fn(self.priv_libs, True)
+ # Reset exclude list just in case some values can be both cflags and libs.
+ exclude = set()
self.cflags = _fn(self.cflags)
- # Remove from private libs/reqs if they are in public already
- self.priv_libs = [i for i in self.priv_libs if i not in self.pub_libs]
- self.priv_reqs = [i for i in self.priv_reqs if i not in self.pub_reqs]
-
class PkgConfigModule(ExtensionModule):
def _get_lname(self, l, msg, pcfile):
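
Note: remove_dups now relies on one shared exclusion set processed in priority order, so an item emitted in a higher-priority list (public, or a link_whole target) is dropped from the lower-priority ones. The core pattern in isolation:

    def dedup_in_priority_order(*lists):
        seen = set()          # shared across all lists: first appearance wins
        result = []
        for lst in lists:
            kept = []
            for x in lst:
                if x not in seen:
                    seen.add(x)
                    kept.append(x)
            result.append(kept)
        return result

    pub, priv = dedup_in_priority_order(['-la', '-lb'], ['-lb', '-lc'])
    print(pub, priv)          # ['-la', '-lb'] ['-lc']
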
@@ -267,7 +319,6 @@ class PkgConfigModule(ExtensionModule):
def generate_pkgconfig_file(self, state, deps, subdirs, name, description,
url, version, pcfile, conflicts, variables,
uninstalled=False, dataonly=False):
- deps.remove_dups()
coredata = state.environment.get_coredata()
if uninstalled:
outdir = os.path.join(state.environment.build_dir, 'meson-uninstalled')
@@ -372,18 +423,18 @@ class PkgConfigModule(ExtensionModule):
if len(deps.priv_libs) > 0:
ofile.write('Libs.private: {}\n'.format(' '.join(generate_libs_flags(deps.priv_libs))))
- def generate_compiler_flags():
- cflags_buf = []
- for f in deps.cflags:
- cflags_buf.append(self._escape(f))
- return cflags_buf
-
- cflags = generate_compiler_flags()
- ofile.write('Cflags:')
+ cflags = []
if uninstalled:
- ofile.write(' '.join(generate_uninstalled_cflags(deps.pub_libs + deps.priv_libs)))
- elif not dataonly and cflags:
- ofile.write('{}\n'.format(' '.join(cflags)))
+ cflags += generate_uninstalled_cflags(deps.pub_libs + deps.priv_libs)
+ else:
+ for d in subdirs:
+ if d == '.':
+ cflags.append('-I${includedir}')
+ else:
+ cflags.append(self._escape(PurePath('-I${includedir}') / d))
+ cflags += [self._escape(f) for f in deps.cflags]
+ if cflags and not dataonly:
+ ofile.write('Cflags: {}\n'.format(' '.join(cflags)))
@FeatureNewKwargs('pkgconfig.generate', '0.54.0', ['uninstalled_variables'])
@FeatureNewKwargs('pkgconfig.generate', '0.42.0', ['extra_cflags'])
@@ -394,8 +445,6 @@ class PkgConfigModule(ExtensionModule):
'install_dir', 'extra_cflags', 'variables', 'url', 'd_module_versions',
'dataonly', 'conflicts'})
def generate(self, state, args, kwargs):
- if 'variables' in kwargs:
- FeatureNew('custom pkgconfig variables', '0.41.0').use(state.subproject)
default_version = state.project_version['version']
default_install_dir = None
default_description = None
@@ -403,9 +452,9 @@ class PkgConfigModule(ExtensionModule):
mainlib = None
default_subdirs = ['.']
if not args and 'version' not in kwargs:
- FeatureNew('pkgconfig.generate implicit version keyword', '0.46.0').use(state.subproject)
+ FeatureNew.single_use('pkgconfig.generate implicit version keyword', '0.46.0', state.subproject)
elif len(args) == 1:
- FeatureNew('pkgconfig.generate optional positional argument', '0.46.0').use(state.subproject)
+ FeatureNew.single_use('pkgconfig.generate optional positional argument', '0.46.0', state.subproject)
mainlib = getattr(args[0], 'held_object', args[0])
if not isinstance(mainlib, (build.StaticLibrary, build.SharedLibrary)):
raise mesonlib.MesonException('Pkgconfig_gen first positional argument must be a library object')
@@ -450,11 +499,6 @@ class PkgConfigModule(ExtensionModule):
libraries = [mainlib] + libraries
deps = DependenciesHelper(state, filebase)
- for d in subdirs:
- if d == '.':
- deps.add_cflags(['-I${includedir}'])
- else:
- deps.add_cflags(self._escape(PurePath('-I${includedir}') / d))
deps.add_pub_libs(libraries)
deps.add_priv_libs(kwargs.get('libraries_private', []))
deps.add_pub_reqs(kwargs.get('requires', []))
@@ -467,6 +511,8 @@ class PkgConfigModule(ExtensionModule):
if compiler:
deps.add_cflags(compiler.get_feature_args({'versions': dversions}, None))
+ deps.remove_dups()
+
def parse_variable_list(stringlist):
reserved = ['prefix', 'libdir', 'includedir']
variables = []
diff --git a/mesonbuild/modules/python.py b/mesonbuild/modules/python.py
index a5c58a2..ceabd76 100644
--- a/mesonbuild/modules/python.py
+++ b/mesonbuild/modules/python.py
@@ -285,7 +285,7 @@ print (json.dumps ({
class PythonInstallation(ExternalProgramHolder):
def __init__(self, interpreter, python, info):
- ExternalProgramHolder.__init__(self, python)
+ ExternalProgramHolder.__init__(self, python, interpreter.subproject)
self.interpreter = interpreter
self.subproject = self.interpreter.subproject
prefix = self.interpreter.environment.coredata.get_builtin_option('prefix')
@@ -361,7 +361,7 @@ class PythonInstallation(ExternalProgramHolder):
@permittedKwargs(['pure', 'subdir'])
def install_sources_method(self, args, kwargs):
- pure = kwargs.pop('pure', False)
+ pure = kwargs.pop('pure', True)
if not isinstance(pure, bool):
raise InvalidArguments('"pure" argument must be a boolean.')
@@ -514,7 +514,7 @@ class PythonModule(ExtensionModule):
if disabled:
mlog.log('Program', name_or_path or 'python', 'found:', mlog.red('NO'), '(disabled by:', mlog.bold(feature), ')')
- return ExternalProgramHolder(NonExistingExternalProgram())
+ return ExternalProgramHolder(NonExistingExternalProgram(), state.subproject)
if not name_or_path:
python = ExternalProgram('python3', mesonlib.python_command, silent=True)
@@ -561,11 +561,11 @@ class PythonModule(ExtensionModule):
if not python.found():
if required:
raise mesonlib.MesonException('{} not found'.format(name_or_path or 'python'))
- res = ExternalProgramHolder(NonExistingExternalProgram())
+ res = ExternalProgramHolder(NonExistingExternalProgram(), state.subproject)
elif missing_modules:
if required:
raise mesonlib.MesonException('{} is missing modules: {}'.format(name_or_path or 'python', ', '.join(missing_modules)))
- res = ExternalProgramHolder(NonExistingExternalProgram())
+ res = ExternalProgramHolder(NonExistingExternalProgram(), state.subproject)
else:
# Sanity check, we expect to have something that at least quacks in tune
try:
@@ -583,7 +583,7 @@ class PythonModule(ExtensionModule):
if isinstance(info, dict) and 'version' in info and self._check_version(name_or_path, info['version']):
res = PythonInstallation(interpreter, python, info)
else:
- res = ExternalProgramHolder(NonExistingExternalProgram())
+ res = ExternalProgramHolder(NonExistingExternalProgram(), state.subproject)
if required:
raise mesonlib.MesonException('{} is not a valid python or it is missing setuptools'.format(python))
diff --git a/mesonbuild/modules/qt.py b/mesonbuild/modules/qt.py
index c7da530..c810df6 100644
--- a/mesonbuild/modules/qt.py
+++ b/mesonbuild/modules/qt.py
@@ -15,8 +15,8 @@
import os
from .. import mlog
from .. import build
-from ..mesonlib import MesonException, Popen_safe, extract_as_list, File, unholder
-from ..dependencies import Dependency, Qt4Dependency, Qt5Dependency
+from ..mesonlib import MesonException, extract_as_list, File, unholder, version_compare
+from ..dependencies import Dependency, Qt4Dependency, Qt5Dependency, NonExistingExternalProgram
import xml.etree.ElementTree as ET
from . import ModuleReturnValue, get_include_args, ExtensionModule
from ..interpreterbase import noPosargs, permittedKwargs, FeatureNew, FeatureNewKwargs
@@ -30,49 +30,34 @@ _QT_DEPS_LUT = {
class QtBaseModule(ExtensionModule):
tools_detected = False
+ rcc_supports_depfiles = False
def __init__(self, interpreter, qt_version=5):
ExtensionModule.__init__(self, interpreter)
self.snippets.add('has_tools')
self.qt_version = qt_version
- def _detect_tools(self, env, method):
+ def _detect_tools(self, env, method, required=True):
if self.tools_detected:
return
+ self.tools_detected = True
mlog.log('Detecting Qt{version} tools'.format(version=self.qt_version))
- # FIXME: We currently require QtX to exist while importing the module.
- # We should make it gracefully degrade and not create any targets if
- # the import is marked as 'optional' (not implemented yet)
- kwargs = {'required': 'true', 'modules': 'Core', 'silent': 'true', 'method': method}
+ kwargs = {'required': required, 'modules': 'Core', 'method': method}
qt = _QT_DEPS_LUT[self.qt_version](env, kwargs)
- # Get all tools and then make sure that they are the right version
- self.moc, self.uic, self.rcc, self.lrelease = qt.compilers_detect(self.interpreter)
- # Moc, uic and rcc write their version strings to stderr.
- # Moc and rcc return a non-zero result when doing so.
- # What kind of an idiot thought that was a good idea?
- for compiler, compiler_name in ((self.moc, "Moc"), (self.uic, "Uic"), (self.rcc, "Rcc"), (self.lrelease, "lrelease")):
- if compiler.found():
- # Workaround since there is no easy way to know which tool/version support which flag
- for flag in ['-v', '-version']:
- p, stdout, stderr = Popen_safe(compiler.get_command() + [flag])[0:3]
- if p.returncode == 0:
- break
- stdout = stdout.strip()
- stderr = stderr.strip()
- if 'Qt {}'.format(self.qt_version) in stderr:
- compiler_ver = stderr
- elif 'version {}.'.format(self.qt_version) in stderr:
- compiler_ver = stderr
- elif ' {}.'.format(self.qt_version) in stdout:
- compiler_ver = stdout
- else:
- raise MesonException('{name} preprocessor is not for Qt {version}. Output:\n{stdo}\n{stderr}'.format(
- name=compiler_name, version=self.qt_version, stdo=stdout, stderr=stderr))
- mlog.log(' {}:'.format(compiler_name.lower()), mlog.green('YES'), '({path}, {version})'.format(
- path=compiler.get_path(), version=compiler_ver.split()[-1]))
+ if qt.found():
+ # Get all tools and then make sure that they are the right version
+ self.moc, self.uic, self.rcc, self.lrelease = qt.compilers_detect(self.interpreter)
+ if version_compare(qt.version, '>=5.14.0'):
+ self.rcc_supports_depfiles = True
else:
- mlog.log(' {}:'.format(compiler_name.lower()), mlog.red('NO'))
- self.tools_detected = True
+ mlog.warning('rcc dependencies will not work properly until you move to Qt >= 5.14:',
+ mlog.bold('https://bugreports.qt.io/browse/QTBUG-45460'), fatal=False)
+ else:
+ suffix = '-qt{}'.format(self.qt_version)
+ self.moc = NonExistingExternalProgram(name='moc' + suffix)
+ self.uic = NonExistingExternalProgram(name='uic' + suffix)
+ self.rcc = NonExistingExternalProgram(name='rcc' + suffix)
+ self.lrelease = NonExistingExternalProgram(name='lrelease' + suffix)
def parse_qrc(self, state, rcc_file):
if type(rcc_file) is str:
@@ -128,7 +113,7 @@ class QtBaseModule(ExtensionModule):
if disabled:
mlog.log('qt.has_tools skipped: feature', mlog.bold(feature), 'disabled')
return False
- self._detect_tools(state.environment, method)
+ self._detect_tools(state.environment, method, required=False)
for tool in (self.moc, self.uic, self.rcc, self.lrelease):
if not tool.found():
if required:
@@ -177,6 +162,9 @@ class QtBaseModule(ExtensionModule):
'output': name + '.cpp',
'command': [self.rcc, '-name', '@BASENAME@', '-o', '@OUTPUT@', rcc_extra_arguments, '@INPUT@'],
'depend_files': qrc_deps}
+ if self.rcc_supports_depfiles:
+ rcc_kwargs['depfile'] = name + '.d'
+ rcc_kwargs['command'] += ['--depfile', '@DEPFILE@']
res_target = build.CustomTarget(name, state.subdir, state.subproject, rcc_kwargs)
sources.append(res_target)
if ui_files:
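
Note: with Qt >= 5.14, rcc can emit a depfile, so the generated custom target rebuilds when files referenced by the .qrc change. The conditional kwargs wiring in isolation (values illustrative):

    rcc_supports_depfiles = True    # from version_compare(qt.version, '>=5.14.0')
    rcc_kwargs = {
        'input': ['resources.qrc'],
        'output': 'resources.cpp',
        'command': ['rcc', '-name', '@BASENAME@', '-o', '@OUTPUT@', '@INPUT@'],
    }
    if rcc_supports_depfiles:
        rcc_kwargs['depfile'] = 'resources.d'
        rcc_kwargs['command'] += ['--depfile', '@DEPFILE@']
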
diff --git a/mesonbuild/modules/qt4.py b/mesonbuild/modules/qt4.py
index 112e3e4..e85a150 100644
--- a/mesonbuild/modules/qt4.py
+++ b/mesonbuild/modules/qt4.py
@@ -12,7 +12,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-from .. import mlog
from .qt import QtBaseModule
@@ -23,6 +22,4 @@ class Qt4Module(QtBaseModule):
def initialize(*args, **kwargs):
- mlog.warning('rcc dependencies will not work properly until this upstream issue is fixed:',
- mlog.bold('https://bugreports.qt.io/browse/QTBUG-45460'))
return Qt4Module(*args, **kwargs)
diff --git a/mesonbuild/modules/qt5.py b/mesonbuild/modules/qt5.py
index 96a7964..873c2db 100644
--- a/mesonbuild/modules/qt5.py
+++ b/mesonbuild/modules/qt5.py
@@ -12,7 +12,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-from .. import mlog
from .qt import QtBaseModule
@@ -23,6 +22,4 @@ class Qt5Module(QtBaseModule):
def initialize(*args, **kwargs):
- mlog.warning('rcc dependencies will not work reliably until this upstream issue is fixed:',
- mlog.bold('https://bugreports.qt.io/browse/QTBUG-45460'))
return Qt5Module(*args, **kwargs)
diff --git a/mesonbuild/modules/windows.py b/mesonbuild/modules/windows.py
index f939782..c154ab2 100644
--- a/mesonbuild/modules/windows.py
+++ b/mesonbuild/modules/windows.py
@@ -107,7 +107,7 @@ class WindowsModule(ExtensionModule):
'a MinGW bug: https://sourceware.org/bugzilla/show_bug.cgi?id=4933'
for arg in extra_args:
if ' ' in arg:
- mlog.warning(m.format(arg))
+ mlog.warning(m.format(arg), fatal=False)
res_targets = []
diff --git a/mesonbuild/mparser.py b/mesonbuild/mparser.py
index 2cffc47..b9e381e 100644
--- a/mesonbuild/mparser.py
+++ b/mesonbuild/mparser.py
@@ -426,8 +426,8 @@ class IfNode(BaseNode):
class IfClauseNode(BaseNode):
def __init__(self, linenode: BaseNode):
super().__init__(linenode.lineno, linenode.colno, linenode.filename)
- self.ifs = [] # type: T.List[IfNode]
- self.elseblock = EmptyNode(linenode.lineno, linenode.colno, linenode.filename) # type: T.Union[EmptyNode, CodeBlockNode]
+ self.ifs = [] # type: T.List[IfNode]
+ self.elseblock = None # type: T.Union[EmptyNode, CodeBlockNode]
class UMinusNode(BaseNode):
def __init__(self, current_location: Token, value: BaseNode):
@@ -747,9 +747,7 @@ class Parser:
block = self.codeblock()
clause.ifs.append(IfNode(clause, condition, block))
self.elseifblock(clause)
- elseblock = self.elseblock()
- if elseblock:
- clause.elseblock = elseblock
+ clause.elseblock = self.elseblock()
return clause
def elseifblock(self, clause) -> None:
@@ -759,11 +757,11 @@ class Parser:
b = self.codeblock()
clause.ifs.append(IfNode(s, s, b))
- def elseblock(self) -> T.Optional[CodeBlockNode]:
+ def elseblock(self) -> T.Union[CodeBlockNode, EmptyNode]:
if self.accept('else'):
self.expect('eol')
return self.codeblock()
- return None
+ return EmptyNode(self.current.lineno, self.current.colno, self.current.filename)
def line(self) -> BaseNode:
block_start = self.current
diff --git a/mesonbuild/msetup.py b/mesonbuild/msetup.py
index 77d8377..2521511 100644
--- a/mesonbuild/msetup.py
+++ b/mesonbuild/msetup.py
@@ -86,7 +86,7 @@ class MesonApp:
# will cause a crash
for l in os.listdir(self.build_dir):
l = os.path.join(self.build_dir, l)
- if os.path.isdir(l):
+ if os.path.isdir(l) and not os.path.islink(l):
mesonlib.windows_proof_rmtree(l)
else:
mesonlib.windows_proof_rm(l)
diff --git a/mesonbuild/mtest.py b/mesonbuild/mtest.py
index 23643c5..0d81692 100644
--- a/mesonbuild/mtest.py
+++ b/mesonbuild/mtest.py
@@ -33,14 +33,17 @@ import signal
import subprocess
import sys
import tempfile
+import textwrap
import time
import typing as T
+import xml.etree.ElementTree as et
from . import build
from . import environment
from . import mlog
from .dependencies import ExternalProgram
-from .mesonlib import MesonException, get_wine_shortpath, split_args
+from .mesonlib import MesonException, get_wine_shortpath, split_args, join_args
+from .backend.backends import TestProtocol
if T.TYPE_CHECKING:
from .backend.backends import TestSerialisation
@@ -92,6 +95,9 @@ def add_arguments(parser: argparse.ArgumentParser) -> None:
parser.add_argument('--wrapper', default=None, dest='wrapper', type=split_args,
help='wrapper to run tests with (e.g. Valgrind)')
parser.add_argument('-C', default='.', dest='wd',
+ # https://github.com/python/typeshed/issues/3107
+ # https://github.com/python/mypy/issues/7177
+ type=os.path.abspath, # type: ignore
help='directory to cd into before running')
parser.add_argument('--suite', default=[], dest='include_suites', action='append', metavar='SUITE',
help='Only run tests belonging to the given suite.')
@@ -305,7 +311,7 @@ class TAPParser:
yield self.Version(version=version)
continue
- if len(line) == 0:
+ if not line:
continue
yield self.Error('unexpected input at line {}'.format(lineno))
@@ -320,13 +326,144 @@ class TAPParser:
yield self.Error('Too many tests run (expected {}, got {})'.format(plan.count, num_tests))
+
+class JunitBuilder:
+
+ """Builder for Junit test results.
+
+ Junit is impossible to stream out, it requires attributes counting the
+ total number of tests, failures, skips, and errors in the root element
+ and in each test suite. As such, we use a builder class to track each
+ test case, and calculate all metadata before writing it out.
+
+ For tests with multiple results (like from a TAP test), we record the
+ test as a suite with the project_name.test_name. This allows us to track
+ each result separately. For tests with only one result (such as exit-code
+ tests) we record each one into a suite with the name project_name. The use
+ of the project_name allows us to sort subproject tests separately from
+ the root project.
+ """
+
+ def __init__(self, filename: str) -> None:
+ self.filename = filename
+ self.root = et.Element(
+ 'testsuites', tests='0', errors='0', failures='0')
+ self.suites = {} # type: T.Dict[str, et.Element]
+
+ def log(self, name: str, test: 'TestRun') -> None:
+ """Log a single test case."""
+ if test.junit is not None:
+ for suite in test.junit.findall('.//testsuite'):
+ # Assume that we don't need to merge anything here...
+ suite.attrib['name'] = '{}.{}.{}'.format(test.project, name, suite.attrib['name'])
+
+ # GTest can inject invalid attributes
+ for case in suite.findall('.//testcase[@result]'):
+ del case.attrib['result']
+ for case in suite.findall('.//testcase[@timestamp]'):
+ del case.attrib['timestamp']
+ self.root.append(suite)
+ return
+
+ # In this case we have a test binary with multiple results.
+ # We want to record this so that each result is recorded
+ # separately
+ if test.results:
+ suitename = '{}.{}'.format(test.project, name)
+ assert suitename not in self.suites, 'duplicate suite'
+
+ suite = self.suites[suitename] = et.Element(
+ 'testsuite',
+ name=suitename,
+ tests=str(len(test.results)),
+ errors=str(sum(1 for r in test.results if r is TestResult.ERROR)),
+ failures=str(sum(1 for r in test.results if r in
+ {TestResult.FAIL, TestResult.UNEXPECTEDPASS, TestResult.TIMEOUT})),
+ skipped=str(sum(1 for r in test.results if r is TestResult.SKIP)),
+ )
+
+ for i, result in enumerate(test.results):
+ # Both name and classname are required. Set them both to the
+ # number of the test in a TAP test, as TAP doesn't give names.
+ testcase = et.SubElement(suite, 'testcase', name=str(i), classname=str(i))
+ if result is TestResult.SKIP:
+ et.SubElement(testcase, 'skipped')
+ elif result is TestResult.ERROR:
+ et.SubElement(testcase, 'error')
+ elif result is TestResult.FAIL:
+ et.SubElement(testcase, 'failure')
+ elif result is TestResult.UNEXPECTEDPASS:
+ fail = et.SubElement(testcase, 'failure')
+ fail.text = 'Test unexpectedly passed.'
+ elif result is TestResult.TIMEOUT:
+ fail = et.SubElement(testcase, 'failure')
+ fail.text = 'Test did not finish before the configured timeout.'
+ if test.stdo:
+ out = et.SubElement(suite, 'system-out')
+ out.text = test.stdo.rstrip()
+ if test.stde:
+ err = et.SubElement(suite, 'system-err')
+ err.text = test.stde.rstrip()
+ else:
+ if test.project not in self.suites:
+ suite = self.suites[test.project] = et.Element(
+ 'testsuite', name=test.project, tests='1', errors='0',
+ failures='0', skipped='0')
+ else:
+ suite = self.suites[test.project]
+ suite.attrib['tests'] = str(int(suite.attrib['tests']) + 1)
+
+ testcase = et.SubElement(suite, 'testcase', name=name, classname=name)
+ if test.res is TestResult.SKIP:
+ et.SubElement(testcase, 'skipped')
+ suite.attrib['skipped'] = str(int(suite.attrib['skipped']) + 1)
+ elif test.res is TestResult.ERROR:
+ et.SubElement(testcase, 'error')
+ suite.attrib['errors'] = str(int(suite.attrib['errors']) + 1)
+ elif test.res is TestResult.FAIL:
+ et.SubElement(testcase, 'failure')
+ suite.attrib['failures'] = str(int(suite.attrib['failures']) + 1)
+ if test.stdo:
+ out = et.SubElement(testcase, 'system-out')
+ out.text = test.stdo.rstrip()
+ if test.stde:
+ err = et.SubElement(testcase, 'system-err')
+ err.text = test.stde.rstrip()
+
+ def write(self) -> None:
+ """Calculate total test counts and write out the xml result."""
+ for suite in self.suites.values():
+ self.root.append(suite)
+ # "skipped" is not allowed in the "testsuites" element
+ for attr in ['tests', 'errors', 'failures']:
+ self.root.attrib[attr] = str(int(self.root.attrib[attr]) + int(suite.attrib[attr]))
+
+ tree = et.ElementTree(self.root)
+ with open(self.filename, 'wb') as f:
+ tree.write(f, encoding='utf-8', xml_declaration=True)
+
+
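
A minimal usage sketch for the JunitBuilder above (outside the diff; the
filename and the completed_tests iterable are hypothetical, since the real
driver is TestHarness further down in this file):

builder = JunitBuilder('meson-logs/testlog.junit.xml')
for name, test_run in completed_tests:  # pairs of test name and finished TestRun
    builder.log(name, test_run)
builder.write()  # per-suite counters are summed into the <testsuites> root here
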
class TestRun:
@classmethod
+ def make_gtest(cls, test: 'TestSerialisation', test_env: T.Dict[str, str],
+ returncode: int, starttime: float, duration: float,
+ stdo: T.Optional[str], stde: T.Optional[str],
+ cmd: T.Optional[T.List[str]]) -> 'TestRun':
+ filename = '{}.xml'.format(test.name)
+ if test.workdir:
+ filename = os.path.join(test.workdir, filename)
+ tree = et.parse(filename)
+
+ return cls.make_exitcode(
+ test, test_env, returncode, starttime, duration, stdo, stde, cmd,
+ junit=tree)
+
+ @classmethod
def make_exitcode(cls, test: 'TestSerialisation', test_env: T.Dict[str, str],
returncode: int, starttime: float, duration: float,
stdo: T.Optional[str], stde: T.Optional[str],
- cmd: T.Optional[T.List[str]]) -> 'TestRun':
+ cmd: T.Optional[T.List[str]], **kwargs) -> 'TestRun':
if returncode == GNU_SKIP_RETURNCODE:
res = TestResult.SKIP
elif returncode == GNU_ERROR_RETURNCODE:
@@ -335,30 +472,29 @@ class TestRun:
res = TestResult.EXPECTEDFAIL if bool(returncode) else TestResult.UNEXPECTEDPASS
else:
res = TestResult.FAIL if bool(returncode) else TestResult.OK
- return cls(test, test_env, res, returncode, starttime, duration, stdo, stde, cmd)
+ return cls(test, test_env, res, [], returncode, starttime, duration, stdo, stde, cmd, **kwargs)
@classmethod
def make_tap(cls, test: 'TestSerialisation', test_env: T.Dict[str, str],
returncode: int, starttime: float, duration: float,
stdo: str, stde: str,
cmd: T.Optional[T.List[str]]) -> 'TestRun':
- res = None
- num_tests = 0
+ res = None # type: T.Optional[TestResult]
+ results = [] # type: T.List[TestResult]
failed = False
- num_skipped = 0
for i in TAPParser(io.StringIO(stdo)).parse():
if isinstance(i, TAPParser.Bailout):
- res = TestResult.ERROR
+ results.append(TestResult.ERROR)
+ failed = True
elif isinstance(i, TAPParser.Test):
- if i.result == TestResult.SKIP:
- num_skipped += 1
- elif i.result in (TestResult.FAIL, TestResult.UNEXPECTEDPASS):
+ results.append(i.result)
+ if i.result not in {TestResult.OK, TestResult.EXPECTEDFAIL}:
failed = True
- num_tests += 1
elif isinstance(i, TAPParser.Error):
- res = TestResult.ERROR
+ results.append(TestResult.ERROR)
stde += '\nTAP parsing error: ' + i.message
+ failed = True
if returncode != 0:
res = TestResult.ERROR
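
To make the aggregation above concrete, a hedged sketch of how a short TAP
stream maps onto the per-subtest results list (using the TAPParser API from
this file; the stream contents are illustrative):

import io
stream = io.StringIO('1..2\nok 1\nnot ok 2\n')
results = [i.result for i in TAPParser(stream).parse()
           if isinstance(i, TAPParser.Test)]
# results == [TestResult.OK, TestResult.FAIL]; FAIL is neither OK nor
# EXPECTEDFAIL, so 'failed' becomes True and the overall result is FAIL
# (or EXPECTEDFAIL/UNEXPECTEDPASS when test.should_fail is set).
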
@@ -366,7 +502,7 @@ class TestRun:
if res is None:
# Now determine the overall result of the test based on the outcome of the subcases
-            if num_skipped == num_tests:
-                # This includes the case where num_tests is zero
+            if all(t is TestResult.SKIP for t in results):
+                # This includes the case where the results list is empty
res = TestResult.SKIP
elif test.should_fail:
@@ -374,14 +510,16 @@ class TestRun:
else:
res = TestResult.FAIL if failed else TestResult.OK
- return cls(test, test_env, res, returncode, starttime, duration, stdo, stde, cmd)
+ return cls(test, test_env, res, results, returncode, starttime, duration, stdo, stde, cmd)
def __init__(self, test: 'TestSerialisation', test_env: T.Dict[str, str],
- res: TestResult, returncode: int, starttime: float, duration: float,
+ res: TestResult, results: T.List[TestResult], returncode:
+ int, starttime: float, duration: float,
stdo: T.Optional[str], stde: T.Optional[str],
- cmd: T.Optional[T.List[str]]):
+ cmd: T.Optional[T.List[str]], *, junit: T.Optional[et.ElementTree] = None):
assert isinstance(res, TestResult)
self.res = res
+ self.results = results # May be an empty list
self.returncode = returncode
self.starttime = starttime
self.duration = duration
@@ -390,6 +528,8 @@ class TestRun:
self.cmd = cmd
self.env = test_env
self.should_fail = test.should_fail
+ self.project = test.project_name
+ self.junit = junit
def get_log(self) -> str:
res = '--- command ---\n'
@@ -436,9 +576,7 @@ def write_json_log(jsonlogfile: T.TextIO, test_name: str, result: TestRun) -> No
jsonlogfile.write(json.dumps(jresult) + '\n')
def run_with_mono(fname: str) -> bool:
- if fname.endswith('.exe') and not (is_windows() or is_cygwin()):
- return True
- return False
+ return fname.endswith('.exe') and not (is_windows() or is_cygwin())
def load_benchmarks(build_dir: str) -> T.List['TestSerialisation']:
datafile = Path(build_dir) / 'meson-private' / 'meson_benchmark_setup.dat'
@@ -471,26 +609,26 @@ class SingleTestRunner:
return ['java', '-jar'] + self.test.fname
elif not self.test.is_cross_built and run_with_mono(self.test.fname[0]):
return ['mono'] + self.test.fname
- else:
- if self.test.is_cross_built and self.test.needs_exe_wrapper:
- if self.test.exe_runner is None:
- # Can not run test on cross compiled executable
- # because there is no execute wrapper.
- return None
- else:
- if not self.test.exe_runner.found():
- msg = 'The exe_wrapper defined in the cross file {!r} was not ' \
- 'found. Please check the command and/or add it to PATH.'
- raise TestException(msg.format(self.test.exe_runner.name))
- return self.test.exe_runner.get_command() + self.test.fname
- else:
- return self.test.fname
+ elif self.test.cmd_is_built and self.test.needs_exe_wrapper:
+ if self.test.exe_runner is None:
+                # Cannot run the test on a cross-compiled executable
+                # because there is no exe wrapper defined.
+ return None
+ elif self.test.cmd_is_built:
+                # If the command is not built (i.e. it's a Python script),
+                # then we don't check for the exe wrapper
+ if not self.test.exe_runner.found():
+ msg = ('The exe_wrapper defined in the cross file {!r} was not '
+ 'found. Please check the command and/or add it to PATH.')
+ raise TestException(msg.format(self.test.exe_runner.name))
+ return self.test.exe_runner.get_command() + self.test.fname
+ return self.test.fname
def run(self) -> TestRun:
cmd = self._get_cmd()
if cmd is None:
            skip_stdout = 'Not run because cross compiled binaries cannot be executed.'
- return TestRun(self.test, self.test_env, TestResult.SKIP, GNU_SKIP_RETURNCODE, time.time(), 0.0, skip_stdout, None, None)
+ return TestRun(self.test, self.test_env, TestResult.SKIP, [], GNU_SKIP_RETURNCODE, time.time(), 0.0, skip_stdout, None, None)
else:
wrap = TestHarness.get_wrapper(self.options)
if self.options.gdb:
@@ -500,7 +638,7 @@ class SingleTestRunner:
def _run_cmd(self, cmd: T.List[str]) -> TestRun:
starttime = time.time()
- if len(self.test.extra_paths) > 0:
+ if self.test.extra_paths:
self.env['PATH'] = os.pathsep.join(self.test.extra_paths + ['']) + self.env['PATH']
winecmd = []
for c in cmd:
@@ -525,7 +663,7 @@ class SingleTestRunner:
if not self.options.verbose:
stdout = tempfile.TemporaryFile("wb+")
stderr = tempfile.TemporaryFile("wb+") if self.options.split else stdout
- if self.test.protocol == 'tap' and stderr is stdout:
+ if self.test.protocol is TestProtocol.TAP and stderr is stdout:
stdout = tempfile.TemporaryFile("wb+")
# Let gdb handle ^C instead of us
@@ -545,7 +683,14 @@ class SingleTestRunner:
# errors avoid not being able to use the terminal.
os.setsid() # type: ignore
- p = subprocess.Popen(cmd,
+ extra_cmd = [] # type: T.List[str]
+ if self.test.protocol is TestProtocol.GTEST:
+            gtestname = self.test.name
+            if self.test.workdir:
+                gtestname = os.path.join(self.test.workdir, self.test.name)
+            extra_cmd.append('--gtest_output=xml:{}.xml'.format(gtestname))
+
+ p = subprocess.Popen(cmd + extra_cmd,
stdout=stdout,
stderr=stderr,
env=self.env,
@@ -633,10 +778,12 @@ class SingleTestRunner:
stdo = ""
stde = additional_error
if timed_out:
- return TestRun(self.test, self.test_env, TestResult.TIMEOUT, p.returncode, starttime, duration, stdo, stde, cmd)
+ return TestRun(self.test, self.test_env, TestResult.TIMEOUT, [], p.returncode, starttime, duration, stdo, stde, cmd)
else:
- if self.test.protocol == 'exitcode':
+ if self.test.protocol is TestProtocol.EXITCODE:
return TestRun.make_exitcode(self.test, self.test_env, p.returncode, starttime, duration, stdo, stde, cmd)
+ elif self.test.protocol is TestProtocol.GTEST:
+ return TestRun.make_gtest(self.test, self.test_env, p.returncode, starttime, duration, stdo, stde, cmd)
else:
if self.options.verbose:
print(stdo, end='')
@@ -647,6 +794,7 @@ class TestHarness:
def __init__(self, options: argparse.Namespace):
self.options = options
self.collected_logs = [] # type: T.List[str]
+ self.collected_failures = [] # type: T.List[str]
self.fail_count = 0
self.expectedfail_count = 0
self.unexpectedpass_count = 0
@@ -655,9 +803,11 @@ class TestHarness:
self.timeout_count = 0
self.is_run = False
self.tests = None
+ self.results = [] # type: T.List[TestRun]
self.logfilename = None # type: T.Optional[str]
self.logfile = None # type: T.Optional[T.TextIO]
self.jsonlogfile = None # type: T.Optional[T.TextIO]
+ self.junit = None # type: T.Optional[JunitBuilder]
if self.options.benchmark:
self.tests = load_benchmarks(options.wd)
else:
@@ -678,12 +828,11 @@ class TestHarness:
self.close_logfiles()
def close_logfiles(self) -> None:
- if self.logfile:
- self.logfile.close()
- self.logfile = None
- if self.jsonlogfile:
- self.jsonlogfile.close()
- self.jsonlogfile = None
+ for f in ['logfile', 'jsonlogfile']:
+ lfile = getattr(self, f)
+ if lfile:
+ lfile.close()
+ setattr(self, f, None)
def merge_suite_options(self, options: argparse.Namespace, test: 'TestSerialisation') -> T.Dict[str, str]:
if ':' in options.setup:
@@ -719,6 +868,9 @@ class TestHarness:
env = os.environ.copy()
test_env = test.env.get_env(env)
env.update(test_env)
+ if (test.is_cross_built and test.needs_exe_wrapper and
+ test.exe_runner and test.exe_runner.found()):
+ env['MESON_EXE_WRAPPER'] = join_args(test.exe_runner.get_command())
return SingleTestRunner(test, test_env, env, options)
def process_test_result(self, result: TestRun) -> None:
@@ -757,6 +909,7 @@ class TestHarness:
if not self.options.quiet or result.res not in ok_statuses:
if result.res not in ok_statuses and mlog.colorize_console:
if result.res in bad_statuses:
+ self.collected_failures.append(result_str)
decorator = mlog.red
elif result.res is TestResult.SKIP:
decorator = mlog.yellow
@@ -773,23 +926,31 @@ class TestHarness:
self.logfile.write(result_str)
if self.jsonlogfile:
write_json_log(self.jsonlogfile, name, result)
+ if self.junit:
+ self.junit.log(name, result)
def print_summary(self) -> None:
- msg = '''
-Ok: {:<4}
-Expected Fail: {:<4}
-Fail: {:<4}
-Unexpected Pass: {:<4}
-Skipped: {:<4}
-Timeout: {:<4}
-'''.format(self.success_count, self.expectedfail_count, self.fail_count,
+ # Prepend a list of failures
+ msg = '' if len(self.collected_failures) < 1 else "\nSummary of Failures:\n\n"
+ msg += '\n'.join(self.collected_failures)
+ msg += textwrap.dedent('''
+
+ Ok: {:<4}
+ Expected Fail: {:<4}
+ Fail: {:<4}
+ Unexpected Pass: {:<4}
+ Skipped: {:<4}
+ Timeout: {:<4}
+ ''').format(self.success_count, self.expectedfail_count, self.fail_count,
self.unexpectedpass_count, self.skip_count, self.timeout_count)
print(msg)
if self.logfile:
self.logfile.write(msg)
+ if self.junit:
+ self.junit.write()
def print_collected_logs(self) -> None:
- if len(self.collected_logs) > 0:
+ if self.collected_logs:
if len(self.collected_logs) > 10:
print('\nThe output from 10 first failed tests:\n')
else:
@@ -871,7 +1032,7 @@ Timeout: {:<4}
print('No tests defined.')
return []
- if len(self.options.include_suites) or len(self.options.exclude_suites):
+ if self.options.include_suites or self.options.exclude_suites:
tests = []
for tst in self.tests:
if self.test_suitable(tst):
@@ -903,6 +1064,9 @@ Timeout: {:<4}
if namebase:
logfile_base += '-' + namebase.replace(' ', '_')
+
+ self.junit = JunitBuilder(logfile_base + '.junit.xml')
+
self.logfilename = logfile_base + '.txt'
self.jsonlogfilename = logfile_base + '.json'
@@ -930,7 +1094,7 @@ Timeout: {:<4}
if len(self.suites) > 1 and test.suite:
rv = TestHarness.split_suite_string(test.suite[0])[0]
s = "+".join(TestHarness.split_suite_string(s)[1] for s in test.suite)
- if len(s):
+ if s:
rv += ":"
return rv + s + " / " + test.name
else:
@@ -970,8 +1134,8 @@ Timeout: {:<4}
break
self.drain_futures(futures)
- self.print_summary()
self.print_collected_logs()
+ self.print_summary()
if self.logfilename:
print('Full log written to {}'.format(self.logfilename))
@@ -1046,7 +1210,6 @@ def run(options: argparse.Namespace) -> int:
if not exe.found():
print('Could not find requested program: {!r}'.format(check_bin))
return 1
- options.wd = os.path.abspath(options.wd)
if not options.list and not options.no_rebuild:
if not rebuild_all(options.wd):
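
The MESON_EXE_WRAPPER variable exported above is meant for tests that spawn
other cross-built binaries themselves. A hedged sketch of a test script
consuming it (the helper path is hypothetical; join_args produces a
shell-quoted string, so shlex.split is assumed to be a safe inverse):

import os, shlex, subprocess
wrapper = shlex.split(os.environ.get('MESON_EXE_WRAPPER', ''))
subprocess.check_call(wrapper + ['./cross-built-helper'])
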
diff --git a/mesonbuild/optinterpreter.py b/mesonbuild/optinterpreter.py
index c3cf1d8..56f8984 100644
--- a/mesonbuild/optinterpreter.py
+++ b/mesonbuild/optinterpreter.py
@@ -16,10 +16,11 @@ import re
import functools
import typing as T
-from . import mparser
+from . import compilers
from . import coredata
from . import mesonlib
-from . import compilers
+from . import mparser
+from .interpreterbase import FeatureNew
forbidden_option_names = set(coredata.builtin_options.keys())
forbidden_prefixes = [lang.get_lower_case_name() + '_' for lang in compilers.all_languages] + ['b_', 'backend_']
@@ -170,12 +171,21 @@ class OptionInterpreter:
res = self.reduce_single(arg.value)
if not isinstance(res, (int, float)):
raise OptionException('Token after "-" is not a number')
+ FeatureNew.single_use('negative numbers in meson_options.txt', '0.54.1', self.subproject)
return -res
elif isinstance(arg, mparser.NotNode):
res = self.reduce_single(arg.value)
if not isinstance(res, bool):
                raise OptionException('Token after "not" is not a boolean')
+ FeatureNew.single_use('negation ("not") in meson_options.txt', '0.54.1', self.subproject)
return not res
+ elif isinstance(arg, mparser.ArithmeticNode):
+ l = self.reduce_single(arg.left)
+ r = self.reduce_single(arg.right)
+ if not (arg.operation == 'add' and isinstance(l, str) and isinstance(r, str)):
+ raise OptionException('Only string concatenation with the "+" operator is allowed')
+ FeatureNew.single_use('string concatenation in meson_options.txt', '0.55.0', self.subproject)
+ return l + r
else:
raise OptionException('Arguments may only be string, int, bool, or array of those.')
@@ -200,11 +210,8 @@ class OptionInterpreter:
raise OptionException('Only calls to option() are allowed in option files.')
(posargs, kwargs) = self.reduce_arguments(node.args)
- # FIXME: Cannot use FeatureNew while parsing options because we parse
- # it before reading options in project(). See func_project() in
- # interpreter.py
- #if 'yield' in kwargs:
- # FeatureNew('option yield', '0.45.0').use(self.subproject)
+ if 'yield' in kwargs:
+ FeatureNew.single_use('option yield', '0.45.0', self.subproject)
if 'type' not in kwargs:
raise OptionException('Option call missing mandatory "type" keyword argument')
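
For context, the constructs now gated by FeatureNew above would appear in a
meson_options.txt roughly as follows (Meson option syntax, shown as a Python
string; the option names are made up):

example_options = '''
option('jobs', type : 'integer', value : -1)                 # 0.54.1: negative numbers
option('fast', type : 'boolean', value : not true)           # 0.54.1: negation with "not"
option('docdir', type : 'string', value : 'share' + '/doc')  # 0.55.0: string concatenation
'''
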
diff --git a/mesonbuild/scripts/cmake_run_ctgt.py b/mesonbuild/scripts/cmake_run_ctgt.py
new file mode 100755
index 0000000..5c0b31f
--- /dev/null
+++ b/mesonbuild/scripts/cmake_run_ctgt.py
@@ -0,0 +1,100 @@
+#!/usr/bin/env python3
+
+import argparse
+import subprocess
+import shutil
+import os
+import sys
+from pathlib import Path
+
+def run(argsv):
+ commands = [[]]
+ SEPARATOR = ';;;'
+
+ # Generate CMD parameters
+ parser = argparse.ArgumentParser(description='Wrapper for add_custom_command')
+ parser.add_argument('-d', '--directory', type=str, metavar='D', required=True, help='Working directory to cwd to')
+ parser.add_argument('-o', '--outputs', nargs='+', metavar='O', required=True, help='Expected output files')
+ parser.add_argument('-O', '--original-outputs', nargs='*', metavar='O', default=[], help='Output files expected by CMake')
+    parser.add_argument('commands', nargs=argparse.REMAINDER, help='A "{}" separated list of commands'.format(SEPARATOR))
+
+ # Parse
+ args = parser.parse_args(argsv)
+
+ dummy_target = None
+ if len(args.outputs) == 1 and len(args.original_outputs) == 0:
+ dummy_target = args.outputs[0]
+ elif len(args.outputs) != len(args.original_outputs):
+ print('Length of output list and original output list differ')
+ sys.exit(1)
+
+ for i in args.commands:
+ if i == SEPARATOR:
+ commands += [[]]
+ continue
+
+        i = i.replace('"', '')  # Remove leftover quotes
+ commands[-1] += [i]
+
+ # Execute
+ for i in commands:
+ # Skip empty lists
+ if not i:
+ continue
+
+ cmd = []
+ stdout = None
+ stderr = None
+ capture_file = ''
+
+ for j in i:
+ if j in ['>', '>>']:
+ stdout = subprocess.PIPE
+ continue
+ elif j in ['&>', '&>>']:
+ stdout = subprocess.PIPE
+ stderr = subprocess.STDOUT
+ continue
+
+ if stdout is not None or stderr is not None:
+ capture_file += j
+ else:
+ cmd += [j]
+
+ try:
+ os.makedirs(args.directory, exist_ok=True)
+
+ res = subprocess.run(cmd, stdout=stdout, stderr=stderr, cwd=args.directory, check=True)
+ if capture_file:
+ out_file = Path(args.directory) / capture_file
+ out_file.write_bytes(res.stdout)
+ except subprocess.CalledProcessError:
+ sys.exit(1)
+
+ if dummy_target:
+ with open(dummy_target, 'a'):
+ os.utime(dummy_target, None)
+ sys.exit(0)
+
+ # Copy outputs
+ zipped_outputs = zip(args.outputs, args.original_outputs)
+ for expected, generated in zipped_outputs:
+ do_copy = False
+ if not os.path.exists(expected):
+ if not os.path.exists(generated):
+ print('Unable to find generated file. This can cause the build to fail:')
+ print(generated)
+ do_copy = False
+ else:
+ do_copy = True
+ elif os.path.exists(generated):
+ if os.path.getmtime(generated) > os.path.getmtime(expected):
+ do_copy = True
+
+ if do_copy:
+ if os.path.exists(expected):
+ os.remove(expected)
+ shutil.copyfile(generated, expected)
+
+if __name__ == '__main__':
+    sys.exit(run(sys.argv[1:]))
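
A hedged invocation sketch for the wrapper above (command names and paths are
made up): two commands separated by ';;;', with the second command's stdout
captured into a file relative to the working directory:

run(['-d', 'build/gen',
     '-o', 'gen/final.h', '-O', 'gen/raw.h',
     'my-generator', '--out', 'gen/raw.h',
     ';;;',
     'my-lister', '>', 'listing.txt'])
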
diff --git a/mesonbuild/scripts/coverage.py b/mesonbuild/scripts/coverage.py
index 4bd41fe..7231972 100644
--- a/mesonbuild/scripts/coverage.py
+++ b/mesonbuild/scripts/coverage.py
@@ -12,15 +12,15 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-from mesonbuild import environment
+from mesonbuild import environment, mesonlib
-import argparse, sys, os, subprocess, pathlib
+import argparse, sys, os, subprocess, pathlib, stat
-def coverage(outputs, source_root, subproject_root, build_root, log_dir):
+def coverage(outputs, source_root, subproject_root, build_root, log_dir, use_llvm_cov):
outfiles = []
exitcode = 0
- (gcovr_exe, gcovr_new_rootdir, lcov_exe, genhtml_exe) = environment.find_coverage_tools()
+ (gcovr_exe, gcovr_new_rootdir, lcov_exe, genhtml_exe, llvm_cov_exe) = environment.find_coverage_tools()
# gcovr >= 4.2 requires a different syntax for out of source builds
if gcovr_new_rootdir:
@@ -28,13 +28,18 @@ def coverage(outputs, source_root, subproject_root, build_root, log_dir):
else:
gcovr_base_cmd = [gcovr_exe, '-r', build_root]
+ if use_llvm_cov:
+ gcov_exe_args = ['--gcov-executable', llvm_cov_exe + ' gcov']
+ else:
+ gcov_exe_args = []
+
if not outputs or 'xml' in outputs:
if gcovr_exe:
subprocess.check_call(gcovr_base_cmd +
['-x',
'-e', subproject_root,
- '-o', os.path.join(log_dir, 'coverage.xml'),
- ])
+ '-o', os.path.join(log_dir, 'coverage.xml')
+ ] + gcov_exe_args)
outfiles.append(('Xml', pathlib.Path(log_dir, 'coverage.xml')))
elif outputs:
print('gcovr >= 3.3 needed to generate Xml coverage report')
@@ -44,8 +49,8 @@ def coverage(outputs, source_root, subproject_root, build_root, log_dir):
if gcovr_exe:
subprocess.check_call(gcovr_base_cmd +
['-e', subproject_root,
- '-o', os.path.join(log_dir, 'coverage.txt'),
- ])
+ '-o', os.path.join(log_dir, 'coverage.txt')
+ ] + gcov_exe_args)
outfiles.append(('Text', pathlib.Path(log_dir, 'coverage.txt')))
elif outputs:
print('gcovr >= 3.3 needed to generate text coverage report')
@@ -58,19 +63,34 @@ def coverage(outputs, source_root, subproject_root, build_root, log_dir):
initial_tracefile = covinfo + '.initial'
run_tracefile = covinfo + '.run'
raw_tracefile = covinfo + '.raw'
+ if use_llvm_cov:
+ # Create a shim to allow using llvm-cov as a gcov tool.
+ if mesonlib.is_windows():
+ llvm_cov_shim_path = os.path.join(log_dir, 'llvm-cov.bat')
+ with open(llvm_cov_shim_path, 'w') as llvm_cov_bat:
+ llvm_cov_bat.write('@"{}" gcov %*'.format(llvm_cov_exe))
+ else:
+ llvm_cov_shim_path = os.path.join(log_dir, 'llvm-cov.sh')
+ with open(llvm_cov_shim_path, 'w') as llvm_cov_sh:
+                    llvm_cov_sh.write('#!/usr/bin/env sh\nexec "{}" gcov "$@"'.format(llvm_cov_exe))
+ os.chmod(llvm_cov_shim_path, os.stat(llvm_cov_shim_path).st_mode | stat.S_IEXEC)
+ gcov_tool_args = ['--gcov-tool', llvm_cov_shim_path]
+ else:
+ gcov_tool_args = []
subprocess.check_call([lcov_exe,
'--directory', build_root,
'--capture',
'--initial',
'--output-file',
- initial_tracefile])
+ initial_tracefile] +
+ gcov_tool_args)
subprocess.check_call([lcov_exe,
'--directory', build_root,
'--capture',
'--output-file', run_tracefile,
'--no-checksum',
- '--rc', 'lcov_branch_coverage=1',
- ])
+ '--rc', 'lcov_branch_coverage=1'] +
+ gcov_tool_args)
# Join initial and test results.
subprocess.check_call([lcov_exe,
'-a', initial_tracefile,
@@ -137,6 +157,8 @@ def run(args):
const='xml', help='generate Xml report')
parser.add_argument('--html', dest='outputs', action='append_const',
const='html', help='generate Html report')
+ parser.add_argument('--use_llvm_cov', action='store_true',
+ help='use llvm-cov')
parser.add_argument('source_root')
parser.add_argument('subproject_root')
parser.add_argument('build_root')
@@ -144,7 +166,7 @@ def run(args):
options = parser.parse_args(args)
return coverage(options.outputs, options.source_root,
options.subproject_root, options.build_root,
- options.log_dir)
+ options.log_dir, options.use_llvm_cov)
if __name__ == '__main__':
sys.exit(run(sys.argv[1:]))
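
The shim logic above could be read as the following standalone helper; a
sketch only, with a hypothetical function name, relying on the os, stat and
mesonlib imports already present in this module:

def make_llvm_cov_shim(log_dir, llvm_cov_exe):
    if mesonlib.is_windows():
        path = os.path.join(log_dir, 'llvm-cov.bat')
        with open(path, 'w') as f:
            f.write('@"{}" gcov %*'.format(llvm_cov_exe))
    else:
        path = os.path.join(log_dir, 'llvm-cov.sh')
        with open(path, 'w') as f:
            f.write('#!/usr/bin/env sh\nexec "{}" gcov "$@"'.format(llvm_cov_exe))
        os.chmod(path, os.stat(path).st_mode | stat.S_IEXEC)  # shim must be executable
    return path  # passed to lcov as --gcov-tool <path>
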
diff --git a/mesonbuild/scripts/depfixer.py b/mesonbuild/scripts/depfixer.py
index 5ba3a97..a3a3eff 100644
--- a/mesonbuild/scripts/depfixer.py
+++ b/mesonbuild/scripts/depfixer.py
@@ -290,13 +290,13 @@ class Elf(DataSizes):
self.bf.seek(offset)
self.bf.write(newname)
- def fix_rpath(self, new_rpath):
+ def fix_rpath(self, rpath_dirs_to_remove, new_rpath):
# The path to search for can be either rpath or runpath.
# Fix both of them to be sure.
- self.fix_rpathtype_entry(new_rpath, DT_RPATH)
- self.fix_rpathtype_entry(new_rpath, DT_RUNPATH)
+ self.fix_rpathtype_entry(rpath_dirs_to_remove, new_rpath, DT_RPATH)
+ self.fix_rpathtype_entry(rpath_dirs_to_remove, new_rpath, DT_RUNPATH)
- def fix_rpathtype_entry(self, new_rpath, entrynum):
+ def fix_rpathtype_entry(self, rpath_dirs_to_remove, new_rpath, entrynum):
if isinstance(new_rpath, str):
new_rpath = new_rpath.encode('utf8')
rp_off = self.get_entry_offset(entrynum)
@@ -305,7 +305,23 @@ class Elf(DataSizes):
print('File does not have rpath. It should be a fully static executable.')
return
self.bf.seek(rp_off)
+
old_rpath = self.read_str()
+ new_rpaths = []
+ if new_rpath:
+ new_rpaths.append(new_rpath)
+ if old_rpath:
+ # Filter out build-only rpath entries
+ # added by get_link_dep_subdirs() or
+ # specified by user with build_rpath.
+ for dir in old_rpath.split(b':'):
+ if not (dir in rpath_dirs_to_remove or
+ dir == (b'X' * len(dir))):
+ new_rpaths.append(dir)
+
+ # Prepend user-specified new entries while preserving the ones that came from pkgconfig etc.
+ new_rpath = b':'.join(new_rpaths)
+
if len(old_rpath) < len(new_rpath):
sys.exit("New rpath must not be longer than the old one.")
# The linker does read-only string deduplication. If there is a
@@ -343,13 +359,13 @@ class Elf(DataSizes):
entry.write(self.bf)
return None
-def fix_elf(fname, new_rpath, verbose=True):
+def fix_elf(fname, rpath_dirs_to_remove, new_rpath, verbose=True):
with Elf(fname, verbose) as e:
if new_rpath is None:
e.print_rpath()
e.print_runpath()
else:
- e.fix_rpath(new_rpath)
+ e.fix_rpath(rpath_dirs_to_remove, new_rpath)
def get_darwin_rpaths_to_remove(fname):
out = subprocess.check_output(['otool', '-l', fname],
@@ -430,7 +446,7 @@ def fix_jar(fname):
f.truncate()
subprocess.check_call(['jar', 'ufm', fname, 'META-INF/MANIFEST.MF'])
-def fix_rpath(fname, new_rpath, final_path, install_name_mappings, verbose=True):
+def fix_rpath(fname, rpath_dirs_to_remove, new_rpath, final_path, install_name_mappings, verbose=True):
global INSTALL_NAME_TOOL
# Static libraries, import libraries, debug information, headers, etc
# never have rpaths
@@ -441,7 +457,7 @@ def fix_rpath(fname, new_rpath, final_path, install_name_mappings, verbose=True)
if fname.endswith('.jar'):
fix_jar(fname)
return
- fix_elf(fname, new_rpath, verbose)
+ fix_elf(fname, rpath_dirs_to_remove, new_rpath, verbose)
return
except SystemExit as e:
if isinstance(e.code, int) and e.code == 0:
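
The rpath filtering added to fix_rpathtype_entry() above amounts to the merge
below; a standalone sketch with a hypothetical helper name and a worked
example:

def merge_rpaths(old_rpath, rpath_dirs_to_remove, new_rpath):
    parts = [new_rpath] if new_rpath else []
    for d in old_rpath.split(b':'):
        # drop build-only dirs and all-'X' placeholder entries
        if d not in rpath_dirs_to_remove and d != b'X' * len(d):
            parts.append(d)
    return b':'.join(parts)

# merge_rpaths(b'/build/sub/foo:/opt/lib', {b'/build/sub/foo'}, b'$ORIGIN')
#   == b'$ORIGIN:/opt/lib'
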
diff --git a/mesonbuild/scripts/gtkdochelper.py b/mesonbuild/scripts/gtkdochelper.py
index 6b174a6..812604a 100644
--- a/mesonbuild/scripts/gtkdochelper.py
+++ b/mesonbuild/scripts/gtkdochelper.py
@@ -16,7 +16,7 @@ import sys, os
import subprocess
import shutil
import argparse
-from ..mesonlib import MesonException, Popen_safe, is_windows, split_args
+from ..mesonlib import MesonException, Popen_safe, is_windows, is_cygwin, split_args
from . import destdir_join
parser = argparse.ArgumentParser()
@@ -55,16 +55,18 @@ def gtkdoc_run_check(cmd, cwd, library_paths=None):
library_paths = []
env = dict(os.environ)
- if is_windows():
+ if is_windows() or is_cygwin():
if 'PATH' in env:
library_paths.extend(env['PATH'].split(os.pathsep))
env['PATH'] = os.pathsep.join(library_paths)
- cmd.insert(0, sys.executable)
else:
if 'LD_LIBRARY_PATH' in env:
library_paths.extend(env['LD_LIBRARY_PATH'].split(os.pathsep))
env['LD_LIBRARY_PATH'] = os.pathsep.join(library_paths)
+ if is_windows():
+ cmd.insert(0, sys.executable)
+
# Put stderr into stdout since we want to print it out anyway.
# This preserves the order of messages.
p, out = Popen_safe(cmd, cwd=cwd, env=env, stderr=subprocess.STDOUT)[0:2]
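
Condensed, the search-path handling above now selects one environment variable
per platform; a sketch only, with a hypothetical helper name:

def library_path_env(env, library_paths):
    var = 'PATH' if is_windows() or is_cygwin() else 'LD_LIBRARY_PATH'
    if var in env:
        library_paths = library_paths + env[var].split(os.pathsep)
    env[var] = os.pathsep.join(library_paths)
    return env
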
diff --git a/mesonbuild/scripts/symbolextractor.py b/mesonbuild/scripts/symbolextractor.py
index d393f93..f4084be 100644
--- a/mesonbuild/scripts/symbolextractor.py
+++ b/mesonbuild/scripts/symbolextractor.py
@@ -113,11 +113,23 @@ def gnu_syms(libfilename: str, outfilename: str):
continue
line_split = line.split()
entry = line_split[0:2]
- if len(line_split) >= 4:
+ # Store the size of symbols pointing to data objects so we relink
+ # when those change, which is needed because of copy relocations
+ # https://github.com/mesonbuild/meson/pull/7132#issuecomment-628353702
+ if line_split[1].upper() in ('B', 'G', 'D') and len(line_split) >= 4:
entry += [line_split[3]]
result += [' '.join(entry)]
write_if_changed('\n'.join(result) + '\n', outfilename)
+def solaris_syms(libfilename: str, outfilename: str):
+ # gnu_syms() works with GNU nm & readelf, not Solaris nm & elfdump
+ origpath = os.environ['PATH']
+ try:
+ os.environ['PATH'] = '/usr/gnu/bin:' + origpath
+ gnu_syms(libfilename, outfilename)
+ finally:
+ os.environ['PATH'] = origpath
+
def osx_syms(libfilename: str, outfilename: str):
# Get the name of the library
output = call_tool('otool', ['-l', libfilename])
@@ -139,6 +151,23 @@ def osx_syms(libfilename: str, outfilename: str):
result += [' '.join(x.split()[0:2]) for x in output.split('\n')]
write_if_changed('\n'.join(result) + '\n', outfilename)
+def openbsd_syms(libfilename: str, outfilename: str):
+ # Get the name of the library
+ output = call_tool('readelf', ['-d', libfilename])
+ if not output:
+ dummy_syms(outfilename)
+ return
+ result = [x for x in output.split('\n') if 'SONAME' in x]
+ assert(len(result) <= 1)
+ # Get a list of all symbols exported
+ output = call_tool('nm', ['-D', '-P', '-g', libfilename])
+ if not output:
+ dummy_syms(outfilename)
+ return
+ # U = undefined (cope with the lack of --defined-only option)
+ result += [' '.join(x.split()[0:2]) for x in output.split('\n') if x and not x.endswith('U ')]
+ write_if_changed('\n'.join(result) + '\n', outfilename)
+
def cygwin_syms(impfilename: str, outfilename: str):
# Get the name of the library
output = call_tool('dlltool', ['-I', impfilename])
@@ -234,6 +263,8 @@ def gen_symbols(libfilename: str, impfilename: str, outfilename: str, cross_host
gnu_syms(libfilename, outfilename)
elif mesonlib.is_osx():
osx_syms(libfilename, outfilename)
+ elif mesonlib.is_openbsd():
+ openbsd_syms(libfilename, outfilename)
elif mesonlib.is_windows():
if os.path.isfile(impfilename):
windows_syms(impfilename, outfilename)
@@ -248,6 +279,8 @@ def gen_symbols(libfilename: str, impfilename: str, outfilename: str, cross_host
# No import library. Not sure how the DLL is being used, so just
# rebuild everything that links to it every time.
dummy_syms(outfilename)
+ elif mesonlib.is_sunos():
+ solaris_syms(libfilename, outfilename)
else:
if not os.path.exists(TOOL_WARNING_FILE):
mlog.warning('Symbol extracting has not been implemented for this '
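
A worked example of the B/G/D handling added to gnu_syms() above; the nm
output line is illustrative:

line = 'exported_var D 0000000000601040 0000000000000008'
line_split = line.split()
entry = line_split[0:2]
if line_split[1].upper() in ('B', 'G', 'D') and len(line_split) >= 4:
    entry += [line_split[3]]  # keep the size so a size change forces a relink
assert ' '.join(entry) == 'exported_var D 0000000000000008'
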
diff --git a/mesonbuild/wrap/wrap.py b/mesonbuild/wrap/wrap.py
index 1715cd3..aba220e 100644
--- a/mesonbuild/wrap/wrap.py
+++ b/mesonbuild/wrap/wrap.py
@@ -27,7 +27,9 @@ import sys
import configparser
import typing as T
+from pathlib import Path
from . import WrapMode
+from .. import coredata
from ..mesonlib import git, GIT, ProgressBar, MesonException
if T.TYPE_CHECKING:
@@ -59,7 +61,10 @@ def quiet_git(cmd: T.List[str], workingdir: str) -> T.Tuple[bool, str]:
def verbose_git(cmd: T.List[str], workingdir: str, check: bool = False) -> bool:
if not GIT:
return False
- return git(cmd, workingdir, check=check).returncode == 0
+ try:
+ return git(cmd, workingdir, check=check).returncode == 0
+ except subprocess.CalledProcessError:
+ raise WrapException('Git command failed')
def whitelist_wrapdb(urlstr: str) -> urllib.parse.ParseResult:
""" raises WrapException if not whitelisted subdomain """
@@ -102,13 +107,31 @@ class WrapNotFoundException(WrapException):
class PackageDefinition:
def __init__(self, fname: str):
self.filename = fname
+ self.type = None
+ self.values = {} # type: T.Dict[str, str]
+ self.provided_deps = {} # type: T.Dict[str, T.Optional[str]]
+ self.provided_programs = [] # type: T.List[str]
self.basename = os.path.basename(fname)
- self.name = self.basename[:-5]
+ self.name = self.basename
+ if self.name.endswith('.wrap'):
+ self.name = self.name[:-5]
+ self.provided_deps[self.name] = None
+ if fname.endswith('.wrap'):
+ self.parse_wrap(fname)
+ self.directory = self.values.get('directory', self.name)
+ if os.path.dirname(self.directory):
+ raise WrapException('Directory key must be a name and not a path')
+
+ def parse_wrap(self, fname: str):
try:
self.config = configparser.ConfigParser(interpolation=None)
self.config.read(fname)
except configparser.Error:
raise WrapException('Failed to parse {}'.format(self.basename))
+ self.parse_wrap_section()
+ self.parse_provide_section()
+
+ def parse_wrap_section(self):
if len(self.config.sections()) < 1:
raise WrapException('Missing sections in {}'.format(self.basename))
self.wrap_section = self.config.sections()[0]
@@ -118,6 +141,27 @@ class PackageDefinition:
self.type = self.wrap_section[5:]
self.values = dict(self.config[self.wrap_section])
+ def parse_provide_section(self):
+ if self.config.has_section('provide'):
+ for k, v in self.config['provide'].items():
+ if k == 'dependency_names':
+                    # A comma-separated list of dependency names that do not
+                    # need a variable name
+ names = {n.strip(): None for n in v.split(',')}
+ self.provided_deps.update(names)
+ continue
+ if k == 'program_names':
+                    # A comma-separated list of program names
+ names = [n.strip() for n in v.split(',')]
+ self.provided_programs += names
+ continue
+ if not v:
+ m = ('Empty dependency variable name for {!r} in {}. '
+ 'If the subproject uses meson.override_dependency() '
+ 'it can be added in the "dependency_names" special key.')
+ raise WrapException(m.format(k, self.basename))
+ self.provided_deps[k] = v
+
def get(self, key: str) -> str:
try:
return self.values[key]
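
A self-contained sketch of the [provide] parsing introduced above, using an
in-memory config instead of a real .wrap file (section contents illustrative):

import configparser
cfg = configparser.ConfigParser(interpolation=None)
cfg.read_string('''
[wrap-file]
directory = foo-1.0

[provide]
dependency_names = foo, foo-bar
program_names = foo-tool
foo-dep = foo_dep_variable
''')
provided_deps = {}
for k, v in cfg['provide'].items():
    if k == 'dependency_names':
        provided_deps.update({n.strip(): None for n in v.split(',')})
    elif k != 'program_names':
        provided_deps[k] = v
# provided_deps == {'foo': None, 'foo-bar': None, 'foo-dep': 'foo_dep_variable'}
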
@@ -125,36 +169,87 @@ class PackageDefinition:
m = 'Missing key {!r} in {}'
raise WrapException(m.format(key, self.basename))
- def has_patch(self) -> bool:
- return 'patch_url' in self.values
-
-def load_wrap(subdir_root: str, packagename: str) -> PackageDefinition:
+def get_directory(subdir_root: str, packagename: str) -> str:
fname = os.path.join(subdir_root, packagename + '.wrap')
if os.path.isfile(fname):
- return PackageDefinition(fname)
- return None
-
-def get_directory(subdir_root: str, packagename: str):
- directory = packagename
- # We always have to load the wrap file, if it exists, because it could
- # override the default directory name.
- wrap = load_wrap(subdir_root, packagename)
- if wrap and 'directory' in wrap.values:
- directory = wrap.get('directory')
- if os.path.dirname(directory):
- raise WrapException('Directory key must be a name and not a path')
- return wrap, directory
+ wrap = PackageDefinition(fname)
+ return wrap.directory
+ return packagename
class Resolver:
def __init__(self, subdir_root: str, wrap_mode=WrapMode.default):
self.wrap_mode = wrap_mode
self.subdir_root = subdir_root
self.cachedir = os.path.join(self.subdir_root, 'packagecache')
-
- def resolve(self, packagename: str, method: str) -> str:
+ self.filesdir = os.path.join(self.subdir_root, 'packagefiles')
+ self.wraps = {} # type: T.Dict[str, PackageDefinition]
+ self.provided_deps = {} # type: T.Dict[str, PackageDefinition]
+ self.provided_programs = {} # type: T.Dict[str, PackageDefinition]
+ self.load_wraps()
+
+ def load_wraps(self):
+ if not os.path.isdir(self.subdir_root):
+ return
+ root, dirs, files = next(os.walk(self.subdir_root))
+ for i in files:
+ if not i.endswith('.wrap'):
+ continue
+ fname = os.path.join(self.subdir_root, i)
+ wrap = PackageDefinition(fname)
+ self.wraps[wrap.name] = wrap
+ if wrap.directory in dirs:
+ dirs.remove(wrap.directory)
+ # Add dummy package definition for directories not associated with a wrap file.
+ for i in dirs:
+ if i in ['packagecache', 'packagefiles']:
+ continue
+ fname = os.path.join(self.subdir_root, i)
+ wrap = PackageDefinition(fname)
+ self.wraps[wrap.name] = wrap
+
+ for wrap in self.wraps.values():
+ for k in wrap.provided_deps.keys():
+ if k in self.provided_deps:
+ prev_wrap = self.provided_deps[k]
+ m = 'Multiple wrap files provide {!r} dependency: {} and {}'
+ raise WrapException(m.format(k, wrap.basename, prev_wrap.basename))
+ self.provided_deps[k] = wrap
+ for k in wrap.provided_programs:
+ if k in self.provided_programs:
+ prev_wrap = self.provided_programs[k]
+ m = 'Multiple wrap files provide {!r} program: {} and {}'
+ raise WrapException(m.format(k, wrap.basename, prev_wrap.basename))
+ self.provided_programs[k] = wrap
+
+ def find_dep_provider(self, packagename: str):
+ # Return value is in the same format as fallback kwarg:
+ # ['subproject_name', 'variable_name'], or 'subproject_name'.
+ wrap = self.provided_deps.get(packagename)
+ if wrap:
+ dep_var = wrap.provided_deps.get(packagename)
+ if dep_var:
+ return [wrap.name, dep_var]
+ return wrap.name
+ return None
+
+ def find_program_provider(self, names: T.List[str]):
+ for name in names:
+ wrap = self.provided_programs.get(name)
+ if wrap:
+ return wrap.name
+ return None
+
+ def resolve(self, packagename: str, method: str, current_subproject: str = '') -> str:
+ self.current_subproject = current_subproject
self.packagename = packagename
- self.wrap, self.directory = get_directory(self.subdir_root, self.packagename)
+ self.directory = packagename
+ self.wrap = self.wraps.get(packagename)
+ if not self.wrap:
+ m = 'Subproject directory not found and {}.wrap file not found'
+ raise WrapNotFoundException(m.format(self.packagename))
+ self.directory = self.wrap.directory
self.dirname = os.path.join(self.subdir_root, self.directory)
+
meson_file = os.path.join(self.dirname, 'meson.build')
cmake_file = os.path.join(self.dirname, 'CMakeLists.txt')
@@ -174,11 +269,6 @@ class Resolver:
if not os.path.isdir(self.dirname):
raise WrapException('Path already exists but is not a directory')
else:
- # A wrap file is required to download
- if not self.wrap:
- m = 'Subproject directory not found and {}.wrap file not found'
- raise WrapNotFoundException(m.format(self.packagename))
-
if self.wrap.type == 'file':
self.get_file()
else:
@@ -191,6 +281,7 @@ class Resolver:
self.get_svn()
else:
raise WrapException('Unknown wrap type {!r}'.format(self.wrap.type))
+ self.apply_patch()
# A meson.build or CMakeLists.txt file is required in the directory
if method == 'meson' and not os.path.exists(meson_file):
@@ -250,8 +341,6 @@ class Resolver:
os.mkdir(self.dirname)
extract_dir = self.dirname
shutil.unpack_archive(path, extract_dir)
- if self.wrap.has_patch():
- self.apply_patch()
def get_git(self) -> None:
if not GIT:
@@ -330,7 +419,8 @@ class Resolver:
raise WrapException('{} may be a WrapDB-impersonating URL'.format(urlstring))
else:
try:
- resp = urllib.request.urlopen(urlstring, timeout=REQ_TIMEOUT)
+ req = urllib.request.Request(urlstring, headers={'User-Agent': 'mesonbuild/{}'.format(coredata.version)})
+ resp = urllib.request.urlopen(req, timeout=REQ_TIMEOUT)
except urllib.error.URLError as e:
mlog.log(str(e))
            raise WrapException('could not get {}; is the internet available?'.format(urlstring))
@@ -363,7 +453,9 @@ class Resolver:
hashvalue = h.hexdigest()
return hashvalue, tmpfile.name
- def check_hash(self, what: str, path: str) -> None:
+ def check_hash(self, what: str, path: str, hash_required: bool = True) -> None:
+ if what + '_hash' not in self.wrap.values and not hash_required:
+ return
expected = self.wrap.get(what + '_hash')
h = hashlib.sha256()
with open(path, 'rb') as f:
@@ -393,26 +485,49 @@ class Resolver:
def get_file_internal(self, what: str) -> str:
filename = self.wrap.get(what + '_filename')
- cache_path = os.path.join(self.cachedir, filename)
+ if what + '_url' in self.wrap.values:
+ cache_path = os.path.join(self.cachedir, filename)
+
+ if os.path.exists(cache_path):
+ self.check_hash(what, cache_path)
+ mlog.log('Using', mlog.bold(self.packagename), what, 'from cache.')
+ return cache_path
- if os.path.exists(cache_path):
- self.check_hash(what, cache_path)
- mlog.log('Using', mlog.bold(self.packagename), what, 'from cache.')
+ if not os.path.isdir(self.cachedir):
+ os.mkdir(self.cachedir)
+ self.download(what, cache_path)
return cache_path
+ else:
+ from ..interpreterbase import FeatureNew
+ FeatureNew('Local wrap patch files without {}_url'.format(what), '0.55.0').use(self.current_subproject)
+ path = Path(self.filesdir) / filename
+
+ if not path.exists():
+ raise WrapException('File "{}" does not exist'.format(path))
+ self.check_hash(what, path.as_posix(), hash_required=False)
- if not os.path.isdir(self.cachedir):
- os.mkdir(self.cachedir)
- self.download(what, cache_path)
- return cache_path
+ return path.as_posix()
def apply_patch(self) -> None:
- path = self.get_file_internal('patch')
- try:
- shutil.unpack_archive(path, self.subdir_root)
- except Exception:
- with tempfile.TemporaryDirectory() as workdir:
- shutil.unpack_archive(path, workdir)
- self.copy_tree(workdir, self.subdir_root)
+ if 'patch_filename' in self.wrap.values and 'patch_directory' in self.wrap.values:
+ m = 'Wrap file {!r} must not have both "patch_filename" and "patch_directory"'
+ raise WrapException(m.format(self.wrap.basename))
+ if 'patch_filename' in self.wrap.values:
+ path = self.get_file_internal('patch')
+ try:
+ shutil.unpack_archive(path, self.subdir_root)
+ except Exception:
+ with tempfile.TemporaryDirectory() as workdir:
+ shutil.unpack_archive(path, workdir)
+ self.copy_tree(workdir, self.subdir_root)
+ elif 'patch_directory' in self.wrap.values:
+ from ..interpreterbase import FeatureNew
+ FeatureNew('patch_directory', '0.55.0').use(self.current_subproject)
+ patch_dir = self.wrap.values['patch_directory']
+ src_dir = os.path.join(self.filesdir, patch_dir)
+ if not os.path.isdir(src_dir):
+                raise WrapException('patch directory does not exist: {}'.format(patch_dir))
+ self.copy_tree(src_dir, self.dirname)
def copy_tree(self, root_src_dir: str, root_dst_dir: str) -> None:
"""