aboutsummaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rw-r--r--.github/workflows/macos.yml2
-rw-r--r--data/test.schema.json3
-rw-r--r--docs/markdown/Contributing.md10
-rw-r--r--docs/markdown/External-Project-module.md4
-rw-r--r--docs/markdown/Reference-manual.md8
-rw-r--r--docs/markdown/Vala.md2
-rw-r--r--docs/markdown/howtox.md2
-rw-r--r--docs/markdown/snippets/waf.md5
-rw-r--r--mesonbuild/backend/backends.py427
-rw-r--r--mesonbuild/backend/ninjabackend.py44
-rw-r--r--mesonbuild/backend/vs2010backend.py7
-rw-r--r--mesonbuild/backend/xcodebackend.py3
-rw-r--r--mesonbuild/build.py139
-rw-r--r--mesonbuild/compilers/compilers.py4
-rw-r--r--mesonbuild/environment.py20
-rw-r--r--mesonbuild/interpreter/interpreterobjects.py9
-rw-r--r--mesonbuild/mesonlib/universal.py43
-rw-r--r--mesonbuild/modules/python.py80
-rw-r--r--mesonbuild/modules/unstable_external_project.py39
-rw-r--r--mesonbuild/modules/unstable_rust.py67
-rw-r--r--mesonbuild/scripts/depscan.py10
-rw-r--r--mesonbuild/scripts/externalproject.py32
-rwxr-xr-xrun_mypy.py1
-rwxr-xr-xrun_project_tests.py21
-rw-r--r--test cases/frameworks/1 boost/meson.build14
-rwxr-xr-xtest cases/python/2 extmodule/blaster.py.in (renamed from test cases/python/2 extmodule/blaster.py)2
-rw-r--r--test cases/python/2 extmodule/ext/meson.build4
-rw-r--r--test cases/python/2 extmodule/ext/nested/meson.build16
-rw-r--r--test cases/python/2 extmodule/ext/tachyon_module.c2
-rw-r--r--test cases/python/2 extmodule/ext/wrongdir/meson.build7
-rw-r--r--test cases/python/2 extmodule/meson.build10
-rw-r--r--test cases/python/2 extmodule/test.json13
32 files changed, 657 insertions, 393 deletions
diff --git a/.github/workflows/macos.yml b/.github/workflows/macos.yml
index 06de073..2a21835 100644
--- a/.github/workflows/macos.yml
+++ b/.github/workflows/macos.yml
@@ -61,7 +61,7 @@ jobs:
- uses: actions/checkout@v2
# use python3 from homebrew because it is a valid framework, unlike the actions one:
# https://github.com/actions/setup-python/issues/58
- - run: brew install pkg-config ninja llvm qt@5 boost ldc hdf5 openmpi lapack scalapack sdl2 python3
+ - run: brew install pkg-config ninja llvm qt@5 boost ldc hdf5 openmpi lapack scalapack sdl2 python3 boost-python3
- run: |
python3 -m pip install --upgrade setuptools
python3 -m pip install --upgrade pip
diff --git a/data/test.schema.json b/data/test.schema.json
index b89a874..a809388 100644
--- a/data/test.schema.json
+++ b/data/test.schema.json
@@ -21,11 +21,14 @@
"type": "string",
"enum": [
"file",
+ "python_file",
"dir",
"exe",
"shared_lib",
+ "python_lib",
"pdb",
"implib",
+ "py_implib",
"implibempty",
"expr"
]
diff --git a/docs/markdown/Contributing.md b/docs/markdown/Contributing.md
index 77e5165..3e3ff22 100644
--- a/docs/markdown/Contributing.md
+++ b/docs/markdown/Contributing.md
@@ -264,15 +264,18 @@ current platform. The following values are currently supported:
| type | Description |
| ------------- | ------------------------------------------------------------------------------------------------------- |
| `file` | No postprocessing, just use the provided path |
+| `python_file` | Use the provided path while replacing the python directory. |
| `dir` | To include all files inside the directory (for generated docs, etc). The path must be a valid directory |
| `exe` | For executables. On Windows the `.exe` suffix is added to the path in `file` |
| `shared_lib` | For shared libraries, always written as `name`. The appropriate suffix and prefix are added by platform |
+| `python_lib` | For python libraries, while replacing the python directory. The appropriate suffix is added by platform |
| `pdb` | For Windows PDB files. PDB entries are ignored on non Windows platforms |
| `implib` | For Windows import libraries. These entries are ignored on non Windows platforms |
+| `py_implib`   | Like `implib`, but for python extension modules, with the python directory replaced. These entries are ignored on non Windows platforms |
| `implibempty` | Like `implib`, but no symbols are exported in the library |
| `expr` | `file` is an expression. This type should be avoided and removed if possible |
-Except for the `file` and `expr` types, all paths should be provided *without* a suffix.
+Except for the `file`, `python_file` and `expr` types, all paths should be provided *without* a suffix.
| Argument | Applies to | Description |
| -----------|----------------------------|-------------------------------------------------------------------------------|
@@ -284,6 +287,11 @@ parameter, `version`, this is us a string in `X.Y.Z` format that will
be applied to the library. Each version to be tested must have a
single version. The harness will apply this correctly per platform:
+The `python_file`, `python_lib`, and `py_implib` types have basic support for configuring the string with the `@<VAR>@` syntax:
+
+- `@PYTHON_PLATLIB@`: python `get_install_dir` directory relative to prefix
+- `@PYTHON_PURELIB@`: python `get_install_dir(pure: true)` directory relative to prefix
+
`pdb` takes an optional `language` argument. This determines which
compiler/linker should generate the pdb file. Because it's possible to
mix compilers that do and don't generate pdb files (dmd's optlink
diff --git a/docs/markdown/External-Project-module.md b/docs/markdown/External-Project-module.md
index 866564e..640caaf 100644
--- a/docs/markdown/External-Project-module.md
+++ b/docs/markdown/External-Project-module.md
@@ -63,6 +63,10 @@ directory and executable. Note that if a bootstrap script is required
(e.g. `autogen.sh` when building from git instead of tarball), it can
be done using `run_command()` before calling `add_project()` method.
+*Since 0.60.0* If the first positional argument is `'waf'`, special treatment
+is done for the [waf](https://waf.io/) build system. The waf executable must be
+found either in the current directory, or in system `PATH`.
+
Keyword arguments:
- `configure_options`: An array of strings to be passed as arguments to the
diff --git a/docs/markdown/Reference-manual.md b/docs/markdown/Reference-manual.md
index fd78a1e..fd156f9 100644
--- a/docs/markdown/Reference-manual.md
+++ b/docs/markdown/Reference-manual.md
@@ -780,12 +780,6 @@ creating the final list.
The returned object also has methods that are documented in the
[object methods section](#build-target-object) below.
-### find_library()
-
-*(since 0.31.0)* **(deprecated)** Use `find_library()` method of
-[the compiler object](#compiler-object) as obtained from
-`meson.get_compiler(lang)`.
-
### find_program()
``` meson
@@ -2865,7 +2859,7 @@ env.prepend('MY_PATH', '0')
### `external library` object
-This object is returned by [`find_library()`](#find_library) and
+This object is returned by [`find_library()`](#compiler-object) and
contains an external (i.e. not built as part of this project)
library. This object has the following methods:
diff --git a/docs/markdown/Vala.md b/docs/markdown/Vala.md
index d3edce0..606cf0d 100644
--- a/docs/markdown/Vala.md
+++ b/docs/markdown/Vala.md
@@ -38,7 +38,7 @@ map Vala code to the library's C programming interface. It is the
[`pkg-config`](https://www.freedesktop.org/wiki/Software/pkg-config/)
tool that makes finding these installed files all work seamlessly
behind the scenes. When a `pkg-config` file doesn't exist for the
-library then the [`find_library()`](Reference-manual.md#find_library)
+library then the `find_library()`
method of the [compiler object](Reference-manual.md#compiler-object)
needs to be used. Examples are given later.
diff --git a/docs/markdown/howtox.md b/docs/markdown/howtox.md
index 1521f72..5c64bcb 100644
--- a/docs/markdown/howtox.md
+++ b/docs/markdown/howtox.md
@@ -85,7 +85,7 @@ executable(..., override_options : ['c_std=c11'])
## Enable threads
-Lots of people seem to do this manually with `find_library('pthread')`
+Lots of people seem to do this manually with `cc.find_library('pthread')`
or something similar. Do not do that. It is not portable. Instead do
this.
diff --git a/docs/markdown/snippets/waf.md b/docs/markdown/snippets/waf.md
new file mode 100644
index 0000000..87634a0
--- /dev/null
+++ b/docs/markdown/snippets/waf.md
@@ -0,0 +1,5 @@
+## Waf support in external-project module
+
+If the first argument is `'waf'`, special treatment is done for the
+[waf](https://waf.io/) build system. The waf executable must be
+found either in the current directory, or in system `PATH`.
diff --git a/mesonbuild/backend/backends.py b/mesonbuild/backend/backends.py
index 6c877ea..658f031 100644
--- a/mesonbuild/backend/backends.py
+++ b/mesonbuild/backend/backends.py
@@ -39,8 +39,22 @@ if T.TYPE_CHECKING:
from .._typing import ImmutableListProtocol
from ..arglist import CompilerArgs
from ..compilers import Compiler
+ from ..environment import Environment
from ..interpreter import Interpreter, Test
- from ..mesonlib import FileMode
+ from ..linkers import StaticLinker
+ from ..mesonlib import FileMode, FileOrString
+ from ..wrap import WrapMode
+
+ from typing_extensions import TypedDict
+
+ class TargetIntrospectionData(TypedDict):
+
+ language: str
+ compiler : T.List[str]
+ parameters: T.List[str]
+ sources: T.List[str]
+ generated_sources: T.List[str]
+
# Languages that can mix with C or C++ but don't support unity builds yet
# because the syntax we use for unity builds is specific to C/++/ObjC/++.
@@ -48,7 +62,7 @@ if T.TYPE_CHECKING:
LANGS_CANT_UNITY = ('d', 'fortran', 'vala')
class RegenInfo:
- def __init__(self, source_dir, build_dir, depfiles):
+ def __init__(self, source_dir: str, build_dir: str, depfiles: T.List[str]):
self.source_dir = source_dir
self.build_dir = build_dir
self.depfiles = depfiles
@@ -88,7 +102,7 @@ class CleanTrees:
Directories outputted by custom targets that have to be manually cleaned
because on Linux `ninja clean` only deletes empty directories.
'''
- def __init__(self, build_dir, trees):
+ def __init__(self, build_dir: str, trees: T.List[str]):
self.build_dir = build_dir
self.trees = trees
@@ -113,10 +127,13 @@ class InstallData:
self.version = version
class TargetInstallData:
+
+ # TODO: install_mode should just always be a FileMode object
+
def __init__(self, fname: str, outdir: str, aliases: T.Dict[str, str], strip: bool,
- install_name_mappings: T.Dict, rpath_dirs_to_remove: T.Set[bytes],
- install_rpath: str, install_mode: 'FileMode', subproject: str,
- optional: bool = False, tag: T.Optional[str] = None):
+ install_name_mappings: T.Mapping[str, str], rpath_dirs_to_remove: T.Set[bytes],
+ install_rpath: str, install_mode: T.Optional['FileMode'],
+ subproject: str, optional: bool = False, tag: T.Optional[str] = None):
self.fname = fname
self.outdir = outdir
self.aliases = aliases
@@ -140,14 +157,25 @@ class InstallDataBase:
class SubdirInstallData(InstallDataBase):
def __init__(self, path: str, install_path: str, install_mode: 'FileMode',
- exclude, subproject: str, tag: T.Optional[str] = None):
+ exclude: T.Tuple[T.Set[str], T.Set[str]], subproject: str,
+ tag: T.Optional[str] = None):
super().__init__(path, install_path, install_mode, subproject, tag)
self.exclude = exclude
class ExecutableSerialisation:
- def __init__(self, cmd_args, env: T.Optional[build.EnvironmentVariables] = None, exe_wrapper=None,
- workdir=None, extra_paths=None, capture=None, feed=None,
- tag: T.Optional[str] = None) -> None:
+
+ # XXX: should capture and feed default to False, instead of None?
+
+ def __init__(self, cmd_args: T.List[str],
+ env: T.Optional[build.EnvironmentVariables] = None,
+ exe_wrapper: T.Optional['programs.ExternalProgram'] = None,
+ workdir: T.Optional[str] = None,
+ extra_paths: T.Optional[T.List] = None,
+ capture: T.Optional[bool] = None,
+ feed: T.Optional[bool] = None,
+ tag: T.Optional[str] = None,
+ verbose: bool = False,
+ ) -> None:
self.cmd_args = cmd_args
self.env = env
if exe_wrapper is not None:
@@ -159,7 +187,7 @@ class ExecutableSerialisation:
self.feed = feed
self.pickled = False
self.skip_if_destdir = False
- self.verbose = False
+ self.verbose = verbose
self.subproject = ''
self.tag = tag
@@ -227,6 +255,9 @@ def get_backend_from_name(backend: str, build: T.Optional[build.Build] = None, i
# This class contains the basic functionality that is needed by all backends.
# Feel free to move stuff in and out of it as you see fit.
class Backend:
+
+ environment: T.Optional['Environment']
+
def __init__(self, build: T.Optional[build.Build], interpreter: T.Optional['Interpreter']):
# Make it possible to construct a dummy backend
# This is used for introspection without a build directory
@@ -248,7 +279,7 @@ class Backend:
def generate(self) -> None:
raise RuntimeError(f'generate is not implemented in {type(self).__name__}')
- def get_target_filename(self, t: T.Union[build.Target, build.CustomTargetIndex], *, warn_multi_output: bool = True):
+ def get_target_filename(self, t: T.Union[build.Target, build.CustomTargetIndex], *, warn_multi_output: bool = True) -> str:
if isinstance(t, build.CustomTarget):
if warn_multi_output and len(t.get_outputs()) != 1:
mlog.warning(f'custom_target {t.name!r} has more than one output! '
@@ -274,13 +305,19 @@ class Backend:
comp_override = target.option_overrides_compiler
return OptionOverrideProxy(comp_override, comp_reg)
- def get_option_for_target(self, option_name: 'OptionKey', target: build.BuildTarget):
+ def get_option_for_target(self, option_name: 'OptionKey', target: build.BuildTarget) -> T.Union[str, int, bool, 'WrapMode']:
if option_name in target.option_overrides_base:
override = target.option_overrides_base[option_name]
- return self.environment.coredata.validate_option_value(option_name, override)
- return self.environment.coredata.get_option(option_name.evolve(subproject=target.subproject))
-
- def get_source_dir_include_args(self, target, compiler, *, absolute_path=False):
+ v = self.environment.coredata.validate_option_value(option_name, override)
+ else:
+ v = self.environment.coredata.get_option(option_name.evolve(subproject=target.subproject))
+ # We don't actually have wrapmode here to do an assert, so just do a
+ # cast, we know what's in coredata anyway.
+ # TODO: if it's possible to annotate get_option or validate_option_value
+ # in the future we might be able to remove the cast here
+ return T.cast(T.Union[str, int, bool, 'WrapMode'], v)
+
+ def get_source_dir_include_args(self, target: build.BuildTarget, compiler: 'Compiler', *, absolute_path: bool = False) -> T.List[str]:
curdir = target.get_subdir()
if absolute_path:
lead = self.source_dir
@@ -289,7 +326,7 @@ class Backend:
tmppath = os.path.normpath(os.path.join(lead, curdir))
return compiler.get_include_args(tmppath, False)
- def get_build_dir_include_args(self, target, compiler, *, absolute_path=False):
+ def get_build_dir_include_args(self, target: build.BuildTarget, compiler: 'Compiler', *, absolute_path: bool = False) -> T.List[str]:
if absolute_path:
curdir = os.path.join(self.build_dir, target.get_subdir())
else:
@@ -298,7 +335,7 @@ class Backend:
curdir = '.'
return compiler.get_include_args(curdir, False)
- def get_target_filename_for_linking(self, target):
+ def get_target_filename_for_linking(self, target: T.Union[build.Target, build.CustomTargetIndex]) -> T.Optional[str]:
# On some platforms (msvc for instance), the file that is used for
# dynamic linking is not the same as the dynamic library itself. This
# file is called an import library, and we want to link against that.
@@ -320,20 +357,20 @@ class Backend:
raise AssertionError(f'BUG: Tried to link to {target!r} which is not linkable')
@lru_cache(maxsize=None)
- def get_target_dir(self, target: build.Target) -> str:
+ def get_target_dir(self, target: T.Union[build.Target, build.CustomTargetIndex]) -> str:
if self.environment.coredata.get_option(OptionKey('layout')) == 'mirror':
dirname = target.get_subdir()
else:
dirname = 'meson-out'
return dirname
- def get_target_dir_relative_to(self, t, o):
+ def get_target_dir_relative_to(self, t: build.Target, o: build.Target) -> str:
'''Get a target dir relative to another target's directory'''
target_dir = os.path.join(self.environment.get_build_dir(), self.get_target_dir(t))
othert_dir = os.path.join(self.environment.get_build_dir(), self.get_target_dir(o))
return os.path.relpath(target_dir, othert_dir)
- def get_target_source_dir(self, target):
+ def get_target_source_dir(self, target: build.Target) -> str:
# if target dir is empty, avoid extraneous trailing / from os.path.join()
target_dir = self.get_target_dir(target)
if target_dir:
@@ -343,11 +380,14 @@ class Backend:
def get_target_private_dir(self, target: build.Target) -> str:
return os.path.join(self.get_target_filename(target, warn_multi_output=False) + '.p')
- def get_target_private_dir_abs(self, target):
+ def get_target_private_dir_abs(self, target: build.Target) -> str:
return os.path.join(self.environment.get_build_dir(), self.get_target_private_dir(target))
@lru_cache(maxsize=None)
- def get_target_generated_dir(self, target, gensrc, src):
+ def get_target_generated_dir(
+ self, target: build.Target,
+ gensrc: T.Union[build.CustomTarget, build.CustomTargetIndex, build.GeneratedList],
+ src: str) -> str:
"""
Takes a BuildTarget, a generator source (CustomTarget or GeneratedList),
and a generated source filename.
@@ -360,19 +400,20 @@ class Backend:
# target that the GeneratedList is used in
return os.path.join(self.get_target_private_dir(target), src)
- def get_unity_source_file(self, target, suffix, number):
+ def get_unity_source_file(self, target: build.Target, suffix: str, number: int) -> mesonlib.File:
# There is a potential conflict here, but it is unlikely that
# anyone both enables unity builds and has a file called foo-unity.cpp.
osrc = f'{target.name}-unity{number}.{suffix}'
return mesonlib.File.from_built_file(self.get_target_private_dir(target), osrc)
- def generate_unity_files(self, target, unity_src):
- abs_files = []
- result = []
+ def generate_unity_files(self, target: build.BuildTarget, unity_src: str) -> T.List[mesonlib.File]:
+ abs_files: T.List[str] = []
+ result: T.List[mesonlib.File] = []
compsrcs = classify_unity_sources(target.compilers.values(), unity_src)
unity_size = self.get_option_for_target(OptionKey('unity_size'), target)
+ assert isinstance(unity_size, int), 'for mypy'
- def init_language_file(suffix, unity_file_number):
+ def init_language_file(suffix: str, unity_file_number: int) -> T.TextIO:
unity_src = self.get_unity_source_file(target, suffix, unity_file_number)
outfileabs = unity_src.absolute_path(self.environment.get_source_dir(),
self.environment.get_build_dir())
@@ -388,6 +429,8 @@ class Backend:
for comp, srcs in compsrcs.items():
files_in_current = unity_size + 1
unity_file_number = 0
+ # TODO: this could be simplified with an algorithm that pre-sorts
+ # the sources into the size of chunks we want
ofile = None
for src in srcs:
if files_in_current >= unity_size:
@@ -401,19 +444,23 @@ class Backend:
if ofile:
ofile.close()
- [mesonlib.replace_if_different(x, x + '.tmp') for x in abs_files]
+ for x in abs_files:
+ mesonlib.replace_if_different(x, x + '.tmp')
return result
- def relpath(self, todir, fromdir):
+ @staticmethod
+ def relpath(todir: str, fromdir: str) -> str:
return os.path.relpath(os.path.join('dummyprefixdir', todir),
os.path.join('dummyprefixdir', fromdir))
- def flatten_object_list(self, target, proj_dir_to_build_root=''):
+ def flatten_object_list(self, target: build.BuildTarget, proj_dir_to_build_root: str = '') -> T.List[str]:
obj_list = self._flatten_object_list(target, target.get_objects(), proj_dir_to_build_root)
return list(dict.fromkeys(obj_list))
- def _flatten_object_list(self, target, objects, proj_dir_to_build_root):
- obj_list = []
+ def _flatten_object_list(self, target: build.BuildTarget,
+ objects: T.Sequence[T.Union[str, 'File', build.ExtractedObjects]],
+ proj_dir_to_build_root: str) -> T.List[str]:
+ obj_list: T.List[str] = []
for obj in objects:
if isinstance(obj, str):
o = os.path.join(proj_dir_to_build_root,
@@ -436,24 +483,31 @@ class Backend:
raise MesonException('Unknown data type in object list.')
return obj_list
- def is_swift_target(self, target):
+ @staticmethod
+ def is_swift_target(target: build.BuildTarget) -> bool:
for s in target.sources:
if s.endswith('swift'):
return True
return False
- def determine_swift_dep_dirs(self, target):
- result = []
+ def determine_swift_dep_dirs(self, target: build.BuildTarget) -> T.List[str]:
+ result: T.List[str] = []
for l in target.link_targets:
result.append(self.get_target_private_dir_abs(l))
return result
- def get_executable_serialisation(self, cmd, workdir=None,
- extra_bdeps=None, capture=None, feed=None,
- env: T.Optional[build.EnvironmentVariables] = None,
- tag: T.Optional[str] = None):
- exe = cmd[0]
- cmd_args = cmd[1:]
+ def get_executable_serialisation(
+ self, cmd: T.Sequence[T.Union[programs.ExternalProgram, build.BuildTarget, build.CustomTarget, File, str]],
+ workdir: T.Optional[str] = None,
+ extra_bdeps: T.Optional[T.List[build.BuildTarget]] = None,
+ capture: T.Optional[bool] = None,
+ feed: T.Optional[bool] = None,
+ env: T.Optional[build.EnvironmentVariables] = None,
+ tag: T.Optional[str] = None,
+ verbose: bool = False) -> 'ExecutableSerialisation':
+
+ # XXX: cmd_args either need to be lowered to strings, or need to be checked for non-string arguments, right?
+ exe, *raw_cmd_args = cmd
if isinstance(exe, programs.ExternalProgram):
exe_cmd = exe.get_command()
exe_for_machine = exe.for_machine
@@ -475,6 +529,19 @@ class Backend:
exe_cmd = [exe]
exe_for_machine = MachineChoice.BUILD
+ cmd_args: T.List[str] = []
+ for c in raw_cmd_args:
+ if isinstance(c, programs.ExternalProgram):
+ p = c.get_path()
+ assert isinstance(p, str)
+ cmd_args.append(p)
+ elif isinstance(c, (build.BuildTarget, build.CustomTarget)):
+ cmd_args.append(self.get_target_filename_abs(c))
+ elif isinstance(c, mesonlib.File):
+ cmd_args.append(c.rel_to_builddir(self.environment.source_dir))
+ else:
+ cmd_args.append(c)
+
machine = self.environment.machines[exe_for_machine]
if machine.is_windows() or machine.is_cygwin():
extra_paths = self.determine_windows_extra_paths(exe, extra_bdeps or [])
@@ -498,20 +565,25 @@ class Backend:
workdir = workdir or self.environment.get_build_dir()
return ExecutableSerialisation(exe_cmd + cmd_args, env,
exe_wrapper, workdir,
- extra_paths, capture, feed, tag)
-
- def as_meson_exe_cmdline(self, tname, exe, cmd_args, workdir=None,
- extra_bdeps=None, capture=None, feed=None,
- force_serialize=False,
+ extra_paths, capture, feed, tag, verbose)
+
+ def as_meson_exe_cmdline(self, exe: T.Union[str, mesonlib.File, build.BuildTarget, build.CustomTarget, programs.ExternalProgram],
+ cmd_args: T.Sequence[T.Union[str, mesonlib.File, build.BuildTarget, build.CustomTarget, programs.ExternalProgram]],
+ workdir: T.Optional[str] = None,
+ extra_bdeps: T.Optional[T.List[build.BuildTarget]] = None,
+ capture: T.Optional[bool] = None,
+ feed: T.Optional[bool] = None,
+ force_serialize: bool = False,
env: T.Optional[build.EnvironmentVariables] = None,
- verbose: bool = False):
+ verbose: bool = False) -> T.Tuple[T.Sequence[T.Union[str, File, build.Target, programs.ExternalProgram]], str]:
'''
Serialize an executable for running with a generator or a custom target
'''
- cmd = [exe] + cmd_args
- es = self.get_executable_serialisation(cmd, workdir, extra_bdeps, capture, feed, env)
- es.verbose = verbose
- reasons = []
+ cmd: T.List[T.Union[str, mesonlib.File, build.BuildTarget, build.CustomTarget, programs.ExternalProgram]] = []
+ cmd.append(exe)
+ cmd.extend(cmd_args)
+ es = self.get_executable_serialisation(cmd, workdir, extra_bdeps, capture, feed, env, verbose=verbose)
+ reasons: T.List[str] = []
if es.extra_paths:
reasons.append('to set PATH')
@@ -537,14 +609,16 @@ class Backend:
if not force_serialize:
if not capture and not feed:
return es.cmd_args, ''
- args = []
+ args: T.List[str] = []
if capture:
- args += ['--capture', capture]
+ args += ['--capture', str(capture)]
if feed:
- args += ['--feed', feed]
- return ((self.environment.get_build_command() +
- ['--internal', 'exe'] + args + ['--'] + es.cmd_args),
- ', '.join(reasons))
+ args += ['--feed', str(feed)]
+
+ return (
+ self.environment.get_build_command() + ['--internal', 'exe'] + args + ['--'] + es.cmd_args,
+ ', '.join(reasons)
+ )
if isinstance(exe, (programs.ExternalProgram,
build.BuildTarget, build.CustomTarget)):
@@ -569,7 +643,7 @@ class Backend:
return (self.environment.get_build_command() + ['--internal', 'exe', '--unpickle', exe_data],
', '.join(reasons))
- def serialize_tests(self):
+ def serialize_tests(self) -> T.Tuple[str, str]:
test_data = os.path.join(self.environment.get_scratch_dir(), 'meson_test_setup.dat')
with open(test_data, 'wb') as datafile:
self.write_test_file(datafile)
@@ -578,7 +652,7 @@ class Backend:
self.write_benchmark_file(datafile)
return test_data, benchmark_data
- def determine_linker_and_stdlib_args(self, target):
+ def determine_linker_and_stdlib_args(self, target: build.BuildTarget) -> T.Tuple[T.Union['Compiler', 'StaticLinker'], T.List[str]]:
'''
If we're building a static library, there is only one static linker.
Otherwise, we query the target for the dynamic linker.
@@ -589,19 +663,23 @@ class Backend:
return l, stdlib_args
@staticmethod
- def _libdir_is_system(libdir, compilers, env):
+ def _libdir_is_system(libdir: str, compilers: T.Mapping[str, 'Compiler'], env: 'Environment') -> bool:
libdir = os.path.normpath(libdir)
for cc in compilers.values():
if libdir in cc.get_library_dirs(env):
return True
return False
- def get_external_rpath_dirs(self, target):
- dirs = set()
- args = []
+ def get_external_rpath_dirs(self, target: build.BuildTarget) -> T.Set[str]:
+ dirs: T.Set[str] = set()
+ args: T.List[str] = []
for lang in LANGUAGES_USING_LDFLAGS:
try:
- args.extend(self.environment.coredata.get_external_link_args(target.for_machine, lang))
+ e = self.environment.coredata.get_external_link_args(target.for_machine, lang)
+ if isinstance(e, str):
+ args.append(e)
+ else:
+ args.extend(e)
except Exception:
pass
# Match rpath formats:
@@ -635,8 +713,8 @@ class Backend:
raise MesonException(f'Invalid arg for --just-symbols, {dir} is a directory.')
return dirs
- def rpaths_for_bundled_shared_libraries(self, target, exclude_system=True):
- paths = []
+ def rpaths_for_bundled_shared_libraries(self, target: build.BuildTarget, exclude_system: bool = True) -> T.List[str]:
+ paths: T.List[str] = []
for dep in target.external_deps:
if not isinstance(dep, (dependencies.ExternalLibrary, dependencies.PkgConfigDependency)):
continue
@@ -665,8 +743,10 @@ class Backend:
return paths
def determine_rpath_dirs(self, target: build.BuildTarget) -> T.Tuple[str, ...]:
+ result: OrderedSet[str]
if self.environment.coredata.get_option(OptionKey('layout')) == 'mirror':
- result: OrderedSet[str] = target.get_link_dep_subdirs()
+            # Need a copy here
+ result = OrderedSet(target.get_link_dep_subdirs())
else:
result = OrderedSet()
result.add('meson-out')
@@ -675,12 +755,12 @@ class Backend:
return tuple(result)
@staticmethod
- def canonicalize_filename(fname):
+ def canonicalize_filename(fname: str) -> str:
for ch in ('/', '\\', ':'):
fname = fname.replace(ch, '_')
return fname
- def object_filename_from_source(self, target, source):
+ def object_filename_from_source(self, target: build.BuildTarget, source: 'FileOrString') -> str:
assert isinstance(source, mesonlib.File)
build_dir = self.environment.get_build_dir()
rel_src = source.rel_to_builddir(self.build_to_src)
@@ -695,42 +775,41 @@ class Backend:
else:
rel_src = os.path.basename(rel_src)
# A meson- prefixed directory is reserved; hopefully no-one creates a file name with such a weird prefix.
- source = 'meson-generated_' + rel_src[:-5] + '.c'
+ gen_source = 'meson-generated_' + rel_src[:-5] + '.c'
elif source.is_built:
if os.path.isabs(rel_src):
rel_src = rel_src[len(build_dir) + 1:]
targetdir = self.get_target_private_dir(target)
# A meson- prefixed directory is reserved; hopefully no-one creates a file name with such a weird prefix.
- source = 'meson-generated_' + os.path.relpath(rel_src, targetdir)
+ gen_source = 'meson-generated_' + os.path.relpath(rel_src, targetdir)
else:
if os.path.isabs(rel_src):
# Use the absolute path directly to avoid file name conflicts
- source = rel_src
+ gen_source = rel_src
else:
- source = os.path.relpath(os.path.join(build_dir, rel_src),
- os.path.join(self.environment.get_source_dir(), target.get_subdir()))
+ gen_source = os.path.relpath(os.path.join(build_dir, rel_src),
+ os.path.join(self.environment.get_source_dir(), target.get_subdir()))
machine = self.environment.machines[target.for_machine]
- return self.canonicalize_filename(source) + '.' + machine.get_object_suffix()
+ return self.canonicalize_filename(gen_source) + '.' + machine.get_object_suffix()
- def determine_ext_objs(self, extobj, proj_dir_to_build_root):
- result = []
+ def determine_ext_objs(self, extobj: 'build.ExtractedObjects', proj_dir_to_build_root: str) -> T.List[str]:
+ result: T.List[str] = []
# Merge sources and generated sources
- sources = list(extobj.srclist)
+ raw_sources = list(extobj.srclist)
for gensrc in extobj.genlist:
- for s in gensrc.get_outputs():
- path = self.get_target_generated_dir(extobj.target, gensrc, s)
+ for r in gensrc.get_outputs():
+ path = self.get_target_generated_dir(extobj.target, gensrc, r)
dirpart, fnamepart = os.path.split(path)
- sources.append(File(True, dirpart, fnamepart))
+ raw_sources.append(File(True, dirpart, fnamepart))
# Filter out headers and all non-source files
- filtered_sources = []
- for s in sources:
+ sources: T.List['FileOrString'] = []
+ for s in raw_sources:
if self.environment.is_source(s) and not self.environment.is_header(s):
- filtered_sources.append(s)
+ sources.append(s)
elif self.environment.is_object(s):
result.append(s.relative_name())
- sources = filtered_sources
# extobj could contain only objects and no sources
if not sources:
@@ -745,15 +824,16 @@ class Backend:
compsrcs = classify_unity_sources(extobj.target.compilers.values(), sources)
sources = []
unity_size = self.get_option_for_target(OptionKey('unity_size'), extobj.target)
+ assert isinstance(unity_size, int), 'for mypy'
for comp, srcs in compsrcs.items():
if comp.language in LANGS_CANT_UNITY:
sources += srcs
continue
for i in range(len(srcs) // unity_size + 1):
- osrc = self.get_unity_source_file(extobj.target,
+ _src = self.get_unity_source_file(extobj.target,
comp.get_default_suffix(), i)
- sources.append(osrc)
+ sources.append(_src)
for osrc in sources:
objname = self.object_filename_from_source(extobj.target, osrc)
@@ -762,8 +842,8 @@ class Backend:
return result
- def get_pch_include_args(self, compiler, target):
- args = []
+ def get_pch_include_args(self, compiler: 'Compiler', target: build.BuildTarget) -> T.List[str]:
+ args: T.List[str] = []
pchpath = self.get_target_private_dir(target)
includeargs = compiler.get_include_args(pchpath, False)
p = target.get_pch(compiler.get_language())
@@ -771,7 +851,7 @@ class Backend:
args += compiler.get_pch_use_args(pchpath, p[0])
return includeargs + args
- def create_msvc_pch_implementation(self, target, lang, pch_header):
+ def create_msvc_pch_implementation(self, target: build.BuildTarget, lang: str, pch_header: str) -> str:
# We have to include the language in the file name, otherwise
# pch.c and pch.cpp will both end up as pch.obj in VS backends.
impl_name = f'meson_pch-{lang}.{lang}'
@@ -789,16 +869,21 @@ class Backend:
return pch_rel_to_build
@staticmethod
- def escape_extra_args(compiler, args):
+ def escape_extra_args(args: T.List[str]) -> T.List[str]:
# all backslashes in defines are doubly-escaped
- extra_args = []
+ extra_args: T.List[str] = []
for arg in args:
- if arg.startswith('-D') or arg.startswith('/D'):
+ if arg.startswith(('-D', '/D')):
arg = arg.replace('\\', '\\\\')
extra_args.append(arg)
return extra_args
+ def get_no_stdlib_args(self, target: 'build.BuildTarget', compiler: 'Compiler') -> T.List[str]:
+ if compiler.language in self.build.stdlibs[target.for_machine]:
+ return compiler.get_no_stdinc_args()
+ return []
+
def generate_basic_compiler_args(self, target: build.BuildTarget, compiler: 'Compiler', no_warn_args: bool = False) -> 'CompilerArgs':
# Create an empty commands list, and start adding arguments from
# various sources in the order in which they must override each other
@@ -817,7 +902,8 @@ class Backend:
if no_warn_args:
commands += compiler.get_no_warn_args()
else:
- commands += compiler.get_warn_args(self.get_option_for_target(OptionKey('warning_level'), target))
+ # warning_level is a string, but mypy can't determine that
+ commands += compiler.get_warn_args(T.cast(str, self.get_option_for_target(OptionKey('warning_level'), target)))
# Add -Werror if werror=true is set in the build options set on the
# command-line or default_options inside project(). This only sets the
# action to be done for warnings if/when they are emitted, so it's ok
@@ -827,10 +913,20 @@ class Backend:
# Add compile args for c_* or cpp_* build options set on the
# command-line or default_options inside project().
commands += compiler.get_option_compile_args(copt_proxy)
+
# Add buildtype args: optimization level, debugging, etc.
- commands += compiler.get_buildtype_args(self.get_option_for_target(OptionKey('buildtype'), target))
- commands += compiler.get_optimization_args(self.get_option_for_target(OptionKey('optimization'), target))
- commands += compiler.get_debug_args(self.get_option_for_target(OptionKey('debug'), target))
+ buildtype = self.get_option_for_target(OptionKey('buildtype'), target)
+ assert isinstance(buildtype, str), 'for mypy'
+ commands += compiler.get_buildtype_args(buildtype)
+
+ optimization = self.get_option_for_target(OptionKey('optimization'), target)
+ assert isinstance(optimization, str), 'for mypy'
+ commands += compiler.get_optimization_args(optimization)
+
+ debug = self.get_option_for_target(OptionKey('debug'), target)
+ assert isinstance(debug, bool), 'for mypy'
+ commands += compiler.get_debug_args(debug)
+
# Add compile args added using add_project_arguments()
commands += self.build.get_project_args(compiler, target.subproject, target.for_machine)
# Add compile args added using add_global_arguments()
@@ -886,8 +982,8 @@ class Backend:
commands += compiler.get_include_args(priv_dir, False)
return commands
- def build_target_link_arguments(self, compiler, deps):
- args = []
+ def build_target_link_arguments(self, compiler: 'Compiler', deps: T.List[build.Target]) -> T.List[str]:
+ args: T.List[str] = []
for d in deps:
if not (d.is_linkable_target()):
raise RuntimeError(f'Tried to link with a non-library target "{d.get_basename()}".')
@@ -901,8 +997,8 @@ class Backend:
args.append(arg)
return args
- def get_mingw_extra_paths(self, target):
- paths = OrderedSet()
+ def get_mingw_extra_paths(self, target: build.BuildTarget) -> T.List[str]:
+ paths: OrderedSet[str] = OrderedSet()
# The cross bindir
root = self.environment.properties[target.for_machine].get_root()
if root:
@@ -918,13 +1014,17 @@ class Backend:
paths.update(cc.get_library_dirs(self.environment))
return list(paths)
- def determine_windows_extra_paths(self, target: T.Union[build.BuildTarget, str], extra_bdeps):
- '''On Windows there is no such thing as an rpath.
+ def determine_windows_extra_paths(
+ self, target: T.Union[build.BuildTarget, build.CustomTarget, programs.ExternalProgram, mesonlib.File, str],
+ extra_bdeps: T.Sequence[T.Union[build.BuildTarget, build.CustomTarget]]) -> T.List[str]:
+ """On Windows there is no such thing as an rpath.
+
We must determine all locations of DLLs that this exe
links to and return them so they can be used in unit
- tests.'''
- result = set()
- prospectives = set()
+ tests.
+ """
+ result: T.Set[str] = set()
+ prospectives: T.Set[build.Target] = set()
if isinstance(target, build.BuildTarget):
prospectives.update(target.get_transitive_link_deps())
# External deps
@@ -932,11 +1032,10 @@ class Backend:
result.add(os.path.normpath(os.path.join(self.environment.get_build_dir(), deppath)))
for bdep in extra_bdeps:
prospectives.add(bdep)
- prospectives.update(bdep.get_transitive_link_deps())
+ if isinstance(bdep, build.BuildTarget):
+ prospectives.update(bdep.get_transitive_link_deps())
# Internal deps
for ld in prospectives:
- if ld == '' or ld == '.':
- continue
dirseg = os.path.join(self.environment.get_build_dir(), self.get_target_dir(ld))
result.add(dirseg)
if (isinstance(target, build.BuildTarget) and
@@ -944,20 +1043,20 @@ class Backend:
result.update(self.get_mingw_extra_paths(target))
return list(result)
- def write_benchmark_file(self, datafile):
+ def write_benchmark_file(self, datafile: T.BinaryIO) -> None:
self.write_test_serialisation(self.build.get_benchmarks(), datafile)
- def write_test_file(self, datafile):
+ def write_test_file(self, datafile: T.BinaryIO) -> None:
self.write_test_serialisation(self.build.get_tests(), datafile)
def create_test_serialisation(self, tests: T.List['Test']) -> T.List[TestSerialisation]:
- arr = []
+ arr: T.List[TestSerialisation] = []
for t in sorted(tests, key=lambda tst: -1 * tst.priority):
exe = t.get_exe()
if isinstance(exe, programs.ExternalProgram):
cmd = exe.get_command()
else:
- cmd = [os.path.join(self.environment.get_build_dir(), self.get_target_filename(t.get_exe()))]
+ cmd = [os.path.join(self.environment.get_build_dir(), self.get_target_filename(exe))]
if isinstance(exe, (build.BuildTarget, programs.ExternalProgram)):
test_for_machine = exe.for_machine
else:
@@ -980,15 +1079,15 @@ class Backend:
exe_wrapper = None
machine = self.environment.machines[exe.for_machine]
if machine.is_windows() or machine.is_cygwin():
- extra_bdeps = []
+ extra_bdeps: T.List[T.Union[build.BuildTarget, build.CustomTarget]] = []
if isinstance(exe, build.CustomTarget):
- extra_bdeps = exe.get_transitive_build_target_deps()
+ extra_bdeps = list(exe.get_transitive_build_target_deps())
extra_paths = self.determine_windows_extra_paths(exe, extra_bdeps)
else:
extra_paths = []
- cmd_args = []
- depends = set(t.depends)
+ cmd_args: T.List[str] = []
+ depends: T.Set[build.Target] = set(t.depends)
if isinstance(exe, build.Target):
depends.add(exe)
for a in t.cmd_args:
@@ -996,6 +1095,7 @@ class Backend:
depends.add(a)
if isinstance(a, build.BuildTarget):
extra_paths += self.determine_windows_extra_paths(a, [])
+
if isinstance(a, mesonlib.File):
a = os.path.join(self.environment.get_build_dir(), a.rel_to_builddir(self.build_to_src))
cmd_args.append(a)
@@ -1021,10 +1121,10 @@ class Backend:
arr.append(ts)
return arr
- def write_test_serialisation(self, tests: T.List['Test'], datafile: str):
+ def write_test_serialisation(self, tests: T.List['Test'], datafile: T.BinaryIO) -> None:
pickle.dump(self.create_test_serialisation(tests), datafile)
- def construct_target_rel_path(self, a, workdir):
+ def construct_target_rel_path(self, a: build.Target, workdir: T.Optional[str]) -> str:
if workdir is None:
return self.get_target_filename(a)
assert(os.path.isabs(workdir))
@@ -1042,7 +1142,7 @@ class Backend:
# Copy file from, to, and with mode unchanged
d.data.append(InstallDataBase(ifilename, ofilename, None, '', tag='devel'))
- def get_regen_filelist(self):
+ def get_regen_filelist(self) -> T.List[str]:
'''List of all files whose alteration means that the build
definition needs to be regenerated.'''
deps = [str(Path(self.build_to_src) / df)
@@ -1054,7 +1154,7 @@ class Backend:
self.check_clock_skew(deps)
return deps
- def generate_regen_info(self):
+ def generate_regen_info(self) -> None:
deps = self.get_regen_filelist()
regeninfo = RegenInfo(self.environment.get_source_dir(),
self.environment.get_build_dir(),
@@ -1064,7 +1164,7 @@ class Backend:
with open(filename, 'wb') as f:
pickle.dump(regeninfo, f)
- def check_clock_skew(self, file_list):
+ def check_clock_skew(self, file_list: T.List[str]) -> None:
# If a file that leads to reconfiguration has a time
# stamp in the future, it will trigger an eternal reconfigure
# loop.
@@ -1080,15 +1180,15 @@ class Backend:
if delta > 0.001:
raise MesonException(f'Clock skew detected. File {absf} has a time stamp {delta:.4f}s in the future.')
- def build_target_to_cmd_array(self, bt):
+ def build_target_to_cmd_array(self, bt: T.Union[build.BuildTarget, programs.ExternalProgram]) -> T.List[str]:
if isinstance(bt, build.BuildTarget):
arr = [os.path.join(self.environment.get_build_dir(), self.get_target_filename(bt))]
else:
arr = bt.get_command()
return arr
- def replace_extra_args(self, args, genlist):
- final_args = []
+ def replace_extra_args(self, args: T.List[str], genlist: 'build.GeneratedList') -> T.List[str]:
+ final_args: T.List[str] = []
for a in args:
if a == '@EXTRA_ARGS@':
final_args += genlist.get_extra_args()
@@ -1096,8 +1196,8 @@ class Backend:
final_args.append(a)
return final_args
- def replace_outputs(self, args, private_dir, output_list):
- newargs = []
+ def replace_outputs(self, args: T.List[str], private_dir: str, output_list: T.List[str]) -> T.List[str]:
+ newargs: T.List[str] = []
regex = re.compile(r'@OUTPUT(\d+)@')
for arg in args:
m = regex.search(arg)
@@ -1109,12 +1209,12 @@ class Backend:
newargs.append(arg)
return newargs
- def get_build_by_default_targets(self):
- result = OrderedDict()
+ def get_build_by_default_targets(self) -> 'T.OrderedDict[str, T.Union[build.BuildTarget, build.CustomTarget]]':
+ result: 'T.OrderedDict[str, T.Union[build.BuildTarget, build.CustomTarget]]' = OrderedDict()
# Get all build and custom targets that must be built by default
- for name, t in self.build.get_targets().items():
- if t.build_by_default:
- result[name] = t
+ for name, b in self.build.get_targets().items():
+ if b.build_by_default:
+ result[name] = b
# Get all targets used as test executables and arguments. These must
# also be built by default. XXX: Sometime in the future these should be
# built only before running tests.
@@ -1148,19 +1248,17 @@ class Backend:
libs.extend(self.get_custom_target_provided_by_generated_source(t))
return libs
- def is_unity(self, target):
+ def is_unity(self, target: build.BuildTarget) -> bool:
optval = self.get_option_for_target(OptionKey('unity'), target)
- if optval == 'on' or (optval == 'subprojects' and target.subproject != ''):
- return True
- return False
+ return optval == 'on' or (optval == 'subprojects' and target.subproject != '')
- def get_custom_target_sources(self, target):
+ def get_custom_target_sources(self, target: build.CustomTarget) -> T.List[str]:
'''
Custom target sources can be of various object types; strings, File,
BuildTarget, even other CustomTargets.
Returns the path to them relative to the build root directory.
'''
- srcs = []
+ srcs: T.List[str] = []
for i in target.get_sources():
if isinstance(i, str):
fname = [os.path.join(self.build_to_src, target.subdir, i)]
@@ -1179,8 +1277,8 @@ class Backend:
srcs += fname
return srcs
- def get_custom_target_depend_files(self, target, absolute_paths=False):
- deps = []
+ def get_custom_target_depend_files(self, target: build.CustomTarget, absolute_paths: bool = False) -> T.List[str]:
+ deps: T.List[str] = []
for i in target.depend_files:
if isinstance(i, mesonlib.File):
if absolute_paths:
@@ -1195,7 +1293,7 @@ class Backend:
deps.append(os.path.join(self.build_to_src, target.subdir, i))
return deps
- def get_custom_target_output_dir(self, target):
+ def get_custom_target_output_dir(self, target: T.Union[build.Target, build.CustomTargetIndex]) -> str:
# The XCode backend is special. A target foo/bar does
# not go to ${BUILDDIR}/foo/bar but instead to
# ${BUILDDIR}/${BUILDTYPE}/foo/bar.
@@ -1206,11 +1304,12 @@ class Backend:
return self.get_target_dir(target)
@lru_cache(maxsize=None)
- def get_normpath_target(self, source) -> str:
+ def get_normpath_target(self, source: str) -> str:
return os.path.normpath(source)
- def get_custom_target_dirs(self, target, compiler, *, absolute_path=False):
- custom_target_include_dirs = []
+ def get_custom_target_dirs(self, target: build.CustomTarget, compiler: 'Compiler', *,
+ absolute_path: bool = False) -> T.List[str]:
+ custom_target_include_dirs: T.List[str] = []
for i in target.get_generated_sources():
# Generator output goes into the target private dir which is
# already in the include paths list. Only custom targets have their
@@ -1226,14 +1325,18 @@ class Backend:
custom_target_include_dirs.append(idir)
return custom_target_include_dirs
- def get_custom_target_dir_include_args(self, target, compiler, *, absolute_path=False):
- incs = []
+ def get_custom_target_dir_include_args(
+ self, target: build.CustomTarget, compiler: 'Compiler', *,
+ absolute_path: bool = False) -> T.List[str]:
+ incs: T.List[str] = []
for i in self.get_custom_target_dirs(target, compiler, absolute_path=absolute_path):
incs += compiler.get_include_args(i, False)
return incs
- def eval_custom_target_command(self, target, absolute_outputs=False):
+ def eval_custom_target_command(
+ self, target: build.CustomTarget, absolute_outputs: bool = False) -> \
+ T.Tuple[T.List[str], T.List[str], T.List[str]]:
# We want the outputs to be absolute only when using the VS backend
# XXX: Maybe allow the vs backend to use relative paths too?
source_root = self.build_to_src
@@ -1243,12 +1346,10 @@ class Backend:
source_root = self.environment.get_source_dir()
build_root = self.environment.get_build_dir()
outdir = os.path.join(self.environment.get_build_dir(), outdir)
- outputs = []
- for i in target.get_outputs():
- outputs.append(os.path.join(outdir, i))
+ outputs = [os.path.join(outdir, i) for i in target.get_outputs()]
inputs = self.get_custom_target_sources(target)
# Evaluate the command list
- cmd = []
+ cmd: T.List[str] = []
for i in target.command:
if isinstance(i, build.BuildTarget):
cmd += self.build_target_to_cmd_array(i)
@@ -1341,11 +1442,15 @@ class Backend:
else:
# TODO go through all candidates, like others
strip_bin = [detect.defaults['strip'][0]]
+
+ umask = self.environment.coredata.get_option(OptionKey('install_umask'))
+ assert isinstance(umask, (str, int)), 'for mypy'
+
d = InstallData(self.environment.get_source_dir(),
self.environment.get_build_dir(),
self.environment.get_prefix(),
strip_bin,
- self.environment.coredata.get_option(OptionKey('install_umask')),
+ umask,
self.environment.get_build_command() + ['introspect'],
self.environment.coredata.version)
self.generate_depmf_install(d)
@@ -1357,7 +1462,7 @@ class Backend:
self.generate_subdir_install(d)
return d
- def create_install_data_files(self):
+ def create_install_data_files(self) -> None:
install_data_file = os.path.join(self.environment.get_scratch_dir(), 'install.dat')
with open(install_data_file, 'wb') as ofile:
pickle.dump(self.create_install_data(), ofile)
@@ -1367,7 +1472,9 @@ class Backend:
bindir = Path(prefix, self.environment.get_bindir())
libdir = Path(prefix, self.environment.get_libdir())
incdir = Path(prefix, self.environment.get_includedir())
- localedir = Path(prefix, self.environment.coredata.get_option(mesonlib.OptionKey('localedir')))
+ _ldir = self.environment.coredata.get_option(mesonlib.OptionKey('localedir'))
+ assert isinstance(_ldir, str), 'for mypy'
+ localedir = Path(prefix, _ldir)
dest_path = Path(prefix, outdir, Path(fname).name) if outdir else Path(prefix, fname)
if bindir in dest_path.parents:
return 'runtime'
@@ -1537,7 +1644,7 @@ class Backend:
i = InstallDataBase(srcabs, dstabs, m.get_custom_install_mode(), m.subproject, tag='man')
d.man.append(i)
- def generate_data_install(self, d: InstallData):
+ def generate_data_install(self, d: InstallData) -> None:
data = self.build.get_data()
srcdir = self.environment.get_source_dir()
builddir = self.environment.get_build_dir()
@@ -1569,7 +1676,7 @@ class Backend:
i = SubdirInstallData(src_dir, dst_dir, sd.install_mode, sd.exclude, sd.subproject)
d.install_subdirs.append(i)
- def get_introspection_data(self, target_id: str, target: build.Target) -> T.List[T.Dict[str, T.Union[bool, str, T.List[T.Union[str, T.Dict[str, T.Union[str, T.List[str], bool]]]]]]]:
+ def get_introspection_data(self, target_id: str, target: build.Target) -> T.List['TargetIntrospectionData']:
'''
Returns a list of source dicts with the following format for a given target:
[
@@ -1596,11 +1703,9 @@ class Backend:
source_list += [os.path.join(self.build_dir, j.get_subdir(), o) for o in j.get_outputs()]
source_list = list(map(lambda x: os.path.normpath(x), source_list))
- compiler = []
+ compiler: T.List[str] = []
if isinstance(target, build.CustomTarget):
tmp_compiler = target.command
- if not isinstance(compiler, list):
- tmp_compiler = [compiler]
for j in tmp_compiler:
if isinstance(j, mesonlib.File):
compiler += [j.absolute_path(self.source_dir, self.build_dir)]
diff --git a/mesonbuild/backend/ninjabackend.py b/mesonbuild/backend/ninjabackend.py
index 9b249c3..9707640 100644
--- a/mesonbuild/backend/ninjabackend.py
+++ b/mesonbuild/backend/ninjabackend.py
@@ -11,18 +11,20 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-import typing as T
+
+from collections import OrderedDict
+from enum import Enum, unique
+from functools import lru_cache
+from pathlib import PurePath, Path
+from textwrap import dedent
+import itertools
+import json
import os
-import re
import pickle
+import re
import shlex
import subprocess
-from collections import OrderedDict
-from enum import Enum, unique
-import itertools
-from textwrap import dedent
-from pathlib import PurePath, Path
-from functools import lru_cache
+import typing as T
from . import backends
from .. import modules
@@ -915,9 +917,16 @@ class NinjaBackend(backends.Backend):
pickle_base = target.name + '.dat'
pickle_file = os.path.join(self.get_target_private_dir(target), pickle_base).replace('\\', '/')
pickle_abs = os.path.join(self.get_target_private_dir_abs(target), pickle_base).replace('\\', '/')
+ json_abs = os.path.join(self.get_target_private_dir_abs(target), f'{target.name}-deps.json').replace('\\', '/')
rule_name = 'depscan'
scan_sources = self.select_sources_to_scan(compiled_sources)
- elem = NinjaBuildElement(self.all_outputs, depscan_file, rule_name, scan_sources)
+
+            # Dump the sources as a json list. This avoids potential problems where
+            # the number of sources passed to depscan exceeds the limit imposed by
+ # the OS.
+ with open(json_abs, 'w', encoding='utf-8') as f:
+ json.dump(scan_sources, f)
+ elem = NinjaBuildElement(self.all_outputs, depscan_file, rule_name, json_abs)
elem.add_item('picklefile', pickle_file)
scaninfo = TargetDependencyScannerInfo(self.get_target_private_dir(target), source2object)
with open(pickle_abs, 'wb') as p:
@@ -975,7 +984,7 @@ class NinjaBackend(backends.Backend):
for output in d.get_outputs():
elem.add_dep(os.path.join(self.get_target_dir(d), output))
- cmd, reason = self.as_meson_exe_cmdline(target.name, target.command[0], cmd[1:],
+ cmd, reason = self.as_meson_exe_cmdline(target.command[0], cmd[1:],
extra_bdeps=target.get_transitive_build_target_deps(),
capture=ofilenames[0] if target.capture else None,
feed=srcs[0] if target.feed else None,
@@ -1013,7 +1022,7 @@ class NinjaBackend(backends.Backend):
else:
target_env = self.get_run_target_env(target)
_, _, cmd = self.eval_custom_target_command(target)
- meson_exe_cmd, reason = self.as_meson_exe_cmdline(target_name, target.command[0], cmd[1:],
+ meson_exe_cmd, reason = self.as_meson_exe_cmdline(target.command[0], cmd[1:],
force_serialize=True, env=target_env,
verbose=True)
cmd_type = f' (wrapped by meson {reason})'
@@ -2209,8 +2218,7 @@ https://gcc.gnu.org/bugzilla/show_bug.cgi?id=47485'''))
outfilelist = outfilelist[len(generator.outputs):]
args = self.replace_paths(target, args, override_subdir=subdir)
cmdlist = exe_arr + self.replace_extra_args(args, genlist)
- cmdlist, reason = self.as_meson_exe_cmdline('generator ' + cmdlist[0],
- cmdlist[0], cmdlist[1:],
+ cmdlist, reason = self.as_meson_exe_cmdline(cmdlist[0], cmdlist[1:],
capture=outfiles[0] if generator.capture else None)
abs_pdir = os.path.join(self.environment.get_build_dir(), self.get_target_dir(target))
os.makedirs(abs_pdir, exist_ok=True)
@@ -2304,11 +2312,6 @@ https://gcc.gnu.org/bugzilla/show_bug.cgi?id=47485'''))
mod_files = _scan_fortran_file_deps(src, srcdir, dirname, tdeps, compiler)
return mod_files
- def get_no_stdlib_args(self, target, compiler):
- if compiler.language in self.build.stdlibs[target.for_machine]:
- return compiler.get_no_stdinc_args()
- return []
-
def get_no_stdlib_link_args(self, target, linker):
if hasattr(linker, 'language') and linker.language in self.build.stdlibs[target.for_machine]:
return linker.get_no_stdlib_link_args()
@@ -2485,8 +2488,7 @@ https://gcc.gnu.org/bugzilla/show_bug.cgi?id=47485'''))
commands += compiler.get_include_args(d, i.is_system)
# Add per-target compile args, f.ex, `c_args : ['-DFOO']`. We set these
# near the end since these are supposed to override everything else.
- commands += self.escape_extra_args(compiler,
- target.get_extra_args(compiler.get_language()))
+ commands += self.escape_extra_args(target.get_extra_args(compiler.get_language()))
# D specific additional flags
if compiler.language == 'd':
@@ -3082,7 +3084,7 @@ https://gcc.gnu.org/bugzilla/show_bug.cgi?id=47485'''))
except OSError:
mlog.debug("Library versioning disabled because we do not have symlink creation privileges.")
- def generate_custom_target_clean(self, trees):
+ def generate_custom_target_clean(self, trees: T.List[str]) -> str:
e = NinjaBuildElement(self.all_outputs, 'meson-clean-ctlist', 'CUSTOM_COMMAND', 'PHONY')
d = CleanTrees(self.environment.get_build_dir(), trees)
d_file = os.path.join(self.environment.get_scratch_dir(), 'cleantrees.dat')
diff --git a/mesonbuild/backend/vs2010backend.py b/mesonbuild/backend/vs2010backend.py
index 2349b51..0a6e7cd 100644
--- a/mesonbuild/backend/vs2010backend.py
+++ b/mesonbuild/backend/vs2010backend.py
@@ -31,7 +31,7 @@ from ..mesonlib import (
)
from ..environment import Environment, build_filename
-def autodetect_vs_version(build: T.Optional[build.Build], interpreter: T.Optional[Interpreter]):
+def autodetect_vs_version(build: T.Optional[build.Build], interpreter: T.Optional[Interpreter]) -> backends.Backend:
vs_version = os.getenv('VisualStudioVersion', None)
vs_install_dir = os.getenv('VSINSTALLDIR', None)
if not vs_install_dir:
@@ -150,7 +150,6 @@ class Vs2010Backend(backends.Backend):
# there are many arguments.
tdir_abs = os.path.join(self.environment.get_build_dir(), self.get_target_dir(target))
cmd, _ = self.as_meson_exe_cmdline(
- 'generator ' + cmd[0],
cmd[0],
cmd[1:],
workdir=tdir_abs,
@@ -560,7 +559,7 @@ class Vs2010Backend(backends.Backend):
_, _, cmd_raw = self.eval_custom_target_command(target)
depend_files = self.get_custom_target_depend_files(target)
target_env = self.get_run_target_env(target)
- wrapper_cmd, _ = self.as_meson_exe_cmdline(target.name, target.command[0], cmd_raw[1:],
+ wrapper_cmd, _ = self.as_meson_exe_cmdline(target.command[0], cmd_raw[1:],
force_serialize=True, env=target_env,
verbose=True)
self.add_custom_build(root, 'run_target', ' '.join(self.quote_arguments(wrapper_cmd)),
@@ -581,7 +580,7 @@ class Vs2010Backend(backends.Backend):
# there are many arguments.
tdir_abs = os.path.join(self.environment.get_build_dir(), self.get_target_dir(target))
extra_bdeps = target.get_transitive_build_target_deps()
- wrapper_cmd, _ = self.as_meson_exe_cmdline(target.name, target.command[0], cmd[1:],
+ wrapper_cmd, _ = self.as_meson_exe_cmdline(target.command[0], cmd[1:],
# All targets run from the target dir
workdir=tdir_abs,
extra_bdeps=extra_bdeps,
diff --git a/mesonbuild/backend/xcodebackend.py b/mesonbuild/backend/xcodebackend.py
index c67828f..2f21149 100644
--- a/mesonbuild/backend/xcodebackend.py
+++ b/mesonbuild/backend/xcodebackend.py
@@ -1169,8 +1169,7 @@ class XCodeBackend(backends.Backend):
if not isinstance(t, build.CustomTarget):
continue
(srcs, ofilenames, cmd) = self.eval_custom_target_command(t, absolute_outputs=True)
- fixed_cmd, _ = self.as_meson_exe_cmdline(t.name,
- cmd[0],
+ fixed_cmd, _ = self.as_meson_exe_cmdline(cmd[0],
cmd[1:],
#workdir=None,
env=t.env)
diff --git a/mesonbuild/build.py b/mesonbuild/build.py
index 69a2d76..9a13531 100644
--- a/mesonbuild/build.py
+++ b/mesonbuild/build.py
@@ -47,7 +47,9 @@ if T.TYPE_CHECKING:
from .interpreter.interpreter import Test, SourceOutputs, Interpreter
from .mesonlib import FileMode, FileOrString
from .modules import ModuleState
- from .backend.backends import Backend
+ from .backend.backends import Backend, ExecutableSerialisation
+
+ GeneratedTypes = T.Union['CustomTarget', 'CustomTargetIndex', 'GeneratedList']
pch_kwargs = {'c_pch', 'cpp_pch'}
@@ -213,7 +215,7 @@ class Build:
self.project_version = None
self.environment = environment
self.projects = {}
- self.targets: T.MutableMapping[str, 'Target'] = OrderedDict()
+ self.targets: 'T.OrderedDict[str, T.Union[CustomTarget, BuildTarget]]' = OrderedDict()
self.run_target_names: T.Set[T.Tuple[str, str]] = set()
self.global_args: PerMachine[T.Dict[str, T.List[str]]] = PerMachine({}, {})
self.global_link_args: PerMachine[T.Dict[str, T.List[str]]] = PerMachine({}, {})
@@ -228,7 +230,7 @@ class Build:
self.subprojects = {}
self.subproject_dir = ''
self.install_scripts = []
- self.postconf_scripts = []
+ self.postconf_scripts: T.List['ExecutableSerialisation'] = []
self.dist_scripts = []
self.install_dirs: T.List[InstallDir] = []
self.dep_manifest_name = None
@@ -282,7 +284,7 @@ class Build:
def get_subproject_dir(self):
return self.subproject_dir
- def get_targets(self) -> T.Dict[str, 'Target']:
+ def get_targets(self) -> 'T.OrderedDict[str, T.Union[CustomTarget, BuildTarget]]':
return self.targets
def get_tests(self) -> T.List['Test']:
@@ -291,16 +293,16 @@ class Build:
def get_benchmarks(self) -> T.List['Test']:
return self.benchmarks
- def get_headers(self):
+ def get_headers(self) -> T.List['Headers']:
return self.headers
- def get_man(self):
+ def get_man(self) -> T.List['Man']:
return self.man
- def get_data(self):
+ def get_data(self) -> T.List['Data']:
return self.data
- def get_install_subdirs(self):
+ def get_install_subdirs(self) -> T.List['InstallDir']:
return self.install_dirs
def get_global_args(self, compiler: 'Compiler', for_machine: 'MachineChoice') -> T.List[str]:
@@ -364,21 +366,26 @@ class ExtractedObjects(HoldableObject):
'''
Holds a list of sources for which the objects must be extracted
'''
- def __init__(self, target, srclist=None, genlist=None, objlist=None, recursive=True):
+ def __init__(self, target: 'BuildTarget',
+ srclist: T.Optional[T.List[File]] = None,
+ genlist: T.Optional[T.List['GeneratedTypes']] = None,
+ objlist: T.Optional[T.List[T.Union[str, 'File', 'ExtractedObjects']]] = None,
+ recursive: bool = True):
self.target = target
self.recursive = recursive
- self.srclist = srclist if srclist is not None else []
- self.genlist = genlist if genlist is not None else []
- self.objlist = objlist if objlist is not None else []
+ self.srclist: T.List[File] = srclist if srclist is not None else []
+ self.genlist: T.List['GeneratedTypes'] = genlist if genlist is not None else []
+ self.objlist: T.Optional[T.List[T.Union[str, 'File', 'ExtractedObjects']]] = \
+ objlist if objlist is not None else []
if self.target.is_unity:
self.check_unity_compatible()
- def __repr__(self):
+ def __repr__(self) -> str:
r = '<{0} {1!r}: {2}>'
return r.format(self.__class__.__name__, self.target.name, self.srclist)
@staticmethod
- def get_sources(sources, generated_sources):
+ def get_sources(sources: T.Sequence['FileOrString'], generated_sources: T.Sequence['GeneratedTypes']) -> T.List['FileOrString']:
# Merge sources and generated sources
sources = list(sources)
for gensrc in generated_sources:
@@ -391,11 +398,11 @@ class ExtractedObjects(HoldableObject):
# Filter out headers and all non-source files
return [s for s in sources if environment.is_source(s) and not environment.is_header(s)]
- def classify_all_sources(self, sources, generated_sources):
- sources = self.get_sources(sources, generated_sources)
- return classify_unity_sources(self.target.compilers.values(), sources)
+ def classify_all_sources(self, sources: T.List[str], generated_sources: T.Sequence['GeneratedTypes']) -> T.Dict['Compiler', T.List['FileOrString']]:
+ sources_ = self.get_sources(sources, generated_sources)
+ return classify_unity_sources(self.target.compilers.values(), sources_)
- def check_unity_compatible(self):
+ def check_unity_compatible(self) -> None:
# Figure out if the extracted object list is compatible with a Unity
# build. When we're doing a Unified build, we go through the sources,
# and create a single source file from each subset of the sources that
@@ -411,7 +418,7 @@ class ExtractedObjects(HoldableObject):
'in Unity builds. You can only extract all '
'the object files for each compiler at once.')
- def get_outputs(self, backend):
+ def get_outputs(self, backend: 'Backend') -> T.List[str]:
return [
backend.object_filename_from_source(self.target, source)
for source in self.get_sources(self.srclist, self.genlist)
@@ -613,8 +620,8 @@ class BuildTarget(Target):
self.is_unity = unity_opt == 'on' or (unity_opt == 'subprojects' and subproject != '')
self.environment = environment
self.compilers = OrderedDict() # type: OrderedDict[str, Compiler]
- self.objects = []
- self.external_deps = []
+ self.objects: T.List[T.Union[str, 'File', 'ExtractedObjects']] = []
+ self.external_deps: T.List[dependencies.Dependency] = []
self.include_dirs = []
self.link_language = kwargs.get('link_language')
self.link_targets: T.List[BuildTarget] = []
@@ -628,10 +635,10 @@ class BuildTarget(Target):
# as Vala which generates .vapi and .h besides the compiled output.
self.outputs = [self.filename]
self.need_install = False
- self.pch = {}
+ self.pch: T.Dict[str, T.List[str]] = {}
self.extra_args: T.Dict[str, T.List['FileOrString']] = {}
self.sources: T.List[File] = []
- self.generated: T.List[T.Union[GeneratedList, CustomTarget, CustomTargetIndex]] = []
+ self.generated: T.List['GeneratedTypes'] = []
self.d_features = {}
self.pic = False
self.pie = False
@@ -875,7 +882,7 @@ class BuildTarget(Target):
self.kwargs[t] = listify(self.kwargs[t], flatten=True)
def extract_objects(self, srclist: T.List[FileOrString]) -> ExtractedObjects:
- obj_src = []
+ obj_src: T.List['File'] = []
sources_set = set(self.sources)
for src in srclist:
if isinstance(src, str):
@@ -933,7 +940,7 @@ class BuildTarget(Target):
def get_custom_install_dir(self):
return self.install_dir
- def get_custom_install_mode(self):
+ def get_custom_install_mode(self) -> T.Optional['FileMode']:
return self.install_mode
def process_kwargs(self, kwargs, environment):
@@ -1133,7 +1140,7 @@ class BuildTarget(Target):
raise InvalidArguments(f'Argument {arg} to {self.name!r} must be boolean')
return val
- def get_filename(self):
+ def get_filename(self) -> str:
return self.filename
def get_outputs(self) -> T.List[str]:
@@ -1160,23 +1167,20 @@ class BuildTarget(Target):
def get_sources(self):
return self.sources
- def get_objects(self):
+ def get_objects(self) -> T.List[T.Union[str, 'File', 'ExtractedObjects']]:
return self.objects
- def get_generated_sources(self):
+ def get_generated_sources(self) -> T.List['GeneratedTypes']:
return self.generated
def should_install(self) -> bool:
return self.need_install
- def has_pch(self):
- return len(self.pch) > 0
+ def has_pch(self) -> bool:
+ return bool(self.pch)
- def get_pch(self, language):
- try:
- return self.pch[language]
- except KeyError:
- return[]
+ def get_pch(self, language: str) -> T.List[str]:
+ return self.pch.get(language, [])
def get_include_dirs(self):
return self.include_dirs
@@ -1226,10 +1230,10 @@ You probably should put it in link_with instead.''')
'declare_dependency()).')
self.added_deps.add(dep)
- def get_external_deps(self):
+ def get_external_deps(self) -> T.List[dependencies.Dependency]:
return self.external_deps
- def is_internal(self):
+ def is_internal(self) -> bool:
return isinstance(self, StaticLibrary) and not self.need_install
def link(self, target):
@@ -1288,14 +1292,14 @@ You probably should put it in link_with instead.''')
self.objects += t.extract_all_objects_recurse()
self.link_whole_targets.append(t)
- def extract_all_objects_recurse(self):
+ def extract_all_objects_recurse(self) -> T.List[T.Union[str, 'ExtractedObjects']]:
objs = [self.extract_all_objects()]
for t in self.link_targets:
if t.is_internal():
objs += t.extract_all_objects_recurse()
return objs
- def add_pch(self, language, pchlist):
+ def add_pch(self, language: str, pchlist: T.List[str]) -> None:
if not pchlist:
return
elif len(pchlist) == 1:
@@ -1396,7 +1400,7 @@ You probably should put it in link_with instead.''')
return prelinker
raise MesonException(f'Could not determine prelinker for {self.name!r}.')
- def get_clink_dynamic_linker_and_stdlibs(self):
+ def get_clink_dynamic_linker_and_stdlibs(self) -> T.Tuple['Compiler', T.List[str]]:
'''
We use the order of languages in `clink_langs` to determine which
linker to use in case the target has sources compiled with multiple
@@ -1427,8 +1431,8 @@ You probably should put it in link_with instead.''')
f'Could not get a dynamic linker for build target {self.name!r}. '
f'Requires a linker for language "{l}", but that is not '
'a project language.')
- stdlib_args = []
- added_languages = set()
+ stdlib_args: T.List[str] = []
+ added_languages: T.Set[str] = set()
for dl in itertools.chain(self.compilers, dep_langs):
if dl != linker.language:
stdlib_args += all_compilers[dl].language_stdlib_only_link_flags()
@@ -1450,7 +1454,7 @@ You probably should put it in link_with instead.''')
return True
return False
- def get_using_msvc(self):
+ def get_using_msvc(self) -> bool:
'''
Check if the dynamic linker is MSVC. Used by Executable, StaticLibrary,
and SharedLibrary for deciding when to use MSVC-specific file naming
@@ -1727,7 +1731,7 @@ class Executable(BuildTarget):
def type_suffix(self):
return "@exe"
- def get_import_filename(self):
+ def get_import_filename(self) -> T.Optional[str]:
"""
The name of the import library that will be outputted by the compiler
@@ -1740,7 +1744,7 @@ class Executable(BuildTarget):
return [self.vs_import_filename, self.gcc_import_filename]
return []
- def get_debug_filename(self):
+ def get_debug_filename(self) -> T.Optional[str]:
"""
The name of debuginfo file that will be created by the compiler
@@ -2075,7 +2079,7 @@ class SharedLibrary(BuildTarget):
else:
raise InvalidArguments(f'Invalid rust_crate_type "{rust_crate_type}": must be a string.')
- def get_import_filename(self):
+ def get_import_filename(self) -> T.Optional[str]:
"""
The name of the import library that will be outputted by the compiler
@@ -2083,7 +2087,7 @@ class SharedLibrary(BuildTarget):
"""
return self.import_filename
- def get_debug_filename(self):
+ def get_debug_filename(self) -> T.Optional[str]:
"""
The name of debuginfo file that will be created by the compiler
@@ -2256,7 +2260,7 @@ class CustomTarget(Target, CommandBase):
deps.append(c)
return deps
- def get_transitive_build_target_deps(self):
+ def get_transitive_build_target_deps(self) -> T.Set[T.Union[BuildTarget, 'CustomTarget']]:
'''
Recursively fetch the build targets that this custom target depends on,
whether through `command:`, `depends:`, or `sources:` The recursion is
@@ -2265,7 +2269,7 @@ class CustomTarget(Target, CommandBase):
F.ex, if you have a python script that loads a C module that links to
other DLLs in your project.
'''
- bdeps = set()
+ bdeps: T.Set[T.Union[BuildTarget, 'CustomTarget']] = set()
deps = self.get_target_dependencies()
for d in deps:
if isinstance(d, BuildTarget):
@@ -2390,26 +2394,26 @@ class CustomTarget(Target, CommandBase):
def get_custom_install_dir(self):
return self.install_dir
- def get_custom_install_mode(self):
+ def get_custom_install_mode(self) -> T.Optional['FileMode']:
return self.install_mode
def get_outputs(self) -> T.List[str]:
return self.outputs
- def get_filename(self):
+ def get_filename(self) -> str:
return self.outputs[0]
- def get_sources(self):
+ def get_sources(self) -> T.List[T.Union[str, File, 'CustomTarget', 'CustomTargetIndex', 'GeneratedList', 'ExtractedObjects']]:
return self.sources
- def get_generated_lists(self):
- genlists = []
+ def get_generated_lists(self) -> T.List[GeneratedList]:
+ genlists: T.List[GeneratedList] = []
for c in self.sources:
if isinstance(c, GeneratedList):
genlists.append(c)
return genlists
- def get_generated_sources(self):
+ def get_generated_sources(self) -> T.List[GeneratedList]:
return self.get_generated_lists()
def get_dep_outname(self, infilenames):
@@ -2424,12 +2428,11 @@ class CustomTarget(Target, CommandBase):
raise InvalidArguments('Substitution in depfile for custom_target that does not have an input file.')
return self.depfile
- def is_linkable_target(self):
+ def is_linkable_target(self) -> bool:
if len(self.outputs) != 1:
return False
suf = os.path.splitext(self.outputs[0])[-1]
- if suf == '.a' or suf == '.dll' or suf == '.lib' or suf == '.so' or suf == '.dylib':
- return True
+ return suf in {'.a', '.dll', '.lib', '.so', '.dylib'}
def get_link_deps_mapping(self, prefix: str, environment: environment.Environment) -> T.Mapping[str, str]:
return {}
@@ -2449,7 +2452,7 @@ class CustomTarget(Target, CommandBase):
return False
return True
- def extract_all_objects_recurse(self):
+ def extract_all_objects_recurse(self) -> T.List[T.Union[str, 'ExtractedObjects']]:
return self.get_outputs()
def type_suffix(self):
@@ -2469,7 +2472,8 @@ class CustomTarget(Target, CommandBase):
yield CustomTargetIndex(self, i)
class RunTarget(Target, CommandBase):
- def __init__(self, name, command, dependencies, subdir, subproject, env=None):
+ def __init__(self, name: str, command, dependencies,
+ subdir: str, subproject: str, env: T.Optional['EnvironmentVariables'] = None):
self.typename = 'run'
# These don't produce output artifacts
super().__init__(name, subdir, subproject, False, MachineChoice.BUILD)
@@ -2563,12 +2567,16 @@ class CustomTargetIndex(HoldableObject):
the sources.
"""
- def __init__(self, target: CustomTarget, output: int):
+ def __init__(self, target: CustomTarget, output: str):
self.typename = 'custom'
self.target = target
self.output = output
self.for_machine = target.for_machine
+ @property
+ def name(self) -> str:
+ return f'{self.target.name}[{self.output}]'
+
def __repr__(self):
return '<CustomTargetIndex: {!r}[{}]>'.format(
self.target, self.target.get_outputs().index(self.output))
@@ -2579,7 +2587,7 @@ class CustomTargetIndex(HoldableObject):
def get_subdir(self) -> str:
return self.target.get_subdir()
- def get_filename(self):
+ def get_filename(self) -> str:
return self.output
def get_id(self):
@@ -2594,10 +2602,9 @@ class CustomTargetIndex(HoldableObject):
def get_link_dep_subdirs(self):
return self.target.get_link_dep_subdirs()
- def is_linkable_target(self):
+ def is_linkable_target(self) -> bool:
suf = os.path.splitext(self.output)[-1]
- if suf == '.a' or suf == '.dll' or suf == '.lib' or suf == '.so':
- return True
+ return suf in {'.a', '.dll', '.lib', '.so'}
def should_install(self) -> bool:
return self.target.should_install()
@@ -2605,7 +2612,7 @@ class CustomTargetIndex(HoldableObject):
def is_internal(self) -> bool:
return self.target.is_internal()
- def extract_all_objects_recurse(self):
+ def extract_all_objects_recurse(self) -> T.List[T.Union[str, 'ExtractedObjects']]:
return self.target.extract_all_objects_recurse()
def get_custom_install_dir(self):
diff --git a/mesonbuild/compilers/compilers.py b/mesonbuild/compilers/compilers.py
index c71375b..e23d18e 100644
--- a/mesonbuild/compilers/compilers.py
+++ b/mesonbuild/compilers/compilers.py
@@ -1260,6 +1260,10 @@ class Compiler(HoldableObject, metaclass=abc.ABCMeta):
"""Arguments required for a debug build."""
return []
+ def get_no_warn_args(self) -> T.List[str]:
+ """Arguments to completely disable warnings."""
+ return []
+
def get_global_options(lang: str,
comp: T.Type[Compiler],
diff --git a/mesonbuild/environment.py b/mesonbuild/environment.py
index 71286a5..dbf0d4e 100644
--- a/mesonbuild/environment.py
+++ b/mesonbuild/environment.py
@@ -22,7 +22,7 @@ from . import mesonlib
from .mesonlib import (
MesonException, EnvironmentException, MachineChoice, Popen_safe, PerMachine,
PerMachineDefaultable, PerThreeMachineDefaultable, split_args, quote_arg, OptionKey,
- search_version
+ search_version, MesonBugException
)
from . import mlog
from .programs import (
@@ -750,25 +750,29 @@ class Environment:
def get_coredata(self) -> coredata.CoreData:
return self.coredata
- def get_build_command(self, unbuffered=False):
- cmd = mesonlib.get_meson_command().copy()
+ @staticmethod
+ def get_build_command(unbuffered: bool = False) -> T.List[str]:
+ cmd = mesonlib.get_meson_command()
+ if cmd is None:
+ raise MesonBugException('No command?')
+ cmd = cmd.copy()
if unbuffered and 'python' in os.path.basename(cmd[0]):
cmd.insert(1, '-u')
return cmd
- def is_header(self, fname):
+ def is_header(self, fname: 'mesonlib.FileOrString') -> bool:
return is_header(fname)
- def is_source(self, fname):
+ def is_source(self, fname: 'mesonlib.FileOrString') -> bool:
return is_source(fname)
- def is_assembly(self, fname):
+ def is_assembly(self, fname: 'mesonlib.FileOrString') -> bool:
return is_assembly(fname)
- def is_llvm_ir(self, fname):
+ def is_llvm_ir(self, fname: 'mesonlib.FileOrString') -> bool:
return is_llvm_ir(fname)
- def is_object(self, fname):
+ def is_object(self, fname: 'mesonlib.FileOrString') -> bool:
return is_object(fname)
@lru_cache(maxsize=None)
diff --git a/mesonbuild/interpreter/interpreterobjects.py b/mesonbuild/interpreter/interpreterobjects.py
index 9da3456..2bff1bb 100644
--- a/mesonbuild/interpreter/interpreterobjects.py
+++ b/mesonbuild/interpreter/interpreterobjects.py
@@ -668,9 +668,12 @@ class GeneratedObjectsHolder(ObjectHolder[build.ExtractedObjects]):
pass
class Test(MesonInterpreterObject):
- def __init__(self, name: str, project: str, suite: T.List[str], exe: build.Executable,
+ def __init__(self, name: str, project: str, suite: T.List[str],
+ exe: T.Union[ExternalProgram, build.Executable, build.CustomTarget],
depends: T.List[T.Union[build.CustomTarget, build.BuildTarget]],
- is_parallel: bool, cmd_args: T.List[str], env: build.EnvironmentVariables,
+ is_parallel: bool,
+ cmd_args: T.List[T.Union[str, mesonlib.File, build.Target]],
+ env: build.EnvironmentVariables,
should_fail: bool, timeout: int, workdir: T.Optional[str], protocol: str,
priority: int):
super().__init__()
@@ -688,7 +691,7 @@ class Test(MesonInterpreterObject):
self.protocol = TestProtocol.from_str(protocol)
self.priority = priority
- def get_exe(self) -> build.Executable:
+ def get_exe(self) -> T.Union[ExternalProgram, build.Executable, build.CustomTarget]:
return self.exe
def get_name(self) -> str:
diff --git a/mesonbuild/mesonlib/universal.py b/mesonbuild/mesonlib/universal.py
index 65d21ee..3ce46f7 100644
--- a/mesonbuild/mesonlib/universal.py
+++ b/mesonbuild/mesonlib/universal.py
@@ -35,8 +35,7 @@ if T.TYPE_CHECKING:
from .._typing import ImmutableListProtocol
from ..build import ConfigurationData
from ..coredata import KeyedOptionDictType, UserOption
- from ..compilers.compilers import CompilerType
- from ..interpreterbase import ObjectHolder
+ from ..compilers.compilers import Compiler
FileOrString = T.Union['File', str]
@@ -453,7 +452,7 @@ class File(HoldableObject):
return os.path.join(self.subdir, self.fname)
-def get_compiler_for_source(compilers: T.Iterable['CompilerType'], src: str) -> 'CompilerType':
+def get_compiler_for_source(compilers: T.Iterable['Compiler'], src: 'FileOrString') -> 'Compiler':
"""Given a set of compilers and a source, find the compiler for that source type."""
for comp in compilers:
if comp.can_compile(src):
@@ -461,8 +460,8 @@ def get_compiler_for_source(compilers: T.Iterable['CompilerType'], src: str) ->
raise MesonException(f'No specified compiler can handle file {src!s}')
-def classify_unity_sources(compilers: T.Iterable['CompilerType'], sources: T.Iterable[str]) -> T.Dict['CompilerType', T.List[str]]:
- compsrclist = {} # type: T.Dict[CompilerType, T.List[str]]
+def classify_unity_sources(compilers: T.Iterable['Compiler'], sources: T.Sequence['FileOrString']) -> T.Dict['Compiler', T.List['FileOrString']]:
+ compsrclist: T.Dict['Compiler', T.List['FileOrString']] = {}
for src in sources:
comp = get_compiler_for_source(compilers, src)
if comp not in compsrclist:
@@ -1261,6 +1260,8 @@ def dump_conf_header(ofilename: str, cdata: 'ConfigurationData', output_format:
elif output_format == 'nasm':
prelude = CONF_NASM_PRELUDE
prefix = '%'
+ else:
+ raise MesonBugException(f'Undefined output_format: "{output_format}"')
ofilename_tmp = ofilename + '~'
with open(ofilename_tmp, 'w', encoding='utf-8') as ofile:
@@ -1443,7 +1444,7 @@ def iter_regexin_iter(regexiter: T.Iterable[str], initer: T.Iterable[str]) -> T.
return None
-def _substitute_values_check_errors(command: T.List[str], values: T.Dict[str, str]) -> None:
+def _substitute_values_check_errors(command: T.List[str], values: T.Dict[str, T.Union[str, T.List[str]]]) -> None:
# Error checking
inregex = ['@INPUT([0-9]+)?@', '@PLAINNAME@', '@BASENAME@'] # type: T.List[str]
outregex = ['@OUTPUT([0-9]+)?@', '@OUTDIR@'] # type: T.List[str]
@@ -1484,7 +1485,7 @@ def _substitute_values_check_errors(command: T.List[str], values: T.Dict[str, st
raise MesonException(m.format(match2.group(), len(values['@OUTPUT@'])))
-def substitute_values(command: T.List[str], values: T.Dict[str, str]) -> T.List[str]:
+def substitute_values(command: T.List[str], values: T.Dict[str, T.Union[str, T.List[str]]]) -> T.List[str]:
'''
Substitute the template strings in the @values dict into the list of
strings @command and return a new list. For a full list of the templates,
@@ -1493,14 +1494,29 @@ def substitute_values(command: T.List[str], values: T.Dict[str, str]) -> T.List[
If multiple inputs/outputs are given in the @values dictionary, we
substitute @INPUT@ and @OUTPUT@ only if they are the entire string, not
just a part of it, and in that case we substitute *all* of them.
+
+ The typing of this function is difficult, as only @OUTPUT@ and @INPUT@ can
+ be lists, everything else is a string. However, TypedDict cannot represent
+ this, as you can have optional keys, but not extra keys. We end up just
+ having to use asserts to convince type checkers that this is okay.
+
+ https://github.com/python/mypy/issues/4617
'''
+
+ def replace(m: T.Match[str]) -> str:
+ v = values[m.group(0)]
+ assert isinstance(v, str), 'for mypy'
+ return v
+
# Error checking
_substitute_values_check_errors(command, values)
+
# Substitution
outcmd = [] # type: T.List[str]
rx_keys = [re.escape(key) for key in values if key not in ('@INPUT@', '@OUTPUT@')]
value_rx = re.compile('|'.join(rx_keys)) if rx_keys else None
for vv in command:
+ more: T.Optional[str] = None
if not isinstance(vv, str):
outcmd.append(vv)
elif '@INPUT@' in vv:
@@ -1521,15 +1537,22 @@ def substitute_values(command: T.List[str], values: T.Dict[str, str]) -> T.List[
else:
raise MesonException("Command has '@OUTPUT@' as part of a "
"string and more than one output file")
+
# Append values that are exactly a template string.
# This is faster than a string replace.
elif vv in values:
- outcmd.append(values[vv])
+ o = values[vv]
+ assert isinstance(o, str), 'for mypy'
+ more = o
# Substitute everything else with replacement
elif value_rx:
- outcmd.append(value_rx.sub(lambda m: values[m.group(0)], vv))
+ more = value_rx.sub(replace, vv)
else:
- outcmd.append(vv)
+ more = vv
+
+ if more is not None:
+ outcmd.append(more)
+
return outcmd
diff --git a/mesonbuild/modules/python.py b/mesonbuild/modules/python.py
index 65a73a7..279e297 100644
--- a/mesonbuild/modules/python.py
+++ b/mesonbuild/modules/python.py
@@ -306,6 +306,7 @@ if T.TYPE_CHECKING:
link_libpython: bool
paths: T.Dict[str, str]
platform: str
+ suffix : str
variables: T.Dict[str, str]
version: str
@@ -316,7 +317,7 @@ class PythonExternalProgram(ExternalProgram):
if ext_prog is None:
super().__init__(name, command=command, silent=True)
else:
- self.name = ext_prog.name
+ self.name = name
self.command = ext_prog.command
self.path = ext_prog.path
@@ -334,6 +335,35 @@ class PythonExternalProgram(ExternalProgram):
'version': '0.0',
}
+ def _check_version(self, version: str) -> bool:
+ if self.name == 'python2':
+ return mesonlib.version_compare(version, '< 3.0')
+ elif self.name == 'python3':
+ return mesonlib.version_compare(version, '>= 3.0')
+ return True
+
+ def sanity(self) -> bool:
+ # Sanity check, we expect to have something that at least quacks in tune
+ cmd = self.get_command() + ['-c', INTROSPECT_COMMAND]
+ p, stdout, stderr = mesonlib.Popen_safe(cmd)
+ try:
+ info = json.loads(stdout)
+ except json.JSONDecodeError:
+ info = None
+ mlog.debug('Could not introspect Python (%s): exit code %d' % (str(p.args), p.returncode))
+ mlog.debug('Program stdout:\n')
+ mlog.debug(stdout)
+ mlog.debug('Program stderr:\n')
+ mlog.debug(stderr)
+
+ if info is not None and self._check_version(info['version']):
+ variables = info['variables']
+ info['suffix'] = variables.get('EXT_SUFFIX') or variables.get('SO') or variables.get('.so')
+ self.info = T.cast('PythonIntrospectionDict', info)
+ return True
+ else:
+ return False
+
_PURE_KW = KwargInfo('pure', bool, default=True)
_SUBDIR_KW = KwargInfo('subdir', str, default='')
@@ -354,6 +384,7 @@ class PythonInstallation(ExternalProgramHolder):
prefix = self.interpreter.environment.coredata.get_option(mesonlib.OptionKey('prefix'))
assert isinstance(prefix, str), 'for mypy'
self.variables = info['variables']
+ self.suffix = info['suffix']
self.paths = info['paths']
install_paths = info['install_paths']
self.platlib_install_path = os.path.join(prefix, install_paths['platlib'][1:])
@@ -378,10 +409,10 @@ class PythonInstallation(ExternalProgramHolder):
@permittedKwargs(mod_kwargs)
def extension_module_method(self, args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> 'SharedModule':
- if 'subdir' in kwargs and 'install_dir' in kwargs:
- raise InvalidArguments('"subdir" and "install_dir" are mutually exclusive')
-
- if 'subdir' in kwargs:
+ if 'install_dir' in kwargs:
+ if 'subdir' in kwargs:
+ raise InvalidArguments('"subdir" and "install_dir" are mutually exclusive')
+ else:
subdir = kwargs.pop('subdir', '')
if not isinstance(subdir, str):
raise InvalidArguments('"subdir" argument must be a string.')
@@ -399,12 +430,10 @@ class PythonInstallation(ExternalProgramHolder):
new_deps.append(dep)
kwargs['dependencies'] = new_deps
- suffix = self.variables.get('EXT_SUFFIX') or self.variables.get('SO') or self.variables.get('.so')
-
# msys2's python3 has "-cpython-36m.dll", we have to be clever
- split = suffix.rsplit('.', 1)
- suffix = split.pop(-1)
- args[0] += ''.join(s for s in split)
+ # FIXME: explain what the specific cleverness is here
+ split, suffix = self.suffix.rsplit('.', 1)
+ args[0] += split
kwargs['name_prefix'] = ''
kwargs['name_suffix'] = suffix
@@ -535,14 +564,6 @@ class PythonModule(ExtensionModule):
else:
return None
- @staticmethod
- def _check_version(name_or_path: str, version: str) -> bool:
- if name_or_path == 'python2':
- return mesonlib.version_compare(version, '< 3.0')
- elif name_or_path == 'python3':
- return mesonlib.version_compare(version, '>= 3.0')
- return True
-
@disablerIfNotFound
@typed_pos_args('python.find_installation', optargs=[str])
@typed_kwargs(
@@ -567,6 +588,7 @@ class PythonModule(ExtensionModule):
# $PATH, or that uses a wrapper of some kind.
np: T.List[str] = state.environment.lookup_binary_entry(MachineChoice.HOST, 'python') or []
fallback = args[0]
+ display_name = fallback or 'python'
if not np and fallback is not None:
np = [fallback]
name_or_path = np[0] if np else None
@@ -578,8 +600,8 @@ class PythonModule(ExtensionModule):
if not name_or_path:
python = PythonExternalProgram('python3', mesonlib.python_command)
else:
- tmp_python = ExternalProgram.from_entry('python3', name_or_path)
- python = PythonExternalProgram('python3', ext_prog=tmp_python)
+ tmp_python = ExternalProgram.from_entry(display_name, name_or_path)
+ python = PythonExternalProgram(display_name, ext_prog=tmp_python)
if not python.found() and mesonlib.is_windows():
pythonpath = self._get_win_pythonpath(name_or_path)
@@ -627,21 +649,9 @@ class PythonModule(ExtensionModule):
raise mesonlib.MesonException('{} is missing modules: {}'.format(name_or_path or 'python', ', '.join(missing_modules)))
return NonExistingExternalProgram()
else:
- # Sanity check, we expect to have something that at least quacks in tune
- cmd = python.get_command() + ['-c', INTROSPECT_COMMAND]
- p, stdout, stderr = mesonlib.Popen_safe(cmd)
- try:
- info = json.loads(stdout)
- except json.JSONDecodeError:
- info = None
- mlog.debug('Could not introspect Python (%s): exit code %d' % (str(p.args), p.returncode))
- mlog.debug('Program stdout:\n')
- mlog.debug(stdout)
- mlog.debug('Program stderr:\n')
- mlog.debug(stderr)
-
- if isinstance(info, dict) and 'version' in info and self._check_version(name_or_path, info['version']):
- python.info = T.cast('PythonIntrospectionDict', info)
+ sane = python.sanity()
+
+ if sane:
return python
else:
if required:
diff --git a/mesonbuild/modules/unstable_external_project.py b/mesonbuild/modules/unstable_external_project.py
index f866e75..32ecdf9 100644
--- a/mesonbuild/modules/unstable_external_project.py
+++ b/mesonbuild/modules/unstable_external_project.py
@@ -19,7 +19,7 @@ import typing as T
from . import ExtensionModule, ModuleReturnValue, ModuleState, NewExtensionModule
from .. import mlog, build
from ..mesonlib import (MesonException, Popen_safe, MachineChoice,
- get_variable_regex, do_replacement, extract_as_list)
+ get_variable_regex, do_replacement, extract_as_list, join_args)
from ..interpreterbase import InterpreterException, FeatureNew
from ..interpreterbase import permittedKwargs, typed_pos_args
from ..compilers.compilers import CFLAGS_MAPPING, CEXE_MAPPING
@@ -67,19 +67,26 @@ class ExternalProject(NewExtensionModule):
# self.prefix is an absolute path, so we cannot append it to another path.
self.rel_prefix = self.prefix.relative_to(self.prefix.root)
- self.make = state.find_program('make')
- self.make = self.make.get_command()[0]
-
self._configure(state)
self.targets = self._create_targets()
def _configure(self, state: ModuleState):
- # Assume it's the name of a script in source dir, like 'configure',
- # 'autogen.sh', etc).
- configure_path = Path(self.src_dir, self.configure_command)
- configure_prog = state.find_program(configure_path.as_posix())
- configure_cmd = configure_prog.get_command()
+ if self.configure_command == 'waf':
+ FeatureNew('Waf external project', '0.60.0').use(self.subproject)
+ waf = state.find_program('waf')
+ configure_cmd = waf.get_command()
+ configure_cmd += ['configure', '-o', str(self.build_dir)]
+ workdir = self.src_dir
+ self.make = waf.get_command() + ['build']
+ else:
+ # Assume it's the name of a script in source dir, like 'configure',
+ # 'autogen.sh', etc).
+ configure_path = Path(self.src_dir, self.configure_command)
+ configure_prog = state.find_program(configure_path.as_posix())
+ configure_cmd = configure_prog.get_command()
+ workdir = self.build_dir
+ self.make = state.find_program('make').get_command()
d = [('PREFIX', '--prefix=@PREFIX@', self.prefix.as_posix()),
('LIBDIR', '--libdir=@PREFIX@/@LIBDIR@', self.libdir.as_posix()),
@@ -122,7 +129,7 @@ class ExternalProject(NewExtensionModule):
Path(self.env.get_build_dir(), 'meson-uninstalled').as_posix())
self.build_dir.mkdir(parents=True, exist_ok=True)
- self._run('configure', configure_cmd)
+ self._run('configure', configure_cmd, workdir)
def _quote_and_join(self, array: T.List[str]) -> str:
return ' '.join([shlex.quote(i) for i in array])
@@ -156,9 +163,9 @@ class ExternalProject(NewExtensionModule):
f"Variables {var_list} in configure options are missing.")
return out
- def _run(self, step: str, command: T.List[str]):
+ def _run(self, step: str, command: T.List[str], workdir: Path):
mlog.log(f'External project {self.name}:', mlog.bold(step))
- m = 'Running command ' + str(command) + ' in directory ' + str(self.build_dir) + '\n'
+ m = 'Running command ' + str(command) + ' in directory ' + str(workdir) + '\n'
log_filename = Path(mlog.log_dir, f'{self.name}-{step}.log')
output = None
if not self.verbose:
@@ -167,9 +174,9 @@ class ExternalProject(NewExtensionModule):
output.flush()
else:
mlog.log(m)
- p, o, e = Popen_safe(command, cwd=str(self.build_dir), env=self.run_env,
- stderr=subprocess.STDOUT,
- stdout=output)
+ p, o, e = Popen_safe(command, cwd=str(workdir), env=self.run_env,
+ stderr=subprocess.STDOUT,
+ stdout=output)
if p.returncode != 0:
m = f'{step} step returned error code {p.returncode}.'
if not self.verbose:
@@ -184,7 +191,7 @@ class ExternalProject(NewExtensionModule):
'--builddir', self.build_dir.as_posix(),
'--installdir', self.install_dir.as_posix(),
'--logdir', mlog.log_dir,
- '--make', self.make,
+ '--make', join_args(self.make),
]
if self.verbose:
cmd.append('--verbose')
diff --git a/mesonbuild/modules/unstable_rust.py b/mesonbuild/modules/unstable_rust.py
index 995370a..998dbfd 100644
--- a/mesonbuild/modules/unstable_rust.py
+++ b/mesonbuild/modules/unstable_rust.py
@@ -17,26 +17,34 @@ import typing as T
from . import ExtensionModule, ModuleReturnValue
from .. import mlog
-from ..build import BuildTarget, CustomTargetIndex, Executable, GeneratedList, InvalidArguments, IncludeDirs, CustomTarget
-from ..interpreter.interpreter import TEST_KWARGS
-from ..interpreterbase import ContainerTypeInfo, InterpreterException, KwargInfo, permittedKwargs, FeatureNew, typed_kwargs, typed_pos_args, noPosargs
-from ..mesonlib import stringlistify, listify, typeslistify, File
+from ..build import BothLibraries, BuildTarget, CustomTargetIndex, Executable, GeneratedList, IncludeDirs, CustomTarget
from ..dependencies import Dependency, ExternalLibrary
-from ..interpreterbase import InterpreterException, permittedKwargs, FeatureNew, typed_pos_args, noPosargs
-from ..mesonlib import stringlistify, listify, typeslistify, File
+from ..interpreter.interpreter import TEST_KWARGS
+from ..interpreterbase import ContainerTypeInfo, InterpreterException, KwargInfo, FeatureNew, typed_kwargs, typed_pos_args, noPosargs
+from ..mesonlib import File
if T.TYPE_CHECKING:
from . import ModuleState
from ..interpreter import Interpreter
from ..interpreter import kwargs as _kwargs
- from ..interpreter.interpreter import SourceOutputs
+ from ..interpreter.interpreter import SourceInputs
from ..programs import ExternalProgram
+ from typing_extensions import TypedDict
+
class FuncTest(_kwargs.BaseTest):
dependencies: T.List[T.Union[Dependency, ExternalLibrary]]
is_parallel: bool
+ class FuncBindgen(TypedDict):
+
+ args: T.List[str]
+ c_args: T.List[str]
+ include_directories: T.List[IncludeDirs]
+ input: T.List[SourceInputs]
+ output: str
+
class RustModule(ExtensionModule):
@@ -153,30 +161,27 @@ class RustModule(ExtensionModule):
return ModuleReturnValue(None, [new_target, test])
@noPosargs
- @permittedKwargs({'input', 'output', 'include_directories', 'c_args', 'args'})
- def bindgen(self, state: 'ModuleState', args: T.List, kwargs: T.Dict[str, T.Any]) -> ModuleReturnValue:
+ @typed_kwargs(
+ 'rust.bindgen',
+ KwargInfo('c_args', ContainerTypeInfo(list, str), default=[], listify=True),
+ KwargInfo('args', ContainerTypeInfo(list, str), default=[], listify=True),
+ KwargInfo('include_directories', ContainerTypeInfo(list, IncludeDirs), default=[], listify=True),
+ KwargInfo(
+ 'input',
+ ContainerTypeInfo(list, (File, GeneratedList, BuildTarget, BothLibraries, CustomTargetIndex, CustomTarget, str), allow_empty=False),
+ default=[],
+ listify=True,
+ required=True,
+ ),
+ KwargInfo('output', str, required=True),
+ )
+ def bindgen(self, state: 'ModuleState', args: T.List, kwargs: 'FuncBindgen') -> ModuleReturnValue:
"""Wrapper around bindgen to simplify it's use.
The main thing this simplifies is the use of `include_directory`
objects, instead of having to pass a plethora of `-I` arguments.
"""
- header: 'SourceOutputs'
- _deps: T.Sequence['SourceOutputs']
- try:
- header, *_deps = self.interpreter.source_strings_to_files(listify(kwargs['input']))
- except KeyError:
- raise InvalidArguments('rustmod.bindgen() `input` argument must have at least one element.')
-
- try:
- output: str = kwargs['output']
- except KeyError:
- raise InvalidArguments('rustmod.bindgen() `output` must be provided')
- if not isinstance(output, str):
- raise InvalidArguments('rustmod.bindgen() `output` argument must be a string.')
-
- include_dirs: T.List[IncludeDirs] = typeslistify(listify(kwargs.get('include_directories', [])), IncludeDirs)
- c_args: T.List[str] = stringlistify(listify(kwargs.get('c_args', [])))
- bind_args: T.List[str] = stringlistify(listify(kwargs.get('args', [])))
+ header, *_deps = self.interpreter.source_strings_to_files(kwargs['input'])
# Split File and Target dependencies to add pass to CustomTarget
depends: T.List[T.Union[GeneratedList, BuildTarget, CustomTargetIndex, CustomTarget]] = []
@@ -188,7 +193,7 @@ class RustModule(ExtensionModule):
depends.append(d)
inc_strs: T.List[str] = []
- for i in include_dirs:
+ for i in kwargs['include_directories']:
# bindgen always uses clang, so it's safe to hardcode -I here
inc_strs.extend([f'-I{x}' for x in i.to_string_list(state.environment.get_source_dir())])
@@ -207,11 +212,11 @@ class RustModule(ExtensionModule):
state.subproject,
{
'input': header,
- 'output': output,
+ 'output': kwargs['output'],
'command': self._bindgen_bin.get_command() + [
'@INPUT@', '--output',
os.path.join(state.environment.build_dir, '@OUTPUT@')] +
- bind_args + ['--'] + c_args + inc_strs +
+ kwargs['args'] + ['--'] + kwargs['c_args'] + inc_strs +
['-MD', '-MQ', '@INPUT@', '-MF', '@DEPFILE@'],
'depfile': '@PLAINNAME@.d',
'depends': depends,
@@ -223,5 +228,5 @@ class RustModule(ExtensionModule):
return ModuleReturnValue([target], [target])
-def initialize(*args: T.List, **kwargs: T.Dict) -> RustModule:
- return RustModule(*args, **kwargs) # type: ignore
+def initialize(interp: 'Interpreter') -> RustModule:
+ return RustModule(interp)
diff --git a/mesonbuild/scripts/depscan.py b/mesonbuild/scripts/depscan.py
index 9fc435b..68e7dc4 100644
--- a/mesonbuild/scripts/depscan.py
+++ b/mesonbuild/scripts/depscan.py
@@ -12,10 +12,11 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+import json
+import os
import pathlib
import pickle
import re
-import os
import sys
import typing as T
@@ -194,8 +195,9 @@ class DependencyScanner:
return 0
def run(args: T.List[str]) -> int:
- pickle_file = args[0]
- outfile = args[1]
- sources = args[2:]
+ assert len(args) == 3, 'got wrong number of arguments!'
+ pickle_file, outfile, jsonfile = args
+ with open(jsonfile, 'r', encoding='utf-8') as f:
+ sources = json.load(f)
scanner = DependencyScanner(pickle_file, outfile, sources)
return scanner.scan()
diff --git a/mesonbuild/scripts/externalproject.py b/mesonbuild/scripts/externalproject.py
index a8e3bfe..eefa32e 100644
--- a/mesonbuild/scripts/externalproject.py
+++ b/mesonbuild/scripts/externalproject.py
@@ -19,7 +19,7 @@ import subprocess
from pathlib import Path
import typing as T
-from ..mesonlib import Popen_safe
+from ..mesonlib import Popen_safe, split_args
class ExternalProject:
def __init__(self, options: argparse.Namespace):
@@ -31,7 +31,7 @@ class ExternalProject:
self.verbose = options.verbose
self.stampfile = options.stampfile
self.depfile = options.depfile
- self.make = options.make
+ self.make = split_args(options.make)
def write_depfile(self) -> None:
with open(self.depfile, 'w', encoding='utf-8') as f:
@@ -49,22 +49,28 @@ class ExternalProject:
pass
def gnu_make(self) -> bool:
- p, o, e = Popen_safe([self.make, '--version'])
+ p, o, e = Popen_safe(self.make + ['--version'])
if p.returncode == 0 and 'GNU Make' in o:
return True
return False
def build(self) -> int:
- make_cmd = [self.make]
- if self.gnu_make():
- make_cmd.append('-j' + str(multiprocessing.cpu_count()))
-
+ is_make = self.make[0] == 'make'
+ make_cmd = self.make.copy()
+ if is_make and self.gnu_make():
+ make_cmd.append(f'-j{multiprocessing.cpu_count()}')
rc = self._run('build', make_cmd)
if rc != 0:
return rc
- install_cmd = make_cmd + ['DESTDIR= ' + self.install_dir, 'install']
- rc = self._run('install', install_cmd)
+ install_cmd = self.make.copy()
+ install_env = {}
+ if is_make:
+ install_cmd.append(f'DESTDIR={self.install_dir}')
+ else:
+ install_env['DESTDIR'] = self.install_dir
+ install_cmd.append('install')
+ rc = self._run('install', install_cmd, install_env)
if rc != 0:
return rc
@@ -73,7 +79,7 @@ class ExternalProject:
return 0
- def _run(self, step: str, command: T.List[str]) -> int:
+ def _run(self, step: str, command: T.List[str], env: T.Optional[T.Dict[str, str]] = None) -> int:
m = 'Running command ' + str(command) + ' in directory ' + str(self.build_dir) + '\n'
log_filename = Path(self.log_dir, f'{self.name}-{step}.log')
output = None
@@ -83,8 +89,12 @@ class ExternalProject:
output.flush()
else:
print(m)
+ run_env = os.environ.copy()
+ if env:
+ run_env.update(env)
p, o, e = Popen_safe(command, stderr=subprocess.STDOUT, stdout=output,
- cwd=self.build_dir)
+ cwd=self.build_dir,
+ env=run_env)
if p.returncode != 0:
m = f'{step} step returned error code {p.returncode}.'
if not self.verbose:
diff --git a/run_mypy.py b/run_mypy.py
index a7570e0..f8f22a0 100755
--- a/run_mypy.py
+++ b/run_mypy.py
@@ -22,6 +22,7 @@ modules = [
# specific files
'mesonbuild/arglist.py',
+ 'mesonbuild/backend/backends.py',
# 'mesonbuild/coredata.py',
'mesonbuild/envconfig.py',
'mesonbuild/interpreter/compiler.py',
diff --git a/run_project_tests.py b/run_project_tests.py
index e8e9e67..154b66f 100755
--- a/run_project_tests.py
+++ b/run_project_tests.py
@@ -52,6 +52,7 @@ from mesonbuild.mesonlib import MachineChoice, Popen_safe, TemporaryDirectoryWin
from mesonbuild.mlog import blue, bold, cyan, green, red, yellow, normal_green
from mesonbuild.coredata import backendlist, version as meson_version
from mesonbuild.mesonmain import setup_vsenv
+from mesonbuild.modules.python import PythonExternalProgram
from run_tests import get_fake_options, run_configure, get_meson_script
from run_tests import get_backend_commands, get_backend_args_for_dir, Backend
from run_tests import ensure_backend_detects_changes
@@ -62,6 +63,7 @@ if T.TYPE_CHECKING:
from mesonbuild.environment import Environment
from mesonbuild._typing import Protocol
from concurrent.futures import Future
+ from mesonbuild.modules.python import PythonIntrospectionDict
class CompilerArgumentType(Protocol):
cross_file: str
@@ -122,6 +124,9 @@ class TestResult(BaseException):
def fail(self, msg: str) -> None:
self.msg = msg
+python = PythonExternalProgram(sys.executable)
+python.sanity()
+
class InstalledFile:
def __init__(self, raw: T.Dict[str, str]):
self.path = raw['file']
@@ -143,6 +148,9 @@ class InstalledFile:
(env.machines.host.is_windows() and compiler in {'pgi', 'dmd', 'ldc'})):
canonical_compiler = 'msvc'
+ python_paths = python.info['install_paths']
+ python_suffix = python.info['suffix']
+
has_pdb = False
if self.language in {'c', 'cpp'}:
has_pdb = canonical_compiler == 'msvc'
@@ -161,6 +169,15 @@ class InstalledFile:
return None
# Handle the different types
+ if self.typ in {'py_implib', 'python_lib', 'python_file'}:
+ val = p.as_posix()
+ val = val.replace('@PYTHON_PLATLIB@', python_paths['platlib'])
+ val = val.replace('@PYTHON_PURELIB@', python_paths['purelib'])
+ p = Path(val)
+ if self.typ == 'python_file':
+ return p
+ if self.typ == 'python_lib':
+ return p.with_suffix(python_suffix)
if self.typ in ['file', 'dir']:
return p
elif self.typ == 'shared_lib':
@@ -195,13 +212,15 @@ class InstalledFile:
if self.version:
p = p.with_name('{}-{}'.format(p.name, self.version[0]))
return p.with_suffix('.pdb') if has_pdb else None
- elif self.typ == 'implib' or self.typ == 'implibempty':
+ elif self.typ in {'implib', 'implibempty', 'py_implib'}:
if env.machines.host.is_windows() and canonical_compiler == 'msvc':
# only MSVC doesn't generate empty implibs
if self.typ == 'implibempty' and compiler == 'msvc':
return None
return p.parent / (re.sub(r'^lib', '', p.name) + '.lib')
elif env.machines.host.is_windows() or env.machines.host.is_cygwin():
+ if self.typ == 'py_implib':
+ p = p.with_suffix(python_suffix)
return p.with_suffix('.dll.a')
else:
return None
diff --git a/test cases/frameworks/1 boost/meson.build b/test cases/frameworks/1 boost/meson.build
index 83570f0..821bb62 100644
--- a/test cases/frameworks/1 boost/meson.build
+++ b/test cases/frameworks/1 boost/meson.build
@@ -21,21 +21,22 @@ notfound = dependency('boost', static: s, modules : ['this_should_not_exist_o
assert(not notfound.found())
+require_bp = host_machine.system() in ['linux', 'darwin']
pymod = import('python')
python2 = pymod.find_installation('python2', required: false , disabler: true)
-python3 = pymod.find_installation('python3', required: host_machine.system() == 'linux', disabler: true)
+python3 = pymod.find_installation('python3', required: require_bp , disabler: true)
python2dep = python2.dependency(required: false , embed: true, disabler: true)
-python3dep = python3.dependency(required: host_machine.system() == 'linux', embed: true, disabler: true)
+python3dep = python3.dependency(required: require_bp, embed: true, disabler: true)
# compile python 2/3 modules only if we found a corresponding python version
-if(python2dep.found() and host_machine.system() == 'linux' and not s)
+if(python2dep.found() and require_bp and not s)
bpython2dep = dependency('boost', static: s, modules : ['python'], required: false, disabler: true)
else
python2dep = disabler()
bpython2dep = disabler()
endif
-if(python3dep.found() and host_machine.system() == 'linux' and not s)
+if(python3dep.found() and require_bp and not s)
bpython3dep = dependency('boost', static: s, modules : ['python3'])
else
python3dep = disabler()
@@ -48,8 +49,9 @@ nomodexe = executable('nomod', 'nomod.cpp', dependencies : nomoddep)
extralibexe = executable('extralibexe', 'extralib.cpp', dependencies : extralibdep)
# python modules are shared libraries
-python2module = shared_library('python2_module', ['python_module.cpp'], dependencies: [python2dep, bpython2dep], name_prefix: '', cpp_args: ['-DMOD_NAME=python2_module'])
-python3module = shared_library('python3_module', ['python_module.cpp'], dependencies: [python3dep, bpython3dep], name_prefix: '', cpp_args: ['-DMOD_NAME=python3_module'])
+python2module = python2.extension_module('python2_module', ['python_module.cpp'], dependencies: [python2dep, bpython2dep], cpp_args: ['-DMOD_NAME=python2_module'])
+python3module = python3.extension_module('python3_module', ['python_module.cpp'], dependencies: [python3dep, bpython3dep], cpp_args: ['-DMOD_NAME=python3_module'])
+
test('Boost linktest', linkexe, timeout: 60)
test('Boost UTF test', unitexe, timeout: 60)
diff --git a/test cases/python/2 extmodule/blaster.py b/test cases/python/2 extmodule/blaster.py.in
index aaac984..b690b40 100755
--- a/test cases/python/2 extmodule/blaster.py
+++ b/test cases/python/2 extmodule/blaster.py.in
@@ -1,6 +1,6 @@
#!/usr/bin/env python3
-import tachyon
+import @tachyon_module@ as tachyon
result = tachyon.phaserize('shoot')
diff --git a/test cases/python/2 extmodule/ext/meson.build b/test cases/python/2 extmodule/ext/meson.build
index b13bb32..799e9b0 100644
--- a/test cases/python/2 extmodule/ext/meson.build
+++ b/test cases/python/2 extmodule/ext/meson.build
@@ -1,6 +1,10 @@
pylib = py.extension_module('tachyon',
'tachyon_module.c',
dependencies : py_dep,
+ c_args: '-DMESON_MODULENAME="tachyon"',
+ install: true,
)
+subdir('nested')
+subdir('wrongdir')
pypathdir = meson.current_build_dir()
diff --git a/test cases/python/2 extmodule/ext/nested/meson.build b/test cases/python/2 extmodule/ext/nested/meson.build
new file mode 100644
index 0000000..38d3d3e
--- /dev/null
+++ b/test cases/python/2 extmodule/ext/nested/meson.build
@@ -0,0 +1,16 @@
+py.extension_module('tachyon',
+ '../tachyon_module.c',
+ dependencies : py_dep,
+ c_args: '-DMESON_MODULENAME="nested.tachyon"',
+ install: true,
+ subdir: 'nested'
+)
+py.install_sources(
+ configure_file(
+ input: '../../blaster.py.in',
+ output: 'blaster.py',
+ configuration: {'tachyon_module': 'nested.tachyon'}
+ ),
+ pure: false,
+ subdir: 'nested',
+)
diff --git a/test cases/python/2 extmodule/ext/tachyon_module.c b/test cases/python/2 extmodule/ext/tachyon_module.c
index b2592e4..a5d7cdc 100644
--- a/test cases/python/2 extmodule/ext/tachyon_module.c
+++ b/test cases/python/2 extmodule/ext/tachyon_module.c
@@ -38,7 +38,7 @@ static PyMethodDef TachyonMethods[] = {
static struct PyModuleDef tachyonmodule = {
PyModuleDef_HEAD_INIT,
- "tachyon",
+ MESON_MODULENAME,
NULL,
-1,
TachyonMethods
diff --git a/test cases/python/2 extmodule/ext/wrongdir/meson.build b/test cases/python/2 extmodule/ext/wrongdir/meson.build
new file mode 100644
index 0000000..1355d4f
--- /dev/null
+++ b/test cases/python/2 extmodule/ext/wrongdir/meson.build
@@ -0,0 +1,7 @@
+py.extension_module('tachyon',
+ '../tachyon_module.c',
+ dependencies : py_dep,
+ c_args: '-DMESON_MODULENAME="tachyon"',
+ install: true,
+ install_dir: get_option('libdir')
+)
diff --git a/test cases/python/2 extmodule/meson.build b/test cases/python/2 extmodule/meson.build
index 18d70c8..c3f4eec 100644
--- a/test cases/python/2 extmodule/meson.build
+++ b/test cases/python/2 extmodule/meson.build
@@ -18,11 +18,19 @@ endif
subdir('ext')
+blaster = configure_file(
+ input: 'blaster.py.in',
+ output: 'blaster.py',
+ configuration: {'tachyon_module': 'tachyon'}
+)
+
test('extmod',
py,
- args : files('blaster.py'),
+ args : blaster,
env : ['PYTHONPATH=' + pypathdir])
+py.install_sources(blaster, pure: false)
+py.install_sources(blaster, subdir: 'pure')
py3_pkg_dep = dependency('python3', method: 'pkg-config', required : false)
if py3_pkg_dep.found()
diff --git a/test cases/python/2 extmodule/test.json b/test cases/python/2 extmodule/test.json
new file mode 100644
index 0000000..6bd1195
--- /dev/null
+++ b/test cases/python/2 extmodule/test.json
@@ -0,0 +1,13 @@
+{
+ "installed": [
+ { "type": "python_file", "file": "usr/@PYTHON_PLATLIB@/blaster.py" },
+ { "type": "python_lib", "file": "usr/@PYTHON_PLATLIB@/tachyon" },
+ { "type": "py_implib", "file": "usr/@PYTHON_PLATLIB@/tachyon" },
+ { "type": "python_file", "file": "usr/@PYTHON_PURELIB@/pure/blaster.py" },
+ { "type": "python_file", "file": "usr/@PYTHON_PLATLIB@/nested/blaster.py" },
+ { "type": "python_lib", "file": "usr/@PYTHON_PLATLIB@/nested/tachyon" },
+ { "type": "py_implib", "file": "usr/@PYTHON_PLATLIB@/nested/tachyon" },
+ { "type": "python_lib", "file": "usr/lib/tachyon" },
+ { "type": "py_implib", "file": "usr/lib/tachyon" }
+ ]
+}