-rw-r--r--  .github/workflows/lint_mypy.yml | 2
-rw-r--r--  .github/workflows/os_comp.yml | 8
-rw-r--r--  azure-pipelines.yml | 3
-rw-r--r--  ci/azure-steps.yml | 4
-rw-r--r--  ci/ciimage/eoan/Dockerfile | 2
-rw-r--r--  docs/markdown/Build-options.md | 21
-rw-r--r--  docs/markdown/CMake-module.md | 2
-rw-r--r--  docs/markdown/Contributing.md | 37
-rw-r--r--  docs/markdown/Custom-build-targets.md | 4
-rw-r--r--  docs/markdown/Dependencies.md | 2
-rw-r--r--  docs/markdown/Pkgconfig-module.md | 2
-rw-r--r--  docs/markdown/Python-module.md | 5
-rw-r--r--  docs/markdown/Reference-manual.md | 27
-rw-r--r--  docs/markdown/Reference-tables.md | 31
-rw-r--r--  docs/markdown/snippets/minstall_quiet.md | 11
-rw-r--r--  docs/markdown/snippets/pkgconfig_dataonly.md | 15
-rw-r--r--  mesonbuild/cmake/data/preload.cmake | 35
-rw-r--r--  mesonbuild/cmake/interpreter.py | 78
-rw-r--r--  mesonbuild/cmake/traceparser.py | 84
-rw-r--r--  mesonbuild/compilers/compilers.py | 28
-rw-r--r--  mesonbuild/coredata.py | 37
-rw-r--r--  mesonbuild/dependencies/base.py | 6
-rw-r--r--  mesonbuild/dependencies/boost.py | 1407
-rw-r--r--  mesonbuild/environment.py | 26
-rw-r--r--  mesonbuild/interpreter.py | 2
-rw-r--r--  mesonbuild/linkers.py | 62
-rw-r--r--  mesonbuild/mdist.py | 65
-rw-r--r--  mesonbuild/mesonlib.py | 20
-rw-r--r--  mesonbuild/minstall.py | 72
-rw-r--r--  mesonbuild/mlog.py | 16
-rw-r--r--  mesonbuild/modules/cmake.py | 13
-rw-r--r--  mesonbuild/modules/pkgconfig.py | 60
-rw-r--r--  mesonbuild/modules/python.py | 6
-rwxr-xr-x  mesonbuild/msubprojects.py | 8
-rw-r--r--  mesonbuild/scripts/depfixer.py | 21
-rwxr-xr-x  run_project_tests.py | 142
-rwxr-xr-x  run_unittests.py | 27
-rw-r--r--  setup.py | 2
-rw-r--r--  test cases/cmake/1 basic/meson.build | 1
-rw-r--r--  test cases/cmake/8 custom command/subprojects/cmMod/CMakeLists.txt | 32
-rw-r--r--  test cases/cmake/8 custom command/subprojects/cmMod/cmMod.cpp | 3
-rw-r--r--  test cases/cmake/8 custom command/subprojects/cmMod/cpyTest.cpp | 8
-rw-r--r--  test cases/cmake/8 custom command/subprojects/cmMod/cpyTest/CMakeLists.txt | 7
-rw-r--r--  test cases/cmake/8 custom command/subprojects/cmMod/cpyTest/cpyTest.hpp | 5
-rw-r--r--  test cases/cmake/8 custom command/subprojects/cmMod/cpyTest/cpyTest2.hpp | 3
-rw-r--r--  test cases/cmake/8 custom command/subprojects/cmMod/cpyTest/cpyTest3.hpp | 3
-rw-r--r--  test cases/cmake/8 custom command/subprojects/cmMod/cpyTest/cpyTest4.hpp | 3
-rw-r--r--  test cases/cmake/9 disabled subproject/meson.build | 3
-rw-r--r--  test cases/common/47 pkgconfig-gen/installed_files.txt | 1
-rw-r--r--  test cases/common/47 pkgconfig-gen/meson.build | 7
-rw-r--r--  test cases/common/56 install script/no-installed-files | 0
-rw-r--r--  test cases/failing build/4 cmake subproject isolation/incDir/fileA.hpp | 3
-rw-r--r--  test cases/failing build/4 cmake subproject isolation/main.cpp | 10
-rw-r--r--  test cases/failing build/4 cmake subproject isolation/meson.build | 17
-rw-r--r--  test cases/failing build/4 cmake subproject isolation/subprojects/cmMod/CMakeLists.txt | 10
-rw-r--r--  test cases/failing build/4 cmake subproject isolation/subprojects/cmMod/cmMod.cpp | 12
-rw-r--r--  test cases/failing build/4 cmake subproject isolation/subprojects/cmMod/cmMod.hpp | 14
-rw-r--r--  test cases/frameworks/1 boost/meson.build | 42
-rw-r--r--  test cases/frameworks/1 boost/meson_options.txt | 1
-rw-r--r--  test cases/frameworks/1 boost/test_matrix.json | 19
-rw-r--r--  test cases/frameworks/1 boost/unit_test.cpp | 1
-rwxr-xr-x  tools/boost_names.py | 371
-rwxr-xr-x  tools/cmake2meson.py | 25
63 files changed, 1920 insertions, 1074 deletions
diff --git a/.github/workflows/lint_mypy.yml b/.github/workflows/lint_mypy.yml
index b76a751..54535b3 100644
--- a/.github/workflows/lint_mypy.yml
+++ b/.github/workflows/lint_mypy.yml
@@ -30,4 +30,4 @@ jobs:
with:
python-version: '3.x'
- run: python -m pip install mypy
- - run: mypy --follow-imports=skip mesonbuild/mtest.py mesonbuild/minit.py mesonbuild/mintro.py mesonbuild/msetup.py mesonbuild/wrap tools/ mesonbuild/modules/fs.py mesonbuild/dependencies/mpi.py mesonbuild/dependencies/hdf5.py mesonbuild/compilers/mixins/intel.py
+ - run: mypy --follow-imports=skip mesonbuild/mtest.py mesonbuild/minit.py mesonbuild/mintro.py mesonbuild/msetup.py mesonbuild/wrap tools/ mesonbuild/modules/fs.py mesonbuild/dependencies/boost.py mesonbuild/dependencies/mpi.py mesonbuild/dependencies/hdf5.py mesonbuild/compilers/mixins/intel.py
diff --git a/.github/workflows/os_comp.yml b/.github/workflows/os_comp.yml
index dcbbdde..19281c9 100644
--- a/.github/workflows/os_comp.yml
+++ b/.github/workflows/os_comp.yml
@@ -11,9 +11,7 @@ jobs:
- name: Install Dependencies
run: |
sudo apt update -yq
- sudo apt install -yq --no-install-recommends python3-setuptools python3-pip g++ gfortran gobjc gobjc++ zlib1g-dev python-dev python3-dev libboost-all-dev
- - name: Remove GitHub boost version
- run: sudo rm -rf /usr/local/share/boost
+ sudo apt install -yq --no-install-recommends python3-setuptools python3-pip g++ gfortran gobjc gobjc++ zlib1g-dev python-dev python3-dev
- name: Install ninja-build tool
uses: seanmiddleditch/gha-setup-ninja@v1
- name: Python version
@@ -21,7 +19,7 @@ jobs:
- name: Ninja version
run: ninja --version
- name: Run tests
- run: python3 run_tests.py
+ run: LD_LIBRARY_PATH=/usr/local/share/boost/1.69.0/lib/:$LD_LIBRARY_PATH python3 run_tests.py
env:
CI: '1'
XENIAL: '1'
@@ -48,6 +46,7 @@ jobs:
env:
CI: '1'
SKIP_SCIENTIFIC: '1'
+ SKIP_STATIC_BOOST: '1'
opensuse:
name: OpenSUSE
@@ -60,3 +59,4 @@ jobs:
env:
CI: '1'
SKIP_SCIENTIFIC: '1'
+ SKIP_STATIC_BOOST: '1'
diff --git a/azure-pipelines.yml b/azure-pipelines.yml
index 1a9de56..40d422d 100644
--- a/azure-pipelines.yml
+++ b/azure-pipelines.yml
@@ -105,7 +105,10 @@ jobs:
- script: |
set BOOST_ROOT=
set PATH=%CYGWIN_ROOT%\bin;%SYSTEMROOT%\system32
+ set SKIP_STATIC_BOOST=1
env.exe -- python3 run_tests.py --backend=ninja
+ # Cygwin's static boost installation is broken (some static library
+ # variants such as boost_thread are not present)
displayName: Run Tests
- task: CopyFiles@2
condition: not(canceled())
diff --git a/ci/azure-steps.yml b/ci/azure-steps.yml
index 15832bb..8ec219e 100644
--- a/ci/azure-steps.yml
+++ b/ci/azure-steps.yml
@@ -49,8 +49,8 @@ steps:
# install boost (except for clang-cl)
if ($env:arch -eq 'x86') { $boost_bitness = '32' } else { $boost_bitness = '64' }
- if ($env:compiler -eq 'msvc2017') {
- $boost_version = '1.64.0' ; $boost_abi_tag = '14.1'
+ if ($env:compiler -eq 'msvc2017' -Or $env:compiler -eq 'msvc2019' -Or $env:compiler -eq 'clang-cl') {
+ $boost_version = '1.72.0' ; $boost_abi_tag = '14.1'
}
if ($boost_version) {
$boost_filename = $boost_version.Replace('.', '_')
diff --git a/ci/ciimage/eoan/Dockerfile b/ci/ciimage/eoan/Dockerfile
index a98662c..dcc8549 100644
--- a/ci/ciimage/eoan/Dockerfile
+++ b/ci/ciimage/eoan/Dockerfile
@@ -24,7 +24,7 @@ RUN sed -i '/^#\sdeb-src /s/^#//' "/etc/apt/sources.list" \
&& eatmydata apt-get -y install libgcrypt20-dev \
&& eatmydata apt-get -y install libgpgme-dev \
&& eatmydata apt-get -y install libhdf5-dev \
-&& eatmydata apt-get -y install libboost-python-dev \
+&& eatmydata apt-get -y install libboost-python-dev libboost-regex-dev \
&& eatmydata apt-get -y install libblocksruntime-dev \
&& eatmydata apt-get -y install libperl-dev \
&& eatmydata apt-get -y install liblapack-dev libscalapack-mpi-dev \
diff --git a/docs/markdown/Build-options.md b/docs/markdown/Build-options.md
index 8b29afd..2d53e28 100644
--- a/docs/markdown/Build-options.md
+++ b/docs/markdown/Build-options.md
@@ -83,9 +83,9 @@ Currently supported in
- `disabled` do not look for the dependency and always return 'not-found'.
When getting the value of this type of option using `get_option()`, a special
-object is returned instead of the string representation of the option's value.
-That object has three methods returning boolean and taking no argument:
-`enabled()`, `disabled()`, and `auto()`.
+[feature option object](Reference-manual.md#feature-option-object)
+is returned instead of the string representation of the option's value.
+This object can be passed to the `required` keyword argument:
```meson
d = dependency('foo', required : get_option('myfeature'))
@@ -94,6 +94,21 @@ if d.found()
endif
```
+To check the value of the feature, the object has three methods that
+return a boolean and take no arguments:
+
+- `.enabled()`
+- `.disabled()`
+- `.auto()`
+
+This is useful for custom code depending on the feature:
+
+```meson
+if get_option('myfeature').enabled()
+ # ...
+endif
+```
+
If the value of a `feature` option is set to `auto`, that value is overridden by
the global `auto_features` option (which defaults to `auto`). This is intended
to be used by packagers who want to have full control on which dependencies are
diff --git a/docs/markdown/CMake-module.md b/docs/markdown/CMake-module.md
index a021396..a15e3c2 100644
--- a/docs/markdown/CMake-module.md
+++ b/docs/markdown/CMake-module.md
@@ -99,6 +99,8 @@ and supports the following methods:
- `get_variable(name)` fetches the specified variable from inside
the subproject. Usually `dependency()` or `target()` should be
preferred to extract build targets.
+ - `found()` returns true if the subproject is available, otherwise false.
+   *new in 0.53.2*
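+
+A minimal usage sketch (the subproject and target names `cm_proj` and `cm_lib`
+are placeholders):
+
+```meson
+cmake = import('cmake')
+cm_proj = cmake.subproject('cm_proj', required : false)
+if cm_proj.found()
+  cm_dep = cm_proj.dependency('cm_lib')
+endif
+```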
## CMake configuration files
diff --git a/docs/markdown/Contributing.md b/docs/markdown/Contributing.md
index f545b77..c5b8608 100644
--- a/docs/markdown/Contributing.md
+++ b/docs/markdown/Contributing.md
@@ -127,6 +127,8 @@ project tests. To run all tests, execute `./run_tests.py`. Unit tests
can be run with `./run_unittests.py` and project tests with
`./run_project_tests.py`.
+### Project tests
+
Subsets of project tests can be selected with
`./run_project_tests.py --only` option. This can save a great deal of
time when only a certain part of Meson is being tested.
@@ -139,7 +141,7 @@ For example, all the CUDA project tests run and pass on Windows via
`./run_project_tests.py --only cuda --backend ninja`
Each project test is a standalone project that can be compiled on its
-own. They are all in `test cases` subdirectory. The simplest way to
+own. They are all in the `test cases` subdirectory. The simplest way to
run a single project test is to do something like `./meson.py test\
cases/common/1\ trivial builddir`. The one exception to this is `test
cases/unit` directory discussed below.
@@ -153,13 +155,32 @@ should be implemented as a Python script. The goal of test projects is
also to provide sample projects that end users can use as a base for
their own projects.
-All project tests follow the same pattern: they are compiled, tests
-are run and finally install is run. Passing means that building and
-tests succeed and installed files match the `installed_files.txt` file
-in the test's source root. Any tests that require more thorough
-analysis, such as checking that certain compiler arguments can be
-found in the command line or that the generated pkg-config files
-actually work should be done with a unit test.
+All project tests follow the same pattern: they are configured, compiled, tests
+are run and finally install is run. Passing means that configuring, building and
+tests succeed and that installed files match those expected.
+
+Any tests that require more thorough analysis, such as checking that certain
+compiler arguments can be found in the command line or that the generated
+pkg-config files actually work should be done with a unit test.
+
+The following files in the test's source root are consulted, if they exist:
+
+* `installed_files.txt` lists the files which are expected to be installed.
+Various constructs containing `?` are used to indicate platform-specific
+filename variations (e.g. `?so` represents the platform-appropriate suffix for a
+shared library).
+
+* `setup_env.json` contains a dictionary which specifies additional
+environment variables to be set during the configure step of the test. `@ROOT@`
+is replaced with the absolute path of the source directory.
+
+* `crossfile.ini` and `nativefile.ini` are passed to the configure step with the
+`--cross-file` and `--native-file` options, respectively.
+
+Additionally:
+
+* `mlog.cmd_ci_include()` can be called from anywhere inside meson to capture the
+contents of an additional file into the CI log on failure.
Projects needed by unit tests are in the `test cases/unit`
subdirectory. They are not run as part of `./run_project_tests.py`.
diff --git a/docs/markdown/Custom-build-targets.md b/docs/markdown/Custom-build-targets.md
index 9a0f2a1..f0b50d8 100644
--- a/docs/markdown/Custom-build-targets.md
+++ b/docs/markdown/Custom-build-targets.md
@@ -16,8 +16,8 @@ infile = 'source_code.txt'
outfile = 'output.bin'
mytarget = custom_target('targetname',
- output : 'output.bin',
- input : 'source_code.txt',
+ output : outfile,
+ input : infile,
command : [comp, '@INPUT@', '@OUTPUT@'],
install : true,
install_dir : 'subdir')
diff --git a/docs/markdown/Dependencies.md b/docs/markdown/Dependencies.md
index 50ea92f..8cffba4 100644
--- a/docs/markdown/Dependencies.md
+++ b/docs/markdown/Dependencies.md
@@ -181,7 +181,7 @@ should only be used if the CMake files are not stored in the project itself.
Additional CMake parameters can be specified with the `cmake_args` property.
-### Some notes on Dub
+## Dub
Please understand that meson is only able to find dependencies that
exist in the local Dub repository. You need to manually fetch and
diff --git a/docs/markdown/Pkgconfig-module.md b/docs/markdown/Pkgconfig-module.md
index 678090b..13fc4e6 100644
--- a/docs/markdown/Pkgconfig-module.md
+++ b/docs/markdown/Pkgconfig-module.md
@@ -56,6 +56,8 @@ keyword arguments.
D sources referred to by this pkg-config file
- `uninstalled_variables` used instead of the `variables` keyword argument, when
generating the uninstalled pkg-config file. Since *0.54.0*
+- `dataonly` (*since 0.54.0*) disables writing out the inbuilt variables to the
+  pkg-config file; this is used for architecture-independent pkg-config files in
+  projects which also have architecture-dependent outputs.
Since 0.46 a `StaticLibrary` or `SharedLibrary` object can optionally be passed
as first positional argument. If one is provided a default value will be
diff --git a/docs/markdown/Python-module.md b/docs/markdown/Python-module.md
index 152aa5c..d99b36a 100644
--- a/docs/markdown/Python-module.md
+++ b/docs/markdown/Python-module.md
@@ -103,8 +103,9 @@ need to add `dependencies : py_installation.dependency()`, see [][`dependency()`
python_dependency py_installation.dependency(...)
```
-This method accepts the same arguments as the standard [dependency] function and
-the following additional keyword arguments:
+This method accepts no positional arguments, and the same keyword arguments as
+the standard [dependency] function. It also supports the following keyword
+argument:
- `embed`: *(since 0.53.0)* If true, meson will try to find a python dependency
that can be used for embedding python into an application.
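+
+An illustrative sketch (the variable names are arbitrary):
+
+```meson
+py_mod = import('python')
+py_installation = py_mod.find_installation()
+py_dep = py_installation.dependency(embed : true)
+```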
diff --git a/docs/markdown/Reference-manual.md b/docs/markdown/Reference-manual.md
index 510d443..475b711 100644
--- a/docs/markdown/Reference-manual.md
+++ b/docs/markdown/Reference-manual.md
@@ -846,8 +846,10 @@ configuration as-is, which may be absolute, or relative to `prefix`.
if you need the absolute path to one of these e.g. to use in a define
etc., you should use `get_option('prefix') / get_option('localstatedir')`
-For options of type `feature` a special object is returned instead of
-a string. See [`feature` options](Build-options.md#features)
+For options of type `feature` a
+[feature option object](#feature-option-object)
+is returned instead of a string.
+See [`feature` options](Build-options.md#features)
documentation for more details.
### get_variable()
@@ -1226,7 +1228,7 @@ This function is used to summarize build configuration at the end of the build
process. This function provides a way for projects (and subprojects) to report
this information in a clear way.
-The content is a serie of key/value pairs grouped into sections. If the section
+The content is a series of key/value pairs grouped into sections. If the section
keyword argument is omitted, those key/value pairs are implicitly grouped into a section
with no title. key/value pairs can optionally be grouped into a dictionary,
but keep in mind that dictionaries does not guarantee ordering. `key` must be string,
@@ -1285,7 +1287,7 @@ My Project 1.0
The first argument to this function must be a string defining the name
of this project. It is followed by programming languages that the
project uses. Supported values for languages are `c`, `cpp` (for
-`C++`), `d`, `objc`, `objcpp`, `fortran`, `java`, `cs` (for `C#`),
+`C++`), `cuda`, `d`, `objc`, `objcpp`, `fortran`, `java`, `cs` (for `C#`),
`vala` and `rust`. Since version `0.40.0` the list of languages
is optional.
@@ -1718,8 +1720,11 @@ the following methods.
given as an argument to be run during the install step, this script
will have the environment variables `MESON_SOURCE_ROOT`,
`MESON_BUILD_ROOT`, `MESON_INSTALL_PREFIX`,
- `MESON_INSTALL_DESTDIR_PREFIX`, and `MESONINTROSPECT` set. All
- additional arguments are passed as parameters.
+ `MESON_INSTALL_DESTDIR_PREFIX`, and `MESONINTROSPECT` set.
+ All positional arguments are passed as parameters.
+
+ *(since 0.54.0)* If `meson install` is called with the `--quiet` option, the
+ environment variable `MESON_INSTALL_QUIET` will be set.
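+
+  For example, a hypothetical registration like the following (the script name
+  is a placeholder) would pass `arg1` and `arg2` to the script:
+
+  ```meson
+  meson.add_install_script('post_install.py', 'arg1', 'arg2')
+  ```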
Meson uses the `DESTDIR` environment variable as set by the
inherited environment to determine the (temporary) installation
@@ -2469,6 +2474,16 @@ library. This object has the following methods:
object will only inherit other attributes from its parent as
controlled by keyword arguments.
+### Feature option object
+
+The following methods are defined for all [`feature` options](Build-options.md#features):
+
+- `enabled()` returns whether the feature was set to `'enabled'`
+- `disabled()` returns whether the feature was set to `'disabled'`
+- `auto()` returns whether the feature was set to `'auto'`
+
+Feature options are available since 0.47.0.
+
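+For example (`myfeature` is a hypothetical option name):
+
+```meson
+if get_option('myfeature').disabled()
+  error('myfeature is required for this build')
+endif
+```
+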
### `generator` object
This object is returned by [`generator()`](#generator) and contains a
diff --git a/docs/markdown/Reference-tables.md b/docs/markdown/Reference-tables.md
index 86524b7..9f432f0 100644
--- a/docs/markdown/Reference-tables.md
+++ b/docs/markdown/Reference-tables.md
@@ -33,16 +33,27 @@ These are return values of the `get_id` (Compiler family) and
## Linker ids
-These are return values of the `get_linker_id` (Linker family) method in a compiler object.
-
-| Value | Linker family |
-| ----- | --------------- |
-| ld.bfd | GNU Compiler Collection |
-| {ld.bfd,lld} | Clang non-Windows |
-| link | MSVC, Clang-cl, clang Windows |
-| pgi | Portland/Nvidia PGI |
-| {ld.bfd,gold,xild} | Intel compiler non-Windows |
-| xilink | Intel-cl Windows |
+These are return values of the `get_linker_id` method in a compiler object.
+
+| Value | Linker family |
+| ----- | --------------- |
+| ld.bfd | The GNU linker |
+| ld.gold | The GNU gold linker |
+| ld.lld | The LLVM linker, with the GNU interface |
+| ld.solaris | Solaris and illumos |
+| ld64 | Apple ld64 |
+| link | MSVC linker |
+| lld-link | The LLVM linker, with the MSVC interface |
+| xilink | Used with Intel-cl only, MSVC like |
+| optlink | optlink (used with DMD) |
+| rlink | The Renesas linker, used with CCrx only |
+| armlink | The ARM linker (arm and armclang compilers) |
+| pgi | Portland/Nvidia PGI |
+| nvlink | Nvidia Linker used with cuda |
+
+For languages that don't have separate dynamic linkers such as C# and Java, the
+`get_linker_id` will return the compiler name.
+
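+A short sketch showing how a linker id might be checked from a `meson.build`
+file (purely illustrative):
+
+```meson
+cc = meson.get_compiler('c')
+if cc.get_linker_id() == 'ld.gold'
+  message('Linking with the GNU gold linker')
+endif
+```
+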
## Script environment variables
diff --git a/docs/markdown/snippets/minstall_quiet.md b/docs/markdown/snippets/minstall_quiet.md
new file mode 100644
index 0000000..3a7ff31
--- /dev/null
+++ b/docs/markdown/snippets/minstall_quiet.md
@@ -0,0 +1,11 @@
+## New option `--quiet` to `meson install`
+
+Now you can run `meson install --quiet` and meson will not verbosely print
+every file as it is being installed. As before, the full log is always
+available inside the builddir in `meson-logs/install-log.txt`.
+
+When this option is passed, install scripts will have the environment variable
+`MESON_INSTALL_QUIET` set.
+
+Numerous speed-ups were also made for the install step, especially on Windows
+where it is now 300% to 1200% faster than before depending on your workload.
diff --git a/docs/markdown/snippets/pkgconfig_dataonly.md b/docs/markdown/snippets/pkgconfig_dataonly.md
new file mode 100644
index 0000000..8a2564c
--- /dev/null
+++ b/docs/markdown/snippets/pkgconfig_dataonly.md
@@ -0,0 +1,15 @@
+## Introduce dataonly for the pkgconfig module
+
+This allows users to disable writing out the inbuilt variables to
+the pkg-config file, as they might actually not be required.
+
+One reason to have this is for architecture-independent pkg-config
+files in projects which also have architecture-dependent outputs.
+
+```meson
+pkgg.generate(
+ name : 'libhello_nolib',
+ description : 'A minimalistic pkgconfig file.',
+ version : libver,
+ dataonly: true
+)
+```
diff --git a/mesonbuild/cmake/data/preload.cmake b/mesonbuild/cmake/data/preload.cmake
new file mode 100644
index 0000000..30178fb
--- /dev/null
+++ b/mesonbuild/cmake/data/preload.cmake
@@ -0,0 +1,35 @@
+if(MESON_PS_LOADED)
+ return()
+endif()
+
+set(MESON_PS_LOADED ON)
+
+# Dummy macros that have a special meaning in the meson code
+macro(meson_ps_execute_delayed_calls)
+endmacro()
+
+macro(meson_ps_reload_vars)
+endmacro()
+
+# Helper macro to inspect the current CMake state
+macro(meson_ps_inspect_vars)
+ set(MESON_PS_CMAKE_CURRENT_BINARY_DIR "${CMAKE_CURRENT_BINARY_DIR}")
+ set(MESON_PS_CMAKE_CURRENT_SOURCE_DIR "${CMAKE_CURRENT_SOURCE_DIR}")
+ meson_ps_execute_delayed_calls()
+endmacro()
+
+
+# Override some system functions with custom code and forward the args
+# to the original function
+macro(add_custom_command)
+ meson_ps_inspect_vars()
+ _add_custom_command(${ARGV})
+endmacro()
+
+macro(add_custom_target)
+ meson_ps_inspect_vars()
+ _add_custom_target(${ARGV})
+endmacro()
+
+set(MESON_PS_DELAYED_CALLS add_custom_command;add_custom_target)
+meson_ps_reload_vars()
diff --git a/mesonbuild/cmake/interpreter.py b/mesonbuild/cmake/interpreter.py
index 81844a0..941baed 100644
--- a/mesonbuild/cmake/interpreter.py
+++ b/mesonbuild/cmake/interpreter.py
@@ -26,6 +26,7 @@ from ..mesonlib import MachineChoice, version_compare
from ..compilers.compilers import lang_suffixes, header_suffixes, obj_suffixes, lib_suffixes, is_header
from enum import Enum
from functools import lru_cache
+from pathlib import Path
import typing as T
import os, re
@@ -399,7 +400,19 @@ class ConverterTarget:
if not os.path.isabs(x):
x = os.path.normpath(os.path.join(self.src_dir, x))
if not os.path.exists(x) and not any([x.endswith(y) for y in obj_suffixes]) and not is_generated:
- mlog.warning('CMake: path', mlog.bold(x), 'does not exist. Ignoring. This can lead to build errors')
+ mlog.warning('CMake: path', mlog.bold(x), 'does not exist.')
+ mlog.warning(' --> Ignoring. This can lead to build errors.')
+ return None
+ if (
+ os.path.isabs(x)
+ and os.path.commonpath([x, self.env.get_source_dir()]) == self.env.get_source_dir()
+ and not (
+ os.path.commonpath([x, root_src_dir]) == root_src_dir or
+ os.path.commonpath([x, self.env.get_build_dir()]) == self.env.get_build_dir()
+ )
+ ):
+ mlog.warning('CMake: path', mlog.bold(x), 'is inside the root project but', mlog.bold('not'), 'inside the subproject.')
+ mlog.warning(' --> Ignoring. This can lead to build errors.')
return None
if os.path.isabs(x) and os.path.commonpath([x, self.env.get_build_dir()]) == self.env.get_build_dir():
if is_header:
@@ -580,6 +593,8 @@ class ConverterCustomTarget:
out_counter = 0 # type: int
def __init__(self, target: CMakeGeneratorTarget):
+ assert(target.current_bin_dir is not None)
+ assert(target.current_src_dir is not None)
self.name = target.name
if not self.name:
self.name = 'custom_tgt_{}'.format(ConverterCustomTarget.tgt_counter)
@@ -593,6 +608,8 @@ class ConverterCustomTarget:
self.depends_raw = target.depends
self.inputs = []
self.depends = []
+ self.current_bin_dir = Path(target.current_bin_dir)
+ self.current_src_dir = Path(target.current_src_dir)
# Convert the target name to a valid meson target name
self.name = _sanitize_cmake_name(self.name)
@@ -600,29 +617,24 @@ class ConverterCustomTarget:
def __repr__(self) -> str:
return '<{}: {} {}>'.format(self.__class__.__name__, self.name, self.outputs)
- def postprocess(self, output_target_map: OutputTargetMap, root_src_dir: str, subdir: str, build_dir: str, all_outputs: T.List[str]) -> None:
- # Default the working directory to the CMake build dir. This
- # is not 100% correct, since it should be the value of
- # ${CMAKE_CURRENT_BINARY_DIR} when add_custom_command is
- # called. However, keeping track of this variable is not
- # trivial and the current solution should work in most cases.
+ def postprocess(self, output_target_map: OutputTargetMap, root_src_dir: str, subdir: str, all_outputs: T.List[str]) -> None:
+ # Default the working directory to ${CMAKE_CURRENT_BINARY_DIR}
if not self.working_dir:
- self.working_dir = build_dir
+ self.working_dir = self.current_bin_dir.as_posix()
# relative paths in the working directory are always relative
- # to ${CMAKE_CURRENT_BINARY_DIR} (see note above)
+ # to ${CMAKE_CURRENT_BINARY_DIR}
if not os.path.isabs(self.working_dir):
- self.working_dir = os.path.normpath(os.path.join(build_dir, self.working_dir))
+ self.working_dir = (self.current_bin_dir / self.working_dir).as_posix()
# Modify the original outputs if they are relative. Again,
# relative paths are relative to ${CMAKE_CURRENT_BINARY_DIR}
- # and the first disclaimer is still in effect
- def ensure_absolute(x: str):
- if os.path.isabs(x):
+ def ensure_absolute(x: Path) -> Path:
+ if x.is_absolute():
return x
else:
- return os.path.normpath(os.path.join(build_dir, x))
- self.original_outputs = [ensure_absolute(x) for x in self.original_outputs]
+ return self.current_bin_dir / x
+ self.original_outputs = [ensure_absolute(Path(x)).as_posix() for x in self.original_outputs]
# Ensure that there is no duplicate output in the project so
# that meson can handle cases where the same filename is
@@ -659,23 +671,35 @@ class ConverterCustomTarget:
self.outputs = [self.name + '.h']
# Check dependencies and input files
+ root = Path(root_src_dir)
for i in self.depends_raw:
if not i:
continue
+ raw = Path(i)
art = output_target_map.artifact(i)
tgt = output_target_map.target(i)
gen = output_target_map.generated(i)
- if art:
+ rel_to_root = None
+ try:
+ rel_to_root = raw.relative_to(root)
+ except ValueError:
+ rel_to_root = None
+
+ # First check for existing files. Only then check for existing
+ # targets, etc. This reduces the chance of misdetecting input files
+ # as outputs from other targets.
+ # See https://github.com/mesonbuild/meson/issues/6632
+ if not raw.is_absolute() and (self.current_src_dir / raw).exists():
+ self.inputs += [(self.current_src_dir / raw).relative_to(root).as_posix()]
+ elif raw.is_absolute() and raw.exists() and rel_to_root is not None:
+ self.inputs += [rel_to_root.as_posix()]
+ elif art:
self.depends += [art]
elif tgt:
self.depends += [tgt]
elif gen:
self.inputs += [gen.get_ref(i)]
- elif not os.path.isabs(i) and os.path.exists(os.path.join(root_src_dir, i)):
- self.inputs += [i]
- elif os.path.isabs(i) and os.path.exists(i) and os.path.commonpath([i, root_src_dir]) == root_src_dir:
- self.inputs += [os.path.relpath(i, root_src_dir)]
def process_inter_target_dependencies(self):
# Move the dependencies from all transfer_dependencies_from to the target
@@ -755,10 +779,19 @@ class CMakeInterpreter:
raise CMakeException('Unable to find CMake')
self.trace = CMakeTraceParser(cmake_exe.version(), self.build_dir, permissive=True)
+ preload_file = Path(__file__).resolve().parent / 'data' / 'preload.cmake'
+
+ # Prefer CMAKE_PROJECT_INCLUDE over CMAKE_TOOLCHAIN_FILE if possible,
+ # since CMAKE_PROJECT_INCLUDE was actually designed for code injection.
+ preload_var = 'CMAKE_PROJECT_INCLUDE'
+ if version_compare(cmake_exe.version(), '<3.15'):
+ preload_var = 'CMAKE_TOOLCHAIN_FILE'
+
generator = backend_generator_map[self.backend_name]
cmake_args = []
trace_args = self.trace.trace_args()
cmcmp_args = ['-DCMAKE_POLICY_WARNING_{}=OFF'.format(x) for x in disable_policy_warnings]
+ pload_args = ['-D{}={}'.format(preload_var, str(preload_file))]
if version_compare(cmake_exe.version(), '>=3.14'):
self.cmake_api = CMakeAPI.FILE
@@ -790,12 +823,13 @@ class CMakeInterpreter:
mlog.log(mlog.bold(' - build directory: '), self.build_dir)
mlog.log(mlog.bold(' - source directory: '), self.src_dir)
mlog.log(mlog.bold(' - trace args: '), ' '.join(trace_args))
+ mlog.log(mlog.bold(' - preload file: '), str(preload_file))
mlog.log(mlog.bold(' - disabled policy warnings:'), '[{}]'.format(', '.join(disable_policy_warnings)))
mlog.log()
os.makedirs(self.build_dir, exist_ok=True)
os_env = os.environ.copy()
os_env['LC_ALL'] = 'C'
- final_args = cmake_args + trace_args + cmcmp_args + [self.src_dir]
+ final_args = cmake_args + trace_args + cmcmp_args + pload_args + [self.src_dir]
cmake_exe.set_exec_mode(print_cmout=True, always_capture_stderr=self.trace.requires_stderr())
rc, _, self.raw_trace = cmake_exe.call(final_args, self.build_dir, env=os_env, disable_cache=True)
@@ -901,7 +935,7 @@ class CMakeInterpreter:
object_libs = []
custom_target_outputs = [] # type: T.List[str]
for i in self.custom_targets:
- i.postprocess(self.output_target_map, self.src_dir, self.subdir, self.build_dir, custom_target_outputs)
+ i.postprocess(self.output_target_map, self.src_dir, self.subdir, custom_target_outputs)
for i in self.targets:
i.postprocess(self.output_target_map, self.src_dir, self.subdir, self.install_prefix, self.trace)
if i.type == 'OBJECT_LIBRARY':
diff --git a/mesonbuild/cmake/traceparser.py b/mesonbuild/cmake/traceparser.py
index 7b29c86..8eb8605 100644
--- a/mesonbuild/cmake/traceparser.py
+++ b/mesonbuild/cmake/traceparser.py
@@ -47,6 +47,8 @@ class CMakeTarget:
self.imported = imported
self.tline = tline
self.depends = []
+ self.current_bin_dir = None
+ self.current_src_dir = None
def __repr__(self):
s = 'CMake TARGET:\n -- name: {}\n -- type: {}\n -- imported: {}\n -- properties: {{\n{} }}\n -- tline: {}'
@@ -83,6 +85,36 @@ class CMakeTraceParser:
self.trace_file_path = Path(build_dir) / self.trace_file
self.trace_format = 'json-v1' if version_compare(cmake_version, '>=3.17') else 'human'
+ # State for delayed command execution. Delayed command execution is realised
+ # with a custom CMake file that overrides some functions and adds some
+ # introspection information to the trace.
+ self.delayed_commands = [] # type: T.List[str]
+ self.stored_commands = [] # type: T.List[CMakeTraceLine]
+
+ # All supported functions
+ self.functions = {
+ 'set': self._cmake_set,
+ 'unset': self._cmake_unset,
+ 'add_executable': self._cmake_add_executable,
+ 'add_library': self._cmake_add_library,
+ 'add_custom_command': self._cmake_add_custom_command,
+ 'add_custom_target': self._cmake_add_custom_target,
+ 'set_property': self._cmake_set_property,
+ 'set_target_properties': self._cmake_set_target_properties,
+ 'target_compile_definitions': self._cmake_target_compile_definitions,
+ 'target_compile_options': self._cmake_target_compile_options,
+ 'target_include_directories': self._cmake_target_include_directories,
+ 'target_link_libraries': self._cmake_target_link_libraries,
+ 'target_link_options': self._cmake_target_link_options,
+ 'add_dependencies': self._cmake_add_dependencies,
+
+ # Special functions defined in the preload script.
+ # These functions do nothing in the CMake code, but have special
+ # meaning here in the trace parser.
+ 'meson_ps_execute_delayed_calls': self._meson_ps_execute_delayed_calls,
+ 'meson_ps_reload_vars': self._meson_ps_reload_vars,
+ }
+
def trace_args(self) -> T.List[str]:
arg_map = {
'human': ['--trace', '--trace-expand'],
@@ -116,28 +148,15 @@ class CMakeTraceParser:
else:
raise CMakeException('CMake: Internal error: Invalid trace format {}. Expected [human, json-v1]'.format(self.trace_format))
- # All supported functions
- functions = {
- 'set': self._cmake_set,
- 'unset': self._cmake_unset,
- 'add_executable': self._cmake_add_executable,
- 'add_library': self._cmake_add_library,
- 'add_custom_command': self._cmake_add_custom_command,
- 'add_custom_target': self._cmake_add_custom_target,
- 'set_property': self._cmake_set_property,
- 'set_target_properties': self._cmake_set_target_properties,
- 'target_compile_definitions': self._cmake_target_compile_definitions,
- 'target_compile_options': self._cmake_target_compile_options,
- 'target_include_directories': self._cmake_target_include_directories,
- 'target_link_libraries': self._cmake_target_link_libraries,
- 'target_link_options': self._cmake_target_link_options,
- 'add_dependencies': self._cmake_add_dependencies,
- }
-
# Primary pass -- parse everything
for l in lexer1:
+ # store the function if its execution should be delayed
+ if l.func in self.delayed_commands:
+ self.stored_commands += [l]
+ continue
+
# "Execute" the CMake function if supported
- fn = functions.get(l.func, None)
+ fn = self.functions.get(l.func, None)
if(fn):
fn(l)
@@ -160,6 +179,12 @@ class CMakeTraceParser:
return []
+ def var_to_str(self, var: str) -> T.Optional[str]:
+ if var in self.vars and self.vars[var]:
+ return self.vars[var][0]
+
+ return None
+
def var_to_bool(self, var):
if var not in self.vars:
return False
@@ -300,7 +325,7 @@ class CMakeTraceParser:
target = CMakeGeneratorTarget(name)
def handle_output(key: str, target: CMakeGeneratorTarget) -> None:
- target.outputs += [key]
+ target.outputs += key.split(';')
def handle_command(key: str, target: CMakeGeneratorTarget) -> None:
if key == 'ARGS':
@@ -308,7 +333,7 @@ class CMakeTraceParser:
target.command[-1] += key.split(';')
def handle_depends(key: str, target: CMakeGeneratorTarget) -> None:
- target.depends += [key]
+ target.depends += key.split(';')
def handle_working_dir(key: str, target: CMakeGeneratorTarget) -> None:
if target.working_dir is None:
@@ -337,6 +362,8 @@ class CMakeTraceParser:
if fn is not None:
fn(i, target)
+ target.current_bin_dir = self.var_to_str('MESON_PS_CMAKE_CURRENT_BINARY_DIR')
+ target.current_src_dir = self.var_to_str('MESON_PS_CMAKE_CURRENT_SOURCE_DIR')
target.outputs = self._guess_files(target.outputs)
target.depends = self._guess_files(target.depends)
target.command = [self._guess_files(x) for x in target.command]
@@ -465,7 +492,8 @@ class CMakeTraceParser:
if not target:
return self._gen_exception('add_dependencies', 'target not found', tline)
- target.depends += args[1:]
+ for i in args[1:]:
+ target.depends += i.split(';')
def _cmake_target_compile_definitions(self, tline: CMakeTraceLine) -> None:
# DOC: https://cmake.org/cmake/help/latest/command/target_compile_definitions.html
@@ -531,6 +559,18 @@ class CMakeTraceParser:
self.targets[target].properties[i[0]] += i[1]
+ def _meson_ps_execute_delayed_calls(self, tline: CMakeTraceLine) -> None:
+ for l in self.stored_commands:
+ fn = self.functions.get(l.func, None)
+ if(fn):
+ fn(l)
+
+ # clear the stored commands
+ self.stored_commands = []
+
+ def _meson_ps_reload_vars(self, tline: CMakeTraceLine) -> None:
+ self.delayed_commands = self.get_cmake_var('MESON_PS_DELAYED_CALLS')
+
def _lex_trace_human(self, trace):
# The trace format is: '<file>(<line>): <func>(<args -- can contain \n> )\n'
reg_tline = re.compile(r'\s*(.*\.(cmake|txt))\(([0-9]+)\):\s*(\w+)\(([\s\S]*?) ?\)\s*\n', re.MULTILINE)
diff --git a/mesonbuild/compilers/compilers.py b/mesonbuild/compilers/compilers.py
index 67686a9..37cf2e0 100644
--- a/mesonbuild/compilers/compilers.py
+++ b/mesonbuild/compilers/compilers.py
@@ -724,7 +724,13 @@ class Compiler:
return self.id
def get_linker_id(self) -> str:
- return self.linker.id
+ # There is no guarantee that we have a dynamic linker instance, as
+ # some languages don't have separate linkers and compilers. In those
+ # cases return the compiler id
+ try:
+ return self.linker.id
+ except AttributeError:
+ return self.id
def get_version_string(self) -> str:
details = [self.id, self.version]
@@ -1062,7 +1068,25 @@ class Compiler:
return self.linker.get_undefined_link_args()
def remove_linkerlike_args(self, args):
- return [x for x in args if not x.startswith('-Wl')]
+ rm_exact = ('-headerpad_max_install_names',)
+ rm_prefixes = ('-Wl,', '-L',)
+ rm_next = ('-L',)
+ ret = []
+ iargs = iter(args)
+ for arg in iargs:
+ # Remove this argument
+ if arg in rm_exact:
+ continue
+ # If the argument starts with this, but is not *exactly* this
+ # f.ex., '-L' should match ['-Lfoo'] but not ['-L', 'foo']
+ if arg.startswith(rm_prefixes) and arg not in rm_prefixes:
+ continue
+ # Ignore this argument and the one after it
+ if arg in rm_next:
+ next(iargs)
+ continue
+ ret.append(arg)
+ return ret
def get_lto_compile_args(self) -> T.List[str]:
return []
diff --git a/mesonbuild/coredata.py b/mesonbuild/coredata.py
index 720d064..f1ce578 100644
--- a/mesonbuild/coredata.py
+++ b/mesonbuild/coredata.py
@@ -19,7 +19,7 @@ from itertools import chain
from pathlib import PurePath
from collections import OrderedDict
from .mesonlib import (
- MesonException, MachineChoice, PerMachine,
+ MesonException, MachineChoice, PerMachine, OrderedSet,
default_libdir, default_libexecdir, default_prefix, split_args
)
from .wrap import WrapMode
@@ -746,16 +746,20 @@ class CoreData:
# Some options default to environment variables if they are
# unset, set those now. These will either be overwritten
# below, or they won't. These should only be set on the first run.
- if env.first_invocation:
- p_env = os.environ.get('PKG_CONFIG_PATH')
- if p_env:
- # PKG_CONFIG_PATH may contain duplicates, which must be
- # removed, else a duplicates-in-array-option warning arises.
- pkg_config_paths = []
- for k in p_env.split(':'):
- if k not in pkg_config_paths:
- pkg_config_paths.append(k)
- options['pkg_config_path'] = pkg_config_paths
+ p_env = os.environ.get('PKG_CONFIG_PATH')
+ if p_env:
+ # PKG_CONFIG_PATH may contain duplicates, which must be
+ # removed, else a duplicates-in-array-option warning arises.
+ p_list = list(OrderedSet(p_env.split(':')))
+ if env.first_invocation:
+ options['pkg_config_path'] = p_list
+ elif options.get('pkg_config_path', []) != p_list:
+ mlog.warning(
+ 'PKG_CONFIG_PATH environment variable has changed '
+ 'between configurations, meson ignores this. '
+ 'Use -Dpkg_config_path to change pkg-config search '
+ 'path instead.'
+ )
for k, v in env.cmd_line_options.items():
if subproject:
@@ -850,6 +854,9 @@ def read_cmd_line_file(build_dir, options):
# literal_eval to get it into the list of strings.
options.native_file = ast.literal_eval(properties.get('native_file', '[]'))
+def cmd_line_options_to_string(options):
+ return {k: str(v) for k, v in options.cmd_line_options.items()}
+
def write_cmd_line_file(build_dir, options):
filename = get_cmd_line_file(build_dir)
config = CmdLineFileParser()
@@ -860,7 +867,7 @@ def write_cmd_line_file(build_dir, options):
if options.native_file:
properties['native_file'] = options.native_file
- config['options'] = options.cmd_line_options
+ config['options'] = cmd_line_options_to_string(options)
config['properties'] = properties
with open(filename, 'w') as f:
config.write(f)
@@ -869,7 +876,7 @@ def update_cmd_line_file(build_dir, options):
filename = get_cmd_line_file(build_dir)
config = CmdLineFileParser()
config.read(filename)
- config['options'].update(options.cmd_line_options)
+ config['options'].update(cmd_line_options_to_string(options))
with open(filename, 'w') as f:
config.write(f)
@@ -1074,8 +1081,8 @@ builtin_options = OrderedDict([
])
builtin_options_per_machine = OrderedDict([
- ('pkg_config_path', BuiltinOption(UserArrayOption, 'T.List of additional paths for pkg-config to search', [])),
- ('cmake_prefix_path', BuiltinOption(UserArrayOption, 'T.List of additional prefixes for cmake to search', [])),
+ ('pkg_config_path', BuiltinOption(UserArrayOption, 'List of additional paths for pkg-config to search', [])),
+ ('cmake_prefix_path', BuiltinOption(UserArrayOption, 'List of additional prefixes for cmake to search', [])),
])
# Special prefix-dependent defaults for installation directories that reside in
diff --git a/mesonbuild/dependencies/base.py b/mesonbuild/dependencies/base.py
index a83e3d6..6f8181d 100644
--- a/mesonbuild/dependencies/base.py
+++ b/mesonbuild/dependencies/base.py
@@ -388,7 +388,7 @@ class ConfigToolDependency(ExternalDependency):
tools = None
tool_name = None
- __strip_version = re.compile(r'^[0-9.]*')
+ __strip_version = re.compile(r'^[0-9][0-9.]+')
def __init__(self, name, environment, kwargs, language: T.Optional[str] = None):
super().__init__('config-tool', environment, kwargs, language=language)
@@ -1661,8 +1661,8 @@ class DubDependency(ExternalDependency):
lib_file_name = os.path.basename(default_path)
module_build_path = os.path.join(module_path, '.dub', 'build')
- # If default_path is a path to lib file and
- # directory of lib don't have subdir '.dub/build'
+ # If default_path is a path to lib file and
+ # directory of lib don't have subdir '.dub/build'
if not os.path.isdir(module_build_path) and os.path.isfile(default_path):
if folder_only:
return module_path
diff --git a/mesonbuild/dependencies/boost.py b/mesonbuild/dependencies/boost.py
index 9169039..53a0cfb 100644
--- a/mesonbuild/dependencies/boost.py
+++ b/mesonbuild/dependencies/boost.py
@@ -1,4 +1,4 @@
-# Copyright 2013-2017 The Meson development team
+# Copyright 2013-2020 The Meson development team
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -12,14 +12,15 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-# This file contains the detection logic for miscellaneous external dependencies.
-
-import glob
import os
+import re
+import functools
+import typing as T
+from pathlib import Path
from .. import mlog
from .. import mesonlib
-from ..environment import detect_cpu_family
+from ..environment import Environment
from .base import (DependencyException, ExternalDependency)
from .misc import threads_factory
@@ -39,16 +40,6 @@ from .misc import threads_factory
# mingw-w64 / Windows : libboost_<module>-mt.a (location = <prefix>/mingw64/lib/)
# libboost_<module>-mt.dll.a
#
-# Library names supported:
-# - libboost_<module>-<compiler>-mt-gd-x_x.lib (static)
-# - boost_<module>-<compiler>-mt-gd-x_x.lib|.dll (shared)
-# - libboost_<module>.lib (static)
-# - boost_<module>.lib|.dll (shared)
-# where compiler is vc141 for example.
-#
-# NOTE: -gd means runtime and build time debugging is on
-# -mt means threading=multi
-#
# The `modules` argument accept library names. This is because every module that
# has libraries to link against also has multiple options regarding how to
# link. See for example:
@@ -78,616 +69,832 @@ from .misc import threads_factory
# Furthermore, the boost documentation for unix above uses examples from windows like
# "libboost_regex-vc71-mt-d-x86-1_34.lib", so apparently the abi tags may be more aimed at windows.
#
-# Probably we should use the linker search path to decide which libraries to use. This will
-# make it possible to find the macports boost libraries without setting BOOST_ROOT, and will
-# also mean that it would be possible to use user-installed boost libraries when official
-# packages are installed.
-#
-# We thus follow the following strategy:
-# 1. Look for libraries using compiler.find_library( )
-# 1.1 On Linux, just look for boost_<module>
-# 1.2 On other systems (e.g. Mac) look for boost_<module>-mt if multithreading.
-# 1.3 Otherwise look for boost_<module>
-# 2. Fall back to previous approach
-# 2.1. Search particular directories.
-# 2.2. Find boost libraries with unknown suffixes using file-name globbing.
-
-# TODO: Unix: Don't assume we know where the boost dir is, rely on -Idir and -Ldir being set.
-# TODO: Allow user to specify suffix in BOOST_SUFFIX, or add specific options like BOOST_DEBUG for 'd' for debug.
-
-class BoostDependency(ExternalDependency):
- def __init__(self, environment, kwargs):
- super().__init__('boost', environment, kwargs, language='cpp')
- self.need_static_link = ['boost_exception', 'boost_test_exec_monitor']
- self.is_debug = environment.coredata.get_builtin_option('buildtype').startswith('debug')
- threading = kwargs.get("threading", "multi")
- self.is_multithreading = threading == "multi"
+# We use the following strategy for finding modules:
+# A) Detect potential boost root directories (also reads the BOOST_ROOT env var)
+# B) Foreach candidate
+#   1. Look for the boost headers (boost/version.hpp)
+#   2. Find all boost libraries
+#     2.1 Add all libraries in lib*
+#     2.2 Filter out non-boost libraries
+#     2.3 Filter the remaining libraries based on the meson requirements (static/shared, etc.)
+#     2.4 Ensure that all libraries have the same boost tag (and are thus compatible)
+#   3. Select the libraries matching the requested modules
+
+@functools.total_ordering
+class BoostIncludeDir():
+ def __init__(self, path: Path, version_int: int):
+ self.path = path
+ self.version_int = version_int
+ major = int(self.version_int / 100000)
+ minor = int((self.version_int / 100) % 1000)
+ patch = int(self.version_int % 100)
+ self.version = '{}.{}.{}'.format(major, minor, patch)
+ self.version_lib = '{}_{}'.format(major, minor)
+
+ def __repr__(self) -> str:
+ return '<BoostIncludeDir: {} -- {}>'.format(self.version, self.path)
+
+ def __lt__(self, other: T.Any) -> bool:
+ if isinstance(other, BoostIncludeDir):
+ return (self.version_int, self.path) < (other.version_int, other.path)
+ return NotImplemented
+
+@functools.total_ordering
+class BoostLibraryFile():
+ reg_abi_tag = re.compile(r'^s?g?y?d?p?n?$')
+ reg_ver_tag = re.compile(r'^[0-9_]+$')
+
+ def __init__(self, path: Path):
+ self.path = path
+ self.name = self.path.name
+
+ # Initialize default properties
+ self.static = False
+ self.toolset = ''
+ self.arch = ''
+ self.version_lib = ''
+ self.mt = True
+
+ self.runtime_static = False
+ self.runtime_debug = False
+ self.python_debug = False
+ self.debug = False
+ self.stlport = False
+ self.deprecated_iostreams = False
+
+ # Post process the library name
+ name_parts = self.name.split('.')
+ self.basename = name_parts[0]
+ self.suffixes = name_parts[1:]
+ self.suffixes = [x for x in self.suffixes if not x.isdigit()]
+ self.nvsuffix = '.'.join(self.suffixes) # Used for detecting the library type
+ self.nametags = self.basename.split('-')
+ self.mod_name = self.nametags[0]
+ if self.mod_name.startswith('lib'):
+ self.mod_name = self.mod_name[3:]
+
+ # Detecting library type
+ if self.nvsuffix in ['so', 'dll', 'dll.a', 'dll.lib', 'dylib']:
+ self.static = False
+ elif self.nvsuffix in ['a', 'lib']:
+ self.static = True
+ else:
+ raise DependencyException('Unable to process library extension "{}" ({})'.format(self.nvsuffix, self.path))
- self.requested_modules = self.get_requested(kwargs)
- if 'thread' in self.requested_modules:
- if not self._add_sub_dependency(threads_factory(environment, self.for_machine, {})):
- self.is_found = False
- return
+ # boost_.lib is the dll import library
+ if self.basename.startswith('boost_') and self.nvsuffix == 'lib':
+ self.static = False
- self.boost_root = None
- self.boost_roots = []
- self.incdir = None
- self.libdir = None
-
- if 'BOOST_ROOT' in os.environ:
- self.boost_root = os.environ['BOOST_ROOT']
- self.boost_roots = [self.boost_root]
- if not os.path.isabs(self.boost_root):
- raise DependencyException('BOOST_ROOT must be an absolute path.')
- if 'BOOST_INCLUDEDIR' in os.environ:
- self.incdir = os.environ['BOOST_INCLUDEDIR']
- if 'BOOST_LIBRARYDIR' in os.environ:
- self.libdir = os.environ['BOOST_LIBRARYDIR']
-
- if self.boost_root is None:
- if self.env.machines[self.for_machine].is_windows():
- self.boost_roots = self.detect_win_roots()
- else:
- self.boost_roots = self.detect_nix_roots()
+ # Process tags
+ tags = self.nametags[1:]
+ if not tags:
+ return
- if self.incdir is None:
- if self.env.machines[self.for_machine].is_windows():
- self.incdir = self.detect_win_incdir()
+ # Without any tags, mt is assumed. However, the absence of mt in the name
+ # when tags are present indicates that the lib was built without mt support.
+ self.mt = False
+ for i in tags:
+ if i == 'mt':
+ self.mt = True
+ elif len(i) == 3 and i[1:] in ['32', '64']:
+ self.arch = i
+ elif BoostLibraryFile.reg_abi_tag.match(i):
+ self.runtime_static = 's' in i
+ self.runtime_debug = 'g' in i
+ self.python_debug = 'y' in i
+ self.debug = 'd' in i
+ self.stlport = 'p' in i
+ self.deprecated_iostreams = 'n' in i
+ elif BoostLibraryFile.reg_ver_tag.match(i):
+ self.version_lib = i
else:
- self.incdir = self.detect_nix_incdir()
-
- mlog.debug('Boost library root dir is', mlog.bold(self.boost_root))
- mlog.debug('Boost include directory is', mlog.bold(self.incdir))
-
- # 1. check if we can find BOOST headers.
- self.detect_headers_and_version()
-
- if not self.is_found:
- return # if we can not find 'boost/version.hpp'
-
- # 2. check if we can find BOOST libraries.
- self.detect_lib_modules()
- mlog.debug('Boost library directory is', mlog.bold(self.libdir))
-
- mlog.debug('Installed Boost libraries: ')
- for key in sorted(self.lib_modules.keys()):
- mlog.debug(key, self.lib_modules[key])
-
- # 3. check if requested modules are valid, that is, either found or in the list of known boost libraries
- self.check_invalid_modules()
-
- # 4. final check whether or not we find all requested and valid modules
- self.check_find_requested_modules()
+ self.toolset = i
+
+ def __repr__(self) -> str:
+ return '<LIB: {} {:<32} {}>'.format(self.abitag, self.mod_name, self.path)
+
+ def __lt__(self, other: T.Any) -> bool:
+ if isinstance(other, BoostLibraryFile):
+ return (
+ self.mod_name, self.version_lib, self.arch, self.static,
+ not self.mt, not self.runtime_static,
+ not self.debug, self.runtime_debug, self.python_debug,
+ self.stlport, self.deprecated_iostreams,
+ self.name,
+ ) < (
+ other.mod_name, other.version_lib, other.arch, other.static,
+ not other.mt, not other.runtime_static,
+ not other.debug, other.runtime_debug, other.python_debug,
+ other.stlport, other.deprecated_iostreams,
+ other.name,
+ )
+ return NotImplemented
+
+ def __eq__(self, other: T.Any) -> bool:
+ if isinstance(other, BoostLibraryFile):
+ return self.name == other.name
+ return NotImplemented
+
+ def __hash__(self) -> int:
+ return hash(self.name)
+
+ @property
+ def abitag(self) -> str:
+ abitag = ''
+ abitag += 'S' if self.static else '-'
+ abitag += 'M' if self.mt else '-'
+ abitag += ' '
+ abitag += 's' if self.runtime_static else '-'
+ abitag += 'g' if self.runtime_debug else '-'
+ abitag += 'y' if self.python_debug else '-'
+ abitag += 'd' if self.debug else '-'
+ abitag += 'p' if self.stlport else '-'
+ abitag += 'n' if self.deprecated_iostreams else '-'
+ abitag += ' ' + (self.arch or '???')
+ abitag += ' ' + (self.toolset or '?')
+ abitag += ' ' + (self.version_lib or 'x_xx')
+ return abitag
+
+ def is_boost(self) -> bool:
+ return any([self.name.startswith(x) for x in ['libboost_', 'boost_']])
+
+ def version_matches(self, version_lib: str) -> bool:
+ # If no version tag is present, assume that it fits
+ if not self.version_lib or not version_lib:
+ return True
+ return self.version_lib == version_lib
- def check_invalid_modules(self):
- invalid_modules = [c for c in self.requested_modules if 'boost_' + c not in self.lib_modules and 'boost_' + c not in BOOST_LIBS]
+ def arch_matches(self, arch: str) -> bool:
+ # If no arch tag is present, assume that it fits
+ if not self.arch or not arch:
+ return True
+ return self.arch == arch
- # previous versions of meson allowed include dirs as modules
- remove = []
- for m in invalid_modules:
- if m in BOOST_DIRS:
- mlog.warning('Requested boost library', mlog.bold(m), 'that doesn\'t exist. '
- 'This will be an error in the future')
- remove.append(m)
+ def vscrt_matches(self, vscrt: str) -> bool:
+ # If no vscrt tag is present, assume that it fits ['/MD', '/MDd', '/MT', '/MTd']
+ if not vscrt:
+ return True
+ if vscrt in ['/MD', '-MD']:
+ return not self.runtime_static and not self.runtime_debug
+ elif vscrt in ['/MDd', '-MDd']:
+ return not self.runtime_static and self.runtime_debug
+ elif vscrt in ['/MT', '-MT']:
+ return (self.runtime_static or not self.static) and not self.runtime_debug
+ elif vscrt in ['/MTd', '-MTd']:
+ return (self.runtime_static or not self.static) and self.runtime_debug
+
+ mlog.warning('Boost: unknown vscrt tag {}. This may cause the compilation to fail. Please consider reporting this as a bug.'.format(vscrt), once=True)
+ return True
+
+ def get_compiler_args(self) -> T.List[str]:
+ args = [] # type: T.List[str]
+ if self.mod_name in boost_libraries:
+ libdef = boost_libraries[self.mod_name] # type: BoostLibrary
+ if self.static:
+ args += libdef.static
+ else:
+ args += libdef.shared
+ if self.mt:
+ args += libdef.multi
+ else:
+ args += libdef.single
+ return args
- self.requested_modules = [x for x in self.requested_modules if x not in remove]
- invalid_modules = [x for x in invalid_modules if x not in remove]
+ def get_link_args(self) -> T.List[str]:
+ return [self.path.as_posix()]
- if invalid_modules:
- mlog.error('Invalid Boost modules: ' + ', '.join(invalid_modules))
- return True
- else:
- return False
+class BoostDependency(ExternalDependency):
+ def __init__(self, environment: Environment, kwargs):
+ super().__init__('boost', environment, kwargs, language='cpp')
+ self.debug = environment.coredata.get_builtin_option('buildtype').startswith('debug')
+ self.multithreading = kwargs.get('threading', 'multi') == 'multi'
- def log_details(self):
- module_str = ', '.join(self.requested_modules)
- return module_str
+ self.boost_root = None
- def log_info(self):
- if self.boost_root:
- return self.boost_root
- return ''
+ # Extract and validate modules
+ self.modules = mesonlib.extract_as_list(kwargs, 'modules')
+ for i in self.modules:
+ if not isinstance(i, str):
+ raise DependencyException('Boost module argument is not a string.')
+ if i.startswith('boost_'):
+ raise DependencyException('Boost modules must be passed without the boost_ prefix')
- def detect_nix_roots(self):
- return [os.path.abspath(os.path.join(x, '..'))
- for x in self.clib_compiler.get_default_include_dirs()]
+ # Do we need threads?
+ if 'thread' in self.modules:
+ if not self._add_sub_dependency(threads_factory(environment, self.for_machine, {})):
+ self.is_found = False
+ return
- def detect_win_roots(self):
- res = []
- # Where boost documentation says it should be
- globtext = 'C:\\Program Files\\boost\\boost_*'
- files = glob.glob(globtext)
- res.extend(files)
+ # Try figuring out the architecture tag
+ self.arch = environment.machines[self.for_machine].cpu_family
+ self.arch = boost_arch_map.get(self.arch, None)
+
+ # Prefer BOOST_INCLUDEDIR and BOOST_LIBRARYDIR if both are set
+ boost_manual_env = [x in os.environ for x in ['BOOST_INCLUDEDIR', 'BOOST_LIBRARYDIR']]
+ if all(boost_manual_env):
+ inc_dir = Path(os.environ['BOOST_INCLUDEDIR'])
+ lib_dir = Path(os.environ['BOOST_LIBRARYDIR'])
+ mlog.debug('Trying to find boost with:')
+ mlog.debug(' - BOOST_INCLUDEDIR = {}'.format(inc_dir))
+ mlog.debug(' - BOOST_LIBRARYDIR = {}'.format(lib_dir))
+
+ boost_inc_dir = None
+ for i in [inc_dir / 'version.hpp', inc_dir / 'boost' / 'version.hpp']:
+ if i.is_file():
+ boost_inc_dir = self._include_dir_from_version_header(i)
+ break
+ if not boost_inc_dir:
+ self.is_found = False
+ return
- # Where boost built from source actually installs it
- if os.path.isdir('C:\\Boost'):
- res.append('C:\\Boost')
+ self.is_found = self.run_check([boost_inc_dir], [lib_dir])
+ return
+ elif any(boost_manual_env):
+ mlog.warning('Both BOOST_INCLUDEDIR *and* BOOST_LIBRARYDIR have to be set (one is not enough). Ignoring.')
+
+ # A) Detect potential boost root directories (also reads the BOOST_ROOT env var)
+ roots = self.detect_roots()
+ roots = list(mesonlib.OrderedSet(roots))
+
+ # B) Foreach candidate
+ for i in roots:
+ # 1. Look for the boost headers (boost/version.hpp)
+ mlog.debug('Checking potential boost root {}'.format(i.as_posix()))
+ inc_dirs = self.detect_inc_dirs(i)
+ inc_dirs = sorted(inc_dirs, reverse=True) # Prefer the newer versions
+
+ # Early abort when boost is not found
+ if not inc_dirs:
+ continue
+
+ lib_dirs = self.detect_lib_dirs(i)
+ self.is_found = self.run_check(inc_dirs, lib_dirs)
+ if self.is_found:
+ self.boost_root = i
+ break
+
+ def run_check(self, inc_dirs: T.List[BoostIncludeDir], lib_dirs: T.List[Path]) -> bool:
+ # 2. Find all boost libraries
+ libs = [] # type: T.List[BoostLibraryFile]
+ for i in lib_dirs:
+ libs += self.detect_libraries(i)
+ libs = sorted(set(libs))
+
+ modules = ['boost_' + x for x in self.modules]
+ for inc in inc_dirs:
+ mlog.debug(' - found boost {} include dir: {}'.format(inc.version, inc.path))
+ f_libs = self.filter_libraries(libs, inc.version_lib)
+
+ # mlog.debug(' - raw library list:')
+ # for j in libs:
+ # mlog.debug(' - {}'.format(j))
+ mlog.debug(' - filtered library list:')
+ for j in f_libs:
+ mlog.debug(' - {}'.format(j))
+
+ # 3. Select the libraries matching the requested modules
+ not_found = [] # type: T.List[str]
+ selected_modules = [] # type: T.List[BoostLibraryFile]
+ for mod in modules:
+ found = False
+ for l in f_libs:
+ if l.mod_name == mod:
+ selected_modules += [l]
+ found = True
+ break
+ if not found:
+ not_found += [mod]
+
+ # log the result
+ mlog.debug(' - found:')
+ comp_args = [] # type: T.List[str]
+ link_args = [] # type: T.List[str]
+ for j in selected_modules:
+ c_args = j.get_compiler_args()
+ l_args = j.get_link_args()
+ mlog.debug(' - {:<24} link={} comp={}'.format(j.mod_name, str(l_args), str(c_args)))
+ comp_args += c_args
+ link_args += l_args
+
+ comp_args = list(set(comp_args))
+ link_args = list(set(link_args))
+
+            # if we found all modules, we are done
+ if not not_found:
+ self.version = inc.version
+ self.compile_args = ['-I' + inc.path.as_posix()]
+ self.compile_args += comp_args
+ self.compile_args += self._extra_compile_args()
+ self.compile_args = list(mesonlib.OrderedSet(self.compile_args))
+ self.link_args = link_args
+ mlog.debug(' - final compile args: {}'.format(self.compile_args))
+ mlog.debug(' - final link args: {}'.format(self.link_args))
+ return True
- # Where boost prebuilt binaries are
- globtext = 'C:\\local\\boost_*'
- files = glob.glob(globtext)
- res.extend(files)
- return res
+            # in case we missed something, log it and try again
+ mlog.debug(' - NOT found:')
+ for mod in not_found:
+ mlog.debug(' - {}'.format(mod))
- def detect_nix_incdir(self):
- if self.boost_root:
- return os.path.join(self.boost_root, 'include')
- return None
-
- # FIXME: Should pick a version that matches the requested version
- # Returns the folder that contains the boost folder.
- def detect_win_incdir(self):
- for root in self.boost_roots:
- globtext = os.path.join(root, 'include', 'boost-*')
- incdirs = glob.glob(globtext)
- if incdirs:
- return incdirs[0]
- incboostdir = os.path.join(root, 'include', 'boost')
- if os.path.isdir(incboostdir):
- return os.path.join(root, 'include')
- incboostdir = os.path.join(root, 'boost')
- if os.path.isdir(incboostdir):
- return root
- return None
-
- def get_compile_args(self):
- args = []
- include_dir = self.incdir
-
- # Use "-isystem" when including boost headers instead of "-I"
- # to avoid compiler warnings/failures when "-Werror" is used
-
- # Careful not to use "-isystem" on default include dirs as it
- # breaks some of the headers for certain gcc versions
-
- # For example, doing g++ -isystem /usr/include on a simple
- # "int main()" source results in the error:
- # "/usr/include/c++/6.3.1/cstdlib:75:25: fatal error: stdlib.h: No such file or directory"
-
- # See https://gcc.gnu.org/bugzilla/show_bug.cgi?id=70129
- # and http://stackoverflow.com/questions/37218953/isystem-on-a-system-include-directory-causes-errors
- # for more details
-
- if include_dir and include_dir not in self.clib_compiler.get_default_include_dirs():
- args.append("".join(self.clib_compiler.get_include_args(include_dir, True)))
- return args
+ return False
- def get_requested(self, kwargs):
- candidates = mesonlib.extract_as_list(kwargs, 'modules')
- for c in candidates:
- if not isinstance(c, str):
- raise DependencyException('Boost module argument is not a string.')
- return candidates
+ def detect_inc_dirs(self, root: Path) -> T.List[BoostIncludeDir]:
+ candidates = [] # type: T.List[Path]
+ inc_root = root / 'include'
- def detect_headers_and_version(self):
+ candidates += [root / 'boost']
+ candidates += [inc_root / 'boost']
+ if inc_root.is_dir():
+ for i in inc_root.iterdir():
+ if not i.is_dir() or not i.name.startswith('boost-'):
+ continue
+ candidates += [i / 'boost']
+ candidates = [x for x in candidates if x.is_dir()]
+ candidates = [x / 'version.hpp' for x in candidates]
+ candidates = [x for x in candidates if x.exists()]
+ return [self._include_dir_from_version_header(x) for x in candidates]
+
+ def detect_lib_dirs(self, root: Path) -> T.List[Path]:
+ dirs = [] # type: T.List[Path]
+ subdirs = [] # type: T.List[Path]
+ for i in root.iterdir():
+ if i.is_dir() and i.name.startswith('lib'):
+ dirs += [i]
+
+ # Some distros put libraries not directly inside /usr/lib but in /usr/lib/x86_64-linux-gnu
+ for i in dirs:
+ for j in i.iterdir():
+ if j.is_dir() and j.name.endswith('-linux-gnu'):
+ subdirs += [j]
+ return dirs + subdirs
+
+ def filter_libraries(self, libs: T.List[BoostLibraryFile], lib_vers: str) -> T.List[BoostLibraryFile]:
+ # MSVC is very picky with the library tags
+ vscrt = ''
try:
- version = self.clib_compiler.get_define('BOOST_LIB_VERSION', '#include <boost/version.hpp>', self.env, self.get_compile_args(), [], disable_cache=True)[0]
- except mesonlib.EnvironmentException:
- return
- except TypeError:
- return
- # Remove quotes
- version = version[1:-1]
- # Fix version string
- self.version = version.replace('_', '.')
- self.is_found = True
-
- def detect_lib_modules(self):
- self.lib_modules = {}
- # 1. Try to find modules using compiler.find_library( )
- if self.find_libraries_with_abi_tags(self.abi_tags()):
+ crt_val = self.env.coredata.base_options['b_vscrt'].value
+ buildtype = self.env.coredata.builtins['buildtype'].value
+ vscrt = self.clib_compiler.get_crt_compile_args(crt_val, buildtype)[0]
+ except (KeyError, IndexError, AttributeError):
pass
- # 2. Fall back to the old method
- else:
- if self.env.machines[self.for_machine].is_windows():
- self.detect_lib_modules_win()
- else:
- self.detect_lib_modules_nix()
- def check_find_requested_modules(self):
- # 3. Check if we can find the modules
- for m in self.requested_modules:
- if 'boost_' + m not in self.lib_modules:
- mlog.debug('Requested Boost library {!r} not found'.format(m))
- self.is_found = False
-
- def modname_from_filename(self, filename):
- modname = os.path.basename(filename)
- modname = modname.split('.', 1)[0]
- modname = modname.split('-', 1)[0]
- if modname.startswith('libboost'):
- modname = modname[3:]
- return modname
-
- def compiler_tag(self):
- tag = None
- compiler = self.env.detect_cpp_compiler(self.for_machine)
+ libs = [x for x in libs if x.static == self.static]
+ libs = [x for x in libs if x.mt == self.multithreading]
+ libs = [x for x in libs if x.version_matches(lib_vers)]
+ libs = [x for x in libs if x.arch_matches(self.arch)]
+ libs = [x for x in libs if x.vscrt_matches(vscrt)]
+ libs = [x for x in libs if x.nvsuffix != 'dll'] # Only link to import libraries
+
+ # Only filter by debug when we are building in release mode. Debug
+        # libraries are automatically preferred through sorting otherwise.
+ if not self.debug:
+ libs = [x for x in libs if not x.debug]
+
+ # Take the abitag from the first library and filter by it. This
+ # ensures that we have a set of libraries that are always compatible.
+ if not libs:
+ return []
+ abitag = libs[0].abitag
+ libs = [x for x in libs if x.abitag == abitag]
+
+ return libs
+
+ def detect_libraries(self, libdir: Path) -> T.List[BoostLibraryFile]:
+ libs = [] # type: T.List[BoostLibraryFile]
+ for i in libdir.iterdir():
+ if not i.is_file() or i.is_symlink():
+ continue
+ if not any([i.name.startswith(x) for x in ['libboost_', 'boost_']]):
+ continue
+
+ libs += [BoostLibraryFile(i)]
+        return [x for x in libs if x.is_boost()]  # Filter out non-boost libraries
+
+ def detect_roots(self) -> T.List[Path]:
+ roots = [] # type: T.List[Path]
+
+ # Add roots from the environment
+ for i in ['BOOST_ROOT', 'BOOSTROOT']:
+ if i in os.environ:
+ raw_paths = os.environ[i].split(os.pathsep)
+ paths = [Path(x) for x in raw_paths]
+ if paths and any([not x.is_absolute() for x in paths]):
+ raise DependencyException('Paths in {} must be absolute'.format(i))
+ roots += paths
+ return roots # Do not add system paths if BOOST_ROOT is present
+
+ # Add system paths
if self.env.machines[self.for_machine].is_windows():
- if compiler.get_id() in ['msvc', 'clang-cl']:
- comp_ts_version = compiler.get_toolset_version()
- compiler_ts = comp_ts_version.split('.')
- # FIXME - what about other compilers?
- tag = '-vc{}{}'.format(compiler_ts[0], compiler_ts[1])
- else:
- tag = ''
- return tag
-
- def threading_tag(self):
- if not self.is_multithreading:
- return ''
-
- if self.env.machines[self.for_machine].is_darwin():
- # - Mac: requires -mt for multithreading, so should not fall back to non-mt libraries.
- return '-mt'
- elif self.env.machines[self.for_machine].is_windows():
- # - Windows: requires -mt for multithreading, so should not fall back to non-mt libraries.
- return '-mt'
+ # Where boost built from source actually installs it
+ c_root = Path('C:/Boost')
+ if c_root.is_dir():
+ roots += [c_root]
+
+ # Where boost documentation says it should be
+ prog_files = Path('C:/Program Files/boost')
+ # Where boost prebuilt binaries are
+ local_boost = Path('C:/local')
+
+ candidates = [] # type: T.List[Path]
+ if prog_files.is_dir():
+ candidates += [*prog_files.iterdir()]
+ if local_boost.is_dir():
+ candidates += [*local_boost.iterdir()]
+
+ roots += [x for x in candidates if x.name.lower().startswith('boost') and x.is_dir()]
else:
- # - Linux: leaves off -mt but libraries are multithreading-aware.
- # - Cygwin: leaves off -mt but libraries are multithreading-aware.
- return ''
-
- def version_tag(self):
- return '-' + self.version.replace('.', '_')
-
- def debug_tag(self):
- return '-gd' if self.is_debug else ''
-
- def arch_tag(self):
- # currently only applies to windows msvc installed binaries
- if self.env.detect_cpp_compiler(self.for_machine).get_id() not in ['msvc', 'clang-cl']:
- return ''
- # pre-compiled binaries only added arch tag for versions > 1.64
- if float(self.version) < 1.65:
- return ''
- arch = detect_cpu_family(self.env.coredata.compilers.host)
- if arch == 'x86':
- return '-x32'
- elif arch == 'x86_64':
- return '-x64'
+ tmp = [] # type: T.List[Path]
+ # Add unix paths
+ tmp += [Path(x).parent for x in self.clib_compiler.get_default_include_dirs()]
+
+ # Homebrew
+ brew_boost = Path('/usr/local/Cellar/boost')
+ if brew_boost.is_dir():
+ tmp += [x for x in brew_boost.iterdir()]
+
+ # Add some default system paths
+ tmp += [Path('/opt/local')]
+ tmp += [Path('/usr/local/opt/boost')]
+ tmp += [Path('/usr/local')]
+ tmp += [Path('/usr')]
+
+ # Cleanup paths
+ tmp = [x for x in tmp if x.is_dir()]
+ tmp = [x.resolve() for x in tmp]
+ roots += tmp
+
+ return roots
+
+ def log_details(self) -> str:
+ modules = sorted(set(self.modules))
+ if modules:
+ return 'modules: ' + ', '.join(modules)
return ''
- def versioned_abi_tag(self):
- return self.compiler_tag() + self.threading_tag() + self.debug_tag() + self.arch_tag() + self.version_tag()
-
- # FIXME - how to handle different distributions, e.g. for Mac? Currently we handle homebrew and macports, but not fink.
- def abi_tags(self):
- if self.env.machines[self.for_machine].is_windows():
- return [self.versioned_abi_tag(), self.threading_tag()]
- else:
- return [self.threading_tag()]
-
- def sourceforge_dir(self):
- if self.env.detect_cpp_compiler(self.for_machine).get_id() != 'msvc':
- return None
- comp_ts_version = self.env.detect_cpp_compiler(self.for_machine).get_toolset_version()
- arch = detect_cpu_family(self.env.coredata.compilers.host)
- if arch == 'x86':
- return 'lib32-msvc-{}'.format(comp_ts_version)
- elif arch == 'x86_64':
- return 'lib64-msvc-{}'.format(comp_ts_version)
- else:
- # Does anyone do Boost cross-compiling to other archs on Windows?
- return None
-
- def find_libraries_with_abi_tag(self, tag):
-
- # All modules should have the same tag
- self.lib_modules = {}
-
- all_found = True
-
- for module in self.requested_modules:
- libname = 'boost_' + module + tag
-
- args = self.clib_compiler.find_library(libname, self.env, self.extra_lib_dirs())
- if args is None:
- mlog.debug("Couldn\'t find library '{}' for boost module '{}' (ABI tag = '{}')".format(libname, module, tag))
- all_found = False
- else:
- mlog.debug('Link args for boost module "{}" are {}'.format(module, args))
- self.lib_modules['boost_' + module] = args
-
- return all_found
-
- def find_libraries_with_abi_tags(self, tags):
- for tag in tags:
- if self.find_libraries_with_abi_tag(tag):
- return True
- return False
-
- def detect_lib_modules_win(self):
- if not self.libdir:
- # The libdirs in the distributed binaries (from sf)
- lib_sf = self.sourceforge_dir()
-
- if self.boost_root:
- roots = [self.boost_root]
- else:
- roots = self.boost_roots
- for root in roots:
- # The default libdir when building
- libdir = os.path.join(root, 'lib')
- if os.path.isdir(libdir):
- self.libdir = libdir
- break
- if lib_sf:
- full_path = os.path.join(root, lib_sf)
- if os.path.isdir(full_path):
- self.libdir = full_path
- break
+ def log_info(self) -> str:
+ if self.boost_root:
+ return self.boost_root.as_posix()
+ return ''
- if not self.libdir:
- return
+ def _include_dir_from_version_header(self, hfile: Path) -> BoostIncludeDir:
+ # Extract the version with a regex. Using clib_compiler.get_define would
+        # also work, but it is slower (since the compiler has to be
+ # invoked) and overkill since the layout of the header is always the same.
+ assert hfile.exists()
+ raw = hfile.read_text()
+ m = re.search(r'#define\s+BOOST_VERSION\s+([0-9]+)', raw)
+ if not m:
+ mlog.debug('Failed to extract version information from {}'.format(hfile))
+ return BoostIncludeDir(hfile.parents[1], 0)
+ return BoostIncludeDir(hfile.parents[1], int(m.group(1)))
+
+ def _extra_compile_args(self) -> T.List[str]:
+ args = [] # type: T.List[str]
+ args += ['-DBOOST_ALL_NO_LIB'] # Disable automatic linking
+ if not self.static:
+ args += ['-DBOOST_ALL_DYN_LINK']
+ return args
- for name in self.need_static_link:
- # FIXME - why are we only looking for *.lib? Mingw provides *.dll.a and *.a
- libname = 'lib' + name + self.versioned_abi_tag() + '.lib'
- if os.path.isfile(os.path.join(self.libdir, libname)):
- self.lib_modules[self.modname_from_filename(libname)] = [libname]
- else:
- libname = "lib{}.lib".format(name)
- if os.path.isfile(os.path.join(self.libdir, libname)):
- self.lib_modules[name[3:]] = [libname]
-
- # globber1 applies to a layout=system installation
- # globber2 applies to a layout=versioned installation
- globber1 = 'libboost_*' if self.static else 'boost_*'
- globber2 = globber1 + self.versioned_abi_tag()
- # FIXME - why are we only looking for *.lib? Mingw provides *.dll.a and *.a
- globber2_matches = glob.glob(os.path.join(self.libdir, globber2 + '.lib'))
- for entry in globber2_matches:
- fname = os.path.basename(entry)
- self.lib_modules[self.modname_from_filename(fname)] = [fname]
- if not globber2_matches:
- # FIXME - why are we only looking for *.lib? Mingw provides *.dll.a and *.a
- for entry in glob.glob(os.path.join(self.libdir, globber1 + '.lib')):
- if self.static:
- fname = os.path.basename(entry)
- self.lib_modules[self.modname_from_filename(fname)] = [fname]
-
- def detect_lib_modules_nix(self):
- if self.static:
- libsuffix = 'a'
- elif self.env.machines[self.for_machine].is_darwin():
- libsuffix = 'dylib'
- else:
- libsuffix = 'so'
- globber = 'libboost_*.{}'.format(libsuffix)
- if self.libdir:
- libdirs = [self.libdir]
- elif self.boost_root is None:
- libdirs = mesonlib.get_library_dirs()
- else:
- libdirs = [os.path.join(self.boost_root, 'lib')]
- for libdir in libdirs:
- for name in self.need_static_link:
- libname = 'lib{}.a'.format(name)
- if os.path.isfile(os.path.join(libdir, libname)):
- self.lib_modules[name] = [libname]
- for entry in glob.glob(os.path.join(libdir, globber)):
- # I'm not 100% sure what to do here. Some distros
- # have modules such as thread only as -mt versions.
- # On debian all packages are built threading=multi
- # but not suffixed with -mt.
- # FIXME: implement detect_lib_modules_{debian, redhat, ...}
- # FIXME: this wouldn't work with -mt-gd either. -BDR
- if self.is_multithreading and mesonlib.is_debianlike():
- pass
- elif self.is_multithreading and entry.endswith('-mt.{}'.format(libsuffix)):
- pass
- elif not entry.endswith('-mt.{}'.format(libsuffix)):
- pass
- else:
- continue
- modname = self.modname_from_filename(entry)
- if modname not in self.lib_modules:
- self.lib_modules[modname] = [entry]
-
- def extra_lib_dirs(self):
- if self.libdir:
- return [self.libdir]
- elif self.boost_root:
- return [os.path.join(self.boost_root, 'lib')]
- return []
-
- def get_link_args(self, **kwargs):
- args = []
- for d in self.extra_lib_dirs():
- args += self.clib_compiler.get_linker_search_args(d)
- for lib in self.requested_modules:
- args += self.lib_modules['boost_' + lib]
- return args
+# See https://www.boost.org/doc/libs/1_72_0/more/getting_started/unix-variants.html#library-naming
+# See https://mesonbuild.com/Reference-tables.html#cpu-families
+boost_arch_map = {
+ 'aarch64': 'a64',
+ 'arc': 'a32',
+ 'arm': 'a32',
+ 'ia64': 'i64',
+ 'mips': 'm32',
+ 'mips64': 'm64',
+ 'ppc': 'p32',
+ 'ppc64': 'p64',
+ 'sparc': 's32',
+ 'sparc64': 's64',
+ 'x86': 'x32',
+ 'x86_64': 'x64',
+}
+
+
+#### ---- BEGIN GENERATED ---- ####
+# #
+# Generated with tools/boost_names.py:
+# - boost version: 1.72.0
+# - modules found: 158
+# - libraries found: 42
+#
- def get_sources(self):
- return []
-
-# Generated with boost_names.py
-BOOST_LIBS = [
- 'boost_atomic',
- 'boost_chrono',
- 'boost_chrono',
- 'boost_container',
- 'boost_context',
- 'boost_coroutine',
- 'boost_date_time',
- 'boost_exception',
- 'boost_fiber',
- 'boost_filesystem',
- 'boost_graph',
- 'boost_iostreams',
- 'boost_locale',
- 'boost_log',
- 'boost_log_setup',
- 'boost_math_tr1',
- 'boost_math_tr1f',
- 'boost_math_tr1l',
- 'boost_math_c99',
- 'boost_math_c99f',
- 'boost_math_c99l',
- 'boost_math_tr1',
- 'boost_math_tr1f',
- 'boost_math_tr1l',
- 'boost_math_c99',
- 'boost_math_c99f',
- 'boost_math_c99l',
- 'boost_math_tr1',
- 'boost_math_tr1f',
- 'boost_math_tr1l',
- 'boost_math_c99',
- 'boost_math_c99f',
- 'boost_math_c99l',
- 'boost_math_tr1',
- 'boost_math_tr1f',
- 'boost_math_tr1l',
- 'boost_math_c99',
- 'boost_math_c99f',
- 'boost_math_c99l',
- 'boost_math_tr1',
- 'boost_math_tr1f',
- 'boost_math_tr1l',
- 'boost_math_c99',
- 'boost_math_c99f',
- 'boost_math_c99l',
- 'boost_math_tr1',
- 'boost_math_tr1f',
- 'boost_math_tr1l',
- 'boost_math_c99',
- 'boost_math_c99f',
- 'boost_math_c99l',
- 'boost_mpi',
- 'boost_program_options',
- 'boost_random',
- 'boost_regex',
- 'boost_serialization',
- 'boost_wserialization',
- 'boost_signals',
- 'boost_stacktrace_noop',
- 'boost_stacktrace_backtrace',
- 'boost_stacktrace_addr2line',
- 'boost_stacktrace_basic',
- 'boost_stacktrace_windbg',
- 'boost_stacktrace_windbg_cached',
- 'boost_system',
- 'boost_prg_exec_monitor',
- 'boost_test_exec_monitor',
- 'boost_unit_test_framework',
- 'boost_thread',
- 'boost_timer',
- 'boost_type_erasure',
- 'boost_wave'
-]
-
-BOOST_DIRS = [
- 'lambda',
- 'optional',
- 'convert',
- 'system',
- 'uuid',
- 'archive',
- 'align',
- 'timer',
- 'chrono',
- 'gil',
- 'logic',
- 'signals',
- 'predef',
- 'tr1',
- 'multi_index',
- 'property_map',
- 'multi_array',
- 'context',
- 'random',
- 'endian',
- 'circular_buffer',
- 'proto',
- 'assign',
- 'format',
- 'math',
- 'phoenix',
- 'graph',
- 'locale',
- 'mpl',
- 'pool',
- 'unordered',
- 'core',
- 'exception',
- 'ptr_container',
- 'flyweight',
- 'range',
- 'typeof',
- 'thread',
- 'move',
- 'spirit',
- 'dll',
- 'compute',
- 'serialization',
- 'ratio',
- 'msm',
- 'config',
- 'metaparse',
- 'coroutine2',
- 'qvm',
- 'program_options',
- 'concept',
- 'detail',
- 'hana',
- 'concept_check',
- 'compatibility',
- 'variant',
- 'type_erasure',
- 'mpi',
- 'test',
- 'fusion',
- 'log',
- 'sort',
- 'local_function',
- 'units',
- 'functional',
- 'preprocessor',
- 'integer',
- 'container',
- 'polygon',
- 'interprocess',
- 'numeric',
- 'iterator',
- 'wave',
- 'lexical_cast',
- 'multiprecision',
- 'utility',
- 'tti',
- 'asio',
- 'dynamic_bitset',
- 'algorithm',
- 'xpressive',
- 'bimap',
- 'signals2',
- 'type_traits',
- 'regex',
- 'statechart',
- 'parameter',
- 'icl',
- 'python',
- 'lockfree',
- 'intrusive',
- 'io',
- 'pending',
- 'geometry',
- 'tuple',
- 'iostreams',
- 'heap',
- 'atomic',
- 'filesystem',
- 'smart_ptr',
- 'function',
- 'fiber',
- 'type_index',
- 'accumulators',
- 'function_types',
- 'coroutine',
- 'vmd',
- 'date_time',
- 'property_tree',
- 'bind'
-]
+class BoostLibrary():
+ def __init__(self, name: str, shared: T.List[str], static: T.List[str], single: T.List[str], multi: T.List[str]):
+ self.name = name
+ self.shared = shared
+ self.static = static
+ self.single = single
+ self.multi = multi
+
+class BoostModule():
+ def __init__(self, name: str, key: str, desc: str, libs: T.List[str]):
+ self.name = name
+ self.key = key
+ self.desc = desc
+ self.libs = libs
+
+
+# dict of all known libraries with additional compile options
+boost_libraries = {
+ 'boost_atomic': BoostLibrary(
+ name='boost_atomic',
+ shared=[],
+ static=[],
+ single=[],
+ multi=[],
+ ),
+ 'boost_chrono': BoostLibrary(
+ name='boost_chrono',
+ shared=['-DBOOST_ALL_DYN_LINK=1'],
+ static=['-DBOOST_All_STATIC_LINK=1'],
+ single=[],
+ multi=[],
+ ),
+ 'boost_container': BoostLibrary(
+ name='boost_container',
+ shared=['-DBOOST_CONTAINER_DYN_LINK=1'],
+ static=['-DBOOST_CONTAINER_STATIC_LINK=1'],
+ single=[],
+ multi=[],
+ ),
+ 'boost_context': BoostLibrary(
+ name='boost_context',
+ shared=[],
+ static=[],
+ single=[],
+ multi=[],
+ ),
+ 'boost_contract': BoostLibrary(
+ name='boost_contract',
+ shared=[],
+ static=[],
+ single=[],
+ multi=[],
+ ),
+ 'boost_coroutine': BoostLibrary(
+ name='boost_coroutine',
+ shared=[],
+ static=[],
+ single=[],
+ multi=[],
+ ),
+ 'boost_date_time': BoostLibrary(
+ name='boost_date_time',
+ shared=[],
+ static=[],
+ single=[],
+ multi=[],
+ ),
+ 'boost_exception': BoostLibrary(
+ name='boost_exception',
+ shared=[],
+ static=[],
+ single=[],
+ multi=[],
+ ),
+ 'boost_fiber': BoostLibrary(
+ name='boost_fiber',
+ shared=[],
+ static=[],
+ single=[],
+ multi=[],
+ ),
+ 'boost_fiber_numa': BoostLibrary(
+ name='boost_fiber_numa',
+ shared=[],
+ static=[],
+ single=[],
+ multi=[],
+ ),
+ 'boost_filesystem': BoostLibrary(
+ name='boost_filesystem',
+ shared=['-DBOOST_FILESYSTEM_DYN_LINK=1'],
+ static=['-DBOOST_FILESYSTEM_STATIC_LINK=1'],
+ single=[],
+ multi=[],
+ ),
+ 'boost_graph': BoostLibrary(
+ name='boost_graph',
+ shared=['-DBOOST_GRAPH_DYN_LINK=1'],
+ static=[],
+ single=[],
+ multi=[],
+ ),
+ 'boost_iostreams': BoostLibrary(
+ name='boost_iostreams',
+ shared=['-DBOOST_IOSTREAMS_DYN_LINK=1', '-DBOOST_IOSTREAMS_DYN_LINK=1'],
+ static=[],
+ single=[],
+ multi=[],
+ ),
+ 'boost_locale': BoostLibrary(
+ name='boost_locale',
+ shared=['-DBOOST_LOCALE_DYN_LINK=1'],
+ static=[],
+ single=[],
+ multi=[],
+ ),
+ 'boost_log': BoostLibrary(
+ name='boost_log',
+ shared=['-DBOOST_LOG_DLL', '-DBOOST_LOG_DYN_LINK=1'],
+ static=[],
+ single=['BOOST_LOG_NO_THREADS'],
+ multi=[],
+ ),
+ 'boost_log_setup': BoostLibrary(
+ name='boost_log_setup',
+ shared=['-DBOOST_LOG_DYN_LINK=1', '-DBOOST_LOG_SETUP_DLL', '-DBOOST_LOG_SETUP_DYN_LINK=1'],
+ static=[],
+ single=['BOOST_LOG_NO_THREADS'],
+ multi=[],
+ ),
+ 'boost_math_c99': BoostLibrary(
+ name='boost_math_c99',
+ shared=['-DBOOST_MATH_TR1_DYN_LINK=1'],
+ static=[],
+ single=[],
+ multi=[],
+ ),
+ 'boost_math_c99f': BoostLibrary(
+ name='boost_math_c99f',
+ shared=['-DBOOST_MATH_TR1_DYN_LINK=1'],
+ static=[],
+ single=[],
+ multi=[],
+ ),
+ 'boost_math_c99l': BoostLibrary(
+ name='boost_math_c99l',
+ shared=['-DBOOST_MATH_TR1_DYN_LINK=1'],
+ static=[],
+ single=[],
+ multi=[],
+ ),
+ 'boost_math_tr1': BoostLibrary(
+ name='boost_math_tr1',
+ shared=['-DBOOST_MATH_TR1_DYN_LINK=1'],
+ static=[],
+ single=[],
+ multi=[],
+ ),
+ 'boost_math_tr1f': BoostLibrary(
+ name='boost_math_tr1f',
+ shared=['-DBOOST_MATH_TR1_DYN_LINK=1'],
+ static=[],
+ single=[],
+ multi=[],
+ ),
+ 'boost_math_tr1l': BoostLibrary(
+ name='boost_math_tr1l',
+ shared=['-DBOOST_MATH_TR1_DYN_LINK=1'],
+ static=[],
+ single=[],
+ multi=[],
+ ),
+ 'boost_mpi': BoostLibrary(
+ name='boost_mpi',
+ shared=['-DBOOST_MPI_DYN_LINK=1'],
+ static=[],
+ single=[],
+ multi=[],
+ ),
+ 'boost_prg_exec_monitor': BoostLibrary(
+ name='boost_prg_exec_monitor',
+ shared=['-DBOOST_TEST_DYN_LINK=1'],
+ static=[],
+ single=[],
+ multi=[],
+ ),
+ 'boost_program_options': BoostLibrary(
+ name='boost_program_options',
+ shared=[],
+ static=[],
+ single=[],
+ multi=[],
+ ),
+ 'boost_random': BoostLibrary(
+ name='boost_random',
+ shared=[],
+ static=[],
+ single=[],
+ multi=[],
+ ),
+ 'boost_regex': BoostLibrary(
+ name='boost_regex',
+ shared=['-DBOOST_REGEX_DYN_LINK=1'],
+ static=[],
+ single=[],
+ multi=[],
+ ),
+ 'boost_serialization': BoostLibrary(
+ name='boost_serialization',
+ shared=['-DBOOST_SERIALIZATION_DYN_LINK=1'],
+ static=[],
+ single=[],
+ multi=[],
+ ),
+ 'boost_stacktrace_addr2line': BoostLibrary(
+ name='boost_stacktrace_addr2line',
+ shared=['-DBOOST_STACKTRACE_DYN_LINK=1'],
+ static=[],
+ single=[],
+ multi=[],
+ ),
+ 'boost_stacktrace_backtrace': BoostLibrary(
+ name='boost_stacktrace_backtrace',
+ shared=['-DBOOST_STACKTRACE_DYN_LINK=1'],
+ static=[],
+ single=[],
+ multi=[],
+ ),
+ 'boost_stacktrace_basic': BoostLibrary(
+ name='boost_stacktrace_basic',
+ shared=['-DBOOST_STACKTRACE_DYN_LINK=1'],
+ static=[],
+ single=[],
+ multi=[],
+ ),
+ 'boost_stacktrace_noop': BoostLibrary(
+ name='boost_stacktrace_noop',
+ shared=['-DBOOST_STACKTRACE_DYN_LINK=1'],
+ static=[],
+ single=[],
+ multi=[],
+ ),
+ 'boost_stacktrace_windbg': BoostLibrary(
+ name='boost_stacktrace_windbg',
+ shared=['-DBOOST_STACKTRACE_DYN_LINK=1'],
+ static=[],
+ single=[],
+ multi=[],
+ ),
+ 'boost_stacktrace_windbg_cached': BoostLibrary(
+ name='boost_stacktrace_windbg_cached',
+ shared=['-DBOOST_STACKTRACE_DYN_LINK=1'],
+ static=[],
+ single=[],
+ multi=[],
+ ),
+ 'boost_system': BoostLibrary(
+ name='boost_system',
+ shared=['-DBOOST_SYSTEM_DYN_LINK=1'],
+ static=['-DBOOST_SYSTEM_STATIC_LINK=1'],
+ single=[],
+ multi=[],
+ ),
+ 'boost_test_exec_monitor': BoostLibrary(
+ name='boost_test_exec_monitor',
+ shared=['-DBOOST_TEST_DYN_LINK=1'],
+ static=[],
+ single=[],
+ multi=[],
+ ),
+ 'boost_thread': BoostLibrary(
+ name='boost_thread',
+ shared=['-DBOOST_THREAD_USE_DLL=1'],
+ static=['-DBOOST_THREAD_USE_LIB=1'],
+ single=[],
+ multi=[],
+ ),
+ 'boost_timer': BoostLibrary(
+ name='boost_timer',
+ shared=['-DBOOST_TIMER_DYN_LINK=1'],
+ static=['-DBOOST_TIMER_STATIC_LINK=1'],
+ single=[],
+ multi=[],
+ ),
+ 'boost_type_erasure': BoostLibrary(
+ name='boost_type_erasure',
+ shared=[],
+ static=[],
+ single=[],
+ multi=[],
+ ),
+ 'boost_unit_test_framework': BoostLibrary(
+ name='boost_unit_test_framework',
+ shared=['-DBOOST_TEST_DYN_LINK=1'],
+ static=[],
+ single=[],
+ multi=[],
+ ),
+ 'boost_wave': BoostLibrary(
+ name='boost_wave',
+ shared=[],
+ static=[],
+ single=[],
+ multi=[],
+ ),
+ 'boost_wserialization': BoostLibrary(
+ name='boost_wserialization',
+ shared=['-DBOOST_SERIALIZATION_DYN_LINK=1'],
+ static=[],
+ single=[],
+ multi=[],
+ ),
+}
+
+# #
+#### ---- END GENERATED ---- ####
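
The detection above deliberately reads boost/version.hpp instead of invoking the compiler. A minimal standalone sketch of that decoding, assuming only the documented Boost encoding BOOST_VERSION = major * 100000 + minor * 100 + patch (the helper name and fallback value here are illustrative, not part of the patch):

import re
from pathlib import Path

def parse_boost_version(version_hpp: Path) -> str:
    # e.g. '#define BOOST_VERSION 107200'  ->  '1.72.0'
    raw = version_hpp.read_text()
    m = re.search(r'#define\s+BOOST_VERSION\s+([0-9]+)', raw)
    if not m:
        return '0.0.0'  # same spirit as the BoostIncludeDir(..., 0) fallback above
    v = int(m.group(1))
    return '{}.{}.{}'.format(v // 100000, (v // 100) % 1000, v % 100)

# parse_boost_version(Path('/usr/include/boost/version.hpp'))  # -> e.g. '1.72.0'
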
diff --git a/mesonbuild/environment.py b/mesonbuild/environment.py
index a928248..46bbea0 100644
--- a/mesonbuild/environment.py
+++ b/mesonbuild/environment.py
@@ -47,7 +47,8 @@ from .linkers import (
CcrxDynamicLinker,
ClangClDynamicLinker,
DynamicLinker,
- GnuDynamicLinker,
+ GnuBFDDynamicLinker,
+ GnuGoldDynamicLinker,
LLVMDynamicLinker,
MSVCDynamicLinker,
OptlinkDynamicLinker,
@@ -767,7 +768,7 @@ class Environment:
if o.startswith('LLD'):
if '(compatible with GNU linkers)' in o:
return LLVMDynamicLinker(
- compiler, for_machine, 'lld', comp_class.LINKER_PREFIX,
+ compiler, for_machine, comp_class.LINKER_PREFIX,
override, version=search_version(o))
if value is not None:
@@ -830,7 +831,7 @@ class Environment:
v = search_version(o)
if o.startswith('LLD'):
linker = LLVMDynamicLinker(
- compiler, for_machine, 'lld', comp_class.LINKER_PREFIX, override, version=v) # type: DynamicLinker
+ compiler, for_machine, comp_class.LINKER_PREFIX, override, version=v) # type: DynamicLinker
elif e.startswith('lld-link: '):
# Toolchain wrapper got in the way; this happens with e.g. https://github.com/mstorsjo/llvm-mingw
# Let's try to extract the linker invocation command to grab the version.
@@ -846,30 +847,29 @@ class Environment:
_, o, e = Popen_safe([linker_cmd, '--version'])
v = search_version(o)
- linker = LLVMDynamicLinker(compiler, for_machine, 'lld', comp_class.LINKER_PREFIX, override, version=v)
+ linker = LLVMDynamicLinker(compiler, for_machine, comp_class.LINKER_PREFIX, override, version=v)
# first is for apple clang, second is for real gcc, the third is icc
elif e.endswith('(use -v to see invocation)\n') or 'macosx_version' in e or 'ld: unknown option:' in e:
if isinstance(comp_class.LINKER_PREFIX, str):
_, _, e = Popen_safe(compiler + [comp_class.LINKER_PREFIX + '-v'] + extra_args)
else:
_, _, e = Popen_safe(compiler + comp_class.LINKER_PREFIX + ['-v'] + extra_args)
- i = 'APPLE ld'
for line in e.split('\n'):
if 'PROJECT:ld' in line:
v = line.split('-')[1]
break
else:
v = 'unknown version'
- linker = AppleDynamicLinker(compiler, for_machine, i, comp_class.LINKER_PREFIX, override, version=v)
+ linker = AppleDynamicLinker(compiler, for_machine, comp_class.LINKER_PREFIX, override, version=v)
elif 'GNU' in o:
if 'gold' in o:
- i = 'GNU ld.gold'
+ cls = GnuGoldDynamicLinker
else:
- i = 'GNU ld.bfd'
- linker = GnuDynamicLinker(compiler, for_machine, i, comp_class.LINKER_PREFIX, override, version=v)
+ cls = GnuBFDDynamicLinker
+ linker = cls(compiler, for_machine, comp_class.LINKER_PREFIX, override, version=v)
elif 'Solaris' in e or 'Solaris' in o:
linker = SolarisDynamicLinker(
- compiler, for_machine, 'solaris', comp_class.LINKER_PREFIX, override,
+ compiler, for_machine, comp_class.LINKER_PREFIX, override,
version=search_version(e))
else:
raise EnvironmentException('Unable to determine dynamic linker')
@@ -1061,7 +1061,7 @@ class Environment:
if 'PGI Compilers' in out:
cls = PGICCompiler if lang == 'c' else PGICPPCompiler
self.coredata.add_lang_args(cls.language, cls, for_machine, self)
- linker = PGIDynamicLinker(compiler, for_machine, 'pgi', cls.LINKER_PREFIX, [], version=version)
+ linker = PGIDynamicLinker(compiler, for_machine, cls.LINKER_PREFIX, [], version=version)
return cls(
ccache + compiler, version, for_machine, is_cross,
info, exe_wrap, linker=linker)
@@ -1214,7 +1214,7 @@ class Environment:
if 'PGI Compilers' in out:
cls = PGIFortranCompiler
self.coredata.add_lang_args(cls.language, cls, for_machine, self)
- linker = PGIDynamicLinker(compiler, for_machine, 'pgi',
+ linker = PGIDynamicLinker(compiler, for_machine,
cls.LINKER_PREFIX, [], version=version)
return cls(
compiler, version, for_machine, is_cross, info, exe_wrap,
@@ -1413,7 +1413,7 @@ class Environment:
linker = type(cc.linker)(for_machine, always_args, exelist=cc.linker.exelist,
version=cc.linker.version, **extra_args)
else:
- linker = type(cc.linker)(compiler, for_machine, cc.linker.id, cc.LINKER_PREFIX,
+ linker = type(cc.linker)(compiler, for_machine, cc.LINKER_PREFIX,
always_args=always_args, version=cc.linker.version,
**extra_args)
elif 'link' in override[0]:
diff --git a/mesonbuild/interpreter.py b/mesonbuild/interpreter.py
index 09f7ff5..c29ed89 100644
--- a/mesonbuild/interpreter.py
+++ b/mesonbuild/interpreter.py
@@ -547,7 +547,7 @@ class ExternalProgramHolder(InterpreterObject, ObjectHolder):
output = res.stdout.strip()
if not output:
output = res.stderr.strip()
- match = re.search(r'([0-9\.]+)', output)
+ match = re.search(r'([0-9][0-9\.]+)', output)
if not match:
m = 'Could not find a version number in output of {!r}'
raise InterpreterException(m.format(raw_cmd))
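
The only behavioural change here is that a version match must now start with a digit. A small demonstration of the difference (the sample strings are made up):

import re

OLD = re.compile(r'([0-9\.]+)')
NEW = re.compile(r'([0-9][0-9\.]+)')

def first_match(pattern, text):
    m = pattern.search(text)
    return m.group(1) if m else None

print(first_match(OLD, 'git version 2.25.1'))    # '2.25.1'
print(first_match(NEW, 'git version 2.25.1'))    # '2.25.1'
print(first_match(OLD, 'unrecognised option.'))  # '.'   -- a bogus "version"
print(first_match(NEW, 'unrecognised option.'))  # None  -- falls through to the error above
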
diff --git a/mesonbuild/linkers.py b/mesonbuild/linkers.py
index b1d80c3..489525b 100644
--- a/mesonbuild/linkers.py
+++ b/mesonbuild/linkers.py
@@ -260,8 +260,8 @@ class DynamicLinker(metaclass=abc.ABCMeta):
ret += self.prefix_arg + [arg]
return ret
- def __init__(self, exelist: T.List[str], for_machine: mesonlib.MachineChoice,
- id_: str, prefix_arg: T.Union[str, T.List[str]],
+ def __init__(self, id_: str, exelist: T.List[str],
+ for_machine: mesonlib.MachineChoice, prefix_arg: T.Union[str, T.List[str]],
always_args: T.List[str], *, version: str = 'unknown version'):
self.exelist = exelist
self.for_machine = for_machine
@@ -577,6 +577,9 @@ class AppleDynamicLinker(PosixDynamicLinkerMixin, DynamicLinker):
"""Apple's ld implementation."""
+ def __init__(self, *args, **kwargs):
+ super().__init__('ld64', *args, **kwargs)
+
def get_asneeded_args(self) -> T.List[str]:
return self._apply_prefix('-dead_strip_dylibs')
@@ -656,17 +659,38 @@ class GnuDynamicLinker(GnuLikeDynamicLinkerMixin, PosixDynamicLinkerMixin, Dynam
"""Representation of GNU ld.bfd and ld.gold."""
- pass
+
+class GnuGoldDynamicLinker(GnuDynamicLinker):
+
+ def __init__(self, *args, **kwargs):
+ super().__init__('ld.gold', *args, **kwargs)
+
+
+class GnuBFDDynamicLinker(GnuDynamicLinker):
+
+ def __init__(self, *args, **kwargs):
+ super().__init__('ld.bfd', *args, **kwargs)
class LLVMDynamicLinker(GnuLikeDynamicLinkerMixin, PosixDynamicLinkerMixin, DynamicLinker):
- """Representation of LLVM's lld (not lld-link) linker.
+ """Representation of LLVM's ld.lld linker.
- This is only the posix-like linker.
+    This is only the GNU-like linker, not the Apple-like or link.exe-like
+ linkers.
"""
- pass
+ def __init__(self, *args, **kwargs):
+ super().__init__('ld.lld', *args, **kwargs)
+
+ # Some targets don't seem to support this argument (windows, wasm, ...)
+ _, _, e = mesonlib.Popen_safe(self.exelist + self._apply_prefix('--allow-shlib-undefined'))
+ self.has_allow_shlib_undefined = not ('unknown argument: --allow-shlib-undefined' in e)
+
+ def get_allow_undefined_args(self) -> T.List[str]:
+ if self.has_allow_shlib_undefined:
+ return self._apply_prefix('--allow-shlib-undefined')
+ return []
class CcrxDynamicLinker(DynamicLinker):
@@ -675,7 +699,7 @@ class CcrxDynamicLinker(DynamicLinker):
def __init__(self, for_machine: mesonlib.MachineChoice,
*, version: str = 'unknown version'):
- super().__init__(['rlink.exe'], for_machine, 'rlink', '', [],
+ super().__init__('rlink', ['rlink.exe'], for_machine, '', [],
version=version)
def get_accepts_rsp(self) -> bool:
@@ -708,7 +732,7 @@ class ArmDynamicLinker(PosixDynamicLinkerMixin, DynamicLinker):
def __init__(self, for_machine: mesonlib.MachineChoice,
*, version: str = 'unknown version'):
- super().__init__(['armlink'], for_machine, 'armlink', '', [],
+ super().__init__('armlink', ['armlink'], for_machine, '', [],
version=version)
def get_accepts_rsp(self) -> bool:
@@ -740,6 +764,9 @@ class PGIDynamicLinker(PosixDynamicLinkerMixin, DynamicLinker):
"""PGI linker."""
+ def __init__(self, *args, **kwargs):
+ super().__init__('pgi', *args, **kwargs)
+
def get_allow_undefined_args(self) -> T.List[str]:
return []
@@ -794,6 +821,9 @@ class VisualStudioLikeLinkerMixin:
super().__init__(*args, **kwargs)
self.machine = machine
+ def get_buildtype_args(self, buildtype: str) -> T.List[str]:
+ return mesonlib.listify([self._apply_prefix(a) for a in self._BUILDTYPE_ARGS[buildtype]])
+
def invoked_by_compiler(self) -> bool:
return not self.direct
@@ -852,7 +882,7 @@ class MSVCDynamicLinker(VisualStudioLikeLinkerMixin, DynamicLinker):
prefix: T.Union[str, T.List[str]] = '',
machine: str = 'x86', version: str = 'unknown version',
direct: bool = True):
- super().__init__(exelist or ['link.exe'], for_machine, 'link',
+ super().__init__('link', exelist or ['link.exe'], for_machine,
prefix, always_args, machine=machine, version=version, direct=direct)
def get_always_args(self) -> T.List[str]:
@@ -868,7 +898,7 @@ class ClangClDynamicLinker(VisualStudioLikeLinkerMixin, DynamicLinker):
prefix: T.Union[str, T.List[str]] = '',
machine: str = 'x86', version: str = 'unknown version',
direct: bool = True):
- super().__init__(exelist or ['lld-link.exe'], for_machine, 'lld-link',
+ super().__init__('lld-link', exelist or ['lld-link.exe'], for_machine,
prefix, always_args, machine=machine, version=version, direct=direct)
@@ -878,13 +908,16 @@ class XilinkDynamicLinker(VisualStudioLikeLinkerMixin, DynamicLinker):
def __init__(self, for_machine: mesonlib.MachineChoice, always_args: T.List[str],
*, version: str = 'unknown version'):
- super().__init__(['xilink.exe'], for_machine, 'xilink', '', always_args, version=version)
+ super().__init__('xilink', ['xilink.exe'], for_machine, '', always_args, version=version)
class SolarisDynamicLinker(PosixDynamicLinkerMixin, DynamicLinker):
"""Sys-V derived linker used on Solaris and OpenSolaris."""
+ def __init__(self, *args, **kwargs):
+ super().__init__('ld.solaris', *args, **kwargs)
+
def get_link_whole_for(self, args: T.List[str]) -> T.List[str]:
if not args:
return args
@@ -935,13 +968,18 @@ class OptlinkDynamicLinker(VisualStudioLikeLinkerMixin, DynamicLinker):
*, version: str = 'unknown version'):
# Use optlink instead of link so we don't interfer with other link.exe
# implementations.
- super().__init__(['optlink.exe'], for_machine, 'optlink', '', [], version=version)
+ super().__init__('optlink', ['optlink.exe'], for_machine, '', [], version=version)
def get_allow_undefined_args(self) -> T.List[str]:
return []
+
class CudaLinker(PosixDynamicLinkerMixin, DynamicLinker):
"""Cuda linker (nvlink)"""
+
+ def __init__(self, *args, **kwargs):
+ super().__init__('nvlink', *args, **kwargs)
+
@staticmethod
def parse_version():
version_cmd = ['nvlink', '--version']
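
All of the linker changes follow the same pattern: the id string is fixed once in each subclass constructor instead of being repeated (and possibly mistyped) at every construction site. A toy sketch of that pattern, with stand-in classes rather than the real Meson types:

import typing as T

class Linker:
    def __init__(self, id_: str, exelist: T.List[str], *, version: str = 'unknown version'):
        self.id = id_
        self.exelist = exelist
        self.version = version

class GoldLinker(Linker):
    def __init__(self, *args, **kwargs):
        # The id is baked in here, so callers only supply the variable parts.
        super().__init__('ld.gold', *args, **kwargs)

gold = GoldLinker(['ld.gold'], version='2.34')
print(gold.id, gold.exelist, gold.version)  # ld.gold ['ld.gold'] 2.34
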
diff --git a/mesonbuild/mdist.py b/mesonbuild/mdist.py
index ef722ea..bfff50a 100644
--- a/mesonbuild/mdist.py
+++ b/mesonbuild/mdist.py
@@ -177,6 +177,22 @@ def create_dist_hg(dist_name, archives, src_root, bld_root, dist_sub, dist_scrip
output_names.append(zipname)
return output_names
+def run_dist_steps(meson_command, unpacked_src_dir, builddir, installdir, ninja_bin):
+ if subprocess.call(meson_command + ['--backend=ninja', unpacked_src_dir, builddir]) != 0:
+ print('Running Meson on distribution package failed')
+ return 1
+ if subprocess.call([ninja_bin], cwd=builddir) != 0:
+ print('Compiling the distribution package failed')
+ return 1
+ if subprocess.call([ninja_bin, 'test'], cwd=builddir) != 0:
+ print('Running unit tests on the distribution package failed')
+ return 1
+ myenv = os.environ.copy()
+ myenv['DESTDIR'] = installdir
+ if subprocess.call([ninja_bin, 'install'], cwd=builddir, env=myenv) != 0:
+ print('Installing the distribution package failed')
+ return 1
+ return 0
def check_dist(packagename, meson_command, extra_meson_args, bld_root, privdir):
print('Testing distribution package %s' % packagename)
@@ -185,38 +201,27 @@ def check_dist(packagename, meson_command, extra_meson_args, bld_root, privdir):
installdir = os.path.join(privdir, 'dist-install')
for p in (unpackdir, builddir, installdir):
if os.path.exists(p):
- shutil.rmtree(p)
+ windows_proof_rmtree(p)
os.mkdir(p)
ninja_bin = detect_ninja()
- try:
- shutil.unpack_archive(packagename, unpackdir)
- unpacked_files = glob(os.path.join(unpackdir, '*'))
- assert(len(unpacked_files) == 1)
- unpacked_src_dir = unpacked_files[0]
- with open(os.path.join(bld_root, 'meson-info', 'intro-buildoptions.json')) as boptions:
- meson_command += ['-D{name}={value}'.format(**o) for o in json.load(boptions)
- if o['name'] not in ['backend', 'install_umask']]
- meson_command += extra_meson_args
- if subprocess.call(meson_command + ['--backend=ninja', unpacked_src_dir, builddir]) != 0:
- print('Running Meson on distribution package failed')
- return 1
- if subprocess.call([ninja_bin], cwd=builddir) != 0:
- print('Compiling the distribution package failed')
- return 1
- if subprocess.call([ninja_bin, 'test'], cwd=builddir) != 0:
- print('Running unit tests on the distribution package failed')
- return 1
- myenv = os.environ.copy()
- myenv['DESTDIR'] = installdir
- if subprocess.call([ninja_bin, 'install'], cwd=builddir, env=myenv) != 0:
- print('Installing the distribution package failed')
- return 1
- finally:
- shutil.rmtree(unpackdir)
- shutil.rmtree(builddir)
- shutil.rmtree(installdir)
- print('Distribution package %s tested' % packagename)
- return 0
+ shutil.unpack_archive(packagename, unpackdir)
+ unpacked_files = glob(os.path.join(unpackdir, '*'))
+ assert(len(unpacked_files) == 1)
+ unpacked_src_dir = unpacked_files[0]
+ with open(os.path.join(bld_root, 'meson-info', 'intro-buildoptions.json')) as boptions:
+ meson_command += ['-D{name}={value}'.format(**o) for o in json.load(boptions)
+ if o['name'] not in ['backend', 'install_umask']]
+ meson_command += extra_meson_args
+
+ ret = run_dist_steps(meson_command, unpacked_src_dir, builddir, installdir, ninja_bin)
+ if ret > 0:
+ print('Dist check build directory was {}'.format(builddir))
+ else:
+ windows_proof_rmtree(unpackdir)
+ windows_proof_rmtree(builddir)
+ windows_proof_rmtree(installdir)
+ print('Distribution package %s tested' % packagename)
+ return ret
def determine_archives_to_generate(options):
result = []
diff --git a/mesonbuild/mesonlib.py b/mesonbuild/mesonlib.py
index 891e7a1..e09d123 100644
--- a/mesonbuild/mesonlib.py
+++ b/mesonbuild/mesonlib.py
@@ -65,8 +65,7 @@ def git(cmd: T.List[str], workingdir: str, **kwargs) -> subprocess.CompletedProc
# Sometimes git calls git recursively, such as `git submodule update
# --recursive` which will be without the above workaround, so set the
# console mode again just in case.
- if platform.system().lower() == 'windows':
- mlog._windows_ansi()
+ mlog.setup_console()
return pc
@@ -1080,11 +1079,20 @@ def Popen_safe(args: T.List[str], write: T.Optional[str] = None,
**kwargs: T.Any) -> T.Tuple[subprocess.Popen, str, str]:
import locale
encoding = locale.getpreferredencoding()
+    # Redirect stdin to DEVNULL, otherwise the command we run here might mess
+ # up the console and ANSI colors will stop working on Windows.
+ if 'stdin' not in kwargs:
+ kwargs['stdin'] = subprocess.DEVNULL
if sys.version_info < (3, 6) or not sys.stdout.encoding or encoding.upper() != 'UTF-8':
- return Popen_safe_legacy(args, write=write, stdout=stdout, stderr=stderr, **kwargs)
- p = subprocess.Popen(args, universal_newlines=True, close_fds=False,
- stdout=stdout, stderr=stderr, **kwargs)
- o, e = p.communicate(write)
+ p, o, e = Popen_safe_legacy(args, write=write, stdout=stdout, stderr=stderr, **kwargs)
+ else:
+ p = subprocess.Popen(args, universal_newlines=True, close_fds=False,
+ stdout=stdout, stderr=stderr, **kwargs)
+ o, e = p.communicate(write)
+    # Sometimes the command we run will itself call another command that does
+    # not get the above stdin workaround, so set the console mode again just in
+    # case.
+ mlog.setup_console()
return p, o, e
def Popen_safe_legacy(args: T.List[str], write: T.Optional[str] = None,
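
The new stdin default means every child started through Popen_safe gets a null stdin unless a caller explicitly asks otherwise, so it can neither read from nor reconfigure the parent's console. A reduced sketch of that default (the wrapper name is ours, not the real Popen_safe):

import subprocess
import sys

def popen_quiet(args, **kwargs):
    # Hand the child /dev/null (or NUL on Windows) as stdin unless overridden.
    kwargs.setdefault('stdin', subprocess.DEVNULL)
    return subprocess.run(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                          universal_newlines=True, **kwargs)

res = popen_quiet([sys.executable, '-c', 'import sys; print(sys.stdin.isatty())'])
print(res.stdout.strip())  # 'False' -- the child never sees the real console
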
diff --git a/mesonbuild/minstall.py b/mesonbuild/minstall.py
index cc6ea0a..64bcca2 100644
--- a/mesonbuild/minstall.py
+++ b/mesonbuild/minstall.py
@@ -13,6 +13,7 @@
# limitations under the License.
import sys, pickle, os, shutil, subprocess, errno
+import argparse
import shlex
from glob import glob
from .scripts import depfixer
@@ -35,10 +36,14 @@ selinux_updates = []
def add_arguments(parser):
parser.add_argument('-C', default='.', dest='wd',
help='directory to cd into before running')
+ parser.add_argument('--profile-self', action='store_true', dest='profile',
+ help=argparse.SUPPRESS)
parser.add_argument('--no-rebuild', default=False, action='store_true',
help='Do not rebuild before installing.')
parser.add_argument('--only-changed', default=False, action='store_true',
help='Only overwrite files that are older than the copied file.')
+ parser.add_argument('--quiet', default=False, action='store_true',
+ help='Do not print every file that was installed.')
class DirMaker:
def __init__(self, lf):
@@ -216,6 +221,10 @@ class Installer:
self.lf = lf
self.preserved_file_count = 0
+ def log(self, msg):
+ if not self.options.quiet:
+ print(msg)
+
def should_preserve_existing_file(self, from_file, to_file):
if not self.options.only_changed:
return False
@@ -226,7 +235,7 @@ class Installer:
to_time = os.stat(to_file).st_mtime
return from_time <= to_time
- def do_copyfile(self, from_file, to_file):
+ def do_copyfile(self, from_file, to_file, makedirs=None):
outdir = os.path.split(to_file)[0]
if not os.path.isfile(from_file) and not os.path.islink(from_file):
raise RuntimeError('Tried to install something that isn\'t a file:'
@@ -243,7 +252,12 @@ class Installer:
self.preserved_file_count += 1
return False
os.remove(to_file)
- print('Installing %s to %s' % (from_file, outdir))
+ elif makedirs:
+ # Unpack tuple
+ dirmaker, outdir = makedirs
+ # Create dirs if needed
+ dirmaker.makedirs(outdir, exist_ok=True)
+ self.log('Installing %s to %s' % (from_file, outdir))
if os.path.islink(from_file):
if not os.path.exists(from_file):
# Dangling symlink. Replicate as is.
@@ -318,6 +332,7 @@ class Installer:
abs_dst = os.path.join(dst_dir, filepart)
if os.path.isdir(abs_dst):
print('Tried to copy file %s but a directory of that name already exists.' % abs_dst)
+ sys.exit(1)
parent_dir = os.path.dirname(abs_dst)
if not os.path.isdir(parent_dir):
os.mkdir(parent_dir)
@@ -347,10 +362,10 @@ class Installer:
restore_selinux_contexts()
self.run_install_script(d)
if not self.did_install_something:
- print('Nothing to install.')
- if self.preserved_file_count > 0:
- print('Preserved {} unchanged files, see {} for the full list'
- .format(self.preserved_file_count, os.path.normpath(self.lf.name)))
+ self.log('Nothing to install.')
+ if not self.options.quiet and self.preserved_file_count > 0:
+ self.log('Preserved {} unchanged files, see {} for the full list'
+ .format(self.preserved_file_count, os.path.normpath(self.lf.name)))
except PermissionError:
if shutil.which('pkexec') is not None and 'PKEXEC_UID' not in os.environ:
print('Installation failed due to insufficient permissions.')
@@ -364,42 +379,39 @@ class Installer:
for (src_dir, dst_dir, mode, exclude) in d.install_subdirs:
self.did_install_something = True
full_dst_dir = get_destdir_path(d, dst_dir)
- print('Installing subdir %s to %s' % (src_dir, full_dst_dir))
+ self.log('Installing subdir %s to %s' % (src_dir, full_dst_dir))
d.dirmaker.makedirs(full_dst_dir, exist_ok=True)
self.do_copydir(d, src_dir, full_dst_dir, exclude, mode)
def install_data(self, d):
for i in d.data:
- self.did_install_something = True
fullfilename = i[0]
outfilename = get_destdir_path(d, i[1])
mode = i[2]
outdir = os.path.dirname(outfilename)
- d.dirmaker.makedirs(outdir, exist_ok=True)
- self.do_copyfile(fullfilename, outfilename)
+ if self.do_copyfile(fullfilename, outfilename, makedirs=(d.dirmaker, outdir)):
+ self.did_install_something = True
set_mode(outfilename, mode, d.install_umask)
def install_man(self, d):
for m in d.man:
- self.did_install_something = True
full_source_filename = m[0]
outfilename = get_destdir_path(d, m[1])
outdir = os.path.dirname(outfilename)
- d.dirmaker.makedirs(outdir, exist_ok=True)
install_mode = m[2]
- self.do_copyfile(full_source_filename, outfilename)
+ if self.do_copyfile(full_source_filename, outfilename, makedirs=(d.dirmaker, outdir)):
+ self.did_install_something = True
set_mode(outfilename, install_mode, d.install_umask)
def install_headers(self, d):
for t in d.headers:
- self.did_install_something = True
fullfilename = t[0]
fname = os.path.basename(fullfilename)
outdir = get_destdir_path(d, t[1])
outfilename = os.path.join(outdir, fname)
install_mode = t[2]
- d.dirmaker.makedirs(outdir, exist_ok=True)
- self.do_copyfile(fullfilename, outfilename)
+ if self.do_copyfile(fullfilename, outfilename, makedirs=(d.dirmaker, outdir)):
+ self.did_install_something = True
set_mode(outfilename, install_mode, d.install_umask)
def run_install_script(self, d):
@@ -409,6 +421,8 @@ class Installer:
'MESON_INSTALL_DESTDIR_PREFIX': d.fullprefix,
'MESONINTROSPECT': ' '.join([shlex.quote(x) for x in d.mesonintrospect]),
}
+ if self.options.quiet:
+ env['MESON_INSTALL_QUIET'] = '1'
child_env = os.environ.copy()
child_env.update(env)
@@ -418,7 +432,7 @@ class Installer:
script = i['exe']
args = i['args']
name = ' '.join(script + args)
- print('Running custom install script {!r}'.format(name))
+ self.log('Running custom install script {!r}'.format(name))
try:
rc = subprocess.call(script + args, env=child_env)
if rc != 0:
@@ -429,14 +443,14 @@ class Installer:
def install_targets(self, d):
for t in d.targets:
- self.did_install_something = True
if not os.path.exists(t.fname):
# For example, import libraries of shared modules are optional
if t.optional:
- print('File {!r} not found, skipping'.format(t.fname))
+ self.log('File {!r} not found, skipping'.format(t.fname))
continue
else:
raise RuntimeError('File {!r} could not be found'.format(t.fname))
+ file_copied = False # not set when a directory is copied
fname = check_for_stampfile(t.fname)
outdir = get_destdir_path(d, t.outdir)
outname = os.path.join(outdir, os.path.basename(fname))
@@ -446,17 +460,16 @@ class Installer:
install_rpath = t.install_rpath
install_name_mappings = t.install_name_mappings
install_mode = t.install_mode
- d.dirmaker.makedirs(outdir, exist_ok=True)
if not os.path.exists(fname):
raise RuntimeError('File {!r} could not be found'.format(fname))
elif os.path.isfile(fname):
- self.do_copyfile(fname, outname)
+ file_copied = self.do_copyfile(fname, outname, makedirs=(d.dirmaker, outdir))
set_mode(outname, install_mode, d.install_umask)
if should_strip and d.strip_bin is not None:
if fname.endswith('.jar'):
- print('Not stripping jar target:', os.path.basename(fname))
+                        self.log('Not stripping jar target: ' + os.path.basename(fname))
continue
- print('Stripping target {!r} using {}.'.format(fname, d.strip_bin[0]))
+ self.log('Stripping target {!r} using {}.'.format(fname, d.strip_bin[0]))
ps, stdo, stde = Popen_safe(d.strip_bin + [outname])
if ps.returncode != 0:
print('Could not strip file.\n')
@@ -469,10 +482,11 @@ class Installer:
wasm_source = os.path.splitext(fname)[0] + '.wasm'
if os.path.exists(wasm_source):
wasm_output = os.path.splitext(outname)[0] + '.wasm'
- self.do_copyfile(wasm_source, wasm_output)
+ file_copied = self.do_copyfile(wasm_source, wasm_output)
elif os.path.isdir(fname):
fname = os.path.join(d.build_dir, fname.rstrip('/'))
outname = os.path.join(outdir, os.path.basename(fname))
+ d.dirmaker.makedirs(outdir, exist_ok=True)
self.do_copydir(d, fname, outname, None, install_mode)
else:
raise RuntimeError('Unknown file type for {!r}'.format(fname))
@@ -491,7 +505,8 @@ class Installer:
print("Symlink creation does not work on this platform. "
"Skipping all symlinking.")
printed_symlink_error = True
- if os.path.isfile(outname):
+ if file_copied:
+ self.did_install_something = True
try:
depfixer.fix_rpath(outname, install_rpath, final_path,
install_name_mappings, verbose=False)
@@ -515,5 +530,10 @@ def run(opts):
installer = Installer(opts, lf)
append_to_log(lf, '# List of files installed by Meson')
append_to_log(lf, '# Does not contain files installed by custom scripts.')
- installer.do_install(datafilename)
+ if opts.profile:
+ import cProfile as profile
+ fname = os.path.join(private_dir, 'profile-installer.log')
+ profile.runctx('installer.do_install(datafilename)', globals(), locals(), filename=fname)
+ else:
+ installer.do_install(datafilename)
return 0
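
The installer changes share one idea: output directories are created, and did_install_something is recorded, only when a file is actually copied, which is what keeps --only-changed and --quiet accurate. A rough standalone sketch of that pattern (function name and paths are illustrative):

import os
import shutil

def copy_if_newer(src: str, dst: str) -> bool:
    """Copy src to dst unless dst is already up to date; return True if copied."""
    if os.path.exists(dst) and os.stat(src).st_mtime <= os.stat(dst).st_mtime:
        return False                        # preserved, nothing was installed
    outdir = os.path.dirname(dst)
    if outdir:
        os.makedirs(outdir, exist_ok=True)  # create the tree only when needed
    shutil.copy2(src, dst)
    return True

# did_install_something = copy_if_newer('build/foo.h', 'staging/include/foo.h')
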
diff --git a/mesonbuild/mlog.py b/mesonbuild/mlog.py
index a30d6b9..b28eca1 100644
--- a/mesonbuild/mlog.py
+++ b/mesonbuild/mlog.py
@@ -40,13 +40,15 @@ def _windows_ansi() -> bool:
# original behavior
return bool(kernel.SetConsoleMode(stdout, mode.value | 0x4) or os.environ.get('ANSICON'))
-try:
- if platform.system().lower() == 'windows':
- colorize_console = os.isatty(sys.stdout.fileno()) and _windows_ansi() # type: bool
- else:
- colorize_console = os.isatty(sys.stdout.fileno()) and os.environ.get('TERM') != 'dumb'
-except Exception:
- colorize_console = False
+def setup_console() -> bool:
+ try:
+ if platform.system().lower() == 'windows':
+ return os.isatty(sys.stdout.fileno()) and _windows_ansi()
+ return os.isatty(sys.stdout.fileno()) and os.environ.get('TERM') != 'dumb'
+ except Exception:
+ return False
+
+colorize_console = setup_console()
log_dir = None # type: T.Optional[str]
log_file = None # type: T.Optional[T.TextIO]
log_fname = 'meson-log.txt' # type: str
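
Turning the one-shot module constant into setup_console() makes the colour probe cheap to re-run after a child process has reset the terminal, instead of trusting a value computed once at import time. A simplified version, with the Windows ANSI handling omitted:

import os
import sys

def setup_console() -> bool:
    try:
        return os.isatty(sys.stdout.fileno()) and os.environ.get('TERM') != 'dumb'
    except Exception:
        return False

colorize_console = setup_console()  # initial value, as before

# ... after running an external command that may have clobbered the console:
colorize_console = setup_console()  # recompute on demand
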
diff --git a/mesonbuild/modules/cmake.py b/mesonbuild/modules/cmake.py
index 21f144f..6c4098b 100644
--- a/mesonbuild/modules/cmake.py
+++ b/mesonbuild/modules/cmake.py
@@ -18,7 +18,7 @@ import shutil
from . import ExtensionModule, ModuleReturnValue
from .. import build, dependencies, mesonlib, mlog
-from ..interpreterbase import permittedKwargs, FeatureNew, stringArgs, InterpreterObject, ObjectHolder
+from ..interpreterbase import permittedKwargs, FeatureNew, stringArgs, InterpreterObject, ObjectHolder, noPosargs
from ..interpreter import ConfigurationDataHolder, InterpreterException, SubprojectHolder
@@ -66,6 +66,7 @@ class CMakeSubprojectHolder(InterpreterObject, ObjectHolder):
'target': self.target,
'target_type': self.target_type,
'target_list': self.target_list,
+ 'found': self.found_method,
})
def _args_to_info(self, args):
@@ -105,12 +106,18 @@ class CMakeSubprojectHolder(InterpreterObject, ObjectHolder):
info = self._args_to_info(args)
return info['func']
+ @noPosargs
@permittedKwargs({})
def target_list(self, args, kwargs):
- if len(args) > 0:
- raise InterpreterException('target_list does not take any parameters.')
return self.held_object.cm_interpreter.target_list()
+ @noPosargs
+ @permittedKwargs({})
+ @FeatureNew('CMakeSubproject.found()', '0.53.2')
+ def found_method(self, args, kwargs):
+ return self.held_object is not None
+
+
class CmakeModule(ExtensionModule):
cmake_detected = False
cmake_root = None
diff --git a/mesonbuild/modules/pkgconfig.py b/mesonbuild/modules/pkgconfig.py
index da0a60e..cfdae4f 100644
--- a/mesonbuild/modules/pkgconfig.py
+++ b/mesonbuild/modules/pkgconfig.py
@@ -266,7 +266,7 @@ class PkgConfigModule(ExtensionModule):
def generate_pkgconfig_file(self, state, deps, subdirs, name, description,
url, version, pcfile, conflicts, variables,
- uninstalled=False):
+ uninstalled=False, dataonly=False):
deps.remove_dups()
coredata = state.environment.get_coredata()
if uninstalled:
@@ -283,12 +283,13 @@ class PkgConfigModule(ExtensionModule):
incdir = PurePath(coredata.get_builtin_option('includedir'))
fname = os.path.join(outdir, pcfile)
with open(fname, 'w', encoding='utf-8') as ofile:
- ofile.write('prefix={}\n'.format(self._escape(prefix)))
- if uninstalled:
- ofile.write('srcdir={}\n'.format(self._escape(srcdir)))
- else:
- ofile.write('libdir={}\n'.format(self._escape('${prefix}' / libdir)))
- ofile.write('includedir={}\n'.format(self._escape('${prefix}' / incdir)))
+ if not dataonly:
+ ofile.write('prefix={}\n'.format(self._escape(prefix)))
+ if uninstalled:
+ ofile.write('srcdir={}\n'.format(self._escape(srcdir)))
+ else:
+ ofile.write('libdir={}\n'.format(self._escape('${prefix}' / libdir)))
+ ofile.write('includedir={}\n'.format(self._escape('${prefix}' / incdir)))
if variables:
ofile.write('\n')
for k, v in variables:
@@ -370,27 +371,28 @@ class PkgConfigModule(ExtensionModule):
ofile.write('Libs: {}\n'.format(' '.join(generate_libs_flags(deps.pub_libs))))
if len(deps.priv_libs) > 0:
ofile.write('Libs.private: {}\n'.format(' '.join(generate_libs_flags(deps.priv_libs))))
+
+ def generate_compiler_flags():
+ cflags_buf = []
+ for f in deps.cflags:
+ cflags_buf.append(self._escape(f))
+ return cflags_buf
+
+ cflags = generate_compiler_flags()
ofile.write('Cflags:')
if uninstalled:
ofile.write(' '.join(generate_uninstalled_cflags(deps.pub_libs + deps.priv_libs)))
- else:
- for h in subdirs:
- ofile.write(' ')
- if h == '.':
- ofile.write('-I${includedir}')
- else:
- ofile.write(self._escape(PurePath('-I${includedir}') / h))
- for f in deps.cflags:
- ofile.write(' ')
- ofile.write(self._escape(f))
- ofile.write('\n')
+ elif not dataonly and cflags:
+ ofile.write('{}\n'.format(' '.join(cflags)))
@FeatureNewKwargs('pkgconfig.generate', '0.54.0', ['uninstalled_variables'])
@FeatureNewKwargs('pkgconfig.generate', '0.42.0', ['extra_cflags'])
@FeatureNewKwargs('pkgconfig.generate', '0.41.0', ['variables'])
+ @FeatureNewKwargs('pkgconfig.generate', '0.54.0', ['dataonly'])
@permittedKwargs({'libraries', 'version', 'name', 'description', 'filebase',
'subdirs', 'requires', 'requires_private', 'libraries_private',
- 'install_dir', 'extra_cflags', 'variables', 'url', 'd_module_versions'})
+ 'install_dir', 'extra_cflags', 'variables', 'url', 'd_module_versions',
+ 'dataonly'})
def generate(self, state, args, kwargs):
if 'variables' in kwargs:
FeatureNew('custom pkgconfig variables', '0.41.0').use(state.subproject)
@@ -399,6 +401,7 @@ class PkgConfigModule(ExtensionModule):
default_description = None
default_name = None
mainlib = None
+ default_subdirs = ['.']
if not args and 'version' not in kwargs:
FeatureNew('pkgconfig.generate implicit version keyword', '0.46.0').use(state.subproject)
elif len(args) == 1:
@@ -414,7 +417,14 @@ class PkgConfigModule(ExtensionModule):
elif len(args) > 1:
raise mesonlib.MesonException('Too many positional arguments passed to Pkgconfig_gen.')
- subdirs = mesonlib.stringlistify(kwargs.get('subdirs', ['.']))
+ dataonly = kwargs.get('dataonly', False)
+ if dataonly:
+ default_subdirs = []
+            blocked_vars = ['libraries', 'libraries_private', 'requires_private', 'extra_cflags', 'subdirs']
+ if len(set(kwargs) & set(blocked_vars)) > 0:
+ raise mesonlib.MesonException('Cannot combine dataonly with any of {}'.format(blocked_vars))
+
+ subdirs = mesonlib.stringlistify(kwargs.get('subdirs', default_subdirs))
version = kwargs.get('version', default_version)
if not isinstance(version, str):
raise mesonlib.MesonException('Version must be specified.')
@@ -440,6 +450,11 @@ class PkgConfigModule(ExtensionModule):
libraries = [mainlib] + libraries
deps = DependenciesHelper(state, filebase)
+ for d in subdirs:
+ if d == '.':
+ deps.add_cflags(['-I${includedir}'])
+ else:
+ deps.add_cflags(self._escape(PurePath('-I${includedir}') / d))
deps.add_pub_libs(libraries)
deps.add_priv_libs(kwargs.get('libraries_private', []))
deps.add_pub_reqs(kwargs.get('requires', []))
@@ -488,13 +503,14 @@ class PkgConfigModule(ExtensionModule):
if not isinstance(pkgroot, str):
raise mesonlib.MesonException('Install_dir must be a string.')
self.generate_pkgconfig_file(state, deps, subdirs, name, description, url,
- version, pcfile, conflicts, variables)
+ version, pcfile, conflicts, variables,
+ False, dataonly)
res = build.Data(mesonlib.File(True, state.environment.get_scratch_dir(), pcfile), pkgroot)
variables = parse_variable_list(mesonlib.stringlistify(kwargs.get('uninstalled_variables', [])))
pcfile = filebase + '-uninstalled.pc'
self.generate_pkgconfig_file(state, deps, subdirs, name, description, url,
version, pcfile, conflicts, variables,
- uninstalled=True)
+ uninstalled=True, dataonly=dataonly)
        # Associate the main library with this generated pc file. If the library
        # is used in any subsequent call to the generator, it will add a
        # 'Requires:' or 'Requires.private:' entry for this pc file.
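
The dataonly mode added above drops the prefix/libdir/includedir variables and the Cflags/Libs sections, so the generated .pc file carries only metadata (Name, Description, Version, Requires and any custom variables). A minimal standalone sketch of that idea, not Meson's actual implementation (function name and layout are illustrative):

    # Hypothetical sketch of a data-only pkg-config writer, mirroring the
    # dataonly branch above: no prefix/libdir/includedir and no Cflags/Libs.
    def write_dataonly_pc(path, name, description, version, variables=None):
        with open(path, 'w', encoding='utf-8') as ofile:
            for k, v in (variables or {}).items():
                ofile.write('{}={}\n'.format(k, v))
            ofile.write('\n')
            ofile.write('Name: {}\n'.format(name))
            ofile.write('Description: {}\n'.format(description))
            ofile.write('Version: {}\n'.format(version))

    # write_dataonly_pc('libhello_nolib.pc', 'libhello_nolib',
    #                   'A minimalistic pkgconfig file.', '1.0')
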
diff --git a/mesonbuild/modules/python.py b/mesonbuild/modules/python.py
index 6644fd2..3f971f7 100644
--- a/mesonbuild/modules/python.py
+++ b/mesonbuild/modules/python.py
@@ -348,10 +348,14 @@ class PythonInstallation(ExternalProgramHolder):
return self.interpreter.func_shared_module(None, args, kwargs)
- @noPosargs
@permittedKwargs(permitted_kwargs['dependency'])
@FeatureNewKwargs('python_installation.dependency', '0.53.0', ['embed'])
def dependency_method(self, args, kwargs):
+ if args:
+ mlog.warning('python_installation.dependency() does not take any '
+ 'positional arguments. It always returns a Python '
+ 'dependency. This will become an error in the future.',
+ location=self.interpreter.current_node)
dep = PythonDependency(self, self.interpreter.environment, kwargs)
return self.interpreter.holderify(dep)
diff --git a/mesonbuild/msubprojects.py b/mesonbuild/msubprojects.py
index d6f0715..b1c79b8 100755
--- a/mesonbuild/msubprojects.py
+++ b/mesonbuild/msubprojects.py
@@ -60,7 +60,7 @@ def update_git(wrap, repo_dir, options):
git_output(['fetch'], repo_dir)
git_output(['checkout', revision], repo_dir)
except subprocess.CalledProcessError as e:
- out = e.output.decode().strip()
+ out = e.output.strip()
mlog.log(' -> Could not checkout revision', mlog.cyan(revision))
mlog.log(mlog.red(out))
mlog.log(mlog.red(str(e)))
@@ -70,7 +70,7 @@ def update_git(wrap, repo_dir, options):
# We are in the same branch, pull latest commits
git_output(['-c', 'rebase.autoStash=true', 'pull', '--rebase'], repo_dir)
except subprocess.CalledProcessError as e:
- out = e.output.decode().strip()
+ out = e.output.strip()
mlog.log(' -> Could not rebase', mlog.bold(repo_dir), 'please fix and try again.')
mlog.log(mlog.red(out))
mlog.log(mlog.red(str(e)))
@@ -83,7 +83,7 @@ def update_git(wrap, repo_dir, options):
git_output(['fetch'], repo_dir)
git_output(['-c', 'rebase.autoStash=true', 'rebase', revision], repo_dir)
except subprocess.CalledProcessError as e:
- out = e.output.decode().strip()
+ out = e.output.strip()
mlog.log(' -> Could not rebase', mlog.bold(repo_dir), 'please fix and try again.')
mlog.log(mlog.red(out))
mlog.log(mlog.red(str(e)))
@@ -153,7 +153,7 @@ def checkout(wrap, repo_dir, options):
git_output(cmd, repo_dir)
git_show(repo_dir)
except subprocess.CalledProcessError as e:
- out = e.output.decode().strip()
+ out = e.output.strip()
mlog.log(' -> ', mlog.red(out))
def download(wrap, repo_dir, options):
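
The .decode() calls go away because the output captured from git is presumably already text. A minimal sketch of that pattern (assumed; not the actual git_output helper):

    import subprocess

    def git_output(cmd, workingdir):
        # With universal_newlines=True, check_output returns str, and on
        # failure CalledProcessError.output is also str, so no .decode().
        return subprocess.check_output(['git', '-C', workingdir] + cmd,
                                       stderr=subprocess.STDOUT,
                                       universal_newlines=True)

    # try:
    #     git_output(['checkout', 'some-revision'], '/path/to/repo')
    # except subprocess.CalledProcessError as e:
    #     print(e.output.strip())
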
diff --git a/mesonbuild/scripts/depfixer.py b/mesonbuild/scripts/depfixer.py
index 5be7d40..5ba3a97 100644
--- a/mesonbuild/scripts/depfixer.py
+++ b/mesonbuild/scripts/depfixer.py
@@ -26,6 +26,9 @@ DT_STRTAB = 5
DT_SONAME = 14
DT_MIPS_RLD_MAP_REL = 1879048245
+# Global cache for tools
+INSTALL_NAME_TOOL = False
+
class DataSizes:
def __init__(self, ptrsize, is_le):
if is_le:
@@ -428,11 +431,11 @@ def fix_jar(fname):
subprocess.check_call(['jar', 'ufm', fname, 'META-INF/MANIFEST.MF'])
def fix_rpath(fname, new_rpath, final_path, install_name_mappings, verbose=True):
- # Static libraries never have rpaths
- if fname.endswith('.a'):
- return
- # DLLs and EXE never have rpaths
- if fname.endswith('.dll') or fname.endswith('.exe'):
+ global INSTALL_NAME_TOOL
+    # Static libraries, import libraries, debug information, headers, etc.
+    # never have rpaths.
+    # DLLs and EXEs currently do not need runtime path fixing.
+ if fname.endswith(('.a', '.lib', '.pdb', '.h', '.hpp', '.dll', '.exe')):
return
try:
if fname.endswith('.jar'):
@@ -445,5 +448,11 @@ def fix_rpath(fname, new_rpath, final_path, install_name_mappings, verbose=True)
pass
else:
raise
- if shutil.which('install_name_tool'):
+ # We don't look for this on import because it will do a useless PATH lookup
+ # on non-mac platforms. That can be expensive on some Windows machines
+    # (up to 30ms), which is significant with --only-changed. For details, see:
+ # https://github.com/mesonbuild/meson/pull/6612#discussion_r378581401
+ if INSTALL_NAME_TOOL is False:
+ INSTALL_NAME_TOOL = shutil.which('install_name_tool')
+ if INSTALL_NAME_TOOL:
fix_darwin(fname, new_rpath, final_path, install_name_mappings)
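
Caching the shutil.which() result in a module-level variable avoids repeating the PATH lookup for every installed file. The same effect can be sketched with functools.lru_cache (an illustrative alternative, not the code above):

    import functools
    import shutil

    @functools.lru_cache(maxsize=None)
    def find_install_name_tool():
        # The PATH search runs only once; later calls return the cached
        # result (a path string, or None when the tool is not installed).
        return shutil.which('install_name_tool')

    # if find_install_name_tool():
    #     ...run install_name_tool-based rpath fixups...
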
diff --git a/run_project_tests.py b/run_project_tests.py
index 9965bc3..1194abf 100755
--- a/run_project_tests.py
+++ b/run_project_tests.py
@@ -65,17 +65,30 @@ class BuildStep(Enum):
class TestResult:
- def __init__(self, msg, step, stdo, stde, mlog, cicmds, conftime=0, buildtime=0, testtime=0):
- self.msg = msg
- self.step = step
- self.stdo = stdo
- self.stde = stde
- self.mlog = mlog
+ def __init__(self, cicmds):
+ self.msg = '' # empty msg indicates test success
+ self.stdo = ''
+ self.stde = ''
+ self.mlog = ''
self.cicmds = cicmds
- self.conftime = conftime
- self.buildtime = buildtime
- self.testtime = testtime
+ self.conftime = 0
+ self.buildtime = 0
+ self.testtime = 0
+ def add_step(self, step, stdo, stde, mlog='', time=0):
+ self.step = step
+ self.stdo += stdo
+ self.stde += stde
+ self.mlog += mlog
+ if step == BuildStep.configure:
+ self.conftime = time
+ elif step == BuildStep.build:
+ self.buildtime = time
+ elif step == BuildStep.test:
+ self.testtime = time
+
+ def fail(self, msg):
+ self.msg = msg
@functools.total_ordering
class TestDef:
@@ -123,6 +136,7 @@ do_debug = under_ci or print_debug
no_meson_log_msg = 'No meson-log.txt found.'
system_compiler = None
+compiler_id_map = {} # type: T.Dict[str, str]
class StopException(Exception):
def __init__(self):
@@ -230,14 +244,10 @@ def validate_install(srcdir: str, installdir: Path, compiler, env) -> str:
# List of installed files
info_file = Path(srcdir) / 'installed_files.txt'
installdir = Path(installdir)
- # If this exists, the test does not install any other files
- noinst_file = Path('usr/no-installed-files')
expected = {} # type: T.Dict[Path, bool]
ret_msg = ''
# Generate list of expected files
- if (installdir / noinst_file).is_file():
- expected[noinst_file] = False
- elif info_file.is_file():
+ if info_file.is_file():
with info_file.open() as f:
for line in f:
line = platform_fix_name(line.strip(), compiler, env)
@@ -434,16 +444,20 @@ def _run_test(testdir, test_build_dir, install_dir, extra_args, compiler, backen
except Exception:
mesonlog = no_meson_log_msg
cicmds = run_ci_commands(mesonlog)
- gen_time = time.time() - gen_start
+ testresult = TestResult(cicmds)
+ testresult.add_step(BuildStep.configure, stdo, stde, mesonlog, time.time() - gen_start)
if should_fail == 'meson':
if returncode == 1:
- return TestResult('', BuildStep.configure, stdo, stde, mesonlog, cicmds, gen_time)
+ return testresult
elif returncode != 0:
- return TestResult('Test exited with unexpected status {}'.format(returncode), BuildStep.configure, stdo, stde, mesonlog, cicmds, gen_time)
+ testresult.fail('Test exited with unexpected status {}.'.format(returncode))
+ return testresult
else:
- return TestResult('Test that should have failed succeeded', BuildStep.configure, stdo, stde, mesonlog, cicmds, gen_time)
+ testresult.fail('Test that should have failed succeeded.')
+ return testresult
if returncode != 0:
- return TestResult('Generating the build system failed.', BuildStep.configure, stdo, stde, mesonlog, cicmds, gen_time)
+ testresult.fail('Generating the build system failed.')
+ return testresult
builddata = build.load(test_build_dir)
# Touch the meson.build file to force a regenerate so we can test that
# regeneration works before a build is run.
@@ -453,15 +467,15 @@ def _run_test(testdir, test_build_dir, install_dir, extra_args, compiler, backen
dir_args = get_backend_args_for_dir(backend, test_build_dir)
build_start = time.time()
pc, o, e = Popen_safe(compile_commands + dir_args, cwd=test_build_dir)
- build_time = time.time() - build_start
- stdo += o
- stde += e
+ testresult.add_step(BuildStep.build, o, e, '', time.time() - build_start)
if should_fail == 'build':
if pc.returncode != 0:
- return TestResult('', BuildStep.build, stdo, stde, mesonlog, cicmds, gen_time)
- return TestResult('Test that should have failed to build succeeded', BuildStep.build, stdo, stde, mesonlog, cicmds, gen_time)
+ return testresult
+ testresult.fail('Test that should have failed to build succeeded.')
+ return testresult
if pc.returncode != 0:
- return TestResult('Compiling source code failed.', BuildStep.build, stdo, stde, mesonlog, cicmds, gen_time, build_time)
+ testresult.fail('Compiling source code failed.')
+ return testresult
# Touch the meson.build file to force a regenerate so we can test that
# regeneration works after a build is complete.
ensure_backend_detects_changes(backend)
@@ -469,37 +483,44 @@ def _run_test(testdir, test_build_dir, install_dir, extra_args, compiler, backen
test_start = time.time()
# Test in-process
(returncode, tstdo, tstde, test_log) = run_test_inprocess(test_build_dir)
- test_time = time.time() - test_start
- stdo += tstdo
- stde += tstde
- mesonlog += test_log
+ testresult.add_step(BuildStep.test, tstdo, tstde, test_log, time.time() - test_start)
if should_fail == 'test':
if returncode != 0:
- return TestResult('', BuildStep.test, stdo, stde, mesonlog, cicmds, gen_time)
- return TestResult('Test that should have failed to run unit tests succeeded', BuildStep.test, stdo, stde, mesonlog, cicmds, gen_time)
+ return testresult
+ testresult.fail('Test that should have failed to run unit tests succeeded.')
+ return testresult
if returncode != 0:
- return TestResult('Running unit tests failed.', BuildStep.test, stdo, stde, mesonlog, cicmds, gen_time, build_time, test_time)
+ testresult.fail('Running unit tests failed.')
+ return testresult
# Do installation, if the backend supports it
if install_commands:
env = os.environ.copy()
env['DESTDIR'] = install_dir
# Install with subprocess
pi, o, e = Popen_safe(install_commands, cwd=test_build_dir, env=env)
- stdo += o
- stde += e
+ testresult.add_step(BuildStep.install, o, e)
if pi.returncode != 0:
- return TestResult('Running install failed.', BuildStep.install, stdo, stde, mesonlog, cicmds, gen_time, build_time, test_time)
+ testresult.fail('Running install failed.')
+ return testresult
+
# Clean with subprocess
env = os.environ.copy()
pi, o, e = Popen_safe(clean_commands + dir_args, cwd=test_build_dir, env=env)
- stdo += o
- stde += e
+ testresult.add_step(BuildStep.clean, o, e)
if pi.returncode != 0:
- return TestResult('Running clean failed.', BuildStep.clean, stdo, stde, mesonlog, cicmds, gen_time, build_time, test_time)
+ testresult.fail('Running clean failed.')
+ return testresult
+
+ # Validate installed files
+ testresult.add_step(BuildStep.install, '', '')
if not install_commands:
- return TestResult('', BuildStep.install, '', '', mesonlog, cicmds, gen_time, build_time, test_time)
- return TestResult(validate_install(testdir, install_dir, compiler, builddata.environment),
- BuildStep.validate, stdo, stde, mesonlog, cicmds, gen_time, build_time, test_time)
+ return testresult
+ install_msg = validate_install(testdir, install_dir, compiler, builddata.environment)
+ if install_msg:
+ testresult.fail(install_msg)
+ return testresult
+
+ return testresult
def gather_tests(testdir: Path) -> T.Iterator[TestDef]:
tests = [t.name for t in testdir.glob('*') if t.is_dir()]
@@ -524,12 +545,24 @@ def gather_tests(testdir: Path) -> T.Iterator[TestDef]:
assert "val" in i
skip = False
+ # Add an empty matrix entry
+ if i['val'] is None:
+ tmp_opts += [(None, False)]
+ continue
+
# Skip the matrix entry if environment variable is present
if 'skip_on_env' in i:
for env in i['skip_on_env']:
if env in os.environ:
skip = True
+            # Only run the test if all compiler IDs match
+ if 'compilers' in i:
+ for lang, id_list in i['compilers'].items():
+ if lang not in compiler_id_map or compiler_id_map[lang] not in id_list:
+ skip = True
+ break
+
tmp_opts += [('{}={}'.format(key, i['val']), skip)]
if opt_list:
@@ -541,9 +574,27 @@ def gather_tests(testdir: Path) -> T.Iterator[TestDef]:
else:
opt_list = [[x] for x in tmp_opts]
+ # Exclude specific configurations
+ if 'exclude' in matrix:
+ assert isinstance(matrix['exclude'], list)
+ new_opt_list = [] # type: T.List[T.List[T.Tuple[str, bool]]]
+ for i in opt_list:
+ exclude = False
+ opt_names = [x[0] for x in i]
+ for j in matrix['exclude']:
+ ex_list = ['{}={}'.format(k, v) for k, v in j.items()]
+ if all([x in opt_names for x in ex_list]):
+ exclude = True
+ break
+
+ if not exclude:
+ new_opt_list += [i]
+
+ opt_list = new_opt_list
+
for i in opt_list:
- name = ' '.join([x[0] for x in i])
- opts = ['-D' + x[0] for x in i]
+ name = ' '.join([x[0] for x in i if x[0] is not None])
+ opts = ['-D' + x[0] for x in i if x[0] is not None]
skip = any([x[1] for x in i])
all_tests += [TestDef(t.path, name, opts, skip)]
@@ -971,7 +1022,7 @@ def check_meson_commands_work(options):
def detect_system_compiler(options):
- global system_compiler
+ global system_compiler, compiler_id_map
with AutoDeletedDir(tempfile.mkdtemp(prefix='b ', dir='.')) as build_dir:
fake_opts = get_fake_options('/')
@@ -982,7 +1033,8 @@ def detect_system_compiler(options):
for lang in sorted(compilers.all_languages):
try:
comp = env.compiler_from_language(lang, MachineChoice.HOST)
- details = '%s %s' % (' '.join(comp.get_exelist()), comp.get_version_string())
+ details = '{} {} [{}]'.format(' '.join(comp.get_exelist()), comp.get_version_string(), comp.get_id())
+ compiler_id_map[lang] = comp.get_id()
except mesonlib.MesonException:
comp = None
details = 'not found'
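
The TestResult refactor above replaces the long positional constructor with an object that accumulates per-step output and timings; an empty msg still means the test passed. A rough usage sketch based on the methods shown (assumes it is run from the Meson source root so run_project_tests is importable):

    from run_project_tests import TestResult, BuildStep

    result = TestResult(cicmds=[])
    result.add_step(BuildStep.configure, 'configure stdout', '', mlog='', time=1.2)
    result.add_step(BuildStep.build, 'build stdout', '', time=3.4)

    step_failed = False  # placeholder; a real driver checks return codes
    if step_failed:
        result.fail('Compiling source code failed.')

    test_passed = (result.msg == '')  # empty msg means success
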
diff --git a/run_unittests.py b/run_unittests.py
index af2ca9d..aa27a1d 100755
--- a/run_unittests.py
+++ b/run_unittests.py
@@ -4964,6 +4964,11 @@ class DarwinTests(BasePlatformTests):
self.build()
self.install()
+ def test_removing_unused_linker_args(self):
+ testdir = os.path.join(self.common_test_dir, '108 has arg')
+ env = {'CFLAGS': '-L/tmp -L /var/tmp -headerpad_max_install_names -Wl,-export_dynamic'}
+ self.init(testdir, override_envvars=env)
+
@unittest.skipUnless(not is_windows(), "requires something Unix-like")
class LinuxlikeTests(BasePlatformTests):
@@ -5037,6 +5042,11 @@ class LinuxlikeTests(BasePlatformTests):
self.assertEqual(foo_dep.get_pkgconfig_variable('foo', {}), 'bar')
self.assertPathEqual(foo_dep.get_pkgconfig_variable('datadir', {}), '/usr/data')
+ libhello_nolib = PkgConfigDependency('libhello_nolib', env, kwargs)
+ self.assertTrue(libhello_nolib.found())
+ self.assertEqual(libhello_nolib.get_link_args(), [])
+ self.assertEqual(libhello_nolib.get_compile_args(), [])
+
def test_pkgconfig_gen_deps(self):
'''
Test that generated pkg-config files correctly handle dependencies
@@ -6007,7 +6017,6 @@ class LinuxlikeTests(BasePlatformTests):
'''
testdir = os.path.join(self.common_test_dir, testdir)
subdir = os.path.join(testdir, subdir_path)
- curdir = os.getcwd()
with chdir(subdir):
# Can't distribute broken symlinks in the source tree because it breaks
# the creation of zipapps. Create it dynamically and run the test by
@@ -6124,30 +6133,30 @@ c = ['{0}']
self.assertEqual(comp.linker.id, expected)
def test_ld_environment_variable_bfd(self):
- self._check_ld('ld.bfd', 'bfd', 'c', 'GNU ld.bfd')
+ self._check_ld('ld.bfd', 'bfd', 'c', 'ld.bfd')
def test_ld_environment_variable_gold(self):
- self._check_ld('ld.gold', 'gold', 'c', 'GNU ld.gold')
+ self._check_ld('ld.gold', 'gold', 'c', 'ld.gold')
def test_ld_environment_variable_lld(self):
- self._check_ld('ld.lld', 'lld', 'c', 'lld')
+ self._check_ld('ld.lld', 'lld', 'c', 'ld.lld')
@skipIfNoExecutable('rustc')
def test_ld_environment_variable_rust(self):
- self._check_ld('ld.gold', 'gold', 'rust', 'GNU ld.gold')
+ self._check_ld('ld.gold', 'gold', 'rust', 'ld.gold')
def test_ld_environment_variable_cpp(self):
- self._check_ld('ld.gold', 'gold', 'cpp', 'GNU ld.gold')
+ self._check_ld('ld.gold', 'gold', 'cpp', 'ld.gold')
def test_ld_environment_variable_objc(self):
- self._check_ld('ld.gold', 'gold', 'objc', 'GNU ld.gold')
+ self._check_ld('ld.gold', 'gold', 'objc', 'ld.gold')
def test_ld_environment_variable_objcpp(self):
- self._check_ld('ld.gold', 'gold', 'objcpp', 'GNU ld.gold')
+ self._check_ld('ld.gold', 'gold', 'objcpp', 'ld.gold')
@skipIfNoExecutable('gfortran')
def test_ld_environment_variable_fortran(self):
- self._check_ld('ld.gold', 'gold', 'fortran', 'GNU ld.gold')
+ self._check_ld('ld.gold', 'gold', 'fortran', 'ld.gold')
def compute_sha256(self, filename):
with open(filename, 'rb') as f:
diff --git a/setup.py b/setup.py
index b816b80..1f95be7 100644
--- a/setup.py
+++ b/setup.py
@@ -39,7 +39,7 @@ packages = ['mesonbuild',
'mesonbuild.wrap']
package_data = {
'mesonbuild.dependencies': ['data/CMakeLists.txt', 'data/CMakeListsLLVM.txt', 'data/CMakePathInfo.txt'],
- 'mesonbuild.cmake': ['data/run_ctgt.py'],
+ 'mesonbuild.cmake': ['data/run_ctgt.py', 'data/preload.cmake'],
}
data_files = []
if sys.platform != 'win32':
diff --git a/test cases/cmake/1 basic/meson.build b/test cases/cmake/1 basic/meson.build
index 8e1671a..19c87c4 100644
--- a/test cases/cmake/1 basic/meson.build
+++ b/test cases/cmake/1 basic/meson.build
@@ -5,6 +5,7 @@ cm = import('cmake')
sub_pro = cm.subproject('cmMod')
sub_dep = sub_pro.dependency('cmModLib++')
+assert(sub_pro.found(), 'found() method reports not found, but should be found')
assert(sub_pro.target_list() == ['cmModLib++'], 'There should be exactly one target')
assert(sub_pro.target_type('cmModLib++') == 'shared_library', 'Target type should be shared_library')
diff --git a/test cases/cmake/8 custom command/subprojects/cmMod/CMakeLists.txt b/test cases/cmake/8 custom command/subprojects/cmMod/CMakeLists.txt
index 776ce52..c7797db 100644
--- a/test cases/cmake/8 custom command/subprojects/cmMod/CMakeLists.txt
+++ b/test cases/cmake/8 custom command/subprojects/cmMod/CMakeLists.txt
@@ -22,7 +22,7 @@ add_custom_command(
COMMAND mycpy cpyBase.cpp.in cpyBase.cpp.something
COMMAND mycpy cpyBase.cpp.something cpyBase.cpp.IAmRunningOutOfIdeas
COMMAND mycpy cpyBase.cpp.IAmRunningOutOfIdeas cpyBase.cpp
- DEPENDS cpyBase.cpp.am gen
+ DEPENDS cpyBase.cpp.am;gen
)
add_custom_command(
@@ -89,7 +89,29 @@ add_custom_command(
WORKING_DIRECTORY "${CMAKE_CURRENT_BINARY_DIR}/s2_a_cpp"
)
-add_library(cmModLib SHARED cmMod.cpp genTest.cpp cpyBase.cpp cpyBase.hpp cpyNext.cpp cpyNext.hpp)
+# cpyTest (copy file without renaming)
+add_custom_command(
+ OUTPUT "${CMAKE_CURRENT_BINARY_DIR}/cpyTest.hpp"
+ COMMAND mycpy "${CMAKE_CURRENT_SOURCE_DIR}/cpyTest/cpyTest.hpp" "${CMAKE_CURRENT_BINARY_DIR}/cpyTest.hpp"
+ DEPENDS "cpyTest/cpyTest.hpp"
+)
+
+add_custom_command(
+ OUTPUT "${CMAKE_CURRENT_BINARY_DIR}/cpyTest2.hpp"
+ COMMAND mycpy "${CMAKE_CURRENT_SOURCE_DIR}/cpyTest/cpyTest2.hpp" "${CMAKE_CURRENT_BINARY_DIR}/cpyTest2.hpp"
+ DEPENDS "${CMAKE_CURRENT_SOURCE_DIR}/cpyTest/cpyTest2.hpp"
+)
+
+add_custom_command(
+ OUTPUT "${CMAKE_CURRENT_BINARY_DIR}/cpyTest3.hpp"
+ COMMAND mycpy cpyTest3.hpp "${CMAKE_CURRENT_BINARY_DIR}/cpyTest3.hpp"
+ DEPENDS "${CMAKE_CURRENT_SOURCE_DIR}/cpyTest/cpyTest3.hpp"
+ WORKING_DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}/cpyTest"
+)
+
+add_subdirectory(cpyTest ccppyyTTeesstt)
+
+add_library(cmModLib SHARED cmMod.cpp genTest.cpp cpyBase.cpp cpyBase.hpp cpyNext.cpp cpyNext.hpp cpyTest.cpp cpyTest.hpp cpyTest2.hpp cpyTest3.hpp)
include(GenerateExportHeader)
generate_export_header(cmModLib)
@@ -99,9 +121,9 @@ set(ARGS_TEST ${ARGS_TEST} arg2)
add_executable(macro_name macro_name.cpp)
add_executable(args_test args_test.cpp)
add_custom_target(args_test_cmd
- COMMAND args_test ARGS ${ARGS_TEST}
+ COMMAND args_test ${ARGS_TEST}
)
add_custom_target(macro_name_cmd COMMAND macro_name)
-add_dependencies(cmModLib args_test_cmd)
-add_dependencies(args_test_cmd macro_name_cmd)
+add_dependencies(cmModLib args_test_cmd tgtCpyTest4)
+add_dependencies(args_test_cmd macro_name_cmd;gen;mycpy)
diff --git a/test cases/cmake/8 custom command/subprojects/cmMod/cmMod.cpp b/test cases/cmake/8 custom command/subprojects/cmMod/cmMod.cpp
index e6236e4..e4d5318 100644
--- a/test cases/cmake/8 custom command/subprojects/cmMod/cmMod.cpp
+++ b/test cases/cmake/8 custom command/subprojects/cmMod/cmMod.cpp
@@ -2,6 +2,7 @@
#include "genTest.hpp"
#include "cpyBase.hpp"
#include "cpyNext.hpp"
+#include "cpyTest.hpp"
#include "cmModLib.hpp"
#ifndef FOO
@@ -19,5 +20,5 @@ string cmModClass::getStr() const {
}
string cmModClass::getOther() const {
- return "Srings:\n - " + getStrCpy() + "\n - " + getStrNext();
+ return "Srings:\n - " + getStrCpy() + "\n - " + getStrNext() + "\n - " + getStrCpyTest();
}
diff --git a/test cases/cmake/8 custom command/subprojects/cmMod/cpyTest.cpp b/test cases/cmake/8 custom command/subprojects/cmMod/cpyTest.cpp
new file mode 100644
index 0000000..f762251
--- /dev/null
+++ b/test cases/cmake/8 custom command/subprojects/cmMod/cpyTest.cpp
@@ -0,0 +1,8 @@
+#include "cpyTest.hpp"
+#include "cpyTest2.hpp"
+#include "cpyTest3.hpp"
+#include "ccppyyTTeesstt/cpyTest4.hpp"
+
+std::string getStrCpyTest() {
+ return CPY_TEST_STR_2 CPY_TEST_STR_3 CPY_TEST_STR_4;
+}
diff --git a/test cases/cmake/8 custom command/subprojects/cmMod/cpyTest/CMakeLists.txt b/test cases/cmake/8 custom command/subprojects/cmMod/cpyTest/CMakeLists.txt
new file mode 100644
index 0000000..f577dcf
--- /dev/null
+++ b/test cases/cmake/8 custom command/subprojects/cmMod/cpyTest/CMakeLists.txt
@@ -0,0 +1,7 @@
+add_custom_command(
+ OUTPUT cpyTest4.hpp
+ COMMAND mycpy "${CMAKE_CURRENT_SOURCE_DIR}/cpyTest4.hpp" cpyTest4.hpp
+ DEPENDS cpyTest4.hpp
+)
+
+add_custom_target(tgtCpyTest4 DEPENDS "${CMAKE_CURRENT_BINARY_DIR}/cpyTest4.hpp")
diff --git a/test cases/cmake/8 custom command/subprojects/cmMod/cpyTest/cpyTest.hpp b/test cases/cmake/8 custom command/subprojects/cmMod/cpyTest/cpyTest.hpp
new file mode 100644
index 0000000..e8dec13
--- /dev/null
+++ b/test cases/cmake/8 custom command/subprojects/cmMod/cpyTest/cpyTest.hpp
@@ -0,0 +1,5 @@
+#pragma once
+
+#include <string>
+
+std::string getStrCpyTest();
diff --git a/test cases/cmake/8 custom command/subprojects/cmMod/cpyTest/cpyTest2.hpp b/test cases/cmake/8 custom command/subprojects/cmMod/cpyTest/cpyTest2.hpp
new file mode 100644
index 0000000..bdbcc56
--- /dev/null
+++ b/test cases/cmake/8 custom command/subprojects/cmMod/cpyTest/cpyTest2.hpp
@@ -0,0 +1,3 @@
+#pragma once
+
+#define CPY_TEST_STR_2 "Hello "
diff --git a/test cases/cmake/8 custom command/subprojects/cmMod/cpyTest/cpyTest3.hpp b/test cases/cmake/8 custom command/subprojects/cmMod/cpyTest/cpyTest3.hpp
new file mode 100644
index 0000000..2d13376
--- /dev/null
+++ b/test cases/cmake/8 custom command/subprojects/cmMod/cpyTest/cpyTest3.hpp
@@ -0,0 +1,3 @@
+#pragma once
+
+#define CPY_TEST_STR_3 "CopyFile"
diff --git a/test cases/cmake/8 custom command/subprojects/cmMod/cpyTest/cpyTest4.hpp b/test cases/cmake/8 custom command/subprojects/cmMod/cpyTest/cpyTest4.hpp
new file mode 100644
index 0000000..4124c43
--- /dev/null
+++ b/test cases/cmake/8 custom command/subprojects/cmMod/cpyTest/cpyTest4.hpp
@@ -0,0 +1,3 @@
+#pragma once
+
+#define CPY_TEST_STR_4 " test"
diff --git a/test cases/cmake/9 disabled subproject/meson.build b/test cases/cmake/9 disabled subproject/meson.build
index ba38410..c153fa3 100644
--- a/test cases/cmake/9 disabled subproject/meson.build
+++ b/test cases/cmake/9 disabled subproject/meson.build
@@ -2,4 +2,5 @@ project('cmakeSubTest', ['c', 'cpp'])
cm = import('cmake')
-sub_pro = cm.subproject('nothinig', required: false) \ No newline at end of file
+sub_pro = cm.subproject('nothinig', required: false)
+assert(not sub_pro.found(), 'subproject found() reports wrong value')
diff --git a/test cases/common/47 pkgconfig-gen/installed_files.txt b/test cases/common/47 pkgconfig-gen/installed_files.txt
index 94de704..9e1a40a 100644
--- a/test cases/common/47 pkgconfig-gen/installed_files.txt
+++ b/test cases/common/47 pkgconfig-gen/installed_files.txt
@@ -2,3 +2,4 @@ usr/include/simple.h
usr/lib/pkgconfig/simple.pc
usr/lib/pkgconfig/libfoo.pc
usr/lib/pkgconfig/libhello.pc
+usr/lib/pkgconfig/libhello_nolib.pc \ No newline at end of file
diff --git a/test cases/common/47 pkgconfig-gen/meson.build b/test cases/common/47 pkgconfig-gen/meson.build
index 09c46c5..c251b9f 100644
--- a/test cases/common/47 pkgconfig-gen/meson.build
+++ b/test cases/common/47 pkgconfig-gen/meson.build
@@ -51,3 +51,10 @@ pkgg.generate(
description : 'A minimalistic pkgconfig file.',
version : libver,
)
+
+pkgg.generate(
+ name : 'libhello_nolib',
+ description : 'A minimalistic pkgconfig file.',
+ version : libver,
+ dataonly: true
+)
diff --git a/test cases/common/56 install script/no-installed-files b/test cases/common/56 install script/no-installed-files
deleted file mode 100644
index e69de29..0000000
--- a/test cases/common/56 install script/no-installed-files
+++ /dev/null
diff --git a/test cases/failing build/4 cmake subproject isolation/incDir/fileA.hpp b/test cases/failing build/4 cmake subproject isolation/incDir/fileA.hpp
new file mode 100644
index 0000000..a5f09be
--- /dev/null
+++ b/test cases/failing build/4 cmake subproject isolation/incDir/fileA.hpp
@@ -0,0 +1,3 @@
+#pragma once
+
+#define SOME_DEFINE " World"
diff --git a/test cases/failing build/4 cmake subproject isolation/main.cpp b/test cases/failing build/4 cmake subproject isolation/main.cpp
new file mode 100644
index 0000000..9507961
--- /dev/null
+++ b/test cases/failing build/4 cmake subproject isolation/main.cpp
@@ -0,0 +1,10 @@
+#include <iostream>
+#include <cmMod.hpp>
+
+using namespace std;
+
+int main(void) {
+ cmModClass obj("Hello");
+ cout << obj.getStr() << endl;
+ return 0;
+}
diff --git a/test cases/failing build/4 cmake subproject isolation/meson.build b/test cases/failing build/4 cmake subproject isolation/meson.build
new file mode 100644
index 0000000..e606335
--- /dev/null
+++ b/test cases/failing build/4 cmake subproject isolation/meson.build
@@ -0,0 +1,17 @@
+project('subproject isolation', ['c', 'cpp'])
+
+if not find_program('cmake', required: false).found()
+ error('MESON_SKIP_TEST CMake is not installed')
+endif
+
+incdir = meson.source_root() / 'incDir'
+
+cm = import('cmake')
+
+# This should generate a warning and the include dir should be skipped.
+sub_pro = cm.subproject('cmMod', cmake_options : [ '-DMESON_INC_DIR=' + incdir ])
+sub_dep = sub_pro.dependency('cmModLib++')
+
+# Since the include dir is skipped, the compilation of this project should fail.
+exe1 = executable('main', ['main.cpp'], dependencies: [sub_dep])
+test('test1', exe1)
diff --git a/test cases/failing build/4 cmake subproject isolation/subprojects/cmMod/CMakeLists.txt b/test cases/failing build/4 cmake subproject isolation/subprojects/cmMod/CMakeLists.txt
new file mode 100644
index 0000000..852dd09
--- /dev/null
+++ b/test cases/failing build/4 cmake subproject isolation/subprojects/cmMod/CMakeLists.txt
@@ -0,0 +1,10 @@
+cmake_minimum_required(VERSION 3.5)
+
+project(cmMod)
+set (CMAKE_CXX_STANDARD 14)
+
+include_directories(${CMAKE_CURRENT_BINARY_DIR} ${MESON_INC_DIR})
+
+add_library(cmModLib++ SHARED cmMod.cpp)
+include(GenerateExportHeader)
+generate_export_header(cmModLib++)
diff --git a/test cases/failing build/4 cmake subproject isolation/subprojects/cmMod/cmMod.cpp b/test cases/failing build/4 cmake subproject isolation/subprojects/cmMod/cmMod.cpp
new file mode 100644
index 0000000..a668203
--- /dev/null
+++ b/test cases/failing build/4 cmake subproject isolation/subprojects/cmMod/cmMod.cpp
@@ -0,0 +1,12 @@
+#include "cmMod.hpp"
+#include "fileA.hpp"
+
+using namespace std;
+
+cmModClass::cmModClass(string foo) {
+ str = foo + SOME_DEFINE;
+}
+
+string cmModClass::getStr() const {
+ return str;
+}
diff --git a/test cases/failing build/4 cmake subproject isolation/subprojects/cmMod/cmMod.hpp b/test cases/failing build/4 cmake subproject isolation/subprojects/cmMod/cmMod.hpp
new file mode 100644
index 0000000..0e6dc04
--- /dev/null
+++ b/test cases/failing build/4 cmake subproject isolation/subprojects/cmMod/cmMod.hpp
@@ -0,0 +1,14 @@
+#pragma once
+
+#include "cmmodlib++_export.h"
+#include <string>
+
+class CMMODLIB___EXPORT cmModClass {
+private:
+ std::string str;
+
+public:
+ cmModClass(std::string foo);
+
+ std::string getStr() const;
+};
diff --git a/test cases/frameworks/1 boost/meson.build b/test cases/frameworks/1 boost/meson.build
index eec8728..ccfaa66 100644
--- a/test cases/frameworks/1 boost/meson.build
+++ b/test cases/frameworks/1 boost/meson.build
@@ -1,31 +1,25 @@
# this test requires the following on Ubuntu: libboost-{system,python,log,thread,test}-dev
project('boosttest', 'cpp',
- default_options : ['cpp_std=c++11'])
+ default_options : ['cpp_std=c++14'])
-add_project_arguments(['-DBOOST_LOG_DYN_LINK'],
- language : 'cpp'
-)
+s = get_option('static')
-dep = dependency('boost', required: false)
+dep = dependency('boost', static: s, required: false)
if not dep.found()
error('MESON_SKIP_TEST boost not found.')
endif
-compiler = meson.get_compiler('cpp')
-if compiler.has_argument('-permissive')
- # boost 1.64, the version we test against, doesn't work with -permissive
- add_project_arguments('-permissive', language: 'cpp')
-endif
-
# We want to have multiple separate configurations of Boost
# within one project. They need to be independent of each other.
# Use one without a library dependency and one with it.
-linkdep = dependency('boost', modules : ['thread', 'system', 'test'])
-staticdep = dependency('boost', modules : ['thread', 'system'], static : true)
-testdep = dependency('boost', modules : ['unit_test_framework'])
-nomoddep = dependency('boost')
-extralibdep = dependency('boost', modules : ['thread', 'system', 'log_setup', 'log'])
+linkdep = dependency('boost', static: s, modules : ['thread', 'system'])
+testdep = dependency('boost', static: s, modules : ['unit_test_framework'])
+nomoddep = dependency('boost', static: s)
+extralibdep = dependency('boost', static: s, modules : ['thread', 'system', 'date_time', 'log_setup', 'log', 'filesystem', 'regex'])
+notfound = dependency('boost', static: s, modules : ['this_should_not_exist_on_any_systen'], required: false)
+
+assert(not notfound.found())
pymod = import('python')
python2 = pymod.find_installation('python2', required: host_machine.system() == 'linux', disabler: true)
@@ -34,28 +28,28 @@ python2dep = python2.dependency(required: host_machine.system() == 'linux', embe
python3dep = python3.dependency(required: host_machine.system() == 'linux', embed: true, disabler: true)
# compile python 2/3 modules only if we found a corresponding python version
-if(python2dep.found() and host_machine.system() == 'linux')
+if(python2dep.found() and host_machine.system() == 'linux' and not s)
if(dep.version().version_compare('>=1.67'))
# if we have a new version of boost, we need to construct the module name based
# on the installed version of python (and hope that they match the version boost
# was compiled against)
py2version_string = ''.join(python2dep.version().split('.'))
- bpython2dep = dependency('boost', modules : ['python' + py2version_string], required: false, disabler: true)
+ bpython2dep = dependency('boost', static: s, modules : ['python' + py2version_string], required: false, disabler: true)
else
# if we have an older version of boost, we need to use the old module names
- bpython2dep = dependency('boost', modules : ['python'], required: false, disabler: true)
+ bpython2dep = dependency('boost', static: s, modules : ['python'], required: false, disabler: true)
endif
else
python2dep = disabler()
bpython2dep = disabler()
endif
-if(python3dep.found() and host_machine.system() == 'linux')
+if(python3dep.found() and host_machine.system() == 'linux' and not s)
if(dep.version().version_compare('>=1.67'))
py3version_string = ''.join(python3dep.version().split('.'))
- bpython3dep = dependency('boost', modules : ['python' + py3version_string], required: false, disabler: true)
+ bpython3dep = dependency('boost', static: s, modules : ['python' + py3version_string], required: false, disabler: true)
else
- bpython3dep = dependency('boost', modules : ['python3'], required: false, disabler: true)
+ bpython3dep = dependency('boost', static: s, modules : ['python3'], required: false, disabler: true)
endif
else
python3dep = disabler()
@@ -63,7 +57,6 @@ else
endif
linkexe = executable('linkedexe', 'linkexe.cc', dependencies : linkdep)
-staticexe = executable('staticlinkedexe', 'linkexe.cc', dependencies : staticdep)
unitexe = executable('utf', 'unit_test.cpp', dependencies: testdep)
nomodexe = executable('nomod', 'nomod.cpp', dependencies : nomoddep)
extralibexe = executable('extralibexe', 'extralib.cpp', dependencies : extralibdep)
@@ -73,7 +66,6 @@ python2module = shared_library('python2_module', ['python_module.cpp'], dependen
python3module = shared_library('python3_module', ['python_module.cpp'], dependencies: [python3dep, bpython3dep], name_prefix: '', cpp_args: ['-DMOD_NAME=python3_module'])
test('Boost linktest', linkexe)
-test('Boost statictest', staticexe)
test('Boost UTF test', unitexe)
test('Boost nomod', nomodexe)
test('Boost extralib test', extralibexe)
@@ -87,4 +79,4 @@ test('Boost Python3', python3interpreter, args: ['./test_python_module.py', meso
subdir('partial_dep')
# check we can apply a version constraint
-dependency('boost', version: '>=@0@'.format(dep.version()))
+dependency('boost', static: s, version: '>=@0@'.format(dep.version()))
diff --git a/test cases/frameworks/1 boost/meson_options.txt b/test cases/frameworks/1 boost/meson_options.txt
new file mode 100644
index 0000000..019feaf
--- /dev/null
+++ b/test cases/frameworks/1 boost/meson_options.txt
@@ -0,0 +1 @@
+option('static', type: 'boolean', value: false)
diff --git a/test cases/frameworks/1 boost/test_matrix.json b/test cases/frameworks/1 boost/test_matrix.json
new file mode 100644
index 0000000..730610e
--- /dev/null
+++ b/test cases/frameworks/1 boost/test_matrix.json
@@ -0,0 +1,19 @@
+{
+ "options": {
+ "static": [
+ { "val": "true", "skip_on_env": [ "SKIP_STATIC_BOOST" ] },
+ { "val": "false" }
+ ],
+ "b_vscrt": [
+ { "val": null },
+ { "val": "md", "compilers": { "cpp": [ "msvc" ] } },
+ { "val": "mdd", "compilers": { "cpp": [ "msvc" ] } },
+ { "val": "mt", "compilers": { "cpp": [ "msvc" ] } },
+ { "val": "mtd", "compilers": { "cpp": [ "msvc" ] } }
+ ]
+ },
+ "exclude": [
+ { "static": "false", "b_vscrt": "mt" },
+ { "static": "false", "b_vscrt": "mtd" }
+ ]
+}
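
The exclude list above is applied after the option matrix is expanded: a combination is dropped when every key=val pair of an exclude entry appears in it, which is what the new gather_tests() code does. A standalone sketch of that filtering (simplified; the real code also tracks per-entry skip flags and compiler constraints):

    import itertools

    options = {
        'static': ['true', 'false'],
        'b_vscrt': [None, 'md', 'mdd', 'mt', 'mtd'],
    }
    exclude = [{'static': 'false', 'b_vscrt': 'mt'},
               {'static': 'false', 'b_vscrt': 'mtd'}]

    # Expand the matrix; None values act as "option not set", matching the
    # empty matrix entry handling above.
    combos = []
    for vals in itertools.product(*options.values()):
        combos.append(['{}={}'.format(k, v)
                       for k, v in zip(options.keys(), vals) if v is not None])

    def is_excluded(combo):
        return any(all('{}={}'.format(k, v) in combo for k, v in ex.items())
                   for ex in exclude)

    combos = [c for c in combos if not is_excluded(c)]
    # 'static=false' combined with 'b_vscrt=mt' or 'b_vscrt=mtd' is gone now.
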
diff --git a/test cases/frameworks/1 boost/unit_test.cpp b/test cases/frameworks/1 boost/unit_test.cpp
index 3505999..fa1fbaa 100644
--- a/test cases/frameworks/1 boost/unit_test.cpp
+++ b/test cases/frameworks/1 boost/unit_test.cpp
@@ -1,4 +1,3 @@
-#define BOOST_TEST_DYN_LINK
#define BOOST_TEST_MODULE "MesonTest"
#define BOOST_TEST_MAIN
#include <boost/test/unit_test.hpp>
diff --git a/tools/boost_names.py b/tools/boost_names.py
index af461d8..d26d34b 100755
--- a/tools/boost_names.py
+++ b/tools/boost_names.py
@@ -24,164 +24,249 @@ boost/$ path/to/meson/tools/boost_names.py >> path/to/meson/dependencies/misc.py
"""
import sys
-import os
-import collections
-import pprint
import json
import re
+import textwrap
+import functools
+import typing as T
+from pathlib import Path
+
+lib_dir = Path('libs')
+jamroot = Path('Jamroot')
+
+not_modules = ['config', 'disjoint_sets', 'headers']
+
+export_modules = False
+
+
+@functools.total_ordering
+class BoostLibrary():
+ def __init__(self, name: str, shared: T.List[str], static: T.List[str], single: T.List[str], multi: T.List[str]):
+ self.name = name
+ self.shared = shared
+ self.static = static
+ self.single = single
+ self.multi = multi
+
+ def __lt__(self, other: T.Any) -> T.Union[bool, 'NotImplemented']:
+ if isinstance(other, BoostLibrary):
+ return self.name < other.name
+ return NotImplemented
+
+ def __eq__(self, other: T.Any) -> T.Union[bool, 'NotImplemented']:
+ if isinstance(other, BoostLibrary):
+ return self.name == other.name
+ elif isinstance(other, str):
+ return self.name == other
+ return NotImplemented
+
+ def __hash__(self) -> int:
+ return hash(self.name)
+
+@functools.total_ordering
+class BoostModule():
+ def __init__(self, name: str, key: str, desc: str, libs: T.List[BoostLibrary]):
+ self.name = name
+ self.key = key
+ self.desc = desc
+ self.libs = libs
+
+ def __lt__(self, other: T.Any) -> T.Union[bool, 'NotImplemented']:
+ if isinstance(other, BoostModule):
+ return self.key < other.key
+ return NotImplemented
-Module = collections.namedtuple('Module', ['dirname', 'name', 'libnames'])
-Module.__repr__ = lambda self: str((self.dirname, self.name, self.libnames)) # type: ignore
-
-LIBS = 'libs'
-
-manual_map = {
- 'callable_traits': 'Call Traits',
- 'crc': 'CRC',
- 'dll': 'DLL',
- 'gil': 'GIL',
- 'graph_parallel': 'GraphParallel',
- 'icl': 'ICL',
- 'io': 'IO State Savers',
- 'msm': 'Meta State Machine',
- 'mpi': 'MPI',
- 'mpl': 'MPL',
- 'multi_array': 'Multi-Array',
- 'multi_index': 'Multi-Index',
- 'numeric': 'Numeric Conversion',
- 'ptr_container': 'Pointer Container',
- 'poly_collection': 'PolyCollection',
- 'qvm': 'QVM',
- 'throw_exception': 'ThrowException',
- 'tti': 'TTI',
- 'vmd': 'VMD',
-}
-
-extra = [
- Module('utility', 'Compressed Pair', []),
- Module('core', 'Enable If', []),
- Module('functional', 'Functional/Factory', []),
- Module('functional', 'Functional/Forward', []),
- Module('functional', 'Functional/Hash', []),
- Module('functional', 'Functional/Overloaded Function', []),
- Module('utility', 'Identity Type', []),
- Module('utility', 'In Place Factory, Typed In Place Factory', []),
- Module('numeric', 'Interval', []),
- Module('math', 'Math Common Factor', []),
- Module('math', 'Math Octonion', []),
- Module('math', 'Math Quaternion', []),
- Module('math', 'Math/Special Functions', []),
- Module('math', 'Math/Statistical Distributions', []),
- Module('bind', 'Member Function', []),
- Module('algorithm', 'Min-Max', []),
- Module('numeric', 'Odeint', []),
- Module('utility', 'Operators', []),
- Module('core', 'Ref', []),
- Module('utility', 'Result Of', []),
- Module('algorithm', 'String Algo', []),
- Module('core', 'Swap', []),
- Module('', 'Tribool', []),
- Module('numeric', 'uBLAS', []),
- Module('utility', 'Value Initialized', []),
-]
-
-# Cannot find the following modules in the documentation of boost
-not_modules = ['beast', 'logic', 'mp11', 'winapi']
-
-def eprint(message):
- print(message, file=sys.stderr)
-
-def get_library_names(jamfile):
- libs = []
- with open(jamfile) as jamfh:
- jam = jamfh.read()
- res = re.finditer(r'^lib[\s]+([A-Za-z0-9_]+)([^;]*);', jam, re.MULTILINE | re.DOTALL)
- for matches in res:
- if ':' in matches.group(2):
- libs.append(matches.group(1))
- res = re.finditer(r'^boost-lib[\s]+([A-Za-z0-9_]+)([^;]*);', jam, re.MULTILINE | re.DOTALL)
- for matches in res:
- if ':' in matches.group(2):
- libs.append('boost_{}'.format(matches.group(1)))
- return libs
-def exists(modules, module):
- return len([x for x in modules if x.dirname == module.dirname]) != 0
+def get_boost_version() -> T.Optional[str]:
+ raw = jamroot.read_text()
+ m = re.search(r'BOOST_VERSION\s*:\s*([0-9\.]+)\s*;', raw)
+ if m:
+ return m.group(1)
+ return None
-def get_modules(init=extra):
- modules = init
- for directory in os.listdir(LIBS):
- if not os.path.isdir(os.path.join(LIBS, directory)):
+
+def get_libraries(jamfile: Path) -> T.List[BoostLibrary]:
+ # Extract libraries from the boost Jamfiles. This includes:
+ # - library name
+ # - compiler flags
+
+ libs: T.List[BoostLibrary] = []
+ raw = jamfile.read_text()
+ raw = re.sub(r'#.*\n', '\n', raw) # Remove comments
+ raw = re.sub(r'\s+', ' ', raw) # Force single space
+ raw = re.sub(r'}', ';', raw) # Cheat code blocks by converting } to ;
+
+ cmds = raw.split(';') # Commands always terminate with a ; (I hope)
+ cmds = [x.strip() for x in cmds] # Some cleanup
+
+ # "Parse" the relevant sections
+ for i in cmds:
+ parts = i.split(' ')
+ parts = [x for x in parts if x not in ['', ':']]
+ if not parts:
continue
- if directory in not_modules:
+
+        # Parse libraries
+ if parts[0] in ['lib', 'boost-lib']:
+ assert len(parts) >= 2
+
+ # Get and check the library name
+ lname = parts[1]
+ if parts[0] == 'boost-lib':
+ lname = f'boost_{lname}'
+ if not lname.startswith('boost_'):
+ continue
+
+ # Get shared / static defines
+ shared: T.List[str] = []
+ static: T.List[str] = []
+ single: T.List[str] = []
+ multi: T.List[str] = []
+ for j in parts:
+ m1 = re.match(r'<link>shared:<define>(.*)', j)
+ m2 = re.match(r'<link>static:<define>(.*)', j)
+ m3 = re.match(r'<threading>single:<define>(.*)', j)
+ m4 = re.match(r'<threading>multi:<define>(.*)', j)
+
+ if m1:
+ shared += [m1.group(1)]
+ if m2:
+ static += [m2.group(1)]
+ if m3:
+ single += [m3.group(1)]
+ if m4:
+ multi += [m4.group(1)]
+
+ shared = [f'-D{x}' for x in shared]
+ static = [f'-D{x}' for x in static]
+ libs += [BoostLibrary(lname, shared, static, single, multi)]
+
+ return libs
+
+
+def process_lib_dir(ldir: Path) -> T.List[BoostModule]:
+ meta_file = ldir / 'meta' / 'libraries.json'
+ bjam_file = ldir / 'build' / 'Jamfile.v2'
+ if not meta_file.exists():
+ print(f'WARNING: Meta file {meta_file} does not exist')
+ return []
+
+ # Extract libs
+ libs: T.List[BoostLibrary] = []
+ if bjam_file.exists():
+ libs = get_libraries(bjam_file)
+
+ # Extract metadata
+ data = json.loads(meta_file.read_text())
+ if not isinstance(data, list):
+ data = [data]
+
+ modules: T.List[BoostModule] = []
+ for i in data:
+ modules += [BoostModule(i['name'], i['key'], i['description'], libs)]
+
+ return modules
+
+
+def get_modules() -> T.List[BoostModule]:
+ modules: T.List[BoostModule] = []
+ for i in lib_dir.iterdir():
+ if not i.is_dir() or i.name in not_modules:
continue
- jamfile = os.path.join(LIBS, directory, 'build', 'Jamfile.v2')
- if os.path.isfile(jamfile):
- libs = get_library_names(jamfile)
- else:
- libs = []
- if directory in manual_map.keys():
- modname = manual_map[directory]
+
+ # numeric has sub libs
+ subdirs = i / 'sublibs'
+ metadir = i / 'meta'
+ if subdirs.exists() and not metadir.exists():
+ for j in i.iterdir():
+ if not j.is_dir():
+ continue
+ modules += process_lib_dir(j)
else:
- modname = directory.replace('_', ' ').title()
- modules.append(Module(directory, modname, libs))
+ modules += process_lib_dir(i)
+
return modules
-def get_modules_2():
- modules = []
- # The python module uses an older build system format and is not easily parseable.
- # We add the python module libraries manually.
- modules.append(Module('python', 'Python', ['boost_python', 'boost_python3', 'boost_numpy', 'boost_numpy3']))
- for (root, _, files) in os.walk(LIBS):
- for f in files:
- if f == "libraries.json":
- projectdir = os.path.dirname(root)
-
- jamfile = os.path.join(projectdir, 'build', 'Jamfile.v2')
- if os.path.isfile(jamfile):
- libs = get_library_names(jamfile)
- else:
- libs = []
-
- # Get metadata for module
- jsonfile = os.path.join(root, f)
- with open(jsonfile) as jsonfh:
- boost_modules = json.loads(jsonfh.read())
- if(isinstance(boost_modules, dict)):
- boost_modules = [boost_modules]
- for boost_module in boost_modules:
- modules.append(Module(boost_module['key'], boost_module['name'], libs))
-
- # Some subprojects do not have meta directory with json file. Find those
- jsonless_modules = [x for x in get_modules([]) if not exists(modules, x)]
- for module in jsonless_modules:
- eprint("WARNING: {} does not have meta/libraries.json. Will guess pretty name '{}'".format(module.dirname, module.name))
- modules.extend(jsonless_modules)
- return modules
+def main() -> int:
+ if not lib_dir.is_dir() or not jamroot.exists():
+ print("ERROR: script must be run in boost source directory")
+ return 1
+
+ vers = get_boost_version()
+ modules = get_modules()
+ modules = sorted(modules)
+ libraries = [x for y in modules for x in y.libs]
+ libraries = sorted(set(libraries))
+
+ print(textwrap.dedent(f'''\
+ #### ---- BEGIN GENERATED ---- ####
+ # #
+ # Generated with tools/boost_names.py:
+ # - boost version: {vers}
+ # - modules found: {len(modules)}
+ # - libraries found: {len(libraries)}
+ #
+
+ class BoostLibrary():
+ def __init__(self, name: str, shared: T.List[str], static: T.List[str], single: T.List[str], multi: T.List[str]):
+ self.name = name
+ self.shared = shared
+ self.static = static
+ self.single = single
+ self.multi = multi
+
+ class BoostModule():
+ def __init__(self, name: str, key: str, desc: str, libs: T.List[str]):
+ self.name = name
+ self.key = key
+ self.desc = desc
+ self.libs = libs
+
+
+        # dict of all known libraries with additional compile options
+ boost_libraries = {{\
+ '''))
+
+ for i in libraries:
+ print(textwrap.indent(textwrap.dedent(f"""\
+ '{i.name}': BoostLibrary(
+ name='{i.name}',
+ shared={i.shared},
+ static={i.static},
+ single={i.single},
+ multi={i.multi},
+ ),\
+ """), ' '))
+
+ if export_modules:
+ print(textwrap.dedent(f'''\
+ }}
+
-def main(args):
- if not os.path.isdir(LIBS):
- eprint("ERROR: script must be run in boost source directory")
+ # dict of all modules with metadata
+ boost_modules = {{\
+ '''))
- # It will pick jsonless algorithm if 1 is given as argument
- impl = 0
- if len(args) > 1:
- if args[1] == '1':
- impl = 1
+ for mod in modules:
+ desc_excaped = re.sub(r"'", "\\'", mod.desc)
+ print(textwrap.indent(textwrap.dedent(f"""\
+ '{mod.key}': BoostModule(
+ name='{mod.name}',
+ key='{mod.key}',
+ desc='{desc_excaped}',
+ libs={[x.name for x in mod.libs]},
+ ),\
+ """), ' '))
- if impl == 1:
- modules = get_modules()
- else:
- modules = get_modules_2()
+ print(textwrap.dedent(f'''\
+ }}
- sorted_modules = sorted(modules, key=lambda module: module.name.lower())
- sorted_modules = [x[2] for x in sorted_modules if x[2]]
- sorted_modules = sum(sorted_modules, [])
- sorted_modules = [x for x in sorted_modules if x.startswith('boost')]
+ # #
+ #### ---- END GENERATED ---- ####\
+ '''))
- pp = pprint.PrettyPrinter()
- pp.pprint(sorted_modules)
+ return 0
if __name__ == '__main__':
- main(sys.argv)
+ sys.exit(main())
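
The rewritten script is meant to be run from the root of an unpacked Boost source tree (where libs/ and Jamroot live) and prints Python source to paste into Meson's boost dependency data. Roughly what one generated entry looks like, inferred from the template above; the flag values here are illustrative, not taken from a real Jamfile:

    import typing as T

    class BoostLibrary():
        def __init__(self, name: str, shared: T.List[str], static: T.List[str],
                     single: T.List[str], multi: T.List[str]):
            self.name = name
            self.shared = shared
            self.static = static
            self.single = single
            self.multi = multi

    # One entry of the generated boost_libraries dict (values illustrative):
    boost_libraries = {
        'boost_log': BoostLibrary(
            name='boost_log',
            shared=['-DBOOST_LOG_DYN_LINK=1'],
            static=[],
            single=['-DBOOST_LOG_NO_THREADS'],
            multi=[],
        ),
    }
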
diff --git a/tools/cmake2meson.py b/tools/cmake2meson.py
index 5a27a51..05acd8f 100755
--- a/tools/cmake2meson.py
+++ b/tools/cmake2meson.py
@@ -139,6 +139,16 @@ class Parser:
while not self.accept('eof'):
yield(self.statement())
+def token_or_group(arg):
+ if isinstance(arg, Token):
+ return ' ' + arg.value
+ elif isinstance(arg, list):
+ line = ' ('
+ for a in arg:
+ line += ' ' + token_or_group(a)
+ line += ' )'
+ return line
+
class Converter:
ignored_funcs = {'cmake_minimum_required': True,
'enable_testing': True,
@@ -237,17 +247,16 @@ class Converter:
except AttributeError: # complex if statements
line = t.name
for arg in t.args:
- if isinstance(arg, Token):
- line += ' ' + arg.value
- elif isinstance(arg, list):
- line += ' ('
- for a in arg:
- line += ' ' + a.value
- line += ' )'
+ line += token_or_group(arg)
elif t.name == 'elseif':
preincrement = -1
postincrement = 1
- line = 'elif %s' % self.convert_args(t.args, False)
+ try:
+ line = 'elif %s' % self.convert_args(t.args, False)
+ except AttributeError: # complex if statements
+ line = t.name
+ for arg in t.args:
+ line += token_or_group(arg)
elif t.name == 'else':
preincrement = -1
postincrement = 1