-rw-r--r--  .github/workflows/lint_mypy.yml | 2
-rw-r--r--  .github/workflows/unusedargs_missingreturn.yml | 6
-rw-r--r--  .travis.yml | 3
-rw-r--r--  azure-pipelines.yml | 27
-rwxr-xr-x  ci/ciimage/arch/install.sh | 2
-rwxr-xr-x  ci/ciimage/bionic/install.sh | 3
-rwxr-xr-x  ci/ciimage/eoan/install.sh | 3
-rwxr-xr-x  ci/ciimage/fedora/install.sh | 2
-rwxr-xr-x  ci/ciimage/opensuse/install.sh | 4
-rw-r--r--  ci/run.ps1 | 3
-rwxr-xr-x  ci/travis_script.sh | 8
-rw-r--r--  data/macros.meson | 25
-rw-r--r--  docs/markdown/Builtin-options.md | 42
-rw-r--r--  docs/markdown/Configuring-a-build-directory.md | 4
-rw-r--r--  docs/markdown/Continuous-Integration.md | 18
-rw-r--r--  docs/markdown/Creating-OSX-packages.md | 2
-rw-r--r--  docs/markdown/Creating-releases.md | 67
-rw-r--r--  docs/markdown/Cross-compilation.md | 2
-rw-r--r--  docs/markdown/Design-rationale.md | 4
-rw-r--r--  docs/markdown/FAQ.md | 4
-rw-r--r--  docs/markdown/Feature-autodetection.md | 4
-rw-r--r--  docs/markdown/Gnome-module.md | 10
-rw-r--r--  docs/markdown/IDE-integration.md | 4
-rw-r--r--  docs/markdown/IndepthTutorial.md | 6
-rw-r--r--  docs/markdown/Installing.md | 20
-rw-r--r--  docs/markdown/Localisation.md | 4
-rw-r--r--  docs/markdown/Machine-files.md | 73
-rw-r--r--  docs/markdown/Meson-sample.md | 2
-rw-r--r--  docs/markdown/MesonCI.md | 53
-rw-r--r--  docs/markdown/Project-templates.md | 27
-rw-r--r--  docs/markdown/Qt5-module.md | 2
-rw-r--r--  docs/markdown/Quick-guide.md | 16
-rw-r--r--  docs/markdown/Reference-manual.md | 956
-rw-r--r--  docs/markdown/Reference-tables.md | 133
-rw-r--r--  docs/markdown/Release-notes-for-0.54.0.md | 5
-rw-r--r--  docs/markdown/Run-targets.md | 2
-rw-r--r--  docs/markdown/Running-Meson.md | 45
-rw-r--r--  docs/markdown/Style-guide.md | 6
-rw-r--r--  docs/markdown/Subprojects.md | 16
-rw-r--r--  docs/markdown/Syntax.md | 110
-rw-r--r--  docs/markdown/Tutorial.md | 6
-rw-r--r--  docs/markdown/Unit-tests.md | 7
-rw-r--r--  docs/markdown/Users.md | 2
-rw-r--r--  docs/markdown/Using-multiple-build-directories.md | 4
-rw-r--r--  docs/markdown/Vs-External.md | 6
-rw-r--r--  docs/markdown/Wrap-dependency-system-manual.md | 13
-rw-r--r--  docs/markdown/howtox.md | 19
-rw-r--r--  docs/markdown/snippets/add_compile_backend_arg.md | 26
-rw-r--r--  docs/markdown/snippets/add_meson_compile_target.md | 19
-rw-r--r--  docs/markdown/snippets/clang_coverage.md | 4
-rw-r--r--  docs/markdown/snippets/force_fallback_for.md | 10
-rw-r--r--  docs/markdown/snippets/gir_fatal_warnings.md | 5
-rw-r--r--  docs/markdown/snippets/machine_file_constants.md | 20
-rw-r--r--  docs/markdown/snippets/response-files.md | 7
-rw-r--r--  docs/markdown/snippets/wrap_patch.md | 13
-rw-r--r--  docs/markdown_dynamic/Commands.md | 296
-rw-r--r--  docs/meson.build | 32
-rw-r--r--  docs/sitemap.txt | 2
-rw-r--r--  mesonbuild/arglist.py | 331
-rw-r--r--  mesonbuild/backend/backends.py | 99
-rw-r--r--  mesonbuild/backend/ninjabackend.py | 385
-rw-r--r--  mesonbuild/backend/vs2010backend.py | 55
-rwxr-xr-x  mesonbuild/cmake/data/run_ctgt.py | 96
-rw-r--r--  mesonbuild/cmake/executor.py | 49
-rw-r--r--  mesonbuild/cmake/interpreter.py | 21
-rw-r--r--  mesonbuild/cmake/traceparser.py | 2
-rw-r--r--  mesonbuild/compilers/__init__.py | 2
-rw-r--r--  mesonbuild/compilers/c_function_attributes.py | 2
-rw-r--r--  mesonbuild/compilers/compilers.py | 372
-rw-r--r--  mesonbuild/compilers/d.py | 3
-rw-r--r--  mesonbuild/compilers/mixins/clike.py | 79
-rw-r--r--  mesonbuild/compilers/mixins/visualstudio.py | 3
-rw-r--r--  mesonbuild/coredata.py | 80
-rw-r--r--  mesonbuild/dependencies/base.py | 110
-rw-r--r--  mesonbuild/dependencies/misc.py | 54
-rw-r--r--  mesonbuild/dependencies/ui.py | 17
-rw-r--r--  mesonbuild/envconfig.py | 40
-rw-r--r--  mesonbuild/environment.py | 30
-rw-r--r--  mesonbuild/interpreter.py | 36
-rw-r--r--  mesonbuild/linkers.py | 30
-rw-r--r--  mesonbuild/mcompile.py | 229
-rw-r--r--  mesonbuild/mconf.py | 16
-rw-r--r--  mesonbuild/mdist.py | 2
-rw-r--r--  mesonbuild/mesonlib.py | 4
-rw-r--r--  mesonbuild/minit.py | 6
-rw-r--r--  mesonbuild/mintro.py | 30
-rw-r--r--  mesonbuild/modules/cmake.py | 3
-rw-r--r--  mesonbuild/modules/gnome.py | 13
-rw-r--r--  mesonbuild/modules/pkgconfig.py | 27
-rwxr-xr-x  mesonbuild/scripts/cmake_run_ctgt.py | 100
-rw-r--r--  mesonbuild/scripts/coverage.py | 46
-rw-r--r--  mesonbuild/scripts/gtkdochelper.py | 8
-rw-r--r--  mesonbuild/scripts/symbolextractor.py | 9
-rw-r--r--  mesonbuild/wrap/wrap.py | 36
-rw-r--r--  msi/createmsi.py | 2
-rwxr-xr-x  run_cross_test.py | 6
-rwxr-xr-x  run_meson_command_tests.py | 6
-rwxr-xr-x  run_project_tests.py | 16
-rwxr-xr-x  run_tests.py | 5
-rwxr-xr-x  run_unittests.py | 392
-rw-r--r--  test cases/cmake/1 basic/subprojects/cmMod/CMakeLists.txt | 2
-rw-r--r--  test cases/cmake/1 basic/subprojects/cmMod/cmMod.cpp | 4
-rw-r--r--  test cases/cmake/1 basic/subprojects/cmMod/cmMod.hpp | 4
-rw-r--r--  test cases/cmake/10 header only/main.cpp | 6
-rw-r--r--  test cases/cmake/10 header only/subprojects/cmMod/CMakeLists.txt | 1
-rw-r--r--  test cases/cmake/10 header only/subprojects/cmMod/include/cmMod.hpp | 5
-rw-r--r--  test cases/cmake/19 cmake file/foolib.cmake.in | 1
-rw-r--r--  test cases/cmake/19 cmake file/meson.build | 14
-rw-r--r--  test cases/cmake/19 cmake file/test.json | 5
-rw-r--r--  test cases/cmake/7 cmake options/subprojects/cmOpts/CMakeLists.txt | 5
-rw-r--r--  test cases/cmake/7 cmake options/test.json | 9
-rwxr-xr-x  test cases/common/125 object only target/obj_generator.py | 2
-rw-r--r--  test cases/common/145 special characters/arg-char-test.c | 10
-rw-r--r--  test cases/common/145 special characters/arg-string-test.c | 12
-rw-r--r--  test cases/common/145 special characters/arg-unquoted-test.c | 17
-rw-r--r--  test cases/common/145 special characters/meson.build | 38
-rw-r--r--  test cases/common/157 wrap file should not failed/meson.build | 2
-rw-r--r--  test cases/common/157 wrap file should not failed/subprojects/packagefiles/foo-1.0/meson.build | 2
-rw-r--r--  test cases/common/157 wrap file should not failed/subprojects/patchdir.wrap | 9
-rwxr-xr-x  test cases/common/234 very long commmand line/codegen.py | 6
-rw-r--r--  test cases/common/234 very long commmand line/main.c | 5
-rw-r--r--  test cases/common/234 very long commmand line/meson.build | 44
-rwxr-xr-x  test cases/common/234 very long commmand line/seq.py | 6
-rw-r--r--  test cases/common/47 pkgconfig-gen/dependencies/main.c | 4
-rw-r--r--  test cases/common/47 pkgconfig-gen/meson.build | 3
-rw-r--r--  test cases/fortran/7 generated/meson.build | 11
-rw-r--r--  test cases/fortran/7 generated/mod1.fpp | 4
-rw-r--r--  test cases/fortran/7 generated/mod2.fpp | 6
-rw-r--r--  test cases/fortran/7 generated/mod3.fpp | 6
-rw-r--r--  test cases/fortran/7 generated/prog.f90 | 9
-rw-r--r--  test cases/frameworks/7 gnome/mkenums/meson.build | 8
-rw-r--r--  test cases/linuxlike/13 cmake dependency/cmake_fake1/cmMesonTestF1Config.cmake | 9
-rw-r--r--  test cases/linuxlike/13 cmake dependency/cmake_fake2/cmMesonTestF2Config.cmake | 9
-rw-r--r--  test cases/linuxlike/13 cmake dependency/meson.build | 2
-rw-r--r--  test cases/linuxlike/13 cmake dependency/test.json | 2
-rw-r--r--  test cases/linuxlike/3 linker script/meson.build | 6
-rwxr-xr-x  test cases/unit/61 identity cross/build_wrapper.py | 10
-rwxr-xr-x  test cases/unit/61 identity cross/host_wrapper.py | 10
-rw-r--r--  test cases/unit/78 wrap-git/meson.build | 4
-rw-r--r--  test cases/unit/78 wrap-git/subprojects/packagefiles/wrap_git_builddef/meson.build | 3
-rw-r--r--  test cases/unit/78 wrap-git/subprojects/wrap_git_upstream/main.c | 4
-rw-r--r--  tools/copy_files.py | 55
-rwxr-xr-x  tools/regenerate_docs.py | 150
143 files changed, 3871 insertions, 1721 deletions
diff --git a/.github/workflows/lint_mypy.yml b/.github/workflows/lint_mypy.yml
index 7afee2e..056f96e 100644
--- a/.github/workflows/lint_mypy.yml
+++ b/.github/workflows/lint_mypy.yml
@@ -31,4 +31,4 @@ jobs:
with:
python-version: '3.x'
- run: python -m pip install mypy
- - run: mypy --follow-imports=skip mesonbuild/interpreterbase.py mesonbuild/mtest.py mesonbuild/minit.py mesonbuild/mintro.py mesonbuild/mparser.py mesonbuild/msetup.py mesonbuild/ast mesonbuild/wrap tools/ mesonbuild/modules/fs.py mesonbuild/dependencies/boost.py mesonbuild/dependencies/mpi.py mesonbuild/dependencies/hdf5.py mesonbuild/compilers/mixins/intel.py mesonbuild/mlog.py mesonbuild/mcompile.py mesonbuild/mesonlib.py
+ - run: mypy --follow-imports=skip mesonbuild/interpreterbase.py mesonbuild/mtest.py mesonbuild/minit.py mesonbuild/mintro.py mesonbuild/mparser.py mesonbuild/msetup.py mesonbuild/ast mesonbuild/wrap tools/ mesonbuild/modules/fs.py mesonbuild/dependencies/boost.py mesonbuild/dependencies/mpi.py mesonbuild/dependencies/hdf5.py mesonbuild/compilers/mixins/intel.py mesonbuild/mlog.py mesonbuild/mcompile.py mesonbuild/mesonlib.py mesonbuild/arglist.py
diff --git a/.github/workflows/unusedargs_missingreturn.yml b/.github/workflows/unusedargs_missingreturn.yml
index fa4405b..859dec2 100644
--- a/.github/workflows/unusedargs_missingreturn.yml
+++ b/.github/workflows/unusedargs_missingreturn.yml
@@ -55,7 +55,11 @@ jobs:
- uses: actions/setup-python@v1
with:
python-version: '3.x'
- - run: pip install ninja pefile
+ # ninja==1.10 pypi release didn't ship with windows binaries, which causes
+ # pip to try to build it, which fails on Windows. Pin the previous version
+ # for now. We can update once that's fixed.
+ # https://pypi.org/project/ninja/1.10.0/#files
+ - run: pip install ninja==1.9.0.post1 pefile
- run: python run_project_tests.py --only platform-windows
env:
CI: "1"
diff --git a/.travis.yml b/.travis.yml
index f5a32a6..22d76e7 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -31,9 +31,10 @@ matrix:
compiler: gcc
include:
# Test cross builds separately, they do not use the global compiler
+ # Also hijack one cross build to test long commandline handling codepath (and avoid overloading Travis)
- os: linux
compiler: gcc
- env: RUN_TESTS_ARGS="--cross ubuntu-armhf.txt --cross linux-mingw-w64-64bit.txt"
+ env: RUN_TESTS_ARGS="--cross ubuntu-armhf.txt --cross linux-mingw-w64-64bit.txt" MESON_RSP_THRESHOLD=0
- os: linux
compiler: gcc
env: RUN_TESTS_ARGS="--cross ubuntu-armhf.txt --cross linux-mingw-w64-64bit.txt" MESON_ARGS="--unity=on"
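As an aside on the `MESON_RSP_THRESHOLD=0` setting used above: the variable appears to set the command-line length above which the Ninja backend switches to response files, so forcing it to 0 exercises the response-file codepath for every command. A minimal local sketch under that assumption (the exact semantics and the need to set it at configure time are assumptions, not confirmed by this diff):

```sh
# Assumed: the threshold is read while the Ninja build files are generated,
# so export it for the setup step, then build as usual.
MESON_RSP_THRESHOLD=0 meson setup builddir
meson compile -C builddir
```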
diff --git a/azure-pipelines.yml b/azure-pipelines.yml
index 066f1a5..85fedab 100644
--- a/azure-pipelines.yml
+++ b/azure-pipelines.yml
@@ -22,6 +22,7 @@ jobs:
arch: x86
compiler: msvc2017
backend: ninja
+ MESON_RSP_THRESHOLD: 0
vc2017x64vs:
arch: x64
compiler: msvc2017
@@ -63,6 +64,7 @@ jobs:
- template: ci/azure-steps.yml
- job: cygwin
+ timeoutInMinutes: 120
pool:
vmImage: VS2017-Win2016
strategy:
@@ -82,6 +84,7 @@ jobs:
gcc-objc,^
git,^
gobject-introspection,^
+ gtk-doc,^
libarchive13,^
libboost-devel,^
libglib2.0-devel,^
@@ -89,9 +92,15 @@ jobs:
libjsoncpp19,^
librhash0,^
libuv1,^
+ libxml2,^
+ libxml2-devel,^
+ libxslt,^
+ libxslt-devel,^
ninja,^
python2-devel,^
python3-devel,^
+ python3-libxml2,^
+ python3-libxslt,^
python36-pip,^
vala,^
wget,^
@@ -100,8 +109,8 @@ jobs:
displayName: Install Dependencies
- script: |
set PATH=%CYGWIN_ROOT%\bin;%SYSTEMROOT%\system32
- env.exe -- python3 -m pip --disable-pip-version-check install pefile pytest-xdist jsonschema
- displayName: pip install pefile pytest-xdist jsonschema
+ env.exe -- python3 -m pip --disable-pip-version-check install gcovr pefile pytest-xdist jsonschema
+ displayName: pip install gcovr pefile pytest-xdist jsonschema
- script: |
set BOOST_ROOT=
set PATH=%CYGWIN_ROOT%\bin;%SYSTEMROOT%\system32
@@ -138,6 +147,7 @@ jobs:
gccx64ninja:
MSYSTEM: MINGW64
MSYS2_ARCH: x86_64
+ MESON_RSP_THRESHOLD: 0
compiler: gcc
clangx64ninja:
MSYSTEM: MINGW64
@@ -151,7 +161,13 @@ jobs:
displayName: Install MSYS2
- script: |
set PATH=%MSYS2_ROOT%\usr\bin;%SystemRoot%\system32;%SystemRoot%;%SystemRoot%\System32\Wbem
- %MSYS2_ROOT%\usr\bin\pacman --noconfirm -Syyuu
+ # Remove this line when https://github.com/msys2/MSYS2-packages/pull/2022 is merged
+ %MSYS2_ROOT%\usr\bin\pacman --noconfirm -Sy dash
+ echo Updating msys2
+ %MSYS2_ROOT%\usr\bin\pacman --noconfirm -Syuu || echo system update failed, ignoring
+ echo Killing all msys2 processes
+ taskkill /F /FI "MODULES eq msys-2.0.dll"
+ echo Updating msys2 (again)
%MSYS2_ROOT%\usr\bin\pacman --noconfirm -Syuu
displayName: Update MSYS2
- script: |
@@ -162,14 +178,17 @@ jobs:
git ^
mercurial ^
mingw-w64-$(MSYS2_ARCH)-cmake ^
+ mingw-w64-$(MSYS2_ARCH)-lcov ^
+ mingw-w64-$(MSYS2_ARCH)-libxml2 ^
mingw-w64-$(MSYS2_ARCH)-ninja ^
mingw-w64-$(MSYS2_ARCH)-pkg-config ^
mingw-w64-$(MSYS2_ARCH)-python2 ^
mingw-w64-$(MSYS2_ARCH)-python3 ^
+ mingw-w64-$(MSYS2_ARCH)-python3-lxml ^
mingw-w64-$(MSYS2_ARCH)-python3-setuptools ^
mingw-w64-$(MSYS2_ARCH)-python3-pip ^
%TOOLCHAIN%
- %MSYS2_ROOT%\usr\bin\bash -lc "python3 -m pip --disable-pip-version-check install pefile jsonschema"
+ %MSYS2_ROOT%\usr\bin\bash -lc "python3 -m pip --disable-pip-version-check install gcovr jsonschema pefile"
displayName: Install Dependencies
- script: |
set BOOST_ROOT=
diff --git a/ci/ciimage/arch/install.sh b/ci/ciimage/arch/install.sh
index 6cbbb27..fb27c26 100755
--- a/ci/ciimage/arch/install.sh
+++ b/ci/ciimage/arch/install.sh
@@ -17,7 +17,7 @@ pkgs=(
)
aur_pkgs=(scalapack)
-pip_pkgs=(hotdoc)
+pip_pkgs=(hotdoc gcovr)
cleanup_pkgs=(go)
AUR_USER=docker
diff --git a/ci/ciimage/bionic/install.sh b/ci/ciimage/bionic/install.sh
index 47deb2a..0bfcdfb 100755
--- a/ci/ciimage/bionic/install.sh
+++ b/ci/ciimage/bionic/install.sh
@@ -15,6 +15,7 @@ pkgs=(
qt4-linguist-tools qt5-default qtbase5-private-dev
python-dev
libomp-dev
+ llvm lcov
ldc
libclang-dev
libgcrypt20-dev
@@ -45,7 +46,7 @@ done
# packages
eatmydata apt-get -y install "${pkgs[@]}"
-eatmydata python3 -m pip install codecov jsonschema
+eatmydata python3 -m pip install codecov gcovr jsonschema
# Install the ninja 0.10
wget https://github.com/ninja-build/ninja/releases/download/v1.10.0/ninja-linux.zip
diff --git a/ci/ciimage/eoan/install.sh b/ci/ciimage/eoan/install.sh
index 7d7a1fd..36dec72 100755
--- a/ci/ciimage/eoan/install.sh
+++ b/ci/ciimage/eoan/install.sh
@@ -18,6 +18,7 @@ pkgs=(
qt4-linguist-tools
python-dev
libomp-dev
+ llvm lcov
dub ldc
mingw-w64 mingw-w64-tools nim
libclang-dev
@@ -42,7 +43,7 @@ eatmydata apt-get -y build-dep meson
eatmydata apt-get -y install "${pkgs[@]}"
eatmydata apt-get -y install --no-install-recommends wine-stable # Wine is special
-eatmydata python3 -m pip install hotdoc codecov jsonschema
+eatmydata python3 -m pip install hotdoc codecov gcovr jsonschema
# dub stuff
dub_fetch urld
diff --git a/ci/ciimage/fedora/install.sh b/ci/ciimage/fedora/install.sh
index f61d97e..3beb11c 100755
--- a/ci/ciimage/fedora/install.sh
+++ b/ci/ciimage/fedora/install.sh
@@ -21,7 +21,7 @@ dnf -y upgrade
# Install deps
dnf -y install "${pkgs[@]}"
-python3 -m pip install hotdoc gobject PyGObject
+python3 -m pip install hotdoc gcovr gobject PyGObject
# Cleanup
dnf -y clean all
diff --git a/ci/ciimage/opensuse/install.sh b/ci/ciimage/opensuse/install.sh
index b9e440d..4c8e770 100755
--- a/ci/ciimage/opensuse/install.sh
+++ b/ci/ciimage/opensuse/install.sh
@@ -7,7 +7,7 @@ source /ci/common.sh
pkgs=(
python3-setuptools python3-wheel python3-pip python3-pytest-xdist python3 python3-lxml
ninja make git autoconf automake patch python3-Cython python3-jsonschema
- elfutils gcc gcc-c++ gcc-fortran gcc-objc gcc-obj-c++ vala rust bison flex curl
+ elfutils gcc gcc-c++ gcc-fortran gcc-objc gcc-obj-c++ vala rust bison flex curl lcov
mono-core gtkmm3-devel gtest gmock protobuf-devel wxGTK3-3_2-devel gobject-introspection-devel
itstool gtk3-devel java-15-openjdk-devel gtk-doc llvm-devel clang-devel libSDL2-devel graphviz-devel zlib-devel zlib-devel-static
#hdf5-devel netcdf-devel libscalapack2-openmpi3-devel libscalapack2-gnu-openmpi3-hpc-devel openmpi3-devel
@@ -26,7 +26,7 @@ zypper --non-interactive update
# Install deps
zypper install -y "${pkgs[@]}"
-python3 -m pip install hotdoc gobject PyGObject
+python3 -m pip install hotdoc gcovr gobject PyGObject
echo 'export PKG_CONFIG_PATH="/usr/lib64/mpi/gcc/openmpi3/lib64/pkgconfig:$PKG_CONFIG_PATH"' >> /ci/env_vars.sh
diff --git a/ci/run.ps1 b/ci/run.ps1
index 34856c0..5065b87 100644
--- a/ci/run.ps1
+++ b/ci/run.ps1
@@ -4,7 +4,8 @@ if ($LastExitCode -ne 0) {
}
# remove Chocolately, MinGW, Strawberry Perl from path, so we don't find gcc/gfortran and try to use it
-$env:Path = ($env:Path.Split(';') | Where-Object { $_ -notmatch 'mingw|Strawberry|Chocolatey' }) -join ';'
+# remove PostgreSQL from path so we don't pickup a broken zlib from it
+$env:Path = ($env:Path.Split(';') | Where-Object { $_ -notmatch 'mingw|Strawberry|Chocolatey|PostgreSQL' }) -join ';'
# Rust puts its shared stdlib in a secret place, but it is needed to run tests.
$env:Path += ";$HOME/.rustup/toolchains/stable-x86_64-pc-windows-msvc/bin"
diff --git a/ci/travis_script.sh b/ci/travis_script.sh
index a91a5dd..bdfd4c2 100755
--- a/ci/travis_script.sh
+++ b/ci/travis_script.sh
@@ -23,6 +23,10 @@ export CXX=$CXX
export OBJC=$CC
export OBJCXX=$CXX
export PATH=/root/tools:$PATH
+if test "$MESON_RSP_THRESHOLD" != ""
+then
+ export MESON_RSP_THRESHOLD=$MESON_RSP_THRESHOLD
+fi
source /ci/env_vars.sh
cd /root
@@ -55,5 +59,9 @@ elif [[ "$TRAVIS_OS_NAME" == "osx" ]]; then
export OBJC=$CC
export OBJCXX=$CXX
export PATH=$HOME/tools:/usr/local/opt/qt/bin:$PATH:$(brew --prefix llvm)/bin
+ if test "$MESON_RSP_THRESHOLD" != ""
+ then
+ export MESON_RSP_THRESHOLD=$MESON_RSP_THRESHOLD
+ fi
./run_tests.py $RUN_TESTS_ARGS --backend=ninja -- $MESON_ARGS
fi
diff --git a/data/macros.meson b/data/macros.meson
index c5b90de..cc4953c 100644
--- a/data/macros.meson
+++ b/data/macros.meson
@@ -2,12 +2,6 @@
%__meson_wrap_mode nodownload
%__meson_auto_features enabled
-%_smp_mesonflags %([ -z "$MESON_BUILD_NCPUS" ] \\\
- && MESON_BUILD_NCPUS="`/usr/bin/getconf _NPROCESSORS_ONLN`"; \\\
- ncpus_max=%{?_smp_ncpus_max}; \\\
- if [ -n "$ncpus_max" ] && [ "$ncpus_max" -gt 0 ] && [ "$MESON_BUILD_NCPUS" -gt "$ncpus_max" ]; then MESON_BUILD_NCPUS="$ncpus_max"; fi; \\\
- if [ "$MESON_BUILD_NCPUS" -gt 1 ]; then echo "--num-processes $MESON_BUILD_NCPUS"; fi)
-
%meson \
%set_build_flags \
%{shrink:%{__meson} \
@@ -28,17 +22,24 @@
--wrap-mode=%{__meson_wrap_mode} \
--auto-features=%{__meson_auto_features} \
%{_vpath_srcdir} %{_vpath_builddir} \
- %{nil}}
+ %{nil}}
%meson_build \
- %ninja_build -C %{_vpath_builddir}
+ %{shrink:%{__meson} compile \
+ -C %{_vpath_builddir} \
+ -j %{_smp_build_ncpus} \
+ --verbose \
+ %{nil}}
%meson_install \
- %ninja_install -C %{_vpath_builddir}
+ %{shrink:DESTDIR=%{buildroot} %{__meson} install \
+ -C %{_vpath_builddir} \
+ --no-rebuild \
+ %{nil}}
%meson_test \
- %{shrink: %{__meson} test \
+ %{shrink:%{__meson} test \
-C %{_vpath_builddir} \
- %{?_smp_mesonflags} \
+ --num-processes %{_smp_build_ncpus} \
--print-errorlogs \
- %{nil}}
+ %{nil}}
diff --git a/docs/markdown/Builtin-options.md b/docs/markdown/Builtin-options.md
index aa7d500..e7101d5 100644
--- a/docs/markdown/Builtin-options.md
+++ b/docs/markdown/Builtin-options.md
@@ -55,16 +55,9 @@ particularly the paths section may be necessary.
### Core options
-Options that are labeled "per machine" in the table are set per machine.
-Prefixing the option with `build.` just affects the build machine configuration,
-while unprefixed just affects the host machine configuration, respectively.
-Using the option as-is with no prefix affects all machines. For example:
-
- - `build.pkg_config_path` controls the paths pkg-config will search for just
- `native: true` dependencies (build machine).
-
- - `pkg_config_path` controls the paths pkg-config will search for just
- `native: false` dependencies (host machine).
+Options that are labeled "per machine" in the table are set per machine. See
+the [specifying options per machine](#Specifying-options-per-machine) section
+for details.
| Option | Default value | Description | Is per machine |
| ------ | ------------- | ----------- | -------------- |
@@ -86,6 +79,7 @@ Using the option as-is with no prefix affects all machines. For example:
| warning_level {0, 1, 2, 3} | 1 | Set the warning level. From 0 = none to 3 = highest | no |
| werror | false | Treat warnings as errors | no |
| wrap_mode {default, nofallback,<br>nodownload, forcefallback} | default | Wrap mode to use | no |
+| force_fallback_for | [] | Force fallback for those dependencies | no |
<a name="build-type-options"></a>
For setting optimization levels and toggling debug, you can either set the
@@ -186,9 +180,9 @@ The default values of `c_winlibs` and `cpp_winlibs` are in compiler-specific
argument forms, but the libraries are: kernel32, user32, gdi32, winspool,
shell32, ole32, oleaut32, uuid, comdlg32, advapi32.
-c_args, cpp_args, c_link_args, and cpp_link_args only affect native builds,
-when cross compiling they will not be applied to binaries or libraries
-targeting the host system, only those being run on the build system.
+All these `<lang>_*` options are specified per machine. See below in the
+[specifying options per machine](#Specifying-options-per-machine) section on
+how to do this in cross builds.
When using MSVC, `cpp_eh=none` will result in no exception flags being passed,
while the `cpp_eh=[value]` will result in `/EH[value]`.
@@ -199,3 +193,25 @@ gcc-style compilers, nothing is passed (allowing exceptions to work), while
Since *0.54.0* The `<lang>_thread_count` option can be used to control the
value passed to `-s PTHREAD_POOL_SIZE` when using emcc. No other c/c++
compiler supports this option.
+
+## Specifying options per machine
+
+Since *0.51.0*, some options are specified per machine rather than globally for
+all machine configurations. Prefixing the option with `build.` just affects the
+build machine configuration, while unprefixed just affects the host machine
+configuration, respectively. For example:
+
+ - `build.pkg_config_path` controls the paths pkg-config will search for just
+ `native: true` dependencies (build machine).
+
+ - `pkg_config_path` controls the paths pkg-config will search for just
+ `native: false` dependencies (host machine).
+
+This is useful for cross builds. In native builds, build = host, and the
+unprefixed option alone will suffice.
+
+Prior to *0.51.0*, these options only affected native builds when specified on
+the command line, as there was no `build.` prefix. Similarly named fields in
+the `[properties]` section of the cross file would affect cross compilers, but
+the code paths were fairly different, allowing differences in behavior to crop
+up.
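To make the per-machine behaviour described above concrete, here is a hedged sketch of setting both the host and build variants of `pkg_config_path` during a cross build (the cross file name and the paths are placeholders, not taken from this patch):

```sh
# Host-machine search path (native: false dependencies) via the unprefixed
# option; build-machine search path (native: true) via the build. prefix.
meson setup builddir --cross-file cross.ini \
    -Dpkg_config_path=/sysroot/usr/lib/pkgconfig \
    -Dbuild.pkg_config_path=/usr/lib/pkgconfig
```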
diff --git a/docs/markdown/Configuring-a-build-directory.md b/docs/markdown/Configuring-a-build-directory.md
index 1387a46..330899f 100644
--- a/docs/markdown/Configuring-a-build-directory.md
+++ b/docs/markdown/Configuring-a-build-directory.md
@@ -109,11 +109,11 @@ you would issue the following command.
meson configure -Dprefix=/tmp/testroot
-Then you would run your build command (usually `ninja`), which would
+Then you would run your build command (usually `meson compile`), which would
cause Meson to detect that the build setup has changed and do all the
work required to bring your build tree up to date.
Since 0.50.0, it is also possible to get a list of all build options
-by invoking `meson configure` with the project source directory or
+by invoking [`meson configure`](Commands.md#configure) with the project source directory or
the path to the root `meson.build`. In this case, meson will print the
default values of all options similar to the example output from above.
diff --git a/docs/markdown/Continuous-Integration.md b/docs/markdown/Continuous-Integration.md
index 0846f2d..76a05a3 100644
--- a/docs/markdown/Continuous-Integration.md
+++ b/docs/markdown/Continuous-Integration.md
@@ -36,8 +36,8 @@ script:
- if [[ "$TRAVIS_OS_NAME" == "linux" ]]; then echo FROM YOUR/REPO:eoan > Dockerfile; fi
- if [[ "$TRAVIS_OS_NAME" == "linux" ]]; then echo ADD . /root >> Dockerfile; fi
- if [[ "$TRAVIS_OS_NAME" == "linux" ]]; then docker build -t withgit .; fi
- - if [[ "$TRAVIS_OS_NAME" == "linux" ]]; then docker run withgit /bin/sh -c "cd /root && TRAVIS=true CC=$CC CXX=$CXX meson builddir && ninja -C builddir test"; fi
- - if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then SDKROOT=$(xcodebuild -version -sdk macosx Path) meson builddir && ninja -C builddir test; fi
+ - if [[ "$TRAVIS_OS_NAME" == "linux" ]]; then docker run withgit /bin/sh -c "cd /root && TRAVIS=true CC=$CC CXX=$CXX meson builddir && meson test -C builddir"; fi
+ - if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then SDKROOT=$(xcodebuild -version -sdk macosx Path) meson builddir && meson test -C builddir; fi
```
## CircleCi for Linux (with Docker)
@@ -69,7 +69,7 @@ jobs:
steps:
- checkout
- run: meson setup builddir --backend ninja
- - run: ninja -C builddir
+ - run: meson compile -C builddir
- run: meson test -C builddir
meson_debian_build:
@@ -77,7 +77,7 @@ jobs:
steps:
- checkout
- run: meson setup builddir --backend ninja
- - run: ninja -C builddir
+ - run: meson compile -C builddir
- run: meson test -C builddir
meson_fedora_build:
@@ -85,7 +85,7 @@ jobs:
steps:
- checkout
- run: meson setup builddir --backend ninja
- - run: ninja -C builddir
+ - run: meson compile -C builddir
- run: meson test -C builddir
workflows:
@@ -138,10 +138,10 @@ install:
build_script:
- cmd: echo Building on %arch% with %compiler%
- cmd: meson --backend=ninja builddir
- - cmd: ninja -C builddir
+ - cmd: meson compile -C builddir
test_script:
- - cmd: ninja -C builddir test
+ - cmd: meson test -C builddir
```
### Qt
@@ -187,8 +187,8 @@ install:
script:
- meson builddir
- - ninja -C builddir
- - ninja -C builddir test
+ - meson compile -C builddir
+ - meson test -C builddir
```
## GitHub Actions
diff --git a/docs/markdown/Creating-OSX-packages.md b/docs/markdown/Creating-OSX-packages.md
index bda06a3..849d5fd 100644
--- a/docs/markdown/Creating-OSX-packages.md
+++ b/docs/markdown/Creating-OSX-packages.md
@@ -39,7 +39,7 @@ $ meson --prefix=/tmp/myapp.app \
<other flags you might need>
```
-Now when we do `ninja install` the bundle is properly staged. If you
+Now when we do `meson install` the bundle is properly staged. If you
have any resource files or data, you need to install them into
`Contents/Resources` either by custom install commands or specifying
more install paths to the Meson command.
diff --git a/docs/markdown/Creating-releases.md b/docs/markdown/Creating-releases.md
index 45c4b4e..040fb53 100644
--- a/docs/markdown/Creating-releases.md
+++ b/docs/markdown/Creating-releases.md
@@ -5,27 +5,56 @@ short-description: Creating releases
# Creating releases
In addition to development, almost all projects provide periodical
-source releases. These are standalone packages (usually either in tar
-or zip format) of the source code. They do not contain any revision
-control metadata, only the source code.
+source releases. These are standalone packages (usually either in
+tar or zip format) of the source code. They do not contain any
+revision control metadata, only the source code. Meson provides
+a simple way of generating these, with the `meson dist` command.
Meson provides a simple way of generating these. It consists of a
-single command:
+single command *(available since 0.52.0)*:
- ninja dist
+```sh
+meson dist
+```
+
+or alternatively (on older Meson versions, with the `ninja` backend):
+
+```sh
+ninja dist
+```
This creates a file called `projectname-version.tar.xz` in the build
-tree subdirectory `meson-dist`. This archive contains the full
-contents of the latest commit in revision control including all the
-submodules (recursively). All revision control metadata is removed.
-Meson then takes
-this archive and tests that it works by doing a full compile + test +
-install cycle. If all these pass, Meson will then create a SHA-256
-checksum file next to the archive.
-
-**Note**: Meson behaviour is different from Autotools. The Autotools
-"dist" target packages up the current source tree. Meson packages
-the latest revision control commit. The reason for this is that it
-prevents developers from doing accidental releases where the
-distributed archive does not match any commit in revision control
-(especially the one tagged for the release).
+tree subdirectory `meson-dist`. This archive contains the full contents
+of the latest commit in revision control including all the submodules
+(recursively). All revision control metadata is removed. Meson then
+takes this archive and tests that it works by doing a full
+`compile` + `test` + `install` cycle. If all these pass, Meson will
+then create a `SHA-256` checksum file next to the archive.
+
+
+## Autotools dist vs. Meson dist
+
+Meson behaviour is different from Autotools. The Autotools "dist"
+target packages up the current source tree. Meson packages the latest
+revision control commit. The reason for this is that it prevents developers
+from doing accidental releases where the distributed archive does not match
+any commit in revision control (especially the one tagged for the release).
+
+
+## Include subprojects in your release
+
+The `meson dist` command has an `--include-subprojects` command line option.
+When enabled, the source tree of all subprojects used by the current build
+will also be included in the final tarball. This is useful for distributing a
+self-contained tarball that can be built offline (i.e. with `--wrap-mode=nodownload`).
+
+
+## Skip build and test with `--no-tests`
+
+The `meson dist` command has a `--no-tests` option to skip the build and
+test steps for the generated package. It can be used to avoid wasting time,
+for example in CI that already does its own testing.
+
+In other words, with `--no-tests` you tell Meson "do not build and test the
+generated packages".
+
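A short illustrative invocation combining the dist flags documented above (assuming an already configured build directory named `builddir`):

```sh
# Create a tarball that bundles subproject sources and skip the
# compile/test/install verification of the generated package.
meson dist -C builddir --include-subprojects --no-tests
```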
diff --git a/docs/markdown/Cross-compilation.md b/docs/markdown/Cross-compilation.md
index 1c53dcf..d86d417 100644
--- a/docs/markdown/Cross-compilation.md
+++ b/docs/markdown/Cross-compilation.md
@@ -222,7 +222,7 @@ Once you have the cross file, starting a build is simple
$ meson srcdir builddir --cross-file cross_file.txt
```
-Once configuration is done, compilation is started by invoking Ninja
+Once configuration is done, compilation is started by invoking `meson compile`
in the usual way.
## Introspection and system checks
diff --git a/docs/markdown/Design-rationale.md b/docs/markdown/Design-rationale.md
index 57aaee4..7121192 100644
--- a/docs/markdown/Design-rationale.md
+++ b/docs/markdown/Design-rationale.md
@@ -223,11 +223,11 @@ add_test('test library', exe)
```
First we build a shared library named foobar. It is marked
-installable, so running `ninja install` installs it to the library
+installable, so running `meson install` installs it to the library
directory (the system knows which one so the user does not have to
care). Then we build a test executable which is linked against the
library. It will not be installed, but instead it is added to the list
-of unit tests, which can be run with the command `ninja test`.
+of unit tests, which can be run with the command `meson test`.
Above we mentioned precompiled headers as a feature not supported by
other build systems. Here's how you would use them.
diff --git a/docs/markdown/FAQ.md b/docs/markdown/FAQ.md
index e43c857..7a41443 100644
--- a/docs/markdown/FAQ.md
+++ b/docs/markdown/FAQ.md
@@ -51,7 +51,7 @@ $ /path/to/meson.py <options>
After this you don't have to care about invoking Meson any more. It
remembers where it was originally invoked from and calls itself
appropriately. As a user the only thing you need to do is to `cd` into
-your build directory and invoke `ninja`.
+your build directory and invoke `meson compile`.
## Why can't I specify target files with a wildcard?
@@ -432,7 +432,7 @@ sources in the build target:
libfoo_gen_headers = custom_target('gen-headers', ..., output: 'foo-gen.h')
libfoo_sources = files('foo-utils.c', 'foo-lib.c')
# Add generated headers to the list of sources for the build target
-libfoo = library('foo', sources: libfoo_sources + libfoo_gen_headers)
+libfoo = library('foo', sources: [libfoo_sources + libfoo_gen_headers])
```
Now let's say you have a new target that links to `libfoo`:
diff --git a/docs/markdown/Feature-autodetection.md b/docs/markdown/Feature-autodetection.md
index c1b7659..4d366d9 100644
--- a/docs/markdown/Feature-autodetection.md
+++ b/docs/markdown/Feature-autodetection.md
@@ -28,12 +28,12 @@ the binaries `gcovr`, `lcov` and `genhtml`. If version 3.3 or higher
of the first is found, targets called *coverage-text*, *coverage-xml*
and *coverage-html* are generated. Alternatively, if the latter two
are found, only the target *coverage-html* is generated. Coverage
-reports can then be produced simply by calling e.g. `ninja
+reports can then be produced simply by calling e.g. `meson compile
coverage-xml`. As a convenience, a high-level *coverage* target is
also generated which will produce all 3 coverage report types, if
possible.
Note that generating any of the coverage reports described above
-requires the tests (i.e. `ninja test`) to finish running so the
+requires the tests (i.e. `meson test`) to finish running so the
information about the functions that are called in the tests can be
gathered for the report.
diff --git a/docs/markdown/Gnome-module.md b/docs/markdown/Gnome-module.md
index a9c4531..3d06233 100644
--- a/docs/markdown/Gnome-module.md
+++ b/docs/markdown/Gnome-module.md
@@ -88,7 +88,6 @@ There are several keyword arguments. Many of these map directly to the
e.g. `Gtk`
* `includes`: list of gir names to be included, can also be a GirTarget
* `header`: *(Added 0.43.0)* name of main c header to include for the library, e.g. `glib.h`
-* `dependencies`: deps to use during introspection scanning
* `include_directories`: extra include paths to look for gir files
* `install`: if true, install the generated files
* `install_dir_gir`: (*Added 0.35.0*) which directory to install the
@@ -98,6 +97,7 @@ There are several keyword arguments. Many of these map directly to the
* `link_with`: list of libraries to link with
* `symbol_prefix`: the symbol prefix for the gir object, e.g. `gtk`,
(*Since 0.43.0*) an ordered list of multiple prefixes is allowed
+* `fatal_warnings`: *Since 0.55.0* turn scanner warnings into fatal errors.
Returns an array of two elements which are: `[gir_target,
typelib_target]`
@@ -223,7 +223,7 @@ directory. Note that this is not for installing schemas and is only
useful when running the application locally for example during tests.
* `build_by_default`: causes, when set to true, to have this target be
- built by default, that is, when invoking plain `ninja`, the default
+ built by default, that is, when invoking plain `meson compile`, the default
value is true for all built target types
* `depend_files`: files ([`string`](Reference-manual.md#string-object),
[`files()`](Reference-manual.md#files), or
@@ -246,7 +246,7 @@ one XML file.
* `annotations`: *(Added 0.43.0)* list of lists of 3 strings for the annotation for `'ELEMENT', 'KEY', 'VALUE'`
* `docbook`: *(Added 0.43.0)* prefix to generate `'PREFIX'-NAME.xml` docbooks
* `build_by_default`: causes, when set to true, to have this target be
- built by default, that is, when invoking plain `ninja`, the default
+ built by default, that is, when invoking plain `meson compile`, the default
value is true for all built target types
* `install_dir`: (*Added 0.46.0*) location to install the header or
bundle depending on previous options
@@ -344,8 +344,8 @@ of the module.
Note that this has the downside of rebuilding the doc for each build, which is
often very slow. It usually should be enabled only in CI.
-This creates a `$module-doc` target that can be ran to build docs and
-normally these are only built on install.
+This also creates a `$module-doc` target that can be run to build documentation.
+Normally the documentation is only built on install.
*Since 0.52.0* Returns a target object that can be passed as dependency to other
targets using generated doc files (e.g. in `content_files` of another doc).
diff --git a/docs/markdown/IDE-integration.md b/docs/markdown/IDE-integration.md
index f51075e..2cc4f4f 100644
--- a/docs/markdown/IDE-integration.md
+++ b/docs/markdown/IDE-integration.md
@@ -227,8 +227,8 @@ in the `meson.build`.
## Tests
-Compilation and unit tests are done as usual by running the `ninja` and
-`ninja test` commands. A JSON formatted result log can be found in
+Compilation and unit tests are done as usual by running the `meson compile` and
+`meson test` commands. A JSON formatted result log can be found in
`workspace/project/builddir/meson-logs/testlog.json`.
When these tests fail, the user probably wants to run the failing test in a
diff --git a/docs/markdown/IndepthTutorial.md b/docs/markdown/IndepthTutorial.md
index dd93f82..d2e2662 100644
--- a/docs/markdown/IndepthTutorial.md
+++ b/docs/markdown/IndepthTutorial.md
@@ -79,12 +79,12 @@ With these four files we are done. To configure, build and run the test suite, w
```console
$ meson builddir && cd builddir
-$ ninja
-$ ninja test
+$ meson compile
+$ meson test
```
To then install the project you only need one command.
```console
-$ ninja install
+$ meson install
```
diff --git a/docs/markdown/Installing.md b/docs/markdown/Installing.md
index 5abfdd4..9dc2ad4 100644
--- a/docs/markdown/Installing.md
+++ b/docs/markdown/Installing.md
@@ -4,6 +4,18 @@ short-description: Installing targets
# Installing
+Invoked via the [following command](Commands.md#install) *(available since 0.47.0)*:
+
+```sh
+meson install
+```
+
+or alternatively (on older meson versions with `ninja` backend):
+
+```sh
+ninja install
+```
+
By default Meson will not install anything. Build targets can be
installed by tagging them as installable in the definition.
@@ -97,15 +109,13 @@ packages. This is done with the `DESTDIR` environment variable and it
is used just like with other build systems:
```console
-$ DESTDIR=/path/to/staging/area ninja install
+$ DESTDIR=/path/to/staging/area meson install
```
## Custom install behaviour
-The default install target (executed via, e.g., `ninja install`) does
-installing with reasonable default options. More control over the
-install behaviour can be achieved with the `meson install` command,
-that has been available since 0.47.0.
+Installation behaviour can be further customized using
+additional arguments.
For example, if you wish to install the current setup without
rebuilding the code (which the default install target always does) and
diff --git a/docs/markdown/Localisation.md b/docs/markdown/Localisation.md
index ce9e3b6..ed63e13 100644
--- a/docs/markdown/Localisation.md
+++ b/docs/markdown/Localisation.md
@@ -48,7 +48,7 @@ Then we need to generate the main pot file. The potfile can have any name but is
Run the following command from your build folder to generate the pot file. It is recommended to inspect it manually afterwards and fill in e.g. proper copyright and contact information.
```console
-$ ninja intltest-pot
+$ meson compile intltest-pot
```
### generate .po files
@@ -56,5 +56,5 @@ $ ninja intltest-pot
For each language listed in the array above we need a corresponding `.po` file. Those can be generated by running the following command from your build folder.
```console
-$ ninja intltest-update-po
+$ meson compile intltest-update-po
```
diff --git a/docs/markdown/Machine-files.md b/docs/markdown/Machine-files.md
index 404c3d2..9011f79 100644
--- a/docs/markdown/Machine-files.md
+++ b/docs/markdown/Machine-files.md
@@ -8,10 +8,83 @@ environments](Native-environments.md).
## Sections
The following sections are allowed:
+- constants
- binaries
- paths
- properties
+### constants
+
+*Since 0.55.0*
+
+String and list concatenation is supported using the `+` operator; joining paths
+is supported using the `/` operator.
+Entries defined in the `[constants]` section can be used in any other section
+(they are always parsed first); entries in any other section can be used only
+within that same section and only after they have been defined.
+
+```ini
+[constants]
+toolchain = '/toolchain'
+common_flags = ['--sysroot=' + toolchain / 'sysroot']
+
+[properties]
+c_args = common_flags + ['-DSOMETHING']
+cpp_args = c_args + ['-DSOMETHING_ELSE']
+
+[binaries]
+c = toolchain / 'gcc'
+```
+
+This can be useful with cross file composition as well. A generic cross file
+could be composed with a platform specific file where constants are defined:
+```ini
+# aarch64.ini
+[constants]
+arch = 'aarch64-linux-gnu'
+```
+
+```ini
+# cross.ini
+[binaries]
+c = arch + '-gcc'
+cpp = arch + '-g++'
+strip = arch + '-strip'
+pkgconfig = arch + '-pkg-config'
+...
+```
+
+This can be used as `meson setup --cross-file aarch64.ini --cross-file cross.ini builddir`.
+
+Note that file composition happens before the parsing of values. The example
+below results in `b` being `'HelloWorld'`:
+```ini
+# file1.ini:
+[constants]
+a = 'Foo'
+b = a + 'World'
+```
+
+```ini
+#file2.ini:
+[constants]
+a = 'Hello'
+```
+
+The example below results in an error when file1.ini is included before file2.ini
+because `b` would be defined before `a`:
+```ini
+# file1.ini:
+[constants]
+b = a + 'World'
+```
+
+```ini
+#file2.ini:
+[constants]
+a = 'Hello'
+```
+
### Binaries
The binaries section contains a list of binaries. These can be used
diff --git a/docs/markdown/Meson-sample.md b/docs/markdown/Meson-sample.md
index 6f26f36..f98e022 100644
--- a/docs/markdown/Meson-sample.md
+++ b/docs/markdown/Meson-sample.md
@@ -50,7 +50,7 @@ exe = executable('myexe', src)
test('simple test', exe)
```
-Here we create a unit test called *simple test*, and which uses the built executable. When the tests are run with the `ninja test` command, the built executable is run. If it returns zero, the test passes. A non-zero return value indicates an error, which Meson will then report to the user.
+Here we create a unit test called *simple test*, and which uses the built executable. When the tests are run with the `meson test` command, the built executable is run. If it returns zero, the test passes. A non-zero return value indicates an error, which Meson will then report to the user.
A note to Visual Studio users
-----
diff --git a/docs/markdown/MesonCI.md b/docs/markdown/MesonCI.md
new file mode 100644
index 0000000..73b979b
--- /dev/null
+++ b/docs/markdown/MesonCI.md
@@ -0,0 +1,53 @@
+# Meson CI setup
+
+This document is aimed at Meson contributors and documents
+the CI setup used for testing Meson itself. The Meson
+project uses multiple CI platforms to cover a wide
+range of target systems.
+
+## Travis CI
+
+The Travis configuration file is the `.travis.yml` in the
+project root. This platform tests cross compilation and
+unity builds on a [linux docker image](#docker-images) and
+on OSX.
+
+## GitHub actions
+
+The configuration files for GitHub actions are located in
+`.github/workflows`. Here, all [images](#docker-images)
+are tested with the full `run_tests.py` run. Additionally,
+some other, smaller, tests are run.
+
+## Docker images
+
+The Linux docker images are automatically built and
+uploaded by GitHub actions. An image rebuild is triggered
+when any of the image definition files are changed (in
+`ci/ciimage`) in the master branch. Additionally, the
+images are also updated weekly.
+
+Each docker image has one corresponding directory in
+`ci/ciimage` with an `image.json` and an `install.sh`.
+
+### Image generation
+
+There are no manual Dockerfiles. Instead the Dockerfile is
+automatically generated by the `build.py` script. This is
+done to ensure that all images have the same layout and can
+all be built and tested automatically.
+
+The Dockerfile is generated from the `image.json` file and
+basically only adds a few common files and runs the
+`install.sh` script which should contain all distribution
+specific setup steps. The `common.sh` can be sourced via
+`source /ci/common.sh` to access some shared functionality.
+
+To generate the image run `build.py -t build <image>`. A
+generated image can be tested with `build.py -t test <image>`.
+
+### Common image setup
+
+Each docker image has a `/ci` directory with an
+`env_vars.sh` script. This script has to be sourced before
+running the meson test suite.
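As an illustration of the image workflow described above, a sketch of building and testing one image locally (assuming the `build.py` script lives in `ci/ciimage` and is run from the repository root; `fedora` is one of the image directories listed in this patch):

```sh
# Generate the Dockerfile and build the image for one distribution,
# then run that image's test step.
./ci/ciimage/build.py -t build fedora
./ci/ciimage/build.py -t test fedora
```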
diff --git a/docs/markdown/Project-templates.md b/docs/markdown/Project-templates.md
index 5f323bd..7ded318 100644
--- a/docs/markdown/Project-templates.md
+++ b/docs/markdown/Project-templates.md
@@ -16,15 +16,34 @@ $ meson init --language=c --name=myproject --version=0.1
```
This would create the build definitions for a helloworld type
-project. The result can be compiled as usual. For example compiling it
-with Ninja could be done like this:
+project. The result can be compiled as usual. For example, it
+could be built like this:
```
-$ meson builddir
-$ ninja -C builddir
+$ meson setup builddir
+$ meson compile -C builddir
```
The generator has many different projects and settings. They can all
be listed by invoking the command `meson init --help`.
This feature is available since Meson version 0.45.0.
+
+# Generate a build script for an existing project
+
+With `meson init` you can generate a build script for an existing
+project with existing project files by running the command in the
+root directory of your project. Meson currently supports this
+feature for `executable` and `jar` projects.
+
+# Build after generation of template
+
+It is possible to have Meson generate a build directory from the
+`meson init` command without running `meson setup`. This is done
+by passing the `-b` or `--build` switch.
+
+```console
+$ mkdir project_name
+$ cd project_name
+$ meson init --language=c --name=myproject --version=0.1 --build
+```
\ No newline at end of file
diff --git a/docs/markdown/Qt5-module.md b/docs/markdown/Qt5-module.md
index f1c2f6c..0d9a6b6 100644
--- a/docs/markdown/Qt5-module.md
+++ b/docs/markdown/Qt5-module.md
@@ -21,7 +21,7 @@ This method generates the necessary targets to build translation files with lrel
- `ts_files`, the list of input translation files produced by Qt's lupdate tool.
- `install` when true, this target is installed during the install step (optional).
- `install_dir` directory to install to (optional).
- - `build_by_default` when set to true, to have this target be built by default, that is, when invoking plain ninja; the default value is false (optional).
+ - `build_by_default` when set to true, to have this target be built by default, that is, when invoking `meson compile`; the default value is false (optional).
## has_tools
diff --git a/docs/markdown/Quick-guide.md b/docs/markdown/Quick-guide.md
index 0bed683..74636e5 100644
--- a/docs/markdown/Quick-guide.md
+++ b/docs/markdown/Quick-guide.md
@@ -93,8 +93,8 @@ are working on. The steps to take are very simple.
```console
$ cd /path/to/source/root
$ meson builddir && cd builddir
-$ ninja
-$ ninja test
+$ meson compile
+$ meson test
```
The only thing to note is that you need to create a separate build
@@ -104,14 +104,14 @@ directory. This allows you to have multiple build trees with different
configurations at the same time. This way generated files are not
added into revision control by accident.
-To recompile after code changes, just type `ninja`. The build command
+To recompile after code changes, just type `meson compile`. The build command
is always the same. You can do arbitrary changes to source code and
build system files and Meson will detect those and will do the right
thing. If you want to build optimized binaries, just use the argument
`--buildtype=debugoptimized` when running Meson. It is recommended
that you keep one build directory for unoptimized builds and one for
optimized ones. To compile any given configuration, just go into the
-corresponding build directory and run `ninja`.
+corresponding build directory and run `meson compile`.
Meson will automatically add compiler flags to enable debug
information and compiler warnings (i.e. `-g` and `-Wall`). This means
@@ -128,9 +128,9 @@ build and install Meson projects are the following.
```console
$ cd /path/to/source/root
$ meson --prefix /usr --buildtype=plain builddir -Dc_args=... -Dcpp_args=... -Dc_link_args=... -Dcpp_link_args=...
-$ ninja -v -C builddir
-$ ninja -C builddir test
-$ DESTDIR=/path/to/staging/root ninja -C builddir install
+$ meson compile -C builddir
+$ meson test -C builddir
+$ DESTDIR=/path/to/staging/root meson install -C builddir
```
The command line switch `--buildtype=plain` tells Meson not to add its
@@ -139,7 +139,7 @@ on used flags.
This is very similar to other build systems. The only difference is
that the `DESTDIR` variable is passed as an environment variable
-rather than as an argument to `ninja install`.
+rather than as an argument to `meson install`.
As distro builds happen always from scratch, you might consider
enabling [unity builds](Unity-builds.md) on your packages because they
diff --git a/docs/markdown/Reference-manual.md b/docs/markdown/Reference-manual.md
index 9b5d657..dad8c12 100644
--- a/docs/markdown/Reference-manual.md
+++ b/docs/markdown/Reference-manual.md
@@ -15,19 +15,19 @@ afterwards](#returned-objects).
Adds the positional arguments to the compiler command line. This
function has two keyword arguments:
-- `language` specifies the language(s) that the arguments should be
+- `language`: specifies the language(s) that the arguments should be
applied to. If a list of languages is given, the arguments are added
to each of the corresponding compiler command lines. Note that there
is no way to remove an argument set in this way. If you have an
argument that is only used in a subset of targets, you have to specify
it in per-target flags.
-- `native` is a boolean specifying whether the arguments should be
+- `native` *(since 0.48.0)*: a boolean specifying whether the arguments should be
applied to the native or cross compilation. If `true` the arguments
will only be used for native compilations. If `false` the arguments
will only be used in cross compilations. If omitted, the flags are
added to native compilations if compiling natively and cross
- compilations (only) when cross compiling. Available since 0.48.0
+ compilations (only) when cross compiling.
The arguments are used in all compiler invocations with the exception
of compile tests, because you might need to run a compile test with
@@ -70,12 +70,12 @@ endif
Takes the following keyword arguments:
-- `required` defaults to `true`, which means that if any of the languages
-specified is not found, Meson will halt. Since *0.47.0* the value of a
+- `required`: defaults to `true`, which means that if any of the languages
+specified is not found, Meson will halt. *(since 0.47.0)* The value of a
[`feature`](Build-options.md#features) option can also be passed.
-- `native` if set to `true`, the language will be used to compile for the build
- machine, if `false`, for the host machine. Since *0.54.0*.
+- `native` *(since 0.54.0)*: if set to `true`, the language will be used to compile for the build
+ machine, if `false`, for the host machine.
Returns `true` if all languages specified were found and `false` otherwise.
@@ -113,16 +113,16 @@ Add a custom test setup that can be used to run the tests with a
custom setup, for example under Valgrind. The keyword arguments are
the following:
-- `env` environment variables to set, such as `['NAME1=value1',
+- `env`: environment variables to set, such as `['NAME1=value1',
'NAME2=value2']`, or an [`environment()`
object](#environment-object) which allows more sophisticated
- environment juggling. *Since 0.52.0* a dictionary is also accepted.
-- `exe_wrapper` a list containing the wrapper command or script followed by the arguments to it
-- `gdb` if `true`, the tests are also run under `gdb`
-- `timeout_multiplier` a number to multiply the test timeout with
-- `is_default` a bool to set whether this is the default test setup.
+ environment juggling. *(since 0.52.0)* A dictionary is also accepted.
+- `exe_wrapper`: a list containing the wrapper command or script followed by the arguments to it
+- `gdb`: if `true`, the tests are also run under `gdb`
+- `timeout_multiplier`: a number to multiply the test timeout with
+- `is_default` *(since 0.49.0)*: a bool to set whether this is the default test setup.
If `true`, the setup will be used whenever `meson test` is run
- without the `--setup` option. Since 0.49.0
+ without the `--setup` option.
To use the test setup, run `meson test --setup=*name*` inside the
build dir.
@@ -137,11 +137,11 @@ Note that all these options are also available while running the
runtarget alias_target(target_name, dep1, ...)
```
-Since *0.52.0*
+*(since 0.52.0)*
This function creates a new top-level target. Like all top-level targets, this
-integrates with the selected backend. For instance, with Ninja you can
-run it as `ninja target_name`. This is a dummy target that does not execute any
+integrates with the selected backend. For instance, you can
+run it as `meson compile target_name`. This is a dummy target that does not execute any
command, but ensures that all dependencies are built. Dependencies can be any
build target (e.g. return value of [executable()](#executable), custom_target(), etc)
@@ -153,7 +153,7 @@ build target (e.g. return value of [executable()](#executable), custom_target(),
Abort with an error message if `condition` evaluates to `false`.
-*Since 0.53.0* `message` argument is optional and defaults to print the condition
+*(since 0.53.0)* `message` argument is optional and defaults to print the condition
statement instead.
### benchmark()
@@ -169,7 +169,7 @@ run. The behavior of this function is identical to [`test()`](#test) except for:
* benchmark() does not automatically add the `MALLOC_PERTURB_` environment variable
*Note:* Prior to 0.52.0 benchmark would warn that `depends` and `priority`
-were unsupported, this is incorrect
+were unsupported; this is incorrect.
### both_libraries()
@@ -177,6 +177,8 @@ were unsupported, this is incorrect
buildtarget = both_libraries(library_name, list_of_sources, ...)
```
+*(since 0.46.0)*
+
Builds both a static and shared library with the given
sources. Positional and keyword arguments are otherwise the same as
for [`library`](#library). Source files will be compiled only once and
@@ -190,8 +192,6 @@ shared library. In addition it supports the following extra methods:
- `get_shared_lib()` returns the shared library build target
- `get_static_lib()` returns the static library build target
-*Added 0.46.0*
-
### build_target()
Creates a build target whose type can be set dynamically with the
@@ -234,7 +234,7 @@ Creates an empty configuration object. You should add your
configuration with [its method calls](#configuration-data-object) and
finally use it in a call to `configure_file`.
-Since *0.49.0* takes an optional dictionary as first argument. If
+*(since 0.49.0)* Takes an optional dictionary as first argument. If
provided, each key/value pair is added into the `configuration_data`
as if `set()` method was called for each of them.
@@ -251,7 +251,7 @@ When a [`configuration_data()`](#configuration_data) object is passed
to the `configuration:` keyword argument, it takes a template file as
the `input:` (optional) and produces the `output:` (required) by
substituting values from the configuration data as detailed in [the
-configuration file documentation](Configuration.md). Since *0.49.0* a
+configuration file documentation](Configuration.md). *(since 0.49.0)* A
dictionary can be passed instead of a
[`configuration_data()`](#configuration_data) object.
@@ -259,53 +259,53 @@ When a list of strings is passed to the `command:` keyword argument,
it takes any source or configured file as the `input:` and assumes
that the `output:` is produced when the specified command is run.
-Since *0.47.0*, when the `copy:` keyword argument is set to `true`,
+*(since 0.47.0)* When the `copy:` keyword argument is set to `true`,
this function will copy the file provided in `input:` to a file in the
build directory with the name `output:` in the current directory.
These are all the supported keyword arguments:
-- `capture` when this argument is set to true, Meson captures `stdout`
- of the `command` and writes it to the target file specified as
- `output`. Available since v0.41.0.
-- `command` as explained above, if specified, Meson does not create
+- `capture` *(since 0.41.0)*: when this argument is set to true,
+ Meson captures `stdout` of the `command` and writes it to the target
+ file specified as `output`.
+- `command`: as explained above, if specified, Meson does not create
the file itself but rather runs the specified command, which allows
- you to do fully custom file generation. Since *0.52.0* the command can contain
+ you to do fully custom file generation. *(since 0.52.0)* The command can contain
file objects and more than one file can be passed to the `input` keyword
argument, see [`custom_target()`](#custom_target) for details about string
substitutions.
-- `copy` *(added 0.47.0)* as explained above, if specified Meson only
+- `copy` *(since 0.47.0)*: as explained above, if specified Meson only
copies the file from input to output.
-- `depfile` *(added 0.52.0)* is a dependency file that the command can write listing
+- `depfile` *(since 0.52.0)*: a dependency file that the command can write listing
all the additional files this target depends on. A change
in any one of these files triggers a reconfiguration.
-- `format` *(added 0.46.0)* the format of defines. It defaults to `meson`, and so substitutes
+- `format` *(since 0.46.0)*: the format of defines. It defaults to `meson`, and so substitutes
`#mesondefine` statements and variables surrounded by `@` characters, you can also use `cmake`
to replace `#cmakedefine` statements and variables with the `${variable}` syntax. Finally you can use
`cmake@` in which case substitutions will apply on `#cmakedefine` statements and variables with
the `@variable@` syntax.
-- `input` the input file name. If it's not specified in configuration
+- `input`: the input file name. If it's not specified in configuration
mode, all the variables in the `configuration:` object (see above)
are written to the `output:` file.
-- `install` *(added 0.50.0)* When true, this generated file is installed during
+- `install` *(since 0.50.0)*: when true, this generated file is installed during
the install step, and `install_dir` must be set and not empty. When false, this
generated file is not installed regardless of the value of `install_dir`.
When omitted it defaults to true when `install_dir` is set and not empty,
false otherwise.
-- `install_dir` the subdirectory to install the generated file to
+- `install_dir`: the subdirectory to install the generated file to
(e.g. `share/myproject`), if omitted or given the value of empty
string, the file is not installed.
-- `install_mode` *(added 0.47.0)* specify the file mode in symbolic format
+- `install_mode` *(since 0.47.0)*: specify the file mode in symbolic format
and optionally the owner/uid and group/gid for the installed files.
-- `output` the output file name (since v0.41.0, may contain
- `@PLAINNAME@` or `@BASENAME@` substitutions). In configuration mode,
+- `output`: the output file name. *(since 0.41.0)* may contain
+ `@PLAINNAME@` or `@BASENAME@` substitutions. In configuration mode,
the permissions of the input file (if it is specified) are copied to
the output file.
-- `output_format` *(added 0.47.0)* the format of the output to generate when no input
+- `output_format` *(since 0.47.0)*: the format of the output to generate when no input
was specified. It defaults to `c`, in which case preprocessor directives
will be prefixed with `#`, you can also use `nasm`, in which case the
prefix will be `%`.
-- `encoding` *(added v0.47.0)* set the file encoding for the input and output file,
+- `encoding` *(since 0.47.0)*: set the file encoding for the input and output file,
defaults to utf-8. The supported encodings are those of python3, see
[standard-encodings](https://docs.python.org/3/library/codecs.html#standard-encodings).
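+
+Two common invocations, sketched with hypothetical file names and keys:
+
+```meson
+# configuration mode: substitute values from a configuration_data object
+conf = configuration_data({'PROJECT_VERSION' : '"1.0"'})
+configure_file(input : 'config.h.in',
+               output : 'config.h',
+               configuration : conf)
+
+# copy mode (since 0.47.0): copy the input verbatim into the build directory
+configure_file(input : 'defaults.ini',
+               output : 'defaults.ini',
+               copy : true)
+```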
@@ -319,34 +319,30 @@ Create a custom top level build target. The only positional argument
is the name of this target and the keyword arguments are the
following.
-- `build_by_default` *(added 0.38)* causes, when set to true, to
+- `build_by_default` *(since 0.38.0)*: causes, when set to true, to
have this target be built by default. This means it will be built when
- `ninja` is called without any arguments or asked to build a target
- like `ninja test` that depends on ninja's [default
- target](https://ninja-build.org/manual.html#_default_target_statements)
- set to `all` by meson. The same behavior applies for backends other
- than `ninja`. The default value is `false`.
- *(changed in 0.50)* if `build_by_default` is explicitly set to false, `install`
+ `meson compile` is called without any arguments. The default value is `false`.
+ *(since 0.50.0)* If `build_by_default` is explicitly set to false, `install`
will no longer override it. If `build_by_default` is not set, `install` will
still determine its default.
-- `build_always` (deprecated) if `true` this target is always considered out of
+- `build_always` **(deprecated)**: if `true` this target is always considered out of
date and is rebuilt every time. Equivalent to setting both
`build_always_stale` and `build_by_default` to true.
-- `build_always_stale` *(added 0.47)* if `true` the target is always considered out of date.
+- `build_always_stale` *(since 0.47.0)*: if `true` the target is always considered out of date.
Useful for things such as build timestamps or revision control tags.
The associated command is run even if the outputs are up to date.
-- `capture`, there are some compilers that can't be told to write
+- `capture`: there are some compilers that can't be told to write
their output to a file but instead write it to standard output. When
this argument is set to true, Meson captures `stdout` and writes it
to the target file. Note that your command argument list may not
contain `@OUTPUT@` when capture mode is active.
-- `console` *(added 0.48)* keyword argument conflicts with `capture`, and is meant
+- `console` *(since 0.48.0)*: keyword argument conflicts with `capture`, and is meant
for commands that are resource-intensive and take a long time to
finish. With the Ninja backend, setting this will add this target
to [Ninja's `console` pool](https://ninja-build.org/manual.html#_the_literal_console_literal_pool),
which has special properties such as not buffering stdout and
serializing all targets in this pool.
-- `command` command to run to create outputs from inputs. The command
+- `command`: command to run to create outputs from inputs. The command
may be strings or the return value of functions that return file-like
objects such as [`find_program()`](#find_program),
[`executable()`](#executable), [`configure_file()`](#configure_file),
@@ -356,48 +352,47 @@ following.
Note: always specify commands in array form `['commandname',
'-arg1', '-arg2']` rather than as a string `'commandname -arg1
-arg2'` as the latter will *not* work.
-- `depend_files` files ([`string`](#string-object),
+- `depend_files`: files ([`string`](#string-object),
[`files()`](#files), or [`configure_file()`](#configure_file)) that
this target depends on but are not listed in the `command` keyword
argument. Useful for adding regen dependencies.
-- `depends` specifies that this target depends on the specified
+- `depends`: specifies that this target depends on the specified
target(s), even though it does not take any of them as a command
line argument. This is meant for cases where you have a tool that
e.g. does globbing internally. Usually you should just put the
generated sources as inputs and Meson will set up all dependencies
automatically.
-- `depfile` is a dependency file that the command can write listing
+- `depfile`: a dependency file that the command can write listing
all the additional files this target depends on, for example a C
compiler would list all the header files it included, and a change
in any one of these files triggers a recompilation
-- `input` list of source files. As of 0.41.0 the list will be flattened.
-- `install` when true, this target is installed during the install step
-- `install_dir` directory to install to
-- `install_mode` *(added 0.47.0)* the file mode and optionally the
+- `input`: list of source files. *(since 0.41.0)* the list is flattened.
+- `install`: when true, this target is installed during the install step
+- `install_dir`: directory to install to
+- `install_mode` *(since 0.47.0)*: the file mode and optionally the
owner/uid and group/gid
-- `output` list of output files
+- `output`: list of output files
The list of strings passed to the `command` keyword argument accept
the following special string substitutions:
-- `@INPUT@` the full path to the input passed to `input`. If more than
+- `@INPUT@`: the full path to the input passed to `input`. If more than
one input is specified, all of them will be substituted as separate
arguments only if the command uses `'@INPUT@'` as a
standalone-argument. For instance, this would not work: `command :
['cp', './@INPUT@']`, but this would: `command : ['cp', '@INPUT@']`.
-- `@OUTPUT@` the full path to the output passed to `output`. If more
+- `@OUTPUT@`: the full path to the output passed to `output`. If more
than one outputs are specified, the behavior is the same as
`@INPUT@`.
-- `@INPUT0@` `@INPUT1@` `...` the full path to the input with the specified array index in `input`
-- `@OUTPUT0@` `@OUTPUT1@` `...` the full path to the output with the specified array index in `output`
-- `@OUTDIR@` the full path to the directory where the output(s) must be written
-- `@DEPFILE@` the full path to the dependency file passed to `depfile`
+- `@INPUT0@` `@INPUT1@` `...`: the full path to the input with the specified array index in `input`
+- `@OUTPUT0@` `@OUTPUT1@` `...`: the full path to the output with the specified array index in `output`
+- `@OUTDIR@`: the full path to the directory where the output(s) must be written
+- `@DEPFILE@`: the full path to the dependency file passed to `depfile`
- `@PLAINNAME@`: the input filename, without a path
- `@BASENAME@`: the input filename, with extension removed
-- `@PRIVATE_DIR@`: path to a directory where the custom target must store all its intermediate files, available since 0.50.1
+- `@PRIVATE_DIR@` *(since 0.50.1)*: path to a directory where the custom target must store all its intermediate files.
-The `depfile` keyword argument also accepts the `@BASENAME@` and `@PLAINNAME@`
-substitutions. *(since 0.47)*
+*(since 0.47.0)* The `depfile` keyword argument also accepts the `@BASENAME@` and `@PLAINNAME@` substitutions.
The returned object also has methods that are documented in the
[object methods section](#custom-target-object) below.
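+
+As a sketch, a target driven by a hypothetical generator script, using the
+substitutions described above:
+
+```meson
+# gen.py, defs.txt and defs.h are placeholders
+gen = find_program('gen.py')
+defs_h = custom_target('generate-defs',
+  input : 'defs.txt',
+  output : 'defs.h',
+  command : [gen, '@INPUT@', '--output', '@OUTPUT@'],
+  install : true,
+  install_dir : get_option('includedir'))
+```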
@@ -414,22 +409,21 @@ internal to the current build. The main use case for this is in
subprojects. This allows a subproject to easily specify how it should
be used. This makes it interchangeable with the same dependency that
is provided externally by the system. This function has the following
-keyword arguments.
-
- - `compile_args`, compile arguments to use
- - `dependencies`, other dependencies needed to use this dependency
- - `include_directories`, the directories to add to header search path,
- must be include_directories objects or, since 0.50.0, plain strings
- - `link_args`, link arguments to use
- - `link_with`, libraries to link against
- - `link_whole`, libraries to link fully, same as [`executable`](#executable)
- Since 0.46.0
- - `sources`, sources to add to targets (or generated header files
- that should be built before sources including them are built)
- - `version`, the version of this dependency, such as `1.2.3`
- - `variables`, a dictionary of arbitrary strings, this is meant to be used
- in subprojects where special variables would be provided via cmake or
- pkg-config. Since 0.54.0
+keyword arguments:
+
+- `compile_args`: compile arguments to use.
+- `dependencies`: other dependencies needed to use this dependency.
+- `include_directories`: the directories to add to header search path,
+ must be include_directories objects or *(since 0.50.0)* plain strings
+- `link_args`: link arguments to use.
+- `link_with`: libraries to link against.
+- `link_whole` *(since 0.46.0)*: libraries to link fully, same as [`executable`](#executable).
+- `sources`: sources to add to targets (or generated header files
+ that should be built before sources including them are built)
+- `version`: the version of this dependency, such as `1.2.3`
+- `variables` *(since 0.54.0)*: a dictionary of arbitrary strings, this is meant to be used
+ in subprojects where special variables would be provided via cmake or
+ pkg-config.
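+
+A typical use inside a subproject, sketched with hypothetical names:
+
+```meson
+# a library built by this subproject, exposed to the parent project
+foo_inc = include_directories('include')
+foo_lib = library('foo', 'foo.c', include_directories : foo_inc)
+foo_dep = declare_dependency(link_with : foo_lib,
+  include_directories : foo_inc,
+  version : '1.2.3')
+```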
### dependency()
@@ -445,12 +439,12 @@ logic](Dependencies.md#dependencies-with-custom-lookup-functionality)
are also supported. This function supports the following keyword
arguments:
-- `default_options` *(added 0.37.0)* an array of default option values
+- `default_options` *(since 0.37.0)*: an array of default option values
that override those set in the subproject's `meson_options.txt`
(like `default_options` in [`project()`](#project), they only have
effect when Meson is run for the first time, and command line
arguments override any default options in build files)
-- `fallback` specifies a subproject fallback to use in case the
+- `fallback`: specifies a subproject fallback to use in case the
dependency is not found in the system. The value is an array
`['subproj_name', 'subproj_dep']` where the first value is the name
of the subproject and the second is the variable name in that
@@ -459,36 +453,36 @@ arguments:
[`dependency()`](#dependency), etc. Note that this means the
fallback dependency may be a not-found dependency, in which
case the value of the `required:` kwarg will be obeyed.
- *Since 0.54.0* `'subproj_dep'` argument can be omitted in the case the
+  *(since 0.54.0)* The `'subproj_dep'` argument can be omitted when the
subproject used `meson.override_dependency('dependency_name', subproj_dep)`.
In that case, the `fallback` keyword argument can be a single string instead
of a list of 2 strings.
-- `language` *(added 0.42.0)* defines what language-specific
+- `language` *(since 0.42.0)*: defines what language-specific
dependency to find if it's available for multiple languages.
-- `method` defines the way the dependency is detected, the default is
+- `method`: defines the way the dependency is detected, the default is
`auto` but can be overridden to be e.g. `qmake` for Qt development,
and [different dependencies support different values](
Dependencies.md#dependencies-with-custom-lookup-functionality)
for this (though `auto` will work on all of them)
-- `native` if set to `true`, causes Meson to find the dependency on
+- `native`: if set to `true`, causes Meson to find the dependency on
the build machine system rather than the host system (i.e. where the
cross compiled binary will run on), usually only needed if you build
a tool to be used during compilation.
-- `not_found_message` *(added 0.50.0)* is an optional string that will
+- `not_found_message` *(since 0.50.0)*: an optional string that will
be printed as a `message()` if the dependency was not found.
-- `required`, when set to false, Meson will proceed with the build
- even if the dependency is not found. Since *0.47.0* the value of a
+- `required`: when set to false, Meson will proceed with the build
+ even if the dependency is not found. *(since 0.47.0)* The value of a
[`feature`](Build-options.md#features) option can also be passed.
-- `static` tells the dependency provider to try to get static
+- `static`: tells the dependency provider to try to get static
libraries instead of dynamic ones (note that this is not supported
by all dependency backends)
-- `version` specifies the required version, a string containing a
+- `version` *(since 0.37.0)*: specifies the required version, a string containing a
comparison operator followed by the version string, examples include
- `>1.0.0`, `<=2.3.5` or `3.1.4` for exact matching. *(Added 0.37.0)*
+ `>1.0.0`, `<=2.3.5` or `3.1.4` for exact matching.
You can also specify multiple restrictions by passing a list to this
keyword argument, such as: `['>=3.14.0', '<=4.1.0']`.
These requirements are never met if the version is unknown.
-- `include_type` *(added 0.52.0)* is an enum flag, marking how the dependency
+- `include_type` *(since 0.52.0)*: an enum flag, marking how the dependency
flags should be converted. Supported values are `'preserve'`, `'system'` and
`'non-system'`. System dependencies may be handled differently on some
platforms, for instance, using `-isystem` instead of `-I`, where possible.
@@ -499,9 +493,8 @@ arguments:
keywords may also be accepted (e.g. `modules` specifies submodules to use for
dependencies such as Qt5 or Boost. `components` allows the user to manually
add CMake `COMPONENTS` for the `find_package` lookup)
-- `disabler` if `true` and the dependency couldn't be found, return a
- [disabler object](#disabler-object) instead of a not-found dependency.
- *Since 0.49.0*
+- `disabler` *(since 0.49.0)*: if `true` and the dependency couldn't be found,
+ returns a [disabler object](#disabler-object) instead of a not-found dependency.
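+
+A sketch combining a version constraint with a subproject fallback (the
+subproject and variable names are assumptions):
+
+```meson
+# 'zlib' subproject and 'zlib_dep' variable are assumed to exist as a wrap
+zdep = dependency('zlib',
+  version : '>=1.2.8',
+  fallback : ['zlib', 'zlib_dep'],
+  required : false)
+```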
If dependency_name is `''`, the dependency is always not found. So
with `required: false`, this always returns a dependency object for
@@ -518,7 +511,9 @@ The returned object also has methods that are documented in the
### disabler()
-Returns a [disabler object](#disabler-object). Added in 0.44.0.
+*(since 0.44.0)*
+
+Returns a [disabler object](#disabler-object).
### error()
@@ -534,10 +529,11 @@ Print the argument string and halts the build process.
environment_object environment(...)
```
-Returns an empty [environment variable
-object](#environment-object). Added in 0.35.0.
+*(since 0.35.0)*
+
+Returns an empty [environment variable object](#environment-object).
-Since *0.52.0* takes an optional dictionary as first argument. If
+*(since 0.52.0)* Takes an optional dictionary as first argument. If
provided, each key/value pair is added into the `environment_object`
as if `set()` method was called for each of them.
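+
+A brief sketch; `exe` is assumed to come from an earlier `executable()` call:
+
+```meson
+env = environment({'FOO' : 'bar'})   # dictionary form, since 0.52.0
+env.set('LC_ALL', 'C')
+# exe: assumed earlier executable() target
+test('runs with custom env', exe, env : env)
+```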
@@ -577,100 +573,99 @@ Executable supports the following keyword arguments. Note that just
like the positional arguments above, these keyword arguments can also
be passed to [shared and static libraries](#library).
-- `<languagename>_pch` precompiled header file to use for the given language
-- `<languagename>_args` compiler flags to use for the given language;
+- `<languagename>_pch`: precompiled header file to use for the given language
+- `<languagename>_args`: compiler flags to use for the given language;
eg: `cpp_args` for C++
-- `build_by_default` causes, when set to true, to have this target be
- built by default, that is, when invoking plain `ninja`, the default
- value is true for all built target types, since 0.38.0
-- `build_rpath` a string to add to target's rpath definition in the
+- `build_by_default` *(since 0.38.0)*: causes, when set to true, to
+ have this target be built by default. This means it will be built when
+ `meson compile` is called without any arguments. The default value is
+ `true` for all built target types.
+- `build_rpath`: a string to add to target's rpath definition in the
build dir, but which will be removed on install
-- `dependencies` one or more objects created with
+- `dependencies`: one or more objects created with
[`dependency`](#dependency) or [`find_library`](#compiler-object)
(for external deps) or [`declare_dependency`](#declare_dependency)
(for deps built by the project)
-- `extra_files` are not used for the build itself but are shown as
+- `extra_files`: files not used for the build itself but shown as
source files in IDEs that group files by targets (such as Visual
Studio)
-- `gui_app` when set to true flags this target as a GUI application on
- platforms where this makes a difference (e.g. Windows)
-- `link_args` flags to use during linking. You can use UNIX-style
+- `gui_app`: when set to true flags this target as a GUI application on
+ platforms where this makes a difference (e.g. Windows).
+- `link_args`: flags to use during linking. You can use UNIX-style
flags here for all platforms.
-- `link_depends` strings, files, or custom targets the link step
+- `link_depends`: strings, files, or custom targets the link step
depends on such as a symbol visibility map. The purpose is to
automatically trigger a re-link (but not a re-compile) of the target
when this file changes.
-- `link_language` since 0.51.0 (broken until 0.55.0) makes the linker for this
+- `link_language` *(since 0.51.0)* *(broken until 0.55.0)*: makes the linker for this
target be for the specified language. It is generally unnecessary to set
this, as meson will detect the right linker to use in most cases. There are
only two cases where this is needed. One, your main function in an
executable is not in the language meson picked, or second you want to force
a library to use only one ABI.
-- `link_whole` links all contents of the given static libraries
- whether they are used by not, equivalent to the
- `-Wl,--whole-archive` argument flag of GCC, available since 0.40.0.
- As of 0.41.0 if passed a list that list will be flattened. Starting
- from version 0.51.0 this argument also accepts outputs produced by
+- `link_whole` *(since 0.40.0)*: links all contents of the given static libraries
+  whether they are used or not, equivalent to the `-Wl,--whole-archive` argument of GCC.
+ *(since 0.41.0)* If passed a list that list will be flattened.
+ *(since 0.51.0)* This argument also accepts outputs produced by
custom targets. The user must ensure that the output is a library in
the correct format.
-- `link_with`, one or more shared or static libraries (built by this
- project) that this target should be linked with, If passed a list
- this list will be flattened as of 0.41.0. Starting with version
- 0.51.0, the arguments can also be custom targets. In this case Meson
- will assume that merely adding the output file in the linker command
+- `link_with`: one or more shared or static libraries (built by this
+ project) that this target should be linked with. *(since 0.41.0)* If passed a
+ list this list will be flattened. *(since 0.51.0)* The arguments can also be custom targets.
+ In this case Meson will assume that merely adding the output file in the linker command
line is sufficient to make linking work. If this is not sufficient,
then the build system writer must write all other steps manually.
-- `export_dynamic` when set to true causes the target's symbols to be
+- `export_dynamic` *(since 0.45.0)*: when set to true causes the target's symbols to be
dynamically exported, allowing modules built using the
[`shared_module`](#shared_module) function to refer to functions,
variables and other symbols defined in the executable itself. Implies
- the `implib` argument. Since 0.45.0
-- `implib` when set to true, an import library is generated for the
+ the `implib` argument.
+- `implib` *(since 0.42.0)*: when set to true, an import library is generated for the
executable (the name of the import library is based on *exe_name*).
Alternatively, when set to a string, that gives the base name for
the import library. The import library is used when the returned
build target object appears in `link_with:` elsewhere. Only has any
effect on platforms where that is meaningful (e.g. Windows). Implies
- the `export_dynamic` argument. Since 0.42.0
-- `implicit_include_directories` is a boolean telling whether Meson
+ the `export_dynamic` argument.
+- `implicit_include_directories` *(since 0.42.0)*: a boolean telling whether Meson
adds the current source and build directories to the include path,
- defaults to `true`, since 0.42.0
-- `include_directories` one or more objects created with the
- `include_directories` function, or, since 0.50.0, strings, which
+ defaults to `true`.
+- `include_directories`: one or more objects created with the
+ `include_directories` function, or *(since 0.50.0)* strings, which
will be transparently expanded to include directory objects
-- `install`, when set to true, this executable should be installed, defaults to `false`
-- `install_dir` override install directory for this file. The value is
+- `install`: when set to true, this executable should be installed, defaults to `false`
+- `install_dir`: override install directory for this file. The value is
relative to the `prefix` specified. F.ex, if you want to install
plugins into a subdir, you'd use something like this: `install_dir :
get_option('libdir') / 'projectname-1.0'`.
-- `install_mode` *(added 0.47.0)* specify the file mode in symbolic format
+- `install_mode` *(since 0.47.0)*: specify the file mode in symbolic format
and optionally the owner/uid and group/gid for the installed files.
-- `install_rpath` a string to set the target's rpath to after install
+- `install_rpath`: a string to set the target's rpath to after install
(but *not* before that). On Windows, this argument has no effect.
-- `objects` list of prebuilt object files (usually for third party
+- `objects`: list of prebuilt object files (usually for third party
products you don't have source to) that should be linked in this
target, **never** use this for object files that you build yourself.
-- `name_suffix` the string that will be used as the extension for the
+- `name_suffix`: the string that will be used as the extension for the
target by overriding the default. By default on Windows this is
`exe` and on other platforms it is omitted. Set this to `[]`, or omit
the keyword argument for the default behaviour.
-- `override_options` takes an array of strings in the same format as
+- `override_options` *(since 0.40.0)*: takes an array of strings in the same format as
`project`'s `default_options` overriding the values of these options
- for this target only, since 0.40.0.
-- `gnu_symbol_visibility` specifies how symbols should be exported, see
+ for this target only.
+- `gnu_symbol_visibility` *(since 0.48.0)*: specifies how symbols should be exported, see
e.g [the GCC Wiki](https://gcc.gnu.org/wiki/Visibility) for more
information. This value can either be an empty string or one of
`default`, `internal`, `hidden`, `protected` or `inlineshidden`, which
is the same as `hidden` but also includes things like C++ implicit
constructors as specified in the GCC manual. Ignored on compilers that
- do not support GNU visibility arguments. Available since 0.48.0.
-- `d_import_dirs` list of directories to look in for string imports used
+ do not support GNU visibility arguments.
+- `d_import_dirs`: list of directories to look in for string imports used
in the D programming language
-- `d_unittest`, when set to true, the D modules are compiled in debug mode
-- `d_module_versions` list of module version identifiers set when compiling D sources
-- `d_debug` list of module debug identifiers set when compiling D sources
-- `pie` *(added 0.49.0)* build a position-independent executable
-- `native`, is a boolean controlling whether the target is compiled for the
+- `d_unittest`: when set to true, the D modules are compiled in debug mode
+- `d_module_versions`: list of module version identifiers set when compiling D sources
+- `d_debug`: list of module debug identifiers set when compiling D sources
+- `pie` *(since 0.49.0)*: build a position-independent executable
+- `native`: a boolean controlling whether the target is compiled for the
build or host machines. Defaults to false, building for the host machine.
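+
+A sketch combining some of the keyword arguments above (all names and flags
+are illustrative):
+
+```meson
+# 'myprog', its sources and the -D flag are placeholders
+exe = executable('myprog', 'main.c', 'util.c',
+  include_directories : include_directories('include'),
+  dependencies : dependency('threads'),
+  c_args : ['-DMYPROG_BUILD'],
+  install : true)
+```
+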
The list of `sources`, `objects`, and `dependencies` is always
@@ -682,7 +677,7 @@ The returned object also has methods that are documented in the
### find_library()
-This function is deprecated and in the 0.31.0 release it was moved to
+*(since 0.31.0)* **(deprecated)** Use the `find_library()` method of
[the compiler object](#compiler-object) as obtained from
`meson.get_compiler(lang)`.
@@ -696,12 +691,11 @@ This function is deprecated and in the 0.31.0 release it was moved to
to be searched for in `PATH`, or a script in the current source
directory.
-`program_name2` and later positional arguments are used as fallback
+*(since 0.37.0)* `program_name2` and later positional arguments are used as fallback
strings to search for. This is meant to be used for cases where the
program may have many alternative names, such as `foo` and
`foo.py`. The function will check for the arguments one by one and the
-first one that is found is returned. Meson versions earlier than
-0.37.0 only accept one argument.
+first one that is found is returned.
Keyword arguments are the following:
@@ -709,21 +703,21 @@ Keyword arguments are the following:
abort if no program can be found. If `required` is set to `false`,
Meson continue even if none of the programs can be found. You can
then use the `.found()` method on the [returned object](#external-program-object) to check
- whether it was found or not. Since *0.47.0* the value of a
+ whether it was found or not. *(since 0.47.0)* The value of a
[`feature`](Build-options.md#features) option can also be passed to the
`required` keyword argument.
-- `native` *(since 0.43)* defines how this executable should be searched. By default
+- `native` *(since 0.43.0)*: defines how this executable should be searched. By default
it is set to `false`, which causes Meson to first look for the
executable in the cross file (when cross building) and if it is not
defined there, then from the system. If set to `true`, the cross
file is ignored and the program is only searched from the system.
-- `disabler` if `true` and the program couldn't be found, return a
+- `disabler` *(since 0.49.0)*: if `true` and the program couldn't be found, returns a
[disabler object](#disabler-object) instead of a not-found object.
- *Since 0.49.0*
+
-- `version` *(since 0.52.0)* Specifies the required version, see
+- `version` *(since 0.52.0)*: specifies the required version, see
[`dependency()`](#dependency) for argument format. The version of the program
is determined by running `program_name --version` command. If stdout is empty
it fallbacks to stderr. If the output contains more text than simply a version
@@ -731,7 +725,7 @@ Keyword arguments are the following:
If the output is more complicated than that, the version checking will have to
be done manually using [`run_command()`](#run_command).
-- `dirs` *(since 0.53.0)* Extra list of absolute paths where to look for program
+- `dirs` *(since 0.53.0)*: extra list of absolute paths where to look for program
names.
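+
+A sketch using fallback names and a version check (the program names are
+hypothetical):
+
+```meson
+# 'foo' / 'foo.py' are placeholder program names
+prog = find_program('foo', 'foo.py',
+  version : '>=1.0',
+  required : false)
+if prog.found()
+  message('using ' + prog.path())
+endif
+```
+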
Meson will also autodetect scripts with a shebang line and run them
@@ -799,22 +793,22 @@ argument is the executable to use. It can either be a self-built
executable or one returned by find_program. Keyword arguments are the
following:
-- `arguments` a list of template strings that will be the command line
+- `arguments`: a list of template strings that will be the command line
arguments passed to the executable
-- `depends` is an array of build targets that must be built before this
+- `depends` *(since 0.51.0)*: an array of build targets that must be built before this
generator can be run. This is used if you have a generator that calls
- a second executable that is built in this project. Available since 0.51.0
-- `depfile` is a template string pointing to a dependency file that a
+ a second executable that is built in this project.
+- `depfile`: a template string pointing to a dependency file that a
generator can write listing all the additional files this target
depends on, for example a C compiler would list all the header files
it included, and a change in any one of these files triggers a
recompilation
-- `output` a template string (or list of template strings) defining
+- `output`: a template string (or list of template strings) defining
how an output file name is (or multiple output names are) generated
from a single source file name
-- `capture` when this argument is set to true, Meson captures `stdout`
- of the `executable` and writes it to the target file specified as
- `output`. Available since v0.43.0.
+- `capture` *(since 0.43.0)*: when this argument is set to true, Meson
+ captures `stdout` of the `executable` and writes it to the target file
+ specified as `output`.
The returned object also has methods that are documented in the
[object methods section](#generator-object) below.
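+
+A hedged sketch; `mygen` stands in for a hypothetical code generator:
+
+```meson
+# mygen, the .def inputs and main.c are placeholders
+mygen = find_program('mygen')
+gen = generator(mygen,
+  output : '@BASENAME@.c',
+  arguments : ['@INPUT@', '--out', '@OUTPUT@'])
+gen_src = gen.process('one.def', 'two.def')
+exe = executable('demo', 'main.c', gen_src)
+```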
@@ -977,13 +971,13 @@ except Visual Studio).
Installs files from the source tree that are listed as positional
arguments. The following keyword arguments are supported:
-- `install_dir` the absolute or relative path to the installation
+- `install_dir`: the absolute or relative path to the installation
directory. If this is a relative path, it is assumed to be relative
to the prefix.
- If omitted, the directory defaults to `{datadir}/{projectname}` *(added 0.45.0)*.
+ If omitted, the directory defaults to `{datadir}/{projectname}` *(since 0.45.0)*.
-- `install_mode` specify the file mode in symbolic format and
+- `install_mode`: specify the file mode in symbolic format and
optionally the owner/uid and group/gid for the installed files. For
example:
@@ -995,10 +989,10 @@ arguments. The following keyword arguments are supported:
To leave any of these three as the default, specify `false`.
-- `rename` if specified renames each source file into corresponding
+- `rename` *(since 0.46.0)*: if specified renames each source file into corresponding
file from `rename` list. Nested paths are allowed and they are
joined with `install_dir`. Length of `rename` list must be equal to
- the number of sources. *(added 0.46.0)*
+ the number of sources.
See [Installing](Installing.md) for more examples.
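+
+For instance (with hypothetical file names), renaming files while installing:
+
+```meson
+# the source files and the install subdirectory are placeholders
+install_data(['docs/readme.txt', 'data/defaults.conf'],
+  rename : ['README', 'myproject.conf'],
+  install_dir : get_option('datadir') / 'myproject')
+```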
@@ -1035,10 +1029,11 @@ This will install `common.h` and `kola.h` into `/{prefix}/cust/myproj`:
install_headers('common.h', 'proj/kola.h', install_dir : 'cust', subdir : 'myproj')
```
-The `install_mode` argument can be used to specify the file mode in symbolic
-format and optionally the owner/uid and group/gid for the installed files.
-An example value could be `['rwxr-sr-x', 'root', 'root']`.
-*(Added 0.47.0)*.
+Accepts the following keywords:
+
+- `install_mode` *(since 0.47.0)*: can be used to specify the file mode in symbolic
+ format and optionally the owner/uid and group/gid for the installed files.
+ An example value could be `['rwxr-sr-x', 'root', 'root']`.
### install_man()
@@ -1051,12 +1046,13 @@ man directory during the install step. This directory can be
overridden by specifying it with the `install_dir` keyword
argument.
-The `install_mode` argument can be used to specify the file mode in symbolic
-format and optionally the owner/uid and group/gid for the installed files.
-An example value could be `['rwxr-sr-x', 'root', 'root']`.
-*(Added 0.47.0)*.
+Accepts the following keywords:
+
+- `install_mode` *(since 0.47.0)*: can be used to specify the file mode in symbolic
+ format and optionally the owner/uid and group/gid for the installed files.
+ An example value could be `['rwxr-sr-x', 'root', 'root']`.
-Since 0.49.0, [manpages are no longer compressed implicitly][install_man_49].
+*(since 0.49.0)* [manpages are no longer compressed implicitly][install_man_49].
[install_man_49]: https://mesonbuild.com/Release-notes-for-0-49-0.html#manpages-are-no-longer-compressed-implicitly
@@ -1077,11 +1073,10 @@ The following keyword arguments are supported:
- `exclude_directories`: a list of directory names that should not be installed.
Names are interpreted as paths relative to the `subdir_name` location.
- `install_dir`: the location to place the installed subdirectory.
-- `install_mode`: the file mode in symbolic format and optionally
- the owner/uid and group/gid for the installed files. *(Added 0.47.0)*
-- `strip_directory`: install directory contents. `strip_directory=false` by default.
+- `install_mode` *(since 0.47.0)*: the file mode in symbolic format and optionally
+ the owner/uid and group/gid for the installed files.
+- `strip_directory` *(since 0.45.0)*: install directory contents. `strip_directory=false` by default.
If `strip_directory=true` only the last component of the source path is used.
- Since 0.45.0
For a given directory `foo`:
```text
@@ -1126,7 +1121,9 @@ share/
bool is_disabler(var)
```
-Returns true if a variable is a disabler and false otherwise. Added in 0.52.0.
+*(since 0.52.0)*
+
+Returns true if a variable is a disabler and false otherwise.
### is_variable()
@@ -1153,6 +1150,8 @@ the jar with `java -jar file.jar`.
string join_paths(string1, string2, ...)
```
+*(since 0.36.0)*
+
Joins the given strings into a file system path segment. For example
`join_paths('foo', 'bar')` results in `foo/bar`. If any one of the
individual segments is an absolute path, all segments before it are
@@ -1161,9 +1160,7 @@ dropped. That means that `join_paths('foo', '/bar')` returns `/bar`.
**Warning** Don't use `join_paths()` for sources in [`library`](#library) and
[`executable`](#executable), you should use [`files`](#files) instead.
-*Added 0.36.0*
-
-Since 0.49.0 using the`/` operator on strings is equivalent to calling
+*(since 0.49.0)* Using the `/` operator on strings is equivalent to calling
`join_paths`.
```meson
@@ -1193,12 +1190,12 @@ library basis using the [dependency()](#dependency)) `static` keyword.
The keyword arguments for this are the same as for
[`executable`](#executable) with the following additions:
-- `name_prefix` the string that will be used as the prefix for the
+- `name_prefix`: the string that will be used as the prefix for the
target output filename by overriding the default (only used for
libraries). By default this is `lib` on all platforms and compilers,
except for MSVC shared libraries where it is omitted to follow
convention, and Cygwin shared libraries where it is `cyg`.
-- `name_suffix` the string that will be used as the suffix for the
+- `name_suffix`: the string that will be used as the suffix for the
target output filename by overriding the default (see also:
[executable()](#executable)). By default, for shared libraries this
is `dylib` on macOS, `dll` on Windows, and `so` everywhere else.
@@ -1206,7 +1203,7 @@ The keyword arguments for this are the same as for
static libraries use the `lib` suffix, but we use `a` to avoid a
potential name clash with shared libraries which also generate
import libraries with a `lib` suffix.
-- `rust_crate_type` specifies the crate type for Rust
+- `rust_crate_type`: specifies the crate type for Rust
libraries. Defaults to `dylib` for shared libraries and `rlib` for
static libraries.
@@ -1224,7 +1221,7 @@ them for the default behaviour for each platform.
This function prints its argument to stdout.
-**Since 0.54.0** Can take more more than one argument that will be separated by
+*(since 0.54.0)* Can take more than one argument; the arguments will be separated by a
space.
### warning()
@@ -1233,11 +1230,11 @@ space.
void warning(text)
```
-This function prints its argument to stdout prefixed with WARNING:.
+*(since 0.44.0)*
-*Added 0.44.0*
+This function prints its argument to stdout prefixed with WARNING:.
-**Since 0.54.0** Can take more more than one argument that will be separated by
+*(since 0.54.0)* Can take more than one argument; the arguments will be separated by a
space.
### summary()
@@ -1247,6 +1244,8 @@ space.
void summary(dictionary)
```
+*(since 0.53.0)*
+
This function is used to summarize build configuration at the end of the build
process. This function provides a way for projects (and subprojects) to report
this information in a clear way.
@@ -1262,10 +1261,10 @@ pair doesn't appear twice. All sections will be collected and printed at
the end of the configuration in the same order as they have been called.
Keyword arguments:
-- `section` title to group a set of key/value pairs.
-- `bool_yn` if set to true, all boolean values will be replaced by green YES
+- `section`: title to group a set of key/value pairs.
+- `bool_yn`: if set to true, all boolean values will be replaced by green YES
or red NO.
-- `list_sep` *Since 0.54.0* string used to separate list values (e.g. `', '`).
+- `list_sep` *(since 0.54.0)*: string used to separate list values (e.g. `', '`).
Example:
```meson
@@ -1300,8 +1299,6 @@ My Project 1.0
True
```
-*Added 0.53.0*
-
### project()
``` meson
@@ -1312,7 +1309,7 @@ The first argument to this function must be a string defining the name
of this project. It is followed by programming languages that the
project uses. Supported values for languages are `c`, `cpp` (for
`C++`), `cuda`, `d`, `objc`, `objcpp`, `fortran`, `java`, `cs` (for `C#`),
-`vala` and `rust`. Since version `0.40.0` the list of languages
+`vala` and `rust`. *(since 0.40.0)* The list of languages
is optional.
The project name can be any string you want, it's not used for
@@ -1324,40 +1321,40 @@ Library_.
Project supports the following keyword arguments.
- - `default_options` takes an array of strings. The strings are in the
- form `key=value` and have the same format as options to
- `meson configure`. For example to set the default project type you would
- set this: `default_options : ['buildtype=debugoptimized']`. Note
- that these settings are only used when running Meson for the first
- time. Global options such as `buildtype` can only be specified in
- the master project, settings in subprojects are ignored. Project
- specific options are used normally even in subprojects.
-
-
- - `license` takes a string or array of strings describing the
- license(s) the code is under. Usually this would be something like
- `license : 'GPL2+'`, but if the code has multiple licenses you can
- specify them as an array like this: `license : ['proprietary',
- 'GPL3']`. Note that the text is informal and is only written to
- the dependency manifest. Meson does not do any license validation,
- you are responsible for verifying that you abide by all licensing
- terms. You can access the value in your Meson build files with
- `meson.project_license()`.
-
- - `meson_version` takes a string describing which Meson version the
- project requires. Usually something like `>=0.28.0`.
-
- - `subproject_dir` specifies the top level directory name that holds
- Meson subprojects. This is only meant as a compatibility option
- for existing code bases that house their embedded source code in a
- custom directory. All new projects should not set this but instead
- use the default value. It should be noted that this keyword
- argument is ignored inside subprojects. There can be only one
- subproject dir and it is set in the top level Meson file.
-
- - `version`, which is a free form string describing the version of
- this project. You can access the value in your Meson build files
- with `meson.project_version()`.
+- `default_options`: takes an array of strings. The strings are in the
+ form `key=value` and have the same format as options to
+ `meson configure`. For example to set the default project type you would
+ set this: `default_options : ['buildtype=debugoptimized']`. Note
+ that these settings are only used when running Meson for the first
+ time. Global options such as `buildtype` can only be specified in
+ the master project, settings in subprojects are ignored. Project
+ specific options are used normally even in subprojects.
+
+
+- `license`: takes a string or array of strings describing the
+ license(s) the code is under. Usually this would be something like
+ `license : 'GPL2+'`, but if the code has multiple licenses you can
+ specify them as an array like this: `license : ['proprietary',
+ 'GPL3']`. Note that the text is informal and is only written to
+ the dependency manifest. Meson does not do any license validation,
+ you are responsible for verifying that you abide by all licensing
+ terms. You can access the value in your Meson build files with
+ `meson.project_license()`.
+
+- `meson_version`: takes a string describing which Meson version the
+ project requires. Usually something like `>=0.28.0`.
+
+- `subproject_dir`: specifies the top level directory name that holds
+ Meson subprojects. This is only meant as a compatibility option
+ for existing code bases that house their embedded source code in a
+ custom directory. All new projects should not set this but instead
+ use the default value. It should be noted that this keyword
+ argument is ignored inside subprojects. There can be only one
+ subproject dir and it is set in the top level Meson file.
+
+- `version`: a free form string describing the version of
+ this project. You can access the value in your Meson build files
+ with `meson.project_version()`.
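+
+A representative call combining these keywords (the values are only
+illustrative):
+
+```meson
+# project name, languages and option values are placeholders
+project('myproject', 'c', 'cpp',
+  version : '1.0.0',
+  license : 'MIT',
+  meson_version : '>=0.50.0',
+  default_options : ['warning_level=3', 'buildtype=debugoptimized'])
+```
+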
### run_command()
@@ -1379,15 +1376,13 @@ respectively.
This function supports the following keyword arguments:
- - `check` takes a boolean. If `true`, the exit status code of the command will
+ - `check` *(since 0.47.0)*: takes a boolean. If `true`, the exit status code of the command will
be checked, and the configuration will fail if it is non-zero. The default is
`false`.
- Since 0.47.0
- - `env` environment variables to set, such as `['NAME1=value1',
+ - `env` *(since 0.50.0)*: environment variables to set, such as `['NAME1=value1',
'NAME2=value2']`, or an [`environment()`
object](#environment-object) which allows more sophisticated
- environment juggling. *Since 0.52.0* a dictionary is also accepted.
- Since 0.50.0
+ environment juggling. *(since 0.52.0)* A dictionary is also accepted.
See also [External commands](External-commands.md).
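+
+A sketch with a hypothetical helper script:
+
+```meson
+# scripts/get-version.sh is a placeholder
+result = run_command('scripts/get-version.sh',
+  check : true,
+  env : {'LC_ALL' : 'C'})   # dictionary accepted since 0.52.0
+detected_version = result.stdout().strip()
+```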
@@ -1399,8 +1394,8 @@ runtarget run_target(target_name, ...)
This function creates a new top-level target that runs a specified
command with the specified arguments. Like all top-level targets, this
-integrates with the selected backend. For instance, with Ninja you can
-run it as `ninja target_name`. Note that a run target produces no
+integrates with the selected backend. For instance, you can
+run it as `meson compile target_name`. Note that a run target produces no
output as far as Meson is concerned. It is only meant for tasks such
as running a code formatter or flashing an external device's firmware
with a built file.
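+
+For example, a hypothetical formatting target that can be invoked with
+`meson compile format-sources`:
+
+```meson
+# the target name and formatted file are placeholders
+clang_format = find_program('clang-format', required : false)
+if clang_format.found()
+  run_target('format-sources',
+    command : [clang_format, '-i', files('main.c')])
+endif
+```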
@@ -1428,8 +1423,7 @@ and subdirectory the target was defined in, respectively.
Assigns a value to the given variable name. Calling
`set_variable('foo', bar)` is equivalent to `foo = bar`.
-**Note:** Prior to v0.46.1, the `value` parameter could not be an
-array type, due to flattening of the function parameters.
+*(since 0.46.1)* The `value` parameter can be an array type.
### shared_library()
@@ -1441,7 +1435,7 @@ Builds a shared library with the given sources. Positional and keyword
arguments are the same as for [`library`](#library) with the following
extra keyword arguments.
-- `soversion` a string specifying the soversion of this shared
+- `soversion`: a string specifying the soversion of this shared
library, such as `0`. On Linux and Windows this is used to set the
soversion (or equivalent) in the filename. For example, if
`soversion` is `4`, a Windows DLL will be called `foo-4.dll` and one
@@ -1449,19 +1443,19 @@ extra keyword arguments.
`libfoo.so.4`. If this is not specified, the first part of `version`
is used instead (see below). For example, if `version` is `3.6.0` and
`soversion` is not defined, it is set to `3`.
-- `version` a string specifying the version of this shared library,
+- `version`: a string specifying the version of this shared library,
such as `1.1.0`. On Linux and OS X, this is used to set the shared
library version in the filename, such as `libfoo.so.1.1.0` and
`libfoo.1.1.0.dylib`. If this is not specified, `soversion` is used
instead (see above).
-- `darwin_versions` *(added 0.48)* an integer, string, or a list of
+- `darwin_versions` *(since 0.48.0)*: an integer, string, or a list of
versions to use for setting dylib `compatibility version` and
`current version` on macOS. If a list is specified, it must be
either zero, one, or two elements. If only one element is specified
or if it's not a list, the specified value will be used for setting
both compatibility version and current version. If unspecified, the
`soversion` will be used as per the aforementioned rules.
-- `vs_module_defs` a string, a File object, or Custom Target for a
+- `vs_module_defs`: a string, a File object, or Custom Target for a
Microsoft module definition file for controlling symbol exports,
etc., on platforms where that is possible (e.g. Windows).
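+
+A brief sketch of a versioned shared library (names are hypothetical):
+
+```meson
+# 'foo.c' is a placeholder source
+libfoo = shared_library('foo', 'foo.c',
+  version : '1.2.3',   # soversion defaults to '1' when left out
+  install : true)
+```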
@@ -1471,6 +1465,8 @@ extra keyword arguments.
buildtarget shared_module(module_name, list_of_sources, ...)
```
+*(since 0.37.0)*
+
Builds a shared module with the given sources. Positional and keyword
arguments are the same as for [`library`](#library).
@@ -1485,7 +1481,7 @@ you will need to set the `export_dynamic` argument of the executable to
Supports the following extra keyword arguments:
-- `vs_module_defs`, *(Added 0.52.0)*, a string, a File object, or
+- `vs_module_defs` *(since 0.52.0)*: a string, a File object, or
Custom Target for a Microsoft module definition file for controlling
symbol exports, etc., on platforms where that is possible
(e.g. Windows).
@@ -1495,8 +1491,6 @@ platforms, notably OSX. Consider using a
[`shared_library`](#shared_library) instead, if you need to both
`dlopen()` and link with a library.
-*Added 0.37.0*
-
### static_library()
``` meson
@@ -1507,7 +1501,7 @@ Builds a static library with the given sources. Positional and keyword
arguments are otherwise the same as for [`library`](#library), but it
has one argument the others don't have:
- - `pic`, *(Added 0.36.0)* builds the library as positional
+ - `pic` *(since 0.36.0)*: builds the library as position
independent code (so it can be linked into a shared library). This
option has no effect on Windows and OS X since it doesn't make
sense on Windows and PIC cannot be disabled on OS X.
@@ -1530,7 +1524,7 @@ and must only be executed once.
This function has one keyword argument.
- - `if_found` takes one or several dependency objects and will only
+ - `if_found`: takes one or several dependency objects and will only
recurse in the subdir if they all return `true` when queried with
`.found()`
@@ -1575,16 +1569,15 @@ example a subproject called `foo` must be located in
`${MESON_SOURCE_ROOT}/subprojects/foo`. Supports the following keyword
arguments:
- - `default_options` *(added 0.37.0)* an array of default option values
+ - `default_options` *(since 0.37.0)*: an array of default option values
that override those set in the subproject's `meson_options.txt`
(like `default_options` in `project`, they only have effect when
Meson is run for the first time, and command line arguments override
- any default options in build files). *Since 0.54.0* `default_library`
+   any default options in build files). *(since 0.54.0)* The `default_library`
built-in option can also be overridden.
- - `version` keyword argument that works just like the one in
- `dependency`. It specifies what version the subproject should be,
- as an example `>=1.0.1`
- - `required` *(added 0.48.0)* By default, `required` is `true` and
+ - `version`: works just like the one in `dependency`.
+ It specifies what version the subproject should be, as an example `>=1.0.1`
+ - `required` *(since 0.48.0)*: By default, `required` is `true` and
Meson will abort if the subproject could not be setup. You can set
this to `false` and then use the `.found()` method on the [returned
object](#subproject-object). You may also pass the value of a
@@ -1613,12 +1606,12 @@ object](#build-target-object) returned by
object](#external-program-object) returned by
[`find_program()`](#find_program).
-*Since 0.55.0* When cross compiling, if an exe_wrapper is needed and defined
+*(since 0.55.0)* When cross compiling, if an exe_wrapper is needed and defined
the environment variable `MESON_EXE_WRAPPER` will be set to the string value
of that wrapper (implementation detail: using `mesonlib.join_args`). Test
scripts may use this to run cross built binaries. If your test needs
`MESON_EXE_WRAPPER` in cross build situations it is your responsibility to
-return code 77 to tell the harness to report "skip"
+return code 77 to tell the harness to report "skip".
By default, environment variable
[`MALLOC_PERTURB_`](http://man7.org/linux/man-pages/man3/mallopt.3.html)
@@ -1640,53 +1633,52 @@ test(..., env: nomalloc, ...)
#### test() Keyword arguments
-- `args` arguments to pass to the executable
+- `args`: arguments to pass to the executable
-- `env` environment variables to set, such as `['NAME1=value1',
+- `env`: environment variables to set, such as `['NAME1=value1',
'NAME2=value2']`, or an [`environment()`
object](#environment-object) which allows more sophisticated
- environment juggling. *Since 0.52.0* a dictionary is also accepted.
+ environment juggling. *(since 0.52.0)* A dictionary is also accepted.
-- `is_parallel` when false, specifies that no other test must be
+- `is_parallel`: when false, specifies that no other test must be
running at the same time as this test
-- `should_fail` when true the test is considered passed if the
+- `should_fail`: when true the test is considered passed if the
executable returns a non-zero return value (i.e. reports an error)
-- `suite` `'label'` (or list of labels `['label1', 'label2']`)
+- `suite`: `'label'` (or list of labels `['label1', 'label2']`)
attached to this test. The suite name is qualified by a (sub)project
name resulting in `(sub)project_name:label`. In the case of a list
of strings, the suite names will be `(sub)project_name:label1`,
`(sub)project_name:label2`, etc.
-- `timeout` the amount of seconds the test is allowed to run, a test
+- `timeout`: the amount of seconds the test is allowed to run, a test
that exceeds its time limit is always considered failed, defaults to
30 seconds
-- `workdir` absolute path that will be used as the working directory
+- `workdir`: absolute path that will be used as the working directory
for the test
-- `depends` specifies that this test depends on the specified
+- `depends` *(since 0.46.0)*: specifies that this test depends on the specified
target(s), even though it does not take any of them as a command
line argument. This is meant for cases where test finds those
targets internally, e.g. plugins or globbing. Those targets are built
before test is executed even if they have `build_by_default : false`.
- Since 0.46.0
-- `protocol` *(Since 0.50.0)* specifies how the test results are parsed and can
+- `protocol` *(since 0.50.0)*: specifies how the test results are parsed and can
be one of `exitcode`, `tap`, or `gtest`. For more information about test
harness protocol read [Unit Tests](Unit-tests.md). The following values are
accepted:
- `exitcode`: the executable's exit code is used by the test harness
- to record the outcome of the test)
- - `tap` ([Test Anything Protocol](https://www.testanything.org/))
- - `gtest`. *(Since 0.55.0)* for Google Tests.
+    to record the outcome of the test.
+ - `tap`: [Test Anything Protocol](https://www.testanything.org/).
+ - `gtest` *(since 0.55.0)*: for Google Tests.
-- `priority` specifies the priority of a test. Tests with a
+- `priority` *(since 0.52.0)*: specifies the priority of a test. Tests with a
higher priority are *started* before tests with a lower priority.
The starting order of tests with identical priorities is
implementation-defined. The default priority is 0, negative numbers are
- permitted. Since 0.52.0
+ permitted.
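+
+A sketch combining several of these keywords; `exe` is assumed to be a
+previously defined test executable:
+
+```meson
+# exe: assumed earlier executable() target
+test('unit tests', exe,
+  args : ['--verbose'],
+  env : {'MALLOC_PERTURB_' : '0'},   # dictionary accepted since 0.52.0
+  suite : ['fast', 'unit'],
+  timeout : 60,
+  is_parallel : false)
+```
+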
Defined tests can be run in a backend-agnostic way by calling
`meson test` inside the build dir, or by using backend-specific
@@ -1702,15 +1694,15 @@ This command detects revision control commit information at build time
and places it in the specified output file. This file is guaranteed to
be up to date on every build. Keywords are similar to `custom_target`.
-- `command` string list with the command to execute, see
+- `command`: string list with the command to execute, see
[`custom_target`](#custom_target) for details on how this command
must be specified
-- `fallback` version number to use when no revision control
+- `fallback`: version number to use when no revision control
information is present, such as when building from a release tarball
(defaults to `meson.project_version()`)
-- `input` file to modify (e.g. `version.c.in`) (required)
-- `output` file to write the results to (e.g. `version.c`) (required)
-- `replace_string` string in the input file to substitute with the
+- `input`: file to modify (e.g. `version.c.in`) (required)
+- `output`: file to write the results to (e.g. `version.c`) (required)
+- `replace_string`: string in the input file to substitute with the
commit information (defaults to `@VCS_TAG@`)
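+
+A minimal sketch; `version.c.in` is a hypothetical template containing the
+`@VCS_TAG@` placeholder:
+
+```meson
+# the template and output names are placeholders
+version_c = vcs_tag(input : 'version.c.in',
+  output : 'version.c',
+  fallback : meson.project_version(),
+  replace_string : '@VCS_TAG@')
+```
+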
Meson will read the contents of `input`, substitute the
@@ -1738,31 +1730,30 @@ The `meson` object allows you to introspect various properties of the
system. This object is always mapped in the `meson` variable. It has
the following methods.
-- `add_dist_script(script_name, arg1, arg, ...)` causes the script
+- `add_dist_script(script_name, arg1, arg2, ...)` *(since 0.48.0)*: causes the script
given as argument to run during `dist` operation after the
distribution source has been generated but before it is
archived. Note that this runs the script file that is in the
_staging_ directory, not the one in the source directory. If the
script file can not be found in the staging directory, it is a hard
error. This command can only invoked from the main project, calling
- it from a subproject is a hard error. Available since 0.48.0. Before
- 0.49.0, the function only accepted a single argument. Since 0.54.0
- the `MESON_SOURCE_ROOT` and `MESON_BUILD_ROOT` environment variables
- are set when dist scripts are run.
- *(Since 0.55.0)* The output of `configure_file`, `files`, and `find_program`
+ it from a subproject is a hard error. *(since 0.49.0)* Accepts multiple arguments
+  for the script. *(since 0.54.0)* The `MESON_SOURCE_ROOT` and `MESON_BUILD_ROOT`
+ environment variables are set when dist scripts are run.
+ *(since 0.55.0)* The output of `configure_file`, `files`, and `find_program`
as well as strings.
-- `add_install_script(script_name, arg1, arg2, ...)` causes the script
+- `add_install_script(script_name, arg1, arg2, ...)`: causes the script
given as an argument to be run during the install step, this script
will have the environment variables `MESON_SOURCE_ROOT`,
`MESON_BUILD_ROOT`, `MESON_INSTALL_PREFIX`,
`MESON_INSTALL_DESTDIR_PREFIX`, and `MESONINTROSPECT` set.
All positional arguments are passed as parameters.
- *(Since 0.55.0)* The output of `configure_file`, `files`, `find_program`,
+ *(since 0.55.0)* The output of `configure_file`, `files`, `find_program`,
`custom_target`, indexes of `custom_target`, `executable`, `library`, and
other built targets as well as strings.
- *(added 0.54)* If `meson install` is called with the `--quiet` option, the
+ *(since 0.54.0)* If `meson install` is called with the `--quiet` option, the
environment variable `MESON_INSTALL_QUIET` will be set.
Meson uses the `DESTDIR` environment variable as set by the
@@ -1787,23 +1778,23 @@ the following methods.
shell would. If your script uses Python, `shlex.split()` is the
easiest correct way to do this.
-- `add_postconf_script(script_name, arg1, arg2, ...)` will run the
+- `add_postconf_script(script_name, arg1, arg2, ...)`: runs the
executable given as an argument after all project files have been
generated. This script will have the environment variables
`MESON_SOURCE_ROOT` and `MESON_BUILD_ROOT` set.
- *(Since 0.55.0)* The output of `configure_file`, `files`, and `find_program`
+ *(since 0.55.0)* The output of `configure_file`, `files`, and `find_program`
as well as strings.
-- `backend()` *(added 0.37.0)* returns a string representing the
+- `backend()` *(since 0.37.0)*: returns a string representing the
current backend: `ninja`, `vs2010`, `vs2015`, `vs2017`, `vs2019`,
or `xcode`.
-- `build_root()` returns a string with the absolute path to the build
+- `build_root()`: returns a string with the absolute path to the build
root directory. Note: this function will return the build root of
the parent project if called from a subproject, which is usually
not what you want. Try using `current_build_dir()`.
-- `source_root()` returns a string with the absolute path to the
+- `source_root()`: returns a string with the absolute path to the
source root directory. Note: you should use the `files()` function
to refer to files in the root source directory instead of
constructing paths manually with `meson.source_root()`. This
@@ -1811,17 +1802,17 @@ the following methods.
from a subproject, which is usually not what you want. Try using
`current_source_dir()`.
-- `current_build_dir()` returns a string with the absolute path to the
+- `current_build_dir()`: returns a string with the absolute path to the
current build directory.
-- `current_source_dir()` returns a string to the current source
+- `current_source_dir()`: returns a string to the current source
directory. Note: **you do not need to use this function** when
passing files from the current source directory to a function since
that is the default. Also, you can use the `files()` function to
refer to files in the current or any other source directory instead
of constructing paths manually with `meson.current_source_dir()`.
-- `get_compiler(language)` returns [an object describing a
+- `get_compiler(language)`: returns [an object describing a
compiler](#compiler-object), takes one positional argument which is
the language to use. It also accepts one keyword argument, `native`
which when set to true makes Meson return the compiler for the build
@@ -1830,55 +1821,52 @@ the following methods.
returns the "cross" compiler if we're currently cross-compiling and
the "native" compiler if we're not.
-- `get_cross_property(propname, fallback_value)`
- *Consider get_external_property() instead*. Returns the given
+- `get_cross_property(propname, fallback_value)`:
+ *Consider `get_external_property()` instead*. Returns the given
property from a cross file, the optional fallback_value is returned
if not cross compiling or the given property is not found.
- `get_external_property(propname, fallback_value, native: true/false)`
- *(added 0.54.0)* returns the given property from a native or cross file.
+ *(since 0.54.0)*: returns the given property from a native or cross file.
The optional fallback_value is returned if the given property is not found.
The optional `native: true` forces retrieving a variable from the
native file, even when cross-compiling.
If `native: false` or not specified, variable is retrieved from the
cross-file if cross-compiling, and from the native-file when not cross-compiling.
-- `can_run_host_binaries()` returns true if the build machine can run
+- `can_run_host_binaries()` *(since 0.55.0)*: returns true if the build machine can run
binaries compiled for the host. This returns true unless you are
cross compiling, need a helper to run host binaries, and don't have one.
For example when cross compiling from Linux to Windows, one can use `wine`
- as the helper. *New in 0.55.0*
+ as the helper.
-- `has_exe_wrapper()` alias of `can_run_host_binaries`
- *Deprecated since 0.55.0*
+- `has_exe_wrapper()` *(deprecated since 0.55.0)*: alias of `can_run_host_binaries`; use that instead.
-- `install_dependency_manifest(output_name)` installs a manifest file
+- `install_dependency_manifest(output_name)`: installs a manifest file
containing a list of all subprojects, their versions and license
files to the file name given as the argument.
-- `is_cross_build()` returns `true` if the current build is a [cross
+- `is_cross_build()`: returns `true` if the current build is a [cross
build](Cross-compilation.md) and `false` otherwise.
-- `is_subproject()` returns `true` if the current project is being
+- `is_subproject()`: returns `true` if the current project is being
built as a subproject of some other project and `false` otherwise.
-- `is_unity()` returns `true` when doing a [unity
+- `is_unity()`: returns `true` when doing a [unity
build](Unity-builds.md) (multiple sources are combined before
compilation to reduce build time) and `false` otherwise.
-- `override_find_program(progname, program)` [*(Added
- 0.46.0)*](Release-notes-for-0.46.0.md#can-override-find_program)
+- `override_find_program(progname, program)` *(since 0.46.0)*:
specifies that whenever `find_program` is used to find a program
named `progname`, Meson should not look it up on the system but
instead return `program`, which may either be the result of
- `find_program`, `configure_file` or `executable`. *Since 0.55.0* if a version
+ `find_program`, `configure_file` or `executable`. *(since 0.55.0)* If a version
check is passed to `find_program` for a program that has been overridden with
an executable, the current project version is used.
If `program` is an `executable`, it cannot be used during configure.
-- `override_dependency(name, dep_object)` [*(Added
- 0.54.0)*](Release-notes-for-0.54.0.md#override-dependency)
+- `override_dependency(name, dep_object)` *(since 0.54.0)*:
specifies that whenever `dependency(name, ...)` is used, Meson should not
look it up on the system but instead return `dep_object`, which may either be
the result of `dependency()` or `declare_dependency()`. It takes optional
@@ -1886,16 +1874,16 @@ the following methods.
project to retrieve the dependency without having to know the dependency
variable name: `dependency(name, fallback : subproject_name)`.
-- `project_version()` returns the version string specified in
+- `project_version()`: returns the version string specified in
`project` function call.
-- `project_license()` returns the array of licenses specified in
+- `project_license()`: returns the array of licenses specified in
`project` function call.
-- `project_name()` returns the project name specified in the `project`
+- `project_name()`: returns the project name specified in the `project`
function call.
-- `version()` return a string with the version of Meson.
+- `version()`: returns a string with the version of Meson.
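+
+For illustration, a short sketch using a few of the methods above:
+
+```meson
+message('configuring @0@ @1@ with the @2@ backend'.format(
+  meson.project_name(), meson.project_version(), meson.backend()))
+if meson.is_cross_build()
+  message('this is a cross build')
+endif
+```
+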
### `build_machine` object
@@ -1904,19 +1892,19 @@ doing the actual compilation. See
[Cross-compilation](Cross-compilation.md). It has the following
methods:
-- `cpu_family()` returns the CPU family name. [This
+- `cpu_family()`: returns the CPU family name. [This
table](Reference-tables.md#cpu-families) contains all known CPU
families. These are guaranteed to continue working.
-- `cpu()` returns a more specific CPU name, such as `i686`, `amd64`,
+- `cpu()`: returns a more specific CPU name, such as `i686`, `amd64`,
etc.
-- `system()` returns the operating system name. [This
+- `system()`: returns the operating system name. [This
table](Reference-tables.md#operating-system-names) Lists all of
the currently known Operating System names, these are guaranteed to
continue working.
-- `endian()` returns `big` on big-endian systems and `little` on
+- `endian()`: returns `big` on big-endian systems and `little` on
little-endian systems.
Currently, these values are populated using
@@ -1964,58 +1952,58 @@ the cross-info file, `host_machine` values are returned instead.
All [strings](Syntax.md#strings) have the following methods. Strings
are immutable, all operations return their results as a new string.
-- `contains(string)` returns true if string contains the string
- specified as the argument
+- `contains(string)`: returns true if string contains the string
+ specified as the argument.
-- `endswith(string)` returns true if string ends with the string
- specified as the argument
+- `endswith(string)`: returns true if string ends with the string
+ specified as the argument.
-- `format()` formats text, see the [Syntax
- manual](Syntax.md#string-formatting) for usage info
+- `format()`: formats text, see the [Syntax
+ manual](Syntax.md#string-formatting) for usage info.
-- `join(list_of_strings)` is the opposite of split, for example
- `'.'.join(['a', 'b', 'c']` yields `'a.b.c'`
+- `join(list_of_strings)`: the opposite of split, for example
+  `'.'.join(['a', 'b', 'c'])` yields `'a.b.c'`.
-- `split(split_character)` splits the string at the specified
+- `split(split_character)`: splits the string at the specified
character (or whitespace if not set) and returns the parts in an
- array
+ array.
-- `startswith(string)` returns true if string starts with the string
+- `startswith(string)`: returns true if string starts with the string
specified as the argument
-- `strip()` removes whitespace at the beginning and end of the string
- *(added 0.43.0)* optionally can take one positional string argument,
- and all characters in that string will be stripped
+- `strip()`: removes whitespace at the beginning and end of the string.
+ *(since 0.43.0)* Optionally can take one positional string argument,
+ and all characters in that string will be stripped.
-- `to_int` returns the string converted to an integer (error if string
- is not a number)
+- `to_int()`: returns the string converted to an integer (error if string
+ is not a number).
-- `to_lower()` creates a lower case version of the string
+- `to_lower()`: creates a lower case version of the string.
-- `to_upper()` creates an upper case version of the string
+- `to_upper()`: creates an upper case version of the string.
-- `underscorify()` creates a string where every non-alphabetical
- non-number character is replaced with `_`
+- `underscorify()`: creates a string where every non-alphabetical
+ non-number character is replaced with `_`.
-- `version_compare(comparison_string)` does semantic version
+- `version_compare(comparison_string)`: does semantic version
comparison, if `x = '1.2.3'` then `x.version_compare('>1.0.0')`
- returns `true`
+ returns `true`.
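+
+A short sketch exercising some of these methods:
+
+```meson
+ver = '1.2.3'
+parts = ver.split('.')              # ['1', '2', '3']
+rejoined = '-'.join(parts)          # '1-2-3'
+assert(ver.version_compare('>=1.2'), 'version too old')
+ident = 'my project'.underscorify() # 'my_project'
+```
+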
### `Number` object
[Numbers](Syntax.md#numbers) support these methods:
- - `is_even()` returns true if the number is even
- - `is_odd()` returns true if the number is odd
- - `to_string()` returns the value of the number as a string.
+- `is_even()`: returns true if the number is even
+- `is_odd()`: returns true if the number is odd
+- `to_string()`: returns the value of the number as a string.
### `boolean` object
A [boolean](Syntax.md#booleans) object has two simple methods:
-- `to_int()` as above, but returns either `1` or `0`
+- `to_int()`: returns either `1` or `0`.
-- `to_string()` returns the string `'true'` if the boolean is true or
+- `to_string()`: returns the string `'true'` if the boolean is true or
`'false'` otherwise. You can also pass it two strings as positional
arguments to specify what to return for true/false. For instance,
`bool.to_string('yes', 'no')` will return `yes` if the boolean is
@@ -2025,27 +2013,29 @@ A [boolean](Syntax.md#booleans) object has two simple methods:
The following methods are defined for all [arrays](Syntax.md#arrays):
-- `contains(item)`, returns `true` if the array contains the object
+- `contains(item)`: returns `true` if the array contains the object
given as argument, `false` otherwise
-- `get(index, fallback)`, returns the object at the given index,
+- `get(index, fallback)`: returns the object at the given index,
negative indices count from the back of the array, indexing out of
- bounds returns the `fallback` value *(added 0.38.0)* or, if it is
+ bounds returns the `fallback` value *(since 0.38.0)* or, if it is
not specified, causes a fatal error
-- `length()`, the size of the array
+- `length()`: the size of the array
You can also iterate over arrays with the [`foreach`
statement](Syntax.md#foreach-statements).
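+
+A short sketch using these methods:
+
+```meson
+langs = ['c', 'cpp']
+if langs.contains('cpp')
+  message('C++ support enabled')
+endif
+first = langs.get(0)            # 'c'
+extra = langs.get(5, 'none')    # out of bounds, returns the fallback 'none'
+message('@0@ languages configured'.format(langs.length()))
+```
+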
### `dictionary` object
+*(since 0.47.0)*
+
The following methods are defined for all [dictionaries](Syntax.md#dictionaries):
-- `has_key(key)` returns `true` if the dictionary contains the key
+- `has_key(key)`: returns `true` if the dictionary contains the key
given as argument, `false` otherwise
-- `get(key, fallback)`, returns the value for the key given as first
+- `get(key, fallback)`: returns the value for the key given as first
argument if it is present in the dictionary, or the optional
fallback value given as the second argument. If a single argument
was given and the key was not found, causes a fatal error
@@ -2053,9 +2043,7 @@ The following methods are defined for all [dictionaries](Syntax.md#dictionaries)
You can also iterate over dictionaries with the [`foreach`
statement](Syntax.md#foreach-statements).
-Dictionaries are available since 0.47.0.
-
-Since 0.48.0 dictionaries can be added (e.g. `d1 = d2 + d3` and `d1 += d2`).
+*(since 0.48.0)* Dictionaries can be added (e.g. `d1 = d2 + d3` and `d1 += d2`).
Values from the second dictionary overrides values from the first.
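+
+A short sketch using these methods and the addition operator:
+
+```meson
+defaults = {'opt' : 'O2', 'warnings' : 'all'}
+overrides = {'opt' : 'O3'}
+merged = defaults + overrides       # since 0.48.0; right-hand side wins
+message(merged.get('opt'))          # prints 'O3'
+if not merged.has_key('debug')
+  message('debug entry missing, using ' + merged.get('debug', 'off'))
+endif
+```
+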
## Returned objects
@@ -2069,204 +2057,202 @@ This object is returned by
for a given language and allows you to query its properties. It has
the following methods:
-- `alignment(typename)` returns the alignment of the type specified in
+- `alignment(typename)`: returns the alignment of the type specified in
the positional argument, you can specify external dependencies to
use with `dependencies` keyword argument.
-- `cmd_array()` returns an array containing the command arguments for
+- `cmd_array()`: returns an array containing the command arguments for
the current compiler.
-- `compiles(code)` returns true if the code fragment given in the
+- `compiles(code)`: returns true if the code fragment given in the
positional argument compiles, you can specify external dependencies
to use with `dependencies` keyword argument, `code` can be either a
string containing source code or a `file` object pointing to the
source code.
-- `compute_int(expr, ...')` computes the value of the given expression
+- `compute_int(expr, ...)`: computes the value of the given expression
(as an example `1 + 2`). When cross compiling this is evaluated with
an iterative algorithm, you can specify keyword arguments `low`
(defaults to -1024), `high` (defaults to 1024) and `guess` to
specify max and min values for the search and the value to try
first.
-- `find_library(lib_name, ...)` tries to find the library specified in
+- `find_library(lib_name, ...)`: tries to find the library specified in
the positional argument. The [result
object](#external-library-object) can be used just like the return
value of `dependency`. If the keyword argument `required` is false,
Meson will proceed even if the library is not found. By default the
library is searched for in the system library directory
(e.g. /usr/lib). This can be overridden with the `dirs` keyword
- argument, which can be either a string or a list of strings. Since
- *0.47.0* the value of a [`feature`](Build-options.md#features)
+ argument, which can be either a string or a list of strings.
+ *(since 0.47.0)* The value of a [`feature`](Build-options.md#features)
option can also be passed to the `required` keyword argument.
- *Since 0.49.0* if the keyword argument `disabler` is `true` and the
+ *(since 0.49.0)* If the keyword argument `disabler` is `true` and the
dependency couldn't be found, return a [disabler object](#disabler-object)
- instead of a not-found dependency. *Since 0.50.0* the `has_headers` keyword
+ instead of a not-found dependency. *(since 0.50.0)* The `has_headers` keyword
argument can be a list of header files that must be found as well, using
`has_header()` method. All keyword arguments prefixed with `header_` will be
- passed down to `has_header()` method with the prefix removed. *Since 0.51.0*
- the `static` keyword (boolean) can be set to `true` to limit the search to
+ passed down to `has_header()` method with the prefix removed. *(since 0.51.0)*
+ The `static` keyword (boolean) can be set to `true` to limit the search to
static libraries and `false` for dynamic/shared.
-- `first_supported_argument(list_of_strings)`, given a list of
+- `first_supported_argument(list_of_strings)`: given a list of
strings, returns the first argument that passes the `has_argument`
test or an empty array if none pass.
-- `first_supported_link_argument(list_of_strings)` *(added 0.46.0)*,
+- `first_supported_link_argument(list_of_strings)` *(since 0.46.0)*:
given a list of strings, returns the first argument that passes the
`has_link_argument` test or an empty array if none pass.
-- `get_define(definename)` returns the given preprocessor symbol's
+- `get_define(definename)`: returns the given preprocessor symbol's
value as a string or empty string if it is not defined.
- Starting with 0.47.0, this method will concatenate string literals as
+ *(since 0.47.0)* This method will concatenate string literals as
the compiler would. E.g. `"a" "b"` will become `"ab"`.
-- `get_id()` returns a string identifying the compiler. For example,
+- `get_id()`: returns a string identifying the compiler. For example,
`gcc`, `msvc`, [and more](Reference-tables.md#compiler-ids).
-- `get_argument_syntax()` *(new in 0.49.0)* returns a string identifying the type
+- `get_argument_syntax()` *(since 0.49.0)*: returns a string identifying the type
of arguments the compiler takes. Can be one of `gcc`, `msvc`, or an undefined
string value. This method is useful for identifying compilers that are not
gcc or msvc, but use the same argument syntax as one of those two compilers
such as clang or icc, especially when they use different syntax on different
operating systems.
-- `get_linker_id()` *(added 0.53.0)* returns a string identifying the linker.
+- `get_linker_id()` *(since 0.53.0)*: returns a string identifying the linker.
For example, `ld.bfd`, `link`, [and more](Reference-tables.md#linker-ids).
-- `get_supported_arguments(list_of_string)` *(added 0.43.0)* returns
+- `get_supported_arguments(list_of_string)` *(since 0.43.0)*: returns
an array containing only the arguments supported by the compiler,
as if `has_argument` were called on them individually.
-- `get_supported_link_arguments(list_of_string)` *(added 0.46.0)* returns
+- `get_supported_link_arguments(list_of_string)` *(since 0.46.0)*: returns
an array containing only the arguments supported by the linker,
as if `has_link_argument` were called on them individually.
-- `has_argument(argument_name)` returns true if the compiler accepts
+- `has_argument(argument_name)`: returns true if the compiler accepts
the specified command line argument, that is, can compile code
without erroring out or printing a warning about an unknown flag.
-- `has_link_argument(argument_name)` *(added 0.46.0)* returns true if
+- `has_link_argument(argument_name)` *(since 0.46.0)*: returns true if
the linker accepts the specified command line argument, that is, can
compile and link code without erroring out or printing a warning
about an unknown flag. Link arguments will be passed to the
compiler, so should usually have the `-Wl,` prefix. On VisualStudio
a `/link` argument will be prepended.
-- `has_function(funcname)` returns true if the given function is
+- `has_function(funcname)`: returns true if the given function is
provided by the standard library or a library passed in with the
`args` keyword, you can specify external dependencies to use with
`dependencies` keyword argument.
-- `check_header` *(added 0.47.0)* returns true if the specified header is *usable* with
+- `check_header` *(since 0.47.0)*: returns true if the specified header is *usable* with
the specified prefix, dependencies, and arguments.
You can specify external dependencies to use with `dependencies`
keyword argument and extra code to put above the header test with
the `prefix` keyword. In order to look for headers in a specific
directory you can use `args : '-I/extra/include/dir`, but this
should only be used in exceptional cases for includes that can't be
- detected via pkg-config and passed via `dependencies`. Since *0.50.0* the
+ detected via pkg-config and passed via `dependencies`. *(since 0.50.0)* The
`required` keyword argument can be used to abort if the header cannot be
found.
-- `has_header` returns true if the specified header *exists*, and is
+- `has_header`: returns true if the specified header *exists*, and is
faster than `check_header()` since it only does a pre-processor check.
You can specify external dependencies to use with `dependencies`
keyword argument and extra code to put above the header test with
the `prefix` keyword. In order to look for headers in a specific
directory you can use `args : '-I/extra/include/dir`, but this
should only be used in exceptional cases for includes that can't be
- detected via pkg-config and passed via `dependencies`. Since *0.50.0* the
+ detected via pkg-config and passed via `dependencies`. *(since 0.50.0)* The
`required` keyword argument can be used to abort if the header cannot be
found.
-- `has_header_symbol(headername, symbolname)` allows one to detect
+- `has_header_symbol(headername, symbolname)`: detects
whether a particular symbol (function, variable, #define, type
definition, etc) is declared in the specified header, you can
specify external dependencies to use with `dependencies` keyword
- argument. Since *0.50.0* the `required` keyword argument can be used to abort
- if the symbol cannot be found.
+ argument. *(since 0.50.0)* The `required` keyword argument can be
+ used to abort if the symbol cannot be found.
-- `has_member(typename, membername)` takes two arguments, type name
+- `has_member(typename, membername)`: takes two arguments, type name
and member name and returns true if the type has the specified
member, you can specify external dependencies to use with
`dependencies` keyword argument.
-- `has_members(typename, membername1, membername2, ...)` takes at
+- `has_members(typename, membername1, membername2, ...)`: takes at
least two arguments, type name and one or more member names, returns
true if the type has all the specified members, you can specify
external dependencies to use with `dependencies` keyword argument.
-- `has_multi_arguments(arg1, arg2, arg3, ...)` is the same as
+- `has_multi_arguments(arg1, arg2, arg3, ...)` *(since 0.37.0)*: the same as
`has_argument` but takes multiple arguments and uses them all in a
- single compiler invocation, available since 0.37.0.
+ single compiler invocation.
-- `has_multi_link_arguments(arg1, arg2, arg3, ...)` *(added 0.46.0)*
- is the same as `has_link_argument` but takes multiple arguments and
+- `has_multi_link_arguments(arg1, arg2, arg3, ...)` *(since 0.46.0)*:
+ the same as `has_link_argument` but takes multiple arguments and
uses them all in a single compiler invocation.
-- `has_type(typename)` returns true if the specified token is a type,
+- `has_type(typename)`: returns true if the specified token is a type,
you can specify external dependencies to use with `dependencies`
keyword argument.
-- `links(code)` returns true if the code fragment given in the
+- `links(code)`: returns true if the code fragment given in the
positional argument compiles and links, you can specify external
dependencies to use with `dependencies` keyword argument, `code` can
be either a string containing source code or a `file` object
pointing to the source code.
-- `run(code)` attempts to compile and execute the given code fragment,
+- `run(code)`: attempts to compile and execute the given code fragment,
returns a run result object, you can specify external dependencies
to use with `dependencies` keyword argument, `code` can be either a
string containing source code or a `file` object pointing to the
source code.
-- `symbols_have_underscore_prefix()` returns `true` if the C symbol
- mangling is one underscore (`_`) prefixed to the symbol, available
- since 0.37.0.
+- `symbols_have_underscore_prefix()` *(since 0.37.0)*: returns `true`
+ if the C symbol mangling is one underscore (`_`) prefixed to the symbol.
-- `sizeof(typename, ...)` returns the size of the given type
+- `sizeof(typename, ...)`: returns the size of the given type
(e.g. `'int'`) or -1 if the type is unknown, to add includes set
them in the `prefix` keyword argument, you can specify external
dependencies to use with `dependencies` keyword argument.
-- `version()` returns the compiler's version number as a string.
+- `version()`: returns the compiler's version number as a string.
-- `has_function_attribute(name)` *(added in 0.48.0)* returns `true` if the
+- `has_function_attribute(name)` *(since 0.48.0)*: returns `true` if the
compiler supports the GNU style (`__attribute__(...)`) `name`. This is
preferable to manual compile checks as it may be optimized for compilers that
do not support such attributes.
[This table](Reference-tables.md#gcc-__attribute__) lists all of the
supported attributes.
-- `get_supported_function_attributes(list_of_names)` *(added in 0.48.0)*
+- `get_supported_function_attributes(list_of_names)` *(since 0.48.0)*:
returns an array containing any names that are supported GCC style
attributes. Equivalent to `has_function_attribute` was called on each of them
individually.
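+
+For illustration, a short sketch combining a few of these checks (the flags and
+library names are examples only):
+
+```meson
+cc = meson.get_compiler('c')
+if not cc.has_function('clock_gettime', prefix : '#include <time.h>')
+  error('clock_gettime is required')
+endif
+m_dep = cc.find_library('m', required : false)
+extra_args = cc.get_supported_arguments(['-Wformat=2', '-Wduplicated-cond'])
+```
+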
The following keyword arguments can be used:
-- `args` can be used to pass a list of compiler arguments that are
+- `args`: used to pass a list of compiler arguments that are
required to find the header or symbol. For example, you might need
to pass the include path `-Isome/path/to/header` if a header is not
- in the default include path. In versions newer than 0.38.0 you
- should use the `include_directories` keyword described below. You
- may also want to pass a library name `-lfoo` for `has_function` to
- check for a function. Supported by all methods except `get_id`,
- `version`, and `find_library`.
+  in the default include path. *(since 0.38.0)* You should use the
+ `include_directories` keyword described below. You may also want to
+ pass a library name `-lfoo` for `has_function` to check for a function.
+ Supported by all methods except `get_id`, `version`, and `find_library`.
-- `include_directories` specifies extra directories for header
- searches. *(added 0.38.0)*
+- `include_directories` *(since 0.38.0)*: specifies extra directories for
+ header searches.
-- `name` the name to use for printing a message about the compiler
+- `name`: the name to use for printing a message about the compiler
check. Supported by the methods `compiles()`, `links()`, and
`run()`. If this keyword argument is not passed to those methods, no
message will be printed about the check.
-- `no_builtin_args` when set to true, the compiler arguments controlled
+- `no_builtin_args`: when set to true, the compiler arguments controlled
by built-in configuration options are not added.
-- `prefix` can be used to add #includes and other things that are
+- `prefix`: adds #includes and other things that are
required for the symbol to be declared. System definitions should be
passed via compiler args (eg: `_GNU_SOURCE` is often required for
some symbols to be exposed on Linux, and it should be passed via
@@ -2297,15 +2283,15 @@ A build target is either an [executable](#executable),
[both shared and static library](#both_libraries) or
[shared module](#shared_module).
-- `extract_all_objects()` is same as `extract_objects` but returns all
- object files generated by this target. Since 0.46.0 keyword argument
+- `extract_all_objects()`: same as `extract_objects` but returns all
+  object files generated by this target. *(since 0.46.0)* The keyword argument
`recursive` must be set to `true` to also return objects passed to
the `object` argument of this target. By default only objects built
for this target are returned to maintain backward compatibility with
previous versions. The default will eventually be changed to `true`
in a future version.
-- `extract_objects(source1, source2, ...)` takes as its arguments
+- `extract_objects(source1, source2, ...)`: takes as its arguments
a number of source files as [`string`](#string-object) or
[`files()`](#files) and returns an opaque value representing the
object files generated for those source files. This is typically used
@@ -2313,17 +2299,17 @@ A build target is either an [executable](#executable),
some source files with custom flags. To use the object file(s)
in another build target, use the `objects:` keyword argument.
-- `full_path()` returns a full path pointing to the result target file.
+- `full_path()`: returns a full path pointing to the result target file.
NOTE: In most cases using the object itself will do the same job as
this and will also allow Meson to setup inter-target dependencies
correctly. Please file a bug if that doesn't work for you.
+- `private_dir_include()`: returns an opaque value that works like
+- `private_dir_include()`: returns a opaque value that works like
`include_directories` but points to the private directory of this
target, usually only needed if an another target needs to access
some generated internal headers of this target
-- `name()` *Since 0.54.0*, returns the target name.
+- `name()` *(since 0.54.0)*: returns the target name.
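+
+A short sketch of these methods (file names are illustrative):
+
+```meson
+lib = static_library('foo', ['foo.c', 'bar.c'])
+# reuse the object file(s) compiled for foo.c in another target
+exe = executable('demo', 'main.c', objects : lib.extract_objects('foo.c'))
+message('demo will be written to ' + exe.full_path())
+```
+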
### `configuration` data object
@@ -2334,20 +2320,19 @@ configuration values to be used for generating configuration files. A
more in-depth description can be found in the [the configuration wiki
page](Configuration.md) It has three methods:
-- `get(varname, default_value)` returns the value of `varname`, if the
+- `get(varname, default_value)`: returns the value of `varname`, if the
value has not been set returns `default_value` if it is defined
- *(added 0.38.0)* and errors out if not
+ *(since 0.38.0)* and errors out if not
-- `get_unquoted(varname, default_value)` returns the value of `varname`
- but without surrounding double quotes (`"`). If the value has not been
- set returns `default_value` if it is defined and errors out if not.
- Available since 0.44.0
+- `get_unquoted(varname, default_value)` *(since 0.44.0)*: returns the value
+ of `varname` but without surrounding double quotes (`"`). If the value has
+ not been set returns `default_value` if it is defined and errors out if not.
-- `has(varname)`, returns `true` if the specified variable is set
+- `has(varname)`: returns `true` if the specified variable is set
-- `merge_from(other)` takes as argument a different configuration data
- object and copies all entries from that object to the current
- object, available since 0.42.0
+- `merge_from(other)` *(since 0.42.0)*: takes as argument a different
+ configuration data object and copies all entries from that object to
+  the current object.
- `set(varname, value)`, sets a variable to a given value
@@ -2369,20 +2354,20 @@ cause a syntax error.
This object is returned by [`custom_target`](#custom_target) and
contains a target with the following methods:
-- `full_path()` returns a full path pointing to the result target file
+- `full_path()`: returns a full path pointing to the result target file
NOTE: In most cases using the object itself will do the same job as
this and will also allow Meson to setup inter-target dependencies
correctly. Please file a bug if that doesn't work for you.
- *Since 0.54.0* it can be also called on indexes objects:
+ *(since 0.54.0)* It can be also called on indexes objects:
`custom_targets[i].full_path()`.
-- `[index]` returns an opaque object that references this target, and
+- `[index]`: returns an opaque object that references this target, and
can be used as a source in other targets. When it is used as such it
will make that target depend on this custom target, but the only
source added will be the one that corresponds to the index of the
custom target's output argument.
-- `to_list()` *Since 0.54.0*, returns a list of opaque objects that references
+- `to_list()` *(since 0.54.0)*: returns a list of opaque objects that references
this target, and can be used as a source in other targets. This can be used to
iterate outputs with `foreach` loop.
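+
+A short sketch of indexing a custom target (`gen_prog` stands in for a real
+generator program found with `find_program()`):
+
+```meson
+gen = custom_target('gen-sources',
+  output : ['foo.h', 'foo.c'],
+  command : [gen_prog, '@OUTPUT@'])
+# depend only on the generated header, output index 0
+exe = executable('app', 'main.c', gen[0])
+message('header will be written to ' + gen[0].full_path())
+```
+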
@@ -2391,48 +2376,48 @@ contains a target with the following methods:
This object is returned by [`dependency()`](#dependency) and contains
an external dependency with the following methods:
- - `found()` which returns whether the dependency was found
+ - `found()`: returns whether the dependency was found.
- - `name()` *(Added 0.48.0)* returns the name of the dependency that was
+ - `name()` *(since 0.48.0)*: returns the name of the dependency that was
searched. Returns `internal` for dependencies created with
`declare_dependency()`.
- - `get_pkgconfig_variable(varname)` *(Added 0.36.0)* will get the
+ - `get_pkgconfig_variable(varname)` *(since 0.36.0)*: gets the
pkg-config variable specified, or, if invoked on a non pkg-config
- dependency, error out. *(Added 0.44.0)* You can also redefine a
+ dependency, error out. *(since 0.44.0)* You can also redefine a
variable by passing a list to the `define_variable` parameter
that can affect the retrieved variable: `['prefix', '/'])`.
- *(Added 0.45.0)* A warning is issued if the variable is not defined,
+ *(since 0.45.0)* A warning is issued if the variable is not defined,
unless a `default` parameter is specified.
- - `get_configtool_variable(varname)` *(Added 0.44.0)* will get the
+ - `get_configtool_variable(varname)` *(since 0.44.0)*: gets the
command line argument from the config tool (with `--` prepended), or,
if invoked on a non config-tool dependency, error out.
- - `type_name()` which returns a string describing the type of the
+ - `type_name()`: returns a string describing the type of the
dependency, the most common values are `internal` for deps created
with `declare_dependency()` and `pkgconfig` for system dependencies
obtained with Pkg-config.
- - `version()` is the version number as a string, for example `1.2.8`.
+ - `version()`: the version number as a string, for example `1.2.8`.
`unknown` if the dependency provider doesn't support determining the
version.
- - `include_type()` returns whether the value set by the `include_type` kwarg
+ - `include_type()`: returns the value set by the `include_type` kwarg.
- - `as_system(value)` returns a copy of the dependency object, which has changed
+ - `as_system(value)`: returns a copy of the dependency object, which has changed
the value of `include_type` to `value`. The `value` argument is optional and
defaults to `'preserve'`.
- `partial_dependency(compile_args : false, link_args : false, links
- : false, includes : false, source : false)` *(Added 0.46.0)* returns
+ : false, includes : false, sources : false)` *(since 0.46.0)*: returns
a new dependency object with the same name, version, found status,
type name, and methods as the object that called it. This new
object will only inherit other attributes from its parent as
controlled by keyword arguments.
If the parent has any dependencies, those will be applied to the new
- partial dependency with the same rules. So , given:
+ partial dependency with the same rules. So, given:
```meson
dep1 = declare_dependency(compile_args : '-Werror=foo', link_with : 'libfoo')
@@ -2457,14 +2442,14 @@ an external dependency with the following methods:
- `get_variable(cmake : str, pkgconfig : str, configtool : str,
internal: str, default_value : str, pkgconfig_define : [str, str])`
- *(Added in 0.51.0)* A generic variable getter method, which replaces the
+ *(since 0.51.0)*: a generic variable getter method, which replaces the
get_*type*_variable methods. This allows one to get the variable
from a dependency without knowing specifically how that dependency
was found. If default_value is set and the value cannot be gotten
from the object then default_value is returned, if it is not set
then an error is raised.
- *New in 0.54.0, the `internal` keyword*
+  *(since 0.54.0)* The `internal` keyword was added.
### `disabler` object
@@ -2474,7 +2459,7 @@ statement (function call, logical op, etc) they will cause the
statement evaluation to immediately short circuit to return a disabler
object. A disabler object has one method:
- - `found()`, always returns `false`
+- `found()`: always returns `false`.
### `external program` object
@@ -2482,15 +2467,14 @@ This object is returned by [`find_program()`](#find_program) and
contains an external (i.e. not built as part of this project) program
and has the following methods:
-- `found()` which returns whether the executable was found
+- `found()`: returns whether the executable was found.
-- `path()` which returns a string pointing to the script or executable
+- `path()` *(deprecated since 0.55.0)*: use `full_path()` instead.
+  Returns a string pointing to the script or executable.
**NOTE:** You should not need to use this method. Passing the object
- itself should work in all cases. For example: `run_command(obj, arg1, arg2)`
- *Since 0.55.0* this method has been deprecated in favor of `full_path()` for
- consistency with other returned objects.
+ itself should work in all cases. For example: `run_command(obj, arg1, arg2)`.
-- `full_path()` *Since 0.55.0* which returns a string pointing to the script or
+- `full_path()` *(since 0.55.0)*: returns a string pointing to the script or
executable **NOTE:** You should not need to use this method. Passing the object
itself should work in all cases. For example: `run_command(obj, arg1, arg2)`.
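+
+A short sketch (looking up `python3` as an example):
+
+```meson
+python = find_program('python3', required : false)
+if python.found()
+  message('found python at ' + python.full_path())
+endif
+```
+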
@@ -2501,7 +2485,7 @@ detailed information about how environment variables should be set
during tests. It should be passed as the `env` keyword argument to
tests and other functions. It has the following methods.
-- `append(varname, value1, value2, ...)` appends the given values to
+- `append(varname, value1, value2, ...)`: appends the given values to
the old value of the environment variable, e.g. `env.append('FOO',
'BAR', 'BAZ', separator : ';')` produces `BOB;BAR;BAZ` if `FOO` had
the value `BOB` and plain `BAR;BAZ` if the value was not defined. If
@@ -2509,10 +2493,10 @@ tests and other functions. It has the following methods.
separator for the host operating system will be used, i.e. ';' for
Windows and ':' for UNIX/POSIX systems.
-- `prepend(varname, value1, value2, ...)` is the same as `append`
- except that it writes to the beginning of the variable
+- `prepend(varname, value1, value2, ...)`: same as `append`
+ except that it writes to the beginning of the variable.
-- `set(varname, value1, value2)` sets the environment variable
+- `set(varname, value1, value2)`: sets the environment variable
specified in the first argument to the values in the second argument
joined by the separator, e.g. `env.set('FOO', 'BAR'),` sets envvar
`FOO` to value `BAR`. See `append()` above for how separators work.
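+
+A short sketch (the `integration_exe` test executable is illustrative):
+
+```meson
+env = environment()
+env.set('TEST_DATA_DIR', meson.current_source_dir() / 'data')
+env.append('PATH', meson.current_build_dir(), separator : ':')
+test('integration', integration_exe, env : env)
+```
+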
@@ -2526,27 +2510,27 @@ This object is returned by [`find_library()`](#find_library) and
contains an external (i.e. not built as part of this project)
library. This object has the following methods:
- - `found()` which returns whether the library was found.
+- `found()`: returns whether the library was found.
- - `type_name()` *(added 0.48.0)* which returns a string describing
- the type of the dependency, which will be `library` in this case.
+- `type_name()` *(since 0.48.0)*: returns a string describing
+ the type of the dependency, which will be `library` in this case.
- - `partial_dependency(compile_args : false, link_args : false, links
- : false, includes : false, source : false)` *(Added 0.46.0)* returns
- a new dependency object with the same name, version, found status,
- type name, and methods as the object that called it. This new
- object will only inherit other attributes from its parent as
- controlled by keyword arguments.
+- `partial_dependency(compile_args : false, link_args : false, links
+  : false, includes : false, sources : false)` *(since 0.46.0)*: returns
+ a new dependency object with the same name, version, found status,
+ type name, and methods as the object that called it. This new
+ object will only inherit other attributes from its parent as
+ controlled by keyword arguments.
### Feature option object
-The following methods are defined for all [`feature` options](Build-options.md#features):
+*(since 0.47.0)*
-- `enabled()` returns whether the feature was set to `'enabled'`
-- `disabled()` returns whether the feature was set to `'disabled'`
-- `auto()` returns whether the feature was set to `'auto'`
+The following methods are defined for all [`feature` options](Build-options.md#features):
-Feature options are available since 0.47.0.
+- `enabled()`: returns whether the feature was set to `'enabled'`
+- `disabled()`: returns whether the feature was set to `'disabled'`
+- `auto()`: returns whether the feature was set to `'auto'`
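+
+A short sketch, assuming a project option such as
+`option('docs', type : 'feature', value : 'auto')`:
+
+```meson
+docs_feature = get_option('docs')
+if docs_feature.disabled()
+  message('documentation explicitly disabled')
+elif docs_feature.auto()
+  message('documentation will be built only if the needed tools are found')
+endif
+```
+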
### `generator` object
@@ -2554,7 +2538,7 @@ This object is returned by [`generator()`](#generator) and contains a
generator that is used to transform files from one type to another by
an executable (e.g. `idl` files into source code and headers).
-* `process(list_of_files, ...)` takes a list of files, causes them to
+- `process(list_of_files, ...)`: takes a list of files, causes them to
be processed and returns an object containing the result which can
then, for example, be passed into a build target definition. The
keyword argument `extra_args`, if specified, will be used to replace
@@ -2572,10 +2556,10 @@ an executable (e.g. `idl` files into source code and headers).
This object is returned by [`subproject()`](#subproject) and is an
opaque object representing it.
-- `found()` *(added 0.48.0)* which returns whether the subproject was
+- `found()` *(since 0.48.0)*: returns whether the subproject was
successfully setup
-- `get_variable(name, fallback)` fetches the specified variable from
+- `get_variable(name, fallback)`: fetches the specified variable from
inside the subproject. This is useful to, for instance, get a
[declared dependency](#declare_dependency) from the
[subproject](Subprojects.md).
@@ -2590,9 +2574,9 @@ This object encapsulates the result of trying to compile and run a
sample piece of code with [`compiler.run()`](#compiler-object) or
[`run_command()`](#run_command). It has the following methods:
-- `compiled()` if true, the compilation succeeded, if false it did not
+- `compiled()`: if true, the compilation succeeded, if false it did not
and the other methods return unspecified data. This is only available
for `compiler.run()` results.
-- `returncode()` the return code of executing the compiled binary
-- `stderr()` the standard error produced when the command was run
-- `stdout()` the standard out produced when the command was run
+- `returncode()`: the return code of executing the compiled binary
+- `stderr()`: the standard error produced when the command was run
+- `stdout()`: the standard out produced when the command was run
diff --git a/docs/markdown/Reference-tables.md b/docs/markdown/Reference-tables.md
index 60a9720..3be129f 100644
--- a/docs/markdown/Reference-tables.md
+++ b/docs/markdown/Reference-tables.md
@@ -82,14 +82,16 @@ set in the cross file.
| arc | 32 bit ARC processor |
| arm | 32 bit ARM processor |
| avr | Atmel AVR processor |
-| e2k | MCST Elbrus processor |
| c2000 | 32 bit C2000 processor |
+| dspic | 16 bit Microchip dsPIC |
+| e2k | MCST Elbrus processor |
| ia64 | Itanium processor |
| m68k | Motorola 68000 processor |
| microblaze | MicroBlaze processor |
| mips | 32 bit MIPS processor |
| mips64 | 64 bit MIPS processor |
| parisc | HP PA-RISC processor |
+| pic24 | 16 bit Microchip PIC24 |
| ppc | 32 bit PPC processors |
| ppc64 | 64 bit PPC processors |
| riscv32 | 32 bit RISC-V Open ISA |
@@ -98,12 +100,11 @@ set in the cross file.
| rx | Renesas RX 32 bit MCU |
| s390 | IBM zSystem s390 |
| s390x | IBM zSystem s390x |
+| sh4 | SuperH SH-4 |
| sparc | 32 bit SPARC |
| sparc64 | SPARC v9 processor |
| wasm32 | 32 bit Webassembly |
| wasm64 | 64 bit Webassembly |
-| pic24 | 16 bit Microchip PIC24 |
-| dspic | 16 bit Microchip dsPIC |
| x86 | 32 bit x86 processor |
| x86_64 | 64 bit x86 processor |
@@ -155,6 +156,10 @@ These are the parameter names for passing language specific arguments to your bu
| Rust | rust_args | rust_link_args |
| Vala | vala_args | vala_link_args |
+All these `<lang>_*` options are specified per machine. See [specifying
+options per machine](Builtin-options.md#Specifying-options-per-machine) for
+how to set them when cross building.
+
## Compiler and linker flag environment variables
These environment variables will be used to modify the compiler and
@@ -177,6 +182,10 @@ instead.
| RUSTFLAGS | Flags for the Rust compiler |
| LDFLAGS | The linker flags, used for all languages |
+N.B. these settings are specified per machine, and so the environment variables
+actually come in pairs. See the [environment variables per
+machine](#Environment-variables-per-machine) section for details.
+
## Function Attributes
These are the parameters names that are supported using
@@ -189,49 +198,50 @@ These values are supported using the GCC style `__attribute__` annotations,
which are supported by GCC, Clang, and other compilers.
-| Name |
-|----------------------|
-| alias |
-| aligned |
-| alloc_size |
-| always_inline |
-| artificial |
-| cold |
-| const |
-| constructor |
-| constructor_priority |
-| deprecated |
-| destructor |
-| error |
-| externally_visible |
-| fallthrough |
-| flatten |
-| format |
-| format_arg |
-| gnu_inline |
-| hot |
-| ifunc |
-| malloc |
-| noclone |
-| noinline |
-| nonnull |
-| noreturn |
-| nothrow |
-| optimize |
-| packed |
-| pure |
-| returns_nonnull |
-| unused |
-| used |
-| visibility* |
-| visibility:default† |
-| visibility:hidden† |
-| visibility:internal† |
-| visibility:protected†|
-| warning |
-| warn_unused_result |
-| weak |
-| weakreaf |
+| Name |
+|--------------------------|
+| alias |
+| aligned |
+| alloc_size |
+| always_inline |
+| artificial |
+| cold |
+| const |
+| constructor |
+| constructor_priority |
+| deprecated |
+| destructor |
+| error |
+| externally_visible |
+| fallthrough |
+| flatten |
+| format |
+| format_arg |
+| force_align_arg_pointer³ |
+| gnu_inline |
+| hot |
+| ifunc |
+| malloc |
+| noclone |
+| noinline |
+| nonnull |
+| noreturn |
+| nothrow |
+| optimize |
+| packed |
+| pure |
+| returns_nonnull |
+| unused |
+| used |
+| visibility* |
+| visibility:default† |
+| visibility:hidden† |
+| visibility:internal† |
+| visibility:protected† |
+| warning |
+| warn_unused_result |
+| weak |
+| weakref                  |
\* *Changed in 0.52.0* the "visibility" target no longer includes
"protected", which is not present in Apple's clang.
@@ -239,6 +249,8 @@ which are supported by GCC, Clang, and other compilers.
† *New in 0.52.0* These split visibility attributes are preferred to the plain
"visibility" as they provide narrower checks.
+³ *New in 0.55.0*
+
### MSVC __declspec
These values are supported using the MSVC style `__declspec` annotation,
@@ -267,6 +279,10 @@ These are the values that can be passed to `dependency` function's
## Compiler and Linker selection variables
+N.B. these settings are specified per machine, and so the environment variables
+actually come in pairs. See the [environment variables per
+machine](#Environment-variables-per-machine) section for details.
+
| Language | Compiler | Linker | Note |
|---------------|----------|-----------|---------------------------------------------|
| C | CC | CC_LD | |
@@ -280,5 +296,28 @@ These are the values that can be passed to `dependency` function's
| C# | CSC | CSC | The linker is the compiler |
*The old environment variales are still supported, but are deprecated and will
-be removed in a future version of meson.
+be removed in a future version of meson.*
+
+## Environment variables per machine
+
+Since *0.54.0*, following Autotools and other legacy build systems, environment
+variables that affect machine-specific settings come in pairs: for every bare
+environment variable `FOO`, there is a suffixed `FOO_FOR_BUILD`, where `FOO`
+just affects the host machine configuration, while `FOO_FOR_BUILD` just affects
+the build machine configuration. For example:
+
+ - `PKG_CONFIG_PATH_FOR_BUILD` controls the paths pkg-config will search for
+ just `native: true` dependencies (build machine).
+
+ - `PKG_CONFIG_PATH` controls the paths pkg-config will search for just
+ `native: false` dependencies (host machine).
+
+This mirrors the `build.` prefix used for (built-in) meson options, which has
+the same meaning.
+
+This is useful for cross builds. In native builds, build = host, and the
+unsuffixed environment variables alone suffice.
+Prior to *0.54.0*, there were no `_FOR_BUILD`-suffixed variables, and most
+environment variables only affected the native machine configuration, though
+this wasn't consistent (e.g. `PKG_CONFIG_PATH` still affected cross builds).
diff --git a/docs/markdown/Release-notes-for-0.54.0.md b/docs/markdown/Release-notes-for-0.54.0.md
index 3202b57..2f215de 100644
--- a/docs/markdown/Release-notes-for-0.54.0.md
+++ b/docs/markdown/Release-notes-for-0.54.0.md
@@ -359,3 +359,8 @@ target that has eight source files, Meson will generate two unity
files each of which includes four source files. The old behaviour can
be replicated by setting `unity_size` to a large value, such as 10000.
+## Verbose mode for `meson compile`
+
+The new option `--verbose` has been added to `meson compile` that will enable
+more verbose compilation logs. Note that for VS backend it means that logs will
+be less verbose by default (without `--verbose` option).
diff --git a/docs/markdown/Run-targets.md b/docs/markdown/Run-targets.md
index 38129a6..b584bf7 100644
--- a/docs/markdown/Run-targets.md
+++ b/docs/markdown/Run-targets.md
@@ -29,7 +29,7 @@ run_target('inspector',
Run targets are not run by default. To run it run the following command.
```console
-$ ninja inspector
+$ meson compile inspector
```
All additional entries in `run_target`'s `command` array are passed unchanged to the inspector script, so you can do things like this:
diff --git a/docs/markdown/Running-Meson.md b/docs/markdown/Running-Meson.md
index 910513c..326ecb9 100644
--- a/docs/markdown/Running-Meson.md
+++ b/docs/markdown/Running-Meson.md
@@ -9,13 +9,12 @@ from the source tree with the command `/path/to/source/meson.py`. Meson may
also be installed in which case the command is simply `meson`. In this manual
we only use the latter format for simplicity.
-Additionally, the invocation can pass options to meson. The list of options is
-documented [here](Builtin-options.md).
-
At the time of writing only a command line version of Meson is available. This
means that Meson must be invoked using the terminal. If you wish to use the
MSVC compiler, you need to run Meson under "Visual Studio command prompt".
+All available meson commands are listed on the [commands reference page](Commands.md).
+
## Configuring the build directory
Let us assume that we have a source tree that has a Meson build system. This
@@ -41,6 +40,9 @@ build backend in the build directory. By default Meson generates a *debug
build*, which turns on basic warnings and debug information and disables
compiler optimizations.
+Additionally, the invocation can pass options to meson. The list of options is
+documented [here](Builtin-options.md).
+
You can specify a different type of build with the `--buildtype` command line
argument. It can have one of the following values.
@@ -83,7 +85,7 @@ during configuration time. As an example, here is how you would use Meson to
generate a Visual studio solution.
```sh
-meson setup <build dir> --backend=vs2010
+meson setup <build dir> --backend=vs
```
You can then open the generated solution with Visual Studio and compile it in
@@ -105,9 +107,18 @@ linker arguments needed.
## Building from the source
-If you are not using an IDE, Meson uses the [Ninja build
-system](https://ninja-build.org/) to actually build the code. To start the
-build, simply type the following command.
+To start the build, simply type the following command.
+
+```sh
+meson compile -C builddir
+```
+
+See [`meson compile` description](Commands.md#compile) for more info.
+
+### Building directly with ninja
+
+By default Meson uses the [Ninja build system](https://ninja-build.org/) to
+actually build the code. To start the build, simply type the following command.
```sh
ninja -C builddir
@@ -133,20 +144,29 @@ Meson provides native support for running tests. The command to do that is
simple.
```sh
-ninja -C builddir test
+meson test -C builddir
```
+See [`meson test` description](Commands.md#test) for more info.
+
Meson does not force the use of any particular testing framework. You are free
to use GTest, Boost Test, Check or even custom executables.
+Note: it can also be invoked directly with ninja with the following command:
+```sh
+ninja -C builddir test
+```
+
## Installing
Installing the built software is just as simple.
```sh
-ninja -C builddir install
+meson install -C builddir
```
+See [`meson install` description](Commands.md#install) for more info.
+
Note that Meson will only install build targets explicitly tagged as
installable, as detailed in the [installing targets
documentation](Installing.md).
@@ -157,7 +177,12 @@ Meson also supports the `DESTDIR` variable used in e.g. building packages. It
is used like this:
```sh
-DESTDIR=/path/to/staging ninja -C builddir install
+DESTDIR=/path/to/staging meson install -C builddir
+```
+
+Note: it can also be invoked directly with ninja with the following command:
+```sh
+ninja -C builddir install
```
## Command line help
diff --git a/docs/markdown/Style-guide.md b/docs/markdown/Style-guide.md
index ee2ecfe..960e60c 100644
--- a/docs/markdown/Style-guide.md
+++ b/docs/markdown/Style-guide.md
@@ -11,6 +11,12 @@ Meson build files.
Always spaces.
+## Naming variables
+
+The most consistent naming convention is snake case. For example, if you
+would like to refer to your executable, something like `my_exe` or just
+`exe` works well.
+
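+A minimal sketch of snake case naming:
+
+```meson
+util_lib = static_library('util', 'util.c')
+my_exe = executable('myprog', 'main.c', link_with : util_lib)
+```
+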
## Naming options
There are two ways of naming project options. As an example for
diff --git a/docs/markdown/Subprojects.md b/docs/markdown/Subprojects.md
index 8232da9..9c54d69 100644
--- a/docs/markdown/Subprojects.md
+++ b/docs/markdown/Subprojects.md
@@ -212,6 +212,9 @@ the following command-line options:
calls, and those are meant to be used for sources that cannot be
provided by the system, such as copylibs.
+  This option may be overridden by `--force-fallback-for` for specific
+ dependencies.
+
* **--wrap-mode=forcefallback**
Meson will not look at the system for any dependencies which have
@@ -220,6 +223,19 @@ the following command-line options:
want to specifically build against the library sources provided by
your subprojects.
+* **--force-fallback-for=list,of,dependencies**
+
+ Meson will not look at the system for any dependencies listed there,
+ provided a fallback was supplied when the dependency was declared.
+
+ This option takes precedence over `--wrap-mode=nofallback`, and when
+ used in combination with `--wrap-mode=nodownload` will only work
+ if the dependency has already been downloaded.
+
+ This is useful when your project has many fallback dependencies,
+ but you only want to build against the library sources for a few
+ of them.
+
## Download subprojects
*Since 0.49.0*
diff --git a/docs/markdown/Syntax.md b/docs/markdown/Syntax.md
index 666d50e..7cb39e9 100644
--- a/docs/markdown/Syntax.md
+++ b/docs/markdown/Syntax.md
@@ -16,12 +16,12 @@ statements* and *includes*.
Usually one Meson statement takes just one line. There is no way to
have multiple statements on one line as in e.g. *C*. Function and
method calls' argument lists can be split over multiple lines. Meson
-will autodetect this case and do the right thing. In other cases you
-can get multi-line statements by ending the line with a `\`. Apart
-from line ending whitespace has no syntactic meaning.
+will autodetect this case and do the right thing.
-Variables
---
+In other cases *(added 0.50)*, you can get multi-line statements by ending the
+line with a `\`. Apart from line endings, whitespace has no syntactic meaning.
+
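+A minimal sketch of such a continuation (the variable name is arbitrary):
+
+```meson
+combined = 'the quick brown fox ' + \
+           'jumps over the lazy dog'
+```
+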
+## Variables
Variables in Meson work just like in other high level programming
languages. A variable can contain a value of any type, such as an
@@ -46,8 +46,7 @@ var2 += [4]
# var1 is still [1, 2, 3]
```
-Numbers
---
+## Numbers
Meson supports only integer numbers. They are declared simply by
writing them out. Basic arithmetic operations are supported.
@@ -85,8 +84,7 @@ int_var = 42
string_var = int_var.to_string()
```
-Booleans
---
+## Booleans
A boolean is either `true` or `false`.
@@ -94,8 +92,7 @@ A boolean is either `true` or `false`.
truth = true
```
-Strings
---
+## Strings
Strings in Meson are declared with single quotes. To enter a literal
single quote do it like this:
@@ -126,7 +123,7 @@ As in python and C, up to three octal digits are accepted in `\ooo`.
Unrecognized escape sequences are left in the string unchanged, i.e., the
backslash is left in the string.
-#### String concatenation
+### String concatenation
Strings can be concatenated to form a new string using the `+` symbol.
@@ -136,7 +133,25 @@ str2 = 'xyz'
combined = str1 + '_' + str2 # combined is now abc_xyz
```
-#### Strings running over multiple lines
+### String path building
+
+*(Added 0.49)*
+
+You can concatenate any two strings using `/` as an operator to build paths.
+This will always use `/` as the path separator on all platforms.
+
+```meson
+joined = '/usr/share' / 'projectname' # => /usr/share/projectname
+joined = '/usr/local' / '/etc/name' # => /etc/name
+
+joined = 'C:\\foo\\bar' / 'builddir' # => C:/foo/bar/builddir
+joined = 'C:\\foo\\bar' / 'D:\\builddir' # => D:/builddir
+```
+
+Note that this is equivalent to using [`join_paths()`](Reference-manual.md#join_paths),
+which was obsoleted by this operator.
+
+### Strings running over multiple lines
Strings running over multiple lines can be declared with three single
quotes, like this:
@@ -152,7 +167,7 @@ These are raw strings that do not support the escape sequences listed
above. These strings can also be combined with the string formatting
functionality described below.
-#### String formatting
+### String formatting
Strings can be built using the string formatting functionality.
@@ -165,12 +180,12 @@ res = template.format('text', 1, true)
As can be seen, the formatting works by replacing placeholders of type
`@number@` with the corresponding argument.
-#### String methods
+### String methods
Strings also support a number of other methods that return transformed
copies.
-**.strip()**
+#### .strip()
```meson
# Similar to the Python str.strip(). Removes leading/ending spaces and newlines
@@ -179,7 +194,7 @@ stripped_define = define.strip()
# 'stripped_define' now has the value '-Dsomedefine'
```
-**.to_upper()**, **.to_lower()**
+#### .to_upper(), .to_lower()
```meson
target = 'x86_FreeBSD'
@@ -187,7 +202,7 @@ upper = target.to_upper() # t now has the value 'X86_FREEBSD'
lower = target.to_lower() # t now has the value 'x86_freebsd'
```
-**.to_int()**
+#### .to_int()
```meson
version = '1'
@@ -195,7 +210,7 @@ version = '1'
ver_int = version.to_int()
```
-**.contains()**, **.startswith()**, **.endswith()**
+#### .contains(), .startswith(), .endswith()
```meson
target = 'x86_FreeBSD'
@@ -205,7 +220,7 @@ is_x86 = target.startswith('x86') # boolean value 'true'
is_bsd = target.to_lower().endswith('bsd') # boolean value 'true'
```
-**.split()**, **.join()**
+#### .split(), .join()
```meson
# Similar to the Python str.split()
@@ -246,7 +261,7 @@ api_version = '@0@.@1@'.format(version_array[0], version_array[1])
# api_version now (again) has the value '0.2'
```
-**.underscorify()**
+#### .underscorify()
```meson
name = 'Meson Docs.txt#Reference-manual'
@@ -256,7 +271,7 @@ underscored = name.underscorify()
# underscored now has the value 'Meson_Docs_txt_Reference_manual'
```
-**.version_compare()**
+#### .version_compare()
```meson
version = '1.2.3'
@@ -266,8 +281,15 @@ is_new = version.version_compare('>=2.0')
# Supports the following operators: '>', '<', '>=', '<=', '!=', '==', '='
```
-Arrays
---
+Meson version comparison conventions include:
+
+```meson
+'3.6'.version_compare('>=3.6.0') == false
+```
+
+It is best to be unambiguous and specify the full revision level to compare.
+
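+With the full revision level given, the comparison behaves as expected (a small
+illustration of the same convention):
+
+```meson
+'3.6.0'.version_compare('>=3.6.0') == true
+```
+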
+## Arrays
Arrays are delimited by brackets. An array can contain an arbitrary number of objects of any type.
@@ -302,6 +324,7 @@ assign it to `my_array` instead of modifying the original since all
objects in Meson are immutable.
Since 0.49.0, you can check if an array contains an element like this:
+
```meson
my_array = [1, 2]
if 1 in my_array
@@ -312,7 +335,7 @@ if 1 not in my_array
endif
```
-#### Array methods
+### Array methods
The following methods are defined for all arrays:
@@ -320,8 +343,7 @@ The following methods are defined for all arrays:
- `contains`, returns `true` if the array contains the object given as argument, `false` otherwise
- `get`, returns the object at the given index, negative indices count from the back of the array, indexing out of bounds is a fatal error. Provided for backwards-compatibility, it is identical to array indexing.
-Dictionaries
---
+## Dictionaries
Dictionaries are delimited by curly braces. A dictionary can contain an
arbitrary number of key value pairs. Keys are required to be strings, values can
@@ -346,6 +368,7 @@ Visit the [Reference Manual](Reference-manual.md#dictionary-object) to read
about the methods exposed by dictionaries.
Since 0.49.0, you can check if a dictionary contains a key like this:
+
```meson
my_dict = {'foo': 42, 'bar': 43}
if 'foo' in my_dict
@@ -361,14 +384,14 @@ endif
*Since 0.53.0* Keys can be any expression evaluating to a string value, not limited
to string literals any more.
+
```meson
d = {'a' + 'b' : 42}
k = 'cd'
d += {k : 43}
```
-Function calls
---
+## Function calls
Meson provides a set of usable functions. The most common use case is
creating build objects.
@@ -413,8 +436,7 @@ executable('progname', 'prog.c',
Attempting to do this causes Meson to immediately exit with an error.
-Method calls
---
+## Method calls
Objects can have methods, which are called with the dot operator. The
exact methods it provides depends on the object.
@@ -424,8 +446,7 @@ myobj = some_function()
myobj.do_something('now')
```
-If statements
---
+## If statements
If statements work just like in other languages.
@@ -446,8 +467,7 @@ if opt != 'foo'
endif
```
-Logical operations
---
+## Logical operations
Meson has the standard range of logical operations which can be used in
`if` statements.
@@ -537,8 +557,7 @@ endforeach
# result is ['a', 'b']
```
-Comments
---
+## Comments
A comment starts with the `#` character and extends until the end of the line.
@@ -547,8 +566,7 @@ some_function() # This is a comment
some_other_function()
```
-Ternary operator
---
+## Ternary operator
The ternary operator works just like in other languages.
@@ -560,8 +578,7 @@ The only exception is that nested ternary operators are forbidden to
improve legibility. If your branching needs are more complex than this
you need to write an `if/else` construct.
-Includes
---
+## Includes
Most source trees have multiple subdirectories to process. These can
be handled by Meson's `subdir` command. It changes to the given
@@ -576,8 +593,7 @@ test_data_dir = 'data'
subdir('tests')
```
-User-defined functions and methods
---
+## User-defined functions and methods
Meson does not currently support user-defined functions or
methods. The addition of user-defined functions would make Meson
@@ -589,8 +605,7 @@ because of this limitation you find yourself copying and pasting code
a lot you may be able to use a [`foreach` loop
instead](#foreach-statements).
-Stability Promises
---
+## Stability Promises
Meson is very actively developed and continuously improved. There is a
possibility that future enhancements to the Meson build system will require
@@ -599,8 +614,7 @@ keywords, changing the meaning of existing keywords or additions around the
basic building blocks like statements and fundamental types. It is planned
to stabilize the syntax with the 1.0 release.
-Grammar
---
+## Grammar
This is the full Meson grammar, as it is used to parse Meson build definition files:
@@ -622,7 +636,7 @@ equality_expression: relational_expression | (equality_expression equality_opera
equality_operator: "==" | "!="
expression: assignment_expression
expression_list: expression ("," expression)*
-expression_statememt: expression
+expression_statememt: expression
function_expression: id_expression "(" [argument_list] ")"
hex_literal: "0x" HEX_NUMBER
HEX_NUMBER: /[a-fA-F0-9]+/
diff --git a/docs/markdown/Tutorial.md b/docs/markdown/Tutorial.md
index 6b248b5..c5a4e6b 100644
--- a/docs/markdown/Tutorial.md
+++ b/docs/markdown/Tutorial.md
@@ -74,7 +74,7 @@ Now we are ready to build our code.
```
$ cd builddir
-$ ninja
+$ meson compile
```
Once that is done we can run the resulting binary.
@@ -124,12 +124,12 @@ or the like. Instead we just type the exact same command as if we were
rebuilding our code without any build system changes.
```
-$ ninja
+$ meson compile
```
Once you have set up your build directory the first time, you don't
ever need to run the `meson` command again. You always just run
-`ninja`. Meson will automatically detect when you have done changes to
+`meson compile`. Meson will automatically detect when you have made changes to
build definitions and will take care of everything so users don't have
to care. In this case the following output is produced.
diff --git a/docs/markdown/Unit-tests.md b/docs/markdown/Unit-tests.md
index 06664db6..60fcad2 100644
--- a/docs/markdown/Unit-tests.md
+++ b/docs/markdown/Unit-tests.md
@@ -12,7 +12,7 @@ e = executable('prog', 'testprog.c')
test('name of test', e)
```
-You can add as many tests as you want. They are run with the command `ninja
+You can add as many tests as you want. They are run with the command `meson
test`.
Meson captures the output of all tests and writes it in the log file
@@ -75,7 +75,7 @@ test machine. You can override this with the environment variable
`MESON_TESTTHREADS` like this.
```console
-$ MESON_TESTTHREADS=5 ninja test
+$ MESON_TESTTHREADS=5 meson test
```
## Priorities
@@ -122,8 +122,7 @@ The goal of the meson test tool is to provide a simple way to run tests in a
variety of different ways. The tool is designed to be run in the build
directory.
-The simplest thing to do is just to run all tests, which is equivalent to
-running `ninja test`.
+The simplest thing to do is just to run all tests.
```console
$ meson test
diff --git a/docs/markdown/Users.md b/docs/markdown/Users.md
index 1867407..49d30a4 100644
--- a/docs/markdown/Users.md
+++ b/docs/markdown/Users.md
@@ -70,6 +70,7 @@ topic](https://github.com/topics/meson).
- [Knot Resolver](https://gitlab.labs.nic.cz/knot/knot-resolver), Full caching DNS resolver implementation
- [Ksh](https://github.com/att/ast), a Korn Shell
- [Lc0](https://github.com/LeelaChessZero/lc0), LeelaChessZero is a UCI-compliant chess engine designed to play chess via neural network
+ - [Le](https://github.com/kirushyk/le), a machine learning framework
- [libcamera](https://git.linuxtv.org/libcamera.git/), a library to handle complex cameras on Linux, ChromeOS and Android
- [Libdrm](https://gitlab.freedesktop.org/mesa/drm), a library for abstracting DRM kernel interfaces
- [libeconf](https://github.com/openSUSE/libeconf), Enhanced config file parsing library, which merges config files placed in several locations into one
@@ -116,6 +117,7 @@ format files
- [RxDock](https://gitlab.com/rxdock/rxdock), a protein-ligand docking software designed for high throughput virtual screening (fork of rDock)
- [scrcpy](https://github.com/Genymobile/scrcpy), a cross platform application that provides display and control of Android devices connected on USB or over TCP/IP
- [Sequeler](https://github.com/Alecaddd/sequeler), a friendly SQL client for Linux, built with Vala and Gtk
+ - [Siril](https://gitlab.com/free-astro/siril), image processing software for amateur astronomy
- [SSHFS](https://github.com/libfuse/sshfs), allows you to mount a remote filesystem using SFTP
- [sway](https://github.com/swaywm/sway), i3-compatible Wayland compositor
- [Sysprof](https://git.gnome.org/browse/sysprof), a profiling tool
diff --git a/docs/markdown/Using-multiple-build-directories.md b/docs/markdown/Using-multiple-build-directories.md
index 2455640..ab6cf3c 100644
--- a/docs/markdown/Using-multiple-build-directories.md
+++ b/docs/markdown/Using-multiple-build-directories.md
@@ -32,9 +32,9 @@ You can add cross builds, too. As an example, let's set up a Linux -> Windows cr
mkdir buildwine
meson --cross-file=mingw-cross.txt buildwine
-The cross compilation file sets up Wine so that not only can you compile your application, you can also run the unit test suite just by issuing the command `ninja test`.
+The cross compilation file sets up Wine so that not only can you compile your application, you can also run the unit test suite just by issuing the command `meson test`.
-To compile any of these build types, just cd into the corresponding build directory and run `ninja` or instruct your IDE to do the same. Note that once you have set up your build directory once, you can just run Ninja and Meson will ensure that the resulting build is fully up to date according to the source. Even if you have not touched one of the directories in weeks and have done major changes to your build configuration, Meson will detect this and bring the build directory up to date (or print an error if it can't do that). This allows you to do most of your work in the default directory and use the others every now and then without having to babysit your build directories.
+To compile any of these build types, just cd into the corresponding build directory and run `meson compile` or instruct your IDE to do the same. Note that once you have set up your build directory, you can just run `meson compile` and Meson will ensure that the resulting build is fully up to date according to the source. Even if you have not touched one of the directories in weeks and have done major changes to your build configuration, Meson will detect this and bring the build directory up to date (or print an error if it can't do that). This allows you to do most of your work in the default directory and use the others every now and then without having to babysit your build directories.
## Specialized uses
diff --git a/docs/markdown/Vs-External.md b/docs/markdown/Vs-External.md
index add089e..ab3d191 100644
--- a/docs/markdown/Vs-External.md
+++ b/docs/markdown/Vs-External.md
@@ -23,9 +23,9 @@ as follows:
| entry | value |
| ----- | ----- |
-|build | `ninja -C $(Configuration)` |
-|clean | `ninja -C $(Configuration) clean` |
-|rebuild| `ninja -C $(Configuration) clean all|
+|build | `meson compile -C $(Configuration)` |
+|clean | `meson compile -C $(Configuration) --clean` |
+|rebuild| `meson compile -C $(Configuration) --clean && meson compile -C $(Configuration)` |
|Output | `$(Configuration)\name_of_your_executable.exe|
diff --git a/docs/markdown/Wrap-dependency-system-manual.md b/docs/markdown/Wrap-dependency-system-manual.md
index 868263c..e59a6be 100644
--- a/docs/markdown/Wrap-dependency-system-manual.md
+++ b/docs/markdown/Wrap-dependency-system-manual.md
@@ -70,15 +70,20 @@ revision = head
## Accepted configuration properties for wraps
- `directory` - name of the subproject root directory, defaults to the name of the wrap.
+Since *0.55.0* these can be used in all wrap types; they were previously reserved for `wrap-file`:
+- `patch_url` - download url to retrieve an optional overlay archive
+- `patch_fallback_url` - fallback URL to be used when download from `patch_url` fails *Since: 0.55.0*
+- `patch_filename` - filename of the downloaded overlay archive
+- `patch_hash` - sha256 checksum of the downloaded overlay archive
+- `patch_directory` - *Since 0.55.0* Overlay directory, an alternative to `patch_filename` when the
+ files are local instead of a downloaded archive. The directory must be placed in
+ `subprojects/packagefiles`.
+
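+As an illustrative sketch (the project name, URL and directory below are
+hypothetical), a `subprojects/foo.wrap` using a local overlay directory could
+look like this:
+
+```ini
+[wrap-git]
+url = https://example.com/foo.git
+revision = head
+patch_directory = foo-meson-build
+```
+
+Here the overlay files for `foo` would live in
+`subprojects/packagefiles/foo-meson-build`.
+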
### Specific to wrap-file
- `source_url` - download url to retrieve the wrap-file source archive
- `source_fallback_url` - fallback URL to be used when download from `source_url` fails *Since: 0.55.0*
- `source_filename` - filename of the downloaded source archive
- `source_hash` - sha256 checksum of the downloaded source archive
-- `patch_url` - download url to retrieve an optional overlay archive
-- `patch_fallback_url` - fallback URL to be used when download from `patch_url` fails *Since: 0.55.0*
-- `patch_filename` - filename of the downloaded overlay archive
-- `patch_hash` - sha256 checksum of the downloaded overlay archive
- `lead_directory_missing` - for `wrap-file` create the leading
directory name. Needed when the source file does not have a leading
directory.
diff --git a/docs/markdown/howtox.md b/docs/markdown/howtox.md
index 84546b7..c89f883 100644
--- a/docs/markdown/howtox.md
+++ b/docs/markdown/howtox.md
@@ -148,15 +148,14 @@ $ meson <other flags> -Db_coverage=true
Then issue the following commands.
```console
-$ ninja
-$ ninja test
-$ ninja coverage-html (or coverage-xml)
+$ meson compile
+$ meson test
+$ meson compile coverage-html (or coverage-xml)
```
The coverage report can be found in the meson-logs subdirectory.
-Note: Currently, Meson does not support generating coverage reports
-with Clang.
+*New in 0.55.0*: llvm-cov is used to generate the coverage report when Clang is the compiler.
## Add some optimization to debug builds
@@ -191,14 +190,14 @@ test failures.
Install scan-build and configure your project. Then do this:
```console
-$ ninja scan-build
+$ meson compile scan-build
```
You can use the `SCANBUILD` environment variable to choose the
scan-build executable.
```console
-$ SCANBUILD=<your exe> ninja scan-build
+$ SCANBUILD=<your exe> meson compile scan-build
```
@@ -209,8 +208,8 @@ operation. First we set up the project with profile measurements
enabled and compile it.
```console
-$ meson <Meson options, such as --buildtype=debugoptimized> -Db_pgo=generate
-$ ninja -C builddir
+$ meson setup <Meson options, such as --buildtype=debugoptimized> -Db_pgo=generate
+$ meson compile -C builddir
```
Then we need to run the program with some representative input. This
@@ -221,7 +220,7 @@ information and rebuild.
```console
$ meson configure -Db_pgo=use
-$ ninja
+$ meson compile
```
After these steps the resulting binary is fully optimized.
diff --git a/docs/markdown/snippets/add_compile_backend_arg.md b/docs/markdown/snippets/add_compile_backend_arg.md
new file mode 100644
index 0000000..76e2abb
--- /dev/null
+++ b/docs/markdown/snippets/add_compile_backend_arg.md
@@ -0,0 +1,26 @@
+## Added ability to specify backend arguments in `meson compile`
+
+It's now possible to specify backend specific arguments in `meson compile`.
+
+Usage: `meson compile [--vs-args=args] [--ninja-args=args]`
+
+```
+ --ninja-args NINJA_ARGS Arguments to pass to `ninja` (applied only on `ninja` backend).
+ --vs-args VS_ARGS Arguments to pass to `msbuild` (applied only on `vs` backend).
+```
+
+These arguments use the following syntax:
+
+If you only pass a single string, then it is considered to have all values separated by commas. Thus invoking the following command:
+
+```
+$ meson compile --ninja-args=-n,-d,explain
+```
+
+would add the `-n`, `-d` and `explain` arguments to the ninja invocation.
+
+If you need to have commas or spaces in your string values, then you need to pass the value with proper shell quoting like this:
+
+```
+$ meson compile "--ninja-args=['a,b', 'c d']"
+```
diff --git a/docs/markdown/snippets/add_meson_compile_target.md b/docs/markdown/snippets/add_meson_compile_target.md
new file mode 100644
index 0000000..d75862f
--- /dev/null
+++ b/docs/markdown/snippets/add_meson_compile_target.md
@@ -0,0 +1,19 @@
+## Added ability to specify targets in `meson compile`
+
+It's now possible to specify targets in `meson compile`, which will result in building only the requested targets.
+
+Usage: `meson compile [TARGET [TARGET...]]`
+`TARGET` has the following syntax: `[PATH/]NAME[:TYPE]`.
+`NAME`: name of the target from `meson.build` (e.g. `foo` from `executable('foo', ...)`).
+`PATH`: path to the target relative to the root `meson.build` file. Note: the relative path for a target specified in the root `meson.build` is `./`.
+`TYPE`: type of the target (e.g. `shared_library`, `executable`, etc.)
+
+`PATH` and/or `TYPE` can be omitted if the resulting `TARGET` can be used to uniquely identify the target in `meson.build`.
+
+For example, targets from the following code:
+```meson
+shared_library('foo', ...)
+static_library('foo', ...)
+executable('bar', ...)
+```
+can be invoked with `meson compile foo:shared_library foo:static_library bar`.
diff --git a/docs/markdown/snippets/clang_coverage.md b/docs/markdown/snippets/clang_coverage.md
new file mode 100644
index 0000000..733a3d9
--- /dev/null
+++ b/docs/markdown/snippets/clang_coverage.md
@@ -0,0 +1,4 @@
+## Clang coverage support
+
+llvm-cov is now used to generate coverage information when clang is used as
+the compiler. \ No newline at end of file
diff --git a/docs/markdown/snippets/force_fallback_for.md b/docs/markdown/snippets/force_fallback_for.md
new file mode 100644
index 0000000..b6af209
--- /dev/null
+++ b/docs/markdown/snippets/force_fallback_for.md
@@ -0,0 +1,10 @@
+## Force fallback for
+
+A newly-added `--force-fallback-for` command line option can now be used to
+force fallback for specific subprojects.
+
+Example:
+
+```
+meson build --force-fallback-for=foo,bar
+```
diff --git a/docs/markdown/snippets/gir_fatal_warnings.md b/docs/markdown/snippets/gir_fatal_warnings.md
new file mode 100644
index 0000000..951e98e
--- /dev/null
+++ b/docs/markdown/snippets/gir_fatal_warnings.md
@@ -0,0 +1,5 @@
+## Fatal warnings in `gnome.generate_gir()`
+
+`gnome.generate_gir()` now has a `fatal_warnings` keyword argument to abort when
+a warning is produced. This is useful, for example, in a CI environment where it's
+important to catch potential issues.
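+
+A minimal sketch (`foo_lib` and `foo_sources` are assumed to be defined
+elsewhere in the build files):
+
+```meson
+gnome = import('gnome')
+# foo_lib and foo_sources are placeholders for an existing library and its sources
+foo_gir = gnome.generate_gir(foo_lib,
+  sources: foo_sources,
+  namespace: 'Foo',
+  nsversion: '1.0',
+  fatal_warnings: true
+)
+```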
diff --git a/docs/markdown/snippets/machine_file_constants.md b/docs/markdown/snippets/machine_file_constants.md
new file mode 100644
index 0000000..84b0848
--- /dev/null
+++ b/docs/markdown/snippets/machine_file_constants.md
@@ -0,0 +1,20 @@
+## Machine file constants
+
+Native and cross files now support string and list concatenation using the `+`
+operator, and joining paths using the `/` operator.
+Entries defined in the `[constants]` section can be used in any other section.
+An entry defined in any other section can be used only within that same section and only
+after it has been defined.
+
+```ini
+[constants]
+toolchain = '/toolchain'
+common_flags = ['--sysroot=' + toolchain + '/sysroot']
+
+[properties]
+c_args = common_flags + ['-DSOMETHING']
+cpp_args = c_args + ['-DSOMETHING_ELSE']
+
+[binaries]
+c = toolchain + '/gcc'
+```
diff --git a/docs/markdown/snippets/response-files.md b/docs/markdown/snippets/response-files.md
new file mode 100644
index 0000000..624b664
--- /dev/null
+++ b/docs/markdown/snippets/response-files.md
@@ -0,0 +1,7 @@
+## Response files enabled on Linux, reined in on Windows
+
+Meson used to always use response files on Windows,
+but never on Linux.
+
+It now strikes a happier balance, using them on both platforms,
+but only when needed to avoid command line length limits.
diff --git a/docs/markdown/snippets/wrap_patch.md b/docs/markdown/snippets/wrap_patch.md
index 7d6d9c2..ae66bbd 100644
--- a/docs/markdown/snippets/wrap_patch.md
+++ b/docs/markdown/snippets/wrap_patch.md
@@ -4,3 +4,16 @@ It is now possible to use the `patch_filename` and `source_filename` value in a
`.wrap` file without `*_url` to specify a local source / patch file. All local
files must be located in the `subprojects/packagefiles` directory. The `*_hash`
entries are optional with this setup.
+
+## Local wrap patch directory
+
+Wrap files can now specify `patch_directory` instead of `patch_filename` in the
+case overlay files are local. Every files in that directory, and subdirectories,
+will be copied to the subproject directory. This can be used for example to add
+`meson.build` files to a project not using Meson build system upstream.
+The patch directory must be placed in `subprojects/packagefiles` directory.
+
+## Patch on all wrap types
+
+`patch_*` keys are no longer limited to `wrap-file`; they can be specified for
+all wrap types.
diff --git a/docs/markdown_dynamic/Commands.md b/docs/markdown_dynamic/Commands.md
new file mode 100644
index 0000000..a35b4da
--- /dev/null
+++ b/docs/markdown_dynamic/Commands.md
@@ -0,0 +1,296 @@
+# Command-line commands
+
+There are two different ways of invoking Meson. First, you can run it directly
+from the source tree with the command `/path/to/source/meson.py`. Meson may
+also be installed in which case the command is simply `meson`. In this manual
+we only use the latter format for simplicity.
+
+Meson is invoked using the following syntax:
+`meson [COMMAND] [COMMAND_OPTIONS]`
+
+This section describes all available commands and some of their optional arguments.
+The most common workflow is to run [`setup`](#setup), followed by [`compile`](#compile), and then [`install`](#install).
+
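+For example, a typical sequence (using a build directory named `builddir`)
+looks like this:
+
+```
+meson setup builddir
+meson compile -C builddir
+meson install -C builddir
+```
+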
+For the full list of all available options for a specific command, use the following syntax:
+`meson COMMAND --help`
+
+### configure
+
+```
+{{ cmd_help['configure']['usage'] }}
+```
+
+Changes options of a configured meson project.
+
+```
+{{ cmd_help['configure']['arguments'] }}
+```
+
+Most arguments are the same as in [`setup`](#setup).
+
+Note: reconfiguring the project will not reset options to their default values (even if they were changed in `meson.build`).
+
+#### Examples:
+
+List all available options:
+```
+meson configure builddir
+```
+
+Change value of a single option:
+```
+meson configure builddir -Doption=new_value
+```
+
+### compile
+
+*(since 0.54.0)*
+
+```
+{{ cmd_help['compile']['usage'] }}
+```
+
+Builds a default or a specified target of a configured meson project.
+
+```
+{{ cmd_help['compile']['arguments'] }}
+```
+
+The `--verbose` argument is available since 0.55.0.
+
+#### Targets
+
+*(since 0.55.0)*
+
+`TARGET` has the following syntax `[PATH/]NAME[:TYPE]`, where:
+- `NAME`: name of the target from `meson.build` (e.g. `foo` from `executable('foo', ...)`).
+- `PATH`: path to the target relative to the root `meson.build` file. Note: the relative path for a target specified in the root `meson.build` is `./`.
+- `TYPE`: type of the target. Can be one of the following: 'executable', 'static_library', 'shared_library', 'shared_module', 'custom', 'run', 'jar'.
+
+`PATH` and/or `TYPE` can be omitted if the resulting `TARGET` can be used to uniquely identify the target in `meson.build`.
+
+#### Backend specific arguments
+
+*(since 0.55.0)*
+
+`BACKEND-args` use the following syntax:
+
+If you only pass a single string, then it is considered to have all values separated by commas. Thus invoking the following command:
+
+```
+$ meson compile --ninja-args=-n,-d,explain
+```
+
+would add the `-n`, `-d` and `explain` arguments to the ninja invocation.
+
+If you need to have commas or spaces in your string values, then you need to pass the value with proper shell quoting like this:
+
+```
+$ meson compile "--ninja-args=['a,b', 'c d']"
+```
+
+#### Examples:
+
+Build the project:
+```
+meson compile -C builddir
+```
+
+Execute a dry run on ninja backend with additional debug info:
+```
+meson compile --ninja-args=-n,-d,explain
+```
+
+Build three targets: two targets that share the name `foo` but have different types, and a `bar` target:
+```
+meson compile foo:shared_library foo:static_library bar
+```
+
+Produce a coverage html report (if available):
+```
+meson compile coverage-html
+```
+
+### dist
+
+*(since 0.52.0)*
+
+```
+{{ cmd_help['dist']['usage'] }}
+```
+
+Generates a release archive from the current source tree.
+
+```
+{{ cmd_help['dist']['arguments'] }}
+```
+
+See [notes about creating releases](Creating-releases.md) for more info.
+
+#### Examples:
+
+Create a release archive:
+```
+meson dist -C builddir
+```
+
+### init
+
+*(since 0.45.0)*
+
+```
+{{ cmd_help['init']['usage'] }}
+```
+
+Creates a basic set of build files based on a template.
+
+```
+{{ cmd_help['init']['arguments'] }}
+```
+
+#### Examples:
+
+Create a project in `sourcedir`:
+```
+meson init -C sourcedir
+```
+
+### introspect
+
+```
+{{ cmd_help['introspect']['usage'] }}
+```
+
+Displays information about a configured meson project.
+
+```
+{{ cmd_help['introspect']['arguments'] }}
+```
+
+#### Examples:
+
+Display basic information about a configured project in `builddir`:
+```
+meson introspect builddir
+```
+
+### install
+
+*(since 0.47.0)*
+
+```
+{{ cmd_help['install']['usage'] }}
+```
+
+Installs the project to the prefix specified in [`setup`](#setup).
+
+```
+{{ cmd_help['install']['arguments'] }}
+```
+
+See [the installation documentation](Installing.md) for more info.
+
+#### Examples:
+
+Install project to `prefix`:
+```
+meson install -C builddir
+```
+
+Install project to `$DESTDIR/prefix`:
+```
+DESTDIR=/path/to/staging/area meson install -C builddir
+```
+
+### rewrite
+
+*(since 0.50.0)*
+
+```
+{{ cmd_help['rewrite']['usage'] }}
+```
+
+Modifies the meson project.
+
+```
+{{ cmd_help['rewrite']['arguments'] }}
+```
+
+See [the meson file rewriter documentation](Rewriter.md) for more info.
+
+### setup
+
+```
+{{ cmd_help['setup']['usage'] }}
+```
+
+Configures a build directory for the meson project.
+
+This is the default meson command (invoked if there was no COMMAND supplied).
+
+```
+{{ cmd_help['setup']['arguments'] }}
+```
+
+See [meson introduction page](Running-Meson.md#configuring-the-build-directory) for more info.
+
+#### Examples:
+
+Configures `builddir` with default values:
+```
+meson setup builddir
+```
+
+### subprojects
+
+*(since 0.49.0)*
+
+```
+{{ cmd_help['subprojects']['usage'] }}
+```
+
+Manages subprojects of the meson project.
+
+```
+{{ cmd_help['subprojects']['arguments'] }}
+```
+
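+#### Examples:
+
+Download all subprojects for which wrap files are provided (assuming the
+project ships `.wrap` files in its `subprojects` directory):
+```
+meson subprojects download
+```
+
+Update all previously downloaded subprojects:
+```
+meson subprojects update
+```
+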
+### test
+
+```
+{{ cmd_help['test']['usage'] }}
+```
+
+Runs tests for the configured meson project.
+
+```
+{{ cmd_help['test']['arguments'] }}
+```
+
+See [the unit test documentation](Unit-tests.md) for more info.
+
+#### Examples:
+
+Run tests for the project:
+```
+meson test -C builddir
+```
+
+Run only `specific_test_1` and `specific_test_2`:
+```
+meson test -C builddir specific_test_1 specific_test_2
+```
+
+### wrap
+
+```
+{{ cmd_help['wrap']['usage'] }}
+```
+
+A utility to manage WrapDB dependencies.
+
+```
+{{ cmd_help['wrap']['arguments'] }}
+```
+
+See [the WrapDB tool documentation](Using-wraptool.md) for more info.
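+
+#### Examples:
+
+List wraps available in WrapDB:
+```
+meson wrap list
+```
+
+Install the wrap file for a dependency (here `zlib`, as an example of a package
+available in WrapDB):
+```
+meson wrap install zlib
+```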
diff --git a/docs/meson.build b/docs/meson.build
index 32aab07..c07a200 100644
--- a/docs/meson.build
+++ b/docs/meson.build
@@ -1,16 +1,40 @@
project('Meson documentation', version: '1.0')
+cur_bdir = meson.current_build_dir()
+
+# Copy all files to build dir, since HotDoc uses relative paths
+run_command(
+ files('../tools/copy_files.py'),
+ '-C', meson.current_source_dir(),
+ '--output-dir', cur_bdir,
+ 'markdown', 'theme', 'sitemap.txt',
+ check: true)
+
+# Only the script knows which files are being generated
+docs_gen = custom_target(
+ 'gen_docs',
+ input: files('markdown/index.md'),
+ output: 'gen_docs.dummy',
+ command: [
+ files('../tools/regenerate_docs.py'),
+ '--output-dir', join_paths(cur_bdir, 'markdown'),
+ '--dummy-output-file', '@OUTPUT@',
+ ],
+ build_by_default: true,
+ install: false)
+
hotdoc = import('hotdoc')
documentation = hotdoc.generate_doc(meson.project_name(),
project_version: meson.project_version(),
- sitemap: 'sitemap.txt',
+ sitemap: join_paths(cur_bdir, 'sitemap.txt'),
build_by_default: true,
- index: 'markdown/index.md',
+ depends: docs_gen,
+ index: join_paths(cur_bdir, 'markdown/index.md'),
install: false,
extra_assets: ['images/'],
- include_paths: ['markdown'],
+ include_paths: [join_paths(cur_bdir, 'markdown')],
default_license: 'CC-BY-SAv4.0',
- html_extra_theme: join_paths('theme', 'extra'),
+ html_extra_theme: join_paths(cur_bdir, 'theme', 'extra'),
git_upload_repository: 'git@github.com:jpakkane/jpakkane.github.io.git',
edit_on_github_repository: 'https://github.com/mesonbuild/meson/',
syntax_highlighting_activate: true,
diff --git a/docs/sitemap.txt b/docs/sitemap.txt
index aa3f51a..be1d908 100644
--- a/docs/sitemap.txt
+++ b/docs/sitemap.txt
@@ -5,6 +5,7 @@ index.md
Manual.md
Overview.md
Running-Meson.md
+ Commands.md
Builtin-options.md
Using-with-Visual-Studio.md
Meson-sample.md
@@ -117,5 +118,6 @@ index.md
Using-multiple-build-directories.md
Vs-External.md
Contributing.md
+ MesonCI.md
legal.md
Videos.md
diff --git a/mesonbuild/arglist.py b/mesonbuild/arglist.py
new file mode 100644
index 0000000..fd4de96
--- /dev/null
+++ b/mesonbuild/arglist.py
@@ -0,0 +1,331 @@
+# Copyright 2012-2020 The Meson development team
+# Copyright © 2020 Intel Corporation
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from functools import lru_cache
+import collections
+import enum
+import os
+import re
+import typing as T
+
+from . import mesonlib
+
+if T.TYPE_CHECKING:
+ from .linkers import StaticLinker
+ from .compilers import Compiler
+
+UNIXY_COMPILER_INTERNAL_LIBS = ['m', 'c', 'pthread', 'dl', 'rt'] # type: T.List[str]
+# execinfo is a compiler lib on FreeBSD and NetBSD
+if mesonlib.is_freebsd() or mesonlib.is_netbsd():
+ UNIXY_COMPILER_INTERNAL_LIBS.append('execinfo')
+
+
+class Dedup(enum.Enum):
+
+ """What kind of deduplication can be done to compiler args.
+
+ OVERRIDEN - Whether an argument can be 'overridden' by a later argument.
+ For example, -DFOO defines FOO and -UFOO undefines FOO. In this case,
+ we can safely remove the previous occurrence and add a new one. The
+ same is true for include paths and library paths with -I and -L.
+ UNIQUE - Arguments that once specified cannot be undone, such as `-c` or
+ `-pipe`. New instances of these can be completely skipped.
+ NO_DEDUP - Whether it matters where or how many times on the command-line
+ a particular argument is present. This can matter for symbol
+ resolution in static or shared libraries, so we cannot de-dup or
+ reorder them.
+ """
+
+ NO_DEDUP = 0
+ UNIQUE = 1
+ OVERRIDEN = 2
+
+
+class CompilerArgs(collections.abc.MutableSequence):
+ '''
+ List-like class that manages a list of compiler arguments. Should be used
+ while constructing compiler arguments from various sources. Can be
+ operated with ordinary lists, so this does not need to be used
+ everywhere.
+
+ All arguments must be inserted and stored in GCC-style (-lfoo, -Idir, etc)
+ and can be converted to the native type of each compiler by using the
+ .to_native() method to which you must pass an instance of the compiler or
+ the compiler class.
+
+ New arguments added to this class (either with .append(), .extend(), or +=)
+ are added in a way that ensures that they override previous arguments.
+ For example:
+
+ >>> a = ['-Lfoo', '-lbar']
+ >>> a += ['-Lpho', '-lbaz']
+ >>> print(a)
+ ['-Lpho', '-Lfoo', '-lbar', '-lbaz']
+
+ Arguments will also be de-duped if they can be de-duped safely.
+
+ Note that because of all this, this class is not commutative and does not
+ preserve the order of arguments if it is safe not to. For example:
+ >>> ['-Ifoo', '-Ibar'] + ['-Ifez', '-Ibaz', '-Werror']
+ ['-Ifez', '-Ibaz', '-Ifoo', '-Ibar', '-Werror']
+ >>> ['-Ifez', '-Ibaz', '-Werror'] + ['-Ifoo', '-Ibar']
+ ['-Ifoo', '-Ibar', '-Ifez', '-Ibaz', '-Werror']
+
+ '''
+ # Arg prefixes that override by prepending instead of appending
+ prepend_prefixes = () # type: T.Tuple[str, ...]
+
+ # Arg prefixes and args that must be de-duped by returning 2
+ dedup2_prefixes = () # type: T.Tuple[str, ...]
+ dedup2_suffixes = () # type: T.Tuple[str, ...]
+ dedup2_args = () # type: T.Tuple[str, ...]
+
+ # Arg prefixes and args that must be de-duped by returning 1
+ #
+ # NOTE: not thorough. A list of potential corner cases can be found in
+ # https://github.com/mesonbuild/meson/pull/4593#pullrequestreview-182016038
+ dedup1_prefixes = () # type: T.Tuple[str, ...]
+ dedup1_suffixes = ('.lib', '.dll', '.so', '.dylib', '.a') # type: T.Tuple[str, ...]
+ # Match a .so of the form path/to/libfoo.so.0.1.0
+ # Only UNIX shared libraries require this. Others have a fixed extension.
+ dedup1_regex = re.compile(r'([\/\\]|\A)lib.*\.so(\.[0-9]+)?(\.[0-9]+)?(\.[0-9]+)?$')
+ dedup1_args = () # type: T.Tuple[str, ...]
+ # In generate_link() we add external libs without de-dup, but we must
+ # *always* de-dup these because they're special arguments to the linker
+ # TODO: these should probably move too
+ always_dedup_args = tuple('-l' + lib for lib in UNIXY_COMPILER_INTERNAL_LIBS) # type: T.Tuple[str, ...]
+
+ def __init__(self, compiler: T.Union['Compiler', 'StaticLinker'],
+ iterable: T.Optional[T.Iterable[str]] = None):
+ self.compiler = compiler
+ self._container = list(iterable) if iterable is not None else [] # type: T.List[str]
+ self.pre = collections.deque() # type: T.Deque[str]
+ self.post = collections.deque() # type: T.Deque[str]
+
+ # Flush the saved pre and post lists into the _container list
+ #
+ # This correctly deduplicates the entries according to the _can_dedup definition
+ # Note: This function is designed to work without delete operations, as deletions hurt performance considerably.
+ def flush_pre_post(self) -> None:
+ pre_flush = collections.deque() # type: T.Deque[str]
+ pre_flush_set = set() # type: T.Set[str]
+ post_flush = collections.deque() # type: T.Deque[str]
+ post_flush_set = set() # type: T.Set[str]
+
+ # The two lists are walked from front to back here, so that deduplication does not require any removals
+ for a in self.pre:
+ dedup = self._can_dedup(a)
+ if a not in pre_flush_set:
+ pre_flush.append(a)
+ if dedup is Dedup.OVERRIDEN:
+ pre_flush_set.add(a)
+ for a in reversed(self.post):
+ dedup = self._can_dedup(a)
+ if a not in post_flush_set:
+ post_flush.appendleft(a)
+ if dedup is Dedup.OVERRIDEN:
+ post_flush_set.add(a)
+
+ # pre and post will overwrite every element that is in the container
+ # only copy over args that are in _container but not in the post flush or pre flush set
+
+ for a in self._container:
+ if a not in post_flush_set and a not in pre_flush_set:
+ pre_flush.append(a)
+
+ self._container = list(pre_flush) + list(post_flush)
+ self.pre.clear()
+ self.post.clear()
+
+ def __iter__(self) -> T.Iterator[str]:
+ self.flush_pre_post()
+ return iter(self._container)
+
+ @T.overload # noqa: F811
+ def __getitem__(self, index: int) -> str: # noqa: F811
+ pass
+
+ @T.overload # noqa: F811
+ def __getitem__(self, index: slice) -> T.MutableSequence[str]: # noqa: F811
+ pass
+
+ def __getitem__(self, index): # noqa: F811
+ self.flush_pre_post()
+ return self._container[index]
+
+ @T.overload # noqa: F811
+ def __setitem__(self, index: int, value: str) -> None: # noqa: F811
+ pass
+
+ @T.overload # noqa: F811
+ def __setitem__(self, index: slice, value: T.Iterable[str]) -> None: # noqa: F811
+ pass
+
+ def __setitem__(self, index, value) -> None: # noqa: F811
+ self.flush_pre_post()
+ self._container[index] = value
+
+ def __delitem__(self, index: T.Union[int, slice]) -> None:
+ self.flush_pre_post()
+ del self._container[index]
+
+ def __len__(self) -> int:
+ return len(self._container) + len(self.pre) + len(self.post)
+
+ def insert(self, index: int, value: str) -> None:
+ self.flush_pre_post()
+ self._container.insert(index, value)
+
+ def copy(self) -> 'CompilerArgs':
+ self.flush_pre_post()
+ return type(self)(self.compiler, self._container.copy())
+
+ @classmethod
+ @lru_cache(maxsize=None)
+ def _can_dedup(cls, arg: str) -> Dedup:
+ """Returns whether the argument can be safely de-duped.
+
+ In addition to these, we handle library arguments specially.
+ With GNU ld, we surround library arguments with -Wl,--start/end-group
+ to recursively search for symbols in the libraries. This is not needed
+ with other linkers.
+ """
+
+ # A standalone argument must never be deduplicated because it is
+ # defined by what comes _after_ it. Thus dedupping this:
+ # -D FOO -D BAR
+ # would yield either
+ # -D FOO BAR
+ # or
+ # FOO -D BAR
+ # both of which are invalid.
+ if arg in cls.dedup2_prefixes:
+ return Dedup.NO_DEDUP
+ if arg in cls.dedup2_args or \
+ arg.startswith(cls.dedup2_prefixes) or \
+ arg.endswith(cls.dedup2_suffixes):
+ return Dedup.OVERRIDEN
+ if arg in cls.dedup1_args or \
+ arg.startswith(cls.dedup1_prefixes) or \
+ arg.endswith(cls.dedup1_suffixes) or \
+ re.search(cls.dedup1_regex, arg):
+ return Dedup.UNIQUE
+ return Dedup.NO_DEDUP
+
+ @classmethod
+ @lru_cache(maxsize=None)
+ def _should_prepend(cls, arg: str) -> bool:
+ return arg.startswith(cls.prepend_prefixes)
+
+ def to_native(self, copy: bool = False) -> T.List[str]:
+ # Check if we need to add --start/end-group for circular dependencies
+ # between static libraries, and for recursively searching for symbols
+ # needed by static libraries that are provided by object files or
+ # shared libraries.
+ self.flush_pre_post()
+ if copy:
+ new = self.copy()
+ else:
+ new = self
+ return self.compiler.unix_args_to_native(new._container)
+
+ def append_direct(self, arg: str) -> None:
+ '''
+ Append the specified argument without any reordering or de-dup except
+ for absolute paths to libraries, etc, which can always be de-duped
+ safely.
+ '''
+ self.flush_pre_post()
+ if os.path.isabs(arg):
+ self.append(arg)
+ else:
+ self._container.append(arg)
+
+ def extend_direct(self, iterable: T.Iterable[str]) -> None:
+ '''
+ Extend using the elements in the specified iterable without any
+ reordering or de-dup except for absolute paths where the order of
+ include search directories is not relevant
+ '''
+ self.flush_pre_post()
+ for elem in iterable:
+ self.append_direct(elem)
+
+ def extend_preserving_lflags(self, iterable: T.Iterable[str]) -> None:
+ normal_flags = []
+ lflags = []
+ for i in iterable:
+ if i not in self.always_dedup_args and (i.startswith('-l') or i.startswith('-L')):
+ lflags.append(i)
+ else:
+ normal_flags.append(i)
+ self.extend(normal_flags)
+ self.extend_direct(lflags)
+
+ def __add__(self, args: T.Iterable[str]) -> 'CompilerArgs':
+ self.flush_pre_post()
+ new = self.copy()
+ new += args
+ return new
+
+ def __iadd__(self, args: T.Iterable[str]) -> 'CompilerArgs':
+ '''
+ Add two CompilerArgs while taking into account overriding of arguments
+ and while preserving the order of arguments as much as possible
+ '''
+ tmp_pre = collections.deque() # type: T.Deque[str]
+ if not isinstance(args, collections.abc.Iterable):
+ raise TypeError('can only concatenate Iterable[str] (not "{}") to CompilerArgs'.format(args))
+ for arg in args:
+ # If the argument can be de-duped, do it either by removing the
+ # previous occurrence of it and adding a new one, or not adding the
+ # new occurrence.
+ dedup = self._can_dedup(arg)
+ if dedup is Dedup.UNIQUE:
+ # Argument already exists and adding a new instance is useless
+ if arg in self._container or arg in self.pre or arg in self.post:
+ continue
+ if self._should_prepend(arg):
+ tmp_pre.appendleft(arg)
+ else:
+ self.post.append(arg)
+ self.pre.extendleft(tmp_pre)
+ # pre and post are going to be merged later, before an iter call
+ return self
+
+ def __radd__(self, args: T.Iterable[str]) -> 'CompilerArgs':
+ self.flush_pre_post()
+ new = type(self)(self.compiler, args)
+ new += self
+ return new
+
+ def __eq__(self, other: T.Any) -> T.Union[bool]:
+ self.flush_pre_post()
+ # Only allow equality checks against other CompilerArgs and list instances
+ if isinstance(other, CompilerArgs):
+ return self.compiler == other.compiler and self._container == other._container
+ elif isinstance(other, list):
+ return self._container == other
+ return NotImplemented
+
+ def append(self, arg: str) -> None:
+ self.__iadd__([arg])
+
+ def extend(self, args: T.Iterable[str]) -> None:
+ self.__iadd__(args)
+
+ def __repr__(self) -> str:
+ self.flush_pre_post()
+ return 'CompilerArgs({!r}, {!r})'.format(self.compiler, self._container)
diff --git a/mesonbuild/backend/backends.py b/mesonbuild/backend/backends.py
index 840c9a3..cfd3a39 100644
--- a/mesonbuild/backend/backends.py
+++ b/mesonbuild/backend/backends.py
@@ -28,7 +28,6 @@ from .. import build
from .. import dependencies
from .. import mesonlib
from .. import mlog
-from ..compilers import CompilerArgs, VisualStudioLikeCompiler
from ..mesonlib import (
File, MachineChoice, MesonException, OrderedSet, OptionOverrideProxy,
classify_unity_sources, unholder
@@ -501,6 +500,12 @@ class Backend:
target.rpath_dirs_to_remove.update([d.encode('utf8') for d in result])
return tuple(result)
+ @staticmethod
+ def canonicalize_filename(fname):
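+ # Replace path separators and drive-letter colons so the result can be used as a flat object file name.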
+ for ch in ('/', '\\', ':'):
+ fname = fname.replace(ch, '_')
+ return fname
+
def object_filename_from_source(self, target, source):
assert isinstance(source, mesonlib.File)
build_dir = self.environment.get_build_dir()
@@ -531,7 +536,7 @@ class Backend:
source = os.path.relpath(os.path.join(build_dir, rel_src),
os.path.join(self.environment.get_source_dir(), target.get_subdir()))
machine = self.environment.machines[target.for_machine]
- return source.replace('/', '_').replace('\\', '_') + '.' + machine.get_object_suffix()
+ return self.canonicalize_filename(source) + '.' + machine.get_object_suffix()
def determine_ext_objs(self, extobj, proj_dir_to_build_root):
result = []
@@ -607,36 +612,20 @@ class Backend:
@staticmethod
def escape_extra_args(compiler, args):
- # No extra escaping/quoting needed when not running on Windows
- if not mesonlib.is_windows():
- return args
+ # all backslashes in defines are doubly-escaped
extra_args = []
- # Compiler-specific escaping is needed for -D args but not for any others
- if isinstance(compiler, VisualStudioLikeCompiler):
- # MSVC needs escaping when a -D argument ends in \ or \"
- for arg in args:
- if arg.startswith('-D') or arg.startswith('/D'):
- # Without extra escaping for these two, the next character
- # gets eaten
- if arg.endswith('\\'):
- arg += '\\'
- elif arg.endswith('\\"'):
- arg = arg[:-2] + '\\\\"'
- extra_args.append(arg)
- else:
- # MinGW GCC needs all backslashes in defines to be doubly-escaped
- # FIXME: Not sure about Cygwin or Clang
- for arg in args:
- if arg.startswith('-D') or arg.startswith('/D'):
- arg = arg.replace('\\', '\\\\')
- extra_args.append(arg)
+ for arg in args:
+ if arg.startswith('-D') or arg.startswith('/D'):
+ arg = arg.replace('\\', '\\\\')
+ extra_args.append(arg)
+
return extra_args
def generate_basic_compiler_args(self, target, compiler, no_warn_args=False):
# Create an empty commands list, and start adding arguments from
# various sources in the order in which they must override each other
# starting from hard-coded defaults followed by build options and so on.
- commands = CompilerArgs(compiler)
+ commands = compiler.compiler_args()
copt_proxy = self.get_compiler_options_for_target(target)[compiler.language]
# First, the trivial ones that are impossible to override.
@@ -759,6 +748,7 @@ class Backend:
for deppath in self.rpaths_for_bundled_shared_libraries(target, exclude_system=False):
result.add(os.path.normpath(os.path.join(self.environment.get_build_dir(), deppath)))
for bdep in extra_bdeps:
+ prospectives.add(bdep)
prospectives.update(bdep.get_transitive_link_deps())
# Internal deps
for ld in prospectives:
@@ -1050,35 +1040,36 @@ class Backend:
elif not isinstance(i, str):
err_msg = 'Argument {0} is of unknown type {1}'
raise RuntimeError(err_msg.format(str(i), str(type(i))))
- elif '@SOURCE_ROOT@' in i:
- i = i.replace('@SOURCE_ROOT@', source_root)
- elif '@BUILD_ROOT@' in i:
- i = i.replace('@BUILD_ROOT@', build_root)
- elif '@DEPFILE@' in i:
- if target.depfile is None:
- msg = 'Custom target {!r} has @DEPFILE@ but no depfile ' \
- 'keyword argument.'.format(target.name)
- raise MesonException(msg)
- dfilename = os.path.join(outdir, target.depfile)
- i = i.replace('@DEPFILE@', dfilename)
- elif '@PRIVATE_DIR@' in i:
- if target.absolute_paths:
- pdir = self.get_target_private_dir_abs(target)
- else:
- pdir = self.get_target_private_dir(target)
- i = i.replace('@PRIVATE_DIR@', pdir)
- elif '@PRIVATE_OUTDIR_' in i:
- match = re.search(r'@PRIVATE_OUTDIR_(ABS_)?([^/\s*]*)@', i)
- if not match:
- msg = 'Custom target {!r} has an invalid argument {!r}' \
- ''.format(target.name, i)
- raise MesonException(msg)
- source = match.group(0)
- if match.group(1) is None and not target.absolute_paths:
- lead_dir = ''
- else:
- lead_dir = self.environment.get_build_dir()
- i = i.replace(source, os.path.join(lead_dir, outdir))
+ else:
+ if '@SOURCE_ROOT@' in i:
+ i = i.replace('@SOURCE_ROOT@', source_root)
+ if '@BUILD_ROOT@' in i:
+ i = i.replace('@BUILD_ROOT@', build_root)
+ if '@DEPFILE@' in i:
+ if target.depfile is None:
+ msg = 'Custom target {!r} has @DEPFILE@ but no depfile ' \
+ 'keyword argument.'.format(target.name)
+ raise MesonException(msg)
+ dfilename = os.path.join(outdir, target.depfile)
+ i = i.replace('@DEPFILE@', dfilename)
+ if '@PRIVATE_DIR@' in i:
+ if target.absolute_paths:
+ pdir = self.get_target_private_dir_abs(target)
+ else:
+ pdir = self.get_target_private_dir(target)
+ i = i.replace('@PRIVATE_DIR@', pdir)
+ if '@PRIVATE_OUTDIR_' in i:
+ match = re.search(r'@PRIVATE_OUTDIR_(ABS_)?([^/\s*]*)@', i)
+ if not match:
+ msg = 'Custom target {!r} has an invalid argument {!r}' \
+ ''.format(target.name, i)
+ raise MesonException(msg)
+ source = match.group(0)
+ if match.group(1) is None and not target.absolute_paths:
+ lead_dir = ''
+ else:
+ lead_dir = self.environment.get_build_dir()
+ i = i.replace(source, os.path.join(lead_dir, outdir))
cmd.append(i)
# Substitute the rest of the template strings
values = mesonlib.get_filenames_templates_dict(inputs, outputs)
diff --git a/mesonbuild/backend/ninjabackend.py b/mesonbuild/backend/ninjabackend.py
index 69e7618..62bda1a 100644
--- a/mesonbuild/backend/ninjabackend.py
+++ b/mesonbuild/backend/ninjabackend.py
@@ -15,8 +15,10 @@ import typing as T
import os
import re
import pickle
+import shlex
import subprocess
from collections import OrderedDict
+from enum import Enum, unique
import itertools
from pathlib import PurePath, Path
from functools import lru_cache
@@ -28,9 +30,15 @@ from .. import build
from .. import mlog
from .. import dependencies
from .. import compilers
-from ..compilers import (Compiler, CompilerArgs, CCompiler, FortranCompiler,
- PGICCompiler, VisualStudioLikeCompiler)
-from ..linkers import ArLinker
+from ..arglist import CompilerArgs
+from ..compilers import (
+ Compiler, CCompiler,
+ DmdDCompiler,
+ FortranCompiler, PGICCompiler,
+ VisualStudioCsCompiler,
+ VisualStudioLikeCompiler,
+)
+from ..linkers import ArLinker, VisualStudioLinker
from ..mesonlib import (
File, LibType, MachineChoice, MesonException, OrderedSet, PerMachine,
ProgressBar, quote_arg, unholder,
@@ -45,18 +53,67 @@ FORTRAN_MODULE_PAT = r"^\s*\bmodule\b\s+(\w+)\s*(?:!+.*)*$"
FORTRAN_SUBMOD_PAT = r"^\s*\bsubmodule\b\s*\((\w+:?\w+)\)\s*(\w+)"
FORTRAN_USE_PAT = r"^\s*use,?\s*(?:non_intrinsic)?\s*(?:::)?\s*(\w+)"
+def cmd_quote(s):
+ # see: https://docs.microsoft.com/en-us/windows/desktop/api/shellapi/nf-shellapi-commandlinetoargvw#remarks
+
+ # backslash escape any existing double quotes
+ # any existing backslashes preceding a quote are doubled
+ s = re.sub(r'(\\*)"', lambda m: '\\' * (len(m.group(1)) * 2 + 1) + '"', s)
+ # any terminal backslashes likewise need doubling
+ s = re.sub(r'(\\*)$', lambda m: '\\' * (len(m.group(1)) * 2), s)
+ # and double quote
+ s = '"{}"'.format(s)
+
+ return s
+
+def gcc_rsp_quote(s):
+ # see: the function buildargv() in libiberty
+ #
+ # this differs from sh-quoting in that a backslash *always* escapes the
+ # following character, even inside single quotes.
+
+ s = s.replace('\\', '\\\\')
+
+ return shlex.quote(s)
+
+# How ninja executes command lines differs between Unix and Windows
+# (see https://ninja-build.org/manual.html#ref_rule_command)
if mesonlib.is_windows():
- # FIXME: can't use quote_arg on Windows just yet; there are a number of existing workarounds
- # throughout the codebase that cumulatively make the current code work (see, e.g. Backend.escape_extra_args
- # and NinjaBuildElement.write below) and need to be properly untangled before attempting this
- quote_func = lambda s: '"{}"'.format(s)
- execute_wrapper = ['cmd', '/c']
+ quote_func = cmd_quote
+ execute_wrapper = ['cmd', '/c'] # unused
rmfile_prefix = ['del', '/f', '/s', '/q', '{}', '&&']
else:
quote_func = quote_arg
execute_wrapper = []
rmfile_prefix = ['rm', '-f', '{}', '&&']
+def get_rsp_threshold():
+ '''Return a conservative estimate of the commandline size in bytes
+ above which a response file should be used. May be overridden for
+ debugging by setting environment variable MESON_RSP_THRESHOLD.'''
+
+ if mesonlib.is_windows():
+ # Usually 32k, but some projects might use cmd.exe,
+ # and that has a limit of 8k.
+ limit = 8192
+ else:
+ # On Linux, ninja always passes the commandline as a single
+ # big string to /bin/sh, and the kernel limits the size of a
+ # single argument; see MAX_ARG_STRLEN
+ limit = 131072
+ # Be conservative
+ limit = limit / 2
+ return int(os.environ.get('MESON_RSP_THRESHOLD', limit))
+
+# a conservative estimate of the command-line length limit
+rsp_threshold = get_rsp_threshold()
+
+# ninja variables whose value should remain unquoted. The value of these ninja
+# variables (or variables we use them in) is interpreted directly by ninja
+# (e.g. the value of the depfile variable is a pathname that ninja will read
+# from, etc.), so it must not be shell quoted.
+raw_names = {'DEPFILE_UNQUOTED', 'DESC', 'pool', 'description', 'targetdep'}
+
def ninja_quote(text, is_build_line=False):
if is_build_line:
qcs = ('$', ' ', ':')
@@ -73,6 +130,25 @@ Please report this error with a test case to the Meson bug tracker.'''.format(te
raise MesonException(errmsg)
return text
+@unique
+class Quoting(Enum):
+ both = 0
+ notShell = 1
+ notNinja = 2
+ none = 3
+
+class NinjaCommandArg:
+ def __init__(self, s, quoting = Quoting.both):
+ self.s = s
+ self.quoting = quoting
+
+ def __str__(self):
+ return self.s
+
+ @staticmethod
+ def list(l, q):
+ return [NinjaCommandArg(i, q) for i in l]
+
class NinjaComment:
def __init__(self, comment):
self.comment = comment
@@ -86,49 +162,127 @@ class NinjaComment:
class NinjaRule:
def __init__(self, rule, command, args, description,
- rspable = False, deps = None, depfile = None, extra = None):
+ rspable = False, deps = None, depfile = None, extra = None,
+ rspfile_quote_style = 'gcc'):
+
+ def strToCommandArg(c):
+ if isinstance(c, NinjaCommandArg):
+ return c
+
+ # deal with common cases here, so we don't have to explicitly
+ # annotate the required quoting everywhere
+ if c == '&&':
+ # shell constructs shouldn't be shell quoted
+ return NinjaCommandArg(c, Quoting.notShell)
+ if c.startswith('$'):
+ var = re.search(r'\$\{?(\w*)\}?', c).group(1)
+ if var not in raw_names:
+ # ninja variables shouldn't be ninja quoted, and their value
+ # is already shell quoted
+ return NinjaCommandArg(c, Quoting.none)
+ else:
+ # shell quote the use of ninja variables whose value must
+ # not be shell quoted (as it also used by ninja)
+ return NinjaCommandArg(c, Quoting.notNinja)
+
+ return NinjaCommandArg(c)
+
self.name = rule
- self.command = command # includes args which never go into a rspfile
- self.args = args # args which will go into a rspfile, if used
+ self.command = list(map(strToCommandArg, command)) # includes args which never go into a rspfile
+ self.args = list(map(strToCommandArg, args)) # args which will go into a rspfile, if used
self.description = description
self.deps = deps # depstyle 'gcc' or 'msvc'
self.depfile = depfile
self.extra = extra
self.rspable = rspable # if a rspfile can be used
self.refcount = 0
+ self.rsprefcount = 0
+ self.rspfile_quote_style = rspfile_quote_style # rspfile quoting style is 'gcc' or 'cl'
- def write(self, outfile):
- if not self.refcount:
- return
+ if self.depfile == '$DEPFILE':
+ self.depfile += '_UNQUOTED'
+
+ @staticmethod
+ def _quoter(x, qf = quote_func):
+ if isinstance(x, NinjaCommandArg):
+ if x.quoting == Quoting.none:
+ return x.s
+ elif x.quoting == Quoting.notNinja:
+ return qf(x.s)
+ elif x.quoting == Quoting.notShell:
+ return ninja_quote(x.s)
+ # fallthrough
+ return ninja_quote(qf(str(x)))
- outfile.write('rule {}\n'.format(self.name))
- if self.rspable:
- outfile.write(' command = {} @$out.rsp\n'.format(' '.join(self.command)))
- outfile.write(' rspfile = $out.rsp\n')
- outfile.write(' rspfile_content = {}\n'.format(' '.join(self.args)))
+ def write(self, outfile):
+ if self.rspfile_quote_style == 'cl':
+ rspfile_quote_func = cmd_quote
else:
- outfile.write(' command = {}\n'.format(' '.join(self.command + self.args)))
- if self.deps:
- outfile.write(' deps = {}\n'.format(self.deps))
- if self.depfile:
- outfile.write(' depfile = {}\n'.format(self.depfile))
- outfile.write(' description = {}\n'.format(self.description))
- if self.extra:
- for l in self.extra.split('\n'):
- outfile.write(' ')
- outfile.write(l)
- outfile.write('\n')
- outfile.write('\n')
+ rspfile_quote_func = gcc_rsp_quote
+
+ def rule_iter():
+ if self.refcount:
+ yield ''
+ if self.rsprefcount:
+ yield '_RSP'
+
+ for rsp in rule_iter():
+ outfile.write('rule {}{}\n'.format(self.name, rsp))
+ if rsp == '_RSP':
+ outfile.write(' command = {} @$out.rsp\n'.format(' '.join([self._quoter(x) for x in self.command])))
+ outfile.write(' rspfile = $out.rsp\n')
+ outfile.write(' rspfile_content = {}\n'.format(' '.join([self._quoter(x, rspfile_quote_func) for x in self.args])))
+ else:
+ outfile.write(' command = {}\n'.format(' '.join([self._quoter(x) for x in (self.command + self.args)])))
+ if self.deps:
+ outfile.write(' deps = {}\n'.format(self.deps))
+ if self.depfile:
+ outfile.write(' depfile = {}\n'.format(self.depfile))
+ outfile.write(' description = {}\n'.format(self.description))
+ if self.extra:
+ for l in self.extra.split('\n'):
+ outfile.write(' ')
+ outfile.write(l)
+ outfile.write('\n')
+ outfile.write('\n')
+
+ def length_estimate(self, infiles, outfiles, elems):
+ # determine variables
+ # this order of actions only approximates ninja's scoping rules, as
+ # documented at: https://ninja-build.org/manual.html#ref_scope
+ ninja_vars = {}
+ for e in elems:
+ (name, value) = e
+ ninja_vars[name] = value
+ ninja_vars['deps'] = self.deps
+ ninja_vars['depfile'] = self.depfile
+ ninja_vars['in'] = infiles
+ ninja_vars['out'] = outfiles
+
+ # expand variables in command
+ command = ' '.join([self._quoter(x) for x in self.command + self.args])
+ expanded_command = ''
+ for m in re.finditer(r'(\${\w*})|(\$\w*)|([^$]*)', command):
+ chunk = m.group()
+ if chunk.startswith('$'):
+ chunk = chunk[1:]
+ chunk = re.sub(r'{(.*)}', r'\1', chunk)
+ chunk = ninja_vars.get(chunk, []) # undefined ninja variables are empty
+ chunk = ' '.join(chunk)
+ expanded_command += chunk
+
+ # determine command length
+ return len(expanded_command)
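+
+ # Rough worked example (illustrative numbers): on Linux the default threshold
+ # computed above is 131072 / 2 = 65536 bytes, so a compile command whose
+ # expanded $ARGS/$in/$out text is estimated at or above that length is routed
+ # to the _RSP variant of the rule.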
class NinjaBuildElement:
- def __init__(self, all_outputs, outfilenames, rule, infilenames, implicit_outs=None):
+ def __init__(self, all_outputs, outfilenames, rulename, infilenames, implicit_outs=None):
self.implicit_outfilenames = implicit_outs or []
if isinstance(outfilenames, str):
self.outfilenames = [outfilenames]
else:
self.outfilenames = outfilenames
- assert(isinstance(rule, str))
- self.rule = rule
+ assert(isinstance(rulename, str))
+ self.rulename = rulename
if isinstance(infilenames, str):
self.infilenames = [infilenames]
else:
@@ -159,6 +313,31 @@ class NinjaBuildElement:
elems = [elems]
self.elems.append((name, elems))
+ if name == 'DEPFILE':
+ self.elems.append((name + '_UNQUOTED', elems))
+
+ def _should_use_rspfile(self):
+ # 'phony' is a rule built into ninja
+ if self.rulename == 'phony':
+ return False
+
+ if not self.rule.rspable:
+ return False
+
+ infilenames = ' '.join([ninja_quote(i, True) for i in self.infilenames])
+ outfilenames = ' '.join([ninja_quote(i, True) for i in self.outfilenames])
+
+ return self.rule.length_estimate(infilenames,
+ outfilenames,
+ self.elems) >= rsp_threshold
+
+ def count_rule_references(self):
+ if self.rulename != 'phony':
+ if self._should_use_rspfile():
+ self.rule.rsprefcount += 1
+ else:
+ self.rule.refcount += 1
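+
+ # For example (illustrative): if three build elements use rule 'c_COMPILER'
+ # and only one of them exceeds rsp_threshold, refcount ends up as 2 and
+ # rsprefcount as 1, so NinjaRule.write() emits both 'rule c_COMPILER' and
+ # 'rule c_COMPILER_RSP'.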
+
def write(self, outfile):
self.check_outputs()
ins = ' '.join([ninja_quote(i, True) for i in self.infilenames])
@@ -166,7 +345,13 @@ class NinjaBuildElement:
implicit_outs = ' '.join([ninja_quote(i, True) for i in self.implicit_outfilenames])
if implicit_outs:
implicit_outs = ' | ' + implicit_outs
- line = 'build {}{}: {} {}'.format(outs, implicit_outs, self.rule, ins)
+ use_rspfile = self._should_use_rspfile()
+ if use_rspfile:
+ rulename = self.rulename + '_RSP'
+ mlog.log("Command line for building %s is long, using a response file" % self.outfilenames)
+ else:
+ rulename = self.rulename
+ line = 'build {}{}: {} {}'.format(outs, implicit_outs, rulename, ins)
if len(self.deps) > 0:
line += ' | ' + ' '.join([ninja_quote(x, True) for x in self.deps])
if len(self.orderdeps) > 0:
@@ -180,11 +365,13 @@ class NinjaBuildElement:
line = line.replace('\\', '/')
outfile.write(line)
- # ninja variables whose value should remain unquoted. The value of these
- # ninja variables (or variables we use them in) is interpreted directly
- # by ninja (e.g. the value of the depfile variable is a pathname that
- # ninja will read from, etc.), so it must not be shell quoted.
- raw_names = {'DEPFILE', 'DESC', 'pool', 'description', 'targetdep'}
+ if use_rspfile:
+ if self.rule.rspfile_quote_style == 'cl':
+ qf = cmd_quote
+ else:
+ qf = gcc_rsp_quote
+ else:
+ qf = quote_func
for e in self.elems:
(name, elems) = e
@@ -195,10 +382,7 @@ class NinjaBuildElement:
if not should_quote or i == '&&': # Hackety hack hack
quoter = ninja_quote
else:
- quoter = lambda x: ninja_quote(quote_func(x))
- i = i.replace('\\', '\\\\')
- if quote_func('') == '""':
- i = i.replace('"', '\\"')
+ quoter = lambda x: ninja_quote(qf(x))
newelems.append(quoter(i))
line += ' '.join(newelems)
line += '\n'
@@ -350,10 +534,14 @@ int dummy;
# http://clang.llvm.org/docs/JSONCompilationDatabase.html
def generate_compdb(self):
rules = []
+ # TODO: Rather than an explicit list here, rules could be marked in the
+ # rule store as being wanted in compdb
for for_machine in MachineChoice:
for lang in self.environment.coredata.compilers[for_machine]:
- rules += [self.get_compiler_rule_name(lang, for_machine)]
- rules += [self.get_pch_rule_name(lang, for_machine)]
+ rules += [ "%s%s" % (rule, ext) for rule in [self.get_compiler_rule_name(lang, for_machine)]
+ for ext in ['', '_RSP']]
+ rules += [ "%s%s" % (rule, ext) for rule in [self.get_pch_rule_name(lang, for_machine)]
+ for ext in ['', '_RSP']]
compdb_options = ['-x'] if mesonlib.version_compare(self.ninja_version, '>=1.9') else []
ninja_compdb = [self.ninja_command, '-t', 'compdb'] + compdb_options + rules
builddir = self.environment.get_build_dir()
@@ -781,6 +969,13 @@ int dummy;
self.processed_targets[target.get_id()] = True
def generate_coverage_command(self, elem, outputs):
+ targets = self.build.get_targets().values()
+ use_llvm_cov = False
+ for target in targets:
+ for compiler in target.compilers.values():
+ if compiler.get_id() == 'clang' and not compiler.info.is_darwin():
+ use_llvm_cov = True
+ break
elem.add_item('COMMAND', self.environment.get_build_command() +
['--internal', 'coverage'] +
outputs +
@@ -788,7 +983,8 @@ int dummy;
os.path.join(self.environment.get_source_dir(),
self.build.get_subproject_dir()),
self.environment.get_build_dir(),
- self.environment.get_log_dir()])
+ self.environment.get_log_dir()] +
+ (['--use_llvm_cov'] if use_llvm_cov else []))
def generate_coverage_rules(self):
e = NinjaBuildElement(self.all_outputs, 'meson-coverage', 'CUSTOM_COMMAND', 'PHONY')
@@ -877,13 +1073,15 @@ int dummy;
deps='gcc', depfile='$DEPFILE',
extra='restat = 1'))
- c = [ninja_quote(quote_func(x)) for x in self.environment.get_build_command()] + \
+ c = self.environment.get_build_command() + \
['--internal',
'regenerate',
- ninja_quote(quote_func(self.environment.get_source_dir())),
- ninja_quote(quote_func(self.environment.get_build_dir()))]
+ self.environment.get_source_dir(),
+ self.environment.get_build_dir(),
+ '--backend',
+ 'ninja']
self.add_rule(NinjaRule('REGENERATE_BUILD',
- c + ['--backend', 'ninja'], [],
+ c, [],
'Regenerating build files.',
extra='generator = 1'))
@@ -900,11 +1098,15 @@ int dummy;
def add_build(self, build):
self.build_elements.append(build)
- # increment rule refcount
- if build.rule != 'phony':
- self.ruledict[build.rule].refcount += 1
+ if build.rulename != 'phony':
+ # reference rule
+ build.rule = self.ruledict[build.rulename]
def write_rules(self, outfile):
+ for b in self.build_elements:
+ if isinstance(b, NinjaBuildElement):
+ b.count_rule_references()
+
for r in self.rules:
r.write(outfile)
@@ -1000,7 +1202,7 @@ int dummy;
compiler = target.compilers['cs']
rel_srcs = [os.path.normpath(s.rel_to_builddir(self.build_to_src)) for s in src_list]
deps = []
- commands = CompilerArgs(compiler, target.extra_args.get('cs', []))
+ commands = compiler.compiler_args(target.extra_args.get('cs', []))
commands += compiler.get_buildtype_args(buildtype)
commands += compiler.get_optimization_args(self.get_option_for_target('optimization', target))
commands += compiler.get_debug_args(self.get_option_for_target('debug', target))
@@ -1558,7 +1760,7 @@ int dummy;
cmdlist = execute_wrapper + [c.format('$out') for c in rmfile_prefix]
cmdlist += static_linker.get_exelist()
cmdlist += ['$LINK_ARGS']
- cmdlist += static_linker.get_output_args('$out')
+ cmdlist += NinjaCommandArg.list(static_linker.get_output_args('$out'), Quoting.none)
description = 'Linking static target $out'
if num_pools > 0:
pool = 'pool = link_pool'
@@ -1566,6 +1768,7 @@ int dummy;
pool = None
self.add_rule(NinjaRule(rule, cmdlist, args, description,
rspable=static_linker.can_linker_accept_rsp(),
+ rspfile_quote_style='cl' if isinstance(static_linker, VisualStudioLinker) else 'gcc',
extra=pool))
def generate_dynamic_link_rules(self):
@@ -1580,7 +1783,7 @@ int dummy;
continue
rule = '{}_LINKER{}'.format(langname, self.get_rule_suffix(for_machine))
command = compiler.get_linker_exelist()
- args = ['$ARGS'] + compiler.get_linker_output_args('$out') + ['$in', '$LINK_ARGS']
+ args = ['$ARGS'] + NinjaCommandArg.list(compiler.get_linker_output_args('$out'), Quoting.none) + ['$in', '$LINK_ARGS']
description = 'Linking target $out'
if num_pools > 0:
pool = 'pool = link_pool'
@@ -1588,12 +1791,14 @@ int dummy;
pool = None
self.add_rule(NinjaRule(rule, command, args, description,
rspable=compiler.can_linker_accept_rsp(),
+ rspfile_quote_style='cl' if (compiler.get_argument_syntax() == 'msvc' or
+ isinstance(compiler, DmdDCompiler)) else 'gcc',
extra=pool))
- args = [ninja_quote(quote_func(x)) for x in self.environment.get_build_command()] + \
+ args = self.environment.get_build_command() + \
['--internal',
'symbolextractor',
- ninja_quote(quote_func(self.environment.get_build_dir())),
+ self.environment.get_build_dir(),
'$in',
'$IMPLIB',
'$out']
@@ -1605,31 +1810,28 @@ int dummy;
def generate_java_compile_rule(self, compiler):
rule = self.compiler_to_rule_name(compiler)
- invoc = [ninja_quote(i) for i in compiler.get_exelist()]
- command = invoc + ['$ARGS', '$in']
+ command = compiler.get_exelist() + ['$ARGS', '$in']
description = 'Compiling Java object $in'
self.add_rule(NinjaRule(rule, command, [], description))
def generate_cs_compile_rule(self, compiler):
rule = self.compiler_to_rule_name(compiler)
- invoc = [ninja_quote(i) for i in compiler.get_exelist()]
- command = invoc
+ command = compiler.get_exelist()
args = ['$ARGS', '$in']
description = 'Compiling C Sharp target $out'
self.add_rule(NinjaRule(rule, command, args, description,
- rspable=mesonlib.is_windows()))
+ rspable=mesonlib.is_windows(),
+ rspfile_quote_style='cl' if isinstance(compiler, VisualStudioCsCompiler) else 'gcc'))
def generate_vala_compile_rules(self, compiler):
rule = self.compiler_to_rule_name(compiler)
- invoc = [ninja_quote(i) for i in compiler.get_exelist()]
- command = invoc + ['$ARGS', '$in']
+ command = compiler.get_exelist() + ['$ARGS', '$in']
description = 'Compiling Vala source $in'
self.add_rule(NinjaRule(rule, command, [], description, extra='restat = 1'))
def generate_rust_compile_rules(self, compiler):
rule = self.compiler_to_rule_name(compiler)
- invoc = [ninja_quote(i) for i in compiler.get_exelist()]
- command = invoc + ['$ARGS', '$in']
+ command = compiler.get_exelist() + ['$ARGS', '$in']
description = 'Compiling Rust source $in'
depfile = '$targetdep'
depstyle = 'gcc'
@@ -1638,12 +1840,12 @@ int dummy;
def generate_swift_compile_rules(self, compiler):
rule = self.compiler_to_rule_name(compiler)
- full_exe = [ninja_quote(x) for x in self.environment.get_build_command()] + [
+ full_exe = self.environment.get_build_command() + [
'--internal',
'dirchanger',
'$RUNDIR',
]
- invoc = full_exe + [ninja_quote(i) for i in compiler.get_exelist()]
+ invoc = full_exe + compiler.get_exelist()
command = invoc + ['$ARGS', '$in']
description = 'Compiling Swift source $in'
self.add_rule(NinjaRule(rule, command, [], description))
@@ -1663,8 +1865,8 @@ https://gcc.gnu.org/bugzilla/show_bug.cgi?id=47485'''))
if self.created_llvm_ir_rule[compiler.for_machine]:
return
rule = self.get_compiler_rule_name('llvm_ir', compiler.for_machine)
- command = [ninja_quote(i) for i in compiler.get_exelist()]
- args = ['$ARGS'] + compiler.get_output_args('$out') + compiler.get_compile_only_args() + ['$in']
+ command = compiler.get_exelist()
+ args = ['$ARGS'] + NinjaCommandArg.list(compiler.get_output_args('$out'), Quoting.none) + compiler.get_compile_only_args() + ['$in']
description = 'Compiling LLVM IR object $in'
self.add_rule(NinjaRule(rule, command, args, description,
rspable=compiler.can_linker_accept_rsp()))
@@ -1693,15 +1895,9 @@ https://gcc.gnu.org/bugzilla/show_bug.cgi?id=47485'''))
if langname == 'fortran':
self.generate_fortran_dep_hack(crstr)
rule = self.get_compiler_rule_name(langname, compiler.for_machine)
- depargs = compiler.get_dependency_gen_args('$out', '$DEPFILE')
- quoted_depargs = []
- for d in depargs:
- if d != '$out' and d != '$in':
- d = quote_func(d)
- quoted_depargs.append(d)
-
- command = [ninja_quote(i) for i in compiler.get_exelist()]
- args = ['$ARGS'] + quoted_depargs + compiler.get_output_args('$out') + compiler.get_compile_only_args() + ['$in']
+ depargs = NinjaCommandArg.list(compiler.get_dependency_gen_args('$out', '$DEPFILE'), Quoting.none)
+ command = compiler.get_exelist()
+ args = ['$ARGS'] + depargs + NinjaCommandArg.list(compiler.get_output_args('$out'), Quoting.none) + compiler.get_compile_only_args() + ['$in']
description = 'Compiling {} object $out'.format(compiler.get_display_language())
if isinstance(compiler, VisualStudioLikeCompiler):
deps = 'msvc'
@@ -1711,6 +1907,8 @@ https://gcc.gnu.org/bugzilla/show_bug.cgi?id=47485'''))
depfile = '$DEPFILE'
self.add_rule(NinjaRule(rule, command, args, description,
rspable=compiler.can_linker_accept_rsp(),
+ rspfile_quote_style='cl' if (compiler.get_argument_syntax() == 'msvc' or
+ isinstance(compiler, DmdDCompiler)) else 'gcc',
deps=deps, depfile=depfile))
def generate_pch_rule_for(self, langname, compiler):
@@ -1719,16 +1917,11 @@ https://gcc.gnu.org/bugzilla/show_bug.cgi?id=47485'''))
rule = self.compiler_to_pch_rule_name(compiler)
depargs = compiler.get_dependency_gen_args('$out', '$DEPFILE')
- quoted_depargs = []
- for d in depargs:
- if d != '$out' and d != '$in':
- d = quote_func(d)
- quoted_depargs.append(d)
if isinstance(compiler, VisualStudioLikeCompiler):
output = []
else:
- output = compiler.get_output_args('$out')
- command = compiler.get_exelist() + ['$ARGS'] + quoted_depargs + output + compiler.get_compile_only_args() + ['$in']
+ output = NinjaCommandArg.list(compiler.get_output_args('$out'), Quoting.none)
+ command = compiler.get_exelist() + ['$ARGS'] + depargs + output + compiler.get_compile_only_args() + ['$in']
description = 'Precompiling header $in'
if isinstance(compiler, VisualStudioLikeCompiler):
deps = 'msvc'
@@ -1963,7 +2156,7 @@ https://gcc.gnu.org/bugzilla/show_bug.cgi?id=47485'''))
def generate_llvm_ir_compile(self, target, src):
compiler = get_compiler_for_source(target.compilers.values(), src)
- commands = CompilerArgs(compiler)
+ commands = compiler.compiler_args()
# Compiler args for compiling this target
commands += compilers.get_base_compile_args(self.environment.coredata.base_options,
compiler)
@@ -1976,7 +2169,7 @@ https://gcc.gnu.org/bugzilla/show_bug.cgi?id=47485'''))
src_filename = os.path.basename(src)
else:
src_filename = src
- obj_basename = src_filename.replace('/', '_').replace('\\', '_')
+ obj_basename = self.canonicalize_filename(src_filename)
rel_obj = os.path.join(self.get_target_private_dir(target), obj_basename)
rel_obj += '.' + self.environment.machines[target.for_machine].get_object_suffix()
commands += self.get_compile_debugfile_args(compiler, target, rel_obj)
@@ -2004,6 +2197,10 @@ https://gcc.gnu.org/bugzilla/show_bug.cgi?id=47485'''))
curdir = '.'
return compiler.get_include_args(curdir, False)
+ @lru_cache(maxsize=None)
+ def get_normpath_target(self, source) -> str:
+ return os.path.normpath(source)
+
def get_custom_target_dir_include_args(self, target, compiler):
custom_target_include_dirs = []
for i in target.get_generated_sources():
@@ -2012,7 +2209,7 @@ https://gcc.gnu.org/bugzilla/show_bug.cgi?id=47485'''))
# own target build dir.
if not isinstance(i, (build.CustomTarget, build.CustomTargetIndex)):
continue
- idir = os.path.normpath(self.get_target_dir(i))
+ idir = self.get_normpath_target(self.get_target_dir(i))
if not idir:
idir = '.'
if idir not in custom_target_include_dirs:
@@ -2048,7 +2245,7 @@ https://gcc.gnu.org/bugzilla/show_bug.cgi?id=47485'''))
base_proxy = self.get_base_options_for_target(target)
# Create an empty commands list, and start adding arguments from
# various sources in the order in which they must override each other
- commands = CompilerArgs(compiler)
+ commands = compiler.compiler_args()
# Start with symbol visibility.
commands += compiler.gnu_symbol_visibility_args(target.gnu_symbol_visibility)
# Add compiler args for compiling this target derived from 'base' build
@@ -2128,7 +2325,7 @@ https://gcc.gnu.org/bugzilla/show_bug.cgi?id=47485'''))
compiler = get_compiler_for_source(target.compilers.values(), src)
commands = self._generate_single_compile(target, compiler, is_generated)
- commands = CompilerArgs(commands.compiler, commands)
+ commands = commands.compiler.compiler_args(commands)
# Create introspection information
if is_generated is False:
@@ -2477,7 +2674,7 @@ https://gcc.gnu.org/bugzilla/show_bug.cgi?id=47485'''))
#
# Once all the linker options have been passed, we will start passing
# libraries and library paths from internal and external sources.
- commands = CompilerArgs(linker)
+ commands = linker.compiler_args()
# First, the trivial ones that are impossible to override.
#
# Add linker args for linking this target derived from 'base' build
diff --git a/mesonbuild/backend/vs2010backend.py b/mesonbuild/backend/vs2010backend.py
index 6965c42..f282d02 100644
--- a/mesonbuild/backend/vs2010backend.py
+++ b/mesonbuild/backend/vs2010backend.py
@@ -26,7 +26,6 @@ from .. import build
from .. import dependencies
from .. import mlog
from .. import compilers
-from ..compilers import CompilerArgs
from ..interpreter import Interpreter
from ..mesonlib import (
MesonException, File, python_command, replace_if_different
@@ -858,6 +857,18 @@ class Vs2010Backend(backends.Backend):
ET.SubElement(clconf, 'BasicRuntimeChecks').text = 'UninitializedLocalUsageCheck'
elif '/RTCs' in buildtype_args:
ET.SubElement(clconf, 'BasicRuntimeChecks').text = 'StackFrameRuntimeCheck'
+ # Exception handling has to be set in the XML in addition to the "AdditionalOptions" because otherwise
+ # cl will give warning D9025: overriding '/EHs' with the cpp_eh value
+ if 'cpp' in target.compilers:
+ eh = self.environment.coredata.compiler_options[target.for_machine]['cpp']['eh']
+ if eh.value == 'a':
+ ET.SubElement(clconf, 'ExceptionHandling').text = 'Async'
+ elif eh.value == 's':
+ ET.SubElement(clconf, 'ExceptionHandling').text = 'SyncCThrow'
+ elif eh.value == 'none':
+ ET.SubElement(clconf, 'ExceptionHandling').text = 'false'
+ else: # 'sc' or 'default'
+ ET.SubElement(clconf, 'ExceptionHandling').text = 'Sync'
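+ # For example (illustrative), configuring with -Dcpp_eh=a results in
+ # <ExceptionHandling>Async</ExceptionHandling> in the generated vcxproj.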
# End configuration
ET.SubElement(root, 'Import', Project=r'$(VCTargetsPath)\Microsoft.Cpp.props')
generated_files, custom_target_output_files, generated_files_include_dirs = self.generate_custom_generator_commands(target, root)
@@ -887,9 +898,9 @@ class Vs2010Backend(backends.Backend):
#
# file_args is also later split out into defines and include_dirs in
# case someone passed those in there
- file_args = dict((lang, CompilerArgs(comp)) for lang, comp in target.compilers.items())
- file_defines = dict((lang, []) for lang in target.compilers)
- file_inc_dirs = dict((lang, []) for lang in target.compilers)
+ file_args = {l: c.compiler_args() for l, c in target.compilers.items()}
+ file_defines = {l: [] for l in target.compilers}
+ file_inc_dirs = {l: [] for l in target.compilers}
# The order in which these compile args are added must match
# generate_single_compile() and generate_basic_compiler_args()
for l, comp in target.compilers.items():
@@ -992,23 +1003,23 @@ class Vs2010Backend(backends.Backend):
# Cflags required by external deps might have UNIX-specific flags,
# so filter them out if needed
if isinstance(d, dependencies.OpenMPDependency):
- d_compile_args = compiler.openmp_flags()
+ ET.SubElement(clconf, 'OpenMPSupport').text = 'true'
else:
d_compile_args = compiler.unix_args_to_native(d.get_compile_args())
- for arg in d_compile_args:
- if arg.startswith(('-D', '/D')):
- define = arg[2:]
- # De-dup
- if define in target_defines:
- target_defines.remove(define)
- target_defines.append(define)
- elif arg.startswith(('-I', '/I')):
- inc_dir = arg[2:]
- # De-dup
- if inc_dir not in target_inc_dirs:
- target_inc_dirs.append(inc_dir)
- else:
- target_args.append(arg)
+ for arg in d_compile_args:
+ if arg.startswith(('-D', '/D')):
+ define = arg[2:]
+ # De-dup
+ if define in target_defines:
+ target_defines.remove(define)
+ target_defines.append(define)
+ elif arg.startswith(('-I', '/I')):
+ inc_dir = arg[2:]
+ # De-dup
+ if inc_dir not in target_inc_dirs:
+ target_inc_dirs.append(inc_dir)
+ else:
+ target_args.append(arg)
languages += gen_langs
if len(target_args) > 0:
@@ -1072,7 +1083,7 @@ class Vs2010Backend(backends.Backend):
# Linker options
link = ET.SubElement(compiles, 'Link')
- extra_link_args = CompilerArgs(compiler)
+ extra_link_args = compiler.compiler_args()
# FIXME: Can these buildtype linker args be added as tags in the
# vcxproj file (similar to buildtype compiler args) instead of in
# AdditionalOptions?
@@ -1100,14 +1111,14 @@ class Vs2010Backend(backends.Backend):
# Extend without reordering or de-dup to preserve `-L -l` sets
# https://github.com/mesonbuild/meson/issues/1718
if isinstance(dep, dependencies.OpenMPDependency):
- extra_link_args.extend_direct(compiler.openmp_flags())
+ ET.SubElement(clconf, 'OpenMPSupport').text = 'true'
else:
extra_link_args.extend_direct(dep.get_link_args())
for d in target.get_dependencies():
if isinstance(d, build.StaticLibrary):
for dep in d.get_external_deps():
if isinstance(dep, dependencies.OpenMPDependency):
- extra_link_args.extend_direct(compiler.openmp_flags())
+ ET.SubElement(clconf, 'OpenMPSupport').text = 'true'
else:
extra_link_args.extend_direct(dep.get_link_args())
# Add link args for c_* or cpp_* build options. Currently this only
diff --git a/mesonbuild/cmake/data/run_ctgt.py b/mesonbuild/cmake/data/run_ctgt.py
deleted file mode 100755
index 9d5d437..0000000
--- a/mesonbuild/cmake/data/run_ctgt.py
+++ /dev/null
@@ -1,96 +0,0 @@
-#!/usr/bin/env python3
-
-import argparse
-import subprocess
-import shutil
-import os
-import sys
-from pathlib import Path
-
-commands = [[]]
-SEPARATOR = ';;;'
-
-# Generate CMD parameters
-parser = argparse.ArgumentParser(description='Wrapper for add_custom_command')
-parser.add_argument('-d', '--directory', type=str, metavar='D', required=True, help='Working directory to cwd to')
-parser.add_argument('-o', '--outputs', nargs='+', metavar='O', required=True, help='Expected output files')
-parser.add_argument('-O', '--original-outputs', nargs='*', metavar='O', default=[], help='Output files expected by CMake')
-parser.add_argument('commands', nargs=argparse.REMAINDER, help='A "{}" seperated list of commands'.format(SEPARATOR))
-
-# Parse
-args = parser.parse_args()
-
-dummy_target = None
-if len(args.outputs) == 1 and len(args.original_outputs) == 0:
- dummy_target = args.outputs[0]
-elif len(args.outputs) != len(args.original_outputs):
- print('Length of output list and original output list differ')
- sys.exit(1)
-
-for i in args.commands:
- if i == SEPARATOR:
- commands += [[]]
- continue
-
- i = i.replace('"', '') # Remove lefover quotes
- commands[-1] += [i]
-
-# Execute
-for i in commands:
- # Skip empty lists
- if not i:
- continue
-
- cmd = []
- stdout = None
- stderr = None
- capture_file = ''
-
- for j in i:
- if j in ['>', '>>']:
- stdout = subprocess.PIPE
- continue
- elif j in ['&>', '&>>']:
- stdout = subprocess.PIPE
- stderr = subprocess.STDOUT
- continue
-
- if stdout is not None or stderr is not None:
- capture_file += j
- else:
- cmd += [j]
-
- try:
- os.makedirs(args.directory, exist_ok=True)
-
- res = subprocess.run(cmd, stdout=stdout, stderr=stderr, cwd=args.directory, check=True)
- if capture_file:
- out_file = Path(args.directory) / capture_file
- out_file.write_bytes(res.stdout)
- except subprocess.CalledProcessError:
- exit(1)
-
-if dummy_target:
- with open(dummy_target, 'a'):
- os.utime(dummy_target, None)
- exit(0)
-
-# Copy outputs
-zipped_outputs = zip(args.outputs, args.original_outputs)
-for expected, generated in zipped_outputs:
- do_copy = False
- if not os.path.exists(expected):
- if not os.path.exists(generated):
- print('Unable to find generated file. This can cause the build to fail:')
- print(generated)
- do_copy = False
- else:
- do_copy = True
- elif os.path.exists(generated):
- if os.path.getmtime(generated) > os.path.getmtime(expected):
- do_copy = True
-
- if do_copy:
- if os.path.exists(expected):
- os.remove(expected)
- shutil.copyfile(generated, expected)
diff --git a/mesonbuild/cmake/executor.py b/mesonbuild/cmake/executor.py
index adc028c..148a999 100644
--- a/mesonbuild/cmake/executor.py
+++ b/mesonbuild/cmake/executor.py
@@ -28,6 +28,7 @@ import textwrap
from .. import mlog, mesonlib
from ..mesonlib import PerMachine, Popen_safe, version_compare, MachineChoice
from ..environment import Environment
+from ..envconfig import get_env_var
if T.TYPE_CHECKING:
from ..dependencies.base import ExternalProgram
@@ -48,6 +49,8 @@ class CMakeExecutor:
self.cmakebin, self.cmakevers = self.find_cmake_binary(self.environment, silent=silent)
self.always_capture_stderr = True
self.print_cmout = False
+ self.prefix_paths = [] # type: T.List[str]
+ self.extra_cmake_args = [] # type: T.List[str]
if self.cmakebin is False:
self.cmakebin = None
return
@@ -60,26 +63,23 @@ class CMakeExecutor:
self.cmakebin = None
return
+ self.prefix_paths = self.environment.coredata.builtins_per_machine[self.for_machine]['cmake_prefix_path'].value
+ env_pref_path = get_env_var(
+ self.for_machine,
+ self.environment.is_cross_build(),
+ 'CMAKE_PREFIX_PATH')
+ if env_pref_path is not None:
+ env_pref_path = re.split(r':|;', env_pref_path)
+ env_pref_path = [x for x in env_pref_path if x] # Filter out empty strings
+ if not self.prefix_paths:
+ self.prefix_paths = []
+ self.prefix_paths += env_pref_path
+
+ if self.prefix_paths:
+ self.extra_cmake_args += ['-DCMAKE_PREFIX_PATH={}'.format(';'.join(self.prefix_paths))]
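+ # For example (illustrative paths): cmake_prefix_path=['/opt/foo'] combined
+ # with CMAKE_PREFIX_PATH='/opt/bar:/opt/baz' in the environment yields
+ # '-DCMAKE_PREFIX_PATH=/opt/foo;/opt/bar;/opt/baz' here.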
+
def find_cmake_binary(self, environment: Environment, silent: bool = False) -> T.Tuple['ExternalProgram', str]:
- from ..dependencies.base import ExternalProgram
-
- # Create an iterator of options
- def search():
- # Lookup in cross or machine file.
- potential_cmakepath = environment.lookup_binary_entry(self.for_machine, 'cmake')
- if potential_cmakepath is not None:
- mlog.debug('CMake binary for %s specified from cross file, native file, or env var as %s.', self.for_machine, potential_cmakepath)
- yield ExternalProgram.from_entry('cmake', potential_cmakepath)
- # We never fallback if the user-specified option is no good, so
- # stop returning options.
- return
- mlog.debug('CMake binary missing from cross or native file, or env var undefined.')
- # Fallback on hard-coded defaults.
- # TODO prefix this for the cross case instead of ignoring thing.
- if environment.machines.matches_build_machine(self.for_machine):
- for potential_cmakepath in environment.default_cmake:
- mlog.debug('Trying a default CMake fallback at', potential_cmakepath)
- yield ExternalProgram(potential_cmakepath, silent=True)
+ from ..dependencies.base import find_external_program
# Only search for CMake the first time and store the result in the class
# definition
@@ -89,10 +89,11 @@ class CMakeExecutor:
mlog.debug('CMake binary for %s is cached.' % self.for_machine)
else:
assert CMakeExecutor.class_cmakebin[self.for_machine] is None
+
mlog.debug('CMake binary for %s is not cached' % self.for_machine)
- for potential_cmakebin in search():
- mlog.debug('Trying CMake binary {} for machine {} at {}'
- .format(potential_cmakebin.name, self.for_machine, potential_cmakebin.command))
+ for potential_cmakebin in find_external_program(
+ environment, self.for_machine, 'cmake', 'CMake',
+ environment.default_cmake, allow_default_for_cross=False):
version_if_ok = self.check_cmake(potential_cmakebin)
if not version_if_ok:
continue
@@ -226,6 +227,7 @@ class CMakeExecutor:
if env is None:
env = os.environ
+ args = args + self.extra_cmake_args
if disable_cache:
return self._call_impl(args, build_dir, env)
@@ -362,5 +364,8 @@ class CMakeExecutor:
def get_command(self) -> T.List[str]:
return self.cmakebin.get_command()
+ def get_cmake_prefix_paths(self) -> T.List[str]:
+ return self.prefix_paths
+
def machine_choice(self) -> MachineChoice:
return self.for_machine
diff --git a/mesonbuild/cmake/interpreter.py b/mesonbuild/cmake/interpreter.py
index 57e6e1d..0516947 100644
--- a/mesonbuild/cmake/interpreter.py
+++ b/mesonbuild/cmake/interpreter.py
@@ -22,7 +22,7 @@ from .client import CMakeClient, RequestCMakeInputs, RequestConfigure, RequestCo
from .fileapi import CMakeFileAPI
from .executor import CMakeExecutor
from .traceparser import CMakeTraceParser, CMakeGeneratorTarget
-from .. import mlog
+from .. import mlog, mesonlib
from ..environment import Environment
from ..mesonlib import MachineChoice, OrderedSet, version_compare
from ..compilers.compilers import lang_suffixes, header_suffixes, obj_suffixes, lib_suffixes, is_header
@@ -317,13 +317,6 @@ class ConverterTarget:
tgt = trace.targets.get(self.cmake_name)
if tgt:
self.depends_raw = trace.targets[self.cmake_name].depends
- if self.type.upper() == 'INTERFACE_LIBRARY':
- props = tgt.properties
-
- self.includes += props.get('INTERFACE_INCLUDE_DIRECTORIES', [])
- self.public_compile_opts += props.get('INTERFACE_COMPILE_DEFINITIONS', [])
- self.public_compile_opts += props.get('INTERFACE_COMPILE_OPTIONS', [])
- self.link_flags += props.get('INTERFACE_LINK_OPTIONS', [])
# TODO refactor this copy paste from CMakeDependency for future releases
reg_is_lib = re.compile(r'^(-l[a-zA-Z0-9_]+|-l?pthread)$')
@@ -342,6 +335,12 @@ class ConverterTarget:
libraries = []
mlog.debug(tgt)
+ if 'INTERFACE_INCLUDE_DIRECTORIES' in tgt.properties:
+ self.includes += [x for x in tgt.properties['INTERFACE_INCLUDE_DIRECTORIES'] if x]
+
+ if 'INTERFACE_LINK_OPTIONS' in tgt.properties:
+ self.link_flags += [x for x in tgt.properties['INTERFACE_LINK_OPTIONS'] if x]
+
if 'INTERFACE_COMPILE_DEFINITIONS' in tgt.properties:
self.public_compile_opts += ['-D' + re.sub('^-D', '', x) for x in tgt.properties['INTERFACE_COMPILE_DEFINITIONS'] if x]
@@ -1060,9 +1059,6 @@ class CMakeInterpreter:
root_cb.lines += [function('project', [self.project_name] + self.languages)]
# Add the run script for custom commands
- run_script = pkg_resources.resource_filename('mesonbuild', 'cmake/data/run_ctgt.py')
- run_script_var = 'ctgt_run_script'
- root_cb.lines += [assign(run_script_var, function('find_program', [[run_script]], {'required': True}))]
# Add the targets
processing = []
@@ -1249,7 +1245,8 @@ class CMakeInterpreter:
# Generate the command list
command = []
- command += [id_node(run_script_var)]
+ command += mesonlib.meson_command
+ command += ['--internal', 'cmake_run_ctgt']
command += ['-o', '@OUTPUT@']
if tgt.original_outputs:
command += ['-O'] + tgt.original_outputs
diff --git a/mesonbuild/cmake/traceparser.py b/mesonbuild/cmake/traceparser.py
index d94e774..a241360 100644
--- a/mesonbuild/cmake/traceparser.py
+++ b/mesonbuild/cmake/traceparser.py
@@ -139,7 +139,7 @@ class CMakeTraceParser:
if not self.requires_stderr():
if not self.trace_file_path.exists and not self.trace_file_path.is_file():
raise CMakeException('CMake: Trace file "{}" not found'.format(str(self.trace_file_path)))
- trace = self.trace_file_path.read_text()
+ trace = self.trace_file_path.read_text(errors='ignore')
if not trace:
raise CMakeException('CMake: The CMake trace was not provided or is empty')
diff --git a/mesonbuild/compilers/__init__.py b/mesonbuild/compilers/__init__.py
index af7e519..fd47545 100644
--- a/mesonbuild/compilers/__init__.py
+++ b/mesonbuild/compilers/__init__.py
@@ -48,7 +48,6 @@ __all__ = [
'ClangObjCPPCompiler',
'ClangClCCompiler',
'ClangClCPPCompiler',
- 'CompilerArgs',
'CPPCompiler',
'DCompiler',
'DmdDCompiler',
@@ -123,7 +122,6 @@ from .compilers import (
is_known_suffix,
lang_suffixes,
sort_clink,
- CompilerArgs,
)
from .c import (
CCompiler,
diff --git a/mesonbuild/compilers/c_function_attributes.py b/mesonbuild/compilers/c_function_attributes.py
index e5de485..f31229e 100644
--- a/mesonbuild/compilers/c_function_attributes.py
+++ b/mesonbuild/compilers/c_function_attributes.py
@@ -56,6 +56,8 @@ C_FUNC_ATTRIBUTES = {
'int foo(const char * p, ...) __attribute__((format(printf, 1, 2)));',
'format_arg':
'char * foo(const char * p) __attribute__((format_arg(1)));',
+ 'force_align_arg_pointer':
+ '__attribute__((force_align_arg_pointer)) int foo(void) { return 0; }',
'gnu_inline':
'inline __attribute__((gnu_inline)) int foo(void) { return 0; }',
'hot':
diff --git a/mesonbuild/compilers/compilers.py b/mesonbuild/compilers/compilers.py
index b0fa5f5..8ecb972 100644
--- a/mesonbuild/compilers/compilers.py
+++ b/mesonbuild/compilers/compilers.py
@@ -12,19 +12,16 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+import abc
import contextlib, os.path, re, tempfile
-import collections.abc
import itertools
import typing as T
from functools import lru_cache
-from ..linkers import (
- GnuLikeDynamicLinkerMixin, LinkerEnvVarsMixin, SolarisDynamicLinker,
- StaticLinker,
-)
from .. import coredata
from .. import mlog
from .. import mesonlib
+from ..linkers import LinkerEnvVarsMixin
from ..mesonlib import (
EnvironmentException, MachineChoice, MesonException,
Popen_safe, split_args
@@ -32,6 +29,7 @@ from ..mesonlib import (
from ..envconfig import (
Properties, get_env_var
)
+from ..arglist import CompilerArgs
if T.TYPE_CHECKING:
from ..coredata import OptionDictType
@@ -98,11 +96,6 @@ cflags_mapping = {'c': 'CFLAGS',
'vala': 'VALAFLAGS',
'rust': 'RUSTFLAGS'}
-unixy_compiler_internal_libs = ('m', 'c', 'pthread', 'dl', 'rt')
-# execinfo is a compiler lib on FreeBSD and NetBSD
-if mesonlib.is_freebsd() or mesonlib.is_netbsd():
- unixy_compiler_internal_libs += ('execinfo',)
-
# All these are only for C-linkable languages; see `clink_langs` above.
def sort_clink(lang):
@@ -138,11 +131,15 @@ def is_llvm_ir(fname):
fname = fname.fname
return fname.split('.')[-1] == 'll'
+@lru_cache(maxsize=None)
+def cached_by_name(fname):
+ suffix = fname.split('.')[-1]
+ return suffix in obj_suffixes
+
def is_object(fname):
if hasattr(fname, 'fname'):
fname = fname.fname
- suffix = fname.split('.')[-1]
- return suffix in obj_suffixes
+ return cached_by_name(fname)
def is_library(fname):
if hasattr(fname, 'fname'):
@@ -372,9 +369,10 @@ def get_base_link_args(options, linker, is_shared_module):
# -Wl,-dead_strip_dylibs is incompatible with bitcode
args.extend(linker.get_asneeded_args())
- # Apple's ld (the only one that supports bitcode) does not like any
- # -undefined arguments at all, so don't pass these when using bitcode
+ # Apple's ld (the only one that supports bitcode) does not like -undefined
+ # arguments or -headerpad_max_install_names when bitcode is enabled
if not bitcode:
+ args.extend(linker.headerpad_args())
if (not is_shared_module and
option_enabled(linker.base_options, options, 'b_lundef')):
args.extend(linker.no_undefined_link_args())
@@ -403,334 +401,8 @@ class RunResult:
self.stdout = stdout
self.stderr = stderr
-class CompilerArgs(collections.abc.MutableSequence):
- '''
- List-like class that manages a list of compiler arguments. Should be used
- while constructing compiler arguments from various sources. Can be
- operated with ordinary lists, so this does not need to be used
- everywhere.
-
- All arguments must be inserted and stored in GCC-style (-lfoo, -Idir, etc)
- and can converted to the native type of each compiler by using the
- .to_native() method to which you must pass an instance of the compiler or
- the compiler class.
-
- New arguments added to this class (either with .append(), .extend(), or +=)
- are added in a way that ensures that they override previous arguments.
- For example:
-
- >>> a = ['-Lfoo', '-lbar']
- >>> a += ['-Lpho', '-lbaz']
- >>> print(a)
- ['-Lpho', '-Lfoo', '-lbar', '-lbaz']
-
- Arguments will also be de-duped if they can be de-duped safely.
-
- Note that because of all this, this class is not commutative and does not
- preserve the order of arguments if it is safe to not. For example:
- >>> ['-Ifoo', '-Ibar'] + ['-Ifez', '-Ibaz', '-Werror']
- ['-Ifez', '-Ibaz', '-Ifoo', '-Ibar', '-Werror']
- >>> ['-Ifez', '-Ibaz', '-Werror'] + ['-Ifoo', '-Ibar']
- ['-Ifoo', '-Ibar', '-Ifez', '-Ibaz', '-Werror']
-
- '''
- # NOTE: currently this class is only for C-like compilers, but it can be
- # extended to other languages easily. Just move the following to the
- # compiler class and initialize when self.compiler is set.
-
- # Arg prefixes that override by prepending instead of appending
- prepend_prefixes = ('-I', '-L')
- # Arg prefixes and args that must be de-duped by returning 2
- dedup2_prefixes = ('-I', '-isystem', '-L', '-D', '-U')
- dedup2_suffixes = ()
- dedup2_args = ()
- # Arg prefixes and args that must be de-duped by returning 1
- #
- # NOTE: not thorough. A list of potential corner cases can be found in
- # https://github.com/mesonbuild/meson/pull/4593#pullrequestreview-182016038
- dedup1_prefixes = ('-l', '-Wl,-l', '-Wl,--export-dynamic')
- dedup1_suffixes = ('.lib', '.dll', '.so', '.dylib', '.a')
- # Match a .so of the form path/to/libfoo.so.0.1.0
- # Only UNIX shared libraries require this. Others have a fixed extension.
- dedup1_regex = re.compile(r'([\/\\]|\A)lib.*\.so(\.[0-9]+)?(\.[0-9]+)?(\.[0-9]+)?$')
- dedup1_args = ('-c', '-S', '-E', '-pipe', '-pthread')
- # In generate_link() we add external libs without de-dup, but we must
- # *always* de-dup these because they're special arguments to the linker
- always_dedup_args = tuple('-l' + lib for lib in unixy_compiler_internal_libs)
-
- def __init__(self, compiler: T.Union['Compiler', StaticLinker],
- iterable: T.Optional[T.Iterable[str]] = None):
- self.compiler = compiler
- self.__container = list(iterable) if iterable is not None else [] # type: T.List[str]
- self.__seen_args = set()
- for arg in self.__container:
- self.__seen_args.add(arg)
-
- @T.overload # noqa: F811
- def __getitem__(self, index: int) -> str: # noqa: F811
- pass
-
- @T.overload # noqa: F811
- def __getitem__(self, index: slice) -> T.List[str]: # noqa: F811
- pass
-
- def __getitem__(self, index): # noqa: F811
- return self.__container[index]
-
- @T.overload # noqa: F811
- def __setitem__(self, index: int, value: str) -> None: # noqa: F811
- pass
-
- @T.overload # noqa: F811
- def __setitem__(self, index: slice, value: T.List[str]) -> None: # noqa: F811
- pass
-
- def __setitem__(self, index, value) -> None: # noqa: F811
- self.__container[index] = value
- for v in value:
- self.__seen_args.add(v)
- def __delitem__(self, index: T.Union[int, slice]) -> None:
- value = self.__container[index]
- del self.__container[index]
- if value in self.__seen_args and value in self.__container: # this is also honoring that you can have duplicated entries
- self.__seen_args.remove(value)
-
- def __len__(self) -> int:
- return len(self.__container)
-
- def insert(self, index: int, value: str) -> None:
- self.__container.insert(index, value)
- self.__seen_args.add(value)
-
- def copy(self) -> 'CompilerArgs':
- return CompilerArgs(self.compiler, self.__container.copy())
-
- @classmethod
- @lru_cache(maxsize=None)
- def _can_dedup(cls, arg):
- '''
- Returns whether the argument can be safely de-duped. This is dependent
- on three things:
-
- a) Whether an argument can be 'overridden' by a later argument. For
- example, -DFOO defines FOO and -UFOO undefines FOO. In this case, we
- can safely remove the previous occurrence and add a new one. The same
- is true for include paths and library paths with -I and -L. For
- these we return `2`. See `dedup2_prefixes` and `dedup2_args`.
- b) Arguments that once specified cannot be undone, such as `-c` or
- `-pipe`. New instances of these can be completely skipped. For these
- we return `1`. See `dedup1_prefixes` and `dedup1_args`.
- c) Whether it matters where or how many times on the command-line
- a particular argument is present. This can matter for symbol
- resolution in static or shared libraries, so we cannot de-dup or
- reorder them. For these we return `0`. This is the default.
-
- In addition to these, we handle library arguments specially.
- With GNU ld, we surround library arguments with -Wl,--start/end-group
- to recursively search for symbols in the libraries. This is not needed
- with other linkers.
- '''
- # A standalone argument must never be deduplicated because it is
- # defined by what comes _after_ it. Thus dedupping this:
- # -D FOO -D BAR
- # would yield either
- # -D FOO BAR
- # or
- # FOO -D BAR
- # both of which are invalid.
- if arg in cls.dedup2_prefixes:
- return 0
- if arg.startswith('-L='):
- # DMD and LDC proxy all linker arguments using -L=; in conjunction
- # with ld64 on macOS this can lead to command line arguments such
- # as: `-L=-compatibility_version -L=0 -L=current_version -L=0`.
- # These cannot be combined, ld64 insists they must be passed with
- # spaces and quoting does not work. if we deduplicate these then
- # one of the -L=0 arguments will be removed and the version
- # argument will consume the next argument instead.
- return 0
- if arg in cls.dedup2_args or \
- arg.startswith(cls.dedup2_prefixes) or \
- arg.endswith(cls.dedup2_suffixes):
- return 2
- if arg in cls.dedup1_args or \
- arg.startswith(cls.dedup1_prefixes) or \
- arg.endswith(cls.dedup1_suffixes) or \
- re.search(cls.dedup1_regex, arg):
- return 1
- return 0
-
- @classmethod
- @lru_cache(maxsize=None)
- def _should_prepend(cls, arg):
- if arg.startswith(cls.prepend_prefixes):
- return True
- return False
-
- def need_to_split_linker_args(self):
- return isinstance(self.compiler, Compiler) and self.compiler.get_language() == 'd'
-
- def to_native(self, copy: bool = False) -> T.List[str]:
- # Check if we need to add --start/end-group for circular dependencies
- # between static libraries, and for recursively searching for symbols
- # needed by static libraries that are provided by object files or
- # shared libraries.
- if copy:
- new = self.copy()
- else:
- new = self
- # To proxy these arguments with D you need to split the
- # arguments, thus you get `-L=-soname -L=lib.so` we don't
- # want to put the lib in a link -roup
- split_linker_args = self.need_to_split_linker_args()
- # This covers all ld.bfd, ld.gold, ld.gold, and xild on Linux, which
- # all act like (or are) gnu ld
- # TODO: this could probably be added to the DynamicLinker instead
- if (isinstance(self.compiler, Compiler) and
- self.compiler.linker is not None and
- isinstance(self.compiler.linker, (GnuLikeDynamicLinkerMixin, SolarisDynamicLinker))):
- group_start = -1
- group_end = -1
- is_soname = False
- for i, each in enumerate(new):
- if is_soname:
- is_soname = False
- continue
- elif split_linker_args and '-soname' in each:
- is_soname = True
- continue
- if not each.startswith(('-Wl,-l', '-l')) and not each.endswith('.a') and \
- not soregex.match(each):
- continue
- group_end = i
- if group_start < 0:
- # First occurrence of a library
- group_start = i
- if group_start >= 0:
- # Last occurrence of a library
- new.insert(group_end + 1, '-Wl,--end-group')
- new.insert(group_start, '-Wl,--start-group')
- # Remove system/default include paths added with -isystem
- if hasattr(self.compiler, 'get_default_include_dirs'):
- default_dirs = self.compiler.get_default_include_dirs()
- bad_idx_list = [] # type: T.List[int]
- for i, each in enumerate(new):
- # Remove the -isystem and the path if the path is a default path
- if (each == '-isystem' and
- i < (len(new) - 1) and
- new[i + 1] in default_dirs):
- bad_idx_list += [i, i + 1]
- elif each.startswith('-isystem=') and each[9:] in default_dirs:
- bad_idx_list += [i]
- elif each.startswith('-isystem') and each[8:] in default_dirs:
- bad_idx_list += [i]
- for i in reversed(bad_idx_list):
- new.pop(i)
- return self.compiler.unix_args_to_native(new.__container)
-
- def append_direct(self, arg: str) -> None:
- '''
- Append the specified argument without any reordering or de-dup except
- for absolute paths to libraries, etc, which can always be de-duped
- safely.
- '''
- if os.path.isabs(arg):
- self.append(arg)
- else:
- self.__container.append(arg)
- self.__seen_args.add(arg)
-
- def extend_direct(self, iterable: T.Iterable[str]) -> None:
- '''
- Extend using the elements in the specified iterable without any
- reordering or de-dup except for absolute paths where the order of
- include search directories is not relevant
- '''
- for elem in iterable:
- self.append_direct(elem)
-
- def extend_preserving_lflags(self, iterable: T.Iterable[str]) -> None:
- normal_flags = []
- lflags = []
- for i in iterable:
- if i not in self.always_dedup_args and (i.startswith('-l') or i.startswith('-L')):
- lflags.append(i)
- else:
- normal_flags.append(i)
- self.extend(normal_flags)
- self.extend_direct(lflags)
-
- def __add__(self, args: T.Iterable[str]) -> 'CompilerArgs':
- new = self.copy()
- new += args
- return new
-
- def __iadd__(self, args: T.Iterable[str]) -> 'CompilerArgs':
- '''
- Add two CompilerArgs while taking into account overriding of arguments
- and while preserving the order of arguments as much as possible
- '''
- this_round_added = set() # a dict that contains a value, when the value was added this round
- pre = [] # type: T.List[str]
- post = [] # type: T.List[str]
- if not isinstance(args, collections.abc.Iterable):
- raise TypeError('can only concatenate Iterable[str] (not "{}") to CompilerArgs'.format(args))
- for arg in args:
- # If the argument can be de-duped, do it either by removing the
- # previous occurrence of it and adding a new one, or not adding the
- # new occurrence.
- dedup = self._can_dedup(arg)
- if dedup == 1:
- # Argument already exists and adding a new instance is useless
- if arg in self.__seen_args or arg in pre or arg in post:
- continue
- should_prepend = self._should_prepend(arg)
- if dedup == 2:
- # Remove all previous occurrences of the arg and add it anew
- if arg in self.__seen_args and arg not in this_round_added: # if __seen_args contains arg as well as this_round_added, then its not yet part in self.
- self.remove(arg)
- if should_prepend:
- if arg in pre:
- pre.remove(arg)
- else:
- if arg in post:
- post.remove(arg)
- if should_prepend:
- pre.append(arg)
- else:
- post.append(arg)
- self.__seen_args.add(arg)
- this_round_added.add(arg)
- # Insert at the beginning
- self[:0] = pre
- # Append to the end
- self.__container += post
- return self
-
- def __radd__(self, args: T.Iterable[str]):
- new = CompilerArgs(self.compiler, args)
- new += self
- return new
-
- def __eq__(self, other: T.Any) -> T.Union[bool, type(NotImplemented)]:
- # Only allow equality checks against other CompilerArgs and lists instances
- if isinstance(other, CompilerArgs):
- return self.compiler == other.compiler and self.__container == other.__container
- elif isinstance(other, list):
- return self.__container == other
- return NotImplemented
-
- def append(self, arg: str) -> None:
- self.__iadd__([arg])
-
- def extend(self, args: T.Iterable[str]) -> None:
- self.__iadd__(args)
-
- def __repr__(self) -> str:
- return 'CompilerArgs({!r}, {!r})'.format(self.compiler, self.__container)
-
-class Compiler:
+class Compiler(metaclass=abc.ABCMeta):
# Libraries to ignore in find_library() since they are provided by the
# compiler or the C library. Currently only used for MSVC.
ignore_libs = ()
@@ -953,6 +625,10 @@ class Compiler:
args += self.get_preprocess_only_args()
return args
+ def compiler_args(self, args: T.Optional[T.Iterable[str]] = None) -> CompilerArgs:
+ """Return an appropriate CompilerArgs instance for this class."""
+ return CompilerArgs(self, args)
+
@contextlib.contextmanager
def compile(self, code: str, extra_args: list = None, *, mode: str = 'link', want_output: bool = False, temp_dir: str = None):
if extra_args is None:
@@ -971,7 +647,7 @@ class Compiler:
srcname = code.fname
# Construct the compiler command-line
- commands = CompilerArgs(self)
+ commands = self.compiler_args()
commands.append(srcname)
# Preprocess mode outputs to stdout, so no output args
if mode != 'preprocess':
@@ -1087,6 +763,9 @@ class Compiler:
def openmp_flags(self):
raise EnvironmentException('Language %s does not support OpenMP flags.' % self.get_display_language())
+ def openmp_link_flags(self):
+ return self.openmp_flags()
+
def language_stdlib_only_link_flags(self):
return []
@@ -1169,6 +848,9 @@ class Compiler:
def get_asneeded_args(self) -> T.List[str]:
return self.linker.get_asneeded_args()
+ def headerpad_args(self) -> T.List[str]:
+ return self.linker.headerpad_args()
+
def bitcode_args(self) -> T.List[str]:
return self.linker.bitcode_args()
@@ -1212,10 +894,10 @@ def get_largefile_args(compiler):
'''
Enable transparent large-file-support for 32-bit UNIX systems
'''
- if not (compiler.info.is_windows() or compiler.info.is_darwin()):
+ if not (compiler.get_argument_syntax() == 'msvc' or compiler.info.is_darwin()):
# Enable large-file support unconditionally on all platforms other
- # than macOS and Windows. macOS is now 64-bit-only so it doesn't
- # need anything special, and Windows doesn't have automatic LFS.
+ # than macOS and MSVC. macOS is now 64-bit-only so it doesn't
+ # need anything special, and MSVC doesn't have automatic LFS.
# You must use the 64-bit counterparts explicitly.
# glibc, musl, and uclibc, and all BSD libcs support this. On Android,
# support for transparent LFS is available depending on the version of
diff --git a/mesonbuild/compilers/d.py b/mesonbuild/compilers/d.py
index 777fa19..32919e4 100644
--- a/mesonbuild/compilers/d.py
+++ b/mesonbuild/compilers/d.py
@@ -25,7 +25,6 @@ from .compilers import (
d_ldc_buildtype_args,
clike_debug_args,
Compiler,
- CompilerArgs,
)
from .mixins.gnu import GnuCompiler
@@ -582,7 +581,7 @@ class DCompiler(Compiler):
elif not isinstance(dependencies, list):
dependencies = [dependencies]
# Collect compiler arguments
- args = CompilerArgs(self)
+ args = self.compiler_args()
for d in dependencies:
# Add compile flags needed by dependencies
args += d.get_compile_args()
diff --git a/mesonbuild/compilers/mixins/clike.py b/mesonbuild/compilers/mixins/clike.py
index 56a9ea6..47e97d2 100644
--- a/mesonbuild/compilers/mixins/clike.py
+++ b/mesonbuild/compilers/mixins/clike.py
@@ -29,15 +29,79 @@ import subprocess
import typing as T
from pathlib import Path
+from ... import arglist
from ... import mesonlib
-from ...mesonlib import LibType
from ... import mlog
+from ...linkers import GnuLikeDynamicLinkerMixin, SolarisDynamicLinker
+from ...mesonlib import LibType
from .. import compilers
from .visualstudio import VisualStudioLikeCompiler
if T.TYPE_CHECKING:
from ...environment import Environment
+SOREGEX = re.compile(r'.*\.so(\.[0-9]+)?(\.[0-9]+)?(\.[0-9]+)?$')
+
+class CLikeCompilerArgs(arglist.CompilerArgs):
+ prepend_prefixes = ('-I', '-L')
+ dedup2_prefixes = ('-I', '-isystem', '-L', '-D', '-U')
+
+ # NOTE: not thorough. A list of potential corner cases can be found in
+ # https://github.com/mesonbuild/meson/pull/4593#pullrequestreview-182016038
+ dedup1_prefixes = ('-l', '-Wl,-l', '-Wl,--export-dynamic')
+ dedup1_suffixes = ('.lib', '.dll', '.so', '.dylib', '.a')
+ dedup1_args = ('-c', '-S', '-E', '-pipe', '-pthread')
+
+ def to_native(self, copy: bool = False) -> T.List[str]:
+ # Check if we need to add --start/end-group for circular dependencies
+ # between static libraries, and for recursively searching for symbols
+ # needed by static libraries that are provided by object files or
+ # shared libraries.
+ self.flush_pre_post()
+ if copy:
+ new = self.copy()
+ else:
+ new = self
+ # This covers all ld.bfd, ld.gold, ld.gold, and xild on Linux, which
+ # all act like (or are) gnu ld
+ # TODO: this could probably be added to the DynamicLinker instead
+ if isinstance(self.compiler.linker, (GnuLikeDynamicLinkerMixin, SolarisDynamicLinker)):
+ group_start = -1
+ group_end = -1
+ for i, each in enumerate(new):
+ if not each.startswith(('-Wl,-l', '-l')) and not each.endswith('.a') and \
+ not SOREGEX.match(each):
+ continue
+ group_end = i
+ if group_start < 0:
+ # First occurrence of a library
+ group_start = i
+ if group_start >= 0:
+ # Last occurrence of a library
+ new.insert(group_end + 1, '-Wl,--end-group')
+ new.insert(group_start, '-Wl,--start-group')
+ # Remove system/default include paths added with -isystem
+ if hasattr(self.compiler, 'get_default_include_dirs'):
+ default_dirs = self.compiler.get_default_include_dirs()
+ bad_idx_list = [] # type: T.List[int]
+ for i, each in enumerate(new):
+ # Remove the -isystem and the path if the path is a default path
+ if (each == '-isystem' and
+ i < (len(new) - 1) and
+ new[i + 1] in default_dirs):
+ bad_idx_list += [i, i + 1]
+ elif each.startswith('-isystem=') and each[9:] in default_dirs:
+ bad_idx_list += [i]
+ elif each.startswith('-isystem') and each[8:] in default_dirs:
+ bad_idx_list += [i]
+ for i in reversed(bad_idx_list):
+ new.pop(i)
+ return self.compiler.unix_args_to_native(new._container)
+
+ def __repr__(self) -> str:
+ self.flush_pre_post()
+ return 'CLikeCompilerArgs({!r}, {!r})'.format(self.compiler, self._container)
+
class CLikeCompiler:
@@ -48,7 +112,7 @@ class CLikeCompiler:
program_dirs_cache = {}
find_library_cache = {}
find_framework_cache = {}
- internal_libs = compilers.unixy_compiler_internal_libs
+ internal_libs = arglist.UNIXY_COMPILER_INTERNAL_LIBS
def __init__(self, is_cross: bool, exe_wrapper: T.Optional[str] = None):
# If a child ObjC or CPP class has already set it, don't set it ourselves
@@ -61,6 +125,9 @@ class CLikeCompiler:
else:
self.exe_wrapper = exe_wrapper.get_command()
+ def compiler_args(self, args: T.Optional[T.Iterable[str]] = None) -> CLikeCompilerArgs:
+ return CLikeCompilerArgs(self, args)
+
def needs_static_linker(self):
return True # When compiling static libraries, so yes.
@@ -338,7 +405,7 @@ class CLikeCompiler:
elif not isinstance(dependencies, list):
dependencies = [dependencies]
# Collect compiler arguments
- cargs = compilers.CompilerArgs(self)
+ cargs = self.compiler_args()
largs = []
for d in dependencies:
# Add compile flags needed by dependencies
@@ -1008,7 +1075,7 @@ class CLikeCompiler:
return value[:]
def find_library(self, libname, env, extra_dirs, libtype: LibType = LibType.PREFER_SHARED):
- code = 'int main(void) { return 0; }'
+ code = 'int main(void) { return 0; }\n'
return self.find_library_impl(libname, env, extra_dirs, code, libtype)
def find_framework_paths(self, env):
@@ -1108,7 +1175,7 @@ class CLikeCompiler:
'the compiler you are using. has_link_argument or '
'other similar method can be used instead.'
.format(arg))
- code = 'int i;\n'
+ code = 'extern int i;\nint i;\n'
return self.has_arguments(args, env, code, mode='compile')
def has_multi_link_arguments(self, args, env):
@@ -1117,7 +1184,7 @@ class CLikeCompiler:
# false positive.
args = self.linker.fatal_warnings() + args
args = self.linker_to_compiler_args(args)
- code = 'int main(void) { return 0; }'
+ code = 'int main(void) { return 0; }\n'
return self.has_arguments(args, env, code, mode='link')
@staticmethod
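The argument-normalisation code in CLikeCompilerArgs above recognises three spellings of the flag when stripping default include directories. A standalone sketch of that filtering step (a hypothetical helper for illustration, not part of Meson's API):

```python
# Sketch of the -isystem filtering done in CLikeCompilerArgs: drop
# '-isystem <dir>', '-isystem=<dir>' and '-isystem<dir>' when <dir> is
# already a default include directory of the compiler.
def drop_default_isystem(args, default_dirs):
    out = []
    i = 0
    while i < len(args):
        a = args[i]
        if a == '-isystem' and i + 1 < len(args) and args[i + 1] in default_dirs:
            i += 2  # skip the flag and its separate argument
        elif a.startswith('-isystem=') and a[9:] in default_dirs:
            i += 1
        elif a.startswith('-isystem') and a[8:] in default_dirs:
            i += 1
        else:
            out.append(a)
            i += 1
    return out

assert drop_default_isystem(
    ['-isystem', '/usr/include', '-isystem/usr/include', '-I/opt/inc'],
    ['/usr/include']) == ['-I/opt/inc']
```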
diff --git a/mesonbuild/compilers/mixins/visualstudio.py b/mesonbuild/compilers/mixins/visualstudio.py
index d0004ce..4dfd8b4 100644
--- a/mesonbuild/compilers/mixins/visualstudio.py
+++ b/mesonbuild/compilers/mixins/visualstudio.py
@@ -208,6 +208,9 @@ class VisualStudioLikeCompiler(metaclass=abc.ABCMeta):
def openmp_flags(self) -> T.List[str]:
return ['/openmp']
+ def openmp_link_flags(self) -> T.List[str]:
+ return []
+
# FIXME, no idea what these should be.
def thread_flags(self, env: 'Environment') -> T.List[str]:
return []
diff --git a/mesonbuild/coredata.py b/mesonbuild/coredata.py
index 8774b80..329c333 100644
--- a/mesonbuild/coredata.py
+++ b/mesonbuild/coredata.py
@@ -12,7 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-from . import mlog
+from . import mlog, mparser
import pickle, os, uuid
import sys
from itertools import chain
@@ -161,7 +161,9 @@ class UserComboOption(UserOption[str]):
def validate_value(self, value):
if value not in self.choices:
optionsstring = ', '.join(['"%s"' % (item,) for item in self.choices])
- raise MesonException('Value "%s" for combo option is not one of the choices. Possible choices are: %s.' % (value, optionsstring))
+ raise MesonException('Value "{}" for combo option "{}" is not one of the choices.'
+ ' Possible choices are: {}.'.format(
+ value, self.description, optionsstring))
return value
class UserArrayOption(UserOption[T.List[str]]):
@@ -227,14 +229,6 @@ class UserFeatureOption(UserComboOption):
def is_auto(self):
return self.value == 'auto'
-
-def load_configs(filenames: T.List[str]) -> configparser.ConfigParser:
- """Load configuration files from a named subdirectory."""
- config = configparser.ConfigParser(interpolation=None)
- config.read(filenames)
- return config
-
-
if T.TYPE_CHECKING:
CacheKeyType = T.Tuple[T.Tuple[T.Any, ...], ...]
SubCacheKeyType = T.Tuple[T.Any, ...]
@@ -868,7 +862,7 @@ class CoreData:
def emit_base_options_warnings(self, enabled_opts: list):
if 'b_bitcode' in enabled_opts:
- mlog.warning('Base option \'b_bitcode\' is enabled, which is incompatible with many linker options. Incompatible options such as such as \'b_asneeded\' have been disabled.')
+ mlog.warning('Base option \'b_bitcode\' is enabled, which is incompatible with many linker options. Incompatible options such as \'b_asneeded\' have been disabled.')
mlog.warning('Please see https://mesonbuild.com/Builtin-options.html#Notes_about_Apple_Bitcode_support for more details.')
class CmdLineFileParser(configparser.ConfigParser):
@@ -877,6 +871,69 @@ class CmdLineFileParser(configparser.ConfigParser):
# storing subproject options like "subproject:option=value"
super().__init__(delimiters=['='], interpolation=None)
+class MachineFileParser():
+ def __init__(self, filenames: T.List[str]):
+ self.parser = CmdLineFileParser()
+ self.constants = {'True': True, 'False': False}
+ self.sections = {}
+
+ self.parser.read(filenames)
+
+ # Parse [constants] first so they can be used in other sections
+ if self.parser.has_section('constants'):
+ self.constants.update(self._parse_section('constants'))
+
+ for s in self.parser.sections():
+ if s == 'constants':
+ continue
+ self.sections[s] = self._parse_section(s)
+
+ def _parse_section(self, s):
+ self.scope = self.constants.copy()
+ section = {}
+ for entry, value in self.parser.items(s):
+ if ' ' in entry or '\t' in entry or "'" in entry or '"' in entry:
+ raise EnvironmentException('Malformed variable name {!r} in machine file.'.format(entry))
+ # Windows paths...
+ value = value.replace('\\', '\\\\')
+ try:
+ ast = mparser.Parser(value, 'machinefile').parse()
+ res = self._evaluate_statement(ast.lines[0])
+ except MesonException:
+ raise EnvironmentException('Malformed value in machine file variable {!r}.'.format(entry))
+ except KeyError as e:
+ raise EnvironmentException('Undefined constant {!r} in machine file variable {!r}.'.format(e.args[0], entry))
+ section[entry] = res
+ self.scope[entry] = res
+ return section
+
+ def _evaluate_statement(self, node):
+ if isinstance(node, (mparser.StringNode)):
+ return node.value
+ elif isinstance(node, mparser.BooleanNode):
+ return node.value
+ elif isinstance(node, mparser.NumberNode):
+ return node.value
+ elif isinstance(node, mparser.ArrayNode):
+ return [self._evaluate_statement(arg) for arg in node.args.arguments]
+ elif isinstance(node, mparser.IdNode):
+ return self.scope[node.value]
+ elif isinstance(node, mparser.ArithmeticNode):
+ l = self._evaluate_statement(node.left)
+ r = self._evaluate_statement(node.right)
+ if node.operation == 'add':
+ if (isinstance(l, str) and isinstance(r, str)) or \
+ (isinstance(l, list) and isinstance(r, list)):
+ return l + r
+ elif node.operation == 'div':
+ if isinstance(l, str) and isinstance(r, str):
+ return os.path.join(l, r)
+ raise EnvironmentException('Unsupported node type')
+
+def parse_machine_files(filenames):
+ parser = MachineFileParser(filenames)
+ return parser.sections
+
def get_cmd_line_file(build_dir):
return os.path.join(build_dir, 'meson-private', 'cmd_line.txt')
@@ -1127,6 +1184,7 @@ builtin_options = OrderedDict([
('warning_level', BuiltinOption(UserComboOption, 'Compiler warning level to use', '1', choices=['0', '1', '2', '3'])),
('werror', BuiltinOption(UserBooleanOption, 'Treat warnings as errors', False, yielding=False)),
('wrap_mode', BuiltinOption(UserComboOption, 'Wrap mode', 'default', choices=['default', 'nofallback', 'nodownload', 'forcefallback'])),
+ ('force_fallback_for', BuiltinOption(UserArrayOption, 'Force fallback for those subprojects', [])),
])
builtin_options_per_machine = OrderedDict([
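For context, this is the kind of machine file the new MachineFileParser is meant to accept, with a `[constants]` section evaluated by `_evaluate_statement()` above (paths and flags here are invented; string concatenation with `+`, list concatenation with `+`, and path joining with `/` are the operations the evaluator supports):

```ini
[constants]
toolchain = '/usr/local/arm-toolchain'
common_flags = ['--sysroot=' + toolchain / 'sysroot']

[properties]
c_args = common_flags + ['-DSOMETHING']
c_link_args = common_flags

[binaries]
c = toolchain / 'bin' / 'arm-none-eabi-gcc'
```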
diff --git a/mesonbuild/dependencies/base.py b/mesonbuild/dependencies/base.py
index b0401c6..23701da 100644
--- a/mesonbuild/dependencies/base.py
+++ b/mesonbuild/dependencies/base.py
@@ -22,6 +22,7 @@ import json
import shlex
import shutil
import stat
+import sys
import textwrap
import platform
import typing as T
@@ -76,6 +77,30 @@ class DependencyMethods(Enum):
DUB = 'dub'
+def find_external_program(env: Environment, for_machine: MachineChoice, name: str,
+ display_name: str, default_names: T.List[str],
+ allow_default_for_cross: bool = True) -> T.Generator['ExternalProgram', None, None]:
+ """Find an external program, chcking the cross file plus any default options."""
+ # Lookup in cross or machine file.
+ potential_path = env.lookup_binary_entry(for_machine, name)
+ if potential_path is not None:
+ mlog.debug('{} binary for {} specified from cross file, native file, '
+ 'or env var as {}'.format(display_name, for_machine, potential_path))
+ yield ExternalProgram.from_entry(name, potential_path)
+ # We never fallback if the user-specified option is no good, so
+ # stop returning options.
+ return
+ mlog.debug('{} binary missing from cross or native file, or env var undefined.'.format(display_name))
+ # Fallback on hard-coded defaults, if a default binary is allowed for use
+ # with cross targets, or if this is not a cross target
+ if allow_default_for_cross or not (for_machine is MachineChoice.HOST and env.is_cross_build(for_machine)):
+ for potential_path in default_names:
+ mlog.debug('Trying a default {} fallback at'.format(display_name), potential_path)
+ yield ExternalProgram(potential_path, silent=True)
+ else:
+ mlog.debug('Default target is not allowed for cross use')
+
+
class Dependency:
@classmethod
@@ -352,25 +377,6 @@ class ExternalDependency(Dependency, HasNativeKwarg):
raise DependencyException(m.format(self.name, not_found, self.version))
return
- # Create an iterator of options
- def search_tool(self, name, display_name, default_names):
- # Lookup in cross or machine file.
- potential_path = self.env.lookup_binary_entry(self.for_machine, name)
- if potential_path is not None:
- mlog.debug('{} binary for {} specified from cross file, native file, '
- 'or env var as {}'.format(display_name, self.for_machine, potential_path))
- yield ExternalProgram.from_entry(name, potential_path)
- # We never fallback if the user-specified option is no good, so
- # stop returning options.
- return
- mlog.debug('{} binary missing from cross or native file, or env var undefined.'.format(display_name))
- # Fallback on hard-coded defaults.
- # TODO prefix this for the cross case instead of ignoring thing.
- if self.env.machines.matches_build_machine(self.for_machine):
- for potential_path in default_names:
- mlog.debug('Trying a default {} fallback at'.format(display_name), potential_path)
- yield ExternalProgram(potential_path, silent=True)
-
class NotFoundDependency(Dependency):
def __init__(self, environment):
@@ -419,8 +425,6 @@ class ConfigToolDependency(ExternalDependency):
self.config = None
return
self.version = version
- if getattr(self, 'finish_init', None):
- self.finish_init(self)
def _sanitize_version(self, version):
"""Remove any non-numeric, non-point version suffixes."""
@@ -431,14 +435,17 @@ class ConfigToolDependency(ExternalDependency):
return m.group(0).rstrip('.')
return version
- def find_config(self, versions=None, returncode: int = 0):
+ def find_config(self, versions: T.Optional[T.List[str]] = None, returncode: int = 0) \
+ -> T.Tuple[T.Optional[str], T.Optional[str]]:
"""Helper method that searches for config tool binaries in PATH and
returns the one that best matches the given version requirements.
"""
if not isinstance(versions, list) and versions is not None:
versions = listify(versions)
- best_match = (None, None)
- for potential_bin in self.search_tool(self.tool_name, self.tool_name, self.tools):
+ best_match = (None, None) # type: T.Tuple[T.Optional[str], T.Optional[str]]
+ for potential_bin in find_external_program(
+ self.env, self.for_machine, self.tool_name,
+ self.tool_name, self.tools, allow_default_for_cross=False):
if not potential_bin.found():
continue
tool = potential_bin.get_command()
@@ -562,9 +569,9 @@ class PkgConfigDependency(ExternalDependency):
else:
assert PkgConfigDependency.class_pkgbin[self.for_machine] is None
mlog.debug('Pkg-config binary for %s is not cached.' % self.for_machine)
- for potential_pkgbin in self.search_tool('pkgconfig', 'Pkg-config', environment.default_pkgconfig):
- mlog.debug('Trying pkg-config binary {} for machine {} at {}'
- .format(potential_pkgbin.name, self.for_machine, potential_pkgbin.command))
+ for potential_pkgbin in find_external_program(
+ self.env, self.for_machine, 'pkgconfig', 'Pkg-config',
+ environment.default_pkgconfig, allow_default_for_cross=False):
version_if_ok = self.check_pkgconfig(potential_pkgbin)
if not version_if_ok:
continue
@@ -1071,8 +1078,9 @@ class CMakeDependency(ExternalDependency):
# Setup the trace parser
self.traceparser = CMakeTraceParser(self.cmakebin.version(), self._get_build_dir())
+ cm_args = stringlistify(extract_as_list(kwargs, 'cmake_args'))
if CMakeDependency.class_cmakeinfo[self.for_machine] is None:
- CMakeDependency.class_cmakeinfo[self.for_machine] = self._get_cmake_info()
+ CMakeDependency.class_cmakeinfo[self.for_machine] = self._get_cmake_info(cm_args)
self.cmakeinfo = CMakeDependency.class_cmakeinfo[self.for_machine]
if self.cmakeinfo is None:
raise self._gen_exception('Unable to obtain CMake system information')
@@ -1082,25 +1090,9 @@ class CMakeDependency(ExternalDependency):
modules += [(x, False) for x in stringlistify(extract_as_list(kwargs, 'optional_modules'))]
cm_path = stringlistify(extract_as_list(kwargs, 'cmake_module_path'))
cm_path = [x if os.path.isabs(x) else os.path.join(environment.get_source_dir(), x) for x in cm_path]
- cm_args = stringlistify(extract_as_list(kwargs, 'cmake_args'))
if cm_path:
cm_args.append('-DCMAKE_MODULE_PATH=' + ';'.join(cm_path))
-
- pref_path = self.env.coredata.builtins_per_machine[self.for_machine]['cmake_prefix_path'].value
- env_pref_path = get_env_var(
- self.for_machine,
- self.env.is_cross_build(),
- 'CMAKE_PREFIX_PATH')
- if env_pref_path is not None:
- env_pref_path = env_pref_path.split(os.pathsep)
- env_pref_path = [x for x in env_pref_path if x] # Filter out empty strings
- if not pref_path:
- pref_path = []
- pref_path += env_pref_path
- if pref_path:
- cm_args.append('-DCMAKE_PREFIX_PATH={}'.format(';'.join(pref_path)))
-
- if not self._preliminary_find_check(name, cm_path, pref_path, environment.machines[self.for_machine]):
+ if not self._preliminary_find_check(name, cm_path, self.cmakebin.get_cmake_prefix_paths(), environment.machines[self.for_machine]):
mlog.debug('Preliminary CMake check failed. Aborting.')
return
self._detect_dep(name, modules, components, cm_args)
@@ -1110,7 +1102,7 @@ class CMakeDependency(ExternalDependency):
return s.format(self.__class__.__name__, self.name, self.is_found,
self.version_reqs)
- def _get_cmake_info(self):
+ def _get_cmake_info(self, cm_args):
mlog.debug("Extracting basic cmake information")
res = {}
@@ -1129,6 +1121,7 @@ class CMakeDependency(ExternalDependency):
# Prepare options
cmake_opts = temp_parser.trace_args() + ['.']
+ cmake_opts += cm_args
if len(i) > 0:
cmake_opts = ['-G', i] + cmake_opts
@@ -1152,12 +1145,17 @@ class CMakeDependency(ExternalDependency):
except MesonException:
return None
+ def process_paths(l: T.List[str]) -> T.Set[str]:
+ l = [x.split(':') for x in l]
+ l = [x for sublist in l for x in sublist]
+ return set(l)
+
# Extract the variables and sanity check them
- root_paths = set(temp_parser.get_cmake_var('MESON_FIND_ROOT_PATH'))
- root_paths.update(set(temp_parser.get_cmake_var('MESON_CMAKE_SYSROOT')))
+ root_paths = process_paths(temp_parser.get_cmake_var('MESON_FIND_ROOT_PATH'))
+ root_paths.update(process_paths(temp_parser.get_cmake_var('MESON_CMAKE_SYSROOT')))
root_paths = sorted(root_paths)
root_paths = list(filter(lambda x: os.path.isdir(x), root_paths))
- module_paths = set(temp_parser.get_cmake_var('MESON_PATHS_LIST'))
+ module_paths = process_paths(temp_parser.get_cmake_var('MESON_PATHS_LIST'))
rooted_paths = []
for j in [Path(x) for x in root_paths]:
for i in [Path(x) for x in module_paths]:
@@ -1847,14 +1845,22 @@ class ExternalProgram:
# Ensure that we use USERPROFILE even when inside MSYS, MSYS2, Cygwin, etc.
if 'USERPROFILE' not in os.environ:
return path
- # Ignore executables in the WindowsApps directory which are
- # zero-sized wrappers that magically open the Windows Store to
- # install the application.
+ # The WindowsApps directory is a bit of a problem. It contains
+ # some zero-sized .exe files which have "reparse points", that
+ # might either launch an installed application, or might open
+ # a page in the Windows Store to download the application.
+ #
+ # To handle the case where the python interpreter we're
+ # running on came from the Windows Store, if we see the
+ # WindowsApps path in the search path, replace it with
+ # dirname(sys.executable).
appstore_dir = Path(os.environ['USERPROFILE']) / 'AppData' / 'Local' / 'Microsoft' / 'WindowsApps'
paths = []
for each in path.split(os.pathsep):
if Path(each) != appstore_dir:
paths.append(each)
+ elif 'WindowsApps' in sys.executable:
+ paths.append(os.path.dirname(sys.executable))
return os.pathsep.join(paths)
@staticmethod
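A minimal usage sketch of the new find_external_program() generator, mirroring how PkgConfigDependency consumes it above; `env` and `for_machine` stand in for a real Environment and MachineChoice:

```python
from mesonbuild.dependencies.base import find_external_program

def pick_pkgconfig(env, for_machine):
    """Return the first usable pkg-config candidate, or None.

    The generator yields the cross/native-file entry first (and stops there
    if one was given), then falls back to the hard-coded default names.
    """
    for candidate in find_external_program(
            env, for_machine, 'pkgconfig', 'Pkg-config',
            ['pkg-config'], allow_default_for_cross=False):
        if candidate.found():
            return candidate
    return None
```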
diff --git a/mesonbuild/dependencies/misc.py b/mesonbuild/dependencies/misc.py
index 04dee06..47694af 100644
--- a/mesonbuild/dependencies/misc.py
+++ b/mesonbuild/dependencies/misc.py
@@ -97,7 +97,8 @@ class OpenMPDependency(ExternalDependency):
for name in header_names:
if self.clib_compiler.has_header(name, '', self.env, dependencies=[self], disable_cache=True)[0]:
self.is_found = True
- self.compile_args = self.link_args = self.clib_compiler.openmp_flags()
+ self.compile_args = self.clib_compiler.openmp_flags()
+ self.link_args = self.clib_compiler.openmp_link_flags()
break
if not self.is_found:
mlog.log(mlog.yellow('WARNING:'), 'OpenMP found but omp.h missing.')
@@ -271,8 +272,10 @@ class PcapDependencyConfigTool(ConfigToolDependency):
tools = ['pcap-config']
tool_name = 'pcap-config'
- @staticmethod
- def finish_init(self) -> None:
+ def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any]):
+ super().__init__(name, environment, kwargs)
+ if not self.is_found:
+ return
self.compile_args = self.get_config_value(['--cflags'], 'compile_args')
self.link_args = self.get_config_value(['--libs'], 'link_args')
self.version = self.get_pcap_lib_version()
@@ -284,6 +287,7 @@ class PcapDependencyConfigTool(ConfigToolDependency):
def get_pcap_lib_version(self):
# Since we seem to need to run a program to discover the pcap version,
# we can't do that when cross-compiling
+ # FIXME: this should be handled if we have an exe_wrapper
if not self.env.machines.matches_build_machine(self.for_machine):
return None
@@ -299,10 +303,12 @@ class CupsDependencyConfigTool(ConfigToolDependency):
tools = ['cups-config']
tool_name = 'cups-config'
- @staticmethod
- def finish_init(ctdep):
- ctdep.compile_args = ctdep.get_config_value(['--cflags'], 'compile_args')
- ctdep.link_args = ctdep.get_config_value(['--ldflags', '--libs'], 'link_args')
+ def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any]):
+ super().__init__(name, environment, kwargs)
+ if not self.is_found:
+ return
+ self.compile_args = self.get_config_value(['--cflags'], 'compile_args')
+ self.link_args = self.get_config_value(['--ldflags', '--libs'], 'link_args')
@staticmethod
def get_methods():
@@ -317,10 +323,12 @@ class LibWmfDependencyConfigTool(ConfigToolDependency):
tools = ['libwmf-config']
tool_name = 'libwmf-config'
- @staticmethod
- def finish_init(ctdep):
- ctdep.compile_args = ctdep.get_config_value(['--cflags'], 'compile_args')
- ctdep.link_args = ctdep.get_config_value(['--libs'], 'link_args')
+ def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any]):
+ super().__init__(name, environment, kwargs)
+ if not self.is_found:
+ return
+ self.compile_args = self.get_config_value(['--cflags'], 'compile_args')
+ self.link_args = self.get_config_value(['--libs'], 'link_args')
@staticmethod
def get_methods():
@@ -332,11 +340,13 @@ class LibGCryptDependencyConfigTool(ConfigToolDependency):
tools = ['libgcrypt-config']
tool_name = 'libgcrypt-config'
- @staticmethod
- def finish_init(ctdep):
- ctdep.compile_args = ctdep.get_config_value(['--cflags'], 'compile_args')
- ctdep.link_args = ctdep.get_config_value(['--libs'], 'link_args')
- ctdep.version = ctdep.get_config_value(['--version'], 'version')[0]
+ def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any]):
+ super().__init__(name, environment, kwargs)
+ if not self.is_found:
+ return
+ self.compile_args = self.get_config_value(['--cflags'], 'compile_args')
+ self.link_args = self.get_config_value(['--libs'], 'link_args')
+ self.version = self.get_config_value(['--version'], 'version')[0]
@staticmethod
def get_methods():
@@ -348,11 +358,13 @@ class GpgmeDependencyConfigTool(ConfigToolDependency):
tools = ['gpgme-config']
tool_name = 'gpg-config'
- @staticmethod
- def finish_init(ctdep):
- ctdep.compile_args = ctdep.get_config_value(['--cflags'], 'compile_args')
- ctdep.link_args = ctdep.get_config_value(['--libs'], 'link_args')
- ctdep.version = ctdep.get_config_value(['--version'], 'version')[0]
+ def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any]):
+ super().__init__(name, environment, kwargs)
+ if not self.is_found:
+ return
+ self.compile_args = self.get_config_value(['--cflags'], 'compile_args')
+ self.link_args = self.get_config_value(['--libs'], 'link_args')
+ self.version = self.get_config_value(['--version'], 'version')[0]
@staticmethod
def get_methods():
diff --git a/mesonbuild/dependencies/ui.py b/mesonbuild/dependencies/ui.py
index 741f0b8..3bba3dc 100644
--- a/mesonbuild/dependencies/ui.py
+++ b/mesonbuild/dependencies/ui.py
@@ -31,9 +31,11 @@ from .base import DependencyException, DependencyMethods
from .base import ExternalDependency, NonExistingExternalProgram
from .base import ExtraFrameworkDependency, PkgConfigDependency
from .base import ConfigToolDependency, DependencyFactory
+from .base import find_external_program
if T.TYPE_CHECKING:
from ..environment import Environment
+ from .base import ExternalProgram
class GLDependencySystem(ExternalDependency):
@@ -324,10 +326,9 @@ class QtBaseDependency(ExternalDependency):
if prefix:
self.bindir = os.path.join(prefix, 'bin')
- def search_qmake(self):
+ def search_qmake(self) -> T.Generator['ExternalProgram', None, None]:
for qmake in ('qmake-' + self.name, 'qmake'):
- for potential_qmake in self.search_tool(qmake, 'QMake', [qmake]):
- yield potential_qmake
+ yield from find_external_program(self.env, self.for_machine, qmake, 'QMake', [qmake])
def _qmake_detect(self, mods, kwargs):
for qmake in self.search_qmake():
@@ -545,10 +546,12 @@ class SDL2DependencyConfigTool(ConfigToolDependency):
tools = ['sdl2-config']
tool_name = 'sdl2-config'
- @staticmethod
- def finish_init(ctdep):
- ctdep.compile_args = ctdep.get_config_value(['--cflags'], 'compile_args')
- ctdep.link_args = ctdep.get_config_value(['--libs'], 'link_args')
+ def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any]):
+ super().__init__(name, environment, kwargs)
+ if not self.is_found:
+ return
+ self.compile_args = self.get_config_value(['--cflags'], 'compile_args')
+ self.link_args = self.get_config_value(['--libs'], 'link_args')
@staticmethod
def get_methods():
diff --git a/mesonbuild/envconfig.py b/mesonbuild/envconfig.py
index b74be35..219b62e 100644
--- a/mesonbuild/envconfig.py
+++ b/mesonbuild/envconfig.py
@@ -12,7 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-import configparser, os, subprocess
+import os, subprocess
import typing as T
from . import mesonlib
@@ -42,6 +42,7 @@ known_cpu_families = (
'arm',
'avr',
'c2000',
+ 'dspic',
'e2k',
'ia64',
'm68k',
@@ -49,6 +50,7 @@ known_cpu_families = (
'mips',
'mips64',
'parisc',
+ 'pic24',
'ppc',
'ppc64',
'riscv32',
@@ -57,14 +59,13 @@ known_cpu_families = (
'rx',
's390',
's390x',
+ 'sh4',
'sparc',
'sparc64',
- 'pic24',
- 'dspic',
'wasm32',
'wasm64',
'x86',
- 'x86_64'
+ 'x86_64',
)
# It would feel more natural to call this "64_BIT_CPU_FAMILES", but
@@ -82,33 +83,6 @@ CPU_FAMILES_64_BIT = [
'x86_64',
]
-class MesonConfigFile:
- @classmethod
- def from_config_parser(cls, parser: configparser.ConfigParser) -> T.Dict[str, T.Dict[str, T.Dict[str, str]]]:
- out = {}
- # This is a bit hackish at the moment.
- for s in parser.sections():
- section = {}
- for entry in parser[s]:
- value = parser[s][entry]
- # Windows paths...
- value = value.replace('\\', '\\\\')
- if ' ' in entry or '\t' in entry or "'" in entry or '"' in entry:
- raise EnvironmentException('Malformed variable name {} in cross file..'.format(entry))
- try:
- res = eval(value, {'__builtins__': None}, {'true': True, 'false': False})
- except Exception:
- raise EnvironmentException('Malformed value in cross file variable {}.'.format(entry))
-
- for i in (res if isinstance(res, list) else [res]):
- if not isinstance(i, (str, int, bool)):
- raise EnvironmentException('Malformed value in cross file variable {}.'.format(entry))
-
- section[entry] = res
-
- out[s] = section
- return out
-
def get_env_var_pair(for_machine: MachineChoice,
is_cross: bool,
var_name: str) -> T.Tuple[T.Optional[str], T.Optional[str]]:
@@ -299,6 +273,10 @@ class MachineInfo:
"""
return self.system == 'gnu'
+ def is_irix(self) -> bool:
+ """Machine is IRIX?"""
+ return self.system.startswith('irix')
+
# Various prefixes and suffixes for import libraries, shared libraries,
# static libraries, and executables.
# Versioning is added to these names in the backends as-needed.
diff --git a/mesonbuild/environment.py b/mesonbuild/environment.py
index cb6ae7d..d1cbfe7 100644
--- a/mesonbuild/environment.py
+++ b/mesonbuild/environment.py
@@ -27,7 +27,7 @@ from .mesonlib import (
from . import mlog
from .envconfig import (
- BinaryTable, Directories, MachineInfo, MesonConfigFile,
+ BinaryTable, Directories, MachineInfo,
Properties, known_cpu_families,
)
from . import compilers
@@ -134,9 +134,18 @@ def detect_gcovr(min_version='3.3', new_rootdir_version='4.2', log=False):
return gcovr_exe, mesonlib.version_compare(found, '>=' + new_rootdir_version)
return None, None
+def detect_llvm_cov():
+ tools = get_llvm_tool_names('llvm-cov')
+ for tool in tools:
+ if mesonlib.exe_exists([tool, '--version']):
+ return tool
+ return None
+
def find_coverage_tools():
gcovr_exe, gcovr_new_rootdir = detect_gcovr()
+ llvm_cov_exe = detect_llvm_cov()
+
lcov_exe = 'lcov'
genhtml_exe = 'genhtml'
@@ -145,7 +154,7 @@ def find_coverage_tools():
if not mesonlib.exe_exists([genhtml_exe, '--version']):
genhtml_exe = None
- return gcovr_exe, gcovr_new_rootdir, lcov_exe, genhtml_exe
+ return gcovr_exe, gcovr_new_rootdir, lcov_exe, genhtml_exe, llvm_cov_exe
def detect_ninja(version: str = '1.7', log: bool = False) -> str:
r = detect_ninja_command_and_version(version, log)
@@ -344,6 +353,8 @@ def detect_cpu_family(compilers: CompilersDict) -> str:
trial = 'sparc64'
elif trial in {'mipsel', 'mips64el'}:
trial = trial.rstrip('el')
+ elif trial in {'ip30', 'ip35'}:
+ trial = 'mips64'
# On Linux (and maybe others) there can be any mixture of 32/64 bit code in
# the kernel, Python, system, 32-bit chroot on 64-bit host, etc. The only
@@ -440,7 +451,7 @@ def machine_info_can_run(machine_info: MachineInfo):
(machine_info.cpu_family == true_build_cpu_family) or \
((true_build_cpu_family == 'x86_64') and (machine_info.cpu_family == 'x86'))
-def search_version(text):
+def search_version(text: str) -> str:
# Usually of the type 4.1.4 but compiler output may contain
# stuff like this:
# (Sourcery CodeBench Lite 2014.05-29) 4.8.3 20140320 (prerelease)
@@ -474,6 +485,13 @@ def search_version(text):
match = version_regex.search(text)
if match:
return match.group(0)
+
+ # try a simpler regex that matches strings like "blah 2020.01.100 foo" or "blah 2020.01 foo"
+ version_regex = re.compile(r"(\d{1,4}\.\d{1,4}\.?\d{0,4})")
+ match = version_regex.search(text)
+ if match:
+ return match.group(0)
+
return 'unknown version'
class Environment:
@@ -545,8 +563,7 @@ class Environment:
## Read in native file(s) to override build machine configuration
if self.coredata.config_files is not None:
- config = MesonConfigFile.from_config_parser(
- coredata.load_configs(self.coredata.config_files))
+ config = coredata.parse_machine_files(self.coredata.config_files)
binaries.build = BinaryTable(config.get('binaries', {}))
paths.build = Directories(**config.get('paths', {}))
properties.build = Properties(config.get('properties', {}))
@@ -554,8 +571,7 @@ class Environment:
## Read in cross file(s) to override host machine configuration
if self.coredata.cross_files:
- config = MesonConfigFile.from_config_parser(
- coredata.load_configs(self.coredata.cross_files))
+ config = coredata.parse_machine_files(self.coredata.cross_files)
properties.host = Properties(config.get('properties', {}))
binaries.host = BinaryTable(config.get('binaries', {}))
if 'host_machine' in config:
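The extra fallback pattern added to search_version() above targets date-style version strings; a quick self-check of that regex against the inputs named in the comment:

```python
import re

fallback = re.compile(r"(\d{1,4}\.\d{1,4}\.?\d{0,4})")
assert fallback.search('blah 2020.01.100 foo').group(0) == '2020.01.100'
assert fallback.search('blah 2020.01 foo').group(0) == '2020.01'
```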
diff --git a/mesonbuild/interpreter.py b/mesonbuild/interpreter.py
index 740b0bc..487bdd6 100644
--- a/mesonbuild/interpreter.py
+++ b/mesonbuild/interpreter.py
@@ -41,6 +41,7 @@ import shutil
import uuid
import re
import shlex
+import stat
import subprocess
import collections
import functools
@@ -2030,12 +2031,15 @@ class MesonMain(InterpreterObject):
@permittedKwargs({})
@FeatureDeprecated('meson.has_exe_wrapper', '0.55.0', 'use meson.can_run_host_binaries instead.')
def has_exe_wrapper_method(self, args: T.Tuple[object, ...], kwargs: T.Dict[str, object]) -> bool:
- return self.can_run_host_binaries_method(args, kwargs)
+ return self.can_run_host_binaries_impl(args, kwargs)
@noPosargs
@permittedKwargs({})
@FeatureNew('meson.can_run_host_binaries', '0.55.0')
def can_run_host_binaries_method(self, args: T.Tuple[object, ...], kwargs: T.Dict[str, object]) -> bool:
+ return self.can_run_host_binaries_impl(args, kwargs)
+
+ def can_run_host_binaries_impl(self, args, kwargs):
if (self.is_cross_build_method(None, None) and
self.build.environment.need_exe_wrapper()):
if self.build.environment.exe_wrapper is None:
@@ -2512,7 +2516,19 @@ class Interpreter(InterpreterBase):
elif os.path.isfile(f) and not f.startswith('/dev'):
srcdir = Path(self.environment.get_source_dir())
builddir = Path(self.environment.get_build_dir())
- f = Path(f).resolve()
+ try:
+ f = Path(f).resolve()
+ except OSError:
+ f = Path(f)
+ s = f.stat()
+ if (hasattr(s, 'st_file_attributes') and
+ s.st_file_attributes & stat.FILE_ATTRIBUTE_REPARSE_POINT != 0 and
+ s.st_reparse_tag == stat.IO_REPARSE_TAG_APPEXECLINK):
+ # This is a Windows Store link which we can't
+ # resolve, so just do our best otherwise.
+ f = f.parent.resolve() / f.name
+ else:
+ raise
if builddir in f.parents:
return
if srcdir in f.parents:
@@ -3563,7 +3579,10 @@ external dependencies (including libraries) must go to "dependencies".''')
return self.get_subproject_dep(name, display_name, dirname, varname, kwargs)
wrap_mode = self.coredata.get_builtin_option('wrap_mode')
- forcefallback = wrap_mode == WrapMode.forcefallback and has_fallback
+ force_fallback_for = self.coredata.get_builtin_option('force_fallback_for')
+ forcefallback = has_fallback and (wrap_mode == WrapMode.forcefallback or \
+ name in force_fallback_for or \
+ dirname in force_fallback_for)
if name != '' and not forcefallback:
self._handle_featurenew_dependencies(name)
kwargs['required'] = required and not has_fallback
@@ -3616,8 +3635,16 @@ external dependencies (including libraries) must go to "dependencies".''')
return fbinfo
def dependency_fallback(self, name, display_name, kwargs):
+ dirname, varname = self.get_subproject_infos(kwargs)
required = kwargs.get('required', True)
- if self.coredata.get_builtin_option('wrap_mode') == WrapMode.nofallback:
+
+ # Explicitly listed fallback preferences for specific subprojects
+ # take precedence over wrap-mode
+ force_fallback_for = self.coredata.get_builtin_option('force_fallback_for')
+ if name in force_fallback_for or dirname in force_fallback_for:
+ mlog.log('Looking for a fallback subproject for the dependency',
+ mlog.bold(display_name), 'because:\nUse of fallback was forced for that specific subproject')
+ elif self.coredata.get_builtin_option('wrap_mode') == WrapMode.nofallback:
mlog.log('Not looking for a fallback subproject for the dependency',
mlog.bold(display_name), 'because:\nUse of fallback '
'dependencies is disabled.')
@@ -3631,7 +3658,6 @@ external dependencies (including libraries) must go to "dependencies".''')
else:
mlog.log('Looking for a fallback subproject for the dependency',
mlog.bold(display_name))
- dirname, varname = self.get_subproject_infos(kwargs)
sp_kwargs = {
'default_options': kwargs.get('default_options', []),
'required': required,
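Taken together with the coredata change, the new built-in option can be exercised like this (subproject names are invented; as the comment above states, listed names take precedence over the wrap mode):

```sh
# Force the bundled fallbacks for just these two dependencies, even though
# fallbacks are otherwise disabled:
meson setup builddir --wrap-mode=nofallback -Dforce_fallback_for=foo,bar
```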
diff --git a/mesonbuild/linkers.py b/mesonbuild/linkers.py
index f02c297..4264e7d 100644
--- a/mesonbuild/linkers.py
+++ b/mesonbuild/linkers.py
@@ -17,6 +17,7 @@ import os
import typing as T
from . import mesonlib
+from .arglist import CompilerArgs
from .envconfig import get_env_var
if T.TYPE_CHECKING:
@@ -29,6 +30,9 @@ class StaticLinker:
def __init__(self, exelist: T.List[str]):
self.exelist = exelist
+ def compiler_args(self, args: T.Optional[T.Iterable[str]] = None) -> CompilerArgs:
+ return CompilerArgs(self, args)
+
def can_linker_accept_rsp(self) -> bool:
"""
Determines whether the linker can accept arguments using the @rsp syntax.
@@ -149,6 +153,10 @@ class ArLinker(StaticLinker):
self.std_args = ['csrD']
else:
self.std_args = ['csr']
+ self.can_rsp = '@<' in stdo
+
+ def can_linker_accept_rsp(self) -> bool:
+ return self.can_rsp
def get_std_link_args(self) -> T.List[str]:
return self.std_args
@@ -436,6 +444,10 @@ class DynamicLinker(LinkerEnvVarsMixin, metaclass=abc.ABCMeta):
"""Arguments to make all warnings errors."""
return []
+ def headerpad_args(self) -> T.List[str]:
+ # Only used by the Apple linker
+ return []
+
def bitcode_args(self) -> T.List[str]:
raise mesonlib.MesonException('This linker does not support bitcode bundles')
@@ -655,8 +667,8 @@ class AppleDynamicLinker(PosixDynamicLinkerMixin, DynamicLinker):
def no_undefined_args(self) -> T.List[str]:
return self._apply_prefix('-undefined,error')
- def get_always_args(self) -> T.List[str]:
- return self._apply_prefix('-headerpad_max_install_names') + super().get_always_args()
+ def headerpad_args(self) -> T.List[str]:
+ return self._apply_prefix('-headerpad_max_install_names')
def bitcode_args(self) -> T.List[str]:
return self._apply_prefix('-bitcode_bundle')
@@ -684,9 +696,7 @@ class AppleDynamicLinker(PosixDynamicLinkerMixin, DynamicLinker):
install_rpath: str) -> T.Tuple[T.List[str], T.Set[bytes]]:
if not rpath_paths and not install_rpath and not build_rpath:
return ([], set())
- # Ensure that there is enough space for install_name_tool in-place
- # editing of large RPATHs
- args = self._apply_prefix('-headerpad_max_install_names')
+ args = []
# @loader_path is the equivalent of $ORIGIN on macOS
# https://stackoverflow.com/q/26280738
origin_placeholder = '@loader_path'
@@ -704,6 +714,9 @@ class GnuDynamicLinker(GnuLikeDynamicLinkerMixin, PosixDynamicLinkerMixin, Dynam
"""Representation of GNU ld.bfd and ld.gold."""
+ def get_accepts_rsp(self) -> bool:
+ return True
+
class GnuGoldDynamicLinker(GnuDynamicLinker):
@@ -1085,6 +1098,13 @@ class SolarisDynamicLinker(PosixDynamicLinkerMixin, DynamicLinker):
return args
return self._apply_prefix('--whole-archive') + args + self._apply_prefix('--no-whole-archive')
+ def get_pie_args(self) -> T.List[str]:
+ # Available in Solaris 11.2 and later
+ return ['-z', 'type=pie']
+
+ def get_asneeded_args(self) -> T.List[str]:
+ return self._apply_prefix(['-z', 'ignore'])
+
def no_undefined_args(self) -> T.List[str]:
return ['-z', 'defs']
diff --git a/mesonbuild/mcompile.py b/mesonbuild/mcompile.py
index e457623..9fe3a65 100644
--- a/mesonbuild/mcompile.py
+++ b/mesonbuild/mcompile.py
@@ -14,8 +14,11 @@
"""Entrypoint script for backend agnostic compile."""
+import json
+import re
import sys
import typing as T
+from collections import defaultdict
from pathlib import Path
from . import mlog
@@ -23,11 +26,15 @@ from . import mesonlib
from . import coredata
from .mesonlib import MesonException
from mesonbuild.environment import detect_ninja
+from mesonbuild.coredata import UserArrayOption
if T.TYPE_CHECKING:
import argparse
-
-def validate_builddir(builddir: Path):
+
+def array_arg(value: str) -> T.List[str]:
+ return UserArrayOption(None, value, allow_dups=True, user_input=True).value
+
+def validate_builddir(builddir: Path) -> None:
if not (builddir / 'meson-private' / 'coredata.dat' ).is_file():
raise MesonException('Current directory is not a meson build directory: `{}`.\n'
'Please specify a valid build dir or change the working directory to it.\n'
@@ -40,7 +47,93 @@ def get_backend_from_coredata(builddir: Path) -> str:
"""
return coredata.load(str(builddir)).get_builtin_option('backend')
-def get_parsed_args_ninja(options: 'argparse.Namespace', builddir: Path):
+def parse_introspect_data(builddir: Path) -> T.Dict[str, T.List[dict]]:
+ """
+ Converts the introspection list of target dicts into a dict mapping target name to a list of target dicts (since names are not unique)
+ """
+ path_to_intro = builddir / 'meson-info' / 'intro-targets.json'
+ if not path_to_intro.exists():
+ raise MesonException('`{}` is missing! Directory is not configured yet?'.format(path_to_intro.name))
+ with path_to_intro.open() as f:
+ schema = json.load(f)
+
+ parsed_data = defaultdict(list) # type: T.Dict[str, T.List[dict]]
+ for target in schema:
+ parsed_data[target['name']] += [target]
+ return parsed_data
+
+class ParsedTargetName:
+ full_name = ''
+ name = ''
+ type = ''
+ path = ''
+
+ def __init__(self, target: str):
+ self.full_name = target
+ split = target.rsplit(':', 1)
+ if len(split) > 1:
+ self.type = split[1]
+ if not self._is_valid_type(self.type):
+ raise MesonException('Can\'t invoke target `{}`: unknown target type: `{}`'.format(target, self.type))
+
+ split = split[0].rsplit('/', 1)
+ if len(split) > 1:
+ self.path = split[0]
+ self.name = split[1]
+ else:
+ self.name = split[0]
+
+ @staticmethod
+ def _is_valid_type(type: str) -> bool:
+ # Amend docs in Commands.md when editing this list
+ allowed_types = {
+ 'executable',
+ 'static_library',
+ 'shared_library',
+ 'shared_module',
+ 'custom',
+ 'run',
+ 'jar',
+ }
+ return type in allowed_types
+
+def get_target_from_intro_data(target: ParsedTargetName, builddir: Path, introspect_data: dict) -> dict:
+ if target.name not in introspect_data:
+ raise MesonException('Can\'t invoke target `{}`: target not found'.format(target.full_name))
+
+ intro_targets = introspect_data[target.name]
+ found_targets = []
+
+ resolved_bdir = builddir.resolve()
+
+ if not target.type and not target.path:
+ found_targets = intro_targets
+ else:
+ for intro_target in intro_targets:
+ if (intro_target['subproject'] or
+ (target.type and target.type != intro_target['type'].replace(' ', '_')) or
+ (target.path
+ and intro_target['filename'] != 'no_name'
+ and Path(target.path) != Path(intro_target['filename'][0]).relative_to(resolved_bdir).parent)):
+ continue
+ found_targets += [intro_target]
+
+ if not found_targets:
+ raise MesonException('Can\'t invoke target `{}`: target not found'.format(target.full_name))
+ elif len(found_targets) > 1:
+ raise MesonException('Can\'t invoke target `{}`: ambiguous name. Add target type and/or path: `PATH/NAME:TYPE`'.format(target.full_name))
+
+ return found_targets[0]
+
+def generate_target_names_ninja(target: ParsedTargetName, builddir: Path, introspect_data: dict) -> T.List[str]:
+ intro_target = get_target_from_intro_data(target, builddir, introspect_data)
+
+ if intro_target['type'] == 'run':
+ return [target.name]
+ else:
+ return [str(Path(out_file).relative_to(builddir.resolve())) for out_file in intro_target['filename']]
+
+def get_parsed_args_ninja(options: 'argparse.Namespace', builddir: Path) -> T.List[str]:
runner = detect_ninja()
if runner is None:
raise MesonException('Cannot find ninja.')
@@ -48,40 +141,114 @@ def get_parsed_args_ninja(options: 'argparse.Namespace', builddir: Path):
cmd = [runner, '-C', builddir.as_posix()]
+ if options.targets:
+ intro_data = parse_introspect_data(builddir)
+ for t in options.targets:
+ cmd.extend(generate_target_names_ninja(ParsedTargetName(t), builddir, intro_data))
+ if options.clean:
+ cmd.append('clean')
+
# If the value is set to < 1 then don't set anything, which let's
# ninja/samu decide what to do.
if options.jobs > 0:
cmd.extend(['-j', str(options.jobs)])
if options.load_average > 0:
cmd.extend(['-l', str(options.load_average)])
- if options.clean:
- cmd.append('clean')
-
+
+ if options.verbose:
+ cmd.append('--verbose')
+
+ cmd += options.ninja_args
+
return cmd
-def get_parsed_args_vs(options: 'argparse.Namespace', builddir: Path):
+def generate_target_name_vs(target: ParsedTargetName, builddir: Path, introspect_data: dict) -> str:
+ intro_target = get_target_from_intro_data(target, builddir, introspect_data)
+
+ assert intro_target['type'] != 'run', 'Should not reach here: `run` targets must be handled above'
+
+ # Normalize project name
+ # Source: https://docs.microsoft.com/en-us/visualstudio/msbuild/how-to-build-specific-targets-in-solutions-by-using-msbuild-exe
+ target_name = re.sub(r'[\%\$\@\;\.\(\)\']', '_', intro_target['id'])
+ rel_path = Path(intro_target['filename'][0]).relative_to(builddir.resolve()).parent
+ if rel_path != '.':
+ target_name = str(rel_path / target_name)
+ return target_name
+
+def get_parsed_args_vs(options: 'argparse.Namespace', builddir: Path) -> T.List[str]:
slns = list(builddir.glob('*.sln'))
assert len(slns) == 1, 'More than one solution in a project?'
-
sln = slns[0]
- cmd = ['msbuild', str(sln.resolve())]
-
- # In msbuild `-m` with no number means "detect cpus", the default is `-m1`
+
+ cmd = ['msbuild']
+
+ if options.targets:
+ intro_data = parse_introspect_data(builddir)
+ has_run_target = any(map(
+ lambda t:
+ get_target_from_intro_data(ParsedTargetName(t), builddir, intro_data)['type'] == 'run',
+ options.targets
+ ))
+
+ if has_run_target:
+ # `run` target can't be used the same way as other targets on `vs` backend.
+ # They are defined as disabled projects, which can't be invoked as `.sln`
+ # target and have to be invoked directly as a project instead.
+ # Issue: https://github.com/microsoft/msbuild/issues/4772
+
+ if len(options.targets) > 1:
+ raise MesonException('Only one target may be specified when `run` target type is used on this backend.')
+ intro_target = get_target_from_intro_data(ParsedTargetName(options.targets[0]), builddir, intro_data)
+ proj_dir = Path(intro_target['filename'][0]).parent
+ proj = proj_dir/'{}.vcxproj'.format(intro_target['id'])
+ cmd += [str(proj.resolve())]
+ else:
+ cmd += [str(sln.resolve())]
+ cmd.extend(['-target:{}'.format(generate_target_name_vs(ParsedTargetName(t), builddir, intro_data)) for t in options.targets])
+ else:
+ cmd += [str(sln.resolve())]
+
+ if options.clean:
+ cmd.extend(['-target:Clean'])
+
+ # In msbuild `-maxCpuCount` with no number means "detect cpus", the default is `-maxCpuCount:1`
if options.jobs > 0:
- cmd.append('-m{}'.format(options.jobs))
+ cmd.append('-maxCpuCount:{}'.format(options.jobs))
else:
- cmd.append('-m')
-
+ cmd.append('-maxCpuCount')
+
if options.load_average:
mlog.warning('Msbuild does not have a load-average switch, ignoring.')
- if options.clean:
- cmd.extend(['/t:Clean'])
-
+
+ if not options.verbose:
+ cmd.append('-verbosity:minimal')
+
+ cmd += options.vs_args
+
return cmd
-
+
def add_arguments(parser: 'argparse.ArgumentParser') -> None:
"""Add compile specific arguments."""
parser.add_argument(
+ 'targets',
+ metavar='TARGET',
+ nargs='*',
+ default=None,
+ help='Targets to build. Target has the following format: [PATH_TO_TARGET/]TARGET_NAME[:TARGET_TYPE].')
+ parser.add_argument(
+ '--clean',
+ action='store_true',
+ help='Clean the build directory.'
+ )
+ parser.add_argument(
+ '-C',
+ action='store',
+ dest='builddir',
+ type=Path,
+ default='.',
+ help='The directory containing build files to be built.'
+ )
+ parser.add_argument(
'-j', '--jobs',
action='store',
default=0,
@@ -93,22 +260,25 @@ def add_arguments(parser: 'argparse.ArgumentParser') -> None:
action='store',
default=0,
type=int,
- help='The system load average to try to maintain (if supported)'
+ help='The system load average to try to maintain (if supported).'
)
parser.add_argument(
- '--clean',
+ '--verbose',
action='store_true',
- help='Clean the build directory.'
+ help='Show more verbose output.'
)
parser.add_argument(
- '-C',
- action='store',
- dest='builddir',
- type=Path,
- default='.',
- help='The directory containing build files to be built.'
+ '--ninja-args',
+ type=array_arg,
+ default=[],
+ help='Arguments to pass to `ninja` (applied only on `ninja` backend).'
+ )
+ parser.add_argument(
+ '--vs-args',
+ type=array_arg,
+ default=[],
+ help='Arguments to pass to `msbuild` (applied only on `vs` backend).'
)
-
def run(options: 'argparse.Namespace') -> int:
bdir = options.builddir # type: Path
@@ -116,6 +286,9 @@ def run(options: 'argparse.Namespace') -> int:
cmd = [] # type: T.List[str]
+ if options.targets and options.clean:
+ raise MesonException('`TARGET` and `--clean` can\'t be used simultaneously')
+
backend = get_backend_from_coredata(bdir)
if backend == 'ninja':
cmd = get_parsed_args_ninja(options, bdir)
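Illustrative invocations of the extended `meson compile` front end defined above (target names are hypothetical; the TARGET syntax is `[PATH_TO_TARGET/]TARGET_NAME[:TARGET_TYPE]`):

```sh
meson compile -C builddir                          # build everything
meson compile -C builddir mylib:shared_library     # one target, disambiguated by type
meson compile -C builddir --clean                  # cannot be combined with TARGET
meson compile -C builddir --ninja-args=-d,explain  # forwarded only on the ninja backend
```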
diff --git a/mesonbuild/mconf.py b/mesonbuild/mconf.py
index 05e9518..2e03cab 100644
--- a/mesonbuild/mconf.py
+++ b/mesonbuild/mconf.py
@@ -97,9 +97,9 @@ class Conf:
else:
print('{0:{width[0]}} {1:{width[1]}} {3}'.format(*line, width=col_widths))
- def split_options_per_subproject(self, options_iter):
+ def split_options_per_subproject(self, options):
result = {}
- for k, o in options_iter:
+ for k, o in options.items():
subproject = ''
if ':' in k:
subproject, optname = k.split(':')
@@ -211,15 +211,15 @@ class Conf:
return 'build.' + k
return k[:idx + 1] + 'build.' + k[idx + 1:]
- core_options = self.split_options_per_subproject(core_options.items())
+ core_options = self.split_options_per_subproject(core_options)
host_compiler_options = self.split_options_per_subproject(
- self.coredata.flatten_lang_iterator(
- self.coredata.compiler_options.host.items()))
+ dict(self.coredata.flatten_lang_iterator(
+ self.coredata.compiler_options.host.items())))
build_compiler_options = self.split_options_per_subproject(
- self.coredata.flatten_lang_iterator(
+ dict(self.coredata.flatten_lang_iterator(
(insert_build_prefix(k), o)
- for k, o in self.coredata.compiler_options.build.items()))
- project_options = self.split_options_per_subproject(self.coredata.user_options.items())
+ for k, o in self.coredata.compiler_options.build.items())))
+ project_options = self.split_options_per_subproject(self.coredata.user_options)
show_build_options = self.default_values_only or self.build.environment.is_cross_build()
self.add_section('Main project options')
diff --git a/mesonbuild/mdist.py b/mesonbuild/mdist.py
index b324f76..5ab0ad4 100644
--- a/mesonbuild/mdist.py
+++ b/mesonbuild/mdist.py
@@ -213,7 +213,7 @@ def check_dist(packagename, meson_command, extra_meson_args, bld_root, privdir):
unpacked_src_dir = unpacked_files[0]
with open(os.path.join(bld_root, 'meson-info', 'intro-buildoptions.json')) as boptions:
meson_command += ['-D{name}={value}'.format(**o) for o in json.load(boptions)
- if o['name'] not in ['backend', 'install_umask']]
+ if o['name'] not in ['backend', 'install_umask', 'buildtype']]
meson_command += extra_meson_args
ret = run_dist_steps(meson_command, unpacked_src_dir, builddir, installdir, ninja_bin)
diff --git a/mesonbuild/mesonlib.py b/mesonbuild/mesonlib.py
index 2413cb1..a43d4c4 100644
--- a/mesonbuild/mesonlib.py
+++ b/mesonbuild/mesonlib.py
@@ -509,6 +509,8 @@ def is_netbsd() -> bool:
def is_freebsd() -> bool:
return platform.system().lower() == 'freebsd'
+def is_irix() -> bool:
+ return platform.system().startswith('irix')
def is_hurd() -> bool:
return platform.system().lower() == 'gnu'
@@ -733,7 +735,7 @@ def default_libdir() -> str:
return 'lib/' + archpath
except Exception:
pass
- if is_freebsd():
+ if is_freebsd() or is_irix():
return 'lib'
if os.path.isdir('/usr/lib64') and not os.path.islink('/usr/lib64'):
return 'lib64'
diff --git a/mesonbuild/minit.py b/mesonbuild/minit.py
index 4238ecd..d0aff49 100644
--- a/mesonbuild/minit.py
+++ b/mesonbuild/minit.py
@@ -44,8 +44,8 @@ class DEFAULT_TYPES(Enum):
INFO_MESSAGE = '''Sample project created. To build it run the
following commands:
-meson builddir
-ninja -C builddir
+meson setup builddir
+meson compile -C builddir
'''
@@ -139,7 +139,7 @@ def add_arguments(parser):
parser.add_argument("-n", "--name", help="project name. default: name of current directory")
parser.add_argument("-e", "--executable", help="executable name. default: project name")
parser.add_argument("-d", "--deps", help="dependencies, comma-separated")
- parser.add_argument("-l", "--language", choices=LANG_SUPPORTED, help="project language. default: autodetected based on source files")
+ parser.add_argument("-l", "--language", choices=sorted(LANG_SUPPORTED), help="project language. default: autodetected based on source files")
parser.add_argument("-b", "--build", action='store_true', help="build after generation")
parser.add_argument("--builddir", default='build', help="directory for build")
parser.add_argument("-f", "--force", action="store_true", help="force overwrite of existing files and directories.")
diff --git a/mesonbuild/mintro.py b/mesonbuild/mintro.py
index 8eb659b..cccedaa 100644
--- a/mesonbuild/mintro.py
+++ b/mesonbuild/mintro.py
@@ -19,6 +19,7 @@ tests and so on. All output is in JSON for simple parsing.
Currently only works for the Ninja backend. Others use generated
project files and don't need this info."""
+import collections
import json
from . import build, coredata as cdata
from . import mesonlib
@@ -52,7 +53,7 @@ class IntroCommand:
def get_meson_introspection_types(coredata: T.Optional[cdata.CoreData] = None,
builddata: T.Optional[build.Build] = None,
backend: T.Optional[backends.Backend] = None,
- sourcedir: T.Optional[str] = None) -> T.Dict[str, IntroCommand]:
+ sourcedir: T.Optional[str] = None) -> 'T.Mapping[str, IntroCommand]':
if backend and builddata:
benchmarkdata = backend.create_test_serialisation(builddata.get_benchmarks())
testdata = backend.create_test_serialisation(builddata.get_tests())
@@ -61,18 +62,19 @@ def get_meson_introspection_types(coredata: T.Optional[cdata.CoreData] = None,
else:
benchmarkdata = testdata = installdata = None
- return {
- 'ast': IntroCommand('Dump the AST of the meson file', no_bd=dump_ast),
- 'benchmarks': IntroCommand('List all benchmarks', func=lambda: list_benchmarks(benchmarkdata)),
- 'buildoptions': IntroCommand('List all build options', func=lambda: list_buildoptions(coredata), no_bd=list_buildoptions_from_source),
- 'buildsystem_files': IntroCommand('List files that make up the build system', func=lambda: list_buildsystem_files(builddata, interpreter)),
- 'dependencies': IntroCommand('List external dependencies', func=lambda: list_deps(coredata), no_bd=list_deps_from_source),
- 'scan_dependencies': IntroCommand('Scan for dependencies used in the meson.build file', no_bd=list_deps_from_source),
- 'installed': IntroCommand('List all installed files and directories', func=lambda: list_installed(installdata)),
- 'projectinfo': IntroCommand('Information about projects', func=lambda: list_projinfo(builddata), no_bd=list_projinfo_from_source),
- 'targets': IntroCommand('List top level targets', func=lambda: list_targets(builddata, installdata, backend), no_bd=list_targets_from_source),
- 'tests': IntroCommand('List all unit tests', func=lambda: list_tests(testdata)),
- }
+ # Enforce key order for argparse
+ return collections.OrderedDict([
+ ('ast', IntroCommand('Dump the AST of the meson file', no_bd=dump_ast)),
+ ('benchmarks', IntroCommand('List all benchmarks', func=lambda: list_benchmarks(benchmarkdata))),
+ ('buildoptions', IntroCommand('List all build options', func=lambda: list_buildoptions(coredata), no_bd=list_buildoptions_from_source)),
+ ('buildsystem_files', IntroCommand('List files that make up the build system', func=lambda: list_buildsystem_files(builddata, interpreter))),
+ ('dependencies', IntroCommand('List external dependencies', func=lambda: list_deps(coredata), no_bd=list_deps_from_source)),
+ ('scan_dependencies', IntroCommand('Scan for dependencies used in the meson.build file', no_bd=list_deps_from_source)),
+ ('installed', IntroCommand('List all installed files and directories', func=lambda: list_installed(installdata))),
+ ('projectinfo', IntroCommand('Information about projects', func=lambda: list_projinfo(builddata), no_bd=list_projinfo_from_source)),
+ ('targets', IntroCommand('List top level targets', func=lambda: list_targets(builddata, installdata, backend), no_bd=list_targets_from_source)),
+ ('tests', IntroCommand('List all unit tests', func=lambda: list_tests(testdata))),
+ ])
def add_arguments(parser):
intro_types = get_meson_introspection_types()
@@ -80,7 +82,7 @@ def add_arguments(parser):
flag = '--' + key.replace('_', '-')
parser.add_argument(flag, action='store_true', dest=key, default=False, help=val.desc)
- parser.add_argument('--backend', choices=cdata.backendlist, dest='backend', default='ninja',
+ parser.add_argument('--backend', choices=sorted(cdata.backendlist), dest='backend', default='ninja',
help='The backend to use for the --buildoptions introspection.')
parser.add_argument('-a', '--all', action='store_true', dest='all', default=False,
help='Print all available information.')
diff --git a/mesonbuild/modules/cmake.py b/mesonbuild/modules/cmake.py
index dcbeda8..ca98b1c 100644
--- a/mesonbuild/modules/cmake.py
+++ b/mesonbuild/modules/cmake.py
@@ -333,8 +333,7 @@ class CmakeModule(ExtensionModule):
(ofile_path, ofile_fname) = os.path.split(os.path.join(state.subdir, '{}Config.cmake'.format(name)))
ofile_abs = os.path.join(state.environment.build_dir, ofile_path, ofile_fname)
- if 'install_dir' not in kwargs:
- install_dir = os.path.join(state.environment.coredata.get_builtin_option('libdir'), 'cmake', name)
+ install_dir = kwargs.get('install_dir', os.path.join(state.environment.coredata.get_builtin_option('libdir'), 'cmake', name))
if not isinstance(install_dir, str):
raise mesonlib.MesonException('"install_dir" must be a string.')
diff --git a/mesonbuild/modules/gnome.py b/mesonbuild/modules/gnome.py
index ea1b325..1faa128 100644
--- a/mesonbuild/modules/gnome.py
+++ b/mesonbuild/modules/gnome.py
@@ -721,11 +721,12 @@ class GnomeModule(ExtensionModule):
if f.startswith(('-L', '-l', '--extra-library')):
yield f
- @FeatureNewKwargs('build target', '0.40.0', ['build_by_default'])
+ @FeatureNewKwargs('generate_gir', '0.55.0', ['fatal_warnings'])
+ @FeatureNewKwargs('generate_gir', '0.40.0', ['build_by_default'])
@permittedKwargs({'sources', 'nsversion', 'namespace', 'symbol_prefix', 'identifier_prefix',
'export_packages', 'includes', 'dependencies', 'link_with', 'include_directories',
'install', 'install_dir_gir', 'install_dir_typelib', 'extra_args',
- 'packages', 'header', 'build_by_default'})
+ 'packages', 'header', 'build_by_default', 'fatal_warnings'})
def generate_gir(self, state, args, kwargs):
if not args:
raise MesonException('generate_gir takes at least one argument')
@@ -798,6 +799,14 @@ class GnomeModule(ExtensionModule):
scan_command += ['--sources-top-dirs', os.path.join(state.environment.get_source_dir(), self.interpreter.subproject_dir, state.subproject)]
scan_command += ['--sources-top-dirs', os.path.join(state.environment.get_build_dir(), self.interpreter.subproject_dir, state.subproject)]
+ if '--warn-error' in scan_command:
+ mlog.deprecation('Passing --warn-error is deprecated in favor of "fatal_warnings" keyword argument since v0.55')
+ fatal_warnings = kwargs.get('fatal_warnings', False)
+ if not isinstance(fatal_warnings, bool):
+ raise MesonException('fatal_warnings keyword argument must be a boolean')
+ if fatal_warnings:
+ scan_command.append('--warn-error')
+
scan_target = self._make_gir_target(state, girfile, scan_command, depends, kwargs)
typelib_output = '%s-%s.typelib' % (ns, nsversion)
diff --git a/mesonbuild/modules/pkgconfig.py b/mesonbuild/modules/pkgconfig.py
index 18baf0c..b7a12ff 100644
--- a/mesonbuild/modules/pkgconfig.py
+++ b/mesonbuild/modules/pkgconfig.py
@@ -372,18 +372,18 @@ class PkgConfigModule(ExtensionModule):
if len(deps.priv_libs) > 0:
ofile.write('Libs.private: {}\n'.format(' '.join(generate_libs_flags(deps.priv_libs))))
- def generate_compiler_flags():
- cflags_buf = []
- for f in deps.cflags:
- cflags_buf.append(self._escape(f))
- return cflags_buf
-
- cflags = generate_compiler_flags()
- ofile.write('Cflags: ')
+ cflags = []
if uninstalled:
- ofile.write(' '.join(generate_uninstalled_cflags(deps.pub_libs + deps.priv_libs)))
- elif not dataonly and cflags:
- ofile.write('{}\n'.format(' '.join(cflags)))
+ cflags += generate_uninstalled_cflags(deps.pub_libs + deps.priv_libs)
+ else:
+ for d in subdirs:
+ if d == '.':
+ cflags.append('-I${includedir}')
+ else:
+ cflags.append(self._escape(PurePath('-I${includedir}') / d))
+ cflags += [self._escape(f) for f in deps.cflags]
+ if cflags and not dataonly:
+ ofile.write('Cflags: {}\n'.format(' '.join(cflags)))
@FeatureNewKwargs('pkgconfig.generate', '0.54.0', ['uninstalled_variables'])
@FeatureNewKwargs('pkgconfig.generate', '0.42.0', ['extra_cflags'])
@@ -448,11 +448,6 @@ class PkgConfigModule(ExtensionModule):
libraries = [mainlib] + libraries
deps = DependenciesHelper(state, filebase)
- for d in subdirs:
- if d == '.':
- deps.add_cflags(['-I${includedir}'])
- else:
- deps.add_cflags(self._escape(PurePath('-I${includedir}') / d))
deps.add_pub_libs(libraries)
deps.add_priv_libs(kwargs.get('libraries_private', []))
deps.add_pub_reqs(kwargs.get('requires', []))
diff --git a/mesonbuild/scripts/cmake_run_ctgt.py b/mesonbuild/scripts/cmake_run_ctgt.py
new file mode 100755
index 0000000..5c0b31f
--- /dev/null
+++ b/mesonbuild/scripts/cmake_run_ctgt.py
@@ -0,0 +1,100 @@
+#!/usr/bin/env python3
+
+import argparse
+import subprocess
+import shutil
+import os
+import sys
+from pathlib import Path
+
+def run(argsv):
+ commands = [[]]
+ SEPARATOR = ';;;'
+
+ # Generate CMD parameters
+ parser = argparse.ArgumentParser(description='Wrapper for add_custom_command')
+ parser.add_argument('-d', '--directory', type=str, metavar='D', required=True, help='Working directory to change into before running the commands')
+ parser.add_argument('-o', '--outputs', nargs='+', metavar='O', required=True, help='Expected output files')
+ parser.add_argument('-O', '--original-outputs', nargs='*', metavar='O', default=[], help='Output files expected by CMake')
+ parser.add_argument('commands', nargs=argparse.REMAINDER, help='A "{}" separated list of commands'.format(SEPARATOR))
+
+ # Parse
+ args = parser.parse_args(argsv)
+
+ dummy_target = None
+ if len(args.outputs) == 1 and len(args.original_outputs) == 0:
+ dummy_target = args.outputs[0]
+ elif len(args.outputs) != len(args.original_outputs):
+ print('Length of output list and original output list differ')
+ sys.exit(1)
+
+ for i in args.commands:
+ if i == SEPARATOR:
+ commands += [[]]
+ continue
+
+ i = i.replace('"', '') # Remove leftover quotes
+ commands[-1] += [i]
+
+ # Execute
+ for i in commands:
+ # Skip empty lists
+ if not i:
+ continue
+
+ cmd = []
+ stdout = None
+ stderr = None
+ capture_file = ''
+
+ for j in i:
+ if j in ['>', '>>']:
+ stdout = subprocess.PIPE
+ continue
+ elif j in ['&>', '&>>']:
+ stdout = subprocess.PIPE
+ stderr = subprocess.STDOUT
+ continue
+
+ if stdout is not None or stderr is not None:
+ capture_file += j
+ else:
+ cmd += [j]
+
+ try:
+ os.makedirs(args.directory, exist_ok=True)
+
+ res = subprocess.run(cmd, stdout=stdout, stderr=stderr, cwd=args.directory, check=True)
+ if capture_file:
+ out_file = Path(args.directory) / capture_file
+ out_file.write_bytes(res.stdout)
+ except subprocess.CalledProcessError:
+ sys.exit(1)
+
+ if dummy_target:
+ with open(dummy_target, 'a'):
+ os.utime(dummy_target, None)
+ sys.exit(0)
+
+ # Copy outputs
+ zipped_outputs = zip(args.outputs, args.original_outputs)
+ for expected, generated in zipped_outputs:
+ do_copy = False
+ if not os.path.exists(expected):
+ if not os.path.exists(generated):
+ print('Unable to find generated file. This can cause the build to fail:')
+ print(generated)
+ do_copy = False
+ else:
+ do_copy = True
+ elif os.path.exists(generated):
+ if os.path.getmtime(generated) > os.path.getmtime(expected):
+ do_copy = True
+
+ if do_copy:
+ if os.path.exists(expected):
+ os.remove(expected)
+ shutil.copyfile(generated, expected)
+
+if __name__ == '__main__':
+ sys.exit(run(sys.argv[1:]))
diff --git a/mesonbuild/scripts/coverage.py b/mesonbuild/scripts/coverage.py
index 4bd41fe..7231972 100644
--- a/mesonbuild/scripts/coverage.py
+++ b/mesonbuild/scripts/coverage.py
@@ -12,15 +12,15 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-from mesonbuild import environment
+from mesonbuild import environment, mesonlib
-import argparse, sys, os, subprocess, pathlib
+import argparse, sys, os, subprocess, pathlib, stat
-def coverage(outputs, source_root, subproject_root, build_root, log_dir):
+def coverage(outputs, source_root, subproject_root, build_root, log_dir, use_llvm_cov):
outfiles = []
exitcode = 0
- (gcovr_exe, gcovr_new_rootdir, lcov_exe, genhtml_exe) = environment.find_coverage_tools()
+ (gcovr_exe, gcovr_new_rootdir, lcov_exe, genhtml_exe, llvm_cov_exe) = environment.find_coverage_tools()
# gcovr >= 4.2 requires a different syntax for out of source builds
if gcovr_new_rootdir:
@@ -28,13 +28,18 @@ def coverage(outputs, source_root, subproject_root, build_root, log_dir):
else:
gcovr_base_cmd = [gcovr_exe, '-r', build_root]
+ if use_llvm_cov:
+ gcov_exe_args = ['--gcov-executable', llvm_cov_exe + ' gcov']
+ else:
+ gcov_exe_args = []
+
if not outputs or 'xml' in outputs:
if gcovr_exe:
subprocess.check_call(gcovr_base_cmd +
['-x',
'-e', subproject_root,
- '-o', os.path.join(log_dir, 'coverage.xml'),
- ])
+ '-o', os.path.join(log_dir, 'coverage.xml')
+ ] + gcov_exe_args)
outfiles.append(('Xml', pathlib.Path(log_dir, 'coverage.xml')))
elif outputs:
print('gcovr >= 3.3 needed to generate Xml coverage report')
@@ -44,8 +49,8 @@ def coverage(outputs, source_root, subproject_root, build_root, log_dir):
if gcovr_exe:
subprocess.check_call(gcovr_base_cmd +
['-e', subproject_root,
- '-o', os.path.join(log_dir, 'coverage.txt'),
- ])
+ '-o', os.path.join(log_dir, 'coverage.txt')
+ ] + gcov_exe_args)
outfiles.append(('Text', pathlib.Path(log_dir, 'coverage.txt')))
elif outputs:
print('gcovr >= 3.3 needed to generate text coverage report')
@@ -58,19 +63,34 @@ def coverage(outputs, source_root, subproject_root, build_root, log_dir):
initial_tracefile = covinfo + '.initial'
run_tracefile = covinfo + '.run'
raw_tracefile = covinfo + '.raw'
+ if use_llvm_cov:
+ # Create a shim to allow using llvm-cov as a gcov tool.
+ if mesonlib.is_windows():
+ llvm_cov_shim_path = os.path.join(log_dir, 'llvm-cov.bat')
+ with open(llvm_cov_shim_path, 'w') as llvm_cov_bat:
+ llvm_cov_bat.write('@"{}" gcov %*'.format(llvm_cov_exe))
+ else:
+ llvm_cov_shim_path = os.path.join(log_dir, 'llvm-cov.sh')
+ with open(llvm_cov_shim_path, 'w') as llvm_cov_sh:
+ llvm_cov_sh.write('#!/usr/bin/env sh\nexec "{}" gcov $@'.format(llvm_cov_exe))
+ os.chmod(llvm_cov_shim_path, os.stat(llvm_cov_shim_path).st_mode | stat.S_IEXEC)
+ gcov_tool_args = ['--gcov-tool', llvm_cov_shim_path]
+ else:
+ gcov_tool_args = []
subprocess.check_call([lcov_exe,
'--directory', build_root,
'--capture',
'--initial',
'--output-file',
- initial_tracefile])
+ initial_tracefile] +
+ gcov_tool_args)
subprocess.check_call([lcov_exe,
'--directory', build_root,
'--capture',
'--output-file', run_tracefile,
'--no-checksum',
- '--rc', 'lcov_branch_coverage=1',
- ])
+ '--rc', 'lcov_branch_coverage=1'] +
+ gcov_tool_args)
# Join initial and test results.
subprocess.check_call([lcov_exe,
'-a', initial_tracefile,
@@ -137,6 +157,8 @@ def run(args):
const='xml', help='generate Xml report')
parser.add_argument('--html', dest='outputs', action='append_const',
const='html', help='generate Html report')
+ parser.add_argument('--use_llvm_cov', action='store_true',
+ help='use llvm-cov')
parser.add_argument('source_root')
parser.add_argument('subproject_root')
parser.add_argument('build_root')
@@ -144,7 +166,7 @@ def run(args):
options = parser.parse_args(args)
return coverage(options.outputs, options.source_root,
options.subproject_root, options.build_root,
- options.log_dir)
+ options.log_dir, options.use_llvm_cov)
if __name__ == '__main__':
sys.exit(run(sys.argv[1:]))
diff --git a/mesonbuild/scripts/gtkdochelper.py b/mesonbuild/scripts/gtkdochelper.py
index 6b174a6..812604a 100644
--- a/mesonbuild/scripts/gtkdochelper.py
+++ b/mesonbuild/scripts/gtkdochelper.py
@@ -16,7 +16,7 @@ import sys, os
import subprocess
import shutil
import argparse
-from ..mesonlib import MesonException, Popen_safe, is_windows, split_args
+from ..mesonlib import MesonException, Popen_safe, is_windows, is_cygwin, split_args
from . import destdir_join
parser = argparse.ArgumentParser()
@@ -55,16 +55,18 @@ def gtkdoc_run_check(cmd, cwd, library_paths=None):
library_paths = []
env = dict(os.environ)
- if is_windows():
+ if is_windows() or is_cygwin():
if 'PATH' in env:
library_paths.extend(env['PATH'].split(os.pathsep))
env['PATH'] = os.pathsep.join(library_paths)
- cmd.insert(0, sys.executable)
else:
if 'LD_LIBRARY_PATH' in env:
library_paths.extend(env['LD_LIBRARY_PATH'].split(os.pathsep))
env['LD_LIBRARY_PATH'] = os.pathsep.join(library_paths)
+ if is_windows():
+ cmd.insert(0, sys.executable)
+
# Put stderr into stdout since we want to print it out anyway.
# This preserves the order of messages.
p, out = Popen_safe(cmd, cwd=cwd, env=env, stderr=subprocess.STDOUT)[0:2]
diff --git a/mesonbuild/scripts/symbolextractor.py b/mesonbuild/scripts/symbolextractor.py
index 41cca26..5240275 100644
--- a/mesonbuild/scripts/symbolextractor.py
+++ b/mesonbuild/scripts/symbolextractor.py
@@ -121,6 +121,13 @@ def gnu_syms(libfilename: str, outfilename: str):
result += [' '.join(entry)]
write_if_changed('\n'.join(result) + '\n', outfilename)
+def solaris_syms(libfilename: str, outfilename: str):
+ # gnu_syms() works with GNU nm & readelf, not Solaris nm & elfdump
+ origpath = os.environ['PATH']
+ os.environ['PATH'] = '/usr/gnu/bin:' + origpath
+ gnu_syms(libfilename, outfilename)
+ os.environ['PATH'] = origpath
+
def osx_syms(libfilename: str, outfilename: str):
# Get the name of the library
output = call_tool('otool', ['-l', libfilename])
@@ -270,6 +277,8 @@ def gen_symbols(libfilename: str, impfilename: str, outfilename: str, cross_host
# No import library. Not sure how the DLL is being used, so just
# rebuild everything that links to it every time.
dummy_syms(outfilename)
+ elif mesonlib.is_sunos():
+ solaris_syms(libfilename, outfilename)
else:
if not os.path.exists(TOOL_WARNING_FILE):
mlog.warning('Symbol extracting has not been implemented for this '
diff --git a/mesonbuild/wrap/wrap.py b/mesonbuild/wrap/wrap.py
index 9d95bff..689fb4f 100644
--- a/mesonbuild/wrap/wrap.py
+++ b/mesonbuild/wrap/wrap.py
@@ -29,6 +29,7 @@ import typing as T
from pathlib import Path
from . import WrapMode
+from .. import coredata
from ..mesonlib import git, GIT, ProgressBar, MesonException
if T.TYPE_CHECKING:
@@ -126,9 +127,6 @@ class PackageDefinition:
m = 'Missing key {!r} in {}'
raise WrapException(m.format(key, self.basename))
- def has_patch(self) -> bool:
- return 'patch_filename' in self.values
-
def load_wrap(subdir_root: str, packagename: str) -> PackageDefinition:
fname = os.path.join(subdir_root, packagename + '.wrap')
if os.path.isfile(fname):
@@ -194,6 +192,7 @@ class Resolver:
self.get_svn()
else:
raise WrapException('Unknown wrap type {!r}'.format(self.wrap.type))
+ self.apply_patch()
# A meson.build or CMakeLists.txt file is required in the directory
if method == 'meson' and not os.path.exists(meson_file):
@@ -253,8 +252,6 @@ class Resolver:
os.mkdir(self.dirname)
extract_dir = self.dirname
shutil.unpack_archive(path, extract_dir)
- if self.wrap.has_patch():
- self.apply_patch()
def get_git(self) -> None:
if not GIT:
@@ -333,7 +330,8 @@ class Resolver:
raise WrapException('{} may be a WrapDB-impersonating URL'.format(urlstring))
else:
try:
- resp = urllib.request.urlopen(urlstring, timeout=REQ_TIMEOUT)
+ req = urllib.request.Request(urlstring, headers={'User-Agent': 'mesonbuild/{}'.format(coredata.version)})
+ resp = urllib.request.urlopen(req, timeout=REQ_TIMEOUT)
except urllib.error.URLError as e:
mlog.log(str(e))
 raise WrapException('could not get {}; is the internet available?'.format(urlstring))
@@ -422,13 +420,25 @@ class Resolver:
return path.as_posix()
def apply_patch(self) -> None:
- path = self.get_file_internal('patch')
- try:
- shutil.unpack_archive(path, self.subdir_root)
- except Exception:
- with tempfile.TemporaryDirectory() as workdir:
- shutil.unpack_archive(path, workdir)
- self.copy_tree(workdir, self.subdir_root)
+ if 'patch_filename' in self.wrap.values and 'patch_directory' in self.wrap.values:
+ m = 'Wrap file {!r} must not have both "patch_filename" and "patch_directory"'
+ raise WrapException(m.format(self.wrap.basename))
+ if 'patch_filename' in self.wrap.values:
+ path = self.get_file_internal('patch')
+ try:
+ shutil.unpack_archive(path, self.subdir_root)
+ except Exception:
+ with tempfile.TemporaryDirectory() as workdir:
+ shutil.unpack_archive(path, workdir)
+ self.copy_tree(workdir, self.subdir_root)
+ elif 'patch_directory' in self.wrap.values:
+ from ..interpreterbase import FeatureNew
+ FeatureNew('patch_directory', '0.55.0').use(self.current_subproject)
+ patch_dir = self.wrap.values['patch_directory']
+ src_dir = os.path.join(self.filesdir, patch_dir)
+ if not os.path.isdir(src_dir):
+ raise WrapException('patch directory does not exist: {}'.format(patch_dir))
+ self.copy_tree(src_dir, self.dirname)
def copy_tree(self, root_src_dir: str, root_dst_dir: str) -> None:
"""
diff --git a/msi/createmsi.py b/msi/createmsi.py
index 4d03593..76cb520 100644
--- a/msi/createmsi.py
+++ b/msi/createmsi.py
@@ -154,6 +154,7 @@ class PackageGenerator:
shutil.rmtree(sdir)
main_stage, ninja_stage = self.staging_dirs
dep_data_dir = 'mesonbuild/dependencies/data'
+ cmake_data_dir = 'mesonbuild/cmake/data'
modules = self.get_all_modules_from_dir('mesonbuild/modules')
modules += self.get_all_modules_from_dir('mesonbuild/scripts')
modules += self.get_more_modules()
@@ -176,6 +177,7 @@ class PackageGenerator:
subprocess.check_call(pyinst_cmd)
shutil.move(pyinstaller_tmpdir + '/meson', main_stage)
shutil.copytree(dep_data_dir, main_stage + '/mesonbuild/dependencies/data')
+ shutil.copytree(cmake_data_dir, main_stage + '/mesonbuild/cmake/data')
if not os.path.exists(os.path.join(main_stage, 'meson.exe')):
sys.exit('Meson exe missing from staging dir.')
os.mkdir(ninja_stage)
diff --git a/run_cross_test.py b/run_cross_test.py
index abbfdac..1e67876 100755
--- a/run_cross_test.py
+++ b/run_cross_test.py
@@ -21,8 +21,9 @@ This is now just a wrapper around run_project_tests.py with specific arguments
import argparse
import subprocess
-import sys
from mesonbuild import mesonlib
+from mesonbuild.coredata import version as meson_version
+
def runtests(cross_file, failfast):
tests = ['--only', 'common']
@@ -37,4 +38,5 @@ def main():
return runtests(options.cross_file, options.failfast)
if __name__ == '__main__':
- sys.exit(main())
+ print('Meson build system', meson_version, 'Cross Tests')
+ raise SystemExit(main())
diff --git a/run_meson_command_tests.py b/run_meson_command_tests.py
index 9dfb62e..6ed3d8f 100755
--- a/run_meson_command_tests.py
+++ b/run_meson_command_tests.py
@@ -14,7 +14,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-import sys
import os
import tempfile
import unittest
@@ -23,6 +22,8 @@ import zipapp
from pathlib import Path
from mesonbuild.mesonlib import windows_proof_rmtree, python_command, is_windows
+from mesonbuild.coredata import version as meson_version
+
def get_pypath():
import sysconfig
@@ -195,4 +196,5 @@ class CommandTests(unittest.TestCase):
if __name__ == '__main__':
- sys.exit(unittest.main(buffer=True))
+ print('Meson build system', meson_version, 'Command Tests')
+ raise SystemExit(unittest.main(buffer=True))
diff --git a/run_project_tests.py b/run_project_tests.py
index 5fc8aa2..c368253 100755
--- a/run_project_tests.py
+++ b/run_project_tests.py
@@ -42,7 +42,7 @@ from mesonbuild import mesonlib
from mesonbuild import mlog
from mesonbuild import mtest
from mesonbuild.mesonlib import MachineChoice, Popen_safe
-from mesonbuild.coredata import backendlist
+from mesonbuild.coredata import backendlist, version as meson_version
from run_tests import get_fake_options, run_configure, get_meson_script
from run_tests import get_backend_commands, get_backend_args_for_dir, Backend
@@ -438,7 +438,10 @@ def validate_output(test: TestDef, stdo: str, stde: str) -> str:
# coded to run as a batch process.
def clear_internal_caches():
import mesonbuild.interpreterbase
+ from mesonbuild.dependencies import CMakeDependency
+ from mesonbuild.mesonlib import PerMachine
mesonbuild.interpreterbase.FeatureNew.feature_registry = {}
+ CMakeDependency.class_cmakeinfo = PerMachine(None, None)
def run_test_inprocess(testdir):
old_stdout = sys.stdout
@@ -666,11 +669,6 @@ def gather_tests(testdir: Path, stdout_mandatory: bool) -> T.List[TestDef]:
assert "val" in i
skip = False
- # Add an empty matrix entry
- if i['val'] is None:
- tmp_opts += [(None, False)]
- continue
-
# Skip the matrix entry if environment variable is present
if 'skip_on_env' in i:
for skip_env_var in i['skip_on_env']:
@@ -684,6 +682,11 @@ def gather_tests(testdir: Path, stdout_mandatory: bool) -> T.List[TestDef]:
skip = True
break
+ # Add an empty matrix entry
+ if i['val'] is None:
+ tmp_opts += [(None, skip)]
+ continue
+
tmp_opts += [('{}={}'.format(key, i['val']), skip)]
if opt_list:
@@ -1245,6 +1248,7 @@ if __name__ == '__main__':
if options.cross_file:
options.extra_args += ['--cross-file', options.cross_file]
+ print('Meson build system', meson_version, 'Project Tests')
setup_commands(options.backend)
detect_system_compiler(options)
print_tool_versions()
diff --git a/run_tests.py b/run_tests.py
index 44dcf82..2648e06 100755
--- a/run_tests.py
+++ b/run_tests.py
@@ -33,7 +33,7 @@ from mesonbuild import mesonmain
from mesonbuild import mtest
from mesonbuild import mlog
from mesonbuild.environment import Environment, detect_ninja
-from mesonbuild.coredata import backendlist
+from mesonbuild.coredata import backendlist, version as meson_version
NINJA_1_9_OR_NEWER = False
NINJA_CMD = None
@@ -401,4 +401,5 @@ def main():
return returncode
if __name__ == '__main__':
- sys.exit(main())
+ print('Meson build system', meson_version, 'Project and Unit Tests')
+ raise SystemExit(main())
diff --git a/run_unittests.py b/run_unittests.py
index 7e0c403..c4978c2 100755
--- a/run_unittests.py
+++ b/run_unittests.py
@@ -13,6 +13,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+import typing as T
import stat
import subprocess
import re
@@ -40,7 +41,7 @@ from contextlib import contextmanager
from glob import glob
from pathlib import (PurePath, Path)
from distutils.dir_util import copy_tree
-import typing
+import typing as T
import mesonbuild.mlog
import mesonbuild.depfile
@@ -312,8 +313,14 @@ class InternalTests(unittest.TestCase):
self.assertEqual(searchfunc('1.2.3'), '1.2.3')
self.assertEqual(searchfunc('foobar 2016.10.28 1.2.3'), '1.2.3')
self.assertEqual(searchfunc('2016.10.28 1.2.3'), '1.2.3')
- self.assertEqual(searchfunc('foobar 2016.10.128'), 'unknown version')
- self.assertEqual(searchfunc('2016.10.128'), 'unknown version')
+ self.assertEqual(searchfunc('foobar 2016.10.128'), '2016.10.128')
+ self.assertEqual(searchfunc('2016.10.128'), '2016.10.128')
+ self.assertEqual(searchfunc('2016.10'), '2016.10')
+ self.assertEqual(searchfunc('2016.10 1.2.3'), '1.2.3')
+ self.assertEqual(searchfunc('oops v1.2.3'), '1.2.3')
+ self.assertEqual(searchfunc('2016.oops 1.2.3'), '1.2.3')
+ self.assertEqual(searchfunc('2016.x'), 'unknown version')
+
def test_mode_symbolic_to_bits(self):
modefunc = mesonbuild.mesonlib.FileMode.perms_s_to_bits
@@ -350,17 +357,34 @@ class InternalTests(unittest.TestCase):
stat.S_IRWXU | stat.S_ISUID |
stat.S_IRGRP | stat.S_IXGRP)
+ def test_compiler_args_class_none_flush(self):
+ cc = mesonbuild.compilers.CCompiler([], 'fake', False, MachineChoice.HOST, mock.Mock())
+ a = cc.compiler_args(['-I.'])
+ # First check that the tree construction deduplicates the correct -I argument
+ a += ['-I..']
+ a += ['-I./tests/']
+ a += ['-I./tests2/']
+ # Think of this as an assertion; we cannot apply it here, otherwise CompilerArgs would already flush the pending changes:
+ # assertEqual(a, ['-I.', '-I./tests2/', '-I./tests/', '-I..', '-I.'])
+ a += ['-I.']
+ a += ['-I.', '-I./tests/']
+ self.assertEqual(a, ['-I.', '-I./tests/', '-I./tests2/', '-I..'])
+
+ # Then check that when CompilerArgs already has a built container list, the deduplication picks the correct entry
+ a += ['-I.', '-I./tests2/']
+ self.assertEqual(a, ['-I.', '-I./tests2/', '-I./tests/', '-I..'])
+
+
def test_compiler_args_class(self):
- cargsfunc = mesonbuild.compilers.CompilerArgs
cc = mesonbuild.compilers.CCompiler([], 'fake', False, MachineChoice.HOST, mock.Mock())
# Test that empty initialization works
- a = cargsfunc(cc)
+ a = cc.compiler_args()
self.assertEqual(a, [])
# Test that list initialization works
- a = cargsfunc(cc, ['-I.', '-I..'])
+ a = cc.compiler_args(['-I.', '-I..'])
self.assertEqual(a, ['-I.', '-I..'])
# Test that there is no de-dup on initialization
- self.assertEqual(cargsfunc(cc, ['-I.', '-I.']), ['-I.', '-I.'])
+ self.assertEqual(cc.compiler_args(['-I.', '-I.']), ['-I.', '-I.'])
## Test that appending works
a.append('-I..')
@@ -406,7 +430,7 @@ class InternalTests(unittest.TestCase):
self.assertEqual(a, ['-Ibar', '-Ifoo', '-Ibaz', '-I..', '-I.', '-Ldir', '-Lbah', '-Werror', '-O3', '-O2', '-Wall'])
## Test that adding libraries works
- l = cargsfunc(cc, ['-Lfoodir', '-lfoo'])
+ l = cc.compiler_args(['-Lfoodir', '-lfoo'])
self.assertEqual(l, ['-Lfoodir', '-lfoo'])
# Adding a library and a libpath appends both correctly
l += ['-Lbardir', '-lbar']
@@ -416,7 +440,7 @@ class InternalTests(unittest.TestCase):
self.assertEqual(l, ['-Lbardir', '-Lfoodir', '-lfoo', '-lbar'])
## Test that 'direct' append and extend works
- l = cargsfunc(cc, ['-Lfoodir', '-lfoo'])
+ l = cc.compiler_args(['-Lfoodir', '-lfoo'])
self.assertEqual(l, ['-Lfoodir', '-lfoo'])
# Direct-adding a library and a libpath appends both correctly
l.extend_direct(['-Lbardir', '-lbar'])
@@ -432,14 +456,13 @@ class InternalTests(unittest.TestCase):
self.assertEqual(l, ['-Lfoodir', '-lfoo', '-Lbardir', '-lbar', '-lbar', '/libbaz.a'])
def test_compiler_args_class_gnuld(self):
- cargsfunc = mesonbuild.compilers.CompilerArgs
## Test --start/end-group
linker = mesonbuild.linkers.GnuDynamicLinker([], MachineChoice.HOST, 'fake', '-Wl,', [])
gcc = mesonbuild.compilers.GnuCCompiler([], 'fake', False, MachineChoice.HOST, mock.Mock(), linker=linker)
## Ensure that the fake compiler is never called by overriding the relevant function
gcc.get_default_include_dirs = lambda: ['/usr/include', '/usr/share/include', '/usr/local/include']
## Test that 'direct' append and extend works
- l = cargsfunc(gcc, ['-Lfoodir', '-lfoo'])
+ l = gcc.compiler_args(['-Lfoodir', '-lfoo'])
self.assertEqual(l.to_native(copy=True), ['-Lfoodir', '-Wl,--start-group', '-lfoo', '-Wl,--end-group'])
# Direct-adding a library and a libpath appends both correctly
l.extend_direct(['-Lbardir', '-lbar'])
@@ -461,14 +484,13 @@ class InternalTests(unittest.TestCase):
self.assertEqual(l.to_native(copy=True), ['-Lfoo', '-Lfoodir', '-Wl,--start-group', '-lfoo', '-Lbardir', '-lbar', '-lbar', '/libbaz.a', '-Wl,--export-dynamic', '-Wl,-ldl', '-Wl,--end-group'])
def test_compiler_args_remove_system(self):
- cargsfunc = mesonbuild.compilers.CompilerArgs
## Test --start/end-group
linker = mesonbuild.linkers.GnuDynamicLinker([], MachineChoice.HOST, 'fake', '-Wl,', [])
gcc = mesonbuild.compilers.GnuCCompiler([], 'fake', False, MachineChoice.HOST, mock.Mock(), linker=linker)
## Ensure that the fake compiler is never called by overriding the relevant function
gcc.get_default_include_dirs = lambda: ['/usr/include', '/usr/share/include', '/usr/local/include']
## Test that 'direct' append and extend works
- l = cargsfunc(gcc, ['-Lfoodir', '-lfoo'])
+ l = gcc.compiler_args(['-Lfoodir', '-lfoo'])
self.assertEqual(l.to_native(copy=True), ['-Lfoodir', '-Wl,--start-group', '-lfoo', '-Wl,--end-group'])
## Test that to_native removes all system includes
l += ['-isystem/usr/include', '-isystem=/usr/share/include', '-DSOMETHING_IMPORTANT=1', '-isystem', '/usr/local/include']
@@ -682,7 +704,6 @@ class InternalTests(unittest.TestCase):
self.assertEqual([1, [2, [3]]], listify([1, [2, [3]]], flatten=False))
# Test flattening and unholdering
holder1 = ObjectHolder(1)
- holder3 = ObjectHolder(3)
self.assertEqual([holder1], listify(holder1))
self.assertEqual([holder1], listify([holder1]))
self.assertEqual([holder1, 2], listify([holder1, 2]))
@@ -1468,6 +1489,7 @@ class DataTests(unittest.TestCase):
class BasePlatformTests(unittest.TestCase):
prefix = '/usr'
libdir = 'lib'
+
def setUp(self):
super().setUp()
self.maxDiff = None
@@ -1900,48 +1922,48 @@ class AllPlatformTests(BasePlatformTests):
(result, missing_variables, confdata_useless) = mesonbuild.mesonlib.do_conf_str(in_data, confdata, variable_format = vformat)
return '\n'.join(result)
- def check_formats (confdata, result):
- self.assertEqual(conf_str(['#mesondefine VAR'], confdata, 'meson'),result)
- self.assertEqual(conf_str(['#cmakedefine VAR ${VAR}'], confdata, 'cmake'),result)
- self.assertEqual(conf_str(['#cmakedefine VAR @VAR@'], confdata, 'cmake@'),result)
+ def check_formats(confdata, result):
+ self.assertEqual(conf_str(['#mesondefine VAR'], confdata, 'meson'), result)
+ self.assertEqual(conf_str(['#cmakedefine VAR ${VAR}'], confdata, 'cmake'), result)
+ self.assertEqual(conf_str(['#cmakedefine VAR @VAR@'], confdata, 'cmake@'), result)
confdata = ConfigurationData()
 # Key error as they do not exist
check_formats(confdata, '/* #undef VAR */\n')
# Check boolean
- confdata.values = {'VAR': (False,'description')}
+ confdata.values = {'VAR': (False, 'description')}
check_formats(confdata, '#undef VAR\n')
- confdata.values = {'VAR': (True,'description')}
+ confdata.values = {'VAR': (True, 'description')}
check_formats(confdata, '#define VAR\n')
# Check string
- confdata.values = {'VAR': ('value','description')}
+ confdata.values = {'VAR': ('value', 'description')}
check_formats(confdata, '#define VAR value\n')
# Check integer
- confdata.values = {'VAR': (10,'description')}
+ confdata.values = {'VAR': (10, 'description')}
check_formats(confdata, '#define VAR 10\n')
# Check multiple string with cmake formats
- confdata.values = {'VAR': ('value','description')}
- self.assertEqual(conf_str(['#cmakedefine VAR xxx @VAR@ yyy @VAR@'], confdata, 'cmake@'),'#define VAR xxx value yyy value\n')
- self.assertEqual(conf_str(['#define VAR xxx @VAR@ yyy @VAR@'], confdata, 'cmake@'),'#define VAR xxx value yyy value')
- self.assertEqual(conf_str(['#cmakedefine VAR xxx ${VAR} yyy ${VAR}'], confdata, 'cmake'),'#define VAR xxx value yyy value\n')
- self.assertEqual(conf_str(['#define VAR xxx ${VAR} yyy ${VAR}'], confdata, 'cmake'),'#define VAR xxx value yyy value')
+ confdata.values = {'VAR': ('value', 'description')}
+ self.assertEqual(conf_str(['#cmakedefine VAR xxx @VAR@ yyy @VAR@'], confdata, 'cmake@'), '#define VAR xxx value yyy value\n')
+ self.assertEqual(conf_str(['#define VAR xxx @VAR@ yyy @VAR@'], confdata, 'cmake@'), '#define VAR xxx value yyy value')
+ self.assertEqual(conf_str(['#cmakedefine VAR xxx ${VAR} yyy ${VAR}'], confdata, 'cmake'), '#define VAR xxx value yyy value\n')
+ self.assertEqual(conf_str(['#define VAR xxx ${VAR} yyy ${VAR}'], confdata, 'cmake'), '#define VAR xxx value yyy value')
# Handles meson format exceptions
# Unknown format
- self.assertRaises(mesonbuild.mesonlib.MesonException, conf_str,['#mesondefine VAR xxx'], confdata, 'unknown_format')
+ self.assertRaises(mesonbuild.mesonlib.MesonException, conf_str, ['#mesondefine VAR xxx'], confdata, 'unknown_format')
# More than 2 params in mesondefine
- self.assertRaises(mesonbuild.mesonlib.MesonException, conf_str,['#mesondefine VAR xxx'], confdata, 'meson')
+ self.assertRaises(mesonbuild.mesonlib.MesonException, conf_str, ['#mesondefine VAR xxx'], confdata, 'meson')
# Mismatched line with format
- self.assertRaises(mesonbuild.mesonlib.MesonException, conf_str,['#cmakedefine VAR'], confdata, 'meson')
- self.assertRaises(mesonbuild.mesonlib.MesonException, conf_str,['#mesondefine VAR'], confdata, 'cmake')
- self.assertRaises(mesonbuild.mesonlib.MesonException, conf_str,['#mesondefine VAR'], confdata, 'cmake@')
+ self.assertRaises(mesonbuild.mesonlib.MesonException, conf_str, ['#cmakedefine VAR'], confdata, 'meson')
+ self.assertRaises(mesonbuild.mesonlib.MesonException, conf_str, ['#mesondefine VAR'], confdata, 'cmake')
+ self.assertRaises(mesonbuild.mesonlib.MesonException, conf_str, ['#mesondefine VAR'], confdata, 'cmake@')
# Dict value in confdata
- confdata.values = {'VAR': (['value'],'description')}
- self.assertRaises(mesonbuild.mesonlib.MesonException, conf_str,['#mesondefine VAR'], confdata, 'meson')
+ confdata.values = {'VAR': (['value'], 'description')}
+ self.assertRaises(mesonbuild.mesonlib.MesonException, conf_str, ['#mesondefine VAR'], confdata, 'meson')
def test_absolute_prefix_libdir(self):
'''
@@ -2261,6 +2283,12 @@ class AllPlatformTests(BasePlatformTests):
self.build()
self.run_tests()
+ def test_force_fallback_for(self):
+ testdir = os.path.join(self.unit_test_dir, '31 forcefallback')
+ self.init(testdir, extra_args=['--force-fallback-for=zlib,foo'])
+ self.build()
+ self.run_tests()
+
def test_env_ops_dont_stack(self):
'''
Test that env ops prepend/append do not stack, and that this usage issues a warning
@@ -2432,6 +2460,9 @@ class AllPlatformTests(BasePlatformTests):
self.assertPathExists(exe2)
def test_internal_include_order(self):
+ if mesonbuild.environment.detect_msys2_arch() and ('MESON_RSP_THRESHOLD' in os.environ):
+ raise unittest.SkipTest('Test does not yet support gcc rsp files on msys2')
+
testdir = os.path.join(self.common_test_dir, '134 include order')
self.init(testdir)
execmd = fxecmd = None
@@ -2551,6 +2582,8 @@ class AllPlatformTests(BasePlatformTests):
self.assertIsInstance(linker, ar)
if is_osx():
self.assertIsInstance(cc.linker, mesonbuild.linkers.AppleDynamicLinker)
+ elif is_sunos():
+ self.assertIsInstance(cc.linker, (mesonbuild.linkers.SolarisDynamicLinker, mesonbuild.linkers.GnuLikeDynamicLinkerMixin))
else:
self.assertIsInstance(cc.linker, mesonbuild.linkers.GnuLikeDynamicLinkerMixin)
if isinstance(cc, clangcl):
@@ -3144,8 +3177,9 @@ int main(int argc, char **argv) {
self.assertEqual(foo_dep.get_link_args(), link_args)
# Ensure include args are properly quoted
incdir = PurePath(prefix) / PurePath('include')
- cargs = ['-I' + incdir.as_posix()]
- self.assertEqual(foo_dep.get_compile_args(), cargs)
+ cargs = ['-I' + incdir.as_posix(), '-DLIBFOO']
+ # pkg-config and pkgconf do not respect the same order
+ self.assertEqual(sorted(foo_dep.get_compile_args()), sorted(cargs))
def test_array_option_change(self):
def get_opt():
@@ -3881,7 +3915,7 @@ recommended as it is not supported on some platforms''')
with tempfile.NamedTemporaryFile(mode='w', delete=False) as crossfile:
crossfile.write(textwrap.dedent(
'''[binaries]
- pkgconfig = r'{0}'
+ pkgconfig = '{0}'
[properties]
@@ -3911,7 +3945,7 @@ recommended as it is not supported on some platforms''')
pkgconfig = 'pkg-config'
[properties]
- pkg_config_libdir = [r'{0}']
+ pkg_config_libdir = ['{0}']
[host_machine]
system = 'linux'
@@ -4596,18 +4630,83 @@ recommended as it is not supported on some platforms''')
def test_meson_compile(self):
"""Test the meson compile command."""
- prog = 'trivialprog'
- if is_windows():
- prog = '{}.exe'.format(prog)
+
+ def get_exe_name(basename: str) -> str:
+ if is_windows():
+ return '{}.exe'.format(basename)
+ else:
+ return basename
+
+ def get_shared_lib_name(basename: str) -> str:
+ if mesonbuild.environment.detect_msys2_arch():
+ return 'lib{}.dll'.format(basename)
+ elif is_windows():
+ return '{}.dll'.format(basename)
+ elif is_cygwin():
+ return 'cyg{}.dll'.format(basename)
+ elif is_osx():
+ return 'lib{}.dylib'.format(basename)
+ else:
+ return 'lib{}.so'.format(basename)
+
+ def get_static_lib_name(basename: str) -> str:
+ return 'lib{}.a'.format(basename)
+
+ # Base case (no targets or additional arguments)
testdir = os.path.join(self.common_test_dir, '1 trivial')
self.init(testdir)
+
self._run([*self.meson_command, 'compile', '-C', self.builddir])
- # If compile worked then we should get a program
- self.assertPathExists(os.path.join(self.builddir, prog))
+ self.assertPathExists(os.path.join(self.builddir, get_exe_name('trivialprog')))
+
+ # `--clean`
self._run([*self.meson_command, 'compile', '-C', self.builddir, '--clean'])
- self.assertPathDoesNotExist(os.path.join(self.builddir, prog))
+ self.assertPathDoesNotExist(os.path.join(self.builddir, get_exe_name('trivialprog')))
+
+ # Target specified in a project with unique names
+
+ testdir = os.path.join(self.common_test_dir, '6 linkshared')
+ self.init(testdir, extra_args=['--wipe'])
+ # Multiple targets and target type specified
+ self._run([*self.meson_command, 'compile', '-C', self.builddir, 'mylib', 'mycpplib:shared_library'])
+ # Check that we have a shared lib, but not an executable, i.e. check that target actually worked
+ self.assertPathExists(os.path.join(self.builddir, get_shared_lib_name('mylib')))
+ self.assertPathDoesNotExist(os.path.join(self.builddir, get_exe_name('prog')))
+ self.assertPathExists(os.path.join(self.builddir, get_shared_lib_name('mycpplib')))
+ self.assertPathDoesNotExist(os.path.join(self.builddir, get_exe_name('cppprog')))
+
+ # Target specified in a project with non unique names
+
+ testdir = os.path.join(self.common_test_dir, '190 same target name')
+ self.init(testdir, extra_args=['--wipe'])
+ self._run([*self.meson_command, 'compile', '-C', self.builddir, './foo'])
+ self.assertPathExists(os.path.join(self.builddir, get_static_lib_name('foo')))
+ self._run([*self.meson_command, 'compile', '-C', self.builddir, 'sub/foo'])
+ self.assertPathExists(os.path.join(self.builddir, 'sub', get_static_lib_name('foo')))
+
+ # run_target
+
+ testdir = os.path.join(self.common_test_dir, '54 run target')
+ self.init(testdir, extra_args=['--wipe'])
+ out = self._run([*self.meson_command, 'compile', '-C', self.builddir, 'py3hi'])
+ self.assertIn('I am Python3.', out)
+
+ # `--$BACKEND-args`
+
+ testdir = os.path.join(self.common_test_dir, '1 trivial')
+ if self.backend is Backend.ninja:
+ self.init(testdir, extra_args=['--wipe'])
+ # Dry run - should not create a program
+ self._run([*self.meson_command, 'compile', '-C', self.builddir, '--ninja-args=-n'])
+ self.assertPathDoesNotExist(os.path.join(self.builddir, get_exe_name('trivialprog')))
+ elif self.backend is Backend.vs:
+ self.init(testdir, extra_args=['--wipe'])
+ self._run([*self.meson_command, 'compile', '-C', self.builddir])
+ # Explicitly clean the target through msbuild interface
+ self._run([*self.meson_command, 'compile', '-C', self.builddir, '--vs-args=-t:{}:Clean'.format(re.sub(r'[\%\$\@\;\.\(\)\']', '_', get_exe_name('trivialprog')))])
+ self.assertPathDoesNotExist(os.path.join(self.builddir, get_exe_name('trivialprog')))
def test_spurious_reconfigure_built_dep_file(self):
testdir = os.path.join(self.unit_test_dir, '75 dep files')
@@ -4680,6 +4779,177 @@ recommended as it is not supported on some platforms''')
self.assertRegex(contents, r'build main(\.exe)?.*: c_LINKER')
self.assertRegex(contents, r'build (lib|cyg)?mylib.*: c_LINKER')
+ def test_commands_documented(self):
+ '''
+ Test that all listed meson commands are documented in Commands.md.
+ '''
+
+ doc_path = 'docs/markdown_dynamic/Commands.md'
+
+ md = None
+ with open(doc_path, encoding='utf-8') as f:
+ md = f.read()
+ self.assertIsNotNone(md)
+
+ ## Get command sections
+
+ section_pattern = re.compile(r'^### (.+)$', re.MULTILINE)
+ md_command_section_matches = [i for i in section_pattern.finditer(md)]
+ md_command_sections = dict()
+ for i, s in enumerate(md_command_section_matches):
+ section_end = len(md) if i == len(md_command_section_matches) - 1 else md_command_section_matches[i + 1].start()
+ md_command_sections[s.group(1)] = (s.start(), section_end)
+
+ ## Validate commands
+
+ md_commands = set(k for k,v in md_command_sections.items())
+
+ help_output = self._run(self.meson_command + ['--help'])
+ help_commands = set(c.strip() for c in re.findall(r'usage:(?:.+)?{((?:[a-z]+,*)+?)}', help_output, re.MULTILINE|re.DOTALL)[0].split(','))
+
+ self.assertEqual(md_commands | {'help'}, help_commands, 'Doc file: `{}`'.format(doc_path))
+
+ ## Validate that each section has proper placeholders
+
+ def get_data_pattern(command):
+ return re.compile(
+ r'^```[\r\n]'
+ r'{{ cmd_help\[\'' + command + r'\'\]\[\'usage\'\] }}[\r\n]'
+ r'^```[\r\n]'
+ r'.*?'
+ r'^```[\r\n]'
+ r'{{ cmd_help\[\'' + command + r'\'\]\[\'arguments\'\] }}[\r\n]'
+ r'^```',
+ flags = re.MULTILINE|re.DOTALL)
+
+ for command in md_commands:
+ m = get_data_pattern(command).search(md, pos=md_command_sections[command][0], endpos=md_command_sections[command][1])
+ self.assertIsNotNone(m, 'Command `{}` is missing placeholders for dynamic data. Doc file: `{}`'.format(command, doc_path))
+
+ def test_coverage(self):
+ if mesonbuild.environment.detect_msys2_arch():
+ raise unittest.SkipTest('Skipped due to problems with coverage on MSYS2')
+ gcovr_exe, gcovr_new_rootdir = mesonbuild.environment.detect_gcovr()
+ if not gcovr_exe:
+ raise unittest.SkipTest('gcovr not found, or too old')
+ testdir = os.path.join(self.common_test_dir, '1 trivial')
+ env = get_fake_env(testdir, self.builddir, self.prefix)
+ cc = env.detect_c_compiler(MachineChoice.HOST)
+ if cc.get_id() == 'clang':
+ if not mesonbuild.environment.detect_llvm_cov():
+ raise unittest.SkipTest('llvm-cov not found')
+ if cc.get_id() == 'msvc':
+ raise unittest.SkipTest('Test only applies to non-MSVC compilers')
+ self.init(testdir, extra_args=['-Db_coverage=true'])
+ self.build()
+ self.run_tests()
+ self.run_target('coverage')
+
+ def test_coverage_html(self):
+ if mesonbuild.environment.detect_msys2_arch():
+ raise unittest.SkipTest('Skipped due to problems with coverage on MSYS2')
+ gcovr_exe, gcovr_new_rootdir = mesonbuild.environment.detect_gcovr()
+ if not gcovr_exe:
+ raise unittest.SkipTest('gcovr not found, or too old')
+ testdir = os.path.join(self.common_test_dir, '1 trivial')
+ env = get_fake_env(testdir, self.builddir, self.prefix)
+ cc = env.detect_c_compiler(MachineChoice.HOST)
+ if cc.get_id() == 'clang':
+ if not mesonbuild.environment.detect_llvm_cov():
+ raise unittest.SkipTest('llvm-cov not found')
+ if cc.get_id() == 'msvc':
+ raise unittest.SkipTest('Test only applies to non-MSVC compilers')
+ self.init(testdir, extra_args=['-Db_coverage=true'])
+ self.build()
+ self.run_tests()
+ self.run_target('coverage-html')
+
+ def test_coverage_text(self):
+ if mesonbuild.environment.detect_msys2_arch():
+ raise unittest.SkipTest('Skipped due to problems with coverage on MSYS2')
+ gcovr_exe, gcovr_new_rootdir = mesonbuild.environment.detect_gcovr()
+ if not gcovr_exe:
+ raise unittest.SkipTest('gcovr not found, or too old')
+ testdir = os.path.join(self.common_test_dir, '1 trivial')
+ env = get_fake_env(testdir, self.builddir, self.prefix)
+ cc = env.detect_c_compiler(MachineChoice.HOST)
+ if cc.get_id() == 'clang':
+ if not mesonbuild.environment.detect_llvm_cov():
+ raise unittest.SkipTest('llvm-cov not found')
+ if cc.get_id() == 'msvc':
+ raise unittest.SkipTest('Test only applies to non-MSVC compilers')
+ self.init(testdir, extra_args=['-Db_coverage=true'])
+ self.build()
+ self.run_tests()
+ self.run_target('coverage-text')
+
+ def test_coverage_xml(self):
+ if mesonbuild.environment.detect_msys2_arch():
+ raise unittest.SkipTest('Skipped due to problems with coverage on MSYS2')
+ gcovr_exe, gcovr_new_rootdir = mesonbuild.environment.detect_gcovr()
+ if not gcovr_exe:
+ raise unittest.SkipTest('gcovr not found, or too old')
+ testdir = os.path.join(self.common_test_dir, '1 trivial')
+ env = get_fake_env(testdir, self.builddir, self.prefix)
+ cc = env.detect_c_compiler(MachineChoice.HOST)
+ if cc.get_id() == 'clang':
+ if not mesonbuild.environment.detect_llvm_cov():
+ raise unittest.SkipTest('llvm-cov not found')
+ if cc.get_id() == 'msvc':
+ raise unittest.SkipTest('Test only applies to non-MSVC compilers')
+ self.init(testdir, extra_args=['-Db_coverage=true'])
+ self.build()
+ self.run_tests()
+ self.run_target('coverage-xml')
+
+ def test_cross_file_constants(self):
+ with temp_filename() as crossfile1, temp_filename() as crossfile2:
+ with open(crossfile1, 'w') as f:
+ f.write(textwrap.dedent(
+ '''
+ [constants]
+ compiler = 'gcc'
+ '''))
+ with open(crossfile2, 'w') as f:
+ f.write(textwrap.dedent(
+ '''
+ [constants]
+ toolchain = '/toolchain/'
+ common_flags = ['--sysroot=' + toolchain / 'sysroot']
+
+ [properties]
+ c_args = common_flags + ['-DSOMETHING']
+ cpp_args = c_args + ['-DSOMETHING_ELSE']
+
+ [binaries]
+ c = toolchain / compiler
+ '''))
+
+ values = mesonbuild.coredata.parse_machine_files([crossfile1, crossfile2])
+ self.assertEqual(values['binaries']['c'], '/toolchain/gcc')
+ self.assertEqual(values['properties']['c_args'],
+ ['--sysroot=/toolchain/sysroot', '-DSOMETHING'])
+ self.assertEqual(values['properties']['cpp_args'],
+ ['--sysroot=/toolchain/sysroot', '-DSOMETHING', '-DSOMETHING_ELSE'])
+
+ @unittest.skipIf(is_windows(), 'Directory cleanup fails for some reason')
+ def test_wrap_git(self):
+ with tempfile.TemporaryDirectory() as tmpdir:
+ srcdir = os.path.join(tmpdir, 'src')
+ shutil.copytree(os.path.join(self.unit_test_dir, '78 wrap-git'), srcdir)
+ upstream = os.path.join(srcdir, 'subprojects', 'wrap_git_upstream')
+ upstream_uri = Path(upstream).as_uri()
+ _git_init(upstream)
+ with open(os.path.join(srcdir, 'subprojects', 'wrap_git.wrap'), 'w') as f:
+ f.write(textwrap.dedent('''
+ [wrap-git]
+ url = {}
+ patch_directory = wrap_git_builddef
+ revision = master
+ '''.format(upstream_uri)))
+ self.init(srcdir)
+ self.build()
+ self.run_tests()
class FailureTests(BasePlatformTests):
'''
@@ -6153,21 +6423,6 @@ class LinuxlikeTests(BasePlatformTests):
for i in compdb:
self.assertIn("-fsanitize=address", i["command"])
- def test_coverage(self):
- gcovr_exe, gcovr_new_rootdir = mesonbuild.environment.detect_gcovr()
- if not gcovr_exe:
- raise unittest.SkipTest('gcovr not found')
- if not shutil.which('genhtml') and not gcovr_new_rootdir:
- raise unittest.SkipTest('genhtml not found and gcovr is too old')
- if 'clang' in os.environ.get('CC', ''):
- # We need to use llvm-cov instead of gcovr with clang
- raise unittest.SkipTest('Coverage does not work with clang right now, help wanted!')
- testdir = os.path.join(self.common_test_dir, '1 trivial')
- self.init(testdir, extra_args=['-Db_coverage=true'])
- self.build()
- self.run_tests()
- self.run_target('coverage-html')
-
def test_cross_find_program(self):
testdir = os.path.join(self.unit_test_dir, '11 cross prog')
crossfile = tempfile.NamedTemporaryFile(mode='w')
@@ -6518,10 +6773,10 @@ class LinuxlikeTests(BasePlatformTests):
prog = os.path.join(self.installdir, 'bin', 'client')
env3 = {}
if is_cygwin():
- env3['PATH'] = os.path.join(val1prefix, 'bin') + \
- os.pathsep + \
- os.path.join(val2prefix, 'bin') + \
- os.pathsep + os.environ['PATH']
+ env3['PATH'] = os.path.join(val1prefix, 'bin') + \
+ os.pathsep + \
+ os.path.join(val2prefix, 'bin') + \
+ os.pathsep + os.environ['PATH']
out = self._run([prog], override_envvars=env3).strip()
# Expected output is val1 + val2 = 3
self.assertEqual(out, '3')
@@ -6565,6 +6820,11 @@ class LinuxlikeTests(BasePlatformTests):
testdir = os.path.join(self.unit_test_dir, '52 ldflagdedup')
if is_cygwin() or is_osx():
raise unittest.SkipTest('Not applicable on Cygwin or OSX.')
+ env = get_fake_env()
+ cc = env.detect_c_compiler(MachineChoice.HOST)
+ linker = cc.linker
+ if not linker.export_dynamic_args(env):
+ raise unittest.SkipTest('Not applicable for linkers without --export-dynamic')
self.init(testdir)
build_ninja = os.path.join(self.builddir, 'build.ninja')
max_count = 0
@@ -7633,7 +7893,7 @@ class CrossFileTests(BasePlatformTests):
"""
def _cross_file_generator(self, *, needs_exe_wrapper: bool = False,
- exe_wrapper: typing.Optional[typing.List[str]] = None) -> str:
+ exe_wrapper: T.Optional[T.List[str]] = None) -> str:
if is_windows():
raise unittest.SkipTest('Cannot run this test on non-mingw/non-cygwin windows')
if is_sunos():
@@ -8128,6 +8388,9 @@ def convert_args(argv):
test_list = []
for arg in argv:
if arg.startswith('-'):
+ if arg in ('-f', '--failfast'):
+ arg = '--exitfirst'
+ pytest_args.append(arg)
continue
# ClassName.test_name => 'ClassName and test_name'
if '.' in arg:
@@ -8159,4 +8422,5 @@ def main():
return unittest.main(defaultTest=cases, buffer=True)
if __name__ == '__main__':
+ print('Meson build system', mesonbuild.coredata.version, 'Unit Tests')
raise SystemExit(main())
diff --git a/test cases/cmake/1 basic/subprojects/cmMod/CMakeLists.txt b/test cases/cmake/1 basic/subprojects/cmMod/CMakeLists.txt
index 9798209..9c95636 100644
--- a/test cases/cmake/1 basic/subprojects/cmMod/CMakeLists.txt
+++ b/test cases/cmake/1 basic/subprojects/cmMod/CMakeLists.txt
@@ -8,5 +8,7 @@ include_directories(${CMAKE_CURRENT_BINARY_DIR})
add_definitions("-DDO_NOTHING_JUST_A_FLAG=1")
add_library(cmModLib++ SHARED cmMod.cpp)
+target_compile_definitions(cmModLib++ PRIVATE MESON_MAGIC_FLAG=21)
+target_compile_definitions(cmModLib++ INTERFACE MESON_MAGIC_FLAG=42)
include(GenerateExportHeader)
generate_export_header(cmModLib++)
diff --git a/test cases/cmake/1 basic/subprojects/cmMod/cmMod.cpp b/test cases/cmake/1 basic/subprojects/cmMod/cmMod.cpp
index d3141d5..f4cbea0 100644
--- a/test cases/cmake/1 basic/subprojects/cmMod/cmMod.cpp
+++ b/test cases/cmake/1 basic/subprojects/cmMod/cmMod.cpp
@@ -2,6 +2,10 @@
using namespace std;
+#if MESON_MAGIC_FLAG != 21
+#error "Invalid MESON_MAGIC_FLAG (private)"
+#endif
+
cmModClass::cmModClass(string foo) {
str = foo + " World";
}
diff --git a/test cases/cmake/1 basic/subprojects/cmMod/cmMod.hpp b/test cases/cmake/1 basic/subprojects/cmMod/cmMod.hpp
index 0e6dc04..4445e1f 100644
--- a/test cases/cmake/1 basic/subprojects/cmMod/cmMod.hpp
+++ b/test cases/cmake/1 basic/subprojects/cmMod/cmMod.hpp
@@ -3,6 +3,10 @@
#include "cmmodlib++_export.h"
#include <string>
+#if MESON_MAGIC_FLAG != 42 && MESON_MAGIC_FLAG != 21
+#error "Invalid MESON_MAGIC_FLAG"
+#endif
+
class CMMODLIB___EXPORT cmModClass {
private:
std::string str;
diff --git a/test cases/cmake/10 header only/main.cpp b/test cases/cmake/10 header only/main.cpp
index 9507961..1417881 100644
--- a/test cases/cmake/10 header only/main.cpp
+++ b/test cases/cmake/10 header only/main.cpp
@@ -3,8 +3,14 @@
using namespace std;
+#define EXPECTED "Hello World compDef 42"
+
int main(void) {
cmModClass obj("Hello");
cout << obj.getStr() << endl;
+ if (obj.getStr() != EXPECTED) {
+ cerr << "Expected: '" << EXPECTED << "'" << endl;
+ return 1;
+ }
return 0;
}
diff --git a/test cases/cmake/10 header only/subprojects/cmMod/CMakeLists.txt b/test cases/cmake/10 header only/subprojects/cmMod/CMakeLists.txt
index f5d9a47..e01b6e2 100644
--- a/test cases/cmake/10 header only/subprojects/cmMod/CMakeLists.txt
+++ b/test cases/cmake/10 header only/subprojects/cmMod/CMakeLists.txt
@@ -9,3 +9,4 @@ add_library(cmModLib INTERFACE)
set_target_properties(cmModLib PROPERTIES INTERFACE_COMPILE_OPTIONS "-DCMAKE_FLAG_MUST_BE_PRESENT")
target_include_directories(cmModLib INTERFACE "${CMAKE_CURRENT_SOURCE_DIR}" "${CMAKE_CURRENT_SOURCE_DIR}/include")
target_compile_definitions(cmModLib INTERFACE -DCMAKE_COMPILER_DEFINE_STR="compDef")
+target_compile_definitions(cmModLib INTERFACE MESON_MAGIC_FLAG=42)
diff --git a/test cases/cmake/10 header only/subprojects/cmMod/include/cmMod.hpp b/test cases/cmake/10 header only/subprojects/cmMod/include/cmMod.hpp
index 7ea72f7..fe01040 100644
--- a/test cases/cmake/10 header only/subprojects/cmMod/include/cmMod.hpp
+++ b/test cases/cmake/10 header only/subprojects/cmMod/include/cmMod.hpp
@@ -6,6 +6,9 @@
#error "The flag CMAKE_FLAG_MUST_BE_PRESENT was not set"
#endif
+#define xstr(s) str(s)
+#define str(s) #s
+
class cmModClass {
private:
std::string str;
@@ -13,6 +16,8 @@ class cmModClass {
cmModClass(std::string foo) {
str = foo + " World ";
str += CMAKE_COMPILER_DEFINE_STR;
+ str += ' ';
+ str += xstr(MESON_MAGIC_FLAG);
}
inline std::string getStr() const { return str; }
diff --git a/test cases/cmake/19 cmake file/foolib.cmake.in b/test cases/cmake/19 cmake file/foolib.cmake.in
new file mode 100644
index 0000000..16e992b
--- /dev/null
+++ b/test cases/cmake/19 cmake file/foolib.cmake.in
@@ -0,0 +1 @@
+@foo@
diff --git a/test cases/cmake/19 cmake file/meson.build b/test cases/cmake/19 cmake file/meson.build
new file mode 100644
index 0000000..758bbee
--- /dev/null
+++ b/test cases/cmake/19 cmake file/meson.build
@@ -0,0 +1,14 @@
+project(
+ 'cmake config file',
+)
+
+cmake = import('cmake')
+
+cmake_conf = configuration_data()
+cmake_conf.set_quoted('foo', 'bar')
+cmake.configure_package_config_file(
+ name : 'foolib',
+ input : 'foolib.cmake.in',
+ install_dir : get_option('libdir') / 'cmake',
+ configuration : cmake_conf,
+)
diff --git a/test cases/cmake/19 cmake file/test.json b/test cases/cmake/19 cmake file/test.json
new file mode 100644
index 0000000..a8c4ba3
--- /dev/null
+++ b/test cases/cmake/19 cmake file/test.json
@@ -0,0 +1,5 @@
+{
+ "installed": [
+ {"file": "usr/lib/cmake/foolibConfig.cmake", "type": "file"}
+ ]
+}
diff --git a/test cases/cmake/7 cmake options/subprojects/cmOpts/CMakeLists.txt b/test cases/cmake/7 cmake options/subprojects/cmOpts/CMakeLists.txt
index 62b5990..873b9b3 100644
--- a/test cases/cmake/7 cmake options/subprojects/cmOpts/CMakeLists.txt
+++ b/test cases/cmake/7 cmake options/subprojects/cmOpts/CMakeLists.txt
@@ -1,5 +1,10 @@
cmake_minimum_required(VERSION 3.7)
+project(testPro)
if(NOT "${SOME_CMAKE_VAR}" STREQUAL "something")
message(FATAL_ERROR "Setting the CMake var failed")
endif()
+
+if(NOT "${CMAKE_PREFIX_PATH}" STREQUAL "val1;val2")
+ message(FATAL_ERROR "Setting the CMAKE_PREFIX_PATH failed '${CMAKE_PREFIX_PATH}'")
+endif()
diff --git a/test cases/cmake/7 cmake options/test.json b/test cases/cmake/7 cmake options/test.json
new file mode 100644
index 0000000..046e2ee
--- /dev/null
+++ b/test cases/cmake/7 cmake options/test.json
@@ -0,0 +1,9 @@
+{
+ "matrix": {
+ "options": {
+ "cmake_prefix_path": [
+ { "val": ["val1", "val2"] }
+ ]
+ }
+ }
+}
diff --git a/test cases/common/125 object only target/obj_generator.py b/test cases/common/125 object only target/obj_generator.py
index a33872a..afdbc09 100755
--- a/test cases/common/125 object only target/obj_generator.py
+++ b/test cases/common/125 object only target/obj_generator.py
@@ -13,6 +13,8 @@ if __name__ == '__main__':
ofile = sys.argv[3]
if compiler.endswith('cl'):
cmd = [compiler, '/nologo', '/MDd', '/Fo' + ofile, '/c', ifile]
+ elif sys.platform == 'sunos5':
+ cmd = [compiler, '-fpic', '-c', ifile, '-o', ofile]
else:
cmd = [compiler, '-c', ifile, '-o', ofile]
sys.exit(subprocess.call(cmd))
diff --git a/test cases/common/145 special characters/arg-char-test.c b/test cases/common/145 special characters/arg-char-test.c
new file mode 100644
index 0000000..04e02f8
--- /dev/null
+++ b/test cases/common/145 special characters/arg-char-test.c
@@ -0,0 +1,10 @@
+#include <assert.h>
+#include <stdio.h>
+
+int main(int argc, char **argv) {
+ char c = CHAR;
+ assert(argc == 2);
+ if (c != argv[1][0])
+ fprintf(stderr, "Expected %x, got %x\n", (unsigned int) c, (unsigned int) argv[1][0]);
+ assert(c == argv[1][0]);
+}
diff --git a/test cases/common/145 special characters/arg-string-test.c b/test cases/common/145 special characters/arg-string-test.c
new file mode 100644
index 0000000..199fd79
--- /dev/null
+++ b/test cases/common/145 special characters/arg-string-test.c
@@ -0,0 +1,12 @@
+#include <assert.h>
+#include <stdio.h>
+#include <string.h>
+
+int main(int argc, char **argv) {
+ const char *s = CHAR;
+ assert(argc == 2);
+ assert(strlen(s) == 1);
+ if (s[0] != argv[1][0])
+ fprintf(stderr, "Expected %x, got %x\n", (unsigned int) s[0], (unsigned int) argv[1][0]);
+ assert(s[0] == argv[1][0]);
+}
diff --git a/test cases/common/145 special characters/arg-unquoted-test.c b/test cases/common/145 special characters/arg-unquoted-test.c
new file mode 100644
index 0000000..7f679ca
--- /dev/null
+++ b/test cases/common/145 special characters/arg-unquoted-test.c
@@ -0,0 +1,17 @@
+#include <assert.h>
+#include <stdio.h>
+#include <string.h>
+
+#define Q(x) #x
+#define QUOTE(x) Q(x)
+
+int main(int argc, char **argv) {
+ const char *s = QUOTE(CHAR);
+ assert(argc == 2);
+ assert(strlen(s) == 1);
+ if (s[0] != argv[1][0])
+ fprintf(stderr, "Expected %x, got %x\n", (unsigned int) s[0], (unsigned int) argv[1][0]);
+ assert(s[0] == argv[1][0]);
+ // There is no way to convert a macro argument into a character constant.
+ // Otherwise we'd test that as well
+}
diff --git a/test cases/common/145 special characters/meson.build b/test cases/common/145 special characters/meson.build
index ecba650..579601e 100644
--- a/test cases/common/145 special characters/meson.build
+++ b/test cases/common/145 special characters/meson.build
@@ -35,3 +35,41 @@ gen2 = custom_target('gen2',
output : 'result2',
install : true,
install_dir : get_option('datadir'))
+
+# Test that we can pass these special characters in compiler arguments
+#
+# (this part of the test is crafted so we don't try to use these special
+# characters in filenames or target names)
+#
+# TODO: similar tests needed for languages other than C
+# TODO: add similar test for quote, doublequote, and hash, carefully
+# Re hash, see
+# https://docs.microsoft.com/en-us/cpp/build/reference/d-preprocessor-definitions
+
+special = [
+ ['amp', '&'],
+ ['at', '@'],
+ ['backslash', '\\'],
+ ['dollar', '$'],
+ ['gt', '>'],
+ ['lt', '<'],
+ ['slash', '/'],
+]
+
+cc = meson.get_compiler('c')
+
+foreach s : special
+ args = '-DCHAR="@0@"'.format(s[1])
+ e = executable('arg-string-' + s[0], 'arg-string-test.c', c_args: args)
+ test('arg-string-' + s[0], e, args: s[1])
+
+ args = '-DCHAR=@0@'.format(s[1])
+ e = executable('arg-unquoted-' + s[0], 'arg-unquoted-test.c', c_args: args)
+ test('arg-unquoted-' + s[0], e, args: s[1])
+endforeach
+
+foreach s : special
+ args = '-DCHAR=\'@0@\''.format(s[1])
+ e = executable('arg-char-' + s[0], 'arg-char-test.c', c_args: args)
+ test('arg-char-' + s[0], e, args: s[1])
+endforeach
diff --git a/test cases/common/157 wrap file should not failed/meson.build b/test cases/common/157 wrap file should not failed/meson.build
index cffce2f..48d1068 100644
--- a/test cases/common/157 wrap file should not failed/meson.build
+++ b/test cases/common/157 wrap file should not failed/meson.build
@@ -12,3 +12,5 @@ libbar = bar.get_variable('libbar')
executable('grabprog', files('src/subprojects/prog.c'))
executable('grabprog2', files('src/subprojects/foo/prog2.c'))
subdir('src')
+
+subproject('patchdir')
diff --git a/test cases/common/157 wrap file should not failed/subprojects/packagefiles/foo-1.0/meson.build b/test cases/common/157 wrap file should not failed/subprojects/packagefiles/foo-1.0/meson.build
new file mode 100644
index 0000000..dbaf91f
--- /dev/null
+++ b/test cases/common/157 wrap file should not failed/subprojects/packagefiles/foo-1.0/meson.build
@@ -0,0 +1,2 @@
+project('static lib patchdir', 'c')
+libfoo = static_library('foo', 'foo.c')
diff --git a/test cases/common/157 wrap file should not failed/subprojects/patchdir.wrap b/test cases/common/157 wrap file should not failed/subprojects/patchdir.wrap
new file mode 100644
index 0000000..1a2134c
--- /dev/null
+++ b/test cases/common/157 wrap file should not failed/subprojects/patchdir.wrap
@@ -0,0 +1,9 @@
+[wrap-file]
+directory = foo-1.0-patchdir
+
+source_url = http://something.invalid
+source_filename = foo-1.0.tar.xz
+source_hash = 9ed8f67d75e43d3be161efb6eddf30dd01995a958ca83951ea64234bac8908c1
+lead_directory_missing = true
+
+patch_directory = foo-1.0
diff --git a/test cases/common/234 very long commmand line/codegen.py b/test cases/common/234 very long commmand line/codegen.py
new file mode 100755
index 0000000..4de78ce
--- /dev/null
+++ b/test cases/common/234 very long commmand line/codegen.py
@@ -0,0 +1,6 @@
+#!/usr/bin/env python3
+
+import sys
+
+with open(sys.argv[2], 'w') as f:
+ print('int func{n}(void) {{ return {n}; }}'.format(n=sys.argv[1]), file=f)
diff --git a/test cases/common/234 very long commmand line/main.c b/test cases/common/234 very long commmand line/main.c
new file mode 100644
index 0000000..dbb64a8
--- /dev/null
+++ b/test cases/common/234 very long commmand line/main.c
@@ -0,0 +1,5 @@
+int main(int argc, char **argv) {
+ (void) argc;
+ (void) argv;
+ return 0;
+}
diff --git a/test cases/common/234 very long commmand line/meson.build b/test cases/common/234 very long commmand line/meson.build
new file mode 100644
index 0000000..fe47b5e
--- /dev/null
+++ b/test cases/common/234 very long commmand line/meson.build
@@ -0,0 +1,44 @@
+project('very long command lines', 'c')
+
+# Get the current system's commandline length limit.
+if build_machine.system() == 'windows'
+ # Various limits on windows:
+ # cmd.exe: 8kb
+ # CreateProcess: 32kb
+ limit = 32767
+elif build_machine.system() == 'cygwin'
+ # cygwin-to-win32: see above
+ # cygwin-to-cygwin: no limit?
+ # Cygwin is slow, so only test it lightly here.
+ limit = 8192
+else
+ # ninja passes whole line as a single argument, for which
+ # the limit is 128k as of Linux 2.6.23. See MAX_ARG_STRLEN.
+ # BSD seems similar, see https://www.in-ulm.de/~mascheck/various/argmax
+ limit = 131072
+endif
+# Now exceed that limit, but not so far that the test takes too long.
+name = 'ALongFilenameMuchLongerThanIsNormallySeenAndReallyHardToReadThroughToTheEndAMooseOnceBitMySisterSheNowWorksAtLLamaFreshFarmsThisHasToBeSoLongThatWeExceed128KBWithoutCompilingTooManyFiles'
+namelen = 187
+nfiles = 50 + limit / namelen
+message('Expected link commandline length is approximately ' + '@0@'.format((nfiles * (namelen+28))))
+
+seq = run_command('seq.py', '1', '@0@'.format(nfiles)).stdout().strip().split('\n')
+
+sources = []
+codegen = find_program('codegen.py')
+
+foreach i : seq
+ sources += custom_target('codegen' + i,
+ command: [codegen, i, '@OUTPUT@'],
+ output: name + i + '.c')
+endforeach
+
+shared_library('sharedlib', sources)
+static_library('staticlib', sources)
+executable('app', 'main.c', sources)
+
+# Also test short commandlines to make sure that doesn't regress
+shared_library('sharedlib0', sources[0])
+static_library('staticlib0', sources[0])
+executable('app0', 'main.c', sources[0])
diff --git a/test cases/common/234 very long commmand line/seq.py b/test cases/common/234 very long commmand line/seq.py
new file mode 100755
index 0000000..637bf57
--- /dev/null
+++ b/test cases/common/234 very long commmand line/seq.py
@@ -0,0 +1,6 @@
+#!/usr/bin/env python3
+
+import sys
+
+for i in range(int(sys.argv[1]), int(sys.argv[2])):
+    print(i)
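Unlike coreutils seq, this helper uses Python's half-open range(), so the end value itself is not printed; for this test that only shifts the generated file count by one. Illustrative run:

    $ ./seq.py 1 4
    1
    2
    3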
diff --git a/test cases/common/47 pkgconfig-gen/dependencies/main.c b/test cases/common/47 pkgconfig-gen/dependencies/main.c
index 61708d3..397d40c 100644
--- a/test cases/common/47 pkgconfig-gen/dependencies/main.c
+++ b/test cases/common/47 pkgconfig-gen/dependencies/main.c
@@ -1,5 +1,9 @@
#include <simple.h>
+#ifndef LIBFOO
+#error LIBFOO should be defined in pkgconfig cflags
+#endif
+
int main(int argc, char *argv[])
{
return simple_function() == 42 ? 0 : 1;
diff --git a/test cases/common/47 pkgconfig-gen/meson.build b/test cases/common/47 pkgconfig-gen/meson.build
index c251b9f..eb2afe4 100644
--- a/test cases/common/47 pkgconfig-gen/meson.build
+++ b/test cases/common/47 pkgconfig-gen/meson.build
@@ -43,7 +43,8 @@ pkgg.generate(
name : 'libfoo',
version : libver,
description : 'A foo library.',
- variables : ['foo=bar', 'datadir=${prefix}/data']
+ variables : ['foo=bar', 'datadir=${prefix}/data'],
+ extra_cflags : ['-DLIBFOO'],
)
pkgg.generate(
diff --git a/test cases/fortran/7 generated/meson.build b/test cases/fortran/7 generated/meson.build
index c2efe34..b555b17 100644
--- a/test cases/fortran/7 generated/meson.build
+++ b/test cases/fortran/7 generated/meson.build
@@ -1,11 +1,18 @@
# Tests whether Fortran source files created during configuration are properly
# scanned for dependency information
-project('generated', 'fortran')
+project('generated', 'fortran',
+ default_options : ['default_library=static'])
conf_data = configuration_data()
conf_data.set('ONE', 1)
conf_data.set('TWO', 2)
+conf_data.set('THREE', 3)
+
+configure_file(input : 'mod3.fpp', output : 'mod3.f90', configuration : conf_data)
+# Manually build absolute path to source file to test
+# https://github.com/mesonbuild/meson/issues/7265
+three = library('mod3', meson.current_build_dir() / 'mod3.f90')
templates_basenames = ['mod2', 'mod1']
generated_sources = []
@@ -18,5 +25,5 @@ foreach template_basename : templates_basenames
endforeach
sources = ['prog.f90'] + generated_sources
-exe = executable('generated', sources)
+exe = executable('generated', sources, link_with: three)
test('generated', exe)
diff --git a/test cases/fortran/7 generated/mod1.fpp b/test cases/fortran/7 generated/mod1.fpp
index 42d1fde..c4decf6 100644
--- a/test cases/fortran/7 generated/mod1.fpp
+++ b/test cases/fortran/7 generated/mod1.fpp
@@ -1,6 +1,6 @@
module mod1
- implicit none
+implicit none
- integer, parameter :: modval1 = @ONE@
+integer, parameter :: modval1 = @ONE@
end module mod1
diff --git a/test cases/fortran/7 generated/mod2.fpp b/test cases/fortran/7 generated/mod2.fpp
index 594e9df..78ceae4 100644
--- a/test cases/fortran/7 generated/mod2.fpp
+++ b/test cases/fortran/7 generated/mod2.fpp
@@ -1,7 +1,7 @@
module mod2
- use mod1
- implicit none
+use mod1, only : modval1
+implicit none
- integer, parameter :: modval2 = @TWO@
+integer, parameter :: modval2 = @TWO@
end module mod2
diff --git a/test cases/fortran/7 generated/mod3.fpp b/test cases/fortran/7 generated/mod3.fpp
new file mode 100644
index 0000000..ab3db65
--- /dev/null
+++ b/test cases/fortran/7 generated/mod3.fpp
@@ -0,0 +1,6 @@
+module mod3
+implicit none
+
+integer, parameter :: modval3 = @THREE@
+
+end module mod3
diff --git a/test cases/fortran/7 generated/prog.f90 b/test cases/fortran/7 generated/prog.f90
index 8a102c0..6ee0bca 100644
--- a/test cases/fortran/7 generated/prog.f90
+++ b/test cases/fortran/7 generated/prog.f90
@@ -1,7 +1,8 @@
-program prog
-use mod2
+program generated
+use mod2, only : modval1, modval2
+use mod3, only : modval3
implicit none
-if (modval1 + modval2 /= 3) stop 1
+if (modval1 + modval2 + modval3 /= 6) error stop
-end program prog
+end program generated
diff --git a/test cases/frameworks/7 gnome/mkenums/meson.build b/test cases/frameworks/7 gnome/mkenums/meson.build
index af4a901..3d7adf0 100644
--- a/test cases/frameworks/7 gnome/mkenums/meson.build
+++ b/test cases/frameworks/7 gnome/mkenums/meson.build
@@ -126,6 +126,14 @@ enums5 = gnome.mkenums_simple('enums5', sources : 'meson-sample.h',
install_header : true,
decorator : 'MESON_EXPORT',
header_prefix : '#include "meson-decls.h"')
+
+conf = configuration_data()
+conf.set('ENUM_FILE', 'enums5.h')
+main = configure_file(
+  input : 'main.c',
+  output : 'main5.c',
+  configuration : conf)
+
enumexe5 = executable('enumprog5', main, enums5, dependencies : gobj)
# Generate template then use as input to mkenums
diff --git a/test cases/linuxlike/13 cmake dependency/cmake_fake1/cmMesonTestF1Config.cmake b/test cases/linuxlike/13 cmake dependency/cmake_fake1/cmMesonTestF1Config.cmake
new file mode 100644
index 0000000..e12aeb9
--- /dev/null
+++ b/test cases/linuxlike/13 cmake dependency/cmake_fake1/cmMesonTestF1Config.cmake
@@ -0,0 +1,9 @@
+find_package(ZLIB)
+
+if(ZLIB_FOUND OR ZLIB_Found)
+  set(cmMesonTestF1_FOUND ON)
+  set(cmMesonTestF1_LIBRARIES ${ZLIB_LIBRARY})
+  set(cmMesonTestF1_INCLUDE_DIRS ${ZLIB_INCLUDE_DIR})
+else()
+  set(cmMesonTestF1_FOUND OFF)
+endif()
diff --git a/test cases/linuxlike/13 cmake dependency/cmake_fake2/cmMesonTestF2Config.cmake b/test cases/linuxlike/13 cmake dependency/cmake_fake2/cmMesonTestF2Config.cmake
new file mode 100644
index 0000000..a7a55d8
--- /dev/null
+++ b/test cases/linuxlike/13 cmake dependency/cmake_fake2/cmMesonTestF2Config.cmake
@@ -0,0 +1,9 @@
+find_package(ZLIB)
+
+if(ZLIB_FOUND OR ZLIB_Found)
+  set(cmMesonTestF2_FOUND ON)
+  set(cmMesonTestF2_LIBRARIES ${ZLIB_LIBRARY})
+  set(cmMesonTestF2_INCLUDE_DIRS ${ZLIB_INCLUDE_DIR})
+else()
+  set(cmMesonTestF2_FOUND OFF)
+endif()
diff --git a/test cases/linuxlike/13 cmake dependency/meson.build b/test cases/linuxlike/13 cmake dependency/meson.build
index 93824ab..9918a71 100644
--- a/test cases/linuxlike/13 cmake dependency/meson.build
+++ b/test cases/linuxlike/13 cmake dependency/meson.build
@@ -44,6 +44,8 @@ assert(depf2.found() == false, 'Invalid CMake targets should fail')
# Try to find cmMesonTestDep in a custom prefix
# setup_env.json is used by run_project_tests.py:_run_test to point to ./cmake_pref_env/
depPrefEnv = dependency('cmMesonTestDep', required : true, method : 'cmake')
+depPrefEnv1 = dependency('cmMesonTestF1', required : true, method : 'cmake')
+depPrefEnv2 = dependency('cmMesonTestF2', required : true, method : 'cmake')
# Try to find a dependency with a custom CMake module
diff --git a/test cases/linuxlike/13 cmake dependency/test.json b/test cases/linuxlike/13 cmake dependency/test.json
index 565713e..fc29f72 100644
--- a/test cases/linuxlike/13 cmake dependency/test.json
+++ b/test cases/linuxlike/13 cmake dependency/test.json
@@ -1,5 +1,5 @@
{
"env": {
- "CMAKE_PREFIX_PATH": "@ROOT@/cmake_pref_env"
+ "CMAKE_PREFIX_PATH": "@ROOT@/cmake_fake1;@ROOT@/cmake_fake2:@ROOT@/cmake_pref_env"
}
}
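The new CMAKE_PREFIX_PATH value intentionally mixes ';' and ':' as separators, so both delimiters have to be honoured before the fake cmMesonTestF1/cmMesonTestF2 packages become discoverable. A rough, self-contained illustration of that kind of splitting (not Meson's actual implementation):

    import re
    value = '@ROOT@/cmake_fake1;@ROOT@/cmake_fake2:@ROOT@/cmake_pref_env'
    prefixes = [p for p in re.split(r'[;:]', value) if p]
    # ['@ROOT@/cmake_fake1', '@ROOT@/cmake_fake2', '@ROOT@/cmake_pref_env']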
diff --git a/test cases/linuxlike/3 linker script/meson.build b/test cases/linuxlike/3 linker script/meson.build
index 63765e7..5901bf7 100644
--- a/test cases/linuxlike/3 linker script/meson.build
+++ b/test cases/linuxlike/3 linker script/meson.build
@@ -1,5 +1,11 @@
project('linker script', 'c')
+# Solaris 11.4 ld supports --version-script only when you also specify
+# -z gnu-version-script-compat
+if meson.get_compiler('c').get_linker_id() == 'ld.solaris'
+  add_project_link_arguments('-Wl,-z,gnu-version-script-compat', language: 'C')
+endif
+
# Static map file
mapfile = 'bob.map'
vflag = '-Wl,--version-script,@0@/@1@'.format(meson.current_source_dir(), mapfile)
diff --git a/test cases/unit/61 identity cross/build_wrapper.py b/test cases/unit/61 identity cross/build_wrapper.py
index b5fe7bb..15d5c07 100755
--- a/test cases/unit/61 identity cross/build_wrapper.py
+++ b/test cases/unit/61 identity cross/build_wrapper.py
@@ -1,5 +1,11 @@
#!/usr/bin/env python3
-import subprocess, sys
+import subprocess, sys, platform
-subprocess.call(["cc", "-DEXTERNAL_BUILD"] + sys.argv[1:])
+# Meson does not yet support Studio cc on Solaris, only gcc or clang
+if platform.system() == 'SunOS':
+    cc = 'gcc'
+else:
+    cc = 'cc'
+
+subprocess.call([cc, "-DEXTERNAL_BUILD"] + sys.argv[1:])
diff --git a/test cases/unit/61 identity cross/host_wrapper.py b/test cases/unit/61 identity cross/host_wrapper.py
index e88577c..a3a694a 100755
--- a/test cases/unit/61 identity cross/host_wrapper.py
+++ b/test cases/unit/61 identity cross/host_wrapper.py
@@ -1,5 +1,11 @@
#!/usr/bin/env python3
-import subprocess, sys
+import subprocess, sys, platform
-subprocess.call(["cc", "-DEXTERNAL_HOST"] + sys.argv[1:])
+# Meson does not yet support Studio cc on Solaris, only gcc or clang
+if platform.system() == 'SunOS':
+    cc = 'gcc'
+else:
+    cc = 'cc'
+
+subprocess.call([cc, "-DEXTERNAL_HOST"] + sys.argv[1:])
diff --git a/test cases/unit/78 wrap-git/meson.build b/test cases/unit/78 wrap-git/meson.build
new file mode 100644
index 0000000..b0af30a
--- /dev/null
+++ b/test cases/unit/78 wrap-git/meson.build
@@ -0,0 +1,4 @@
+project('test-wrap-git')
+
+exe = subproject('wrap_git').get_variable('exe')
+test('test1', exe)
diff --git a/test cases/unit/78 wrap-git/subprojects/packagefiles/wrap_git_builddef/meson.build b/test cases/unit/78 wrap-git/subprojects/packagefiles/wrap_git_builddef/meson.build
new file mode 100644
index 0000000..2570f77
--- /dev/null
+++ b/test cases/unit/78 wrap-git/subprojects/packagefiles/wrap_git_builddef/meson.build
@@ -0,0 +1,3 @@
+project('foo', 'c')
+
+exe = executable('app', 'main.c')
diff --git a/test cases/unit/78 wrap-git/subprojects/wrap_git_upstream/main.c b/test cases/unit/78 wrap-git/subprojects/wrap_git_upstream/main.c
new file mode 100644
index 0000000..8488f4e
--- /dev/null
+++ b/test cases/unit/78 wrap-git/subprojects/wrap_git_upstream/main.c
@@ -0,0 +1,4 @@
+int main(void)
+{
+    return 0;
+}
diff --git a/tools/copy_files.py b/tools/copy_files.py
new file mode 100644
index 0000000..39eaa0a
--- /dev/null
+++ b/tools/copy_files.py
@@ -0,0 +1,55 @@
+#!/usr/bin/env python3
+
+
+# Copyright 2018 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+'''
+Copy files
+'''
+
+import argparse
+import shutil
+import typing as T
+from pathlib import Path
+
+PathLike = T.Union[Path,str]
+
+def copy_files(files: T.List[str], input_dir: PathLike, output_dir: PathLike) -> None:
+    if not input_dir:
+        raise ValueError(f'Input directory value is not set')
+    if not output_dir:
+        raise ValueError(f'Output directory value is not set')
+
+    input_dir = Path(input_dir).resolve()
+    output_dir = Path(output_dir).resolve()
+    output_dir.mkdir(parents=True, exist_ok=True)
+
+    for f in files:
+        if (input_dir/f).is_dir():
+            shutil.copytree(input_dir/f, output_dir/f)
+        else:
+            shutil.copy2(input_dir/f, output_dir/f)
+
+if __name__ == '__main__':
+    parser = argparse.ArgumentParser(description='Copy files')
+    parser.add_argument('files', metavar='FILE', nargs='*')
+    parser.add_argument('-C', dest='input_dir', required=True)
+    parser.add_argument('--output-dir', required=True)
+
+    args = parser.parse_args()
+
+    copy_files(files=args.files,
+               input_dir=args.input_dir,
+               output_dir=args.output_dir)
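A sketch of how tools/copy_files.py might be invoked (the file and directory names here are illustrative, not taken from the patch). Directories listed as FILE arguments are copied recursively via shutil.copytree, plain files via shutil.copy2:

    $ python3 tools/copy_files.py -C docs/markdown --output-dir /tmp/docs-copy Tutorial.md Syntax.md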
diff --git a/tools/regenerate_docs.py b/tools/regenerate_docs.py
new file mode 100755
index 0000000..d443570
--- /dev/null
+++ b/tools/regenerate_docs.py
@@ -0,0 +1,150 @@
+#!/usr/bin/env python3
+
+
+# Copyright 2018 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+'''
+Regenerate markdown docs by using `meson.py` from the root dir
+'''
+
+import argparse
+import jinja2
+import os
+import re
+import subprocess
+import sys
+import textwrap
+import typing as T
+from pathlib import Path
+
+PathLike = T.Union[Path,str]
+
+def _get_meson_output(root_dir: Path, args: T.List):
+    env = os.environ.copy()
+    env['COLUMNS'] = '80'
+    return subprocess.run([str(sys.executable), str(root_dir/'meson.py')] + args, check=True, capture_output=True, text=True, env=env).stdout.strip()
+
+def get_commands_data(root_dir: Path):
+    usage_start_pattern = re.compile(r'^usage: ', re.MULTILINE)
+    positional_start_pattern = re.compile(r'^positional arguments:[\t ]*[\r\n]+', re.MULTILINE)
+    options_start_pattern = re.compile(r'^optional arguments:[\t ]*[\r\n]+', re.MULTILINE)
+    commands_start_pattern = re.compile(r'^[A-Za-z ]*[Cc]ommands:[\t ]*[\r\n]+', re.MULTILINE)
+
+    def get_next_start(iterators, end):
+        return next((i.start() for i in iterators if i), end)
+
+    def normalize_text(text):
+        # clean up formatting
+        out = text
+        out = re.sub(r'\r\n', r'\r', out, flags=re.MULTILINE) # replace newlines with a linux EOL
+        out = re.sub(r'^ +$', '', out, flags=re.MULTILINE) # remove trailing whitespace
+        out = re.sub(r'(?:^\n+|\n+$)', '', out) # remove trailing empty lines
+        return out
+
+    def parse_cmd(cmd):
+        cmd_len = len(cmd)
+        usage = usage_start_pattern.search(cmd)
+        positionals = positional_start_pattern.search(cmd)
+        options = options_start_pattern.search(cmd)
+        commands = commands_start_pattern.search(cmd)
+
+        arguments_start = get_next_start([positionals, options, commands], None)
+        assert arguments_start
+
+        # replace `usage:` with `$` and dedent
+        dedent_size = (usage.end() - usage.start()) - len('$ ')
+        usage_text = textwrap.dedent(f'{dedent_size * " "}$ {normalize_text(cmd[usage.end():arguments_start])}')
+
+        return {
+            'usage': usage_text,
+            'arguments': normalize_text(cmd[arguments_start:cmd_len]),
+        }
+
+    def clean_dir_arguments(text):
+        # Remove platform specific defaults
+        args = [
+            'prefix',
+            'bindir',
+            'datadir',
+            'includedir',
+            'infodir',
+            'libdir',
+            'libexecdir',
+            'localedir',
+            'localstatedir',
+            'mandir',
+            'sbindir',
+            'sharedstatedir',
+            'sysconfdir'
+        ]
+        out = text
+        for a in args:
+            out = re.sub(r'(--' + a + r' .+?)\s+\(default:.+?\)(\.)?', r'\1\2', out, flags=re.MULTILINE|re.DOTALL)
+        return out
+
+    output = _get_meson_output(root_dir, ['--help'])
+    commands = set(c.strip() for c in re.findall(r'usage:(?:.+)?{((?:[a-z]+,*)+?)}', output, re.MULTILINE|re.DOTALL)[0].split(','))
+    commands.remove('help')
+
+    cmd_data = dict()
+
+    for cmd in commands:
+        cmd_output = _get_meson_output(root_dir, [cmd, '--help'])
+        cmd_data[cmd] = parse_cmd(cmd_output)
+        if cmd in ['setup', 'configure']:
+            cmd_data[cmd]['arguments'] = clean_dir_arguments(cmd_data[cmd]['arguments'])
+
+    return cmd_data
+
+def regenerate_commands(root_dir: Path, output_dir: Path) -> None:
+    with open(root_dir/'docs'/'markdown_dynamic'/'Commands.md') as f:
+        template = f.read()
+
+    cmd_data = get_commands_data(root_dir)
+
+    t = jinja2.Template(template, undefined=jinja2.StrictUndefined, keep_trailing_newline=True)
+    content = t.render(cmd_help=cmd_data)
+
+    output_file = output_dir/'Commands.md'
+    with open(output_file, 'w') as f:
+        f.write(content)
+
+    print(f'`{output_file}` was regenerated')
+
+def regenerate_docs(output_dir: PathLike,
+                    dummy_output_file: T.Optional[PathLike]) -> None:
+    if not output_dir:
+        raise ValueError(f'Output directory value is not set')
+
+    output_dir = Path(output_dir).resolve()
+    output_dir.mkdir(parents=True, exist_ok=True)
+
+    root_dir = Path(__file__).resolve().parent.parent
+
+    regenerate_commands(root_dir, output_dir)
+
+    if dummy_output_file:
+        with open(output_dir/dummy_output_file, 'w') as f:
+            f.write('dummy file for custom_target output')
+
+if __name__ == '__main__':
+    parser = argparse.ArgumentParser(description='Generate meson docs')
+    parser.add_argument('--output-dir', required=True)
+    parser.add_argument('--dummy-output-file', type=str)
+
+    args = parser.parse_args()
+
+    regenerate_docs(output_dir=args.output_dir,
+                    dummy_output_file=args.dummy_output_file)
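A sketch of a typical invocation of tools/regenerate_docs.py (the output path is illustrative). The script needs jinja2 importable, runs `meson.py <command> --help` for every subcommand found in the top-level help, and renders docs/markdown_dynamic/Commands.md into the output directory:

    $ python3 tools/regenerate_docs.py --output-dir /tmp/generated-docs --dummy-output-file dummy.txt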