-rw-r--r--.appveyor.yml63
-rw-r--r--.travis.yml8
-rw-r--r--README.md2
-rw-r--r--ciimage/Dockerfile2
-rw-r--r--contributing.md8
-rw-r--r--contributing.txt4
-rw-r--r--cross/ubuntu-armhf.txt4
-rw-r--r--data/shell-completions/zsh/_meson2
-rw-r--r--data/syntax-highlighting/vim/syntax/meson.vim1
-rw-r--r--docs/markdown/Adding-arguments.md2
-rw-r--r--docs/markdown/Adding-new-projects-to-wrapdb.md5
-rw-r--r--docs/markdown/Configuring-a-build-directory.md4
-rw-r--r--docs/markdown/Contributing.md39
-rw-r--r--docs/markdown/Cross-compilation.md45
-rw-r--r--docs/markdown/Dependencies.md78
-rw-r--r--docs/markdown/FAQ.md240
-rw-r--r--docs/markdown/Feature-autodetection.md2
-rw-r--r--docs/markdown/Generating-sources.md85
-rw-r--r--docs/markdown/Gnome-module.md20
-rw-r--r--docs/markdown/IDE-integration.md10
-rw-r--r--docs/markdown/Icestorm-module.md6
-rw-r--r--docs/markdown/Installing.md13
-rw-r--r--docs/markdown/Pkg-config-files.md2
-rw-r--r--docs/markdown/Pkgconfig-module.md12
-rw-r--r--docs/markdown/Project-templates.md2
-rw-r--r--docs/markdown/Reference-manual.md153
-rw-r--r--docs/markdown/Reference-tables.md18
-rw-r--r--docs/markdown/Release-notes-for-0.45.0.md198
-rw-r--r--docs/markdown/Release-notes-for-0.46.0.md23
-rw-r--r--docs/markdown/Subprojects.md24
-rw-r--r--docs/markdown/Unit-tests.md10
-rw-r--r--docs/markdown/Users.md6
-rw-r--r--docs/markdown/howtox.md4
-rw-r--r--docs/markdown/index.md10
-rw-r--r--docs/markdown/snippets/altered-logging.md5
-rw-r--r--docs/markdown/snippets/both-libraries.md9
-rw-r--r--docs/markdown/snippets/compiler-object-run_command.md10
-rw-r--r--docs/markdown/snippets/config-tool-cross.md13
-rw-r--r--docs/markdown/snippets/declare_dependency-link_whole.md4
-rw-r--r--docs/markdown/snippets/del-old-names.md7
-rw-r--r--docs/markdown/snippets/deprecations.md14
-rw-r--r--docs/markdown/snippets/fpga.md12
-rw-r--r--docs/markdown/snippets/gen-subdirs.md21
-rw-r--r--docs/markdown/snippets/hexnumbers.md5
-rw-r--r--docs/markdown/snippets/if-release.md7
-rw-r--r--docs/markdown/snippets/improved-help.md6
-rw-r--r--docs/markdown/snippets/improved-meson-init.md19
-rw-r--r--docs/markdown/snippets/install_data-rename.md11
-rw-r--r--docs/markdown/snippets/install_subdir-strip_directory.md4
-rw-r--r--docs/markdown/snippets/intopt.md6
-rw-r--r--docs/markdown/snippets/new-wrap-mode.md3
-rw-r--r--docs/markdown/snippets/pkg-config-fix-static-only.md12
-rw-r--r--docs/markdown/snippets/pkgconfig-generator.md14
-rw-r--r--docs/markdown/snippets/pkgconfig-requires-non-string.md5
-rw-r--r--docs/markdown/snippets/project-license.md4
-rw-r--r--docs/markdown/snippets/rust-cross.md16
-rw-r--r--docs/markdown/snippets/templates.md8
-rw-r--r--docs/markdown/snippets/windows-resources-custom-targets.md3
-rw-r--r--docs/markdown/snippets/wrap_promote.md11
-rw-r--r--docs/markdown/snippets/yield.md8
-rw-r--r--docs/sitemap.txt1
-rwxr-xr-xghwt.py2
-rw-r--r--man/meson.12
-rw-r--r--man/mesonconf.12
-rw-r--r--man/mesonintrospect.12
-rw-r--r--man/mesontest.12
-rw-r--r--man/wraptool.12
-rwxr-xr-xmeson.py4
-rw-r--r--mesonbuild/backend/backends.py64
-rw-r--r--mesonbuild/backend/ninjabackend.py226
-rw-r--r--mesonbuild/backend/vs2010backend.py11
-rw-r--r--mesonbuild/backend/xcodebackend.py8
-rw-r--r--mesonbuild/build.py280
-rw-r--r--mesonbuild/compilers/__init__.py3
-rw-r--r--mesonbuild/compilers/c.py77
-rw-r--r--mesonbuild/compilers/compilers.py85
-rw-r--r--mesonbuild/compilers/cpp.py2
-rw-r--r--mesonbuild/compilers/cs.py46
-rw-r--r--mesonbuild/compilers/d.py14
-rw-r--r--mesonbuild/compilers/vala.py4
-rw-r--r--mesonbuild/coredata.py99
-rw-r--r--mesonbuild/dependencies/__init__.py3
-rw-r--r--mesonbuild/dependencies/base.py59
-rw-r--r--mesonbuild/dependencies/boost.py683
-rw-r--r--mesonbuild/dependencies/dev.py9
-rw-r--r--mesonbuild/dependencies/misc.py660
-rw-r--r--mesonbuild/dependencies/ui.py28
-rw-r--r--mesonbuild/environment.py67
-rw-r--r--mesonbuild/interpreter.py489
-rw-r--r--mesonbuild/interpreterbase.py23
-rw-r--r--mesonbuild/mconf.py185
-rw-r--r--mesonbuild/mesonlib.py86
-rw-r--r--mesonbuild/mesonmain.py34
-rw-r--r--mesonbuild/minit.py155
-rw-r--r--mesonbuild/mintro.py89
-rw-r--r--mesonbuild/mlog.py60
-rw-r--r--mesonbuild/modules/gnome.py96
-rw-r--r--mesonbuild/modules/pkgconfig.py152
-rw-r--r--mesonbuild/modules/python3.py8
-rw-r--r--mesonbuild/modules/qt.py52
-rw-r--r--mesonbuild/modules/unstable_icestorm.py13
-rw-r--r--mesonbuild/mparser.py3
-rw-r--r--mesonbuild/mtest.py523
-rw-r--r--mesonbuild/optinterpreter.py2
-rw-r--r--mesonbuild/rewriter.py28
-rw-r--r--mesonbuild/scripts/coverage.py185
-rw-r--r--mesonbuild/scripts/depfixer.py67
-rw-r--r--mesonbuild/scripts/gtkdochelper.py2
-rw-r--r--mesonbuild/scripts/meson_exe.py8
-rw-r--r--mesonbuild/scripts/meson_install.py17
-rw-r--r--mesonbuild/scripts/yelphelper.py5
-rw-r--r--mesonbuild/wrap/__init__.py7
-rwxr-xr-xmesonconf.py5
-rwxr-xr-xmesonintrospect.py5
-rwxr-xr-xmesonrewriter.py5
-rwxr-xr-xmesontest.py5
-rwxr-xr-xmsi/createmsi.py14
-rwxr-xr-xrun_project_tests.py71
-rwxr-xr-xrun_tests.py2
-rwxr-xr-xrun_unittests.py449
-rw-r--r--setup.py6
-rwxr-xr-xskip_ci.py74
-rw-r--r--test cases/common/12 data/installed_files.txt5
-rw-r--r--test cases/common/12 data/meson.build8
-rw-r--r--test cases/common/12 data/somefile.txt0
-rw-r--r--test cases/common/12 data/to_be_renamed_1.txt0
-rw-r--r--test cases/common/12 data/to_be_renamed_3.txt0
-rw-r--r--test cases/common/12 data/to_be_renamed_4.txt0
-rw-r--r--test cases/common/12 data/vanishing/to_be_renamed_2.txt0
-rw-r--r--test cases/common/138 include order/inc1/hdr.h1
-rw-r--r--test cases/common/138 include order/inc2/hdr.h1
-rw-r--r--test cases/common/138 include order/meson.build4
-rw-r--r--test cases/common/138 include order/ordertest.c11
-rw-r--r--test cases/common/142 compute int/config.h.in2
-rw-r--r--test cases/common/142 compute int/meson.build8
-rw-r--r--test cases/common/142 compute int/prog.c.in9
-rw-r--r--test cases/common/145 whole archive/allofme/meson.build1
-rw-r--r--test cases/common/145 whole archive/exe/meson.build3
-rw-r--r--test cases/common/145 whole archive/exe2/meson.build2
-rw-r--r--test cases/common/145 whole archive/exe3/meson.build1
-rw-r--r--test cases/common/145 whole archive/exe4/meson.build1
-rw-r--r--test cases/common/145 whole archive/func1.c (renamed from test cases/common/145 whole archive/libfile.c)0
-rw-r--r--test cases/common/145 whole archive/func2.c (renamed from test cases/common/145 whole archive/dylib.c)0
-rw-r--r--test cases/common/145 whole archive/meson.build40
-rw-r--r--test cases/common/145 whole archive/sh_func2_dep_func1/meson.build4
-rw-r--r--test cases/common/145 whole archive/sh_func2_linked_func1/meson.build3
-rw-r--r--test cases/common/145 whole archive/sh_func2_transdep_func1/meson.build6
-rw-r--r--test cases/common/145 whole archive/sh_only_link_whole/meson.build1
-rw-r--r--test cases/common/145 whole archive/shlib/meson.build4
-rw-r--r--test cases/common/145 whole archive/st_func1/meson.build1
-rw-r--r--test cases/common/145 whole archive/st_func2/meson.build1
-rw-r--r--test cases/common/145 whole archive/stlib/meson.build1
-rw-r--r--test cases/common/145 whole archive/wholeshlib/meson.build1
-rw-r--r--test cases/common/16 configure file/config7.h.in16
-rw-r--r--test cases/common/16 configure file/meson.build18
-rw-r--r--test cases/common/16 configure file/prog7.c10
-rw-r--r--test cases/common/168 disabler/meson.build4
-rw-r--r--test cases/common/178 preserve gendir/base.inp (renamed from test cases/common/174 preserve gendir/base.inp)0
-rw-r--r--test cases/common/178 preserve gendir/com/mesonbuild/subbie.inp (renamed from test cases/common/174 preserve gendir/com/mesonbuild/subbie.inp)0
-rwxr-xr-xtest cases/common/178 preserve gendir/genprog.py (renamed from test cases/common/174 preserve gendir/genprog.py)0
-rw-r--r--test cases/common/178 preserve gendir/meson.build (renamed from test cases/common/174 preserve gendir/meson.build)0
-rw-r--r--test cases/common/178 preserve gendir/testprog.c (renamed from test cases/common/174 preserve gendir/testprog.c)0
-rw-r--r--test cases/common/179 source in dep/bar.cpp5
-rw-r--r--test cases/common/179 source in dep/foo.c3
-rw-r--r--test cases/common/179 source in dep/meson.build6
-rw-r--r--test cases/common/180 generator link whole/export.h18
-rwxr-xr-xtest cases/common/180 generator link whole/generator.py30
-rw-r--r--test cases/common/180 generator link whole/main.c11
-rw-r--r--test cases/common/180 generator link whole/meson.build65
-rw-r--r--test cases/common/180 generator link whole/meson_test_function.tmpl0
-rw-r--r--test cases/common/180 generator link whole/pull_meson_test_function.c6
-rw-r--r--test cases/common/181 initial c_args/meson.build7
-rw-r--r--test cases/common/181 initial c_args/test_args.txt4
-rw-r--r--test cases/common/182 identical target name in subproject flat layout/foo.c1
-rw-r--r--test cases/common/182 identical target name in subproject flat layout/main.c16
-rw-r--r--test cases/common/182 identical target name in subproject flat layout/meson.build11
-rw-r--r--test cases/common/182 identical target name in subproject flat layout/subprojects/subproj/foo.c1
-rw-r--r--test cases/common/182 identical target name in subproject flat layout/subprojects/subproj/meson.build3
-rw-r--r--test cases/common/184 as-needed/config.h14
-rw-r--r--test cases/common/184 as-needed/libA.cpp7
-rw-r--r--test cases/common/184 as-needed/libA.h5
-rw-r--r--test cases/common/184 as-needed/libB.cpp19
-rw-r--r--test cases/common/184 as-needed/main.cpp7
-rw-r--r--test cases/common/184 as-needed/meson.build13
-rw-r--r--test cases/common/185 ndebug if-release enabled/main.c15
-rw-r--r--test cases/common/185 ndebug if-release enabled/meson.build7
-rw-r--r--test cases/common/186 ndebug if-release disabled/main.c7
-rw-r--r--test cases/common/186 ndebug if-release disabled/meson.build7
-rw-r--r--test cases/common/187 subproject version/meson.build10
-rw-r--r--test cases/common/187 subproject version/subprojects/a/meson.build5
-rw-r--r--test cases/common/188 subdir_done/meson.build10
-rw-r--r--test cases/common/189 bothlibraries/libfile.c7
-rw-r--r--test cases/common/189 bothlibraries/main.c8
-rw-r--r--test cases/common/189 bothlibraries/meson.build12
-rw-r--r--test cases/common/189 bothlibraries/mylib.h13
-rw-r--r--test cases/common/51 pkgconfig-gen/dependencies/meson.build32
-rw-r--r--test cases/common/51 pkgconfig-gen/meson.build6
-rw-r--r--test cases/common/64 custom header generator/meson.build6
-rwxr-xr-xtest cases/common/72 build always/version_gen.py12
-rw-r--r--test cases/common/98 gen extra/srcgen3.py1
-rw-r--r--test cases/csharp/1 basic/meson.build2
-rw-r--r--test cases/csharp/1 basic/prog.cs5
-rw-r--r--test cases/csharp/1 basic/text.cs7
-rw-r--r--test cases/csharp/4 external dep/meson.build7
-rw-r--r--test cases/d/3 shared library/meson.build9
-rw-r--r--test cases/d/6 unittest/app.d4
-rw-r--r--test cases/d/6 unittest/meson.build6
-rw-r--r--test cases/d/6 unittest/second_unit.d10
-rw-r--r--test cases/d/9 features/app.d9
-rw-r--r--test cases/d/9 features/extra.d9
-rw-r--r--test cases/d/9 features/meson.build25
-rw-r--r--test cases/failing/70 install_data rename bad size/file1.txt0
-rw-r--r--test cases/failing/70 install_data rename bad size/file2.txt0
-rw-r--r--test cases/failing/70 install_data rename bad size/meson.build3
-rw-r--r--test cases/failing/71 skip only subdir/meson.build8
-rw-r--r--test cases/failing/71 skip only subdir/subdir/meson.build3
-rw-r--r--test cases/frameworks/1 boost/meson.build5
-rw-r--r--test cases/frameworks/10 gtk-doc/include/meson.build3
-rw-r--r--test cases/frameworks/10 gtk-doc/installed_files.txt (renamed from test cases/frameworks/10 gtk-doc/installed_files.txt.bak)12
-rw-r--r--test cases/frameworks/10 gtk-doc/meson.build18
-rw-r--r--test cases/frameworks/11 gir subproject/meson.build12
-rw-r--r--test cases/frameworks/12 multiple gir/meson.build5
-rw-r--r--test cases/frameworks/13 yelp/meson.build6
-rw-r--r--test cases/frameworks/14 doxygen/include/comedian.h2
-rw-r--r--test cases/frameworks/14 doxygen/include/spede.h7
-rw-r--r--test cases/frameworks/14 doxygen/meson.build6
-rw-r--r--test cases/frameworks/14 doxygen/src/spede.cpp2
-rw-r--r--test cases/frameworks/15 llvm/meson.build7
-rw-r--r--test cases/frameworks/16 sdl2/meson.build6
-rw-r--r--test cases/frameworks/19 pcap/meson.build8
-rw-r--r--test cases/frameworks/19 pcap/pcap_prog.c8
-rw-r--r--test cases/frameworks/20 cups/meson.build6
-rw-r--r--test cases/frameworks/4 qt/meson.build10
-rw-r--r--test cases/frameworks/4 qt/subfolder/generator.py6
-rw-r--r--test cases/frameworks/4 qt/subfolder/main.cpp23
-rw-r--r--test cases/frameworks/4 qt/subfolder/meson.build32
-rw-r--r--test cases/frameworks/4 qt/subfolder/resources/stuff4.qrc.in8
-rw-r--r--test cases/frameworks/6 gettext/installed_files.txt2
-rw-r--r--test cases/frameworks/6 gettext/meson.build9
-rw-r--r--test cases/frameworks/7 gnome/gdbus/meson.build10
-rw-r--r--test cases/frameworks/7 gnome/installed_files.txt1
-rw-r--r--test cases/frameworks/7 gnome/meson.build11
-rw-r--r--test cases/frameworks/7 gnome/mkenums/meson.build1
-rw-r--r--test cases/frameworks/8 flex/meson.build13
-rw-r--r--test cases/frameworks/8 flex/prog.c2
-rw-r--r--test cases/frameworks/9 wxwidgets/meson.build5
-rw-r--r--test cases/frameworks/9 wxwidgets/wxstc.cpp6
-rw-r--r--test cases/linuxlike/9 compiler checks with dependencies/meson.build2
-rw-r--r--test cases/rust/7 private crate collision/installed_files.txt2
-rw-r--r--test cases/rust/7 private crate collision/meson.build5
-rw-r--r--test cases/rust/7 private crate collision/prog.rs3
-rw-r--r--test cases/rust/7 private crate collision/rand.rs4
-rw-r--r--test cases/unit/13 testsetup selection/main.c3
-rw-r--r--test cases/unit/13 testsetup selection/meson.build10
-rw-r--r--test cases/unit/13 testsetup selection/subprojects/bar/bar.c3
-rw-r--r--test cases/unit/13 testsetup selection/subprojects/bar/meson.build6
-rw-r--r--test cases/unit/13 testsetup selection/subprojects/foo/foo.c3
-rw-r--r--test cases/unit/13 testsetup selection/subprojects/foo/meson.build4
-rw-r--r--test cases/unit/23 compiler run_command/meson.build10
-rw-r--r--test cases/unit/23 non-permitted kwargs/meson.build5
-rw-r--r--test cases/unit/24 pkgconfig usage/dependee/meson.build7
-rw-r--r--test cases/unit/24 pkgconfig usage/dependee/pkguser.c6
-rw-r--r--test cases/unit/24 pkgconfig usage/dependency/meson.build24
-rw-r--r--test cases/unit/24 pkgconfig usage/dependency/pkgdep.c7
-rw-r--r--test cases/unit/24 pkgconfig usage/dependency/pkgdep.h3
-rw-r--r--test cases/unit/24 pkgconfig usage/dependency/privatelib.c3
-rw-r--r--test cases/unit/25 ndebug if-release/main.c11
-rw-r--r--test cases/unit/25 ndebug if-release/meson.build3
-rw-r--r--test cases/unit/26 guessed linker dependencies/exe/app.c6
-rw-r--r--test cases/unit/26 guessed linker dependencies/exe/meson.build7
-rw-r--r--test cases/unit/26 guessed linker dependencies/lib/lib.c20
-rw-r--r--test cases/unit/26 guessed linker dependencies/lib/meson.build11
-rw-r--r--test cases/unit/26 guessed linker dependencies/lib/meson_options.txt1
-rw-r--r--test cases/unit/26 shared_mod linking/libfile.c14
-rw-r--r--test cases/unit/26 shared_mod linking/main.c11
-rw-r--r--test cases/unit/26 shared_mod linking/meson.build5
-rw-r--r--test cases/unit/27 forcefallback/meson.build8
-rw-r--r--test cases/unit/27 forcefallback/subprojects/notzlib/meson.build7
-rw-r--r--test cases/unit/27 forcefallback/subprojects/notzlib/notzlib.c6
-rw-r--r--test cases/unit/27 forcefallback/subprojects/notzlib/notzlib.h18
-rw-r--r--test cases/unit/27 forcefallback/test_not_zlib.c8
-rw-r--r--test cases/unit/28 pkgconfig use libraries/app/app.c6
-rw-r--r--test cases/unit/28 pkgconfig use libraries/app/meson.build5
-rw-r--r--test cases/unit/28 pkgconfig use libraries/lib/liba.c2
-rw-r--r--test cases/unit/28 pkgconfig use libraries/lib/libb.c5
-rw-r--r--test cases/unit/28 pkgconfig use libraries/lib/meson.build16
286 files changed, 5626 insertions, 2302 deletions
diff --git a/.appveyor.yml b/.appveyor.yml
index 56a123a..a1a9c5f 100644
--- a/.appveyor.yml
+++ b/.appveyor.yml
@@ -52,6 +52,8 @@ platform:
branches:
only:
- master
+ # Release branches
+ - /^[0-9]+\.[0-9]+$/
init:
- ps: |
@@ -62,14 +64,35 @@ init:
}
install:
+ - ps: |
+ function DownloadFile([String] $Source, [String] $Destination) {
+ $retries = 10
+ for ($i = 1; $i -le $retries; $i++) {
+ try {
+ (New-Object net.webclient).DownloadFile($Source, $Destination)
+ break # succeeded
+ } catch [net.WebException] {
+ if ($i -eq $retries) {
+ throw # fail on last retry
+ }
+ $backoff = (10 * $i) # backoff 10s, 20s, 30s...
+ echo ('{0}: {1}' -f $Source, $_.Exception.Message)
+ echo ('Retrying in {0}s...' -f $backoff)
+ Start-Sleep -m ($backoff * 1000)
+ }
+ }
+ }
- cmd: set "ORIG_PATH=%PATH%"
# Use a Ninja with QuLogic's patch: https://github.com/ninja-build/ninja/issues/1219
- cmd: set "MESON_FIXED_NINJA=1"
- - ps: (new-object net.webclient).DownloadFile('http://nirbheek.in/files/binaries/ninja/win32/ninja.exe', 'C:\projects\meson\ninja.exe')
+ - ps: DownloadFile -Source 'http://nirbheek.in/files/binaries/ninja/win32/ninja.exe' -Destination 'C:\projects\meson\ninja.exe'
# Use the x86 python only when building for x86 for the cpython tests.
# For all other archs (including, say, arm), use the x64 python.
- cmd: if %arch%==x86 (set MESON_PYTHON_PATH=C:\python35) else (set MESON_PYTHON_PATH=C:\python35-x64)
+ # Skip CI requires python
+ - cmd: python ./skip_ci.py --base-branch-env=APPVEYOR_REPO_BRANCH --is-pull-env=APPVEYOR_PULL_REQUEST_NUMBER
+
# Set paths for BOOST dll files
- cmd: if %compiler%==msvc2015 ( if %arch%==x86 ( set "PATH=%PATH%;C:\Libraries\boost_1_59_0\lib32-msvc-14.0" ) else ( set "PATH=%PATH%;C:\Libraries\boost_1_59_0\lib64-msvc-14.0" ) )
- cmd: if %compiler%==msvc2017 ( if %arch%==x86 ( set "PATH=%PATH%;C:\Libraries\boost_1_64_0\lib32-msvc-14.1" ) else ( set "PATH=%PATH%;C:\Libraries\boost_1_64_0\lib64-msvc-14.1" ) )
@@ -79,36 +102,32 @@ install:
- cmd: if %compiler%==msvc2015 ( call "C:\Program Files (x86)\Microsoft Visual Studio 14.0\VC\vcvarsall.bat" %arch% )
- cmd: if %compiler%==msvc2017 ( call "C:\Program Files (x86)\Microsoft Visual Studio\2017\Community\Common7\Tools\VsDevCmd.bat" -arch=%arch% )
- cmd: if %compiler%==cygwin ( set PYTHON=python3 ) else ( set PYTHON=python )
- - ps: |
- If($Env:compiler -eq 'msys2-mingw') {
- If($Env:arch -eq 'x86') {
- $env:Path = 'C:\msys64\mingw32\bin;' + $env:Path
- $env:MESON_PYTHON_PATH = 'C:\msys64\mingw32\bin'
- $env:PYTHON = 'python3'
- C:\msys64\usr\bin\pacman -S --noconfirm mingw32/mingw-w64-i686-python3
- } Else {
- $env:Path = 'C:\msys64\mingw64\bin;' + $env:Path
- $env:MESON_PYTHON_PATH = 'C:\msys64\mingw64\bin'
- $env:PYTHON = 'python3'
- C:\msys64\usr\bin\pacman -S --noconfirm mingw64/mingw-w64-x86_64-python3
- }
- }
+ # MinGW setup, lines are split to prevent "The input line is too long." error.
+ - cmd: if %arch%==x86 ( set "PACMAN_ARCH=i686" ) else ( set "PACMAN_ARCH=x86_64" )
+ - cmd: if %arch%==x86 ( set "PACMAN_BITS=32" ) else ( set "PACMAN_BITS=64" )
+ - cmd: if %compiler%==msys2-mingw ( set "PATH=C:\msys64\mingw%PACMAN_BITS%\bin;%PATH%" )
+ - cmd: if %compiler%==msys2-mingw ( set "MESON_PYTHON_PATH=C:\msys64\mingw%PACMAN_BITS%\bin" )
+ - cmd: if %compiler%==msys2-mingw ( set "PYTHON=python3" )
+ - cmd: if %compiler%==msys2-mingw ( C:\msys64\usr\bin\pacman -S --needed --noconfirm "mingw%PACMAN_BITS%/mingw-w64-%PACMAN_ARCH%-python3" )
+ # Cygwin
- cmd: if not %compiler%==cygwin ( set "PATH=%cd%;%MESON_PYTHON_PATH%;%PATH%;" )
- cmd: if %compiler%==cygwin ( set WRAPPER=ci\run-in-cygwin.bat )
- cmd: if %compiler%==cygwin ( %WRAPPER% which %PYTHON% ) else ( where %PYTHON% )
# pkg-config is needed for the pkg-config tests on msvc
- - ps: If($Env:compiler.StartsWith('msvc')) {(new-object net.webclient).DownloadFile('http://nirbheek.in/files/binaries/pkg-config/win32/pkg-config.exe', 'C:\projects\meson\pkg-config.exe')}
+ - ps: |
+ If($Env:compiler.StartsWith('msvc')) {
+ DownloadFile -Source 'http://nirbheek.in/files/binaries/pkg-config/win32/pkg-config.exe' `
+ -Destination 'C:\projects\meson\pkg-config.exe'
+ }
- cmd: if %compiler%==cygwin ( call ci\appveyor-install.bat )
- ps: |
If($Env:compiler -like 'msvc*') {
- (new-object net.webclient).DownloadFile(
- "https://download.microsoft.com/download/D/B/B/DBB64BA1-7B51-43DB-8BF1-D1FB45EACF7A/msmpisdk.msi",
- "C:\projects\msmpisdk.msi")
+ DownloadFile -Source "https://download.microsoft.com/download/D/B/B/DBB64BA1-7B51-43DB-8BF1-D1FB45EACF7A/msmpisdk.msi" `
+ -Destination "C:\projects\msmpisdk.msi"
c:\windows\system32\msiexec.exe /i C:\projects\msmpisdk.msi /quiet
- (new-object net.webclient).DownloadFile(
- "https://download.microsoft.com/download/D/B/B/DBB64BA1-7B51-43DB-8BF1-D1FB45EACF7A/MSMpiSetup.exe",
- "C:\projects\MSMpiSetup.exe")
+ DownloadFile -Source "https://download.microsoft.com/download/D/B/B/DBB64BA1-7B51-43DB-8BF1-D1FB45EACF7A/MSMpiSetup.exe" `
+ -Destination "C:\projects\MSMpiSetup.exe"
c:\projects\MSMpiSetup.exe -unattend -full
}
diff --git a/.travis.yml b/.travis.yml
index f077c9c..16fa55b 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -3,6 +3,8 @@ sudo: false
branches:
only:
- master
+ # Release branches
+ - /^[0-9]+\.[0-9]+$/
os:
- linux
@@ -29,8 +31,10 @@ matrix:
compiler: gcc
before_install:
+ - python ./skip_ci.py --base-branch-env=TRAVIS_BRANCH --is-pull-env=TRAVIS_PULL_REQUEST
- if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then brew update; fi
- - if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then brew install python3; fi
+ - if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then brew uninstall python mercurial; fi
+ - if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then brew install python@2 python@3 mercurial qt; fi
# Use a Ninja with QuLogic's patch: https://github.com/ninja-build/ninja/issues/1219
- if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then mkdir -p $HOME/tools; curl -L http://nirbheek.in/files/binaries/ninja/macos/ninja -o $HOME/tools/ninja; chmod +x $HOME/tools/ninja; fi
- if [[ "$TRAVIS_OS_NAME" == "linux" ]]; then docker pull jpakkane/mesonci:artful; fi
@@ -49,4 +53,4 @@ script:
withgit \
/bin/sh -c "cd /root && mkdir -p tools; wget -c http://nirbheek.in/files/binaries/ninja/linux-amd64/ninja -O /root/tools/ninja; chmod +x /root/tools/ninja; CC=$CC CXX=$CXX OBJC=$CC OBJCXX=$CXX PATH=/root/tools:$PATH MESON_FIXED_NINJA=1 ./run_tests.py -- $MESON_ARGS && chmod -R a+rwX .coverage"
fi
- - if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then SDKROOT=$(xcodebuild -version -sdk macosx Path) CPPFLAGS=-I/usr/local/include LDFLAGS=-L/usr/local/lib OBJC=$CC OBJCXX=$CXX PATH=$HOME/tools:$PATH MESON_FIXED_NINJA=1 ./run_tests.py --backend=ninja -- $MESON_ARGS ; fi
+ - if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then SDKROOT=$(xcodebuild -version -sdk macosx Path) CPPFLAGS=-I/usr/local/include LDFLAGS=-L/usr/local/lib OBJC=$CC OBJCXX=$CXX PATH=$HOME/tools:/usr/local/opt/qt/bin:$PATH MESON_FIXED_NINJA=1 ./run_tests.py --backend=ninja -- $MESON_ARGS ; fi
diff --git a/README.md b/README.md
index 6185f8e..969e251 100644
--- a/README.md
+++ b/README.md
@@ -8,7 +8,7 @@ build system.
[![PyPI](https://img.shields.io/pypi/v/meson.svg)](https://pypi.python.org/pypi/meson)
[![Travis](https://travis-ci.org/mesonbuild/meson.svg?branch=master)](https://travis-ci.org/mesonbuild/meson)
-[![Appveyor](https://ci.appveyor.com/api/projects/status/l5c8v71ninew2i3p?svg=true)](https://ci.appveyor.com/project/jpakkane/meson)
+[![Appveyor](https://ci.appveyor.com/api/projects/status/7jfaotriu8d8ncov?svg=true)](https://ci.appveyor.com/project/mesonbuild/meson)
[![Codecov](https://codecov.io/gh/mesonbuild/meson/coverage.svg?branch=master)](https://codecov.io/gh/mesonbuild/meson/branch/master)
#### Dependencies
diff --git a/ciimage/Dockerfile b/ciimage/Dockerfile
index 05e679e..72788c3 100644
--- a/ciimage/Dockerfile
+++ b/ciimage/Dockerfile
@@ -14,3 +14,5 @@ RUN apt-get -y update && apt-get -y upgrade \
&& apt-get -y install libwmf-dev \
&& apt-get -y install qt4-linguist-tools qttools5-dev-tools \
&& python3 -m pip install hotdoc codecov
+
+ENV LANG='C.UTF-8'
diff --git a/contributing.md b/contributing.md
new file mode 100644
index 0000000..3d4dc34
--- /dev/null
+++ b/contributing.md
@@ -0,0 +1,8 @@
+## Contributing to the Meson build system
+
+Thank you for your interest in participating in the development!
+A large fraction of Meson is contributed by people outside
+the core team and we are **excited** to see what you do.
+
+**Contribution instructions can be found on the website**
+ @ http://mesonbuild.com/Contributing.html
diff --git a/contributing.txt b/contributing.txt
deleted file mode 100644
index b1c015c..0000000
--- a/contributing.txt
+++ /dev/null
@@ -1,4 +0,0 @@
-Contributing to the Meson build system
-
-Contribution instructions can be found [on the
-website](http://mesonbuild.com/Contributing.html).
diff --git a/cross/ubuntu-armhf.txt b/cross/ubuntu-armhf.txt
index 6246ffe..fec8ce7 100644
--- a/cross/ubuntu-armhf.txt
+++ b/cross/ubuntu-armhf.txt
@@ -1,8 +1,8 @@
[binaries]
# we could set exe_wrapper = qemu-arm-static but to test the case
# when cross compiled binaries can't be run we don't do that
-c = '/usr/bin/arm-linux-gnueabihf-gcc-7'
-cpp = '/usr/bin/arm-linux-gnueabihf-g++-7'
+c = '/usr/bin/arm-linux-gnueabihf-gcc'
+cpp = '/usr/bin/arm-linux-gnueabihf-g++'
rust = ['rustc', '--target', 'arm-unknown-linux-gnueabihf', '-C', 'linker=/usr/bin/arm-linux-gnueabihf-gcc-7']
ar = '/usr/arm-linux-gnueabihf/bin/ar'
strip = '/usr/arm-linux-gnueabihf/bin/strip'
diff --git a/data/shell-completions/zsh/_meson b/data/shell-completions/zsh/_meson
index 877d700..481d04c 100644
--- a/data/shell-completions/zsh/_meson
+++ b/data/shell-completions/zsh/_meson
@@ -31,7 +31,7 @@ local -i ret
local __meson_backends="(ninja xcode ${(j. .)${:-vs{,2010,2015,2017}}})"
local __meson_build_types="(plain debug debugoptimized minsize release)"
-local __meson_wrap_modes="(WrapMode.{default,nofallback,nodownload})"
+local __meson_wrap_modes="(WrapMode.{default,nofallback,nodownload,forcefallback})"
local -a meson_commands=(
'setup:set up a build directory'
diff --git a/data/syntax-highlighting/vim/syntax/meson.vim b/data/syntax-highlighting/vim/syntax/meson.vim
index 83dd66a..d58903e 100644
--- a/data/syntax-highlighting/vim/syntax/meson.vim
+++ b/data/syntax-highlighting/vim/syntax/meson.vim
@@ -70,6 +70,7 @@ syn keyword mesonBuiltin
\ add_project_link_arguments
\ add_test_setup
\ benchmark
+ \ both_libraries
\ build_machine
\ build_target
\ configuration_data
diff --git a/docs/markdown/Adding-arguments.md b/docs/markdown/Adding-arguments.md
index e314102..117622b 100644
--- a/docs/markdown/Adding-arguments.md
+++ b/docs/markdown/Adding-arguments.md
@@ -49,6 +49,8 @@ executable('prog', 'prog.cc', cpp_args : '-DCPPTHING')
Here we create a C++ executable with an extra argument that is used
during compilation but not for linking.
+You can find the parameter name for other languages in the [reference tables](Reference-tables.md).
+
Specifying extra linker arguments is done in the same way:
```meson
diff --git a/docs/markdown/Adding-new-projects-to-wrapdb.md b/docs/markdown/Adding-new-projects-to-wrapdb.md
index 4420de5..58b27ba 100644
--- a/docs/markdown/Adding-new-projects-to-wrapdb.md
+++ b/docs/markdown/Adding-new-projects-to-wrapdb.md
@@ -37,11 +37,10 @@ Each project gets its own repo. It is initialized like this:
git init
git add readme.txt
- git commit -a -m 'Start of project foobar.'
- git tag commit_zero -a -m 'A tag that helps get revision ids for releases.'
+ git add LICENSE.build
+ git commit -a -m 'Create project foobar'
git remote add origin <repo url>
git push -u origin master
- git push --tags
Note that this is the *only* commit that will ever be made to master branch. All other commits are done to branches.
diff --git a/docs/markdown/Configuring-a-build-directory.md b/docs/markdown/Configuring-a-build-directory.md
index 774addf..8e016e2 100644
--- a/docs/markdown/Configuring-a-build-directory.md
+++ b/docs/markdown/Configuring-a-build-directory.md
@@ -9,9 +9,7 @@ generated. For example you might want to change from a debug build
into a release build, set custom compiler flags, change the build
options provided in your `meson_options.txt` file and so on.
-The main tool for this is the `meson configure` command. You may also use the
-`mesongui` graphical application if you want. However this document
-describes the use of the command line client.
+The main tool for this is the `meson configure` command.
You invoke `meson configure` by giving it the location of your build dir. If
omitted, the current working directory is used instead. Here's a
diff --git a/docs/markdown/Contributing.md b/docs/markdown/Contributing.md
index 8e9847a..7b5fe73 100644
--- a/docs/markdown/Contributing.md
+++ b/docs/markdown/Contributing.md
@@ -18,6 +18,45 @@ Github](https://github.com/mesonbuild/meson/pulls). This causes them
to be run through the CI system. All submissions must pass a full CI
test run before they are even considered for submission.
+## Acceptance and merging
+
+The kind of review and acceptance any merge proposal gets depends on
+the changes it contains. All pull requests must be reviewed and
+accepted by someone with commit rights who is not the original
+submitter. Merge requests can be roughly split into three different
+categories.
+
+The first one consists of MRs that only change the markdown
+documentation under `docs/markdown`. Anyone with access rights can
+push changes to these directly to master. For major changes it is
+still recommended to create an MR so other people can comment on it.
+
+The second group consists of merges that don't change any
+functionality, fixes to the CI system and bug fixes that have added
+regression tests (see below) and don't change existing
+functionality. Once successfully reviewed anyone with merge rights can
+merge these to master.
+
+The final kind of merges are those that add new functionality or
+change existing functionality in a backwards incompatible way. These
+require the approval of the project lead.
+
+In a simplified list form the split would look like the following:
+
+ - members with commit access can do:
+ - documentation changes (directly to master if warranted)
+ - bug fixes that don't change functionality
+ - refactorings
+ - new dependency types
+ - new tool support (e.g. a new Doxygen-kind of tool)
+ - support for new compilers to existing languages
+ - project leader decision is needed for:
+ - new modules
+ - new functions in the Meson language
+ - syntax changes for Meson files
+ - changes breaking backwards compatibility
+ - support for new languages
+
## Tests
All new features must come with automatic tests that thoroughly prove
diff --git a/docs/markdown/Cross-compilation.md b/docs/markdown/Cross-compilation.md
index c1ad317..7d316ed 100644
--- a/docs/markdown/Cross-compilation.md
+++ b/docs/markdown/Cross-compilation.md
@@ -10,17 +10,23 @@ nomenclature. The three most important definitions are traditionally
called *build*, *host* and *target*. This is confusing because those
terms are used for quite many different things. To simplify the issue,
we are going to call these the *build machine*, *host machine* and
-*target machine*. Their definitions are the following
+*target machine*. Their definitions are the following:
-* *build machine* is the computer that is doing the actual compiling
-* *host machine* is the machine on which the compiled binary will run
-* *target machine* is the machine on which the compiled binary's output will run (this is only meaningful for programs such as compilers that, when run, produce object code for a different CPU than what the program is being run on)
+* *build machine* is the computer that is doing the actual compiling.
+* *host machine* is the machine on which the compiled binary will run.
+* *target machine* is the machine on which the compiled binary's
+ output will run, *only meaningful* if the program produces
+ machine-specific output.
The `tl/dr` summary is the following: if you are doing regular cross
-compilation, you only care about *build_machine* and
-*host_machine*. Just ignore *target_machine* altogether and you will
-be correct 99% of the time. If your needs are more complex or you are
-interested in the actual details, do read on.
+compilation, you only care about `build_machine` and
+`host_machine`. Just ignore `target_machine` altogether and you will
+be correct 99% of the time. Only compilers and similar tools care
+about the target machine. In fact, for so-called "multi-target" tools
+the target machine need not be fixed at build-time like the others but
+can be chosen at runtime, so `target_machine` *still* doesn't matter. If your
+needs are more complex or you are interested in the actual details, do
+read on.
This might be easier to understand through examples. Let's start with
the regular, not cross-compiling case. In these cases all of these
@@ -50,6 +56,20 @@ Wikipedia or the net in general. It is very common for them to get
build, host and target mixed up, even in consecutive sentences, which
can leave you puzzled until you figure it out.
+A lot of confusion stems from the fact that when you cross-compile
+something, the 3 systems (*build*, *host*, and *target*) used when
+building the cross compiler don't align with the ones used when
+building something with that newly-built cross compiler. To take our
+Canadian Cross scenario from above (for full generality), since its
+*host machine* is x86 Windows, the *build machine* of anything we
+build with it is *x86 Windows*. And since its *target machine* is MIPS
+Linux, the *host machine* of anything we build with it is *MIPS
+Linux*. Only the *target machine* of whatever we build with it can be
+freely chosen by us, say if we want to build another cross compiler
+that runs on MIPS Linux and targets Aarch64 iOS. As this example
+hopefully makes clear to you, the platforms are shifted over to the
+left by one position.
+
If you did not understand all of the details, don't worry. For most
people it takes a while to wrap their head around these
concepts. Don't panic, it might take a while to click, but you will
@@ -82,8 +102,9 @@ of a wrapper, these lines are all you need to write. Meson will
automatically use the given wrapper when it needs to run host
binaries. This happens e.g. when running the project's test suite.
-The next section lists properties of the cross compiler and thus of
-the target system. It looks like this:
+The next section lists properties of the cross compiler and its target
+system, and thus properties of the host system of what we're building. It
+looks like this:
```ini
[properties]
@@ -261,7 +282,7 @@ myvar = meson.get_cross_property('somekey')
## Cross file locations
As of version 0.44.0 meson supports loading cross files from system locations
-on Linux and the BSDs. This will be $XDG_DATA_DIRS/meson/cross, or if
+(except on Windows). This will be $XDG_DATA_DIRS/meson/cross, or if
XDG_DATA_DIRS is undefined, then /usr/local/share/meson/cross and
/usr/share/meson/cross will be tried in that order, for system wide cross
files. User local files can be put in $XDG_DATA_HOME/meson/cross, or
@@ -272,7 +293,7 @@ The order of locations tried is as follows:
- The user local location
- The system wide locations in order
-Linux and BSD distributions are encouraged to ship cross files either with
+Distributions are encouraged to ship cross files either with
their cross compiler toolchain packages or as a standalone package, and put
them in one of the system paths referenced above.
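To tie the machine terminology above back to build definitions, here is a minimal, hedged sketch of querying the machine objects from a `meson.build` file (the messages are purely illustrative):

```meson
project('crossdemo', 'c')

if meson.is_cross_build()
  # build_machine is where compilation runs, host_machine is where
  # the resulting binaries will run.
  message('Compiling on ' + build_machine.system() + ' (' + build_machine.cpu_family() + ')')
  message('Binaries will run on ' + host_machine.system() + ' (' + host_machine.cpu_family() + ')')
endif
```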
diff --git a/docs/markdown/Dependencies.md b/docs/markdown/Dependencies.md
index 74a918a..12e1b1f 100644
--- a/docs/markdown/Dependencies.md
+++ b/docs/markdown/Dependencies.md
@@ -61,7 +61,7 @@ zdep_prefix = zdep.get_pkgconfig_variable('libdir', define_variable: ['prefix',
The dependency detector works with all libraries that provide a
`pkg-config` file. Unfortunately several packages don't provide
pkg-config files. Meson has autodetection support for some of these,
-and they are described later on this page.
+and they are described [later on this page](#dependencies-with-custom-lookup-functionality).
# Declaring your own
@@ -111,6 +111,14 @@ of all the work behind the scenes to make this work.
# Dependencies with custom lookup functionality
+## AppleFrameworks
+
+Use the `modules` keyword to list the required frameworks, e.g.
+
+```meson
+dep = dependency('appleframeworks', modules : 'foundation')
+```
+
## Boost
Boost is not a single dependency but rather a group of different
@@ -138,7 +146,11 @@ can set the BOOST_ROOT, BOOST_INCLUDEDIR, and/or BOOST_LIBRARYDIR
environment variables.
You can set the argument `threading` to `single` to use boost libraries that
-has been compiled for single-threaded use instead.
+have been compiled for single-threaded use instead.
+
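For instance, a hedged sketch of a single-threaded Boost lookup (the module names are placeholders):

```meson
boost_dep = dependency('boost', modules : ['filesystem', 'system'],
                       threading : 'single')
```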
+## GL
+
+This finds the OpenGL library in a way appropriate to the platform.
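A hedged one-line example of the lookup described above:

```meson
gl_dep = dependency('gl')
```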
## GTest and GMock
@@ -160,9 +172,9 @@ test('gtest test', e)
MPI is supported for C, C++ and Fortran. Because dependencies are
language-specific, you must specify the requested language using the
`language` keyword argument, i.e.,
- * `dependency('mpi', language='c')` for the C MPI headers and libraries
- * `dependency('mpi', language='cpp')` for the C++ MPI headers and libraries
- * `dependency('mpi', language='fortran')` for the Fortran MPI headers and libraries
+ * `dependency('mpi', language: 'c')` for the C MPI headers and libraries
+ * `dependency('mpi', language: 'cpp')` for the C++ MPI headers and libraries
+ * `dependency('mpi', language: 'fortran')` for the Fortran MPI headers and libraries
Meson prefers pkg-config for MPI, but if your MPI implementation does
not provide them, it will search for the standard wrapper executables,
@@ -171,9 +183,9 @@ are not in your path, they can be specified by setting the standard
environment variables `MPICC`, `MPICXX`, `MPIFC`, `MPIF90`, or
`MPIF77`, during configuration.
-## Qt5
+## Qt4 & Qt5
-Meson has native Qt5 support. Its usage is best demonstrated with an
+Meson has native Qt support. Its usage is best demonstrated with an
example.
```meson
@@ -204,12 +216,26 @@ the list of sources for the target. The `modules` keyword of
`dependency` works just like it does with Boost. It tells which
subparts of Qt the program uses.
+## SDL2
+
+SDL2 can be located using `pkg-config`, the `sdl2-config` config tool, or as an
+OSX framework.
+
+## Valgrind
+
+Meson will find valgrind using `pkg-config`, but only uses the compilation flags
+and avoids trying to link with its non-PIC static libs.
+
+## Vulkan
+
+Vulkan can be located using `pkg-config`, or the `VULKAN_SDK` environment variable.
+
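As a hedged illustration of the three lookups just described (the version constraint and `required` setting are assumptions):

```meson
sdl2_dep = dependency('sdl2', version : '>=2.0.0')
valgrind_dep = dependency('valgrind', required : false)
vulkan_dep = dependency('vulkan')
```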
## Dependencies using config tools
-CUPS, LLVM, PCAP, WxWidgets, libwmf, and GnuStep either do not provide
-pkg-config modules or additionally can be detected via a config tool
+CUPS, LLVM, PCAP, [WxWidgets](#wxwidgets), libwmf, and GnuStep either do not
+provide pkg-config modules or additionally can be detected via a config tool
(cups-config, llvm-config, etc). Meson has native support for these tools, and
-then can be found like other dependencies:
+they can be found like other dependencies:
```meson
pcap_dep = dependency('pcap', version : '>=1.0')
@@ -224,6 +250,30 @@ tools support. You can force one or another via the method keyword:
wmf_dep = dependency('wmf', method : 'config-tool')
```
+## WxWidgets
+
+Similar to [Boost](#boost), WxWidgets is not a single library but rather
+a collection of modules. WxWidgets is supported via `wx-config`.
+Meson passes `modules` to the `wx-config` invocation and generates
+- `compile_args` using `wx-config --cxxflags $modules...`
+- `link_args` using `wx-config --libs $modules...`
+
+### Example
+
+```meson
+wx_dep = dependency(
+ 'wxwidgets', version : '>=3.0.0', modules : ['std', 'stc'],
+)
+```
+
+```shell
+# compile_args:
+$ wx-config --cxxflags std stc
+
+# link_args:
+$ wx-config --libs std stc
+```
+
## LLVM
Meson has native support for LLVM going back to LLVM version 3.5.
@@ -257,10 +307,10 @@ llvm_dep = dependency(
Python3 is handled specially by meson:
1. Meson tries to use `pkg-config`.
-1. If `pkg-config` fails meson uses fallback:
- - On Windows fallback is current `python3` interpreter.
- - On OSX fallback is framework dependency from `/Library/Frameworks`.
+1. If `pkg-config` fails meson uses a fallback:
+ - On Windows the fallback is the current `python3` interpreter.
+ - On OSX the fallback is a framework dependency from `/Library/Frameworks`.
Note that `python3` found by this dependency might differ from the one used in
-`python3` module because modules uses current interpreter but dependency tries
+`python3` module because the module uses the current interpreter, but the dependency tries
`pkg-config` first.
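A hedged sketch of the Python3 lookup described above (marking it optional is just for illustration):

```meson
py3_dep = dependency('python3', required : false)
if py3_dep.found()
  message('Python3 found via pkg-config or the platform fallback')
endif
```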
diff --git a/docs/markdown/FAQ.md b/docs/markdown/FAQ.md
index 441cd69..ff93216 100644
--- a/docs/markdown/FAQ.md
+++ b/docs/markdown/FAQ.md
@@ -7,9 +7,16 @@ See also [How do I do X in Meson](howtox.md).
## Why is it called Meson?
-When the name was originally chosen, there were two main limitations: there must not exist either a Debian package or a Sourceforge project of the given name. This ruled out tens of potential project names. At some point the name Gluon was considered. Gluons are elementary particles that hold protons and neutrons together, much like a build system's job is to take pieces of source code and a compiler and bind them to a complete whole.
+When the name was originally chosen, there were two main limitations:
+there must not exist either a Debian package or a Sourceforge project
+of the given name. This ruled out tens of potential project names. At
+some point the name Gluon was considered. Gluons are elementary
+particles that hold protons and neutrons together, much like a build
+system's job is to take pieces of source code and a compiler and bind
+them to a complete whole.
-Unfortunately this name was taken, too. Then the rest of subatomic particles were examined and Meson was found to be available.
+Unfortunately this name was taken, too. Then the rest of subatomic
+particles were examined and Meson was found to be available.
## What is the correct way to use threads (such as pthreads)?
@@ -17,23 +24,34 @@ Unfortunately this name was taken, too. Then the rest of subatomic particles wer
thread_dep = dependency('threads')
```
-This will set up everything on your behalf. People coming from Autotools or CMake want to do this by looking for `libpthread.so` manually. Don't do that, it has tricky corner cases especially when cross compiling.
+This will set up everything on your behalf. People coming from
+Autotools or CMake want to do this by looking for `libpthread.so`
+manually. Don't do that, it has tricky corner cases especially when
+cross compiling.
## How to use Meson on a host where it is not available in system packages?
-Starting from version 0.29.0, Meson is available from the [Python Package Index](https://pypi.python.org/pypi/meson/), so installing it simply a matter of running this command:
+Starting from version 0.29.0, Meson is available from the [Python
+Package Index](https://pypi.python.org/pypi/meson/), so installing it
+is simply a matter of running this command:
```console
$ pip3 install <your options here> meson
```
-If you don't have access to PyPI, that is not a problem either. Meson has been designed to be easily runnable from an extracted source tarball or even a git checkout. First you need to download Meson. Then use this command to set up you build instead of plain `meson`.
+If you don't have access to PyPI, that is not a problem either. Meson
+has been designed to be easily runnable from an extracted source
+tarball or even a git checkout. First you need to download Meson. Then
+use this command to set up your build instead of plain `meson`.
```console
$ /path/to/meson.py <options>
```
-After this you don't have to care about invoking Meson any more. It remembers where it was originally invoked from and calls itself appropriately. As a user the only thing you need to do is to `cd` into your build directory and invoke `ninja`.
+After this you don't have to care about invoking Meson any more. It
+remembers where it was originally invoked from and calls itself
+appropriately. As a user the only thing you need to do is to `cd` into
+your build directory and invoke `ninja`.
## Why can't I specify target files with a wildcard?
@@ -43,17 +61,34 @@ Instead of specifying files explicitly, people seem to want to do this:
executable('myprog', sources : '*.cpp') # This does NOT work!
```
-Meson does not support this syntax and the reason for this is simple. This can not be made both reliable and fast. By reliable we mean that if the user adds a new source file to the subdirectory, Meson should detect that and make it part of the build automatically.
+Meson does not support this syntax and the reason for this is
+simple. This can not be made both reliable and fast. By reliable we
+mean that if the user adds a new source file to the subdirectory,
+Meson should detect that and make it part of the build automatically.
-One of the main requirements of Meson is that it must be fast. This means that a no-op build in a tree of 10 000 source files must take no more than a fraction of a second. This is only possible because Meson knows the exact list of files to check. If any target is specified as a wildcard glob, this is no longer possible. Meson would need to re-evaluate the glob every time and compare the list of files produced against the previous list. This means inspecting the entire source tree (because the glob pattern could be `src/\*/\*/\*/\*.cpp` or something like that). This is impossible to do efficiently.
+One of the main requirements of Meson is that it must be fast. This
+means that a no-op build in a tree of 10 000 source files must take no
+more than a fraction of a second. This is only possible because Meson
+knows the exact list of files to check. If any target is specified as
+a wildcard glob, this is no longer possible. Meson would need to
+re-evaluate the glob every time and compare the list of files produced
+against the previous list. This means inspecting the entire source
+tree (because the glob pattern could be `src/\*/\*/\*/\*.cpp` or
+something like that). This is impossible to do efficiently.
-The main backend of Meson is Ninja, which does not support wildcard matches either, and for the same reasons.
+The main backend of Meson is Ninja, which does not support wildcard
+matches either, and for the same reasons.
Because of this, all source files must be specified explicitly.
## But I really want to use wildcards!
-If the tradeoff between reliability and convenience is acceptable to you, then Meson gives you all the tools necessary to do wildcard globbing. You are allowed to run arbitrary commands during configuration. First you need to write a script that locates the files to compile. Here's a simple shell script that writes all `.c` files in the current directory, one per line.
+If the tradeoff between reliability and convenience is acceptable to
+you, then Meson gives you all the tools necessary to do wildcard
+globbing. You are allowed to run arbitrary commands during
+configuration. First you need to write a script that locates the files
+to compile. Here's a simple shell script that writes all `.c` files in
+the current directory, one per line.
```bash
@@ -72,17 +107,37 @@ sources = c.stdout().strip().split('\n')
e = executable('prog', sources)
```
-The script can be any executable, so it can be written in shell, Python, Lua, Perl or whatever you wish.
+The script can be any executable, so it can be written in shell,
+Python, Lua, Perl or whatever you wish.
-As mentioned above, the tradeoff is that just adding new files to the source directory does *not* add them to the build automatically. To add them you need to tell Meson to reinitialize itself. The simplest way is to touch the `meson.build` file in your source root. Then Meson will reconfigure itself next time the build command is run. Advanced users can even write a small background script that utilizes a filesystem event queue, such as [inotify](https://en.wikipedia.org/wiki/Inotify), to do this automatically.
+As mentioned above, the tradeoff is that just adding new files to the
+source directory does *not* add them to the build automatically. To
+add them you need to tell Meson to reinitialize itself. The simplest
+way is to touch the `meson.build` file in your source root. Then Meson
+will reconfigure itself next time the build command is run. Advanced
+users can even write a small background script that utilizes a
+filesystem event queue, such as
+[inotify](https://en.wikipedia.org/wiki/Inotify), to do this
+automatically.
## Should I use `subdir` or `subproject`?
-The answer is almost always `subdir`. Subproject exists for a very specific use case: embedding external dependencies into your build process. As an example, suppose we are writing a game and wish to use SDL. Let us further suppose that SDL comes with a Meson build definition. Let us suppose even further that we don't want to use prebuilt binaries but want to compile SDL for ourselves.
+The answer is almost always `subdir`. Subproject exists for a very
+specific use case: embedding external dependencies into your build
+process. As an example, suppose we are writing a game and wish to use
+SDL. Let us further suppose that SDL comes with a Meson build
+definition. Let us suppose even further that we don't want to use
+prebuilt binaries but want to compile SDL for ourselves.
-In this case you would use `subproject`. The way to do it would be to grab the source code of SDL and put it inside your own source tree. Then you would do `sdl = subproject('sdl')`, which would cause Meson to build SDL as part of your build and would then allow you to link against it or do whatever else you may prefer.
+In this case you would use `subproject`. The way to do it would be to
+grab the source code of SDL and put it inside your own source
+tree. Then you would do `sdl = subproject('sdl')`, which would cause
+Meson to build SDL as part of your build and would then allow you to
+link against it or do whatever else you may prefer.
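A hedged sketch of that pattern; the `sdl_dep` variable name exported by the subproject is an assumption:

```meson
sdl_proj = subproject('sdl')
sdl_dep = sdl_proj.get_variable('sdl_dep')  # assumed to be declared by the subproject
executable('mygame', 'main.c', dependencies : sdl_dep)
```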
-For every other use you would use `subdir`. As an example, if you wanted to build a shared library in one dir and link tests against it in another dir, you would do something like this:
+For every other use you would use `subdir`. As an example, if you
+wanted to build a shared library in one dir and link tests against it
+in another dir, you would do something like this:
```meson
project('simple', 'c')
@@ -92,27 +147,53 @@ subdir('tests') # test binaries would link against the library here
## Why is there not a Make backend?
-Because Make is slow. This is not an implementation issue, Make simply can not be made fast. For further info we recommend you read [this post](http://neugierig.org/software/chromium/notes/2011/02/ninja.html) by Evan Martin, the author of Ninja. Makefiles also have a syntax that is very unpleasant to write which makes them a big maintenance burden.
+Because Make is slow. This is not an implementation issue, Make simply
+can not be made fast. For further info we recommend you read [this
+post](http://neugierig.org/software/chromium/notes/2011/02/ninja.html)
+by Evan Martin, the author of Ninja. Makefiles also have a syntax that
+is very unpleasant to write which makes them a big maintenance burden.
-The only reason why one would use Make instead of Ninja is working on a platform that does not have a Ninja port. Even in this case it is an order of magnitude less work to port Ninja than it is to write a Make backend for Meson.
+The only reason why one would use Make instead of Ninja is working on
+a platform that does not have a Ninja port. Even in this case it is an
+order of magnitude less work to port Ninja than it is to write a Make
+backend for Meson.
Just use Ninja, you'll be happier that way. I guarantee it.
## Why is Meson not just a Python module so I could code my build setup in Python?
-A related question to this is *Why is Meson's configuration language not Turing-complete?*
+A related question to this is *Why is Meson's configuration language
+not Turing-complete?*
-There are many good reasons for this, most of which are summarized on this web page: [Against The Use Of Programming Languages in Configuration Files](https://taint.org/2011/02/18/001527a.html).
+There are many good reasons for this, most of which are summarized on
+this web page: [Against The Use Of Programming Languages in
+Configuration Files](https://taint.org/2011/02/18/001527a.html).
-In addition to those reasons, not exposing Python or any other "real" programming language makes it possible to port Meson's implementation to a different language. This might become necessary if, for example, Python turns out to be a performance bottleneck. This is an actual problem that has caused complications for GNU Autotools and SCons.
+In addition to those reasons, not exposing Python or any other "real"
+programming language makes it possible to port Meson's implementation
+to a different language. This might become necessary if, for example,
+Python turns out to be a performance bottleneck. This is an actual
+problem that has caused complications for GNU Autotools and SCons.
## How do I do the equivalent of Libtools export-symbol and export-regex?
-Either by using [GCC symbol visibility](https://gcc.gnu.org/wiki/Visibility) or by writing a [linker script](https://ftp.gnu.org/old-gnu/Manuals/ld-2.9.1/html_mono/ld.html). This has the added benefit that your symbol definitions are in a standalone file instead of being buried inside your build definitions. An example can be found [here](https://github.com/jpakkane/meson/tree/master/test%20cases/linuxlike/3%20linker%20script).
+Either by using [GCC symbol
+visibility](https://gcc.gnu.org/wiki/Visibility) or by writing a
+[linker
+script](https://ftp.gnu.org/old-gnu/Manuals/ld-2.9.1/html_mono/ld.html). This
+has the added benefit that your symbol definitions are in a standalone
+file instead of being buried inside your build definitions. An example
+can be found
+[here](https://github.com/jpakkane/meson/tree/master/test%20cases/linuxlike/3%20linker%20script).
## My project works fine on Linux and MinGW but fails with MSVC due to a missing .lib file
-With GCC, all symbols on shared libraries are exported automatically unless you specify otherwise. With MSVC no symbols are exported by default. If your shared library exports no symbols, MSVC will silently not produce an import library file leading to failures. The solution is to add symbol visibility definitions [as specified in GCC wiki](https://gcc.gnu.org/wiki/Visibility).
+With GCC, all symbols on shared libraries are exported automatically
+unless you specify otherwise. With MSVC no symbols are exported by
+default. If your shared library exports no symbols, MSVC will silently
+not produce an import library file leading to failures. The solution
+is to add symbol visibility definitions [as specified in GCC
+wiki](https://gcc.gnu.org/wiki/Visibility).
## I added some compiler flags and now the build fails with weird errors. What is happening?
@@ -123,7 +204,13 @@ executable('foobar', ...
c_args : '-some_arg -other_arg')
```
-Meson is *explicit*. In this particular case it will **not** automatically split your strings at whitespaces, instead it will take it as is and work extra hard to pass it to the compiler unchanged, including quoting it properly over shell invocations. This is mandatory to make e.g. files with spaces in them work flawlessly. To pass multiple command line arguments, you need to explicitly put them in an array like this:
+Meson is *explicit*. In this particular case it will **not**
+automatically split your strings at whitespace; instead it will take
+it as is and work extra hard to pass it to the compiler unchanged,
+including quoting it properly over shell invocations. This is
+mandatory to make e.g. files with spaces in them work flawlessly. To
+pass multiple command line arguments, you need to explicitly put them
+in an array like this:
```meson
executable('foobar', ...
@@ -138,20 +225,109 @@ You probably had a project that looked something like this:
project('foobar', 'cpp')
```
-This defaults to `c++11` on GCC compilers. Suppose you want to use `c++14` instead, so you change the definition to this:
+This defaults to `c++11` on GCC compilers. Suppose you want to use
+`c++14` instead, so you change the definition to this:
```meson
project('foobar', 'cpp', default_options : ['cpp_std=c++14'])
```
-But when you recompile, it still uses `c++11`. The reason for this is that default options are only looked at when you are setting up a build directory for the very first time. After that the setting is considered to have a value and thus the default value is ignored. To change an existing build dir to `c++14`, either reconfigure your build dir with `meson configure` or delete the build dir and recreate it from scratch.
+But when you recompile, it still uses `c++11`. The reason for this is
+that default options are only looked at when you are setting up a
+build directory for the very first time. After that the setting is
+considered to have a value and thus the default value is ignored. To
+change an existing build dir to `c++14`, either reconfigure your build
+dir with `meson configure` or delete the build dir and recreate it
+from scratch.
+
+The reason we don't automatically change the option value when the
+default is changed is that it is impossible to know how to do that
+reliably. The actual question that we need to solve is "if the
+option's value is foo and the default value is bar, should we change
+the option value to bar also". There are many choices:
+
+ - if the user has changed the value themselves from the default, then
+ we must not change it back
+
+ - if the user has not changed the value, but changes the default
+ value, then this section's premise would seem to indicate that the
+ value should be changed
+
+ - suppose the user changes the value from the default to foo, then
+   back to bar, and then changes the default value to bar; the correct
+   step to take is ambiguous by itself
+
+In order to solve the latter question we would need to remember not
+only the current and old value, but also all the times the user has
+changed the value and from which value to which other value. Since
+people don't remember their own actions that far back, toggling
+between states based on long history would be confusing.
+
+Because of this we do the simple and understandable thing: default
+values are only defaults and will never affect the value of an option
+once set.
## Does wrap download sources behind my back?
-It does not. In order for Meson to download anything from the net while building, two conditions must be met.
-
-First of all there needs to be a `.wrap` file with a download URL in the `subprojects` directory. If one does not exist, Meson will not download anything.
-
-The second requirement is that there needs to be an explicit subproject invocation in your `meson.build` files. Either `subproject('foobar')` or `dependency('foobar', fallback : ['foobar', 'foo_dep'])`. If these declarations either are not in any build file or they are not called (due to e.g. `if/else`) then nothing is downloaded.
-
-If this is not sufficient for you, starting from release 0.40.0 Meson has a option called `wrap-mode` which can be used to disable wrap downloads altogether with `--wrap-mode=nodownload`. You can also disable dependency fallbacks altogether with `--wrap-mode=nofallback`, which also implies the `nodownload` option.
+It does not. In order for Meson to download anything from the net
+while building, two conditions must be met.
+
+First of all there needs to be a `.wrap` file with a download URL in
+the `subprojects` directory. If one does not exist, Meson will not
+download anything.
+
+The second requirement is that there needs to be an explicit
+subproject invocation in your `meson.build` files. Either
+`subproject('foobar')` or `dependency('foobar', fallback : ['foobar',
+'foo_dep'])`. If these declarations either are not in any build file
+or they are not called (due to e.g. `if/else`) then nothing is
+downloaded.
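+
+For instance (a hedged sketch; the `with_foobar` option is
+hypothetical), a fallback that sits behind a condition triggers no
+download unless that branch is actually taken:
+
+```meson
+# Nothing is fetched unless the option is enabled and this branch runs.
+if get_option('with_foobar')
+  foo_dep = dependency('foobar', fallback : ['foobar', 'foo_dep'])
+endif
+```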
+
+If this is not sufficient for you, starting from release 0.40.0 Meson
+has an option called `wrap-mode` which can be used to disable wrap
+downloads altogether with `--wrap-mode=nodownload`. You can also
+disable dependency fallbacks altogether with `--wrap-mode=nofallback`,
+which also implies the `nodownload` option.
+
+If, on the other hand, you want Meson to always use the fallback
+for dependencies, even when an external dependency exists and could
+satisfy the version requirements (for example, to make sure your
+project builds when fallbacks are used), you can use
+`--wrap-mode=forcefallback` since 0.46.0.
+
+## Why is Meson implemented in Python rather than [programming language X]?
+
+Because build systems are special in ways normal applications aren't.
+
+Perhaps the biggest limitation is that because Meson is used to build
+software at the very lowest levels of the OS, it is part of the core
+bootstrap for new systems. Whenever support for a new CPU architecture
+is added, Meson must run on the system before software using it can be
+compiled natively. This requirement adds two hard limitations.
+
+The first one is that Meson must have the minimal amount of
+dependencies, because they must all be built during the bootstrap to
+get Meson to work.
+
+The second is that Meson must support all CPU architectures, both
+existing and future ones. As an example many new programming languages
+have only an LLVM based compiler available. LLVM has limited CPU
+support compared to, say, GCC, and thus bootstrapping Meson on such
+platforms would first require adding new processor support to
+LLVM. This is in most cases unfeasible.
+
+A further limitation is that we want developers on as many platforms
+as possible to contribute to Meson development using the default tools
+provided by their operating system. In practice what this means is
+that Windows developers should be able to contribute using nothing but
+Visual Studio.
+
+At the time of writing (April 2018) there are only three languages
+that could fulfill these requirements:
+
+ - C
+ - C++
+ - Python
+
+Out of these we have chosen Python because it is the best fit for our
+needs.
diff --git a/docs/markdown/Feature-autodetection.md b/docs/markdown/Feature-autodetection.md
index 65318ec..f865174 100644
--- a/docs/markdown/Feature-autodetection.md
+++ b/docs/markdown/Feature-autodetection.md
@@ -16,4 +16,4 @@ If you do not wish to use CCache for some reason, just specify your compiler wit
Coverage
--
-When doing a code coverage build, Meson will check the existence of binaries `gcovr`, `lcov` and `genhtml`. If the first one is found, it will create targets called *coverage-text* and *coverage-xml*. If the latter two are found, it generates the target *coverage-html*. You can then generate coverage reports just by calling e.g. `ninja coverage-xml`.
+When doing a code coverage build, Meson will check the existence of binaries `gcovr`, `lcov` and `genhtml`. If the first one is found, it will create targets called *coverage-text* and *coverage-xml*. If the latter two or a new enough `gcovr` is found, it generates the target *coverage-html*. You can then generate coverage reports just by calling e.g. `ninja coverage-xml`.
diff --git a/docs/markdown/Generating-sources.md b/docs/markdown/Generating-sources.md
index 2ea1021..cbe6c0d 100644
--- a/docs/markdown/Generating-sources.md
+++ b/docs/markdown/Generating-sources.md
@@ -4,23 +4,32 @@ short-description: Generation of source files before compilation
# Generating sources
- Sometimes source files need to be preprocessed before they are passed to the actual compiler. As an example you might want build an IDL compiler and then run some files through that to generate actual source files. In Meson this is done with [`generator()`](Reference-manual.md#generator) or [`custom_target()`](Reference-manual.md#custom_target).
+Sometimes source files need to be preprocessed before they are passed
+to the actual compiler. As an example you might want to build an IDL
+compiler and then run some files through that to generate actual
+source files. In Meson this is done with
+[`generator()`](Reference-manual.md#generator) or
+[`custom_target()`](Reference-manual.md#custom_target).
## Using custom_target()
-Let's say you have a build target that must be built using sources generated by a compiler. The compiler can either be a built target:
+Let's say you have a build target that must be built using sources
+generated by a compiler. The compiler can either be a built target:
```meson
mycomp = executable('mycompiler', 'compiler.c')
```
-Or an external program provided by the system, or script inside the source tree:
+Or an external program provided by the system, or script inside the
+source tree:
```meson
mycomp = find_program('mycompiler')
```
-Custom targets can take zero or more input files and use them to generate one or more output files. Using a custom target, you can run this compiler at build time to generate the sources:
+Custom targets can take zero or more input files and use them to
+generate one or more output files. Using a custom target, you can run
+this compiler at build time to generate the sources:
```meson
gen_src = custom_target('gen-output',
@@ -31,7 +40,9 @@ gen_src = custom_target('gen-output',
'--h-out', '@OUTPUT1@'])
```
-The `@INPUT@` there will be transformed to `'somefile1.c' 'file2.c'`. Just like the output, you can also refer to each input file individually by index.
+The `@INPUT@` there will be transformed to `'somefile1.c'
+'file2.c'`. Just like the output, you can also refer to each input
+file individually by index.
Then you just put that in your program and you're done.
@@ -41,11 +52,21 @@ executable('program', 'main.c', gen_src)
## Using generator()
-Generators are similar to custom targets, except that we define a *generator*, which defines how to transform an input file into one or more output files, and then use that on as many input files as we want.
+Generators are similar to custom targets, except that we define a
+*generator*, which defines how to transform an input file into one or
+more output files, and then use that on as many input files as we
+want.
-Note that generators should only be used for outputs that will only be used as inputs for a build target or a custom target. When you use the processed output of a generator in multiple targets, the generator will be run multiple times to create outputs for each target. Each output will be created in a target-private directory `@BUILD_DIR@`.
+Note that generators should only be used for outputs that will only be
+used as inputs for a build target or a custom target. When you use the
+processed output of a generator in multiple targets, the generator
+will be run multiple times to create outputs for each target. Each
+output will be created in a target-private directory `@BUILD_DIR@`.
-If you want to generate files for general purposes such as for generating headers to be used by several sources, or data that will be installed, and so on, use a [`custom_target()`](Reference-manual.md#custom_target) instead.
+If you want to generate files for general purposes such as for
+generating headers to be used by several sources, or data that will be
+installed, and so on, use a
+[`custom_target()`](Reference-manual.md#custom_target) instead.
```meson
@@ -54,9 +75,23 @@ gen = generator(mycomp,
arguments : ['@INPUT@', '@OUTPUT@'])
```
-The first argument is the executable file to run. The next file specifies a name generation rule. It specifies how to build the output file name for a given input name. `@BASENAME@` is a placeholder for the input file name without preceding path or suffix (if any). So if the input file name were `some/path/filename.idl`, then the output name would be `filename.c`. You can also use `@PLAINNAME@`, which preserves the suffix which would result in a file called `filename.idl.c`. The last line specifies the command line arguments to pass to the executable. `@INPUT@` and `@OUTPUT@` are placeholders for the input and output files, respectively, and will be automatically filled in by Meson. If your rule produces multiple output files and you need to pass them to the command line, append the location to the output holder like this: `@OUTPUT0@`, `@OUTPUT1@` and so on.
-
-With this rule specified we can generate source files and add them to a target.
+The first argument is the executable file to run. The next file
+specifies a name generation rule. It specifies how to build the output
+file name for a given input name. `@BASENAME@` is a placeholder for
+the input file name without preceding path or suffix (if any). So if
+the input file name were `some/path/filename.idl`, then the output
+name would be `filename.c`. You can also use `@PLAINNAME@`, which
+preserves the suffix which would result in a file called
+`filename.idl.c`. The last line specifies the command line arguments
+to pass to the executable. `@INPUT@` and `@OUTPUT@` are placeholders
+for the input and output files, respectively, and will be
+automatically filled in by Meson. If your rule produces multiple
+output files and you need to pass them to the command line, append the
+location to the output holder like this: `@OUTPUT0@`, `@OUTPUT1@` and
+so on.
+
+With this rule specified we can generate source files and add them to
+a target.
```meson
gen_src = gen.process('input1.idl', 'input2.idl')
@@ -67,8 +102,32 @@ Generators can also generate multiple output files with unknown names:
```meson
gen2 = generator(someprog,
- outputs : ['@BASENAME@.c', '@BASENAME@.h'],
+ output : ['@BASENAME@.c', '@BASENAME@.h'],
arguments : ['--out_dir=@BUILD_DIR@', '@INPUT@'])
```
-In this case you can not use the plain `@OUTPUT@` variable, as it would be ambiguous. This program only needs to know the output directory, it will generate the file names by itself.
+In this case you cannot use the plain `@OUTPUT@` variable, as it
+would be ambiguous. This program only needs to know the output
+directory; it will generate the file names by itself.
+
+To make passing different additional arguments to the generator
+program at each use possible, you can use the `@EXTRA_ARGS@` string in
+the `arguments` list. Note that this placeholder can only be present
+as a whole string, and not as a substring. The main reason is that it
+represents a list of strings, which may be empty, or contain multiple
+elements; and in either case, interpolating it into the middle of a
+single string would be troublesome. If there are no extra arguments
+passed in from a `process()` invocation, the placeholder is entirely
+omitted from the actual list of arguments, so an empty string won't be
+passed to the generator program because of this. If there are multiple
+elements in `extra_args`, they are inserted into the actual
+argument list as separate elements.
+
+```meson
+gen3 = generator(genprog,
+ output : '@BASENAME@.cc',
+ arguments : ['@INPUT@', '@EXTRA_ARGS@', '@OUTPUT@'])
+gen3_src1 = gen3.process('input1.y')
+gen3_src2 = gen3.process('input2.y', extra_args: '--foo')
+gen3_src3 = gen3.process('input3.y', extra_args: ['--foo', '--bar'])
+```
diff --git a/docs/markdown/Gnome-module.md b/docs/markdown/Gnome-module.md
index fbf9530..3db6cc0 100644
--- a/docs/markdown/Gnome-module.md
+++ b/docs/markdown/Gnome-module.md
@@ -123,7 +123,9 @@ Returns an array of two elements which are: `[c_source, header_file]`
### gnome.mkenums()
Generates enum files for GObject using the `glib-mkenums` tool. The
-first argument is the base name of the output files.
+first argument is the base name of the output files, unless `c_template`
+and `h_template` are specified. In that case, the output file names
+will be the base names of the values passed as templates.
This method is essentially a wrapper around the `glib-mkenums` tool's
command line API. It is the most featureful method for enum creation.
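+
+As a rough sketch (file names here are hypothetical), passing the
+templates makes the outputs take their names from the template files
+rather than from the first argument:
+
+```meson
+gnome = import('gnome')
+# Outputs are named after the templates: mylib-enums.c and mylib-enums.h.
+enum_files = gnome.mkenums('mylib-enums',
+  sources : 'mylib.h',
+  c_template : 'mylib-enums.c.template',
+  h_template : 'mylib-enums.h.template',
+  install_header : true)
+```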
@@ -233,9 +235,21 @@ files and the second specifies the XML file name.
* `object_manager`: *(Added 0.40.0)* if true generates object manager code
* `annotations`: *(Added 0.43.0)* list of lists of 3 strings for the annotation for `'ELEMENT', 'KEY', 'VALUE'`
* `docbook`: *(Added 0.43.0)* prefix to generate `'PREFIX'-NAME.xml` docbooks
+* `build_by_default`: if set to true, this target will be built by
+  default, that is, when invoking plain `ninja`; the default value is
+  true for all built target types
+* `install_dir`: (*Added 0.46.0*) location to install the header or
+ bundle depending on previous options
+* `install_header`: (*Added 0.46.0*) if true, install the header file
+
+Starting with *0.46.0*, this function returns a list of at least two custom targets
+(in order): one for the source code and one for the header. The list will
+contain a third custom target for the generated docbook files if that keyword
+argument is passed.
-Returns an opaque object containing the source files. Add it to a top
-level target's source list.
+Earlier versions return a single custom target representing all the outputs.
+Generally, you should just add this list of targets to a top level target's
+source list.
Example:
diff --git a/docs/markdown/IDE-integration.md b/docs/markdown/IDE-integration.md
index f7939dd..f608c5c 100644
--- a/docs/markdown/IDE-integration.md
+++ b/docs/markdown/IDE-integration.md
@@ -6,7 +6,7 @@ short-description: Meson's API to integrate Meson support into an IDE
Meson has exporters for Visual Studio and XCode, but writing a custom backend for every IDE out there is not a scalable approach. To solve this problem, Meson provides an API that makes it easy for any IDE or build tool to integrate Meson builds and provide an experience comparable to a solution native to the IDE.
-The basic tool for this is a script called `mesonintrospect.py`. Some distro packages might not expose this script in the regular path, and in this case you need to execute it from the install directory.
+The basic tool for this is `meson introspect`.
The first thing to do when setting up a Meson project in an IDE is to select the source and build directories. For this example we assume that the source resides in an Eclipse-like directory called `workspace/project` and the build tree is nested inside it as `workspace/project/build`. First we initialise Meson by running the following command in the source directory.
@@ -16,13 +16,13 @@ For the remainder of the document we assume that all commands are executed insid
The first thing you probably want is to get a list of top level targets. For that we use the introspection tool. It comes with extensive command line help so we recommend using that in case problems appear.
- mesonintrospect.py --targets
+ meson introspect --targets
The JSON formats will not be specified in this document. The easiest way of learning them is to look at sample output from the tool.
Once you have a list of targets, you probably need the list of source files that comprise the target. To get this list for a target, say `exampletarget`, issue the following command.
- mesonintrospect.py --target-files exampletarget
+ meson introspect --target-files exampletarget
In order to make code completion work, you need the compiler flags for each compilation step. Meson does not provide this itself, but the Ninja tool Meson uses to build does provide it. To find out the compile steps necessary to build target foo, issue the following command.
@@ -32,7 +32,7 @@ Note that if the target has dependencies (such as generated sources), then the c
The next thing to display is the list of options that can be set. These include build type and so on. Here's how to extract them.
- mesonintrospect.py --buildoptions
+ meson introspect --buildoptions
To set the options, use the `meson configure` command.
@@ -40,6 +40,6 @@ Compilation and unit tests are done as usual by running the `ninja` and `ninja t
When these tests fail, the user probably wants to run the failing test in a debugger. To make this as integrated as possible, extract the test test setups with this command.
- mesonintrospect.py --tests
+ meson introspect --tests
This provides you with all the information needed to run the test: what command to execute, command line arguments and environment variable settings.
diff --git a/docs/markdown/Icestorm-module.md b/docs/markdown/Icestorm-module.md
index 6aa8481..bc2ad61 100644
--- a/docs/markdown/Icestorm-module.md
+++ b/docs/markdown/Icestorm-module.md
@@ -1,6 +1,6 @@
# Unstable IceStorm module
-This module provides is available since version 0.45.0.
+This module is available since version 0.45.0.
**Note**: this module is unstable. It is only provided as a technology
preview. Its API may change in arbitrary ways between releases or it
@@ -8,7 +8,7 @@ might be removed from Meson altogether.
## Usage
-This module provides an experimental to create FPGA bitstreams using
+This module provides an experimental method to create FPGA bitstreams using
the [IceStorm](http://www.clifford.at/icestorm/) suite of tools.
The module exposes only one method called `project` and it is used
@@ -24,4 +24,4 @@ constraint file. This produces output files called `projname.asc`,
`projname.blif` and `projname.bin`. In addition it creates two run
targets called `projname-time` for running timing analysis and
`projname-upload` that uploads the generated bitstream to an FPGA
-devide using the `iceprog` programming executable.
+device using the `iceprog` programming executable.
diff --git a/docs/markdown/Installing.md b/docs/markdown/Installing.md
index 4670544..b8e6a81 100644
--- a/docs/markdown/Installing.md
+++ b/docs/markdown/Installing.md
@@ -29,6 +29,19 @@ install_man('foo.1') # -> share/man/man1/foo.1.gz
install_data('datafile.dat', install_dir : join_paths(get_option('datadir'), 'progname')) # -> share/progname/datafile.dat
```
+`install_data()` supports renaming files *since 0.46.0*.
+
+```meson
+# file.txt -> {datadir}/{projectname}/new-name.txt
+install_data('file.txt', rename : 'new-name.txt')
+
+# file1.txt -> share/myapp/dir1/data.txt
+# file2.txt -> share/myapp/dir2/data.txt
+install_data(['file1.txt', 'file2.txt'],
+ rename : ['dir1/data.txt', 'dir2/data.txt'],
+ install_dir : 'share/myapp')
+```
+
Sometimes you want to copy an entire subtree directly. For this use case there is the `install_subdir` command, which can be used like this.
```meson
diff --git a/docs/markdown/Pkg-config-files.md b/docs/markdown/Pkg-config-files.md
index dde4ac9..ddb8bab 100644
--- a/docs/markdown/Pkg-config-files.md
+++ b/docs/markdown/Pkg-config-files.md
@@ -1,6 +1,6 @@
# Pkg config files
-[Pkg-config](https://en.wikipedia.org/wiki/Pkg-config) is a way for shared libraries to declare the compiler flags needed to use them. There are two different ways of generating Pkg-config files in Meson. The first way is to build them manually with the `configure_files` command. The second way is to use Meson's built in Pkg-config file generator. The difference between the two is that the latter is very simple and meant for basic use cases. The former should be used when you need to provide a more customized solution.
+[Pkg-config](https://en.wikipedia.org/wiki/Pkg-config) is a way for shared libraries to declare the compiler flags needed to use them. There are two different ways of generating Pkg-config files in Meson. The first way is to build them manually with the `configure_file` command. The second way is to use Meson's built in Pkg-config file generator. The difference between the two is that the latter is very simple and meant for basic use cases. The former should be used when you need to provide a more customized solution.
In this document we describe the simple generator approach. It is used in the following way.
diff --git a/docs/markdown/Pkgconfig-module.md b/docs/markdown/Pkgconfig-module.md
index cbe01b4..77db809 100644
--- a/docs/markdown/Pkgconfig-module.md
+++ b/docs/markdown/Pkgconfig-module.md
@@ -38,8 +38,9 @@ keyword arguments.
search path, for example if you install headers into
`${PREFIX}/include/foobar-1`, the correct value for this argument
would be `foobar-1`
-- `requires` list of strings to put in the `Requires` field
-- `requires_private` list of strings to put in the `Requires.private`
+- `requires` list of strings, pkg-config dependencies, or libraries that
+  `pkgconfig.generate()` was used on, to put in the `Requires` field
+- `requires_private` same as `requires`, but for the `Requires.private`
field
- `url` a string with a url for the library
- `variables` a list of strings with custom variables to add to the
@@ -50,3 +51,10 @@ keyword arguments.
- `version` a string describing the version of this library
- `d_module_versions` a list of module version flags used when compiling
D sources referred to by this pkg-config file
+
+Since 0.46 a `StaticLibrary` or `SharedLibrary` object can optionally be passed
+as the first positional argument. If one is provided, default values will be
+provided for all required fields of the pc file:
+- `install_dir` is set to the `pkgconfig` folder in the same location as the provided library.
+- `description` is set to the project's name followed by the library's name.
+- `name` is set to the library's name.
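+
+A minimal sketch of this new form (names are illustrative only):
+
+```meson
+pkg = import('pkgconfig')
+mylib = library('mylib', 'mylib.c', version : '1.2.3')
+# name, description and install_dir are filled in from the library.
+pkg.generate(mylib)
+```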
diff --git a/docs/markdown/Project-templates.md b/docs/markdown/Project-templates.md
index d8459c6..5f323bd 100644
--- a/docs/markdown/Project-templates.md
+++ b/docs/markdown/Project-templates.md
@@ -25,6 +25,6 @@ $ ninja -C builddir
```
The generator has many different projects and settings. They can all
-be listed by invoking the command `meson test --help`.
+be listed by invoking the command `meson init --help`.
This feature is available since Meson version 0.45.0.
diff --git a/docs/markdown/Reference-manual.md b/docs/markdown/Reference-manual.md
index af01dff..da4c92b 100644
--- a/docs/markdown/Reference-manual.md
+++ b/docs/markdown/Reference-manual.md
@@ -112,6 +112,24 @@ run. The behavior of this function is identical to `test` with the
exception that there is no `is_parallel` keyword, because benchmarks
are never run in parallel.
+### both_libraries()
+
+``` meson
+ buildtarget both_libraries(library_name, list_of_sources, ...)
+```
+
+Builds both a static and shared library with the given sources. Positional and
+keyword arguments are otherwise the same as for [`library`](#library). Source
+files will be compiled only once and object files will be reused to build both
+shared and static libraries, unless the `b_staticpic` user option or the `pic`
+argument is set to false, in which case sources will be compiled twice.
+
+The returned [buildtarget](#build-target-object) always represents the shared
+library. In addition it supports the following extra methods:
+
+- `get_shared_lib()` returns the shared library build target
+- `get_static_lib()` returns the static library build target
+
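+A hedged sketch of how this might be used (target names are
+illustrative):
+
+```meson
+foo = both_libraries('foo', 'foo.c')
+# Linking against the returned object uses the shared library;
+# get_static_lib() selects the static variant explicitly.
+exe_shared = executable('app-shared', 'main.c', link_with : foo)
+exe_static = executable('app-static', 'main.c', link_with : foo.get_static_lib())
+```
+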
### build_target()
Creates a build target whose type can be set dynamically with the
@@ -168,12 +186,18 @@ These are all the supported keyword arguments:
`output`. Available since v0.41.0.
- `command` as explained above, if specified, Meson does not create
the file itself but rather runs the specified command, which allows
- you to do fully custom file generation
+ you to do fully custom file generation.
+- `format` *(added 0.46.0)* the format of defines. It defaults to `meson`, which substitutes
+`#mesondefine` statements and variables surrounded by `@` characters. You can also use `cmake`
+to replace `#cmakedefine` statements and variables with the `${variable}` syntax. Finally, you can
+use `cmake@`, in which case substitutions will apply to `#cmakedefine` statements and variables
+with the `@variable@` syntax.
- `input` the input file name. If it's not specified in configuration
mode, all the variables in the `configuration:` object (see above)
are written to the `output:` file.
- `install_dir` the subdirectory to install the generated file to
- (e.g. `share/myproject`), if omitted the file is not installed.
+  (e.g. `share/myproject`). If omitted or given an empty string, the
+  file is not installed.
- `output` the output file name (since v0.41.0, may contain
`@PLAINNAME@` or `@BASENAME@` substitutions). In configuration mode,
the permissions of the input file (if it is specified) are copied to
@@ -267,6 +291,8 @@ keyword arguments.
- `include_directories`, the directories to add to header search path
- `link_args`, link arguments to use
- `link_with`, libraries to link against
+ - `link_whole`, libraries to link fully, same as [`executable`](#executable)
+ Since 0.46.0
- `sources`, sources to add to targets (or generated header files
that should be built before sources including them are built)
- `version`, the version of this dependency, such as `1.2.3`
@@ -366,9 +392,8 @@ can be of the following types:
These input files can be sources, objects, libraries, or any other
file. Meson will automatically categorize them based on the extension
and use them accordingly. For instance, sources (`.c`, `.cpp`,
-`.vala`, `.rs`, etc) will be compiled, objects (`.o`, `.obj`) and
-libraries (`.so`, `.dll`, etc) will be linked, and all other files
-(headers, unknown extensions, etc) will be ignored.
+`.vala`, `.rs`, etc) will be compiled, and objects (`.o`, `.obj`) and
+libraries (`.so`, `.dll`, etc) will be linked.
With the Ninja backend, Meson will create a build-time [order-only
dependency](https://ninja-build.org/manual.html#ref_dependencies) on
@@ -433,7 +458,7 @@ be passed to [shared and static libraries](#library).
- `install_dir` override install directory for this file. The value is
relative to the `prefix` specified. F.ex, if you want to install
plugins into a subdir, you'd use something like this: `install_dir :
- get_option('libdir') + '/projectname-1.0'`.
+  join_paths(get_option('libdir'), 'projectname-1.0')`.
- `install_rpath` a string to set the target's rpath to after install
(but *not* before that)
- `objects` list of prebuilt object files (usually for third party
@@ -580,7 +605,7 @@ the following special substitutions:
- `@PLAINNAME@`: the complete input file name, e.g: `foo.c` becomes `foo.c` (unchanged)
- `@BASENAME@`: the base of the input filename, e.g.: `foo.c.y` becomes `foo.c` (extension is removed)
-Each string passed to the `outputs` keyword argument *must* be
+Each string passed to the `output` keyword argument *must* be
constructed using one or both of these two substitutions.
In addition to the above substitutions, the `arguments` keyword
@@ -613,8 +638,13 @@ Obtains the value of the [project build option](Build-options.md) specified in t
Note that the value returned for built-in options that end in `dir` such as
`bindir` and `libdir` is always a path relative to (and inside) the `prefix`.
+
The only exceptions are: `sysconfdir`, `localstatedir`, and `sharedstatedir`
-which will return the value passed during configuration as-is.
+which will return the value passed during configuration as-is, which may be
+absolute, or relative to `prefix`. [`install_dir` arguments](Installing.md)
+handle that as expected, but if you need the absolute path to one of these,
+e.g. to use in a define, you should use `join_paths(get_option('prefix'),
+get_option('localstatedir'))`.
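+
+For example, a sketch of feeding that absolute path into a define (the
+`conf` object and variable name here are illustrative only):
+
+```meson
+localstatedir = join_paths(get_option('prefix'), get_option('localstatedir'))
+conf = configuration_data()
+conf.set_quoted('LOCALSTATEDIR', localstatedir)
+```
+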
### get_variable()
@@ -661,6 +691,10 @@ Note that this function call itself does not add the directories into
the search path, since there is no global search path. For something
like that, see [`add_project_arguments()`](#add_project_arguments).
+See also the `implicit_include_directories` parameter of
+[executable()](#executable), which adds the current source and build
+directories to the include path.
+
Each directory given is converted to two include paths: one that is
relative to the source root and one relative to the build root.
@@ -712,6 +746,8 @@ arguments. The following keyword arguments are supported:
directory. If this is a relative path, it is assumed to be relative
to the prefix.
+ If omitted, the directory defaults to `{datadir}/{projectname}` *(added 0.45.0)*.
+
- `install_mode` specify the file mode in symbolic format and
optionally the owner/uid and group/gid for the installed files. For
example:
@@ -724,6 +760,13 @@ arguments. The following keyword arguments are supported:
To leave any of these three as the default, specify `false`.
+- `rename` if specified, renames each source file to the corresponding file
+  in the `rename` list. Nested paths are allowed and they are joined with
+  `install_dir`. The length of the `rename` list must be equal to the number
+  of sources. *(added 0.46.0)*
+
+See [Installing](Installing.md) for more examples.
+
### install_headers()
``` meson
@@ -865,10 +908,11 @@ dropped. That means that `join_paths('foo', '/bar')` returns `/bar`.
buildtarget library(library_name, list_of_sources, ...)
```
-Builds a library that is either static or shared depending on the
-value of `default_library` user option. You should use this instead of
-[`shared_library`](#shared_library) or
-[`static_library`](#static_library) most of the time. This allows you
+Builds a library that is either static, shared or both depending on the value of
+`default_library` user option. You should use this instead of
+[`shared_library`](#shared_library),
+[`static_library`](#static_library) or
+[`both_libraries`](#both_libraries) most of the time. This allows you
to toggle your entire project (including subprojects) from shared to
static with only one option.
@@ -891,7 +935,8 @@ The keyword arguments for this are the same as for [`executable`](#executable) w
libraries. Defaults to `dylib` for shared libraries and `rlib` for
static libraries.
-`static_library` and `shared_library` also accept these keyword arguments.
+`static_library`, `shared_library` and `both_libraries` also accept these keyword
+arguments.
### message()
@@ -974,14 +1019,20 @@ Project supports the following keyword arguments.
runresult run_command(command, list_of_args)
```
-Runs the command specified in positional arguments. Returns [an opaque
-object](#run-result-object) containing the result of the
-invocation. The script is run from an *unspecified* directory, and
+Runs the command specified in positional arguments.
+`command` can be a string, or the output of [`find_program()`](#find_program),
+[`files()`](#files) or [`configure_file()`](#configure_file), or
+[a compiler object](#compiler-object).
+
+Returns [an opaque object](#run-result-object) containing the result of the
+invocation. The command is run from an *unspecified* directory, and
Meson will set three environment variables `MESON_SOURCE_ROOT`,
`MESON_BUILD_ROOT` and `MESON_SUBDIR` that specify the source
directory, build directory and subdirectory the target was defined in,
respectively.
+See also [External commands](External-commands.md).
+
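+A small illustrative sketch (the interpreter lookup is an assumption
+about the host environment):
+
+```meson
+prog_python = find_program('python3')
+result = run_command(prog_python, '-c', 'print("hello")')
+if result.returncode() != 0
+  error('python command failed')
+endif
+message(result.stdout().strip())
+```
+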
### run_target
``` meson
@@ -1103,6 +1154,33 @@ This function has one keyword argument.
recurse in the subdir if they all return `true` when queried with
`.found()`
+### subdir_done()
+
+``` meson
+ subdir_done()
+```
+
+Stops further interpretation of the Meson script file from the point of
+the invocation. All steps executed up to this point are valid and will
+be executed by Meson. This means that all targets defined before the call
+to `subdir_done()` will be built.
+
+If the current script was called by `subdir()`, the execution returns to
+the calling directory and continues as if the script had reached the end.
+If the current script is the top-level script, Meson configures the
+project as defined up to this point.
+
+Example:
+```meson
+project('example exit', 'cpp')
+executable('exe1', 'exe1.cpp')
+subdir_done()
+executable('exe2', 'exe2.cpp')
+```
+
+The executable `exe1` will be built, while the executable `exe2` is
+not built.
+
### subproject()
``` meson
@@ -1132,15 +1210,25 @@ subproject. However, if you want to use a dependency object from
inside a subproject, an easier way is to use the `fallback:` keyword
argument to [`dependency()`](#dependency).
+[See additional documentation](Subprojects.md).
+
### test()
``` meson
void test(name, executable, ...)
```
-Defines a unit test. Takes two positional arguments, the first is the
-name of this test and the second is the executable to run. Keyword
-arguments are the following.
+Defines a test to run with the test harness. Takes two positional arguments,
+the first is the name of the test and the second is the executable to run.
+The executable can be an [executable build target object](#build-target-object)
+returned by [`executable()`](#executable) or an
+[external program object](#external-program-object) returned by
+[`find_program()`](#find_program). The executable's exit code is used by the
+test harness to record the outcome of the test, for example exit code zero
+indicates success. For more on the Meson test harness protocol read
+[Unit Tests](Unit-tests.md).
+
+Keyword arguments are the following:
- `args` arguments to pass to the executable
@@ -1155,6 +1243,12 @@ arguments are the following.
- `should_fail` when true the test is considered passed if the
executable returns a non-zero return value (i.e. reports an error)
+- `suite` `'label'` (or list of labels `['label1', 'label2']`)
+ attached to this test. The suite name is qualified by a (sub)project
+ name resulting in `(sub)project_name:label`. In the case of a list
+ of strings, the suite names will be `(sub)project_name:label1`,
+ `(sub)project_name:label2`, etc.
+
- `timeout` the amount of seconds the test is allowed to run, a test
that exceeds its time limit is always considered failed, defaults to
30 seconds
@@ -1189,10 +1283,18 @@ be up to date on every build. Keywords are similar to `custom_target`.
Meson will read the contents of `input`, substitute the
`replace_string` with the detected revision number, and write the
-result to `output`. This method returns an opaque
-[`custom_target`](#custom_target) object that can be used as
-source. If you desire more specific behavior than what this command
-provides, you should use `custom_target`.
+result to `output`. This method returns a
+[`custom_target`](#custom_target) object that (as usual) should be
+used to signal dependencies if other targets use the file it
+outputs.
+
+For example, if you generate a header with this and want to use that in
+a build target, you must add the return value to the sources of that
+build target. Without that, Meson will not know the order in which to
+build the targets.
+
+If you desire more specific behavior than what this command provides,
+you should use `custom_target`.
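+
+For instance, a hedged sketch of wiring the generated header into a
+target (file names are illustrative):
+
+```meson
+version_h = vcs_tag(input : 'version.h.in', output : 'version.h')
+# Adding the returned target as a source makes the build order explicit.
+executable('app', 'main.c', version_h)
+```
+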
## Built-in objects
@@ -1599,7 +1701,8 @@ These are objects returned by the [functions listed above](#functions).
### `build target` object
A build target is either an [executable](#executable),
-[shared](#shared_library), [static library](#static_library) or
+[shared library](#shared_library), [static library](#static_library),
+[both shared and static library](#both_libraries) or
[shared module](#shared_module).
- `extract_all_objects()` is same as `extract_objects` but returns all
@@ -1778,7 +1881,7 @@ opaque object representing it.
- `get_variable(name)` fetches the specified variable from inside the
subproject. This is useful to, for instance, get a [declared
- dependency](#declare_dependency) from the subproject.
+ dependency](#declare_dependency) from the [subproject](Subprojects.md).
### `run result` object
diff --git a/docs/markdown/Reference-tables.md b/docs/markdown/Reference-tables.md
index 55e1cd0..5b4d7f0 100644
--- a/docs/markdown/Reference-tables.md
+++ b/docs/markdown/Reference-tables.md
@@ -65,3 +65,21 @@ These are provided by the `.system()` method call.
Any string not listed above is not guaranteed to remain stable in
future releases.
+
+
+## Language arguments parameter names
+
+These are the parameter names for passing language-specific arguments to your build target.
+
+| Language | Parameter name |
+| ----- | ----- |
+| C | c_args |
+| C++ | cpp_args |
+| C# | cs_args |
+| D | d_args |
+| Fortran | fortran_args |
+| Java | java_args |
+| Objective C | objc_args |
+| Objective C++ | objcpp_args |
+| Rust | rust_args |
+| Vala | vala_args |
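+
+For example, a sketch using two of these parameters on a mixed-language
+target (the flags are illustrative):
+
+```meson
+executable('app', 'main.c', 'helper.cpp',
+  c_args : ['-DUSE_FEATURE'],
+  cpp_args : ['-DUSE_FEATURE', '-fno-exceptions'])
+```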
diff --git a/docs/markdown/Release-notes-for-0.45.0.md b/docs/markdown/Release-notes-for-0.45.0.md
index b3df71c..19d65b8 100644
--- a/docs/markdown/Release-notes-for-0.45.0.md
+++ b/docs/markdown/Release-notes-for-0.45.0.md
@@ -1,16 +1,200 @@
---
title: Release 0.45
-short-description: Release notes for 0.45 (preliminary)
+short-description: Release notes for 0.45
...
# New features
-This page is a placeholder for the eventual release notes.
+## Python minimum version is now 3.5
-Notable new features should come with release note updates. This is
-done by creating a file snippet called `snippets/featurename.md` and
-whose contents should look like this:
+Meson will from this version on require Python version 3.5 or newer.
- ## Feature name
+## Config-Tool based dependencies can be specified in a cross file
- A short description explaining the new feature and how it should be used.
+Tools like LLVM and pcap use a config tool for dependencies: a
+script or binary that is run to get configuration information (cflags,
+ldflags, etc.).
+
+These binaries may now be specified in the `binaries` section of a
+cross file.
+
+```ini
+[binaries]
+cc = ...
+llvm-config = '/usr/bin/llvm-config32'
+```
+
+## Visual Studio C# compiler support
+
+In addition to the Mono C# compiler we also support Visual Studio's C#
+compiler. Currently this is only supported on the Ninja backend.
+
+## Removed two deprecated features
+
+The standalone `find_library` function has been a no-op for a long
+time. Starting with this version it becomes a hard error.
+
+There used to be a keywordless version of `run_target` which looked
+like this:
+
+```meson
+run_target('targetname', 'command', 'arg1', 'arg2')
+```
+
+This is now an error. The correct format for this is now:
+
+```meson
+run_target('targetname',
+ command : ['command', 'arg1', 'arg2'])
+```
+
+## Experimental FPGA support
+
+This version adds support for generating, analysing and uploading FPGA
+programs using the [IceStorm
+toolchain](http://www.clifford.at/icestorm/). This support is
+experimental and is currently limited to the `iCE 40` series of FPGA
+chips.
+
+FPGA generation integrates with other parts of Meson seamlessly. As an
+example, [here](https://github.com/jpakkane/lm32) is an example
+project that compiles a simple firmware into Verilog and combines that
+with an lm32 softcore processor.
+
+## Generator outputs can preserve directory structure
+
+Normally when generating files with a generator, Meson flattens the
+input files so they all go in the same directory. Some code
+generators, such as Protocol Buffers, require that the generated files
+have the same directory layout as the input files used to generate
+them. This can now be achieved like this:
+
+```meson
+gen = generator(...) # Compiles protobuf sources
+generated = gen.process('com/mesonbuild/one.proto',
+ 'com/mesonbuild/two.proto',
+ preserve_path_from : meson.current_source_dir())
+```
+
+This would cause the following files to be generated inside the target
+private directory:
+
+ com/mesonbuild/one.pb.h
+ com/mesonbuild/one.pb.cc
+ com/mesonbuild/two.pb.h
+ com/mesonbuild/two.pb.cc
+
+## Hexadecimal integer literals
+
+Hexadecimal integer literals can now be used in build and option files.
+
+```meson
+int_255 = 0xFF
+```
+
+## b_ndebug : if-release
+
+The value `if-release` can be given for the `b_ndebug` project option.
+
+This will cause the `NDEBUG` pre-processor macro to be defined for
+release-type builds, as if the `b_ndebug` project option had been set
+to `true`.
+
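+A hedged sketch of opting into this from `meson.build` (base options
+such as `b_ndebug` can also be set on the command line):
+
+```meson
+project('myproj', 'c',
+  default_options : ['buildtype=release', 'b_ndebug=if-release'])
+```
+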
+## `install_data()` defaults to `{datadir}/{projectname}`
+
+If `install_data()` is not given an `install_dir` keyword argument, the
+target directory defaults to `{datadir}/{projectname}` (e.g.
+`/usr/share/myproj`).
+
+## install_subdir() supports strip_directory
+
+If `strip_directory : true` is given, `install_subdir()` installs the
+directory contents instead of the directory itself, stripping the
+basename of the source directory.
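+
+A brief sketch (the directory names are illustrative only):
+
+```meson
+# Installs the files inside 'assets' directly into share/myapp.
+install_subdir('assets',
+  install_dir : join_paths(get_option('datadir'), 'myapp'),
+  strip_directory : true)
+```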
+
+## Integer options
+
+There is a new integer option type with optional minimum and maximum
+values. It can be specified like this in the `meson_options.txt` file:
+
+```meson
+option('integer_option', type : 'integer', min : 0, max : 5, value : 3)
+```
+
+## New method meson.project_license()
+
+The `meson` builtin object now has a `project_license()` method that
+returns a list of all licenses for the project.
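+
+A small sketch (the license value is illustrative):
+
+```meson
+project('myproj', 'c', license : ['MIT'])
+licenses = meson.project_license()
+message('License: ' + licenses[0])
+```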
+
+## Rust cross-compilation
+
+Cross-compilation is now supported for Rust targets. Like other
+cross-compilers, the Rust binary must be specified in your cross
+file. It should specify a `--target` (as installed by `rustup target`)
+and a custom linker pointing to your C cross-compiler. For example:
+
+```ini
+[binaries]
+c = '/usr/bin/arm-linux-gnueabihf-gcc-7'
+rust = [
+ 'rustc',
+ '--target', 'arm-unknown-linux-gnueabihf',
+ '-C', 'linker=/usr/bin/arm-linux-gnueabihf-gcc-7',
+]
+```
+
+## Rust compiler-private library disambiguation
+
+When building a Rust target with Rust library dependencies, an
+`--extern` argument is now specified to avoid ambiguity between the
+dependency library, and any crates of the same name in `rustc`'s
+private sysroot.
+
+## Project templates
+
+Meson ships with predefined project templates. To start a new project from
+scratch, simply go to an empty directory and type:
+
+ meson init --name=myproject --type=executable --language=c
+
+## Improve test setup selection
+
+Test setups are now identified (also) by the project they belong to
+and it is possible to select the used test setup from a specific
+project. E.g. to use a test setup `some_setup` from project
+`some_project` for all executed tests one can use
+
+ meson test --setup some_project:some_setup
+
+Should one rather want test setups to be used from the same project as
+where the current test itself has been defined, one can use just
+
+ meson test --setup some_setup
+
+In the latter case every (sub)project must have a test setup `some_setup`
+defined in it.
+
+## Can use custom targets as Windows resource files
+
+The `compile_resources()` function of the `windows` module can now be used on custom targets as well as regular files.
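+
+A hedged sketch of what this enables (`generate_rc.py` is a
+hypothetical helper script producing the resource file):
+
+```meson
+windows = import('windows')
+rc_gen = find_program('generate_rc.py')  # hypothetical helper script
+rc = custom_target('gen-rc',
+  output : 'app.rc',
+  command : [rc_gen, '@OUTPUT@'])
+res = windows.compile_resources(rc)
+executable('app', 'main.c', res)
+```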
+
+## Can promote dependencies with wrap command
+
+The `promote` command makes it easy to copy nested dependencies to the top level.
+
+ meson wrap promote scommon
+
+This will search the project tree for a subproject called `scommon`
+and copy it to the top level.
+
+If there are many embedded subprojects with the same name, you have to
+specify which one to promote manually like this:
+
+ meson wrap promote subprojects/s1/subprojects/scommon
+
+## Yielding subproject option to superproject
+
+Normally project options are specific to the current project. However
+sometimes you want to have an option whose value is the same over all
+projects. This can be achieved with the new `yield` keyword for
+options. When set to `true`, getting the value of this option in
+`meson.build` files gets the value from the option with the same name
+in the master project (if such an option exists).
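+
+A minimal sketch of such an option in `meson_options.txt` (the option
+name is illustrative):
+
+```meson
+option('some_option', type : 'string', value : 'default', yield : true)
+```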
diff --git a/docs/markdown/Release-notes-for-0.46.0.md b/docs/markdown/Release-notes-for-0.46.0.md
new file mode 100644
index 0000000..e062459
--- /dev/null
+++ b/docs/markdown/Release-notes-for-0.46.0.md
@@ -0,0 +1,23 @@
+---
+title: Release 0.46
+short-description: Release notes for 0.46 (preliminary)
+...
+
+# New features
+
+This page is a placeholder for the eventual release notes.
+
+Notable new features should come with release note updates. This is
+done by creating a file snippet called `snippets/featurename.md` and
+whose contents should look like this:
+
+ ## Feature name
+
+ A short description explaining the new feature and how it should be used.
+
+## Allow early return from a script
+
+Added the function `subdir_done()`. Its invocation exits the current script at
+the point of invocation. All previously invoked build targets and commands are
+built/executed. All following ones are ignored. If the current script was
+invoked via `subdir()` the parent script continues normally.
diff --git a/docs/markdown/Subprojects.md b/docs/markdown/Subprojects.md
index 0d1442e..ad2aae2 100644
--- a/docs/markdown/Subprojects.md
+++ b/docs/markdown/Subprojects.md
@@ -77,3 +77,27 @@ subproject `b` and have `b` also use `a`.
Meson ships with a dependency system to automatically obtain
dependency subprojects. It is documented in the [Wrap dependency
system manual](Wrap-dependency-system-manual.md).
+
+# Why must all subprojects be inside a single directory?
+
+There are several reasons.
+
+First of all, to maintain any sort of sanity, the system must prevent going
+inside other subprojects with `subdir()` or variations thereof. Having the
+subprojects in well defined places makes this easy. If subprojects could be
+anywhere at all, it would be a lot harder.
+
+Second of all, it is extremely important that end users can easily see what
+subprojects any project has. Because they are in one, and only one, place,
+reviewing them becomes easy.
+
+This is also a question of convention. Since all Meson projects have the same
+layout w.r.t subprojects, switching between projects becomes easier. You don't
+have to spend time on a new project traipsing through the source tree looking
+for subprojects. They are always in the same place.
+
+Finally, if you can have subprojects anywhere, this increases the possibility of
+having many different (possibly incompatible) versions of a dependency in your
+source tree. Then changing some code (such as changing the order you traverse
+directories) may cause a completely different version of the subproject to be
+used by accident.
diff --git a/docs/markdown/Unit-tests.md b/docs/markdown/Unit-tests.md
index afbeaa0..e5e4107 100644
--- a/docs/markdown/Unit-tests.md
+++ b/docs/markdown/Unit-tests.md
@@ -30,7 +30,7 @@ Note how you need to specify multiple values as an array.
Coverage
--
-If you enable coverage measurements by giving Meson the command line flag `-Db_coverage=true`, you can generate coverage reports. Meson will autodetect what coverage generator tools you have installed and will generate the corresponding targets. These targets are `coverage-xml` and `coverage-text` which are both provided by [Gcovr](http://gcovr.com) and `coverage-html`, which requires [Lcov](https://ltp.sourceforge.io/coverage/lcov.php) and [GenHTML](https://linux.die.net/man/1/genhtml).
+If you enable coverage measurements by giving Meson the command line flag `-Db_coverage=true`, you can generate coverage reports. Meson will autodetect what coverage generator tools you have installed and will generate the corresponding targets. These targets are `coverage-xml` and `coverage-text` which are both provided by [Gcovr](http://gcovr.com) and `coverage-html`, which requires [Lcov](https://ltp.sourceforge.io/coverage/lcov.php) and [GenHTML](https://linux.die.net/man/1/genhtml) or [Gcovr](http://gcovr.com) with html support.
The output of these commands is written to the log directory `meson-logs` in your build directory.
@@ -71,6 +71,14 @@ You can also run only a single test by giving its name:
$ meson test testname
```
+Tests belonging to a suite `suite` can be run as follows
+
+```console
+$ meson test --suite (sub)project_name:suite
+```
+
+Since version *0.46*, `(sub)project_name` can be omitted if it is the top-level project.
+
Sometimes you need to run the tests multiple times, which is done like this:
```console
diff --git a/docs/markdown/Users.md b/docs/markdown/Users.md
index e0193de..558378c 100644
--- a/docs/markdown/Users.md
+++ b/docs/markdown/Users.md
@@ -4,7 +4,8 @@ title: Users
# List of projects using Meson
-If you have a project that uses Meson that you want to add to this list, please [file a pull-request](https://github.com/mesonbuild/meson/edit/master/docs/markdown/Users.md) for it. All the software on this list is tested for regressions before release, so it's highly recommended that projects add themselves here.
+If you have a project that uses Meson that you want to add to this list, please [file a pull-request](https://github.com/mesonbuild/meson/edit/master/docs/markdown/Users.md) for it. All the software on this list is tested for regressions before release, so it's highly recommended that projects add themselves here. Some additional projects are
+listed in the [`meson` GitHub topic](https://github.com/topics/meson).
- [AQEMU](https://github.com/tobimensch/aqemu), a Qt GUI for QEMU virtual machines, since version 0.9.3
- [Arduino sample project](https://github.com/jpakkane/mesonarduino)
@@ -19,16 +20,19 @@ If you have a project that uses Meson that you want to add to this list, please
- [fwupd](https://github.com/hughsie/fwupd), a simple daemon to allow session software to update firmware
- [Geary](https://wiki.gnome.org/Apps/Geary), an email application built around conversations, for the GNOME 3 desktop.
- [GLib](https://gitlab.gnome.org/GNOME/glib), cross-platform C library used by GTK+ and GStreamer (not the default yet)
+ - [Gnome Boxes](https://gitlab.gnome.org/GNOME/gnome-boxes), a Gnome hypervisor
- [Gnome Builder](https://gitlab.gnome.org/GNOME/gnome-builder), an IDE for the Gnome platform
- [Gnome MPV](https://github.com/gnome-mpv/gnome-mpv), Gnome frontend to the mpv video player
- [Gnome Recipes](https://gitlab.gnome.org/GNOME/recipes), application for cooking recipes
- [Gnome Software](https://gitlab.gnome.org/GNOME/gnome-software), an app store for Gnome
- [Gnome Twitch](https://github.com/vinszent/gnome-twitch), an app for viewing Twitch streams on Gnome desktop
+ - [Gnome Usage](https://gitlab.gnome.org/GNOME/gnome-usage), a Gnome application for visualizing system resources
- [Graphene](https://ebassi.github.io/graphene/), a thin type library for graphics
- [Grilo](https://git.gnome.org/browse/grilo) and [Grilo plugins](https://git.gnome.org/browse/grilo-plugins), the Grilo multimedia framework
- [GStreamer](https://cgit.freedesktop.org/gstreamer/gstreamer/), multimedia framework (not the default yet)
- [GTK+](https://gitlab.gnome.org/GNOME/gtk), the multi-platform toolkit used by GNOME
- [GtkDApp](https://gitlab.com/csoriano/GtkDApp), an application template for developing Flatpak apps with Gtk+ and D
+ - [GVfs](https://git.gnome.org/browse/gvfs/), a userspace virtual filesystem designed to work with the I/O abstraction of GIO
- [Hardcode-Tray](https://github.com/bil-elmoussaoui/Hardcode-Tray), fixes hardcoded tray icons in Linux
- [HexChat](https://github.com/hexchat/hexchat), a cross-platform IRC client in C
- [IGT](https://cgit.freedesktop.org/xorg/app/intel-gpu-tools/), Linux kernel graphics driver test suite.
diff --git a/docs/markdown/howtox.md b/docs/markdown/howtox.md
index 4e7e220..acc18d7 100644
--- a/docs/markdown/howtox.md
+++ b/docs/markdown/howtox.md
@@ -125,9 +125,9 @@ Install scan-build and configure your project. Then do this:
$ ninja scan-build
```
-You can use the `SCAN_BUILD` environment variable to choose the scan-build executable.
+You can use the `SCANBUILD` environment variable to choose the scan-build executable.
```console
-$ SCAN_BUILD=<your exe> ninja scan-build
+$ SCANBUILD=<your exe> ninja scan-build
```
diff --git a/docs/markdown/index.md b/docs/markdown/index.md
index 6893564..cffd488 100644
--- a/docs/markdown/index.md
+++ b/docs/markdown/index.md
@@ -33,6 +33,16 @@ developers. The first one is the mailing list, which is hosted at
The second way is via IRC. The channel to use is `#mesonbuild` at
[Freenode](https://freenode.net/).
+### [Projects using Meson](http://mesonbuild.com/Users.html)
+
+Many projects out there are using Meson and their communities are also
+a great resource for learning what (and what not!) to do when
+converting to Meson.
+
+[A short list of Meson users can be found here](http://mesonbuild.com/Users.html)
+but there are many more. We would love to hear about your success
+stories and how things could be improved!
+
## Development
All development on Meson is done on the [GitHub
diff --git a/docs/markdown/snippets/altered-logging.md b/docs/markdown/snippets/altered-logging.md
new file mode 100644
index 0000000..4ff9bb0
--- /dev/null
+++ b/docs/markdown/snippets/altered-logging.md
@@ -0,0 +1,5 @@
+## Log output slightly changed
+
+The format of some human-readable diagnostic messages has changed in
+minor ways. In case you are parsing these messages, you may need to
+adjust your code.
diff --git a/docs/markdown/snippets/both-libraries.md b/docs/markdown/snippets/both-libraries.md
new file mode 100644
index 0000000..1632f63
--- /dev/null
+++ b/docs/markdown/snippets/both-libraries.md
@@ -0,0 +1,9 @@
+## Building both shared and static libraries
+
+A new function `both_libraries()` has been added to build both shared and static
+libraries at the same time. Source files will be compiled only once and object
+files will be reused to build both shared and static libraries, unless
+the `b_staticpic` user option or the `pic` argument is set to false, in which case
+sources will be compiled twice.
+
+The returned `buildtarget` object always represents the shared library.
diff --git a/docs/markdown/snippets/compiler-object-run_command.md b/docs/markdown/snippets/compiler-object-run_command.md
new file mode 100644
index 0000000..0308416
--- /dev/null
+++ b/docs/markdown/snippets/compiler-object-run_command.md
@@ -0,0 +1,10 @@
+## Compiler object can now be passed to run_command()
+
+This can be used to run the current compiler with the specified arguments
+to obtain additional information from it.
+One of the use cases is to get the location of development files for
+GCC plugins:
+
+ cc = meson.get_compiler('c')
+ result = run_command(cc, '-print-file-name=plugin')
+ plugin_dev_path = result.stdout().strip()
diff --git a/docs/markdown/snippets/config-tool-cross.md b/docs/markdown/snippets/config-tool-cross.md
deleted file mode 100644
index 1102481..0000000
--- a/docs/markdown/snippets/config-tool-cross.md
+++ /dev/null
@@ -1,13 +0,0 @@
-# Config-Tool based dependencies can be specified in a cross file
-
-Tools like LLVM and pcap use a config tool for dependencies, this is a script
-or binary that is run to get configuration information (cflags, ldflags, etc)
-from.
-
-These binaries may now be specified in the `binaries` section of a cross file.
-
-```dosini
-[binaries]
-cc = ...
-llvm-config = '/usr/bin/llvm-config32'
-```
diff --git a/docs/markdown/snippets/declare_dependency-link_whole.md b/docs/markdown/snippets/declare_dependency-link_whole.md
new file mode 100644
index 0000000..827b1f6
--- /dev/null
+++ b/docs/markdown/snippets/declare_dependency-link_whole.md
@@ -0,0 +1,4 @@
+## declare_dependency() supports link_whole
+
+`declare_dependency()` now supports the `link_whole` parameter.
+`link_whole` propagates to any build target that uses the dependency.
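+
+A minimal sketch, with hypothetical target names:
+
+```meson
+# Every object of the static helper library is pulled into consumers
+helper = static_library('helper', 'helper.c')
+helper_dep = declare_dependency(link_whole : helper,
+                                include_directories : include_directories('.'))
+executable('app', 'main.c', dependencies : helper_dep)
+```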
diff --git a/docs/markdown/snippets/del-old-names.md b/docs/markdown/snippets/del-old-names.md
new file mode 100644
index 0000000..5ac5873
--- /dev/null
+++ b/docs/markdown/snippets/del-old-names.md
@@ -0,0 +1,7 @@
+## Old command names are now errors
+
+Old executable names `mesonintrospect`, `mesonconf`, `mesonrewriter`
+and `mesontest` have been deprecated for a long time. Starting from
+this version they no longer do anything but instead always error
+out. All functionality is available as subcommands in the main `meson`
+binary.
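+
+For reference, the corresponding subcommands look like this (the build
+directory name is only an example):
+
+```console
+$ meson configure builddir     # instead of mesonconf
+$ meson test -C builddir       # instead of mesontest
+$ meson introspect builddir    # instead of mesonintrospect
+```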
diff --git a/docs/markdown/snippets/deprecations.md b/docs/markdown/snippets/deprecations.md
deleted file mode 100644
index adab2e6..0000000
--- a/docs/markdown/snippets/deprecations.md
+++ /dev/null
@@ -1,14 +0,0 @@
-## Removed two deprecated features
-
-The standalone `find_library` function has been a no-op for a long
-time. Starting with this version it becomes a hard error.
-
-There used to be a keywordless version of `run_target` which looked
-like this:
-
- run_target('targetname', 'command', 'arg1', 'arg2')
-
-This is now an error. The correct format for this is now:
-
- run_target('targetname',
- command : ['command', 'arg1', 'arg2'])
diff --git a/docs/markdown/snippets/fpga.md b/docs/markdown/snippets/fpga.md
deleted file mode 100644
index b5e4938..0000000
--- a/docs/markdown/snippets/fpga.md
+++ /dev/null
@@ -1,12 +0,0 @@
-## Experimental FPGA support
-
-This version adds support for generating, analysing and uploading FPGA
-programs using the [IceStorm
-toolchain](http://www.clifford.at/icestorm/). This support is
-experimental and is currently limited to the `iCE 40` series of FPGA
-chips.
-
-FPGA generation integrates with other parts of Meson seamlessly. As an
-example, [here](https://github.com/jpakkane/lm32) is an example
-project that compiles a simple firmware into Verilog and combines that
-with an lm32 softcore processor.
diff --git a/docs/markdown/snippets/gen-subdirs.md b/docs/markdown/snippets/gen-subdirs.md
deleted file mode 100644
index fdb5945..0000000
--- a/docs/markdown/snippets/gen-subdirs.md
+++ /dev/null
@@ -1,21 +0,0 @@
-## Generator outputs can preserve directory structure
-
-Normally when generating files with a generator, Meson flattens the
-input files so they all go in the same directory. Some code
-generators, such as Protocol Buffers, require that the generated files
-have the same directory layout as the input files used to generate
-them. This can now be achieved like this:
-
-```meson
-g = generator(...) # Compiles protobuf sources
-generated = gen.process('com/mesonbuild/one.proto',
- 'com/mesonbuild/two.proto',
- preserve_path_from : meson.current_source_dir())
-
-This would cause the following files to be generated inside the target
-private directory:
-
- com/mesonbuild/one.pb.h
- com/mesonbuild/one.pb.cc
- com/mesonbuild/two.pb.h
- com/mesonbuild/two.pb.cc
diff --git a/docs/markdown/snippets/hexnumbers.md b/docs/markdown/snippets/hexnumbers.md
deleted file mode 100644
index 840c0cb..0000000
--- a/docs/markdown/snippets/hexnumbers.md
+++ /dev/null
@@ -1,5 +0,0 @@
-## Hexadecimal string literals
-
-Hexadecimal integer literals can now be used in build and option files.
-
- int_255 = 0xFF
diff --git a/docs/markdown/snippets/if-release.md b/docs/markdown/snippets/if-release.md
deleted file mode 100644
index 96e12ef..0000000
--- a/docs/markdown/snippets/if-release.md
+++ /dev/null
@@ -1,7 +0,0 @@
-## b_ndebug : if-release
-
-The value `if-release` can be given for the `b_ndebug` project option.
-
-This will make the `NDEBUG` pre-compiler macro to be defined for release
-type builds as if the `b_ndebug` project option had had the value `true`
-defined for it.
diff --git a/docs/markdown/snippets/improved-help.md b/docs/markdown/snippets/improved-help.md
new file mode 100644
index 0000000..db7e852
--- /dev/null
+++ b/docs/markdown/snippets/improved-help.md
@@ -0,0 +1,6 @@
+## "meson help" now shows command line help
+
+Command line parsing is now less surprising. "meson help" is now
+equivalent to "meson --help" and "meson help <subcommand>" is
+equivalent to "meson <subcommand> --help", instead of creating a build
+directory called "help" in these cases.
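+
+For example (using the `configure` subcommand purely as an illustration):
+
+```console
+$ meson help              # same as: meson --help
+$ meson help configure    # same as: meson configure --help
+```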
diff --git a/docs/markdown/snippets/improved-meson-init.md b/docs/markdown/snippets/improved-meson-init.md
new file mode 100644
index 0000000..ec17bc4
--- /dev/null
+++ b/docs/markdown/snippets/improved-meson-init.md
@@ -0,0 +1,19 @@
+## Autogeneration of simple meson.build files
+
+A feature to generate a meson.build file compiling given C/C++ source
+files into a single executable has been added to "meson init". By
+default, it will take all recognizable source files in the current
+directory. You can also specify a list of dependencies with the -d
+flag and automatically invoke a build with the -b flag to check if the
+code builds with those dependencies.
+
+For example,
+
+```console
+$ meson init -fbd sdl2,gl
+```
+
+will look for C or C++ files in the current directory, generate a
+meson.build for them with the dependencies of sdl2 and gl and
+immediately try to build it, overwriting any previous meson.build and
+build directory.
diff --git a/docs/markdown/snippets/install_data-rename.md b/docs/markdown/snippets/install_data-rename.md
new file mode 100644
index 0000000..6378d0f
--- /dev/null
+++ b/docs/markdown/snippets/install_data-rename.md
@@ -0,0 +1,11 @@
+## install_data() supports rename
+
+The `rename` parameter can be used to change the names of the installed files.
+For example, to install
+- `file1.txt` into `share/myapp/dir1/data.txt`
+- `file2.txt` into `share/myapp/dir2/data.txt`
+```meson
+install_data(['file1.txt', 'file2.txt'],
+ rename : ['dir1/data.txt', 'dir2/data.txt'],
+ install_dir : 'share/myapp')
+```
diff --git a/docs/markdown/snippets/install_subdir-strip_directory.md b/docs/markdown/snippets/install_subdir-strip_directory.md
deleted file mode 100644
index 9ddb4a4..0000000
--- a/docs/markdown/snippets/install_subdir-strip_directory.md
+++ /dev/null
@@ -1,4 +0,0 @@
-## install_subdir() supports strip_directory
-
-If strip_directory=true install_subdir() installs directory contents
-instead of directory itself, stripping basename of the source directory.
diff --git a/docs/markdown/snippets/intopt.md b/docs/markdown/snippets/intopt.md
deleted file mode 100644
index daf660b..0000000
--- a/docs/markdown/snippets/intopt.md
+++ /dev/null
@@ -1,6 +0,0 @@
-## Integer options
-
-There is a new integer option type with optional minimum and maximum
-values. It can be specified like this in the `meson_options.txt` file:
-
- option('integer_option', type : 'integer', min : 0, max : 5, value : 3)
diff --git a/docs/markdown/snippets/new-wrap-mode.md b/docs/markdown/snippets/new-wrap-mode.md
new file mode 100644
index 0000000..e33dd83
--- /dev/null
+++ b/docs/markdown/snippets/new-wrap-mode.md
@@ -0,0 +1,3 @@
+## New wrap mode: forcefallback
+
+A new wrap mode was added, `--wrap-mode=forcefallback`. When this is set,
+dependencies for which a fallback was provided will always use it, even
+if an external dependency exists and satisfies the version requirements.
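+
+For example, to force every available fallback when setting up a build
+directory (the directory name is illustrative):
+
+```console
+$ meson --wrap-mode=forcefallback builddir
+```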
diff --git a/docs/markdown/snippets/pkg-config-fix-static-only.md b/docs/markdown/snippets/pkg-config-fix-static-only.md
new file mode 100644
index 0000000..31cd389
--- /dev/null
+++ b/docs/markdown/snippets/pkg-config-fix-static-only.md
@@ -0,0 +1,12 @@
+## Improved generation of pkg-config files for static-only libraries
+
+Previously, pkg-config files generated by the pkgconfig module for static
+libraries with dependencies could only be consumed by dependencies declared
+with `static: true`.
+
+Now the generated file lists the required dependency libraries directly in
+`Requires` and `Libs` for static libraries passed via the `libraries` keyword
+argument.
+
+Projects that install both a static and a shared version of a library should
+pass the result of `both_libraries()` to the pkg-config file generator, or use
+`configure_file()` for more complicated setups.
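+
+A sketch of the `both_libraries()` case (library name and metadata are
+hypothetical):
+
+```meson
+pkg = import('pkgconfig')
+mylib = both_libraries('mylib', 'mylib.c')
+pkg.generate(libraries : mylib,
+             name : 'mylib',
+             version : '1.0',
+             description : 'A hypothetical library.')
+```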
diff --git a/docs/markdown/snippets/pkgconfig-generator.md b/docs/markdown/snippets/pkgconfig-generator.md
new file mode 100644
index 0000000..93920d3
--- /dev/null
+++ b/docs/markdown/snippets/pkgconfig-generator.md
@@ -0,0 +1,14 @@
+## Improvements to pkgconfig module
+
+A `StaticLibrary` or `SharedLibrary` object can optionally be passed
+as the first positional argument of the `generate()` method. If one is
+provided, default values are used for all required fields of the .pc file:
+- `install_dir` is set to the `pkgconfig` folder in the same location as the provided library.
+- `description` is set to the project's name followed by the library's name.
+- `name` is set to the library's name.
+
+Generating a .pc file is now as simple as:
+
+```meson
+pkgconfig.generate(mylib)
+```
diff --git a/docs/markdown/snippets/pkgconfig-requires-non-string.md b/docs/markdown/snippets/pkgconfig-requires-non-string.md
new file mode 100644
index 0000000..abf85b0
--- /dev/null
+++ b/docs/markdown/snippets/pkgconfig-requires-non-string.md
@@ -0,0 +1,5 @@
+## pkgconfig.generate() accepts non-string arguments for requires parameters
+
+The `requires` and `requires_private` parameters of `pkgconfig.generate()`
+now accept pkg-config dependencies and libraries for which pkg-config files
+have been generated.
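+
+For instance, reusing the hypothetical `pkg` and `mylib` objects from the
+examples above:
+
+```meson
+glib_dep = dependency('glib-2.0')
+pkg.generate(mylib,
+             requires : glib_dep,
+             requires_private : 'zlib')
+```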
diff --git a/docs/markdown/snippets/project-license.md b/docs/markdown/snippets/project-license.md
deleted file mode 100644
index 5da2c6a..0000000
--- a/docs/markdown/snippets/project-license.md
+++ /dev/null
@@ -1,4 +0,0 @@
-## New method meson.project_license()
-
-The `meson` builtin object now has a `project_license()` method that returns a
-list of all licenses for the project.
diff --git a/docs/markdown/snippets/rust-cross.md b/docs/markdown/snippets/rust-cross.md
deleted file mode 100644
index 7f18c44..0000000
--- a/docs/markdown/snippets/rust-cross.md
+++ /dev/null
@@ -1,16 +0,0 @@
-## Rust cross-compilation
-
-Cross-compilation is now supported for Rust targets. Like other
-cross-compilers, the Rust binary must be specified in your cross
-file. It should specify a `--target` (as installed by `rustup target`)
-and a custom linker pointing to your C cross-compiler. For example:
-
-```
-[binaries]
-c = '/usr/bin/arm-linux-gnueabihf-gcc-7'
-rust = [
- 'rustc',
- '--target', 'arm-unknown-linux-gnueabihf',
- '-C', 'linker=/usr/bin/arm-linux-gnueabihf-gcc-7',
-]
-```
diff --git a/docs/markdown/snippets/templates.md b/docs/markdown/snippets/templates.md
deleted file mode 100644
index 6f0474d..0000000
--- a/docs/markdown/snippets/templates.md
+++ /dev/null
@@ -1,8 +0,0 @@
-## Project templates
-
-Meson ships with predefined project templates. To start a new project from
-scratch, simply go to an empty directory and type:
-
-```meson
-meson init --name=myproject --type=executable --language=c
-```
diff --git a/docs/markdown/snippets/windows-resources-custom-targets.md b/docs/markdown/snippets/windows-resources-custom-targets.md
deleted file mode 100644
index a2dce3a..0000000
--- a/docs/markdown/snippets/windows-resources-custom-targets.md
+++ /dev/null
@@ -1,3 +0,0 @@
-## Can use custom targets as Windows resource files
-
-The `compile_resources()` function of the `windows` module can now be used on custom targets as well as regular files.
diff --git a/docs/markdown/snippets/wrap_promote.md b/docs/markdown/snippets/wrap_promote.md
deleted file mode 100644
index 20fee47..0000000
--- a/docs/markdown/snippets/wrap_promote.md
+++ /dev/null
@@ -1,11 +0,0 @@
-# Can promote dependencies with wrap command
-
-The `promote` command makes it easy to copy nested dependencies to the top level.
-
- meson wrap promote scommon
-
-This will search the project tree for a subproject called `scommon` and copy it to the top level.
-
-If there are many embedded subprojects with the same name, you have to specify which one to promote manually like this:
-
- meson wrap promote subprojects/s1/subprojects/scommon
diff --git a/docs/markdown/snippets/yield.md b/docs/markdown/snippets/yield.md
deleted file mode 100644
index 3880e67..0000000
--- a/docs/markdown/snippets/yield.md
+++ /dev/null
@@ -1,8 +0,0 @@
-## Yielding subproject option to superproject
-
-Normally project options are specific to the current project. However
-sometimes you want to have an option whose value is the same over all
-projects. This can be achieved with the new `yield` keyword for
-options. When set to `true`, getting the value of this option in
-`meson.build` files gets the value from the option with the same name
-in the master project (if such an option exists).
diff --git a/docs/sitemap.txt b/docs/sitemap.txt
index 144ca4a..844b600 100644
--- a/docs/sitemap.txt
+++ b/docs/sitemap.txt
@@ -65,6 +65,7 @@ index.md
Shipping-prebuilt-binaries-as-wraps.md
fallback-wraptool.md
Release-notes.md
+ Release-notes-for-0.46.0.md
Release-notes-for-0.45.0.md
Release-notes-for-0.44.0.md
Release-notes-for-0.43.0.md
diff --git a/ghwt.py b/ghwt.py
index bf06e19..32db4be 100755
--- a/ghwt.py
+++ b/ghwt.py
@@ -55,7 +55,7 @@ def unpack(sproj, branch, outdir):
print(' expected:', dig)
print(' obtained:', should)
return 1
- spdir = os.path.split(outdir)[0]
+ spdir = os.path.dirname(outdir)
ofilename = os.path.join(spdir, config['wrap-file']['source_filename'])
with open(ofilename, 'wb') as ofile:
ofile.write(us)
diff --git a/man/meson.1 b/man/meson.1
index 4429fa2..19ad737 100644
--- a/man/meson.1
+++ b/man/meson.1
@@ -1,4 +1,4 @@
-.TH MESON "1" "December 2017" "meson 0.44.0" "User Commands"
+.TH MESON "1" "March 2018" "meson 0.45.0" "User Commands"
.SH NAME
meson - a high productivity build system
.SH DESCRIPTION
diff --git a/man/mesonconf.1 b/man/mesonconf.1
index 3a83473..b189663 100644
--- a/man/mesonconf.1
+++ b/man/mesonconf.1
@@ -1,4 +1,4 @@
-.TH MESONCONF "1" "December 2017" "mesonconf 0.44.0" "User Commands"
+.TH MESONCONF "1" "March 2018" "mesonconf 0.45.0" "User Commands"
.SH NAME
mesonconf - a tool to configure Meson builds
.SH DESCRIPTION
diff --git a/man/mesonintrospect.1 b/man/mesonintrospect.1
index 27f39c0..61aa381 100644
--- a/man/mesonintrospect.1
+++ b/man/mesonintrospect.1
@@ -1,4 +1,4 @@
-.TH MESONINTROSPECT "1" "December 2017" "mesonintrospect 0.44.0" "User Commands"
+.TH MESONINTROSPECT "1" "March 2018" "mesonintrospect 0.45.0" "User Commands"
.SH NAME
mesonintrospect - a tool to extract information about a Meson build
.SH DESCRIPTION
diff --git a/man/mesontest.1 b/man/mesontest.1
index d2b2743..9a9f743 100644
--- a/man/mesontest.1
+++ b/man/mesontest.1
@@ -1,4 +1,4 @@
-.TH MESON "1" "December 2017" "meson 0.44.0" "User Commands"
+.TH MESON "1" "March 2018" "meson 0.45.0" "User Commands"
.SH NAME
mesontest - test tool for the Meson build system
.SH DESCRIPTION
diff --git a/man/wraptool.1 b/man/wraptool.1
index 113b33c..93ec457 100644
--- a/man/wraptool.1
+++ b/man/wraptool.1
@@ -1,4 +1,4 @@
-.TH WRAPTOOL "1" "December 2017" "meson 0.44.0" "User Commands"
+.TH WRAPTOOL "1" "March 2018" "meson 0.45.0" "User Commands"
.SH NAME
wraptool - source dependency downloader
.SH DESCRIPTION
diff --git a/meson.py b/meson.py
index 13bc870..abbac6f 100755
--- a/meson.py
+++ b/meson.py
@@ -14,8 +14,8 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-from mesonbuild import mesonmain, mesonlib
-import sys, os, locale
+from mesonbuild import mesonmain
+import sys, os
def main():
# Always resolve the command path so Ninja can find it for regen, tests, etc.
diff --git a/mesonbuild/backend/backends.py b/mesonbuild/backend/backends.py
index 62cc756..916f680 100644
--- a/mesonbuild/backend/backends.py
+++ b/mesonbuild/backend/backends.py
@@ -22,6 +22,7 @@ import json
import subprocess
from ..mesonlib import MesonException
from ..mesonlib import get_compiler_for_source, classify_unity_sources
+from ..mesonlib import File
from ..compilers import CompilerArgs
from collections import OrderedDict
import shlex
@@ -65,9 +66,10 @@ class ExecutableSerialisation:
self.capture = capture
class TestSerialisation:
- def __init__(self, name, suite, fname, is_cross_built, exe_wrapper, is_parallel, cmd_args, env,
- should_fail, timeout, workdir, extra_paths):
+ def __init__(self, name, project, suite, fname, is_cross_built, exe_wrapper, is_parallel,
+ cmd_args, env, should_fail, timeout, workdir, extra_paths):
self.name = name
+ self.project_name = project
self.suite = suite
self.fname = fname
self.is_cross_built = is_cross_built
@@ -88,12 +90,17 @@ class OptionProxy:
class OptionOverrideProxy:
'''Mimic an option list but transparently override
selected option values.'''
- def __init__(self, overrides, options):
+ def __init__(self, overrides, *options):
self.overrides = overrides
self.options = options
def __getitem__(self, option_name):
- base_opt = self.options[option_name]
+ for opts in self.options:
+ if option_name in opts:
+ return self._get_override(option_name, opts[option_name])
+ raise KeyError('Option not found', option_name)
+
+ def _get_override(self, option_name, base_opt):
if option_name in self.overrides:
return OptionProxy(base_opt.name, base_opt.validate_value(self.overrides[option_name]))
return base_opt
@@ -107,9 +114,6 @@ class Backend:
self.processed_targets = {}
self.build_to_src = os.path.relpath(self.environment.get_source_dir(),
self.environment.get_build_dir())
- for t in self.build.targets:
- priv_dirname = self.get_target_private_dir_abs(t)
- os.makedirs(priv_dirname, exist_ok=True)
def get_target_filename(self, t):
if isinstance(t, build.CustomTarget):
@@ -125,6 +129,20 @@ class Backend:
def get_target_filename_abs(self, target):
return os.path.join(self.environment.get_build_dir(), self.get_target_filename(target))
+ def get_builtin_options_for_target(self, target):
+ return OptionOverrideProxy(target.option_overrides,
+ self.environment.coredata.builtins)
+
+ def get_base_options_for_target(self, target):
+ return OptionOverrideProxy(target.option_overrides,
+ self.environment.coredata.builtins,
+ self.environment.coredata.base_options)
+
+ def get_compiler_options_for_target(self, target):
+ return OptionOverrideProxy(target.option_overrides,
+ # no code depends on builtins for now
+ self.environment.coredata.compiler_options)
+
def get_option_for_target(self, option_name, target):
if option_name in target.option_overrides:
override = target.option_overrides[option_name]
@@ -169,12 +187,10 @@ class Backend:
return self.build_to_src
def get_target_private_dir(self, target):
- dirname = os.path.join(self.get_target_dir(target), target.get_basename() + target.type_suffix())
- return dirname
+ return os.path.join(self.get_target_dir(target), target.get_id())
def get_target_private_dir_abs(self, target):
- dirname = os.path.join(self.environment.get_build_dir(), self.get_target_private_dir(target))
- return dirname
+ return os.path.join(self.environment.get_build_dir(), self.get_target_private_dir(target))
def get_target_generated_dir(self, target, gensrc, src):
"""
@@ -312,7 +328,7 @@ class Backend:
def rpaths_for_bundled_shared_libraries(self, target):
paths = []
for dep in target.external_deps:
- if isinstance(dep, dependencies.ExternalLibrary):
+ if isinstance(dep, (dependencies.ExternalLibrary, dependencies.PkgConfigDependency)):
la = dep.link_args
if len(la) == 1 and os.path.isabs(la[0]):
# The only link argument is an absolute path to a library file.
@@ -399,11 +415,20 @@ class Backend:
objname = objname.replace('/', '_').replace('\\', '_')
objpath = os.path.join(proj_dir_to_build_root, targetdir, objname)
return [objpath]
- for osrc in extobj.srclist:
+
+ sources = list(extobj.srclist)
+ for gensrc in extobj.genlist:
+ for s in gensrc.get_outputs():
+ path = self.get_target_generated_dir(extobj.target, gensrc, s)
+ dirpart, fnamepart = os.path.split(path)
+ sources.append(File(True, dirpart, fnamepart))
+
+ for osrc in sources:
objname = self.object_filename_from_source(extobj.target, osrc, False)
if objname:
objpath = os.path.join(proj_dir_to_build_root, targetdir, objname)
result.append(objpath)
+
return result
def get_pch_include_args(self, compiler, target):
@@ -448,7 +473,7 @@ class Backend:
# starting from hard-coded defaults followed by build options and so on.
commands = CompilerArgs(compiler)
- copt_proxy = OptionOverrideProxy(target.option_overrides, self.environment.coredata.compiler_options)
+ copt_proxy = self.get_compiler_options_for_target(target)
# First, the trivial ones that are impossible to override.
#
# Add -nostdinc/-nostdinc++ if needed; can't be overridden
@@ -518,9 +543,8 @@ class Backend:
# Fortran requires extra include directives.
if compiler.language == 'fortran':
for lt in target.link_targets:
- priv_dir = os.path.join(self.get_target_dir(lt), lt.get_basename() + lt.type_suffix())
- incflag = compiler.get_include_args(priv_dir, False)
- commands += incflag
+ priv_dir = self.get_target_private_dir(lt)
+ commands += compiler.get_include_args(priv_dir, False)
return commands
def build_target_link_arguments(self, compiler, deps):
@@ -603,9 +627,9 @@ class Backend:
cmd_args.append(self.get_target_filename(a))
else:
raise MesonException('Bad object in test command.')
- ts = TestSerialisation(t.get_name(), t.suite, cmd, is_cross, exe_wrapper,
- t.is_parallel, cmd_args, t.env, t.should_fail,
- t.timeout, t.workdir, extra_paths)
+ ts = TestSerialisation(t.get_name(), t.project_name, t.suite, cmd, is_cross,
+ exe_wrapper, t.is_parallel, cmd_args, t.env,
+ t.should_fail, t.timeout, t.workdir, extra_paths)
arr.append(ts)
pickle.dump(arr, datafile)
diff --git a/mesonbuild/backend/ninjabackend.py b/mesonbuild/backend/ninjabackend.py
index a52d1f7..c941319 100644
--- a/mesonbuild/backend/ninjabackend.py
+++ b/mesonbuild/backend/ninjabackend.py
@@ -12,8 +12,9 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-import os, pickle, re, shlex, subprocess, sys
+import os, pickle, re, shlex, subprocess
from collections import OrderedDict
+import itertools
from pathlib import PurePath
from . import backends
@@ -26,7 +27,7 @@ from .. import compilers
from ..compilers import CompilerArgs
from ..linkers import ArLinker
from ..mesonlib import File, MesonException, OrderedSet
-from ..mesonlib import get_compiler_for_source
+from ..mesonlib import get_compiler_for_source, has_path_sep
from .backends import CleanTrees, InstallData
from ..build import InvalidArguments
@@ -103,7 +104,8 @@ class NinjaBuildElement:
# This is the only way I could find to make this work on all
# platforms including Windows command shell. Slash is a dir separator
# on Windows, too, so all characters are unambiguous and, more importantly,
- # do not require quoting.
+        # do not require quoting, unless explicitly specified, which is necessary for
+ # the csc compiler.
line = line.replace('\\', '/')
outfile.write(line)
@@ -114,7 +116,6 @@ class NinjaBuildElement:
(name, elems) = e
should_quote = name not in raw_names
line = ' %s = ' % name
- noq_templ = "%s"
newelems = []
for i in elems:
if not should_quote or i == '&&': # Hackety hack hack
@@ -263,7 +264,7 @@ int dummy;
vala_header = File.from_built_file(self.get_target_dir(target), target.vala_header)
header_deps.append(vala_header)
# Recurse and find generated headers
- for dep in target.link_targets:
+ for dep in itertools.chain(target.link_targets, target.link_whole_targets):
if isinstance(dep, (build.StaticLibrary, build.SharedLibrary)):
header_deps += self.get_generated_headers(dep)
return header_deps
@@ -473,8 +474,7 @@ int dummy;
def process_target_dependencies(self, target, outfile):
for t in target.get_dependencies():
- tname = t.get_basename() + t.type_suffix()
- if tname not in self.processed_targets:
+ if t.get_id() not in self.processed_targets:
self.generate_target(t, outfile)
def custom_target_generator_inputs(self, target, outfile):
@@ -613,13 +613,19 @@ int dummy;
self.create_target_alias(target_name, outfile)
self.processed_targets[target.get_id()] = True
+ def generate_coverage_command(self, elem, outputs):
+ elem.add_item('COMMAND', self.environment.get_build_command() +
+ ['--internal', 'coverage'] +
+ outputs +
+ [self.environment.get_source_dir(),
+ os.path.join(self.environment.get_source_dir(),
+ self.build.get_subproject_dir()),
+ self.environment.get_build_dir(),
+ self.environment.get_log_dir()])
+
def generate_coverage_rules(self, outfile):
e = NinjaBuildElement(self.all_outputs, 'meson-coverage', 'CUSTOM_COMMAND', 'PHONY')
- e.add_item('COMMAND', self.environment.get_build_command() +
- ['--internal', 'coverage',
- self.environment.get_source_dir(),
- self.environment.get_build_dir(),
- self.environment.get_log_dir()])
+ self.generate_coverage_command(e, [])
e.add_item('description', 'Generates coverage reports.')
e.write(outfile)
# Alias that runs the target defined above
@@ -627,43 +633,26 @@ int dummy;
self.generate_coverage_legacy_rules(outfile)
def generate_coverage_legacy_rules(self, outfile):
- (gcovr_exe, lcov_exe, genhtml_exe) = environment.find_coverage_tools()
- added_rule = False
- if gcovr_exe:
- added_rule = True
- elem = NinjaBuildElement(self.all_outputs, 'meson-coverage-xml', 'CUSTOM_COMMAND', '')
- elem.add_item('COMMAND', [gcovr_exe, '-x', '-r', self.environment.get_source_dir(),
- '-o', os.path.join(self.environment.get_log_dir(), 'coverage.xml')])
- elem.add_item('DESC', 'Generating XML coverage report.')
- elem.write(outfile)
- # Alias that runs the target defined above
- self.create_target_alias('meson-coverage-xml', outfile)
- elem = NinjaBuildElement(self.all_outputs, 'meson-coverage-text', 'CUSTOM_COMMAND', '')
- elem.add_item('COMMAND', [gcovr_exe, '-r', self.environment.get_source_dir(),
- '-o', os.path.join(self.environment.get_log_dir(), 'coverage.txt')])
- elem.add_item('DESC', 'Generating text coverage report.')
- elem.write(outfile)
- # Alias that runs the target defined above
- self.create_target_alias('meson-coverage-text', outfile)
- if lcov_exe and genhtml_exe:
- added_rule = True
- htmloutdir = os.path.join(self.environment.get_log_dir(), 'coveragereport')
- covinfo = os.path.join(self.environment.get_log_dir(), 'coverage.info')
- phony_elem = NinjaBuildElement(self.all_outputs, 'meson-coverage-html', 'phony', os.path.join(htmloutdir, 'index.html'))
- phony_elem.write(outfile)
- # Alias that runs the target defined above
- self.create_target_alias('meson-coverage-html', outfile)
- elem = NinjaBuildElement(self.all_outputs, os.path.join(htmloutdir, 'index.html'), 'CUSTOM_COMMAND', '')
- command = [lcov_exe, '--directory', self.environment.get_build_dir(),
- '--capture', '--output-file', covinfo, '--no-checksum',
- '&&', genhtml_exe, '--prefix', self.environment.get_build_dir(),
- '--output-directory', htmloutdir, '--title', 'Code coverage',
- '--legend', '--show-details', covinfo]
- elem.add_item('COMMAND', command)
- elem.add_item('DESC', 'Generating HTML coverage report.')
- elem.write(outfile)
- if not added_rule:
- mlog.warning('coverage requested but neither gcovr nor lcov/genhtml found.')
+ e = NinjaBuildElement(self.all_outputs, 'meson-coverage-xml', 'CUSTOM_COMMAND', 'PHONY')
+ self.generate_coverage_command(e, ['--xml'])
+ e.add_item('description', 'Generates XML coverage report.')
+ e.write(outfile)
+ # Alias that runs the target defined above
+ self.create_target_alias('meson-coverage-xml', outfile)
+
+ e = NinjaBuildElement(self.all_outputs, 'meson-coverage-text', 'CUSTOM_COMMAND', 'PHONY')
+ self.generate_coverage_command(e, ['--text'])
+ e.add_item('description', 'Generates text coverage report.')
+ e.write(outfile)
+ # Alias that runs the target defined above
+ self.create_target_alias('meson-coverage-text', outfile)
+
+ e = NinjaBuildElement(self.all_outputs, 'meson-coverage-html', 'CUSTOM_COMMAND', 'PHONY')
+ self.generate_coverage_command(e, ['--html'])
+ e.add_item('description', 'Generates HTML coverage report.')
+ e.write(outfile)
+ # Alias that runs the target defined above
+ self.create_target_alias('meson-coverage-html', outfile)
def generate_install(self, outfile):
install_data_file = os.path.join(self.environment.get_scratch_dir(), 'install.dat')
@@ -839,11 +828,12 @@ int dummy;
for de in data:
assert(isinstance(de, build.Data))
subdir = de.install_dir
- for f in de.sources:
- assert(isinstance(f, mesonlib.File))
- plain_f = os.path.basename(f.fname)
- dstabs = os.path.join(subdir, plain_f)
- i = [f.absolute_path(srcdir, builddir), dstabs, de.install_mode]
+ if not subdir:
+ subdir = os.path.join(self.environment.get_datadir(), self.interpreter.build.project_name)
+ for src_file, dst_name in zip(de.sources, de.rename):
+ assert(isinstance(src_file, mesonlib.File))
+ dst_abs = os.path.join(subdir, dst_name)
+ i = [src_file.absolute_path(srcdir, builddir), dst_abs, de.install_mode]
d.data.append(i)
def generate_subdir_install(self, d):
@@ -986,7 +976,7 @@ int dummy;
outname_rel = os.path.join(self.get_target_dir(target), fname)
src_list = target.get_sources()
compiler = target.compilers['cs']
- rel_srcs = [s.rel_to_builddir(self.build_to_src) for s in src_list]
+ rel_srcs = [os.path.normpath(s.rel_to_builddir(self.build_to_src)) for s in src_list]
deps = []
commands = CompilerArgs(compiler, target.extra_args.get('cs', []))
commands += compiler.get_buildtype_args(buildtype)
@@ -1012,8 +1002,8 @@ int dummy;
for rel_src in generated_sources.keys():
dirpart, fnamepart = os.path.split(rel_src)
if rel_src.lower().endswith('.cs'):
- rel_srcs.append(rel_src)
- deps.append(rel_src)
+ rel_srcs.append(os.path.normpath(rel_src))
+ deps.append(os.path.normpath(rel_src))
for dep in target.get_external_deps():
commands.extend_direct(dep.get_link_args())
@@ -1062,7 +1052,7 @@ int dummy;
the build directory.
"""
result = OrderedSet()
- for dep in target.link_targets + target.link_whole_targets:
+ for dep in itertools.chain(target.link_targets, target.link_whole_targets):
for i in dep.sources:
if hasattr(i, 'fname'):
i = i.fname
@@ -1274,6 +1264,10 @@ int dummy;
linkdirs = OrderedDict()
for d in target.link_targets:
linkdirs[d.subdir] = True
+ # specify `extern CRATE_NAME=OUTPUT_FILE` for each Rust
+ # dependency, so that collisions with libraries in rustc's
+ # sysroot don't cause ambiguity
+ args += ['--extern', '{}={}'.format(d.name, os.path.join(d.subdir, d.filename))]
for d in linkdirs.keys():
if d == '':
d = '.'
@@ -1292,7 +1286,7 @@ int dummy;
# Set runtime-paths so we can run executables without needing to set
# LD_LIBRARY_PATH, etc in the environment. Doesn't work on Windows.
- if '/' in target.name or '\\' in target.name:
+ if has_path_sep(target.name):
# Target names really should not have slashes in them, but
# unfortunately we did not check for that and some downstream projects
# now have them. Once slashes are forbidden, remove this bit.
@@ -1582,7 +1576,15 @@ int dummy;
def generate_cs_compile_rule(self, compiler, outfile):
rule = 'rule %s_COMPILER\n' % compiler.get_language()
invoc = ' '.join([ninja_quote(i) for i in compiler.get_exelist()])
- command = ' command = %s $ARGS $in\n' % invoc
+
+ if mesonlib.is_windows():
+ command = ''' command = {executable} @$out.rsp
+ rspfile = $out.rsp
+ rspfile_content = $ARGS $in
+'''.format(executable=invoc)
+ else:
+ command = ' command = %s $ARGS $in\n' % invoc
+
description = ' description = Compiling C Sharp target $out.\n'
outfile.write(rule)
outfile.write(command)
@@ -1837,7 +1839,6 @@ rule FORTRAN_DEP_HACK
infilelist = genlist.get_inputs()
outfilelist = genlist.get_outputs()
extra_dependencies = [os.path.join(self.build_to_src, i) for i in genlist.extra_depends]
- source_target_dir = self.get_target_source_dir(target)
for i in range(len(infilelist)):
if len(generator.outputs) == 1:
sole_output = os.path.join(self.get_target_private_dir(target), outfilelist[i])
@@ -1862,7 +1863,6 @@ rule FORTRAN_DEP_HACK
# We have consumed output files, so drop them from the list of remaining outputs.
if sole_output == '':
outfilelist = outfilelist[len(generator.outputs):]
- relout = self.get_target_private_dir(target)
args = self.replace_paths(target, args, override_subdir=subdir)
cmdlist = exe_arr + self.replace_extra_args(args, genlist)
if generator.capture:
@@ -2089,8 +2089,7 @@ rule FORTRAN_DEP_HACK
return incs
def _generate_single_compile(self, target, compiler, is_generated=False):
- base_proxy = backends.OptionOverrideProxy(target.option_overrides,
- self.environment.coredata.base_options)
+ base_proxy = self.get_base_options_for_target(target)
# Create an empty commands list, and start adding arguments from
# various sources in the order in which they must override each other
commands = CompilerArgs(compiler)
@@ -2122,7 +2121,10 @@ rule FORTRAN_DEP_HACK
# Hence, we must reverse the list so that the order is preserved.
for i in reversed(target.get_include_dirs()):
basedir = i.get_curdir()
- for d in i.get_incdirs():
+            # Iterate include dirs in reverse order, because each -Ipath is
+            # prepended to the argument list; without reversing, the flags
+            # would end up in reverse order.
+ for d in reversed(i.get_incdirs()):
# Avoid superfluous '/.' at the end of paths when d is '.'
if d not in ('', '.'):
expdir = os.path.join(basedir, d)
@@ -2149,6 +2151,11 @@ rule FORTRAN_DEP_HACK
# near the end since these are supposed to override everything else.
commands += self.escape_extra_args(compiler,
target.get_extra_args(compiler.get_language()))
+
+ # D specific additional flags
+ if compiler.language == 'd':
+ commands += compiler.get_feature_args(target.d_features, self.build_to_src)
+
# Add source dir and build dir. Project-specific and target-specific
# include paths must override per-target compile args, include paths
# from external dependencies, internal dependencies, and from
@@ -2272,7 +2279,7 @@ rule FORTRAN_DEP_HACK
# FIXME FIXME: The usage of this is a terrible and unreliable hack
if isinstance(fname, File):
return fname.subdir != ''
- return '/' in fname or '\\' in fname
+ return has_path_sep(fname)
# Fortran is a bit weird (again). When you link against a library, just compiling a source file
# requires the mod files that are output when single files are built. To do this right we would need to
@@ -2318,7 +2325,7 @@ rule FORTRAN_DEP_HACK
pch = target.get_pch(lang)
if not pch:
continue
- if '/' not in pch[0] or '/' not in pch[-1]:
+ if not has_path_sep(pch[0]) or not has_path_sep(pch[-1]):
msg = 'Precompiled header of {!r} must not be in the same ' \
'directory as source, please put it in a subdirectory.' \
''.format(target.get_basename())
@@ -2403,6 +2410,74 @@ rule FORTRAN_DEP_HACK
target_args = self.build_target_link_arguments(linker, target.link_whole_targets)
return linker.get_link_whole_for(target_args) if len(target_args) else []
+ def guess_library_absolute_path(self, libname, search_dirs, prefixes, suffixes):
+ for directory in search_dirs:
+ for suffix in suffixes:
+ for prefix in prefixes:
+ trial = os.path.join(directory, prefix + libname + '.' + suffix)
+ if os.path.isfile(trial):
+ return trial
+
+ def guess_external_link_dependencies(self, linker, target, commands, internal):
+ # Ideally the linker would generate dependency information that could be used.
+ # But that has 2 problems:
+ # * currently ld can not create dependency information in a way that ninja can use:
+ # https://sourceware.org/bugzilla/show_bug.cgi?id=22843
+ # * Meson optimizes libraries from the same build using the symbol extractor.
+ # Just letting ninja use ld generated dependencies would undo this optimization.
+ search_dirs = []
+ libs = []
+ absolute_libs = []
+
+ build_dir = self.environment.get_build_dir()
+ # the following loop sometimes consumes two items from command in one pass
+ it = iter(commands)
+ for item in it:
+ if item in internal and not item.startswith('-'):
+ continue
+
+ if item.startswith('-L'):
+ if len(item) > 2:
+ path = item[2:]
+ else:
+ try:
+ path = next(it)
+ except StopIteration:
+ mlog.warning("Generated linker command has -L argument without following path")
+ break
+ if not os.path.isabs(path):
+ path = os.path.join(build_dir, path)
+ search_dirs.append(path)
+ elif item.startswith('-l'):
+ if len(item) > 2:
+ libs.append(item[2:])
+ else:
+ try:
+ libs.append(next(it))
+ except StopIteration:
+ mlog.warning("Generated linker command has '-l' argument without following library name")
+ break
+ elif os.path.isabs(item) and self.environment.is_library(item) and os.path.isfile(item):
+ absolute_libs.append(item)
+
+ guessed_dependencies = []
+ # TODO The get_library_naming requirement currently excludes link targets that use d or fortran as their main linker
+ if hasattr(linker, 'get_library_naming'):
+ search_dirs += linker.get_library_dirs()
+ prefixes_static, suffixes_static = linker.get_library_naming(self.environment, 'static', strict=True)
+ prefixes_shared, suffixes_shared = linker.get_library_naming(self.environment, 'shared', strict=True)
+ for libname in libs:
+ # be conservative and record most likely shared and static resolution, because we don't know exactly
+ # which one the linker will prefer
+ static_resolution = self.guess_library_absolute_path(libname, search_dirs, prefixes_static, suffixes_static)
+ shared_resolution = self.guess_library_absolute_path(libname, search_dirs, prefixes_shared, suffixes_shared)
+ if static_resolution:
+ guessed_dependencies.append(os.path.realpath(static_resolution))
+ if shared_resolution:
+ guessed_dependencies.append(os.path.realpath(shared_resolution))
+
+ return guessed_dependencies + absolute_libs
+
def generate_link(self, target, outfile, outname, obj_list, linker, extra_args=[]):
if isinstance(target, build.StaticLibrary):
linker_base = 'STATIC'
@@ -2469,7 +2544,8 @@ rule FORTRAN_DEP_HACK
dependencies = []
else:
dependencies = target.get_dependencies()
- commands += self.build_target_link_arguments(linker, dependencies)
+ internal = self.build_target_link_arguments(linker, dependencies)
+ commands += internal
# For 'automagic' deps: Boost and GTest. Also dependency('threads').
# pkg-config puts the thread flags itself via `Cflags:`
for d in target.external_deps:
@@ -2493,9 +2569,13 @@ rule FORTRAN_DEP_HACK
# symbols from those can be found here. This is needed when the
# *_winlibs that we want to link to are static mingw64 libraries.
commands += linker.get_option_link_args(self.environment.coredata.compiler_options)
+
+ dep_targets = []
+ dep_targets.extend(self.guess_external_link_dependencies(linker, target, commands, internal))
+
# Set runtime-paths so we can run executables without needing to set
# LD_LIBRARY_PATH, etc in the environment. Doesn't work on Windows.
- if '/' in target.name or '\\' in target.name:
+ if has_path_sep(target.name):
# Target names really should not have slashes in them, but
# unfortunately we did not check for that and some downstream projects
# now have them. Once slashes are forbidden, remove this bit.
@@ -2516,7 +2596,7 @@ rule FORTRAN_DEP_HACK
# Convert from GCC-style link argument naming to the naming used by the
# current compiler.
commands = commands.to_native()
- dep_targets = [self.get_dependency_filename(t) for t in dependencies]
+ dep_targets.extend([self.get_dependency_filename(t) for t in dependencies])
dep_targets.extend([self.get_dependency_filename(t)
for t in target.link_depends])
elem = NinjaBuildElement(self.all_outputs, outname, linker_rule, obj_list)
@@ -2676,3 +2756,9 @@ rule FORTRAN_DEP_HACK
elem = NinjaBuildElement(self.all_outputs, deps, 'phony', '')
elem.write(outfile)
+
+def load(build_dir):
+ filename = os.path.join(build_dir, 'meson-private', 'install.dat')
+ with open(filename, 'rb') as f:
+ obj = pickle.load(f)
+ return obj
diff --git a/mesonbuild/backend/vs2010backend.py b/mesonbuild/backend/vs2010backend.py
index 057e7c9..e9a3519 100644
--- a/mesonbuild/backend/vs2010backend.py
+++ b/mesonbuild/backend/vs2010backend.py
@@ -12,7 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-import os, sys
+import os
import pickle
import xml.dom.minidom
import xml.etree.ElementTree as ET
@@ -304,6 +304,7 @@ class Vs2010Backend(backends.Backend):
projlist = []
for name, target in self.build.targets.items():
outdir = os.path.join(self.environment.get_build_dir(), self.get_target_dir(target))
+ os.makedirs(outdir, exist_ok=True)
fname = name + '.vcxproj'
relname = os.path.join(target.subdir, fname)
projfile = os.path.join(outdir, fname)
@@ -730,7 +731,7 @@ class Vs2010Backend(backends.Backend):
# generate_single_compile() and generate_basic_compiler_args()
for l, comp in target.compilers.items():
if l in file_args:
- file_args[l] += compilers.get_base_compile_args(self.environment.coredata.base_options, comp)
+ file_args[l] += compilers.get_base_compile_args(self.get_base_options_for_target(target), comp)
file_args[l] += comp.get_option_compile_args(self.environment.coredata.compiler_options)
# Add compile args added using add_project_arguments()
for l, args in self.build.projects_args.get(target.subproject, {}).items():
@@ -768,7 +769,8 @@ class Vs2010Backend(backends.Backend):
# These are per-target, but we still add them as per-file because we
# need them to be looked in first.
for d in reversed(target.get_include_dirs()):
- for i in d.get_incdirs():
+ # reversed is used to keep order of includes
+ for i in reversed(d.get_incdirs()):
curdir = os.path.join(d.get_curdir(), i)
args.append('-I' + self.relpath(curdir, target.subdir)) # build dir
args.append('-I' + os.path.join(proj_to_src_root, curdir)) # src dir
@@ -944,7 +946,8 @@ class Vs2010Backend(backends.Backend):
self.add_project_reference(root, tvcxproj, tid)
else:
# Other libraries go into AdditionalDependencies
- additional_links.append(linkname)
+ if linkname not in additional_links:
+ additional_links.append(linkname)
for lib in self.get_custom_target_provided_libraries(target):
additional_links.append(self.relpath(lib, self.get_target_dir(target)))
additional_objects = []
diff --git a/mesonbuild/backend/xcodebackend.py b/mesonbuild/backend/xcodebackend.py
index 3ae31e4..9a9f88b 100644
--- a/mesonbuild/backend/xcodebackend.py
+++ b/mesonbuild/backend/xcodebackend.py
@@ -16,7 +16,7 @@ from . import backends
from .. import build
from .. import dependencies
from .. import mesonlib
-import uuid, os, sys
+import uuid, os
from ..mesonlib import MesonException
@@ -565,9 +565,7 @@ class XCodeBackend(backends.Backend):
self.write_line(');')
self.write_line('runOnlyForDeploymentPostprocessing = 0;')
self.write_line('shellPath = /bin/sh;')
- script_root = self.environment.get_script_dir()
- test_script = os.path.join(script_root, 'meson_test.py')
- cmd = mesonlib.python_command + [test_script, test_data, '--wd', self.environment.get_build_dir()]
+ cmd = mesonlib.meson_command + ['test', test_data, '-C', self.environment.get_build_dir()]
cmdstr = ' '.join(["'%s'" % i for i in cmd])
self.write_line('shellScript = "%s";' % cmdstr)
self.write_line('showEnvVarsInLog = 0;')
@@ -708,7 +706,7 @@ class XCodeBackend(backends.Backend):
if isinstance(target, build.SharedLibrary):
ldargs = ['-dynamiclib', '-Wl,-headerpad_max_install_names'] + dep_libs
install_path = os.path.join(self.environment.get_build_dir(), target.subdir, buildtype)
- dylib_version = target.version
+ dylib_version = target.soversion
else:
ldargs = dep_libs
install_path = ''
diff --git a/mesonbuild/build.py b/mesonbuild/build.py
index 400b9e5..8d16c95 100644
--- a/mesonbuild/build.py
+++ b/mesonbuild/build.py
@@ -15,6 +15,7 @@
import copy, os, re
from collections import OrderedDict
import itertools, pathlib
+import pickle
from . import environment
from . import dependencies
@@ -22,68 +23,67 @@ from . import mlog
from .mesonlib import File, MesonException, listify, extract_as_list
from .mesonlib import typeslistify, stringlistify, classify_unity_sources
from .mesonlib import get_filenames_templates_dict, substitute_values
-from .mesonlib import for_windows, for_darwin, for_cygwin, for_android
+from .mesonlib import for_windows, for_darwin, for_cygwin, for_android, has_path_sep
from .compilers import is_object, clike_langs, sort_clike, lang_suffixes
-known_basic_kwargs = {'install': True,
- 'c_pch': True,
- 'cpp_pch': True,
- 'c_args': True,
- 'objc_args': True,
- 'objcpp_args': True,
- 'cpp_args': True,
- 'cs_args': True,
- 'vala_args': True,
- 'fortran_args': True,
- 'd_args': True,
- 'd_import_dirs': True,
- 'd_unittest': True,
- 'd_module_versions': True,
- 'java_args': True,
- 'rust_args': True,
- 'link_args': True,
- 'link_depends': True,
- 'link_with': True,
- 'link_whole': True,
- 'implicit_include_directories': True,
- 'include_directories': True,
- 'dependencies': True,
- 'install_dir': True,
- 'main_class': True,
- 'name_suffix': True,
- 'gui_app': True,
- 'extra_files': True,
- 'install_rpath': True,
- 'build_rpath': True,
- 'resources': True,
- 'sources': True,
- 'objects': True,
- 'native': True,
- 'build_by_default': True,
- 'override_options': True,
- }
-
-# These contain kwargs supported by both static and shared libraries. These are
-# combined here because a library() call might be shared_library() or
-# static_library() at runtime based on the configuration.
-# FIXME: Find a way to pass that info down here so we can have proper target
-# kwargs checking when specifically using shared_library() or static_library().
-known_lib_kwargs = known_basic_kwargs.copy()
-known_lib_kwargs.update({'version': True, # Only for shared libs
- 'soversion': True, # Only for shared libs
- 'name_prefix': True,
- 'vs_module_defs': True, # Only for shared libs
- 'vala_header': True,
- 'vala_vapi': True,
- 'vala_gir': True,
- 'pic': True, # Only for static libs
- 'rust_crate_type': True, # Only for Rust libs
- })
-
-known_exe_kwargs = known_basic_kwargs.copy()
-known_exe_kwargs.update({'implib': True,
- 'export_dynamic': True
- })
+pch_kwargs = set(['c_pch', 'cpp_pch'])
+
+lang_arg_kwargs = set([
+ 'c_args',
+ 'cpp_args',
+ 'd_args',
+ 'd_import_dirs',
+ 'd_unittest',
+ 'd_module_versions',
+ 'fortran_args',
+ 'java_args',
+ 'objc_args',
+ 'objcpp_args',
+ 'rust_args',
+ 'vala_args',
+ 'cs_args',
+])
+
+vala_kwargs = set(['vala_header', 'vala_gir', 'vala_vapi'])
+rust_kwargs = set(['rust_crate_type'])
+cs_kwargs = set(['resources', 'cs_args'])
+
+buildtarget_kwargs = set([
+ 'build_by_default',
+ 'build_rpath',
+ 'dependencies',
+ 'extra_files',
+ 'gui_app',
+ 'link_with',
+ 'link_whole',
+ 'link_args',
+ 'link_depends',
+ 'implicit_include_directories',
+ 'include_directories',
+ 'install',
+ 'install_rpath',
+ 'install_dir',
+ 'name_prefix',
+ 'name_suffix',
+ 'native',
+ 'objects',
+ 'override_options',
+ 'sources',
+])
+
+known_build_target_kwargs = (
+ buildtarget_kwargs |
+ lang_arg_kwargs |
+ pch_kwargs |
+ vala_kwargs |
+ rust_kwargs |
+ cs_kwargs)
+
+known_exe_kwargs = known_build_target_kwargs | {'implib', 'export_dynamic'}
+known_shlib_kwargs = known_build_target_kwargs | {'version', 'soversion', 'vs_module_defs'}
+known_shmod_kwargs = known_build_target_kwargs
+known_stlib_kwargs = known_build_target_kwargs | {'pic'}
+known_jar_kwargs = known_exe_kwargs | {'main_class'}
class InvalidArguments(MesonException):
pass
@@ -113,6 +113,7 @@ class Build:
self.static_linker = None
self.static_cross_linker = None
self.subprojects = {}
+ self.subproject_dir = ''
self.install_scripts = []
self.postconf_scripts = []
self.install_dirs = []
@@ -138,6 +139,9 @@ class Build:
def get_project(self):
return self.projects['']
+ def get_subproject_dir(self):
+ return self.subproject_dir
+
def get_targets(self):
return self.targets
@@ -207,9 +211,10 @@ class ExtractedObjects:
'''
Holds a list of sources for which the objects must be extracted
'''
- def __init__(self, target, srclist, is_unity):
+ def __init__(self, target, srclist, genlist, is_unity):
self.target = target
self.srclist = srclist
+ self.genlist = genlist
if is_unity:
self.check_unity_compatible()
@@ -281,7 +286,7 @@ class EnvironmentVariables:
class Target:
def __init__(self, name, subdir, subproject, build_by_default):
- if '/' in name or '\\' in name:
+ if has_path_sep(name):
# Fix failing test 53 when this becomes an error.
mlog.warning('''Target "%s" has a path separator in its name.
This is not supported, it can cause unexpected failures and will become
@@ -330,6 +335,8 @@ a hard error in the future.''' % name)
class BuildTarget(Target):
+ known_kwargs = known_build_target_kwargs
+
def __init__(self, name, subdir, subproject, is_cross, sources, objects, environment, kwargs):
super().__init__(name, subdir, subproject, True)
self.is_cross = is_cross
@@ -355,6 +362,7 @@ class BuildTarget(Target):
self.extra_args = {}
self.generated = []
self.extra_files = []
+ self.d_features = {}
# Sources can be:
# 1. Pre-existing source files in the source tree
# 2. Pre-existing sources generated by configure_file in the build tree
@@ -364,14 +372,15 @@ class BuildTarget(Target):
# 1. Pre-existing objects provided by the user with the `objects:` kwarg
# 2. Compiled objects created by and extracted from another target
self.process_objectlist(objects)
- self.process_compilers()
self.process_kwargs(kwargs, environment)
self.check_unknown_kwargs(kwargs)
+ self.process_compilers()
if not any([self.sources, self.generated, self.objects, self.link_whole]):
raise InvalidArguments('Build target %s has no sources.' % name)
self.process_compilers_late()
self.validate_sources()
self.validate_cross_install(environment)
+ self.check_module_linking()
def __lt__(self, other):
return self.get_id() < other.get_id()
@@ -387,7 +396,7 @@ class BuildTarget(Target):
def check_unknown_kwargs(self, kwargs):
# Override this method in derived classes that have more
# keywords.
- self.check_unknown_kwargs_int(kwargs, known_basic_kwargs)
+ self.check_unknown_kwargs_int(kwargs, self.known_kwargs)
def check_unknown_kwargs_int(self, kwargs, known_kwargs):
unknowns = []
@@ -499,6 +508,13 @@ class BuildTarget(Target):
# which is what we need.
if not is_object(s):
sources.append(s)
+ for d in self.external_deps:
+ if hasattr(d, 'held_object'):
+ d = d.held_object
+ for s in d.sources:
+ if isinstance(s, (str, File)):
+ sources.append(s)
+
# Sources that were used to create our extracted objects
for o in self.objects:
if not isinstance(o, ExtractedObjects):
@@ -610,13 +626,17 @@ class BuildTarget(Target):
if not isinstance(src, str):
raise MesonException('Object extraction arguments must be strings.')
src = File(False, self.subdir, src)
+ # FIXME: It could be a generated source
if src not in self.sources:
raise MesonException('Tried to extract unknown source %s.' % src)
obj_src.append(src)
- return ExtractedObjects(self, obj_src, self.is_unity)
+ return ExtractedObjects(self, obj_src, [], self.is_unity)
def extract_all_objects(self):
- return ExtractedObjects(self, self.sources, self.is_unity)
+ # FIXME: We should add support for transitive extract_objects()
+ if self.objects:
+ raise MesonException('Cannot extract objects from a target that itself has extracted objects')
+ return ExtractedObjects(self, self.sources, self.generated, self.is_unity)
def get_all_link_deps(self):
return self.get_transitive_link_deps()
@@ -650,10 +670,6 @@ just like those detected with the dependency() function.''')
self.link(linktarget)
lwhole = extract_as_list(kwargs, 'link_whole')
for linktarget in lwhole:
- # Sorry for this hack. Keyword targets are kept in holders
- # in kwargs. Unpack here without looking at the exact type.
- if hasattr(linktarget, "held_object"):
- linktarget = linktarget.held_object
self.link_whole(linktarget)
c_pchlist, cpp_pchlist, clist, cpplist, cslist, valalist, objclist, objcpplist, fortranlist, rustlist \
@@ -682,12 +698,14 @@ just like those detected with the dependency() function.''')
dfeature_versions = kwargs.get('d_module_versions', None)
if dfeature_versions:
dfeatures['versions'] = dfeature_versions
- dfeature_import_dirs = kwargs.get('d_import_dirs', None)
- if dfeature_import_dirs:
+ if 'd_import_dirs' in kwargs:
+ dfeature_import_dirs = extract_as_list(kwargs, 'd_import_dirs', unholder=True)
+ for d in dfeature_import_dirs:
+ if not isinstance(d, IncludeDirs):
+ raise InvalidArguments('Arguments to d_import_dirs must be include_directories.')
dfeatures['import_dirs'] = dfeature_import_dirs
if dfeatures:
- if 'd' in self.compilers:
- self.add_compiler_args('d', self.compilers['d'].get_feature_args(dfeatures))
+ self.d_features = dfeatures
self.link_args = extract_as_list(kwargs, 'link_args')
for i in self.link_args:
@@ -785,12 +803,16 @@ This will become a hard error in a future Meson release.''')
def get_extra_args(self, language):
return self.extra_args.get(language, [])
- def get_dependencies(self):
+ def get_dependencies(self, exclude=None):
transitive_deps = []
- for t in self.link_targets + self.link_whole_targets:
+ if exclude is None:
+ exclude = []
+ for t in itertools.chain(self.link_targets, self.link_whole_targets):
+ if t in transitive_deps or t in exclude:
+ continue
transitive_deps.append(t)
if isinstance(t, StaticLibrary):
- transitive_deps += t.get_dependencies()
+ transitive_deps += t.get_dependencies(transitive_deps + exclude)
return transitive_deps
def get_source_subdir(self):
@@ -831,13 +853,16 @@ This will become a hard error in a future Meson release.''')
self.add_include_dirs(dep.include_directories)
for l in dep.libraries:
self.link(l)
- # Those parts that are external.
- extpart = dependencies.InternalDependency('undefined',
- [],
- dep.compile_args,
- dep.link_args,
- [], [], [])
- self.external_deps.append(extpart)
+ for l in dep.whole_libraries:
+ self.link_whole(l)
+ if dep.compile_args or dep.link_args:
+ # Those parts that are external.
+ extpart = dependencies.InternalDependency('undefined',
+ [],
+ dep.compile_args,
+ dep.link_args,
+ [], [], [], [])
+ self.external_deps.append(extpart)
# Deps of deps.
self.add_deps(dep.ext_deps)
elif isinstance(dep, dependencies.Dependency):
@@ -1012,6 +1037,15 @@ You probably should put it in link_with instead.''')
def is_linkable_target(self):
return False
+ def check_module_linking(self):
+ '''
+ Warn if shared modules are linked with target: (link_with) #2865
+ '''
+ for link_target in self.link_targets:
+ if isinstance(link_target, SharedModule):
+ mlog.warning('''target links against shared modules. This is not
+recommended as it can lead to undefined behaviour on some platforms''')
+ return
class Generator:
def __init__(self, args, kwargs):
@@ -1054,7 +1088,7 @@ class Generator:
raise InvalidArguments('"output" may only contain strings.')
if '@BASENAME@' not in rule and '@PLAINNAME@' not in rule:
raise InvalidArguments('Every element of "output" must contain @BASENAME@ or @PLAINNAME@.')
- if '/' in rule or '\\' in rule:
+ if has_path_sep(rule):
raise InvalidArguments('"outputs" must not contain a directory separator.')
if len(outputs) > 1:
for o in outputs:
@@ -1159,6 +1193,8 @@ class GeneratedList:
return self.extra_args
class Executable(BuildTarget):
+ known_kwargs = known_exe_kwargs
+
def __init__(self, name, subdir, subproject, is_cross, sources, objects, environment, kwargs):
super().__init__(name, subdir, subproject, is_cross, sources, objects, environment, kwargs)
# Unless overridden, executables have no suffix or prefix. Except on
@@ -1214,9 +1250,6 @@ class Executable(BuildTarget):
def type_suffix(self):
return "@exe"
- def check_unknown_kwargs(self, kwargs):
- self.check_unknown_kwargs_int(kwargs, known_exe_kwargs)
-
def get_import_filename(self):
"""
The name of the import library that will be outputted by the compiler
@@ -1234,6 +1267,8 @@ class Executable(BuildTarget):
return self.is_linkwithable
class StaticLibrary(BuildTarget):
+ known_kwargs = known_stlib_kwargs
+
def __init__(self, name, subdir, subproject, is_cross, sources, objects, environment, kwargs):
if 'pic' not in kwargs and 'b_staticpic' in environment.coredata.base_options:
kwargs['pic'] = environment.coredata.base_options['b_staticpic'].value
@@ -1272,9 +1307,6 @@ class StaticLibrary(BuildTarget):
def type_suffix(self):
return "@sta"
- def check_unknown_kwargs(self, kwargs):
- self.check_unknown_kwargs_int(kwargs, known_lib_kwargs)
-
def process_kwargs(self, kwargs, environment):
super().process_kwargs(kwargs, environment)
if 'rust_crate_type' in kwargs:
@@ -1288,6 +1320,8 @@ class StaticLibrary(BuildTarget):
return True
class SharedLibrary(BuildTarget):
+ known_kwargs = known_shlib_kwargs
+
def __init__(self, name, subdir, subproject, is_cross, sources, objects, environment, kwargs):
self.soversion = None
self.ltversion = None
@@ -1476,9 +1510,6 @@ class SharedLibrary(BuildTarget):
else:
raise InvalidArguments('Invalid rust_crate_type "{0}": must be a string.'.format(rust_crate_type))
- def check_unknown_kwargs(self, kwargs):
- self.check_unknown_kwargs_int(kwargs, known_lib_kwargs)
-
def get_import_filename(self):
"""
The name of the import library that will be outputted by the compiler
@@ -1534,6 +1565,8 @@ class SharedLibrary(BuildTarget):
# A shared library that is meant to be used with dlopen rather than linking
# into something else.
class SharedModule(SharedLibrary):
+ known_kwargs = known_shmod_kwargs
+
def __init__(self, name, subdir, subproject, is_cross, sources, objects, environment, kwargs):
if 'version' in kwargs:
raise MesonException('Shared modules must not specify the version kwarg.')
@@ -1543,19 +1576,20 @@ class SharedModule(SharedLibrary):
self.import_filename = None
class CustomTarget(Target):
- known_kwargs = {'input': True,
- 'output': True,
- 'command': True,
- 'capture': False,
- 'install': True,
- 'install_dir': True,
- 'build_always': True,
- 'depends': True,
- 'depend_files': True,
- 'depfile': True,
- 'build_by_default': True,
- 'override_options': True,
- }
+ known_kwargs = set([
+ 'input',
+ 'output',
+ 'command',
+ 'capture',
+ 'install',
+ 'install_dir',
+ 'build_always',
+ 'depends',
+ 'depend_files',
+ 'depfile',
+ 'build_by_default',
+ 'override_options',
+ ])
def __init__(self, name, subdir, subproject, kwargs, absolute_paths=False):
super().__init__(name, subdir, subproject, False)
@@ -1653,7 +1687,7 @@ class CustomTarget(Target):
raise InvalidArguments('Output must not be empty.')
if i.strip() == '':
raise InvalidArguments('Output must not consist only of whitespace.')
- if '/' in i:
+ if has_path_sep(i):
raise InvalidArguments('Output must not contain a path segment.')
if '@INPUT@' in i or '@INPUT0@' in i:
m = 'Output cannot contain @INPUT@ or @INPUT0@, did you ' \
@@ -1789,6 +1823,8 @@ class RunTarget(Target):
return "@run"
class Jar(BuildTarget):
+ known_kwargs = known_jar_kwargs
+
def __init__(self, name, subdir, subproject, is_cross, sources, objects, environment, kwargs):
super().__init__(name, subdir, subproject, is_cross, sources, objects, environment, kwargs)
for s in self.sources:
@@ -1811,7 +1847,6 @@ class Jar(BuildTarget):
# All jar targets are installable.
pass
-
class CustomTargetIndex:
"""A special opaque object returned by indexing a CustomTarget. This object
@@ -1881,13 +1916,19 @@ class ConfigurationData:
# A bit poorly named, but this represents plain data files to copy
# during install.
class Data:
- def __init__(self, sources, install_dir, install_mode=None):
+ def __init__(self, sources, install_dir, install_mode=None, rename=None):
self.sources = sources
self.install_dir = install_dir
self.install_mode = install_mode
self.sources = listify(self.sources)
for s in self.sources:
assert(isinstance(s, File))
+ if rename is None:
+ self.rename = [os.path.basename(f.fname) for f in self.sources]
+ else:
+ self.rename = stringlistify(rename)
+ if len(self.rename) != len(self.sources):
+ raise MesonException('Size of rename argument is different from number of sources')
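When no rename list is given, the installed names default to the source basenames; otherwise the list must pair up one-to-one with the sources. A standalone sketch of that pairing (illustrative only, not the installer itself):

    import os

    def planned_names(sources, rename=None):
        if rename is None:
            return [os.path.basename(s) for s in sources]
        if len(rename) != len(sources):
            raise ValueError('Size of rename argument is different from number of sources')
        return list(rename)

    assert planned_names(['conf/app.ini.in']) == ['app.ini.in']
    assert planned_names(['conf/app.ini.in'], rename=['app.ini']) == ['app.ini']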
class RunScript(dict):
def __init__(self, script, args):
@@ -1922,3 +1963,22 @@ def get_sources_string_names(sources):
else:
raise AssertionError('Unknown source type: {!r}'.format(s))
return names
+
+def load(build_dir):
+ filename = os.path.join(build_dir, 'meson-private', 'build.dat')
+ load_fail_msg = 'Build data file {!r} is corrupted. Try with a fresh build tree.'.format(filename)
+ nonexisting_fail_msg = 'No such build data file as {!r}.'.format(filename)
+ try:
+ with open(filename, 'rb') as f:
+ obj = pickle.load(f)
+ except FileNotFoundError:
+ raise MesonException(nonexisting_fail_msg)
+ except pickle.UnpicklingError:
+ raise MesonException(load_fail_msg)
+ if not isinstance(obj, Build):
+ raise MesonException(load_fail_msg)
+ return obj
+
+def save(obj, filename):
+ with open(filename, 'wb') as f:
+ pickle.dump(obj, f)
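For orientation, the intended usage is roughly the following; note that load() takes the build directory while save() still takes an explicit file name:

    # Hypothetical call sites, paths assumed for illustration:
    b = load('builddir')   # reads builddir/meson-private/build.dat
    save(b, os.path.join('builddir', 'meson-private', 'build.dat'))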
diff --git a/mesonbuild/compilers/__init__.py b/mesonbuild/compilers/__init__.py
index 1679243..89b46b5 100644
--- a/mesonbuild/compilers/__init__.py
+++ b/mesonbuild/compilers/__init__.py
@@ -70,6 +70,7 @@ __all__ = [
'JavaCompiler',
'LLVMDCompiler',
'MonoCompiler',
+ 'VisualStudioCsCompiler',
'NAGFortranCompiler',
'ObjCCompiler',
'ObjCPPCompiler',
@@ -132,7 +133,7 @@ from .cpp import (
IntelCPPCompiler,
VisualStudioCPPCompiler,
)
-from .cs import MonoCompiler
+from .cs import MonoCompiler, VisualStudioCsCompiler
from .d import (
DCompiler,
DmdDCompiler,
diff --git a/mesonbuild/compilers/c.py b/mesonbuild/compilers/c.py
index 47acdd9..71fff05 100644
--- a/mesonbuild/compilers/c.py
+++ b/mesonbuild/compilers/c.py
@@ -320,16 +320,16 @@ class CCompiler(Compiler):
args += extra_args
return args
- def compiles(self, code, env, extra_args=None, dependencies=None, mode='compile'):
+ def compiles(self, code, env, extra_args=None, dependencies=None, mode='compile', want_output=False):
args = self._get_compiler_check_args(env, extra_args, dependencies, mode)
# We only want to compile; not link
with self.compile(code, args.to_native(), mode) as p:
return p.returncode == 0
- def _links_wrapper(self, code, env, extra_args, dependencies):
+ def _links_wrapper(self, code, env, extra_args, dependencies, want_output=False):
"Shares common code between self.links and self.run"
args = self._get_compiler_check_args(env, extra_args, dependencies, mode='link')
- return self.compile(code, args)
+ return self.compile(code, args, want_output=want_output)
def links(self, code, env, extra_args=None, dependencies=None):
with self._links_wrapper(code, env, extra_args, dependencies) as p:
@@ -338,7 +338,7 @@ class CCompiler(Compiler):
def run(self, code, env, extra_args=None, dependencies=None):
if self.is_cross and self.exe_wrapper is None:
raise CrossNoRunException('Can not run test applications in this cross environment.')
- with self._links_wrapper(code, env, extra_args, dependencies) as p:
+ with self._links_wrapper(code, env, extra_args, dependencies, True) as p:
if p.returncode != 0:
mlog.debug('Could not compile test file %s: %d\n' % (
p.input_name,
@@ -368,24 +368,52 @@ class CCompiler(Compiler):
return self.compiles(t.format(**fargs), env, extra_args, dependencies)
def cross_compute_int(self, expression, low, high, guess, prefix, env, extra_args, dependencies):
+ # Try user's guess first
if isinstance(guess, int):
if self._compile_int('%s == %d' % (expression, guess), prefix, env, extra_args, dependencies):
return guess
- cur = low
- while low < high:
- cur = int((low + high) / 2)
- if cur == low:
- break
-
- if self._compile_int('%s >= %d' % (expression, cur), prefix, env, extra_args, dependencies):
- low = cur
+ # If no bounds are given, compute them in the limit of int32
+ maxint = 0x7fffffff
+ minint = -0x80000000
+ if not isinstance(low, int) or not isinstance(high, int):
+ if self._compile_int('%s >= 0' % (expression), prefix, env, extra_args, dependencies):
+ low = cur = 0
+ while self._compile_int('%s > %d' % (expression, cur), prefix, env, extra_args, dependencies):
+ low = cur + 1
+ if low > maxint:
+ raise EnvironmentException('Cross-compile check overflowed')
+ cur = cur * 2 + 1
+ if cur > maxint:
+ cur = maxint
+ high = cur
else:
+ low = cur = -1
+ while self._compile_int('%s < %d' % (expression, cur), prefix, env, extra_args, dependencies):
+ high = cur - 1
+ if high < minint:
+ raise EnvironmentException('Cross-compile check overflowed')
+ cur = cur * 2
+ if cur < minint:
+ cur = minint
+ low = cur
+ else:
+ # Sanity check limits given by user
+ if high < low:
+ raise EnvironmentException('high limit smaller than low limit')
+ condition = '%s <= %d && %s >= %d' % (expression, high, expression, low)
+ if not self._compile_int(condition, prefix, env, extra_args, dependencies):
+ raise EnvironmentException('Value out of given range')
+
+ # Binary search
+ while low != high:
+ cur = low + int((high - low) / 2)
+ if self._compile_int('%s <= %d' % (expression, cur), prefix, env, extra_args, dependencies):
high = cur
+ else:
+ low = cur + 1
- if self._compile_int('%s == %d' % (expression, cur), prefix, env, extra_args, dependencies):
- return cur
- raise EnvironmentException('Cross-compile check overflowed')
+ return low
def compute_int(self, expression, low, high, guess, prefix, env, extra_args=None, dependencies=None):
if extra_args is None:
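The rewritten cross_compute_int() first honours the caller's guess, then, when no bounds are supplied, establishes a bracket by repeated doubling (capped at the 32-bit limits), and finally binary-searches for the smallest value satisfying 'expression <= cur'. A runnable sketch of the same search, with an ordinary predicate standing in for the compile checks (non-negative case only, as used for sizeof/alignment):

    def find_value(pred_le, low=None, high=None):
        # pred_le(n) plays the role of _compile_int('expr <= %d' % n, ...):
        # False below the true value, True from the true value upwards.
        if low is None or high is None:
            low, high = 0, 1
            while not pred_le(high):
                low, high = high + 1, high * 2
        while low != high:
            cur = low + (high - low) // 2
            if pred_le(cur):
                high = cur
            else:
                low = cur + 1
        return low

    assert find_value(lambda n: n >= 48) == 48   # e.g. sizeof(foo) when sizeof(foo) == 48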
@@ -417,7 +445,7 @@ class CCompiler(Compiler):
}}'''
if not self.compiles(t.format(**fargs), env, extra_args, dependencies):
return -1
- return self.cross_compute_int('sizeof(%s)' % typename, 1, 128, None, prefix, env, extra_args, dependencies)
+ return self.cross_compute_int('sizeof(%s)' % typename, None, None, None, prefix, env, extra_args, dependencies)
def sizeof(self, typename, prefix, env, extra_args=None, dependencies=None):
if extra_args is None:
@@ -455,7 +483,7 @@ class CCompiler(Compiler):
char c;
{type} target;
}};'''
- return self.cross_compute_int('offsetof(struct tmp, target)', 1, 1024, None, t.format(**fargs), env, extra_args, dependencies)
+ return self.cross_compute_int('offsetof(struct tmp, target)', None, None, None, t.format(**fargs), env, extra_args, dependencies)
def alignment(self, typename, prefix, env, extra_args=None, dependencies=None):
if extra_args is None:
@@ -526,7 +554,7 @@ class CCompiler(Compiler):
elif rtype == 'int':
try:
return int(res.stdout.strip())
- except:
+ except ValueError:
m = 'Return value of {}() is not an int'
raise EnvironmentException(m.format(fname))
@@ -709,7 +737,7 @@ class CCompiler(Compiler):
args = self.get_cross_extra_flags(env, link=False)
args += self.get_compiler_check_args()
n = 'symbols_have_underscore_prefix'
- with self.compile(code, args, 'compile') as p:
+ with self.compile(code, args, 'compile', want_output=True) as p:
if p.returncode != 0:
m = 'BUG: Unable to compile {!r} check: {}'
raise RuntimeError(m.format(n, p.stdo))
@@ -727,7 +755,7 @@ class CCompiler(Compiler):
return False
raise RuntimeError('BUG: {!r} check failed unexpectedly'.format(n))
- def get_library_naming(self, env, libtype):
+ def get_library_naming(self, env, libtype, strict=False):
'''
Get library prefixes and suffixes for the target platform ordered by
priority
@@ -735,7 +763,10 @@ class CCompiler(Compiler):
stlibext = ['a']
# We've always allowed libname to be both `foo` and `libfoo`,
# and now people depend on it
- prefixes = ['lib', '']
+ if strict and self.id != 'msvc': # lib prefix is not usually used with msvc
+ prefixes = ['lib']
+ else:
+ prefixes = ['lib', '']
# Library suffixes and prefixes
if for_darwin(env.is_cross_build(), env):
shlibext = ['dylib']
@@ -960,7 +991,7 @@ class VisualStudioCCompiler(CCompiler):
self.warn_args = {'1': ['/W2'],
'2': ['/W3'],
'3': ['/W4']}
- self.base_options = ['b_pch'] # FIXME add lto, pgo and the like
+ self.base_options = ['b_pch', 'b_ndebug'] # FIXME add lto, pgo and the like
self.is_64 = is_64
# Override CCompiler.get_always_args
@@ -1164,7 +1195,7 @@ class VisualStudioCCompiler(CCompiler):
# See boost/config/compiler/visualc.cpp for up to date mapping
try:
version = int(''.join(self.version.split('.')[0:2]))
- except:
+ except ValueError:
return None
if version < 1310:
return '7.0'
diff --git a/mesonbuild/compilers/compilers.py b/mesonbuild/compilers/compilers.py
index c072bb6..934b079 100644
--- a/mesonbuild/compilers/compilers.py
+++ b/mesonbuild/compilers/compilers.py
@@ -55,7 +55,6 @@ for _l in clike_langs:
clike_suffixes += lang_suffixes[_l]
clike_suffixes += ('h', 'll', 's')
-# XXX: Use this in is_library()?
soregex = re.compile(r'.*\.so(\.[0-9]+)?(\.[0-9]+)?(\.[0-9]+)?$')
# All these are only for C-like languages; see `clike_langs` above.
@@ -102,6 +101,10 @@ def is_object(fname):
def is_library(fname):
if hasattr(fname, 'fname'):
fname = fname.fname
+
+ if soregex.match(fname):
+ return True
+
suffix = fname.split('.')[-1]
return suffix in lib_suffixes
@@ -315,7 +318,9 @@ def get_base_compile_args(options, compiler):
except KeyError:
pass
try:
- if options['b_ndebug'].value == 'true' or (options['b_ndebug'].value == 'if-release' and options['buildtype'] == 'release'):
+ if (options['b_ndebug'].value == 'true' or
+ (options['b_ndebug'].value == 'if-release' and
+ options['buildtype'].value == 'release')):
args += ['-DNDEBUG']
except KeyError:
pass
@@ -348,7 +353,7 @@ def get_base_link_args(options, linker, is_shared_module):
pass
try:
if 'b_asneeded' in linker.base_options and options['b_asneeded'].value:
- args.append('-Wl,--as-needed')
+ args.append(linker.get_asneeded_args())
except KeyError:
pass
try:
@@ -524,15 +529,22 @@ class CompilerArgs(list):
def append_direct(self, arg):
'''
Append the specified argument without any reordering or de-dup
+ except for absolute paths where the order of include search directories
+ is not relevant
'''
- super().append(arg)
+ if os.path.isabs(arg):
+ self.append(arg)
+ else:
+ super().append(arg)
def extend_direct(self, iterable):
'''
Extend using the elements in the specified iterable without any
- reordering or de-dup
+ reordering or de-dup except for absolute paths where the order of
+ include search directories is not relevant
'''
- super().extend(iterable)
+ for elem in iterable:
+ self.append_direct(elem)
def __add__(self, args):
new = CompilerArgs(self, self.compiler)
@@ -599,6 +611,8 @@ class Compiler:
# Libraries to ignore in find_library() since they are provided by the
# compiler or the C library. Currently only used for MSVC.
ignore_libs = ()
+ # Cache for the result of compiler checks which can be cached
+ compiler_check_cache = {}
def __init__(self, exelist, version, **kwargs):
if isinstance(exelist, str):
@@ -751,9 +765,23 @@ class Compiler:
return os.path.join(dirname, 'output.' + suffix)
@contextlib.contextmanager
- def compile(self, code, extra_args=None, mode='link'):
+ def compile(self, code, extra_args=None, mode='link', want_output=False):
if extra_args is None:
+ textra_args = None
extra_args = []
+ else:
+ textra_args = tuple(extra_args)
+ key = (code, textra_args, mode)
+ if not want_output:
+ if key in self.compiler_check_cache:
+ p = self.compiler_check_cache[key]
+ mlog.debug('Using cached compile:')
+ mlog.debug('Cached command line: ', ' '.join(p.commands), '\n')
+ mlog.debug('Code:\n', code)
+ mlog.debug('Cached compiler stdout:\n', p.stdo)
+ mlog.debug('Cached compiler stderr:\n', p.stde)
+ yield p
+ return  # end the context-manager generator cleanly instead of raising StopIteration (PEP 479)
try:
with tempfile.TemporaryDirectory() as tmpdirname:
if isinstance(code, str):
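The cache key is (code, tuple(extra_args), mode): extra_args arrives as a list, which is unhashable, so it is converted to a tuple first, and results are only stored when the caller does not need the output file. The same memoisation pattern in isolation (a simplified sketch without the context-manager handling above):

    _check_cache = {}

    def cached_check(run_check, code, extra_args=None, mode='link'):
        # run_check is any callable doing the real compilation.
        key = (code, tuple(extra_args or []), mode)
        if key not in _check_cache:
            _check_cache[key] = run_check(code, list(extra_args or []), mode)
        return _check_cache[key]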
@@ -763,7 +791,6 @@ class Compiler:
ofile.write(code)
elif isinstance(code, mesonlib.File):
srcname = code.fname
- output = self._get_compile_output(tmpdirname, mode)
# Construct the compiler command-line
commands = CompilerArgs(self)
@@ -776,6 +803,7 @@ class Compiler:
if mode == 'preprocess':
commands += self.get_preprocess_only_args()
else:
+ output = self._get_compile_output(tmpdirname, mode)
commands += self.get_output_args(output)
# Generate full command-line with the exelist
commands = self.get_exelist() + commands.to_native()
@@ -786,8 +814,12 @@ class Compiler:
p, p.stdo, p.stde = Popen_safe(commands, cwd=tmpdirname)
mlog.debug('Compiler stdout:\n', p.stdo)
mlog.debug('Compiler stderr:\n', p.stde)
+ p.commands = commands
p.input_name = srcname
- p.output_name = output
+ if want_output:
+ p.output_name = output
+ else:
+ self.compiler_check_cache[key] = p
yield p
except (PermissionError, OSError):
# On Windows antivirus programs and the like hold on to files so
@@ -900,6 +932,13 @@ ICC_STANDARD = 0
ICC_OSX = 1
ICC_WIN = 2
+# GNU ld cannot be installed on macOS
+# https://github.com/Homebrew/homebrew-core/issues/17794#issuecomment-328174395
+# Hence, we don't need to differentiate between OS and ld
+# for the sake of adding as-needed support
+GNU_LD_AS_NEEDED = '-Wl,--as-needed'
+APPLE_LD_AS_NEEDED = '-Wl,-dead_strip_dylibs'
+
def get_gcc_soname_args(gcc_type, prefix, shlib_name, suffix, path, soversion, is_shared_module):
if soversion is None:
sostr = ''
@@ -1002,10 +1041,18 @@ class GnuCompiler:
'b_colorout', 'b_ndebug', 'b_staticpic']
if self.gcc_type != GCC_OSX:
self.base_options.append('b_lundef')
- self.base_options.append('b_asneeded')
+ self.base_options.append('b_asneeded')
# All GCC backends can do assembly
self.can_compile_suffixes.add('s')
+ # TODO: centralise this policy more globally, instead
+ # of fragmenting it into GnuCompiler and ClangCompiler
+ def get_asneeded_args(self):
+ if self.gcc_type == GCC_OSX:
+ return APPLE_LD_AS_NEEDED
+ else:
+ return GNU_LD_AS_NEEDED
+
def get_colorout_args(self, colortype):
if mesonlib.version_compare(self.version, '>=4.9.0'):
return gnu_color_args[colortype][:]
@@ -1107,10 +1154,18 @@ class ClangCompiler:
'b_ndebug', 'b_staticpic', 'b_colorout']
if self.clang_type != CLANG_OSX:
self.base_options.append('b_lundef')
- self.base_options.append('b_asneeded')
+ self.base_options.append('b_asneeded')
# All Clang backends can do assembly and LLVM IR
self.can_compile_suffixes.update(['ll', 's'])
+ # TODO: centralise this policy more globally, instead
+ # of fragmenting it into GnuCompiler and ClangCompiler
+ def get_asneeded_args(self):
+ if self.clang_type == CLANG_OSX:
+ return APPLE_LD_AS_NEEDED
+ else:
+ return GNU_LD_AS_NEEDED
+
def get_pic_args(self):
if self.clang_type in (CLANG_WIN, CLANG_OSX):
return [] # On Window and OS X, pic is always on.
@@ -1230,6 +1285,14 @@ class IntelCompiler:
raise MesonException('Unreachable code when converting icc type to gcc type.')
return get_gcc_soname_args(gcc_type, prefix, shlib_name, suffix, path, soversion, is_shared_module)
+ # TODO: centralise this policy more globally, instead
+ # of fragmenting it into GnuCompiler and ClangCompiler
+ def get_asneeded_args(self):
+ if self.icc_type == CLANG_OSX:
+ return APPLE_LD_AS_NEEDED
+ else:
+ return GNU_LD_AS_NEEDED
+
def get_std_shared_lib_link_args(self):
# FIXME: Don't know how icc works on OSX
# if self.icc_type == ICC_OSX:
diff --git a/mesonbuild/compilers/cpp.py b/mesonbuild/compilers/cpp.py
index d2d2585..3804059 100644
--- a/mesonbuild/compilers/cpp.py
+++ b/mesonbuild/compilers/cpp.py
@@ -113,7 +113,7 @@ class GnuCPPCompiler(GnuCompiler, CPPCompiler):
if self.gcc_type == GCC_MINGW:
opts.update({
'cpp_winlibs': coredata.UserArrayOption('cpp_winlibs', 'Standard Win libraries to link against',
- gnu_winlibs), })
+ gnu_winlibs), })
return opts
def get_option_compile_args(self, options):
diff --git a/mesonbuild/compilers/cs.py b/mesonbuild/compilers/cs.py
index dd7a433..f78e364 100644
--- a/mesonbuild/compilers/cs.py
+++ b/mesonbuild/compilers/cs.py
@@ -15,19 +15,26 @@
import os.path, subprocess
from ..mesonlib import EnvironmentException
+from ..mesonlib import is_windows
from .compilers import Compiler, mono_buildtype_args
-class MonoCompiler(Compiler):
- def __init__(self, exelist, version, **kwargs):
+class CsCompiler(Compiler):
+ def __init__(self, exelist, version, id, runner=None):
self.language = 'cs'
- super().__init__(exelist, version, **kwargs)
- self.id = 'mono'
- self.monorunner = 'mono'
+ super().__init__(exelist, version)
+ self.id = id
+ self.runner = runner
def get_display_language(self):
return 'C sharp'
+ def get_always_args(self):
+ return ['/nologo']
+
+ def get_linker_always_args(self):
+ return ['/nologo']
+
def get_output_args(self, fname):
return ['-out:' + fname]
@@ -92,11 +99,14 @@ class MonoCompiler(Compiler):
}
}
''')
- pc = subprocess.Popen(self.exelist + [src], cwd=work_dir)
+ pc = subprocess.Popen(self.exelist + self.get_always_args() + [src], cwd=work_dir)
pc.wait()
if pc.returncode != 0:
raise EnvironmentException('Mono compiler %s can not compile programs.' % self.name_string())
- cmdlist = [self.monorunner, obj]
+ if self.runner:
+ cmdlist = [self.runner, obj]
+ else:
+ cmdlist = [os.path.join(work_dir, obj)]
pe = subprocess.Popen(cmdlist, cwd=work_dir)
pe.wait()
if pe.returncode != 0:
@@ -107,3 +117,25 @@ class MonoCompiler(Compiler):
def get_buildtype_args(self, buildtype):
return mono_buildtype_args[buildtype]
+
+
+class MonoCompiler(CsCompiler):
+ def __init__(self, exelist, version):
+ super().__init__(exelist, version, 'mono',
+ 'mono')
+
+
+class VisualStudioCsCompiler(CsCompiler):
+ def __init__(self, exelist, version):
+ super().__init__(exelist, version, 'csc')
+
+ def get_buildtype_args(self, buildtype):
+ res = mono_buildtype_args[buildtype]
+ if not is_windows():
+ tmp = []
+ for flag in res:
+ if flag == '-debug':
+ flag = '-debug:portable'
+ tmp.append(flag)
+ res = tmp
+ return res
diff --git a/mesonbuild/compilers/d.py b/mesonbuild/compilers/d.py
index 3320736..474e1bd 100644
--- a/mesonbuild/compilers/d.py
+++ b/mesonbuild/compilers/d.py
@@ -93,7 +93,7 @@ class DCompiler(Compiler):
# FIXME: Make this work for Windows, MacOS and cross-compiling
return get_gcc_soname_args(GCC_STANDARD, prefix, shlib_name, suffix, path, soversion, is_shared_module)
- def get_feature_args(self, kwargs):
+ def get_feature_args(self, kwargs, build_to_src):
res = []
if 'unittest' in kwargs:
unittest = kwargs.pop('unittest')
@@ -122,8 +122,16 @@ class DCompiler(Compiler):
import_dir_arg = d_feature_args[self.id]['import_dir']
if not import_dir_arg:
raise EnvironmentException('D compiler %s does not support the "string import directories" feature.' % self.name_string())
- for d in import_dirs:
- res.append('{0}{1}'.format(import_dir_arg, d))
+ for idir_obj in import_dirs:
+ basedir = idir_obj.get_curdir()
+ for idir in idir_obj.get_incdirs():
+ # Avoid superfluous '/.' at the end of paths when d is '.'
+ if idir not in ('', '.'):
+ expdir = os.path.join(basedir, idir)
+ else:
+ expdir = basedir
+ srctreedir = os.path.join(build_to_src, expdir)
+ res.append('{0}{1}'.format(import_dir_arg, srctreedir))
if kwargs:
raise EnvironmentException('Unknown D compiler feature(s) selected: %s' % ', '.join(kwargs.keys()))
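Worked example of the path composition above, with assumed values: build_to_src of '../..', an include object whose curdir is 'src/config' and whose incdirs are ['.', 'data'], and '-J' (DMD's string-import flag, assumed here as import_dir_arg):

    import os
    build_to_src, basedir, incdirs = '../..', 'src/config', ['.', 'data']
    for idir in incdirs:
        expdir = basedir if idir in ('', '.') else os.path.join(basedir, idir)
        print('-J' + os.path.join(build_to_src, expdir))
    # -J../../src/config
    # -J../../src/config/data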
diff --git a/mesonbuild/compilers/vala.py b/mesonbuild/compilers/vala.py
index 9ab5c8a..6194d1a 100644
--- a/mesonbuild/compilers/vala.py
+++ b/mesonbuild/compilers/vala.py
@@ -35,10 +35,10 @@ class ValaCompiler(Compiler):
return False # Because compiles into C.
def get_output_args(self, target):
- return ['-o', target]
+ return [] # Because compiles into C.
def get_compile_only_args(self):
- return ['-C']
+ return [] # Because compiles into C.
def get_pic_args(self):
return []
diff --git a/mesonbuild/coredata.py b/mesonbuild/coredata.py
index f87e62c..a696cca 100644
--- a/mesonbuild/coredata.py
+++ b/mesonbuild/coredata.py
@@ -21,7 +21,7 @@ from .mesonlib import MesonException
from .mesonlib import default_libdir, default_libexecdir, default_prefix
import ast
-version = '0.45.0.dev1'
+version = '0.46.0.dev1'
backendlist = ['ninja', 'vs', 'vs2010', 'vs2015', 'vs2017', 'xcode']
default_yielding = False
@@ -44,21 +44,17 @@ class UserOption:
def validate_value(self, value):
raise RuntimeError('Derived option class did not override validate_value.')
+ def set_value(self, newvalue):
+ self.value = self.validate_value(newvalue)
+
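With set_value() hoisted into the base class, a subclass now only has to provide validate_value(), which returns the cleaned-up value. A hypothetical subclass (not one of Meson's option types) illustrating the contract, assuming the base __init__(name, description, choices, yielding) signature and coredata's MesonException import:

    class UserEvenIntOption(UserOption):
        def __init__(self, name, description, value):
            super().__init__(name, description, None, None)
            self.set_value(value)

        def validate_value(self, value):
            value = int(value)
            if value % 2:
                raise MesonException('Value %d for option "%s" is not even.' % (value, self.name))
            return value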
class UserStringOption(UserOption):
def __init__(self, name, description, value, choices=None, yielding=None):
super().__init__(name, description, choices, yielding)
self.set_value(value)
- def validate(self, value):
+ def validate_value(self, value):
if not isinstance(value, str):
raise MesonException('Value "%s" for string option "%s" is not a string.' % (str(value), self.name))
-
- def set_value(self, newvalue):
- self.validate(newvalue)
- self.value = newvalue
-
- def validate_value(self, value):
- self.validate(value)
return value
class UserBooleanOption(UserOption):
@@ -66,23 +62,17 @@ class UserBooleanOption(UserOption):
super().__init__(name, description, [True, False], yielding)
self.set_value(value)
- def tobool(self, thing):
- if isinstance(thing, bool):
- return thing
- if thing.lower() == 'true':
- return True
- if thing.lower() == 'false':
- return False
- raise MesonException('Value %s is not boolean (true or false).' % thing)
-
- def set_value(self, newvalue):
- self.value = self.tobool(newvalue)
-
def __bool__(self):
return self.value
def validate_value(self, value):
- return self.tobool(value)
+ if isinstance(value, bool):
+ return value
+ if value.lower() == 'true':
+ return True
+ if value.lower() == 'false':
+ return False
+ raise MesonException('Value %s is not boolean (true or false).' % value)
class UserIntegerOption(UserOption):
def __init__(self, name, description, min_value, max_value, value, yielding=None):
@@ -97,26 +87,23 @@ class UserIntegerOption(UserOption):
c.append('<=' + str(max_value))
self.choices = ', '.join(c)
- def set_value(self, newvalue):
- if isinstance(newvalue, str):
- newvalue = self.toint(newvalue)
- if not isinstance(newvalue, int):
+ def validate_value(self, value):
+ if isinstance(value, str):
+ value = self.toint(value)
+ if not isinstance(value, int):
raise MesonException('New value for integer option is not an integer.')
- if self.min_value is not None and newvalue < self.min_value:
- raise MesonException('New value %d is less than minimum value %d.' % (newvalue, self.min_value))
- if self.max_value is not None and newvalue > self.max_value:
- raise MesonException('New value %d is more than maximum value %d.' % (newvalue, self.max_value))
- self.value = newvalue
+ if self.min_value is not None and value < self.min_value:
+ raise MesonException('New value %d is less than minimum value %d.' % (value, self.min_value))
+ if self.max_value is not None and value > self.max_value:
+ raise MesonException('New value %d is more than maximum value %d.' % (value, self.max_value))
+ return value
def toint(self, valuestring):
try:
return int(valuestring)
- except:
+ except ValueError:
raise MesonException('Value string "%s" is not convertable to an integer.' % valuestring)
- def validate_value(self, value):
- return self.toint(value)
-
class UserComboOption(UserOption):
def __init__(self, name, description, choices, value, yielding=None):
super().__init__(name, description, choices, yielding)
@@ -127,23 +114,18 @@ class UserComboOption(UserOption):
raise MesonException('Combo choice elements must be strings.')
self.set_value(value)
- def set_value(self, newvalue):
- if newvalue not in self.choices:
- optionsstring = ', '.join(['"%s"' % (item,) for item in self.choices])
- raise MesonException('Value "%s" for combo option "%s" is not one of the choices. Possible choices are: %s.' % (newvalue, self.name, optionsstring))
- self.value = newvalue
-
def validate_value(self, value):
if value not in self.choices:
- raise MesonException('Value %s not one of accepted values.' % value)
+ optionsstring = ', '.join(['"%s"' % (item,) for item in self.choices])
+ raise MesonException('Value "%s" for combo option "%s" is not one of the choices. Possible choices are: %s.' % (value, self.name, optionsstring))
return value
class UserArrayOption(UserOption):
def __init__(self, name, description, value, **kwargs):
super().__init__(name, description, kwargs.get('choices', []), yielding=kwargs.get('yielding', None))
- self.set_value(value, user_input=False)
+ self.value = self.validate_value(value, user_input=False)
- def validate(self, value, user_input):
+ def validate_value(self, value, user_input=True):
# User input is for options defined on the command line (via -D
# options). Users can put their input in as a comma separated
# string, but for defining options in meson_options.txt the format
@@ -176,13 +158,6 @@ This will become a hard error in the future.''')
', '.join(bad), ', '.join(self.choices)))
return newvalue
- def set_value(self, newvalue, user_input=True):
- self.value = self.validate(newvalue, user_input)
-
- def validate_value(self, value):
- self.validate(value)
- return value
-
# This class contains all data that must persist over multiple
# invocations of Meson. It is roughly the same thing as
# cmakecache.
@@ -222,17 +197,17 @@ class CoreData:
(after resolving variables and ~), return that absolute path. Next,
check if the file is relative to the current source dir. If the path
still isn't resolved do the following:
- Linux + BSD:
+ Windows:
+ - Error
+ *:
- $XDG_DATA_HOME/meson/cross (or ~/.local/share/meson/cross if
undefined)
- $XDG_DATA_DIRS/meson/cross (or
/usr/local/share/meson/cross:/usr/share/meson/cross if undefined)
- Error
- *:
- - Error
- BSD follows the Linux path and will honor XDG_* if set. This simplifies
- the implementation somewhat, especially since most BSD users wont set
- those environment variables.
+
+ Non-Windows follows the Linux path and will honor XDG_* if set. This
+ simplifies the implementation somewhat.
"""
if filename is None:
return None
@@ -242,7 +217,7 @@ class CoreData:
path_to_try = os.path.abspath(filename)
if os.path.exists(path_to_try):
return path_to_try
- if sys.platform == 'linux' or 'bsd' in sys.platform.lower():
+ if sys.platform != 'win32':
paths = [
os.environ.get('XDG_DATA_HOME', os.path.expanduser('~/.local/share')),
] + os.environ.get('XDG_DATA_DIRS', '/usr/local/share:/usr/share').split(':')
@@ -340,7 +315,8 @@ class CoreData:
return opt.validate_value(override_value)
raise MesonException('Tried to validate unknown option %s.' % option_name)
-def load(filename):
+def load(build_dir):
+ filename = os.path.join(build_dir, 'meson-private', 'coredata.dat')
load_fail_msg = 'Coredata file {!r} is corrupted. Try with a fresh build tree.'.format(filename)
try:
with open(filename, 'rb') as f:
@@ -354,7 +330,8 @@ def load(filename):
(obj.version, version))
return obj
-def save(obj, filename):
+def save(obj, build_dir):
+ filename = os.path.join(build_dir, 'meson-private', 'coredata.dat')
if obj.version != version:
raise MesonException('Fatal version mismatch corruption.')
with open(filename, 'wb') as f:
@@ -420,7 +397,7 @@ builtin_options = {
'werror': [UserBooleanOption, 'Treat warnings as errors.', False],
'warning_level': [UserComboOption, 'Compiler warning level to use.', ['1', '2', '3'], '1'],
'layout': [UserComboOption, 'Build directory layout.', ['mirror', 'flat'], 'mirror'],
- 'default_library': [UserComboOption, 'Default library type.', ['shared', 'static'], 'shared'],
+ 'default_library': [UserComboOption, 'Default library type.', ['shared', 'static', 'both'], 'shared'],
'backend': [UserComboOption, 'Backend to use.', backendlist, 'ninja'],
'stdsplit': [UserBooleanOption, 'Split stdout and stderr in test logs.', True],
'errorlogs': [UserBooleanOption, "Whether to print the logs from failing tests.", True],
diff --git a/mesonbuild/dependencies/__init__.py b/mesonbuild/dependencies/__init__.py
index 69235da..4796980 100644
--- a/mesonbuild/dependencies/__init__.py
+++ b/mesonbuild/dependencies/__init__.py
@@ -12,12 +12,13 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+from .boost import BoostDependency
from .base import ( # noqa: F401
Dependency, DependencyException, DependencyMethods, ExternalProgram, NonExistingExternalProgram,
ExternalDependency, ExternalLibrary, ExtraFrameworkDependency, InternalDependency,
PkgConfigDependency, find_external_dependency, get_dep_identifier, packages, _packages_accept_language)
from .dev import GMockDependency, GTestDependency, LLVMDependency, ValgrindDependency
-from .misc import (BoostDependency, MPIDependency, Python3Dependency, ThreadDependency, PcapDependency, CupsDependency, LibWmfDependency)
+from .misc import (MPIDependency, Python3Dependency, ThreadDependency, PcapDependency, CupsDependency, LibWmfDependency)
from .platform import AppleFrameworks
from .ui import GLDependency, GnuStepDependency, Qt4Dependency, Qt5Dependency, SDL2Dependency, WxDependency, VulkanDependency
diff --git a/mesonbuild/dependencies/base.py b/mesonbuild/dependencies/base.py
index 3357e8e..4127081 100644
--- a/mesonbuild/dependencies/base.py
+++ b/mesonbuild/dependencies/base.py
@@ -145,7 +145,7 @@ class Dependency:
class InternalDependency(Dependency):
- def __init__(self, version, incdirs, compile_args, link_args, libraries, sources, ext_deps):
+ def __init__(self, version, incdirs, compile_args, link_args, libraries, whole_libraries, sources, ext_deps):
super().__init__('internal', {})
self.version = version
self.is_found = True
@@ -153,6 +153,7 @@ class InternalDependency(Dependency):
self.compile_args = compile_args
self.link_args = link_args
self.libraries = libraries
+ self.whole_libraries = whole_libraries
self.sources = sources
self.ext_deps = ext_deps
@@ -361,6 +362,8 @@ class PkgConfigDependency(ExternalDependency):
# The class's copy of the pkg-config path. Avoids having to search for it
# multiple times in the same Meson invocation.
class_pkgbin = None
+ # We cache all pkg-config subprocess invocations to avoid redundant calls
+ pkgbin_cache = {}
def __init__(self, name, environment, kwargs, language=None):
super().__init__('pkgconfig', environment, language, kwargs)
@@ -380,9 +383,7 @@ class PkgConfigDependency(ExternalDependency):
pkgname = environment.cross_info.config['binaries']['pkgconfig']
potential_pkgbin = ExternalProgram(pkgname, silent=True)
if potential_pkgbin.found():
- # FIXME, we should store all pkg-configs in ExternalPrograms.
- # However that is too destabilizing a change to do just before release.
- self.pkgbin = potential_pkgbin.get_command()[0]
+ self.pkgbin = potential_pkgbin
PkgConfigDependency.class_pkgbin = self.pkgbin
else:
mlog.debug('Cross pkg-config %s not found.' % potential_pkgbin.name)
@@ -404,7 +405,7 @@ class PkgConfigDependency(ExternalDependency):
self.type_string = 'Native'
mlog.debug('Determining dependency {!r} with pkg-config executable '
- '{!r}'.format(name, self.pkgbin))
+ '{!r}'.format(name, self.pkgbin.get_path()))
ret, self.version = self._call_pkgbin(['--modversion', name])
if ret != 0:
if self.required:
@@ -460,11 +461,21 @@ class PkgConfigDependency(ExternalDependency):
return s.format(self.__class__.__name__, self.name, self.is_found,
self.version_reqs)
+ def _call_pkgbin_real(self, args, env):
+ p, out = Popen_safe(self.pkgbin.get_command() + args, env=env)[0:2]
+ return p.returncode, out.strip()
+
def _call_pkgbin(self, args, env=None):
- if not env:
+ if env is None:
+ fenv = env
env = os.environ
- p, out = Popen_safe([self.pkgbin] + args, env=env)[0:2]
- return p.returncode, out.strip()
+ else:
+ fenv = frozenset(env.items())
+ targs = tuple(args)
+ cache = PkgConfigDependency.pkgbin_cache
+ if (self.pkgbin, targs, fenv) not in cache:
+ cache[(self.pkgbin, targs, fenv)] = self._call_pkgbin_real(args, env)
+ return cache[(self.pkgbin, targs, fenv)]
def _convert_mingw_paths(self, args):
'''
@@ -495,7 +506,13 @@ class PkgConfigDependency(ExternalDependency):
return converted
def _set_cargs(self):
- ret, out = self._call_pkgbin(['--cflags', self.name])
+ env = None
+ if self.language == 'fortran':
+ # gfortran doesn't appear to look in system paths for INCLUDE files,
+ # so don't allow pkg-config to suppress -I flags for system paths
+ env = os.environ.copy()
+ env['PKG_CONFIG_ALLOW_SYSTEM_CFLAGS'] = '1'
+ ret, out = self._call_pkgbin(['--cflags', self.name], env=env)
if ret != 0:
raise DependencyException('Could not generate cargs for %s:\n\n%s' %
(self.name, out))
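pkg-config normally filters -I flags pointing at the default system include directories out of --cflags; setting PKG_CONFIG_ALLOW_SYSTEM_CFLAGS keeps them, which is what gfortran needs here. The equivalent check done by hand, with a placeholder module name:

    # Shell equivalent: PKG_CONFIG_ALLOW_SYSTEM_CFLAGS=1 pkg-config --cflags foo
    import os, subprocess
    env = os.environ.copy()
    env['PKG_CONFIG_ALLOW_SYSTEM_CFLAGS'] = '1'
    cflags = subprocess.check_output(['pkg-config', '--cflags', 'foo'], env=env)  # 'foo' is a placeholder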
@@ -602,21 +619,23 @@ class PkgConfigDependency(ExternalDependency):
pkgbin = os.environ[evar].strip()
else:
pkgbin = 'pkg-config'
- try:
- p, out = Popen_safe([pkgbin, '--version'])[0:2]
- if p.returncode != 0:
- # Set to False instead of None to signify that we've already
- # searched for it and not found it
+ pkgbin = ExternalProgram(pkgbin, silent=True)
+ if pkgbin.found():
+ try:
+ p, out = Popen_safe(pkgbin.get_command() + ['--version'])[0:2]
+ if p.returncode != 0:
+ mlog.warning('Found pkg-config {!r} but couldn\'t run it'
+ ''.format(' '.join(pkgbin.get_command())))
+ # Set to False instead of None to signify that we've already
+ # searched for it and not found it
+ pkgbin = False
+ except (FileNotFoundError, PermissionError):
pkgbin = False
- except (FileNotFoundError, PermissionError):
+ else:
pkgbin = False
- if pkgbin and not os.path.isabs(pkgbin) and shutil.which(pkgbin):
- # Sometimes shutil.which fails where Popen succeeds, so
- # only find the abs path if it can be found by shutil.which
- pkgbin = shutil.which(pkgbin)
if not self.silent:
if pkgbin:
- mlog.log('Found pkg-config:', mlog.bold(pkgbin),
+ mlog.log('Found pkg-config:', mlog.bold(pkgbin.get_path()),
'(%s)' % out.strip())
else:
mlog.log('Found Pkg-config:', mlog.red('NO'))
diff --git a/mesonbuild/dependencies/boost.py b/mesonbuild/dependencies/boost.py
new file mode 100644
index 0000000..a17fb58
--- /dev/null
+++ b/mesonbuild/dependencies/boost.py
@@ -0,0 +1,683 @@
+# Copyright 2013-2017 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This file contains the detection logic for the Boost dependency.
+
+import glob
+import os
+
+from .. import mlog
+from .. import mesonlib
+from ..environment import detect_cpu_family
+
+from .base import (DependencyException, ExternalDependency)
+
+# On windows 3 directory layouts are supported:
+# * The default layout (versioned) installed:
+# - $BOOST_ROOT/include/boost-x_x/boost/*.hpp
+# - $BOOST_ROOT/lib/*.lib
+# * The non-default layout (system) installed:
+# - $BOOST_ROOT/include/boost/*.hpp
+# - $BOOST_ROOT/lib/*.lib
+# * The pre-built binaries from sf.net:
+# - $BOOST_ROOT/boost/*.hpp
+# - $BOOST_ROOT/lib<arch>-<compiler>/*.lib where arch=32/64 and compiler=msvc-14.1
+#
+# Note that we should also try to support:
+# mingw-w64 / Windows : libboost_<module>-mt.a (location = <prefix>/mingw64/lib/)
+# libboost_<module>-mt.dll.a
+#
+# Library names supported:
+# - libboost_<module>-<compiler>-mt-gd-x_x.lib (static)
+# - boost_<module>-<compiler>-mt-gd-x_x.lib|.dll (shared)
+# - libboost_<module>.lib (static)
+# - boost_<module>.lib|.dll (shared)
+# where compiler is vc141 for example.
+#
+# NOTE: -gd means runtime and build time debugging is on
+# -mt means threading=multi
+#
+# The `modules` argument accepts library names. This is because every module that
+# has libraries to link against also has multiple options regarding how to
+# link. See for example:
+# * http://www.boost.org/doc/libs/1_65_1/libs/test/doc/html/boost_test/usage_variants.html
+# * http://www.boost.org/doc/libs/1_65_1/doc/html/stacktrace/configuration_and_build.html
+# * http://www.boost.org/doc/libs/1_65_1/libs/math/doc/html/math_toolkit/main_tr1.html
+
+# **On Unix**, official packaged versions of boost libraries follow the following schemes:
+#
+# Linux / Debian: libboost_<module>.so -> libboost_<module>.so.1.66.0
+# Linux / Red Hat: libboost_<module>.so -> libboost_<module>.so.1.66.0
+# Linux / OpenSuse: libboost_<module>.so -> libboost_<module>.so.1.66.0
+# Win / Cygwin: libboost_<module>.dll.a (location = /usr/lib)
+# libboost_<module>.a
+# cygboost_<module>_1_64.dll (location = /usr/bin)
+# Mac / homebrew: libboost_<module>.dylib + libboost_<module>-mt.dylib (location = /usr/local/lib)
+# Mac / macports: libboost_<module>.dylib + libboost_<module>-mt.dylib (location = /opt/local/lib)
+#
+# It's not clear that any other ABI tags (e.g. -gd) are used in official packages.
+#
+# On Linux systems, boost libs have multithreading support enabled, but without the -mt tag.
+#
+# Boost documentation recommends using complex abi tags like "-lboost_regex-gcc34-mt-d-1_36".
+# (See http://www.boost.org/doc/libs/1_66_0/more/getting_started/unix-variants.html#library-naming)
+# However, it's not clear that any Unix distribution follows this scheme.
+# Furthermore, the Boost documentation for Unix above uses examples from Windows such as
+# "libboost_regex-vc71-mt-d-x86-1_34.lib", so the ABI tags appear to be aimed mainly at Windows.
+#
+# Probably we should use the linker search path to decide which libraries to use. This will
+# make it possible to find the macports boost libraries without setting BOOST_ROOT, and will
+# also mean that it would be possible to use user-installed boost libraries when official
+# packages are installed.
+#
+# We thus follow the following strategy:
+# 1. Look for libraries using compiler.find_library()
+# 1.1 On Linux, just look for boost_<module>
+# 1.2 On other systems (e.g. Mac) look for boost_<module>-mt if multithreading is enabled.
+# 1.3 Otherwise look for boost_<module>
+# 2. Fall back to previous approach
+# 2.1. Search particular directories.
+# 2.2. Find boost libraries with unknown suffixes using file-name globbing.
+
+# TODO: Unix: Don't assume we know where the boost dir is, rely on -Idir and -Ldir being set.
+# TODO: Allow user to specify suffix in BOOST_SUFFIX, or add specific options like BOOST_DEBUG for 'd' for debug.
+
+class BoostDependency(ExternalDependency):
+ def __init__(self, environment, kwargs):
+ super().__init__('boost', environment, 'cpp', kwargs)
+ self.need_static_link = ['boost_exception', 'boost_test_exec_monitor']
+ # FIXME: is this the right way to find the build type?
+ self.is_debug = environment.cmd_line_options.buildtype.startswith('debug')
+ threading = kwargs.get("threading", "multi")
+ self.is_multithreading = threading == "multi"
+
+ self.requested_modules = self.get_requested(kwargs)
+
+ self.boost_root = None
+ self.boost_roots = []
+ self.incdir = None
+ self.libdir = None
+
+ if 'BOOST_ROOT' in os.environ:
+ self.boost_root = os.environ['BOOST_ROOT']
+ self.boost_roots = [self.boost_root]
+ if not os.path.isabs(self.boost_root):
+ raise DependencyException('BOOST_ROOT must be an absolute path.')
+ if 'BOOST_INCLUDEDIR' in os.environ:
+ self.incdir = os.environ['BOOST_INCLUDEDIR']
+ if 'BOOST_LIBRARYDIR' in os.environ:
+ self.libdir = os.environ['BOOST_LIBRARYDIR']
+
+ if self.boost_root is None:
+ if mesonlib.for_windows(self.want_cross, self.env):
+ self.boost_roots = self.detect_win_roots()
+ else:
+ self.boost_roots = self.detect_nix_roots()
+
+ if self.incdir is None:
+ if mesonlib.for_windows(self.want_cross, self.env):
+ self.incdir = self.detect_win_incdir()
+ else:
+ self.incdir = self.detect_nix_incdir()
+
+ if self.check_invalid_modules():
+ self.log_fail()
+ return
+
+ mlog.debug('Boost library root dir is', mlog.bold(self.boost_root))
+ mlog.debug('Boost include directory is', mlog.bold(self.incdir))
+
+ # 1. check if we can find BOOST headers.
+ self.detect_headers_and_version()
+
+ # 2. check if we can find BOOST libraries.
+ if self.is_found:
+ self.detect_lib_modules()
+ mlog.debug('Boost library directory is', mlog.bold(self.libdir))
+
+ # 3. Report success or failure
+ if self.is_found:
+ self.log_success()
+ else:
+ self.log_fail()
+
+ def check_invalid_modules(self):
+ invalid_modules = [c for c in self.requested_modules if 'boost_' + c not in BOOST_LIBS]
+
+ # previous versions of meson allowed include dirs as modules
+ remove = []
+ for m in invalid_modules:
+ if m in BOOST_DIRS:
+ mlog.warning('Requested boost library', mlog.bold(m), 'which doesn\'t exist. '
+ 'This will become a hard error in a future Meson release.')
+ remove.append(m)
+
+ self.requested_modules = [x for x in self.requested_modules if x not in remove]
+ invalid_modules = [x for x in invalid_modules if x not in remove]
+
+ if invalid_modules:
+ mlog.error('Invalid Boost modules: ' + ', '.join(invalid_modules))
+ return True
+ else:
+ return False
+
+ def log_fail(self):
+ module_str = ', '.join(self.requested_modules)
+ mlog.log("Dependency Boost (%s) found:" % module_str, mlog.red('NO'))
+
+ def log_success(self):
+ module_str = ', '.join(self.requested_modules)
+ if self.boost_root:
+ info = self.version + ', ' + self.boost_root
+ else:
+ info = self.version
+ mlog.log('Dependency Boost (%s) found:' % module_str, mlog.green('YES'), info)
+
+ def detect_nix_roots(self):
+ return [os.path.abspath(os.path.join(x, '..'))
+ for x in self.compiler.get_default_include_dirs()]
+
+ def detect_win_roots(self):
+ res = []
+ # Where boost documentation says it should be
+ globtext = 'C:\\Program Files\\boost\\boost_*'
+ files = glob.glob(globtext)
+ res.extend(files)
+
+ # Where boost built from source actually installs it
+ if os.path.isdir('C:\\Boost'):
+ res.append('C:\\Boost')
+
+ # Where boost prebuilt binaries are
+ globtext = 'C:\\local\\boost_*'
+ files = glob.glob(globtext)
+ res.extend(files)
+ return res
+
+ def detect_nix_incdir(self):
+ if self.boost_root:
+ return os.path.join(self.boost_root, 'include')
+ return None
+
+ # FIXME: Should pick a version that matches the requested version
+ # Returns the folder that contains the boost folder.
+ def detect_win_incdir(self):
+ for root in self.boost_roots:
+ globtext = os.path.join(root, 'include', 'boost-*')
+ incdirs = glob.glob(globtext)
+ if len(incdirs) > 0:
+ return incdirs[0]
+ incboostdir = os.path.join(root, 'include', 'boost')
+ if os.path.isdir(incboostdir):
+ return os.path.join(root, 'include')
+ incboostdir = os.path.join(root, 'boost')
+ if os.path.isdir(incboostdir):
+ return root
+ return None
+
+ def get_compile_args(self):
+ args = []
+ include_dir = self.incdir
+
+ # Use "-isystem" when including boost headers instead of "-I"
+ # to avoid compiler warnings/failures when "-Werror" is used
+
+ # Careful not to use "-isystem" on default include dirs as it
+ # breaks some of the headers for certain gcc versions
+
+ # For example, doing g++ -isystem /usr/include on a simple
+ # "int main()" source results in the error:
+ # "/usr/include/c++/6.3.1/cstdlib:75:25: fatal error: stdlib.h: No such file or directory"
+
+ # See https://gcc.gnu.org/bugzilla/show_bug.cgi?id=70129
+ # and http://stackoverflow.com/questions/37218953/isystem-on-a-system-include-directory-causes-errors
+ # for more details
+
+ if include_dir and include_dir not in self.compiler.get_default_include_dirs():
+ args.append("".join(self.compiler.get_include_args(include_dir, True)))
+ return args
+
+ def get_requested(self, kwargs):
+ candidates = mesonlib.extract_as_list(kwargs, 'modules')
+ for c in candidates:
+ if not isinstance(c, str):
+ raise DependencyException('Boost module argument is not a string.')
+ return candidates
+
+ def detect_headers_and_version(self):
+ try:
+ version = self.compiler.get_define('BOOST_LIB_VERSION', '#include <boost/version.hpp>', self.env, self.get_compile_args(), [])
+ except mesonlib.EnvironmentException:
+ return
+ except TypeError:
+ return
+ # Remove quotes
+ version = version[1:-1]
+ # Fix version string
+ self.version = version.replace('_', '.')
+ self.is_found = True
+
+ def detect_lib_modules(self):
+ self.lib_modules = {}
+
+ # 1. Try to find modules using compiler.find_library( )
+ if self.find_libraries_with_abi_tags(self.abi_tags()):
+ pass
+ # 2. Fall back to the old method
+ else:
+ if mesonlib.for_windows(self.want_cross, self.env):
+ self.detect_lib_modules_win()
+ else:
+ self.detect_lib_modules_nix()
+
+ # 3. Check if we can find the modules
+ for m in self.requested_modules:
+ if 'boost_' + m not in self.lib_modules:
+ mlog.debug('Requested Boost library {!r} not found'.format(m))
+ self.is_found = False
+
+ def modname_from_filename(self, filename):
+ modname = os.path.basename(filename)
+ modname = modname.split('.', 1)[0]
+ modname = modname.split('-', 1)[0]
+ if modname.startswith('libboost'):
+ modname = modname[3:]
+ return modname
+
+ def compiler_tag(self):
+ tag = None
+ compiler = self.env.detect_cpp_compiler(self.want_cross)
+ if mesonlib.for_windows(self.want_cross, self.env):
+ if compiler.get_id() == 'msvc':
+ comp_ts_version = compiler.get_toolset_version()
+ compiler_ts = comp_ts_version.split('.')
+ # FIXME - what about other compilers?
+ tag = '-vc{}{}'.format(compiler_ts[0], compiler_ts[1])
+ else:
+ tag = ''
+ return tag
+
+ def threading_tag(self):
+ if not self.is_multithreading:
+ return ''
+
+ if mesonlib.for_darwin(self.want_cross, self.env):
+ # - Mac: requires -mt for multithreading, so should not fall back to non-mt libraries.
+ return '-mt'
+ elif mesonlib.for_windows(self.want_cross, self.env):
+ # - Windows: requires -mt for multithreading, so should not fall back to non-mt libraries.
+ return '-mt'
+ else:
+ # - Linux: leaves off -mt but libraries are multithreading-aware.
+ # - Cygwin: leaves off -mt but libraries are multithreading-aware.
+ return ''
+
+ def version_tag(self):
+ return '-' + self.version.replace('.', '_')
+
+ def debug_tag(self):
+ return '-gd' if self.is_debug else ''
+
+ def versioned_abi_tag(self):
+ return self.compiler_tag() + self.threading_tag() + self.debug_tag() + self.version_tag()
+
+ # FIXME - how to handle different distributions, e.g. for Mac? Currently we handle homebrew and macports, but not fink.
+ def abi_tags(self):
+ if mesonlib.for_windows(self.want_cross, self.env):
+ return [self.versioned_abi_tag(), self.threading_tag()]
+ else:
+ return [self.threading_tag()]
+
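For concreteness, here is how the tags compose on Windows with assumed values (MSVC toolset 14.1, threading=multi, a debug build, Boost 1.66); on other platforms only the threading tag is tried:

    # Hypothetical values for illustration only:
    compiler, threading, debug, version = '-vc141', '-mt', '-gd', '-1_66'
    versioned_abi_tag = compiler + threading + debug + version
    print('boost_system' + versioned_abi_tag)   # boost_system-vc141-mt-gd-1_66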
+ def sourceforge_dir(self):
+ if self.env.detect_cpp_compiler(self.want_cross).get_id() != 'msvc':
+ return None
+ comp_ts_version = self.env.detect_cpp_compiler(self.want_cross).get_toolset_version()
+ arch = detect_cpu_family(self.env.coredata.compilers)
+ if arch == 'x86':
+ return 'lib32-msvc-{}'.format(comp_ts_version)
+ elif arch == 'x86_64':
+ return 'lib64-msvc-{}'.format(comp_ts_version)
+ else:
+ # Does anyone do Boost cross-compiling to other archs on Windows?
+ return None
+
+ def find_libraries_with_abi_tag(self, tag):
+
+ # All modules should have the same tag
+ self.lib_modules = {}
+
+ all_found = True
+
+ for module in self.requested_modules:
+ libname = 'boost_' + module + tag
+
+ args = self.compiler.find_library(libname, self.env, self.extra_lib_dirs())
+ if args is None:
+ mlog.debug("Couldn\'t find library '{}' for boost module '{}' (ABI tag = '{}')".format(libname, module, tag))
+ all_found = False
+ else:
+ mlog.debug('Link args for boost module "{}" are {}'.format(module, args))
+ self.lib_modules['boost_' + module] = args
+
+ return all_found
+
+ def find_libraries_with_abi_tags(self, tags):
+ for tag in tags:
+ if self.find_libraries_with_abi_tag(tag):
+ return True
+ return False
+
+ def detect_lib_modules_win(self):
+ if not self.libdir:
+ # The libdirs in the distributed binaries (from sf)
+ lib_sf = self.sourceforge_dir()
+
+ if self.boost_root:
+ roots = [self.boost_root]
+ else:
+ roots = self.boost_roots
+ for root in roots:
+ # The default libdir when building
+ libdir = os.path.join(root, 'lib')
+ if os.path.isdir(libdir):
+ self.libdir = libdir
+ break
+ if lib_sf:
+ full_path = os.path.join(root, lib_sf)
+ if os.path.isdir(full_path):
+ self.libdir = full_path
+ break
+
+ if not self.libdir:
+ return
+
+ for name in self.need_static_link:
+ # FIXME - why are we only looking for *.lib? Mingw provides *.dll.a and *.a
+ libname = 'lib' + name + self.versioned_abi_tag() + '.lib'
+ if os.path.isfile(os.path.join(self.libdir, libname)):
+ self.lib_modules[self.modname_from_filename(libname)] = [libname]
+ else:
+ libname = "lib{}.lib".format(name)
+ if os.path.isfile(os.path.join(self.libdir, libname)):
+ self.lib_modules[name] = [libname]  # 'name' is already of the form 'boost_<module>'
+
+ # globber1 applies to a layout=system installation
+ # globber2 applies to a layout=versioned installation
+ globber1 = 'libboost_*' if self.static else 'boost_*'
+ globber2 = globber1 + self.versioned_abi_tag()
+ # FIXME - why are we only looking for *.lib? Mingw provides *.dll.a and *.a
+ globber2_matches = glob.glob(os.path.join(self.libdir, globber2 + '.lib'))
+ for entry in globber2_matches:
+ fname = os.path.basename(entry)
+ self.lib_modules[self.modname_from_filename(fname)] = [fname]
+ if len(globber2_matches) == 0:
+ # FIXME - why are we only looking for *.lib? Mingw provides *.dll.a and *.a
+ for entry in glob.glob(os.path.join(self.libdir, globber1 + '.lib')):
+ if self.static:
+ fname = os.path.basename(entry)
+ self.lib_modules[self.modname_from_filename(fname)] = [fname]
+
+ def detect_lib_modules_nix(self):
+ if self.static:
+ libsuffix = 'a'
+ elif mesonlib.for_darwin(self.want_cross, self.env):
+ libsuffix = 'dylib'
+ else:
+ libsuffix = 'so'
+
+ globber = 'libboost_*.{}'.format(libsuffix)
+ if self.libdir:
+ libdirs = [self.libdir]
+ elif self.boost_root is None:
+ libdirs = mesonlib.get_library_dirs()
+ else:
+ libdirs = [os.path.join(self.boost_root, 'lib')]
+ for libdir in libdirs:
+ for name in self.need_static_link:
+ libname = 'lib{}.a'.format(name)
+ if os.path.isfile(os.path.join(libdir, libname)):
+ self.lib_modules[name] = [libname]
+ for entry in glob.glob(os.path.join(libdir, globber)):
+ # I'm not 100% sure what to do here. Some distros
+ # have modules such as thread only as -mt versions.
+ # On debian all packages are built threading=multi
+ # but not suffixed with -mt.
+ # FIXME: implement detect_lib_modules_{debian, redhat, ...}
+ # FIXME: this wouldn't work with -mt-gd either. -BDR
+ if self.is_multithreading and mesonlib.is_debianlike():
+ pass
+ elif self.is_multithreading and entry.endswith('-mt.{}'.format(libsuffix)):
+ pass
+ elif not entry.endswith('-mt.{}'.format(libsuffix)):
+ pass
+ else:
+ continue
+ modname = self.modname_from_filename(entry)
+ if modname not in self.lib_modules:
+ self.lib_modules[modname] = [entry]
+
+ def extra_lib_dirs(self):
+ if self.libdir:
+ return [self.libdir]
+ elif self.boost_root:
+ return [os.path.join(self.boost_root, 'lib')]
+ return []
+
+ def get_link_args(self):
+ args = []
+ for dir in self.extra_lib_dirs():
+ args += self.compiler.get_linker_search_args(dir)
+ for lib in self.requested_modules:
+ args += self.lib_modules['boost_' + lib]
+ return args
+
+ def get_sources(self):
+ return []
+
+ def need_threads(self):
+ return 'thread' in self.requested_modules
+
+
+# Generated with boost_names.py
+BOOST_LIBS = [
+ 'boost_atomic',
+ 'boost_chrono',
+ 'boost_chrono',
+ 'boost_container',
+ 'boost_context',
+ 'boost_coroutine',
+ 'boost_date_time',
+ 'boost_exception',
+ 'boost_fiber',
+ 'boost_filesystem',
+ 'boost_graph',
+ 'boost_iostreams',
+ 'boost_locale',
+ 'boost_log',
+ 'boost_log_setup',
+ 'boost_math_tr1',
+ 'boost_math_tr1f',
+ 'boost_math_tr1l',
+ 'boost_math_c99',
+ 'boost_math_c99f',
+ 'boost_math_c99l',
+ 'boost_math_tr1',
+ 'boost_math_tr1f',
+ 'boost_math_tr1l',
+ 'boost_math_c99',
+ 'boost_math_c99f',
+ 'boost_math_c99l',
+ 'boost_math_tr1',
+ 'boost_math_tr1f',
+ 'boost_math_tr1l',
+ 'boost_math_c99',
+ 'boost_math_c99f',
+ 'boost_math_c99l',
+ 'boost_math_tr1',
+ 'boost_math_tr1f',
+ 'boost_math_tr1l',
+ 'boost_math_c99',
+ 'boost_math_c99f',
+ 'boost_math_c99l',
+ 'boost_math_tr1',
+ 'boost_math_tr1f',
+ 'boost_math_tr1l',
+ 'boost_math_c99',
+ 'boost_math_c99f',
+ 'boost_math_c99l',
+ 'boost_math_tr1',
+ 'boost_math_tr1f',
+ 'boost_math_tr1l',
+ 'boost_math_c99',
+ 'boost_math_c99f',
+ 'boost_math_c99l',
+ 'boost_mpi',
+ 'boost_program_options',
+ 'boost_python',
+ 'boost_python3',
+ 'boost_numpy',
+ 'boost_numpy3',
+ 'boost_random',
+ 'boost_regex',
+ 'boost_serialization',
+ 'boost_wserialization',
+ 'boost_signals',
+ 'boost_stacktrace_noop',
+ 'boost_stacktrace_backtrace',
+ 'boost_stacktrace_addr2line',
+ 'boost_stacktrace_basic',
+ 'boost_stacktrace_windbg',
+ 'boost_stacktrace_windbg_cached',
+ 'boost_system',
+ 'boost_prg_exec_monitor',
+ 'boost_test_exec_monitor',
+ 'boost_unit_test_framework',
+ 'boost_thread',
+ 'boost_timer',
+ 'boost_type_erasure',
+ 'boost_wave'
+]
+
+BOOST_DIRS = [
+ 'lambda',
+ 'optional',
+ 'convert',
+ 'system',
+ 'uuid',
+ 'archive',
+ 'align',
+ 'timer',
+ 'chrono',
+ 'gil',
+ 'logic',
+ 'signals',
+ 'predef',
+ 'tr1',
+ 'multi_index',
+ 'property_map',
+ 'multi_array',
+ 'context',
+ 'random',
+ 'endian',
+ 'circular_buffer',
+ 'proto',
+ 'assign',
+ 'format',
+ 'math',
+ 'phoenix',
+ 'graph',
+ 'locale',
+ 'mpl',
+ 'pool',
+ 'unordered',
+ 'core',
+ 'exception',
+ 'ptr_container',
+ 'flyweight',
+ 'range',
+ 'typeof',
+ 'thread',
+ 'move',
+ 'spirit',
+ 'dll',
+ 'compute',
+ 'serialization',
+ 'ratio',
+ 'msm',
+ 'config',
+ 'metaparse',
+ 'coroutine2',
+ 'qvm',
+ 'program_options',
+ 'concept',
+ 'detail',
+ 'hana',
+ 'concept_check',
+ 'compatibility',
+ 'variant',
+ 'type_erasure',
+ 'mpi',
+ 'test',
+ 'fusion',
+ 'log',
+ 'sort',
+ 'local_function',
+ 'units',
+ 'functional',
+ 'preprocessor',
+ 'integer',
+ 'container',
+ 'polygon',
+ 'interprocess',
+ 'numeric',
+ 'iterator',
+ 'wave',
+ 'lexical_cast',
+ 'multiprecision',
+ 'utility',
+ 'tti',
+ 'asio',
+ 'dynamic_bitset',
+ 'algorithm',
+ 'xpressive',
+ 'bimap',
+ 'signals2',
+ 'type_traits',
+ 'regex',
+ 'statechart',
+ 'parameter',
+ 'icl',
+ 'python',
+ 'lockfree',
+ 'intrusive',
+ 'io',
+ 'pending',
+ 'geometry',
+ 'tuple',
+ 'iostreams',
+ 'heap',
+ 'atomic',
+ 'filesystem',
+ 'smart_ptr',
+ 'function',
+ 'fiber',
+ 'type_index',
+ 'accumulators',
+ 'function_types',
+ 'coroutine',
+ 'vmd',
+ 'date_time',
+ 'property_tree',
+ 'bind'
+]
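
The pass/continue chain inside detect_lib_modules_nix (near the top of this file's additions) is easiest to read as a single predicate: the only globbed libraries it skips are '-mt' builds when single-threaded Boost was requested. A minimal, hypothetical restatement of that filter (not part of the patch; names mirror the variables above):

    def keeps_entry(entry, libsuffix, is_multithreading, is_debianlike):
        # Equivalent to the if/elif/else chain in detect_lib_modules_nix above.
        is_mt_lib = entry.endswith('-mt.{}'.format(libsuffix))
        if is_multithreading and is_debianlike:
            return True   # Debian builds threading=multi without an -mt suffix
        if is_multithreading and is_mt_lib:
            return True
        return not is_mt_lib

    # With threading=multi on a non-Debian system both libboost_thread.so and
    # libboost_thread-mt.so pass the filter; the 'modname not in self.lib_modules'
    # guard then keeps whichever one the glob returned first.
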
diff --git a/mesonbuild/dependencies/dev.py b/mesonbuild/dependencies/dev.py
index c254947..eae8ff7 100644
--- a/mesonbuild/dependencies/dev.py
+++ b/mesonbuild/dependencies/dev.py
@@ -120,18 +120,19 @@ class LLVMDependency(ConfigToolDependency):
# newest back to oldest (3.5 is arbitrary), and finally the devel version.
# Please note that llvm-config-6.0 is a development snapshot and it should
# not be moved to the beginning of the list. The only difference between
- # llvm-config-6.0 and llvm-config-devel is that the former is used by
+ # llvm-config-7 and llvm-config-devel is that the former is used by
# Debian and the latter is used by FreeBSD.
tools = [
'llvm-config', # base
- 'llvm-config-5.0', 'llvm-config50', # latest stable release
- 'llvm-config-4.0', 'llvm-config40', # old stable releases
+ 'llvm-config-6.0', 'llvm-config60',
+ 'llvm-config-5.0', 'llvm-config50',
+ 'llvm-config-4.0', 'llvm-config40',
'llvm-config-3.9', 'llvm-config39',
'llvm-config-3.8', 'llvm-config38',
'llvm-config-3.7', 'llvm-config37',
'llvm-config-3.6', 'llvm-config36',
'llvm-config-3.5', 'llvm-config35',
- 'llvm-config-6.0', 'llvm-config-devel', # development snapshot
+ 'llvm-config-7', 'llvm-config-devel', # development snapshot
]
tool_name = 'llvm-config'
__cpp_blacklist = {'-DNDEBUG'}
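
The ordering of the tools list matters because the candidates are tried in order, so newer stable releases are probed before older ones and the development snapshot comes last. A minimal sketch of that kind of first-match lookup, assuming only the standard library (this is not the actual ConfigToolDependency logic):

    import shutil

    def first_available(candidates):
        # Return the first candidate name that resolves on PATH, or None.
        for name in candidates:
            path = shutil.which(name)
            if path is not None:
                return name, path
        return None

    # e.g. first_available(['llvm-config', 'llvm-config-6.0', 'llvm-config-7'])
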
diff --git a/mesonbuild/dependencies/misc.py b/mesonbuild/dependencies/misc.py
index 9e9441f..2a218be 100644
--- a/mesonbuild/dependencies/misc.py
+++ b/mesonbuild/dependencies/misc.py
@@ -14,7 +14,6 @@
# This file contains the detection logic for miscellaneous external dependencies.
-import glob
import os
import re
import shlex
@@ -32,471 +31,6 @@ from .base import (
ConfigToolDependency,
)
-# On windows 3 directory layouts are supported:
-# * The default layout (versioned) installed:
-# - $BOOST_ROOT/include/boost-x_x/boost/*.hpp
-# - $BOOST_ROOT/lib/*.lib
-# * The non-default layout (system) installed:
-# - $BOOST_ROOT/include/boost/*.hpp
-# - $BOOST_ROOT/lib/*.lib
-# * The pre-built binaries from sf.net:
-# - $BOOST_ROOT/boost/*.hpp
-# - $BOOST_ROOT/lib<arch>-<compiler>/*.lib where arch=32/64 and compiler=msvc-14.1
-#
-# Note that we should also try to support:
-# mingw-w64 / Windows : libboost_<module>-mt.a (location = <prefix>/mingw64/lib/)
-# libboost_<module>-mt.dll.a
-#
-# Library names supported:
-# - libboost_<module>-<compiler>-mt-gd-x_x.lib (static)
-# - boost_<module>-<compiler>-mt-gd-x_x.lib|.dll (shared)
-# - libboost_<module>.lib (static)
-# - boost_<module>.lib|.dll (shared)
-# where compiler is vc141 for example.
-#
-# NOTE: -gd means runtime and build time debugging is on
-# -mt means threading=multi
-#
-# The `modules` argument accept library names. This is because every module that
-# has libraries to link against also has multiple options regarding how to
-# link. See for example:
-# * http://www.boost.org/doc/libs/1_65_1/libs/test/doc/html/boost_test/usage_variants.html
-# * http://www.boost.org/doc/libs/1_65_1/doc/html/stacktrace/configuration_and_build.html
-# * http://www.boost.org/doc/libs/1_65_1/libs/math/doc/html/math_toolkit/main_tr1.html
-
-# **On Unix**, official packaged versions of boost libraries follow the following schemes:
-#
-# Linux / Debian: libboost_<module>.so -> libboost_<module>.so.1.66.0
-# Linux / Red Hat: libboost_<module>.so -> libboost_<module>.so.1.66.0
-# Linux / OpenSuse: libboost_<module>.so -> libboost_<module>.so.1.66.0
-# Win / Cygwin: libboost_<module>.dll.a (location = /usr/lib)
-# libboost_<module>.a
-# cygboost_<module>_1_64.dll (location = /usr/bin)
-# Mac / homebrew: libboost_<module>.dylib + libboost_<module>-mt.dylib (location = /usr/local/lib)
-# Mac / macports: libboost_<module>.dylib + libboost_<module>-mt.dylib (location = /opt/local/lib)
-#
-# Its not clear that any other abi tags (e.g. -gd) are used in official packages.
-#
-# On Linux systems, boost libs have multithreading support enabled, but without the -mt tag.
-#
-# Boost documentation recommends using complex abi tags like "-lboost_regex-gcc34-mt-d-1_36".
-# (See http://www.boost.org/doc/libs/1_66_0/more/getting_started/unix-variants.html#library-naming)
-# However, its not clear that any Unix distribution follows this scheme.
-# Furthermore, the boost documentation for unix above uses examples from windows like
-# "libboost_regex-vc71-mt-d-x86-1_34.lib", so apparently the abi tags may be more aimed at windows.
-#
-# Probably we should use the linker search path to decide which libraries to use. This will
-# make it possible to find the macports boost libraries without setting BOOST_ROOT, and will
-# also mean that it would be possible to use user-installed boost libraries when official
-# packages are installed.
-#
-# We thus follow the following strategy:
-# 1. Look for libraries using compiler.find_library( )
-# 1.1 On Linux, just look for boost_<module>
-# 1.2 On other systems (e.g. Mac) look for boost_<module>-mt if multithreading.
-# 1.3 Otherwise look for boost_<module>
-# 2. Fall back to previous approach
-# 2.1. Search particular directories.
-# 2.2. Find boost libraries with unknown suffixes using file-name globbing.
-
-# TODO: Unix: Don't assume we know where the boost dir is, rely on -Idir and -Ldir being set.
-# TODO: Allow user to specify suffix in BOOST_SUFFIX, or add specific options like BOOST_DEBUG for 'd' for debug.
-
-class BoostDependency(ExternalDependency):
- def __init__(self, environment, kwargs):
- super().__init__('boost', environment, 'cpp', kwargs)
- self.need_static_link = ['boost_exception', 'boost_test_exec_monitor']
- # FIXME: is this the right way to find the build type?
- self.is_debug = environment.cmd_line_options.buildtype.startswith('debug')
- threading = kwargs.get("threading", "multi")
- self.is_multithreading = threading == "multi"
-
- self.requested_modules = self.get_requested(kwargs)
-
- self.boost_root = None
- self.boost_roots = []
- self.incdir = None
- self.libdir = None
-
- if 'BOOST_ROOT' in os.environ:
- self.boost_root = os.environ['BOOST_ROOT']
- self.boost_roots = [self.boost_root]
- if not os.path.isabs(self.boost_root):
- raise DependencyException('BOOST_ROOT must be an absolute path.')
- if 'BOOST_INCLUDEDIR' in os.environ:
- self.incdir = os.environ['BOOST_INCLUDEDIR']
- if 'BOOST_LIBRARYDIR' in os.environ:
- self.libdir = os.environ['BOOST_LIBRARYDIR']
-
- if self.boost_root is None:
- if mesonlib.for_windows(self.want_cross, self.env):
- self.boost_roots = self.detect_win_roots()
- else:
- self.boost_roots = self.detect_nix_roots()
-
- if self.incdir is None:
- if mesonlib.for_windows(self.want_cross, self.env):
- self.incdir = self.detect_win_incdir()
- else:
- self.incdir = self.detect_nix_incdir()
-
- if self.check_invalid_modules():
- self.log_fail()
- return
-
- mlog.debug('Boost library root dir is', mlog.bold(self.boost_root))
- mlog.debug('Boost include directory is', mlog.bold(self.incdir))
-
- # 1. check if we can find BOOST headers.
- self.detect_headers_and_version()
-
- # 2. check if we can find BOOST libraries.
- if self.is_found:
- self.detect_lib_modules()
- mlog.debug('Boost library directory is', mlog.bold(self.libdir))
-
- # 3. Report success or failure
- if self.is_found:
- self.log_success()
- else:
- self.log_fail()
-
- def check_invalid_modules(self):
- invalid_modules = [c for c in self.requested_modules if 'boost_' + c not in BOOST_LIBS]
-
- # previous versions of meson allowed include dirs as modules
- remove = []
- for m in invalid_modules:
- if m in BOOST_DIRS:
- mlog.warning('Requested boost library', mlog.bold(m), 'that doesn\'t exist. '
- 'This will be an error in the future')
- remove.append(m)
-
- self.requested_modules = [x for x in self.requested_modules if x not in remove]
- invalid_modules = [x for x in invalid_modules if x not in remove]
-
- if invalid_modules:
- mlog.log(mlog.red('ERROR:'), 'Invalid Boost modules: ' + ', '.join(invalid_modules))
- return True
- else:
- return False
-
- def log_fail(self):
- module_str = ', '.join(self.requested_modules)
- mlog.log("Dependency Boost (%s) found:" % module_str, mlog.red('NO'))
-
- def log_success(self):
- module_str = ', '.join(self.requested_modules)
- if self.boost_root:
- info = self.version + ', ' + self.boost_root
- else:
- info = self.version
- mlog.log('Dependency Boost (%s) found:' % module_str, mlog.green('YES'), info)
-
- def detect_nix_roots(self):
- return [os.path.abspath(os.path.join(x, '..'))
- for x in self.compiler.get_default_include_dirs()]
-
- def detect_win_roots(self):
- res = []
- # Where boost documentation says it should be
- globtext = 'C:\\Program Files\\boost\\boost_*'
- files = glob.glob(globtext)
- res.extend(files)
-
- # Where boost built from source actually installs it
- if os.path.isdir('C:\\Boost'):
- res.append('C:\\Boost')
-
- # Where boost prebuilt binaries are
- globtext = 'C:\\local\\boost_*'
- files = glob.glob(globtext)
- res.extend(files)
- return res
-
- def detect_nix_incdir(self):
- if self.boost_root:
- return os.path.join(self.boost_root, 'include')
- return None
-
- # FIXME: Should pick a version that matches the requested version
- # Returns the folder that contains the boost folder.
- def detect_win_incdir(self):
- for root in self.boost_roots:
- globtext = os.path.join(root, 'include', 'boost-*')
- incdirs = glob.glob(globtext)
- if len(incdirs) > 0:
- return incdirs[0]
- incboostdir = os.path.join(root, 'include', 'boost')
- if os.path.isdir(incboostdir):
- return os.path.join(root, 'include')
- incboostdir = os.path.join(root, 'boost')
- if os.path.isdir(incboostdir):
- return root
- return None
-
- def get_compile_args(self):
- args = []
- include_dir = self.incdir
-
- # Use "-isystem" when including boost headers instead of "-I"
- # to avoid compiler warnings/failures when "-Werror" is used
-
- # Careful not to use "-isystem" on default include dirs as it
- # breaks some of the headers for certain gcc versions
-
- # For example, doing g++ -isystem /usr/include on a simple
- # "int main()" source results in the error:
- # "/usr/include/c++/6.3.1/cstdlib:75:25: fatal error: stdlib.h: No such file or directory"
-
- # See https://gcc.gnu.org/bugzilla/show_bug.cgi?id=70129
- # and http://stackoverflow.com/questions/37218953/isystem-on-a-system-include-directory-causes-errors
- # for more details
-
- if include_dir and include_dir not in self.compiler.get_default_include_dirs():
- args.append("".join(self.compiler.get_include_args(include_dir, True)))
- return args
-
- def get_requested(self, kwargs):
- candidates = mesonlib.extract_as_list(kwargs, 'modules')
- for c in candidates:
- if not isinstance(c, str):
- raise DependencyException('Boost module argument is not a string.')
- return candidates
-
- def detect_headers_and_version(self):
- try:
- version = self.compiler.get_define('BOOST_LIB_VERSION', '#include <boost/version.hpp>', self.env, self.get_compile_args(), [])
- except mesonlib.EnvironmentException:
- return
- except TypeError:
- return
- # Remove quotes
- version = version[1:-1]
- # Fix version string
- self.version = version.replace('_', '.')
- self.is_found = True
-
- def detect_lib_modules(self):
- self.lib_modules = {}
-
- # 1. Try to find modules using compiler.find_library( )
- if self.find_libraries_with_abi_tags(self.abi_tags()):
- pass
- # 2. Fall back to the old method
- else:
- if mesonlib.for_windows(self.want_cross, self.env):
- self.detect_lib_modules_win()
- else:
- self.detect_lib_modules_nix()
-
- # 3. Check if we can find the modules
- for m in self.requested_modules:
- if 'boost_' + m not in self.lib_modules:
- mlog.debug('Requested Boost library {!r} not found'.format(m))
- self.is_found = False
-
- def modname_from_filename(self, filename):
- modname = os.path.basename(filename)
- modname = modname.split('.', 1)[0]
- modname = modname.split('-', 1)[0]
- if modname.startswith('libboost'):
- modname = modname[3:]
- return modname
-
- def compiler_tag(self):
- tag = None
- compiler = self.env.detect_cpp_compiler(self.want_cross)
- if mesonlib.for_windows(self.want_cross, self.env):
- if compiler.get_id() == 'msvc':
- comp_ts_version = compiler.get_toolset_version()
- compiler_ts = comp_ts_version.split('.')
- # FIXME - what about other compilers?
- tag = '-vc{}{}'.format(compiler_ts[0], compiler_ts[1])
- else:
- tag = ''
- return tag
-
- def threading_tag(self):
- if not self.is_multithreading:
- return ''
-
- if mesonlib.for_darwin(self.want_cross, self.env):
- # - Mac: requires -mt for multithreading, so should not fall back to non-mt libraries.
- return '-mt'
- elif mesonlib.for_windows(self.want_cross, self.env):
- # - Windows: requires -mt for multithreading, so should not fall back to non-mt libraries.
- return '-mt'
- else:
- # - Linux: leaves off -mt but libraries are multithreading-aware.
- # - Cygwin: leaves off -mt but libraries are multithreading-aware.
- return ''
-
- def version_tag(self):
- return '-' + self.version.replace('.', '_')
-
- def debug_tag(self):
- return '-gd' if self.is_debug else ''
-
- def versioned_abi_tag(self):
- return self.compiler_tag() + self.threading_tag() + self.debug_tag() + self.version_tag()
-
- # FIXME - how to handle different distributions, e.g. for Mac? Currently we handle homebrew and macports, but not fink.
- def abi_tags(self):
- if mesonlib.for_windows(self.want_cross, self.env):
- return [self.versioned_abi_tag(), self.threading_tag()]
- else:
- return [self.threading_tag()]
-
- def sourceforge_dir(self):
- if self.env.detect_cpp_compiler(self.want_cross).get_id() != 'msvc':
- return None
- comp_ts_version = self.env.detect_cpp_compiler(self.want_cross).get_toolset_version()
- arch = detect_cpu_family(self.env.coredata.compilers)
- if arch == 'x86':
- return 'lib32-msvc-{}'.format(comp_ts_version)
- elif arch == 'x86_64':
- return 'lib64-msvc-{}'.format(comp_ts_version)
- else:
- # Does anyone do Boost cross-compiling to other archs on Windows?
- return None
-
- def find_libraries_with_abi_tag(self, tag):
-
- # All modules should have the same tag
- self.lib_modules = {}
-
- all_found = True
-
- for module in self.requested_modules:
- libname = 'boost_' + module + tag
-
- args = self.compiler.find_library(libname, self.env, self.extra_lib_dirs())
- if args is None:
- mlog.debug("Couldn\'t find library '{}' for boost module '{}' (ABI tag = '{}')".format(libname, module, tag))
- all_found = False
- else:
- mlog.debug('Link args for boost module "{}" are {}'.format(module, args))
- self.lib_modules['boost_' + module] = args
-
- return all_found
-
- def find_libraries_with_abi_tags(self, tags):
- for tag in tags:
- if self.find_libraries_with_abi_tag(tag):
- return True
- return False
-
- def detect_lib_modules_win(self):
- if not self.libdir:
- # The libdirs in the distributed binaries (from sf)
- lib_sf = self.sourceforge_dir()
-
- if self.boost_root:
- roots = [self.boost_root]
- else:
- roots = self.boost_roots
- for root in roots:
- # The default libdir when building
- libdir = os.path.join(root, 'lib')
- if os.path.isdir(libdir):
- self.libdir = libdir
- break
- if lib_sf:
- full_path = os.path.join(root, lib_sf)
- if os.path.isdir(full_path):
- self.libdir = full_path
- break
-
- if not self.libdir:
- return
-
- for name in self.need_static_link:
- # FIXME - why are we only looking for *.lib? Mingw provides *.dll.a and *.a
- libname = 'lib' + name + self.versioned_abi_tag() + '.lib'
- if os.path.isfile(os.path.join(self.libdir, libname)):
- self.lib_modules[self.modname_from_filename(libname)] = [libname]
- else:
- libname = "lib{}.lib".format(name)
- if os.path.isfile(os.path.join(self.libdir, libname)):
- self.lib_modules[name[3:]] = [libname]
-
- # globber1 applies to a layout=system installation
- # globber2 applies to a layout=versioned installation
- globber1 = 'libboost_*' if self.static else 'boost_*'
- globber2 = globber1 + self.versioned_abi_tag()
- # FIXME - why are we only looking for *.lib? Mingw provides *.dll.a and *.a
- globber2_matches = glob.glob(os.path.join(self.libdir, globber2 + '.lib'))
- for entry in globber2_matches:
- fname = os.path.basename(entry)
- self.lib_modules[self.modname_from_filename(fname)] = [fname]
- if len(globber2_matches) == 0:
- # FIXME - why are we only looking for *.lib? Mingw provides *.dll.a and *.a
- for entry in glob.glob(os.path.join(self.libdir, globber1 + '.lib')):
- if self.static:
- fname = os.path.basename(entry)
- self.lib_modules[self.modname_from_filename(fname)] = [fname]
-
- def detect_lib_modules_nix(self):
- if self.static:
- libsuffix = 'a'
- elif mesonlib.for_darwin(self.want_cross, self.env):
- libsuffix = 'dylib'
- else:
- libsuffix = 'so'
-
- globber = 'libboost_*.{}'.format(libsuffix)
- if self.libdir:
- libdirs = [self.libdir]
- elif self.boost_root is None:
- libdirs = mesonlib.get_library_dirs()
- else:
- libdirs = [os.path.join(self.boost_root, 'lib')]
- for libdir in libdirs:
- for name in self.need_static_link:
- libname = 'lib{}.a'.format(name)
- if os.path.isfile(os.path.join(libdir, libname)):
- self.lib_modules[name] = [libname]
- for entry in glob.glob(os.path.join(libdir, globber)):
- # I'm not 100% sure what to do here. Some distros
- # have modules such as thread only as -mt versions.
- # On debian all packages are built threading=multi
- # but not suffixed with -mt.
- # FIXME: implement detect_lib_modules_{debian, redhat, ...}
- # FIXME: this wouldn't work with -mt-gd either. -BDR
- if self.is_multithreading and mesonlib.is_debianlike():
- pass
- elif self.is_multithreading and entry.endswith('-mt.{}'.format(libsuffix)):
- pass
- elif not entry.endswith('-mt.{}'.format(libsuffix)):
- pass
- else:
- continue
- modname = self.modname_from_filename(entry)
- if modname not in self.lib_modules:
- self.lib_modules[modname] = [entry]
-
- def extra_lib_dirs(self):
- if self.libdir:
- return [self.libdir]
- elif self.boost_root:
- return [os.path.join(self.boost_root, 'lib')]
- return []
-
- def get_link_args(self):
- args = []
- for dir in self.extra_lib_dirs():
- args += self.compiler.get_linker_search_args(dir)
- for lib in self.requested_modules:
- args += self.lib_modules['boost_' + lib]
- return args
-
- def get_sources(self):
- return []
-
- def need_threads(self):
- return 'thread' in self.requested_modules
-
class MPIDependency(ExternalDependency):
def __init__(self, environment, kwargs):
@@ -972,197 +506,3 @@ class LibWmfDependency(ExternalDependency):
return [DependencyMethods.PKGCONFIG, DependencyMethods.CONFIG_TOOL, DependencyMethods.EXTRAFRAMEWORK]
else:
return [DependencyMethods.PKGCONFIG, DependencyMethods.CONFIG_TOOL]
-
-
-# Generated with boost_names.py
-BOOST_LIBS = [
- 'boost_atomic',
- 'boost_chrono',
- 'boost_chrono',
- 'boost_container',
- 'boost_context',
- 'boost_coroutine',
- 'boost_date_time',
- 'boost_exception',
- 'boost_fiber',
- 'boost_filesystem',
- 'boost_graph',
- 'boost_iostreams',
- 'boost_locale',
- 'boost_log',
- 'boost_log_setup',
- 'boost_math_tr1',
- 'boost_math_tr1f',
- 'boost_math_tr1l',
- 'boost_math_c99',
- 'boost_math_c99f',
- 'boost_math_c99l',
- 'boost_math_tr1',
- 'boost_math_tr1f',
- 'boost_math_tr1l',
- 'boost_math_c99',
- 'boost_math_c99f',
- 'boost_math_c99l',
- 'boost_math_tr1',
- 'boost_math_tr1f',
- 'boost_math_tr1l',
- 'boost_math_c99',
- 'boost_math_c99f',
- 'boost_math_c99l',
- 'boost_math_tr1',
- 'boost_math_tr1f',
- 'boost_math_tr1l',
- 'boost_math_c99',
- 'boost_math_c99f',
- 'boost_math_c99l',
- 'boost_math_tr1',
- 'boost_math_tr1f',
- 'boost_math_tr1l',
- 'boost_math_c99',
- 'boost_math_c99f',
- 'boost_math_c99l',
- 'boost_math_tr1',
- 'boost_math_tr1f',
- 'boost_math_tr1l',
- 'boost_math_c99',
- 'boost_math_c99f',
- 'boost_math_c99l',
- 'boost_mpi',
- 'boost_program_options',
- 'boost_python',
- 'boost_python3',
- 'boost_numpy',
- 'boost_numpy3',
- 'boost_random',
- 'boost_regex',
- 'boost_serialization',
- 'boost_wserialization',
- 'boost_signals',
- 'boost_stacktrace_noop',
- 'boost_stacktrace_backtrace',
- 'boost_stacktrace_addr2line',
- 'boost_stacktrace_basic',
- 'boost_stacktrace_windbg',
- 'boost_stacktrace_windbg_cached',
- 'boost_system',
- 'boost_prg_exec_monitor',
- 'boost_test_exec_monitor',
- 'boost_unit_test_framework',
- 'boost_thread',
- 'boost_timer',
- 'boost_type_erasure',
- 'boost_wave'
-]
-
-BOOST_DIRS = [
- 'lambda',
- 'optional',
- 'convert',
- 'system',
- 'uuid',
- 'archive',
- 'align',
- 'timer',
- 'chrono',
- 'gil',
- 'logic',
- 'signals',
- 'predef',
- 'tr1',
- 'multi_index',
- 'property_map',
- 'multi_array',
- 'context',
- 'random',
- 'endian',
- 'circular_buffer',
- 'proto',
- 'assign',
- 'format',
- 'math',
- 'phoenix',
- 'graph',
- 'locale',
- 'mpl',
- 'pool',
- 'unordered',
- 'core',
- 'exception',
- 'ptr_container',
- 'flyweight',
- 'range',
- 'typeof',
- 'thread',
- 'move',
- 'spirit',
- 'dll',
- 'compute',
- 'serialization',
- 'ratio',
- 'msm',
- 'config',
- 'metaparse',
- 'coroutine2',
- 'qvm',
- 'program_options',
- 'concept',
- 'detail',
- 'hana',
- 'concept_check',
- 'compatibility',
- 'variant',
- 'type_erasure',
- 'mpi',
- 'test',
- 'fusion',
- 'log',
- 'sort',
- 'local_function',
- 'units',
- 'functional',
- 'preprocessor',
- 'integer',
- 'container',
- 'polygon',
- 'interprocess',
- 'numeric',
- 'iterator',
- 'wave',
- 'lexical_cast',
- 'multiprecision',
- 'utility',
- 'tti',
- 'asio',
- 'dynamic_bitset',
- 'algorithm',
- 'xpressive',
- 'bimap',
- 'signals2',
- 'type_traits',
- 'regex',
- 'statechart',
- 'parameter',
- 'icl',
- 'python',
- 'lockfree',
- 'intrusive',
- 'io',
- 'pending',
- 'geometry',
- 'tuple',
- 'iostreams',
- 'heap',
- 'atomic',
- 'filesystem',
- 'smart_ptr',
- 'function',
- 'fiber',
- 'type_index',
- 'accumulators',
- 'function_types',
- 'coroutine',
- 'vmd',
- 'date_time',
- 'property_tree',
- 'bind'
-]
diff --git a/mesonbuild/dependencies/ui.py b/mesonbuild/dependencies/ui.py
index a6307c4..2f31196 100644
--- a/mesonbuild/dependencies/ui.py
+++ b/mesonbuild/dependencies/ui.py
@@ -17,14 +17,13 @@
import os
import re
-import shutil
import subprocess
from collections import OrderedDict
from .. import mlog
from .. import mesonlib
from ..mesonlib import (
- MesonException, Popen_safe, extract_as_list, for_windows,
+ MesonException, Popen_safe, extract_as_list, for_windows, for_cygwin,
version_compare_many
)
from ..environment import detect_cpu
@@ -282,10 +281,15 @@ class QtBaseDependency(ExternalDependency):
(k, v) = tuple(line.split(':', 1))
qvars[k] = v
if mesonlib.is_osx():
- return self._framework_detect(qvars, mods, kwargs)
+ self._framework_detect(qvars, mods, kwargs)
+ return qmake
incdir = qvars['QT_INSTALL_HEADERS']
self.compile_args.append('-I' + incdir)
libdir = qvars['QT_INSTALL_LIBS']
+ if for_cygwin(self.env.is_cross_build(), self.env):
+ shlibext = '.dll.a'
+ else:
+ shlibext = '.so'
# Used by self.compilers_detect()
self.bindir = self.get_qmake_host_bins(qvars)
self.is_found = True
@@ -307,7 +311,7 @@ class QtBaseDependency(ExternalDependency):
self.is_found = False
break
else:
- libfile = os.path.join(libdir, 'lib{}{}.so'.format(self.qtpkgname, module))
+ libfile = os.path.join(libdir, 'lib{}{}{}'.format(self.qtpkgname, module, shlibext))
if not os.path.isfile(libfile):
self.is_found = False
break
@@ -316,15 +320,23 @@ class QtBaseDependency(ExternalDependency):
def _framework_detect(self, qvars, modules, kwargs):
libdir = qvars['QT_INSTALL_LIBS']
+
+ # ExtraFrameworkDependency doesn't support any methods
+ fw_kwargs = kwargs.copy()
+ fw_kwargs.pop('method', None)
+
for m in modules:
fname = 'Qt' + m
fwdep = ExtraFrameworkDependency(fname, False, libdir, self.env,
- self.language, kwargs)
+ self.language, fw_kwargs)
self.compile_args.append('-F' + libdir)
if fwdep.found():
- self.is_found = True
self.compile_args += fwdep.get_compile_args()
self.link_args += fwdep.get_link_args()
+ else:
+ break
+ else:
+ self.is_found = True
# Used by self.compilers_detect()
self.bindir = self.get_qmake_host_bins(qvars)
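
The rewritten _framework_detect relies on Python's for/else: the else branch only runs when the loop finished without hitting break, so is_found is set only if every requested Qt module was found as a framework. A standalone illustration of that control flow (module names are made up):

    def all_found(modules, available):
        for m in modules:
            if m not in available:
                break           # one missing module aborts the search
        else:
            return True         # runs only when the loop never hit 'break'
        return False

    print(all_found(['Core', 'Gui'], {'Core', 'Gui'}))  # True
    print(all_found(['Core', 'Gui'], {'Core'}))         # False
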
@@ -432,8 +444,8 @@ class WxDependency(ConfigToolDependency):
self.requested_modules = self.get_requested(kwargs)
# wx-config seems to have a cflags as well but since it requires C++,
# this should be good, at least for now.
- self.compile_args = self.get_config_value(['--cxxflags'], 'compile_args')
- self.link_args = self.get_config_value(['--libs'], 'link_args')
+ self.compile_args = self.get_config_value(['--cxxflags'] + self.requested_modules, 'compile_args')
+ self.link_args = self.get_config_value(['--libs'] + self.requested_modules, 'link_args')
def get_requested(self, kwargs):
if 'modules' not in kwargs:
diff --git a/mesonbuild/environment.py b/mesonbuild/environment.py
index cb23a5b..cd8d92c 100644
--- a/mesonbuild/environment.py
+++ b/mesonbuild/environment.py
@@ -19,7 +19,6 @@ from .linkers import ArLinker, VisualStudioLinker
from . import mesonlib
from .mesonlib import EnvironmentException, Popen_safe
from . import mlog
-import sys
from . import compilers
from .compilers import (
@@ -57,6 +56,7 @@ from .compilers import (
IntelFortranCompiler,
JavaCompiler,
MonoCompiler,
+ VisualStudioCsCompiler,
NAGFortranCompiler,
Open64FortranCompiler,
PathScaleFortranCompiler,
@@ -79,19 +79,32 @@ cflags_mapping = {'c': 'CFLAGS',
'd': 'DFLAGS',
'vala': 'VALAFLAGS'}
+def detect_gcovr(version='3.1', log=False):
+ gcovr_exe = 'gcovr'
+ try:
+ p, found = Popen_safe([gcovr_exe, '--version'])[0:2]
+ except (FileNotFoundError, PermissionError):
+ # Doesn't exist in PATH or isn't executable
+ return None, None
+ found = search_version(found)
+ if p.returncode == 0:
+ if log:
+ mlog.log('Found gcovr-{} at {}'.format(found, shlex.quote(shutil.which(gcovr_exe))))
+ return gcovr_exe, mesonlib.version_compare(found, '>=' + version)
+ return None, None
def find_coverage_tools():
- gcovr_exe = 'gcovr'
+ gcovr_exe, gcovr_new_rootdir = detect_gcovr()
+
lcov_exe = 'lcov'
genhtml_exe = 'genhtml'
- if not mesonlib.exe_exists([gcovr_exe, '--version']):
- gcovr_exe = None
if not mesonlib.exe_exists([lcov_exe, '--version']):
lcov_exe = None
if not mesonlib.exe_exists([genhtml_exe, '--version']):
genhtml_exe = None
- return gcovr_exe, lcov_exe, genhtml_exe
+
+ return gcovr_exe, gcovr_new_rootdir, lcov_exe, genhtml_exe
def detect_ninja(version='1.5', log=False):
for n in ['ninja', 'ninja-build']:
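
find_coverage_tools() now returns four values rather than three; the extra boolean reports whether the detected gcovr is at least the version passed to detect_gcovr() (3.1 by default), i.e. new enough for the newer rootdir handling. A hedged sketch of a caller unpacking the new tuple (the caller itself is hypothetical):

    def describe_coverage_tools(tools):
        # 'tools' has the shape returned by find_coverage_tools():
        # (gcovr_exe, gcovr_new_rootdir, lcov_exe, genhtml_exe)
        gcovr_exe, gcovr_new_rootdir, lcov_exe, genhtml_exe = tools
        if gcovr_exe is None:
            return 'gcovr not found'
        return 'gcovr, new rootdir handling' if gcovr_new_rootdir else 'gcovr, legacy invocation'

    print(describe_coverage_tools(('gcovr', True, None, None)))
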
@@ -257,8 +270,7 @@ class Environment:
os.makedirs(self.scratch_dir, exist_ok=True)
os.makedirs(self.log_dir, exist_ok=True)
try:
- cdf = os.path.join(self.get_build_dir(), Environment.coredata_file)
- self.coredata = coredata.load(cdf)
+ self.coredata = coredata.load(self.get_build_dir())
self.first_invocation = False
except FileNotFoundError:
# WARNING: Don't use any values from coredata in __init__. It gets
@@ -281,6 +293,10 @@ class Environment:
else:
self.default_c = ['cc', 'gcc', 'clang']
self.default_cpp = ['c++', 'g++', 'clang++']
+ if mesonlib.is_windows():
+ self.default_cs = ['csc', 'mcs']
+ else:
+ self.default_cs = ['mcs', 'csc']
self.default_objc = ['cc']
self.default_objcpp = ['c++']
self.default_fortran = ['gfortran', 'g95', 'f95', 'f90', 'f77', 'ifort']
@@ -317,9 +333,8 @@ class Environment:
return self.cross_info is not None
def dump_coredata(self):
- cdf = os.path.join(self.get_build_dir(), Environment.coredata_file)
- coredata.save(self.coredata, cdf)
- return cdf
+ coredata.save(self.coredata, self.get_build_dir())
+ return os.path.join(self.get_build_dir(), Environment.coredata_file)
def get_script_dir(self):
import mesonbuild.scripts
@@ -434,7 +449,7 @@ class Environment:
def _get_compilers(self, lang, evar, want_cross):
'''
The list of compilers is detected in the exact same way for
- C, C++, ObjC, ObjC++, Fortran so consolidate it here.
+ C, C++, ObjC, ObjC++, Fortran, CS so consolidate it here.
'''
if self.is_cross_build() and want_cross:
compilers = mesonlib.stringlistify(self.cross_info.config['binaries'][lang])
@@ -697,21 +712,29 @@ class Environment:
except OSError:
raise EnvironmentException('Could not execute Java compiler "%s"' % ' '.join(exelist))
version = search_version(err)
- if 'javac' in err:
+ if 'javac' in out or 'javac' in err:
return JavaCompiler(exelist, version)
raise EnvironmentException('Unknown compiler "' + ' '.join(exelist) + '"')
def detect_cs_compiler(self):
- exelist = ['mcs']
- try:
- p, out, err = Popen_safe(exelist + ['--version'])
- except OSError:
- raise EnvironmentException('Could not execute C# compiler "%s"' % ' '.join(exelist))
- version = search_version(out)
- full_version = out.split('\n', 1)[0]
- if 'Mono' in out:
- return MonoCompiler(exelist, version, full_version=full_version)
- raise EnvironmentException('Unknown compiler "' + ' '.join(exelist) + '"')
+ compilers, ccache, is_cross, exe_wrap = self._get_compilers('cs', 'CSC', False)
+ popen_exceptions = {}
+ for comp in compilers:
+ if not isinstance(comp, list):
+ comp = [comp]
+ try:
+ p, out, err = Popen_safe(comp + ['--version'])
+ except OSError as e:
+ popen_exceptions[' '.join(comp + ['--version'])] = e
+ continue
+
+ version = search_version(out)
+ if 'Mono' in out:
+ return MonoCompiler(comp, version)
+ elif "Visual C#" in out:
+ return VisualStudioCsCompiler(comp, version)
+
+ self._handle_exceptions(popen_exceptions, compilers)
def detect_vala_compiler(self):
if 'VALAC' in os.environ:
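
detect_cs_compiler() now probes each candidate from default_cs in order (csc before mcs on Windows, mcs first elsewhere) and classifies it by the text of its --version banner. A tiny sketch of that classification step, mirroring only the substring checks above (banner text is approximate):

    def classify_cs_compiler(version_output):
        if 'Mono' in version_output:
            return 'MonoCompiler'
        if 'Visual C#' in version_output:
            return 'VisualStudioCsCompiler'
        return None  # unknown compiler; the caller reports the failure

    print(classify_cs_compiler('Mono C# compiler version 5.4.0'))  # MonoCompiler
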
diff --git a/mesonbuild/interpreter.py b/mesonbuild/interpreter.py
index 8041526..b99a413 100644
--- a/mesonbuild/interpreter.py
+++ b/mesonbuild/interpreter.py
@@ -21,12 +21,12 @@ from . import optinterpreter
from . import compilers
from .wrap import wrap, WrapMode
from . import mesonlib
-from .mesonlib import FileMode, Popen_safe, listify, extract_as_list
+from .mesonlib import FileMode, Popen_safe, listify, extract_as_list, has_path_sep
from .dependencies import ExternalProgram
from .dependencies import InternalDependency, Dependency, DependencyException
from .interpreterbase import InterpreterBase
-from .interpreterbase import check_stringlist, noPosargs, noKwargs, stringArgs, permittedKwargs
-from .interpreterbase import InterpreterException, InvalidArguments, InvalidCode
+from .interpreterbase import check_stringlist, noPosargs, noKwargs, stringArgs, permittedKwargs, permittedMethodKwargs
+from .interpreterbase import InterpreterException, InvalidArguments, InvalidCode, SubdirDoneRequest
from .interpreterbase import InterpreterObject, MutableInterpreterObject, Disabler
from .modules import ModuleReturnValue
@@ -37,6 +37,7 @@ from pathlib import PurePath
import importlib
+
def stringifyUserArguments(args):
if isinstance(args, list):
return '[%s]' % ', '.join([stringifyUserArguments(x) for x in args])
@@ -247,7 +248,7 @@ class ConfigurationDataHolder(MutableInterpreterObject, ObjectHolder):
return val
def get(self, name):
- return self.held_object.values[name] # (val, desc)
+ return self.held_object.values[name] # (val, desc)
def keys(self):
return self.held_object.values.keys()
@@ -604,6 +605,31 @@ class StaticLibraryHolder(BuildTargetHolder):
class SharedLibraryHolder(BuildTargetHolder):
def __init__(self, target, interp):
super().__init__(target, interp)
+ # Set to True only when called from self.func_shared_lib().
+ target.shared_library_only = False
+
+class BothLibrariesHolder(BuildTargetHolder):
+ def __init__(self, shared_holder, static_holder, interp):
+ # FIXME: This build target always represents the shared library, but
+ # that should be configurable.
+ super().__init__(shared_holder.held_object, interp)
+ self.shared_holder = shared_holder
+ self.static_holder = static_holder
+ self.methods.update({'get_shared_lib': self.get_shared_lib_method,
+ 'get_static_lib': self.get_static_lib_method,
+ })
+
+ def __repr__(self):
+ r = '<{} {}: {}, {}: {}>'
+ h1 = self.shared_holder.held_object
+ h2 = self.static_holder.held_object
+ return r.format(self.__class__.__name__, h1.get_id(), h1.filename, h2.get_id(), h2.filename)
+
+ def get_shared_lib_method(self, args, kwargs):
+ return self.shared_holder
+
+ def get_static_lib_method(self, args, kwargs):
+ return self.static_holder
class SharedModuleHolder(BuildTargetHolder):
def __init__(self, target, interp):
@@ -652,10 +678,11 @@ class RunTargetHolder(InterpreterObject, ObjectHolder):
return r.format(self.__class__.__name__, h.get_id(), h.command)
class Test(InterpreterObject):
- def __init__(self, name, suite, exe, is_parallel, cmd_args, env, should_fail, timeout, workdir):
+ def __init__(self, name, project, suite, exe, is_parallel, cmd_args, env, should_fail, timeout, workdir):
InterpreterObject.__init__(self)
self.name = name
self.suite = suite
+ self.project_name = project
self.exe = exe
self.is_parallel = is_parallel
self.cmd_args = cmd_args
@@ -718,9 +745,11 @@ class CompilerHolder(InterpreterObject):
'symbols_have_underscore_prefix': self.symbols_have_underscore_prefix_method,
})
+ @permittedMethodKwargs({})
def version_method(self, args, kwargs):
return self.compiler.version
+ @permittedMethodKwargs({})
def cmd_array_method(self, args, kwargs):
return self.compiler.exelist
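
The @permittedMethodKwargs decorators added throughout this file declare which keyword arguments each compiler-object method accepts; the decorator's implementation is not part of this hunk. As an assumption only, a minimal stand-in with the same shape might look like this (the real decorator may warn rather than raise):

    import functools

    def permitted_method_kwargs(allowed):
        # Hypothetical sketch: reject keyword arguments outside 'allowed'.
        def decorator(func):
            @functools.wraps(func)
            def wrapper(self, args, kwargs):
                unknown = set(kwargs) - set(allowed)
                if unknown:
                    raise TypeError('unexpected keyword arguments: ' + ', '.join(sorted(unknown)))
                return func(self, args, kwargs)
            return wrapper
        return decorator
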
@@ -760,6 +789,11 @@ class CompilerHolder(InterpreterObject):
deps = final_deps
return deps
+ @permittedMethodKwargs({
+ 'prefix',
+ 'args',
+ 'dependencies',
+ })
def alignment_method(self, args, kwargs):
if len(args) != 1:
raise InterpreterException('Alignment method takes exactly one positional argument.')
@@ -774,6 +808,13 @@ class CompilerHolder(InterpreterObject):
mlog.log('Checking for alignment of "', mlog.bold(typename), '": ', result, sep='')
return result
+ @permittedMethodKwargs({
+ 'name',
+ 'no_builtin_args',
+ 'include_directories',
+ 'args',
+ 'dependencies',
+ })
def run_method(self, args, kwargs):
if len(args) != 1:
raise InterpreterException('Run method takes exactly one positional argument.')
@@ -799,9 +840,11 @@ class CompilerHolder(InterpreterObject):
mlog.log('Checking if "', mlog.bold(testname), '" runs: ', h, sep='')
return TryRunResultHolder(result)
+ @permittedMethodKwargs({})
def get_id_method(self, args, kwargs):
return self.compiler.get_id()
+ @permittedMethodKwargs({})
def symbols_have_underscore_prefix_method(self, args, kwargs):
'''
Check if the compiler prefixes _ (underscore) to global C symbols
@@ -809,6 +852,7 @@ class CompilerHolder(InterpreterObject):
'''
return self.compiler.symbols_have_underscore_prefix(self.environment)
+ @permittedMethodKwargs({})
def unittest_args_method(self, args, kwargs):
'''
This function is deprecated and should not be used.
@@ -816,8 +860,16 @@ class CompilerHolder(InterpreterObject):
'''
if not hasattr(self.compiler, 'get_feature_args'):
raise InterpreterException('This {} compiler has no feature arguments.'.format(self.compiler.get_display_language()))
- return self.compiler.get_feature_args({'unittest': 'true'})
-
+ build_to_src = os.path.relpath(self.environment.get_source_dir(), self.environment.get_build_dir())
+ return self.compiler.get_feature_args({'unittest': 'true'}, build_to_src)
+
+ @permittedMethodKwargs({
+ 'prefix',
+ 'no_builtin_args',
+ 'include_directories',
+ 'args',
+ 'dependencies',
+ })
def has_member_method(self, args, kwargs):
if len(args) != 2:
raise InterpreterException('Has_member takes exactly two arguments.')
@@ -839,6 +891,13 @@ class CompilerHolder(InterpreterObject):
'" has member "', mlog.bold(membername), '": ', hadtxt, sep='')
return had
+ @permittedMethodKwargs({
+ 'prefix',
+ 'no_builtin_args',
+ 'include_directories',
+ 'args',
+ 'dependencies',
+ })
def has_members_method(self, args, kwargs):
check_stringlist(args)
typename = args[0]
@@ -859,6 +918,13 @@ class CompilerHolder(InterpreterObject):
'" has members ', members, ': ', hadtxt, sep='')
return had
+ @permittedMethodKwargs({
+ 'prefix',
+ 'no_builtin_args',
+ 'include_directories',
+ 'args',
+ 'dependencies',
+ })
def has_function_method(self, args, kwargs):
if len(args) != 1:
raise InterpreterException('Has_function takes exactly one argument.')
@@ -877,6 +943,13 @@ class CompilerHolder(InterpreterObject):
mlog.log('Checking for function "', mlog.bold(funcname), '": ', hadtxt, sep='')
return had
+ @permittedMethodKwargs({
+ 'prefix',
+ 'no_builtin_args',
+ 'include_directories',
+ 'args',
+ 'dependencies',
+ })
def has_type_method(self, args, kwargs):
if len(args) != 1:
raise InterpreterException('Has_type takes exactly one argument.')
@@ -895,29 +968,46 @@ class CompilerHolder(InterpreterObject):
mlog.log('Checking for type "', mlog.bold(typename), '": ', hadtxt, sep='')
return had
+ @permittedMethodKwargs({
+ 'prefix',
+ 'low',
+ 'high',
+ 'guess',
+ 'no_builtin_args',
+ 'include_directories',
+ 'args',
+ 'dependencies',
+ })
def compute_int_method(self, args, kwargs):
if len(args) != 1:
raise InterpreterException('Compute_int takes exactly one argument.')
check_stringlist(args)
expression = args[0]
prefix = kwargs.get('prefix', '')
- l = kwargs.get('low', -1024)
- h = kwargs.get('high', 1024)
+ low = kwargs.get('low', None)
+ high = kwargs.get('high', None)
guess = kwargs.get('guess', None)
if not isinstance(prefix, str):
raise InterpreterException('Prefix argument of compute_int must be a string.')
- if not isinstance(l, int):
+ if low is not None and not isinstance(low, int):
raise InterpreterException('Low argument of compute_int must be an int.')
- if not isinstance(h, int):
+ if high is not None and not isinstance(high, int):
raise InterpreterException('High argument of compute_int must be an int.')
if guess is not None and not isinstance(guess, int):
raise InterpreterException('Guess argument of compute_int must be an int.')
extra_args = self.determine_args(kwargs)
deps = self.determine_dependencies(kwargs)
- res = self.compiler.compute_int(expression, l, h, guess, prefix, self.environment, extra_args, deps)
+ res = self.compiler.compute_int(expression, low, high, guess, prefix, self.environment, extra_args, deps)
mlog.log('Computing int of "%s": %d' % (expression, res))
return res
+ @permittedMethodKwargs({
+ 'prefix',
+ 'no_builtin_args',
+ 'include_directories',
+ 'args',
+ 'dependencies',
+ })
def sizeof_method(self, args, kwargs):
if len(args) != 1:
raise InterpreterException('Sizeof takes exactly one argument.')
@@ -932,6 +1022,13 @@ class CompilerHolder(InterpreterObject):
mlog.log('Checking for size of "%s": %d' % (element, esize))
return esize
+ @permittedMethodKwargs({
+ 'prefix',
+ 'no_builtin_args',
+ 'include_directories',
+ 'args',
+ 'dependencies',
+ })
def get_define_method(self, args, kwargs):
if len(args) != 1:
raise InterpreterException('get_define() takes exactly one argument.')
@@ -946,6 +1043,13 @@ class CompilerHolder(InterpreterObject):
mlog.log('Fetching value of define "%s": %s' % (element, value))
return value
+ @permittedMethodKwargs({
+ 'name',
+ 'no_builtin_args',
+ 'include_directories',
+ 'args',
+ 'dependencies',
+ })
def compiles_method(self, args, kwargs):
if len(args) != 1:
raise InterpreterException('compiles method takes exactly one argument.')
@@ -969,6 +1073,13 @@ class CompilerHolder(InterpreterObject):
mlog.log('Checking if "', mlog.bold(testname), '" compiles: ', h, sep='')
return result
+ @permittedMethodKwargs({
+ 'name',
+ 'no_builtin_args',
+ 'include_directories',
+ 'args',
+ 'dependencies',
+ })
def links_method(self, args, kwargs):
if len(args) != 1:
raise InterpreterException('links method takes exactly one argument.')
@@ -992,6 +1103,13 @@ class CompilerHolder(InterpreterObject):
mlog.log('Checking if "', mlog.bold(testname), '" links: ', h, sep='')
return result
+ @permittedMethodKwargs({
+ 'prefix',
+ 'no_builtin_args',
+ 'include_directories',
+ 'args',
+ 'dependencies',
+ })
def has_header_method(self, args, kwargs):
if len(args) != 1:
raise InterpreterException('has_header method takes exactly one argument.')
@@ -1010,6 +1128,13 @@ class CompilerHolder(InterpreterObject):
mlog.log('Has header "%s":' % hname, h)
return haz
+ @permittedMethodKwargs({
+ 'prefix',
+ 'no_builtin_args',
+ 'include_directories',
+ 'args',
+ 'dependencies',
+ })
def has_header_symbol_method(self, args, kwargs):
if len(args) != 2:
raise InterpreterException('has_header_symbol method takes exactly two arguments.')
@@ -1029,6 +1154,10 @@ class CompilerHolder(InterpreterObject):
mlog.log('Header <{0}> has symbol "{1}":'.format(hname, symbol), h)
return haz
+ @permittedMethodKwargs({
+ 'required',
+ 'dirs',
+ })
def find_library_method(self, args, kwargs):
# TODO add dependencies support?
if len(args) != 1:
@@ -1050,6 +1179,7 @@ class CompilerHolder(InterpreterObject):
self.compiler.language)
return ExternalLibraryHolder(lib)
+ @permittedMethodKwargs({})
def has_argument_method(self, args, kwargs):
args = mesonlib.stringlistify(args)
if len(args) != 1:
@@ -1062,6 +1192,7 @@ class CompilerHolder(InterpreterObject):
mlog.log('Compiler for {} supports argument {}:'.format(self.compiler.get_display_language(), args[0]), h)
return result
+ @permittedMethodKwargs({})
def has_multi_arguments_method(self, args, kwargs):
args = mesonlib.stringlistify(args)
result = self.compiler.has_multi_arguments(args, self.environment)
@@ -1075,6 +1206,7 @@ class CompilerHolder(InterpreterObject):
h)
return result
+ @permittedMethodKwargs({})
def get_supported_arguments_method(self, args, kwargs):
args = mesonlib.stringlistify(args)
result = self.compiler.get_supported_arguments(args, self.environment)
@@ -1090,6 +1222,7 @@ class CompilerHolder(InterpreterObject):
h)
return result
+ @permittedMethodKwargs({})
def first_supported_argument_method(self, args, kwargs):
for i in mesonlib.stringlistify(args):
if self.compiler.has_argument(i, self.environment):
@@ -1309,71 +1442,18 @@ class MesonMain(InterpreterObject):
return args[1]
raise InterpreterException('Unknown cross property: %s.' % propname)
-pch_kwargs = set(['c_pch', 'cpp_pch'])
-
-lang_arg_kwargs = set([
- 'c_args',
- 'cpp_args',
- 'd_args',
- 'd_import_dirs',
- 'd_unittest',
- 'd_module_versions',
- 'fortran_args',
- 'java_args',
- 'objc_args',
- 'objcpp_args',
- 'rust_args',
- 'vala_args',
- 'cs_args',
-])
-
-vala_kwargs = set(['vala_header', 'vala_gir', 'vala_vapi'])
-rust_kwargs = set(['rust_crate_type'])
-cs_kwargs = set(['resources', 'cs_args'])
-
-buildtarget_kwargs = set([
- 'build_by_default',
- 'build_rpath',
- 'dependencies',
- 'extra_files',
- 'gui_app',
- 'link_with',
- 'link_whole',
- 'link_args',
- 'link_depends',
- 'implicit_include_directories',
- 'include_directories',
- 'install',
- 'install_rpath',
- 'install_dir',
- 'name_prefix',
- 'name_suffix',
- 'native',
- 'objects',
- 'override_options',
- 'pic',
- 'sources',
- 'vs_module_defs',
-])
-
-build_target_common_kwargs = (
- buildtarget_kwargs |
- lang_arg_kwargs |
- pch_kwargs |
- vala_kwargs |
- rust_kwargs |
- cs_kwargs)
-
-exe_kwargs = (build_target_common_kwargs) | {'implib', 'export_dynamic'}
-shlib_kwargs = (build_target_common_kwargs) | {'version', 'soversion'}
-shmod_kwargs = shlib_kwargs
-stlib_kwargs = shlib_kwargs
-
-jar_kwargs = exe_kwargs.copy()
-jar_kwargs.update(['main_class'])
-
-build_target_kwargs = exe_kwargs.copy()
-build_target_kwargs.update(['target_type'])
+
+known_library_kwargs = (
+ build.known_shlib_kwargs |
+ build.known_stlib_kwargs
+)
+
+known_build_target_kwargs = (
+ known_library_kwargs |
+ build.known_exe_kwargs |
+ build.known_jar_kwargs |
+ {'target_type'}
+)
permitted_kwargs = {'add_global_arguments': {'language'},
'add_global_link_arguments': {'language'},
@@ -1382,25 +1462,27 @@ permitted_kwargs = {'add_global_arguments': {'language'},
'add_project_arguments': {'language'},
'add_test_setup': {'exe_wrapper', 'gdb', 'timeout_multiplier', 'env'},
'benchmark': {'args', 'env', 'should_fail', 'timeout', 'workdir', 'suite'},
- 'build_target': build_target_kwargs,
- 'configure_file': {'input', 'output', 'configuration', 'command', 'install_dir', 'capture', 'install'},
+ 'build_target': known_build_target_kwargs,
+ 'configure_file': {'input', 'output', 'configuration', 'command', 'install_dir', 'capture', 'install', 'format'},
'custom_target': {'input', 'output', 'command', 'install', 'install_dir', 'build_always', 'capture', 'depends', 'depend_files', 'depfile', 'build_by_default'},
'dependency': {'default_options', 'fallback', 'language', 'main', 'method', 'modules', 'optional_modules', 'native', 'required', 'static', 'version'},
- 'declare_dependency': {'include_directories', 'link_with', 'sources', 'dependencies', 'compile_args', 'link_args', 'version'},
- 'executable': exe_kwargs,
+ 'declare_dependency': {'include_directories', 'link_with', 'sources', 'dependencies', 'compile_args', 'link_args', 'link_whole', 'version'},
+ 'executable': build.known_exe_kwargs,
'find_program': {'required', 'native'},
'generator': {'arguments', 'output', 'depfile', 'capture', 'preserve_path_from'},
'include_directories': {'is_system'},
- 'install_data': {'install_dir', 'install_mode', 'sources'},
+ 'install_data': {'install_dir', 'install_mode', 'rename', 'sources'},
'install_headers': {'install_dir', 'subdir'},
'install_man': {'install_dir'},
'install_subdir': {'exclude_files', 'exclude_directories', 'install_dir', 'install_mode', 'strip_directory'},
- 'jar': jar_kwargs,
+ 'jar': build.known_jar_kwargs,
'project': {'version', 'meson_version', 'default_options', 'license', 'subproject_dir'},
'run_target': {'command', 'depends'},
- 'shared_library': shlib_kwargs,
- 'shared_module': shmod_kwargs,
- 'static_library': stlib_kwargs,
+ 'shared_library': build.known_shlib_kwargs,
+ 'shared_module': build.known_shmod_kwargs,
+ 'static_library': build.known_stlib_kwargs,
+ 'both_libraries': known_library_kwargs,
+ 'library': known_library_kwargs,
'subdir': {'if_found'},
'subproject': {'version', 'default_options'},
'test': {'args', 'env', 'is_parallel', 'should_fail', 'timeout', 'workdir', 'suite'},
@@ -1497,12 +1579,14 @@ class Interpreter(InterpreterBase):
'run_command': self.func_run_command,
'set_variable': self.func_set_variable,
'subdir': self.func_subdir,
+ 'subdir_done': self.func_subdir_done,
'subproject': self.func_subproject,
'shared_library': self.func_shared_lib,
'shared_module': self.func_shared_module,
'static_library': self.func_static_lib,
+ 'both_libraries': self.func_both_lib,
'test': self.func_test,
- 'vcs_tag': self.func_vcs_tag,
+ 'vcs_tag': self.func_vcs_tag
})
if 'MESON_UNIT_TEST' in os.environ:
self.funcs.update({'exception': self.func_exception})
@@ -1618,6 +1702,7 @@ class Interpreter(InterpreterBase):
raise InterpreterException('Version must be a string.')
incs = extract_as_list(kwargs, 'include_directories', unholder=True)
libs = extract_as_list(kwargs, 'link_with', unholder=True)
+ libs_whole = extract_as_list(kwargs, 'link_whole', unholder=True)
sources = extract_as_list(kwargs, 'sources')
sources = listify(self.source_strings_to_files(sources), unholder=True)
deps = extract_as_list(kwargs, 'dependencies', unholder=True)
@@ -1637,7 +1722,7 @@ class Interpreter(InterpreterBase):
raise InterpreterException('''Entries in "link_with" may only be self-built targets,
external dependencies (including libraries) must go to "dependencies".''')
dep = dependencies.InternalDependency(version, incs, compile_args,
- link_args, libs, sources, final_deps)
+ link_args, libs, libs_whole, sources, final_deps)
return DependencyHolder(dep)
@noKwargs
@@ -1675,10 +1760,17 @@ external dependencies (including libraries) must go to "dependencies".''')
cargs = args[1:]
srcdir = self.environment.get_source_dir()
builddir = self.environment.get_build_dir()
- m = 'must be a string, or the output of find_program(), files(), or ' \
- 'configure_file(); not {!r}'
+ m = 'must be a string, or the output of find_program(), files() '\
+ 'or configure_file(), or a compiler object; not {!r}'
if isinstance(cmd, ExternalProgramHolder):
cmd = cmd.held_object
+ elif isinstance(cmd, CompilerHolder):
+ cmd = cmd.compiler.get_exelist()[0]
+ prog = ExternalProgram(cmd, silent=True)
+ if not prog.found():
+ raise InterpreterException('Program {!r} not found '
+ 'or not executable'.format(cmd))
+ cmd = prog
else:
if isinstance(cmd, mesonlib.File):
cmd = cmd.absolute_path(srcdir, builddir)
@@ -1746,7 +1838,7 @@ external dependencies (including libraries) must go to "dependencies".''')
raise InterpreterException('Subproject name must not contain a ".." path segment.')
if os.path.isabs(dirname):
raise InterpreterException('Subproject name must not be an absolute path.')
- if '\\' in dirname or '/' in dirname:
+ if has_path_sep(dirname):
mlog.warning('Subproject name has a path separator. This may cause unexpected behaviour.')
if dirname in self.subproject_stack:
fullstack = self.subproject_stack + [dirname]
@@ -1759,26 +1851,34 @@ external dependencies (including libraries) must go to "dependencies".''')
try:
resolved = r.resolve(dirname)
except RuntimeError as e:
+            # If subproject execution failed because the directory
+            # doesn't exist, try to give some helpful advice if it's a
+            # nested subproject that needs promotion...
+ self.print_nested_info(dirname)
+
msg = 'Subproject directory {!r} does not exist and cannot be downloaded:\n{}'
raise InterpreterException(msg.format(os.path.join(self.subproject_dir, dirname), e))
subdir = os.path.join(self.subproject_dir, resolved)
os.makedirs(os.path.join(self.build.environment.get_build_dir(), subdir), exist_ok=True)
self.global_args_frozen = True
- mlog.log('\nExecuting subproject ', mlog.bold(dirname), '.\n', sep='')
- subi = Interpreter(self.build, self.backend, dirname, subdir, self.subproject_dir,
- mesonlib.stringlistify(kwargs.get('default_options', [])))
- subi.subprojects = self.subprojects
-
- subi.subproject_stack = self.subproject_stack + [dirname]
- current_active = self.active_projectname
- subi.run()
+ mlog.log()
+ with mlog.nested():
+ mlog.log('Executing subproject ', mlog.bold(dirname), '.\n', sep='')
+ subi = Interpreter(self.build, self.backend, dirname, subdir, self.subproject_dir,
+ mesonlib.stringlistify(kwargs.get('default_options', [])))
+ subi.subprojects = self.subprojects
+
+ subi.subproject_stack = self.subproject_stack + [dirname]
+ current_active = self.active_projectname
+ subi.run()
+ mlog.log('\nSubproject', mlog.bold(dirname), 'finished.')
if 'version' in kwargs:
pv = subi.project_version
wanted = kwargs['version']
if pv == 'undefined' or not mesonlib.version_compare(pv, wanted):
raise InterpreterException('Subproject %s version is %s but %s required.' % (dirname, pv, wanted))
self.active_projectname = current_active
- mlog.log('\nSubproject', mlog.bold(dirname), 'finished.')
self.build.subprojects[dirname] = subi.project_version
self.subprojects.update(subi.subprojects)
self.subprojects[dirname] = SubprojectHolder(subi)
@@ -1933,6 +2033,8 @@ to directly access options of other subprojects.''')
raise InterpreterException('Subproject_dir must not contain a ".." segment.')
self.subproject_dir = spdirname
+ self.build.subproject_dir = self.subproject_dir
+
if 'meson_version' in kwargs:
cv = coredata.version
pv = kwargs['meson_version']
@@ -2084,6 +2186,19 @@ to directly access options of other subprojects.''')
else:
version_string = ' (%s %s)' % (comp.id, comp.version)
mlog.log('Native %s compiler: ' % comp.get_display_language(), mlog.bold(' '.join(comp.get_exelist())), version_string, sep='')
+
+ # If <language>_args/_link_args settings are given on the
+ # command line, use them.
+ for optspec in self.build.environment.cmd_line_options.projectoptions:
+ (optname, optvalue) = optspec.split('=', maxsplit=1)
+ if optname.endswith('_link_args'):
+ lang = optname[:-10]
+ self.coredata.external_link_args.setdefault(lang, []).append(optvalue)
+ elif optname.endswith('_args'):
+ lang = optname[:-5]
+ self.coredata.external_args.setdefault(lang, []).append(optvalue)
+ # Otherwise, look for definitions from environment
+ # variables such as CFLAGS.
if not comp.get_language() in self.coredata.external_args:
(preproc_args, compile_args, link_args) = environment.get_args_from_envvars(comp)
self.coredata.external_preprocess_args[comp.get_language()] = preproc_args
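
The suffix slicing above maps a -D<lang>_link_args or -D<lang>_args project option to its language: 'cpp_link_args' drops the 10-character '_link_args' suffix to give 'cpp', and 'c_args' drops the 5-character '_args' suffix to give 'c'. A small worked example (option values are made up):

    def split_lang_option(optspec):
        optname, optvalue = optspec.split('=', maxsplit=1)
        if optname.endswith('_link_args'):
            return optname[:-len('_link_args')], 'link_args', optvalue
        if optname.endswith('_args'):
            return optname[:-len('_args')], 'args', optvalue
        return None

    print(split_lang_option('cpp_link_args=-lstdc++fs'))  # ('cpp', 'link_args', '-lstdc++fs')
    print(split_lang_option('c_args=-O2'))                # ('c', 'args', '-O2')
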
@@ -2166,13 +2281,16 @@ to directly access options of other subprojects.''')
if progobj is None:
progobj = self.program_from_system(args)
if required and (progobj is None or not progobj.found()):
- raise InvalidArguments('Program "%s" not found or not executable' % args[0])
+ raise InvalidArguments('Program(s) {!r} not found or not executable'.format(args))
if progobj is None:
return ExternalProgramHolder(dependencies.NonExistingExternalProgram())
return progobj
def func_find_library(self, node, args, kwargs):
- raise InvalidCode('find_library() is removed, use the corresponding method in a compiler object instead.')
+ raise InvalidCode('find_library() is removed, use meson.get_compiler(\'name\').find_library() instead.\n'
+ 'Look here for documentation: http://mesonbuild.com/Reference-manual.html#compiler-object\n'
+ 'Look here for example: http://mesonbuild.com/howtox.html#add-math-library-lm-portably\n'
+ )
def _find_cached_dep(self, name, kwargs):
# Check if we want this as a cross-dep or a native-dep
@@ -2211,7 +2329,7 @@ to directly access options of other subprojects.''')
def check_subproject_version(wanted, found):
if wanted == 'undefined':
return True
- if found == 'undefined' or not mesonlib.version_compare(found, wanted):
+ if found == 'undefined' or not mesonlib.version_compare_many(found, wanted)[0]:
return False
return True
@@ -2288,10 +2406,13 @@ to directly access options of other subprojects.''')
dep = None
# Search for it outside the project
- try:
- dep = dependencies.find_external_dependency(name, self.environment, kwargs)
- except DependencyException as e:
- exception = e
+ if self.coredata.wrap_mode != WrapMode.forcefallback or 'fallback' not in kwargs:
+ try:
+ dep = dependencies.find_external_dependency(name, self.environment, kwargs)
+ except DependencyException as e:
+ exception = e
+ else:
+ exception = DependencyException("fallback for %s not found" % name)
# Search inside the projects list
if not dep or not dep.found():
@@ -2303,7 +2424,6 @@ to directly access options of other subprojects.''')
# we won't actually read all the build files.
return fallback_dep
if not dep:
- self.print_nested_info(name)
assert(exception is not None)
raise exception
@@ -2335,7 +2455,7 @@ root and issuing %s.
cmds = []
command_templ = 'meson wrap promote '
for l in found:
- cmds.append(command_templ + l[len(self.source_root)+1:])
+ cmds.append(command_templ + l[len(self.source_root) + 1:])
final_message = message + '\n'.join(cmds)
print(final_message)
@@ -2398,20 +2518,24 @@ root and issuing %s.
@permittedKwargs(permitted_kwargs['shared_library'])
def func_shared_lib(self, node, args, kwargs):
- return self.build_target(node, args, kwargs, SharedLibraryHolder)
+ holder = self.build_target(node, args, kwargs, SharedLibraryHolder)
+ holder.held_object.shared_library_only = True
+ return holder
+
+ @permittedKwargs(permitted_kwargs['both_libraries'])
+ def func_both_lib(self, node, args, kwargs):
+ return self.build_both_libraries(node, args, kwargs)
@permittedKwargs(permitted_kwargs['shared_module'])
def func_shared_module(self, node, args, kwargs):
return self.build_target(node, args, kwargs, SharedModuleHolder)
+ @permittedKwargs(permitted_kwargs['library'])
def func_library(self, node, args, kwargs):
- if self.coredata.get_builtin_option('default_library') == 'shared':
- return self.func_shared_lib(node, args, kwargs)
- return self.func_static_lib(node, args, kwargs)
+ return self.build_library(node, args, kwargs)
@permittedKwargs(permitted_kwargs['jar'])
def func_jar(self, node, args, kwargs):
- kwargs['target_type'] = 'jar'
return self.build_target(node, args, kwargs, JarHolder)
@permittedKwargs(permitted_kwargs['build_target'])
@@ -2420,15 +2544,17 @@ root and issuing %s.
raise InterpreterException('Missing target_type keyword argument')
target_type = kwargs.pop('target_type')
if target_type == 'executable':
- return self.func_executable(node, args, kwargs)
+ return self.build_target(node, args, kwargs, ExecutableHolder)
elif target_type == 'shared_library':
- return self.func_shared_lib(node, args, kwargs)
+ return self.build_target(node, args, kwargs, SharedLibraryHolder)
elif target_type == 'static_library':
- return self.func_static_lib(node, args, kwargs)
+ return self.build_target(node, args, kwargs, StaticLibraryHolder)
+ elif target_type == 'both_libraries':
+ return self.build_both_libraries(node, args, kwargs)
elif target_type == 'library':
- return self.func_library(node, args, kwargs)
+ return self.build_library(node, args, kwargs)
elif target_type == 'jar':
- return self.func_jar(node, args, kwargs)
+ return self.build_target(node, args, kwargs, JarHolder)
else:
raise InterpreterException('Unknown target_type.')
@@ -2470,6 +2596,14 @@ root and issuing %s.
return self.func_custom_target(node, [kwargs['output']], kwargs)
@stringArgs
+ def func_subdir_done(self, node, args, kwargs):
+ if len(kwargs) > 0:
+ raise InterpreterException('subdir_done does not take named arguments')
+ if len(args) > 0:
+ raise InterpreterException('subdir_done does not take any arguments')
+ raise SubdirDoneRequest()
+
+ @stringArgs
@permittedKwargs(permitted_kwargs['custom_target'])
def func_custom_target(self, node, args, kwargs):
if len(args) != 1:
@@ -2583,14 +2717,12 @@ root and issuing %s.
if not isinstance(timeout, int):
raise InterpreterException('Timeout must be an integer.')
suite = []
+ prj = self.subproject if self.is_subproject() else self.build.project_name
for s in mesonlib.stringlistify(kwargs.get('suite', '')):
if len(s) > 0:
s = ':' + s
- if self.is_subproject():
- suite.append(self.subproject.replace(' ', '_').replace(':', '_') + s)
- else:
- suite.append(self.build.project_name.replace(' ', '_').replace(':', '_') + s)
- t = Test(args[0], suite, exe.held_object, par, cmd_args, env, should_fail, timeout, workdir)
+ suite.append(prj.replace(' ', '_').replace(':', '_') + s)
+ t = Test(args[0], prj, suite, exe.held_object, par, cmd_args, env, should_fail, timeout, workdir)
if is_base_test:
self.build.tests.append(t)
mlog.debug('Adding test "', mlog.bold(args[0]), '".', sep='')
@@ -2691,7 +2823,8 @@ root and issuing %s.
if not isinstance(install_dir, (str, type(None))):
raise InvalidArguments('Keyword argument install_dir not a string.')
install_mode = self._get_kwarg_install_mode(kwargs)
- data = DataHolder(build.Data(sources, install_dir, install_mode))
+ rename = kwargs.get('rename', None)
+ data = DataHolder(build.Data(sources, install_dir, install_mode, rename))
self.build.data.append(data.held_object)
return data
@@ -2754,6 +2887,16 @@ root and issuing %s.
if 'command' not in kwargs:
raise InterpreterException('"capture" keyword requires "command" keyword.')
+ if 'format' in kwargs:
+ format = kwargs['format']
+ if not isinstance(format, str):
+ raise InterpreterException('"format" keyword must be a string.')
+ else:
+ format = 'meson'
+
+ if format not in ('meson', 'cmake', 'cmake@'):
+ raise InterpreterException('"format" possible values are "meson", "cmake" or "cmake@".')
+
# Validate input
inputfile = None
ifile_abs = None
@@ -2793,7 +2936,7 @@ root and issuing %s.
if inputfile is not None:
os.makedirs(os.path.join(self.environment.build_dir, self.subdir), exist_ok=True)
missing_variables = mesonlib.do_conf_file(ifile_abs, ofile_abs,
- conf.held_object)
+ conf.held_object, format)
if missing_variables:
var_list = ", ".join(map(repr, sorted(missing_variables)))
mlog.warning(
@@ -2837,9 +2980,11 @@ root and issuing %s.
conffile = os.path.normpath(inputfile.relative_name())
if conffile not in self.build_def_files:
self.build_def_files.append(conffile)
- # Install file if requested
+ # Install file if requested. We check for the empty string
+ # for backwards compatibility: that was the behaviour before
+ # 0.45.0, so preserve it.
idir = kwargs.get('install_dir', None)
- if isinstance(idir, str):
+ if isinstance(idir, str) and idir:
cfile = mesonlib.File.from_built_file(ofile_path, ofile_fname)
self.build.data.append(build.Data([cfile], idir))
return mesonlib.File.from_built_file(self.subdir, output)
@@ -2847,12 +2992,17 @@ root and issuing %s.
@permittedKwargs(permitted_kwargs['include_directories'])
@stringArgs
def func_include_directories(self, node, args, kwargs):
+ return self.build_incdir_object(args, kwargs.get('is_system', False))
+
+ def build_incdir_object(self, incdir_strings, is_system=False):
+ if not isinstance(is_system, bool):
+ raise InvalidArguments('Is_system must be boolean.')
src_root = self.environment.get_source_dir()
build_root = self.environment.get_build_dir()
absbase_src = os.path.join(src_root, self.subdir)
absbase_build = os.path.join(build_root, self.subdir)
- for a in args:
+ for a in incdir_strings:
if a.startswith(src_root):
raise InvalidArguments('''Tried to form an absolute path to a source dir. You should not do that but use
relative paths instead.
@@ -2875,10 +3025,7 @@ different subdirectory.
absdir_build = os.path.join(absbase_build, a)
if not os.path.isdir(absdir_src) and not os.path.isdir(absdir_build):
raise InvalidArguments('Include dir %s does not exist.' % a)
- is_system = kwargs.get('is_system', False)
- if not isinstance(is_system, bool):
- raise InvalidArguments('Is_system must be boolean.')
- i = IncludeDirsHolder(build.IncludeDirs(self.subdir, args, is_system))
+ i = IncludeDirsHolder(build.IncludeDirs(self.subdir, incdir_strings, is_system))
return i
@permittedKwargs(permitted_kwargs['add_test_setup'])
@@ -2887,8 +3034,10 @@ different subdirectory.
if len(args) != 1:
raise InterpreterException('Add_test_setup needs one argument for the setup name.')
setup_name = args[0]
- if re.fullmatch('[_a-zA-Z][_0-9a-zA-Z]*', setup_name) is None:
+ if re.fullmatch('([_a-zA-Z][_0-9a-zA-Z]*:)?[_a-zA-Z][_0-9a-zA-Z]*', setup_name) is None:
raise InterpreterException('Setup name may only contain alphanumeric characters.')
+ if ":" not in setup_name:
+ setup_name = (self.subproject if self.subproject else self.build.project_name) + ":" + setup_name
try:
inp = extract_as_list(kwargs, 'exe_wrapper')
exe_wrapper = []
@@ -2912,14 +3061,10 @@ different subdirectory.
if not isinstance(timeout_multiplier, int):
raise InterpreterException('Timeout multiplier must be a number.')
env = self.unpack_env_kwarg(kwargs)
- setupobj = build.TestSetup(exe_wrapper=exe_wrapper,
- gdb=gdb,
- timeout_multiplier=timeout_multiplier,
- env=env)
- if self.subproject == '':
- # Dunno what we should do with subprojects really. Let's start simple
- # and just use the master project ones.
- self.build.test_setups[setup_name] = setupobj
+ self.build.test_setups[setup_name] = build.TestSetup(exe_wrapper=exe_wrapper,
+ gdb=gdb,
+ timeout_multiplier=timeout_multiplier,
+ env=env)
@permittedKwargs(permitted_kwargs['add_global_arguments'])
@stringArgs
@@ -3072,6 +3217,41 @@ different subdirectory.
if idname not in self.coredata.target_guids:
self.coredata.target_guids[idname] = str(uuid.uuid4()).upper()
+ def build_both_libraries(self, node, args, kwargs):
+ shared_holder = self.build_target(node, args, kwargs, SharedLibraryHolder)
+
+ # Check if user forces non-PIC static library.
+ pic = True
+ if 'pic' in kwargs:
+ pic = kwargs['pic']
+ elif 'b_staticpic' in self.environment.coredata.base_options:
+ pic = self.environment.coredata.base_options['b_staticpic'].value
+
+ if pic:
+ # Exclude sources from args and kwargs to avoid building them twice
+ static_args = [args[0]]
+ static_kwargs = kwargs.copy()
+ static_kwargs['sources'] = []
+ static_kwargs['objects'] = shared_holder.held_object.extract_all_objects()
+ else:
+ static_args = args
+ static_kwargs = kwargs
+
+ static_holder = self.build_target(node, static_args, static_kwargs, StaticLibraryHolder)
+
+ return BothLibrariesHolder(shared_holder, static_holder, self)
+
+ def build_library(self, node, args, kwargs):
+ default_library = self.coredata.get_builtin_option('default_library')
+ if default_library == 'shared':
+ return self.build_target(node, args, kwargs, SharedLibraryHolder)
+ elif default_library == 'static':
+ return self.build_target(node, args, kwargs, StaticLibraryHolder)
+ elif default_library == 'both':
+ return self.build_both_libraries(node, args, kwargs)
+ else:
+ raise InterpreterException('Unknown default_library value: %s.' % default_library)
+
def build_target(self, node, args, kwargs, targetholder):
if not args:
raise InterpreterException('Target does not have a name.')
@@ -3106,7 +3286,14 @@ different subdirectory.
else:
mlog.debug('Unknown target type:', str(targetholder))
raise RuntimeError('Unreachable code')
+ self.kwarg_strings_to_includedirs(kwargs)
+
+ # Filter out kwargs from other target types. For example 'soversion'
+ # passed to library() when default_library == 'static'.
+ kwargs = {k: v for k, v in kwargs.items() if k in targetclass.known_kwargs}
+
target = targetclass(name, self.subdir, self.subproject, is_cross, sources, objs, self.environment, kwargs)
+
if is_cross:
self.add_cross_stdlib_info(target)
l = targetholder(target, self)
@@ -3114,6 +3301,23 @@ different subdirectory.
self.project_args_frozen = True
return l
+ def kwarg_strings_to_includedirs(self, kwargs):
+ if 'd_import_dirs' in kwargs:
+ items = mesonlib.extract_as_list(kwargs, 'd_import_dirs')
+ cleaned_items = []
+ for i in items:
+ if isinstance(i, str):
+ # BW compatibility. This was permitted so we must support it
+ # for a few releases so people can transition to "correct"
+ # path declarations.
+ if i.startswith(self.environment.get_source_dir()):
+ mlog.warning('''Building a path to the source dir is not supported. Use a relative path instead.
+This will become a hard error in the future.''')
+ i = os.path.relpath(i, os.path.join(self.environment.get_source_dir(), self.subdir))
+ i = self.build_incdir_object([i])
+ cleaned_items.append(i)
+ kwargs['d_import_dirs'] = cleaned_items
+
def get_used_languages(self, target):
result = {}
for i in target.sources:
@@ -3152,6 +3356,7 @@ different subdirectory.
if idx >= len(arg_strings):
raise InterpreterException('Format placeholder @{}@ out of range.'.format(idx))
return arg_strings[idx]
+
return re.sub(r'@(\d+)@', arg_replace, templ)
# Only permit object extraction from the same subproject
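The both_libraries() path above builds the shared target first and, when position-independent code is enabled, hands extract_all_objects() to the static target so the sources are compiled only once. A minimal standalone sketch of that decision in plain Python; the SharedLib/StaticLib classes and the option lookup are stand-ins for Meson's real holders, not its API:

class SharedLib:
    def __init__(self, name, sources):
        self.name, self.sources = name, sources

    def extract_all_objects(self):
        # Pretend every source file produced exactly one object file.
        return [s.rsplit('.', 1)[0] + '.o' for s in self.sources]

class StaticLib:
    def __init__(self, name, sources, objects):
        self.name, self.sources, self.objects = name, sources, objects

def build_both(name, sources, kwargs, base_options):
    shared = SharedLib(name, sources)
    # An explicit 'pic' kwarg wins; otherwise fall back to b_staticpic.
    pic = kwargs.get('pic', base_options.get('b_staticpic', True))
    if pic:
        # Reuse the shared build's objects so nothing is compiled twice.
        static = StaticLib(name, [], shared.extract_all_objects())
    else:
        static = StaticLib(name, sources, [])
    return shared, static

shared, static = build_both('foo', ['a.c', 'b.c'], {}, {'b_staticpic': True})
print(static.objects)  # ['a.o', 'b.o']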
diff --git a/mesonbuild/interpreterbase.py b/mesonbuild/interpreterbase.py
index 0539b14..f957d90 100644
--- a/mesonbuild/interpreterbase.py
+++ b/mesonbuild/interpreterbase.py
@@ -80,6 +80,22 @@ class permittedKwargs:
return wrapped
+class permittedMethodKwargs:
+
+ def __init__(self, permitted):
+ self.permitted = permitted
+
+ def __call__(self, f):
+ @wraps(f)
+ def wrapped(obj, args, kwargs):
+ for k in kwargs:
+ if k not in self.permitted:
+ mlog.warning('''Passed invalid keyword argument "{}".'''.format(k))
+ mlog.warning('This will become a hard error in the future.')
+ return f(obj, args, kwargs)
+ return wrapped
+
+
class InterpreterException(mesonlib.MesonException):
pass
@@ -89,6 +105,9 @@ class InvalidCode(InterpreterException):
class InvalidArguments(InterpreterException):
pass
+class SubdirDoneRequest(BaseException):
+ pass
+
class InterpreterObject:
def __init__(self):
self.methods = {}
@@ -187,6 +206,8 @@ class InterpreterBase:
try:
self.current_lineno = cur.lineno
self.evaluate_statement(cur)
+ except SubdirDoneRequest:
+ break
except Exception as e:
if not(hasattr(e, 'lineno')):
e.lineno = cur.lineno
@@ -408,7 +429,7 @@ The result of this is undefined and will become a hard error in a future Meson r
varname = node.var_name
addition = self.evaluate_statement(node.value)
if is_disabler(addition):
- set_variable(varname, addition)
+ self.set_variable(varname, addition)
return
# Remember that all variables are immutable. We must always create a
# full new variable and then assign it.
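subdir_done() is implemented as a control-flow signal: SubdirDoneRequest derives from BaseException so the generic `except Exception` handler in evaluate_codeblock cannot swallow it, and only the dedicated handler breaks out of the statement loop. A stripped-down sketch of the pattern; the statement list and the toy evaluator are invented for illustration:

class SubdirDoneRequest(BaseException):
    # Deliberately not an Exception subclass, so broad error handlers
    # do not catch it by accident.
    pass

def evaluate_codeblock(statements):
    executed = []
    for stmt in statements:
        try:
            if stmt == 'subdir_done()':
                raise SubdirDoneRequest()
            executed.append(stmt)
        except SubdirDoneRequest:
            break  # stop evaluating the rest of this build file
        except Exception as e:
            raise RuntimeError('error in statement %r' % stmt) from e
    return executed

print(evaluate_codeblock(['a = 1', 'subdir_done()', 'b = 2']))  # ['a = 1']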
diff --git a/mesonbuild/mconf.py b/mesonbuild/mconf.py
index 771e9ee..cadd306 100644
--- a/mesonbuild/mconf.py
+++ b/mesonbuild/mconf.py
@@ -12,108 +12,88 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-import sys, os
-import pickle
+import os
+import sys
import argparse
-from . import coredata, mesonlib
+from . import (coredata, mesonlib, build)
-parser = argparse.ArgumentParser(prog='meson configure')
+def buildparser():
+ parser = argparse.ArgumentParser(prog='meson configure')
+
+ parser.add_argument('-D', action='append', default=[], dest='sets',
+ help='Set an option to the given value.')
+ parser.add_argument('directory', nargs='*')
+ parser.add_argument('--clearcache', action='store_true', default=False,
+ help='Clear cached state (e.g. found dependencies)')
+ return parser
-parser.add_argument('-D', action='append', default=[], dest='sets',
- help='Set an option to the given value.')
-parser.add_argument('directory', nargs='*')
-parser.add_argument('--clearcache', action='store_true', default=False,
- help='Clear cached state (e.g. found dependencies)')
class ConfException(mesonlib.MesonException):
pass
+
class Conf:
def __init__(self, build_dir):
self.build_dir = build_dir
- self.coredata_file = os.path.join(build_dir, 'meson-private/coredata.dat')
- self.build_file = os.path.join(build_dir, 'meson-private/build.dat')
- if not os.path.isfile(self.coredata_file) or not os.path.isfile(self.build_file):
+ if not os.path.isdir(os.path.join(build_dir, 'meson-private')):
raise ConfException('Directory %s does not seem to be a Meson build directory.' % build_dir)
- with open(self.coredata_file, 'rb') as f:
- self.coredata = pickle.load(f)
- with open(self.build_file, 'rb') as f:
- self.build = pickle.load(f)
- if self.coredata.version != coredata.version:
- raise ConfException('Version mismatch (%s vs %s)' %
- (coredata.version, self.coredata.version))
+ self.build = build.load(self.build_dir)
+ self.coredata = coredata.load(self.build_dir)
def clear_cache(self):
self.coredata.deps = {}
def save(self):
# Only called if something has changed so overwrite unconditionally.
- with open(self.coredata_file, 'wb') as f:
- pickle.dump(self.coredata, f)
+ coredata.save(self.coredata, self.build_dir)
# We don't write the build file because any changes to it
# are erased when Meson is executed the next time, i.e. when
# Ninja is run.
- def print_aligned(self, arr):
+ @staticmethod
+ def print_aligned(arr):
+ def make_lower_case(val):
+ if isinstance(val, bool):
+ return str(val).lower()
+ elif isinstance(val, list):
+ return [make_lower_case(i) for i in val]
+ else:
+ return str(val)
+
if not arr:
return
- titles = {'name': 'Option', 'descr': 'Description', 'value': 'Current Value', 'choices': 'Possible Values'}
- len_name = longest_name = len(titles['name'])
- len_descr = longest_descr = len(titles['descr'])
- len_value = longest_value = len(titles['value'])
- longest_choices = 0 # not printed if we don't get any optional values
-
- # calculate the max length of each
- for x in arr:
- name = x['name']
- descr = x['descr']
- value = x['value'] if isinstance(x['value'], str) else str(x['value']).lower()
- choices = ''
- if isinstance(x['choices'], list):
- if x['choices']:
- x['choices'] = [s if isinstance(s, str) else str(s).lower() for s in x['choices']]
- choices = '[%s]' % ', '.join(map(str, x['choices']))
- elif x['choices']:
- choices = x['choices'] if isinstance(x['choices'], str) else str(x['choices']).lower()
- longest_name = max(longest_name, len(name))
- longest_descr = max(longest_descr, len(descr))
- longest_value = max(longest_value, len(value))
- longest_choices = max(longest_choices, len(choices))
+ titles = {'name': 'Option', 'descr': 'Description', 'value': 'Current Value', 'choices': 'Possible Values'}
- # update possible non strings
- x['value'] = value
- x['choices'] = choices
+ name_col = [titles['name'], '-' * len(titles['name'])]
+ value_col = [titles['value'], '-' * len(titles['value'])]
+ choices_col = [titles['choices'], '-' * len(titles['choices'])]
+ descr_col = [titles['descr'], '-' * len(titles['descr'])]
- # prints header
- namepad = ' ' * (longest_name - len_name)
- valuepad = ' ' * (longest_value - len_value)
- if longest_choices:
- len_choices = len(titles['choices'])
- longest_choices = max(longest_choices, len_choices)
- choicepad = ' ' * (longest_choices - len_choices)
- print(' %s%s %s%s %s%s %s' % (titles['name'], namepad, titles['value'], valuepad, titles['choices'], choicepad, titles['descr']))
- print(' %s%s %s%s %s%s %s' % ('-' * len_name, namepad, '-' * len_value, valuepad, '-' * len_choices, choicepad, '-' * len_descr))
- else:
- print(' %s%s %s%s %s' % (titles['name'], namepad, titles['value'], valuepad, titles['descr']))
- print(' %s%s %s%s %s' % ('-' * len_name, namepad, '-' * len_value, valuepad, '-' * len_descr))
+ choices_found = False
+ for opt in arr:
+ name_col.append(opt['name'])
+ descr_col.append(opt['descr'])
+ if isinstance(opt['value'], list):
+ value_col.append('[{0}]'.format(', '.join(make_lower_case(opt['value']))))
+ else:
+ value_col.append(make_lower_case(opt['value']))
+ if opt['choices']:
+ choices_found = True
+ choices_col.append('[{0}]'.format(', '.join(make_lower_case(opt['choices']))))
+ else:
+ choices_col.append('')
- # print values
- for i in arr:
- name = i['name']
- descr = i['descr']
- value = i['value']
- choices = i['choices']
+ col_widths = (max([len(i) for i in name_col], default=0),
+ max([len(i) for i in value_col], default=0),
+ max([len(i) for i in choices_col], default=0),
+ max([len(i) for i in descr_col], default=0))
- namepad = ' ' * (longest_name - len(name))
- valuepad = ' ' * (longest_value - len(value))
- if longest_choices:
- choicespad = ' ' * (longest_choices - len(choices))
- f = ' %s%s %s%s %s%s %s' % (name, namepad, value, valuepad, choices, choicespad, descr)
+ for line in zip(name_col, value_col, choices_col, descr_col):
+ if choices_found:
+ print(' {0:{width[0]}} {1:{width[1]}} {2:{width[2]}} {3:{width[3]}}'.format(*line, width=col_widths))
else:
- f = ' %s%s %s%s %s' % (name, namepad, value, valuepad, descr)
-
- print(f)
+ print(' {0:{width[0]}} {1:{width[1]}} {3:{width[3]}}'.format(*line, width=col_widths))
def set_options(self, options):
for o in options:
@@ -156,8 +136,7 @@ class Conf:
print('Core properties:')
print(' Source dir', self.build.environment.source_dir)
print(' Build dir ', self.build.environment.build_dir)
- print('')
- print('Core options:')
+ print('\nCore options:\n')
carr = []
for key in ['buildtype', 'warning_level', 'werror', 'strip', 'unity', 'default_library']:
carr.append({'name': key,
@@ -165,48 +144,39 @@ class Conf:
'value': self.coredata.get_builtin_option(key),
'choices': coredata.get_builtin_option_choices(key)})
self.print_aligned(carr)
- print('')
- bekeys = sorted(self.coredata.backend_options.keys())
- if not bekeys:
+ if not self.coredata.backend_options:
print(' No backend options\n')
else:
bearr = []
- for k in bekeys:
+ for k in sorted(self.coredata.backend_options):
o = self.coredata.backend_options[k]
bearr.append({'name': k, 'descr': o.description, 'value': o.value, 'choices': ''})
self.print_aligned(bearr)
- print('')
- print('Base options:')
- okeys = sorted(self.coredata.base_options.keys())
- if not okeys:
+ print('\nBase options:')
+ if not self.coredata.base_options:
print(' No base options\n')
else:
coarr = []
- for k in okeys:
+ for k in sorted(self.coredata.base_options):
o = self.coredata.base_options[k]
coarr.append({'name': k, 'descr': o.description, 'value': o.value, 'choices': o.choices})
self.print_aligned(coarr)
- print('')
- print('Compiler arguments:')
+ print('\nCompiler arguments:')
for (lang, args) in self.coredata.external_args.items():
print(' ' + lang + '_args', str(args))
- print('')
- print('Linker args:')
+ print('\nLinker args:')
for (lang, args) in self.coredata.external_link_args.items():
print(' ' + lang + '_link_args', str(args))
- print('')
- print('Compiler options:')
- okeys = sorted(self.coredata.compiler_options.keys())
- if not okeys:
+ print('\nCompiler options:')
+ if not self.coredata.compiler_options:
print(' No compiler options\n')
else:
coarr = []
- for k in okeys:
+ for k in self.coredata.compiler_options:
o = self.coredata.compiler_options[k]
coarr.append({'name': k, 'descr': o.description, 'value': o.value, 'choices': ''})
self.print_aligned(coarr)
- print('')
- print('Directories:')
+ print('\nDirectories:')
parr = []
for key in ['prefix',
'libdir',
@@ -227,30 +197,24 @@ class Conf:
'value': self.coredata.get_builtin_option(key),
'choices': coredata.get_builtin_option_choices(key)})
self.print_aligned(parr)
- print('')
- print('Project options:')
+ print('\nProject options:')
if not self.coredata.user_options:
print(' This project does not have any options')
else:
- options = self.coredata.user_options
- keys = list(options.keys())
- keys.sort()
optarr = []
- for key in keys:
- opt = options[key]
+ for key in sorted(self.coredata.user_options):
+ opt = self.coredata.user_options[key]
if (opt.choices is None) or (not opt.choices):
# Zero length list or string
choices = ''
else:
- # A non zero length list or string, convert to string
- choices = str(opt.choices)
+ choices = opt.choices
optarr.append({'name': key,
'descr': opt.description,
'value': opt.value,
'choices': choices})
self.print_aligned(optarr)
- print('')
- print('Testing options:')
+ print('\nTesting options:')
tarr = []
for key in ['stdsplit', 'errorlogs']:
tarr.append({'name': key,
@@ -259,11 +223,12 @@ class Conf:
'choices': coredata.get_builtin_option_choices(key)})
self.print_aligned(tarr)
+
def run(args):
args = mesonlib.expand_arguments(args)
if not args:
args = [os.getcwd()]
- options = parser.parse_args(args)
+ options = buildparser().parse_args(args)
if len(options.directory) > 1:
print('%s <build directory>' % args[0])
print('If you omit the build directory, the current directory is substituted.')
@@ -286,10 +251,10 @@ def run(args):
if save:
c.save()
except ConfException as e:
- print('Meson configurator encountered an error:\n')
- print(e)
- return 1
+ print('Meson configurator encountered an error:')
+ raise e
return 0
+
if __name__ == '__main__':
sys.exit(run(sys.argv[1:]))
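The rewritten print_aligned() builds each column as a list (header, dashed underline, then the values), computes one width per column and lets str.format pad the cells. A reduced sketch of the same layout trick with made-up option data:

rows = [{'name': 'buildtype', 'value': 'debug', 'descr': 'Build type'},
        {'name': 'werror', 'value': 'false', 'descr': 'Treat warnings as errors'}]

name_col = ['Option', '------'] + [r['name'] for r in rows]
value_col = ['Current Value', '-------------'] + [r['value'] for r in rows]
descr_col = ['Description', '-----------'] + [r['descr'] for r in rows]

widths = (max(len(s) for s in name_col),
          max(len(s) for s in value_col),
          max(len(s) for s in descr_col))

for line in zip(name_col, value_col, descr_col):
    # Pad each cell to its column width, exactly like mconf does.
    print('  {0:{w[0]}} {1:{w[1]}} {2:{w[2]}}'.format(*line, w=widths))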
diff --git a/mesonbuild/mesonlib.py b/mesonbuild/mesonlib.py
index 9e0508b..8a2b67c 100644
--- a/mesonbuild/mesonlib.py
+++ b/mesonbuild/mesonlib.py
@@ -21,6 +21,21 @@ import platform, subprocess, operator, os, shutil, re
import collections
from mesonbuild import mlog
+have_fcntl = False
+have_msvcrt = False
+
+try:
+ import fcntl
+ have_fcntl = True
+except Exception:
+ pass
+
+try:
+ import msvcrt
+ have_msvcrt = True
+except Exception:
+ pass
+
from glob import glob
def detect_meson_py_location():
@@ -36,12 +51,11 @@ def detect_meson_py_location():
# $ <mesontool> <args> (gets run from /usr/bin/<mesontool>)
in_path_exe = shutil.which(c_fname)
if in_path_exe:
- m_dir, c_fname = os.path.split(in_path_exe)
- # Special case: when run like "./meson.py <opts>",
- # we need to expand it out, because, for example,
- # "ninja test" will be run from a different directory.
- if m_dir == '.':
+ if not os.path.isabs(in_path_exe):
m_dir = os.getcwd()
+ c_fname = in_path_exe
+ else:
+ m_dir, c_fname = os.path.split(in_path_exe)
else:
m_dir = os.path.abspath(c_dir)
@@ -520,9 +534,20 @@ def get_library_dirs():
unixdirs += glob('/lib/' + plat + '*')
return unixdirs
+def has_path_sep(name, sep='/\\'):
+ 'Checks if any of the specified @sep path separators are in @name'
+ for each in sep:
+ if each in name:
+ return True
+ return False
-def do_replacement(regex, line, confdata):
+def do_replacement(regex, line, format, confdata):
missing_variables = set()
+ start_tag = '@'
+ backslash_tag = '\\@'
+ if format == 'cmake':
+ start_tag = '${'
+ backslash_tag = '\\${'
def variable_replace(match):
# Pairs of escape characters before '@' or '\@'
@@ -530,8 +555,8 @@ def do_replacement(regex, line, confdata):
num_escapes = match.end(0) - match.start(0)
return '\\' * (num_escapes // 2)
# Single escape character and '@'
- elif match.group(0) == '\\@':
- return '@'
+ elif match.group(0) == backslash_tag:
+ return start_tag
# Template variable to be replaced
else:
varname = match.group(1)
@@ -571,7 +596,7 @@ def do_mesondefine(line, confdata):
raise MesonException('#mesondefine argument "%s" is of unknown type.' % varname)
-def do_conf_file(src, dst, confdata):
+def do_conf_file(src, dst, confdata, format):
try:
with open(src, encoding='utf-8') as f:
data = f.readlines()
@@ -579,14 +604,24 @@ def do_conf_file(src, dst, confdata):
raise MesonException('Could not read input file %s: %s' % (src, str(e)))
# Only allow (a-z, A-Z, 0-9, _, -) as valid characters for a define
# Also allow escaping '@' with '\@'
- regex = re.compile(r'(?:\\\\)+(?=\\?@)|\\@|@([-a-zA-Z0-9_]+)@')
+ if format in ['meson', 'cmake@']:
+ regex = re.compile(r'(?:\\\\)+(?=\\?@)|\\@|@([-a-zA-Z0-9_]+)@')
+ elif format == 'cmake':
+ regex = re.compile(r'(?:\\\\)+(?=\\?\$)|\\\${|\${([-a-zA-Z0-9_]+)}')
+ else:
+ raise MesonException('Format "{}" not handled'.format(format))
+
+ search_token = '#mesondefine'
+ if format != 'meson':
+ search_token = '#cmakedefine'
+
result = []
missing_variables = set()
for line in data:
- if line.startswith('#mesondefine'):
+ if line.startswith(search_token):
line = do_mesondefine(line, confdata)
else:
- line, missing = do_replacement(regex, line, confdata)
+ line, missing = do_replacement(regex, line, format, confdata)
missing_variables.update(missing)
result.append(line)
dst_tmp = dst + '~'
@@ -714,7 +749,9 @@ def expand_arguments(args):
return expended_args
def Popen_safe(args, write=None, stderr=subprocess.PIPE, **kwargs):
- if sys.version_info < (3, 6) or not sys.stdout.encoding:
+ import locale
+ encoding = locale.getpreferredencoding()
+ if sys.version_info < (3, 6) or not sys.stdout.encoding or encoding.upper() != 'UTF-8':
return Popen_safe_legacy(args, write=write, stderr=stderr, **kwargs)
p = subprocess.Popen(args, universal_newlines=True,
close_fds=False,
@@ -973,3 +1010,26 @@ class OrderedSet(collections.MutableSet):
def difference(self, set_):
return type(self)(e for e in self if e not in set_)
+
+class BuildDirLock:
+
+ def __init__(self, builddir):
+ self.lockfilename = os.path.join(builddir, 'meson-private/meson.lock')
+
+ def __enter__(self):
+ self.lockfile = open(self.lockfilename, 'w')
+ try:
+ if have_fcntl:
+ fcntl.flock(self.lockfile, fcntl.LOCK_EX | fcntl.LOCK_NB)
+ elif have_msvcrt:
+ msvcrt.locking(self.lockfile.fileno(), msvcrt.LK_NBLCK, 1)
+ except (BlockingIOError, PermissionError):
+ self.lockfile.close()
+ raise MesonException('Some other Meson process is already using this build directory. Exiting.')
+
+ def __exit__(self, *args):
+ if have_fcntl:
+ fcntl.flock(self.lockfile, fcntl.LOCK_UN)
+ elif have_msvcrt:
+ msvcrt.locking(self.lockfile.fileno(), msvcrt.LK_UNLCK, 1)
+ self.lockfile.close()
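BuildDirLock wraps an OS-level advisory lock (flock on POSIX, msvcrt.locking on Windows) in a context manager so two Meson processes cannot configure the same build directory at once. A simplified, POSIX-only sketch of the same idea that runs on its own; the DirLock class and lock-file name are illustrative, not Meson's:

import fcntl, os, tempfile

class DirLock:
    # Simplified POSIX-only variant: an exclusive, non-blocking flock
    # on a well-known file inside the directory.
    def __init__(self, dirname):
        self.lockfilename = os.path.join(dirname, 'dir.lock')

    def __enter__(self):
        self.lockfile = open(self.lockfilename, 'w')
        try:
            fcntl.flock(self.lockfile, fcntl.LOCK_EX | fcntl.LOCK_NB)
        except (BlockingIOError, PermissionError):
            self.lockfile.close()
            raise RuntimeError('Directory already in use by another process.')
        return self

    def __exit__(self, *args):
        fcntl.flock(self.lockfile, fcntl.LOCK_UN)
        self.lockfile.close()

with tempfile.TemporaryDirectory() as d:
    with DirLock(d):
        print('holding', os.path.join(d, 'dir.lock'))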
diff --git a/mesonbuild/mesonmain.py b/mesonbuild/mesonmain.py
index 7966d70..651224e 100644
--- a/mesonbuild/mesonmain.py
+++ b/mesonbuild/mesonmain.py
@@ -12,8 +12,8 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-import sys, stat, traceback, pickle, argparse
-import time, datetime
+import sys, stat, traceback, argparse
+import datetime
import os.path
from . import environment, interpreter, mesonlib
from . import build
@@ -147,7 +147,8 @@ class MesonApp:
def generate(self):
env = environment.Environment(self.source_dir, self.build_dir, self.meson_script_launcher, self.options, self.original_cmd_line_args)
mlog.initialize(env.get_log_dir())
- self._generate(env)
+ with mesonlib.BuildDirLock(self.build_dir):
+ self._generate(env)
def _generate(self, env):
mlog.debug('Build started at', datetime.datetime.now().isoformat())
@@ -196,6 +197,7 @@ class MesonApp:
mlog.log('Build machine cpu:', mlog.bold(intr.builtin['build_machine'].cpu_method([], {})))
intr.run()
try:
+ dumpfile = os.path.join(env.get_scratch_dir(), 'build.dat')
# We would like to write coredata as late as possible since we use the existence of
# this file to check if we generated the build file successfully. Since coredata
# includes settings, the build files must depend on it and appear newer. However, due
@@ -204,16 +206,13 @@ class MesonApp:
# possible, but before build files, and if any error occurs, delete it.
cdf = env.dump_coredata()
g.generate(intr)
- dumpfile = os.path.join(env.get_scratch_dir(), 'build.dat')
- with open(dumpfile, 'wb') as f:
- pickle.dump(b, f)
+ build.save(b, dumpfile)
# Post-conf scripts must be run after writing coredata or else introspection fails.
g.run_postconf_scripts()
except:
os.unlink(cdf)
raise
-
def run_script_command(args):
cmdname = args[0]
cmdargs = args[1:]
@@ -286,6 +285,13 @@ def run(original_args, mainfile=None):
# First check if we want to run a subcommand.
cmd_name = args[0]
remaining_args = args[1:]
+ # "help" is a special case: Since printing of the help may be
+ # delegated to a subcommand, we edit cmd_name before executing
+ # the rest of the logic here.
+ if cmd_name == 'help':
+ remaining_args += ['--help']
+ args = remaining_args
+ cmd_name = args[0]
if cmd_name == 'test':
return mtest.run(remaining_args)
elif cmd_name == 'setup':
@@ -299,7 +305,7 @@ def run(original_args, mainfile=None):
try:
return mconf.run(remaining_args)
except MesonException as e:
- mlog.log(mlog.red('\nError configuring project:'), e)
+ mlog.exception(e)
sys.exit(1)
elif cmd_name == 'wrap':
return wraptool.run(remaining_args)
@@ -319,8 +325,8 @@ def run(original_args, mainfile=None):
try:
sys.exit(run_script_command(args[1:]))
except MesonException as e:
- mlog.log(mlog.red('\nError in {} helper script:'.format(script)))
- mlog.log(e)
+ mlog.error('\nError in {} helper script:'.format(script))
+ mlog.exception(e)
sys.exit(1)
args = args[2:]
handshake = True
@@ -363,13 +369,7 @@ def run(original_args, mainfile=None):
app.generate()
except Exception as e:
if isinstance(e, MesonException):
- mlog.log()
- if hasattr(e, 'file') and hasattr(e, 'lineno') and hasattr(e, 'colno'):
- mlog.log('%s:%d:%d:' % (e.file, e.lineno, e.colno), mlog.red('ERROR: '), end='')
- else:
- mlog.log(mlog.red('ERROR: '), end='')
- # Error message
- mlog.log(e)
+ mlog.exception(e)
# Path to log file
mlog.shutdown()
logfile = os.path.join(app.build_dir, environment.Environment.log_dir, mlog.log_fname)
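The new `help` handling simply rewrites the argument vector so that `meson help <command>` is dispatched as `<command> --help`. A tiny sketch of that rewrite; the function name is invented for illustration:

def rewrite_help(args):
    # 'meson help test' becomes 'meson test --help' before dispatch.
    if args and args[0] == 'help':
        args = args[1:] + ['--help']
    return args

print(rewrite_help(['help', 'test']))            # ['test', '--help']
print(rewrite_help(['configure', '-Dfoo=bar']))  # unchanged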
diff --git a/mesonbuild/minit.py b/mesonbuild/minit.py
index 98817cb..0461cd9 100644
--- a/mesonbuild/minit.py
+++ b/mesonbuild/minit.py
@@ -1,5 +1,4 @@
# Copyright 2017 The Meson development team
-from pyclbr import Function
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -15,8 +14,10 @@ from pyclbr import Function
"""Code that creates simple startup projects."""
-import os, sys, argparse, re
+import os, sys, argparse, re, shutil
from glob import glob
+from mesonbuild import mesonlib
+from mesonbuild.environment import detect_ninja
lib_h_template = '''#pragma once
#if defined _WIN32 || defined __CYGWIN__
@@ -107,7 +108,7 @@ pkg_mod.generate(
)
'''
-hello_c_template = '''#include <stdio.h>
+hello_c_template = '''#include <stdio.h>
#define PROJECT_NAME "{project_name}"
@@ -123,16 +124,15 @@ int main(int argc, char **argv) {{
hello_c_meson_template = '''project('{project_name}', 'c',
version : '{version}',
- default_options : ['warning_level=3',
- 'cpp_std=c++14'])
+ default_options : ['warning_level=3'])
exe = executable('{exe_name}', '{source_name}',
install : true)
-
+
test('basic', exe)
'''
-hello_cpp_template = '''#include <iostream>
+hello_cpp_template = '''#include <iostream>
#define PROJECT_NAME "{project_name}"
@@ -148,11 +148,12 @@ int main(int argc, char **argv) {{
hello_cpp_meson_template = '''project('{project_name}', 'cpp',
version : '{version}',
- default_options : ['warning_level=3'])
+ default_options : ['warning_level=3',
+ 'cpp_std=c++14'])
exe = executable('{exe_name}', '{source_name}',
install : true)
-
+
test('basic', exe)
'''
@@ -178,9 +179,9 @@ class {utoken}_PUBLIC {class_name} {{
public:
{class_name}();
int get_number() const;
-
+
private:
-
+
int number;
}};
@@ -270,7 +271,6 @@ ninja -C builddir
def create_exe_c_sample(project_name, project_version):
lowercase_token = re.sub(r'[^a-z0-9]', '_', project_name.lower())
- uppercase_token = lowercase_token.upper()
source_name = lowercase_token + '.c'
open(source_name, 'w').write(hello_c_template.format(project_name=project_name))
open('meson.build', 'w').write(hello_c_meson_template.format(project_name=project_name,
@@ -291,7 +291,7 @@ def create_lib_c_sample(project_name, version):
'function_name': function_name,
'header_file': lib_h_name,
'source_file': lib_c_name,
- 'test_source_file': test_c_name,
+ 'test_source_file': test_c_name,
'test_exe_name': lowercase_token,
'project_name': project_name,
'lib_name': lowercase_token,
@@ -305,13 +305,12 @@ def create_lib_c_sample(project_name, version):
def create_exe_cpp_sample(project_name, project_version):
lowercase_token = re.sub(r'[^a-z0-9]', '_', project_name.lower())
- uppercase_token = lowercase_token.upper()
source_name = lowercase_token + '.cpp'
open(source_name, 'w').write(hello_cpp_template.format(project_name=project_name))
open('meson.build', 'w').write(hello_cpp_meson_template.format(project_name=project_name,
- exe_name=lowercase_token,
- source_name=source_name,
- version=project_version))
+ exe_name=lowercase_token,
+ source_name=source_name,
+ version=project_version))
def create_lib_cpp_sample(project_name, version):
lowercase_token = re.sub(r'[^a-z0-9]', '_', project_name.lower())
@@ -328,7 +327,7 @@ def create_lib_cpp_sample(project_name, version):
'namespace': namespace,
'header_file': lib_h_name,
'source_file': lib_c_name,
- 'test_source_file': test_c_name,
+ 'test_source_file': test_c_name,
'test_exe_name': lowercase_token,
'project_name': project_name,
'lib_name': lowercase_token,
@@ -359,15 +358,123 @@ def create_sample(options):
raise RuntimeError('Unreachable code')
print(info_message)
+def autodetect_options(options, sample=False):
+ if not options.name:
+ options.name = os.path.basename(os.getcwd())
+ if not re.match('[a-zA-Z_][a-zA-Z0-9]*', options.name) and sample:
+ print('Name of current directory "{}" is not usable as a sample project name.\n'
+ 'Specify a project name with --name.'.format(options.name))
+ sys.exit(1)
+ print('Using "{}" (name of current directory) as project name.'
+ .format(options.name))
+ if not options.executable:
+ options.executable = options.name
+ print('Using "{}" (project name) as name of executable to build.'
+ .format(options.executable))
+ if sample:
+ # The rest of the autodetection is not applicable to generating sample projects.
+ return
+ if not options.srcfiles:
+ srcfiles = []
+ for f in os.listdir():
+ if f.endswith('.cc') or f.endswith('.cpp') or f.endswith('.c'):
+ srcfiles.append(f)
+ if not srcfiles:
+ print("No recognizable source files found.\n"
+ "Run me in an empty directory to create a sample project.")
+ sys.exit(1)
+ options.srcfiles = srcfiles
+ print("Detected source files: " + ' '.join(srcfiles))
+ if not options.language:
+ for f in options.srcfiles:
+ if f.endswith('.cc') or f.endswith('.cpp'):
+ options.language = 'cpp'
+ break
+ if f.endswith('.c'):
+ options.language = 'c'
+ break
+ if not options.language:
+ print("Can't autodetect language, please specify it with -l.")
+ sys.exit(1)
+ print("Detected language: " + options.language)
+
+meson_executable_template = '''project('{project_name}', '{language}',
+ version : '{version}',
+ default_options : [{default_options}])
+
+executable('{executable}',
+ {sourcespec},{depspec}
+ install : true)
+'''
+
+def create_meson_build(options):
+ if options.type != 'executable':
+ print('\nGenerating a meson.build file from existing sources is\n'
+ 'supported only for project type "executable".\n'
+ 'Run me in an empty directory to create a sample project.')
+ sys.exit(1)
+ default_options = ['warning_level=3']
+ if options.language == 'cpp':
+ # This shows how to set this very common option.
+ default_options += ['cpp_std=c++14']
+ # If we get a meson.build autoformatter one day, this code could
+ # be simplified quite a bit.
+ formatted_default_options = ', '.join("'{}'".format(x) for x in default_options)
+ sourcespec = ',\n '.join("'{}'".format(x) for x in options.srcfiles)
+ depspec = ''
+ if options.deps:
+ depspec = '\n dependencies : [\n '
+ depspec += ',\n '.join("dependency('{}')".format(x)
+ for x in options.deps.split(','))
+ depspec += '],'
+ content = meson_executable_template.format(project_name=options.name,
+ language=options.language,
+ version=options.version,
+ executable=options.executable,
+ sourcespec=sourcespec,
+ depspec=depspec,
+ default_options=formatted_default_options)
+ open('meson.build', 'w').write(content)
+ print('Generated meson.build file:\n\n' + content)
+
def run(args):
parser = argparse.ArgumentParser(prog='meson')
- parser.add_argument('--name', default = 'mesonsample')
+ parser.add_argument("srcfiles", metavar="sourcefile", nargs="*",
+ help="source files. default: all recognized files in current directory")
+ parser.add_argument("-n", "--name", help="project name. default: name of current directory")
+ parser.add_argument("-e", "--executable", help="executable name. default: project name")
+ parser.add_argument("-d", "--deps", help="dependencies, comma-separated")
+ parser.add_argument("-l", "--language", choices=['c', 'cpp'],
+ help="project language. default: autodetected based on source files")
+ parser.add_argument("-b", "--build", help="build after generation", action='store_true')
+ parser.add_argument("--builddir", help="directory for build", default='build')
+ parser.add_argument("-f", "--force", action="store_true",
+ help="force overwrite of existing files and directories.")
parser.add_argument('--type', default='executable',
choices=['executable', 'library'])
- parser.add_argument('--language', default='c', choices=['c', 'cpp'])
- parser.add_argument('--version', default='1.0')
+ parser.add_argument('--version', default='0.1')
options = parser.parse_args(args)
- if len(glob('*')) != 0:
- sys.exit('This command must be run in an empty directory.')
- create_sample(options)
+ if len(glob('*')) == 0:
+ autodetect_options(options, sample=True)
+ if not options.language:
+ print('Defaulting to generating a C language project.')
+ options.language = 'c'
+ create_sample(options)
+ else:
+ autodetect_options(options)
+ if os.path.isfile('meson.build') and not options.force:
+ print('meson.build already exists. Use --force to overwrite.')
+ sys.exit(1)
+ create_meson_build(options)
+ if options.build:
+ if os.path.isdir(options.builddir) and options.force:
+ print('Build directory already exists, deleting it.')
+ shutil.rmtree(options.builddir)
+ print('Building...')
+ err = os.system('{} "{}"'.format(' '.join(mesonlib.meson_command), options.builddir))
+ if err:
+ sys.exit(1)
+ err = os.system('{} -C "{}"'.format(detect_ninja(), options.builddir))
+ if err:
+ sys.exit(1)
return 0
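meson init now autodetects source files and the project language from file extensions when they are not passed on the command line. A condensed sketch of that detection; the directory listing is hard-coded here instead of read from the current directory:

def detect(srcfiles=None, listing=('main.cpp', 'util.cc', 'README.md')):
    # 'listing' stands in for os.listdir() of the current directory.
    if not srcfiles:
        srcfiles = [f for f in listing if f.endswith(('.c', '.cc', '.cpp'))]
        if not srcfiles:
            raise SystemExit('No recognizable source files found.')
    language = None
    for f in srcfiles:
        if f.endswith(('.cc', '.cpp')):
            language = 'cpp'
            break
        if f.endswith('.c'):
            language = 'c'
            break
    return srcfiles, language

print(detect())  # (['main.cpp', 'util.cc'], 'cpp')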
diff --git a/mesonbuild/mintro.py b/mesonbuild/mintro.py
index 8cf66af..5a9d4cf 100644
--- a/mesonbuild/mintro.py
+++ b/mesonbuild/mintro.py
@@ -19,32 +19,35 @@ tests and so on. All output is in JSON for simple parsing.
Currently only works for the Ninja backend. Others use generated
project files and don't need this info."""
-import json, pickle
-from . import coredata, build
+import json
+from . import build, mtest, coredata as cdata
+from .backend import ninjabackend
import argparse
import sys, os
import pathlib
-parser = argparse.ArgumentParser(prog='meson introspect')
-parser.add_argument('--targets', action='store_true', dest='list_targets', default=False,
- help='List top level targets.')
-parser.add_argument('--installed', action='store_true', dest='list_installed', default=False,
- help='List all installed files and directories.')
-parser.add_argument('--target-files', action='store', dest='target_files', default=None,
- help='List source files for a given target.')
-parser.add_argument('--buildsystem-files', action='store_true', dest='buildsystem_files', default=False,
- help='List files that make up the build system.')
-parser.add_argument('--buildoptions', action='store_true', dest='buildoptions', default=False,
- help='List all build options.')
-parser.add_argument('--tests', action='store_true', dest='tests', default=False,
- help='List all unit tests.')
-parser.add_argument('--benchmarks', action='store_true', dest='benchmarks', default=False,
- help='List all benchmarks.')
-parser.add_argument('--dependencies', action='store_true', dest='dependencies', default=False,
- help='List external dependencies.')
-parser.add_argument('--projectinfo', action='store_true', dest='projectinfo', default=False,
- help='Information about projects.')
-parser.add_argument('builddir', nargs='?', help='The build directory')
+def buildparser():
+ parser = argparse.ArgumentParser(prog='meson introspect')
+ parser.add_argument('--targets', action='store_true', dest='list_targets', default=False,
+ help='List top level targets.')
+ parser.add_argument('--installed', action='store_true', dest='list_installed', default=False,
+ help='List all installed files and directories.')
+ parser.add_argument('--target-files', action='store', dest='target_files', default=None,
+ help='List source files for a given target.')
+ parser.add_argument('--buildsystem-files', action='store_true', dest='buildsystem_files', default=False,
+ help='List files that make up the build system.')
+ parser.add_argument('--buildoptions', action='store_true', dest='buildoptions', default=False,
+ help='List all build options.')
+ parser.add_argument('--tests', action='store_true', dest='tests', default=False,
+ help='List all unit tests.')
+ parser.add_argument('--benchmarks', action='store_true', dest='benchmarks', default=False,
+ help='List all benchmarks.')
+ parser.add_argument('--dependencies', action='store_true', dest='dependencies', default=False,
+ help='List external dependencies.')
+ parser.add_argument('--projectinfo', action='store_true', dest='projectinfo', default=False,
+ help='Information about projects.')
+ parser.add_argument('builddir', nargs='?', default='.', help='The build directory')
+ return parser
def determine_installed_path(target, installdata):
install_target = None
@@ -132,16 +135,16 @@ def add_keys(optlist, options):
for key in keys:
opt = options[key]
optdict = {'name': key, 'value': opt.value}
- if isinstance(opt, coredata.UserStringOption):
+ if isinstance(opt, cdata.UserStringOption):
typestr = 'string'
- elif isinstance(opt, coredata.UserBooleanOption):
+ elif isinstance(opt, cdata.UserBooleanOption):
typestr = 'boolean'
- elif isinstance(opt, coredata.UserComboOption):
+ elif isinstance(opt, cdata.UserComboOption):
optdict['choices'] = opt.choices
typestr = 'combo'
- elif isinstance(opt, coredata.UserIntegerOption):
+ elif isinstance(opt, cdata.UserIntegerOption):
typestr = 'integer'
- elif isinstance(opt, coredata.UserArrayOption):
+ elif isinstance(opt, cdata.UserArrayOption):
typestr = 'array'
else:
raise RuntimeError("Unknown option type")
@@ -149,7 +152,7 @@ def add_keys(optlist, options):
optdict['description'] = opt.description
optlist.append(optdict)
-def list_buildsystem_files(coredata, builddata):
+def list_buildsystem_files(builddata):
src_dir = builddata.environment.get_source_dir()
# I feel dirty about this. But only slightly.
filelist = []
@@ -185,6 +188,7 @@ def list_tests(testdata):
to['workdir'] = t.workdir
to['timeout'] = t.timeout
to['suite'] = t.suite
+ to['is_parallel'] = t.is_parallel
result.append(to)
print(json.dumps(result))
@@ -200,7 +204,7 @@ def list_projinfo(builddata):
def run(args):
datadir = 'meson-private'
- options = parser.parse_args(args)
+ options = buildparser().parse_args(args)
if options.builddir is not None:
datadir = os.path.join(options.builddir, datadir)
if not os.path.isdir(datadir):
@@ -208,26 +212,15 @@ def run(args):
'change the working directory to it.')
return 1
- corefile = os.path.join(datadir, 'coredata.dat')
- buildfile = os.path.join(datadir, 'build.dat')
- installfile = os.path.join(datadir, 'install.dat')
- testfile = os.path.join(datadir, 'meson_test_setup.dat')
- benchmarkfile = os.path.join(datadir, 'meson_benchmark_setup.dat')
+ coredata = cdata.load(options.builddir)
+ builddata = build.load(options.builddir)
+ testdata = mtest.load_tests(options.builddir)
+ benchmarkdata = mtest.load_benchmarks(options.builddir)
- # Load all data files
- with open(corefile, 'rb') as f:
- coredata = pickle.load(f)
- with open(buildfile, 'rb') as f:
- builddata = pickle.load(f)
- with open(testfile, 'rb') as f:
- testdata = pickle.load(f)
- with open(benchmarkfile, 'rb') as f:
- benchmarkdata = pickle.load(f)
# Install data is only available with the Ninja backend
- if os.path.isfile(installfile):
- with open(installfile, 'rb') as f:
- installdata = pickle.load(f)
- else:
+ try:
+ installdata = ninjabackend.load(options.builddir)
+ except FileNotFoundError:
installdata = None
if options.list_targets:
@@ -237,7 +230,7 @@ def run(args):
elif options.target_files is not None:
list_target_files(options.target_files, coredata, builddata)
elif options.buildsystem_files:
- list_buildsystem_files(coredata, builddata)
+ list_buildsystem_files(builddata)
elif options.buildoptions:
list_buildoptions(coredata, builddata)
elif options.tests:
diff --git a/mesonbuild/mlog.py b/mesonbuild/mlog.py
index 273552d..6cbaf60 100644
--- a/mesonbuild/mlog.py
+++ b/mesonbuild/mlog.py
@@ -13,6 +13,7 @@
# limitations under the License.
import sys, os, platform, io
+from contextlib import contextmanager
"""This is (mostly) a standalone module used to write logging
information about Meson runs. Some output goes to screen,
@@ -25,6 +26,7 @@ else:
log_dir = None
log_file = None
log_fname = 'meson-log.txt'
+log_depth = 0
def initialize(logdir):
global log_dir, log_file
@@ -77,15 +79,21 @@ def process_markup(args, keep):
return arr
def force_print(*args, **kwargs):
+ iostr = io.StringIO()
+ kwargs['file'] = iostr
+ print(*args, **kwargs)
+
+ raw = iostr.getvalue()
+ if log_depth > 0:
+ prepend = '|' * log_depth
+ raw = prepend + raw.replace('\n', '\n' + prepend, raw.count('\n') - 1)
+
# _Something_ is going to get printed.
try:
- print(*args, **kwargs)
+ print(raw, end='')
except UnicodeEncodeError:
- iostr = io.StringIO()
- kwargs['file'] = iostr
- print(*args, **kwargs)
- cleaned = iostr.getvalue().encode('ascii', 'replace').decode('ascii')
- print(cleaned)
+ cleaned = raw.encode('ascii', 'replace').decode('ascii')
+ print(cleaned, end='')
def debug(*args, **kwargs):
arr = process_markup(args, False)
@@ -102,19 +110,38 @@ def log(*args, **kwargs):
arr = process_markup(args, True)
force_print(*arr, **kwargs)
-def warning(*args, **kwargs):
+def _log_error(severity, *args, **kwargs):
from . import environment
+ if severity == 'warning':
+ args = (yellow('WARNING:'),) + args
+ elif severity == 'error':
+ args = (red('ERROR:'),) + args
+ else:
+ assert False, 'Invalid severity ' + severity
- args = (yellow('WARNING:'),) + args
-
- if kwargs.get('location'):
+ if 'location' in kwargs:
location = kwargs['location']
del kwargs['location']
- location = '{}:{}:'.format(os.path.join(location.subdir, environment.build_filename), location.lineno)
- args = (location,) + args
+ location_str = '{}:{}:'.format(os.path.join(location.subdir,
+ environment.build_filename),
+ location.lineno)
+ args = (location_str,) + args
log(*args, **kwargs)
+def error(*args, **kwargs):
+ return _log_error('error', *args, **kwargs)
+
+def warning(*args, **kwargs):
+ return _log_error('warning', *args, **kwargs)
+
+def exception(e):
+ log()
+ if hasattr(e, 'file') and hasattr(e, 'lineno') and hasattr(e, 'colno'):
+ log('%s:%d:%d:' % (e.file, e.lineno, e.colno), red('ERROR: '), e)
+ else:
+ log(red('ERROR:'), e)
+
# Format a list for logging purposes as a string. It separates
# all but the last item with commas, and the last with 'and'.
def format_list(list):
@@ -127,3 +154,12 @@ def format_list(list):
return list[0]
else:
return ''
+
+@contextmanager
+def nested():
+ global log_depth
+ log_depth += 1
+ try:
+ yield
+ finally:
+ log_depth -= 1
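mlog.nested() increments a module-global depth counter, and force_print prepends one '|' per level to every printed line, so output produced while configuring subprojects is visually nested. A self-contained sketch of the same mechanism:

import io
from contextlib import contextmanager

log_depth = 0

@contextmanager
def nested():
    global log_depth
    log_depth += 1
    try:
        yield
    finally:
        log_depth -= 1

def log(*args):
    buf = io.StringIO()
    print(*args, file=buf)
    raw = buf.getvalue()
    if log_depth > 0:
        prepend = '|' * log_depth
        # Prefix every line, but not the empty tail after the final newline.
        raw = prepend + raw.replace('\n', '\n' + prepend, raw.count('\n') - 1)
    print(raw, end='')

log('Configuring main project')
with nested():
    log('Configuring subproject foo')
    with nested():
        log('Dependency bar found')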
diff --git a/mesonbuild/modules/gnome.py b/mesonbuild/modules/gnome.py
index 569011e..4dc29c3 100644
--- a/mesonbuild/modules/gnome.py
+++ b/mesonbuild/modules/gnome.py
@@ -792,7 +792,7 @@ This will become a hard error in the future.''')
state.backend.get_target_dir(s),
s.get_outputs()[0]))
elif isinstance(s, mesonlib.File):
- content_files.append(s.rel_to_builddir(state.build_to_src))
+ content_files.append(os.path.join(state.environment.get_build_dir(), s.subdir, s.fname))
elif isinstance(s, build.GeneratedList):
depends.append(s)
for gen_src in s.get_outputs():
@@ -869,7 +869,7 @@ This will become a hard error in the future.''')
return []
@permittedKwargs({'interface_prefix', 'namespace', 'object_manager', 'build_by_default',
- 'annotations', 'docbook'})
+ 'annotations', 'docbook', 'install', 'install_header'})
def gdbus_codegen(self, state, args, kwargs):
if len(args) != 2:
raise MesonException('Gdbus_codegen takes two arguments, name and xml file.')
@@ -883,8 +883,7 @@ This will become a hard error in the future.''')
cmd += ['--c-namespace', kwargs.pop('namespace')]
if kwargs.get('object_manager', False):
cmd += ['--c-generate-object-manager']
- if 'docbook' in kwargs:
- cmd += ['--generate-docbook', kwargs.pop('docbook')]
+ build_by_default = kwargs.get('build_by_default', False)
# Annotations are a bit ugly in that they are a list of lists of strings...
annotations = kwargs.pop('annotations', [])
@@ -898,21 +897,74 @@ This will become a hard error in the future.''')
raise MesonException('Annotations must be made up of 3 strings for ELEMENT, KEY, and VALUE')
cmd += ['--annotate'] + annotation
- # https://git.gnome.org/browse/glib/commit/?id=ee09bb704fe9ccb24d92dd86696a0e6bb8f0dc1a
- if mesonlib.version_compare(self._get_native_glib_version(state), '>= 2.51.3'):
- cmd += ['--output-directory', '@OUTDIR@', '--generate-c-code', namebase, '@INPUT@']
+ # https://git.gnome.org/browse/glib/commit/?id=e4d68c7b3e8b01ab1a4231bf6da21d045cb5a816
+ if mesonlib.version_compare(self._get_native_glib_version(state), '>= 2.55.2'):
+ targets = []
+ install_header = kwargs.get('install_header', False)
+ install_dir = kwargs.get('install_dir', state.environment.coredata.get_builtin_option('includedir'))
+
+ output = namebase + '.c'
+ custom_kwargs = {'input': xml_file,
+ 'output': output,
+ 'command': cmd + ['--body', '--output', '@OUTDIR@/' + output, '@INPUT@'],
+ 'build_by_default': build_by_default
+ }
+ targets.append(build.CustomTarget(output, state.subdir, state.subproject, custom_kwargs))
+
+ output = namebase + '.h'
+ custom_kwargs = {'input': xml_file,
+ 'output': output,
+ 'command': cmd + ['--header', '--output', '@OUTDIR@/' + output, '@INPUT@'],
+ 'build_by_default': build_by_default,
+ 'install': install_header,
+ 'install_dir': install_dir
+ }
+ targets.append(build.CustomTarget(output, state.subdir, state.subproject, custom_kwargs))
+
+ if 'docbook' in kwargs:
+ docbook = kwargs['docbook']
+ if not isinstance(docbook, str):
+ raise MesonException('docbook value must be a string.')
+
+ docbook_cmd = cmd + ['--output-directory', '@OUTDIR@', '--generate-docbook', docbook, '@INPUT@']
+
+ output = namebase + '-docbook'
+ custom_kwargs = {'input': xml_file,
+ 'output': output,
+ 'command': docbook_cmd,
+ 'build_by_default': build_by_default
+ }
+ targets.append(build.CustomTarget(output, state.subdir, state.subproject, custom_kwargs))
+
+ objects = targets
else:
- self._print_gdbus_warning()
- cmd += ['--generate-c-code', '@OUTDIR@/' + namebase, '@INPUT@']
- outputs = [namebase + '.c', namebase + '.h']
- custom_kwargs = {'input': xml_file,
- 'output': outputs,
- 'command': cmd
- }
- if 'build_by_default' in kwargs:
- custom_kwargs['build_by_default'] = kwargs['build_by_default']
- ct = build.CustomTarget(target_name, state.subdir, state.subproject, custom_kwargs)
- return ModuleReturnValue(ct, [ct])
+ if 'docbook' in kwargs:
+ docbook = kwargs['docbook']
+ if not isinstance(docbook, str):
+ raise MesonException('docbook value must be a string.')
+
+ cmd += ['--generate-docbook', docbook]
+
+ # https://git.gnome.org/browse/glib/commit/?id=ee09bb704fe9ccb24d92dd86696a0e6bb8f0dc1a
+ if mesonlib.version_compare(self._get_native_glib_version(state), '>= 2.51.3'):
+ cmd += ['--output-directory', '@OUTDIR@', '--generate-c-code', namebase, '@INPUT@']
+ else:
+ self._print_gdbus_warning()
+ cmd += ['--generate-c-code', '@OUTDIR@/' + namebase, '@INPUT@']
+ outputs = [namebase + '.c', namebase + '.h']
+ custom_kwargs = {'input': xml_file,
+ 'output': outputs,
+ 'command': cmd,
+ 'build_by_default': build_by_default
+ }
+ ct = build.CustomTarget(target_name, state.subdir, state.subproject, custom_kwargs)
+ # Ensure that the same number (and order) of arguments are returned
+ # regardless of the gdbus-codegen (glib) version being used
+ targets = [ct, ct]
+ if 'docbook' in kwargs:
+ targets.append(ct)
+ objects = [ct]
+ return ModuleReturnValue(targets, objects)
@permittedKwargs({'sources', 'c_template', 'h_template', 'install_header', 'install_dir',
'comments', 'identifier_prefix', 'symbol_prefix', 'eprod', 'vprod',
@@ -1046,6 +1098,12 @@ This will become a hard error in the future.''')
raise MesonException(
'Sources keyword argument must be a string or array.')
+ # The `install_header` argument will be used by mkenums() when
+ # not using template files, so we need to forcibly unset it
+ # when generating the C source file, otherwise we will end up
+ # installing it
+ c_file_kwargs['install_header'] = False
+
header_prefix = kwargs.get('header_prefix', '')
decl_decorator = kwargs.get('decorator', '')
func_prefix = kwargs.get('function_prefix', '')
@@ -1350,7 +1408,7 @@ G_END_DECLS'''
# - add relevant directories to include dirs
incs = [build.IncludeDirs(state.subdir, ['.'] + vapi_includes, False)]
sources = [vapi_target] + vapi_depends
- rv = InternalDependency(None, incs, [], [], link_with, sources, [])
+ rv = InternalDependency(None, incs, [], [], link_with, [], sources, [])
created_values.append(rv)
return ModuleReturnValue(rv, created_values)
diff --git a/mesonbuild/modules/pkgconfig.py b/mesonbuild/modules/pkgconfig.py
index 5573a2e..6f0e717 100644
--- a/mesonbuild/modules/pkgconfig.py
+++ b/mesonbuild/modules/pkgconfig.py
@@ -31,6 +31,7 @@ class DependenciesHelper:
self.priv_libs = []
self.priv_reqs = []
self.cflags = []
+ self.version_reqs = {}
def add_pub_libs(self, libs):
libs, reqs, cflags = self._process_libs(libs, True)
@@ -44,30 +45,60 @@ class DependenciesHelper:
self.priv_reqs += reqs
def add_pub_reqs(self, reqs):
- self.pub_reqs += mesonlib.stringlistify(reqs)
+ self.pub_reqs += self._process_reqs(reqs)
def add_priv_reqs(self, reqs):
- self.priv_reqs += mesonlib.stringlistify(reqs)
+ self.priv_reqs += self._process_reqs(reqs)
+
+ def _process_reqs(self, reqs):
+ '''Returns string names of requirements'''
+ processed_reqs = []
+ for obj in mesonlib.listify(reqs, unholder=True):
+ if hasattr(obj, 'generated_pc'):
+ processed_reqs.append(obj.generated_pc)
+ elif hasattr(obj, 'pcdep'):
+ pcdeps = mesonlib.listify(obj.pcdep)
+ for d in pcdeps:
+ processed_reqs.append(d.name)
+ self.add_version_reqs(d.name, obj.version_reqs)
+ elif isinstance(obj, dependencies.PkgConfigDependency):
+ if obj.found():
+ processed_reqs.append(obj.name)
+ self.add_version_reqs(obj.name, obj.version_reqs)
+ elif isinstance(obj, str):
+ name, version_req = self.split_version_req(obj)
+ processed_reqs.append(name)
+ self.add_version_reqs(name, version_req)
+ elif isinstance(obj, dependencies.Dependency) and not obj.found():
+ pass
+ else:
+ raise mesonlib.MesonException('requires argument not a string, '
+ 'library with pkgconfig-generated file '
+ 'or pkgconfig-dependency object, '
+ 'got {!r}'.format(obj))
+ return processed_reqs
def add_cflags(self, cflags):
self.cflags += mesonlib.stringlistify(cflags)
def _process_libs(self, libs, public):
- libs = mesonlib.listify(libs)
+ libs = mesonlib.listify(libs, unholder=True)
processed_libs = []
processed_reqs = []
processed_cflags = []
for obj in libs:
- if hasattr(obj, 'held_object'):
- obj = obj.held_object
+ shared_library_only = getattr(obj, 'shared_library_only', False)
if hasattr(obj, 'pcdep'):
pcdeps = mesonlib.listify(obj.pcdep)
- processed_reqs += [i.name for i in pcdeps]
+ for d in pcdeps:
+ processed_reqs.append(d.name)
+ self.add_version_reqs(d.name, obj.version_reqs)
elif hasattr(obj, 'generated_pc'):
processed_reqs.append(obj.generated_pc)
elif isinstance(obj, dependencies.PkgConfigDependency):
if obj.found():
processed_reqs.append(obj.name)
+ self.add_version_reqs(obj.name, obj.version_reqs)
elif isinstance(obj, dependencies.ThreadDependency):
processed_libs += obj.get_compiler().thread_link_flags(obj.env)
processed_cflags += obj.get_compiler().thread_flags(obj.env)
@@ -75,11 +106,26 @@ class DependenciesHelper:
if obj.found():
processed_libs += obj.get_link_args()
processed_cflags += obj.get_compile_args()
+ elif isinstance(obj, build.SharedLibrary) and shared_library_only:
+ # Do not pull dependencies for shared libraries because they are
+ # only required for static linking. Adding private requires has
+ # the side effect of exposing their cflags, which is the
+ # intended behaviour of pkg-config but forces Debian to add
+ # more build dependencies than needed.
+ # See https://bugs.freedesktop.org/show_bug.cgi?id=105572
+ processed_libs.append(obj)
+ if public:
+ if not hasattr(obj, 'generated_pc'):
+ obj.generated_pc = self.name
elif isinstance(obj, (build.SharedLibrary, build.StaticLibrary)):
processed_libs.append(obj)
if public:
if not hasattr(obj, 'generated_pc'):
obj.generated_pc = self.name
+ if isinstance(obj, build.StaticLibrary) and public:
+ self.add_pub_libs(obj.get_dependencies())
+ self.add_pub_libs(obj.get_external_deps())
+ else:
self.add_priv_libs(obj.get_dependencies())
self.add_priv_libs(obj.get_external_deps())
elif isinstance(obj, str):
@@ -89,14 +135,53 @@ class DependenciesHelper:
return processed_libs, processed_reqs, processed_cflags
- def remove_dups(self):
- self.pub_libs = list(set(self.pub_libs))
- self.pub_reqs = list(set(self.pub_reqs))
- self.priv_libs = list(set(self.priv_libs))
- self.priv_reqs = list(set(self.priv_reqs))
- self.cflags = list(set(self.cflags))
+ def add_version_reqs(self, name, version_reqs):
+ if version_reqs:
+ if name not in self.version_reqs:
+ self.version_reqs[name] = set()
+ # We could have '>=1.0' or '>= 1.0', remove spaces to normalize
+ new_vreqs = [s.replace(' ', '') for s in mesonlib.stringlistify(version_reqs)]
+ self.version_reqs[name].update(new_vreqs)
+
+ def split_version_req(self, s):
+ for op in ['>=', '<=', '!=', '==', '=', '>', '<']:
+ pos = s.find(op)
+ if pos > 0:
+ return s[0:pos].strip(), s[pos:].strip()
+ return s, None
+
+ def format_vreq(self, vreq):
+ # vreqs come in as '>=1.0' but pkg-config wants '>= 1.0'
+ for op in ['>=', '<=', '!=', '==', '=', '>', '<']:
+ if vreq.startswith(op):
+ return op + ' ' + vreq[len(op):]
+ return vreq
+
+ def format_reqs(self, reqs):
+ result = []
+ for name in reqs:
+ vreqs = self.version_reqs.get(name, None)
+ if vreqs:
+ result += [name + ' ' + self.format_vreq(vreq) for vreq in vreqs]
+ else:
+ result += [name]
+ return ', '.join(result)
- # Remove from pivate libs/reqs if they are in public already
+ def remove_dups(self):
+ def _fn(xs):
+ # Remove duplicates whilst preserving original order
+ result = []
+ for x in xs:
+ if x not in result:
+ result.append(x)
+ return result
+ self.pub_libs = _fn(self.pub_libs)
+ self.pub_reqs = _fn(self.pub_reqs)
+ self.priv_libs = _fn(self.priv_libs)
+ self.priv_reqs = _fn(self.priv_reqs)
+ self.cflags = _fn(self.cflags)
+
+ # Remove from private libs/reqs if they are in public already
self.priv_libs = [i for i in self.priv_libs if i not in self.pub_libs]
self.priv_reqs = [i for i in self.priv_reqs if i not in self.pub_reqs]
@@ -166,11 +251,12 @@ class PkgConfigModule(ExtensionModule):
if len(url) > 0:
ofile.write('URL: %s\n' % url)
ofile.write('Version: %s\n' % version)
- if len(deps.pub_reqs) > 0:
- ofile.write('Requires: {}\n'.format(' '.join(deps.pub_reqs)))
- if len(deps.priv_reqs) > 0:
- ofile.write(
- 'Requires.private: {}\n'.format(' '.join(deps.priv_reqs)))
+ reqs_str = deps.format_reqs(deps.pub_reqs)
+ if len(reqs_str) > 0:
+ ofile.write('Requires: {}\n'.format(reqs_str))
+ reqs_str = deps.format_reqs(deps.priv_reqs)
+ if len(reqs_str) > 0:
+ ofile.write('Requires.private: {}\n'.format(reqs_str))
if len(conflicts) > 0:
ofile.write('Conflicts: {}\n'.format(' '.join(conflicts)))
@@ -220,20 +306,34 @@ class PkgConfigModule(ExtensionModule):
'subdirs', 'requires', 'requires_private', 'libraries_private',
'install_dir', 'extra_cflags', 'variables', 'url', 'd_module_versions'})
def generate(self, state, args, kwargs):
- if len(args) > 0:
- raise mesonlib.MesonException('Pkgconfig_gen takes no positional arguments.')
+ default_version = state.project_version['version']
+ default_install_dir = None
+ default_description = None
+ default_name = None
+ mainlib = None
+ if len(args) == 1:
+ mainlib = getattr(args[0], 'held_object', args[0])
+ if not isinstance(mainlib, (build.StaticLibrary, build.SharedLibrary)):
+ raise mesonlib.MesonException('Pkgconfig_gen first positional argument must be a library object')
+ default_name = mainlib.name
+ default_description = state.project_name + ': ' + mainlib.name
+ install_dir = mainlib.get_custom_install_dir()[0]
+ if isinstance(install_dir, str):
+ default_install_dir = os.path.join(install_dir, 'pkgconfig')
+ elif len(args) > 1:
+ raise mesonlib.MesonException('Too many positional arguments passed to Pkgconfig_gen.')
subdirs = mesonlib.stringlistify(kwargs.get('subdirs', ['.']))
- version = kwargs.get('version', None)
+ version = kwargs.get('version', default_version)
if not isinstance(version, str):
raise mesonlib.MesonException('Version must be specified.')
- name = kwargs.get('name', None)
+ name = kwargs.get('name', default_name)
if not isinstance(name, str):
raise mesonlib.MesonException('Name not specified.')
filebase = kwargs.get('filebase', name)
if not isinstance(filebase, str):
raise mesonlib.MesonException('Filebase must be a string.')
- description = kwargs.get('description', None)
+ description = kwargs.get('description', default_description)
if not isinstance(description, str):
raise mesonlib.MesonException('Description is not a string.')
url = kwargs.get('url', '')
@@ -242,6 +342,8 @@ class PkgConfigModule(ExtensionModule):
conflicts = mesonlib.stringlistify(kwargs.get('conflicts', []))
deps = DependenciesHelper(filebase)
+ if mainlib:
+ deps.add_pub_libs(mainlib)
deps.add_pub_libs(kwargs.get('libraries', []))
deps.add_priv_libs(kwargs.get('libraries_private', []))
deps.add_pub_reqs(kwargs.get('requires', []))
@@ -252,7 +354,7 @@ class PkgConfigModule(ExtensionModule):
if dversions:
compiler = state.environment.coredata.compilers.get('d')
if compiler:
- deps.add_cflags(compiler.get_feature_args({'versions': dversions}))
+ deps.add_cflags(compiler.get_feature_args({'versions': dversions}, None))
def parse_variable_list(stringlist):
reserved = ['prefix', 'libdir', 'includedir']
@@ -281,7 +383,7 @@ class PkgConfigModule(ExtensionModule):
variables = parse_variable_list(mesonlib.stringlistify(kwargs.get('variables', [])))
pcfile = filebase + '.pc'
- pkgroot = kwargs.get('install_dir', None)
+ pkgroot = kwargs.get('install_dir', default_install_dir)
if pkgroot is None:
pkgroot = os.path.join(state.environment.coredata.get_builtin_option('libdir'), 'pkgconfig')
if not isinstance(pkgroot, str):
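The version-requirement handling added above splits strings such as 'glib-2.0 >= 2.50' into a name and an operator/version pair, then re-inserts the space pkg-config expects when writing the Requires lines. A minimal standalone sketch of that round trip; the helper names mirror the diff, the package name is made up:

def split_version_req(s):
    # Find the first comparison operator and split the name from the constraint.
    for op in ['>=', '<=', '!=', '==', '=', '>', '<']:
        pos = s.find(op)
        if pos > 0:
            return s[0:pos].strip(), s[pos:].strip()
    return s, None

def format_vreq(vreq):
    # '>=2.50' becomes '>= 2.50', which is the spelling pkg-config expects.
    for op in ['>=', '<=', '!=', '==', '=', '>', '<']:
        if vreq.startswith(op):
            return op + ' ' + vreq[len(op):]
    return vreq

name, vreq = split_version_req('glib-2.0 >= 2.50')   # ('glib-2.0', '>= 2.50')
normalized = vreq.replace(' ', '')                    # '>=2.50', as stored in version_reqs
print(name + ' ' + format_vreq(normalized))           # 'glib-2.0 >= 2.50'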
diff --git a/mesonbuild/modules/python3.py b/mesonbuild/modules/python3.py
index 989e839..d2bf1dc 100644
--- a/mesonbuild/modules/python3.py
+++ b/mesonbuild/modules/python3.py
@@ -12,7 +12,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-import sys
import sysconfig
from .. import mesonlib, dependencies
@@ -20,10 +19,7 @@ from . import ExtensionModule
from mesonbuild.modules import ModuleReturnValue
from . import permittedSnippetKwargs
from ..interpreterbase import noKwargs
-from ..interpreter import shlib_kwargs
-
-mod_kwargs = set()
-mod_kwargs.update(shlib_kwargs)
+from ..build import known_shmod_kwargs
class Python3Module(ExtensionModule):
@@ -31,7 +27,7 @@ class Python3Module(ExtensionModule):
super().__init__()
self.snippets.add('extension_module')
- @permittedSnippetKwargs(mod_kwargs)
+ @permittedSnippetKwargs(known_shmod_kwargs)
def extension_module(self, interpreter, state, args, kwargs):
if 'name_prefix' in kwargs:
raise mesonlib.MesonException('Name_prefix is set automatically, specifying it is forbidden.')
diff --git a/mesonbuild/modules/qt.py b/mesonbuild/modules/qt.py
index f5ce1ed..39c65ed 100644
--- a/mesonbuild/modules/qt.py
+++ b/mesonbuild/modules/qt.py
@@ -15,7 +15,7 @@
import os
from .. import mlog
from .. import build
-from ..mesonlib import MesonException, Popen_safe, extract_as_list
+from ..mesonlib import MesonException, Popen_safe, extract_as_list, File
from ..dependencies import Qt4Dependency, Qt5Dependency
import xml.etree.ElementTree as ET
from . import ModuleReturnValue, get_include_args
@@ -71,19 +71,47 @@ class QtBaseModule:
mlog.log(' {}:'.format(compiler_name.lower()), mlog.red('NO'))
self.tools_detected = True
- def parse_qrc(self, state, fname):
- abspath = os.path.join(state.environment.source_dir, state.subdir, fname)
- relative_part = os.path.dirname(fname)
+ def parse_qrc(self, state, rcc_file):
+ if type(rcc_file) is str:
+ abspath = os.path.join(state.environment.source_dir, state.subdir, rcc_file)
+ rcc_dirname = os.path.dirname(abspath)
+ elif type(rcc_file) is File:
+ abspath = rcc_file.absolute_path(state.environment.source_dir, state.environment.build_dir)
+ rcc_dirname = os.path.dirname(abspath)
+
try:
tree = ET.parse(abspath)
root = tree.getroot()
result = []
for child in root[0]:
if child.tag != 'file':
- mlog.warning("malformed rcc file: ", os.path.join(state.subdir, fname))
+ mlog.warning("malformed rcc file: ", os.path.join(state.subdir, rcc_file))
break
else:
- result.append(os.path.join(relative_part, child.text))
+ resource_path = child.text
+ # We need to guess whether the referenced resource is:
+ # a) in the build directory -> implies a generated file
+ # b) in the source directory
+ # c) somewhere else -> an external dependency file to bundle
+ #
+ # Also, per the qrc documentation, relative paths are always relative
+ # to the qrc file, so they must be resolved from the qrc file's directory.
+ if os.path.isabs(resource_path):
+ # a)
+ if resource_path.startswith(os.path.abspath(state.environment.build_dir)):
+ resource_relpath = os.path.relpath(resource_path, state.environment.build_dir)
+ result.append(File(is_built=True, subdir='', fname=resource_relpath))
+ # either b) or c)
+ else:
+ result.append(File(is_built=False, subdir=state.subdir, fname=resource_path))
+ else:
+ path_from_rcc = os.path.normpath(os.path.join(rcc_dirname, resource_path))
+ # a)
+ if path_from_rcc.startswith(state.environment.build_dir):
+ result.append(File(is_built=True, subdir=state.subdir, fname=resource_path))
+ # b)
+ else:
+ result.append(File(is_built=False, subdir=state.subdir, fname=path_from_rcc))
return result
except Exception:
return []
@@ -102,11 +130,11 @@ class QtBaseModule:
if len(rcc_files) > 0:
if not self.rcc.found():
raise MesonException(err_msg.format('RCC', 'rcc-qt{}'.format(self.qt_version), self.qt_version))
- qrc_deps = []
- for i in rcc_files:
- qrc_deps += self.parse_qrc(state, i)
# custom output name set? -> one output file, multiple otherwise
if len(args) > 0:
+ qrc_deps = []
+ for i in rcc_files:
+ qrc_deps += self.parse_qrc(state, i)
name = args[0]
rcc_kwargs = {'input': rcc_files,
'output': name + '.cpp',
@@ -116,7 +144,11 @@ class QtBaseModule:
sources.append(res_target)
else:
for rcc_file in rcc_files:
- basename = os.path.basename(rcc_file)
+ qrc_deps = self.parse_qrc(state, rcc_file)
+ if type(rcc_file) is str:
+ basename = os.path.basename(rcc_file)
+ elif type(rcc_file) is File:
+ basename = os.path.basename(rcc_file.fname)
name = 'qt' + str(self.qt_version) + '-' + basename.replace('.', '_')
rcc_kwargs = {'input': rcc_file,
'output': name + '.cpp',
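The rewritten parse_qrc above has to decide, for each file entry in a .qrc, whether the resource lives in the build directory (a generated file) or in the source tree, and it always resolves relative entries against the qrc file's own directory. A simplified sketch of just that classification step, with the Meson File object replaced by a plain tuple so it runs standalone; the example paths are made up:

import os

def classify_resource(resource_path, rcc_dirname, build_dir):
    """Mimic parse_qrc's decision, returning (is_built, fname)."""
    if os.path.isabs(resource_path):
        if resource_path.startswith(os.path.abspath(build_dir)):
            # generated file: record it relative to the build directory
            return True, os.path.relpath(resource_path, build_dir)
        # source file, or an external file referenced by absolute path
        return False, resource_path
    # relative entries are always resolved against the qrc file's directory
    path_from_rcc = os.path.normpath(os.path.join(rcc_dirname, resource_path))
    return path_from_rcc.startswith(build_dir), resource_path

# made-up example: a relative entry next to the qrc file in the source tree
print(classify_resource('icons/app.png', '/src/proj/data', '/src/proj/_build'))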
diff --git a/mesonbuild/modules/unstable_icestorm.py b/mesonbuild/modules/unstable_icestorm.py
index 0b7b339..1f548b6 100644
--- a/mesonbuild/modules/unstable_icestorm.py
+++ b/mesonbuild/modules/unstable_icestorm.py
@@ -12,7 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-from .. import mesonlib, compilers, mlog
+from .. import mesonlib
from . import ExtensionModule
@@ -33,7 +33,6 @@ class IceStormModule(ExtensionModule):
def project(self, interpreter, state, args, kwargs):
if not self.yosys_bin:
self.detect_binaries(interpreter)
- result = []
if not len(args):
raise mesonlib.MesonException('Project requires at least one argument, which is the project name.')
proj_name = args[0]
@@ -46,7 +45,7 @@ class IceStormModule(ExtensionModule):
all_sources = interpreter.source_strings_to_files(interpreter.flatten(arg_sources + kwarg_sources))
if 'constraint_file' not in kwargs:
raise mesonlib.MesonException('Constraint file not specified.')
-
+
constraint_file = interpreter.source_strings_to_files(kwargs['constraint_file'])
if len(constraint_file) != 1:
raise mesonlib.MesonException('Constraint file must contain one and only one entry.')
@@ -73,13 +72,13 @@ class IceStormModule(ExtensionModule):
'input': asc_target,
'output': bin_fname,
'command': [self.icepack_bin, '@INPUT@', '@OUTPUT@'],
- 'build_by_default' : True})
+ 'build_by_default': True})
- up_target = interpreter.func_run_target(None, [upload_name], {
+ interpreter.func_run_target(None, [upload_name], {
'command': [self.iceprog_bin, bin_target]})
- time_target = interpreter.func_run_target(None, [time_name], {
- 'command' : [self.icetime_bin, bin_target]})
+ interpreter.func_run_target(None, [time_name], {
+ 'command': [self.icetime_bin, bin_target]})
def initialize():
return IceStormModule()
diff --git a/mesonbuild/mparser.py b/mesonbuild/mparser.py
index 2db3375..0e7524c 100644
--- a/mesonbuild/mparser.py
+++ b/mesonbuild/mparser.py
@@ -143,7 +143,8 @@ class Lexer:
elif tid == 'string':
# Handle here and not on the regexp to give a better error message.
if match_text.find("\n") != -1:
- raise ParseException("Use ''' (three single quotes) for multiline strings.", self.getline(line_start), lineno, col)
+ mlog.warning("""Newline character in a string detected, use ''' (three single quotes) for multiline strings instead.
+This will become a hard error in a future Meson release.""", self.getline(line_start), lineno, col)
value = match_text[1:-1].replace(r"\'", "'")
value = newline_rx.sub(r'\1\n', value)
value = value.replace(r" \\ ".strip(), r" \ ".strip())
diff --git a/mesonbuild/mtest.py b/mesonbuild/mtest.py
index a697106..110a94e 100644
--- a/mesonbuild/mtest.py
+++ b/mesonbuild/mtest.py
@@ -28,6 +28,8 @@ import concurrent.futures as conc
import platform
import signal
import random
+from copy import deepcopy
+import enum
# GNU autotools interprets a return code of 77 from tests it executes to
# mean that the test should be skipped.
@@ -58,56 +60,68 @@ def determine_worker_count():
num_workers = 1
return num_workers
-parser = argparse.ArgumentParser(prog='meson test')
-parser.add_argument('--repeat', default=1, dest='repeat', type=int,
- help='Number of times to run the tests.')
-parser.add_argument('--no-rebuild', default=False, action='store_true',
- help='Do not rebuild before running tests.')
-parser.add_argument('--gdb', default=False, dest='gdb', action='store_true',
- help='Run test under gdb.')
-parser.add_argument('--list', default=False, dest='list', action='store_true',
- help='List available tests.')
-parser.add_argument('--wrapper', default=None, dest='wrapper', type=shlex.split,
- help='wrapper to run tests with (e.g. Valgrind)')
-parser.add_argument('-C', default='.', dest='wd',
- help='directory to cd into before running')
-parser.add_argument('--suite', default=[], dest='include_suites', action='append', metavar='SUITE',
- help='Only run tests belonging to the given suite.')
-parser.add_argument('--no-suite', default=[], dest='exclude_suites', action='append', metavar='SUITE',
- help='Do not run tests belonging to the given suite.')
-parser.add_argument('--no-stdsplit', default=True, dest='split', action='store_false',
- help='Do not split stderr and stdout in test logs.')
-parser.add_argument('--print-errorlogs', default=False, action='store_true',
- help="Whether to print failing tests' logs.")
-parser.add_argument('--benchmark', default=False, action='store_true',
- help="Run benchmarks instead of tests.")
-parser.add_argument('--logbase', default='testlog',
- help="Base name for log file.")
-parser.add_argument('--num-processes', default=determine_worker_count(), type=int,
- help='How many parallel processes to use.')
-parser.add_argument('-v', '--verbose', default=False, action='store_true',
- help='Do not redirect stdout and stderr')
-parser.add_argument('-q', '--quiet', default=False, action='store_true',
- help='Produce less output to the terminal.')
-parser.add_argument('-t', '--timeout-multiplier', type=float, default=None,
- help='Define a multiplier for test timeout, for example '
- ' when running tests in particular conditions they might take'
- ' more time to execute.')
-parser.add_argument('--setup', default=None, dest='setup',
- help='Which test setup to use.')
-parser.add_argument('--test-args', default=[], type=shlex.split,
- help='Arguments to pass to the specified test(s) or all tests')
-parser.add_argument('args', nargs='*',
- help='Optional list of tests to run')
+def buildparser():
+ parser = argparse.ArgumentParser(prog='meson test')
+ parser.add_argument('--repeat', default=1, dest='repeat', type=int,
+ help='Number of times to run the tests.')
+ parser.add_argument('--no-rebuild', default=False, action='store_true',
+ help='Do not rebuild before running tests.')
+ parser.add_argument('--gdb', default=False, dest='gdb', action='store_true',
+ help='Run test under gdb.')
+ parser.add_argument('--list', default=False, dest='list', action='store_true',
+ help='List available tests.')
+ parser.add_argument('--wrapper', default=None, dest='wrapper', type=shlex.split,
+ help='wrapper to run tests with (e.g. Valgrind)')
+ parser.add_argument('-C', default='.', dest='wd',
+ help='directory to cd into before running')
+ parser.add_argument('--suite', default=[], dest='include_suites', action='append', metavar='SUITE',
+ help='Only run tests belonging to the given suite.')
+ parser.add_argument('--no-suite', default=[], dest='exclude_suites', action='append', metavar='SUITE',
+ help='Do not run tests belonging to the given suite.')
+ parser.add_argument('--no-stdsplit', default=True, dest='split', action='store_false',
+ help='Do not split stderr and stdout in test logs.')
+ parser.add_argument('--print-errorlogs', default=False, action='store_true',
+ help="Whether to print failing tests' logs.")
+ parser.add_argument('--benchmark', default=False, action='store_true',
+ help="Run benchmarks instead of tests.")
+ parser.add_argument('--logbase', default='testlog',
+ help="Base name for log file.")
+ parser.add_argument('--num-processes', default=determine_worker_count(), type=int,
+ help='How many parallel processes to use.')
+ parser.add_argument('-v', '--verbose', default=False, action='store_true',
+ help='Do not redirect stdout and stderr')
+ parser.add_argument('-q', '--quiet', default=False, action='store_true',
+ help='Produce less output to the terminal.')
+ parser.add_argument('-t', '--timeout-multiplier', type=float, default=None,
+ help='Define a multiplier for test timeout, for example '
+ ' when running tests in particular conditions they might take'
+ ' more time to execute.')
+ parser.add_argument('--setup', default=None, dest='setup',
+ help='Which test setup to use.')
+ parser.add_argument('--test-args', default=[], type=shlex.split,
+ help='Arguments to pass to the specified test(s) or all tests')
+ parser.add_argument('args', nargs='*',
+ help='Optional list of tests to run')
+ return parser
class TestException(mesonlib.MesonException):
pass
+@enum.unique
+class TestResult(enum.Enum):
+
+ OK = 'OK'
+ TIMEOUT = 'TIMEOUT'
+ SKIP = 'SKIP'
+ FAIL = 'FAIL'
+
+
class TestRun:
def __init__(self, res, returncode, should_fail, duration, stdo, stde, cmd,
env):
+ assert isinstance(res, TestResult)
self.res = res
self.returncode = returncode
self.duration = duration
@@ -122,7 +136,7 @@ class TestRun:
if self.cmd is None:
res += 'NONE\n'
else:
- res += "%s%s\n" % (''.join(["%s='%s' " % (k, v) for k, v in self.env.items()]), ' ' .join(self.cmd))
+ res += '%s%s\n' % (''.join(["%s='%s' " % (k, v) for k, v in self.env.items()]), ' ' .join(self.cmd))
if self.stdo:
res += '--- stdout ---\n'
res += self.stdo
@@ -147,7 +161,7 @@ def decode(stream):
def write_json_log(jsonlogfile, test_name, result):
jresult = {'name': test_name,
'stdout': result.stdo,
- 'result': result.res,
+ 'result': result.res.value,
'duration': result.duration,
'returncode': result.returncode,
'command': result.cmd}
@@ -164,6 +178,155 @@ def run_with_mono(fname):
return True
return False
+def load_benchmarks(build_dir):
+ datafile = os.path.join(build_dir, 'meson-private', 'meson_benchmark_setup.dat')
+ if not os.path.isfile(datafile):
+ raise TestException('Directory {!r} does not seem to be a Meson build directory.'.format(build_dir))
+ with open(datafile, 'rb') as f:
+ obj = pickle.load(f)
+ return obj
+
+def load_tests(build_dir):
+ datafile = os.path.join(build_dir, 'meson-private', 'meson_test_setup.dat')
+ if not os.path.isfile(datafile):
+ raise TestException('Directory {!r} does not seem to be a Meson build directory.'.format(build_dir))
+ with open(datafile, 'rb') as f:
+ obj = pickle.load(f)
+ return obj
+
+
+class SingleTestRunner:
+
+ def __init__(self, test, env, options):
+ self.test = test
+ self.env = env
+ self.options = options
+
+ def _get_cmd(self):
+ if self.test.fname[0].endswith('.jar'):
+ return ['java', '-jar'] + self.test.fname
+ elif not self.test.is_cross_built and run_with_mono(self.test.fname[0]):
+ return ['mono'] + self.test.fname
+ else:
+ if self.test.is_cross_built:
+ if self.test.exe_runner is None:
+ # Cannot run the test on a cross-compiled executable
+ # because there is no exe wrapper.
+ return None
+ else:
+ return [self.test.exe_runner] + self.test.fname
+ else:
+ return self.test.fname
+
+ def run(self):
+ cmd = self._get_cmd()
+ if cmd is None:
+ skip_stdout = 'Not run because cross-compiled binaries cannot be executed.'
+ return TestRun(res=TestResult.SKIP, returncode=GNU_SKIP_RETURNCODE,
+ should_fail=self.test.should_fail, duration=0.0,
+ stdo=skip_stdout, stde=None, cmd=None, env=self.test.env)
+ else:
+ wrap = TestHarness.get_wrapper(self.options)
+ if self.options.gdb:
+ self.test.timeout = None
+ return self._run_cmd(wrap + cmd + self.test.cmd_args + self.options.test_args)
+
+ def _run_cmd(self, cmd):
+ starttime = time.time()
+
+ if len(self.test.extra_paths) > 0:
+ self.env['PATH'] = os.pathsep.join(self.test.extra_paths + ['']) + self.env['PATH']
+
+ # If MALLOC_PERTURB_ is not set, or if it is set to an empty value,
+ # (i.e., the test or the environment don't explicitly set it), set
+ # it ourselves. We do this unconditionally for regular tests
+ # because it is extremely useful to have.
+ # Setting MALLOC_PERTURB_="0" will completely disable this feature.
+ if ('MALLOC_PERTURB_' not in self.env or not self.env['MALLOC_PERTURB_']) and not self.options.benchmark:
+ self.env['MALLOC_PERTURB_'] = str(random.randint(1, 255))
+
+ stdout = None
+ stderr = None
+ if not self.options.verbose:
+ stdout = subprocess.PIPE
+ stderr = subprocess.PIPE if self.options and self.options.split else subprocess.STDOUT
+
+ # Let gdb handle ^C instead of us
+ if self.options.gdb:
+ previous_sigint_handler = signal.getsignal(signal.SIGINT)
+ # Make the meson executable ignore SIGINT while gdb is running.
+ signal.signal(signal.SIGINT, signal.SIG_IGN)
+
+ def preexec_fn():
+ if self.options.gdb:
+ # Restore the SIGINT handler for the child process to
+ # ensure it can handle it.
+ signal.signal(signal.SIGINT, signal.SIG_DFL)
+ else:
+ # We don't want setsid() when running under gdb because gdb
+ # needs the terminal in order to handle ^C; tcsetpgrp() errors
+ # would otherwise leave us unable to use the terminal.
+ os.setsid()
+
+ p = subprocess.Popen(cmd,
+ stdout=stdout,
+ stderr=stderr,
+ env=self.env,
+ cwd=self.test.workdir,
+ preexec_fn=preexec_fn if not is_windows() else None)
+ timed_out = False
+ kill_test = False
+ if self.test.timeout is None:
+ timeout = None
+ elif self.options.timeout_multiplier is not None:
+ timeout = self.test.timeout * self.options.timeout_multiplier
+ else:
+ timeout = self.test.timeout
+ try:
+ (stdo, stde) = p.communicate(timeout=timeout)
+ except subprocess.TimeoutExpired:
+ if self.options.verbose:
+ print('%s timed out (after %d seconds)' % (self.test.name, timeout))
+ timed_out = True
+ except KeyboardInterrupt:
+ mlog.warning('CTRL-C detected while running %s' % (self.test.name))
+ kill_test = True
+ finally:
+ if self.options.gdb:
+ # Let us accept ^C again
+ signal.signal(signal.SIGINT, previous_sigint_handler)
+
+ if kill_test or timed_out:
+ # Python does not provide multiplatform support for
+ # killing a process and all its children so we need
+ # to roll our own.
+ if is_windows():
+ subprocess.call(['taskkill', '/F', '/T', '/PID', str(p.pid)])
+ else:
+ try:
+ os.killpg(os.getpgid(p.pid), signal.SIGKILL)
+ except ProcessLookupError:
+ # Sometimes (e.g. with Wine) this happens.
+ # There's nothing we can do (maybe the process
+ # already died) so carry on.
+ pass
+ (stdo, stde) = p.communicate()
+ endtime = time.time()
+ duration = endtime - starttime
+ stdo = decode(stdo)
+ if stde:
+ stde = decode(stde)
+ if timed_out:
+ res = TestResult.TIMEOUT
+ elif p.returncode == GNU_SKIP_RETURNCODE:
+ res = TestResult.SKIP
+ elif self.test.should_fail == bool(p.returncode):
+ res = TestResult.OK
+ else:
+ res = TestResult.FAIL
+ return TestRun(res, p.returncode, self.test.should_fail, duration, stdo, stde, cmd, self.test.env)
+
+
class TestHarness:
def __init__(self, options):
self.options = options
@@ -179,12 +342,10 @@ class TestHarness:
self.logfile = None
self.jsonlogfile = None
if self.options.benchmark:
- datafile = os.path.join(options.wd, 'meson-private', 'meson_benchmark_setup.dat')
+ self.tests = load_benchmarks(options.wd)
else:
- datafile = os.path.join(options.wd, 'meson-private', 'meson_test_setup.dat')
- if not os.path.isfile(datafile):
- raise TestException('Directory %s does not seem to be a Meson build directory.' % options.wd)
- self.load_datafile(datafile)
+ self.tests = load_tests(options.wd)
+ self.load_suites()
def __del__(self):
if self.logfile:
@@ -192,130 +353,64 @@ class TestHarness:
if self.jsonlogfile:
self.jsonlogfile.close()
- def run_single_test(self, wrap, test):
- if test.fname[0].endswith('.jar'):
- cmd = ['java', '-jar'] + test.fname
- elif not test.is_cross_built and run_with_mono(test.fname[0]):
- cmd = ['mono'] + test.fname
+ def merge_suite_options(self, options, test):
+ if ':' in options.setup:
+ if options.setup not in self.build_data.test_setups:
+ sys.exit("Unknown test setup '%s'." % options.setup)
+ current = self.build_data.test_setups[options.setup]
else:
- if test.is_cross_built:
- if test.exe_runner is None:
- # Can not run test on cross compiled executable
- # because there is no execute wrapper.
- cmd = None
- else:
- cmd = [test.exe_runner] + test.fname
- else:
- cmd = test.fname
-
- if cmd is None:
- res = 'SKIP'
- duration = 0.0
- stdo = 'Not run because can not execute cross compiled binaries.'
- stde = None
- returncode = GNU_SKIP_RETURNCODE
+ full_name = test.project_name + ":" + options.setup
+ if full_name not in self.build_data.test_setups:
+ sys.exit("Test setup '%s' not found from project '%s'." % (options.setup, test.project_name))
+ current = self.build_data.test_setups[full_name]
+ if not options.gdb:
+ options.gdb = current.gdb
+ if options.timeout_multiplier is None:
+ options.timeout_multiplier = current.timeout_multiplier
+ # if options.env is None:
+ # options.env = current.env # FIXME, should probably merge options here.
+ if options.wrapper is not None and current.exe_wrapper is not None:
+ sys.exit('Conflict: both test setup and command line specify an exe wrapper.')
+ if options.wrapper is None:
+ options.wrapper = current.exe_wrapper
+ return current.env.get_env(os.environ.copy())
+
+ def get_test_runner(self, test):
+ options = deepcopy(self.options)
+ if options.setup:
+ env = self.merge_suite_options(options, test)
else:
- cmd = wrap + cmd + test.cmd_args + self.options.test_args
- starttime = time.time()
- child_env = os.environ.copy()
- child_env.update(self.options.global_env.get_env(child_env))
- if isinstance(test.env, build.EnvironmentVariables):
- test.env = test.env.get_env(child_env)
-
- child_env.update(test.env)
- if len(test.extra_paths) > 0:
- child_env['PATH'] = os.pathsep.join(test.extra_paths + ['']) + child_env['PATH']
-
- # If MALLOC_PERTURB_ is not set, or if it is set to an empty value,
- # (i.e., the test or the environment don't explicitly set it), set
- # it ourselves. We do this unconditionally for regular tests
- # because it is extremely useful to have.
- # Setting MALLOC_PERTURB_="0" will completely disable this feature.
- if ('MALLOC_PERTURB_' not in child_env or not child_env['MALLOC_PERTURB_']) and not self.options.benchmark:
- child_env['MALLOC_PERTURB_'] = str(random.randint(1, 255))
-
- setsid = None
- stdout = None
- stderr = None
- if not self.options.verbose:
- stdout = subprocess.PIPE
- stderr = subprocess.PIPE if self.options and self.options.split else subprocess.STDOUT
-
- if not is_windows():
- setsid = os.setsid
-
- p = subprocess.Popen(cmd,
- stdout=stdout,
- stderr=stderr,
- env=child_env,
- cwd=test.workdir,
- preexec_fn=setsid)
- timed_out = False
- kill_test = False
- if test.timeout is None:
- timeout = None
- else:
- timeout = test.timeout * self.options.timeout_multiplier
- try:
- (stdo, stde) = p.communicate(timeout=timeout)
- except subprocess.TimeoutExpired:
- if self.options.verbose:
- print("%s time out (After %d seconds)" % (test.name, timeout))
- timed_out = True
- except KeyboardInterrupt:
- mlog.warning("CTRL-C detected while running %s" % (test.name))
- kill_test = True
-
- if kill_test or timed_out:
- # Python does not provide multiplatform support for
- # killing a process and all its children so we need
- # to roll our own.
- if is_windows():
- subprocess.call(['taskkill', '/F', '/T', '/PID', str(p.pid)])
- else:
- try:
- os.killpg(os.getpgid(p.pid), signal.SIGKILL)
- except ProcessLookupError:
- # Sometimes (e.g. with Wine) this happens.
- # There's nothing we can do (maybe the process
- # already died) so carry on.
- pass
- (stdo, stde) = p.communicate()
- endtime = time.time()
- duration = endtime - starttime
- stdo = decode(stdo)
- if stde:
- stde = decode(stde)
- if timed_out:
- res = 'TIMEOUT'
- self.timeout_count += 1
- self.fail_count += 1
- elif p.returncode == GNU_SKIP_RETURNCODE:
- res = 'SKIP'
- self.skip_count += 1
- elif test.should_fail == bool(p.returncode):
- res = 'OK'
- self.success_count += 1
- else:
- res = 'FAIL'
- self.fail_count += 1
- returncode = p.returncode
- result = TestRun(res, returncode, test.should_fail, duration, stdo, stde, cmd, test.env)
-
- return result
+ env = os.environ.copy()
+ if isinstance(test.env, build.EnvironmentVariables):
+ test.env = test.env.get_env(env)
+ env.update(test.env)
+ return SingleTestRunner(test, env, options)
+
+ def process_test_result(self, result):
+ if result.res is TestResult.TIMEOUT:
+ self.timeout_count += 1
+ self.fail_count += 1
+ elif result.res is TestResult.SKIP:
+ self.skip_count += 1
+ elif result.res is TestResult.OK:
+ self.success_count += 1
+ elif result.res is TestResult.FAIL:
+ self.fail_count += 1
+ else:
+ sys.exit('Unknown test result encountered: {}'.format(result.res))
def print_stats(self, numlen, tests, name, result, i):
startpad = ' ' * (numlen - len('%d' % (i + 1)))
num = '%s%d/%d' % (startpad, i + 1, len(tests))
padding1 = ' ' * (38 - len(name))
- padding2 = ' ' * (8 - len(result.res))
+ padding2 = ' ' * (8 - len(result.res.value))
result_str = '%s %s %s%s%s%5.2f s' % \
- (num, name, padding1, result.res, padding2, result.duration)
- if not self.options.quiet or result.res != 'OK':
- if result.res != 'OK' and mlog.colorize_console:
- if result.res == 'FAIL' or result.res == 'TIMEOUT':
+ (num, name, padding1, result.res.value, padding2, result.duration)
+ if not self.options.quiet or result.res is not TestResult.OK:
+ if result.res is not TestResult.OK and mlog.colorize_console:
+ if result.res is TestResult.FAIL or result.res is TestResult.TIMEOUT:
decorator = mlog.red
- elif result.res == 'SKIP':
+ elif result.res is TestResult.SKIP:
decorator = mlog.yellow
else:
sys.exit('Unreachable code was ... well ... reached.')
@@ -361,9 +456,6 @@ TIMEOUT: %4d
def doit(self):
if self.is_run:
raise RuntimeError('Test harness object can only be used once.')
- if not os.path.isfile(self.datafile):
- print('Test data file. Probably this means that you did not run this in the build directory.')
- return 1
self.is_run = True
tests = self.get_tests()
if not tests:
@@ -384,6 +476,25 @@ TIMEOUT: %4d
(prj_match, st_match) = TestHarness.split_suite_string(suite)
for prjst in test.suite:
(prj, st) = TestHarness.split_suite_string(prjst)
+
+ # The SUITE argument can be passed either as
+ # suite_name
+ # or
+ # project_name:suite_name
+ # so we need to select only the tests belonging to project_name.
+
+ # This branch handles the first case (i.e. SUITE == suite_name).
+
+ # This way we can run tests belonging to different
+ # (sub)projects which share the same suite_name.
+ if not st_match and st == prj_match:
+ return True
+
+ # These two conditions are needed to handle the second form,
+ # i.e. SUITE == project_name:suite_name.
+
+ # This way we select only the tests of
+ # project_name that are in suite_name.
if prj_match and prj != prj_match:
continue
if st_match and st != st_match:
@@ -402,15 +513,6 @@ TIMEOUT: %4d
ss.add(s)
self.suites = list(ss)
- def load_tests(self):
- with open(self.datafile, 'rb') as f:
- self.tests = pickle.load(f)
-
- def load_datafile(self, datafile):
- self.datafile = datafile
- self.load_tests()
- self.load_suites()
-
def get_tests(self):
if not self.tests:
print('No tests defined.')
@@ -444,31 +546,32 @@ TIMEOUT: %4d
logfile_base = os.path.join(self.options.wd, 'meson-logs', self.options.logbase)
if self.options.wrapper:
- namebase = os.path.basename(self.get_wrapper()[0])
+ namebase = os.path.basename(self.get_wrapper(self.options)[0])
elif self.options.setup:
- namebase = self.options.setup
+ namebase = self.options.setup.replace(":", "_")
if namebase:
logfile_base += '-' + namebase.replace(' ', '_')
self.logfilename = logfile_base + '.txt'
self.jsonlogfilename = logfile_base + '.json'
- self.jsonlogfile = open(self.jsonlogfilename, 'w')
- self.logfile = open(self.logfilename, 'w')
+ self.jsonlogfile = open(self.jsonlogfilename, 'w', encoding='utf-8')
+ self.logfile = open(self.logfilename, 'w', encoding='utf-8')
self.logfile.write('Log of Meson test suite run on %s\n\n'
% datetime.datetime.now().isoformat())
- def get_wrapper(self):
+ @staticmethod
+ def get_wrapper(options):
wrap = []
- if self.options.gdb:
+ if options.gdb:
wrap = ['gdb', '--quiet', '--nh']
- if self.options.repeat > 1:
+ if options.repeat > 1:
wrap += ['-ex', 'run', '-ex', 'quit']
# Signal the end of arguments to gdb
wrap += ['--args']
- if self.options.wrapper:
- wrap += self.options.wrapper
+ if options.wrapper:
+ wrap += options.wrapper
assert(isinstance(wrap, list))
return wrap
@@ -487,28 +590,28 @@ TIMEOUT: %4d
futures = []
numlen = len('%d' % len(tests))
self.open_log_files()
- wrap = self.get_wrapper()
startdir = os.getcwd()
if self.options.wd:
os.chdir(self.options.wd)
+ self.build_data = build.load(os.getcwd())
try:
for _ in range(self.options.repeat):
for i, test in enumerate(tests):
visible_name = self.get_pretty_suite(test)
- if self.options.gdb:
- test.timeout = None
-
if not test.is_parallel or self.options.gdb:
self.drain_futures(futures)
futures = []
- res = self.run_single_test(wrap, test)
+ single_test = self.get_test_runner(test)
+ res = single_test.run()
+ self.process_test_result(res)
self.print_stats(numlen, tests, visible_name, res, i)
else:
if not executor:
executor = conc.ThreadPoolExecutor(max_workers=self.options.num_processes)
- f = executor.submit(self.run_single_test, wrap, test)
+ single_test = self.get_test_runner(test)
+ f = executor.submit(single_test.run)
futures.append((f, numlen, tests, visible_name, i))
if self.options.repeat > 1 and self.fail_count:
break
@@ -531,10 +634,11 @@ TIMEOUT: %4d
result.cancel()
if self.options.verbose:
result.result()
+ self.process_test_result(result.result())
self.print_stats(numlen, tests, name, result.result(), i)
def run_special(self):
- 'Tests run by the user, usually something like "under gdb 1000 times".'
+ '''Tests run by the user, usually something like "under gdb 1000 times".'''
if self.is_run:
raise RuntimeError('Can not use run_special after a full run.')
tests = self.get_tests()
@@ -549,29 +653,9 @@ def list_tests(th):
for t in tests:
print(th.get_pretty_suite(t))
-def merge_suite_options(options):
- buildfile = os.path.join(options.wd, 'meson-private/build.dat')
- with open(buildfile, 'rb') as f:
- build = pickle.load(f)
- setups = build.test_setups
- if options.setup not in setups:
- sys.exit('Unknown test setup: %s' % options.setup)
- current = setups[options.setup]
- if not options.gdb:
- options.gdb = current.gdb
- if options.timeout_multiplier is None:
- options.timeout_multiplier = current.timeout_multiplier
-# if options.env is None:
-# options.env = current.env # FIXME, should probably merge options here.
- if options.wrapper is not None and current.exe_wrapper is not None:
- sys.exit('Conflict: both test setup and command line specify an exe wrapper.')
- if options.wrapper is None:
- options.wrapper = current.exe_wrapper
- return current.env
-
def rebuild_all(wd):
if not os.path.isfile(os.path.join(wd, 'build.ninja')):
- print("Only ninja backend is supported to rebuild tests before running them.")
+ print('Only ninja backend is supported to rebuild tests before running them.')
return True
ninja = environment.detect_ninja()
@@ -583,26 +667,17 @@ def rebuild_all(wd):
p.communicate()
if p.returncode != 0:
- print("Could not rebuild")
+ print('Could not rebuild')
return False
return True
def run(args):
- options = parser.parse_args(args)
+ options = buildparser().parse_args(args)
if options.benchmark:
options.num_processes = 1
- if options.setup is not None:
- global_env = merge_suite_options(options)
- else:
- global_env = build.EnvironmentVariables()
- if options.timeout_multiplier is None:
- options.timeout_multiplier = 1
-
- setattr(options, 'global_env', global_env)
-
if options.verbose and options.quiet:
print('Can not be both quiet and verbose at the same time.')
return 1
@@ -621,7 +696,7 @@ def run(args):
if check_bin is not None:
exe = ExternalProgram(check_bin, silent=True)
if not exe.found():
- sys.exit("Could not find requested program: %s" % check_bin)
+ sys.exit('Could not find requested program: %s' % check_bin)
options.wd = os.path.abspath(options.wd)
if not options.list and not options.no_rebuild:
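The refactor above replaces the ad-hoc result strings with a TestResult enum and funnels every finished test through process_test_result(). A small sketch of how the enum-based counting behaves, independent of the rest of the harness; the counters mirror the ones kept by TestHarness:

import enum

@enum.unique
class TestResult(enum.Enum):
    OK = 'OK'
    TIMEOUT = 'TIMEOUT'
    SKIP = 'SKIP'
    FAIL = 'FAIL'

counts = {'success': 0, 'fail': 0, 'skip': 0, 'timeout': 0}

def process_test_result(res):
    # A TIMEOUT is counted both as a timeout and as a failure, as in the diff.
    if res is TestResult.TIMEOUT:
        counts['timeout'] += 1
        counts['fail'] += 1
    elif res is TestResult.SKIP:
        counts['skip'] += 1
    elif res is TestResult.OK:
        counts['success'] += 1
    else:
        counts['fail'] += 1

for r in [TestResult.OK, TestResult.SKIP, TestResult.TIMEOUT]:
    process_test_result(r)
print(counts)   # {'success': 1, 'fail': 1, 'skip': 1, 'timeout': 1}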
diff --git a/mesonbuild/optinterpreter.py b/mesonbuild/optinterpreter.py
index 16eaf78..b4156ff 100644
--- a/mesonbuild/optinterpreter.py
+++ b/mesonbuild/optinterpreter.py
@@ -78,7 +78,7 @@ def BooleanParser(name, description, kwargs):
kwargs.get('value', True),
kwargs.get('yield', coredata.default_yielding))
-@permitted_kwargs({'value', 'yiel', 'choices'})
+@permitted_kwargs({'value', 'yield', 'choices'})
def ComboParser(name, description, kwargs):
if 'choices' not in kwargs:
raise OptionException('Combo option missing "choices" keyword.')
diff --git a/mesonbuild/rewriter.py b/mesonbuild/rewriter.py
index 0191c30..1127288 100644
--- a/mesonbuild/rewriter.py
+++ b/mesonbuild/rewriter.py
@@ -29,18 +29,20 @@ from mesonbuild import mlog
import sys, traceback
import argparse
-parser = argparse.ArgumentParser(prog='meson rewrite')
-
-parser.add_argument('--sourcedir', default='.',
- help='Path to source directory.')
-parser.add_argument('--target', default=None,
- help='Name of target to edit.')
-parser.add_argument('--filename', default=None,
- help='Name of source file to add or remove to target.')
-parser.add_argument('commands', nargs='+')
+def buildparser():
+ parser = argparse.ArgumentParser(prog='meson rewrite')
+
+ parser.add_argument('--sourcedir', default='.',
+ help='Path to source directory.')
+ parser.add_argument('--target', default=None,
+ help='Name of target to edit.')
+ parser.add_argument('--filename', default=None,
+ help='Name of source file to add or remove to target.')
+ parser.add_argument('commands', nargs='+')
+ return parser
def run(args):
- options = parser.parse_args(args)
+ options = buildparser().parse_args(args)
if options.target is None or options.filename is None:
sys.exit("Must specify both target and filename.")
print('This tool is highly experimental, use with care.')
@@ -54,11 +56,7 @@ def run(args):
sys.exit('Unknown command: ' + options.commands[0])
except Exception as e:
if isinstance(e, MesonException):
- if hasattr(e, 'file') and hasattr(e, 'lineno') and hasattr(e, 'colno'):
- mlog.log(mlog.red('\nMeson encountered an error in file %s, line %d, column %d:' % (e.file, e.lineno, e.colno)))
- else:
- mlog.log(mlog.red('\nMeson encountered an error:'))
- mlog.log(e)
+ mlog.exception(e)
else:
traceback.print_exc()
return 1
diff --git a/mesonbuild/scripts/coverage.py b/mesonbuild/scripts/coverage.py
index 47f4cda..916c84f 100644
--- a/mesonbuild/scripts/coverage.py
+++ b/mesonbuild/scripts/coverage.py
@@ -14,74 +14,135 @@
from mesonbuild import environment
-import sys, os, subprocess, pathlib
-
-def coverage(source_root, build_root, log_dir):
- (gcovr_exe, lcov_exe, genhtml_exe) = environment.find_coverage_tools()
- if gcovr_exe:
- subprocess.check_call([gcovr_exe,
- '-x',
- '-r', source_root,
- '-o', os.path.join(log_dir, 'coverage.xml'),
- ])
- subprocess.check_call([gcovr_exe,
- '-r', source_root,
- '-o', os.path.join(log_dir, 'coverage.txt'),
- ])
- if lcov_exe and genhtml_exe:
- htmloutdir = os.path.join(log_dir, 'coveragereport')
- covinfo = os.path.join(log_dir, 'coverage.info')
- initial_tracefile = covinfo + '.initial'
- run_tracefile = covinfo + '.run'
- raw_tracefile = covinfo + '.raw'
- subprocess.check_call([lcov_exe,
- '--directory', build_root,
- '--capture',
- '--initial',
- '--output-file',
- initial_tracefile])
- subprocess.check_call([lcov_exe,
- '--directory', build_root,
- '--capture',
- '--output-file', run_tracefile,
- '--no-checksum',
- '--rc', 'lcov_branch_coverage=1',
- ])
- # Join initial and test results.
- subprocess.check_call([lcov_exe,
- '-a', initial_tracefile,
- '-a', run_tracefile,
- '-o', raw_tracefile])
- # Remove all directories outside the source_root from the covinfo
- subprocess.check_call([lcov_exe,
- '--extract', raw_tracefile,
- os.path.join(source_root, '*'),
- '--output-file', covinfo])
- subprocess.check_call([genhtml_exe,
- '--prefix', build_root,
- '--output-directory', htmloutdir,
- '--title', 'Code coverage',
- '--legend',
- '--show-details',
- '--branch-coverage',
- covinfo])
- if gcovr_exe:
+import argparse, sys, os, subprocess, pathlib
+
+def coverage(outputs, source_root, subproject_root, build_root, log_dir):
+ outfiles = []
+ exitcode = 0
+
+ (gcovr_exe, gcovr_new_rootdir, lcov_exe, genhtml_exe) = environment.find_coverage_tools()
+
+ # gcovr >= 3.1 interprets rootdir differently
+ if gcovr_new_rootdir:
+ gcovr_rootdir = build_root
+ else:
+ gcovr_rootdir = source_root
+
+ if not outputs or 'xml' in outputs:
+ if gcovr_exe:
+ subprocess.check_call([gcovr_exe,
+ '-x',
+ '-r', gcovr_rootdir,
+ '-e', subproject_root,
+ '-o', os.path.join(log_dir, 'coverage.xml'),
+ ])
+ outfiles.append(('Xml', pathlib.Path(log_dir, 'coverage.xml')))
+ elif outputs:
+ print('gcovr needed to generate Xml coverage report')
+ exitcode = 1
+
+ if not outputs or 'text' in outputs:
+ if gcovr_exe:
+ subprocess.check_call([gcovr_exe,
+ '-r', gcovr_rootdir,
+ '-e', subproject_root,
+ '-o', os.path.join(log_dir, 'coverage.txt'),
+ ])
+ outfiles.append(('Text', pathlib.Path(log_dir, 'coverage.txt')))
+ elif outputs:
+ print('gcovr needed to generate text coverage report')
+ exitcode = 1
+
+ if not outputs or 'html' in outputs:
+ if lcov_exe and genhtml_exe:
+ htmloutdir = os.path.join(log_dir, 'coveragereport')
+ covinfo = os.path.join(log_dir, 'coverage.info')
+ initial_tracefile = covinfo + '.initial'
+ run_tracefile = covinfo + '.run'
+ raw_tracefile = covinfo + '.raw'
+ subprocess.check_call([lcov_exe,
+ '--directory', build_root,
+ '--capture',
+ '--initial',
+ '--output-file',
+ initial_tracefile])
+ subprocess.check_call([lcov_exe,
+ '--directory', build_root,
+ '--capture',
+ '--output-file', run_tracefile,
+ '--no-checksum',
+ '--rc', 'lcov_branch_coverage=1',
+ ])
+ # Join initial and test results.
+ subprocess.check_call([lcov_exe,
+ '-a', initial_tracefile,
+ '-a', run_tracefile,
+ '-o', raw_tracefile])
+ # Remove all directories outside the source_root from the covinfo
+ subprocess.check_call([lcov_exe,
+ '--extract', raw_tracefile,
+ os.path.join(source_root, '*'),
+ '--output-file', covinfo])
+ # Remove all directories inside subproject dir
+ subprocess.check_call([lcov_exe,
+ '--remove', covinfo,
+ os.path.join(subproject_root, '*'),
+ '--output-file', covinfo])
+ subprocess.check_call([genhtml_exe,
+ '--prefix', build_root,
+ '--output-directory', htmloutdir,
+ '--title', 'Code coverage',
+ '--legend',
+ '--show-details',
+ '--branch-coverage',
+ covinfo])
+ outfiles.append(('Html', pathlib.Path(htmloutdir, 'index.html')))
+ elif gcovr_exe and gcovr_new_rootdir:
+ htmloutdir = os.path.join(log_dir, 'coveragereport')
+ if not os.path.isdir(htmloutdir):
+ os.mkdir(htmloutdir)
+ subprocess.check_call([gcovr_exe,
+ '--html',
+ '--html-details',
+ '-r', build_root,
+ '-e', subproject_root,
+ '-o', os.path.join(htmloutdir, 'index.html'),
+ ])
+ outfiles.append(('Html', pathlib.Path(htmloutdir, 'index.html')))
+ elif outputs:
+ print('lcov/genhtml or gcovr >= 3.1 needed to generate Html coverage report')
+ exitcode = 1
+
+ if not outputs and not outfiles:
+ print('Need gcovr or lcov/genhtml to generate any coverage reports')
+ exitcode = 1
+
+ if outfiles:
print('')
- print('XML coverage report can be found at',
- pathlib.Path(log_dir, 'coverage.xml').as_uri())
- print('Text coverage report can be found at',
- pathlib.Path(log_dir, 'coverage.txt').as_uri())
- if lcov_exe and genhtml_exe:
- print('Html coverage report can be found at',
- pathlib.Path(htmloutdir, 'index.html').as_uri())
- return 0
+ for (filetype, path) in outfiles:
+ print(filetype + ' coverage report can be found at', path.as_uri())
+
+ return exitcode
def run(args):
if not os.path.isfile('build.ninja'):
print('Coverage currently only works with the Ninja backend.')
return 1
- source_root, build_root, log_dir = args[:]
- return coverage(source_root, build_root, log_dir)
+ parser = argparse.ArgumentParser(description='Generate coverage reports')
+ parser.add_argument('--text', dest='outputs', action='append_const',
+ const='text', help='generate Text report')
+ parser.add_argument('--xml', dest='outputs', action='append_const',
+ const='xml', help='generate Xml report')
+ parser.add_argument('--html', dest='outputs', action='append_const',
+ const='html', help='generate Html report')
+ parser.add_argument('source_root')
+ parser.add_argument('subproject_root')
+ parser.add_argument('build_root')
+ parser.add_argument('log_dir')
+ options = parser.parse_args(args)
+ return coverage(options.outputs, options.source_root,
+ options.subproject_root, options.build_root,
+ options.log_dir)
if __name__ == '__main__':
sys.exit(run(sys.argv[1:]))
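With the new argparse front end, coverage.py can be asked for specific report formats instead of always producing every report the installed tools allow. A hedged usage sketch; the directory paths are placeholders, and run() still expects the current directory to contain build.ninja, as the unchanged check above shows:

# Hypothetical invocation of the reworked coverage script; paths are placeholders.
from mesonbuild.scripts import coverage

rc = coverage.run(['--xml', '--text',
                   '/path/to/source',               # source_root
                   '/path/to/source/subprojects',   # subproject_root
                   '/path/to/build',                # build_root
                   '/path/to/build/meson-logs'])    # log_dir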
diff --git a/mesonbuild/scripts/depfixer.py b/mesonbuild/scripts/depfixer.py
index ee63147..41ede1d 100644
--- a/mesonbuild/scripts/depfixer.py
+++ b/mesonbuild/scripts/depfixer.py
@@ -14,6 +14,7 @@
import sys, struct
+import shutil, subprocess
SHT_STRTAB = 3
DT_NEEDED = 1
@@ -337,20 +338,68 @@ class Elf(DataSizes):
entry.write(self.bf)
return None
+def fix_elf(fname, new_rpath, verbose=True):
+ with Elf(fname, verbose) as e:
+ if new_rpath is None:
+ e.print_rpath()
+ e.print_runpath()
+ else:
+ e.fix_rpath(new_rpath)
+
+def get_darwin_rpaths_to_remove(fname):
+ out = subprocess.check_output(['otool', '-l', fname], universal_newlines=True)
+ result = []
+ current_cmd = 'FOOBAR'
+ for line in out.split('\n'):
+ line = line.strip()
+ if ' ' not in line:
+ continue
+ key, value = line.strip().split(' ', 1)
+ if key == 'cmd':
+ current_cmd = value
+ if key == 'path' and current_cmd == 'LC_RPATH':
+ rp = value.split('(', 1)[0].strip()
+ result.append(rp)
+ return result
+
+def fix_darwin(fname, new_rpath):
+ try:
+ rpaths = get_darwin_rpaths_to_remove(fname)
+ except subprocess.CalledProcessError:
+ # Otool failed, which happens when invoked on a
+ # non-executable target. Just return.
+ return
+ try:
+ for rp in rpaths:
+ subprocess.check_call(['install_name_tool', '-delete_rpath', rp, fname])
+ if new_rpath != '':
+ subprocess.check_call(['install_name_tool', '-add_rpath', new_rpath, fname])
+ except Exception as e:
+ raise
+ sys.exit(0)
+
+def fix_rpath(fname, new_rpath, verbose=True):
+ try:
+ fix_elf(fname, new_rpath, verbose)
+ return 0
+ except SystemExit as e:
+ if isinstance(e.code, int) and e.code == 0:
+ pass
+ else:
+ raise
+ if shutil.which('install_name_tool'):
+ fix_darwin(fname, new_rpath)
+ return 0
+
def run(args):
if len(args) < 1 or len(args) > 2:
print('This application resets target rpath.')
print('Don\'t run this unless you know what you are doing.')
print('%s: <binary file> <prefix>' % sys.argv[0])
sys.exit(1)
- with Elf(args[0]) as e:
- if len(args) == 1:
- e.print_rpath()
- e.print_runpath()
- else:
- new_rpath = args[1]
- e.fix_rpath(new_rpath)
- return 0
+ fname = args[0]
+ new_rpath = None if len(args) == 1 else args[1]
+ return fix_rpath(fname, new_rpath)
if __name__ == '__main__':
- run(sys.argv[1:])
+ sys.exit(run(sys.argv[1:]))
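The new fix_rpath() above first tries the ELF path and, if the ELF handler bails out with a zero exit status (which the surrounding code treats as "not an ELF file"), falls back to install_name_tool on macOS when it is available. A compact sketch of that dispatch with the ELF and Mach-O helpers stubbed out; the stubs are illustrative, not the real implementations:

import shutil, sys

def fix_elf_stub(fname):
    # Stand-in: pretend non-.so files make the ELF handler exit cleanly.
    if not fname.endswith('.so'):
        sys.exit(0)

def fix_darwin_stub(fname):
    print('would run install_name_tool on', fname)

def fix_rpath(fname):
    try:
        fix_elf_stub(fname)
        return 0                      # handled as an ELF file
    except SystemExit as e:
        if isinstance(e.code, int) and e.code == 0:
            pass                      # not an ELF file: try the Mach-O path
        else:
            raise
    if shutil.which('install_name_tool'):
        fix_darwin_stub(fname)
    return 0

fix_rpath('libfoo.so')      # takes the ELF branch
fix_rpath('Foo.framework')  # falls through to the Darwin branch, if the tool exists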
diff --git a/mesonbuild/scripts/gtkdochelper.py b/mesonbuild/scripts/gtkdochelper.py
index 2a5ee8b..3fe7fb7 100644
--- a/mesonbuild/scripts/gtkdochelper.py
+++ b/mesonbuild/scripts/gtkdochelper.py
@@ -58,6 +58,8 @@ def gtkdoc_run_check(cmd, cwd, library_path=None):
if out:
err_msg.append(out)
raise MesonException('\n'.join(err_msg))
+ elif out:
+ print(out)
def build_gtkdoc(source_root, build_root, doc_subdir, src_subdirs,
main_file, module,
diff --git a/mesonbuild/scripts/meson_exe.py b/mesonbuild/scripts/meson_exe.py
index c43702e..46d501f 100644
--- a/mesonbuild/scripts/meson_exe.py
+++ b/mesonbuild/scripts/meson_exe.py
@@ -21,8 +21,10 @@ import subprocess
options = None
-parser = argparse.ArgumentParser()
-parser.add_argument('args', nargs='+')
+def buildparser():
+ parser = argparse.ArgumentParser()
+ parser.add_argument('args', nargs='+')
+ return parser
def is_windows():
platname = platform.system().lower()
@@ -70,7 +72,7 @@ def run_exe(exe):
def run(args):
global options
- options = parser.parse_args(args)
+ options = buildparser().parse_args(args)
if len(options.args) != 1:
print('Test runner for Meson. Do not run on your own, mmm\'kay?')
print(sys.argv[0] + ' [data file]')
diff --git a/mesonbuild/scripts/meson_install.py b/mesonbuild/scripts/meson_install.py
index cbc782d..013f2a0 100644
--- a/mesonbuild/scripts/meson_install.py
+++ b/mesonbuild/scripts/meson_install.py
@@ -97,6 +97,10 @@ def restore_selinux_contexts():
# is ignored quietly.
return
+ if not shutil.which('restorecon'):
+ # If we don't have restorecon, failure is ignored quietly.
+ return
+
with subprocess.Popen(['restorecon', '-F', '-f-', '-0'],
stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE) as proc:
out, err = proc.communicate(input=b'\0'.join(os.fsencode(f) for f in selinux_updates) + b'\0')
@@ -283,16 +287,10 @@ def run_install_script(d):
rc = subprocess.call(script + args, env=child_env)
if rc != 0:
sys.exit(rc)
- except:
+ except OSError:
print('Failed to run install script {!r}'.format(name))
sys.exit(1)
-def is_elf_platform():
- platname = platform.system().lower()
- if platname == 'darwin' or platname == 'windows' or platname == 'cygwin':
- return False
- return True
-
def check_for_stampfile(fname):
'''Some languages e.g. Rust have output files
whose names are not known at configure time.
@@ -368,10 +366,9 @@ def install_targets(d):
print("Symlink creation does not work on this platform. "
"Skipping all symlinking.")
printed_symlink_error = True
- if is_elf_platform() and os.path.isfile(outname):
+ if os.path.isfile(outname):
try:
- e = depfixer.Elf(outname, False)
- e.fix_rpath(install_rpath)
+ depfixer.fix_rpath(outname, install_rpath, False)
except SystemExit as e:
if isinstance(e.code, int) and e.code == 0:
pass
diff --git a/mesonbuild/scripts/yelphelper.py b/mesonbuild/scripts/yelphelper.py
index ab99267..0f8b0b8 100644
--- a/mesonbuild/scripts/yelphelper.py
+++ b/mesonbuild/scripts/yelphelper.py
@@ -17,6 +17,7 @@ import subprocess
import shutil
import argparse
from .. import mlog
+from ..mesonlib import has_path_sep
from . import destdir_join
from .gettext import read_linguas
@@ -79,7 +80,7 @@ def install_help(srcdir, blddir, sources, media, langs, install_dir, destdir, pr
elif symlinks:
srcfile = os.path.join(c_install_dir, m)
mlog.log('Symlinking %s to %s.' % (outfile, srcfile))
- if '/' in m or '\\' in m:
+ if has_path_sep(m):
os.makedirs(os.path.dirname(outfile), exist_ok=True)
try:
try:
@@ -94,7 +95,7 @@ def install_help(srcdir, blddir, sources, media, langs, install_dir, destdir, pr
# Lang doesn't have media file so copy it over 'C' one
infile = os.path.join(srcdir, 'C', m)
mlog.log('Installing %s to %s' % (infile, outfile))
- if '/' in m or '\\' in m:
+ if has_path_sep(m):
os.makedirs(os.path.dirname(outfile), exist_ok=True)
shutil.copyfile(infile, outfile)
shutil.copystat(infile, outfile)
diff --git a/mesonbuild/wrap/__init__.py b/mesonbuild/wrap/__init__.py
index 019634c..6e2bc83 100644
--- a/mesonbuild/wrap/__init__.py
+++ b/mesonbuild/wrap/__init__.py
@@ -25,7 +25,12 @@ from enum import Enum
# to use 'nofallback' so that any 'copylib' wraps will be
# download as subprojects.
#
+# --wrap-mode=forcefallback will ignore external dependencies,
+# even if they match the version requirements, and automatically
+# use the fallback if one was provided. This is useful, for example,
+# to make sure a project builds when using the fallbacks.
+#
# Note that these options do not affect subprojects that
# are git submodules since those are only usable in git
# repositories, and you almost always want to download them.
-WrapMode = Enum('WrapMode', 'default nofallback nodownload')
+WrapMode = Enum('WrapMode', 'default nofallback nodownload forcefallback')
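The forcefallback mode is simply a fourth member of the WrapMode enum. A minimal sketch of how a caller might map the command-line string onto it; the --wrap-mode option name comes from the comment above, while the exact wiring inside Meson is not shown in this diff:

from enum import Enum

WrapMode = Enum('WrapMode', 'default nofallback nodownload forcefallback')

def parse_wrap_mode(value):
    # 'forcefallback' -> WrapMode.forcefallback, and so on for the other names.
    return WrapMode[value]

mode = parse_wrap_mode('forcefallback')
use_fallback_even_if_found = (mode is WrapMode.forcefallback)
print(mode, use_fallback_even_if_found)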
diff --git a/mesonconf.py b/mesonconf.py
index d1874e0..894ec01 100755
--- a/mesonconf.py
+++ b/mesonconf.py
@@ -14,10 +14,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-from mesonbuild import mesonmain
import sys
if __name__ == '__main__':
- print('Warning: This executable is deprecated. Use "meson configure" instead.',
- file=sys.stderr)
- sys.exit(mesonmain.run(['configure'] + sys.argv[1:]))
+ sys.exit('Error: This executable is no more. Use "meson configure" instead.')
diff --git a/mesonintrospect.py b/mesonintrospect.py
index 5cc07bf..9ef1535 100755
--- a/mesonintrospect.py
+++ b/mesonintrospect.py
@@ -14,10 +14,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-from mesonbuild import mesonmain
import sys
if __name__ == '__main__':
- print('Warning: This executable is deprecated. Use "meson introspect" instead.',
- file=sys.stderr)
- sys.exit(mesonmain.run(['introspect'] + sys.argv[1:]))
+ sys.exit('Error: This executable is no more. Use "meson introspect" instead.')
diff --git a/mesonrewriter.py b/mesonrewriter.py
index e6f2637..ef47e57 100755
--- a/mesonrewriter.py
+++ b/mesonrewriter.py
@@ -23,10 +23,7 @@
# - move targets
# - reindent?
-from mesonbuild import mesonmain
import sys
if __name__ == '__main__':
- print('Warning: This executable is deprecated. Use "meson rewrite" instead.',
- file=sys.stderr)
- sys.exit(mesonmain.run(['rewrite'] + sys.argv[1:]))
+ sys.exit('Error: This executable is no more. Use "meson rewrite" instead.')
diff --git a/mesontest.py b/mesontest.py
index c2d39d6..e973d56 100755
--- a/mesontest.py
+++ b/mesontest.py
@@ -16,10 +16,7 @@
# A tool to run tests in many different ways.
-from mesonbuild import mesonmain
import sys
if __name__ == '__main__':
- print('Warning: This executable is deprecated. Use "meson test" instead.',
- file=sys.stderr)
- sys.exit(mesonmain.run(['test'] + sys.argv[1:]))
+ sys.exit('Error: This executable is no more. Use "meson test" instead.')
diff --git a/msi/createmsi.py b/msi/createmsi.py
index 3ea0958..499f4b0 100755
--- a/msi/createmsi.py
+++ b/msi/createmsi.py
@@ -50,10 +50,14 @@ class PackageGenerator:
self.staging_dirs = ['dist', 'dist2']
if self.bytesize == 64:
self.progfile_dir = 'ProgramFiles64Folder'
- self.redist_path = 'C:\\Program Files (x86)\\Microsoft Visual Studio\\2017\\Community\\VC\\Redist\\MSVC\\14.11.25325\\MergeModules\\Microsoft_VC141_CRT_x64.msm'
+ redist_glob = 'C:\\Program Files (x86)\\Microsoft Visual Studio\\2017\\Community\\VC\\Redist\\MSVC\\*\\MergeModules\\Microsoft_VC141_CRT_x64.msm'
else:
self.progfile_dir = 'ProgramFilesFolder'
- self.redist_path = 'C:\\Program Files\\Microsoft Visual Studio\\2017\\Community\\VC\\Redist\\MSVC\\14.11.25325\\MergeModules\\Microsoft_VC141_CRT_x86.msm'
+ redist_glob = 'C:\\Program Files\\Microsoft Visual Studio\\2017\\Community\\VC\\Redist\\MSVC\\*\\MergeModules\\Microsoft_VC141_CRT_x86.msm'
+ trials = glob(redist_glob)
+ if len(trials) != 1:
+            sys.exit('Expected exactly one matching redist dir, found {}.'.format(len(trials)))
+ self.redist_path = trials[0]
self.component_num = 0
self.feature_properties = {
self.staging_dirs[0]: {
@@ -149,7 +153,7 @@ class PackageGenerator:
'SourceFile': self.redist_path,
'DiskId': '1',
'Language': '0',
- })
+ })
ET.SubElement(product, 'Property', {
'Id': 'WIXUI_INSTALLDIR',
@@ -181,7 +185,7 @@ class PackageGenerator:
'AllowAdvertise': 'no',
'Display': 'hidden',
'Level': '1',
- })
+ })
ET.SubElement(vcredist_feature, 'MergeRef', {'Id': 'VCRedist'})
ET.ElementTree(self.root).write(self.main_xml, encoding='utf-8', xml_declaration=True)
# ElementTree can not do prettyprinting so do it manually
@@ -219,7 +223,6 @@ class PackageGenerator:
})
self.component_num += 1
for f in cur_node.files:
- file_source = os.path.join(current_dir, f).replace('\\', '\\\\')
file_id = os.path.join(current_dir, f).replace('\\', '_').replace('#', '_').replace('-', '_')
ET.SubElement(comp_xml_node, 'File', {
'Id': file_id,
@@ -253,6 +256,7 @@ class PackageGenerator:
if __name__ == '__main__':
if not os.path.exists('meson.py'):
sys.exit(print('Run me in the top level source dir.'))
+ subprocess.check_call(['pip', 'install', '--upgrade', 'cx_freeze'])
p = PackageGenerator()
p.build_dist()
diff --git a/run_project_tests.py b/run_project_tests.py
index 9599155..a1d36ef 100755
--- a/run_project_tests.py
+++ b/run_project_tests.py
@@ -77,7 +77,8 @@ class AutoDeletedDir:
failing_logs = []
print_debug = 'MESON_PRINT_TEST_OUTPUT' in os.environ
-do_debug = not {'MESON_PRINT_TEST_OUTPUT', 'TRAVIS', 'APPVEYOR'}.isdisjoint(os.environ)
+under_ci = not {'TRAVIS', 'APPVEYOR'}.isdisjoint(os.environ)
+do_debug = under_ci or print_debug
no_meson_log_msg = 'No meson-log.txt found.'
system_compiler = None
@@ -108,8 +109,6 @@ def setup_commands(optbackend):
if backend is None:
if msbuild_exe is not None:
backend = 'vs' # Meson will auto-detect VS version to use
- elif mesonlib.is_osx():
- backend = 'xcode'
else:
backend = 'ninja'
# Set backend arguments for Meson
@@ -437,6 +436,55 @@ def have_java():
return True
return False
+def skippable(suite, test):
+ if not under_ci:
+ return True
+
+ if not suite.endswith('frameworks'):
+ return True
+
+ # gtk-doc test may be skipped, pending upstream fixes for spaces in
+ # filenames landing in the distro used for CI
+ if test.endswith('10 gtk-doc'):
+ return True
+
+ # No frameworks test should be skipped on linux CI, as we expect all
+ # prerequisites to be installed
+ if mesonlib.is_linux():
+ return False
+
+ # Boost test should only be skipped for windows CI build matrix entries
+ # which don't define BOOST_ROOT
+ if test.endswith('1 boost'):
+ if mesonlib.is_windows():
+ return 'BOOST_ROOT' not in os.environ
+ return False
+
+ # Other framework tests are allowed to be skipped on other platforms
+ return True
+
+def skip_csharp(backend):
+ if backend is not Backend.ninja:
+ return True
+ if not shutil.which('resgen'):
+ return True
+ if shutil.which('mcs'):
+ return False
+ if shutil.which('csc'):
+ # Only support VS2017 for now. Earlier versions fail
+ # under CI in mysterious ways.
+ try:
+ stdo = subprocess.check_output(['csc', '/version'])
+ except subprocess.CalledProcessError:
+ return True
+ # Having incrementing version numbers would be too easy.
+ # Microsoft reset the versioning back to 1.0 (from 4.x)
+ # when they got the Roslyn based compiler. Thus there
+ # is NO WAY to reliably do version number comparisons.
+ # Only support the version that ships with VS2017.
+ return not stdo.startswith(b'2.')
+ return True
+
def detect_tests_to_run():
# Name, subdirectory, skip condition.
all_tests = [
@@ -450,7 +498,7 @@ def detect_tests_to_run():
('platform-linux', 'linuxlike', mesonlib.is_osx() or mesonlib.is_windows()),
('java', 'java', backend is not Backend.ninja or mesonlib.is_osx() or not have_java()),
- ('C#', 'csharp', backend is not Backend.ninja or not shutil.which('mcs')),
+ ('C#', 'csharp', skip_csharp(backend)),
('vala', 'vala', backend is not Backend.ninja or not shutil.which('valac')),
('rust', 'rust', backend is not Backend.ninja or not shutil.which('rustc')),
('d', 'd', backend is not Backend.ninja or not have_d_compiler()),
@@ -460,20 +508,9 @@ def detect_tests_to_run():
('swift', 'swift', backend not in (Backend.ninja, Backend.xcode) or not shutil.which('swiftc')),
('python3', 'python3', backend is not Backend.ninja),
('fpga', 'fpga', shutil.which('yosys') is None),
+ ('frameworks', 'frameworks', False),
]
gathered_tests = [(name, gather_tests('test cases/' + subdir), skip) for name, subdir, skip in all_tests]
- if mesonlib.is_windows():
- # TODO: Set BOOST_ROOT in .appveyor.yml
- gathered_tests += [('framework', ['test cases/frameworks/1 boost'], 'BOOST_ROOT' not in os.environ)]
- elif mesonlib.is_osx():
- if os.path.exists('/usr/local/include/boost'):
- # Just do the BOOST test
- gathered_tests += [('framework', ['test cases/frameworks/1 boost'], False)]
- elif mesonlib.is_cygwin():
- # Just do the BOOST test
- gathered_tests += [('framework', ['test cases/frameworks/1 boost'], False)]
- else:
- gathered_tests += [('framework', gather_tests('test cases/frameworks'), False)]
return gathered_tests
def run_tests(all_tests, log_name_base, extra_args):
@@ -532,7 +569,7 @@ def _run_tests(all_tests, log_name_base, extra_args):
for (testname, t, result) in futures:
sys.stdout.flush()
result = result.result()
- if result is None or 'MESON_SKIP_TEST' in result.stdo:
+ if (result is None) or (('MESON_SKIP_TEST' in result.stdo) and (skippable(name, t))):
print(yellow('Skipping:'), t)
current_test = ET.SubElement(current_suite, 'testcase', {'name': testname,
'classname': name})
diff --git a/run_tests.py b/run_tests.py
index 1cc3983..648e6ce 100755
--- a/run_tests.py
+++ b/run_tests.py
@@ -131,7 +131,7 @@ def get_fake_options(prefix):
return opts
def should_run_linux_cross_tests():
- return shutil.which('arm-linux-gnueabihf-gcc-7') and not platform.machine().lower().startswith('arm')
+ return shutil.which('arm-linux-gnueabihf-gcc') and not platform.machine().lower().startswith('arm')
def run_configure_inprocess(meson_command, commandlist):
old_stdout = sys.stdout
diff --git a/run_unittests.py b/run_unittests.py
index c59c7bf..8bd5ae8 100755
--- a/run_unittests.py
+++ b/run_unittests.py
@@ -21,12 +21,11 @@ import tempfile
import textwrap
import os
import shutil
-import sys
import unittest
from unittest import mock
from configparser import ConfigParser
from glob import glob
-from pathlib import PurePath
+from pathlib import (PurePath, Path)
import mesonbuild.mlog
import mesonbuild.compilers
@@ -36,12 +35,14 @@ import mesonbuild.coredata
import mesonbuild.modules.gnome
from mesonbuild.interpreter import ObjectHolder
from mesonbuild.mesonlib import (
- is_linux, is_windows, is_osx, is_cygwin, is_dragonflybsd,
+ is_windows, is_osx, is_cygwin, is_dragonflybsd,
windows_proof_rmtree, python_command, meson_command, version_compare,
+ BuildDirLock
)
from mesonbuild.environment import Environment, detect_ninja
from mesonbuild.mesonlib import MesonException, EnvironmentException
from mesonbuild.dependencies import PkgConfigDependency, ExternalProgram
+import mesonbuild.modules.pkgconfig
from run_tests import exe_suffix, get_fake_options
from run_tests import get_builddir_target_args, get_backend_commands, Backend
@@ -50,6 +51,9 @@ from run_tests import should_run_linux_cross_tests
def get_dynamic_section_entry(fname, entry):
+ if is_cygwin() or is_osx():
+ raise unittest.SkipTest('Test only applicable to ELF platforms')
+
try:
raw_out = subprocess.check_output(['readelf', '-d', fname],
universal_newlines=True)
@@ -66,9 +70,11 @@ def get_dynamic_section_entry(fname, entry):
def get_soname(fname):
return get_dynamic_section_entry(fname, 'soname')
+
def get_rpath(fname):
return get_dynamic_section_entry(fname, r'(?:rpath|runpath)')
+
class InternalTests(unittest.TestCase):
def test_version_number(self):
@@ -193,6 +199,12 @@ class InternalTests(unittest.TestCase):
# Direct-adding the same library again still adds it
l.append_direct('-lbar')
self.assertEqual(l, ['-Lfoodir', '-lfoo', '-Lbardir', '-lbar', '-lbar'])
+ # Direct-adding with absolute path deduplicates
+ l.append_direct('/libbaz.a')
+ self.assertEqual(l, ['-Lfoodir', '-lfoo', '-Lbardir', '-lbar', '-lbar', '/libbaz.a'])
+ # Adding libbaz again does nothing
+ l.append_direct('/libbaz.a')
+ self.assertEqual(l, ['-Lfoodir', '-lfoo', '-Lbardir', '-lbar', '-lbar', '/libbaz.a'])
def test_string_templates_substitution(self):
dictfunc = mesonbuild.mesonlib.get_filenames_templates_dict
@@ -427,6 +439,35 @@ class InternalTests(unittest.TestCase):
kwargs = {'sources': [1, 2, 3], 'pch_sources': [4, 5, 6]}
self.assertEqual([[1, 2, 3], [4, 5, 6]], extract(kwargs, 'sources', 'pch_sources'))
+ def test_snippets(self):
+ hashcounter = re.compile('^ *(#)+')
+ snippet_dir = Path('docs/markdown/snippets')
+ self.assertTrue(snippet_dir.is_dir())
+ for f in snippet_dir.glob('*'):
+ self.assertTrue(f.is_file())
+ if f.suffix == '.md':
+ with f.open() as snippet:
+ for line in snippet:
+ m = re.match(hashcounter, line)
+ if m:
+ self.assertEqual(len(m.group(0)), 2, 'All headings in snippets must have two hash symbols: ' + f.name)
+ else:
+ if f.name != 'add_release_note_snippets_here':
+ self.assertTrue(False, 'A file without .md suffix in snippets dir: ' + f.name)
+
+ def test_pkgconfig_module(self):
+ deps = mesonbuild.modules.pkgconfig.DependenciesHelper("thislib")
+
+ class Mock:
+ pass
+
+ mock = Mock()
+ mock.pcdep = Mock()
+ mock.pcdep.name = "some_name"
+ mock.version_reqs = []
+ deps.add_pub_libs([mock])
+ self.assertEqual(deps.format_reqs(deps.pub_reqs), "some_name")
+
class BasePlatformTests(unittest.TestCase):
def setUp(self):
@@ -494,7 +535,8 @@ class BasePlatformTests(unittest.TestCase):
windows_proof_rmtree(path)
except FileNotFoundError:
pass
- os.environ = self.orig_env
+ os.environ.clear()
+ os.environ.update(self.orig_env)
super().tearDown()
def _run(self, command, workdir=None):
@@ -502,16 +544,18 @@ class BasePlatformTests(unittest.TestCase):
Run a command while printing the stdout and stderr to stdout,
and also return a copy of it
'''
- p = subprocess.Popen(command, stdout=subprocess.PIPE,
- stderr=subprocess.STDOUT, env=os.environ.copy(),
- universal_newlines=True, cwd=workdir)
- output = p.communicate()[0]
- print(output)
+        # If this call hangs, CI will just abort and it is very hard to tell
+        # whether that was a CI issue or a test bug. Set a timeout and fail
+        # loudly instead.
+ p = subprocess.run(command, stdout=subprocess.PIPE,
+ stderr=subprocess.STDOUT, env=os.environ.copy(),
+ universal_newlines=True, cwd=workdir, timeout=60 * 5)
+ print(p.stdout)
if p.returncode != 0:
- if 'MESON_SKIP_TEST' in output:
+ if 'MESON_SKIP_TEST' in p.stdout:
raise unittest.SkipTest('Project requested skipping.')
raise subprocess.CalledProcessError(p.returncode, command)
- return output
+ return p.stdout
def init(self, srcdir, extra_args=None, default_args=True, inprocess=False):
self.assertPathExists(srcdir)
@@ -526,9 +570,15 @@ class BasePlatformTests(unittest.TestCase):
self.privatedir = os.path.join(self.builddir, 'meson-private')
if inprocess:
try:
- (returncode, out, _) = run_configure(self.meson_mainfile, self.meson_args + args + extra_args)
+ (returncode, out, err) = run_configure(self.meson_mainfile, self.meson_args + args + extra_args)
+ if 'MESON_SKIP_TEST' in out:
+ raise unittest.SkipTest('Project requested skipping.')
if returncode != 0:
self._print_meson_log()
+ print('Stdout:\n')
+ print(out)
+ print('Stderr:\n')
+ print(err)
raise RuntimeError('Configure failed')
except:
self._print_meson_log()
@@ -563,10 +613,11 @@ class BasePlatformTests(unittest.TestCase):
def run_tests(self):
self._run(self.test_command, workdir=self.builddir)
- def install(self):
+ def install(self, *, use_destdir=True):
if self.backend is not Backend.ninja:
raise unittest.SkipTest('{!r} backend can\'t install files'.format(self.backend.name))
- os.environ['DESTDIR'] = self.installdir
+ if use_destdir:
+ os.environ['DESTDIR'] = self.installdir
self._run(self.install_command, workdir=self.builddir)
def uninstall(self):
@@ -919,6 +970,12 @@ class AllPlatformTests(BasePlatformTests):
self.uninstall()
self.assertPathDoesNotExist(exename)
+ def test_forcefallback(self):
+ testdir = os.path.join(self.unit_test_dir, '27 forcefallback')
+ self.init(testdir, ['--wrap-mode=forcefallback'])
+ self.build()
+ self.run_tests()
+
def test_testsetups(self):
if not shutil.which('valgrind'):
raise unittest.SkipTest('Valgrind not installed.')
@@ -949,6 +1006,31 @@ class AllPlatformTests(BasePlatformTests):
# Setup with only a timeout works
self._run(self.mtest_command + ['--setup=timeout'])
+ def test_testsetup_selection(self):
+ testdir = os.path.join(self.unit_test_dir, '13 testsetup selection')
+ self.init(testdir)
+ self.build()
+
+ # Run tests without setup
+ self.run_tests()
+
+ self.assertRaises(subprocess.CalledProcessError, self._run, self.mtest_command + ['--setup=missingfromfoo'])
+ self._run(self.mtest_command + ['--setup=missingfromfoo', '--no-suite=foo:'])
+
+ self._run(self.mtest_command + ['--setup=worksforall'])
+ self._run(self.mtest_command + ['--setup=main:worksforall'])
+
+ self.assertRaises(subprocess.CalledProcessError, self._run,
+ self.mtest_command + ['--setup=onlyinbar'])
+ self.assertRaises(subprocess.CalledProcessError, self._run,
+ self.mtest_command + ['--setup=onlyinbar', '--no-suite=main:'])
+ self._run(self.mtest_command + ['--setup=onlyinbar', '--no-suite=main:', '--no-suite=foo:'])
+ self._run(self.mtest_command + ['--setup=bar:onlyinbar'])
+ self.assertRaises(subprocess.CalledProcessError, self._run,
+ self.mtest_command + ['--setup=foo:onlyinbar'])
+ self.assertRaises(subprocess.CalledProcessError, self._run,
+ self.mtest_command + ['--setup=main:onlyinbar'])
+
def assertFailedTestCount(self, failure_count, command):
try:
self._run(command)
@@ -1332,9 +1414,9 @@ class AllPlatformTests(BasePlatformTests):
subprocess.check_call(['git', 'config',
'user.email', 'teh_coderz@example.com'], cwd=project_dir)
subprocess.check_call(['git', 'add', 'meson.build', 'distexe.c'], cwd=project_dir,
- stdout=subprocess.DEVNULL)
+ stdout=subprocess.DEVNULL)
subprocess.check_call(['git', 'commit', '-a', '-m', 'I am a project'], cwd=project_dir,
- stdout=subprocess.DEVNULL)
+ stdout=subprocess.DEVNULL)
try:
self.dist_impl(git_init)
@@ -1475,7 +1557,6 @@ int main(int argc, char **argv) {
cmd += ['-c', source, '-o', objectfile] + extra_args
subprocess.check_call(cmd, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
-
def test_prebuilt_object(self):
(compiler, _, object_suffix, _) = self.detect_prebuild_env()
tdir = os.path.join(self.unit_test_dir, '14 prebuilt object')
@@ -1736,6 +1817,16 @@ int main(int argc, char **argv) {
]:
self.assertRegex(out, re.escape(expected))
+ def test_permitted_method_kwargs(self):
+ tdir = os.path.join(self.unit_test_dir, '23 non-permitted kwargs')
+ out = self.init(tdir)
+ for expected in [
+ r'WARNING: Passed invalid keyword argument "prefixxx".',
+ r'WARNING: Passed invalid keyword argument "argsxx".',
+ r'WARNING: Passed invalid keyword argument "invalidxx".',
+ ]:
+ self.assertRegex(out, re.escape(expected))
+
def test_templates(self):
ninja = detect_ninja()
if ninja is None:
@@ -1749,6 +1840,164 @@ int main(int argc, char **argv) {
workdir=tmpdir)
self._run(ninja,
workdir=os.path.join(tmpdir, 'builddir'))
+ with tempfile.TemporaryDirectory() as tmpdir:
+ with open(os.path.join(tmpdir, 'foo.' + lang), 'w') as f:
+ f.write('int main() {}')
+ self._run(meson_command + ['init', '-b'], workdir=tmpdir)
+
+ # The test uses mocking and thus requires that
+ # the current process is the one to run the Meson steps.
+ # If we are using an external test executable (most commonly
+ # in Debian autopkgtests) then the mocking won't work.
+ @unittest.skipIf('MESON_EXE' in os.environ, 'MESON_EXE is defined, can not use mocking.')
+ def test_cross_file_system_paths(self):
+ if is_windows():
+ raise unittest.SkipTest('system crossfile paths not defined for Windows (yet)')
+
+ testdir = os.path.join(self.common_test_dir, '1 trivial')
+ cross_content = textwrap.dedent("""\
+ [binaries]
+ c = '/usr/bin/cc'
+ ar = '/usr/bin/ar'
+ strip = '/usr/bin/ar'
+
+ [properties]
+
+ [host_machine]
+ system = 'linux'
+ cpu_family = 'x86'
+ cpu = 'i686'
+ endian = 'little'
+ """)
+
+ with tempfile.TemporaryDirectory() as d:
+ dir_ = os.path.join(d, 'meson', 'cross')
+ os.makedirs(dir_)
+ with tempfile.NamedTemporaryFile('w', dir=dir_, delete=False) as f:
+ f.write(cross_content)
+ name = os.path.basename(f.name)
+
+ with mock.patch.dict(os.environ, {'XDG_DATA_HOME': d}):
+ self.init(testdir, ['--cross-file=' + name], inprocess=True)
+ self.wipe()
+
+ with mock.patch.dict(os.environ, {'XDG_DATA_DIRS': d}):
+ os.environ.pop('XDG_DATA_HOME', None)
+ self.init(testdir, ['--cross-file=' + name], inprocess=True)
+ self.wipe()
+
+ with tempfile.TemporaryDirectory() as d:
+ dir_ = os.path.join(d, '.local', 'share', 'meson', 'cross')
+ os.makedirs(dir_)
+ with tempfile.NamedTemporaryFile('w', dir=dir_, delete=False) as f:
+ f.write(cross_content)
+ name = os.path.basename(f.name)
+
+ with mock.patch('mesonbuild.coredata.os.path.expanduser', lambda x: x.replace('~', d)):
+ self.init(testdir, ['--cross-file=' + name], inprocess=True)
+ self.wipe()
+
+ def test_compiler_run_command(self):
+ '''
+ The test checks that the compiler object can be passed to
+ run_command().
+ '''
+ testdir = os.path.join(self.unit_test_dir, '23 compiler run_command')
+ self.init(testdir)
+
+ def test_identical_target_name_in_subproject_flat_layout(self):
+ '''
+ Test that identical targets in different subprojects do not collide
+ if layout is flat.
+ '''
+ testdir = os.path.join(self.common_test_dir, '182 identical target name in subproject flat layout')
+ self.init(testdir, extra_args=['--layout=flat'])
+ self.build()
+
+ def test_flock(self):
+ exception_raised = False
+ with tempfile.TemporaryDirectory() as tdir:
+ os.mkdir(os.path.join(tdir, 'meson-private'))
+ with BuildDirLock(tdir):
+ try:
+ with BuildDirLock(tdir):
+ pass
+ except MesonException:
+ exception_raised = True
+ self.assertTrue(exception_raised, 'Double locking did not raise exception.')
+
+ def test_check_module_linking(self):
+ """
+ Test that shared modules are not linked with targets(link_with:) #2865
+ """
+ tdir = os.path.join(self.unit_test_dir, '26 shared_mod linking')
+ out = self.init(tdir)
+ msg = ('''WARNING: target links against shared modules. This is not
+recommended as it can lead to undefined behaviour on some platforms''')
+ self.assertIn(msg, out)
+
+ def test_ndebug_if_release_disabled(self):
+ testdir = os.path.join(self.unit_test_dir, '25 ndebug if-release')
+ self.init(testdir, extra_args=['--buildtype=release', '-Db_ndebug=if-release'])
+ self.build()
+ exe = os.path.join(self.builddir, 'main')
+ self.assertEqual(b'NDEBUG=1', subprocess.check_output(exe).strip())
+
+ def test_ndebug_if_release_enabled(self):
+ testdir = os.path.join(self.unit_test_dir, '25 ndebug if-release')
+ self.init(testdir, extra_args=['--buildtype=debugoptimized', '-Db_ndebug=if-release'])
+ self.build()
+ exe = os.path.join(self.builddir, 'main')
+ self.assertEqual(b'NDEBUG=0', subprocess.check_output(exe).strip())
+
+ def test_guessed_linker_dependencies(self):
+ '''
+ Test that meson adds dependencies for libraries based on the final
+ linker command line.
+ '''
+ # build library
+ testdirbase = os.path.join(self.unit_test_dir, '26 guessed linker dependencies')
+ testdirlib = os.path.join(testdirbase, 'lib')
+ extra_args = None
+ env = Environment(testdirlib, self.builddir, self.meson_command,
+ get_fake_options(self.prefix), [])
+ if env.detect_c_compiler(False).get_id() != 'msvc':
+            # Static libraries are not linkable with -l with msvc because meson installs them
+            # as .a files, which unix_args_to_native does not recognize since it expects
+            # libraries to use the .lib extension. For a DLL the import library is installed
+            # as .lib. Thus for msvc this test needs to use shared libraries to exercise the
+            # path resolving logic in the dependency generation code path.
+ extra_args = ['--default-library', 'static']
+ self.init(testdirlib, extra_args=extra_args)
+ self.build()
+ self.install()
+ libbuilddir = self.builddir
+ installdir = self.installdir
+ libdir = os.path.join(self.installdir, self.prefix.lstrip('/').lstrip('\\'), 'lib')
+
+ # build user of library
+ self.new_builddir()
+        # The replace() is needed because meson mangles platform paths passed via LDFLAGS
+ os.environ["LDFLAGS"] = '-L{}'.format(libdir.replace('\\', '/'))
+ self.init(os.path.join(testdirbase, 'exe'))
+ del os.environ["LDFLAGS"]
+ self.build()
+ self.assertBuildIsNoop()
+
+ # rebuild library
+ exebuilddir = self.builddir
+ self.installdir = installdir
+ self.builddir = libbuilddir
+ # Microsoft's compiler is quite smart about touching import libs on changes,
+ # so ensure that there is actually a change in symbols.
+ self.setconf('-Dmore_exports=true')
+ self.build()
+ self.install()
+ # no ensure_backend_detects_changes needed because self.setconf did that already
+
+        # assert that the user of the library will be rebuilt
+ self.builddir = exebuilddir
+ self.assertRebuiltTarget('app')
class FailureTests(BasePlatformTests):
@@ -1915,8 +2164,8 @@ class FailureTests(BasePlatformTests):
env = Environment('', self.builddir, self.meson_command,
get_fake_options(self.prefix), [])
try:
- objc = env.detect_objc_compiler(False)
- objcpp = env.detect_objcpp_compiler(False)
+ env.detect_objc_compiler(False)
+ env.detect_objcpp_compiler(False)
except EnvironmentException:
code = "add_languages('objc')\nadd_languages('objcpp')"
self.assertMesonRaises(code, "Unknown compiler")
@@ -2057,6 +2306,9 @@ class LinuxlikeTests(BasePlatformTests):
is true and not when it is false. This can't be an ordinary test case
because we need to inspect the compiler database.
'''
+ if is_cygwin() or is_osx():
+ raise unittest.SkipTest('PIC not relevant')
+
testdir = os.path.join(self.common_test_dir, '3 static')
self.init(testdir)
compdb = self.get_compdb()
@@ -2104,11 +2356,11 @@ class LinuxlikeTests(BasePlatformTests):
os.environ['PKG_CONFIG_LIBDIR'] = os.pathsep.join([privatedir1, privatedir2])
cmd = ['pkg-config', 'dependency-test']
- out = self._run(cmd + ['--print-requires']).strip().split()
+ out = self._run(cmd + ['--print-requires']).strip().split('\n')
self.assertEqual(sorted(out), sorted(['libexposed']))
- out = self._run(cmd + ['--print-requires-private']).strip().split()
- self.assertEqual(sorted(out), sorted(['libfoo']))
+ out = self._run(cmd + ['--print-requires-private']).strip().split('\n')
+ self.assertEqual(sorted(out), sorted(['libfoo >= 1.0']))
out = self._run(cmd + ['--cflags-only-other']).strip().split()
self.assertEqual(sorted(out), sorted(['-pthread', '-DCUSTOM']))
@@ -2123,10 +2375,19 @@ class LinuxlikeTests(BasePlatformTests):
'-llibinternal', '-lcustom2',
'-lfoo']))
+ cmd = ['pkg-config', 'requires-test']
+ out = self._run(cmd + ['--print-requires']).strip().split('\n')
+ self.assertEqual(sorted(out), sorted(['libexposed', 'libfoo >= 1.0', 'libhello']))
+
+ cmd = ['pkg-config', 'requires-private-test']
+ out = self._run(cmd + ['--print-requires-private']).strip().split('\n')
+ self.assertEqual(sorted(out), sorted(['libexposed', 'libfoo >= 1.0', 'libhello']))
+
def test_pkg_unfound(self):
testdir = os.path.join(self.unit_test_dir, '22 unfound pkgconfig')
self.init(testdir)
- pcfile = open(os.path.join(self.privatedir, 'somename.pc')).read()
+ with open(os.path.join(self.privatedir, 'somename.pc')) as f:
+ pcfile = f.read()
self.assertFalse('blub_blob_blib' in pcfile)
def test_vala_c_warnings(self):
@@ -2136,6 +2397,8 @@ class LinuxlikeTests(BasePlatformTests):
database.
https://github.com/mesonbuild/meson/issues/864
'''
+ if not shutil.which('valac'):
+ raise unittest.SkipTest('valac not installed.')
testdir = os.path.join(self.vala_test_dir, '5 target glib')
self.init(testdir)
compdb = self.get_compdb()
@@ -2192,11 +2455,8 @@ class LinuxlikeTests(BasePlatformTests):
if not shutil.which('qmake-qt5'):
if not shutil.which('qmake'):
raise unittest.SkipTest('QMake not found')
- # For some inexplicable reason qmake --version gives different
- # results when run from the command line vs invoked by Python.
- # Check for both cases in case this behavior changes in the future.
- output = subprocess.getoutput(['qmake', '--version'])
- if 'Qt version 5' not in output and 'qt5' not in output:
+ output = subprocess.getoutput('qmake --version')
+ if 'Qt version 5' not in output:
raise unittest.SkipTest('Qmake found, but it is not for Qt 5.')
# Disable pkg-config codepath and force searching with qmake/qmake-qt5
testdir = os.path.join(self.framework_test_dir, '4 qt')
@@ -2208,6 +2468,9 @@ class LinuxlikeTests(BasePlatformTests):
self.assertTrue(msg in mesonlog or msg2 in mesonlog)
def _test_soname_impl(self, libpath, install):
+ if is_cygwin() or is_osx():
+ raise unittest.SkipTest('Test only applicable to ELF and linuxlike sonames')
+
testdir = os.path.join(self.unit_test_dir, '1 soname')
self.init(testdir)
self.build()
@@ -2281,7 +2544,9 @@ class LinuxlikeTests(BasePlatformTests):
# Check that all the listed -std=xxx options for this compiler work
# just fine when used
for v in compiler.get_options()[lang_std].choices:
- if compiler.get_id() == 'clang' and version_compare(compiler.version, '<5.0.0') and '17' in v:
+ if (compiler.get_id() == 'clang' and '17' in v and
+ (version_compare(compiler.version, '<5.0.0') or
+ (compiler.clang_type == mesonbuild.compilers.CLANG_OSX and version_compare(compiler.version, '<9.2')))):
continue
std_opt = '{}={}'.format(lang_std, v)
self.init(testdir, ['-D' + std_opt])
@@ -2333,8 +2598,8 @@ class LinuxlikeTests(BasePlatformTests):
def test_unity_subproj(self):
testdir = os.path.join(self.common_test_dir, '49 subproject')
self.init(testdir, extra_args='--unity=subprojects')
- self.assertPathExists(os.path.join(self.builddir, 'subprojects/sublib/simpletest@exe/simpletest-unity.c'))
- self.assertPathExists(os.path.join(self.builddir, 'subprojects/sublib/sublib@sha/sublib-unity.c'))
+ self.assertPathExists(os.path.join(self.builddir, 'subprojects/sublib/sublib@@simpletest@exe/simpletest-unity.c'))
+ self.assertPathExists(os.path.join(self.builddir, 'subprojects/sublib/sublib@@sublib@sha/sublib-unity.c'))
self.assertPathDoesNotExist(os.path.join(self.builddir, 'user@exe/user-unity.c'))
self.build()
@@ -2414,6 +2679,9 @@ class LinuxlikeTests(BasePlatformTests):
self.assertNotIn('-Werror', c03_comp)
def test_run_installed(self):
+ if is_cygwin() or is_osx():
+ raise unittest.SkipTest('LD_LIBRARY_PATH and RPATH not applicable')
+
testdir = os.path.join(self.unit_test_dir, '7 run installed')
self.init(testdir)
self.build()
@@ -2483,6 +2751,8 @@ class LinuxlikeTests(BasePlatformTests):
self.assertTrue(gobject_found)
def test_build_rpath(self):
+ if is_cygwin():
+ raise unittest.SkipTest('Windows PE/COFF binaries do not use RPATH')
testdir = os.path.join(self.unit_test_dir, '11 build_rpath')
self.init(testdir)
self.build()
@@ -2500,6 +2770,9 @@ class LinuxlikeTests(BasePlatformTests):
self.assertEqual(install_rpath, 'baz')
def test_pch_with_address_sanitizer(self):
+ if is_cygwin():
+ raise unittest.SkipTest('asan not available on Cygwin')
+
testdir = os.path.join(self.common_test_dir, '13 pch')
self.init(testdir, ['-Db_sanitize=address'])
self.build()
@@ -2508,10 +2781,11 @@ class LinuxlikeTests(BasePlatformTests):
self.assertIn("-fsanitize=address", i["command"])
def test_coverage(self):
- if not shutil.which('gcovr'):
+ gcovr_exe, gcovr_new_rootdir = mesonbuild.environment.detect_gcovr()
+ if not gcovr_exe:
raise unittest.SkipTest('gcovr not found')
- if not shutil.which('genhtml'):
- raise unittest.SkipTest('genhtml not found')
+ if not shutil.which('genhtml') and not gcovr_new_rootdir:
+ raise unittest.SkipTest('genhtml not found and gcovr is too old')
if 'clang' in os.environ.get('CC', ''):
# We need to use llvm-cov instead of gcovr with clang
raise unittest.SkipTest('Coverage does not work with clang right now, help wanted!')
@@ -2547,55 +2821,14 @@ endian = 'little'
self.init(testdir, ['-Db_coverage=true'], default_args=False)
self.build('reconfigure')
- def test_cross_file_system_paths(self):
- testdir = os.path.join(self.common_test_dir, '1 trivial')
- cross_content = textwrap.dedent("""\
- [binaries]
- c = '/usr/bin/cc'
- ar = '/usr/bin/ar'
- strip = '/usr/bin/ar'
-
- [properties]
-
- [host_machine]
- system = 'linux'
- cpu_family = 'x86'
- cpu = 'i686'
- endian = 'little'
- """)
-
- with tempfile.TemporaryDirectory() as d:
- dir_ = os.path.join(d, 'meson', 'cross')
- os.makedirs(dir_)
- with tempfile.NamedTemporaryFile('w', dir=dir_, delete=False) as f:
- f.write(cross_content)
- name = os.path.basename(f.name)
-
- with mock.patch.dict(os.environ, {'XDG_DATA_HOME': d}):
- self.init(testdir, ['--cross-file=' + name], inprocess=True)
- self.wipe()
-
- with mock.patch.dict(os.environ, {'XDG_DATA_DIRS': d}):
- os.environ.pop('XDG_DATA_HOME', None)
- self.init(testdir, ['--cross-file=' + name], inprocess=True)
- self.wipe()
-
- with tempfile.TemporaryDirectory() as d:
- dir_ = os.path.join(d, '.local', 'share', 'meson', 'cross')
- os.makedirs(dir_)
- with tempfile.NamedTemporaryFile('w', dir=dir_, delete=False) as f:
- f.write(cross_content)
- name = os.path.basename(f.name)
-
- with mock.patch('mesonbuild.coredata.os.path.expanduser', lambda x: x.replace('~', d)):
- self.init(testdir, ['--cross-file=' + name], inprocess=True)
- self.wipe()
-
def test_vala_generated_source_buildir_inside_source_tree(self):
'''
Test that valac outputs generated C files in the expected location when
the builddir is a subdir of the source tree.
'''
+ if not shutil.which('valac'):
+ raise unittest.SkipTest('valac not installed.')
+
testdir = os.path.join(self.vala_test_dir, '8 generated sources')
newdir = os.path.join(self.builddir, 'srctree')
shutil.copytree(testdir, newdir)
@@ -2622,6 +2855,63 @@ endian = 'little'
self.build()
mesonbuild.modules.gnome.native_glib_version = None
+ @unittest.skipIf(shutil.which('pkg-config') is None, 'Pkg-config not found.')
+ def test_pkgconfig_usage(self):
+ testdir1 = os.path.join(self.unit_test_dir, '24 pkgconfig usage/dependency')
+ testdir2 = os.path.join(self.unit_test_dir, '24 pkgconfig usage/dependee')
+ if subprocess.call(['pkg-config', '--cflags', 'glib-2.0'],
+ stdout=subprocess.DEVNULL,
+ stderr=subprocess.DEVNULL) != 0:
+ raise unittest.SkipTest('Glib 2.0 dependency not available.')
+ with tempfile.TemporaryDirectory() as tempdirname:
+ self.init(testdir1, ['--prefix=' + tempdirname, '--libdir=lib'], default_args=False)
+ self.install(use_destdir=False)
+ shutil.rmtree(self.builddir)
+ os.mkdir(self.builddir)
+ pkg_dir = os.path.join(tempdirname, 'lib/pkgconfig')
+ self.assertTrue(os.path.exists(os.path.join(pkg_dir, 'libpkgdep.pc')))
+ lib_dir = os.path.join(tempdirname, 'lib')
+ os.environ['PKG_CONFIG_PATH'] = pkg_dir
+ # Private internal libraries must not leak out.
+ pkg_out = subprocess.check_output(['pkg-config', '--static', '--libs', 'libpkgdep'])
+ self.assertFalse(b'libpkgdep-int' in pkg_out, 'Internal library leaked out.')
+ # Dependencies must not leak to cflags when building only a shared library.
+ pkg_out = subprocess.check_output(['pkg-config', '--cflags', 'libpkgdep'])
+ self.assertFalse(b'glib' in pkg_out, 'Internal dependency leaked to headers.')
+ # Test that the result is usable.
+ self.init(testdir2)
+ self.build()
+ myenv = os.environ.copy()
+ myenv['LD_LIBRARY_PATH'] = lib_dir
+ if is_cygwin():
+ bin_dir = os.path.join(tempdirname, 'bin')
+ myenv['PATH'] = bin_dir + os.pathsep + myenv['PATH']
+ self.assertTrue(os.path.isdir(lib_dir))
+ test_exe = os.path.join(self.builddir, 'pkguser')
+ self.assertTrue(os.path.isfile(test_exe))
+ subprocess.check_call(test_exe, env=myenv)
+
+ @unittest.skipIf(shutil.which('pkg-config') is None, 'Pkg-config not found.')
+ def test_pkgconfig_internal_libraries(self):
+        '''
+        Check that a library built and installed together with its generated
+        pkg-config file can be consumed by another project via PKG_CONFIG_PATH.
+        '''
+ with tempfile.TemporaryDirectory() as tempdirname:
+ # build library
+ testdirbase = os.path.join(self.unit_test_dir, '28 pkgconfig use libraries')
+ testdirlib = os.path.join(testdirbase, 'lib')
+ self.init(testdirlib, extra_args=['--prefix=' + tempdirname,
+ '--libdir=lib',
+ '--default-library=static'], default_args=False)
+ self.build()
+ self.install(use_destdir=False)
+
+ # build user of library
+ pkg_dir = os.path.join(tempdirname, 'lib/pkgconfig')
+ os.environ['PKG_CONFIG_PATH'] = pkg_dir
+ self.new_builddir()
+ self.init(os.path.join(testdirbase, 'app'))
+ self.build()
+
class LinuxArmCrossCompileTests(BasePlatformTests):
'''
@@ -2644,6 +2934,7 @@ class LinuxArmCrossCompileTests(BasePlatformTests):
compdb = self.get_compdb()
self.assertNotIn('-DBUILD_ENVIRONMENT_ONLY', compdb[0]['command'])
+
class RewriterTests(unittest.TestCase):
def setUp(self):
@@ -2719,7 +3010,7 @@ def unset_envs():
if __name__ == '__main__':
unset_envs()
cases = ['InternalTests', 'AllPlatformTests', 'FailureTests']
- if is_linux():
+ if not is_windows():
cases += ['LinuxlikeTests']
if should_run_linux_cross_tests():
cases += ['LinuxArmCrossCompileTests']
diff --git a/setup.py b/setup.py
index 0852cd6..8382440 100644
--- a/setup.py
+++ b/setup.py
@@ -19,8 +19,9 @@ import sys
from mesonbuild.coredata import version
-if sys.version_info[0] < 3:
- print('Tried to install with Python 2, Meson only supports Python 3.')
+if sys.version_info < (3, 5, 0):
+ print('Tried to install with an unsupported version of Python. '
+ 'Meson requires Python 3.5.0 or greater')
sys.exit(1)
# We need to support Python installations that have nothing but the basic
@@ -62,6 +63,7 @@ setup(name='meson',
author_email='jpakkane@gmail.com',
url='http://mesonbuild.com',
license=' Apache License, Version 2.0',
+ python_requires='>=3.5',
packages=['mesonbuild',
'mesonbuild.backend',
'mesonbuild.compilers',
diff --git a/skip_ci.py b/skip_ci.py
new file mode 100755
index 0000000..752dfdc
--- /dev/null
+++ b/skip_ci.py
@@ -0,0 +1,74 @@
+#!/usr/bin/env python3
+# Copyright 2018 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import print_function
+
+import argparse
+import os
+import subprocess
+import sys
+import traceback
+
+
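+# Illustrative invocation (the environment variable names below are
+# assumptions about what a CI service such as Travis exposes, not part
+# of this script):
+#
+#     ./skip_ci.py --base-branch-env=TRAVIS_BRANCH --is-pull-env=TRAVIS_PULL_REQUEST
+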
+def check_pr(is_pr_env):
+ if is_pr_env not in os.environ:
+        print('This is not a pull request: {} is not set'.format(is_pr_env))
+ sys.exit()
+ elif os.environ[is_pr_env] == 'false':
+        print('This is not a pull request: {} is false'.format(is_pr_env))
+ sys.exit()
+
+
+def get_base_branch(base_env):
+ if base_env not in os.environ:
+ print('Unable to determine base branch: {} is not set'.format(base_env))
+ sys.exit()
+ return os.environ[base_env]
+
+
+def get_git_files(base):
+ diff = subprocess.check_output(['git', 'diff', '--name-only', base + '...HEAD'])
+ return diff.strip().split(b'\n')
+
+
+def is_documentation(filename):
+ return filename.startswith(b'docs/')
+
+
+def main():
+ try:
+ parser = argparse.ArgumentParser(description='CI Skipper')
+ parser.add_argument('--base-branch-env', required=True,
+ help='Branch push is targeted to')
+ parser.add_argument('--is-pull-env', required=True,
+ help='Variable set if it is a PR')
+ args = parser.parse_args()
+ check_pr(args.is_pull_env)
+ base = get_base_branch(args.base_branch_env)
+ if all(is_documentation(f) for f in get_git_files(base)):
+ print("Don't run CI for documentation-only changes, add '[skip ci]' to commit title.")
+ print('See http://mesonbuild.com/Contributing.html#skipping-integration-tests')
+ sys.exit(1)
+ except Exception:
+        # If this script fails we want the build to proceed.
+        # A failure most likely means a corner case we did not consider, or a bug.
+        # In either case this should not prevent CI from running when it is needed,
+        # and we can tolerate CI running when it is not required.
+ traceback.print_exc()
+ print('There is a BUG in skip_ci.py, exiting.')
+ sys.exit()
+
+if __name__ == '__main__':
+ main()
diff --git a/test cases/common/12 data/installed_files.txt b/test cases/common/12 data/installed_files.txt
index af1a735..43bb0e5 100644
--- a/test cases/common/12 data/installed_files.txt
+++ b/test cases/common/12 data/installed_files.txt
@@ -2,5 +2,10 @@ usr/share/progname/datafile.dat
usr/share/progname/fileobject_datafile.dat
usr/share/progname/vanishing.dat
usr/share/progname/vanishing2.dat
+usr/share/data install test/renamed file.txt
+usr/share/data install test/somefile.txt
+usr/share/data install test/some/nested/path.txt
+usr/share/renamed/renamed 2.txt
+usr/share/renamed/renamed 3.txt
etc/etcfile.dat
usr/bin/runscript.sh
diff --git a/test cases/common/12 data/meson.build b/test cases/common/12 data/meson.build
index d3407d1..d855bba 100644
--- a/test cases/common/12 data/meson.build
+++ b/test cases/common/12 data/meson.build
@@ -10,6 +10,14 @@ install_data(files('fileobject_datafile.dat'),
install_dir : 'share/progname',
install_mode : [false, false, 0])
+install_data(files('somefile.txt'))
+
subdir('vanishing')
install_data(sources : 'vanishing/vanishing2.dat', install_dir : 'share/progname')
+
+install_data(sources : 'to_be_renamed_1.txt', rename : 'renamed file.txt')
+install_data(sources : ['vanishing/to_be_renamed_2.txt', 'to_be_renamed_3.txt'],
+ install_dir : 'share/renamed',
+ rename : ['renamed 2.txt', 'renamed 3.txt'])
+install_data(sources : 'to_be_renamed_4.txt', rename : 'some/nested/path.txt')
diff --git a/test cases/common/12 data/somefile.txt b/test cases/common/12 data/somefile.txt
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/test cases/common/12 data/somefile.txt
diff --git a/test cases/common/12 data/to_be_renamed_1.txt b/test cases/common/12 data/to_be_renamed_1.txt
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/test cases/common/12 data/to_be_renamed_1.txt
diff --git a/test cases/common/12 data/to_be_renamed_3.txt b/test cases/common/12 data/to_be_renamed_3.txt
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/test cases/common/12 data/to_be_renamed_3.txt
diff --git a/test cases/common/12 data/to_be_renamed_4.txt b/test cases/common/12 data/to_be_renamed_4.txt
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/test cases/common/12 data/to_be_renamed_4.txt
diff --git a/test cases/common/12 data/vanishing/to_be_renamed_2.txt b/test cases/common/12 data/vanishing/to_be_renamed_2.txt
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/test cases/common/12 data/vanishing/to_be_renamed_2.txt
diff --git a/test cases/common/138 include order/inc1/hdr.h b/test cases/common/138 include order/inc1/hdr.h
new file mode 100644
index 0000000..9d755a8
--- /dev/null
+++ b/test cases/common/138 include order/inc1/hdr.h
@@ -0,0 +1 @@
+#define SOME_DEFINE 42
diff --git a/test cases/common/138 include order/inc2/hdr.h b/test cases/common/138 include order/inc2/hdr.h
new file mode 100644
index 0000000..2ebcaca
--- /dev/null
+++ b/test cases/common/138 include order/inc2/hdr.h
@@ -0,0 +1 @@
+#undef SOME_DEFINE
diff --git a/test cases/common/138 include order/meson.build b/test cases/common/138 include order/meson.build
index c370bb1..9f275b8 100644
--- a/test cases/common/138 include order/meson.build
+++ b/test cases/common/138 include order/meson.build
@@ -30,3 +30,7 @@ f = executable('somefxe', 'sub4/main.c',
test('eh', e)
test('oh', f)
+
+# Test that the order in include_directories() is maintained
+incs = include_directories('inc1', 'inc2')
+executable('ordertest', 'ordertest.c', include_directories: incs)
diff --git a/test cases/common/138 include order/ordertest.c b/test cases/common/138 include order/ordertest.c
new file mode 100644
index 0000000..0d9173f
--- /dev/null
+++ b/test cases/common/138 include order/ordertest.c
@@ -0,0 +1,11 @@
+#include "hdr.h"
+
+#if !defined(SOME_DEFINE) || SOME_DEFINE != 42
+#error "Should have picked up hdr.h from inc1/hdr.h"
+#endif
+
+int
+main (int c, char ** argv)
+{
+ return 0;
+}
diff --git a/test cases/common/142 compute int/config.h.in b/test cases/common/142 compute int/config.h.in
index ad8d077..0de63ab 100644
--- a/test cases/common/142 compute int/config.h.in
+++ b/test cases/common/142 compute int/config.h.in
@@ -1,2 +1,4 @@
#define INTSIZE @INTSIZE@
#define FOOBAR_IN_CONFIG_H @FOOBAR@
+#define MAXINT @MAXINT@
+#define MININT @MININT@
diff --git a/test cases/common/142 compute int/meson.build b/test cases/common/142 compute int/meson.build
index 43553fe..22bd266 100644
--- a/test cases/common/142 compute int/meson.build
+++ b/test cases/common/142 compute int/meson.build
@@ -7,11 +7,15 @@ cc = meson.get_compiler('c')
intsize = cc.compute_int('sizeof(int)', low : 1, high : 16, guess : 4)
foobar = cc.compute_int('FOOBAR_IN_FOOBAR_H', prefix : '#include "foobar.h"', include_directories : inc)
+maxint = cc.compute_int('INT_MAX', prefix: '#include <limits.h>')
+minint = cc.compute_int('INT_MIN', prefix: '#include <limits.h>')
cd = configuration_data()
cd.set('INTSIZE', intsize)
cd.set('FOOBAR', foobar)
cd.set('CONFIG', 'config.h')
+cd.set('MAXINT', maxint)
+cd.set('MININT', minint)
configure_file(input : 'config.h.in', output : 'config.h', configuration : cd)
s = configure_file(input : 'prog.c.in', output : 'prog.c', configuration : cd)
@@ -23,11 +27,15 @@ cpp = meson.get_compiler('cpp')
intsize = cpp.compute_int('sizeof(int)')
foobar = cpp.compute_int('FOOBAR_IN_FOOBAR_H', prefix : '#include "foobar.h"', include_directories : inc)
+maxint = cpp.compute_int('INT_MAX', prefix: '#include <limits.h>')
+minint = cpp.compute_int('INT_MIN', prefix: '#include <limits.h>')
cdpp = configuration_data()
cdpp.set('INTSIZE', intsize)
cdpp.set('FOOBAR', foobar)
cdpp.set('CONFIG', 'config.hpp')
+cdpp.set('MAXINT', maxint)
+cdpp.set('MININT', minint)
configure_file(input : 'config.h.in', output : 'config.hpp', configuration : cdpp)
spp = configure_file(input : 'prog.c.in', output : 'prog.cc', configuration : cdpp)
diff --git a/test cases/common/142 compute int/prog.c.in b/test cases/common/142 compute int/prog.c.in
index 3ff1463..ff1ad55 100644
--- a/test cases/common/142 compute int/prog.c.in
+++ b/test cases/common/142 compute int/prog.c.in
@@ -1,6 +1,7 @@
#include "@CONFIG@"
#include <stdio.h>
#include <wchar.h>
+#include <limits.h>
#include "foobar.h"
int main(int argc, char **argv) {
@@ -12,5 +13,13 @@ int main(int argc, char **argv) {
fprintf(stderr, "Mismatch: computed int %d, should be %d.\n", FOOBAR_IN_CONFIG_H, FOOBAR_IN_FOOBAR_H);
return 1;
}
+ if(MAXINT != INT_MAX) {
+ fprintf(stderr, "Mismatch: computed max int %d, should be %d.\n", MAXINT, INT_MAX);
+ return 1;
+ }
+ if(MININT != INT_MIN) {
+ fprintf(stderr, "Mismatch: computed min int %d, should be %d.\n", MININT, INT_MIN);
+ return 1;
+ }
return 0;
}
diff --git a/test cases/common/145 whole archive/allofme/meson.build b/test cases/common/145 whole archive/allofme/meson.build
deleted file mode 100644
index f5c2027..0000000
--- a/test cases/common/145 whole archive/allofme/meson.build
+++ /dev/null
@@ -1 +0,0 @@
-stlib = static_library('allofme', '../libfile.c')
diff --git a/test cases/common/145 whole archive/exe/meson.build b/test cases/common/145 whole archive/exe/meson.build
index f47a246..91d298d 100644
--- a/test cases/common/145 whole archive/exe/meson.build
+++ b/test cases/common/145 whole archive/exe/meson.build
@@ -1,2 +1 @@
-exe = executable('prog', '../prog.c',
- link_with : dylib)
+exe = executable('prog', '../prog.c', link_with : sh_func2_linked_func1)
diff --git a/test cases/common/145 whole archive/exe2/meson.build b/test cases/common/145 whole archive/exe2/meson.build
index 5365f03..9184864 100644
--- a/test cases/common/145 whole archive/exe2/meson.build
+++ b/test cases/common/145 whole archive/exe2/meson.build
@@ -1 +1 @@
-exe2 = executable('prog2', '../prog.c', link_with : dylib2)
+exe2 = executable('prog2', '../prog.c', link_with : sh_only_link_whole)
diff --git a/test cases/common/145 whole archive/exe3/meson.build b/test cases/common/145 whole archive/exe3/meson.build
new file mode 100644
index 0000000..82cf57e
--- /dev/null
+++ b/test cases/common/145 whole archive/exe3/meson.build
@@ -0,0 +1 @@
+exe3 = executable('prog3', '../prog.c', link_with : sh_func2_dep_func1)
diff --git a/test cases/common/145 whole archive/exe4/meson.build b/test cases/common/145 whole archive/exe4/meson.build
new file mode 100644
index 0000000..0781250
--- /dev/null
+++ b/test cases/common/145 whole archive/exe4/meson.build
@@ -0,0 +1 @@
+exe4 = executable('prog4', '../prog.c', link_with : sh_func2_transdep_func1)
diff --git a/test cases/common/145 whole archive/libfile.c b/test cases/common/145 whole archive/func1.c
index b2690a0..b2690a0 100644
--- a/test cases/common/145 whole archive/libfile.c
+++ b/test cases/common/145 whole archive/func1.c
diff --git a/test cases/common/145 whole archive/dylib.c b/test cases/common/145 whole archive/func2.c
index 9e287a4..9e287a4 100644
--- a/test cases/common/145 whole archive/dylib.c
+++ b/test cases/common/145 whole archive/func2.c
diff --git a/test cases/common/145 whole archive/meson.build b/test cases/common/145 whole archive/meson.build
index 617ae03..012df33 100644
--- a/test cases/common/145 whole archive/meson.build
+++ b/test cases/common/145 whole archive/meson.build
@@ -10,15 +10,41 @@ if cc.get_id() == 'msvc'
endif
endif
-subdir('allofme')
-subdir('shlib')
+# Test 1: link_whole keeps all symbols
+# Make static func1
+subdir('st_func1')
+# Make shared func2 linking whole func1 archive
+subdir('sh_func2_linked_func1')
+# Link exe with shared library only
subdir('exe')
-
+# Test that both func1 and func2 are accessible from shared library
test('prog', exe)
-# link_whole only
-subdir('stlib')
-subdir('wholeshlib')
+# Test 2: link_whole can be used instead of source list, see #2180
+# Make static func2
+subdir('st_func2')
+# Link both func1 and func2 into same shared library
+# which does not have any sources other than 2 static libraries
+subdir('sh_only_link_whole')
+# Link exe2 with shared library only
subdir('exe2')
-
+# Test that both func1 and func2 are accessible from shared library
test('prog2', exe2)
+
+# Test 3: link_whole can be used in declare_dependency()
+func1_dep = declare_dependency(link_whole : [st_func1])
+# Use dependency to link func1 into shared library
+subdir('sh_func2_dep_func1')
+# Link exe3 with shared library
+subdir('exe3')
+# Test that both func1 and func2 are accessible from shared library
+test('prog3', exe3)
+
+# Test 4: link_whole can be used in transitive declare_dependency()
+func1_trans_dep = declare_dependency(dependencies : func1_dep)
+# Use transitive dependency to link func1 into shared library
+subdir('sh_func2_transdep_func1')
+# Link exe4 with shared library
+subdir('exe4')
+# Test that both func1 and func2 are accessible from shared library
+test('prog4', exe4)
diff --git a/test cases/common/145 whole archive/sh_func2_dep_func1/meson.build b/test cases/common/145 whole archive/sh_func2_dep_func1/meson.build
new file mode 100644
index 0000000..92baca6
--- /dev/null
+++ b/test cases/common/145 whole archive/sh_func2_dep_func1/meson.build
@@ -0,0 +1,4 @@
+# Same as sh_func2_linked_func1: func2.c does not depend on func1(),
+# so without link_whole the compiler would throw func1() away.
+# This is the same version of the test with a dependency object instead.
+sh_func2_dep_func1 = shared_library('sh_func2_dep_func1', '../func2.c', dependencies : func1_dep)
diff --git a/test cases/common/145 whole archive/sh_func2_linked_func1/meson.build b/test cases/common/145 whole archive/sh_func2_linked_func1/meson.build
new file mode 100644
index 0000000..2858f65
--- /dev/null
+++ b/test cases/common/145 whole archive/sh_func2_linked_func1/meson.build
@@ -0,0 +1,3 @@
+# Nothing in func2.c uses func1, so the linker would throw it
+# away and thus linking the exe would fail.
+sh_func2_linked_func1 = shared_library('sh_func2_linked_func1', '../func2.c', link_whole : st_func1)
diff --git a/test cases/common/145 whole archive/sh_func2_transdep_func1/meson.build b/test cases/common/145 whole archive/sh_func2_transdep_func1/meson.build
new file mode 100644
index 0000000..0703077
--- /dev/null
+++ b/test cases/common/145 whole archive/sh_func2_transdep_func1/meson.build
@@ -0,0 +1,6 @@
+# Same as sh_func2_dep_func1, but the dependency is transitive.
+# func2.c does not reference func1(), so without link_whole the compiler
+# would throw func1() away.
+sh_func2_transdep_func1 = shared_library(
+ 'sh_func2_transdep_func1', '../func2.c',
+ dependencies : func1_trans_dep)
diff --git a/test cases/common/145 whole archive/sh_only_link_whole/meson.build b/test cases/common/145 whole archive/sh_only_link_whole/meson.build
new file mode 100644
index 0000000..64baabd
--- /dev/null
+++ b/test cases/common/145 whole archive/sh_only_link_whole/meson.build
@@ -0,0 +1 @@
+sh_only_link_whole = shared_library('sh_only_link_whole', link_whole : [st_func1, st_func2])
diff --git a/test cases/common/145 whole archive/shlib/meson.build b/test cases/common/145 whole archive/shlib/meson.build
deleted file mode 100644
index 34a1b78..0000000
--- a/test cases/common/145 whole archive/shlib/meson.build
+++ /dev/null
@@ -1,4 +0,0 @@
-# Nothing in dylib.c uses func1, so the linker would throw it
-# away and thus linking the exe would fail.
-dylib = shared_library('shlib', '../dylib.c',
- link_whole : stlib)
diff --git a/test cases/common/145 whole archive/st_func1/meson.build b/test cases/common/145 whole archive/st_func1/meson.build
new file mode 100644
index 0000000..c84d781
--- /dev/null
+++ b/test cases/common/145 whole archive/st_func1/meson.build
@@ -0,0 +1 @@
+st_func1 = static_library('st_func1', '../func1.c')
diff --git a/test cases/common/145 whole archive/st_func2/meson.build b/test cases/common/145 whole archive/st_func2/meson.build
new file mode 100644
index 0000000..2732f96
--- /dev/null
+++ b/test cases/common/145 whole archive/st_func2/meson.build
@@ -0,0 +1 @@
+st_func2 = static_library('st_func2', '../func2.c')
diff --git a/test cases/common/145 whole archive/stlib/meson.build b/test cases/common/145 whole archive/stlib/meson.build
deleted file mode 100644
index 07a434e..0000000
--- a/test cases/common/145 whole archive/stlib/meson.build
+++ /dev/null
@@ -1 +0,0 @@
-static = static_library('static', '../dylib.c')
diff --git a/test cases/common/145 whole archive/wholeshlib/meson.build b/test cases/common/145 whole archive/wholeshlib/meson.build
deleted file mode 100644
index 69a1995..0000000
--- a/test cases/common/145 whole archive/wholeshlib/meson.build
+++ /dev/null
@@ -1 +0,0 @@
-dylib2 = shared_library('link_whole', link_whole : [stlib, static])
diff --git a/test cases/common/16 configure file/config7.h.in b/test cases/common/16 configure file/config7.h.in
new file mode 100644
index 0000000..edd0bb3
--- /dev/null
+++ b/test cases/common/16 configure file/config7.h.in
@@ -0,0 +1,16 @@
+/* No escape */
+#define MESSAGE1 "${var1}"
+
+/* Single escape means no replace */
+#define MESSAGE2 "\${var1}"
+
+/* Replace pairs of escapes before '@' or '\@' with escape characters
+ * (note we have to double the number of pairs due to C string escaping)
+ */
+#define MESSAGE3 "\\\\${var1}"
+
+/* Pairs of escapes and then single escape to avoid replace */
+#define MESSAGE4 "\\\\\${var1}"
+
+/* Check escape character outside variables */
+#define MESSAGE5 "\\ ${ \${ \\\\${ \\\\\${"
diff --git a/test cases/common/16 configure file/meson.build b/test cases/common/16 configure file/meson.build
index eda0a8f..5c3a1a5 100644
--- a/test cases/common/16 configure file/meson.build
+++ b/test cases/common/16 configure file/meson.build
@@ -131,3 +131,21 @@ configure_file(
configuration : conf6
)
test('test6', executable('prog6', 'prog6.c'))
+
+# test empty install dir string
+cfile = configure_file(input : 'config.h.in',
+ output : 'do_not_get_installed.h',
+ install_dir : '',
+ configuration : conf)
+
+# Test escaping with cmake format
+conf7 = configuration_data()
+conf7.set('var1', 'foo')
+conf7.set('var2', 'bar')
+configure_file(
+ input : 'config7.h.in',
+ output : '@BASENAME@',
+ format : 'cmake',
+ configuration : conf7
+)
+test('test7', executable('prog7', 'prog7.c'))
diff --git a/test cases/common/16 configure file/prog7.c b/test cases/common/16 configure file/prog7.c
new file mode 100644
index 0000000..0bb7d13
--- /dev/null
+++ b/test cases/common/16 configure file/prog7.c
@@ -0,0 +1,10 @@
+#include <string.h>
+#include <config7.h>
+
+int main(int argc, char **argv) {
+ return strcmp(MESSAGE1, "foo")
+ || strcmp(MESSAGE2, "${var1}")
+ || strcmp(MESSAGE3, "\\foo")
+ || strcmp(MESSAGE4, "\\${var1}")
+ || strcmp(MESSAGE5, "\\ ${ ${ \\${ \\${");
+}
diff --git a/test cases/common/168 disabler/meson.build b/test cases/common/168 disabler/meson.build
index 7ca82b7..1956cd3 100644
--- a/test cases/common/168 disabler/meson.build
+++ b/test cases/common/168 disabler/meson.build
@@ -21,7 +21,7 @@ else
number = 2
endif
-assert(d == 0, 'Plain if handled incorrectly, value should be 0 but is @0@'.format(number))
+assert(number == 0, 'Plain if handled incorrectly, value should be 0 but is @0@'.format(number))
if d.found()
number = 1
@@ -29,6 +29,6 @@ else
number = 2
endif
-assert(d == 1, 'If found handled incorrectly, value should be 1 but is @0@'.format(number))
+assert(number == 2, 'If found handled incorrectly, value should be 2 but is @0@'.format(number))
diff --git a/test cases/common/174 preserve gendir/base.inp b/test cases/common/178 preserve gendir/base.inp
index df967b9..df967b9 100644
--- a/test cases/common/174 preserve gendir/base.inp
+++ b/test cases/common/178 preserve gendir/base.inp
diff --git a/test cases/common/174 preserve gendir/com/mesonbuild/subbie.inp b/test cases/common/178 preserve gendir/com/mesonbuild/subbie.inp
index df0f4e9..df0f4e9 100644
--- a/test cases/common/174 preserve gendir/com/mesonbuild/subbie.inp
+++ b/test cases/common/178 preserve gendir/com/mesonbuild/subbie.inp
diff --git a/test cases/common/174 preserve gendir/genprog.py b/test cases/common/178 preserve gendir/genprog.py
index 1e10998..1e10998 100755
--- a/test cases/common/174 preserve gendir/genprog.py
+++ b/test cases/common/178 preserve gendir/genprog.py
diff --git a/test cases/common/174 preserve gendir/meson.build b/test cases/common/178 preserve gendir/meson.build
index ce219f0..ce219f0 100644
--- a/test cases/common/174 preserve gendir/meson.build
+++ b/test cases/common/178 preserve gendir/meson.build
diff --git a/test cases/common/174 preserve gendir/testprog.c b/test cases/common/178 preserve gendir/testprog.c
index 46b4602..46b4602 100644
--- a/test cases/common/174 preserve gendir/testprog.c
+++ b/test cases/common/178 preserve gendir/testprog.c
diff --git a/test cases/common/179 source in dep/bar.cpp b/test cases/common/179 source in dep/bar.cpp
new file mode 100644
index 0000000..bda8cb6
--- /dev/null
+++ b/test cases/common/179 source in dep/bar.cpp
@@ -0,0 +1,5 @@
+extern "C" int foo();
+
+int main(int, char**) {
+ return foo() != 42;
+}
diff --git a/test cases/common/179 source in dep/foo.c b/test cases/common/179 source in dep/foo.c
new file mode 100644
index 0000000..1ecfa8c
--- /dev/null
+++ b/test cases/common/179 source in dep/foo.c
@@ -0,0 +1,3 @@
+int foo() {
+ return 42;
+}
diff --git a/test cases/common/179 source in dep/meson.build b/test cases/common/179 source in dep/meson.build
new file mode 100644
index 0000000..e2c007e
--- /dev/null
+++ b/test cases/common/179 source in dep/meson.build
@@ -0,0 +1,6 @@
+project('foo', 'c', 'cpp')
+
+dep = declare_dependency(sources : 'foo.c')
+
+executable('bar', 'bar.cpp',
+ dependencies : dep)
diff --git a/test cases/common/180 generator link whole/export.h b/test cases/common/180 generator link whole/export.h
new file mode 100644
index 0000000..f4f6f45
--- /dev/null
+++ b/test cases/common/180 generator link whole/export.h
@@ -0,0 +1,18 @@
+#pragma once
+
+#if defined BUILDING_EMBEDDED
+ #define DLL_PUBLIC
+#elif defined _WIN32 || defined __CYGWIN__
+ #if defined BUILDING_DLL
+ #define DLL_PUBLIC __declspec(dllexport)
+ #else
+ #define DLL_PUBLIC __declspec(dllimport)
+ #endif
+#else
+ #if defined __GNUC__
+ #define DLL_PUBLIC __attribute__ ((visibility("default")))
+ #else
+ #pragma message ("Compiler does not support symbol visibility.")
+ #define DLL_PUBLIC
+ #endif
+#endif
diff --git a/test cases/common/180 generator link whole/generator.py b/test cases/common/180 generator link whole/generator.py
new file mode 100755
index 0000000..0076b74
--- /dev/null
+++ b/test cases/common/180 generator link whole/generator.py
@@ -0,0 +1,30 @@
+#!/usr/bin/env python3
+
+import os
+import os.path
+import sys
+
+
+def main():
+ name = os.path.splitext(os.path.basename(sys.argv[1]))[0]
+ out = sys.argv[2]
+ hname = os.path.join(out, name + '.h')
+ cname = os.path.join(out, name + '.c')
+ print(os.getcwd(), hname)
+ with open(hname, 'w') as hfile:
+ hfile.write('''
+#pragma once
+#include "export.h"
+int DLL_PUBLIC {name}();
+'''.format(name=name))
+ with open(cname, 'w') as cfile:
+ cfile.write('''
+#include "{name}.h"
+int {name}() {{
+ return {size};
+}}
+'''.format(name=name, size=len(name)))
+
+
+if __name__ == '__main__':
+ main()
diff --git a/test cases/common/180 generator link whole/main.c b/test cases/common/180 generator link whole/main.c
new file mode 100644
index 0000000..acf8717
--- /dev/null
+++ b/test cases/common/180 generator link whole/main.c
@@ -0,0 +1,11 @@
+#include "meson_test_function.h"
+
+#include <stdio.h>
+
+int main() {
+ if (meson_test_function() != 19) {
+ printf("Bad meson_test_function()\n");
+ return 1;
+ }
+ return 0;
+}
diff --git a/test cases/common/180 generator link whole/meson.build b/test cases/common/180 generator link whole/meson.build
new file mode 100644
index 0000000..30ae9c6
--- /dev/null
+++ b/test cases/common/180 generator link whole/meson.build
@@ -0,0 +1,65 @@
+project('generator link_whole', 'c')
+
+cc = meson.get_compiler('c')
+if cc.get_id() == 'msvc'
+ if cc.version().version_compare('<19')
+ error('MESON_SKIP_TEST link_whole only works on VS2015 or newer.')
+ endif
+endif
+
+# The generator produces <basename>.h and <basename>.c, defining int <basename>().
+gen_py = find_program('generator.py')
+gen = generator(gen_py,
+ output: ['@BASENAME@.h', '@BASENAME@.c'],
+ arguments : ['@INPUT@', '@BUILD_DIR@'])
+
+# Test 1: link directly into executable
+srcs = gen.process('meson_test_function.tmpl')
+exe = executable('exe1', [srcs, 'main.c'], c_args : '-DBUILDING_EMBEDDED')
+test('test1', exe)
+
+# Test 2: link into shared library and access from executable
+srcs = gen.process('meson_test_function.tmpl')
+shlib2 = shared_library('shlib2', [srcs], c_args : '-DBUILDING_DLL')
+exe = executable('exe2', 'main.c',
+ link_with : shlib2,
+ include_directories : shlib2.private_dir_include(),
+)
+test('test2', exe)
+
+# Test 3: link into static library and access from executable
+srcs = gen.process('meson_test_function.tmpl')
+stlib3 = static_library('stlib3', [srcs], c_args : '-DBUILDING_EMBEDDED')
+exe = executable('exe3', 'main.c',
+ c_args : '-DBUILDING_EMBEDDED',
+ link_with : stlib3,
+ include_directories : stlib3.private_dir_include(),
+)
+test('test3', exe)
+
+# Test 4: link into static library, link into shared
+# and access from executable. The pull_meson_test_function helper
+# makes sure the static library is not dropped.
+srcs = gen.process('meson_test_function.tmpl')
+stlib4 = static_library('stlib4', [srcs], c_args : '-DBUILDING_DLL')
+shlib4 = shared_library('shlib4', 'pull_meson_test_function.c',
+ c_args : '-DBUILDING_DLL',
+ link_with : stlib4,
+ include_directories : stlib4.private_dir_include(),
+)
+exe = executable('exe4', 'main.c',
+ link_with : shlib4,
+ include_directories : stlib4.private_dir_include(),
+)
+test('test4', exe)
+
+# Test 5: link into static library, link_whole into shared
+# and access from executable
+srcs = gen.process('meson_test_function.tmpl')
+stlib5 = static_library('stlib5', [srcs], c_args : '-DBUILDING_DLL')
+shlib5 = shared_library('shlib5', link_whole : stlib5)
+exe = executable('exe5', 'main.c',
+ link_with : shlib5,
+ include_directories : stlib5.private_dir_include(),
+)
+test('test5', exe)
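
Note on test 5 above: link_whole pulls every object from the static library into the shared library, so the generated meson_test_function() (whose name is 19 characters long, matching the check in main.c) is kept even though nothing in shlib5 references it. A minimal sketch of the pattern, with gen_srcs standing in for generator output:

    stlib = static_library('gen_objs', gen_srcs)
    shlib = shared_library('wrapper', link_whole : stlib)   # all objects are kept, referenced or not
    exe = executable('app', 'main.c', link_with : shlib)
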
diff --git a/test cases/common/180 generator link whole/meson_test_function.tmpl b/test cases/common/180 generator link whole/meson_test_function.tmpl
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/test cases/common/180 generator link whole/meson_test_function.tmpl
diff --git a/test cases/common/180 generator link whole/pull_meson_test_function.c b/test cases/common/180 generator link whole/pull_meson_test_function.c
new file mode 100644
index 0000000..c54dda6
--- /dev/null
+++ b/test cases/common/180 generator link whole/pull_meson_test_function.c
@@ -0,0 +1,6 @@
+#include "export.h"
+#include "meson_test_function.h"
+
+int DLL_PUBLIC function_puller() {
+ return meson_test_function();
+}
diff --git a/test cases/common/181 initial c_args/meson.build b/test cases/common/181 initial c_args/meson.build
new file mode 100644
index 0000000..70a6e7a
--- /dev/null
+++ b/test cases/common/181 initial c_args/meson.build
@@ -0,0 +1,7 @@
+project('options', 'c')
+
+# Test passing c_args and c_link_args options from the command line.
+assert(get_option('c_args') == ['-march=native', '-funroll-loops'],
+ 'Incorrect value for c_args option.')
+assert(get_option('c_link_args') == ['-random_linker_option'],
+ 'Incorrect value for c_link_args option.')
diff --git a/test cases/common/181 initial c_args/test_args.txt b/test cases/common/181 initial c_args/test_args.txt
new file mode 100644
index 0000000..9a6da06
--- /dev/null
+++ b/test cases/common/181 initial c_args/test_args.txt
@@ -0,0 +1,4 @@
+# This file is not read by meson itself, but by the test framework.
+# It is not possible to pass arguments to meson from a file.
+['-Dc_args=-march=native', '-Dc_args=-funroll-loops',
+ '-Dc_link_args=-random_linker_option']
diff --git a/test cases/common/182 identical target name in subproject flat layout/foo.c b/test cases/common/182 identical target name in subproject flat layout/foo.c
new file mode 100644
index 0000000..ed42789
--- /dev/null
+++ b/test cases/common/182 identical target name in subproject flat layout/foo.c
@@ -0,0 +1 @@
+int meson_test_main_foo(void) { return 10; }
diff --git a/test cases/common/182 identical target name in subproject flat layout/main.c b/test cases/common/182 identical target name in subproject flat layout/main.c
new file mode 100644
index 0000000..6f02aeb
--- /dev/null
+++ b/test cases/common/182 identical target name in subproject flat layout/main.c
@@ -0,0 +1,16 @@
+#include <stdio.h>
+
+int meson_test_main_foo(void);
+int meson_test_subproj_foo(void);
+
+int main(void) {
+ if (meson_test_main_foo() != 10) {
+ printf("Failed meson_test_main_foo\n");
+ return 1;
+ }
+ if (meson_test_subproj_foo() != 20) {
+ printf("Failed meson_test_subproj_foo\n");
+ return 1;
+ }
+ return 0;
+}
diff --git a/test cases/common/182 identical target name in subproject flat layout/meson.build b/test cases/common/182 identical target name in subproject flat layout/meson.build
new file mode 100644
index 0000000..d859fda
--- /dev/null
+++ b/test cases/common/182 identical target name in subproject flat layout/meson.build
@@ -0,0 +1,11 @@
+project('subproject targets', 'c')
+
+# The idea behind this test is to create targets with identical names
+# but different output files. We can do this by choosing a different
+# name_prefix for each library. The target id does not depend on name_prefix.
+
+main_foo = static_library('foo', 'foo.c', name_prefix : 'main')
+subproj_foo = subproject('subproj').get_variable('foo')
+
+exe = executable('prog', 'main.c', link_with : [main_foo, subproj_foo])
+test('main test', exe)
diff --git a/test cases/common/182 identical target name in subproject flat layout/subprojects/subproj/foo.c b/test cases/common/182 identical target name in subproject flat layout/subprojects/subproj/foo.c
new file mode 100644
index 0000000..f334292
--- /dev/null
+++ b/test cases/common/182 identical target name in subproject flat layout/subprojects/subproj/foo.c
@@ -0,0 +1 @@
+int meson_test_subproj_foo(void) { return 20; }
diff --git a/test cases/common/182 identical target name in subproject flat layout/subprojects/subproj/meson.build b/test cases/common/182 identical target name in subproject flat layout/subprojects/subproj/meson.build
new file mode 100644
index 0000000..c927194
--- /dev/null
+++ b/test cases/common/182 identical target name in subproject flat layout/subprojects/subproj/meson.build
@@ -0,0 +1,3 @@
+project('subproj', 'c')
+
+foo = static_library('foo', 'foo.c', name_prefix : 'subproj')
diff --git a/test cases/common/184 as-needed/config.h b/test cases/common/184 as-needed/config.h
new file mode 100644
index 0000000..b8fb60f
--- /dev/null
+++ b/test cases/common/184 as-needed/config.h
@@ -0,0 +1,14 @@
+#if defined _WIN32 || defined __CYGWIN__
+ #if defined BUILDING_DLL
+ #define DLL_PUBLIC __declspec(dllexport)
+ #else
+ #define DLL_PUBLIC __declspec(dllimport)
+ #endif
+#else
+ #if defined __GNUC__
+ #define DLL_PUBLIC __attribute__ ((visibility("default")))
+ #else
+ #pragma message ("Compiler does not support symbol visibility.")
+ #define DLL_PUBLIC
+ #endif
+#endif
diff --git a/test cases/common/184 as-needed/libA.cpp b/test cases/common/184 as-needed/libA.cpp
new file mode 100644
index 0000000..5f45bc0
--- /dev/null
+++ b/test cases/common/184 as-needed/libA.cpp
@@ -0,0 +1,7 @@
+#define BUILDING_DLL
+
+#include "libA.h"
+
+namespace meson_test_as_needed {
+ DLL_PUBLIC bool linked = false;
+}
diff --git a/test cases/common/184 as-needed/libA.h b/test cases/common/184 as-needed/libA.h
new file mode 100644
index 0000000..8e76d22
--- /dev/null
+++ b/test cases/common/184 as-needed/libA.h
@@ -0,0 +1,5 @@
+#include "config.h"
+
+namespace meson_test_as_needed {
+ DLL_PUBLIC extern bool linked;
+}
diff --git a/test cases/common/184 as-needed/libB.cpp b/test cases/common/184 as-needed/libB.cpp
new file mode 100644
index 0000000..a872394
--- /dev/null
+++ b/test cases/common/184 as-needed/libB.cpp
@@ -0,0 +1,19 @@
+#include "libA.h"
+
+#undef DLL_PUBLIC
+#define BUILDING_DLL
+#include "config.h"
+
+namespace meson_test_as_needed {
+ namespace {
+ bool set_linked() {
+ linked = true;
+ return true;
+ }
+ bool stub = set_linked();
+ }
+
+ DLL_PUBLIC int libB_unused_func() {
+ return 0;
+ }
+}
diff --git a/test cases/common/184 as-needed/main.cpp b/test cases/common/184 as-needed/main.cpp
new file mode 100644
index 0000000..191d15c
--- /dev/null
+++ b/test cases/common/184 as-needed/main.cpp
@@ -0,0 +1,7 @@
+#include <cstdlib>
+
+#include "libA.h"
+
+int main() {
+ return !meson_test_as_needed::linked ? EXIT_SUCCESS : EXIT_FAILURE;
+}
diff --git a/test cases/common/184 as-needed/meson.build b/test cases/common/184 as-needed/meson.build
new file mode 100644
index 0000000..3b54aaa
--- /dev/null
+++ b/test cases/common/184 as-needed/meson.build
@@ -0,0 +1,13 @@
+project('as-needed test', 'cpp')
+
+# The idea behind this test is to have -Wl,--as-needed prune
+# away unneeded linkages, which would otherwise let a global
+# static initialiser side effect set a boolean to true.
+
+# Credits for portable ISO C++ idea go to sarum9in
+
+libA = library('A', 'libA.cpp')
+libB = library('B', 'libB.cpp', link_with : libA)
+
+main_exe = executable('C', 'main.cpp', link_with : [libA, libB])
+test('main test', main_exe)
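
In short: main.cpp links against both libraries but only references a symbol from libA, so with as-needed linking libB is pruned, its static initialiser never runs, and 'linked' stays false. As a hedged sketch, the flag could also be forced explicitly on GNU-style linkers (this test itself relies on the toolchain/build-system defaults):

    add_project_link_arguments('-Wl,--as-needed', language : 'cpp')
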
diff --git a/test cases/common/185 ndebug if-release enabled/main.c b/test cases/common/185 ndebug if-release enabled/main.c
new file mode 100644
index 0000000..984ebca
--- /dev/null
+++ b/test cases/common/185 ndebug if-release enabled/main.c
@@ -0,0 +1,15 @@
+#include <assert.h>
+#include <stdlib.h>
+
+int meson_test_side_effect = EXIT_FAILURE;
+
+int meson_test_set_side_effect(void) {
+ meson_test_side_effect = EXIT_SUCCESS;
+ return 1;
+}
+
+int main(void) {
+ // meson_test_side_effect is set only if assert is executed
+ assert(meson_test_set_side_effect());
+ return meson_test_side_effect;
+}
diff --git a/test cases/common/185 ndebug if-release enabled/meson.build b/test cases/common/185 ndebug if-release enabled/meson.build
new file mode 100644
index 0000000..be26375
--- /dev/null
+++ b/test cases/common/185 ndebug if-release enabled/meson.build
@@ -0,0 +1,7 @@
+project('ndebug enabled', 'c',
+ default_options : [
+ 'buildtype=debugoptimized',
+ 'b_ndebug=if-release',
+ ])
+
+test('exe', executable('main', 'main.c'))
diff --git a/test cases/common/186 ndebug if-release disabled/main.c b/test cases/common/186 ndebug if-release disabled/main.c
new file mode 100644
index 0000000..cb3ec3f
--- /dev/null
+++ b/test cases/common/186 ndebug if-release disabled/main.c
@@ -0,0 +1,7 @@
+#include <assert.h>
+#include <stdlib.h>
+
+int main(void) {
+ assert(0);
+ return EXIT_SUCCESS;
+}
diff --git a/test cases/common/186 ndebug if-release disabled/meson.build b/test cases/common/186 ndebug if-release disabled/meson.build
new file mode 100644
index 0000000..a9a79ea
--- /dev/null
+++ b/test cases/common/186 ndebug if-release disabled/meson.build
@@ -0,0 +1,7 @@
+project('ndebug disabled', 'c',
+ default_options : [
+ 'buildtype=release',
+ 'b_ndebug=if-release',
+ ])
+
+test('exe', executable('main', 'main.c'))
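
Taken together, tests 185 and 186 pin down b_ndebug=if-release: with buildtype=debugoptimized asserts stay enabled, so the assert's side effect runs and main() returns EXIT_SUCCESS, while with buildtype=release NDEBUG is defined, assert(0) compiles away, and main() again returns EXIT_SUCCESS. Roughly the same configuration from the command line (illustrative):

    meson builddir -Dbuildtype=release -Db_ndebug=if-release
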
diff --git a/test cases/common/187 subproject version/meson.build b/test cases/common/187 subproject version/meson.build
new file mode 100644
index 0000000..bd8fc03
--- /dev/null
+++ b/test cases/common/187 subproject version/meson.build
@@ -0,0 +1,10 @@
+project('subproject version', 'c',
+ version : '2.3.4',
+ license: 'mylicense')
+
+subproject('a')
+
+liba_dep = dependency('a',
+ fallback: ['a', 'liba_dep'],
+ version: ['>= 0.30.0', '!= 0.99.0'])
+
diff --git a/test cases/common/187 subproject version/subprojects/a/meson.build b/test cases/common/187 subproject version/subprojects/a/meson.build
new file mode 100644
index 0000000..dae3130
--- /dev/null
+++ b/test cases/common/187 subproject version/subprojects/a/meson.build
@@ -0,0 +1,5 @@
+project('mysubproject', 'c',
+ version : '1.0.0',
+ license : 'sublicense')
+
+liba_dep = declare_dependency (version : '1.0.0')
diff --git a/test cases/common/188 subdir_done/meson.build b/test cases/common/188 subdir_done/meson.build
new file mode 100644
index 0000000..5692f3a
--- /dev/null
+++ b/test cases/common/188 subdir_done/meson.build
@@ -0,0 +1,10 @@
+# Should run, even though main.cpp does not exist and we call error() in the last line.
+# subdir_done() jumps to the end, so neither of those lines is executed.
+
+project('example exit', 'cpp')
+
+subdir_done()
+
+executable('main', 'main.cpp')
+error('Unreachable')
+
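
subdir_done() is mostly useful for bailing out of a build file early; a hedged sketch of a more typical use (the dependency name is hypothetical):

    dep = dependency('something-optional', required : false)
    if not dep.found()
      subdir_done()   # skip the rest of this meson.build
    endif
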
diff --git a/test cases/common/189 bothlibraries/libfile.c b/test cases/common/189 bothlibraries/libfile.c
new file mode 100644
index 0000000..085ef3b
--- /dev/null
+++ b/test cases/common/189 bothlibraries/libfile.c
@@ -0,0 +1,7 @@
+#include "mylib.h"
+
+DO_EXPORT int retval = 42;
+
+DO_EXPORT int func() {
+ return retval;
+}
diff --git a/test cases/common/189 bothlibraries/main.c b/test cases/common/189 bothlibraries/main.c
new file mode 100644
index 0000000..03a8e02
--- /dev/null
+++ b/test cases/common/189 bothlibraries/main.c
@@ -0,0 +1,8 @@
+#include "mylib.h"
+
+DO_IMPORT int func();
+DO_IMPORT int retval;
+
+int main(int argc, char **arg) {
+ return func() == retval ? 0 : 1;
+}
diff --git a/test cases/common/189 bothlibraries/meson.build b/test cases/common/189 bothlibraries/meson.build
new file mode 100644
index 0000000..3a13d62
--- /dev/null
+++ b/test cases/common/189 bothlibraries/meson.build
@@ -0,0 +1,12 @@
+project('both libraries linking test', 'c')
+
+both_libs = both_libraries('mylib', 'libfile.c')
+exe_shared = executable('prog-shared', 'main.c', link_with : both_libs.get_shared_lib())
+exe_static = executable('prog-static', 'main.c',
+ c_args : ['-DSTATIC_COMPILATION'],
+ link_with : both_libs.get_static_lib())
+exe_both = executable('prog-both', 'main.c', link_with : both_libs)
+
+test('runtest-shared', exe_shared)
+test('runtest-static', exe_static)
+test('runtest-both', exe_both)
diff --git a/test cases/common/189 bothlibraries/mylib.h b/test cases/common/189 bothlibraries/mylib.h
new file mode 100644
index 0000000..1038a01
--- /dev/null
+++ b/test cases/common/189 bothlibraries/mylib.h
@@ -0,0 +1,13 @@
+#pragma once
+
+#ifdef _WIN32
+ #ifdef STATIC_COMPILATION
+ #define DO_IMPORT extern
+ #else
+ #define DO_IMPORT __declspec(dllimport)
+ #endif
+ #define DO_EXPORT __declspec(dllexport)
+#else
+ #define DO_IMPORT extern
+ #define DO_EXPORT
+#endif
diff --git a/test cases/common/51 pkgconfig-gen/dependencies/meson.build b/test cases/common/51 pkgconfig-gen/dependencies/meson.build
index a767eb5..047e7e7 100644
--- a/test cases/common/51 pkgconfig-gen/dependencies/meson.build
+++ b/test cases/common/51 pkgconfig-gen/dependencies/meson.build
@@ -1,38 +1,50 @@
-project('pkgconfig-gen-dependencies', 'c')
+project('pkgconfig-gen-dependencies', 'c', version: '1.0')
pkgg = import('pkgconfig')
# libmain internally use libinternal and expose libexpose in its API
exposed_lib = shared_library('libexposed', 'exposed.c')
internal_lib = shared_library('libinternal', 'internal.c')
-main_lib = shared_library('libmain', link_with : [exposed_lib, internal_lib])
+main_lib = both_libraries('libmain', link_with : [exposed_lib, internal_lib])
-pkgg.generate(libraries : exposed_lib,
- version : '1.0',
- name : 'libexposed',
- description : 'An exposed library in dependency test.'
-)
+pkgg.generate(exposed_lib)
# Declare a few different Dependency objects
-pc_dep = dependency('libfoo')
+pc_dep = dependency('libfoo', version : '>=1.0')
+pc_dep_dup = dependency('libfoo', version : '>= 1.0')
notfound_dep = dependency('notfound', required : false)
threads_dep = dependency('threads')
custom_dep = declare_dependency(link_args : ['-lcustom'], compile_args : ['-DCUSTOM'])
custom2_dep = declare_dependency(link_args : ['-lcustom2'], compile_args : ['-DCUSTOM2'])
# Generate a PC file:
-# - Having libmain in libraries should pull implicitely libexposed and libinternal in Libs.private
+# - Having libmain in libraries should pull implicitly libexposed and libinternal in Libs.private
# - Having libexposed in libraries should remove it from Libs.private
# - We generated a pc file for libexposed so it should be in Requires instead of Libs
# - Having threads_dep in libraries should add '-pthread' in both Libs and Cflags
# - Having custom_dep in libraries and libraries_private should only add it in Libs
# - Having custom2_dep in libraries_private should not add its Cflags
# - Having pc_dep in libraries_private should add it in Requires.private
+# - pc_dep_dup is the same library and same version, should be ignored
# - notfound_dep is not required so it shouldn't appear in the pc file.
pkgg.generate(libraries : [main_lib, exposed_lib, threads_dep , custom_dep],
- libraries_private : [custom_dep, custom2_dep, pc_dep, notfound_dep],
+ libraries_private : [custom_dep, custom2_dep, pc_dep, pc_dep_dup, notfound_dep],
version : '1.0',
name : 'dependency-test',
filebase : 'dependency-test',
description : 'A dependency test.'
)
+
+pkgg.generate(
+ name : 'requires-test',
+ version : '1.0',
+ description : 'Dependency Requires field test.',
+ requires : [exposed_lib, pc_dep, 'libhello'],
+)
+
+pkgg.generate(
+ name : 'requires-private-test',
+ version : '1.0',
+ description : 'Dependency Requires.private field test.',
+ requires_private : [exposed_lib, pc_dep, 'libhello', notfound_dep],
+)
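
Given the comments above, dependency-test.pc should come out roughly in this shape (illustrative only; the -l spellings for libmain/libinternal are schematic, not the exact generated flags):

    Requires: libexposed
    Requires.private: libfoo >= 1.0
    Libs: -L${libdir} -lmain -pthread -lcustom
    Libs.private: -lcustom2 -linternal
    Cflags: -I${includedir} -pthread -DCUSTOM
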
diff --git a/test cases/common/51 pkgconfig-gen/meson.build b/test cases/common/51 pkgconfig-gen/meson.build
index f9d7f7f..7e6c670 100644
--- a/test cases/common/51 pkgconfig-gen/meson.build
+++ b/test cases/common/51 pkgconfig-gen/meson.build
@@ -46,3 +46,9 @@ pkgg.generate(
description : 'A foo library.',
variables : ['foo=bar', 'datadir=${prefix}/data']
)
+
+pkgg.generate(
+ name : 'libhello',
+ description : 'A minimalistic pkgconfig file.',
+ version : libver,
+)
diff --git a/test cases/common/64 custom header generator/meson.build b/test cases/common/64 custom header generator/meson.build
index 1222525..2279513 100644
--- a/test cases/common/64 custom header generator/meson.build
+++ b/test cases/common/64 custom header generator/meson.build
@@ -13,9 +13,9 @@ endif
gen = find_program('makeheader.py')
generated_h = custom_target('makeheader.py',
-output : 'myheader.lh', # Suffix not .h to ensure this works with custom suffixes, too.
-input : 'input.def',
-command : [gen, '@INPUT0@', '@OUTPUT0@', files('somefile.txt')])
+ output : 'myheader.lh', # Suffix not .h to ensure this works with custom suffixes, too.
+ input : 'input.def',
+ command : [gen, '@INPUT0@', '@OUTPUT0@', files('somefile.txt')])
prog = executable('prog', 'prog.c', generated_h)
test('gentest', prog)
diff --git a/test cases/common/72 build always/version_gen.py b/test cases/common/72 build always/version_gen.py
index d7b01ca..fbe2df9 100755
--- a/test cases/common/72 build always/version_gen.py
+++ b/test cases/common/72 build always/version_gen.py
@@ -6,14 +6,10 @@ def generate(infile, outfile, fallback):
workdir = os.path.split(infile)[0]
if workdir == '':
workdir = '.'
- version = fallback
try:
- p = subprocess.Popen(['git', 'describe'], cwd=workdir, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
- (stdo, _) = p.communicate()
- if p.returncode == 0:
- version = stdo.decode().strip()
- except:
- pass
+ version = subprocess.check_output(['git', 'describe'], cwd=workdir).decode().strip()
+ except (subprocess.CalledProcessError, OSError, UnicodeDecodeError):
+ version = fallback
with open(infile) as f:
newdata = f.read().replace('@VERSION@', version)
try:
@@ -21,7 +17,7 @@ def generate(infile, outfile, fallback):
olddata = f.read()
if olddata == newdata:
return
- except:
+ except OSError:
pass
with open(outfile, 'w') as f:
f.write(newdata)
diff --git a/test cases/common/98 gen extra/srcgen3.py b/test cases/common/98 gen extra/srcgen3.py
index ad0a5cb..b737114 100644
--- a/test cases/common/98 gen extra/srcgen3.py
+++ b/test cases/common/98 gen extra/srcgen3.py
@@ -1,6 +1,5 @@
#!/usr/bin/env python3
-import os
import sys
import argparse
diff --git a/test cases/csharp/1 basic/meson.build b/test cases/csharp/1 basic/meson.build
index 2ee6a4a..09e46c2 100644
--- a/test cases/csharp/1 basic/meson.build
+++ b/test cases/csharp/1 basic/meson.build
@@ -1,4 +1,4 @@
project('simple c#', 'cs')
-e = executable('prog', 'prog.cs', install : true)
+e = executable('prog', 'prog.cs', 'text.cs', install : true)
test('basic', e)
diff --git a/test cases/csharp/1 basic/prog.cs b/test cases/csharp/1 basic/prog.cs
index dfb2400..6ee47b0 100644
--- a/test cases/csharp/1 basic/prog.cs
+++ b/test cases/csharp/1 basic/prog.cs
@@ -1,7 +1,8 @@
using System;
-
+
public class Prog {
static public void Main () {
- Console.WriteLine("C# is working.");
+ TextGetter tg = new TextGetter();
+ Console.WriteLine(tg.getText());
}
}
diff --git a/test cases/csharp/1 basic/text.cs b/test cases/csharp/1 basic/text.cs
new file mode 100644
index 0000000..c83c424
--- /dev/null
+++ b/test cases/csharp/1 basic/text.cs
@@ -0,0 +1,7 @@
+using System;
+
+public class TextGetter {
+ public String getText() {
+ return "C# is working.";
+ }
+}
diff --git a/test cases/csharp/4 external dep/meson.build b/test cases/csharp/4 external dep/meson.build
index 004d25f..019d618 100644
--- a/test cases/csharp/4 external dep/meson.build
+++ b/test cases/csharp/4 external dep/meson.build
@@ -1,4 +1,9 @@
project('C# external library', 'cs')
-glib_sharp_2 = dependency('glib-sharp-2.0')
+glib_sharp_2 = dependency('glib-sharp-2.0', required : false)
+
+if not glib_sharp_2.found()
+ error('MESON_SKIP_TEST glib# not found.')
+endif
+
e = executable('prog', 'prog.cs', dependencies: glib_sharp_2, install : true)
test('libtest', e, args: [join_paths(meson.current_source_dir(), 'hello.txt')])
diff --git a/test cases/d/3 shared library/meson.build b/test cases/d/3 shared library/meson.build
index 78ad766..4616242 100644
--- a/test cases/d/3 shared library/meson.build
+++ b/test cases/d/3 shared library/meson.build
@@ -10,3 +10,12 @@ endif
ldyn = shared_library('stuff', 'libstuff.d', install : true)
ed = executable('app_d', 'app.d', link_with : ldyn, install : true)
test('linktest_dyn', ed)
+
+# test D attributes for pkg-config
+pkgc = import('pkgconfig')
+pkgc.generate(name: 'test',
+ libraries: ldyn,
+ subdirs: 'd/stuff',
+ description: 'A test of D attributes to pkgconfig.generate.',
+ d_module_versions: ['Use_Static']
+)
diff --git a/test cases/d/6 unittest/app.d b/test cases/d/6 unittest/app.d
index 751e754..71c6414 100644
--- a/test cases/d/6 unittest/app.d
+++ b/test cases/d/6 unittest/app.d
@@ -23,10 +23,14 @@ unittest
{
writeln ("TEST");
import core.stdc.stdlib : exit;
+ import second_unit;
assert (getFour () > 2);
assert (getFour () == 4);
+ // this is a regression test for https://github.com/mesonbuild/meson/issues/3337
+ secondModuleTestFunc ();
+
// we explicitly terminate here to give the unittest program a different exit
// code than the main application has.
// (this prevents the regular main() from being executed)
diff --git a/test cases/d/6 unittest/meson.build b/test cases/d/6 unittest/meson.build
index 1551e94..49a0700 100644
--- a/test cases/d/6 unittest/meson.build
+++ b/test cases/d/6 unittest/meson.build
@@ -1,8 +1,8 @@
project('D Unittests', 'd')
-e = executable('dapp', 'app.d', install : true)
+e = executable('dapp', ['app.d', 'second_unit.d'], install : true)
test('dapp_run', e, should_fail: true)
-e_test = executable('dapp_test', 'app.d',
- d_args: meson.get_compiler('d').unittest_args())
+e_test = executable('dapp_test', ['app.d', 'second_unit.d'],
+ d_unittest: true)
test('dapp_test', e_test)
diff --git a/test cases/d/6 unittest/second_unit.d b/test cases/d/6 unittest/second_unit.d
new file mode 100644
index 0000000..fdb62a9
--- /dev/null
+++ b/test cases/d/6 unittest/second_unit.d
@@ -0,0 +1,10 @@
+
+void secondModuleTestFunc ()
+{
+ import std.stdio : writeln;
+
+ version (unittest)
+ writeln ("Hello!");
+ else
+ assert (0);
+}
diff --git a/test cases/d/9 features/app.d b/test cases/d/9 features/app.d
index 37cc1dd..6b43bf0 100644
--- a/test cases/d/9 features/app.d
+++ b/test cases/d/9 features/app.d
@@ -3,6 +3,8 @@ import std.stdio;
import std.array : split;
import std.string : strip;
+import extra;
+
auto getMenu ()
{
auto foods = import ("food.txt").strip.split ("\n");
@@ -31,7 +33,12 @@ void main (string[] args)
version (With_People) {
if (request == "people") {
writeln ("People: ", getPeople.join (", "));
- exit (0);
+
+ // only exit successfully if the second module also had its module version set.
+ // this checks for issue https://github.com/mesonbuild/meson/issues/3337
+ if (secondModulePeopleVersionSet ())
+ exit (0);
+ exit (1);
}
}
diff --git a/test cases/d/9 features/extra.d b/test cases/d/9 features/extra.d
new file mode 100644
index 0000000..832b292
--- /dev/null
+++ b/test cases/d/9 features/extra.d
@@ -0,0 +1,9 @@
+
+auto secondModulePeopleVersionSet ()
+{
+ version (With_People) {
+ return true;
+ } else {
+ return false;
+ }
+}
diff --git a/test cases/d/9 features/meson.build b/test cases/d/9 features/meson.build
index 9e63710..694e488 100644
--- a/test cases/d/9 features/meson.build
+++ b/test cases/d/9 features/meson.build
@@ -1,18 +1,35 @@
project('D Features', 'd')
-# directory for data
+# ONLY FOR BACKWARDS COMPATIBILITY.
+# DO NOT DO THIS IN NEW CODE!
+# USE include_directories() INSTEAD OF BUILDING
+# STRINGS TO PATHS MANUALLY!
data_dir = join_paths(meson.current_source_dir(), 'data')
+test_src = ['app.d', 'extra.d']
+
+e_plain_bcompat = executable('dapp_menu_bcompat',
+ test_src,
+ d_import_dirs: [data_dir]
+)
+test('dapp_menu_t_fail_bcompat', e_plain_bcompat, should_fail: true)
+test('dapp_menu_t_bcompat', e_plain_bcompat, args: ['menu'])
+
+# directory for data
+# This is the correct way to do this.
+data_dir = include_directories('data')
+
e_plain = executable('dapp_menu',
- 'app.d',
+ test_src,
d_import_dirs: [data_dir]
)
test('dapp_menu_t_fail', e_plain, should_fail: true)
test('dapp_menu_t', e_plain, args: ['menu'])
+
# test feature versions and string imports
e_versions = executable('dapp_versions',
- 'app.d',
+ test_src,
d_import_dirs: [data_dir],
d_module_versions: ['No_Menu', 'With_People']
)
@@ -21,7 +38,7 @@ test('dapp_versions_t', e_versions, args: ['people'])
# test everything and unittests
e_test = executable('dapp_test',
- 'app.d',
+ test_src,
d_import_dirs: [data_dir],
d_module_versions: ['No_Menu', 'With_People'],
d_unittest: true
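
The extra.d module guards against https://github.com/mesonbuild/meson/issues/3337: d_module_versions (and d_unittest) must be applied to every D source in a target, not only the first one, otherwise secondModulePeopleVersionSet() falls into the version() else branch and the app exits with 1. A minimal sketch of the requirement:

    executable('app', ['app.d', 'extra.d'],
      d_module_versions : ['With_People'])   # the version flag must reach extra.d as well
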
diff --git a/test cases/failing/70 install_data rename bad size/file1.txt b/test cases/failing/70 install_data rename bad size/file1.txt
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/test cases/failing/70 install_data rename bad size/file1.txt
diff --git a/test cases/failing/70 install_data rename bad size/file2.txt b/test cases/failing/70 install_data rename bad size/file2.txt
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/test cases/failing/70 install_data rename bad size/file2.txt
diff --git a/test cases/failing/70 install_data rename bad size/meson.build b/test cases/failing/70 install_data rename bad size/meson.build
new file mode 100644
index 0000000..c7cde08
--- /dev/null
+++ b/test cases/failing/70 install_data rename bad size/meson.build
@@ -0,0 +1,3 @@
+project('data install test', 'c')
+
+install_data(['file1.txt', 'file2.txt'], rename : 'just one name')
diff --git a/test cases/failing/71 skip only subdir/meson.build b/test cases/failing/71 skip only subdir/meson.build
new file mode 100644
index 0000000..4832bd4
--- /dev/null
+++ b/test cases/failing/71 skip only subdir/meson.build
@@ -0,0 +1,8 @@
+# Check that subdir_done() only exits the subdir, not the whole script.
+# Should create an error because main.cpp does not exist.
+project('example exit', 'cpp')
+
+subdir('subdir')
+
+message('Good')
+executable('main', 'main.cpp')
diff --git a/test cases/failing/71 skip only subdir/subdir/meson.build b/test cases/failing/71 skip only subdir/subdir/meson.build
new file mode 100644
index 0000000..1ba447b
--- /dev/null
+++ b/test cases/failing/71 skip only subdir/subdir/meson.build
@@ -0,0 +1,3 @@
+subdir_done()
+
+error('Unreachable')
diff --git a/test cases/frameworks/1 boost/meson.build b/test cases/frameworks/1 boost/meson.build
index 771ecbc..df55b30 100644
--- a/test cases/frameworks/1 boost/meson.build
+++ b/test cases/frameworks/1 boost/meson.build
@@ -5,6 +5,11 @@ add_project_arguments(['-DBOOST_LOG_DYN_LINK'],
language : 'cpp'
)
+dep = dependency('boost', required: false)
+if not dep.found()
+ error('MESON_SKIP_TEST boost not found.')
+endif
+
# We want to have multiple separate configurations of Boost
# within one project. They need to be independent of each other.
# Use one without a library dependency and one with it.
diff --git a/test cases/frameworks/10 gtk-doc/include/meson.build b/test cases/frameworks/10 gtk-doc/include/meson.build
index f6dd99f..aa32885 100644
--- a/test cases/frameworks/10 gtk-doc/include/meson.build
+++ b/test cases/frameworks/10 gtk-doc/include/meson.build
@@ -13,4 +13,5 @@ generate_enums_docbook = find_program('generate-enums-docbook.py')
docbook = custom_target('enum-docbook',
output : 'bar.xml',
- command : [generate_enums_docbook, '@OUTPUT@', 'BAR', 'BAR_TYPE', 'BAR_FOO'])
+ command : [generate_enums_docbook, '@OUTPUT@', 'BAR', 'BAR_TYPE', 'BAR_FOO'],
+ build_by_default : true)
diff --git a/test cases/frameworks/10 gtk-doc/installed_files.txt.bak b/test cases/frameworks/10 gtk-doc/installed_files.txt
index 9004af2..6f8ca01 100644
--- a/test cases/frameworks/10 gtk-doc/installed_files.txt.bak
+++ b/test cases/frameworks/10 gtk-doc/installed_files.txt
@@ -1,13 +1,15 @@
+usr/include/foo-version.h
+usr/share/gtk-doc/html/foobar/BAR.html
usr/share/gtk-doc/html/foobar/foobar.devhelp2
-usr/share/gtk-doc/html/foobar/foobar-foo.html
usr/share/gtk-doc/html/foobar/foobar.html
+usr/share/gtk-doc/html/foobar/foobar-foo.html
+usr/share/gtk-doc/html/foobar/foobar-foo-version.html
usr/share/gtk-doc/html/foobar/home.png
usr/share/gtk-doc/html/foobar/index.html
-usr/share/gtk-doc/html/foobar/index.sgml
-usr/share/gtk-doc/html/foobar/left-insensitive.png
usr/share/gtk-doc/html/foobar/left.png
-usr/share/gtk-doc/html/foobar/right-insensitive.png
+usr/share/gtk-doc/html/foobar/left-insensitive.png
usr/share/gtk-doc/html/foobar/right.png
+usr/share/gtk-doc/html/foobar/right-insensitive.png
usr/share/gtk-doc/html/foobar/style.css
-usr/share/gtk-doc/html/foobar/up-insensitive.png
usr/share/gtk-doc/html/foobar/up.png
+usr/share/gtk-doc/html/foobar/up-insensitive.png
diff --git a/test cases/frameworks/10 gtk-doc/meson.build b/test cases/frameworks/10 gtk-doc/meson.build
index e5e7705..5c22ad0 100644
--- a/test cases/frameworks/10 gtk-doc/meson.build
+++ b/test cases/frameworks/10 gtk-doc/meson.build
@@ -1,5 +1,10 @@
project('gtkdoctest', 'c', version : '1.0.0')
+gtkdoc = find_program('gtkdoc-scan', required: false)
+if not gtkdoc.found()
+ error('MESON_SKIP_TEST gtkdoc not found.')
+endif
+
gnome = import('gnome')
assert(gnome.gtkdoc_html_dir('foobar') == 'share/gtk-doc/html/foobar', 'Gtkdoc install dir is incorrect.')
@@ -8,8 +13,15 @@ inc = include_directories('include')
subdir('include')
-# We have to disable this test until this bug fix has landed to
-# distros https://bugzilla.gnome.org/show_bug.cgi?id=753145
-error('MESON_SKIP_TEST can not enable gtk-doc test until upstream fixes have landed.')
+# disable this test unless a bug fix for spaces in pathnames is present
+# https://bugzilla.gnome.org/show_bug.cgi?id=753145
+result = run_command(gtkdoc, ['--version'])
+gtkdoc_ver = result.stdout().strip()
+if gtkdoc_ver == ''
+ gtkdoc_ver = result.stderr().strip()
+endif
+if gtkdoc_ver.version_compare('<1.26')
+ error('MESON_SKIP_TEST gtk-doc test requires gtkdoc >= 1.26.')
+endif
subdir('doc')
diff --git a/test cases/frameworks/11 gir subproject/meson.build b/test cases/frameworks/11 gir subproject/meson.build
index f3bde40..a599ae9 100644
--- a/test cases/frameworks/11 gir subproject/meson.build
+++ b/test cases/frameworks/11 gir subproject/meson.build
@@ -1,5 +1,16 @@
project('gobject-introspection-with-subproject', 'c')
+gir = find_program('g-ir-scanner', required: false)
+if not gir.found()
+ error('MESON_SKIP_TEST g-ir-scanner not found.')
+endif
+
+python3 = import('python3')
+py3 = python3.find_python()
+if run_command(py3, '-c', 'import gi;').returncode() != 0
+ error('MESON_SKIP_TEST python3-gi not found')
+endif
+
gnome = import('gnome')
gobj = dependency('gobject-2.0')
@@ -7,4 +18,3 @@ add_global_arguments('-DMESON_TEST', language : 'c')
meson_gir = dependency('meson-gir', fallback : ['mesongir', 'meson_gir'])
subdir('gir')
-
diff --git a/test cases/frameworks/12 multiple gir/meson.build b/test cases/frameworks/12 multiple gir/meson.build
index 794abc5..ddc9830 100644
--- a/test cases/frameworks/12 multiple gir/meson.build
+++ b/test cases/frameworks/12 multiple gir/meson.build
@@ -1,5 +1,10 @@
project('multiple-gobject-introspection', 'c')
+gir = find_program('g-ir-scanner', required: false)
+if not gir.found()
+ error('MESON_SKIP_TEST g-ir-scanner not found.')
+endif
+
gnome = import('gnome')
gobj = dependency('gobject-2.0')
diff --git a/test cases/frameworks/13 yelp/meson.build b/test cases/frameworks/13 yelp/meson.build
index 725ff7b..9fdde25 100644
--- a/test cases/frameworks/13 yelp/meson.build
+++ b/test cases/frameworks/13 yelp/meson.build
@@ -1,2 +1,8 @@
project('yelp', 'c')
+
+itstool = find_program('itstool', required: false)
+if not itstool.found()
+ error('MESON_SKIP_TEST itstool not found.')
+endif
+
subdir('help')
diff --git a/test cases/frameworks/14 doxygen/include/comedian.h b/test cases/frameworks/14 doxygen/include/comedian.h
index 97b5086..d62b283 100644
--- a/test cases/frameworks/14 doxygen/include/comedian.h
+++ b/test cases/frameworks/14 doxygen/include/comedian.h
@@ -11,7 +11,7 @@ namespace Comedy {
* Do the thing people want to happen.
*/
virtual void tell_joke() = 0;
- virtual ~Comedian();
+ virtual ~Comedian(){};
};
}
diff --git a/test cases/frameworks/14 doxygen/include/spede.h b/test cases/frameworks/14 doxygen/include/spede.h
index 8175465..380708a 100644
--- a/test cases/frameworks/14 doxygen/include/spede.h
+++ b/test cases/frameworks/14 doxygen/include/spede.h
@@ -29,10 +29,7 @@ namespace Comedy {
throw std::runtime_error("Not implemented");
}
+ private:
+ int num_movies; ///< How many movies has he done.
};
-
-
-private:
-
- int num_movies; ///< How many movies has he done.
}
diff --git a/test cases/frameworks/14 doxygen/meson.build b/test cases/frameworks/14 doxygen/meson.build
index 55df316..023aa0e 100644
--- a/test cases/frameworks/14 doxygen/meson.build
+++ b/test cases/frameworks/14 doxygen/meson.build
@@ -1,5 +1,11 @@
project('doxygen test', 'cpp', version : '0.1.0')
+spede_inc = include_directories('include')
+
+spede_src = [ 'src/spede.cpp' ]
+
+spede_lib = library('spede', spede_src, include_directories: spede_inc)
+
doxygen = find_program('doxygen', required : false)
if not doxygen.found()
error('MESON_SKIP_TEST doxygen not found.')
diff --git a/test cases/frameworks/14 doxygen/src/spede.cpp b/test cases/frameworks/14 doxygen/src/spede.cpp
index 31c8fb2..d382902 100644
--- a/test cases/frameworks/14 doxygen/src/spede.cpp
+++ b/test cases/frameworks/14 doxygen/src/spede.cpp
@@ -42,7 +42,7 @@ int gesticulate(int force) {
Spede::Spede() : num_movies(100) {
}
-Spede::slap_forehead() {
+void Spede::slap_forehead() {
gesticulate(42);
}
diff --git a/test cases/frameworks/15 llvm/meson.build b/test cases/frameworks/15 llvm/meson.build
index 549adce..b5505eb 100644
--- a/test cases/frameworks/15 llvm/meson.build
+++ b/test cases/frameworks/15 llvm/meson.build
@@ -1,5 +1,10 @@
project('llvmtest', ['c', 'cpp'], default_options : ['c_std=c99'])
+d = dependency('llvm', required : false)
+if not d.found()
+ error('MESON_SKIP_TEST llvm not found.')
+endif
+
d = dependency('llvm', modules : 'not-found', required : false)
assert(d.found() == false, 'not-found llvm module found')
@@ -12,7 +17,7 @@ assert(d.found() == true, 'optional module stopped llvm from being found.')
dep_tinfo = dependency('tinfo', required : false)
if not dep_tinfo.found()
cpp = meson.get_compiler('cpp')
- dep_tinfo = cpp.find_library('tinfo')
+ dep_tinfo = cpp.find_library('tinfo', required: false)
endif
foreach static : [true, false]
diff --git a/test cases/frameworks/16 sdl2/meson.build b/test cases/frameworks/16 sdl2/meson.build
index 61a34ef..1bbf09f 100644
--- a/test cases/frameworks/16 sdl2/meson.build
+++ b/test cases/frameworks/16 sdl2/meson.build
@@ -1,6 +1,10 @@
project('sdl2 test', 'c')
-sdl2_dep = dependency('sdl2', version : '>=2.0.0')
+sdl2_dep = dependency('sdl2', version : '>=2.0.0', required: false)
+
+if not sdl2_dep.found()
+ error('MESON_SKIP_TEST sdl2 not found.')
+endif
e = executable('sdl2prog', 'sdl2prog.c', dependencies : sdl2_dep)
diff --git a/test cases/frameworks/19 pcap/meson.build b/test cases/frameworks/19 pcap/meson.build
index f02f411..eb6fc2c 100644
--- a/test cases/frameworks/19 pcap/meson.build
+++ b/test cases/frameworks/19 pcap/meson.build
@@ -1,6 +1,10 @@
project('pcap test', 'c')
-pcap_dep = dependency('pcap', version : '>=1.0')
+pcap_dep = dependency('pcap', version : '>=1.0', required: false)
+
+if not pcap_dep.found()
+ error('MESON_SKIP_TEST pcap not found.')
+endif
pcap_ver = pcap_dep.version()
assert(pcap_ver.split('.').length() > 1, 'pcap version is "@0@"'.format(pcap_ver))
@@ -9,6 +13,6 @@ e = executable('pcap_prog', 'pcap_prog.c', dependencies : pcap_dep)
test('pcaptest', e)
-# Ensure discovery bia the configuration tools work also
+# Ensure that discovery via the configuration tools also works
pcap_dep = dependency('pcap', version : '>=1.0', method : 'pcap-config')
pcap_dep = dependency('pcap', version : '>=1.0', method : 'config-tool')
diff --git a/test cases/frameworks/19 pcap/pcap_prog.c b/test cases/frameworks/19 pcap/pcap_prog.c
index 18e0ad8..0fca16c 100644
--- a/test cases/frameworks/19 pcap/pcap_prog.c
+++ b/test cases/frameworks/19 pcap/pcap_prog.c
@@ -4,6 +4,12 @@ int
main()
{
char errbuf[PCAP_ERRBUF_SIZE];
- pcap_t *p = pcap_create(NULL, errbuf);
+#ifdef __APPLE__
+ // source = NULL for "any" doesn't work on macOS (linux only?)
+ char *source = "en0";
+#else
+ char *source = NULL;
+#endif
+ pcap_t *p = pcap_create(source, errbuf);
return p == NULL;
}
diff --git a/test cases/frameworks/20 cups/meson.build b/test cases/frameworks/20 cups/meson.build
index 11f6f63..9040de6 100644
--- a/test cases/frameworks/20 cups/meson.build
+++ b/test cases/frameworks/20 cups/meson.build
@@ -1,6 +1,10 @@
project('cups test', 'c')
-cups_dep = dependency('cups', version : '>=1.4')
+cups_dep = dependency('cups', version : '>=1.4', required: false)
+
+if not cups_dep.found()
+ error('MESON_SKIP_TEST cups not found.')
+endif
e = executable('cups_prog', 'cups_prog.c', dependencies : cups_dep)
diff --git a/test cases/frameworks/4 qt/meson.build b/test cases/frameworks/4 qt/meson.build
index b0e848d..e8d12b6 100644
--- a/test cases/frameworks/4 qt/meson.build
+++ b/test cases/frameworks/4 qt/meson.build
@@ -21,6 +21,14 @@ foreach qt : ['qt4', 'qt5']
error('Invalid qt dep incorrectly found!')
endif
+ # This test should be skipped if qt5 isn't found
+ if qt == 'qt5'
+ dep = dependency(qt, modules : ['Core'], required : false, method : get_option('method'))
+ if not dep.found()
+ error('MESON_SKIP_TEST qt5 not found.')
+ endif
+ endif
+
# Ensure that the "no-Core-module-specified" code branch is hit
nocoredep = dependency(qt, modules : ['Gui'], required : qt == 'qt5', method : get_option('method'))
@@ -48,7 +56,7 @@ foreach qt : ['qt4', 'qt5']
endif
# Test that setting a unique name with a positional argument works
- qtmodule.preprocess(qt + 'teststuff', qresources : ['stuff.qrc', 'stuff2.qrc'], method : get_option('method'))
+ qtmodule.preprocess(qt + 'teststuff', qresources : files(['stuff.qrc', 'stuff2.qrc']), method : get_option('method'))
qexe = executable(qt + 'app',
sources : ['main.cpp', 'mainWindow.cpp', # Sources that don't need preprocessing.
diff --git a/test cases/frameworks/4 qt/subfolder/generator.py b/test cases/frameworks/4 qt/subfolder/generator.py
new file mode 100644
index 0000000..045d99a
--- /dev/null
+++ b/test cases/frameworks/4 qt/subfolder/generator.py
@@ -0,0 +1,6 @@
+#!/usr/bin/env python3
+import sys
+
+if len(sys.argv) > 1:
+ with open(sys.argv[1], "w") as output:
+ output.write("Hello World")
diff --git a/test cases/frameworks/4 qt/subfolder/main.cpp b/test cases/frameworks/4 qt/subfolder/main.cpp
index 61cc9d4..9661811 100644
--- a/test cases/frameworks/4 qt/subfolder/main.cpp
+++ b/test cases/frameworks/4 qt/subfolder/main.cpp
@@ -1,9 +1,28 @@
#include <QImage>
+#include <QFile>
+#include <QString>
int main(int argc, char **argv) {
+ #ifndef UNITY_BUILD
Q_INIT_RESOURCE(stuff3);
- QImage qi(":/thing.png");
- if(qi.width() != 640) {
+ Q_INIT_RESOURCE(stuff4);
+ #endif
+
+ for(auto fname:{":/thing.png", ":/thing4.png"})
+ {
+ QImage img1(fname);
+ if(img1.width() != 640) {
+ return 1;
+ }
+ }
+
+ for(auto fname:{":/txt_resource.txt",":/txt_resource2.txt"})
+ {
+ QFile file(fname);
+ if (!file.open(QIODevice::ReadOnly | QIODevice::Text))
+ return 1;
+ QString line = file.readLine();
+ if(line.compare("Hello World"))
return 1;
}
return 0;
diff --git a/test cases/frameworks/4 qt/subfolder/meson.build b/test cases/frameworks/4 qt/subfolder/meson.build
index d3ff609..f1b84e6 100644
--- a/test cases/frameworks/4 qt/subfolder/meson.build
+++ b/test cases/frameworks/4 qt/subfolder/meson.build
@@ -1,4 +1,32 @@
-qresources = qtmodule.preprocess(qresources : 'resources/stuff3.qrc')
+simple_gen = find_program('generator.py', required : true)
-app = executable('subfolder', 'main.cpp', qresources, dependencies : qtdep)
+txt_resource = custom_target('txt_resource',
+ output : 'txt_resource.txt',
+ command : [simple_gen, '@OUTPUT@'],
+)
+
+cfg = configuration_data()
+
+cfg.set('filepath', meson.current_source_dir()+'/../thing2.png')
+cfg.set('txt_resource', txt_resource.full_path())
+# here we abuse the system by guessing build dir layout
+cfg.set('txt_resource2', 'txt_resource.txt')
+
+
+rc_file = configure_file(
+ configuration : cfg,
+ input : 'resources/stuff4.qrc.in',
+ output : 'stuff4.qrc',
+)
+
+extra_cpp_args = []
+if meson.is_unity()
+ extra_cpp_args += '-DUNITY_BUILD'
+ qresources = qtmodule.preprocess(qt + '_subfolder_unity_ressource',qresources : ['resources/stuff3.qrc', rc_file])
+else
+ qresources = qtmodule.preprocess(qresources : ['resources/stuff3.qrc', rc_file])
+endif
+
+app = executable('subfolder', 'main.cpp', qresources, dependencies : qtdep, cpp_args: extra_cpp_args)
+test(qt + 'subfolder', app)
diff --git a/test cases/frameworks/4 qt/subfolder/resources/stuff4.qrc.in b/test cases/frameworks/4 qt/subfolder/resources/stuff4.qrc.in
new file mode 100644
index 0000000..c30a358
--- /dev/null
+++ b/test cases/frameworks/4 qt/subfolder/resources/stuff4.qrc.in
@@ -0,0 +1,8 @@
+<!DOCTYPE RCC>
+<RCC version="1.0">
+ <qresource>
+ <file alias="thing4.png">@filepath@</file>
+ <file alias="txt_resource.txt">@txt_resource@</file>
+ <file alias="txt_resource2.txt">@txt_resource2@</file>
+ </qresource>
+</RCC>
diff --git a/test cases/frameworks/6 gettext/installed_files.txt b/test cases/frameworks/6 gettext/installed_files.txt
index ffe543f..879f56b 100644
--- a/test cases/frameworks/6 gettext/installed_files.txt
+++ b/test cases/frameworks/6 gettext/installed_files.txt
@@ -1,4 +1,4 @@
-usr/bin/intlprog
+usr/bin/intlprog?exe
usr/share/locale/de/LC_MESSAGES/intltest.mo
usr/share/locale/fi/LC_MESSAGES/intltest.mo
usr/share/applications/test.desktop
diff --git a/test cases/frameworks/6 gettext/meson.build b/test cases/frameworks/6 gettext/meson.build
index 6b517a4..e02234b 100644
--- a/test cases/frameworks/6 gettext/meson.build
+++ b/test cases/frameworks/6 gettext/meson.build
@@ -1,5 +1,14 @@
project('gettext example', 'c')
+gettext = find_program('gettext', required: false)
+if not gettext.found()
+ error('MESON_SKIP_TEST gettext not found.')
+endif
+
+if not meson.get_compiler('c').has_header('libintl.h')
+ error('MESON_SKIP_TEST libintl.h not found.')
+endif
+
i18n = import('i18n')
subdir('po')
diff --git a/test cases/frameworks/7 gnome/gdbus/meson.build b/test cases/frameworks/7 gnome/gdbus/meson.build
index ea91caa..57d7f23 100644
--- a/test cases/frameworks/7 gnome/gdbus/meson.build
+++ b/test cases/frameworks/7 gnome/gdbus/meson.build
@@ -1,3 +1,12 @@
+gdbus_src = gnome.gdbus_codegen('generated-gdbus-no-docbook', 'com.example.Sample.xml',
+ interface_prefix : 'com.example.',
+ namespace : 'Sample',
+ annotations : [
+ ['com.example.Hello()', 'org.freedesktop.DBus.Deprecated', 'true']
+ ],
+)
+assert(gdbus_src.length() == 2, 'expected 2 targets')
+
gdbus_src = gnome.gdbus_codegen('generated-gdbus', 'com.example.Sample.xml',
interface_prefix : 'com.example.',
namespace : 'Sample',
@@ -6,6 +15,7 @@ gdbus_src = gnome.gdbus_codegen('generated-gdbus', 'com.example.Sample.xml',
],
docbook : 'generated-gdbus-doc'
)
+assert(gdbus_src.length() == 3, 'expected 3 targets')
gdbus_exe = executable('gdbus-test', 'gdbusprog.c',
gdbus_src,
diff --git a/test cases/frameworks/7 gnome/installed_files.txt b/test cases/frameworks/7 gnome/installed_files.txt
index c7c704f..ac132ef 100644
--- a/test cases/frameworks/7 gnome/installed_files.txt
+++ b/test cases/frameworks/7 gnome/installed_files.txt
@@ -1,6 +1,7 @@
usr/include/enums.h
usr/include/enums2.h
usr/include/enums3.h
+usr/include/enums5.h
usr/include/marshaller.h
usr/lib/?libgir_lib.so
usr/lib/?libdep1lib.so
diff --git a/test cases/frameworks/7 gnome/meson.build b/test cases/frameworks/7 gnome/meson.build
index 795f458..03335b8 100644
--- a/test cases/frameworks/7 gnome/meson.build
+++ b/test cases/frameworks/7 gnome/meson.build
@@ -1,5 +1,16 @@
project('gobject-introspection', 'c')
+glib = dependency('glib-2.0', required: false)
+if not glib.found()
+ error('MESON_SKIP_TEST glib not found.')
+endif
+
+python3 = import('python3')
+py3 = python3.find_python()
+if run_command(py3, '-c', 'import gi;').returncode() != 0
+ error('MESON_SKIP_TEST python3-gi not found')
+endif
+
cc = meson.get_compiler('c')
add_global_arguments('-DMESON_TEST', language : 'c')
diff --git a/test cases/frameworks/7 gnome/mkenums/meson.build b/test cases/frameworks/7 gnome/mkenums/meson.build
index 9963455..44c21cb 100644
--- a/test cases/frameworks/7 gnome/mkenums/meson.build
+++ b/test cases/frameworks/7 gnome/mkenums/meson.build
@@ -123,6 +123,7 @@ enums4 = gnome.mkenums_simple('enums4', sources : 'meson-sample.h',
enumexe4 = executable('enumprog4', 'main4.c', enums4, dependencies : gobj)
enums5 = gnome.mkenums_simple('enums5', sources : 'meson-sample.h',
+ install_header : true,
decorator : 'MESON_EXPORT',
header_prefix : '#include "meson-decls.h"')
enumexe5 = executable('enumprog5', main, enums5, dependencies : gobj)
diff --git a/test cases/frameworks/8 flex/meson.build b/test cases/frameworks/8 flex/meson.build
index 13ac9f6..cb5efde 100644
--- a/test cases/frameworks/8 flex/meson.build
+++ b/test cases/frameworks/8 flex/meson.build
@@ -4,8 +4,16 @@ project('flex and bison', 'c')
# may output headers that are necessary to build
# the sources of a different generator.
-flex = find_program('flex')
-bison = find_program('bison')
+flex = find_program('flex', required: false)
+bison = find_program('bison', required: false)
+
+if not flex.found()
+ error('MESON_SKIP_TEST flex not found.')
+endif
+
+if not bison.found()
+ error('MESON_SKIP_TEST bison not found.')
+endif
lgen = generator(flex,
output : '@PLAINNAME@.yy.c',
@@ -23,4 +31,3 @@ e = executable('pgen', 'prog.c',
lfiles, pfiles)
test('parsertest', e)
-
diff --git a/test cases/frameworks/8 flex/prog.c b/test cases/frameworks/8 flex/prog.c
index 1e48f61..0b84d18 100644
--- a/test cases/frameworks/8 flex/prog.c
+++ b/test cases/frameworks/8 flex/prog.c
@@ -6,6 +6,8 @@
#include<stdio.h>
#include<stdlib.h>
+extern int yyparse();
+
int main(int argc, char **argv) {
/*
int input;
diff --git a/test cases/frameworks/9 wxwidgets/meson.build b/test cases/frameworks/9 wxwidgets/meson.build
index 5f9419c..d815a2d 100644
--- a/test cases/frameworks/9 wxwidgets/meson.build
+++ b/test cases/frameworks/9 wxwidgets/meson.build
@@ -7,4 +7,9 @@ if wxd.found()
wp = executable('wxprog', 'wxprog.cpp', dependencies : wxd)
test('wxtest', wp)
+
+ # The wxWidgets framework is available, so we can use a required dependency here
+ wx_stc = dependency('wxwidgets', version : '>=3.0.0', modules : ['std', 'stc'])
+ stc_exe = executable('wxstc', 'wxstc.cpp', dependencies : wx_stc)
+ test('wxstctest', stc_exe)
endif
diff --git a/test cases/frameworks/9 wxwidgets/wxstc.cpp b/test cases/frameworks/9 wxwidgets/wxstc.cpp
new file mode 100644
index 0000000..8499ff9
--- /dev/null
+++ b/test cases/frameworks/9 wxwidgets/wxstc.cpp
@@ -0,0 +1,6 @@
+#include <wx/stc/stc.h>
+
+int main() {
+ wxStyledTextCtrl *canvas = new wxStyledTextCtrl();
+ delete canvas;
+}
diff --git a/test cases/linuxlike/9 compiler checks with dependencies/meson.build b/test cases/linuxlike/9 compiler checks with dependencies/meson.build
index bebfb84..9f1755b 100644
--- a/test cases/linuxlike/9 compiler checks with dependencies/meson.build
+++ b/test cases/linuxlike/9 compiler checks with dependencies/meson.build
@@ -26,7 +26,7 @@ int main(int argc, char *argv[]) {
return ptr == 0;
}
'''
- assert (cc.has_function('deflate', prefix : '#include<zlib.h>', dependencies : zlib, name : 'Test for function in zlib'), 'has_function test failed.')
+ assert (cc.has_function('deflate', prefix : '#include<zlib.h>', dependencies : zlib), 'has_function test failed.')
assert (cc.links(linkcode, dependencies : zlib, name : 'Test link against zlib'), 'Linking test failed against zlib.')
endif
diff --git a/test cases/rust/7 private crate collision/installed_files.txt b/test cases/rust/7 private crate collision/installed_files.txt
new file mode 100644
index 0000000..06ebd77
--- /dev/null
+++ b/test cases/rust/7 private crate collision/installed_files.txt
@@ -0,0 +1,2 @@
+usr/bin/prog?exe
+usr/lib/librand.rlib
diff --git a/test cases/rust/7 private crate collision/meson.build b/test cases/rust/7 private crate collision/meson.build
new file mode 100644
index 0000000..81b6aab
--- /dev/null
+++ b/test cases/rust/7 private crate collision/meson.build
@@ -0,0 +1,5 @@
+project('rust private crate collision', 'rust')
+
+l = static_library('rand', 'rand.rs', install : true)
+e = executable('prog', 'prog.rs', link_with : l, install : true)
+test('linktest', e)
diff --git a/test cases/rust/7 private crate collision/prog.rs b/test cases/rust/7 private crate collision/prog.rs
new file mode 100644
index 0000000..b9a30f1
--- /dev/null
+++ b/test cases/rust/7 private crate collision/prog.rs
@@ -0,0 +1,3 @@
+extern crate rand;
+
+fn main() { println!("printing: {}", rand::explore()); }
diff --git a/test cases/rust/7 private crate collision/rand.rs b/test cases/rust/7 private crate collision/rand.rs
new file mode 100644
index 0000000..8a3d427
--- /dev/null
+++ b/test cases/rust/7 private crate collision/rand.rs
@@ -0,0 +1,4 @@
+// use a name that collides with one of the rustc_private libraries
+#![crate_name = "rand"]
+
+pub fn explore() -> &'static str { "librarystring" }
diff --git a/test cases/unit/13 testsetup selection/main.c b/test cases/unit/13 testsetup selection/main.c
new file mode 100644
index 0000000..cb3f748
--- /dev/null
+++ b/test cases/unit/13 testsetup selection/main.c
@@ -0,0 +1,3 @@
+int main() {
+ return 0;
+}
diff --git a/test cases/unit/13 testsetup selection/meson.build b/test cases/unit/13 testsetup selection/meson.build
new file mode 100644
index 0000000..ae996c5
--- /dev/null
+++ b/test cases/unit/13 testsetup selection/meson.build
@@ -0,0 +1,10 @@
+project('main', 'c')
+
+main = executable('main', 'main.c')
+test('Test main', main)
+
+add_test_setup('worksforall')
+add_test_setup('missingfromfoo')
+
+subproject('foo')
+subproject('bar')
diff --git a/test cases/unit/13 testsetup selection/subprojects/bar/bar.c b/test cases/unit/13 testsetup selection/subprojects/bar/bar.c
new file mode 100644
index 0000000..cb3f748
--- /dev/null
+++ b/test cases/unit/13 testsetup selection/subprojects/bar/bar.c
@@ -0,0 +1,3 @@
+int main() {
+ return 0;
+}
diff --git a/test cases/unit/13 testsetup selection/subprojects/bar/meson.build b/test cases/unit/13 testsetup selection/subprojects/bar/meson.build
new file mode 100644
index 0000000..1155a88
--- /dev/null
+++ b/test cases/unit/13 testsetup selection/subprojects/bar/meson.build
@@ -0,0 +1,6 @@
+project('bar', 'c')
+bar = executable('bar', 'bar.c')
+test('Test bar', bar)
+add_test_setup('onlyinbar')
+add_test_setup('worksforall')
+add_test_setup('missingfromfoo')
diff --git a/test cases/unit/13 testsetup selection/subprojects/foo/foo.c b/test cases/unit/13 testsetup selection/subprojects/foo/foo.c
new file mode 100644
index 0000000..cb3f748
--- /dev/null
+++ b/test cases/unit/13 testsetup selection/subprojects/foo/foo.c
@@ -0,0 +1,3 @@
+int main() {
+ return 0;
+}
diff --git a/test cases/unit/13 testsetup selection/subprojects/foo/meson.build b/test cases/unit/13 testsetup selection/subprojects/foo/meson.build
new file mode 100644
index 0000000..2eef840
--- /dev/null
+++ b/test cases/unit/13 testsetup selection/subprojects/foo/meson.build
@@ -0,0 +1,4 @@
+project('foo', 'c')
+foo = executable('foo', 'foo.c')
+test('Test foo', foo)
+add_test_setup('worksforall')
diff --git a/test cases/unit/23 compiler run_command/meson.build b/test cases/unit/23 compiler run_command/meson.build
new file mode 100644
index 0000000..6d9e0b9
--- /dev/null
+++ b/test cases/unit/23 compiler run_command/meson.build
@@ -0,0 +1,10 @@
+project('compiler_object_in_run_command', 'c')
+cc = meson.get_compiler('c')
+
+# This test only checks that the compiler object can be passed to
+# run_command(). If the compiler has been launched, it is expected
+# to output something either to stdout or to stderr.
+result = run_command(cc, '--version')
+if result.stdout() == '' and result.stderr() == ''
+ error('No output in stdout and stderr. Did the compiler run at all?')
+endif
diff --git a/test cases/unit/23 non-permitted kwargs/meson.build b/test cases/unit/23 non-permitted kwargs/meson.build
new file mode 100644
index 0000000..9f7dc1f
--- /dev/null
+++ b/test cases/unit/23 non-permitted kwargs/meson.build
@@ -0,0 +1,5 @@
+project('non-permitted kwargs', 'c')
+cc = meson.get_compiler('c')
+cc.has_header_symbol('stdio.h', 'printf', prefixxx: '#define XXX')
+cc.links('int main(){}', argsxx: '')
+cc.get_id(invalidxx: '')
diff --git a/test cases/unit/24 pkgconfig usage/dependee/meson.build b/test cases/unit/24 pkgconfig usage/dependee/meson.build
new file mode 100644
index 0000000..beb446c
--- /dev/null
+++ b/test cases/unit/24 pkgconfig usage/dependee/meson.build
@@ -0,0 +1,7 @@
+project('pkgconfig user', 'c')
+
+pkgdep = dependency('libpkgdep')
+
+executable('pkguser', 'pkguser.c',
+ dependencies : pkgdep)
+
diff --git a/test cases/unit/24 pkgconfig usage/dependee/pkguser.c b/test cases/unit/24 pkgconfig usage/dependee/pkguser.c
new file mode 100644
index 0000000..2bff316
--- /dev/null
+++ b/test cases/unit/24 pkgconfig usage/dependee/pkguser.c
@@ -0,0 +1,6 @@
+#include<pkgdep.h>
+
+int main(int argc, char **argv) {
+ int res = pkgdep();
+ return res != 99;
+}
diff --git a/test cases/unit/24 pkgconfig usage/dependency/meson.build b/test cases/unit/24 pkgconfig usage/dependency/meson.build
new file mode 100644
index 0000000..89fae8e
--- /dev/null
+++ b/test cases/unit/24 pkgconfig usage/dependency/meson.build
@@ -0,0 +1,24 @@
+project('pkgconfig dep', 'c',
+ version : '1.0.0')
+
+# This is not used in the code, only to check that it does not
+# leak out to --libs.
+glib_dep = dependency('glib-2.0')
+
+pkgconfig = import('pkgconfig')
+
+intlib = static_library('libpkgdep-int', 'privatelib.c')
+intdep = declare_dependency(link_with : intlib)
+
+lib = shared_library('pkgdep', 'pkgdep.c',
+ dependencies : [glib_dep, intdep],
+ install : true)
+
+install_headers('pkgdep.h')
+
+pkgconfig.generate(
+ filebase : 'libpkgdep',
+ name : 'Libpkgdep',
+ description : 'Sample pkgconfig dependency library',
+ version : meson.project_version(),
+ libraries : lib)
diff --git a/test cases/unit/24 pkgconfig usage/dependency/pkgdep.c b/test cases/unit/24 pkgconfig usage/dependency/pkgdep.c
new file mode 100644
index 0000000..bd5c3f4
--- /dev/null
+++ b/test cases/unit/24 pkgconfig usage/dependency/pkgdep.c
@@ -0,0 +1,7 @@
+#include<pkgdep.h>
+
+int internal_thingy();
+
+int pkgdep() {
+ return internal_thingy();
+}
diff --git a/test cases/unit/24 pkgconfig usage/dependency/pkgdep.h b/test cases/unit/24 pkgconfig usage/dependency/pkgdep.h
new file mode 100644
index 0000000..16d622e
--- /dev/null
+++ b/test cases/unit/24 pkgconfig usage/dependency/pkgdep.h
@@ -0,0 +1,3 @@
+#pragma once
+
+int pkgdep();
diff --git a/test cases/unit/24 pkgconfig usage/dependency/privatelib.c b/test cases/unit/24 pkgconfig usage/dependency/privatelib.c
new file mode 100644
index 0000000..71d2179
--- /dev/null
+++ b/test cases/unit/24 pkgconfig usage/dependency/privatelib.c
@@ -0,0 +1,3 @@
+int internal_thingy() {
+ return 99;
+}
diff --git a/test cases/unit/25 ndebug if-release/main.c b/test cases/unit/25 ndebug if-release/main.c
new file mode 100644
index 0000000..70b3d04
--- /dev/null
+++ b/test cases/unit/25 ndebug if-release/main.c
@@ -0,0 +1,11 @@
+#include <stdio.h>
+#include <stdlib.h>
+
+int main(void) {
+#ifdef NDEBUG
+ printf("NDEBUG=1\n");
+#else
+ printf("NDEBUG=0\n");
+#endif
+ return 0;
+}
diff --git a/test cases/unit/25 ndebug if-release/meson.build b/test cases/unit/25 ndebug if-release/meson.build
new file mode 100644
index 0000000..4af2406
--- /dev/null
+++ b/test cases/unit/25 ndebug if-release/meson.build
@@ -0,0 +1,3 @@
+project('ndebug enabled', 'c')
+
+executable('main', 'main.c')
diff --git a/test cases/unit/26 guessed linker dependencies/exe/app.c b/test cases/unit/26 guessed linker dependencies/exe/app.c
new file mode 100644
index 0000000..1031a42
--- /dev/null
+++ b/test cases/unit/26 guessed linker dependencies/exe/app.c
@@ -0,0 +1,6 @@
+void liba_func();
+
+int main() {
+ liba_func();
+ return 0;
+}
diff --git a/test cases/unit/26 guessed linker dependencies/exe/meson.build b/test cases/unit/26 guessed linker dependencies/exe/meson.build
new file mode 100644
index 0000000..8bb1bd7
--- /dev/null
+++ b/test cases/unit/26 guessed linker dependencies/exe/meson.build
@@ -0,0 +1,7 @@
+project('exe', ['c'])
+
+executable('app',
+ 'app.c',
+ # Use uninterpreted strings to avoid path finding by dependency or compiler.find_library
+ link_args: ['-ltest-lib']
+ )
diff --git a/test cases/unit/26 guessed linker dependencies/lib/lib.c b/test cases/unit/26 guessed linker dependencies/lib/lib.c
new file mode 100644
index 0000000..1a8f94d
--- /dev/null
+++ b/test cases/unit/26 guessed linker dependencies/lib/lib.c
@@ -0,0 +1,20 @@
+#if defined _WIN32
+ #define DLL_PUBLIC __declspec(dllexport)
+#else
+ #if defined __GNUC__
+ #define DLL_PUBLIC __attribute__ ((visibility("default")))
+ #else
+ #pragma message ("Compiler does not support symbol visibility.")
+ #define DLL_PUBLIC
+ #endif
+#endif
+
+void DLL_PUBLIC liba_func() {
+}
+
+#ifdef MORE_EXPORTS
+
+void DLL_PUBLIC libb_func() {
+}
+
+#endif
diff --git a/test cases/unit/26 guessed linker dependencies/lib/meson.build b/test cases/unit/26 guessed linker dependencies/lib/meson.build
new file mode 100644
index 0000000..36df112
--- /dev/null
+++ b/test cases/unit/26 guessed linker dependencies/lib/meson.build
@@ -0,0 +1,11 @@
+project('lib1', ['c'])
+
+c_args = []
+
+# Microsoft's compiler is quite smart about touching import libs on changes,
+# so ensure that there is actually a change in symbols.
+if get_option('more_exports')
+ c_args += '-DMORE_EXPORTS'
+endif
+
+a = library('test-lib', 'lib.c', c_args: c_args, install: true)
diff --git a/test cases/unit/26 guessed linker dependencies/lib/meson_options.txt b/test cases/unit/26 guessed linker dependencies/lib/meson_options.txt
new file mode 100644
index 0000000..2123e45
--- /dev/null
+++ b/test cases/unit/26 guessed linker dependencies/lib/meson_options.txt
@@ -0,0 +1 @@
+option('more_exports', type : 'boolean', value : false)
diff --git a/test cases/unit/26 shared_mod linking/libfile.c b/test cases/unit/26 shared_mod linking/libfile.c
new file mode 100644
index 0000000..44f7667
--- /dev/null
+++ b/test cases/unit/26 shared_mod linking/libfile.c
@@ -0,0 +1,14 @@
+#if defined _WIN32 || defined __CYGWIN__
+ #define DLL_PUBLIC __declspec(dllexport)
+#else
+ #if defined __GNUC__
+ #define DLL_PUBLIC __attribute__ ((visibility("default")))
+ #else
+ #pragma message ("Compiler does not support symbol visibility.")
+ #define DLL_PUBLIC
+ #endif
+#endif
+
+int DLL_PUBLIC func() {
+ return 0;
+}
diff --git a/test cases/unit/26 shared_mod linking/main.c b/test cases/unit/26 shared_mod linking/main.c
new file mode 100644
index 0000000..12f9c98
--- /dev/null
+++ b/test cases/unit/26 shared_mod linking/main.c
@@ -0,0 +1,11 @@
+#if defined _WIN32 || defined __CYGWIN__
+ #define DLL_IMPORT __declspec(dllimport)
+#else
+ #define DLL_IMPORT
+#endif
+
+int DLL_IMPORT func();
+
+int main(int argc, char **arg) {
+ return func();
+}
diff --git a/test cases/unit/26 shared_mod linking/meson.build b/test cases/unit/26 shared_mod linking/meson.build
new file mode 100644
index 0000000..994a5d3
--- /dev/null
+++ b/test cases/unit/26 shared_mod linking/meson.build
@@ -0,0 +1,5 @@
+project('shared library linking test', 'c', 'cpp')
+
+mod = shared_module('mymod', 'libfile.c')
+
+exe = executable('prog', 'main.c', link_with : mod, install : true)
\ No newline at end of file
diff --git a/test cases/unit/27 forcefallback/meson.build b/test cases/unit/27 forcefallback/meson.build
new file mode 100644
index 0000000..e6a90ea
--- /dev/null
+++ b/test cases/unit/27 forcefallback/meson.build
@@ -0,0 +1,8 @@
+project('mainproj', 'c',
+ default_options : ['wrap_mode=forcefallback'])
+
+zlib_dep = dependency('zlib', fallback: ['notzlib', 'zlib_dep'])
+
+test_not_zlib = executable('test_not_zlib', ['test_not_zlib.c'], dependencies: [zlib_dep])
+
+test('test_not_zlib', test_not_zlib)
diff --git a/test cases/unit/27 forcefallback/subprojects/notzlib/meson.build b/test cases/unit/27 forcefallback/subprojects/notzlib/meson.build
new file mode 100644
index 0000000..254a136
--- /dev/null
+++ b/test cases/unit/27 forcefallback/subprojects/notzlib/meson.build
@@ -0,0 +1,7 @@
+project('notzlib', 'c')
+
+notzlib_sources = ['notzlib.c']
+
+notzlib = library('notzlib', notzlib_sources)
+
+zlib_dep = declare_dependency(link_with: notzlib, include_directories: include_directories(['.']))
diff --git a/test cases/unit/27 forcefallback/subprojects/notzlib/notzlib.c b/test cases/unit/27 forcefallback/subprojects/notzlib/notzlib.c
new file mode 100644
index 0000000..c3b6bf9
--- /dev/null
+++ b/test cases/unit/27 forcefallback/subprojects/notzlib/notzlib.c
@@ -0,0 +1,6 @@
+#include "notzlib.h"
+
+int not_a_zlib_function (void)
+{
+ return 42;
+}
diff --git a/test cases/unit/27 forcefallback/subprojects/notzlib/notzlib.h b/test cases/unit/27 forcefallback/subprojects/notzlib/notzlib.h
new file mode 100644
index 0000000..695921d
--- /dev/null
+++ b/test cases/unit/27 forcefallback/subprojects/notzlib/notzlib.h
@@ -0,0 +1,18 @@
+#pragma once
+
+#if defined _WIN32 || defined __CYGWIN__
+#if defined BUILDING_DLL
+ #define DLL_PUBLIC __declspec(dllexport)
+#else
+ #define DLL_PUBLIC __declspec(dllimport)
+#endif
+#else
+ #if defined __GNUC__
+ #define DLL_PUBLIC __attribute__ ((visibility("default")))
+ #else
+ #pragma message ("Compiler does not support symbol visibility.")
+ #define DLL_PUBLIC
+ #endif
+#endif
+
+int DLL_PUBLIC not_a_zlib_function (void);
diff --git a/test cases/unit/27 forcefallback/test_not_zlib.c b/test cases/unit/27 forcefallback/test_not_zlib.c
new file mode 100644
index 0000000..36256af
--- /dev/null
+++ b/test cases/unit/27 forcefallback/test_not_zlib.c
@@ -0,0 +1,8 @@
+#include <notzlib.h>
+
+int main (int ac, char **av)
+{
+ if (not_a_zlib_function () != 42)
+ return 1;
+ return 0;
+}
diff --git a/test cases/unit/28 pkgconfig use libraries/app/app.c b/test cases/unit/28 pkgconfig use libraries/app/app.c
new file mode 100644
index 0000000..b271a9e
--- /dev/null
+++ b/test cases/unit/28 pkgconfig use libraries/app/app.c
@@ -0,0 +1,6 @@
+void libb_func();
+
+int main() {
+ libb_func();
+ return 0;
+}
diff --git a/test cases/unit/28 pkgconfig use libraries/app/meson.build b/test cases/unit/28 pkgconfig use libraries/app/meson.build
new file mode 100644
index 0000000..3d85a32
--- /dev/null
+++ b/test cases/unit/28 pkgconfig use libraries/app/meson.build
@@ -0,0 +1,5 @@
+project('app', ['c'])
+
+b = dependency('test-b')
+
+executable('app', 'app.c', dependencies : [b])
diff --git a/test cases/unit/28 pkgconfig use libraries/lib/liba.c b/test cases/unit/28 pkgconfig use libraries/lib/liba.c
new file mode 100644
index 0000000..e98906b
--- /dev/null
+++ b/test cases/unit/28 pkgconfig use libraries/lib/liba.c
@@ -0,0 +1,2 @@
+void liba_func() {
+}
diff --git a/test cases/unit/28 pkgconfig use libraries/lib/libb.c b/test cases/unit/28 pkgconfig use libraries/lib/libb.c
new file mode 100644
index 0000000..3160e5f
--- /dev/null
+++ b/test cases/unit/28 pkgconfig use libraries/lib/libb.c
@@ -0,0 +1,5 @@
+void liba_func();
+
+void libb_func() {
+ liba_func();
+}
diff --git a/test cases/unit/28 pkgconfig use libraries/lib/meson.build b/test cases/unit/28 pkgconfig use libraries/lib/meson.build
new file mode 100644
index 0000000..748adf4
--- /dev/null
+++ b/test cases/unit/28 pkgconfig use libraries/lib/meson.build
@@ -0,0 +1,16 @@
+project('lib', ['c'])
+
+a = library('test-a', 'liba.c', install: true)
+
+b = library('test-b', 'libb.c', link_with: a, install: true)
+
+import('pkgconfig').generate(
+ version: '0.0',
+ description: 'test library',
+ filebase: 'test-b',
+ name: 'test library',
+ libraries: [b],
+ subdirs: ['.']
+)
+
+