244 files changed, 4457 insertions, 1866 deletions
diff --git a/.appveyor.yml b/.appveyor.yml index 56a123a..a1a9c5f 100644 --- a/.appveyor.yml +++ b/.appveyor.yml @@ -52,6 +52,8 @@ platform: branches: only: - master + # Release branches + - /^[0-9]+\.[0-9]+$/ init: - ps: | @@ -62,14 +64,35 @@ init: } install: + - ps: | + function DownloadFile([String] $Source, [String] $Destination) { + $retries = 10 + for ($i = 1; $i -le $retries; $i++) { + try { + (New-Object net.webclient).DownloadFile($Source, $Destination) + break # succeeded + } catch [net.WebException] { + if ($i -eq $retries) { + throw # fail on last retry + } + $backoff = (10 * $i) # backoff 10s, 20s, 30s... + echo ('{0}: {1}' -f $Source, $_.Exception.Message) + echo ('Retrying in {0}s...' -f $backoff) + Start-Sleep -m ($backoff * 1000) + } + } + } - cmd: set "ORIG_PATH=%PATH%" # Use a Ninja with QuLogic's patch: https://github.com/ninja-build/ninja/issues/1219 - cmd: set "MESON_FIXED_NINJA=1" - - ps: (new-object net.webclient).DownloadFile('http://nirbheek.in/files/binaries/ninja/win32/ninja.exe', 'C:\projects\meson\ninja.exe') + - ps: DownloadFile -Source 'http://nirbheek.in/files/binaries/ninja/win32/ninja.exe' -Destination 'C:\projects\meson\ninja.exe' # Use the x86 python only when building for x86 for the cpython tests. # For all other archs (including, say, arm), use the x64 python. - cmd: if %arch%==x86 (set MESON_PYTHON_PATH=C:\python35) else (set MESON_PYTHON_PATH=C:\python35-x64) + # Skip CI requires python + - cmd: python ./skip_ci.py --base-branch-env=APPVEYOR_REPO_BRANCH --is-pull-env=APPVEYOR_PULL_REQUEST_NUMBER + # Set paths for BOOST dll files - cmd: if %compiler%==msvc2015 ( if %arch%==x86 ( set "PATH=%PATH%;C:\Libraries\boost_1_59_0\lib32-msvc-14.0" ) else ( set "PATH=%PATH%;C:\Libraries\boost_1_59_0\lib64-msvc-14.0" ) ) - cmd: if %compiler%==msvc2017 ( if %arch%==x86 ( set "PATH=%PATH%;C:\Libraries\boost_1_64_0\lib32-msvc-14.1" ) else ( set "PATH=%PATH%;C:\Libraries\boost_1_64_0\lib64-msvc-14.1" ) ) @@ -79,36 +102,32 @@ install: - cmd: if %compiler%==msvc2015 ( call "C:\Program Files (x86)\Microsoft Visual Studio 14.0\VC\vcvarsall.bat" %arch% ) - cmd: if %compiler%==msvc2017 ( call "C:\Program Files (x86)\Microsoft Visual Studio\2017\Community\Common7\Tools\VsDevCmd.bat" -arch=%arch% ) - cmd: if %compiler%==cygwin ( set PYTHON=python3 ) else ( set PYTHON=python ) - - ps: | - If($Env:compiler -eq 'msys2-mingw') { - If($Env:arch -eq 'x86') { - $env:Path = 'C:\msys64\mingw32\bin;' + $env:Path - $env:MESON_PYTHON_PATH = 'C:\msys64\mingw32\bin' - $env:PYTHON = 'python3' - C:\msys64\usr\bin\pacman -S --noconfirm mingw32/mingw-w64-i686-python3 - } Else { - $env:Path = 'C:\msys64\mingw64\bin;' + $env:Path - $env:MESON_PYTHON_PATH = 'C:\msys64\mingw64\bin' - $env:PYTHON = 'python3' - C:\msys64\usr\bin\pacman -S --noconfirm mingw64/mingw-w64-x86_64-python3 - } - } + # MinGW setup, lines are split to prevent "The input line is too long." error. 
+ - cmd: if %arch%==x86 ( set "PACMAN_ARCH=i686" ) else ( set "PACMAN_ARCH=x86_64" ) + - cmd: if %arch%==x86 ( set "PACMAN_BITS=32" ) else ( set "PACMAN_BITS=64" ) + - cmd: if %compiler%==msys2-mingw ( set "PATH=C:\msys64\mingw%PACMAN_BITS%\bin;%PATH%" ) + - cmd: if %compiler%==msys2-mingw ( set "MESON_PYTHON_PATH=C:\msys64\mingw%PACMAN_BITS%\bin" ) + - cmd: if %compiler%==msys2-mingw ( set "PYTHON=python3" ) + - cmd: if %compiler%==msys2-mingw ( C:\msys64\usr\bin\pacman -S --needed --noconfirm "mingw%PACMAN_BITS%/mingw-w64-%PACMAN_ARCH%-python3" ) + # Cygwin - cmd: if not %compiler%==cygwin ( set "PATH=%cd%;%MESON_PYTHON_PATH%;%PATH%;" ) - cmd: if %compiler%==cygwin ( set WRAPPER=ci\run-in-cygwin.bat ) - cmd: if %compiler%==cygwin ( %WRAPPER% which %PYTHON% ) else ( where %PYTHON% ) # pkg-config is needed for the pkg-config tests on msvc - - ps: If($Env:compiler.StartsWith('msvc')) {(new-object net.webclient).DownloadFile('http://nirbheek.in/files/binaries/pkg-config/win32/pkg-config.exe', 'C:\projects\meson\pkg-config.exe')} + - ps: | + If($Env:compiler.StartsWith('msvc')) { + DownloadFile -Source 'http://nirbheek.in/files/binaries/pkg-config/win32/pkg-config.exe' ` + -Destination 'C:\projects\meson\pkg-config.exe' + } - cmd: if %compiler%==cygwin ( call ci\appveyor-install.bat ) - ps: | If($Env:compiler -like 'msvc*') { - (new-object net.webclient).DownloadFile( - "https://download.microsoft.com/download/D/B/B/DBB64BA1-7B51-43DB-8BF1-D1FB45EACF7A/msmpisdk.msi", - "C:\projects\msmpisdk.msi") + DownloadFile -Source "https://download.microsoft.com/download/D/B/B/DBB64BA1-7B51-43DB-8BF1-D1FB45EACF7A/msmpisdk.msi" ` + -Destination "C:\projects\msmpisdk.msi" c:\windows\system32\msiexec.exe /i C:\projects\msmpisdk.msi /quiet - (new-object net.webclient).DownloadFile( - "https://download.microsoft.com/download/D/B/B/DBB64BA1-7B51-43DB-8BF1-D1FB45EACF7A/MSMpiSetup.exe", - "C:\projects\MSMpiSetup.exe") + DownloadFile -Source "https://download.microsoft.com/download/D/B/B/DBB64BA1-7B51-43DB-8BF1-D1FB45EACF7A/MSMpiSetup.exe" ` + -Destination "C:\projects\MSMpiSetup.exe" c:\projects\MSMpiSetup.exe -unattend -full } diff --git a/.travis.yml b/.travis.yml index f077c9c..16fa55b 100644 --- a/.travis.yml +++ b/.travis.yml @@ -3,6 +3,8 @@ sudo: false branches: only: - master + # Release branches + - /^[0-9]+\.[0-9]+$/ os: - linux @@ -29,8 +31,10 @@ matrix: compiler: gcc before_install: + - python ./skip_ci.py --base-branch-env=TRAVIS_BRANCH --is-pull-env=TRAVIS_PULL_REQUEST - if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then brew update; fi - - if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then brew install python3; fi + - if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then brew uninstall python mercurial; fi + - if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then brew install python@2 python@3 mercurial qt; fi # Use a Ninja with QuLogic's patch: https://github.com/ninja-build/ninja/issues/1219 - if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then mkdir -p $HOME/tools; curl -L http://nirbheek.in/files/binaries/ninja/macos/ninja -o $HOME/tools/ninja; chmod +x $HOME/tools/ninja; fi - if [[ "$TRAVIS_OS_NAME" == "linux" ]]; then docker pull jpakkane/mesonci:artful; fi @@ -49,4 +53,4 @@ script: withgit \ /bin/sh -c "cd /root && mkdir -p tools; wget -c http://nirbheek.in/files/binaries/ninja/linux-amd64/ninja -O /root/tools/ninja; chmod +x /root/tools/ninja; CC=$CC CXX=$CXX OBJC=$CC OBJCXX=$CXX PATH=/root/tools:$PATH MESON_FIXED_NINJA=1 ./run_tests.py -- $MESON_ARGS && chmod -R a+rwX .coverage" fi - - if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then 
SDKROOT=$(xcodebuild -version -sdk macosx Path) CPPFLAGS=-I/usr/local/include LDFLAGS=-L/usr/local/lib OBJC=$CC OBJCXX=$CXX PATH=$HOME/tools:$PATH MESON_FIXED_NINJA=1 ./run_tests.py --backend=ninja -- $MESON_ARGS ; fi + - if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then SDKROOT=$(xcodebuild -version -sdk macosx Path) CPPFLAGS=-I/usr/local/include LDFLAGS=-L/usr/local/lib OBJC=$CC OBJCXX=$CXX PATH=$HOME/tools:/usr/local/opt/qt/bin:$PATH MESON_FIXED_NINJA=1 ./run_tests.py --backend=ninja -- $MESON_ARGS ; fi @@ -8,7 +8,7 @@ build system. [](https://pypi.python.org/pypi/meson) [](https://travis-ci.org/mesonbuild/meson) -[](https://ci.appveyor.com/project/jpakkane/meson) +[](https://ci.appveyor.com/project/mesonbuild/meson) [](https://codecov.io/gh/mesonbuild/meson/branch/master) #### Dependencies diff --git a/ciimage/Dockerfile b/ciimage/Dockerfile index 05e679e..72788c3 100644 --- a/ciimage/Dockerfile +++ b/ciimage/Dockerfile @@ -14,3 +14,5 @@ RUN apt-get -y update && apt-get -y upgrade \ && apt-get -y install libwmf-dev \ && apt-get -y install qt4-linguist-tools qttools5-dev-tools \ && python3 -m pip install hotdoc codecov + +ENV LANG='C.UTF-8' diff --git a/contributing.md b/contributing.md new file mode 100644 index 0000000..3d4dc34 --- /dev/null +++ b/contributing.md @@ -0,0 +1,8 @@ +## Contributing to the Meson build system + +Thank you for your interest in participating to the development! +A large fraction of Meson is contributed by people outside +the core team and we are **excited** to see what you do. + +**Contribution instructions can be found on the website** + @ http://mesonbuild.com/Contributing.html diff --git a/contributing.txt b/contributing.txt deleted file mode 100644 index b1c015c..0000000 --- a/contributing.txt +++ /dev/null @@ -1,4 +0,0 @@ -Contributing to the Meson build system - -Contribution instructions can be found [on the -website](http://mesonbuild.com/Contributing.html). diff --git a/docs/markdown/Adding-arguments.md b/docs/markdown/Adding-arguments.md index e314102..117622b 100644 --- a/docs/markdown/Adding-arguments.md +++ b/docs/markdown/Adding-arguments.md @@ -49,6 +49,8 @@ executable('prog', 'prog.cc', cpp_args : '-DCPPTHING') Here we create a C++ executable with an extra argument that is used during compilation but not for linking. +You can find the parameter name for other languages in the [reference tables](Reference-tables.md). + Specifying extra linker arguments is done in the same way: ```meson diff --git a/docs/markdown/Build-options.md b/docs/markdown/Build-options.md index 74d2355..9ccdf83 100644 --- a/docs/markdown/Build-options.md +++ b/docs/markdown/Build-options.md @@ -16,7 +16,7 @@ Here is a simple option file. option('someoption', type : 'string', value : 'optval', description : 'An option') option('other_one', type : 'boolean', value : false) option('combo_opt', type : 'combo', choices : ['one', 'two', 'three'], value : 'three') -option('integer_opt', type : 'integer', min : 0, max : 5, value : 3) +option('integer_opt', type : 'integer', min : 0, max : 5, value : 3) # Since 0.45.0 option('free_array_opt', type : 'array', value : ['one', 'two']) option('array_opt', type : 'array', choices : ['one', 'two', 'three'], value : ['one', 'two']) ``` @@ -44,7 +44,9 @@ default. An integer option contains a single integer with optional upper and lower values that are specified with the `min` and `max` keyword -arguments. Available since Meson version 0.45.0. +arguments. + +This type is available since Meson version 0.45.0. ### Arrays @@ -56,7 +58,7 @@ empty. 
The `value` parameter specifies the default value of the option and if it is unset then the values of `choices` will be used as the default. -This type is new in version 0.44.0 +This type is available since version 0.44.0 ## Using build options @@ -105,6 +107,13 @@ $ meson configure "-Doption=['a,b', 'c,d']" The inner values must always be single quotes and the outer ones double quotes. +To change values in subprojects prepend the name of the subproject and +a colon: + +```console +$ meson configure -Dsubproject:option=newvalue +``` + **NOTE:** If you cannot call `meson configure` you likely have a old version of Meson. In that case you can call `mesonconf` instead, but that is deprecated in newer versions diff --git a/docs/markdown/Configuration.md b/docs/markdown/Configuration.md index 9db6370..d8fa54e 100644 --- a/docs/markdown/Configuration.md +++ b/docs/markdown/Configuration.md @@ -58,7 +58,7 @@ Note that if you want to define a C string, you need to do the quoting yourself like this: ```meson -conf.set('TOKEN', '"value"') +conf_data.set('TOKEN', '"value"') ``` Since this is such a common operation, Meson provides a convenience @@ -66,7 +66,7 @@ method: ```meson plain_var = 'value' -conf.set_quoted('TOKEN', plain_var) # becomes #define TOKEN "value" +conf_data.set_quoted('TOKEN', plain_var) # becomes #define TOKEN "value" ``` Often you have a boolean value in Meson but need to define the C/C++ @@ -74,12 +74,12 @@ token as 0 or 1. Meson provides a convenience function for this use case. ```meson -conf.set10(token, boolean_value) +conf_data.set10(token, boolean_value) # The line above is equivalent to this: if boolean_value - conf.set(token, 1) + conf_data.set(token, 1) else - conf.set(token, 0) + conf_data.set(token, 0) endif ``` @@ -90,19 +90,19 @@ file all the entries in the configuration data object. The replacements are the same as when generating `#mesondefine` entries: ```meson -cdata.set('FOO', '"string"') => #define FOO "string" -cdata.set('FOO', 'a_token') => #define FOO a_token -cdata.set('FOO', true) => #define FOO -cdata.set('FOO', false) => #undef FOO -cdata.set('FOO', 1) => #define FOO 1 -cdata.set('FOO', 0) => #define FOO 0 +conf_data.set('FOO', '"string"') => #define FOO "string" +conf_data.set('FOO', 'a_token') => #define FOO a_token +conf_data.set('FOO', true) => #define FOO +conf_data.set('FOO', false) => #undef FOO +conf_data.set('FOO', 1) => #define FOO 1 +conf_data.set('FOO', 0) => #define FOO 0 ``` In this mode, you can also specify a comment which will be placed before the value so that your generated files are self-documenting. ```meson -cdata.set('BAR', true, description : 'Set BAR if it is available') +conf_data.set('BAR', true, description : 'Set BAR if it is available') ``` Will produce: @@ -129,9 +129,11 @@ subprojects. At the top level we generate the file: ```meson +conf_data = configuration_data() +# Set data configure_file(input : 'projconfig.h.in', output : 'projconfig.h', - configuration : cdata_object) + configuration : conf_data) ``` Immediately afterwards we generate the include object. diff --git a/docs/markdown/Configuring-a-build-directory.md b/docs/markdown/Configuring-a-build-directory.md index 774addf..8e016e2 100644 --- a/docs/markdown/Configuring-a-build-directory.md +++ b/docs/markdown/Configuring-a-build-directory.md @@ -9,9 +9,7 @@ generated. For example you might want to change from a debug build into a release build, set custom compiler flags, change the build options provided in your `meson_options.txt` file and so on. 
-The main tool for this is the `meson configure` command. You may also use the -`mesongui` graphical application if you want. However this document -describes the use of the command line client. +The main tool for this is the `meson configure` command. You invoke `meson configure` by giving it the location of your build dir. If omitted, the current working directory is used instead. Here's a diff --git a/docs/markdown/Contributing.md b/docs/markdown/Contributing.md index 293b629..7b5fe73 100644 --- a/docs/markdown/Contributing.md +++ b/docs/markdown/Contributing.md @@ -18,6 +18,45 @@ Github](https://github.com/mesonbuild/meson/pulls). This causes them to be run through the CI system. All submissions must pass a full CI test run before they are even considered for submission. +## Acceptance and merging + +The kind of review and acceptance any merge proposal gets depends on +the changes it contains. All pull requests must be reviewed and +accepted by someone with commit rights who is not the original +submitter. Merge requests can be roughly split into three different +categories. + +The first one consists of MRs that only change the markdown +documentation under `docs/markdown`. Anyone with access rights can +push changes to these directly to master. For major changes it is +still recommended to create a MR so other people can comment on it. + +The second group consists of merges that don't change any +functionality, fixes to the CI system and bug fixes that have added +regression tests (see below) and don't change existing +functionality. Once successfully reviewed anyone with merge rights can +merge these to master. + +The final kind of merges are those that add new functionality or +change existing functionality in a backwards incompatible way. These +require the approval of the project lead. + +In a simplified list form the split would look like the following: + + - members with commit access can do: + - documentation changes (directly to master if warranted) + - bug fixes that don't change functionality + - refactorings + - new dependency types + - new tool support (e.g. a new Doxygen-kind of tool) + - support for new compilers to existing languages + - project leader decision is needed for: + - new modules + - new functions in the Meson language + - syntax changes for Meson files + - changes breaking backwards compatibility + - support for new languages + ## Tests All new features must come with automatic tests that thoroughly prove @@ -63,6 +102,21 @@ actually work should be done with a unit test. Projects needed by unit tests are in the `test cases/unit` subdirectory. They are not run as part of `./run_project_tests.py`. +### Skipping integration tests + +Meson uses several continuous integration testing systems that have slightly +different interface. To promote consistent naming policy, use: + +- `[skip ci]` in the commit title if you want to disable all integration tests +- `[skip appveyor]` in the commit title if you want to disable Windows-only tests + +Continuous integration systems currently used: + +- [Travis-CI](https://docs.travis-ci.com/user/customizing-the-build/#Skipping-a-build) + allows `[skip ci]` anywhere in the commit messages. +- [AppVeyor](https://www.appveyor.com/docs/how-to/filtering-commits/#skip-directive-in-commit-message) + requires `[skip ci]` or `[skip appveyor]` in the commit title. + ## Documentation The `docs` directory contains the full documentation that will be used @@ -76,6 +130,11 @@ notes. 
These features should be written in standalone files in the `docs/markdown/snippets` directory. The release manager will combine them into one page when doing the release. +[Integration tests should be disabled](#skipping-integration-tests) for +documentation-only commits by putting `[skip ci]` into commit title. +Reviewers should ask contributors to put `[skip ci]` into the title because +tests are run again after merge for `master`. + ## Python Coding style Meson follows the basic Python coding style. Additional rules are the diff --git a/docs/markdown/Cross-compilation.md b/docs/markdown/Cross-compilation.md index c1ad317..e739e37 100644 --- a/docs/markdown/Cross-compilation.md +++ b/docs/markdown/Cross-compilation.md @@ -83,7 +83,7 @@ automatically use the given wrapper when it needs to run host binaries. This happens e.g. when running the project's test suite. The next section lists properties of the cross compiler and thus of -the target system. It looks like this: +the host system. It looks like this: ```ini [properties] @@ -261,7 +261,7 @@ myvar = meson.get_cross_property('somekey') ## Cross file locations As of version 0.44.0 meson supports loading cross files from system locations -on Linux and the BSDs. This will be $XDG_DATA_DIRS/meson/cross, or if +(except on Windows). This will be $XDG_DATA_DIRS/meson/cross, or if XDG_DATA_DIRS is undefined, then /usr/local/share/meson/cross and /usr/share/meson/cross will be tried in that order, for system wide cross files. User local files can be put in $XDG_DATA_HOME/meson/cross, or @@ -272,7 +272,7 @@ The order of locations tried is as follows: - The user local location - The system wide locations in order -Linux and BSD distributions are encouraged to ship cross files either with +Distributions are encouraged to ship cross files either with their cross compiler toolchain packages or as a standalone package, and put them in one of the system paths referenced above. diff --git a/docs/markdown/Dependencies.md b/docs/markdown/Dependencies.md index 189db72..12e1b1f 100644 --- a/docs/markdown/Dependencies.md +++ b/docs/markdown/Dependencies.md @@ -61,7 +61,7 @@ zdep_prefix = zdep.get_pkgconfig_variable('libdir', define_variable: ['prefix', The dependency detector works with all libraries that provide a `pkg-config` file. Unfortunately several packages don't provide pkg-config files. Meson has autodetection support for some of these, -and they are described later on this page. +and they are described [later in this page](#dependencies-with-custom-lookup-functionality). # Declaring your own @@ -89,12 +89,16 @@ to build dependencies manually when they are not. To make this work, the dependency must have Meson build definitions and it must declare its own dependency like this: +```meson foo_dep = declare_dependency(...) +``` Then any project that wants to use it can write out the following declaration in their main `meson.build` file. +```meson foo_dep = dependency('foo', fallback : ['foo', 'foo_dep']) +``` What this declaration means is that first Meson tries to look up the dependency from the system (such as by using pkg-config). If it is not @@ -107,6 +111,14 @@ of all the work behind the scenes to make this work. # Dependencies with custom lookup functionality +## AppleFrameworks + +Use the `modules` keyword to list frameworks required, e.g. 
+ +```meson +dep = find_dep('appleframeworks', modules : 'foundation') +``` + ## Boost Boost is not a single dependency but rather a group of different @@ -134,7 +146,11 @@ can set the BOOST_ROOT, BOOST_INCLUDEDIR, and/or BOOST_LIBRARYDIR environment variables. You can set the argument `threading` to `single` to use boost libraries that -has been compiled for single-threaded use instead. +have been compiled for single-threaded use instead. + +## GL + +This finds the OpenGL library in a way appropriate to the platform. ## GTest and GMock @@ -156,9 +172,9 @@ test('gtest test', e) MPI is supported for C, C++ and Fortran. Because dependencies are language-specific, you must specify the requested language using the `language` keyword argument, i.e., - * `dependency('mpi', language='c')` for the C MPI headers and libraries - * `dependency('mpi', language='cpp')` for the C++ MPI headers and libraries - * `dependency('mpi', language='fortran')` for the Fortran MPI headers and libraries + * `dependency('mpi', language: 'c')` for the C MPI headers and libraries + * `dependency('mpi', language: 'cpp')` for the C++ MPI headers and libraries + * `dependency('mpi', language: 'fortran')` for the Fortran MPI headers and libraries Meson prefers pkg-config for MPI, but if your MPI implementation does not provide them, it will search for the standard wrapper executables, @@ -167,9 +183,9 @@ are not in your path, they can be specified by setting the standard environment variables `MPICC`, `MPICXX`, `MPIFC`, `MPIF90`, or `MPIF77`, during configuration. -## Qt5 +## Qt4 & Qt5 -Meson has native Qt5 support. Its usage is best demonstrated with an +Meson has native Qt support. Its usage is best demonstrated with an example. ```meson @@ -200,12 +216,26 @@ the list of sources for the target. The `modules` keyword of `dependency` works just like it does with Boost. It tells which subparts of Qt the program uses. +## SDL2 + +SDL2 can be located using `pkg-confg`, the `sdl2-config` config tool, or as an +OSX framework. + +## Valgrind + +Meson will find valgrind using `pkg-config`, but only uses the compilation flags +and avoids trying to link with it's non-PIC static libs. + +## Vulkan + +Vulkan can be located using `pkg-config`, or the `VULKAN_SDK` environment variable. + ## Dependencies using config tools -CUPS, LLVM, PCAP, WxWidgets, libwmf, and GnuStep either do not provide -pkg-config modules or additionally can be detected via a config tool +CUPS, LLVM, PCAP, [WxWidgets](#wxwidgets), libwmf, and GnuStep either do not +provide pkg-config modules or additionally can be detected via a config tool (cups-config, llvm-config, etc). Meson has native support for these tools, and -then can be found like other dependencies: +they can be found like other dependencies: ```meson pcap_dep = dependency('pcap', version : '>=1.0') @@ -220,6 +250,30 @@ tools support. You can force one or another via the method keyword: wmf_dep = dependency('wmf', method : 'config-tool') ``` +## WxWidgets + +Similar to [Boost](#boost), WxWidgets is not a single library but rather +a collection of modules. WxWidgets is supported via `wx-config`. 
+Meson substitutes `modules` to `wx-config` invocation, it generates +- `compile_args` using `wx-config --cxxflags $modules...` +- `link_args` using `wx-config --libs $modules...` + +### Example + +```meson +wx_dep = dependency( + 'wxwidgets', version : '>=3.0.0', modules : ['std', 'stc'], +) +``` + +```shell +# compile_args: +$ wx-config --cxxflags std stc + +# link_args: +$ wx-config --libs std stc +``` + ## LLVM Meson has native support for LLVM going back to version LLVM version 3.5. @@ -248,3 +302,15 @@ llvm_dep = dependency( 'llvm', version : '>= 4.0', modules : ['amdgpu'], optional_modules : ['inteljitevents'], ) ``` + +## Python3 + +Python3 is handled specially by meson: +1. Meson tries to use `pkg-config`. +1. If `pkg-config` fails meson uses a fallback: + - On Windows the fallback is the current `python3` interpreter. + - On OSX the fallback is a framework dependency from `/Library/Frameworks`. + +Note that `python3` found by this dependency might differ from the one used in +`python3` module because modules uses the current interpreter, but dependency tries +`pkg-config` first. diff --git a/docs/markdown/FAQ.md b/docs/markdown/FAQ.md index 441cd69..f4cf89b 100644 --- a/docs/markdown/FAQ.md +++ b/docs/markdown/FAQ.md @@ -7,9 +7,16 @@ See also [How do I do X in Meson](howtox.md). ## Why is it called Meson? -When the name was originally chosen, there were two main limitations: there must not exist either a Debian package or a Sourceforge project of the given name. This ruled out tens of potential project names. At some point the name Gluon was considered. Gluons are elementary particles that hold protons and neutrons together, much like a build system's job is to take pieces of source code and a compiler and bind them to a complete whole. +When the name was originally chosen, there were two main limitations: +there must not exist either a Debian package or a Sourceforge project +of the given name. This ruled out tens of potential project names. At +some point the name Gluon was considered. Gluons are elementary +particles that hold protons and neutrons together, much like a build +system's job is to take pieces of source code and a compiler and bind +them to a complete whole. -Unfortunately this name was taken, too. Then the rest of subatomic particles were examined and Meson was found to be available. +Unfortunately this name was taken, too. Then the rest of subatomic +particles were examined and Meson was found to be available. ## What is the correct way to use threads (such as pthreads)? @@ -17,23 +24,34 @@ Unfortunately this name was taken, too. Then the rest of subatomic particles wer thread_dep = dependency('threads') ``` -This will set up everything on your behalf. People coming from Autotools or CMake want to do this by looking for `libpthread.so` manually. Don't do that, it has tricky corner cases especially when cross compiling. +This will set up everything on your behalf. People coming from +Autotools or CMake want to do this by looking for `libpthread.so` +manually. Don't do that, it has tricky corner cases especially when +cross compiling. ## How to use Meson on a host where it is not available in system packages? 
-Starting from version 0.29.0, Meson is available from the [Python Package Index](https://pypi.python.org/pypi/meson/), so installing it simply a matter of running this command: +Starting from version 0.29.0, Meson is available from the [Python +Package Index](https://pypi.python.org/pypi/meson/), so installing it +simply a matter of running this command: ```console $ pip3 install <your options here> meson ``` -If you don't have access to PyPI, that is not a problem either. Meson has been designed to be easily runnable from an extracted source tarball or even a git checkout. First you need to download Meson. Then use this command to set up you build instead of plain `meson`. +If you don't have access to PyPI, that is not a problem either. Meson +has been designed to be easily runnable from an extracted source +tarball or even a git checkout. First you need to download Meson. Then +use this command to set up you build instead of plain `meson`. ```console $ /path/to/meson.py <options> ``` -After this you don't have to care about invoking Meson any more. It remembers where it was originally invoked from and calls itself appropriately. As a user the only thing you need to do is to `cd` into your build directory and invoke `ninja`. +After this you don't have to care about invoking Meson any more. It +remembers where it was originally invoked from and calls itself +appropriately. As a user the only thing you need to do is to `cd` into +your build directory and invoke `ninja`. ## Why can't I specify target files with a wildcard? @@ -43,17 +61,34 @@ Instead of specifying files explicitly, people seem to want to do this: executable('myprog', sources : '*.cpp') # This does NOT work! ``` -Meson does not support this syntax and the reason for this is simple. This can not be made both reliable and fast. By reliable we mean that if the user adds a new source file to the subdirectory, Meson should detect that and make it part of the build automatically. +Meson does not support this syntax and the reason for this is +simple. This can not be made both reliable and fast. By reliable we +mean that if the user adds a new source file to the subdirectory, +Meson should detect that and make it part of the build automatically. -One of the main requirements of Meson is that it must be fast. This means that a no-op build in a tree of 10 000 source files must take no more than a fraction of a second. This is only possible because Meson knows the exact list of files to check. If any target is specified as a wildcard glob, this is no longer possible. Meson would need to re-evaluate the glob every time and compare the list of files produced against the previous list. This means inspecting the entire source tree (because the glob pattern could be `src/\*/\*/\*/\*.cpp` or something like that). This is impossible to do efficiently. +One of the main requirements of Meson is that it must be fast. This +means that a no-op build in a tree of 10 000 source files must take no +more than a fraction of a second. This is only possible because Meson +knows the exact list of files to check. If any target is specified as +a wildcard glob, this is no longer possible. Meson would need to +re-evaluate the glob every time and compare the list of files produced +against the previous list. This means inspecting the entire source +tree (because the glob pattern could be `src/\*/\*/\*/\*.cpp` or +something like that). This is impossible to do efficiently. 
-The main backend of Meson is Ninja, which does not support wildcard matches either, and for the same reasons. +The main backend of Meson is Ninja, which does not support wildcard +matches either, and for the same reasons. Because of this, all source files must be specified explicitly. ## But I really want to use wildcards! -If the tradeoff between reliability and convenience is acceptable to you, then Meson gives you all the tools necessary to do wildcard globbing. You are allowed to run arbitrary commands during configuration. First you need to write a script that locates the files to compile. Here's a simple shell script that writes all `.c` files in the current directory, one per line. +If the tradeoff between reliability and convenience is acceptable to +you, then Meson gives you all the tools necessary to do wildcard +globbing. You are allowed to run arbitrary commands during +configuration. First you need to write a script that locates the files +to compile. Here's a simple shell script that writes all `.c` files in +the current directory, one per line. ```bash @@ -72,17 +107,37 @@ sources = c.stdout().strip().split('\n') e = executable('prog', sources) ``` -The script can be any executable, so it can be written in shell, Python, Lua, Perl or whatever you wish. +The script can be any executable, so it can be written in shell, +Python, Lua, Perl or whatever you wish. -As mentioned above, the tradeoff is that just adding new files to the source directory does *not* add them to the build automatically. To add them you need to tell Meson to reinitialize itself. The simplest way is to touch the `meson.build` file in your source root. Then Meson will reconfigure itself next time the build command is run. Advanced users can even write a small background script that utilizes a filesystem event queue, such as [inotify](https://en.wikipedia.org/wiki/Inotify), to do this automatically. +As mentioned above, the tradeoff is that just adding new files to the +source directory does *not* add them to the build automatically. To +add them you need to tell Meson to reinitialize itself. The simplest +way is to touch the `meson.build` file in your source root. Then Meson +will reconfigure itself next time the build command is run. Advanced +users can even write a small background script that utilizes a +filesystem event queue, such as +[inotify](https://en.wikipedia.org/wiki/Inotify), to do this +automatically. ## Should I use `subdir` or `subproject`? -The answer is almost always `subdir`. Subproject exists for a very specific use case: embedding external dependencies into your build process. As an example, suppose we are writing a game and wish to use SDL. Let us further suppose that SDL comes with a Meson build definition. Let us suppose even further that we don't want to use prebuilt binaries but want to compile SDL for ourselves. +The answer is almost always `subdir`. Subproject exists for a very +specific use case: embedding external dependencies into your build +process. As an example, suppose we are writing a game and wish to use +SDL. Let us further suppose that SDL comes with a Meson build +definition. Let us suppose even further that we don't want to use +prebuilt binaries but want to compile SDL for ourselves. -In this case you would use `subproject`. The way to do it would be to grab the source code of SDL and put it inside your own source tree. 
Then you would do `sdl = subproject('sdl')`, which would cause Meson to build SDL as part of your build and would then allow you to link against it or do whatever else you may prefer. +In this case you would use `subproject`. The way to do it would be to +grab the source code of SDL and put it inside your own source +tree. Then you would do `sdl = subproject('sdl')`, which would cause +Meson to build SDL as part of your build and would then allow you to +link against it or do whatever else you may prefer. -For every other use you would use `subdir`. As an example, if you wanted to build a shared library in one dir and link tests against it in another dir, you would do something like this: +For every other use you would use `subdir`. As an example, if you +wanted to build a shared library in one dir and link tests against it +in another dir, you would do something like this: ```meson project('simple', 'c') @@ -92,27 +147,53 @@ subdir('tests') # test binaries would link against the library here ## Why is there not a Make backend? -Because Make is slow. This is not an implementation issue, Make simply can not be made fast. For further info we recommend you read [this post](http://neugierig.org/software/chromium/notes/2011/02/ninja.html) by Evan Martin, the author of Ninja. Makefiles also have a syntax that is very unpleasant to write which makes them a big maintenance burden. +Because Make is slow. This is not an implementation issue, Make simply +can not be made fast. For further info we recommend you read [this +post](http://neugierig.org/software/chromium/notes/2011/02/ninja.html) +by Evan Martin, the author of Ninja. Makefiles also have a syntax that +is very unpleasant to write which makes them a big maintenance burden. -The only reason why one would use Make instead of Ninja is working on a platform that does not have a Ninja port. Even in this case it is an order of magnitude less work to port Ninja than it is to write a Make backend for Meson. +The only reason why one would use Make instead of Ninja is working on +a platform that does not have a Ninja port. Even in this case it is an +order of magnitude less work to port Ninja than it is to write a Make +backend for Meson. Just use Ninja, you'll be happier that way. I guarantee it. ## Why is Meson not just a Python module so I could code my build setup in Python? -A related question to this is *Why is Meson's configuration language not Turing-complete?* +A related question to this is *Why is Meson's configuration language +not Turing-complete?* -There are many good reasons for this, most of which are summarized on this web page: [Against The Use Of Programming Languages in Configuration Files](https://taint.org/2011/02/18/001527a.html). +There are many good reasons for this, most of which are summarized on +this web page: [Against The Use Of Programming Languages in +Configuration Files](https://taint.org/2011/02/18/001527a.html). -In addition to those reasons, not exposing Python or any other "real" programming language makes it possible to port Meson's implementation to a different language. This might become necessary if, for example, Python turns out to be a performance bottleneck. This is an actual problem that has caused complications for GNU Autotools and SCons. +In addition to those reasons, not exposing Python or any other "real" +programming language makes it possible to port Meson's implementation +to a different language. This might become necessary if, for example, +Python turns out to be a performance bottleneck. 
This is an actual +problem that has caused complications for GNU Autotools and SCons. ## How do I do the equivalent of Libtools export-symbol and export-regex? -Either by using [GCC symbol visibility](https://gcc.gnu.org/wiki/Visibility) or by writing a [linker script](https://ftp.gnu.org/old-gnu/Manuals/ld-2.9.1/html_mono/ld.html). This has the added benefit that your symbol definitions are in a standalone file instead of being buried inside your build definitions. An example can be found [here](https://github.com/jpakkane/meson/tree/master/test%20cases/linuxlike/3%20linker%20script). +Either by using [GCC symbol +visibility](https://gcc.gnu.org/wiki/Visibility) or by writing a +[linker +script](https://ftp.gnu.org/old-gnu/Manuals/ld-2.9.1/html_mono/ld.html). This +has the added benefit that your symbol definitions are in a standalone +file instead of being buried inside your build definitions. An example +can be found +[here](https://github.com/jpakkane/meson/tree/master/test%20cases/linuxlike/3%20linker%20script). ## My project works fine on Linux and MinGW but fails with MSVC due to a missing .lib file -With GCC, all symbols on shared libraries are exported automatically unless you specify otherwise. With MSVC no symbols are exported by default. If your shared library exports no symbols, MSVC will silently not produce an import library file leading to failures. The solution is to add symbol visibility definitions [as specified in GCC wiki](https://gcc.gnu.org/wiki/Visibility). +With GCC, all symbols on shared libraries are exported automatically +unless you specify otherwise. With MSVC no symbols are exported by +default. If your shared library exports no symbols, MSVC will silently +not produce an import library file leading to failures. The solution +is to add symbol visibility definitions [as specified in GCC +wiki](https://gcc.gnu.org/wiki/Visibility). ## I added some compiler flags and now the build fails with weird errors. What is happening? @@ -123,7 +204,13 @@ executable('foobar', ... c_args : '-some_arg -other_arg') ``` -Meson is *explicit*. In this particular case it will **not** automatically split your strings at whitespaces, instead it will take it as is and work extra hard to pass it to the compiler unchanged, including quoting it properly over shell invocations. This is mandatory to make e.g. files with spaces in them work flawlessly. To pass multiple command line arguments, you need to explicitly put them in an array like this: +Meson is *explicit*. In this particular case it will **not** +automatically split your strings at whitespaces, instead it will take +it as is and work extra hard to pass it to the compiler unchanged, +including quoting it properly over shell invocations. This is +mandatory to make e.g. files with spaces in them work flawlessly. To +pass multiple command line arguments, you need to explicitly put them +in an array like this: ```meson executable('foobar', ... @@ -138,20 +225,66 @@ You probably had a project that looked something like this: project('foobar', 'cpp') ``` -This defaults to `c++11` on GCC compilers. Suppose you want to use `c++14` instead, so you change the definition to this: +This defaults to `c++11` on GCC compilers. Suppose you want to use +`c++14` instead, so you change the definition to this: ```meson project('foobar', 'cpp', default_options : ['cpp_std=c++14']) ``` -But when you recompile, it still uses `c++11`. 
The reason for this is that default options are only looked at when you are setting up a build directory for the very first time. After that the setting is considered to have a value and thus the default value is ignored. To change an existing build dir to `c++14`, either reconfigure your build dir with `meson configure` or delete the build dir and recreate it from scratch. +But when you recompile, it still uses `c++11`. The reason for this is +that default options are only looked at when you are setting up a +build directory for the very first time. After that the setting is +considered to have a value and thus the default value is ignored. To +change an existing build dir to `c++14`, either reconfigure your build +dir with `meson configure` or delete the build dir and recreate it +from scratch. + +The reason we don't automatically change the option value when the +default is changed is that it is impossible to know to do that +reliably. The actual question that we need to solve is "if the +option's value is foo and the default value is bar, should we change +the option value to bar also". There are many choices: + + - if the user has changed the value themselves from the default, then + we must not change it back + + - if the user has not changed the value, but changes the default + value, then this section's premise would seem to indicate that the + value should be changed + + - suppose the user changes the value from the default to foo, then + back to bar and then changes the default value to bar, the correct + step to take is ambiguous by itself + +In order to solve the latter question we would need to remember not +only the current and old value, but also all the times the user has +changed the value and from which value to which other value. Since +people don't remember their own actions that far back, toggling +between states based on long history would be confusing. + +Because of this we do the simple and understandable thing: default +values are only defaults and will never affect the value of an option +once set. ## Does wrap download sources behind my back? -It does not. In order for Meson to download anything from the net while building, two conditions must be met. - -First of all there needs to be a `.wrap` file with a download URL in the `subprojects` directory. If one does not exist, Meson will not download anything. - -The second requirement is that there needs to be an explicit subproject invocation in your `meson.build` files. Either `subproject('foobar')` or `dependency('foobar', fallback : ['foobar', 'foo_dep'])`. If these declarations either are not in any build file or they are not called (due to e.g. `if/else`) then nothing is downloaded. - -If this is not sufficient for you, starting from release 0.40.0 Meson has a option called `wrap-mode` which can be used to disable wrap downloads altogether with `--wrap-mode=nodownload`. You can also disable dependency fallbacks altogether with `--wrap-mode=nofallback`, which also implies the `nodownload` option. +It does not. In order for Meson to download anything from the net +while building, two conditions must be met. + +First of all there needs to be a `.wrap` file with a download URL in +the `subprojects` directory. If one does not exist, Meson will not +download anything. + +The second requirement is that there needs to be an explicit +subproject invocation in your `meson.build` files. Either +`subproject('foobar')` or `dependency('foobar', fallback : ['foobar', +'foo_dep'])`. 
If these declarations either are not in any build file +or they are not called (due to e.g. `if/else`) then nothing is +downloaded. + +If this is not sufficient for you, starting from release 0.40.0 Meson +has a option called `wrap-mode` which can be used to disable wrap +downloads altogether with `--wrap-mode=nodownload`. You can also +disable dependency fallbacks altogether with `--wrap-mode=nofallback`, +which also implies the `nodownload` option. diff --git a/docs/markdown/Feature-autodetection.md b/docs/markdown/Feature-autodetection.md index 65318ec..f865174 100644 --- a/docs/markdown/Feature-autodetection.md +++ b/docs/markdown/Feature-autodetection.md @@ -16,4 +16,4 @@ If you do not wish to use CCache for some reason, just specify your compiler wit Coverage -- -When doing a code coverage build, Meson will check the existence of binaries `gcovr`, `lcov` and `genhtml`. If the first one is found, it will create targets called *coverage-text* and *coverage-xml*. If the latter two are found, it generates the target *coverage-html*. You can then generate coverage reports just by calling e.g. `ninja coverage-xml`. +When doing a code coverage build, Meson will check the existence of binaries `gcovr`, `lcov` and `genhtml`. If the first one is found, it will create targets called *coverage-text* and *coverage-xml*. If the latter two or a new enough `gcovr` is found, it generates the target *coverage-html*. You can then generate coverage reports just by calling e.g. `ninja coverage-xml`. diff --git a/docs/markdown/Generating-sources.md b/docs/markdown/Generating-sources.md index 2ea1021..cbe6c0d 100644 --- a/docs/markdown/Generating-sources.md +++ b/docs/markdown/Generating-sources.md @@ -4,23 +4,32 @@ short-description: Generation of source files before compilation # Generating sources - Sometimes source files need to be preprocessed before they are passed to the actual compiler. As an example you might want build an IDL compiler and then run some files through that to generate actual source files. In Meson this is done with [`generator()`](Reference-manual.md#generator) or [`custom_target()`](Reference-manual.md#custom_target). +Sometimes source files need to be preprocessed before they are passed +to the actual compiler. As an example you might want build an IDL +compiler and then run some files through that to generate actual +source files. In Meson this is done with +[`generator()`](Reference-manual.md#generator) or +[`custom_target()`](Reference-manual.md#custom_target). ## Using custom_target() -Let's say you have a build target that must be built using sources generated by a compiler. The compiler can either be a built target: +Let's say you have a build target that must be built using sources +generated by a compiler. The compiler can either be a built target: ```meson mycomp = executable('mycompiler', 'compiler.c') ``` -Or an external program provided by the system, or script inside the source tree: +Or an external program provided by the system, or script inside the +source tree: ```meson mycomp = find_program('mycompiler') ``` -Custom targets can take zero or more input files and use them to generate one or more output files. Using a custom target, you can run this compiler at build time to generate the sources: +Custom targets can take zero or more input files and use them to +generate one or more output files. 
Using a custom target, you can run +this compiler at build time to generate the sources: ```meson gen_src = custom_target('gen-output', @@ -31,7 +40,9 @@ gen_src = custom_target('gen-output', '--h-out', '@OUTPUT1@']) ``` -The `@INPUT@` there will be transformed to `'somefile1.c' 'file2.c'`. Just like the output, you can also refer to each input file individually by index. +The `@INPUT@` there will be transformed to `'somefile1.c' +'file2.c'`. Just like the output, you can also refer to each input +file individually by index. Then you just put that in your program and you're done. @@ -41,11 +52,21 @@ executable('program', 'main.c', gen_src) ## Using generator() -Generators are similar to custom targets, except that we define a *generator*, which defines how to transform an input file into one or more output files, and then use that on as many input files as we want. +Generators are similar to custom targets, except that we define a +*generator*, which defines how to transform an input file into one or +more output files, and then use that on as many input files as we +want. -Note that generators should only be used for outputs that will only be used as inputs for a build target or a custom target. When you use the processed output of a generator in multiple targets, the generator will be run multiple times to create outputs for each target. Each output will be created in a target-private directory `@BUILD_DIR@`. +Note that generators should only be used for outputs that will only be +used as inputs for a build target or a custom target. When you use the +processed output of a generator in multiple targets, the generator +will be run multiple times to create outputs for each target. Each +output will be created in a target-private directory `@BUILD_DIR@`. -If you want to generate files for general purposes such as for generating headers to be used by several sources, or data that will be installed, and so on, use a [`custom_target()`](Reference-manual.md#custom_target) instead. +If you want to generate files for general purposes such as for +generating headers to be used by several sources, or data that will be +installed, and so on, use a +[`custom_target()`](Reference-manual.md#custom_target) instead. ```meson @@ -54,9 +75,23 @@ gen = generator(mycomp, arguments : ['@INPUT@', '@OUTPUT@']) ``` -The first argument is the executable file to run. The next file specifies a name generation rule. It specifies how to build the output file name for a given input name. `@BASENAME@` is a placeholder for the input file name without preceding path or suffix (if any). So if the input file name were `some/path/filename.idl`, then the output name would be `filename.c`. You can also use `@PLAINNAME@`, which preserves the suffix which would result in a file called `filename.idl.c`. The last line specifies the command line arguments to pass to the executable. `@INPUT@` and `@OUTPUT@` are placeholders for the input and output files, respectively, and will be automatically filled in by Meson. If your rule produces multiple output files and you need to pass them to the command line, append the location to the output holder like this: `@OUTPUT0@`, `@OUTPUT1@` and so on. - -With this rule specified we can generate source files and add them to a target. +The first argument is the executable file to run. The next file +specifies a name generation rule. It specifies how to build the output +file name for a given input name. 
`@BASENAME@` is a placeholder for +the input file name without preceding path or suffix (if any). So if +the input file name were `some/path/filename.idl`, then the output +name would be `filename.c`. You can also use `@PLAINNAME@`, which +preserves the suffix which would result in a file called +`filename.idl.c`. The last line specifies the command line arguments +to pass to the executable. `@INPUT@` and `@OUTPUT@` are placeholders +for the input and output files, respectively, and will be +automatically filled in by Meson. If your rule produces multiple +output files and you need to pass them to the command line, append the +location to the output holder like this: `@OUTPUT0@`, `@OUTPUT1@` and +so on. + +With this rule specified we can generate source files and add them to +a target. ```meson gen_src = gen.process('input1.idl', 'input2.idl') @@ -67,8 +102,32 @@ Generators can also generate multiple output files with unknown names: ```meson gen2 = generator(someprog, - outputs : ['@BASENAME@.c', '@BASENAME@.h'], + output : ['@BASENAME@.c', '@BASENAME@.h'], arguments : ['--out_dir=@BUILD_DIR@', '@INPUT@']) ``` -In this case you can not use the plain `@OUTPUT@` variable, as it would be ambiguous. This program only needs to know the output directory, it will generate the file names by itself. +In this case you can not use the plain `@OUTPUT@` variable, as it +would be ambiguous. This program only needs to know the output +directory, it will generate the file names by itself. + +To make passing different additional arguments to the generator +program at each use possible, you can use the `@EXTRA_ARGS@` string in +the `arguments` list. Note that this placeholder can only be present +as a whole string, and not as a substring. The main reason is that it +represents a list of strings, which may be empty, or contain multiple +elements; and in either case, interpolating it into the middle of a +single string would be troublesome. If there are no extra arguments +passed in from a `process()` invocation, the placeholder is entirely +omitted from the actual list of arguments, so an empty string won't be +passed to the generator program because of this. If there are multiple +elements in `extra_args`, they are inserted into to the actual +argument list as separate elements. + +```meson +gen3 = generator(genprog, + output : '@BASENAME@.cc', + arguments : ['@INPUT@', '@EXTRA_ARGS@', '@OUTPUT@']) +gen3_src1 = gen3.process('input1.y') +gen3_src2 = gen3.process('input2.y', extra_args: '--foo') +gen3_src3 = gen3.process('input3.y', extra_args: ['--foo', '--bar']) +``` diff --git a/docs/markdown/Gnome-module.md b/docs/markdown/Gnome-module.md index b33009d..ad3715e 100644 --- a/docs/markdown/Gnome-module.md +++ b/docs/markdown/Gnome-module.md @@ -46,6 +46,25 @@ file called `foobar.h`, which you can then include in your sources. Returns an array containing: `[c_source, header_file]` or `[gresource_bundle]` +Example: + +```meson +gnome = import('gnome') + +asresources = gnome.compile_resources( + 'as-resources', 'data/asresources.gresource.xml', + source_dir: 'data', + c_name: 'as' +) + +executable( + meson.project_name(), + asresources, + dependencies: my_deps, + install: true +) +``` + ### gnome.generate_gir() Generates GObject introspection data. Takes one positional argument, @@ -104,7 +123,9 @@ Returns an array of two elements which are: `[c_source, header_file]` ### gnome.mkenums() Generates enum files for GObject using the `glib-mkenums` tool. The -first argument is the base name of the output files. 
+first argument is the base name of the output files, unless `c_template` +and `h_template` are specified. In this case, the output files will be +the base name of the values passed as templates. This method is essentially a wrapper around the `glib-mkenums` tool's command line API. It is the most featureful method for enum creation. diff --git a/docs/markdown/IDE-integration.md b/docs/markdown/IDE-integration.md index f7939dd..f608c5c 100644 --- a/docs/markdown/IDE-integration.md +++ b/docs/markdown/IDE-integration.md @@ -6,7 +6,7 @@ short-description: Meson's API to integrate Meson support into an IDE Meson has exporters for Visual Studio and XCode, but writing a custom backend for every IDE out there is not a scalable approach. To solve this problem, Meson provides an API that makes it easy for any IDE or build tool to integrate Meson builds and provide an experience comparable to a solution native to the IDE. -The basic tool for this is a script called `mesonintrospect.py`. Some distro packages might not expose this script in the regular path, and in this case you need to execute it from the install directory. +The basic tool for this is `meson introspect`. The first thing to do when setting up a Meson project in an IDE is to select the source and build directories. For this example we assume that the source resides in an Eclipse-like directory called `workspace/project` and the build tree is nested inside it as `workspace/project/build`. First we initialise Meson by running the following command in the source directory. @@ -16,13 +16,13 @@ For the remainder of the document we assume that all commands are executed insid The first thing you probably want is to get a list of top level targets. For that we use the introspection tool. It comes with extensive command line help so we recommend using that in case problems appear. - mesonintrospect.py --targets + meson introspect --targets The JSON formats will not be specified in this document. The easiest way of learning them is to look at sample output from the tool. Once you have a list of targets, you probably need the list of source files that comprise the target. To get this list for a target, say `exampletarget`, issue the following command. - mesonintrospect.py --target-files exampletarget + meson introspect --target-files exampletarget In order to make code completion work, you need the compiler flags for each compilation step. Meson does not provide this itself, but the Ninja tool Meson uses to build does provide it. To find out the compile steps necessary to build target foo, issue the following command. @@ -32,7 +32,7 @@ Note that if the target has dependencies (such as generated sources), then the c The next thing to display is the list of options that can be set. These include build type and so on. Here's how to extract them. - mesonintrospect.py --buildoptions + meson introspect --buildoptions To set the options, use the `meson configure` command. @@ -40,6 +40,6 @@ Compilation and unit tests are done as usual by running the `ninja` and `ninja t When these tests fail, the user probably wants to run the failing test in a debugger. To make this as integrated as possible, extract the test test setups with this command. - mesonintrospect.py --tests + meson introspect --tests This provides you with all the information needed to run the test: what command to execute, command line arguments and environment variable settings. 
diff --git a/docs/markdown/Icestorm-module.md b/docs/markdown/Icestorm-module.md index 896311f..bc2ad61 100644 --- a/docs/markdown/Icestorm-module.md +++ b/docs/markdown/Icestorm-module.md @@ -1,6 +1,6 @@ -# Unstable SIMD module +# Unstable IceStorm module -This module provides is available since version 0.45.0. +This module is available since version 0.45.0. **Note**:Â this module is unstable. It is only provided as a technology preview. Its API may change in arbitrary ways between releases or it @@ -8,7 +8,7 @@ might be removed from Meson altogether. ## Usage -This module provides an experimental to create FPGA bitstreams using +This module provides an experimental method to create FPGA bitstreams using the [IceStorm](http://www.clifford.at/icestorm/) suite of tools. The module exposes only one method called `project` and it is used @@ -24,4 +24,4 @@ constraint file. This produces output files called `projname.asc`, `projname.blif` and `projname.bin`. In addition it creates two run targets called `projname-time` for running timing analysis and `projname-upload` that uploads the generated bitstream to an FPGA -devide using the `iceprog` programming executable. +device using the `iceprog` programming executable. diff --git a/docs/markdown/Installing.md b/docs/markdown/Installing.md index 4670544..b8e6a81 100644 --- a/docs/markdown/Installing.md +++ b/docs/markdown/Installing.md @@ -29,6 +29,19 @@ install_man('foo.1') # -> share/man/man1/foo.1.gz install_data('datafile.dat', install_dir : join_paths(get_option('datadir'), 'progname')) # -> share/progname/datafile.dat ``` +`install_data()` supports rename of the file *since 0.46.0*. + +```meson +# file.txt -> {datadir}/{projectname}/new-name.txt +install_data('file.txt', rename : 'new-name.txt') + +# file1.txt -> share/myapp/dir1/data.txt +# file2.txt -> share/myapp/dir2/data.txt +install_data(['file1.txt', 'file2.txt'], + rename : ['dir1/data.txt', 'dir2/data.txt'], + install_dir : 'share/myapp') +``` + Sometimes you want to copy an entire subtree directly. For this use case there is the `install_subdir` command, which can be used like this. ```meson diff --git a/docs/markdown/Localisation.md b/docs/markdown/Localisation.md index 34cad8d..517b642 100644 --- a/docs/markdown/Localisation.md +++ b/docs/markdown/Localisation.md @@ -4,37 +4,57 @@ short-description: Localization with GNU Gettext # Localisation -Localising your application with GNU gettext takes a little effort but is quite straightforward. This documentation assumes that you have a `po` subdirectory at your project root directory that contains all the localisation info. +Localising your application with GNU gettext takes a little effort but is quite straightforward. We'll create a `po` subdirectory at your project root directory for all the localisation info. -The first thing you need is a file called `POTFILES`. It lists all the source files that gettext should scan in order to find strings to translate. The syntax of the file is one line per source file and the line must contain the relative path from source root. A sample POTFILES might look like this. +## Generating .pot and .po files +In your main meson.build file include the `po` subdirectory in the build proces. + + subdir('po') + +In this `po` subdirectory we need: +- `LINGUAS`: Space separated list of languages +- `POTFILES`: List of source files to scan for translatable strings. +- `meson.build`: Localization specific meson file + +### LINGUAS +File with space separated list of languages. 
A sample LINGUAS might look like this. + + aa ab ae af + +### POTFILES +File that lists all the source files that gettext should scan in order to find strings to translate. The syntax of the file is one line per source file and the line must contain the relative path from source root. A sample POTFILES might look like this. src/file1.c src/file2.c src/subdir/file3.c include/mything/somefile.h -We also need to define an array of strings containing all the locales we want to generate. This is done in the Meson file in the `po` subdirectory. Assuming we want to generate Finnish and German localisations, the definition would look like this. - +### meson.build +Localization specific meson file. It imports and uses the `i18n` module. If not defined before it needs to define the `GETTEXT_PACKAGE` global. ```meson -langs = ['fi', 'de'] +i18n = import('i18n') +# define GETTEXT_PACKAGE +add_project_arguments('-DGETTEXT_PACKAGE="intltest"', language:'c') +i18n.gettext(meson.project_name(), + args: '--directory=' + meson.source_root() +) ``` +The first command imports the `i18n` module that provides gettext features. The fourth line does the actual invocation. The first argument is the gettext package name. This causes two things to happen. The first is that Meson will generate binary mo files and put them to their proper locations when doing an install. The second is that it creates a build rule to regenerate the main pot file. If you are using the Ninja backend, this is how you would invoke the rebuild. -Then we need to generate the main pot file. Usually this is generated manually or exists already. If not, see later on how to generate it using Meson. The potfile can have any name but is usually the name of the gettext package. Let's say the project is called *intltest*. In this case the corresponding pot file would be called `intltest.pot`. +### generate .pot file -For each language listed in the array above we need a corresponding `.po` file. This has to be generated manually, see the gettext manual for details. Once we have all this, we can define the localisation to Meson with these lines. +Then we need to generate the main pot file. The potfile can have any name but is usually the name of the gettext package. Let's say the project is called *intltest*. In this case the corresponding pot file would be called `intltest.pot`. -```meson -i18n = import('i18n') -langs = ['fi', 'de'] -i18n.gettext('intltest', languages : langs) -``` - -The first command imports the `i18n` module that provides gettext features. The third line does the actual invocation. The first argument is the gettext package name. This causes two things to happen. The first is that Meson will generate binary mo files and put them to their proper locations when doing an install. The second is that it creates a build rule to regenerate the main pot file. If you are using the Ninja backend, this is how you would invoke the rebuild. +Run the following command from your build folder to generate the pot file. It is recommended to inspect it manually afterwards and fill in e.g. proper copyright and contact information. ```console $ ninja intltest-pot ``` -If the pot file does not yet exist, it will be created. It is recommended to inspect it manually afterwards and fill in e.g. proper copyright and contact information. +### generate .po files -Meson does not currently have built in commands for generating po files from the pot file. 
This is because translations are usually done by people who are not developers and thus have their own workflows. +For each language listed in the array above we need a corresponding `.po` file. Those can be generated by running the following command from your build folder. + +```console +$ ninja intltest-update-po +``` diff --git a/docs/markdown/Pkgconfig-module.md b/docs/markdown/Pkgconfig-module.md index cbe01b4..853cf50 100644 --- a/docs/markdown/Pkgconfig-module.md +++ b/docs/markdown/Pkgconfig-module.md @@ -38,8 +38,9 @@ keyword arguments. search path, for example if you install headers into `${PREFIX}/include/foobar-1`, the correct value for this argument would be `foobar-1` -- `requires` list of strings to put in the `Requires` field -- `requires_private` list of strings to put in the `Requires.private` +- `requires` list of strings, pkgconfig-dependencies or libraries that + `pkgconfig.generate()` was used on to put in the `Requires` field +- `requires_private` same as `requires` but for `Requires.private` field field - `url` a string with a url for the library - `variables` a list of strings with custom variables to add to the diff --git a/docs/markdown/Project-templates.md b/docs/markdown/Project-templates.md index d8459c6..5f323bd 100644 --- a/docs/markdown/Project-templates.md +++ b/docs/markdown/Project-templates.md @@ -25,6 +25,6 @@ $ ninja -C builddir ``` The generator has many different projects and settings. They can all -be listed by invoking the command `meson test --help`. +be listed by invoking the command `meson init --help`. This feature is available since Meson version 0.45.0. diff --git a/docs/markdown/Python-3-module.md b/docs/markdown/Python-3-module.md index dc6f571..7dda672 100644 --- a/docs/markdown/Python-3-module.md +++ b/docs/markdown/Python-3-module.md @@ -18,6 +18,10 @@ conventions of the target platform. All positional and keyword arguments are the same as for [shared_module](Reference-manual.md#shared_module). +`extension_module` does not add any dependencies to the library so user may +need to add `dependencies : dependency('python3')`, see +[Python3 dependency](Dependencies.md#Python3). + *Added 0.38.0* ## language_version diff --git a/docs/markdown/Reference-manual.md b/docs/markdown/Reference-manual.md index a557f0c..54b7131 100644 --- a/docs/markdown/Reference-manual.md +++ b/docs/markdown/Reference-manual.md @@ -173,7 +173,8 @@ These are all the supported keyword arguments: mode, all the variables in the `configuration:` object (see above) are written to the `output:` file. - `install_dir` the subdirectory to install the generated file to - (e.g. `share/myproject`), if omitted the file is not installed. + (e.g. `share/myproject`), if omitted or given the value of empty + string, the file is not installed. - `output` the output file name (since v0.41.0, may contain `@PLAINNAME@` or `@BASENAME@` substitutions). In configuration mode, the permissions of the input file (if it is specified) are copied to @@ -267,6 +268,8 @@ keyword arguments. 
- `include_directories`, the directories to add to header search path - `link_args`, link arguments to use - `link_with`, libraries to link against + - `link_whole`, libraries to link fully, same as [`executable`](#executable) + Since 0.46.0 - `sources`, sources to add to targets (or generated header files that should be built before sources including them are built) - `version`, the version of this dependency, such as `1.2.3` @@ -366,9 +369,8 @@ can be of the following types: These input files can be sources, objects, libraries, or any other file. Meson will automatically categorize them based on the extension and use them accordingly. For instance, sources (`.c`, `.cpp`, -`.vala`, `.rs`, etc) will be compiled, objects (`.o`, `.obj`) and -libraries (`.so`, `.dll`, etc) will be linked, and all other files -(headers, unknown extensions, etc) will be ignored. +`.vala`, `.rs`, etc) will be compiled and objects (`.o`, `.obj`) and +libraries (`.so`, `.dll`, etc) will be linked. With the Ninja backend, Meson will create a build-time [order-only dependency](https://ninja-build.org/manual.html#ref_dependencies) on @@ -433,7 +435,7 @@ be passed to [shared and static libraries](#library). - `install_dir` override install directory for this file. The value is relative to the `prefix` specified. F.ex, if you want to install plugins into a subdir, you'd use something like this: `install_dir : - get_option('libdir') + '/projectname-1.0'`. + join_paths(get_option('libdir'), 'projectname-1.0'`). - `install_rpath` a string to set the target's rpath to after install (but *not* before that) - `objects` list of prebuilt object files (usually for third party @@ -580,7 +582,7 @@ the following special substitutions: - `@PLAINNAME@`: the complete input file name, e.g: `foo.c` becomes `foo.c` (unchanged) - `@BASENAME@`: the base of the input filename, e.g.: `foo.c.y` becomes `foo.c` (extension is removed) -Each string passed to the `outputs` keyword argument *must* be +Each string passed to the `output` keyword argument *must* be constructed using one or both of these two substitutions. In addition to the above substitutions, the `arguments` keyword @@ -613,8 +615,13 @@ Obtains the value of the [project build option](Build-options.md) specified in t Note that the value returned for built-in options that end in `dir` such as `bindir` and `libdir` is always a path relative to (and inside) the `prefix`. + The only exceptions are: `sysconfdir`, `localstatedir`, and `sharedstatedir` -which will return the value passed during configuration as-is. +which will return the value passed during configuration as-is, which may be +absolute, or relative to `prefix`. [`install_dir` arguments](Installing.md) +handles that as expected, but if you need the absolute path to one of these +e.g. to use in a define etc., you should use `join_paths(get_option('prefix'), +get_option('localstatedir')))` ### get_variable() @@ -661,6 +668,10 @@ Note that this function call itself does not add the directories into the search path, since there is no global search path. For something like that, see [`add_project_arguments()`](#add_project_arguments). +See also `implicit_include_directories` parameter of +[executable()](#executable), which adds current source and build directories +to include path. + Each directory given is converted to two include paths: one that is relative to the source root and one relative to the build root. @@ -712,6 +723,8 @@ arguments. The following keyword arguments are supported: directory. 
If this is a relative path, it is assumed to be relative to the prefix. + If omitted, the directory defaults to `{datadir}/{projectname}` *(added 0.45.0)*. + - `install_mode` specify the file mode in symbolic format and optionally the owner/uid and group/gid for the installed files. For example: @@ -724,6 +737,13 @@ arguments. The following keyword arguments are supported: To leave any of these three as the default, specify `false`. +- `rename` if specified renames each source file into corresponding file + from `rename` list. Nested paths are allowed and they are joined with + `install_dir`. Length of `rename` list must be equal to the number of sources. + *(added 0.46.0)* + +See [Installing](Installing.md) for more examples. + ### install_headers() ``` meson @@ -974,14 +994,20 @@ Project supports the following keyword arguments. runresult run_command(command, list_of_args) ``` -Runs the command specified in positional arguments. Returns [an opaque -object](#run-result-object) containing the result of the -invocation. The script is run from an *unspecified* directory, and +Runs the command specified in positional arguments. +`command` can be a string, or the output of [`find_program()`](#find_program), +[`files()`](#files) or [`configure_file()`](#configure_file), or +[a compiler object](#compiler-object). + +Returns [an opaque object](#run-result-object) containing the result of the +invocation. The command is run from an *unspecified* directory, and Meson will set three environment variables `MESON_SOURCE_ROOT`, `MESON_BUILD_ROOT` and `MESON_SUBDIR` that specify the source directory, build directory and subdirectory the target was defined in, respectively. +See also [External commands](External-commands.md). + ### run_target ``` meson @@ -1132,15 +1158,25 @@ subproject. However, if you want to use a dependency object from inside a subproject, an easier way is to use the `fallback:` keyword argument to [`dependency()`](#dependency). +[See additional documentation](Subprojects.md). + ### test() ``` meson void test(name, executable, ...) ``` -Defines a unit test. Takes two positional arguments, the first is the -name of this test and the second is the executable to run. Keyword -arguments are the following. +Defines a test to run with the test harness. Takes two positional arguments, +the first is the name of the test and the second is the executable to run. +The executable can be an [executable build target object](#build-target-object) +returned by [`executable()`](#executable) or an +[external program object](#external-program-object) returned by +[`find_program()`](#find_program). The executable's exit code is used by the +test harness to record the outcome of the test, for example exit code zero +indicates success. For more on the Meson test harness protocol read +[Unit Tests](Unit-tests.md). + +Keyword arguments are the following: - `args` arguments to pass to the executable @@ -1189,10 +1225,18 @@ be up to date on every build. Keywords are similar to `custom_target`. Meson will read the contents of `input`, substitute the `replace_string` with the detected revision number, and write the -result to `output`. This method returns an opaque -[`custom_target`](#custom_target) object that can be used as -source. If you desire more specific behavior than what this command -provides, you should use `custom_target`. +result to `output`. 
This method returns a +[`custom_target`](#custom_target) object that (as usual) should be +used to signal dependencies if other targets use the file outputted +by this. + +For example, if you generate a header with this and want to use that in +a build target, you must add the return value to the sources of that +build target. Without that, Meson will not know the order in which to +build the targets. + +If you desire more specific behavior than what this command provides, +you should use `custom_target`. ## Built-in objects @@ -1220,7 +1264,17 @@ the following methods. current backend: `ninja`, `vs2010`, `vs2015`, `vs2017`, or `xcode`. - `build_root()` returns a string with the absolute path to the build - root directory. + root directory. Note: this function will return the build root of + the parent project if called from a subproject, which is usually + not what you want. Try using `current_build_dir()`. + +- `source_root()` returns a string with the absolute path to the + source root directory. Note: you should use the `files()` function + to refer to files in the root source directory instead of + constructing paths manually with `meson.source_root()`. This + function will return the source root of the parent project if called + from a subproject, which is usually not what you want. Try using + `current_source_dir()`. - `current_build_dir()` returns a string with the absolute path to the current build directory. @@ -1284,11 +1338,6 @@ the following methods. /path/to/meson.py introspect`. The user is responsible for splitting the string to an array if needed. -- `source_root()` returns a string with the absolute path to the - source root directory. Note: you should use the `files()` function - to refer to files in the root source directory instead of - constructing paths manually with `meson.source_root()`. - - `project_version()` returns the version string specified in `project` function call. - `project_license()` returns the array of licenses specified in `project` function call. @@ -1738,7 +1787,7 @@ tests. It has the following methods. - `set(varname, value)` sets environment variable in the first argument to the value in the second argument, e.g. - `env.set('FOO', 'BAR') sets envvar`FOO`to value`BAR\` + `env.set('FOO', 'BAR')` sets envvar `FOO` to value `BAR` ### `external library` object @@ -1773,7 +1822,7 @@ opaque object representing it. - `get_variable(name)` fetches the specified variable from inside the subproject. This is useful to, for instance, get a [declared - dependency](#declare_dependency) from the subproject. + dependency](#declare_dependency) from the [subproject](Subprojects.md). ### `run result` object diff --git a/docs/markdown/Reference-tables.md b/docs/markdown/Reference-tables.md index 5ee0db1..7611232 100644 --- a/docs/markdown/Reference-tables.md +++ b/docs/markdown/Reference-tables.md @@ -63,3 +63,21 @@ These are provided by the `.system()` method call. Any string not listed above is not guaranteed to remain stable in future releases. + + +## Language arguments parameter names + +These are the parameter names for passing language specific arguments to your build target. 
+ +| Language | Parameter name | +| ----- | ----- | +| C | c_args | +| C++ | cpp_args | +| C# | cs_args | +| D | d_args | +| Fortran | fortran_args | +| Java | java_args | +| Objective C | objc_args | +| Objective C++ | objcpp_args | +| Rust | rust_args | +| Vala | vala_args | diff --git a/docs/markdown/Release-notes-for-0.45.0.md b/docs/markdown/Release-notes-for-0.45.0.md index b3df71c..6b24183 100644 --- a/docs/markdown/Release-notes-for-0.45.0.md +++ b/docs/markdown/Release-notes-for-0.45.0.md @@ -1,16 +1,194 @@ --- title: Release 0.45 -short-description: Release notes for 0.45 (preliminary) +short-description: Release notes for 0.45 ... # New features -This page is a placeholder for the eventual release notes. +## Python minimum version is now 3.5 -Notable new features should come with release note updates. This is -done by creating a file snippet called `snippets/featurename.md` and -whose contents should look like this: +Meson will from this version on require Python version 3.5 or newer. - ## Feature name +## Config-Tool based dependencies can be specified in a cross file - A short description explaining the new feature and how it should be used. +Tools like LLVM and pcap use a config tool for dependencies, this is a +script or binary that is run to get configuration information (cflags, +ldflags, etc) from. + +These binaries may now be specified in the `binaries` section of a +cross file. + +```dosini +[binaries] +cc = ... +llvm-config = '/usr/bin/llvm-config32' +``` + +## Visual Studio C# compiler support + +In addition to the Mono C# compiler we also support Visual Studio's C# +compiler. Currently this is only supported on the Ninja backend. + +## Removed two deprecated features + +The standalone `find_library` function has been a no-op for a long +time. Starting with this version it becomes a hard error. + +There used to be a keywordless version of `run_target` which looked +like this: + + run_target('targetname', 'command', 'arg1', 'arg2') + +This is now an error. The correct format for this is now: + + run_target('targetname', + command : ['command', 'arg1', 'arg2']) + +## Experimental FPGA support + +This version adds support for generating, analysing and uploading FPGA +programs using the [IceStorm +toolchain](http://www.clifford.at/icestorm/). This support is +experimental and is currently limited to the `iCE 40` series of FPGA +chips. + +FPGA generation integrates with other parts of Meson seamlessly. As an +example, [here](https://github.com/jpakkane/lm32) is an example +project that compiles a simple firmware into Verilog and combines that +with an lm32 softcore processor. + +## Generator outputs can preserve directory structure + +Normally when generating files with a generator, Meson flattens the +input files so they all go in the same directory. Some code +generators, such as Protocol Buffers, require that the generated files +have the same directory layout as the input files used to generate +them. This can now be achieved like this: + +```meson +g = generator(...) # Compiles protobuf sources +generated = gen.process('com/mesonbuild/one.proto', + 'com/mesonbuild/two.proto', + preserve_path_from : meson.current_source_dir()) +``` + +This would cause the following files to be generated inside the target +private directory: + + com/mesonbuild/one.pb.h + com/mesonbuild/one.pb.cc + com/mesonbuild/two.pb.h + com/mesonbuild/two.pb.cc + +## Hexadecimal string literals + +Hexadecimal integer literals can now be used in build and option files. 
+ + int_255 = 0xFF + +## b_ndebug : if-release + +The value `if-release` can be given for the `b_ndebug` project option. + +This will make the `NDEBUG` pre-compiler macro to be defined for release +type builds as if the `b_ndebug` project option had had the value `true` +defined for it. + +## `install_data()` defaults to `{datadir}/{projectname}` + +If `install_data()` is not given an `install_dir` keyword argument, the +target directory defaults to `{datadir}/{projectname}` (e.g. +`/usr/share/myproj`). + +## install_subdir() supports strip_directory + +If strip_directory=true install_subdir() installs directory contents +instead of directory itself, stripping basename of the source directory. + +## Integer options + +There is a new integer option type with optional minimum and maximum +values. It can be specified like this in the `meson_options.txt` file: + + option('integer_option', type : 'integer', min : 0, max : 5, value : 3) + +## New method meson.project_license() + +The `meson` builtin object now has a `project_license()` method that +returns a list of all licenses for the project. + +## Rust cross-compilation + +Cross-compilation is now supported for Rust targets. Like other +cross-compilers, the Rust binary must be specified in your cross +file. It should specify a `--target` (as installed by `rustup target`) +and a custom linker pointing to your C cross-compiler. For example: + +``` +[binaries] +c = '/usr/bin/arm-linux-gnueabihf-gcc-7' +rust = [ + 'rustc', + '--target', 'arm-unknown-linux-gnueabihf', + '-C', 'linker=/usr/bin/arm-linux-gnueabihf-gcc-7', +] +``` + +## Rust compiler-private library disambiguation + +When building a Rust target with Rust library dependencies, an +`--extern` argument is now specified to avoid ambiguity between the +dependency library, and any crates of the same name in `rustc`'s +private sysroot. + +## Project templates + +Meson ships with predefined project templates. To start a new project from +scratch, simply go to an empty directory and type: + +```meson +meson init --name=myproject --type=executable --language=c +``` + +## Improve test setup selection + +Test setups are now identified (also) by the project they belong to +and it is possible to select the used test setup from a specific +project. E.g. to use a test setup `some_setup` from project +`some_project` for all executed tests one can use + + meson test --setup some_project:some_setup + +Should one rather want test setups to be used from the same project as +where the current test itself has been defined, one can use just + + meson test --setup some_setup + +In the latter case every (sub)project must have a test setup `some_setup` +defined in it. + +## Can use custom targets as Windows resource files + +The `compile_resources()` function of the `windows` module can now be used on custom targets as well as regular files. +# Can promote dependencies with wrap command + +The `promote` command makes it easy to copy nested dependencies to the top level. + + meson wrap promote scommon + +This will search the project tree for a subproject called `scommon` +and copy it to the top level. + +If there are many embedded subprojects with the same name, you have to +specify which one to promote manually like this: + + meson wrap promote subprojects/s1/subprojects/scommon + +## Yielding subproject option to superproject + +Normally project options are specific to the current project. However +sometimes you want to have an option whose value is the same over all +projects. 
This can be achieved with the new `yield` keyword for +options. When set to `true`, getting the value of this option in +`meson.build` files gets the value from the option with the same name +in the master project (if such an option exists). diff --git a/docs/markdown/Release-notes-for-0.46.0.md b/docs/markdown/Release-notes-for-0.46.0.md new file mode 100644 index 0000000..395a94d --- /dev/null +++ b/docs/markdown/Release-notes-for-0.46.0.md @@ -0,0 +1,16 @@ +--- +title: Release 0.46 +short-description: Release notes for 0.46 (preliminary) +... + +# New features + +This page is a placeholder for the eventual release notes. + +Notable new features should come with release note updates. This is +done by creating a file snippet called `snippets/featurename.md` and +whose contents should look like this: + + ## Feature name + + A short description explaining the new feature and how it should be used. diff --git a/docs/markdown/Running-Meson.md b/docs/markdown/Running-Meson.md index 23d5e97..14b2d19 100644 --- a/docs/markdown/Running-Meson.md +++ b/docs/markdown/Running-Meson.md @@ -146,3 +146,9 @@ Meson has a standard command line help feature. It can be accessed with the following command. meson --help + +Exit status +== + +Meson exits with status 0 if successful, 1 for problems with the command line or +meson.build file, and 2 for internal errors. diff --git a/docs/markdown/Subprojects.md b/docs/markdown/Subprojects.md index 14f01d4..ad2aae2 100644 --- a/docs/markdown/Subprojects.md +++ b/docs/markdown/Subprojects.md @@ -18,7 +18,7 @@ becomes a transparent part of the project. The basic idiom goes something like this. ```meson -dep = dependency('foo', fallback : [subproject_name, variable_name] +dep = dependency('foo', fallback : [subproject_name, variable_name]) ``` As an example, suppose we have a simple project that provides a shared @@ -40,7 +40,7 @@ this. ```meson project('master', 'c') -dep = dependency('simple', fallback : ['simple', 'simple_dep'] +dep = dependency('simple', fallback : ['simple', 'simple_dep']) exe = executable('prog', 'prog.c', dependencies : dep, install : true) ``` @@ -77,3 +77,27 @@ subproject `b` and have `b` also use `a`. Meson ships with a dependency system to automatically obtain dependency subprojects. It is documented in the [Wrap dependency system manual](Wrap-dependency-system-manual.md). + +# Why must all subprojects be inside a single directory? + +There are several reasons. + +First of all, to maintain any sort of sanity, the system must prevent going +inside other subprojects with `subdir()` or variations thereof. Having the +subprojects in well defined places makes this easy. If subprojects could be +anywhere at all, it would be a lot harder. + +Second of all it is extremely important that end users can easily see what +subprojects any project has. Because they are in one, and only one, place, +reviewing them becomes easy. + +This is also a question of convention. Since all Meson projects have the same +layout w.r.t subprojects, switching between projects becomes easier. You don't +have to spend time on a new project traipsing through the source tree looking +for subprojects. They are always in the same place. + +Finally if you can have subprojects anywhere, this increases the possibility of +having many different (possibly incompatible) versions of a dependency in your +source tree. Then changing some code (such as changing the order you traverse +directories) may cause a completely different version of the subproject to be +used by accident. 
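As a minimal sketch of the `yield` option behaviour described in the 0.45 release notes above (the option name `with_docs` is an assumption made for illustration): if the subproject declares the option with `yield : true` and the superproject defines an option of the same name, reading the option inside the subproject returns the superproject's value.

```meson
# meson_options.txt of a subproject; the option name is hypothetical.
# With `yield : true`, get_option('with_docs') evaluated inside the
# subproject picks up the value of the superproject's option of the
# same name, if such an option exists; otherwise the local value is used.
option('with_docs', type : 'boolean', value : true, yield : true)
```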
diff --git a/docs/markdown/Unit-tests.md b/docs/markdown/Unit-tests.md index afbeaa0..53ce9ec 100644 --- a/docs/markdown/Unit-tests.md +++ b/docs/markdown/Unit-tests.md @@ -30,7 +30,7 @@ Note how you need to specify multiple values as an array. Coverage -- -If you enable coverage measurements by giving Meson the command line flag `-Db_coverage=true`, you can generate coverage reports. Meson will autodetect what coverage generator tools you have installed and will generate the corresponding targets. These targets are `coverage-xml` and `coverage-text` which are both provided by [Gcovr](http://gcovr.com) and `coverage-html`, which requires [Lcov](https://ltp.sourceforge.io/coverage/lcov.php) and [GenHTML](https://linux.die.net/man/1/genhtml). +If you enable coverage measurements by giving Meson the command line flag `-Db_coverage=true`, you can generate coverage reports. Meson will autodetect what coverage generator tools you have installed and will generate the corresponding targets. These targets are `coverage-xml` and `coverage-text` which are both provided by [Gcovr](http://gcovr.com) and `coverage-html`, which requires [Lcov](https://ltp.sourceforge.io/coverage/lcov.php) and [GenHTML](https://linux.die.net/man/1/genhtml) or [Gcovr](http://gcovr.com) with html support. The output of these commands is written to the log directory `meson-logs` in your build directory. diff --git a/docs/markdown/Users.md b/docs/markdown/Users.md index f02e4c7..e152555 100644 --- a/docs/markdown/Users.md +++ b/docs/markdown/Users.md @@ -4,60 +4,73 @@ title: Users # List of projects using Meson -If you have a project that uses Meson that you want to add to this list, let us know and we'll add it. +If you have a project that uses Meson that you want to add to this list, please [file a pull-request](https://github.com/mesonbuild/meson/edit/master/docs/markdown/Users.md) for it. All the software on this list is tested for regressions before release, so it's highly recommended that projects add themselves here. - [AQEMU](https://github.com/tobimensch/aqemu), a Qt GUI for QEMU virtual machines, since version 0.9.3 - [Arduino sample project](https://github.com/jpakkane/mesonarduino) - [Budgie Desktop](https://github.com/budgie-desktop/budgie-desktop), a desktop environment built on GNOME technologies - [casync](https://github.com/systemd/casync), Content-Addressable Data Synchronization Tool - - [Dpdk](http://dpdk.org/ml/archives/dev/2018-January/089724.html), Data plane development kit, a set of libraries and drivers for fast packet processing + - [cinnamon-desktop](https://github.com/linuxmint/cinnamon-desktop), the cinnamon desktop library + - [dbus-broker](https://github.com/bus1/dbus-broker), Linux D-Bus Message Broker + - [Dpdk](http://dpdk.org/browse/dpdk), Data plane development kit, a set of libraries and drivers for fast packet processing + - [DXVK](https://github.com/doitsujin/dxvk), a Vulkan-based Direct3D 11 implementation for Linux using Wine - [Emeus](https://github.com/ebassi/emeus), Constraint based layout manager for GTK+ - - [Frida](https://www.frida.re/), a dynamic binary instrumentation toolkit + - [Frida](https://github.com/frida/frida-core), a dynamic binary instrumentation toolkit - [fwupd](https://github.com/hughsie/fwupd), a simple daemon to allow session software to update firmware - [Geary](https://wiki.gnome.org/Apps/Geary), an email application built around conversations, for the GNOME 3 desktop. 
- - [GLib](https://git.gnome.org/browse/glib/), cross-platform C library used by GTK+ and GStreamer (not the default yet) - - [Gnome Builder](https://git.gnome.org/browse/gnome-builder/), an IDE for the Gnome platform + - [GLib](https://gitlab.gnome.org/GNOME/glib), cross-platform C library used by GTK+ and GStreamer (not the default yet) + - [Gnome Boxes](https://gitlab.gnome.org/GNOME/gnome-boxes), a Gnome hypervisor + - [Gnome Builder](https://gitlab.gnome.org/GNOME/gnome-builder), an IDE for the Gnome platform - [Gnome MPV](https://github.com/gnome-mpv/gnome-mpv), Gnome frontend to the mpv video player - - [Gnome Recipes](https://github.com/matthiasclasen/gr), application for cooking recipes - - [Gnome Software](https://git.gnome.org//browse/gnome-software), an app store for Gnome + - [Gnome Recipes](https://gitlab.gnome.org/GNOME/recipes), application for cooking recipes + - [Gnome Software](https://gitlab.gnome.org/GNOME/gnome-software), an app store for Gnome - [Gnome Twitch](https://github.com/vinszent/gnome-twitch), an app for viewing Twitch streams on Gnome desktop + - [Gnome Usage](https://gitlab.gnome.org/GNOME/gnome-usage), a Gnome application for visualizing system resources - [Graphene](https://ebassi.github.io/graphene/), a thin type library for graphics - - [Grilo](https://mail.gnome.org/archives/grilo-list/2017-February/msg00000.html) and [Grilo plugins](https://git.gnome.org/browse/grilo-plugins/commit/?id=ea047c4fb63e90268eb795ed91a09a2be5068a4c), the Grilo multimedia framework + - [Grilo](https://git.gnome.org/browse/grilo) and [Grilo plugins](https://git.gnome.org/browse/grilo-plugins), the Grilo multimedia framework - [GStreamer](https://cgit.freedesktop.org/gstreamer/gstreamer/), multimedia framework (not the default yet) - - [GTK+](https://git.gnome.org/browse/gtk+/), the multi-platform toolkit used by GNOME + - [GTK+](https://gitlab.gnome.org/GNOME/gtk), the multi-platform toolkit used by GNOME - [GtkDApp](https://gitlab.com/csoriano/GtkDApp), an application template for developing Flatpak apps with Gtk+ and D + - [GVfs](https://git.gnome.org/browse/gvfs/), a userspace virtual filesystem designed to work with the I/O abstraction of GIO + - [Hardcode-Tray](https://github.com/bil-elmoussaoui/Hardcode-Tray), fixes hardcoded tray icons in Linux - [HexChat](https://github.com/hexchat/hexchat), a cross-platform IRC client in C - [IGT](https://cgit.freedesktop.org/xorg/app/intel-gpu-tools/), Linux kernel graphics driver test suite. 
- [JsonCpp](https://github.com/open-source-parsers/jsoncpp), a C++ library for interacting with JSON - - [Json-glib](https://git.gnome.org/browse/json-glib), GLib-based JSON manipulation library + - [Json-glib](https://gitlab.gnome.org/GNOME/json-glib), GLib-based JSON manipulation library - [Ksh](https://github.com/att/ast), a Korn Shell - [Libdrm](https://cgit.freedesktop.org/drm/libdrm/), a library for abstracting DRM kernel interfaces - [Libepoxy](https://github.com/anholt/libepoxy/), a library for handling OpenGL function pointer management - - [Libgit2-glib](https://git.gnome.org/browse/libgit2-glib/), a GLib wrapper for libgit2 - - [Libhttpseverywhere](https://github.com/grindhold/libhttpseverywhere), a library to enable httpseverywhere on any desktop app + - [libfuse](https://github.com/libfuse/libfuse), the reference implementation of the Linux FUSE (Filesystem in Userspace) interface + - [Libgit2-glib](https://git.gnome.org/browse/libgit2-glib), a GLib wrapper for libgit2 + - [Libhttpseverywhere](https://git.gnome.org/browse/libhttpseverywhere), a library to enable httpseverywhere on any desktop app - [Libosmscout](https://github.com/Framstag/libosmscout), a C++ library for offline map rendering, routing and location lookup based on OpenStreetMap data - [Libva](https://github.com/intel/libva), an implementation for the VA (VIdeo Acceleration) API - - [Lightdm-Webkit2-Greeter](https://github.com/Antergos/lightdm-webkit2-greeter) + - [Libzim](https://github.com/openzim/libzim), the reference implementation for the ZIM file format - [Kiwix libraries](https://github.com/kiwix/kiwix-lib) - - [Mesa](https://www.mesa3d.org/), An open source graphics driver project - - [Nautilus](https://git.gnome.org/browse/nautilus/commit/?id=ed5652c89ac0654df2e82b54b00b27d51c825465) the Gnome file manager + - [Mesa](https://cgit.freedesktop.org/mesa/mesa/), An open source graphics driver project + - [MiracleCast](https://github.com/albfan/miraclecast), connect external monitors to your system via Wifi-Display specification aka Miracast + - [Nautilus](https://gitlab.gnome.org/GNOME/nautilus), the Gnome file manager + - [Nemo](https://github.com/linuxmint/nemo), the file manager for the Cinnamon desktop environment - [Orc](http://cgit.freedesktop.org/gstreamer/orc/), the Optimized Inner Loop Runtime Compiler (not the default yet) - [Outlier](https://github.com/kerolasa/outlier), a small Hello World style meson example project - [Pango](https://git.gnome.org/browse/pango/), an Internationalized text layout and rendering library (not the default yet) - [Parzip](https://github.com/jpakkane/parzip), a multithreaded reimplementation of Zip - - [PipeWire](https://pipewire.org/), a framework for video and audio for containerized applications - - [Pitivi](http://pitivi.org/), a nonlinear video editor - - [Polari](https://git.gnome.org/browse/polari), an IRC client + - [PipeWire](https://github.com/PipeWire/pipewire), a framework for video and audio for containerized applications + - [Pithos](https://github.com/pithos/pithos), a Pandora Radio client + - [Pitivi](https://github.com/pitivi/pitivi/), a nonlinear video editor + - [Polari](https://gitlab.gnome.org/GNOME/polari), an IRC client - [radare2](https://github.com/radare/radare2), unix-like reverse engineering framework and commandline tools (not the default) - [SSHFS](https://github.com/libfuse/sshfs), allows you to mount a remote filesystem using SFTP - - [Sysprof](https://wiki.gnome.org/Apps/Sysprof), a profiling tool + - 
[Sysprof](https://git.gnome.org/browse/sysprof), a profiling tool - [systemd](https://github.com/systemd/systemd), the init system + - [szl](https://github.com/dimkr/szl), a lightweight, embeddable scripting language - [Taisei Project](https://taisei-project.org/), an open-source Touhou Project clone and fangame - - [Xorg](https://cgit.freedesktop.org/xorg/xserver/) the X.org display server (not the default yet) + - [xi-gtk](https://github.com/eyelash/xi-gtk), a GTK+ front-end for the Xi editor + - [Xorg](https://cgit.freedesktop.org/xorg/xserver/), the X.org display server (not the default yet) - [Valum](https://github.com/valum-framework/valum), a micro web framework written in Vala - [Wayland and Weston](https://lists.freedesktop.org/archives/wayland-devel/2016-November/031984.html), a next generation display server (not merged yet) - [wlroots](https://github.com/swaywm/wlroots), a modular Wayland compositor library - - [ZStandard](https://github.com/facebook/zstd/commit/4dca56ed832c6a88108a2484a8f8ff63d8d76d91) a compression algorithm developed at Facebook (not used by default) + - [ZStandard](https://github.com/facebook/zstd/commit/4dca56ed832c6a88108a2484a8f8ff63d8d76d91), a compression algorithm developed at Facebook (not used by default) Note that a more up-to-date list of GNOME projects that use Meson can be found [here](https://wiki.gnome.org/Initiatives/GnomeGoals/MesonPorting). diff --git a/docs/markdown/Using-wraptool.md b/docs/markdown/Using-wraptool.md index 08b1bfa..f6023e8 100644 --- a/docs/markdown/Using-wraptool.md +++ b/docs/markdown/Using-wraptool.md @@ -1,9 +1,9 @@ # Using wraptool -Wraptool is a helper tool that allows you to manage your source -dependencies using the WrapDB database. It gives you all things you -would expect, such as installing and updating dependencies. The wrap -tool works on all platforms, the only limitation is that the wrap +Wraptool is a subcommand of Meson that allows you to manage your +source dependencies using the WrapDB database. It gives you all things +you would expect, such as installing and updating dependencies. The +wrap tool works on all platforms, the only limitation is that the wrap definition works on your target platform. If you find some Wraps that don't work, please file bugs or, even better, patches. @@ -16,7 +16,7 @@ are commands to type. The simplest operation to do is to query the list of packages available. To list them all issue the following command: - $ wraptool list + $ meson wrap list box2d enet gtest @@ -33,13 +33,13 @@ available. To list them all issue the following command: Usually you want to search for a specific package. This can be done with the `search` command: - $ wraptool search jpeg + $ meson wrap search jpeg libjpeg To determine which versions of libjpeg are available to install, issue the `info` command: - $ wraptool info libjpeg + $ meson wrap info libjpeg Available versions of libjpeg: 9a 2 @@ -54,7 +54,7 @@ Installing dependencies is just as straightforward. First just create the `subprojects` directory at the top of your source tree and issue the install command. - $ wraptool install libjpeg + $ meson wrap install libjpeg Installed libjpeg branch 9a revision 2 Now you can issue a `subproject('libjpeg')` in your `meson.build` file @@ -62,7 +62,7 @@ to use it. To check if your projects are up to date you can issue the `status` command. - $ wraptool status + $ meson wrap status Subproject status libjpeg up to date. Branch 9a, revision 2. zlib not up to date. Have 1.2.8 2, but 1.2.8 4 is available. 
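To connect the wraptool workflow above to a build definition, here is a hedged sketch of consuming a wrap installed with `meson wrap install libjpeg`. The dependency variable name `jpeg_dep` and the `imagetool` target are assumptions for illustration; the variable actually declared by the libjpeg wrap is not shown in this document, so check the wrap's own `meson.build` for the real name.

```meson
# Sketch: use the subproject that wraptool installed under subprojects/.
libjpeg_proj = subproject('libjpeg')

# Hypothetical variable name; the wrap's meson.build defines the
# dependency object it actually exports.
jpeg_dep = libjpeg_proj.get_variable('jpeg_dep')

executable('imagetool', 'main.c', dependencies : jpeg_dep)
```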
@@ -70,12 +70,12 @@ To check if your projects are up to date you can issue the `status` command. In this case `zlib` has a newer release available. Updating it is straightforward: - $ wraptool update zlib + $ meson wrap update zlib Updated zlib to branch 1.2.8 revision 4 Wraptool can do other things besides these. Documentation for these can be found in the command line help, which can be accessed by -`wraptool --help`. +`meson wrap --help`. ## Promoting dependencies diff --git a/docs/markdown/howtox.md b/docs/markdown/howtox.md index 4e7e220..acc18d7 100644 --- a/docs/markdown/howtox.md +++ b/docs/markdown/howtox.md @@ -125,9 +125,9 @@ Install scan-build and configure your project. Then do this: $ ninja scan-build ``` -You can use the `SCAN_BUILD` environment variable to choose the scan-build executable. +You can use the `SCANBUILD` environment variable to choose the scan-build executable. ```console -$ SCAN_BUILD=<your exe> ninja scan-build +$ SCANBUILD=<your exe> ninja scan-build ``` diff --git a/docs/markdown/index.md b/docs/markdown/index.md index 6893564..cffd488 100644 --- a/docs/markdown/index.md +++ b/docs/markdown/index.md @@ -33,6 +33,16 @@ developers. The first one is the mailing list, which is hosted at The second way is via IRC. The channel to use is `#mesonbuild` at [Freenode](https://freenode.net/). +### [Projects using Meson](http://mesonbuild.com/Users.html) + +Many projects out there are using Meson and their communities are also +a great resource for learning about what (and what not!) to do when +trying to convert to using Meson. + +[A short list of Meson users can be found here](http://mesonbuild.com/Users.html) +but there are many more. We would love to hear about your success +stories and how things could be improved too! + ## Development All development on Meson is done on the [GitHub diff --git a/docs/markdown/snippets/altered-logging.md b/docs/markdown/snippets/altered-logging.md new file mode 100644 index 0000000..4ff9bb0 --- /dev/null +++ b/docs/markdown/snippets/altered-logging.md @@ -0,0 +1,5 @@ +## Log output slightly changed + +The format of some human-readable diagnostic messages has changed in +minor ways. In case you are parsing these messages, you may need to +adjust your code. diff --git a/docs/markdown/snippets/compiler-object-run_command.md b/docs/markdown/snippets/compiler-object-run_command.md new file mode 100644 index 0000000..0308416 --- /dev/null +++ b/docs/markdown/snippets/compiler-object-run_command.md @@ -0,0 +1,10 @@ +## Compiler object can now be passed to run_command() + +This can be used to run the current compiler with the specified arguments +to obtain additional information from it. +One of the use cases is to get the location of development files for the +GCC plugins: + + cc = meson.get_compiler('c') + result = run_command(cc, '-print-file-name=plugin') + plugin_dev_path = result.stdout().strip() diff --git a/docs/markdown/snippets/config-tool-cross.md b/docs/markdown/snippets/config-tool-cross.md deleted file mode 100644 index 1102481..0000000 --- a/docs/markdown/snippets/config-tool-cross.md +++ /dev/null @@ -1,13 +0,0 @@ -# Config-Tool based dependencies can be specified in a cross file - -Tools like LLVM and pcap use a config tool for dependencies, this is a script -or binary that is run to get configuration information (cflags, ldflags, etc) -from. - -These binaries may now be specified in the `binaries` section of a cross file. - -```dosini -[binaries] -cc = ... 
-llvm-config = '/usr/bin/llvm-config32' -``` diff --git a/docs/markdown/snippets/declare_dependency-link_whole.md b/docs/markdown/snippets/declare_dependency-link_whole.md new file mode 100644 index 0000000..827b1f6 --- /dev/null +++ b/docs/markdown/snippets/declare_dependency-link_whole.md @@ -0,0 +1,4 @@ +## declare_dependency() supports link_whole + +`declare_dependency()` supports `link_whole` parameter. +`link_whole` propagates to build target that uses dependency. diff --git a/docs/markdown/snippets/del-old-names.md b/docs/markdown/snippets/del-old-names.md new file mode 100644 index 0000000..c4abc9a --- /dev/null +++ b/docs/markdown/snippets/del-old-names.md @@ -0,0 +1,7 @@ +## Old command names are now errors + +Old executable names `mesonintrospect`, `mesonconf`, `mesonrewriter` +and `mesontest` have been deprecated for a long time. Starting from +this versino they no longer do anything but instead always error +out. All functionality is available as subcommands in the main `meson` +binary. diff --git a/docs/markdown/snippets/deprecations.md b/docs/markdown/snippets/deprecations.md deleted file mode 100644 index adab2e6..0000000 --- a/docs/markdown/snippets/deprecations.md +++ /dev/null @@ -1,14 +0,0 @@ -## Removed two deprecated features - -The standalone `find_library` function has been a no-op for a long -time. Starting with this version it becomes a hard error. - -There used to be a keywordless version of `run_target` which looked -like this: - - run_target('targetname', 'command', 'arg1', 'arg2') - -This is now an error. The correct format for this is now: - - run_target('targetname', - command : ['command', 'arg1', 'arg2']) diff --git a/docs/markdown/snippets/fpga.md b/docs/markdown/snippets/fpga.md deleted file mode 100644 index b5e4938..0000000 --- a/docs/markdown/snippets/fpga.md +++ /dev/null @@ -1,12 +0,0 @@ -## Experimental FPGA support - -This version adds support for generating, analysing and uploading FPGA -programs using the [IceStorm -toolchain](http://www.clifford.at/icestorm/). This support is -experimental and is currently limited to the `iCE 40` series of FPGA -chips. - -FPGA generation integrates with other parts of Meson seamlessly. As an -example, [here](https://github.com/jpakkane/lm32) is an example -project that compiles a simple firmware into Verilog and combines that -with an lm32 softcore processor. diff --git a/docs/markdown/snippets/gen-subdirs.md b/docs/markdown/snippets/gen-subdirs.md deleted file mode 100644 index fdb5945..0000000 --- a/docs/markdown/snippets/gen-subdirs.md +++ /dev/null @@ -1,21 +0,0 @@ -## Generator outputs can preserve directory structure - -Normally when generating files with a generator, Meson flattens the -input files so they all go in the same directory. Some code -generators, such as Protocol Buffers, require that the generated files -have the same directory layout as the input files used to generate -them. This can now be achieved like this: - -```meson -g = generator(...) 
# Compiles protobuf sources -generated = gen.process('com/mesonbuild/one.proto', - 'com/mesonbuild/two.proto', - preserve_path_from : meson.current_source_dir()) - -This would cause the following files to be generated inside the target -private directory: - - com/mesonbuild/one.pb.h - com/mesonbuild/one.pb.cc - com/mesonbuild/two.pb.h - com/mesonbuild/two.pb.cc diff --git a/docs/markdown/snippets/hexnumbers.md b/docs/markdown/snippets/hexnumbers.md deleted file mode 100644 index 840c0cb..0000000 --- a/docs/markdown/snippets/hexnumbers.md +++ /dev/null @@ -1,5 +0,0 @@ -## Hexadecimal string literals - -Hexadecimal integer literals can now be used in build and option files. - - int_255 = 0xFF diff --git a/docs/markdown/snippets/if-release.md b/docs/markdown/snippets/if-release.md deleted file mode 100644 index 96e12ef..0000000 --- a/docs/markdown/snippets/if-release.md +++ /dev/null @@ -1,7 +0,0 @@ -## b_ndebug : if-release - -The value `if-release` can be given for the `b_ndebug` project option. - -This will make the `NDEBUG` pre-compiler macro to be defined for release -type builds as if the `b_ndebug` project option had had the value `true` -defined for it. diff --git a/docs/markdown/snippets/improved-help.md b/docs/markdown/snippets/improved-help.md new file mode 100644 index 0000000..db7e852 --- /dev/null +++ b/docs/markdown/snippets/improved-help.md @@ -0,0 +1,6 @@ +## "meson help" now shows command line help + +Command line parsing is now less surprising. "meson help" is now +equivalent to "meson --help" and "meson help <subcommand>" is +equivalent to "meson <subcommand> --help", instead of creating a build +directory called "help" in these cases. diff --git a/docs/markdown/snippets/improved-meson-init.md b/docs/markdown/snippets/improved-meson-init.md new file mode 100644 index 0000000..ec17bc4 --- /dev/null +++ b/docs/markdown/snippets/improved-meson-init.md @@ -0,0 +1,19 @@ +## Autogeneration of simple meson.build files + +A feature to generate a meson.build file compiling given C/C++ source +files into a single executable has been added to "meson init". By +default, it will take all recognizable source files in the current +directory. You can also specify a list of dependencies with the -d +flag and automatically invoke a build with the -b flag to check if the +code builds with those dependencies. + +For example, + +```meson +meson init -fbd sdl2,gl +``` + +will look for C or C++ files in the current directory, generate a +meson.build for them with the dependencies of sdl2 and gl and +immediately try to build it, overwriting any previous meson.build and +build directory. diff --git a/docs/markdown/snippets/install_data-rename.md b/docs/markdown/snippets/install_data-rename.md new file mode 100644 index 0000000..6378d0f --- /dev/null +++ b/docs/markdown/snippets/install_data-rename.md @@ -0,0 +1,11 @@ +## install_data() supports rename + +`rename` parameter is used to change names of the installed files. 
+In order to install +- `file1.txt` into `share/myapp/dir1/data.txt` +- `file2.txt` into `share/myapp/dir2/data.txt` +```meson +install_data(['file1.txt', 'file2.txt'], + rename : ['dir1/data.txt', 'dir2/data.txt'], + install_dir : 'share/myapp') +``` diff --git a/docs/markdown/snippets/install_subdir-strip_directory.md b/docs/markdown/snippets/install_subdir-strip_directory.md deleted file mode 100644 index 9ddb4a4..0000000 --- a/docs/markdown/snippets/install_subdir-strip_directory.md +++ /dev/null @@ -1,4 +0,0 @@ -## install_subdir() supports strip_directory - -If strip_directory=true install_subdir() installs directory contents -instead of directory itself, stripping basename of the source directory. diff --git a/docs/markdown/snippets/intopt.md b/docs/markdown/snippets/intopt.md deleted file mode 100644 index daf660b..0000000 --- a/docs/markdown/snippets/intopt.md +++ /dev/null @@ -1,6 +0,0 @@ -## Integer options - -There is a new integer option type with optional minimum and maximum -values. It can be specified like this in the `meson_options.txt` file: - - option('integer_option', type : 'integer', min : 0, max : 5, value : 3) diff --git a/docs/markdown/snippets/pkgconfig-requires-non-string.md b/docs/markdown/snippets/pkgconfig-requires-non-string.md new file mode 100644 index 0000000..abf85b0 --- /dev/null +++ b/docs/markdown/snippets/pkgconfig-requires-non-string.md @@ -0,0 +1,5 @@ +## pkgconfig.generate() requires parameters non-string arguments + +`pkgconfig.generate()` `requires` and `requires_private` parameters +accept pkgconfig-dependencies and libraries that pkgconfig-files were +generated for. diff --git a/docs/markdown/snippets/project-license.md b/docs/markdown/snippets/project-license.md deleted file mode 100644 index 5da2c6a..0000000 --- a/docs/markdown/snippets/project-license.md +++ /dev/null @@ -1,4 +0,0 @@ -## New method meson.project_license() - -The `meson` builtin object now has a `project_license()` method that returns a -list of all licenses for the project. diff --git a/docs/markdown/snippets/rust-cross.md b/docs/markdown/snippets/rust-cross.md deleted file mode 100644 index 7f18c44..0000000 --- a/docs/markdown/snippets/rust-cross.md +++ /dev/null @@ -1,16 +0,0 @@ -## Rust cross-compilation - -Cross-compilation is now supported for Rust targets. Like other -cross-compilers, the Rust binary must be specified in your cross -file. It should specify a `--target` (as installed by `rustup target`) -and a custom linker pointing to your C cross-compiler. For example: - -``` -[binaries] -c = '/usr/bin/arm-linux-gnueabihf-gcc-7' -rust = [ - 'rustc', - '--target', 'arm-unknown-linux-gnueabihf', - '-C', 'linker=/usr/bin/arm-linux-gnueabihf-gcc-7', -] -``` diff --git a/docs/markdown/snippets/templates.md b/docs/markdown/snippets/templates.md deleted file mode 100644 index 6f0474d..0000000 --- a/docs/markdown/snippets/templates.md +++ /dev/null @@ -1,8 +0,0 @@ -## Project templates - -Meson ships with predefined project templates. 
To start a new project from -scratch, simply go to an empty directory and type: - -```meson -meson init --name=myproject --type=executable --language=c -``` diff --git a/docs/markdown/snippets/windows-resources-custom-targets.md b/docs/markdown/snippets/windows-resources-custom-targets.md deleted file mode 100644 index a2dce3a..0000000 --- a/docs/markdown/snippets/windows-resources-custom-targets.md +++ /dev/null @@ -1,3 +0,0 @@ -## Can use custom targets as Windows resource files - -The `compile_resources()` function of the `windows` module can now be used on custom targets as well as regular files. diff --git a/docs/markdown/snippets/wrap_promote.md b/docs/markdown/snippets/wrap_promote.md deleted file mode 100644 index 20fee47..0000000 --- a/docs/markdown/snippets/wrap_promote.md +++ /dev/null @@ -1,11 +0,0 @@ -# Can promote dependencies with wrap command - -The `promote` command makes it easy to copy nested dependencies to the top level. - - meson wrap promote scommon - -This will search the project tree for a subproject called `scommon` and copy it to the top level. - -If there are many embedded subprojects with the same name, you have to specify which one to promote manually like this: - - meson wrap promote subprojects/s1/subprojects/scommon diff --git a/docs/markdown/snippets/yield.md b/docs/markdown/snippets/yield.md deleted file mode 100644 index 3880e67..0000000 --- a/docs/markdown/snippets/yield.md +++ /dev/null @@ -1,8 +0,0 @@ -## Yielding subproject option to superproject - -Normally project options are specific to the current project. However -sometimes you want to have an option whose value is the same over all -projects. This can be achieved with the new `yield` keyword for -options. When set to `true`, getting the value of this option in -`meson.build` files gets the value from the option with the same name -in the master project (if such an option exists). diff --git a/docs/sitemap.txt b/docs/sitemap.txt index 144ca4a..844b600 100644 --- a/docs/sitemap.txt +++ b/docs/sitemap.txt @@ -65,6 +65,7 @@ index.md Shipping-prebuilt-binaries-as-wraps.md fallback-wraptool.md Release-notes.md + Release-notes-for-0.46.0.md Release-notes-for-0.45.0.md Release-notes-for-0.44.0.md Release-notes-for-0.43.0.md @@ -55,7 +55,7 @@ def unpack(sproj, branch, outdir): print(' expected:', dig) print(' obtained:', should) return 1 - spdir = os.path.split(outdir)[0] + spdir = os.path.dirname(outdir) ofilename = os.path.join(spdir, config['wrap-file']['source_filename']) with open(ofilename, 'wb') as ofile: ofile.write(us) diff --git a/man/meson.1 b/man/meson.1 index 929bc6e..19ad737 100644 --- a/man/meson.1 +++ b/man/meson.1 @@ -1,4 +1,4 @@ -.TH MESON "1" "December 2017" "meson 0.44.0" "User Commands" +.TH MESON "1" "March 2018" "meson 0.45.0" "User Commands" .SH NAME meson - a high productivity build system .SH DESCRIPTION @@ -202,6 +202,19 @@ show available versions of the specified project \fBstatus\fR show installed and available versions of currently used subprojects +.SH EXIT STATUS + +.TP +.B 0 +Successful. +.TP +.B 1 +Usage error, or an error parsing or executing meson.build. +.TP +.B 2 +Internal error. 
+.TP + .SH SEE ALSO http://mesonbuild.com/ diff --git a/man/mesonconf.1 b/man/mesonconf.1 index 3a83473..b189663 100644 --- a/man/mesonconf.1 +++ b/man/mesonconf.1 @@ -1,4 +1,4 @@ -.TH MESONCONF "1" "December 2017" "mesonconf 0.44.0" "User Commands" +.TH MESONCONF "1" "March 2018" "mesonconf 0.45.0" "User Commands" .SH NAME mesonconf - a tool to configure Meson builds .SH DESCRIPTION diff --git a/man/mesonintrospect.1 b/man/mesonintrospect.1 index 27f39c0..61aa381 100644 --- a/man/mesonintrospect.1 +++ b/man/mesonintrospect.1 @@ -1,4 +1,4 @@ -.TH MESONINTROSPECT "1" "December 2017" "mesonintrospect 0.44.0" "User Commands" +.TH MESONINTROSPECT "1" "March 2018" "mesonintrospect 0.45.0" "User Commands" .SH NAME mesonintrospect - a tool to extract information about a Meson build .SH DESCRIPTION diff --git a/man/mesontest.1 b/man/mesontest.1 index d2b2743..9a9f743 100644 --- a/man/mesontest.1 +++ b/man/mesontest.1 @@ -1,4 +1,4 @@ -.TH MESON "1" "December 2017" "meson 0.44.0" "User Commands" +.TH MESON "1" "March 2018" "meson 0.45.0" "User Commands" .SH NAME mesontest - test tool for the Meson build system .SH DESCRIPTION diff --git a/man/wraptool.1 b/man/wraptool.1 index 113b33c..93ec457 100644 --- a/man/wraptool.1 +++ b/man/wraptool.1 @@ -1,4 +1,4 @@ -.TH WRAPTOOL "1" "December 2017" "meson 0.44.0" "User Commands" +.TH WRAPTOOL "1" "March 2018" "meson 0.45.0" "User Commands" .SH NAME wraptool - source dependency downloader .SH DESCRIPTION @@ -14,8 +14,8 @@ # See the License for the specific language governing permissions and # limitations under the License. -from mesonbuild import mesonmain, mesonlib -import sys, os, locale +from mesonbuild import mesonmain +import sys, os def main(): # Always resolve the command path so Ninja can find it for regen, tests, etc.
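The backends.py diff that follows rewrites OptionOverrideProxy to take several option dictionaries and search them in order, so a target's per-target overrides can shadow built-in, base, and per-compiler options alike. A minimal standalone sketch of that lookup behaviour (simplified; validation and the OptionProxy wrapper are omitted):

```python
class OptionOverrideProxy:
    """Simplified sketch: search several option dicts in order and let
    per-target overrides shadow the globally configured value."""

    def __init__(self, overrides, *option_dicts):
        self.overrides = overrides        # e.g. a target's override_options mapping
        self.option_dicts = option_dicts  # builtins, base options, compiler options, ...

    def __getitem__(self, name):
        for opts in self.option_dicts:
            if name in opts:
                base = opts[name]
                # A target-level override wins over the configured value.
                return self.overrides.get(name, base)
        raise KeyError('Option not found', name)
```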
diff --git a/mesonbuild/backend/backends.py b/mesonbuild/backend/backends.py index 292b027..ad45204 100644 --- a/mesonbuild/backend/backends.py +++ b/mesonbuild/backend/backends.py @@ -65,9 +65,10 @@ class ExecutableSerialisation: self.capture = capture class TestSerialisation: - def __init__(self, name, suite, fname, is_cross_built, exe_wrapper, is_parallel, cmd_args, env, - should_fail, timeout, workdir, extra_paths): + def __init__(self, name, project, suite, fname, is_cross_built, exe_wrapper, is_parallel, + cmd_args, env, should_fail, timeout, workdir, extra_paths): self.name = name + self.project_name = project self.suite = suite self.fname = fname self.is_cross_built = is_cross_built @@ -88,12 +89,17 @@ class OptionProxy: class OptionOverrideProxy: '''Mimic an option list but transparently override selected option values.''' - def __init__(self, overrides, options): + def __init__(self, overrides, *options): self.overrides = overrides self.options = options def __getitem__(self, option_name): - base_opt = self.options[option_name] + for opts in self.options: + if option_name in opts: + return self._get_override(option_name, opts[option_name]) + raise KeyError('Option not found', option_name) + + def _get_override(self, option_name, base_opt): if option_name in self.overrides: return OptionProxy(base_opt.name, base_opt.validate_value(self.overrides[option_name])) return base_opt @@ -107,9 +113,6 @@ class Backend: self.processed_targets = {} self.build_to_src = os.path.relpath(self.environment.get_source_dir(), self.environment.get_build_dir()) - for t in self.build.targets: - priv_dirname = self.get_target_private_dir_abs(t) - os.makedirs(priv_dirname, exist_ok=True) def get_target_filename(self, t): if isinstance(t, build.CustomTarget): @@ -125,6 +128,20 @@ class Backend: def get_target_filename_abs(self, target): return os.path.join(self.environment.get_build_dir(), self.get_target_filename(target)) + def get_builtin_options_for_target(self, target): + return OptionOverrideProxy(target.option_overrides, + self.environment.coredata.builtins) + + def get_base_options_for_target(self, target): + return OptionOverrideProxy(target.option_overrides, + self.environment.coredata.builtins, + self.environment.coredata.base_options) + + def get_compiler_options_for_target(self, target): + return OptionOverrideProxy(target.option_overrides, + # no code depends on builtins for now + self.environment.coredata.compiler_options) + def get_option_for_target(self, option_name, target): if option_name in target.option_overrides: override = target.option_overrides[option_name] @@ -169,12 +186,10 @@ class Backend: return self.build_to_src def get_target_private_dir(self, target): - dirname = os.path.join(self.get_target_dir(target), target.get_basename() + target.type_suffix()) - return dirname + return os.path.join(self.get_target_dir(target), target.get_id()) def get_target_private_dir_abs(self, target): - dirname = os.path.join(self.environment.get_build_dir(), self.get_target_private_dir(target)) - return dirname + return os.path.join(self.environment.get_build_dir(), self.get_target_private_dir(target)) def get_target_generated_dir(self, target, gensrc, src): """ @@ -312,7 +327,7 @@ class Backend: def rpaths_for_bundled_shared_libraries(self, target): paths = [] for dep in target.external_deps: - if isinstance(dep, dependencies.ExternalLibrary): + if isinstance(dep, (dependencies.ExternalLibrary, dependencies.PkgConfigDependency)): la = dep.link_args if len(la) == 1 and os.path.isabs(la[0]): # The 
only link argument is an absolute path to a library file. @@ -323,9 +338,12 @@ class Backend: if os.path.splitext(libpath)[1] not in ['.dll', '.lib', '.so']: continue absdir = os.path.dirname(libpath) - rel_to_src = absdir[len(self.environment.get_source_dir()) + 1:] - assert(not os.path.isabs(rel_to_src)) - paths.append(os.path.join(self.build_to_src, rel_to_src)) + if absdir.startswith(self.environment.get_source_dir()): + rel_to_src = absdir[len(self.environment.get_source_dir()) + 1:] + assert not os.path.isabs(rel_to_src), 'rel_to_src: {} is absolute'.format(rel_to_src) + paths.append(os.path.join(self.build_to_src, rel_to_src)) + else: + paths.append(absdir) return paths def determine_rpath_dirs(self, target): @@ -407,16 +425,10 @@ class Backend: args = [] pchpath = self.get_target_private_dir(target) includeargs = compiler.get_include_args(pchpath, False) - for lang in ['c', 'cpp']: - p = target.get_pch(lang) - if not p: - continue - if compiler.can_compile(p[-1]): - header = p[0] - args += compiler.get_pch_use_args(pchpath, header) - if len(args) > 0: - args = includeargs + args - return args + p = target.get_pch(compiler.get_language()) + if p: + args += compiler.get_pch_use_args(pchpath, p[0]) + return includeargs + args @staticmethod def escape_extra_args(compiler, args): @@ -451,7 +463,7 @@ class Backend: # starting from hard-coded defaults followed by build options and so on. commands = CompilerArgs(compiler) - copt_proxy = OptionOverrideProxy(target.option_overrides, self.environment.coredata.compiler_options) + copt_proxy = self.get_compiler_options_for_target(target) # First, the trivial ones that are impossible to override. # # Add -nostdinc/-nostdinc++ if needed; can't be overridden @@ -521,9 +533,8 @@ class Backend: # Fortran requires extra include directives. if compiler.language == 'fortran': for lt in target.link_targets: - priv_dir = os.path.join(self.get_target_dir(lt), lt.get_basename() + lt.type_suffix()) - incflag = compiler.get_include_args(priv_dir, False) - commands += incflag + priv_dir = self.get_target_private_dir(lt) + commands += compiler.get_include_args(priv_dir, False) return commands def build_target_link_arguments(self, compiler, deps): @@ -606,9 +617,9 @@ class Backend: cmd_args.append(self.get_target_filename(a)) else: raise MesonException('Bad object in test command.') - ts = TestSerialisation(t.get_name(), t.suite, cmd, is_cross, exe_wrapper, - t.is_parallel, cmd_args, t.env, t.should_fail, - t.timeout, t.workdir, extra_paths) + ts = TestSerialisation(t.get_name(), t.project_name, t.suite, cmd, is_cross, + exe_wrapper, t.is_parallel, cmd_args, t.env, + t.should_fail, t.timeout, t.workdir, extra_paths) arr.append(ts) pickle.dump(arr, datafile) diff --git a/mesonbuild/backend/ninjabackend.py b/mesonbuild/backend/ninjabackend.py index f161d57..bfac4c7 100644 --- a/mesonbuild/backend/ninjabackend.py +++ b/mesonbuild/backend/ninjabackend.py @@ -12,8 +12,10 @@ # See the License for the specific language governing permissions and # limitations under the License. -import os, pickle, re, shlex, subprocess, sys +import os, pickle, re, shlex, subprocess from collections import OrderedDict +import itertools +from pathlib import PurePath from . import backends from .. import modules @@ -25,7 +27,7 @@ from .. 
import compilers from ..compilers import CompilerArgs from ..linkers import ArLinker from ..mesonlib import File, MesonException, OrderedSet -from ..mesonlib import get_compiler_for_source +from ..mesonlib import get_compiler_for_source, has_path_sep from .backends import CleanTrees, InstallData from ..build import InvalidArguments @@ -102,7 +104,8 @@ class NinjaBuildElement: # This is the only way I could find to make this work on all # platforms including Windows command shell. Slash is a dir separator # on Windows, too, so all characters are unambiguous and, more importantly, - # do not require quoting. + # do not require quoting, unless explicitly specified, which is necessary for + # the csc compiler. line = line.replace('\\', '/') outfile.write(line) @@ -113,7 +116,6 @@ class NinjaBuildElement: (name, elems) = e should_quote = name not in raw_names line = ' %s = ' % name - noq_templ = "%s" newelems = [] for i in elems: if not should_quote or i == '&&': # Hackety hack hack @@ -262,7 +264,7 @@ int dummy; vala_header = File.from_built_file(self.get_target_dir(target), target.vala_header) header_deps.append(vala_header) # Recurse and find generated headers - for dep in target.link_targets: + for dep in itertools.chain(target.link_targets, target.link_whole_targets): if isinstance(dep, (build.StaticLibrary, build.SharedLibrary)): header_deps += self.get_generated_headers(dep) return header_deps @@ -472,8 +474,7 @@ int dummy; def process_target_dependencies(self, target, outfile): for t in target.get_dependencies(): - tname = t.get_basename() + t.type_suffix() - if tname not in self.processed_targets: + if t.get_id() not in self.processed_targets: self.generate_target(t, outfile) def custom_target_generator_inputs(self, target, outfile): @@ -626,19 +627,24 @@ int dummy; self.generate_coverage_legacy_rules(outfile) def generate_coverage_legacy_rules(self, outfile): - (gcovr_exe, lcov_exe, genhtml_exe) = environment.find_coverage_tools() + (gcovr_exe, gcovr_new_rootdir, lcov_exe, genhtml_exe) = environment.find_coverage_tools() added_rule = False if gcovr_exe: + # gcovr >= 3.1 interprets rootdir differently + if gcovr_new_rootdir: + rootdir = self.environment.get_build_dir() + else: + rootdir = self.environment.get_source_dir() added_rule = True elem = NinjaBuildElement(self.all_outputs, 'meson-coverage-xml', 'CUSTOM_COMMAND', '') - elem.add_item('COMMAND', [gcovr_exe, '-x', '-r', self.environment.get_source_dir(), + elem.add_item('COMMAND', [gcovr_exe, '-x', '-r', rootdir, '-o', os.path.join(self.environment.get_log_dir(), 'coverage.xml')]) elem.add_item('DESC', 'Generating XML coverage report.') elem.write(outfile) # Alias that runs the target defined above self.create_target_alias('meson-coverage-xml', outfile) elem = NinjaBuildElement(self.all_outputs, 'meson-coverage-text', 'CUSTOM_COMMAND', '') - elem.add_item('COMMAND', [gcovr_exe, '-r', self.environment.get_source_dir(), + elem.add_item('COMMAND', [gcovr_exe, '-r', rootdir, '-o', os.path.join(self.environment.get_log_dir(), 'coverage.txt')]) elem.add_item('DESC', 'Generating text coverage report.') elem.write(outfile) @@ -653,11 +659,43 @@ int dummy; # Alias that runs the target defined above self.create_target_alias('meson-coverage-html', outfile) elem = NinjaBuildElement(self.all_outputs, os.path.join(htmloutdir, 'index.html'), 'CUSTOM_COMMAND', '') - command = [lcov_exe, '--directory', self.environment.get_build_dir(), - '--capture', '--output-file', covinfo, '--no-checksum', - '&&', genhtml_exe, '--prefix',
self.environment.get_build_dir(), - '--output-directory', htmloutdir, '--title', 'Code coverage', - '--legend', '--show-details', covinfo] + + subproject_dir = self.build.get_subproject_dir() + command = [lcov_exe, + '--directory', self.environment.get_build_dir(), + '--capture', + '--output-file', covinfo, + '--no-checksum', + '&&', lcov_exe, + '--extract', + covinfo, + os.path.join(self.environment.get_source_dir(), '*'), + '--output-file', covinfo, + '&&', lcov_exe, + '--remove', + covinfo, + os.path.join(self.environment.get_source_dir(), subproject_dir, '*'), + '--output-file', covinfo, + '&&', genhtml_exe, + '--prefix', self.environment.get_build_dir(), + '--output-directory', htmloutdir, + '--title', 'Code coverage', + '--legend', + '--show-details', + covinfo] + elem.add_item('COMMAND', command) + elem.add_item('DESC', 'Generating HTML coverage report.') + elem.write(outfile) + elif gcovr_exe and gcovr_new_rootdir: + added_rule = True + htmloutdir = os.path.join(self.environment.get_log_dir(), 'coveragereport') + phony_elem = NinjaBuildElement(self.all_outputs, 'meson-coverage-html', 'phony', os.path.join(htmloutdir, 'index.html')) + phony_elem.write(outfile) + # Alias that runs the target defined above + self.create_target_alias('meson-coverage-html', outfile) + elem = NinjaBuildElement(self.all_outputs, os.path.join(htmloutdir, 'index.html'), 'CUSTOM_COMMAND', '') + command = [gcovr_exe, '--html', '--html-details', '-r', self.environment.get_build_dir(), + '-o', os.path.join(htmloutdir, 'index.html')] elem.add_item('COMMAND', command) elem.add_item('DESC', 'Generating HTML coverage report.') elem.write(outfile) @@ -838,11 +876,12 @@ int dummy; for de in data: assert(isinstance(de, build.Data)) subdir = de.install_dir - for f in de.sources: - assert(isinstance(f, mesonlib.File)) - plain_f = os.path.basename(f.fname) - dstabs = os.path.join(subdir, plain_f) - i = [f.absolute_path(srcdir, builddir), dstabs, de.install_mode] + if not subdir: + subdir = os.path.join(self.environment.get_datadir(), self.interpreter.build.project_name) + for src_file, dst_name in zip(de.sources, de.rename): + assert(isinstance(src_file, mesonlib.File)) + dst_abs = os.path.join(subdir, dst_name) + i = [src_file.absolute_path(srcdir, builddir), dst_abs, de.install_mode] d.data.append(i) def generate_subdir_install(self, d): @@ -985,7 +1024,7 @@ int dummy; outname_rel = os.path.join(self.get_target_dir(target), fname) src_list = target.get_sources() compiler = target.compilers['cs'] - rel_srcs = [s.rel_to_builddir(self.build_to_src) for s in src_list] + rel_srcs = [os.path.normpath(s.rel_to_builddir(self.build_to_src)) for s in src_list] deps = [] commands = CompilerArgs(compiler, target.extra_args.get('cs', [])) commands += compiler.get_buildtype_args(buildtype) @@ -1011,8 +1050,8 @@ int dummy; for rel_src in generated_sources.keys(): dirpart, fnamepart = os.path.split(rel_src) if rel_src.lower().endswith('.cs'): - rel_srcs.append(rel_src) - deps.append(rel_src) + rel_srcs.append(os.path.normpath(rel_src)) + deps.append(os.path.normpath(rel_src)) for dep in target.get_external_deps(): commands.extend_direct(dep.get_link_args()) @@ -1061,7 +1100,7 @@ int dummy; the build directory. 
""" result = OrderedSet() - for dep in target.link_targets + target.link_whole_targets: + for dep in itertools.chain(target.link_targets, target.link_whole_targets): for i in dep.sources: if hasattr(i, 'fname'): i = i.fname @@ -1141,16 +1180,31 @@ int dummy; valac_outputs = [] # All sources that are passed to valac on the commandline all_files = list(vapi_src.keys()) + # Passed as --basedir + srcbasedir = os.path.join(self.build_to_src, target.get_subdir()) for (vala_file, gensrc) in vala_src.items(): all_files.append(vala_file) # Figure out where the Vala compiler will write the compiled C file + # # If the Vala file is in a subdir of the build dir (in our case - # because it was generated/built by something else), the subdir path - # components will be preserved in the output path. But if the Vala - # file is outside the build directory, the path components will be - # stripped and just the basename will be used. + # because it was generated/built by something else), and is also + # a subdir of --basedir (because the builddir is in the source + # tree, and the target subdir is the source root), the subdir + # components from the source root till the private builddir will be + # duplicated inside the private builddir. Otherwise, just the + # basename will be used. + # + # If the Vala file is outside the build directory, the paths from + # the --basedir till the subdir will be duplicated inside the + # private builddir. if isinstance(gensrc, (build.CustomTarget, build.GeneratedList)) or gensrc.is_built: vala_c_file = os.path.splitext(os.path.basename(vala_file))[0] + '.c' + # Check if the vala file is in a subdir of --basedir + abs_srcbasedir = os.path.join(self.environment.get_source_dir(), target.get_subdir()) + abs_vala_file = os.path.join(self.environment.get_build_dir(), vala_file) + if PurePath(os.path.commonpath((abs_srcbasedir, abs_vala_file))) == PurePath(abs_srcbasedir): + vala_c_subdir = PurePath(abs_vala_file).parent.relative_to(abs_srcbasedir) + vala_c_file = os.path.join(vala_c_subdir, vala_c_file) else: path_to_target = os.path.join(self.build_to_src, target.get_subdir()) if vala_file.startswith(path_to_target): @@ -1168,7 +1222,7 @@ int dummy; # means it will also preserve the directory components of Vala sources # found inside the build tree (generated sources). args += ['--directory', c_out_dir] - args += ['--basedir', os.path.join(self.build_to_src, target.get_subdir())] + args += ['--basedir', srcbasedir] if not isinstance(target, build.Executable): # Library name args += ['--library', target.name] @@ -1258,6 +1312,10 @@ int dummy; linkdirs = OrderedDict() for d in target.link_targets: linkdirs[d.subdir] = True + # specify `extern CRATE_NAME=OUTPUT_FILE` for each Rust + # dependency, so that collisions with libraries in rustc's + # sysroot don't cause ambiguity + args += ['--extern', '{}={}'.format(d.name, os.path.join(d.subdir, d.filename))] for d in linkdirs.keys(): if d == '': d = '.' @@ -1276,7 +1334,7 @@ int dummy; # Set runtime-paths so we can run executables without needing to set # LD_LIBRARY_PATH, etc in the environment. Doesn't work on Windows. - if '/' in target.name or '\\' in target.name: + if has_path_sep(target.name): # Target names really should not have slashes in them, but # unfortunately we did not check for that and some downstream projects # now have them. Once slashes are forbidden, remove this bit. 
@@ -1566,7 +1624,15 @@ int dummy; def generate_cs_compile_rule(self, compiler, outfile): rule = 'rule %s_COMPILER\n' % compiler.get_language() invoc = ' '.join([ninja_quote(i) for i in compiler.get_exelist()]) - command = ' command = %s $ARGS $in\n' % invoc + + if mesonlib.is_windows(): + command = ''' command = {executable} @$out.rsp + rspfile = $out.rsp + rspfile_content = $ARGS $in +'''.format(executable=invoc) + else: + command = ' command = %s $ARGS $in\n' % invoc + description = ' description = Compiling C Sharp target $out.\n' outfile.write(rule) outfile.write(command) @@ -1821,7 +1887,6 @@ rule FORTRAN_DEP_HACK infilelist = genlist.get_inputs() outfilelist = genlist.get_outputs() extra_dependencies = [os.path.join(self.build_to_src, i) for i in genlist.extra_depends] - source_target_dir = self.get_target_source_dir(target) for i in range(len(infilelist)): if len(generator.outputs) == 1: sole_output = os.path.join(self.get_target_private_dir(target), outfilelist[i]) @@ -1846,7 +1911,6 @@ rule FORTRAN_DEP_HACK # We have consumed output files, so drop them from the list of remaining outputs. if sole_output == '': outfilelist = outfilelist[len(generator.outputs):] - relout = self.get_target_private_dir(target) args = self.replace_paths(target, args, override_subdir=subdir) cmdlist = exe_arr + self.replace_extra_args(args, genlist) if generator.capture: @@ -2073,8 +2137,7 @@ rule FORTRAN_DEP_HACK return incs def _generate_single_compile(self, target, compiler, is_generated=False): - base_proxy = backends.OptionOverrideProxy(target.option_overrides, - self.environment.coredata.base_options) + base_proxy = self.get_base_options_for_target(target) # Create an empty commands list, and start adding arguments from # various sources in the order in which they must override each other commands = CompilerArgs(compiler) @@ -2229,6 +2292,9 @@ rule FORTRAN_DEP_HACK depelem.write(outfile) commands += compiler.get_module_outdir_args(self.get_target_private_dir(target)) + if compiler.language == 'd': + commands += compiler.get_feature_args(target.d_features, self.build_to_src) + element = NinjaBuildElement(self.all_outputs, rel_obj, compiler_name, rel_src) for d in header_deps: if isinstance(d, File): @@ -2259,7 +2325,7 @@ rule FORTRAN_DEP_HACK # FIXME FIXME: The usage of this is a terrible and unreliable hack if isinstance(fname, File): return fname.subdir != '' - return '/' in fname or '\\' in fname + return has_path_sep(fname) # Fortran is a bit weird (again). When you link against a library, just compiling a source file # requires the mod files that are output when single files are built. To do this right we would need to @@ -2305,7 +2371,7 @@ rule FORTRAN_DEP_HACK pch = target.get_pch(lang) if not pch: continue - if '/' not in pch[0] or '/' not in pch[-1]: + if not has_path_sep(pch[0]) or not has_path_sep(pch[-1]): msg = 'Precompiled header of {!r} must not be in the same ' \ 'directory as source, please put it in a subdirectory.' \ ''.format(target.get_basename()) @@ -2482,7 +2548,7 @@ rule FORTRAN_DEP_HACK commands += linker.get_option_link_args(self.environment.coredata.compiler_options) # Set runtime-paths so we can run executables without needing to set # LD_LIBRARY_PATH, etc in the environment. Doesn't work on Windows. - if '/' in target.name or '\\' in target.name: + if has_path_sep(target.name): # Target names really should not have slashes in them, but # unfortunately we did not check for that and some downstream projects # now have them. Once slashes are forbidden, remove this bit. 
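The Windows branch of `generate_cs_compile_rule()` above routes the C# compiler arguments through a Ninja response file (`@$out.rsp`, `rspfile`, `rspfile_content`) so long source lists do not overflow the Windows command-line length limit. Outside of Ninja, the same idea looks roughly like this (a sketch; the `csc` invocation and argument list are placeholders):

```python
import subprocess
import tempfile

def run_with_response_file(compiler, args):
    # Write the arguments to a response file and pass it as @file, keeping
    # the actual command line short regardless of how many sources there are.
    with tempfile.NamedTemporaryFile('w', suffix='.rsp', delete=False) as rsp:
        rsp.write('\n'.join(args))
        rsp_path = rsp.name
    return subprocess.call([compiler, '@' + rsp_path])

# e.g. run_with_response_file('csc', ['/nologo', '-out:app.exe', 'a.cs', 'b.cs'])
```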
@@ -2663,3 +2729,9 @@ rule FORTRAN_DEP_HACK elem = NinjaBuildElement(self.all_outputs, deps, 'phony', '') elem.write(outfile) + +def load(build_dir): + filename = os.path.join(build_dir, 'meson-private', 'install.dat') + with open(filename, 'rb') as f: + obj = pickle.load(f) + return obj diff --git a/mesonbuild/backend/vs2010backend.py b/mesonbuild/backend/vs2010backend.py index 60805c6..5e972f2 100644 --- a/mesonbuild/backend/vs2010backend.py +++ b/mesonbuild/backend/vs2010backend.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -import os, sys +import os import pickle import xml.dom.minidom import xml.etree.ElementTree as ET @@ -304,6 +304,7 @@ class Vs2010Backend(backends.Backend): projlist = [] for name, target in self.build.targets.items(): outdir = os.path.join(self.environment.get_build_dir(), self.get_target_dir(target)) + os.makedirs(outdir, exist_ok=True) fname = name + '.vcxproj' relname = os.path.join(target.subdir, fname) projfile = os.path.join(outdir, fname) @@ -730,7 +731,7 @@ class Vs2010Backend(backends.Backend): # generate_single_compile() and generate_basic_compiler_args() for l, comp in target.compilers.items(): if l in file_args: - file_args[l] += compilers.get_base_compile_args(self.environment.coredata.base_options, comp) + file_args[l] += compilers.get_base_compile_args(self.get_base_options_for_target(target), comp) file_args[l] += comp.get_option_compile_args(self.environment.coredata.compiler_options) # Add compile args added using add_project_arguments() for l, args in self.build.projects_args.get(target.subproject, {}).items(): diff --git a/mesonbuild/backend/xcodebackend.py b/mesonbuild/backend/xcodebackend.py index 3ae31e4..9a9f88b 100644 --- a/mesonbuild/backend/xcodebackend.py +++ b/mesonbuild/backend/xcodebackend.py @@ -16,7 +16,7 @@ from . import backends from .. import build from .. import dependencies from .. import mesonlib -import uuid, os, sys +import uuid, os from ..mesonlib import MesonException @@ -565,9 +565,7 @@ class XCodeBackend(backends.Backend): self.write_line(');') self.write_line('runOnlyForDeploymentPostprocessing = 0;') self.write_line('shellPath = /bin/sh;') - script_root = self.environment.get_script_dir() - test_script = os.path.join(script_root, 'meson_test.py') - cmd = mesonlib.python_command + [test_script, test_data, '--wd', self.environment.get_build_dir()] + cmd = mesonlib.meson_command + ['test', test_data, '-C', self.environment.get_build_dir()] cmdstr = ' '.join(["'%s'" % i for i in cmd]) self.write_line('shellScript = "%s";' % cmdstr) self.write_line('showEnvVarsInLog = 0;') @@ -708,7 +706,7 @@ class XCodeBackend(backends.Backend): if isinstance(target, build.SharedLibrary): ldargs = ['-dynamiclib', '-Wl,-headerpad_max_install_names'] + dep_libs install_path = os.path.join(self.environment.get_build_dir(), target.subdir, buildtype) - dylib_version = target.version + dylib_version = target.soversion else: ldargs = dep_libs install_path = '' diff --git a/mesonbuild/build.py b/mesonbuild/build.py index 400b9e5..3ff68ed 100644 --- a/mesonbuild/build.py +++ b/mesonbuild/build.py @@ -15,6 +15,7 @@ import copy, os, re from collections import OrderedDict import itertools, pathlib +import pickle from . import environment from . import dependencies @@ -22,7 +23,7 @@ from . 
import mlog from .mesonlib import File, MesonException, listify, extract_as_list from .mesonlib import typeslistify, stringlistify, classify_unity_sources from .mesonlib import get_filenames_templates_dict, substitute_values -from .mesonlib import for_windows, for_darwin, for_cygwin, for_android +from .mesonlib import for_windows, for_darwin, for_cygwin, for_android, has_path_sep from .compilers import is_object, clike_langs, sort_clike, lang_suffixes known_basic_kwargs = {'install': True, @@ -84,6 +85,8 @@ known_exe_kwargs = known_basic_kwargs.copy() known_exe_kwargs.update({'implib': True, 'export_dynamic': True }) +known_jar_kwargs = known_basic_kwargs.copy() +known_jar_kwargs.update({'target_type': 'jar'}) class InvalidArguments(MesonException): pass @@ -113,6 +116,7 @@ class Build: self.static_linker = None self.static_cross_linker = None self.subprojects = {} + self.subproject_dir = '' self.install_scripts = [] self.postconf_scripts = [] self.install_dirs = [] @@ -138,6 +142,9 @@ class Build: def get_project(self): return self.projects[''] + def get_subproject_dir(self): + return self.subproject_dir + def get_targets(self): return self.targets @@ -281,7 +288,7 @@ class EnvironmentVariables: class Target: def __init__(self, name, subdir, subproject, build_by_default): - if '/' in name or '\\' in name: + if has_path_sep(name): # Fix failing test 53 when this becomes an error. mlog.warning('''Target "%s" has a path separator in its name. This is not supported, it can cause unexpected failures and will become @@ -355,6 +362,7 @@ class BuildTarget(Target): self.extra_args = {} self.generated = [] self.extra_files = [] + self.d_features = {} # Sources can be: # 1. Pre-existing source files in the source tree # 2. Pre-existing sources generated by configure_file in the build tree @@ -364,9 +372,9 @@ class BuildTarget(Target): # 1. Pre-existing objects provided by the user with the `objects:` kwarg # 2. Compiled objects created by and extracted from another target self.process_objectlist(objects) - self.process_compilers() self.process_kwargs(kwargs, environment) self.check_unknown_kwargs(kwargs) + self.process_compilers() if not any([self.sources, self.generated, self.objects, self.link_whole]): raise InvalidArguments('Build target %s has no sources.' % name) self.process_compilers_late() @@ -499,6 +507,13 @@ class BuildTarget(Target): # which is what we need. if not is_object(s): sources.append(s) + for d in self.external_deps: + if hasattr(d, 'held_object'): + d = d.held_object + for s in d.sources: + if isinstance(s, (str, File)): + sources.append(s) + # Sources that were used to create our extracted objects for o in self.objects: if not isinstance(o, ExtractedObjects): @@ -650,10 +665,6 @@ just like those detected with the dependency() function.''') self.link(linktarget) lwhole = extract_as_list(kwargs, 'link_whole') for linktarget in lwhole: - # Sorry for this hack. Keyword targets are kept in holders - # in kwargs. Unpack here without looking at the exact type. 
- if hasattr(linktarget, "held_object"): - linktarget = linktarget.held_object self.link_whole(linktarget) c_pchlist, cpp_pchlist, clist, cpplist, cslist, valalist, objclist, objcpplist, fortranlist, rustlist \ @@ -682,12 +693,14 @@ just like those detected with the dependency() function.''') dfeature_versions = kwargs.get('d_module_versions', None) if dfeature_versions: dfeatures['versions'] = dfeature_versions - dfeature_import_dirs = kwargs.get('d_import_dirs', None) - if dfeature_import_dirs: + if 'd_import_dirs' in kwargs: + dfeature_import_dirs = extract_as_list(kwargs, 'd_import_dirs', unholder=True) + for d in dfeature_import_dirs: + if not isinstance(d, IncludeDirs): + raise InvalidArguments('Arguments to d_import_dirs must be include_directories.') dfeatures['import_dirs'] = dfeature_import_dirs if dfeatures: - if 'd' in self.compilers: - self.add_compiler_args('d', self.compilers['d'].get_feature_args(dfeatures)) + self.d_features = dfeatures self.link_args = extract_as_list(kwargs, 'link_args') for i in self.link_args: @@ -787,7 +800,7 @@ This will become a hard error in a future Meson release.''') def get_dependencies(self): transitive_deps = [] - for t in self.link_targets + self.link_whole_targets: + for t in itertools.chain(self.link_targets, self.link_whole_targets): transitive_deps.append(t) if isinstance(t, StaticLibrary): transitive_deps += t.get_dependencies() @@ -831,12 +844,14 @@ This will become a hard error in a future Meson release.''') self.add_include_dirs(dep.include_directories) for l in dep.libraries: self.link(l) + for l in dep.whole_libraries: + self.link_whole(l) # Those parts that are external. extpart = dependencies.InternalDependency('undefined', [], dep.compile_args, dep.link_args, - [], [], []) + [], [], [], []) self.external_deps.append(extpart) # Deps of deps. self.add_deps(dep.ext_deps) @@ -1054,7 +1069,7 @@ class Generator: raise InvalidArguments('"output" may only contain strings.') if '@BASENAME@' not in rule and '@PLAINNAME@' not in rule: raise InvalidArguments('Every element of "output" must contain @BASENAME@ or @PLAINNAME@.') - if '/' in rule or '\\' in rule: + if has_path_sep(rule): raise InvalidArguments('"outputs" must not contain a directory separator.') if len(outputs) > 1: for o in outputs: @@ -1653,7 +1668,7 @@ class CustomTarget(Target): raise InvalidArguments('Output must not be empty.') if i.strip() == '': raise InvalidArguments('Output must not consist only of whitespace.') - if '/' in i: + if has_path_sep(i): raise InvalidArguments('Output must not contain a path segment.') if '@INPUT@' in i or '@INPUT0@' in i: m = 'Output cannot contain @INPUT@ or @INPUT0@, did you ' \ @@ -1811,6 +1826,8 @@ class Jar(BuildTarget): # All jar targets are installable. pass + def check_unknown_kwargs(self, kwargs): + self.check_unknown_kwargs_int(kwargs, known_jar_kwargs) class CustomTargetIndex: @@ -1881,13 +1898,19 @@ class ConfigurationData: # A bit poorly named, but this represents plain data files to copy # during install. 
class Data: - def __init__(self, sources, install_dir, install_mode=None): + def __init__(self, sources, install_dir, install_mode=None, rename=None): self.sources = sources self.install_dir = install_dir self.install_mode = install_mode self.sources = listify(self.sources) for s in self.sources: assert(isinstance(s, File)) + if rename is None: + self.rename = [os.path.basename(f.fname) for f in self.sources] + else: + self.rename = stringlistify(rename) + if len(self.rename) != len(self.sources): + raise MesonException('Size of rename argument is different from number of sources') class RunScript(dict): def __init__(self, script, args): @@ -1922,3 +1945,22 @@ def get_sources_string_names(sources): else: raise AssertionError('Unknown source type: {!r}'.format(s)) return names + +def load(build_dir): + filename = os.path.join(build_dir, 'meson-private', 'build.dat') + load_fail_msg = 'Build data file {!r} is corrupted. Try with a fresh build tree.'.format(filename) + nonexisting_fail_msg = 'No such build data file as "{!r}".'.format(filename) + try: + with open(filename, 'rb') as f: + obj = pickle.load(f) + except FileNotFoundError: + raise MesonException(nonexisting_fail_msg) + except pickle.UnpicklingError: + raise MesonException(load_fail_msg) + if not isinstance(obj, Build): + raise MesonException(load_fail_msg) + return obj + +def save(obj, filename): + with open(filename, 'wb') as f: + pickle.dump(obj, f) diff --git a/mesonbuild/compilers/__init__.py b/mesonbuild/compilers/__init__.py index f09f252..84c87fb 100644 --- a/mesonbuild/compilers/__init__.py +++ b/mesonbuild/compilers/__init__.py @@ -67,6 +67,7 @@ __all__ = [ 'JavaCompiler', 'LLVMDCompiler', 'MonoCompiler', + 'VisualStudioCsCompiler', 'NAGFortranCompiler', 'ObjCCompiler', 'ObjCPPCompiler', @@ -127,7 +128,7 @@ from .cpp import ( IntelCPPCompiler, VisualStudioCPPCompiler, ) -from .cs import MonoCompiler +from .cs import MonoCompiler, VisualStudioCsCompiler from .d import ( DCompiler, DmdDCompiler, diff --git a/mesonbuild/compilers/c.py b/mesonbuild/compilers/c.py index a59b7d3..56b46b4 100644 --- a/mesonbuild/compilers/c.py +++ b/mesonbuild/compilers/c.py @@ -416,7 +416,7 @@ class CCompiler(Compiler): }}''' if not self.compiles(t.format(**fargs), env, extra_args, dependencies): return -1 - return self.cross_compute_int('sizeof(%s)' % typename, 1, 128, None, prefix, env, extra_args, dependencies) + return self.cross_compute_int('sizeof(%s)' % typename, 1, 1024, None, prefix, env, extra_args, dependencies) def sizeof(self, typename, prefix, env, extra_args=None, dependencies=None): if extra_args is None: @@ -525,7 +525,7 @@ class CCompiler(Compiler): elif rtype == 'int': try: return int(res.stdout.strip()) - except: + except ValueError: m = 'Return value of {}() is not an int' raise EnvironmentException(m.format(fname)) @@ -780,9 +780,12 @@ class CCompiler(Compiler): args = ['-l' + libname] if self.links(code, env, extra_args=args): return args + # Ensure that we won't modify the list that was passed to us + extra_dirs = extra_dirs[:] + # Search in the system libraries too + extra_dirs += self.get_library_dirs() # Not found or we want to use a specific libtype? Try to find the # library file itself. - extra_dirs += self.get_library_dirs() prefixes, suffixes = self.get_library_naming(env, libtype) # Triply-nested loop! 
for d in extra_dirs: @@ -933,7 +936,7 @@ class VisualStudioCCompiler(CCompiler): self.warn_args = {'1': ['/W2'], '2': ['/W3'], '3': ['/W4']} - self.base_options = ['b_pch'] # FIXME add lto, pgo and the like + self.base_options = ['b_pch', 'b_ndebug'] # FIXME add lto, pgo and the like self.is_64 = is_64 # Override CCompiler.get_always_args @@ -1137,7 +1140,7 @@ class VisualStudioCCompiler(CCompiler): # See boost/config/compiler/visualc.cpp for up to date mapping try: version = int(''.join(self.version.split('.')[0:2])) - except: + except ValueError: return None if version < 1310: return '7.0' diff --git a/mesonbuild/compilers/compilers.py b/mesonbuild/compilers/compilers.py index 034fef4..a28a225 100644 --- a/mesonbuild/compilers/compilers.py +++ b/mesonbuild/compilers/compilers.py @@ -315,7 +315,9 @@ def get_base_compile_args(options, compiler): except KeyError: pass try: - if options['b_ndebug'].value == 'true' or (options['b_ndebug'].value == 'if-release' and options['buildtype'] == 'release'): + if (options['b_ndebug'].value == 'true' or + (options['b_ndebug'].value == 'if-release' and + options['buildtype'].value == 'release')): args += ['-DNDEBUG'] except KeyError: pass @@ -348,7 +350,7 @@ def get_base_link_args(options, linker, is_shared_module): pass try: if 'b_asneeded' in linker.base_options and options['b_asneeded'].value: - args.append('-Wl,--as-needed') + args.append(linker.get_asneeded_args()) except KeyError: pass try: @@ -900,6 +902,13 @@ ICC_STANDARD = 0 ICC_OSX = 1 ICC_WIN = 2 +# GNU ld cannot be installed on macOS +# https://github.com/Homebrew/homebrew-core/issues/17794#issuecomment-328174395 +# Hence, we don't need to differentiate between OS and ld +# for the sake of adding as-needed support +GNU_LD_AS_NEEDED = '-Wl,--as-needed' +APPLE_LD_AS_NEEDED = '-Wl,-dead_strip_dylibs' + def get_gcc_soname_args(gcc_type, prefix, shlib_name, suffix, path, soversion, is_shared_module): if soversion is None: sostr = '' @@ -1002,10 +1011,18 @@ class GnuCompiler: 'b_colorout', 'b_ndebug', 'b_staticpic'] if self.gcc_type != GCC_OSX: self.base_options.append('b_lundef') - self.base_options.append('b_asneeded') + self.base_options.append('b_asneeded') # All GCC backends can do assembly self.can_compile_suffixes.add('s') + # TODO: centralise this policy more globally, instead + # of fragmenting it into GnuCompiler and ClangCompiler + def get_asneeded_args(self): + if self.gcc_type == GCC_OSX: + return APPLE_LD_AS_NEEDED + else: + return GNU_LD_AS_NEEDED + def get_colorout_args(self, colortype): if mesonlib.version_compare(self.version, '>=4.9.0'): return gnu_color_args[colortype][:] @@ -1084,10 +1101,18 @@ class ClangCompiler: 'b_ndebug', 'b_staticpic', 'b_colorout'] if self.clang_type != CLANG_OSX: self.base_options.append('b_lundef') - self.base_options.append('b_asneeded') + self.base_options.append('b_asneeded') # All Clang backends can do assembly and LLVM IR self.can_compile_suffixes.update(['ll', 's']) + # TODO: centralise this policy more globally, instead + # of fragmenting it into GnuCompiler and ClangCompiler + def get_asneeded_args(self): + if self.clang_type == CLANG_OSX: + return APPLE_LD_AS_NEEDED + else: + return GNU_LD_AS_NEEDED + def get_pic_args(self): if self.clang_type in (CLANG_WIN, CLANG_OSX): return [] # On Window and OS X, pic is always on. 
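The `b_ndebug` hunk above now compares `options['buildtype'].value` to `'release'` instead of comparing the option object itself; the old comparison could never be true because the user-option wrapper is not equal to a plain string. A toy illustration (the `FakeOption` class is invented for the example):

```python
class FakeOption:
    def __init__(self, value):
        self.value = value

options = {'buildtype': FakeOption('release'), 'b_ndebug': FakeOption('if-release')}

print(options['buildtype'] == 'release')        # False: wrapper object compared to str
print(options['buildtype'].value == 'release')  # True: the stored value matches
```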
@@ -1207,6 +1232,14 @@ class IntelCompiler: raise MesonException('Unreachable code when converting icc type to gcc type.') return get_gcc_soname_args(gcc_type, prefix, shlib_name, suffix, path, soversion, is_shared_module) + # TODO: centralise this policy more globally, instead + # of fragmenting it into GnuCompiler and ClangCompiler + def get_asneeded_args(self): + if self.icc_type == CLANG_OSX: + return APPLE_LD_AS_NEEDED + else: + return GNU_LD_AS_NEEDED + def get_std_shared_lib_link_args(self): # FIXME: Don't know how icc works on OSX # if self.icc_type == ICC_OSX: diff --git a/mesonbuild/compilers/cpp.py b/mesonbuild/compilers/cpp.py index c10f38e..1fa6f15 100644 --- a/mesonbuild/compilers/cpp.py +++ b/mesonbuild/compilers/cpp.py @@ -112,7 +112,7 @@ class GnuCPPCompiler(GnuCompiler, CPPCompiler): if self.gcc_type == GCC_MINGW: opts.update({ 'cpp_winlibs': coredata.UserArrayOption('cpp_winlibs', 'Standard Win libraries to link against', - gnu_winlibs), }) + gnu_winlibs), }) return opts def get_option_compile_args(self, options): diff --git a/mesonbuild/compilers/cs.py b/mesonbuild/compilers/cs.py index dd7a433..f78e364 100644 --- a/mesonbuild/compilers/cs.py +++ b/mesonbuild/compilers/cs.py @@ -15,19 +15,26 @@ import os.path, subprocess from ..mesonlib import EnvironmentException +from ..mesonlib import is_windows from .compilers import Compiler, mono_buildtype_args -class MonoCompiler(Compiler): - def __init__(self, exelist, version, **kwargs): +class CsCompiler(Compiler): + def __init__(self, exelist, version, id, runner=None): self.language = 'cs' - super().__init__(exelist, version, **kwargs) - self.id = 'mono' - self.monorunner = 'mono' + super().__init__(exelist, version) + self.id = id + self.runner = runner def get_display_language(self): return 'C sharp' + def get_always_args(self): + return ['/nologo'] + + def get_linker_always_args(self): + return ['/nologo'] + def get_output_args(self, fname): return ['-out:' + fname] @@ -92,11 +99,14 @@ class MonoCompiler(Compiler): } } ''') - pc = subprocess.Popen(self.exelist + [src], cwd=work_dir) + pc = subprocess.Popen(self.exelist + self.get_always_args() + [src], cwd=work_dir) pc.wait() if pc.returncode != 0: raise EnvironmentException('Mono compiler %s can not compile programs.' % self.name_string()) - cmdlist = [self.monorunner, obj] + if self.runner: + cmdlist = [self.runner, obj] + else: + cmdlist = [os.path.join(work_dir, obj)] pe = subprocess.Popen(cmdlist, cwd=work_dir) pe.wait() if pe.returncode != 0: @@ -107,3 +117,25 @@ class MonoCompiler(Compiler): def get_buildtype_args(self, buildtype): return mono_buildtype_args[buildtype] + + +class MonoCompiler(CsCompiler): + def __init__(self, exelist, version): + super().__init__(exelist, version, 'mono', + 'mono') + + +class VisualStudioCsCompiler(CsCompiler): + def __init__(self, exelist, version): + super().__init__(exelist, version, 'csc') + + def get_buildtype_args(self, buildtype): + res = mono_buildtype_args[buildtype] + if not is_windows(): + tmp = [] + for flag in res: + if flag == '-debug': + flag = '-debug:portable' + tmp.append(flag) + res = tmp + return res diff --git a/mesonbuild/compilers/d.py b/mesonbuild/compilers/d.py index 9681a9f..474e1bd 100644 --- a/mesonbuild/compilers/d.py +++ b/mesonbuild/compilers/d.py @@ -81,7 +81,7 @@ class DCompiler(Compiler): return objfile + '.' 
+ self.get_depfile_suffix() def get_depfile_suffix(self): - return 'dep' + return 'deps' def get_pic_args(self): return ['-fPIC'] @@ -93,7 +93,7 @@ class DCompiler(Compiler): # FIXME: Make this work for Windows, MacOS and cross-compiling return get_gcc_soname_args(GCC_STANDARD, prefix, shlib_name, suffix, path, soversion, is_shared_module) - def get_feature_args(self, kwargs): + def get_feature_args(self, kwargs, build_to_src): res = [] if 'unittest' in kwargs: unittest = kwargs.pop('unittest') @@ -122,8 +122,16 @@ class DCompiler(Compiler): import_dir_arg = d_feature_args[self.id]['import_dir'] if not import_dir_arg: raise EnvironmentException('D compiler %s does not support the "string import directories" feature.' % self.name_string()) - for d in import_dirs: - res.append('{0}{1}'.format(import_dir_arg, d)) + for idir_obj in import_dirs: + basedir = idir_obj.get_curdir() + for idir in idir_obj.get_incdirs(): + # Avoid superfluous '/.' at the end of paths when d is '.' + if idir not in ('', '.'): + expdir = os.path.join(basedir, idir) + else: + expdir = basedir + srctreedir = os.path.join(build_to_src, expdir) + res.append('{0}{1}'.format(import_dir_arg, srctreedir)) if kwargs: raise EnvironmentException('Unknown D compiler feature(s) selected: %s' % ', '.join(kwargs.keys())) @@ -233,13 +241,20 @@ class GnuDCompiler(DCompiler): '3': default_warn_args + ['-Wextra', '-Wpedantic']} self.base_options = ['b_colorout', 'b_sanitize', 'b_staticpic'] + self._has_color_support = version_compare(self.version, '>=4.9') + # dependencies were implemented before, but broken - support was fixed in GCC 7.1+ + # (and some backported versions) + self._has_deps_support = version_compare(self.version, '>=7.1') + def get_colorout_args(self, colortype): - if version_compare(self.version, '>=4.9.0'): + if self._has_color_support: return gnu_color_args[colortype][:] return [] def get_dependency_gen_args(self, outtarget, outfile): - return ['-fmake-deps=' + outfile] + if not self._has_deps_support: + return [] + return ['-MD', '-MQ', outtarget, '-MF', outfile] def get_output_args(self, target): return ['-o', target] diff --git a/mesonbuild/coredata.py b/mesonbuild/coredata.py index f87e62c..993effc 100644 --- a/mesonbuild/coredata.py +++ b/mesonbuild/coredata.py @@ -21,7 +21,7 @@ from .mesonlib import MesonException from .mesonlib import default_libdir, default_libexecdir, default_prefix import ast -version = '0.45.0.dev1' +version = '0.46.0.dev1' backendlist = ['ninja', 'vs', 'vs2010', 'vs2015', 'vs2017', 'xcode'] default_yielding = False @@ -111,7 +111,7 @@ class UserIntegerOption(UserOption): def toint(self, valuestring): try: return int(valuestring) - except: + except ValueError: raise MesonException('Value string "%s" is not convertable to an integer.' % valuestring) def validate_value(self, value): @@ -222,17 +222,17 @@ class CoreData: (after resolving variables and ~), return that absolute path. Next, check if the file is relative to the current source dir. If the path still isn't resolved do the following: - Linux + BSD: + Windows: + - Error + *: - $XDG_DATA_HOME/meson/cross (or ~/.local/share/meson/cross if undefined) - $XDG_DATA_DIRS/meson/cross (or /usr/local/share/meson/cross:/usr/share/meson/cross if undefined) - Error - *: - - Error - BSD follows the Linux path and will honor XDG_* if set. This simplifies - the implementation somewhat, especially since most BSD users wont set - those environment variables. + + Non-Windows follows the Linux path and will honor XDG_* if set. 
This + simplifies the implementation somewhat. """ if filename is None: return None @@ -242,7 +242,7 @@ class CoreData: path_to_try = os.path.abspath(filename) if os.path.exists(path_to_try): return path_to_try - if sys.platform == 'linux' or 'bsd' in sys.platform.lower(): + if sys.platform != 'win32': paths = [ os.environ.get('XDG_DATA_HOME', os.path.expanduser('~/.local/share')), ] + os.environ.get('XDG_DATA_DIRS', '/usr/local/share:/usr/share').split(':') @@ -340,7 +340,8 @@ class CoreData: return opt.validate_value(override_value) raise MesonException('Tried to validate unknown option %s.' % option_name) -def load(filename): +def load(build_dir): + filename = os.path.join(build_dir, 'meson-private', 'coredata.dat') load_fail_msg = 'Coredata file {!r} is corrupted. Try with a fresh build tree.'.format(filename) try: with open(filename, 'rb') as f: @@ -354,7 +355,8 @@ def load(filename): (obj.version, version)) return obj -def save(obj, filename): +def save(obj, build_dir): + filename = os.path.join(build_dir, 'meson-private', 'coredata.dat') if obj.version != version: raise MesonException('Fatal version mismatch corruption.') with open(filename, 'wb') as f: diff --git a/mesonbuild/dependencies/__init__.py b/mesonbuild/dependencies/__init__.py index 69235da..4796980 100644 --- a/mesonbuild/dependencies/__init__.py +++ b/mesonbuild/dependencies/__init__.py @@ -12,12 +12,13 @@ # See the License for the specific language governing permissions and # limitations under the License. +from .boost import BoostDependency from .base import ( # noqa: F401 Dependency, DependencyException, DependencyMethods, ExternalProgram, NonExistingExternalProgram, ExternalDependency, ExternalLibrary, ExtraFrameworkDependency, InternalDependency, PkgConfigDependency, find_external_dependency, get_dep_identifier, packages, _packages_accept_language) from .dev import GMockDependency, GTestDependency, LLVMDependency, ValgrindDependency -from .misc import (BoostDependency, MPIDependency, Python3Dependency, ThreadDependency, PcapDependency, CupsDependency, LibWmfDependency) +from .misc import (MPIDependency, Python3Dependency, ThreadDependency, PcapDependency, CupsDependency, LibWmfDependency) from .platform import AppleFrameworks from .ui import GLDependency, GnuStepDependency, Qt4Dependency, Qt5Dependency, SDL2Dependency, WxDependency, VulkanDependency diff --git a/mesonbuild/dependencies/base.py b/mesonbuild/dependencies/base.py index 1e3c49c..0375102 100644 --- a/mesonbuild/dependencies/base.py +++ b/mesonbuild/dependencies/base.py @@ -61,21 +61,14 @@ class DependencyMethods(Enum): class Dependency: - def __init__(self, type_name, kwargs): - self.name = "null" - self.version = 'none' - self.language = None # None means C-like - self.is_found = False - self.type_name = type_name - self.compile_args = [] - self.link_args = [] - self.sources = [] + @classmethod + def _process_method_kw(cls, kwargs): method = kwargs.get('method', 'auto') if method not in [e.value for e in DependencyMethods]: raise DependencyException('method {!r} is invalid'.format(method)) method = DependencyMethods(method) - # This sets per-too config methods which are deprecated to to the new + # This sets per-tool config methods which are deprecated to to the new # generic CONFIG_TOOL value. if method in [DependencyMethods.SDLCONFIG, DependencyMethods.CUPSCONFIG, DependencyMethods.PCAPCONFIG, DependencyMethods.LIBWMFCONFIG]: @@ -88,14 +81,27 @@ class Dependency: # Set the detection method. If the method is set to auto, use any available method. 
# If method is set to a specific string, allow only that detection method. if method == DependencyMethods.AUTO: - self.methods = self.get_methods() - elif method in self.get_methods(): - self.methods = [method] + methods = cls.get_methods() + elif method in cls.get_methods(): + methods = [method] else: raise DependencyException( 'Unsupported detection method: {}, allowed methods are {}'.format( method.value, - mlog.format_list([x.value for x in [DependencyMethods.AUTO] + self.get_methods()]))) + mlog.format_list([x.value for x in [DependencyMethods.AUTO] + cls.get_methods()]))) + + return methods + + def __init__(self, type_name, kwargs): + self.name = "null" + self.version = 'none' + self.language = None # None means C-like + self.is_found = False + self.type_name = type_name + self.compile_args = [] + self.link_args = [] + self.sources = [] + self.methods = self._process_method_kw(kwargs) def __repr__(self): s = '<{0} {1}: {2}>' @@ -115,7 +121,8 @@ class Dependency: As an example, gtest-all.cc when using GTest.""" return self.sources - def get_methods(self): + @staticmethod + def get_methods(): return [DependencyMethods.AUTO] def get_name(self): @@ -138,7 +145,7 @@ class Dependency: class InternalDependency(Dependency): - def __init__(self, version, incdirs, compile_args, link_args, libraries, sources, ext_deps): + def __init__(self, version, incdirs, compile_args, link_args, libraries, whole_libraries, sources, ext_deps): super().__init__('internal', {}) self.version = version self.is_found = True @@ -146,6 +153,7 @@ class InternalDependency(Dependency): self.compile_args = compile_args self.link_args = link_args self.libraries = libraries + self.whole_libraries = whole_libraries self.sources = sources self.ext_deps = ext_deps @@ -246,14 +254,17 @@ class ConfigToolDependency(ExternalDependency): # instantiated and returned. The reduce function (method) is also # attached, since python's pickle module won't be able to do anything # with this dynamically generated class otherwise. - def reduce(_): - return (cls.factory, - (name, environment, language, kwargs, tools, tool_name)) + def reduce(self): + return (cls._unpickle, (), self.__dict__) sub = type('{}Dependency'.format(name.capitalize()), (cls, ), {'tools': tools, 'tool_name': tool_name, '__reduce__': reduce}) return sub(name, environment, language, kwargs) + @classmethod + def _unpickle(cls): + return cls.__new__(cls) + def find_config(self, versions=None): """Helper method that searchs for config tool binaries in PATH and returns the one that best matches the given version requirements. @@ -331,7 +342,8 @@ class ConfigToolDependency(ExternalDependency): return [] return shlex.split(out) - def get_methods(self): + @staticmethod + def get_methods(): return [DependencyMethods.AUTO, DependencyMethods.CONFIG_TOOL] def get_configtool_variable(self, variable_name): @@ -369,9 +381,7 @@ class PkgConfigDependency(ExternalDependency): pkgname = environment.cross_info.config['binaries']['pkgconfig'] potential_pkgbin = ExternalProgram(pkgname, silent=True) if potential_pkgbin.found(): - # FIXME, we should store all pkg-configs in ExternalPrograms. - # However that is too destabilizing a change to do just before release. - self.pkgbin = potential_pkgbin.get_command()[0] + self.pkgbin = potential_pkgbin PkgConfigDependency.class_pkgbin = self.pkgbin else: mlog.debug('Cross pkg-config %s not found.' 
% potential_pkgbin.name) @@ -393,7 +403,7 @@ class PkgConfigDependency(ExternalDependency): self.type_string = 'Native' mlog.debug('Determining dependency {!r} with pkg-config executable ' - '{!r}'.format(name, self.pkgbin)) + '{!r}'.format(name, self.pkgbin.get_path())) ret, self.version = self._call_pkgbin(['--modversion', name]) if ret != 0: if self.required: @@ -452,7 +462,7 @@ class PkgConfigDependency(ExternalDependency): def _call_pkgbin(self, args, env=None): if not env: env = os.environ - p, out = Popen_safe([self.pkgbin] + args, env=env)[0:2] + p, out = Popen_safe(self.pkgbin.get_command() + args, env=env)[0:2] return p.returncode, out.strip() def _convert_mingw_paths(self, args): @@ -484,7 +494,13 @@ class PkgConfigDependency(ExternalDependency): return converted def _set_cargs(self): - ret, out = self._call_pkgbin(['--cflags', self.name]) + env = None + if self.language == 'fortran': + # gfortran doesn't appear to look in system paths for INCLUDE files, + # so don't allow pkg-config to suppress -I flags for system paths + env = os.environ.copy() + env['PKG_CONFIG_ALLOW_SYSTEM_CFLAGS'] = '1' + ret, out = self._call_pkgbin(['--cflags', self.name], env=env) if ret != 0: raise DependencyException('Could not generate cargs for %s:\n\n%s' % (self.name, out)) @@ -505,6 +521,7 @@ class PkgConfigDependency(ExternalDependency): (self.name, out)) self.link_args = [] libpaths = [] + static_libs_notfound = [] for lib in self._convert_mingw_paths(shlex.split(out)): # If we want to use only static libraries, we have to look for the # file ourselves instead of depending on the compiler to find it @@ -514,12 +531,18 @@ class PkgConfigDependency(ExternalDependency): if lib.startswith('-L'): libpaths.append(lib[2:]) continue - elif lib.startswith('-l') and libpaths: + # FIXME: try to handle .la files in static mode too? + elif lib.startswith('-l'): args = self.compiler.find_library(lib[2:], self.env, libpaths, libtype='static') if not args or len(args) < 1: - raise DependencyException('Static library not found for {!r}' - ''.format(lib[2:])) - lib = args[0] + if lib in static_libs_notfound: + continue + mlog.warning('Static library {!r} not found for dependency {!r}, may ' + 'not be statically linked'.format(lib[2:], self.name)) + static_libs_notfound.append(lib) + else: + # Replace -l arg with full path to static library + lib = args[0] elif lib.endswith(".la"): shared_libname = self.extract_libtool_shlib(lib) shared_lib = os.path.join(os.path.dirname(lib), shared_libname) @@ -533,6 +556,11 @@ class PkgConfigDependency(ExternalDependency): lib = shared_lib self.is_libtool = True self.link_args.append(lib) + # Add all -Lbar args if we have -lfoo args in link_args + if static_libs_notfound: + # Order of -L flags doesn't matter with ld, but it might with other + # linkers such as MSVC, so prepend them. 
+ self.link_args = ['-L' + lp for lp in libpaths] + self.link_args def get_pkgconfig_variable(self, variable_name, kwargs): options = ['--variable=' + variable_name, self.name] @@ -569,7 +597,8 @@ class PkgConfigDependency(ExternalDependency): mlog.debug('Got pkgconfig variable %s : %s' % (variable_name, variable)) return variable - def get_methods(self): + @staticmethod + def get_methods(): return [DependencyMethods.PKGCONFIG] def check_pkgconfig(self): @@ -578,21 +607,23 @@ class PkgConfigDependency(ExternalDependency): pkgbin = os.environ[evar].strip() else: pkgbin = 'pkg-config' - try: - p, out = Popen_safe([pkgbin, '--version'])[0:2] - if p.returncode != 0: - # Set to False instead of None to signify that we've already - # searched for it and not found it + pkgbin = ExternalProgram(pkgbin, silent=True) + if pkgbin.found(): + try: + p, out = Popen_safe(pkgbin.get_command() + ['--version'])[0:2] + if p.returncode != 0: + mlog.warning('Found pkg-config {!r} but couldn\'t run it' + ''.format(' '.join(pkgbin.get_command()))) + # Set to False instead of None to signify that we've already + # searched for it and not found it + pkgbin = False + except (FileNotFoundError, PermissionError): pkgbin = False - except (FileNotFoundError, PermissionError): + else: pkgbin = False - if pkgbin and not os.path.isabs(pkgbin) and shutil.which(pkgbin): - # Sometimes shutil.which fails where Popen succeeds, so - # only find the abs path if it can be found by shutil.which - pkgbin = shutil.which(pkgbin) if not self.silent: if pkgbin: - mlog.log('Found pkg-config:', mlog.bold(pkgbin), + mlog.log('Found pkg-config:', mlog.bold(pkgbin.get_path()), '(%s)' % out.strip()) else: mlog.log('Found Pkg-config:', mlog.red('NO')) @@ -918,7 +949,12 @@ def find_external_dependency(name, env, kwargs): if lname in packages: if lname not in _packages_accept_language and 'language' in kwargs: raise DependencyException('%s dependency does not accept "language" keyword argument' % (lname, )) - dep = packages[lname](env, kwargs) + # Create the dependency object using a factory class method, if one + # exists, otherwise it is just constructed directly. + if getattr(packages[lname], '_factory', None): + dep = packages[lname]._factory(env, kwargs) + else: + dep = packages[lname](env, kwargs) if required and not dep.found(): raise DependencyException('Dependency "%s" not found' % name) return dep diff --git a/mesonbuild/dependencies/boost.py b/mesonbuild/dependencies/boost.py new file mode 100644 index 0000000..a17fb58 --- /dev/null +++ b/mesonbuild/dependencies/boost.py @@ -0,0 +1,683 @@ +# Copyright 2013-2017 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# This file contains the detection logic for miscellaneous external dependencies. + +import glob +import os + +from .. import mlog +from .. 
import mesonlib +from ..environment import detect_cpu_family + +from .base import (DependencyException, ExternalDependency) + +# On windows 3 directory layouts are supported: +# * The default layout (versioned) installed: +# - $BOOST_ROOT/include/boost-x_x/boost/*.hpp +# - $BOOST_ROOT/lib/*.lib +# * The non-default layout (system) installed: +# - $BOOST_ROOT/include/boost/*.hpp +# - $BOOST_ROOT/lib/*.lib +# * The pre-built binaries from sf.net: +# - $BOOST_ROOT/boost/*.hpp +# - $BOOST_ROOT/lib<arch>-<compiler>/*.lib where arch=32/64 and compiler=msvc-14.1 +# +# Note that we should also try to support: +# mingw-w64 / Windows : libboost_<module>-mt.a (location = <prefix>/mingw64/lib/) +# libboost_<module>-mt.dll.a +# +# Library names supported: +# - libboost_<module>-<compiler>-mt-gd-x_x.lib (static) +# - boost_<module>-<compiler>-mt-gd-x_x.lib|.dll (shared) +# - libboost_<module>.lib (static) +# - boost_<module>.lib|.dll (shared) +# where compiler is vc141 for example. +# +# NOTE: -gd means runtime and build time debugging is on +# -mt means threading=multi +# +# The `modules` argument accept library names. This is because every module that +# has libraries to link against also has multiple options regarding how to +# link. See for example: +# * http://www.boost.org/doc/libs/1_65_1/libs/test/doc/html/boost_test/usage_variants.html +# * http://www.boost.org/doc/libs/1_65_1/doc/html/stacktrace/configuration_and_build.html +# * http://www.boost.org/doc/libs/1_65_1/libs/math/doc/html/math_toolkit/main_tr1.html + +# **On Unix**, official packaged versions of boost libraries follow the following schemes: +# +# Linux / Debian: libboost_<module>.so -> libboost_<module>.so.1.66.0 +# Linux / Red Hat: libboost_<module>.so -> libboost_<module>.so.1.66.0 +# Linux / OpenSuse: libboost_<module>.so -> libboost_<module>.so.1.66.0 +# Win / Cygwin: libboost_<module>.dll.a (location = /usr/lib) +# libboost_<module>.a +# cygboost_<module>_1_64.dll (location = /usr/bin) +# Mac / homebrew: libboost_<module>.dylib + libboost_<module>-mt.dylib (location = /usr/local/lib) +# Mac / macports: libboost_<module>.dylib + libboost_<module>-mt.dylib (location = /opt/local/lib) +# +# Its not clear that any other abi tags (e.g. -gd) are used in official packages. +# +# On Linux systems, boost libs have multithreading support enabled, but without the -mt tag. +# +# Boost documentation recommends using complex abi tags like "-lboost_regex-gcc34-mt-d-1_36". +# (See http://www.boost.org/doc/libs/1_66_0/more/getting_started/unix-variants.html#library-naming) +# However, its not clear that any Unix distribution follows this scheme. +# Furthermore, the boost documentation for unix above uses examples from windows like +# "libboost_regex-vc71-mt-d-x86-1_34.lib", so apparently the abi tags may be more aimed at windows. +# +# Probably we should use the linker search path to decide which libraries to use. This will +# make it possible to find the macports boost libraries without setting BOOST_ROOT, and will +# also mean that it would be possible to use user-installed boost libraries when official +# packages are installed. +# +# We thus follow the following strategy: +# 1. Look for libraries using compiler.find_library( ) +# 1.1 On Linux, just look for boost_<module> +# 1.2 On other systems (e.g. Mac) look for boost_<module>-mt if multithreading. +# 1.3 Otherwise look for boost_<module> +# 2. Fall back to previous approach +# 2.1. Search particular directories. +# 2.2. 
Find boost libraries with unknown suffixes using file-name globbing. + +# TODO: Unix: Don't assume we know where the boost dir is, rely on -Idir and -Ldir being set. +# TODO: Allow user to specify suffix in BOOST_SUFFIX, or add specific options like BOOST_DEBUG for 'd' for debug. + +class BoostDependency(ExternalDependency): + def __init__(self, environment, kwargs): + super().__init__('boost', environment, 'cpp', kwargs) + self.need_static_link = ['boost_exception', 'boost_test_exec_monitor'] + # FIXME: is this the right way to find the build type? + self.is_debug = environment.cmd_line_options.buildtype.startswith('debug') + threading = kwargs.get("threading", "multi") + self.is_multithreading = threading == "multi" + + self.requested_modules = self.get_requested(kwargs) + + self.boost_root = None + self.boost_roots = [] + self.incdir = None + self.libdir = None + + if 'BOOST_ROOT' in os.environ: + self.boost_root = os.environ['BOOST_ROOT'] + self.boost_roots = [self.boost_root] + if not os.path.isabs(self.boost_root): + raise DependencyException('BOOST_ROOT must be an absolute path.') + if 'BOOST_INCLUDEDIR' in os.environ: + self.incdir = os.environ['BOOST_INCLUDEDIR'] + if 'BOOST_LIBRARYDIR' in os.environ: + self.libdir = os.environ['BOOST_LIBRARYDIR'] + + if self.boost_root is None: + if mesonlib.for_windows(self.want_cross, self.env): + self.boost_roots = self.detect_win_roots() + else: + self.boost_roots = self.detect_nix_roots() + + if self.incdir is None: + if mesonlib.for_windows(self.want_cross, self.env): + self.incdir = self.detect_win_incdir() + else: + self.incdir = self.detect_nix_incdir() + + if self.check_invalid_modules(): + self.log_fail() + return + + mlog.debug('Boost library root dir is', mlog.bold(self.boost_root)) + mlog.debug('Boost include directory is', mlog.bold(self.incdir)) + + # 1. check if we can find BOOST headers. + self.detect_headers_and_version() + + # 2. check if we can find BOOST libraries. + if self.is_found: + self.detect_lib_modules() + mlog.debug('Boost library directory is', mlog.bold(self.libdir)) + + # 3. Report success or failure + if self.is_found: + self.log_success() + else: + self.log_fail() + + def check_invalid_modules(self): + invalid_modules = [c for c in self.requested_modules if 'boost_' + c not in BOOST_LIBS] + + # previous versions of meson allowed include dirs as modules + remove = [] + for m in invalid_modules: + if m in BOOST_DIRS: + mlog.warning('Requested boost library', mlog.bold(m), 'that doesn\'t exist. 
' + 'This will be an error in the future') + remove.append(m) + + self.requested_modules = [x for x in self.requested_modules if x not in remove] + invalid_modules = [x for x in invalid_modules if x not in remove] + + if invalid_modules: + mlog.error('Invalid Boost modules: ' + ', '.join(invalid_modules)) + return True + else: + return False + + def log_fail(self): + module_str = ', '.join(self.requested_modules) + mlog.log("Dependency Boost (%s) found:" % module_str, mlog.red('NO')) + + def log_success(self): + module_str = ', '.join(self.requested_modules) + if self.boost_root: + info = self.version + ', ' + self.boost_root + else: + info = self.version + mlog.log('Dependency Boost (%s) found:' % module_str, mlog.green('YES'), info) + + def detect_nix_roots(self): + return [os.path.abspath(os.path.join(x, '..')) + for x in self.compiler.get_default_include_dirs()] + + def detect_win_roots(self): + res = [] + # Where boost documentation says it should be + globtext = 'C:\\Program Files\\boost\\boost_*' + files = glob.glob(globtext) + res.extend(files) + + # Where boost built from source actually installs it + if os.path.isdir('C:\\Boost'): + res.append('C:\\Boost') + + # Where boost prebuilt binaries are + globtext = 'C:\\local\\boost_*' + files = glob.glob(globtext) + res.extend(files) + return res + + def detect_nix_incdir(self): + if self.boost_root: + return os.path.join(self.boost_root, 'include') + return None + + # FIXME: Should pick a version that matches the requested version + # Returns the folder that contains the boost folder. + def detect_win_incdir(self): + for root in self.boost_roots: + globtext = os.path.join(root, 'include', 'boost-*') + incdirs = glob.glob(globtext) + if len(incdirs) > 0: + return incdirs[0] + incboostdir = os.path.join(root, 'include', 'boost') + if os.path.isdir(incboostdir): + return os.path.join(root, 'include') + incboostdir = os.path.join(root, 'boost') + if os.path.isdir(incboostdir): + return root + return None + + def get_compile_args(self): + args = [] + include_dir = self.incdir + + # Use "-isystem" when including boost headers instead of "-I" + # to avoid compiler warnings/failures when "-Werror" is used + + # Careful not to use "-isystem" on default include dirs as it + # breaks some of the headers for certain gcc versions + + # For example, doing g++ -isystem /usr/include on a simple + # "int main()" source results in the error: + # "/usr/include/c++/6.3.1/cstdlib:75:25: fatal error: stdlib.h: No such file or directory" + + # See https://gcc.gnu.org/bugzilla/show_bug.cgi?id=70129 + # and http://stackoverflow.com/questions/37218953/isystem-on-a-system-include-directory-causes-errors + # for more details + + if include_dir and include_dir not in self.compiler.get_default_include_dirs(): + args.append("".join(self.compiler.get_include_args(include_dir, True))) + return args + + def get_requested(self, kwargs): + candidates = mesonlib.extract_as_list(kwargs, 'modules') + for c in candidates: + if not isinstance(c, str): + raise DependencyException('Boost module argument is not a string.') + return candidates + + def detect_headers_and_version(self): + try: + version = self.compiler.get_define('BOOST_LIB_VERSION', '#include <boost/version.hpp>', self.env, self.get_compile_args(), []) + except mesonlib.EnvironmentException: + return + except TypeError: + return + # Remove quotes + version = version[1:-1] + # Fix version string + self.version = version.replace('_', '.') + self.is_found = True + + def detect_lib_modules(self): + self.lib_modules = 
{} + + # 1. Try to find modules using compiler.find_library( ) + if self.find_libraries_with_abi_tags(self.abi_tags()): + pass + # 2. Fall back to the old method + else: + if mesonlib.for_windows(self.want_cross, self.env): + self.detect_lib_modules_win() + else: + self.detect_lib_modules_nix() + + # 3. Check if we can find the modules + for m in self.requested_modules: + if 'boost_' + m not in self.lib_modules: + mlog.debug('Requested Boost library {!r} not found'.format(m)) + self.is_found = False + + def modname_from_filename(self, filename): + modname = os.path.basename(filename) + modname = modname.split('.', 1)[0] + modname = modname.split('-', 1)[0] + if modname.startswith('libboost'): + modname = modname[3:] + return modname + + def compiler_tag(self): + tag = None + compiler = self.env.detect_cpp_compiler(self.want_cross) + if mesonlib.for_windows(self.want_cross, self.env): + if compiler.get_id() == 'msvc': + comp_ts_version = compiler.get_toolset_version() + compiler_ts = comp_ts_version.split('.') + # FIXME - what about other compilers? + tag = '-vc{}{}'.format(compiler_ts[0], compiler_ts[1]) + else: + tag = '' + return tag + + def threading_tag(self): + if not self.is_multithreading: + return '' + + if mesonlib.for_darwin(self.want_cross, self.env): + # - Mac: requires -mt for multithreading, so should not fall back to non-mt libraries. + return '-mt' + elif mesonlib.for_windows(self.want_cross, self.env): + # - Windows: requires -mt for multithreading, so should not fall back to non-mt libraries. + return '-mt' + else: + # - Linux: leaves off -mt but libraries are multithreading-aware. + # - Cygwin: leaves off -mt but libraries are multithreading-aware. + return '' + + def version_tag(self): + return '-' + self.version.replace('.', '_') + + def debug_tag(self): + return '-gd' if self.is_debug else '' + + def versioned_abi_tag(self): + return self.compiler_tag() + self.threading_tag() + self.debug_tag() + self.version_tag() + + # FIXME - how to handle different distributions, e.g. for Mac? Currently we handle homebrew and macports, but not fink. + def abi_tags(self): + if mesonlib.for_windows(self.want_cross, self.env): + return [self.versioned_abi_tag(), self.threading_tag()] + else: + return [self.threading_tag()] + + def sourceforge_dir(self): + if self.env.detect_cpp_compiler(self.want_cross).get_id() != 'msvc': + return None + comp_ts_version = self.env.detect_cpp_compiler(self.want_cross).get_toolset_version() + arch = detect_cpu_family(self.env.coredata.compilers) + if arch == 'x86': + return 'lib32-msvc-{}'.format(comp_ts_version) + elif arch == 'x86_64': + return 'lib64-msvc-{}'.format(comp_ts_version) + else: + # Does anyone do Boost cross-compiling to other archs on Windows? 
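A rough standalone illustration, with made-up toolset and Boost versions, of how the tag helpers above (compiler_tag, threading_tag, debug_tag, version_tag) concatenate into the Windows library names listed in the file header comment; boost_libname is an invented name, not part of Meson:

    def boost_libname(module, compiler='-vc141', threading='-mt',
                      debug='-gd', version='1.64', static=True):
        # Mirrors versioned_abi_tag(): compiler + threading + debug + version.
        prefix = 'libboost_' if static else 'boost_'
        version_tag = '-' + version.replace('.', '_')
        return prefix + module + compiler + threading + debug + version_tag + '.lib'

    print(boost_libname('filesystem'))        # libboost_filesystem-vc141-mt-gd-1_64.lib
    print(boost_libname('system', debug=''))  # libboost_system-vc141-mt-1_64.lib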
+ return None + + def find_libraries_with_abi_tag(self, tag): + + # All modules should have the same tag + self.lib_modules = {} + + all_found = True + + for module in self.requested_modules: + libname = 'boost_' + module + tag + + args = self.compiler.find_library(libname, self.env, self.extra_lib_dirs()) + if args is None: + mlog.debug("Couldn\'t find library '{}' for boost module '{}' (ABI tag = '{}')".format(libname, module, tag)) + all_found = False + else: + mlog.debug('Link args for boost module "{}" are {}'.format(module, args)) + self.lib_modules['boost_' + module] = args + + return all_found + + def find_libraries_with_abi_tags(self, tags): + for tag in tags: + if self.find_libraries_with_abi_tag(tag): + return True + return False + + def detect_lib_modules_win(self): + if not self.libdir: + # The libdirs in the distributed binaries (from sf) + lib_sf = self.sourceforge_dir() + + if self.boost_root: + roots = [self.boost_root] + else: + roots = self.boost_roots + for root in roots: + # The default libdir when building + libdir = os.path.join(root, 'lib') + if os.path.isdir(libdir): + self.libdir = libdir + break + if lib_sf: + full_path = os.path.join(root, lib_sf) + if os.path.isdir(full_path): + self.libdir = full_path + break + + if not self.libdir: + return + + for name in self.need_static_link: + # FIXME - why are we only looking for *.lib? Mingw provides *.dll.a and *.a + libname = 'lib' + name + self.versioned_abi_tag() + '.lib' + if os.path.isfile(os.path.join(self.libdir, libname)): + self.lib_modules[self.modname_from_filename(libname)] = [libname] + else: + libname = "lib{}.lib".format(name) + if os.path.isfile(os.path.join(self.libdir, libname)): + self.lib_modules[name[3:]] = [libname] + + # globber1 applies to a layout=system installation + # globber2 applies to a layout=versioned installation + globber1 = 'libboost_*' if self.static else 'boost_*' + globber2 = globber1 + self.versioned_abi_tag() + # FIXME - why are we only looking for *.lib? Mingw provides *.dll.a and *.a + globber2_matches = glob.glob(os.path.join(self.libdir, globber2 + '.lib')) + for entry in globber2_matches: + fname = os.path.basename(entry) + self.lib_modules[self.modname_from_filename(fname)] = [fname] + if len(globber2_matches) == 0: + # FIXME - why are we only looking for *.lib? Mingw provides *.dll.a and *.a + for entry in glob.glob(os.path.join(self.libdir, globber1 + '.lib')): + if self.static: + fname = os.path.basename(entry) + self.lib_modules[self.modname_from_filename(fname)] = [fname] + + def detect_lib_modules_nix(self): + if self.static: + libsuffix = 'a' + elif mesonlib.for_darwin(self.want_cross, self.env): + libsuffix = 'dylib' + else: + libsuffix = 'so' + + globber = 'libboost_*.{}'.format(libsuffix) + if self.libdir: + libdirs = [self.libdir] + elif self.boost_root is None: + libdirs = mesonlib.get_library_dirs() + else: + libdirs = [os.path.join(self.boost_root, 'lib')] + for libdir in libdirs: + for name in self.need_static_link: + libname = 'lib{}.a'.format(name) + if os.path.isfile(os.path.join(libdir, libname)): + self.lib_modules[name] = [libname] + for entry in glob.glob(os.path.join(libdir, globber)): + # I'm not 100% sure what to do here. Some distros + # have modules such as thread only as -mt versions. + # On debian all packages are built threading=multi + # but not suffixed with -mt. + # FIXME: implement detect_lib_modules_{debian, redhat, ...} + # FIXME: this wouldn't work with -mt-gd either. 
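The pass/continue chain that follows these FIXME comments is easier to read as a predicate. A paraphrase, not the code Meson runs, of which globbed files detect_lib_modules_nix keeps; want_library is an invented name:

    def want_library(filename, libsuffix, multithreading, debian_like):
        mt_suffixed = filename.endswith('-mt.' + libsuffix)
        if multithreading and debian_like:
            return True   # Debian builds threading=multi but drops the -mt tag
        if multithreading and mt_suffixed:
            return True   # explicitly multithreaded library matches the request
        if not mt_suffixed:
            return True   # no -mt suffix: kept in all cases
        return False      # has -mt suffix but a single-threaded build was requested

    print(want_library('libboost_thread-mt.so', 'so', True, False))   # True
    print(want_library('libboost_thread-mt.so', 'so', False, False))  # False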
-BDR + if self.is_multithreading and mesonlib.is_debianlike(): + pass + elif self.is_multithreading and entry.endswith('-mt.{}'.format(libsuffix)): + pass + elif not entry.endswith('-mt.{}'.format(libsuffix)): + pass + else: + continue + modname = self.modname_from_filename(entry) + if modname not in self.lib_modules: + self.lib_modules[modname] = [entry] + + def extra_lib_dirs(self): + if self.libdir: + return [self.libdir] + elif self.boost_root: + return [os.path.join(self.boost_root, 'lib')] + return [] + + def get_link_args(self): + args = [] + for dir in self.extra_lib_dirs(): + args += self.compiler.get_linker_search_args(dir) + for lib in self.requested_modules: + args += self.lib_modules['boost_' + lib] + return args + + def get_sources(self): + return [] + + def need_threads(self): + return 'thread' in self.requested_modules + + +# Generated with boost_names.py +BOOST_LIBS = [ + 'boost_atomic', + 'boost_chrono', + 'boost_chrono', + 'boost_container', + 'boost_context', + 'boost_coroutine', + 'boost_date_time', + 'boost_exception', + 'boost_fiber', + 'boost_filesystem', + 'boost_graph', + 'boost_iostreams', + 'boost_locale', + 'boost_log', + 'boost_log_setup', + 'boost_math_tr1', + 'boost_math_tr1f', + 'boost_math_tr1l', + 'boost_math_c99', + 'boost_math_c99f', + 'boost_math_c99l', + 'boost_math_tr1', + 'boost_math_tr1f', + 'boost_math_tr1l', + 'boost_math_c99', + 'boost_math_c99f', + 'boost_math_c99l', + 'boost_math_tr1', + 'boost_math_tr1f', + 'boost_math_tr1l', + 'boost_math_c99', + 'boost_math_c99f', + 'boost_math_c99l', + 'boost_math_tr1', + 'boost_math_tr1f', + 'boost_math_tr1l', + 'boost_math_c99', + 'boost_math_c99f', + 'boost_math_c99l', + 'boost_math_tr1', + 'boost_math_tr1f', + 'boost_math_tr1l', + 'boost_math_c99', + 'boost_math_c99f', + 'boost_math_c99l', + 'boost_math_tr1', + 'boost_math_tr1f', + 'boost_math_tr1l', + 'boost_math_c99', + 'boost_math_c99f', + 'boost_math_c99l', + 'boost_mpi', + 'boost_program_options', + 'boost_python', + 'boost_python3', + 'boost_numpy', + 'boost_numpy3', + 'boost_random', + 'boost_regex', + 'boost_serialization', + 'boost_wserialization', + 'boost_signals', + 'boost_stacktrace_noop', + 'boost_stacktrace_backtrace', + 'boost_stacktrace_addr2line', + 'boost_stacktrace_basic', + 'boost_stacktrace_windbg', + 'boost_stacktrace_windbg_cached', + 'boost_system', + 'boost_prg_exec_monitor', + 'boost_test_exec_monitor', + 'boost_unit_test_framework', + 'boost_thread', + 'boost_timer', + 'boost_type_erasure', + 'boost_wave' +] + +BOOST_DIRS = [ + 'lambda', + 'optional', + 'convert', + 'system', + 'uuid', + 'archive', + 'align', + 'timer', + 'chrono', + 'gil', + 'logic', + 'signals', + 'predef', + 'tr1', + 'multi_index', + 'property_map', + 'multi_array', + 'context', + 'random', + 'endian', + 'circular_buffer', + 'proto', + 'assign', + 'format', + 'math', + 'phoenix', + 'graph', + 'locale', + 'mpl', + 'pool', + 'unordered', + 'core', + 'exception', + 'ptr_container', + 'flyweight', + 'range', + 'typeof', + 'thread', + 'move', + 'spirit', + 'dll', + 'compute', + 'serialization', + 'ratio', + 'msm', + 'config', + 'metaparse', + 'coroutine2', + 'qvm', + 'program_options', + 'concept', + 'detail', + 'hana', + 'concept_check', + 'compatibility', + 'variant', + 'type_erasure', + 'mpi', + 'test', + 'fusion', + 'log', + 'sort', + 'local_function', + 'units', + 'functional', + 'preprocessor', + 'integer', + 'container', + 'polygon', + 'interprocess', + 'numeric', + 'iterator', + 'wave', + 'lexical_cast', + 'multiprecision', + 'utility', + 'tti', + 
'asio', + 'dynamic_bitset', + 'algorithm', + 'xpressive', + 'bimap', + 'signals2', + 'type_traits', + 'regex', + 'statechart', + 'parameter', + 'icl', + 'python', + 'lockfree', + 'intrusive', + 'io', + 'pending', + 'geometry', + 'tuple', + 'iostreams', + 'heap', + 'atomic', + 'filesystem', + 'smart_ptr', + 'function', + 'fiber', + 'type_index', + 'accumulators', + 'function_types', + 'coroutine', + 'vmd', + 'date_time', + 'property_tree', + 'bind' +] diff --git a/mesonbuild/dependencies/dev.py b/mesonbuild/dependencies/dev.py index c254947..eae8ff7 100644 --- a/mesonbuild/dependencies/dev.py +++ b/mesonbuild/dependencies/dev.py @@ -120,18 +120,19 @@ class LLVMDependency(ConfigToolDependency): # newest back to oldest (3.5 is arbitrary), and finally the devel version. # Please note that llvm-config-6.0 is a development snapshot and it should # not be moved to the beginning of the list. The only difference between - # llvm-config-6.0 and llvm-config-devel is that the former is used by + # llvm-config-7 and llvm-config-devel is that the former is used by # Debian and the latter is used by FreeBSD. tools = [ 'llvm-config', # base - 'llvm-config-5.0', 'llvm-config50', # latest stable release - 'llvm-config-4.0', 'llvm-config40', # old stable releases + 'llvm-config-6.0', 'llvm-config60', + 'llvm-config-5.0', 'llvm-config50', + 'llvm-config-4.0', 'llvm-config40', 'llvm-config-3.9', 'llvm-config39', 'llvm-config-3.8', 'llvm-config38', 'llvm-config-3.7', 'llvm-config37', 'llvm-config-3.6', 'llvm-config36', 'llvm-config-3.5', 'llvm-config35', - 'llvm-config-6.0', 'llvm-config-devel', # development snapshot + 'llvm-config-7', 'llvm-config-devel', # development snapshot ] tool_name = 'llvm-config' __cpp_blacklist = {'-DNDEBUG'} diff --git a/mesonbuild/dependencies/misc.py b/mesonbuild/dependencies/misc.py index 542de39..2a218be 100644 --- a/mesonbuild/dependencies/misc.py +++ b/mesonbuild/dependencies/misc.py @@ -14,11 +14,9 @@ # This file contains the detection logic for miscellaneous external dependencies. -import glob import os import re import shlex -import shutil import sysconfig from pathlib import Path @@ -33,426 +31,6 @@ from .base import ( ConfigToolDependency, ) -# On windows 3 directory layouts are supported: -# * The default layout (versioned) installed: -# - $BOOST_ROOT/include/boost-x_x/boost/*.hpp -# - $BOOST_ROOT/lib/*.lib -# * The non-default layout (system) installed: -# - $BOOST_ROOT/include/boost/*.hpp -# - $BOOST_ROOT/lib/*.lib -# * The pre-built binaries from sf.net: -# - $BOOST_ROOT/boost/*.hpp -# - $BOOST_ROOT/lib<arch>-<compiler>/*.lib where arch=32/64 and compiler=msvc-14.1 -# -# Library names supported: -# - libboost_<module>-<compiler>-mt-gd-x_x.lib (static) -# - boost_<module>-<compiler>-mt-gd-x_x.lib|.dll (shared) -# - libboost_<module>.lib (static) -# - boost_<module>.lib|.dll (shared) -# where compiler is vc141 for example. -# -# NOTE: -gd means runtime and build time debugging is on -# -mt means threading=multi -# -# The `modules` argument accept library names. This is because every module that -# has libraries to link against also has multiple options regarding how to -# link. 
See for example: -# * http://www.boost.org/doc/libs/1_65_1/libs/test/doc/html/boost_test/usage_variants.html -# * http://www.boost.org/doc/libs/1_65_1/doc/html/stacktrace/configuration_and_build.html -# * http://www.boost.org/doc/libs/1_65_1/libs/math/doc/html/math_toolkit/main_tr1.html - -# **On Unix**, official packaged versions of boost libraries follow the following schemes: -# -# Linux / Debian: libboost_<module>.so -> libboost_<module>.so.1.66.0 -# Linux / Red Hat: libboost_<module>.so -> libboost_<module>.so.1.66.0 -# Linux / OpenSuse: libboost_<module>.so -> libboost_<module>.so.1.66.0 -# Win / Cygwin: libboost_<module>.dll.a (location = /usr/lib) -# libboost_<module>.a -# cygboost_<module>_1_64.dll (location = /usr/bin) -# Mac / homebrew: libboost_<module>.dylib + libboost_<module>-mt.dylib (location = /usr/local/lib) -# Mac / macports: libboost_<module>.dylib + libboost_<module>-mt.dylib (location = /opt/local/lib) -# -# Its not clear that any other abi tags (e.g. -gd) are used in official packages. -# -# On Linux systems, boost libs have multithreading support enabled, but without the -mt tag. -# -# Boost documentation recommends using complex abi tags like "-lboost_regex-gcc34-mt-d-1_36". -# (See http://www.boost.org/doc/libs/1_66_0/more/getting_started/unix-variants.html#library-naming) -# However, its not clear that any Unix distribution follows this scheme. -# Furthermore, the boost documentation for unix above uses examples from windows like -# "libboost_regex-vc71-mt-d-x86-1_34.lib", so apparently the abi tags may be more aimed at windows. -# -# Probably we should use the linker search path to decide which libraries to use. This will -# make it possible to find the macports boost libraries without setting BOOST_ROOT, and will -# also mean that it would be possible to use user-installed boost libraries when official -# packages are installed. -# -# We thus follow the following strategy: -# 1. Look for libraries using compiler.find_library( ) -# 1.1 On Linux, just look for boost_<module> -# 1.2 On other systems (e.g. Mac) look for boost_<module>-mt if multithreading. -# 1.3 Otherwise look for boost_<module> -# 2. Fall back to previous approach -# 2.1. Search particular directories. -# 2.2. Find boost libraries with unknown suffixes using file-name globbing. - -# TODO: Unix: Don't assume we know where the boost dir is, rely on -Idir and -Ldir being set. -# TODO: Determine a suffix (e.g. "-mt" or "") and use it. -# TODO: Get_win_link_args( ) and get_link_args( ) -# TODO: Genericize: 'args += ['-L' + dir] => args += self.compiler.get_linker_search_args(dir) -# TODO: Allow user to specify suffix in BOOST_SUFFIX, or add specific options like BOOST_DEBUG for 'd' for debug. 
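On the llvm-config candidate list updated in dev.py a little earlier: the principle is to try the unsuffixed tool first, then versioned names from newest to oldest, with development snapshots last. A simplified first-found search along those lines (Meson's ConfigToolDependency additionally compares the reported version against any requirement); find_config_tool and the truncated candidate list are illustrative only:

    import shutil

    def find_config_tool(candidates):
        # Return the path of the first candidate found on PATH, or None.
        for name in candidates:
            path = shutil.which(name)
            if path is not None:
                return path
        return None

    llvm_configs = ['llvm-config',                      # base
                    'llvm-config-6.0', 'llvm-config60',
                    'llvm-config-5.0', 'llvm-config50',
                    'llvm-config-7', 'llvm-config-devel']  # snapshots last (list truncated)
    print(find_config_tool(llvm_configs))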
-# TODO: fix cross: -# is_windows() -> for_windows(self.want_cross, self.env) -# is_osx() and self.want_cross -> for_darwin(self.want_cross, self.env) - -class BoostDependency(ExternalDependency): - def __init__(self, environment, kwargs): - super().__init__('boost', environment, 'cpp', kwargs) - self.need_static_link = ['boost_exception', 'boost_test_exec_monitor'] - self.is_debug = environment.cmd_line_options.buildtype.startswith('debug') - threading = kwargs.get("threading", "multi") - self.is_multithreading = threading == "multi" - - self.requested_modules = self.get_requested(kwargs) - - self.boost_root = None - self.boost_roots = [] - self.incdir = None - self.libdir = None - - if 'BOOST_ROOT' in os.environ: - self.boost_root = os.environ['BOOST_ROOT'] - self.boost_roots = [self.boost_root] - if not os.path.isabs(self.boost_root): - raise DependencyException('BOOST_ROOT must be an absolute path.') - if 'BOOST_INCLUDEDIR' in os.environ: - self.incdir = os.environ['BOOST_INCLUDEDIR'] - if 'BOOST_LIBRARYDIR' in os.environ: - self.libdir = os.environ['BOOST_LIBRARYDIR'] - - if self.boost_root is None: - if mesonlib.is_windows(): - self.boost_roots = self.detect_win_roots() - else: - self.boost_roots = self.detect_nix_roots() - - if self.boost_root is None and not self.boost_roots: - self.log_fail() - return - - if self.incdir is None: - if mesonlib.is_windows(): - self.incdir = self.detect_win_incdir() - else: - self.incdir = self.detect_nix_incdir() - - if self.incdir is None and mesonlib.is_windows(): - self.log_fail() - return - - if self.check_invalid_modules(): - return - - mlog.debug('Boost library root dir is', mlog.bold(self.boost_root)) - mlog.debug('Boost include directory is', mlog.bold(self.incdir)) - - self.lib_modules = {} - self.detect_version() - if self.is_found: - self.detect_lib_modules() - mlog.debug('Boost library directory is', mlog.bold(self.libdir)) - for m in self.requested_modules: - if 'boost_' + m not in self.lib_modules: - mlog.debug('Requested Boost library {!r} not found'.format(m)) - self.log_fail() - self.is_found = False - return - self.log_success() - else: - self.log_fail() - - def check_invalid_modules(self): - invalid_modules = [c for c in self.requested_modules if 'boost_' + c not in BOOST_LIBS] - - # previous versions of meson allowed include dirs as modules - remove = [] - for m in invalid_modules: - if m in BOOST_DIRS: - mlog.warning('Requested boost library', mlog.bold(m), 'that doesn\'t exist. 
' - 'This will be an error in the future') - remove.append(m) - - self.requested_modules = [x for x in self.requested_modules if x not in remove] - invalid_modules = [x for x in invalid_modules if x not in remove] - - if invalid_modules: - mlog.log(mlog.red('ERROR:'), 'Invalid Boost modules: ' + ', '.join(invalid_modules)) - self.log_fail() - return True - else: - return False - - def log_fail(self): - module_str = ', '.join(self.requested_modules) - mlog.log("Dependency Boost (%s) found:" % module_str, mlog.red('NO')) - - def log_success(self): - module_str = ', '.join(self.requested_modules) - if self.boost_root: - info = self.version + ', ' + self.boost_root - else: - info = self.version - mlog.log('Dependency Boost (%s) found:' % module_str, mlog.green('YES'), info) - - def detect_nix_roots(self): - return [os.path.abspath(os.path.join(x, '..')) - for x in self.compiler.get_default_include_dirs()] - - def detect_win_roots(self): - res = [] - # Where boost documentation says it should be - globtext = 'C:\\Program Files\\boost\\boost_*' - files = glob.glob(globtext) - res.extend(files) - - # Where boost built from source actually installs it - if os.path.isdir('C:\\Boost'): - res.append('C:\\Boost') - - # Where boost prebuilt binaries are - globtext = 'C:\\local\\boost_*' - files = glob.glob(globtext) - res.extend(files) - return res - - def detect_nix_incdir(self): - if self.boost_root: - return os.path.join(self.boost_root, 'include') - return None - - # FIXME: Should pick a version that matches the requested version - # Returns the folder that contains the boost folder. - def detect_win_incdir(self): - for root in self.boost_roots: - globtext = os.path.join(root, 'include', 'boost-*') - incdirs = glob.glob(globtext) - if len(incdirs) > 0: - return incdirs[0] - incboostdir = os.path.join(root, 'include', 'boost') - if os.path.isdir(incboostdir): - return os.path.join(root, 'include') - incboostdir = os.path.join(root, 'boost') - if os.path.isdir(incboostdir): - return root - return None - - def get_compile_args(self): - args = [] - include_dir = self.incdir - - # Use "-isystem" when including boost headers instead of "-I" - # to avoid compiler warnings/failures when "-Werror" is used - - # Careful not to use "-isystem" on default include dirs as it - # breaks some of the headers for certain gcc versions - - # For example, doing g++ -isystem /usr/include on a simple - # "int main()" source results in the error: - # "/usr/include/c++/6.3.1/cstdlib:75:25: fatal error: stdlib.h: No such file or directory" - - # See https://gcc.gnu.org/bugzilla/show_bug.cgi?id=70129 - # and http://stackoverflow.com/questions/37218953/isystem-on-a-system-include-directory-causes-errors - # for more details - - if include_dir and include_dir not in self.compiler.get_default_include_dirs(): - args.append("".join(self.compiler.get_include_args(include_dir, True))) - return args - - def get_requested(self, kwargs): - candidates = mesonlib.extract_as_list(kwargs, 'modules') - for c in candidates: - if not isinstance(c, str): - raise DependencyException('Boost module argument is not a string.') - return candidates - - def detect_version(self): - try: - version = self.compiler.get_define('BOOST_LIB_VERSION', '#include <boost/version.hpp>', self.env, self.get_compile_args(), []) - except mesonlib.EnvironmentException: - return - except TypeError: - return - # Remove quotes - version = version[1:-1] - # Fix version string - self.version = version.replace('_', '.') - self.is_found = True - - def 
detect_lib_modules(self): - if mesonlib.is_windows(): - return self.detect_lib_modules_win() - return self.detect_lib_modules_nix() - - def modname_from_filename(self, filename): - modname = os.path.basename(filename) - modname = modname.split('.', 1)[0] - modname = modname.split('-', 1)[0] - if modname.startswith('libboost'): - modname = modname[3:] - return modname - - def detect_lib_modules_win(self): - arch = detect_cpu_family(self.env.coredata.compilers) - comp_ts_version = self.env.detect_cpp_compiler(self.want_cross).get_toolset_version() - compiler_ts = comp_ts_version.split('.') - compiler = 'vc{}{}'.format(compiler_ts[0], compiler_ts[1]) - if not self.libdir: - # The libdirs in the distributed binaries (from sf) - if arch == 'x86': - lib_sf = 'lib32-msvc-{}'.format(comp_ts_version) - elif arch == 'x86_64': - lib_sf = 'lib64-msvc-{}'.format(comp_ts_version) - else: - # Does anyone do Boost cross-compiling to other archs on Windows? - lib_sf = None - if self.boost_root: - roots = [self.boost_root] - else: - roots = self.boost_roots - for root in roots: - # The default libdir when building - libdir = os.path.join(root, 'lib') - if os.path.isdir(libdir): - self.libdir = libdir - break - if lib_sf: - full_path = os.path.join(root, lib_sf) - if os.path.isdir(full_path): - self.libdir = full_path - break - - if not self.libdir: - return - - for name in self.need_static_link: - libname = "lib{}".format(name) + '-' + compiler - if self.is_multithreading: - libname = libname + '-mt' - if self.is_debug: - libname = libname + '-gd' - libname = libname + "-{}.lib".format(self.version.replace('.', '_')) - if os.path.isfile(os.path.join(self.libdir, libname)): - self.lib_modules[self.modname_from_filename(libname)] = [libname] - else: - libname = "lib{}.lib".format(name) - if os.path.isfile(os.path.join(self.libdir, libname)): - self.lib_modules[name[3:]] = [libname] - - # globber1 applies to a layout=system installation - # globber2 applies to a layout=versioned installation - globber1 = 'libboost_*' if self.static else 'boost_*' - globber2 = globber1 + '-' + compiler - if self.is_multithreading: - globber2 = globber2 + '-mt' - if self.is_debug: - globber2 = globber2 + '-gd' - globber2 = globber2 + '-{}'.format(self.version.replace('.', '_')) - globber2_matches = glob.glob(os.path.join(self.libdir, globber2 + '.lib')) - for entry in globber2_matches: - fname = os.path.basename(entry) - self.lib_modules[self.modname_from_filename(fname)] = [fname] - if len(globber2_matches) == 0: - for entry in glob.glob(os.path.join(self.libdir, globber1 + '.lib')): - if self.static: - fname = os.path.basename(entry) - self.lib_modules[self.modname_from_filename(fname)] = [fname] - - # - Linux leaves off -mt but libraries are multithreading-aware. - # - Cygwin leaves off -mt but libraries are multithreading-aware. - # - Mac requires -mt for multithreading, so should not fall back - # to non-mt libraries. 
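A compressed contrast between the code being removed here and its replacement in boost.py above: the removed detection derived a single ABI tag per platform, while the new find_libraries_with_abi_tags tries a list of candidate tags in order and takes the first under which every requested module is found (both fall back to directory globbing otherwise). Sketch only; old_style, new_style and the sample data are invented:

    def old_style(find, modules, tag):
        # single tag, all-or-nothing
        return all(find('boost_' + m + tag) for m in modules)

    def new_style(find, modules, tags):
        # try tags in order, first complete match wins
        for tag in tags:
            if all(find('boost_' + m + tag) for m in modules):
                return tag
        return None

    available = {'boost_filesystem-mt', 'boost_system-mt'}
    find = lambda name: name in available
    print(old_style(find, ['filesystem', 'system'], ''))           # False
    print(new_style(find, ['filesystem', 'system'], ['-mt', '']))  # '-mt'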
- def abi_tag(self): - if mesonlib.for_windows(self.want_cross, self.env): - return None - if self.is_multithreading and mesonlib.for_darwin(self.want_cross, self.env): - return '-mt' - else: - return '' - - def detect_lib_modules_nix(self): - all_found = True - for module in self.requested_modules: - libname = 'boost_' + module + self.abi_tag() - - args = self.compiler.find_library(libname, self.env, self.extra_lib_dirs()) - if args is None: - mlog.debug('Couldn\'t find library "{}" for boost module "{}"'.format(module, libname)) - all_found = False - else: - mlog.debug('Link args for boost module "{}" are {}'.format(module, args)) - self.lib_modules['boost_' + module] = args - if all_found: - return - - if self.static: - libsuffix = 'a' - elif mesonlib.is_osx() and not self.want_cross: - libsuffix = 'dylib' - else: - libsuffix = 'so' - - globber = 'libboost_*.{}'.format(libsuffix) - if self.libdir: - libdirs = [self.libdir] - elif self.boost_root is None: - libdirs = mesonlib.get_library_dirs() - else: - libdirs = [os.path.join(self.boost_root, 'lib')] - for libdir in libdirs: - for name in self.need_static_link: - libname = 'lib{}.a'.format(name) - if os.path.isfile(os.path.join(libdir, libname)): - self.lib_modules[name] = [libname] - for entry in glob.glob(os.path.join(libdir, globber)): - # I'm not 100% sure what to do here. Some distros - # have modules such as thread only as -mt versions. - # On debian all packages are built threading=multi - # but not suffixed with -mt. - # FIXME: implement detect_lib_modules_{debian, redhat, ...} - # FIXME: this wouldn't work with -mt-gd either. -BDR - if self.is_multithreading and mesonlib.is_debianlike(): - pass - elif self.is_multithreading and entry.endswith('-mt.{}'.format(libsuffix)): - pass - elif not entry.endswith('-mt.{}'.format(libsuffix)): - pass - else: - continue - modname = self.modname_from_filename(entry) - if modname not in self.lib_modules: - self.lib_modules[modname] = [entry] - - def extra_lib_dirs(self): - if self.libdir: - return [self.libdir] - elif self.boost_root: - return [os.path.join(self.boost_root, 'lib')] - return [] - - def get_link_args(self): - args = [] - for dir in self.extra_lib_dirs(): - args += self.compiler.get_linker_search_args(self.libdir) - for lib in self.requested_modules: - args += self.lib_modules['boost_' + lib] - return args - - def get_sources(self): - return [] - - def need_threads(self): - return 'thread' in self.requested_modules - class MPIDependency(ExternalDependency): def __init__(self, environment, kwargs): @@ -795,7 +373,8 @@ class Python3Dependency(ExternalDependency): self.version = sysconfig.get_config_var('py_version') self.is_found = True - def get_methods(self): + @staticmethod + def get_methods(): if mesonlib.is_windows(): return [DependencyMethods.PKGCONFIG, DependencyMethods.SYSCONFIG] elif mesonlib.is_osx(): @@ -813,90 +392,80 @@ class Python3Dependency(ExternalDependency): class PcapDependency(ExternalDependency): def __init__(self, environment, kwargs): super().__init__('pcap', environment, None, kwargs) - kwargs['required'] = False - if DependencyMethods.PKGCONFIG in self.methods: + + @classmethod + def _factory(cls, environment, kwargs): + methods = cls._process_method_kw(kwargs) + if DependencyMethods.PKGCONFIG in methods: try: pcdep = PkgConfigDependency('pcap', environment, kwargs) if pcdep.found(): - self.type_name = 'pkgconfig' - self.is_found = True - self.compile_args = pcdep.get_compile_args() - self.link_args = pcdep.get_link_args() - self.version = 
pcdep.get_version() - self.pcdep = pcdep - return + return pcdep except Exception as e: mlog.debug('Pcap not found via pkgconfig. Trying next, error was:', str(e)) - if DependencyMethods.CONFIG_TOOL in self.methods: + if DependencyMethods.CONFIG_TOOL in methods: try: ctdep = ConfigToolDependency.factory( 'pcap', environment, None, kwargs, ['pcap-config'], 'pcap-config') if ctdep.found(): - self.config = ctdep.config - self.type_name = 'config-tool' - self.compile_args = ctdep.get_config_value(['--cflags'], 'compile_args') - self.link_args = ctdep.get_config_value(['--libs'], 'link_args') - self.version = self.get_pcap_lib_version() - self.is_found = True - return + ctdep.compile_args = ctdep.get_config_value(['--cflags'], 'compile_args') + ctdep.link_args = ctdep.get_config_value(['--libs'], 'link_args') + ctdep.version = cls.get_pcap_lib_version(ctdep) + return ctdep except Exception as e: mlog.debug('Pcap not found via pcap-config. Trying next, error was:', str(e)) - def get_methods(self): + return PcapDependency(environment, kwargs) + + @staticmethod + def get_methods(): if mesonlib.is_osx(): return [DependencyMethods.PKGCONFIG, DependencyMethods.CONFIG_TOOL, DependencyMethods.EXTRAFRAMEWORK] else: return [DependencyMethods.PKGCONFIG, DependencyMethods.CONFIG_TOOL] - def get_pcap_lib_version(self): - return self.compiler.get_return_value('pcap_lib_version', 'string', - '#include <pcap.h>', self.env, [], [self]) + @staticmethod + def get_pcap_lib_version(ctdep): + return ctdep.compiler.get_return_value('pcap_lib_version', 'string', + '#include <pcap.h>', ctdep.env, [], [ctdep]) class CupsDependency(ExternalDependency): def __init__(self, environment, kwargs): super().__init__('cups', environment, None, kwargs) - kwargs['required'] = False - if DependencyMethods.PKGCONFIG in self.methods: + + @classmethod + def _factory(cls, environment, kwargs): + methods = cls._process_method_kw(kwargs) + if DependencyMethods.PKGCONFIG in methods: try: pcdep = PkgConfigDependency('cups', environment, kwargs) if pcdep.found(): - self.type_name = 'pkgconfig' - self.is_found = True - self.compile_args = pcdep.get_compile_args() - self.link_args = pcdep.get_link_args() - self.version = pcdep.get_version() - self.pcdep = pcdep - return + return pcdep except Exception as e: mlog.debug('cups not found via pkgconfig. Trying next, error was:', str(e)) - if DependencyMethods.CONFIG_TOOL in self.methods: + if DependencyMethods.CONFIG_TOOL in methods: try: ctdep = ConfigToolDependency.factory( 'cups', environment, None, kwargs, ['cups-config'], 'cups-config') if ctdep.found(): - self.config = ctdep.config - self.type_name = 'config-tool' - self.version = ctdep.version - self.compile_args = ctdep.get_config_value(['--cflags'], 'compile_args') - self.link_args = ctdep.get_config_value(['--ldflags', '--libs'], 'link_args') - self.is_found = True - return + ctdep.compile_args = ctdep.get_config_value(['--cflags'], 'compile_args') + ctdep.link_args = ctdep.get_config_value(['--ldflags', '--libs'], 'link_args') + return ctdep except Exception as e: mlog.debug('cups not found via cups-config. 
Trying next, error was:', str(e)) - if DependencyMethods.EXTRAFRAMEWORK in self.methods: + if DependencyMethods.EXTRAFRAMEWORK in methods: if mesonlib.is_osx(): - fwdep = ExtraFrameworkDependency('cups', False, None, self.env, - self.language, kwargs) + fwdep = ExtraFrameworkDependency('cups', False, None, environment, + kwargs.get('language', None), kwargs) if fwdep.found(): - self.is_found = True - self.compile_args = fwdep.get_compile_args() - self.link_args = fwdep.get_link_args() - self.version = fwdep.get_version() - return - mlog.log('Dependency', mlog.bold('cups'), 'found:', mlog.red('NO')) + return fwdep + mlog.log('Dependency', mlog.bold('cups'), 'found:', mlog.red('NO')) + + return CupsDependency(environment, kwargs) - def get_methods(self): + @staticmethod + def get_methods(): if mesonlib.is_osx(): return [DependencyMethods.PKGCONFIG, DependencyMethods.CONFIG_TOOL, DependencyMethods.EXTRAFRAMEWORK] else: @@ -906,231 +475,34 @@ class CupsDependency(ExternalDependency): class LibWmfDependency(ExternalDependency): def __init__(self, environment, kwargs): super().__init__('libwmf', environment, None, kwargs) - if DependencyMethods.PKGCONFIG in self.methods: + + @classmethod + def _factory(cls, environment, kwargs): + methods = cls._process_method_kw(kwargs) + if DependencyMethods.PKGCONFIG in methods: try: kwargs['required'] = False pcdep = PkgConfigDependency('libwmf', environment, kwargs) if pcdep.found(): - self.type_name = 'pkgconfig' - self.is_found = True - self.compile_args = pcdep.get_compile_args() - self.link_args = pcdep.get_link_args() - self.version = pcdep.get_version() - self.pcdep = pcdep - return + return pcdep except Exception as e: mlog.debug('LibWmf not found via pkgconfig. Trying next, error was:', str(e)) - if DependencyMethods.CONFIG_TOOL in self.methods: + if DependencyMethods.CONFIG_TOOL in methods: try: ctdep = ConfigToolDependency.factory( 'libwmf', environment, None, kwargs, ['libwmf-config'], 'libwmf-config') if ctdep.found(): - self.config = ctdep.config - self.type_name = 'config-too' - self.version = ctdep.version - self.compile_args = ctdep.get_config_value(['--cflags'], 'compile_args') - self.link_args = ctdep.get_config_value(['--libs'], 'link_args') - self.is_found = True - return + ctdep.compile_args = ctdep.get_config_value(['--cflags'], 'compile_args') + ctdep.link_args = ctdep.get_config_value(['--libs'], 'link_args') + return ctdep except Exception as e: mlog.debug('cups not found via libwmf-config. 
Trying next, error was:', str(e)) - def get_methods(self): + return LibWmfDependency(environment, kwargs) + + @staticmethod + def get_methods(): if mesonlib.is_osx(): return [DependencyMethods.PKGCONFIG, DependencyMethods.CONFIG_TOOL, DependencyMethods.EXTRAFRAMEWORK] else: return [DependencyMethods.PKGCONFIG, DependencyMethods.CONFIG_TOOL] - - -# Generated with boost_names.py -BOOST_LIBS = [ - 'boost_atomic', - 'boost_chrono', - 'boost_chrono', - 'boost_container', - 'boost_context', - 'boost_coroutine', - 'boost_date_time', - 'boost_exception', - 'boost_fiber', - 'boost_filesystem', - 'boost_graph', - 'boost_iostreams', - 'boost_locale', - 'boost_log', - 'boost_log_setup', - 'boost_math_tr1', - 'boost_math_tr1f', - 'boost_math_tr1l', - 'boost_math_c99', - 'boost_math_c99f', - 'boost_math_c99l', - 'boost_math_tr1', - 'boost_math_tr1f', - 'boost_math_tr1l', - 'boost_math_c99', - 'boost_math_c99f', - 'boost_math_c99l', - 'boost_math_tr1', - 'boost_math_tr1f', - 'boost_math_tr1l', - 'boost_math_c99', - 'boost_math_c99f', - 'boost_math_c99l', - 'boost_math_tr1', - 'boost_math_tr1f', - 'boost_math_tr1l', - 'boost_math_c99', - 'boost_math_c99f', - 'boost_math_c99l', - 'boost_math_tr1', - 'boost_math_tr1f', - 'boost_math_tr1l', - 'boost_math_c99', - 'boost_math_c99f', - 'boost_math_c99l', - 'boost_math_tr1', - 'boost_math_tr1f', - 'boost_math_tr1l', - 'boost_math_c99', - 'boost_math_c99f', - 'boost_math_c99l', - 'boost_mpi', - 'boost_program_options', - 'boost_python', - 'boost_python3', - 'boost_numpy', - 'boost_numpy3', - 'boost_random', - 'boost_regex', - 'boost_serialization', - 'boost_wserialization', - 'boost_signals', - 'boost_stacktrace_noop', - 'boost_stacktrace_backtrace', - 'boost_stacktrace_addr2line', - 'boost_stacktrace_basic', - 'boost_stacktrace_windbg', - 'boost_stacktrace_windbg_cached', - 'boost_system', - 'boost_prg_exec_monitor', - 'boost_test_exec_monitor', - 'boost_unit_test_framework', - 'boost_thread', - 'boost_timer', - 'boost_type_erasure', - 'boost_wave' -] - -BOOST_DIRS = [ - 'lambda', - 'optional', - 'convert', - 'system', - 'uuid', - 'archive', - 'align', - 'timer', - 'chrono', - 'gil', - 'logic', - 'signals', - 'predef', - 'tr1', - 'multi_index', - 'property_map', - 'multi_array', - 'context', - 'random', - 'endian', - 'circular_buffer', - 'proto', - 'assign', - 'format', - 'math', - 'phoenix', - 'graph', - 'locale', - 'mpl', - 'pool', - 'unordered', - 'core', - 'exception', - 'ptr_container', - 'flyweight', - 'range', - 'typeof', - 'thread', - 'move', - 'spirit', - 'dll', - 'compute', - 'serialization', - 'ratio', - 'msm', - 'config', - 'metaparse', - 'coroutine2', - 'qvm', - 'program_options', - 'concept', - 'detail', - 'hana', - 'concept_check', - 'compatibility', - 'variant', - 'type_erasure', - 'mpi', - 'test', - 'fusion', - 'log', - 'sort', - 'local_function', - 'units', - 'functional', - 'preprocessor', - 'integer', - 'container', - 'polygon', - 'interprocess', - 'numeric', - 'iterator', - 'wave', - 'lexical_cast', - 'multiprecision', - 'utility', - 'tti', - 'asio', - 'dynamic_bitset', - 'algorithm', - 'xpressive', - 'bimap', - 'signals2', - 'type_traits', - 'regex', - 'statechart', - 'parameter', - 'icl', - 'python', - 'lockfree', - 'intrusive', - 'io', - 'pending', - 'geometry', - 'tuple', - 'iostreams', - 'heap', - 'atomic', - 'filesystem', - 'smart_ptr', - 'function', - 'fiber', - 'type_index', - 'accumulators', - 'function_types', - 'coroutine', - 'vmd', - 'date_time', - 'property_tree', - 'bind' -] diff --git a/mesonbuild/dependencies/ui.py 
b/mesonbuild/dependencies/ui.py index c066c31..2f31196 100644 --- a/mesonbuild/dependencies/ui.py +++ b/mesonbuild/dependencies/ui.py @@ -17,14 +17,13 @@ import os import re -import shutil import subprocess from collections import OrderedDict from .. import mlog from .. import mesonlib from ..mesonlib import ( - MesonException, Popen_safe, extract_as_list, for_windows, + MesonException, Popen_safe, extract_as_list, for_windows, for_cygwin, version_compare_many ) from ..environment import detect_cpu @@ -38,19 +37,6 @@ from .base import ConfigToolDependency class GLDependency(ExternalDependency): def __init__(self, environment, kwargs): super().__init__('gl', environment, None, kwargs) - if DependencyMethods.PKGCONFIG in self.methods: - try: - pcdep = PkgConfigDependency('gl', environment, kwargs) - if pcdep.found(): - self.type_name = 'pkgconfig' - self.is_found = True - self.compile_args = pcdep.get_compile_args() - self.link_args = pcdep.get_link_args() - self.version = pcdep.get_version() - self.pcdep = pcdep - return - except Exception: - pass if DependencyMethods.SYSTEM in self.methods: if mesonlib.is_osx(): self.is_found = True @@ -67,7 +53,19 @@ class GLDependency(ExternalDependency): self.version = '1' return - def get_methods(self): + @classmethod + def _factory(cls, environment, kwargs): + if DependencyMethods.PKGCONFIG in cls._process_method_kw(kwargs): + try: + pcdep = PkgConfigDependency('gl', environment, kwargs) + if pcdep.found(): + return pcdep + except Exception: + pass + return GLDependency(environment, kwargs) + + @staticmethod + def get_methods(): if mesonlib.is_osx() or mesonlib.is_windows(): return [DependencyMethods.PKGCONFIG, DependencyMethods.SYSTEM] else: @@ -283,10 +281,15 @@ class QtBaseDependency(ExternalDependency): (k, v) = tuple(line.split(':', 1)) qvars[k] = v if mesonlib.is_osx(): - return self._framework_detect(qvars, mods, kwargs) + self._framework_detect(qvars, mods, kwargs) + return qmake incdir = qvars['QT_INSTALL_HEADERS'] self.compile_args.append('-I' + incdir) libdir = qvars['QT_INSTALL_LIBS'] + if for_cygwin(self.env.is_cross_build(), self.env): + shlibext = '.dll.a' + else: + shlibext = '.so' # Used by self.compilers_detect() self.bindir = self.get_qmake_host_bins(qvars) self.is_found = True @@ -308,7 +311,7 @@ class QtBaseDependency(ExternalDependency): self.is_found = False break else: - libfile = os.path.join(libdir, 'lib{}{}.so'.format(self.qtpkgname, module)) + libfile = os.path.join(libdir, 'lib{}{}{}'.format(self.qtpkgname, module, shlibext)) if not os.path.isfile(libfile): self.is_found = False break @@ -317,15 +320,23 @@ class QtBaseDependency(ExternalDependency): def _framework_detect(self, qvars, modules, kwargs): libdir = qvars['QT_INSTALL_LIBS'] + + # ExtraFrameworkDependency doesn't support any methods + fw_kwargs = kwargs.copy() + fw_kwargs.pop('method', None) + for m in modules: fname = 'Qt' + m fwdep = ExtraFrameworkDependency(fname, False, libdir, self.env, - self.language, kwargs) + self.language, fw_kwargs) self.compile_args.append('-F' + libdir) if fwdep.found(): - self.is_found = True self.compile_args += fwdep.get_compile_args() self.link_args += fwdep.get_link_args() + else: + break + else: + self.is_found = True # Used by self.compilers_detect() self.bindir = self.get_qmake_host_bins(qvars) @@ -337,7 +348,8 @@ class QtBaseDependency(ExternalDependency): else: return qvars['QT_INSTALL_BINS'] - def get_methods(self): + @staticmethod + def get_methods(): return [DependencyMethods.PKGCONFIG, DependencyMethods.QMAKE] def 
get_exe_args(self, compiler): @@ -380,47 +392,40 @@ class Qt5Dependency(QtBaseDependency): class SDL2Dependency(ExternalDependency): def __init__(self, environment, kwargs): super().__init__('sdl2', environment, None, kwargs) - kwargs['required'] = False - if DependencyMethods.PKGCONFIG in self.methods: + + @classmethod + def _factory(cls, environment, kwargs): + methods = cls._process_method_kw(kwargs) + if DependencyMethods.PKGCONFIG in methods: try: pcdep = PkgConfigDependency('sdl2', environment, kwargs) if pcdep.found(): - self.type_name = 'pkgconfig' - self.is_found = True - self.compile_args = pcdep.get_compile_args() - self.link_args = pcdep.get_link_args() - self.version = pcdep.get_version() - self.pcdep = pcdep - return + return pcdep except Exception as e: mlog.debug('SDL 2 not found via pkgconfig. Trying next, error was:', str(e)) - if DependencyMethods.CONFIG_TOOL in self.methods: + if DependencyMethods.CONFIG_TOOL in methods: try: ctdep = ConfigToolDependency.factory( 'sdl2', environment, None, kwargs, ['sdl2-config'], 'sdl2-config') if ctdep.found(): - self.type_name = 'config-tool' - self.config = ctdep.config - self.version = ctdep.version - self.compile_args = ctdep.get_config_value(['--cflags'], 'compile_args') - self.links_args = ctdep.get_config_value(['--libs'], 'link_args') - self.is_found = True - return + ctdep.compile_args = ctdep.get_config_value(['--cflags'], 'compile_args') + ctdep.links_args = ctdep.get_config_value(['--libs'], 'link_args') + return ctdep except Exception as e: mlog.debug('SDL 2 not found via sdl2-config. Trying next, error was:', str(e)) - if DependencyMethods.EXTRAFRAMEWORK in self.methods: + if DependencyMethods.EXTRAFRAMEWORK in methods: if mesonlib.is_osx(): - fwdep = ExtraFrameworkDependency('sdl2', False, None, self.env, - self.language, kwargs) + fwdep = ExtraFrameworkDependency('sdl2', False, None, environment, + kwargs.get('language', None), kwargs) if fwdep.found(): - self.is_found = True - self.compile_args = fwdep.get_compile_args() - self.link_args = fwdep.get_link_args() - self.version = '2' # FIXME - return + fwdep.version = '2' # FIXME + return fwdep mlog.log('Dependency', mlog.bold('sdl2'), 'found:', mlog.red('NO')) - def get_methods(self): + return SDL2Dependency(environment, kwargs) + + @staticmethod + def get_methods(): if mesonlib.is_osx(): return [DependencyMethods.PKGCONFIG, DependencyMethods.CONFIG_TOOL, DependencyMethods.EXTRAFRAMEWORK] else: @@ -439,8 +444,8 @@ class WxDependency(ConfigToolDependency): self.requested_modules = self.get_requested(kwargs) # wx-config seems to have a cflags as well but since it requires C++, # this should be good, at least for now. 
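The same _factory shape recurs above for pcap, cups, libwmf, gl and sdl2 (and for vulkan further below), and it is what the getattr(..., '_factory', None) dispatch in find_external_dependency earlier keys on: the classmethod tries the cheaper discovery methods and may hand back a completely different dependency object, constructing the class itself only as a not-found fallback. A compressed, non-authoritative picture; ExampleDependency, try_pkgconfig and try_config_tool are invented stand-ins:

    class ExampleDependency:
        def __init__(self, environment, kwargs):
            self.is_found = False   # plain construction acts as "not found"

        @classmethod
        def _factory(cls, environment, kwargs):
            for attempt in (try_pkgconfig, try_config_tool):
                dep = attempt('example', environment, kwargs)
                if dep is not None and dep.is_found:
                    return dep      # caller receives the pkg-config/config-tool object
            return cls(environment, kwargs)

    def try_pkgconfig(name, environment, kwargs):
        return None   # stand-in for constructing a PkgConfigDependency

    def try_config_tool(name, environment, kwargs):
        return None   # stand-in for constructing a ConfigToolDependency

    print(ExampleDependency._factory(None, {}).is_found)   # False

One consequence visible in the hunks above is that per-method state (compile_args, link_args, version) is now set on the returned object rather than copied onto self.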
- self.compile_args = self.get_config_value(['--cxxflags'], 'compile_args') - self.link_args = self.get_config_value(['--libs'], 'link_args') + self.compile_args = self.get_config_value(['--cxxflags'] + self.requested_modules, 'compile_args') + self.link_args = self.get_config_value(['--libs'] + self.requested_modules, 'link_args') def get_requested(self, kwargs): if 'modules' not in kwargs: @@ -456,20 +461,6 @@ class VulkanDependency(ExternalDependency): def __init__(self, environment, kwargs): super().__init__('vulkan', environment, None, kwargs) - if DependencyMethods.PKGCONFIG in self.methods: - try: - pcdep = PkgConfigDependency('vulkan', environment, kwargs) - if pcdep.found(): - self.type_name = 'pkgconfig' - self.is_found = True - self.compile_args = pcdep.get_compile_args() - self.link_args = pcdep.get_link_args() - self.version = pcdep.get_version() - self.pcdep = pcdep - return - except Exception: - pass - if DependencyMethods.SYSTEM in self.methods: try: self.vulkan_sdk = os.environ['VULKAN_SDK'] @@ -526,5 +517,18 @@ class VulkanDependency(ExternalDependency): self.link_args.append(lib) return - def get_methods(self): + @classmethod + def _factory(cls, environment, kwargs): + if DependencyMethods.PKGCONFIG in cls._process_method_kw(kwargs): + try: + pcdep = PkgConfigDependency('vulkan', environment, kwargs) + if pcdep.found(): + return pcdep + except Exception: + pass + + return VulkanDependency(environment, kwargs) + + @staticmethod + def get_methods(): return [DependencyMethods.PKGCONFIG, DependencyMethods.SYSTEM] diff --git a/mesonbuild/environment.py b/mesonbuild/environment.py index 52c670a..ff7c706 100644 --- a/mesonbuild/environment.py +++ b/mesonbuild/environment.py @@ -19,7 +19,6 @@ from .linkers import ArLinker, VisualStudioLinker from . import mesonlib from .mesonlib import EnvironmentException, Popen_safe from . import mlog -import sys from . 
import compilers from .compilers import ( @@ -54,6 +53,7 @@ from .compilers import ( IntelFortranCompiler, JavaCompiler, MonoCompiler, + VisualStudioCsCompiler, NAGFortranCompiler, Open64FortranCompiler, PathScaleFortranCompiler, @@ -76,19 +76,32 @@ cflags_mapping = {'c': 'CFLAGS', 'd': 'DFLAGS', 'vala': 'VALAFLAGS'} +def detect_gcovr(version='3.1', log=False): + gcovr_exe = 'gcovr' + try: + p, found = Popen_safe([gcovr_exe, '--version'])[0:2] + except (FileNotFoundError, PermissionError): + # Doesn't exist in PATH or isn't executable + return None, None + found = search_version(found) + if p.returncode == 0: + if log: + mlog.log('Found gcovr-{} at {}'.format(found, shlex.quote(shutil.which(gcovr_exe)))) + return gcovr_exe, mesonlib.version_compare(found, '>=' + version) + return None, None def find_coverage_tools(): - gcovr_exe = 'gcovr' + gcovr_exe, gcovr_new_rootdir = detect_gcovr() + lcov_exe = 'lcov' genhtml_exe = 'genhtml' - if not mesonlib.exe_exists([gcovr_exe, '--version']): - gcovr_exe = None if not mesonlib.exe_exists([lcov_exe, '--version']): lcov_exe = None if not mesonlib.exe_exists([genhtml_exe, '--version']): genhtml_exe = None - return gcovr_exe, lcov_exe, genhtml_exe + + return gcovr_exe, gcovr_new_rootdir, lcov_exe, genhtml_exe def detect_ninja(version='1.5', log=False): for n in ['ninja', 'ninja-build']: @@ -251,8 +264,7 @@ class Environment: os.makedirs(self.scratch_dir, exist_ok=True) os.makedirs(self.log_dir, exist_ok=True) try: - cdf = os.path.join(self.get_build_dir(), Environment.coredata_file) - self.coredata = coredata.load(cdf) + self.coredata = coredata.load(self.get_build_dir()) self.first_invocation = False except FileNotFoundError: # WARNING: Don't use any values from coredata in __init__. It gets @@ -275,6 +287,10 @@ class Environment: else: self.default_c = ['cc', 'gcc', 'clang'] self.default_cpp = ['c++', 'g++', 'clang++'] + if mesonlib.is_windows(): + self.default_cs = ['csc', 'mcs'] + else: + self.default_cs = ['mcs', 'csc'] self.default_objc = ['cc'] self.default_objcpp = ['c++'] self.default_fortran = ['gfortran', 'g95', 'f95', 'f90', 'f77', 'ifort'] @@ -311,9 +327,8 @@ class Environment: return self.cross_info is not None def dump_coredata(self): - cdf = os.path.join(self.get_build_dir(), Environment.coredata_file) - coredata.save(self.coredata, cdf) - return cdf + coredata.save(self.coredata, self.get_build_dir()) + return os.path.join(self.get_build_dir(), Environment.coredata_file) def get_script_dir(self): import mesonbuild.scripts @@ -419,7 +434,7 @@ class Environment: def _get_compilers(self, lang, evar, want_cross): ''' The list of compilers is detected in the exact same way for - C, C++, ObjC, ObjC++, Fortran so consolidate it here. + C, C++, ObjC, ObjC++, Fortran, CS so consolidate it here. 
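The new detect_gcovr helper earlier in this hunk follows a common probe pattern: run the tool with --version, pull a version out of the output, and compare it against a minimum. A self-contained approximation; the regex is a crude stand-in for Meson's search_version, and probe_tool_version is an invented name:

    import re
    import subprocess

    def probe_tool_version(exe, minimum):
        try:
            out = subprocess.run([exe, '--version'], capture_output=True,
                                 text=True, check=False).stdout
        except (FileNotFoundError, PermissionError):
            return None, False          # not installed or not executable
        m = re.search(r'\d+(\.\d+)+', out)
        if m is None:
            return exe, False
        found = tuple(int(p) for p in m.group(0).split('.'))
        wanted = tuple(int(p) for p in minimum.split('.'))
        return exe, found >= wanted     # tool name plus "new enough?" flag

    print(probe_tool_version('gcovr', '3.1'))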
''' if self.is_cross_build() and want_cross: compilers = mesonlib.stringlistify(self.cross_info.config['binaries'][lang]) @@ -664,16 +679,24 @@ class Environment: raise EnvironmentException('Unknown compiler "' + ' '.join(exelist) + '"') def detect_cs_compiler(self): - exelist = ['mcs'] - try: - p, out, err = Popen_safe(exelist + ['--version']) - except OSError: - raise EnvironmentException('Could not execute C# compiler "%s"' % ' '.join(exelist)) - version = search_version(out) - full_version = out.split('\n', 1)[0] - if 'Mono' in out: - return MonoCompiler(exelist, version, full_version=full_version) - raise EnvironmentException('Unknown compiler "' + ' '.join(exelist) + '"') + compilers, ccache, is_cross, exe_wrap = self._get_compilers('cs', 'CSC', False) + popen_exceptions = {} + for comp in compilers: + if not isinstance(comp, list): + comp = [comp] + try: + p, out, err = Popen_safe(comp + ['--version']) + except OSError as e: + popen_exceptions[' '.join(comp + ['--version'])] = e + continue + + version = search_version(out) + if 'Mono' in out: + return MonoCompiler(comp, version) + elif "Visual C#" in out: + return VisualStudioCsCompiler(comp, version) + + self._handle_exceptions(popen_exceptions, compilers) def detect_vala_compiler(self): if 'VALAC' in os.environ: @@ -695,10 +718,11 @@ class Environment: for compiler in compilers: if isinstance(compiler, str): compiler = [compiler] + arg = ['--version'] try: - p, out = Popen_safe(compiler + ['--version'])[0:2] + p, out = Popen_safe(compiler + arg)[0:2] except OSError as e: - popen_exceptions[compiler] = e + popen_exceptions[' '.join(compiler + arg)] = e continue version = search_version(out) diff --git a/mesonbuild/interpreter.py b/mesonbuild/interpreter.py index 31d7616..c87a49b 100644 --- a/mesonbuild/interpreter.py +++ b/mesonbuild/interpreter.py @@ -21,11 +21,11 @@ from . import optinterpreter from . import compilers from .wrap import wrap, WrapMode from . 
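detect_cs_compiler() above now walks a list of candidates and picks the compiler class from the --version output ('Mono' vs 'Visual C#'). A simplified sketch of that classification loop, returning plain labels instead of Meson compiler objects:

import subprocess

def classify_cs_compiler(candidates=('mcs', 'csc')):
    # The hunk adds default_cs = ['csc', 'mcs'] on Windows and
    # ['mcs', 'csc'] elsewhere; the order here mirrors the latter.
    for exe in candidates:
        try:
            out = subprocess.run([exe, '--version'],
                                 capture_output=True, text=True).stdout
        except OSError:
            continue  # candidate missing or not executable
        if 'Mono' in out:
            return exe, 'mono'
        if 'Visual C#' in out:
            return exe, 'visual-studio'
    return None, None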
import mesonlib -from .mesonlib import FileMode, Popen_safe, listify, extract_as_list +from .mesonlib import FileMode, Popen_safe, listify, extract_as_list, has_path_sep from .dependencies import ExternalProgram from .dependencies import InternalDependency, Dependency, DependencyException from .interpreterbase import InterpreterBase -from .interpreterbase import check_stringlist, noPosargs, noKwargs, stringArgs, permittedKwargs +from .interpreterbase import check_stringlist, noPosargs, noKwargs, stringArgs, permittedKwargs, permittedMethodKwargs from .interpreterbase import InterpreterException, InvalidArguments, InvalidCode from .interpreterbase import InterpreterObject, MutableInterpreterObject, Disabler from .modules import ModuleReturnValue @@ -33,9 +33,11 @@ from .modules import ModuleReturnValue import os, sys, shutil, uuid import re, shlex from collections import namedtuple +from pathlib import PurePath import importlib + def stringifyUserArguments(args): if isinstance(args, list): return '[%s]' % ', '.join([stringifyUserArguments(x) for x in args]) @@ -246,7 +248,7 @@ class ConfigurationDataHolder(MutableInterpreterObject, ObjectHolder): return val def get(self, name): - return self.held_object.values[name] # (val, desc) + return self.held_object.values[name] # (val, desc) def keys(self): return self.held_object.values.keys() @@ -651,10 +653,11 @@ class RunTargetHolder(InterpreterObject, ObjectHolder): return r.format(self.__class__.__name__, h.get_id(), h.command) class Test(InterpreterObject): - def __init__(self, name, suite, exe, is_parallel, cmd_args, env, should_fail, timeout, workdir): + def __init__(self, name, project, suite, exe, is_parallel, cmd_args, env, should_fail, timeout, workdir): InterpreterObject.__init__(self) self.name = name self.suite = suite + self.project_name = project self.exe = exe self.is_parallel = is_parallel self.cmd_args = cmd_args @@ -683,6 +686,8 @@ class SubprojectHolder(InterpreterObject, ObjectHolder): varname = args[0] if not isinstance(varname, str): raise InterpreterException('Get_variable takes a string argument.') + if varname not in self.held_object.variables: + raise InvalidArguments('Requested variable "{0}" not found.'.format(varname)) return self.held_object.variables[varname] class CompilerHolder(InterpreterObject): @@ -715,9 +720,11 @@ class CompilerHolder(InterpreterObject): 'symbols_have_underscore_prefix': self.symbols_have_underscore_prefix_method, }) + @permittedMethodKwargs({}) def version_method(self, args, kwargs): return self.compiler.version + @permittedMethodKwargs({}) def cmd_array_method(self, args, kwargs): return self.compiler.exelist @@ -757,6 +764,11 @@ class CompilerHolder(InterpreterObject): deps = final_deps return deps + @permittedMethodKwargs({ + 'prefix', + 'args', + 'dependencies', + }) def alignment_method(self, args, kwargs): if len(args) != 1: raise InterpreterException('Alignment method takes exactly one positional argument.') @@ -771,6 +783,13 @@ class CompilerHolder(InterpreterObject): mlog.log('Checking for alignment of "', mlog.bold(typename), '": ', result, sep='') return result + @permittedMethodKwargs({ + 'name', + 'no_builtin_args', + 'include_directories', + 'args', + 'dependencies', + }) def run_method(self, args, kwargs): if len(args) != 1: raise InterpreterException('Run method takes exactly one positional argument.') @@ -796,9 +815,11 @@ class CompilerHolder(InterpreterObject): mlog.log('Checking if "', mlog.bold(testname), '" runs: ', h, sep='') return TryRunResultHolder(result) + 
@permittedMethodKwargs({}) def get_id_method(self, args, kwargs): return self.compiler.get_id() + @permittedMethodKwargs({}) def symbols_have_underscore_prefix_method(self, args, kwargs): ''' Check if the compiler prefixes _ (underscore) to global C symbols @@ -806,6 +827,7 @@ class CompilerHolder(InterpreterObject): ''' return self.compiler.symbols_have_underscore_prefix(self.environment) + @permittedMethodKwargs({}) def unittest_args_method(self, args, kwargs): ''' This function is deprecated and should not be used. @@ -813,8 +835,16 @@ class CompilerHolder(InterpreterObject): ''' if not hasattr(self.compiler, 'get_feature_args'): raise InterpreterException('This {} compiler has no feature arguments.'.format(self.compiler.get_display_language())) - return self.compiler.get_feature_args({'unittest': 'true'}) - + build_to_src = os.path.relpath(self.environment.get_source_dir(), self.environment.get_build_dir()) + return self.compiler.get_feature_args({'unittest': 'true'}, build_to_src) + + @permittedMethodKwargs({ + 'prefix', + 'no_builtin_args', + 'include_directories', + 'args', + 'dependencies', + }) def has_member_method(self, args, kwargs): if len(args) != 2: raise InterpreterException('Has_member takes exactly two arguments.') @@ -836,6 +866,13 @@ class CompilerHolder(InterpreterObject): '" has member "', mlog.bold(membername), '": ', hadtxt, sep='') return had + @permittedMethodKwargs({ + 'prefix', + 'no_builtin_args', + 'include_directories', + 'args', + 'dependencies', + }) def has_members_method(self, args, kwargs): check_stringlist(args) typename = args[0] @@ -856,6 +893,13 @@ class CompilerHolder(InterpreterObject): '" has members ', members, ': ', hadtxt, sep='') return had + @permittedMethodKwargs({ + 'prefix', + 'no_builtin_args', + 'include_directories', + 'args', + 'dependencies', + }) def has_function_method(self, args, kwargs): if len(args) != 1: raise InterpreterException('Has_function takes exactly one argument.') @@ -874,6 +918,13 @@ class CompilerHolder(InterpreterObject): mlog.log('Checking for function "', mlog.bold(funcname), '": ', hadtxt, sep='') return had + @permittedMethodKwargs({ + 'prefix', + 'no_builtin_args', + 'include_directories', + 'args', + 'dependencies', + }) def has_type_method(self, args, kwargs): if len(args) != 1: raise InterpreterException('Has_type takes exactly one argument.') @@ -892,6 +943,16 @@ class CompilerHolder(InterpreterObject): mlog.log('Checking for type "', mlog.bold(typename), '": ', hadtxt, sep='') return had + @permittedMethodKwargs({ + 'prefix', + 'low', + 'high', + 'guess', + 'no_builtin_args', + 'include_directories', + 'args', + 'dependencies', + }) def compute_int_method(self, args, kwargs): if len(args) != 1: raise InterpreterException('Compute_int takes exactly one argument.') @@ -915,6 +976,13 @@ class CompilerHolder(InterpreterObject): mlog.log('Computing int of "%s": %d' % (expression, res)) return res + @permittedMethodKwargs({ + 'prefix', + 'no_builtin_args', + 'include_directories', + 'args', + 'dependencies', + }) def sizeof_method(self, args, kwargs): if len(args) != 1: raise InterpreterException('Sizeof takes exactly one argument.') @@ -929,6 +997,13 @@ class CompilerHolder(InterpreterObject): mlog.log('Checking for size of "%s": %d' % (element, esize)) return esize + @permittedMethodKwargs({ + 'prefix', + 'no_builtin_args', + 'include_directories', + 'args', + 'dependencies', + }) def get_define_method(self, args, kwargs): if len(args) != 1: raise InterpreterException('get_define() takes exactly one 
argument.') @@ -943,6 +1018,13 @@ class CompilerHolder(InterpreterObject): mlog.log('Fetching value of define "%s": %s' % (element, value)) return value + @permittedMethodKwargs({ + 'name', + 'no_builtin_args', + 'include_directories', + 'args', + 'dependencies', + }) def compiles_method(self, args, kwargs): if len(args) != 1: raise InterpreterException('compiles method takes exactly one argument.') @@ -966,6 +1048,13 @@ class CompilerHolder(InterpreterObject): mlog.log('Checking if "', mlog.bold(testname), '" compiles: ', h, sep='') return result + @permittedMethodKwargs({ + 'name', + 'no_builtin_args', + 'include_directories', + 'args', + 'dependencies', + }) def links_method(self, args, kwargs): if len(args) != 1: raise InterpreterException('links method takes exactly one argument.') @@ -989,6 +1078,13 @@ class CompilerHolder(InterpreterObject): mlog.log('Checking if "', mlog.bold(testname), '" links: ', h, sep='') return result + @permittedMethodKwargs({ + 'prefix', + 'no_builtin_args', + 'include_directories', + 'args', + 'dependencies', + }) def has_header_method(self, args, kwargs): if len(args) != 1: raise InterpreterException('has_header method takes exactly one argument.') @@ -1007,6 +1103,13 @@ class CompilerHolder(InterpreterObject): mlog.log('Has header "%s":' % hname, h) return haz + @permittedMethodKwargs({ + 'prefix', + 'no_builtin_args', + 'include_directories', + 'args', + 'dependencies', + }) def has_header_symbol_method(self, args, kwargs): if len(args) != 2: raise InterpreterException('has_header_symbol method takes exactly two arguments.') @@ -1026,6 +1129,10 @@ class CompilerHolder(InterpreterObject): mlog.log('Header <{0}> has symbol "{1}":'.format(hname, symbol), h) return haz + @permittedMethodKwargs({ + 'required', + 'dirs', + }) def find_library_method(self, args, kwargs): # TODO add dependencies support? if len(args) != 1: @@ -1047,6 +1154,7 @@ class CompilerHolder(InterpreterObject): self.compiler.language) return ExternalLibraryHolder(lib) + @permittedMethodKwargs({}) def has_argument_method(self, args, kwargs): args = mesonlib.stringlistify(args) if len(args) != 1: @@ -1059,6 +1167,7 @@ class CompilerHolder(InterpreterObject): mlog.log('Compiler for {} supports argument {}:'.format(self.compiler.get_display_language(), args[0]), h) return result + @permittedMethodKwargs({}) def has_multi_arguments_method(self, args, kwargs): args = mesonlib.stringlistify(args) result = self.compiler.has_multi_arguments(args, self.environment) @@ -1072,6 +1181,7 @@ class CompilerHolder(InterpreterObject): h) return result + @permittedMethodKwargs({}) def get_supported_arguments_method(self, args, kwargs): args = mesonlib.stringlistify(args) result = self.compiler.get_supported_arguments(args, self.environment) @@ -1087,6 +1197,7 @@ class CompilerHolder(InterpreterObject): h) return result + @permittedMethodKwargs({}) def first_supported_argument_method(self, args, kwargs): for i in mesonlib.stringlistify(args): if self.compiler.has_argument(i, self.environment): @@ -1306,6 +1417,7 @@ class MesonMain(InterpreterObject): return args[1] raise InterpreterException('Unknown cross property: %s.' 
% propname) + pch_kwargs = set(['c_pch', 'cpp_pch']) lang_arg_kwargs = set([ @@ -1383,12 +1495,12 @@ permitted_kwargs = {'add_global_arguments': {'language'}, 'configure_file': {'input', 'output', 'configuration', 'command', 'install_dir', 'capture', 'install'}, 'custom_target': {'input', 'output', 'command', 'install', 'install_dir', 'build_always', 'capture', 'depends', 'depend_files', 'depfile', 'build_by_default'}, 'dependency': {'default_options', 'fallback', 'language', 'main', 'method', 'modules', 'optional_modules', 'native', 'required', 'static', 'version'}, - 'declare_dependency': {'include_directories', 'link_with', 'sources', 'dependencies', 'compile_args', 'link_args', 'version'}, + 'declare_dependency': {'include_directories', 'link_with', 'sources', 'dependencies', 'compile_args', 'link_args', 'link_whole', 'version'}, 'executable': exe_kwargs, 'find_program': {'required', 'native'}, 'generator': {'arguments', 'output', 'depfile', 'capture', 'preserve_path_from'}, 'include_directories': {'is_system'}, - 'install_data': {'install_dir', 'install_mode', 'sources'}, + 'install_data': {'install_dir', 'install_mode', 'rename', 'sources'}, 'install_headers': {'install_dir', 'subdir'}, 'install_man': {'install_dir'}, 'install_subdir': {'exclude_files', 'exclude_directories', 'install_dir', 'install_mode', 'strip_directory'}, @@ -1501,6 +1613,8 @@ class Interpreter(InterpreterBase): 'test': self.func_test, 'vcs_tag': self.func_vcs_tag, }) + if 'MESON_UNIT_TEST' in os.environ: + self.funcs.update({'exception': self.func_exception}) def holderify(self, item): if isinstance(item, list): @@ -1579,6 +1693,8 @@ class Interpreter(InterpreterBase): self.build.cross_stdlibs[l] = subproj.get_variable_method([depname], {}) except KeyError: pass + except InvalidArguments: + pass @stringArgs @noKwargs @@ -1611,6 +1727,7 @@ class Interpreter(InterpreterBase): raise InterpreterException('Version must be a string.') incs = extract_as_list(kwargs, 'include_directories', unholder=True) libs = extract_as_list(kwargs, 'link_with', unholder=True) + libs_whole = extract_as_list(kwargs, 'link_whole', unholder=True) sources = extract_as_list(kwargs, 'sources') sources = listify(self.source_strings_to_files(sources), unholder=True) deps = extract_as_list(kwargs, 'dependencies', unholder=True) @@ -1630,7 +1747,7 @@ class Interpreter(InterpreterBase): raise InterpreterException('''Entries in "link_with" may only be self-built targets, external dependencies (including libraries) must go to "dependencies".''') dep = dependencies.InternalDependency(version, incs, compile_args, - link_args, libs, sources, final_deps) + link_args, libs, libs_whole, sources, final_deps) return DependencyHolder(dep) @noKwargs @@ -1668,10 +1785,17 @@ external dependencies (including libraries) must go to "dependencies".''') cargs = args[1:] srcdir = self.environment.get_source_dir() builddir = self.environment.get_build_dir() - m = 'must be a string, or the output of find_program(), files(), or ' \ - 'configure_file(); not {!r}' + m = 'must be a string, or the output of find_program(), files() '\ + 'or configure_file(), or a compiler object; not {!r}' if isinstance(cmd, ExternalProgramHolder): cmd = cmd.held_object + elif isinstance(cmd, CompilerHolder): + cmd = cmd.compiler.get_exelist()[0] + prog = ExternalProgram(cmd, silent=True) + if not prog.found(): + raise InterpreterException('Program {!r} not found ' + 'or not executable'.format(cmd)) + cmd = prog else: if isinstance(cmd, mesonlib.File): cmd = cmd.absolute_path(srcdir, 
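do_subproject() above replaces the blanket path-separator error with a series of more specific checks plus a warning. A standalone sketch of those checks, raising ValueError where Meson raises InterpreterException:

import os

def check_subproject_dirname(dirname):
    if dirname == '':
        raise ValueError('Subproject dir name must not be empty.')
    if dirname[0] == '.':
        raise ValueError('Subproject dir name must not start with a period.')
    if '..' in dirname:
        raise ValueError('Subproject name must not contain a ".." path segment.')
    if os.path.isabs(dirname):
        raise ValueError('Subproject name must not be an absolute path.')
    if '/' in dirname or '\\' in dirname:
        print('warning: subproject name has a path separator; '
              'this may cause unexpected behaviour')

check_subproject_dirname('zlib')  # passes silently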
builddir) @@ -1684,7 +1808,14 @@ external dependencies (including libraries) must go to "dependencies".''') raise InterpreterException('Program or command {!r} not found ' 'or not executable'.format(cmd)) cmd = prog - cmd_path = os.path.relpath(cmd.get_path(), start=srcdir) + try: + cmd_path = os.path.relpath(cmd.get_path(), start=srcdir) + except ValueError: + # On Windows a relative path can't be evaluated for + # paths on two different drives (i.e. c:\foo and f:\bar). + # The only thing left to is is to use the original absolute + # path. + cmd_path = cmd.get_path() if not cmd_path.startswith('..') and cmd_path not in self.build_def_files: self.build_def_files.append(cmd_path) expanded_args = [] @@ -1700,7 +1831,7 @@ external dependencies (including libraries) must go to "dependencies".''') for a in expanded_args: if not os.path.isabs(a): a = os.path.join(builddir if in_builddir else srcdir, self.subdir, a) - if os.path.exists(a): + if os.path.isfile(a): a = os.path.relpath(a, start=srcdir) if not a.startswith('..'): if a not in self.build_def_files: @@ -1724,8 +1855,16 @@ external dependencies (including libraries) must go to "dependencies".''') return self.do_subproject(dirname, kwargs) def do_subproject(self, dirname, kwargs): - if '/' in dirname or '\\' in dirname: - raise InterpreterException('Subproject name must not contain a path separator.') + if dirname == '': + raise InterpreterException('Subproject dir name must not be empty.') + if dirname[0] == '.': + raise InterpreterException('Subproject dir name must not start with a period.') + if '..' in dirname: + raise InterpreterException('Subproject name must not contain a ".." path segment.') + if os.path.isabs(dirname): + raise InterpreterException('Subproject name must not be an absolute path.') + if has_path_sep(dirname): + mlog.warning('Subproject name has a path separator. This may cause unexpected behaviour.') if dirname in self.subproject_stack: fullstack = self.subproject_stack + [dirname] incpath = ' => '.join(fullstack) @@ -1737,6 +1876,12 @@ external dependencies (including libraries) must go to "dependencies".''') try: resolved = r.resolve(dirname) except RuntimeError as e: + # if the reason subproject execution failed was because + # the directory doesn't exist, try to give some helpful + # advice if it's a nested subproject that needs + # promotion... + self.print_nested_info(dirname) + msg = 'Subproject directory {!r} does not exist and cannot be downloaded:\n{}' raise InterpreterException(msg.format(os.path.join(self.subproject_dir, dirname), e)) subdir = os.path.join(self.subproject_dir, resolved) @@ -1911,6 +2056,8 @@ to directly access options of other subprojects.''') raise InterpreterException('Subproject_dir must not contain a ".." 
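The try/except around os.path.relpath() above exists because relpath() raises ValueError on Windows when the two paths sit on different drives. A tiny helper showing the same fallback:

import os

def safe_relpath(path, start):
    try:
        return os.path.relpath(path, start=start)
    except ValueError:
        # e.g. path on c:\ and start on f:\ under Windows:
        # keep the original absolute path instead.
        return path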
segment.') self.subproject_dir = spdirname + self.build.subproject_dir = self.subproject_dir + if 'meson_version' in kwargs: cv = coredata.version pv = kwargs['meson_version'] @@ -1962,7 +2109,12 @@ to directly access options of other subprojects.''') @noKwargs def func_error(self, node, args, kwargs): self.validate_arguments(args, 1, [str]) - raise InterpreterException('Error encountered: ' + args[0]) + raise InterpreterException('Problem encountered: ' + args[0]) + + @noKwargs + def func_exception(self, node, args, kwargs): + self.validate_arguments(args, 0, []) + raise Exception() def detect_compilers(self, lang, need_cross_compiler): cross_comp = None @@ -2057,6 +2209,19 @@ to directly access options of other subprojects.''') else: version_string = ' (%s %s)' % (comp.id, comp.version) mlog.log('Native %s compiler: ' % comp.get_display_language(), mlog.bold(' '.join(comp.get_exelist())), version_string, sep='') + + # If <language>_args/_link_args settings are given on the + # command line, use them. + for optspec in self.build.environment.cmd_line_options.projectoptions: + (optname, optvalue) = optspec.split('=', maxsplit=1) + if optname.endswith('_link_args'): + lang = optname[:-10] + self.coredata.external_link_args.setdefault(lang, []).append(optvalue) + elif optname.endswith('_args'): + lang = optname[:-5] + self.coredata.external_args.setdefault(lang, []).append(optvalue) + # Otherwise, look for definitions from environment + # variables such as CFLAGS. if not comp.get_language() in self.coredata.external_args: (preproc_args, compile_args, link_args) = environment.get_args_from_envvars(comp) self.coredata.external_preprocess_args[comp.get_language()] = preproc_args @@ -2139,7 +2304,7 @@ to directly access options of other subprojects.''') if progobj is None: progobj = self.program_from_system(args) if required and (progobj is None or not progobj.found()): - raise InvalidArguments('Program "%s" not found or not executable' % args[0]) + raise InvalidArguments('Program(s) {!r} not found or not executable'.format(args)) if progobj is None: return ExternalProgramHolder(dependencies.NonExistingExternalProgram()) return progobj @@ -2184,17 +2349,17 @@ to directly access options of other subprojects.''') def check_subproject_version(wanted, found): if wanted == 'undefined': return True - if found == 'undefined' or not mesonlib.version_compare(found, wanted): + if found == 'undefined' or not mesonlib.version_compare_many(found, wanted)[0]: return False return True def get_subproject_dep(self, name, dirname, varname, required): try: dep = self.subprojects[dirname].get_variable_method([varname], {}) - except KeyError: + except InvalidArguments as e: if required: - raise DependencyException('Could not find dependency {} in subproject {}' - ''.format(varname, dirname)) + raise DependencyException('Could not find dependency {} in subproject {}; {}' + ''.format(varname, dirname, str(e))) # If the dependency is not required, don't raise an exception subproj_path = os.path.join(self.subproject_dir, dirname) mlog.log('Dependency', mlog.bold(name), 'from subproject', @@ -2276,7 +2441,6 @@ to directly access options of other subprojects.''') # we won't actually read all the build files. return fallback_dep if not dep: - self.print_nested_info(name) assert(exception is not None) raise exception @@ -2308,7 +2472,7 @@ root and issuing %s. 
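The loop over cmd_line_options.projectoptions above recovers the language from option names such as c_args or cpp_link_args by stripping the suffix. A small sketch of that parsing; the 'compile'/'link' labels are illustrative, not Meson's:

def split_lang_flags(optspec):
    optname, optvalue = optspec.split('=', maxsplit=1)
    if optname.endswith('_link_args'):
        return optname[:-10], 'link', optvalue      # len('_link_args') == 10
    if optname.endswith('_args'):
        return optname[:-5], 'compile', optvalue    # len('_args') == 5
    return None, None, optvalue

print(split_lang_flags('cpp_link_args=-lstdc++fs'))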
cmds = [] command_templ = 'meson wrap promote ' for l in found: - cmds.append(command_templ + l[len(self.source_root)+1:]) + cmds.append(command_templ + l[len(self.source_root) + 1:]) final_message = message + '\n'.join(cmds) print(final_message) @@ -2339,10 +2503,10 @@ root and issuing %s. raise # If the subproject execution failed in a non-fatal way, don't raise an # exception; let the caller handle things. - except: + except Exception as e: mlog.log('Also couldn\'t find a fallback subproject in', mlog.bold(os.path.join(self.subproject_dir, dirname)), - 'for the dependency', mlog.bold(name)) + 'for the dependency', mlog.bold(name), '\nReason:', str(e)) return None dep = self.get_subproject_dep(name, dirname, varname, kwargs.get('required', True)) if not dep: @@ -2556,14 +2720,12 @@ root and issuing %s. if not isinstance(timeout, int): raise InterpreterException('Timeout must be an integer.') suite = [] + prj = self.subproject if self.is_subproject() else self.build.project_name for s in mesonlib.stringlistify(kwargs.get('suite', '')): if len(s) > 0: s = ':' + s - if self.is_subproject(): - suite.append(self.subproject.replace(' ', '_').replace(':', '_') + s) - else: - suite.append(self.build.project_name.replace(' ', '_').replace(':', '_') + s) - t = Test(args[0], suite, exe.held_object, par, cmd_args, env, should_fail, timeout, workdir) + suite.append(prj.replace(' ', '_').replace(':', '_') + s) + t = Test(args[0], prj, suite, exe.held_object, par, cmd_args, env, should_fail, timeout, workdir) if is_base_test: self.build.tests.append(t) mlog.debug('Adding test "', mlog.bold(args[0]), '".', sep='') @@ -2664,7 +2826,8 @@ root and issuing %s. if not isinstance(install_dir, (str, type(None))): raise InvalidArguments('Keyword argument install_dir not a string.') install_mode = self._get_kwarg_install_mode(kwargs) - data = DataHolder(build.Data(sources, install_dir, install_mode)) + rename = kwargs.get('rename', None) + data = DataHolder(build.Data(sources, install_dir, install_mode, rename)) self.build.data.append(data.held_object) return data @@ -2810,9 +2973,11 @@ root and issuing %s. conffile = os.path.normpath(inputfile.relative_name()) if conffile not in self.build_def_files: self.build_def_files.append(conffile) - # Install file if requested + # Install file if requested, we check for the empty string + # for backwards compatibility. That was the behaviour before + # 0.45.0 so preserve it. idir = kwargs.get('install_dir', None) - if isinstance(idir, str): + if isinstance(idir, str) and idir: cfile = mesonlib.File.from_built_file(ofile_path, ofile_fname) self.build.data.append(build.Data([cfile], idir)) return mesonlib.File.from_built_file(self.subdir, output) @@ -2820,12 +2985,17 @@ root and issuing %s. @permittedKwargs(permitted_kwargs['include_directories']) @stringArgs def func_include_directories(self, node, args, kwargs): + return self.build_incdir_object(args, kwargs.get('is_system', False)) + + def build_incdir_object(self, incdir_strings, is_system=False): + if not isinstance(is_system, bool): + raise InvalidArguments('Is_system must be boolean.') src_root = self.environment.get_source_dir() build_root = self.environment.get_build_dir() absbase_src = os.path.join(src_root, self.subdir) absbase_build = os.path.join(build_root, self.subdir) - for a in args: + for a in incdir_strings: if a.startswith(src_root): raise InvalidArguments('''Tried to form an absolute path to a source dir. You should not do that but use relative paths instead. 
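The test() change above prefixes every suite entry with the sanitised project (or subproject) name, and an empty suite list still yields the bare project name. A sketch of that naming rule:

def build_suites(project_name, suites):
    prj = project_name.replace(' ', '_').replace(':', '_')
    out = []
    for s in suites:
        out.append(prj + (':' + s if s else ''))
    return out

print(build_suites('my proj', ['', 'unit', 'slow']))
# -> ['my_proj', 'my_proj:unit', 'my_proj:slow']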
@@ -2848,10 +3018,7 @@ different subdirectory. absdir_build = os.path.join(absbase_build, a) if not os.path.isdir(absdir_src) and not os.path.isdir(absdir_build): raise InvalidArguments('Include dir %s does not exist.' % a) - is_system = kwargs.get('is_system', False) - if not isinstance(is_system, bool): - raise InvalidArguments('Is_system must be boolean.') - i = IncludeDirsHolder(build.IncludeDirs(self.subdir, args, is_system)) + i = IncludeDirsHolder(build.IncludeDirs(self.subdir, incdir_strings, is_system)) return i @permittedKwargs(permitted_kwargs['add_test_setup']) @@ -2860,8 +3027,10 @@ different subdirectory. if len(args) != 1: raise InterpreterException('Add_test_setup needs one argument for the setup name.') setup_name = args[0] - if re.fullmatch('[_a-zA-Z][_0-9a-zA-Z]*', setup_name) is None: + if re.fullmatch('([_a-zA-Z][_0-9a-zA-Z]*:)?[_a-zA-Z][_0-9a-zA-Z]*', setup_name) is None: raise InterpreterException('Setup name may only contain alphanumeric characters.') + if ":" not in setup_name: + setup_name = (self.subproject if self.subproject else self.build.project_name) + ":" + setup_name try: inp = extract_as_list(kwargs, 'exe_wrapper') exe_wrapper = [] @@ -2885,14 +3054,10 @@ different subdirectory. if not isinstance(timeout_multiplier, int): raise InterpreterException('Timeout multiplier must be a number.') env = self.unpack_env_kwarg(kwargs) - setupobj = build.TestSetup(exe_wrapper=exe_wrapper, - gdb=gdb, - timeout_multiplier=timeout_multiplier, - env=env) - if self.subproject == '': - # Dunno what we should do with subprojects really. Let's start simple - # and just use the master project ones. - self.build.test_setups[setup_name] = setupobj + self.build.test_setups[setup_name] = build.TestSetup(exe_wrapper=exe_wrapper, + gdb=gdb, + timeout_multiplier=timeout_multiplier, + env=env) @permittedKwargs(permitted_kwargs['add_global_arguments']) @stringArgs @@ -2963,11 +3128,16 @@ different subdirectory. def evaluate_subproject_info(self, path_from_source_root, subproject_dirname): depth = 0 subproj_name = '' - segs = path_from_source_root.split(os.path.sep) - while segs and segs[0] == subproject_dirname: - depth += 1 - subproj_name = segs[1] - segs = segs[2:] + segs = PurePath(path_from_source_root).parts + segs_spd = PurePath(subproject_dirname).parts + while segs and segs[0] == segs_spd[0]: + if len(segs_spd) == 1: + subproj_name = segs[1] + segs = segs[2:] + depth += 1 + else: + segs_spd = segs_spd[1:] + segs = segs[1:] return (depth, subproj_name) # Check that the indicated file is within the same subproject @@ -3074,6 +3244,7 @@ different subdirectory. else: mlog.debug('Unknown target type:', str(targetholder)) raise RuntimeError('Unreachable code') + self.kwarg_strings_to_includedirs(kwargs) target = targetclass(name, self.subdir, self.subproject, is_cross, sources, objs, self.environment, kwargs) if is_cross: self.add_cross_stdlib_info(target) @@ -3082,6 +3253,23 @@ different subdirectory. self.project_args_frozen = True return l + def kwarg_strings_to_includedirs(self, kwargs): + if 'd_import_dirs' in kwargs: + items = mesonlib.extract_as_list(kwargs, 'd_import_dirs') + cleaned_items = [] + for i in items: + if isinstance(i, str): + # BW compatibility. This was permitted so we must support it + # for a few releases so people can transition to "correct" + # path declarations. + if i.startswith(self.environment.get_source_dir()): + mlog.warning('''Building a path to the source dir is not supported. Use a relative path instead. 
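add_test_setup() above now accepts an optional 'project:' prefix and namespaces bare setup names itself. A standalone sketch of that qualification step (ValueError stands in for InterpreterException):

import re

def qualify_setup_name(setup_name, project_name):
    if re.fullmatch('([_a-zA-Z][_0-9a-zA-Z]*:)?[_a-zA-Z][_0-9a-zA-Z]*',
                    setup_name) is None:
        raise ValueError('Setup name may only contain alphanumeric characters.')
    if ':' not in setup_name:
        setup_name = project_name + ':' + setup_name
    return setup_name

print(qualify_setup_name('valgrind', 'myproj'))  # -> myproj:valgrind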
+This will become a hard error in the future.''') + i = os.path.relpath(i, os.path.join(self.environment.get_source_dir(), self.subdir)) + i = self.build_incdir_object([i]) + cleaned_items.append(i) + kwargs['d_import_dirs'] = cleaned_items + def get_used_languages(self, target): result = {} for i in target.sources: @@ -3120,6 +3308,7 @@ different subdirectory. if idx >= len(arg_strings): raise InterpreterException('Format placeholder @{}@ out of range.'.format(idx)) return arg_strings[idx] + return re.sub(r'@(\d+)@', arg_replace, templ) # Only permit object extraction from the same subproject diff --git a/mesonbuild/interpreterbase.py b/mesonbuild/interpreterbase.py index 6618dc8..9279506 100644 --- a/mesonbuild/interpreterbase.py +++ b/mesonbuild/interpreterbase.py @@ -80,6 +80,22 @@ class permittedKwargs: return wrapped +class permittedMethodKwargs: + + def __init__(self, permitted): + self.permitted = permitted + + def __call__(self, f): + @wraps(f) + def wrapped(obj, args, kwargs): + for k in kwargs: + if k not in self.permitted: + mlog.warning('''Passed invalid keyword argument "{}".'''.format(k)) + mlog.warning('This will become a hard error in the future.') + return f(obj, args, kwargs) + return wrapped + + class InterpreterException(mesonlib.MesonException): pass @@ -267,9 +283,8 @@ class InterpreterBase: def validate_comparison_types(self, val1, val2): if type(val1) != type(val2): - mlog.warning('''Trying to compare values of different types ({}, {}). -The result of this is undefined and will become a hard error -in a future Meson release.'''.format(type(val1).__name__, type(val2).__name__)) + return False + return True def evaluate_comparison(self, node): val1 = self.evaluate_statement(node.left) @@ -278,11 +293,23 @@ in a future Meson release.'''.format(type(val1).__name__, type(val2).__name__)) val2 = self.evaluate_statement(node.right) if is_disabler(val2): return val2 - self.validate_comparison_types(val1, val2) + valid = self.validate_comparison_types(val1, val2) + # Ordering comparisons of different types isn't allowed since PR #1810 + # (0.41.0). Since PR #2884 we also warn about equality comparisons of + # different types, which will one day become an error. + if not valid and (node.ctype == '==' or node.ctype == '!='): + mlog.warning('''Trying to compare values of different types ({}, {}) using {}. +The result of this is undefined and will become a hard error in a future Meson release.''' + .format(type(val1).__name__, type(val2).__name__, node.ctype), location=node) if node.ctype == '==': return val1 == val2 elif node.ctype == '!=': return val1 != val2 + elif not valid: + raise InterpreterException( + 'Values of different types ({}, {}) cannot be compared using {}.'.format(type(val1).__name__, + type(val2).__name__, + node.ctype)) elif not self.is_elementary_type(val1): raise InterpreterException('{} can only be compared for equality.'.format(node.left.value)) elif not self.is_elementary_type(val2): @@ -397,7 +424,7 @@ in a future Meson release.'''.format(type(val1).__name__, type(val2).__name__)) varname = node.var_name addition = self.evaluate_statement(node.value) if is_disabler(addition): - set_variable(varname, addition) + self.set_variable(varname, addition) return # Remember that all variables are immutable. We must always create a # full new variable and then assign it. 
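The new permittedMethodKwargs decorator above only warns about unknown keyword arguments for now, so existing build files keep working while the check phases in. A standalone re-implementation of the same idea, using print() in place of mlog.warning():

from functools import wraps

class permittedMethodKwargs:
    def __init__(self, permitted):
        self.permitted = set(permitted)

    def __call__(self, f):
        @wraps(f)
        def wrapped(obj, args, kwargs):
            for k in kwargs:
                if k not in self.permitted:
                    print('warning: passed invalid keyword argument "{}"; '
                          'this will become a hard error'.format(k))
            return f(obj, args, kwargs)
        return wrapped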
diff --git a/mesonbuild/mconf.py b/mesonbuild/mconf.py index 771e9ee..b409615 100644 --- a/mesonbuild/mconf.py +++ b/mesonbuild/mconf.py @@ -12,10 +12,10 @@ # See the License for the specific language governing permissions and # limitations under the License. -import sys, os -import pickle +import os +import sys import argparse -from . import coredata, mesonlib +from . import (coredata, mesonlib, build) parser = argparse.ArgumentParser(prog='meson configure') @@ -25,95 +25,73 @@ parser.add_argument('directory', nargs='*') parser.add_argument('--clearcache', action='store_true', default=False, help='Clear cached state (e.g. found dependencies)') + class ConfException(mesonlib.MesonException): pass + class Conf: def __init__(self, build_dir): self.build_dir = build_dir - self.coredata_file = os.path.join(build_dir, 'meson-private/coredata.dat') - self.build_file = os.path.join(build_dir, 'meson-private/build.dat') - if not os.path.isfile(self.coredata_file) or not os.path.isfile(self.build_file): + if not os.path.isdir(os.path.join(build_dir, 'meson-private')): raise ConfException('Directory %s does not seem to be a Meson build directory.' % build_dir) - with open(self.coredata_file, 'rb') as f: - self.coredata = pickle.load(f) - with open(self.build_file, 'rb') as f: - self.build = pickle.load(f) - if self.coredata.version != coredata.version: - raise ConfException('Version mismatch (%s vs %s)' % - (coredata.version, self.coredata.version)) + self.build = build.load(self.build_dir) + self.coredata = coredata.load(self.build_dir) def clear_cache(self): self.coredata.deps = {} def save(self): # Only called if something has changed so overwrite unconditionally. - with open(self.coredata_file, 'wb') as f: - pickle.dump(self.coredata, f) + coredata.save(self.coredata, self.build_dir) # We don't write the build file because any changes to it # are erased when Meson is executed the next time, i.e. when # Ninja is run. 
- def print_aligned(self, arr): + @staticmethod + def print_aligned(arr): + def make_lower_case(val): + if isinstance(val, bool): + return str(val).lower() + elif isinstance(val, list): + return [make_lower_case(i) for i in val] + else: + return str(val) + if not arr: return - titles = {'name': 'Option', 'descr': 'Description', 'value': 'Current Value', 'choices': 'Possible Values'} - len_name = longest_name = len(titles['name']) - len_descr = longest_descr = len(titles['descr']) - len_value = longest_value = len(titles['value']) - longest_choices = 0 # not printed if we don't get any optional values - - # calculate the max length of each - for x in arr: - name = x['name'] - descr = x['descr'] - value = x['value'] if isinstance(x['value'], str) else str(x['value']).lower() - choices = '' - if isinstance(x['choices'], list): - if x['choices']: - x['choices'] = [s if isinstance(s, str) else str(s).lower() for s in x['choices']] - choices = '[%s]' % ', '.join(map(str, x['choices'])) - elif x['choices']: - choices = x['choices'] if isinstance(x['choices'], str) else str(x['choices']).lower() - longest_name = max(longest_name, len(name)) - longest_descr = max(longest_descr, len(descr)) - longest_value = max(longest_value, len(value)) - longest_choices = max(longest_choices, len(choices)) + titles = {'name': 'Option', 'descr': 'Description', 'value': 'Current Value', 'choices': 'Possible Values'} - # update possible non strings - x['value'] = value - x['choices'] = choices + name_col = [titles['name'], '-' * len(titles['name'])] + value_col = [titles['value'], '-' * len(titles['value'])] + choices_col = [titles['choices'], '-' * len(titles['choices'])] + descr_col = [titles['descr'], '-' * len(titles['descr'])] - # prints header - namepad = ' ' * (longest_name - len_name) - valuepad = ' ' * (longest_value - len_value) - if longest_choices: - len_choices = len(titles['choices']) - longest_choices = max(longest_choices, len_choices) - choicepad = ' ' * (longest_choices - len_choices) - print(' %s%s %s%s %s%s %s' % (titles['name'], namepad, titles['value'], valuepad, titles['choices'], choicepad, titles['descr'])) - print(' %s%s %s%s %s%s %s' % ('-' * len_name, namepad, '-' * len_value, valuepad, '-' * len_choices, choicepad, '-' * len_descr)) - else: - print(' %s%s %s%s %s' % (titles['name'], namepad, titles['value'], valuepad, titles['descr'])) - print(' %s%s %s%s %s' % ('-' * len_name, namepad, '-' * len_value, valuepad, '-' * len_descr)) + choices_found = False + for opt in arr: + name_col.append(opt['name']) + descr_col.append(opt['descr']) + if isinstance(opt['value'], list): + value_col.append('[{0}]'.format(', '.join(make_lower_case(opt['value'])))) + else: + value_col.append(make_lower_case(opt['value'])) + if opt['choices']: + choices_found = True + choices_col.append('[{0}]'.format(', '.join(make_lower_case(opt['choices'])))) + else: + choices_col.append('') - # print values - for i in arr: - name = i['name'] - descr = i['descr'] - value = i['value'] - choices = i['choices'] + col_widths = (max([len(i) for i in name_col], default=0), + max([len(i) for i in value_col], default=0), + max([len(i) for i in choices_col], default=0), + max([len(i) for i in descr_col], default=0)) - namepad = ' ' * (longest_name - len(name)) - valuepad = ' ' * (longest_value - len(value)) - if longest_choices: - choicespad = ' ' * (longest_choices - len(choices)) - f = ' %s%s %s%s %s%s %s' % (name, namepad, value, valuepad, choices, choicespad, descr) + for line in zip(name_col, value_col, choices_col, 
descr_col): + if choices_found: + print(' {0:{width[0]}} {1:{width[1]}} {2:{width[2]}} {3:{width[3]}}'.format(*line, width=col_widths)) else: - f = ' %s%s %s%s %s' % (name, namepad, value, valuepad, descr) - - print(f) + print(' {0:{width[0]}} {1:{width[1]}} {3:{width[3]}}'.format(*line, width=col_widths)) def set_options(self, options): for o in options: @@ -156,8 +134,7 @@ class Conf: print('Core properties:') print(' Source dir', self.build.environment.source_dir) print(' Build dir ', self.build.environment.build_dir) - print('') - print('Core options:') + print('\nCore options:\n') carr = [] for key in ['buildtype', 'warning_level', 'werror', 'strip', 'unity', 'default_library']: carr.append({'name': key, @@ -165,48 +142,39 @@ class Conf: 'value': self.coredata.get_builtin_option(key), 'choices': coredata.get_builtin_option_choices(key)}) self.print_aligned(carr) - print('') - bekeys = sorted(self.coredata.backend_options.keys()) - if not bekeys: + if not self.coredata.backend_options: print(' No backend options\n') else: bearr = [] - for k in bekeys: + for k in sorted(self.coredata.backend_options): o = self.coredata.backend_options[k] bearr.append({'name': k, 'descr': o.description, 'value': o.value, 'choices': ''}) self.print_aligned(bearr) - print('') - print('Base options:') - okeys = sorted(self.coredata.base_options.keys()) - if not okeys: + print('\nBase options:') + if not self.coredata.base_options: print(' No base options\n') else: coarr = [] - for k in okeys: + for k in sorted(self.coredata.base_options): o = self.coredata.base_options[k] coarr.append({'name': k, 'descr': o.description, 'value': o.value, 'choices': o.choices}) self.print_aligned(coarr) - print('') - print('Compiler arguments:') + print('\nCompiler arguments:') for (lang, args) in self.coredata.external_args.items(): print(' ' + lang + '_args', str(args)) - print('') - print('Linker args:') + print('\nLinker args:') for (lang, args) in self.coredata.external_link_args.items(): print(' ' + lang + '_link_args', str(args)) - print('') - print('Compiler options:') - okeys = sorted(self.coredata.compiler_options.keys()) - if not okeys: + print('\nCompiler options:') + if not self.coredata.compiler_options: print(' No compiler options\n') else: coarr = [] - for k in okeys: + for k in self.coredata.compiler_options: o = self.coredata.compiler_options[k] coarr.append({'name': k, 'descr': o.description, 'value': o.value, 'choices': ''}) self.print_aligned(coarr) - print('') - print('Directories:') + print('\nDirectories:') parr = [] for key in ['prefix', 'libdir', @@ -227,30 +195,24 @@ class Conf: 'value': self.coredata.get_builtin_option(key), 'choices': coredata.get_builtin_option_choices(key)}) self.print_aligned(parr) - print('') - print('Project options:') + print('\nProject options:') if not self.coredata.user_options: print(' This project does not have any options') else: - options = self.coredata.user_options - keys = list(options.keys()) - keys.sort() optarr = [] - for key in keys: - opt = options[key] + for key in sorted(self.coredata.user_options): + opt = self.coredata.user_options[key] if (opt.choices is None) or (not opt.choices): # Zero length list or string choices = '' else: - # A non zero length list or string, convert to string - choices = str(opt.choices) + choices = opt.choices optarr.append({'name': key, 'descr': opt.description, 'value': opt.value, 'choices': choices}) self.print_aligned(optarr) - print('') - print('Testing options:') + print('\nTesting options:') tarr = [] for key in 
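The rewritten print_aligned() above builds one list per column, measures the widest cell, and lets str.format pad through a nested width field. The same technique in miniature, on generic rows rather than Conf's option dicts:

def print_table(rows):
    cols = list(zip(*rows))
    widths = [max(len(cell) for cell in col) for col in cols]
    for row in rows:
        print('  ' + '  '.join('{0:{1}}'.format(cell, w)
                               for cell, w in zip(row, widths)))

print_table([('Option', 'Current Value', 'Description'),
             ('------', '-------------', '-----------'),
             ('strip', 'false', 'Strip on install')])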
['stdsplit', 'errorlogs']: tarr.append({'name': key, @@ -259,6 +221,7 @@ class Conf: 'choices': coredata.get_builtin_option_choices(key)}) self.print_aligned(tarr) + def run(args): args = mesonlib.expand_arguments(args) if not args: @@ -286,10 +249,10 @@ def run(args): if save: c.save() except ConfException as e: - print('Meson configurator encountered an error:\n') - print(e) - return 1 + print('Meson configurator encountered an error:') + raise e return 0 + if __name__ == '__main__': sys.exit(run(sys.argv[1:])) diff --git a/mesonbuild/mesonlib.py b/mesonbuild/mesonlib.py index 65b689f..a076e3e 100644 --- a/mesonbuild/mesonlib.py +++ b/mesonbuild/mesonlib.py @@ -21,6 +21,21 @@ import platform, subprocess, operator, os, shutil, re import collections from mesonbuild import mlog +have_fcntl = False +have_msvcrt = False + +try: + import fcntl + have_fcntl = True +except Exception: + pass + +try: + import msvcrt + have_msvcrt = True +except Exception: + pass + from glob import glob def detect_meson_py_location(): @@ -36,12 +51,11 @@ def detect_meson_py_location(): # $ <mesontool> <args> (gets run from /usr/bin/<mesontool>) in_path_exe = shutil.which(c_fname) if in_path_exe: - m_dir, c_fname = os.path.split(in_path_exe) - # Special case: when run like "./meson.py <opts>", - # we need to expand it out, because, for example, - # "ninja test" will be run from a different directory. - if m_dir == '.': + if not os.path.isabs(in_path_exe): m_dir = os.getcwd() + c_fname = in_path_exe + else: + m_dir, c_fname = os.path.split(in_path_exe) else: m_dir = os.path.abspath(c_dir) @@ -56,12 +70,18 @@ def detect_meson_py_location(): # a) meson is not installed # b) meson is installed to a non-standard location # c) the script that invoked mesonlib is not the one of meson tools (e.g. run_unittests.py) - # The only thing remaining is to try to find the bundled executable and - # pray distro packagers have not moved it. fname = os.path.normpath(os.path.join(os.path.dirname(__file__), '..', 'meson.py')) - if not os.path.exists(fname): - raise RuntimeError('Could not determine how to run Meson. Please file a bug with details.') - return fname + if os.path.exists(fname): + return fname + # If meson is still not found, we might be imported by out-of-source tests + # https://github.com/mesonbuild/meson/issues/3015 + exe = shutil.which('meson') + if exe is None: + exe = shutil.which('meson.py') + if exe is not None: + return exe + # Give up. + raise RuntimeError('Could not determine how to run Meson. Please file a bug with details.') if os.path.basename(sys.executable) == 'meson.exe': # In Windows and using the MSI installed executable. @@ -514,6 +534,12 @@ def get_library_dirs(): unixdirs += glob('/lib/' + plat + '*') return unixdirs +def has_path_sep(name, sep='/\\'): + 'Checks if any of the specified @sep path separators are in @name' + for each in sep: + if each in name: + return True + return False def do_replacement(regex, line, confdata): missing_variables = set() @@ -897,6 +923,8 @@ def windows_proof_rmtree(f): try: shutil.rmtree(f) return + except FileNotFoundError: + return except (OSError, PermissionError): time.sleep(d) # Try one last time and throw if it fails. 
@@ -965,3 +993,26 @@ class OrderedSet(collections.MutableSet): def difference(self, set_): return type(self)(e for e in self if e not in set_) + +class BuildDirLock: + + def __init__(self, builddir): + self.lockfilename = os.path.join(builddir, 'meson-private/meson.lock') + + def __enter__(self): + self.lockfile = open(self.lockfilename, 'w') + try: + if have_fcntl: + fcntl.flock(self.lockfile, fcntl.LOCK_EX | fcntl.LOCK_NB) + elif have_msvcrt: + msvcrt.locking(self.lockfile.fileno(), msvcrt.LK_NBLCK, 1) + except (BlockingIOError, PermissionError): + self.lockfile.close() + raise MesonException('Some other Meson process is already using this build directory. Exiting.') + + def __exit__(self, *args): + if have_fcntl: + fcntl.flock(self.lockfile, fcntl.LOCK_UN) + elif have_msvcrt: + msvcrt.locking(self.lockfile.fileno(), msvcrt.LK_UNLCK, 1) + self.lockfile.close() diff --git a/mesonbuild/mesonmain.py b/mesonbuild/mesonmain.py index 619aa39..651224e 100644 --- a/mesonbuild/mesonmain.py +++ b/mesonbuild/mesonmain.py @@ -12,8 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. -import sys, stat, traceback, pickle, argparse -import time, datetime +import sys, stat, traceback, argparse +import datetime import os.path from . import environment, interpreter, mesonlib from . import build @@ -147,10 +147,8 @@ class MesonApp: def generate(self): env = environment.Environment(self.source_dir, self.build_dir, self.meson_script_launcher, self.options, self.original_cmd_line_args) mlog.initialize(env.get_log_dir()) - try: + with mesonlib.BuildDirLock(self.build_dir): self._generate(env) - finally: - mlog.shutdown() def _generate(self, env): mlog.debug('Build started at', datetime.datetime.now().isoformat()) @@ -199,6 +197,7 @@ class MesonApp: mlog.log('Build machine cpu:', mlog.bold(intr.builtin['build_machine'].cpu_method([], {}))) intr.run() try: + dumpfile = os.path.join(env.get_scratch_dir(), 'build.dat') # We would like to write coredata as late as possible since we use the existence of # this file to check if we generated the build file successfully. Since coredata # includes settings, the build files must depend on it and appear newer. However, due @@ -207,16 +206,13 @@ class MesonApp: # possible, but before build files, and if any error occurs, delete it. cdf = env.dump_coredata() g.generate(intr) - dumpfile = os.path.join(env.get_scratch_dir(), 'build.dat') - with open(dumpfile, 'wb') as f: - pickle.dump(b, f) + build.save(b, dumpfile) # Post-conf scripts must be run after writing coredata or else introspection fails. g.run_postconf_scripts() except: os.unlink(cdf) raise - def run_script_command(args): cmdname = args[0] cmdargs = args[1:] @@ -289,6 +285,13 @@ def run(original_args, mainfile=None): # First check if we want to run a subcommand. cmd_name = args[0] remaining_args = args[1:] + # "help" is a special case: Since printing of the help may be + # delegated to a subcommand, we edit cmd_name before executing + # the rest of the logic here. 
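BuildDirLock above takes an exclusive, non-blocking lock on a file inside meson-private so two Meson processes cannot configure the same build directory at once. A POSIX-only sketch of the same context manager; the hunk additionally has an msvcrt branch for Windows, and the '.lock' filename here is made up:

import fcntl
import os

class DirLock:
    def __init__(self, builddir):
        self.lockfilename = os.path.join(builddir, '.lock')

    def __enter__(self):
        self.lockfile = open(self.lockfilename, 'w')
        try:
            # Exclusive, non-blocking: fail fast if someone holds it.
            fcntl.flock(self.lockfile, fcntl.LOCK_EX | fcntl.LOCK_NB)
        except (BlockingIOError, PermissionError):
            self.lockfile.close()
            raise RuntimeError('directory is already in use')
        return self

    def __exit__(self, *args):
        fcntl.flock(self.lockfile, fcntl.LOCK_UN)
        self.lockfile.close()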
+ if cmd_name == 'help': + remaining_args += ['--help'] + args = remaining_args + cmd_name = args[0] if cmd_name == 'test': return mtest.run(remaining_args) elif cmd_name == 'setup': @@ -302,7 +305,7 @@ def run(original_args, mainfile=None): try: return mconf.run(remaining_args) except MesonException as e: - mlog.log(mlog.red('\nError configuring project:'), e) + mlog.exception(e) sys.exit(1) elif cmd_name == 'wrap': return wraptool.run(remaining_args) @@ -322,8 +325,8 @@ def run(original_args, mainfile=None): try: sys.exit(run_script_command(args[1:])) except MesonException as e: - mlog.log(mlog.red('\nError in {} helper script:'.format(script))) - mlog.log(e) + mlog.error('\nError in {} helper script:'.format(script)) + mlog.exception(e) sys.exit(1) args = args[2:] handshake = True @@ -366,21 +369,20 @@ def run(original_args, mainfile=None): app.generate() except Exception as e: if isinstance(e, MesonException): - mlog.log() - if hasattr(e, 'file') and hasattr(e, 'lineno') and hasattr(e, 'colno'): - mlog.log('%s:%d:%d:' % (e.file, e.lineno, e.colno), mlog.red('ERROR: '), end='') - else: - mlog.log(mlog.red('ERROR: '), end='') - # Error message - mlog.log(e) + mlog.exception(e) # Path to log file + mlog.shutdown() logfile = os.path.join(app.build_dir, environment.Environment.log_dir, mlog.log_fname) mlog.log("\nA full log can be found at", mlog.bold(logfile)) if os.environ.get('MESON_FORCE_BACKTRACE'): raise + return 1 else: if os.environ.get('MESON_FORCE_BACKTRACE'): raise traceback.print_exc() - return 1 + return 2 + finally: + mlog.shutdown() + return 0 diff --git a/mesonbuild/minit.py b/mesonbuild/minit.py index 98817cb..0461cd9 100644 --- a/mesonbuild/minit.py +++ b/mesonbuild/minit.py @@ -1,5 +1,4 @@ # Copyright 2017 The Meson development team -from pyclbr import Function # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
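The 'help' special case above rewrites 'meson help <command>' into '<command> --help' before dispatch, so each subcommand prints its own help text. A sketch of that rewrite:

def rewrite_help(args):
    if args and args[0] == 'help':
        args = args[1:] + ['--help']
    return args

print(rewrite_help(['help', 'test']))  # -> ['test', '--help']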
@@ -15,8 +14,10 @@ from pyclbr import Function """Code that creates simple startup projects.""" -import os, sys, argparse, re +import os, sys, argparse, re, shutil from glob import glob +from mesonbuild import mesonlib +from mesonbuild.environment import detect_ninja lib_h_template = '''#pragma once #if defined _WIN32 || defined __CYGWIN__ @@ -107,7 +108,7 @@ pkg_mod.generate( ) ''' -hello_c_template = '''#include <stdio.h> +hello_c_template = '''#include <stdio.h> #define PROJECT_NAME "{project_name}" @@ -123,16 +124,15 @@ int main(int argc, char **argv) {{ hello_c_meson_template = '''project('{project_name}', 'c', version : '{version}', - default_options : ['warning_level=3', - 'cpp_std=c++14']) + default_options : ['warning_level=3']) exe = executable('{exe_name}', '{source_name}', install : true) - + test('basic', exe) ''' -hello_cpp_template = '''#include <iostream> +hello_cpp_template = '''#include <iostream> #define PROJECT_NAME "{project_name}" @@ -148,11 +148,12 @@ int main(int argc, char **argv) {{ hello_cpp_meson_template = '''project('{project_name}', 'cpp', version : '{version}', - default_options : ['warning_level=3']) + default_options : ['warning_level=3', + 'cpp_std=c++14']) exe = executable('{exe_name}', '{source_name}', install : true) - + test('basic', exe) ''' @@ -178,9 +179,9 @@ class {utoken}_PUBLIC {class_name} {{ public: {class_name}(); int get_number() const; - + private: - + int number; }}; @@ -270,7 +271,6 @@ ninja -C builddir def create_exe_c_sample(project_name, project_version): lowercase_token = re.sub(r'[^a-z0-9]', '_', project_name.lower()) - uppercase_token = lowercase_token.upper() source_name = lowercase_token + '.c' open(source_name, 'w').write(hello_c_template.format(project_name=project_name)) open('meson.build', 'w').write(hello_c_meson_template.format(project_name=project_name, @@ -291,7 +291,7 @@ def create_lib_c_sample(project_name, version): 'function_name': function_name, 'header_file': lib_h_name, 'source_file': lib_c_name, - 'test_source_file': test_c_name, + 'test_source_file': test_c_name, 'test_exe_name': lowercase_token, 'project_name': project_name, 'lib_name': lowercase_token, @@ -305,13 +305,12 @@ def create_lib_c_sample(project_name, version): def create_exe_cpp_sample(project_name, project_version): lowercase_token = re.sub(r'[^a-z0-9]', '_', project_name.lower()) - uppercase_token = lowercase_token.upper() source_name = lowercase_token + '.cpp' open(source_name, 'w').write(hello_cpp_template.format(project_name=project_name)) open('meson.build', 'w').write(hello_cpp_meson_template.format(project_name=project_name, - exe_name=lowercase_token, - source_name=source_name, - version=project_version)) + exe_name=lowercase_token, + source_name=source_name, + version=project_version)) def create_lib_cpp_sample(project_name, version): lowercase_token = re.sub(r'[^a-z0-9]', '_', project_name.lower()) @@ -328,7 +327,7 @@ def create_lib_cpp_sample(project_name, version): 'namespace': namespace, 'header_file': lib_h_name, 'source_file': lib_c_name, - 'test_source_file': test_c_name, + 'test_source_file': test_c_name, 'test_exe_name': lowercase_token, 'project_name': project_name, 'lib_name': lowercase_token, @@ -359,15 +358,123 @@ def create_sample(options): raise RuntimeError('Unreachable code') print(info_message) +def autodetect_options(options, sample=False): + if not options.name: + options.name = os.path.basename(os.getcwd()) + if not re.match('[a-zA-Z_][a-zA-Z0-9]*', options.name) and sample: + print('Name of current directory "{}" is not 
usable as a sample project name.\n' + 'Specify a project name with --name.'.format(options.name)) + sys.exit(1) + print('Using "{}" (name of current directory) as project name.' + .format(options.name)) + if not options.executable: + options.executable = options.name + print('Using "{}" (project name) as name of executable to build.' + .format(options.executable)) + if sample: + # The rest of the autodetection is not applicable to generating sample projects. + return + if not options.srcfiles: + srcfiles = [] + for f in os.listdir(): + if f.endswith('.cc') or f.endswith('.cpp') or f.endswith('.c'): + srcfiles.append(f) + if not srcfiles: + print("No recognizable source files found.\n" + "Run me in an empty directory to create a sample project.") + sys.exit(1) + options.srcfiles = srcfiles + print("Detected source files: " + ' '.join(srcfiles)) + if not options.language: + for f in options.srcfiles: + if f.endswith('.cc') or f.endswith('.cpp'): + options.language = 'cpp' + break + if f.endswith('.c'): + options.language = 'c' + break + if not options.language: + print("Can't autodetect language, please specify it with -l.") + sys.exit(1) + print("Detected language: " + options.language) + +meson_executable_template = '''project('{project_name}', '{language}', + version : '{version}', + default_options : [{default_options}]) + +executable('{executable}', + {sourcespec},{depspec} + install : true) +''' + +def create_meson_build(options): + if options.type != 'executable': + print('\nGenerating a meson.build file from existing sources is\n' + 'supported only for project type "executable".\n' + 'Run me in an empty directory to create a sample project.') + sys.exit(1) + default_options = ['warning_level=3'] + if options.language == 'cpp': + # This shows how to set this very common option. + default_options += ['cpp_std=c++14'] + # If we get a meson.build autoformatter one day, this code could + # be simplified quite a bit. + formatted_default_options = ', '.join("'{}'".format(x) for x in default_options) + sourcespec = ',\n '.join("'{}'".format(x) for x in options.srcfiles) + depspec = '' + if options.deps: + depspec = '\n dependencies : [\n ' + depspec += ',\n '.join("dependency('{}')".format(x) + for x in options.deps.split(',')) + depspec += '],' + content = meson_executable_template.format(project_name=options.name, + language=options.language, + version=options.version, + executable=options.executable, + sourcespec=sourcespec, + depspec=depspec, + default_options=formatted_default_options) + open('meson.build', 'w').write(content) + print('Generated meson.build file:\n\n' + content) + def run(args): parser = argparse.ArgumentParser(prog='meson') - parser.add_argument('--name', default = 'mesonsample') + parser.add_argument("srcfiles", metavar="sourcefile", nargs="*", + help="source files. default: all recognized files in current directory") + parser.add_argument("-n", "--name", help="project name. default: name of current directory") + parser.add_argument("-e", "--executable", help="executable name. default: project name") + parser.add_argument("-d", "--deps", help="dependencies, comma-separated") + parser.add_argument("-l", "--language", choices=['c', 'cpp'], + help="project language. 
default: autodetected based on source files") + parser.add_argument("-b", "--build", help="build after generation", action='store_true') + parser.add_argument("--builddir", help="directory for build", default='build') + parser.add_argument("-f", "--force", action="store_true", + help="force overwrite of existing files and directories.") parser.add_argument('--type', default='executable', choices=['executable', 'library']) - parser.add_argument('--language', default='c', choices=['c', 'cpp']) - parser.add_argument('--version', default='1.0') + parser.add_argument('--version', default='0.1') options = parser.parse_args(args) - if len(glob('*')) != 0: - sys.exit('This command must be run in an empty directory.') - create_sample(options) + if len(glob('*')) == 0: + autodetect_options(options, sample=True) + if not options.language: + print('Defaulting to generating a C language project.') + options.language = 'c' + create_sample(options) + else: + autodetect_options(options) + if os.path.isfile('meson.build') and not options.force: + print('meson.build already exists. Use --force to overwrite.') + sys.exit(1) + create_meson_build(options) + if options.build: + if os.path.isdir(options.builddir) and options.force: + print('Build directory already exists, deleting it.') + shutil.rmtree(options.builddir) + print('Building...') + err = os.system('{} "{}"'.format(' '.join(mesonlib.meson_command), options.builddir)) + if err: + sys.exit(1) + err = os.system('{} -C "{}"'.format(detect_ninja(), options.builddir)) + if err: + sys.exit(1) return 0 diff --git a/mesonbuild/mintro.py b/mesonbuild/mintro.py index 8cf66af..23e666c 100644 --- a/mesonbuild/mintro.py +++ b/mesonbuild/mintro.py @@ -19,8 +19,9 @@ tests and so on. All output is in JSON for simple parsing. Currently only works for the Ninja backend. Others use generated project files and don't need this info.""" -import json, pickle -from . import coredata, build +import json +from . import build, mtest, coredata as cdata +from .backend import ninjabackend import argparse import sys, os import pathlib @@ -132,16 +133,16 @@ def add_keys(optlist, options): for key in keys: opt = options[key] optdict = {'name': key, 'value': opt.value} - if isinstance(opt, coredata.UserStringOption): + if isinstance(opt, cdata.UserStringOption): typestr = 'string' - elif isinstance(opt, coredata.UserBooleanOption): + elif isinstance(opt, cdata.UserBooleanOption): typestr = 'boolean' - elif isinstance(opt, coredata.UserComboOption): + elif isinstance(opt, cdata.UserComboOption): optdict['choices'] = opt.choices typestr = 'combo' - elif isinstance(opt, coredata.UserIntegerOption): + elif isinstance(opt, cdata.UserIntegerOption): typestr = 'integer' - elif isinstance(opt, coredata.UserArrayOption): + elif isinstance(opt, cdata.UserArrayOption): typestr = 'array' else: raise RuntimeError("Unknown option type") @@ -149,7 +150,7 @@ def add_keys(optlist, options): optdict['description'] = opt.description optlist.append(optdict) -def list_buildsystem_files(coredata, builddata): +def list_buildsystem_files(builddata): src_dir = builddata.environment.get_source_dir() # I feel dirty about this. But only slightly. 
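Taken together, the minit changes above let 'meson init' generate a meson.build from sources already present in the directory instead of only creating sample projects. A much-reduced sketch of that flow, with a hypothetical two-line template in place of the real meson_executable_template:

import os

TEMPLATE = """project('{name}', '{lang}', version : '0.1')
executable('{name}', {sources}, install : true)
"""

def detect_and_generate(directory='.'):
    sources = [f for f in os.listdir(directory)
               if f.endswith(('.c', '.cc', '.cpp'))]
    if not sources:
        raise SystemExit('No recognisable source files found.')
    lang = 'cpp' if any(f.endswith(('.cc', '.cpp')) for f in sources) else 'c'
    sourcespec = ', '.join("'{}'".format(s) for s in sources)
    return TEMPLATE.format(name=os.path.basename(os.path.abspath(directory)),
                           lang=lang, sources=sourcespec)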
filelist = [] @@ -185,6 +186,7 @@ def list_tests(testdata): to['workdir'] = t.workdir to['timeout'] = t.timeout to['suite'] = t.suite + to['is_parallel'] = t.is_parallel result.append(to) print(json.dumps(result)) @@ -208,26 +210,15 @@ def run(args): 'change the working directory to it.') return 1 - corefile = os.path.join(datadir, 'coredata.dat') - buildfile = os.path.join(datadir, 'build.dat') - installfile = os.path.join(datadir, 'install.dat') - testfile = os.path.join(datadir, 'meson_test_setup.dat') - benchmarkfile = os.path.join(datadir, 'meson_benchmark_setup.dat') + coredata = cdata.load(options.builddir) + builddata = build.load(options.builddir) + testdata = mtest.load_tests(options.builddir) + benchmarkdata = mtest.load_benchmarks(options.builddir) - # Load all data files - with open(corefile, 'rb') as f: - coredata = pickle.load(f) - with open(buildfile, 'rb') as f: - builddata = pickle.load(f) - with open(testfile, 'rb') as f: - testdata = pickle.load(f) - with open(benchmarkfile, 'rb') as f: - benchmarkdata = pickle.load(f) # Install data is only available with the Ninja backend - if os.path.isfile(installfile): - with open(installfile, 'rb') as f: - installdata = pickle.load(f) - else: + try: + installdata = ninjabackend.load(options.builddir) + except FileNotFoundError: installdata = None if options.list_targets: @@ -237,7 +228,7 @@ def run(args): elif options.target_files is not None: list_target_files(options.target_files, coredata, builddata) elif options.buildsystem_files: - list_buildsystem_files(coredata, builddata) + list_buildsystem_files(builddata) elif options.buildoptions: list_buildoptions(coredata, builddata) elif options.tests: diff --git a/mesonbuild/mlog.py b/mesonbuild/mlog.py index 273552d..347cede 100644 --- a/mesonbuild/mlog.py +++ b/mesonbuild/mlog.py @@ -102,19 +102,38 @@ def log(*args, **kwargs): arr = process_markup(args, True) force_print(*arr, **kwargs) -def warning(*args, **kwargs): +def _log_error(severity, *args, **kwargs): from . import environment + if severity == 'warning': + args = (yellow('WARNING:'),) + args + elif severity == 'error': + args = (red('ERROR:'),) + args + else: + assert False, 'Invalid severity ' + severity - args = (yellow('WARNING:'),) + args - - if kwargs.get('location'): + if 'location' in kwargs: location = kwargs['location'] del kwargs['location'] - location = '{}:{}:'.format(os.path.join(location.subdir, environment.build_filename), location.lineno) - args = (location,) + args + location_str = '{}:{}:'.format(os.path.join(location.subdir, + environment.build_filename), + location.lineno) + args = (location_str,) + args log(*args, **kwargs) +def error(*args, **kwargs): + return _log_error('error', *args, **kwargs) + +def warning(*args, **kwargs): + return _log_error('warning', *args, **kwargs) + +def exception(e): + log() + if hasattr(e, 'file') and hasattr(e, 'lineno') and hasattr(e, 'colno'): + log('%s:%d:%d:' % (e.file, e.lineno, e.colno), red('ERROR: '), e) + else: + log(red('ERROR:'), e) + # Format a list for logging purposes as a string. It separates # all but the last item with commas, and the last with 'and'. def format_list(list): diff --git a/mesonbuild/modules/gnome.py b/mesonbuild/modules/gnome.py index 218e3b3..8b6397e 100644 --- a/mesonbuild/modules/gnome.py +++ b/mesonbuild/modules/gnome.py @@ -15,13 +15,11 @@ '''This module provides helper functions for Gnome/GLib related functionality such as gobject-introspection, gresources and gtk-doc''' -from .. 
import build import os import copy import subprocess -from . import ModuleReturnValue -from ..mesonlib import MesonException, OrderedSet, Popen_safe, extract_as_list -from ..dependencies import Dependency, PkgConfigDependency, InternalDependency + +from .. import build from .. import mlog from .. import mesonlib from .. import compilers @@ -29,6 +27,9 @@ from .. import interpreter from . import GResourceTarget, GResourceHeaderTarget, GirTarget, TypelibTarget, VapiTarget from . import find_program, get_include_args from . import ExtensionModule +from . import ModuleReturnValue +from ..mesonlib import MesonException, OrderedSet, Popen_safe, extract_as_list +from ..dependencies import Dependency, PkgConfigDependency, InternalDependency from ..interpreterbase import noKwargs, permittedKwargs # gresource compilation is broken due to the way @@ -233,17 +234,6 @@ class GnomeModule(ExtensionModule): dep_files = stdout.split('\n')[:-1] - # In generate-dependencies mode, glib-compile-resources doesn't raise - # an error for missing resources but instead prints whatever filename - # was listed in the input file. That's good because it means we can - # handle resource files that get generated as part of the build, as - # follows. - # - # If there are multiple generated resource files with the same basename - # then this code will get confused. - def exists_in_srcdir(f): - return os.path.exists(os.path.join(state.environment.get_source_dir(), f)) - depends = [] subdirs = [] for resfile in dep_files[:]: @@ -267,21 +257,29 @@ class GnomeModule(ExtensionModule): break if fname is not None: dep_files.remove(resfile) - dep_files.append( - mesonlib.File( - is_built=True, - subdir=dep.get_subdir(), - fname=fname)) depends.append(dep) subdirs.append(dep.get_subdir()) break else: - if not exists_in_srcdir(resfile): + # In generate-dependencies mode, glib-compile-resources doesn't raise + # an error for missing resources but instead prints whatever filename + # was listed in the input file. That's good because it means we can + # handle resource files that get generated as part of the build, as + # follows. + # + # If there are multiple generated resource files with the same basename + # then this code will get confused. + try: + f = mesonlib.File.from_source_file(state.environment.get_source_dir(), + ".", resfile) + except MesonException: raise MesonException( 'Resource "%s" listed in "%s" was not found. If this is a ' 'generated file, pass the target that generates it to ' 'gnome.compile_resources() using the "dependencies" ' 'keyword argument.' 
% (resfile, input_file)) + dep_files.remove(resfile) + dep_files.append(f) return dep_files, depends, subdirs def _get_link_args(self, state, lib, depends=None, include_rpath=False, @@ -1352,7 +1350,7 @@ G_END_DECLS''' # - add relevant directories to include dirs incs = [build.IncludeDirs(state.subdir, ['.'] + vapi_includes, False)] sources = [vapi_target] + vapi_depends - rv = InternalDependency(None, incs, [], [], link_with, sources, []) + rv = InternalDependency(None, incs, [], [], link_with, [], sources, []) created_values.append(rv) return ModuleReturnValue(rv, created_values) diff --git a/mesonbuild/modules/pkgconfig.py b/mesonbuild/modules/pkgconfig.py index 5573a2e..79a4423 100644 --- a/mesonbuild/modules/pkgconfig.py +++ b/mesonbuild/modules/pkgconfig.py @@ -44,22 +44,43 @@ class DependenciesHelper: self.priv_reqs += reqs def add_pub_reqs(self, reqs): - self.pub_reqs += mesonlib.stringlistify(reqs) + self.pub_reqs += self._process_reqs(reqs) def add_priv_reqs(self, reqs): - self.priv_reqs += mesonlib.stringlistify(reqs) + self.priv_reqs += self._process_reqs(reqs) + + def _process_reqs(self, reqs): + '''Returns string names of requirements''' + processed_reqs = [] + for obj in mesonlib.listify(reqs, unholder=True): + if hasattr(obj, 'generated_pc'): + processed_reqs.append(obj.generated_pc) + elif hasattr(obj, 'pcdep'): + pcdeps = mesonlib.listify(obj.pcdep) + processed_reqs += [i.name for i in pcdeps] + elif isinstance(obj, dependencies.PkgConfigDependency): + if obj.found(): + processed_reqs.append(obj.name) + elif isinstance(obj, str): + processed_reqs.append(obj) + elif isinstance(obj, dependencies.Dependency) and not obj.found(): + pass + else: + raise mesonlib.MesonException('requires argument not a string, ' + 'library with pkgconfig-generated file ' + 'or pkgconfig-dependency object, ' + 'got {!r}'.format(obj)) + return processed_reqs def add_cflags(self, cflags): self.cflags += mesonlib.stringlistify(cflags) def _process_libs(self, libs, public): - libs = mesonlib.listify(libs) + libs = mesonlib.listify(libs, unholder=True) processed_libs = [] processed_reqs = [] processed_cflags = [] for obj in libs: - if hasattr(obj, 'held_object'): - obj = obj.held_object if hasattr(obj, 'pcdep'): pcdeps = mesonlib.listify(obj.pcdep) processed_reqs += [i.name for i in pcdeps] @@ -75,7 +96,20 @@ class DependenciesHelper: if obj.found(): processed_libs += obj.get_link_args() processed_cflags += obj.get_compile_args() - elif isinstance(obj, (build.SharedLibrary, build.StaticLibrary)): + elif isinstance(obj, build.SharedLibrary): + processed_libs.append(obj) + if public: + if not hasattr(obj, 'generated_pc'): + obj.generated_pc = self.name + elif isinstance(obj, build.StaticLibrary): + # Due to a "feature" in pkgconfig, it leaks out private dependencies. + # Thus we will not add them to the pc file unless the target + # we are processing is a static library. + # + # This way (hopefully) "pkgconfig --libs --static foobar" works + # and "pkgconfig --cflags/--libs foobar" does not have any trace + # of dependencies that the build file creator has not explicitly + # added to the dependency list. 
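
# Illustration only (not part of the patch; foobar.pc is hypothetical): the
# pkg-config behaviour the comment above relies on. Given a .pc file with
#     Libs: -lfoobar
#     Libs.private: -lm -lz
# the private entries only surface when a static link is requested:
#     pkg-config --libs foobar            ->  -lfoobar
#     pkg-config --libs --static foobar   ->  -lfoobar -lm -lz
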
processed_libs.append(obj) if public: if not hasattr(obj, 'generated_pc'): @@ -90,13 +124,20 @@ class DependenciesHelper: return processed_libs, processed_reqs, processed_cflags def remove_dups(self): - self.pub_libs = list(set(self.pub_libs)) - self.pub_reqs = list(set(self.pub_reqs)) - self.priv_libs = list(set(self.priv_libs)) - self.priv_reqs = list(set(self.priv_reqs)) - self.cflags = list(set(self.cflags)) - - # Remove from pivate libs/reqs if they are in public already + def _fn(xs): + # Remove duplicates whilst preserving original order + result = [] + for x in xs: + if x not in result: + result.append(x) + return result + self.pub_libs = _fn(self.pub_libs) + self.pub_reqs = _fn(self.pub_reqs) + self.priv_libs = _fn(self.priv_libs) + self.priv_reqs = _fn(self.priv_reqs) + self.cflags = _fn(self.cflags) + + # Remove from private libs/reqs if they are in public already self.priv_libs = [i for i in self.priv_libs if i not in self.pub_libs] self.priv_reqs = [i for i in self.priv_reqs if i not in self.pub_reqs] diff --git a/mesonbuild/modules/python3.py b/mesonbuild/modules/python3.py index 989e839..9fd9f80 100644 --- a/mesonbuild/modules/python3.py +++ b/mesonbuild/modules/python3.py @@ -12,7 +12,6 @@ # See the License for the specific language governing permissions and # limitations under the License. -import sys import sysconfig from .. import mesonlib, dependencies diff --git a/mesonbuild/modules/unstable_icestorm.py b/mesonbuild/modules/unstable_icestorm.py index 0b7b339..1f548b6 100644 --- a/mesonbuild/modules/unstable_icestorm.py +++ b/mesonbuild/modules/unstable_icestorm.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -from .. import mesonlib, compilers, mlog +from .. import mesonlib from . import ExtensionModule @@ -33,7 +33,6 @@ class IceStormModule(ExtensionModule): def project(self, interpreter, state, args, kwargs): if not self.yosys_bin: self.detect_binaries(interpreter) - result = [] if not len(args): raise mesonlib.MesonException('Project requires at least one argument, which is the project name.') proj_name = args[0] @@ -46,7 +45,7 @@ class IceStormModule(ExtensionModule): all_sources = interpreter.source_strings_to_files(interpreter.flatten(arg_sources + kwarg_sources)) if 'constraint_file' not in kwargs: raise mesonlib.MesonException('Constraint file not specified.') - + constraint_file = interpreter.source_strings_to_files(kwargs['constraint_file']) if len(constraint_file) != 1: raise mesonlib.MesonException('Constraint file must contain one and only one entry.') @@ -73,13 +72,13 @@ class IceStormModule(ExtensionModule): 'input': asc_target, 'output': bin_fname, 'command': [self.icepack_bin, '@INPUT@', '@OUTPUT@'], - 'build_by_default' : True}) + 'build_by_default': True}) - up_target = interpreter.func_run_target(None, [upload_name], { + interpreter.func_run_target(None, [upload_name], { 'command': [self.iceprog_bin, bin_target]}) - time_target = interpreter.func_run_target(None, [time_name], { - 'command' : [self.icetime_bin, bin_target]}) + interpreter.func_run_target(None, [time_name], { + 'command': [self.icetime_bin, bin_target]}) def initialize(): return IceStormModule() diff --git a/mesonbuild/mparser.py b/mesonbuild/mparser.py index 94d56e5..0e7524c 100644 --- a/mesonbuild/mparser.py +++ b/mesonbuild/mparser.py @@ -141,6 +141,10 @@ class Lexer: elif tid == 'dblquote': raise ParseException('Double quotes are not supported. 
Use single quotes.', self.getline(line_start), lineno, col)
                elif tid == 'string':
+                    # Handle here and not on the regexp to give a better error message.
+                    if match_text.find("\n") != -1:
+                        mlog.warning("""Newline character in a string detected, use ''' (three single quotes) for multiline strings instead.
+This will become a hard error in a future Meson release.""", self.getline(line_start), lineno, col)
                     value = match_text[1:-1].replace(r"\'", "'")
                     value = newline_rx.sub(r'\1\n', value)
                     value = value.replace(r" \\ ".strip(), r" \ ".strip())
diff --git a/mesonbuild/mtest.py b/mesonbuild/mtest.py
index 95e532c..4ed80b1 100644
--- a/mesonbuild/mtest.py
+++ b/mesonbuild/mtest.py
@@ -28,6 +28,7 @@ import concurrent.futures as conc
 import platform
 import signal
 import random
+from copy import deepcopy

 # GNU autotools interprets a return code of 77 from tests it executes to
 # mean that the test should be skipped.
@@ -164,6 +165,22 @@ def run_with_mono(fname):
             return True
     return False

+def load_benchmarks(build_dir):
+    datafile = os.path.join(build_dir, 'meson-private', 'meson_benchmark_setup.dat')
+    if not os.path.isfile(datafile):
+        raise TestException('Directory {!r} does not seem to be a Meson build directory.'.format(build_dir))
+    with open(datafile, 'rb') as f:
+        obj = pickle.load(f)
+    return obj
+
+def load_tests(build_dir):
+    datafile = os.path.join(build_dir, 'meson-private', 'meson_test_setup.dat')
+    if not os.path.isfile(datafile):
+        raise TestException('Directory {!r} does not seem to be a Meson build directory.'.format(build_dir))
+    with open(datafile, 'rb') as f:
+        obj = pickle.load(f)
+    return obj
+
 class TestHarness:
     def __init__(self, options):
         self.options = options
@@ -179,12 +196,10 @@ class TestHarness:
         self.logfile = None
         self.jsonlogfile = None
         if self.options.benchmark:
-            datafile = os.path.join(options.wd, 'meson-private', 'meson_benchmark_setup.dat')
+            self.tests = load_benchmarks(options.wd)
         else:
-            datafile = os.path.join(options.wd, 'meson-private', 'meson_test_setup.dat')
-        if not os.path.isfile(datafile):
-            raise TestException('Directory %s does not seem to be a Meson build directory.' % options.wd)
-        self.load_datafile(datafile)
+            self.tests = load_tests(options.wd)
+        self.load_suites()

     def __del__(self):
         if self.logfile:
@@ -192,7 +207,39 @@ class TestHarness:
         if self.jsonlogfile:
             self.jsonlogfile.close()

-    def run_single_test(self, wrap, test):
+    def merge_suite_options(self, options, test):
+        if ":" in options.setup:
+            if options.setup not in self.build_data.test_setups:
+                sys.exit("Unknown test setup '%s'." % options.setup)
+            current = self.build_data.test_setups[options.setup]
+        else:
+            full_name = test.project_name + ":" + options.setup
+            if full_name not in self.build_data.test_setups:
+                sys.exit("Test setup '%s' not found from project '%s'." % (options.setup, test.project_name))
+            current = self.build_data.test_setups[full_name]
+        if not options.gdb:
+            options.gdb = current.gdb
+        if options.timeout_multiplier is None:
+            options.timeout_multiplier = current.timeout_multiplier
+        # if options.env is None:
+        #     options.env = current.env # FIXME, should probably merge options here.
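
# Illustration only (not part of the patch; names hypothetical): the
# setup-name resolution rule implemented by merge_suite_options() above.
# An unqualified --setup name is looked up within the test's own project,
# while a "project:name" form is used verbatim.
def resolve_setup_name(setup, project_name):
    return setup if ':' in setup else project_name + ':' + setup

assert resolve_setup_name('worksforall', 'main') == 'main:worksforall'
assert resolve_setup_name('bar:onlyinbar', 'main') == 'bar:onlyinbar'
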
+        if options.wrapper is not None and current.exe_wrapper is not None:
+            sys.exit('Conflict: both test setup and command line specify an exe wrapper.')
+        if options.wrapper is None:
+            options.wrapper = current.exe_wrapper
+        return current.env.get_env(os.environ.copy())
+
+    def get_test_env(self, options, test):
+        if options.setup:
+            env = self.merge_suite_options(options, test)
+        else:
+            env = os.environ.copy()
+        if isinstance(test.env, build.EnvironmentVariables):
+            test.env = test.env.get_env(env)
+        env.update(test.env)
+        return env
+
+    def run_single_test(self, test):
         if test.fname[0].endswith('.jar'):
             cmd = ['java', '-jar'] + test.fname
         elif not test.is_cross_built and run_with_mono(test.fname[0]):
@@ -215,47 +262,64 @@ class TestHarness:
             stde = None
             returncode = GNU_SKIP_RETURNCODE
         else:
+            test_opts = deepcopy(self.options)
+            test_env = self.get_test_env(test_opts, test)
+            wrap = self.get_wrapper(test_opts)
+
+            if test_opts.gdb:
+                test.timeout = None
+
             cmd = wrap + cmd + test.cmd_args + self.options.test_args
             starttime = time.time()
-            child_env = os.environ.copy()
-            child_env.update(self.options.global_env.get_env(child_env))
-            if isinstance(test.env, build.EnvironmentVariables):
-                test.env = test.env.get_env(child_env)
-            child_env.update(test.env)
             if len(test.extra_paths) > 0:
-                child_env['PATH'] = os.pathsep.join(test.extra_paths + ['']) + child_env['PATH']
+                test_env['PATH'] = os.pathsep.join(test.extra_paths + ['']) + test_env['PATH']

             # If MALLOC_PERTURB_ is not set, or if it is set to an empty value,
             # (i.e., the test or the environment don't explicitly set it), set
-            # it ourselves. We do this unconditionally because it is extremely
-            # useful to have in tests.
+            # it ourselves. We do this unconditionally for regular tests
+            # because it is extremely useful to have.
             # Setting MALLOC_PERTURB_="0" will completely disable this feature.
-            if 'MALLOC_PERTURB_' not in child_env or not child_env['MALLOC_PERTURB_']:
-                child_env['MALLOC_PERTURB_'] = str(random.randint(1, 255))
+            if ('MALLOC_PERTURB_' not in test_env or not test_env['MALLOC_PERTURB_']) and not self.options.benchmark:
+                test_env['MALLOC_PERTURB_'] = str(random.randint(1, 255))

-            setsid = None
             stdout = None
             stderr = None
             if not self.options.verbose:
                 stdout = subprocess.PIPE
                 stderr = subprocess.PIPE if self.options and self.options.split else subprocess.STDOUT
-            if not is_windows():
-                setsid = os.setsid
+            # Let gdb handle ^C instead of us
+            if test_opts.gdb:
+                previous_sigint_handler = signal.getsignal(signal.SIGINT)
+                # Make the meson executable ignore SIGINT while gdb is running.
+                signal.signal(signal.SIGINT, signal.SIG_IGN)
+
+            def preexec_fn():
+                if test_opts.gdb:
+                    # Restore the SIGINT handler for the child process to
+                    # ensure it can handle it.
+                    signal.signal(signal.SIGINT, signal.SIG_DFL)
+                else:
+                    # We don't want setsid() under gdb: gdb needs the terminal
+                    # in order to handle ^C; with setsid() it would show
+                    # tcsetpgrp() errors and be unable to use the terminal.
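
# Illustration only (not part of the patch; POSIX-only sketch): what
# os.setsid() in preexec_fn buys the harness. The test gets its own session
# and process group, so a timed-out test and everything it spawned can be
# killed together, roughly:
#     p = subprocess.Popen(cmd, preexec_fn=os.setsid)
#     os.killpg(os.getpgid(p.pid), signal.SIGKILL)
# Under --gdb this is skipped so that gdb keeps the controlling terminal.
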
+ os.setsid() p = subprocess.Popen(cmd, stdout=stdout, stderr=stderr, - env=child_env, + env=test_env, cwd=test.workdir, - preexec_fn=setsid) + preexec_fn=preexec_fn if not is_windows() else None) timed_out = False kill_test = False if test.timeout is None: timeout = None + elif test_opts.timeout_multiplier is not None: + timeout = test.timeout * test_opts.timeout_multiplier else: - timeout = test.timeout * self.options.timeout_multiplier + timeout = test.timeout try: (stdo, stde) = p.communicate(timeout=timeout) except subprocess.TimeoutExpired: @@ -265,6 +329,10 @@ class TestHarness: except KeyboardInterrupt: mlog.warning("CTRL-C detected while running %s" % (test.name)) kill_test = True + finally: + if test_opts.gdb: + # Let us accept ^C again + signal.signal(signal.SIGINT, previous_sigint_handler) if kill_test or timed_out: # Python does not provide multiplatform support for @@ -361,9 +429,6 @@ TIMEOUT: %4d def doit(self): if self.is_run: raise RuntimeError('Test harness object can only be used once.') - if not os.path.isfile(self.datafile): - print('Test data file. Probably this means that you did not run this in the build directory.') - return 1 self.is_run = True tests = self.get_tests() if not tests: @@ -402,15 +467,6 @@ TIMEOUT: %4d ss.add(s) self.suites = list(ss) - def load_tests(self): - with open(self.datafile, 'rb') as f: - self.tests = pickle.load(f) - - def load_datafile(self, datafile): - self.datafile = datafile - self.load_tests() - self.load_suites() - def get_tests(self): if not self.tests: print('No tests defined.') @@ -444,31 +500,31 @@ TIMEOUT: %4d logfile_base = os.path.join(self.options.wd, 'meson-logs', self.options.logbase) if self.options.wrapper: - namebase = os.path.basename(self.get_wrapper()[0]) + namebase = os.path.basename(self.get_wrapper(self.options)[0]) elif self.options.setup: - namebase = self.options.setup + namebase = self.options.setup.replace(":", "_") if namebase: logfile_base += '-' + namebase.replace(' ', '_') self.logfilename = logfile_base + '.txt' self.jsonlogfilename = logfile_base + '.json' - self.jsonlogfile = open(self.jsonlogfilename, 'w') - self.logfile = open(self.logfilename, 'w') + self.jsonlogfile = open(self.jsonlogfilename, 'w', encoding='utf-8') + self.logfile = open(self.logfilename, 'w', encoding='utf-8') self.logfile.write('Log of Meson test suite run on %s\n\n' % datetime.datetime.now().isoformat()) - def get_wrapper(self): + def get_wrapper(self, options): wrap = [] - if self.options.gdb: + if options.gdb: wrap = ['gdb', '--quiet', '--nh'] - if self.options.repeat > 1: + if options.repeat > 1: wrap += ['-ex', 'run', '-ex', 'quit'] # Signal the end of arguments to gdb wrap += ['--args'] - if self.options.wrapper: - wrap += self.options.wrapper + if options.wrapper: + wrap += options.wrapper assert(isinstance(wrap, list)) return wrap @@ -487,28 +543,25 @@ TIMEOUT: %4d futures = [] numlen = len('%d' % len(tests)) self.open_log_files() - wrap = self.get_wrapper() startdir = os.getcwd() if self.options.wd: os.chdir(self.options.wd) + self.build_data = build.load(os.getcwd()) try: for _ in range(self.options.repeat): for i, test in enumerate(tests): visible_name = self.get_pretty_suite(test) - if self.options.gdb: - test.timeout = None - if not test.is_parallel or self.options.gdb: self.drain_futures(futures) futures = [] - res = self.run_single_test(wrap, test) + res = self.run_single_test(test) self.print_stats(numlen, tests, visible_name, res, i) else: if not executor: executor = 
conc.ThreadPoolExecutor(max_workers=self.options.num_processes) - f = executor.submit(self.run_single_test, wrap, test) + f = executor.submit(self.run_single_test, test) futures.append((f, numlen, tests, visible_name, i)) if self.options.repeat > 1 and self.fail_count: break @@ -549,26 +602,6 @@ def list_tests(th): for t in tests: print(th.get_pretty_suite(t)) -def merge_suite_options(options): - buildfile = os.path.join(options.wd, 'meson-private/build.dat') - with open(buildfile, 'rb') as f: - build = pickle.load(f) - setups = build.test_setups - if options.setup not in setups: - sys.exit('Unknown test setup: %s' % options.setup) - current = setups[options.setup] - if not options.gdb: - options.gdb = current.gdb - if options.timeout_multiplier is None: - options.timeout_multiplier = current.timeout_multiplier -# if options.env is None: -# options.env = current.env # FIXME, should probably merge options here. - if options.wrapper is not None and current.exe_wrapper is not None: - sys.exit('Conflict: both test setup and command line specify an exe wrapper.') - if options.wrapper is None: - options.wrapper = current.exe_wrapper - return current.env - def rebuild_all(wd): if not os.path.isfile(os.path.join(wd, 'build.ninja')): print("Only ninja backend is supported to rebuild tests before running them.") @@ -594,15 +627,6 @@ def run(args): if options.benchmark: options.num_processes = 1 - if options.setup is not None: - global_env = merge_suite_options(options) - else: - global_env = build.EnvironmentVariables() - if options.timeout_multiplier is None: - options.timeout_multiplier = 1 - - setattr(options, 'global_env', global_env) - if options.verbose and options.quiet: print('Can not be both quiet and verbose at the same time.') return 1 diff --git a/mesonbuild/optinterpreter.py b/mesonbuild/optinterpreter.py index d4ea06a..b4156ff 100644 --- a/mesonbuild/optinterpreter.py +++ b/mesonbuild/optinterpreter.py @@ -78,7 +78,7 @@ def BooleanParser(name, description, kwargs): kwargs.get('value', True), kwargs.get('yield', coredata.default_yielding)) -@permitted_kwargs({'value', 'yiel', 'choices'}) +@permitted_kwargs({'value', 'yield', 'choices'}) def ComboParser(name, description, kwargs): if 'choices' not in kwargs: raise OptionException('Combo option missing "choices" keyword.') @@ -154,6 +154,30 @@ class OptionInterpreter: continue self.cmd_line_options[key] = value + def get_bad_options(self): + subproj_len = len(self.subproject) + if subproj_len > 0: + subproj_len += 1 + retval = [] + # The options need to be sorted (e.g. here) to get consistent + # error messages (on all platforms) which is required by some test + # cases that check (also) the order of these options. 
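
# Illustration only (not part of the patch; names hypothetical): how
# get_bad_options() above strips the "subproject:" prefix before checking the
# forbidden prefixes (the forbidden_prefixes list itself is not shown here).
subproject = 'mysub'
option = 'mysub:some_option'
subproj_len = len(subproject) + 1   # +1 accounts for the ':' separator
stripped = option[subproj_len:]     # -> 'some_option'
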
+ for option in sorted(self.cmd_line_options): + if option in list(self.options) + forbidden_option_names: + continue + if any(option[subproj_len:].startswith(p) for p in forbidden_prefixes): + continue + retval += [option] + return retval + + def check_for_bad_options(self): + bad = self.get_bad_options() + if bad: + sub = 'In subproject {}: '.format(self.subproject) if self.subproject else '' + mlog.warning( + '{}Unknown command line options: "{}"\n' + 'This will become a hard error in a future Meson release.'.format(sub, ', '.join(bad))) + def process(self, option_file): try: with open(option_file, 'r', encoding='utf8') as f: @@ -173,14 +197,7 @@ class OptionInterpreter: e.colno = cur.colno e.file = os.path.join('meson_options.txt') raise e - bad = [o for o in sorted(self.cmd_line_options) if not - (o in list(self.options) + forbidden_option_names or - any(o.startswith(p) for p in forbidden_prefixes))] - if bad: - sub = 'In subproject {}: '.format(self.subproject) if self.subproject else '' - mlog.warning( - '{}Unknown command line options: "{}"\n' - 'This will become a hard error in a future Meson release.'.format(sub, ', '.join(bad))) + self.check_for_bad_options() def reduce_single(self, arg): if isinstance(arg, str): diff --git a/mesonbuild/rewriter.py b/mesonbuild/rewriter.py index 0191c30..fad7ba0 100644 --- a/mesonbuild/rewriter.py +++ b/mesonbuild/rewriter.py @@ -54,11 +54,7 @@ def run(args): sys.exit('Unknown command: ' + options.commands[0]) except Exception as e: if isinstance(e, MesonException): - if hasattr(e, 'file') and hasattr(e, 'lineno') and hasattr(e, 'colno'): - mlog.log(mlog.red('\nMeson encountered an error in file %s, line %d, column %d:' % (e.file, e.lineno, e.colno))) - else: - mlog.log(mlog.red('\nMeson encountered an error:')) - mlog.log(e) + mlog.exception(e) else: traceback.print_exc() return 1 diff --git a/mesonbuild/scripts/coverage.py b/mesonbuild/scripts/coverage.py index 47f4cda..2d1f8c3 100644 --- a/mesonbuild/scripts/coverage.py +++ b/mesonbuild/scripts/coverage.py @@ -17,15 +17,20 @@ from mesonbuild import environment import sys, os, subprocess, pathlib def coverage(source_root, build_root, log_dir): - (gcovr_exe, lcov_exe, genhtml_exe) = environment.find_coverage_tools() + (gcovr_exe, gcovr_new_rootdir, lcov_exe, genhtml_exe) = environment.find_coverage_tools() if gcovr_exe: + # gcovr >= 3.1 interprets rootdir differently + if gcovr_new_rootdir: + rootdir = build_root + else: + rootdir = source_root subprocess.check_call([gcovr_exe, '-x', - '-r', source_root, + '-r', rootdir, '-o', os.path.join(log_dir, 'coverage.xml'), ]) subprocess.check_call([gcovr_exe, - '-r', source_root, + '-r', rootdir, '-o', os.path.join(log_dir, 'coverage.txt'), ]) if lcov_exe and genhtml_exe: @@ -65,13 +70,21 @@ def coverage(source_root, build_root, log_dir): '--show-details', '--branch-coverage', covinfo]) + elif gcovr_exe and gcovr_new_rootdir: + htmloutdir = os.path.join(log_dir, 'coveragereport') + subprocess.check_call([gcovr_exe, + '--html', + '--html-details', + '-r', build_root, + '-o', os.path.join(htmloutdir, 'index.html'), + ]) if gcovr_exe: print('') print('XML coverage report can be found at', pathlib.Path(log_dir, 'coverage.xml').as_uri()) print('Text coverage report can be found at', pathlib.Path(log_dir, 'coverage.txt').as_uri()) - if lcov_exe and genhtml_exe: + if (lcov_exe and genhtml_exe) or (gcovr_exe and gcovr_new_rootdir): print('Html coverage report can be found at', pathlib.Path(htmloutdir, 'index.html').as_uri()) return 0 diff --git 
a/mesonbuild/scripts/meson_install.py b/mesonbuild/scripts/meson_install.py index cbc782d..1414ace 100644 --- a/mesonbuild/scripts/meson_install.py +++ b/mesonbuild/scripts/meson_install.py @@ -97,6 +97,10 @@ def restore_selinux_contexts(): # is ignored quietly. return + if not shutil.which('restorecon'): + # If we don't have restorecon, failure is ignored quietly. + return + with subprocess.Popen(['restorecon', '-F', '-f-', '-0'], stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE) as proc: out, err = proc.communicate(input=b'\0'.join(os.fsencode(f) for f in selinux_updates) + b'\0') @@ -283,7 +287,7 @@ def run_install_script(d): rc = subprocess.call(script + args, env=child_env) if rc != 0: sys.exit(rc) - except: + except OSError: print('Failed to run install script {!r}'.format(name)) sys.exit(1) diff --git a/mesonbuild/scripts/yelphelper.py b/mesonbuild/scripts/yelphelper.py index ab99267..0f8b0b8 100644 --- a/mesonbuild/scripts/yelphelper.py +++ b/mesonbuild/scripts/yelphelper.py @@ -17,6 +17,7 @@ import subprocess import shutil import argparse from .. import mlog +from ..mesonlib import has_path_sep from . import destdir_join from .gettext import read_linguas @@ -79,7 +80,7 @@ def install_help(srcdir, blddir, sources, media, langs, install_dir, destdir, pr elif symlinks: srcfile = os.path.join(c_install_dir, m) mlog.log('Symlinking %s to %s.' % (outfile, srcfile)) - if '/' in m or '\\' in m: + if has_path_sep(m): os.makedirs(os.path.dirname(outfile), exist_ok=True) try: try: @@ -94,7 +95,7 @@ def install_help(srcdir, blddir, sources, media, langs, install_dir, destdir, pr # Lang doesn't have media file so copy it over 'C' one infile = os.path.join(srcdir, 'C', m) mlog.log('Installing %s to %s' % (infile, outfile)) - if '/' in m or '\\' in m: + if has_path_sep(m): os.makedirs(os.path.dirname(outfile), exist_ok=True) shutil.copyfile(infile, outfile) shutil.copystat(infile, outfile) diff --git a/mesonbuild/wrap/wrap.py b/mesonbuild/wrap/wrap.py index bd440a1..54a928e 100644 --- a/mesonbuild/wrap/wrap.py +++ b/mesonbuild/wrap/wrap.py @@ -165,17 +165,21 @@ class Resolver: return False # Submodule has not been added, add it if out.startswith(b'+'): - mlog.warning('submodule {} might be out of date'.format(dirname)) + mlog.warning('git submodule {} might be out of date'.format(dirname)) return True elif out.startswith(b'U'): raise RuntimeError('submodule {} has merge conflicts'.format(dirname)) + # Submodule exists, but is deinitialized or wasn't initialized elif out.startswith(b'-'): - if subprocess.call(['git', '-C', self.subdir_root, 'submodule', 'update', '--init', dirname]) != 0: - return False - # Submodule was added already, but it wasn't populated. Do a checkout. - elif out.startswith(b' '): - if subprocess.call(['git', 'checkout', '.'], cwd=dirname): + if subprocess.call(['git', '-C', self.subdir_root, 'submodule', 'update', '--init', dirname]) == 0: return True + raise RuntimeError('Failed to git submodule init {!r}'.format(dirname)) + # Submodule looks fine, but maybe it wasn't populated properly. Do a checkout. + elif out.startswith(b' '): + subprocess.call(['git', 'checkout', '.'], cwd=dirname) + # Even if checkout failed, try building it anyway and let the user + # handle any problems manually. 
+ return True m = 'Unknown git submodule output: {!r}' raise RuntimeError(m.format(out)) diff --git a/mesonconf.py b/mesonconf.py index d1874e0..894ec01 100755 --- a/mesonconf.py +++ b/mesonconf.py @@ -14,10 +14,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -from mesonbuild import mesonmain import sys if __name__ == '__main__': - print('Warning: This executable is deprecated. Use "meson configure" instead.', - file=sys.stderr) - sys.exit(mesonmain.run(['configure'] + sys.argv[1:])) + sys.exit('Error: This executable is no more. Use "meson configure" instead.') diff --git a/mesonintrospect.py b/mesonintrospect.py index 5cc07bf..9ef1535 100755 --- a/mesonintrospect.py +++ b/mesonintrospect.py @@ -14,10 +14,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -from mesonbuild import mesonmain import sys if __name__ == '__main__': - print('Warning: This executable is deprecated. Use "meson introspect" instead.', - file=sys.stderr) - sys.exit(mesonmain.run(['introspect'] + sys.argv[1:])) + sys.exit('Error: This executable is no more. Use "meson introspect" instead.') diff --git a/mesonrewriter.py b/mesonrewriter.py index e6f2637..ef47e57 100755 --- a/mesonrewriter.py +++ b/mesonrewriter.py @@ -23,10 +23,7 @@ # - move targets # - reindent? -from mesonbuild import mesonmain import sys if __name__ == '__main__': - print('Warning: This executable is deprecated. Use "meson rewrite" instead.', - file=sys.stderr) - sys.exit(mesonmain.run(['rewrite'] + sys.argv[1:])) + sys.exit('Error: This executable is no more. Use "meson rewrite" instead.') diff --git a/mesontest.py b/mesontest.py index c2d39d6..e973d56 100755 --- a/mesontest.py +++ b/mesontest.py @@ -16,10 +16,7 @@ # A tool to run tests in many different ways. -from mesonbuild import mesonmain import sys if __name__ == '__main__': - print('Warning: This executable is deprecated. Use "meson test" instead.', - file=sys.stderr) - sys.exit(mesonmain.run(['test'] + sys.argv[1:])) + sys.exit('Error: This executable is no more. 
Use "meson test" instead.') diff --git a/msi/createmsi.py b/msi/createmsi.py index 3ea0958..499f4b0 100755 --- a/msi/createmsi.py +++ b/msi/createmsi.py @@ -50,10 +50,14 @@ class PackageGenerator: self.staging_dirs = ['dist', 'dist2'] if self.bytesize == 64: self.progfile_dir = 'ProgramFiles64Folder' - self.redist_path = 'C:\\Program Files (x86)\\Microsoft Visual Studio\\2017\\Community\\VC\\Redist\\MSVC\\14.11.25325\\MergeModules\\Microsoft_VC141_CRT_x64.msm' + redist_glob = 'C:\\Program Files (x86)\\Microsoft Visual Studio\\2017\\Community\\VC\\Redist\\MSVC\\*\\MergeModules\\Microsoft_VC141_CRT_x64.msm' else: self.progfile_dir = 'ProgramFilesFolder' - self.redist_path = 'C:\\Program Files\\Microsoft Visual Studio\\2017\\Community\\VC\\Redist\\MSVC\\14.11.25325\\MergeModules\\Microsoft_VC141_CRT_x86.msm' + redist_glob = 'C:\\Program Files\\Microsoft Visual Studio\\2017\\Community\\VC\\Redist\\MSVC\\*\\MergeModules\\Microsoft_VC141_CRT_x86.msm' + trials = glob(redist_glob) + if len(trials) != 1: + sys.exit('There are more than one potential redist dirs.') + self.redist_path = trials[0] self.component_num = 0 self.feature_properties = { self.staging_dirs[0]: { @@ -149,7 +153,7 @@ class PackageGenerator: 'SourceFile': self.redist_path, 'DiskId': '1', 'Language': '0', - }) + }) ET.SubElement(product, 'Property', { 'Id': 'WIXUI_INSTALLDIR', @@ -181,7 +185,7 @@ class PackageGenerator: 'AllowAdvertise': 'no', 'Display': 'hidden', 'Level': '1', - }) + }) ET.SubElement(vcredist_feature, 'MergeRef', {'Id': 'VCRedist'}) ET.ElementTree(self.root).write(self.main_xml, encoding='utf-8', xml_declaration=True) # ElementTree can not do prettyprinting so do it manually @@ -219,7 +223,6 @@ class PackageGenerator: }) self.component_num += 1 for f in cur_node.files: - file_source = os.path.join(current_dir, f).replace('\\', '\\\\') file_id = os.path.join(current_dir, f).replace('\\', '_').replace('#', '_').replace('-', '_') ET.SubElement(comp_xml_node, 'File', { 'Id': file_id, @@ -253,6 +256,7 @@ class PackageGenerator: if __name__ == '__main__': if not os.path.exists('meson.py'): sys.exit(print('Run me in the top level source dir.')) + subprocess.check_call(['pip', 'install', '--upgrade', 'cx_freeze']) p = PackageGenerator() p.build_dist() diff --git a/run_project_tests.py b/run_project_tests.py index 1d17000..a1d36ef 100755 --- a/run_project_tests.py +++ b/run_project_tests.py @@ -77,7 +77,8 @@ class AutoDeletedDir: failing_logs = [] print_debug = 'MESON_PRINT_TEST_OUTPUT' in os.environ -do_debug = not {'MESON_PRINT_TEST_OUTPUT', 'TRAVIS', 'APPVEYOR'}.isdisjoint(os.environ) +under_ci = not {'TRAVIS', 'APPVEYOR'}.isdisjoint(os.environ) +do_debug = under_ci or print_debug no_meson_log_msg = 'No meson-log.txt found.' 
system_compiler = None @@ -108,8 +109,6 @@ def setup_commands(optbackend): if backend is None: if msbuild_exe is not None: backend = 'vs' # Meson will auto-detect VS version to use - elif mesonlib.is_osx(): - backend = 'xcode' else: backend = 'ninja' # Set backend arguments for Meson @@ -321,9 +320,12 @@ def _run_test(testdir, test_build_dir, install_dir, extra_args, compiler, backen mesonlog = no_meson_log_msg gen_time = time.time() - gen_start if should_fail == 'meson': - if returncode != 0: + if returncode == 1: return TestResult('', BuildStep.configure, stdo, stde, mesonlog, gen_time) - return TestResult('Test that should have failed succeeded', BuildStep.configure, stdo, stde, mesonlog, gen_time) + elif returncode != 0: + return TestResult('Test exited with unexpected status {}'.format(returncode), BuildStep.configure, stdo, stde, mesonlog, gen_time) + else: + return TestResult('Test that should have failed succeeded', BuildStep.configure, stdo, stde, mesonlog, gen_time) if returncode != 0: return TestResult('Generating the build system failed.', BuildStep.configure, stdo, stde, mesonlog, gen_time) # Touch the meson.build file to force a regenerate so we can test that @@ -434,6 +436,55 @@ def have_java(): return True return False +def skippable(suite, test): + if not under_ci: + return True + + if not suite.endswith('frameworks'): + return True + + # gtk-doc test may be skipped, pending upstream fixes for spaces in + # filenames landing in the distro used for CI + if test.endswith('10 gtk-doc'): + return True + + # No frameworks test should be skipped on linux CI, as we expect all + # prerequisites to be installed + if mesonlib.is_linux(): + return False + + # Boost test should only be skipped for windows CI build matrix entries + # which don't define BOOST_ROOT + if test.endswith('1 boost'): + if mesonlib.is_windows(): + return 'BOOST_ROOT' not in os.environ + return False + + # Other framework tests are allowed to be skipped on other platforms + return True + +def skip_csharp(backend): + if backend is not Backend.ninja: + return True + if not shutil.which('resgen'): + return True + if shutil.which('mcs'): + return False + if shutil.which('csc'): + # Only support VS2017 for now. Earlier versions fail + # under CI in mysterious ways. + try: + stdo = subprocess.check_output(['csc', '/version']) + except subprocess.CalledProcessError: + return True + # Having incrementing version numbers would be too easy. + # Microsoft reset the versioning back to 1.0 (from 4.x) + # when they got the Roslyn based compiler. Thus there + # is NO WAY to reliably do version number comparisons. + # Only support the version that ships with VS2017. + return not stdo.startswith(b'2.') + return True + def detect_tests_to_run(): # Name, subdirectory, skip condition. 
all_tests = [ @@ -447,7 +498,7 @@ def detect_tests_to_run(): ('platform-linux', 'linuxlike', mesonlib.is_osx() or mesonlib.is_windows()), ('java', 'java', backend is not Backend.ninja or mesonlib.is_osx() or not have_java()), - ('C#', 'csharp', backend is not Backend.ninja or not shutil.which('mcs')), + ('C#', 'csharp', skip_csharp(backend)), ('vala', 'vala', backend is not Backend.ninja or not shutil.which('valac')), ('rust', 'rust', backend is not Backend.ninja or not shutil.which('rustc')), ('d', 'd', backend is not Backend.ninja or not have_d_compiler()), @@ -456,20 +507,10 @@ def detect_tests_to_run(): ('fortran', 'fortran', backend is not Backend.ninja or not shutil.which('gfortran')), ('swift', 'swift', backend not in (Backend.ninja, Backend.xcode) or not shutil.which('swiftc')), ('python3', 'python3', backend is not Backend.ninja), + ('fpga', 'fpga', shutil.which('yosys') is None), + ('frameworks', 'frameworks', False), ] gathered_tests = [(name, gather_tests('test cases/' + subdir), skip) for name, subdir, skip in all_tests] - if mesonlib.is_windows(): - # TODO: Set BOOST_ROOT in .appveyor.yml - gathered_tests += [('framework', ['test cases/frameworks/1 boost'], 'BOOST_ROOT' not in os.environ)] - elif mesonlib.is_osx(): - if os.path.exists('/usr/local/include/boost'): - # Just do the BOOST test - gathered_tests += [('framework', ['test cases/frameworks/1 boost'], False)] - elif mesonlib.is_cygwin(): - # Just do the BOOST test - gathered_tests += [('framework', ['test cases/frameworks/1 boost'], False)] - else: - gathered_tests += [('framework', gather_tests('test cases/frameworks'), False)] return gathered_tests def run_tests(all_tests, log_name_base, extra_args): @@ -528,7 +569,7 @@ def _run_tests(all_tests, log_name_base, extra_args): for (testname, t, result) in futures: sys.stdout.flush() result = result.result() - if result is None or 'MESON_SKIP_TEST' in result.stdo: + if (result is None) or (('MESON_SKIP_TEST' in result.stdo) and (skippable(name, t))): print(yellow('Skipping:'), t) current_test = ET.SubElement(current_suite, 'testcase', {'name': testname, 'classname': name}) diff --git a/run_unittests.py b/run_unittests.py index 103847a..94e39c8 100755 --- a/run_unittests.py +++ b/run_unittests.py @@ -21,22 +21,23 @@ import tempfile import textwrap import os import shutil -import sys import unittest from unittest import mock from configparser import ConfigParser from glob import glob -from pathlib import PurePath +from pathlib import (PurePath, Path) import mesonbuild.mlog import mesonbuild.compilers import mesonbuild.environment import mesonbuild.mesonlib import mesonbuild.coredata +import mesonbuild.modules.gnome from mesonbuild.interpreter import ObjectHolder from mesonbuild.mesonlib import ( - is_linux, is_windows, is_osx, is_cygwin, is_dragonflybsd, + is_windows, is_osx, is_cygwin, is_dragonflybsd, windows_proof_rmtree, python_command, meson_command, version_compare, + BuildDirLock ) from mesonbuild.environment import Environment, detect_ninja from mesonbuild.mesonlib import MesonException, EnvironmentException @@ -49,6 +50,9 @@ from run_tests import should_run_linux_cross_tests def get_dynamic_section_entry(fname, entry): + if is_cygwin() or is_osx(): + raise unittest.SkipTest('Test only applicable to ELF platforms') + try: raw_out = subprocess.check_output(['readelf', '-d', fname], universal_newlines=True) @@ -65,9 +69,11 @@ def get_dynamic_section_entry(fname, entry): def get_soname(fname): return get_dynamic_section_entry(fname, 'soname') + def get_rpath(fname): 
return get_dynamic_section_entry(fname, r'(?:rpath|runpath)') + class InternalTests(unittest.TestCase): def test_version_number(self): @@ -426,6 +432,21 @@ class InternalTests(unittest.TestCase): kwargs = {'sources': [1, 2, 3], 'pch_sources': [4, 5, 6]} self.assertEqual([[1, 2, 3], [4, 5, 6]], extract(kwargs, 'sources', 'pch_sources')) + def test_snippets(self): + hashcounter = re.compile('^ *(#)+') + snippet_dir = Path('docs/markdown/snippets') + self.assertTrue(snippet_dir.is_dir()) + for f in snippet_dir.glob('*'): + self.assertTrue(f.is_file()) + if f.suffix == '.md': + for line in f.open(): + m = re.match(hashcounter, line) + if m: + self.assertEqual(len(m.group(0)), 2, 'All headings in snippets must have two hash symbols: ' + f.name) + else: + if f.name != 'add_release_note_snippets_here': + self.assertTrue(False, 'A file without .md suffix in snippets dir: ' + f.name) + class BasePlatformTests(unittest.TestCase): def setUp(self): @@ -464,10 +485,8 @@ class BasePlatformTests(unittest.TestCase): self.builddirs = [] self.new_builddir() - def new_builddir(self): - # In case the directory is inside a symlinked directory, find the real - # path otherwise we might not find the srcdir from inside the builddir. - self.builddir = os.path.realpath(tempfile.mkdtemp()) + def change_builddir(self, newdir): + self.builddir = newdir self.privatedir = os.path.join(self.builddir, 'meson-private') self.logdir = os.path.join(self.builddir, 'meson-logs') self.installdir = os.path.join(self.builddir, 'install') @@ -475,6 +494,12 @@ class BasePlatformTests(unittest.TestCase): self.mtest_command = meson_command + ['test', '-C', self.builddir] self.builddirs.append(self.builddir) + def new_builddir(self): + # In case the directory is inside a symlinked directory, find the real + # path otherwise we might not find the srcdir from inside the builddir. 
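
# Illustration only (not part of the patch): the heading rule test_snippets()
# above enforces for docs/markdown/snippets -- headings must use exactly two
# hash characters.
import re
hashcounter = re.compile('^ *(#)+')
assert len(hashcounter.match('## New feature').group(0)) == 2   # accepted
assert len(hashcounter.match('### Too deep').group(0)) != 2     # rejected
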
+ newdir = os.path.realpath(tempfile.mkdtemp()) + self.change_builddir(newdir) + def _print_meson_log(self): log = os.path.join(self.logdir, 'meson-log.txt') if not os.path.isfile(log): @@ -489,7 +514,8 @@ class BasePlatformTests(unittest.TestCase): windows_proof_rmtree(path) except FileNotFoundError: pass - os.environ = self.orig_env + os.environ.clear() + os.environ.update(self.orig_env) super().tearDown() def _run(self, command, workdir=None): @@ -521,7 +547,16 @@ class BasePlatformTests(unittest.TestCase): self.privatedir = os.path.join(self.builddir, 'meson-private') if inprocess: try: - out = run_configure(self.meson_mainfile, self.meson_args + args + extra_args)[1] + (returncode, out, err) = run_configure(self.meson_mainfile, self.meson_args + args + extra_args) + if 'MESON_SKIP_TEST' in out: + raise unittest.SkipTest('Project requested skipping.') + if returncode != 0: + self._print_meson_log() + print('Stdout:\n') + print(out) + print('Stderr:\n') + print(err) + raise RuntimeError('Configure failed') except: self._print_meson_log() raise @@ -555,10 +590,11 @@ class BasePlatformTests(unittest.TestCase): def run_tests(self): self._run(self.test_command, workdir=self.builddir) - def install(self): + def install(self, *, use_destdir=True): if self.backend is not Backend.ninja: raise unittest.SkipTest('{!r} backend can\'t install files'.format(self.backend.name)) - os.environ['DESTDIR'] = self.installdir + if use_destdir: + os.environ['DESTDIR'] = self.installdir self._run(self.install_command, workdir=self.builddir) def uninstall(self): @@ -941,6 +977,31 @@ class AllPlatformTests(BasePlatformTests): # Setup with only a timeout works self._run(self.mtest_command + ['--setup=timeout']) + def test_testsetup_selection(self): + testdir = os.path.join(self.unit_test_dir, '13 testsetup selection') + self.init(testdir) + self.build() + + # Run tests without setup + self.run_tests() + + self.assertRaises(subprocess.CalledProcessError, self._run, self.mtest_command + ['--setup=missingfromfoo']) + self._run(self.mtest_command + ['--setup=missingfromfoo', '--no-suite=foo:']) + + self._run(self.mtest_command + ['--setup=worksforall']) + self._run(self.mtest_command + ['--setup=main:worksforall']) + + self.assertRaises(subprocess.CalledProcessError, self._run, + self.mtest_command + ['--setup=onlyinbar']) + self.assertRaises(subprocess.CalledProcessError, self._run, + self.mtest_command + ['--setup=onlyinbar', '--no-suite=main:']) + self._run(self.mtest_command + ['--setup=onlyinbar', '--no-suite=main:', '--no-suite=foo:']) + self._run(self.mtest_command + ['--setup=bar:onlyinbar']) + self.assertRaises(subprocess.CalledProcessError, self._run, + self.mtest_command + ['--setup=foo:onlyinbar']) + self.assertRaises(subprocess.CalledProcessError, self._run, + self.mtest_command + ['--setup=main:onlyinbar']) + def assertFailedTestCount(self, failure_count, command): try: self._run(command) @@ -1323,9 +1384,9 @@ class AllPlatformTests(BasePlatformTests): subprocess.check_call(['git', 'config', 'user.email', 'teh_coderz@example.com'], cwd=project_dir) subprocess.check_call(['git', 'add', 'meson.build', 'distexe.c'], cwd=project_dir, - stdout=subprocess.DEVNULL) + stdout=subprocess.DEVNULL) subprocess.check_call(['git', 'commit', '-a', '-m', 'I am a project'], cwd=project_dir, - stdout=subprocess.DEVNULL) + stdout=subprocess.DEVNULL) try: self.dist_impl(git_init) @@ -1466,7 +1527,6 @@ int main(int argc, char **argv) { cmd += ['-c', source, '-o', objectfile] + extra_args subprocess.check_call(cmd, 
stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL) - def test_prebuilt_object(self): (compiler, _, object_suffix, _) = self.detect_prebuild_env() tdir = os.path.join(self.unit_test_dir, '14 prebuilt object') @@ -1559,9 +1619,13 @@ int main(int argc, char **argv) { def test_pkgconfig_static(self): ''' - Test that the we only use static libraries when `static: true` is + Test that the we prefer static libraries when `static: true` is passed to dependency() with pkg-config. Can't be an ordinary test because we need to build libs and try to find them from meson.build + + Also test that it's not a hard error to have unsatisfiable library deps + since system libraries -lm will never be found statically. + https://github.com/mesonbuild/meson/issues/2785 ''' if not shutil.which('pkg-config'): raise unittest.SkipTest('pkg-config not found') @@ -1723,6 +1787,16 @@ int main(int argc, char **argv) { ]: self.assertRegex(out, re.escape(expected)) + def test_permitted_method_kwargs(self): + tdir = os.path.join(self.unit_test_dir, '23 non-permitted kwargs') + out = self.init(tdir) + for expected in [ + r'WARNING: Passed invalid keyword argument "prefixxx".', + r'WARNING: Passed invalid keyword argument "argsxx".', + r'WARNING: Passed invalid keyword argument "invalidxx".', + ]: + self.assertRegex(out, re.escape(expected)) + def test_templates(self): ninja = detect_ninja() if ninja is None: @@ -1736,6 +1810,104 @@ int main(int argc, char **argv) { workdir=tmpdir) self._run(ninja, workdir=os.path.join(tmpdir, 'builddir')) + with tempfile.TemporaryDirectory() as tmpdir: + open(os.path.join(tmpdir, 'foo.' + lang), 'w').write('int main() {}') + self._run(meson_command + ['init', '-b'], workdir=tmpdir) + + # The test uses mocking and thus requires that + # the current process is the one to run the Meson steps. + # If we are using an external test executable (most commonly + # in Debian autopkgtests) then the mocking won't work. 
+ @unittest.skipIf('MESON_EXE' in os.environ, 'MESON_EXE is defined, can not use mocking.') + def test_cross_file_system_paths(self): + if is_windows(): + raise unittest.SkipTest('system crossfile paths not defined for Windows (yet)') + + testdir = os.path.join(self.common_test_dir, '1 trivial') + cross_content = textwrap.dedent("""\ + [binaries] + c = '/usr/bin/cc' + ar = '/usr/bin/ar' + strip = '/usr/bin/ar' + + [properties] + + [host_machine] + system = 'linux' + cpu_family = 'x86' + cpu = 'i686' + endian = 'little' + """) + + with tempfile.TemporaryDirectory() as d: + dir_ = os.path.join(d, 'meson', 'cross') + os.makedirs(dir_) + with tempfile.NamedTemporaryFile('w', dir=dir_, delete=False) as f: + f.write(cross_content) + name = os.path.basename(f.name) + + with mock.patch.dict(os.environ, {'XDG_DATA_HOME': d}): + self.init(testdir, ['--cross-file=' + name], inprocess=True) + self.wipe() + + with mock.patch.dict(os.environ, {'XDG_DATA_DIRS': d}): + os.environ.pop('XDG_DATA_HOME', None) + self.init(testdir, ['--cross-file=' + name], inprocess=True) + self.wipe() + + with tempfile.TemporaryDirectory() as d: + dir_ = os.path.join(d, '.local', 'share', 'meson', 'cross') + os.makedirs(dir_) + with tempfile.NamedTemporaryFile('w', dir=dir_, delete=False) as f: + f.write(cross_content) + name = os.path.basename(f.name) + + with mock.patch('mesonbuild.coredata.os.path.expanduser', lambda x: x.replace('~', d)): + self.init(testdir, ['--cross-file=' + name], inprocess=True) + self.wipe() + + def test_compiler_run_command(self): + ''' + The test checks that the compiler object can be passed to + run_command(). + ''' + testdir = os.path.join(self.unit_test_dir, '23 compiler run_command') + self.init(testdir) + + def test_identical_target_name_in_subproject_flat_layout(self): + ''' + Test that identical targets in different subprojects do not collide + if layout is flat. 
+ ''' + testdir = os.path.join(self.common_test_dir, '182 identical target name in subproject flat layout') + self.init(testdir, extra_args=['--layout=flat']) + self.build() + + def test_flock(self): + exception_raised = False + with tempfile.TemporaryDirectory() as tdir: + os.mkdir(os.path.join(tdir, 'meson-private')) + with BuildDirLock(tdir): + try: + with BuildDirLock(tdir): + pass + except MesonException: + exception_raised = True + self.assertTrue(exception_raised, 'Double locking did not raise exception.') + + def test_ndebug_if_release_disabled(self): + testdir = os.path.join(self.unit_test_dir, '25 ndebug if-release') + self.init(testdir, extra_args=['--buildtype=release', '-Db_ndebug=if-release']) + self.build() + exe = os.path.join(self.builddir, 'main') + self.assertEqual(b'NDEBUG=1', subprocess.check_output(exe).strip()) + + def test_ndebug_if_release_enabled(self): + testdir = os.path.join(self.unit_test_dir, '25 ndebug if-release') + self.init(testdir, extra_args=['--buildtype=debugoptimized', '-Db_ndebug=if-release']) + self.build() + exe = os.path.join(self.builddir, 'main') + self.assertEqual(b'NDEBUG=0', subprocess.check_output(exe).strip()) class FailureTests(BasePlatformTests): @@ -1902,8 +2074,8 @@ class FailureTests(BasePlatformTests): env = Environment('', self.builddir, self.meson_command, get_fake_options(self.prefix), []) try: - objc = env.detect_objc_compiler(False) - objcpp = env.detect_objcpp_compiler(False) + env.detect_objc_compiler(False) + env.detect_objcpp_compiler(False) except EnvironmentException: code = "add_languages('objc')\nadd_languages('objcpp')" self.assertMesonRaises(code, "Unknown compiler") @@ -1933,6 +2105,17 @@ class FailureTests(BasePlatformTests): self.assertRegex(out, r'Also couldn\'t find a fallback subproject in ' '.*subprojects.*failingsubproj.*for the dependency.*somedep') + def test_exception_exit_status(self): + ''' + Test exit status on python exception + ''' + tdir = os.path.join(self.unit_test_dir, '21 exit status') + os.environ['MESON_UNIT_TEST'] = '1' + with self.assertRaises(subprocess.CalledProcessError) as cm: + self.init(tdir, inprocess=False) + self.assertEqual(cm.exception.returncode, 2) + self.wipe() + class WindowsTests(BasePlatformTests): ''' @@ -2033,6 +2216,9 @@ class LinuxlikeTests(BasePlatformTests): is true and not when it is false. This can't be an ordinary test case because we need to inspect the compiler database. ''' + if is_cygwin() or is_osx(): + raise unittest.SkipTest('PIC not relevant') + testdir = os.path.join(self.common_test_dir, '3 static') self.init(testdir) compdb = self.get_compdb() @@ -2099,6 +2285,14 @@ class LinuxlikeTests(BasePlatformTests): '-llibinternal', '-lcustom2', '-lfoo'])) + cmd = ['pkg-config', 'requires-test'] + out = self._run(cmd + ['--print-requires']).strip().split() + self.assertEqual(sorted(out), sorted(['libexposed', 'libfoo', 'libhello'])) + + cmd = ['pkg-config', 'requires-private-test'] + out = self._run(cmd + ['--print-requires-private']).strip().split() + self.assertEqual(sorted(out), sorted(['libexposed', 'libfoo', 'libhello'])) + def test_pkg_unfound(self): testdir = os.path.join(self.unit_test_dir, '22 unfound pkgconfig') self.init(testdir) @@ -2112,6 +2306,8 @@ class LinuxlikeTests(BasePlatformTests): database. 
https://github.com/mesonbuild/meson/issues/864 ''' + if not shutil.which('valac'): + raise unittest.SkipTest('valac not installed.') testdir = os.path.join(self.vala_test_dir, '5 target glib') self.init(testdir) compdb = self.get_compdb() @@ -2168,11 +2364,8 @@ class LinuxlikeTests(BasePlatformTests): if not shutil.which('qmake-qt5'): if not shutil.which('qmake'): raise unittest.SkipTest('QMake not found') - # For some inexplicable reason qmake --version gives different - # results when run from the command line vs invoked by Python. - # Check for both cases in case this behavior changes in the future. - output = subprocess.getoutput(['qmake', '--version']) - if 'Qt version 5' not in output and 'qt5' not in output: + output = subprocess.getoutput('qmake --version') + if 'Qt version 5' not in output: raise unittest.SkipTest('Qmake found, but it is not for Qt 5.') # Disable pkg-config codepath and force searching with qmake/qmake-qt5 testdir = os.path.join(self.framework_test_dir, '4 qt') @@ -2184,6 +2377,9 @@ class LinuxlikeTests(BasePlatformTests): self.assertTrue(msg in mesonlog or msg2 in mesonlog) def _test_soname_impl(self, libpath, install): + if is_cygwin() or is_osx(): + raise unittest.SkipTest('Test only applicable to ELF and linuxlike sonames') + testdir = os.path.join(self.unit_test_dir, '1 soname') self.init(testdir) self.build() @@ -2257,7 +2453,9 @@ class LinuxlikeTests(BasePlatformTests): # Check that all the listed -std=xxx options for this compiler work # just fine when used for v in compiler.get_options()[lang_std].choices: - if compiler.get_id() == 'clang' and version_compare(compiler.version, '<5.0.0') and '17' in v: + if (compiler.get_id() == 'clang' and '17' in v and + (version_compare(compiler.version, '<5.0.0') or + (compiler.clang_type == mesonbuild.compilers.CLANG_OSX and version_compare(compiler.version, '<9.2')))): continue std_opt = '{}={}'.format(lang_std, v) self.init(testdir, ['-D' + std_opt]) @@ -2309,8 +2507,8 @@ class LinuxlikeTests(BasePlatformTests): def test_unity_subproj(self): testdir = os.path.join(self.common_test_dir, '49 subproject') self.init(testdir, extra_args='--unity=subprojects') - self.assertPathExists(os.path.join(self.builddir, 'subprojects/sublib/simpletest@exe/simpletest-unity.c')) - self.assertPathExists(os.path.join(self.builddir, 'subprojects/sublib/sublib@sha/sublib-unity.c')) + self.assertPathExists(os.path.join(self.builddir, 'subprojects/sublib/sublib@@simpletest@exe/simpletest-unity.c')) + self.assertPathExists(os.path.join(self.builddir, 'subprojects/sublib/sublib@@sublib@sha/sublib-unity.c')) self.assertPathDoesNotExist(os.path.join(self.builddir, 'user@exe/user-unity.c')) self.build() @@ -2390,6 +2588,9 @@ class LinuxlikeTests(BasePlatformTests): self.assertNotIn('-Werror', c03_comp) def test_run_installed(self): + if is_cygwin() or is_osx(): + raise unittest.SkipTest('LD_LIBRARY_PATH and RPATH not applicable') + testdir = os.path.join(self.unit_test_dir, '7 run installed') self.init(testdir) self.build() @@ -2459,6 +2660,8 @@ class LinuxlikeTests(BasePlatformTests): self.assertTrue(gobject_found) def test_build_rpath(self): + if is_cygwin(): + raise unittest.SkipTest('Windows PE/COFF binaries do not use RPATH') testdir = os.path.join(self.unit_test_dir, '11 build_rpath') self.init(testdir) self.build() @@ -2476,6 +2679,9 @@ class LinuxlikeTests(BasePlatformTests): self.assertEqual(install_rpath, 'baz') def test_pch_with_address_sanitizer(self): + if is_cygwin(): + raise unittest.SkipTest('asan not available on Cygwin') + 
testdir = os.path.join(self.common_test_dir, '13 pch') self.init(testdir, ['-Db_sanitize=address']) self.build() @@ -2484,10 +2690,11 @@ class LinuxlikeTests(BasePlatformTests): self.assertIn("-fsanitize=address", i["command"]) def test_coverage(self): - if not shutil.which('gcovr'): + gcovr_exe, gcovr_new_rootdir = mesonbuild.environment.detect_gcovr() + if not gcovr_exe: raise unittest.SkipTest('gcovr not found') - if not shutil.which('genhtml'): - raise unittest.SkipTest('genhtml not found') + if not shutil.which('genhtml') and not gcovr_new_rootdir: + raise unittest.SkipTest('genhtml not found and gcovr is too old') if 'clang' in os.environ.get('CC', ''): # We need to use llvm-cov instead of gcovr with clang raise unittest.SkipTest('Coverage does not work with clang right now, help wanted!') @@ -2523,49 +2730,75 @@ endian = 'little' self.init(testdir, ['-Db_lto=true'], default_args=False) self.build('reconfigure') - def test_cross_file_system_paths(self): - testdir = os.path.join(self.common_test_dir, '1 trivial') - cross_content = textwrap.dedent("""\ - [binaries] - c = '/usr/bin/cc' - ar = '/usr/bin/ar' - strip = '/usr/bin/ar' - - [properties] - - [host_machine] - system = 'linux' - cpu_family = 'x86' - cpu = 'i686' - endian = 'little' - """) - - with tempfile.TemporaryDirectory() as d: - dir_ = os.path.join(d, 'meson', 'cross') - os.makedirs(dir_) - with tempfile.NamedTemporaryFile('w', dir=dir_, delete=False) as f: - f.write(cross_content) - name = os.path.basename(f.name) - - with mock.patch.dict(os.environ, {'XDG_DATA_HOME': d}): - self.init(testdir, ['--cross-file=' + name], inprocess=True) - self.wipe() - - with mock.patch.dict(os.environ, {'XDG_DATA_DIRS': d}): - os.environ.pop('XDG_DATA_HOME', None) - self.init(testdir, ['--cross-file=' + name], inprocess=True) - self.wipe() + def test_vala_generated_source_buildir_inside_source_tree(self): + ''' + Test that valac outputs generated C files in the expected location when + the builddir is a subdir of the source tree. + ''' + if not shutil.which('valac'): + raise unittest.SkipTest('valac not installed.') - with tempfile.TemporaryDirectory() as d: - dir_ = os.path.join(d, '.local', 'share', 'meson', 'cross') - os.makedirs(dir_) - with tempfile.NamedTemporaryFile('w', dir=dir_, delete=False) as f: - f.write(cross_content) - name = os.path.basename(f.name) + testdir = os.path.join(self.vala_test_dir, '8 generated sources') + newdir = os.path.join(self.builddir, 'srctree') + shutil.copytree(testdir, newdir) + testdir = newdir + # New builddir + builddir = os.path.join(testdir, 'subdir/_build') + os.makedirs(builddir, exist_ok=True) + self.change_builddir(builddir) + self.init(testdir) + self.build() - with mock.patch('mesonbuild.coredata.os.path.expanduser', lambda x: x.replace('~', d)): - self.init(testdir, ['--cross-file=' + name], inprocess=True) - self.wipe() + def test_old_gnome_module_codepaths(self): + ''' + A lot of code in the GNOME module is conditional on the version of the + glib tools that are installed, and breakages in the old code can slip + by once the CI has a newer glib version. So we force the GNOME module + to pretend that it's running on an ancient glib so the fallback code is + also tested. 
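
test_old_gnome_module_codepaths forces those fallbacks by assigning mesonbuild.modules.gnome.native_glib_version before configuring and resetting it to None afterwards. Assuming the attribute starts out as None, the same effect can be had with unittest.mock, which restores the original value even if the build fails; this is only an alternative sketch, not the test's actual code:

    from unittest import mock
    import mesonbuild.modules.gnome as gnome

    # Pretend an ancient glib is installed for the duration of the block;
    # mock.patch.object puts the original attribute back automatically.
    with mock.patch.object(gnome, 'native_glib_version', '2.20'):
        pass  # configure and build the '7 gnome' test project here
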
+ ''' + testdir = os.path.join(self.framework_test_dir, '7 gnome') + os.environ['MESON_UNIT_TEST_PRETEND_GLIB_OLD'] = "1" + mesonbuild.modules.gnome.native_glib_version = '2.20' + self.init(testdir, inprocess=True) + self.build() + mesonbuild.modules.gnome.native_glib_version = None + + @unittest.skipIf(shutil.which('pkg-config') is None, 'Pkg-config not found.') + def test_pkgconfig_usage(self): + testdir1 = os.path.join(self.unit_test_dir, '24 pkgconfig usage/dependency') + testdir2 = os.path.join(self.unit_test_dir, '24 pkgconfig usage/dependee') + if subprocess.call(['pkg-config', '--cflags', 'glib-2.0'], + stdout=subprocess.DEVNULL, + stderr=subprocess.DEVNULL) != 0: + raise unittest.SkipTest('Glib 2.0 dependency not available.') + with tempfile.TemporaryDirectory() as tempdirname: + self.init(testdir1, ['--prefix=' + tempdirname, '--libdir=lib'], default_args=False) + self.install(use_destdir=False) + shutil.rmtree(self.builddir) + os.mkdir(self.builddir) + pkg_dir = os.path.join(tempdirname, 'lib/pkgconfig') + self.assertTrue(os.path.exists(os.path.join(pkg_dir, 'libpkgdep.pc'))) + lib_dir = os.path.join(tempdirname, 'lib') + os.environ['PKG_CONFIG_PATH'] = pkg_dir + # Private internal libraries must not leak out. + pkg_out = subprocess.check_output(['pkg-config', '--static', '--libs', 'libpkgdep']) + self.assertFalse(b'libpkgdep-int' in pkg_out, 'Internal library leaked out.') + # Dependencies must not leak to cflags when building only a shared library. + pkg_out = subprocess.check_output(['pkg-config', '--cflags', 'libpkgdep']) + self.assertFalse(b'glib' in pkg_out, 'Internal dependency leaked to headers.') + # Test that the result is usable. + self.init(testdir2) + self.build() + myenv = os.environ.copy() + myenv['LD_LIBRARY_PATH'] = lib_dir + if is_cygwin(): + bin_dir = os.path.join(tempdirname, 'bin') + myenv['PATH'] = bin_dir + os.pathsep + myenv['PATH'] + self.assertTrue(os.path.isdir(lib_dir)) + test_exe = os.path.join(self.builddir, 'pkguser') + self.assertTrue(os.path.isfile(test_exe)) + subprocess.check_call(test_exe, env=myenv) class LinuxArmCrossCompileTests(BasePlatformTests): @@ -2589,6 +2822,7 @@ class LinuxArmCrossCompileTests(BasePlatformTests): compdb = self.get_compdb() self.assertNotIn('-DBUILD_ENVIRONMENT_ONLY', compdb[0]['command']) + class RewriterTests(unittest.TestCase): def setUp(self): @@ -2664,7 +2898,7 @@ def unset_envs(): if __name__ == '__main__': unset_envs() cases = ['InternalTests', 'AllPlatformTests', 'FailureTests'] - if is_linux(): + if not is_windows(): cases += ['LinuxlikeTests'] if should_run_linux_cross_tests(): cases += ['LinuxArmCrossCompileTests'] @@ -19,8 +19,9 @@ import sys from mesonbuild.coredata import version -if sys.version_info[0] < 3: - print('Tried to install with Python 2, Meson only supports Python 3.') +if sys.version_info < (3, 5, 0): + print('Tried to install with an unsupported version of Python. 
' + 'Meson requires Python 3.5.0 or greater') sys.exit(1) # We need to support Python installations that have nothing but the basic @@ -62,6 +63,7 @@ setup(name='meson', author_email='jpakkane@gmail.com', url='http://mesonbuild.com', license=' Apache License, Version 2.0', + python_requires='>=3.5', packages=['mesonbuild', 'mesonbuild.backend', 'mesonbuild.compilers', diff --git a/skip_ci.py b/skip_ci.py new file mode 100755 index 0000000..752dfdc --- /dev/null +++ b/skip_ci.py @@ -0,0 +1,74 @@ +#!/usr/bin/env python3 +# Copyright 2018 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import print_function + +import argparse +import os +import subprocess +import sys +import traceback + + +def check_pr(is_pr_env): + if is_pr_env not in os.environ: + print('This is not pull request: {} is not set'.format(is_pr_env)) + sys.exit() + elif os.environ[is_pr_env] == 'false': + print('This is not pull request: {} is false'.format(is_pr_env)) + sys.exit() + + +def get_base_branch(base_env): + if base_env not in os.environ: + print('Unable to determine base branch: {} is not set'.format(base_env)) + sys.exit() + return os.environ[base_env] + + +def get_git_files(base): + diff = subprocess.check_output(['git', 'diff', '--name-only', base + '...HEAD']) + return diff.strip().split(b'\n') + + +def is_documentation(filename): + return filename.startswith(b'docs/') + + +def main(): + try: + parser = argparse.ArgumentParser(description='CI Skipper') + parser.add_argument('--base-branch-env', required=True, + help='Branch push is targeted to') + parser.add_argument('--is-pull-env', required=True, + help='Variable set if it is a PR') + args = parser.parse_args() + check_pr(args.is_pull_env) + base = get_base_branch(args.base_branch_env) + if all(is_documentation(f) for f in get_git_files(base)): + print("Don't run CI for documentation-only changes, add '[skip ci]' to commit title.") + print('See http://mesonbuild.com/Contributing.html#skipping-integration-tests') + sys.exit(1) + except Exception: + # If this script fails we want build to proceed. + # Failure likely means some corner case we did not consider or bug. + # Either case this should not prevent CI from running if it is needed, + # and we tolerate it if it is run where it is not required. 
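
skip_ci.py classifies a change as documentation-only by listing the files changed relative to the target branch. The base + '...HEAD' argument matters: for git diff, the three-dot form compares HEAD against the merge base of the two refs, so commits that landed on the base branch after the fork point do not show up as changes. The equivalent, spelled out explicitly (a sketch that assumes it runs inside a git checkout with an 'origin/master' ref):

    import subprocess

    def changed_files(base='origin/master'):
        # Find the common ancestor of the base branch and HEAD...
        ancestor = subprocess.check_output(
            ['git', 'merge-base', base, 'HEAD']).decode().strip()
        # ...and diff against it; this matches `git diff --name-only base...HEAD`.
        out = subprocess.check_output(['git', 'diff', '--name-only', ancestor, 'HEAD'])
        return out.decode().splitlines()

    print(all(f.startswith('docs/') for f in changed_files()))
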
+ traceback.print_exc() + print('There is a BUG in skip_ci.py, exiting.') + sys.exit() + +if __name__ == '__main__': + main() diff --git a/test cases/common/12 data/installed_files.txt b/test cases/common/12 data/installed_files.txt index af1a735..43bb0e5 100644 --- a/test cases/common/12 data/installed_files.txt +++ b/test cases/common/12 data/installed_files.txt @@ -2,5 +2,10 @@ usr/share/progname/datafile.dat usr/share/progname/fileobject_datafile.dat usr/share/progname/vanishing.dat usr/share/progname/vanishing2.dat +usr/share/data install test/renamed file.txt +usr/share/data install test/somefile.txt +usr/share/data install test/some/nested/path.txt +usr/share/renamed/renamed 2.txt +usr/share/renamed/renamed 3.txt etc/etcfile.dat usr/bin/runscript.sh diff --git a/test cases/common/12 data/meson.build b/test cases/common/12 data/meson.build index d3407d1..d855bba 100644 --- a/test cases/common/12 data/meson.build +++ b/test cases/common/12 data/meson.build @@ -10,6 +10,14 @@ install_data(files('fileobject_datafile.dat'), install_dir : 'share/progname', install_mode : [false, false, 0]) +install_data(files('somefile.txt')) + subdir('vanishing') install_data(sources : 'vanishing/vanishing2.dat', install_dir : 'share/progname') + +install_data(sources : 'to_be_renamed_1.txt', rename : 'renamed file.txt') +install_data(sources : ['vanishing/to_be_renamed_2.txt', 'to_be_renamed_3.txt'], + install_dir : 'share/renamed', + rename : ['renamed 2.txt', 'renamed 3.txt']) +install_data(sources : 'to_be_renamed_4.txt', rename : 'some/nested/path.txt') diff --git a/test cases/common/12 data/somefile.txt b/test cases/common/12 data/somefile.txt new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/test cases/common/12 data/somefile.txt diff --git a/test cases/common/12 data/to_be_renamed_1.txt b/test cases/common/12 data/to_be_renamed_1.txt new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/test cases/common/12 data/to_be_renamed_1.txt diff --git a/test cases/common/12 data/to_be_renamed_3.txt b/test cases/common/12 data/to_be_renamed_3.txt new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/test cases/common/12 data/to_be_renamed_3.txt diff --git a/test cases/common/12 data/to_be_renamed_4.txt b/test cases/common/12 data/to_be_renamed_4.txt new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/test cases/common/12 data/to_be_renamed_4.txt diff --git a/test cases/common/12 data/vanishing/to_be_renamed_2.txt b/test cases/common/12 data/vanishing/to_be_renamed_2.txt new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/test cases/common/12 data/vanishing/to_be_renamed_2.txt diff --git a/test cases/common/145 whole archive/allofme/meson.build b/test cases/common/145 whole archive/allofme/meson.build deleted file mode 100644 index f5c2027..0000000 --- a/test cases/common/145 whole archive/allofme/meson.build +++ /dev/null @@ -1 +0,0 @@ -stlib = static_library('allofme', '../libfile.c') diff --git a/test cases/common/145 whole archive/exe/meson.build b/test cases/common/145 whole archive/exe/meson.build index f47a246..91d298d 100644 --- a/test cases/common/145 whole archive/exe/meson.build +++ b/test cases/common/145 whole archive/exe/meson.build @@ -1,2 +1 @@ -exe = executable('prog', '../prog.c', - link_with : dylib) +exe = executable('prog', '../prog.c', link_with : sh_func2_linked_func1) diff --git a/test cases/common/145 whole archive/exe2/meson.build b/test cases/common/145 whole archive/exe2/meson.build index 5365f03..9184864 100644 --- a/test 
cases/common/145 whole archive/exe2/meson.build +++ b/test cases/common/145 whole archive/exe2/meson.build @@ -1 +1 @@ -exe2 = executable('prog2', '../prog.c', link_with : dylib2) +exe2 = executable('prog2', '../prog.c', link_with : sh_only_link_whole) diff --git a/test cases/common/145 whole archive/exe3/meson.build b/test cases/common/145 whole archive/exe3/meson.build new file mode 100644 index 0000000..82cf57e --- /dev/null +++ b/test cases/common/145 whole archive/exe3/meson.build @@ -0,0 +1 @@ +exe3 = executable('prog3', '../prog.c', link_with : sh_func2_dep_func1) diff --git a/test cases/common/145 whole archive/exe4/meson.build b/test cases/common/145 whole archive/exe4/meson.build new file mode 100644 index 0000000..0781250 --- /dev/null +++ b/test cases/common/145 whole archive/exe4/meson.build @@ -0,0 +1 @@ +exe4 = executable('prog4', '../prog.c', link_with : sh_func2_transdep_func1) diff --git a/test cases/common/145 whole archive/libfile.c b/test cases/common/145 whole archive/func1.c index b2690a0..b2690a0 100644 --- a/test cases/common/145 whole archive/libfile.c +++ b/test cases/common/145 whole archive/func1.c diff --git a/test cases/common/145 whole archive/dylib.c b/test cases/common/145 whole archive/func2.c index 9e287a4..9e287a4 100644 --- a/test cases/common/145 whole archive/dylib.c +++ b/test cases/common/145 whole archive/func2.c diff --git a/test cases/common/145 whole archive/meson.build b/test cases/common/145 whole archive/meson.build index 617ae03..012df33 100644 --- a/test cases/common/145 whole archive/meson.build +++ b/test cases/common/145 whole archive/meson.build @@ -10,15 +10,41 @@ if cc.get_id() == 'msvc' endif endif -subdir('allofme') -subdir('shlib') +# Test 1: link_whole keeps all symbols +# Make static func1 +subdir('st_func1') +# Make shared func2 linking whole func1 archive +subdir('sh_func2_linked_func1') +# Link exe with shared library only subdir('exe') - +# Test that both func1 and func2 are accessible from shared library test('prog', exe) -# link_whole only -subdir('stlib') -subdir('wholeshlib') +# Test 2: link_whole can be used instead of source list, see #2180 +# Make static func2 +subdir('st_func2') +# Link both func1 and func2 into same shared library +# which does not have any sources other than 2 static libraries +subdir('sh_only_link_whole') +# Link exe2 with shared library only subdir('exe2') - +# Test that both func1 and func2 are accessible from shared library test('prog2', exe2) + +# Test 3: link_whole can be used in declare_dependency() +func1_dep = declare_dependency(link_whole : [st_func1]) +# Use dependency to link func1 into shared library +subdir('sh_func2_dep_func1') +# Link exe3 with shared library +subdir('exe3') +# Test that both func1 and func2 are accessible from shared library +test('prog3', exe3) + +# Test 4: link_whole can be used in transitive declare_dependency() +func1_trans_dep = declare_dependency(dependencies : func1_dep) +# Use transitive dependency to link func1 into shared library +subdir('sh_func2_transdep_func1') +# Link exe4 with shared library +subdir('exe4') +# Test that both func1 and func2 are accessible from shared library +test('prog4', exe4) diff --git a/test cases/common/145 whole archive/sh_func2_dep_func1/meson.build b/test cases/common/145 whole archive/sh_func2_dep_func1/meson.build new file mode 100644 index 0000000..92baca6 --- /dev/null +++ b/test cases/common/145 whole archive/sh_func2_dep_func1/meson.build @@ -0,0 +1,4 @@ +# Same as sh_func2_linked_func1, # func2.c does not depend on 
func1(), +# so without link_whole compiler would throw func1() away. +# This is the same version of the test with a dependency object instead. +sh_func2_dep_func1 = shared_library('sh_func2_dep_func1', '../func2.c', dependencies : func1_dep) diff --git a/test cases/common/145 whole archive/sh_func2_linked_func1/meson.build b/test cases/common/145 whole archive/sh_func2_linked_func1/meson.build new file mode 100644 index 0000000..2858f65 --- /dev/null +++ b/test cases/common/145 whole archive/sh_func2_linked_func1/meson.build @@ -0,0 +1,3 @@ +# Nothing in func2.c uses func1, so the linker would throw it +# away and thus linking the exe would fail. +sh_func2_linked_func1 = shared_library('sh_func2_linked_func1', '../func2.c', link_whole : st_func1) diff --git a/test cases/common/145 whole archive/sh_func2_transdep_func1/meson.build b/test cases/common/145 whole archive/sh_func2_transdep_func1/meson.build new file mode 100644 index 0000000..0703077 --- /dev/null +++ b/test cases/common/145 whole archive/sh_func2_transdep_func1/meson.build @@ -0,0 +1,6 @@ +# Same as sh_func2_dep_func1 but dependency is transitive. +# func2.c does not have any reference to func1() so without link_whole compiler +# should throw func1() out. +sh_func2_transdep_func1 = shared_library( + 'sh_func2_transdep_func1', '../func2.c', + dependencies : func1_trans_dep) diff --git a/test cases/common/145 whole archive/sh_only_link_whole/meson.build b/test cases/common/145 whole archive/sh_only_link_whole/meson.build new file mode 100644 index 0000000..64baabd --- /dev/null +++ b/test cases/common/145 whole archive/sh_only_link_whole/meson.build @@ -0,0 +1 @@ +sh_only_link_whole = shared_library('sh_only_link_whole', link_whole : [st_func1, st_func2]) diff --git a/test cases/common/145 whole archive/shlib/meson.build b/test cases/common/145 whole archive/shlib/meson.build deleted file mode 100644 index 34a1b78..0000000 --- a/test cases/common/145 whole archive/shlib/meson.build +++ /dev/null @@ -1,4 +0,0 @@ -# Nothing in dylib.c uses func1, so the linker would throw it -# away and thus linking the exe would fail. 
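
All of the reorganised '145 whole archive' cases exercise the same mechanism: without link_whole, the linker only pulls the archive members that resolve an undefined reference, so func1 would be dropped from the shared library (GNU-style linkers implement link_whole with --whole-archive; MSVC has a comparable /WHOLEARCHIVE option). The effect can be checked outside the test suite by listing a library's exported dynamic symbols; a sketch for ELF platforms, with a hypothetical library path:

    import subprocess

    def exported_symbols(libpath):
        # GNU nm: -D lists dynamic symbols, --defined-only skips undefined references.
        out = subprocess.check_output(['nm', '-D', '--defined-only', libpath])
        return {line.split()[-1].decode() for line in out.splitlines() if line.strip()}

    # With link_whole both functions are exported; without it only func2 survives.
    syms = exported_symbols('libsh_func2_linked_func1.so')
    print('func1' in syms, 'func2' in syms)
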
-dylib = shared_library('shlib', '../dylib.c', - link_whole : stlib) diff --git a/test cases/common/145 whole archive/st_func1/meson.build b/test cases/common/145 whole archive/st_func1/meson.build new file mode 100644 index 0000000..c84d781 --- /dev/null +++ b/test cases/common/145 whole archive/st_func1/meson.build @@ -0,0 +1 @@ +st_func1 = static_library('st_func1', '../func1.c') diff --git a/test cases/common/145 whole archive/st_func2/meson.build b/test cases/common/145 whole archive/st_func2/meson.build new file mode 100644 index 0000000..2732f96 --- /dev/null +++ b/test cases/common/145 whole archive/st_func2/meson.build @@ -0,0 +1 @@ +st_func2 = static_library('st_func2', '../func2.c') diff --git a/test cases/common/145 whole archive/stlib/meson.build b/test cases/common/145 whole archive/stlib/meson.build deleted file mode 100644 index 07a434e..0000000 --- a/test cases/common/145 whole archive/stlib/meson.build +++ /dev/null @@ -1 +0,0 @@ -static = static_library('static', '../dylib.c') diff --git a/test cases/common/145 whole archive/wholeshlib/meson.build b/test cases/common/145 whole archive/wholeshlib/meson.build deleted file mode 100644 index 69a1995..0000000 --- a/test cases/common/145 whole archive/wholeshlib/meson.build +++ /dev/null @@ -1 +0,0 @@ -dylib2 = shared_library('link_whole', link_whole : [stlib, static]) diff --git a/test cases/common/15 mixed pch/meson.build b/test cases/common/15 mixed pch/meson.build index 19129d8..8e9da93 100644 --- a/test cases/common/15 mixed pch/meson.build +++ b/test cases/common/15 mixed pch/meson.build @@ -1,6 +1,19 @@ project('mixed C and C++ pch test', 'cpp', 'c') -exe = executable('prog', 'main.cc', 'func.c', -c_pch : ['pch/func.h', 'pch/func_pch.c'], -cpp_pch : ['pch/main_pch.cc', 'pch/main.h']) +exe = executable( + 'prog', + files('main.cc', 'func.c'), + c_pch : ['pch/func.h', 'pch/func_pch.c'], + cpp_pch : ['pch/main_pch.cc', 'pch/main.h'], +) + +cc = meson.get_compiler('c') +if cc.get_id() != 'msvc' + exe2 = executable( + 'prog2', + files('main.cc', 'func.c'), + c_pch : 'pch/func.h', + cpp_pch : 'pch/main.h', + ) +endif diff --git a/test cases/common/16 configure file/meson.build b/test cases/common/16 configure file/meson.build index eda0a8f..71a2563 100644 --- a/test cases/common/16 configure file/meson.build +++ b/test cases/common/16 configure file/meson.build @@ -131,3 +131,9 @@ configure_file( configuration : conf6 ) test('test6', executable('prog6', 'prog6.c')) + +# test empty install dir string +cfile = configure_file(input : 'config.h.in', + output : 'do_not_get_installed.h', + install_dir : '', + configuration : conf) diff --git a/test cases/common/174 dependency factory/meson.build b/test cases/common/174 dependency factory/meson.build new file mode 100644 index 0000000..54f7d26 --- /dev/null +++ b/test cases/common/174 dependency factory/meson.build @@ -0,0 +1,51 @@ +project('dependency factory') + +dep = dependency('gl', method: 'pkg-config', required: false) +if dep.found() and dep.type_name() == 'pkgconfig' + dep.get_pkgconfig_variable('prefix') +endif + +dep = dependency('SDL2', method: 'pkg-config', required: false) +if dep.found() and dep.type_name() == 'pkgconfig' + dep.get_pkgconfig_variable('prefix') +endif + +dep = dependency('SDL2', method: 'config-tool', required: false) +if dep.found() and dep.type_name() == 'configtool' + dep.get_configtool_variable('prefix') +endif + +dep = dependency('Vulkan', method: 'pkg-config', required: false) +if dep.found() and dep.type_name() == 'pkgconfig' + 
dep.get_pkgconfig_variable('prefix') +endif + +dep = dependency('pcap', method: 'pkg-config', required: false) +if dep.found() and dep.type_name() == 'pkgconfig' + dep.get_pkgconfig_variable('prefix') +endif + +dep = dependency('pcap', method: 'config-tool', required: false) +if dep.found() and dep.type_name() == 'configtool' + dep.get_configtool_variable('prefix') +endif + +dep = dependency('cups', method: 'pkg-config', required: false) +if dep.found() and dep.type_name() == 'pkgconfig' + dep.get_pkgconfig_variable('prefix') +endif + +dep = dependency('cups', method: 'config-tool', required: false) +if dep.found() and dep.type_name() == 'configtool' + dep.get_configtool_variable('prefix') +endif + +dep = dependency('libwmf', method: 'pkg-config', required: false) +if dep.found() and dep.type_name() == 'pkgconfig' + dep.get_pkgconfig_variable('prefix') +endif + +dep = dependency('libwmf', method: 'config-tool', required: false) +if dep.found() and dep.type_name() == 'configtool' + dep.get_configtool_variable('prefix') +endif diff --git a/test cases/common/177 subproject nested subproject dirs/contrib/subprojects/alpha/a.c b/test cases/common/177 subproject nested subproject dirs/contrib/subprojects/alpha/a.c new file mode 100644 index 0000000..7ac3e5e --- /dev/null +++ b/test cases/common/177 subproject nested subproject dirs/contrib/subprojects/alpha/a.c @@ -0,0 +1,15 @@ +int func2(); + +#if defined _WIN32 || defined __CYGWIN__ + #define DLL_PUBLIC __declspec(dllexport) +#else + #if defined __GNUC__ + #define DLL_PUBLIC __attribute__ ((visibility("default"))) + #else + #pragma message ("Compiler does not support symbol visibility.") + #define DLL_PUBLIC + #endif +#endif + +int DLL_PUBLIC func() { return func2(); } + diff --git a/test cases/common/177 subproject nested subproject dirs/contrib/subprojects/alpha/meson.build b/test cases/common/177 subproject nested subproject dirs/contrib/subprojects/alpha/meson.build new file mode 100644 index 0000000..12f6564 --- /dev/null +++ b/test cases/common/177 subproject nested subproject dirs/contrib/subprojects/alpha/meson.build @@ -0,0 +1,4 @@ +project('alpha project', 'c', subproject_dir: 'var/subprojects') + +b = subproject('beta') +l = shared_library('a', 'a.c', link_with : b.get_variable('lb')) diff --git a/test cases/common/177 subproject nested subproject dirs/contrib/subprojects/alpha/var/subprojects/wrap_files_might_be_here b/test cases/common/177 subproject nested subproject dirs/contrib/subprojects/alpha/var/subprojects/wrap_files_might_be_here new file mode 100644 index 0000000..8d1c8b6 --- /dev/null +++ b/test cases/common/177 subproject nested subproject dirs/contrib/subprojects/alpha/var/subprojects/wrap_files_might_be_here @@ -0,0 +1 @@ + diff --git a/test cases/common/177 subproject nested subproject dirs/contrib/subprojects/beta/b.c b/test cases/common/177 subproject nested subproject dirs/contrib/subprojects/beta/b.c new file mode 100644 index 0000000..a95651b --- /dev/null +++ b/test cases/common/177 subproject nested subproject dirs/contrib/subprojects/beta/b.c @@ -0,0 +1,14 @@ +#if defined _WIN32 || defined __CYGWIN__ + #define DLL_PUBLIC __declspec(dllexport) +#else + #if defined __GNUC__ + #define DLL_PUBLIC __attribute__ ((visibility("default"))) + #else + #pragma message ("Compiler does not support symbol visibility.") + #define DLL_PUBLIC + #endif +#endif + +int DLL_PUBLIC func2() { + return 42; +} diff --git a/test cases/common/177 subproject nested subproject dirs/contrib/subprojects/beta/meson.build b/test 
cases/common/177 subproject nested subproject dirs/contrib/subprojects/beta/meson.build new file mode 100644 index 0000000..ea4cc9b --- /dev/null +++ b/test cases/common/177 subproject nested subproject dirs/contrib/subprojects/beta/meson.build @@ -0,0 +1,3 @@ +project('beta project', 'c') + +lb = shared_library('b', 'b.c') diff --git a/test cases/common/177 subproject nested subproject dirs/meson.build b/test cases/common/177 subproject nested subproject dirs/meson.build new file mode 100644 index 0000000..a4d0a97 --- /dev/null +++ b/test cases/common/177 subproject nested subproject dirs/meson.build @@ -0,0 +1,7 @@ +project('gamma project', 'c', subproject_dir: 'contrib/subprojects') + +a = subproject('alpha') +lib = a.get_variable('l') + +exe = executable('prog', 'prog.c', link_with : lib) +test('basic', exe) diff --git a/test cases/common/177 subproject nested subproject dirs/prog.c b/test cases/common/177 subproject nested subproject dirs/prog.c new file mode 100644 index 0000000..394f139 --- /dev/null +++ b/test cases/common/177 subproject nested subproject dirs/prog.c @@ -0,0 +1,5 @@ +int func(); + +int main(int argc, char **argv) { + return func() == 42 ? 0 : 1; +} diff --git a/test cases/common/174 preserve gendir/base.inp b/test cases/common/178 preserve gendir/base.inp index df967b9..df967b9 100644 --- a/test cases/common/174 preserve gendir/base.inp +++ b/test cases/common/178 preserve gendir/base.inp diff --git a/test cases/common/174 preserve gendir/com/mesonbuild/subbie.inp b/test cases/common/178 preserve gendir/com/mesonbuild/subbie.inp index df0f4e9..df0f4e9 100644 --- a/test cases/common/174 preserve gendir/com/mesonbuild/subbie.inp +++ b/test cases/common/178 preserve gendir/com/mesonbuild/subbie.inp diff --git a/test cases/common/174 preserve gendir/genprog.py b/test cases/common/178 preserve gendir/genprog.py index 1e10998..1e10998 100755 --- a/test cases/common/174 preserve gendir/genprog.py +++ b/test cases/common/178 preserve gendir/genprog.py diff --git a/test cases/common/174 preserve gendir/meson.build b/test cases/common/178 preserve gendir/meson.build index ce219f0..ce219f0 100644 --- a/test cases/common/174 preserve gendir/meson.build +++ b/test cases/common/178 preserve gendir/meson.build diff --git a/test cases/common/174 preserve gendir/testprog.c b/test cases/common/178 preserve gendir/testprog.c index 46b4602..46b4602 100644 --- a/test cases/common/174 preserve gendir/testprog.c +++ b/test cases/common/178 preserve gendir/testprog.c diff --git a/test cases/common/179 source in dep/bar.cpp b/test cases/common/179 source in dep/bar.cpp new file mode 100644 index 0000000..bda8cb6 --- /dev/null +++ b/test cases/common/179 source in dep/bar.cpp @@ -0,0 +1,5 @@ +extern "C" int foo(); + +int main(int, char**) { + return foo() != 42; +} diff --git a/test cases/common/179 source in dep/foo.c b/test cases/common/179 source in dep/foo.c new file mode 100644 index 0000000..1ecfa8c --- /dev/null +++ b/test cases/common/179 source in dep/foo.c @@ -0,0 +1,3 @@ +int foo() { + return 42; +} diff --git a/test cases/common/179 source in dep/meson.build b/test cases/common/179 source in dep/meson.build new file mode 100644 index 0000000..e2c007e --- /dev/null +++ b/test cases/common/179 source in dep/meson.build @@ -0,0 +1,6 @@ +project('foo', 'c', 'cpp') + +dep = declare_dependency(sources : 'foo.c') + +executable('bar', 'bar.cpp', + dependencies : dep) diff --git a/test cases/common/180 generator link whole/export.h b/test cases/common/180 generator link whole/export.h 
new file mode 100644 index 0000000..f4f6f45 --- /dev/null +++ b/test cases/common/180 generator link whole/export.h @@ -0,0 +1,18 @@ +#pragma once + +#if defined BUILDING_EMBEDDED + #define DLL_PUBLIC +#elif defined _WIN32 || defined __CYGWIN__ + #if defined BUILDING_DLL + #define DLL_PUBLIC __declspec(dllexport) + #else + #define DLL_PUBLIC __declspec(dllimport) + #endif +#else + #if defined __GNUC__ + #define DLL_PUBLIC __attribute__ ((visibility("default"))) + #else + #pragma message ("Compiler does not support symbol visibility.") + #define DLL_PUBLIC + #endif +#endif diff --git a/test cases/common/180 generator link whole/generator.py b/test cases/common/180 generator link whole/generator.py new file mode 100755 index 0000000..0076b74 --- /dev/null +++ b/test cases/common/180 generator link whole/generator.py @@ -0,0 +1,30 @@ +#!/usr/bin/env python3 + +import os +import os.path +import sys + + +def main(): + name = os.path.splitext(os.path.basename(sys.argv[1]))[0] + out = sys.argv[2] + hname = os.path.join(out, name + '.h') + cname = os.path.join(out, name + '.c') + print(os.getcwd(), hname) + with open(hname, 'w') as hfile: + hfile.write(''' +#pragma once +#include "export.h" +int DLL_PUBLIC {name}(); +'''.format(name=name)) + with open(cname, 'w') as cfile: + cfile.write(''' +#include "{name}.h" +int {name}() {{ + return {size}; +}} +'''.format(name=name, size=len(name))) + + +if __name__ == '__main__': + main() diff --git a/test cases/common/180 generator link whole/main.c b/test cases/common/180 generator link whole/main.c new file mode 100644 index 0000000..acf8717 --- /dev/null +++ b/test cases/common/180 generator link whole/main.c @@ -0,0 +1,11 @@ +#include "meson_test_function.h" + +#include <stdio.h> + +int main() { + if (meson_test_function() != 19) { + printf("Bad meson_test_function()\n"); + return 1; + } + return 0; +} diff --git a/test cases/common/180 generator link whole/meson.build b/test cases/common/180 generator link whole/meson.build new file mode 100644 index 0000000..30ae9c6 --- /dev/null +++ b/test cases/common/180 generator link whole/meson.build @@ -0,0 +1,65 @@ +project('generator link_whole', 'c') + +cc = meson.get_compiler('c') +if cc.get_id() == 'msvc' + if cc.version().version_compare('<19') + error('MESON_SKIP_TEST link_whole only works on VS2015 or newer.') + endif +endif + +# This just generates foo.h and foo.c with int foo() defined. +gen_py = find_program('generator.py') +gen = generator(gen_py, + output: ['@BASENAME@.h', '@BASENAME@.c'], + arguments : ['@INPUT@', '@BUILD_DIR@']) + +# Test 1: link directly into executable +srcs = gen.process('meson_test_function.tmpl') +exe = executable('exe1', [srcs, 'main.c'], c_args : '-DBUILDING_EMBEDDED') +test('test1', exe) + +# Test 2: link into shared library and access from executable +srcs = gen.process('meson_test_function.tmpl') +shlib2 = shared_library('shlib2', [srcs], c_args : '-DBUILDING_DLL') +exe = executable('exe2', 'main.c', + link_with : shlib2, + include_directories : shlib2.private_dir_include(), +) +test('test2', exe) + +# Test 3: link into static library and access from executable +srcs = gen.process('meson_test_function.tmpl') +stlib3 = static_library('stlib3', [srcs], c_args : '-DBUILDING_EMBEDDED') +exe = executable('exe3', 'main.c', + c_args : '-DBUILDING_EMBEDDED', + link_with : stlib3, + include_directories : stlib3.private_dir_include(), +) +test('test3', exe) + +# Test 4: link into static library, link into shared +# and access from executable. 
To make sure static_library
+# is not dropped use pull_meson_test_function helper.
+srcs = gen.process('meson_test_function.tmpl')
+stlib4 = static_library('stlib4', [srcs], c_args : '-DBUILDING_DLL')
+shlib4 = shared_library('shlib4', 'pull_meson_test_function.c',
+  c_args : '-DBUILDING_DLL',
+  link_with : stlib4,
+  include_directories : stlib4.private_dir_include(),
+)
+exe = executable('exe4', 'main.c',
+  link_with : shlib4,
+  include_directories : stlib4.private_dir_include(),
+)
+test('test4', exe)
+
+# Test 5: link into static library, link_whole into shared
+# and access from executable
+srcs = gen.process('meson_test_function.tmpl')
+stlib5 = static_library('stlib5', [srcs], c_args : '-DBUILDING_DLL')
+shlib5 = shared_library('shlib5', link_whole : stlib5)
+exe = executable('exe5', 'main.c',
+  link_with : shlib5,
+  include_directories : stlib5.private_dir_include(),
+)
+test('test5', exe)
diff --git a/test cases/common/180 generator link whole/meson_test_function.tmpl b/test cases/common/180 generator link whole/meson_test_function.tmpl
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/test cases/common/180 generator link whole/meson_test_function.tmpl
diff --git a/test cases/common/180 generator link whole/pull_meson_test_function.c b/test cases/common/180 generator link whole/pull_meson_test_function.c
new file mode 100644
index 0000000..c54dda6
--- /dev/null
+++ b/test cases/common/180 generator link whole/pull_meson_test_function.c
@@ -0,0 +1,6 @@
+#include "export.h"
+#include "meson_test_function.h"
+
+int DLL_PUBLIC function_puller() {
+    return meson_test_function();
+}
diff --git a/test cases/common/181 initial c_args/meson.build b/test cases/common/181 initial c_args/meson.build
new file mode 100644
index 0000000..70a6e7a
--- /dev/null
+++ b/test cases/common/181 initial c_args/meson.build
@@ -0,0 +1,7 @@
+project('options', 'c')
+
+# Test passing c_args and c_link_args options from the command line.
+assert(get_option('c_args') == ['-march=native', '-funroll-loops'],
+       'Incorrect value for c_args option.')
+assert(get_option('c_link_args') == ['-random_linker_option'],
+       'Incorrect value for c_link_args option.')
diff --git a/test cases/common/181 initial c_args/test_args.txt b/test cases/common/181 initial c_args/test_args.txt
new file mode 100644
index 0000000..9a6da06
--- /dev/null
+++ b/test cases/common/181 initial c_args/test_args.txt
@@ -0,0 +1,4 @@
+# This file is not read by meson itself, but by the test framework.
+# It is not possible to pass arguments to meson from a file.
+['-Dc_args=-march=native', '-Dc_args=-funroll-loops', + '-Dc_link_args=-random_linker_option'] diff --git a/test cases/common/182 identical target name in subproject flat layout/foo.c b/test cases/common/182 identical target name in subproject flat layout/foo.c new file mode 100644 index 0000000..ed42789 --- /dev/null +++ b/test cases/common/182 identical target name in subproject flat layout/foo.c @@ -0,0 +1 @@ +int meson_test_main_foo(void) { return 10; } diff --git a/test cases/common/182 identical target name in subproject flat layout/main.c b/test cases/common/182 identical target name in subproject flat layout/main.c new file mode 100644 index 0000000..6f02aeb --- /dev/null +++ b/test cases/common/182 identical target name in subproject flat layout/main.c @@ -0,0 +1,16 @@ +#include <stdio.h> + +int meson_test_main_foo(void); +int meson_test_subproj_foo(void); + +int main(void) { + if (meson_test_main_foo() != 10) { + printf("Failed meson_test_main_foo\n"); + return 1; + } + if (meson_test_subproj_foo() != 20) { + printf("Failed meson_test_subproj_foo\n"); + return 1; + } + return 0; +} diff --git a/test cases/common/182 identical target name in subproject flat layout/meson.build b/test cases/common/182 identical target name in subproject flat layout/meson.build new file mode 100644 index 0000000..d859fda --- /dev/null +++ b/test cases/common/182 identical target name in subproject flat layout/meson.build @@ -0,0 +1,11 @@ +project('subproject targets', 'c') + +# Idea behind this test is to create targets with identical name +# but different output files. We can do this by choosing different +# name_prefix of libraries. Target id does not depend on name_prefix. + +main_foo = static_library('foo', 'foo.c', name_prefix : 'main') +subproj_foo = subproject('subproj').get_variable('foo') + +exe = executable('prog', 'main.c', link_with : [main_foo, subproj_foo]) +test('main test', exe) diff --git a/test cases/common/182 identical target name in subproject flat layout/subprojects/subproj/foo.c b/test cases/common/182 identical target name in subproject flat layout/subprojects/subproj/foo.c new file mode 100644 index 0000000..f334292 --- /dev/null +++ b/test cases/common/182 identical target name in subproject flat layout/subprojects/subproj/foo.c @@ -0,0 +1 @@ +int meson_test_subproj_foo(void) { return 20; } diff --git a/test cases/common/182 identical target name in subproject flat layout/subprojects/subproj/meson.build b/test cases/common/182 identical target name in subproject flat layout/subprojects/subproj/meson.build new file mode 100644 index 0000000..c927194 --- /dev/null +++ b/test cases/common/182 identical target name in subproject flat layout/subprojects/subproj/meson.build @@ -0,0 +1,3 @@ +project('subproj', 'c') + +foo = static_library('foo', 'foo.c', name_prefix : 'subproj') diff --git a/test cases/common/184 as-needed/config.h b/test cases/common/184 as-needed/config.h new file mode 100644 index 0000000..b8fb60f --- /dev/null +++ b/test cases/common/184 as-needed/config.h @@ -0,0 +1,14 @@ +#if defined _WIN32 || defined __CYGWIN__ + #if defined BUILDING_DLL + #define DLL_PUBLIC __declspec(dllexport) + #else + #define DLL_PUBLIC __declspec(dllimport) + #endif +#else + #if defined __GNUC__ + #define DLL_PUBLIC __attribute__ ((visibility("default"))) + #else + #pragma message ("Compiler does not support symbol visibility.") + #define DLL_PUBLIC + #endif +#endif diff --git a/test cases/common/184 as-needed/libA.cpp b/test cases/common/184 as-needed/libA.cpp new file mode 100644 
index 0000000..5f45bc0 --- /dev/null +++ b/test cases/common/184 as-needed/libA.cpp @@ -0,0 +1,7 @@ +#define BUILDING_DLL + +#include "libA.h" + +namespace meson_test_as_needed { + DLL_PUBLIC bool linked = false; +} diff --git a/test cases/common/184 as-needed/libA.h b/test cases/common/184 as-needed/libA.h new file mode 100644 index 0000000..8e76d22 --- /dev/null +++ b/test cases/common/184 as-needed/libA.h @@ -0,0 +1,5 @@ +#include "config.h" + +namespace meson_test_as_needed { + DLL_PUBLIC extern bool linked; +} diff --git a/test cases/common/184 as-needed/libB.cpp b/test cases/common/184 as-needed/libB.cpp new file mode 100644 index 0000000..a872394 --- /dev/null +++ b/test cases/common/184 as-needed/libB.cpp @@ -0,0 +1,19 @@ +#include "libA.h" + +#undef DLL_PUBLIC +#define BUILDING_DLL +#include "config.h" + +namespace meson_test_as_needed { + namespace { + bool set_linked() { + linked = true; + return true; + } + bool stub = set_linked(); + } + + DLL_PUBLIC int libB_unused_func() { + return 0; + } +} diff --git a/test cases/common/184 as-needed/main.cpp b/test cases/common/184 as-needed/main.cpp new file mode 100644 index 0000000..191d15c --- /dev/null +++ b/test cases/common/184 as-needed/main.cpp @@ -0,0 +1,7 @@ +#include <cstdlib> + +#include "libA.h" + +int main() { + return !meson_test_as_needed::linked ? EXIT_SUCCESS : EXIT_FAILURE; +} diff --git a/test cases/common/184 as-needed/meson.build b/test cases/common/184 as-needed/meson.build new file mode 100644 index 0000000..3b54aaa --- /dev/null +++ b/test cases/common/184 as-needed/meson.build @@ -0,0 +1,13 @@ +project('as-needed test', 'cpp') + +# Idea behind this test is to have -Wl,--as-needed prune +# away unneeded linkages, which would otherwise cause global +# static initialiser side-effects to set a boolean to true. 
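
The '184 as-needed' project builds main against both libA and libB even though only libA is referenced; libB's global initialiser would set meson_test_as_needed::linked to true if libB were loaded. With -Wl,--as-needed the linker omits the DT_NEEDED entry for libB, the initialiser never runs, and main exits successfully. The pruning can be inspected with readelf; an illustrative sketch (ELF only, the executable path is the hypothetical build output):

    import re
    import subprocess

    def needed_libraries(path):
        # readelf -d prints lines such as: 0x...01 (NEEDED)  Shared library: [libA.so]
        out = subprocess.check_output(['readelf', '-d', path]).decode()
        return re.findall(r'\(NEEDED\)\s+Shared library: \[(.+?)\]', out)

    # With --as-needed in effect, libB should not appear in this list.
    print(needed_libraries('./C'))
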
+ +# Credits for portable ISO C++ idea go to sarum9in + +libA = library('A', 'libA.cpp') +libB = library('B', 'libB.cpp', link_with : libA) + +main_exe = executable('C', 'main.cpp', link_with : [libA, libB]) +test('main test', main_exe) diff --git a/test cases/common/185 ndebug if-release enabled/main.c b/test cases/common/185 ndebug if-release enabled/main.c new file mode 100644 index 0000000..984ebca --- /dev/null +++ b/test cases/common/185 ndebug if-release enabled/main.c @@ -0,0 +1,15 @@ +#include <assert.h> +#include <stdlib.h> + +int meson_test_side_effect = EXIT_FAILURE; + +int meson_test_set_side_effect(void) { + meson_test_side_effect = EXIT_SUCCESS; + return 1; +} + +int main(void) { + // meson_test_side_effect is set only if assert is executed + assert(meson_test_set_side_effect()); + return meson_test_side_effect; +} diff --git a/test cases/common/185 ndebug if-release enabled/meson.build b/test cases/common/185 ndebug if-release enabled/meson.build new file mode 100644 index 0000000..be26375 --- /dev/null +++ b/test cases/common/185 ndebug if-release enabled/meson.build @@ -0,0 +1,7 @@ +project('ndebug enabled', 'c', + default_options : [ + 'buildtype=debugoptimized', + 'b_ndebug=if-release', + ]) + +test('exe', executable('main', 'main.c')) diff --git a/test cases/common/186 ndebug if-release disabled/main.c b/test cases/common/186 ndebug if-release disabled/main.c new file mode 100644 index 0000000..cb3ec3f --- /dev/null +++ b/test cases/common/186 ndebug if-release disabled/main.c @@ -0,0 +1,7 @@ +#include <assert.h> +#include <stdlib.h> + +int main(void) { + assert(0); + return EXIT_SUCCESS; +} diff --git a/test cases/common/186 ndebug if-release disabled/meson.build b/test cases/common/186 ndebug if-release disabled/meson.build new file mode 100644 index 0000000..a9a79ea --- /dev/null +++ b/test cases/common/186 ndebug if-release disabled/meson.build @@ -0,0 +1,7 @@ +project('ndebug disabled', 'c', + default_options : [ + 'buildtype=release', + 'b_ndebug=if-release', + ]) + +test('exe', executable('main', 'main.c')) diff --git a/test cases/common/187 subproject version/meson.build b/test cases/common/187 subproject version/meson.build new file mode 100644 index 0000000..bd8fc03 --- /dev/null +++ b/test cases/common/187 subproject version/meson.build @@ -0,0 +1,10 @@ +project('subproject version', 'c', + version : '2.3.4', + license: 'mylicense') + +subproject('a') + +liba_dep = dependency('a', + fallback: ['a', 'liba_dep'], + version: ['>= 0.30.0', '!= 0.99.0']) + diff --git a/test cases/common/187 subproject version/subprojects/a/meson.build b/test cases/common/187 subproject version/subprojects/a/meson.build new file mode 100644 index 0000000..dae3130 --- /dev/null +++ b/test cases/common/187 subproject version/subprojects/a/meson.build @@ -0,0 +1,5 @@ +project('mysubproject', 'c', + version : '1.0.0', + license : 'sublicense') + +liba_dep = declare_dependency (version : '1.0.0') diff --git a/test cases/common/19 comparison/meson.build b/test cases/common/19 comparison/meson.build index c4cff9f..fb641ed 100644 --- a/test cases/common/19 comparison/meson.build +++ b/test cases/common/19 comparison/meson.build @@ -126,3 +126,14 @@ test('equalfalse', exe13) test('equaltrue', exe14) test('nequaltrue', exe15) test('nequalfalse', exe16) + +# Equality comparisons of different elementary types +# (these all cause warnings currently, will become an error in future) + +assert([] != 'st', 'not equal') +assert([] != 1, 'not equal') +assert(2 != 'st', 'not equal') + +assert(not ([] 
== 'st'), 'not equal') +assert(not ([] == 1), 'not equal') +assert(not (2 == 'st'), 'not equal') diff --git a/test cases/common/51 pkgconfig-gen/dependencies/meson.build b/test cases/common/51 pkgconfig-gen/dependencies/meson.build index a767eb5..2e00943 100644 --- a/test cases/common/51 pkgconfig-gen/dependencies/meson.build +++ b/test cases/common/51 pkgconfig-gen/dependencies/meson.build @@ -5,7 +5,7 @@ pkgg = import('pkgconfig') # libmain internally use libinternal and expose libexpose in its API exposed_lib = shared_library('libexposed', 'exposed.c') internal_lib = shared_library('libinternal', 'internal.c') -main_lib = shared_library('libmain', link_with : [exposed_lib, internal_lib]) +main_lib = static_library('libmain', link_with : [exposed_lib, internal_lib]) pkgg.generate(libraries : exposed_lib, version : '1.0', @@ -21,7 +21,7 @@ custom_dep = declare_dependency(link_args : ['-lcustom'], compile_args : ['-DCUS custom2_dep = declare_dependency(link_args : ['-lcustom2'], compile_args : ['-DCUSTOM2']) # Generate a PC file: -# - Having libmain in libraries should pull implicitely libexposed and libinternal in Libs.private +# - Having libmain in libraries should pull implicitly libexposed and libinternal in Libs.private # - Having libexposed in libraries should remove it from Libs.private # - We generated a pc file for libexposed so it should be in Requires instead of Libs # - Having threads_dep in libraries should add '-pthread' in both Libs and Cflags @@ -36,3 +36,17 @@ pkgg.generate(libraries : [main_lib, exposed_lib, threads_dep , custom_dep], filebase : 'dependency-test', description : 'A dependency test.' ) + +pkgg.generate( + name : 'requires-test', + version : '1.0', + description : 'Dependency Requires field test.', + requires : [exposed_lib, pc_dep, 'libhello'], +) + +pkgg.generate( + name : 'requires-private-test', + version : '1.0', + description : 'Dependency Requires.private field test.', + requires_private : [exposed_lib, pc_dep, 'libhello', notfound_dep], +) diff --git a/test cases/common/51 pkgconfig-gen/meson.build b/test cases/common/51 pkgconfig-gen/meson.build index f9d7f7f..7e6c670 100644 --- a/test cases/common/51 pkgconfig-gen/meson.build +++ b/test cases/common/51 pkgconfig-gen/meson.build @@ -46,3 +46,9 @@ pkgg.generate( description : 'A foo library.', variables : ['foo=bar', 'datadir=${prefix}/data'] ) + +pkgg.generate( + name : 'libhello', + description : 'A minimalistic pkgconfig file.', + version : libver, +) diff --git a/test cases/common/64 custom header generator/meson.build b/test cases/common/64 custom header generator/meson.build index bcc9a53..33ba4c5 100644 --- a/test cases/common/64 custom header generator/meson.build +++ b/test cases/common/64 custom header generator/meson.build @@ -3,9 +3,9 @@ project('custom header generator', 'c') gen = find_program('makeheader.py') generated_h = custom_target('makeheader.py', -output : 'myheader.lh', # Suffix not .h to ensure this works with custom suffixes, too. -input : 'input.def', -command : [gen, '@INPUT0@', '@OUTPUT0@', files('somefile.txt')]) + output : 'myheader.lh', # Suffix not .h to ensure this works with custom suffixes, too. 
+ input : 'input.def', + command : [gen, '@INPUT0@', '@OUTPUT0@', files('somefile.txt')]) prog = executable('prog', 'prog.c', generated_h) test('gentest', prog) diff --git a/test cases/common/72 build always/version_gen.py b/test cases/common/72 build always/version_gen.py index d7b01ca..fbe2df9 100755 --- a/test cases/common/72 build always/version_gen.py +++ b/test cases/common/72 build always/version_gen.py @@ -6,14 +6,10 @@ def generate(infile, outfile, fallback): workdir = os.path.split(infile)[0] if workdir == '': workdir = '.' - version = fallback try: - p = subprocess.Popen(['git', 'describe'], cwd=workdir, stdout=subprocess.PIPE, stderr=subprocess.PIPE) - (stdo, _) = p.communicate() - if p.returncode == 0: - version = stdo.decode().strip() - except: - pass + version = subprocess.check_output(['git', 'describe'], cwd=workdir).decode().strip() + except (subprocess.CalledProcessError, OSError, UnicodeDecodeError): + version = fallback with open(infile) as f: newdata = f.read().replace('@VERSION@', version) try: @@ -21,7 +17,7 @@ def generate(infile, outfile, fallback): olddata = f.read() if olddata == newdata: return - except: + except OSError: pass with open(outfile, 'w') as f: f.write(newdata) diff --git a/test cases/common/98 gen extra/srcgen3.py b/test cases/common/98 gen extra/srcgen3.py index ad0a5cb..b737114 100644 --- a/test cases/common/98 gen extra/srcgen3.py +++ b/test cases/common/98 gen extra/srcgen3.py @@ -1,6 +1,5 @@ #!/usr/bin/env python3 -import os import sys import argparse diff --git a/test cases/csharp/1 basic/meson.build b/test cases/csharp/1 basic/meson.build index 2ee6a4a..09e46c2 100644 --- a/test cases/csharp/1 basic/meson.build +++ b/test cases/csharp/1 basic/meson.build @@ -1,4 +1,4 @@ project('simple c#', 'cs') -e = executable('prog', 'prog.cs', install : true) +e = executable('prog', 'prog.cs', 'text.cs', install : true) test('basic', e) diff --git a/test cases/csharp/1 basic/prog.cs b/test cases/csharp/1 basic/prog.cs index dfb2400..6ee47b0 100644 --- a/test cases/csharp/1 basic/prog.cs +++ b/test cases/csharp/1 basic/prog.cs @@ -1,7 +1,8 @@ using System; - + public class Prog { static public void Main () { - Console.WriteLine("C# is working."); + TextGetter tg = new TextGetter(); + Console.WriteLine(tg.getText()); } } diff --git a/test cases/csharp/1 basic/text.cs b/test cases/csharp/1 basic/text.cs new file mode 100644 index 0000000..c83c424 --- /dev/null +++ b/test cases/csharp/1 basic/text.cs @@ -0,0 +1,7 @@ +using System; + +public class TextGetter { + public String getText() { + return "C# is working."; + } +} diff --git a/test cases/csharp/4 external dep/meson.build b/test cases/csharp/4 external dep/meson.build index 004d25f..019d618 100644 --- a/test cases/csharp/4 external dep/meson.build +++ b/test cases/csharp/4 external dep/meson.build @@ -1,4 +1,9 @@ project('C# external library', 'cs') -glib_sharp_2 = dependency('glib-sharp-2.0') +glib_sharp_2 = dependency('glib-sharp-2.0', required : false) + +if not glib_sharp_2.found() + error('MESON_SKIP_TEST glib# not found.') +endif + e = executable('prog', 'prog.cs', dependencies: glib_sharp_2, install : true) test('libtest', e, args: [join_paths(meson.current_source_dir(), 'hello.txt')]) diff --git a/test cases/d/9 features/meson.build b/test cases/d/9 features/meson.build index 9e63710..356e9f3 100644 --- a/test cases/d/9 features/meson.build +++ b/test cases/d/9 features/meson.build @@ -1,8 +1,22 @@ project('D Features', 'd') -# directory for data +# ONLY FOR BACKWARDS COMPATIBILITY. 
+# DO NOT DO THIS IN NEW CODE! +# USE include_directories() INSTEAD OF BUILDING +# STRINGS TO PATHS MANUALLY! data_dir = join_paths(meson.current_source_dir(), 'data') +e_plain_bcompat = executable('dapp_menu_bcompat', + 'app.d', + d_import_dirs: [data_dir] +) +test('dapp_menu_t_fail_bcompat', e_plain_bcompat, should_fail: true) +test('dapp_menu_t_bcompat', e_plain_bcompat, args: ['menu']) + +# directory for data +# This is the correct way to do this. +data_dir = include_directories('data') + e_plain = executable('dapp_menu', 'app.d', d_import_dirs: [data_dir] @@ -10,6 +24,7 @@ e_plain = executable('dapp_menu', test('dapp_menu_t_fail', e_plain, should_fail: true) test('dapp_menu_t', e_plain, args: ['menu']) + # test feature versions and string imports e_versions = executable('dapp_versions', 'app.d', diff --git a/test cases/failing/70 install_data rename bad size/file1.txt b/test cases/failing/70 install_data rename bad size/file1.txt new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/test cases/failing/70 install_data rename bad size/file1.txt diff --git a/test cases/failing/70 install_data rename bad size/file2.txt b/test cases/failing/70 install_data rename bad size/file2.txt new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/test cases/failing/70 install_data rename bad size/file2.txt diff --git a/test cases/failing/70 install_data rename bad size/meson.build b/test cases/failing/70 install_data rename bad size/meson.build new file mode 100644 index 0000000..c7cde08 --- /dev/null +++ b/test cases/failing/70 install_data rename bad size/meson.build @@ -0,0 +1,3 @@ +project('data install test', 'c') + +install_data(['file1.txt', 'file2.txt'], rename : 'just one name') diff --git a/test cases/frameworks/1 boost/meson.build b/test cases/frameworks/1 boost/meson.build index 771ecbc..df55b30 100644 --- a/test cases/frameworks/1 boost/meson.build +++ b/test cases/frameworks/1 boost/meson.build @@ -5,6 +5,11 @@ add_project_arguments(['-DBOOST_LOG_DYN_LINK'], language : 'cpp' ) +dep = dependency('boost', required: false) +if not dep.found() + error('MESON_SKIP_TEST boost not found.') +endif + # We want to have multiple separate configurations of Boost # within one project. The need to be independent of each other. # Use one without a library dependency and one with it. 
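
Several framework tests above now probe for their prerequisites and bail out with error('MESON_SKIP_TEST ...') when something is missing. The marker is a convention between the test projects and the test runner: configuration still fails, but the runner spots the string in the output and records a skip rather than a failure. A simplified sketch of the consuming side (not the project's actual runner code):

    import subprocess

    def configure_or_skip(meson_cmd, srcdir, builddir):
        proc = subprocess.run(meson_cmd + [srcdir, builddir],
                              stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
        output = proc.stdout.decode(errors='replace')
        # A failed configure that printed the marker is a deliberate skip.
        if proc.returncode != 0 and 'MESON_SKIP_TEST' in output:
            return 'skipped'
        return 'ok' if proc.returncode == 0 else 'failed'
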
diff --git a/test cases/frameworks/10 gtk-doc/include/meson.build b/test cases/frameworks/10 gtk-doc/include/meson.build
index f6dd99f..aa32885 100644
--- a/test cases/frameworks/10 gtk-doc/include/meson.build
+++ b/test cases/frameworks/10 gtk-doc/include/meson.build
@@ -13,4 +13,5 @@ generate_enums_docbook = find_program('generate-enums-docbook.py')
 
 docbook = custom_target('enum-docbook',
   output : 'bar.xml',
-  command : [generate_enums_docbook, '@OUTPUT@', 'BAR', 'BAR_TYPE', 'BAR_FOO'])
+  command : [generate_enums_docbook, '@OUTPUT@', 'BAR', 'BAR_TYPE', 'BAR_FOO'],
+  build_by_default : true)
diff --git a/test cases/frameworks/10 gtk-doc/installed_files.txt.bak b/test cases/frameworks/10 gtk-doc/installed_files.txt
index 9004af2..6f8ca01 100644
--- a/test cases/frameworks/10 gtk-doc/installed_files.txt.bak
+++ b/test cases/frameworks/10 gtk-doc/installed_files.txt
@@ -1,13 +1,15 @@
+usr/include/foo-version.h
+usr/share/gtk-doc/html/foobar/BAR.html
 usr/share/gtk-doc/html/foobar/foobar.devhelp2
-usr/share/gtk-doc/html/foobar/foobar-foo.html
 usr/share/gtk-doc/html/foobar/foobar.html
+usr/share/gtk-doc/html/foobar/foobar-foo.html
+usr/share/gtk-doc/html/foobar/foobar-foo-version.html
 usr/share/gtk-doc/html/foobar/home.png
 usr/share/gtk-doc/html/foobar/index.html
-usr/share/gtk-doc/html/foobar/index.sgml
-usr/share/gtk-doc/html/foobar/left-insensitive.png
 usr/share/gtk-doc/html/foobar/left.png
-usr/share/gtk-doc/html/foobar/right-insensitive.png
+usr/share/gtk-doc/html/foobar/left-insensitive.png
 usr/share/gtk-doc/html/foobar/right.png
+usr/share/gtk-doc/html/foobar/right-insensitive.png
 usr/share/gtk-doc/html/foobar/style.css
-usr/share/gtk-doc/html/foobar/up-insensitive.png
 usr/share/gtk-doc/html/foobar/up.png
+usr/share/gtk-doc/html/foobar/up-insensitive.png
diff --git a/test cases/frameworks/10 gtk-doc/meson.build b/test cases/frameworks/10 gtk-doc/meson.build
index e5e7705..5c22ad0 100644
--- a/test cases/frameworks/10 gtk-doc/meson.build
+++ b/test cases/frameworks/10 gtk-doc/meson.build
@@ -1,5 +1,10 @@
 project('gtkdoctest', 'c', version : '1.0.0')
+gtkdoc = find_program('gtkdoc-scan', required: false)
+if not gtkdoc.found()
+  error('MESON_SKIP_TEST gtkdoc not found.')
+endif
+
 
 gnome = import('gnome')
 
 assert(gnome.gtkdoc_html_dir('foobar') == 'share/gtk-doc/html/foobar', 'Gtkdoc install dir is incorrect.')
@@ -8,8 +13,15 @@ inc = include_directories('include')
 
 subdir('include')
 
-# We have to disable this test until this bug fix has landed to
-# distros https://bugzilla.gnome.org/show_bug.cgi?id=753145
-error('MESON_SKIP_TEST can not enable gtk-doc test until upstream fixes have landed.')
+# disable this test unless a bug fix for spaces in pathnames is present
+# https://bugzilla.gnome.org/show_bug.cgi?id=753145
+result = run_command(gtkdoc, ['--version'])
+gtkdoc_ver = result.stdout().strip()
+if gtkdoc_ver == ''
+  gtkdoc_ver = result.stderr().strip()
+endif
+if gtkdoc_ver.version_compare('<1.26')
+  error('MESON_SKIP_TEST gtk-doc test requires gtkdoc >= 1.26.')
+endif
 
 subdir('doc')
diff --git a/test cases/frameworks/11 gir subproject/meson.build b/test cases/frameworks/11 gir subproject/meson.build
index f3bde40..a599ae9 100644
--- a/test cases/frameworks/11 gir subproject/meson.build
+++ b/test cases/frameworks/11 gir subproject/meson.build
@@ -1,5 +1,16 @@
 project('gobject-introspection-with-subproject', 'c')
+gir = find_program('g-ir-scanner', required: false)
+if not gir.found()
+  error('MESON_SKIP_TEST g-ir-scanner not found.')
+endif
+
+python3 = import('python3')
+py3 = python3.find_python()
+if run_command(py3, '-c', 'import gi;').returncode() != 0
+  error('MESON_SKIP_TEST python3-gi not found')
+endif
+
 
 gnome = import('gnome')
 
 gobj = dependency('gobject-2.0')
@@ -7,4 +18,3 @@ add_global_arguments('-DMESON_TEST', language : 'c')
 meson_gir = dependency('meson-gir', fallback : ['mesongir', 'meson_gir'])
 
 subdir('gir')
-
diff --git a/test cases/frameworks/12 multiple gir/meson.build b/test cases/frameworks/12 multiple gir/meson.build
index 794abc5..ddc9830 100644
--- a/test cases/frameworks/12 multiple gir/meson.build
+++ b/test cases/frameworks/12 multiple gir/meson.build
@@ -1,5 +1,10 @@
 project('multiple-gobject-introspection', 'c')
+gir = find_program('g-ir-scanner', required: false)
+if not gir.found()
+  error('MESON_SKIP_TEST g-ir-scanner not found.')
+endif
+
 
 gnome = import('gnome')
 
 gobj = dependency('gobject-2.0')
diff --git a/test cases/frameworks/13 yelp/meson.build b/test cases/frameworks/13 yelp/meson.build
index 725ff7b..9fdde25 100644
--- a/test cases/frameworks/13 yelp/meson.build
+++ b/test cases/frameworks/13 yelp/meson.build
@@ -1,2 +1,8 @@
 project('yelp', 'c')
+
+itstool = find_program('itstool', required: false)
+if not itstool.found()
+  error('MESON_SKIP_TEST itstool not found.')
+endif
+
 subdir('help')
diff --git a/test cases/frameworks/14 doxygen/include/comedian.h b/test cases/frameworks/14 doxygen/include/comedian.h
index 97b5086..d62b283 100644
--- a/test cases/frameworks/14 doxygen/include/comedian.h
+++ b/test cases/frameworks/14 doxygen/include/comedian.h
@@ -11,7 +11,7 @@ namespace Comedy {
      * Do the thing people want to happen.
      */
     virtual void tell_joke() = 0;
-    virtual ~Comedian();
+    virtual ~Comedian(){};
 };
 
 }
diff --git a/test cases/frameworks/14 doxygen/include/spede.h b/test cases/frameworks/14 doxygen/include/spede.h
index 8175465..380708a 100644
--- a/test cases/frameworks/14 doxygen/include/spede.h
+++ b/test cases/frameworks/14 doxygen/include/spede.h
@@ -29,10 +29,7 @@ namespace Comedy {
         throw std::runtime_error("Not implemented");
     }
 
+private:
+    int num_movies; ///< How many movies has he done.
 };
-
-
-private:
-
-    int num_movies; ///< How many movies has he done.
 }
diff --git a/test cases/frameworks/14 doxygen/meson.build b/test cases/frameworks/14 doxygen/meson.build
index 55df316..023aa0e 100644
--- a/test cases/frameworks/14 doxygen/meson.build
+++ b/test cases/frameworks/14 doxygen/meson.build
@@ -1,5 +1,11 @@
 project('doxygen test', 'cpp', version : '0.1.0')
+spede_inc = include_directories('include')
+
+spede_src = [ 'src/spede.cpp' ]
+
+spede_lib = library('spede', spede_src, include_directories: spede_inc)
+
 
 doxygen = find_program('doxygen', required : false)
 if not doxygen.found()
   error('MESON_SKIP_TEST doxygen not found.')
diff --git a/test cases/frameworks/14 doxygen/src/spede.cpp b/test cases/frameworks/14 doxygen/src/spede.cpp
index 31c8fb2..d382902 100644
--- a/test cases/frameworks/14 doxygen/src/spede.cpp
+++ b/test cases/frameworks/14 doxygen/src/spede.cpp
@@ -42,7 +42,7 @@ int gesticulate(int force) {
 
 Spede::Spede() : num_movies(100) {
 }
 
-Spede::slap_forehead() {
+void Spede::slap_forehead() {
     gesticulate(42);
 }
diff --git a/test cases/frameworks/15 llvm/meson.build b/test cases/frameworks/15 llvm/meson.build
index 549adce..b5505eb 100644
--- a/test cases/frameworks/15 llvm/meson.build
+++ b/test cases/frameworks/15 llvm/meson.build
@@ -1,5 +1,10 @@
 project('llvmtest', ['c', 'cpp'], default_options : ['c_std=c99'])
+d = dependency('llvm', required : false)
+if not d.found()
+  error('MESON_SKIP_TEST llvm not found.')
+endif
+
 
 d = dependency('llvm', modules : 'not-found', required : false)
 assert(d.found() == false, 'not-found llvm module found')
 
@@ -12,7 +17,7 @@ assert(d.found() == true, 'optional module stopped llvm from being found.')
 dep_tinfo = dependency('tinfo', required : false)
 if not dep_tinfo.found()
   cpp = meson.get_compiler('cpp')
-  dep_tinfo = cpp.find_library('tinfo')
+  dep_tinfo = cpp.find_library('tinfo', required: false)
 endif
 
 foreach static : [true, false]
diff --git a/test cases/frameworks/16 sdl2/meson.build b/test cases/frameworks/16 sdl2/meson.build
index 61a34ef..1bbf09f 100644
--- a/test cases/frameworks/16 sdl2/meson.build
+++ b/test cases/frameworks/16 sdl2/meson.build
@@ -1,6 +1,10 @@
 project('sdl2 test', 'c')
 
-sdl2_dep = dependency('sdl2', version : '>=2.0.0')
+sdl2_dep = dependency('sdl2', version : '>=2.0.0', required: false)
+
+if not sdl2_dep.found()
+  error('MESON_SKIP_TEST sdl2 not found.')
+endif
 
 e = executable('sdl2prog', 'sdl2prog.c', dependencies : sdl2_dep)
 
diff --git a/test cases/frameworks/19 pcap/meson.build b/test cases/frameworks/19 pcap/meson.build
index f02f411..eb6fc2c 100644
--- a/test cases/frameworks/19 pcap/meson.build
+++ b/test cases/frameworks/19 pcap/meson.build
@@ -1,6 +1,10 @@
 project('pcap test', 'c')
 
-pcap_dep = dependency('pcap', version : '>=1.0')
+pcap_dep = dependency('pcap', version : '>=1.0', required: false)
+
+if not pcap_dep.found()
+  error('MESON_SKIP_TEST pcap not found.')
+endif
 
 pcap_ver = pcap_dep.version()
 assert(pcap_ver.split('.').length() > 1, 'pcap version is "@0@"'.format(pcap_ver))
@@ -9,6 +13,6 @@ e = executable('pcap_prog', 'pcap_prog.c', dependencies : pcap_dep)
 
 test('pcaptest', e)
 
-# Ensure discovery bia the configuration tools work also
+# Ensure discovery via the configuration tools work also
 pcap_dep = dependency('pcap', version : '>=1.0', method : 'pcap-config')
 pcap_dep = dependency('pcap', version : '>=1.0', method : 'config-tool')
diff --git a/test cases/frameworks/19 pcap/pcap_prog.c b/test cases/frameworks/19 pcap/pcap_prog.c
index 18e0ad8..0fca16c 100644
--- a/test cases/frameworks/19 pcap/pcap_prog.c
+++ b/test cases/frameworks/19 pcap/pcap_prog.c
@@ -4,6 +4,12 @@ int main() {
 
     char errbuf[PCAP_ERRBUF_SIZE];
-    pcap_t *p = pcap_create(NULL, errbuf);
+#ifdef __APPLE__
+    // source = NULL for "any" doesn't work on macOS (linux only?)
+    char *source = "en0";
+#else
+    char *source = NULL;
+#endif
+    pcap_t *p = pcap_create(source, errbuf);
 
     return p == NULL;
 }
diff --git a/test cases/frameworks/20 cups/meson.build b/test cases/frameworks/20 cups/meson.build
index 11f6f63..9040de6 100644
--- a/test cases/frameworks/20 cups/meson.build
+++ b/test cases/frameworks/20 cups/meson.build
@@ -1,6 +1,10 @@
 project('cups test', 'c')
 
-cups_dep = dependency('cups', version : '>=1.4')
+cups_dep = dependency('cups', version : '>=1.4', required: false)
+
+if not cups_dep.found()
+  error('MESON_SKIP_TEST cups not found.')
+endif
 
 e = executable('cups_prog', 'cups_prog.c', dependencies : cups_dep)
 
diff --git a/test cases/frameworks/4 qt/meson.build b/test cases/frameworks/4 qt/meson.build
index b0e848d..b508df3 100644
--- a/test cases/frameworks/4 qt/meson.build
+++ b/test cases/frameworks/4 qt/meson.build
@@ -21,6 +21,14 @@ foreach qt : ['qt4', 'qt5']
     error('Invalid qt dep incorrectly found!')
   endif
 
+  # This test should be skipped if qt5 isn't found
+  if qt == 'qt5'
+    dep = dependency(qt, modules : ['Core'], required : false, method : get_option('method'))
+    if not dep.found()
+      error('MESON_SKIP_TEST qt5 not found.')
+    endif
+  endif
+
   # Ensure that the "no-Core-module-specified" code branch is hit
   nocoredep = dependency(qt, modules : ['Gui'], required : qt == 'qt5', method : get_option('method'))
 
diff --git a/test cases/frameworks/6 gettext/installed_files.txt b/test cases/frameworks/6 gettext/installed_files.txt
index ffe543f..879f56b 100644
--- a/test cases/frameworks/6 gettext/installed_files.txt
+++ b/test cases/frameworks/6 gettext/installed_files.txt
@@ -1,4 +1,4 @@
-usr/bin/intlprog
+usr/bin/intlprog?exe
 usr/share/locale/de/LC_MESSAGES/intltest.mo
 usr/share/locale/fi/LC_MESSAGES/intltest.mo
 usr/share/applications/test.desktop
diff --git a/test cases/frameworks/6 gettext/meson.build b/test cases/frameworks/6 gettext/meson.build
index 6b517a4..e02234b 100644
--- a/test cases/frameworks/6 gettext/meson.build
+++ b/test cases/frameworks/6 gettext/meson.build
@@ -1,5 +1,14 @@
 project('gettext example', 'c')
+gettext = find_program('gettext', required: false)
+if not gettext.found()
+  error('MESON_SKIP_TEST gettext not found.')
+endif
+
+if not meson.get_compiler('c').has_header('libintl.h')
+  error('MESON_SKIP_TEST libintl.h not found.')
+endif
+
 
 i18n = import('i18n')
 
 subdir('po')
diff --git a/test cases/frameworks/7 gnome/meson.build b/test cases/frameworks/7 gnome/meson.build
index c75c049..03335b8 100644
--- a/test cases/frameworks/7 gnome/meson.build
+++ b/test cases/frameworks/7 gnome/meson.build
@@ -1,5 +1,16 @@
 project('gobject-introspection', 'c')
+glib = dependency('glib-2.0', required: false)
+if not glib.found()
+  error('MESON_SKIP_TEST glib not found.')
+endif
+
+python3 = import('python3')
+py3 = python3.find_python()
+if run_command(py3, '-c', 'import gi;').returncode() != 0
+  error('MESON_SKIP_TEST python3-gi not found')
+endif
+
 
 cc = meson.get_compiler('c')
 
 add_global_arguments('-DMESON_TEST', language : 'c')
@@ -9,6 +20,19 @@ if cc.get_id() == 'intel'
   add_global_arguments('-wd2282', language : 'c')
 endif
 
+py3 = import('python3').find_python()
+pycode = '''import os, sys
+if "MESON_UNIT_TEST_PRETEND_GLIB_OLD" in os.environ:
+    sys.exit(0)
+sys.exit(1)
+'''
+
+pretend_glib_old = false
+res = run_command(py3, '-c', pycode)
+if res.returncode() == 0
+  pretend_glib_old = true
+endif
+
 gnome = import('gnome')
 gio = dependency('gio-2.0')
 giounix = dependency('gio-unix-2.0')
@@ -17,6 +41,9 @@ gobj = dependency('gobject-2.0')
 gir = dependency('gobject-introspection-1.0')
 gmod = dependency('gmodule-2.0')
 
+# Test that static deps don't error out when static libraries aren't found
+glib_static = dependency('glib-2.0', static : true)
+
 subdir('resources-data')
 subdir('resources')
 subdir('gir')
diff --git a/test cases/frameworks/7 gnome/resources/meson.build b/test cases/frameworks/7 gnome/resources/meson.build
index 3ebb2f5..b945cda 100644
--- a/test cases/frameworks/7 gnome/resources/meson.build
+++ b/test cases/frameworks/7 gnome/resources/meson.build
@@ -29,7 +29,7 @@ gnome.compile_resources('simple-resources',
 )
 test('simple resource test (gresource)', find_program('resources.py'))
 
-if glib.version() >= '2.52.0'
+if not pretend_glib_old and glib.version() >= '2.52.0'
   # This test cannot pass if GLib version is older than 9.99.9.
   # Meson will raise an error if the user tries to use the 'dependencies'
   # argument and the version of GLib is too old for generated resource
diff --git a/test cases/frameworks/8 flex/meson.build b/test cases/frameworks/8 flex/meson.build
index 13ac9f6..cb5efde 100644
--- a/test cases/frameworks/8 flex/meson.build
+++ b/test cases/frameworks/8 flex/meson.build
@@ -4,8 +4,16 @@ project('flex and bison', 'c')
 # may output headers that are necessary to build
 # the sources of a different generator.
 
-flex = find_program('flex')
-bison = find_program('bison')
+flex = find_program('flex', required: false)
+bison = find_program('bison', required: false)
+
+if not flex.found()
+  error('MESON_SKIP_TEST flex not found.')
+endif
+
+if not bison.found()
+  error('MESON_SKIP_TEST bison not found.')
+endif
 
 lgen = generator(flex,
 output : '@PLAINNAME@.yy.c',
@@ -23,4 +31,3 @@ e = executable('pgen', 'prog.c', lfiles, pfiles)
 
 test('parsertest', e)
 
-
diff --git a/test cases/frameworks/8 flex/prog.c b/test cases/frameworks/8 flex/prog.c
index 1e48f61..0b84d18 100644
--- a/test cases/frameworks/8 flex/prog.c
+++ b/test cases/frameworks/8 flex/prog.c
@@ -6,6 +6,8 @@
 #include<stdio.h>
 #include<stdlib.h>
 
+extern int yyparse();
+
 int main(int argc, char **argv) {
 /*
     int input;
diff --git a/test cases/frameworks/9 wxwidgets/meson.build b/test cases/frameworks/9 wxwidgets/meson.build
index 5f9419c..d815a2d 100644
--- a/test cases/frameworks/9 wxwidgets/meson.build
+++ b/test cases/frameworks/9 wxwidgets/meson.build
@@ -7,4 +7,9 @@ if wxd.found()
   wp = executable('wxprog', 'wxprog.cpp', dependencies : wxd)
   test('wxtest', wp)
+
+  # WxWidgets framework is available, we can use required here
+  wx_stc = dependency('wxwidgets', version : '>=3.0.0', modules : ['std', 'stc'])
+  stc_exe = executable('wxstc', 'wxstc.cpp', dependencies : wx_stc)
+  test('wxstctest', stc_exe)
 endif
diff --git a/test cases/frameworks/9 wxwidgets/wxstc.cpp b/test cases/frameworks/9 wxwidgets/wxstc.cpp
new file mode 100644
index 0000000..8499ff9
--- /dev/null
+++ b/test cases/frameworks/9 wxwidgets/wxstc.cpp
@@ -0,0 +1,6 @@
+#include <wx/stc/stc.h>
+
+int main() {
+    wxStyledTextCtrl *canvas = new wxStyledTextCtrl();
+    delete canvas;
+}
diff --git a/test cases/linuxlike/9 compiler checks with dependencies/meson.build b/test cases/linuxlike/9 compiler checks with dependencies/meson.build
index bebfb84..9f1755b 100644
--- a/test cases/linuxlike/9 compiler checks with dependencies/meson.build
+++ b/test cases/linuxlike/9 compiler checks with dependencies/meson.build
@@ -26,7 +26,7 @@ int main(int argc, char *argv[]) {
   return ptr == 0;
 }
 '''
 
-  assert (cc.has_function('deflate', prefix : '#include<zlib.h>', dependencies : zlib, name : 'Test for function in zlib'), 'has_function test failed.')
+  assert (cc.has_function('deflate', prefix : '#include<zlib.h>', dependencies : zlib), 'has_function test failed.')
   assert (cc.links(linkcode, dependencies : zlib, name : 'Test link against zlib'), 'Linking test failed against zlib.')
 endif
diff --git a/test cases/rust/7 private crate collision/installed_files.txt b/test cases/rust/7 private crate collision/installed_files.txt
new file mode 100644
index 0000000..06ebd77
--- /dev/null
+++ b/test cases/rust/7 private crate collision/installed_files.txt
@@ -0,0 +1,2 @@
+usr/bin/prog?exe
+usr/lib/librand.rlib
diff --git a/test cases/rust/7 private crate collision/meson.build b/test cases/rust/7 private crate collision/meson.build
new file mode 100644
index 0000000..81b6aab
--- /dev/null
+++ b/test cases/rust/7 private crate collision/meson.build
@@ -0,0 +1,5 @@
+project('rust private crate collision', 'rust')
+
+l = static_library('rand', 'rand.rs', install : true)
+e = executable('prog', 'prog.rs', link_with : l, install : true)
+test('linktest', e)
diff --git a/test cases/rust/7 private crate collision/prog.rs b/test cases/rust/7 private crate collision/prog.rs
new file mode 100644
index 0000000..b9a30f1
--- /dev/null
+++ b/test cases/rust/7 private crate collision/prog.rs
@@ -0,0 +1,3 @@
+extern crate rand;
+
+fn main() { println!("printing: {}", rand::explore()); }
diff --git a/test cases/rust/7 private crate collision/rand.rs b/test cases/rust/7 private crate collision/rand.rs
new file mode 100644
index 0000000..8a3d427
--- /dev/null
+++ b/test cases/rust/7 private crate collision/rand.rs
@@ -0,0 +1,4 @@
+// use a name that collides with one of the rustc_private libraries
+#![crate_name = "rand"]
+
+pub fn explore() -> &'static str { "librarystring" }
diff --git a/test cases/unit/13 testsetup selection/main.c b/test cases/unit/13 testsetup selection/main.c
new file mode 100644
index 0000000..cb3f748
--- /dev/null
+++ b/test cases/unit/13 testsetup selection/main.c
@@ -0,0 +1,3 @@
+int main() {
+  return 0;
+}
diff --git a/test cases/unit/13 testsetup selection/meson.build b/test cases/unit/13 testsetup selection/meson.build
new file mode 100644
index 0000000..ae996c5
--- /dev/null
+++ b/test cases/unit/13 testsetup selection/meson.build
@@ -0,0 +1,10 @@
+project('main', 'c')
+
+main = executable('main', 'main.c')
+test('Test main', main)
+
+add_test_setup('worksforall')
+add_test_setup('missingfromfoo')
+
+subproject('foo')
+subproject('bar')
diff --git a/test cases/unit/13 testsetup selection/subprojects/bar/bar.c b/test cases/unit/13 testsetup selection/subprojects/bar/bar.c
new file mode 100644
index 0000000..cb3f748
--- /dev/null
+++ b/test cases/unit/13 testsetup selection/subprojects/bar/bar.c
@@ -0,0 +1,3 @@
+int main() {
+  return 0;
+}
diff --git a/test cases/unit/13 testsetup selection/subprojects/bar/meson.build b/test cases/unit/13 testsetup selection/subprojects/bar/meson.build
new file mode 100644
index 0000000..1155a88
--- /dev/null
+++ b/test cases/unit/13 testsetup selection/subprojects/bar/meson.build
@@ -0,0 +1,6 @@
+project('bar', 'c')
+bar = executable('bar', 'bar.c')
+test('Test bar', bar)
+add_test_setup('onlyinbar')
+add_test_setup('worksforall')
+add_test_setup('missingfromfoo')
diff --git a/test cases/unit/13 testsetup selection/subprojects/foo/foo.c b/test cases/unit/13 testsetup selection/subprojects/foo/foo.c
new file mode 100644
index 0000000..cb3f748
--- /dev/null
+++ b/test cases/unit/13 testsetup selection/subprojects/foo/foo.c
@@ -0,0 +1,3 @@
+int main() {
+  return 0;
+}
diff --git a/test cases/unit/13 testsetup selection/subprojects/foo/meson.build b/test cases/unit/13 testsetup selection/subprojects/foo/meson.build
new file mode 100644
index 0000000..2eef840
--- /dev/null
+++ b/test cases/unit/13 testsetup selection/subprojects/foo/meson.build
@@ -0,0 +1,4 @@
+project('foo', 'c')
+foo = executable('foo', 'foo.c')
+test('Test foo', foo)
+add_test_setup('worksforall')
diff --git a/test cases/unit/17 pkgconfig static/foo.pc.in b/test cases/unit/17 pkgconfig static/foo.pc.in
index 94d8031..b26c0b0 100644
--- a/test cases/unit/17 pkgconfig static/foo.pc.in
+++ b/test cases/unit/17 pkgconfig static/foo.pc.in
@@ -7,4 +7,5 @@ Name: libfoo
 Description: A foo library.
 Version: 1.0
 Libs: -L${libdir} -lfoo
+Libs.private: -lm
 Cflags: -I${includedir}
diff --git a/test cases/unit/19 bad command line options/subprojects/one/meson.build b/test cases/unit/19 bad command line options/subprojects/one/meson.build
index 39ae07e..85ef742 100644
--- a/test cases/unit/19 bad command line options/subprojects/one/meson.build
+++ b/test cases/unit/19 bad command line options/subprojects/one/meson.build
@@ -12,4 +12,4 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-project('one subproject')
+project('one subproject', default_options : [ 'b_colorout=never' ])
diff --git a/test cases/unit/21 exit status/meson.build b/test cases/unit/21 exit status/meson.build
new file mode 100644
index 0000000..4f5485b
--- /dev/null
+++ b/test cases/unit/21 exit status/meson.build
@@ -0,0 +1,2 @@
+project('exit status')
+exception()
diff --git a/test cases/unit/23 compiler run_command/meson.build b/test cases/unit/23 compiler run_command/meson.build
new file mode 100644
index 0000000..6d9e0b9
--- /dev/null
+++ b/test cases/unit/23 compiler run_command/meson.build
@@ -0,0 +1,10 @@
+project('compiler_object_in_run_command', 'c')
+cc = meson.get_compiler('c')
+
+# This test only checks that the compiler object can be passed to
+# run_command(). If the compiler has been launched, it is expected
+# to output something either to stdout or to stderr.
+result = run_command(cc, '--version')
+if result.stdout() == '' and result.stderr() == ''
+  error('No output in stdout and stderr. Did the compiler run at all?')
+endif
diff --git a/test cases/unit/23 non-permitted kwargs/meson.build b/test cases/unit/23 non-permitted kwargs/meson.build
new file mode 100644
index 0000000..9f7dc1f
--- /dev/null
+++ b/test cases/unit/23 non-permitted kwargs/meson.build
@@ -0,0 +1,5 @@
+project('non-permitted kwargs', 'c')
+cc = meson.get_compiler('c')
+cc.has_header_symbol('stdio.h', 'printf', prefixxx: '#define XXX')
+cc.links('int main(){}', argsxx: '')
+cc.get_id(invalidxx: '')
diff --git a/test cases/unit/24 pkgconfig usage/dependee/meson.build b/test cases/unit/24 pkgconfig usage/dependee/meson.build
new file mode 100644
index 0000000..beb446c
--- /dev/null
+++ b/test cases/unit/24 pkgconfig usage/dependee/meson.build
@@ -0,0 +1,7 @@
+project('pkgconfig user', 'c')
+
+pkgdep = dependency('libpkgdep')
+
+executable('pkguser', 'pkguser.c',
+  dependencies : pkgdep)
+
diff --git a/test cases/unit/24 pkgconfig usage/dependee/pkguser.c b/test cases/unit/24 pkgconfig usage/dependee/pkguser.c
new file mode 100644
index 0000000..2bff316
--- /dev/null
+++ b/test cases/unit/24 pkgconfig usage/dependee/pkguser.c
@@ -0,0 +1,6 @@
+#include<pkgdep.h>
+
+int main(int argc, char **argv) {
+    int res = pkgdep();
+    return res != 99;
+}
diff --git a/test cases/unit/24 pkgconfig usage/dependency/meson.build b/test cases/unit/24 pkgconfig usage/dependency/meson.build
new file mode 100644
index 0000000..89fae8e
--- /dev/null
+++ b/test cases/unit/24 pkgconfig usage/dependency/meson.build
@@ -0,0 +1,24 @@
+project('pkgconfig dep', 'c',
+  version : '1.0.0')
+
+# This is not used in the code, only to check that it does not
+# leak out to --libs.
+glib_dep = dependency('glib-2.0')
+
+pkgconfig = import('pkgconfig')
+
+intlib = static_library('libpkgdep-int', 'privatelib.c')
+intdep = declare_dependency(link_with : intlib)
+
+lib = shared_library('pkgdep', 'pkgdep.c',
+  dependencies : [glib_dep, intdep],
+  install : true)
+
+install_headers('pkgdep.h')
+
+pkgconfig.generate(
+  filebase : 'libpkgdep',
+  name : 'Libpkgdep',
+  description : 'Sample pkgconfig dependency library',
+  version : meson.project_version(),
+  libraries : lib)
diff --git a/test cases/unit/24 pkgconfig usage/dependency/pkgdep.c b/test cases/unit/24 pkgconfig usage/dependency/pkgdep.c
new file mode 100644
index 0000000..bd5c3f4
--- /dev/null
+++ b/test cases/unit/24 pkgconfig usage/dependency/pkgdep.c
@@ -0,0 +1,7 @@
+#include<pkgdep.h>
+
+int internal_thingy();
+
+int pkgdep() {
+    return internal_thingy();
+}
diff --git a/test cases/unit/24 pkgconfig usage/dependency/pkgdep.h b/test cases/unit/24 pkgconfig usage/dependency/pkgdep.h
new file mode 100644
index 0000000..16d622e
--- /dev/null
+++ b/test cases/unit/24 pkgconfig usage/dependency/pkgdep.h
@@ -0,0 +1,3 @@
+#pragma once
+
+int pkgdep();
diff --git a/test cases/unit/24 pkgconfig usage/dependency/privatelib.c b/test cases/unit/24 pkgconfig usage/dependency/privatelib.c
new file mode 100644
index 0000000..71d2179
--- /dev/null
+++ b/test cases/unit/24 pkgconfig usage/dependency/privatelib.c
@@ -0,0 +1,3 @@
+int internal_thingy() {
+    return 99;
+}
diff --git a/test cases/unit/25 ndebug if-release/main.c b/test cases/unit/25 ndebug if-release/main.c
new file mode 100644
index 0000000..70b3d04
--- /dev/null
+++ b/test cases/unit/25 ndebug if-release/main.c
@@ -0,0 +1,11 @@
+#include <stdio.h>
+#include <stdlib.h>
+
+int main(void) {
+#ifdef NDEBUG
+    printf("NDEBUG=1\n");
+#else
+    printf("NDEBUG=0\n");
+#endif
+    return 0;
+}
diff --git a/test cases/unit/25 ndebug if-release/meson.build b/test cases/unit/25 ndebug if-release/meson.build
new file mode 100644
index 0000000..4af2406
--- /dev/null
+++ b/test cases/unit/25 ndebug if-release/meson.build
@@ -0,0 +1,3 @@
+project('ndebug enabled', 'c')
+
+executable('main', 'main.c')
diff --git a/test cases/vala/8 generated sources/meson.build b/test cases/vala/8 generated sources/meson.build
index 277c943..711a93a 100644
--- a/test cases/vala/8 generated sources/meson.build
+++ b/test cases/vala/8 generated sources/meson.build
@@ -4,6 +4,11 @@ cd = configuration_data()
 cd.set('x', 'y')
 
 subdir('src')
+
+executable('generatedtestparent', [src, config, returncode, wrapper],
+  install : true,
+  dependencies: [dependency('glib-2.0'), dependency('gobject-2.0')])
+
 subdir('tools')
 subdir('onlygen')
 subdir('dependency-generated')
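
The framework test changes above all apply the same skip pattern: probe for an optional tool or dependency with required: false and, if it is missing, call error() with a message that starts with MESON_SKIP_TEST, which the project test runner treats as a skipped test rather than a failure. The snippet below is a minimal sketch of that pattern and is not part of the diff; the tool name 'frobnicate' and the version bound are made-up placeholders.

project('skip pattern sketch', 'c')

# Probe for an optional tool without aborting configuration.
frob = find_program('frobnicate', required : false)   # hypothetical tool name
if not frob.found()
  # The MESON_SKIP_TEST prefix is the marker the test runner looks for
  # to record this project test as skipped instead of failed.
  error('MESON_SKIP_TEST frobnicate not found.')
endif

# The same idea can gate on a version, as the gtk-doc change above does.
frob_ver = run_command(frob, ['--version']).stdout().strip()
if frob_ver.version_compare('<1.0')
  error('MESON_SKIP_TEST frobnicate is too old.')
endif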