154 files changed, 3872 insertions, 1789 deletions
@@ -16,6 +16,7 @@ __pycache__ .DS_Store *~ +*.swp packagecache /MANIFEST /build diff --git a/.travis.yml b/.travis.yml index 62385d6..6f31f99 100644 --- a/.travis.yml +++ b/.travis.yml @@ -42,7 +42,7 @@ before_install: - python ./skip_ci.py --base-branch-env=TRAVIS_BRANCH --is-pull-env=TRAVIS_PULL_REQUEST - if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then brew update; fi - if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then brew uninstall python mercurial; fi - - if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then brew install python@2 python@3 mercurial qt; fi + - if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then brew install python@2 python@3 mercurial qt pkg-config; fi # Use a Ninja with QuLogic's patch: https://github.com/ninja-build/ninja/issues/1219 - if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then mkdir -p $HOME/tools; curl -L http://nirbheek.in/files/binaries/ninja/macos/ninja -o $HOME/tools/ninja; chmod +x $HOME/tools/ninja; fi - if [[ "$TRAVIS_OS_NAME" == "linux" ]]; then docker pull jpakkane/mesonci:bionic; fi diff --git a/MANIFEST.in b/MANIFEST.in index 3ea9fe6..53853d3 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -13,6 +13,7 @@ include README.md include run_cross_test.py include run_tests.py include run_unittests.py +include run_meson_command_tests.py include run_project_tests.py include mesonrewriter.py include ghwt.py diff --git a/cross/armclang.txt b/cross/armclang.txt new file mode 100644 index 0000000..955b7ef --- /dev/null +++ b/cross/armclang.txt @@ -0,0 +1,20 @@ +# This file assumes that path to the arm compiler toolchain is added +# to the environment(PATH) variable, so that Meson can find +# the armclang, armlink and armar while building. +[binaries] +c = 'armclang' +cpp = 'armclang' +ar = 'armar' +strip = 'armar' + +[properties] +# The '--target', '-mcpu' options with the appropriate values should be mentioned +# to cross compile c/c++ code with armclang. +c_args = ['--target=arm-arm-none-eabi', '-mcpu=cortex-m0plus'] +cpp_args = ['--target=arm-arm-none-eabi', '-mcpu=cortex-m0plus'] + +[host_machine] +system = 'bare metal' # Update with your system name - bare metal/OS. +cpu_family = 'arm' +cpu = 'Cortex-M0+' +endian = 'little' diff --git a/data/com.mesonbuild.install.policy b/data/com.mesonbuild.install.policy index 9a00de2..6fba47c 100644 --- a/data/com.mesonbuild.install.policy +++ b/data/com.mesonbuild.install.policy @@ -17,8 +17,7 @@ </defaults> <annotate key="org.freedesktop.policykit.exec.path">/usr/bin/python3</annotate> <annotate key="org.freedesktop.policykit.exec.argv1">/usr/bin/meson</annotate> - <annotate key="org.freedesktop.policykit.exec.argv2">--internal</annotate> - <annotate key="org.freedesktop.policykit.exec.argv3">install</annotate> + <annotate key="org.freedesktop.policykit.exec.argv2">install</annotate> </action> </policyconfig> diff --git a/data/syntax-highlighting/emacs/meson.el b/data/syntax-highlighting/emacs/meson.el index 45c6983..a640bbe 100644 --- a/data/syntax-highlighting/emacs/meson.el +++ b/data/syntax-highlighting/emacs/meson.el @@ -1,14 +1,3 @@ -;; command to comment/uncomment text -(defun meson-comment-dwim (arg) - "Comment or uncomment current line or region in a smart way. -For detail, see `comment-dwim'." - (interactive "*P") - (require 'newcomment) - (let ( - (comment-start "#") (comment-end "") - ) - (comment-dwim arg))) - ;; keywords for syntax coloring (setq meson-keywords `( @@ -34,9 +23,9 @@ For detail, see `comment-dwim'." 
(setq font-lock-defaults '(meson-keywords)) (setq mode-name "meson") - - ;; modify the keymap - (define-key meson-mode-map [remap comment-dwim] 'meson-comment-dwim) -) + (setq-local comment-start "# ") + (setq-local comment-end "")) (add-to-list 'auto-mode-alist '("meson.build" . meson-mode)) +(provide 'meson) +;;; meson.el ends here diff --git a/data/syntax-highlighting/vim/syntax/meson.vim b/data/syntax-highlighting/vim/syntax/meson.vim index d58903e..c83302c 100644 --- a/data/syntax-highlighting/vim/syntax/meson.vim +++ b/data/syntax-highlighting/vim/syntax/meson.vim @@ -78,6 +78,7 @@ syn keyword mesonBuiltin \ custom_target \ declare_dependency \ dependency + \ disabler \ environment \ error \ executable diff --git a/docs/markdown/Build-options.md b/docs/markdown/Build-options.md index ec4a3bb..66c9cb5 100644 --- a/docs/markdown/Build-options.md +++ b/docs/markdown/Build-options.md @@ -19,6 +19,7 @@ option('combo_opt', type : 'combo', choices : ['one', 'two', 'three'], value : ' option('integer_opt', type : 'integer', min : 0, max : 5, value : 3) # Since 0.45.0 option('free_array_opt', type : 'array', value : ['one', 'two']) option('array_opt', type : 'array', choices : ['one', 'two', 'three'], value : ['one', 'two']) +option('some_feature', type : 'feature', value : 'enabled') ``` ## Build option types @@ -60,8 +61,46 @@ empty. The `value` parameter specifies the default value of the option and if it is unset then the values of `choices` will be used as the default. +As of 0.47.0 -Dopt= and -Dopt=[] both pass an empty list, before this -Dopt= +would pass a list with an empty string. + This type is available since version 0.44.0 +### Features + +A `feature` option has three states: `enabled`, `disabled` or `auto`. It is intended +to be passed as value for the `required` keyword argument of most functions. +Currently supported in +[`dependency()`](Reference-manual.md#dependency), +[`find_library()`](Reference-manual.md#compiler-object), +[`find_program()`](Reference-manual.md#find_program) and +[`add_languages()`](Reference-manual.md#add_languages) functions. + +- `enabled` is the same as passing `required : true`. +- `auto` is the same as passing `required : false`. +- `disabled` do not look for the dependency and always return 'not-found'. + +When getting the value of this type of option using `get_option()`, a special +object is returned instead of the string representation of the option's value. +That object has three methods returning boolean and taking no argument: +`enabled()`, `disabled()`, and `auto()`. + +```meson +d = dependency('foo', required : get_option('myfeature')) +if d.found() + app = executable('myapp', 'main.c', dependencies : [d]) +endif +``` + +If the value of a `feature` option is set to `auto`, that value is overriden by +the global `auto_features` option (which defaults to `auto`). This is intended +to be used by packagers who want to have full control on which dependencies are +required and which are disabled, and not rely on build-deps being installed +(at the right version) to get a feature enabled. They could set +`auto_features=enabled` to enable all features and disable explicitly only the +few they don't want, if any. + +This type is available since version 0.47.0 ## Using build options diff --git a/docs/markdown/Builtin-options.md b/docs/markdown/Builtin-options.md index 39db4fa..b23cc94 100644 --- a/docs/markdown/Builtin-options.md +++ b/docs/markdown/Builtin-options.md @@ -12,11 +12,7 @@ universal options, base options, compiler options. 
A list of these options can be found by running `meson --help`. All these can be set by passing to `meson` (aka `meson setup`) in any of -these ways: - -| --option=value | -| --option value | -| -Doption=value | +these ways: `--option=value`, `--option value`, `-Doption=value`. They can also be edited after setup using `meson configure`. @@ -25,21 +21,31 @@ Installation options are all relative to the prefix, except: * When the prefix is `/usr`: `sysconfdir` defaults to `/etc`, `localstatedir` defaults to `/var`, and `sharedstatedir` defaults to `/var/lib` * When the prefix is `/usr/local`: `localstatedir` defaults to `/var/local`, and `sharedstatedir` defaults to `/var/local/lib` -| Option | Default value | Description -| ------ | ------------- | ----------- -| prefix | see below | Installation prefix -| libdir | see below | Library directory -| libexecdir | libexec | Library executable directory -| bindir | bin | Executable directory -| sbindir | sbin | System executable directory -| includedir | include | Header file directory -| datadir | share | Data file directory -| mandir | share/man | Manual page directory -| infodir | share/info | Info page directory -| localedir | share/locale | Locale data directory -| sysconfdir | etc | Sysconf data directory -| localstatedir | var | Localstate data directory -| sharedstatedir | com | Architecture-independent data directory +| Option | Default value | Description | +| ------ | ------------- | ----------- | +| prefix | see below | Installation prefix | +| libdir | see below | Library directory | +| libexecdir | libexec | Library executable directory | +| bindir | bin | Executable directory | +| sbindir | sbin | System executable directory | +| includedir | include | Header file directory | +| datadir | share | Data file directory | +| mandir | share/man | Manual page directory | +| infodir | share/info | Info page directory | +| localedir | share/locale | Locale data directory | +| sysconfdir | etc | Sysconf data directory | +| localstatedir | var | Localstate data directory | +| sharedstatedir | com | Architecture-independent data directory | +| werror | false | Treat warnings as erros | +| warnlevel {1, 2, 3} | 1 | Set the warning level. From 1 = lowest to 3 = highest | +| layout {mirror,flat} | mirror | Build directory layout. | +| default-library {shared, static, both} | shared | Default library type. | +| backend {ninja, vs,<br>vs2010, vs2015, vs2017, xcode} | | Backend to use (default: ninja). | +| stdsplit | | Split stdout and stderr in test logs. | +| errorlogs | | Whether to print the logs from failing tests. | +| cross-file CROSS_FILE | | File describing cross compilation environment. | +| wrap-mode {default, nofallback, nodownload, forcefallback} | | Special wrap mode to use | + `prefix` defaults to `C:/` on Windows, and `/usr/local/` otherwise. You should always override this value. @@ -62,19 +68,19 @@ a builddir and then run `meson configure` on it with no options. The following options are available. 
Note that they may not be available on all platforms or with all compilers: -| Option | Default value | Possible values | Description -| ----------- | ------------- | --------------- | ----------- -| b_asneeded | true | true, false | Use -Wl,--as-needed when linking -| b_bitcode | false | true, false | Embed Apple bitcode, see below -| b_colorout | always | auto, always, never | Use colored output -| b_coverage | false | true, false | Enable coverage tracking -| b_lundef | true | true, false | Don't allow undefined symbols when linking -| b_lto | false | true, false | Use link time optimization -| b_ndebug | false | true, false, if-release | Disable asserts -| b_pch | true | true, false | Use precompiled headers -| b_pgo | off | off, generate, use | Use profile guided optimization -| b_sanitize | none | see below | Code sanitizer to use -| b_staticpic | true | true, false | Build static libraries as position independent +| Option | Default value | Possible values | Description | +| ----------- | ------------- | --------------- | ----------- | +| b_asneeded | true | true, false | Use -Wl,--as-needed when linking | +| b_bitcode | false | true, false | Embed Apple bitcode, see below | +| b_colorout | always | auto, always, never | Use colored output | +| b_coverage | false | true, false | Enable coverage tracking | +| b_lundef | true | true, false | Don't allow undefined symbols when linking | +| b_lto | false | true, false | Use link time optimization | +| b_ndebug | false | true, false, if-release | Disable asserts | +| b_pch | true | true, false | Use precompiled headers | +| b_pgo | off | off, generate, use | Use profile guided optimization | +| b_sanitize | none | see below | Code sanitizer to use | +| b_staticpic | true | true, false | Build static libraries as position independent | The value of `b_sanitize` can be one of: `none`, `address`, `thread`, `undefined`, `memory`, `address,undefined`. @@ -97,18 +103,18 @@ The following options are available. 
Note that both the options themselves and the possible values they can take will depend on the target platform or compiler being used: -| Option | Default value | Possible values | Description -| ------ | ------------- | --------------- | ----------- -| c_args | | free-form comma-separated list | C compile arguments to use -| c_link_args | | free-form comma-separated list | C link arguments to use -| c_std | none | none, c89, c99, c11, gnu89, gnu99, gnu11 | C language standard to use -| c_winlibs | see below | free-form comma-separated list | Standard Windows libs to link against -| cpp_args | | free-form comma-separated list | C++ compile arguments to use -| cpp_link_args| | free-form comma-separated list | C++ link arguments to use -| cpp_std | none | none, c++98, c++03, c++11, c++14, c++17, <br/>c++1z, gnu++03, gnu++11, gnu++14, gnu++17, gnu++1z | C++ language standard to use -| cpp_debugstl | false | true, false | C++ STL debug mode -| cpp_eh | sc | none, a, s, sc | C++ exception handling type -| cpp_winlibs | see below | free-form comma-separated list | Standard Windows libs to link against +| Option | Default value | Possible values | Description | +| ------ | ------------- | --------------- | ----------- | +| c_args | | free-form comma-separated list | C compile arguments to use | +| c_link_args | | free-form comma-separated list | C link arguments to use | +| c_std | none | none, c89, c99, c11, gnu89, gnu99, gnu11 | C language standard to use | +| c_winlibs | see below | free-form comma-separated list | Standard Windows libs to link against | +| cpp_args | | free-form comma-separated list | C++ compile arguments to use | +| cpp_link_args| | free-form comma-separated list | C++ link arguments to use | +| cpp_std | none | none, c++98, c++03, c++11, c++14, c++17, <br/>c++1z, gnu++03, gnu++11, gnu++14, gnu++17, gnu++1z | C++ language standard to use | +| cpp_debugstl | false | true, false | C++ STL debug mode | +| cpp_eh | sc | none, a, s, sc | C++ exception handling type | +| cpp_winlibs | see below | free-form comma-separated list | Standard Windows libs to link against | The default values of `c_winlibs` and `cpp_winlibs` are in compiler-specific argument forms, but the libraries are: kernel32, user32, gdi32, winspool, diff --git a/docs/markdown/Continuous-Integration.md b/docs/markdown/Continuous-Integration.md index 60e76d1..9ec46b2 100644 --- a/docs/markdown/Continuous-Integration.md +++ b/docs/markdown/Continuous-Integration.md @@ -85,7 +85,37 @@ test_script: ## Travis without Docker -This setup is not recommended but included here for completeness +You can cheat your way around docker by using **python** as language and setting your compiler in the build **matrix**. This example just uses **linux** and **c** but can be easily adapted to **c++** and **osx**. + +```yaml +sudo: false + +os: linux +dist: trusty + +language: python + +python: 3.6 + +matrix: + include: + - env: CC=gcc + - env: CC=clang + +install: + - export NINJA_LATEST=$(curl -s https://api.github.com/repos/ninja-build/ninja/releases/latest | grep browser_download_url | cut -d '"' -f 4 | grep ninja-linux.zip) + - wget "$NINJA_LATEST" + - unzip -q ninja-linux.zip -d build + - export PATH="$PWD/build:$PATH" + - pip install meson + +script: + - meson builddir + - ninja -C builddir + - ninja -C builddir test +``` + +This setup uses the **beta** group. 
It is not recommended but included here for completeness: ```yaml sudo: false diff --git a/docs/markdown/Contributing.md b/docs/markdown/Contributing.md index 7b5fe73..c0eea29 100644 --- a/docs/markdown/Contributing.md +++ b/docs/markdown/Contributing.md @@ -18,6 +18,31 @@ Github](https://github.com/mesonbuild/meson/pulls). This causes them to be run through the CI system. All submissions must pass a full CI test run before they are even considered for submission. +## Keeping pull requests up to date + +It is possible that while your pull request is being reviewed, other +changes are committed to master that cause merge conflicts that must +be resolved. The basic rule for this is very simple: keep your pull +request up to date using rebase _only_. + +Do not merge head back to your branch. Any merge commits in your pull +request make it not acceptable for merging into master and you must +remove them. + +## Special procedure for new features + +Every new feature requires some extra steps, namely: + + - Must include a project test under `test cases/`, or if that's not + possible or if the test requires a special environment, it must go + into `run_unittests.py`. + - Must be registered with the [FeatureChecks framework](Release-notes-for-0.47.0.md#Feature_detection_based_on_meson_version_in_project) + that will warn the user if they try to use a new feature while + targetting an older meson version. + - Needs a release note snippet inside `docs/markdown/snippets/` with + a heading and a brief paragraph explaining what the feature does + with an example. + ## Acceptance and merging The kind of review and acceptance any merge proposal gets depends on @@ -57,6 +82,32 @@ In a simplified list form the split would look like the following: - changes breaking backwards compatibility - support for new languages +## Strategy for merging pull requests to trunk + +Meson's merge strategy should fullfill the following guidelines: + +- preserve as much history as possible + +- have as little junk in the repo as possible + +- everything in the "master lineage" should always pass all tests + +These goals are slightly contradictory so the correct thing to do +often requires some judgement on part of the person doing the +merge. Github provides three different merge options, The rules of +thumb for choosing between them goes like this: + + - single commit pull requests should always be rebased + + - a pull request with one commit and one "fixup" commit (such as + testing something to see if it passes CI) should be squashed + + - large branches with many commits should be merged with a merge + commit, especially if one of the commits does not pass all tests + (which happens in e.g. large and difficult refactorings) + +If in doubt, ask for guidance on IRC. + ## Tests All new features must come with automatic tests that thoroughly prove @@ -116,6 +167,8 @@ Continuous integration systems currently used: allows `[skip ci]` anywhere in the commit messages. - [AppVeyor](https://www.appveyor.com/docs/how-to/filtering-commits/#skip-directive-in-commit-message) requires `[skip ci]` or `[skip appveyor]` in the commit title. +- [Sider](https://sider.review) + runs Flake8 (see below) ## Documentation @@ -157,6 +210,13 @@ $ cd meson $ flake8 ``` +To run it automatically before committing: + +```console +$ flake8 --install-hook=git +$ git config --bool flake8.strict true +``` + ## C/C++ coding style Meson has a bunch of test code in several languages. 
The rules for diff --git a/docs/markdown/Design-rationale.md b/docs/markdown/Design-rationale.md index 7cf67a4..1c65fa8 100644 --- a/docs/markdown/Design-rationale.md +++ b/docs/markdown/Design-rationale.md @@ -193,7 +193,7 @@ keyword arguments. They look like this. ```meson project('compile several', 'c') -sources = ['main.c', 'file1.c', 'file2.c', 'file3.c'] +sourcelist = ['main.c', 'file1.c', 'file2.c', 'file3.c'] executable('program', sources : sourcelist) ``` @@ -202,7 +202,7 @@ External dependencies are simple to use. ```meson project('external lib', 'c') libdep = find_dep('extlibrary', required : true) -sources = ['main.c', 'file1.c', 'file2.c', 'file3.c'] +sourcelist = ['main.c', 'file1.c', 'file2.c', 'file3.c'] executable('program', sources : sourcelist, dep : libdep) ``` diff --git a/docs/markdown/Getting-meson.md b/docs/markdown/Getting-meson.md index 29f2424..4f45ef7 100644 --- a/docs/markdown/Getting-meson.md +++ b/docs/markdown/Getting-meson.md @@ -63,7 +63,7 @@ select the installer options as follows:   - + With this, you will have `python` and `pip` in `PATH`, and you can install Meson with pip. You will also be able to directly run `meson` in any shell on diff --git a/docs/markdown/Gnome-module.md b/docs/markdown/Gnome-module.md index 634fe79..6b63f8a 100644 --- a/docs/markdown/Gnome-module.md +++ b/docs/markdown/Gnome-module.md @@ -40,8 +40,8 @@ file called `foobar.h`, which you can then include in your sources. * `install_dir`: (*Added 0.37.0*) location to install the header or bundle depending on previous options * `install_header`: (*Added 0.37.0*) if true, install the header file -* `source_dir`: a list of subdirectories where the resource compiler - should look up the files, relative to the location of the XML file +* `source_dir`: a list of directories where the resource compiler + should look up the files Returns an array containing: `[c_source, header_file]` or `[gresource_bundle]` diff --git a/docs/markdown/Installing.md b/docs/markdown/Installing.md index b8e6a81..8348d4a 100644 --- a/docs/markdown/Installing.md +++ b/docs/markdown/Installing.md @@ -4,7 +4,8 @@ short-description: Installing targets # Installing -By default Meson will not install anything. Build targets can be installed by tagging them as installable in the definition. +By default Meson will not install anything. Build targets can be +installed by tagging them as installable in the definition. ```meson project('install', 'c') @@ -14,8 +15,8 @@ shared_library('mylib', 'libfile.c', install : true) There is usually no need to specify install paths or the like. Meson will automatically install it to the standards-conforming location. In this particular case the executable is installed to the `bin` -subdirectory of the install prefix. However if you wish to override the -install dir, you can do that with the `install_dir` argument. +subdirectory of the install prefix. However if you wish to override +the install dir, you can do that with the `install_dir` argument. ```meson executable('prog', 'prog.c', install : true, install_dir : 'my/special/dir') @@ -42,7 +43,9 @@ install_data(['file1.txt', 'file2.txt'], install_dir : 'share/myapp') ``` -Sometimes you want to copy an entire subtree directly. For this use case there is the `install_subdir` command, which can be used like this. +Sometimes you want to copy an entire subtree directly. For this use +case there is the `install_subdir` command, which can be used like +this. 
```meson install_subdir('mydir', install_dir : 'include') # mydir subtree -> include/mydir @@ -59,7 +62,10 @@ install_data(sources : 'foo.dat', install_dir : '/etc') # -> /etc/foo.dat ## Custom install behavior -Sometimes you need to do more than just install basic targets. Meson makes this easy by allowing you to specify a custom script to execute at install time. As an example, here is a script that generates an empty file in a custom directory. +Sometimes you need to do more than just install basic targets. Meson +makes this easy by allowing you to specify a custom script to execute +at install time. As an example, here is a script that generates an +empty file in a custom directory. ```bash #!/bin/sh @@ -68,7 +74,10 @@ mkdir "${DESTDIR}/${MESON_INSTALL_PREFIX}/mydir" touch "${DESTDIR}/${MESON_INSTALL_PREFIX}/mydir/file.dat" ``` -As you can see, Meson sets up some environment variables to help you write your script (`DESTDIR` is not set by Meson, it is inherited from the outside environment). In addition to the install prefix, Meson also sets the variables `MESON_SOURCE_ROOT` and `MESON_BUILD_ROOT`. +As you can see, Meson sets up some environment variables to help you +write your script (`DESTDIR` is not set by Meson, it is inherited from +the outside environment). In addition to the install prefix, Meson +also sets the variables `MESON_SOURCE_ROOT` and `MESON_BUILD_ROOT`. Telling Meson to run this script at install time is a one-liner. @@ -76,12 +85,32 @@ Telling Meson to run this script at install time is a one-liner. meson.add_install_script('myscript.sh') ``` -The argument is the name of the script file relative to the current subdirectory. +The argument is the name of the script file relative to the current +subdirectory. ## DESTDIR support -Sometimes you need to install to a different directory than the install prefix. This is most common when building rpm or deb packages. This is done with the `DESTDIR` environment variable and it is used just like with other build systems: +Sometimes you need to install to a different directory than the +install prefix. This is most common when building rpm or deb +packages. This is done with the `DESTDIR` environment variable and it +is used just like with other build systems: ```console $ DESTDIR=/path/to/staging/area ninja install ``` + +## Custom install behaviour + +The default install target (executed via, e.g., `ninja install`) does +installing with reasonable default options. More control over the +install behaviour can be achieved with the `meson install` command, +that has been available since 0.47.0. + +For example, if you wish to install the current setup without +rebuilding the code (which the default install target always does) and +only installing those files that have changed, you would run this +command in the build tree: + +```console +$ meson install --no-rebuild --only-changed +``` diff --git a/docs/markdown/Module-reference.md b/docs/markdown/Module-reference.md deleted file mode 100644 index 60be7bd..0000000 --- a/docs/markdown/Module-reference.md +++ /dev/null @@ -1,20 +0,0 @@ -Meson has a selection of modules to make common requirements easy to use. -Modules can be thought of like the standard library of a programming language. -Currently Meson provides the following modules. 
- -* [Gnome](Gnome-module.md) -* [i18n](i18n-module.md) -* [Qt4](Qt4-module.md) -* [Qt5](Qt5-module.md) -* [Pkgconfig](Pkgconfig-module.md) -* [Python3](Python-3-module.md) -* [RPM](RPM-module.md) -* [Windows](Windows-module.md) - -In addition there are unstable modules. These are meant for testing new -functionality but note that they do *not* provide a stable API. It can -change in arbitrary ways between releases. The modules might also be removed -without warning in future releases. - - * [SIMD](Simd-module.md) - diff --git a/docs/markdown/Reference-manual.md b/docs/markdown/Reference-manual.md index 9829781..db8eed1 100644 --- a/docs/markdown/Reference-manual.md +++ b/docs/markdown/Reference-manual.md @@ -60,7 +60,8 @@ endif Takes one keyword argument, `required`. It defaults to `true`, which means that if any of the languages specified is not found, Meson will halt. Returns true if all languages specified were found and false -otherwise. +otherwise. Since *0.47.0* the value of a [`feature`](Build-options.md#features) +option can also be passed to the `required` keyword argument. ### add_project_arguments() @@ -118,11 +119,12 @@ are never run in parallel. buildtarget both_libraries(library_name, list_of_sources, ...) ``` -Builds both a static and shared library with the given sources. Positional and -keyword arguments are otherwise the same as for [`library`](#library). Source -files will be compiled only once and object files will be reused to build both -shared and static libraries, unless `b_staticpic` user option or `pic` argument -are set to false in which case sources will be compiled twice. +Builds both a static and shared library with the given +sources. Positional and keyword arguments are otherwise the same as +for [`library`](#library). Source files will be compiled only once and +object files will be reused to build both shared and static libraries, +unless `b_staticpic` user option or `pic` argument are set to false in +which case sources will be compiled twice. The returned [buildtarget](#build-target-object) always represents the shared library. In addition it supports the following extra methods: @@ -231,9 +233,12 @@ following. - `build_by_default` *(added 0.38.0)* causes, when set to true, to have this target be built by default, that is, when invoking plain `ninja`; the default value is false -- `build_always` if `true` this target is always considered out of - date and is rebuilt every time, useful for things such as build - timestamps or revision control tags +- `build_always` (deprecated) if `true` this target is always considered out of + date and is rebuilt every time. Equivalent to setting both + `build_always_stale` and `build_by_default` to true. +- `build_always_stale` if `true` the target is always considered out of date. + Useful for things such as build timestamps or revision control tags. + The associated command is run even if the outputs are up to date. - `capture`, there are some compilers that can't be told to write their output to a file but instead write it to standard output. When this argument is set to true, Meson captures `stdout` and writes it @@ -354,7 +359,8 @@ otherwise. This function supports the following keyword arguments: cross compiled binary will run on), usually only needed if you build a tool to be used during compilation. - `required`, when set to false, Meson will proceed with the build - even if the dependency is not found + even if the dependency is not found. 
Since *0.47.0* the value of a + [`feature`](Build-options.md#features) option can also be passed. - `static` tells the dependency provider to try to get static libraries instead of dynamic ones (note that this is not supported by all dependency backends) @@ -540,7 +546,9 @@ Keyword arguments are the following: abort if no program can be found. If `required` is set to `false`, Meson continue even if none of the programs can be found. You can then use the `.found()` method on the returned object to check - whether it was found or not. + whether it was found or not. Since *0.47.0* the value of a + [`feature`](Build-options.md#features) option can also be passed to the + `required` keyword argument. - `native` *(since 0.43)* defines how this executable should be searched. By default it is set to `false`, which causes Meson to first look for the @@ -663,17 +671,24 @@ installed, and so on, use a [`custom_target`](#custom_target) instead. value get_option(option_name) ``` -Obtains the value of the [project build option](Build-options.md) specified in the positional argument. +Obtains the value of the [project build option](Build-options.md) +specified in the positional argument. -Note that the value returned for built-in options that end in `dir` such as -`bindir` and `libdir` is always a path relative to (and inside) the `prefix`. +Note that the value returned for built-in options that end in `dir` +such as `bindir` and `libdir` is always a path relative to (and +inside) the `prefix`. -The only exceptions are: `sysconfdir`, `localstatedir`, and `sharedstatedir` -which will return the value passed during configuration as-is, which may be -absolute, or relative to `prefix`. [`install_dir` arguments](Installing.md) -handles that as expected, but if you need the absolute path to one of these -e.g. to use in a define etc., you should use `join_paths(get_option('prefix'), -get_option('localstatedir')))` +The only exceptions are: `sysconfdir`, `localstatedir`, and +`sharedstatedir` which will return the value passed during +configuration as-is, which may be absolute, or relative to `prefix`. +[`install_dir` arguments](Installing.md) handles that as expected, but +if you need the absolute path to one of these e.g. to use in a define +etc., you should use `join_paths(get_option('prefix'), +get_option('localstatedir'))` + +For options of type `feature` a special object is returned instead of +a string. See [`feature` options](Build-options.md#features) +documentation for more details. ### get_variable() @@ -721,8 +736,8 @@ the search path, since there is no global search path. For something like that, see [`add_project_arguments()`](#add_project_arguments). See also `implicit_include_directories` parameter of -[executable()](#executable), which adds current source and build directories -to include path. +[executable()](#executable), which adds current source and build +directories to include path. Each directory given is converted to two include paths: one that is relative to the source root and one relative to the build root. @@ -758,9 +773,15 @@ executable('some-tool', sources, ... ``` -If the build tree is `/tmp/build-tree`, the following include paths will be added to the `executable()` call: `-I/tmp/build-tree/include -I/home/user/project.git/include`. +If the build tree is `/tmp/build-tree`, the following include paths +will be added to the `executable()` call: `-I/tmp/build-tree/include +-I/home/user/project.git/include`. 
-This function has one keyword argument `is_system` which, if set, flags the specified directories as system directories. This means that they will be used with the `-isystem` compiler argument rather than `-I` on compilers that support this flag (in practice everything except Visual Studio). +This function has one keyword argument `is_system` which, if set, +flags the specified directories as system directories. This means that +they will be used with the `-isystem` compiler argument rather than +`-I` on compilers that support this flag (in practice everything +except Visual Studio). ### install_data() @@ -789,10 +810,10 @@ arguments. The following keyword arguments are supported: To leave any of these three as the default, specify `false`. -- `rename` if specified renames each source file into corresponding file - from `rename` list. Nested paths are allowed and they are joined with - `install_dir`. Length of `rename` list must be equal to the number of sources. - *(added 0.46.0)* +- `rename` if specified renames each source file into corresponding + file from `rename` list. Nested paths are allowed and they are + joined with `install_dir`. Length of `rename` list must be equal to + the number of sources. *(added 0.46.0)* See [Installing](Installing.md) for more examples. @@ -976,10 +997,11 @@ The keyword arguments for this are the same as for [`executable`](#executable) w libraries. Defaults to `dylib` for shared libraries and `rlib` for static libraries. -`static_library`, `shared_library` and `both_libraries` also accept these keyword -arguments. +`static_library`, `shared_library` and `both_libraries` also accept +these keyword arguments. -Note: You can set `name_prefix` and `name_suffix` to `[]`, or omit them for the default behaviour for each platform. +Note: You can set `name_prefix` and `name_suffix` to `[]`, or omit +them for the default behaviour for each platform. ### message() @@ -1062,14 +1084,14 @@ Project supports the following keyword arguments. runresult run_command(command, list_of_args, ...) ``` -Runs the command specified in positional arguments. -`command` can be a string, or the output of [`find_program()`](#find_program), -[`files()`](#files) or [`configure_file()`](#configure_file), or -[a compiler object](#compiler-object). +Runs the command specified in positional arguments. `command` can be +a string, or the output of [`find_program()`](#find_program), +[`files()`](#files) or [`configure_file()`](#configure_file), or [a +compiler object](#compiler-object). -Returns [an opaque object](#run-result-object) containing the result of the -invocation. The command is run from an *unspecified* directory, and -Meson will set three environment variables `MESON_SOURCE_ROOT`, +Returns [an opaque object](#run-result-object) containing the result +of the invocation. The command is run from an *unspecified* directory, +and Meson will set three environment variables `MESON_SOURCE_ROOT`, `MESON_BUILD_ROOT` and `MESON_SUBDIR` that specify the source directory, build directory and subdirectory the target was defined in, respectively. @@ -1097,7 +1119,7 @@ output as far as Meson is concerned. It is only meant for tasks such as running a code formatter or flashing an external device's firmware with a built file. 
-The script is run from an *unspecified* directory, and Meson will set +The command is run from an *unspecified* directory, and Meson will set three environment variables `MESON_SOURCE_ROOT`, `MESON_BUILD_ROOT` and `MESON_SUBDIR` that specify the source directory, build directory and subdirectory the target was defined in, respectively. @@ -1165,9 +1187,10 @@ variables defined in the [`executable`](#executable) it is loaded by, you will need to set the `export_dynamic` argument of the executable to `true`. -**Note:** Linking to a shared module is not supported on some platforms, notably -OSX. Consider using a [`shared_library`](#shared_library) instead, if you need -to both `dlopen()` and link with a library. +**Note:** Linking to a shared module is not supported on some +platforms, notably OSX. Consider using a +[`shared_library`](#shared_library) instead, if you need to both +`dlopen()` and link with a library. *Added 0.37.0* @@ -1272,15 +1295,16 @@ argument to [`dependency()`](#dependency). void test(name, executable, ...) ``` -Defines a test to run with the test harness. Takes two positional arguments, -the first is the name of the test and the second is the executable to run. -The executable can be an [executable build target object](#build-target-object) -returned by [`executable()`](#executable) or an -[external program object](#external-program-object) returned by -[`find_program()`](#find_program). The executable's exit code is used by the -test harness to record the outcome of the test, for example exit code zero -indicates success. For more on the Meson test harness protocol read -[Unit Tests](Unit-tests.md). +Defines a test to run with the test harness. Takes two positional +arguments, the first is the name of the test and the second is the +executable to run. The executable can be an [executable build target +object](#build-target-object) returned by +[`executable()`](#executable) or an [external program +object](#external-program-object) returned by +[`find_program()`](#find_program). The executable's exit code is used +by the test harness to record the outcome of the test, for example +exit code zero indicates success. For more on the Meson test harness +protocol read [Unit Tests](Unit-tests.md). Keyword arguments are the following: @@ -1374,6 +1398,28 @@ the following methods. `MESON_INSTALL_DESTDIR_PREFIX`, and `MESONINTROSPECT` set. All additional arguments are passed as parameters. + Meson uses the `DESTDIR` environment variable as set by the + inherited environment to determine the (temporary) installation + location for files. Your install script must be aware of this while + manipulating and installing files. The correct way to handle this is + with the `MESON_INSTALL_DESTDIR_PREFIX` variable which is always set + and contains `DESTDIR` (if set) and `prefix` joined together. This + is useful because both are usually absolute paths and there are + platform-specific edge-cases in joining two absolute paths. + + In case it is needed, `MESON_INSTALL_PREFIX` is also always set and + has the value of the `prefix` option passed to Meson. + + `MESONINTROSPECT` contains the path to the introspect command that + corresponds to the `meson` executable that was used to configure the + build. (This might be a different path then the first executable + found in `PATH`.) It can be used to query build configuration. Note + that the value will contain many parts, f.ex., it may be `python3 + /path/to/meson.py introspect`. 
The user is responsible for splitting + the string to an array if needed by splitting lexically like a UNIX + shell would. If your script uses Python, `shlex.split()` is the + easiest correct way to do this. + - `add_postconf_script(script_name, arg1, arg2, ...)` will run the executable given as an argument after all project files have been generated. This script will have the environment variables @@ -1437,26 +1483,6 @@ the following methods. build](Unity-builds.md) (multiple sources are combined before compilation to reduce build time) and `false` otherwise. - To determine the installation location, the script should use the - `DESTDIR`, `MESON_INSTALL_PREFIX`, `MESON_INSTALL_DESTDIR_PREFIX` - variables. `DESTDIR` will be set only if it is inherited from the - outside environment. `MESON_INSTALL_PREFIX` is always set and has - the value of the `prefix` option passed to - Meson. `MESON_INSTALL_DESTDIR_PREFIX` is always set and contains - `DESTDIR` and `prefix` joined together. This is useful because both - are absolute paths, and many path-joining functions such as - [`os.path.join` in - Python](https://docs.python.org/3/library/os.path.html#os.path.join) - special-case absolute paths. - - `MESONINTROSPECT` contains the path to the introspect command that - corresponds to the `meson` executable that was used to configure the - build. (This might be a different path then the first executable - found in `PATH`.) It can be used to query build configuration. Note - that the value may contain many parts, i.e. it may be `python3 - /path/to/meson.py introspect`. The user is responsible for splitting - the string to an array if needed. - - `override_find_program(progname, program)` [*(Added 0.46.0)*](Release-notes-for-0-46-0.html#can-override-find_program) specifies that whenever `find_program` is used to find a program @@ -1464,11 +1490,14 @@ the following methods. instead return `program`, which may either be the result of `find_program` or `configure_file`. -- `project_version()` returns the version string specified in `project` function call. +- `project_version()` returns the version string specified in + `project` function call. -- `project_license()` returns the array of licenses specified in `project` function call. +- `project_license()` returns the array of licenses specified in + `project` function call. -- `project_name()` returns the project name specified in the `project` function call. +- `project_name()` returns the project name specified in the `project` + function call. - `version()` return a string with the version of Meson. @@ -1479,15 +1508,17 @@ doing the actual compilation. See [Cross-compilation](Cross-compilation.md). It has the following methods: -- `cpu_family()` returns the CPU family name. [This table](Reference-tables.md#cpu-families) - contains all known CPU families. These are guaranteed to continue working. +- `cpu_family()` returns the CPU family name. [This + table](Reference-tables.md#cpu-families) contains all known CPU + families. These are guaranteed to continue working. - `cpu()` returns a more specific CPU name, such as `i686`, `amd64`, etc. -- `system()` returns the operating system name. - [This table](Reference-tables.html#operating-system-names) Lists all of the - currently known Operating System names, these are guaranteed to continue working. +- `system()` returns the operating system name. 
[This + table](Reference-tables.html#operating-system-names) Lists all of + the currently known Operating System names, these are guaranteed to + continue working. - `endian()` returns `big` on big-endian systems and `little` on little-endian systems. @@ -1497,8 +1528,8 @@ Currently, these values are populated using and [`platform.machine()`](https://docs.python.org/3.4/library/platform.html#platform.machine). If you think the returned values for any of these are incorrect for your -system or CPU, or if your OS is not in the linked table, please file [a -bug report](https://github.com/mesonbuild/meson/issues/new) with +system or CPU, or if your OS is not in the linked table, please file +[a bug report](https://github.com/mesonbuild/meson/issues/new) with details and we'll look into it. ### `host_machine` object @@ -1563,14 +1594,16 @@ the following methods: Meson will proceed even if the library is not found. By default the library is searched for in the system library directory (e.g. /usr/lib). This can be overridden with the `dirs` keyword - argument, which can be either a string or a list of strings. + argument, which can be either a string or a list of strings. Since + *0.47.0* the value of a [`feature`](Build-options.md#features) + option can also be passed to the `required` keyword argument. - `first_supported_argument(list_of_strings)`, given a list of strings, returns the first argument that passes the `has_argument` test or an empty array if none pass. -- `first_supported_link_argument(list_of_strings)` *(added 0.46.0)*, given a - list of strings, returns the first argument that passes the +- `first_supported_link_argument(list_of_strings)` *(added 0.46.0)*, + given a list of strings, returns the first argument that passes the `has_link_argument` test or an empty array if none pass. - `get_define(definename)` returns the given preprocessor symbol's @@ -1593,11 +1626,12 @@ the following methods: the specified command line argument, that is, can compile code without erroring out or printing a warning about an unknown flag. -- `has_link_argument(argument_name)` *(added 0.46.0)* returns true if the linker - accepts the specified command line argument, that is, can compile and link - code without erroring out or printing a warning about an unknown flag. Link - arguments will be passed to the compiler, so should usually have the `-Wl,` - prefix. On VisualStudio a `/link` argument will be prepended. +- `has_link_argument(argument_name)` *(added 0.46.0)* returns true if + the linker accepts the specified command line argument, that is, can + compile and link code without erroring out or printing a warning + about an unknown flag. Link arguments will be passed to the + compiler, so should usually have the `-Wl,` prefix. On VisualStudio + a `/link` argument will be prepended. - `has_function(funcname)` returns true if the given function is provided by the standard library or a library passed in with the @@ -1642,9 +1676,9 @@ the following methods: `has_argument` but takes multiple arguments and uses them all in a single compiler invocation, available since 0.37.0. -- `has_multi_link_arguments(arg1, arg2, arg3, ...)` *(added 0.46.0)* is the same - as `has_link_argument` but takes multiple arguments and uses them all in a - single compiler invocation. +- `has_multi_link_arguments(arg1, arg2, arg3, ...)` *(added 0.46.0)* + is the same as `has_link_argument` but takes multiple arguments and + uses them all in a single compiler invocation. 
- `has_type(typename)` returns true if the specified token is a type, you can specify external dependencies to use with `dependencies` @@ -1801,10 +1835,10 @@ The following methods are defined for all [dictionaries](Syntax.md#dictionaries) - `has_key(key)` returns `true` if the dictionary contains the key given as argument, `false` otherwise -- `get(key, fallback)`, returns the value for the key given as first argument - if it is present in the dictionary, or the optional fallback value given - as the second argument. If a single argument was given and the key was not - found, causes a fatal error +- `get(key, fallback)`, returns the value for the key given as first + argument if it is present in the dictionary, or the optional + fallback value given as the second argument. If a single argument + was given and the key was not found, causes a fatal error You can also iterate over dictionaries with the [`foreach` statement](Syntax.md#foreach-statements). @@ -1824,10 +1858,11 @@ A build target is either an [executable](#executable), - `extract_all_objects()` is same as `extract_objects` but returns all object files generated by this target. Since 0.46.0 keyword argument - `recursive` must be set to `true` to also return objects passed to the - `object` argument of this target. By default only objects built for this - target are returned to maintain backward compatibility with previous versions. - The default will eventually be changed to `true` in a future version. + `recursive` must be set to `true` to also return objects passed to + the `object` argument of this target. By default only objects built + for this target are returned to maintain backward compatibility with + previous versions. The default will eventually be changed to `true` + in a future version. - `extract_objects()` returns an opaque value representing the generated object files of arguments, usually used to take single @@ -1894,10 +1929,11 @@ contains a target with the following methods: this and will also allow Meson to setup inter-target dependencies correctly. Please file a bug if that doesn't work for you. -- `[index]` returns an opaque object that references this target, and can be - used as a source in other targets. When it is used as such it will make that - target depend on this custom target, but the only source added will be the - one that corresponds to the index of the custom target's output argument. +- `[index]` returns an opaque object that references this target, and + can be used as a source in other targets. When it is used as such it + will make that target depend on this custom target, but the only + source added will be the one that corresponds to the index of the + custom target's output argument. ### `dependency` object @@ -1914,22 +1950,23 @@ an external dependency with the following methods: (*Added 0.45.0*) A warning is issued if the variable is not defined, unless a `default` parameter is specified. - - `get_configtool_variable(varname)` (*Added 0.44.0*) will get the + - `get_configtool_variable(varname)` (*Added 0.44.0*) will get the command line argument from the config tool (with `--` prepended), or, if invoked on a non config-tool dependency, error out. - `type_name()` which returns a string describing the type of the dependency, the most common values are `internal` for deps created - with `declare_dependencies` and `pkgconfig` for system dependencies + with `declare_dependency()` and `pkgconfig` for system dependencies obtained with Pkg-config. 
- `version()` is the version number as a string, for example `1.2.8` - - `partial_dependency(compile_args : false, link_args : false, links : false, - includes : false, source : false)` (*added 0.46.0) returns a new dependency - object with the same name, version, found status, type name, and methods as - the object that called it. This new object will only inherit other - attributes from its parent as controlled by keyword arguments. + - `partial_dependency(compile_args : false, link_args : false, links + : false, includes : false, source : false)` (*added 0.46.0*) returns + a new dependency object with the same name, version, found status, + type name, and methods as the object that called it. This new + object will only inherit other attributes from its parent as + controlled by keyword arguments. If the parent has any dependencies, those will be applied to the new partial dependency with the same rules. So , given: @@ -1940,8 +1977,9 @@ an external dependency with the following methods: dep3 = dep2.partial_dependency(compile_args : true) ``` - dep3 will add `['-Werror=foo', '-Werror=bar']` to the compiler args of - any target it is added to, but libfoo will not be added to the link_args. + dep3 will add `['-Werror=foo', '-Werror=bar']` to the compiler args + of any target it is added to, but libfoo will not be added to the + link_args. The following arguments will add the following attributes: @@ -1969,7 +2007,7 @@ and has the following methods: - `found()` which returns whether the executable was found -- `path()` which returns a string pointing to the script or executable +- `path()` which returns a string pointing to the script or executable **NOTE:** You should not need to use this method. Passing the object itself should work in all cases. F.ex.: `run_command(obj, arg1, arg2)` @@ -1980,27 +2018,42 @@ detailed information about how environment variables should be set during tests. It should be passed as the `env` keyword argument to tests. It has the following methods. -- `append(varname, value)` appends the given value to the old value of - the environment variable, e.g. `env.append('FOO', 'BAR', separator - : ';')` produces `BOB;BAR` if `FOO` had the value `BOB` and plain - `BAR` if the value was not defined. If the separator is not - specified explicitly, the default path separator for the host - operating system will be used, i.e. ';' for Windows and ':' for - UNIX/POSIX systems. +- `append(varname, value1, value2, ...)` appends the given values to + the old value of the environment variable, e.g. `env.append('FOO', + 'BAR', 'BAZ', separator : ';')` produces `BOB;BAR;BAZ` if `FOO` had + the value `BOB` and plain `BAR;BAZ` if the value was not defined. If + the separator is not specified explicitly, the default path + separator for the host operating system will be used, i.e. ';' for + Windows and ':' for UNIX/POSIX systems. -- `prepend(varname, value)` is the same as `append` except that it +- `prepend(varname, value1, value2, ...)` is the same as `append` except that it writes to the beginning of the variable -- `set(varname, value)` sets environment variable in the first - argument to the value in the second argument, e.g. - `env.set('FOO', 'BAR')` sets envvar `FOO` to value `BAR` +- `set(varname, value1, value2)` sets the environment variable + specified in the first argument to the values in the second argument + joined by the separator, e.g. `env.set('FOO', 'BAR'),` sets envvar + `FOO` to value `BAR`. See `append()` above for how separators work. 
+ +**Note:** All these methods overwrite the previously-defined value(s) +if called twice with the same `varname`. ### `external library` object This object is returned by [`find_library()`](#find_library) and contains an external (i.e. not built as part of this project) -library. This object has only one method, `found`, which returns -whether the library was found. +library. This object has the following methods: + + - `found` which returns whether the library was found. + + - `type_name()` (*added 0.48.0*) which returns a string describing + the type of the dependency, which will be `library` in this case. + + - `partial_dependency(compile_args : false, link_args : false, links + : false, includes : false, source : false)` (*added 0.46.0*) returns + a new dependency object with the same name, version, found status, + type name, and methods as the object that called it. This new + object will only inherit other attributes from its parent as + controlled by keyword arguments. ### `generator` object diff --git a/docs/markdown/Reference-tables.md b/docs/markdown/Reference-tables.md index d0a2c83..6486aa2 100644 --- a/docs/markdown/Reference-tables.md +++ b/docs/markdown/Reference-tables.md @@ -23,6 +23,7 @@ These are return values of the `get_id` method in a compiler object. | nagfor | The NAG Fortran compiler | | lcc | Elbrus C/C++/Fortran Compiler | | arm | ARM compiler | +| armclang | ARMCLANG compiler | ## Script environment variables @@ -46,8 +47,10 @@ set in the cross file. | ia64 | Itanium processor | | arm | 32 bit ARM processor | | aarch64 | 64 bit ARM processor | -| ppc64 | 64 bit PPC processors (Big Endian) | -| ppc64le | 64 bit PPC processors (Little Endian) | +| mips | 32 bit MIPS processor | +| mips64 | 64 bit MIPS processor | +| ppc | 32 bit PPC processors | +| ppc64 | 64 bit PPC processors | | e2k | MCST Elbrus processor | | parisc | HP PA-RISC processor | | sparc64 | SPARC v9 processor | diff --git a/docs/markdown/Release-notes-for-0.47.0.md b/docs/markdown/Release-notes-for-0.47.0.md index fa4a34c..9736256 100644 --- a/docs/markdown/Release-notes-for-0.47.0.md +++ b/docs/markdown/Release-notes-for-0.47.0.md @@ -1,20 +1,10 @@ --- title: Release 0.47 -short-description: Release notes for 0.46 (preliminary) +short-description: Release notes for 0.47 ... # New features -This page is a placeholder for the eventual release notes. - -Notable new features should come with release note updates. This is -done by creating a file snippet called `snippets/featurename.md` and -whose contents should look like this: - - ## Feature name - - A short description explaining the new feature and how it should be used. - ## Allow early return from a script Added the function `subdir_done()`. Its invocation exits the current script at @@ -22,8 +12,291 @@ the point of invocation. All previously invoked build targets and commands are build/executed. All following ones are ignored. If the current script was invoked via `subdir()` the parent script continues normally. -## Concatenate string literals returned from get_define +## Concatenate string literals returned from `get_define()` After obtaining the value of a preprocessor symbol consecutive string literals are merged into a single string literal. For example a preprocessor symbol's value `"ab" "cd"` is returned as `"abcd"`. + +## ARM compiler(version 6) for C and CPP + +Cross-compilation is now supported for ARM targets using ARM compiler +version 6 - ARMCLANG. 
The required ARMCLANG compiler options for +building a shareable library are not included in the current Meson +implementation for ARMCLANG support, so it can not build shareable +libraries. This current Meson implementation for ARMCLANG support can +not build assembly files with arm syntax (we need to use armasm instead +of ARMCLANG for the `.s` files with this syntax) and only supports GNU +syntax. + +The default extension of the executable output is `.axf`. +The environment path should be set properly for the ARM compiler executables. +The `--target`, `-mcpu` options with the appropriate values should be mentioned +in the cross file as shown in the snippet below. + +``` +[properties] +c_args = ['--target=arm-arm-none-eabi', '-mcpu=cortex-m0plus'] +cpp_args = ['--target=arm-arm-none-eabi', '-mcpu=cortex-m0plus'] + +``` + +Note: +- The current changes are tested on Windows only. +- PIC support is not enabled by default for ARM, + if users want to use it, they need to add the required arguments + explicitly from cross-file(`c_args`/`cpp_args`) or some other way. + +## New base build option for LLVM (Apple) bitcode support + +When building with clang on macOS, you can now build your static and shared +binaries with embedded bitcode by enabling the `b_bitcode` [base +option](Builtin-options.md#Base_options) by passing `-Db_bitcode=true` to +Meson. + +This is better than passing the options manually in the environment since Meson +will automatically disable conflicting options such as `b_asneeded`, and will +disable bitcode support on targets that don't support it such as +`shared_module()`. + +Since this requires support in the linker, it is currently only enabled when +using Apple ld. In the future it can be extended to clang on other platforms +too. + +## New compiler check: `check_header()` + +The existing compiler check `has_header()` only checks if the header exists, +either with the `__has_include` C++11 builtin, or by running the pre-processor. + +However, sometimes the header you are looking for is unusable on some platforms +or with some compilers in a way that is only detectable at compile-time. For +such cases, you should use `check_header()` which will include the header and +run a full compile. + +Note that `has_header()` is much faster than `check_header()`, so it should be +used whenever possible. + +## New action `copy:` for `configure_file()` + +In addition to the existing actions `configuration:` and `command:`, +[`configure_file()`](#Reference-manual.md#configure_file) now accepts a keyword +argument `copy:` which specifies a new action to copy the file specified with +the `input:` keyword argument to a file in the build directory with the name +specified with the `output:` keyword argument. + +These three keyword arguments are, as before, mutually exclusive. You can only +do one action at a time. + +## New keyword argument `encoding:` for `configure_file()` + +Add a new keyword to [`configure_file()`](#Reference-manual.md#configure_file) +that allows the developer to specify the input and output file encoding. The +default value is the same as before: UTF-8. + +In the past, Meson would not handle non-UTF-8/ASCII files correctly, and in the +worst case would try to coerce it to UTF-8 and mangle the data. UTF-8 is the +standard encoding now, but sometimes it is necessary to process files that use +a different encoding. + +For additional details see [#3135](https://github.com/mesonbuild/meson/pull/3135). 
+ +## New keyword argument `output_format:` for `configure_file()` + +When called without an input file, `configure_file` generates a +C header file by default. A keyword argument was added to allow +specifying the output format, for example for use with nasm or yasm: + +``` +conf = configuration_data() +conf.set('FOO', 1) + +configure_file('config.asm', + configuration: conf, + output_format: 'nasm') +``` + +## Substitutions in `custom_target(depfile:)` + +The `depfile` keyword argument to `custom_target` now accepts the `@BASENAME@` +and `@PLAINNAME@` substitutions. + +## Deprecated `build_always:` for custom targets + +Setting `build_always` to `true` for a custom target not only marks the target +to be always considered out of date, but also adds it to the set of default +targets. This option is therefore deprecated and the new option +`build_always_stale` is introduced. + +`build_always_stale` *only* marks the target to be always considered out of +date, but does not add it to the set of default targets. The old behaviour can +be achieved by combining `build_always_stale` with `build_by_default`. + +The documentation has been updated accordingly. + +## New built-in object type: dictionary + +Meson dictionaries use a syntax similar to python's dictionaries, +but have a narrower scope: they are immutable, keys can only +be string literals, and initializing a dictionary with duplicate +keys causes a fatal error. + +Example usage: + +```meson +d = {'foo': 42, 'bar': 'baz'} + +foo = d.get('foo') +foobar = d.get('foobar', 'fallback-value') + +foreach key, value : d + Do something with key and value +endforeach +``` + +## Array options treat `-Dopt=` and `-Dopt=[]` as equivalent + +Prior to this change passing -Dopt= to an array opt would be interpreted as +`['']` (an array with an empty string), now `-Dopt=` is the same as `-Dopt=[]`, an +empty list. + +## Feature detection based on `meson_version:` in `project()` + +Meson will now print a `WARNING:` message during configuration if you use +a function or a keyword argument that was added in a meson version that's newer +than the version specified inside `project()`. For example: + +```meson +project('featurenew', meson_version: '>=0.43') + +cdata = configuration_data() +cdata.set('FOO', 'bar') +message(cdata.get_unquoted('FOO')) +``` + +This will output: + +``` +The Meson build system +Version: 0.47.0.dev1 +Source dir: C:\path\to\srctree +Build dir: C:\path\to\buildtree +Build type: native build +Project name: featurenew +Project version: undefined +Build machine cpu family: x86_64 +Build machine cpu: x86_64 +WARNING: Project targetting '>=0.43' but tried to use feature introduced in '0.44.0': configuration_data.get_unquoted() +Message: bar +Build targets in project: 0 +WARNING: Project specifies a minimum meson_version '>=0.43' which conflicts with: + * 0.44.0: {'configuration_data.get_unquoted()'} +``` + +## New type of build option for features + +A new type of [option called `feature`](Build-options.md#features) can be +defined in `meson_options.txt` for the traditional `enabled / disabled / auto` +tristate. The value of this option can be passed to the `required` keyword +argument of functions `dependency()`, `find_library()`, `find_program()` and +`add_languages()`. + +A new global option `auto_features` has been added to override the value of all +`auto` features. It is intended to be used by packagers to have full control on +which feature must be enabled or disabled. 
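As a usage sketch of the packager workflow described above (the `docs` option name is hypothetical), every `auto` feature can be forced on globally while the few unwanted ones are disabled explicitly:

```console
$ meson builddir -Dauto_features=enabled -Ddocs=disabled
```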
+ +## New options to `gnome.gdbus_codegen()` + +You can now pass additional arguments to gdbus-codegen using the `extra_args` +keyword. The same keyword is accepted by the other gnome module functions. + +Meson now automatically adds autocleanup support to the generated code. This +can be modified by setting the `autocleanup` keyword. + +For example: + +```meson +sources += gnome.gdbus_codegen('com.mesonbuild.Test', + 'com.mesonbuild.Test.xml', + autocleanup : 'none', + extra_args : ['--pragma-once']) +``` + +## Made 'install' a top level Meson command + +You can now run `meson install` in your build directory and it will perform +the install. It has several command line options with which you can toggle +behaviour that is not available in the default `ninja install` invocation. This +is similar to how `meson test` already works. + +For example, to install only the files that have changed, you can do: + +```console +$ meson install --only-changed +``` + +## `install_mode:` keyword argument extended to all installable targets + +It is now possible to pass an `install_mode` argument to all installable targets, +such as `executable()`, libraries, headers, man pages and custom/generated +targets. + +The `install_mode` argument can be used to specify the file mode in symbolic +format and optionally the owner/uid and group/gid for the installed files. + +## New built-in option `install_umask` with a default value of 022 + +This umask is used to define the default permissions of files and directories +created in the install tree. Files will preserve their executable mode, but the +exact permissions will obey the `install_umask`. + +The `install_umask` can be overridden on the meson command line: + +```console +$ meson --install-umask=027 builddir/ +``` + +A project can also override the default in the `project()` call: + +```meson +project('myproject', 'c', + default_options : ['install_umask=027']) +``` + +To disable the `install_umask`, set it to `preserve`, in which case permissions +are copied from the original files. + +## Octal and binary integer literals + +Octal and binary integer literals can now be used in build and option files. + +```meson +int_493 = 0o755 +int_1365 = 0b10101010101 +``` + +## New keyword arguments: 'check' and 'capture' for `run_command()` + +If `check:` is `true`, then the configuration will fail if the command returns a +non-zero exit status. The default value is `false` for compatibility reasons. + +`run_command()` used to always capture the output and store it for use in +build files. However, sometimes the stdout is in a binary format which is meant +to be discarded. For that case, you can now set the `capture:` keyword argument +to `false` (see the sketch at the end of these notes for both keywords in use). + +## Windows resource files dependencies + +The `compile_resources()` function of the `windows` module now takes +the `depend_files:` and `depends:` keywords. + +When using binutils's `windres`, dependencies on files `#include`'d by the +preprocessor are now automatically tracked. + +## Polkit support for privileged installation + +When running `install`, if installation fails with a permission error and +`pkexec` is available, Meson will attempt to use it to spawn a permission +dialog for privileged installation and retry the installation. + +If `pkexec` is not available, the old behaviour is retained and you will need +to explicitly run the install step with `sudo`.
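Returning to the `run_command()` keywords described above, here is a minimal sketch of both; the `git` and `touch` invocations are purely illustrative and assume those tools exist on the build machine:

```meson
# Abort configuration immediately if git is unavailable or the command fails.
ver = run_command('git', 'describe', '--always', check : true)
message('revision: ' + ver.stdout().strip())

# The output here is uninteresting, so skip capturing it.
run_command('touch', 'stamp.txt', capture : false, check : false)
```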
diff --git a/docs/markdown/Release-notes-for-0.48.0.md b/docs/markdown/Release-notes-for-0.48.0.md new file mode 100644 index 0000000..cf3db4c --- /dev/null +++ b/docs/markdown/Release-notes-for-0.48.0.md @@ -0,0 +1,17 @@ +--- +title: Release 0.48 +short-description: Release notes for 0.48 (preliminary) +... + +# New features + +This page is a placeholder for the eventual release notes. + +Notable new features should come with release note updates. This is +done by creating a file snippet called `snippets/featurename.md` +whose contents should look like this: + + ## Feature name + + A short description explaining the new feature and how it should be used. + diff --git a/docs/markdown/Running-Meson.md b/docs/markdown/Running-Meson.md index 7db22bf..00b70ed 100644 --- a/docs/markdown/Running-Meson.md +++ b/docs/markdown/Running-Meson.md @@ -10,6 +10,9 @@ directly from the source tree with the command the command is simply `meson`. In this manual we only use the latter format for simplicity. +Additionally, the invocation can pass options to meson. +The list of options is documented [here](Builtin-options.md). + At the time of writing only a command line version of Meson is available. This means that Meson must be invoked using the terminal. If you wish to use the MSVC compiler, you need to run Meson diff --git a/docs/markdown/Unit-tests.md b/docs/markdown/Unit-tests.md index e5e4107..a8e7273 100644 --- a/docs/markdown/Unit-tests.md +++ b/docs/markdown/Unit-tests.md @@ -113,6 +113,12 @@ $ meson test --gdb --repeat=10000 testname This runs the test up to 10 000 times under GDB automatically. If the program crashes, GDB will halt and the user can debug the application. Note that testing timeouts are disabled in this case so `meson test` will not kill `gdb` while the developer is still debugging it. The downside is that if the test binary freezes, the test runner will wait forever. +```console +$ meson test --print-errorlogs +``` + +Meson will report the output produced by the failing tests along with other useful information, such as the environment variables. This is useful, for example, when you run the tests on Travis-CI, Jenkins and the like. + For further information see the command line help of Meson by running `meson test -h`. **NOTE:** If `meson test` does not work for you, you likely have an old version of Meson. In that case you should call `mesontest` instead. If `mesontest` doesn't work either you have a very old version prior to 0.37.0 and should upgrade. diff --git a/docs/markdown/Users.md b/docs/markdown/Users.md index a15e5eb..d328b97 100644 --- a/docs/markdown/Users.md +++ b/docs/markdown/Users.md @@ -17,17 +17,19 @@ listed in the [`meson` GitHub topic](https://github.com/topics/meson).
- [Dpdk](http://dpdk.org/browse/dpdk), Data plane development kit, a set of libraries and drivers for fast packet processing - [DXVK](https://github.com/doitsujin/dxvk), a Vulkan-based Direct3D 11 implementation for Linux using Wine - [Emeus](https://github.com/ebassi/emeus), Constraint based layout manager for GTK+ + - [ESP8266 Arduino sample project](https://github.com/trilader/arduino-esp8266-meson) Sample project for using the ESP8266 Arduino port with Meson + - [Fractal](https://wiki.gnome.org/Apps/Fractal/), a Matrix messaging client for GNOME - [Frida](https://github.com/frida/frida-core), a dynamic binary instrumentation toolkit - [fwupd](https://github.com/hughsie/fwupd), a simple daemon to allow session software to update firmware - [Geary](https://wiki.gnome.org/Apps/Geary), an email application built around conversations, for the GNOME 3 desktop. - [GLib](https://gitlab.gnome.org/GNOME/glib), cross-platform C library used by GTK+ and GStreamer (not the default yet) - - [Gnome Boxes](https://gitlab.gnome.org/GNOME/gnome-boxes), a Gnome hypervisor - - [Gnome Builder](https://gitlab.gnome.org/GNOME/gnome-builder), an IDE for the Gnome platform - - [Gnome MPV](https://github.com/gnome-mpv/gnome-mpv), Gnome frontend to the mpv video player - - [Gnome Recipes](https://gitlab.gnome.org/GNOME/recipes), application for cooking recipes - - [Gnome Software](https://gitlab.gnome.org/GNOME/gnome-software), an app store for Gnome - - [Gnome Twitch](https://github.com/vinszent/gnome-twitch), an app for viewing Twitch streams on Gnome desktop - - [Gnome Usage](https://gitlab.gnome.org/GNOME/gnome-usage), a Gnome application for visualizing system resources + - [GNOME Boxes](https://gitlab.gnome.org/GNOME/gnome-boxes), a GNOME hypervisor + - [GNOME Builder](https://gitlab.gnome.org/GNOME/gnome-builder), an IDE for the GNOME platform + - [GNOME MPV](https://github.com/gnome-mpv/gnome-mpv), GNOME frontend to the mpv video player + - [GNOME Recipes](https://gitlab.gnome.org/GNOME/recipes), application for cooking recipes + - [GNOME Software](https://gitlab.gnome.org/GNOME/gnome-software), an app store for GNOME + - [GNOME Twitch](https://github.com/vinszent/gnome-twitch), an app for viewing Twitch streams on GNOME desktop + - [GNOME Usage](https://gitlab.gnome.org/GNOME/gnome-usage), a GNOME application for visualizing system resources - [Graphene](https://ebassi.github.io/graphene/), a thin type library for graphics - [Grilo](https://git.gnome.org/browse/grilo) and [Grilo plugins](https://git.gnome.org/browse/grilo-plugins), the Grilo multimedia framework - [GStreamer](https://cgit.freedesktop.org/gstreamer/gstreamer/), multimedia framework (not the default yet) @@ -45,6 +47,7 @@ listed in the [`meson` GitHub topic](https://github.com/topics/meson). - [libfuse](https://github.com/libfuse/libfuse), the reference implementation of the Linux FUSE (Filesystem in Userspace) interface - [Libgit2-glib](https://git.gnome.org/browse/libgit2-glib), a GLib wrapper for libgit2 - [Libhttpseverywhere](https://git.gnome.org/browse/libhttpseverywhere), a library to enable httpseverywhere on any desktop app + - [libmodulemd](https://github.com/fedora-modularity/libmodulemd), a GObject Introspected library for managing [Fedora Project](https://getfedora.org/) module metadata. 
- [Libosmscout](https://github.com/Framstag/libosmscout), a C++ library for offline map rendering, routing and location lookup based on OpenStreetMap data - [libspng](https://gitlab.com/randy408/libspng), a C library for reading and writing Portable Network Graphics (PNG) @@ -54,7 +57,7 @@ format files - [Kiwix libraries](https://github.com/kiwix/kiwix-lib) - [Mesa](https://cgit.freedesktop.org/mesa/mesa/), An open source graphics driver project - [MiracleCast](https://github.com/albfan/miraclecast), connect external monitors to your system via Wifi-Display specification aka Miracast - - [Nautilus](https://gitlab.gnome.org/GNOME/nautilus), the Gnome file manager + - [Nautilus](https://gitlab.gnome.org/GNOME/nautilus), the GNOME file manager - [Nemo](https://github.com/linuxmint/nemo), the file manager for the Cinnamon desktop environment - [Orc](http://cgit.freedesktop.org/gstreamer/orc/), the Optimized Inner Loop Runtime Compiler (not the default yet) - [Outlier](https://github.com/kerolasa/outlier), a small Hello World style meson example project diff --git a/docs/markdown/Windows-module.md b/docs/markdown/Windows-module.md index 8098c11..39f1ba6 100644 --- a/docs/markdown/Windows-module.md +++ b/docs/markdown/Windows-module.md @@ -16,6 +16,8 @@ has the following keyword argument. - `depend_files` lists resource files that the resource script depends on (e.g. bitmap, cursor, font, html, icon, message table, binary data or manifest files referenced by the resource script) (*since 0.47.0*) +- `depends` lists target(s) that this target depends on, even though it does not + take them as an argument (e.g. as above, but generated) (*since 0.47.0*) - `include_directories` lists directories to be both searched by the resource compiler for referenced resource files, and added to the preprocessor include search path. diff --git a/docs/markdown/howtox.md b/docs/markdown/howtox.md index acc18d7..adcec7c 100644 --- a/docs/markdown/howtox.md +++ b/docs/markdown/howtox.md @@ -1,6 +1,8 @@ # How do I do X in Meson? -This page lists code snippets for common tasks. These are written mostly using the C compiler, but the same approach should work on almost all other compilers. +This page lists code snippets for common tasks. These are written +mostly using the C compiler, but the same approach should work on +almost all other compilers. ## Set compiler @@ -10,6 +12,20 @@ When first running Meson, set it in an environment variable. $ CC=mycc meson <options> ``` +Note that environment variables like `CC` _always_ refer to the native +compiler. That is, the compiler used to compile programs that run on +the current machine. The compiler used in cross compilation is set +with the cross file. + +This behaviour is different from e.g. Autotools, where cross +compilation is done by setting `CC` to point to the cross compiler +(such as `/usr/bin/arm-linux-gnueabihf-gcc`). The reason for this is +that Meson supports natively the case where you compile helper tools +(such as code generators) and use the results during the +build. Because of this Meson needs to know both the native and the +cross compiler. The former is set via the environment variables and +the latter via the cross file only. + ## Set default C/C++ language version ```meson @@ -25,7 +41,9 @@ executable(..., override_options : ['c_std=c11']) ## Enable threads -Lots of people seem to do this manually with `find_library('pthread')` or something similar. Do not do that. It is not portable. Instead do this. 
+Lots of people seem to do this manually with `find_library('pthread')` +or something similar. Do not do that. It is not portable. Instead do +this. ```meson thread_dep = dependency('threads') @@ -77,7 +95,8 @@ configure_file(...) ## Generate a runnable script with `configure_file` -`configure_file` preserves metadata so if your template file has execute permissions, the generated file will have them too. +`configure_file` preserves metadata so if your template file has +execute permissions, the generated file will have them too. ## Producing a coverage report @@ -99,7 +118,11 @@ The coverage report can be found in the meson-logs subdirectory. ## Add some optimization to debug builds -By default the debug build does not use any optimizations. This is the desired approach most of the time. However some projects benefit from having some minor optimizations enabled. GCC even has a specific compiler flag `-Og` for this. To enable its use, just issue the following command. +By default the debug build does not use any optimizations. This is the +desired approach most of the time. However some projects benefit from +having some minor optimizations enabled. GCC even has a specific +compiler flag `-Og` for this. To enable its use, just issue the +following command. ```console $ meson configure -Dc_args=-Og @@ -109,13 +132,17 @@ This causes all subsequent builds to use this command line argument. ## Use address sanitizer -Clang comes with a selection of analysis tools such as the [address sanitizer](https://clang.llvm.org/docs/AddressSanitizer.html). Meson has native support for these with the `b_sanitize` option. +Clang comes with a selection of analysis tools such as the [address +sanitizer](https://clang.llvm.org/docs/AddressSanitizer.html). Meson +has native support for these with the `b_sanitize` option. ```console $ meson <other options> -Db_sanitize=address ``` -After this you just compile your code and run the test suite. Address sanitizer will abort executables which have bugs so they show up as test failures. +After this you just compile your code and run the test suite. Address +sanitizer will abort executables which have bugs so they show up as +test failures. ## Use Clang static analyzer @@ -125,7 +152,9 @@ Install scan-build and configure your project. Then do this: $ ninja scan-build ``` -You can use the `SCANBUILD` environment variable to choose the scan-build executable. +You can use the `SCANBUILD` environment variable to choose the +scan-build executable. + ```console $ SCANBUILD=<your exe> ninja scan-build ``` @@ -133,16 +162,20 @@ $ SCANBUILD=<your exe> ninja scan-build ## Use profile guided optimization -Using profile guided optimization with GCC is a two phase operation. First we set up the project with profile measurements enabled and compile it. +Using profile guided optimization with GCC is a two phase +operation. First we set up the project with profile measurements +enabled and compile it. ```console $ meson <Meson options, such as --buildtype=debugoptimized> -Db_pgo=generate $ ninja -C builddir ``` -Then we need to run the program with some representative input. This step depends on your project. +Then we need to run the program with some representative input. This +step depends on your project. -Once that is done we change the compiler flags to use the generated information and rebuild. +Once that is done we change the compiler flags to use the generated +information and rebuild. 
```console $ meson configure -Db_pgo=use @@ -153,7 +186,9 @@ After these steps the resulting binary is fully optimized. ## Add math library (`-lm`) portably -Some platforms (e.g. Linux) have a standalone math library. Other platforms (pretty much everyone else) do not. How to specify that `m` is used only when needed? +Some platforms (e.g. Linux) have a standalone math library. Other +platforms (pretty much everyone else) do not. How to specify that `m` +is used only when needed? ```meson cc = meson.get_compiler('c') diff --git a/docs/markdown/snippets/bitcode_support.md b/docs/markdown/snippets/bitcode_support.md deleted file mode 100644 index a0d7ad9..0000000 --- a/docs/markdown/snippets/bitcode_support.md +++ /dev/null @@ -1,15 +0,0 @@ -## New base build option for LLVM (Apple) bitcode support - -When building with clang on macOS, you can now build your static and shared -binaries with embedded bitcode by enabling the `b_bitcode` [base -option](Builtin-options.md#Base_options) by passing `-Db_bitcode=true` to -Meson. - -This is better than passing the options manually in the environment since Meson -will automatically disable conflicting options such as `b_asneeded`, and will -disable bitcode support on targets that don't support it such as -`shared_module()`. - -Since this requires support in the linker, it is currently only enabled when -using Apple ld. In the future it can be extended to clang on other platforms -too. diff --git a/docs/markdown/snippets/compiler_check_header.md b/docs/markdown/snippets/compiler_check_header.md deleted file mode 100644 index 8981d13..0000000 --- a/docs/markdown/snippets/compiler_check_header.md +++ /dev/null @@ -1,12 +0,0 @@ -## New compiler check: check_header() - -The existing compiler check `has_header()` only checks if the header exists, -either with the `__has_include` C++11 builtin, or by running the pre-processor. - -However, sometimes the header you are looking for is unusable on some platforms -or with some compilers in a way that is only detectable at compile-time. For -such cases, you should use `check_header()` which will include the header and -run a full compile. - -Note that `has_header()` is much faster than `check_header()`, so it should be -used whenever possible. diff --git a/docs/markdown/snippets/configure_file_copy.md b/docs/markdown/snippets/configure_file_copy.md deleted file mode 100644 index fee04e4..0000000 --- a/docs/markdown/snippets/configure_file_copy.md +++ /dev/null @@ -1,10 +0,0 @@ -## New action 'copy' for configure_file() - -In addition to `configuration:` and `command:`, -[`configure_file()`](#Reference-manual.md#configure_file) now accepts a keyword -argument `copy:` which specifies a new action: copying the file specified with -the `input:` keyword argument to a file in the build directory with the name -specified with the `output:` keyword argument. - -These three keyword arguments are, as before, mutually exclusive. You can only -do one action at a time. diff --git a/docs/markdown/snippets/configure_file_encoding.md b/docs/markdown/snippets/configure_file_encoding.md deleted file mode 100644 index 8082177..0000000 --- a/docs/markdown/snippets/configure_file_encoding.md +++ /dev/null @@ -1,12 +0,0 @@ -## New encoding keyword for configure_file - -Add a new keyword to [`configure_file()`](#Reference-manual.md#configure_file) -that allows the developer to specify the input and output file encoding. - -If the file encoding of the input is not UTF-8 meson can crash (see #1542). 
-A crash as with UTF-16 is the best case and the worst meson will silently -corrupt the output file for example with ISO-2022-JP. For additional details -see pull request #3135. - -The new keyword defaults to UTF-8 and the documentation strongly suggest to -convert the file to UTF-8 when possible. diff --git a/docs/markdown/snippets/configure_file_output_format.md b/docs/markdown/snippets/configure_file_output_format.md deleted file mode 100644 index e522885..0000000 --- a/docs/markdown/snippets/configure_file_output_format.md +++ /dev/null @@ -1,14 +0,0 @@ -## New keyword argument `output_format` for configure_file() - -When called without an input file, `configure_file` generates a -C header file by default. A keyword argument was added to allow -specifying the output format, for example for use with nasm or yasm: - -``` -conf = configuration_data() -conf.set('FOO', 1) - -configure_file('config.asm', - configuration: conf, - output_format: 'nasm') -``` diff --git a/docs/markdown/snippets/configure_file_overwrite_warning.md b/docs/markdown/snippets/configure_file_overwrite_warning.md new file mode 100644 index 0000000..550407d --- /dev/null +++ b/docs/markdown/snippets/configure_file_overwrite_warning.md @@ -0,0 +1,39 @@ +## Meson warns if two calls to configure_file() write to the same file + +If two calls to [`configure_file()`](#Reference-manual.md#configure_file) +write to the same file Meson will print a `WARNING:` message during +configuration. For example: +```meson +project('configure_file', 'cpp') + +configure_file( + input: 'a.in', + output: 'out', + command: ['./foo.sh'] + ) +configure_file( + input: 'a.in', + output: 'out', + command: ['./foo.sh'] +) + +``` + +This will output: + +``` +The Meson build system +Version: 0.47.0.dev1 +Source dir: /path/to/srctree +Build dir: /path/to/buildtree +Build type: native build +Project name: configure_file +Project version: undefined +Build machine cpu family: x86_64 +Build machine cpu: x86_64 +Configuring out with command +WARNING: Output file out for configure_file overwritten. First time written in line 3 now in line 8 +Configuring out with command +Build targets in project: 0 +Found ninja-1.8.2 at /usr/bin/ninja +``` diff --git a/docs/markdown/snippets/custom-target-depends.md b/docs/markdown/snippets/custom-target-depends.md deleted file mode 100644 index e2b2ed7..0000000 --- a/docs/markdown/snippets/custom-target-depends.md +++ /dev/null @@ -1,4 +0,0 @@ -## Substitutions in `custom_target(depends:)` - -The `depfile` keyword argument to `custom_target` now accepts the `@BASENAME@` -and `@PLAINNAME@` substitutions. diff --git a/docs/markdown/snippets/dict_builtin.md b/docs/markdown/snippets/dict_builtin.md deleted file mode 100644 index 1bd24ce..0000000 --- a/docs/markdown/snippets/dict_builtin.md +++ /dev/null @@ -1,19 +0,0 @@ -## New built-in object dictionary - -Meson dictionaries use a syntax similar to python's dictionaries, -but have a narrower scope: they are immutable, keys can only -be string literals, and initializing a dictionary with duplicate -keys causes a fatal error. 
- -Example usage: - -```meson -dict = {'foo': 42, 'bar': 'baz'} - -foo = dict.get('foo') -foobar = dict.get('foobar', 'fallback-value') - -foreach key, value : dict - Do something with key and value -endforeach -``` diff --git a/docs/markdown/snippets/feature_new.md b/docs/markdown/snippets/feature_new.md deleted file mode 100644 index 7480634..0000000 --- a/docs/markdown/snippets/feature_new.md +++ /dev/null @@ -1,32 +0,0 @@ -## Feature detection based on meson_version in project() - -Meson will now print a `WARNING:` message during configuration if you use -a function or a keyword argument that was added in a meson version that's newer -than the version specified inside `project()`. For example: - -```meson -project('featurenew', meson_version: '>=0.43') - -cdata = configuration_data() -cdata.set('FOO', 'bar') -message(cdata.get_unquoted('FOO')) -``` - -This will output: - -``` -The Meson build system -Version: 0.47.0.dev1 -Source dir: C:\path\to\srctree -Build dir: C:\path\to\buildtree -Build type: native build -Project name: featurenew -Project version: undefined -Build machine cpu family: x86_64 -Build machine cpu: x86_64 -WARNING: Project targetting '>=0.43' but tried to use feature introduced in '0.44.0': get_unquoted -Message: bar -Build targets in project: 0 -Minimum version of features used: -0.44.0: {'get_unquoted'} -``` diff --git a/docs/markdown/snippets/gdbus_codegen_options.md b/docs/markdown/snippets/gdbus_codegen_options.md deleted file mode 100644 index d3dd84c..0000000 --- a/docs/markdown/snippets/gdbus_codegen_options.md +++ /dev/null @@ -1,14 +0,0 @@ -## New options to gnome.gdbus_codegen - -You can now pass additional arguments to gdbus-codegen using the `extra_args` -keyword. This is the same for the other gnome function calls. - -Meson now automatically adds autocleanup support to the generated code. This -can be modified by setting the autocleanup keyword. - -For example: - - sources += gnome.gdbus_codegen('com.mesonbuild.Test', - 'com.mesonbuild.Test.xml', - autocleanup : 'none', - extra_args : ['--pragma-once']) diff --git a/docs/markdown/snippets/install_mode-extended.md b/docs/markdown/snippets/install_mode-extended.md deleted file mode 100644 index b0ee4c3..0000000 --- a/docs/markdown/snippets/install_mode-extended.md +++ /dev/null @@ -1,8 +0,0 @@ -## install_mode argument extended to all installable targets - -It is now possible to pass an install_mode argument to all installable targets, -such as executable(), libraries, headers, man pages and custom/generated -targets. - -The install_mode argument can be used to specify the file mode in symbolic -format and optionally the owner/uid and group/gid for the installed files. diff --git a/docs/markdown/snippets/install_umask.md b/docs/markdown/snippets/install_umask.md deleted file mode 100644 index b3a2427..0000000 --- a/docs/markdown/snippets/install_umask.md +++ /dev/null @@ -1,17 +0,0 @@ -## New built-in option install_umask with a default value 022 - -This umask is used to define the default permissions of files and directories -created in the install tree. Files will preserve their executable mode, but the -exact permissions will obey the install_umask. 
- -The install_umask can be overridden in the meson command-line: - - $ meson --install-umask=027 builddir/ - -A project can also override the default in the project() call: - - project('myproject', 'c', - default_options : ['install_umask=027']) - -To disable the install_umask, set it to 'preserve', in which case permissions -are copied from the files in their origin. diff --git a/docs/markdown/snippets/integer-base.md b/docs/markdown/snippets/integer-base.md deleted file mode 100644 index 0a27c9a..0000000 --- a/docs/markdown/snippets/integer-base.md +++ /dev/null @@ -1,9 +0,0 @@ -## Octal and binary string literals - -Octal and binary integer literals can now be used in build and option files. - -```meson -int_493 = 0o755 -int_1365 = 0b10101010101 -``` - diff --git a/docs/markdown/snippets/run_command_check.md b/docs/markdown/snippets/run_command_check.md deleted file mode 100644 index 018456b..0000000 --- a/docs/markdown/snippets/run_command_check.md +++ /dev/null @@ -1,4 +0,0 @@ -## New 'check' keyword argument for the run_command function - -If `check` is `true`, then the configuration will fail if the command returns a -non-zero exit status. The default value is `false` for compatibility reasons. diff --git a/docs/markdown/snippets/windows-resources-dependencies.md b/docs/markdown/snippets/windows-resources-dependencies.md deleted file mode 100644 index e30e18c..0000000 --- a/docs/markdown/snippets/windows-resources-dependencies.md +++ /dev/null @@ -1,7 +0,0 @@ -## Windows resource files dependencies - -The `compile_resources()` function of the `windows` module now takes -the `depend_files:` keyword. - -When using binutils's `windres`, dependencies on files `#include`'d by the -preprocessor are now automatically tracked. diff --git a/docs/sitemap.txt b/docs/sitemap.txt index e915df2..2d43e18 100644 --- a/docs/sitemap.txt +++ b/docs/sitemap.txt @@ -5,6 +5,7 @@ index.md Manual.md Overview.md Running-Meson.md + Builtin-options.md Using-with-Visual-Studio.md Meson-sample.md Syntax.md @@ -67,6 +68,7 @@ index.md Shipping-prebuilt-binaries-as-wraps.md fallback-wraptool.md Release-notes.md + Release-notes-for-0.48.0.md Release-notes-for-0.47.0.md Release-notes-for-0.46.0.md Release-notes-for-0.45.0.md diff --git a/man/meson.1 b/man/meson.1 index b6f09de..747def8 100644 --- a/man/meson.1 +++ b/man/meson.1 @@ -1,4 +1,4 @@ -.TH MESON "1" "April 2018" "meson 0.46.0" "User Commands" +.TH MESON "1" "July 2018" "meson 0.47.0" "User Commands" .SH NAME meson - a high productivity build system .SH DESCRIPTION diff --git a/man/mesonconf.1 b/man/mesonconf.1 deleted file mode 100644 index 9ead6ae..0000000 --- a/man/mesonconf.1 +++ /dev/null @@ -1,10 +0,0 @@ -.TH MESONCONF "1" "April 2018" "mesonconf 0.46.0" "User Commands" -.SH NAME -mesonconf - a tool to configure Meson builds -.SH DESCRIPTION - -This executable is deprecated and will be removed in the future. The -functionality that was in this executable can be invoked via the main Meson -command like this: - -.B meson configure <options> diff --git a/man/mesonintrospect.1 b/man/mesonintrospect.1 deleted file mode 100644 index 3251299..0000000 --- a/man/mesonintrospect.1 +++ /dev/null @@ -1,13 +0,0 @@ -.TH MESONINTROSPECT "1" "April 2018" "mesonintrospect 0.46.0" "User Commands" -.SH NAME -mesonintrospect - a tool to extract information about a Meson build -.SH DESCRIPTION - -This executable is deprecated and will be removed in the future. 
The -functionality that was in this executable can be invoked via the main Meson -command like this: - -.B meson introspect <options> - -.SH SEE ALSO -http://mesonbuild.com/ diff --git a/man/mesontest.1 b/man/mesontest.1 deleted file mode 100644 index 97f8f0e..0000000 --- a/man/mesontest.1 +++ /dev/null @@ -1,13 +0,0 @@ -.TH MESON "1" "April 2018" "meson 0.46.0" "User Commands" -.SH NAME -mesontest - test tool for the Meson build system -.SH DESCRIPTION - -This executable is deprecated and will be removed in the future. The -functionality that was in this executable can be invoked via the main Meson -command like this: - -.B meson test <options> - -.SH SEE ALSO -http://mesonbuild.com/ diff --git a/man/wraptool.1 b/man/wraptool.1 deleted file mode 100644 index 9d3a056..0000000 --- a/man/wraptool.1 +++ /dev/null @@ -1,13 +0,0 @@ -.TH WRAPTOOL "1" "April 2018" "meson 0.46.0" "User Commands" -.SH NAME -wraptool - source dependency downloader -.SH DESCRIPTION - -This executable is deprecated and will be removed in the future. The -functionality that was in this executable can be invoked via the main Meson -command like this: - -.B meson wrap <options> - -.SH SEE ALSO -http://wrapdb.mesonbuild.com/ diff --git a/mesonbuild/backend/backends.py b/mesonbuild/backend/backends.py index ef677a9..354d25a 100644 --- a/mesonbuild/backend/backends.py +++ b/mesonbuild/backend/backends.py @@ -54,6 +54,16 @@ class InstallData: self.install_subdirs = [] self.mesonintrospect = mesonintrospect +class TargetInstallData: + def __init__(self, fname, outdir, aliases, strip, install_name_mappings, install_rpath, install_mode): + self.fname = fname + self.outdir = outdir + self.aliases = aliases + self.strip = strip + self.install_name_mappings = install_name_mappings + self.install_rpath = install_rpath + self.install_mode = install_mode + class ExecutableSerialisation: def __init__(self, name, fname, cmd_args, env, is_cross, exe_wrapper, workdir, extra_paths, capture): @@ -328,7 +338,7 @@ class Backend: return self.build.static_cross_linker, [] else: return self.build.static_linker, [] - l, stdlib_args = target.get_clike_dynamic_linker_and_stdlibs() + l, stdlib_args = target.get_clink_dynamic_linker_and_stdlibs() return l, stdlib_args @staticmethod @@ -740,7 +750,7 @@ class Backend: result = OrderedDict() # Get all build and custom targets that must be built by default for name, t in self.build.get_targets().items(): - if t.build_by_default or t.install or t.build_always: + if t.build_by_default or t.install: result[name] = t # Get all targets used as test executables and arguments. These must # also be built by default. XXX: Sometime in the future these should be @@ -827,7 +837,7 @@ class Backend: outdir = self.get_target_dir(target) if absolute_outputs: source_root = self.environment.get_source_dir() - build_root = self.environment.get_source_dir() + build_root = self.environment.get_build_dir() outdir = os.path.join(self.environment.get_build_dir(), outdir) outputs = [] for i in target.get_outputs(): diff --git a/mesonbuild/backend/ninjabackend.py b/mesonbuild/backend/ninjabackend.py index a773439..035f835 100644 --- a/mesonbuild/backend/ninjabackend.py +++ b/mesonbuild/backend/ninjabackend.py @@ -12,7 +12,11 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-import os, pickle, re, shlex, subprocess +import os +import re +import shlex +import pickle +import subprocess from collections import OrderedDict import itertools from pathlib import PurePath @@ -24,11 +28,11 @@ from .. import build from .. import mlog from .. import dependencies from .. import compilers -from ..compilers import CompilerArgs +from ..compilers import CompilerArgs, CCompiler, get_macos_dylib_install_name from ..linkers import ArLinker from ..mesonlib import File, MesonException, OrderedSet from ..mesonlib import get_compiler_for_source, has_path_sep -from .backends import CleanTrees, InstallData +from .backends import CleanTrees, InstallData, TargetInstallData from ..build import InvalidArguments if mesonlib.is_windows(): @@ -504,7 +508,7 @@ int dummy; deps = self.unwrap_dep_list(target) deps += self.get_custom_target_depend_files(target) desc = 'Generating {0} with a {1} command.' - if target.build_always: + if target.build_always_stale: deps.append('PHONY') if target.depfile is None: rulename = 'CUSTOM_COMMAND' @@ -683,7 +687,7 @@ int dummy; elem = NinjaBuildElement(self.all_outputs, 'meson-install', 'CUSTOM_COMMAND', 'PHONY') elem.add_dep('all') elem.add_item('DESC', 'Installing files.') - elem.add_item('COMMAND', self.environment.get_build_command() + ['--internal', 'install', install_data_file]) + elem.add_item('COMMAND', self.environment.get_build_command() + ['install', '--no-rebuild']) elem.add_item('pool', 'console') self.generate_depmf_install(d) self.generate_target_install(d) @@ -699,34 +703,61 @@ int dummy; with open(install_data_file, 'wb') as ofile: pickle.dump(d, ofile) + def get_target_install_dirs(self, t): + # Find the installation directory. + if isinstance(t, build.SharedModule): + default_install_dir = self.environment.get_shared_module_dir() + elif isinstance(t, build.SharedLibrary): + default_install_dir = self.environment.get_shared_lib_dir() + elif isinstance(t, build.StaticLibrary): + default_install_dir = self.environment.get_static_lib_dir() + elif isinstance(t, build.Executable): + default_install_dir = self.environment.get_bindir() + elif isinstance(t, build.CustomTarget): + default_install_dir = None + else: + assert(isinstance(t, build.BuildTarget)) + # XXX: Add BuildTarget-specific install dir cases here + default_install_dir = self.environment.get_libdir() + outdirs = t.get_custom_install_dir() + if outdirs[0] is not None and outdirs[0] != default_install_dir and outdirs[0] is not True: + # Either the value is set to a non-default value, or is set to + # False (which means we want this specific output out of many + # outputs to not be installed). + custom_install_dir = True + else: + custom_install_dir = False + outdirs[0] = default_install_dir + return outdirs, custom_install_dir + + def get_target_link_deps_mappings(self, t, prefix): + ''' + On macOS, we need to change the install names of all built libraries + that a target depends on using install_name_tool so that the target + continues to work after installation. For this, we need a dictionary + mapping of the install_name value to the new one, so we can change them + on install. 
+ ''' + result = {} + if isinstance(t, build.StaticLibrary): + return result + for ld in t.get_all_link_deps(): + if ld is t or not isinstance(ld, build.SharedLibrary): + continue + old = get_macos_dylib_install_name(ld.prefix, ld.name, ld.suffix, ld.soversion) + if old in result: + continue + fname = ld.get_filename() + outdirs, _ = self.get_target_install_dirs(ld) + new = os.path.join(prefix, outdirs[0], fname) + result.update({old: new}) + return result + def generate_target_install(self, d): for t in self.build.get_targets().values(): if not t.should_install(): continue - # Find the installation directory. - if isinstance(t, build.SharedModule): - default_install_dir = self.environment.get_shared_module_dir() - elif isinstance(t, build.SharedLibrary): - default_install_dir = self.environment.get_shared_lib_dir() - elif isinstance(t, build.StaticLibrary): - default_install_dir = self.environment.get_static_lib_dir() - elif isinstance(t, build.Executable): - default_install_dir = self.environment.get_bindir() - elif isinstance(t, build.CustomTarget): - default_install_dir = None - else: - assert(isinstance(t, build.BuildTarget)) - # XXX: Add BuildTarget-specific install dir cases here - default_install_dir = self.environment.get_libdir() - outdirs = t.get_custom_install_dir() - if outdirs[0] is not None and outdirs[0] != default_install_dir and outdirs[0] is not True: - # Either the value is set to a non-default value, or is set to - # False (which means we want this specific output out of many - # outputs to not be installed). - custom_install_dir = True - else: - custom_install_dir = False - outdirs[0] = default_install_dir + outdirs, custom_install_dir = self.get_target_install_dirs(t) # Sanity-check the outputs and install_dirs num_outdirs, num_out = len(outdirs), len(t.get_outputs()) if num_outdirs != 1 and num_outdirs != num_out: @@ -741,8 +772,10 @@ int dummy; # Install primary build output (library/executable/jar, etc) # Done separately because of strip/aliases/rpath if outdirs[0] is not False: - i = [self.get_target_filename(t), outdirs[0], - t.get_aliases(), should_strip, t.install_rpath, install_mode] + mappings = self.get_target_link_deps_mappings(t, d.prefix) + i = TargetInstallData(self.get_target_filename(t), outdirs[0], + t.get_aliases(), should_strip, mappings, + t.install_rpath, install_mode) d.targets.append(i) # On toolchains/platforms that use an import library for # linking (separate from the shared library with all the @@ -756,11 +789,8 @@ int dummy; else: implib_install_dir = self.environment.get_import_lib_dir() # Install the import library. - i = [self.get_target_filename_for_linking(t), - implib_install_dir, - # It has no aliases, should not be stripped, and - # doesn't have an install_rpath - {}, False, '', install_mode] + i = TargetInstallData(self.get_target_filename_for_linking(t), + implib_install_dir, {}, False, {}, '', install_mode) d.targets.append(i) # Install secondary outputs. Only used for Vala right now. if num_outdirs > 1: @@ -769,7 +799,8 @@ int dummy; if outdir is False: continue f = os.path.join(self.get_target_dir(t), output) - d.targets.append([f, outdir, {}, False, None, install_mode]) + i = TargetInstallData(f, outdir, {}, False, {}, None, install_mode) + d.targets.append(i) elif isinstance(t, build.CustomTarget): # If only one install_dir is specified, assume that all # outputs will be installed into it. 
This is for @@ -781,14 +812,16 @@ int dummy; if num_outdirs == 1 and num_out > 1: for output in t.get_outputs(): f = os.path.join(self.get_target_dir(t), output) - d.targets.append([f, outdirs[0], {}, False, None, install_mode]) + i = TargetInstallData(f, outdirs[0], {}, False, {}, None, install_mode) + d.targets.append(i) else: for output, outdir in zip(t.get_outputs(), outdirs): # User requested that we not install this output if outdir is False: continue f = os.path.join(self.get_target_dir(t), output) - d.targets.append([f, outdir, {}, False, None, install_mode]) + i = TargetInstallData(f, outdir, {}, False, {}, None, install_mode) + d.targets.append(i) def generate_custom_install_script(self, d): result = [] @@ -941,7 +974,7 @@ int dummy; class_list = [] compiler = target.compilers['java'] c = 'c' - m = '' + m = 'm' e = '' f = 'f' main_class = target.get_main_class() @@ -951,8 +984,18 @@ int dummy; plain_class_path = self.generate_single_java_compile(src, target, compiler, outfile) class_list.append(plain_class_path) class_dep_list = [os.path.join(self.get_target_private_dir(target), i) for i in class_list] + manifest_path = os.path.join(self.get_target_private_dir(target), 'META-INF', 'MANIFEST.MF') + manifest_fullpath = os.path.join(self.environment.get_build_dir(), manifest_path) + os.makedirs(os.path.dirname(manifest_fullpath), exist_ok=True) + with open(manifest_fullpath, 'w') as manifest: + if any(target.link_targets): + manifest.write('Class-Path: ') + cp_paths = [os.path.join(self.get_target_dir(l), l.get_filename()) for l in target.link_targets] + manifest.write(' '.join(cp_paths)) + manifest.write('\n') jar_rule = 'java_LINKER' commands = [c + m + e + f] + commands.append(manifest_path) if e != '': commands.append(main_class) commands.append(self.get_target_filename(target)) @@ -1034,12 +1077,14 @@ int dummy; self.generate_generator_list_rules(target, outfile) def generate_single_java_compile(self, src, target, compiler, outfile): + deps = [os.path.join(self.get_target_dir(l), l.get_filename()) for l in target.link_targets] args = [] args += compiler.get_buildtype_args(self.get_option_for_target('buildtype', target)) args += self.build.get_global_args(compiler) args += self.build.get_project_args(compiler, target.subproject) args += target.get_java_args() args += compiler.get_output_args(self.get_target_private_dir(target)) + args += target.get_classpath_args() curdir = target.get_subdir() sourcepath = os.path.join(self.build_to_src, curdir) + os.pathsep sourcepath += os.path.normpath(curdir) + os.pathsep @@ -1051,6 +1096,7 @@ int dummy; plain_class_path = src.fname[:-4] + 'class' rel_obj = os.path.join(self.get_target_private_dir(target), plain_class_path) element = NinjaBuildElement(self.all_outputs, rel_obj, compiler.get_language() + '_COMPILER', rel_src) + element.add_dep(deps) element.add_item('ARGS', args) element.write(outfile) return plain_class_path @@ -1497,14 +1543,10 @@ int dummy; if static_linker is None: return rule = 'rule STATIC%s_LINKER\n' % crstr - # We don't use @file.rsp on Windows with ArLinker because llvm-ar and - # gcc-ar blindly pass the --plugin argument to `ar` and you cannot pass - # options as arguments while using the @file.rsp syntax. 
- # See: https://github.com/mesonbuild/meson/issues/1646 if static_linker.can_linker_accept_rsp(): - command_template = ''' command = {executable} @$out.rsp + command_template = ''' command = {executable} $LINK_ARGS {output_args} @$out.rsp rspfile = $out.rsp - rspfile_content = $LINK_ARGS {output_args} $in + rspfile_content = $in ''' else: command_template = ' command = {executable} $LINK_ARGS {output_args} $in\n' @@ -1688,7 +1730,7 @@ rule FORTRAN_DEP_HACK%s if compiler.can_linker_accept_rsp(): command_template = ' command = {executable} @$out.rsp\n' \ ' rspfile = $out.rsp\n' \ - ' rspfile_content = $ARGS{cross_args} {output_args} {compile_only_args} $in\n' + ' rspfile_content = $ARGS {cross_args} {output_args} {compile_only_args} $in\n' else: command_template = ' command = {executable} $ARGS {cross_args} {output_args} {compile_only_args} $in\n' command = command_template.format( @@ -2398,7 +2440,6 @@ rule FORTRAN_DEP_HACK%s return linker.get_no_stdlib_link_args() def get_target_type_link_args(self, target, linker): - abspath = os.path.join(self.environment.get_build_dir(), target.subdir) commands = [] if isinstance(target, build.Executable): # Currently only used with the Swift compiler to add '-emit-executable' @@ -2422,8 +2463,7 @@ rule FORTRAN_DEP_HACK%s commands += linker.get_pic_args() # Add -Wl,-soname arguments on Linux, -install_name on OS X commands += linker.get_soname_args(target.prefix, target.name, target.suffix, - abspath, target.soversion, - isinstance(target, build.SharedModule)) + target.soversion, isinstance(target, build.SharedModule)) # This is only visited when building for Windows using either GCC or Visual Studio if target.vs_module_defs and hasattr(linker, 'gen_vs_module_defs_args'): commands += linker.gen_vs_module_defs_args(target.vs_module_defs.rel_to_builddir(self.build_to_src)) @@ -2440,13 +2480,18 @@ rule FORTRAN_DEP_HACK%s target_args = self.build_target_link_arguments(linker, target.link_whole_targets) return linker.get_link_whole_for(target_args) if len(target_args) else [] - def guess_library_absolute_path(self, libname, search_dirs, prefixes, suffixes): - for directory in search_dirs: - for suffix in suffixes: - for prefix in prefixes: - trial = os.path.join(directory, prefix + libname + '.' + suffix) - if os.path.isfile(trial): - return trial + @staticmethod + def guess_library_absolute_path(linker, libname, search_dirs, patterns): + for d in search_dirs: + for p in patterns: + trial = CCompiler._get_trials_from_pattern(p, d, libname) + if not trial: + continue + trial = CCompiler._get_file_from_list(trial) + if not trial: + continue + # Return the first result + return trial def guess_external_link_dependencies(self, linker, target, commands, internal): # Ideally the linker would generate dependency information that could be used. 
@@ -2495,17 +2540,19 @@ rule FORTRAN_DEP_HACK%s # TODO The get_library_naming requirement currently excludes link targets that use d or fortran as their main linker if hasattr(linker, 'get_library_naming'): search_dirs = list(search_dirs) + linker.get_library_dirs() - prefixes_static, suffixes_static = linker.get_library_naming(self.environment, 'static', strict=True) - prefixes_shared, suffixes_shared = linker.get_library_naming(self.environment, 'shared', strict=True) + static_patterns = linker.get_library_naming(self.environment, 'static', strict=True) + shared_patterns = linker.get_library_naming(self.environment, 'shared', strict=True) for libname in libs: # be conservative and record most likely shared and static resolution, because we don't know exactly # which one the linker will prefer - static_resolution = self.guess_library_absolute_path(libname, search_dirs, prefixes_static, suffixes_static) - shared_resolution = self.guess_library_absolute_path(libname, search_dirs, prefixes_shared, suffixes_shared) - if static_resolution: - guessed_dependencies.append(os.path.realpath(static_resolution)) - if shared_resolution: - guessed_dependencies.append(os.path.realpath(shared_resolution)) + staticlibs = self.guess_library_absolute_path(linker, libname, + search_dirs, static_patterns) + sharedlibs = self.guess_library_absolute_path(linker, libname, + search_dirs, shared_patterns) + if staticlibs: + guessed_dependencies.append(os.path.realpath(staticlibs)) + if sharedlibs: + guessed_dependencies.append(os.path.realpath(sharedlibs)) return guessed_dependencies + absolute_libs @@ -2576,25 +2623,32 @@ rule FORTRAN_DEP_HACK%s dependencies = target.get_dependencies() internal = self.build_target_link_arguments(linker, dependencies) commands += internal - # For 'automagic' deps: Boost and GTest. Also dependency('threads'). - # pkg-config puts the thread flags itself via `Cflags:` - for d in target.external_deps: - if d.need_threads(): - commands += linker.thread_link_flags(self.environment) - elif d.need_openmp(): - commands += linker.openmp_flags() # Only non-static built targets need link args and link dependencies if not isinstance(target, build.StaticLibrary): + # For 'automagic' deps: Boost and GTest. Also dependency('threads'). + # pkg-config puts the thread flags itself via `Cflags:` + need_threads = False + need_openmp = False + commands += target.link_args # External deps must be last because target link libraries may depend on them. for dep in target.get_external_deps(): # Extend without reordering or de-dup to preserve `-L -l` sets # https://github.com/mesonbuild/meson/issues/1718 commands.extend_direct(dep.get_link_args()) + need_threads |= dep.need_threads() + need_openmp |= dep.need_openmp() for d in target.get_dependencies(): if isinstance(d, build.StaticLibrary): for dep in d.get_external_deps(): + need_threads |= dep.need_threads() + need_openmp |= dep.need_openmp() commands.extend_direct(dep.get_link_args()) + if need_openmp: + commands += linker.openmp_flags() + if need_threads: + commands += linker.thread_link_flags(self.environment) + # Add link args for c_* or cpp_* build options. Currently this only # adds c_winlibs and cpp_winlibs when building for Windows. 
This needs # to be after all internal and external libraries so that unresolved diff --git a/mesonbuild/build.py b/mesonbuild/build.py index 331b552..65438b0 100644 --- a/mesonbuild/build.py +++ b/mesonbuild/build.py @@ -24,8 +24,8 @@ from .mesonlib import File, MesonException, listify, extract_as_list from .mesonlib import typeslistify, stringlistify, classify_unity_sources from .mesonlib import get_filenames_templates_dict, substitute_values from .mesonlib import for_windows, for_darwin, for_cygwin, for_android, has_path_sep -from .compilers import is_object, clike_langs, sort_clike, lang_suffixes -from .interpreterbase import FeatureNew, FeatureNewKwargs +from .compilers import is_object, clink_langs, sort_clink, lang_suffixes +from .interpreterbase import FeatureNew pch_kwargs = set(['c_pch', 'cpp_pch']) @@ -309,7 +309,7 @@ a hard error in the future.''' % name) self.subproject = subproject self.build_by_default = build_by_default self.install = False - self.build_always = False + self.build_always_stale = False self.option_overrides = {} def get_basename(self): @@ -332,9 +332,6 @@ a hard error in the future.''' % name) myid = subdir_part + '@@' + myid return myid - @FeatureNewKwargs('build target', '0.42.0', ['rust_crate_type', 'build_rpath', 'implicit_include_directories']) - @FeatureNewKwargs('build target', '0.41.0', ['rust_args']) - @FeatureNewKwargs('build target', '0.40.0', ['build_by_default']) def process_kwargs(self, kwargs): if 'build_by_default' in kwargs: self.build_by_default = kwargs['build_by_default'] @@ -490,16 +487,16 @@ class BuildTarget(Target): extra = set() for t in itertools.chain(self.link_targets, self.link_whole_targets): for name, compiler in t.compilers.items(): - if name in clike_langs: + if name in clink_langs: extra.add((name, compiler)) - for name, compiler in sorted(extra, key=lambda p: sort_clike(p[0])): + for name, compiler in sorted(extra, key=lambda p: sort_clink(p[0])): self.compilers[name] = compiler if not self.compilers: # No source files or parent targets, target consists of only object - # files of unknown origin. Just add the first clike compiler + # files of unknown origin. Just add the first clink compiler # that we have and hope that it can link these objects - for lang in clike_langs: + for lang in clink_langs: if lang in compilers: self.compilers[lang] = compilers[lang] break @@ -556,9 +553,9 @@ class BuildTarget(Target): if lang not in self.compilers: self.compilers[lang] = compiler break - # Re-sort according to clike_langs + # Re-sort according to clink_langs self.compilers = OrderedDict(sorted(self.compilers.items(), - key=lambda t: sort_clike(t[0]))) + key=lambda t: sort_clink(t[0]))) # If all our sources are Vala, our target also needs the C compiler but # it won't get added above. @@ -995,7 +992,7 @@ You probably should put it in link_with instead.''') Sometimes you want to link to a C++ library that exports C API, which means the linker must link in the C++ stdlib, and we must use a C++ compiler for linking. The same is also applicable for objc/objc++, etc, - so we can keep using clike_langs for the priority order. + so we can keep using clink_langs for the priority order. 
See: https://github.com/mesonbuild/meson/issues/1653 ''' @@ -1014,9 +1011,9 @@ You probably should put it in link_with instead.''') langs.append(language) return langs - def get_clike_dynamic_linker_and_stdlibs(self): + def get_clink_dynamic_linker_and_stdlibs(self): ''' - We use the order of languages in `clike_langs` to determine which + We use the order of languages in `clink_langs` to determine which linker to use in case the target has sources compiled with multiple compilers. All languages other than those in this list have their own linker. @@ -1033,7 +1030,7 @@ You probably should put it in link_with instead.''') # Languages used by dependencies dep_langs = self.get_langs_used_by_deps() # Pick a compiler based on the language priority-order - for l in clike_langs: + for l in clink_langs: if l in self.compilers or l in dep_langs: try: linker = all_compilers[l] @@ -1071,7 +1068,7 @@ You probably should put it in link_with instead.''') 2. If the target contains only objects, process_compilers guesses and picks the first compiler that smells right. ''' - linker, _ = self.get_clike_dynamic_linker_and_stdlibs() + linker, _ = self.get_clink_dynamic_linker_and_stdlibs() # Mixing many languages with MSVC is not supported yet so ignore stdlibs. if linker and linker.get_id() == 'msvc': return True @@ -1095,7 +1092,6 @@ recommended as it is not supported on some platforms''') return class Generator: - @FeatureNewKwargs('generator', '0.43.0', ['capture']) def __init__(self, args, kwargs): if len(args) != 1: raise InvalidArguments('Generator requires exactly one positional argument: the executable') @@ -1636,6 +1632,7 @@ class CustomTarget(Target): 'install_dir', 'install_mode', 'build_always', + 'build_always_stale', 'depends', 'depend_files', 'depfile', @@ -1712,7 +1709,11 @@ class CustomTarget(Target): if not c.found(): m = 'Tried to use not-found external program {!r} in "command"' raise InvalidArguments(m.format(c.name)) - self.depend_files.append(File.from_absolute_file(c.get_path())) + path = c.get_path() + if os.path.isabs(path): + # Can only add a dependency on an external program which we + # know the absolute path of + self.depend_files.append(File.from_absolute_file(path)) final_cmd += c.get_command() elif isinstance(c, (BuildTarget, CustomTarget)): self.dependencies.append(c) @@ -1740,7 +1741,7 @@ class CustomTarget(Target): if i.strip() == '': raise InvalidArguments('Output must not consist only of whitespace.') if has_path_sep(i): - raise InvalidArguments('Output must not contain a path segment.') + raise InvalidArguments('Output {!r} must not contain a path segment.'.format(i)) if '@INPUT@' in i or '@INPUT0@' in i: m = 'Output cannot contain @INPUT@ or @INPUT0@, did you ' \ 'mean @PLAINNAME@ or @BASENAME@?' 
@@ -1779,7 +1780,7 @@ class CustomTarget(Target): 'when installing a target') if isinstance(kwargs['install_dir'], list): - FeatureNew('multiple install_dir for custom_target', '0.40.0').use() + FeatureNew('multiple install_dir for custom_target', '0.40.0').use(self.subproject) # If an item in this list is False, the output corresponding to # the list index of that item will not be installed self.install_dir = typeslistify(kwargs['install_dir'], (str, bool)) @@ -1788,9 +1789,17 @@ class CustomTarget(Target): self.install = False self.install_dir = [None] self.install_mode = None - self.build_always = kwargs.get('build_always', False) - if not isinstance(self.build_always, bool): - raise InvalidArguments('Argument build_always must be a boolean.') + if 'build_always' in kwargs and 'build_always_stale' in kwargs: + raise InvalidArguments('build_always and build_always_stale are mutually exclusive. Combine build_by_default and build_always_stale.') + elif 'build_always' in kwargs: + mlog.deprecation('build_always is deprecated. Combine build_by_default and build_always_stale instead.') + if 'build_by_default' not in kwargs: + self.build_by_default = kwargs['build_always'] + self.build_always_stale = kwargs['build_always'] + elif 'build_always_stale' in kwargs: + self.build_always_stale = kwargs['build_always_stale'] + if not isinstance(self.build_always_stale, bool): + raise InvalidArguments('Argument build_always_stale must be a boolean.') extra_deps, depend_files = extract_as_list(kwargs, 'depends', 'depend_files', pop = False) for ed in extra_deps: while hasattr(ed, 'held_object'): @@ -1902,6 +1911,9 @@ class Jar(BuildTarget): for s in self.sources: if not s.endswith('.java'): raise InvalidArguments('Jar source %s is not a java file.' % s) + for t in self.link_targets: + if not isinstance(t, Jar): + raise InvalidArguments('Link target %s is not a jar target.' % t) self.filename = self.name + '.jar' self.outputs = [self.filename] self.java_args = kwargs.get('java_args', []) @@ -1919,6 +1931,13 @@ class Jar(BuildTarget): # All jar targets are installable. pass + def is_linkable_target(self): + return True + + def get_classpath_args(self): + cp_paths = [os.path.join(l.get_subdir(), l.get_filename()) for l in self.link_targets] + return ['-cp', os.pathsep.join(cp_paths)] + class CustomTargetIndex: """A special opaque object returned by indexing a CustomTarget. 
This object @@ -1941,7 +1960,6 @@ class CustomTargetIndex: def get_subdir(self): return self.target.get_subdir() - class ConfigureFile: def __init__(self, subdir, sourcename, targetname, configuration_data): diff --git a/mesonbuild/compilers/__init__.py b/mesonbuild/compilers/__init__.py index 849e229..bb6c9a9 100644 --- a/mesonbuild/compilers/__init__.py +++ b/mesonbuild/compilers/__init__.py @@ -27,9 +27,11 @@ __all__ = [ 'all_languages', 'base_options', - 'clike_langs', + 'clib_langs', + 'clink_langs', 'c_suffixes', 'cpp_suffixes', + 'get_macos_dylib_install_name', 'get_base_compile_args', 'get_base_link_args', 'is_assembly', @@ -40,10 +42,12 @@ __all__ = [ 'is_source', 'lang_suffixes', 'sanitizer_compile_args', - 'sort_clike', + 'sort_clink', 'ArmCCompiler', 'ArmCPPCompiler', + 'ArmclangCCompiler', + 'ArmclangCPPCompiler', 'CCompiler', 'ClangCCompiler', 'ClangCompiler', @@ -102,9 +106,11 @@ from .compilers import ( ICC_STANDARD, all_languages, base_options, - clike_langs, + clib_langs, + clink_langs, c_suffixes, cpp_suffixes, + get_macos_dylib_install_name, get_base_compile_args, get_base_link_args, is_header, @@ -115,15 +121,16 @@ from .compilers import ( is_library, lang_suffixes, sanitizer_compile_args, - sort_clike, + sort_clink, ClangCompiler, CompilerArgs, GnuCompiler, IntelCompiler, ) from .c import ( - ArmCCompiler, CCompiler, + ArmCCompiler, + ArmclangCCompiler, ClangCCompiler, GnuCCompiler, ElbrusCCompiler, @@ -131,8 +138,9 @@ from .c import ( VisualStudioCCompiler, ) from .cpp import ( - ArmCPPCompiler, CPPCompiler, + ArmCPPCompiler, + ArmclangCPPCompiler, ClangCPPCompiler, GnuCPPCompiler, ElbrusCPPCompiler, diff --git a/mesonbuild/compilers/c.py b/mesonbuild/compilers/c.py index ef67e03..af3e2c4 100644 --- a/mesonbuild/compilers/c.py +++ b/mesonbuild/compilers/c.py @@ -12,14 +12,17 @@ # See the License for the specific language governing permissions and # limitations under the License. -import subprocess, os.path, re +import re +import glob +import os.path +import subprocess from .. import mlog from .. import coredata from . import compilers from ..mesonlib import ( EnvironmentException, version_compare, Popen_safe, listify, - for_windows, for_darwin, for_cygwin, for_haiku, + for_windows, for_darwin, for_cygwin, for_haiku, for_openbsd, ) from .compilers import ( @@ -32,6 +35,7 @@ from .compilers import ( vs32_instruction_set_args, vs64_instruction_set_args, ArmCompiler, + ArmclangCompiler, ClangCompiler, Compiler, CompilerArgs, @@ -93,7 +97,7 @@ class CCompiler(Compiler): # Almost every compiler uses this for disabling warnings return ['-w'] - def get_soname_args(self, prefix, shlib_name, suffix, path, soversion, is_shared_module): + def get_soname_args(self, *args): return [] def split_shlib_to_parts(self, fname): @@ -172,7 +176,7 @@ class CCompiler(Compiler): for line in stdo.split('\n'): if line.startswith('libraries:'): libstr = line.split('=', 1)[1] - paths = [os.path.realpath(p) for p in libstr.split(':')] + paths = [os.path.realpath(p) for p in libstr.split(':') if os.path.exists(os.path.realpath(p))] return paths def get_library_dirs(self): @@ -281,7 +285,10 @@ class CCompiler(Compiler): else: cmdlist = [binary_name] mlog.debug('Running test binary command: ' + ' '.join(cmdlist)) - pe = subprocess.Popen(cmdlist) + try: + pe = subprocess.Popen(cmdlist) + except Exception as e: + raise EnvironmentException('Could not invoke sanity test executable: %s.' 
% str(e)) pe.wait() if pe.returncode != 0: raise EnvironmentException('Executables created by {0} compiler {1} are not runnable.'.format(self.language, self.name_string())) @@ -796,6 +803,22 @@ class CCompiler(Compiler): return False raise RuntimeError('BUG: {!r} check failed unexpectedly'.format(n)) + def _get_patterns(self, env, prefixes, suffixes, shared=False): + patterns = [] + for p in prefixes: + for s in suffixes: + patterns.append(p + '{}.' + s) + if shared and for_openbsd(self.is_cross, env): + # Shared libraries on OpenBSD can be named libfoo.so.X.Y: + # https://www.openbsd.org/faq/ports/specialtopics.html#SharedLibs + # + # This globbing is probably the best matching we can do since regex + # is expensive. It's wrong in many edge cases, but it will match + # correctly-named libraries and hopefully no one on OpenBSD names + # their files libfoo.so.9a.7b.1.0 + patterns.append('lib{}.so.[0-9]*.[0-9]*') + return patterns + def get_library_naming(self, env, libtype, strict=False): ''' Get library prefixes and suffixes for the target platform ordered by @@ -826,41 +849,95 @@ class CCompiler(Compiler): else: # Linux/BSDs shlibext = ['so'] + patterns = [] # Search priority if libtype in ('default', 'shared-static'): - suffixes = shlibext + stlibext + patterns += self._get_patterns(env, prefixes, shlibext, True) + patterns += self._get_patterns(env, prefixes, stlibext, False) elif libtype == 'static-shared': - suffixes = stlibext + shlibext + patterns += self._get_patterns(env, prefixes, stlibext, False) + patterns += self._get_patterns(env, prefixes, shlibext, True) elif libtype == 'shared': - suffixes = shlibext + patterns += self._get_patterns(env, prefixes, shlibext, True) elif libtype == 'static': - suffixes = stlibext + patterns += self._get_patterns(env, prefixes, stlibext, False) else: raise AssertionError('BUG: unknown libtype {!r}'.format(libtype)) - return prefixes, suffixes + return patterns + + @staticmethod + def _sort_shlibs_openbsd(libs): + filtered = [] + for lib in libs: + # Validate file as a shared library of type libfoo.so.X.Y + ret = lib.rsplit('.so.', maxsplit=1) + if len(ret) != 2: + continue + try: + float(ret[1]) + except ValueError: + continue + filtered.append(lib) + float_cmp = lambda x: float(x.rsplit('.so.', maxsplit=1)[1]) + return sorted(filtered, key=float_cmp, reverse=True) + + @classmethod + def _get_trials_from_pattern(cls, pattern, directory, libname): + f = os.path.join(directory, pattern.format(libname)) + # Globbing for OpenBSD + if '*' in pattern: + # NOTE: globbing matches directories and broken symlinks + # so we have to do an isfile test on it later + return cls._sort_shlibs_openbsd(glob.glob(f)) + return [f] + + @staticmethod + def _get_file_from_list(files): + for f in files: + if os.path.isfile(f): + return f + return None def find_library_real(self, libname, env, extra_dirs, code, libtype): # First try if we can just add the library as -l. # Gcc + co seem to prefer builtin lib dirs to -L dirs. # Only try to find std libs if no extra dirs specified. - if not extra_dirs and libtype == 'default': + if not extra_dirs: args = ['-l' + libname] if self.links(code, env, extra_args=args): return args - # Ensure that we won't modify the list that was passed to us - extra_dirs = extra_dirs[:] - # Search in the system libraries too - extra_dirs += self.get_library_dirs() # Not found or we want to use a specific libtype? Try to find the # library file itself. - prefixes, suffixes = self.get_library_naming(env, libtype) - # Triply-nested loop! 
+ patterns = self.get_library_naming(env, libtype) for d in extra_dirs: - for suffix in suffixes: - for prefix in prefixes: - trial = os.path.join(d, prefix + libname + '.' + suffix) - if os.path.isfile(trial): - return [trial] + for p in patterns: + trial = self._get_trials_from_pattern(p, d, libname) + if not trial: + continue + trial = self._get_file_from_list(trial) + if not trial: + continue + return [trial] + # Search in the system libraries too + for d in self.get_library_dirs(): + for p in patterns: + trial = self._get_trials_from_pattern(p, d, libname) + if not trial: + continue + trial = self._get_file_from_list(trial) + if not trial: + continue + # When searching the system paths used by the compiler, we + # need to check linking with link-whole, as static libs + # (.a) need to be checked to ensure they are the right + # architecture, e.g. 32bit or 64-bit. + # Just a normal test link won't work as the .a file doesn't + # seem to be checked by linker if there are no unresolved + # symbols from the main C file. + extra_link_args = self.get_link_whole_for([trial]) + extra_link_args = self.linker_to_compiler_args(extra_link_args) + if self.links(code, env, extra_args=extra_link_args): + return [trial] return None def find_library_impl(self, libname, env, extra_dirs, code, libtype): @@ -978,6 +1055,34 @@ class ClangCCompiler(ClangCompiler, CCompiler): return basic +class ArmclangCCompiler(ArmclangCompiler, CCompiler): + def __init__(self, exelist, version, is_cross, exe_wrapper=None, **kwargs): + CCompiler.__init__(self, exelist, version, is_cross, exe_wrapper, **kwargs) + ArmclangCompiler.__init__(self) + default_warn_args = ['-Wall', '-Winvalid-pch'] + self.warn_args = {'1': default_warn_args, + '2': default_warn_args + ['-Wextra'], + '3': default_warn_args + ['-Wextra', '-Wpedantic']} + + def get_options(self): + opts = CCompiler.get_options(self) + opts.update({'c_std': coredata.UserComboOption('c_std', 'C language standard to use', + ['none', 'c90', 'c99', 'c11', + 'gnu90', 'gnu99', 'gnu11'], + 'none')}) + return opts + + def get_option_compile_args(self, options): + args = [] + std = options['c_std'] + if std.value != 'none': + args.append('-std=' + std.value) + return args + + def get_option_link_args(self, options): + return [] + + class GnuCCompiler(GnuCompiler, CCompiler): def __init__(self, exelist, version, gcc_type, is_cross, exe_wrapper=None, defines=None, **kwargs): CCompiler.__init__(self, exelist, version, is_cross, exe_wrapper, **kwargs) diff --git a/mesonbuild/compilers/compilers.py b/mesonbuild/compilers/compilers.py index 4cea6d4..25835a3 100644 --- a/mesonbuild/compilers/compilers.py +++ b/mesonbuild/compilers/compilers.py @@ -48,13 +48,17 @@ lang_suffixes = { all_languages = lang_suffixes.keys() cpp_suffixes = lang_suffixes['cpp'] + ('h',) c_suffixes = lang_suffixes['c'] + ('h',) +# List of languages that by default consume and output libraries following the +# C ABI; these can generally be used interchangebly +clib_langs = ('objcpp', 'cpp', 'objc', 'c', 'fortran',) # List of languages that can be linked with C code directly by the linker # used in build.py:process_compilers() and build.py:get_dynamic_linker() -clike_langs = ('d', 'objcpp', 'cpp', 'objc', 'c', 'fortran', ) -clike_suffixes = () -for _l in clike_langs + ('vala',): - clike_suffixes += lang_suffixes[_l] -clike_suffixes += ('h', 'll', 's') +# XXX: Add Rust to this? 
+clink_langs = ('d',) + clib_langs +clink_suffixes = () +for _l in clink_langs + ('vala',): + clink_suffixes += lang_suffixes[_l] +clink_suffixes += ('h', 'll', 's') soregex = re.compile(r'.*\.so(\.[0-9]+)?(\.[0-9]+)?(\.[0-9]+)?$') @@ -68,18 +72,18 @@ cflags_mapping = {'c': 'CFLAGS', 'vala': 'VALAFLAGS', 'rust': 'RUSTFLAGS'} -# All these are only for C-like languages; see `clike_langs` above. +# All these are only for C-linkable languages; see `clink_langs` above. -def sort_clike(lang): +def sort_clink(lang): ''' Sorting function to sort the list of languages according to - reversed(compilers.clike_langs) and append the unknown langs in the end. + reversed(compilers.clink_langs) and append the unknown langs in the end. The purpose is to prefer C over C++ for files that can be compiled by both such as assembly, C, etc. Also applies to ObjC, ObjC++, etc. ''' - if lang not in clike_langs: + if lang not in clink_langs: return 1 - return -clike_langs.index(lang) + return -clink_langs.index(lang) def is_header(fname): if hasattr(fname, 'fname'): @@ -91,7 +95,7 @@ def is_source(fname): if hasattr(fname, 'fname'): fname = fname.fname suffix = fname.split('.')[-1].lower() - return suffix in clike_suffixes + return suffix in clink_suffixes def is_assembly(fname): if hasattr(fname, 'fname'): @@ -127,6 +131,12 @@ gnulike_buildtype_args = {'plain': [], 'release': ['-O3'], 'minsize': ['-Os', '-g']} +armclang_buildtype_args = {'plain': [], + 'debug': ['-O0', '-g'], + 'debugoptimized': ['-O1', '-g'], + 'release': ['-Os'], + 'minsize': ['-Oz']} + arm_buildtype_args = {'plain': [], 'debug': ['-O0', '--debug'], 'debugoptimized': ['-O1', '--debug'], @@ -699,6 +709,21 @@ class Compiler: def get_default_suffix(self): return self.default_suffix + def get_define(self, dname, prefix, env, extra_args, dependencies): + raise EnvironmentException('%s does not support get_define ' % self.get_id()) + + def compute_int(self, expression, low, high, guess, prefix, env, extra_args, dependencies): + raise EnvironmentException('%s does not support compute_int ' % self.get_id()) + + def has_members(self, typename, membernames, prefix, env, extra_args=None, dependencies=None): + raise EnvironmentException('%s does not support has_member(s) ' % self.get_id()) + + def has_type(self, typename, prefix, env, extra_args, dependencies=None): + raise EnvironmentException('%s does not support has_type ' % self.get_id()) + + def symbols_have_underscore_prefix(self, env): + raise EnvironmentException('%s does not support symbols_have_underscore_prefix ' % self.get_id()) + def get_exelist(self): return self.exelist[:] @@ -970,7 +995,9 @@ class Compiler: abs_rpaths = [os.path.join(build_dir, p) for p in rpath_paths] if build_rpath != '': abs_rpaths.append(build_rpath) - args = ['-Wl,-rpath,' + rp for rp in abs_rpaths] + # Ensure that there is enough space for large RPATHs + args = ['-Wl,-headerpad_max_install_names'] + args += ['-Wl,-rpath,' + rp for rp in abs_rpaths] return args def build_unix_rpath_args(self, build_dir, from_dir, rpath_paths, build_rpath, install_rpath): @@ -1056,7 +1083,14 @@ ICC_WIN = 2 GNU_LD_AS_NEEDED = '-Wl,--as-needed' APPLE_LD_AS_NEEDED = '-Wl,-dead_strip_dylibs' -def get_gcc_soname_args(gcc_type, prefix, shlib_name, suffix, path, soversion, is_shared_module): +def get_macos_dylib_install_name(prefix, shlib_name, suffix, soversion): + install_name = prefix + shlib_name + if soversion is not None: + install_name += '.' 
+ soversion + install_name += '.dylib' + return '@rpath/' + install_name + +def get_gcc_soname_args(gcc_type, prefix, shlib_name, suffix, soversion, is_shared_module): if soversion is None: sostr = '' else: @@ -1069,11 +1103,8 @@ def get_gcc_soname_args(gcc_type, prefix, shlib_name, suffix, path, soversion, i elif gcc_type == GCC_OSX: if is_shared_module: return [] - install_name = prefix + shlib_name - if soversion is not None: - install_name += '.' + soversion - install_name += '.dylib' - return ['-install_name', os.path.join('@rpath', install_name)] + name = get_macos_dylib_install_name(prefix, shlib_name, suffix, soversion) + return ['-install_name', name] else: raise RuntimeError('Not implemented yet.') @@ -1213,8 +1244,8 @@ class GnuCompiler: def split_shlib_to_parts(self, fname): return os.path.dirname(fname), fname - def get_soname_args(self, prefix, shlib_name, suffix, path, soversion, is_shared_module): - return get_gcc_soname_args(self.gcc_type, prefix, shlib_name, suffix, path, soversion, is_shared_module) + def get_soname_args(self, prefix, shlib_name, suffix, soversion, is_shared_module): + return get_gcc_soname_args(self.gcc_type, prefix, shlib_name, suffix, soversion, is_shared_module) def get_std_shared_lib_link_args(self): return ['-shared'] @@ -1330,7 +1361,7 @@ class ClangCompiler: # so it might change semantics at any time. return ['-include-pch', os.path.join(pch_dir, self.get_pch_name(header))] - def get_soname_args(self, prefix, shlib_name, suffix, path, soversion, is_shared_module): + def get_soname_args(self, prefix, shlib_name, suffix, soversion, is_shared_module): if self.clang_type == CLANG_STANDARD: gcc_type = GCC_STANDARD elif self.clang_type == CLANG_OSX: @@ -1339,7 +1370,7 @@ class ClangCompiler: gcc_type = GCC_MINGW else: raise MesonException('Unreachable code when converting clang type to gcc type.') - return get_gcc_soname_args(gcc_type, prefix, shlib_name, suffix, path, soversion, is_shared_module) + return get_gcc_soname_args(gcc_type, prefix, shlib_name, suffix, soversion, is_shared_module) def has_multi_arguments(self, args, env): myargs = ['-Werror=unknown-warning-option', '-Werror=unused-command-line-argument'] @@ -1389,6 +1420,80 @@ class ClangCompiler: return [] +class ArmclangCompiler: + def __init__(self): + if not self.is_cross: + raise EnvironmentException('armclang supports only cross-compilation.') + # Check whether 'armlink.exe' is available in path + self.linker_exe = 'armlink.exe' + args = '--vsn' + try: + p, stdo, stderr = Popen_safe(self.linker_exe, args) + except OSError as e: + err_msg = 'Unknown linker\nRunning "{0}" gave \n"{1}"'.format(' '.join([self.linker_exe] + [args]), e) + raise EnvironmentException(err_msg) + # Verify the armlink version + ver_str = re.search('.*Component.*', stdo) + if ver_str: + ver_str = ver_str.group(0) + else: + EnvironmentException('armlink version string not found') + # Using the regular expression from environment.search_version, + # which is used for searching compiler version + version_regex = '(?<!(\d|\.))(\d{1,2}(\.\d+)+(-[a-zA-Z0-9]+)?)' + linker_ver = re.search(version_regex, ver_str) + if linker_ver: + linker_ver = linker_ver.group(0) + if not version_compare(self.version, '==' + linker_ver): + raise EnvironmentException('armlink version does not match with compiler version') + self.id = 'armclang' + self.base_options = ['b_pch', 'b_lto', 'b_pgo', 'b_sanitize', 'b_coverage', + 'b_ndebug', 'b_staticpic', 'b_colorout'] + # Assembly + self.can_compile_suffixes.update('s') + + def 
can_linker_accept_rsp(self): + return False + + def get_pic_args(self): + # PIC support is not enabled by default for ARM, + # if users want to use it, they need to add the required arguments explicitly + return [] + + def get_colorout_args(self, colortype): + return clang_color_args[colortype][:] + + def get_buildtype_args(self, buildtype): + return armclang_buildtype_args[buildtype] + + def get_buildtype_linker_args(self, buildtype): + return arm_buildtype_linker_args[buildtype] + + # Override CCompiler.get_std_shared_lib_link_args + def get_std_shared_lib_link_args(self): + return [] + + def get_pch_suffix(self): + return 'gch' + + def get_pch_use_args(self, pch_dir, header): + # Workaround for Clang bug http://llvm.org/bugs/show_bug.cgi?id=15136 + # This flag is internal to Clang (or at least not documented on the man page) + # so it might change semantics at any time. + return ['-include-pch', os.path.join(pch_dir, self.get_pch_name(header))] + + # Override CCompiler.get_dependency_gen_args + def get_dependency_gen_args(self, outtarget, outfile): + return [] + + # Override CCompiler.build_rpath_args + def build_rpath_args(self, build_dir, from_dir, rpath_paths, build_rpath, install_rpath): + return [] + + def get_linker_exelist(self): + return [self.linker_exe] + + # Tested on linux for ICC 14.0.3, 15.0.6, 16.0.4, 17.0.1 class IntelCompiler: def __init__(self, icc_type): @@ -1422,7 +1527,7 @@ class IntelCompiler: def split_shlib_to_parts(self, fname): return os.path.dirname(fname), fname - def get_soname_args(self, prefix, shlib_name, suffix, path, soversion, is_shared_module): + def get_soname_args(self, prefix, shlib_name, suffix, soversion, is_shared_module): if self.icc_type == ICC_STANDARD: gcc_type = GCC_STANDARD elif self.icc_type == ICC_OSX: @@ -1431,7 +1536,7 @@ class IntelCompiler: gcc_type = GCC_MINGW else: raise MesonException('Unreachable code when converting icc type to gcc type.') - return get_gcc_soname_args(gcc_type, prefix, shlib_name, suffix, path, soversion, is_shared_module) + return get_gcc_soname_args(gcc_type, prefix, shlib_name, suffix, soversion, is_shared_module) # TODO: centralise this policy more globally, instead # of fragmenting it into GnuCompiler and ClangCompiler @@ -1456,6 +1561,9 @@ class IntelCompiler: else: return ['-openmp'] + def get_link_whole_for(self, args): + return GnuCompiler.get_link_whole_for(self, args) + class ArmCompiler: # Functionality that is common to all ARM family compilers. 
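The clike→clink rename and the new macOS install-name helper introduced in the compilers.py hunks above are easiest to see with a small standalone sketch. This is not part of the diff: the two definitions are copied from the hunks, and the surrounding driver lines are only illustrative.

```python
# Sketch only: definitions copied from the compilers.py hunks above.
clib_langs = ('objcpp', 'cpp', 'objc', 'c', 'fortran')
clink_langs = ('d',) + clib_langs

def sort_clink(lang):
    # Unknown languages sort last; known ones sort by reversed clink_langs,
    # so C is preferred over C++ for sources that both can compile.
    if lang not in clink_langs:
        return 1
    return -clink_langs.index(lang)

print(sorted(['cpp', 'c', 'rust', 'd', 'fortran'], key=sort_clink))
# ['fortran', 'c', 'cpp', 'd', 'rust']

def get_macos_dylib_install_name(prefix, shlib_name, suffix, soversion):
    # 'suffix' is accepted but unused, matching the hunk above.
    install_name = prefix + shlib_name
    if soversion is not None:
        install_name += '.' + soversion
    install_name += '.dylib'
    return '@rpath/' + install_name

print(get_macos_dylib_install_name('lib', 'foo', 'dylib', '3'))
# @rpath/libfoo.3.dylib
```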
diff --git a/mesonbuild/compilers/cpp.py b/mesonbuild/compilers/cpp.py index c3bb59d..7344114 100644 --- a/mesonbuild/compilers/cpp.py +++ b/mesonbuild/compilers/cpp.py @@ -27,6 +27,7 @@ from .compilers import ( ElbrusCompiler, IntelCompiler, ArmCompiler, + ArmclangCompiler, ) class CPPCompiler(CCompiler): @@ -98,6 +99,34 @@ class ClangCPPCompiler(ClangCompiler, CPPCompiler): return ['-lstdc++'] +class ArmclangCPPCompiler(ArmclangCompiler, CPPCompiler): + def __init__(self, exelist, version, is_cross, exe_wrapper=None, **kwargs): + CPPCompiler.__init__(self, exelist, version, is_cross, exe_wrapper, **kwargs) + ArmclangCompiler.__init__(self) + default_warn_args = ['-Wall', '-Winvalid-pch', '-Wnon-virtual-dtor'] + self.warn_args = {'1': default_warn_args, + '2': default_warn_args + ['-Wextra'], + '3': default_warn_args + ['-Wextra', '-Wpedantic']} + + def get_options(self): + opts = CPPCompiler.get_options(self) + opts.update({'cpp_std': coredata.UserComboOption('cpp_std', 'C++ language standard to use', + ['none', 'c++98', 'c++03', 'c++11', 'c++14', 'c++17', + 'gnu++98', 'gnu++03', 'gnu++11', 'gnu++14', 'gnu++17'], + 'none')}) + return opts + + def get_option_compile_args(self, options): + args = [] + std = options['cpp_std'] + if std.value != 'none': + args.append('-std=' + std.value) + return args + + def get_option_link_args(self, options): + return [] + + class GnuCPPCompiler(GnuCompiler, CPPCompiler): def __init__(self, exelist, version, gcc_type, is_cross, exe_wrap, defines, **kwargs): CPPCompiler.__init__(self, exelist, version, is_cross, exe_wrap, **kwargs) diff --git a/mesonbuild/compilers/cs.py b/mesonbuild/compilers/cs.py index f78e364..e17cd4e 100644 --- a/mesonbuild/compilers/cs.py +++ b/mesonbuild/compilers/cs.py @@ -41,7 +41,7 @@ class CsCompiler(Compiler): def get_link_args(self, fname): return ['-r:' + fname] - def get_soname_args(self, prefix, shlib_name, suffix, path, soversion, is_shared_module): + def get_soname_args(self, *args): return [] def get_werror_args(self): diff --git a/mesonbuild/compilers/d.py b/mesonbuild/compilers/d.py index 5cb3659..f0f3d54 100644 --- a/mesonbuild/compilers/d.py +++ b/mesonbuild/compilers/d.py @@ -89,9 +89,9 @@ class DCompiler(Compiler): def get_std_shared_lib_link_args(self): return ['-shared'] - def get_soname_args(self, prefix, shlib_name, suffix, path, soversion, is_shared_module): + def get_soname_args(self, prefix, shlib_name, suffix, soversion, is_shared_module): # FIXME: Make this work for Windows, MacOS and cross-compiling - return get_gcc_soname_args(GCC_STANDARD, prefix, shlib_name, suffix, path, soversion, is_shared_module) + return get_gcc_soname_args(GCC_STANDARD, prefix, shlib_name, suffix, soversion, is_shared_module) def get_feature_args(self, kwargs, build_to_src): res = [] @@ -212,6 +212,14 @@ class DCompiler(Compiler): for la in linkargs: dcargs.append('-L' + la.strip()) continue + elif arg.startswith('-link-defaultlib') or arg.startswith('-linker'): + # these are special arguments to the LDC linker call, + # arguments like "-link-defaultlib-shared" do *not* + # denote a library to be linked, but change the default + # Phobos/DRuntime linking behavior, while "-linker" sets the + # default linker. 
+ dcargs.append(arg) + continue elif arg.startswith('-l'): # translate library link flag dcargs.append('-L' + arg) diff --git a/mesonbuild/compilers/fortran.py b/mesonbuild/compilers/fortran.py index 11d07b8..d6e41e3 100644 --- a/mesonbuild/compilers/fortran.py +++ b/mesonbuild/compilers/fortran.py @@ -58,14 +58,14 @@ class FortranCompiler(Compiler): def get_no_warn_args(self): return CCompiler.get_no_warn_args(self) - def get_soname_args(self, prefix, shlib_name, suffix, path, soversion, is_shared_module): - return CCompiler.get_soname_args(self, prefix, shlib_name, suffix, path, soversion, is_shared_module) + def get_soname_args(self, *args): + return CCompiler.get_soname_args(self, *args) def split_shlib_to_parts(self, fname): return CCompiler.split_shlib_to_parts(self, fname) - def build_rpath_args(self, build_dir, from_dir, rpath_paths, build_rpath, install_rpath): - return CCompiler.build_rpath_args(self, build_dir, from_dir, rpath_paths, build_rpath, install_rpath) + def build_rpath_args(self, *args): + return CCompiler.build_rpath_args(self, *args) def get_dependency_gen_args(self, outtarget, outfile): return [] @@ -173,8 +173,11 @@ class FortranCompiler(Compiler): def run(self, code, env, extra_args=None, dependencies=None): return CCompiler.run(self, code, env, extra_args, dependencies) - def get_library_naming(self, env, libtype, strict=False): - return CCompiler.get_library_naming(self, env, libtype, strict) + def _get_patterns(self, *args, **kwargs): + return CCompiler._get_patterns(self, *args, **kwargs) + + def get_library_naming(self, *args, **kwargs): + return CCompiler.get_library_naming(self, *args, **kwargs) def find_library_real(self, *args): return CCompiler.find_library_real(self, *args) diff --git a/mesonbuild/compilers/java.py b/mesonbuild/compilers/java.py index a8138d7..978562c 100644 --- a/mesonbuild/compilers/java.py +++ b/mesonbuild/compilers/java.py @@ -25,7 +25,7 @@ class JavaCompiler(Compiler): self.id = 'unknown' self.javarunner = 'java' - def get_soname_args(self, prefix, shlib_name, suffix, path, soversion, is_shared_module): + def get_soname_args(self, *args): return [] def get_werror_args(self): diff --git a/mesonbuild/coredata.py b/mesonbuild/coredata.py index 4db8e4a..b26516c 100644 --- a/mesonbuild/coredata.py +++ b/mesonbuild/coredata.py @@ -22,7 +22,7 @@ from .mesonlib import default_libdir, default_libexecdir, default_prefix import ast import argparse -version = '0.47.0.dev1' +version = '0.48.0.dev1' backendlist = ['ninja', 'vs', 'vs2010', 'vs2015', 'vs2017', 'xcode'] default_yielding = False @@ -154,6 +154,8 @@ class UserArrayOption(UserOption): if isinstance(value, str): if value.startswith('['): newvalue = ast.literal_eval(value) + elif value == '': + newvalue = [] else: if self.shlex_split: newvalue = shlex.split(value) @@ -167,7 +169,7 @@ class UserArrayOption(UserOption): if len(set(newvalue)) != len(newvalue): msg = 'Duplicated values in array option "%s" is deprecated. ' \ 'This will become a hard error in the future.' 
% (self.name) - mlog.log(mlog.red('DEPRECATION:'), msg) + mlog.deprecation(msg) for i in newvalue: if not isinstance(i, str): raise MesonException('String array element "{0}" is not a string.'.format(str(newvalue))) @@ -178,6 +180,21 @@ class UserArrayOption(UserOption): ', '.join(bad), ', '.join(self.choices))) return newvalue +class UserFeatureOption(UserComboOption): + static_choices = ['enabled', 'disabled', 'auto'] + + def __init__(self, name, description, value, yielding=None): + super().__init__(name, description, self.static_choices, value, yielding) + + def is_enabled(self): + return self.value == 'enabled' + + def is_disabled(self): + return self.value == 'disabled' + + def is_auto(self): + return self.value == 'auto' + # This class contains all data that must persist over multiple # invocations of Meson. It is roughly the same thing as # cmakecache. @@ -316,7 +333,6 @@ class CoreData: 'Default project to execute in Visual Studio', '') - def get_builtin_option(self, optname): if optname in self.builtins: return self.builtins[optname].value @@ -392,7 +408,7 @@ class CoreData: if unknown_options: unknown_options = ', '.join(sorted(unknown_options)) sub = 'In subproject {}: '.format(subproject) if subproject else '' - raise MesonException('{}Unknown options: "{}"'.format(sub, unknown_options)) + mlog.warning('{}Unknown options: "{}"'.format(sub, unknown_options)) def load(build_dir): filename = os.path.join(build_dir, 'meson-private', 'coredata.dat') @@ -437,6 +453,8 @@ def get_builtin_option_choices(optname): return builtin_options[optname][2] elif builtin_options[optname][0] == UserBooleanOption: return [True, False] + elif builtin_options[optname][0] == UserFeatureOption: + return UserFeatureOption.static_choices else: return None else: @@ -549,6 +567,7 @@ builtin_options = { 'stdsplit': [UserBooleanOption, 'Split stdout and stderr in test logs.', True], 'errorlogs': [UserBooleanOption, "Whether to print the logs from failing tests.", True], 'install_umask': [UserUmaskOption, 'Default umask to apply on permissions of installed files.', '022'], + 'auto_features': [UserFeatureOption, "Override value of all 'auto' features.", 'auto'], } # Special prefix-dependent defaults for installation directories that reside in diff --git a/mesonbuild/dependencies/base.py b/mesonbuild/dependencies/base.py index c4e566b..864fd73 100644 --- a/mesonbuild/dependencies/base.py +++ b/mesonbuild/dependencies/base.py @@ -28,9 +28,9 @@ from pathlib import PurePath from .. import mlog from .. import mesonlib -from ..mesonlib import ( - MesonException, Popen_safe, version_compare_many, version_compare, listify -) +from ..compilers import clib_langs +from ..mesonlib import MesonException, OrderedSet +from ..mesonlib import Popen_safe, version_compare_many, version_compare, listify # These must be defined in this file to avoid cyclical references. 
packages = {} @@ -103,6 +103,9 @@ class Dependency: self.type_name = type_name self.compile_args = [] self.link_args = [] + # Raw -L and -l arguments without manual library searching + # If None, self.link_args will be used + self.raw_link_args = None self.sources = [] self.methods = self._process_method_kw(kwargs) @@ -113,7 +116,9 @@ class Dependency: def get_compile_args(self): return self.compile_args - def get_link_args(self): + def get_link_args(self, raw=False): + if raw and self.raw_link_args is not None: + return self.raw_link_args return self.link_args def found(self): @@ -222,6 +227,7 @@ class ExternalDependency(Dependency): self.want_cross = not kwargs['native'] else: self.want_cross = self.env.is_cross_build() + self.clib_compiler = None # Set the compiler that will be used by this dependency # This is only used for configuration checks if self.want_cross: @@ -232,19 +238,20 @@ class ExternalDependency(Dependency): # else try to pick something that looks usable. if self.language: if self.language not in compilers: - m = self.name.capitalize() + ' requires a {} compiler' + m = self.name.capitalize() + ' requires a {0} compiler, but ' \ + '{0} is not in the list of project languages' raise DependencyException(m.format(self.language.capitalize())) - self.compiler = compilers[self.language] + self.clib_compiler = compilers[self.language] else: - # Try to find a compiler that this dependency can use for compiler - # checks. It's ok if we don't find one. - for lang in ('c', 'cpp', 'objc', 'objcpp', 'fortran', 'd'): - self.compiler = compilers.get(lang, None) - if self.compiler: + # Try to find a compiler that can find C libraries for + # running compiler.find_library() + for lang in clib_langs: + self.clib_compiler = compilers.get(lang, None) + if self.clib_compiler: break def get_compiler(self): - return self.compiler + return self.clib_compiler def get_partial_dependency(self, *, compile_args=False, link_args=False, links=False, includes=False, sources=False): @@ -438,8 +445,7 @@ class PkgConfigDependency(ExternalDependency): if self.required: raise DependencyException('Pkg-config binary missing from cross file') else: - pkgname = environment.cross_info.config['binaries']['pkgconfig'] - potential_pkgbin = ExternalProgram(pkgname, silent=True) + potential_pkgbin = ExternalProgram.from_cross_info(environment.cross_info, 'pkgconfig') if potential_pkgbin.found(): self.pkgbin = potential_pkgbin PkgConfigDependency.class_pkgbin = self.pkgbin @@ -520,8 +526,12 @@ class PkgConfigDependency(ExternalDependency): self.version_reqs) def _call_pkgbin_real(self, args, env): - p, out = Popen_safe(self.pkgbin.get_command() + args, env=env)[0:2] - return p.returncode, out.strip() + cmd = self.pkgbin.get_command() + args + p, out = Popen_safe(cmd, env=env)[0:2] + rc, out = p.returncode, out.strip() + call = ' '.join(cmd) + mlog.debug("Called `{}` -> {}\n{}".format(call, rc, out)) + return rc, out def _call_pkgbin(self, args, env=None): if env is None: @@ -581,38 +591,76 @@ class PkgConfigDependency(ExternalDependency): libcmd = [self.name, '--libs'] if self.static: libcmd.append('--static') - # Force pkg-config to output -L fields even if they are system - # paths so we can do manual searching with cc.find_library() later. - env = os.environ.copy() - env['PKG_CONFIG_ALLOW_SYSTEM_LIBS'] = '1' + # Force pkg-config to output -L fields even if they are system + # paths so we can do manual searching with cc.find_library() later. 
+ env = os.environ.copy() + env['PKG_CONFIG_ALLOW_SYSTEM_LIBS'] = '1' ret, out = self._call_pkgbin(libcmd, env=env) if ret != 0: raise DependencyException('Could not generate libs for %s:\n\n%s' % (self.name, out)) - self.link_args = [] - libpaths = [] - static_libs_notfound = [] + # Also get the 'raw' output without -Lfoo system paths for usage when + # a library can't be found, and also in gnome.generate_gir + # + gnome.gtkdoc which need -L -l arguments. + ret, out_raw = self._call_pkgbin(libcmd) + if ret != 0: + raise DependencyException('Could not generate libs for %s:\n\n%s' % + (self.name, out_raw)) + link_args = [] + raw_link_args = [] + # Library paths should be safe to de-dup + libpaths = OrderedSet() + raw_libpaths = OrderedSet() + # Track -lfoo libraries to avoid duplicate work + libs_found = OrderedSet() + # Track not-found libraries to know whether to add library paths + libs_notfound = [] + libtype = 'static' if self.static else 'default' + # We always look for the file ourselves instead of depending on the + # compiler to find it with -lfoo or foo.lib (if possible) because: + # 1. We want to be able to select static or shared + # 2. We need the full path of the library to calculate RPATH values + # + # Libraries that are provided by the toolchain or are not found by + # find_library() will be added with -L -l pairs. for lib in self._convert_mingw_paths(shlex.split(out)): - # If we want to use only static libraries, we have to look for the - # file ourselves instead of depending on the compiler to find it - # with -lfoo or foo.lib. However, we can only do this if we already - # have some library paths gathered. - if self.static: - if lib.startswith('-L'): - libpaths.append(lib[2:]) + if lib.startswith(('-L-l', '-L-L')): + # These are D language arguments, add them as-is + pass + elif lib.startswith('-L'): + libpaths.add(lib[2:]) + continue + elif lib.startswith('-l'): + # Don't resolve the same -lfoo argument again + if lib in libs_found: continue - # FIXME: try to handle .la files in static mode too? - elif lib.startswith('-l'): - args = self.compiler.find_library(lib[2:], self.env, libpaths, libtype='static') - if not args or len(args) < 1: - if lib in static_libs_notfound: - continue + if self.clib_compiler: + args = self.clib_compiler.find_library(lib[2:], self.env, + list(libpaths), libtype) + # If the project only uses a non-clib language such as D, Rust, + # C#, Python, etc, all we can do is limp along by adding the + # arguments as-is and then adding the libpaths at the end. + else: + args = None + if args: + libs_found.add(lib) + # Replace -l arg with full path to library if available + # else, library is provided by the compiler and can't be resolved + if not args[0].startswith('-l'): + lib = args[0] + else: + # Library wasn't found, maybe we're looking in the wrong + # places or the library will be provided with LDFLAGS or + # LIBRARY_PATH from the environment (on macOS), and many + # other edge cases that we can't account for. 
+ # + # Add all -L paths and use it as -lfoo + if lib in libs_notfound: + continue + if self.static: mlog.warning('Static library {!r} not found for dependency {!r}, may ' 'not be statically linked'.format(lib[2:], self.name)) - static_libs_notfound.append(lib) - else: - # Replace -l arg with full path to static library - lib = args[0] + libs_notfound.append(lib) elif lib.endswith(".la"): shared_libname = self.extract_libtool_shlib(lib) shared_lib = os.path.join(os.path.dirname(lib), shared_libname) @@ -623,14 +671,24 @@ class PkgConfigDependency(ExternalDependency): raise DependencyException('Got a libtools specific "%s" dependencies' 'but we could not compute the actual shared' 'library path' % lib) - lib = shared_lib self.is_libtool = True - self.link_args.append(lib) + lib = shared_lib + if lib in link_args: + continue + link_args.append(lib) + # Also store the raw link arguments, and store raw_libpaths + for lib in self._convert_mingw_paths(shlex.split(out_raw)): + if lib.startswith('-L') and not lib.startswith(('-L-l', '-L-L')): + raw_libpaths.add(lib[2:]) + raw_link_args.append(lib) + # Set everything + self.link_args = link_args + self.raw_link_args = raw_link_args # Add all -Lbar args if we have -lfoo args in link_args - if static_libs_notfound: + if libs_notfound: # Order of -L flags doesn't matter with ld, but it might with other # linkers such as MSVC, so prepend them. - self.link_args = ['-L' + lp for lp in libpaths] + self.link_args + self.link_args = ['-L' + lp for lp in raw_libpaths] + self.link_args def get_pkgconfig_variable(self, variable_name, kwargs): options = ['--variable=' + variable_name, self.name] @@ -923,6 +981,24 @@ class ExternalProgram: return r.format(self.__class__.__name__, self.name, self.command) @staticmethod + def from_cross_info(cross_info, name): + if name not in cross_info.config['binaries']: + return NonExistingExternalProgram() + command = cross_info.config['binaries'][name] + if not isinstance(command, (list, str)): + raise MesonException('Invalid type {!r} for binary {!r} in cross file' + ''.format(command, name)) + if isinstance(command, list): + if len(command) == 1: + command = command[0] + # We cannot do any searching if the command is a list, and we don't + # need to search if the path is an absolute path. + if isinstance(command, list) or os.path.isabs(command): + return ExternalProgram(name, command=command, silent=True) + # Search for the command using the specified string! + return ExternalProgram(command, silent=True) + + @staticmethod def _shebang_to_cmd(script): """ Check if the file has a shebang and manually parse it to figure out @@ -1077,7 +1153,7 @@ class NonExistingExternalProgram(ExternalProgram): class ExternalLibrary(ExternalDependency): def __init__(self, name, link_args, environment, language, silent=False): - super().__init__('external', environment, language, {}) + super().__init__('library', environment, language, {}) self.name = name self.language = language self.is_found = False @@ -1090,7 +1166,7 @@ class ExternalLibrary(ExternalDependency): else: mlog.log('Library', mlog.bold(name), 'found:', mlog.red('NO')) - def get_link_args(self, language=None): + def get_link_args(self, language=None, **kwargs): ''' External libraries detected using a compiler must only be used with compatible code. 
For instance, Vala libraries (.vapi files) cannot be @@ -1103,7 +1179,7 @@ class ExternalLibrary(ExternalDependency): if (self.language == 'vala' and language != 'vala') or \ (language == 'vala' and self.language != 'vala'): return [] - return self.link_args + return super().get_link_args(**kwargs) def get_partial_dependency(self, *, compile_args=False, link_args=False, links=False, includes=False, sources=False): @@ -1122,6 +1198,8 @@ class ExtraFrameworkDependency(ExternalDependency): self.required = required self.detect(name, path) if self.found(): + self.compile_args = ['-I' + os.path.join(self.path, self.name, 'Headers')] + self.link_args = ['-F' + self.path, '-framework', self.name.split('.')[0]] mlog.log('Dependency', mlog.bold(name), 'found:', mlog.green('YES'), os.path.join(self.path, self.name)) else: @@ -1136,7 +1214,7 @@ class ExtraFrameworkDependency(ExternalDependency): for p in paths: for d in os.listdir(p): fullpath = os.path.join(p, d) - if lname != d.split('.')[0].lower(): + if lname != d.rsplit('.', 1)[0].lower(): continue if not stat.S_ISDIR(os.stat(fullpath).st_mode): continue @@ -1147,16 +1225,6 @@ class ExtraFrameworkDependency(ExternalDependency): if not self.found() and self.required: raise DependencyException('Framework dependency %s not found.' % (name, )) - def get_compile_args(self): - if self.found(): - return ['-I' + os.path.join(self.path, self.name, 'Headers')] - return [] - - def get_link_args(self): - if self.found(): - return ['-F' + self.path, '-framework', self.name.split('.')[0]] - return [] - def get_version(self): return 'unknown' diff --git a/mesonbuild/dependencies/boost.py b/mesonbuild/dependencies/boost.py index 62274b5..59d8070 100644 --- a/mesonbuild/dependencies/boost.py +++ b/mesonbuild/dependencies/boost.py @@ -185,7 +185,7 @@ class BoostDependency(ExternalDependency): def detect_nix_roots(self): return [os.path.abspath(os.path.join(x, '..')) - for x in self.compiler.get_default_include_dirs()] + for x in self.clib_compiler.get_default_include_dirs()] def detect_win_roots(self): res = [] @@ -243,8 +243,8 @@ class BoostDependency(ExternalDependency): # and http://stackoverflow.com/questions/37218953/isystem-on-a-system-include-directory-causes-errors # for more details - if include_dir and include_dir not in self.compiler.get_default_include_dirs(): - args.append("".join(self.compiler.get_include_args(include_dir, True))) + if include_dir and include_dir not in self.clib_compiler.get_default_include_dirs(): + args.append("".join(self.clib_compiler.get_include_args(include_dir, True))) return args def get_requested(self, kwargs): @@ -256,7 +256,7 @@ class BoostDependency(ExternalDependency): def detect_headers_and_version(self): try: - version = self.compiler.get_define('BOOST_LIB_VERSION', '#include <boost/version.hpp>', self.env, self.get_compile_args(), []) + version = self.clib_compiler.get_define('BOOST_LIB_VERSION', '#include <boost/version.hpp>', self.env, self.get_compile_args(), []) except mesonlib.EnvironmentException: return except TypeError: @@ -361,7 +361,7 @@ class BoostDependency(ExternalDependency): for module in self.requested_modules: libname = 'boost_' + module + tag - args = self.compiler.find_library(libname, self.env, self.extra_lib_dirs()) + args = self.clib_compiler.find_library(libname, self.env, self.extra_lib_dirs()) if args is None: mlog.debug("Couldn\'t find library '{}' for boost module '{}' (ABI tag = '{}')".format(libname, module, tag)) all_found = False @@ -473,10 +473,10 @@ class 
BoostDependency(ExternalDependency): return [os.path.join(self.boost_root, 'lib')] return [] - def get_link_args(self): + def get_link_args(self, **kwargs): args = [] - for dir in self.extra_lib_dirs(): - args += self.compiler.get_linker_search_args(dir) + for d in self.extra_lib_dirs(): + args += self.clib_compiler.get_linker_search_args(d) for lib in self.requested_modules: args += self.lib_modules['boost_' + lib] return args diff --git a/mesonbuild/dependencies/dev.py b/mesonbuild/dependencies/dev.py index ffd9666..4ea3385 100644 --- a/mesonbuild/dependencies/dev.py +++ b/mesonbuild/dependencies/dev.py @@ -36,8 +36,8 @@ class GTestDependency(ExternalDependency): def detect(self): self.version = '1.something_maybe' - gtest_detect = self.compiler.find_library("gtest", self.env, []) - gtest_main_detect = self.compiler.find_library("gtest_main", self.env, []) + gtest_detect = self.clib_compiler.find_library("gtest", self.env, []) + gtest_main_detect = self.clib_compiler.find_library("gtest_main", self.env, []) if gtest_detect and (not self.main or gtest_main_detect): self.is_found = True self.compile_args = [] @@ -83,7 +83,7 @@ class GMockDependency(ExternalDependency): self.version = '1.something_maybe' # GMock may be a library or just source. # Work with both. - gmock_detect = self.compiler.find_library("gmock", self.env, []) + gmock_detect = self.clib_compiler.find_library("gmock", self.env, []) if gmock_detect: self.is_found = True self.compile_args = [] @@ -261,5 +261,5 @@ class ValgrindDependency(PkgConfigDependency): def __init__(self, env, kwargs): super().__init__('valgrind', env, kwargs) - def get_link_args(self): + def get_link_args(self, **kwargs): return [] diff --git a/mesonbuild/dependencies/misc.py b/mesonbuild/dependencies/misc.py index 37195cc..745dff0 100644 --- a/mesonbuild/dependencies/misc.py +++ b/mesonbuild/dependencies/misc.py @@ -31,11 +31,9 @@ from .base import ( ConfigToolDependency, ) -from ..interpreterbase import FeatureNew class MPIDependency(ExternalDependency): - @FeatureNew('MPI Dependency', '0.42.0') def __init__(self, environment, kwargs): language = kwargs.get('language', 'c') super().__init__('mpi', environment, language, kwargs) @@ -252,13 +250,12 @@ class OpenMPDependency(ExternalDependency): '199810': '1.0', } - @FeatureNew('OpenMP Dependency', '0.46.0') def __init__(self, environment, kwargs): language = kwargs.get('language') super().__init__('openmp', environment, language, kwargs) self.is_found = False try: - openmp_date = self.compiler.get_define('_OPENMP', '', self.env, [], [self]) + openmp_date = self.clib_compiler.get_define('_OPENMP', '', self.env, [], [self]) except mesonlib.EnvironmentException as e: mlog.debug('OpenMP support not available in the compiler') mlog.debug(e) @@ -266,7 +263,7 @@ class OpenMPDependency(ExternalDependency): if openmp_date: self.version = self.VERSIONS[openmp_date] - if self.compiler.has_header('omp.h', '', self.env, dependencies=[self]): + if self.clib_compiler.has_header('omp.h', '', self.env, dependencies=[self]): self.is_found = True else: mlog.log(mlog.yellow('WARNING:'), 'OpenMP found but omp.h missing.') @@ -433,7 +430,6 @@ class Python3Dependency(ExternalDependency): class PcapDependency(ExternalDependency): - @FeatureNew('Pcap Dependency', '0.42.0') def __init__(self, environment, kwargs): super().__init__('pcap', environment, None, kwargs) @@ -470,8 +466,8 @@ class PcapDependency(ExternalDependency): @staticmethod def get_pcap_lib_version(ctdep): - return 
ctdep.compiler.get_return_value('pcap_lib_version', 'string', - '#include <pcap.h>', ctdep.env, [], [ctdep]) + return ctdep.clib_compiler.get_return_value('pcap_lib_version', 'string', + '#include <pcap.h>', ctdep.env, [], [ctdep]) class CupsDependency(ExternalDependency): @@ -517,7 +513,6 @@ class CupsDependency(ExternalDependency): class LibWmfDependency(ExternalDependency): - @FeatureNew('LibWMF Dependency', '0.44.0') def __init__(self, environment, kwargs): super().__init__('libwmf', environment, None, kwargs) diff --git a/mesonbuild/dependencies/platform.py b/mesonbuild/dependencies/platform.py index 95ab727..5f89ccb 100644 --- a/mesonbuild/dependencies/platform.py +++ b/mesonbuild/dependencies/platform.py @@ -29,7 +29,7 @@ class AppleFrameworks(ExternalDependency): if not modules: raise DependencyException("AppleFrameworks dependency requires at least one module.") self.frameworks = modules - # FIXME: Use self.compiler to check if the frameworks are available + # FIXME: Use self.clib_compiler to check if the frameworks are available for f in self.frameworks: self.link_args += ['-framework', f] diff --git a/mesonbuild/dependencies/ui.py b/mesonbuild/dependencies/ui.py index 44fdcd5..197d22c 100644 --- a/mesonbuild/dependencies/ui.py +++ b/mesonbuild/dependencies/ui.py @@ -33,7 +33,6 @@ from .base import ExternalDependency, ExternalProgram from .base import ExtraFrameworkDependency, PkgConfigDependency from .base import ConfigToolDependency -from ..interpreterbase import FeatureNew class GLDependency(ExternalDependency): def __init__(self, environment, kwargs): @@ -43,14 +42,14 @@ class GLDependency(ExternalDependency): self.is_found = True # FIXME: Use AppleFrameworks dependency self.link_args = ['-framework', 'OpenGL'] - # FIXME: Detect version using self.compiler + # FIXME: Detect version using self.clib_compiler self.version = '1' return if mesonlib.is_windows(): self.is_found = True - # FIXME: Use self.compiler.find_library() + # FIXME: Use self.clib_compiler.find_library() self.link_args = ['-lopengl32'] - # FIXME: Detect version using self.compiler + # FIXME: Detect version using self.clib_compiler self.version = '1' return @@ -291,10 +290,10 @@ class QtBaseDependency(ExternalDependency): self.bindir = os.path.join(prefix, 'bin') def _find_qmake(self, qmake): - # Even when cross-compiling, if we don't get a cross-info qmake, we + # Even when cross-compiling, if a cross-info qmake is not specified, we # fallback to using the qmake in PATH because that's what we used to do - if self.env.is_cross_build(): - qmake = self.env.cross_info.config['binaries'].get('qmake', qmake) + if self.env.is_cross_build() and 'qmake' in self.env.cross_info.config['binaries']: + return ExternalProgram.from_cross_info(self.env.cross_info, 'qmake') return ExternalProgram(qmake, silent=True) def _qmake_detect(self, mods, kwargs): @@ -516,7 +515,6 @@ class WxDependency(ConfigToolDependency): class VulkanDependency(ExternalDependency): - @FeatureNew('Vulkan Dependency', '0.42.0') def __init__(self, environment, kwargs): super().__init__('vulkan', environment, None, kwargs) @@ -547,7 +545,7 @@ class VulkanDependency(ExternalDependency): inc_path = os.path.join(self.vulkan_sdk, inc_dir) header = os.path.join(inc_path, 'vulkan', 'vulkan.h') lib_path = os.path.join(self.vulkan_sdk, lib_dir) - find_lib = self.compiler.find_library(lib_name, environment, lib_path) + find_lib = self.clib_compiler.find_library(lib_name, environment, lib_path) if not find_lib: raise DependencyException('VULKAN_SDK point to invalid 
directory (no lib)') @@ -567,8 +565,8 @@ class VulkanDependency(ExternalDependency): return else: # simply try to guess it, usually works on linux - libs = self.compiler.find_library('vulkan', environment, []) - if libs is not None and self.compiler.has_header('vulkan/vulkan.h', '', environment): + libs = self.clib_compiler.find_library('vulkan', environment, []) + if libs is not None and self.clib_compiler.has_header('vulkan/vulkan.h', '', environment): self.type_name = 'system' self.is_found = True self.version = 1 # TODO diff --git a/mesonbuild/environment.py b/mesonbuild/environment.py index 5a5c053..0aa0b32 100644 --- a/mesonbuild/environment.py +++ b/mesonbuild/environment.py @@ -40,6 +40,8 @@ from .compilers import ( from .compilers import ( ArmCCompiler, ArmCPPCompiler, + ArmclangCCompiler, + ArmclangCPPCompiler, ClangCCompiler, ClangCPPCompiler, ClangObjCCompiler, @@ -72,6 +74,21 @@ from .compilers import ( build_filename = 'meson.build' +known_cpu_families = ( + 'aarch64', + 'arm', + 'e2k', + 'ia64', + 'mips', + 'mips64', + 'parisc', + 'ppc', + 'ppc64', + 'sparc64', + 'x86', + 'x86_64' +) + def detect_gcovr(version='3.1', log=False): gcovr_exe = 'gcovr' try: @@ -194,6 +211,8 @@ def detect_cpu_family(compilers): return 'x86' if trial.startswith('arm'): return 'arm' + if trial.startswith('ppc64'): + return 'ppc64' if trial in ('amd64', 'x64'): trial = 'x86_64' if trial == 'x86_64': @@ -209,6 +228,12 @@ def detect_cpu_family(compilers): pass return 'x86_64' # Add fixes here as bugs are reported. + + if trial not in known_cpu_families: + mlog.warning('Unknown CPU family {!r}, please report this at ' + 'https://github.com/mesonbuild/meson/issues/new with the' + 'output of `uname -a` and `cat /proc/cpuinfo`'.format(trial)) + return trial def detect_cpu(compilers): @@ -428,6 +453,12 @@ class Environment: return GCC_CYGWIN return GCC_STANDARD + def warn_about_lang_pointing_to_cross(self, compiler_exe, evar): + evar_str = os.environ.get(evar, 'WHO_WOULD_CALL_THEIR_COMPILER_WITH_THIS_NAME') + if evar_str == compiler_exe: + mlog.warning('''Env var %s seems to point to the cross compiler. +This is probably wrong, it should always point to the native compiler.''' % evar) + def _get_compilers(self, lang, evar, want_cross): ''' The list of compilers is detected in the exact same way for @@ -441,6 +472,7 @@ class Environment: ccache = self.detect_ccache() else: ccache = [] + self.warn_about_lang_pointing_to_cross(compilers[0], evar) # Return value has to be a list of compiler 'choices' compilers = [compilers] is_cross = True @@ -533,6 +565,22 @@ class Environment: cls = GnuCCompiler if lang == 'c' else GnuCPPCompiler return cls(ccache + compiler, version, gtype, is_cross, exe_wrap, defines, full_version=full_version) + if 'armclang' in out: + # The compiler version is not present in the first line of output, + # instead it is present in second line, startswith 'Component:'. 
+ # So, searching for the 'Component' in out although we know it is + # present in second line, as we are not sure about the + # output format in future versions + arm_ver_str = re.search('.*Component.*', out) + if arm_ver_str is None: + popen_exceptions[' '.join(compiler)] = 'version string not found' + continue + arm_ver_str = arm_ver_str.group(0) + # Override previous values + version = search_version(arm_ver_str) + full_version = arm_ver_str + cls = ArmclangCCompiler if lang == 'c' else ArmclangCPPCompiler + return cls(ccache + compiler, version, is_cross, exe_wrap, full_version=full_version) if 'clang' in out: if 'Apple' in out or mesonlib.for_darwin(want_cross, self): cltype = CLANG_OSX @@ -953,6 +1001,10 @@ class CrossBuildInfo: res = eval(value, {'__builtins__': None}, {'true': True, 'false': False}) except Exception: raise EnvironmentException('Malformed value in cross file variable %s.' % entry) + + if entry == 'cpu_family' and res not in known_cpu_families: + mlog.warning('Unknown CPU family %s, please report this at https://github.com/mesonbuild/meson/issues/new' % value) + if self.ok_type(res): self.config[s][entry] = res elif isinstance(res, list): diff --git a/mesonbuild/interpreter.py b/mesonbuild/interpreter.py index 40ea5a2..a3430aa 100644 --- a/mesonbuild/interpreter.py +++ b/mesonbuild/interpreter.py @@ -33,8 +33,10 @@ from .modules import ModuleReturnValue import os, sys, shutil, uuid import re, shlex +import subprocess from collections import namedtuple from pathlib import PurePath +import traceback import importlib @@ -56,12 +58,62 @@ def stringifyUserArguments(args): class ObjectHolder: - def __init__(self, obj): + def __init__(self, obj, subproject=None): self.held_object = obj + self.subproject = subproject def __repr__(self): return '<Holder: {!r}>'.format(self.held_object) +class FeatureOptionHolder(InterpreterObject, ObjectHolder): + def __init__(self, env, option): + InterpreterObject.__init__(self) + ObjectHolder.__init__(self, option) + if option.is_auto(): + self.held_object = env.coredata.builtins['auto_features'] + self.name = option.name + self.methods.update({'enabled': self.enabled_method, + 'disabled': self.disabled_method, + 'auto': self.auto_method, + }) + + @noPosargs + @permittedKwargs({}) + def enabled_method(self, args, kwargs): + return self.held_object.is_enabled() + + @noPosargs + @permittedKwargs({}) + def disabled_method(self, args, kwargs): + return self.held_object.is_disabled() + + @noPosargs + @permittedKwargs({}) + def auto_method(self, args, kwargs): + return self.held_object.is_auto() + +def extract_required_kwarg(kwargs): + val = kwargs.get('required', True) + disabled = False + required = False + feature = None + if isinstance(val, FeatureOptionHolder): + option = val.held_object + feature = val.name + if option.is_disabled(): + disabled = True + elif option.is_enabled(): + required = True + elif isinstance(required, bool): + required = val + else: + raise InterpreterException('required keyword argument must be boolean or a feature option') + + # Keep boolean value in kwargs to simplify other places where this kwarg is + # checked. 
+ kwargs['required'] = required + + return disabled, required, feature class TryRunResultHolder(InterpreterObject): def __init__(self, res): @@ -95,10 +147,11 @@ class TryRunResultHolder(InterpreterObject): class RunProcess(InterpreterObject): - def __init__(self, cmd, args, source_dir, build_dir, subdir, mesonintrospect, in_builddir=False, check=False): + def __init__(self, cmd, args, source_dir, build_dir, subdir, mesonintrospect, in_builddir=False, check=False, capture=True): super().__init__() if not isinstance(cmd, ExternalProgram): raise AssertionError('BUG: RunProcess must be passed an ExternalProgram') + self.capture = capture pc, self.stdout, self.stderr = self.run_command(cmd, args, source_dir, build_dir, subdir, mesonintrospect, in_builddir, check) self.returncode = pc.returncode self.methods.update({'returncode': self.returncode_method, @@ -119,12 +172,17 @@ class RunProcess(InterpreterObject): cwd = os.path.join(source_dir, subdir) child_env = os.environ.copy() child_env.update(env) + stdout = subprocess.PIPE if self.capture else subprocess.DEVNULL mlog.debug('Running command:', ' '.join(command_array)) try: - p, o, e = Popen_safe(command_array, env=child_env, cwd=cwd) - mlog.debug('--- stdout----') - mlog.debug(o) - mlog.debug('----stderr----') + p, o, e = Popen_safe(command_array, stdout=stdout, env=child_env, cwd=cwd) + if self.capture: + mlog.debug('--- stdout ---') + mlog.debug(o) + else: + o = '' + mlog.debug('--- stdout disabled ---') + mlog.debug('--- stderr ---') mlog.debug(e) mlog.debug('') @@ -154,8 +212,8 @@ class ConfigureFileHolder(InterpreterObject, ObjectHolder): def __init__(self, subdir, sourcename, targetname, configuration_data): InterpreterObject.__init__(self) - ObjectHolder.__init__(self, build.ConfigureFile(subdir, sourcename, - targetname, configuration_data)) + obj = build.ConfigureFile(subdir, sourcename, targetname, configuration_data) + ObjectHolder.__init__(self, obj) class EnvironmentVariablesHolder(MutableInterpreterObject, ObjectHolder): @@ -198,10 +256,10 @@ class EnvironmentVariablesHolder(MutableInterpreterObject, ObjectHolder): class ConfigurationDataHolder(MutableInterpreterObject, ObjectHolder): - def __init__(self): + def __init__(self, pv): MutableInterpreterObject.__init__(self) self.used = False # These objects become immutable after use in configure_file. - ObjectHolder.__init__(self, build.ConfigurationData()) + ObjectHolder.__init__(self, build.ConfigurationData(), pv) self.methods.update({'set': self.set_method, 'set10': self.set10_method, 'set_quoted': self.set_quoted_method, @@ -219,9 +277,9 @@ class ConfigurationDataHolder(MutableInterpreterObject, ObjectHolder): def validate_args(self, args, kwargs): if len(args) == 1 and isinstance(args[0], list) and len(args[0]) == 2: - mlog.log(mlog.red('DEPRECATION:'), - '''Passing a list as the single argument to configuration_data.set is deprecated. -This will become a hard error in the future''') + mlog.deprecation('Passing a list as the single argument to ' + 'configuration_data.set is deprecated. This will ' + 'become a hard error in the future.') args = args[0] if len(args) != 2: @@ -230,6 +288,11 @@ This will become a hard error in the future''') raise InterpreterException("Can not set values on configuration object that has been used.") name = args[0] val = args[1] + if not isinstance(val, (int, str)): + msg = 'Setting a configuration data value to {!r} is invalid, ' \ + 'and will fail at configure_file(). 
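The `capture` keyword threaded through `RunProcess` above ultimately just selects between `subprocess.PIPE` and `subprocess.DEVNULL` for the child's stdout. A self-contained sketch of that choice follows; the wrapper name and the command are illustrative, not Meson API.

```python
import subprocess
import sys

def run(cmd, capture=True):
    # capture=True collects stdout; capture=False discards it entirely.
    stdout = subprocess.PIPE if capture else subprocess.DEVNULL
    p = subprocess.Popen(cmd, stdout=stdout, stderr=subprocess.PIPE,
                         universal_newlines=True)
    out, err = p.communicate()
    return p.returncode, (out or ''), err

rc, out, _ = run([sys.executable, '-c', 'print("hello")'], capture=False)
print(rc, repr(out))  # 0 ''
```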
If you are using it ' \ + 'just to store some values, please use a dict instead.' + mlog.deprecation(msg.format(val)) desc = kwargs.get('description', None) if not isinstance(name, str): raise InterpreterException("First argument to set must be a string.") @@ -272,7 +335,7 @@ This will become a hard error in the future''') return args[1] raise InterpreterException('Entry %s not in configuration data.' % name) - @FeatureNew('get_unquoted', '0.44.0') + @FeatureNew('configuration_data.get_unquoted()', '0.44.0') def get_unquoted_method(self, args, kwargs): if len(args) < 1 or len(args) > 2: raise InterpreterException('Get method takes one or two arguments.') @@ -307,9 +370,9 @@ This will become a hard error in the future''') # these wrappers. class DependencyHolder(InterpreterObject, ObjectHolder): - def __init__(self, dep): + def __init__(self, dep, pv): InterpreterObject.__init__(self) - ObjectHolder.__init__(self, dep) + ObjectHolder.__init__(self, dep, pv) self.methods.update({'found': self.found_method, 'type_name': self.type_name_method, 'version': self.version_method, @@ -351,7 +414,7 @@ class DependencyHolder(InterpreterObject, ObjectHolder): raise InterpreterException('Variable name must be a string.') return self.held_object.get_pkgconfig_variable(varname, kwargs) - @FeatureNew('get_configtool_variable', '0.44.0') + @FeatureNew('dep.get_configtool_variable', '0.44.0') @permittedKwargs({}) def configtool_method(self, args, kwargs): args = listify(args) @@ -366,12 +429,13 @@ class DependencyHolder(InterpreterObject, ObjectHolder): @noPosargs @permittedKwargs(permitted_method_kwargs['partial_dependency']) def partial_dependency_method(self, args, kwargs): - return DependencyHolder(self.held_object.get_partial_dependency(**kwargs)) + pdep = self.held_object.get_partial_dependency(**kwargs) + return DependencyHolder(pdep, self.subproject) class InternalDependencyHolder(InterpreterObject, ObjectHolder): - def __init__(self, dep): + def __init__(self, dep, pv): InterpreterObject.__init__(self) - ObjectHolder.__init__(self, dep) + ObjectHolder.__init__(self, dep, pv) self.methods.update({'found': self.found_method, 'version': self.version_method, 'partial_dependency': self.partial_dependency_method, @@ -391,7 +455,8 @@ class InternalDependencyHolder(InterpreterObject, ObjectHolder): @noPosargs @permittedKwargs(permitted_method_kwargs['partial_dependency']) def partial_dependency_method(self, args, kwargs): - return DependencyHolder(self.held_object.get_partial_dependency(**kwargs)) + pdep = self.held_object.get_partial_dependency(**kwargs) + return DependencyHolder(pdep, self.subproject) class ExternalProgramHolder(InterpreterObject, ObjectHolder): def __init__(self, ep): @@ -420,10 +485,11 @@ class ExternalProgramHolder(InterpreterObject, ObjectHolder): return self.held_object.get_name() class ExternalLibraryHolder(InterpreterObject, ObjectHolder): - def __init__(self, el): + def __init__(self, el, pv): InterpreterObject.__init__(self) - ObjectHolder.__init__(self, el) + ObjectHolder.__init__(self, el, pv) self.methods.update({'found': self.found_method, + 'type_name': self.type_name_method, 'partial_dependency': self.partial_dependency_method, }) @@ -432,6 +498,11 @@ class ExternalLibraryHolder(InterpreterObject, ObjectHolder): @noPosargs @permittedKwargs({}) + def type_name_method(self, args, kwargs): + return self.held_object.type_name + + @noPosargs + @permittedKwargs({}) def found_method(self, args, kwargs): return self.found() @@ -451,13 +522,15 @@ class 
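Several holders above now receive a `subproject` value through `ObjectHolder`, and derived objects such as partial dependencies inherit it. A reduced sketch of why the field travels along; the class bodies here are placeholders, not the real holders.

```python
class ObjectHolder:
    def __init__(self, obj, subproject=None):
        self.held_object = obj
        self.subproject = subproject

class DependencyHolder(ObjectHolder):
    def partial_dependency(self):
        # Derived holders keep the owning subproject so warnings and feature
        # checks can be attributed to the right meson.build file.
        return DependencyHolder(('partial', self.held_object), self.subproject)

dep = DependencyHolder('zlib', subproject='mysub')
print(dep.partial_dependency().subproject)  # mysub
```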
ExternalLibraryHolder(InterpreterObject, ObjectHolder): @noPosargs @permittedKwargs(permitted_method_kwargs['partial_dependency']) def partial_dependency_method(self, args, kwargs): - return DependencyHolder(self.held_object.get_partial_dependency(**kwargs)) + pdep = self.held_object.get_partial_dependency(**kwargs) + return DependencyHolder(pdep, self.subproject) class GeneratorHolder(InterpreterObject, ObjectHolder): - def __init__(self, interpreter, args, kwargs): + @FeatureNewKwargs('generator', '0.43.0', ['capture']) + def __init__(self, interp, args, kwargs): + self.interpreter = interp InterpreterObject.__init__(self) - self.interpreter = interpreter - ObjectHolder.__init__(self, build.Generator(args, kwargs)) + ObjectHolder.__init__(self, build.Generator(args, kwargs), interp.subproject) self.methods.update({'process': self.process_method}) @FeatureNewKwargs('generator.process', '0.45.0', ['preserve_path_from']) @@ -664,7 +737,7 @@ class GeneratedObjectsHolder(InterpreterObject, ObjectHolder): class TargetHolder(InterpreterObject, ObjectHolder): def __init__(self, target, interp): InterpreterObject.__init__(self) - ObjectHolder.__init__(self, target) + ObjectHolder.__init__(self, target, interp.subproject) self.interpreter = interp class BuildTargetHolder(TargetHolder): @@ -804,6 +877,10 @@ class CustomTargetHolder(TargetHolder): def __delitem__(self, index): raise InterpreterException('Cannot delete a member of a CustomTarget') + def outdir_include(self): + return IncludeDirsHolder(build.IncludeDirs('', [], False, + [os.path.join('@BUILD_ROOT@', self.interpreter.backend.get_target_dir(self.held_object))])) + class RunTargetHolder(InterpreterObject, ObjectHolder): def __init__(self, name, command, args, dependencies, subdir, subproject): InterpreterObject.__init__(self) @@ -856,10 +933,11 @@ class SubprojectHolder(InterpreterObject, ObjectHolder): return self.held_object.variables[varname] class CompilerHolder(InterpreterObject): - def __init__(self, compiler, env): + def __init__(self, compiler, env, subproject): InterpreterObject.__init__(self) self.compiler = compiler self.environment = env + self.subproject = subproject self.methods.update({'compiles': self.compiles_method, 'links': self.links_method, 'get_id': self.get_id_method, @@ -952,7 +1030,7 @@ class CompilerHolder(InterpreterObject): extra_args = mesonlib.stringlistify(kwargs.get('args', [])) deps = self.determine_dependencies(kwargs) result = self.compiler.alignment(typename, prefix, self.environment, extra_args, deps) - mlog.log('Checking for alignment of "', mlog.bold(typename), '": ', result, sep='') + mlog.log('Checking for alignment of', mlog.bold(typename, True), ':', result) return result @permittedKwargs({ @@ -984,7 +1062,7 @@ class CompilerHolder(InterpreterObject): h = mlog.green('YES') else: h = mlog.red('NO (%d)' % result.returncode) - mlog.log('Checking if "', mlog.bold(testname), '" runs: ', h, sep='') + mlog.log('Checking if', mlog.bold(testname, True), 'runs:', h) return TryRunResultHolder(result) @noPosargs @@ -1037,8 +1115,8 @@ class CompilerHolder(InterpreterObject): hadtxt = mlog.green('YES') else: hadtxt = mlog.red('NO') - mlog.log('Checking whether type "', mlog.bold(typename), - '" has member "', mlog.bold(membername), '": ', hadtxt, sep='') + mlog.log('Checking whether type', mlog.bold(typename, True), + 'has member', mlog.bold(membername, True), ':', hadtxt) return had @permittedKwargs({ @@ -1049,6 +1127,8 @@ class CompilerHolder(InterpreterObject): 'dependencies', }) def 
has_members_method(self, args, kwargs): + if len(args) < 2: + raise InterpreterException('Has_members needs at least two arguments.') check_stringlist(args) typename = args[0] membernames = args[1:] @@ -1064,8 +1144,8 @@ class CompilerHolder(InterpreterObject): else: hadtxt = mlog.red('NO') members = mlog.bold(', '.join(['"{}"'.format(m) for m in membernames])) - mlog.log('Checking whether type "', mlog.bold(typename), - '" has members ', members, ': ', hadtxt, sep='') + mlog.log('Checking whether type', mlog.bold(typename, True), + 'has members', members, ':', hadtxt) return had @permittedKwargs({ @@ -1090,7 +1170,7 @@ class CompilerHolder(InterpreterObject): hadtxt = mlog.green('YES') else: hadtxt = mlog.red('NO') - mlog.log('Checking for function "', mlog.bold(funcname), '": ', hadtxt, sep='') + mlog.log('Checking for function', mlog.bold(funcname, True), ':', hadtxt) return had @permittedKwargs({ @@ -1115,7 +1195,7 @@ class CompilerHolder(InterpreterObject): hadtxt = mlog.green('YES') else: hadtxt = mlog.red('NO') - mlog.log('Checking for type "', mlog.bold(typename), '": ', hadtxt, sep='') + mlog.log('Checking for type', mlog.bold(typename, True), ':', hadtxt) return had @FeatureNew('compiler.compute_int', '0.40.0') @@ -1222,7 +1302,7 @@ class CompilerHolder(InterpreterObject): h = mlog.green('YES') else: h = mlog.red('NO') - mlog.log('Checking if "', mlog.bold(testname), '" compiles: ', h, sep='') + mlog.log('Checking if', mlog.bold(testname, True), 'compiles:', h) return result @permittedKwargs({ @@ -1252,10 +1332,10 @@ class CompilerHolder(InterpreterObject): h = mlog.green('YES') else: h = mlog.red('NO') - mlog.log('Checking if "', mlog.bold(testname), '" links: ', h, sep='') + mlog.log('Checking if', mlog.bold(testname, True), 'links:', h) return result - @FeatureNew('check_header', '0.47.0') + @FeatureNew('compiler.check_header', '0.47.0') @permittedKwargs({ 'prefix', 'no_builtin_args', @@ -1343,9 +1423,16 @@ class CompilerHolder(InterpreterObject): libname = args[0] if not isinstance(libname, str): raise InterpreterException('Library name not a string.') - required = kwargs.get('required', True) - if not isinstance(required, bool): - raise InterpreterException('required must be boolean.') + + disabled, required, feature = extract_required_kwarg(kwargs) + if disabled: + mlog.log('Library', mlog.bold(libname), 'skipped: feature', mlog.bold(feature), 'disabled') + lib = dependencies.ExternalLibrary(libname, None, + self.environment, + self.compiler.language, + silent=True) + return ExternalLibraryHolder(lib, self.subproject) + search_dirs = mesonlib.stringlistify(kwargs.get('dirs', [])) for i in search_dirs: if not os.path.isabs(i): @@ -1355,7 +1442,7 @@ class CompilerHolder(InterpreterObject): raise InterpreterException('{} library {!r} not found'.format(self.compiler.get_display_language(), libname)) lib = dependencies.ExternalLibrary(libname, linkargs, self.environment, self.compiler.language) - return ExternalLibraryHolder(lib) + return ExternalLibraryHolder(lib, self.subproject) @permittedKwargs({}) def has_argument_method(self, args, kwargs): @@ -1397,7 +1484,7 @@ class CompilerHolder(InterpreterObject): mlog.log('First supported argument:', mlog.red('None')) return [] - @FeatureNew('has_link_argument', '0.46.0') + @FeatureNew('compiler.has_link_argument', '0.46.0') @permittedKwargs({}) def has_link_argument_method(self, args, kwargs): args = mesonlib.stringlistify(args) @@ -1405,7 +1492,7 @@ class CompilerHolder(InterpreterObject): raise 
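`find_library()` above gains a disabled-feature short circuit: when the controlling feature option is disabled, the search is skipped and a silent not-found object is returned regardless of `required`. A standalone sketch of that pattern with made-up types:

```python
class Library:
    def __init__(self, name, found):
        self.name = name
        self._found = found
    def found(self):
        return self._found

def find_library(name, required=True, feature_disabled=False):
    if feature_disabled:
        # Feature explicitly disabled: skip the search and return a silent
        # not-found object instead of raising, regardless of 'required'.
        return Library(name, found=False)
    found = name in ('m', 'dl')            # stand-in for a real compiler search
    if not found and required:
        raise RuntimeError('library {!r} not found'.format(name))
    return Library(name, found)

print(find_library('vulkan', feature_disabled=True).found())  # False
```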
InterpreterException('has_link_argument takes exactly one argument.') return self.has_multi_link_arguments_method(args, kwargs) - @FeatureNew('has_multi_link_argument', '0.46.0') + @FeatureNew('compiler.has_multi_link_argument', '0.46.0') @permittedKwargs({}) def has_multi_link_arguments_method(self, args, kwargs): args = mesonlib.stringlistify(args) @@ -1420,7 +1507,7 @@ class CompilerHolder(InterpreterObject): h) return result - @FeatureNew('get_supported_link_arguments_method', '0.46.0') + @FeatureNew('compiler.get_supported_link_arguments_method', '0.46.0') @permittedKwargs({}) def get_supported_link_arguments_method(self, args, kwargs): args = mesonlib.stringlistify(args) @@ -1430,7 +1517,7 @@ class CompilerHolder(InterpreterObject): supported_args.append(arg) return supported_args - @FeatureNew('first_supported_link_argument_method', '0.46.0') + @FeatureNew('compiler.first_supported_link_argument_method', '0.46.0') @permittedKwargs({}) def first_supported_link_argument_method(self, args, kwargs): for i in mesonlib.stringlistify(args): @@ -1626,14 +1713,14 @@ class MesonMain(InterpreterObject): else: clist = self.build.cross_compilers if cname in clist: - return CompilerHolder(clist[cname], self.build.environment) + return CompilerHolder(clist[cname], self.build.environment, self.interpreter.subproject) raise InterpreterException('Tried to access compiler for unspecified language "%s".' % cname) @noPosargs @permittedKwargs({}) def is_unity_method(self, args, kwargs): optval = self.interpreter.environment.coredata.get_builtin_option('unity') - if optval == 'on' or (optval == 'subprojects' and self.interpreter.subproject != ''): + if optval == 'on' or (optval == 'subprojects' and self.interpreter.is_subproject()): return True return False @@ -1650,7 +1737,7 @@ class MesonMain(InterpreterObject): raise InterpreterException('Argument must be a string.') self.build.dep_manifest_name = args[0] - @FeatureNew('override_find_program', '0.46.0') + @FeatureNew('meson.override_find_program', '0.46.0') @permittedKwargs({}) def override_find_program_method(self, args, kwargs): if len(args) != 2: @@ -1731,7 +1818,7 @@ permitted_kwargs = {'add_global_arguments': {'language'}, 'benchmark': {'args', 'env', 'should_fail', 'timeout', 'workdir', 'suite'}, 'build_target': known_build_target_kwargs, 'configure_file': {'input', 'output', 'configuration', 'command', 'copy', 'install_dir', 'install_mode', 'capture', 'install', 'format', 'output_format', 'encoding'}, - 'custom_target': {'input', 'output', 'command', 'install', 'install_dir', 'install_mode', 'build_always', 'capture', 'depends', 'depend_files', 'depfile', 'build_by_default'}, + 'custom_target': {'input', 'output', 'command', 'install', 'install_dir', 'install_mode', 'build_always', 'capture', 'depends', 'depend_files', 'depfile', 'build_by_default', 'build_always_stale'}, 'dependency': {'default_options', 'fallback', 'language', 'main', 'method', 'modules', 'optional_modules', 'native', 'required', 'static', 'version', 'private_headers'}, 'declare_dependency': {'include_directories', 'link_with', 'sources', 'dependencies', 'compile_args', 'link_args', 'link_whole', 'version'}, 'executable': build.known_exe_kwargs, @@ -1744,7 +1831,7 @@ permitted_kwargs = {'add_global_arguments': {'language'}, 'install_subdir': {'exclude_files', 'exclude_directories', 'install_dir', 'install_mode', 'strip_directory'}, 'jar': build.known_jar_kwargs, 'project': {'version', 'meson_version', 'default_options', 'license', 'subproject_dir'}, - 'run_command': {'check'}, 
+ 'run_command': {'check', 'capture'}, 'run_target': {'command', 'depends'}, 'shared_library': build.known_shlib_kwargs, 'shared_module': build.known_shmod_kwargs, @@ -1787,11 +1874,13 @@ class Interpreter(InterpreterBase): self.global_args_frozen = False # implies self.project_args_frozen self.subprojects = {} self.subproject_stack = [] + self.configure_file_outputs = {} # Passed from the outside, only used in subprojects. if default_project_options: self.default_project_options = default_project_options.copy() else: self.default_project_options = {} + self.project_default_options = {} self.build_func_dict() # build_def_files needs to be defined before parse_project is called self.build_def_files = [os.path.join(self.subdir, environment.build_filename)] @@ -1811,6 +1900,18 @@ class Interpreter(InterpreterBase): else: self.builtin['target_machine'] = self.builtin['host_machine'] + def get_non_matching_default_options(self): + env = self.environment + for def_opt_name, def_opt_value in self.project_default_options.items(): + for option_type in [ + env.coredata.builtins, env.coredata.compiler_options, + env.coredata.backend_options, env.coredata.base_options, + env.coredata.user_options]: + for cur_opt_name, cur_opt_value in option_type.items(): + if (def_opt_name == cur_opt_name and + def_opt_value != cur_opt_value.value): + yield (def_opt_name, def_opt_value, cur_opt_value.value) + def build_func_dict(self): self.funcs.update({'add_global_arguments': self.func_add_global_arguments, 'add_project_arguments': self.func_add_project_arguments, @@ -1885,9 +1986,9 @@ class Interpreter(InterpreterBase): elif isinstance(item, build.Data): return DataHolder(item) elif isinstance(item, dependencies.InternalDependency): - return InternalDependencyHolder(item) + return InternalDependencyHolder(item, self.subproject) elif isinstance(item, dependencies.ExternalDependency): - return DependencyHolder(item) + return DependencyHolder(item, self.subproject) elif isinstance(item, dependencies.ExternalProgram): return ExternalProgramHolder(item) elif hasattr(item, 'held_object'): @@ -2002,7 +2103,7 @@ class Interpreter(InterpreterBase): external dependencies (including libraries) must go to "dependencies".''') dep = dependencies.InternalDependency(version, incs, compile_args, link_args, libs, libs_whole, sources, final_deps) - return DependencyHolder(dep) + return DependencyHolder(dep, self.subproject) @noKwargs def func_assert(self, node, args, kwargs): @@ -2028,7 +2129,7 @@ external dependencies (including libraries) must go to "dependencies".''') if not isinstance(actual, wanted): raise InvalidArguments('Incorrect argument type.') - @FeatureNewKwargs('run_command', '0.47.0', ['check']) + @FeatureNewKwargs('run_command', '0.47.0', ['check', 'capture']) @permittedKwargs(permitted_kwargs['run_command']) def func_run_command(self, node, args, kwargs): return self.run_command_impl(node, args, kwargs) @@ -2038,6 +2139,7 @@ external dependencies (including libraries) must go to "dependencies".''') raise InterpreterException('Not enough arguments') cmd = args[0] cargs = args[1:] + capture = kwargs.get('capture', True) srcdir = self.environment.get_source_dir() builddir = self.environment.get_build_dir() @@ -2097,7 +2199,8 @@ external dependencies (including libraries) must go to "dependencies".''') if a not in self.build_def_files: self.build_def_files.append(a) return RunProcess(cmd, expanded_args, srcdir, builddir, self.subdir, - self.environment.get_build_command() + ['introspect'], in_builddir, check) + 
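`get_non_matching_default_options()` added above walks the project's `default_options` and yields every entry whose value in effect differs, so meson can print them after configuration. A simplified standalone generator doing the same comparison over plain dicts (the real code checks several coredata option tables):

```python
def non_matching_defaults(project_defaults, current_options):
    # project_defaults: {name: value requested via default_options in project()}
    # current_options:  {name: value actually in effect}
    for name, default in project_defaults.items():
        current = current_options.get(name)
        if current is not None and current != default:
            yield name, default, current

defaults = {'warning_level': '3', 'cpp_std': 'c++14'}
current = {'warning_level': '1', 'cpp_std': 'c++14'}
for name, default, cur in non_matching_defaults(defaults, current):
    print('Option', name, 'is:', cur, '[default: {}]'.format(default))
```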
self.environment.get_build_command() + ['introspect'], + in_builddir=in_builddir, check=check, capture=capture) @stringArgs def func_gettext(self, nodes, args, kwargs): @@ -2152,7 +2255,7 @@ external dependencies (including libraries) must go to "dependencies".''') self.global_args_frozen = True mlog.log() with mlog.nested(): - mlog.log('\nExecuting subproject ', mlog.bold(dirname), '.\n', sep='') + mlog.log('\nExecuting subproject', mlog.bold(dirname), '\n') subi = Interpreter(self.build, self.backend, dirname, subdir, self.subproject_dir, self.modules, default_options) subi.subprojects = self.subprojects @@ -2174,49 +2277,59 @@ external dependencies (including libraries) must go to "dependencies".''') self.build_def_files += subi.build_def_files return self.subprojects[dirname] - @stringArgs - @noKwargs - def func_get_option(self, nodes, args, kwargs): - if len(args) != 1: - raise InterpreterException('Argument required for get_option.') - undecorated_optname = optname = args[0] - if ':' in optname: - raise InterpreterException('''Having a colon in option name is forbidden, projects are not allowed -to directly access options of other subprojects.''') + def get_option_internal(self, optname): + undecorated_optname = optname try: - return self.environment.get_coredata().base_options[optname].value + return self.coredata.base_options[optname] except KeyError: pass try: - return self.environment.coredata.get_builtin_option(optname) - except RuntimeError: + return self.coredata.builtins[optname] + except KeyError: pass try: - return self.environment.coredata.compiler_options[optname].value + return self.coredata.compiler_options[optname] except KeyError: pass if not coredata.is_builtin_option(optname) and self.is_subproject(): optname = self.subproject + ':' + optname try: - opt = self.environment.coredata.user_options[optname] + opt = self.coredata.user_options[optname] if opt.yielding and ':' in optname: # If option not present in superproject, keep the original. - opt = self.environment.coredata.user_options.get(undecorated_optname, opt) - return opt.value + opt = self.coredata.user_options.get(undecorated_optname, opt) + return opt except KeyError: pass # Some base options are not defined in some environments, return the default value. try: - return compilers.base_options[optname].value + return compilers.base_options[optname] except KeyError: pass raise InterpreterException('Tried to access unknown option "%s".' % optname) + @stringArgs + @noKwargs + def func_get_option(self, nodes, args, kwargs): + if len(args) != 1: + raise InterpreterException('Argument required for get_option.') + optname = args[0] + if ':' in optname: + raise InterpreterException('Having a colon in option name is forbidden, ' + 'projects are not allowed to directly access ' + 'options of other subprojects.') + opt = self.get_option_internal(optname) + if isinstance(opt, coredata.UserFeatureOption): + return FeatureOptionHolder(self.environment, opt) + elif isinstance(opt, coredata.UserOption): + return opt.value + return opt + @noKwargs def func_configuration_data(self, node, args, kwargs): if args: raise InterpreterException('configuration_data takes no arguments') - return ConfigurationDataHolder() + return ConfigurationDataHolder(self.subproject) def set_options(self, default_options): # Set default options as if they were passed to the command line. @@ -2302,9 +2415,10 @@ to directly access options of other subprojects.''') # values previously set from command line. 
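`get_option_internal()` above consolidates the old `func_get_option` body into a single lookup that walks the option tables in a fixed priority order and returns the option object itself, so feature options can be wrapped specially by `get_option()`. A minimal sketch of that layered lookup, using plain dicts as stand-ins for the coredata tables:

```python
def get_option_internal(name, *tables):
    # Try each option table in priority order; the first hit wins and the
    # option object itself (not just its value) is returned.
    for table in tables:
        try:
            return table[name]
        except KeyError:
            continue
    raise KeyError('Tried to access unknown option {!r}.'.format(name))

base_options = {'b_lto': False}
builtins = {'prefix': '/usr/local'}
user_options = {'with_docs': 'enabled'}

print(get_option_internal('prefix', base_options, builtins, user_options))     # /usr/local
print(get_option_internal('with_docs', base_options, builtins, user_options))  # enabled
```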
That means that changing # default_options in a project will trigger a reconfigure but won't # have any effect. + self.project_default_options = mesonlib.stringlistify(kwargs.get('default_options', [])) + self.project_default_options = coredata.create_options_dict(self.project_default_options) if self.environment.first_invocation: - default_options = mesonlib.stringlistify(kwargs.get('default_options', [])) - default_options = coredata.create_options_dict(default_options) + default_options = self.project_default_options default_options.update(self.default_project_options) else: default_options = {} @@ -2336,15 +2450,16 @@ to directly access options of other subprojects.''') self.build.subproject_dir = self.subproject_dir + mesonlib.project_meson_versions[self.subproject] = '' if 'meson_version' in kwargs: cv = coredata.version pv = kwargs['meson_version'] - mesonlib.target_version = pv + mesonlib.project_meson_versions[self.subproject] = pv if not mesonlib.version_compare(cv, pv): raise InterpreterException('Meson version is %s but project requires %s.' % (cv, pv)) self.build.projects[self.subproject] = proj_name - mlog.log('Project name: ', mlog.bold(proj_name), sep='') - mlog.log('Project version: ', mlog.bold(self.project_version), sep='') + mlog.log('Project name:', mlog.bold(proj_name)) + mlog.log('Project version:', mlog.bold(self.project_version)) self.add_languages(proj_langs, True) langs = self.coredata.compilers.keys() if 'vala' in langs: @@ -2356,7 +2471,12 @@ to directly access options of other subprojects.''') @permittedKwargs(permitted_kwargs['add_languages']) @stringArgs def func_add_languages(self, node, args, kwargs): - return self.add_languages(args, kwargs.get('required', True)) + disabled, required, feature = extract_required_kwarg(kwargs) + if disabled: + for lang in sorted(args, key=compilers.sort_clink): + mlog.log('Compiler for language', mlog.bold(lang), 'skipped: feature', mlog.bold(feature), 'disabled') + return False + return self.add_languages(args, required) def get_message_string_arg(self, node): # reduce arguments again to avoid flattening posargs @@ -2383,7 +2503,7 @@ to directly access options of other subprojects.''') argstr = self.get_message_string_arg(node) mlog.log(mlog.bold('Message:'), argstr) - @FeatureNew('warning()', '0.44.0') + @FeatureNew('warning', '0.44.0') @noKwargs def func_warning(self, node, args, kwargs): argstr = self.get_message_string_arg(node) @@ -2476,7 +2596,7 @@ to directly access options of other subprojects.''') def add_languages(self, args, required): success = True need_cross_compiler = self.environment.is_cross_build() and self.environment.cross_info.need_cross_compiler() - for lang in sorted(args, key=compilers.sort_clike): + for lang in sorted(args, key=compilers.sort_clink): lang = lang.lower() if lang in self.coredata.compilers: comp = self.coredata.compilers[lang] @@ -2492,14 +2612,16 @@ to directly access options of other subprojects.''') else: raise if comp.full_version is not None: - version_string = ' (%s %s "%s")' % (comp.id, comp.version, comp.full_version) + version_string = '(%s %s "%s")' % (comp.id, comp.version, comp.full_version) else: - version_string = ' (%s %s)' % (comp.id, comp.version) - mlog.log('Native %s compiler: ' % comp.get_display_language(), mlog.bold(' '.join(comp.get_exelist())), version_string, sep='') - + version_string = '(%s %s)' % (comp.id, comp.version) + mlog.log('Native', comp.get_display_language(), 'compiler:', + mlog.bold(' '.join(comp.get_exelist())), version_string) 
self.build.add_compiler(comp) if need_cross_compiler: - mlog.log('Cross %s compiler: ' % cross_comp.get_display_language(), mlog.bold(' '.join(cross_comp.get_exelist())), ' (%s %s)' % (cross_comp.id, cross_comp.version), sep='') + version_string = '(%s %s)' % (cross_comp.id, cross_comp.version) + mlog.log('Cross', cross_comp.get_display_language(), 'compiler:', + mlog.bold(' '.join(cross_comp.get_exelist())), version_string) self.build.add_cross_compiler(cross_comp) if self.environment.is_cross_build() and not need_cross_compiler: self.build.add_cross_compiler(comp) @@ -2524,19 +2646,18 @@ to directly access options of other subprojects.''') self.emit_base_options_warnings(enabled_opts) def program_from_cross_file(self, prognames, silent=False): - bins = self.environment.cross_info.config['binaries'] + cross_info = self.environment.cross_info for p in prognames: if hasattr(p, 'held_object'): p = p.held_object if isinstance(p, mesonlib.File): continue # Always points to a local (i.e. self generated) file. if not isinstance(p, str): - raise InterpreterException('Executable name must be a string.') - if p in bins: - exename = bins[p] - extprog = dependencies.ExternalProgram(exename, silent=silent) - progobj = ExternalProgramHolder(extprog) - return progobj + raise InterpreterException('Executable name must be a string') + prog = ExternalProgram.from_cross_info(cross_info, p) + if prog.found(): + return ExternalProgramHolder(prog) + return None def program_from_system(self, args, silent=False): # Search for scripts relative to current subdir. @@ -2610,7 +2731,12 @@ to directly access options of other subprojects.''') def func_find_program(self, node, args, kwargs): if not args: raise InterpreterException('No program name specified.') - required = kwargs.get('required', True) + + disabled, required, feature = extract_required_kwarg(kwargs) + if disabled: + mlog.log('Program', mlog.bold(' '.join(args)), 'skipped: feature', mlog.bold(feature), 'disabled') + return ExternalProgramHolder(dependencies.NonExistingExternalProgram()) + if not isinstance(required, bool): raise InvalidArguments('"required" argument must be a boolean.') use_native = kwargs.get('native', False) @@ -2700,20 +2826,35 @@ to directly access options of other subprojects.''') 'dep {}'.format(found, dirname, wanted, name)) return None + def _handle_featurenew_dependencies(self, name): + 'Do a feature check on dependencies used by this subproject' + if name == 'mpi': + FeatureNew('MPI Dependency', '0.42.0').use(self.subproject) + elif name == 'pcap': + FeatureNew('Pcap Dependency', '0.42.0').use(self.subproject) + elif name == 'vulkan': + FeatureNew('Vulkan Dependency', '0.42.0').use(self.subproject) + elif name == 'libwmf': + FeatureNew('LibWMF Dependency', '0.44.0').use(self.subproject) + elif name == 'openmp': + FeatureNew('OpenMP Dependency', '0.46.0').use(self.subproject) + @FeatureNewKwargs('dependency', '0.40.0', ['method']) @FeatureNewKwargs('dependency', '0.38.0', ['default_options']) @permittedKwargs(permitted_kwargs['dependency']) def func_dependency(self, node, args, kwargs): self.validate_arguments(args, 1, [str]) - required = kwargs.get('required', True) - if not isinstance(required, bool): - raise DependencyException('Keyword "required" must be a boolean.') name = args[0] + display_name = name if name else '(anonymous)' - if name == '': - if required: - raise InvalidArguments('Dependency is both required and not-found') - return DependencyHolder(NotFoundDependency(self.environment)) + disabled, required, feature = 
extract_required_kwarg(kwargs) + if disabled: + mlog.log('Dependency', mlog.bold(display_name), 'skipped: feature', mlog.bold(feature), 'disabled') + return DependencyHolder(NotFoundDependency(self.environment), self.subproject) + + # writing just "dependency('')" is an error, because it can only fail + if name == '' and required and 'fallback' not in kwargs: + raise InvalidArguments('Dependency is both required and not-found') if '<' in name or '>' in name or '=' in name: raise InvalidArguments('Characters <, > and = are forbidden in dependency names. To specify' @@ -2723,7 +2864,7 @@ to directly access options of other subprojects.''') if cached_dep: if required and not cached_dep.found(): m = 'Dependency {!r} was already checked and was not found' - raise DependencyException(m.format(name)) + raise DependencyException(m.format(display_name)) dep = cached_dep else: # If the dependency has already been configured, possibly by @@ -2739,18 +2880,22 @@ to directly access options of other subprojects.''') exception = None dep = NotFoundDependency(self.environment) - # Search for it outside the project - if self.coredata.wrap_mode != WrapMode.forcefallback or 'fallback' not in kwargs: + # Unless a fallback exists and is forced ... + if self.coredata.wrap_mode == WrapMode.forcefallback and 'fallback' in kwargs: + pass + # ... search for it outside the project + elif name != '': + self._handle_featurenew_dependencies(name) try: dep = dependencies.find_external_dependency(name, self.environment, kwargs) except DependencyException as e: exception = e - else: - exception = DependencyException("fallback for %s not found" % name) # Search inside the projects list if not dep.found(): if 'fallback' in kwargs: + if not exception: + exception = DependencyException("fallback for %s not found" % display_name) fallback_dep = self.dependency_fallback(name, kwargs) if fallback_dep: # Never add fallback deps to self.coredata.deps since we @@ -2764,7 +2909,7 @@ to directly access options of other subprojects.''') # Only store found-deps in the cache if dep.found(): self.coredata.deps[identifier] = dep - return DependencyHolder(dep) + return DependencyHolder(dep, self.subproject) @FeatureNew('disabler', '0.44.0') @noKwargs @@ -2802,17 +2947,18 @@ root and issuing %s. return fbinfo def dependency_fallback(self, name, kwargs): + display_name = name if name else '(anonymous)' if self.coredata.wrap_mode in (WrapMode.nofallback, WrapMode.nodownload): mlog.log('Not looking for a fallback subproject for the dependency', - mlog.bold(name), 'because:\nUse of fallback' + mlog.bold(display_name), 'because:\nUse of fallback' 'dependencies is disabled.') return None elif self.coredata.wrap_mode == WrapMode.forcefallback: mlog.log('Looking for a fallback subproject for the dependency', - mlog.bold(name), 'because:\nUse of fallback dependencies is forced.') + mlog.bold(display_name), 'because:\nUse of fallback dependencies is forced.') else: mlog.log('Looking for a fallback subproject for the dependency', - mlog.bold(name)) + mlog.bold(display_name)) dirname, varname = self.get_subproject_infos(kwargs) # Try to execute the subproject try: @@ -2828,9 +2974,14 @@ root and issuing %s. # If the subproject execution failed in a non-fatal way, don't raise an # exception; let the caller handle things. 
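The reworked `func_dependency()` flow above only skips the external search when a fallback is both declared and forced, and otherwise tries the fallback subproject after the external lookup fails. A condensed standalone sketch of that ordering; `WrapMode` here is a stand-in enum and the two lookup callables are placeholders for the real dependency machinery.

```python
from enum import Enum

class WrapMode(Enum):
    default = 1
    forcefallback = 2

def lookup(name, kwargs, wrap_mode, find_external, find_fallback):
    dep = None
    # Unless a fallback exists and is forced, search outside the project first.
    if not (wrap_mode is WrapMode.forcefallback and 'fallback' in kwargs):
        if name:
            dep = find_external(name)
    # Still not found: try the fallback subproject when one is declared.
    if dep is None and 'fallback' in kwargs:
        dep = find_fallback(kwargs['fallback'])
    return dep

dep = lookup('zlib', {'fallback': ['zlib', 'zlib_dep']}, WrapMode.forcefallback,
             find_external=lambda n: None,
             find_fallback=lambda fb: 'dependency from subproject {!r}'.format(fb[0]))
print(dep)
```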
except Exception as e: - mlog.log('Couldn\'t use fallback subproject in', - mlog.bold(os.path.join(self.subproject_dir, dirname)), - 'for the dependency', mlog.bold(name), '\nReason:', str(e)) + msg = ['Couldn\'t use fallback subproject in', + mlog.bold(os.path.join(self.subproject_dir, dirname)), + 'for the dependency', mlog.bold(display_name), '\nReason:'] + if isinstance(e, mesonlib.MesonException): + msg.append(e.get_msg_with_context()) + else: + msg.append(traceback.format_exc()) + mlog.log(*msg) return None dep = self.get_subproject_dep(name, dirname, varname, kwargs.get('required', True)) if not dep: @@ -2842,10 +2993,10 @@ root and issuing %s. found = dep.version_method([], {}) if not self.check_subproject_version(wanted, found): mlog.log('Subproject', mlog.bold(subproj_path), 'dependency', - mlog.bold(varname), 'version is', mlog.bold(found), + mlog.bold(display_name), 'version is', mlog.bold(found), 'but', mlog.bold(wanted), 'is required.') return None - mlog.log('Dependency', mlog.bold(name), 'from subproject', + mlog.log('Dependency', mlog.bold(display_name), 'from subproject', mlog.bold(subproj_path), 'found:', mlog.green('YES')) return dep @@ -2868,7 +3019,7 @@ root and issuing %s. def func_both_lib(self, node, args, kwargs): return self.build_both_libraries(node, args, kwargs) - @FeatureNew('Shared Modules', '0.37.0') + @FeatureNew('shared_module', '0.37.0') @permittedKwargs(permitted_kwargs['shared_module']) def func_shared_module(self, node, args, kwargs): return self.build_target(node, args, kwargs, SharedModuleHolder) @@ -2907,7 +3058,7 @@ root and issuing %s. if 'input' not in kwargs or 'output' not in kwargs: raise InterpreterException('Keyword arguments input and output must exist') if 'fallback' not in kwargs: - FeatureNew('Optional fallback in vcs_tag', '0.41.0').use() + FeatureNew('Optional fallback in vcs_tag', '0.41.0').use(self.subproject) fallback = kwargs.pop('fallback', self.project_version) if not isinstance(fallback, str): raise InterpreterException('Keyword argument fallback must be a string.') @@ -2938,7 +3089,8 @@ root and issuing %s. source_dir, replace_string, regex_selector] + vcs_cmd - kwargs.setdefault('build_always', True) + kwargs.setdefault('build_by_default', True) + kwargs.setdefault('build_always_stale', True) return self.func_custom_target(node, [kwargs['output']], kwargs) @FeatureNew('subdir_done', '0.46.0') @@ -2951,14 +3103,14 @@ root and issuing %s. raise SubdirDoneRequest() @stringArgs - @FeatureNewKwargs('custom_target', '0.47.0', ['install_mode']) + @FeatureNewKwargs('custom_target', '0.47.0', ['install_mode', 'build_always_stale']) @FeatureNewKwargs('custom_target', '0.40.0', ['build_by_default']) @permittedKwargs(permitted_kwargs['custom_target']) def func_custom_target(self, node, args, kwargs): if len(args) != 1: raise InterpreterException('custom_target: Only one positional argument is allowed, and it must be a string name') if 'depfile' in kwargs and ('@BASENAME@' in kwargs['depfile'] or '@PLAINNAME@' in kwargs['depfile']): - FeatureNew('substitutions in custom_target depfile', '0.47.0').use() + FeatureNew('substitutions in custom_target depfile', '0.47.0').use(self.subproject) name = args[0] kwargs['install_mode'] = self._get_kwarg_install_mode(kwargs) tg = CustomTargetHolder(build.CustomTarget(name, self.subdir, self.subproject, kwargs), self) @@ -2982,6 +3134,8 @@ root and issuing %s. 
if not isinstance(i, (str, build.BuildTarget, build.CustomTarget, dependencies.ExternalProgram, mesonlib.File)): mlog.debug('Wrong type:', str(i)) raise InterpreterException('Invalid argument to run_target.') + if isinstance(i, dependencies.ExternalProgram) and not i.found(): + raise InterpreterException('Tried to use non-existing executable {!r}'.format(i.name)) cleaned_args.append(i) name = args[0] if not isinstance(name, str): @@ -3079,10 +3233,10 @@ root and issuing %s. env, should_fail, timeout, workdir) if is_base_test: self.build.tests.append(t) - mlog.debug('Adding test "', mlog.bold(args[0]), '".', sep='') + mlog.debug('Adding test', mlog.bold(args[0], True)) else: self.build.benchmarks.append(t) - mlog.debug('Adding benchmark "', mlog.bold(args[0]), '".', sep='') + mlog.debug('Adding benchmark', mlog.bold(args[0], True)) @FeatureNewKwargs('install_headers', '0.47.0', ['install_mode']) @permittedKwargs(permitted_kwargs['install_headers']) @@ -3306,8 +3460,16 @@ root and issuing %s. raise InterpreterException('@INPUT@ used as command argument, but no input file specified.') # Validate output output = kwargs['output'] + ofile_rpath = os.path.join(self.subdir, output) if not isinstance(output, str): raise InterpreterException('Output file name must be a string') + if ofile_rpath in self.configure_file_outputs: + mesonbuildfile = os.path.join(self.subdir, 'meson.build') + current_call = "{}:{}".format(mesonbuildfile, self.current_lineno) + first_call = "{}:{}".format(mesonbuildfile, self.configure_file_outputs[ofile_rpath]) + mlog.warning('Output file', mlog.bold(ofile_rpath, True), 'for configure_file() at', current_call, 'overwrites configure_file() output at', first_call) + else: + self.configure_file_outputs[ofile_rpath] = self.current_lineno if ifile_abs: values = mesonlib.get_filenames_templates_dict([ifile_abs], None) outputs = mesonlib.substitute_values([output], values) @@ -3316,20 +3478,6 @@ root and issuing %s. raise InterpreterException('Output file name must not contain a subdirectory.') (ofile_path, ofile_fname) = os.path.split(os.path.join(self.subdir, output)) ofile_abs = os.path.join(self.environment.build_dir, ofile_path, ofile_fname) - # Optimize copies by not doing substitution if there's nothing to - # substitute, and warn about this legacy hack - if 'configuration' in kwargs: - conf = kwargs['configuration'] - if not isinstance(conf, ConfigurationDataHolder): - raise InterpreterException('Argument "configuration" must be of type configuration_data') - if ifile_abs and not conf.keys(): - del kwargs['configuration'] - kwargs['copy'] = True - mlog.warning('Got an empty configuration_data() object: ' - 'optimizing copy automatically; if you want to ' - 'copy a file to the build dir, use the \'copy:\' ' - 'keyword argument added in 0.47.0', location=node) - conf.mark_used() # Perform the appropriate action if 'configuration' in kwargs: conf = kwargs['configuration'] @@ -3339,15 +3487,21 @@ root and issuing %s. 
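`configure_file()` above starts recording each output path together with the meson.build line that produced it, so a later call writing the same file triggers a warning instead of silently clobbering the first result. A small standalone sketch of that bookkeeping; the registry class is invented for illustration.

```python
import os
import warnings

class OutputRegistry:
    def __init__(self):
        self.outputs = {}   # relative output path -> line of the first declaration

    def register(self, subdir, output, lineno):
        rpath = os.path.join(subdir, output)
        if rpath in self.outputs:
            warnings.warn('Output file {!r} at line {} overwrites the output '
                          'first declared at line {}'
                          .format(rpath, lineno, self.outputs[rpath]))
        else:
            self.outputs[rpath] = lineno

reg = OutputRegistry()
reg.register('src', 'config.h', lineno=12)
reg.register('src', 'config.h', lineno=40)   # second declaration triggers the warning
```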
if inputfile is not None: os.makedirs(os.path.join(self.environment.build_dir, self.subdir), exist_ok=True) file_encoding = kwargs.setdefault('encoding', 'utf-8') - missing_variables = mesonlib.do_conf_file(ifile_abs, ofile_abs, - conf.held_object, fmt, - file_encoding) + missing_variables, confdata_useless = \ + mesonlib.do_conf_file(ifile_abs, ofile_abs, conf.held_object, + fmt, file_encoding) if missing_variables: var_list = ", ".join(map(repr, sorted(missing_variables))) mlog.warning( - "The variable(s) %s in the input file %s are not " + "The variable(s) %s in the input file '%s' are not " "present in the given configuration data." % ( var_list, inputfile), location=node) + if confdata_useless: + ifbase = os.path.basename(ifile_abs) + mlog.warning('Got an empty configuration_data() object and found no ' + 'substitutions in the input file {!r}. If you want to ' + 'copy a file to the build dir, use the \'copy:\' keyword ' + 'argument added in 0.47.0'.format(ifbase), location=node) else: mesonlib.dump_conf_header(ofile_abs, conf.held_object, output_format) conf.mark_used() @@ -3497,7 +3651,7 @@ different subdirectory. self.add_project_arguments(node, self.build.projects_link_args, args, kwargs) def add_global_arguments(self, node, argsdict, args, kwargs): - if self.subproject != '': + if self.is_subproject(): msg = 'Function \'{}\' cannot be used in subprojects because ' \ 'there is no way to make that reliable.\nPlease only call ' \ 'this if is_subproject() returns false. Alternatively, ' \ @@ -3541,8 +3695,27 @@ different subdirectory. def run(self): super().run() mlog.log('Build targets in project:', mlog.bold(str(len(self.build.targets)))) - FeatureNew.called_features_report() - FeatureDeprecated.called_features_report() + FeatureNew.report(self.subproject) + FeatureDeprecated.report(self.subproject) + if not self.is_subproject(): + self.print_extra_warnings() + + def print_extra_warnings(self): + for c in self.build.compilers.values(): + if c.get_id() == 'clang': + self.check_clang_asan_lundef() + break + + def check_clang_asan_lundef(self): + if 'b_lundef' not in self.coredata.base_options: + return + if 'b_sanitize' not in self.coredata.base_options: + return + if 'address' in self.coredata.base_options['b_sanitize'].value: + if self.coredata.base_options['b_lundef'].value: + mlog.warning('''Trying to use address sanitizer on Clang with b_lundef. +This will probably not work. +Try setting b_lundef to false instead.''') def evaluate_subproject_info(self, path_from_source_root, subproject_dirname): depth = 0 @@ -3583,7 +3756,7 @@ different subdirectory. (num_sps, sproj_name) = self.evaluate_subproject_info(norm, self.subproject_dir) plain_filename = os.path.basename(norm) if num_sps == 0: - if self.subproject == '': + if not self.is_subproject(): return raise InterpreterException('Sandbox violation: Tried to grab file %s from a different subproject.' % plain_filename) if num_sps > 1: @@ -3666,6 +3839,14 @@ different subdirectory. 
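`check_clang_asan_lundef()` above cross-checks two base options at the end of configuration: address sanitizer combined with `b_lundef` on Clang is flagged as a combination that usually fails to link. A standalone sketch of the same check over a plain option dict (the real code reads coredata option objects):

```python
def check_clang_asan_lundef(compiler_id, base_options):
    # base_options is a plain dict here, e.g. {'b_sanitize': 'address', 'b_lundef': True}
    if compiler_id != 'clang':
        return None
    if 'b_lundef' not in base_options or 'b_sanitize' not in base_options:
        return None
    if 'address' in base_options['b_sanitize'] and base_options['b_lundef']:
        return ('Trying to use address sanitizer on Clang with b_lundef. '
                'This will probably not work; try setting b_lundef to false.')
    return None

print(check_clang_asan_lundef('clang', {'b_sanitize': 'address', 'b_lundef': True}))
```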
raise InterpreterException('Unknown default_library value: %s.', default_library) def build_target(self, node, args, kwargs, targetholder): + @FeatureNewKwargs('build target', '0.42.0', ['rust_crate_type', 'build_rpath', 'implicit_include_directories']) + @FeatureNewKwargs('build target', '0.41.0', ['rust_args']) + @FeatureNewKwargs('build target', '0.40.0', ['build_by_default']) + def build_target_decorator_caller(self, node, args, kwargs): + return True + + build_target_decorator_caller(self, node, args, kwargs) + if not args: raise InterpreterException('Target does not have a name.') name = args[0] diff --git a/mesonbuild/interpreterbase.py b/mesonbuild/interpreterbase.py index 1c61345..64177ab 100644 --- a/mesonbuild/interpreterbase.py +++ b/mesonbuild/interpreterbase.py @@ -31,25 +31,64 @@ def check_stringlist(a, msg='Arguments must be strings.'): mlog.debug('Element not a string:', str(a)) raise InvalidArguments(msg) -def _get_callee_args(wrapped_args): - # Functions have 4 positional args and methods have 3. +def _get_callee_args(wrapped_args, want_subproject=False): s = wrapped_args[0] - if len(wrapped_args) == 3: - node_or_state = None + n = len(wrapped_args) + # Raise an error if the codepaths are not there + subproject = None + if want_subproject and n == 2: + if hasattr(s, 'subproject'): + # Interpreter base types have 2 args: self, node + node_or_state = wrapped_args[1] + # args and kwargs are inside the node + args = None + kwargs = None + subproject = s.subproject + elif hasattr(wrapped_args[1], 'subproject'): + # Module objects have 2 args: self, interpreter + node_or_state = wrapped_args[1] + # args and kwargs are inside the node + args = None + kwargs = None + subproject = wrapped_args[1].subproject + else: + raise AssertionError('Unknown args: {!r}'.format(wrapped_args)) + elif n == 3: + # Methods on objects (*Holder, MesonMain, etc) have 3 args: self, args, kwargs + node_or_state = None # FIXME args = wrapped_args[1] kwargs = wrapped_args[2] - elif len(wrapped_args) == 4: + if want_subproject: + if hasattr(s, 'subproject'): + subproject = s.subproject + elif hasattr(s, 'interpreter'): + subproject = s.interpreter.subproject + elif n == 4: + # Meson functions have 4 args: self, node, args, kwargs + # Module functions have 4 args: self, state, args, kwargs; except, + # PythonInstallation methods have self, interpreter, args, kwargs node_or_state = wrapped_args[1] args = wrapped_args[2] kwargs = wrapped_args[3] + if want_subproject: + if isinstance(s, InterpreterBase): + subproject = s.subproject + else: + subproject = node_or_state.subproject + elif n == 5: + # Module snippets have 5 args: self, interpreter, state, args, kwargs + node_or_state = wrapped_args[2] + args = wrapped_args[3] + kwargs = wrapped_args[4] + if want_subproject: + subproject = node_or_state.subproject else: - raise InvalidArguments('Expecting 3 or 4 args, got {}'.format(len(wrapped_args))) - + raise AssertionError('Unknown args: {!r}'.format(wrapped_args)) # Sometimes interpreter methods are called internally with None instead of # empty list/dict args = args if args is not None else [] kwargs = kwargs if kwargs is not None else {} - return s, node_or_state, args, kwargs + return s, node_or_state, args, kwargs, subproject def flatten(args): if isinstance(args, mparser.StringNode): @@ -106,7 +145,7 @@ class permittedKwargs: def __call__(self, f): @wraps(f) def wrapped(*wrapped_args, **wrapped_kwargs): - s, node_or_state, args, kwargs = _get_callee_args(wrapped_args) + s, node_or_state, args, kwargs, _ 
= _get_callee_args(wrapped_args) loc = types.SimpleNamespace() if hasattr(s, 'subdir'): loc.subdir = s.subdir @@ -123,83 +162,93 @@ class permittedKwargs: return f(*wrapped_args, **wrapped_kwargs) return wrapped -class FeatureNew: - """Checks for new features""" - # Shared across all instances - feature_versions = dict() + +class FeatureCheckBase: + "Base class for feature version checks" def __init__(self, feature_name, version): self.feature_name = feature_name self.feature_version = version - def add_called_feature(self): - if self.feature_version not in self.feature_versions: - self.feature_versions[self.feature_version] = set() - self.feature_versions[self.feature_version].add(self.feature_name) - - def called_features_report(): - fv = FeatureNew.feature_versions - if fv: - print('Minimum version of features used:') - for version in sorted(fv.keys()): - print('{}: {}'.format(version, fv[version])) - - def use(self): - self.add_called_feature() - tv = mesonlib.target_version + @staticmethod + def get_target_version(subproject): + return mesonlib.project_meson_versions[subproject] + + def use(self, subproject): + tv = self.get_target_version(subproject) + # No target version if tv == '': return - if not mesonlib.version_compare_condition_with_min(tv, self.feature_version): - mlog.warning( - '''Project targetting \'{}\' but tried to use feature introduced in \'{}\': {}''' - .format(tv, self.feature_version, self.feature_name)) + # Target version is new enough + if mesonlib.version_compare_condition_with_min(tv, self.feature_version): + return + # Feature is too new for target version, register it + if subproject not in self.feature_registry: + self.feature_registry[subproject] = {self.feature_version: set()} + register = self.feature_registry[subproject] + if self.feature_version not in register: + register[self.feature_version] = set() + if self.feature_name in register[self.feature_version]: + # Don't warn about the same feature multiple times + # FIXME: This is needed to prevent duplicate warnings, but also + # means we won't warn about a feature used in multiple places. 
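`FeatureCheckBase.use()` above registers a feature only when the project's declared `meson_version` is too old for it, keyed by subproject and de-duplicated per feature name. A compact standalone sketch of that registry, with versions simplified to tuples instead of Meson's condition strings:

```python
class FeatureRegistry:
    # {subproject: {introduced_in: set(feature names)}}, shared across instances
    registry = {}

    @classmethod
    def use(cls, subproject, target_version, feature, introduced_in):
        if not target_version:
            return                    # project declares no meson_version
        if target_version >= introduced_in:
            return                    # declared target is new enough
        per_sub = cls.registry.setdefault(subproject, {})
        names = per_sub.setdefault(introduced_in, set())
        if feature in names:
            return                    # warn about each feature only once
        names.add(feature)
        print('Project targets {} but uses {} (introduced in {})'
              .format(target_version, feature, introduced_in))

FeatureRegistry.use('', (0, 40, 0), 'disabler', (0, 44, 0))
FeatureRegistry.use('', (0, 40, 0), 'disabler', (0, 44, 0))   # silent the second time
```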
+ return + register[self.feature_version].add(self.feature_name) + self.log_usage_warning(tv) + + @classmethod + def report(cls, subproject): + if subproject not in cls.feature_registry: + return + warning_str = cls.get_warning_str_prefix(cls.get_target_version(subproject)) + fv = cls.feature_registry[subproject] + for version in sorted(fv.keys()): + warning_str += '\n * {}: {}'.format(version, fv[version]) + mlog.warning(warning_str) def __call__(self, f): @wraps(f) def wrapped(*wrapped_args, **wrapped_kwargs): - self.use() + subproject = _get_callee_args(wrapped_args, want_subproject=True)[4] + if subproject is None: + raise AssertionError('{!r}'.format(wrapped_args)) + self.use(subproject) return f(*wrapped_args, **wrapped_kwargs) return wrapped -class FeatureDeprecated: +class FeatureNew(FeatureCheckBase): + """Checks for new features""" + # Class variable, shared across all instances + # + # Format: {subproject: {feature_version: set(feature_names)}} + feature_registry = {} + + @staticmethod + def get_warning_str_prefix(tv): + return 'Project specifies a minimum meson_version \'{}\' which conflicts with:'.format(tv) + + def log_usage_warning(self, tv): + mlog.warning('Project targetting \'{}\' but tried to use feature introduced ' + 'in \'{}\': {}'.format(tv, self.feature_version, self.feature_name)) + +class FeatureDeprecated(FeatureCheckBase): """Checks for deprecated features""" - # Shared across all instances - feature_versions = dict() + # Class variable, shared across all instances + # + # Format: {subproject: {feature_version: set(feature_names)}} + feature_registry = {} - def __init__(self, feature_name, version): - self.feature_name = feature_name - self.feature_version = version + @staticmethod + def get_warning_str_prefix(tv): + return 'Deprecated features used:' - def add_called_feature(self): - if self.feature_version not in self.feature_versions: - self.feature_versions[self.feature_version] = set() - self.feature_versions[self.feature_version].add(self.feature_name) - - def called_features_report(): - fv = FeatureDeprecated.feature_versions - if fv: - print('Deprecated features used:') - for version in sorted(fv.keys()): - print('{}: {}'.format(version, fv[version])) - - def use(self): - self.add_called_feature() - tv = mesonlib.target_version - if tv == '': - return - if not mesonlib.version_compare_condition_with_max(tv, self.feature_version): - mlog.warning( - '''Project targetting \'{}\' but tried to use feature deprecated since \'{}\': {}''' - .format(tv, self.feature_version, self.feature_name)) + def log_usage_warning(self, tv): + mlog.deprecation('Project targetting \'{}\' but tried to use feature ' + 'deprecated since \'{}\': {}' + ''.format(tv, self.feature_version, self.feature_name)) - def __call__(self, f): - @wraps(f) - def wrapped(*wrapped_args, **wrapped_kwargs): - self.use() - return f(*wrapped_args, **wrapped_kwargs) - return wrapped -class FeatureNewKwargs: +class FeatureCheckKwargsBase: def __init__(self, feature_name, feature_version, kwargs): self.feature_name = feature_name self.feature_version = feature_version @@ -208,26 +257,24 @@ class FeatureNewKwargs: def __call__(self, f): @wraps(f) def wrapped(*wrapped_args, **wrapped_kwargs): + # Which FeatureCheck class to invoke + FeatureCheckClass = self.feature_check_class + kwargs, subproject = _get_callee_args(wrapped_args, want_subproject=True)[3:5] + if subproject is None: + raise AssertionError('{!r}'.format(wrapped_args)) for arg in self.kwargs: - if arg in wrapped_kwargs: - FeatureNew(arg + ' 
arg in ' + self.feature_name, self.feature_version).use() + if arg not in kwargs: + continue + name = arg + ' arg in ' + self.feature_name + FeatureCheckClass(name, self.feature_version).use(subproject) return f(*wrapped_args, **wrapped_kwargs) return wrapped -class FeatureDeprecatedKwargs: - def __init__(self, feature_name, feature_version, kwargs): - self.feature_name = feature_name - self.feature_version = feature_version - self.kwargs = kwargs +class FeatureNewKwargs(FeatureCheckKwargsBase): + feature_check_class = FeatureNew - def __call__(self, f): - @wraps(f) - def wrapped(*wrapped_args, **wrapped_kwargs): - for arg in self.kwargs: - if arg in wrapped_kwargs: - FeatureDeprecated(arg + ' arg in ' + self.feature_name, self.feature_version).use() - return f(*wrapped_args, **wrapped_kwargs) - return wrapped +class FeatureDeprecatedKwargs(FeatureCheckKwargsBase): + feature_check_class = FeatureDeprecated class InterpreterException(mesonlib.MesonException): @@ -347,7 +394,7 @@ class InterpreterBase: self.current_lineno = cur.lineno self.evaluate_statement(cur) except Exception as e: - if not(hasattr(e, 'lineno')): + if not hasattr(e, 'lineno'): e.lineno = cur.lineno e.colno = cur.colno e.file = os.path.join(self.subdir, 'meson.build') diff --git a/mesonbuild/linkers.py b/mesonbuild/linkers.py index cb07c5e..7e89de5 100644 --- a/mesonbuild/linkers.py +++ b/mesonbuild/linkers.py @@ -88,7 +88,7 @@ class ArLinker(StaticLinker): self.std_args = ['-csr'] def can_linker_accept_rsp(self): - return False + return mesonlib.is_windows() def build_rpath_args(self, build_dir, from_dir, rpath_paths, build_rpath, install_rpath): return [] diff --git a/mesonbuild/mesonlib.py b/mesonbuild/mesonlib.py index e4951f9..e8e5049 100644 --- a/mesonbuild/mesonlib.py +++ b/mesonbuild/mesonlib.py @@ -23,8 +23,8 @@ from mesonbuild import mlog have_fcntl = False have_msvcrt = False -# Used to report conflicts between meson_version and new features used -target_version = '' +# {subproject: project_meson_version} +project_meson_versions = {} try: import fcntl @@ -83,6 +83,13 @@ an_unpicklable_object = threading.Lock() class MesonException(Exception): '''Exceptions thrown by Meson''' + def get_msg_with_context(self): + s = '' + if hasattr(self, 'lineno') and hasattr(self, 'file'): + s = get_error_location_string(self.file, self.lineno) + ' ' + s += str(self) + return s + class EnvironmentException(MesonException): '''Exceptions thrown while processing and creating the build environment''' @@ -356,6 +363,18 @@ def for_haiku(is_cross, env): return env.cross_info.config['host_machine']['system'] == 'haiku' return False +def for_openbsd(is_cross, env): + """ + Host machine is OpenBSD? 
+ + Note: 'host' is the machine on which compiled binaries will run + """ + if not is_cross: + return is_openbsd() + elif env.cross_info.has_host(): + return env.cross_info.config['host_machine']['system'] == 'openbsd' + return False + def exe_exists(arglist): try: p = subprocess.Popen(arglist, stdout=subprocess.PIPE, stderr=subprocess.PIPE) @@ -396,6 +415,12 @@ def grab_leading_numbers(vstr, strict=False): break return result +def make_same_len(listA, listB): + maxlen = max(len(listA), len(listB)) + for i in listA, listB: + for n in range(len(i), maxlen): + i.append(0) + numpart = re.compile('[0-9.]+') def version_compare(vstr1, vstr2, strict=False): @@ -429,6 +454,7 @@ def version_compare(vstr1, vstr2, strict=False): cmpop = operator.eq varr1 = grab_leading_numbers(vstr1, strict) varr2 = grab_leading_numbers(vstr2, strict) + make_same_len(varr1, varr2) return cmpop(varr1, varr2) def version_compare_many(vstr1, conditions): @@ -451,7 +477,7 @@ def version_compare_condition_with_min(condition, minimum): raise MesonException(msg.format(minimum)) minimum = match.group(0) if condition.startswith('>='): - cmpop = operator.lt + cmpop = operator.le condition = condition[2:] elif condition.startswith('<='): return True @@ -460,10 +486,10 @@ def version_compare_condition_with_min(condition, minimum): return True condition = condition[2:] elif condition.startswith('=='): - cmpop = operator.lt + cmpop = operator.le condition = condition[2:] elif condition.startswith('='): - cmpop = operator.lt + cmpop = operator.le condition = condition[1:] elif condition.startswith('>'): cmpop = operator.lt @@ -472,9 +498,10 @@ def version_compare_condition_with_min(condition, minimum): return True condition = condition[2:] else: - cmpop = operator.eq + cmpop = operator.le varr1 = grab_leading_numbers(minimum, True) varr2 = grab_leading_numbers(condition, True) + make_same_len(varr1, varr2) return cmpop(varr1, varr2) def version_compare_condition_with_max(condition, maximum): @@ -487,27 +514,28 @@ def version_compare_condition_with_max(condition, maximum): return False condition = condition[2:] elif condition.startswith('<='): - cmpop = operator.lt + cmpop = operator.ge condition = condition[2:] elif condition.startswith('!='): return False condition = condition[2:] elif condition.startswith('=='): - cmpop = operator.lt + cmpop = operator.ge condition = condition[2:] elif condition.startswith('='): - cmpop = operator.lt + cmpop = operator.ge condition = condition[1:] elif condition.startswith('>'): return False condition = condition[1:] elif condition.startswith('<'): - cmpop = operator.lt + cmpop = operator.gt condition = condition[2:] else: - cmpop = operator.eq + cmpop = operator.ge varr1 = grab_leading_numbers(maximum, True) varr2 = grab_leading_numbers(condition, True) + make_same_len(varr1, varr2) return cmpop(varr1, varr2) @@ -594,7 +622,9 @@ def do_replacement(regex, line, format, confdata): elif isinstance(var, int): var = str(var) else: - raise RuntimeError('Tried to replace a variable with something other than a string or int.') + msg = 'Tried to replace variable {!r} value with ' \ + 'something other than a string or int: {!r}' + raise MesonException(msg.format(varname, var)) else: missing_variables.add(varname) var = '' @@ -644,12 +674,18 @@ def do_conf_file(src, dst, confdata, format, encoding='utf-8'): result = [] missing_variables = set() + # Detect when the configuration data is empty and no tokens were found + # during substitution so we can warn the user to use the `copy:` kwarg. 
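`make_same_len()` above pads the shorter of two parsed version arrays with zeros, and the comparison operators in `version_compare_condition_with_min()` switch from strict to non-strict, so that for example a project declaring `meson_version : '>=0.47.0'` is accepted for a feature introduced in 0.47.0. A standalone sketch of the padding plus the corrected `>=` case; the parsing and the other operators are simplified away.

```python
import operator
import re

def grab_numbers(vstr):
    return [int(x) for x in re.findall(r'\d+', vstr)]

def make_same_len(a, b):
    # Pad the shorter list with zeros so [0, 47] compares equal to [0, 47, 0].
    maxlen = max(len(a), len(b))
    for lst in (a, b):
        lst.extend([0] * (maxlen - len(lst)))

def condition_guarantees_min(condition, minimum):
    # Does every version satisfying 'condition' also satisfy >= 'minimum'?
    # Only the '>=' prefix is handled here; the real helper covers them all.
    assert condition.startswith('>=')
    v_min = grab_numbers(minimum)
    v_cond = grab_numbers(condition[2:])
    make_same_len(v_min, v_cond)
    return operator.le(v_min, v_cond)   # 'le', not 'lt': an equal version is enough

print(condition_guarantees_min('>=0.47.0', '0.47'))  # True
```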
+ confdata_useless = not confdata.keys() for line in data: if line.startswith(search_token): + confdata_useless = False line = do_mesondefine(line, confdata) else: line, missing = do_replacement(regex, line, format, confdata) missing_variables.update(missing) + if missing: + confdata_useless = False result.append(line) dst_tmp = dst + '~' try: @@ -659,7 +695,7 @@ def do_conf_file(src, dst, confdata, format, encoding='utf-8'): raise MesonException('Could not write output file %s: %s' % (dst, str(e))) shutil.copymode(src, dst_tmp) replace_if_different(dst, dst_tmp) - return missing_variables + return missing_variables, confdata_useless CONF_C_PRELUDE = '''/* * Autogenerated by the Meson build system. @@ -796,22 +832,19 @@ def expand_arguments(args): return None return expended_args -def Popen_safe(args, write=None, stderr=subprocess.PIPE, **kwargs): +def Popen_safe(args, write=None, stdout=subprocess.PIPE, stderr=subprocess.PIPE, **kwargs): import locale encoding = locale.getpreferredencoding() if sys.version_info < (3, 6) or not sys.stdout.encoding or encoding.upper() != 'UTF-8': - return Popen_safe_legacy(args, write=write, stderr=stderr, **kwargs) - p = subprocess.Popen(args, universal_newlines=True, - close_fds=False, - stdout=subprocess.PIPE, - stderr=stderr, **kwargs) + return Popen_safe_legacy(args, write=write, stdout=stdout, stderr=stderr, **kwargs) + p = subprocess.Popen(args, universal_newlines=True, close_fds=False, + stdout=stdout, stderr=stderr, **kwargs) o, e = p.communicate(write) return p, o, e -def Popen_safe_legacy(args, write=None, stderr=subprocess.PIPE, **kwargs): +def Popen_safe_legacy(args, write=None, stdout=subprocess.PIPE, stderr=subprocess.PIPE, **kwargs): p = subprocess.Popen(args, universal_newlines=False, - stdout=subprocess.PIPE, - stderr=stderr, **kwargs) + stdout=stdout, stderr=stderr, **kwargs) if write is not None: write = write.encode('utf-8') o, e = p.communicate(write) @@ -1033,6 +1066,9 @@ def detect_subprojects(spdir_name, current_dir='', result=None): result[basename] = [trial] return result +def get_error_location_string(fname, lineno): + return '{}:{}:'.format(fname, lineno) + class OrderedSet(collections.MutableSet): """A set that preserves the order in which items are added, by first insertion. diff --git a/mesonbuild/mesonmain.py b/mesonbuild/mesonmain.py index e74fb36..011ac14 100644 --- a/mesonbuild/mesonmain.py +++ b/mesonbuild/mesonmain.py @@ -12,6 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. +import time import sys, stat, traceback, argparse import datetime import os.path @@ -20,11 +21,10 @@ import cProfile as profile from . import environment, interpreter, mesonlib from . import build -from . import mconf, mintro, mtest, rewriter, minit from . 
import mlog, coredata from .mesonlib import MesonException from .environment import detect_msys2_arch -from .wrap import WrapMode, wraptool +from .wrap import WrapMode default_warning = '1' @@ -41,8 +41,8 @@ def create_parser(): help='Special wrap mode to use') p.add_argument('--profile-self', action='store_true', dest='profile', help=argparse.SUPPRESS) - p.add_argument('builddir', nargs='?', default='..') - p.add_argument('sourcedir', nargs='?', default='.') + p.add_argument('builddir', nargs='?', default=None) + p.add_argument('sourcedir', nargs='?', default=None) return p def wrapmodetype(string): @@ -111,6 +111,8 @@ class MesonApp: def generate(self): env = environment.Environment(self.source_dir, self.build_dir, self.options) mlog.initialize(env.get_log_dir()) + if self.options.profile: + mlog.set_timestamp_start(time.monotonic()) with mesonlib.BuildDirLock(self.build_dir): self._generate(env) @@ -142,6 +144,11 @@ class MesonApp: profile.runctx('intr.run()', globals(), locals(), filename=fname) else: intr.run() + # Print all default option values that don't match the current value + for def_opt_name, def_opt_value, cur_opt_value in intr.get_non_matching_default_options(): + mlog.log('Option', mlog.bold(def_opt_name), 'is:', + mlog.bold(str(cur_opt_value)), + '[default: {}]'.format(str(def_opt_value))) try: dumpfile = os.path.join(env.get_scratch_dir(), 'build.dat') # We would like to write coredata as late as possible since we use the existence of @@ -178,9 +185,6 @@ def run_script_command(args): elif cmdname == 'cleantrees': import mesonbuild.scripts.cleantrees as abc cmdfunc = abc.run - elif cmdname == 'install': - import mesonbuild.scripts.meson_install as abc - cmdfunc = abc.run elif cmdname == 'commandrunner': import mesonbuild.scripts.commandrunner as abc cmdfunc = abc.run @@ -270,23 +274,32 @@ def run(original_args, mainfile): args = remaining_args cmd_name = args[0] if cmd_name == 'test': + from . import mtest return mtest.run(remaining_args) elif cmd_name == 'setup': args = remaining_args # FALLTHROUGH like it's 1972. + elif cmd_name == 'install': + from . import minstall + return minstall.run(remaining_args) elif cmd_name == 'introspect': + from . import mintro return mintro.run(remaining_args) elif cmd_name == 'rewrite': + from . import rewriter return rewriter.run(remaining_args) elif cmd_name == 'configure': try: + from . import mconf return mconf.run(remaining_args) except MesonException as e: mlog.exception(e) sys.exit(1) elif cmd_name == 'wrap': + from .wrap import wraptool return wraptool.run(remaining_args) elif cmd_name == 'init': + from . import minit return minit.run(remaining_args) elif cmd_name == 'runpython': import runpy @@ -318,6 +331,15 @@ def run(original_args, mainfile): dir1 = options.builddir dir2 = options.sourcedir try: + if dir1 is None: + if dir2 is None: + if not os.path.exists('meson.build') and os.path.exists('../meson.build'): + dir2 = '..' + else: + raise MesonException('Must specify at least one directory name.') + dir1 = os.getcwd() + if dir2 is None: + dir2 = os.getcwd() app = MesonApp(dir1, dir2, handshake, options) except Exception as e: # Log directory does not exist, so just print @@ -349,7 +371,11 @@ def run(original_args, mainfile): def main(): # Always resolve the command path so Ninja can find it for regen, tests, etc. 
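An editorial aside, not part of the patch: with builddir and sourcedir now defaulting to None, run() fills them in so that a bare `meson` invocation works from a build directory that sits beside the sources. A minimal sketch of that decision tree, using RuntimeError in place of MesonException:

```python
# Editorial sketch (not part of the patch) of the fallback added to run().
import os

def resolve_dirs(dir1, dir2):
    # No args means "the current dir is the build dir; use .. as the source
    # dir if that is where meson.build lives".
    if dir1 is None:
        if dir2 is None:
            if not os.path.exists('meson.build') and os.path.exists('../meson.build'):
                dir2 = '..'
            else:
                raise RuntimeError('Must specify at least one directory name.')
        dir1 = os.getcwd()
    if dir2 is None:
        dir2 = os.getcwd()
    return dir1, dir2
```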
- launcher = os.path.realpath(sys.argv[0]) + if 'meson.exe' in sys.executable: + assert(os.path.isabs(sys.executable)) + launcher = sys.executable + else: + launcher = os.path.realpath(sys.argv[0]) return run(sys.argv[1:], launcher) if __name__ == '__main__': diff --git a/mesonbuild/minstall.py b/mesonbuild/minstall.py new file mode 100644 index 0000000..4615b6d --- /dev/null +++ b/mesonbuild/minstall.py @@ -0,0 +1,470 @@ +# Copyright 2013-2014 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import sys, pickle, os, shutil, subprocess, gzip, errno +import shlex +import argparse +from glob import glob +from .scripts import depfixer +from .scripts import destdir_join +from .mesonlib import is_windows, Popen_safe +from .mtest import rebuild_all +from __main__ import __file__ as main_file + +selinux_updates = [] + +def buildparser(): + parser = argparse.ArgumentParser(prog='meson install') + parser.add_argument('-C', default='.', dest='wd', + help='directory to cd into before running') + parser.add_argument('--no-rebuild', default=False, action='store_true', + help='Do not rebuild before installing.') + parser.add_argument('--only-changed', default=False, action='store_true', + help='Only overwrite files that are older than the copied file.') + return parser + +class DirMaker: + def __init__(self, lf): + self.lf = lf + self.dirs = [] + + def makedirs(self, path, exist_ok=False): + dirname = os.path.normpath(path) + dirs = [] + while dirname != os.path.dirname(dirname): + if not os.path.exists(dirname): + dirs.append(dirname) + dirname = os.path.dirname(dirname) + os.makedirs(path, exist_ok=exist_ok) + + # store the directories in creation order, with the parent directory + # before the child directories. Future calls of makedir() will not + # create the parent directories, so the last element in the list is + # the last one to be created. That is the first one to be removed on + # __exit__ + dirs.reverse() + self.dirs += dirs + + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + self.dirs.reverse() + for d in self.dirs: + append_to_log(self.lf, d) + +def is_executable(path): + '''Checks whether any of the "x" bits are set in the source file mode.''' + return bool(os.stat(path).st_mode & 0o111) + +def append_to_log(lf, line): + lf.write(line) + if not line.endswith('\n'): + lf.write('\n') + lf.flush() + +def sanitize_permissions(path, umask): + if umask is None: + return + new_perms = 0o777 if is_executable(path) else 0o666 + new_perms &= ~umask + try: + os.chmod(path, new_perms) + except PermissionError as e: + msg = '{!r}: Unable to set permissions {!r}: {}, ignoring...' 
+ print(msg.format(path, new_perms, e.strerror)) + +def set_mode(path, mode, default_umask): + if mode is None or (mode.perms_s or mode.owner or mode.group) is None: + # Just sanitize permissions with the default umask + sanitize_permissions(path, default_umask) + return + # No chown() on Windows, and must set one of owner/group + if not is_windows() and (mode.owner or mode.group) is not None: + try: + shutil.chown(path, mode.owner, mode.group) + except PermissionError as e: + msg = '{!r}: Unable to set owner {!r} and group {!r}: {}, ignoring...' + print(msg.format(path, mode.owner, mode.group, e.strerror)) + except LookupError: + msg = '{!r}: Non-existent owner {!r} or group {!r}: ignoring...' + print(msg.format(path, mode.owner, mode.group)) + except OSError as e: + if e.errno == errno.EINVAL: + msg = '{!r}: Non-existent numeric owner {!r} or group {!r}: ignoring...' + print(msg.format(path, mode.owner, mode.group)) + else: + raise + # Must set permissions *after* setting owner/group otherwise the + # setuid/setgid bits will get wiped by chmod + # NOTE: On Windows you can set read/write perms; the rest are ignored + if mode.perms_s is not None: + try: + os.chmod(path, mode.perms) + except PermissionError as e: + msg = '{!r}: Unable to set permissions {!r}: {}, ignoring...' + print(msg.format(path, mode.perms_s, e.strerror)) + else: + sanitize_permissions(path, default_umask) + +def restore_selinux_contexts(): + ''' + Restores the SELinux context for files in @selinux_updates + + If $DESTDIR is set, do not warn if the call fails. + ''' + try: + subprocess.check_call(['selinuxenabled']) + except (FileNotFoundError, PermissionError, subprocess.CalledProcessError) as e: + # If we don't have selinux or selinuxenabled returned 1, failure + # is ignored quietly. + return + + if not shutil.which('restorecon'): + # If we don't have restorecon, failure is ignored quietly. + return + + with subprocess.Popen(['restorecon', '-F', '-f-', '-0'], + stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE) as proc: + out, err = proc.communicate(input=b'\0'.join(os.fsencode(f) for f in selinux_updates) + b'\0') + if proc.returncode != 0 and not os.environ.get('DESTDIR'): + print('Failed to restore SELinux context of installed files...', + 'Standard output:', out.decode(), + 'Standard error:', err.decode(), sep='\n') + + +def get_destdir_path(d, path): + if os.path.isabs(path): + output = destdir_join(d.destdir, path) + else: + output = os.path.join(d.fullprefix, path) + return output + + +def check_for_stampfile(fname): + '''Some languages e.g. Rust have output files + whose names are not known at configure time. + Check if this is the case and return the real + file instead.''' + if fname.endswith('.so') or fname.endswith('.dll'): + if os.stat(fname).st_size == 0: + (base, suffix) = os.path.splitext(fname) + files = glob(base + '-*' + suffix) + if len(files) > 1: + print("Stale dynamic library files in build dir. Can't install.") + sys.exit(1) + if len(files) == 1: + return files[0] + elif fname.endswith('.a') or fname.endswith('.lib'): + if os.stat(fname).st_size == 0: + (base, suffix) = os.path.splitext(fname) + files = glob(base + '-*' + '.rlib') + if len(files) > 1: + print("Stale static library files in build dir. 
Can't install.") + sys.exit(1) + if len(files) == 1: + return files[0] + return fname + +class Installer: + + def __init__(self, options, lf): + self.options = options + self.lf = lf + + def should_preserve_existing_file(self, from_file, to_file): + if not self.options.only_changed: + return False + from_time = os.stat(from_file).st_mtime + to_time = os.stat(to_file).st_mtime + return from_time <= to_time + + def do_copyfile(self, from_file, to_file): + if not os.path.isfile(from_file): + raise RuntimeError('Tried to install something that isn\'t a file:' + '{!r}'.format(from_file)) + # copyfile fails if the target file already exists, so remove it to + # allow overwriting a previous install. If the target is not a file, we + # want to give a readable error. + if os.path.exists(to_file): + if not os.path.isfile(to_file): + raise RuntimeError('Destination {!r} already exists and is not ' + 'a file'.format(to_file)) + if self.should_preserve_existing_file(from_file, to_file): + append_to_log(self.lf, '# Preserving old file %s\n' % to_file) + print('Preserving existing file %s.' % to_file) + return False + os.unlink(to_file) + outdir = os.path.split(to_file)[0] + print('Installing %s to %s' % (from_file, outdir)) + shutil.copyfile(from_file, to_file) + shutil.copystat(from_file, to_file) + selinux_updates.append(to_file) + append_to_log(self.lf, to_file) + return True + + def do_copydir(self, data, src_dir, dst_dir, exclude, install_mode): + ''' + Copies the contents of directory @src_dir into @dst_dir. + + For directory + /foo/ + bar/ + excluded + foobar + file + do_copydir(..., '/foo', '/dst/dir', {'bar/excluded'}) creates + /dst/ + dir/ + bar/ + foobar + file + + Args: + src_dir: str, absolute path to the source directory + dst_dir: str, absolute path to the destination directory + exclude: (set(str), set(str)), tuple of (exclude_files, exclude_dirs), + each element of the set is a path relative to src_dir. + ''' + if not os.path.isabs(src_dir): + raise ValueError('src_dir must be absolute, got %s' % src_dir) + if not os.path.isabs(dst_dir): + raise ValueError('dst_dir must be absolute, got %s' % dst_dir) + if exclude is not None: + exclude_files, exclude_dirs = exclude + else: + exclude_files = exclude_dirs = set() + for root, dirs, files in os.walk(src_dir): + assert os.path.isabs(root) + for d in dirs[:]: + abs_src = os.path.join(root, d) + filepart = os.path.relpath(abs_src, start=src_dir) + abs_dst = os.path.join(dst_dir, filepart) + # Remove these so they aren't visited by os.walk at all. + if filepart in exclude_dirs: + dirs.remove(d) + continue + if os.path.isdir(abs_dst): + continue + if os.path.exists(abs_dst): + print('Tried to copy directory %s but a file of that name already exists.' % abs_dst) + sys.exit(1) + data.dirmaker.makedirs(abs_dst) + shutil.copystat(abs_src, abs_dst) + sanitize_permissions(abs_dst, data.install_umask) + for f in files: + abs_src = os.path.join(root, f) + filepart = os.path.relpath(abs_src, start=src_dir) + if filepart in exclude_files: + continue + abs_dst = os.path.join(dst_dir, filepart) + if os.path.isdir(abs_dst): + print('Tried to copy file %s but a directory of that name already exists.' % abs_dst) + if os.path.exists(abs_dst): + os.unlink(abs_dst) + parent_dir = os.path.dirname(abs_dst) + if not os.path.isdir(parent_dir): + os.mkdir(parent_dir) + shutil.copystat(os.path.dirname(abs_src), parent_dir) + # FIXME: what about symlinks? 
+ self.do_copyfile(abs_src, abs_dst) + set_mode(abs_dst, install_mode, data.install_umask) + append_to_log(self.lf, abs_dst) + + def do_install(self, datafilename): + with open(datafilename, 'rb') as ifile: + d = pickle.load(ifile) + d.destdir = os.environ.get('DESTDIR', '') + d.fullprefix = destdir_join(d.destdir, d.prefix) + + if d.install_umask is not None: + os.umask(d.install_umask) + + try: + d.dirmaker = DirMaker(self.lf) + with d.dirmaker: + self.install_subdirs(d) # Must be first, because it needs to delete the old subtree. + self.install_targets(d) + self.install_headers(d) + self.install_man(d) + self.install_data(d) + restore_selinux_contexts() + self.run_install_script(d) + except PermissionError: + if shutil.which('pkexec') is not None and 'PKEXEC_UID' not in os.environ: + print('Installation failed due to insufficient permissions.') + print('Attempting to use polkit to gain elevated privileges...') + os.execlp('pkexec', 'pkexec', sys.executable, main_file, *sys.argv[1:], + '-C', os.getcwd()) + else: + raise + + def install_subdirs(self, d): + for (src_dir, dst_dir, mode, exclude) in d.install_subdirs: + full_dst_dir = get_destdir_path(d, dst_dir) + print('Installing subdir %s to %s' % (src_dir, full_dst_dir)) + d.dirmaker.makedirs(full_dst_dir, exist_ok=True) + self.do_copydir(d, src_dir, full_dst_dir, exclude, mode) + + def install_data(self, d): + for i in d.data: + fullfilename = i[0] + outfilename = get_destdir_path(d, i[1]) + mode = i[2] + outdir = os.path.dirname(outfilename) + d.dirmaker.makedirs(outdir, exist_ok=True) + self.do_copyfile(fullfilename, outfilename) + set_mode(outfilename, mode, d.install_umask) + + def install_man(self, d): + for m in d.man: + full_source_filename = m[0] + outfilename = get_destdir_path(d, m[1]) + outdir = os.path.dirname(outfilename) + d.dirmaker.makedirs(outdir, exist_ok=True) + install_mode = m[2] + if outfilename.endswith('.gz') and not full_source_filename.endswith('.gz'): + with open(outfilename, 'wb') as of: + with open(full_source_filename, 'rb') as sf: + # Set mtime and filename for reproducibility. 
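An editorial aside, not part of the patch: the comment above refers to the gzip header, which normally embeds the original file name and a timestamp; fixing both keeps repeated installs byte-identical. A minimal sketch of the same idiom:

```python
# Editorial sketch (not part of the patch): reproducible gzip output.
import gzip

def gzip_reproducibly(src, dst):
    with open(src, 'rb') as sf, open(dst, 'wb') as of:
        # Empty filename and a fixed mtime keep the gzip header deterministic,
        # so the compressed bytes only depend on the input contents.
        with gzip.GzipFile(fileobj=of, mode='wb', filename='', mtime=0) as gz:
            gz.write(sf.read())
```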
+ with gzip.GzipFile(fileobj=of, mode='wb', filename='', mtime=0) as gz: + gz.write(sf.read()) + shutil.copystat(full_source_filename, outfilename) + print('Installing %s to %s' % (full_source_filename, outdir)) + append_to_log(self.lf, outfilename) + else: + self.do_copyfile(full_source_filename, outfilename) + set_mode(outfilename, install_mode, d.install_umask) + + def install_headers(self, d): + for t in d.headers: + fullfilename = t[0] + fname = os.path.basename(fullfilename) + outdir = get_destdir_path(d, t[1]) + outfilename = os.path.join(outdir, fname) + install_mode = t[2] + d.dirmaker.makedirs(outdir, exist_ok=True) + self.do_copyfile(fullfilename, outfilename) + set_mode(outfilename, install_mode, d.install_umask) + + def run_install_script(self, d): + env = {'MESON_SOURCE_ROOT': d.source_dir, + 'MESON_BUILD_ROOT': d.build_dir, + 'MESON_INSTALL_PREFIX': d.prefix, + 'MESON_INSTALL_DESTDIR_PREFIX': d.fullprefix, + 'MESONINTROSPECT': ' '.join([shlex.quote(x) for x in d.mesonintrospect]), + } + + child_env = os.environ.copy() + child_env.update(env) + + for i in d.install_scripts: + script = i['exe'] + args = i['args'] + name = ' '.join(script + args) + print('Running custom install script {!r}'.format(name)) + try: + rc = subprocess.call(script + args, env=child_env) + if rc != 0: + sys.exit(rc) + except OSError: + print('Failed to run install script {!r}'.format(name)) + sys.exit(1) + + def install_targets(self, d): + for t in d.targets: + fname = check_for_stampfile(t.fname) + outdir = get_destdir_path(d, t.outdir) + outname = os.path.join(outdir, os.path.basename(fname)) + final_path = os.path.join(d.prefix, t.outdir, os.path.basename(fname)) + aliases = t.aliases + should_strip = t.strip + install_rpath = t.install_rpath + install_name_mappings = t.install_name_mappings + install_mode = t.install_mode + d.dirmaker.makedirs(outdir, exist_ok=True) + if not os.path.exists(fname): + raise RuntimeError('File {!r} could not be found'.format(fname)) + elif os.path.isfile(fname): + self.do_copyfile(fname, outname) + set_mode(outname, install_mode, d.install_umask) + if should_strip and d.strip_bin is not None: + if fname.endswith('.jar'): + print('Not stripping jar target:', os.path.basename(fname)) + continue + print('Stripping target {!r}'.format(fname)) + ps, stdo, stde = Popen_safe(d.strip_bin + [outname]) + if ps.returncode != 0: + print('Could not strip file.\n') + print('Stdout:\n%s\n' % stdo) + print('Stderr:\n%s\n' % stde) + sys.exit(1) + pdb_filename = os.path.splitext(fname)[0] + '.pdb' + if not should_strip and os.path.exists(pdb_filename): + pdb_outname = os.path.splitext(outname)[0] + '.pdb' + self.do_copyfile(pdb_filename, pdb_outname) + set_mode(pdb_outname, install_mode, d.install_umask) + elif os.path.isdir(fname): + fname = os.path.join(d.build_dir, fname.rstrip('/')) + outname = os.path.join(outdir, os.path.basename(fname)) + self.do_copydir(d, fname, outname, None, install_mode) + else: + raise RuntimeError('Unknown file type for {!r}'.format(fname)) + printed_symlink_error = False + for alias, to in aliases.items(): + try: + symlinkfilename = os.path.join(outdir, alias) + try: + os.unlink(symlinkfilename) + except FileNotFoundError: + pass + os.symlink(to, symlinkfilename) + append_to_log(self.lf, symlinkfilename) + except (NotImplementedError, OSError): + if not printed_symlink_error: + print("Symlink creation does not work on this platform. 
" + "Skipping all symlinking.") + printed_symlink_error = True + if os.path.isfile(outname): + try: + depfixer.fix_rpath(outname, install_rpath, final_path, + install_name_mappings, verbose=False) + except SystemExit as e: + if isinstance(e.code, int) and e.code == 0: + pass + else: + raise + +def run(args): + parser = buildparser() + opts = parser.parse_args(args) + datafilename = 'meson-private/install.dat' + private_dir = os.path.dirname(datafilename) + log_dir = os.path.join(private_dir, '../meson-logs') + if not os.path.exists(os.path.join(opts.wd, datafilename)): + sys.exit('Install data not found. Run this command in build directory root.') + log_dir = os.path.join(private_dir, '../meson-logs') + if not opts.no_rebuild: + if not rebuild_all(opts.wd): + sys.exit(-1) + os.chdir(opts.wd) + with open(os.path.join(log_dir, 'install-log.txt'), 'w') as lf: + installer = Installer(opts, lf) + append_to_log(lf, '# List of files installed by Meson') + append_to_log(lf, '# Does not contain files installed by custom scripts.') + installer.do_install(datafilename) + return 0 + +if __name__ == '__main__': + sys.exit(run(sys.argv[1:])) diff --git a/mesonbuild/mintro.py b/mesonbuild/mintro.py index 5d60134..94bc00b 100644 --- a/mesonbuild/mintro.py +++ b/mesonbuild/mintro.py @@ -53,14 +53,12 @@ def buildparser(): def determine_installed_path(target, installdata): install_target = None for i in installdata.targets: - if os.path.basename(i[0]) == target.get_filename(): # FIXME, might clash due to subprojects. + if os.path.basename(i.fname) == target.get_filename(): # FIXME, might clash due to subprojects. install_target = i break if install_target is None: raise RuntimeError('Something weird happened. File a bug.') - fname = i[0] - outdir = i[1] - outname = os.path.join(installdata.prefix, outdir, os.path.basename(fname)) + outname = os.path.join(installdata.prefix, i.outdir, os.path.basename(i.fname)) # Normalize the path by using os.path.sep consistently, etc. # Does not change the effective path. return str(pathlib.PurePath(outname)) @@ -69,8 +67,9 @@ def determine_installed_path(target, installdata): def list_installed(installdata): res = {} if installdata is not None: - for path, installdir, aliases, *unknown in installdata.targets: - res[os.path.join(installdata.build_dir, path)] = os.path.join(installdata.prefix, installdir, os.path.basename(path)) + for t in installdata.targets: + res[os.path.join(installdata.build_dir, t.fname)] = \ + os.path.join(installdata.prefix, t.outdir, os.path.basename(t.fname)) for path, installpath, unused_prefix in installdata.data: res[path] = os.path.join(installdata.prefix, installpath) for path, installdir, unused_custom_install_mode in installdata.headers: diff --git a/mesonbuild/mlog.py b/mesonbuild/mlog.py index d17b889..1654824 100644 --- a/mesonbuild/mlog.py +++ b/mesonbuild/mlog.py @@ -12,7 +12,11 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-import sys, os, platform, io +import os +import io +import sys +import time +import platform from contextlib import contextmanager """This is (mostly) a standalone module used to write logging @@ -41,12 +45,17 @@ log_dir = None log_file = None log_fname = 'meson-log.txt' log_depth = 0 +log_timestamp_start = None def initialize(logdir): global log_dir, log_file log_dir = logdir log_file = open(os.path.join(logdir, log_fname), 'w', encoding='utf8') +def set_timestamp_start(start): + global log_timestamp_start + log_timestamp_start = start + def shutdown(): global log_file if log_file is not None: @@ -57,17 +66,21 @@ def shutdown(): class AnsiDecorator: plain_code = "\033[0m" - def __init__(self, text, code): + def __init__(self, text, code, quoted=False): self.text = text self.code = code + self.quoted = quoted def get_text(self, with_codes): + text = self.text if with_codes: - return self.code + self.text + AnsiDecorator.plain_code - return self.text + text = self.code + self.text + AnsiDecorator.plain_code + if self.quoted: + text = '"{}"'.format(text) + return text -def bold(text): - return AnsiDecorator(text, "\033[1m") +def bold(text, quoted=False): + return AnsiDecorator(text, "\033[1m", quoted=quoted) def red(text): return AnsiDecorator(text, "\033[1;31m") @@ -83,6 +96,8 @@ def cyan(text): def process_markup(args, keep): arr = [] + if log_timestamp_start is not None: + arr = ['[{:.3f}]'.format(time.monotonic() - log_timestamp_start)] for arg in args: if isinstance(arg, str): arr.append(arg) @@ -125,19 +140,21 @@ def log(*args, **kwargs): force_print(*arr, **kwargs) def _log_error(severity, *args, **kwargs): - from . import environment + from .mesonlib import get_error_location_string + from .environment import build_filename if severity == 'warning': args = (yellow('WARNING:'),) + args elif severity == 'error': args = (red('ERROR:'),) + args + elif severity == 'deprecation': + args = (red('DEPRECATION:'),) + args else: assert False, 'Invalid severity ' + severity location = kwargs.pop('location', None) if location is not None: - location_str = '{}:{}:'.format(os.path.join(location.subdir, - environment.build_filename), - location.lineno) + location_file = os.path.join(location.subdir, build_filename) + location_str = get_error_location_string(location_file, location.lineno) args = (location_str,) + args log(*args, **kwargs) @@ -148,6 +165,9 @@ def error(*args, **kwargs): def warning(*args, **kwargs): return _log_error('warning', *args, **kwargs) +def deprecation(*args, **kwargs): + return _log_error('deprecation', *args, **kwargs) + def exception(e): log() if hasattr(e, 'file') and hasattr(e, 'lineno') and hasattr(e, 'colno'): diff --git a/mesonbuild/modules/__init__.py b/mesonbuild/modules/__init__.py index e871b87..6b6aa8b 100644 --- a/mesonbuild/modules/__init__.py +++ b/mesonbuild/modules/__init__.py @@ -1,20 +1,6 @@ import os from .. import build -from .. import mlog - -class permittedSnippetKwargs: - - def __init__(self, permitted): - self.permitted = permitted - - def __call__(self, f): - def wrapped(s, interpreter, state, args, kwargs): - for k in kwargs: - if k not in self.permitted: - mlog.warning('Passed invalid keyword argument "%s". This will become a hard error in the future.' 
% k) - return f(s, interpreter, state, args, kwargs) - return wrapped class ExtensionModule: diff --git a/mesonbuild/modules/gnome.py b/mesonbuild/modules/gnome.py index 111e7f7..1459ffe 100644 --- a/mesonbuild/modules/gnome.py +++ b/mesonbuild/modules/gnome.py @@ -288,7 +288,7 @@ class GnomeModule(ExtensionModule): dep_files.append(f) return dep_files, depends, subdirs - def _get_link_args(self, state, lib, depends=None, include_rpath=False, + def _get_link_args(self, state, lib, depends, include_rpath=False, use_gir_args=False): link_command = [] # Construct link args @@ -306,18 +306,18 @@ class GnomeModule(ExtensionModule): link_command.append('-Wl,-rpath,' + d) if include_rpath: link_command.append('-Wl,-rpath,' + libdir) - if depends: - depends.append(lib) + depends.append(lib) if gir_has_extra_lib_arg(self.interpreter) and use_gir_args: link_command.append('--extra-library=' + lib.name) else: link_command.append('-l' + lib.name) return link_command - def _get_dependencies_flags(self, deps, state, depends=None, include_rpath=False, + def _get_dependencies_flags(self, deps, state, depends, include_rpath=False, use_gir_args=False): cflags = OrderedSet() - ldflags = OrderedSet() + internal_ldflags = OrderedSet() + external_ldflags = OrderedSet() gi_includes = OrderedSet() deps = mesonlib.listify(deps, unholder=True) @@ -327,17 +327,19 @@ class GnomeModule(ExtensionModule): for lib in dep.libraries: if hasattr(lib, 'held_object'): lib = lib.held_object - ldflags.update(self._get_link_args(state, lib, depends, include_rpath)) + internal_ldflags.update(self._get_link_args(state, lib, depends, include_rpath)) libdepflags = self._get_dependencies_flags(lib.get_external_deps(), state, depends, include_rpath, use_gir_args) cflags.update(libdepflags[0]) - ldflags.update(libdepflags[1]) - gi_includes.update(libdepflags[2]) + internal_ldflags.update(libdepflags[1]) + external_ldflags.update(libdepflags[2]) + gi_includes.update(libdepflags[3]) extdepflags = self._get_dependencies_flags(dep.ext_deps, state, depends, include_rpath, use_gir_args) cflags.update(extdepflags[0]) - ldflags.update(extdepflags[1]) - gi_includes.update(extdepflags[2]) + internal_ldflags.update(extdepflags[1]) + external_ldflags.update(extdepflags[2]) + gi_includes.update(extdepflags[3]) for source in dep.sources: if hasattr(source, 'held_object'): source = source.held_object @@ -347,14 +349,14 @@ class GnomeModule(ExtensionModule): # This should be any dependency other than an internal one. 
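An editorial aside, not part of the patch: splitting ldflags into internal and external sets lets the two groups be placed at different points of the g-ir-scanner command line, as the scan_command hunks further down show. A rough sketch of that ordering, with invented flag values:

```python
# Editorial sketch (not part of the patch); flag values are invented, the
# ordering mirrors the scan_command assembly later in this file.
internal_ldflags = ['-lfoo']                            # libraries built in this project
external_ldflags = ['-L/opt/gnome/lib', '-lglib-2.0']   # flags from external dependencies

scan_command = ['g-ir-scanner']
scan_command += ['-L@PRIVATE_OUTDIR_ABS_foo@']   # uninstalled output dir of the target
scan_command += internal_ldflags                 # resolved against the build tree
# ... include paths, external compile args, -L entries from external link args ...
scan_command += external_ldflags                 # appended last
print(' '.join(scan_command))
```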
elif isinstance(dep, Dependency): cflags.update(dep.get_compile_args()) - for lib in dep.get_link_args(): + for lib in dep.get_link_args(raw=True): if (os.path.isabs(lib) and # For PkgConfigDependency only: getattr(dep, 'is_libtool', False)): lib_dir = os.path.dirname(lib) - ldflags.update(["-L%s" % lib_dir]) + external_ldflags.update(["-L%s" % lib_dir]) if include_rpath: - ldflags.update(['-Wl,-rpath {}'.format(lib_dir)]) + external_ldflags.update(['-Wl,-rpath {}'.format(lib_dir)]) libname = os.path.basename(lib) if libname.startswith("lib"): libname = libname[3:] @@ -363,7 +365,7 @@ class GnomeModule(ExtensionModule): # Hack to avoid passing some compiler options in if lib.startswith("-W"): continue - ldflags.update([lib]) + external_ldflags.update([lib]) if isinstance(dep, PkgConfigDependency): girdir = dep.get_pkgconfig_variable("girdir", {'default': ''}) @@ -371,19 +373,23 @@ class GnomeModule(ExtensionModule): gi_includes.update([girdir]) elif isinstance(dep, (build.StaticLibrary, build.SharedLibrary)): cflags.update(get_include_args(dep.get_include_dirs())) + depends.append(dep) else: mlog.log('dependency {!r} not handled to build gir files'.format(dep)) continue if gir_has_extra_lib_arg(self.interpreter) and use_gir_args: - fixed_ldflags = OrderedSet() - for ldflag in ldflags: - if ldflag.startswith("-l"): - fixed_ldflags.add(ldflag.replace('-l', '--extra-library=', 1)) - else: - fixed_ldflags.add(ldflag) - ldflags = fixed_ldflags - return cflags, ldflags, gi_includes + def fix_ldflags(ldflags): + fixed_ldflags = OrderedSet() + for ldflag in ldflags: + if ldflag.startswith("-l"): + fixed_ldflags.add(ldflag.replace('-l', '--extra-library=', 1)) + else: + fixed_ldflags.add(ldflag) + return fixed_ldflags + internal_ldflags = fix_ldflags(internal_ldflags) + external_ldflags = fix_ldflags(external_ldflags) + return cflags, internal_ldflags, external_ldflags, gi_includes @FeatureNewKwargs('build target', '0.40.0', ['build_by_default']) @permittedKwargs({'sources', 'nsversion', 'namespace', 'symbol_prefix', 'identifier_prefix', @@ -446,9 +452,10 @@ class GnomeModule(ExtensionModule): if hasattr(s, 'held_object'): s = s.held_object if isinstance(s, (build.CustomTarget, build.CustomTargetIndex)): - gir_filelist.write(os.path.join(state.environment.get_build_dir(), - state.backend.get_target_dir(s), - s.get_outputs()[0]) + '\n') + for custom_output in s.get_outputs(): + gir_filelist.write(os.path.join(state.environment.get_build_dir(), + state.backend.get_target_dir(s), + custom_output) + '\n') elif isinstance(s, mesonlib.File): gir_filelist.write(s.rel_to_builddir(state.build_to_src) + '\n') elif isinstance(s, build.GeneratedList): @@ -486,7 +493,8 @@ class GnomeModule(ExtensionModule): 'Gir includes must be str, GirTarget, or list of them') cflags = [] - ldflags = [] + internal_ldflags = [] + external_ldflags = [] for lang, compiler in girtarget.compilers.items(): # XXX: Can you use g-i with any other language? if lang in ('c', 'cpp', 'objc', 'objcpp', 'd'): @@ -503,7 +511,7 @@ class GnomeModule(ExtensionModule): sanitize = state.environment.coredata.base_options['b_sanitize'].value cflags += compilers.sanitizer_compile_args(sanitize) if 'address' in sanitize.split(','): - ldflags += ['-lasan'] + external_ldflags += ['-lasan'] # FIXME: Linking directly to libasan is not recommended but g-ir-scanner # does not understand -f LDFLAGS. 
https://bugzilla.gnome.org/show_bug.cgi?id=783892 # ldflags += compilers.sanitizer_link_args(sanitize) @@ -564,10 +572,11 @@ class GnomeModule(ExtensionModule): # ldflags will be misinterpreted by gir scanner (showing # spurious dependencies) but building GStreamer fails if they # are not used here. - dep_cflags, dep_ldflags, gi_includes = self._get_dependencies_flags(deps, state, depends, - use_gir_args=True) + dep_cflags, dep_internal_ldflags, dep_external_ldflags, gi_includes = \ + self._get_dependencies_flags(deps, state, depends, use_gir_args=True) cflags += list(dep_cflags) - ldflags += list(dep_ldflags) + internal_ldflags += list(dep_internal_ldflags) + external_ldflags += list(dep_external_ldflags) scan_command += ['--cflags-begin'] scan_command += cflags scan_command += state.environment.coredata.get_external_args(lang) @@ -577,7 +586,7 @@ class GnomeModule(ExtensionModule): # ones. if isinstance(girtarget, build.SharedLibrary): scan_command += ["-L@PRIVATE_OUTDIR_ABS_%s@" % girtarget.get_id()] - scan_command += list(ldflags) + scan_command += list(internal_ldflags) for i in gi_includes: scan_command += ['--add-include-path=%s' % i] @@ -605,6 +614,7 @@ class GnomeModule(ExtensionModule): for link_arg in state.environment.coredata.get_external_link_args(lang): if link_arg.startswith('-L'): scan_command.append(link_arg) + scan_command += list(external_ldflags) scankwargs = {'output': girfile, 'command': scan_command, @@ -673,7 +683,7 @@ class GnomeModule(ExtensionModule): langs = mesonlib.stringlistify(kwargs.pop('languages', [])) if langs: - mlog.log(mlog.red('DEPRECATION:'), '''The "languages" argument of gnome.yelp() is deprecated. + mlog.deprecation('''The "languages" argument of gnome.yelp() is deprecated. Use a LINGUAS file in the sources directory instead. 
This will become a hard error in the future.''') @@ -821,17 +831,18 @@ This will become a hard error in the future.''') args += self._unpack_args('--expand-content-files=', 'expand_content_files', kwargs, state) args += self._unpack_args('--ignore-headers=', 'ignore_headers', kwargs) - args += self._unpack_args('--installdir=', 'install_dir', kwargs, state) - args += self._get_build_args(kwargs, state) + args += self._unpack_args('--installdir=', 'install_dir', kwargs) + args += self._get_build_args(kwargs, state, depends) res = [build.RunTarget(targetname, command[0], command[1:] + args, depends, state.subdir, state.subproject)] if kwargs.get('install', True): res.append(build.RunScript(command, args)) return ModuleReturnValue(None, res) - def _get_build_args(self, kwargs, state): + def _get_build_args(self, kwargs, state, depends): args = [] deps = extract_as_list(kwargs, 'dependencies', unholder=True) - cflags, ldflags, gi_includes = self._get_dependencies_flags(deps, state, include_rpath=True) + cflags, internal_ldflags, external_ldflags, gi_includes = \ + self._get_dependencies_flags(deps, state, depends, include_rpath=True) inc_dirs = mesonlib.extract_as_list(kwargs, 'include_directories') for incd in inc_dirs: if not isinstance(incd.held_object, (str, build.IncludeDirs)): @@ -839,7 +850,10 @@ This will become a hard error in the future.''') 'Gir include dirs should be include_directories().') cflags.update(get_include_args(inc_dirs)) cflags.update(state.environment.coredata.get_external_args('c')) + ldflags = OrderedSet() + ldflags.update(internal_ldflags) ldflags.update(state.environment.coredata.get_external_link_args('c')) + ldflags.update(external_ldflags) if cflags: args += ['--cflags=%s' % ' '.join(cflags)] if ldflags: @@ -881,6 +895,20 @@ This will become a hard error in the future.''') return [] + def _get_autocleanup_args(self, kwargs, glib_version): + if not mesonlib.version_compare(glib_version, '>= 2.49.1'): + # Warn if requested, silently disable if not + if 'autocleanup' in kwargs: + mlog.warning('Glib version ({}) is too old to support the \'autocleanup\' ' + 'kwarg, need 2.49.1 or newer'.format(glib_version)) + return [] + autocleanup = kwargs.pop('autocleanup', 'all') + values = ('none', 'objects', 'all') + if autocleanup not in values: + raise MesonException('gdbus_codegen does not support {!r} as an autocleanup value, ' + 'must be one of: {!r}'.format(autocleanup, ', '.join(values))) + return ['--c-generate-autocleanup', autocleanup] + @FeatureNewKwargs('build target', '0.46.0', ['install_header', 'install_dir', 'sources']) @FeatureNewKwargs('build target', '0.40.0', ['build_by_default']) @FeatureNewKwargs('build target', '0.47.0', ['extra_args', 'autocleanup']) @@ -888,18 +916,15 @@ This will become a hard error in the future.''') 'annotations', 'docbook', 'install_header', 'install_dir', 'sources'}) def gdbus_codegen(self, state, args, kwargs): if len(args) not in (1, 2): - raise MesonException('Gdbus_codegen takes at most two arguments, name and xml file.') + raise MesonException('gdbus_codegen takes at most two arguments, name and xml file.') namebase = args[0] xml_files = args[1:] - target_name = namebase + '-gdbus' cmd = [self.interpreter.find_program_impl('gdbus-codegen')] extra_args = mesonlib.stringlistify(kwargs.pop('extra_args', [])) cmd += extra_args - autocleanup = kwargs.pop('autocleanup', 'all') - if autocleanup not in ['none', 'objects', 'all']: - raise MesonException( - 'Gdbus_codegen does not support %s as an autocleanup value.' 
% (autocleanup, )) - cmd += ['--c-generate-autocleanup', autocleanup] + # Autocleanup supported? + glib_version = self._get_native_glib_version(state) + cmd += self._get_autocleanup_args(kwargs, glib_version) if 'interface_prefix' in kwargs: cmd += ['--interface-prefix', kwargs.pop('interface_prefix')] if 'namespace' in kwargs: @@ -922,22 +947,45 @@ This will become a hard error in the future.''') raise MesonException('Annotations must be made up of 3 strings for ELEMENT, KEY, and VALUE') cmd += ['--annotate'] + annotation + targets = [] + install_header = kwargs.get('install_header', False) + install_dir = kwargs.get('install_dir', state.environment.coredata.get_builtin_option('includedir')) + + output = namebase + '.c' # Added in https://gitlab.gnome.org/GNOME/glib/commit/e4d68c7b3e8b01ab1a4231bf6da21d045cb5a816 (2.55.2) # Fixed in https://gitlab.gnome.org/GNOME/glib/commit/cd1f82d8fc741a2203582c12cc21b4dacf7e1872 (2.56.2) - if mesonlib.version_compare(self._get_native_glib_version(state), '>= 2.56.2'): - targets = [] - install_header = kwargs.get('install_header', False) - install_dir = kwargs.get('install_dir', state.environment.coredata.get_builtin_option('includedir')) - - output = namebase + '.c' + if mesonlib.version_compare(glib_version, '>= 2.56.2'): custom_kwargs = {'input': xml_files, 'output': output, 'command': cmd + ['--body', '--output', '@OUTPUT@', '@INPUT@'], 'build_by_default': build_by_default } - targets.append(build.CustomTarget(output, state.subdir, state.subproject, custom_kwargs)) + else: + if 'docbook' in kwargs: + docbook = kwargs['docbook'] + if not isinstance(docbook, str): + raise MesonException('docbook value must be a string.') + + cmd += ['--generate-docbook', docbook] + + # https://git.gnome.org/browse/glib/commit/?id=ee09bb704fe9ccb24d92dd86696a0e6bb8f0dc1a + if mesonlib.version_compare(glib_version, '>= 2.51.3'): + cmd += ['--output-directory', '@OUTDIR@', '--generate-c-code', namebase, '@INPUT@'] + else: + self._print_gdbus_warning() + cmd += ['--generate-c-code', '@OUTDIR@/' + namebase, '@INPUT@'] - output = namebase + '.h' + custom_kwargs = {'input': xml_files, + 'output': output, + 'command': cmd, + 'build_by_default': build_by_default + } + + cfile_custom_target = build.CustomTarget(output, state.subdir, state.subproject, custom_kwargs) + targets.append(cfile_custom_target) + + output = namebase + '.h' + if mesonlib.version_compare(glib_version, '>= 2.56.2'): custom_kwargs = {'input': xml_files, 'output': output, 'command': cmd + ['--header', '--output', '@OUTPUT@', '@INPUT@'], @@ -945,60 +993,50 @@ This will become a hard error in the future.''') 'install': install_header, 'install_dir': install_dir } - targets.append(build.CustomTarget(output, state.subdir, state.subproject, custom_kwargs)) + else: + custom_kwargs = {'input': xml_files, + 'output': output, + 'command': cmd, + 'build_by_default': build_by_default, + 'install': install_header, + 'install_dir': install_dir, + 'depends': cfile_custom_target + } - if 'docbook' in kwargs: - docbook = kwargs['docbook'] - if not isinstance(docbook, str): - raise MesonException('docbook value must be a string.') + hfile_custom_target = build.CustomTarget(output, state.subdir, state.subproject, custom_kwargs) + targets.append(hfile_custom_target) + + if 'docbook' in kwargs: + docbook = kwargs['docbook'] + if not isinstance(docbook, str): + raise MesonException('docbook value must be a string.') - docbook_cmd = cmd + ['--output-directory', '@OUTDIR@', '--generate-docbook', docbook, '@INPUT@'] + docbook_cmd = 
cmd + ['--output-directory', '@OUTDIR@', '--generate-docbook', docbook, '@INPUT@'] - # The docbook output is always ${docbook}-${name_of_xml_file} - output = namebase + '-docbook' - outputs = [] - for f in xml_files: - outputs.append('{}-{}'.format(docbook, f)) + # The docbook output is always ${docbook}-${name_of_xml_file} + output = namebase + '-docbook' + outputs = [] + for f in xml_files: + outputs.append('{}-{}'.format(docbook, os.path.basename(str(f)))) + + if mesonlib.version_compare(glib_version, '>= 2.56.2'): custom_kwargs = {'input': xml_files, 'output': outputs, 'command': docbook_cmd, 'build_by_default': build_by_default } - targets.append(build.CustomTarget(output, state.subdir, state.subproject, custom_kwargs)) - - objects = targets - else: - if 'docbook' in kwargs: - docbook = kwargs['docbook'] - if not isinstance(docbook, str): - raise MesonException('docbook value must be a string.') + else: + custom_kwargs = {'input': xml_files, + 'output': outputs, + 'command': cmd, + 'build_by_default': build_by_default, + 'depends': cfile_custom_target + } - cmd += ['--generate-docbook', docbook] + docbook_custom_target = build.CustomTarget(output, state.subdir, state.subproject, custom_kwargs) + targets.append(docbook_custom_target) - # https://git.gnome.org/browse/glib/commit/?id=ee09bb704fe9ccb24d92dd86696a0e6bb8f0dc1a - if mesonlib.version_compare(self._get_native_glib_version(state), '>= 2.51.3'): - cmd += ['--output-directory', '@OUTDIR@', '--generate-c-code', namebase, '@INPUT@'] - else: - self._print_gdbus_warning() - cmd += ['--generate-c-code', '@OUTDIR@/' + namebase, '@INPUT@'] - outputs = [namebase + '.c', namebase + '.h'] - install = kwargs.get('install_header', False) - custom_kwargs = {'input': xml_files, - 'output': outputs, - 'command': cmd, - 'build_by_default': build_by_default, - 'install': install, - } - if install and 'install_dir' in kwargs: - custom_kwargs['install_dir'] = [False, kwargs['install_dir']] - ct = build.CustomTarget(target_name, state.subdir, state.subproject, custom_kwargs) - # Ensure that the same number (and order) of arguments are returned - # regardless of the gdbus-codegen (glib) version being used - targets = [ct, ct] - if 'docbook' in kwargs: - targets.append(ct) - objects = [ct] - return ModuleReturnValue(targets, objects) + return ModuleReturnValue(targets, targets) @permittedKwargs({'sources', 'c_template', 'h_template', 'install_header', 'install_dir', 'comments', 'identifier_prefix', 'symbol_prefix', 'eprod', 'vprod', diff --git a/mesonbuild/modules/pkgconfig.py b/mesonbuild/modules/pkgconfig.py index 68c2dc5..8684864 100644 --- a/mesonbuild/modules/pkgconfig.py +++ b/mesonbuild/modules/pkgconfig.py @@ -170,16 +170,18 @@ class DependenciesHelper: return ', '.join(result) def remove_dups(self): - def _fn(xs): + def _fn(xs, libs=False): # Remove duplicates whilst preserving original order result = [] for x in xs: - if x not in result: + # Don't de-dup unknown strings to avoid messing up arguments like: + # ['-framework', 'CoreAudio', '-framework', 'CoreMedia'] + if x not in result or (libs and (isinstance(x, str) and not x.endswith(('-l', '-L')))): result.append(x) return result - self.pub_libs = _fn(self.pub_libs) + self.pub_libs = _fn(self.pub_libs, True) self.pub_reqs = _fn(self.pub_reqs) - self.priv_libs = _fn(self.priv_libs) + self.priv_libs = _fn(self.priv_libs, True) self.priv_reqs = _fn(self.priv_reqs) self.cflags = _fn(self.cflags) @@ -311,14 +313,14 @@ class PkgConfigModule(ExtensionModule): 'install_dir', 'extra_cflags', 
'variables', 'url', 'd_module_versions'}) def generate(self, state, args, kwargs): if 'variables' in kwargs: - FeatureNew('custom pkgconfig variables', '0.41.0').use() + FeatureNew('custom pkgconfig variables', '0.41.0').use(state.subproject) default_version = state.project_version['version'] default_install_dir = None default_description = None default_name = None mainlib = None if len(args) == 1: - FeatureNew('pkgconfig.generate optional positional argument', '0.46.0').use() + FeatureNew('pkgconfig.generate optional positional argument', '0.46.0').use(state.subproject) mainlib = getattr(args[0], 'held_object', args[0]) if not isinstance(mainlib, (build.StaticLibrary, build.SharedLibrary)): raise mesonlib.MesonException('Pkgconfig_gen first positional argument must be a library object') diff --git a/mesonbuild/modules/python.py b/mesonbuild/modules/python.py index b30a1e6..9a90fe9 100644 --- a/mesonbuild/modules/python.py +++ b/mesonbuild/modules/python.py @@ -19,7 +19,6 @@ from pathlib import Path from .. import mesonlib from . import ExtensionModule from mesonbuild.modules import ModuleReturnValue -from . import permittedSnippetKwargs from ..interpreterbase import ( noPosargs, noKwargs, permittedKwargs, InterpreterObject, InvalidArguments, @@ -51,7 +50,6 @@ def run_command(python, command): class PythonDependency(ExternalDependency): - @FeatureNew('Python Module', '0.46.0') def __init__(self, python_holder, environment, kwargs): super().__init__('python', environment, None, kwargs) self.name = 'python' @@ -125,7 +123,7 @@ class PythonDependency(ExternalDependency): libname += self.variables['ABIFLAGS'] libdirs = [] - largs = self.compiler.find_library(libname, environment, libdirs) + largs = self.clib_compiler.find_library(libname, environment, libdirs) self.is_found = largs is not None @@ -284,7 +282,7 @@ class PythonInstallation(ExternalProgramHolder, InterpreterObject): self.platform = run_command(python, "import sysconfig; print (sysconfig.get_platform())") self.is_pypy = json.loads(run_command(python, IS_PYPY_COMMAND)) - @permittedSnippetKwargs(mod_kwargs) + @permittedKwargs(mod_kwargs) def extension_module(self, interpreter, state, args, kwargs): if 'subdir' in kwargs and 'install_dir' in kwargs: raise InvalidArguments('"subdir" and "install_dir" are mutually exclusive') @@ -312,7 +310,7 @@ class PythonInstallation(ExternalProgramHolder, InterpreterObject): dep = PythonDependency(self, interpreter.environment, kwargs) return interpreter.holderify(dep) - @permittedSnippetKwargs(['pure', 'subdir']) + @permittedKwargs(['pure', 'subdir']) def install_sources(self, interpreter, state, args, kwargs): pure = kwargs.pop('pure', False) if not isinstance(pure, bool): @@ -433,6 +431,8 @@ class PythonInstallation(ExternalProgramHolder, InterpreterObject): class PythonModule(ExtensionModule): + + @FeatureNew('Python Module', '0.46.0') def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.snippets.add('find_installation') @@ -450,7 +450,7 @@ class PythonModule(ExtensionModule): else: return None - @permittedSnippetKwargs(['required']) + @permittedKwargs(['required']) def find_installation(self, interpreter, state, args, kwargs): required = kwargs.get('required', True) if not isinstance(required, bool): diff --git a/mesonbuild/modules/python3.py b/mesonbuild/modules/python3.py index 79da29a..3cfc689 100644 --- a/mesonbuild/modules/python3.py +++ b/mesonbuild/modules/python3.py @@ -17,8 +17,7 @@ from .. import mesonlib, dependencies from . 
import ExtensionModule from mesonbuild.modules import ModuleReturnValue -from . import permittedSnippetKwargs -from ..interpreterbase import noKwargs +from ..interpreterbase import noKwargs, permittedKwargs from ..build import known_shmod_kwargs @@ -27,7 +26,7 @@ class Python3Module(ExtensionModule): super().__init__(*args, **kwargs) self.snippets.add('extension_module') - @permittedSnippetKwargs(known_shmod_kwargs) + @permittedKwargs(known_shmod_kwargs) def extension_module(self, interpreter, state, args, kwargs): if 'name_prefix' in kwargs: raise mesonlib.MesonException('Name_prefix is set automatically, specifying it is forbidden.') diff --git a/mesonbuild/modules/windows.py b/mesonbuild/modules/windows.py index 7dd87f2..19f3e2b 100644 --- a/mesonbuild/modules/windows.py +++ b/mesonbuild/modules/windows.py @@ -15,12 +15,14 @@ import os from .. import mlog -from .. import mesonlib, dependencies, build +from .. import mesonlib, build from ..mesonlib import MesonException, extract_as_list from . import get_include_args from . import ModuleReturnValue from . import ExtensionModule +from ..interpreter import CustomTargetHolder from ..interpreterbase import permittedKwargs, FeatureNewKwargs +from ..dependencies import ExternalProgram class WindowsModule(ExtensionModule): @@ -30,13 +32,17 @@ class WindowsModule(ExtensionModule): return compilers[l] raise MesonException('Resource compilation requires a C or C++ compiler.') - @FeatureNewKwargs('windows.compile_resources', '0.47.0', ['depend_files']) - @permittedKwargs({'args', 'include_directories', 'depend_files'}) + @FeatureNewKwargs('windows.compile_resources', '0.47.0', ['depend_files', 'depends']) + @permittedKwargs({'args', 'include_directories', 'depend_files', 'depends'}) def compile_resources(self, state, args, kwargs): comp = self.detect_compiler(state.compilers) extra_args = mesonlib.stringlistify(kwargs.get('args', [])) - wrc_deps = extract_as_list(kwargs, 'depend_files', pop = True) + wrc_depend_files = extract_as_list(kwargs, 'depend_files', pop = True) + wrc_depends = extract_as_list(kwargs, 'depends', pop = True) + for d in wrc_depends: + if isinstance(d, CustomTargetHolder): + extra_args += get_include_args([d.outdir_include()]) inc_dirs = extract_as_list(kwargs, 'include_directories', pop = True) for incd in inc_dirs: if not isinstance(incd.held_object, (str, build.IncludeDirs)): @@ -44,7 +50,7 @@ class WindowsModule(ExtensionModule): extra_args += get_include_args(inc_dirs) if comp.id == 'msvc': - rescomp = dependencies.ExternalProgram('rc', silent=True) + rescomp = ExternalProgram('rc', silent=True) res_args = extra_args + ['/nologo', '/fo@OUTPUT@', '@INPUT@'] suffix = 'res' else: @@ -53,22 +59,23 @@ class WindowsModule(ExtensionModule): for arg in extra_args: if ' ' in arg: mlog.warning(m.format(arg)) - rescomp_name = None + rescomp = None # FIXME: Does not handle `native: true` executables, see # https://github.com/mesonbuild/meson/issues/1531 if state.environment.is_cross_build(): # If cross compiling see if windres has been specified in the # cross file before trying to find it another way. - rescomp_name = state.environment.cross_info.config['binaries'].get('windres') - if rescomp_name is None: + cross_info = state.environment.cross_info + rescomp = ExternalProgram.from_cross_info(cross_info, 'windres') + if not rescomp or not rescomp.found(): # Pick-up env var WINDRES if set. This is often used for # specifying an arch-specific windres. 
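An editorial aside, not part of the patch: the hunk above settles the resource-compiler lookup order for non-MSVC builds: a windres named in the cross file wins, then the WINDRES environment variable, then plain 'windres' from PATH. A minimal sketch, with shutil.which standing in for ExternalProgram and a plain dict for the cross file's [binaries] section:

```python
# Editorial sketch (not part of the patch) of the windres lookup order.
import os
import shutil

def find_windres(is_cross_build, cross_binaries):
    if is_cross_build:
        # 1. Binary named in the cross file, if any and if it can be found.
        candidate = cross_binaries.get('windres')
        if candidate and shutil.which(candidate):
            return candidate
    # 2. WINDRES environment variable, else 3. plain 'windres' from PATH.
    return os.environ.get('WINDRES', 'windres')

print(find_windres(True, {'windres': 'x86_64-w64-mingw32-windres'}))
```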
- rescomp_name = os.environ.get('WINDRES', 'windres') - rescomp = dependencies.ExternalProgram(rescomp_name, silent=True) + rescomp = ExternalProgram(os.environ.get('WINDRES', 'windres'), silent=True) res_args = extra_args + ['@INPUT@', '@OUTPUT@'] suffix = 'o' if not rescomp.found(): - raise MesonException('Could not find Windows resource compiler "%s".' % rescomp_name) + raise MesonException('Could not find Windows resource compiler {!r}' + ''.format(rescomp.get_path())) res_targets = [] @@ -85,7 +92,8 @@ class WindowsModule(ExtensionModule): 'output': '@BASENAME@.' + suffix, 'input': [src], 'command': [rescomp] + res_args, - 'depend_files': wrc_deps, + 'depend_files': wrc_depend_files, + 'depends': wrc_depends, } if isinstance(src, str): diff --git a/mesonbuild/optinterpreter.py b/mesonbuild/optinterpreter.py index 4207f45..94efbcf 100644 --- a/mesonbuild/optinterpreter.py +++ b/mesonbuild/optinterpreter.py @@ -18,7 +18,6 @@ import functools from . import mparser from . import coredata from . import mesonlib -from .interpreterbase import FeatureNew from . import compilers forbidden_option_names = coredata.get_builtin_options() @@ -94,7 +93,9 @@ def IntegerParser(name, description, kwargs): kwargs['value'], kwargs.get('yield', coredata.default_yielding)) -@FeatureNew('array type option()', '0.44.0') +# FIXME: Cannot use FeatureNew while parsing options because we parse it before +# reading options in project(). See func_project() in interpreter.py +#@FeatureNew('array type option()', '0.44.0') @permitted_kwargs({'value', 'yield', 'choices'}) def string_array_parser(name, description, kwargs): if 'choices' in kwargs: @@ -116,11 +117,19 @@ def string_array_parser(name, description, kwargs): choices=choices, yielding=kwargs.get('yield', coredata.default_yielding)) +@permitted_kwargs({'value', 'yield'}) +def FeatureParser(name, description, kwargs): + return coredata.UserFeatureOption(name, + description, + kwargs.get('value', 'enabled'), + yielding=kwargs.get('yield', coredata.default_yielding)) + option_types = {'string': StringParser, 'boolean': BooleanParser, 'combo': ComboParser, 'integer': IntegerParser, 'array': string_array_parser, + 'feature': FeatureParser, } class OptionInterpreter: @@ -180,8 +189,11 @@ class OptionInterpreter: raise OptionException('Only calls to option() are allowed in option files.') (posargs, kwargs) = self.reduce_arguments(node.args) - if 'yield' in kwargs: - FeatureNew('option yield', '0.45.0').use() + # FIXME: Cannot use FeatureNew while parsing options because we parse + # it before reading options in project(). See func_project() in + # interpreter.py + #if 'yield' in kwargs: + # FeatureNew('option yield', '0.45.0').use(self.subproject) if 'type' not in kwargs: raise OptionException('Option call missing mandatory "type" keyword argument') diff --git a/mesonbuild/scripts/depfixer.py b/mesonbuild/scripts/depfixer.py index 1d2cc60..d3d3028 100644 --- a/mesonbuild/scripts/depfixer.py +++ b/mesonbuild/scripts/depfixer.py @@ -364,7 +364,7 @@ def get_darwin_rpaths_to_remove(fname): result.append(rp) return result -def fix_darwin(fname, new_rpath): +def fix_darwin(fname, new_rpath, final_path, install_name_mappings): try: rpaths = get_darwin_rpaths_to_remove(fname) except subprocess.CalledProcessError: @@ -372,30 +372,55 @@ def fix_darwin(fname, new_rpath): # non-executable target. Just return. 
return try: + args = [] if rpaths: - args = [] for rp in rpaths: args += ['-delete_rpath', rp] subprocess.check_call(['install_name_tool', fname] + args, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL) + args = [] if new_rpath: - subprocess.check_call(['install_name_tool', '-add_rpath', new_rpath, fname], + args += ['-add_rpath', new_rpath] + # Rewrite -install_name @rpath/libfoo.dylib to /path/to/libfoo.dylib + if fname.endswith('dylib'): + args += ['-id', final_path] + if install_name_mappings: + for old, new in install_name_mappings.items(): + args += ['-change', old, new] + if args: + subprocess.check_call(['install_name_tool', fname] + args, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL) except Exception as e: raise sys.exit(0) -def fix_rpath(fname, new_rpath, verbose=True): +def fix_jar(fname): + subprocess.check_call(['jar', 'xfv', fname, 'META-INF/MANIFEST.MF']) + with open('META-INF/MANIFEST.MF', 'r+') as f: + lines = f.readlines() + f.seek(0) + for line in lines: + if not line.startswith('Class-Path:'): + f.write(line) + f.truncate() + subprocess.check_call(['jar', 'ufm', fname, 'META-INF/MANIFEST.MF']) + +def fix_rpath(fname, new_rpath, final_path, install_name_mappings, verbose=True): + # Static libraries never have rpaths + if fname.endswith('.a'): + return try: + if fname.endswith('.jar'): + fix_jar(fname) + return fix_elf(fname, new_rpath, verbose) - return 0 + return except SystemExit as e: if isinstance(e.code, int) and e.code == 0: pass else: raise if shutil.which('install_name_tool'): - fix_darwin(fname, new_rpath) - return 0 + fix_darwin(fname, new_rpath, final_path, install_name_mappings) diff --git a/mesonbuild/scripts/gtkdochelper.py b/mesonbuild/scripts/gtkdochelper.py index fedcc47..4c5cbdf 100644 --- a/mesonbuild/scripts/gtkdochelper.py +++ b/mesonbuild/scripts/gtkdochelper.py @@ -117,11 +117,17 @@ def build_gtkdoc(source_root, build_root, doc_subdir, src_subdirs, scan_cmd += scan_args gtkdoc_run_check(scan_cmd, abs_out) + # Use the generated types file when available, otherwise gobject_typesfile + # would often be a path to source dir instead of build dir. + if '--rebuild-types' in scan_args: + gobject_typesfile = os.path.join(abs_out, module + '.types') + if gobject_typesfile: scanobjs_cmd = ['gtkdoc-scangobj'] + scanobjs_args + ['--types=' + gobject_typesfile, '--module=' + module, '--cflags=' + cflags, '--ldflags=' + ldflags, + '--cc=' + cc, '--ld=' + ld, '--output-dir=' + abs_out] diff --git a/mesonbuild/scripts/meson_install.py b/mesonbuild/scripts/meson_install.py deleted file mode 100644 index 3b3be11..0000000 --- a/mesonbuild/scripts/meson_install.py +++ /dev/null @@ -1,438 +0,0 @@ -# Copyright 2013-2014 The Meson development team - -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at - -# http://www.apache.org/licenses/LICENSE-2.0 - -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import sys, pickle, os, shutil, subprocess, gzip, errno -import shlex -from glob import glob -from . import depfixer -from . 
import destdir_join -from ..mesonlib import is_windows, Popen_safe -from __main__ import __file__ as main_file - -install_log_file = None -selinux_updates = [] - -class DirMaker: - def __init__(self): - self.dirs = [] - - def makedirs(self, path, exist_ok=False): - dirname = os.path.normpath(path) - dirs = [] - while dirname != os.path.dirname(dirname): - if not os.path.exists(dirname): - dirs.append(dirname) - dirname = os.path.dirname(dirname) - os.makedirs(path, exist_ok=exist_ok) - - # store the directories in creation order, with the parent directory - # before the child directories. Future calls of makedir() will not - # create the parent directories, so the last element in the list is - # the last one to be created. That is the first one to be removed on - # __exit__ - dirs.reverse() - self.dirs += dirs - - def __enter__(self): - return self - - def __exit__(self, type, value, traceback): - self.dirs.reverse() - for d in self.dirs: - append_to_log(d) - -def is_executable(path): - '''Checks whether any of the "x" bits are set in the source file mode.''' - return bool(os.stat(path).st_mode & 0o111) - -def sanitize_permissions(path, umask): - if umask is None: - return - new_perms = 0o777 if is_executable(path) else 0o666 - new_perms &= ~umask - try: - os.chmod(path, new_perms) - except PermissionError as e: - msg = '{!r}: Unable to set permissions {!r}: {}, ignoring...' - print(msg.format(path, new_perms, e.strerror)) - -def set_mode(path, mode, default_umask): - if mode is None or (mode.perms_s or mode.owner or mode.group) is None: - # Just sanitize permissions with the default umask - sanitize_permissions(path, default_umask) - return - # No chown() on Windows, and must set one of owner/group - if not is_windows() and (mode.owner or mode.group) is not None: - try: - shutil.chown(path, mode.owner, mode.group) - except PermissionError as e: - msg = '{!r}: Unable to set owner {!r} and group {!r}: {}, ignoring...' - print(msg.format(path, mode.owner, mode.group, e.strerror)) - except LookupError: - msg = '{!r}: Non-existent owner {!r} or group {!r}: ignoring...' - print(msg.format(path, mode.owner, mode.group)) - except OSError as e: - if e.errno == errno.EINVAL: - msg = '{!r}: Non-existent numeric owner {!r} or group {!r}: ignoring...' - print(msg.format(path, mode.owner, mode.group)) - else: - raise - # Must set permissions *after* setting owner/group otherwise the - # setuid/setgid bits will get wiped by chmod - # NOTE: On Windows you can set read/write perms; the rest are ignored - if mode.perms_s is not None: - try: - os.chmod(path, mode.perms) - except PermissionError as e: - msg = '{!r}: Unable to set permissions {!r}: {}, ignoring...' - print(msg.format(path, mode.perms_s, e.strerror)) - else: - sanitize_permissions(path, default_umask) - -def restore_selinux_contexts(): - ''' - Restores the SELinux context for files in @selinux_updates - - If $DESTDIR is set, do not warn if the call fails. - ''' - try: - subprocess.check_call(['selinuxenabled']) - except (FileNotFoundError, PermissionError, subprocess.CalledProcessError) as e: - # If we don't have selinux or selinuxenabled returned 1, failure - # is ignored quietly. - return - - if not shutil.which('restorecon'): - # If we don't have restorecon, failure is ignored quietly. 
- return - - with subprocess.Popen(['restorecon', '-F', '-f-', '-0'], - stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE) as proc: - out, err = proc.communicate(input=b'\0'.join(os.fsencode(f) for f in selinux_updates) + b'\0') - if proc.returncode != 0 and not os.environ.get('DESTDIR'): - print('Failed to restore SELinux context of installed files...', - 'Standard output:', out.decode(), - 'Standard error:', err.decode(), sep='\n') - -def append_to_log(line): - install_log_file.write(line) - if not line.endswith('\n'): - install_log_file.write('\n') - install_log_file.flush() - -def do_copyfile(from_file, to_file): - if not os.path.isfile(from_file): - raise RuntimeError('Tried to install something that isn\'t a file:' - '{!r}'.format(from_file)) - # copyfile fails if the target file already exists, so remove it to - # allow overwriting a previous install. If the target is not a file, we - # want to give a readable error. - if os.path.exists(to_file): - if not os.path.isfile(to_file): - raise RuntimeError('Destination {!r} already exists and is not ' - 'a file'.format(to_file)) - os.unlink(to_file) - shutil.copyfile(from_file, to_file) - shutil.copystat(from_file, to_file) - selinux_updates.append(to_file) - append_to_log(to_file) - -def do_copydir(data, src_dir, dst_dir, exclude, install_mode): - ''' - Copies the contents of directory @src_dir into @dst_dir. - - For directory - /foo/ - bar/ - excluded - foobar - file - do_copydir(..., '/foo', '/dst/dir', {'bar/excluded'}, None) creates - /dst/ - dir/ - bar/ - foobar - file - - Args: - src_dir: str, absolute path to the source directory - dst_dir: str, absolute path to the destination directory - exclude: (set(str), set(str)), tuple of (exclude_files, exclude_dirs), - each element of the set is a path relative to src_dir. - install_mode: FileMode object, or None to use defaults. - ''' - if not os.path.isabs(src_dir): - raise ValueError('src_dir must be absolute, got %s' % src_dir) - if not os.path.isabs(dst_dir): - raise ValueError('dst_dir must be absolute, got %s' % dst_dir) - if exclude is not None: - exclude_files, exclude_dirs = exclude - else: - exclude_files = exclude_dirs = set() - for root, dirs, files in os.walk(src_dir): - assert os.path.isabs(root) - for d in dirs[:]: - abs_src = os.path.join(root, d) - filepart = os.path.relpath(abs_src, start=src_dir) - abs_dst = os.path.join(dst_dir, filepart) - # Remove these so they aren't visited by os.walk at all. - if filepart in exclude_dirs: - dirs.remove(d) - continue - if os.path.isdir(abs_dst): - continue - if os.path.exists(abs_dst): - print('Tried to copy directory %s but a file of that name already exists.' % abs_dst) - sys.exit(1) - data.dirmaker.makedirs(abs_dst) - shutil.copystat(abs_src, abs_dst) - sanitize_permissions(abs_dst, data.install_umask) - for f in files: - abs_src = os.path.join(root, f) - filepart = os.path.relpath(abs_src, start=src_dir) - if filepart in exclude_files: - continue - abs_dst = os.path.join(dst_dir, filepart) - if os.path.isdir(abs_dst): - print('Tried to copy file %s but a directory of that name already exists.' 
% abs_dst) - if os.path.exists(abs_dst): - os.unlink(abs_dst) - parent_dir = os.path.dirname(abs_dst) - if not os.path.isdir(parent_dir): - os.mkdir(parent_dir) - shutil.copystat(os.path.dirname(abs_src), parent_dir) - shutil.copy2(abs_src, abs_dst, follow_symlinks=False) - set_mode(abs_dst, install_mode, data.install_umask) - append_to_log(abs_dst) - -def get_destdir_path(d, path): - if os.path.isabs(path): - output = destdir_join(d.destdir, path) - else: - output = os.path.join(d.fullprefix, path) - return output - -def do_install(log_dir, datafilename): - global install_log_file - - with open(datafilename, 'rb') as ifile: - d = pickle.load(ifile) - d.destdir = os.environ.get('DESTDIR', '') - d.fullprefix = destdir_join(d.destdir, d.prefix) - - if d.install_umask is not None: - os.umask(d.install_umask) - - with open(os.path.join(log_dir, 'install-log.txt'), 'w') as lf: - install_log_file = lf - append_to_log('# List of files installed by Meson') - append_to_log('# Does not contain files installed by custom scripts.') - - try: - d.dirmaker = DirMaker() - with d.dirmaker: - install_subdirs(d) # Must be first, because it needs to delete the old subtree. - install_targets(d) - install_headers(d) - install_man(d) - install_data(d) - restore_selinux_contexts() - run_install_script(d) - except PermissionError: - if shutil.which('pkexec') is not None and 'PKEXEC_UID' not in os.environ: - print('Installation failed due to insufficient permissions.') - print('Attempting to use polkit to gain elevated privileges...') - os.execlp('pkexec', 'pkexec', sys.executable, main_file, *sys.argv[1:], - os.getcwd()) - else: - raise - - -def install_subdirs(d): - for (src_dir, dst_dir, mode, exclude) in d.install_subdirs: - full_dst_dir = get_destdir_path(d, dst_dir) - print('Installing subdir %s to %s' % (src_dir, full_dst_dir)) - d.dirmaker.makedirs(full_dst_dir, exist_ok=True) - do_copydir(d, src_dir, full_dst_dir, exclude, mode) - -def install_data(d): - for i in d.data: - fullfilename = i[0] - outfilename = get_destdir_path(d, i[1]) - mode = i[2] - outdir = os.path.dirname(outfilename) - d.dirmaker.makedirs(outdir, exist_ok=True) - print('Installing %s to %s' % (fullfilename, outdir)) - do_copyfile(fullfilename, outfilename) - set_mode(outfilename, mode, d.install_umask) - -def install_man(d): - for m in d.man: - full_source_filename = m[0] - outfilename = get_destdir_path(d, m[1]) - outdir = os.path.dirname(outfilename) - d.dirmaker.makedirs(outdir, exist_ok=True) - install_mode = m[2] - print('Installing %s to %s' % (full_source_filename, outdir)) - if outfilename.endswith('.gz') and not full_source_filename.endswith('.gz'): - with open(outfilename, 'wb') as of: - with open(full_source_filename, 'rb') as sf: - # Set mtime and filename for reproducibility. 
- with gzip.GzipFile(fileobj=of, mode='wb', filename='', mtime=0) as gz: - gz.write(sf.read()) - shutil.copystat(full_source_filename, outfilename) - append_to_log(outfilename) - else: - do_copyfile(full_source_filename, outfilename) - set_mode(outfilename, install_mode, d.install_umask) - -def install_headers(d): - for t in d.headers: - fullfilename = t[0] - fname = os.path.basename(fullfilename) - outdir = get_destdir_path(d, t[1]) - outfilename = os.path.join(outdir, fname) - install_mode = t[2] - print('Installing %s to %s' % (fname, outdir)) - d.dirmaker.makedirs(outdir, exist_ok=True) - do_copyfile(fullfilename, outfilename) - set_mode(outfilename, install_mode, d.install_umask) - -def run_install_script(d): - env = {'MESON_SOURCE_ROOT': d.source_dir, - 'MESON_BUILD_ROOT': d.build_dir, - 'MESON_INSTALL_PREFIX': d.prefix, - 'MESON_INSTALL_DESTDIR_PREFIX': d.fullprefix, - 'MESONINTROSPECT': ' '.join([shlex.quote(x) for x in d.mesonintrospect]), - } - - child_env = os.environ.copy() - child_env.update(env) - - for i in d.install_scripts: - script = i['exe'] - args = i['args'] - name = ' '.join(script + args) - print('Running custom install script {!r}'.format(name)) - try: - rc = subprocess.call(script + args, env=child_env) - if rc != 0: - sys.exit(rc) - except OSError: - print('Failed to run install script {!r}'.format(name)) - sys.exit(1) - -def check_for_stampfile(fname): - '''Some languages e.g. Rust have output files - whose names are not known at configure time. - Check if this is the case and return the real - file instead.''' - if fname.endswith('.so') or fname.endswith('.dll'): - if os.stat(fname).st_size == 0: - (base, suffix) = os.path.splitext(fname) - files = glob(base + '-*' + suffix) - if len(files) > 1: - print("Stale dynamic library files in build dir. Can't install.") - sys.exit(1) - if len(files) == 1: - return files[0] - elif fname.endswith('.a') or fname.endswith('.lib'): - if os.stat(fname).st_size == 0: - (base, suffix) = os.path.splitext(fname) - files = glob(base + '-*' + '.rlib') - if len(files) > 1: - print("Stale static library files in build dir. 
Can't install.") - sys.exit(1) - if len(files) == 1: - return files[0] - return fname - -def install_targets(d): - for t in d.targets: - fname = check_for_stampfile(t[0]) - outdir = get_destdir_path(d, t[1]) - outname = os.path.join(outdir, os.path.basename(fname)) - aliases = t[2] - should_strip = t[3] - install_rpath = t[4] - install_mode = t[5] - print('Installing %s to %s' % (fname, outname)) - d.dirmaker.makedirs(outdir, exist_ok=True) - if not os.path.exists(fname): - raise RuntimeError('File {!r} could not be found'.format(fname)) - elif os.path.isfile(fname): - do_copyfile(fname, outname) - set_mode(outname, install_mode, d.install_umask) - if should_strip and d.strip_bin is not None: - if fname.endswith('.jar'): - print('Not stripping jar target:', os.path.basename(fname)) - continue - print('Stripping target {!r}'.format(fname)) - ps, stdo, stde = Popen_safe(d.strip_bin + [outname]) - if ps.returncode != 0: - print('Could not strip file.\n') - print('Stdout:\n%s\n' % stdo) - print('Stderr:\n%s\n' % stde) - sys.exit(1) - pdb_filename = os.path.splitext(fname)[0] + '.pdb' - if not should_strip and os.path.exists(pdb_filename): - pdb_outname = os.path.splitext(outname)[0] + '.pdb' - print('Installing pdb file %s to %s' % (pdb_filename, pdb_outname)) - do_copyfile(pdb_filename, pdb_outname) - set_mode(pdb_outname, install_mode, d.install_umask) - elif os.path.isdir(fname): - fname = os.path.join(d.build_dir, fname.rstrip('/')) - outname = os.path.join(outdir, os.path.basename(fname)) - do_copydir(d, fname, outname, None, install_mode) - else: - raise RuntimeError('Unknown file type for {!r}'.format(fname)) - printed_symlink_error = False - for alias, to in aliases.items(): - try: - symlinkfilename = os.path.join(outdir, alias) - try: - os.unlink(symlinkfilename) - except FileNotFoundError: - pass - os.symlink(to, symlinkfilename) - append_to_log(symlinkfilename) - except (NotImplementedError, OSError): - if not printed_symlink_error: - print("Symlink creation does not work on this platform. " - "Skipping all symlinking.") - printed_symlink_error = True - if os.path.isfile(outname): - try: - depfixer.fix_rpath(outname, install_rpath, False) - except SystemExit as e: - if isinstance(e.code, int) and e.code == 0: - pass - else: - raise - -def run(args): - if len(args) != 1 and len(args) != 2: - print('Installer script for Meson. Do not run on your own, mmm\'kay?') - print('meson_install.py [install info file]') - datafilename = args[0] - private_dir = os.path.dirname(datafilename) - log_dir = os.path.join(private_dir, '../meson-logs') - if len(args) == 2: - os.chdir(args[1]) - do_install(log_dir, datafilename) - install_log_file = None - return 0 - -if __name__ == '__main__': - sys.exit(run(sys.argv[1:])) diff --git a/mesonconf.py b/mesonconf.py deleted file mode 100755 index 894ec01..0000000 --- a/mesonconf.py +++ /dev/null @@ -1,20 +0,0 @@ -#!/usr/bin/env python3 - -# Copyright 2016 The Meson development team - -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at - -# http://www.apache.org/licenses/LICENSE-2.0 - -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import sys - -if __name__ == '__main__': - sys.exit('Error: This executable is no more. Use "meson configure" instead.') diff --git a/mesonintrospect.py b/mesonintrospect.py deleted file mode 100755 index 9ef1535..0000000 --- a/mesonintrospect.py +++ /dev/null @@ -1,20 +0,0 @@ -#!/usr/bin/env python3 - -# Copyright 2016 The Meson development team - -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at - -# http://www.apache.org/licenses/LICENSE-2.0 - -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import sys - -if __name__ == '__main__': - sys.exit('Error: This executable is no more. Use "meson introspect" instead.') diff --git a/mesonrewriter.py b/mesonrewriter.py deleted file mode 100755 index ef47e57..0000000 --- a/mesonrewriter.py +++ /dev/null @@ -1,29 +0,0 @@ -#!/usr/bin/env python3 -# Copyright 2016 The Meson development team - -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at - -# http://www.apache.org/licenses/LICENSE-2.0 - -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# This class contains the basic functionality needed to run any interpreter -# or an interpreter-based tool. - -# This tool is used to manipulate an existing Meson build definition. -# -# - add a file to a target -# - remove files from a target -# - move targets -# - reindent? - -import sys - -if __name__ == '__main__': - sys.exit('Error: This executable is no more. Use "meson rewrite" instead.') diff --git a/mesontest.py b/mesontest.py deleted file mode 100755 index e973d56..0000000 --- a/mesontest.py +++ /dev/null @@ -1,22 +0,0 @@ -#!/usr/bin/env python3 - -# Copyright 2016-2017 The Meson development team - -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at - -# http://www.apache.org/licenses/LICENSE-2.0 - -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# A tool to run tests in many different ways. - -import sys - -if __name__ == '__main__': - sys.exit('Error: This executable is no more. Use "meson test" instead.') diff --git a/msi/createmsi.py b/msi/createmsi.py index 8a1bc5b..f21e73a 100755 --- a/msi/createmsi.py +++ b/msi/createmsi.py @@ -85,6 +85,7 @@ class PackageGenerator: main_stage, ninja_stage = self.staging_dirs modules = [os.path.splitext(os.path.split(x)[1])[0] for x in glob(os.path.join('mesonbuild/modules/*'))] modules = ['mesonbuild.modules.' 
+ x for x in modules if not x.startswith('_')] + modules += ['distutils.version'] modulestr = ','.join(modules) python = shutil.which('python') cxfreeze = os.path.join(os.path.dirname(python), "Scripts", "cxfreeze") @@ -130,6 +131,9 @@ class PackageGenerator: 'SummaryCodepage': '1252', }) + ET.SubElement(product, 'MajorUpgrade', + {'DowngradeErrorMessage': 'A newer version of Meson is already installed.'}) + if self.bytesize == 64: package.set('Platform', 'x64') ET.SubElement(product, 'Media', { diff --git a/run_unittests.py b/run_unittests.py index 7c68904..dd109aa 100755 --- a/run_unittests.py +++ b/run_unittests.py @@ -35,9 +35,9 @@ import mesonbuild.coredata import mesonbuild.modules.gnome from mesonbuild.interpreter import ObjectHolder from mesonbuild.mesonlib import ( - is_windows, is_osx, is_cygwin, is_dragonflybsd, + is_windows, is_osx, is_cygwin, is_dragonflybsd, is_openbsd, windows_proof_rmtree, python_command, version_compare, - BuildDirLock + grab_leading_numbers, BuildDirLock ) from mesonbuild.environment import Environment, detect_ninja from mesonbuild.mesonlib import MesonException, EnvironmentException @@ -74,6 +74,45 @@ def get_soname(fname): def get_rpath(fname): return get_dynamic_section_entry(fname, r'(?:rpath|runpath)') +def is_ci(): + if 'TRAVIS' in os.environ or 'APPVEYOR' in os.environ: + return True + return False + +def skipIfNoPkgconfig(f): + ''' + Skip this test if no pkg-config is found, unless we're on Travis or + Appveyor CI. This allows users to run our test suite without having + pkg-config installed on, f.ex., macOS, while ensuring that our CI does not + silently skip the test because of misconfiguration. + + Note: Yes, we provide pkg-config even while running Windows CI + ''' + def wrapped(*args, **kwargs): + if not is_ci() and shutil.which('pkg-config') is None: + raise unittest.SkipTest('pkg-config not found') + return f(*args, **kwargs) + return wrapped + +class PatchModule: + ''' + Fancy monkey-patching! Whee! Can't use mock.patch because it only + patches in the local namespace. 
+ ''' + def __init__(self, func, name, impl): + self.func = func + assert(isinstance(name, str)) + self.func_name = name + self.old_impl = None + self.new_impl = impl + + def __enter__(self): + self.old_impl = self.func + exec('{} = self.new_impl'.format(self.func_name)) + + def __exit__(self, *args): + exec('{} = self.old_impl'.format(self.func_name)) + class InternalTests(unittest.TestCase): @@ -476,6 +515,92 @@ class InternalTests(unittest.TestCase): deps.add_pub_reqs([mock]) self.assertEqual(deps.format_reqs(deps.pub_reqs), "some_name") + def _test_all_naming(self, cc, env, patterns, platform): + shr = patterns[platform]['shared'] + stc = patterns[platform]['static'] + p = cc.get_library_naming(env, 'shared') + self.assertEqual(p, shr) + p = cc.get_library_naming(env, 'static') + self.assertEqual(p, stc) + p = cc.get_library_naming(env, 'static-shared') + self.assertEqual(p, stc + shr) + p = cc.get_library_naming(env, 'shared-static') + self.assertEqual(p, shr + stc) + p = cc.get_library_naming(env, 'default') + self.assertEqual(p, shr + stc) + # Test find library by mocking up openbsd + if platform != 'openbsd': + return + with tempfile.TemporaryDirectory() as tmpdir: + with open(os.path.join(tmpdir, 'libfoo.so.6.0'), 'w') as f: + f.write('') + with open(os.path.join(tmpdir, 'libfoo.so.5.0'), 'w') as f: + f.write('') + with open(os.path.join(tmpdir, 'libfoo.so.54.0'), 'w') as f: + f.write('') + with open(os.path.join(tmpdir, 'libfoo.so.66a.0b'), 'w') as f: + f.write('') + with open(os.path.join(tmpdir, 'libfoo.so.70.0.so.1'), 'w') as f: + f.write('') + found = cc.find_library_real('foo', env, [tmpdir], '', 'default') + self.assertEqual(os.path.basename(found[0]), 'libfoo.so.54.0') + + def test_find_library_patterns(self): + ''' + Unit test for the library search patterns used by find_library() + ''' + unix_static = ['lib{}.a', '{}.a'] + msvc_static = ['lib{}.a', 'lib{}.lib', '{}.a', '{}.lib'] + # This is the priority list of pattern matching for library searching + patterns = {'openbsd': {'shared': ['lib{}.so', '{}.so', 'lib{}.so.[0-9]*.[0-9]*'], + 'static': unix_static}, + 'linux': {'shared': ['lib{}.so', '{}.so'], + 'static': unix_static}, + 'darwin': {'shared': ['lib{}.dylib', '{}.dylib'], + 'static': unix_static}, + 'cygwin': {'shared': ['cyg{}.dll', 'cyg{}.dll.a', 'lib{}.dll', + 'lib{}.dll.a', '{}.dll', '{}.dll.a'], + 'static': ['cyg{}.a'] + unix_static}, + 'windows-msvc': {'shared': ['lib{}.lib', '{}.lib'], + 'static': msvc_static}, + 'windows-mingw': {'shared': ['lib{}.dll.a', 'lib{}.lib', 'lib{}.dll', + '{}.dll.a', '{}.lib', '{}.dll'], + 'static': msvc_static}} + env = Environment('', '', get_fake_options('')) + cc = env.detect_c_compiler(False) + if is_osx(): + self._test_all_naming(cc, env, patterns, 'darwin') + elif is_cygwin(): + self._test_all_naming(cc, env, patterns, 'cygwin') + elif is_windows(): + if cc.get_id() == 'msvc': + self._test_all_naming(cc, env, patterns, 'windows-msvc') + else: + self._test_all_naming(cc, env, patterns, 'windows-mingw') + else: + self._test_all_naming(cc, env, patterns, 'linux') + # Mock OpenBSD since we don't have tests for it + true = lambda x, y: True + if not is_openbsd(): + with PatchModule(mesonbuild.compilers.c.for_openbsd, + 'mesonbuild.compilers.c.for_openbsd', true): + self._test_all_naming(cc, env, patterns, 'openbsd') + else: + self._test_all_naming(cc, env, patterns, 'openbsd') + with PatchModule(mesonbuild.compilers.c.for_darwin, + 'mesonbuild.compilers.c.for_darwin', true): + self._test_all_naming(cc, env, patterns, 'darwin') 
+ with PatchModule(mesonbuild.compilers.c.for_cygwin, + 'mesonbuild.compilers.c.for_cygwin', true): + self._test_all_naming(cc, env, patterns, 'cygwin') + with PatchModule(mesonbuild.compilers.c.for_windows, + 'mesonbuild.compilers.c.for_windows', true): + self._test_all_naming(cc, env, patterns, 'windows-mingw') + cc.id = 'msvc' + with PatchModule(mesonbuild.compilers.c.for_windows, + 'mesonbuild.compilers.c.for_windows', true): + self._test_all_naming(cc, env, patterns, 'windows-msvc') + class BasePlatformTests(unittest.TestCase): def setUp(self): @@ -484,7 +609,7 @@ class BasePlatformTests(unittest.TestCase): src_root = os.path.join(os.getcwd(), src_root) self.src_root = src_root self.prefix = '/usr' - self.libdir = os.path.join(self.prefix, 'lib') + self.libdir = 'lib' # Get the backend # FIXME: Extract this from argv? self.backend = getattr(Backend, os.environ.get('MESON_UNIT_TEST_BACKEND', 'ninja')) @@ -1102,12 +1227,18 @@ class AllPlatformTests(BasePlatformTests): testdir = os.path.join(self.common_test_dir, '137 build by default') self.init(testdir) self.build() - genfile = os.path.join(self.builddir, 'generated.dat') - exe = os.path.join(self.builddir, 'fooprog' + exe_suffix) - self.assertPathExists(genfile) - self.assertPathDoesNotExist(exe) + genfile1 = os.path.join(self.builddir, 'generated1.dat') + genfile2 = os.path.join(self.builddir, 'generated2.dat') + exe1 = os.path.join(self.builddir, 'fooprog' + exe_suffix) + exe2 = os.path.join(self.builddir, 'barprog' + exe_suffix) + self.assertPathExists(genfile1) + self.assertPathExists(genfile2) + self.assertPathDoesNotExist(exe1) + self.assertPathDoesNotExist(exe2) self.build(target=('fooprog' + exe_suffix)) - self.assertPathExists(exe) + self.assertPathExists(exe1) + self.build(target=('barprog' + exe_suffix)) + self.assertPathExists(exe2) def test_internal_include_order(self): testdir = os.path.join(self.common_test_dir, '138 include order') @@ -1656,6 +1787,7 @@ int main(int argc, char **argv) { if os.path.splitext(fname)[1] not in ['.c', '.h']: os.unlink(fname) + @skipIfNoPkgconfig def test_pkgconfig_static(self): ''' Test that the we prefer static libraries when `static: true` is @@ -1666,8 +1798,6 @@ int main(int argc, char **argv) { since system libraries -lm will never be found statically. 
https://github.com/mesonbuild/meson/issues/2785 ''' - if not shutil.which('pkg-config'): - raise unittest.SkipTest('pkg-config not found') (cc, stlinker, objext, shext) = self.detect_prebuild_env() testdir = os.path.join(self.unit_test_dir, '17 pkgconfig static') source = os.path.join(testdir, 'foo.c') @@ -1699,9 +1829,8 @@ int main(int argc, char **argv) { if os.path.splitext(fname)[1] not in ['.c', '.h', '.in']: os.unlink(fname) + @skipIfNoPkgconfig def test_pkgconfig_gen_escaping(self): - if not shutil.which('pkg-config'): - raise unittest.SkipTest('pkg-config not found') testdir = os.path.join(self.common_test_dir, '51 pkgconfig-gen') prefix = '/usr/with spaces' libdir = 'lib' @@ -1768,6 +1897,26 @@ int main(int argc, char **argv) { changed = get_opt() self.assertDictEqual(changed, expected) + def test_array_option_empty_equivalents(self): + """Array options treat -Dopt=[] and -Dopt= as equivalent.""" + def get_opt(): + opts = self.introspect('--buildoptions') + for x in opts: + if x.get('name') == 'list': + return x + raise Exception(opts) + + expected = { + 'name': 'list', + 'description': 'list', + 'type': 'array', + 'value': [], + } + tdir = os.path.join(self.unit_test_dir, '18 array option') + self.init(tdir, extra_args='-Dlist=') + original = get_opt() + self.assertDictEqual(original, expected) + def opt_has(self, name, value): res = self.introspect('--buildoptions') found = False @@ -1820,7 +1969,7 @@ int main(int argc, char **argv) { r'meson.build:6: WARNING: a warning of some sort', r'sub' + os.path.sep + r'meson.build:4: WARNING: subdir warning', r'meson.build:7: WARNING: Module unstable-simd has no backwards or forwards compatibility and might not exist in future releases.', - r"meson.build:11: WARNING: The variable(s) 'MISSING' in the input file conf.in are not present in the given configuration data.", + r"meson.build:11: WARNING: The variable(s) 'MISSING' in the input file 'conf.in' are not present in the given configuration data.", r'meson.build:1: WARNING: Passed invalid keyword argument "invalid".', ]: self.assertRegex(out, re.escape(expected)) @@ -2124,11 +2273,9 @@ recommended as it is not supported on some platforms''') self.assertEqual(obj.builtins['default_library'].value, 'shared') self.wipe() - # Should fail on unknown options - with self.assertRaises(subprocess.CalledProcessError) as cm: - self.init(testdir, extra_args=['-Dbad=1', '-Dfoo=2', '-Dwrong_link_args=foo']) - self.assertNotEqual(0, cm.exception.returncode) - self.assertIn('Unknown options: "bad, foo, wrong_link_args"', cm.exception.output) + # Should warn on unknown options + out = self.init(testdir, extra_args=['-Dbad=1', '-Dfoo=2', '-Dwrong_link_args=foo']) + self.assertIn('Unknown options: "bad, foo, wrong_link_args"', out) self.wipe() # Should fail on malformed option @@ -2190,6 +2337,7 @@ recommended as it is not supported on some platforms''') # they used to fail this test with Meson 0.46 an earlier versions. 
pass + @unittest.skipIf(not os.path.isdir('docs'), 'Doc dir not found, presumably because this is a tarball release.') def test_compiler_options_documented(self): ''' Test that C and C++ compiler options and base options are documented in @@ -2211,6 +2359,92 @@ recommended as it is not supported on some platforms''') self.assertIn(opt, md) self.assertNotIn('b_unknown', md) + @unittest.skipIf(not os.path.isdir('docs'), 'Doc dir not found, presumably because this is a tarball release.') + def test_cpu_families_documented(self): + with open("docs/markdown/Reference-tables.md") as f: + md = f.read() + self.assertIsNotNone(md) + + sections = list(re.finditer(r"^## (.+)$", md, re.MULTILINE)) + for s1, s2 in zip(sections[::2], sections[1::2]): + if s1.group(1) == "CPU families": + # Extract the content for this section + content = md[s1.end():s2.start()] + # Find the list entries + arches = [m.group(1) for m in re.finditer(r"^\| (\w+) +\|", content, re.MULTILINE)] + # Drop the header + arches = set(arches[1:]) + self.assertEqual(arches, set(mesonbuild.environment.known_cpu_families)) + + @unittest.skipIf(not os.path.isdir('docs'), 'Doc dir not found, presumably because this is a tarball release.') + def test_markdown_files_in_sitemap(self): + ''' + Test that each markdown files in docs/markdown is referenced in sitemap.txt + ''' + with open("docs/sitemap.txt") as f: + md = f.read() + self.assertIsNotNone(md) + toc = list(m.group(1) for m in re.finditer(r"^\s*(\w.*)$", md, re.MULTILINE)) + markdownfiles = [f.name for f in Path("docs/markdown").iterdir() if f.is_file() and f.suffix == '.md'] + exceptions = ['_Sidebar.md'] + for f in markdownfiles: + if f not in exceptions: + self.assertIn(f, toc) + + def test_feature_check_usage_subprojects(self): + testdir = os.path.join(self.unit_test_dir, '34 featurenew subprojects') + out = self.init(testdir) + # Parent project warns correctly + self.assertRegex(out, "WARNING: Project targetting '>=0.45'.*'0.47.0': dict") + # Subproject warns correctly + self.assertRegex(out, "|WARNING: Project targetting '>=0.40'.*'0.44.0': disabler") + # Subproject has a new-enough meson_version, no warning + self.assertNotRegex(out, "WARNING: Project targetting.*Python") + # Ensure a summary is printed in the subproject and the outer project + self.assertRegex(out, "|WARNING: Project specifies a minimum meson_version '>=0.40'") + self.assertRegex(out, "| * 0.44.0: {'disabler'}") + self.assertRegex(out, "WARNING: Project specifies a minimum meson_version '>=0.45'") + self.assertRegex(out, " * 0.47.0: {'dict'}") + + def test_configure_file_warnings(self): + testdir = os.path.join(self.common_test_dir, "16 configure file") + out = self.init(testdir) + self.assertRegex(out, "WARNING:.*'empty'.*config.h.in.*not present.*") + self.assertRegex(out, "WARNING:.*'FOO_BAR'.*nosubst-nocopy2.txt.in.*not present.*") + self.assertRegex(out, "WARNING:.*'empty'.*config.h.in.*not present.*") + self.assertRegex(out, "WARNING:.*empty configuration_data.*test.py.in") + # Warnings for configuration files that are overwritten. 
+ self.assertRegex(out, "WARNING:.*\"double_output.txt\".*overwrites") + self.assertRegex(out, "WARNING:.*\"subdir.double_output2.txt\".*overwrites") + self.assertNotRegex(out, "WARNING:.*no_write_conflict.txt.*overwrites") + # No warnings about empty configuration data objects passed to files with substitutions + self.assertNotRegex(out, "WARNING:.*empty configuration_data.*nosubst-nocopy1.txt.in") + self.assertNotRegex(out, "WARNING:.*empty configuration_data.*nosubst-nocopy2.txt.in") + with open(os.path.join(self.builddir, 'nosubst-nocopy1.txt'), 'rb') as f: + self.assertEqual(f.read().strip(), b'/* #undef FOO_BAR */') + with open(os.path.join(self.builddir, 'nosubst-nocopy2.txt'), 'rb') as f: + self.assertEqual(f.read().strip(), b'') + self.assertRegex(out, r"DEPRECATION:.*\['array'\] is invalid.*dict") + + def test_dirs(self): + with tempfile.TemporaryDirectory() as containing: + with tempfile.TemporaryDirectory(dir=containing) as srcdir: + mfile = os.path.join(srcdir, 'meson.build') + of = open(mfile, 'w') + of.write("project('foobar', 'c')\n") + of.close() + pc = subprocess.run(self.setup_command, + cwd=srcdir, + stdout=subprocess.PIPE, + stderr=subprocess.DEVNULL) + self.assertIn(b'Must specify at least one directory name', pc.stdout) + with tempfile.TemporaryDirectory(dir=srcdir) as builddir: + subprocess.run(self.setup_command, + check=True, + cwd=builddir, + stdout=subprocess.DEVNULL, + stderr=subprocess.DEVNULL) + class FailureTests(BasePlatformTests): ''' Tests that test failure conditions. Build files here should be dynamically @@ -2248,25 +2482,37 @@ class FailureTests(BasePlatformTests): # Must run in-process or we'll get a generic CalledProcessError self.init(self.srcdir, extra_args=extra_args, inprocess=True) - def assertMesonOutputs(self, contents, match, extra_args=None, langs=None): - ''' - Assert that running meson configure on the specified @contents outputs - something that matches regex @match. - ''' + def obtainMesonOutput(self, contents, match, extra_args, langs, meson_version): if langs is None: langs = [] with open(self.mbuild, 'w') as f: - f.write("project('output test', 'c', 'cpp')\n") + core_version = '.'.join([str(component) for component in grab_leading_numbers(mesonbuild.coredata.version)]) + meson_version = meson_version or core_version + f.write("project('output test', 'c', 'cpp', meson_version: '{}')\n".format(meson_version)) for lang in langs: f.write("add_languages('{}', required : false)\n".format(lang)) f.write(contents) # Run in-process for speed and consistency with assertMesonRaises - out = self.init(self.srcdir, extra_args=extra_args, inprocess=True) + return self.init(self.srcdir, extra_args=extra_args, inprocess=True) + + def assertMesonOutputs(self, contents, match, extra_args=None, langs=None, meson_version=None): + ''' + Assert that running meson configure on the specified @contents outputs + something that matches regex @match. + ''' + out = self.obtainMesonOutput(contents, match, extra_args, langs, meson_version) self.assertRegex(out, match) + def assertMesonDoesNotOutput(self, contents, match, extra_args=None, langs=None, meson_version=None): + ''' + Assert that running meson configure on the specified @contents does not output + something that matches regex @match. 
+ ''' + out = self.obtainMesonOutput(contents, match, extra_args, langs, meson_version) + self.assertNotRegex(out, match) + + @skipIfNoPkgconfig def test_dependency(self): - if not shutil.which('pkg-config'): - raise unittest.SkipTest('pkg-config not found') if subprocess.call(['pkg-config', '--exists', 'zlib']) != 0: raise unittest.SkipTest('zlib not found with pkg-config') a = (("dependency('zlib', method : 'fail')", "'fail' is invalid"), @@ -2408,6 +2654,23 @@ class FailureTests(BasePlatformTests): self.assertMesonRaises("dict = {3: 'foo'}", 'Key must be a string.*') + def test_using_too_recent_feature(self): + # Here we use a dict, which was introduced in 0.47.0 + self.assertMesonOutputs("dict = {}", + ".*WARNING.*Project targetting.*but.*", + meson_version='>= 0.46.0') + + def test_using_recent_feature(self): + # Same as above, except the meson version is now appropriate + self.assertMesonDoesNotOutput("dict = {}", + ".*WARNING.*Project targetting.*but.*", + meson_version='>= 0.47.0') + + def test_using_too_recent_feature_dependency(self): + self.assertMesonOutputs("dependency('pcap', required: false)", + ".*WARNING.*Project targetting.*but.*", + meson_version='>= 0.41.0') + class WindowsTests(BasePlatformTests): ''' @@ -2660,13 +2923,12 @@ class LinuxlikeTests(BasePlatformTests): self.assertIn(" -Werror ", vala_command) self.assertIn(" -Werror ", c_command) + @skipIfNoPkgconfig def test_qt5dependency_pkgconfig_detection(self): ''' Test that qt4 and qt5 detection with pkgconfig works. ''' # Verify Qt4 or Qt5 can be found with pkg-config - if not shutil.which('pkg-config'): - raise unittest.SkipTest('pkg-config not found') qt4 = subprocess.call(['pkg-config', '--exists', 'QtCore']) qt5 = subprocess.call(['pkg-config', '--exists', 'Qt5Core']) if qt4 != 0 or qt5 != 0: @@ -2750,7 +3012,8 @@ class LinuxlikeTests(BasePlatformTests): self._test_soname_impl(self.builddir, False) def test_installed_soname(self): - self._test_soname_impl(self.installdir + self.libdir, True) + libdir = self.installdir + os.path.join(self.prefix, self.libdir) + self._test_soname_impl(libdir, True) def test_compiler_check_flags_order(self): ''' @@ -3153,7 +3416,8 @@ class LinuxlikeTests(BasePlatformTests): c = '/usr/bin/cc' ar = '/usr/bin/ar' strip = '/usr/bin/ar' -sometool.py = '%s' +sometool.py = ['{0}'] +someothertool.py = '{0}' [properties] @@ -3162,7 +3426,7 @@ system = 'linux' cpu_family = 'arm' cpu = 'armv7' # Not sure if correct. 
endian = 'little' -''' % os.path.join(testdir, 'some_cross_tool.py')) +'''.format(os.path.join(testdir, 'some_cross_tool.py'))) crossfile.flush() self.meson_cross_file = crossfile.name self.init(testdir) @@ -3206,7 +3470,7 @@ endian = 'little' self.build() mesonbuild.modules.gnome.native_glib_version = None - @unittest.skipIf(shutil.which('pkg-config') is None, 'Pkg-config not found.') + @skipIfNoPkgconfig def test_pkgconfig_usage(self): testdir1 = os.path.join(self.unit_test_dir, '24 pkgconfig usage/dependency') testdir2 = os.path.join(self.unit_test_dir, '24 pkgconfig usage/dependee') @@ -3242,7 +3506,7 @@ endian = 'little' self.assertTrue(os.path.isfile(test_exe)) subprocess.check_call(test_exe, env=myenv) - @unittest.skipIf(shutil.which('pkg-config') is None, 'Pkg-config not found.') + @skipIfNoPkgconfig def test_pkgconfig_internal_libraries(self): ''' ''' @@ -3263,7 +3527,7 @@ endian = 'little' self.init(os.path.join(testdirbase, 'app')) self.build() - @unittest.skipIf(shutil.which('pkg-config') is None, 'Pkg-config not found.') + @skipIfNoPkgconfig def test_pkgconfig_formatting(self): testdir = os.path.join(self.unit_test_dir, '31 pkgconfig format') self.init(testdir) @@ -3271,7 +3535,7 @@ endian = 'little' myenv['PKG_CONFIG_PATH'] = self.privatedir stdo = subprocess.check_output(['pkg-config', '--libs-only-l', 'libsomething'], env=myenv) deps = [b'-lgobject-2.0', b'-lgio-2.0', b'-lglib-2.0', b'-lsomething'] - if is_windows() or is_cygwin(): + if is_windows() or is_cygwin() or is_osx(): # On Windows, libintl is a separate library deps.append(b'-lintl') self.assertEqual(set(deps), set(stdo.split())) @@ -3323,6 +3587,61 @@ endian = 'little' self.build() self.run_tests() + @skipIfNoPkgconfig + def test_usage_external_library(self): + ''' + Test that uninstalled usage of an external library (from the system or + PkgConfigDependency) works. On macOS, this workflow works out of the + box. On Linux, BSDs, Windows, etc, you need to set extra arguments such + as LD_LIBRARY_PATH, etc, so this test is skipped. + + The system library is found with cc.find_library() and pkg-config deps. 
+ ''' + if not is_osx(): + raise unittest.SkipTest('workflow currently only works on macOS') + oldprefix = self.prefix + # Install external library so we can find it + testdir = os.path.join(self.unit_test_dir, '33 external, internal library rpath', 'external library') + # install into installdir without using DESTDIR + installdir = self.installdir + self.prefix = installdir + self.init(testdir) + self.prefix = oldprefix + self.build() + self.install(use_destdir=False) + ## New builddir for the consumer + self.new_builddir() + os.environ['LIBRARY_PATH'] = os.path.join(installdir, self.libdir) + os.environ['PKG_CONFIG_PATH'] = os.path.join(installdir, self.libdir, 'pkgconfig') + testdir = os.path.join(self.unit_test_dir, '33 external, internal library rpath', 'built library') + # install into installdir without using DESTDIR + self.prefix = self.installdir + self.init(testdir) + self.prefix = oldprefix + self.build() + # test uninstalled + self.run_tests() + # test running after installation + self.install(use_destdir=False) + prog = os.path.join(self.installdir, 'bin', 'prog') + self._run([prog]) + out = self._run(['otool', '-L', prog]) + self.assertNotIn('@rpath', out) + ## New builddir for testing that DESTDIR is not added to install_name + self.new_builddir() + # install into installdir with DESTDIR + self.init(testdir) + self.build() + # test running after installation + self.install() + prog = self.installdir + os.path.join(self.prefix, 'bin', 'prog') + lib = self.installdir + os.path.join(self.prefix, 'lib', 'libbar_built.dylib') + for f in prog, lib: + out = self._run(['otool', '-L', f]) + # Ensure that the otool output does not contain self.installdir + self.assertNotRegex(out, self.installdir + '.*dylib ') + + class LinuxArmCrossCompileTests(BasePlatformTests): ''' Tests that verify cross-compilation to Linux/ARM @@ -6,6 +6,8 @@ ignore = E251, # E261: at least two spaces before inline comment E261, + # E265: block comment should start with '# ' + E265, # E501: line too long E501, # E302: expected 2 blank lines, found 1 @@ -71,17 +71,9 @@ setup(name='meson', 'mesonbuild.modules', 'mesonbuild.scripts', 'mesonbuild.wrap'], - scripts=['meson.py', - 'mesonconf.py', - 'mesontest.py', - 'mesonintrospect.py', - 'wraptool.py'], + scripts=['meson.py'], cmdclass={'install_scripts': install_scripts}, - data_files=[('share/man/man1', ['man/meson.1', - 'man/mesonconf.1', - 'man/mesonintrospect.1', - 'man/mesontest.1', - 'man/wraptool.1']), + data_files=[('share/man/man1', ['man/meson.1']), ('share/polkit-1/actions', ['data/com.mesonbuild.install.policy'])], classifiers=['Development Status :: 5 - Production/Stable', 'Environment :: Console', diff --git a/test cases/common/137 build by default/checkexists.py b/test cases/common/137 build by default/checkexists.py new file mode 100644 index 0000000..6664f72 --- /dev/null +++ b/test cases/common/137 build by default/checkexists.py @@ -0,0 +1,10 @@ +#!/usr/bin/env python3 + +import os.path, sys + +invert = False +for path in sys.argv[1:]: + if path == '--not': + invert = True + elif not os.path.exists(path) ^ invert: + sys.exit(1) diff --git a/test cases/common/137 build by default/meson.build b/test cases/common/137 build by default/meson.build index 6569548..b28b634 100644 --- a/test cases/common/137 build by default/meson.build +++ b/test cases/common/137 build by default/meson.build @@ -3,22 +3,43 @@ project('build on all', 'c') py3_mod = import('python3') py3 = py3_mod.find_python() -executable('fooprog', 'foo.c', build_by_default : false) 
+executable('fooprog', 'foo.c', + build_by_default : false, +) + +executable('barprog', 'foo.c', + build_by_default : false, + build_always : true, +) + comp = files('mygen.py') -mytarget = custom_target('gendat', - output : 'generated.dat', +checkexists = files('checkexists.py') + +mytarget = custom_target('gendat1', + output : 'generated1.dat', + input : 'source.txt', + command : [py3] + comp + ['@INPUT@', '@OUTPUT@'], + build_by_default : true, +) + +mytarget = custom_target('gendat2', + output : 'generated2.dat', input : 'source.txt', command : [py3] + comp + ['@INPUT@', '@OUTPUT@'], build_by_default : true, + build_always : false, ) -ct_output = join_paths(meson.build_root(), 'generated.dat') -exe_output = join_paths(meson.build_root(), 'fooprog') +ct1_output = join_paths(meson.build_root(), 'generated1.dat') +ct2_output = join_paths(meson.build_root(), 'generated2.dat') +exe1_output = join_paths(meson.build_root(), 'fooprog') +exe2_output = join_paths(meson.build_root(), 'barprog') + if host_machine.system() == 'windows' - exe_output += '.exe' + exe1_output += '.exe' + exe2_output += '.exe' endif -ct_exists_exe_nexists = 'import os.path, sys; sys.exit(not os.path.exists(sys.argv[1]) and os.path.exists(sys.argv[2]))' - test('check-build-by-default', py3, - args : ['-c', ct_exists_exe_nexists, ct_output, exe_output]) + args : [checkexists, + ct1_output, ct2_output, '--not', exe1_output, exe2_output]) diff --git a/test cases/common/146 C and CPP link/foo.cpp b/test cases/common/146 C and CPP link/foo.cpp index d8b4dbb..9db7fb2 100644 --- a/test cases/common/146 C and CPP link/foo.cpp +++ b/test cases/common/146 C and CPP link/foo.cpp @@ -16,6 +16,9 @@ const int cnums[] = {0, 61}; +/* Provided by foobar.c */ +extern "C" int get_number_index (void); + template<typename T, int N> std::vector<T> makeVector(const T (&data)[N]) { @@ -27,5 +30,5 @@ namespace { } extern "C" int six_one(void) { - return numbers[1]; + return numbers[get_number_index ()]; } diff --git a/test cases/common/146 C and CPP link/foobar.c b/test cases/common/146 C and CPP link/foobar.c index bd6cb00..27928bf 100644 --- a/test cases/common/146 C and CPP link/foobar.c +++ b/test cases/common/146 C and CPP link/foobar.c @@ -17,6 +17,10 @@ #include "foo.hpp" #include "foobar.h" +int get_number_index (void) { + return 1; +} + void mynumbers(int nums[]) { nums[0] = forty_two(); nums[1] = six_one(); diff --git a/test cases/common/146 C and CPP link/meson.build b/test cases/common/146 C and CPP link/meson.build index 2dd3364..55c1b87 100644 --- a/test cases/common/146 C and CPP link/meson.build +++ b/test cases/common/146 C and CPP link/meson.build @@ -44,6 +44,8 @@ configure_file( command : stlib_cmd) libstcppext = cxx.find_library('stcppext', dirs : meson.current_build_dir()) +lib_type_name = libstcppext.type_name() +assert(lib_type_name == 'library', 'type name is ' + lib_type_name) libfooext = shared_library( 'fooext', diff --git a/test cases/common/157 configure file in test/meson.build b/test cases/common/157 configure file in test/meson.build deleted file mode 100644 index 9028101..0000000 --- a/test cases/common/157 configure file in test/meson.build +++ /dev/null @@ -1,9 +0,0 @@ -project('conf file in test') - -test_file = configure_file( - input: 'test.py.in', - output: 'test.py', - configuration: configuration_data() -) - -test('configure-file', test_file) diff --git a/test cases/common/16 configure file/meson.build b/test cases/common/16 configure file/meson.build index 333b121..d7beeb1 100644 --- a/test 
cases/common/16 configure file/meson.build +++ b/test cases/common/16 configure file/meson.build @@ -156,11 +156,11 @@ configure_file( ) test('test7', executable('prog7', 'prog7.c')) -# Test empty configuration data object on invalid utf8 file +# Test copying of an empty configuration data object inf = 'invalid-utf8.bin.in' outf = configure_file(input : inf, output : 'invalid-utf8.bin', - configuration : configuration_data()) + copy: true) ret = run_command(check_file, inf, outf) if ret.returncode() != 0 error('Error running command: @0@\n@1@'.format(ret.stdout(), ret.stderr())) @@ -184,3 +184,52 @@ configure_file( encoding : 'koi8-r', configuration : conf8 ) + +# Test that passing an empty configuration_data() object to a file with +# #mesondefine substitutions does not print the warning. +configure_file( + input: 'nosubst-nocopy1.txt.in', + output: 'nosubst-nocopy1.txt', + configuration : configuration_data() +) + +# test that passing an empty configuration_data() object to a file with +# @foo@ substitutions does not print the warning. +configure_file( + input: 'nosubst-nocopy2.txt.in', + output: 'nosubst-nocopy2.txt', + configuration : configuration_data() +) + +# test that passing a configured file object to test() works, and that passing +# an empty configuration_data() object to a file that leads to no substitutions +# prints a warning (see unit tests) +test_file = configure_file( + input: 'test.py.in', + output: 'test.py', + configuration: configuration_data() +) + +# Test that overwriting an existing file creates a warning. +configure_file( + input: 'test.py.in', + output: 'double_output.txt', + configuration: conf +) +configure_file( + input: 'test.py.in', + output: 'double_output.txt', + configuration: conf +) + +# Test that the same file name in a different subdir will not create a warning +configure_file( + input: 'test.py.in', + output: 'no_write_conflict.txt', + configuration: conf +) + +test('configure-file', test_file) + +cdata = configuration_data() +cdata.set('invalid_value', ['array']) diff --git a/test cases/common/16 configure file/nosubst-nocopy1.txt.in b/test cases/common/16 configure file/nosubst-nocopy1.txt.in new file mode 100644 index 0000000..6e893a1 --- /dev/null +++ b/test cases/common/16 configure file/nosubst-nocopy1.txt.in @@ -0,0 +1 @@ +#mesondefine FOO_BAR diff --git a/test cases/common/16 configure file/nosubst-nocopy2.txt.in b/test cases/common/16 configure file/nosubst-nocopy2.txt.in new file mode 100644 index 0000000..a6a7cca --- /dev/null +++ b/test cases/common/16 configure file/nosubst-nocopy2.txt.in @@ -0,0 +1 @@ +@FOO_BAR@ diff --git a/test cases/common/16 configure file/subdir/meson.build b/test cases/common/16 configure file/subdir/meson.build index d802c1d..146b7b6 100644 --- a/test cases/common/16 configure file/subdir/meson.build +++ b/test cases/common/16 configure file/subdir/meson.build @@ -17,3 +17,22 @@ configure_file(input : '../dummy.dat', output : 'config2-3.h', command : [found_script, '@INPUT@', '@OUTPUT@']) run_command(check_file, join_paths(meson.current_build_dir(), 'config2-3.h')) + +# Test that overwriting an existing file creates a warning. 
+configure_file( + input: '../test.py.in', + output: 'double_output2.txt', + configuration: conf +) +configure_file( + input: '../test.py.in', + output: 'double_output2.txt', + configuration: conf +) + +# Test that the same file name in a different subdir will not create a warning +configure_file( + input: '../test.py.in', + output: 'no_write_conflict.txt', + configuration: conf +) diff --git a/test cases/common/157 configure file in test/test.py.in b/test cases/common/16 configure file/test.py.in index 15a61f5..15a61f5 100755..100644 --- a/test cases/common/157 configure file in test/test.py.in +++ b/test cases/common/16 configure file/test.py.in diff --git a/test cases/common/171 not-found dependency/meson.build b/test cases/common/171 not-found dependency/meson.build index 85505ee..02072b6 100644 --- a/test cases/common/171 not-found dependency/meson.build +++ b/test cases/common/171 not-found dependency/meson.build @@ -9,3 +9,6 @@ assert(dep.type_name() == 'not-found', 'dependency should be of type "not-found" library('testlib', 'testlib.c', dependencies: [dep]) subdir('sub', if_found: dep) + +subdep = dependency('', fallback: ['trivial', 'trivial_dep']) +missing = dependency('', fallback: ['missing', 'missing_dep'], required: false) diff --git a/test cases/common/171 not-found dependency/subprojects/trivial/meson.build b/test cases/common/171 not-found dependency/subprojects/trivial/meson.build new file mode 100644 index 0000000..8769c70 --- /dev/null +++ b/test cases/common/171 not-found dependency/subprojects/trivial/meson.build @@ -0,0 +1,3 @@ +project('trivial subproject', 'c') +trivial_lib = static_library('trivial', 'trivial.c', install: false) +trivial_dep = declare_dependency(link_with: trivial_lib) diff --git a/test cases/common/171 not-found dependency/subprojects/trivial/trivial.c b/test cases/common/171 not-found dependency/subprojects/trivial/trivial.c new file mode 100644 index 0000000..35b21e0 --- /dev/null +++ b/test cases/common/171 not-found dependency/subprojects/trivial/trivial.c @@ -0,0 +1,3 @@ +int subfunc() { + return 42; +} diff --git a/test cases/common/174 dependency factory/meson.build b/test cases/common/174 dependency factory/meson.build index 54f7d26..1b8ed17 100644 --- a/test cases/common/174 dependency factory/meson.build +++ b/test cases/common/174 dependency factory/meson.build @@ -1,4 +1,4 @@ -project('dependency factory') +project('dependency factory', meson_version : '>=0.40') dep = dependency('gl', method: 'pkg-config', required: false) if dep.found() and dep.type_name() == 'pkgconfig' diff --git a/test cases/common/203 feature option/meson.build b/test cases/common/203 feature option/meson.build new file mode 100644 index 0000000..ef3fa22 --- /dev/null +++ b/test cases/common/203 feature option/meson.build @@ -0,0 +1,47 @@ +project('feature user option', 'c') + +feature_opts = get_option('auto_features') +required_opt = get_option('required') +optional_opt = get_option('optional') +disabled_opt = get_option('disabled') + +assert(not feature_opts.enabled(), 'Should be auto option') +assert(not feature_opts.disabled(), 'Should be auto option') +assert(feature_opts.auto(), 'Should be auto option') + +assert(required_opt.enabled(), 'Should be enabled option') +assert(not required_opt.disabled(), 'Should be enabled option') +assert(not required_opt.auto(), 'Should be enabled option') + +assert(not optional_opt.enabled(), 'Should be auto option') +assert(not optional_opt.disabled(), 'Should be auto option') +assert(optional_opt.auto(), 'Should be auto 
option') + +assert(not disabled_opt.enabled(), 'Should be disabled option') +assert(disabled_opt.disabled(), 'Should be disabled option') +assert(not disabled_opt.auto(), 'Should be disabled option') + +dep = dependency('threads', required : required_opt) +assert(dep.found(), 'Should find required "threads" dep') + +dep = dependency('threads', required : optional_opt) +assert(dep.found(), 'Should find optional "threads" dep') + +dep = dependency('threads', required : disabled_opt) +assert(not dep.found(), 'Should not find disabled "threads" dep') + +dep = dependency('notfounddep', required : optional_opt) +assert(not dep.found(), 'Should not find optional "notfounddep" dep') + +dep = dependency('notfounddep', required : disabled_opt) +assert(not dep.found(), 'Should not find disabled "notfounddep" dep') + +cc = meson.get_compiler('c') +lib = cc.find_library('m', required : disabled_opt) +assert(not lib.found(), 'Should not find "m" library') + +cp = find_program('cp', required : disabled_opt) +assert(not cp.found(), 'Should not find "cp" program') + +found = add_languages('cpp', required : disabled_opt) +assert(not found, 'Should not find "cpp" language') diff --git a/test cases/common/203 feature option/meson_options.txt b/test cases/common/203 feature option/meson_options.txt new file mode 100644 index 0000000..063a35f --- /dev/null +++ b/test cases/common/203 feature option/meson_options.txt @@ -0,0 +1,3 @@ +option('required', type : 'feature', value : 'enabled', description : 'An required feature') +option('optional', type : 'feature', value : 'auto', description : 'An optional feature') +option('disabled', type : 'feature', value : 'disabled', description : 'A disabled feature') diff --git a/test cases/common/204 feature option disabled/meson.build b/test cases/common/204 feature option disabled/meson.build new file mode 100644 index 0000000..1a83187 --- /dev/null +++ b/test cases/common/204 feature option disabled/meson.build @@ -0,0 +1,23 @@ +project('feature user option', 'c', + default_options : ['auto_features=disabled']) + +feature_opts = get_option('auto_features') +required_opt = get_option('required') +optional_opt = get_option('optional') +disabled_opt = get_option('disabled') + +assert(not feature_opts.enabled(), 'Should be disabled option') +assert(feature_opts.disabled(), 'Should be disabled option') +assert(not feature_opts.auto(), 'Should be disabled option') + +assert(required_opt.enabled(), 'Should be enabled option') +assert(not required_opt.disabled(), 'Should be enabled option') +assert(not required_opt.auto(), 'Should be enabled option') + +assert(not optional_opt.enabled(), 'Auto feature should be disabled') +assert(optional_opt.disabled(), 'Auto feature should be disabled') +assert(not optional_opt.auto(), 'Auto feature should be disabled') + +assert(not disabled_opt.enabled(), 'Should be disabled option') +assert(disabled_opt.disabled(), 'Should be disabled option') +assert(not disabled_opt.auto(), 'Should be disabled option') diff --git a/test cases/common/204 feature option disabled/meson_options.txt b/test cases/common/204 feature option disabled/meson_options.txt new file mode 100644 index 0000000..063a35f --- /dev/null +++ b/test cases/common/204 feature option disabled/meson_options.txt @@ -0,0 +1,3 @@ +option('required', type : 'feature', value : 'enabled', description : 'An required feature') +option('optional', type : 'feature', value : 'auto', description : 'An optional feature') +option('disabled', type : 'feature', value : 'disabled', description : 
diff --git a/test cases/common/205 static threads/lib1.c b/test cases/common/205 static threads/lib1.c
new file mode 100644
index 0000000..1aa786c
--- /dev/null
+++ b/test cases/common/205 static threads/lib1.c
@@ -0,0 +1,13 @@
+#if defined _WIN32
+#include<windows.h>
+#else
+#include<pthread.h>
+#endif
+
+void *f(void) {
+#if defined _WIN32
+  return CreateThread;
+#else
+  return pthread_create;
+#endif
+}
diff --git a/test cases/common/205 static threads/lib2.c b/test cases/common/205 static threads/lib2.c
new file mode 100644
index 0000000..e988814
--- /dev/null
+++ b/test cases/common/205 static threads/lib2.c
@@ -0,0 +1,5 @@
+extern void *f(void);
+
+void *g(void) {
+  return f();
+}
diff --git a/test cases/common/205 static threads/meson.build b/test cases/common/205 static threads/meson.build
new file mode 100644
index 0000000..4279200
--- /dev/null
+++ b/test cases/common/205 static threads/meson.build
@@ -0,0 +1,13 @@
+project('threads', 'c')
+
+thread_dep = dependency('threads')
+
+
+lib1 = static_library('lib1', 'lib1.c',
+  dependencies : thread_dep)
+
+lib2 = static_library('lib2', 'lib2.c',
+  link_with : lib1)
+
+executable('prog', 'prog.c',
+  link_with : lib2)
diff --git a/test cases/common/205 static threads/prog.c b/test cases/common/205 static threads/prog.c
new file mode 100644
index 0000000..14a7c76
--- /dev/null
+++ b/test cases/common/205 static threads/prog.c
@@ -0,0 +1,6 @@
+extern void *g(void);
+
+int main(void) {
+  g();
+  return 0;
+}
diff --git a/test cases/common/38 run program/meson.build b/test cases/common/38 run program/meson.build
index 1563dec..ab800ef 100644
--- a/test cases/common/38 run program/meson.build
+++ b/test cases/common/38 run program/meson.build
@@ -55,3 +55,19 @@ if c.returncode() != 0
   error('Using files() in argument failed.')
 endif
 
+py3 = import('python3').find_python()
+
+ret = run_command(py3, '-c', 'print("some output")')
+assert(ret.returncode() == 0, 'failed to run python3: ' + ret.stderr())
+assert(ret.stdout() == 'some output\n', 'failed to run python3')
+
+ret = run_command(py3, '-c', 'print("some output")', capture : false)
+assert(ret.returncode() == 0, 'failed to run python3: ' + ret.stderr())
+assert(ret.stdout() == '', 'stdout is "@0@" instead of empty'.format(ret.stdout()))
+
+dd = find_program('dd', required : false)
+if dd.found()
+  ret = run_command(dd, 'if=/dev/urandom', 'bs=10', 'count=1', capture: false)
+  assert(ret.returncode() == 0, 'failed to run dd: ' + ret.stderr())
+  assert(ret.stdout() == '', 'stdout is "@0@" instead of empty'.format(ret.stdout()))
+endif
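The new assertions in the `38 run program` test rely on the `capture` keyword of `run_command()`: with `capture : false` the child process still runs and its exit code is checked, but `stdout()` comes back empty. Conceptually this is just the difference between piping and discarding output; a minimal Python analogy using the standard `subprocess` module (an illustration of the observable behaviour asserted above, not of Meson's internals) might look like:

```python
# Rough subprocess analogy for run_command(..., capture : false). This only
# mimics the observable behaviour asserted in the test, not Meson's internals.
import subprocess
import sys

def run_command(args, capture=True):
    result = subprocess.run(
        args,
        stdout=subprocess.PIPE if capture else subprocess.DEVNULL,
        stderr=subprocess.PIPE,
    )
    stdout = result.stdout.decode() if capture else ''
    return result.returncode, stdout

code, out = run_command([sys.executable, '-c', 'print("some output")'])
assert code == 0 and out == 'some output\n'

code, out = run_command([sys.executable, '-c', 'print("some output")'],
                        capture=False)
assert code == 0 and out == ''  # output was discarded, not captured
```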
diff --git a/test cases/failing/80 subproj dependency not-found and required/meson.build b/test cases/failing/80 subproj dependency not-found and required/meson.build
new file mode 100644
index 0000000..c5a2961
--- /dev/null
+++ b/test cases/failing/80 subproj dependency not-found and required/meson.build
@@ -0,0 +1,2 @@
+project('dep-test')
+missing = dependency('', fallback: ['missing', 'missing_dep'], required: true)
diff --git a/test cases/failing/81 unfound run/meson.build b/test cases/failing/81 unfound run/meson.build
new file mode 100644
index 0000000..3f37e9a
--- /dev/null
+++ b/test cases/failing/81 unfound run/meson.build
@@ -0,0 +1,4 @@
+project('unfound runtarget')
+
+exe = find_program('nonexisting_prog', required : false)
+run_target('invoke_fail', command : [exe])
diff --git a/test cases/frameworks/10 gtk-doc/doc/meson.build b/test cases/frameworks/10 gtk-doc/doc/meson.build
index 9f38eaa..059d405 100644
--- a/test cases/frameworks/10 gtk-doc/doc/meson.build
+++ b/test cases/frameworks/10 gtk-doc/doc/meson.build
@@ -9,3 +9,10 @@ gnome.gtkdoc('foobar',
   main_sgml : 'foobar-docs.sgml',
   content_files : [docbook, version_xml],
   install : true)
+
+gnome.gtkdoc('foobar2',
+  src_dir : inc,
+  main_sgml : 'foobar-docs.sgml',
+  content_files : [docbook, version_xml],
+  install : true,
+  install_dir : 'foobar2')
diff --git a/test cases/frameworks/10 gtk-doc/installed_files.txt b/test cases/frameworks/10 gtk-doc/installed_files.txt
index 6f8ca01..2bfb3f5 100644
--- a/test cases/frameworks/10 gtk-doc/installed_files.txt
+++ b/test cases/frameworks/10 gtk-doc/installed_files.txt
@@ -13,3 +13,17 @@ usr/share/gtk-doc/html/foobar/right-insensitive.png
 usr/share/gtk-doc/html/foobar/style.css
 usr/share/gtk-doc/html/foobar/up.png
 usr/share/gtk-doc/html/foobar/up-insensitive.png
+usr/share/gtk-doc/html/foobar2/BAR.html
+usr/share/gtk-doc/html/foobar2/foobar2.devhelp2
+usr/share/gtk-doc/html/foobar2/foobar.html
+usr/share/gtk-doc/html/foobar2/foobar2-foo.html
+usr/share/gtk-doc/html/foobar2/foobar2-foo-version.html
+usr/share/gtk-doc/html/foobar2/home.png
+usr/share/gtk-doc/html/foobar2/index.html
+usr/share/gtk-doc/html/foobar2/left.png
+usr/share/gtk-doc/html/foobar2/left-insensitive.png
+usr/share/gtk-doc/html/foobar2/right.png
+usr/share/gtk-doc/html/foobar2/right-insensitive.png
+usr/share/gtk-doc/html/foobar2/style.css
+usr/share/gtk-doc/html/foobar2/up.png
+usr/share/gtk-doc/html/foobar2/up-insensitive.png
diff --git a/test cases/frameworks/22 gir link order/fake-gthread/fake-gthread.c b/test cases/frameworks/22 gir link order/fake-gthread/fake-gthread.c
new file mode 100644
index 0000000..fae9c38
--- /dev/null
+++ b/test cases/frameworks/22 gir link order/fake-gthread/fake-gthread.c
@@ -0,0 +1,6 @@
+#include "fake-gthread.h"
+
+int fake_gthread_fake_function (void)
+{
+  return 7;
+}
diff --git a/test cases/frameworks/22 gir link order/fake-gthread/fake-gthread.h b/test cases/frameworks/22 gir link order/fake-gthread/fake-gthread.h
new file mode 100644
index 0000000..52b5472
--- /dev/null
+++ b/test cases/frameworks/22 gir link order/fake-gthread/fake-gthread.h
@@ -0,0 +1,6 @@
+#ifndef FAKE_GTHREAD_H
+#define FAKE_GTHREAD_H
+
+int fake_gthread_fake_function (void);
+
+#endif /* FAKE_GTHREAD_H */
diff --git a/test cases/frameworks/22 gir link order/fake-gthread/meson.build b/test cases/frameworks/22 gir link order/fake-gthread/meson.build
new file mode 100644
index 0000000..693e8e0
--- /dev/null
+++ b/test cases/frameworks/22 gir link order/fake-gthread/meson.build
@@ -0,0 +1,12 @@
+fake_gthread_sources = ['fake-gthread.c', 'fake-gthread.h']
+fake_gthread_lib = shared_library(
+  'gthread-2.0',
+  sources : fake_gthread_sources,
+  install : false,
+)
+
+fake_gthread_includes = include_directories('.')
+fake_gthread = declare_dependency(
+  include_directories : fake_gthread_includes,
+  link_with : fake_gthread_lib,
+)
diff --git a/test cases/frameworks/22 gir link order/get-prgname/get-prgname.c b/test cases/frameworks/22 gir link order/get-prgname/get-prgname.c
new file mode 100644
index 0000000..356b45e
--- /dev/null
+++ b/test cases/frameworks/22 gir link order/get-prgname/get-prgname.c
@@ -0,0 +1,8 @@
+#include "get-prgname.h"
+
+#include <glib.h>
+
+const char *get_prgname_get_name (void)
+{
+  return g_get_prgname ();
+}
diff --git a/test cases/frameworks/22 gir link order/get-prgname/get-prgname.h b/test cases/frameworks/22 gir link order/get-prgname/get-prgname.h
new file mode 100644
index 0000000..cb5118e
--- /dev/null
+++ b/test cases/frameworks/22 gir link order/get-prgname/get-prgname.h
@@ -0,0 +1,6 @@
+#ifndef GET_PRGNAME_H
+#define GET_PRGNAME_H
+
+const char *get_prgname_get_name (void);
+
+#endif /* GET_PRGNAME_H */
diff --git a/test cases/frameworks/22 gir link order/get-prgname/meson.build b/test cases/frameworks/22 gir link order/get-prgname/meson.build
new file mode 100644
index 0000000..6a7489d
--- /dev/null
+++ b/test cases/frameworks/22 gir link order/get-prgname/meson.build
@@ -0,0 +1,13 @@
+get_prgname_sources = ['get-prgname.c', 'get-prgname.h']
+get_prgname_lib = shared_library(
+  'get-prgname',
+  sources : get_prgname_sources,
+  dependencies : [glib],
+  install : false,
+)
+
+get_prgname_includes = include_directories('.')
+get_prgname = declare_dependency(
+  include_directories : get_prgname_includes,
+  link_with : get_prgname_lib,
+)
diff --git a/test cases/frameworks/22 gir link order/meson-sample.c b/test cases/frameworks/22 gir link order/meson-sample.c
new file mode 100644
index 0000000..7c6442a
--- /dev/null
+++ b/test cases/frameworks/22 gir link order/meson-sample.c
@@ -0,0 +1,48 @@
+#include "meson-sample.h"
+
+#include "get-prgname.h"
+#include "fake-gthread.h"
+
+struct _MesonSample {
+  GObject parent_instance;
+};
+
+G_DEFINE_TYPE (MesonSample, meson_sample, G_TYPE_OBJECT)
+
+/**
+ * meson_sample_new:
+ *
+ * Allocates a new #MesonSample.
+ *
+ * Returns: (transfer full): a #MesonSample.
+ */
+MesonSample *
+meson_sample_new (void)
+{
+  return g_object_new (MESON_TYPE_SAMPLE, NULL);
+}
+
+static void
+meson_sample_class_init (MesonSampleClass *klass)
+{
+}
+
+static void
+meson_sample_init (MesonSample *self)
+{
+}
+
+/**
+ * meson_sample_print_message:
+ * @self: a #MesonSample.
+ *
+ * Prints a message.
+ */
+void
+meson_sample_print_message (MesonSample *self)
+{
+  g_return_if_fail (MESON_IS_SAMPLE (self));
+
+  g_print ("Message: %s\n", get_prgname_get_name ());
+  g_print ("Message: %d\n", fake_gthread_fake_function ());
+}
diff --git a/test cases/frameworks/22 gir link order/meson-sample.h b/test cases/frameworks/22 gir link order/meson-sample.h
new file mode 100644
index 0000000..2c28401
--- /dev/null
+++ b/test cases/frameworks/22 gir link order/meson-sample.h
@@ -0,0 +1,17 @@
+#ifndef MESON_SAMPLE_H
+#define MESON_SAMPLE_H
+
+#include <glib-object.h>
+
+G_BEGIN_DECLS
+
+#define MESON_TYPE_SAMPLE (meson_sample_get_type())
+
+G_DECLARE_FINAL_TYPE (MesonSample, meson_sample, MESON, SAMPLE, GObject)
+
+MesonSample *meson_sample_new (void);
+void meson_sample_print_message (MesonSample *self);
+
+G_END_DECLS
+
+#endif /* MESON_SAMPLE_H */
diff --git a/test cases/frameworks/22 gir link order/meson.build b/test cases/frameworks/22 gir link order/meson.build
new file mode 100644
index 0000000..224eaf6
--- /dev/null
+++ b/test cases/frameworks/22 gir link order/meson.build
@@ -0,0 +1,41 @@
+project('gir link order', 'c')
+
+if not dependency('glib-2.0', required : false).found()
+  error('MESON_SKIP_TEST glib not found.')
+endif
+
+gnome = import('gnome')
+glib = dependency('glib-2.0')
+gobject = dependency('gobject-2.0')
+
+# get-prgname is a shared library which uses a function from glib-2.0. It is
+# used to introduce external -L flags which may cause -L order problems.
+subdir('get-prgname')
+
+# fake-gthread is a shared library which has the same name as gthread-2.0 from
+# GLib. This is used to simulate the case where an older or unrelated version
+# of a library is already installed on the system. Our meson sample library
+# defined below uses a function from fake-gthread. If meson messes up -L order,
+# the linker will find libgthread-2.0.so installed on the system and fail to
+# find the symbol our meson sample library uses.
+subdir('fake-gthread')
+
+meson_sample_sources = ['meson-sample.c', 'meson-sample.h']
+meson_sample_lib = shared_library(
+  'sample',
+  sources : meson_sample_sources,
+  dependencies : [gobject, get_prgname, fake_gthread],
+  install : false,
+)
+
+gnome.generate_gir(
+  meson_sample_lib,
+  sources : meson_sample_sources,
+  nsversion : '1.0',
+  namespace : 'Meson',
+  symbol_prefix : 'meson',
+  identifier_prefix : 'Meson',
+  includes : ['GObject-2.0'],
+  install : false,
+  build_by_default: true,
+)
diff --git a/test cases/frameworks/7 gnome/gdbus/com.example.Sample.xml b/test cases/frameworks/7 gnome/gdbus/data/com.example.Sample.xml
index 9ece885..9ece885 100644
--- a/test cases/frameworks/7 gnome/gdbus/com.example.Sample.xml
+++ b/test cases/frameworks/7 gnome/gdbus/data/com.example.Sample.xml
diff --git a/test cases/frameworks/7 gnome/gdbus/meson.build b/test cases/frameworks/7 gnome/gdbus/meson.build
index 5bd640f..2de172f 100644
--- a/test cases/frameworks/7 gnome/gdbus/meson.build
+++ b/test cases/frameworks/7 gnome/gdbus/meson.build
@@ -1,4 +1,5 @@
-gdbus_src = gnome.gdbus_codegen('generated-gdbus-no-docbook', 'com.example.Sample.xml',
+gdbus_src = gnome.gdbus_codegen('generated-gdbus-no-docbook',
+  'data/com.example.Sample.xml',
   interface_prefix : 'com.example.',
   namespace : 'Sample',
   annotations : [
@@ -6,9 +7,11 @@ gdbus_src = gnome.gdbus_codegen('generated-gdbus-no-docbook', 'com.example.Sampl
   ],
 )
 assert(gdbus_src.length() == 2, 'expected 2 targets')
+assert(gdbus_src[0].full_path().endswith('.c'), 'expected 1 c source file')
+assert(gdbus_src[1].full_path().endswith('.h'), 'expected 1 c header file')
 
 gdbus_src = gnome.gdbus_codegen('generated-gdbus',
-  sources : 'com.example.Sample.xml',
+  sources : files('data/com.example.Sample.xml'),
   interface_prefix : 'com.example.',
   namespace : 'Sample',
   annotations : [
@@ -19,6 +22,8 @@ gdbus_src = gnome.gdbus_codegen('generated-gdbus',
   install_dir : get_option('includedir')
 )
 assert(gdbus_src.length() == 3, 'expected 3 targets')
+assert(gdbus_src[0].full_path().endswith('.c'), 'expected 1 c source file')
+assert(gdbus_src[1].full_path().endswith('.h'), 'expected 1 c header file')
 
 if not pretend_glib_old and glib.version().version_compare('>=2.51.3')
   includes = []
diff --git a/test cases/java/7 linking/com/mesonbuild/Linking.java b/test cases/java/7 linking/com/mesonbuild/Linking.java
new file mode 100644
index 0000000..170e2aa
--- /dev/null
+++ b/test cases/java/7 linking/com/mesonbuild/Linking.java
@@ -0,0 +1,9 @@
+package com.mesonbuild;
+
+import com.mesonbuild.SimpleLib;
+
+class Linking {
+    public static void main(String [] args) {
+        SimpleLib.func();
+    }
+}
diff --git a/test cases/java/7 linking/meson.build b/test cases/java/7 linking/meson.build
new file mode 100644
index 0000000..0ae0db3
--- /dev/null
+++ b/test cases/java/7 linking/meson.build
@@ -0,0 +1,8 @@
+project('linkingjava', 'java')
+
+subdir('sub')
+
+javaprog = jar('myprog', 'com/mesonbuild/Linking.java',
+  main_class : 'com.mesonbuild.Linking',
+  link_with : simplelib)
+test('mytest', javaprog)
\ No newline at end of file
diff --git a/test cases/java/7 linking/sub/com/mesonbuild/SimpleLib.java b/test cases/java/7 linking/sub/com/mesonbuild/SimpleLib.java
new file mode 100644
index 0000000..835b2e4
--- /dev/null
+++ b/test cases/java/7 linking/sub/com/mesonbuild/SimpleLib.java
@@ -0,0 +1,7 @@
+package com.mesonbuild;
+
+public class SimpleLib {
+    public static void func() {
+        System.out.println("Java linking is working.\n");
+    }
+}
diff --git a/test cases/java/7 linking/sub/meson.build b/test cases/java/7 linking/sub/meson.build
new file mode 100644
index 0000000..13fd202
--- /dev/null
+++ b/test cases/java/7 linking/sub/meson.build
@@ -0,0 +1,2 @@
+simplelib = jar('simplelib',
+  'com/mesonbuild/SimpleLib.java')
diff --git a/test cases/nasm/1 configure file/meson.build b/test cases/nasm/1 configure file/meson.build
index 213e114..e128325 100644
--- a/test cases/nasm/1 configure file/meson.build
+++ b/test cases/nasm/1 configure file/meson.build
@@ -39,6 +39,11 @@ config_file = configure_file(
   output_format: 'nasm',
 )
 
-exe = executable('hello', asm_gen.process('hello.asm'))
+cc = meson.get_compiler('c')
+link_args = cc.get_supported_link_arguments(['-no-pie'])
+
+exe = executable('hello', asm_gen.process('hello.asm'),
+  link_args: link_args,
+)
 
 test('test-nasm-configure-file', exe)
diff --git a/test cases/unit/12 cross prog/meson.build b/test cases/unit/12 cross prog/meson.build
index e628701..a7adeb2 100644
--- a/test cases/unit/12 cross prog/meson.build
+++ b/test cases/unit/12 cross prog/meson.build
@@ -2,11 +2,15 @@ project('cross find program', 'c')
 
 native_exe = find_program('sometool.py', native : true)
 cross_exe = find_program('sometool.py')
+cross_other_exe = find_program('someothertool.py')
 
 native_out = run_command(native_exe).stdout().strip()
 cross_out = run_command(cross_exe).stdout().strip()
+cross_other_out = run_command(cross_other_exe).stdout().strip()
 
 assert(native_out == 'native', 'Native output incorrect:' + native_out)
 assert(cross_out == 'cross', 'Cross output incorrect:' + cross_out)
+assert(cross_out == cross_other_out,
+       'Cross output incorrect:' + cross_other_out)
diff --git a/test cases/unit/33 external, internal library rpath/built library/bar.c b/test cases/unit/33 external, internal library rpath/built library/bar.c
new file mode 100644
index 0000000..4f5662e
--- /dev/null
+++ b/test cases/unit/33 external, internal library rpath/built library/bar.c
@@ -0,0 +1,7 @@
+int foo_system_value (void);
+int faa_system_value (void);
+
+int bar_built_value (int in)
+{
+  return faa_system_value() + foo_system_value() + in;
+}
diff --git a/test cases/unit/33 external, internal library rpath/built library/meson.build b/test cases/unit/33 external, internal library rpath/built library/meson.build
new file mode 100644
index 0000000..2b422f4
--- /dev/null
+++ b/test cases/unit/33 external, internal library rpath/built library/meson.build
@@ -0,0 +1,12 @@
+project('built library', 'c')
+
+cc = meson.get_compiler('c')
+foo_system_dep = cc.find_library('foo_in_system')
+faa_pkg_dep = dependency('faa_pkg')
+
+l = shared_library('bar_built', 'bar.c',
+  install: true,
+  dependencies : [foo_system_dep, faa_pkg_dep])
+
+e = executable('prog', 'prog.c', link_with: l, install: true)
+test('testprog', e)
diff --git a/test cases/unit/33 external, internal library rpath/built library/meson_options.txt b/test cases/unit/33 external, internal library rpath/built library/meson_options.txt
new file mode 100644
index 0000000..aa1d2ec
--- /dev/null
+++ b/test cases/unit/33 external, internal library rpath/built library/meson_options.txt
@@ -0,0 +1 @@
+option('foo_system_path', type: 'string', value: '')
diff --git a/test cases/unit/33 external, internal library rpath/built library/prog.c b/test cases/unit/33 external, internal library rpath/built library/prog.c
new file mode 100644
index 0000000..e3d4cf6
--- /dev/null
+++ b/test cases/unit/33 external, internal library rpath/built library/prog.c
@@ -0,0 +1,7 @@
+int bar_built_value (int in);
+
+int main (int argc, char *argv[])
+{
+  // this will evaluate to 0
+  return bar_built_value(10) - (42 + 1969 + 10);
+}
diff --git a/test cases/unit/33 external, internal library rpath/external library/faa.c b/test cases/unit/33 external, internal library rpath/external library/faa.c
new file mode 100644
index 0000000..4733575
--- /dev/null
+++ b/test cases/unit/33 external, internal library rpath/external library/faa.c
@@ -0,0 +1,4 @@
+int faa_system_value (void)
+{
+  return 1969;
+}
diff --git a/test cases/unit/33 external, internal library rpath/external library/foo.c b/test cases/unit/33 external, internal library rpath/external library/foo.c
new file mode 100644
index 0000000..a34e4a8
--- /dev/null
+++ b/test cases/unit/33 external, internal library rpath/external library/foo.c
@@ -0,0 +1,4 @@
+int foo_system_value (void)
+{
+  return 42;
+}
diff --git a/test cases/unit/33 external, internal library rpath/external library/meson.build b/test cases/unit/33 external, internal library rpath/external library/meson.build
new file mode 100644
index 0000000..6dcc97e
--- /dev/null
+++ b/test cases/unit/33 external, internal library rpath/external library/meson.build
@@ -0,0 +1,9 @@
+project('system library', 'c')
+
+shared_library('foo_in_system', 'foo.c', install : true)
+l = shared_library('faa_pkg', 'faa.c', install: true)
+
+pkg = import('pkgconfig')
+pkg.generate(name: 'faa_pkg',
+  libraries: [l, '-framework', 'CoreFoundation', '-framework', 'CoreMedia'],
+  description: 'FAA, a pkg-config test library')
diff --git a/test cases/unit/34 featurenew subprojects/meson.build b/test cases/unit/34 featurenew subprojects/meson.build
new file mode 100644
index 0000000..27898cd
--- /dev/null
+++ b/test cases/unit/34 featurenew subprojects/meson.build
@@ -0,0 +1,6 @@
+project('featurenew subproject', meson_version: '>=0.45')
+
+foo = {}
+
+subproject('foo')
+subproject('bar')
diff --git a/test cases/unit/34 featurenew subprojects/subprojects/bar/meson.build b/test cases/unit/34 featurenew subprojects/subprojects/bar/meson.build
new file mode 100644
index 0000000..712a125
--- /dev/null
+++ b/test cases/unit/34 featurenew subprojects/subprojects/bar/meson.build
@@ -0,0 +1,3 @@
+project('foo subproject', meson_version: '>=0.46')
+
+import('python')
diff --git a/test cases/unit/34 featurenew subprojects/subprojects/foo/meson.build b/test cases/unit/34 featurenew subprojects/subprojects/foo/meson.build
new file mode 100644
index 0000000..0ef4472
--- /dev/null
+++ b/test cases/unit/34 featurenew subprojects/subprojects/foo/meson.build
@@ -0,0 +1,3 @@
+project('foo subproject', meson_version: '>=0.40')
+
+disabler()
diff --git a/test cases/windows/15 resources with custom target depend_files/ico/gen-ico.py b/test cases/windows/15 resources with custom target depend_files/ico/gen-ico.py
new file mode 100755
index 0000000..c49e0dd
--- /dev/null
+++ b/test cases/windows/15 resources with custom target depend_files/ico/gen-ico.py
@@ -0,0 +1,6 @@
+#!/usr/bin/env python3
+
+import sys
+
+with open(sys.argv[1], 'rb') as infile, open(sys.argv[2], 'wb') as outfile:
+    outfile.write(infile.read())
diff --git a/test cases/windows/15 resources with custom target depend_files/ico/meson.build b/test cases/windows/15 resources with custom target depend_files/ico/meson.build
new file mode 100644
index 0000000..3fae9e8
--- /dev/null
+++ b/test cases/windows/15 resources with custom target depend_files/ico/meson.build
@@ -0,0 +1,8 @@
+ico_writer = find_program('gen-ico.py')
+
+ico = custom_target('makeico',
+  input : 'sample.ico.in',
+  output : 'sample.ico',
+  command : [ico_writer, '@INPUT@', '@OUTPUT@'],
+  install : false,
+  build_always : false)
diff --git a/test cases/windows/15 resources with custom target depend_files/ico/sample.ico.in b/test cases/windows/15 resources with custom target depend_files/ico/sample.ico.in
Binary files differ
new file mode 100644
index 0000000..24bd3d9
--- /dev/null
+++ b/test cases/windows/15 resources with custom target depend_files/ico/sample.ico.in
diff --git a/test cases/windows/15 resources with custom target depend_files/meson.build b/test cases/windows/15 resources with custom target depend_files/meson.build
new file mode 100644
index 0000000..85ba06f
--- /dev/null
+++ b/test cases/windows/15 resources with custom target depend_files/meson.build
@@ -0,0 +1,69 @@
+project('winmain', 'c')
+
+# MinGW windres has a bug due to which it doesn't parse args with space properly:
+# https://github.com/mesonbuild/meson/pull/1346
+# https://sourceware.org/bugzilla/show_bug.cgi?id=4933
+if meson.get_compiler('c').get_id() == 'gcc' and host_machine.system() == 'windows'
+  # Construct build_to_src and skip this test if it has spaces
+  # because then the -I flag to windres will also have spaces
+  # and we know the test will fail
+  src_parts = meson.source_root().split('/')
+  build_parts = meson.build_root().split('/')
+
+  # Get the common path (which might just be '/' or 'C:/')
+  common = []
+  done = false
+  count = 0
+  if src_parts.length() > build_parts.length()
+    parts = build_parts
+    other = src_parts
+  else
+    parts = src_parts
+    other = build_parts
+  endif
+  foreach part : parts
+    if not done and part == other.get(count)
+      common += [part]
+    else
+      done = true
+    endif
+    count += 1
+  endforeach
+
+  # Create path components to go down from the build root to the common path
+  count = 0
+  rel = build_parts
+  foreach build : build_parts
+    if count < build_parts.length() - common.length()
+      rel += ['..']
+    endif
+    count += 1
+  endforeach
+
+  # Create path components to go up from the common path to the build root
+  count = 0
+  foreach src : src_parts
+    if count >= common.length()
+      rel += [src]
+    endif
+    count += 1
+  endforeach
+
+  build_to_src = '/'.join(rel)
+
+  if build_to_src.contains(' ')
+    message('build_to_src is: ' + build_to_src)
+    error('MESON_SKIP_TEST build_to_src has spaces')
+  endif
+  # Welcome to the end of this conditional.
+  # We hope you never have to implement something like this.
+endif
+
+subdir('ico')
+subdir('res')
+
+exe = executable('prog', 'prog.c',
+  res,
+  gui_app : true)
+
+test('winmain', exe)
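Most of the meson.build above is a hand-written computation of `build_to_src`, the relative path from the build root back to the source root, spelled out step by step because Meson's DSL has no path helpers. For comparison, the same computation in ordinary Python is a single `os.path.relpath()` call; the paths in this sketch are hypothetical and only illustrate why the test skips itself when the result contains a space:

```python
# Hypothetical roots; the real values come from meson.source_root() and
# meson.build_root(). Note the spaces in both paths.
import os.path

source_root = '/home/user/src/meson/test cases/windows/15 resources'
build_root = '/home/user/meson builds/resources'

# What the foreach loops above compute by hand: the relative path from the
# build root back to the source root.
build_to_src = os.path.relpath(source_root, start=build_root)
print(build_to_src)  # ../../src/meson/test cases/windows/15 resources

# The -I argument handed to MinGW windres contains build_to_src; if it has a
# space, windres mis-parses it, so the test bails out with MESON_SKIP_TEST.
if ' ' in build_to_src:
    print('would skip: build_to_src has spaces')
```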
diff --git a/test cases/windows/15 resources with custom target depend_files/prog.c b/test cases/windows/15 resources with custom target depend_files/prog.c
new file mode 100644
index 0000000..2bef6a2
--- /dev/null
+++ b/test cases/windows/15 resources with custom target depend_files/prog.c
@@ -0,0 +1,14 @@
+#include<windows.h>
+
+#define MY_ICON 1
+
+int APIENTRY
+WinMain(
+    HINSTANCE hInstance,
+    HINSTANCE hPrevInstance,
+    LPSTR lpszCmdLine,
+    int nCmdShow) {
+    HICON hIcon;
+    hIcon = LoadIcon(GetModuleHandle(NULL), MAKEINTRESOURCE(MY_ICON));
+    return hIcon ? 0 : 1;
+}
diff --git a/test cases/windows/15 resources with custom target depend_files/res/meson.build b/test cases/windows/15 resources with custom target depend_files/res/meson.build
new file mode 100644
index 0000000..3d43b3f
--- /dev/null
+++ b/test cases/windows/15 resources with custom target depend_files/res/meson.build
@@ -0,0 +1,4 @@
+win = import('windows')
+
+res = win.compile_resources('myres.rc',
+  depends: ico)
diff --git a/test cases/windows/15 resources with custom target depend_files/res/myres.rc b/test cases/windows/15 resources with custom target depend_files/res/myres.rc
new file mode 100644
index 0000000..12838ae
--- /dev/null
+++ b/test cases/windows/15 resources with custom target depend_files/res/myres.rc
@@ -0,0 +1,3 @@
+#include<windows.h>
+
+1 ICON "sample.ico"
diff --git a/wraptool.py b/wraptool.py
deleted file mode 100755
index a5ee9ef..0000000
--- a/wraptool.py
+++ /dev/null
@@ -1,23 +0,0 @@
-#!/usr/bin/env python3
-
-# Copyright 2016 The Meson development team
-
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-
-# http://www.apache.org/licenses/LICENSE-2.0
-
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from mesonbuild.wrap import wraptool
-import sys
-
-if __name__ == '__main__':
-    print('Warning: This executable is deprecated. Use "meson wrap" instead.',
-          file=sys.stderr)
-    sys.exit(wraptool.run(sys.argv[1:]))