189 files changed, 2701 insertions, 708 deletions
@@ -30,3 +30,4 @@ packagecache /docs/hotdoc-private* *.pyc +/*venv* diff --git a/azure-pipelines.yml b/azure-pipelines.yml index 85fedab..1ff542a 100644 --- a/azure-pipelines.yml +++ b/azure-pipelines.yml @@ -13,6 +13,7 @@ variables: jobs: - job: vs2017 + timeoutInMinutes: 120 pool: vmImage: VS2017-Win2016 @@ -41,6 +42,7 @@ jobs: - template: ci/azure-steps.yml - job: vs2019 + timeoutInMinutes: 120 pool: vmImage: windows-2019 @@ -109,8 +111,8 @@ jobs: displayName: Install Dependencies - script: | set PATH=%CYGWIN_ROOT%\bin;%SYSTEMROOT%\system32 - env.exe -- python3 -m pip --disable-pip-version-check install gcovr pefile pytest-xdist jsonschema - displayName: pip install gcovr pefile pytest-xdist jsonschema + env.exe -- python3 -m pip --disable-pip-version-check install gcovr pefile jsonschema + displayName: "pip install gcovr pefile jsonschema (pytest-xdist broken, skipped: CHECK ME AGAIN)" - script: | set BOOST_ROOT= set PATH=%CYGWIN_ROOT%\bin;%SYSTEMROOT%\system32 diff --git a/cross/armcc.txt b/cross/armcc.txt index c884ffa..ae65c9e 100644 --- a/cross/armcc.txt +++ b/cross/armcc.txt @@ -7,7 +7,7 @@ cpp = 'armcc' ar = 'armar' strip = 'armar' -[properties] +[built-in options] # The '--cpu' option with the appropriate target type should be mentioned # to cross compile c/c++ code with armcc,. c_args = ['--cpu=Cortex-M0plus'] diff --git a/cross/armclang-linux.txt b/cross/armclang-linux.txt index 6df78d6..10f6fa4 100644 --- a/cross/armclang-linux.txt +++ b/cross/armclang-linux.txt @@ -12,7 +12,7 @@ # Armcc is only available in toolchain version 5. # Armclang is only available in toolchain version 6. # Start shell with /opt/arm/developmentstudio-2019.0/bin/suite_exec zsh -# Now the compilers will work. +# Now the compilers will work. [binaries] # we could set exe_wrapper = qemu-arm-static but to test the case @@ -24,8 +24,7 @@ ar = '/opt/arm/developmentstudio-2019.0/sw/ARMCompiler6.12/bin/armar' #strip = '/usr/arm-linux-gnueabihf/bin/strip' #pkgconfig = '/usr/bin/arm-linux-gnueabihf-pkg-config' -[properties] - +[built-in options] c_args = ['--target=aarch64-arm-none-eabi'] [host_machine] diff --git a/cross/armclang.txt b/cross/armclang.txt index 955b7ef..6146e0d 100644 --- a/cross/armclang.txt +++ b/cross/armclang.txt @@ -7,7 +7,7 @@ cpp = 'armclang' ar = 'armar' strip = 'armar' -[properties] +[built-in options] # The '--target', '-mcpu' options with the appropriate values should be mentioned # to cross compile c/c++ code with armclang. c_args = ['--target=arm-arm-none-eabi', '-mcpu=cortex-m0plus'] diff --git a/cross/c2000.txt b/cross/c2000.txt index e624f25..61c0310 100644 --- a/cross/c2000.txt +++ b/cross/c2000.txt @@ -12,8 +12,7 @@ cpu_family = 'c2000' cpu = 'c28x' endian = 'little' -[properties] -needs_exe_wrapper = true +[built-in options] c_args = [ '-v28', '-ml', @@ -24,3 +23,6 @@ c_link_args = [ '\f28004x_flash.cmd'] cpp_args = [] cpp_link_args = [] + +[properties] +needs_exe_wrapper = true diff --git a/cross/ccrx.txt b/cross/ccrx.txt index 097ec06..f1b536c 100644 --- a/cross/ccrx.txt +++ b/cross/ccrx.txt @@ -7,7 +7,7 @@ cpp = 'ccrx' ar = 'rlink' strip = 'rlink' -[properties] +[built-in options] # The '--cpu' option with the appropriate target type should be mentioned # to cross compile c/c++ code with ccrx,. 
c_args = ['-cpu=rx600'] diff --git a/cross/iphone.txt b/cross/iphone.txt index e714da5..9659407 100644 --- a/cross/iphone.txt +++ b/cross/iphone.txt @@ -8,14 +8,14 @@ cpp = 'clang++' ar = 'ar' strip = 'strip' -[properties] -root = '/Applications/Xcode.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer' - +[built-in options] c_args = ['-arch', 'armv7', '-miphoneos-version-min=8.0', '-isysroot', '/Applications/Xcode.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS8.4.sdk'] cpp_args = ['-arch', 'armv7', '-miphoneos-version-min=8.0', '-isysroot', '/Applications/Xcode.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS8.4.sdk'] c_link_args = ['-arch', 'armv7', '-miphoneos-version-min=8.0', '-isysroot', '/Applications/Xcode.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS8.4.sdk'] cpp_link_args = ['-arch', 'armv7', '-miphoneos-version-min=8.0', '-isysroot', '/Applications/Xcode.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS8.4.sdk'] +[properties] +root = '/Applications/Xcode.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer' has_function_printf = true has_function_hfkerhisadf = false diff --git a/cross/ownstdlib.txt b/cross/ownstdlib.txt index 46e99f7..bdff6f4 100644 --- a/cross/ownstdlib.txt +++ b/cross/ownstdlib.txt @@ -10,4 +10,4 @@ endian = 'little' [properties] -c_stdlib = ['mylibc', 'mylibc_dep'] # Subproject name, dependency name +c_stdlib = 'mylibc' # Subproject name diff --git a/cross/tvos.txt b/cross/tvos.txt index dd6d5c1..833f04b 100644 --- a/cross/tvos.txt +++ b/cross/tvos.txt @@ -8,14 +8,15 @@ cpp = 'clang++' ar = 'ar' strip = 'strip' -[properties] -root = '/Applications/Xcode.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer' - +[built-in options] c_args = ['-arch', 'arm64', '-mtvos-version-min=12.0', '-isysroot', '/Applications/Xcode.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk'] cpp_args = ['-arch', 'arm64', '-mtvos-version-min=12.0', '-isysroot', '/Applications/Xcode.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk'] c_link_args = ['-arch', 'arm64', '-mtvos-version-min=12.0', '-isysroot', '/Applications/Xcode.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk'] cpp_link_args = ['-arch', 'arm64', '-mtvos-version-min=12.0', '-isysroot', '/Applications/Xcode.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk'] +[properties] +root = '/Applications/Xcode.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer' + has_function_printf = true has_function_hfkerhisadf = false diff --git a/cross/ubuntu-armhf.txt b/cross/ubuntu-armhf.txt index 4600c22..69e0c86 100644 --- a/cross/ubuntu-armhf.txt +++ b/cross/ubuntu-armhf.txt @@ -9,12 +9,14 @@ strip = '/usr/arm-linux-gnueabihf/bin/strip' pkgconfig = '/usr/bin/arm-linux-gnueabihf-pkg-config' ld = '/usr/bin/arm-linux/gnueabihf-ld' -[properties] -root = '/usr/arm-linux-gnueabihf' +[built-in options] # Used in unit test '140 get define' c_args = ['-DMESON_TEST_ISSUE_1665=1'] cpp_args = '-DMESON_TEST_ISSUE_1665=1' +[properties] +root = '/usr/arm-linux-gnueabihf' + has_function_printf = true has_function_hfkerhisadf = false diff --git a/cross/wasm.txt b/cross/wasm.txt index a43636f..f2d0cd7 100644 --- a/cross/wasm.txt +++ b/cross/wasm.txt @@ -3,8 +3,7 @@ c = '/home/jpakkane/emsdk/fastcomp/emscripten/emcc' cpp = '/home/jpakkane/emsdk/fastcomp/emscripten/em++' ar = 
'/home/jpakkane/emsdk/fastcomp/emscripten/emar' -[properties] - +[built-in options] c_args = ['-s', 'WASM=1', '-s', 'EXPORT_ALL=1'] c_link_args = ['-s','EXPORT_ALL=1'] cpp_args = ['-s', 'WASM=1', '-s', 'EXPORT_ALL=1'] diff --git a/cross/xc16.txt b/cross/xc16.txt index 1e67362..c66889d 100644 --- a/cross/xc16.txt +++ b/cross/xc16.txt @@ -14,6 +14,8 @@ endian = 'little' [properties] needs_exe_wrapper = true + +[built-in options] c_args = [ '-c', '-mcpu=33EP64MC203', diff --git a/data/shell-completions/zsh/_meson b/data/shell-completions/zsh/_meson index 49860d5..e7fe968 100644 --- a/data/shell-completions/zsh/_meson +++ b/data/shell-completions/zsh/_meson @@ -113,6 +113,7 @@ local -a meson_commands=( '--stdsplit=[split stdout and stderr in test logs]' \ '--errorlogs=[prints the logs from failing tests]' \ '--cross-file=[cross-compilation environment description]:cross file:_files' \ + '--native-file=[build machine compilation environment description]:native file:_files' \ '--wrap-mode=[special wrap mode]:wrap mode:'"$__meson_wrap_modes" \ ":$firstd directory:_directories" \ "::$secondd directory:_directories" \ diff --git a/data/syntax-highlighting/vim/syntax/meson.vim b/data/syntax-highlighting/vim/syntax/meson.vim index d0d15d9..1100113 100644 --- a/data/syntax-highlighting/vim/syntax/meson.vim +++ b/data/syntax-highlighting/vim/syntax/meson.vim @@ -32,8 +32,9 @@ set cpo&vim " http://mesonbuild.com/Syntax.html syn keyword mesonConditional elif else if endif -syn keyword mesonRepeat foreach endforeach +syn keyword mesonRepeat foreach endforeach syn keyword mesonOperator and not or in +syn keyword mesonStatement continue break syn match mesonComment "#.*$" contains=mesonTodo,@Spell syn keyword mesonTodo FIXME NOTE NOTES TODO XXX contained diff --git a/docs/markdown/Builtin-options.md b/docs/markdown/Builtin-options.md index e7101d5..de801ab 100644 --- a/docs/markdown/Builtin-options.md +++ b/docs/markdown/Builtin-options.md @@ -17,7 +17,7 @@ by setting them inside `default_options` of `project()` in your `meson.build`. For legacy reasons `--warnlevel` is the cli argument for the `warning_level` option. -They can also be edited after setup using `meson configure`. +They can also be edited after setup using `meson configure -Doption=value`. Installation options are all relative to the prefix, except: @@ -59,27 +59,27 @@ Options that are labeled "per machine" in the table are set per machine. See the [specifying options per machine](#Specifying-options-per-machine) section for details. -| Option | Default value | Description | Is per machine | -| ------ | ------------- | ----------- | -------------- | -| auto_features {enabled, disabled, auto} | auto | Override value of all 'auto' features | no | -| backend {ninja, vs,<br>vs2010, vs2015, vs2017, vs2019, xcode} | ninja | Backend to use | no | -| buildtype {plain, debug,<br>debugoptimized, release, minsize, custom} | debug | Build type to use | no | -| debug | true | Debug | no | -| default_library {shared, static, both} | shared | Default library type | no | -| errorlogs | true | Whether to print the logs from failing tests. 
| no |
-| install_umask {preserve, 0000-0777} | 022 | Default umask to apply on permissions of installed files | no |
-| layout {mirror,flat} | mirror | Build directory layout | no |
-| optimization {0, g, 1, 2, 3, s} | 0 | Optimization level | no |
-| pkg_config_path {OS separated path} | '' | Additional paths for pkg-config to search before builtin paths | yes |
-| cmake_prefix_path | [] | Additional prefixes for cmake to search before builtin paths | yes |
-| stdsplit | true | Split stdout and stderr in test logs | no |
-| strip | false | Strip targets on install | no |
-| unity {on, off, subprojects} | off | Unity build | no |
-| unity_size {>=2} | 4 | Unity file block size | no |
-| warning_level {0, 1, 2, 3} | 1 | Set the warning level. From 0 = none to 3 = highest | no |
-| werror | false | Treat warnings as errors | no |
-| wrap_mode {default, nofallback,<br>nodownload, forcefallback} | default | Wrap mode to use | no |
-| force_fallback_for | [] | Force fallback for those dependencies | no |
+| Option | Default value | Description | Is per machine | Is per subproject |
+| ------ | ------------- | ----------- | -------------- | ----------------- |
+| auto_features {enabled, disabled, auto} | auto | Override value of all 'auto' features | no | no |
+| backend {ninja, vs,<br>vs2010, vs2015, vs2017, vs2019, xcode} | ninja | Backend to use | no | no |
+| buildtype {plain, debug,<br>debugoptimized, release, minsize, custom} | debug | Build type to use | no | no |
+| debug | true | Debug | no | no |
+| default_library {shared, static, both} | shared | Default library type | no | yes |
+| errorlogs | true | Whether to print the logs from failing tests. | no | no |
+| install_umask {preserve, 0000-0777} | 022 | Default umask to apply on permissions of installed files | no | no |
+| layout {mirror,flat} | mirror | Build directory layout | no | no |
+| optimization {0, g, 1, 2, 3, s} | 0 | Optimization level | no | no |
+| pkg_config_path {OS separated path} | '' | Additional paths for pkg-config to search before builtin paths | yes | no |
+| cmake_prefix_path | [] | Additional prefixes for cmake to search before builtin paths | yes | no |
+| stdsplit | true | Split stdout and stderr in test logs | no | no |
+| strip | false | Strip targets on install | no | no |
+| unity {on, off, subprojects} | off | Unity build | no | no |
+| unity_size {>=2} | 4 | Unity file block size | no | no |
+| warning_level {0, 1, 2, 3} | 1 | Set the warning level. From 0 = none to 3 = highest | no | yes |
+| werror | false | Treat warnings as errors | no | yes |
+| wrap_mode {default, nofallback,<br>nodownload, forcefallback} | default | Wrap mode to use | no | no |
+| force_fallback_for | [] | Force fallback for those dependencies | no | no |
 
 <a name="build-type-options"></a>
 For setting optimization levels and toggling debug, you can either set the
@@ -215,3 +215,22 @@ the command line, as there was no `build.` prefix.
 Similarly named fields in the `[properties]` section of the cross file would
 effect cross compilers, but the code paths were fairly different allowing
 differences in behavior to crop out.
+
+## Specifying options per subproject
+
+Since *0.54.0* `default_library` and `werror` built-in options can be defined
+per subproject. This is useful for example when building shared libraries in the
+main project, but statically linking a subproject, or when the main project must build
+with no warnings but some subprojects cannot.
+
+Most of the time this would be used either by the parent project by setting
+subproject's default_options (e.g. `subproject('foo', default_options: 'default_library=static')`),
+or by the user using the command line `-Dfoo:default_library=static`.
+
+The value is overridden in this order:
+- Value from parent project
+- Value from subproject's default_options if set
+- Value from subproject() default_options if set
+- Value from command line if set
+
+Since 0.56.0 `warning_level` can also be defined per subproject.
diff --git a/docs/markdown/Cross-compilation.md b/docs/markdown/Cross-compilation.md
index d86d417..c8cd728 100644
--- a/docs/markdown/Cross-compilation.md
+++ b/docs/markdown/Cross-compilation.md
@@ -268,7 +268,7 @@ invocation to use in your cross file is the following:
 
 ```ini
 [properties]
-c_stdlib = ['mylibc', 'mylibc_dep'] # Subproject name, dependency name
+c_stdlib = ['mylibc', 'mylibc_dep'] # Subproject name, variable name
 ```
 
 This specifies that C standard library is provided in the Meson
@@ -277,6 +277,18 @@ is used on every cross built C target in the entire source tree (including
 subprojects) and the standard library is disabled. The build definitions of
 these targets do not need any modification.
+Note that it is supported for any language, not only `c`, using `<lang>_stdlib`
+property.
+
+Since *0.56.0* the variable name parameter is no longer required as long as the
+subproject calls `meson.override_dependency('c_stdlib', mylibc_dep)`.
+The above example becomes:
+
+```ini
+[properties]
+c_stdlib = 'mylibc'
+```
+
 ## Changing cross file settings
 
 Cross file settings are only read when the build directory is set up
diff --git a/docs/markdown/Dependencies.md b/docs/markdown/Dependencies.md
index a8f6d8a..b89a0aa 100644
--- a/docs/markdown/Dependencies.md
+++ b/docs/markdown/Dependencies.md
@@ -288,8 +288,12 @@ You can call `dependency` multiple times with different modules and use those
 to link against your targets.
 
 If your boost headers or libraries are in non-standard locations you
-can set the BOOST_ROOT, BOOST_INCLUDEDIR, and/or BOOST_LIBRARYDIR
-environment variables.
+can set the `BOOST_ROOT`, or the `BOOST_INCLUDEDIR` and `BOOST_LIBRARYDIR`
+environment variables. *(added in 0.56.0)* You can also set these
+parameters as `boost_root`, `boost_include`, and `boost_librarydir` in your
+native or cross machine file. Note that machine file variables are
+preferred to environment variables, and that specifying any of these
+disables system-wide search for boost.
 
 You can set the argument `threading` to `single` to use boost libraries that
 have been compiled for single-threaded use instead.
diff --git a/docs/markdown/IDE-integration.md b/docs/markdown/IDE-integration.md
index 2cc4f4f..816225f 100644
--- a/docs/markdown/IDE-integration.md
+++ b/docs/markdown/IDE-integration.md
@@ -25,7 +25,8 @@ With this command meson will configure the project and also generate
 introspection information that is stored in `intro-*.json` files in the
 `meson-info` directory. The introspection dump will be automatically updated
 when meson is (re)configured, or the build options change. Thus, an IDE can
-watch for changes in this directory to know when something changed.
+watch for changes in this directory to know when something changed. Note that
+`meson-info.json` is guaranteed to be the last file written.
 
 The `meson-info` directory should contain the following files:
 
@@ -334,3 +335,4 @@ removal of a key) are unlikely and will be announced in the release notes.
- [Meson Cmake Wrapper](https://github.com/prozum/meson-cmake-wrapper) (for cmake IDEs) (currently unmaintained !!) - [Meson-UI](https://github.com/michaelbadcrumble/meson-ui) (Meson build GUI) - [Meson Syntax Highlighter](https://plugins.jetbrains.com/plugin/13269-meson-syntax-highlighter) plugin for JetBrains IDEs. +- [asabil.meson](https://open-vsx.org/extension/asabil/meson) extension for VS Code/Codium diff --git a/docs/markdown/Keyval-module.md b/docs/markdown/Keyval-module.md index 643265e..afc48fa 100644 --- a/docs/markdown/Keyval-module.md +++ b/docs/markdown/Keyval-module.md @@ -1,5 +1,5 @@ --- -short-description: Unstable keyval module +short-description: Keyval module authors: - name: Mark Schulte, Paolo Bonzini years: [2017, 2019] @@ -23,7 +23,7 @@ chosen the configuration options), output a ".config" file. The module may be imported as follows: ``` meson -keyval = import('unstable-keyval') +keyval = import('keyval') ``` The following functions will then be available as methods on the object diff --git a/docs/markdown/Machine-files.md b/docs/markdown/Machine-files.md index 9011f79..5ac66a8 100644 --- a/docs/markdown/Machine-files.md +++ b/docs/markdown/Machine-files.md @@ -5,6 +5,37 @@ documentation on the common values used by both, for the specific values of one or the other see the [cross compilation](Cross-compilation.md) and [native environments](Native-environments.md). +## Data Types + +There are four basic data types in a machine file: +- strings +- arrays +- booleans +- integers + +A string is specified single quoted: +```ini +[section] +option1 = 'false' +option2 = '2' +``` + +An array is enclosed in square brackets, and must consist of strings or booleans +```ini +[section] +option = ['value'] +``` + +A boolean must be either `true` or `false`, and unquoted. +```ini +option = false +``` + +An integer must be either an unquoted numeric constant; +```ini +option = 42 +``` + ## Sections The following sections are allowed: @@ -12,10 +43,12 @@ The following sections are allowed: - binaries - paths - properties +- project options +- built-in options ### constants -*Since 0.55.0* +*Since 0.56.0* String and list concatenation is supported using the `+` operator, joining paths is supported using the `/` operator. @@ -88,14 +121,16 @@ a = 'Hello' ### Binaries The binaries section contains a list of binaries. These can be used -internally by meson, or by the `find_program` function: +internally by meson, or by the `find_program` function. + +These values must be either strings or an array of strings Compilers and linkers are defined here using `<lang>` and `<lang>_ld`. `<lang>_ld` is special because it is compiler specific. For compilers like gcc and clang which are used to invoke the linker this is a value to pass to their "choose the linker" argument (-fuse-ld= in this case). For compilers like MSVC and Clang-Cl, this is the path to a linker for meson to invoke, -such as `link.exe` or `lld-link.exe`. Support for ls is *new in 0.53.0* +such as `link.exe` or `lld-link.exe`. Support for `ld` is *new in 0.53.0* *changed in 0.53.1* the `ld` variable was replaced by `<lang>_ld`, because it *regressed a large number of projects. 
in 0.53.0 the `ld` variable was used @@ -113,8 +148,8 @@ llvm-config = '/usr/lib/llvm8/bin/llvm-config' Cross example: ```ini -c = '/usr/bin/i586-mingw32msvc-gcc' -cpp = '/usr/bin/i586-mingw32msvc-g++' +c = ['ccache', '/usr/bin/i586-mingw32msvc-gcc'] +cpp = ['ccache', '/usr/bin/i586-mingw32msvc-g++'] c_ld = 'gold' cpp_ld = 'gold' ar = '/usr/i586-mingw32msvc/bin/ar' @@ -137,8 +172,10 @@ An incomplete list of internally used programs that can be overridden here is: ### Paths and Directories +*Deprecated in 0.56.0* use the built-in section instead. + As of 0.50.0 paths and directories such as libdir can be defined in the native -file in a paths section +and cross files in a paths section. These should be strings. ```ini [paths] @@ -157,21 +194,79 @@ command line will override any options in the native file. For example, passing In addition to special data that may be specified in cross files, this section may contain random key value pairs accessed using the -`meson.get_external_property()` +`meson.get_external_property()`, or `meson.get_cross_property()`. + +*Changed in 0.56.0* putting `<lang>_args` and `<lang>_link_args` in the +properties section has been deprecated, and should be put in the built-in +options section. + +### Project specific options + +*New in 0.56.0* + +Path options are not allowed, those must be set in the `[paths]` section. + +Being able to set project specific options in a cross or native file can be +done using the `[project options]` section of the specific file (if doing a +cross build the options from the native file will be ignored) + +For setting options in subprojects use the `[<subproject>:project options]` +section instead. + +```ini +[project options] +build-tests = true + +[zlib:project options] +build-tests = false +``` + +### Meson built-in options + +Meson built-in options can be set the same way: + +```ini +[built-in options] +c_std = 'c99' +``` + +You can set some meson built-in options on a per-subproject basis, such as +`default_library` and `werror`. The order of precedence is: +1) Command line +2) Machine file +3) Build system definitions + +```ini +[zlib:built-in options] +default_library = 'static' +werror = false +``` + +Options set on a per-subproject basis will inherit the +option from the parent if the parent has a setting but the subproject +doesn't, even when there is a default set meson language. + +```ini +[built-in options] +default_library = 'static' +``` -## Properties +will make subprojects use default_library as static. -*New for native files in 0.54.0* +Some options can be set on a per-machine basis (in other words, the value of +the build machine can be different than the host machine in a cross compile). +In these cases the values from both a cross file and a native file are used. -The properties section can contain any variable you like, and is accessed via -`meson.get_external_property`, or `meson.get_cross_property`. +An incomplete list of options is: +- pkg_config_path +- cmake_prefix_path ## Loading multiple machine files Native files allow layering (cross files can be layered since meson 0.52.0). -More than one native file can be loaded, with values from a previous file being +More than one file can be loaded, with values from a previous file being overridden by the next. The intention of this is not overriding, but to allow -composing native files. This composition is done by passing the command line +composing files. 
This composition is done by passing the command line argument multiple times: ```console diff --git a/docs/markdown/Reference-manual.md b/docs/markdown/Reference-manual.md index 966d408..a860f85 100644 --- a/docs/markdown/Reference-manual.md +++ b/docs/markdown/Reference-manual.md @@ -1977,6 +1977,9 @@ are immutable, all operations return their results as a new string. - `startswith(string)`: returns true if string starts with the string specified as the argument +- `substring(start,end)` *(since 0.56.0)*: returns a substring specified from start to end. + Both `start` and `end` arguments are optional, so, for example, `'foobar'.substring()` will return `'foobar'`. + - `strip()`: removes whitespace at the beginning and end of the string. *(since 0.43.0)* Optionally can take one positional string argument, and all characters in that string will be stripped. @@ -2264,7 +2267,7 @@ The following keyword arguments can be used: some symbols to be exposed on Linux, and it should be passed via `args` keyword argument, see below). Supported by the methods `sizeof`, `has_type`, `has_function`, `has_member`, `has_members`, - `check_header`, `has_header`, `has_header_symbol`. + `check_header`, `has_header`, `has_header_symbol`, `get_define` **Note:** These compiler checks do not use compiler arguments added with `add_*_arguments()`, via `-Dlang_args` on the command-line, or through diff --git a/docs/markdown/Release-notes-for-0.55.0.md b/docs/markdown/Release-notes-for-0.55.0.md index 534c452..cd3f795 100644 --- a/docs/markdown/Release-notes-for-0.55.0.md +++ b/docs/markdown/Release-notes-for-0.55.0.md @@ -305,3 +305,11 @@ $ meson compile "--ninja-args=['a,b', 'c d']" dumping the AST (--ast): **new in 0.55.0** - prints the AST of a meson.build as JSON +## `--backend=vs` now matches `-Db_vscrt=from_buildtype` behaviour in the Ninja backend + +When `--buildtype=debugoptimized` is used with the Ninja backend, the VS CRT +option used is `/MD`, which is the [behaviour documented for all +backends](https://mesonbuild.com/Builtin-options.html#b_vscrt-from_buildtype). +However, the Visual Studio backend was pass `/MT` in that case, which is inconsistent. + +If you need to use the MultiThreaded CRT, you should explicitly pass `-Db_vscrt=mt` diff --git a/docs/markdown/Style-guide.md b/docs/markdown/Style-guide.md index 960e60c..04a81e8 100644 --- a/docs/markdown/Style-guide.md +++ b/docs/markdown/Style-guide.md @@ -13,9 +13,12 @@ Always spaces. ## Naming Variable -The most consistent naming convention is the snake case. Let say you would -like to refer to your executable so something like `my_exe` would work or -just `exe`. +Snake case (stylized as `snake_case`) refers to the style of writing in which +each space is replaced by an underscore (`_`) character, and the first letter of +each word written in lowercase. It is the most common naming convention used +in Meson build scripts as identifiers for variable. + +Let say you would like to refer to your executable so something like `my_exe`. ## Naming options diff --git a/docs/markdown/Subprojects.md b/docs/markdown/Subprojects.md index 9c54d69..02a83e6 100644 --- a/docs/markdown/Subprojects.md +++ b/docs/markdown/Subprojects.md @@ -236,6 +236,28 @@ the following command-line options: but you only want to build against the library sources for a few of them. + **Warning**: This could lead to mixing system and subproject version of the + same library in the same process. 
Take this case as example: + - Libraries `glib-2.0` and `gstreamer-1.0` are installed on your system. + - `gstreamer-1.0` depends on `glib-2.0`, pkg-config file `gstreamer-1.0.pc` + has `Requires: glib-2.0`. + - In your application build definition you do: + ```meson + executable('app', ..., + dependencies: [ + dependency('glib-2.0', fallback: 'glib'), + dependency('gstreamer-1.0', fallback: 'gstreamer')], + ) + ``` + - You configure with `--force-fallback-for=glib`. + This result in linking to two different versions of library `glib-2.0` + because `dependency('glib-2.0', fallback: 'glib')` will return the + subproject dependency, but `dependency('gstreamer-1.0', fallback: 'gstreamer')` + will not fallback and return the system dependency, including `glib-2.0` + library. To avoid that situation, every dependency that itself depend on + `glib-2.0` must also be forced to fallback, in this case with + `--force-fallback-for=glib,gsteamer`. + ## Download subprojects *Since 0.49.0* diff --git a/docs/markdown/Syntax.md b/docs/markdown/Syntax.md index 7cb39e9..bbe3dbb 100644 --- a/docs/markdown/Syntax.md +++ b/docs/markdown/Syntax.md @@ -220,6 +220,26 @@ is_x86 = target.startswith('x86') # boolean value 'true' is_bsd = target.to_lower().endswith('bsd') # boolean value 'true' ``` +#### .substring() + +Since 0.56.0, you can extract a substring from a string. + +```meson +# Similar to the Python str[start:end] syntax +target = 'x86_FreeBSD' +platform = target.substring(0, 3) # prefix string value 'x86' +system = target.substring(4) # suffix string value 'FreeBSD' +``` + +The method accepts negative values where negative `start` is relative to the end of +string `len(string) - start` as well as negative `end`. + +```meson +string = 'foobar' +target.substring(-5, -3) # => 'oo' +target.substring(1, -1) # => 'ooba' +``` + #### .split(), .join() ```meson diff --git a/docs/markdown/Tutorial.md b/docs/markdown/Tutorial.md index c5a4e6b..f108c0c 100644 --- a/docs/markdown/Tutorial.md +++ b/docs/markdown/Tutorial.md @@ -45,7 +45,10 @@ project('tutorial', 'c') executable('demo', 'main.c') ``` -That is all. We are now ready to build our application. First we need +That is all. Note that unlike Autotools you [do not need to add any source +headers to the list of sources](FAQ.md#do-i-need-to-add-my-headers-to-the-sources-list-like-in-autotools). + +We are now ready to build our application. First we need to initialize the build by going into the source directory and issuing the following commands. @@ -118,6 +121,15 @@ gtkdep = dependency('gtk+-3.0') executable('demo', 'main.c', dependencies : gtkdep) ``` +If your app needs to use multiple libraries, you need to use separate +[`dependency()`](Reference-manual.md#dependency) calls for each, like so: + +```meson +gtkdeps = [dependency('gtk+-3.0'), dependency('gtksourceview-3.0')] +``` + +We don't need it for the current example. + Now we are ready to build. The thing to notice is that we do *not* need to recreate our build directory, run any sort of magical commands or the like. Instead we just type the exact same command as if we were diff --git a/docs/markdown/Users.md b/docs/markdown/Users.md index 49d30a4..307aef7 100644 --- a/docs/markdown/Users.md +++ b/docs/markdown/Users.md @@ -56,6 +56,7 @@ topic](https://github.com/topics/meson). 
- [GtkDApp](https://gitlab.com/csoriano/GtkDApp), an application template for developing Flatpak apps with Gtk+ and D - [GVfs](https://git.gnome.org/browse/gvfs/), a userspace virtual filesystem designed to work with the I/O abstraction of GIO - [Hardcode-Tray](https://github.com/bil-elmoussaoui/Hardcode-Tray), fixes hardcoded tray icons in Linux + - [HarfBuzz](https://github.com/harfbuzz/harfbuzz), a text shaping engine - [HelenOS](http://helenos.org), a portable microkernel-based multiserver operating system - [HexChat](https://github.com/hexchat/hexchat), a cross-platform IRC client in C - [IGT](https://gitlab.freedesktop.org/drm/igt-gpu-tools), Linux kernel graphics driver test suite @@ -87,6 +88,7 @@ lookup based on OpenStreetMap data format files - [libui](https://github.com/andlabs/libui), a simple and portable (but not inflexible) GUI library in C that uses the native GUI technologies of each platform it supports - [Libva](https://github.com/intel/libva), an implementation for the VA (VIdeo Acceleration) API + - [Libvirt](https://libvirt.org), a toolkit to manage virtualization platforms - [Libzim](https://github.com/openzim/libzim), the reference implementation for the ZIM file format - [Marker](https://github.com/fabiocolacio/Marker), a GTK-3 markdown editor - [Mesa](https://gitlab.freedesktop.org/mesa/mesa/), an open source graphics driver project @@ -96,6 +98,7 @@ format files - [Nemo](https://github.com/linuxmint/nemo), the file manager for the Cinnamon desktop environment - [OcherBook](https://github.com/ccoffing/OcherBook), an open source book reader for Kobo devices - [oomd](https://github.com/facebookincubator/oomd), a userspace Out-Of-Memory (OOM) killer for Linux systems + - [Owl Chess](https://github.com/michaelbrockus/chess), a chess game written in Rust - [OpenH264](https://github.com/cisco/openh264), open source H.264 codec - [OpenHMD](https://github.com/OpenHMD/OpenHMD), a free and open source API and drivers for immersive technology, such as head mounted displays with built in head tracking - [OpenTitan](https://github.com/lowRISC/opentitan), an open source silicon Root of Trust (RoT) project. @@ -108,12 +111,14 @@ format files - [Peek](https://github.com/phw/peek), simple animated GIF screen recorder with an easy to use interface - [PicoLibc](https://github.com/keith-packard/picolibc), a standard C library for small embedded systems with limited RAM - [PipeWire](https://github.com/PipeWire/pipewire), a framework for video and audio for containerized applications + - [Paper Rock Scissors](https://github.com/michaelbrockus/paper_rock_scissors), a game with weapons themed at home paper rock scissors style. 
- [Pithos](https://github.com/pithos/pithos), a Pandora Radio client - [Pitivi](https://github.com/pitivi/pitivi/), a nonlinear video editor - [Playerctl](https://github.com/acrisci/playerctl), mpris command-line controller and library for spotify, vlc, audacious, bmp, cmus, and others - [Polari](https://gitlab.gnome.org/GNOME/polari), an IRC client - [qboot](https://github.com/bonzini/qboot), a minimal x86 firmware for booting Linux kernels - [radare2](https://github.com/radare/radare2), unix-like reverse engineering framework and commandline tools (not the default) + - [QEMU](https://qemu.org), a processor emulator and virtualizer - [RxDock](https://gitlab.com/rxdock/rxdock), a protein-ligand docking software designed for high throughput virtual screening (fork of rDock) - [scrcpy](https://github.com/Genymobile/scrcpy), a cross platform application that provides display and control of Android devices connected on USB or over TCP/IP - [Sequeler](https://github.com/Alecaddd/sequeler), a friendly SQL client for Linux, built with Vala and Gtk diff --git a/docs/markdown/Vala.md b/docs/markdown/Vala.md index cbb58a9..0a29847 100644 --- a/docs/markdown/Vala.md +++ b/docs/markdown/Vala.md @@ -237,7 +237,7 @@ dependencies = [ dependency('glib-2.0'), dependency('gobject-2.0'), meson.get_compiler('c').find_library('foo'), - meson.get_compiler('vala').find_library('foo', dir: vapi_dir), + meson.get_compiler('vala').find_library('foo', dirs: vapi_dir), ] sources = files('app.vala') diff --git a/docs/markdown/howtox.md b/docs/markdown/howtox.md index c89f883..0d1a2a2 100644 --- a/docs/markdown/howtox.md +++ b/docs/markdown/howtox.md @@ -25,7 +25,7 @@ for the host platform in cross builds can only be specified with a cross file. There is a table of all environment variables supported [Here](Reference-tables.md#compiler-and-linker-selection-variables) -## Set dynamic linker +## Set linker *New in 0.53.0* diff --git a/docs/markdown/snippets/keyval.md b/docs/markdown/snippets/keyval.md new file mode 100644 index 0000000..895de9b --- /dev/null +++ b/docs/markdown/snippets/keyval.md @@ -0,0 +1,7 @@ +## `unstable-keyval` is now stable `keyval` + +The `unstable-keyval` has been renamed to `keyval` and now promises stability +guarantees. + +Meson will print a warning when you load an `unstable-` module that has been +stabilised (so `unstable-keyval` is still accepted for example). diff --git a/docs/markdown/snippets/per_subproject.md b/docs/markdown/snippets/per_subproject.md new file mode 100644 index 0000000..6de6068 --- /dev/null +++ b/docs/markdown/snippets/per_subproject.md @@ -0,0 +1,4 @@ +## Per subproject `warning_level` option + +`warning_level` can now be defined per subproject, in the same way as +`default_library` and `werror`. diff --git a/docs/markdown/snippets/project_options_in_machine_files.md b/docs/markdown/snippets/project_options_in_machine_files.md new file mode 100644 index 0000000..8dab951 --- /dev/null +++ b/docs/markdown/snippets/project_options_in_machine_files.md @@ -0,0 +1,52 @@ +## Project and built-in options can be set in native or cross files + +A new set of sections has been added to the cross and native files, `[project +options]` and `[<subproject_name>:project options]`, where `subproject_name` +is the name of a subproject. Any options that are allowed in the project can +be set from this section. They have the lowest precedent, and will be +overwritten by command line arguments. 
+ + +```meson +option('foo', type : 'string', value : 'foo') +``` + +```ini +[project options] +foo = 'other val' +``` + +```console +meson build --native-file my.ini +``` + +Will result in the option foo having the value `other val`, + +```console +meson build --native-file my.ini -Dfoo='different val' +``` + +Will result in the option foo having the value `different val`, + + +Subproject options are assigned like this: + +```ini +[zlib:project options] +foo = 'some val' +``` + +Additionally meson level options can be set in the same way, using the +`[built-in options]` section. + +```ini +[built-in options] +c_std = 'c99' +``` + +These options can also be set on a per-subproject basis, although only +`default_library` and `werror` can currently be set: +```ini +[zlib:built-in options] +default_library = 'static' +``` diff --git a/docs/markdown/snippets/stdlib.md b/docs/markdown/snippets/stdlib.md new file mode 100644 index 0000000..5e80dd5 --- /dev/null +++ b/docs/markdown/snippets/stdlib.md @@ -0,0 +1,6 @@ +## Custom standard library + +- It is not limited to cross builds any more, `<lang>_stdlib` property can be + set in native files. +- The variable name parameter is no longer required as long as the subproject + calls `meson.override_dependency('c_stdlib', mylibc_dep)`. diff --git a/mesonbuild/ast/introspection.py b/mesonbuild/ast/introspection.py index 142c219..6e6927f 100644 --- a/mesonbuild/ast/introspection.py +++ b/mesonbuild/ast/introspection.py @@ -120,7 +120,7 @@ class IntrospectionInterpreter(AstInterpreter): self.do_subproject(i) self.coredata.init_backend_options(self.backend) - options = {k: v for k, v in self.environment.cmd_line_options.items() if k.startswith('backend_')} + options = {k: v for k, v in self.environment.meson_options.host[''].items() if k.startswith('backend_')} self.coredata.set_options(options) self.func_add_languages(None, proj_langs, None) diff --git a/mesonbuild/backend/backends.py b/mesonbuild/backend/backends.py index cfd3a39..c6a48d3 100644 --- a/mesonbuild/backend/backends.py +++ b/mesonbuild/backend/backends.py @@ -14,6 +14,7 @@ from collections import OrderedDict from functools import lru_cache +from pathlib import Path import enum import json import os @@ -183,9 +184,9 @@ class Backend: self.build_to_src = mesonlib.relpath(self.environment.get_source_dir(), self.environment.get_build_dir()) - def get_target_filename(self, t): + def get_target_filename(self, t, *, warn_multi_output: bool = True): if isinstance(t, build.CustomTarget): - if len(t.get_outputs()) != 1: + if warn_multi_output and len(t.get_outputs()) != 1: mlog.warning('custom_target {!r} has more than one output! 
' 'Using the first one.'.format(t.name)) filename = t.get_outputs()[0] @@ -261,7 +262,7 @@ class Backend: return self.build_to_src def get_target_private_dir(self, target): - return os.path.join(self.get_target_filename(target) + '.p') + return os.path.join(self.get_target_filename(target, warn_multi_output=False) + '.p') def get_target_private_dir_abs(self, target): return os.path.join(self.environment.get_build_dir(), self.get_target_private_dir(target)) @@ -455,10 +456,35 @@ class Backend: args.extend(self.environment.coredata.get_external_link_args(target.for_machine, lang)) except Exception: pass + # Match rpath formats: + # -Wl,-rpath= + # -Wl,-rpath, + rpath_regex = re.compile(r'-Wl,-rpath[=,]([^,]+)') + # Match solaris style compat runpath formats: + # -Wl,-R + # -Wl,-R, + runpath_regex = re.compile(r'-Wl,-R[,]?([^,]+)') + # Match symbols formats: + # -Wl,--just-symbols= + # -Wl,--just-symbols, + symbols_regex = re.compile(r'-Wl,--just-symbols[=,]([^,]+)') for arg in args: - if arg.startswith('-Wl,-rpath='): - for dir in arg.replace('-Wl,-rpath=','').split(':'): + rpath_match = rpath_regex.match(arg) + if rpath_match: + for dir in rpath_match.group(1).split(':'): dirs.add(dir) + runpath_match = runpath_regex.match(arg) + if runpath_match: + for dir in runpath_match.group(1).split(':'): + # The symbols arg is an rpath if the path is a directory + if Path(dir).is_dir(): + dirs.add(dir) + symbols_match = symbols_regex.match(arg) + if symbols_match: + for dir in symbols_match.group(1).split(':'): + # Prevent usage of --just-symbols to specify rpath + if Path(dir).is_dir(): + raise MesonException('Invalid arg for --just-symbols, {} is a directory.'.format(dir)) return dirs def rpaths_for_bundled_shared_libraries(self, target, exclude_system=True): @@ -631,7 +657,7 @@ class Backend: # First, the trivial ones that are impossible to override. # # Add -nostdinc/-nostdinc++ if needed; can't be overridden - commands += self.get_cross_stdlib_args(target, compiler) + commands += self.get_no_stdlib_args(target, compiler) # Add things like /NOLOGO or -pipe; usually can't be overridden commands += compiler.get_always_args() # Only add warning-flags by default if the buildtype enables it, and if diff --git a/mesonbuild/backend/ninjabackend.py b/mesonbuild/backend/ninjabackend.py index 62bda1a..46886ce 100644 --- a/mesonbuild/backend/ninjabackend.py +++ b/mesonbuild/backend/ninjabackend.py @@ -348,7 +348,7 @@ class NinjaBuildElement: use_rspfile = self._should_use_rspfile() if use_rspfile: rulename = self.rulename + '_RSP' - mlog.log("Command line for building %s is long, using a response file" % self.outfilenames) + mlog.debug("Command line for building %s is long, using a response file" % self.outfilenames) else: rulename = self.rulename line = 'build {}{}: {} {}'.format(outs, implicit_outs, rulename, ins) @@ -459,7 +459,7 @@ int dummy; # different locales have different messages with a different # number of colons. Match up to the the drive name 'd:\'. # When used in cross compilation, the path separator is a - # backslash rather than a forward slash so handle both. + # forward slash rather than a backslash so handle both. 
matchre = re.compile(rb"^(.*\s)([a-zA-Z]:\\|\/).*stdio.h$") def detect_prefix(out): @@ -972,6 +972,8 @@ int dummy; targets = self.build.get_targets().values() use_llvm_cov = False for target in targets: + if not hasattr(target, 'compilers'): + continue for compiler in target.compilers.values(): if compiler.get_id() == 'clang' and not compiler.info.is_darwin(): use_llvm_cov = True @@ -984,7 +986,7 @@ int dummy; self.build.get_subproject_dir()), self.environment.get_build_dir(), self.environment.get_log_dir()] + - ['--use_llvm_cov'] if use_llvm_cov else []) + (['--use_llvm_cov'] if use_llvm_cov else [])) def generate_coverage_rules(self): e = NinjaBuildElement(self.all_outputs, 'meson-coverage', 'CUSTOM_COMMAND', 'PHONY') @@ -2086,12 +2088,15 @@ https://gcc.gnu.org/bugzilla/show_bug.cgi?id=47485''')) mod_files = _scan_fortran_file_deps(src, srcdir, dirname, tdeps, compiler) return mod_files - def get_cross_stdlib_args(self, target, compiler): - if self.environment.machines.matches_build_machine(target.for_machine): - return [] - if not self.environment.properties.host.has_stdlib(compiler.language): - return [] - return compiler.get_no_stdinc_args() + def get_no_stdlib_args(self, target, compiler): + if compiler.language in self.build.stdlibs[target.for_machine]: + return compiler.get_no_stdinc_args() + return [] + + def get_no_stdlib_link_args(self, target, linker): + if hasattr(linker, 'language') and linker.language in self.build.stdlibs[target.for_machine]: + return linker.get_no_stdlib_link_args() + return [] def get_compile_debugfile_args(self, compiler, target, objfile): # The way MSVC uses PDB files is documented exactly nowhere so @@ -2155,11 +2160,11 @@ https://gcc.gnu.org/bugzilla/show_bug.cgi?id=47485''')) return linker.get_link_debugfile_args(outname) def generate_llvm_ir_compile(self, target, src): + base_proxy = self.get_base_options_for_target(target) compiler = get_compiler_for_source(target.compilers.values(), src) commands = compiler.compiler_args() # Compiler args for compiling this target - commands += compilers.get_base_compile_args(self.environment.coredata.base_options, - compiler) + commands += compilers.get_base_compile_args(base_proxy, compiler) if isinstance(src, File): if src.is_built: src_filename = os.path.join(src.subdir, src.fname) @@ -2518,14 +2523,6 @@ https://gcc.gnu.org/bugzilla/show_bug.cgi?id=47485''')) elem.add_item('CROSS', '--cross-host=' + self.environment.machines[target.for_machine].system) self.add_build(elem) - def get_cross_stdlib_link_args(self, target, linker): - if isinstance(target, build.StaticLibrary) or \ - self.environment.machines.matches_build_machine(target.for_machine): - return [] - if not self.environment.properties.host.has_stdlib(linker.language): - return [] - return linker.get_no_stdlib_link_args() - def get_import_filename(self, target): return os.path.join(self.get_target_dir(target), target.import_filename) @@ -2687,7 +2684,7 @@ https://gcc.gnu.org/bugzilla/show_bug.cgi?id=47485''')) linker, isinstance(target, build.SharedModule)) # Add -nostdlib if needed; can't be overridden - commands += self.get_cross_stdlib_link_args(target, linker) + commands += self.get_no_stdlib_link_args(target, linker) # Add things like /NOLOGO; usually can't be overridden commands += linker.get_linker_always_args() # Add buildtype linker args: optimization level, etc. 
diff --git a/mesonbuild/build.py b/mesonbuild/build.py index d7f3b66..5e6db73 100644 --- a/mesonbuild/build.py +++ b/mesonbuild/build.py @@ -819,7 +819,8 @@ class BuildTarget(Target): def get_link_dep_subdirs(self): result = OrderedSet() for i in self.link_targets: - result.add(i.get_subdir()) + if not isinstance(i, StaticLibrary): + result.add(i.get_subdir()) result.update(i.get_link_dep_subdirs()) return result @@ -1010,23 +1011,16 @@ This will become a hard error in a future Meson release.''') def get_extra_args(self, language): return self.extra_args.get(language, []) - def get_dependencies(self, exclude=None, for_pkgconfig=False): + def get_dependencies(self, exclude=None): transitive_deps = [] if exclude is None: exclude = [] for t in itertools.chain(self.link_targets, self.link_whole_targets): if t in transitive_deps or t in exclude: continue - # When generating `Libs:` and `Libs.private:` lists in pkg-config - # files we don't want to include static libraries that we link_whole - # or are uninstalled (they're implicitly promoted to link_whole). - # But we still need to include their transitive dependencies, - # a static library we link_whole would itself link to a shared - # library or an installed static library. - if not for_pkgconfig or (not t.is_internal() and t not in self.link_whole_targets): - transitive_deps.append(t) + transitive_deps.append(t) if isinstance(t, StaticLibrary): - transitive_deps += t.get_dependencies(transitive_deps + exclude, for_pkgconfig) + transitive_deps += t.get_dependencies(transitive_deps + exclude) return transitive_deps def get_source_subdir(self): diff --git a/mesonbuild/cmake/executor.py b/mesonbuild/cmake/executor.py index 148a999..e29e67a 100644 --- a/mesonbuild/cmake/executor.py +++ b/mesonbuild/cmake/executor.py @@ -35,6 +35,31 @@ if T.TYPE_CHECKING: TYPE_result = T.Tuple[int, T.Optional[str], T.Optional[str]] +MESON_TO_CMAKE_MAPPING = { + 'arm': 'ARMCC', + 'armclang': 'ARMClang', + 'clang': 'Clang', + 'clang-cl': 'MSVC', + 'flang': 'Flang', + 'g95': 'G95', + 'gcc': 'GNU', + 'intel': 'Intel', + 'intel-cl': 'MSVC', + 'msvc': 'MSVC', + 'pathscale': 'PathScale', + 'pgi': 'PGI', + 'sun': 'SunPro', +} + +def meson_compiler_to_cmake_id(cobj): + # cland and apple clang both map to 'clang' in meson, so we need to look at + # the linker that's being used + if cobj.linker.get_id() == 'ld64': + return 'AppleClang' + # If no mapping, try GNU and hope that the build files don't care + return MESON_TO_CMAKE_MAPPING.get(cobj.get_id(), 'GNU') + + class CMakeExecutor: # The class's copy of the CMake path. Avoids having to search for it # multiple times in the same Meson invocation. 
@@ -69,7 +94,12 @@ class CMakeExecutor: self.environment.is_cross_build(), 'CMAKE_PREFIX_PATH') if env_pref_path is not None: - env_pref_path = re.split(r':|;', env_pref_path) + if mesonlib.is_windows(): + # Cannot split on ':' on Windows because its in the drive letter + env_pref_path = env_pref_path.split(os.pathsep) + else: + # https://github.com/mesonbuild/meson/issues/7294 + env_pref_path = re.split(r':|;', env_pref_path) env_pref_path = [x for x in env_pref_path if x] # Filter out empty strings if not self.prefix_paths: self.prefix_paths = [] @@ -262,29 +292,33 @@ class CMakeExecutor: p = fallback return p - def choose_compiler(lang: str) -> T.Tuple[str, str]: + def choose_compiler(lang: str) -> T.Tuple[str, str, str, str]: + comp_obj = None exe_list = [] if lang in compilers: - exe_list = compilers[lang].get_exelist() + comp_obj = compilers[lang] else: try: comp_obj = self.environment.compiler_from_language(lang, MachineChoice.BUILD) - if comp_obj is not None: - exe_list = comp_obj.get_exelist() except Exception: pass + if comp_obj is not None: + exe_list = comp_obj.get_exelist() + comp_id = meson_compiler_to_cmake_id(comp_obj) + comp_version = comp_obj.version.upper() + if len(exe_list) == 1: - return make_abs(exe_list[0], lang), '' + return make_abs(exe_list[0], lang), '', comp_id, comp_version elif len(exe_list) == 2: - return make_abs(exe_list[1], lang), make_abs(exe_list[0], lang) + return make_abs(exe_list[1], lang), make_abs(exe_list[0], lang), comp_id, comp_version else: mlog.debug('Failed to find a {} compiler for CMake. This might cause CMake to fail.'.format(lang)) - return fallback, '' + return fallback, '', 'GNU', '' - c_comp, c_launcher = choose_compiler('c') - cxx_comp, cxx_launcher = choose_compiler('cpp') - fortran_comp, fortran_launcher = choose_compiler('fortran') + c_comp, c_launcher, c_id, c_version = choose_compiler('c') + cxx_comp, cxx_launcher, cxx_id, cxx_version = choose_compiler('cpp') + fortran_comp, fortran_launcher, _, _ = choose_compiler('fortran') # on Windows, choose_compiler returns path with \ as separator - replace by / before writing to CMAKE file c_comp = c_comp.replace('\\', '/') @@ -306,34 +340,42 @@ class CMakeExecutor: fortran_comp_file = comp_dir / 'CMakeFortranCompiler.cmake' if c_comp and not c_comp_file.is_file(): + is_gnu = '1' if c_id == 'GNU' else '' c_comp_file.write_text(textwrap.dedent('''\ # Fake CMake file to skip the boring and slow stuff set(CMAKE_C_COMPILER "{}") # Should be a valid compiler for try_compile, etc. set(CMAKE_C_COMPILER_LAUNCHER "{}") # The compiler launcher (if presentt) - set(CMAKE_C_COMPILER_ID "GNU") # Pretend we have found GCC - set(CMAKE_COMPILER_IS_GNUCC 1) + set(CMAKE_COMPILER_IS_GNUCC {}) + set(CMAKE_C_COMPILER_ID "{}") + set(CMAKE_C_COMPILER_VERSION "{}") set(CMAKE_C_COMPILER_LOADED 1) + set(CMAKE_C_COMPILER_FORCED 1) set(CMAKE_C_COMPILER_WORKS TRUE) set(CMAKE_C_ABI_COMPILED TRUE) set(CMAKE_C_SOURCE_FILE_EXTENSIONS c;m) set(CMAKE_C_IGNORE_EXTENSIONS h;H;o;O;obj;OBJ;def;DEF;rc;RC) set(CMAKE_SIZEOF_VOID_P "{}") - '''.format(c_comp, c_launcher, ctypes.sizeof(ctypes.c_voidp)))) + '''.format(c_comp, c_launcher, is_gnu, c_id, c_version, + ctypes.sizeof(ctypes.c_voidp)))) if cxx_comp and not cxx_comp_file.is_file(): + is_gnu = '1' if cxx_id == 'GNU' else '' cxx_comp_file.write_text(textwrap.dedent('''\ # Fake CMake file to skip the boring and slow stuff set(CMAKE_CXX_COMPILER "{}") # Should be a valid compiler for try_compile, etc. 
set(CMAKE_CXX_COMPILER_LAUNCHER "{}") # The compiler launcher (if presentt) - set(CMAKE_CXX_COMPILER_ID "GNU") # Pretend we have found GCC - set(CMAKE_COMPILER_IS_GNUCXX 1) + set(CMAKE_COMPILER_IS_GNUCXX {}) + set(CMAKE_CXX_COMPILER_ID "{}") + set(CMAKE_CXX_COMPILER_VERSION "{}") set(CMAKE_CXX_COMPILER_LOADED 1) + set(CMAKE_CXX_COMPILER_FORCED 1) set(CMAKE_CXX_COMPILER_WORKS TRUE) set(CMAKE_CXX_ABI_COMPILED TRUE) set(CMAKE_CXX_IGNORE_EXTENSIONS inl;h;hpp;HPP;H;o;O;obj;OBJ;def;DEF;rc;RC) set(CMAKE_CXX_SOURCE_FILE_EXTENSIONS C;M;c++;cc;cpp;cxx;mm;CPP) set(CMAKE_SIZEOF_VOID_P "{}") - '''.format(cxx_comp, cxx_launcher, ctypes.sizeof(ctypes.c_voidp)))) + '''.format(cxx_comp, cxx_launcher, is_gnu, cxx_id, cxx_version, + ctypes.sizeof(ctypes.c_voidp)))) if fortran_comp and not fortran_comp_file.is_file(): fortran_comp_file.write_text(textwrap.dedent('''\ diff --git a/mesonbuild/cmake/interpreter.py b/mesonbuild/cmake/interpreter.py index 0516947..c2affd0 100644 --- a/mesonbuild/cmake/interpreter.py +++ b/mesonbuild/cmake/interpreter.py @@ -15,8 +15,6 @@ # This class contains the basic functionality needed to run any interpreter # or an interpreter-based tool. -import pkg_resources - from .common import CMakeException, CMakeTarget, TargetOptions from .client import CMakeClient, RequestCMakeInputs, RequestConfigure, RequestCompute, RequestCodeModel from .fileapi import CMakeFileAPI @@ -25,6 +23,7 @@ from .traceparser import CMakeTraceParser, CMakeGeneratorTarget from .. import mlog, mesonlib from ..environment import Environment from ..mesonlib import MachineChoice, OrderedSet, version_compare +from ..mesondata import mesondata from ..compilers.compilers import lang_suffixes, header_suffixes, obj_suffixes, lib_suffixes, is_header from enum import Enum from functools import lru_cache @@ -280,7 +279,7 @@ class ConverterTarget: std_regex = re.compile(r'([-]{1,2}std=|/std:v?|[-]{1,2}std:)(.*)') def postprocess(self, output_target_map: OutputTargetMap, root_src_dir: str, subdir: str, install_prefix: str, trace: CMakeTraceParser) -> None: - # Detect setting the C and C++ standard + # Detect setting the C and C++ standard and do additional compiler args manipulation for i in ['c', 'cpp']: if i not in self.compile_opts: continue @@ -288,6 +287,7 @@ class ConverterTarget: temp = [] for j in self.compile_opts[i]: m = ConverterTarget.std_regex.match(j) + ctgt = output_target_map.generated(j) if m: std = m.group(2) supported = self._all_lang_stds(i) @@ -302,6 +302,12 @@ class ConverterTarget: self.override_options += ['{}_std={}'.format(i, std)] elif j in ['-fPIC', '-fpic', '-fPIE', '-fpie']: self.pie = True + elif isinstance(ctgt, ConverterCustomTarget): + # Sometimes projects pass generated source files as compiler + # flags. 
Add these as generated sources to ensure that the + # corresponding custom target is run.2 + self.generated += [j] + temp += [j] elif j in blacklist_compiler_flags: pass else: @@ -652,7 +658,7 @@ class ConverterCustomTarget: def __repr__(self) -> str: return '<{}: {} {}>'.format(self.__class__.__name__, self.name, self.outputs) - def postprocess(self, output_target_map: OutputTargetMap, root_src_dir: str, subdir: str, all_outputs: T.List[str]) -> None: + def postprocess(self, output_target_map: OutputTargetMap, root_src_dir: str, subdir: str, all_outputs: T.List[str], trace: CMakeTraceParser) -> None: # Default the working directory to ${CMAKE_CURRENT_BINARY_DIR} if not self.working_dir: self.working_dir = self.current_bin_dir.as_posix() @@ -695,7 +701,18 @@ class ConverterCustomTarget: if not j: continue target = output_target_map.executable(j) - cmd += [target] if target else [j] + if target: + cmd += [target] + continue + elif j in trace.targets: + trace_tgt = trace.targets[j] + if trace_tgt.type == 'EXECUTABLE' and 'IMPORTED_LOCATION' in trace_tgt.properties: + cmd += trace_tgt.properties['IMPORTED_LOCATION'] + continue + mlog.debug('CMake: Found invalid CMake target "{}" --> ignoring \n{}'.format(j, trace_tgt)) + + # Fallthrough on error + cmd += [j] commands += [cmd] self.command = commands @@ -814,7 +831,7 @@ class CMakeInterpreter: raise CMakeException('Unable to find CMake') self.trace = CMakeTraceParser(cmake_exe.version(), self.build_dir, permissive=True) - preload_file = pkg_resources.resource_filename('mesonbuild', 'cmake/data/preload.cmake') + preload_file = mesondata['cmake/data/preload.cmake'].write_to_private(self.env) # Prefere CMAKE_PROJECT_INCLUDE over CMAKE_TOOLCHAIN_FILE if possible, # since CMAKE_PROJECT_INCLUDE was actually designed for code injection. 
@@ -970,7 +987,7 @@ class CMakeInterpreter: object_libs = [] custom_target_outputs = [] # type: T.List[str] for i in self.custom_targets: - i.postprocess(self.output_target_map, self.src_dir, self.subdir, custom_target_outputs) + i.postprocess(self.output_target_map, self.src_dir, self.subdir, custom_target_outputs, self.trace) for i in self.targets: i.postprocess(self.output_target_map, self.src_dir, self.subdir, self.install_prefix, self.trace) if i.type == 'OBJECT_LIBRARY': diff --git a/mesonbuild/cmake/traceparser.py b/mesonbuild/cmake/traceparser.py index a241360..98b56f5 100644 --- a/mesonbuild/cmake/traceparser.py +++ b/mesonbuild/cmake/traceparser.py @@ -74,7 +74,7 @@ class CMakeGeneratorTarget(CMakeTarget): self.working_dir = None # type: T.Optional[str] class CMakeTraceParser: - def __init__(self, cmake_version: str, build_dir: str, permissive: bool = False): + def __init__(self, cmake_version: str, build_dir: str, permissive: bool = True): self.vars = {} # type: T.Dict[str, T.List[str]] self.targets = {} # type: T.Dict[str, CMakeTarget] @@ -269,6 +269,7 @@ class CMakeTraceParser: args = list(tline.args) # Make a working copy # Make sure the exe is imported + is_imported = True if 'IMPORTED' not in args: return self._gen_exception('add_executable', 'non imported executables are not supported', tline) @@ -277,7 +278,7 @@ class CMakeTraceParser: if len(args) < 1: return self._gen_exception('add_executable', 'requires at least 1 argument', tline) - self.targets[args[0]] = CMakeTarget(args[0], 'EXECUTABLE', {}) + self.targets[args[0]] = CMakeTarget(args[0], 'EXECUTABLE', {}, tline=tline, imported=is_imported) def _cmake_add_library(self, tline: CMakeTraceLine): # DOC: https://cmake.org/cmake/help/latest/command/add_library.html diff --git a/mesonbuild/compilers/c.py b/mesonbuild/compilers/c.py index 355abe6..936b04c 100644 --- a/mesonbuild/compilers/c.py +++ b/mesonbuild/compilers/c.py @@ -362,6 +362,26 @@ class VisualStudioCCompiler(MSVCCompiler, VisualStudioLikeCCompilerMixin, CCompi info, exe_wrap, **kwargs) MSVCCompiler.__init__(self, target) + def get_options(self): + opts = super().get_options() + c_stds = ['none', 'c89', 'c99', 'c11'] + opts.update({ + 'std': coredata.UserComboOption( + 'C language standard to use', + c_stds, + 'none', + ), + }) + return opts + + def get_option_compile_args(self, options): + args = [] + std = options['std'] + # As of MVSC 16.7, /std:c11 is the only valid C standard option. 
+ if std.value in {'c11'}: + args.append('/std:' + std.value) + return args + class ClangClCCompiler(ClangClCompiler, VisualStudioLikeCCompilerMixin, CCompiler): def __init__(self, exelist, version, for_machine: MachineChoice, diff --git a/mesonbuild/compilers/compilers.py b/mesonbuild/compilers/compilers.py index 8ecb972..0de59a4 100644 --- a/mesonbuild/compilers/compilers.py +++ b/mesonbuild/compilers/compilers.py @@ -854,9 +854,6 @@ class Compiler(metaclass=abc.ABCMeta): def bitcode_args(self) -> T.List[str]: return self.linker.bitcode_args() - def get_linker_debug_crt_args(self) -> T.List[str]: - return self.linker.get_debug_crt_args() - def get_buildtype_linker_args(self, buildtype: str) -> T.List[str]: return self.linker.get_buildtype_args(buildtype) diff --git a/mesonbuild/compilers/cpp.py b/mesonbuild/compilers/cpp.py index 961ede8..698c71a 100644 --- a/mesonbuild/compilers/cpp.py +++ b/mesonbuild/compilers/cpp.py @@ -374,9 +374,20 @@ class ElbrusCPPCompiler(GnuCPPCompiler, ElbrusCompiler): **kwargs) ElbrusCompiler.__init__(self) - # It does not support c++/gnu++ 17 and 1z, but still does support 0x, 1y, and gnu++98. def get_options(self): opts = CPPCompiler.get_options(self) + + cpp_stds = [ + 'none', 'c++98', 'c++03', 'c++0x', 'c++11', 'c++14', 'c++1y', + 'gnu++98', 'gnu++03', 'gnu++0x', 'gnu++11', 'gnu++14', 'gnu++1y', + ] + + if version_compare(self.version, '>=1.24.00'): + cpp_stds += [ 'c++1z', 'c++17', 'gnu++1z', 'gnu++17' ] + + if version_compare(self.version, '>=1.25.00'): + cpp_stds += [ 'c++2a', 'gnu++2a' ] + opts.update({ 'eh': coredata.UserComboOption( 'C++ exception handling type.', @@ -385,10 +396,7 @@ class ElbrusCPPCompiler(GnuCPPCompiler, ElbrusCompiler): ), 'std': coredata.UserComboOption( 'C++ language standard to use', - [ - 'none', 'c++98', 'c++03', 'c++0x', 'c++11', 'c++14', 'c++1y', - 'gnu++98', 'gnu++03', 'gnu++0x', 'gnu++11', 'gnu++14', 'gnu++1y', - ], + cpp_stds, 'none', ), 'debugstl': coredata.UserBooleanOption( @@ -586,7 +594,7 @@ class VisualStudioCPPCompiler(CPP11AsCPP14Mixin, VisualStudioLikeCPPCompilerMixi is_cross: bool, info: 'MachineInfo', exe_wrap, target, **kwargs): CPPCompiler.__init__(self, exelist, version, for_machine, is_cross, info, exe_wrap, **kwargs) MSVCCompiler.__init__(self, target) - self.base_options = ['b_pch', 'b_vscrt'] # FIXME add lto, pgo and the like + self.base_options = ['b_pch', 'b_vscrt', 'b_ndebug'] # FIXME add lto, pgo and the like self.id = 'msvc' def get_options(self): diff --git a/mesonbuild/compilers/cuda.py b/mesonbuild/compilers/cuda.py index 4e89f5d..934ad12 100644 --- a/mesonbuild/compilers/cuda.py +++ b/mesonbuild/compilers/cuda.py @@ -263,9 +263,6 @@ class CudaCompiler(Compiler): def get_depfile_suffix(self): return 'd' - def get_linker_debug_crt_args(self) -> T.List[str]: - return self._cook_link_args(self.host_compiler.get_linker_debug_crt_args()) - def get_buildtype_linker_args(self, buildtype): return self._cook_link_args(self.host_compiler.get_buildtype_linker_args(buildtype)) diff --git a/mesonbuild/compilers/mixins/clang.py b/mesonbuild/compilers/mixins/clang.py index ecfbc64..7525c12 100644 --- a/mesonbuild/compilers/mixins/clang.py +++ b/mesonbuild/compilers/mixins/clang.py @@ -113,6 +113,11 @@ class ClangCompiler(GnuLikeCompiler): # (and other gcc-like compilers) cannot. This is becuse clang (being # llvm based) is retargetable, while GCC is not. 
# + + # qcld: Qualcomm Snapdragon linker, based on LLVM + if linker == 'qcld': + return ['-fuse-ld=qcld'] + if shutil.which(linker): if not shutil.which(linker): raise mesonlib.MesonException( diff --git a/mesonbuild/compilers/mixins/clike.py b/mesonbuild/compilers/mixins/clike.py index 47e97d2..95b9592 100644 --- a/mesonbuild/compilers/mixins/clike.py +++ b/mesonbuild/compilers/mixins/clike.py @@ -366,9 +366,17 @@ class CLikeCompiler: def _get_basic_compiler_args(self, env, mode: str): cargs, largs = [], [] - # Select a CRT if needed since we're linking if mode == 'link': - cargs += self.get_linker_debug_crt_args() + # Sometimes we need to manually select the CRT to use with MSVC. + # One example is when trying to do a compiler check that involves + # linking with static libraries since MSVC won't select a CRT for + # us in that case and will error out asking us to pick one. + try: + crt_val = env.coredata.base_options['b_vscrt'].value + buildtype = env.coredata.base_options['buildtype'].value + cargs += self.get_crt_compile_args(crt_val, buildtype) + except (KeyError, AttributeError): + pass # Add CFLAGS/CXXFLAGS/OBJCFLAGS/OBJCXXFLAGS and CPPFLAGS from the env sys_args = env.coredata.get_external_args(self.for_machine, self.language) diff --git a/mesonbuild/compilers/mixins/gnu.py b/mesonbuild/compilers/mixins/gnu.py index 3526a91..83f7047 100644 --- a/mesonbuild/compilers/mixins/gnu.py +++ b/mesonbuild/compilers/mixins/gnu.py @@ -362,7 +362,7 @@ class GnuCompiler(GnuLikeCompiler): # For some compiler command line arguments, the GNU compilers will # emit a warning on stderr indicating that an option is valid for a # another language, but still complete with exit_success - with self._build_wrapper(code, env, args, None, mode, disable_cache=False, want_output=True) as p: + with self._build_wrapper(code, env, args, None, mode) as p: result = p.returncode == 0 if self.language in {'cpp', 'objcpp'} and 'is valid for C/ObjC' in p.stde: result = False diff --git a/mesonbuild/compilers/mixins/islinker.py b/mesonbuild/compilers/mixins/islinker.py index bf1d339..a9967d6 100644 --- a/mesonbuild/compilers/mixins/islinker.py +++ b/mesonbuild/compilers/mixins/islinker.py @@ -110,9 +110,6 @@ class BasicLinkerIsCompilerMixin: install_rpath: str) -> T.Tuple[T.List[str], T.Set[bytes]]: return ([], set()) - def get_linker_debug_crt_args(self) -> T.List[str]: - return [] - def get_asneeded_args(self) -> T.List[str]: return [] diff --git a/mesonbuild/compilers/mixins/visualstudio.py b/mesonbuild/compilers/mixins/visualstudio.py index 4dfd8b4..93101b5 100644 --- a/mesonbuild/compilers/mixins/visualstudio.py +++ b/mesonbuild/compilers/mixins/visualstudio.py @@ -114,7 +114,7 @@ class VisualStudioLikeCompiler(metaclass=abc.ABCMeta): # See: https://ninja-build.org/manual.html#_deps always_args = ['/nologo', '/showIncludes'] warn_args = { - '0': ['/W1'], + '0': [], '1': ['/W2'], '2': ['/W3'], '3': ['/W4'], diff --git a/mesonbuild/coredata.py b/mesonbuild/coredata.py index 26f33f1..ce03fbc 100644 --- a/mesonbuild/coredata.py +++ b/mesonbuild/coredata.py @@ -19,10 +19,9 @@ from itertools import chain from pathlib import PurePath from collections import OrderedDict, defaultdict from .mesonlib import ( - MesonException, MachineChoice, PerMachine, OrderedSet, + MesonException, EnvironmentException, MachineChoice, PerMachine, default_libdir, default_libexecdir, default_prefix, split_args ) -from .envconfig import get_env_var_pair from .wrap import WrapMode import ast import argparse @@ -46,6 +45,16 @@ default_yielding = 
False # Can't bind this near the class method it seems, sadly. _T = T.TypeVar('_T') +class MesonVersionMismatchException(MesonException): + '''Build directory generated with Meson version incompatible with current version''' + def __init__(self, old_version, current_version): + super().__init__('Build directory has been generated with Meson version {}, ' + 'which is incompatible with current version {}.' + .format(old_version, current_version)) + self.old_version = old_version + self.current_version = current_version + + class UserOption(T.Generic[_T]): def __init__(self, description, choices, yielding): super().__init__() @@ -160,10 +169,16 @@ class UserComboOption(UserOption[str]): def validate_value(self, value): if value not in self.choices: + if isinstance(value, bool): + _type = 'boolean' + elif isinstance(value, (int, float)): + _type = 'number' + else: + _type = 'string' optionsstring = ', '.join(['"%s"' % (item,) for item in self.choices]) - raise MesonException('Value "{}" for combo option "{}" is not one of the choices.' - ' Possible choices are: {}.'.format( - value, self.description, optionsstring)) + raise MesonException('Value "{}" (of type "{}") for combo option "{}" is not one of the choices.' + ' Possible choices are (as string): {}.'.format( + value, _type, self.description, optionsstring)) return value class UserArrayOption(UserOption[T.List[str]]): @@ -360,15 +375,15 @@ class CoreData: self.install_guid = str(uuid.uuid4()).upper() self.target_guids = {} self.version = version - self.builtins = {} # : OptionDictType + self.builtins = {} # type: OptionDictType self.builtins_per_machine = PerMachine({}, {}) - self.backend_options = {} # : OptionDictType - self.user_options = {} # : OptionDictType + self.backend_options = {} # type: OptionDictType + self.user_options = {} # type: OptionDictType self.compiler_options = PerMachine( defaultdict(dict), defaultdict(dict), - ) # : PerMachine[T.defaultdict[str, OptionDictType]] - self.base_options = {} # : OptionDictType + ) # type: PerMachine[T.defaultdict[str, OptionDictType]] + self.base_options = {} # type: OptionDictType self.cross_files = self.__load_config_files(options, scratch_dir, 'cross') self.compilers = PerMachine(OrderedDict(), OrderedDict()) @@ -376,6 +391,7 @@ class CoreData: host_cache = DependencyCache(self.builtins_per_machine, MachineChoice.BUILD) self.deps = PerMachine(build_cache, host_cache) # type: PerMachine[DependencyCache] self.compiler_check_cache = OrderedDict() + # Only to print a warning if it changes between Meson invocations. self.config_files = self.__load_config_files(options, scratch_dir, 'native') self.builtin_options_libdir_cross_fixup() @@ -444,7 +460,7 @@ class CoreData: # getting the "system default" is always wrong on multiarch # platforms as it gets a value like lib/x86_64-linux-gnu. 
if self.cross_files: - builtin_options['libdir'].default = 'lib' + BUILTIN_OPTIONS['libdir'].default = 'lib' def sanitize_prefix(self, prefix): prefix = os.path.expanduser(prefix) @@ -500,10 +516,10 @@ class CoreData: def init_builtins(self, subproject: str): # Create builtin options with default values - for key, opt in builtin_options.items(): + for key, opt in BUILTIN_OPTIONS.items(): self.add_builtin_option(self.builtins, key, opt, subproject) for for_machine in iter(MachineChoice): - for key, opt in builtin_options_per_machine.items(): + for key, opt in BUILTIN_OPTIONS_PER_MACHINE.items(): self.add_builtin_option(self.builtins_per_machine[for_machine], key, opt, subproject) def add_builtin_option(self, opts_map, key, opt, subproject): @@ -708,7 +724,7 @@ class CoreData: self.builtins['prefix'].set_value(prefix) for key in builtin_dir_noprefix_options: if key not in options: - self.builtins[key].set_value(builtin_options[key].prefixed_default(key, prefix)) + self.builtins[key].set_value(BUILTIN_OPTIONS[key].prefixed_default(key, prefix)) unknown_options = [] for k, v in options.items(): @@ -733,87 +749,54 @@ class CoreData: if not self.is_cross_build(): self.copy_build_options_from_regular_ones() - def set_default_options(self, default_options, subproject, env): - # Warn if the user is using two different ways of setting build-type - # options that override each other - if 'buildtype' in env.cmd_line_options and \ - ('optimization' in env.cmd_line_options or 'debug' in env.cmd_line_options): - mlog.warning('Recommend using either -Dbuildtype or -Doptimization + -Ddebug. ' - 'Using both is redundant since they override each other. ' - 'See: https://mesonbuild.com/Builtin-options.html#build-type-options') - cmd_line_options = OrderedDict() - # Set project default_options as if they were passed to the cmdline. - # Subprojects can only define default for user options and not yielding - # builtin option. - from . import optinterpreter - for k, v in default_options.items(): + def set_default_options(self, default_options: 'T.OrderedDict[str, str]', subproject: str, env: 'Environment') -> None: + def make_key(key: str) -> str: if subproject: - if (k not in builtin_options or builtin_options[k].yielding) \ - and optinterpreter.is_invalid_name(k, log=False): - continue - k = subproject + ':' + k - cmd_line_options[k] = v - - # Override project default_options using conf files (cross or native) - for k, v in env.paths.host: - if v is not None: - cmd_line_options[k] = v - - # Override all the above defaults using the command-line arguments - # actually passed to us - cmd_line_options.update(env.cmd_line_options) - env.cmd_line_options = cmd_line_options - - # Create a subset of cmd_line_options, keeping only options for this - # subproject. Also take builtin options if it's the main project. - # Language and backend specific options will be set later when adding - # languages and setting the backend (builtin options must be set first - # to know which backend we'll use). + return '{}:{}'.format(subproject, key) + return key + options = OrderedDict() - # Some options default to environment variables if they are - # unset, set those now. These will either be overwritten - # below, or they won't. These should only be set on the first run. 
- for for_machine in MachineChoice: - p_env_pair = get_env_var_pair(for_machine, self.is_cross_build(), 'PKG_CONFIG_PATH') - if p_env_pair is not None: - p_env_var, p_env = p_env_pair - - # PKG_CONFIG_PATH may contain duplicates, which must be - # removed, else a duplicates-in-array-option warning arises. - p_list = list(OrderedSet(p_env.split(':'))) - - key = 'pkg_config_path' - if for_machine == MachineChoice.BUILD: - key = 'build.' + key - - if env.first_invocation: - options[key] = p_list - elif options.get(key, []) != p_list: - mlog.warning( - p_env_var + - ' environment variable has changed ' - 'between configurations, meson ignores this. ' - 'Use -Dpkg_config_path to change pkg-config search ' - 'path instead.' - ) - - def remove_prefix(text, prefix): - if text.startswith(prefix): - return text[len(prefix):] - return text - - for k, v in env.cmd_line_options.items(): - if subproject: - if not k.startswith(subproject + ':'): - continue - elif k not in builtin_options.keys() \ - and remove_prefix(k, 'build.') not in builtin_options_per_machine.keys(): - if ':' in k: - continue - if optinterpreter.is_invalid_name(k, log=False): + # TODO: validate these + from .compilers import all_languages, base_options + lang_prefixes = tuple('{}_'.format(l) for l in all_languages) + # split arguments that can be set now, and those that cannot so they + # can be set later, when they've been initialized. + for k, v in default_options.items(): + if k.startswith(lang_prefixes): + lang, key = k.split('_', 1) + for machine in MachineChoice: + if key not in env.compiler_options[machine][lang]: + env.compiler_options[machine][lang][key] = v + elif k in base_options: + if not subproject and k not in env.base_options: + env.base_options[k] = v + else: + options[make_key(k)] = v + + for k, v in chain(env.meson_options.host.get('', {}).items(), + env.meson_options.host.get(subproject, {}).items()): + options[make_key(k)] = v + + for k, v in chain(env.meson_options.build.get('', {}).items(), + env.meson_options.build.get(subproject, {}).items()): + if k in BUILTIN_OPTIONS_PER_MACHINE: + options[make_key('build.{}'.format(k))] = v + + options.update({make_key(k): v for k, v in env.user_options.get(subproject, {}).items()}) + + # Some options (namely the compiler options) are not preasant in + # coredata until the compiler is fully initialized. As such, we need to + # put those options into env.meson_options, only if they're not already + # in there, as the machine files and command line have precendence. 
+ for k, v in default_options.items(): + if k in BUILTIN_OPTIONS and not BUILTIN_OPTIONS[k].yielding: + continue + for machine in MachineChoice: + if machine is MachineChoice.BUILD and not self.is_cross_build(): continue - options[k] = v + if k not in env.meson_options[machine][subproject]: + env.meson_options[machine][subproject][k] = v self.set_options(options, subproject=subproject) @@ -829,24 +812,19 @@ class CoreData: env.is_cross_build(), env.properties[for_machine]).items(): # prefixed compiler options affect just this machine - opt_prefix = for_machine.get_prefix() - user_k = opt_prefix + lang + '_' + k - if user_k in env.cmd_line_options: - o.set_value(env.cmd_line_options[user_k]) + if k in env.compiler_options[for_machine].get(lang, {}): + o.set_value(env.compiler_options[for_machine][lang][k]) self.compiler_options[for_machine][lang].setdefault(k, o) - def process_new_compiler(self, lang: str, comp: T.Type['Compiler'], env: 'Environment') -> None: + def process_new_compiler(self, lang: str, comp: 'Compiler', env: 'Environment') -> None: from . import compilers self.compilers[comp.for_machine][lang] = comp - enabled_opts = [] for k, o in comp.get_options().items(): # prefixed compiler options affect just this machine - opt_prefix = comp.for_machine.get_prefix() - user_k = opt_prefix + lang + '_' + k - if user_k in env.cmd_line_options: - o.set_value(env.cmd_line_options[user_k]) + if k in env.compiler_options[comp.for_machine].get(lang, {}): + o.set_value(env.compiler_options[comp.for_machine][lang][k]) self.compiler_options[comp.for_machine][lang].setdefault(k, o) enabled_opts = [] @@ -854,8 +832,8 @@ class CoreData: if optname in self.base_options: continue oobj = compilers.base_options[optname] - if optname in env.cmd_line_options: - oobj.set_value(env.cmd_line_options[optname]) + if optname in env.base_options: + oobj.set_value(env.base_options[optname]) enabled_opts.append(optname) self.base_options[optname] = oobj self.emit_base_options_warnings(enabled_opts) @@ -1014,9 +992,7 @@ def load(build_dir): if not isinstance(obj, CoreData): raise MesonException(load_fail_msg) if major_versions_differ(obj.version, version): - raise MesonException('Build directory has been generated with Meson version %s, ' - 'which is incompatible with current version %s.\n' % - (obj.version, version)) + raise MesonVersionMismatchException(obj.version, version) return obj def save(obj, build_dir): @@ -1037,9 +1013,9 @@ def save(obj, build_dir): def register_builtin_arguments(parser): - for n, b in builtin_options.items(): + for n, b in BUILTIN_OPTIONS.items(): b.add_to_argparse(n, parser, '', '') - for n, b in builtin_options_per_machine.items(): + for n, b in BUILTIN_OPTIONS_PER_MACHINE.items(): b.add_to_argparse(n, parser, '', ' (just for host machine)') b.add_to_argparse(n, parser, 'build.', ' (just for build machine)') parser.add_argument('-D', action='append', dest='projectoptions', default=[], metavar="option", @@ -1060,9 +1036,9 @@ def parse_cmd_line_options(args): # Merge builtin options set with --option into the dict. for name in chain( - builtin_options.keys(), - ('build.' + k for k in builtin_options_per_machine.keys()), - builtin_options_per_machine.keys(), + BUILTIN_OPTIONS.keys(), + ('build.' 
+ k for k in BUILTIN_OPTIONS_PER_MACHINE.keys()), + BUILTIN_OPTIONS_PER_MACHINE.keys(), ): value = getattr(args, name, None) if value is not None: @@ -1149,23 +1125,25 @@ class BuiltinOption(T.Generic[_T, _U]): cmdline_name = self.argparse_name_to_arg(prefix + name) parser.add_argument(cmdline_name, help=h + help_suffix, **kwargs) + # Update `docs/markdown/Builtin-options.md` after changing the options below -builtin_options = OrderedDict([ - # Directories - ('prefix', BuiltinOption(UserStringOption, 'Installation prefix', default_prefix())), - ('bindir', BuiltinOption(UserStringOption, 'Executable directory', 'bin')), - ('datadir', BuiltinOption(UserStringOption, 'Data file directory', 'share')), - ('includedir', BuiltinOption(UserStringOption, 'Header file directory', 'include')), - ('infodir', BuiltinOption(UserStringOption, 'Info page directory', 'share/info')), - ('libdir', BuiltinOption(UserStringOption, 'Library directory', default_libdir())), - ('libexecdir', BuiltinOption(UserStringOption, 'Library executable directory', default_libexecdir())), - ('localedir', BuiltinOption(UserStringOption, 'Locale data directory', 'share/locale')), +BUILTIN_DIR_OPTIONS = OrderedDict([ + ('prefix', BuiltinOption(UserStringOption, 'Installation prefix', default_prefix())), + ('bindir', BuiltinOption(UserStringOption, 'Executable directory', 'bin')), + ('datadir', BuiltinOption(UserStringOption, 'Data file directory', 'share')), + ('includedir', BuiltinOption(UserStringOption, 'Header file directory', 'include')), + ('infodir', BuiltinOption(UserStringOption, 'Info page directory', 'share/info')), + ('libdir', BuiltinOption(UserStringOption, 'Library directory', default_libdir())), + ('libexecdir', BuiltinOption(UserStringOption, 'Library executable directory', default_libexecdir())), + ('localedir', BuiltinOption(UserStringOption, 'Locale data directory', 'share/locale')), ('localstatedir', BuiltinOption(UserStringOption, 'Localstate data directory', 'var')), ('mandir', BuiltinOption(UserStringOption, 'Manual page directory', 'share/man')), ('sbindir', BuiltinOption(UserStringOption, 'System executable directory', 'sbin')), ('sharedstatedir', BuiltinOption(UserStringOption, 'Architecture-independent data directory', 'com')), ('sysconfdir', BuiltinOption(UserStringOption, 'Sysconf data directory', 'etc')), - # Core options +]) # type: OptionDictType + +BUILTIN_CORE_OPTIONS = OrderedDict([ ('auto_features', BuiltinOption(UserFeatureOption, "Override value of all 'auto' features", 'auto')), ('backend', BuiltinOption(UserComboOption, 'Backend to use', 'ninja', choices=backendlist)), ('buildtype', BuiltinOption(UserComboOption, 'Build type to use', 'debug', @@ -1181,13 +1159,15 @@ builtin_options = OrderedDict([ ('strip', BuiltinOption(UserBooleanOption, 'Strip targets on install', False)), ('unity', BuiltinOption(UserComboOption, 'Unity build', 'off', choices=['on', 'off', 'subprojects'])), ('unity_size', BuiltinOption(UserIntegerOption, 'Unity block size', (2, None, 4))), - ('warning_level', BuiltinOption(UserComboOption, 'Compiler warning level to use', '1', choices=['0', '1', '2', '3'])), + ('warning_level', BuiltinOption(UserComboOption, 'Compiler warning level to use', '1', choices=['0', '1', '2', '3'], yielding=False)), ('werror', BuiltinOption(UserBooleanOption, 'Treat warnings as errors', False, yielding=False)), ('wrap_mode', BuiltinOption(UserComboOption, 'Wrap mode', 'default', choices=['default', 'nofallback', 'nodownload', 'forcefallback'])), ('force_fallback_for', 
BuiltinOption(UserArrayOption, 'Force fallback for those subprojects', [])), -]) +]) # type: OptionDictType + +BUILTIN_OPTIONS = OrderedDict(chain(BUILTIN_DIR_OPTIONS.items(), BUILTIN_CORE_OPTIONS.items())) -builtin_options_per_machine = OrderedDict([ +BUILTIN_OPTIONS_PER_MACHINE = OrderedDict([ ('pkg_config_path', BuiltinOption(UserArrayOption, 'List of additional paths for pkg-config to search', [])), ('cmake_prefix_path', BuiltinOption(UserArrayOption, 'List of additional prefixes for cmake to search', [])), ]) @@ -1200,7 +1180,7 @@ builtin_dir_noprefix_options = { 'sharedstatedir': {'/usr': '/var/lib', '/usr/local': '/var/local/lib'}, } -forbidden_target_names = {'clean': None, +FORBIDDEN_TARGET_NAMES = {'clean': None, 'clean-ctlist': None, 'clean-gcno': None, 'clean-gcda': None, @@ -1221,3 +1201,4 @@ forbidden_target_names = {'clean': None, 'dist': None, 'distcheck': None, } + diff --git a/mesonbuild/dependencies/base.py b/mesonbuild/dependencies/base.py index 23701da..f581c06 100644 --- a/mesonbuild/dependencies/base.py +++ b/mesonbuild/dependencies/base.py @@ -29,8 +29,6 @@ import typing as T from enum import Enum from pathlib import Path, PurePath -import pkg_resources - from .. import mlog from .. import mesonlib from ..compilers import clib_langs @@ -40,6 +38,7 @@ from ..cmake import CMakeExecutor, CMakeTraceParser, CMakeException from ..mesonlib import MachineChoice, MesonException, OrderedSet, PerMachine from ..mesonlib import Popen_safe, version_compare_many, version_compare, listify, stringlistify, extract_as_list, split_args from ..mesonlib import Version, LibType +from ..mesondata import mesondata if T.TYPE_CHECKING: from ..compilers.compilers import CompilerType # noqa: F401 @@ -252,6 +251,16 @@ class InternalDependency(Dependency): self.ext_deps = ext_deps self.variables = variables + def __deepcopy__(self, memo: dict) -> 'InternalDependency': + result = self.__class__.__new__(self.__class__) + memo[id(self)] = result + for k, v in self.__dict__.items(): + if k in ['libraries', 'whole_libraries']: + setattr(result, k, copy.copy(v)) + else: + setattr(result, k, copy.deepcopy(v, memo)) + return result + def get_pkgconfig_variable(self, variable_name, kwargs): raise DependencyException('Method "get_pkgconfig_variable()" is ' 'invalid for an internal dependency') @@ -1441,7 +1450,10 @@ class CMakeDependency(ExternalDependency): cfgs = [x for x in tgt.properties['IMPORTED_CONFIGURATIONS'] if x] cfg = cfgs[0] - is_debug = self.env.coredata.get_builtin_option('debug'); + is_debug = self.env.coredata.get_builtin_option('buildtype') == 'debug' + if 'b_vscrt' in self.env.coredata.base_options: + if self.env.coredata.base_options['b_vscrt'].value in ('mdd', 'mtd'): + is_debug = True if is_debug: if 'DEBUG' in cfgs: cfg = 'DEBUG' @@ -1512,8 +1524,7 @@ class CMakeDependency(ExternalDependency): build_dir = self._get_build_dir() # Insert language parameters into the CMakeLists.txt and write new CMakeLists.txt - # Per the warning in pkg_resources, this is *not* a path and os.path and Pathlib are *not* safe to use here. - cmake_txt = pkg_resources.resource_string('mesonbuild', 'dependencies/data/' + cmake_file).decode() + cmake_txt = mesondata['dependencies/data/' + cmake_file].data # In general, some Fortran CMake find_package() also require C language enabled, # even if nothing from C is directly used. 
An easy Fortran example that fails @@ -2290,7 +2301,7 @@ def get_dep_identifier(name, kwargs) -> T.Tuple: # 'required' is irrelevant for caching; the caller handles it separately # 'fallback' subprojects cannot be cached -- they must be initialized # 'default_options' is only used in fallback case - if key in ('version', 'native', 'required', 'fallback', 'default_options'): + if key in ('version', 'native', 'required', 'fallback', 'default_options', 'force_fallback'): continue # All keyword arguments are strings, ints, or lists (or lists of lists) if isinstance(value, list): diff --git a/mesonbuild/dependencies/boost.py b/mesonbuild/dependencies/boost.py index 907c0c2..3ad534e 100644 --- a/mesonbuild/dependencies/boost.py +++ b/mesonbuild/dependencies/boost.py @@ -20,6 +20,7 @@ from pathlib import Path from .. import mlog from .. import mesonlib +from ..envconfig import get_env_var from ..environment import Environment from .base import DependencyException, ExternalDependency, PkgConfigDependency @@ -163,8 +164,8 @@ class BoostLibraryFile(): if not tags: return - # Without any tags mt is assumed, however, an absents of mt in the name - # with tags present indicates that the lib was build without mt support + # Without any tags mt is assumed, however, an absence of mt in the name + # with tags present indicates that the lib was built without mt support self.mt = False for i in tags: if i == 'mt': @@ -367,36 +368,27 @@ class BoostDependency(ExternalDependency): self.arch = environment.machines[self.for_machine].cpu_family self.arch = boost_arch_map.get(self.arch, None) - # Prefere BOOST_INCLUDEDIR and BOOST_LIBRARYDIR if preset - boost_manual_env = [x in os.environ for x in ['BOOST_INCLUDEDIR', 'BOOST_LIBRARYDIR']] - if all(boost_manual_env): - inc_dir = Path(os.environ['BOOST_INCLUDEDIR']) - lib_dir = Path(os.environ['BOOST_LIBRARYDIR']) - mlog.debug('Trying to find boost with:') - mlog.debug(' - BOOST_INCLUDEDIR = {}'.format(inc_dir)) - mlog.debug(' - BOOST_LIBRARYDIR = {}'.format(lib_dir)) - - boost_inc_dir = None - for j in [inc_dir / 'version.hpp', inc_dir / 'boost' / 'version.hpp']: - if j.is_file(): - boost_inc_dir = self._include_dir_from_version_header(j) - break - if not boost_inc_dir: - self.is_found = False - return + # First, look for paths specified in a machine file + props = self.env.properties[self.for_machine] + boost_property_env = [props.get('boost_includedir'), props.get('boost_librarydir'), props.get('boost_root')] + if any(boost_property_env): + self.detect_boost_machine_file(props) + return - self.is_found = self.run_check([boost_inc_dir], [lib_dir]) + # Next, look for paths in the environment + boost_manual_env_list = ['BOOST_INCLUDEDIR', 'BOOST_LIBRARYDIR', 'BOOST_ROOT', 'BOOSTROOT'] + boost_manual_env = [get_env_var(self.for_machine, self.env.is_cross_build, x) for x in boost_manual_env_list] + if any(boost_manual_env): + self.detect_boost_env() return - elif any(boost_manual_env): - mlog.warning('Both BOOST_INCLUDEDIR *and* BOOST_LIBRARYDIR have to be set (one is not enough). Ignoring.') - # A) Detect potential boost root directories (uses also BOOST_ROOT env var) - roots = self.detect_roots() - roots = list(mesonlib.OrderedSet(roots)) + # Finally, look for paths from .pc files and from searching the filesystem + self.detect_roots() - # B) Foreach candidate + def check_and_set_roots(self, roots) -> None: + roots = list(mesonlib.OrderedSet(roots)) for j in roots: - # 1. Look for the boost headers (boost/version.pp) + # 1. 
Look for the boost headers (boost/version.hpp) mlog.debug('Checking potential boost root {}'.format(j.as_posix())) inc_dirs = self.detect_inc_dirs(j) inc_dirs = sorted(inc_dirs, reverse=True) # Prefer the newer versions @@ -411,6 +403,74 @@ class BoostDependency(ExternalDependency): self.boost_root = j break + def detect_boost_machine_file(self, props) -> None: + incdir = props.get('boost_includedir') + libdir = props.get('boost_librarydir') + + if incdir and libdir: + inc_dir = Path(props['boost_includedir']) + lib_dir = Path(props['boost_librarydir']) + + if not inc_dir.is_absolute() or not lib_dir.is_absolute(): + raise DependencyException('Paths given for boost_includedir and boost_librarydir in machine file must be absolute') + + mlog.debug('Trying to find boost with:') + mlog.debug(' - boost_includedir = {}'.format(inc_dir)) + mlog.debug(' - boost_librarydir = {}'.format(lib_dir)) + + return self.detect_split_root(inc_dir, lib_dir) + + elif incdir or libdir: + raise DependencyException('Both boost_includedir *and* boost_librarydir have to be set in your machine file (one is not enough)') + + rootdir = props.get('boost_root') + # It shouldn't be possible to get here without something in boost_root + assert(rootdir) + + raw_paths = mesonlib.stringlistify(rootdir) + paths = [Path(x) for x in raw_paths] + if paths and any([not x.is_absolute() for x in paths]): + raise DependencyException('boost_root path given in machine file must be absolute') + + self.check_and_set_roots(paths) + + def detect_boost_env(self): + boost_includedir = get_env_var(self.for_machine, self.env.is_cross_build, 'BOOST_INCLUDEDIR') + boost_librarydir = get_env_var(self.for_machine, self.env.is_cross_build, 'BOOST_LIBRARYDIR') + + boost_manual_env = [boost_includedir, boost_librarydir] + if all(boost_manual_env): + inc_dir = Path(boost_includedir) + lib_dir = Path(boost_librarydir) + + if not inc_dir.is_absolute() or not lib_dir.is_absolute(): + raise DependencyException('Paths given in BOOST_INCLUDEDIR and BOOST_LIBRARYDIR must be absolute') + + mlog.debug('Trying to find boost with:') + mlog.debug(' - BOOST_INCLUDEDIR = {}'.format(inc_dir)) + mlog.debug(' - BOOST_LIBRARYDIR = {}'.format(lib_dir)) + + return self.detect_split_root(inc_dir, lib_dir) + + elif any(boost_manual_env): + raise DependencyException('Both BOOST_INCLUDEDIR *and* BOOST_LIBRARYDIR have to be set (one is not enough). 
Ignoring.') + + boost_root = get_env_var(self.for_machine, self.env.is_cross_build, 'BOOST_ROOT') + boostroot = get_env_var(self.for_machine, self.env.is_cross_build, 'BOOSTROOT') + + # It shouldn't be possible to get here without something in BOOST_ROOT or BOOSTROOT + assert(boost_root or boostroot) + + for path, name in [(boost_root, 'BOOST_ROOT'), (boostroot, 'BOOSTROOT')]: + if path: + raw_paths = path.split(os.pathsep) + paths = [Path(x) for x in raw_paths] + if paths and any([not x.is_absolute() for x in paths]): + raise DependencyException('Paths in {} must be absolute'.format(name)) + break + + self.check_and_set_roots(paths) + def run_check(self, inc_dirs: T.List[BoostIncludeDir], lib_dirs: T.List[Path]) -> bool: mlog.debug(' - potential library dirs: {}'.format([x.as_posix() for x in lib_dirs])) mlog.debug(' - potential include dirs: {}'.format([x.path.as_posix() for x in inc_dirs])) @@ -560,6 +620,12 @@ class BoostDependency(ExternalDependency): except (KeyError, IndexError, AttributeError): pass + # mlog.debug(' - static: {}'.format(self.static)) + # mlog.debug(' - not explicit static: {}'.format(not self.explicit_static)) + # mlog.debug(' - mt: {}'.format(self.multithreading)) + # mlog.debug(' - version: {}'.format(lib_vers)) + # mlog.debug(' - arch: {}'.format(self.arch)) + # mlog.debug(' - vscrt: {}'.format(vscrt)) libs = [x for x in libs if x.static == self.static or not self.explicit_static] libs = [x for x in libs if x.mt == self.multithreading] libs = [x for x in libs if x.version_matches(lib_vers)] @@ -592,20 +658,22 @@ class BoostDependency(ExternalDependency): libs += [BoostLibraryFile(i)] return [x for x in libs if x.is_boost()] # Filter out no boost libraries - def detect_roots(self) -> T.List[Path]: - roots = [] # type: T.List[Path] + def detect_split_root(self, inc_dir, lib_dir) -> None: + boost_inc_dir = None + for j in [inc_dir / 'version.hpp', inc_dir / 'boost' / 'version.hpp']: + if j.is_file(): + boost_inc_dir = self._include_dir_from_version_header(j) + break + if not boost_inc_dir: + self.is_found = False + return - # Add roots from the environment - for i in ['BOOST_ROOT', 'BOOSTROOT']: - if i in os.environ: - raw_paths = os.environ[i].split(os.pathsep) - paths = [Path(x) for x in raw_paths] - if paths and any([not x.is_absolute() for x in paths]): - raise DependencyException('Paths in {} must be absolute'.format(i)) - roots += paths - return roots # Do not add system paths if BOOST_ROOT is present + self.is_found = self.run_check([boost_inc_dir], [lib_dir]) + + def detect_roots(self) -> None: + roots = [] # type: T.List[Path] - # Try getting the BOOST_ROOT from a boost.pc if it exists. This primarely + # Try getting the BOOST_ROOT from a boost.pc if it exists. This primarily # allows BoostDependency to find boost from Conan. 
See #5438 try: boost_pc = PkgConfigDependency('boost', self.env, {'required': False}) @@ -660,7 +728,7 @@ class BoostDependency(ExternalDependency): tmp = [x.resolve() for x in tmp] roots += tmp - return roots + self.check_and_set_roots(roots) def log_details(self) -> str: res = '' diff --git a/mesonbuild/dependencies/hdf5.py b/mesonbuild/dependencies/hdf5.py index fadd109..6a9bf2d 100644 --- a/mesonbuild/dependencies/hdf5.py +++ b/mesonbuild/dependencies/hdf5.py @@ -105,7 +105,8 @@ class HDF5Dependency(ExternalDependency): prog = ExternalProgram(wrappers[lang], silent=True) if not prog.found(): return - cmd = prog.get_command() + ['-show'] + shlib_arg = '-noshlib' if kwargs.get('static', False) else '-shlib' + cmd = prog.get_command() + [shlib_arg, '-show'] p = subprocess.run(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True, timeout=15) if p.returncode != 0: mlog.debug('Command', mlog.bold(cmd), 'failed to run:') diff --git a/mesonbuild/dependencies/ui.py b/mesonbuild/dependencies/ui.py index 95dfe2b..5dffd3a 100644 --- a/mesonbuild/dependencies/ui.py +++ b/mesonbuild/dependencies/ui.py @@ -234,13 +234,13 @@ class QtBaseDependency(ExternalDependency): def gen_bins(): for b in bins: if self.bindir: - yield os.path.join(self.bindir, b), b, False + yield os.path.join(self.bindir, b), b # prefer the <tool>-qt<version> of the tool to the plain one, as we # don't know what the unsuffixed one points to without calling it. - yield '{}-{}'.format(b, self.name), b, False - yield b, b, self.required if b != 'lrelease' else False + yield '{}-{}'.format(b, self.name), b + yield b, b - for b, name, required in gen_bins(): + for b, name in gen_bins(): if found[name].found(): continue @@ -260,7 +260,7 @@ class QtBaseDependency(ExternalDependency): care = err return care.split(' ')[-1].replace(')', '') - p = interp_obj.find_program_impl([b], required=required, + p = interp_obj.find_program_impl([b], required=False, version_func=get_version, wanted=wanted).held_object if p.found(): diff --git a/mesonbuild/envconfig.py b/mesonbuild/envconfig.py index 219b62e..9402d38 100644 --- a/mesonbuild/envconfig.py +++ b/mesonbuild/envconfig.py @@ -407,43 +407,3 @@ class BinaryTable: if command is not None and (len(command) == 0 or len(command[0].strip()) == 0): command = None return command - -class Directories: - - """Data class that holds information about directories for native and cross - builds. 
- """ - - def __init__(self, bindir: T.Optional[str] = None, datadir: T.Optional[str] = None, - includedir: T.Optional[str] = None, infodir: T.Optional[str] = None, - libdir: T.Optional[str] = None, libexecdir: T.Optional[str] = None, - localedir: T.Optional[str] = None, localstatedir: T.Optional[str] = None, - mandir: T.Optional[str] = None, prefix: T.Optional[str] = None, - sbindir: T.Optional[str] = None, sharedstatedir: T.Optional[str] = None, - sysconfdir: T.Optional[str] = None): - self.bindir = bindir - self.datadir = datadir - self.includedir = includedir - self.infodir = infodir - self.libdir = libdir - self.libexecdir = libexecdir - self.localedir = localedir - self.localstatedir = localstatedir - self.mandir = mandir - self.prefix = prefix - self.sbindir = sbindir - self.sharedstatedir = sharedstatedir - self.sysconfdir = sysconfdir - - def __contains__(self, key: str) -> bool: - return hasattr(self, key) - - def __getitem__(self, key: str) -> T.Optional[str]: - # Mypy can't figure out what to do with getattr here, so we'll case for it - return T.cast(T.Optional[str], getattr(self, key)) - - def __setitem__(self, key: str, value: T.Optional[str]) -> None: - setattr(self, key, value) - - def __iter__(self) -> T.Iterator[T.Tuple[str, str]]: - return iter(self.__dict__.items()) diff --git a/mesonbuild/environment.py b/mesonbuild/environment.py index 6110d33..18ecff4 100644 --- a/mesonbuild/environment.py +++ b/mesonbuild/environment.py @@ -16,6 +16,7 @@ import os, platform, re, sys, shutil, subprocess import tempfile import shlex import typing as T +import collections from . import coredata from .linkers import ArLinker, ArmarLinker, VisualStudioLinker, DLinker, CcrxLinker, Xc16Linker, C2000Linker, IntelVisualStudioLinker @@ -27,12 +28,14 @@ from .mesonlib import ( from . import mlog from .envconfig import ( - BinaryTable, Directories, MachineInfo, - Properties, known_cpu_families, + BinaryTable, MachineInfo, + Properties, known_cpu_families, get_env_var_pair, ) from . 
import compilers from .compilers import ( Compiler, + all_languages, + base_options, is_assembly, is_header, is_library, @@ -52,6 +55,7 @@ from .linkers import ( GnuBFDDynamicLinker, GnuGoldDynamicLinker, LLVMDynamicLinker, + QualcommLLVMDynamicLinker, MSVCDynamicLinker, OptlinkDynamicLinker, NvidiaHPC_DynamicLinker, @@ -338,7 +342,7 @@ def detect_cpu_family(compilers: CompilersDict) -> str: """ if mesonlib.is_windows(): trial = detect_windows_arch(compilers) - elif mesonlib.is_freebsd() or mesonlib.is_netbsd() or mesonlib.is_openbsd(): + elif mesonlib.is_freebsd() or mesonlib.is_netbsd() or mesonlib.is_openbsd() or mesonlib.is_qnx(): trial = platform.processor().lower() else: trial = platform.machine().lower() @@ -456,7 +460,8 @@ def machine_info_can_run(machine_info: MachineInfo): true_build_cpu_family = detect_cpu_family({}) return \ (machine_info.cpu_family == true_build_cpu_family) or \ - ((true_build_cpu_family == 'x86_64') and (machine_info.cpu_family == 'x86')) + ((true_build_cpu_family == 'x86_64') and (machine_info.cpu_family == 'x86')) or \ + ((true_build_cpu_family == 'aarch64') and (machine_info.cpu_family == 'arm')) def search_version(text: str) -> str: # Usually of the type 4.1.4 but compiler output may contain @@ -523,6 +528,11 @@ class Environment: self.first_invocation = False except FileNotFoundError: self.create_new_coredata(options) + except coredata.MesonVersionMismatchException as e: + # This is routine, but tell the user the update happened + mlog.log('Regenerating configuration from scratch:', str(e)) + coredata.read_cmd_line_file(self.build_dir, options) + self.create_new_coredata(options) except MesonException as e: # If we stored previous command line options, we can recover from # a broken/outdated coredata. @@ -552,10 +562,11 @@ class Environment: # Misc other properties about each machine. properties = PerMachineDefaultable() - # Store paths for native and cross build files. 
There is no target - # machine information here because nothing is installed for the target - # architecture, just the build and host architectures - paths = PerMachineDefaultable() + # We only need one of these as project options are not per machine + user_options = collections.defaultdict(dict) # type: T.DefaultDict[str, T.Dict[str, object]] + + # meson builtin options, as passed through cross or native files + meson_options = PerMachineDefaultable() # type: PerMachineDefaultable[T.DefaultDict[str, T.Dict[str, object]]] ## Setup build machine defaults @@ -567,14 +578,80 @@ class Environment: binaries.build = BinaryTable() properties.build = Properties() + # meson base options + _base_options = {} # type: T.Dict[str, object] + + # Per language compiler arguments + compiler_options = PerMachineDefaultable() # type: PerMachineDefaultable[T.DefaultDict[str, T.Dict[str, object]]] + compiler_options.build = collections.defaultdict(dict) + ## Read in native file(s) to override build machine configuration + def load_options(tag: str, store: T.Dict[str, T.Any]) -> None: + for section in config.keys(): + if section.endswith(tag): + if ':' in section: + project = section.split(':')[0] + else: + project = '' + store[project].update(config.get(section, {})) + + def split_base_options(mopts: T.DefaultDict[str, T.Dict[str, object]]) -> None: + for k, v in list(mopts.get('', {}).items()): + if k in base_options: + _base_options[k] = v + del mopts[k] + + lang_prefixes = tuple('{}_'.format(l) for l in all_languages) + def split_compiler_options(mopts: T.DefaultDict[str, T.Dict[str, object]], machine: MachineChoice) -> None: + for k, v in list(mopts.get('', {}).items()): + if k.startswith(lang_prefixes): + lang, key = k.split('_', 1) + if compiler_options[machine] is None: + compiler_options[machine] = collections.defaultdict(dict) + if lang not in compiler_options[machine]: + compiler_options[machine][lang] = collections.defaultdict(dict) + compiler_options[machine][lang][key] = v + del mopts[''][k] + + def move_compiler_options(properties: Properties, compopts: T.Dict[str, T.DefaultDict[str, object]]) -> None: + for k, v in properties.properties.copy().items(): + for lang in all_languages: + if k == '{}_args'.format(lang): + if 'args' not in compopts[lang]: + compopts[lang]['args'] = v + else: + mlog.warning('Ignoring {}_args in [properties] section for those in the [built-in options]'.format(lang)) + elif k == '{}_link_args'.format(lang): + if 'link_args' not in compopts[lang]: + compopts[lang]['link_args'] = v + else: + mlog.warning('Ignoring {}_link_args in [properties] section in favor of the [built-in options] section.') + else: + continue + mlog.deprecation('{} in the [properties] section of the machine file is deprecated, use the [built-in options] section.'.format(k)) + del properties.properties[k] + break + if self.coredata.config_files is not None: config = coredata.parse_machine_files(self.coredata.config_files) binaries.build = BinaryTable(config.get('binaries', {})) - paths.build = Directories(**config.get('paths', {})) properties.build = Properties(config.get('properties', {})) + # Don't run this if there are any cross files, we don't want to use + # the native values if we're doing a cross build + if not self.coredata.cross_files: + load_options('project options', user_options) + meson_options.build = collections.defaultdict(dict) + if config.get('paths') is not None: + mlog.deprecation('The [paths] section is deprecated, use the [built-in options] section instead.') + 
load_options('paths', meson_options.build) + load_options('built-in options', meson_options.build) + if not self.coredata.cross_files: + split_base_options(meson_options.build) + split_compiler_options(meson_options.build, MachineChoice.BUILD) + move_compiler_options(properties.build, compiler_options.build) + ## Read in cross file(s) to override host machine configuration if self.coredata.cross_files: @@ -585,14 +662,85 @@ class Environment: machines.host = MachineInfo.from_literal(config['host_machine']) if 'target_machine' in config: machines.target = MachineInfo.from_literal(config['target_machine']) - paths.host = Directories(**config.get('paths', {})) + load_options('project options', user_options) + meson_options.host = collections.defaultdict(dict) + compiler_options.host = collections.defaultdict(dict) + if config.get('paths') is not None: + mlog.deprecation('The [paths] section is deprecated, use the [built-in options] section instead.') + load_options('paths', meson_options.host) + load_options('built-in options', meson_options.host) + split_base_options(meson_options.host) + split_compiler_options(meson_options.host, MachineChoice.HOST) + move_compiler_options(properties.host, compiler_options.host) ## "freeze" now initialized configuration, and "save" to the class. self.machines = machines.default_missing() self.binaries = binaries.default_missing() self.properties = properties.default_missing() - self.paths = paths.default_missing() + self.user_options = user_options + self.meson_options = meson_options.default_missing() + self.base_options = _base_options + self.compiler_options = compiler_options.default_missing() + + # Some options default to environment variables if they are + # unset, set those now. + + for for_machine in MachineChoice: + p_env_pair = get_env_var_pair(for_machine, self.coredata.is_cross_build(), 'PKG_CONFIG_PATH') + if p_env_pair is not None: + p_env_var, p_env = p_env_pair + + # PKG_CONFIG_PATH may contain duplicates, which must be + # removed, else a duplicates-in-array-option warning arises. + p_list = list(mesonlib.OrderedSet(p_env.split(':'))) + + key = 'pkg_config_path' + + if self.first_invocation: + # Environment variables override config + self.meson_options[for_machine][''][key] = p_list + elif self.meson_options[for_machine][''].get(key, []) != p_list: + mlog.warning( + p_env_var, + 'environment variable does not match configured', + 'between configurations, meson ignores this.', + 'Use -Dpkg_config_path to change pkg-config search', + 'path instead.' 
+ ) + + # Read in command line and populate options + # TODO: validate all of this + all_builtins = set(coredata.BUILTIN_OPTIONS) | set(coredata.BUILTIN_OPTIONS_PER_MACHINE) | set(coredata.builtin_dir_noprefix_options) + for k, v in options.cmd_line_options.items(): + try: + subproject, k = k.split(':') + except ValueError: + subproject = '' + if k in base_options: + self.base_options[k] = v + elif k.startswith(lang_prefixes): + lang, key = k.split('_', 1) + self.compiler_options.host[lang][key] = v + elif k in all_builtins or k.startswith('backend_'): + self.meson_options.host[subproject][k] = v + elif k.startswith('build.'): + k = k.lstrip('build.') + if k in coredata.BUILTIN_OPTIONS_PER_MACHINE: + if self.meson_options.build is None: + self.meson_options.build = collections.defaultdict(dict) + self.meson_options.build[subproject][k] = v + else: + assert not k.startswith('build.') + self.user_options[subproject][k] = v + + # Warn if the user is using two different ways of setting build-type + # options that override each other + if meson_options.build and 'buildtype' in meson_options.build[''] and \ + ('optimization' in meson_options.build[''] or 'debug' in meson_options.build['']): + mlog.warning('Recommend using either -Dbuildtype or -Doptimization + -Ddebug. ' + 'Using both is redundant since they override each other. ' + 'See: https://mesonbuild.com/Builtin-options.html#build-type-options') exe_wrapper = self.lookup_binary_entry(MachineChoice.HOST, 'exe_wrapper') if exe_wrapper is not None: @@ -601,8 +749,6 @@ class Environment: else: self.exe_wrapper = None - self.cmd_line_options = options.cmd_line_options.copy() - # List of potential compilers. if mesonlib.is_windows(): # Intel C and C++ compiler is icl on Windows, but icc and icpc elsewhere. 
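The environment changes above route every -D command-line key into one of several stores: compiler base options, per-language compiler options, host or build-machine builtins, or per-subproject project options. A rough sketch of that ordering under simplified assumptions (classify_option and its placeholder sets stand in for coredata's real tables and are not Meson API):

def classify_option(key, base_options, lang_prefixes, builtins, per_machine_builtins):
    # 'sub:opt' keys belong to a subproject; no prefix means the top-level project.
    subproject, _, name = key.rpartition(':')
    if name in base_options:                         # e.g. b_vscrt, b_ndebug
        return subproject, 'base option'
    if name.startswith(tuple(lang_prefixes)):        # e.g. c_args, cpp_std
        return subproject, 'compiler option'
    if name in builtins or name.startswith('backend_'):
        return subproject, 'builtin (host machine)'
    if name.startswith('build.') and name[len('build.'):] in per_machine_builtins:
        return subproject, 'builtin (build machine)'
    return subproject, 'project option'              # everything else

With pkg_config_path among the per-machine builtins, a key such as build.pkg_config_path lands in the build-machine store, while mysub:someopt lands in that subproject's project options.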
@@ -893,10 +1039,13 @@ class Environment: check_args += override _, o, e = Popen_safe(compiler + check_args) - v = search_version(o) + v = search_version(o + e) if o.startswith('LLD'): linker = LLVMDynamicLinker( compiler, for_machine, comp_class.LINKER_PREFIX, override, version=v) # type: DynamicLinker + elif 'Snapdragon' in e and 'LLVM' in e: + linker = QualcommLLVMDynamicLinker( + compiler, for_machine, comp_class.LINKER_PREFIX, override, version=v) # type: DynamicLinker elif e.startswith('lld-link: '): # The LLD MinGW frontend didn't respond to --version before version 9.0.0, # and produced an error message about failing to link (when no object @@ -935,9 +1084,15 @@ class Environment: cls = GnuBFDDynamicLinker linker = cls(compiler, for_machine, comp_class.LINKER_PREFIX, override, version=v) elif 'Solaris' in e or 'Solaris' in o: + for line in (o+e).split('\n'): + if 'ld: Software Generation Utilities' in line: + v = line.split(':')[2].lstrip() + break + else: + v = 'unknown version' linker = SolarisDynamicLinker( compiler, for_machine, comp_class.LINKER_PREFIX, override, - version=search_version(e)) + version=v) else: raise EnvironmentException('Unable to determine dynamic linker') return linker @@ -1086,7 +1241,7 @@ class Environment: return cls( compiler, version, for_machine, is_cross, info, exe_wrap, target, linker=linker) - if 'clang' in out: + if 'clang' in out or 'Clang' in out: linker = None defines = self.get_clang_compiler_defines(compiler) diff --git a/mesonbuild/interpreter.py b/mesonbuild/interpreter.py index 11dac38..2924172 100644 --- a/mesonbuild/interpreter.py +++ b/mesonbuild/interpreter.py @@ -1637,8 +1637,13 @@ class CompilerHolder(InterpreterObject): libtype = mesonlib.LibType.STATIC if kwargs['static'] else mesonlib.LibType.SHARED linkargs = self.compiler.find_library(libname, self.environment, search_dirs, libtype) if required and not linkargs: - raise InterpreterException( - '{} library {!r} not found'.format(self.compiler.get_display_language(), libname)) + if libtype == mesonlib.LibType.PREFER_SHARED: + libtype = 'shared or static' + else: + libtype = libtype.name.lower() + raise InterpreterException('{} {} library {!r} not found' + .format(self.compiler.get_display_language(), + libtype, libname)) lib = dependencies.ExternalLibrary(libname, linkargs, self.environment, self.compiler.language) return ExternalLibraryHolder(lib, self.subproject) @@ -1844,10 +1849,18 @@ class Summary: if bool_yn and isinstance(i, bool): formatted_values.append(mlog.green('YES') if i else mlog.red('NO')) else: - formatted_values.append(i) + formatted_values.append(str(i)) self.sections[section][k] = (formatted_values, list_sep) self.max_key_len = max(self.max_key_len, len(k)) + def text_len(self, v): + if isinstance(v, str): + return len(v) + elif isinstance(v, mlog.AnsiDecorator): + return len(v.text) + else: + raise RuntimeError('Expecting only strings or AnsiDecorator') + def dump(self): mlog.log(self.project_name, mlog.normal_cyan(self.project_version)) for section, values in self.sections.items(): @@ -1859,12 +1872,28 @@ class Summary: indent = self.max_key_len - len(k) + 3 end = ' ' if v else '' mlog.log(' ' * indent, k + ':', end=end) - if list_sep is None: - indent = self.max_key_len + 6 - list_sep = '\n' + ' ' * indent - mlog.log(*v, sep=list_sep) + indent = self.max_key_len + 6 + self.dump_value(v, list_sep, indent) mlog.log('') # newline + def dump_value(self, arr, list_sep, indent): + lines_sep = '\n' + ' ' * indent + if list_sep is None: + mlog.log(*arr, 
sep=lines_sep) + return + max_len = shutil.get_terminal_size().columns + line = [] + line_len = indent + lines_sep = list_sep.rstrip() + lines_sep + for v in arr: + v_len = self.text_len(v) + len(list_sep) + if line and line_len + v_len > max_len: + mlog.log(*line, sep=list_sep, end=lines_sep) + line_len = indent + line = [] + line.append(v) + line_len += v_len + mlog.log(*line, sep=list_sep) class MesonMain(InterpreterObject): def __init__(self, build, interpreter): @@ -2543,21 +2572,34 @@ class Interpreter(InterpreterBase): return self.variables def check_stdlibs(self): - for for_machine in MachineChoice: + machine_choices = [MachineChoice.HOST] + if self.coredata.is_cross_build(): + machine_choices.append(MachineChoice.BUILD) + for for_machine in machine_choices: props = self.build.environment.properties[for_machine] for l in self.coredata.compilers[for_machine].keys(): try: di = mesonlib.stringlistify(props.get_stdlib(l)) - if len(di) != 2: - raise InterpreterException('Stdlib definition for %s should have exactly two elements.' - % l) - projname, depname = di - subproj = self.do_subproject(projname, 'meson', {}) - self.build.stdlibs.host[l] = subproj.get_variable_method([depname], {}) except KeyError: - pass - except InvalidArguments: - pass + continue + if len(di) == 1: + FeatureNew.single_use('stdlib without variable name', '0.56.0', self.subproject) + kwargs = {'fallback': di, + 'native': for_machine is MachineChoice.BUILD, + 'force_fallback': True, + } + name = display_name = l + '_stdlib' + dep = self.dependency_impl(name, display_name, kwargs) + self.build.stdlibs[for_machine][l] = dep + + def import_module(self, modname): + if modname in self.modules: + return + try: + module = importlib.import_module('mesonbuild.modules.' + modname) + except ImportError: + raise InvalidArguments('Module "%s" does not exist' % (modname, )) + self.modules[modname] = module.initialize(self) @stringArgs @noKwargs @@ -2567,14 +2609,15 @@ class Interpreter(InterpreterBase): modname = args[0] if modname.startswith('unstable-'): plainname = modname.split('-', 1)[1] - mlog.warning('Module %s has no backwards or forwards compatibility and might not exist in future releases.' % modname, location=node) - modname = 'unstable_' + plainname - if modname not in self.modules: try: - module = importlib.import_module('mesonbuild.modules.' + modname) - except ImportError: - raise InvalidArguments('Module "%s" does not exist' % (modname, )) - self.modules[modname] = module.initialize(self) + # check if stable module exists + self.import_module(plainname) + mlog.warning('Module %s is now stable, please use the %s module instead.' % (modname, plainname)) + modname = plainname + except InvalidArguments: + mlog.warning('Module %s has no backwards or forwards compatibility and might not exist in future releases.' 
% modname, location=node) + modname = 'unstable_' + plainname + self.import_module(modname) return ModuleHolder(modname, self.modules[modname], self) @stringArgs @@ -2749,6 +2792,12 @@ external dependencies (including libraries) must go to "dependencies".''') self.subprojects[dirname] = sub return sub + def get_subproject(self, dirname): + sub = self.subprojects.get(dirname) + if sub and sub.found(): + return sub + return None + def do_subproject(self, dirname: str, method: str, kwargs): disabled, required, feature = extract_required_kwarg(kwargs, self.subproject) if disabled: @@ -2757,6 +2806,7 @@ external dependencies (including libraries) must go to "dependencies".''') default_options = mesonlib.stringlistify(kwargs.get('default_options', [])) default_options = coredata.create_options_dict(default_options) + if dirname == '': raise InterpreterException('Subproject dir name must not be empty.') if dirname[0] == '.': @@ -2911,6 +2961,7 @@ external dependencies (including libraries) must go to "dependencies".''') if self.is_subproject(): optname = self.subproject + ':' + optname + for opts in [ self.coredata.base_options, compilers.base_options, self.coredata.builtins, dict(self.coredata.get_prefixed_options_per_machine(self.coredata.builtins_per_machine)), @@ -2996,8 +3047,9 @@ external dependencies (including libraries) must go to "dependencies".''') if self.environment.first_invocation: self.coredata.init_backend_options(backend) - options = {k: v for k, v in self.environment.cmd_line_options.items() if k.startswith('backend_')} - self.coredata.set_options(options) + if '' in self.environment.meson_options.host: + options = {k: v for k, v in self.environment.meson_options.host[''].items() if k.startswith('backend_')} + self.coredata.set_options(options) @stringArgs @permittedKwargs(permitted_kwargs['project']) @@ -3030,7 +3082,7 @@ external dependencies (including libraries) must go to "dependencies".''') self.project_default_options = mesonlib.stringlistify(kwargs.get('default_options', [])) self.project_default_options = coredata.create_options_dict(self.project_default_options) if self.environment.first_invocation: - default_options = self.project_default_options + default_options = self.project_default_options.copy() default_options.update(self.default_project_options) self.coredata.init_builtins(self.subproject) else: @@ -3088,8 +3140,11 @@ external dependencies (including libraries) must go to "dependencies".''') return self.add_languages(args, required, self.machine_from_native_kwarg(kwargs)) else: # absent 'native' means 'both' for backwards compatibility - mlog.warning('add_languages is missing native:, assuming languages are wanted for both host and build.', - location=self.current_node) + tv = FeatureNew.get_target_version(self.subproject) + if FeatureNew.check_version(tv, '0.54.0'): + mlog.warning('add_languages is missing native:, assuming languages are wanted for both host and build.', + location=self.current_node) + success = self.add_languages(args, False, MachineChoice.BUILD) success &= self.add_languages(args, required, MachineChoice.HOST) return success @@ -3206,6 +3261,7 @@ external dependencies (including libraries) must go to "dependencies".''') return should def add_languages_for(self, args, required, for_machine: MachineChoice): + args = [a.lower() for a in args] langs = set(self.coredata.compilers[for_machine].keys()) langs.update(args) if 'vala' in langs: @@ -3214,7 +3270,6 @@ external dependencies (including libraries) must go to "dependencies".''') success = 
True for lang in sorted(args, key=compilers.sort_clink): - lang = lang.lower() clist = self.coredata.compilers[for_machine] machine_name = for_machine.get_lower_case_name() if lang in clist: @@ -3469,7 +3524,7 @@ external dependencies (including libraries) must go to "dependencies".''') return DependencyHolder(NotFoundDependency(self.environment), self.subproject) def verify_fallback_consistency(self, dirname, varname, cached_dep): - subi = self.subprojects.get(dirname) + subi = self.get_subproject(dirname) if not cached_dep or not varname or not subi or not cached_dep.found(): return dep = subi.get_variable_method([varname], {}) @@ -3601,7 +3656,7 @@ external dependencies (including libraries) must go to "dependencies".''') # even if the dependency is not required. provider = self.environment.wrap_resolver.find_dep_provider(name) dirname = mesonlib.listify(provider)[0] - if provider and (required or dirname in self.subprojects): + if provider and (required or self.get_subproject(dirname)): kwargs['fallback'] = provider has_fallback = True @@ -3631,14 +3686,16 @@ external dependencies (including libraries) must go to "dependencies".''') # a higher level project, try to use it first. if has_fallback: dirname, varname = self.get_subproject_infos(kwargs) - if dirname in self.subprojects: + if self.get_subproject(dirname): return self.get_subproject_dep(name, display_name, dirname, varname, kwargs) wrap_mode = self.coredata.get_builtin_option('wrap_mode') force_fallback_for = self.coredata.get_builtin_option('force_fallback_for') + force_fallback = kwargs.get('force_fallback', False) forcefallback = has_fallback and (wrap_mode == WrapMode.forcefallback or \ name in force_fallback_for or \ - dirname in force_fallback_for) + dirname in force_fallback_for or \ + force_fallback) if name != '' and not forcefallback: self._handle_featurenew_dependencies(name) kwargs['required'] = required and not has_fallback @@ -4316,8 +4373,7 @@ This will become a hard error in the future.''' % kwargs['input'], location=self if len(inputs_abs) != 1: raise InterpreterException('Exactly one input file must be given in copy mode') os.makedirs(os.path.join(self.environment.build_dir, self.subdir), exist_ok=True) - shutil.copyfile(inputs_abs[0], ofile_abs) - shutil.copystat(inputs_abs[0], ofile_abs) + shutil.copy2(inputs_abs[0], ofile_abs) else: # Not reachable raise AssertionError @@ -4645,7 +4701,7 @@ Try setting b_lundef to false instead.'''.format(self.coredata.base_options['b_s if name.startswith('meson-'): raise InvalidArguments("Target names starting with 'meson-' are reserved " "for Meson's internal use. Please rename.") - if name in coredata.forbidden_target_names: + if name in coredata.FORBIDDEN_TARGET_NAMES: raise InvalidArguments("Target name '%s' is reserved for Meson's " "internal use. Please rename." 
% name) # To permit an executable and a shared library to have the @@ -4740,8 +4796,7 @@ Try setting b_lundef to false instead.'''.format(self.coredata.base_options['b_s target = targetclass(name, self.subdir, self.subproject, for_machine, sources, objs, self.environment, kwargs) target.project_version = self.project_version - if not self.environment.machines.matches_build_machine(for_machine): - self.add_cross_stdlib_info(target) + self.add_stdlib_info(target) l = targetholder(target, self) self.add_target(name, l.held_object) self.project_args_frozen = True @@ -4765,23 +4820,19 @@ This will become a hard error in the future.''', location=self.current_node) kwargs['d_import_dirs'] = cleaned_items def get_used_languages(self, target): - result = {} + result = set() for i in target.sources: - # TODO other platforms - for lang, c in self.coredata.compilers.host.items(): + for lang, c in self.coredata.compilers[target.for_machine].items(): if c.can_compile(i): - result[lang] = True + result.add(lang) break return result - def add_cross_stdlib_info(self, target): - if target.for_machine != MachineChoice.HOST: - return + def add_stdlib_info(self, target): for l in self.get_used_languages(target): - props = self.environment.properties.host - if props.has_stdlib(l) \ - and self.subproject != props.get_stdlib(l)[0]: - target.add_deps(self.build.stdlibs.host[l]) + dep = self.build.stdlibs[target.for_machine].get(l, None) + if dep: + target.add_deps(dep) def check_sources_exist(self, subdir, sources): for s in sources: diff --git a/mesonbuild/interpreterbase.py b/mesonbuild/interpreterbase.py index 634f4f2..6c4f273 100644 --- a/mesonbuild/interpreterbase.py +++ b/mesonbuild/interpreterbase.py @@ -330,7 +330,7 @@ class FeatureDeprecated(FeatureCheckBase): @staticmethod def check_version(target_version: str, feature_version: str) -> bool: - # For deprecatoin checks we need to return the inverse of FeatureNew checks + # For deprecation checks we need to return the inverse of FeatureNew checks return not mesonlib.version_compare_condition_with_min(target_version, feature_version) @staticmethod @@ -810,9 +810,7 @@ The result of this is undefined and will become a hard error in a future Meson r assert(isinstance(node, mparser.PlusAssignmentNode)) varname = node.var_name addition = self.evaluate_statement(node.value) - if is_disabler(addition): - self.set_variable(varname, addition) - return + # Remember that all variables are immutable. We must always create a # full new variable and then assign it. old_variable = self.get_variable(varname) @@ -836,7 +834,7 @@ The result of this is undefined and will become a hard error in a future Meson r new_value = {**old_variable, **addition} # Add other data types here. 
else: - raise InvalidArguments('The += operator currently only works with arrays, dicts, strings or ints ') + raise InvalidArguments('The += operator currently only works with arrays, dicts, strings or ints') self.set_variable(varname, new_value) def evaluate_indexing(self, node: mparser.IndexNode) -> TYPE_var: @@ -1026,6 +1024,20 @@ The result of this is undefined and will become a hard error in a future Meson r if not isinstance(cmpr, str): raise InterpreterException('Version_compare() argument must be a string.') return mesonlib.version_compare(obj, cmpr) + elif method_name == 'substring': + if len(posargs) > 2: + raise InterpreterException('substring() takes maximum two arguments.') + start = 0 + end = len(obj) + if len (posargs) > 0: + if not isinstance(posargs[0], int): + raise InterpreterException('substring() argument must be an int') + start = posargs[0] + if len (posargs) > 1: + if not isinstance(posargs[1], int): + raise InterpreterException('substring() argument must be an int') + end = posargs[1] + return obj[start:end] raise InterpreterException('Unknown method "%s" for a string.' % method_name) def format_string(self, templ: str, args: T.List[TYPE_nvar]) -> str: diff --git a/mesonbuild/linkers.py b/mesonbuild/linkers.py index 94de429..505aef6 100644 --- a/mesonbuild/linkers.py +++ b/mesonbuild/linkers.py @@ -451,9 +451,6 @@ class DynamicLinker(LinkerEnvVarsMixin, metaclass=abc.ABCMeta): def bitcode_args(self) -> T.List[str]: raise mesonlib.MesonException('This linker does not support bitcode bundles') - def get_debug_crt_args(self) -> T.List[str]: - return [] - def build_rpath_args(self, env: 'Environment', build_dir: str, from_dir: str, rpath_paths: str, build_rpath: str, install_rpath: str) -> T.Tuple[T.List[str], T.Set[bytes]]: @@ -930,6 +927,12 @@ class ArmClangDynamicLinker(ArmDynamicLinker): def import_library_args(self, implibname: str) -> T.List[str]: return ['--symdefs=' + implibname] +class QualcommLLVMDynamicLinker(LLVMDynamicLinker): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + # ARM Linker from Snapdragon LLVM ARM Compiler + self.id = 'ld.qcld' class PGIDynamicLinker(PosixDynamicLinkerMixin, DynamicLinker): @@ -1002,16 +1005,6 @@ class VisualStudioLikeLinkerMixin: def invoked_by_compiler(self) -> bool: return not self.direct - def get_debug_crt_args(self) -> T.List[str]: - """Arguments needed to select a debug crt for the linker. - - Sometimes we need to manually select the CRT (C runtime) to use with - MSVC. One example is when trying to link with static libraries since - MSVC won't auto-select a CRT for us in that case and will error out - asking us to select one. 
- """ - return self._apply_prefix('/MDd') - def get_output_args(self, outputname: str) -> T.List[str]: return self._apply_prefix(['/MACHINE:' + self.machine, '/OUT:' + outputname]) @@ -1104,7 +1097,13 @@ class SolarisDynamicLinker(PosixDynamicLinkerMixin, DynamicLinker): def get_pie_args(self) -> T.List[str]: # Available in Solaris 11.2 and later - return ['-z', 'type=pie'] + pc, stdo, stde = mesonlib.Popen_safe(self.exelist + self._apply_prefix('-zhelp')) + for line in (stdo + stde).split('\n'): + if '-z type' in line: + if 'pie' in line: + return ['-z', 'type=pie'] + break + return [] def get_asneeded_args(self) -> T.List[str]: return self._apply_prefix(['-z', 'ignore']) diff --git a/mesonbuild/mconf.py b/mesonbuild/mconf.py index 2e03cab..f070355 100644 --- a/mesonbuild/mconf.py +++ b/mesonbuild/mconf.py @@ -184,19 +184,7 @@ class Conf: if not self.default_values_only: print(' Build dir ', self.build_dir) - dir_option_names = ['bindir', - 'datadir', - 'includedir', - 'infodir', - 'libdir', - 'libexecdir', - 'localedir', - 'localstatedir', - 'mandir', - 'prefix', - 'sbindir', - 'sharedstatedir', - 'sysconfdir'] + dir_option_names = list(coredata.BUILTIN_DIR_OPTIONS) test_option_names = ['errorlogs', 'stdsplit'] core_option_names = [k for k in self.coredata.builtins if k not in dir_option_names + test_option_names] diff --git a/mesonbuild/mesondata.py b/mesonbuild/mesondata.py new file mode 100644 index 0000000..1f223c2 --- /dev/null +++ b/mesonbuild/mesondata.py @@ -0,0 +1,374 @@ +# Copyright 2020 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +#### +#### WARNING: This is an automatically generated file! Do not edit! 
+#### Generated by tools/gen_data.py +#### + + +from pathlib import Path +import typing as T + +if T.TYPE_CHECKING: + from .environment import Environment + +###################### +# BEGIN Data section # +###################### + +file_0_data_CMakeListsLLVM_txt = '''\ +cmake_minimum_required(VERSION ${CMAKE_MAJOR_VERSION}.${CMAKE_MINOR_VERSION}.${CMAKE_PATCH_VERSION} ) + +set(PACKAGE_FOUND FALSE) + +while(TRUE) + find_package(LLVM REQUIRED CONFIG QUIET) + + # ARCHS has to be set via the CMD interface + if(LLVM_FOUND OR "${ARCHS}" STREQUAL "") + break() + endif() + + list(GET ARCHS 0 CMAKE_LIBRARY_ARCHITECTURE) + list(REMOVE_AT ARCHS 0) +endwhile() + +if(LLVM_FOUND) + set(PACKAGE_FOUND TRUE) + + foreach(mod IN LISTS LLVM_MESON_MODULES) + # Reset variables + set(out_mods) + set(real_mods) + + # Generate a lower and upper case version + string(TOLOWER "${mod}" mod_L) + string(TOUPPER "${mod}" mod_U) + + # Get the mapped components + llvm_map_components_to_libnames(out_mods ${mod} ${mod_L} ${mod_U}) + list(SORT out_mods) + list(REMOVE_DUPLICATES out_mods) + + # Make sure that the modules exist + foreach(i IN LISTS out_mods) + if(TARGET ${i}) + list(APPEND real_mods ${i}) + endif() + endforeach() + + # Set the output variables + set(MESON_LLVM_TARGETS_${mod} ${real_mods}) + foreach(i IN LISTS real_mods) + set(MESON_TARGET_TO_LLVM_${i} ${mod}) + endforeach() + endforeach() + + # Check the following variables: + # LLVM_PACKAGE_VERSION + # LLVM_VERSION + # LLVM_VERSION_STRING + if(NOT DEFINED PACKAGE_VERSION) + if(DEFINED LLVM_PACKAGE_VERSION) + set(PACKAGE_VERSION "${LLVM_PACKAGE_VERSION}") + elseif(DEFINED LLVM_VERSION) + set(PACKAGE_VERSION "${LLVM_VERSION}") + elseif(DEFINED LLVM_VERSION_STRING) + set(PACKAGE_VERSION "${LLVM_VERSION_STRING}") + endif() + endif() + + # Check the following variables: + # LLVM_LIBRARIES + # LLVM_LIBS + set(libs) + if(DEFINED LLVM_LIBRARIES) + set(libs LLVM_LIBRARIES) + elseif(DEFINED LLVM_LIBS) + set(libs LLVM_LIBS) + endif() + + # Check the following variables: + # LLVM_INCLUDE_DIRS + # LLVM_INCLUDES + # LLVM_INCLUDE_DIR + set(includes) + if(DEFINED LLVM_INCLUDE_DIRS) + set(includes LLVM_INCLUDE_DIRS) + elseif(DEFINED LLVM_INCLUDES) + set(includes LLVM_INCLUDES) + elseif(DEFINED LLVM_INCLUDE_DIR) + set(includes LLVM_INCLUDE_DIR) + endif() + + # Check the following variables: + # LLVM_DEFINITIONS + set(definitions) + if(DEFINED LLVM_DEFINITIONS) + set(definitions LLVM_DEFINITIONS) + endif() + + set(PACKAGE_INCLUDE_DIRS "${${includes}}") + set(PACKAGE_DEFINITIONS "${${definitions}}") + set(PACKAGE_LIBRARIES "${${libs}}") +endif() +''' + +file_1_data_CMakePathInfo_txt = '''\ +cmake_minimum_required(VERSION ${CMAKE_MAJOR_VERSION}.${CMAKE_MINOR_VERSION}.${CMAKE_PATCH_VERSION}) + +set(TMP_PATHS_LIST) +list(APPEND TMP_PATHS_LIST ${CMAKE_PREFIX_PATH}) +list(APPEND TMP_PATHS_LIST ${CMAKE_FRAMEWORK_PATH}) +list(APPEND TMP_PATHS_LIST ${CMAKE_APPBUNDLE_PATH}) +list(APPEND TMP_PATHS_LIST $ENV{CMAKE_PREFIX_PATH}) +list(APPEND TMP_PATHS_LIST $ENV{CMAKE_FRAMEWORK_PATH}) +list(APPEND TMP_PATHS_LIST $ENV{CMAKE_APPBUNDLE_PATH}) +list(APPEND TMP_PATHS_LIST ${CMAKE_SYSTEM_PREFIX_PATH}) +list(APPEND TMP_PATHS_LIST ${CMAKE_SYSTEM_FRAMEWORK_PATH}) +list(APPEND TMP_PATHS_LIST ${CMAKE_SYSTEM_APPBUNDLE_PATH}) + +set(LIB_ARCH_LIST) +if(CMAKE_LIBRARY_ARCHITECTURE_REGEX) + file(GLOB implicit_dirs RELATIVE /lib /lib/*-linux-gnu* ) + foreach(dir ${implicit_dirs}) + if("${dir}" MATCHES "${CMAKE_LIBRARY_ARCHITECTURE_REGEX}") + list(APPEND LIB_ARCH_LIST "${dir}") + endif() + endforeach() 
+endif() + +# "Export" these variables: +set(MESON_ARCH_LIST ${LIB_ARCH_LIST}) +set(MESON_PATHS_LIST ${TMP_PATHS_LIST}) +set(MESON_CMAKE_ROOT ${CMAKE_ROOT}) +set(MESON_CMAKE_SYSROOT ${CMAKE_SYSROOT}) +set(MESON_FIND_ROOT_PATH ${CMAKE_FIND_ROOT_PATH}) + +message(STATUS ${TMP_PATHS_LIST}) +''' + +file_2_data_CMakeLists_txt = '''\ +# fail noisily if attempt to use this file without setting: +# cmake_minimum_required(VERSION ${CMAKE_VERSION}) +# project(... LANGUAGES ...) + +cmake_policy(SET CMP0000 NEW) + +set(PACKAGE_FOUND FALSE) +set(_packageName "${NAME}") +string(TOUPPER "${_packageName}" PACKAGE_NAME) + +while(TRUE) + find_package("${NAME}" QUIET COMPONENTS ${COMPS}) + + # ARCHS has to be set via the CMD interface + if(${_packageName}_FOUND OR ${PACKAGE_NAME}_FOUND OR "${ARCHS}" STREQUAL "") + break() + endif() + + list(GET ARCHS 0 CMAKE_LIBRARY_ARCHITECTURE) + list(REMOVE_AT ARCHS 0) +endwhile() + +if(${_packageName}_FOUND OR ${PACKAGE_NAME}_FOUND) + set(PACKAGE_FOUND TRUE) + + # Check the following variables: + # FOO_VERSION + # Foo_VERSION + # FOO_VERSION_STRING + # Foo_VERSION_STRING + if(NOT DEFINED PACKAGE_VERSION) + if(DEFINED ${_packageName}_VERSION) + set(PACKAGE_VERSION "${${_packageName}_VERSION}") + elseif(DEFINED ${PACKAGE_NAME}_VERSION) + set(PACKAGE_VERSION "${${PACKAGE_NAME}_VERSION}") + elseif(DEFINED ${_packageName}_VERSION_STRING) + set(PACKAGE_VERSION "${${_packageName}_VERSION_STRING}") + elseif(DEFINED ${PACKAGE_NAME}_VERSION_STRING) + set(PACKAGE_VERSION "${${PACKAGE_NAME}_VERSION_STRING}") + endif() + endif() + + # Check the following variables: + # FOO_LIBRARIES + # Foo_LIBRARIES + # FOO_LIBS + # Foo_LIBS + set(libs) + if(DEFINED ${_packageName}_LIBRARIES) + set(libs ${_packageName}_LIBRARIES) + elseif(DEFINED ${PACKAGE_NAME}_LIBRARIES) + set(libs ${PACKAGE_NAME}_LIBRARIES) + elseif(DEFINED ${_packageName}_LIBS) + set(libs ${_packageName}_LIBS) + elseif(DEFINED ${PACKAGE_NAME}_LIBS) + set(libs ${PACKAGE_NAME}_LIBS) + endif() + + # Check the following variables: + # FOO_INCLUDE_DIRS + # Foo_INCLUDE_DIRS + # FOO_INCLUDES + # Foo_INCLUDES + # FOO_INCLUDE_DIR + # Foo_INCLUDE_DIR + set(includes) + if(DEFINED ${_packageName}_INCLUDE_DIRS) + set(includes ${_packageName}_INCLUDE_DIRS) + elseif(DEFINED ${PACKAGE_NAME}_INCLUDE_DIRS) + set(includes ${PACKAGE_NAME}_INCLUDE_DIRS) + elseif(DEFINED ${_packageName}_INCLUDES) + set(includes ${_packageName}_INCLUDES) + elseif(DEFINED ${PACKAGE_NAME}_INCLUDES) + set(includes ${PACKAGE_NAME}_INCLUDES) + elseif(DEFINED ${_packageName}_INCLUDE_DIR) + set(includes ${_packageName}_INCLUDE_DIR) + elseif(DEFINED ${PACKAGE_NAME}_INCLUDE_DIR) + set(includes ${PACKAGE_NAME}_INCLUDE_DIR) + endif() + + # Check the following variables: + # FOO_DEFINITIONS + # Foo_DEFINITIONS + set(definitions) + if(DEFINED ${_packageName}_DEFINITIONS) + set(definitions ${_packageName}_DEFINITIONS) + elseif(DEFINED ${PACKAGE_NAME}_DEFINITIONS) + set(definitions ${PACKAGE_NAME}_DEFINITIONS) + endif() + + set(PACKAGE_INCLUDE_DIRS "${${includes}}") + set(PACKAGE_DEFINITIONS "${${definitions}}") + set(PACKAGE_LIBRARIES "${${libs}}") +endif() +''' + +file_3_data_preload_cmake = '''\ +if(MESON_PS_LOADED) + return() +endif() + +set(MESON_PS_LOADED ON) + +# Dummy macros that have a special meaning in the meson code +macro(meson_ps_execute_delayed_calls) +endmacro() + +macro(meson_ps_reload_vars) +endmacro() + +# Helper macro to inspect the current CMake state +macro(meson_ps_inspect_vars) + set(MESON_PS_CMAKE_CURRENT_BINARY_DIR "${CMAKE_CURRENT_BINARY_DIR}") + 
set(MESON_PS_CMAKE_CURRENT_SOURCE_DIR "${CMAKE_CURRENT_SOURCE_DIR}") + meson_ps_execute_delayed_calls() +endmacro() + + +# Override some system functions with custom code and forward the args +# to the original function +macro(add_custom_command) + meson_ps_inspect_vars() + _add_custom_command(${ARGV}) +endmacro() + +macro(add_custom_target) + meson_ps_inspect_vars() + _add_custom_target(${ARGV}) +endmacro() + +macro(set_property) + meson_ps_inspect_vars() + _set_property(${ARGV}) +endmacro() + +function(set_source_files_properties) + set(FILES) + set(I 0) + set(PROPERTIES OFF) + + while(I LESS ARGC) + if(NOT PROPERTIES) + if("${ARGV${I}}" STREQUAL "PROPERTIES") + set(PROPERTIES ON) + else() + list(APPEND FILES "${ARGV${I}}") + endif() + + math(EXPR I "${I} + 1") + else() + set(ID_IDX ${I}) + math(EXPR PROP_IDX "${ID_IDX} + 1") + + set(ID "${ARGV${ID_IDX}}") + set(PROP "${ARGV${PROP_IDX}}") + + set_property(SOURCE ${FILES} PROPERTY "${ID}" "${PROP}") + math(EXPR I "${I} + 2") + endif() + endwhile() +endfunction() + +set(MESON_PS_DELAYED_CALLS add_custom_command;add_custom_target;set_property) +meson_ps_reload_vars() +''' + + +#################### +# END Data section # +#################### + +class DataFile: + def __init__(self, path: Path, sha256sum: str, data: str) -> None: + self.path = path + self.sha256sum = sha256sum + self.data = data + + def write_once(self, path: Path) -> None: + if not path.exists(): + path.write_text(self.data) + + def write_to_private(self, env: 'Environment') -> Path: + out_file = Path(env.scratch_dir) / 'data' / self.path.name + out_file.parent.mkdir(exist_ok=True) + self.write_once(out_file) + return out_file + + +mesondata = { + 'dependencies/data/CMakeListsLLVM.txt': DataFile( + Path('dependencies/data/CMakeListsLLVM.txt'), + '412cec3315597041a978d018cdaca282dcd47693793540da88ae2f80d0cbd7cd', + file_0_data_CMakeListsLLVM_txt, + ), + 'dependencies/data/CMakePathInfo.txt': DataFile( + Path('dependencies/data/CMakePathInfo.txt'), + '90da8b443982d9c87139b7dc84228eb58cab4315764949637208f25e2bda7db2', + file_1_data_CMakePathInfo_txt, + ), + 'dependencies/data/CMakeLists.txt': DataFile( + Path('dependencies/data/CMakeLists.txt'), + '71a2d58381f912bbfb1c8709884d34d721f682edf2fca001e1f582f0bffd0da7', + file_2_data_CMakeLists_txt, + ), + 'cmake/data/preload.cmake': DataFile( + Path('cmake/data/preload.cmake'), + '064d047b18a5c919ad016b838bed50c5d40aebe9e53da0e70eff9d52a2c1ca1f', + file_3_data_preload_cmake, + ), +} diff --git a/mesonbuild/mesonlib.py b/mesonbuild/mesonlib.py index a43d4c4..4b8cce8 100644 --- a/mesonbuild/mesonlib.py +++ b/mesonbuild/mesonlib.py @@ -389,6 +389,9 @@ class PerMachine(T.Generic[_T]): unfreeze.host = None return unfreeze + def __repr__(self) -> str: + return 'PerMachine({!r}, {!r})'.format(self.build, self.host) + class PerThreeMachine(PerMachine[_T]): """Like `PerMachine` but includes `target` too. @@ -421,6 +424,9 @@ class PerThreeMachine(PerMachine[_T]): def matches_build_machine(self, machine: MachineChoice) -> bool: return self.build == self[machine] + def __repr__(self) -> str: + return 'PerThreeMachine({!r}, {!r}, {!r})'.format(self.build, self.host, self.target) + class PerMachineDefaultable(PerMachine[T.Optional[_T]]): """Extends `PerMachine` with the ability to default from `None`s. 
@@ -439,6 +445,9 @@ class PerMachineDefaultable(PerMachine[T.Optional[_T]]): freeze.host = freeze.build return freeze + def __repr__(self) -> str: + return 'PerMachineDefaultable({!r}, {!r})'.format(self.build, self.host) + class PerThreeMachineDefaultable(PerMachineDefaultable, PerThreeMachine[T.Optional[_T]]): """Extends `PerThreeMachine` with the ability to default from `None`s. @@ -460,6 +469,9 @@ class PerThreeMachineDefaultable(PerMachineDefaultable, PerThreeMachine[T.Option freeze.target = freeze.host return freeze + def __repr__(self) -> str: + return 'PerThreeMachineDefaultable({!r}, {!r}, {!r})'.format(self.build, self.host, self.target) + def is_sunos() -> bool: return platform.system().lower() == 'sunos' @@ -515,6 +527,8 @@ def is_irix() -> bool: def is_hurd() -> bool: return platform.system().lower() == 'gnu' +def is_qnx() -> bool: + return platform.system().lower() == 'qnx' def exe_exists(arglist: T.List[str]) -> bool: try: diff --git a/mesonbuild/minstall.py b/mesonbuild/minstall.py index 0be01fe..e6e973a 100644 --- a/mesonbuild/minstall.py +++ b/mesonbuild/minstall.py @@ -270,11 +270,9 @@ class Installer: # Remove this entire branch when changing the behaviour to duplicate # symlinks rather than copying what they point to. print(symlink_warning) - shutil.copyfile(from_file, to_file) - shutil.copystat(from_file, to_file) + shutil.copy2(from_file, to_file) else: - shutil.copyfile(from_file, to_file) - shutil.copystat(from_file, to_file) + shutil.copy2(from_file, to_file) selinux_updates.append(to_file) append_to_log(self.lf, to_file) return True diff --git a/mesonbuild/mintro.py b/mesonbuild/mintro.py index cccedaa..0049bbd 100644 --- a/mesonbuild/mintro.py +++ b/mesonbuild/mintro.py @@ -200,19 +200,7 @@ def list_buildoptions_from_source(intr: IntrospectionInterpreter) -> T.List[T.Di def list_buildoptions(coredata: cdata.CoreData, subprojects: T.Optional[T.List[str]] = None) -> T.List[T.Dict[str, T.Union[str, bool, int, T.List[str]]]]: optlist = [] # type: T.List[T.Dict[str, T.Union[str, bool, int, T.List[str]]]] - dir_option_names = ['bindir', - 'datadir', - 'includedir', - 'infodir', - 'libdir', - 'libexecdir', - 'localedir', - 'localstatedir', - 'mandir', - 'prefix', - 'sbindir', - 'sharedstatedir', - 'sysconfdir'] + dir_option_names = list(cdata.BUILTIN_DIR_OPTIONS) test_option_names = ['errorlogs', 'stdsplit'] core_option_names = [k for k in coredata.builtins if k not in dir_option_names + test_option_names] diff --git a/mesonbuild/modules/cmake.py b/mesonbuild/modules/cmake.py index ca98b1c..2383753 100644 --- a/mesonbuild/modules/cmake.py +++ b/mesonbuild/modules/cmake.py @@ -92,7 +92,9 @@ class CMakeSubprojectHolder(InterpreterObject, ObjectHolder): tgt = args[0] res = self.held_object.cm_interpreter.target_info(tgt) if res is None: - raise InterpreterException('The CMake target {} does not exist'.format(tgt)) + raise InterpreterException('The CMake target {} does not exist\n'.format(tgt) + + ' Use the following command in your meson.build to list all available targets:\n\n' + + ' message(\'CMaket targets:\\n - \' + \'\\n - \'.join(<cmake_subproject>.target_list()))') # Make sure that all keys are present (if not this is a bug) assert(all([x in res for x in ['inc', 'src', 'dep', 'tgt', 'func']])) diff --git a/mesonbuild/modules/unstable_keyval.py b/mesonbuild/modules/keyval.py index 3da2992..3da2992 100644 --- a/mesonbuild/modules/unstable_keyval.py +++ b/mesonbuild/modules/keyval.py diff --git a/mesonbuild/modules/pkgconfig.py b/mesonbuild/modules/pkgconfig.py index 
b7a12ff..1cb7698 100644 --- a/mesonbuild/modules/pkgconfig.py +++ b/mesonbuild/modules/pkgconfig.py @@ -36,6 +36,7 @@ class DependenciesHelper: self.priv_reqs = [] self.cflags = [] self.version_reqs = {} + self.link_whole_targets = [] def add_pub_libs(self, libs): libs, reqs, cflags = self._process_libs(libs, True) @@ -130,10 +131,7 @@ class DependenciesHelper: if obj.found(): processed_libs += obj.get_link_args() processed_cflags += obj.get_compile_args() - if public: - self.add_pub_libs(obj.libraries) - else: - self.add_priv_libs(obj.libraries) + self._add_lib_dependencies(obj.libraries, obj.whole_libraries, obj.ext_deps, public) elif isinstance(obj, dependencies.Dependency): if obj.found(): processed_libs += obj.get_link_args() @@ -148,12 +146,13 @@ class DependenciesHelper: processed_libs.append(obj) elif isinstance(obj, (build.SharedLibrary, build.StaticLibrary)): processed_libs.append(obj) - if isinstance(obj, build.StaticLibrary) and public: - self.add_pub_libs(obj.get_dependencies(for_pkgconfig=True)) - self.add_pub_libs(obj.get_external_deps()) - else: - self.add_priv_libs(obj.get_dependencies(for_pkgconfig=True)) - self.add_priv_libs(obj.get_external_deps()) + # If there is a static library in `Libs:` all its deps must be + # public too, otherwise the generated pc file will never be + # usable without --static. + self._add_lib_dependencies(obj.link_targets, + obj.link_whole_targets, + obj.external_deps, + isinstance(obj, build.StaticLibrary) and public) elif isinstance(obj, str): processed_libs.append(obj) else: @@ -161,6 +160,31 @@ class DependenciesHelper: return processed_libs, processed_reqs, processed_cflags + def _add_lib_dependencies(self, link_targets, link_whole_targets, external_deps, public): + add_libs = self.add_pub_libs if public else self.add_priv_libs + # Recursively add all linked libraries + for t in link_targets: + # Internal libraries (uninstalled static library) will be promoted + # to link_whole, treat them as such here. + if t.is_internal(): + self._add_link_whole(t, public) + else: + add_libs([t]) + for t in link_whole_targets: + self._add_link_whole(t, public) + # And finally its external dependencies + add_libs(external_deps) + + def _add_link_whole(self, t, public): + # Don't include static libraries that we link_whole. But we still need to + # include their dependencies: a static library we link_whole + # could itself link to a shared library or an installed static library. + # Keep track of link_whole_targets so we can remove them from our + # lists in case a library is link_with and link_whole at the same time. + # See remove_dups() below. + self.link_whole_targets.append(t) + self._add_lib_dependencies(t.link_targets, t.link_whole_targets, t.external_deps, public) + def add_version_reqs(self, name, version_reqs): if version_reqs: if name not in self.version_reqs: @@ -196,6 +220,32 @@ class DependenciesHelper: return ', '.join(result) def remove_dups(self): + # Set of ids that have already been handled and should not be added any more + exclude = set() + + # We can't just check if 'x' is excluded because we could have copies of + # the same SharedLibrary object for example. 
+ def _ids(x): + if hasattr(x, 'generated_pc'): + yield x.generated_pc + if isinstance(x, build.Target): + yield x.get_id() + yield x + + # Exclude 'x' in all its forms and return if it was already excluded + def _add_exclude(x): + was_excluded = False + for i in _ids(x): + if i in exclude: + was_excluded = True + else: + exclude.add(i) + return was_excluded + + # link_whole targets are already part of other targets, exclude them all. + for t in self.link_whole_targets: + _add_exclude(t) + def _fn(xs, libs=False): # Remove duplicates whilst preserving original order result = [] @@ -206,19 +256,21 @@ class DependenciesHelper: cannot_dedup = libs and isinstance(x, str) and \ not x.startswith(('-l', '-L')) and \ x not in known_flags - if x not in result or cannot_dedup: - result.append(x) + if not cannot_dedup and _add_exclude(x): + continue + result.append(x) return result - self.pub_libs = _fn(self.pub_libs, True) + + # Handle lists in priority order: public items can be excluded from + # private and Requires can excluded from Libs. self.pub_reqs = _fn(self.pub_reqs) - self.priv_libs = _fn(self.priv_libs, True) + self.pub_libs = _fn(self.pub_libs, True) self.priv_reqs = _fn(self.priv_reqs) + self.priv_libs = _fn(self.priv_libs, True) + # Reset exclude list just in case some values can be both cflags and libs. + exclude = set() self.cflags = _fn(self.cflags) - # Remove from private libs/reqs if they are in public already - self.priv_libs = [i for i in self.priv_libs if i not in self.pub_libs] - self.priv_reqs = [i for i in self.priv_reqs if i not in self.pub_reqs] - class PkgConfigModule(ExtensionModule): def _get_lname(self, l, msg, pcfile): @@ -267,7 +319,6 @@ class PkgConfigModule(ExtensionModule): def generate_pkgconfig_file(self, state, deps, subdirs, name, description, url, version, pcfile, conflicts, variables, uninstalled=False, dataonly=False): - deps.remove_dups() coredata = state.environment.get_coredata() if uninstalled: outdir = os.path.join(state.environment.build_dir, 'meson-uninstalled') @@ -460,6 +511,8 @@ class PkgConfigModule(ExtensionModule): if compiler: deps.add_cflags(compiler.get_feature_args({'versions': dversions}, None)) + deps.remove_dups() + def parse_variable_list(stringlist): reserved = ['prefix', 'libdir', 'includedir'] variables = [] diff --git a/mesonbuild/modules/qt.py b/mesonbuild/modules/qt.py index c2b1e01..c810df6 100644 --- a/mesonbuild/modules/qt.py +++ b/mesonbuild/modules/qt.py @@ -15,7 +15,7 @@ import os from .. import mlog from .. import build -from ..mesonlib import MesonException, extract_as_list, File, unholder +from ..mesonlib import MesonException, extract_as_list, File, unholder, version_compare from ..dependencies import Dependency, Qt4Dependency, Qt5Dependency, NonExistingExternalProgram import xml.etree.ElementTree as ET from . 
import ModuleReturnValue, get_include_args, ExtensionModule @@ -30,6 +30,7 @@ _QT_DEPS_LUT = { class QtBaseModule(ExtensionModule): tools_detected = False + rcc_supports_depfiles = False def __init__(self, interpreter, qt_version=5): ExtensionModule.__init__(self, interpreter) @@ -46,6 +47,11 @@ class QtBaseModule(ExtensionModule): if qt.found(): # Get all tools and then make sure that they are the right version self.moc, self.uic, self.rcc, self.lrelease = qt.compilers_detect(self.interpreter) + if version_compare(qt.version, '>=5.14.0'): + self.rcc_supports_depfiles = True + else: + mlog.warning('rcc dependencies will not work properly until you move to Qt >= 5.14:', + mlog.bold('https://bugreports.qt.io/browse/QTBUG-45460'), fatal=False) else: suffix = '-qt{}'.format(self.qt_version) self.moc = NonExistingExternalProgram(name='moc' + suffix) @@ -156,6 +162,9 @@ class QtBaseModule(ExtensionModule): 'output': name + '.cpp', 'command': [self.rcc, '-name', '@BASENAME@', '-o', '@OUTPUT@', rcc_extra_arguments, '@INPUT@'], 'depend_files': qrc_deps} + if self.rcc_supports_depfiles: + rcc_kwargs['depfile'] = name + '.d' + rcc_kwargs['command'] += ['--depfile', '@DEPFILE@'] res_target = build.CustomTarget(name, state.subdir, state.subproject, rcc_kwargs) sources.append(res_target) if ui_files: diff --git a/mesonbuild/modules/qt4.py b/mesonbuild/modules/qt4.py index 81a1055..e85a150 100644 --- a/mesonbuild/modules/qt4.py +++ b/mesonbuild/modules/qt4.py @@ -12,7 +12,6 @@ # See the License for the specific language governing permissions and # limitations under the License. -from .. import mlog from .qt import QtBaseModule @@ -23,6 +22,4 @@ class Qt4Module(QtBaseModule): def initialize(*args, **kwargs): - mlog.warning('rcc dependencies will not work properly until this upstream issue is fixed:', - mlog.bold('https://bugreports.qt.io/browse/QTBUG-45460'), fatal=False) return Qt4Module(*args, **kwargs) diff --git a/mesonbuild/modules/qt5.py b/mesonbuild/modules/qt5.py index 244a217..873c2db 100644 --- a/mesonbuild/modules/qt5.py +++ b/mesonbuild/modules/qt5.py @@ -12,7 +12,6 @@ # See the License for the specific language governing permissions and # limitations under the License. -from .. 
import mlog from .qt import QtBaseModule @@ -23,6 +22,4 @@ class Qt5Module(QtBaseModule): def initialize(*args, **kwargs): - mlog.warning('rcc dependencies will not work reliably until this upstream issue is fixed:', - mlog.bold('https://bugreports.qt.io/browse/QTBUG-45460'), fatal=False) return Qt5Module(*args, **kwargs) diff --git a/mesonbuild/mtest.py b/mesonbuild/mtest.py index 4aafe62..d7fe54a 100644 --- a/mesonbuild/mtest.py +++ b/mesonbuild/mtest.py @@ -206,7 +206,7 @@ class TAPParser: explanation = explanation.strip() if explanation else None if directive is not None: directive = directive.upper() - if directive == 'SKIP': + if directive.startswith('SKIP'): if ok: yield self.Test(num, name, TestResult.SKIP, explanation) return @@ -489,7 +489,7 @@ class TestRun: failed = True elif isinstance(i, TAPParser.Test): results.append(i.result) - if i.result not in {TestResult.OK, TestResult.EXPECTEDFAIL}: + if i.result not in {TestResult.OK, TestResult.EXPECTEDFAIL, TestResult.SKIP}: failed = True elif isinstance(i, TAPParser.Error): results.append(TestResult.ERROR) @@ -794,6 +794,7 @@ class TestHarness: def __init__(self, options: argparse.Namespace): self.options = options self.collected_logs = [] # type: T.List[str] + self.collected_failures = [] # type: T.List[str] self.fail_count = 0 self.expectedfail_count = 0 self.unexpectedpass_count = 0 @@ -906,8 +907,9 @@ class TestHarness: if result.res is TestResult.FAIL: result_str += ' ' + returncode_to_status(result.returncode) if not self.options.quiet or result.res not in ok_statuses: - if result.res not in ok_statuses and mlog.colorize_console: + if result.res not in ok_statuses and mlog.colorize_console(): if result.res in bad_statuses: + self.collected_failures.append(result_str) decorator = mlog.red elif result.res is TestResult.SKIP: decorator = mlog.yellow @@ -928,7 +930,11 @@ class TestHarness: self.junit.log(name, result) def print_summary(self) -> None: - msg = textwrap.dedent(''' + # Prepend a list of failures + msg = '' if len(self.collected_failures) < 1 else "\nSummary of Failures:\n\n" + msg += '\n'.join(self.collected_failures) + msg += textwrap.dedent(''' + Ok: {:<4} Expected Fail: {:<4} Fail: {:<4} @@ -1128,8 +1134,8 @@ class TestHarness: break self.drain_futures(futures) - self.print_summary() self.print_collected_logs() + self.print_summary() if self.logfilename: print('Full log written to {}'.format(self.logfilename)) diff --git a/mesonbuild/optinterpreter.py b/mesonbuild/optinterpreter.py index d47a3d2..a6fd503 100644 --- a/mesonbuild/optinterpreter.py +++ b/mesonbuild/optinterpreter.py @@ -22,7 +22,7 @@ from . import mesonlib from . import mparser from .interpreterbase import FeatureNew -forbidden_option_names = set(coredata.builtin_options.keys()) +forbidden_option_names = set(coredata.BUILTIN_OPTIONS.keys()) forbidden_prefixes = [lang + '_' for lang in compilers.all_languages] + ['b_', 'backend_'] reserved_prefixes = ['cross_'] diff --git a/mesonbuild/scripts/depfixer.py b/mesonbuild/scripts/depfixer.py index a3a3eff..4176b9a 100644 --- a/mesonbuild/scripts/depfixer.py +++ b/mesonbuild/scripts/depfixer.py @@ -323,7 +323,8 @@ class Elf(DataSizes): new_rpath = b':'.join(new_rpaths) if len(old_rpath) < len(new_rpath): - sys.exit("New rpath must not be longer than the old one.") + msg = "New rpath must not be longer than the old one.\n Old: {}\n New: {}".format(old_rpath, new_rpath) + sys.exit(msg) # The linker does read-only string deduplication. 
If there is a # string that shares a suffix with the rpath, they might get # dedupped. This means changing the rpath string might break something diff --git a/mesonbuild/scripts/gettext.py b/mesonbuild/scripts/gettext.py index f5c0421..7042863 100644 --- a/mesonbuild/scripts/gettext.py +++ b/mesonbuild/scripts/gettext.py @@ -83,8 +83,7 @@ def do_install(src_sub, bld_sub, dest, pkgname, langs): pkgname + '.mo') tempfile = outfile + '.tmp' os.makedirs(os.path.dirname(outfile), exist_ok=True) - shutil.copyfile(srcfile, tempfile) - shutil.copystat(srcfile, tempfile) + shutil.copy2(srcfile, tempfile) os.replace(tempfile, outfile) print('Installing %s to %s' % (srcfile, outfile)) return 0 diff --git a/mesonbuild/scripts/symbolextractor.py b/mesonbuild/scripts/symbolextractor.py index 5240275..f4084be 100644 --- a/mesonbuild/scripts/symbolextractor.py +++ b/mesonbuild/scripts/symbolextractor.py @@ -124,9 +124,11 @@ def gnu_syms(libfilename: str, outfilename: str): def solaris_syms(libfilename: str, outfilename: str): # gnu_syms() works with GNU nm & readelf, not Solaris nm & elfdump origpath = os.environ['PATH'] - os.environ['PATH'] = '/usr/gnu/bin:' + origpath - gnu_syms(libfilename, outfilename) - os.environ['PATH'] = origpath + try: + os.environ['PATH'] = '/usr/gnu/bin:' + origpath + gnu_syms(libfilename, outfilename) + finally: + os.environ['PATH'] = origpath def osx_syms(libfilename: str, outfilename: str): # Get the name of the library diff --git a/mesonbuild/scripts/yelphelper.py b/mesonbuild/scripts/yelphelper.py index 95c8c9c..6bf0673 100644 --- a/mesonbuild/scripts/yelphelper.py +++ b/mesonbuild/scripts/yelphelper.py @@ -68,8 +68,7 @@ def install_help(srcdir, blddir, sources, media, langs, install_dir, destdir, pr infile = os.path.join(srcdir if lang == 'C' else blddir, lang, source) outfile = os.path.join(indir, source) mlog.log('Installing %s to %s' % (infile, outfile)) - shutil.copyfile(infile, outfile) - shutil.copystat(infile, outfile) + shutil.copy2(infile, outfile) for m in media: infile = os.path.join(srcdir, lang, m) outfile = os.path.join(indir, m) diff --git a/mesonbuild/wrap/wrap.py b/mesonbuild/wrap/wrap.py index 54daaf3..aba220e 100644 --- a/mesonbuild/wrap/wrap.py +++ b/mesonbuild/wrap/wrap.py @@ -61,7 +61,10 @@ def quiet_git(cmd: T.List[str], workingdir: str) -> T.Tuple[bool, str]: def verbose_git(cmd: T.List[str], workingdir: str, check: bool = False) -> bool: if not GIT: return False - return git(cmd, workingdir, check=check).returncode == 0 + try: + return git(cmd, workingdir, check=check).returncode == 0 + except subprocess.CalledProcessError: + raise WrapException('Git command failed') def whitelist_wrapdb(urlstr: str) -> urllib.parse.ParseResult: """ raises WrapException if not whitelisted subdomain """ diff --git a/msi/createmsi.py b/msi/createmsi.py index 67020a5..eca4493 100644 --- a/msi/createmsi.py +++ b/msi/createmsi.py @@ -151,8 +151,6 @@ class PackageGenerator: if os.path.exists(sdir): shutil.rmtree(sdir) main_stage, ninja_stage = self.staging_dirs - dep_data_dir = 'mesonbuild/dependencies/data' - cmake_data_dir = 'mesonbuild/cmake/data' modules = self.get_all_modules_from_dir('mesonbuild/modules') modules += self.get_all_modules_from_dir('mesonbuild/scripts') modules += self.get_more_modules() @@ -174,8 +172,6 @@ class PackageGenerator: pyinst_cmd += ['meson.py'] subprocess.check_call(pyinst_cmd) shutil.move(pyinstaller_tmpdir + '/meson', main_stage) - shutil.copytree(dep_data_dir, main_stage + '/mesonbuild/dependencies/data') - 
shutil.copytree(cmake_data_dir, main_stage + '/mesonbuild/cmake/data') if not os.path.exists(os.path.join(main_stage, 'meson.exe')): sys.exit('Meson exe missing from staging dir.') os.mkdir(ninja_stage) diff --git a/run_meson_command_tests.py b/run_meson_command_tests.py index 6ed3d8f..7bc6185 100755 --- a/run_meson_command_tests.py +++ b/run_meson_command_tests.py @@ -129,6 +129,10 @@ class CommandTests(unittest.TestCase): os.environ['PYTHONPATH'] = os.path.join(str(pylibdir), '') os.environ['PATH'] = str(bindir) + os.pathsep + os.environ['PATH'] self._run(python_command + ['setup.py', 'install', '--prefix', str(prefix)]) + # Fix importlib-metadata by appending all dirs in pylibdir + PYTHONPATHS = [pylibdir] + [x for x in pylibdir.iterdir()] + PYTHONPATHS = [os.path.join(str(x), '') for x in PYTHONPATHS] + os.environ['PYTHONPATH'] = os.pathsep.join(PYTHONPATHS) # Check that all the files were installed correctly self.assertTrue(bindir.is_dir()) self.assertTrue(pylibdir.is_dir()) diff --git a/run_project_tests.py b/run_project_tests.py index c368253..56b7e2a 100755 --- a/run_project_tests.py +++ b/run_project_tests.py @@ -41,6 +41,7 @@ from mesonbuild import compilers from mesonbuild import mesonlib from mesonbuild import mlog from mesonbuild import mtest +from mesonbuild.build import ConfigurationData from mesonbuild.mesonlib import MachineChoice, Popen_safe from mesonbuild.coredata import backendlist, version as meson_version @@ -361,11 +362,10 @@ def _run_ci_include(args: T.List[str]) -> str: if not args: return 'At least one parameter required' try: - file_path = Path(args[0]) - data = file_path.open(errors='ignore', encoding='utf-8').read() + data = Path(args[0]).read_text(errors='ignore', encoding='utf-8') return 'Included file {}:\n{}\n'.format(args[0], data) except Exception: - return 'Failed to open {} ({})'.format(args[0]) + return 'Failed to open {}'.format(args[0]) ci_commands = { 'ci_include': _run_ci_include @@ -476,6 +476,28 @@ def create_deterministic_builddir(test: TestDef, use_tmpdir: bool) -> str: os.mkdir(abs_pathname) return abs_pathname +def format_parameter_file(file_basename: str, test: TestDef, test_build_dir: str) -> Path: + confdata = ConfigurationData() + confdata.values = {'MESON_TEST_ROOT': (str(test.path.absolute()), 'base directory of current test')} + + template = test.path / (file_basename + '.in') + destination = Path(test_build_dir) / file_basename + mesonlib.do_conf_file(str(template), str(destination), confdata, 'meson') + + return destination + +def detect_parameter_files(test: TestDef, test_build_dir: str) -> (Path, Path): + nativefile = test.path / 'nativefile.ini' + crossfile = test.path / 'crossfile.ini' + + if os.path.exists(str(test.path / 'nativefile.ini.in')): + nativefile = format_parameter_file('nativefile.ini', test, test_build_dir) + + if os.path.exists(str(test.path / 'crossfile.ini.in')): + crossfile = format_parameter_file('crossfile.ini', test, test_build_dir) + + return nativefile, crossfile + def run_test(test: TestDef, extra_args, compiler, backend, flags, commands, should_fail, use_tmp: bool): if test.skip: return None @@ -498,8 +520,9 @@ def _run_test(test: TestDef, test_build_dir: str, install_dir: str, extra_args, if 'libdir' not in test.do_not_set_opts: gen_args += ['--libdir', 'lib'] gen_args += [test.path.as_posix(), test_build_dir] + flags + extra_args - nativefile = test.path / 'nativefile.ini' - crossfile = test.path / 'crossfile.ini' + + nativefile, crossfile = detect_parameter_files(test, test_build_dir) + if 
nativefile.exists(): gen_args.extend(['--native-file', nativefile.as_posix()]) if crossfile.exists(): @@ -939,7 +962,7 @@ def detect_tests_to_run(only: T.List[str], use_tmp: bool) -> T.List[T.Tuple[str, # CUDA tests on Windows: use Ninja backend: python run_project_tests.py --only cuda --backend ninja TestCategory('cuda', 'cuda', backend not in (Backend.ninja, Backend.xcode) or not shutil.which('nvcc')), TestCategory('python3', 'python3', backend is not Backend.ninja), - TestCategory('python', 'python', backend is not Backend.ninja), + TestCategory('python', 'python'), TestCategory('fpga', 'fpga', shutil.which('yosys') is None), TestCategory('frameworks', 'frameworks'), TestCategory('nasm', 'nasm'), @@ -1116,16 +1139,15 @@ def check_format(): '.build', '.md', } + skip_dirs = { + '.dub', # external deps are here + '.pytest_cache', + 'meson-logs', 'meson-private', + '.eggs', '_cache', # e.g. .mypy_cache + 'venv', # virtualenvs have DOS line endings + } for (root, _, filenames) in os.walk('.'): - if '.dub' in root: # external deps are here - continue - if '.pytest_cache' in root: - continue - if 'meson-logs' in root or 'meson-private' in root: - continue - if '__CMake_build' in root: - continue - if '.eggs' in root or '_cache' in root: # e.g. .mypy_cache + if any([x in root for x in skip_dirs]): continue for fname in filenames: file = Path(fname) @@ -1249,6 +1271,7 @@ if __name__ == '__main__': options.extra_args += ['--cross-file', options.cross_file] print('Meson build system', meson_version, 'Project Tests') + print('Using python', sys.version.split('\n')[0]) setup_commands(options.backend) detect_system_compiler(options) print_tool_versions() diff --git a/run_unittests.py b/run_unittests.py index 820b705..2012542 100755 --- a/run_unittests.py +++ b/run_unittests.py @@ -1272,7 +1272,6 @@ class InternalTests(unittest.TestCase): self.assertFalse(errors) - @unittest.skipIf(is_tarball(), 'Skipping because this is a tarball release') class DataTests(unittest.TestCase): @@ -1373,8 +1372,8 @@ class DataTests(unittest.TestCase): found_entries |= options self.assertEqual(found_entries, set([ - *mesonbuild.coredata.builtin_options.keys(), - *mesonbuild.coredata.builtin_options_per_machine.keys() + *mesonbuild.coredata.BUILTIN_OPTIONS.keys(), + *mesonbuild.coredata.BUILTIN_OPTIONS_PER_MACHINE.keys() ])) # Check that `buildtype` table inside `Core options` matches how @@ -1485,6 +1484,38 @@ class DataTests(unittest.TestCase): astint = AstInterpreter('.', '', '') self.assertEqual(set(interp.funcs.keys()), set(astint.funcs.keys())) + def test_mesondata_is_up_to_date(self): + from mesonbuild.mesondata import mesondata + err_msg = textwrap.dedent(''' + + ########################################################### + ### mesonbuild.mesondata is not up-to-date ### + ### Please regenerate it by running tools/gen_data.py ### + ########################################################### + + ''') + + root_dir = Path(__file__).resolve().parent + mesonbuild_dir = root_dir / 'mesonbuild' + + data_dirs = mesonbuild_dir.glob('**/data') + data_files = [] # type: T.List[T.Tuple(str, str)] + + for i in data_dirs: + for p in i.iterdir(): + data_files += [(p.relative_to(mesonbuild_dir).as_posix(), hashlib.sha256(p.read_bytes()).hexdigest())] + + from pprint import pprint + current_files = set(mesondata.keys()) + scanned_files = set([x[0] for x in data_files]) + + self.assertSetEqual(current_files, scanned_files, err_msg + 'Data files were added or removed\n') + errors = [] + for i in data_files: + if 
mesondata[i[0]].sha256sum != i[1]: + errors += [i[0]] + + self.assertListEqual(errors, [], err_msg + 'Files were changed') class BasePlatformTests(unittest.TestCase): prefix = '/usr' @@ -3006,7 +3037,7 @@ int main(int argc, char **argv) { test. Needs to be a unit test because it accesses Meson internals. ''' testdir = os.path.join(self.common_test_dir, '154 reserved targets') - targets = mesonbuild.coredata.forbidden_target_names + targets = mesonbuild.coredata.FORBIDDEN_TARGET_NAMES # We don't actually define a target with this name targets.pop('build.ninja') # Remove this to avoid multiple entries with the same name @@ -3999,7 +4030,7 @@ recommended as it is not supported on some platforms''') self.__reconfigure() out = self.init(testdir, extra_args=['--reconfigure', '-Dopt3=val3']) - self.assertRegex(out, 'WARNING:.*Regenerating configuration from scratch') + self.assertRegex(out, 'Regenerating configuration from scratch') self.assertRegex(out, 'opt1 val1') self.assertRegex(out, 'opt2 val2') self.assertRegex(out, 'opt3 val3') @@ -4036,7 +4067,7 @@ recommended as it is not supported on some platforms''') self.__reconfigure(change_minor=True) out = self.init(testdir, extra_args=['--reconfigure', '-Dopt3=val3']) - self.assertNotRegex(out, 'WARNING:.*Regenerating configuration from scratch') + self.assertNotRegex(out, 'Regenerating configuration from scratch') self.assertRegex(out, 'opt1 val1') self.assertRegex(out, 'opt2 val2') self.assertRegex(out, 'opt3 val3') @@ -4635,6 +4666,10 @@ recommended as it is not supported on some platforms''') no: NO coma list: a, b, c + Plugins + long coma list: alpha, alphacolor, apetag, audiofx, audioparsers, auparse, + autodetect, avi + Subprojects sub: YES sub2: NO Problem encountered: This subproject failed @@ -4850,6 +4885,18 @@ recommended as it is not supported on some platforms''') m = get_data_pattern(command).search(md, pos=md_command_sections[command][0], endpos=md_command_sections[command][1]) self.assertIsNotNone(m, 'Command `{}` is missing placeholders for dynamic data. 
Doc file: `{}`'.format(command, doc_path)) + def _check_coverage_files(self, types=('text', 'xml', 'html')): + covdir = Path(self.builddir) / 'meson-logs' + files = [] + if 'text' in types: + files.append('coverage.txt') + if 'xml' in types: + files.append('coverage.xml') + if 'html' in types: + files.append('coveragereport/index.html') + for f in files: + self.assertTrue((covdir / f).is_file(), msg='{} is not a file'.format(f)) + def test_coverage(self): if mesonbuild.environment.detect_msys2_arch(): raise unittest.SkipTest('Skipped due to problems with coverage on MSYS2') @@ -4868,6 +4915,27 @@ recommended as it is not supported on some platforms''') self.build() self.run_tests() self.run_target('coverage') + self._check_coverage_files() + + def test_coverage_complex(self): + if mesonbuild.environment.detect_msys2_arch(): + raise unittest.SkipTest('Skipped due to problems with coverage on MSYS2') + gcovr_exe, gcovr_new_rootdir = mesonbuild.environment.detect_gcovr() + if not gcovr_exe: + raise unittest.SkipTest('gcovr not found, or too old') + testdir = os.path.join(self.common_test_dir, '109 generatorcustom') + env = get_fake_env(testdir, self.builddir, self.prefix) + cc = env.detect_c_compiler(MachineChoice.HOST) + if cc.get_id() == 'clang': + if not mesonbuild.environment.detect_llvm_cov(): + raise unittest.SkipTest('llvm-cov not found') + if cc.get_id() == 'msvc': + raise unittest.SkipTest('Test only applies to non-MSVC compilers') + self.init(testdir, extra_args=['-Db_coverage=true']) + self.build() + self.run_tests() + self.run_target('coverage') + self._check_coverage_files() def test_coverage_html(self): if mesonbuild.environment.detect_msys2_arch(): @@ -4887,6 +4955,7 @@ recommended as it is not supported on some platforms''') self.build() self.run_tests() self.run_target('coverage-html') + self._check_coverage_files(['html']) def test_coverage_text(self): if mesonbuild.environment.detect_msys2_arch(): @@ -4906,6 +4975,7 @@ recommended as it is not supported on some platforms''') self.build() self.run_tests() self.run_target('coverage-text') + self._check_coverage_files(['text']) def test_coverage_xml(self): if mesonbuild.environment.detect_msys2_arch(): @@ -4925,6 +4995,7 @@ recommended as it is not supported on some platforms''') self.build() self.run_tests() self.run_target('coverage-xml') + self._check_coverage_files(['xml']) def test_cross_file_constants(self): with temp_filename() as crossfile1, temp_filename() as crossfile2: @@ -4960,7 +5031,7 @@ recommended as it is not supported on some platforms''') def test_wrap_git(self): with tempfile.TemporaryDirectory() as tmpdir: srcdir = os.path.join(tmpdir, 'src') - shutil.copytree(os.path.join(self.unit_test_dir, '78 wrap-git'), srcdir) + shutil.copytree(os.path.join(self.unit_test_dir, '81 wrap-git'), srcdir) upstream = os.path.join(srcdir, 'subprojects', 'wrap_git_upstream') upstream_uri = Path(upstream).as_uri() _git_init(upstream) @@ -4975,6 +5046,37 @@ recommended as it is not supported on some platforms''') self.build() self.run_tests() + def test_multi_output_custom_target_no_warning(self): + testdir = os.path.join(self.common_test_dir, '235 custom_target source') + + out = self.init(testdir) + self.assertNotRegex(out, 'WARNING:.*Using the first one.') + self.build() + self.run_tests() + + @unittest.skipUnless(is_linux() and (re.search('^i.86$|^x86$|^x64$|^x86_64$|^amd64$', platform.processor()) is not None), + 'Requires ASM compiler for x86 or x86_64 platform currently only available on Linux CI runners') + def 
test_nostdlib(self): + testdir = os.path.join(self.unit_test_dir, '79 nostdlib') + machinefile = os.path.join(self.builddir, 'machine.txt') + with open(machinefile, 'w') as f: + f.write(textwrap.dedent(''' + [properties] + c_stdlib = 'mylibc' + ''')) + + # Test native C stdlib + self.meson_native_file = machinefile + self.init(testdir) + self.build() + + # Test cross C stdlib + self.new_builddir() + self.meson_native_file = None + self.meson_cross_file = machinefile + self.init(testdir) + self.build() + class FailureTests(BasePlatformTests): ''' Tests that test failure conditions. Build files here should be dynamically @@ -5782,6 +5884,19 @@ class LinuxlikeTests(BasePlatformTests): out = self._run(cmd + ['--libs'], override_envvars=env).strip().split() self.assertEqual(out, ['-llibmain2', '-llibinternal']) + # See common/47 pkgconfig-gen/meson.build for description of the case this test + with open(os.path.join(privatedir1, 'simple2.pc')) as f: + content = f.read() + self.assertIn('Libs: -L${libdir} -lsimple2 -lz -lsimple1', content) + + with open(os.path.join(privatedir1, 'simple3.pc')) as f: + content = f.read() + self.assertEqual(1, content.count('-lsimple3')) + + with open(os.path.join(privatedir1, 'simple5.pc')) as f: + content = f.read() + self.assertNotIn('-lstat2', content) + def test_pkgconfig_uninstalled(self): testdir = os.path.join(self.common_test_dir, '47 pkgconfig-gen') self.init(testdir) @@ -6406,7 +6521,7 @@ class LinuxlikeTests(BasePlatformTests): if is_osx(): raise unittest.SkipTest('Global RPATHs via LDFLAGS not yet supported on MacOS (does anybody need it?)') - testdir = os.path.join(self.unit_test_dir, '77 global-rpath') + testdir = os.path.join(self.unit_test_dir, '80 global-rpath') oldinstalldir = self.installdir # Build and install an external library without DESTDIR. @@ -6419,19 +6534,34 @@ class LinuxlikeTests(BasePlatformTests): self.init(yonder_dir) self.build() self.install(use_destdir=False) - self.new_builddir() - # Build an app that uses that installed library. - # Supply the rpath to the installed library via LDFLAGS - # (as systems like buildroot and guix are wont to do) - # and verify install preserves that rpath. - env = {'LDFLAGS': '-Wl,-rpath=' + yonder_libdir, - 'PKG_CONFIG_PATH': os.path.join(yonder_libdir, 'pkgconfig')} - self.init(testdir, override_envvars=env) - self.build() - self.install(use_destdir=False) - got_rpath = get_rpath(os.path.join(yonder_prefix, 'bin/rpathified')) - self.assertEqual(got_rpath, yonder_libdir) + # Since rpath has multiple valid formats we need to + # test that they are all properly used. + rpath_formats = [ + ('-Wl,-rpath=', False), + ('-Wl,-rpath,', False), + ('-Wl,--just-symbols=', True), + ('-Wl,--just-symbols,', True), + ('-Wl,-R', False), + ('-Wl,-R,', False) + ] + for rpath_format, exception in rpath_formats: + # Build an app that uses that installed library. + # Supply the rpath to the installed library via LDFLAGS + # (as systems like buildroot and guix are wont to do) + # and verify install preserves that rpath. 
+ self.new_builddir() + env = {'LDFLAGS': rpath_format + yonder_libdir, + 'PKG_CONFIG_PATH': os.path.join(yonder_libdir, 'pkgconfig')} + if exception: + with self.assertRaises(subprocess.CalledProcessError): + self.init(testdir, override_envvars=env) + break + self.init(testdir, override_envvars=env) + self.build() + self.install(use_destdir=False) + got_rpath = get_rpath(os.path.join(yonder_prefix, 'bin/rpathified')) + self.assertEqual(got_rpath, yonder_libdir, rpath_format) @skip_if_not_base_option('b_sanitize') def test_pch_with_address_sanitizer(self): @@ -6764,7 +6894,7 @@ class LinuxlikeTests(BasePlatformTests): oldinstalldir = self.installdir # Build and install both external libraries without DESTDIR - val1dir = os.path.join(self.unit_test_dir, '76 pkgconfig prefixes', 'val1') + val1dir = os.path.join(self.unit_test_dir, '77 pkgconfig prefixes', 'val1') val1prefix = os.path.join(oldinstalldir, 'val1') self.prefix = val1prefix self.installdir = val1prefix @@ -6775,7 +6905,7 @@ class LinuxlikeTests(BasePlatformTests): env1 = {} env1['PKG_CONFIG_PATH'] = os.path.join(val1prefix, self.libdir, 'pkgconfig') - val2dir = os.path.join(self.unit_test_dir, '76 pkgconfig prefixes', 'val2') + val2dir = os.path.join(self.unit_test_dir, '77 pkgconfig prefixes', 'val2') val2prefix = os.path.join(oldinstalldir, 'val2') self.prefix = val2prefix self.installdir = val2prefix @@ -6787,7 +6917,7 @@ class LinuxlikeTests(BasePlatformTests): # Build, install, and run the client program env2 = {} env2['PKG_CONFIG_PATH'] = os.path.join(val2prefix, self.libdir, 'pkgconfig') - testdir = os.path.join(self.unit_test_dir, '76 pkgconfig prefixes', 'client') + testdir = os.path.join(self.unit_test_dir, '77 pkgconfig prefixes', 'client') testprefix = os.path.join(oldinstalldir, 'client') self.prefix = testprefix self.installdir = testprefix @@ -7030,6 +7160,31 @@ c = ['{0}'] windows_proof_rmtree(os.path.join(testdir, 'subprojects', 'foo')) os.unlink(wrap_filename) + def test_no_rpath_for_static(self): + testdir = os.path.join(self.common_test_dir, '5 linkstatic') + self.init(testdir) + self.build() + build_rpath = get_rpath(os.path.join(self.builddir, 'prog')) + self.assertIsNone(build_rpath) + + def test_lookup_system_after_broken_fallback(self): + # Just to generate libfoo.pc so we can test system dependency lookup. + testdir = os.path.join(self.common_test_dir, '47 pkgconfig-gen') + self.init(testdir) + privatedir = self.privatedir + + # Write test project where the first dependency() returns not-found + # because 'broken' subproject does not exit, but that should not prevent + # the 2nd dependency() to lookup on system. + self.new_builddir() + with tempfile.TemporaryDirectory() as d: + with open(os.path.join(d, 'meson.build'), 'w') as f: + f.write(textwrap.dedent('''\ + project('test') + dependency('notfound', fallback: 'broken', required: false) + dependency('libfoo', fallback: 'broken', required: true) + ''')) + self.init(d, override_envvars={'PKG_CONFIG_LIBDIR': privatedir}) class BaseLinuxCrossTests(BasePlatformTests): # Don't pass --libdir when cross-compiling. We have tests that @@ -7091,7 +7246,7 @@ class LinuxCrossArmTests(BaseLinuxCrossTests): def test_cross_libdir_subproject(self): # Guard against a regression where calling "subproject" # would reset the value of libdir to its default value. 
- testdir = os.path.join(self.unit_test_dir, '76 subdir libdir') + testdir = os.path.join(self.unit_test_dir, '78 subdir libdir') self.init(testdir, extra_args=['--libdir=fuf']) for i in self.introspect('--buildoptions'): if i['name'] == 'libdir': @@ -7583,7 +7738,12 @@ class NativeFileTests(BasePlatformTests): for section, entries in values.items(): f.write('[{}]\n'.format(section)) for k, v in entries.items(): - f.write("{}='{}'\n".format(k, v)) + if isinstance(v, (bool, int, float)): + f.write("{}={}\n".format(k, v)) + elif isinstance(v, list): + f.write("{}=[{}]\n".format(k, ', '.join(["'{}'".format(w) for w in v]))) + else: + f.write("{}='{}'\n".format(k, v)) return filename def helper_create_binary_wrapper(self, binary, dir_=None, extra_args=None, **kwargs): @@ -7907,6 +8067,219 @@ class NativeFileTests(BasePlatformTests): self.init(testcase, extra_args=['--native-file', config]) self.build() + def test_user_options(self): + testcase = os.path.join(self.common_test_dir, '43 options') + for opt, value in [('testoption', 'some other val'), ('other_one', True), + ('combo_opt', 'one'), ('array_opt', ['two']), + ('integer_opt', 0)]: + config = self.helper_create_native_file({'project options': {opt: value}}) + with self.assertRaises(subprocess.CalledProcessError) as cm: + self.init(testcase, extra_args=['--native-file', config]) + self.assertRegex(cm.exception.stdout, r'Incorrect value to [a-z]+ option') + + def test_user_options_command_line_overrides(self): + testcase = os.path.join(self.common_test_dir, '43 options') + config = self.helper_create_native_file({'project options': {'other_one': True}}) + self.init(testcase, extra_args=['--native-file', config, '-Dother_one=false']) + + def test_user_options_subproject(self): + testcase = os.path.join(self.unit_test_dir, '79 user options for subproject') + + s = os.path.join(testcase, 'subprojects') + if not os.path.exists(s): + os.mkdir(s) + s = os.path.join(s, 'sub') + if not os.path.exists(s): + sub = os.path.join(self.common_test_dir, '43 options') + shutil.copytree(sub, s) + + for opt, value in [('testoption', 'some other val'), ('other_one', True), + ('combo_opt', 'one'), ('array_opt', ['two']), + ('integer_opt', 0)]: + config = self.helper_create_native_file({'sub:project options': {opt: value}}) + with self.assertRaises(subprocess.CalledProcessError) as cm: + self.init(testcase, extra_args=['--native-file', config]) + self.assertRegex(cm.exception.stdout, r'Incorrect value to [a-z]+ option') + + def test_option_bool(self): + # Bools are allowed to be unquoted + testcase = os.path.join(self.common_test_dir, '1 trivial') + config = self.helper_create_native_file({'built-in options': {'werror': True}}) + self.init(testcase, extra_args=['--native-file', config]) + configuration = self.introspect('--buildoptions') + for each in configuration: + # Test that no-per subproject options are inherited from the parent + if 'werror' in each['name']: + self.assertEqual(each['value'], True) + break + else: + self.fail('Did not find werror in build options?') + + def test_option_integer(self): + # Bools are allowed to be unquoted + testcase = os.path.join(self.common_test_dir, '1 trivial') + config = self.helper_create_native_file({'built-in options': {'unity_size': 100}}) + self.init(testcase, extra_args=['--native-file', config]) + configuration = self.introspect('--buildoptions') + for each in configuration: + # Test that no-per subproject options are inherited from the parent + if 'unity_size' in each['name']: + self.assertEqual(each['value'], 
100) + break + else: + self.fail('Did not find unity_size in build options?') + + def test_builtin_options(self): + testcase = os.path.join(self.common_test_dir, '2 cpp') + config = self.helper_create_native_file({'built-in options': {'cpp_std': 'c++14'}}) + + self.init(testcase, extra_args=['--native-file', config]) + configuration = self.introspect('--buildoptions') + for each in configuration: + if each['name'] == 'cpp_std': + self.assertEqual(each['value'], 'c++14') + break + else: + self.fail('Did not find cpp_std in build options?') + + def test_builtin_options_env_overrides_conf(self): + testcase = os.path.join(self.common_test_dir, '2 cpp') + config = self.helper_create_native_file({'built-in options': {'pkg_config_path': '/foo'}}) + + self.init(testcase, extra_args=['--native-file', config], override_envvars={'PKG_CONFIG_PATH': '/bar'}) + configuration = self.introspect('--buildoptions') + for each in configuration: + if each['name'] == 'pkg_config_path': + self.assertEqual(each['value'], ['/bar']) + break + else: + self.fail('Did not find pkg_config_path in build options?') + + def test_builtin_options_subprojects(self): + testcase = os.path.join(self.common_test_dir, '102 subproject subdir') + config = self.helper_create_native_file({'built-in options': {'default_library': 'both', 'c_args': ['-Dfoo']}, 'sub:built-in options': {'default_library': 'static'}}) + + self.init(testcase, extra_args=['--native-file', config]) + configuration = self.introspect('--buildoptions') + found = 0 + for each in configuration: + # Test that non-per-subproject options are inherited from the parent + if 'c_args' in each['name']: + # This path will be hit twice, once for build and once for host. + self.assertEqual(each['value'], ['-Dfoo']) + found += 1 + elif each['name'] == 'default_library': + self.assertEqual(each['value'], 'both') + found += 1 + elif each['name'] == 'sub:default_library': + self.assertEqual(each['value'], 'static') + found += 1 + self.assertEqual(found, 4, 'Did not find all four expected options') + + def test_builtin_options_subprojects_overrides_buildfiles(self): + # If the buildfile says subproject(... default_library: shared), ensure that's overwritten + testcase = os.path.join(self.common_test_dir, '230 persubproject options') + config = self.helper_create_native_file({'sub2:built-in options': {'default_library': 'shared'}}) + + with self.assertRaises(subprocess.CalledProcessError) as cm: + self.init(testcase, extra_args=['--native-file', config]) + self.assertIn(cm.exception.stdout, 'Parent should override default_library') + + def test_builtin_options_subprojects_inherits_parent_override(self): + # If the buildfile says subproject(...
default_library: shared), ensure that's overwritten + testcase = os.path.join(self.common_test_dir, '230 persubproject options') + config = self.helper_create_native_file({'built-in options': {'default_library': 'both'}}) + + with self.assertRaises(subprocess.CalledProcessError) as cm: + self.init(testcase, extra_args=['--native-file', config]) + self.assertIn(cm.exception.stdout, 'Parent should override default_library') + + def test_builtin_options_compiler_properties(self): + # the properties section can have lang_args, and those need to be + # overwritten by the built-in options + testcase = os.path.join(self.common_test_dir, '1 trivial') + config = self.helper_create_native_file({ + 'built-in options': {'c_args': ['-DFOO']}, + 'properties': {'c_args': ['-DBAR']}, + }) + + self.init(testcase, extra_args=['--native-file', config]) + configuration = self.introspect('--buildoptions') + for each in configuration: + if each['name'] == 'c_args': + self.assertEqual(each['value'], ['-DFOO']) + break + else: + self.fail('Did not find c_args in build options?') + + def test_builtin_options_compiler_properties_legacy(self): + # The legacy placement in properties is still valid if a 'built-in + # options' setting is present, but doesn't have the lang_args + testcase = os.path.join(self.common_test_dir, '1 trivial') + config = self.helper_create_native_file({ + 'built-in options': {'default_library': 'static'}, + 'properties': {'c_args': ['-DBAR']}, + }) + + self.init(testcase, extra_args=['--native-file', config]) + configuration = self.introspect('--buildoptions') + for each in configuration: + if each['name'] == 'c_args': + self.assertEqual(each['value'], ['-DBAR']) + break + else: + self.fail('Did not find c_args in build options?') + + def test_builtin_options_paths(self): + # the properties section can have lang_args, and those need to be + # overwritten by the built-in options + testcase = os.path.join(self.common_test_dir, '1 trivial') + config = self.helper_create_native_file({ + 'built-in options': {'bindir': 'foo'}, + 'paths': {'bindir': 'bar'}, + }) + + self.init(testcase, extra_args=['--native-file', config]) + configuration = self.introspect('--buildoptions') + for each in configuration: + if each['name'] == 'bindir': + self.assertEqual(each['value'], 'foo') + break + else: + self.fail('Did not find bindir in build options?') + + def test_builtin_options_paths_legacy(self): + testcase = os.path.join(self.common_test_dir, '1 trivial') + config = self.helper_create_native_file({ + 'built-in options': {'default_library': 'static'}, + 'paths': {'bindir': 'bar'}, + }) + + self.init(testcase, extra_args=['--native-file', config]) + configuration = self.introspect('--buildoptions') + for each in configuration: + if each['name'] == 'bindir': + self.assertEqual(each['value'], 'bar') + break + else: + self.fail('Did not find bindir in build options?') + class CrossFileTests(BasePlatformTests): @@ -7916,6 +8289,11 @@ class CrossFileTests(BasePlatformTests): This is mainly aimed
to testing overrides from cross files. """ + def setUp(self): + super().setUp() + self.current_config = 0 + self.current_wrapper = 0 + def _cross_file_generator(self, *, needs_exe_wrapper: bool = False, exe_wrapper: T.Optional[T.List[str]] = None) -> str: if is_windows(): @@ -8044,6 +8422,21 @@ class CrossFileTests(BasePlatformTests): self.init(testdir, extra_args=['--cross-file=' + name], inprocess=True) self.wipe() + def helper_create_cross_file(self, values): + """Create a config file as a temporary file. + + values should be a nested dictionary structure of {section: {key: + value}} + """ + filename = os.path.join(self.builddir, 'generated{}.config'.format(self.current_config)) + self.current_config += 1 + with open(filename, 'wt') as f: + for section, entries in values.items(): + f.write('[{}]\n'.format(section)) + for k, v in entries.items(): + f.write("{}='{}'\n".format(k, v)) + return filename + def test_cross_file_dirs(self): testcase = os.path.join(self.unit_test_dir, '60 native file override') self.init(testcase, default_args=False, @@ -8100,6 +8493,89 @@ class CrossFileTests(BasePlatformTests): '-Ddef_sharedstatedir=sharedstatebar', '-Ddef_sysconfdir=sysconfbar']) + def test_user_options(self): + # This is just a touch test for cross file, since the implementation + # shares code after loading from the files + testcase = os.path.join(self.common_test_dir, '43 options') + config = self.helper_create_cross_file({'project options': {'testoption': 'some other value'}}) + with self.assertRaises(subprocess.CalledProcessError) as cm: + self.init(testcase, extra_args=['--cross-file', config]) + self.assertRegex(cm.exception.stdout, r'Incorrect value to [a-z]+ option') + + def test_builtin_options(self): + testcase = os.path.join(self.common_test_dir, '2 cpp') + config = self.helper_create_cross_file({'built-in options': {'cpp_std': 'c++14'}}) + + self.init(testcase, extra_args=['--cross-file', config]) + configuration = self.introspect('--buildoptions') + for each in configuration: + if each['name'] == 'cpp_std': + self.assertEqual(each['value'], 'c++14') + break + else: + self.fail('No c++ standard set?') + + def test_builtin_options_per_machine(self): + """Test options that are allowed to be set on a per-machine basis. + + Such options could be passed twice, once for the build machine, and + once for the host machine. I've picked pkg-config path, but any would + do that can be set for both. 
+ """ + testcase = os.path.join(self.common_test_dir, '2 cpp') + cross = self.helper_create_cross_file({'built-in options': {'pkg_config_path': '/cross/path', 'cpp_std': 'c++17'}}) + native = self.helper_create_cross_file({'built-in options': {'pkg_config_path': '/native/path', 'cpp_std': 'c++14'}}) + + # Ensure that PKG_CONFIG_PATH is not set in the environment + with mock.patch.dict('os.environ'): + for k in ['PKG_CONFIG_PATH', 'PKG_CONFIG_PATH_FOR_BUILD']: + try: + del os.environ[k] + except KeyError: + pass + self.init(testcase, extra_args=['--cross-file', cross, '--native-file', native]) + + configuration = self.introspect('--buildoptions') + found = 0 + for each in configuration: + if each['name'] == 'pkg_config_path': + self.assertEqual(each['value'], ['/cross/path']) + found += 1 + elif each['name'] == 'cpp_std': + self.assertEqual(each['value'], 'c++17') + found += 1 + elif each['name'] == 'build.pkg_config_path': + self.assertEqual(each['value'], ['/native/path']) + found += 1 + elif each['name'] == 'build.cpp_std': + self.assertEqual(each['value'], 'c++14') + found += 1 + + if found == 4: + break + self.assertEqual(found, 4, 'Did not find all sections.') + + def test_builtin_options_env_overrides_conf(self): + testcase = os.path.join(self.common_test_dir, '2 cpp') + config = self.helper_create_cross_file({'built-in options': {'pkg_config_path': '/foo'}}) + cross = self.helper_create_cross_file({'built-in options': {'pkg_config_path': '/foo'}}) + + self.init(testcase, extra_args=['--native-file', config, '--cross-file', cross], + override_envvars={'PKG_CONFIG_PATH': '/bar', 'PKG_CONFIG_PATH_FOR_BUILD': '/dir'}) + configuration = self.introspect('--buildoptions') + found = 0 + for each in configuration: + if each['name'] == 'pkg_config_path': + self.assertEqual(each['value'], ['/bar']) + found += 1 + elif each['name'] == 'build.pkg_config_path': + self.assertEqual(each['value'], ['/dir']) + found += 1 + if found == 2: + break + self.assertEqual(found, 2, 'Did not find all sections.') + + class TAPParserTests(unittest.TestCase): def assert_test(self, events, **kwargs): if 'explanation' not in kwargs: @@ -37,10 +37,6 @@ packages = ['mesonbuild', 'mesonbuild.scripts', 'mesonbuild.templates', 'mesonbuild.wrap'] -package_data = { - 'mesonbuild.dependencies': ['data/CMakeLists.txt', 'data/CMakeListsLLVM.txt', 'data/CMakePathInfo.txt'], - 'mesonbuild.cmake': ['data/run_ctgt.py', 'data/preload.cmake'], -} data_files = [] if sys.platform != 'win32': # Only useful on UNIX-like systems @@ -51,6 +47,5 @@ if __name__ == '__main__': setup(name='meson', version=version, packages=packages, - package_data=package_data, entry_points=entries, data_files=data_files,) diff --git a/test cases/cmake/11 cmake_module_path/cmake/FindSomethingLikePython.cmake b/test cases/cmake/11 cmake_module_path/cmake/FindSomethingLikePython.cmake index 4a189bf..0c663f4 100644 --- a/test cases/cmake/11 cmake_module_path/cmake/FindSomethingLikePython.cmake +++ b/test cases/cmake/11 cmake_module_path/cmake/FindSomethingLikePython.cmake @@ -1,24 +1,9 @@ cmake_policy(VERSION 3.7) -if(CMAKE_VERSION VERSION_GREATER_EQUAL 3.12) - find_package(Python COMPONENTS Interpreter) -else() - find_package(PythonInterp) -endif() - +find_package(Python COMPONENTS Interpreter) if(Python_FOUND OR PYTHONINTERP_FOUND) set(SomethingLikePython_FOUND ON) set(SomethingLikePython_EXECUTABLE ${Python_EXECUTABLE}) - - if(NOT DEFINED Python_VERSION) - set(Python_VERSION ${Python_VERSION_STRING}) - endif() - if(NOT TARGET Python::Interpreter) - 
add_executable(Python::Interpreter IMPORTED) - set_target_properties(Python::Interpreter PROPERTIES - IMPORTED_LOCATION ${Python_EXECUTABLE} - VERSION ${Python_VERSION}) - endif() else() set(SomethingLikePython_FOUND OFF) endif() diff --git a/test cases/cmake/11 cmake_module_path/meson.build b/test cases/cmake/11 cmake_module_path/meson.build index 2259268..e201936 100644 --- a/test cases/cmake/11 cmake_module_path/meson.build +++ b/test cases/cmake/11 cmake_module_path/meson.build @@ -1,7 +1,7 @@ # We use Python3 as it's the only thing guaranteed to be available on any platform Meson can run on (unlike Zlib in linuxlike/13 cmake dependency). -project('user CMake find_package module using cmake_module_path', - meson_version: '>= 0.50.0') +project('user CMake find_package module using cmake_module_path', ['c', 'cpp'], + meson_version: '>= 0.55.0') if not find_program('cmake', required: false).found() error('MESON_SKIP_TEST cmake binary not available.') @@ -15,3 +15,11 @@ endif dependency('SomethingLikePython', required : true, method : 'cmake', cmake_module_path : 'cmake', modules: 'Python::Interpreter') dependency('SomethingLikePython', method : 'cmake', cmake_module_path : ['doesNotExist', 'cmake'], modules: 'Python::Interpreter') + +# Test a custom target with Python::Interpreter in COMMAND +cm = import('cmake') +op = cm.subproject_options() +op.add_cmake_defines({'CMAKE_MODULE_PATH': meson.source_root() / 'cmake'}) +sp = cm.subproject('cmMod', options: op) +main = sp.target('main') +test('main', main) diff --git a/test cases/cmake/11 cmake_module_path/subprojects/cmMod/CMakeLists.txt b/test cases/cmake/11 cmake_module_path/subprojects/cmMod/CMakeLists.txt new file mode 100644 index 0000000..88ba9bc --- /dev/null +++ b/test cases/cmake/11 cmake_module_path/subprojects/cmMod/CMakeLists.txt @@ -0,0 +1,15 @@ +cmake_minimum_required(VERSION 3.5) + +project(cmMod) + +message(STATUS "CMAKE_MODULE_PATH: '${CMAKE_MODULE_PATH}'") + +find_package(SomethingLikePython REQUIRED) + +add_custom_command( + OUTPUT "${CMAKE_CURRENT_BINARY_DIR}/main.c" + COMMAND Python::Interpreter "${CMAKE_CURRENT_SOURCE_DIR}/gen.py" + WORKING_DIRECTORY "${CMAKE_CURRENT_BINARY_DIR}" +) + +add_executable(main "${CMAKE_CURRENT_BINARY_DIR}/main.c") diff --git a/test cases/cmake/11 cmake_module_path/subprojects/cmMod/gen.py b/test cases/cmake/11 cmake_module_path/subprojects/cmMod/gen.py new file mode 100644 index 0000000..5c71646 --- /dev/null +++ b/test cases/cmake/11 cmake_module_path/subprojects/cmMod/gen.py @@ -0,0 +1,9 @@ +with open('main.c', 'w') as fp: + print(''' +#include <stdio.h> + +int main(void) { + printf(\"Hello World\"); + return 0; +} +''', file=fp) diff --git a/test cases/cmake/11 cmake_module_path/test.json b/test cases/cmake/11 cmake_module_path/test.json new file mode 100644 index 0000000..79a2b60 --- /dev/null +++ b/test cases/cmake/11 cmake_module_path/test.json @@ -0,0 +1,5 @@ +{ + "tools": { + "cmake": ">=3.12" + } +} diff --git a/test cases/cmake/19 cmake file/foolib.cmake.in b/test cases/cmake/20 cmake file/foolib.cmake.in index 16e992b..16e992b 100644 --- a/test cases/cmake/19 cmake file/foolib.cmake.in +++ b/test cases/cmake/20 cmake file/foolib.cmake.in diff --git a/test cases/cmake/19 cmake file/meson.build b/test cases/cmake/20 cmake file/meson.build index 758bbee..758bbee 100644 --- a/test cases/cmake/19 cmake file/meson.build +++ b/test cases/cmake/20 cmake file/meson.build diff --git a/test cases/cmake/19 cmake file/test.json b/test cases/cmake/20 cmake file/test.json index a8c4ba3..a8c4ba3 
100644 --- a/test cases/cmake/19 cmake file/test.json +++ b/test cases/cmake/20 cmake file/test.json diff --git a/test cases/cmake/8 custom command/subprojects/cmMod/CMakeLists.txt b/test cases/cmake/8 custom command/subprojects/cmMod/CMakeLists.txt index 1498c36..199c2e9 100644 --- a/test cases/cmake/8 custom command/subprojects/cmMod/CMakeLists.txt +++ b/test cases/cmake/8 custom command/subprojects/cmMod/CMakeLists.txt @@ -134,6 +134,16 @@ add_custom_target(args_test_cmd ) add_custom_target(macro_name_cmd COMMAND macro_name) +if("${CMAKE_CXX_COMPILER_ID}" STREQUAL "GNU") + message(STATUS "Running the -include test case on macro_name") + add_custom_command( + OUTPUT "${CMAKE_CURRENT_BINARY_DIR}/cpyInc.hpp" + COMMAND mycpy "${CMAKE_CURRENT_SOURCE_DIR}/cpyInc.hpp.am" "${CMAKE_CURRENT_BINARY_DIR}/cpyInc.hpp" + DEPENDS "${CMAKE_CURRENT_SOURCE_DIR}/cpyInc.hpp.am" + ) + target_compile_options(macro_name PUBLIC -DTEST_CMD_INCLUDE -include "${CMAKE_CURRENT_BINARY_DIR}/cpyInc.hpp") +endif() + # Only executable targets are replaced in the command # all other target names are kept as is add_custom_target(clang-format COMMAND clang-format -i cmMod.cpp) diff --git a/test cases/cmake/8 custom command/subprojects/cmMod/cpyInc.hpp.am b/test cases/cmake/8 custom command/subprojects/cmMod/cpyInc.hpp.am new file mode 100644 index 0000000..07c8ff7 --- /dev/null +++ b/test cases/cmake/8 custom command/subprojects/cmMod/cpyInc.hpp.am @@ -0,0 +1,3 @@ +#pragma once + +#define CPY_INC_WAS_INCLUDED 1 diff --git a/test cases/cmake/8 custom command/subprojects/cmMod/macro_name.cpp b/test cases/cmake/8 custom command/subprojects/cmMod/macro_name.cpp index 790557b..964062f 100644 --- a/test cases/cmake/8 custom command/subprojects/cmMod/macro_name.cpp +++ b/test cases/cmake/8 custom command/subprojects/cmMod/macro_name.cpp @@ -5,6 +5,12 @@ using namespace std; +#ifdef TEST_CMD_INCLUDE +#if CPY_INC_WAS_INCLUDED != 1 +#error "cpyInc.hpp was not included" +#endif +#endif + int main() { this_thread::sleep_for(chrono::seconds(1)); ofstream out1("macro_name.txt"); diff --git a/test cases/common/109 generatorcustom/meson.build b/test cases/common/109 generatorcustom/meson.build index 17d27e5..b3f50bb 100644 --- a/test cases/common/109 generatorcustom/meson.build +++ b/test cases/common/109 generatorcustom/meson.build @@ -14,5 +14,7 @@ allinone = custom_target('alltogether', output : 'alltogether.h', command : [catter, '@INPUT@', '@OUTPUT@']) -executable('proggie', 'main.c', allinone) +proggie = executable('proggie', 'main.c', allinone) + +test('proggie', proggie) diff --git a/test cases/common/213 tap tests/cat.c b/test cases/common/213 tap tests/cat.c new file mode 100644 index 0000000..4b92010 --- /dev/null +++ b/test cases/common/213 tap tests/cat.c @@ -0,0 +1,26 @@ +#include <errno.h> +#include <stdio.h> + +int main(int argc, char **argv) { + char buf[1024]; + size_t len; + FILE *fh; + + if (argc != 2) { + fprintf(stderr, "Incorrect number of arguments, got %i\n", argc); + return 1; + } + fh = fopen(argv[1], "r"); + if (fh == NULL) { + fprintf(stderr, "Opening %s: errno=%i\n", argv[1], errno); + return 1; + } + do { + len = fread(buf, 1, sizeof(buf), fh); + if (len > 0) { + fwrite(buf, 1, len, stdout); + } + } while (len > 0); + fclose(fh); + return 0; +} diff --git a/test cases/common/213 tap tests/issue7515.txt b/test cases/common/213 tap tests/issue7515.txt new file mode 100644 index 0000000..ca85637 --- /dev/null +++ b/test cases/common/213 tap tests/issue7515.txt @@ -0,0 +1,27 @@ +1..26 +ok 1 Gtk overrides UI 
template sets up internal and public template children +ok 2 Gtk overrides UI template sets up public template children with the correct widgets +ok 3 Gtk overrides UI template sets up internal template children with the correct widgets +ok 4 Gtk overrides UI template connects template callbacks to the correct handler +ok 5 Gtk overrides UI template binds template callbacks to the correct object +ok 6 Gtk overrides UI template from resource sets up internal and public template children +ok 7 Gtk overrides UI template from resource sets up public template children with the correct widgets +ok 8 Gtk overrides UI template from resource sets up internal template children with the correct widgets +ok 9 Gtk overrides UI template from resource connects template callbacks to the correct handler +ok 10 Gtk overrides UI template from resource binds template callbacks to the correct object +ok 11 Gtk overrides UI template from file sets up internal and public template children +ok 12 Gtk overrides UI template from file sets up public template children with the correct widgets +ok 13 Gtk overrides UI template from file sets up internal template children with the correct widgets +ok 14 Gtk overrides UI template from file connects template callbacks to the correct handler +ok 15 Gtk overrides UI template from file binds template callbacks to the correct object +ok 16 Gtk overrides Class inheriting from template class sets up internal and public template children # SKIP pending +ok 17 Gtk overrides Class inheriting from template class sets up public template children with the correct widgets # SKIP pending +ok 18 Gtk overrides Class inheriting from template class sets up internal template children with the correct widgets # SKIP pending +ok 19 Gtk overrides Class inheriting from template class connects template callbacks to the correct handler # SKIP pending +ok 20 Gtk overrides Class inheriting from template class binds template callbacks to the correct object # SKIP pending +ok 21 Gtk overrides sets CSS names on classes +ok 22 Gtk overrides avoid crashing when GTK vfuncs are called in garbage collection +ok 23 Gtk overrides accepts string in place of GdkAtom +ok 24 Gtk overrides accepts null in place of GdkAtom as GDK_NONE +ok 25 Gtk overrides uses the correct GType for null child properties +ok 26 Gtk overrides can create a Gtk.TreeIter with accessible stamp field diff --git a/test cases/common/213 tap tests/meson.build b/test cases/common/213 tap tests/meson.build index 58529a7..5221319 100644 --- a/test cases/common/213 tap tests/meson.build +++ b/test cases/common/213 tap tests/meson.build @@ -1,10 +1,14 @@ project('test features', 'c') tester = executable('tester', 'tester.c') +cat = executable('cat', 'cat.c') test('pass', tester, args : ['ok'], protocol: 'tap') test('fail', tester, args : ['not ok'], should_fail: true, protocol: 'tap') test('xfail', tester, args : ['not ok # todo'], protocol: 'tap') test('xpass', tester, args : ['ok # todo'], should_fail: true, protocol: 'tap') test('skip', tester, args : ['ok # skip'], protocol: 'tap') +test('partially skipped', tester, args : ['ok 1\nok 2 # skip'], protocol: 'tap') +test('partially skipped (real-world example)', cat, args : [files('issue7515.txt')], protocol: 'tap') +test('skip comment', tester, args : ['ok # Skipped: with a comment'], protocol: 'tap') test('skip failure', tester, args : ['not ok # skip'], should_fail: true, protocol: 'tap') test('no tests', tester, args : ['1..0 # skip'], protocol: 'tap') diff --git a/test cases/common/222 
source set realistic example/meson.build b/test cases/common/222 source set realistic example/meson.build index 106b81d..d986b99 100644 --- a/test cases/common/222 source set realistic example/meson.build +++ b/test cases/common/222 source set realistic example/meson.build @@ -9,7 +9,7 @@ if cppid == 'pgi' endif ss = import('sourceset') -keyval = import('unstable-keyval') +keyval = import('keyval') zlib = declare_dependency(compile_args: '-DZLIB=1') another = declare_dependency(compile_args: '-DANOTHER=1') diff --git a/test cases/common/227 fs module/a_symlink b/test cases/common/227 fs module/a_symlink deleted file mode 120000 index 25d053a..0000000 --- a/test cases/common/227 fs module/a_symlink +++ /dev/null @@ -1 +0,0 @@ -meson.build
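The fs-module change that follows replaces the checked-in a_symlink with one created at configure time via ln -s. The same behaviour can be sketched in plain Python; this is illustrative only and subject to the Windows privilege caveats noted in the new comment:

import os
import tempfile

def can_create_and_detect_symlink(target: str) -> bool:
    """Create a symlink to `target` in a temp dir and verify it is detected."""
    link = os.path.join(tempfile.mkdtemp(), 'a_symlink')
    try:
        os.symlink(target, link)
    except (OSError, NotImplementedError):
        return False  # e.g. missing symlink privileges on Windows
    # The link must register as a symlink; the regular file must not.
    return os.path.islink(link) and not os.path.islink(target)

# e.g. can_create_and_detect_symlink(os.path.abspath('meson.build'))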
\ No newline at end of file diff --git a/test cases/common/227 fs module/meson.build b/test cases/common/227 fs module/meson.build index a732768..cff0987 100644 --- a/test cases/common/227 fs module/meson.build +++ b/test cases/common/227 fs module/meson.build @@ -7,18 +7,14 @@ fs = import('fs') assert(fs.exists('meson.build'), 'Existing file reported as missing.') assert(not fs.exists('nonexisting'), 'Nonexisting file was found.') -# When one creates a source release with sdist, Python -# does not store symlinks in the archive as native symlinks. -# Thus the extracted archive does not contain them either. -# Sadly this means that we can only execute the symlink test when -# running from a git checkout because otherwise we'd need to -# do postprocessing on the generated archive before actual release. -# That is both nonstandard an error prone and having symlinks in -# the archive would probably break on Windows anyway. -is_git_checkout = fs.exists('../../../.git') - -if not is_windows and build_machine.system() != 'cygwin' and is_git_checkout - assert(fs.is_symlink('a_symlink'), 'Symlink not detected.') +if not is_windows and build_machine.system() != 'cygwin' + # Symlinks on Windows have specific requirements including: + # * Meson running under Python >= 3.8 + # * Windows user permissions to create symlinks, and/or Windows in Developer mode + # so at this time the symlink test is skipped for Windows. + symlink = meson.current_build_dir() / 'a_symlink' + run_command('ln', '-s', meson.current_source_dir() / 'meson.build', symlink) + assert(fs.is_symlink(symlink), 'Symlink not detected.') assert(not fs.is_symlink('meson.build'), 'Regular file detected as symlink.') endif @@ -103,8 +99,8 @@ assert(fs.is_samepath(meson.source_root(), 'subdir/..'), 'is_samepath not detect assert(not fs.is_samepath(f1, 'subdir/subdirfile.txt'), 'is_samepath known bad comparison') assert(not fs.is_samepath('not-a-path', f2), 'is_samepath should not error if path(s) do not exist') -if not is_windows and build_machine.system() != 'cygwin' and is_git_checkout - assert(fs.is_samepath('a_symlink', 'meson.build'), 'symlink is_samepath fail') +if not is_windows and build_machine.system() != 'cygwin' + assert(fs.is_samepath(symlink, 'meson.build'), 'symlink is_samepath fail') endif # parts of path diff --git a/test cases/common/23 object extraction/meson.build b/test cases/common/23 object extraction/meson.build index 6776b14..18be1db 100644 --- a/test cases/common/23 object extraction/meson.build +++ b/test cases/common/23 object extraction/meson.build @@ -9,12 +9,15 @@ else obj1 = lib1.extract_objects('src/lib.c') obj2 = lib2.extract_objects(['lib.c']) obj3 = lib2.extract_objects(files('lib.c')) + obj4 = lib2.extract_objects(['lib.c', 'lib.c']) e1 = executable('main1', 'main.c', objects : obj1) e2 = executable('main2', 'main.c', objects : obj2) e3 = executable('main3', 'main.c', objects : obj3) + e4 = executable('main4', 'main.c', objects : obj4) test('extraction test 1', e1) test('extraction test 2', e2) test('extraction test 3', e3) + test('extraction test 4', e4) endif diff --git a/test cases/common/230 persubproject options/meson.build b/test cases/common/230 persubproject options/meson.build index 20dff90..f76a70c 100644 --- a/test cases/common/230 persubproject options/meson.build +++ b/test cases/common/230 persubproject options/meson.build @@ -1,9 +1,11 @@ project('persubproject options', 'c', default_options : ['default_library=both', - 'werror=true']) + 'werror=true', + 'warning_level=3']) 
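The pair of asserts around this point encode a simple rule: a subproject's own default_options take precedence over the defaults inherited from the parent project, while options the subproject leaves unset are inherited. A toy illustration of that lookup (not Meson internals):

def effective_default(option, parent_defaults, sub_defaults):
    """Subproject defaults win; otherwise fall back to the parent's defaults."""
    if option in sub_defaults:
        return sub_defaults[option]
    return parent_defaults.get(option)

parent = {'default_library': 'both', 'werror': 'true', 'warning_level': '3'}
sub1 = {'warning_level': '0'}
assert effective_default('warning_level', parent, sub1) == '0'
assert effective_default('default_library', parent, sub1) == 'both'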
assert(get_option('default_library') == 'both', 'Parent default_library should be "both"') assert(get_option('werror')) +assert(get_option('warning_level') == '3') # Check it build both by calling a method only both_libraries target implement lib = library('lib1', 'foo.c') diff --git a/test cases/common/230 persubproject options/subprojects/sub1/foo.c b/test cases/common/230 persubproject options/subprojects/sub1/foo.c index 63e4de6..82ad2c2 100644 --- a/test cases/common/230 persubproject options/subprojects/sub1/foo.c +++ b/test cases/common/230 persubproject options/subprojects/sub1/foo.c @@ -1,5 +1,8 @@ int foo(void); int foo(void) { + /* This is built with -Werror, it would error if warning_level=3 was inherited + * from main project and not overridden by this subproject's default_options. */ + int x; return 0; } diff --git a/test cases/common/230 persubproject options/subprojects/sub1/meson.build b/test cases/common/230 persubproject options/subprojects/sub1/meson.build index 7afc934..4e4bc1b 100644 --- a/test cases/common/230 persubproject options/subprojects/sub1/meson.build +++ b/test cases/common/230 persubproject options/subprojects/sub1/meson.build @@ -1,6 +1,8 @@ -project('sub1', 'c') +project('sub1', 'c', + default_options : ['warning_level=0']) assert(get_option('default_library') == 'both', 'Should inherit parent project default_library') +assert(get_option('warning_level') == '0') # Check it build both by calling a method only both_libraries target implement lib = library('lib1', 'foo.c') diff --git a/test cases/common/234 very long commmand line/codegen.py b/test cases/common/234 very long commmand line/codegen.py index 4de78ce..b1de607 100755 --- a/test cases/common/234 very long commmand line/codegen.py +++ b/test cases/common/234 very long commmand line/codegen.py @@ -1,6 +1,7 @@ #!/usr/bin/env python3 import sys +from pathlib import Path -with open(sys.argv[2], 'w') as f: - print('int func{n}(void) {{ return {n}; }}'.format(n=sys.argv[1]), file=f) +Path(sys.argv[2]).write_text( + 'int func{n}(void) {{ return {n}; }}'.format(n=sys.argv[1])) diff --git a/test cases/common/234 very long commmand line/main.c b/test cases/common/234 very long commmand line/main.c index dbb64a8..78f2de1 100644 --- a/test cases/common/234 very long commmand line/main.c +++ b/test cases/common/234 very long commmand line/main.c @@ -1,5 +1 @@ -int main(int argc, char **argv) { - (void) argc; - (void) argv; - return 0; -} +int main(void) { return 0; } diff --git a/test cases/common/234 very long commmand line/meson.build b/test cases/common/234 very long commmand line/meson.build index fe47b5e..70058e6 100644 --- a/test cases/common/234 very long commmand line/meson.build +++ b/test cases/common/234 very long commmand line/meson.build @@ -6,6 +6,10 @@ if build_machine.system() == 'windows' # cmd.exe: 8kb # CreateProcess: 32kb limit = 32767 + # NOTE: filename limit is 260 characters unless + # 1. Python >= 3.6 is being used + # 2. Windows 10 registry has been edited to enable long pathnaems + # ninja backend uses absolute filenames, so we ensure they don't exceed 260. elif build_machine.system() == 'cygwin' # cygwin-to-win32: see above # cygwin-to-cygwin: no limit? @@ -18,20 +22,21 @@ else limit = 131072 endif # Now exceed that limit, but not so far that the test takes too long. 
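Plugging in the Windows numbers makes the sizing arithmetic easier to follow:

# Worked example for the CreateProcess case used above.
limit = 32767                 # CreateProcess command-line limit
namelen = 260                 # per-file name budget used by name_gen.py
nfiles = 50 + limit // namelen          # 50 + 126 = 176 generated sources
approx_cmdline = nfiles * (namelen + 28)  # 176 * 288 = 50688, comfortably over the limit
assert approx_cmdline > limit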
-name = 'ALongFilenameMuchLongerThanIsNormallySeenAndReallyHardToReadThroughToTheEndAMooseOnceBitMySisterSheNowWorksAtLLamaFreshFarmsThisHasToBeSoLongThatWeExceed128KBWithoutCompilingTooManyFiles' -namelen = 187 +namelen = 260 nfiles = 50 + limit / namelen message('Expected link commandline length is approximately ' + '@0@'.format((nfiles * (namelen+28)))) -seq = run_command('seq.py', '1', '@0@'.format(nfiles)).stdout().strip().split('\n') +seq = run_command('name_gen.py', nfiles.to_string(), meson.build_root()).stdout().strip().split('\n') sources = [] codegen = find_program('codegen.py') -foreach i : seq - sources += custom_target('codegen' + i, - command: [codegen, i, '@OUTPUT@'], - output: name + i + '.c') +i=0 +foreach name : seq + sources += custom_target('codegen' + i.to_string(), + command: [codegen, i.to_string(), '@OUTPUT@'], + output: name + '.c') + i+=1 endforeach shared_library('sharedlib', sources) diff --git a/test cases/common/234 very long commmand line/name_gen.py b/test cases/common/234 very long commmand line/name_gen.py new file mode 100755 index 0000000..8435298 --- /dev/null +++ b/test cases/common/234 very long commmand line/name_gen.py @@ -0,0 +1,23 @@ +#!/usr/bin/env python3 +""" +generate sequence of filename that does not exceed MAX_LEN=260 +for Python < 3.6 and Windows without modified registry +""" + +import sys +import string + +name_len = 260 - len(sys.argv[2]) - 4 - 39 - 4 - 2 +if name_len < 1: + raise ValueError('The meson build directory pathname is so long ' + 'that we cannot generate filenames within 260 characters.') +# leave room for suffix and file separators, and meson generated text +# e.g. ".c.obj.d" and other decorators added by Meson at configuration +# for intermediate files + +base = string.ascii_letters * 5 # 260 characters +max_num_len = len(str(sys.argv[1])) +base = base[: name_len - max_num_len] + +for i in range(int(sys.argv[1])): + print("{base}{i:0{max_num_len}d}".format(base=base, max_num_len=max_num_len, i=i)) diff --git a/test cases/common/234 very long commmand line/seq.py b/test cases/common/234 very long commmand line/seq.py deleted file mode 100755 index 637bf57..0000000 --- a/test cases/common/234 very long commmand line/seq.py +++ /dev/null @@ -1,6 +0,0 @@ -#!/usr/bin/env python3 - -import sys - -for i in range(int(sys.argv[1]), int(sys.argv[2])): - print(i) diff --git a/test cases/common/235 custom_target source/a b/test cases/common/235 custom_target source/a new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/test cases/common/235 custom_target source/a diff --git a/test cases/common/235 custom_target source/meson.build b/test cases/common/235 custom_target source/meson.build new file mode 100644 index 0000000..98b9d26 --- /dev/null +++ b/test cases/common/235 custom_target source/meson.build @@ -0,0 +1,5 @@ +project('a', ['c']) + +x = find_program('x.py') +outs = custom_target('foo', output: ['x.c', 'y'], input: 'a', command: [x]) +executable('testprog', outs[0]) diff --git a/test cases/common/235 custom_target source/x.py b/test cases/common/235 custom_target source/x.py new file mode 100644 index 0000000..12f40c8 --- /dev/null +++ b/test cases/common/235 custom_target source/x.py @@ -0,0 +1,5 @@ +#! 
/usr/bin/env python3 +with open('x.c', 'w') as f: + print('int main(void) { return 0; }', file=f) +with open('y', 'w'): + pass diff --git a/test cases/common/235 disabler array addition/meson.build b/test cases/common/235 disabler array addition/meson.build new file mode 100644 index 0000000..231f76a --- /dev/null +++ b/test cases/common/235 disabler array addition/meson.build @@ -0,0 +1,9 @@ +project('disabler_inside_array', 'c') + +exes = [] + +exes += library('a', 'test.c') + +exes += library('b', 'test.c', dependencies : disabler()) + +exes += library('c', 'test.c') diff --git a/test cases/common/235 disabler array addition/test.c b/test cases/common/235 disabler array addition/test.c new file mode 100644 index 0000000..e9a7aac --- /dev/null +++ b/test cases/common/235 disabler array addition/test.c @@ -0,0 +1 @@ +int stub(void) { return 0; } diff --git a/test cases/common/38 string operations/meson.build b/test cases/common/38 string operations/meson.build index 6596142..8a06a82 100644 --- a/test cases/common/38 string operations/meson.build +++ b/test cases/common/38 string operations/meson.build @@ -101,3 +101,18 @@ assert('\\\\n' == bs_bs_n, 'Four backslash broken before n') assert('\\\\\n' == bs_bs_nl, 'Five backslash broken before n') assert('\\\\' == bs_bs, 'Double-backslash broken') assert('\\' == bs, 'Backslash broken') + +mysubstring='foobarbaz' +assert(mysubstring.substring() == 'foobarbaz', 'substring is broken') +assert(mysubstring.substring(0) == 'foobarbaz', 'substring is broken') +assert(mysubstring.substring(1) == 'oobarbaz', 'substring is broken') +assert(mysubstring.substring(-5) == 'arbaz', 'substring is broken') +assert(mysubstring.substring(1, 4) == 'oob', 'substring is broken') +assert(mysubstring.substring(1,-5) == 'oob', 'substring is broken') +assert(mysubstring.substring(1, 0) == '', 'substring is broken') +assert(mysubstring.substring(0, 100) == 'foobarbaz', 'substring is broken') +assert(mysubstring.substring(-1, -5) == '', 'substring is broken') +assert(mysubstring.substring(10, -25) == '', 'substring is broken') +assert(mysubstring.substring(-4, 2) == '', 'substring is broken') +assert(mysubstring.substring(10, 9) == '', 'substring is broken') +assert(mysubstring.substring(8, 10) == 'z', 'substring is broken') diff --git a/test cases/common/47 pkgconfig-gen/meson.build b/test cases/common/47 pkgconfig-gen/meson.build index eb2afe4..8c16cd5 100644 --- a/test cases/common/47 pkgconfig-gen/meson.build +++ b/test cases/common/47 pkgconfig-gen/meson.build @@ -1,5 +1,12 @@ project('pkgconfig-gen', 'c') +# Some CI runners does not have zlib, just skip them as we need some common +# external dependency. +cc = meson.get_compiler('c') +if not cc.find_library('z', required: false).found() + error('MESON_SKIP_TEST: zlib missing') +endif + # First check we have pkg-config >= 0.29 pkgconfig = find_program('pkg-config', required: false) if not pkgconfig.found() @@ -59,3 +66,32 @@ pkgg.generate( version : libver, dataonly: true ) + +# Regression test for 2 cases: +# - link_whole from InternalDependency used to be ignored, but we should still +# recurse to add libraries they link to. In this case it must add `-lsimple1` +# in generated pc file. +# - dependencies from InternalDependency used to be ignored. In this it must add +# `-lz` in generated pc file. 
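Concretely, the unit test for this case expects the generated simple2.pc to carry both the link_whole library's dependency (-lsimple1) and the external dependency (-lz) on its Libs line. A standalone check of that expectation might look like this (the path is an example):

expected = 'Libs: -L${libdir} -lsimple2 -lz -lsimple1'

def pc_has_expected_libs(pc_path: str) -> bool:
    """Return True if the generated pkg-config file contains the expected Libs line."""
    with open(pc_path) as f:
        return expected in f.read()

# e.g. pc_has_expected_libs('builddir/meson-private/simple2.pc')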
+simple1 = shared_library('simple1', 'simple.c') +stat1 = static_library('stat1', 'simple.c', link_with: simple1) +dep = declare_dependency(link_whole: stat1, dependencies: cc.find_library('z')) +simple2 = library('simple2', 'simple.c') +pkgg.generate(simple2, libraries: dep) + +# Regression test: as_system() does a deepcopy() of the InternalDependency object +# which caused `-lsimple3` to be duplicated because generator used to compare +# Target instances instead of their id. +simple3 = shared_library('simple3', 'simple.c') +dep1 = declare_dependency(link_with: simple3) +dep2 = dep1.as_system() +pkgg.generate(libraries: [dep1, dep2], + name: 'simple3', + description: 'desc') + +# Regression test: stat2 is both link_with and link_whole, it should not appear +# in generated pc file. +stat2 = static_library('stat2', 'simple.c', install: true) +simple4 = library('simple4', 'simple.c', link_with: stat2) +simple5 = library('simple5', 'simple5.c', link_with: simple4, link_whole: stat2) +pkgg.generate(simple5) diff --git a/test cases/common/47 pkgconfig-gen/simple5.c b/test cases/common/47 pkgconfig-gen/simple5.c new file mode 100644 index 0000000..9f924bd --- /dev/null +++ b/test cases/common/47 pkgconfig-gen/simple5.c @@ -0,0 +1,6 @@ +int simple5(void); + +int simple5(void) +{ + return 0; +} diff --git a/test cases/common/47 pkgconfig-gen/test.json b/test cases/common/47 pkgconfig-gen/test.json index 1c6a452..702e7fe 100644 --- a/test cases/common/47 pkgconfig-gen/test.json +++ b/test cases/common/47 pkgconfig-gen/test.json @@ -1,9 +1,13 @@ { "installed": [ {"type": "file", "file": "usr/include/simple.h"}, + {"type": "file", "file": "usr/lib/libstat2.a"}, {"type": "file", "file": "usr/lib/pkgconfig/simple.pc"}, {"type": "file", "file": "usr/lib/pkgconfig/libfoo.pc"}, {"type": "file", "file": "usr/lib/pkgconfig/libhello.pc"}, - {"type": "file", "file": "usr/lib/pkgconfig/libhello_nolib.pc"} + {"type": "file", "file": "usr/lib/pkgconfig/libhello_nolib.pc"}, + {"type": "file", "file": "usr/lib/pkgconfig/simple2.pc"}, + {"type": "file", "file": "usr/lib/pkgconfig/simple3.pc"}, + {"type": "file", "file": "usr/lib/pkgconfig/simple5.pc"} ] } diff --git a/test cases/failing test/5 tap tests/meson.build b/test cases/failing test/5 tap tests/meson.build index 844c1f9..c49043b 100644 --- a/test cases/failing test/5 tap tests/meson.build +++ b/test cases/failing test/5 tap tests/meson.build @@ -4,3 +4,4 @@ tester = executable('tester', 'tester.c') test('nonzero return code', tester, args : [], protocol: 'tap') test('missing test', tester, args : ['1..1'], protocol: 'tap') test('incorrect skip', tester, args : ['1..1 # skip\nok 1'], protocol: 'tap') +test('partially skipped', tester, args : ['not ok 1\nok 2 # skip'], protocol: 'tap') diff --git a/test cases/failing/107 number in combo/meson.build b/test cases/failing/107 number in combo/meson.build new file mode 100644 index 0000000..1a647df --- /dev/null +++ b/test cases/failing/107 number in combo/meson.build @@ -0,0 +1 @@ +project('number in combo') diff --git a/test cases/failing/107 number in combo/nativefile.ini b/test cases/failing/107 number in combo/nativefile.ini new file mode 100644 index 0000000..55f10fc --- /dev/null +++ b/test cases/failing/107 number in combo/nativefile.ini @@ -0,0 +1,2 @@ +[built-in options] +optimization = 1 diff --git a/test cases/failing/107 number in combo/test.json b/test cases/failing/107 number in combo/test.json new file mode 100644 index 0000000..f5aeb4e --- /dev/null +++ b/test cases/failing/107 number in 
combo/test.json @@ -0,0 +1,5 @@ +{ + "stdout": [ + { "line": "test cases/failing/107 number in combo/meson.build:1:0: ERROR: Value \"1\" (of type \"number\") for combo option \"Optimization level\" is not one of the choices. Possible choices are (as string): \"0\", \"g\", \"1\", \"2\", \"3\", \"s\"." } + ] +} diff --git a/test cases/failing/108 bool in combo/meson.build b/test cases/failing/108 bool in combo/meson.build new file mode 100644 index 0000000..c5efd67 --- /dev/null +++ b/test cases/failing/108 bool in combo/meson.build @@ -0,0 +1 @@ +project('bool in combo') diff --git a/test cases/failing/108 bool in combo/meson_options.txt b/test cases/failing/108 bool in combo/meson_options.txt new file mode 100644 index 0000000..0c8f5de --- /dev/null +++ b/test cases/failing/108 bool in combo/meson_options.txt @@ -0,0 +1,5 @@ +option( + 'opt', + type : 'combo', + choices : ['true', 'false'] +) diff --git a/test cases/failing/108 bool in combo/nativefile.ini b/test cases/failing/108 bool in combo/nativefile.ini new file mode 100644 index 0000000..b423957 --- /dev/null +++ b/test cases/failing/108 bool in combo/nativefile.ini @@ -0,0 +1,2 @@ +[project options] +opt = true diff --git a/test cases/failing/108 bool in combo/test.json b/test cases/failing/108 bool in combo/test.json new file mode 100644 index 0000000..729ad3d --- /dev/null +++ b/test cases/failing/108 bool in combo/test.json @@ -0,0 +1,5 @@ +{ + "stdout": [ + { "line": "test cases/failing/108 bool in combo/meson.build:1:0: ERROR: Value \"True\" (of type \"boolean\") for combo option \"opt\" is not one of the choices. Possible choices are (as string): \"true\", \"false\"." } + ] +} diff --git a/test cases/frameworks/1 boost/meson.build b/test cases/frameworks/1 boost/meson.build index 6c23360..5a2e1a1 100644 --- a/test cases/frameworks/1 boost/meson.build +++ b/test cases/frameworks/1 boost/meson.build @@ -54,7 +54,11 @@ python3module = shared_library('python3_module', ['python_module.cpp'], dependen test('Boost linktest', linkexe) test('Boost UTF test', unitexe) test('Boost nomod', nomodexe) -test('Boost extralib test', extralibexe) +if host_machine.system() != 'darwin' or s + # Segfaults on macOS with dynamic linking since Boost 1.73 + # https://github.com/mesonbuild/meson/issues/7535 + test('Boost extralib test', extralibexe) +endif # explicitly use the correct python interpreter so that we don't have to provide two different python scripts that have different shebang lines python2interpreter = find_program(python2.path(), required: false, disabler: true) diff --git a/test cases/frameworks/32 boost root/boost/include/boost/version.hpp b/test cases/frameworks/32 boost root/boost/include/boost/version.hpp new file mode 100644 index 0000000..65e4fab --- /dev/null +++ b/test cases/frameworks/32 boost root/boost/include/boost/version.hpp @@ -0,0 +1,3 @@ +#define BOOST_VERSION 100 + +#error This is not a real version of boost diff --git a/test cases/frameworks/32 boost root/boost/lib/boost_regex-vc142-mt-gd-x32-0_1.lib b/test cases/frameworks/32 boost root/boost/lib/boost_regex-vc142-mt-gd-x32-0_1.lib new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/test cases/frameworks/32 boost root/boost/lib/boost_regex-vc142-mt-gd-x32-0_1.lib diff --git a/test cases/frameworks/32 boost root/boost/lib/boost_regex-vc142-mt-gd-x64-0_1.lib b/test cases/frameworks/32 boost root/boost/lib/boost_regex-vc142-mt-gd-x64-0_1.lib new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/test cases/frameworks/32 boost 
root/boost/lib/boost_regex-vc142-mt-gd-x64-0_1.lib diff --git a/test cases/frameworks/32 boost root/boost/lib/libboost_regex.so.0.1.0 b/test cases/frameworks/32 boost root/boost/lib/libboost_regex.so.0.1.0 new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/test cases/frameworks/32 boost root/boost/lib/libboost_regex.so.0.1.0 diff --git a/test cases/frameworks/32 boost root/meson.build b/test cases/frameworks/32 boost root/meson.build new file mode 100644 index 0000000..50d2f0d --- /dev/null +++ b/test cases/frameworks/32 boost root/meson.build @@ -0,0 +1,6 @@ +project('boosttest', 'cpp') + +dep = dependency('boost', modules : 'regex', required: false) + +assert(dep.found(), 'expected to find a fake version of boost') +assert(dep.version() == '0.1.0', 'expected to find version 0.1.0') diff --git a/test cases/frameworks/32 boost root/nativefile.ini.in b/test cases/frameworks/32 boost root/nativefile.ini.in new file mode 100644 index 0000000..54510d7 --- /dev/null +++ b/test cases/frameworks/32 boost root/nativefile.ini.in @@ -0,0 +1,2 @@ +[properties] +boost_root = '@MESON_TEST_ROOT@/boost' diff --git a/test cases/frameworks/33 boost split root/boost/extra-dir/include/boost/version.hpp b/test cases/frameworks/33 boost split root/boost/extra-dir/include/boost/version.hpp new file mode 100644 index 0000000..3ba19ee --- /dev/null +++ b/test cases/frameworks/33 boost split root/boost/extra-dir/include/boost/version.hpp @@ -0,0 +1,3 @@ +#define BOOST_VERSION 200 + +#error This is not a real version of boost diff --git a/test cases/frameworks/33 boost split root/boost/lib/boost_regex-vc142-mt-gd-x32-0_2.lib b/test cases/frameworks/33 boost split root/boost/lib/boost_regex-vc142-mt-gd-x32-0_2.lib new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/test cases/frameworks/33 boost split root/boost/lib/boost_regex-vc142-mt-gd-x32-0_2.lib diff --git a/test cases/frameworks/33 boost split root/boost/lib/boost_regex-vc142-mt-gd-x64-0_2.lib b/test cases/frameworks/33 boost split root/boost/lib/boost_regex-vc142-mt-gd-x64-0_2.lib new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/test cases/frameworks/33 boost split root/boost/lib/boost_regex-vc142-mt-gd-x64-0_2.lib diff --git a/test cases/frameworks/33 boost split root/boost/lib/libboost_regex.so.0.2.0 b/test cases/frameworks/33 boost split root/boost/lib/libboost_regex.so.0.2.0 new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/test cases/frameworks/33 boost split root/boost/lib/libboost_regex.so.0.2.0 diff --git a/test cases/frameworks/33 boost split root/meson.build b/test cases/frameworks/33 boost split root/meson.build new file mode 100644 index 0000000..a2353bb --- /dev/null +++ b/test cases/frameworks/33 boost split root/meson.build @@ -0,0 +1,6 @@ +project('boosttest', 'cpp') + +dep = dependency('boost', modules : 'regex', required: false) + +assert(dep.found(), 'expected to find a fake version of boost') +assert(dep.version() == '0.2.0', 'expected to find version 0.2.0') diff --git a/test cases/frameworks/33 boost split root/nativefile.ini.in b/test cases/frameworks/33 boost split root/nativefile.ini.in new file mode 100644 index 0000000..7bd5ac2 --- /dev/null +++ b/test cases/frameworks/33 boost split root/nativefile.ini.in @@ -0,0 +1,3 @@ +[properties] +boost_includedir = '@MESON_TEST_ROOT@/boost/extra-dir/include' +boost_librarydir = '@MESON_TEST_ROOT@/boost/lib' diff --git a/test cases/keyval/1 basic/meson.build b/test cases/keyval/1 basic/meson.build index fc7ddb3..4207b8e 100644 --- a/test 
cases/keyval/1 basic/meson.build +++ b/test cases/keyval/1 basic/meson.build @@ -1,6 +1,6 @@ project('keyval basic test') -k = import('unstable-keyval') +k = import('keyval') conf = k.load('.config') if not conf.has_key('CONFIG_VAL1') @@ -14,3 +14,5 @@ endif if conf.get('CONFIG_VAL_VAL').to_int() != 4 error('Expected CONFIG_VAL_VAL to be 4') endif + +k = import('unstable-keyval') diff --git a/test cases/keyval/1 basic/test.json b/test cases/keyval/1 basic/test.json new file mode 100644 index 0000000..dbdc5af --- /dev/null +++ b/test cases/keyval/1 basic/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "WARNING: Module unstable-keyval is now stable, please use the keyval module instead." + } + ] +} diff --git a/test cases/keyval/2 subdir/dir/meson.build b/test cases/keyval/2 subdir/dir/meson.build index dc1b478..291ad93 100644 --- a/test cases/keyval/2 subdir/dir/meson.build +++ b/test cases/keyval/2 subdir/dir/meson.build @@ -1,5 +1,5 @@ -k = import('unstable-keyval') +k = import('keyval') conf = k.load(meson.source_root() / '.config') diff --git a/test cases/keyval/3 load_config files/dir/meson.build b/test cases/keyval/3 load_config files/dir/meson.build index 43fba13..adc5289 100644 --- a/test cases/keyval/3 load_config files/dir/meson.build +++ b/test cases/keyval/3 load_config files/dir/meson.build @@ -1,5 +1,5 @@ -k = import('unstable-keyval') +k = import('keyval') conf = k.load(files('config')) diff --git a/test cases/keyval/4 load_config builddir/meson.build b/test cases/keyval/4 load_config builddir/meson.build index 1bb0285..6bd83db 100644 --- a/test cases/keyval/4 load_config builddir/meson.build +++ b/test cases/keyval/4 load_config builddir/meson.build @@ -1,6 +1,6 @@ project('keyval builddir test') -k = import('unstable-keyval') +k = import('keyval') out_conf = configure_file(input: 'config', output: 'out-config', copy: true) conf = k.load(out_conf) diff --git a/test cases/linuxlike/13 cmake dependency/cmake/FindSomethingLikeZLIB.cmake b/test cases/linuxlike/13 cmake dependency/cmake/FindSomethingLikeZLIB.cmake index d19f7e8..483926c 100644 --- a/test cases/linuxlike/13 cmake dependency/cmake/FindSomethingLikeZLIB.cmake +++ b/test cases/linuxlike/13 cmake dependency/cmake/FindSomethingLikeZLIB.cmake @@ -4,6 +4,9 @@ include(CMakeFindDependencyMacro) include(CheckCXXSourceRuns) include(CheckCSourceRuns) +# Do something stupid (see https://github.com/mesonbuild/meson/issues/7501) +set("") + check_cxx_source_runs( " #include <iostream> diff --git a/test cases/python/1 basic/meson.build b/test cases/python/1 basic/meson.build index 9c3af10..bd9a65c 100644 --- a/test cases/python/1 basic/meson.build +++ b/test cases/python/1 basic/meson.build @@ -1,4 +1,4 @@ -project('python sample', 'c') +project('python sample') py_mod = import('python') py = py_mod.find_installation('python3') @@ -12,6 +12,7 @@ py_purelib = py.get_path('purelib') if not py_purelib.endswith('site-packages') error('Python3 purelib path seems invalid? 
' + py_purelib) endif +message('Python purelib path:', py_purelib) # could be 'lib64' or 'Lib' on some systems py_platlib = py.get_path('platlib') diff --git a/test cases/python/1 basic/prog.py b/test cases/python/1 basic/prog.py index 9d95aea..720fdb1 100755 --- a/test cases/python/1 basic/prog.py +++ b/test cases/python/1 basic/prog.py @@ -1,9 +1,8 @@ #!/usr/bin/env python3 from gluon import gluonator -import sys print('Running mainprog from root dir.') if gluonator.gluoninate() != 42: - sys.exit(1) + raise ValueError("!= 42") diff --git a/test cases/python/1 basic/subdir/subprog.py b/test cases/python/1 basic/subdir/subprog.py index 08652f0..54178e5 100755 --- a/test cases/python/1 basic/subdir/subprog.py +++ b/test cases/python/1 basic/subdir/subprog.py @@ -4,9 +4,8 @@ # point to source root. from gluon import gluonator -import sys print('Running mainprog from subdir.') if gluonator.gluoninate() != 42: - sys.exit(1) + raise ValueError("!= 42") diff --git a/test cases/python/2 extmodule/blaster.py b/test cases/python/2 extmodule/blaster.py index 7e1eae6..1f01876 100755 --- a/test cases/python/2 extmodule/blaster.py +++ b/test cases/python/2 extmodule/blaster.py @@ -1,14 +1,11 @@ #!/usr/bin/env python3 import tachyon -import sys result = tachyon.phaserize('shoot') if not isinstance(result, int): - print('Returned result not an integer.') - sys.exit(1) + raise SystemExit('Returned result not an integer.') if result != 1: - print('Returned result {} is not 1.'.format(result)) - sys.exit(1) + raise SystemExit('Returned result {} is not 1.'.format(result)) diff --git a/test cases/python/2 extmodule/meson.build b/test cases/python/2 extmodule/meson.build index b4eb960..18d70c8 100644 --- a/test cases/python/2 extmodule/meson.build +++ b/test cases/python/2 extmodule/meson.build @@ -3,26 +3,33 @@ project('Python extension module', 'c', # Because Windows Python ships only with optimized libs, # we must build this project the same way. 
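A note on the pattern used throughout these script changes: raising SystemExit with a string prints that string on stderr and exits with status 1, which is why each print() plus sys.exit(1) pair collapses to a single raise. A small self-check of that behaviour:

import subprocess
import sys

proc = subprocess.run(
    [sys.executable, '-c', "raise SystemExit('Returned result not an integer.')"],
    capture_output=True, text=True)
assert proc.returncode == 1
assert 'Returned result not an integer.' in proc.stderr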
+if meson.backend() != 'ninja' + error('MESON_SKIP_TEST: Ninja backend required') +endif + + py_mod = import('python') py = py_mod.find_installation() -py_dep = py.dependency() +py_dep = py.dependency(required: false) -if py_dep.found() - subdir('ext') +if not py_dep.found() + error('MESON_SKIP_TEST: Python libraries not found.') +endif - test('extmod', - py, - args : files('blaster.py'), - env : ['PYTHONPATH=' + pypathdir]) +subdir('ext') - # Check we can apply a version constraint - dependency('python3', version: '>=@0@'.format(py_dep.version())) +test('extmod', + py, + args : files('blaster.py'), + env : ['PYTHONPATH=' + pypathdir]) -else - error('MESON_SKIP_TEST: Python3 libraries not found, skipping test.') -endif py3_pkg_dep = dependency('python3', method: 'pkg-config', required : false) if py3_pkg_dep.found() python_lib_dir = py3_pkg_dep.get_pkgconfig_variable('libdir') + + # Check we can apply a version constraint + dependency('python3', version: '>=@0@'.format(py_dep.version())) +else + message('Skipped python3 pkg-config test') endif diff --git a/test cases/python/3 cython/cytest.py b/test cases/python/3 cython/cytest.py index 43443dc..c08ffee 100755 --- a/test cases/python/3 cython/cytest.py +++ b/test cases/python/3 cython/cytest.py @@ -1,23 +1,19 @@ #!/usr/bin/env python3 from storer import Storer -import sys s = Storer() if s.get_value() != 0: - print('Initial value incorrect.') - sys.exit(1) + raise SystemExit('Initial value incorrect.') s.set_value(42) if s.get_value() != 42: - print('Setting value failed.') - sys.exit(1) + raise SystemExit('Setting value failed.') try: s.set_value('not a number') - print('Using wrong argument type did not fail.') - sys.exit(1) + raise SystemExit('Using wrong argument type did not fail.') except TypeError: pass diff --git a/test cases/python/3 cython/meson.build b/test cases/python/3 cython/meson.build index 194920b..5fc07a8 100644 --- a/test cases/python/3 cython/meson.build +++ b/test cases/python/3 cython/meson.build @@ -1,20 +1,26 @@ project('cython', 'c', default_options : ['warning_level=3']) -cython = find_program('cython3', required : false) -py3_dep = dependency('python3', required : false) +if meson.backend() != 'ninja' + error('MESON_SKIP_TEST: Ninja backend required') +endif -if cython.found() and py3_dep.found() - py_mod = import('python') - py3 = py_mod.find_installation() - py3_dep = py3.dependency() - subdir('libdir') +cython = find_program('cython', required : false) +if not cython.found() + error('MESON_SKIP_TEST: Cython3 not found.') +endif - test('cython tester', - py3, - args : files('cytest.py'), - env : ['PYTHONPATH=' + pydir] - ) -else - error('MESON_SKIP_TEST: Cython3 or Python3 libraries not found, skipping test.') +py_mod = import('python') +py3 = py_mod.find_installation() +py3_dep = py3.dependency(required: false) +if not py3_dep.found() + error('MESON_SKIP_TEST: Python library not found.') endif + +subdir('libdir') + +test('cython tester', + py3, + args : files('cytest.py'), + env : ['PYTHONPATH=' + pydir] +) diff --git a/test cases/python/4 custom target depends extmodule/blaster.py b/test cases/python/4 custom target depends extmodule/blaster.py index 6106f6b..09039cb 100644 --- a/test cases/python/4 custom target depends extmodule/blaster.py +++ b/test cases/python/4 custom target depends extmodule/blaster.py @@ -24,9 +24,7 @@ if options.output: f.write('success') if not isinstance(result, int): - print('Returned result not an integer.') - sys.exit(1) + raise SystemExit('Returned result not an integer.') if 
diff --git a/test cases/python/4 custom target depends extmodule/blaster.py b/test cases/python/4 custom target depends extmodule/blaster.py
index 6106f6b..09039cb 100755
--- a/test cases/python/4 custom target depends extmodule/blaster.py
+++ b/test cases/python/4 custom target depends extmodule/blaster.py
@@ -24,9 +24,7 @@ if options.output:
         f.write('success')

 if not isinstance(result, int):
-    print('Returned result not an integer.')
-    sys.exit(1)
+    raise SystemExit('Returned result not an integer.')

 if result != 1:
-    print('Returned result {} is not 1.'.format(result))
-    sys.exit(1)
+    raise SystemExit('Returned result {} is not 1.'.format(result))
diff --git a/test cases/python/4 custom target depends extmodule/meson.build b/test cases/python/4 custom target depends extmodule/meson.build
index 3835377..d8a62ed 100644
--- a/test cases/python/4 custom target depends extmodule/meson.build
+++ b/test cases/python/4 custom target depends extmodule/meson.build
@@ -3,11 +3,19 @@ project('Python extension module', 'c',
 # Because Windows Python ships only with optimized libs,
 # we must build this project the same way.

+if meson.backend() != 'ninja'
+  error('MESON_SKIP_TEST: Ninja backend required')
+endif
+
 py_mod = import('python')
 py3 = py_mod.find_installation()
 py3_dep = py3.dependency(required : false)
 cc = meson.get_compiler('c')

+if not py3_dep.found()
+  error('MESON_SKIP_TEST: Python3 libraries not found, skipping test.')
+endif
+
 # Copy to the builddir so that blaster.py can find the built tachyon module
 # FIXME: We should automatically detect this case and append the correct paths
 # to PYTHONLIBDIR
@@ -20,21 +28,18 @@ import os, sys
 with open(sys.argv[1], 'rb') as f:
     assert(f.read() == b'success')
 '''
-if py3_dep.found()
-  message('Detected Python version: ' + py3_dep.version())
-  if py3_dep.version().version_compare('>=3.8') and cc.get_id() == 'msvc' and cc.version().version_compare('<=19.00.24215.1')
-    error('MESON_SKIP_TEST: Python modules do not work with Python 3.8 and VS2015 or earlier.')
-  endif
-  subdir('ext')
-
-  out_txt = custom_target('tachyon flux',
-    input : blaster_py,
-    output : 'out.txt',
-    command : [py3, '@INPUT@', '-o', '@OUTPUT@'],
-    depends : pylib,
-    build_by_default: true)
-
-  test('flux', py3, args : ['-c', check_exists, out_txt])
-else
-  error('MESON_SKIP_TEST: Python3 libraries not found, skipping test.')
+
+message('Detected Python version: ' + py3_dep.version())
+if py3_dep.version().version_compare('>=3.8') and cc.get_id() == 'msvc' and cc.version().version_compare('<=19.00.24215.1')
+  error('MESON_SKIP_TEST: Python modules do not work with Python 3.8 and VS2015 or earlier.')
 endif
+subdir('ext')
+
+out_txt = custom_target('tachyon flux',
+  input : blaster_py,
+  output : 'out.txt',
+  command : [py3, '@INPUT@', '-o', '@OUTPUT@'],
+  depends : pylib,
+  build_by_default: true)
+
+test('flux', py3, args : ['-c', check_exists, out_txt])
diff --git a/test cases/python/5 modules kwarg/meson.build b/test cases/python/5 modules kwarg/meson.build
index 3c9d54f..9751ada 100644
--- a/test cases/python/5 modules kwarg/meson.build
+++ b/test cases/python/5 modules kwarg/meson.build
@@ -1,7 +1,7 @@
 project('python kwarg')

 py = import('python')
-prog_python = py.find_installation('python3', modules : ['setuptools'])
+prog_python = py.find_installation('python3', modules : ['distutils'])
 assert(prog_python.found() == true, 'python not found when should be')
 prog_python = py.find_installation('python3', modules : ['thisbetternotexistmod'], required : false)
 assert(prog_python.found() == false, 'python not found but reported as found')
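The modules kwarg above makes find_installation() verify that the named Python modules can actually be imported; switching the probe from setuptools to distutils picks a module that is part of the standard library, so the positive case stays reliable. With required : false the result degrades to a not-found object instead of a hard error, which is what the second assert exercises. A small sketch of both uses (the 'yaml' module is only an illustration, not part of this change):

    py = import('python')

    # Hard requirement: configuration fails if the module cannot be imported.
    py3 = py.find_installation('python3', modules : ['distutils'])

    # Soft probe: carry on when the module is missing.
    py3_yaml = py.find_installation('python3', modules : ['yaml'], required : false)
    if not py3_yaml.found()
      message('python3 yaml module not available, related tests will be skipped')
    endif
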
diff --git a/test cases/unit/73 summary/meson.build b/test cases/unit/73 summary/meson.build
index df4540d..1bc05ca 100644
--- a/test cases/unit/73 summary/meson.build
+++ b/test cases/unit/73 summary/meson.build
@@ -13,3 +13,4 @@ summary('A number', 1, section: 'Configuration')
 summary('yes', true, bool_yn : true, section: 'Configuration')
 summary('no', false, bool_yn : true, section: 'Configuration')
 summary('coma list', ['a', 'b', 'c'], list_sep: ', ', section: 'Configuration')
+summary('long coma list', ['alpha', 'alphacolor', 'apetag', 'audiofx', 'audioparsers', 'auparse', 'autodetect', 'avi'], list_sep: ', ', section: 'Plugins')
diff --git a/test cases/unit/76 pkgconfig prefixes/client/client.c b/test cases/unit/77 pkgconfig prefixes/client/client.c
index be9bead..be9bead 100644
--- a/test cases/unit/76 pkgconfig prefixes/client/client.c
+++ b/test cases/unit/77 pkgconfig prefixes/client/client.c
diff --git a/test cases/unit/76 pkgconfig prefixes/client/meson.build b/test cases/unit/77 pkgconfig prefixes/client/meson.build
index 491937b..491937b 100644
--- a/test cases/unit/76 pkgconfig prefixes/client/meson.build
+++ b/test cases/unit/77 pkgconfig prefixes/client/meson.build
diff --git a/test cases/unit/76 pkgconfig prefixes/val1/meson.build b/test cases/unit/77 pkgconfig prefixes/val1/meson.build
index cc63e31..cc63e31 100644
--- a/test cases/unit/76 pkgconfig prefixes/val1/meson.build
+++ b/test cases/unit/77 pkgconfig prefixes/val1/meson.build
diff --git a/test cases/unit/76 pkgconfig prefixes/val1/val1.c b/test cases/unit/77 pkgconfig prefixes/val1/val1.c
index 591e521..591e521 100644
--- a/test cases/unit/76 pkgconfig prefixes/val1/val1.c
+++ b/test cases/unit/77 pkgconfig prefixes/val1/val1.c
diff --git a/test cases/unit/76 pkgconfig prefixes/val1/val1.h b/test cases/unit/77 pkgconfig prefixes/val1/val1.h
index 6bd435e..6bd435e 100644
--- a/test cases/unit/76 pkgconfig prefixes/val1/val1.h
+++ b/test cases/unit/77 pkgconfig prefixes/val1/val1.h
diff --git a/test cases/unit/76 pkgconfig prefixes/val2/meson.build b/test cases/unit/77 pkgconfig prefixes/val2/meson.build
index ce69481..ce69481 100644
--- a/test cases/unit/76 pkgconfig prefixes/val2/meson.build
+++ b/test cases/unit/77 pkgconfig prefixes/val2/meson.build
diff --git a/test cases/unit/76 pkgconfig prefixes/val2/val2.c b/test cases/unit/77 pkgconfig prefixes/val2/val2.c
index d7d4857..d7d4857 100644
--- a/test cases/unit/76 pkgconfig prefixes/val2/val2.c
+++ b/test cases/unit/77 pkgconfig prefixes/val2/val2.c
diff --git a/test cases/unit/76 pkgconfig prefixes/val2/val2.h b/test cases/unit/77 pkgconfig prefixes/val2/val2.h
index 995023d..995023d 100644
--- a/test cases/unit/76 pkgconfig prefixes/val2/val2.h
+++ b/test cases/unit/77 pkgconfig prefixes/val2/val2.h
diff --git a/test cases/unit/76 subdir libdir/meson.build b/test cases/unit/78 subdir libdir/meson.build
index 5099c91..5099c91 100644
--- a/test cases/unit/76 subdir libdir/meson.build
+++ b/test cases/unit/78 subdir libdir/meson.build
diff --git a/test cases/unit/76 subdir libdir/subprojects/flub/meson.build b/test cases/unit/78 subdir libdir/subprojects/flub/meson.build
index 7bfd2c5..7bfd2c5 100644
--- a/test cases/unit/76 subdir libdir/subprojects/flub/meson.build
+++ b/test cases/unit/78 subdir libdir/subprojects/flub/meson.build
diff --git a/manual tests/9 nostdlib/meson.build b/test cases/unit/79 nostdlib/meson.build
index 9c5f949..9c5f949 100644
--- a/manual tests/9 nostdlib/meson.build
+++ b/test cases/unit/79 nostdlib/meson.build
diff --git a/manual tests/9 nostdlib/prog.c b/test cases/unit/79 nostdlib/prog.c
index b9216ee..b9216ee 100644
--- a/manual tests/9 nostdlib/prog.c
+++ b/test cases/unit/79 nostdlib/prog.c
diff --git a/manual tests/9 nostdlib/subprojects/mylibc/libc.c b/test cases/unit/79 nostdlib/subprojects/mylibc/libc.c
index 67261cb..67261cb 100644
--- a/manual tests/9 nostdlib/subprojects/mylibc/libc.c
+++ b/test cases/unit/79 nostdlib/subprojects/mylibc/libc.c
diff --git a/manual tests/9 nostdlib/subprojects/mylibc/meson.build b/test cases/unit/79 nostdlib/subprojects/mylibc/meson.build
index aa0184e..ff4bdb2 100644
--- a/manual tests/9 nostdlib/subprojects/mylibc/meson.build
+++ b/test cases/unit/79 nostdlib/subprojects/mylibc/meson.build
@@ -9,3 +9,5 @@ libc = static_library('c', 'libc.c', 'stubstart.s')
 mylibc_dep = declare_dependency(link_with : libc,
   include_directories : include_directories('.')
 )
+
+meson.override_dependency('c_stdlib', mylibc_dep)
diff --git a/manual tests/9 nostdlib/subprojects/mylibc/stdio.h b/test cases/unit/79 nostdlib/subprojects/mylibc/stdio.h
index c3f8f56..c3f8f56 100644
--- a/manual tests/9 nostdlib/subprojects/mylibc/stdio.h
+++ b/test cases/unit/79 nostdlib/subprojects/mylibc/stdio.h
diff --git a/manual tests/9 nostdlib/subprojects/mylibc/stubstart.s b/test cases/unit/79 nostdlib/subprojects/mylibc/stubstart.s
index 0a6d972..0a6d972 100644
--- a/manual tests/9 nostdlib/subprojects/mylibc/stubstart.s
+++ b/test cases/unit/79 nostdlib/subprojects/mylibc/stubstart.s
diff --git a/test cases/unit/79 user options for subproject/75 user options for subproject/.gitignore b/test cases/unit/79 user options for subproject/75 user options for subproject/.gitignore
new file mode 100644
index 0000000..4976afc
--- /dev/null
+++ b/test cases/unit/79 user options for subproject/75 user options for subproject/.gitignore
@@ -0,0 +1 @@
+subprojects/*
diff --git a/test cases/unit/79 user options for subproject/75 user options for subproject/meson.build b/test cases/unit/79 user options for subproject/75 user options for subproject/meson.build
new file mode 100644
index 0000000..0bc395b
--- /dev/null
+++ b/test cases/unit/79 user options for subproject/75 user options for subproject/meson.build
@@ -0,0 +1,3 @@
+project('user option for subproject')
+
+p = subproject('sub')
diff --git a/test cases/unit/77 global-rpath/meson.build b/test cases/unit/80 global-rpath/meson.build
index c67d9e0..c67d9e0 100644
--- a/test cases/unit/77 global-rpath/meson.build
+++ b/test cases/unit/80 global-rpath/meson.build
diff --git a/test cases/unit/77 global-rpath/rpathified.cpp b/test cases/unit/80 global-rpath/rpathified.cpp
index 3788906..3788906 100644
--- a/test cases/unit/77 global-rpath/rpathified.cpp
+++ b/test cases/unit/80 global-rpath/rpathified.cpp
diff --git a/test cases/unit/77 global-rpath/yonder/meson.build b/test cases/unit/80 global-rpath/yonder/meson.build
index e32f383..e32f383 100644
--- a/test cases/unit/77 global-rpath/yonder/meson.build
+++ b/test cases/unit/80 global-rpath/yonder/meson.build
diff --git a/test cases/unit/77 global-rpath/yonder/yonder.cpp b/test cases/unit/80 global-rpath/yonder/yonder.cpp
index b182d34..b182d34 100644
--- a/test cases/unit/77 global-rpath/yonder/yonder.cpp
+++ b/test cases/unit/80 global-rpath/yonder/yonder.cpp
diff --git a/test cases/unit/77 global-rpath/yonder/yonder.h b/test cases/unit/80 global-rpath/yonder/yonder.h
index 9d9ad16..9d9ad16 100644
--- a/test cases/unit/77 global-rpath/yonder/yonder.h
+++ b/test cases/unit/80 global-rpath/yonder/yonder.h
diff --git a/test cases/unit/78 wrap-git/meson.build b/test cases/unit/81 wrap-git/meson.build
index b0af30a..b0af30a 100644
--- a/test cases/unit/78 wrap-git/meson.build
+++ b/test cases/unit/81 wrap-git/meson.build
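The relocated nostdlib test above now registers its replacement libc explicitly: meson.override_dependency('c_stdlib', mylibc_dep) lets the consuming project resolve the special 'c_stdlib' dependency to the subproject's library when a machine file names that subproject as the C standard library. A minimal sketch of such a subproject, reusing the names from the test case purely as an illustration (the project() line is assumed, not shown in the diff):

    # subprojects/mylibc/meson.build
    project('mylibc', 'c')

    libc = static_library('c', 'libc.c', 'stubstart.s')

    mylibc_dep = declare_dependency(
      link_with : libc,
      include_directories : include_directories('.'),
    )

    # Publish this library as the provider of the 'c_stdlib' dependency,
    # so a machine file can select it with c_stdlib = 'mylibc'.
    meson.override_dependency('c_stdlib', mylibc_dep)
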
diff --git a/test cases/unit/78 wrap-git/subprojects/packagefiles/wrap_git_builddef/meson.build b/test cases/unit/81 wrap-git/subprojects/packagefiles/wrap_git_builddef/meson.build
index 2570f77..2570f77 100644
--- a/test cases/unit/78 wrap-git/subprojects/packagefiles/wrap_git_builddef/meson.build
+++ b/test cases/unit/81 wrap-git/subprojects/packagefiles/wrap_git_builddef/meson.build
diff --git a/test cases/unit/78 wrap-git/subprojects/wrap_git_upstream/main.c b/test cases/unit/81 wrap-git/subprojects/wrap_git_upstream/main.c
index 8488f4e..8488f4e 100644
--- a/test cases/unit/78 wrap-git/subprojects/wrap_git_upstream/main.c
+++ b/test cases/unit/81 wrap-git/subprojects/wrap_git_upstream/main.c
diff --git a/test cases/vala/1 basic/meson.build b/test cases/vala/1 basic/meson.build
index fe422e1..d1bfabd 100644
--- a/test cases/vala/1 basic/meson.build
+++ b/test cases/vala/1 basic/meson.build
@@ -1,4 +1,5 @@
-project('valatest', 'vala', 'c')
+# Languages are case insensitive; check here that a capital 'C' works too.
+project('valatest', 'vala', 'C')

 valadeps = [dependency('glib-2.0'), dependency('gobject-2.0')]

diff --git a/test cases/warning/2 languages missing native/meson.build b/test cases/warning/2 languages missing native/meson.build
index f4aa956..e204715 100644
--- a/test cases/warning/2 languages missing native/meson.build
+++ b/test cases/warning/2 languages missing native/meson.build
@@ -1,2 +1,3 @@
-project('languages missing native')
+project('languages missing native',
+  meson_version : '>= 0.54')
 add_languages('c')
diff --git a/test cases/warning/2 languages missing native/test.json b/test cases/warning/2 languages missing native/test.json
index 36da0a7..f929654 100644
--- a/test cases/warning/2 languages missing native/test.json
+++ b/test cases/warning/2 languages missing native/test.json
@@ -1,7 +1,7 @@
 {
   "stdout": [
     {
-      "line": "test cases/warning/2 languages missing native/meson.build:2: WARNING: add_languages is missing native:, assuming languages are wanted for both host and build."
+      "line": "test cases/warning/2 languages missing native/meson.build:3: WARNING: add_languages is missing native:, assuming languages are wanted for both host and build."
     }
   ]
 }
diff --git a/test cases/windows/17 msvc ndebug/main.cpp b/test cases/windows/17 msvc ndebug/main.cpp
new file mode 100644
index 0000000..d647d71
--- /dev/null
+++ b/test cases/windows/17 msvc ndebug/main.cpp
@@ -0,0 +1,9 @@
+int main() {
+#ifdef NDEBUG
+    // NDEBUG is defined
+    return 0;
+#else
+    // NDEBUG is not defined
+    return 1;
+#endif
+}
\ No newline at end of file
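The new Windows test pairs main.cpp above with the build file below: with b_ndebug=true the compiler sees an NDEBUG define, so the program returns 0 only if the option really took effect. For reference, the base option also accepts false and if-release; a hedged sketch of the if-release variant (project name is illustrative, behaviour as described in the base-option docs):

    # NDEBUG is expected to be defined only for release-style buildtypes.
    project('ndebug demo', 'cpp',
      default_options : [ 'b_ndebug=if-release' ])

    exe = executable('demo', 'main.cpp')
    test('asserts disabled in release', exe)
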
diff --git a/test cases/windows/17 msvc ndebug/meson.build b/test cases/windows/17 msvc ndebug/meson.build
new file mode 100644
index 0000000..78eaf89
--- /dev/null
+++ b/test cases/windows/17 msvc ndebug/meson.build
@@ -0,0 +1,7 @@
+project('msvc_ndebug', 'cpp',
+  default_options : [ 'b_ndebug=true' ]
+)
+
+exe = executable('exe', 'main.cpp')
+
+test('ndebug', exe)
diff --git a/tools/gen_data.py b/tools/gen_data.py
new file mode 100755
index 0000000..2cc05a4
--- /dev/null
+++ b/tools/gen_data.py
@@ -0,0 +1,139 @@
+#!/usr/bin/env python3
+
+# Copyright 2020 Daniel Mensinger
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import sys
+import hashlib
+import textwrap
+import re
+from pathlib import Path
+from datetime import datetime
+import typing as T
+
+class DataFile:
+    file_counter = 0
+
+    def __init__(self, path: Path, root: Path):
+        self.path = path
+        self.id = self.path.relative_to(root)
+        self.data_str = f'file_{DataFile.file_counter}_data_' + re.sub('[^a-zA-Z0-9]', '_', self.path.name)
+        DataFile.file_counter += 1
+
+        b = self.path.read_bytes()
+        self.data = b.decode()
+        self.sha256sum = hashlib.sha256(b).hexdigest()
+
+    def __repr__(self) -> str:
+        return f'<{type(self).__name__}: [{self.sha256sum}] {self.id}>'
+
+def main() -> int:
+    root_dir = Path(__file__).resolve().parents[1]
+    mesonbuild_dir = root_dir / 'mesonbuild'
+    out_file = mesonbuild_dir / 'mesondata.py'
+
+    data_dirs = mesonbuild_dir.glob('**/data')
+
+    data_files: T.List[DataFile] = []
+
+    for d in data_dirs:
+        for p in d.iterdir():
+            data_files += [DataFile(p, mesonbuild_dir)]
+
+    print(f'Found {len(data_files)} data files')
+
+    # Generate the data script
+    data = ''
+
+    data += textwrap.dedent(f'''\
+        # Copyright {datetime.today().year} The Meson development team
+
+        # Licensed under the Apache License, Version 2.0 (the "License");
+        # you may not use this file except in compliance with the License.
+        # You may obtain a copy of the License at
+
+        #     http://www.apache.org/licenses/LICENSE-2.0
+
+        # Unless required by applicable law or agreed to in writing, software
+        # distributed under the License is distributed on an "AS IS" BASIS,
+        # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+        # See the License for the specific language governing permissions and
+        # limitations under the License.
+
+
+        ####
+        #### WARNING: This is an automatically generated file! Do not edit!
+        #### Generated by {Path(__file__).resolve().relative_to(root_dir)}
+        ####
+
+
+        from pathlib import Path
+        import typing as T
+
+        if T.TYPE_CHECKING:
+            from .environment import Environment
+
+        ######################
+        # BEGIN Data section #
+        ######################
+
+    ''')
+
+    for i in data_files:
+        data += f"{i.data_str} = '''\\\n{i.data}'''\n\n"
+
+    data += textwrap.dedent(f'''
+        ####################
+        # END Data section #
+        ####################
+
+        class DataFile:
+            def __init__(self, path: Path, sha256sum: str, data: str) -> None:
+                self.path = path
+                self.sha256sum = sha256sum
+                self.data = data
+
+            def write_once(self, path: Path) -> None:
+                if not path.exists():
+                    path.write_text(self.data)
+
+            def write_to_private(self, env: 'Environment') -> Path:
+                out_file = Path(env.scratch_dir) / 'data' / self.path.name
+                out_file.parent.mkdir(exist_ok=True)
+                self.write_once(out_file)
+                return out_file
+
+
+        mesondata = {{
+    ''')
+
+    for i in data_files:
+        data += textwrap.indent(textwrap.dedent(f"""\
+            '{i.id}': DataFile(
+                Path('{i.id}'),
+                '{i.sha256sum}',
+                {i.data_str},
+            ),
+        """), '    ')
+
+    data += textwrap.dedent('''\
+        }
+    ''')
+
+    print(f'Updating {out_file}')
+    out_file.write_text(data)
+    return 0
+
+if __name__ == '__main__':
+    sys.exit(main())