94 files changed, 1626 insertions, 750 deletions
diff --git a/.gitattributes b/.gitattributes index de66d2b..a85dd10 100644 --- a/.gitattributes +++ b/.gitattributes @@ -1,3 +1,4 @@ .gitignore export-ignore .gitattributes export-ignore - +* text eol=lf +*.png binary @@ -9,6 +9,7 @@ build system. [](https://pypi.python.org/pypi/meson) [](https://travis-ci.org/mesonbuild/meson) [](https://ci.appveyor.com/project/mesonbuild/meson) +[)](https://dev.azure.com/jussi0947/jussi/_build/latest?definitionId=2) [](https://codecov.io/gh/mesonbuild/meson/branch/master) [](https://lgtm.com/projects/g/mesonbuild/meson/context:python) [](https://lgtm.com/projects/g/mesonbuild/meson/alerts) diff --git a/azure-pipelines.yml b/azure-pipelines.yml new file mode 100644 index 0000000..8c36abd --- /dev/null +++ b/azure-pipelines.yml @@ -0,0 +1,49 @@ +name: $(BuildID) + +trigger: + branches: + include: + - 'master' + +variables: + MESON_FIXED_NINJA: 1 + CI: 1 + +jobs: +#- job: vs2015 +# pool: +# vmImage: vs2015-win2012r2 +# +# strategy: +# maxParallel: 10 +# matrix: +# vc2015x86ninja: +# arch: x86 +# compiler: msvc2015 +# backend: ninja +# vc2015x86vs: +# arch: x86 +# compiler: msvc2015 +# backend: vs2015 +# +# steps: +# - template: ci/azure-steps.yml + +- job: vs2017 + pool: + vmImage: VS2017-Win2016 + + strategy: + maxParallel: 10 + matrix: + vc2017x64ninja: + arch: x64 + compiler: msvc2017 + backend: ninja + vc2017x64vs: + arch: x64 + compiler: msvc2017 + backend: vs2017 + + steps: + - template: ci/azure-steps.yml diff --git a/ci/azure-steps.yml b/ci/azure-steps.yml new file mode 100644 index 0000000..5c4a058 --- /dev/null +++ b/ci/azure-steps.yml @@ -0,0 +1,96 @@ +steps: +- powershell: | + # test_find_program exercises some behaviour which relies on .py being in PATHEXT + $env:PATHEXT += ';.py' + + where.exe python + + python ./skip_ci.py --base-branch-env=SYSTEM_PULLREQUEST_TARGETBRANCH --is-pull-env=SYSTEM_PULLREQUEST_PULLREQUESTID --base-branch-origin + if ($LastExitCode -ne 0) { + throw ('error in skip_ci.py') + } + + # remove MinGW from path, so we don't find gfortran and try to use it + $env:Path = ($env:Path.Split(';') | Where-Object { $_ -notlike '*mingw*' }) -join ';' + + # download and install prerequisites + function DownloadFile([String] $Source, [String] $Destination) { + $retries = 10 + for ($i = 1; $i -le $retries; $i++) { + try { + (New-Object net.webclient).DownloadFile($Source, $Destination) + break # succeeded + } catch [net.WebException] { + if ($i -eq $retries) { + throw # fail on last retry + } + $backoff = (10 * $i) # backoff 10s, 20s, 30s... + echo ('{0}: {1}' -f $Source, $_.Exception.Message) + echo ('Retrying in {0}s...' 
-f $backoff) + Start-Sleep -m ($backoff * 1000) + } + } + } + + DownloadFile -Source 'https://github.com/mesonbuild/cidata/raw/master/ninja.exe' -Destination $(System.WorkFolder)\ninja.exe + DownloadFile -Source 'http://nirbheek.in/files/binaries/pkg-config/win32/pkg-config.exe' -Destination $(System.WorkFolder)\pkg-config.exe + DownloadFile -Source 'https://download.microsoft.com/download/D/B/B/DBB64BA1-7B51-43DB-8BF1-D1FB45EACF7A/msmpisdk.msi' -Destination msmpisdk.msi + DownloadFile -Source 'https://download.microsoft.com/download/D/B/B/DBB64BA1-7B51-43DB-8BF1-D1FB45EACF7A/MSMpiSetup.exe' -Destination MSMpiSetup.exe + Start-Process msiexec.exe -ArgumentList '/i msmpisdk.msi /quiet' -Wait + Start-Process .\MSMpiSetup.exe -ArgumentList '-unattend -full' -Wait + + # import ms-mpi env vars (set by installer) + foreach ($p in "MSMPI_INC", "MSMPI_LIB32", "MSMPI_LIB64") { + $v = [Environment]::GetEnvironmentVariable($p, "Machine") + Set-Content "env:$p" "$v" + } + + # add downloads to PATH + $env:Path = "$env:SYSTEM_WORKFOLDER;$env:Path" + + # import visual studio variables + Set-PSRepository -Name PSGallery -InstallationPolicy Trusted + Install-Module Pscx -Scope CurrentUser -AllowClobber + Install-Module VSSetup -Scope CurrentUser + $vsver = $env:compiler.Replace('msvc', '') + Import-VisualStudioVars -VisualStudioVersion $vsver -Architecture $(arch) + + # add .NET framework tools to path for resgen for C# tests + # (always use 32-bit tool, as there doesn't seem to be a 64-bit tool) + if ((Get-Command "resgen.exe" -ErrorAction SilentlyContinue) -eq $null) { + $env:Path = "$env:WindowsSDK_ExecutablePath_x86;$env:Path" + } + + if ($env:backend -eq 'ninja') { + ninja --version + } else { + MSBuild /version + } + + python run_tests.py --backend $(backend) + + echo "##vso[task.setvariable variable=test_status]$LastExitCode" + + continueOnError: true + +- task: PublishTestResults@2 + inputs: + testResultsFiles: meson-test-run.xml + testRunTitle: $(System.JobName) + publishRunAttachments: true + +- task: CopyFiles@2 + inputs: + contents: 'meson-test-run.*' + targetFolder: $(Build.ArtifactStagingDirectory) + +- task: PublishBuildArtifacts@1 + inputs: + artifactName: $(System.JobName) + # publishing artifacts from PRs from a fork is currently blocked + condition: eq(variables['system.pullrequest.isfork'], false) + +- powershell: | + # after publishing test results, even if some failed + # exit with the test status + exit $(test_status) diff --git a/docs/markdown/Adding-arguments.md b/docs/markdown/Adding-arguments.md index 117622b..8dd8488 100644 --- a/docs/markdown/Adding-arguments.md +++ b/docs/markdown/Adding-arguments.md @@ -37,6 +37,19 @@ You should set only the most essential flags with this setting, you should *not* set debug or optimization flags. Instead they should be specified by selecting an appropriate build type. +Project arguments +-- + +Project arguments work similar to global arguments except that they +are valid only within the current subproject. The usage is simple: + +```meson +add_project_arguments('-DMYPROJ=projname', language : 'c') +``` + +This would add the compiler flags to all C sources in the current +project. 
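As a hedged aside, the `language` keyword also accepts a list, so a single call can cover several languages; the flag below is just the placeholder from the example above:

```meson
# Applies only to C and C++ sources of the current (sub)project; like the
# global variant, it must be called before any build targets are declared.
add_project_arguments('-DMYPROJ=projname', language : ['c', 'cpp'])
```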
+ Per target arguments -- diff --git a/docs/markdown/Adding-new-projects-to-wrapdb.md b/docs/markdown/Adding-new-projects-to-wrapdb.md index 832f803..c79cfd3 100644 --- a/docs/markdown/Adding-new-projects-to-wrapdb.md +++ b/docs/markdown/Adding-new-projects-to-wrapdb.md @@ -1,84 +1,120 @@ -# Adding new projects to wrap +# Adding new projects to WrapDB -**If you don't have permissions to do something on this page, please - open issue against https://github.com/mesonbuild/wrapweb/issues to - let us know that you want to start new project.** -## Overview +## How it works -The wrap provider service is a simple web service that makes it easy -to download build definitions for projects. It works in much the same -way as Debian: we take the unaltered upstream source package and add a -new build system to it as a patch. These build systems are stored as -Git repositories on GitHub. They only contain build definition -files. You may also think of them as an overlay to upstream source. +Each wrap repository has a master branch with only one initial commit and *no* wrap files. +And that is the only commit ever made on that branch. -## Creator script +For every release of a project a new branch is created. The new branch is named after the +the upstream release number (e.g. `1.0.0`). This branch holds a wrap file for +this particular release. + +There are two types of wraps on WrapDB - regular wraps and wraps with Meson build +definition patches. A wrap file in a repository on WrapDB must have a name `upstream.wrap`. + +Wraps with Meson build definition patches work in much the same way as Debian: we take the unaltered upstream source package and add a new build system to it as a patch. These build systems are stored as Git repositories on GitHub. They only contain build definition files. You may also think of them as an overlay to upstream source. + +Whenever a new commit is pushed into GitHub's project branch, a new wrap is generated +with an incremented version number. All the old releases remain unaltered. +New commits are always done via GitHub merge requests and must be reviewed by +someone other than the submitter. -The WrapDB repository has a [helper -script](https://github.com/mesonbuild/wrapweb/blob/master/mesonwrap.py) -to generate new repositories, verify them and update them. The documentation below roughly explains -what it does to create a new wrap repository using plain shell commands. +Note that your Git repo with wrap must not contain the subdirectory of the source +release. That gets added automatically by the service. You also must not commit +any source code from the original tarball into the wrap repository. ## Choosing the repository name Wrapped subprojects are used much like external dependencies. Thus -they should have the same name as the upstream projects. If the -project provides a pkg-config file, then the repository name should be +they should have the same name as the upstream projects. + +If the project provides a pkg-config file, then the repository name should be the same as the pkg-config name. Usually this is the name of the project, such as `libpng`. Sometimes it is slightly different, however. As an example the libogg project's chosen pkg-config name is `ogg` instead of `libogg`, which is the reason why the repository is named plain `ogg`. -## Adding new project to the Wrap provider service +If there is no a pkg-config file, the name the project uses/promotes should be used, +lowercase only (Catch2 -> catch2). -Each project gets its own repo. 
It is initialized like this: +## How to contribute a new wrap - git init - git add readme.txt - git add LICENSE.build - git commit -a -m 'Create project foobar' - git remote add origin <repo url> - git push -u origin master +If the project already uses Meson build system, then only a wrap file - `upstream.wrap` +should be provided. In other case a Meson build definition patch - a set of `meson.build` +files - should be also provided. -Note that this is the *only* commit that will ever be made to master branch. All other commits are done to branches. +### Request a new repository or branch -Repo names must fully match this regexp: `[a-z0-9._]+`. +Create an issue on the [wrapweb bug tracker](https://github.com/mesonbuild/wrapweb/issues) +using *Title* and *Description* below as a template. -## Adding a new branch to an existing project +*Title:* `new wrap: <project_name>` -Create a new branch whose name matches the upstream release number. +*Description:* +``` +upstream url: <link_to_updastream> +version: <version_you_have_a_wrap_for> +``` - git checkout master - git checkout -b 1.0.0 - git push origin 1.0.0 - (or from GitHub web page, remember to branch from master) +Wait until the new repository or branch is created. A link to the new repository or branch +will be posted in a comment to this issue. -Branch names must fully match this regexp: `[a-z0-9._]+`. +### Add a new wrap -## Adding a new release to an existing branch +First you need to fork the repository to your own page. +Then you can create the first Wrap commit that usually looks something like this. -Here is where the magic happens. Whenever a new commit is pushed into GitHub's project branch, a new wrap is generated with an incremented version number. All the old releases remain unaltered. New commits are always done via GitHub merge requests and must be reviewed by someone other than the submitter. +``` +tar xzf libfoo-1.0.0.tar.gz +git clone -b 1.0.0 git@github.com:yourusername/libfoo.git tmpdir +mv tmpdir/.git libfoo-1.0.0 +rm -rf tmpdir +cd libfoo-1.0.0 +git reset --hard +emacs upstream.wrap meson.build +<verify that your project builds and runs> +git add upstream.wrap meson.build +git commit -a -m 'Add wrap files for libfoo-1.0.0' +git push origin 1.0.0 +``` -Note that your Git repo must *not* contain the subdirectory of the source release. That gets added automatically by the service. You also must *not* commit any source code from the original tarball into the wrap repository. +Now you should create a pull request on GitHub. Remember to create it against the +correct branch rather than master (`1.0.0` branch in this example). GitHub should do +this automatically. -First you need to fork the repository to your own page. Then you can create the first Wrap commit that usually looks something like this. +## What is done by WrapDB maintainers - tar xzf libfoo_1.0.0.tar.gz - git clone -b 1.0.0 git@github.com:yourusername/libfoo.git tmpdir - mv tmpdir/.git libfoo-1.0.0 - rm -rf tmpdir - cd libfoo-1.0.0 - git reset --hard - emacs upstream.wrap meson.build - <verify that your project builds and runs> - git add upstream.wrap meson.build - git commit -a -m 'Create wrap files for libfoo-1.0.0.' - git push origin 1.0.0 +### Adding new project to the Wrap provider service -Now you can file a merge request. Remember to file it against branch -1.0.0 rather than master. GitHub should do this automatically. +Each project gets its own repo. 
It is initialized like this: + +``` +git init +git add readme.txt +git add LICENSE.build +git commit -a -m 'Create project foobar' +git remote add origin <repo url> +git push -u origin master +``` + +Note that this is the *only* commit that will ever be made to master branch. All other commits are done to branches. + +Repo names must fully match this regexp: `[a-z0-9._]+`. + +### Adding a new branch to an existing project + +Create a new branch whose name matches the upstream release number. + +``` +git checkout master +git checkout -b 1.0.0 +git push origin 1.0.0 +(or from GitHub web page, remember to branch from master) +``` + +Branch names must fully match this regexp: `[a-z0-9._]+`. ## Changes to original source @@ -93,6 +129,12 @@ to functionality. All such changes must be submitted to upstream. You may also host your own Git repo with the changes if you wish. The Wrap system has native support for Git subprojects. +## Creator script + +The WrapDB repository has a +[helper script](https://github.com/mesonbuild/wrapweb/blob/master/mesonwrap.py) +to generate new repositories, verify them and update them. + ## Reviewing wraps See [Wrap review guidelines](Wrap-review-guidelines.md). diff --git a/docs/markdown/Builtin-options.md b/docs/markdown/Builtin-options.md index 55d82a5..ce14304 100644 --- a/docs/markdown/Builtin-options.md +++ b/docs/markdown/Builtin-options.md @@ -82,6 +82,7 @@ platforms or with all compilers: | b_pgo | off | off, generate, use | Use profile guided optimization | | b_sanitize | none | see below | Code sanitizer to use | | b_staticpic | true | true, false | Build static libraries as position independent | +| b_pie | false | true, false | Build position-independent executables (since 0.49.0)| The value of `b_sanitize` can be one of: `none`, `address`, `thread`, `undefined`, `memory`, `address,undefined`. diff --git a/docs/markdown/Configuration.md b/docs/markdown/Configuration.md index 8b79bc6..cd1af14 100644 --- a/docs/markdown/Configuration.md +++ b/docs/markdown/Configuration.md @@ -121,6 +121,38 @@ you to specify which file encoding to use. It is however strongly advised to convert your non utf-8 file to utf-8 whenever possible. Supported file encodings are those of python3, see [standard-encodings](https://docs.python.org/3/library/codecs.html#standard-encodings). +## Using dictionaries + +Since *0.49.0* `configuration_data()` takes an optional dictionary as first +argument. If provided, each key/value pair is added into the +`configuration_data` as if `set()` method was called for each of them. +`configure_file()`'s `configuration` kwarg also accepts a dictionary instead of +a configuration_data object. + +Example: +```meson +cdata = configuration_data({ + 'STRING' : '"foo"', + 'INT' : 42, + 'DEFINED' : true, + 'UNDEFINED' : false, +}) + +configure_file(output : 'config1.h', + configuration : cdata, +) + +configure_file(output : 'config2.h', + configuration : { + 'STRING' : '"foo"', + 'INT' : 42, + 'DEFINED' : true, + 'UNDEFINED' : false, + } +) + +``` + # A full example Generating and using a configuration file requires the following steps: diff --git a/docs/markdown/Installing.md b/docs/markdown/Installing.md index 8348d4a..1aa444a 100644 --- a/docs/markdown/Installing.md +++ b/docs/markdown/Installing.md @@ -26,7 +26,7 @@ Other install commands are the following. 
```meson install_headers('header.h', subdir : 'projname') # -> include/projname/header.h -install_man('foo.1') # -> share/man/man1/foo.1.gz +install_man('foo.1') # -> share/man/man1/foo.1 install_data('datafile.dat', install_dir : join_paths(get_option('datadir'), 'progname')) # -> share/progname/datafile.dat ``` diff --git a/docs/markdown/Python-module.md b/docs/markdown/Python-module.md index 2bcad78..93005ea 100644 --- a/docs/markdown/Python-module.md +++ b/docs/markdown/Python-module.md @@ -48,6 +48,9 @@ Keyword arguments are the following: whether it was found or not. Since *0.48.0* the value of a [`feature`](Build-options.md#features) option can also be passed to the `required` keyword argument. +- `disabler`: if `true` and no python installation can be found, return a + [disabler object](#disabler-object) instead of a not-found object. + *Since 0.49.0* **Returns**: a [python installation][`python_installation` object] diff --git a/docs/markdown/Qt5-module.md b/docs/markdown/Qt5-module.md index 9542a81..3a51954 100644 --- a/docs/markdown/Qt5-module.md +++ b/docs/markdown/Qt5-module.md @@ -10,6 +10,7 @@ This method takes the following keyword arguments: - `include_directories`, the directories to add to header search path for `moc` (optional) - `moc_extra_arguments`, any additional arguments to `moc` (optional). Available since v0.44.0. - `uic_extra_arguments`, any additional arguments to `uic` (optional). Available since v0.49.0. + - `rcc_extra_arguments`, any additional arguments to `rcc` (optional). Available since v0.49.0. - `dependencies`, dependency objects needed by moc. Available since v0.48.0. It returns an opaque object that should be passed to a main build target. diff --git a/docs/markdown/Reference-manual.md b/docs/markdown/Reference-manual.md index 0fb9f17..7a63d32 100644 --- a/docs/markdown/Reference-manual.md +++ b/docs/markdown/Reference-manual.md @@ -112,6 +112,14 @@ Note that all these options are also available while running the `meson test` script for running tests instead of `ninja test` or `msbuild RUN_TESTS.vcxproj`, etc depending on the backend. +### assert() + +``` meson + void assert(*condition*, *message*) +``` + +Abort with an error message if `condition` evaluates to `false`. + ### benchmark() ``` meson @@ -167,13 +175,17 @@ methods section](#build-target-object) below. ### configuration_data() ``` meson - configuration_data_object = configuration_data() + configuration_data_object = configuration_data(...) ``` Creates an empty configuration object. You should add your configuration with [its method calls](#configuration-data-object) and finally use it in a call to `configure_file`. +Since *0.49.0* takes an optional dictionary as first argument. If provided, each +key/value pair is added into the `configuration_data` as if `set()` method was +called for each of them. + ### configure_file() ``` meson @@ -187,7 +199,8 @@ When a [`configuration_data()`](#configuration_data) object is passed to the `configuration:` keyword argument, it takes a template file as the `input:` (optional) and produces the `output:` (required) by substituting values from the configuration data as detailed in [the -configuration file documentation](Configuration.md). +configuration file documentation](Configuration.md). Since *0.49.0* a dictionary +can be passed instead of a [`configuration_data()`](#configuration_data) object. 
When a list of strings is passed to the `command:` keyword argument, it takes any source or configured file as the `input:` and assumes @@ -395,6 +408,9 @@ otherwise. This function supports the following keyword arguments: [library-specific](Dependencies.md#dependencies-with-custom-lookup-functionality) keywords may also be accepted (e.g. `modules` specifies submodules to use for dependencies such as Qt5 or Boost. ) +- `disabler` if `true` and the dependency couldn't be found, return a + [disabler object](#disabler-object) instead of a not-found dependency. + *Since 0.49.0* If dependency_name is `''`, the dependency is always not found. So with `required: false`, this always returns a dependency object for which the @@ -539,6 +555,7 @@ be passed to [shared and static libraries](#library). - `d_unittest`, when set to true, the D modules are compiled in debug mode - `d_module_versions` list of module version identifiers set when compiling D sources - `d_debug` list of module debug identifiers set when compiling D sources +- `pie` *(added 0.49.0)* build a position-independent executable The list of `sources`, `objects`, and `dependencies` is always flattened, which means you can freely nest and add lists while @@ -586,6 +603,10 @@ Keyword arguments are the following: defined there, then from the system. If set to `true`, the cross file is ignored and the program is only searched from the system. +- `disabler` if `true` and the program couldn't be found, return a + [disabler object](#disabler-object) instead of a not-found object. + *Since 0.49.0* + Meson will also autodetect scripts with a shebang line and run them with the executable/interpreter specified in it both on Windows (because the command invocator will reject the command otherwise) and @@ -895,8 +916,7 @@ An example value could be `['rwxr-sr-x', 'root', 'root']`. Installs the specified man files from the source tree into system's man directory during the install step. This directory can be overridden by specifying it with the `install_dir` keyword -argument. All man pages are compressed during installation and -installed with a `.gz` suffix. +argument. The `install_mode` argument can be used to specify the file mode in symbolic format and optionally the owner/uid and group/gid for the installed files. @@ -1653,6 +1673,9 @@ the following methods: argument, which can be either a string or a list of strings. Since *0.47.0* the value of a [`feature`](Build-options.md#features) option can also be passed to the `required` keyword argument. + *Since 0.49.0* if the keyword argument `disabler` is `true` and the + dependency couldn't be found, return a [disabler object](#disabler-object) + instead of a not-found dependency. - `first_supported_argument(list_of_strings)`, given a list of strings, returns the first argument that passes the `has_argument` @@ -1781,7 +1804,7 @@ The following keyword arguments can be used: required to find the header or symbol. For example, you might need to pass the include path `-Isome/path/to/header` if a header is not in the default include path. In versions newer than 0.38.0 you - should use the `include_directories` keyword described above. You + should use the `include_directories` keyword described below. You may also want to pass a library name `-lfoo` for `has_function` to check for a function. Supported by all methods except `get_id`, `version`, and `find_library`. @@ -1794,6 +1817,9 @@ The following keyword arguments can be used: `run()`. 
If this keyword argument is not passed to those methods, no message will be printed about the check. +- `no_builtin_args` when set to true, the compiler arguments controlled + by built-in configuration options are not added. + - `prefix` can be used to add #includes and other things that are required for the symbol to be declared. System definitions should be passed via compiler args (eg: `_GNU_SOURCE` is often required for @@ -1802,7 +1828,7 @@ The following keyword arguments can be used: `sizeof`, `has_type`, `has_function`, `has_member`, `has_members`, `check_header`, `has_header`, `has_header_symbol`. -**Note:** These compiler checks do not use compiler arguments added with +**Note:** These compiler checks do not use compiler arguments added with `add_*_arguments()`, via `-Dlang_args` on the command-line, or through `CFLAGS`/`LDFLAGS`, etc in the environment. Hence, you can trust that the tests will be fully self-contained, and won't fail because of custom diff --git a/docs/markdown/Reference-tables.md b/docs/markdown/Reference-tables.md index 39ec1cd..4657e17 100644 --- a/docs/markdown/Reference-tables.md +++ b/docs/markdown/Reference-tables.md @@ -84,6 +84,7 @@ These are provided by the `.system()` method call. | freebsd | FreeBSD and its derivatives | | dragonfly | DragonFly BSD | | netbsd | | +| gnu | GNU Hurd | Any string not listed above is not guaranteed to remain stable in future releases. diff --git a/docs/markdown/Release-notes-for-0.47.0.md b/docs/markdown/Release-notes-for-0.47.0.md index 9736256..a427a72 100644 --- a/docs/markdown/Release-notes-for-0.47.0.md +++ b/docs/markdown/Release-notes-for-0.47.0.md @@ -34,11 +34,10 @@ The environment path should be set properly for the ARM compiler executables. The `--target`, `-mcpu` options with the appropriate values should be mentioned in the cross file as shown in the snippet below. -``` +```ini [properties] c_args = ['--target=arm-arm-none-eabi', '-mcpu=cortex-m0plus'] cpp_args = ['--target=arm-arm-none-eabi', '-mcpu=cortex-m0plus'] - ``` Note: @@ -106,7 +105,7 @@ When called without an input file, `configure_file` generates a C header file by default. A keyword argument was added to allow specifying the output format, for example for use with nasm or yasm: -``` +```meson conf = configuration_data() conf.set('FOO', 1) diff --git a/docs/markdown/Release-notes-for-0.48.0.md b/docs/markdown/Release-notes-for-0.48.0.md index 1eb5488..99673a4 100644 --- a/docs/markdown/Release-notes-for-0.48.0.md +++ b/docs/markdown/Release-notes-for-0.48.0.md @@ -36,16 +36,15 @@ configuration. For example: project('configure_file', 'cpp') configure_file( - input: 'a.in', - output: 'out', - command: ['./foo.sh'] - ) + input: 'a.in', + output: 'out', + command: ['./foo.sh'] +) configure_file( input: 'a.in', output: 'out', command: ['./foo.sh'] ) - ``` This will output: @@ -231,15 +230,15 @@ namely `add_global_arguments`, `add_global_link_arguments`, `add_project_arguments` and `add_project_link_arguments` that behaves like the following: -``` -## Added to native builds when compiling natively and to cross -## compilations when doing cross compiles. +```meson +# Added to native builds when compiling natively and to cross +# compilations when doing cross compiles. add_project_arguments(...) -## Added only to native compilations, not used in cross compilations. +# Added only to native compilations, not used in cross compilations. add_project_arguments(..., native : true) -## Added only to cross compilations, not used in native compilations. 
+# Added only to cross compilations, not used in native compilations. add_project_arguments(..., native : false) ``` diff --git a/docs/markdown/Subprojects.md b/docs/markdown/Subprojects.md index 80ed3e7..2e3e2ea 100644 --- a/docs/markdown/Subprojects.md +++ b/docs/markdown/Subprojects.md @@ -14,45 +14,46 @@ Meson tries to solve this problem by making it extremely easy to provide both at the same time. The way this is done is that Meson allows you to take any other Meson project and make it a part of your build without (in the best case) any changes to its Meson setup. It -becomes a transparent part of the project. The basic idiom goes -something like this. +becomes a transparent part of the project. -```meson -dep = dependency('foo', fallback : [subproject_name, variable_name]) -``` +It should be noted that this only works for subprojects that are built +with Meson. It can not be used with any other build system. The reason +is the simple fact that there is no possible way to do this reliably +with mixed build systems. + +## A subproject example + +Usually dependencies consist of some header files plus a library to link against. +To declare this internal dependency use `declare_dependency` function. As an example, suppose we have a simple project that provides a shared -library. It would be set up like this. +library. It's `meson.build` would look like this. ```meson -project('simple', 'c') -i = include_directories('include') -l = shared_library('simple', 'simple.c', include_directories : i, install : true) -simple_dep = declare_dependency(include_directories : i, - link_with : l) -``` +project('libsimple', 'c') -Then we could use that from a master project. First we generate a -subdirectory called `subprojects` in the root of the master -directory. Then we create a subdirectory called `simple` and put the -subproject in that directory. Now the subproject can be used like -this. +inc = include_directories('include') +libsimple = shared_library('simple', + 'simple.c', + include_directories : inc, + install : true) -```meson -project('master', 'c') -dep = dependency('simple', fallback : ['simple', 'simple_dep']) -exe = executable('prog', 'prog.c', - dependencies : dep, install : true) +libsimple_dep = declare_dependency(include_directories : inc, + link_with : libsimple) ``` -With this setup the system dependency is used when it is available, -otherwise we fall back on the bundled version. If you wish to always -use the embedded version, then you would declare it like this: +### Naming convention for dependency variables -```meson -simple_sp = subproject('simple') -dep = simple_sp.get_variable('simple_dep') -``` +Ideally the dependency variable name should be of `<project_name>_dep` form. +This way one can just use it without even looking inside build definitions of that subproject. + +In cases where there are multiple dependencies need to be declared, the default one +should be named as `<project_name>_dep` (e.g. `gtest_dep`), and others can have +`<project_name>_<other>_<name>_dep` form (e.g. `gtest_main_dep` - gtest with main function). + +There may be exceptions to these rules where common sense should be applied. + +### Build options in subproject All Meson features of the subproject, such as project options keep working and can be set in the master project. There are a few @@ -62,17 +63,111 @@ must not set global arguments because there is no way to do that reliably over multiple subprojects. To check whether you are running as a subproject, use the `is_subproject` function. 
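A minimal sketch of that check (the define is a placeholder):

```meson
# Only add global arguments when built stand-alone; inside a subproject
# this block is skipped, which is what the rule above requires.
if not meson.is_subproject()
  add_global_arguments('-DSTANDALONE_BUILD', language : 'c')
endif
```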
-It should be noted that this only works for subprojects that are built -with Meson. It can not be used with any other build system. The reason -is the simple fact that there is no possible way to do this reliably -with mixed build systems. +## Using a subproject + +All subprojects must be inside `subprojects` directory. +The `subprojects` directory must be at the top level of your project. +Subproject declaration must be in your top level `meson.build`. + +### A simple example + +Let's use `libsimple` as a subproject. + +At the top level of your project create `subprojects` directory. +Then copy `libsimple` into `subprojects` directory. + +Your project's `meson.build` should look like this. + +```meson +project('my_project', 'cpp') + +libsimple_proj = subproject('libsimple') +libsimple_dep = libsimple_proj.get_variable('libsimple_dep') + +executable('my_project', + 'my_project.cpp', + dependencies : libsimple_dep, + install : true) +``` + +Note that the subproject object is *not* used as the dependency, but +rather you need to get the declared dependency from it with +`get_variable` because a subproject may have multiple declared +dependencies. + +### Toggling between system libraries and embedded sources + +When building distro packages it is very important that you do not +embed any sources. Some distros have a rule forbidding embedded +dependencies so your project must be buildable without them or +otherwise the packager will hate you. + +Here's how you would use system libraries and fall back to embedding sources +if the dependency is not available. + +```meson +project('my_project', 'cpp') + +libsimple_dep = dependency('libsimple', required : false) + +if not libsimple_dep.found() + libsimple_proj = subproject('libsimple') + libsimple_dep = libsimple_proj.get_variable('libsimple_dep') +endif + +executable('my_project', + 'my_project.cpp', + dependencies : libsimple_dep, + install : true) +``` + +Because this is such a common operation, Meson provides a shortcut for +this use case. + +```meson +dep = dependency('foo', fallback : [subproject_name, variable_name]) +``` + +The `fallback` keyword argument takes two items, the name of the +subproject and the name of the variable that holds the dependency. If +you need to do something more complicated, such as extract several +different variables, then you need to do it yourself with the manual +method described above. + +Using this shortcut the build definition would look like this. + +```meson +project('my_project', 'cpp') + +libsimple_dep = dependency('libsimple', fallback : ['libsimple', 'libsimple_dep']) + +executable('my_project', + 'my_project.cpp', + dependencies : libsimple_dep, + install : true) +``` + +With this setup when libsimple is provided by the system, we use it. When +that is not the case we use the embedded version (the one from subprojects). + +Note that `libsimple_dep` can point to an external or an internal dependency but +you don't have to worry about their differences. Meson will take care +of the details for you. + +### Subprojects depending on other subprojects Subprojects can use other subprojects, but all subprojects must reside in the top level `subprojects` directory. Recursive use of subprojects is not allowed, though, so you can't have subproject `a` that uses subproject `b` and have `b` also use `a`. -# Command-line options +## Obtaining subprojects + +Meson ships with a dependency system to automatically obtain +dependency subprojects. 
It is documented in the [Wrap dependency +system manual](Wrap-dependency-system-manual.md). + +## Command-line options The usage of subprojects can be controlled by users and distros with the following command-line options: @@ -101,13 +196,7 @@ the following command-line options: want to specifically build against the library sources provided by your subprojects. -# Obtaining subprojects - -Meson ships with a dependency system to automatically obtain -dependency subprojects. It is documented in the [Wrap dependency -system manual](Wrap-dependency-system-manual.md). - -# Why must all subprojects be inside a single directory? +## Why must all subprojects be inside a single directory? There are several reasons. diff --git a/docs/markdown/Syntax.md b/docs/markdown/Syntax.md index ff4c142..22b8be3 100644 --- a/docs/markdown/Syntax.md +++ b/docs/markdown/Syntax.md @@ -283,6 +283,17 @@ Note appending to an array will always create a new array object and assign it to `my_array` instead of modifying the original since all objects in Meson are immutable. +Since 0.49.0, you can check if an array contains an element like this: +```meson +my_array = [1, 2] +if 1 in my_array +# This condition is true +endif +if 1 not in my_array +# This condition is false +endif +``` + #### Array methods The following methods are defined for all arrays: @@ -316,6 +327,20 @@ Dictionaries are available since 0.47.0. Visit the [Reference Manual](Reference-manual.md#dictionary-object) to read about the methods exposed by dictionaries. +Since 0.49.0, you can check if a dictionary contains a key like this: +```meson +my_dict = {'foo': 42, 'foo': 43} +if 'foo' in my_dict +# This condition is true +endif +if 42 in my_dict +# This condition is false +endif +if 'foo' not in my_dict +# This condition is false +endif +``` + Function calls -- @@ -432,6 +457,24 @@ foreach name, sources : components endforeach ``` +### Foreach `break` and `continue` + +Since 0.49.0 `break` and `continue` keywords can be used inside foreach loops. + +```meson +items = ['a', 'continue', 'b', 'break', 'c'] +result = [] +foreach i : items + if i == 'continue' + continue + elif i == 'break' + break + endif + result += i +endforeach +# result is ['a', 'b'] +``` + Comments -- diff --git a/docs/markdown/Tutorial.md b/docs/markdown/Tutorial.md index c555abc..c4436d0 100644 --- a/docs/markdown/Tutorial.md +++ b/docs/markdown/Tutorial.md @@ -9,6 +9,19 @@ definition for a simple project. Then we expand it to use external dependencies to show how easily they can be integrated into your project. +This tutorial has been written mostly for Linux usage. It assumes that +you have GTK development libraries available on the system. On +Debian-derived systems such as Ubuntu they can be installed with the +following command: + +``` +sudo apt install libgtk-3-dev +``` + +It is possible to build the GUI application On other platforms such as +Windows and macOS but it requires for you to install the dependency +libraries using a dependendy provider of your choice. + The humble beginning ----- diff --git a/docs/markdown/Users.md b/docs/markdown/Users.md index d328b97..57257f9 100644 --- a/docs/markdown/Users.md +++ b/docs/markdown/Users.md @@ -9,13 +9,14 @@ listed in the [`meson` GitHub topic](https://github.com/topics/meson). 
- [AQEMU](https://github.com/tobimensch/aqemu), a Qt GUI for QEMU virtual machines, since version 0.9.3 - [Arduino sample project](https://github.com/jpakkane/mesonarduino) - - [bolt](https://gitlab.freedesktop.org/bolt/bolt) Userpsace daemon to enable security levels for Thunderboltâ„¢ 3 on Linux + - [bolt](https://gitlab.freedesktop.org/bolt/bolt) Userspace daemon to enable security levels for Thunderboltâ„¢ 3 on Linux - [Budgie Desktop](https://github.com/budgie-desktop/budgie-desktop), a desktop environment built on GNOME technologies - [casync](https://github.com/systemd/casync), Content-Addressable Data Synchronization Tool - [cinnamon-desktop](https://github.com/linuxmint/cinnamon-desktop), the cinnamon desktop library - [dbus-broker](https://github.com/bus1/dbus-broker), Linux D-Bus Message Broker - [Dpdk](http://dpdk.org/browse/dpdk), Data plane development kit, a set of libraries and drivers for fast packet processing - [DXVK](https://github.com/doitsujin/dxvk), a Vulkan-based Direct3D 11 implementation for Linux using Wine + - [elementary OS](https://github.com/elementary/), Linux desktop oriented distribution - [Emeus](https://github.com/ebassi/emeus), Constraint based layout manager for GTK+ - [ESP8266 Arduino sample project](https://github.com/trilader/arduino-esp8266-meson) Sample project for using the ESP8266 Arduino port with Meson - [Fractal](https://wiki.gnome.org/Apps/Fractal/), a Matrix messaging client for GNOME diff --git a/docs/markdown/Videos.md b/docs/markdown/Videos.md index d9ea34d..8146c6e 100644 --- a/docs/markdown/Videos.md +++ b/docs/markdown/Videos.md @@ -4,6 +4,10 @@ short-description: Videos about Meson # Videos + - [Compiling Multi-Million Line C++ Code Bases Effortlessly with the + Meson Build system](https://www.youtube.com/watch?v=SCZLnopmYBM), + CppCon 2018 + - [The Meson Build System, 4+ years of work to become an overnight success](https://www.youtube.com/watch?v=gHdTzdXkhRY), Linux.conf.au 2018 diff --git a/docs/markdown/Wrap-dependency-system-manual.md b/docs/markdown/Wrap-dependency-system-manual.md index 2e977b2..619492a 100644 --- a/docs/markdown/Wrap-dependency-system-manual.md +++ b/docs/markdown/Wrap-dependency-system-manual.md @@ -27,10 +27,24 @@ itself in a way that makes it easy to use (usually this means as a static library). To use this kind of a project as a dependency you could just copy and -extract it inside your project's `subprojects` directory. However -there is a simpler way. You can specify a Wrap file that tells Meson -how to download it for you. An example wrap file would look like this -and should be put in `subprojects/foobar.wrap`: +extract it inside your project's `subprojects` directory. + +However there is a simpler way. You can specify a Wrap file that tells Meson +how to download it for you. If you then use this subproject in your build, +Meson will automatically download and extract it during build. This makes +subproject embedding extremely easy. + +All wrap files must have a name of `<project_name>.wrap` form and be in `subprojects` dir. 
+ +Currently Meson has three kinds of wraps: +- wrap-file +- wrap-file with Meson build patch +- wrap-git + +## wrap-file + +An example wrap file for `libfoobar` would have a name `libfoobar.wrap` +and would look like this: ```ini [wrap-file] @@ -41,16 +55,22 @@ source_filename = foobar-1.0.tar.gz source_hash = 5ebeea0dfb75d090ea0e7ff84799b2a7a1550db3fe61eb5f6f61c2e971e57663 ``` -If you then use this subproject in your build, Meson will -automatically download and extract it during build. This makes -subproject embedding extremely easy. +`source_hash` is *sha256sum* of `source_filename`. + +Since *0.49.0* if `source_filename` is found in project's +`subprojects/packagecache` directory, it will be used instead of downloading the +source, even if `--wrap-mode` option is set to `nodownload`. The file's hash will +be checked. + +## wrap-file with Meson build patch Unfortunately most software projects in the world do not build with Meson. Because of this Meson allows you to specify a patch URL. This works in much the same way as Debian's distro patches. That is, they are downloaded and automatically applied to the subproject. These -files contain a Meson build definition for the given subproject. A -wrap file with an additional patch URL would look like this. +files contain a Meson build definition for the given subproject. + +A wrap file with an additional patch URL would look like this: ```ini [wrap-file] @@ -76,7 +96,14 @@ thousands of lines of code. Once you have a working build definition, just zip up the Meson build files (and others you have changed) and put them somewhere where you can download them. -## Branching subprojects directly from git +Since *0.49.0* if `patch_filename` is found in project's +`subprojects/packagecache` directory, it will be used instead of downloading the +patch, even if `--wrap-mode` option is set to `nodownload`. The file's hash will +be checked. + +## wrap-git + +This type of wrap allows branching subprojects directly from git. The above mentioned scheme assumes that your subproject is working off packaged files. Sometimes you want to check code out directly from @@ -118,79 +145,14 @@ clone-recursive = true ## Using wrapped projects -To use a subproject simply do this in your top level `meson.build`. - -```meson -foobar_proj = subproject('foobar') -``` - -Usually dependencies consist of some header files plus a library to -link against. To do this in a project so it can be used as a subproject you -would declare this internal dependency like this: - -```meson -foobar_dep = declare_dependency(link_with : mylib, - include_directories : myinc) -``` +Wraps provide a convenient way of obtaining a project into your subproject directory. +Then you use it as a regular subproject (see [subprojects](Subprojects.md)). -Then in your main project you would use them like this: - -```meson -executable('toplevel_exe', 'prog.c', - dependencies : foobar_proj.get_variable('foobar_dep')) -``` - -Note that the subproject object is *not* used as the dependency, but -rather you need to get the declared dependency from it with -`get_variable` because a subproject may have multiple declared -dependencies. - -## Toggling between distro packages and embedded source - -When building distro packages it is very important that you do not -embed any sources. Some distros have a rule forbidding embedded -dependencies so your project must be buildable without them or -otherwise the packager will hate you. - -Doing this with Meson and Wrap is simple. 
Here's how you would use -distro packages and fall back to embedding if the dependency is not -available. - -```meson -foobar_dep = dependency('foobar', required : false) - -if not foobar_dep.found() - foobar_proj = subproject('foobar') - # the subproject defines an internal dependency with - # the command declare_dependency(). - foobar_dep = foobar_proj.get_variable('foobar_dep') -endif - -executable('toplevel_exe', 'prog.c', - dependencies : foobar_dep) -``` - -Because this is such a common operation, Meson provides a shortcut for -this use case. - -```meson -foobar_dep = dependency('foobar', fallback : ['foobar', 'foobar_dep']) -``` - -The `fallback` keyword argument takes two items, the name of the -subproject and the name of the variable that holds the dependency. If -you need to do something more complicated, such as extract several -different variables, then you need to do it yourself with the manual -method described above. +## Getting wraps -With this setup when foobar is provided by the system, we use it. When -that is not the case we use the embedded version. Note that -`foobar_dep` can point to an external or an internal dependency but -you don't have to worry about their differences. Meson will take care -of the details for you. +Usually you don't want to write your wraps by hand. -## Getting wraps +There is an online repository called [WrapDB](https://wrapdb.mesonbuild.com) that provides +many dependencies ready to use. You can read more about WrapDB [here](Using-the-WrapDB.md). -Usually you don't want to write your wraps by hand. There is an online -repository called [WrapDB](Using-the-WrapDB.md) that provides many -dependencies ready to use. +There is also a Meson subcommand to get and manage wraps (see [using wraptool](Using-wraptool.md)). diff --git a/docs/markdown/snippets/add_release_note_snippets_here b/docs/markdown/snippets/add_release_note_snippets_here index e69de29..bc4039c 100644 --- a/docs/markdown/snippets/add_release_note_snippets_here +++ b/docs/markdown/snippets/add_release_note_snippets_here @@ -0,0 +1,3 @@ +## Added `cpp_std` option for the Visual Studio C++ compiler +Allows the use of C++17 features and experimental not-yet-standardized +features. Valid options are `c++11`, `c++14`, `c++17`, and `c++latest`. diff --git a/docs/markdown/snippets/disabler.md b/docs/markdown/snippets/disabler.md new file mode 100644 index 0000000..76874f6 --- /dev/null +++ b/docs/markdown/snippets/disabler.md @@ -0,0 +1,6 @@ +## Return `Disabler()` instead of not-found object + +Functions such as `dependency()`, `find_library()`, `find_program()`, and +`python.find_installation()` have a new keyword argument: `disabler`. When set +to `true` those functions return `Disabler()` objects instead of not-found +objects. diff --git a/docs/markdown/snippets/manpage_compression.md b/docs/markdown/snippets/manpage_compression.md new file mode 100644 index 0000000..8c96807 --- /dev/null +++ b/docs/markdown/snippets/manpage_compression.md @@ -0,0 +1,7 @@ +## Manpages are no longer compressed implicitly + +Earlier, the `install_man` command has automatically compressed installed +manpages into `.gz` format. This collided with manpage compression hooks +already used by various distributions. Now, manpages are installed uncompressed +and distributors are expected to handle compressing them according to their own +compression preferences. 
diff --git a/docs/markdown/snippets/new_syntax.md b/docs/markdown/snippets/new_syntax.md new file mode 100644 index 0000000..98eccd0 --- /dev/null +++ b/docs/markdown/snippets/new_syntax.md @@ -0,0 +1,42 @@ +## Foreach `break` and `continue` + +`break` and `continue` keywords can be used inside foreach loops. + +```meson +items = ['a', 'continue', 'b', 'break', 'c'] +result = [] +foreach i : items + if i == 'continue' + continue + elif i == 'break' + break + endif + result += i +endforeach +# result is ['a', 'b'] +``` + +You can check if an array contains an element like this: +```meson +my_array = [1, 2] +if 1 in my_array +# This condition is true +endif +if 1 not in my_array +# This condition is false +endif +``` + +You can check if a dictionary contains a key like this: +```meson +my_dict = {'foo': 42, 'foo': 43} +if 'foo' in my_dict +# This condition is true +endif +if 42 in my_dict +# This condition is false +endif +if 'foo' not in my_dict +# This condition is false +endif +``` diff --git a/docs/markdown/snippets/pie.md b/docs/markdown/snippets/pie.md new file mode 100644 index 0000000..a9be174 --- /dev/null +++ b/docs/markdown/snippets/pie.md @@ -0,0 +1,6 @@ +## Position-independent executables + +When `b_pie` option, or `executable()`'s `pie` keyword argument is set to +`true`, position-independent executables are built. All their objects are built +with `-fPIE` and the executable is linked with `-pie`. Any static library they +link must be built with `pic` set to `true` (see `b_staticpic` option). diff --git a/mesonbuild/backend/backends.py b/mesonbuild/backend/backends.py index 40ba213..6a3d936 100644 --- a/mesonbuild/backend/backends.py +++ b/mesonbuild/backend/backends.py @@ -23,11 +23,9 @@ import subprocess from ..mesonlib import MesonException, OrderedSet from ..mesonlib import classify_unity_sources from ..mesonlib import File -from ..compilers import CompilerArgs, get_macos_dylib_install_name +from ..compilers import CompilerArgs from collections import OrderedDict import shlex -from functools import lru_cache - class CleanTrees: @@ -132,8 +130,8 @@ class Backend: self.build = build self.environment = build.environment self.processed_targets = {} - self.build_to_src = os.path.relpath(self.environment.get_source_dir(), - self.environment.get_build_dir()) + self.build_to_src = mesonlib.relpath(self.environment.get_source_dir(), + self.environment.get_build_dir()) def get_target_filename(self, t): if isinstance(t, build.CustomTarget): @@ -552,6 +550,8 @@ class Backend: # Set -fPIC for static libraries by default unless explicitly disabled if isinstance(target, build.StaticLibrary) and target.pic: commands += compiler.get_pic_args() + if isinstance(target, build.Executable) and target.pie: + commands += compiler.get_pie_args() # Add compile args needed to find external dependencies. Link args are # added while generating the link command. 
# NOTE: We must preserve the order in which external deps are @@ -1083,7 +1083,7 @@ class Backend: if subdir is None: subdir = os.path.join(manroot, 'man' + num) srcabs = f.absolute_path(self.environment.get_source_dir(), self.environment.get_build_dir()) - dstabs = os.path.join(subdir, os.path.basename(f.fname) + '.gz') + dstabs = os.path.join(subdir, os.path.basename(f.fname)) i = [srcabs, dstabs, m.get_custom_install_mode()] d.man.append(i) diff --git a/mesonbuild/backend/ninjabackend.py b/mesonbuild/backend/ninjabackend.py index 6b2a00a..563b959 100644 --- a/mesonbuild/backend/ninjabackend.py +++ b/mesonbuild/backend/ninjabackend.py @@ -1944,6 +1944,8 @@ rule FORTRAN_DEP_HACK%s if not isinstance(i, (build.CustomTarget, build.CustomTargetIndex)): continue idir = self.get_target_dir(i) + if not idir: + idir = '.' if idir not in custom_target_include_dirs: custom_target_include_dirs.append(idir) incs = [] @@ -2243,6 +2245,8 @@ rule FORTRAN_DEP_HACK%s # If implib, and that's significant on this platform (i.e. Windows using either GCC or Visual Studio) if target.import_filename: commands += linker.gen_import_library_args(os.path.join(self.get_target_dir(target), target.import_filename)) + if target.pie: + commands += linker.get_pie_link_args() elif isinstance(target, build.SharedLibrary): if isinstance(target, build.SharedModule): options = self.environment.coredata.base_options @@ -2382,7 +2386,8 @@ rule FORTRAN_DEP_HACK%s # Add buildtype linker args: optimization level, etc. commands += linker.get_buildtype_linker_args(self.get_option_for_target('buildtype', target)) # Add /DEBUG and the pdb filename when using MSVC - commands += self.get_link_debugfile_args(linker, target, outname) + if self.get_option_for_target('debug', target): + commands += self.get_link_debugfile_args(linker, target, outname) # Add link args specific to this BuildTarget type, such as soname args, # PIC, import library generation, etc. commands += self.get_target_type_link_args(target, linker) diff --git a/mesonbuild/backend/xcodebackend.py b/mesonbuild/backend/xcodebackend.py index 11f8bb8..a550d91 100644 --- a/mesonbuild/backend/xcodebackend.py +++ b/mesonbuild/backend/xcodebackend.py @@ -44,6 +44,8 @@ class XCodeBackend(backends.Backend): 'inc': 'sourcecode.c.h', 'dylib': 'compiled.mach-o.dylib', 'o': 'compiled.mach-o.objfile', + 's': 'sourcecode.asm', + 'asm': 'sourcecode.asm', } self.maingroup_id = self.gen_id() self.all_id = self.gen_id() @@ -112,7 +114,11 @@ class XCodeBackend(backends.Backend): self.generate_suffix() def get_xcodetype(self, fname): - return self.xcodetypemap[fname.split('.')[-1]] + xcodetype = self.xcodetypemap.get(fname.split('.')[-1].lower()) + if not xcodetype: + xcodetype = 'sourcecode.unknown' + mlog.warning('Unknown file type "%s" fallbacking to "%s". Xcode project might be malformed.' % (fname, xcodetype)) + return xcodetype def generate_filemap(self): self.filemap = {} # Key is source file relative to src root. 
@@ -733,9 +739,13 @@ class XCodeBackend(backends.Backend): for lang in self.environment.coredata.compilers: if lang not in langnamemap: continue + # Add compile args added using add_project_arguments() + pargs = self.build.projects_args.get(target.subproject, {}).get(lang, []) + # Add compile args added using add_global_arguments() + # These override per-project arguments gargs = self.build.global_args.get(lang, []) targs = target.get_extra_args(lang) - args = gargs + targs + args = pargs + gargs + targs if len(args) > 0: langargs[langnamemap[lang]] = args symroot = os.path.join(self.environment.get_build_dir(), target.subdir) @@ -779,8 +789,7 @@ class XCodeBackend(backends.Backend): if isinstance(target, build.SharedLibrary): self.write_line('LIBRARY_STYLE = DYNAMIC;') for langname, args in langargs.items(): - argstr = ' '.join(args) - self.write_line('OTHER_%sFLAGS = "%s";' % (langname, argstr)) + self.write_build_setting_line('OTHER_%sFLAGS' % langname, args) self.write_line('OTHER_LDFLAGS = "%s";' % ldstr) self.write_line('OTHER_REZFLAGS = "";') self.write_line('PRODUCT_NAME = %s;' % product_name) @@ -869,7 +878,7 @@ class XCodeBackend(backends.Backend): # If path contains spaces surround it with double colon self.write_line('%s = "\\"%s\\"";' % (flag_name, value)) else: - self.write_line('"%s",' % value) + self.write_line('%s = "%s";' % (flag_name, value)) else: self.write_line('%s = (' % flag_name) self.indent_level += 1 diff --git a/mesonbuild/build.py b/mesonbuild/build.py index 8ba5465..f2a6d9c 100644 --- a/mesonbuild/build.py +++ b/mesonbuild/build.py @@ -84,7 +84,7 @@ known_build_target_kwargs = ( rust_kwargs | cs_kwargs) -known_exe_kwargs = known_build_target_kwargs | {'implib', 'export_dynamic'} +known_exe_kwargs = known_build_target_kwargs | {'implib', 'export_dynamic', 'pie'} known_shlib_kwargs = known_build_target_kwargs | {'version', 'soversion', 'vs_module_defs', 'darwin_versions'} known_shmod_kwargs = known_build_target_kwargs known_stlib_kwargs = known_build_target_kwargs | {'pic'} @@ -414,6 +414,8 @@ class BuildTarget(Target): self.generated = [] self.extra_files = [] self.d_features = {} + self.pic = False + self.pie = False # Sources can be: # 1. Pre-existing source files in the source tree # 2. 
Pre-existing sources generated by configure_file in the build tree @@ -869,13 +871,14 @@ This will become a hard error in a future Meson release.''') # since library loading is done differently) if for_darwin(self.is_cross, self.environment) or for_windows(self.is_cross, self.environment): self.pic = True - elif '-fPIC' in clist + cpplist: - mlog.warning("Use the 'pic' kwarg instead of passing -fPIC manually to static library {!r}".format(self.name)) - self.pic = True else: - self.pic = kwargs.get('pic', False) - if not isinstance(self.pic, bool): - raise InvalidArguments('Argument pic to static library {!r} must be boolean'.format(self.name)) + self.pic = self._extract_pic_pie(kwargs, 'pic') + if isinstance(self, Executable): + # Executables must be PIE on Android + if for_android(self.is_cross, self.environment): + self.pie = True + else: + self.pie = self._extract_pic_pie(kwargs, 'pie') self.implicit_include_directories = kwargs.get('implicit_include_directories', True) if not isinstance(self.implicit_include_directories, bool): raise InvalidArguments('Implicit_include_directories must be a boolean.') @@ -888,6 +891,18 @@ This will become a hard error in a future Meson release.''') raise InvalidArguments('GNU symbol visibility arg %s not one of: %s', self.symbol_visibility, ', '.join(permitted)) + def _extract_pic_pie(self, kwargs, arg): + # Check if we have -fPIC, -fpic, -fPIE, or -fpie in cflags + all_flags = self.extra_args['c'] + self.extra_args['cpp'] + if '-f' + arg.lower() in all_flags or '-f' + arg.upper() in all_flags: + mlog.warning("Use the '{}' kwarg instead of passing '{}' manually to {!r}".format(arg, '-f' + arg, self.name)) + return True + + val = kwargs.get(arg, False) + if not isinstance(val, bool): + raise InvalidArguments('Argument {} to {!r} must be boolean'.format(arg, self.name)) + return val + def get_filename(self): return self.filename @@ -1307,6 +1322,8 @@ class Executable(BuildTarget): known_kwargs = known_exe_kwargs def __init__(self, name, subdir, subproject, is_cross, sources, objects, environment, kwargs): + if 'pie' not in kwargs and 'b_pie' in environment.coredata.base_options: + kwargs['pie'] = environment.coredata.base_options['b_pie'].value super().__init__(name, subdir, subproject, is_cross, sources, objects, environment, kwargs) # Unless overridden, executables have no suffix or prefix. 
Except on # Windows and with C#/Mono executables where the suffix is 'exe' diff --git a/mesonbuild/compilers/c.py b/mesonbuild/compilers/c.py index 73721e4..c8e473a 100644 --- a/mesonbuild/compilers/c.py +++ b/mesonbuild/compilers/c.py @@ -121,10 +121,7 @@ class CCompiler(Compiler): # The default behavior is this, override in MSVC @functools.lru_cache(maxsize=None) def build_rpath_args(self, build_dir, from_dir, rpath_paths, build_rpath, install_rpath): - if getattr(self, 'compiler_type', False) and self.compiler_type.is_osx_compiler: - # Clang, GCC and ICC on macOS all use the same rpath arguments - return self.build_osx_rpath_args(build_dir, rpath_paths, build_rpath) - elif self.compiler_type.is_windows_compiler: + if self.compiler_type.is_windows_compiler: return [] return self.build_unix_rpath_args(build_dir, from_dir, rpath_paths, build_rpath, install_rpath) @@ -167,6 +164,9 @@ class CCompiler(Compiler): if self.compiler_type.is_osx_compiler: # Apple ld return ['-Wl,-undefined,dynamic_lookup'] + elif self.compiler_type.is_windows_compiler: + # For PE/COFF this is impossible + return [] else: # GNU ld and LLVM lld return ['-Wl,--allow-shlib-undefined'] @@ -896,7 +896,7 @@ class CCompiler(Compiler): prefixes = ['lib', ''] # Library suffixes and prefixes if for_darwin(env.is_cross_build(), env): - shlibext = ['dylib'] + shlibext = ['dylib', 'so'] elif for_windows(env.is_cross_build(), env): # FIXME: .lib files can be import or static so we should read the # file, figure out which one it is, and reject the wrong kind. @@ -1137,9 +1137,9 @@ class ClangCCompiler(ClangCompiler, CCompiler): class ArmclangCCompiler(ArmclangCompiler, CCompiler): - def __init__(self, exelist, version, is_cross, exe_wrapper=None, **kwargs): + def __init__(self, exelist, version, compiler_type, is_cross, exe_wrapper=None, **kwargs): CCompiler.__init__(self, exelist, version, is_cross, exe_wrapper, **kwargs) - ArmclangCompiler.__init__(self) + ArmclangCompiler.__init__(self, compiler_type) default_warn_args = ['-Wall', '-Winvalid-pch'] self.warn_args = {'1': default_warn_args, '2': default_warn_args + ['-Wextra'], @@ -1545,9 +1545,9 @@ class VisualStudioCCompiler(CCompiler): class ArmCCompiler(ArmCompiler, CCompiler): - def __init__(self, exelist, version, is_cross, exe_wrapper=None, **kwargs): + def __init__(self, exelist, version, compiler_type, is_cross, exe_wrapper=None, **kwargs): CCompiler.__init__(self, exelist, version, is_cross, exe_wrapper, **kwargs) - ArmCompiler.__init__(self) + ArmCompiler.__init__(self, compiler_type) def get_options(self): opts = CCompiler.get_options(self) diff --git a/mesonbuild/compilers/compilers.py b/mesonbuild/compilers/compilers.py index 26aeba7..fb03819 100644 --- a/mesonbuild/compilers/compilers.py +++ b/mesonbuild/compilers/compilers.py @@ -331,6 +331,9 @@ base_options = {'b_pch': coredata.UserBooleanOption('b_pch', 'Use precompiled he 'b_staticpic': coredata.UserBooleanOption('b_staticpic', 'Build static libraries as position independent', True), + 'b_pie': coredata.UserBooleanOption('b_pie', + 'Build executables as position independent', + False), 'b_bitcode': coredata.UserBooleanOption('b_bitcode', 'Generate and embed bitcode (only macOS and iOS)', False), @@ -503,6 +506,31 @@ def get_base_link_args(options, linker, is_shared_module): pass return args +def prepare_rpaths(raw_rpaths, build_dir, from_dir): + internal_format_rpaths = [evaluate_rpath(p, build_dir, from_dir) for p in raw_rpaths] + ordered_rpaths = order_rpaths(internal_format_rpaths) + return ordered_rpaths + 
+def order_rpaths(rpath_list): + # We want rpaths that point inside our build dir to always override + # those pointing to other places in the file system. This is so built + # binaries prefer our libraries to the ones that may lie somewhere + # in the file system, such as /lib/x86_64-linux-gnu. + # + # The correct thing to do here would be C++'s std::stable_partition. + # Python standard library does not have it, so replicate it with + # sort, which is guaranteed to be stable. + return sorted(rpath_list, key=os.path.isabs) + +def evaluate_rpath(p, build_dir, from_dir): + if p == from_dir: + return '' # relpath errors out in this case + elif os.path.isabs(p): + return p # These can be outside of build dir. + else: + return os.path.relpath(os.path.join(build_dir, p), os.path.join(build_dir, from_dir)) + + class CrossNoRunException(MesonException): pass @@ -1088,46 +1116,30 @@ class Compiler: def get_instruction_set_args(self, instruction_set): return None - def build_osx_rpath_args(self, build_dir, rpath_paths, build_rpath): - # Ensure that there is enough space for large RPATHs and install_name - args = ['-Wl,-headerpad_max_install_names'] - if not rpath_paths and not build_rpath: - return args - # On OSX, rpaths must be absolute. - abs_rpaths = [os.path.join(build_dir, p) for p in rpath_paths] - if build_rpath != '': - abs_rpaths.append(build_rpath) - # Need to deduplicate abs_rpaths, as rpath_paths and - # build_rpath are not guaranteed to be disjoint sets - args += ['-Wl,-rpath,' + rp for rp in OrderedSet(abs_rpaths)] - return args - def build_unix_rpath_args(self, build_dir, from_dir, rpath_paths, build_rpath, install_rpath): if not rpath_paths and not install_rpath and not build_rpath: return [] - # The rpaths we write must be relative, because otherwise - # they have different length depending on the build + args = [] + if mesonlib.is_osx(): + # Ensure that there is enough space for install_name_tool in-place editing of large RPATHs + args.append('-Wl,-headerpad_max_install_names') + # @loader_path is the equivalent of $ORIGIN on macOS + # https://stackoverflow.com/q/26280738 + origin_placeholder = '@loader_path' + else: + origin_placeholder = '$ORIGIN' + # The rpaths we write must be relative if they point to the build dir, + # because otherwise they have different length depending on the build # directory. This breaks reproducible builds. - rel_rpaths = [] - for p in rpath_paths: - if p == from_dir: - relative = '' # relpath errors out in this case - else: - relative = os.path.relpath(os.path.join(build_dir, p), os.path.join(build_dir, from_dir)) - rel_rpaths.append(relative) - paths = ':'.join([os.path.join('$ORIGIN', p) for p in rel_rpaths]) + processed_rpaths = prepare_rpaths(rpath_paths, build_dir, from_dir) + # Need to deduplicate rpaths, as macOS's install_name_tool + # is *very* allergic to duplicate -delete_rpath arguments + # when calling depfixer on installation. + all_paths = OrderedSet([os.path.join(origin_placeholder, p) for p in processed_rpaths]) # Build_rpath is used as-is (it is usually absolute). if build_rpath != '': - if paths != '': - paths += ':' - paths += build_rpath - if len(paths) < len(install_rpath): - padding = 'X' * (len(install_rpath) - len(paths)) - if not paths: - paths = padding - else: - paths = paths + ':' + padding - args = [] + all_paths.add(build_rpath) + if mesonlib.is_dragonflybsd() or mesonlib.is_openbsd(): # This argument instructs the compiler to record the value of # ORIGIN in the .dynamic section of the elf. 
On Linux this is done @@ -1135,7 +1147,23 @@ class Compiler: # $ORIGIN in the runtime path will be undefined and any binaries # linked against local libraries will fail to resolve them. args.append('-Wl,-z,origin') - args.append('-Wl,-rpath,' + paths) + + if mesonlib.is_osx(): + # macOS does not support colon-separated strings in LC_RPATH, + # hence we have to pass each path component individually + args += ['-Wl,-rpath,' + rp for rp in all_paths] + else: + # In order to avoid relinking for RPATH removal, the binary needs to contain just + # enough space in the ELF header to hold the final installation RPATH. + paths = ':'.join(all_paths) + if len(paths) < len(install_rpath): + padding = 'X' * (len(install_rpath) - len(paths)) + if not paths: + paths = padding + else: + paths = paths + ':' + padding + args.append('-Wl,-rpath,' + paths) + if get_compiler_is_linuxlike(self): # Rpaths to use while linking must be absolute. These are not # written to the binary. Needed only with GNU ld: @@ -1174,6 +1202,18 @@ class Compiler: raise EnvironmentException( 'Language {} does not support function attributes.'.format(self.get_display_language())) + def get_pic_args(self): + m = 'Language {} does not support position-independent code' + raise EnvironmentException(m.format(self.get_display_language())) + + def get_pie_args(self): + m = 'Language {} does not support position-independent executable' + raise EnvironmentException(m.format(self.get_display_language())) + + def get_pie_link_args(self): + m = 'Language {} does not support position-independent executable' + raise EnvironmentException(m.format(self.get_display_language())) + @enum.unique class CompilerType(enum.Enum): @@ -1191,6 +1231,8 @@ class CompilerType(enum.Enum): ICC_OSX = 21 ICC_WIN = 22 + ARM_WIN = 30 + @property def is_standard_compiler(self): return self.name in ('GCC_STANDARD', 'CLANG_STANDARD', 'ICC_STANDARD') @@ -1201,7 +1243,7 @@ class CompilerType(enum.Enum): @property def is_windows_compiler(self): - return self.name in ('GCC_MINGW', 'GCC_CYGWIN', 'CLANG_MINGW', 'ICC_WIN') + return self.name in ('GCC_MINGW', 'GCC_CYGWIN', 'CLANG_MINGW', 'ICC_WIN', 'ARM_WIN') def get_macos_dylib_install_name(prefix, shlib_name, suffix, soversion): @@ -1314,7 +1356,7 @@ class GnuLikeCompiler(abc.ABC): def __init__(self, compiler_type): self.compiler_type = compiler_type self.base_options = ['b_pch', 'b_lto', 'b_pgo', 'b_sanitize', 'b_coverage', - 'b_ndebug', 'b_staticpic'] + 'b_ndebug', 'b_staticpic', 'b_pie'] if not self.compiler_type.is_osx_compiler and not self.compiler_type.is_windows_compiler: self.base_options.append('b_lundef') if not self.compiler_type.is_windows_compiler: @@ -1337,6 +1379,12 @@ class GnuLikeCompiler(abc.ABC): return [] # On Window and OS X, pic is always on. 
return ['-fPIC'] + def get_pie_args(self): + return ['-fPIE'] + + def get_pie_link_args(self): + return ['-pie'] + def get_buildtype_args(self, buildtype): return gnulike_buildtype_args[buildtype] @@ -1542,7 +1590,7 @@ class ClangCompiler(GnuLikeCompiler): class ArmclangCompiler: - def __init__(self): + def __init__(self, compiler_type): if not self.is_cross: raise EnvironmentException('armclang supports only cross-compilation.') # Check whether 'armlink.exe' is available in path @@ -1568,6 +1616,7 @@ class ArmclangCompiler: if not version_compare(self.version, '==' + linker_ver): raise EnvironmentException('armlink version does not match with compiler version') self.id = 'armclang' + self.compiler_type = compiler_type self.base_options = ['b_pch', 'b_lto', 'b_pgo', 'b_sanitize', 'b_coverage', 'b_ndebug', 'b_staticpic', 'b_colorout'] # Assembly @@ -1654,10 +1703,11 @@ class IntelCompiler(GnuLikeCompiler): class ArmCompiler: # Functionality that is common to all ARM family compilers. - def __init__(self): + def __init__(self, compiler_type): if not self.is_cross: raise EnvironmentException('armcc supports only cross-compilation.') self.id = 'arm' + self.compiler_type = compiler_type default_warn_args = [] self.warn_args = {'1': default_warn_args, '2': default_warn_args + [], diff --git a/mesonbuild/compilers/cpp.py b/mesonbuild/compilers/cpp.py index c68c529..66ae0e5 100644 --- a/mesonbuild/compilers/cpp.py +++ b/mesonbuild/compilers/cpp.py @@ -157,9 +157,9 @@ class ClangCPPCompiler(ClangCompiler, CPPCompiler): class ArmclangCPPCompiler(ArmclangCompiler, CPPCompiler): - def __init__(self, exelist, version, is_cross, exe_wrapper=None, **kwargs): + def __init__(self, exelist, version, compiler_type, is_cross, exe_wrapper=None, **kwargs): CPPCompiler.__init__(self, exelist, version, is_cross, exe_wrapper, **kwargs) - ArmclangCompiler.__init__(self) + ArmclangCompiler.__init__(self, compiler_type) default_warn_args = ['-Wall', '-Winvalid-pch', '-Wnon-virtual-dtor'] self.warn_args = {'1': default_warn_args, '2': default_warn_args + ['-Wextra'], @@ -309,11 +309,23 @@ class VisualStudioCPPCompiler(VisualStudioCCompiler, CPPCompiler): self.base_options = ['b_pch', 'b_vscrt'] # FIXME add lto, pgo and the like def get_options(self): + cpp_stds = ['none', 'c++11'] + # Visual Studio 2015 Update 3 and later + if version_compare(self.version, '>=19'): + cpp_stds.extend(['c++14', 'c++latest']) + # Visual Studio 2017 and later + if version_compare(self.version, '>=19.11'): + cpp_stds.append('c++17') + opts = CPPCompiler.get_options(self) opts.update({'cpp_eh': coredata.UserComboOption('cpp_eh', 'C++ exception handling type.', ['none', 'a', 's', 'sc'], 'sc'), + 'cpp_std': coredata.UserComboOption('cpp_std', + 'C++ language standard to use', + cpp_stds, + 'none'), 'cpp_winlibs': coredata.UserArrayOption('cpp_winlibs', 'Windows libs to link against.', msvc_winlibs)}) @@ -321,9 +333,28 @@ class VisualStudioCPPCompiler(VisualStudioCCompiler, CPPCompiler): def get_option_compile_args(self, options): args = [] - std = options['cpp_eh'] - if std.value != 'none': - args.append('/EH' + std.value) + + eh = options['cpp_eh'] + if eh.value != 'none': + args.append('/EH' + eh.value) + + std = options['cpp_std'] + if std.value == 'none': + pass + elif std.value == 'c++11': + # Note: there is no explicit flag for supporting C++11; we attempt to do the best we can + # which means setting the C++ standard version to C++14, in compilers that support it + # (i.e., after VS2015U3) + # if one is using anything before that 
point, one cannot set the standard. + if version_compare(self.version, '>=19.00.24210'): + mlog.warning('MSVC does not support C++11; ' + 'attempting best effort; setting the standard to C++14') + args.append('/std:c++14') + else: + mlog.warning('This version of MSVC does not support cpp_std arguments') + else: + args.append('/std:' + std.value) + return args def get_option_link_args(self, options): @@ -336,9 +367,9 @@ class VisualStudioCPPCompiler(VisualStudioCCompiler, CPPCompiler): class ArmCPPCompiler(ArmCompiler, CPPCompiler): - def __init__(self, exelist, version, is_cross, exe_wrap=None, **kwargs): + def __init__(self, exelist, version, compiler_type, is_cross, exe_wrap=None, **kwargs): CPPCompiler.__init__(self, exelist, version, is_cross, exe_wrap, **kwargs) - ArmCompiler.__init__(self) + ArmCompiler.__init__(self, compiler_type) def get_options(self): opts = CPPCompiler.get_options(self) diff --git a/mesonbuild/compilers/d.py b/mesonbuild/compilers/d.py index 2865b1f..099d907 100644 --- a/mesonbuild/compilers/d.py +++ b/mesonbuild/compilers/d.py @@ -379,8 +379,12 @@ class DCompiler(Compiler): dcargs.append('-L=' + arg) continue - - dcargs.append(arg) + elif not arg.startswith('-') and arg.endswith(('.a', '.lib')): + # ensure static libraries are passed through to the linker + dcargs.append('-L=' + arg) + continue + else: + dcargs.append(arg) return dcargs diff --git a/mesonbuild/coredata.py b/mesonbuild/coredata.py index 1ab7d72..819481b 100644 --- a/mesonbuild/coredata.py +++ b/mesonbuild/coredata.py @@ -223,7 +223,6 @@ class CoreData: self.base_options = {} self.external_preprocess_args = {} # CPPFLAGS only self.cross_file = self.__load_cross_file(options.cross_file) - self.wrap_mode = options.wrap_mode if options.wrap_mode is not None else WrapMode.default self.compilers = OrderedDict() self.cross_compilers = OrderedDict() self.deps = OrderedDict() @@ -338,7 +337,10 @@ class CoreData: def get_builtin_option(self, optname): if optname in self.builtins: - return self.builtins[optname].value + v = self.builtins[optname] + if optname == 'wrap_mode': + return WrapMode.from_string(v.value) + return v.value raise RuntimeError('Tried to get unknown builtin option %s.' 
% optname) def set_builtin_option(self, optname, value): @@ -616,7 +618,11 @@ builtin_options = { 'install_umask': [UserUmaskOption, 'Default umask to apply on permissions of installed files', '022'], 'auto_features': [UserFeatureOption, "Override value of all 'auto' features", 'auto'], 'optimization': [UserComboOption, 'Optimization level', ['0', 'g', '1', '2', '3', 's'], '0'], - 'debug': [UserBooleanOption, 'Debug', True] + 'debug': [UserBooleanOption, 'Debug', True], + 'wrap_mode': [UserComboOption, 'Wrap mode', ['default', + 'nofallback', + 'nodownload', + 'forcefallback'], 'default'], } # Special prefix-dependent defaults for installation directories that reside in diff --git a/mesonbuild/dependencies/boost.py b/mesonbuild/dependencies/boost.py index b06f62d..17f9240 100644 --- a/mesonbuild/dependencies/boost.py +++ b/mesonbuild/dependencies/boost.py @@ -443,7 +443,7 @@ class BoostDependency(ExternalDependency): if self.libdir: libdirs = [self.libdir] elif self.boost_root is None: - libdirs = mesonlib.get_library_dirs(self.env) + libdirs = mesonlib.get_library_dirs() else: libdirs = [os.path.join(self.boost_root, 'lib')] for libdir in libdirs: diff --git a/mesonbuild/dependencies/dev.py b/mesonbuild/dependencies/dev.py index 1e7c3e8..47beb4e 100644 --- a/mesonbuild/dependencies/dev.py +++ b/mesonbuild/dependencies/dev.py @@ -310,6 +310,7 @@ class LLVMDependency(ConfigToolDependency): if not matches: if self.required: raise + self.is_found = False return self.link_args = self.get_config_value(['--ldflags'], 'link_args') @@ -326,6 +327,8 @@ class LLVMDependency(ConfigToolDependency): except DependencyException: if self.required: raise + self.is_found = False + return link_args = ['--link-static', '--system-libs'] if self.static else ['--link-shared'] self.link_args = self.get_config_value( diff --git a/mesonbuild/environment.py b/mesonbuild/environment.py index 2106c5d..8c6c60e 100644 --- a/mesonbuild/environment.py +++ b/mesonbuild/environment.py @@ -211,6 +211,19 @@ def detect_cpu_family(compilers): return 'arm' if trial.startswith('ppc64'): return 'ppc64' + if trial == 'powerpc': + # FreeBSD calls both ppc and ppc64 "powerpc". + # https://github.com/mesonbuild/meson/issues/4397 + try: + p, stdo, _ = Popen_safe(['uname', '-p']) + except (FileNotFoundError, PermissionError): + # Not much to go on here. 
+ if sys.maxsize > 2**32: + return 'ppc64' + return 'ppc' + if 'powerpc64' in stdo: + return 'ppc64' + return 'ppc' if trial in ('amd64', 'x64'): trial = 'x86_64' if trial == 'x86_64': @@ -590,8 +603,9 @@ This is probably wrong, it should always point to the native compiler.''' % evar # Override previous values version = search_version(arm_ver_str) full_version = arm_ver_str + compiler_type = CompilerType.ARM_WIN cls = ArmclangCCompiler if lang == 'c' else ArmclangCPPCompiler - return cls(ccache + compiler, version, is_cross, exe_wrap, full_version=full_version) + return cls(ccache + compiler, version, compiler_type, is_cross, exe_wrap, full_version=full_version) if 'clang' in out: if 'Apple' in out or mesonlib.for_darwin(want_cross, self): compiler_type = CompilerType.CLANG_OSX @@ -626,8 +640,9 @@ This is probably wrong, it should always point to the native compiler.''' % evar cls = IntelCCompiler if lang == 'c' else IntelCPPCompiler return cls(ccache + compiler, version, compiler_type, is_cross, exe_wrap, full_version=full_version) if 'ARM' in out: + compiler_type = CompilerType.ARM_WIN cls = ArmCCompiler if lang == 'c' else ArmCPPCompiler - return cls(ccache + compiler, version, is_cross, exe_wrap, full_version=full_version) + return cls(ccache + compiler, version, compiler_type, is_cross, exe_wrap, full_version=full_version) self._handle_exceptions(popen_exceptions, compilers) def detect_c_compiler(self, want_cross): @@ -928,6 +943,8 @@ This is probably wrong, it should always point to the native compiler.''' % evar return ArLinker(linker) if p.returncode == 1 and err.startswith('usage'): # OSX return ArLinker(linker) + if p.returncode == 1 and err.startswith('Usage'): # AIX + return ArLinker(linker) self._handle_exceptions(popen_exceptions, linkers, 'linker') raise EnvironmentException('Unknown static linker "%s"' % ' '.join(linkers)) @@ -1050,12 +1067,6 @@ class CrossBuildInfo: except Exception: raise EnvironmentException('Malformed value in cross file variable %s.' 
% entry) - if entry == 'cpu_family' and res not in known_cpu_families: - mlog.warning('Unknown CPU family %s, please report this at https://github.com/mesonbuild/meson/issues/new' % value) - - if entry == 'endian' and res not in ('little', 'big'): - mlog.warning('Unknown endian %s in cross file' % res) - if self.ok_type(res): self.config[s][entry] = res elif isinstance(res, list): @@ -1155,11 +1166,20 @@ class MachineInfo: raise EnvironmentException( 'Machine info is currently {}\n'.format(literal) + 'but is missing {}.'.format(minimum_literal - set(literal))) + + cpu_family = literal['cpu_family'] + if cpu_family not in known_cpu_families: + mlog.warning('Unknown CPU family %s, please report this at https://github.com/mesonbuild/meson/issues/new' % cpu_family) + + endian = literal['endian'] + if endian not in ('little', 'big'): + mlog.warning('Unknown endian %s' % endian) + return MachineInfo( literal['system'], - literal['cpu_family'], + cpu_family, literal['cpu'], - literal['endian']) + endian) def is_windows(self): """ diff --git a/mesonbuild/interpreter.py b/mesonbuild/interpreter.py index c05b92a..66f7f02 100644 --- a/mesonbuild/interpreter.py +++ b/mesonbuild/interpreter.py @@ -27,8 +27,9 @@ from .dependencies import InternalDependency, Dependency, NotFoundDependency, De from .interpreterbase import InterpreterBase from .interpreterbase import check_stringlist, flatten, noPosargs, noKwargs, stringArgs, permittedKwargs, noArgsFlattening from .interpreterbase import InterpreterException, InvalidArguments, InvalidCode, SubdirDoneRequest -from .interpreterbase import InterpreterObject, MutableInterpreterObject, Disabler +from .interpreterbase import InterpreterObject, MutableInterpreterObject, Disabler, disablerIfNotFound from .interpreterbase import FeatureNew, FeatureDeprecated, FeatureNewKwargs +from .interpreterbase import ObjectHolder from .modules import ModuleReturnValue import os, shutil, uuid @@ -57,14 +58,6 @@ def stringifyUserArguments(args): raise InvalidArguments('Function accepts only strings, integers, lists and lists thereof.') -class ObjectHolder: - def __init__(self, obj, subproject=None): - self.held_object = obj - self.subproject = subproject - - def __repr__(self): - return '<Holder: {!r}>'.format(self.held_object) - class FeatureOptionHolder(InterpreterObject, ObjectHolder): def __init__(self, env, option): InterpreterObject.__init__(self) @@ -1388,6 +1381,8 @@ class CompilerHolder(InterpreterObject): mlog.log('Header <{0}> has symbol "{1}":'.format(hname, symbol), h) return haz + @FeatureNewKwargs('compiler.find_library', '0.49.0', ['disabler']) + @disablerIfNotFound @permittedKwargs({ 'required', 'dirs', @@ -1547,8 +1542,8 @@ class ModuleHolder(InterpreterObject, ObjectHolder): # because the Build object contains dicts and lists. 
num_targets = len(self.interpreter.build.targets) state = ModuleState( - build_to_src=os.path.relpath(self.interpreter.environment.get_source_dir(), - self.interpreter.environment.get_build_dir()), + build_to_src=mesonlib.relpath(self.interpreter.environment.get_source_dir(), + self.interpreter.environment.get_build_dir()), subproject=self.interpreter.subproject, subdir=self.interpreter.subdir, current_lineno=self.interpreter.current_lineno, @@ -2189,14 +2184,7 @@ external dependencies (including libraries) must go to "dependencies".''') raise InterpreterException('Program or command {!r} not found ' 'or not executable'.format(cmd)) cmd = prog - try: - cmd_path = os.path.relpath(cmd.get_path(), start=srcdir) - except ValueError: - # On Windows a relative path can't be evaluated for - # paths on two different drives (i.e. c:\foo and f:\bar). - # The only thing left to is is to use the original absolute - # path. - cmd_path = cmd.get_path() + cmd_path = mesonlib.relpath(cmd.get_path(), start=srcdir) if not cmd_path.startswith('..') and cmd_path not in self.build_def_files: self.build_def_files.append(cmd_path) expanded_args = [] @@ -2213,7 +2201,7 @@ external dependencies (including libraries) must go to "dependencies".''') if not os.path.isabs(a): a = os.path.join(builddir if in_builddir else srcdir, self.subdir, a) if os.path.isfile(a): - a = os.path.relpath(a, start=srcdir) + a = mesonlib.relpath(a, start=srcdir) if not a.startswith('..'): if a not in self.build_def_files: self.build_def_files.append(a) @@ -2272,22 +2260,24 @@ external dependencies (including libraries) must go to "dependencies".''') return subproject subproject_dir_abs = os.path.join(self.environment.get_source_dir(), self.subproject_dir) - r = wrap.Resolver(subproject_dir_abs, self.coredata.wrap_mode) + r = wrap.Resolver(subproject_dir_abs, self.coredata.get_builtin_option('wrap_mode')) try: resolved = r.resolve(dirname) - except RuntimeError as e: - # if the reason subproject execution failed was because - # the directory doesn't exist, try to give some helpful - # advice if it's a nested subproject that needs - # promotion... - self.print_nested_info(dirname) - - if required: - msg = 'Subproject directory {!r} does not exist and cannot be downloaded:\n{}' - raise InterpreterException(msg.format(os.path.join(self.subproject_dir, dirname), e)) - - mlog.log('\nSubproject ', mlog.bold(dirname), 'is buildable:', mlog.red('NO'), '(disabling)\n') - return self.disabled_subproject(dirname) + except wrap.WrapException as e: + subprojdir = os.path.join(self.subproject_dir, r.directory) + if not required: + mlog.log('\nSubproject ', mlog.bold(subprojdir), 'is buildable:', mlog.red('NO'), '(disabling)\n') + return self.disabled_subproject(dirname) + + if isinstance(e, wrap.WrapNotFoundException): + # if the reason subproject execution failed was because + # the directory doesn't exist, try to give some helpful + # advice if it's a nested subproject that needs + # promotion... 
+ self.print_nested_info(dirname) + + msg = 'Failed to initialize {!r}:\n{}' + raise InterpreterException(msg.format(subprojdir, e)) subdir = os.path.join(self.subproject_dir, resolved) os.makedirs(os.path.join(self.build.environment.get_build_dir(), subdir), exist_ok=True) @@ -2379,9 +2369,18 @@ external dependencies (including libraries) must go to "dependencies".''') @noKwargs def func_configuration_data(self, node, args, kwargs): - if args: - raise InterpreterException('configuration_data takes no arguments') - return ConfigurationDataHolder(self.subproject) + if len(args) > 1: + raise InterpreterException('configuration_data takes only one optional positional arguments') + elif len(args) == 1: + initial_values = args[0] + if not isinstance(initial_values, dict): + raise InterpreterException('configuration_data first argument must be a dictionary') + else: + initial_values = {} + cdata = ConfigurationDataHolder(self.subproject) + for k, v in initial_values.items(): + cdata.set_method([k, v], {}) + return cdata def set_options(self, default_options): # Set default options as if they were passed to the command line. @@ -2779,6 +2778,8 @@ external dependencies (including libraries) must go to "dependencies".''') self.store_name_lookups(args) return progobj + @FeatureNewKwargs('find_program', '0.49.0', ['disabler']) + @disablerIfNotFound @permittedKwargs(permitted_kwargs['find_program']) def func_find_program(self, node, args, kwargs): if not args: @@ -2844,35 +2845,33 @@ external dependencies (including libraries) must go to "dependencies".''') return True def get_subproject_dep(self, name, dirname, varname, required): + dep = DependencyHolder(NotFoundDependency(self.environment), self.subproject) try: subproject = self.subprojects[dirname] - if not subproject.found(): - if not required: - return DependencyHolder(NotFoundDependency(self.environment), self.subproject) + if subproject.found(): + dep = self.subprojects[dirname].get_variable_method([varname], {}) + except InvalidArguments as e: + pass - raise DependencyException('Subproject %s was not found.' 
% (name)) + if not isinstance(dep, DependencyHolder): + raise InvalidCode('Fetched variable {!r} in the subproject {!r} is ' + 'not a dependency object.'.format(varname, dirname)) - dep = self.subprojects[dirname].get_variable_method([varname], {}) - except InvalidArguments as e: + if not dep.found(): if required: - raise DependencyException('Could not find dependency {} in subproject {}; {}' - ''.format(varname, dirname, str(e))) + raise DependencyException('Could not find dependency {} in subproject {}' + ''.format(varname, dirname)) # If the dependency is not required, don't raise an exception subproj_path = os.path.join(self.subproject_dir, dirname) mlog.log('Dependency', mlog.bold(name), 'from subproject', mlog.bold(subproj_path), 'found:', mlog.red('NO')) - return None - if not isinstance(dep, DependencyHolder): - raise InvalidCode('Fetched variable {!r} in the subproject {!r} is ' - 'not a dependency object.'.format(varname, dirname)) + return dep def _find_cached_fallback_dep(self, name, dirname, varname, wanted, required): if dirname not in self.subprojects: return False dep = self.get_subproject_dep(name, dirname, varname, required) - if not dep: - return False if not dep.found(): return dep @@ -2907,8 +2906,10 @@ external dependencies (including libraries) must go to "dependencies".''') elif name == 'openmp': FeatureNew('OpenMP Dependency', '0.46.0').use(self.subproject) + @FeatureNewKwargs('dependency', '0.49.0', ['disabler']) @FeatureNewKwargs('dependency', '0.40.0', ['method']) @FeatureNewKwargs('dependency', '0.38.0', ['default_options']) + @disablerIfNotFound @permittedKwargs(permitted_kwargs['dependency']) def func_dependency(self, node, args, kwargs): self.validate_arguments(args, 1, [str]) @@ -2949,7 +2950,7 @@ external dependencies (including libraries) must go to "dependencies".''') dep = NotFoundDependency(self.environment) # Unless a fallback exists and is forced ... - if self.coredata.wrap_mode == WrapMode.forcefallback and 'fallback' in kwargs: + if self.coredata.get_builtin_option('wrap_mode') == WrapMode.forcefallback and 'fallback' in kwargs: pass # ... search for it outside the project elif name != '': @@ -2986,26 +2987,21 @@ external dependencies (including libraries) must go to "dependencies".''') return Disabler() def print_nested_info(self, dependency_name): - message_templ = '''\nDependency %s not found but it is available in a sub-subproject. -To use it in the current project, promote it by going in the project source -root and issuing %s. 
- -''' + message = ['Dependency', mlog.bold(dependency_name), 'not found but it is available in a sub-subproject.\n' + + 'To use it in the current project, promote it by going in the project source\n' + 'root and issuing'] sprojs = mesonlib.detect_subprojects('subprojects', self.source_root) if dependency_name not in sprojs: return found = sprojs[dependency_name] if len(found) > 1: - suffix = 'one of the following commands' + message.append('one of the following commands:') else: - suffix = 'the following command' - message = message_templ % (dependency_name, suffix) - cmds = [] - command_templ = 'meson wrap promote ' + message.append('the following command:') + command_templ = '\nmeson wrap promote {}' for l in found: - cmds.append(command_templ + l[len(self.source_root) + 1:]) - final_message = message + '\n'.join(cmds) - print(final_message) + message.append(mlog.bold(command_templ.format(l[len(self.source_root) + 1:]))) + mlog.warning(*message) def get_subproject_infos(self, kwargs): fbinfo = kwargs['fallback'] @@ -3016,12 +3012,12 @@ root and issuing %s. def dependency_fallback(self, name, kwargs): display_name = name if name else '(anonymous)' - if self.coredata.wrap_mode in (WrapMode.nofallback, WrapMode.nodownload): + if self.coredata.get_builtin_option('wrap_mode') in (WrapMode.nofallback, WrapMode.nodownload): mlog.log('Not looking for a fallback subproject for the dependency', mlog.bold(display_name), 'because:\nUse of fallback' 'dependencies is disabled.') return None - elif self.coredata.wrap_mode == WrapMode.forcefallback: + elif self.coredata.get_builtin_option('wrap_mode') == WrapMode.forcefallback: mlog.log('Looking for a fallback subproject for the dependency', mlog.bold(display_name), 'because:\nUse of fallback dependencies is forced.') else: @@ -3053,8 +3049,8 @@ root and issuing %s. return None required = kwargs.get('required', True) dep = self.get_subproject_dep(name, dirname, varname, required) - if not dep: - return None + if not dep.found(): + return dep subproj_path = os.path.join(self.subproject_dir, dirname) # Check if the version of the declared dependency matches what we want if 'version' in kwargs: @@ -3540,9 +3536,13 @@ root and issuing %s. raise InterpreterException('@INPUT@ used as command argument, but no input file specified.') # Validate output output = kwargs['output'] - ofile_rpath = os.path.join(self.subdir, output) if not isinstance(output, str): raise InterpreterException('Output file name must be a string') + if ifile_abs: + values = mesonlib.get_filenames_templates_dict([ifile_abs], None) + outputs = mesonlib.substitute_values([output], values) + output = outputs[0] + ofile_rpath = os.path.join(self.subdir, output) if ofile_rpath in self.configure_file_outputs: mesonbuildfile = os.path.join(self.subdir, 'meson.build') current_call = "{}:{}".format(mesonbuildfile, self.current_lineno) @@ -3550,10 +3550,6 @@ root and issuing %s. mlog.warning('Output file', mlog.bold(ofile_rpath, True), 'for configure_file() at', current_call, 'overwrites configure_file() output at', first_call) else: self.configure_file_outputs[ofile_rpath] = self.current_lineno - if ifile_abs: - values = mesonlib.get_filenames_templates_dict([ifile_abs], None) - outputs = mesonlib.substitute_values([output], values) - output = outputs[0] if os.path.dirname(output) != '': raise InterpreterException('Output file name must not contain a subdirectory.') (ofile_path, ofile_fname) = os.path.split(os.path.join(self.subdir, output)) @@ -3561,7 +3557,12 @@ root and issuing %s. 
# Perform the appropriate action if 'configuration' in kwargs: conf = kwargs['configuration'] - if not isinstance(conf, ConfigurationDataHolder): + if isinstance(conf, dict): + cdata = ConfigurationDataHolder(self.subproject) + for k, v in conf.items(): + cdata.set_method([k, v], {}) + conf = cdata + elif not isinstance(conf, ConfigurationDataHolder): raise InterpreterException('Argument "configuration" is not of type configuration_data') mlog.log('Configuring', mlog.bold(output), 'using configuration') if inputfile is not None: diff --git a/mesonbuild/interpreterbase.py b/mesonbuild/interpreterbase.py index 1c74eeb..aee1c87 100644 --- a/mesonbuild/interpreterbase.py +++ b/mesonbuild/interpreterbase.py @@ -21,6 +21,14 @@ from . import environment, dependencies import os, copy, re, types from functools import wraps +class ObjectHolder: + def __init__(self, obj, subproject=None): + self.held_object = obj + self.subproject = subproject + + def __repr__(self): + return '<Holder: {!r}>'.format(self.held_object) + # Decorators for method calls. def check_stringlist(a, msg='Arguments must be strings.'): @@ -137,6 +145,17 @@ def noArgsFlattening(f): setattr(f, 'no-args-flattening', True) return f +def disablerIfNotFound(f): + @wraps(f) + def wrapped(*wrapped_args, **wrapped_kwargs): + kwargs = _get_callee_args(wrapped_args)[3] + disabler = kwargs.pop('disabler', False) + ret = f(*wrapped_args, **wrapped_kwargs) + if disabler and not ret.held_object.found(): + return Disabler() + return ret + return wrapped + class permittedKwargs: def __init__(self, permitted): @@ -292,6 +311,12 @@ class InvalidArguments(InterpreterException): class SubdirDoneRequest(BaseException): pass +class ContinueRequest(BaseException): + pass + +class BreakRequest(BaseException): + pass + class InterpreterObject: def __init__(self): self.methods = {} @@ -445,6 +470,10 @@ class InterpreterBase: return self.evaluate_indexing(cur) elif isinstance(cur, mparser.TernaryNode): return self.evaluate_ternary(cur) + elif isinstance(cur, mparser.ContinueNode): + raise ContinueRequest() + elif isinstance(cur, mparser.BreakNode): + raise BreakRequest() elif self.is_elementary_type(cur): return cur else: @@ -487,6 +516,13 @@ class InterpreterBase: return False return True + def evaluate_in(self, val1, val2): + if not isinstance(val1, (str, int, float, ObjectHolder)): + raise InvalidArguments('lvalue of "in" operator must be a string, integer, float, or object') + if not isinstance(val2, (list, dict)): + raise InvalidArguments('rvalue of "in" operator must be an array or a dict') + return val1 in val2 + def evaluate_comparison(self, node): val1 = self.evaluate_statement(node.left) if is_disabler(val1): @@ -494,6 +530,10 @@ class InterpreterBase: val2 = self.evaluate_statement(node.right) if is_disabler(val2): return val2 + if node.ctype == 'in': + return self.evaluate_in(val1, val2) + elif node.ctype == 'notin': + return not self.evaluate_in(val1, val2) valid = self.validate_comparison_types(val1, val2) # Ordering comparisons of different types isn't allowed since PR #1810 # (0.41.0). 
Since PR #2884 we also warn about equality comparisons of @@ -622,7 +662,12 @@ The result of this is undefined and will become a hard error in a future Meson r return items for item in items: self.set_variable(varname, item) - self.evaluate_codeblock(node.block) + try: + self.evaluate_codeblock(node.block) + except ContinueRequest: + continue + except BreakRequest: + break elif isinstance(items, dict): if len(node.varnames) != 2: raise InvalidArguments('Foreach on dict unpacks key and value') @@ -631,7 +676,12 @@ The result of this is undefined and will become a hard error in a future Meson r for key, value in items.items(): self.set_variable(node.varnames[0].value, key) self.set_variable(node.varnames[1].value, value) - self.evaluate_codeblock(node.block) + try: + self.evaluate_codeblock(node.block) + except ContinueRequest: + continue + except BreakRequest: + break else: raise InvalidArguments('Items of foreach loop must be an array or a dict') diff --git a/mesonbuild/mesonlib.py b/mesonbuild/mesonlib.py index ad6022f..04228a8 100644 --- a/mesonbuild/mesonlib.py +++ b/mesonbuild/mesonlib.py @@ -1208,3 +1208,12 @@ class BuildDirLock: elif have_msvcrt: msvcrt.locking(self.lockfile.fileno(), msvcrt.LK_UNLCK, 1) self.lockfile.close() + +def relpath(path, start): + # On Windows a relative path can't be evaluated for paths on two different + # drives (i.e. c:\foo and f:\bar). The only thing left to do is to use the + # original absolute path. + try: + return os.path.relpath(path, start) + except ValueError: + return path diff --git a/mesonbuild/minstall.py b/mesonbuild/minstall.py index b65abe0..fc25057 100644 --- a/mesonbuild/minstall.py +++ b/mesonbuild/minstall.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -import sys, pickle, os, shutil, subprocess, gzip, errno +import sys, pickle, os, shutil, subprocess, errno import shlex from glob import glob from .scripts import depfixer @@ -379,17 +379,7 @@ class Installer: outdir = os.path.dirname(outfilename) d.dirmaker.makedirs(outdir, exist_ok=True) install_mode = m[2] - if outfilename.endswith('.gz') and not full_source_filename.endswith('.gz'): - with open(outfilename, 'wb') as of: - with open(full_source_filename, 'rb') as sf: - # Set mtime and filename for reproducibility. - with gzip.GzipFile(fileobj=of, mode='wb', filename='', mtime=0) as gz: - gz.write(sf.read()) - shutil.copystat(full_source_filename, outfilename) - print('Installing %s to %s' % (full_source_filename, outdir)) - append_to_log(self.lf, outfilename) - else: - self.do_copyfile(full_source_filename, outfilename) + self.do_copyfile(full_source_filename, outfilename) set_mode(outfilename, install_mode, d.install_umask) def install_headers(self, d): diff --git a/mesonbuild/modules/gnome.py b/mesonbuild/modules/gnome.py index 1c2f034..46df53e 100644 --- a/mesonbuild/modules/gnome.py +++ b/mesonbuild/modules/gnome.py @@ -17,6 +17,7 @@ functionality such as gobject-introspection, gresources and gtk-doc''' import os import copy +import shlex import subprocess from .. 
import build @@ -153,10 +154,10 @@ class GnomeModule(ExtensionModule): # Make source dirs relative to build dir now source_dirs = [os.path.join(state.build_to_src, state.subdir, d) for d in source_dirs] - # Always include current directory, but after paths set by user - source_dirs.append(os.path.join(state.build_to_src, state.subdir)) # Ensure build directories of generated deps are included source_dirs += subdirs + # Always include current directory, but after paths set by user + source_dirs.append(os.path.join(state.build_to_src, state.subdir)) for source_dir in OrderedSet(source_dirs): cmd += ['--sourcedir', source_dir] @@ -1034,12 +1035,12 @@ This will become a hard error in the future.''') ldflags.update(compiler_flags[1]) ldflags.update(compiler_flags[2]) if compiler: - args += ['--cc=%s' % ' '.join(compiler.get_exelist())] - args += ['--ld=%s' % ' '.join(compiler.get_linker_exelist())] + args += ['--cc=%s' % ' '.join([shlex.quote(x) for x in compiler.get_exelist()])] + args += ['--ld=%s' % ' '.join([shlex.quote(x) for x in compiler.get_linker_exelist()])] if cflags: - args += ['--cflags=%s' % ' '.join(cflags)] + args += ['--cflags=%s' % ' '.join([shlex.quote(x) for x in cflags])] if ldflags: - args += ['--ldflags=%s' % ' '.join(ldflags)] + args += ['--ldflags=%s' % ' '.join([shlex.quote(x) for x in ldflags])] return args diff --git a/mesonbuild/modules/i18n.py b/mesonbuild/modules/i18n.py index 8b5e181..cde548f 100644 --- a/mesonbuild/modules/i18n.py +++ b/mesonbuild/modules/i18n.py @@ -87,14 +87,14 @@ class I18nModule(ExtensionModule): else: if isinstance(inputfile, str): inputfile = mesonlib.File.from_source_file(state.environment.source_dir, - state.subdir, inputfile) + state.subdir, inputfile) output = kwargs['output'] ifile_abs = inputfile.absolute_path(state.environment.source_dir, state.environment.build_dir) values = mesonlib.get_filenames_templates_dict([ifile_abs], None) outputs = mesonlib.substitute_values([output], values) output = outputs[0] - ct = build.CustomTarget(output + '_' + state.subdir + '_merge', state.subdir, state.subproject, kwargs) + ct = build.CustomTarget(output + '_' + state.subdir.replace('/', '@').replace('\\', '@') + '_merge', state.subdir, state.subproject, kwargs) return ModuleReturnValue(ct, [ct]) @FeatureNewKwargs('i18n.gettext', '0.37.0', ['preset']) diff --git a/mesonbuild/modules/python.py b/mesonbuild/modules/python.py index 2229949..954220b 100644 --- a/mesonbuild/modules/python.py +++ b/mesonbuild/modules/python.py @@ -23,7 +23,7 @@ from mesonbuild.modules import ModuleReturnValue from ..interpreterbase import ( noPosargs, noKwargs, permittedKwargs, InterpreterObject, InvalidArguments, - FeatureNew + FeatureNew, FeatureNewKwargs, disablerIfNotFound ) from ..interpreter import ExternalProgramHolder, extract_required_kwarg from ..interpreterbase import flatten @@ -467,6 +467,8 @@ class PythonModule(ExtensionModule): return mesonlib.version_compare(version, '>= 3.0') return True + @FeatureNewKwargs('python.find_installation', '0.49.0', ['disabler']) + @disablerIfNotFound @permittedKwargs(['required']) def find_installation(self, interpreter, state, args, kwargs): feature_check = FeatureNew('Passing "feature" option to find_installation', '0.48.0') diff --git a/mesonbuild/modules/python3.py b/mesonbuild/modules/python3.py index 3cfc689..5bda5ab 100644 --- a/mesonbuild/modules/python3.py +++ b/mesonbuild/modules/python3.py @@ -17,11 +17,12 @@ from .. import mesonlib, dependencies from . 
import ExtensionModule from mesonbuild.modules import ModuleReturnValue -from ..interpreterbase import noKwargs, permittedKwargs +from ..interpreterbase import noKwargs, permittedKwargs, FeatureDeprecated from ..build import known_shmod_kwargs class Python3Module(ExtensionModule): + @FeatureDeprecated('python3 module', '0.48.0') def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.snippets.add('extension_module') diff --git a/mesonbuild/modules/qt.py b/mesonbuild/modules/qt.py index 237220f..7a2c338 100644 --- a/mesonbuild/modules/qt.py +++ b/mesonbuild/modules/qt.py @@ -118,10 +118,11 @@ class QtBaseModule: @FeatureNewKwargs('qt.preprocess', '0.49.0', ['uic_extra_arguments']) @FeatureNewKwargs('qt.preprocess', '0.44.0', ['moc_extra_arguments']) - @permittedKwargs({'moc_headers', 'moc_sources', 'uic_extra_arguments', 'moc_extra_arguments', 'include_directories', 'dependencies', 'ui_files', 'qresources', 'method'}) + @FeatureNewKwargs('qt.preprocess', '0.49.0', ['rcc_extra_arguments']) + @permittedKwargs({'moc_headers', 'moc_sources', 'uic_extra_arguments', 'moc_extra_arguments', 'rcc_extra_arguments', 'include_directories', 'dependencies', 'ui_files', 'qresources', 'method'}) def preprocess(self, state, args, kwargs): - rcc_files, ui_files, moc_headers, moc_sources, uic_extra_arguments, moc_extra_arguments, sources, include_directories, dependencies \ - = extract_as_list(kwargs, 'qresources', 'ui_files', 'moc_headers', 'moc_sources', 'uic_extra_arguments', 'moc_extra_arguments', 'sources', 'include_directories', 'dependencies', pop = True) + rcc_files, ui_files, moc_headers, moc_sources, uic_extra_arguments, moc_extra_arguments, rcc_extra_arguments, sources, include_directories, dependencies \ + = extract_as_list(kwargs, 'qresources', 'ui_files', 'moc_headers', 'moc_sources', 'uic_extra_arguments', 'moc_extra_arguments', 'rcc_extra_arguments', 'sources', 'include_directories', 'dependencies', pop = True) sources += args[1:] method = kwargs.get('method', 'auto') self._detect_tools(state.environment, method) @@ -140,7 +141,7 @@ class QtBaseModule: name = args[0] rcc_kwargs = {'input': rcc_files, 'output': name + '.cpp', - 'command': [self.rcc, '-name', name, '-o', '@OUTPUT@', '@INPUT@'], + 'command': [self.rcc, '-name', name, '-o', '@OUTPUT@', rcc_extra_arguments, '@INPUT@'], 'depend_files': qrc_deps} res_target = build.CustomTarget(name, state.subdir, state.subproject, rcc_kwargs) sources.append(res_target) @@ -154,7 +155,7 @@ class QtBaseModule: name = 'qt' + str(self.qt_version) + '-' + basename.replace('.', '_') rcc_kwargs = {'input': rcc_file, 'output': name + '.cpp', - 'command': [self.rcc, '-name', '@BASENAME@', '-o', '@OUTPUT@', '@INPUT@'], + 'command': [self.rcc, '-name', '@BASENAME@', '-o', '@OUTPUT@', rcc_extra_arguments, '@INPUT@'], 'depend_files': qrc_deps} res_target = build.CustomTarget(name, state.subdir, state.subproject, rcc_kwargs) sources.append(res_target) diff --git a/mesonbuild/mparser.py b/mesonbuild/mparser.py index 9af6dac..be5c807 100644 --- a/mesonbuild/mparser.py +++ b/mesonbuild/mparser.py @@ -90,8 +90,9 @@ class Lexer: def __init__(self, code): self.code = code self.keywords = {'true', 'false', 'if', 'else', 'elif', - 'endif', 'and', 'or', 'not', 'foreach', 'endforeach'} - self.future_keywords = {'continue', 'break', 'in', 'return'} + 'endif', 'and', 'or', 'not', 'foreach', 'endforeach', + 'in', 'continue', 'break'} + self.future_keywords = {'return'} self.token_specification = [ # Need to be sorted longest to shortest. 
('ignore', re.compile(r'[ \t]')), @@ -242,6 +243,12 @@ class StringNode(ElementaryNode): def __str__(self): return "String node: '%s' (%d, %d)." % (self.value, self.lineno, self.colno) +class ContinueNode(ElementaryNode): + pass + +class BreakNode(ElementaryNode): + pass + class ArrayNode: def __init__(self, args): self.subdir = args.subdir @@ -436,7 +443,9 @@ comparison_map = {'equal': '==', 'lt': '<', 'le': '<=', 'gt': '>', - 'ge': '>=' + 'ge': '>=', + 'in': 'in', + 'notin': 'not in', } # Recursive descent parser for Meson's definition language. @@ -543,6 +552,8 @@ class Parser: for nodename, operator_type in comparison_map.items(): if self.accept(nodename): return ComparisonNode(operator_type, left, self.e5()) + if self.accept('not') and self.accept('in'): + return ComparisonNode('notin', left, self.e5()) return left def e5(self): @@ -754,6 +765,10 @@ class Parser: block = self.foreachblock() self.block_expect('endforeach', block_start) return block + if self.accept('continue'): + return ContinueNode(self.current) + if self.accept('break'): + return BreakNode(self.current) return self.statement() def codeblock(self): diff --git a/mesonbuild/msetup.py b/mesonbuild/msetup.py index 1576556..4256272 100644 --- a/mesonbuild/msetup.py +++ b/mesonbuild/msetup.py @@ -24,7 +24,6 @@ from . import environment, interpreter, mesonlib from . import build from . import mlog, coredata from .mesonlib import MesonException -from .wrap import WrapMode def add_arguments(parser): coredata.register_builtin_arguments(parser) @@ -32,10 +31,6 @@ def add_arguments(parser): help='File describing cross compilation environment.') parser.add_argument('-v', '--version', action='version', version=coredata.version) - # See the mesonlib.WrapMode enum for documentation - parser.add_argument('--wrap-mode', default=None, - type=wrapmodetype, choices=WrapMode, - help='Special wrap mode to use') parser.add_argument('--profile-self', action='store_true', dest='profile', help=argparse.SUPPRESS) parser.add_argument('--fatal-meson-warnings', action='store_true', dest='fatal_warnings', @@ -47,14 +42,6 @@ def add_arguments(parser): parser.add_argument('builddir', nargs='?', default=None) parser.add_argument('sourcedir', nargs='?', default=None) -def wrapmodetype(string): - try: - return getattr(WrapMode, string) - except AttributeError: - msg = ', '.join([t.name.lower() for t in WrapMode]) - msg = 'invalid argument {!r}, use one of {}'.format(string, msg) - raise argparse.ArgumentTypeError(msg) - class MesonApp: def __init__(self, options): (self.source_dir, self.build_dir) = self.validate_dirs(options.builddir, diff --git a/mesonbuild/scripts/coverage.py b/mesonbuild/scripts/coverage.py index 916c84f..0509eff 100644 --- a/mesonbuild/scripts/coverage.py +++ b/mesonbuild/scripts/coverage.py @@ -77,16 +77,19 @@ def coverage(outputs, source_root, subproject_root, build_root, log_dir): subprocess.check_call([lcov_exe, '-a', initial_tracefile, '-a', run_tracefile, + '--rc', 'lcov_branch_coverage=1', '-o', raw_tracefile]) # Remove all directories outside the source_root from the covinfo subprocess.check_call([lcov_exe, '--extract', raw_tracefile, os.path.join(source_root, '*'), + '--rc', 'lcov_branch_coverage=1', '--output-file', covinfo]) # Remove all directories inside subproject dir subprocess.check_call([lcov_exe, '--remove', covinfo, os.path.join(subproject_root, '*'), + '--rc', 'lcov_branch_coverage=1', '--output-file', covinfo]) subprocess.check_call([genhtml_exe, '--prefix', build_root, diff --git 
a/mesonbuild/scripts/depfixer.py b/mesonbuild/scripts/depfixer.py index d3d3028..7294186 100644 --- a/mesonbuild/scripts/depfixer.py +++ b/mesonbuild/scripts/depfixer.py @@ -16,6 +16,8 @@ import sys, struct import shutil, subprocess +from ..mesonlib import OrderedSet + SHT_STRTAB = 3 DT_NEEDED = 1 DT_RPATH = 15 @@ -374,7 +376,26 @@ def fix_darwin(fname, new_rpath, final_path, install_name_mappings): try: args = [] if rpaths: - for rp in rpaths: + # TODO: fix this properly, not totally clear how + # + # removing rpaths from binaries on macOS has tons of + # weird edge cases. For instance, if the user provided + # a '-Wl,-rpath' argument in LDFLAGS that happens to + # coincide with an rpath generated from a dependency, + # this would cause installation failures, as meson would + # generate install_name_tool calls with two identical + # '-delete_rpath' arguments, which install_name_tool + # fails on. Because meson itself ensures that it never + # adds duplicate rpaths, duplicate rpaths necessarily + # come from user variables. The idea of using OrderedSet + # is to remove *at most one* duplicate RPATH entry. This + # is not optimal, as it only respects the user's choice + # partially: if they provided a non-duplicate '-Wl,-rpath' + # argument, it gets removed, if they provided a duplicate + # one, it remains in the final binary. A potentially optimal + # solution would split all user '-Wl,-rpath' arguments from + # LDFLAGS, and later add them back with '-add_rpath'. + for rp in OrderedSet(rpaths): args += ['-delete_rpath', rp] subprocess.check_call(['install_name_tool', fname] + args, stdout=subprocess.DEVNULL, @@ -392,7 +413,7 @@ def fix_darwin(fname, new_rpath, final_path, install_name_mappings): subprocess.check_call(['install_name_tool', fname] + args, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL) - except Exception as e: + except Exception: raise sys.exit(0) @@ -411,6 +432,9 @@ def fix_rpath(fname, new_rpath, final_path, install_name_mappings, verbose=True) # Static libraries never have rpaths if fname.endswith('.a'): return + # DLLs never have rpaths + if fname.endswith('.dll'): + return try: if fname.endswith('.jar'): fix_jar(fname) diff --git a/mesonbuild/scripts/dist.py b/mesonbuild/scripts/dist.py index 56ac585..f49492c 100644 --- a/mesonbuild/scripts/dist.py +++ b/mesonbuild/scripts/dist.py @@ -24,7 +24,6 @@ import tarfile, zipfile import tempfile from glob import glob from mesonbuild.environment import detect_ninja -from mesonbuild.dependencies import ExternalProgram from mesonbuild.mesonlib import windows_proof_rmtree from mesonbuild import mlog diff --git a/mesonbuild/scripts/scanbuild.py b/mesonbuild/scripts/scanbuild.py index f381552..1c86bf1 100644 --- a/mesonbuild/scripts/scanbuild.py +++ b/mesonbuild/scripts/scanbuild.py @@ -13,16 +13,17 @@ # limitations under the License. 
import os +import shlex import subprocess import shutil import tempfile from ..environment import detect_ninja from ..mesonlib import Popen_safe -def scanbuild(exename, srcdir, blddir, privdir, logdir, args): +def scanbuild(exelist, srcdir, blddir, privdir, logdir, args): with tempfile.TemporaryDirectory(dir=privdir) as scandir: - meson_cmd = [exename] + args - build_cmd = [exename, '-o', logdir, detect_ninja(), '-C', scandir] + meson_cmd = exelist + args + build_cmd = exelist + ['-o', logdir, detect_ninja(), '-C', scandir] rc = subprocess.call(meson_cmd + [srcdir, scandir]) if rc != 0: return rc @@ -58,8 +59,14 @@ def run(args): toolname = tool break - exename = os.environ.get('SCANBUILD', toolname) - if not shutil.which(exename): - print('Scan-build not installed.') + if 'SCANBUILD' in os.environ: + exelist = shlex.split(os.environ['SCANBUILD']) + else: + exelist = [toolname] + + try: + Popen_safe(exelist + ['--help']) + except OSError: + print('Could not execute scan-build "%s"' % ' '.join(exelist)) return 1 - return scanbuild(exename, srcdir, blddir, privdir, logdir, meson_cmd) + return scanbuild(exelist, srcdir, blddir, privdir, logdir, meson_cmd) diff --git a/mesonbuild/wrap/__init__.py b/mesonbuild/wrap/__init__.py index b792dfa..6be2c44 100644 --- a/mesonbuild/wrap/__init__.py +++ b/mesonbuild/wrap/__init__.py @@ -33,6 +33,15 @@ from enum import Enum # Note that these options do not affect subprojects that # are git submodules since those are only usable in git # repositories, and you almost always want to download them. + +# This did _not_ work when inside the WrapMode class. +# I don't know why. If you can fix this, patches welcome. +string_to_value = {'default': 1, + 'nofallback': 2, + 'nodownload': 3, + 'forcefallback': 4, + } + class WrapMode(Enum): default = 1 nofallback = 2 @@ -41,3 +50,8 @@ class WrapMode(Enum): def __str__(self): return self.name + + @staticmethod + def from_string(mode_name): + g = string_to_value[mode_name] + return WrapMode(g) diff --git a/mesonbuild/wrap/wrap.py b/mesonbuild/wrap/wrap.py index 5cc2225..248c365 100644 --- a/mesonbuild/wrap/wrap.py +++ b/mesonbuild/wrap/wrap.py @@ -17,9 +17,9 @@ import contextlib import urllib.request, os, hashlib, shutil, tempfile, stat import subprocess import sys -from pathlib import Path +import configparser from . 
import WrapMode -from ..mesonlib import Popen_safe +from ..mesonlib import MesonException try: import ssl @@ -67,206 +67,179 @@ def open_wrapdburl(urlstring): urlstring = 'http' + urlstring[5:] return urllib.request.urlopen(urlstring, timeout=req_timeout) +class WrapException(MesonException): + pass + +class WrapNotFoundException(WrapException): + pass class PackageDefinition: def __init__(self, fname): - self.values = {} - with open(fname) as ifile: - first = ifile.readline().strip() - - if first == '[wrap-file]': - self.type = 'file' - elif first == '[wrap-git]': - self.type = 'git' - elif first == '[wrap-hg]': - self.type = 'hg' - elif first == '[wrap-svn]': - self.type = 'svn' - else: - raise RuntimeError('Invalid format of package file') - for line in ifile: - line = line.strip() - if line == '': - continue - (k, v) = line.split('=', 1) - k = k.strip() - v = v.strip() - self.values[k] = v + self.basename = os.path.basename(fname) + try: + self.config = configparser.ConfigParser(interpolation=None) + self.config.read(fname) + except: + raise WrapException('Failed to parse {}'.format(self.basename)) + if len(self.config.sections()) < 1: + raise WrapException('Missing sections in {}'.format(self.basename)) + self.wrap_section = self.config.sections()[0] + if not self.wrap_section.startswith('wrap-'): + m = '{!r} is not a valid first section in {}' + raise WrapException(m.format(self.wrap_section, self.basename)) + self.type = self.wrap_section[5:] + self.values = dict(self.config[self.wrap_section]) def get(self, key): - return self.values[key] + try: + return self.values[key] + except KeyError: + m = 'Missing key {!r} in {}' + raise WrapException(m.format(key, self.basename)) def has_patch(self): return 'patch_url' in self.values class Resolver: - def __init__(self, subdir_root, wrap_mode=WrapMode(1)): + def __init__(self, subdir_root, wrap_mode=WrapMode.default): self.wrap_mode = wrap_mode self.subdir_root = subdir_root self.cachedir = os.path.join(self.subdir_root, 'packagecache') def resolve(self, packagename): - # Check if the directory is already resolved - dirname = Path(os.path.join(self.subdir_root, packagename)) - subprojdir = os.path.join(*dirname.parts[-2:]) - if dirname.is_dir(): - if (dirname / 'meson.build').is_file(): - # The directory is there and has meson.build? Great, use it. - return packagename - # Is the dir not empty and also not a git submodule dir that is - # not checkout properly? Can't do anything, exception! - elif next(dirname.iterdir(), None) and not (dirname / '.git').is_file(): - m = '{!r} is not empty and has no meson.build files' - raise RuntimeError(m.format(subprojdir)) - elif dirname.exists(): - m = '{!r} already exists and is not a dir; cannot use as subproject' - raise RuntimeError(m.format(subprojdir)) - - dirname = str(dirname) + self.packagename = packagename + self.directory = packagename + # We always have to load the wrap file, if it exists, because it could + # override the default directory name. + self.wrap = self.load_wrap() + if self.wrap and 'directory' in self.wrap.values: + self.directory = self.wrap.get('directory') + self.dirname = os.path.join(self.subdir_root, self.directory) + meson_file = os.path.join(self.dirname, 'meson.build') + + # The directory is there and has meson.build? Great, use it. 
+ if os.path.exists(meson_file): + return self.directory + # Check if the subproject is a git submodule - if self.resolve_git_submodule(dirname): - return packagename + self.resolve_git_submodule() + + if os.path.exists(self.dirname): + if not os.path.isdir(self.dirname): + raise WrapException('Path already exists but is not a directory') + else: + # A wrap file is required to download + if not self.wrap: + m = 'Subproject directory not found and {}.wrap file not found' + raise WrapNotFoundException(m.format(self.packagename)) + + if self.wrap.type == 'file': + self.get_file() + else: + self.check_can_download() + if self.wrap.type == 'git': + self.get_git() + elif self.wrap.type == "hg": + self.get_hg() + elif self.wrap.type == "svn": + self.get_svn() + else: + raise WrapException('Unknown wrap type {!r}'.format(self.wrap.type)) + + # A meson.build file is required in the directory + if not os.path.exists(meson_file): + raise WrapException('Subproject exists but has no meson.build file') + return self.directory + + def load_wrap(self): + fname = os.path.join(self.subdir_root, self.packagename + '.wrap') + if os.path.isfile(fname): + return PackageDefinition(fname) + return None + + def check_can_download(self): # Don't download subproject data based on wrap file if requested. # Git submodules are ok (see above)! if self.wrap_mode is WrapMode.nodownload: m = 'Automatic wrap-based subproject downloading is disabled' - raise RuntimeError(m) - - # Check if there's a .wrap file for this subproject - fname = os.path.join(self.subdir_root, packagename + '.wrap') - if not os.path.isfile(fname): - # No wrap file with this name? Give up. - m = 'No {}.wrap found for {!r}' - raise RuntimeError(m.format(packagename, subprojdir)) - p = PackageDefinition(fname) - if p.type == 'file': - if not os.path.isdir(self.cachedir): - os.mkdir(self.cachedir) - self.download(p, packagename) - self.extract_package(p) - elif p.type == 'git': - self.get_git(p) - elif p.type == "hg": - self.get_hg(p) - elif p.type == "svn": - self.get_svn(p) - else: - raise AssertionError('Unreachable code.') - return p.get('directory') + raise WrapException(m) - def resolve_git_submodule(self, dirname): + def resolve_git_submodule(self): # Are we in a git repository? ret, out = quiet_git(['rev-parse'], self.subdir_root) if not ret: return False # Is `dirname` a submodule? - ret, out = quiet_git(['submodule', 'status', dirname], self.subdir_root) + ret, out = quiet_git(['submodule', 'status', self.dirname], self.subdir_root) if not ret: return False # Submodule has not been added, add it if out.startswith(b'+'): - mlog.warning('git submodule {} might be out of date'.format(dirname)) + mlog.warning('git submodule might be out of date') return True elif out.startswith(b'U'): - raise RuntimeError('submodule {} has merge conflicts'.format(dirname)) + raise WrapException('git submodule has merge conflicts') # Submodule exists, but is deinitialized or wasn't initialized elif out.startswith(b'-'): - if subprocess.call(['git', '-C', self.subdir_root, 'submodule', 'update', '--init', dirname]) == 0: + if subprocess.call(['git', '-C', self.subdir_root, 'submodule', 'update', '--init', self.dirname]) == 0: return True - raise RuntimeError('Failed to git submodule init {!r}'.format(dirname)) + raise WrapException('git submodule failed to init') # Submodule looks fine, but maybe it wasn't populated properly. Do a checkout. 
elif out.startswith(b' '): - subprocess.call(['git', 'checkout', '.'], cwd=dirname) + subprocess.call(['git', 'checkout', '.'], cwd=self.dirname) # Even if checkout failed, try building it anyway and let the user # handle any problems manually. return True + elif out == b'': + # It is not a submodule, just a folder that exists in the main repository. + return False m = 'Unknown git submodule output: {!r}' - raise RuntimeError(m.format(out)) + raise WrapException(m.format(out)) - def get_git(self, p): - checkoutdir = os.path.join(self.subdir_root, p.get('directory')) - revno = p.get('revision') - is_there = os.path.isdir(checkoutdir) - if is_there: - try: - subprocess.check_call(['git', 'rev-parse'], cwd=checkoutdir) - except subprocess.CalledProcessError: - raise RuntimeError('%s is not empty but is not a valid ' - 'git repository, we can not work with it' - ' as a subproject directory.' % ( - checkoutdir)) - - if revno.lower() == 'head': - # Failure to do pull is not a fatal error, - # because otherwise you can't develop without - # a working net connection. - subprocess.call(['git', 'pull'], cwd=checkoutdir) - else: - if subprocess.call(['git', 'checkout', revno], cwd=checkoutdir) != 0: - subprocess.check_call(['git', 'fetch', p.get('url'), revno], cwd=checkoutdir) - subprocess.check_call(['git', 'checkout', revno], - cwd=checkoutdir) - else: - if p.values.get('clone-recursive', '').lower() == 'true': - subprocess.check_call(['git', 'clone', '--recursive', p.get('url'), - p.get('directory')], cwd=self.subdir_root) - else: - subprocess.check_call(['git', 'clone', p.get('url'), - p.get('directory')], cwd=self.subdir_root) - if revno.lower() != 'head': - if subprocess.call(['git', 'checkout', revno], cwd=checkoutdir) != 0: - subprocess.check_call(['git', 'fetch', p.get('url'), revno], cwd=checkoutdir) - subprocess.check_call(['git', 'checkout', revno], - cwd=checkoutdir) - push_url = p.values.get('push-url') - if push_url: - subprocess.check_call(['git', 'remote', 'set-url', - '--push', 'origin', push_url], - cwd=checkoutdir) - - def get_hg(self, p): - checkoutdir = os.path.join(self.subdir_root, p.get('directory')) - revno = p.get('revision') - is_there = os.path.isdir(checkoutdir) - if is_there: - if revno.lower() == 'tip': - # Failure to do pull is not a fatal error, - # because otherwise you can't develop without - # a working net connection. - subprocess.call(['hg', 'pull'], cwd=checkoutdir) - else: - if subprocess.call(['hg', 'checkout', revno], cwd=checkoutdir) != 0: - subprocess.check_call(['hg', 'pull'], cwd=checkoutdir) - subprocess.check_call(['hg', 'checkout', revno], - cwd=checkoutdir) - else: - subprocess.check_call(['hg', 'clone', p.get('url'), - p.get('directory')], cwd=self.subdir_root) - if revno.lower() != 'tip': - subprocess.check_call(['hg', 'checkout', revno], - cwd=checkoutdir) - - def get_svn(self, p): - checkoutdir = os.path.join(self.subdir_root, p.get('directory')) - revno = p.get('revision') - is_there = os.path.isdir(checkoutdir) - if is_there: - p, out = Popen_safe(['svn', 'info', '--show-item', 'revision', checkoutdir]) - current_revno = out - if current_revno == revno: - return - - if revno.lower() == 'head': - # Failure to do pull is not a fatal error, - # because otherwise you can't develop without - # a working net connection. 
- subprocess.call(['svn', 'update'], cwd=checkoutdir) - else: - subprocess.check_call(['svn', 'update', '-r', revno], cwd=checkoutdir) + def get_file(self): + path = self.get_file_internal('source') + extract_dir = self.subdir_root + # Some upstreams ship packages that do not have a leading directory. + # Create one for them. + if 'lead_directory_missing' in self.wrap.values: + os.mkdir(self.dirname) + extract_dir = self.dirname + shutil.unpack_archive(path, extract_dir) + if self.wrap.has_patch(): + self.apply_patch() + + def get_git(self): + revno = self.wrap.get('revision') + if self.wrap.values.get('clone-recursive', '').lower() == 'true': + subprocess.check_call(['git', 'clone', '--recursive', self.wrap.get('url'), + self.directory], cwd=self.subdir_root) else: - subprocess.check_call(['svn', 'checkout', '-r', revno, p.get('url'), - p.get('directory')], cwd=self.subdir_root) + subprocess.check_call(['git', 'clone', self.wrap.get('url'), + self.directory], cwd=self.subdir_root) + if revno.lower() != 'head': + if subprocess.call(['git', 'checkout', revno], cwd=self.dirname) != 0: + subprocess.check_call(['git', 'fetch', self.wrap.get('url'), revno], cwd=self.dirname) + subprocess.check_call(['git', 'checkout', revno], + cwd=self.dirname) + push_url = self.wrap.values.get('push-url') + if push_url: + subprocess.check_call(['git', 'remote', 'set-url', + '--push', 'origin', push_url], + cwd=self.dirname) + + def get_hg(self): + revno = self.wrap.get('revision') + subprocess.check_call(['hg', 'clone', self.wrap.get('url'), + self.directory], cwd=self.subdir_root) + if revno.lower() != 'tip': + subprocess.check_call(['hg', 'checkout', revno], + cwd=self.dirname) + + def get_svn(self): + revno = self.wrap.get('revision') + subprocess.check_call(['svn', 'checkout', '-r', revno, self.wrap.get('url'), + self.directory], cwd=self.subdir_root) def get_data(self, url): blocksize = 10 * 1024 @@ -312,41 +285,48 @@ class Resolver: hashvalue = h.hexdigest() return hashvalue, tmpfile.name - def get_hash(self, data): + def check_hash(self, what, path): + expected = self.wrap.get(what + '_hash') h = hashlib.sha256() - h.update(data) - hashvalue = h.hexdigest() - return hashvalue - - def download(self, p, packagename): - ofname = os.path.join(self.cachedir, p.get('source_filename')) - if os.path.exists(ofname): - mlog.log('Using', mlog.bold(packagename), 'from cache.') - else: - srcurl = p.get('source_url') - mlog.log('Downloading', mlog.bold(packagename), 'from', mlog.bold(srcurl)) - dhash, tmpfile = self.get_data(srcurl) - expected = p.get('source_hash') - if dhash != expected: - os.remove(tmpfile) - raise RuntimeError('Incorrect hash for source %s:\n %s expected\n %s actual.' 
% (packagename, expected, dhash)) - os.rename(tmpfile, ofname) - if p.has_patch(): - patch_filename = p.get('patch_filename') - filename = os.path.join(self.cachedir, patch_filename) - if os.path.exists(filename): - mlog.log('Using', mlog.bold(patch_filename), 'from cache.') - else: - purl = p.get('patch_url') - mlog.log('Downloading patch from', mlog.bold(purl)) - phash, tmpfile = self.get_data(purl) - expected = p.get('patch_hash') - if phash != expected: - os.remove(tmpfile) - raise RuntimeError('Incorrect hash for patch %s:\n %s expected\n %s actual' % (packagename, expected, phash)) - os.rename(tmpfile, filename) - else: - mlog.log('Package does not require patch.') + with open(path, 'rb') as f: + h.update(f.read()) + dhash = h.hexdigest() + if dhash != expected: + raise WrapException('Incorrect hash for %s:\n %s expected\n %s actual.' % (what, expected, dhash)) + + def download(self, what, ofname): + self.check_can_download() + srcurl = self.wrap.get(what + '_url') + mlog.log('Downloading', mlog.bold(self.packagename), what, 'from', mlog.bold(srcurl)) + dhash, tmpfile = self.get_data(srcurl) + expected = self.wrap.get(what + '_hash') + if dhash != expected: + os.remove(tmpfile) + raise WrapException('Incorrect hash for %s:\n %s expected\n %s actual.' % (what, expected, dhash)) + os.rename(tmpfile, ofname) + + def get_file_internal(self, what): + filename = self.wrap.get(what + '_filename') + cache_path = os.path.join(self.cachedir, filename) + + if os.path.exists(cache_path): + self.check_hash(what, cache_path) + mlog.log('Using', mlog.bold(self.packagename), what, 'from cache.') + return cache_path + + if not os.path.isdir(self.cachedir): + os.mkdir(self.cachedir) + self.download(what, cache_path) + return cache_path + + def apply_patch(self): + path = self.get_file_internal('patch') + try: + shutil.unpack_archive(path, self.subdir_root) + except Exception: + with tempfile.TemporaryDirectory() as workdir: + shutil.unpack_archive(path, workdir) + self.copy_tree(workdir, self.subdir_root) def copy_tree(self, root_src_dir, root_dst_dir): """ @@ -366,36 +346,3 @@ class Resolver: os.chmod(dst_file, stat.S_IWUSR) os.remove(dst_file) shutil.copy2(src_file, dst_dir) - - def extract_package(self, package): - if sys.version_info < (3, 5): - try: - import lzma # noqa: F401 - del lzma - except ImportError: - pass - else: - try: - shutil.register_unpack_format('xztar', ['.tar.xz', '.txz'], shutil._unpack_tarfile, [], "xz'ed tar-file") - except shutil.RegistryError: - pass - target_dir = os.path.join(self.subdir_root, package.get('directory')) - if os.path.isdir(target_dir): - return - extract_dir = self.subdir_root - # Some upstreams ship packages that do not have a leading directory. - # Create one for them. 
- try: - package.get('lead_directory_missing') - os.mkdir(target_dir) - extract_dir = target_dir - except KeyError: - pass - shutil.unpack_archive(os.path.join(self.cachedir, package.get('source_filename')), extract_dir) - if package.has_patch(): - try: - shutil.unpack_archive(os.path.join(self.cachedir, package.get('patch_filename')), self.subdir_root) - except Exception: - with tempfile.TemporaryDirectory() as workdir: - shutil.unpack_archive(os.path.join(self.cachedir, package.get('patch_filename')), workdir) - self.copy_tree(workdir, self.subdir_root) diff --git a/run_cross_test.py b/run_cross_test.py index 7191402..b2ef6be 100755 --- a/run_cross_test.py +++ b/run_cross_test.py @@ -25,26 +25,34 @@ Eventually migrate to something fancier.''' import sys import os from pathlib import Path +import argparse from run_project_tests import gather_tests, run_tests, StopException, setup_commands from run_project_tests import failing_logs -def runtests(cross_file): +def runtests(cross_file, failfast): commontests = [('common', gather_tests(Path('test cases', 'common')), False)] try: - (passing_tests, failing_tests, skipped_tests) = run_tests(commontests, 'meson-cross-test-run', ['--cross', cross_file]) + (passing_tests, failing_tests, skipped_tests) = \ + run_tests(commontests, 'meson-cross-test-run', failfast, ['--cross', cross_file]) except StopException: pass print('\nTotal passed cross tests:', passing_tests) print('Total failed cross tests:', failing_tests) print('Total skipped cross tests:', skipped_tests) - if failing_tests > 0 and ('TRAVIS' in os.environ or 'APPVEYOR' in os.environ): + if failing_tests > 0 and ('CI' in os.environ): print('\nMesonlogs of failing tests\n') - for l in failing_logs: - print(l, '\n') - sys.exit(failing_tests) + for log in failing_logs: + print(log, '\n') + return failing_tests + +def main(): + parser = argparse.ArgumentParser() + parser.add_argument('--failfast', action='store_true') + parser.add_argument('cross_file') + options = parser.parse_args() + setup_commands('ninja') + return runtests(options.cross_file, options.failfast) if __name__ == '__main__': - setup_commands('ninja') - cross_file = sys.argv[1] - runtests(cross_file) + sys.exit(main()) diff --git a/run_meson_command_tests.py b/run_meson_command_tests.py index fd33856..390868a 100755 --- a/run_meson_command_tests.py +++ b/run_meson_command_tests.py @@ -14,6 +14,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
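The rewritten wrap resolver above folds downloading, caching and hash verification into per-artifact helpers (`check_hash`, `download`, `get_file_internal`). A minimal standalone sketch of that cache-then-verify pattern, assuming only the standard library; the helper names and the `fetch` callback are illustrative and are not Meson's actual API:

```python
import hashlib
import os

def sha256_of(path):
    # Wrap archives are small, so hashing the whole file at once is fine.
    h = hashlib.sha256()
    with open(path, 'rb') as f:
        h.update(f.read())
    return h.hexdigest()

def cached_fetch(cachedir, filename, expected_hash, fetch):
    # Return a verified file from cachedir, calling fetch(dest) on a cache miss.
    # Cached copies are re-hashed before reuse, mirroring check_hash() above.
    path = os.path.join(cachedir, filename)
    if not os.path.exists(path):
        os.makedirs(cachedir, exist_ok=True)
        fetch(path)
    actual = sha256_of(path)
    if actual != expected_hash:
        raise RuntimeError('Incorrect hash for %s:\n %s expected\n %s actual.'
                           % (filename, expected_hash, actual))
    return path
```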
+import sys import os import tempfile import unittest @@ -23,11 +24,6 @@ from pathlib import Path from mesonbuild.mesonlib import windows_proof_rmtree, python_command, is_windows -# Find the meson.py adjacent to us -meson_py = Path(__file__).resolve().parent / 'meson.py' -if not meson_py.is_file(): - raise RuntimeError("meson.py not found: test must only run from git") - def get_pypath(): import sysconfig pypath = sysconfig.get_path('purelib', vars={'base': ''}) @@ -176,8 +172,7 @@ class CommandTests(unittest.TestCase): builddir = str(self.tmpdir / 'build4') (bindir / 'meson').rename(bindir / 'meson.real') wrapper = (bindir / 'meson') - with open(str(wrapper), 'w') as f: - f.write('#!/bin/sh\n\nmeson.real "$@"') + wrapper.open('w').write('#!/bin/sh\n\nmeson.real "$@"') wrapper.chmod(0o755) meson_setup = [str(wrapper), 'setup'] meson_command = meson_setup + self.meson_args @@ -195,5 +190,6 @@ class CommandTests(unittest.TestCase): zipapp.create_archive(source=source, target=target, interpreter=python_command[0], main=None) self._run([target.as_posix(), '--help']) + if __name__ == '__main__': - unittest.main(buffer=True) + sys.exit(unittest.main(buffer=True)) diff --git a/run_project_tests.py b/run_project_tests.py index 876d135..2445dd4 100755 --- a/run_project_tests.py +++ b/run_project_tests.py @@ -36,11 +36,12 @@ import argparse import xml.etree.ElementTree as ET import time import multiprocessing -from concurrent.futures import ProcessPoolExecutor +from concurrent.futures import ProcessPoolExecutor, CancelledError import re from run_tests import get_fake_options, run_configure, get_meson_script from run_tests import get_backend_commands, get_backend_args_for_dir, Backend from run_tests import ensure_backend_detects_changes +from run_tests import guess_backend class BuildStep(Enum): @@ -81,7 +82,7 @@ class AutoDeletedDir: failing_logs = [] print_debug = 'MESON_PRINT_TEST_OUTPUT' in os.environ -under_ci = not {'TRAVIS', 'APPVEYOR'}.isdisjoint(os.environ) +under_ci = 'CI' in os.environ do_debug = under_ci or print_debug no_meson_log_msg = 'No meson-log.txt found.' 
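The test harness now keys off a single generic `CI` environment variable (exported as `CI: 1` in azure-pipelines.yml) instead of checking for `TRAVIS` or `APPVEYOR` individually. A rough sketch of the intent, assuming only the standard library:

```python
import os

def under_ci():
    # Azure Pipelines gets CI=1 from azure-pipelines.yml; Travis and AppVeyor
    # already export a CI variable, so one generic check covers all of them.
    return 'CI' in os.environ

# Example use, mirroring run_tests.py: stay nice on developer machines,
# but take full priority on CI workers.
if not under_ci() and hasattr(os, 'nice'):
    os.nice(20)
```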
@@ -101,26 +102,7 @@ signal.signal(signal.SIGTERM, stop_handler) def setup_commands(optbackend): global do_debug, backend, backend_flags global compile_commands, clean_commands, test_commands, install_commands, uninstall_commands - backend = optbackend - msbuild_exe = shutil.which('msbuild') - # Auto-detect backend if unspecified - if backend is None: - if msbuild_exe is not None: - backend = 'vs' # Meson will auto-detect VS version to use - else: - backend = 'ninja' - # Set backend arguments for Meson - if backend.startswith('vs'): - backend_flags = ['--backend=' + backend] - backend = Backend.vs - elif backend == 'xcode': - backend_flags = ['--backend=xcode'] - backend = Backend.xcode - elif backend == 'ninja': - backend_flags = ['--backend=ninja'] - backend = Backend.ninja - else: - raise RuntimeError('Unknown backend: {!r}'.format(backend)) + backend, backend_flags = guess_backend(optbackend, shutil.which('msbuild')) compile_commands, clean_commands, test_commands, install_commands, \ uninstall_commands = get_backend_commands(backend, do_debug) @@ -523,14 +505,14 @@ def detect_tests_to_run(): gathered_tests = [(name, gather_tests(Path('test cases', subdir)), skip) for name, subdir, skip in all_tests] return gathered_tests -def run_tests(all_tests, log_name_base, extra_args): +def run_tests(all_tests, log_name_base, failfast, extra_args): global logfile txtname = log_name_base + '.txt' with open(txtname, 'w', encoding='utf-8', errors='ignore') as lf: logfile = lf - return _run_tests(all_tests, log_name_base, extra_args) + return _run_tests(all_tests, log_name_base, failfast, extra_args) -def _run_tests(all_tests, log_name_base, extra_args): +def _run_tests(all_tests, log_name_base, failfast, extra_args): global stop, executor, futures, system_compiler xmlname = log_name_base + '.xml' junit_root = ET.Element('testsuites') @@ -578,7 +560,10 @@ def _run_tests(all_tests, log_name_base, extra_args): futures.append((testname, t, result)) for (testname, t, result) in futures: sys.stdout.flush() - result = result.result() + try: + result = result.result() + except CancelledError: + continue if (result is None) or (('MESON_SKIP_TEST' in result.stdo) and (skippable(name, t.as_posix()))): print(yellow('Skipping:'), t.as_posix()) current_test = ET.SubElement(current_suite, 'testcase', {'name': testname, @@ -599,6 +584,10 @@ def _run_tests(all_tests, log_name_base, extra_args): else: failing_logs.append(result.stdo) failing_logs.append(result.stde) + if failfast: + print("Cancelling the rest of the tests") + for (_, _, res) in futures: + res.cancel() else: print('Succeeded test%s: %s' % (without_install, t.as_posix())) passing_tests += 1 @@ -616,6 +605,10 @@ def _run_tests(all_tests, log_name_base, extra_args): stdoel.text = result.stdo stdeel = ET.SubElement(current_test, 'system-err') stdeel.text = result.stde + + if failfast and failing_tests > 0: + break + print("\nTotal configuration time: %.2fs" % conf_time) print("Total build time: %.2fs" % build_time) print("Total test time: %.2fs" % test_time) @@ -709,6 +702,8 @@ if __name__ == '__main__': help='arguments that are passed directly to Meson (remember to have -- before these).') parser.add_argument('--backend', default=None, dest='backend', choices=backendlist) + parser.add_argument('--failfast', action='store_true', + help='Stop running if test case fails') options = parser.parse_args() setup_commands(options.backend) @@ -720,7 +715,7 @@ if __name__ == '__main__': check_meson_commands_work() try: all_tests = detect_tests_to_run() - 
(passing_tests, failing_tests, skipped_tests) = run_tests(all_tests, 'meson-test-run', options.extra_args) + (passing_tests, failing_tests, skipped_tests) = run_tests(all_tests, 'meson-test-run', options.failfast, options.extra_args) except StopException: pass print('\nTotal passed tests:', green(str(passing_tests))) diff --git a/run_tests.py b/run_tests.py index af926ea..3445e30 100755 --- a/run_tests.py +++ b/run_tests.py @@ -21,17 +21,40 @@ import shutil import subprocess import tempfile import platform +import argparse from io import StringIO from enum import Enum from glob import glob from pathlib import Path - import mesonbuild from mesonbuild import mesonlib from mesonbuild import mesonmain from mesonbuild import mtest from mesonbuild import mlog from mesonbuild.environment import Environment, detect_ninja +from mesonbuild.coredata import backendlist + +def guess_backend(backend, msbuild_exe): + # Auto-detect backend if unspecified + backend_flags = [] + if backend is None: + if msbuild_exe is not None: + backend = 'vs' # Meson will auto-detect VS version to use + else: + backend = 'ninja' + # Set backend arguments for Meson + if backend.startswith('vs'): + backend_flags = ['--backend=' + backend] + backend = Backend.vs + elif backend == 'xcode': + backend_flags = ['--backend=xcode'] + backend = Backend.xcode + elif backend == 'ninja': + backend_flags = ['--backend=ninja'] + backend = Backend.ninja + else: + raise RuntimeError('Unknown backend: {!r}'.format(backend)) + return (backend, backend_flags) # Fake classes and objects for mocking @@ -106,9 +129,9 @@ def find_vcxproj_with_target(builddir, target): import re, fnmatch t, ext = os.path.splitext(target) if ext: - p = '<TargetName>{}</TargetName>\s*<TargetExt>\{}</TargetExt>'.format(t, ext) + p = r'<TargetName>{}</TargetName>\s*<TargetExt>\{}</TargetExt>'.format(t, ext) else: - p = '<TargetName>{}</TargetName>'.format(t) + p = r'<TargetName>{}</TargetName>'.format(t) for root, dirs, files in os.walk(builddir): for f in fnmatch.filter(files, '*.vcxproj'): f = os.path.join(builddir, f) @@ -218,34 +241,27 @@ def print_system_info(): print('System:', platform.system()) print('') -if __name__ == '__main__': +def main(): print_system_info() + parser = argparse.ArgumentParser() + parser.add_argument('--cov', action='store_true') + parser.add_argument('--backend', default=None, dest='backend', + choices=backendlist) + parser.add_argument('--cross', default=False, dest='cross', action='store_true') + parser.add_argument('--failfast', action='store_true') + (options, _) = parser.parse_known_args() # Enable coverage early... - enable_coverage = '--cov' in sys.argv + enable_coverage = options.cov if enable_coverage: os.makedirs('.coverage', exist_ok=True) sys.argv.remove('--cov') import coverage coverage.process_startup() returncode = 0 - # Iterate over list in reverse order to find the last --backend arg - backend = Backend.ninja - cross = False - # FIXME: PLEASE convert to argparse - for arg in reversed(sys.argv[1:]): - if arg.startswith('--backend'): - if arg.startswith('--backend=vs'): - backend = Backend.vs - elif arg == '--backend=xcode': - backend = Backend.xcode - if arg.startswith('--cross'): - cross = True - if arg == '--cross=mingw': - cross = 'mingw' - elif arg == '--cross=arm': - cross = 'arm' + cross = options.cross + backend, _ = guess_backend(options.backend, shutil.which('msbuild')) # Running on a developer machine? Be nice! 
- if not mesonlib.is_windows() and not mesonlib.is_haiku() and 'TRAVIS' not in os.environ: + if not mesonlib.is_windows() and not mesonlib.is_haiku() and 'CI' not in os.environ: os.nice(20) # Appveyor sets the `platform` environment variable which completely messes # up building with the vs2010 and vs2015 backends. @@ -267,26 +283,50 @@ if __name__ == '__main__': # Can't pass arguments to unit tests, so set the backend to use in the environment env = os.environ.copy() env['MESON_UNIT_TEST_BACKEND'] = backend.name - with tempfile.TemporaryDirectory() as td: + with tempfile.TemporaryDirectory() as temp_dir: # Enable coverage on all subsequent processes. if enable_coverage: - with open(os.path.join(td, 'usercustomize.py'), 'w') as f: - f.write('import coverage\n' - 'coverage.process_startup()\n') + Path(temp_dir, 'usercustomize.py').open('w').write( + 'import coverage\n' + 'coverage.process_startup()\n') env['COVERAGE_PROCESS_START'] = '.coveragerc' - env['PYTHONPATH'] = os.pathsep.join([td] + env.get('PYTHONPATH', [])) + if 'PYTHONPATH' in env: + env['PYTHONPATH'] = os.pathsep.join([temp_dir, env.get('PYTHONPATH')]) + else: + env['PYTHONPATH'] = temp_dir if not cross: - returncode += subprocess.call(mesonlib.python_command + ['run_meson_command_tests.py', '-v'], env=env) - returncode += subprocess.call(mesonlib.python_command + ['run_unittests.py', '-v'], env=env) - returncode += subprocess.call(mesonlib.python_command + ['run_project_tests.py'] + sys.argv[1:], env=env) + cmd = mesonlib.python_command + ['run_meson_command_tests.py', '-v'] + if options.failfast: + cmd += ['--failfast'] + returncode += subprocess.call(cmd, env=env) + if options.failfast and returncode != 0: + return returncode + cmd = mesonlib.python_command + ['run_unittests.py', '-v'] + if options.failfast: + cmd += ['--failfast'] + returncode += subprocess.call(cmd, env=env) + if options.failfast and returncode != 0: + return returncode + cmd = mesonlib.python_command + ['run_project_tests.py'] + sys.argv[1:] + returncode += subprocess.call(cmd, env=env) else: cross_test_args = mesonlib.python_command + ['run_cross_test.py'] - if cross is True or cross == 'arm': - print(mlog.bold('Running armhf cross tests.').get_text(mlog.colorize_console)) - print() - returncode += subprocess.call(cross_test_args + ['cross/ubuntu-armhf.txt'], env=env) - if cross is True or cross == 'mingw': - print(mlog.bold('Running mingw-w64 64-bit cross tests.').get_text(mlog.colorize_console)) - print() - returncode += subprocess.call(cross_test_args + ['cross/linux-mingw-w64-64bit.txt'], env=env) - sys.exit(returncode) + print(mlog.bold('Running armhf cross tests.').get_text(mlog.colorize_console)) + print() + cmd = cross_test_args + ['cross/ubuntu-armhf.txt'] + if options.failfast: + cmd += ['--failfast'] + returncode += subprocess.call(cmd, env=env) + if options.failfast and returncode != 0: + return returncode + print(mlog.bold('Running mingw-w64 64-bit cross tests.') + .get_text(mlog.colorize_console)) + print() + cmd = cross_test_args + ['cross/linux-mingw-w64-64bit.txt'] + if options.failfast: + cmd += ['--failfast'] + returncode += subprocess.call(cmd, env=env) + return returncode + +if __name__ == '__main__': + sys.exit(main()) diff --git a/run_unittests.py b/run_unittests.py index 8bea2d0..ee80a87 100755 --- a/run_unittests.py +++ b/run_unittests.py @@ -16,11 +16,13 @@ import stat import shlex import subprocess -import re, json +import re +import json import tempfile import textwrap import os import shutil +import sys import unittest import 
platform from itertools import chain @@ -81,7 +83,7 @@ def is_tarball(): return False def is_ci(): - if 'TRAVIS' in os.environ or 'APPVEYOR' in os.environ: + if 'CI' in os.environ: return True return False @@ -98,8 +100,8 @@ def _git_init(project_dir): def skipIfNoPkgconfig(f): ''' - Skip this test if no pkg-config is found, unless we're on Travis or - Appveyor CI. This allows users to run our test suite without having + Skip this test if no pkg-config is found, unless we're on CI. + This allows users to run our test suite without having pkg-config installed on, f.ex., macOS, while ensuring that our CI does not silently skip the test because of misconfiguration. @@ -582,7 +584,7 @@ class InternalTests(unittest.TestCase): 'static': unix_static}, 'linux': {'shared': ('lib{}.so', '{}.so'), 'static': unix_static}, - 'darwin': {'shared': ('lib{}.dylib', '{}.dylib'), + 'darwin': {'shared': ('lib{}.dylib', 'lib{}.so', '{}.dylib', '{}.so'), 'static': unix_static}, 'cygwin': {'shared': ('cyg{}.dll', 'cyg{}.dll.a', 'lib{}.dll', 'lib{}.dll.a', '{}.dll', '{}.dll.a'), @@ -1671,7 +1673,7 @@ class AllPlatformTests(BasePlatformTests): self.assertIsInstance(linker, lib) self.assertEqual(cc.id, 'msvc') self.assertTrue(hasattr(cc, 'is_64')) - # If we're in the appveyor CI, we know what the compiler will be + # If we're on Windows CI, we know what the compiler will be if 'arch' in os.environ: if os.environ['arch'] == 'x64': self.assertTrue(cc.is_64) @@ -2682,6 +2684,8 @@ recommended as it is not supported on some platforms''') self.assertRegex(out, "WARNING:.*\"double_output.txt\".*overwrites") self.assertRegex(out, "WARNING:.*\"subdir.double_output2.txt\".*overwrites") self.assertNotRegex(out, "WARNING:.*no_write_conflict.txt.*overwrites") + self.assertNotRegex(out, "WARNING:.*@BASENAME@.*overwrites") + self.assertRegex(out, "WARNING:.*\"sameafterbasename\".*overwrites") # No warnings about empty configuration data objects passed to files with substitutions self.assertNotRegex(out, "WARNING:.*empty configuration_data.*nosubst-nocopy1.txt.in") self.assertNotRegex(out, "WARNING:.*empty configuration_data.*nosubst-nocopy2.txt.in") @@ -3093,6 +3097,19 @@ class WindowsTests(BasePlatformTests): self.utime(os.path.join(testdir, 'res', 'resource.h')) self.assertRebuiltTarget('prog_1') + @unittest.skipIf(shutil.which('cl') is None, 'Test only applies to VS') + def test_msvc_cpp17(self): + testdir = os.path.join(self.unit_test_dir, '45 vscpp17') + try: + self.init(testdir) + except subprocess.CalledProcessError: + # According to Python docs, output is only stored when + # using check_output. We don't use it, so we can't check + # that the output is correct (i.e. that it failed due + # to the right reason). 
+ return + self.build() + class DarwinTests(BasePlatformTests): ''' Tests that should run on macOS @@ -3175,6 +3192,18 @@ class DarwinTests(BasePlatformTests): self.assertEqual(self._get_darwin_versions(targets['intstringver']), ('1111.0.0', '2.5.0')) self.assertEqual(self._get_darwin_versions(targets['stringlistvers']), ('2.6.0', '2.6.1')) + def test_duplicate_rpath(self): + testdir = os.path.join(self.unit_test_dir, '10 build_rpath') + # We purposely pass a duplicate rpath to Meson, in order + # to ascertain that Meson does not call install_name_tool + # with duplicate -delete_rpath arguments, which would + # lead to erroring out on installation + os.environ["LDFLAGS"] = "-Wl,-rpath,/foo/bar" + self.init(testdir) + self.build() + self.install() + del os.environ["LDFLAGS"] + class LinuxlikeTests(BasePlatformTests): ''' @@ -3605,7 +3634,7 @@ class LinuxlikeTests(BasePlatformTests): ('share', 'drwxr-x---'), ('share/man', 'drwxr-x---'), ('share/man/man1', 'drwxr-x---'), - ('share/man/man1/foo.1.gz', '-r--r--r-T'), + ('share/man/man1/foo.1', '-r--r--r-T'), ('share/sub1', 'drwxr-x---'), ('share/sub1/second.dat', '-rwxr-x--t'), ('subdir', 'drwxr-x---'), @@ -3678,7 +3707,7 @@ class LinuxlikeTests(BasePlatformTests): 'include/sample.h', 'share/datafile.cat', 'share/file.dat', - 'share/man/man1/prog.1.gz', + 'share/man/man1/prog.1', 'share/subdir/datafile.dog', ]: f = os.path.join(self.installdir, 'usr', *datafile.split('/')) @@ -4327,7 +4356,7 @@ def should_run_cross_arm_tests(): def should_run_cross_mingw_tests(): return shutil.which('x86_64-w64-mingw32-gcc') and not (is_windows() or is_cygwin()) -if __name__ == '__main__': +def main(): unset_envs() cases = ['InternalTests', 'DataTests', 'AllPlatformTests', 'FailureTests', 'PythonTests'] if not is_windows(): @@ -4341,4 +4370,7 @@ if __name__ == '__main__': if is_osx(): cases += ['DarwinTests'] - unittest.main(defaultTest=cases, buffer=True) + return unittest.main(defaultTest=cases, buffer=True) + +if __name__ == '__main__': + sys.exit(main()) @@ -16,13 +16,12 @@ import sys -from mesonbuild.coredata import version - if sys.version_info < (3, 5, 0): print('Tried to install with an unsupported version of Python. 
' 'Meson requires Python 3.5.0 or greater') sys.exit(1) +from mesonbuild.coredata import version from setuptools import setup # On windows, will create Scripts/meson.exe and Scripts/meson-script.py @@ -54,9 +54,13 @@ def main(): help='Branch push is targeted to') parser.add_argument('--is-pull-env', required=True, help='Variable set if it is a PR') + parser.add_argument('--base-branch-origin', action='store_true', + help='Base branch reference is only in origin remote') args = parser.parse_args() check_pr(args.is_pull_env) base = get_base_branch(args.base_branch_env) + if args.base_branch_origin: + base = 'origin/' + base if all(is_documentation(f) for f in get_git_files(base)): print("Don't run CI for documentation-only changes, add '[skip ci]' to commit title.") print('See http://mesonbuild.com/Contributing.html#skipping-integration-tests') diff --git a/test cases/common/10 man install/installed_files.txt b/test cases/common/10 man install/installed_files.txt index c13baa4..5aad8ea 100644 --- a/test cases/common/10 man install/installed_files.txt +++ b/test cases/common/10 man install/installed_files.txt @@ -1,5 +1,5 @@ -usr/share/man/man1/foo.1.gz -usr/share/man/man2/bar.2.gz -usr/share/man/man1/vanishing.1.gz -usr/share/man/man2/vanishing.2.gz -usr/share/man/man1/baz.1.gz +usr/share/man/man1/foo.1 +usr/share/man/man2/bar.2 +usr/share/man/man1/vanishing.1 +usr/share/man/man2/vanishing.2 +usr/share/man/man1/baz.1 diff --git a/test cases/common/137 get define/meson.build b/test cases/common/137 get define/meson.build index b20c554..109f628 100644 --- a/test cases/common/137 get define/meson.build +++ b/test cases/common/137 get define/meson.build @@ -32,6 +32,9 @@ foreach lang : ['c', 'cpp'] elif host_system == 'netbsd' d = cc.get_define('__NetBSD__') assert(d == '1', '__NetBSD__ value is @0@ instead of 1'.format(d)) + elif host_system == 'gnu' + d = cc.get_define('__GNU__') + assert(d == '1', '__GNU__ value is @0@ instead of 1'.format(d)) else error('Please report a bug and help us improve support for this platform') endif diff --git a/test cases/common/14 configure file/differentafterbasename1.in b/test cases/common/14 configure file/differentafterbasename1.in new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/test cases/common/14 configure file/differentafterbasename1.in diff --git a/test cases/common/14 configure file/differentafterbasename2.in b/test cases/common/14 configure file/differentafterbasename2.in new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/test cases/common/14 configure file/differentafterbasename2.in diff --git a/test cases/common/14 configure file/meson.build b/test cases/common/14 configure file/meson.build index a3601aa..53b06f3 100644 --- a/test cases/common/14 configure file/meson.build +++ b/test cases/common/14 configure file/meson.build @@ -218,7 +218,55 @@ configure_file( output: 'no_write_conflict.txt', configuration: conf) +# Test that @BASENAME@ is substituted before checking and does not create a warning. +configure_file( + input: 'differentafterbasename1.in', + output: '@BASENAME@', + configuration: conf +) +configure_file( + input: 'differentafterbasename2.in', + output: '@BASENAME@', + configuration: conf +) + +# Test that @BASENAME@ is substituted before checking and does create a warning on conflict. 
+configure_file( + input: 'sameafterbasename.in', + output: '@BASENAME@', + configuration: conf +) +configure_file( + input: 'sameafterbasename.in2', + output: '@BASENAME@', + configuration: conf +) + test('configure-file', test_file) cdata = configuration_data() cdata.set('invalid_value', ['array']) + +# Dictionaries + +cdata = configuration_data({ + 'A_STRING' : '"foo"', + 'A_INT' : 42, + 'A_DEFINED' : true, + 'A_UNDEFINED' : false, +}) + +configure_file(output : 'config9a.h', + configuration : cdata, +) + +configure_file(output : 'config9b.h', + configuration : { + 'B_STRING' : '"foo"', + 'B_INT' : 42, + 'B_DEFINED' : true, + 'B_UNDEFINED' : false, + } +) + +test('test9', executable('prog9', 'prog9.c')) diff --git a/test cases/common/14 configure file/prog9.c b/test cases/common/14 configure file/prog9.c new file mode 100644 index 0000000..28c7354 --- /dev/null +++ b/test cases/common/14 configure file/prog9.c @@ -0,0 +1,18 @@ +#include <string.h> +#include <config9a.h> +#include <config9b.h> + +#if defined(A_UNDEFINED) || defined(B_UNDEFINED) +#error "Should not be defined" +#endif + +#if !defined(A_DEFINED) || !defined(B_DEFINED) +#error "Should be defined" +#endif + +int main(int argc, char **argv) { + return strcmp(A_STRING, "foo") + || strcmp(B_STRING, "foo") + || A_INT != 42 + || B_INT != 42; +} diff --git a/test cases/common/14 configure file/sameafterbasename.in b/test cases/common/14 configure file/sameafterbasename.in new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/test cases/common/14 configure file/sameafterbasename.in diff --git a/test cases/common/14 configure file/sameafterbasename.in2 b/test cases/common/14 configure file/sameafterbasename.in2 new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/test cases/common/14 configure file/sameafterbasename.in2 diff --git a/test cases/common/158 wrap file should not failed/meson.build b/test cases/common/158 wrap file should not failed/meson.build index 9cf4e9a..f4ec2a8 100644 --- a/test cases/common/158 wrap file should not failed/meson.build +++ b/test cases/common/158 wrap file should not failed/meson.build @@ -1,6 +1,9 @@ -project('mainproj', 'c') +project('mainproj', 'c', + default_options : ['wrap_mode=nodownload'], +) subproject('zlib') +subproject('foo') executable('grabprog', files('src/subprojects/prog.c')) executable('grabprog2', files('src/subprojects/foo/prog2.c')) diff --git a/test cases/common/158 wrap file should not failed/subprojects/foo.wrap b/test cases/common/158 wrap file should not failed/subprojects/foo.wrap new file mode 100644 index 0000000..90d6d40 --- /dev/null +++ b/test cases/common/158 wrap file should not failed/subprojects/foo.wrap @@ -0,0 +1,11 @@ +[wrap-file] +directory = foo-1.0 + +source_url = http://something.invalid +source_filename = foo-1.0.tar.xz +source_hash = ae5fc03185654f76b459db16ca25809703f8821aeb39a433902244bb479c4b79 +lead_directory_missing = true + +patch_url = https://something.invalid/patch +patch_filename = foo-1.0-patch.tar.xz +patch_hash = 8f2e286a4b190228d4e0c25ddc91195449cfb5e5c52006355838964b244037da diff --git a/test cases/common/158 wrap file should not failed/subprojects/packagecache/foo-1.0-patch.tar.xz b/test cases/common/158 wrap file should not failed/subprojects/packagecache/foo-1.0-patch.tar.xz Binary files differnew file mode 100644 index 0000000..26d2927 --- /dev/null +++ b/test cases/common/158 wrap file should not failed/subprojects/packagecache/foo-1.0-patch.tar.xz diff --git a/test cases/common/158 wrap file should not 
failed/subprojects/packagecache/foo-1.0.tar.xz b/test cases/common/158 wrap file should not failed/subprojects/packagecache/foo-1.0.tar.xz Binary files differnew file mode 100644 index 0000000..2647ef9 --- /dev/null +++ b/test cases/common/158 wrap file should not failed/subprojects/packagecache/foo-1.0.tar.xz diff --git a/test cases/common/164 disabler/meson.build b/test cases/common/164 disabler/meson.build index 1956cd3..a1763d2 100644 --- a/test cases/common/164 disabler/meson.build +++ b/test cases/common/164 disabler/meson.build @@ -31,4 +31,12 @@ endif assert(number == 2, 'If found handled incorrectly, value should be 2 but is @0@'.format(number)) +dep = dependency('notfounddep', required : false, disabler : true) +app = executable('myapp', 'notfound.c', dependencies : [dep]) +cc = meson.get_compiler('c') +dep = cc.find_library('notfounddep', required : false, disabler : true) +app = executable('myapp', 'notfound.c', dependencies : [dep]) + +dep = find_program('donotfindme', required : false, disabler : true) +app = executable('myapp', 'notfound.c', dependencies : [dep]) diff --git a/test cases/common/17 comparison/meson.build b/test cases/common/17 comparison/meson.build index fb641ed..bba0168 100644 --- a/test cases/common/17 comparison/meson.build +++ b/test cases/common/17 comparison/meson.build @@ -137,3 +137,18 @@ assert(2 != 'st', 'not equal') assert(not ([] == 'st'), 'not equal') assert(not ([] == 1), 'not equal') assert(not (2 == 'st'), 'not equal') + +# "in" and "not in" operators + +assert(1 in [1, 2], '''1 should be in [1, 2]''') +assert(3 not in [1, 2], '''3 shouldn't be in [1, 2]''') +assert(not (3 in [1, 2]), '''3 shouldn't be in [1, 2]''') + +assert('b' in ['a', 'b'], ''''b' should be in ['a', 'b']''') +assert('c' not in ['a', 'b'], ''''c' shouldn't be in ['a', 'b']''') + +assert(exe1 in [exe1, exe2], ''''exe1 should be in [exe1, exe2]''') +assert(exe3 not in [exe1, exe2], ''''exe3 shouldn't be in [exe1, exe2]''') + +assert('a' in {'a': 'b'}, '''1 should be in {'a': 'b'}''') +assert('b' not in {'a': 'b'}, '''1 should be in {'a': 'b'}''') diff --git a/test cases/common/196 install_mode/installed_files.txt b/test cases/common/196 install_mode/installed_files.txt index c1de3e1..4bd2211 100644 --- a/test cases/common/196 install_mode/installed_files.txt +++ b/test cases/common/196 install_mode/installed_files.txt @@ -4,7 +4,7 @@ usr/bin/trivialprog?exe usr/include/config.h usr/include/rootdir.h usr/libtest/libstat.a -usr/share/man/man1/foo.1.gz +usr/share/man/man1/foo.1 usr/share/sub1/second.dat usr/share/sub2/stub usr/subdir/data.dat diff --git a/test cases/common/44 options/meson.build b/test cases/common/44 options/meson.build index f177aa4..c6cf9c8 100644 --- a/test cases/common/44 options/meson.build +++ b/test cases/common/44 options/meson.build @@ -29,3 +29,5 @@ endif if get_option('integer_opt') != 3 error('Incorrect value in integer option.') endif + +assert(get_option('wrap_mode') == 'default', 'Wrap mode option is broken.') diff --git a/test cases/common/49 custom install dirs/installed_files.txt b/test cases/common/49 custom install dirs/installed_files.txt index 7d24ce8..4e17c2d 100644 --- a/test cases/common/49 custom install dirs/installed_files.txt +++ b/test cases/common/49 custom install dirs/installed_files.txt @@ -4,8 +4,8 @@ usr/dib/dab/dub2/prog2?exe ?msvc:usr/dib/dab/dub2/prog2.pdb usr/some/dir/sample.h usr/some/dir2/sample.h -usr/woman/prog.1.gz -usr/woman2/prog.1.gz +usr/woman/prog.1 +usr/woman2/prog.1 usr/meow/datafile.cat usr/meow2/datafile.cat 
usr/woof/subdir/datafile.dog diff --git a/test cases/common/64 foreach/meson.build b/test cases/common/64 foreach/meson.build index e633de8..7084e80 100644 --- a/test cases/common/64 foreach/meson.build +++ b/test cases/common/64 foreach/meson.build @@ -18,3 +18,16 @@ foreach i : tests # we definitely don't want that. tests = ['test4', 'prog4', 'prog4.c'] endforeach + +items = ['a', 'continue', 'b', 'break', 'c'] +result = [] +foreach i : items + if i == 'continue' + continue + elif i == 'break' + break + endif + result += i +endforeach + +assert(result == ['a', 'b'], 'Continue or break in foreach failed') diff --git a/test cases/csharp/1 basic/installed_files.txt b/test cases/csharp/1 basic/installed_files.txt index f64c68c..5022d28 100644 --- a/test cases/csharp/1 basic/installed_files.txt +++ b/test cases/csharp/1 basic/installed_files.txt @@ -1 +1,2 @@ usr/bin/prog.exe +?msvc:usr/bin/prog.pdb diff --git a/test cases/csharp/2 library/installed_files.txt b/test cases/csharp/2 library/installed_files.txt index 4ebea55..73e77a2 100644 --- a/test cases/csharp/2 library/installed_files.txt +++ b/test cases/csharp/2 library/installed_files.txt @@ -1,2 +1,5 @@ usr/bin/prog.exe -usr/lib/helper.dll +?msvc:usr/bin/prog.pdb +?msvc:usr/bin/helper.dll +?msvc:usr/bin/helper.pdb +?gcc:usr/lib/helper.dll diff --git a/test cases/failing/90 subproj not-found dep/meson.build b/test cases/failing/90 subproj not-found dep/meson.build new file mode 100644 index 0000000..2b17df1 --- /dev/null +++ b/test cases/failing/90 subproj not-found dep/meson.build @@ -0,0 +1,2 @@ +project('dep-test') +missing = dependency('', fallback: ['somesubproj', 'notfound_dep'], required: true) diff --git a/test cases/failing/90 subproj not-found dep/subprojects/somesubproj/meson.build b/test cases/failing/90 subproj not-found dep/subprojects/somesubproj/meson.build new file mode 100644 index 0000000..5f451f4 --- /dev/null +++ b/test cases/failing/90 subproj not-found dep/subprojects/somesubproj/meson.build @@ -0,0 +1,3 @@ +project('dep', 'c') + +notfound_dep = dependency('', required : false) diff --git a/test cases/frameworks/17 mpi/meson.build b/test cases/frameworks/17 mpi/meson.build index 1085d40..ae3f08a 100644 --- a/test cases/frameworks/17 mpi/meson.build +++ b/test cases/frameworks/17 mpi/meson.build @@ -17,7 +17,7 @@ exec = executable('exec', test('MPI C', exec) if build_machine.system() != 'windows' - # C++ MPI not supported by MS-MPI used on AppVeyor. 
+ # C++ MPI not supported by MS-MPI mpicpp = dependency('mpi', language : 'cpp') execpp = executable('execpp', 'main.cpp', @@ -44,4 +44,6 @@ if uburesult.returncode() != 0 and add_languages('fortran', required : false) endif # Check we can apply a version constraint -dependency('mpi', version: '>=@0@'.format(mpic.version())) +if mpic.version() != 'unknown' + dependency('mpi', version: '>=@0@'.format(mpic.version())) +endif diff --git a/test cases/frameworks/4 qt/meson.build b/test cases/frameworks/4 qt/meson.build index 7ac945e..15fd822 100644 --- a/test cases/frameworks/4 qt/meson.build +++ b/test cases/frameworks/4 qt/meson.build @@ -58,6 +58,10 @@ foreach qt : ['qt4', 'qt5'] # Test that setting a unique name with a positional argument works qtmodule.preprocess(qt + 'teststuff', qresources : files(['stuff.qrc', 'stuff2.qrc']), method : get_option('method')) + # Test that passing extra arguments to rcc works + # qt4-rcc and qt5-rcc take different arguments, for example qt4: ['-compress', '3']; qt5: '--compress=3' + qtmodule.preprocess(qt + 'testrccarg', qresources : files(['stuff.qrc', 'stuff2.qrc']), rcc_extra_arguments : '--compress=3', method : get_option('method')) + qexe = executable(qt + 'app', sources : ['main.cpp', 'mainWindow.cpp', # Sources that don't need preprocessing. prep, prep_rcc], diff --git a/test cases/frameworks/6 gettext/data/data3/meson.build b/test cases/frameworks/6 gettext/data/data3/meson.build new file mode 100644 index 0000000..044b498 --- /dev/null +++ b/test cases/frameworks/6 gettext/data/data3/meson.build @@ -0,0 +1,9 @@ +# Target name will contain a path separator +i18n.merge_file( + input: 'test.desktop.in', + output: 'test4.desktop', + type: 'desktop', + po_dir: '../../po', + install: true, + install_dir: join_paths(get_option('datadir'), 'applications') +) diff --git a/test cases/frameworks/6 gettext/data/data3/test.desktop.in b/test cases/frameworks/6 gettext/data/data3/test.desktop.in new file mode 100644 index 0000000..33b9a9f --- /dev/null +++ b/test cases/frameworks/6 gettext/data/data3/test.desktop.in @@ -0,0 +1,6 @@ +[Desktop Entry] +Name=Test +GenericName=Application +Comment=Test Application +Type=Application + diff --git a/test cases/frameworks/6 gettext/data/meson.build b/test cases/frameworks/6 gettext/data/meson.build index a6b0a8b..0a7811b 100644 --- a/test cases/frameworks/6 gettext/data/meson.build +++ b/test cases/frameworks/6 gettext/data/meson.build @@ -26,3 +26,5 @@ i18n.merge_file( install: true, install_dir: join_paths(get_option('datadir'), 'applications') ) + +subdir('data3') diff --git a/test cases/frameworks/6 gettext/generated/desktopgenerator.py b/test cases/frameworks/6 gettext/generated/desktopgenerator.py index 150ed04..6ff799d 100644 --- a/test cases/frameworks/6 gettext/generated/desktopgenerator.py +++ b/test cases/frameworks/6 gettext/generated/desktopgenerator.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 -import sys, shutil +import os, sys, shutil ifile = sys.argv[1] ofile = sys.argv[2] diff --git a/test cases/frameworks/6 gettext/installed_files.txt b/test cases/frameworks/6 gettext/installed_files.txt index 9298909..868f4c1 100644 --- a/test cases/frameworks/6 gettext/installed_files.txt +++ b/test cases/frameworks/6 gettext/installed_files.txt @@ -4,3 +4,4 @@ usr/share/locale/fi/LC_MESSAGES/intltest.mo usr/share/applications/test.desktop usr/share/applications/test2.desktop usr/share/applications/test3.desktop +usr/share/applications/test4.desktop diff --git a/test cases/frameworks/7 gnome/resources/res3.txt b/test 
cases/frameworks/7 gnome/resources/res3.txt new file mode 100644 index 0000000..aeed4a5 --- /dev/null +++ b/test cases/frameworks/7 gnome/resources/res3.txt @@ -0,0 +1 @@ +This file is from the wrong directory. diff --git a/test cases/unit/45 vscpp17/main.cpp b/test cases/unit/45 vscpp17/main.cpp new file mode 100644 index 0000000..36e4156 --- /dev/null +++ b/test cases/unit/45 vscpp17/main.cpp @@ -0,0 +1,7 @@ +[[nodiscard]] int foo() { + return 0; +} + +int main() { + return foo(); +} diff --git a/test cases/unit/45 vscpp17/meson.build b/test cases/unit/45 vscpp17/meson.build new file mode 100644 index 0000000..afe740b --- /dev/null +++ b/test cases/unit/45 vscpp17/meson.build @@ -0,0 +1,4 @@ +project('msvc_cpp17', 'cpp', default_options: ['cpp_std=c++17']) + +exe = executable('msvc_cpp17', 'main.cpp') +test('msvc_cpp17', exe)
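A change that recurs across run_tests.py, run_project_tests.py and run_cross_test.py above is the new `--failfast` flag, which stops the local loop after the first failure and is also forwarded to the child test processes. A condensed sketch of the propagation side, using only the standard library (the suite list and the use of `sys.executable` are illustrative simplifications):

```python
import argparse
import subprocess
import sys

def run_suites(failfast):
    returncode = 0
    for script in ('run_meson_command_tests.py', 'run_unittests.py'):
        cmd = [sys.executable, script, '-v']
        if failfast:
            cmd.append('--failfast')
        returncode += subprocess.call(cmd)
        if failfast and returncode != 0:
            # Mirror run_tests.py: bail out as soon as one suite reports failures.
            break
    return returncode

if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument('--failfast', action='store_true')
    options = parser.parse_args()
    sys.exit(run_suites(options.failfast))
```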