-rw-r--r--  azure-pipelines.yml | 8
-rw-r--r--  ci/azure-steps.yml | 7
-rw-r--r--  ci/install-dmd.ps1 | 2
-rw-r--r--  docs/markdown/Cuda-module.md | 10
-rw-r--r--  docs/markdown/Dependencies.md | 21
-rw-r--r--  docs/markdown/IDE-integration.md | 59
-rw-r--r--  docs/markdown/Precompiled-headers.md | 5
-rw-r--r--  docs/markdown/Reference-manual.md | 14
-rw-r--r--  docs/markdown/Reference-tables.md | 15
-rw-r--r--  docs/markdown/Rewriter.md | 236
-rw-r--r--  docs/markdown/Style-guide.md | 32
-rw-r--r--  docs/markdown/Unit-tests.md | 10
-rw-r--r--  docs/markdown/snippets/introspect_deps_no_bd.md | 25
-rw-r--r--  docs/markdown/snippets/introspect_multiple.md | 1
-rw-r--r--  docs/markdown/snippets/introspect_projectinfo_subprojects_dir.md | 4
-rw-r--r--  docs/markdown/snippets/introspect_targets_no_bd.md | 21
-rw-r--r--  docs/markdown/snippets/netcdf.md | 3
-rw-r--r--  docs/markdown/snippets/rewriter.md | 18
-rw-r--r--  docs/sitemap.txt | 1
-rw-r--r--  mesonbuild/ast/__init__.py | 3
-rw-r--r--  mesonbuild/ast/interpreter.py | 5
-rw-r--r--  mesonbuild/ast/introspection.py | 53
-rw-r--r--  mesonbuild/ast/postprocess.py | 30
-rw-r--r--  mesonbuild/backend/backends.py | 22
-rw-r--r--  mesonbuild/backend/ninjabackend.py | 51
-rw-r--r--  mesonbuild/backend/vs2010backend.py | 95
-rw-r--r--  mesonbuild/backend/xcodebackend.py | 2
-rw-r--r--  mesonbuild/build.py | 2
-rw-r--r--  mesonbuild/compilers/compilers.py | 38
-rw-r--r--  mesonbuild/compilers/cuda.py | 157
-rw-r--r--  mesonbuild/coredata.py | 9
-rw-r--r--  mesonbuild/dependencies/__init__.py | 4
-rw-r--r--  mesonbuild/dependencies/base.py | 214
-rw-r--r--  mesonbuild/dependencies/data/CMakeLists.txt | 18
-rw-r--r--  mesonbuild/dependencies/data/CMakePathInfo.txt | 29
-rw-r--r--  mesonbuild/dependencies/misc.py | 29
-rw-r--r--  mesonbuild/envconfig.py | 418
-rw-r--r--  mesonbuild/environment.py | 465
-rw-r--r--  mesonbuild/interpreter.py | 18
-rw-r--r--  mesonbuild/mesonlib.py | 4
-rw-r--r--  mesonbuild/mesonmain.py | 19
-rw-r--r--  mesonbuild/mintro.py | 155
-rw-r--r--  mesonbuild/mlog.py | 21
-rw-r--r--  mesonbuild/modules/unstable_cuda.py | 21
-rw-r--r--  mesonbuild/mparser.py | 23
-rw-r--r--  mesonbuild/mtest.py | 232
-rw-r--r--  mesonbuild/rewriter.py | 504
-rwxr-xr-x  run_project_tests.py | 12
-rwxr-xr-x  run_unittests.py | 575
-rw-r--r--  setup.py | 2
-rw-r--r--  test cases/common/113 ternary/meson.build | 5
-rw-r--r--  test cases/common/13 pch/c/meson.build | 2
-rw-r--r--  test cases/common/13 pch/c/pch/prog_pch.c | 5
-rw-r--r--  test cases/common/13 pch/cpp/meson.build | 2
-rw-r--r--  test cases/common/13 pch/cpp/pch/prog_pch.cc | 5
-rw-r--r--  test cases/common/13 pch/generated/meson.build | 2
-rw-r--r--  test cases/common/13 pch/generated/pch/prog_pch.c | 5
-rw-r--r--  test cases/common/13 pch/meson.build | 1
-rw-r--r--  test cases/common/13 pch/mixed/meson.build | 15
-rw-r--r--  test cases/common/13 pch/mixed/pch/func_pch.c | 1
-rw-r--r--  test cases/common/13 pch/mixed/pch/main_pch.cc | 1
-rw-r--r--  test cases/common/13 pch/userDefined/meson.build | 10
-rw-r--r--  test cases/common/13 pch/userDefined/pch/pch.c | 5
-rw-r--r--  test cases/common/13 pch/userDefined/pch/pch.h | 1
-rw-r--r--  test cases/common/13 pch/userDefined/prog.c | 8
-rw-r--r--  test cases/common/13 pch/withIncludeDirectories/meson.build | 2
-rw-r--r--  test cases/common/13 pch/withIncludeDirectories/pch/prog_pch.c | 5
-rw-r--r--  test cases/common/212 tap tests/meson.build | 10
-rw-r--r--  test cases/common/212 tap tests/tester.c | 10
-rw-r--r--  test cases/cuda/5 threads/main.cu | 20
-rw-r--r--  test cases/cuda/5 threads/meson.build | 7
-rw-r--r--  test cases/cuda/5 threads/shared/kernels.cu | 14
-rw-r--r--  test cases/cuda/5 threads/shared/kernels.h | 86
-rw-r--r--  test cases/cuda/5 threads/shared/meson.build | 5
-rw-r--r--  test cases/failing test/4 hard error/main.c | 3
-rw-r--r--  test cases/failing test/4 hard error/meson.build | 4
-rw-r--r--  test cases/failing test/5 tap tests/meson.build | 6
-rw-r--r--  test cases/failing test/5 tap tests/tester.c | 10
-rw-r--r--  test cases/fortran/4 self dependency/meson.build | 4
-rw-r--r--  test cases/fortran/4 self dependency/src/selfdep_mod.f90 | 6
-rw-r--r--  test cases/fortran/4 self dependency/subprojects/sub1/main.f90 | 6
-rw-r--r--  test cases/fortran/4 self dependency/subprojects/sub1/meson.build | 3
-rw-r--r--  test cases/fortran/7 generated/prog.f90 | 8
-rw-r--r--  test cases/frameworks/26 netcdf/main.c | 14
-rw-r--r--  test cases/frameworks/26 netcdf/main.cpp | 15
-rw-r--r--  test cases/frameworks/26 netcdf/main.f90 | 19
-rw-r--r--  test cases/frameworks/26 netcdf/meson.build | 35
-rw-r--r--  test cases/rewrite/1 basic/addSrc.json | 9
-rw-r--r--  test cases/rewrite/1 basic/addTgt.json | 2
-rw-r--r--  test cases/rewrite/1 basic/info.json | 5
-rw-r--r--  test cases/rewrite/1 basic/meson.build | 1
-rw-r--r--  test cases/rewrite/1 basic/rmSrc.json | 5
-rw-r--r--  test cases/rewrite/1 basic/rmTgt.json | 9
-rw-r--r--  test cases/rewrite/2 subdirs/addTgt.json | 2
-rw-r--r--  test cases/rewrite/2 subdirs/rmTgt.json | 2
-rw-r--r--  test cases/rewrite/3 kwargs/add.json | 6
-rw-r--r--  test cases/rewrite/3 kwargs/defopts_delete.json | 18
-rw-r--r--  test cases/rewrite/3 kwargs/defopts_set.json | 24
-rw-r--r--  test cases/rewrite/3 kwargs/info.json | 2
-rw-r--r--  test cases/rewrite/3 kwargs/remove.json | 6
-rw-r--r--  test cases/rewrite/3 kwargs/remove_regex.json | 20
-rw-r--r--  test cases/rewrite/3 kwargs/set.json | 2
-rw-r--r--  test cases/rewrite/4 same name targets/addSrc.json | 8
-rw-r--r--  test cases/rewrite/4 same name targets/info.json | 12
-rw-r--r--  test cases/rewrite/4 same name targets/meson.build | 6
-rw-r--r--  test cases/rewrite/4 same name targets/sub1/meson.build | 3
-rw-r--r--  test cases/rewrite/5 sorting/meson.build | 33
-rw-r--r--  test cases/unit/52 introspection/meson.build | 6
-rw-r--r--  test cases/unit/53 introspect buildoptions/main.c | 6
-rw-r--r--  test cases/unit/53 introspect buildoptions/meson.build | 5
110 files changed, 3289 insertions(+), 985 deletions(-)
diff --git a/azure-pipelines.yml b/azure-pipelines.yml
index 90ebeff..5a7c6ac 100644
--- a/azure-pipelines.yml
+++ b/azure-pipelines.yml
@@ -50,6 +50,11 @@ jobs:
backend: ninja
steps:
+ - task: UsePythonVersion@0
+ inputs:
+ versionSpec: '3.5'
+ addToPath: true
+ architecture: 'x64'
- template: ci/azure-steps.yml
- job: cygwin
@@ -76,12 +81,13 @@ jobs:
libglib2.0-devel,^
libgtk3-devel,^
ninja,^
- python3-pip,^
+ python35-pip,^
vala,^
zlib-devel
displayName: Install Dependencies
- script: |
set PATH=%CYGWIN_ROOT%\bin;%SYSTEMROOT%\system32
+ cp /usr/bin/python3.5 /usr/bin/python3
env.exe -- python3 run_tests.py --backend=ninja
displayName: Run Tests
- task: CopyFiles@2
diff --git a/ci/azure-steps.yml b/ci/azure-steps.yml
index 71642f0..6fe1831 100644
--- a/ci/azure-steps.yml
+++ b/ci/azure-steps.yml
@@ -142,9 +142,16 @@ steps:
MSBuild /version
}
+ echo "=== PATH BEGIN ==="
+ echo ($env:Path).Replace(';',"`n")
+ echo "=== PATH END ==="
+ echo ""
+ echo "Locating Python:"
where.exe python
python --version
+ echo ""
+ echo "=== Start running tests ==="
python run_tests.py --backend $(backend)
- task: PublishTestResults@2
diff --git a/ci/install-dmd.ps1 b/ci/install-dmd.ps1
index fc8226c..aeacdf2 100644
--- a/ci/install-dmd.ps1
+++ b/ci/install-dmd.ps1
@@ -68,4 +68,4 @@ $dmd_bin = Join-Path $dmd_install "dmd2\windows\bin"
$Env:Path = $Env:Path + ";" + $dmd_bin
#echo "Testing DMD..."
-& dmd.exe --version 2>&1>$null
+& dmd.exe --version
diff --git a/docs/markdown/Cuda-module.md b/docs/markdown/Cuda-module.md
index caa1756..f161eac 100644
--- a/docs/markdown/Cuda-module.md
+++ b/docs/markdown/Cuda-module.md
@@ -71,6 +71,14 @@ mixed with architecture names or compute capabilities. Their interpretation is:
| `'Common'` | Relatively common CCs supported by given NVCC compiler. Generally excludes Tegra and Tesla devices. |
| `'Auto'` | The CCs provided by the `detected:` keyword, filtered for support by given NVCC compiler. |
+As a special case, when `nvcc_arch_flags()` is invoked with
+
+- an NVCC `compiler` object `nvcc`,
+- `'Auto'` mode and
+- no `detected:` keyword,
+
+Meson uses `nvcc`'s architecture auto-detection results.
+
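+For example, the following sketch (assuming the project has enabled the
+`cuda` language) requests flags for the auto-detected architectures:
+
+```meson
+cuda = import('unstable-cuda')
+nvcc = meson.get_compiler('cuda')
+arch_flags = cuda.nvcc_arch_flags(nvcc, 'Auto')
+```
+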
The supported architecture names and their corresponding compute capabilities
are:
@@ -85,7 +93,7 @@ are:
| `'Pascal'` | 6.0, 6.1 |
| `'Pascal+Tegra'` | 6.2 |
| `'Volta'` | 7.0 |
-| `'Volta+Tegra'` | 7.2 |
+| `'Xavier'` | 7.2 |
| `'Turing'` | 7.5 |
diff --git a/docs/markdown/Dependencies.md b/docs/markdown/Dependencies.md
index e243e94..47fce8b 100644
--- a/docs/markdown/Dependencies.md
+++ b/docs/markdown/Dependencies.md
@@ -257,7 +257,9 @@ libraries that have been compiled for single-threaded use instead.
## Fortran Coarrays
-As of 0.50.0 Coarrays are a Fortran language intrinsic feature, enabled by
+*(added 0.50.0)*
+
+Coarrays are a Fortran language intrinsic feature, enabled by
`dependency('coarray')`.
GCC will use OpenCoarrays if present to implement coarrays, while Intel and NAG
@@ -286,6 +288,9 @@ test('gtest test', e)
```
## HDF5
+
+*(added 0.50.0)*
+
HDF5 is supported for C, C++ and Fortran. Because dependencies are
language-specific, you must specify the requested language using the
`language` keyword argument, i.e.,
@@ -349,6 +354,20 @@ are not in your path, they can be specified by setting the standard
environment variables `MPICC`, `MPICXX`, `MPIFC`, `MPIF90`, or
`MPIF77`, during configuration.
+## NetCDF
+
+*(added 0.50.0)*
+
+NetCDF is supported for C, C++ and Fortran. Because NetCDF dependencies are
+language-specific, you must specify the requested language using the
+`language` keyword argument, i.e.,
+ * `dependency('netcdf', language: 'c')` for the C NetCDF headers and libraries
+ * `dependency('netcdf', language: 'cpp')` for the C++ NetCDF headers and libraries
+ * `dependency('netcdf', language: 'fortran')` for the Fortran NetCDF headers and libraries
+
+Meson uses pkg-config to find NetCDF.
+
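+A minimal usage sketch (the target and source names here are hypothetical):
+
+```meson
+netcdf_dep = dependency('netcdf', language : 'c')
+executable('ncdemo', 'main.c', dependencies : netcdf_dep)
+```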
+
## OpenMP
*(added 0.46.0)*
diff --git a/docs/markdown/IDE-integration.md b/docs/markdown/IDE-integration.md
index 32e5e32..7bbec5d 100644
--- a/docs/markdown/IDE-integration.md
+++ b/docs/markdown/IDE-integration.md
@@ -54,6 +54,7 @@ for one target is defined as follows:
"id": "The internal ID meson uses",
"type": "<TYPE>",
"defined_in": "/Path/to/the/targets/meson.build",
+ "subproject": null,
"filename": ["list", "of", "generated", "files"],
"build_by_default": true / false,
"target_sources": [],
@@ -66,6 +67,9 @@ be present. It stores the installation location for each file in `filename`.
If one file in `filename` is not installed, its corresponding install location
is set to `null`.
+The `subproject` key specifies the name of the subproject this target was
+defined in, or `null` if the target was defined in the top level project.
+
A target usually generates only one file. However, it is possible for custom
targets to have multiple outputs.
@@ -105,6 +109,29 @@ The following table shows all valid types for a target.
`run` | A Meson run target
`jar` | A Java JAR target
+### Using `--targets` without a build directory
+
+It is also possible to get most targets without a build directory. This can be
+done by running `meson introspect --targets /path/to/meson.build`.
+
+The generated output is similar to running the introspection with a build
+directory or reading the `intro-targets.json`. However, there are some key
+differences:
+
+- The paths in `filename` are now _relative_ to the future build directory
+- The `install_filename` key is completely missing
+- There is only one entry in `target_sources`:
+ - With the language set to `unknown`
+ - Empty lists for `compiler` and `parameters` and `generated_sources`
+ - The `sources` list _should_ contain all sources of the target
+
+There is no guarantee that the sources list in `target_sources` is correct.
+There might be differences due to internal limitations. It is also not
+guaranteed that all targets will be listed in the output. Targets may even
+be listed that would not exist in a normal meson run; this can happen if a
+target is defined inside an if statement.
+Use this feature with care.
+
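+As an illustration, the single `target_sources` entry in this mode might look
+like the following sketch (the paths are hypothetical, not verbatim output):
+
+```json
+{
+  "language": "unknown",
+  "compiler": [],
+  "parameters": [],
+  "sources": ["relative/path/to/main.cpp"],
+  "generated_sources": []
+}
+```
+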
## Build Options
The list of all build options (build type, warning level, etc.) is stored in
However, this behavior is not guaranteed if subprojects are present. Due to
internal limitations, all subprojects are processed even if they are never used
in a real meson run. Because of this, options for the subprojects can differ.
+## The dependencies section
+
+The list of all _found_ dependencies can be acquired from
+`intro-dependencies.json`. Here, the name, compiler and linker arguments for
+a dependency are listed.
+
+### Scanning for dependencies with `--scan-dependencies`
+
+It is also possible to get most dependencies used without a build directory.
+This can be done by running `meson introspect --scan-dependencies /path/to/meson.build`.
+
+The output format is as follows:
+
+```json
+[
+ {
+ "name": "The name of the dependency",
+ "required": true,
+ "conditional": false,
+ "has_fallback": false
+ }
+]
+```
+
+The `required` keyword specifies whether the dependency is marked as required
+in the `meson.build` (all dependencies are required by default). The
+`conditional` key indicates whether the `dependency()` function was called
+inside a conditional block. In a real meson run such dependencies might not be
+used, so they _may_ not actually be required, even if the `required` key is
+set. The `has_fallback` key simply indicates whether a fallback was directly
+set in the `dependency()` function.
+
## Tests
Compilation and unit tests are done as usual by running the `ninja` and
diff --git a/docs/markdown/Precompiled-headers.md b/docs/markdown/Precompiled-headers.md
index 3f7d4b4..8dfb438 100644
--- a/docs/markdown/Precompiled-headers.md
+++ b/docs/markdown/Precompiled-headers.md
@@ -75,8 +75,11 @@ executable('multilang', sources : srclist,
Using precompiled headers with MSVC
--
+Since Meson version 0.50.0, precompiled headers with MSVC work just like
+with GCC. Meson will automatically create the matching pch implementation
+file for you.
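+
+For example, a minimal sketch (the target and header names are hypothetical):
+
+```meson
+executable('prog', 'prog.cpp', cpp_pch : 'pch/prog_pch.h')
+```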
-MSVC is a bit trickier, because in addition to the header file, it
+Before version 0.50.0, in addition to the header file, Meson
also requires a corresponding source file. If your header is called
`foo_pch.h`, the corresponding source file is usually called
`foo_pch.cpp` and it resides in the same `pch` subdirectory as the
diff --git a/docs/markdown/Reference-manual.md b/docs/markdown/Reference-manual.md
index adaffe7..f2b0416 100644
--- a/docs/markdown/Reference-manual.md
+++ b/docs/markdown/Reference-manual.md
@@ -1406,10 +1406,7 @@ executable to run. The executable can be an [executable build target
object](#build-target-object) returned by
[`executable()`](#executable) or an [external program
object](#external-program-object) returned by
-[`find_program()`](#find_program). The executable's exit code is used
-by the test harness to record the outcome of the test, for example
-exit code zero indicates success. For more on the Meson test harness
-protocol read [Unit Tests](Unit-tests.md).
+[`find_program()`](#find_program).
Keyword arguments are the following:
@@ -1446,6 +1443,12 @@ Keyword arguments are the following:
before test is executed even if they have `build_by_default : false`.
Since 0.46.0
+- `protocol` specifies how the test results are parsed and can be one
+ of `exitcode` (the executable's exit code is used by the test harness
+ to record the outcome of the test) or `tap` ([Test Anything
+ Protocol](https://www.testanything.org/)). For more on the Meson test
+ harness protocol read [Unit Tests](Unit-tests.md). Since 0.50.0
+
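+For example, a TAP-emitting test could be declared like this (a sketch; `exe`
+is assumed to be a test executable target):
+
+```meson
+test('tap output test', exe, protocol : 'tap')
+```
+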
Defined tests can be run in a backend-agnostic way by calling
`meson test` inside the build dir, or by using backend-specific
commands, such as `ninja test` or `msbuild RUN_TESTS.vcxproj`.
@@ -2256,7 +2259,8 @@ sample piece of code with [`compiler.run()`](#compiler-object) or
[`run_command()`](#run_command). It has the following methods:
- `compiled()` if true, the compilation succeeded, if false it did not
- and the other methods return unspecified data
+ and the other methods return unspecified data. This is only available
+ for `compiler.run()` results.
- `returncode()` the return code of executing the compiled binary
- `stderr()` the standard error produced when the command was run
- `stdout()` the standard out produced when the command was run
diff --git a/docs/markdown/Reference-tables.md b/docs/markdown/Reference-tables.md
index a4bef3a..d3a6815 100644
--- a/docs/markdown/Reference-tables.md
+++ b/docs/markdown/Reference-tables.md
@@ -196,3 +196,18 @@ which are supported by MSVC, GCC, Clang, and other compilers.
|----------------------|
| dllexport |
| dllimport |
+
+
+## Dependency lookup methods
+
+These are the values that can be passed to the `dependency` function's
+`method` keyword argument.
+
+| Name | Comment |
+| ----- | ------- |
+| auto | Automatic method selection |
+| pkg-config | Use Pkg-Config |
+| cmake | Look up as a CMake module |
+| config-tool | Use a custom dep tool such as `cups-config` |
+| system | System provided (e.g. OpenGL) |
+| extraframework | A macOS/iOS framework |
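+
+For example, to force a pkg-config lookup (a minimal sketch):
+
+```meson
+zlib_dep = dependency('zlib', method : 'pkg-config')
+```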
diff --git a/docs/markdown/Rewriter.md b/docs/markdown/Rewriter.md
new file mode 100644
index 0000000..b6301d6
--- /dev/null
+++ b/docs/markdown/Rewriter.md
@@ -0,0 +1,236 @@
+---
+short-description: Automatic modification of the build system files
+...
+
+# Meson file rewriter
+
+Since version 0.50.0, meson can perform some basic modifications on the
+`meson.build` files from the command line. The currently
+supported operations are:
+
+- For build targets:
+ - Add/Remove source files
+ - Add/Remove targets
+ - Modify a select set of kwargs
+ - Print some JSON information
+- For dependencies:
+ - Modify a select set of kwargs
+- For the project function:
+ - Modify a select set of kwargs
+ - Modify the default options list
+
+The rewriter has both a normal command line interface and a "script mode". The
+normal CLI is mostly designed for everyday use. The "script mode", on the
+other hand, is meant to be used by external programs (IDEs, graphical
+frontends, etc.).
+
+The rewriter itself is considered stable; however, the user interface and the
+"script mode" API might change in the future. These changes may also break
+backwards compatibility with older releases.
+
+We are also open to suggestions for API improvements.
+
+## Using the rewriter
+
+All rewriter functions are accessed via `meson rewrite`. The meson rewriter
+assumes that it is run inside the project root directory. If this isn't the
+case, use `--sourcedir` to specify the actual project source directory.
+
+### Adding and removing sources
+
+The most common operations will probably be adding and removing source files
+of a build target. This can easily be done with:
+
+```bash
+meson rewrite target <target name/id> {add/rm} [list of sources]
+```
+
+For instance, given the following example
+
+```meson
+src = ['main.cpp', 'fileA.cpp']
+
+exe1 = executable('testExe', src)
+```
+
+the source `fileB.cpp` can be added with:
+
+```bash
+meson rewrite target testExe add fileB.cpp
+```
+
+After executing this command, the new `meson.build` will look like this:
+
+```meson
+src = ['main.cpp', 'fileA.cpp', 'fileB.cpp']
+
+exe1 = executable('testExe', src)
+```
+
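+The file can be removed again in the same way:
+
+```bash
+meson rewrite target testExe rm fileB.cpp
+```
+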
+In this case, `exe1` could also have been used for the target name. This is
+possible because the rewriter also searches for assignments and unique meson
+IDs, which can be acquired with introspection. If there are multiple targets
+with the same name, meson will do nothing and print an error message.
+
+For more information see the help output of the rewriter target command.
+
+### Setting the project version
+
+It is also possible to set kwargs of specific functions with the rewriter. The
+general command for setting or removing kwargs is:
+
+```bash
+meson rewrite kwargs {set/delete} <function type> <function ID> <key1> <value1> <key2> <value2> ...
+```
+
+For instance, setting the project version can be achieved with this command:
+
+```bash
+meson rewrite kwargs set project / version 1.0.0
+```
+
+Currently, only the following function types are supported:
+
+- dependency
+- target (any build target, the function ID is the target name/ID)
+- project (the function ID must be `/` since project() can only be called once)
+
+For more information see the help output of the rewriter kwargs command.
+
+### Setting the project default options
+
+For setting and deleting default options, use the following command:
+
+```bash
+meson rewrite default-options {set/delete} <opt1> <value1> <opt2> <value2> ...
+```
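+
+For example, to set the default C++ standard (this assumes the project
+compiles C++):
+
+```bash
+meson rewrite default-options set cpp_std c++14
+```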
+
+## Limitations
+
+Rewriting a meson file is not guaranteed to keep the indentation of the
+modified functions. Additionally, comments inside a modified statement will be
+removed. Furthermore, all source files will be sorted alphabetically.
+
+For instance, adding `e.c` to `srcs` in the following code
+
+```meson
+# Important comment
+
+srcs = [
+'a.c', 'c.c', 'f.c',
+# something important about b
+ 'b.c', 'd.c', 'g.c'
+]
+
+# COMMENT
+```
+
+would result in the following code:
+
+```meson
+# Important comment
+
+srcs = [
+ 'a.c',
+ 'b.c',
+ 'c.c',
+ 'd.c',
+ 'e.c',
+ 'f.c',
+ 'g.c'
+]
+
+# COMMENT
+```
+
+## Using the "script mode"
+
+The "script mode" should be the preferred API for third party programs, since
+it offers more flexibility and higher API stability. The "scripts" are stored
+in JSON format and executed with `meson rewrite command <JSON file or string>`.
+
+The JSON format is defined as follows:
+
+```json
+[
+ {
+ "type": "function to execute",
+ ...
+ }, {
+ "type": "other function",
+ ...
+ },
+ ...
+]
+```
+
+Each object in the main array must have a `type` entry which specifies which
+function should be executed.
+
+Currently, the following functions are supported:
+
+- target
+- kwargs
+- default_options
+
+### Target modification format
+
+The format for the type `target` is defined as follows:
+
+```json
+{
+ "type": "target",
+ "target": "target ID/name/assignment variable",
+ "operation": "one of ['src_add', 'src_rm', 'target_rm', 'target_add', 'info']",
+ "sources": ["list", "of", "source", "files", "to", "add, remove"],
+ "subdir": "subdir where the new target should be added (only has an effect for operation 'tgt_add')",
+ "target_type": "function name of the new target -- same as in the CLI (only has an effect for operation 'tgt_add')"
+}
+```
+
+The keys `sources`, `subdir` and `target_type` are optional.
+
+### kwargs modification format
+
+The format for the type `kwargs` is defined as follows:
+
+```json
+{
+ "type": "kwargs",
+ "function": "one of ['dependency', 'target', 'project']",
+ "id": "function ID",
+ "operation": "one of ['set', 'delete', 'add', 'remove', 'remove_regex', 'info']",
+ "kwargs": {
+ "key1": "value1",
+ "key2": "value2",
+ ...
+ }
+}
+```
+
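+As a concrete sketch (the target name, source file and version here are
+hypothetical), a script combining the `target` and `kwargs` formats could
+look like this:
+
+```json
+[
+ {
+ "type": "target",
+ "target": "testExe",
+ "operation": "src_add",
+ "sources": ["fileC.cpp"]
+ }, {
+ "type": "kwargs",
+ "function": "project",
+ "id": "/",
+ "operation": "set",
+ "kwargs": { "version": "2.0.0" }
+ }
+]
+```
+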
+### Default options modification format
+
+The format for the type `default_options` is defined as follows:
+
+```json
+{
+ "type": "default_options",
+ "operation": "one of ['set', 'delete']",
+ "options": {
+ "opt1": "value1",
+ "opt2": "value2",
+ ...
+ }
+}
+```
+
+For operation `delete`, the values of the `options` can be anything (including `null`).
+
+## Extracting information
+
+The rewriter also offers the operation `info` for the types `target` and
+`kwargs`. When this operation is used, meson will print a JSON dump to stderr,
+containing all the information the rewriter has about the build target or
+function kwargs in question.
+
+The output format is currently experimental and may change in the future.
diff --git a/docs/markdown/Style-guide.md b/docs/markdown/Style-guide.md
index 9008592..30d30cf 100644
--- a/docs/markdown/Style-guide.md
+++ b/docs/markdown/Style-guide.md
@@ -34,3 +34,35 @@ Try to keep cross compilation arguments away from your build files as
much as possible. Keep them in the cross file instead. This adds
portability, since all changes needed to compile to a different
platform are isolated in one place.
+
+# Sorting source paths
+
+The source file arrays should all be sorted. This makes it easier to spot
+errors and often reduces merge conflicts. Furthermore, the paths should be
+sorted with a natural sorting algorithm, so that numbers are sorted in an
+intuitive way (`1, 2, 3, 10, 20` instead of `1, 10, 2, 20, 3`).
+
+Numbers should also be sorted before characters (`a111` before `ab0`).
+Furthermore, strings should be sorted case-insensitively.
+
+Additionally, if a path contains a directory, it should be sorted before
+plain files. This rule also applies recursively to subdirectories.
+
+The following example shows a correctly sorted source list:
+
+```meson
+sources = files([
+ 'aaa/a1.c',
+ 'aaa/a2.c',
+ 'bbb/subdir1/b1.c',
+ 'bbb/subdir2/b2.c',
+ 'bbb/subdir10/b3.c',
+ 'bbb/subdir20/b4.c',
+ 'bbb/b5.c',
+ 'bbb/b6.c',
+ 'f1.c',
+ 'f2.c',
+ 'f10.c',
+ 'f20.c'
+])
+```
diff --git a/docs/markdown/Unit-tests.md b/docs/markdown/Unit-tests.md
index a8e7273..9e61739 100644
--- a/docs/markdown/Unit-tests.md
+++ b/docs/markdown/Unit-tests.md
@@ -51,9 +51,15 @@ By default Meson uses as many concurrent processes as there are cores on the tes
$ MESON_TESTTHREADS=5 ninja test
```
-## Skipped tests
+## Skipped tests and hard errors
-Sometimes a test can only determine at runtime that it can not be run. The GNU standard approach in this case is to exit the program with error code 77. Meson will detect this and report these tests as skipped rather than failed. This behavior was added in version 0.37.0.
+Sometimes a test can only determine at runtime that it cannot be run.
+
+For the default `exitcode` testing protocol, the GNU standard approach in this case is to exit the program with error code 77. Meson will detect this and report these tests as skipped rather than failed. This behavior was added in version 0.37.0.
+
+For TAP-based tests, skipped tests should print a single line starting with `1..0 # SKIP`.
+
+In addition, a test may fail during set-up in a way that means it should be reported as an error even if it is marked as an expected failure. The GNU standard approach in this case is to exit the program with error code 99. Again, Meson will detect this and report these tests as `ERROR`, ignoring the setting of `should_fail`. This behavior was added in version 0.50.0.
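+
+For example, a test program that discovers at runtime that it cannot run
+could report itself as skipped like this (a minimal sketch):
+
+```c
+int main(void) {
+    /* A required prerequisite is missing: report the test as skipped. */
+    return 77;
+}
+```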
## Testing tool
diff --git a/docs/markdown/snippets/introspect_deps_no_bd.md b/docs/markdown/snippets/introspect_deps_no_bd.md
new file mode 100644
index 0000000..cfae58b
--- /dev/null
+++ b/docs/markdown/snippets/introspect_deps_no_bd.md
@@ -0,0 +1,25 @@
+## `introspect --scan-dependencies` can now be used to scan for dependencies used in a project
+
+It is now possible to run `meson introspect --scan-dependencies /path/to/meson.build`
+without a configured build directory to scan for dependencies.
+
+The output format is as follows:
+
+```json
+[
+ {
+ "name": "The name of the dependency",
+ "required": true,
+ "conditional": false,
+ "has_fallback": false
+ }
+]
+```
+
+The `required` keyword specifies whether the dependency is marked as required
+in the `meson.build` (all dependencies are required by default). The
+`conditional` key indicates whether the `dependency()` function was called
+inside a conditional block. In a real meson run such dependencies might not be
+used, so they _may_ not actually be required, even if the `required` key is
+set. The `has_fallback` key simply indicates whether a fallback was directly
+set in the `dependency()` function.
diff --git a/docs/markdown/snippets/introspect_multiple.md b/docs/markdown/snippets/introspect_multiple.md
index 15f0e29..7953415 100644
--- a/docs/markdown/snippets/introspect_multiple.md
+++ b/docs/markdown/snippets/introspect_multiple.md
@@ -20,4 +20,5 @@ Additionally the format of `meson introspect target` was changed:
- New: the `sources` key. It stores the source files of a target and their compiler parameters.
- New: the `defined_in` key. It stores the meson file where a target is defined
+ - New: the `subproject` key. It stores the name of the subproject where a target is defined.
- Added new target types (`jar`, `shared module`).
diff --git a/docs/markdown/snippets/introspect_projectinfo_subprojects_dir.md b/docs/markdown/snippets/introspect_projectinfo_subprojects_dir.md
new file mode 100644
index 0000000..6d893d0
--- /dev/null
+++ b/docs/markdown/snippets/introspect_projectinfo_subprojects_dir.md
@@ -0,0 +1,4 @@
+## Add subproject_dir to --projectinfo introspection output
+
+This allows applications interfacing with Meson (such as IDEs) to know about
+an overridden subproject directory.
diff --git a/docs/markdown/snippets/introspect_targets_no_bd.md b/docs/markdown/snippets/introspect_targets_no_bd.md
new file mode 100644
index 0000000..0172a4e
--- /dev/null
+++ b/docs/markdown/snippets/introspect_targets_no_bd.md
@@ -0,0 +1,21 @@
+## `introspect --targets` can now be used without configured build directory
+
+It is now possible to run `meson introspect --targets /path/to/meson.build`
+without a configured build directory.
+
+The generated output is similar to running the introspection with a build
+directory. However, there are some key differences:
+
+- The paths in `filename` are now _relative_ to the future build directory
+- The `install_filename` key is completely missing
+- There is only one entry in `target_sources`:
+ - With the language set to `unknown`
+ - Empty lists for `compiler` and `parameters` and `generated_sources`
+ - The `sources` list _should_ contain all sources of the target
+
+There is no guarantee that the sources list in `target_sources` is correct.
+There might be differences due to internal limitations. It is also not
+guaranteed that all targets will be listed in the output. Targets may even
+be listed that would not exist in a normal meson run; this can happen if a
+target is defined inside an if statement.
+Use this feature with care.
\ No newline at end of file
diff --git a/docs/markdown/snippets/netcdf.md b/docs/markdown/snippets/netcdf.md
new file mode 100644
index 0000000..711f174
--- /dev/null
+++ b/docs/markdown/snippets/netcdf.md
@@ -0,0 +1,3 @@
+## NetCDF
+
+NetCDF support for C, C++ and Fortran is added via pkg-config.
diff --git a/docs/markdown/snippets/rewriter.md b/docs/markdown/snippets/rewriter.md
new file mode 100644
index 0000000..7a4621d
--- /dev/null
+++ b/docs/markdown/snippets/rewriter.md
@@ -0,0 +1,18 @@
+## Meson file rewriter
+
+This release adds the functionality to perform some basic modification
+on the `meson.build` files from the command line. The currently
+supported operations are:
+
+- For build targets:
+ - Add/Remove source files
+ - Add/Remove targets
+ - Modify a select set of kwargs
+ - Print some JSON information
+- For dependencies:
+ - Modify a select set of kwargs
+- For the project function:
+ - Modify a select set of kwargs
+ - Modify the default options list
+
+For more information see the rewriter documentation.
diff --git a/docs/sitemap.txt b/docs/sitemap.txt
index 6987641..bea2a31 100644
--- a/docs/sitemap.txt
+++ b/docs/sitemap.txt
@@ -60,6 +60,7 @@ index.md
Reference-manual.md
Reference-tables.md
Style-guide.md
+ Rewriter.md
FAQ.md
Reproducible-builds.md
howtox.md
diff --git a/mesonbuild/ast/__init__.py b/mesonbuild/ast/__init__.py
index a9370dc..48de523 100644
--- a/mesonbuild/ast/__init__.py
+++ b/mesonbuild/ast/__init__.py
@@ -16,6 +16,7 @@
# or an interpreter-based tool.
__all__ = [
+ 'AstConditionLevel',
'AstInterpreter',
'AstIDGenerator',
'AstIndentationGenerator',
@@ -28,5 +29,5 @@ __all__ = [
from .interpreter import AstInterpreter
from .introspection import IntrospectionInterpreter, build_target_functions
from .visitor import AstVisitor
-from .postprocess import AstIDGenerator, AstIndentationGenerator
+from .postprocess import AstConditionLevel, AstIDGenerator, AstIndentationGenerator
from .printer import AstPrinter
diff --git a/mesonbuild/ast/interpreter.py b/mesonbuild/ast/interpreter.py
index b2cd3f5..01277f0 100644
--- a/mesonbuild/ast/interpreter.py
+++ b/mesonbuild/ast/interpreter.py
@@ -51,6 +51,7 @@ class AstInterpreter(interpreterbase.InterpreterBase):
self.visitors = visitors
self.visited_subdirs = {}
self.assignments = {}
+ self.assign_vals = {}
self.reverse_assignment = {}
self.funcs.update({'project': self.func_do_nothing,
'test': self.func_do_nothing,
@@ -161,7 +162,7 @@ class AstInterpreter(interpreterbase.InterpreterBase):
self.assignments[node.var_name] += [node.value] # Save a reference to the value node
if hasattr(node.value, 'ast_id'):
self.reverse_assignment[node.value.ast_id] = node
- self.evaluate_statement(node.value) # Evaluate the value just in case
+ self.assign_vals[node.var_name] += [self.evaluate_statement(node.value)]
def evaluate_indexing(self, node):
return 0
@@ -200,7 +201,7 @@ class AstInterpreter(interpreterbase.InterpreterBase):
self.assignments[node.var_name] = [node.value] # Save a reference to the value node
if hasattr(node.value, 'ast_id'):
self.reverse_assignment[node.value.ast_id] = node
- self.evaluate_statement(node.value) # Evaluate the value just in case
+ self.assign_vals[node.var_name] = [self.evaluate_statement(node.value)] # Evaluate the value just in case
def flatten_args(self, args, include_unknown_args: bool = False):
# Resolve mparser.ArrayNode if needed
diff --git a/mesonbuild/ast/introspection.py b/mesonbuild/ast/introspection.py
index 0917015..5745d29 100644
--- a/mesonbuild/ast/introspection.py
+++ b/mesonbuild/ast/introspection.py
@@ -16,10 +16,11 @@
# or an interpreter-based tool
from . import AstInterpreter
-from .. import compilers, environment, mesonlib, mparser, optinterpreter
+from .. import compilers, environment, mesonlib, optinterpreter
from .. import coredata as cdata
from ..interpreterbase import InvalidArguments
from ..build import Executable, Jar, SharedLibrary, SharedModule, StaticLibrary
+from ..mparser import ArithmeticNode, ArrayNode, ElementaryNode, IdNode, FunctionNode, StringNode
import os
build_target_functions = ['executable', 'jar', 'library', 'shared_library', 'shared_module', 'static_library', 'both_libraries']
@@ -77,7 +78,7 @@ class IntrospectionInterpreter(AstInterpreter):
proj_name = args[0]
proj_vers = kwargs.get('version', 'undefined')
proj_langs = self.flatten_args(args[1:])
- if isinstance(proj_vers, mparser.ElementaryNode):
+ if isinstance(proj_vers, ElementaryNode):
proj_vers = proj_vers.value
if not isinstance(proj_vers, str):
proj_vers = 'undefined'
@@ -96,8 +97,8 @@ class IntrospectionInterpreter(AstInterpreter):
if not self.is_subproject() and 'subproject_dir' in kwargs:
spdirname = kwargs['subproject_dir']
- if isinstance(spdirname, str):
- self.subproject_dir = spdirname
+ if isinstance(spdirname, ElementaryNode):
+ self.subproject_dir = spdirname.value
if not self.is_subproject():
self.project_data['subprojects'] = []
subprojects_dir = os.path.join(self.source_root, self.subproject_dir)
@@ -136,16 +137,25 @@ class IntrospectionInterpreter(AstInterpreter):
if not args:
return
name = args[0]
+ has_fallback = 'fallback' in kwargs
+ required = kwargs.get('required', True)
+ condition_level = node.condition_level if hasattr(node, 'condition_level') else 0
+ if isinstance(required, ElementaryNode):
+ required = required.value
self.dependencies += [{
'name': name,
+ 'required': required,
+ 'has_fallback': has_fallback,
+ 'conditional': condition_level > 0,
'node': node
}]
def build_target(self, node, args, kwargs, targetclass):
- if not args:
+ args = self.flatten_args(args)
+ if not args or not isinstance(args[0], str):
return
kwargs = self.flatten_kwargs(kwargs, True)
- name = self.flatten_args(args)[0]
+ name = args[0]
srcqueue = [node]
if 'sources' in kwargs:
srcqueue += kwargs['sources']
@@ -154,47 +164,52 @@ class IntrospectionInterpreter(AstInterpreter):
while srcqueue:
curr = srcqueue.pop(0)
arg_node = None
- if isinstance(curr, mparser.FunctionNode):
+ if isinstance(curr, FunctionNode):
arg_node = curr.args
- elif isinstance(curr, mparser.ArrayNode):
+ elif isinstance(curr, ArrayNode):
arg_node = curr.args
- elif isinstance(curr, mparser.IdNode):
+ elif isinstance(curr, IdNode):
# Try to resolve the ID and append the node to the queue
id = curr.value
if id in self.assignments and self.assignments[id]:
tmp_node = self.assignments[id][0]
- if isinstance(tmp_node, (mparser.ArrayNode, mparser.IdNode, mparser.FunctionNode)):
+ if isinstance(tmp_node, (ArrayNode, IdNode, FunctionNode)):
srcqueue += [tmp_node]
+ elif isinstance(curr, ArithmeticNode):
+ srcqueue += [curr.left, curr.right]
if arg_node is None:
continue
- elemetary_nodes = list(filter(lambda x: isinstance(x, (str, mparser.StringNode)), arg_node.arguments))
- srcqueue += list(filter(lambda x: isinstance(x, (mparser.FunctionNode, mparser.ArrayNode, mparser.IdNode)), arg_node.arguments))
+ elemetary_nodes = list(filter(lambda x: isinstance(x, (str, StringNode)), arg_node.arguments))
+ srcqueue += list(filter(lambda x: isinstance(x, (FunctionNode, ArrayNode, IdNode, ArithmeticNode)), arg_node.arguments))
# Pop the first element if the function is a build target function
- if isinstance(curr, mparser.FunctionNode) and curr.func_name in build_target_functions:
+ if isinstance(curr, FunctionNode) and curr.func_name in build_target_functions:
elemetary_nodes.pop(0)
if elemetary_nodes:
source_nodes += [curr]
# Make sure nothing can crash when creating the build class
- kwargs = {}
+ kwargs_reduced = {k: v for k, v in kwargs.items() if k in targetclass.known_kwargs and k in ['install', 'build_by_default', 'build_always']}
is_cross = False
objects = []
empty_sources = [] # Passing the unresolved sources list causes errors
- target = targetclass(name, self.subdir, self.subproject, is_cross, empty_sources, objects, self.environment, kwargs)
+ target = targetclass(name, self.subdir, self.subproject, is_cross, empty_sources, objects, self.environment, kwargs_reduced)
- self.targets += [{
+ new_target = {
'name': target.get_basename(),
'id': target.get_id(),
'type': target.get_typename(),
'defined_in': os.path.normpath(os.path.join(self.source_root, self.subdir, environment.build_filename)),
'subdir': self.subdir,
'build_by_default': target.build_by_default,
+ 'installed': target.should_install(),
+ 'outputs': target.get_outputs(),
'sources': source_nodes,
'kwargs': kwargs,
'node': node,
- }]
+ }
- return
+ self.targets += [new_target]
+ return new_target
def build_library(self, node, args, kwargs):
default_library = self.coredata.get_builtin_option('default_library')
@@ -230,7 +245,7 @@ class IntrospectionInterpreter(AstInterpreter):
if 'target_type' not in kwargs:
return
target_type = kwargs.pop('target_type')
- if isinstance(target_type, mparser.ElementaryNode):
+ if isinstance(target_type, ElementaryNode):
target_type = target_type.value
if target_type == 'executable':
return self.build_target(node, args, kwargs, Executable)
diff --git a/mesonbuild/ast/postprocess.py b/mesonbuild/ast/postprocess.py
index e913b4f..8e8732f 100644
--- a/mesonbuild/ast/postprocess.py
+++ b/mesonbuild/ast/postprocess.py
@@ -84,3 +84,33 @@ class AstIDGenerator(AstVisitor):
self.counter[name] = 0
node.ast_id = name + '#' + str(self.counter[name])
self.counter[name] += 1
+
+class AstConditionLevel(AstVisitor):
+ def __init__(self):
+ self.condition_level = 0
+
+ def visit_default_func(self, node: mparser.BaseNode):
+ node.condition_level = self.condition_level
+
+ def visit_ForeachClauseNode(self, node: mparser.ForeachClauseNode):
+ self.visit_default_func(node)
+ self.condition_level += 1
+ node.items.accept(self)
+ node.block.accept(self)
+ self.condition_level -= 1
+
+ def visit_IfClauseNode(self, node: mparser.IfClauseNode):
+ self.visit_default_func(node)
+ for i in node.ifs:
+ i.accept(self)
+ if node.elseblock:
+ self.condition_level += 1
+ node.elseblock.accept(self)
+ self.condition_level -= 1
+
+ def visit_IfNode(self, node: mparser.IfNode):
+ self.visit_default_func(node)
+ self.condition_level += 1
+ node.condition.accept(self)
+ node.block.accept(self)
+ self.condition_level -= 1
diff --git a/mesonbuild/backend/backends.py b/mesonbuild/backend/backends.py
index 14b189b..4d35d22 100644
--- a/mesonbuild/backend/backends.py
+++ b/mesonbuild/backend/backends.py
@@ -84,7 +84,7 @@ class ExecutableSerialisation:
class TestSerialisation:
def __init__(self, name, project, suite, fname, is_cross_built, exe_wrapper, is_parallel,
- cmd_args, env, should_fail, timeout, workdir, extra_paths):
+ cmd_args, env, should_fail, timeout, workdir, extra_paths, protocol):
self.name = name
self.project_name = project
self.suite = suite
@@ -100,6 +100,7 @@ class TestSerialisation:
self.timeout = timeout
self.workdir = workdir
self.extra_paths = extra_paths
+ self.protocol = protocol
class OptionProxy:
def __init__(self, name, value):
@@ -515,6 +516,23 @@ class Backend:
args += compiler.get_pch_use_args(pchpath, p[0])
return includeargs + args
+ def create_msvc_pch_implementation(self, target, lang, pch_header):
+ # We have to include the language in the file name, otherwise
+ # pch.c and pch.cpp will both end up as pch.obj in VS backends.
+ impl_name = 'meson_pch-%s.%s' % (lang, lang)
+ pch_rel_to_build = os.path.join(self.get_target_private_dir(target), impl_name)
+ # Make sure to prepend the build dir, since the working directory is
+ # not defined. Otherwise, we might create the file in the wrong path.
+ pch_file = os.path.join(self.build_dir, pch_rel_to_build)
+ os.makedirs(os.path.dirname(pch_file), exist_ok=True)
+
+ content = '#include "%s"' % os.path.basename(pch_header)
+ pch_file_tmp = pch_file + '.tmp'
+ with open(pch_file_tmp, 'w') as f:
+ f.write(content)
+ mesonlib.replace_if_different(pch_file, pch_file_tmp)
+ return pch_rel_to_build
+
@staticmethod
def escape_extra_args(compiler, args):
# No extra escaping/quoting needed when not running on Windows
@@ -739,7 +757,7 @@ class Backend:
raise MesonException('Bad object in test command.')
ts = TestSerialisation(t.get_name(), t.project_name, t.suite, cmd, is_cross,
exe_wrapper, t.is_parallel, cmd_args, t.env,
- t.should_fail, t.timeout, t.workdir, extra_paths)
+ t.should_fail, t.timeout, t.workdir, extra_paths, t.protocol)
arr.append(ts)
return arr
diff --git a/mesonbuild/backend/ninjabackend.py b/mesonbuild/backend/ninjabackend.py
index 16962a4..1e0bb8b 100644
--- a/mesonbuild/backend/ninjabackend.py
+++ b/mesonbuild/backend/ninjabackend.py
@@ -19,7 +19,7 @@ import pickle
import subprocess
from collections import OrderedDict
import itertools
-from pathlib import PurePath
+from pathlib import PurePath, Path
from functools import lru_cache
from . import backends
@@ -1851,9 +1851,11 @@ rule FORTRAN_DEP_HACK%s
mod_files = []
usere = re.compile(r"\s*use,?\s*(?:non_intrinsic)?\s*(?:::)?\s*(\w+)", re.IGNORECASE)
submodre = re.compile(r"\s*\bsubmodule\b\s+\((\w+:?\w+)\)\s+(\w+)\s*$", re.IGNORECASE)
- dirname = self.get_target_private_dir(target)
+ dirname = Path(self.get_target_private_dir(target))
tdeps = self.fortran_deps[target.get_basename()]
- with open(src, encoding='ascii', errors='ignore') as f:
+ src = Path(src)
+ srcdir = Path(self.source_dir)
+ with src.open(encoding='ascii', errors='ignore') as f:
for line in f:
usematch = usere.match(line)
if usematch is not None:
@@ -1872,14 +1874,17 @@ rule FORTRAN_DEP_HACK%s
# a common occurrence, which would lead to lots of
# distracting noise.
continue
- mod_source_file = tdeps[usename]
- # Check if a source uses a module it exports itself.
- # Potential bug if multiple targets have a file with
- # the same name.
- if mod_source_file.fname == os.path.basename(src):
+ srcfile = srcdir / tdeps[usename].fname
+ if not srcfile.is_file():
+ if srcfile.name != src.name: # generated source file
+ pass
+ else: # subproject
+ continue
+ elif srcfile.samefile(src): # self-reference
continue
+
mod_name = compiler.module_name_to_filename(usename)
- mod_files.append(os.path.join(dirname, mod_name))
+ mod_files.append(str(dirname / mod_name))
else:
submodmatch = submodre.match(line)
if submodmatch is not None:
@@ -1890,10 +1895,16 @@ rule FORTRAN_DEP_HACK%s
for parent in parents:
if parent not in tdeps:
raise MesonException("submodule {} relies on parent module {} that was not found.".format(submodmatch.group(2).lower(), parent))
- if tdeps[parent].fname == os.path.basename(src): # same file
+ submodsrcfile = srcdir / tdeps[parent].fname
+ if not submodsrcfile.is_file():
+ if submodsrcfile.name != src.name: # generated source file
+ pass
+ else: # subproject
+ continue
+ elif submodsrcfile.samefile(src): # self-reference
continue
mod_name = compiler.module_name_to_filename(parent)
- mod_files.append(os.path.join(dirname, mod_name))
+ mod_files.append(str(dirname / mod_name))
return mod_files
@@ -2188,6 +2199,7 @@ rule FORTRAN_DEP_HACK%s
for modname, srcfile in self.fortran_deps[target.get_basename()].items():
modfile = os.path.join(self.get_target_private_dir(target),
compiler.module_name_to_filename(modname))
+
if srcfile == src:
depelem = NinjaBuildElement(self.all_outputs, modfile, 'FORTRAN_DEP_HACK' + crstr, rel_obj)
depelem.write(outfile)
@@ -2240,22 +2252,28 @@ rule FORTRAN_DEP_HACK%s
return [os.path.join(self.get_target_dir(lt), lt.get_filename()) for lt in target.link_targets]
def generate_msvc_pch_command(self, target, compiler, pch):
- if len(pch) != 2:
- raise MesonException('MSVC requires one header and one source to produce precompiled headers.')
header = pch[0]
- source = pch[1]
pchname = compiler.get_pch_name(header)
dst = os.path.join(self.get_target_private_dir(target), pchname)
commands = []
commands += self.generate_basic_compiler_args(target, compiler)
+
+ if len(pch) == 1:
+ # Auto generate PCH.
+ source = self.create_msvc_pch_implementation(target, compiler.get_language(), pch[0])
+ pch_header_dir = os.path.dirname(os.path.join(self.build_to_src, target.get_source_subdir(), header))
+ commands += compiler.get_include_args(pch_header_dir, False)
+ else:
+ source = os.path.join(self.build_to_src, target.get_source_subdir(), pch[1])
+
just_name = os.path.basename(header)
(objname, pch_args) = compiler.gen_pch_args(just_name, source, dst)
commands += pch_args
commands += self._generate_single_compile(target, compiler)
commands += self.get_compile_debugfile_args(compiler, target, objname)
dep = dst + '.' + compiler.get_depfile_suffix()
- return commands, dep, dst, [objname]
+ return commands, dep, dst, [objname], source
def generate_gcc_pch_command(self, target, compiler, pch):
commands = self._generate_single_compile(target, compiler)
@@ -2284,8 +2302,7 @@ rule FORTRAN_DEP_HACK%s
raise InvalidArguments(msg)
compiler = target.compilers[lang]
if isinstance(compiler, VisualStudioCCompiler):
- src = os.path.join(self.build_to_src, target.get_source_subdir(), pch[-1])
- (commands, dep, dst, objs) = self.generate_msvc_pch_command(target, compiler, pch)
+ (commands, dep, dst, objs, src) = self.generate_msvc_pch_command(target, compiler, pch)
extradep = os.path.join(self.build_to_src, target.get_source_subdir(), pch[0])
elif compiler.id == 'intel':
# Intel generates on target generation
diff --git a/mesonbuild/backend/vs2010backend.py b/mesonbuild/backend/vs2010backend.py
index f25a2a6..6d62553 100644
--- a/mesonbuild/backend/vs2010backend.py
+++ b/mesonbuild/backend/vs2010backend.py
@@ -12,6 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+import copy
import os
import pickle
import xml.dom.minidom
@@ -531,8 +532,6 @@ class Vs2010Backend(backends.Backend):
def gen_custom_target_vcxproj(self, target, ofname, guid):
root = self.create_basic_crap(target, guid)
- action = ET.SubElement(root, 'ItemDefinitionGroup')
- customstep = ET.SubElement(action, 'CustomBuildStep')
# We need to always use absolute paths because our invocation is always
# from the target dir, not the build root.
target.absolute_paths = True
@@ -549,9 +548,12 @@ class Vs2010Backend(backends.Backend):
extra_paths=extra_paths,
capture=ofilenames[0] if target.capture else None)
wrapper_cmd = self.environment.get_build_command() + ['--internal', 'exe', exe_data]
- ET.SubElement(customstep, 'Command').text = ' '.join(self.quote_arguments(wrapper_cmd))
- ET.SubElement(customstep, 'Outputs').text = ';'.join(ofilenames)
- ET.SubElement(customstep, 'Inputs').text = ';'.join([exe_data] + srcs + depend_files)
+ if target.build_always_stale:
+ # Use a nonexistent file to always consider the target out-of-date.
+ ofilenames += [self.nonexistent_file(os.path.join(self.environment.get_scratch_dir(),
+ 'outofdate.file'))]
+ self.add_custom_build(root, 'custom_target', ' '.join(self.quote_arguments(wrapper_cmd)),
+ deps=[exe_data] + srcs + depend_files, outputs=ofilenames)
ET.SubElement(root, 'Import', Project=r'$(VCTargetsPath)\Microsoft.Cpp.targets')
self.generate_custom_generator_commands(target, root)
self.add_regen_dependency(root)
@@ -792,19 +794,6 @@ class Vs2010Backend(backends.Backend):
ET.SubElement(type_config, 'WholeProgramOptimization').text = 'false'
# Let VS auto-set the RTC level
ET.SubElement(type_config, 'BasicRuntimeChecks').text = 'Default'
- o_flags = split_o_flags_args(buildtype_args)
- if '/Oi' in o_flags:
- ET.SubElement(type_config, 'IntrinsicFunctions').text = 'true'
- if '/Ob1' in o_flags:
- ET.SubElement(type_config, 'InlineFunctionExpansion').text = 'OnlyExplicitInline'
- elif '/Ob2' in o_flags:
- ET.SubElement(type_config, 'InlineFunctionExpansion').text = 'AnySuitable'
- # In modern MSVC parlance "/O1" means size optimization.
- # "/Os" has been deprecated.
- if '/O1' in o_flags:
- ET.SubElement(type_config, 'FavorSizeOrSpeed').text = 'Size'
- else:
- ET.SubElement(type_config, 'FavorSizeOrSpeed').text = 'Speed'
# Incremental linking increases code size
if '/INCREMENTAL:NO' in buildtype_link_args:
ET.SubElement(type_config, 'LinkIncremental').text = 'false'
@@ -844,15 +833,6 @@ class Vs2010Backend(backends.Backend):
ET.SubElement(type_config, 'BasicRuntimeChecks').text = 'UninitializedLocalUsageCheck'
elif '/RTCs' in buildtype_args:
ET.SubElement(type_config, 'BasicRuntimeChecks').text = 'StackFrameRuntimeCheck'
- # Optimization flags
- if '/Ox' in o_flags:
- ET.SubElement(type_config, 'Optimization').text = 'Full'
- elif '/O2' in o_flags:
- ET.SubElement(type_config, 'Optimization').text = 'MaxSpeed'
- elif '/O1' in o_flags:
- ET.SubElement(type_config, 'Optimization').text = 'MinSpace'
- elif '/Od' in o_flags:
- ET.SubElement(type_config, 'Optimization').text = 'Disabled'
# End configuration
ET.SubElement(root, 'Import', Project=r'$(VCTargetsPath)\Microsoft.Cpp.props')
generated_files, custom_target_output_files, generated_files_include_dirs = self.generate_custom_generator_commands(target, root)
@@ -1024,6 +1004,27 @@ class Vs2010Backend(backends.Backend):
ET.SubElement(clconf, 'WarningLevel').text = 'Level' + str(1 + int(warning_level))
if self.get_option_for_target('werror', target):
ET.SubElement(clconf, 'TreatWarningAsError').text = 'true'
+ # Optimization flags
+ o_flags = split_o_flags_args(buildtype_args)
+ if '/Ox' in o_flags:
+ ET.SubElement(clconf, 'Optimization').text = 'Full'
+ elif '/O2' in o_flags:
+ ET.SubElement(clconf, 'Optimization').text = 'MaxSpeed'
+ elif '/O1' in o_flags:
+ ET.SubElement(clconf, 'Optimization').text = 'MinSpace'
+ elif '/Od' in o_flags:
+ ET.SubElement(clconf, 'Optimization').text = 'Disabled'
+ if '/Oi' in o_flags:
+ ET.SubElement(clconf, 'IntrinsicFunctions').text = 'true'
+ if '/Ob1' in o_flags:
+ ET.SubElement(clconf, 'InlineFunctionExpansion').text = 'OnlyExplicitInline'
+ elif '/Ob2' in o_flags:
+ ET.SubElement(clconf, 'InlineFunctionExpansion').text = 'AnySuitable'
+ # Size-preserving flags
+ if '/Os' in o_flags:
+ ET.SubElement(clconf, 'FavorSizeOrSpeed').text = 'Size'
+ else:
+ ET.SubElement(clconf, 'FavorSizeOrSpeed').text = 'Speed'
# Note: SuppressStartupBanner is /NOLOGO and is 'true' by default
pch_sources = {}
if self.environment.coredata.base_options.get('b_pch', False):
@@ -1034,13 +1035,18 @@ class Vs2010Backend(backends.Backend):
continue
pch_node.text = 'Use'
if compiler.id == 'msvc':
- if len(pch) != 2:
- raise MesonException('MSVC requires one header and one source to produce precompiled headers.')
- pch_sources[lang] = [pch[0], pch[1], lang]
+ if len(pch) == 1:
+ # Auto generate PCH.
+ src = os.path.join(down, self.create_msvc_pch_implementation(target, lang, pch[0]))
+ pch_header_dir = os.path.dirname(os.path.join(proj_to_src_dir, pch[0]))
+ else:
+ src = os.path.join(proj_to_src_dir, pch[1])
+ pch_header_dir = None
+ pch_sources[lang] = [pch[0], src, lang, pch_header_dir]
else:
# I don't know whether it's relevant but let's handle other compilers
# used with a vs backend
- pch_sources[lang] = [pch[0], None, lang]
+ pch_sources[lang] = [pch[0], None, lang, None]
if len(pch_sources) == 1:
# If there is only 1 language with precompiled headers, we can use it for the entire project, which
# is cleaner than specifying it for each source file.
@@ -1204,14 +1210,19 @@ class Vs2010Backend(backends.Backend):
self.add_preprocessor_defines(lang, inc_cl, file_defines)
self.add_include_dirs(lang, inc_cl, file_inc_dirs)
for lang in pch_sources:
- header, impl, suffix = pch_sources[lang]
+ impl = pch_sources[lang][1]
if impl:
- relpath = os.path.join(proj_to_src_dir, impl)
- inc_cl = ET.SubElement(inc_src, 'CLCompile', Include=relpath)
+ inc_cl = ET.SubElement(inc_src, 'CLCompile', Include=impl)
self.create_pch(pch_sources, lang, inc_cl)
self.add_additional_options(lang, inc_cl, file_args)
self.add_preprocessor_defines(lang, inc_cl, file_defines)
- self.add_include_dirs(lang, inc_cl, file_inc_dirs)
+ pch_header_dir = pch_sources[lang][3]
+ if pch_header_dir:
+ inc_dirs = copy.deepcopy(file_inc_dirs)
+ inc_dirs[lang] = [pch_header_dir] + inc_dirs[lang]
+ else:
+ inc_dirs = file_inc_dirs
+ self.add_include_dirs(lang, inc_cl, inc_dirs)
if self.has_objects(objects, additional_objects, gen_objs):
inc_objs = ET.SubElement(root, 'ItemGroup')
@@ -1426,14 +1437,20 @@ if %%errorlevel%% neq 0 goto :VCEnd'''
ET.SubElement(custombuild, 'Command').text = cmd_templ % command
if not outputs:
# Use a nonexistent file to always consider the target out-of-date.
- output_file = os.path.join(self.environment.get_scratch_dir(), 'outofdate.file')
- while os.path.exists(output_file):
- output_file += '0'
- outputs = [output_file]
+ outputs = [self.nonexistent_file(os.path.join(self.environment.get_scratch_dir(),
+ 'outofdate.file'))]
ET.SubElement(custombuild, 'Outputs').text = ';'.join(outputs)
if deps:
ET.SubElement(custombuild, 'AdditionalInputs').text = ';'.join(deps)
+ @staticmethod
+ def nonexistent_file(prefix):
+ i = 0
+ file = prefix
+ while os.path.exists(file):
+ file = '%s%d' % (prefix, i)
+ i += 1  # Advance the suffix so the loop terminates once a free name is found.
+ return file
+
def generate_debug_information(self, link):
# valid values for vs2015 is 'false', 'true', 'DebugFastLink'
ET.SubElement(link, 'GenerateDebugInformation').text = 'true'
diff --git a/mesonbuild/backend/xcodebackend.py b/mesonbuild/backend/xcodebackend.py
index a550d91..990b824 100644
--- a/mesonbuild/backend/xcodebackend.py
+++ b/mesonbuild/backend/xcodebackend.py
@@ -733,6 +733,8 @@ class XCodeBackend(backends.Backend):
else:
product_name = target.get_basename()
ldargs += target.link_args
+ for dep in target.get_external_deps():
+ ldargs += dep.get_link_args()
ldstr = ' '.join(ldargs)
valid = self.buildconfmap[target_name][buildtype]
langargs = {}
diff --git a/mesonbuild/build.py b/mesonbuild/build.py
index 2187d3e..9a1d158 100644
--- a/mesonbuild/build.py
+++ b/mesonbuild/build.py
@@ -1096,6 +1096,8 @@ You probably should put it in link_with instead.''')
if (os.path.dirname(pchlist[0]) != os.path.dirname(pchlist[1])):
raise InvalidArguments('PCH files must be stored in the same folder.')
+
+ mlog.warning('PCH source files are deprecated, only a single header file should be used.')
elif len(pchlist) > 2:
raise InvalidArguments('PCH definition may have a maximum of 2 files.')
for f in pchlist:
diff --git a/mesonbuild/compilers/compilers.py b/mesonbuild/compilers/compilers.py
index 7955f3d..24dffa6 100644
--- a/mesonbuild/compilers/compilers.py
+++ b/mesonbuild/compilers/compilers.py
@@ -24,6 +24,9 @@ from ..mesonlib import (
EnvironmentException, MesonException, OrderedSet,
version_compare, Popen_safe
)
+from ..envconfig import (
+ Properties,
+)
"""This file contains the data files of all compilers Meson knows
about. To support a new compiler, add its information below.
@@ -1009,27 +1012,44 @@ class Compiler:
def get_options(self):
opts = {} # build afresh every time
-
- # Take default values from env variables.
- if not self.is_cross:
- compile_args, link_args = self.get_args_from_envvars()
- else:
- compile_args = []
- link_args = []
description = 'Extra arguments passed to the {}'.format(self.get_display_language())
opts.update({
self.language + '_args': coredata.UserArrayOption(
self.language + '_args',
description + ' compiler',
- compile_args, shlex_split=True, user_input=True, allow_dups=True),
+ [], shlex_split=True, user_input=True, allow_dups=True),
self.language + '_link_args': coredata.UserArrayOption(
self.language + '_link_args',
description + ' linker',
- link_args, shlex_split=True, user_input=True, allow_dups=True),
+ [], shlex_split=True, user_input=True, allow_dups=True),
})
return opts
+ def get_and_default_options(self, properties: Properties):
+ """
+ Take default values from env variables and/or config files.
+ """
+ opts = self.get_options()
+
+ if properties.fallback:
+ # Get from env vars.
+ compile_args, link_args = self.get_args_from_envvars()
+ else:
+ compile_args = []
+ link_args = []
+
+ for k, o in opts.items():
+ if k in properties:
+ # Get from configuration files.
+ o.set_value(properties[k])
+ elif k == self.language + '_args':
+ o.set_value(compile_args)
+ elif k == self.language + '_link_args':
+ o.set_value(link_args)
+
+ return opts
+
def get_option_compile_args(self, options):
return []
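A standalone sketch, with hypothetical stand-ins for Properties and the
env-var reader, of the precedence get_and_default_options() gives the new
per-language options: values from the native/cross config file win, env vars
apply only when fallback is enabled, and everything else starts empty.

    def default_lang_args(lang, config_props, env_compile_args, fallback):
        key = lang + '_args'
        if key in config_props:     # 1. config (native/cross) file wins
            return config_props[key]
        if fallback:                # 2. otherwise env vars, if allowed
            return env_compile_args
        return []                   # 3. otherwise start empty

    assert default_lang_args('c', {'c_args': ['-O2']}, ['-g'], True) == ['-O2']
    assert default_lang_args('c', {}, ['-g'], True) == ['-g']
    assert default_lang_args('c', {}, ['-g'], False) == []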
diff --git a/mesonbuild/compilers/cuda.py b/mesonbuild/compilers/cuda.py
index 66dcf33..21fa498 100644
--- a/mesonbuild/compilers/cuda.py
+++ b/mesonbuild/compilers/cuda.py
@@ -12,7 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-import re, subprocess, os.path
+import re, os.path
from .. import mlog
from ..mesonlib import EnvironmentException, Popen_safe
@@ -46,36 +46,101 @@ class CudaCompiler(Compiler):
def get_no_stdinc_args(self):
return []
- def sanity_check(self, work_dir, environment):
- source_name = os.path.join(work_dir, 'sanitycheckcuda.cu')
- binary_name = os.path.join(work_dir, 'sanitycheckcuda')
- extra_flags = self.get_cross_extra_flags(environment, link=False)
- if self.is_cross:
- extra_flags += self.get_compile_only_args()
+ def thread_link_flags(self, environment):
+ return ['-Xcompiler=-pthread']
- code = '''
-__global__ void kernel (void) {
-
-}
+ def sanity_check(self, work_dir, environment):
+ mlog.debug('Sanity testing ' + self.get_display_language() + ' compiler:', ' '.join(self.exelist))
+ mlog.debug('Is cross compiler: %s.' % str(self.is_cross))
- int main(int argc,char** argv){
+ sname = 'sanitycheckcuda.cu'
+ code = r'''
+ #include <cuda_runtime.h>
+ #include <stdio.h>
+
+ __global__ void kernel (void) {}
+
+ int main(void){
+ struct cudaDeviceProp prop;
+ int count, i;
+ cudaError_t ret = cudaGetDeviceCount(&count);
+ if(ret != cudaSuccess){
+ fprintf(stderr, "%d\n", (int)ret);
+ }else{
+ for(i=0;i<count;i++){
+ if(cudaGetDeviceProperties(&prop, i) == cudaSuccess){
+ fprintf(stdout, "%d.%d\n", prop.major, prop.minor);
+ }
+ }
+ }
+ fflush(stderr);
+ fflush(stdout);
return 0;
}
'''
-
+ binname = sname.rsplit('.', 1)[0]
+ binname += '_cross' if self.is_cross else ''
+ source_name = os.path.join(work_dir, sname)
+ binary_name = os.path.join(work_dir, binname + '.exe')
with open(source_name, 'w') as ofile:
ofile.write(code)
- pc = subprocess.Popen(self.exelist + extra_flags + [source_name, '-o', binary_name])
- pc.wait()
+
+ # The sanity test for the CUDA language serves as both a sanity test
+ # and a native-build GPU architecture detection test, useful later.
+ #
+ # For this second purpose, NVCC has the very handy flags --run and
+ # --run-args, which allow one to run an application with the
+ # environment set up properly. Of course, this only works for native
+ # builds; for cross builds we must still use the exe_wrapper (if any).
+ self.detected_cc = ''
+ flags = ['-w', '-cudart', 'static', source_name]
+ if self.is_cross and self.exe_wrapper is None:
+ # Linking cross built apps is painful. You can't really
+ # tell if you should use -nostdlib or not and for example
+ # on OSX the compiler binary is the same but you need
+ # a ton of compiler flags to differentiate between
+ # arm and x86_64. So just compile.
+ flags += self.get_compile_only_args()
+ flags += self.get_output_args(binary_name)
+
+ # Compile sanity check
+ cmdlist = self.exelist + flags
+ mlog.debug('Sanity check compiler command line: ', ' '.join(cmdlist))
+ pc, stdo, stde = Popen_safe(cmdlist, cwd=work_dir)
+ mlog.debug('Sanity check compile stdout: ')
+ mlog.debug(stdo)
+ mlog.debug('-----\nSanity check compile stderr:')
+ mlog.debug(stde)
+ mlog.debug('-----')
if pc.returncode != 0:
- raise EnvironmentException('Cuda compiler %s can not compile programs.' % self.name_string())
+ raise EnvironmentException('Compiler {0} can not compile programs.'.format(self.name_string()))
+
+ # Run sanity check (if possible)
if self.is_cross:
- # Can't check if the binaries run so we have to assume they do
- return
- pe = subprocess.Popen(binary_name)
+ if self.exe_wrapper is None:
+ return
+ else:
+ cmdlist = self.exe_wrapper + [binary_name]
+ else:
+ cmdlist = self.exelist + ['--run', '"' + binary_name + '"']
+ mlog.debug('Sanity check run command line: ', ' '.join(cmdlist))
+ pe, stdo, stde = Popen_safe(cmdlist, cwd=work_dir)
+ mlog.debug('Sanity check run stdout: ')
+ mlog.debug(stdo)
+ mlog.debug('-----\nSanity check run stderr:')
+ mlog.debug(stde)
+ mlog.debug('-----')
pe.wait()
if pe.returncode != 0:
- raise EnvironmentException('Executables created by Cuda compiler %s are not runnable.' % self.name_string())
+ raise EnvironmentException('Executables created by {0} compiler {1} are not runnable.'.format(self.language, self.name_string()))
+
+ # Interpret the result of the sanity test.
+ # As mentioned above, it is not only a sanity test but also a GPU
+ # architecture detection test.
+ if stde == '':
+ self.detected_cc = stdo
+ else:
+ mlog.debug('cudaGetDeviceCount() returned ' + stde)
def get_compiler_check_args(self):
return super().get_compiler_check_args() + []
@@ -92,56 +157,6 @@ __global__ void kernel (void) {
int main () {{ return 0; }}'''
return self.compiles(t.format(**fargs), env, extra_args, dependencies)
- def sanity_check_impl(self, work_dir, environment, sname, code):
- mlog.debug('Sanity testing ' + self.get_display_language() + ' compiler:', ' '.join(self.exelist))
- mlog.debug('Is cross compiler: %s.' % str(self.is_cross))
-
- extra_flags = []
- source_name = os.path.join(work_dir, sname)
- binname = sname.rsplit('.', 1)[0]
- if self.is_cross:
- binname += '_cross'
- if self.exe_wrapper is None:
- # Linking cross built apps is painful. You can't really
- # tell if you should use -nostdlib or not and for example
- # on OSX the compiler binary is the same but you need
- # a ton of compiler flags to differentiate between
- # arm and x86_64. So just compile.
- extra_flags += self.get_cross_extra_flags(environment, link=False)
- extra_flags += self.get_compile_only_args()
- else:
- extra_flags += self.get_cross_extra_flags(environment, link=True)
- # Is a valid executable output for all toolchains and platforms
- binname += '.exe'
- # Write binary check source
- binary_name = os.path.join(work_dir, binname)
- with open(source_name, 'w') as ofile:
- ofile.write(code)
- # Compile sanity check
- cmdlist = self.exelist + extra_flags + [source_name] + self.get_output_args(binary_name)
- pc, stdo, stde = Popen_safe(cmdlist, cwd=work_dir)
- mlog.debug('Sanity check compiler command line:', ' '.join(cmdlist))
- mlog.debug('Sanity check compile stdout:')
- mlog.debug(stdo)
- mlog.debug('-----\nSanity check compile stderr:')
- mlog.debug(stde)
- mlog.debug('-----')
- if pc.returncode != 0:
- raise EnvironmentException('Compiler {0} can not compile programs.'.format(self.name_string()))
- # Run sanity check
- if self.is_cross:
- if self.exe_wrapper is None:
- # Can't check if the binaries run so we have to assume they do
- return
- cmdlist = self.exe_wrapper + [binary_name]
- else:
- cmdlist = [binary_name]
- mlog.debug('Running test binary command: ' + ' '.join(cmdlist))
- pe = subprocess.Popen(cmdlist)
- pe.wait()
- if pe.returncode != 0:
- raise EnvironmentException('Executables created by {0} compiler {1} are not runnable.'.format(self.language, self.name_string()))
-
@staticmethod
def _cook_link_args(args):
"""
@@ -176,7 +191,7 @@ __global__ void kernel (void) {
return cuda_debug_args[is_debug]
def get_werror_args(self):
- return ['-Werror']
+ return ['-Werror=cross-execution-space-call,deprecated-declarations,reorder']
def get_linker_exelist(self):
return self.exelist[:]
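An illustrative sketch, not part of the patch, of how the detected_cc string
captured by the reworked sanity check could be consumed later: the test binary
prints one "major.minor" compute capability per GPU on stdout, so splitting on
newlines yields the per-device architectures.

    def parse_detected_cc(detected_cc):
        # e.g. '6.1\n7.5\n' on a machine with two GPUs
        return [line for line in detected_cc.split('\n') if line]

    assert parse_detected_cc('6.1\n7.5\n') == ['6.1', '7.5']
    assert parse_detected_cc('') == []   # no GPU, or detection failed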
diff --git a/mesonbuild/coredata.py b/mesonbuild/coredata.py
index 0e11f5c..28e78e5 100644
--- a/mesonbuild/coredata.py
+++ b/mesonbuild/coredata.py
@@ -569,13 +569,13 @@ class CoreData:
self.cross_compilers[lang] = cross_comp
# Native compiler always exist so always add its options.
- new_options_for_build = comp.get_options()
+ new_options_for_build = comp.get_and_default_options(env.properties.build)
preproc_flags_for_build = comp.get_preproc_flags()
if cross_comp is not None:
- new_options_for_host = cross_comp.get_options()
+ new_options_for_host = cross_comp.get_and_default_options(env.properties.host)
preproc_flags_for_host = cross_comp.get_preproc_flags()
else:
- new_options_for_host = comp.get_options()
+ new_options_for_host = comp.get_and_default_options(env.properties.host)
preproc_flags_for_host = comp.get_preproc_flags()
opts_machines_list = [
@@ -588,9 +588,6 @@ class CoreData:
for k, o in new_options.items():
if not k.startswith(optprefix):
raise MesonException('Internal error, %s has incorrect prefix.' % k)
- if k in env.properties[for_machine]:
- # Get from configuration files.
- o.set_value(env.properties[for_machine][k])
if (env.machines.matches_build_machine(for_machine) and
k in env.cmd_line_options):
# TODO think about cross and command-line interface.
diff --git a/mesonbuild/dependencies/__init__.py b/mesonbuild/dependencies/__init__.py
index 71a0bb3..53ff1c9 100644
--- a/mesonbuild/dependencies/__init__.py
+++ b/mesonbuild/dependencies/__init__.py
@@ -18,7 +18,7 @@ from .base import ( # noqa: F401
ExternalDependency, NotFoundDependency, ExternalLibrary, ExtraFrameworkDependency, InternalDependency,
PkgConfigDependency, CMakeDependency, find_external_dependency, get_dep_identifier, packages, _packages_accept_language)
from .dev import GMockDependency, GTestDependency, LLVMDependency, ValgrindDependency
-from .misc import (CoarrayDependency, HDF5Dependency, MPIDependency, OpenMPDependency, Python3Dependency, ThreadDependency, PcapDependency, CupsDependency, LibWmfDependency, LibGCryptDependency)
+from .misc import (CoarrayDependency, HDF5Dependency, MPIDependency, NetCDFDependency, OpenMPDependency, Python3Dependency, ThreadDependency, PcapDependency, CupsDependency, LibWmfDependency, LibGCryptDependency)
from .platform import AppleFrameworks
from .ui import GLDependency, GnuStepDependency, Qt4Dependency, Qt5Dependency, SDL2Dependency, WxDependency, VulkanDependency
@@ -35,6 +35,7 @@ packages.update({
'coarray': CoarrayDependency,
'mpi': MPIDependency,
'hdf5': HDF5Dependency,
+ 'netcdf': NetCDFDependency,
'openmp': OpenMPDependency,
'python3': Python3Dependency,
'threads': ThreadDependency,
@@ -58,5 +59,6 @@ packages.update({
_packages_accept_language.update({
'hdf5',
'mpi',
+ 'netcdf',
'openmp',
})
diff --git a/mesonbuild/dependencies/base.py b/mesonbuild/dependencies/base.py
index f4e47cf..6d41ec5 100644
--- a/mesonbuild/dependencies/base.py
+++ b/mesonbuild/dependencies/base.py
@@ -26,14 +26,14 @@ import textwrap
import platform
import itertools
import ctypes
-from typing import List
+from typing import List, Tuple
from enum import Enum
from pathlib import Path, PurePath
from .. import mlog
from .. import mesonlib
from ..compilers import clib_langs
-from ..environment import BinaryTable, Environment
+from ..environment import BinaryTable, Environment, MachineInfo
from ..mesonlib import MachineChoice, MesonException, OrderedSet, PerMachine
from ..mesonlib import Popen_safe, version_compare_many, version_compare, listify
from ..mesonlib import Version
@@ -916,12 +916,14 @@ class CMakeDependency(ExternalDependency):
# multiple times in the same Meson invocation.
class_cmakebin = PerMachine(None, None, None)
class_cmakevers = PerMachine(None, None, None)
+ class_cmakeinfo = PerMachine(None, None, None)
# We cache all pkg-config subprocess invocations to avoid redundant calls
cmake_cache = {}
# Version string for the minimum CMake version
class_cmake_version = '>=3.4'
# CMake generators to try (empty for no generator)
class_cmake_generators = ['', 'Ninja', 'Unix Makefiles', 'Visual Studio 10 2010']
+ class_working_generator = None
def _gen_exception(self, msg):
return DependencyException('Dependency {} not found: {}'.format(self.name, msg))
@@ -934,6 +936,7 @@ class CMakeDependency(ExternalDependency):
# stored in the pickled coredata and recovered.
self.cmakebin = None
self.cmakevers = None
+ self.cmakeinfo = None
# Dict of CMake variables: '<var_name>': ['list', 'of', 'values']
self.vars = {}
@@ -1009,6 +1012,12 @@ class CMakeDependency(ExternalDependency):
mlog.debug(msg)
return
+ if CMakeDependency.class_cmakeinfo[for_machine] is None:
+ CMakeDependency.class_cmakeinfo[for_machine] = self._get_cmake_info()
+ self.cmakeinfo = CMakeDependency.class_cmakeinfo[for_machine]
+ if self.cmakeinfo is None:
+ raise self._gen_exception('Unable to obtain CMake system information')
+
modules = kwargs.get('modules', [])
cm_path = kwargs.get('cmake_module_path', [])
cm_args = kwargs.get('cmake_args', [])
@@ -1021,6 +1030,8 @@ class CMakeDependency(ExternalDependency):
cm_path = [x if os.path.isabs(x) else os.path.join(environment.get_source_dir(), x) for x in cm_path]
if cm_path:
cm_args += ['-DCMAKE_MODULE_PATH={}'.format(';'.join(cm_path))]
+ if not self._preliminary_find_check(name, cm_path, environment.machines[for_machine]):
+ return
self._detect_dep(name, modules, cm_args)
def __repr__(self):
@@ -1028,6 +1039,166 @@ class CMakeDependency(ExternalDependency):
return s.format(self.__class__.__name__, self.name, self.is_found,
self.version_reqs)
+ def _get_cmake_info(self):
+ mlog.debug("Extracting basic cmake information")
+ res = {}
+
+ # Try different CMake generators since specifying no generator may fail
+ # in cygwin for some reason
+ gen_list = []
+ # First try the last working generator
+ if CMakeDependency.class_working_generator is not None:
+ gen_list += [CMakeDependency.class_working_generator]
+ gen_list += CMakeDependency.class_cmake_generators
+
+ for i in gen_list:
+ mlog.debug('Try CMake generator: {}'.format(i if len(i) > 0 else 'auto'))
+
+ # Prepare options
+ cmake_opts = ['--trace-expand', '.']
+ if len(i) > 0:
+ cmake_opts = ['-G', i] + cmake_opts
+
+ # Run CMake
+ ret1, out1, err1 = self._call_cmake(cmake_opts, 'CMakePathInfo.txt')
+
+ # Current generator was successful
+ if ret1 == 0:
+ CMakeDependency.class_working_generator = i
+ break
+
+ mlog.debug('CMake failed to gather system information for generator {} with error code {}'.format(i, ret1))
+ mlog.debug('OUT:\n{}\n\n\nERR:\n{}\n\n'.format(out1, err1))
+
+ # Check if any generator succeeded
+ if ret1 != 0:
+ return None
+
+ try:
+ # First parse the trace
+ lexer1 = self._lex_trace(err1)
+
+ # Primary pass -- parse all invocations of set
+ for l in lexer1:
+ if l.func == 'set':
+ self._cmake_set(l)
+ except Exception:
+ return None
+
+ # Extract the variables and sanity check them
+ module_paths = sorted(set(self.get_cmake_var('MESON_PATHS_LIST')))
+ module_paths = list(filter(lambda x: os.path.isdir(x), module_paths))
+ archs = self.get_cmake_var('MESON_ARCH_LIST')
+
+ common_paths = ['lib', 'lib32', 'lib64', 'libx32', 'share']
+ for i in archs:
+ common_paths += [os.path.join('lib', i)]
+
+ res = {
+ 'module_paths': module_paths,
+ 'cmake_root': self.get_cmake_var('MESON_CMAKE_ROOT')[0],
+ 'archs': archs,
+ 'common_paths': common_paths
+ }
+
+ mlog.debug(' -- Module search paths: {}'.format(res['module_paths']))
+ mlog.debug(' -- CMake root: {}'.format(res['cmake_root']))
+ mlog.debug(' -- CMake architectures: {}'.format(res['archs']))
+ mlog.debug(' -- CMake lib search paths: {}'.format(res['common_paths']))
+
+ # Reset variables
+ self.vars = {}
+ return res
+
+ @staticmethod
+ @functools.lru_cache(maxsize=None)
+ def _cached_listdir(path: str) -> Tuple[Tuple[str, str], ...]:
+ try:
+ return tuple((x, str(x).lower()) for x in os.listdir(path))
+ except OSError:
+ return ()
+
+ @staticmethod
+ @functools.lru_cache(maxsize=None)
+ def _cached_isdir(path: str) -> bool:
+ try:
+ return os.path.isdir(path)
+ except OSError:
+ return False
+
+ def _preliminary_find_check(self, name: str, module_path: List[str], machine: MachineInfo) -> bool:
+ lname = str(name).lower()
+
+ # Checks <path>, <path>/cmake, <path>/CMake
+ def find_module(path: str) -> bool:
+ for i in [path, os.path.join(path, 'cmake'), os.path.join(path, 'CMake')]:
+ if not self._cached_isdir(i):
+ continue
+
+ for j in ['Find{}.cmake', '{}Config.cmake', '{}-config.cmake']:
+ if os.path.isfile(os.path.join(i, j.format(name))):
+ return True
+ return False
+
+ # Search in <path>/(lib/<arch>|lib*|share) for cmake files
+ def search_lib_dirs(path: str) -> bool:
+ for i in [os.path.join(path, x) for x in self.cmakeinfo['common_paths']]:
+ if not self._cached_isdir(i):
+ continue
+
+ # Check <path>/(lib/<arch>|lib*|share)/cmake/<name>*/
+ cm_dir = os.path.join(i, 'cmake')
+ if self._cached_isdir(cm_dir):
+ content = self._cached_listdir(cm_dir)
+ content = list(filter(lambda x: x[1].startswith(lname), content))
+ for k in content:
+ if find_module(os.path.join(cm_dir, k[0])):
+ return True
+
+ # <path>/(lib/<arch>|lib*|share)/<name>*/
+ # <path>/(lib/<arch>|lib*|share)/<name>*/(cmake|CMake)/
+ content = self._cached_listdir(i)
+ content = list(filter(lambda x: x[1].startswith(lname), content))
+ for k in content:
+ if find_module(os.path.join(i, k[0])):
+ return True
+
+ return False
+
+ # Check the user provided and system module paths
+ for i in module_path + [os.path.join(self.cmakeinfo['cmake_root'], 'Modules')]:
+ if find_module(i):
+ return True
+
+ # Check the system paths
+ for i in self.cmakeinfo['module_paths']:
+ if find_module(i):
+ return True
+
+ if search_lib_dirs(i):
+ return True
+
+ content = self._cached_listdir(i)
+ content = list(filter(lambda x: x[1].startswith(lname), content))
+ for k in content:
+ if search_lib_dirs(os.path.join(i, k[0])):
+ return True
+
+ # Mac framework support
+ if machine.is_darwin():
+ for j in ['{}.framework', '{}.app']:
+ j = j.format(lname)
+ # content holds (name, lowered) tuples; match on the lowered
+ # name but build the path from the real entry name
+ names = [x[0] for x in content if x[1] == j]
+ if names and (find_module(os.path.join(i, names[0], 'Resources')) or find_module(os.path.join(i, names[0], 'Version'))):
+ return True
+
+ # Check the environment path
+ env_path = os.environ.get('{}_DIR'.format(name))
+ if env_path and find_module(env_path):
+ return True
+
+ return False
+
def _detect_dep(self, name: str, modules: List[str], args: List[str]):
# Detect a dependency with CMake using the '--find-package' mode
# and the trace output (stderr)
@@ -1040,19 +1211,26 @@ class CMakeDependency(ExternalDependency):
# Try different CMake generators since specifying no generator may fail
# in cygwin for some reason
- for i in CMakeDependency.class_cmake_generators:
+ gen_list = []
+ # First try the last working generator
+ if CMakeDependency.class_working_generator is not None:
+ gen_list += [CMakeDependency.class_working_generator]
+ gen_list += CMakeDependency.class_cmake_generators
+
+ for i in gen_list:
mlog.debug('Try CMake generator: {}'.format(i if len(i) > 0 else 'auto'))
# Prepare options
- cmake_opts = ['--trace-expand', '-DNAME={}'.format(name)] + args + ['.']
+ cmake_opts = ['--trace-expand', '-DNAME={}'.format(name), '-DARCHS={}'.format(';'.join(self.cmakeinfo['archs']))] + args + ['.']
if len(i) > 0:
cmake_opts = ['-G', i] + cmake_opts
# Run CMake
- ret1, out1, err1 = self._call_cmake(cmake_opts)
+ ret1, out1, err1 = self._call_cmake(cmake_opts, 'CMakeLists.txt')
# Current generator was successful
if ret1 == 0:
+ CMakeDependency.class_working_generator = i
break
mlog.debug('CMake failed for generator {} and package {} with error code {}'.format(i, name, ret1))
@@ -1221,7 +1399,7 @@ class CMakeDependency(ExternalDependency):
def get_cmake_var(self, var):
# Return the value of the CMake variable var or an empty list if var does not exist
- for var in self.vars:
+ if var in self.vars:
return self.vars[var]
return []
@@ -1449,24 +1627,25 @@ set(CMAKE_CXX_ABI_COMPILED TRUE)
set(CMAKE_SIZEOF_VOID_P "{}")
'''.format(os.path.realpath(__file__), ctypes.sizeof(ctypes.c_voidp)))
- def _setup_cmake_dir(self):
+ def _setup_cmake_dir(self, cmake_file: str) -> str:
# Setup the CMake build environment and return the "build" directory
build_dir = '{}/cmake_{}'.format(self.cmake_root_dir, self.name)
os.makedirs(build_dir, exist_ok=True)
# Copy the CMakeLists.txt
cmake_lists = '{}/CMakeLists.txt'.format(build_dir)
- if not os.path.exists(cmake_lists):
- dir_path = os.path.dirname(os.path.realpath(__file__))
- src_cmake = '{}/data/CMakeLists.txt'.format(dir_path)
- shutil.copyfile(src_cmake, cmake_lists)
+ dir_path = os.path.dirname(os.path.realpath(__file__))
+ src_cmake = '{}/data/{}'.format(dir_path, cmake_file)
+ if os.path.exists(cmake_lists):
+ os.remove(cmake_lists)
+ shutil.copyfile(src_cmake, cmake_lists)
self._setup_compiler(build_dir)
self._reset_cmake_cache(build_dir)
return build_dir
- def _call_cmake_real(self, args, env):
- build_dir = self._setup_cmake_dir()
+ def _call_cmake_real(self, args, cmake_file: str, env):
+ build_dir = self._setup_cmake_dir(cmake_file)
cmd = self.cmakebin.get_command() + args
p, out, err = Popen_safe(cmd, env=env, cwd=build_dir)
rc = p.returncode
@@ -1475,7 +1654,7 @@ set(CMAKE_SIZEOF_VOID_P "{}")
return rc, out, err
- def _call_cmake(self, args, env=None):
+ def _call_cmake(self, args, cmake_file: str, env=None):
if env is None:
fenv = env
env = os.environ
@@ -1485,9 +1664,9 @@ set(CMAKE_SIZEOF_VOID_P "{}")
# First check if cached, if not call the real cmake function
cache = CMakeDependency.cmake_cache
- if (self.cmakebin, targs, fenv) not in cache:
- cache[(self.cmakebin, targs, fenv)] = self._call_cmake_real(args, env)
- return cache[(self.cmakebin, targs, fenv)]
+ if (self.cmakebin, targs, cmake_file, fenv) not in cache:
+ cache[(self.cmakebin, targs, cmake_file, fenv)] = self._call_cmake_real(args, cmake_file, env)
+ return cache[(self.cmakebin, targs, cmake_file, fenv)]
@staticmethod
def get_methods():
@@ -2107,6 +2286,7 @@ display_name_map = {
'hdf5': 'HDF5',
'llvm': 'LLVM',
'mpi': 'MPI',
+ 'netcdf': 'NetCDF',
'openmp': 'OpenMP',
'wxwidgets': 'WxWidgets',
}
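A self-contained sketch of the caching pattern behind _cached_listdir() and
_cached_isdir() above: functools.lru_cache memoizes the OS calls so the
preliminary find-check can probe the same directories for many dependencies
without repeated filesystem traffic.

    import functools
    import os
    from typing import Tuple

    @functools.lru_cache(maxsize=None)
    def cached_listdir(path: str) -> Tuple[Tuple[str, str], ...]:
        try:
            # Keep the original and lowercased names so case-insensitive
            # prefix matches can still recover the real directory entry.
            return tuple((x, x.lower()) for x in os.listdir(path))
        except OSError:
            return ()

    cached_listdir('/tmp')               # hits the filesystem
    cached_listdir('/tmp')               # served from the cache
    print(cached_listdir.cache_info())   # hits=1, misses=1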
diff --git a/mesonbuild/dependencies/data/CMakeLists.txt b/mesonbuild/dependencies/data/CMakeLists.txt
index 6f51681..64f5b23 100644
--- a/mesonbuild/dependencies/data/CMakeLists.txt
+++ b/mesonbuild/dependencies/data/CMakeLists.txt
@@ -1,16 +1,5 @@
cmake_minimum_required(VERSION ${CMAKE_MAJOR_VERSION}.${CMAKE_MINOR_VERSION}.${CMAKE_PATCH_VERSION} )
-# Inspired by CMakeDetermineCompilerABI.cmake to set CMAKE_LIBRARY_ARCHITECTURE
-set(LIB_ARCH_LIST)
-if(CMAKE_LIBRARY_ARCHITECTURE_REGEX)
- file(GLOB implicit_dirs RELATIVE /lib /lib/*-linux-gnu* )
- foreach(dir ${implicit_dirs})
- if("${dir}" MATCHES "${CMAKE_LIBRARY_ARCHITECTURE_REGEX}")
- list(APPEND LIB_ARCH_LIST "${dir}")
- endif()
- endforeach()
-endif()
-
set(PACKAGE_FOUND FALSE)
set(_packageName "${NAME}")
string(TOUPPER "${_packageName}" PACKAGE_NAME)
@@ -18,12 +7,13 @@ string(TOUPPER "${_packageName}" PACKAGE_NAME)
while(TRUE)
find_package("${NAME}" QUIET)
- if(${_packageName}_FOUND OR ${PACKAGE_NAME}_FOUND OR "${LIB_ARCH_LIST}" STREQUAL "")
+ # ARCHS has to be set via the CMD interface
+ if(${_packageName}_FOUND OR ${PACKAGE_NAME}_FOUND OR "${ARCHS}" STREQUAL "")
break()
endif()
- list(GET LIB_ARCH_LIST 0 CMAKE_LIBRARY_ARCHITECTURE)
- list(REMOVE_AT LIB_ARCH_LIST 0)
+ list(GET ARCHS 0 CMAKE_LIBRARY_ARCHITECTURE)
+ list(REMOVE_AT ARCHS 0)
endwhile()
if(${_packageName}_FOUND OR ${PACKAGE_NAME}_FOUND)
diff --git a/mesonbuild/dependencies/data/CMakePathInfo.txt b/mesonbuild/dependencies/data/CMakePathInfo.txt
new file mode 100644
index 0000000..713c2da
--- /dev/null
+++ b/mesonbuild/dependencies/data/CMakePathInfo.txt
@@ -0,0 +1,29 @@
+cmake_minimum_required(VERSION ${CMAKE_MAJOR_VERSION}.${CMAKE_MINOR_VERSION}.${CMAKE_PATCH_VERSION})
+
+set(TMP_PATHS_LIST)
+list(APPEND TMP_PATHS_LIST ${CMAKE_PREFIX_PATH})
+list(APPEND TMP_PATHS_LIST ${CMAKE_FRAMEWORK_PATH})
+list(APPEND TMP_PATHS_LIST ${CMAKE_APPBUNDLE_PATH})
+list(APPEND TMP_PATHS_LIST $ENV{CMAKE_PREFIX_PATH})
+list(APPEND TMP_PATHS_LIST $ENV{CMAKE_FRAMEWORK_PATH})
+list(APPEND TMP_PATHS_LIST $ENV{CMAKE_APPBUNDLE_PATH})
+list(APPEND TMP_PATHS_LIST ${CMAKE_SYSTEM_PREFIX_PATH})
+list(APPEND TMP_PATHS_LIST ${CMAKE_SYSTEM_FRAMEWORK_PATH})
+list(APPEND TMP_PATHS_LIST ${CMAKE_SYSTEM_APPBUNDLE_PATH})
+
+set(LIB_ARCH_LIST)
+if(CMAKE_LIBRARY_ARCHITECTURE_REGEX)
+ file(GLOB implicit_dirs RELATIVE /lib /lib/*-linux-gnu* )
+ foreach(dir ${implicit_dirs})
+ if("${dir}" MATCHES "${CMAKE_LIBRARY_ARCHITECTURE_REGEX}")
+ list(APPEND LIB_ARCH_LIST "${dir}")
+ endif()
+ endforeach()
+endif()
+
+# "Export" these variables:
+set(MESON_ARCH_LIST ${LIB_ARCH_LIST})
+set(MESON_PATHS_LIST ${TMP_PATHS_LIST})
+set(MESON_CMAKE_ROOT ${CMAKE_ROOT})
+
+message(STATUS ${TMP_PATHS_LIST})
diff --git a/mesonbuild/dependencies/misc.py b/mesonbuild/dependencies/misc.py
index db83422..df3a053 100644
--- a/mesonbuild/dependencies/misc.py
+++ b/mesonbuild/dependencies/misc.py
@@ -117,6 +117,35 @@ class HDF5Dependency(ExternalDependency):
except Exception:
pass
+class NetCDFDependency(ExternalDependency):
+
+ def __init__(self, environment, kwargs):
+ language = kwargs.get('language', 'c')
+ super().__init__('netcdf', environment, language, kwargs)
+ kwargs['required'] = False
+ kwargs['silent'] = True
+ self.is_found = False
+
+ pkgconfig_files = ['netcdf']
+
+ if language not in ('c', 'cpp', 'fortran'):
+ raise DependencyException('Language {} is not supported with NetCDF.'.format(language))
+
+ if language == 'fortran':
+ pkgconfig_files.append('netcdf-fortran')
+
+ self.compile_args = []
+ self.link_args = []
+ self.pcdep = []
+ for pkg in pkgconfig_files:
+ pkgdep = PkgConfigDependency(pkg, environment, kwargs, language=self.language)
+ if pkgdep.found():
+ self.compile_args.extend(pkgdep.get_compile_args())
+ self.link_args.extend(pkgdep.get_link_args())
+ self.version = pkgdep.get_version()
+ self.is_found = True
+ self.pcdep.append(pkgdep)
+
class MPIDependency(ExternalDependency):
def __init__(self, environment, kwargs):
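A small sketch, separate from the classes above, pinning down which pkg-config
files the new NetCDF dependency consults per language (the helper name is
hypothetical; the logic mirrors the constructor):

    def netcdf_pkgconfig_files(language='c'):
        if language not in ('c', 'cpp', 'fortran'):
            raise ValueError('Language {} is not supported with NetCDF.'.format(language))
        files = ['netcdf']
        if language == 'fortran':
            files.append('netcdf-fortran')
        return files

    assert netcdf_pkgconfig_files('c') == ['netcdf']
    assert netcdf_pkgconfig_files('fortran') == ['netcdf', 'netcdf-fortran']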
diff --git a/mesonbuild/envconfig.py b/mesonbuild/envconfig.py
new file mode 100644
index 0000000..f2510c1
--- /dev/null
+++ b/mesonbuild/envconfig.py
@@ -0,0 +1,418 @@
+# Copyright 2012-2016 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import configparser, os, shlex, subprocess
+import typing
+
+from . import mesonlib
+from .mesonlib import EnvironmentException, MachineChoice, PerMachine
+from . import mlog
+
+
+# These classes contain all the data pulled from configuration files (native
+# and cross file currently), and also assist with reading environment
+# variables.
+#
+# At this time there isn't an ironclad difference between this and other
+# sources of state like `coredata`. But one rough guide is that what is in
+# `coredata` is the *output* of the configuration process: the final
+# decisions after tests. This, on the other hand, has *inputs*. The config
+# files are parsed, but otherwise minimally transformed. When more complex
+# fallbacks (environment detection) exist, they are defined elsewhere as
+# functions that construct instances of these classes.
+
+
+known_cpu_families = (
+ 'aarch64',
+ 'arc',
+ 'arm',
+ 'e2k',
+ 'ia64',
+ 'mips',
+ 'mips64',
+ 'parisc',
+ 'ppc',
+ 'ppc64',
+ 'riscv32',
+ 'riscv64',
+ 'rl78',
+ 'rx',
+ 's390x',
+ 'sparc',
+ 'sparc64',
+ 'x86',
+ 'x86_64'
+)
+
+class MesonConfigFile:
+ @classmethod
+ def parse_datafile(cls, filename):
+ config = configparser.ConfigParser()
+ try:
+ with open(filename, 'r') as f:
+ config.read_file(f, filename)
+ except FileNotFoundError:
+ raise EnvironmentException('File not found: %s.' % filename)
+ return cls.from_config_parser(config)
+
+ @classmethod
+ def from_config_parser(cls, parser: configparser.ConfigParser):
+ out = {}
+ # This is a bit hackish at the moment.
+ for s in parser.sections():
+ section = {}
+ for entry in parser[s]:
+ value = parser[s][entry]
+ # Windows paths...
+ value = value.replace('\\', '\\\\')
+ if ' ' in entry or '\t' in entry or "'" in entry or '"' in entry:
+ raise EnvironmentException('Malformed variable name %s in cross file.' % entry)
+ try:
+ res = eval(value, {'__builtins__': None}, {'true': True, 'false': False})
+ except Exception:
+ raise EnvironmentException('Malformed value in cross file variable %s.' % entry)
+
+ for i in (res if isinstance(res, list) else [res]):
+ if not isinstance(i, (str, int, bool)):
+ raise EnvironmentException('Malformed value in cross file variable %s.' % entry)
+
+ section[entry] = res
+
+ out[s] = section
+ return out
+
+class HasEnvVarFallback:
+ """
+ A tiny class to indicate that this class contains data that can be
+ initialized from either a config file or environment variables. The
+ `fallback` field says whether env vars should be used. Downstream logic
+ (e.g. subclass methods) can check it to decide what to do, since env vars
+ are currently lazily decoded.
+
+ Frankly, this is a pretty silly class at the moment. The hope is that the
+ way we deal with environment variables will become more structured, and
+ this can be a starting point.
+ """
+ def __init__(self, fallback = True):
+ self.fallback = fallback
+
+class Properties(HasEnvVarFallback):
+ def __init__(
+ self,
+ properties: typing.Optional[typing.Dict[str, typing.Union[str, typing.List[str]]]] = None,
+ fallback = True):
+ super().__init__(fallback)
+ self.properties = properties or {}
+
+ def has_stdlib(self, language):
+ return language + '_stdlib' in self.properties
+
+ def get_stdlib(self, language):
+ return self.properties[language + '_stdlib']
+
+ def get_root(self):
+ return self.properties.get('root', None)
+
+ def get_sys_root(self):
+ return self.properties.get('sys_root', None)
+
+ def __eq__(self, other):
+ if isinstance(other, type(self)):
+ return self.properties == other.properties
+ return NotImplemented
+
+ # TODO consider removing so Properties is less freeform
+ def __getitem__(self, key):
+ return self.properties[key]
+
+ # TODO consider removing so Properties is less freeform
+ def __contains__(self, item):
+ return item in self.properties
+
+ # TODO consider removing, for same reasons as above
+ def get(self, key, default=None):
+ return self.properties.get(key, default)
+
+class MachineInfo:
+ def __init__(self, system, cpu_family, cpu, endian):
+ self.system = system
+ self.cpu_family = cpu_family
+ self.cpu = cpu
+ self.endian = endian
+
+ def __eq__(self, other):
+ if self.__class__ is not other.__class__:
+ return NotImplemented
+ return \
+ self.system == other.system and \
+ self.cpu_family == other.cpu_family and \
+ self.cpu == other.cpu and \
+ self.endian == other.endian
+
+ def __ne__(self, other):
+ if self.__class__ is not other.__class__:
+ return NotImplemented
+ return not self.__eq__(other)
+
+ def __repr__(self):
+ return '<MachineInfo: {} {} ({})>'.format(self.system, self.cpu_family, self.cpu)
+
+ @staticmethod
+ def from_literal(literal):
+ minimum_literal = {'cpu', 'cpu_family', 'endian', 'system'}
+ if not minimum_literal <= set(literal):
+ raise EnvironmentException(
+ 'Machine info is currently {}\n'.format(literal) +
+ 'but is missing {}.'.format(minimum_literal - set(literal)))
+
+ cpu_family = literal['cpu_family']
+ if cpu_family not in known_cpu_families:
+ mlog.warning('Unknown CPU family %s, please report this at https://github.com/mesonbuild/meson/issues/new' % cpu_family)
+
+ endian = literal['endian']
+ if endian not in ('little', 'big'):
+ mlog.warning('Unknown endian %s' % endian)
+
+ return MachineInfo(
+ literal['system'],
+ cpu_family,
+ literal['cpu'],
+ endian)
+
+ def is_windows(self):
+ """
+ Machine is windows?
+ """
+ return self.system == 'windows'
+
+ def is_cygwin(self):
+ """
+ Machine is cygwin?
+ """
+ return self.system == 'cygwin'
+
+ def is_linux(self):
+ """
+ Machine is linux?
+ """
+ return self.system == 'linux'
+
+ def is_darwin(self):
+ """
+ Machine is Darwin (iOS/OS X)?
+ """
+ return self.system in ('darwin', 'ios')
+
+ def is_android(self):
+ """
+ Machine is Android?
+ """
+ return self.system == 'android'
+
+ def is_haiku(self):
+ """
+ Machine is Haiku?
+ """
+ return self.system == 'haiku'
+
+ def is_openbsd(self):
+ """
+ Machine is OpenBSD?
+ """
+ return self.system == 'openbsd'
+
+ # Various prefixes and suffixes for import libraries, shared libraries,
+ # static libraries, and executables.
+ # Versioning is added to these names in the backends as-needed.
+
+ def get_exe_suffix(self):
+ if self.is_windows() or self.is_cygwin():
+ return 'exe'
+ else:
+ return ''
+
+ def get_object_suffix(self):
+ if self.is_windows():
+ return 'obj'
+ else:
+ return 'o'
+
+ def libdir_layout_is_win(self):
+ return self.is_windows() \
+ or self.is_cygwin()
+
+class PerMachineDefaultable(PerMachine):
+ """Extends `PerMachine` with the ability to default from `None`s.
+ """
+ def __init__(self):
+ super().__init__(None, None, None)
+
+ def default_missing(self):
+ """Default host to buid and target to host.
+
+ This allows just specifying nothing in the native case, just host in the
+ cross non-compiler case, and just target in the native-built
+ cross-compiler case.
+ """
+ if self.host is None:
+ self.host = self.build
+ if self.target is None:
+ self.target = self.host
+
+ def miss_defaulting(self):
+ """Unset definition duplicated from their previous to None
+
+ This is the inverse of ''default_missing''. By removing defaulted
+ machines, we can elaborate the original and then redefault them and thus
+ avoid repeating the elaboration explicitly.
+ """
+ if self.target == self.host:
+ self.target = None
+ if self.host == self.build:
+ self.host = None
+
+class MachineInfos(PerMachineDefaultable):
+ def matches_build_machine(self, machine: MachineChoice):
+ return self.build == self[machine]
+
+class BinaryTable(HasEnvVarFallback):
+ def __init__(
+ self,
+ binaries: typing.Optional[typing.Dict[str, typing.Union[str, typing.List[str]]]] = None,
+ fallback = True):
+ super().__init__(fallback)
+ self.binaries = binaries or {}
+ for name, command in self.binaries.items():
+ if not isinstance(command, (list, str)):
+ # TODO generalize message
+ raise mesonlib.MesonException(
+ 'Invalid type {!r} for binary {!r} in cross file'
+ ''.format(command, name))
+
+ # Map from language identifiers to environment variables.
+ evarMap = {
+ # Compilers
+ 'c': 'CC',
+ 'cpp': 'CXX',
+ 'cs': 'CSC',
+ 'd': 'DC',
+ 'fortran': 'FC',
+ 'objc': 'OBJC',
+ 'objcpp': 'OBJCXX',
+ 'rust': 'RUSTC',
+ 'vala': 'VALAC',
+
+ # Binutils
+ 'strip': 'STRIP',
+ 'ar': 'AR',
+ 'windres': 'WINDRES',
+
+ 'cmake': 'CMAKE',
+ 'qmake': 'QMAKE',
+ 'pkgconfig': 'PKG_CONFIG',
+ }
+
+ @classmethod
+ def detect_ccache(cls):
+ try:
+ has_ccache = subprocess.call(['ccache', '--version'], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ except OSError:
+ has_ccache = 1
+ if has_ccache == 0:
+ cmdlist = ['ccache']
+ else:
+ cmdlist = []
+ return cmdlist
+
+ @classmethod
+ def _warn_about_lang_pointing_to_cross(cls, compiler_exe, evar):
+ evar_str = os.environ.get(evar, 'WHO_WOULD_CALL_THEIR_COMPILER_WITH_THIS_NAME')
+ if evar_str == compiler_exe:
+ mlog.warning('''Env var %s seems to point to the cross compiler.
+This is probably wrong, it should always point to the native compiler.''' % evar)
+
+ @classmethod
+ def parse_entry(cls, entry):
+ compiler = mesonlib.stringlistify(entry)
+ # Ensure ccache exists and remove it if it doesn't
+ if compiler[0] == 'ccache':
+ compiler = compiler[1:]
+ ccache = cls.detect_ccache()
+ else:
+ ccache = []
+ # Return value has to be a list of compiler 'choices'
+ return compiler, ccache
+
+ def lookup_entry(self, name):
+ """Lookup binary
+
+ Returns the command with args as a list if found, or `None` if nothing
+ is found.
+
+ First tries looking in explicit map, then tries environment variable.
+ """
+ # Try explicit map, don't fall back on env var
+ command = self.binaries.get(name)
+ if command is not None:
+ command = mesonlib.stringlistify(command)
+ # Relies on there being no "" env var
+ evar = self.evarMap.get(name, "")
+ self._warn_about_lang_pointing_to_cross(command[0], evar)
+ elif self.fallback:
+ # Relies on there being no "" env var
+ evar = self.evarMap.get(name, "")
+ command = os.environ.get(evar)
+ if command is not None:
+ command = shlex.split(command)
+ return command
+
+class Directories:
+
+ """Data class that holds information about directories for native and cross
+ builds.
+ """
+
+ def __init__(self, bindir: typing.Optional[str] = None, datadir: typing.Optional[str] = None,
+ includedir: typing.Optional[str] = None, infodir: typing.Optional[str] = None,
+ libdir: typing.Optional[str] = None, libexecdir: typing.Optional[str] = None,
+ localedir: typing.Optional[str] = None, localstatedir: typing.Optional[str] = None,
+ mandir: typing.Optional[str] = None, prefix: typing.Optional[str] = None,
+ sbindir: typing.Optional[str] = None, sharedstatedir: typing.Optional[str] = None,
+ sysconfdir: typing.Optional[str] = None):
+ self.bindir = bindir
+ self.datadir = datadir
+ self.includedir = includedir
+ self.infodir = infodir
+ self.libdir = libdir
+ self.libexecdir = libexecdir
+ self.localedir = localedir
+ self.localstatedir = localstatedir
+ self.mandir = mandir
+ self.prefix = prefix
+ self.sbindir = sbindir
+ self.sharedstatedir = sharedstatedir
+ self.sysconfdir = sysconfdir
+
+ def __contains__(self, key: str) -> bool:
+ return hasattr(self, key)
+
+ def __getitem__(self, key: str) -> str:
+ return getattr(self, key)
+
+ def __setitem__(self, key: str, value: typing.Optional[str]) -> None:
+ setattr(self, key, value)
+
+ def __iter__(self) -> typing.Iterator[typing.Tuple[str, str]]:
+ return iter(self.__dict__.items())
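A hedged end-to-end sketch (file name and contents are hypothetical) of how
the pieces of this new module fit together: parse a cross file, wrap the
sections in Properties and BinaryTable with env-var fallback disabled, and
look up a binary, roughly as Environment now does.

    import textwrap
    from mesonbuild.envconfig import BinaryTable, MesonConfigFile, Properties

    with open('armhf-cross.txt', 'w') as f:
        f.write(textwrap.dedent('''\
            [binaries]
            c = '/usr/bin/arm-linux-gnueabihf-gcc'

            [properties]
            c_args = ['-DCROSS=1', '-O2']
        '''))

    config = MesonConfigFile.parse_datafile('armhf-cross.txt')
    props = Properties(config.get('properties', {}), False)
    binaries = BinaryTable(config.get('binaries', {}), False)

    print(binaries.lookup_entry('c'))   # ['/usr/bin/arm-linux-gnueabihf-gcc']
    print(props['c_args'])              # ['-DCROSS=1', '-O2']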
diff --git a/mesonbuild/environment.py b/mesonbuild/environment.py
index b2cc657..3bfdfd7 100644
--- a/mesonbuild/environment.py
+++ b/mesonbuild/environment.py
@@ -19,10 +19,14 @@ from . import coredata
from .linkers import ArLinker, ArmarLinker, VisualStudioLinker, DLinker, CcrxLinker
from . import mesonlib
from .mesonlib import (
- MesonException, EnvironmentException, MachineChoice, PerMachine, Popen_safe
+ MesonException, EnvironmentException, MachineChoice, PerMachine, Popen_safe,
)
from . import mlog
+from .envconfig import (
+ BinaryTable, Directories, MachineInfo, MachineInfos, MesonConfigFile,
+ PerMachineDefaultable, Properties, known_cpu_families,
+)
from . import compilers
from .compilers import (
CompilerType,
@@ -79,39 +83,6 @@ from .compilers import (
build_filename = 'meson.build'
-known_cpu_families = (
- 'aarch64',
- 'arc',
- 'arm',
- 'e2k',
- 'ia64',
- 'mips',
- 'mips64',
- 'parisc',
- 'ppc',
- 'ppc64',
- 'riscv32',
- 'riscv64',
- 'rl78',
- 'rx',
- 's390x',
- 'sparc',
- 'sparc64',
- 'x86',
- 'x86_64'
-)
-
-# Environment variables that each lang uses.
-cflags_mapping = {'c': 'CFLAGS',
- 'cpp': 'CXXFLAGS',
- 'cu': 'CUFLAGS',
- 'objc': 'OBJCFLAGS',
- 'objcpp': 'OBJCXXFLAGS',
- 'fortran': 'FFLAGS',
- 'd': 'DFLAGS',
- 'vala': 'VALAFLAGS'}
-
-
def detect_gcovr(version='3.1', log=False):
gcovr_exe = 'gcovr'
try:
@@ -310,6 +281,37 @@ def detect_msys2_arch():
return os.environ['MSYSTEM_CARCH']
return None
+def detect_machine_info(compilers = None) -> MachineInfo:
+ """Detect the machine we're running on
+
+ If compilers are not provided, we cannot know as much. None out those
+ fields to avoid accidentally depending on partial knowledge. The
+ underlying ''detect_*'' method can be called to explicitly use the
+ partial information.
+ """
+ return MachineInfo(
+ detect_system(),
+ detect_cpu_family(compilers) if compilers is not None else None,
+ detect_cpu(compilers) if compilers is not None else None,
+ sys.byteorder)
+
+# TODO make this compare two `MachineInfo`s purely. How important is the
+# `detect_cpu_family({})` distinction? It is the one impediment to that.
+def machine_info_can_run(machine_info: MachineInfo):
+ """Whether we can run binaries for this machine on the current machine.
+
+ Can almost always run 32-bit binaries on 64-bit natively if the host
+ and build systems are the same. We don't pass any compilers to
+ detect_cpu_family() here because we always want to know the OS
+ architecture, not what the compiler environment tells us.
+ """
+ if machine_info.system != detect_system():
+ return False
+ true_build_cpu_family = detect_cpu_family({})
+ return \
+ (machine_info.cpu_family == true_build_cpu_family) or \
+ ((true_build_cpu_family == 'x86_64') and (machine_info.cpu_family == 'x86'))
+
def search_version(text):
# Usually of the type 4.1.4 but compiler output may contain
# stuff like this:
@@ -367,18 +369,19 @@ class Environment:
self.machines = MachineInfos()
# Will be fully initialized later using compilers later.
- self.machines.detect_build()
+ self.detect_build_machine()
# Similar to coredata.compilers and build.compilers, but lower level in
# that there is no meta data, only names/paths.
self.binaries = PerMachineDefaultable()
+ # Misc other properties about each machine.
+ self.properties = PerMachineDefaultable()
+
# Just uses hard-coded defaults and environment variables. Might be
# overwritten by a native file.
- self.binaries.build = BinaryTable({})
-
- # Misc other properties about each machine.
- self.properties = PerMachine(Properties(), Properties(), Properties())
+ self.binaries.build = BinaryTable()
+ self.properties.build = Properties()
# Store paths for native and cross build files. There is no target
# machine information here because nothing is installed for the target
@@ -393,7 +396,7 @@ class Environment:
if self.coredata.cross_file is not None:
config = MesonConfigFile.parse_datafile(self.coredata.cross_file)
- self.properties.host.properties = config.get('properties', {})
+ self.properties.host = Properties(config.get('properties', {}), False)
self.binaries.host = BinaryTable(config.get('binaries', {}), False)
if 'host_machine' in config:
self.machines.host = MachineInfo.from_literal(config['host_machine'])
@@ -403,6 +406,7 @@ class Environment:
self.machines.default_missing()
self.binaries.default_missing()
+ self.properties.default_missing()
self.paths.default_missing()
exe_wrapper = self.binaries.host.lookup_entry('exe_wrapper')
@@ -1173,6 +1177,9 @@ class Environment:
self._handle_exceptions(popen_exceptions, linkers, 'linker')
raise EnvironmentException('Unknown static linker "%s"' % ' '.join(linkers))
+ def detect_build_machine(self, compilers = None):
+ self.machines.build = detect_machine_info(compilers)
+
def get_source_dir(self):
return self.source_dir
@@ -1247,386 +1254,10 @@ class Environment:
value = self.properties[for_machine].get('needs_exe_wrapper', None)
if value is not None:
return value
- return not self.machines[for_machine].can_run()
+ return not machine_info_can_run(self.machines[for_machine])
def get_exe_wrapper(self):
if not self.need_exe_wrapper():
from .dependencies import EmptyExternalProgram
return EmptyExternalProgram()
return self.exe_wrapper
-
-class MesonConfigFile:
- @classmethod
- def parse_datafile(cls, filename):
- config = configparser.ConfigParser()
- try:
- with open(filename, 'r') as f:
- config.read_file(f, filename)
- except FileNotFoundError:
- raise EnvironmentException('File not found: %s.' % filename)
- return cls.from_config_parser(config)
-
- @classmethod
- def from_config_parser(cls, parser: configparser.ConfigParser):
- out = {}
- # This is a bit hackish at the moment.
- for s in parser.sections():
- section = {}
- for entry in parser[s]:
- value = parser[s][entry]
- # Windows paths...
- value = value.replace('\\', '\\\\')
- if ' ' in entry or '\t' in entry or "'" in entry or '"' in entry:
- raise EnvironmentException('Malformed variable name %s in cross file..' % entry)
- try:
- res = eval(value, {'__builtins__': None}, {'true': True, 'false': False})
- except Exception:
- raise EnvironmentException('Malformed value in cross file variable %s.' % entry)
-
- for i in (res if isinstance(res, list) else [res]):
- if not isinstance(i, (str, int, bool)):
- raise EnvironmentException('Malformed value in cross file variable %s.' % entry)
-
- section[entry] = res
-
- out[s] = section
- return out
-
-class Properties:
- def __init__(
- self,
- properties: typing.Optional[typing.Dict[str, typing.Union[str, typing.List[str]]]] = None):
- self.properties = properties or {}
-
- def has_stdlib(self, language):
- return language + '_stdlib' in self.properties
-
- def get_stdlib(self, language):
- return self.properties[language + '_stdlib']
-
- def get_root(self):
- return self.properties.get('root', None)
-
- def get_sys_root(self):
- return self.properties.get('sys_root', None)
-
- def __eq__(self, other):
- if isinstance(other, type(self)):
- return self.properties == other.properties
- return NotImplemented
-
- # TODO consider removing so Properties is less freeform
- def __getitem__(self, key):
- return self.properties[key]
-
- # TODO consider removing so Properties is less freeform
- def __contains__(self, item):
- return item in self.properties
-
- # TODO consider removing, for same reasons as above
- def get(self, key, default=None):
- return self.properties.get(key, default)
-
-class MachineInfo:
- def __init__(self, system, cpu_family, cpu, endian):
- self.system = system
- self.cpu_family = cpu_family
- self.cpu = cpu
- self.endian = endian
-
- def __eq__(self, other):
- if self.__class__ is not other.__class__:
- return NotImplemented
- return \
- self.system == other.system and \
- self.cpu_family == other.cpu_family and \
- self.cpu == other.cpu and \
- self.endian == other.endian
-
- def __ne__(self, other):
- if self.__class__ is not other.__class__:
- return NotImplemented
- return not self.__eq__(other)
-
- def __repr__(self):
- return '<MachineInfo: {} {} ({})>'.format(self.system, self.cpu_family, self.cpu)
-
- @staticmethod
- def detect(compilers = None):
- """Detect the machine we're running on
-
- If compilers are not provided, we cannot know as much. None out those
- fields to avoid accidentally depending on partial knowledge. The
- underlying ''detect_*'' method can be called to explicitly use the
- partial information.
- """
- return MachineInfo(
- detect_system(),
- detect_cpu_family(compilers) if compilers is not None else None,
- detect_cpu(compilers) if compilers is not None else None,
- sys.byteorder)
-
- @staticmethod
- def from_literal(literal):
- minimum_literal = {'cpu', 'cpu_family', 'endian', 'system'}
- if set(literal) < minimum_literal:
- raise EnvironmentException(
- 'Machine info is currently {}\n'.format(literal) +
- 'but is missing {}.'.format(minimum_literal - set(literal)))
-
- cpu_family = literal['cpu_family']
- if cpu_family not in known_cpu_families:
- mlog.warning('Unknown CPU family %s, please report this at https://github.com/mesonbuild/meson/issues/new' % cpu_family)
-
- endian = literal['endian']
- if endian not in ('little', 'big'):
- mlog.warning('Unknown endian %s' % endian)
-
- return MachineInfo(
- literal['system'],
- cpu_family,
- literal['cpu'],
- endian)
-
- def is_windows(self):
- """
- Machine is windows?
- """
- return self.system == 'windows'
-
- def is_cygwin(self):
- """
- Machine is cygwin?
- """
- return self.system == 'cygwin'
-
- def is_linux(self):
- """
- Machine is linux?
- """
- return self.system == 'linux'
-
- def is_darwin(self):
- """
- Machine is Darwin (iOS/OS X)?
- """
- return self.system in ('darwin', 'ios')
-
- def is_android(self):
- """
- Machine is Android?
- """
- return self.system == 'android'
-
- def is_haiku(self):
- """
- Machine is Haiku?
- """
- return self.system == 'haiku'
-
- def is_openbsd(self):
- """
- Machine is OpenBSD?
- """
- return self.system == 'openbsd'
-
- # Various prefixes and suffixes for import libraries, shared libraries,
- # static libraries, and executables.
- # Versioning is added to these names in the backends as-needed.
-
- def get_exe_suffix(self):
- if self.is_windows() or self.is_cygwin():
- return 'exe'
- else:
- return ''
-
- def get_object_suffix(self):
- if self.is_windows():
- return 'obj'
- else:
- return 'o'
-
- def libdir_layout_is_win(self):
- return self.is_windows() \
- or self.is_cygwin()
-
- # TODO make this compare two `MachineInfo`s purely. How important is the
- # `detect_cpu_family({})` distinction? It is the one impediment to that.
- def can_run(self):
- """Whether we can run binaries for this machine on the current machine.
-
- Can almost always run 32-bit binaries on 64-bit natively if the host
- and build systems are the same. We don't pass any compilers to
- detect_cpu_family() here because we always want to know the OS
- architecture, not what the compiler environment tells us.
- """
- if self.system != detect_system():
- return False
- true_build_cpu_family = detect_cpu_family({})
- return \
- (self.cpu_family == true_build_cpu_family) or \
- ((true_build_cpu_family == 'x86_64') and (self.cpu_family == 'x86'))
-
-class PerMachineDefaultable(PerMachine):
- """Extends `PerMachine` with the ability to default from `None`s.
- """
- def __init__(self):
- super().__init__(None, None, None)
-
- def default_missing(self):
- """Default host to buid and target to host.
-
- This allows just specifying nothing in the native case, just host in the
- cross non-compiler case, and just target in the native-built
- cross-compiler case.
- """
- if self.host is None:
- self.host = self.build
- if self.target is None:
- self.target = self.host
-
- def miss_defaulting(self):
- """Unset definition duplicated from their previous to None
-
- This is the inverse of ''default_missing''. By removing defaulted
- machines, we can elaborate the original and then redefault them and thus
- avoid repeating the elaboration explicitly.
- """
- if self.target == self.host:
- self.target = None
- if self.host == self.build:
- self.host = None
-
-class MachineInfos(PerMachineDefaultable):
- def detect_build(self, compilers = None):
- self.build = MachineInfo.detect(compilers)
-
- def matches_build_machine(self, machine: MachineChoice):
- return self.build == self[machine]
-
-class BinaryTable:
- def __init__(self, binaries = {}, fallback = True):
- self.binaries = binaries
- self.fallback = fallback
- for name, command in self.binaries.items():
- if not isinstance(command, (list, str)):
- # TODO generalize message
- raise mesonlib.MesonException(
- 'Invalid type {!r} for binary {!r} in cross file'
- ''.format(command, name))
-
- # Map from language identifiers to environment variables.
- evarMap = {
- # Compilers
- 'c': 'CC',
- 'cpp': 'CXX',
- 'cs': 'CSC',
- 'd': 'DC',
- 'fortran': 'FC',
- 'objc': 'OBJC',
- 'objcpp': 'OBJCXX',
- 'rust': 'RUSTC',
- 'vala': 'VALAC',
-
- # Binutils
- 'strip': 'STRIP',
- 'ar': 'AR',
- 'windres': 'WINDRES',
-
- 'cmake': 'CMAKE',
- 'qmake': 'QMAKE',
- 'pkgconfig': 'PKG_CONFIG',
- }
-
- @classmethod
- def detect_ccache(cls):
- try:
- has_ccache = subprocess.call(['ccache', '--version'], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
- except OSError:
- has_ccache = 1
- if has_ccache == 0:
- cmdlist = ['ccache']
- else:
- cmdlist = []
- return cmdlist
-
- @classmethod
- def _warn_about_lang_pointing_to_cross(cls, compiler_exe, evar):
- evar_str = os.environ.get(evar, 'WHO_WOULD_CALL_THEIR_COMPILER_WITH_THIS_NAME')
- if evar_str == compiler_exe:
- mlog.warning('''Env var %s seems to point to the cross compiler.
-This is probably wrong, it should always point to the native compiler.''' % evar)
-
- @classmethod
- def parse_entry(cls, entry):
- compiler = mesonlib.stringlistify(entry)
- # Ensure ccache exists and remove it if it doesn't
- if compiler[0] == 'ccache':
- compiler = compiler[1:]
- ccache = cls.detect_ccache()
- else:
- ccache = []
- # Return value has to be a list of compiler 'choices'
- return compiler, ccache
-
- def lookup_entry(self, name):
- """Lookup binary
-
- Returns command with args as list if found, Returns `None` if nothing is
- found.
-
- First tries looking in explicit map, then tries environment variable.
- """
- # Try explict map, don't fall back on env var
- command = self.binaries.get(name)
- if command is not None:
- command = mesonlib.stringlistify(command)
- # Relies on there being no "" env var
- evar = self.evarMap.get(name, "")
- self._warn_about_lang_pointing_to_cross(command[0], evar)
- elif self.fallback:
- # Relies on there being no "" env var
- evar = self.evarMap.get(name, "")
- command = os.environ.get(evar)
- if command is not None:
- command = shlex.split(command)
- return command
-
-class Directories:
-
- """Data class that holds information about directories for native and cross
- builds.
- """
-
- def __init__(self, bindir: typing.Optional[str] = None, datadir: typing.Optional[str] = None,
- includedir: typing.Optional[str] = None, infodir: typing.Optional[str] = None,
- libdir: typing.Optional[str] = None, libexecdir: typing.Optional[str] = None,
- localedir: typing.Optional[str] = None, localstatedir: typing.Optional[str] = None,
- mandir: typing.Optional[str] = None, prefix: typing.Optional[str] = None,
- sbindir: typing.Optional[str] = None, sharedstatedir: typing.Optional[str] = None,
- sysconfdir: typing.Optional[str] = None):
- self.bindir = bindir
- self.datadir = datadir
- self.includedir = includedir
- self.infodir = infodir
- self.libdir = libdir
- self.libexecdir = libexecdir
- self.localedir = localedir
- self.localstatedir = localstatedir
- self.mandir = mandir
- self.prefix = prefix
- self.sbindir = sbindir
- self.sharedstatedir = sharedstatedir
- self.sysconfdir = sysconfdir
-
- def __contains__(self, key: str) -> str:
- return hasattr(self, key)
-
- def __getitem__(self, key: str) -> str:
- return getattr(self, key)
-
- def __setitem__(self, key: str, value: typing.Optional[str]) -> None:
- setattr(self, key, value)
-
- def __iter__(self) -> typing.Iterator[typing.Tuple[str, str]]:
- return iter(self.__dict__.items())
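An illustrative sketch, not part of the patch, of the new free functions that
replaced MachineInfo.detect() and MachineInfo.can_run(): callers now ask the
environment module, rather than the machine object, what the build machine is
and whether its binaries can run here.

    from mesonbuild.environment import detect_machine_info, machine_info_can_run

    build = detect_machine_info({})     # empty compiler dict: detect the OS view
    print(build)                        # e.g. <MachineInfo: linux x86_64 (...)>
    print(machine_info_can_run(build))  # True for the machine we are running on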
diff --git a/mesonbuild/interpreter.py b/mesonbuild/interpreter.py
index e33cb7f..e48733a 100644
--- a/mesonbuild/interpreter.py
+++ b/mesonbuild/interpreter.py
@@ -611,6 +611,8 @@ class Headers(InterpreterObject):
InterpreterObject.__init__(self)
self.sources = sources
self.install_subdir = kwargs.get('subdir', '')
+ if os.path.isabs(self.install_subdir):
+ raise InterpreterException('Subdir keyword must not be an absolute path.')
self.custom_install_dir = kwargs.get('install_dir', None)
self.custom_install_mode = kwargs.get('install_mode', None)
if self.custom_install_dir is not None:
@@ -850,7 +852,7 @@ class RunTargetHolder(InterpreterObject, ObjectHolder):
class Test(InterpreterObject):
def __init__(self, name, project, suite, exe, depends, is_parallel,
- cmd_args, env, should_fail, timeout, workdir):
+ cmd_args, env, should_fail, timeout, workdir, protocol):
InterpreterObject.__init__(self)
self.name = name
self.suite = suite
@@ -863,6 +865,7 @@ class Test(InterpreterObject):
self.should_fail = should_fail
self.timeout = timeout
self.workdir = workdir
+ self.protocol = protocol
def get_exe(self):
return self.exe
@@ -1973,7 +1976,8 @@ permitted_kwargs = {'add_global_arguments': {'language', 'native'},
'library': known_library_kwargs,
'subdir': {'if_found'},
'subproject': {'version', 'default_options', 'required'},
- 'test': {'args', 'depends', 'env', 'is_parallel', 'should_fail', 'timeout', 'workdir', 'suite'},
+ 'test': {'args', 'depends', 'env', 'is_parallel', 'should_fail', 'timeout', 'workdir',
+ 'suite', 'protocol'},
'vcs_tag': {'input', 'output', 'fallback', 'command', 'replace_string'},
}
@@ -2025,7 +2029,7 @@ class Interpreter(InterpreterBase):
# have the compilers needed to gain more knowledge, so wipe out old
# inferrence and start over.
self.build.environment.machines.miss_defaulting()
- self.build.environment.machines.detect_build(self.coredata.compilers)
+ self.build.environment.detect_build_machine(self.coredata.compilers)
self.build.environment.machines.default_missing()
assert self.build.environment.machines.build.cpu is not None
assert self.build.environment.machines.host.cpu is not None
@@ -2782,6 +2786,9 @@ external dependencies (including libraries) must go to "dependencies".''')
progobj = self.program_from_file_for(for_machine, args, silent=silent)
if progobj is None:
progobj = self.program_from_system(args, silent=silent)
+ if progobj is None and args[0].endswith('python3'):
+ prog = dependencies.ExternalProgram('python3', mesonlib.python_command, silent=True)
+ progobj = ExternalProgramHolder(prog)
if required and (progobj is None or not progobj.found()):
raise InvalidArguments('Program(s) {!r} not found or not executable'.format(args))
if progobj is None:
@@ -3266,6 +3273,9 @@ This will become a hard error in the future.''' % kwargs['input'], location=self
workdir = None
if not isinstance(timeout, int):
raise InterpreterException('Timeout must be an integer.')
+ protocol = kwargs.get('protocol', 'exitcode')
+ if protocol not in ('exitcode', 'tap'):
+ raise InterpreterException('Protocol must be "exitcode" or "tap".')
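+        # e.g. in a meson.build file: test('tap-tests', exe, protocol : 'tap')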
suite = []
prj = self.subproject if self.is_subproject() else self.build.project_name
for s in mesonlib.stringlistify(kwargs.get('suite', '')):
@@ -3277,7 +3287,7 @@ This will become a hard error in the future.''' % kwargs['input'], location=self
if not isinstance(dep, (build.CustomTarget, build.BuildTarget)):
raise InterpreterException('Depends items must be build targets.')
t = Test(args[0], prj, suite, exe.held_object, depends, par, cmd_args,
- env, should_fail, timeout, workdir)
+ env, should_fail, timeout, workdir, protocol)
if is_base_test:
self.build.tests.append(t)
mlog.debug('Adding test', mlog.bold(args[0], True))
diff --git a/mesonbuild/mesonlib.py b/mesonbuild/mesonlib.py
index ee3366f..0afc21b 100644
--- a/mesonbuild/mesonlib.py
+++ b/mesonbuild/mesonlib.py
@@ -655,6 +655,8 @@ def default_libdir():
return 'lib/' + archpath
except Exception:
pass
+ if is_freebsd():
+ return 'lib'
if os.path.isdir('/usr/lib64') and not os.path.islink('/usr/lib64'):
return 'lib64'
return 'lib'
@@ -678,6 +680,8 @@ def get_library_dirs() -> List[str]:
# problematic, please raise the issue on the mailing list.
unixdirs = ['/usr/local/lib', '/usr/lib', '/lib']
+ if is_freebsd():
+ return unixdirs
# FIXME: this needs to be further genericized for aarch64 etc.
machine = platform.machine()
if machine in ('i386', 'i486', 'i586', 'i686'):
diff --git a/mesonbuild/mesonmain.py b/mesonbuild/mesonmain.py
index d7bccdb..91a52b1 100644
--- a/mesonbuild/mesonmain.py
+++ b/mesonbuild/mesonmain.py
@@ -18,6 +18,7 @@ import importlib
import traceback
import argparse
import codecs
+import shutil
from . import mesonlib
from . import mlog
@@ -31,9 +32,12 @@ from .wrap import wraptool
# scripts in $MESONSRC/data/shell-completions/
class CommandLineParser:
def __init__(self):
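+        # Size the argparse help output to the current terminal instead of
+        # the default 80 columns; max_help_position keeps the option and
+        # description columns roughly balanced.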
+ self.term_width = shutil.get_terminal_size().columns
+        self.formatter = lambda prog: argparse.HelpFormatter(prog, max_help_position=int(self.term_width / 2), width=self.term_width)
+
self.commands = {}
self.hidden_commands = []
- self.parser = argparse.ArgumentParser(prog='meson')
+        self.parser = argparse.ArgumentParser(prog='meson', formatter_class=self.formatter)
self.subparsers = self.parser.add_subparsers(title='Commands',
description='If no command is specified it defaults to setup command.')
self.add_command('setup', msetup.add_arguments, msetup.run,
@@ -54,26 +58,27 @@ class CommandLineParser:
help='Manage subprojects')
self.add_command('help', self.add_help_arguments, self.run_help_command,
help='Print help of a subcommand')
+        self.add_command('rewrite', lambda parser: rewriter.add_arguments(parser, self.formatter), rewriter.run,
+ help='Modify the project definition')
# Hidden commands
- self.add_command('rewrite', rewriter.add_arguments, rewriter.run,
- help=argparse.SUPPRESS)
self.add_command('runpython', self.add_runpython_arguments, self.run_runpython_command,
help=argparse.SUPPRESS)
self.add_command('unstable-coredata', munstable_coredata.add_arguments, munstable_coredata.run,
help=argparse.SUPPRESS)
- def add_command(self, name, add_arguments_func, run_func, help):
+ def add_command(self, name, add_arguments_func, run_func, help, aliases=[]):
# FIXME: Cannot have hidden subparser:
# https://bugs.python.org/issue22848
if help == argparse.SUPPRESS:
- p = argparse.ArgumentParser(prog='meson ' + name)
+            p = argparse.ArgumentParser(prog='meson ' + name, formatter_class=self.formatter)
self.hidden_commands.append(name)
else:
- p = self.subparsers.add_parser(name, help=help)
+            p = self.subparsers.add_parser(name, help=help, aliases=aliases, formatter_class=self.formatter)
add_arguments_func(p)
p.set_defaults(run_func=run_func)
- self.commands[name] = p
+ for i in [name] + aliases:
+ self.commands[i] = p
def add_runpython_arguments(self, parser):
parser.add_argument('script_file')
diff --git a/mesonbuild/mintro.py b/mesonbuild/mintro.py
index 2d01c11..243dc5d 100644
--- a/mesonbuild/mintro.py
+++ b/mesonbuild/mintro.py
@@ -22,9 +22,11 @@ project files and don't need this info."""
import json
from . import build, coredata as cdata
from . import mesonlib
-from .ast import IntrospectionInterpreter
+from .ast import IntrospectionInterpreter, build_target_functions, AstConditionLevel, AstIDGenerator, AstIndentationGenerator
from . import mlog
from .backend import backends
+from .mparser import FunctionNode, ArrayNode, ArgumentNode, StringNode
+from typing import List, Optional
import sys, os
import pathlib
@@ -37,7 +39,10 @@ def get_meson_introspection_version():
def get_meson_introspection_required_version():
return ['>=1.0', '<2.0']
-def get_meson_introspection_types(coredata: cdata.CoreData = None, builddata: build.Build = None, backend: backends.Backend = None):
+def get_meson_introspection_types(coredata: Optional[cdata.CoreData] = None,
+ builddata: Optional[build.Build] = None,
+ backend: Optional[backends.Backend] = None,
+ sourcedir: Optional[str] = None):
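+    # Each entry may provide 'func' (introspection from a configured build
+    # dir) and/or 'no_bd' (an AST-based fallback that runs on the source dir
+    # alone and receives the IntrospectionInterpreter).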
if backend and builddata:
benchmarkdata = backend.create_test_serialisation(builddata.get_benchmarks())
testdata = backend.create_test_serialisation(builddata.get_tests())
@@ -52,6 +57,7 @@ def get_meson_introspection_types(coredata: cdata.CoreData = None, builddata: bu
},
'buildoptions': {
'func': lambda: list_buildoptions(coredata),
+ 'no_bd': lambda intr: list_buildoptions_from_source(intr),
'desc': 'List all build options.',
},
'buildsystem_files': {
@@ -61,18 +67,26 @@ def get_meson_introspection_types(coredata: cdata.CoreData = None, builddata: bu
},
'dependencies': {
'func': lambda: list_deps(coredata),
+ 'no_bd': lambda intr: list_deps_from_source(intr),
'desc': 'List external dependencies.',
},
+ 'scan_dependencies': {
+ 'no_bd': lambda intr: list_deps_from_source(intr),
+ 'desc': 'Scan for dependencies used in the meson.build file.',
+ 'key': 'scan-dependencies',
+ },
'installed': {
'func': lambda: list_installed(installdata),
'desc': 'List all installed files and directories.',
},
'projectinfo': {
'func': lambda: list_projinfo(builddata),
+ 'no_bd': lambda intr: list_projinfo_from_source(sourcedir, intr),
'desc': 'Information about projects.',
},
'targets': {
'func': lambda: list_targets(builddata, installdata, backend),
+ 'no_bd': lambda intr: list_targets_from_source(intr),
'desc': 'List top level targets.',
},
'tests': {
@@ -113,6 +127,46 @@ def list_installed(installdata):
res[path] = os.path.join(installdata.prefix, installpath)
return res
+def list_targets_from_source(intr: IntrospectionInterpreter):
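+    # AST-only approximation of the 'targets' introspection data: without a
+    # configured build dir only plain string sources can be recovered, so
+    # language, compiler and parameters stay empty or 'unknown'.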
+ tlist = []
+ for i in intr.targets:
+ sources = []
+ for n in i['sources']:
+ args = []
+ if isinstance(n, FunctionNode):
+ args = list(n.args.arguments)
+ if n.func_name in build_target_functions:
+ args.pop(0)
+ elif isinstance(n, ArrayNode):
+ args = n.args.arguments
+ elif isinstance(n, ArgumentNode):
+ args = n.arguments
+ for j in args:
+ if isinstance(j, StringNode):
+ sources += [j.value]
+ elif isinstance(j, str):
+ sources += [j]
+
+ tlist += [{
+ 'name': i['name'],
+ 'id': i['id'],
+ 'type': i['type'],
+ 'defined_in': i['defined_in'],
+ 'filename': [os.path.join(i['subdir'], x) for x in i['outputs']],
+ 'build_by_default': i['build_by_default'],
+ 'target_sources': [{
+ 'language': 'unknown',
+ 'compiler': [],
+ 'parameters': [],
+ 'sources': [os.path.normpath(os.path.join(os.path.abspath(intr.source_root), i['subdir'], x)) for x in sources],
+ 'generated_sources': []
+ }],
+ 'subproject': None, # Subprojects are not supported
+ 'installed': i['installed']
+ }]
+
+ return tlist
+
def list_targets(builddata: build.Build, installdata, backend: backends.Backend):
tlist = []
build_dir = builddata.environment.get_build_dir()
@@ -135,7 +189,8 @@ def list_targets(builddata: build.Build, installdata, backend: backends.Backend)
'defined_in': os.path.normpath(os.path.join(src_dir, target.subdir, 'meson.build')),
'filename': [os.path.join(build_dir, target.subdir, x) for x in target.get_outputs()],
'build_by_default': target.build_by_default,
- 'target_sources': backend.get_introspection_data(idname, target)
+ 'target_sources': backend.get_introspection_data(idname, target),
+ 'subproject': target.subproject or None
}
if installdata and target.should_install():
@@ -146,15 +201,8 @@ def list_targets(builddata: build.Build, installdata, backend: backends.Backend)
tlist.append(t)
return tlist
-def list_buildoptions_from_source(sourcedir, backend, indent):
- # Make sure that log entries in other parts of meson don't interfere with the JSON output
- mlog.disable()
- backend = backends.get_backend_from_name(backend, None)
- intr = IntrospectionInterpreter(sourcedir, '', backend.name)
- intr.analyze()
- # Reenable logging just in case
- mlog.enable()
- print(json.dumps(list_buildoptions(intr.coredata), indent=indent))
+def list_buildoptions_from_source(intr: IntrospectionInterpreter) -> List[dict]:
+ return list_buildoptions(intr.coredata)
def list_target_files(target_name: str, targets: list, source_dir: str):
sys.stderr.write("WARNING: The --target-files introspection API is deprecated. Use --targets instead.\n")
@@ -177,7 +225,7 @@ def list_target_files(target_name: str, targets: list, source_dir: str):
return result
-def list_buildoptions(coredata: cdata.CoreData):
+def list_buildoptions(coredata: cdata.CoreData) -> List[dict]:
optlist = []
dir_option_names = ['bindir',
@@ -249,6 +297,12 @@ def list_buildsystem_files(builddata: build.Build):
filelist = [os.path.join(src_dir, x) for x in filelist]
return filelist
+def list_deps_from_source(intr: IntrospectionInterpreter):
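+    # Illustrative output entry (values depend on the project):
+    #   {'name': 'zlib', 'required': True, 'has_fallback': False,
+    #    'conditional': False}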
+ result = []
+ for i in intr.dependencies:
+ result += [{k: v for k, v in i.items() if k in ['name', 'required', 'has_fallback', 'conditional']}]
+ return result
+
def list_deps(coredata: cdata.CoreData):
result = []
for d in coredata.deps.values():
@@ -287,7 +341,8 @@ def list_benchmarks(benchdata):
def list_projinfo(builddata: build.Build):
result = {'version': builddata.project_version,
- 'descriptive_name': builddata.project_name}
+ 'descriptive_name': builddata.project_name,
+ 'subproject_dir': builddata.subproject_dir}
subprojects = []
for k, v in builddata.subprojects.items():
c = {'name': k,
@@ -297,38 +352,58 @@ def list_projinfo(builddata: build.Build):
result['subprojects'] = subprojects
return result
-def list_projinfo_from_source(sourcedir, indent):
+def list_projinfo_from_source(sourcedir: str, intr: IntrospectionInterpreter):
files = find_buildsystem_files_list(sourcedir)
files = [os.path.normpath(x) for x in files]
- mlog.disable()
- intr = IntrospectionInterpreter(sourcedir, '', 'ninja')
- intr.analyze()
- mlog.enable()
-
for i in intr.project_data['subprojects']:
basedir = os.path.join(intr.subproject_dir, i['name'])
i['buildsystem_files'] = [x for x in files if x.startswith(basedir)]
files = [x for x in files if not x.startswith(basedir)]
intr.project_data['buildsystem_files'] = files
- print(json.dumps(intr.project_data, indent=indent))
+ intr.project_data['subproject_dir'] = intr.subproject_dir
+ return intr.project_data
+
+def print_results(options, results, indent):
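+    # A single requested section keeps the old bare-JSON output format;
+    # multiple sections (or options.force_dict) are wrapped in one
+    # {'section': data} object.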
+ if len(results) == 0 and not options.force_dict:
+ print('No command specified')
+ return 1
+ elif len(results) == 1 and not options.force_dict:
+        # Make sure to keep the existing output format for a single option
+ print(json.dumps(results[0][1], indent=indent))
+ else:
+ out = {}
+ for i in results:
+ out[i[0]] = i[1]
+ print(json.dumps(out, indent=indent))
+ return 0
def run(options):
datadir = 'meson-private'
infodir = 'meson-info'
- indent = 4 if options.indent else None
if options.builddir is not None:
datadir = os.path.join(options.builddir, datadir)
infodir = os.path.join(options.builddir, infodir)
+ indent = 4 if options.indent else None
+ results = []
+ sourcedir = '.' if options.builddir == 'meson.build' else options.builddir[:-11]
+ intro_types = get_meson_introspection_types(sourcedir=sourcedir)
+
if 'meson.build' in [os.path.basename(options.builddir), options.builddir]:
- sourcedir = '.' if options.builddir == 'meson.build' else options.builddir[:-11]
- if options.projectinfo:
- list_projinfo_from_source(sourcedir, indent)
- return 0
- if options.buildoptions:
- list_buildoptions_from_source(sourcedir, options.backend, indent)
- return 0
+ # Make sure that log entries in other parts of meson don't interfere with the JSON output
+ mlog.disable()
+ backend = backends.get_backend_from_name(options.backend, None)
+ intr = IntrospectionInterpreter(sourcedir, '', backend.name, visitors = [AstIDGenerator(), AstIndentationGenerator(), AstConditionLevel()])
+ intr.analyze()
+ # Reenable logging just in case
+ mlog.enable()
+ for key, val in intro_types.items():
+ if (not options.all and not getattr(options, key, False)) or 'no_bd' not in val:
+ continue
+ results += [(key, val['no_bd'](intr))]
+ return print_results(options, results, indent)
+
infofile = get_meson_info_file(infodir)
if not os.path.isdir(datadir) or not os.path.isdir(infodir) or not os.path.isfile(infofile):
print('Current directory is not a meson build directory.'
@@ -352,9 +427,6 @@ def run(options):
.format(intro_vers, ' and '.join(vers_to_check)))
return 1
- results = []
- intro_types = get_meson_introspection_types()
-
# Handle the one option that does not have its own JSON file (maybe deprecate / remove this?)
if options.target_files is not None:
targets_file = os.path.join(infodir, 'intro-targets.json')
@@ -364,6 +436,8 @@ def run(options):
# Extract introspection information from JSON
for i in intro_types.keys():
+ if 'func' not in intro_types[i]:
+ continue
if not options.all and not getattr(options, i, False):
continue
curr = os.path.join(infodir, 'intro-{}.json'.format(i))
@@ -373,18 +447,7 @@ def run(options):
with open(curr, 'r') as fp:
results += [(i, json.load(fp))]
- if len(results) == 0 and not options.force_dict:
- print('No command specified')
- return 1
- elif len(results) == 1 and not options.force_dict:
- # Make to keep the existing output format for a single option
- print(json.dumps(results[0][1], indent=indent))
- else:
- out = {}
- for i in results:
- out[i[0]] = i[1]
- print(json.dumps(out, indent=indent))
- return 0
+ return print_results(options, results, indent)
updated_introspection_files = []
@@ -405,6 +468,8 @@ def generate_introspection_file(builddata: build.Build, backend: backends.Backen
intro_info = []
for key, val in intro_types.items():
+ if 'func' not in val:
+ continue
intro_info += [(key, val['func']())]
write_intro_info(intro_info, builddata.environment.info_dir)
@@ -433,6 +498,8 @@ def write_meson_info_file(builddata: build.Build, errors: list, build_files_upda
intro_info = {}
for i in intro_types.keys():
+ if 'func' not in intro_types[i]:
+ continue
intro_info[i] = {
'file': 'intro-{}.json'.format(i),
'updated': i in updated_introspection_files
diff --git a/mesonbuild/mlog.py b/mesonbuild/mlog.py
index a8b146f..0434274 100644
--- a/mesonbuild/mlog.py
+++ b/mesonbuild/mlog.py
@@ -48,6 +48,7 @@ log_depth = 0
log_timestamp_start = None
log_fatal_warnings = False
log_disable_stdout = False
+log_errors_only = False
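+# With log_errors_only set (see set_quiet below), plain log() calls are
+# suppressed on stdout and only messages flagged with is_error=True get
+# through; everything is still written to the log file.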
def disable():
global log_disable_stdout
@@ -57,6 +58,14 @@ def enable():
global log_disable_stdout
log_disable_stdout = False
+def set_quiet():
+ global log_errors_only
+ log_errors_only = True
+
+def set_verbose():
+ global log_errors_only
+ log_errors_only = False
+
def initialize(logdir, fatal_warnings=False):
global log_dir, log_file, log_fatal_warnings
log_dir = logdir
@@ -152,14 +161,16 @@ def debug(*args, **kwargs):
print(*arr, file=log_file, **kwargs) # Log file never gets ANSI codes.
log_file.flush()
-def log(*args, **kwargs):
+def log(*args, is_error=False, **kwargs):
+ global log_errors_only
arr = process_markup(args, False)
if log_file is not None:
print(*arr, file=log_file, **kwargs) # Log file never gets ANSI codes.
log_file.flush()
if colorize_console:
arr = process_markup(args, True)
- force_print(*arr, **kwargs)
+ if not log_errors_only or is_error:
+ force_print(*arr, **kwargs)
def _log_error(severity, *args, **kwargs):
from .mesonlib import get_error_location_string
@@ -187,13 +198,13 @@ def _log_error(severity, *args, **kwargs):
raise MesonException("Fatal warnings enabled, aborting")
def error(*args, **kwargs):
- return _log_error('error', *args, **kwargs)
+ return _log_error('error', *args, **kwargs, is_error=True)
def warning(*args, **kwargs):
- return _log_error('warning', *args, **kwargs)
+ return _log_error('warning', *args, **kwargs, is_error=True)
def deprecation(*args, **kwargs):
- return _log_error('deprecation', *args, **kwargs)
+ return _log_error('deprecation', *args, **kwargs, is_error=True)
def exception(e, prefix=red('ERROR:')):
log()
diff --git a/mesonbuild/modules/unstable_cuda.py b/mesonbuild/modules/unstable_cuda.py
index 941b15a..cd116cc 100644
--- a/mesonbuild/modules/unstable_cuda.py
+++ b/mesonbuild/modules/unstable_cuda.py
@@ -44,6 +44,7 @@ class CudaModule(ExtensionModule):
raise argerror
driver_version_table = [
+ {'cuda_version': '>=10.1.105', 'windows': '418.96', 'linux': '418.39'},
{'cuda_version': '>=10.0.130', 'windows': '411.31', 'linux': '410.48'},
{'cuda_version': '>=9.2.148', 'windows': '398.26', 'linux': '396.37'},
{'cuda_version': '>=9.2.88', 'windows': '397.44', 'linux': '396.26'},
@@ -77,11 +78,19 @@ class CudaModule(ExtensionModule):
@staticmethod
def _break_arch_string(s):
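+        # Normalize any mix of whitespace, comma and semicolon separators,
+        # e.g. '3.5 5.2;7.0,7.5' -> ['3.5', '5.2', '7.0', '7.5'].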
- s = re.sub('[ \t,;]+', ';', s)
+ s = re.sub('[ \t\r\n,;]+', ';', s)
s = s.strip(';').split(';')
return s
@staticmethod
+ def _detected_cc_from_compiler(c):
+ if isinstance(c, CompilerHolder):
+ c = c.compiler
+ if isinstance(c, CudaCompiler):
+ return c.detected_cc
+ return ''
+
+ @staticmethod
def _version_from_compiler(c):
if isinstance(c, CompilerHolder):
c = c.compiler
@@ -97,7 +106,8 @@ class CudaModule(ExtensionModule):
if len(args) < 1:
raise argerror
else:
- cuda_version = self._version_from_compiler(args[0])
+ compiler = args[0]
+ cuda_version = self._version_from_compiler(compiler)
if cuda_version == 'unknown':
raise argerror
@@ -108,7 +118,8 @@ class CudaModule(ExtensionModule):
raise InvalidArguments('''The special architectures 'All', 'Common' and 'Auto' must appear alone, as a positional argument!''')
arch_list = arch_list[0] if len(arch_list) == 1 else arch_list
- detected = flatten([kwargs.get('detected', [])])
+ detected = kwargs.get('detected', self._detected_cc_from_compiler(compiler))
+ detected = flatten([detected])
detected = [self._break_arch_string(a) for a in detected]
detected = flatten(detected)
if not set(detected).isdisjoint({'All', 'Common', 'Auto'}):
@@ -148,7 +159,7 @@ class CudaModule(ExtensionModule):
cuda_limit_gpu_architecture = '7.0' # noqa: E221
if version_compare(cuda_version, '>=9.0'):
- cuda_known_gpu_architectures += ['Volta', 'Volta+Tegra'] # noqa: E221
+ cuda_known_gpu_architectures += ['Volta', 'Xavier'] # noqa: E221
cuda_common_gpu_architectures += ['7.0', '7.0+PTX'] # noqa: E221
cuda_all_gpu_architectures += ['7.0', '7.0+PTX', '7.2', '7.2+PTX'] # noqa: E221
@@ -215,7 +226,7 @@ class CudaModule(ExtensionModule):
'Pascal': (['6.0', '6.1'], ['6.1']),
'Pascal+Tegra': (['6.2'], []),
'Volta': (['7.0'], ['7.0']),
- 'Volta+Tegra': (['7.2'], []),
+ 'Xavier': (['7.2'], []),
'Turing': (['7.5'], ['7.5']),
}.get(arch_name, (None, None))
diff --git a/mesonbuild/mparser.py b/mesonbuild/mparser.py
index f18352b..17783ce 100644
--- a/mesonbuild/mparser.py
+++ b/mesonbuild/mparser.py
@@ -262,17 +262,21 @@ class BreakNode(ElementaryNode):
pass
class ArrayNode(BaseNode):
- def __init__(self, args, lineno, colno):
+ def __init__(self, args, lineno, colno, end_lineno, end_colno):
self.subdir = args.subdir
self.lineno = lineno
self.colno = colno
+ self.end_lineno = end_lineno
+ self.end_colno = end_colno
self.args = args
class DictNode(BaseNode):
- def __init__(self, args, lineno, colno):
+ def __init__(self, args, lineno, colno, end_lineno, end_colno):
self.subdir = args.subdir
self.lineno = lineno
self.colno = colno
+ self.end_lineno = end_lineno
+ self.end_colno = end_colno
self.args = args
class EmptyNode(BaseNode):
@@ -349,10 +353,12 @@ class MethodNode(BaseNode):
self.args = args
class FunctionNode(BaseNode):
- def __init__(self, subdir, lineno, colno, func_name, args):
+ def __init__(self, subdir, lineno, colno, end_lineno, end_colno, func_name, args):
self.subdir = subdir
self.lineno = lineno
self.colno = colno
+ self.end_lineno = end_lineno
+ self.end_colno = end_colno
self.func_name = func_name
assert(isinstance(func_name, str))
self.args = args
@@ -405,7 +411,8 @@ class IfNode(BaseNode):
self.block = block
class TernaryNode(BaseNode):
- def __init__(self, lineno, colno, condition, trueblock, falseblock):
+ def __init__(self, subdir, lineno, colno, condition, trueblock, falseblock):
+ self.subdir = subdir
self.lineno = lineno
self.colno = colno
self.condition = condition
@@ -540,7 +547,7 @@ class Parser:
self.expect('colon')
falseblock = self.e1()
self.in_ternary = False
- return TernaryNode(left.lineno, left.colno, left, trueblock, falseblock)
+ return TernaryNode(left.subdir, left.lineno, left.colno, left, trueblock, falseblock)
return left
def e2(self):
@@ -619,7 +626,7 @@ class Parser:
if not isinstance(left, IdNode):
raise ParseException('Function call must be applied to plain id',
self.getline(), left.lineno, left.colno)
- left = FunctionNode(left.subdir, left.lineno, left.colno, left.value, args)
+ left = FunctionNode(left.subdir, left.lineno, left.colno, self.current.lineno, self.current.colno, left.value, args)
go_again = True
while go_again:
go_again = False
@@ -640,11 +647,11 @@ class Parser:
elif self.accept('lbracket'):
args = self.args()
self.block_expect('rbracket', block_start)
- return ArrayNode(args, block_start.lineno, block_start.colno)
+ return ArrayNode(args, block_start.lineno, block_start.colno, self.current.lineno, self.current.colno)
elif self.accept('lcurl'):
key_values = self.key_values()
self.block_expect('rcurl', block_start)
- return DictNode(key_values, block_start.lineno, block_start.colno)
+ return DictNode(key_values, block_start.lineno, block_start.colno, self.current.lineno, self.current.colno)
else:
return self.e9()
diff --git a/mesonbuild/mtest.py b/mesonbuild/mtest.py
index 351e45d..77a0f82 100644
--- a/mesonbuild/mtest.py
+++ b/mesonbuild/mtest.py
@@ -23,6 +23,9 @@ from mesonbuild.dependencies import ExternalProgram
from mesonbuild.mesonlib import substring_is_in_list, MesonException
from mesonbuild import mlog
+from collections import namedtuple
+import io
+import re
import tempfile
import time, datetime, multiprocessing, json
import concurrent.futures as conc
@@ -36,6 +39,10 @@ import enum
# mean that the test should be skipped.
GNU_SKIP_RETURNCODE = 77
+# GNU autotools interprets a return code of 99 from tests it executes to
+# mean that the test failed even before testing what it is supposed to test.
+GNU_ERROR_RETURNCODE = 99
+
def is_windows():
platname = platform.system().lower()
return platname == 'windows' or 'mingw' in platname
@@ -146,11 +153,202 @@ class TestResult(enum.Enum):
FAIL = 'FAIL'
EXPECTEDFAIL = 'EXPECTEDFAIL'
UNEXPECTEDPASS = 'UNEXPECTEDPASS'
+ ERROR = 'ERROR'
+
+
+class TAPParser(object):
+ Plan = namedtuple('Plan', ['count', 'late', 'skipped', 'explanation'])
+ Bailout = namedtuple('Bailout', ['message'])
+ Test = namedtuple('Test', ['number', 'name', 'result', 'explanation'])
+ Error = namedtuple('Error', ['message'])
+ Version = namedtuple('Version', ['version'])
+
+ _MAIN = 1
+ _AFTER_TEST = 2
+ _YAML = 3
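+
+    # Parser states: _MAIN handles ordinary lines, _AFTER_TEST additionally
+    # accepts the start of a YAML diagnostic block (TAP 13+), and _YAML
+    # consumes that block until its closing '...' line.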
+
+ _RE_BAILOUT = r'Bail out!\s*(.*)'
+ _RE_DIRECTIVE = r'(?:\s*\#\s*([Ss][Kk][Ii][Pp]\S*|[Tt][Oo][Dd][Oo])\b\s*(.*))?'
+ _RE_PLAN = r'1\.\.([0-9]+)' + _RE_DIRECTIVE
+ _RE_TEST = r'((?:not )?ok)\s*(?:([0-9]+)\s*)?([^#]*)' + _RE_DIRECTIVE
+ _RE_VERSION = r'TAP version ([0-9]+)'
+ _RE_YAML_START = r'(\s+)---.*'
+ _RE_YAML_END = r'\s+\.\.\.\s*'
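+
+    # Illustrative TAP lines each pattern matches (examples only):
+    #   _RE_TEST:    'not ok 2 some test # TODO not implemented yet'
+    #   _RE_PLAN:    '1..5' or '1..0 # SKIP nothing to do'
+    #   _RE_BAILOUT: 'Bail out! database is unavailable'
+    #   _RE_VERSION: 'TAP version 13'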
+
+ def __init__(self, io):
+ self.io = io
+
+ def parse_test(self, ok, num, name, directive, explanation):
+ name = name.strip()
+ explanation = explanation.strip() if explanation else None
+ if directive is not None:
+ directive = directive.upper()
+ if directive == 'SKIP':
+ if ok:
+ yield self.Test(num, name, TestResult.SKIP, explanation)
+ return
+ elif directive == 'TODO':
+ yield self.Test(num, name, TestResult.UNEXPECTEDPASS if ok else TestResult.EXPECTEDFAIL, explanation)
+ return
+ else:
+ yield self.Error('invalid directive "%s"' % (directive,))
+
+ yield self.Test(num, name, TestResult.OK if ok else TestResult.FAIL, explanation)
+
+ def parse(self):
+ found_late_test = False
+ bailed_out = False
+ plan = None
+ lineno = 0
+ num_tests = 0
+ yaml_lineno = None
+ yaml_indent = None
+ state = self._MAIN
+ version = 12
+ while True:
+ lineno += 1
+ try:
+ line = next(self.io).rstrip()
+ except StopIteration:
+ break
+
+ # YAML blocks are only accepted after a test
+ if state == self._AFTER_TEST:
+ if version >= 13:
+ m = re.match(self._RE_YAML_START, line)
+ if m:
+ state = self._YAML
+ yaml_lineno = lineno
+ yaml_indent = m.group(1)
+ continue
+ state = self._MAIN
+
+ elif state == self._YAML:
+ if re.match(self._RE_YAML_END, line):
+ state = self._MAIN
+ continue
+ if line.startswith(yaml_indent):
+ continue
+ yield self.Error('YAML block not terminated (started on line %d)' % (yaml_lineno,))
+ state = self._MAIN
+
+ assert state == self._MAIN
+ if line.startswith('#'):
+ continue
+
+ m = re.match(self._RE_TEST, line)
+ if m:
+ if plan and plan.late and not found_late_test:
+ yield self.Error('unexpected test after late plan')
+ found_late_test = True
+ num_tests += 1
+ num = num_tests if m.group(2) is None else int(m.group(2))
+ if num != num_tests:
+ yield self.Error('out of order test numbers')
+ yield from self.parse_test(m.group(1) == 'ok', num,
+ m.group(3), m.group(4), m.group(5))
+ state = self._AFTER_TEST
+ continue
+
+ m = re.match(self._RE_PLAN, line)
+ if m:
+ if plan:
+ yield self.Error('more than one plan found')
+ else:
+ count = int(m.group(1))
+ skipped = (count == 0)
+ if m.group(2):
+ if m.group(2).upper().startswith('SKIP'):
+ if count > 0:
+ yield self.Error('invalid SKIP directive for plan')
+ skipped = True
+ else:
+ yield self.Error('invalid directive for plan')
+ plan = self.Plan(count=count, late=(num_tests > 0),
+ skipped=skipped, explanation=m.group(3))
+ yield plan
+ continue
+
+ m = re.match(self._RE_BAILOUT, line)
+ if m:
+ yield self.Bailout(m.group(1))
+ bailed_out = True
+ continue
+
+ m = re.match(self._RE_VERSION, line)
+ if m:
+ # The TAP version is only accepted as the first line
+ if lineno != 1:
+ yield self.Error('version number must be on the first line')
+ continue
+ version = int(m.group(1))
+ if version < 13:
+ yield self.Error('version number should be at least 13')
+ else:
+ yield self.Version(version=version)
+ continue
+
+ yield self.Error('unexpected input at line %d' % (lineno,))
+
+ if state == self._YAML:
+ yield self.Error('YAML block not terminated (started on line %d)' % (yaml_lineno,))
+
+ if not bailed_out and plan and num_tests != plan.count:
+ if num_tests < plan.count:
+ yield self.Error('Too few tests run (expected %d, got %d)' % (plan.count, num_tests))
+ else:
+ yield self.Error('Too many tests run (expected %d, got %d)' % (plan.count, num_tests))
class TestRun:
- def __init__(self, res, returncode, should_fail, duration, stdo, stde, cmd,
- env):
+ @staticmethod
+ def make_exitcode(test, returncode, duration, stdo, stde, cmd):
+ if returncode == GNU_SKIP_RETURNCODE:
+ res = TestResult.SKIP
+ elif returncode == GNU_ERROR_RETURNCODE:
+ res = TestResult.ERROR
+ elif test.should_fail:
+ res = TestResult.EXPECTEDFAIL if bool(returncode) else TestResult.UNEXPECTEDPASS
+ else:
+ res = TestResult.FAIL if bool(returncode) else TestResult.OK
+ return TestRun(test, res, returncode, duration, stdo, stde, cmd)
+
+    @staticmethod
+    def make_tap(test, returncode, duration, stdo, stde, cmd):
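+        # Fold the per-subtest TAP results into one TestRun: a parse error,
+        # bailout or non-zero exit code becomes ERROR; an all-skipped (or
+        # empty) stream becomes SKIP; otherwise pass/fail follows the subtest
+        # outcomes and test.should_fail.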
+ res = None
+ num_tests = 0
+ failed = False
+ num_skipped = 0
+
+ for i in TAPParser(io.StringIO(stdo)).parse():
+ if isinstance(i, TAPParser.Bailout):
+ res = TestResult.ERROR
+ elif isinstance(i, TAPParser.Test):
+ if i.result == TestResult.SKIP:
+ num_skipped += 1
+ elif i.result in (TestResult.FAIL, TestResult.UNEXPECTEDPASS):
+ failed = True
+ num_tests += 1
+ elif isinstance(i, TAPParser.Error):
+ res = TestResult.ERROR
+ stde += '\nTAP parsing error: ' + i.message
+
+ if returncode != 0:
+ res = TestResult.ERROR
+ stde += '\n(test program exited with status code %d)' % (returncode,)
+
+ if res is None:
+ # Now determine the overall result of the test based on the outcome of the subcases
+ if num_skipped == num_tests:
+ # This includes the case where num_tests is zero
+ res = TestResult.SKIP
+ elif test.should_fail:
+ res = TestResult.EXPECTEDFAIL if failed else TestResult.UNEXPECTEDPASS
+ else:
+ res = TestResult.FAIL if failed else TestResult.OK
+
+ return TestRun(test, res, returncode, duration, stdo, stde, cmd)
+
+ def __init__(self, test, res, returncode, duration, stdo, stde, cmd):
assert isinstance(res, TestResult)
self.res = res
self.returncode = returncode
@@ -158,8 +356,8 @@ class TestRun:
self.stdo = stdo
self.stde = stde
self.cmd = cmd
- self.env = env
- self.should_fail = should_fail
+ self.env = test.env
+ self.should_fail = test.should_fail
def get_log(self):
res = '--- command ---\n'
@@ -257,9 +455,8 @@ class SingleTestRunner:
cmd = self._get_cmd()
if cmd is None:
skip_stdout = 'Not run because can not execute cross compiled binaries.'
- return TestRun(res=TestResult.SKIP, returncode=GNU_SKIP_RETURNCODE,
- should_fail=self.test.should_fail, duration=0.0,
- stdo=skip_stdout, stde=None, cmd=None, env=self.test.env)
+ return TestRun(test=self.test, res=TestResult.SKIP, returncode=GNU_SKIP_RETURNCODE,
+ duration=0.0, stdo=skip_stdout, stde=None, cmd=None)
else:
wrap = TestHarness.get_wrapper(self.options)
if self.options.gdb:
@@ -388,14 +585,12 @@ class SingleTestRunner:
stdo = ""
stde = additional_error
if timed_out:
- res = TestResult.TIMEOUT
- elif p.returncode == GNU_SKIP_RETURNCODE:
- res = TestResult.SKIP
- elif self.test.should_fail:
- res = TestResult.EXPECTEDFAIL if bool(p.returncode) else TestResult.UNEXPECTEDPASS
+ return TestRun(self.test, TestResult.TIMEOUT, p.returncode, duration, stdo, stde, cmd)
else:
- res = TestResult.FAIL if bool(p.returncode) else TestResult.OK
- return TestRun(res, p.returncode, self.test.should_fail, duration, stdo, stde, cmd, self.test.env)
+ if self.test.protocol == 'exitcode':
+ return TestRun.make_exitcode(self.test, p.returncode, duration, stdo, stde, cmd)
+ else:
+ return TestRun.make_tap(self.test, p.returncode, duration, stdo, stde, cmd)
class TestHarness:
@@ -471,7 +666,7 @@ class TestHarness:
self.skip_count += 1
elif result.res is TestResult.OK:
self.success_count += 1
- elif result.res is TestResult.FAIL:
+ elif result.res is TestResult.FAIL or result.res is TestResult.ERROR:
self.fail_count += 1
elif result.res is TestResult.EXPECTEDFAIL:
self.expectedfail_count += 1
@@ -493,9 +688,11 @@ class TestHarness:
(num, name, padding1, result.res.value, padding2, result.duration,
status)
ok_statuses = (TestResult.OK, TestResult.EXPECTEDFAIL)
+ bad_statuses = (TestResult.FAIL, TestResult.TIMEOUT, TestResult.UNEXPECTEDPASS,
+ TestResult.ERROR)
if not self.options.quiet or result.res not in ok_statuses:
if result.res not in ok_statuses and mlog.colorize_console:
- if result.res in (TestResult.FAIL, TestResult.TIMEOUT, TestResult.UNEXPECTEDPASS):
+ if result.res in bad_statuses:
decorator = mlog.red
elif result.res is TestResult.SKIP:
decorator = mlog.yellow
@@ -505,8 +702,7 @@ class TestHarness:
else:
print(result_str)
result_str += "\n\n" + result.get_log()
- if (result.returncode != GNU_SKIP_RETURNCODE) \
- and (result.returncode != 0) != result.should_fail:
+ if result.res in bad_statuses:
if self.options.print_errorlogs:
self.collected_logs.append(result_str)
if self.logfile:
diff --git a/mesonbuild/rewriter.py b/mesonbuild/rewriter.py
index fa26571..975655c 100644
--- a/mesonbuild/rewriter.py
+++ b/mesonbuild/rewriter.py
@@ -23,23 +23,51 @@
# - move targets
# - reindent?
-from .ast import IntrospectionInterpreter, build_target_functions, AstIDGenerator, AstIndentationGenerator, AstPrinter
+from .ast import IntrospectionInterpreter, build_target_functions, AstConditionLevel, AstIDGenerator, AstIndentationGenerator, AstPrinter
from mesonbuild.mesonlib import MesonException
-from . import mlog, mparser, environment
+from . import mlog, environment
from functools import wraps
-from pprint import pprint
-from .mparser import Token, ArrayNode, ArgumentNode, AssignmentNode, IdNode, FunctionNode, StringNode
-import json, os
+from typing import List, Dict, Optional
+from .mparser import Token, ArrayNode, ArgumentNode, AssignmentNode, BaseNode, BooleanNode, ElementaryNode, IdNode, FunctionNode, StringNode
+import json, os, re, sys
class RewriterException(MesonException):
pass
-def add_arguments(parser):
- parser.add_argument('--sourcedir', default='.',
- help='Path to source directory.')
- parser.add_argument('-p', '--print', action='store_true', default=False, dest='print',
- help='Print the parsed AST.')
- parser.add_argument('command', type=str)
+def add_arguments(parser, formatter=None):
+ parser.add_argument('-s', '--sourcedir', type=str, default='.', metavar='SRCDIR', help='Path to source directory.')
+ parser.add_argument('-V', '--verbose', action='store_true', default=False, help='Enable verbose output')
+ parser.add_argument('-S', '--skip-errors', dest='skip', action='store_true', default=False, help='Skip errors instead of aborting')
+ subparsers = parser.add_subparsers(dest='type', title='Rewriter commands', description='Rewrite command to execute')
+
+ # Target
+    tgt_parser = subparsers.add_parser('target', help='Modify a target', formatter_class=formatter)
+ tgt_parser.add_argument('-s', '--subdir', default='', dest='subdir', help='Subdirectory of the new target (only for the "add_target" action)')
+ tgt_parser.add_argument('--type', dest='tgt_type', choices=rewriter_keys['target']['target_type'][2], default='executable',
+ help='Type of the target to add (only for the "add_target" action)')
+ tgt_parser.add_argument('target', help='Name or ID of the target')
+ tgt_parser.add_argument('operation', choices=['add', 'rm', 'add_target', 'rm_target', 'info'],
+ help='Action to execute')
+ tgt_parser.add_argument('sources', nargs='*', help='Sources to add/remove')
+
+ # KWARGS
+    kw_parser = subparsers.add_parser('kwargs', help='Modify keyword arguments', formatter_class=formatter)
+ kw_parser.add_argument('operation', choices=rewriter_keys['kwargs']['operation'][2],
+ help='Action to execute')
+ kw_parser.add_argument('function', choices=list(rewriter_func_kwargs.keys()),
+ help='Function type to modify')
+    kw_parser.add_argument('id', help='ID of the function to modify (use "/" for "project")')
+ kw_parser.add_argument('kwargs', nargs='*', help='Pairs of keyword and value')
+
+ # Default options
+    def_parser = subparsers.add_parser('default-options', help='Modify the project default options', formatter_class=formatter)
+ def_parser.add_argument('operation', choices=rewriter_keys['default_options']['operation'][2],
+ help='Action to execute')
+ def_parser.add_argument('options', nargs='*', help='Key, value pairs of configuration option')
+
+ # JSON file/command
+    cmd_parser = subparsers.add_parser('command', help='Execute a JSON array of commands', formatter_class=formatter)
+ cmd_parser.add_argument('json', help='JSON string or file to execute')
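+
+# Illustrative invocations handled by the parsers above (names and paths are
+# examples only):
+#   meson rewrite target myexe add src/a.c src/b.c
+#   meson rewrite kwargs set project / version 1.0.0
+#   meson rewrite default-options set cpp_std c++14
+#   meson rewrite command '[{"type": "target", ...}]'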
class RequiredKeys:
def __init__(self, keys):
@@ -73,7 +101,7 @@ class RequiredKeys:
return wrapped
class MTypeBase:
- def __init__(self, node: mparser.BaseNode):
+ def __init__(self, node: Optional[BaseNode] = None):
if node is None:
self.node = self._new_node()
else:
@@ -85,7 +113,7 @@ class MTypeBase:
def _new_node(self):
# Overwrite in derived class
- return mparser.BaseNode()
+ return BaseNode()
def can_modify(self):
return self.node_type is not None
@@ -109,68 +137,76 @@ class MTypeBase:
# Overwrite in derived class
mlog.warning('Cannot remove a value of type', mlog.bold(type(self).__name__), '--> skipping')
+ def remove_regex(self, value):
+ # Overwrite in derived class
+ mlog.warning('Cannot remove a regex in type', mlog.bold(type(self).__name__), '--> skipping')
+
class MTypeStr(MTypeBase):
- def __init__(self, node: mparser.BaseNode):
+ def __init__(self, node: Optional[BaseNode] = None):
super().__init__(node)
def _new_node(self):
- return mparser.StringNode(mparser.Token('', '', 0, 0, 0, None, ''))
+ return StringNode(Token('', '', 0, 0, 0, None, ''))
def supported_nodes(self):
- return [mparser.StringNode]
+ return [StringNode]
def set_value(self, value):
self.node.value = str(value)
class MTypeBool(MTypeBase):
- def __init__(self, node: mparser.BaseNode):
+ def __init__(self, node: Optional[BaseNode] = None):
super().__init__(node)
def _new_node(self):
- return mparser.StringNode(mparser.Token('', '', 0, 0, 0, None, False))
+ return StringNode(Token('', '', 0, 0, 0, None, False))
def supported_nodes(self):
- return [mparser.BooleanNode]
+ return [BooleanNode]
def set_value(self, value):
self.node.value = bool(value)
class MTypeID(MTypeBase):
- def __init__(self, node: mparser.BaseNode):
+ def __init__(self, node: Optional[BaseNode] = None):
super().__init__(node)
def _new_node(self):
- return mparser.StringNode(mparser.Token('', '', 0, 0, 0, None, ''))
+ return StringNode(Token('', '', 0, 0, 0, None, ''))
def supported_nodes(self):
- return [mparser.IdNode]
+ return [IdNode]
def set_value(self, value):
self.node.value = str(value)
class MTypeList(MTypeBase):
- def __init__(self, node: mparser.BaseNode):
+ def __init__(self, node: Optional[BaseNode] = None):
super().__init__(node)
def _new_node(self):
- return mparser.ArrayNode(mparser.ArgumentNode(mparser.Token('', '', 0, 0, 0, None, '')), 0, 0)
+ return ArrayNode(ArgumentNode(Token('', '', 0, 0, 0, None, '')), 0, 0, 0, 0)
def _new_element_node(self, value):
# Overwrite in derived class
- return mparser.BaseNode()
+ return BaseNode()
def _ensure_array_node(self):
- if not isinstance(self.node, mparser.ArrayNode):
+ if not isinstance(self.node, ArrayNode):
tmp = self.node
self.node = self._new_node()
self.node.args.arguments += [tmp]
- def _check_is_equal(self, node, value):
+ def _check_is_equal(self, node, value) -> bool:
+ # Overwrite in derived class
+ return False
+
+ def _check_regex_matches(self, node, regex: str) -> bool:
# Overwrite in derived class
return False
def get_node(self):
- if isinstance(self.node, mparser.ArrayNode):
+ if isinstance(self.node, ArrayNode):
if len(self.node.args.arguments) == 1:
return self.node.args.arguments[0]
return self.node
@@ -180,7 +216,7 @@ class MTypeList(MTypeBase):
return []
def supported_nodes(self):
- return [mparser.ArrayNode] + self.supported_element_nodes()
+ return [ArrayNode] + self.supported_element_nodes()
def set_value(self, value):
if not isinstance(value, list):
@@ -197,10 +233,10 @@ class MTypeList(MTypeBase):
for i in value:
self.node.args.arguments += [self._new_element_node(i)]
- def remove_value(self, value):
+ def _remove_helper(self, value, equal_func):
def check_remove_node(node):
for j in value:
- if self._check_is_equal(i, j):
+ if equal_func(i, j):
return True
return False
@@ -213,50 +249,69 @@ class MTypeList(MTypeBase):
removed_list += [i]
self.node.args.arguments = removed_list
-class MtypeStrList(MTypeList):
- def __init__(self, node: mparser.BaseNode):
+ def remove_value(self, value):
+ self._remove_helper(value, self._check_is_equal)
+
+ def remove_regex(self, regex: str):
+ self._remove_helper(regex, self._check_regex_matches)
+
+class MTypeStrList(MTypeList):
+ def __init__(self, node: Optional[BaseNode] = None):
super().__init__(node)
def _new_element_node(self, value):
- return mparser.StringNode(mparser.Token('', '', 0, 0, 0, None, str(value)))
+ return StringNode(Token('', '', 0, 0, 0, None, str(value)))
- def _check_is_equal(self, node, value):
- if isinstance(node, mparser.StringNode):
+ def _check_is_equal(self, node, value) -> bool:
+ if isinstance(node, StringNode):
return node.value == value
return False
+ def _check_regex_matches(self, node, regex: str) -> bool:
+ if isinstance(node, StringNode):
+ return re.match(regex, node.value) is not None
+ return False
+
def supported_element_nodes(self):
- return [mparser.StringNode]
+ return [StringNode]
class MTypeIDList(MTypeList):
- def __init__(self, node: mparser.BaseNode):
+ def __init__(self, node: Optional[BaseNode] = None):
super().__init__(node)
def _new_element_node(self, value):
- return mparser.IdNode(mparser.Token('', '', 0, 0, 0, None, str(value)))
+ return IdNode(Token('', '', 0, 0, 0, None, str(value)))
- def _check_is_equal(self, node, value):
- if isinstance(node, mparser.IdNode):
+ def _check_is_equal(self, node, value) -> bool:
+ if isinstance(node, IdNode):
return node.value == value
return False
+ def _check_regex_matches(self, node, regex: str) -> bool:
+ if isinstance(node, StringNode):
+ return re.match(regex, node.value) is not None
+ return False
+
def supported_element_nodes(self):
- return [mparser.IdNode]
+ return [IdNode]
rewriter_keys = {
+ 'default_options': {
+ 'operation': (str, None, ['set', 'delete']),
+ 'options': (dict, {}, None)
+ },
'kwargs': {
'function': (str, None, None),
'id': (str, None, None),
- 'operation': (str, None, ['set', 'delete', 'add', 'remove', 'info']),
+ 'operation': (str, None, ['set', 'delete', 'add', 'remove', 'remove_regex', 'info']),
'kwargs': (dict, {}, None)
},
'target': {
'target': (str, None, None),
- 'operation': (str, None, ['src_add', 'src_rm', 'tgt_rm', 'tgt_add', 'info']),
+ 'operation': (str, None, ['src_add', 'src_rm', 'target_rm', 'target_add', 'info']),
'sources': (list, [], None),
'subdir': (str, '', None),
'target_type': (str, 'executable', ['both_libraries', 'executable', 'jar', 'library', 'shared_library', 'shared_module', 'static_library']),
- 'debug': (bool, False, None)
}
}
@@ -268,8 +323,8 @@ rewriter_func_kwargs = {
'not_found_message': MTypeStr,
'required': MTypeBool,
'static': MTypeBool,
- 'version': MtypeStrList,
- 'modules': MtypeStrList
+ 'version': MTypeStrList,
+ 'modules': MTypeStrList
},
'target': {
'build_by_default': MTypeBool,
@@ -285,21 +340,24 @@ rewriter_func_kwargs = {
'pie': MTypeBool
},
'project': {
+ 'default_options': MTypeStrList,
'meson_version': MTypeStr,
- 'license': MtypeStrList,
+ 'license': MTypeStrList,
'subproject_dir': MTypeStr,
'version': MTypeStr
}
}
class Rewriter:
- def __init__(self, sourcedir: str, generator: str = 'ninja'):
+ def __init__(self, sourcedir: str, generator: str = 'ninja', skip_errors: bool = False):
self.sourcedir = sourcedir
- self.interpreter = IntrospectionInterpreter(sourcedir, '', generator, visitors = [AstIDGenerator(), AstIndentationGenerator()])
+ self.interpreter = IntrospectionInterpreter(sourcedir, '', generator, visitors = [AstIDGenerator(), AstIndentationGenerator(), AstConditionLevel()])
+ self.skip_errors = skip_errors
self.modefied_nodes = []
self.to_remove_nodes = []
self.to_add_nodes = []
self.functions = {
+ 'default_options': self.process_default_options,
'kwargs': self.process_kwargs,
'target': self.process_target,
}
@@ -321,29 +379,45 @@ class Rewriter:
def print_info(self):
if self.info_dump is None:
return
- # Wrap the dump in magic strings
- print('!!==JSON DUMP: BEGIN==!!')
- print(json.dumps(self.info_dump, indent=2))
- print('!!==JSON DUMP: END==!!')
+ sys.stderr.write(json.dumps(self.info_dump, indent=2))
+
+ def on_error(self):
+ if self.skip_errors:
+ return mlog.cyan('-->'), mlog.yellow('skipping')
+ return mlog.cyan('-->'), mlog.red('aborting')
+
+ def handle_error(self):
+ if self.skip_errors:
+ return None
+ raise MesonException('Rewriting the meson.build failed')
def find_target(self, target: str):
- def check_list(name: str):
+ def check_list(name: str) -> List[BaseNode]:
+ result = []
for i in self.interpreter.targets:
if name == i['name'] or name == i['id']:
- return i
- return None
+ result += [i]
+ return result
- tgt = check_list(target)
- if tgt is not None:
- return tgt
+ targets = check_list(target)
+ if targets:
+ if len(targets) == 1:
+ return targets[0]
+ else:
+ mlog.error('There are multiple targets matching', mlog.bold(target))
+ for i in targets:
+ mlog.error(' -- Target name', mlog.bold(i['name']), 'with ID', mlog.bold(i['id']))
+ mlog.error('Please try again with the unique ID of the target', *self.on_error())
+ self.handle_error()
+ return None
# Check the assignments
+ tgt = None
if target in self.interpreter.assignments:
node = self.interpreter.assignments[target][0]
- if isinstance(node, mparser.FunctionNode):
+ if isinstance(node, FunctionNode):
if node.func_name in ['executable', 'jar', 'library', 'shared_library', 'shared_module', 'static_library', 'both_libraries']:
- name = self.interpreter.flatten_args(node.args)[0]
- tgt = check_list(name)
+ tgt = self.interpreter.assign_vals[target][0]
return tgt
@@ -361,25 +435,74 @@ class Rewriter:
# Check the assignments
if dependency in self.interpreter.assignments:
node = self.interpreter.assignments[dependency][0]
- if isinstance(node, mparser.FunctionNode):
+ if isinstance(node, FunctionNode):
if node.func_name in ['dependency']:
name = self.interpreter.flatten_args(node.args)[0]
dep = check_list(name)
return dep
+ @RequiredKeys(rewriter_keys['default_options'])
+ def process_default_options(self, cmd):
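+        # 'default-options' is sugar for editing the default_options kwarg of
+        # project(): existing 'key=...' entries are dropped via remove_regex
+        # and, for 'set', each value is validated against the known options
+        # before new 'key=value' strings are added back.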
+ # First, remove the old values
+ kwargs_cmd = {
+ 'function': 'project',
+ 'id': "",
+ 'operation': 'remove_regex',
+ 'kwargs': {
+ 'default_options': ['{}=.*'.format(x) for x in cmd['options'].keys()]
+ }
+ }
+ self.process_kwargs(kwargs_cmd)
+
+ # Then add the new values
+ if cmd['operation'] != 'set':
+ return
+
+ kwargs_cmd['operation'] = 'add'
+ kwargs_cmd['kwargs']['default_options'] = []
+
+ cdata = self.interpreter.coredata
+ options = {
+ **cdata.builtins,
+ **cdata.backend_options,
+ **cdata.base_options,
+ **cdata.compiler_options.build,
+ **cdata.user_options
+ }
+
+ for key, val in sorted(cmd['options'].items()):
+ if key not in options:
+ mlog.error('Unknown options', mlog.bold(key), *self.on_error())
+ self.handle_error()
+ continue
+
+ try:
+ val = options[key].validate_value(val)
+ except MesonException as e:
+ mlog.error('Unable to set', mlog.bold(key), mlog.red(str(e)), *self.on_error())
+ self.handle_error()
+ continue
+
+ kwargs_cmd['kwargs']['default_options'] += ['{}={}'.format(key, val)]
+
+ self.process_kwargs(kwargs_cmd)
+
@RequiredKeys(rewriter_keys['kwargs'])
def process_kwargs(self, cmd):
mlog.log('Processing function type', mlog.bold(cmd['function']), 'with id', mlog.cyan("'" + cmd['id'] + "'"))
if cmd['function'] not in rewriter_func_kwargs:
- mlog.error('Unknown function type {} --> skipping'.format(cmd['function']))
- return
+ mlog.error('Unknown function type', cmd['function'], *self.on_error())
+ return self.handle_error()
kwargs_def = rewriter_func_kwargs[cmd['function']]
# Find the function node to modify
node = None
arg_node = None
if cmd['function'] == 'project':
+ if cmd['id'] != '/':
+                mlog.error('The ID for the function type project must be "/"', *self.on_error())
+ self.handle_error()
node = self.interpreter.project_node
arg_node = node.args
elif cmd['function'] == 'target':
@@ -394,21 +517,21 @@ class Rewriter:
arg_node = node.args
if not node:
mlog.error('Unable to find the function node')
- assert(isinstance(node, mparser.FunctionNode))
- assert(isinstance(arg_node, mparser.ArgumentNode))
+ assert(isinstance(node, FunctionNode))
+ assert(isinstance(arg_node, ArgumentNode))
# Print kwargs info
if cmd['operation'] == 'info':
info_data = {}
- for key, val in arg_node.kwargs.items():
+ for key, val in sorted(arg_node.kwargs.items()):
info_data[key] = None
- if isinstance(val, mparser.ElementaryNode):
+ if isinstance(val, ElementaryNode):
info_data[key] = val.value
- elif isinstance(val, mparser.ArrayNode):
+ elif isinstance(val, ArrayNode):
data_list = []
for i in val.args.arguments:
element = None
- if isinstance(i, mparser.ElementaryNode):
+ if isinstance(i, ElementaryNode):
element = i.value
data_list += [element]
info_data[key] = data_list
@@ -418,9 +541,10 @@ class Rewriter:
# Modify the kwargs
num_changed = 0
- for key, val in cmd['kwargs'].items():
+ for key, val in sorted(cmd['kwargs'].items()):
if key not in kwargs_def:
- mlog.error('Cannot modify unknown kwarg --> skipping', mlog.bold(key))
+ mlog.error('Cannot modify unknown kwarg', mlog.bold(key), *self.on_error())
+ self.handle_error()
continue
# Remove the key from the kwargs
@@ -450,6 +574,9 @@ class Rewriter:
elif cmd['operation'] == 'remove':
mlog.log(' -- Removing', mlog.yellow(val_str), 'from', mlog.bold(key))
modifyer.remove_value(val)
+ elif cmd['operation'] == 'remove_regex':
+ mlog.log(' -- Removing all values matching', mlog.yellow(val_str), 'from', mlog.bold(key))
+ modifyer.remove_regex(val)
# Write back the result
arg_node.kwargs[key] = modifyer.get_node()
@@ -458,7 +585,7 @@ class Rewriter:
if num_changed > 0 and node not in self.modefied_nodes:
self.modefied_nodes += [node]
- def find_assignment_node(self, node: mparser) -> AssignmentNode:
+ def find_assignment_node(self, node: BaseNode) -> AssignmentNode:
if hasattr(node, 'ast_id') and node.ast_id in self.interpreter.reverse_assignment:
return self.interpreter.reverse_assignment[node.ast_id]
return None
@@ -467,13 +594,22 @@ class Rewriter:
def process_target(self, cmd):
mlog.log('Processing target', mlog.bold(cmd['target']), 'operation', mlog.cyan(cmd['operation']))
target = self.find_target(cmd['target'])
- if target is None and cmd['operation'] != 'tgt_add':
- mlog.error('Unknown target "{}" --> skipping'.format(cmd['target']))
- if cmd['debug']:
- pprint(self.interpreter.targets)
- return
- if cmd['debug']:
- pprint(target)
+ if target is None and cmd['operation'] != 'target_add':
+ mlog.error('Unknown target', mlog.bold(cmd['target']), *self.on_error())
+ return self.handle_error()
+
+ # Make source paths relative to the current subdir
+ def rel_source(src: str) -> str:
+ subdir = os.path.abspath(os.path.join(self.sourcedir, target['subdir']))
+ if os.path.isabs(src):
+ return os.path.relpath(src, subdir)
+ elif not os.path.exists(src):
+ return src # Trust the user when the source doesn't exist
+ # Make sure that the path is relative to the subdir
+ return os.path.relpath(os.path.abspath(src), subdir)
+
+ if target is not None:
+ cmd['sources'] = [rel_source(x) for x in cmd['sources']]
# Utility function to get a list of the sources from a node
def arg_list_from_node(n):
@@ -488,6 +624,8 @@ class Rewriter:
args = n.arguments
return args
+ to_sort_nodes = []
+
if cmd['operation'] == 'src_add':
node = None
if target['sources']:
@@ -496,23 +634,36 @@ class Rewriter:
node = target['node']
assert(node is not None)
+ # Generate the current source list
+ src_list = []
+ for i in target['sources']:
+ for j in arg_list_from_node(i):
+ if isinstance(j, StringNode):
+ src_list += [j.value]
+
# Generate the new String nodes
to_append = []
- for i in cmd['sources']:
+ for i in sorted(set(cmd['sources'])):
+ if i in src_list:
+ mlog.log(' -- Source', mlog.green(i), 'is already defined for the target --> skipping')
+ continue
mlog.log(' -- Adding source', mlog.green(i), 'at',
mlog.yellow('{}:{}'.format(os.path.join(node.subdir, environment.build_filename), node.lineno)))
token = Token('string', node.subdir, 0, 0, 0, None, i)
to_append += [StringNode(token)]
# Append to the AST at the right place
- if isinstance(node, FunctionNode):
- node.args.arguments += to_append
- elif isinstance(node, ArrayNode):
- node.args.arguments += to_append
+ arg_node = None
+ if isinstance(node, (FunctionNode, ArrayNode)):
+ arg_node = node.args
elif isinstance(node, ArgumentNode):
- node.arguments += to_append
+ arg_node = node
+ assert(arg_node is not None)
+ arg_node.arguments += to_append
# Mark the node as modified
+ if arg_node not in to_sort_nodes and not isinstance(node, FunctionNode):
+ to_sort_nodes += [arg_node]
if node not in self.modefied_nodes:
self.modefied_nodes += [node]
@@ -535,11 +686,9 @@ class Rewriter:
# Remove the found string node from the argument list
arg_node = None
- if isinstance(root, FunctionNode):
+ if isinstance(root, (FunctionNode, ArrayNode)):
arg_node = root.args
- if isinstance(root, ArrayNode):
- arg_node = root.args
- if isinstance(root, ArgumentNode):
+ elif isinstance(root, ArgumentNode):
arg_node = root
assert(arg_node is not None)
mlog.log(' -- Removing source', mlog.green(i), 'from',
@@ -547,37 +696,43 @@ class Rewriter:
arg_node.arguments.remove(string_node)
# Mark the node as modified
+ if arg_node not in to_sort_nodes and not isinstance(root, FunctionNode):
+ to_sort_nodes += [arg_node]
if root not in self.modefied_nodes:
self.modefied_nodes += [root]
- elif cmd['operation'] == 'tgt_add':
+ elif cmd['operation'] == 'target_add':
if target is not None:
- mlog.error('Can not add target', mlog.bold(cmd['target']), 'because it already exists')
- return
+ mlog.error('Can not add target', mlog.bold(cmd['target']), 'because it already exists', *self.on_error())
+ return self.handle_error()
+
+ id_base = re.sub(r'[- ]', '_', cmd['target'])
+            target_id = id_base + ('_exe' if cmd['target_type'] == 'executable' else '_lib')
+ source_id = id_base + '_sources'
# Build src list
src_arg_node = ArgumentNode(Token('string', cmd['subdir'], 0, 0, 0, None, ''))
- src_arr_node = ArrayNode(src_arg_node, 0, 0)
+ src_arr_node = ArrayNode(src_arg_node, 0, 0, 0, 0)
src_far_node = ArgumentNode(Token('string', cmd['subdir'], 0, 0, 0, None, ''))
- src_fun_node = FunctionNode(cmd['subdir'], 0, 0, 'files', src_far_node)
- src_ass_node = AssignmentNode(cmd['subdir'], 0, 0, '{}_src'.format(cmd['target']), src_fun_node)
+ src_fun_node = FunctionNode(cmd['subdir'], 0, 0, 0, 0, 'files', src_far_node)
+ src_ass_node = AssignmentNode(cmd['subdir'], 0, 0, source_id, src_fun_node)
src_arg_node.arguments = [StringNode(Token('string', cmd['subdir'], 0, 0, 0, None, x)) for x in cmd['sources']]
src_far_node.arguments = [src_arr_node]
# Build target
tgt_arg_node = ArgumentNode(Token('string', cmd['subdir'], 0, 0, 0, None, ''))
- tgt_fun_node = FunctionNode(cmd['subdir'], 0, 0, cmd['target_type'], tgt_arg_node)
- tgt_ass_node = AssignmentNode(cmd['subdir'], 0, 0, '{}_tgt'.format(cmd['target']), tgt_fun_node)
+ tgt_fun_node = FunctionNode(cmd['subdir'], 0, 0, 0, 0, cmd['target_type'], tgt_arg_node)
+ tgt_ass_node = AssignmentNode(cmd['subdir'], 0, 0, target_id, tgt_fun_node)
tgt_arg_node.arguments = [
StringNode(Token('string', cmd['subdir'], 0, 0, 0, None, cmd['target'])),
- IdNode(Token('string', cmd['subdir'], 0, 0, 0, None, '{}_src'.format(cmd['target'])))
+ IdNode(Token('string', cmd['subdir'], 0, 0, 0, None, source_id))
]
src_ass_node.accept(AstIndentationGenerator())
tgt_ass_node.accept(AstIndentationGenerator())
self.to_add_nodes += [src_ass_node, tgt_ass_node]
- elif cmd['operation'] == 'tgt_rm':
+ elif cmd['operation'] == 'target_rm':
to_remove = self.find_assignment_node(target['node'])
if to_remove is None:
to_remove = target['node']
@@ -598,6 +753,17 @@ class Rewriter:
}
self.add_info('target', target['id'], test_data)
+ # Sort files
+ for i in to_sort_nodes:
+ convert = lambda text: int(text) if text.isdigit() else text.lower()
+ alphanum_key = lambda key: [convert(c) for c in re.split('([0-9]+)', key)]
+ path_sorter = lambda key: ([(key.count('/') <= idx, alphanum_key(x)) for idx, x in enumerate(key.split('/'))])
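+            # Natural path sort: numeric runs compare as integers ('2.c'
+            # before '10.c') and subdirectory entries group before files of
+            # the current dir, e.g. 'sub/2.c', 'sub/10.c', 'a.c'.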
+
+ unknown = [x for x in i.arguments if not isinstance(x, StringNode)]
+ sources = [x for x in i.arguments if isinstance(x, StringNode)]
+ sources = sorted(sources, key=lambda x: path_sorter(x.value))
+ i.arguments = unknown + sources
+
def process(self, cmd):
if 'type' not in cmd:
raise RewriterException('Command has no key "type"')
@@ -614,7 +780,7 @@ class Rewriter:
# Sort based on line and column in reversed order
work_nodes = [{'node': x, 'action': 'modify'} for x in self.modefied_nodes]
work_nodes += [{'node': x, 'action': 'rm'} for x in self.to_remove_nodes]
- work_nodes = list(sorted(work_nodes, key=lambda x: x['node'].lineno * 1000 + x['node'].colno, reverse=True))
+ work_nodes = list(sorted(work_nodes, key=lambda x: (x['node'].lineno, x['node'].colno), reverse=True))
work_nodes += [{'node': x, 'action': 'add'} for x in self.to_add_nodes]
# Generating the new replacement string
@@ -671,31 +837,8 @@ class Rewriter:
col = node.colno
start = offsets[line] + col
end = start
- if isinstance(node, ArrayNode):
- if raw[end] != '[':
- mlog.warning('Internal error: expected "[" at {}:{} but got "{}"'.format(line, col, raw[end]))
- return
- counter = 1
- while counter > 0:
- end += 1
- if raw[end] == '[':
- counter += 1
- elif raw[end] == ']':
- counter -= 1
- end += 1
-
- elif isinstance(node, FunctionNode):
- while raw[end] != '(':
- end += 1
- end += 1
- counter = 1
- while counter > 0:
- end += 1
- if raw[end] == '(':
- counter += 1
- elif raw[end] == ')':
- counter -= 1
- end += 1
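+ # The parser now records each node's end position, so the old
+ # bracket-counting scan is no longer needed.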
+ if isinstance(node, (ArrayNode, FunctionNode)):
+ end = offsets[node.end_lineno - 1] + node.end_colno
# Only removal is supported for assignments
elif isinstance(node, AssignmentNode) and i['action'] == 'rm':
@@ -722,23 +865,92 @@ class Rewriter:
with open(val['path'], 'w') as fp:
fp.write(val['raw'])
-def run(options):
- rewriter = Rewriter(options.sourcedir)
- rewriter.analyze_meson()
- if os.path.exists(options.command):
- with open(options.command, 'r') as fp:
- commands = json.load(fp)
- else:
- commands = json.loads(options.command)
-
- if not isinstance(commands, list):
- raise TypeError('Command is not a list')
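+# Map CLI operation names to the operation strings used in the JSON commands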
+target_operation_map = {
+ 'add': 'src_add',
+ 'rm': 'src_rm',
+ 'add_target': 'target_add',
+ 'rm_target': 'target_rm',
+ 'info': 'info',
+}
- for i in commands:
- if not isinstance(i, object):
- raise TypeError('Command is not an object')
- rewriter.process(i)
+def list_to_dict(in_list: List[str]) -> Dict[str, str]:
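+ # e.g. ['version', '1.0', 'license', 'MIT'] -> {'version': '1.0', 'license': 'MIT'}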
+ if len(in_list) % 2 != 0:
+ raise TypeError('An even number of arguments is required')
+ result = {}
+ for i in range(0, len(in_list), 2):
+ result[in_list[i]] = in_list[i + 1]
+ return result
+
+def generate_target(options) -> List[dict]:
+ return [{
+ 'type': 'target',
+ 'target': options.target,
+ 'operation': target_operation_map[options.operation],
+ 'sources': options.sources,
+ 'subdir': options.subdir,
+ 'target_type': options.tgt_type,
+ }]
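+# For example, CLI options with target='foo' and operation='add' yield
+# [{'type': 'target', 'target': 'foo', 'operation': 'src_add', ...}].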
+
+def generate_kwargs(options) -> List[dict]:
+ return [{
+ 'type': 'kwargs',
+ 'function': options.function,
+ 'id': options.id,
+ 'operation': options.operation,
+ 'kwargs': list_to_dict(options.kwargs),
+ }]
+
+def generate_def_opts(options) -> List[dict]:
+ return [{
+ 'type': 'default_options',
+ 'operation': options.operation,
+ 'options': list_to_dict(options.options),
+ }]
+
+def generate_cmd(options) -> List[dict]:
+ if os.path.exists(options.json):
+ with open(options.json, 'r') as fp:
+ return json.load(fp)
+ else:
+ return json.loads(options.json)
+
+# Map options.type to the function that generates the corresponding commands
+cli_type_map = {
+ 'target': generate_target,
+ 'tgt': generate_target,
+ 'kwargs': generate_kwargs,
+ 'default-options': generate_def_opts,
+ 'def': generate_def_opts,
+ 'command': generate_cmd,
+ 'cmd': generate_cmd,
+}
- rewriter.apply_changes()
- rewriter.print_info()
- return 0
+def run(options):
+ if not options.verbose:
+ mlog.set_quiet()
+
+ try:
+ rewriter = Rewriter(options.sourcedir, skip_errors=options.skip)
+ rewriter.analyze_meson()
+
+ if options.type is None:
+ mlog.error('No command specified')
+ return 1
+
+ commands = cli_type_map[options.type](options)
+
+ if not isinstance(commands, list):
+ raise TypeError('Command is not a list')
+
+ for i in commands:
+ if not isinstance(i, dict):
+ raise TypeError('Command is not an object')
+ rewriter.process(i)
+
+ rewriter.apply_changes()
+ rewriter.print_info()
+ return 0
+ finally:
+ mlog.set_verbose()
diff --git a/run_project_tests.py b/run_project_tests.py
index c35928f..bb0e062 100755
--- a/run_project_tests.py
+++ b/run_project_tests.py
@@ -440,6 +440,14 @@ def have_d_compiler():
elif shutil.which("gdc"):
return True
elif shutil.which("dmd"):
+ # The Windows installer sometimes produces a DMD install
+ # that exists but segfaults every time the compiler is run.
+ # Don't know why. Don't know how to fix. Skip in this case.
+ cp = subprocess.run(['dmd', '--version'],
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE)
+ if cp.stdout == b'':
+ return False
return True
return False
@@ -490,6 +498,10 @@ def skippable(suite, test):
if test.endswith('10 gtk-doc'):
return True
+ # NetCDF is not in the CI image
+ if test.endswith('netcdf'):
+ return True
+
# No frameworks test should be skipped on linux CI, as we expect all
# prerequisites to be installed
if mesonlib.is_linux():
diff --git a/run_unittests.py b/run_unittests.py
index ba0527d..690ea33 100755
--- a/run_unittests.py
+++ b/run_unittests.py
@@ -27,6 +27,7 @@ import unittest
import platform
import pickle
import functools
+import io
from itertools import chain
from unittest import mock
from configparser import ConfigParser
@@ -54,6 +55,8 @@ from mesonbuild.dependencies import PkgConfigDependency, ExternalProgram
from mesonbuild.build import Target
import mesonbuild.modules.pkgconfig
+from mesonbuild.mtest import TAPParser, TestResult
+
from run_tests import (
Backend, FakeBuild, FakeCompilerOptions,
ensure_backend_detects_changes, exe_suffix, get_backend_commands,
@@ -3226,6 +3229,7 @@ recommended as it is not supported on some platforms''')
expected = {
'descriptive_name': 'proj',
'version': 'undefined',
+ 'subproject_dir': 'subprojects',
'subprojects': [
{
'descriptive_name': 'sub',
@@ -3236,6 +3240,34 @@ recommended as it is not supported on some platforms''')
}
self.assertDictEqual(res, expected)
+ def test_introspection_target_subproject(self):
+ testdir = os.path.join(self.common_test_dir, '46 subproject')
+ self.init(testdir)
+ res = self.introspect('--targets')
+
+ expected = {
+ 'sublib': 'sublib',
+ 'simpletest': 'sublib',
+ 'user': None
+ }
+
+ for entry in res:
+ name = entry['name']
+ self.assertEqual(entry['subproject'], expected[name])
+
+ def test_introspect_projectinfo_subproject_dir(self):
+ testdir = os.path.join(self.common_test_dir, '79 custom subproject dir')
+ self.init(testdir)
+ res = self.introspect('--projectinfo')
+
+ self.assertEqual(res['subproject_dir'], 'custom_subproject_dir')
+
+ def test_introspect_projectinfo_subproject_dir_from_source(self):
+ testfile = os.path.join(self.common_test_dir, '79 custom subproject dir', 'meson.build')
+ res = self.introspect_directory(testfile, '--projectinfo')
+
+ self.assertEqual(res['subproject_dir'], 'custom_subproject_dir')
+
@skipIfNoExecutable('clang-format')
def test_clang_format(self):
if self.backend is not Backend.ninja:
@@ -3389,7 +3421,7 @@ recommended as it is not supported on some platforms''')
self.assertListEqual(dependencies_to_find, [])
# Check projectinfo
- self.assertDictEqual(res['projectinfo'], {'version': '1.2.3', 'descriptive_name': 'introspection', 'subprojects': []})
+ self.assertDictEqual(res['projectinfo'], {'version': '1.2.3', 'descriptive_name': 'introspection', 'subproject_dir': 'subprojects', 'subprojects': []})
# Check targets
targets_to_find = {
@@ -3479,6 +3511,70 @@ recommended as it is not supported on some platforms''')
self.assertListEqual(res1, res2)
+ def test_introspect_targets_from_source(self):
+ testdir = os.path.join(self.unit_test_dir, '52 introspection')
+ testfile = os.path.join(testdir, 'meson.build')
+ introfile = os.path.join(self.builddir, 'meson-info', 'intro-targets.json')
+ self.init(testdir)
+ self.assertPathExists(introfile)
+ with open(introfile, 'r') as fp:
+ res_wb = json.load(fp)
+
+ res_nb = self.introspect_directory(testfile, ['--targets'] + self.meson_args)
+
+ # Account for differences in output
+ for i in res_wb:
+ i['filename'] = [os.path.relpath(x, self.builddir) for x in i['filename']]
+ if 'install_filename' in i:
+ del i['install_filename']
+
+ sources = []
+ for j in i['target_sources']:
+ sources += j['sources']
+ i['target_sources'] = [{
+ 'language': 'unknown',
+ 'compiler': [],
+ 'parameters': [],
+ 'sources': sources,
+ 'generated_sources': []
+ }]
+
+ self.maxDiff = None
+ self.assertListEqual(res_nb, res_wb)
+
+ def test_introspect_dependencies_from_source(self):
+ testdir = os.path.join(self.unit_test_dir, '52 introspection')
+ testfile = os.path.join(testdir, 'meson.build')
+ res_nb = self.introspect_directory(testfile, ['--scan-dependencies'] + self.meson_args)
+ expected = [
+ {
+ 'name': 'threads',
+ 'required': True,
+ 'has_fallback': False,
+ 'conditional': False
+ },
+ {
+ 'name': 'zlib',
+ 'required': False,
+ 'has_fallback': False,
+ 'conditional': False
+ },
+ {
+ 'name': 'somethingthatdoesnotexist',
+ 'required': True,
+ 'has_fallback': False,
+ 'conditional': True
+ },
+ {
+ 'name': 'look_i_have_a_fallback',
+ 'required': True,
+ 'has_fallback': True,
+ 'conditional': True
+ }
+ ]
+ self.maxDiff = None
+ self.assertListEqual(res_nb, expected)
+
class FailureTests(BasePlatformTests):
'''
Tests that test failure conditions. Build files here should be dynamically
@@ -3747,6 +3843,7 @@ class FailureTests(BasePlatformTests):
"""Subproject "subprojects/not-found-subproject" disabled can't get_variable on it.""")
+@unittest.skipUnless(is_windows() or is_cygwin(), "requires Windows (or Windows via Cygwin)")
class WindowsTests(BasePlatformTests):
'''
Tests that should run on Cygwin, MinGW, and MSVC
@@ -3857,6 +3954,7 @@ class WindowsTests(BasePlatformTests):
return
self.build()
+@unittest.skipUnless(is_osx(), "requires Darwin")
class DarwinTests(BasePlatformTests):
'''
Tests that should run on macOS
@@ -3953,6 +4051,7 @@ class DarwinTests(BasePlatformTests):
del os.environ["LDFLAGS"]
+@unittest.skipUnless(not is_windows(), "requires something Unix-like")
class LinuxlikeTests(BasePlatformTests):
'''
Tests that should run on Linux, macOS, and *BSD
@@ -4931,6 +5030,10 @@ endian = 'little'
self.assertEqual(max_count, 1, 'Export dynamic incorrectly deduplicated.')
+def should_run_cross_arm_tests():
+ return shutil.which('arm-linux-gnueabihf-gcc') and not platform.machine().lower().startswith('arm')
+
+@unittest.skipUnless(not is_windows() and should_run_cross_arm_tests(), "requires ability to cross compile to ARM")
class LinuxCrossArmTests(BasePlatformTests):
'''
Tests that cross-compilation to Linux/ARM works
@@ -4979,6 +5082,10 @@ class LinuxCrossArmTests(BasePlatformTests):
self.assertTrue(False, 'Option libdir not in introspect data.')
+def should_run_cross_mingw_tests():
+ return shutil.which('x86_64-w64-mingw32-gcc') and not (is_windows() or is_cygwin())
+
+@unittest.skipUnless(not is_windows() and should_run_cross_mingw_tests(), "requires ability to cross compile with MinGW")
class LinuxCrossMingwTests(BasePlatformTests):
'''
Tests that cross-compilation to Windows/MinGW works
@@ -5080,8 +5187,6 @@ class PythonTests(BasePlatformTests):
class RewriterTests(BasePlatformTests):
- data_regex = re.compile(r'.*\n!!==JSON DUMP: BEGIN==!!\n(.*)\n!!==JSON DUMP: END==!!\n', re.MULTILINE | re.DOTALL)
-
def setUp(self):
super().setUp()
self.maxDiff = None
@@ -5089,32 +5194,35 @@ class RewriterTests(BasePlatformTests):
def prime(self, dirname):
copy_tree(os.path.join(self.rewrite_test_dir, dirname), self.builddir)
- def rewrite(self, directory, args):
+ def rewrite_raw(self, directory, args):
if isinstance(args, str):
args = [args]
- command = self.rewrite_command + ['--sourcedir', directory] + args
- p = subprocess.run(command, stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
+ command = self.rewrite_command + ['--verbose', '--skip', '--sourcedir', directory] + args
+ p = subprocess.run(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
universal_newlines=True, timeout=60)
+ print('STDOUT:')
print(p.stdout)
+ print('STDERR:')
+ print(p.stderr)
if p.returncode != 0:
if 'MESON_SKIP_TEST' in p.stdout:
raise unittest.SkipTest('Project requested skipping.')
raise subprocess.CalledProcessError(p.returncode, command, output=p.stdout)
- return p.stdout
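+ # The rewriter now prints its machine-readable result as JSON on
+ # stderr, replacing the old JSON DUMP markers parsed from stdout.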
+ if not p.stderr:
+ return {}
+ return json.loads(p.stderr)
- def extract_test_data(self, out):
- match = RewriterTests.data_regex.match(out)
- result = {}
- if match:
- result = json.loads(match.group(1))
- return result
+ def rewrite(self, directory, args):
+ if isinstance(args, str):
+ args = [args]
+ return self.rewrite_raw(directory, ['command'] + args)
def test_target_source_list(self):
self.prime('1 basic')
out = self.rewrite(self.builddir, os.path.join(self.builddir, 'info.json'))
- out = self.extract_test_data(out)
expected = {
'target': {
+ 'trivialprog0@exe': {'name': 'trivialprog0', 'sources': ['main.cpp', 'fileA.cpp', 'fileB.cpp', 'fileC.cpp']},
'trivialprog1@exe': {'name': 'trivialprog1', 'sources': ['main.cpp', 'fileA.cpp']},
'trivialprog2@exe': {'name': 'trivialprog2', 'sources': ['fileB.cpp', 'fileC.cpp']},
'trivialprog3@exe': {'name': 'trivialprog3', 'sources': ['main.cpp', 'fileA.cpp']},
@@ -5131,33 +5239,42 @@ class RewriterTests(BasePlatformTests):
def test_target_add_sources(self):
self.prime('1 basic')
out = self.rewrite(self.builddir, os.path.join(self.builddir, 'addSrc.json'))
- out = self.extract_test_data(out)
expected = {
'target': {
- 'trivialprog1@exe': {'name': 'trivialprog1', 'sources': ['main.cpp', 'fileA.cpp', 'a1.cpp', 'a2.cpp', 'a6.cpp']},
- 'trivialprog2@exe': {'name': 'trivialprog2', 'sources': ['fileB.cpp', 'fileC.cpp', 'a7.cpp']},
- 'trivialprog3@exe': {'name': 'trivialprog3', 'sources': ['main.cpp', 'fileA.cpp', 'a5.cpp']},
- 'trivialprog4@exe': {'name': 'trivialprog4', 'sources': ['main.cpp', 'a5.cpp', 'fileA.cpp']},
- 'trivialprog5@exe': {'name': 'trivialprog5', 'sources': ['main.cpp', 'a3.cpp', 'fileB.cpp', 'fileC.cpp', 'a7.cpp']},
+ 'trivialprog0@exe': {'name': 'trivialprog0', 'sources': ['a1.cpp', 'a2.cpp', 'a6.cpp', 'fileA.cpp', 'main.cpp', 'a7.cpp', 'fileB.cpp', 'fileC.cpp']},
+ 'trivialprog1@exe': {'name': 'trivialprog1', 'sources': ['a1.cpp', 'a2.cpp', 'a6.cpp', 'fileA.cpp', 'main.cpp']},
+ 'trivialprog2@exe': {'name': 'trivialprog2', 'sources': ['a7.cpp', 'fileB.cpp', 'fileC.cpp']},
+ 'trivialprog3@exe': {'name': 'trivialprog3', 'sources': ['a5.cpp', 'fileA.cpp', 'main.cpp']},
+ 'trivialprog4@exe': {'name': 'trivialprog4', 'sources': ['a5.cpp', 'main.cpp', 'fileA.cpp']},
+ 'trivialprog5@exe': {'name': 'trivialprog5', 'sources': ['a3.cpp', 'main.cpp', 'a7.cpp', 'fileB.cpp', 'fileC.cpp']},
'trivialprog6@exe': {'name': 'trivialprog6', 'sources': ['main.cpp', 'fileA.cpp', 'a4.cpp']},
- 'trivialprog7@exe': {'name': 'trivialprog7', 'sources': ['fileB.cpp', 'fileC.cpp', 'main.cpp', 'fileA.cpp', 'a1.cpp', 'a2.cpp', 'a6.cpp']},
- 'trivialprog8@exe': {'name': 'trivialprog8', 'sources': ['main.cpp', 'fileA.cpp', 'a1.cpp', 'a2.cpp', 'a6.cpp']},
- 'trivialprog9@exe': {'name': 'trivialprog9', 'sources': ['main.cpp', 'fileA.cpp', 'a1.cpp', 'a2.cpp', 'a6.cpp']},
+ 'trivialprog7@exe': {'name': 'trivialprog7', 'sources': ['fileB.cpp', 'fileC.cpp', 'a1.cpp', 'a2.cpp', 'a6.cpp', 'fileA.cpp', 'main.cpp']},
+ 'trivialprog8@exe': {'name': 'trivialprog8', 'sources': ['a1.cpp', 'a2.cpp', 'a6.cpp', 'fileA.cpp', 'main.cpp']},
+ 'trivialprog9@exe': {'name': 'trivialprog9', 'sources': ['a1.cpp', 'a2.cpp', 'a6.cpp', 'fileA.cpp', 'main.cpp']},
}
}
self.assertDictEqual(out, expected)
# Check the written file
out = self.rewrite(self.builddir, os.path.join(self.builddir, 'info.json'))
- out = self.extract_test_data(out)
+ self.assertDictEqual(out, expected)
+
+ def test_target_add_sources_abs(self):
+ self.prime('1 basic')
+ abs_src = [os.path.join(self.builddir, x) for x in ['a1.cpp', 'a2.cpp', 'a6.cpp']]
+ add = json.dumps([{"type": "target", "target": "trivialprog1", "operation": "src_add", "sources": abs_src}])
+ inf = json.dumps([{"type": "target", "target": "trivialprog1", "operation": "info"}])
+ self.rewrite(self.builddir, add)
+ out = self.rewrite(self.builddir, inf)
+ expected = {'target': {'trivialprog1@exe': {'name': 'trivialprog1', 'sources': ['a1.cpp', 'a2.cpp', 'a6.cpp', 'fileA.cpp', 'main.cpp']}}}
self.assertDictEqual(out, expected)
def test_target_remove_sources(self):
self.prime('1 basic')
out = self.rewrite(self.builddir, os.path.join(self.builddir, 'rmSrc.json'))
- out = self.extract_test_data(out)
expected = {
'target': {
+ 'trivialprog0@exe': {'name': 'trivialprog0', 'sources': ['main.cpp', 'fileC.cpp']},
'trivialprog1@exe': {'name': 'trivialprog1', 'sources': ['main.cpp']},
'trivialprog2@exe': {'name': 'trivialprog2', 'sources': ['fileC.cpp']},
'trivialprog3@exe': {'name': 'trivialprog3', 'sources': ['main.cpp']},
@@ -5173,26 +5290,22 @@ class RewriterTests(BasePlatformTests):
# Check the written file
out = self.rewrite(self.builddir, os.path.join(self.builddir, 'info.json'))
- out = self.extract_test_data(out)
self.assertDictEqual(out, expected)
def test_target_subdir(self):
self.prime('2 subdirs')
out = self.rewrite(self.builddir, os.path.join(self.builddir, 'addSrc.json'))
- out = self.extract_test_data(out)
expected = {'name': 'something', 'sources': ['first.c', 'second.c', 'third.c']}
self.assertDictEqual(list(out['target'].values())[0], expected)
# Check the written file
out = self.rewrite(self.builddir, os.path.join(self.builddir, 'info.json'))
- out = self.extract_test_data(out)
self.assertDictEqual(list(out['target'].values())[0], expected)
def test_target_remove(self):
self.prime('1 basic')
self.rewrite(self.builddir, os.path.join(self.builddir, 'rmTgt.json'))
out = self.rewrite(self.builddir, os.path.join(self.builddir, 'info.json'))
- out = self.extract_test_data(out)
expected = {
'target': {
@@ -5211,10 +5324,10 @@ class RewriterTests(BasePlatformTests):
self.prime('1 basic')
self.rewrite(self.builddir, os.path.join(self.builddir, 'addTgt.json'))
out = self.rewrite(self.builddir, os.path.join(self.builddir, 'info.json'))
- out = self.extract_test_data(out)
expected = {
'target': {
+ 'trivialprog0@exe': {'name': 'trivialprog0', 'sources': ['main.cpp', 'fileA.cpp', 'fileB.cpp', 'fileC.cpp']},
'trivialprog1@exe': {'name': 'trivialprog1', 'sources': ['main.cpp', 'fileA.cpp']},
'trivialprog2@exe': {'name': 'trivialprog2', 'sources': ['fileB.cpp', 'fileC.cpp']},
'trivialprog3@exe': {'name': 'trivialprog3', 'sources': ['main.cpp', 'fileA.cpp']},
@@ -5233,24 +5346,75 @@ class RewriterTests(BasePlatformTests):
self.prime('2 subdirs')
self.rewrite(self.builddir, os.path.join(self.builddir, 'rmTgt.json'))
out = self.rewrite(self.builddir, os.path.join(self.builddir, 'info.json'))
- out = self.extract_test_data(out)
self.assertDictEqual(out, {})
- def test_tatrget_add_subdir(self):
+ def test_target_add_subdir(self):
self.prime('2 subdirs')
self.rewrite(self.builddir, os.path.join(self.builddir, 'addTgt.json'))
out = self.rewrite(self.builddir, os.path.join(self.builddir, 'info.json'))
- out = self.extract_test_data(out)
expected = {'name': 'something', 'sources': ['first.c', 'second.c']}
- self.assertDictEqual(list(out['target'].values())[0], expected)
+ self.assertDictEqual(out['target']['94b671c@@something@exe'], expected)
+
+ def test_target_source_sorting(self):
+ self.prime('5 sorting')
+ add_json = json.dumps([{'type': 'target', 'target': 'exe1', 'operation': 'src_add', 'sources': ['a666.c']}])
+ inf_json = json.dumps([{'type': 'target', 'target': 'exe1', 'operation': 'info'}])
+ out = self.rewrite(self.builddir, add_json)
+ out = self.rewrite(self.builddir, inf_json)
+ expected = {
+ 'target': {
+ 'exe1@exe': {
+ 'name': 'exe1',
+ 'sources': [
+ 'aaa/a/a1.c',
+ 'aaa/b/b1.c',
+ 'aaa/b/b2.c',
+ 'aaa/f1.c',
+ 'aaa/f2.c',
+ 'aaa/f3.c',
+ 'bbb/a/b1.c',
+ 'bbb/b/b2.c',
+ 'bbb/c1/b5.c',
+ 'bbb/c2/b7.c',
+ 'bbb/c10/b6.c',
+ 'bbb/a4.c',
+ 'bbb/b3.c',
+ 'bbb/b4.c',
+ 'bbb/b5.c',
+ 'a1.c',
+ 'a2.c',
+ 'a3.c',
+ 'a10.c',
+ 'a20.c',
+ 'a30.c',
+ 'a100.c',
+ 'a101.c',
+ 'a110.c',
+ 'a210.c',
+ 'a666.c',
+ 'b1.c',
+ 'c2.c'
+ ]
+ }
+ }
+ }
+ self.assertDictEqual(out, expected)
+
+ def test_target_same_name_skip(self):
+ self.prime('4 same name targets')
+ out = self.rewrite(self.builddir, os.path.join(self.builddir, 'addSrc.json'))
+ out = self.rewrite(self.builddir, os.path.join(self.builddir, 'info.json'))
+ expected = {'name': 'myExe', 'sources': ['main.cpp']}
+ self.assertEqual(len(out['target']), 2)
+ for _, val in out['target'].items():
+ self.assertDictEqual(expected, val)
def test_kwargs_info(self):
self.prime('3 kwargs')
out = self.rewrite(self.builddir, os.path.join(self.builddir, 'info.json'))
- out = self.extract_test_data(out)
expected = {
'kwargs': {
- 'project#': {'version': '0.0.1'},
+ 'project#/': {'version': '0.0.1'},
'target#tgt1': {'build_by_default': True},
'dependency#dep1': {'required': False}
}
@@ -5261,10 +5425,9 @@ class RewriterTests(BasePlatformTests):
self.prime('3 kwargs')
self.rewrite(self.builddir, os.path.join(self.builddir, 'set.json'))
out = self.rewrite(self.builddir, os.path.join(self.builddir, 'info.json'))
- out = self.extract_test_data(out)
expected = {
'kwargs': {
- 'project#': {'version': '0.0.2', 'meson_version': '0.50.0', 'license': ['GPL', 'MIT']},
+ 'project#/': {'version': '0.0.2', 'meson_version': '0.50.0', 'license': ['GPL', 'MIT']},
'target#tgt1': {'build_by_default': False, 'build_rpath': '/usr/local', 'dependencies': 'dep1'},
'dependency#dep1': {'required': True, 'method': 'cmake'}
}
@@ -5275,10 +5438,9 @@ class RewriterTests(BasePlatformTests):
self.prime('3 kwargs')
self.rewrite(self.builddir, os.path.join(self.builddir, 'add.json'))
out = self.rewrite(self.builddir, os.path.join(self.builddir, 'info.json'))
- out = self.extract_test_data(out)
expected = {
'kwargs': {
- 'project#': {'version': '0.0.1', 'license': ['GPL', 'MIT', 'BSD']},
+ 'project#/': {'version': '0.0.1', 'license': ['GPL', 'MIT', 'BSD']},
'target#tgt1': {'build_by_default': True},
'dependency#dep1': {'required': False}
}
@@ -5289,10 +5451,22 @@ class RewriterTests(BasePlatformTests):
self.prime('3 kwargs')
self.rewrite(self.builddir, os.path.join(self.builddir, 'remove.json'))
out = self.rewrite(self.builddir, os.path.join(self.builddir, 'info.json'))
- out = self.extract_test_data(out)
expected = {
'kwargs': {
- 'project#': {'version': '0.0.1', 'license': 'GPL'},
+ 'project#/': {'version': '0.0.1', 'license': 'GPL'},
+ 'target#tgt1': {'build_by_default': True},
+ 'dependency#dep1': {'required': False}
+ }
+ }
+ self.assertDictEqual(out, expected)
+
+ def test_kwargs_remove_regex(self):
+ self.prime('3 kwargs')
+ self.rewrite(self.builddir, os.path.join(self.builddir, 'remove_regex.json'))
+ out = self.rewrite(self.builddir, os.path.join(self.builddir, 'info.json'))
+ expected = {
+ 'kwargs': {
+ 'project#/': {'version': '0.0.1', 'default_options': ['buildtype=release', 'debug=true']},
'target#tgt1': {'build_by_default': True},
'dependency#dep1': {'required': False}
}
@@ -5303,16 +5477,41 @@ class RewriterTests(BasePlatformTests):
self.prime('3 kwargs')
self.rewrite(self.builddir, os.path.join(self.builddir, 'delete.json'))
out = self.rewrite(self.builddir, os.path.join(self.builddir, 'info.json'))
- out = self.extract_test_data(out)
expected = {
'kwargs': {
- 'project#': {},
+ 'project#/': {},
'target#tgt1': {},
'dependency#dep1': {'required': False}
}
}
self.assertDictEqual(out, expected)
+ def test_default_options_set(self):
+ self.prime('3 kwargs')
+ self.rewrite(self.builddir, os.path.join(self.builddir, 'defopts_set.json'))
+ out = self.rewrite(self.builddir, os.path.join(self.builddir, 'info.json'))
+ expected = {
+ 'kwargs': {
+ 'project#/': {'version': '0.0.1', 'default_options': ['buildtype=release', 'debug=True', 'cpp_std=c++11']},
+ 'target#tgt1': {'build_by_default': True},
+ 'dependency#dep1': {'required': False}
+ }
+ }
+ self.assertDictEqual(out, expected)
+
+ def test_default_options_delete(self):
+ self.prime('3 kwargs')
+ self.rewrite(self.builddir, os.path.join(self.builddir, 'defopts_delete.json'))
+ out = self.rewrite(self.builddir, os.path.join(self.builddir, 'info.json'))
+ expected = {
+ 'kwargs': {
+ 'project#/': {'version': '0.0.1', 'default_options': ['cpp_std=c++14', 'debug=true']},
+ 'target#tgt1': {'build_by_default': True},
+ 'dependency#dep1': {'required': False}
+ }
+ }
+ self.assertDictEqual(out, expected)
+
class NativeFileTests(BasePlatformTests):
def setUp(self):
@@ -5646,6 +5845,272 @@ class CrossFileTests(BasePlatformTests):
'-Ddef_sysconfdir=sysconfbar'])
+class TAPParserTests(unittest.TestCase):
+ def assert_test(self, events, **kwargs):
+ if 'explanation' not in kwargs:
+ kwargs['explanation'] = None
+ self.assertEqual(next(events), TAPParser.Test(**kwargs))
+
+ def assert_plan(self, events, **kwargs):
+ if 'skipped' not in kwargs:
+ kwargs['skipped'] = False
+ if 'explanation' not in kwargs:
+ kwargs['explanation'] = None
+ self.assertEqual(next(events), TAPParser.Plan(**kwargs))
+
+ def assert_version(self, events, **kwargs):
+ self.assertEqual(next(events), TAPParser.Version(**kwargs))
+
+ def assert_error(self, events):
+ self.assertEqual(type(next(events)), TAPParser.Error)
+
+ def assert_bailout(self, events, **kwargs):
+ self.assertEqual(next(events), TAPParser.Bailout(**kwargs))
+
+ def assert_last(self, events):
+ with self.assertRaises(StopIteration):
+ next(events)
+
+ def parse_tap(self, s):
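+ # TAPParser.parse() yields a stream of Version, Plan, Test, Error and
+ # Bailout events, which the assert_* helpers above consume.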
+ parser = TAPParser(io.StringIO(s))
+ return iter(parser.parse())
+
+ def parse_tap_v13(self, s):
+ events = self.parse_tap('TAP version 13\n' + s)
+ self.assert_version(events, version=13)
+ return events
+
+ def test_empty(self):
+ events = self.parse_tap('')
+ self.assert_last(events)
+
+ def test_empty_plan(self):
+ events = self.parse_tap('1..0')
+ self.assert_plan(events, count=0, late=False, skipped=True)
+ self.assert_last(events)
+
+ def test_plan_directive(self):
+ events = self.parse_tap('1..0 # skipped for some reason')
+ self.assert_plan(events, count=0, late=False, skipped=True,
+ explanation='for some reason')
+ self.assert_last(events)
+
+ events = self.parse_tap('1..1 # skipped for some reason\nok 1')
+ self.assert_error(events)
+ self.assert_plan(events, count=1, late=False, skipped=True,
+ explanation='for some reason')
+ self.assert_test(events, number=1, name='', result=TestResult.OK)
+ self.assert_last(events)
+
+ events = self.parse_tap('1..1 # todo not supported here\nok 1')
+ self.assert_error(events)
+ self.assert_plan(events, count=1, late=False, skipped=False,
+ explanation='not supported here')
+ self.assert_test(events, number=1, name='', result=TestResult.OK)
+ self.assert_last(events)
+
+ def test_one_test_ok(self):
+ events = self.parse_tap('ok')
+ self.assert_test(events, number=1, name='', result=TestResult.OK)
+ self.assert_last(events)
+
+ def test_one_test_with_number(self):
+ events = self.parse_tap('ok 1')
+ self.assert_test(events, number=1, name='', result=TestResult.OK)
+ self.assert_last(events)
+
+ def test_one_test_with_name(self):
+ events = self.parse_tap('ok 1 abc')
+ self.assert_test(events, number=1, name='abc', result=TestResult.OK)
+ self.assert_last(events)
+
+ def test_one_test_not_ok(self):
+ events = self.parse_tap('not ok')
+ self.assert_test(events, number=1, name='', result=TestResult.FAIL)
+ self.assert_last(events)
+
+ def test_one_test_todo(self):
+ events = self.parse_tap('not ok 1 abc # TODO')
+ self.assert_test(events, number=1, name='abc', result=TestResult.EXPECTEDFAIL)
+ self.assert_last(events)
+
+ events = self.parse_tap('ok 1 abc # TODO')
+ self.assert_test(events, number=1, name='abc', result=TestResult.UNEXPECTEDPASS)
+ self.assert_last(events)
+
+ def test_one_test_skip(self):
+ events = self.parse_tap('ok 1 abc # SKIP')
+ self.assert_test(events, number=1, name='abc', result=TestResult.SKIP)
+ self.assert_last(events)
+
+ def test_one_test_skip_failure(self):
+ events = self.parse_tap('not ok 1 abc # SKIP')
+ self.assert_test(events, number=1, name='abc', result=TestResult.FAIL)
+ self.assert_last(events)
+
+ def test_many_early_plan(self):
+ events = self.parse_tap('1..4\nok 1\nnot ok 2\nok 3\nnot ok 4')
+ self.assert_plan(events, count=4, late=False)
+ self.assert_test(events, number=1, name='', result=TestResult.OK)
+ self.assert_test(events, number=2, name='', result=TestResult.FAIL)
+ self.assert_test(events, number=3, name='', result=TestResult.OK)
+ self.assert_test(events, number=4, name='', result=TestResult.FAIL)
+ self.assert_last(events)
+
+ def test_many_late_plan(self):
+ events = self.parse_tap('ok 1\nnot ok 2\nok 3\nnot ok 4\n1..4')
+ self.assert_test(events, number=1, name='', result=TestResult.OK)
+ self.assert_test(events, number=2, name='', result=TestResult.FAIL)
+ self.assert_test(events, number=3, name='', result=TestResult.OK)
+ self.assert_test(events, number=4, name='', result=TestResult.FAIL)
+ self.assert_plan(events, count=4, late=True)
+ self.assert_last(events)
+
+ def test_directive_case(self):
+ events = self.parse_tap('ok 1 abc # skip')
+ self.assert_test(events, number=1, name='abc', result=TestResult.SKIP)
+ self.assert_last(events)
+
+ events = self.parse_tap('ok 1 abc # ToDo')
+ self.assert_test(events, number=1, name='abc', result=TestResult.UNEXPECTEDPASS)
+ self.assert_last(events)
+
+ def test_directive_explanation(self):
+ events = self.parse_tap('ok 1 abc # skip why')
+ self.assert_test(events, number=1, name='abc', result=TestResult.SKIP,
+ explanation='why')
+ self.assert_last(events)
+
+ events = self.parse_tap('ok 1 abc # ToDo Because')
+ self.assert_test(events, number=1, name='abc', result=TestResult.UNEXPECTEDPASS,
+ explanation='Because')
+ self.assert_last(events)
+
+ def test_one_test_early_plan(self):
+ events = self.parse_tap('1..1\nok')
+ self.assert_plan(events, count=1, late=False)
+ self.assert_test(events, number=1, name='', result=TestResult.OK)
+ self.assert_last(events)
+
+ def test_one_test_late_plan(self):
+ events = self.parse_tap('ok\n1..1')
+ self.assert_test(events, number=1, name='', result=TestResult.OK)
+ self.assert_plan(events, count=1, late=True)
+ self.assert_last(events)
+
+ def test_out_of_order(self):
+ events = self.parse_tap('ok 2')
+ self.assert_error(events)
+ self.assert_test(events, number=2, name='', result=TestResult.OK)
+ self.assert_last(events)
+
+ def test_middle_plan(self):
+ events = self.parse_tap('ok 1\n1..2\nok 2')
+ self.assert_test(events, number=1, name='', result=TestResult.OK)
+ self.assert_plan(events, count=2, late=True)
+ self.assert_error(events)
+ self.assert_test(events, number=2, name='', result=TestResult.OK)
+ self.assert_last(events)
+
+ def test_too_many_plans(self):
+ events = self.parse_tap('1..1\n1..2\nok 1')
+ self.assert_plan(events, count=1, late=False)
+ self.assert_error(events)
+ self.assert_test(events, number=1, name='', result=TestResult.OK)
+ self.assert_last(events)
+
+ def test_too_many(self):
+ events = self.parse_tap('ok 1\nnot ok 2\n1..1')
+ self.assert_test(events, number=1, name='', result=TestResult.OK)
+ self.assert_test(events, number=2, name='', result=TestResult.FAIL)
+ self.assert_plan(events, count=1, late=True)
+ self.assert_error(events)
+ self.assert_last(events)
+
+ events = self.parse_tap('1..1\nok 1\nnot ok 2')
+ self.assert_plan(events, count=1, late=False)
+ self.assert_test(events, number=1, name='', result=TestResult.OK)
+ self.assert_test(events, number=2, name='', result=TestResult.FAIL)
+ self.assert_error(events)
+ self.assert_last(events)
+
+ def test_too_few(self):
+ events = self.parse_tap('ok 1\nnot ok 2\n1..3')
+ self.assert_test(events, number=1, name='', result=TestResult.OK)
+ self.assert_test(events, number=2, name='', result=TestResult.FAIL)
+ self.assert_plan(events, count=3, late=True)
+ self.assert_error(events)
+ self.assert_last(events)
+
+ events = self.parse_tap('1..3\nok 1\nnot ok 2')
+ self.assert_plan(events, count=3, late=False)
+ self.assert_test(events, number=1, name='', result=TestResult.OK)
+ self.assert_test(events, number=2, name='', result=TestResult.FAIL)
+ self.assert_error(events)
+ self.assert_last(events)
+
+ def test_too_few_bailout(self):
+ events = self.parse_tap('1..3\nok 1\nnot ok 2\nBail out! no third test')
+ self.assert_plan(events, count=3, late=False)
+ self.assert_test(events, number=1, name='', result=TestResult.OK)
+ self.assert_test(events, number=2, name='', result=TestResult.FAIL)
+ self.assert_bailout(events, message='no third test')
+ self.assert_last(events)
+
+ def test_diagnostics(self):
+ events = self.parse_tap('1..1\n# ignored\nok 1')
+ self.assert_plan(events, count=1, late=False)
+ self.assert_test(events, number=1, name='', result=TestResult.OK)
+ self.assert_last(events)
+
+ events = self.parse_tap('# ignored\n1..1\nok 1\n# ignored too')
+ self.assert_plan(events, count=1, late=False)
+ self.assert_test(events, number=1, name='', result=TestResult.OK)
+ self.assert_last(events)
+
+ events = self.parse_tap('# ignored\nok 1\n1..1\n# ignored too')
+ self.assert_test(events, number=1, name='', result=TestResult.OK)
+ self.assert_plan(events, count=1, late=True)
+ self.assert_last(events)
+
+ def test_unexpected(self):
+ events = self.parse_tap('1..1\ninvalid\nok 1')
+ self.assert_plan(events, count=1, late=False)
+ self.assert_error(events)
+ self.assert_test(events, number=1, name='', result=TestResult.OK)
+ self.assert_last(events)
+
+ def test_version(self):
+ events = self.parse_tap('TAP version 13\n')
+ self.assert_version(events, version=13)
+ self.assert_last(events)
+
+ events = self.parse_tap('TAP version 12\n')
+ self.assert_error(events)
+ self.assert_last(events)
+
+ events = self.parse_tap('1..0\nTAP version 13\n')
+ self.assert_plan(events, count=0, late=False, skipped=True)
+ self.assert_error(events)
+ self.assert_last(events)
+
+ def test_yaml(self):
+ events = self.parse_tap_v13('ok\n ---\n foo: abc\n bar: def\n ...\nok 2')
+ self.assert_test(events, number=1, name='', result=TestResult.OK)
+ self.assert_test(events, number=2, name='', result=TestResult.OK)
+ self.assert_last(events)
+
+ events = self.parse_tap_v13('ok\n ---\n foo: abc\n bar: def')
+ self.assert_test(events, number=1, name='', result=TestResult.OK)
+ self.assert_error(events)
+ self.assert_last(events)
+
+ events = self.parse_tap_v13('ok 1\n ---\n foo: abc\n bar: def\nnot ok 2')
+ self.assert_test(events, number=1, name='', result=TestResult.OK)
+ self.assert_error(events)
+ self.assert_test(events, number=2, name='', result=TestResult.FAIL)
+ self.assert_last(events)
+
def unset_envs():
# For unit tests we must fully control all command lines
# so that there are no unexpected changes coming from the
@@ -5655,26 +6120,14 @@ def unset_envs():
if v in os.environ:
del os.environ[v]
-def should_run_cross_arm_tests():
- return shutil.which('arm-linux-gnueabihf-gcc') and not platform.machine().lower().startswith('arm')
-
-def should_run_cross_mingw_tests():
- return shutil.which('x86_64-w64-mingw32-gcc') and not (is_windows() or is_cygwin())
-
def main():
unset_envs()
cases = ['InternalTests', 'DataTests', 'AllPlatformTests', 'FailureTests',
- 'PythonTests', 'NativeFileTests', 'RewriterTests', 'CrossFileTests']
- if not is_windows():
- cases += ['LinuxlikeTests']
- if should_run_cross_arm_tests():
- cases += ['LinuxCrossArmTests']
- if should_run_cross_mingw_tests():
- cases += ['LinuxCrossMingwTests']
- if is_windows() or is_cygwin():
- cases += ['WindowsTests']
- if is_osx():
- cases += ['DarwinTests']
+ 'PythonTests', 'NativeFileTests', 'RewriterTests', 'CrossFileTests',
+ 'TAPParserTests',
+
+ 'LinuxlikeTests', 'LinuxCrossArmTests', 'LinuxCrossMingwTests',
+ 'WindowsTests', 'DarwinTests']
return unittest.main(defaultTest=cases, buffer=True)
diff --git a/setup.py b/setup.py
index f352960..07bd3dd 100644
--- a/setup.py
+++ b/setup.py
@@ -35,7 +35,7 @@ packages = ['mesonbuild',
'mesonbuild.modules',
'mesonbuild.scripts',
'mesonbuild.wrap']
-package_data = {'mesonbuild.dependencies': ['data/CMakeLists.txt']}
+package_data = {'mesonbuild.dependencies': ['data/CMakeLists.txt', 'data/CMakePathInfo.txt']}
data_files = []
if sys.platform != 'win32':
# Only useful on UNIX-like systems
diff --git a/test cases/common/113 ternary/meson.build b/test cases/common/113 ternary/meson.build
index 3e65046..7539d56 100644
--- a/test cases/common/113 ternary/meson.build
+++ b/test cases/common/113 ternary/meson.build
@@ -1,7 +1,12 @@
project('ternary operator', 'c')
+x = true
one = true ? 1 : error('False branch should not be evaluated')
two = false ? error('True branch should not be evaluated.') : 2
+three = '@0@'.format(x ? 'yes' : 'no')
+four = [x ? '0' : '1']
assert(one == 1, 'Return value from ternary true is wrong.')
assert(two == 2, 'Return value from ternary false is wrong.')
+assert(three == 'yes', 'Return value for ternary inside method call is wrong.')
+assert(four == ['0'], 'Return value for ternary inside of list is wrong.')
diff --git a/test cases/common/13 pch/c/meson.build b/test cases/common/13 pch/c/meson.build
index cb8349d..fe4ac68 100644
--- a/test cases/common/13 pch/c/meson.build
+++ b/test cases/common/13 pch/c/meson.build
@@ -5,4 +5,4 @@ if cc_id == 'lcc'
endif
exe = executable('prog', 'prog.c',
-c_pch : ['pch/prog_pch.c', 'pch/prog.h'])
+c_pch : 'pch/prog.h')
diff --git a/test cases/common/13 pch/c/pch/prog_pch.c b/test cases/common/13 pch/c/pch/prog_pch.c
deleted file mode 100644
index 4960505..0000000
--- a/test cases/common/13 pch/c/pch/prog_pch.c
+++ /dev/null
@@ -1,5 +0,0 @@
-#if !defined(_MSC_VER)
-#error "This file is only for use with MSVC."
-#endif
-
-#include "prog.h"
diff --git a/test cases/common/13 pch/cpp/meson.build b/test cases/common/13 pch/cpp/meson.build
index 802c3e1..b01cd58 100644
--- a/test cases/common/13 pch/cpp/meson.build
+++ b/test cases/common/13 pch/cpp/meson.build
@@ -1 +1 @@
-exe = executable('prog', 'prog.cc', cpp_pch : ['pch/prog.hh', 'pch/prog_pch.cc'])
+exe = executable('prog', 'prog.cc', cpp_pch : 'pch/prog.hh')
diff --git a/test cases/common/13 pch/cpp/pch/prog_pch.cc b/test cases/common/13 pch/cpp/pch/prog_pch.cc
deleted file mode 100644
index aff1225..0000000
--- a/test cases/common/13 pch/cpp/pch/prog_pch.cc
+++ /dev/null
@@ -1,5 +0,0 @@
-#if !defined(_MSC_VER)
-#error "This file is only for use with MSVC."
-#endif
-
-#include "prog.hh"
diff --git a/test cases/common/13 pch/generated/meson.build b/test cases/common/13 pch/generated/meson.build
index 372a00e..1ef771b 100644
--- a/test cases/common/13 pch/generated/meson.build
+++ b/test cases/common/13 pch/generated/meson.build
@@ -13,4 +13,4 @@ generated_generator = generator(find_program('gen_generator.py'),
arguments: ['@INPUT@', '@OUTPUT@'])
exe = executable('prog', 'prog.c', generated_customTarget, generated_generator.process('generated_generator.in'),
- c_pch: ['pch/prog_pch.c', 'pch/prog.h'])
+ c_pch: 'pch/prog.h')
diff --git a/test cases/common/13 pch/generated/pch/prog_pch.c b/test cases/common/13 pch/generated/pch/prog_pch.c
deleted file mode 100644
index 4960505..0000000
--- a/test cases/common/13 pch/generated/pch/prog_pch.c
+++ /dev/null
@@ -1,5 +0,0 @@
-#if !defined(_MSC_VER)
-#error "This file is only for use with MSVC."
-#endif
-
-#include "prog.h"
diff --git a/test cases/common/13 pch/meson.build b/test cases/common/13 pch/meson.build
index 43129c9..4438c9e 100644
--- a/test cases/common/13 pch/meson.build
+++ b/test cases/common/13 pch/meson.build
@@ -3,6 +3,7 @@ project('pch test', 'c', 'cpp')
subdir('c')
subdir('cpp')
subdir('generated')
+subdir('userDefined')
subdir('withIncludeDirectories')
if meson.backend() == 'xcode'
diff --git a/test cases/common/13 pch/mixed/meson.build b/test cases/common/13 pch/mixed/meson.build
index f0c3eca..cbb7bac 100644
--- a/test cases/common/13 pch/mixed/meson.build
+++ b/test cases/common/13 pch/mixed/meson.build
@@ -1,17 +1,6 @@
exe = executable(
'prog',
files('main.cc', 'func.c'),
- c_pch : ['pch/func.h', 'pch/func_pch.c'],
- cpp_pch : ['pch/main_pch.cc', 'pch/main.h'],
+ c_pch : ['pch/func.h'],
+ cpp_pch : ['pch/main.h'],
)
-
-# test pch when only a header is given (not supported by msvc)
-cc = meson.get_compiler('c')
-if not ['msvc', 'clang-cl'].contains(cc.get_id())
- exe2 = executable(
- 'prog2',
- files('main.cc', 'func.c'),
- c_pch : 'pch/func.h',
- cpp_pch : 'pch/main.h',
- )
-endif
diff --git a/test cases/common/13 pch/mixed/pch/func_pch.c b/test cases/common/13 pch/mixed/pch/func_pch.c
deleted file mode 100644
index 5566739..0000000
--- a/test cases/common/13 pch/mixed/pch/func_pch.c
+++ /dev/null
@@ -1 +0,0 @@
-#include"func.h"
diff --git a/test cases/common/13 pch/mixed/pch/main_pch.cc b/test cases/common/13 pch/mixed/pch/main_pch.cc
deleted file mode 100644
index acd3f57..0000000
--- a/test cases/common/13 pch/mixed/pch/main_pch.cc
+++ /dev/null
@@ -1 +0,0 @@
-#include"main.h"
diff --git a/test cases/common/13 pch/userDefined/meson.build b/test cases/common/13 pch/userDefined/meson.build
new file mode 100644
index 0000000..9b60572
--- /dev/null
+++ b/test cases/common/13 pch/userDefined/meson.build
@@ -0,0 +1,10 @@
+cc = meson.get_compiler('c')
+cc_id = cc.get_id()
+
+# A user-supplied PCH implementation should override the
+# auto-generated one. PCH implementations are only supported
+# for MSVC and generally should not be used at all. Support
+# for them is only kept for backwards compatibility.
+if cc_id == 'msvc'
+ exe = executable('prog', 'prog.c', c_pch : ['pch/pch.h', 'pch/pch.c'])
+endif
diff --git a/test cases/common/13 pch/userDefined/pch/pch.c b/test cases/common/13 pch/userDefined/pch/pch.c
new file mode 100644
index 0000000..c107b1a
--- /dev/null
+++ b/test cases/common/13 pch/userDefined/pch/pch.c
@@ -0,0 +1,5 @@
+#include "pch.h"
+
+int foo() {
+ return 0;
+}
diff --git a/test cases/common/13 pch/userDefined/pch/pch.h b/test cases/common/13 pch/userDefined/pch/pch.h
new file mode 100644
index 0000000..5d5f8f0
--- /dev/null
+++ b/test cases/common/13 pch/userDefined/pch/pch.h
@@ -0,0 +1 @@
+int foo();
diff --git a/test cases/common/13 pch/userDefined/prog.c b/test cases/common/13 pch/userDefined/prog.c
new file mode 100644
index 0000000..eb068d9
--- /dev/null
+++ b/test cases/common/13 pch/userDefined/prog.c
@@ -0,0 +1,8 @@
+// No includes here; they must come from the PCH
+
+int main(int argc, char **argv) {
+ // foo() is implemented in pch.c.
+ // This makes sure that we can properly handle user-defined
+ // PCH implementation files and not only auto-generated ones.
+ return foo();
+}
diff --git a/test cases/common/13 pch/withIncludeDirectories/meson.build b/test cases/common/13 pch/withIncludeDirectories/meson.build
index 2ab2cd8..68e544b 100644
--- a/test cases/common/13 pch/withIncludeDirectories/meson.build
+++ b/test cases/common/13 pch/withIncludeDirectories/meson.build
@@ -6,4 +6,4 @@ endif
exe = executable('prog', 'prog.c',
include_directories: 'include',
- c_pch : ['pch/prog_pch.c', 'pch/prog.h'])
+ c_pch : 'pch/prog.h')
diff --git a/test cases/common/13 pch/withIncludeDirectories/pch/prog_pch.c b/test cases/common/13 pch/withIncludeDirectories/pch/prog_pch.c
deleted file mode 100644
index 4960505..0000000
--- a/test cases/common/13 pch/withIncludeDirectories/pch/prog_pch.c
+++ /dev/null
@@ -1,5 +0,0 @@
-#if !defined(_MSC_VER)
-#error "This file is only for use with MSVC."
-#endif
-
-#include "prog.h"
diff --git a/test cases/common/212 tap tests/meson.build b/test cases/common/212 tap tests/meson.build
new file mode 100644
index 0000000..58529a7
--- /dev/null
+++ b/test cases/common/212 tap tests/meson.build
@@ -0,0 +1,10 @@
+project('test features', 'c')
+
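+# tester simply echoes argv[1], so each test supplies the exact TAP
+# output it wants to exercise.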
+tester = executable('tester', 'tester.c')
+test('pass', tester, args : ['ok'], protocol: 'tap')
+test('fail', tester, args : ['not ok'], should_fail: true, protocol: 'tap')
+test('xfail', tester, args : ['not ok # todo'], protocol: 'tap')
+test('xpass', tester, args : ['ok # todo'], should_fail: true, protocol: 'tap')
+test('skip', tester, args : ['ok # skip'], protocol: 'tap')
+test('skip failure', tester, args : ['not ok # skip'], should_fail: true, protocol: 'tap')
+test('no tests', tester, args : ['1..0 # skip'], protocol: 'tap')
diff --git a/test cases/common/212 tap tests/tester.c b/test cases/common/212 tap tests/tester.c
new file mode 100644
index 0000000..ac582e7
--- /dev/null
+++ b/test cases/common/212 tap tests/tester.c
@@ -0,0 +1,10 @@
+#include <stdio.h>
+
+int main(int argc, char **argv) {
+ if (argc != 2) {
+ fprintf(stderr, "Incorrect number of arguments, got %i\n", argc);
+ return 1;
+ }
+ puts(argv[1]);
+ return 0;
+}
diff --git a/test cases/cuda/5 threads/main.cu b/test cases/cuda/5 threads/main.cu
new file mode 100644
index 0000000..d251167
--- /dev/null
+++ b/test cases/cuda/5 threads/main.cu
@@ -0,0 +1,20 @@
+#include <stdio.h>
+#include <cuda_runtime.h>
+#include "shared/kernels.h"
+
+
+int main(int argc, char **argv) {
+ int cuda_devices = 0;
+ cudaGetDeviceCount(&cuda_devices);
+ if(cuda_devices == 0) {
+ printf("No Cuda hardware found. Exiting.\n");
+ return 0;
+ }
+
+ if(run_tests() != 0){
+ printf("CUDA tests failed! Exiting.\n");
+ return 0;
+ }
+
+ return 0;
+}
diff --git a/test cases/cuda/5 threads/meson.build b/test cases/cuda/5 threads/meson.build
new file mode 100644
index 0000000..2a804a3
--- /dev/null
+++ b/test cases/cuda/5 threads/meson.build
@@ -0,0 +1,7 @@
+project('simple', 'cuda', version : '1.0.0')
+
+subdir('shared')
+
+thread_dep = dependency('threads')
+exe = executable('prog', 'main.cu', dependencies: [libkernels, thread_dep])
+test('cudatest', exe)
diff --git a/test cases/cuda/5 threads/shared/kernels.cu b/test cases/cuda/5 threads/shared/kernels.cu
new file mode 100644
index 0000000..41a9553
--- /dev/null
+++ b/test cases/cuda/5 threads/shared/kernels.cu
@@ -0,0 +1,14 @@
+#include <stdio.h>
+#include <cuda_runtime.h>
+#include "kernels.h"
+
+
+TAG_HIDDEN __global__ void kernel(void) {
+}
+
+TAG_PUBLIC int run_tests(void) {
+ kernel<<<1,1>>>();
+
+ return (int)cudaDeviceSynchronize();
+}
+
diff --git a/test cases/cuda/5 threads/shared/kernels.h b/test cases/cuda/5 threads/shared/kernels.h
new file mode 100644
index 0000000..dbcb99d
--- /dev/null
+++ b/test cases/cuda/5 threads/shared/kernels.h
@@ -0,0 +1,86 @@
+/* Include Guard */
+#ifndef SHARED_KERNELS_H
+#define SHARED_KERNELS_H
+
+/**
+ * Includes
+ */
+
+#include <cuda_runtime.h>
+
+
+/**
+ * Defines
+ */
+
+/**
+ * When building a library, it is a good idea to expose as few internal
+ * symbols (functions, objects, data structures) as possible. Not only does it
+ * prevent users from relying on private portions of the library that are
+ * subject to change without any notice, but it can have performance
+ * advantages:
+ *
+ * - It can make shared libraries link faster at dynamic-load time.
+ * - It can make internal function calls faster by bypassing the PLT.
+ *
+ * Thus, compilation should hide all symbols by default, while the API
+ * headers explicitly mark the few symbols users are permitted to use
+ * with a PUBLIC tag. We also define a HIDDEN tag, since certain C++
+ * types may need to be explicitly marked visible for exceptions to
+ * function correctly.
+ *
+ * Additional complexity comes from non-POSIX-compliant systems, which
+ * artificially impose a requirement on knowing whether we are building or
+ * using a DLL.
+ *
+ * The commentary above and the code below are inspired by
+ * 'https://gcc.gnu.org/wiki/Visibility'
+ */
+
+#if defined(_WIN32) || defined(__CYGWIN__)
+# define TAG_ATTRIBUTE_EXPORT __declspec(dllexport)
+# define TAG_ATTRIBUTE_IMPORT __declspec(dllimport)
+# define TAG_ATTRIBUTE_HIDDEN
+#elif __GNUC__ >= 4
+# define TAG_ATTRIBUTE_EXPORT __attribute__((visibility("default")))
+# define TAG_ATTRIBUTE_IMPORT __attribute__((visibility("default")))
+# define TAG_ATTRIBUTE_HIDDEN __attribute__((visibility("hidden")))
+#else
+# define TAG_ATTRIBUTE_EXPORT
+# define TAG_ATTRIBUTE_IMPORT
+# define TAG_ATTRIBUTE_HIDDEN
+#endif
+
+#if TAG_IS_SHARED
+# if TAG_IS_BUILDING
+# define TAG_PUBLIC TAG_ATTRIBUTE_EXPORT
+# else
+# define TAG_PUBLIC TAG_ATTRIBUTE_IMPORT
+# endif
+# define TAG_HIDDEN TAG_ATTRIBUTE_HIDDEN
+#else
+# define TAG_PUBLIC
+# define TAG_HIDDEN
+#endif
+#define TAG_STATIC static
+
+
+
+
+/* Extern "C" Guard */
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+
+
+/* Function Prototypes */
+TAG_PUBLIC int run_tests(void);
+
+
+
+/* End Extern "C" and Include Guard */
+#ifdef __cplusplus
+}
+#endif
+#endif
diff --git a/test cases/cuda/5 threads/shared/meson.build b/test cases/cuda/5 threads/shared/meson.build
new file mode 100644
index 0000000..5987916
--- /dev/null
+++ b/test cases/cuda/5 threads/shared/meson.build
@@ -0,0 +1,5 @@
+libkernels = shared_library('kernels', 'kernels.cu',
+ cuda_args: ['-DTAG_IS_SHARED=1', '-DTAG_IS_BUILDING=1'],
+ gnu_symbol_visibility: 'hidden')
+libkernels = declare_dependency(compile_args: ['-DTAG_IS_SHARED=1'],
+ link_with: libkernels)
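+# Consumers get -DTAG_IS_SHARED=1 but not TAG_IS_BUILDING, so kernels.h
+# resolves TAG_PUBLIC to the import/default-visibility variant for them.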
diff --git a/test cases/failing test/4 hard error/main.c b/test cases/failing test/4 hard error/main.c
new file mode 100644
index 0000000..a1e705a
--- /dev/null
+++ b/test cases/failing test/4 hard error/main.c
@@ -0,0 +1,3 @@
+int main(void) {
+ return 99;
+}
diff --git a/test cases/failing test/4 hard error/meson.build b/test cases/failing test/4 hard error/meson.build
new file mode 100644
index 0000000..6979b04
--- /dev/null
+++ b/test cases/failing test/4 hard error/meson.build
@@ -0,0 +1,4 @@
+project('trivial', 'c')
+
+# Exit code 99 even overrides should_fail
+test('My Test', executable('main', 'main.c'), should_fail: true)
diff --git a/test cases/failing test/5 tap tests/meson.build b/test cases/failing test/5 tap tests/meson.build
new file mode 100644
index 0000000..844c1f9
--- /dev/null
+++ b/test cases/failing test/5 tap tests/meson.build
@@ -0,0 +1,6 @@
+project('test features', 'c')
+
+tester = executable('tester', 'tester.c')
+test('nonzero return code', tester, args : [], protocol: 'tap')
+test('missing test', tester, args : ['1..1'], protocol: 'tap')
+test('incorrect skip', tester, args : ['1..1 # skip\nok 1'], protocol: 'tap')
diff --git a/test cases/failing test/5 tap tests/tester.c b/test cases/failing test/5 tap tests/tester.c
new file mode 100644
index 0000000..ac582e7
--- /dev/null
+++ b/test cases/failing test/5 tap tests/tester.c
@@ -0,0 +1,10 @@
+#include <stdio.h>
+
+int main(int argc, char **argv) {
+ if (argc != 2) {
+ fprintf(stderr, "Incorrect number of arguments, got %i\n", argc);
+ return 1;
+ }
+ puts(argv[1]);
+ return 0;
+}
diff --git a/test cases/fortran/4 self dependency/meson.build b/test cases/fortran/4 self dependency/meson.build
index bc5dab4..e791284 100644
--- a/test cases/fortran/4 self dependency/meson.build
+++ b/test cases/fortran/4 self dependency/meson.build
@@ -2,3 +2,7 @@ project('selfdep', 'fortran')
e = executable('selfdep', 'selfdep.f90')
test('selfdep', e)
+
+library('selfmod', 'src/selfdep_mod.f90')
+
+subproject('sub1')
diff --git a/test cases/fortran/4 self dependency/src/selfdep_mod.f90 b/test cases/fortran/4 self dependency/src/selfdep_mod.f90
new file mode 100644
index 0000000..4aa0057
--- /dev/null
+++ b/test cases/fortran/4 self dependency/src/selfdep_mod.f90
@@ -0,0 +1,6 @@
+module a
+end module a
+
+module b
+use a
+end module b
diff --git a/test cases/fortran/4 self dependency/subprojects/sub1/main.f90 b/test cases/fortran/4 self dependency/subprojects/sub1/main.f90
new file mode 100644
index 0000000..873427d
--- /dev/null
+++ b/test cases/fortran/4 self dependency/subprojects/sub1/main.f90
@@ -0,0 +1,6 @@
+module a
+end
+
+program b
+ use a
+end
diff --git a/test cases/fortran/4 self dependency/subprojects/sub1/meson.build b/test cases/fortran/4 self dependency/subprojects/sub1/meson.build
new file mode 100644
index 0000000..606f338
--- /dev/null
+++ b/test cases/fortran/4 self dependency/subprojects/sub1/meson.build
@@ -0,0 +1,3 @@
+project('subproject self-def', 'fortran')
+
+library('subself', 'main.f90')
diff --git a/test cases/fortran/7 generated/prog.f90 b/test cases/fortran/7 generated/prog.f90
index c476e9c..8a102c0 100644
--- a/test cases/fortran/7 generated/prog.f90
+++ b/test cases/fortran/7 generated/prog.f90
@@ -1,9 +1,7 @@
program prog
- use mod2
- implicit none
+use mod2
+implicit none
- if (modval1 + modval2 /= 3) then
- stop 1
- end if
+if (modval1 + modval2 /= 3) stop 1
end program prog
diff --git a/test cases/frameworks/26 netcdf/main.c b/test cases/frameworks/26 netcdf/main.c
new file mode 100644
index 0000000..e592585
--- /dev/null
+++ b/test cases/frameworks/26 netcdf/main.c
@@ -0,0 +1,14 @@
+#include "netcdf.h"
+
+int main(void)
+{
+int ret, ncid;
+
+if ((ret = nc_create("foo.nc", NC_CLOBBER, &ncid)))
+ return ret;
+
+if ((ret = nc_close(ncid)))
+ return ret;
+
+return 0;
+}
diff --git a/test cases/frameworks/26 netcdf/main.cpp b/test cases/frameworks/26 netcdf/main.cpp
new file mode 100644
index 0000000..a3c98ef
--- /dev/null
+++ b/test cases/frameworks/26 netcdf/main.cpp
@@ -0,0 +1,15 @@
+#include <iostream>
+#include <cstdlib>
+#include "netcdf.h"
+
+int main(void)
+{
+    int ret, ncid;
+
+    if ((ret = nc_create("foo.nc", NC_CLOBBER, &ncid)))
+        return ret;
+
+    if ((ret = nc_close(ncid)))
+        return ret;
+
+    return EXIT_SUCCESS;
+}
diff --git a/test cases/frameworks/26 netcdf/main.f90 b/test cases/frameworks/26 netcdf/main.f90
new file mode 100644
index 0000000..3872298
--- /dev/null
+++ b/test cases/frameworks/26 netcdf/main.f90
@@ -0,0 +1,19 @@
+use netcdf
+
+implicit none
+
+integer :: ncid
+
+call check( nf90_create("foo.nc", NF90_CLOBBER, ncid) )
+
+call check( nf90_close(ncid) )
+
+contains
+
+ subroutine check(status)
+ integer, intent (in) :: status
+
+ if(status /= nf90_noerr) error stop trim(nf90_strerror(status))
+end subroutine check
+
+end program
diff --git a/test cases/frameworks/26 netcdf/meson.build b/test cases/frameworks/26 netcdf/meson.build
new file mode 100644
index 0000000..5a10d09
--- /dev/null
+++ b/test cases/frameworks/26 netcdf/meson.build
@@ -0,0 +1,35 @@
+project('netcdf_test', 'c', 'cpp')
+
+
+# --- C tests
+nc_c = dependency('netcdf', language : 'c', required : false)
+if not nc_c.found()
+ error('MESON_SKIP_TEST: NetCDF C library not found, skipping NetCDF framework tests.')
+endif
+exec = executable('exec', 'main.c', dependencies : nc_c)
+
+test('NetCDF C', exec)
+
+# --- C++ tests
+nc_cpp = dependency('netcdf', language : 'cpp', required : false)
+if nc_cpp.found()
+ execpp = executable('execpp', 'main.cpp', dependencies : nc_cpp)
+ test('NetCDF C++', execpp)
+endif
+
+# --- Fortran tests
+if build_machine.system() != 'windows'
+ add_languages('fortran')
+
+ nc_f = dependency('netcdf', language : 'fortran', required : false)
+ if nc_f.found()
+ exef = executable('exef', 'main.f90', dependencies : nc_f)
+
+ test('NetCDF Fortran', exef)
+ endif
+endif
+
+# Check we can apply a version constraint
+if nc_c.version() != 'unknown'
+ dependency('netcdf', version: '>=@0@'.format(nc_c.version()))
+endif
diff --git a/test cases/rewrite/1 basic/addSrc.json b/test cases/rewrite/1 basic/addSrc.json
index 9d6dafd..b8bc439 100644
--- a/test cases/rewrite/1 basic/addSrc.json
+++ b/test cases/rewrite/1 basic/addSrc.json
@@ -3,7 +3,7 @@
"type": "target",
"target": "trivialprog1",
"operation": "src_add",
- "sources": ["a1.cpp", "a2.cpp"]
+ "sources": ["a2.cpp", "a1.cpp", "a2.cpp"]
},
{
"type": "target",
@@ -39,7 +39,12 @@
"type": "target",
"target": "trivialprog9",
"operation": "src_add",
- "sources": ["a6.cpp"]
+ "sources": ["a6.cpp", "a1.cpp"]
+ },
+ {
+ "type": "target",
+ "target": "trivialprog0",
+ "operation": "info"
},
{
"type": "target",
diff --git a/test cases/rewrite/1 basic/addTgt.json b/test cases/rewrite/1 basic/addTgt.json
index 432e299..2f4e7e2 100644
--- a/test cases/rewrite/1 basic/addTgt.json
+++ b/test cases/rewrite/1 basic/addTgt.json
@@ -2,7 +2,7 @@
{
"type": "target",
"target": "trivialprog10",
- "operation": "tgt_add",
+ "operation": "target_add",
"sources": ["new1.cpp", "new2.cpp"],
"target_type": "shared_library"
}
diff --git a/test cases/rewrite/1 basic/info.json b/test cases/rewrite/1 basic/info.json
index 7e44bec..0f1a3bd 100644
--- a/test cases/rewrite/1 basic/info.json
+++ b/test cases/rewrite/1 basic/info.json
@@ -1,6 +1,11 @@
[
{
"type": "target",
+ "target": "trivialprog0",
+ "operation": "info"
+ },
+ {
+ "type": "target",
"target": "trivialprog1",
"operation": "info"
},
diff --git a/test cases/rewrite/1 basic/meson.build b/test cases/rewrite/1 basic/meson.build
index 920553d..0f87c45 100644
--- a/test cases/rewrite/1 basic/meson.build
+++ b/test cases/rewrite/1 basic/meson.build
@@ -7,6 +7,7 @@ src4 = [src3]
# Magic comment
+exe0 = executable('trivialprog0', src1 + src2)
exe1 = executable('trivialprog1', src1)
exe2 = executable('trivialprog2', [src2])
exe3 = executable('trivialprog3', ['main.cpp', 'fileA.cpp'])
diff --git a/test cases/rewrite/1 basic/rmSrc.json b/test cases/rewrite/1 basic/rmSrc.json
index 86880a9..2e7447c 100644
--- a/test cases/rewrite/1 basic/rmSrc.json
+++ b/test cases/rewrite/1 basic/rmSrc.json
@@ -37,6 +37,11 @@
},
{
"type": "target",
+ "target": "trivialprog0",
+ "operation": "info"
+ },
+ {
+ "type": "target",
"target": "trivialprog1",
"operation": "info"
},
diff --git a/test cases/rewrite/1 basic/rmTgt.json b/test cases/rewrite/1 basic/rmTgt.json
index ac3f3a2..dbaf025 100644
--- a/test cases/rewrite/1 basic/rmTgt.json
+++ b/test cases/rewrite/1 basic/rmTgt.json
@@ -1,12 +1,17 @@
[
{
"type": "target",
+ "target": "exe0",
+ "operation": "target_rm"
+ },
+ {
+ "type": "target",
"target": "trivialprog1",
- "operation": "tgt_rm"
+ "operation": "target_rm"
},
{
"type": "target",
"target": "trivialprog9",
- "operation": "tgt_rm"
+ "operation": "target_rm"
}
]
diff --git a/test cases/rewrite/2 subdirs/addTgt.json b/test cases/rewrite/2 subdirs/addTgt.json
index 01e9a6e..2e1e8bc 100644
--- a/test cases/rewrite/2 subdirs/addTgt.json
+++ b/test cases/rewrite/2 subdirs/addTgt.json
@@ -2,7 +2,7 @@
{
"type": "target",
"target": "newLib",
- "operation": "tgt_add",
+ "operation": "target_add",
"sources": ["new1.cpp", "new2.cpp"],
"target_type": "shared_library",
"subdir": "sub2"
diff --git a/test cases/rewrite/2 subdirs/rmTgt.json b/test cases/rewrite/2 subdirs/rmTgt.json
index 73a7b1d..9b112f9 100644
--- a/test cases/rewrite/2 subdirs/rmTgt.json
+++ b/test cases/rewrite/2 subdirs/rmTgt.json
@@ -2,6 +2,6 @@
{
"type": "target",
"target": "something",
- "operation": "tgt_rm"
+ "operation": "target_rm"
}
]
diff --git a/test cases/rewrite/3 kwargs/add.json b/test cases/rewrite/3 kwargs/add.json
index e398b7b..2148a1e 100644
--- a/test cases/rewrite/3 kwargs/add.json
+++ b/test cases/rewrite/3 kwargs/add.json
@@ -2,7 +2,7 @@
{
"type": "kwargs",
"function": "project",
- "id": "",
+ "id": "/",
"operation": "set",
"kwargs": {
"license": "GPL"
@@ -11,7 +11,7 @@
{
"type": "kwargs",
"function": "project",
- "id": "",
+ "id": "/",
"operation": "add",
"kwargs": {
"license": ["MIT"]
@@ -20,7 +20,7 @@
{
"type": "kwargs",
"function": "project",
- "id": "",
+ "id": "/",
"operation": "add",
"kwargs": {
"license": "BSD"
diff --git a/test cases/rewrite/3 kwargs/defopts_delete.json b/test cases/rewrite/3 kwargs/defopts_delete.json
new file mode 100644
index 0000000..4fe39e2
--- /dev/null
+++ b/test cases/rewrite/3 kwargs/defopts_delete.json
@@ -0,0 +1,18 @@
+[
+ {
+ "type": "kwargs",
+ "function": "project",
+ "id": "/",
+ "operation": "set",
+ "kwargs": {
+ "default_options": ["cpp_std=c++14", "buildtype=release", "debug=true"]
+ }
+ },
+ {
+ "type": "default_options",
+ "operation": "delete",
+ "options": {
+ "buildtype": null
+ }
+ }
+]
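Assuming the delete operation removes only the named keys from default_options, the project() call in this test should end up with:

    default_options : ['cpp_std=c++14', 'debug=true']

The ordering is an assumption; the authoritative expected output lives in the unit tests, which are not shown here.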
diff --git a/test cases/rewrite/3 kwargs/defopts_set.json b/test cases/rewrite/3 kwargs/defopts_set.json
new file mode 100644
index 0000000..f8f855f
--- /dev/null
+++ b/test cases/rewrite/3 kwargs/defopts_set.json
@@ -0,0 +1,24 @@
+[
+ {
+ "type": "default_options",
+ "operation": "set",
+ "options": {
+ "cpp_std": "c++14"
+ }
+ },
+ {
+ "type": "default_options",
+ "operation": "set",
+ "options": {
+ "buildtype": "release",
+ "debug": true
+ }
+ },
+ {
+ "type": "default_options",
+ "operation": "set",
+ "options": {
+ "cpp_std": "c++11"
+ }
+ }
+]
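Assuming set merges per option, with a later set overriding an earlier one, the expected result of this three-step sequence is:

    default_options : ['buildtype=release', 'cpp_std=c++11', 'debug=true']

That is, the final cpp_std=c++11 wins over the earlier c++14 (ordering again assumed).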
diff --git a/test cases/rewrite/3 kwargs/info.json b/test cases/rewrite/3 kwargs/info.json
index 5fd1a64..0eed404 100644
--- a/test cases/rewrite/3 kwargs/info.json
+++ b/test cases/rewrite/3 kwargs/info.json
@@ -2,7 +2,7 @@
{
"type": "kwargs",
"function": "project",
- "id": "",
+ "id": "/",
"operation": "info"
},
{
diff --git a/test cases/rewrite/3 kwargs/remove.json b/test cases/rewrite/3 kwargs/remove.json
index bd7596f..5dc7836 100644
--- a/test cases/rewrite/3 kwargs/remove.json
+++ b/test cases/rewrite/3 kwargs/remove.json
@@ -2,7 +2,7 @@
{
"type": "kwargs",
"function": "project",
- "id": "",
+ "id": "/",
"operation": "set",
"kwargs": {
"license": ["GPL", "MIT", "BSD"]
@@ -11,7 +11,7 @@
{
"type": "kwargs",
"function": "project",
- "id": "",
+ "id": "/",
"operation": "remove",
"kwargs": {
"license": ["MIT"]
@@ -20,7 +20,7 @@
{
"type": "kwargs",
"function": "project",
- "id": "",
+ "id": "/",
"operation": "remove",
"kwargs": {
"license": "BSD"
diff --git a/test cases/rewrite/3 kwargs/remove_regex.json b/test cases/rewrite/3 kwargs/remove_regex.json
new file mode 100644
index 0000000..1043101
--- /dev/null
+++ b/test cases/rewrite/3 kwargs/remove_regex.json
@@ -0,0 +1,20 @@
+[
+ {
+ "type": "kwargs",
+ "function": "project",
+ "id": "/",
+ "operation": "set",
+ "kwargs": {
+ "default_options": ["cpp_std=c++14", "buildtype=release", "debug=true"]
+ }
+ },
+ {
+ "type": "kwargs",
+ "function": "project",
+ "id": "/",
+ "operation": "remove_regex",
+ "kwargs": {
+ "default_options": ["cpp_std=.*"]
+ }
+ }
+]
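remove_regex presumably drops every list entry matching one of the given patterns, so 'cpp_std=.*' strips the cpp_std entry while leaving the rest:

    default_options : ['buildtype=release', 'debug=true']

This is an assumed outcome; the checked values are defined by the test suite, not this diff.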
diff --git a/test cases/rewrite/3 kwargs/set.json b/test cases/rewrite/3 kwargs/set.json
index 7d60c4f..a56c599 100644
--- a/test cases/rewrite/3 kwargs/set.json
+++ b/test cases/rewrite/3 kwargs/set.json
@@ -2,7 +2,7 @@
{
"type": "kwargs",
"function": "project",
- "id": "",
+ "id": "/",
"operation": "set",
"kwargs": {
"version": "0.0.2",
diff --git a/test cases/rewrite/4 same name targets/addSrc.json b/test cases/rewrite/4 same name targets/addSrc.json
new file mode 100644
index 0000000..98d0d1e
--- /dev/null
+++ b/test cases/rewrite/4 same name targets/addSrc.json
@@ -0,0 +1,8 @@
+[
+ {
+ "type": "target",
+ "target": "myExe",
+ "operation": "src_add",
+ "sources": ["a1.cpp", "a2.cpp"]
+ }
+]
diff --git a/test cases/rewrite/4 same name targets/info.json b/test cases/rewrite/4 same name targets/info.json
new file mode 100644
index 0000000..a9fc2dd
--- /dev/null
+++ b/test cases/rewrite/4 same name targets/info.json
@@ -0,0 +1,12 @@
+[
+ {
+ "type": "target",
+ "target": "exe1",
+ "operation": "info"
+ },
+ {
+ "type": "target",
+ "target": "exe2",
+ "operation": "info"
+ }
+]
diff --git a/test cases/rewrite/4 same name targets/meson.build b/test cases/rewrite/4 same name targets/meson.build
new file mode 100644
index 0000000..384fa2b
--- /dev/null
+++ b/test cases/rewrite/4 same name targets/meson.build
@@ -0,0 +1,6 @@
+project('rewrite same name targets', 'cpp')
+
+src1 = ['main.cpp']
+
+exe1 = executable('myExe', src1)
+subdir('sub1')
diff --git a/test cases/rewrite/4 same name targets/sub1/meson.build b/test cases/rewrite/4 same name targets/sub1/meson.build
new file mode 100644
index 0000000..ac53667
--- /dev/null
+++ b/test cases/rewrite/4 same name targets/sub1/meson.build
@@ -0,0 +1,3 @@
+src2 = ['main.cpp']
+
+exe2 = executable('myExe', src2)
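Both build files above define an executable named 'myExe'; only the assigned variables (exe1, exe2) differ. info.json queries by those variable names, while addSrc.json uses the ambiguous name 'myExe', presumably to exercise the rewriter's handling of duplicate target names. A hypothetical disambiguated edit, as a sketch rather than one of the test files:

    {
        "type": "target",
        "target": "exe1",
        "operation": "src_add",
        "sources": ["a1.cpp"]
    }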
diff --git a/test cases/rewrite/5 sorting/meson.build b/test cases/rewrite/5 sorting/meson.build
new file mode 100644
index 0000000..80934a0
--- /dev/null
+++ b/test cases/rewrite/5 sorting/meson.build
@@ -0,0 +1,33 @@
+project('rewriter source sorting', ['c', 'cpp'])
+
+src1 = files([
+ 'a1.c',
+ 'a10.c',
+ 'a2.c',
+ 'a3.c',
+ 'bbb/a/b1.c',
+ 'bbb/a4.c',
+ 'bbb/b3.c',
+ 'bbb/b4.c',
+ 'bbb/b/b2.c',
+ 'bbb/c1/b5.c',
+ 'bbb/c10/b6.c',
+ 'bbb/c2/b7.c',
+ 'bbb/b5.c',
+ 'a110.c',
+ 'aaa/f1.c',
+ 'aaa/f2.c',
+ 'aaa/f3.c',
+ 'a20.c',
+ 'b1.c',
+ 'aaa/b/b1.c',
+ 'aaa/b/b2.c',
+ 'a30.c',
+ 'a100.c',
+ 'aaa/a/a1.c',
+ 'a101.c',
+ 'a210.c',
+ 'c2.c'
+])
+
+exe1 = executable('exe1', src1)
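The file list above is deliberately shuffled. The companion unit test (not shown in this diff) presumably checks that the rewriter re-emits sources in natural alphanumeric order, so numbered files sort by value rather than lexically, roughly:

    # assumed leading entries after sorting:
    # 'a1.c', 'a2.c', 'a3.c', 'a10.c', 'a20.c', 'a30.c', 'a100.c', 'a101.c', 'a110.c', 'a210.c', ...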
diff --git a/test cases/unit/52 introspection/meson.build b/test cases/unit/52 introspection/meson.build
index 14d880b..98f6f22 100644
--- a/test cases/unit/52 introspection/meson.build
+++ b/test cases/unit/52 introspection/meson.build
@@ -1,6 +1,12 @@
project('introspection', ['c', 'cpp'], version: '1.2.3', default_options: ['cpp_std=c++11', 'buildtype=debug'])
dep1 = dependency('threads')
+dep2 = dependency('zlib', required: false)
+
+if false
+ dependency('somethingthatdoesnotexist', required: true)
+ dependency('look_i_have_a_fallback', fallback: ['oh_no', 'the_subproject_does_not_exist'])
+endif
subdir('sharedlib')
subdir('staticlib')
diff --git a/test cases/unit/53 introspect buildoptions/main.c b/test cases/unit/53 introspect buildoptions/main.c
new file mode 100644
index 0000000..ef99ae6
--- /dev/null
+++ b/test cases/unit/53 introspect buildoptions/main.c
@@ -0,0 +1,6 @@
+#include <stdio.h>
+
+int main(void) {
+ printf("Hello World");
+ return 0;
+}
diff --git a/test cases/unit/53 introspect buildoptions/meson.build b/test cases/unit/53 introspect buildoptions/meson.build
index e94ef61..8052b5f 100644
--- a/test cases/unit/53 introspect buildoptions/meson.build
+++ b/test cases/unit/53 introspect buildoptions/meson.build
@@ -2,6 +2,11 @@ project('introspect buildargs', ['c'], default_options: ['c_std=c11', 'cpp_std=c
subA = subproject('projectA')
+target_name = 'MAIN'
+target_src = ['main.c']
+
+executable(target_name, target_src)
+
r = run_command(find_program('c_compiler.py'))
if r.returncode() != 0
error('FAILED')
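The new executable() call gives this introspection test a real target to report. Assuming introspection can now be pointed directly at a source tree, with no configured build directory, its targets could be listed with something like:

    meson introspect --targets "test cases/unit/53 introspect buildoptions/meson.build"

This is a sketch of the expected usage; the precise flags exercised by the test suite are not shown here.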