aboutsummaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rw-r--r--azure-pipelines.yml2
-rw-r--r--ciimage/Dockerfile1
-rw-r--r--docs/markdown/Builtin-options.md2
-rw-r--r--docs/markdown/Cuda-module.md183
-rw-r--r--docs/markdown/Dependencies.md19
-rw-r--r--docs/markdown/External-commands.md8
-rw-r--r--docs/markdown/Reference-manual.md6
-rw-r--r--docs/markdown/howtox.md17
-rw-r--r--docs/markdown/i18n-module.md1
-rw-r--r--docs/markdown/snippets/cmake_module_path.md9
-rw-r--r--docs/markdown/snippets/fortran_submodule.md12
-rw-r--r--docs/markdown/snippets/hdf5.md3
-rw-r--r--docs/markdown/snippets/run_command_env.md9
-rw-r--r--docs/sitemap.txt1
-rw-r--r--mesonbuild/ast/__init__.py32
-rw-r--r--mesonbuild/ast/interpreter.py (renamed from mesonbuild/astinterpreter.py)236
-rw-r--r--mesonbuild/ast/introspection.py241
-rw-r--r--mesonbuild/ast/postprocess.py86
-rw-r--r--mesonbuild/ast/printer.py203
-rw-r--r--mesonbuild/ast/visitor.py140
-rw-r--r--mesonbuild/backend/backends.py23
-rw-r--r--mesonbuild/backend/ninjabackend.py110
-rw-r--r--mesonbuild/backend/vs2010backend.py117
-rw-r--r--mesonbuild/build.py10
-rw-r--r--mesonbuild/compilers/__init__.py2
-rw-r--r--mesonbuild/compilers/c.py110
-rw-r--r--mesonbuild/compilers/compilers.py19
-rw-r--r--mesonbuild/compilers/cs.py1
-rw-r--r--mesonbuild/compilers/d.py15
-rw-r--r--mesonbuild/compilers/fortran.py20
-rw-r--r--mesonbuild/compilers/java.py1
-rw-r--r--mesonbuild/compilers/vala.py6
-rw-r--r--mesonbuild/coredata.py90
-rw-r--r--mesonbuild/dependencies/__init__.py4
-rw-r--r--mesonbuild/dependencies/base.py423
-rw-r--r--mesonbuild/dependencies/misc.py53
-rw-r--r--mesonbuild/dependencies/platform.py18
-rw-r--r--mesonbuild/dependencies/ui.py23
-rw-r--r--mesonbuild/environment.py46
-rw-r--r--mesonbuild/interpreter.py61
-rw-r--r--mesonbuild/interpreterbase.py2
-rw-r--r--mesonbuild/mconf.py6
-rw-r--r--mesonbuild/mesonlib.py25
-rw-r--r--mesonbuild/mesonmain.py2
-rw-r--r--mesonbuild/minstall.py4
-rw-r--r--mesonbuild/mintro.py126
-rw-r--r--mesonbuild/modules/__init__.py4
-rw-r--r--mesonbuild/modules/gnome.py192
-rw-r--r--mesonbuild/modules/i18n.py6
-rw-r--r--mesonbuild/modules/pkgconfig.py41
-rw-r--r--mesonbuild/modules/python.py7
-rw-r--r--mesonbuild/modules/unstable_cuda.py259
-rw-r--r--mesonbuild/mparser.py68
-rw-r--r--mesonbuild/mtest.py7
-rw-r--r--mesonbuild/rewriter.py309
-rw-r--r--mesonbuild/scripts/dist.py3
-rw-r--r--mesonbuild/wrap/wrap.py2
-rwxr-xr-xrun_project_tests.py4
-rwxr-xr-xrun_tests.py16
-rwxr-xr-xrun_unittests.py394
-rw-r--r--setup.py1
-rw-r--r--test cases/common/13 pch/c/pch/prog.h5
-rw-r--r--test cases/common/13 pch/generated/gen_custom.py5
-rw-r--r--test cases/common/13 pch/generated/gen_generator.py7
-rw-r--r--test cases/common/13 pch/generated/generated_generator.in1
-rw-r--r--test cases/common/13 pch/generated/meson.build16
-rw-r--r--test cases/common/13 pch/generated/pch/prog.h2
-rw-r--r--test cases/common/13 pch/generated/pch/prog_pch.c5
-rw-r--r--test cases/common/13 pch/generated/prog.c6
-rw-r--r--test cases/common/13 pch/meson.build2
-rw-r--r--test cases/common/13 pch/withIncludeDirectories/include/lib/lib.h1
-rw-r--r--test cases/common/13 pch/withIncludeDirectories/meson.build9
-rw-r--r--test cases/common/13 pch/withIncludeDirectories/pch/prog.h1
-rw-r--r--test cases/common/13 pch/withIncludeDirectories/pch/prog_pch.c5
-rw-r--r--test cases/common/13 pch/withIncludeDirectories/prog.c10
-rw-r--r--test cases/common/14 configure file/meson.build6
-rw-r--r--test cases/common/190 openmp/meson.build12
-rw-r--r--test cases/common/36 run program/meson.build6
-rwxr-xr-xtest cases/common/53 custom target/depfile/dep.py2
-rw-r--r--test cases/cuda/3 cudamodule/meson.build16
-rw-r--r--test cases/cuda/3 cudamodule/prog.cu30
-rw-r--r--test cases/failing/93 pch source different folder/include/pch.h0
-rw-r--r--test cases/failing/93 pch source different folder/meson.build5
-rw-r--r--test cases/failing/93 pch source different folder/prog.c1
-rw-r--r--test cases/failing/93 pch source different folder/src/pch.c0
-rw-r--r--test cases/fortran/1 basic/meson.build5
-rw-r--r--test cases/fortran/10 find library/gzip.f9056
-rw-r--r--test cases/fortran/10 find library/main.f9078
-rw-r--r--test cases/fortran/11 compiles links runs/meson.build20
-rw-r--r--test cases/fortran/12 submodule/a1.f9025
-rw-r--r--test cases/fortran/12 submodule/a2.f9010
-rw-r--r--test cases/fortran/12 submodule/a3.f9010
-rw-r--r--test cases/fortran/12 submodule/child.f9010
-rw-r--r--test cases/fortran/12 submodule/meson.build7
-rw-r--r--test cases/fortran/12 submodule/parent.f9023
-rw-r--r--test cases/fortran/4 self dependency/selfdep.f9015
-rw-r--r--test cases/fortran/5 static/main.f9010
-rw-r--r--test cases/fortran/5 static/static_hello.f9018
-rw-r--r--test cases/fortran/6 dynamic/dynamic.f9018
-rw-r--r--test cases/fortran/6 dynamic/main.f909
-rw-r--r--test cases/fortran/8 module names/mod1.f904
-rw-r--r--test cases/fortran/8 module names/mod2.f904
-rw-r--r--test cases/fortran/8 module names/test.f9011
-rw-r--r--test cases/fortran/9 cpp/fortran.f12
-rw-r--r--test cases/fortran/9 cpp/meson.build8
-rw-r--r--test cases/frameworks/17 mpi/meson.build12
-rw-r--r--test cases/frameworks/25 hdf5/main.c30
-rw-r--r--test cases/frameworks/25 hdf5/main.cpp29
-rw-r--r--test cases/frameworks/25 hdf5/main.f9017
-rw-r--r--test cases/frameworks/25 hdf5/meson.build43
-rw-r--r--test cases/linuxlike/13 cmake dependency/cmake/FindSomethingLikeZLIB.cmake9
-rw-r--r--test cases/linuxlike/13 cmake dependency/meson.build6
-rw-r--r--test cases/osx/2 library versions/meson.build32
-rw-r--r--test cases/osx/2 library versions/require_pkgconfig.py9
-rw-r--r--test cases/osx/5 extra frameworks/installed_files.txt2
-rw-r--r--test cases/osx/5 extra frameworks/meson.build13
-rw-r--r--test cases/osx/5 extra frameworks/prog.c3
-rw-r--r--test cases/osx/5 extra frameworks/stat.c1
-rw-r--r--test cases/rewrite/1 basic/addSrc.json89
-rw-r--r--test cases/rewrite/1 basic/added.txt5
-rw-r--r--test cases/rewrite/1 basic/info.json47
-rw-r--r--test cases/rewrite/1 basic/meson.build19
-rw-r--r--test cases/rewrite/1 basic/removed.txt5
-rw-r--r--test cases/rewrite/1 basic/rmSrc.json83
-rw-r--r--test cases/rewrite/2 subdirs/addSrc.json13
-rw-r--r--test cases/rewrite/2 subdirs/info.json7
-rw-r--r--test cases/rewrite/2 subdirs/meson.build1
-rw-r--r--test cases/rewrite/2 subdirs/sub1/after.txt1
-rw-r--r--test cases/rewrite/2 subdirs/sub2/meson.build1
129 files changed, 3771 insertions, 1053 deletions
diff --git a/azure-pipelines.yml b/azure-pipelines.yml
index 39e41e9..90ebeff 100644
--- a/azure-pipelines.yml
+++ b/azure-pipelines.yml
@@ -136,6 +136,7 @@ jobs:
git ^
mercurial ^
mingw-w64-$(MSYS2_ARCH)-cmake ^
+ mingw-w64-$(MSYS2_ARCH)-ninja ^
mingw-w64-$(MSYS2_ARCH)-pkg-config ^
mingw-w64-$(MSYS2_ARCH)-python2 ^
mingw-w64-$(MSYS2_ARCH)-python3 ^
@@ -144,7 +145,6 @@ jobs:
displayName: Install Dependencies
- script: |
set PATH=%SystemRoot%\system32;%SystemRoot%;%SystemRoot%\System32\Wbem
- %MSYS2_ROOT%\usr\bin\bash -lc "wget https://github.com/mesonbuild/cidata/raw/master/ninja.exe; mv ninja.exe /$MSYSTEM/bin"
set PATHEXT=%PATHEXT%;.py
if %compiler%==clang ( set CC=clang && set CXX=clang++ )
%MSYS2_ROOT%\usr\bin\bash -lc "MSYSTEM= python3 run_tests.py --backend=ninja"
diff --git a/ciimage/Dockerfile b/ciimage/Dockerfile
index 520ce0f..980ed53 100644
--- a/ciimage/Dockerfile
+++ b/ciimage/Dockerfile
@@ -20,6 +20,7 @@ RUN sed -i '/^#\sdeb-src /s/^#//' "/etc/apt/sources.list" \
&& apt-get -y install --no-install-recommends wine-stable \
&& apt-get -y install llvm-dev libclang-dev \
&& apt-get -y install libgcrypt11-dev \
+&& apt-get -y install libhdf5-dev \
&& dub fetch urld && dub build urld --compiler=gdc \
&& dub fetch dubtestproject \
&& dub build dubtestproject:test1 --compiler=ldc2 \
diff --git a/docs/markdown/Builtin-options.md b/docs/markdown/Builtin-options.md
index 288bd79..0d1a16b 100644
--- a/docs/markdown/Builtin-options.md
+++ b/docs/markdown/Builtin-options.md
@@ -42,7 +42,7 @@ Installation options are all relative to the prefix, except:
| werror | false | Treat warnings as errors |
| warning_level {1, 2, 3} | 1 | Set the warning level. From 1 = lowest to 3 = highest |
| layout {mirror,flat} | mirror | Build directory layout. |
-| default-library {shared, static, both} | shared | Default library type. |
+| default_library {shared, static, both} | shared | Default library type. |
| backend {ninja, vs,<br>vs2010, vs2015, vs2017, xcode} | | Backend to use (default: ninja). |
| stdsplit | | Split stdout and stderr in test logs. |
| errorlogs | | Whether to print the logs from failing tests. |
diff --git a/docs/markdown/Cuda-module.md b/docs/markdown/Cuda-module.md
new file mode 100644
index 0000000..caa1756
--- /dev/null
+++ b/docs/markdown/Cuda-module.md
@@ -0,0 +1,183 @@
+---
+short-description: CUDA module
+authors:
+ - name: Olexa Bilaniuk
+ years: [2019]
+ has-copyright: false
+...
+
+# Unstable CUDA Module
+_Since: 0.50.0_
+
+This module provides helper functionality related to the CUDA Toolkit and
+building code using it.
+
+
+**Note**: this module is unstable. It is only provided as a technology preview.
+Its API may change in arbitrary ways between releases or it might be removed
+from Meson altogether.
+
+
+## Importing the module
+
+The module may be imported as follows:
+
+``` meson
+cuda = import('unstable-cuda')
+```
+
+It offers several useful functions that are enumerated below.
+
+
+## Functions
+
+### `nvcc_arch_flags()`
+_Since: 0.50.0_
+
+``` meson
+cuda.nvcc_arch_flags(nvcc_or_version, ...,
+ detected: string_or_array)
+```
+
+Returns a list of `-gencode` flags that should be passed to `cuda_args:` in
+order to compile a "fat binary" for the architectures/compute capabilities
+enumerated in the positional argument(s). The flags shall be acceptable to
+the NVCC compiler object `nvcc_or_version`, or its version string.
+
+A set of architectures and/or compute capabilities may be specified by:
+
+- The single positional argument `'All'`, `'Common'` or `'Auto'`
+- As (an array of)
+ - Architecture names (`'Kepler'`, `'Maxwell+Tegra'`, `'Turing'`) and/or
+ - Compute capabilities (`'3.0'`, `'3.5'`, `'5.3'`, `'7.5'`)
+
+A suffix of `+PTX` requests PTX code generation for the given architecture.
+A compute capability given as `A.B(X.Y)` requests PTX generation for an older
+virtual architecture `X.Y` before binary generation for a newer architecture
+`A.B`.
+
+Multiple architectures and compute capabilities may be passed in using
+
+- Multiple positional arguments
+- Lists of strings
+- Space (` `), comma (`,`) or semicolon (`;`)-separated strings
+
+The single-word architectural sets `'All'`, `'Common'` or `'Auto'` cannot be
+mixed with architecture names or compute capabilities. Their interpretation is:
+
+| Name              | Interpretation     |
+|-------------------|--------------------|
+| `'All'` | All CCs supported by given NVCC compiler. |
+| `'Common'` | Relatively common CCs supported by given NVCC compiler. Generally excludes Tegra and Tesla devices. |
+| `'Auto'` | The CCs provided by the `detected:` keyword, filtered for support by given NVCC compiler. |
+
+The supported architecture names and their corresponding compute capabilities
+are:
+
+| Name | Compute Capability |
+|-------------------|--------------------|
+| `'Fermi'` | 2.0, 2.1(2.0) |
+| `'Kepler'` | 3.0, 3.5 |
+| `'Kepler+Tegra'` | 3.2 |
+| `'Kepler+Tesla'` | 3.7 |
+| `'Maxwell'` | 5.0, 5.2 |
+| `'Maxwell+Tegra'` | 5.3 |
+| `'Pascal'` | 6.0, 6.1 |
+| `'Pascal+Tegra'` | 6.2 |
+| `'Volta'` | 7.0 |
+| `'Volta+Tegra'` | 7.2 |
+| `'Turing'` | 7.5 |
+
+
+Examples:
+
+ cuda.nvcc_arch_flags('10.0', '3.0', '3.5', '5.0+PTX')
+ cuda.nvcc_arch_flags('10.0', ['3.0', '3.5', '5.0+PTX'])
+ cuda.nvcc_arch_flags('10.0', [['3.0', '3.5'], '5.0+PTX'])
+ cuda.nvcc_arch_flags('10.0', '3.0 3.5 5.0+PTX')
+ cuda.nvcc_arch_flags('10.0', '3.0,3.5,5.0+PTX')
+ cuda.nvcc_arch_flags('10.0', '3.0;3.5;5.0+PTX')
+ cuda.nvcc_arch_flags('10.0', 'Kepler 5.0+PTX')
+ # Returns ['-gencode', 'arch=compute_30,code=sm_30',
+ # '-gencode', 'arch=compute_35,code=sm_35',
+ # '-gencode', 'arch=compute_50,code=sm_50',
+ # '-gencode', 'arch=compute_50,code=compute_50']
+
+ cuda.nvcc_arch_flags('10.0', '3.5(3.0)')
+ # Returns ['-gencode', 'arch=compute_30,code=sm_35']
+
+ cuda.nvcc_arch_flags('8.0', 'Common')
+ # Returns ['-gencode', 'arch=compute_30,code=sm_30',
+ # '-gencode', 'arch=compute_35,code=sm_35',
+ # '-gencode', 'arch=compute_50,code=sm_50',
+ # '-gencode', 'arch=compute_52,code=sm_52',
+ # '-gencode', 'arch=compute_60,code=sm_60',
+ # '-gencode', 'arch=compute_61,code=sm_61',
+ # '-gencode', 'arch=compute_61,code=compute_61']
+
+ cuda.nvcc_arch_flags('9.2', 'Auto', detected: '6.0 6.0 6.0 6.0')
+ cuda.nvcc_arch_flags('9.2', 'Auto', detected: ['6.0', '6.0', '6.0', '6.0'])
+ # Returns ['-gencode', 'arch=compute_60,code=sm_60']
+
+ cuda.nvcc_arch_flags(nvcc, 'All')
+ # Returns ['-gencode', 'arch=compute_20,code=sm_20',
+ # '-gencode', 'arch=compute_20,code=sm_21',
+ # '-gencode', 'arch=compute_30,code=sm_30',
+ # '-gencode', 'arch=compute_32,code=sm_32',
+ # '-gencode', 'arch=compute_35,code=sm_35',
+ # '-gencode', 'arch=compute_37,code=sm_37',
+ # '-gencode', 'arch=compute_50,code=sm_50', # nvcc.version() < 7.0
+ # '-gencode', 'arch=compute_52,code=sm_52',
+ # '-gencode', 'arch=compute_53,code=sm_53', # nvcc.version() >= 7.0
+ # '-gencode', 'arch=compute_60,code=sm_60',
+ # '-gencode', 'arch=compute_61,code=sm_61', # nvcc.version() >= 8.0
+ # '-gencode', 'arch=compute_70,code=sm_70',
+ # '-gencode', 'arch=compute_72,code=sm_72', # nvcc.version() >= 9.0
+ # '-gencode', 'arch=compute_75,code=sm_75'] # nvcc.version() >= 10.0
+
+_Note:_ This function is intended to closely replicate CMake's FindCUDA module
+function `CUDA_SELECT_NVCC_ARCH_FLAGS(out_variable, [list of CUDA compute architectures])`
+
+
+
+### `nvcc_arch_readable()`
+_Since: 0.50.0_
+
+``` meson
+cuda.nvcc_arch_readable(nvcc_or_version, ...,
+ detected: string_or_array)
+```
+
+Has precisely the same interface as [`nvcc_arch_flags()`](#nvcc_arch_flags),
+but rather than returning a list of flags, it returns a "readable" list of
+architectures that will be compiled for. The output of this function is solely
+intended for informative message printing.
+
+ archs = '3.0 3.5 5.0+PTX'
+ readable = cuda.nvcc_arch_readable(nvcc, archs)
+ message('Building for architectures ' + ' '.join(readable))
+
+This will print
+
+ Message: Building for architectures sm30 sm35 sm50 compute50
+
+_Note:_ This function is intended to closely replicate CMake's FindCUDA module function
+`CUDA_SELECT_NVCC_ARCH_FLAGS(out_variable, [list of CUDA compute architectures])`
+
+
+
+### `min_driver_version()`
+_Since: 0.50.0_
+
+``` meson
+cuda.min_driver_version(nvcc_or_version)
+```
+
+Returns the minimum NVIDIA proprietary driver version required, on the host
+system, by kernels compiled with the given NVCC compiler or its version string.
+
+The output of this function is generally intended for informative message
+printing, but could be used for assertions or to conditionally enable
+features known to exist within the minimum NVIDIA driver required.
+
+
diff --git a/docs/markdown/Dependencies.md b/docs/markdown/Dependencies.md
index e3fedc4..259f09e 100644
--- a/docs/markdown/Dependencies.md
+++ b/docs/markdown/Dependencies.md
@@ -148,6 +148,14 @@ it automatically.
cmake_dep = dependency('ZLIB', method : 'cmake', modules : ['ZLIB::ZLIB'])
```
+It is also possible to reuse existing `Find<name>.cmake` files with the
+`cmake_module_path` property. Using this property is equivalent to setting the
+`CMAKE_MODULE_PATH` variable in CMake. The path(s) given to `cmake_module_path`
+should all be relative to the project source directory. Absolute paths
+should only be used if the CMake files are not stored in the project itself.
+
+Additional CMake parameters can be specified with the `cmake_args` property.
+
### Some notes on Dub
Please understand that meson is only able to find dependencies that
@@ -269,6 +277,17 @@ e = executable('testprog', 'test.cc', dependencies : gtest_dep)
test('gtest test', e)
```
+## HDF5
+HDF5 is supported for C, C++ and Fortran. Because dependencies are
+language-specific, you must specify the requested language using the
+`language` keyword argument, i.e.,
+ * `dependency('hdf5', language: 'c')` for the C HDF5 headers and libraries
+ * `dependency('hdf5', language: 'cpp')` for the C++ HDF5 headers and libraries
+ * `dependency('hdf5', language: 'fortran')` for the Fortran HDF5 headers and libraries
+
+Meson uses pkg-config to find HDF5. The standard low-level HDF5 functions and the `HL` high-level HDF5 functions are linked for each language.
+
+
## libwmf
*(added 0.44.0)*
diff --git a/docs/markdown/External-commands.md b/docs/markdown/External-commands.md
index 9336ec3..4c8c8e4 100644
--- a/docs/markdown/External-commands.md
+++ b/docs/markdown/External-commands.md
@@ -16,6 +16,14 @@ output = r.stdout().strip()
errortxt = r.stderr().strip()
```
+Additionally, since 0.50.0, you can pass the command an [`environment`](Reference-manual.html#environment-object) object:
+
+```meson
+env = environment()
+env.set('FOO', 'bar')
+run_command('command', 'arg1', 'arg2', env: env)
+```
+
The `run_command` function returns an object that can be queried for
return value and text written to stdout and stderr. The `strip` method
call is used to strip trailing and leading whitespace from
diff --git a/docs/markdown/Reference-manual.md b/docs/markdown/Reference-manual.md
index e913e25..db43813 100644
--- a/docs/markdown/Reference-manual.md
+++ b/docs/markdown/Reference-manual.md
@@ -1183,12 +1183,14 @@ and Meson will set three environment variables `MESON_SOURCE_ROOT`,
directory, build directory and subdirectory the target was defined in,
respectively.
-This function has one keyword argument.
+This function supports the following keyword arguments:
- `check` takes a boolean. If `true`, the exit status code of the command will
be checked, and the configuration will fail if it is non-zero. The default is
`false`.
Since 0.47.0
+ - `env` an [environment object](#environment-object) to use a custom environment
+ Since 0.50.0
See also [External commands](External-commands.md).
@@ -2175,7 +2177,7 @@ and has the following methods:
This object is returned by [`environment()`](#environment) and stores
detailed information about how environment variables should be set
during tests. It should be passed as the `env` keyword argument to
-tests. It has the following methods.
+tests and other functions. It has the following methods.
- `append(varname, value1, value2, ...)` appends the given values to
the old value of the environment variable, e.g. `env.append('FOO',
diff --git a/docs/markdown/howtox.md b/docs/markdown/howtox.md
index 3d8515f..8ae4fde 100644
--- a/docs/markdown/howtox.md
+++ b/docs/markdown/howtox.md
@@ -203,3 +203,20 @@ executable(..., dependencies : m_dep)
```meson
executable(..., install : true, install_dir : get_option('libexecdir'))
```
+
+## Use existing `Find<name>.cmake` files
+
+Meson can use the CMake `find_package()` ecosystem if CMake is installed.
+To find a dependency with custom `Find<name>.cmake`, set the `cmake_module_path`
+property to the path in your project where the CMake scripts are stored.
+
+Example for a `FindCmakeOnlyDep.cmake` in a `cmake` subdirectory:
+
+```meson
+cm_dep = dependency('CmakeOnlyDep', cmake_module_path : 'cmake')
+```
+
+The `cmake_module_path` property is only needed for custom CMake scripts.
+System-wide CMake scripts are found automatically.
+
+More information can be found [here](Dependencies.md#cmake)
diff --git a/docs/markdown/i18n-module.md b/docs/markdown/i18n-module.md
index 88f059b..9053edc 100644
--- a/docs/markdown/i18n-module.md
+++ b/docs/markdown/i18n-module.md
@@ -29,6 +29,7 @@ argument which is the name of the gettext module.
[source](https://github.com/mesonbuild/meson/blob/master/mesonbuild/modules/i18n.py)
for for their value
* `install`: (*Added 0.43.0*) if false, do not install the built translations.
+* `install_dir`: (*Added 0.50.0*) override default install location, default is `localedir`
This function also defines targets for maintainers to use:
**Note**: These output to the source directory
diff --git a/docs/markdown/snippets/cmake_module_path.md b/docs/markdown/snippets/cmake_module_path.md
new file mode 100644
index 0000000..7beb453
--- /dev/null
+++ b/docs/markdown/snippets/cmake_module_path.md
@@ -0,0 +1,9 @@
+## Added `cmake_module_path` and `cmake_args` to dependency
+
+The CMake dependency backend can now make use of existing `Find<name>.cmake`
+files by setting the `CMAKE_MODULE_PATH` with the new `dependency()` property
+`cmake_module_path`. The paths given to `cmake_module_path` should be relative
+to the project source directory.
+
+Furthermore the property `cmake_args` was added to give CMake additional
+parameters.
diff --git a/docs/markdown/snippets/fortran_submodule.md b/docs/markdown/snippets/fortran_submodule.md
new file mode 100644
index 0000000..9e4b9cc
--- /dev/null
+++ b/docs/markdown/snippets/fortran_submodule.md
@@ -0,0 +1,12 @@
+## Fortran submodule support
+
+Initial support for Fortran ``submodule`` was added, where the submodule is in
+the same file as, or a different file from, the parent ``module``.
+The submodule hierarchy specified in the source Fortran code `submodule`
+statements is used by Meson to resolve source file dependencies.
+For example:
+
+```fortran
+submodule (ancestor:parent) child
+```
+
diff --git a/docs/markdown/snippets/hdf5.md b/docs/markdown/snippets/hdf5.md
new file mode 100644
index 0000000..8ebb4c0
--- /dev/null
+++ b/docs/markdown/snippets/hdf5.md
@@ -0,0 +1,3 @@
+## HDF5
+
+HDF5 support is added via pkg-config.
diff --git a/docs/markdown/snippets/run_command_env.md b/docs/markdown/snippets/run_command_env.md
new file mode 100644
index 0000000..dfa5ac5
--- /dev/null
+++ b/docs/markdown/snippets/run_command_env.md
@@ -0,0 +1,9 @@
+## `run_command` accepts `env` kwarg
+
+You can pass an [`environment`](Reference-manual.html#environment-object) object to [`run_command`](Reference-manual.html#run-command), just like to `test`:
+
+```meson
+env = environment()
+env.set('FOO', 'bar')
+run_command('command', 'arg1', 'arg2', env: env)
+```
diff --git a/docs/sitemap.txt b/docs/sitemap.txt
index f80c279..6987641 100644
--- a/docs/sitemap.txt
+++ b/docs/sitemap.txt
@@ -44,6 +44,7 @@ index.md
RPM-module.md
Simd-module.md
Windows-module.md
+ Cuda-module.md
Java.md
Vala.md
D.md
diff --git a/mesonbuild/ast/__init__.py b/mesonbuild/ast/__init__.py
new file mode 100644
index 0000000..a9370dc
--- /dev/null
+++ b/mesonbuild/ast/__init__.py
@@ -0,0 +1,32 @@
+# Copyright 2019 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This class contains the basic functionality needed to run any interpreter
+# or an interpreter-based tool.
+
+__all__ = [
+ 'AstInterpreter',
+ 'AstIDGenerator',
+ 'AstIndentationGenerator',
+ 'AstVisitor',
+ 'AstPrinter',
+ 'IntrospectionInterpreter',
+ 'build_target_functions',
+]
+
+from .interpreter import AstInterpreter
+from .introspection import IntrospectionInterpreter, build_target_functions
+from .visitor import AstVisitor
+from .postprocess import AstIDGenerator, AstIndentationGenerator
+from .printer import AstPrinter
diff --git a/mesonbuild/astinterpreter.py b/mesonbuild/ast/interpreter.py
index f68aa7a..68c017a 100644
--- a/mesonbuild/astinterpreter.py
+++ b/mesonbuild/ast/interpreter.py
@@ -15,10 +15,10 @@
# This class contains the basic functionality needed to run any interpreter
# or an interpreter-based tool.
-from . import interpreterbase, mparser, mesonlib
-from . import environment
+from .. import interpreterbase, mparser, mesonlib
+from .. import environment
-from .interpreterbase import InterpreterException, InvalidArguments, BreakRequest, ContinueRequest
+from ..interpreterbase import InvalidArguments, BreakRequest, ContinueRequest
import os, sys
@@ -46,6 +46,8 @@ REMOVE_SOURCE = 1
class AstInterpreter(interpreterbase.InterpreterBase):
def __init__(self, source_root, subdir):
super().__init__(source_root, subdir)
+ self.visited_subdirs = {}
+ self.assignments = {}
self.funcs.update({'project': self.func_do_nothing,
'test': self.func_do_nothing,
'benchmark': self.func_do_nothing,
@@ -83,15 +85,58 @@ class AstInterpreter(interpreterbase.InterpreterBase):
'build_target': self.func_do_nothing,
'custom_target': self.func_do_nothing,
'run_target': self.func_do_nothing,
- 'subdir': self.func_do_nothing,
+ 'subdir': self.func_subdir,
'set_variable': self.func_do_nothing,
'get_variable': self.func_do_nothing,
'is_variable': self.func_do_nothing,
+ 'disabler': self.func_do_nothing,
+ 'gettext': self.func_do_nothing,
+ 'jar': self.func_do_nothing,
+ 'warning': self.func_do_nothing,
+ 'shared_module': self.func_do_nothing,
+ 'option': self.func_do_nothing,
+ 'both_libraries': self.func_do_nothing,
+ 'add_test_setup': self.func_do_nothing,
+ 'find_library': self.func_do_nothing,
+ 'subdir_done': self.func_do_nothing,
})
def func_do_nothing(self, node, args, kwargs):
return True
+ def func_subdir(self, node, args, kwargs):
+ args = self.flatten_args(args)
+ if len(args) != 1 or not isinstance(args[0], str):
+ sys.stderr.write('Unable to evaluate subdir({}) in AstInterpreter --> Skipping\n'.format(args))
+ return
+
+ prev_subdir = self.subdir
+ subdir = os.path.join(prev_subdir, args[0])
+ absdir = os.path.join(self.source_root, subdir)
+ buildfilename = os.path.join(self.subdir, environment.build_filename)
+ absname = os.path.join(self.source_root, buildfilename)
+ symlinkless_dir = os.path.realpath(absdir)
+ if symlinkless_dir in self.visited_subdirs:
+ sys.stderr.write('Trying to enter {} which has already been visited --> Skipping\n'.format(args[0]))
+ return
+ self.visited_subdirs[symlinkless_dir] = True
+
+ if not os.path.isfile(absname):
+ sys.stderr.write('Unable to find build file {} --> Skipping\n'.format(buildfilename))
+ return
+ with open(absname, encoding='utf8') as f:
+ code = f.read()
+ assert(isinstance(code, str))
+ try:
+ codeblock = mparser.Parser(code, self.subdir).parse()
+ except mesonlib.MesonException as me:
+ me.file = buildfilename
+ raise me
+
+ self.subdir = subdir
+ self.evaluate_codeblock(codeblock)
+ self.subdir = prev_subdir
+
def method_call(self, node):
return True
@@ -99,7 +144,11 @@ class AstInterpreter(interpreterbase.InterpreterBase):
return 0
def evaluate_plusassign(self, node):
- return 0
+ assert(isinstance(node, mparser.PlusAssignmentNode))
+ if node.var_name not in self.assignments:
+ self.assignments[node.var_name] = []
+ self.assignments[node.var_name] += [node.value] # Save a reference to the value node
+ self.evaluate_statement(node.value) # Evaluate the value just in case
def evaluate_indexing(self, node):
return 0
@@ -134,148 +183,37 @@ class AstInterpreter(interpreterbase.InterpreterBase):
return 0
def assignment(self, node):
- pass
-
-class RewriterInterpreter(AstInterpreter):
- def __init__(self, source_root, subdir):
- super().__init__(source_root, subdir)
- self.asts = {}
- self.funcs.update({'files': self.func_files,
- 'executable': self.func_executable,
- 'static_library': self.func_static_lib,
- 'shared_library': self.func_shared_lib,
- 'library': self.func_library,
- 'build_target': self.func_build_target,
- 'custom_target': self.func_custom_target,
- 'run_target': self.func_run_target,
- 'subdir': self.func_subdir,
- 'set_variable': self.func_set_variable,
- 'get_variable': self.func_get_variable,
- 'is_variable': self.func_is_variable,
- })
-
- def func_executable(self, node, args, kwargs):
- if args[0] == self.targetname:
- if self.operation == ADD_SOURCE:
- self.add_source_to_target(node, args, kwargs)
- elif self.operation == REMOVE_SOURCE:
- self.remove_source_from_target(node, args, kwargs)
+ assert(isinstance(node, mparser.AssignmentNode))
+ self.assignments[node.var_name] = [node.value] # Save a reference to the value node
+ self.evaluate_statement(node.value) # Evaluate the value just in case
+
+ def flatten_args(self, args, include_unknown_args: bool = False):
+ # Resolve mparser.ArrayNode if needed
+ flattend_args = []
+ temp_args = []
+ if isinstance(args, mparser.ArrayNode):
+ args = [x for x in args.args.arguments]
+ elif isinstance(args, mparser.ArgumentNode):
+ args = [x for x in args.arguments]
+ for i in args:
+ if isinstance(i, mparser.ArrayNode):
+ temp_args += [x for x in i.args.arguments]
else:
- raise NotImplementedError('Bleep bloop')
- return MockExecutable()
-
- def func_static_lib(self, node, args, kwargs):
- return MockStaticLibrary()
-
- def func_shared_lib(self, node, args, kwargs):
- return MockSharedLibrary()
-
- def func_library(self, node, args, kwargs):
- return self.func_shared_lib(node, args, kwargs)
-
- def func_custom_target(self, node, args, kwargs):
- return MockCustomTarget()
-
- def func_run_target(self, node, args, kwargs):
- return MockRunTarget()
-
- def func_subdir(self, node, args, kwargs):
- prev_subdir = self.subdir
- subdir = os.path.join(prev_subdir, args[0])
- self.subdir = subdir
- buildfilename = os.path.join(self.subdir, environment.build_filename)
- absname = os.path.join(self.source_root, buildfilename)
- if not os.path.isfile(absname):
- self.subdir = prev_subdir
- raise InterpreterException('Nonexistent build def file %s.' % buildfilename)
- with open(absname, encoding='utf8') as f:
- code = f.read()
- assert(isinstance(code, str))
- try:
- codeblock = mparser.Parser(code, self.subdir).parse()
- self.asts[subdir] = codeblock
- except mesonlib.MesonException as me:
- me.file = buildfilename
- raise me
- self.evaluate_codeblock(codeblock)
- self.subdir = prev_subdir
-
- def func_files(self, node, args, kwargs):
- if not isinstance(args, list):
- return [args]
- return args
-
- def transform(self):
- self.load_root_meson_file()
- self.asts[''] = self.ast
- self.sanity_check_ast()
- self.parse_project()
- self.run()
-
- def add_source(self, targetname, filename):
- self.operation = ADD_SOURCE
- self.targetname = targetname
- self.filename = filename
- self.transform()
-
- def remove_source(self, targetname, filename):
- self.operation = REMOVE_SOURCE
- self.targetname = targetname
- self.filename = filename
- self.transform()
-
- def add_source_to_target(self, node, args, kwargs):
- namespan = node.args.arguments[0].bytespan
- buildfilename = os.path.join(self.source_root, self.subdir, environment.build_filename)
- raw_data = open(buildfilename, 'r').read()
- updated = raw_data[0:namespan[1]] + (", '%s'" % self.filename) + raw_data[namespan[1]:]
- open(buildfilename, 'w').write(updated)
- sys.exit(0)
-
- def remove_argument_item(self, args, i):
- assert(isinstance(args, mparser.ArgumentNode))
- namespan = args.arguments[i].bytespan
- # Usually remove the comma after this item but if it is
- # the last argument, we need to remove the one before.
- if i >= len(args.commas):
- i -= 1
- if i < 0:
- commaspan = (0, 0) # Removed every entry in the list.
- else:
- commaspan = args.commas[i].bytespan
- if commaspan[0] < namespan[0]:
- commaspan, namespan = namespan, commaspan
- buildfilename = os.path.join(self.source_root, args.subdir, environment.build_filename)
- raw_data = open(buildfilename, 'r').read()
- intermediary = raw_data[0:commaspan[0]] + raw_data[commaspan[1]:]
- updated = intermediary[0:namespan[0]] + intermediary[namespan[1]:]
- open(buildfilename, 'w').write(updated)
- sys.exit(0)
-
- def hacky_find_and_remove(self, node_to_remove):
- for a in self.asts[node_to_remove.subdir].lines:
- if a.lineno == node_to_remove.lineno:
- if isinstance(a, mparser.AssignmentNode):
- v = a.value
- if not isinstance(v, mparser.ArrayNode):
- raise NotImplementedError('Not supported yet, bro.')
- args = v.args
- for i in range(len(args.arguments)):
- if isinstance(args.arguments[i], mparser.StringNode) and self.filename == args.arguments[i].value:
- self.remove_argument_item(args, i)
- raise NotImplementedError('Sukkess')
-
- def remove_source_from_target(self, node, args, kwargs):
- for i in range(1, len(node.args)):
- # Is file name directly in function call as a string.
- if isinstance(node.args.arguments[i], mparser.StringNode) and self.filename == node.args.arguments[i].value:
- self.remove_argument_item(node.args, i)
- # Is file name in a variable that gets expanded here.
- if isinstance(node.args.arguments[i], mparser.IdNode):
- avar = self.get_variable(node.args.arguments[i].value)
- if not isinstance(avar, list):
- raise NotImplementedError('Non-arrays not supported yet, sorry.')
- for entry in avar:
- if isinstance(entry, mparser.StringNode) and entry.value == self.filename:
- self.hacky_find_and_remove(entry)
- sys.exit('Could not find source %s in target %s.' % (self.filename, args[0]))
+ temp_args += [i]
+ for i in temp_args:
+ if isinstance(i, mparser.ElementaryNode) and not isinstance(i, mparser.IdNode):
+ flattend_args += [i.value]
+ elif isinstance(i, (str, bool, int, float)) or include_unknown_args:
+ flattend_args += [i]
+ return flattend_args
+
+ def flatten_kwargs(self, kwargs: object, include_unknown_args: bool = False):
+ flattend_kwargs = {}
+ for key, val in kwargs.items():
+ if isinstance(val, mparser.ElementaryNode):
+ flattend_kwargs[key] = val.value
+ elif isinstance(val, (mparser.ArrayNode, mparser.ArgumentNode)):
+ flattend_kwargs[key] = self.flatten_args(val, include_unknown_args)
+ elif isinstance(val, (str, bool, int, float)) or include_unknown_args:
+ flattend_kwargs[key] = val
+ return flattend_kwargs
diff --git a/mesonbuild/ast/introspection.py b/mesonbuild/ast/introspection.py
new file mode 100644
index 0000000..5d0ec5a
--- /dev/null
+++ b/mesonbuild/ast/introspection.py
@@ -0,0 +1,241 @@
+# Copyright 2018 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This class contains the basic functionality needed to run any interpreter
+# or an interpreter-based tool
+
+from . import AstInterpreter
+from .. import compilers, environment, mesonlib, mparser, optinterpreter
+from .. import coredata as cdata
+from ..interpreterbase import InvalidArguments
+from ..build import Executable, Jar, SharedLibrary, SharedModule, StaticLibrary
+import os
+
+build_target_functions = ['executable', 'jar', 'library', 'shared_library', 'shared_module', 'static_library', 'both_libraries']
+
+class IntrospectionHelper:
+ # mimic an argparse namespace
+ def __init__(self, cross_file):
+ self.cross_file = cross_file
+ self.native_file = None
+ self.cmd_line_options = {}
+
+class IntrospectionInterpreter(AstInterpreter):
+ # Interpreter to detect the options without a build directory
+ # Most of the code is stolen from interpreter.Interpreter
+ def __init__(self, source_root, subdir, backend, cross_file=None, subproject='', subproject_dir='subprojects', env=None):
+ super().__init__(source_root, subdir)
+
+ options = IntrospectionHelper(cross_file)
+ self.cross_file = cross_file
+ if env is None:
+ self.environment = environment.Environment(source_root, None, options)
+ else:
+ self.environment = env
+ self.subproject = subproject
+ self.subproject_dir = subproject_dir
+ self.coredata = self.environment.get_coredata()
+ self.option_file = os.path.join(self.source_root, self.subdir, 'meson_options.txt')
+ self.backend = backend
+ self.default_options = {'backend': self.backend}
+ self.project_data = {}
+ self.targets = []
+
+ self.funcs.update({
+ 'add_languages': self.func_add_languages,
+ 'executable': self.func_executable,
+ 'jar': self.func_jar,
+ 'library': self.func_library,
+ 'project': self.func_project,
+ 'shared_library': self.func_shared_lib,
+ 'shared_module': self.func_shared_module,
+ 'static_library': self.func_static_lib,
+ 'both_libraries': self.func_both_lib,
+ })
+
+ def func_project(self, node, args, kwargs):
+ if len(args) < 1:
+ raise InvalidArguments('Not enough arguments to project(). Needs at least the project name.')
+
+ proj_name = args[0]
+ proj_vers = kwargs.get('version', 'undefined')
+ proj_langs = self.flatten_args(args[1:])
+ if isinstance(proj_vers, mparser.ElementaryNode):
+ proj_vers = proj_vers.value
+ if not isinstance(proj_vers, str):
+ proj_vers = 'undefined'
+ self.project_data = {'descriptive_name': proj_name, 'version': proj_vers}
+
+ if os.path.exists(self.option_file):
+ oi = optinterpreter.OptionInterpreter(self.subproject)
+ oi.process(self.option_file)
+ self.coredata.merge_user_options(oi.options)
+
+ def_opts = self.flatten_args(kwargs.get('default_options', []))
+ self.project_default_options = mesonlib.stringlistify(def_opts)
+ self.project_default_options = cdata.create_options_dict(self.project_default_options)
+ self.default_options.update(self.project_default_options)
+ self.coredata.set_default_options(self.default_options, self.subproject, self.environment.cmd_line_options)
+
+ if not self.is_subproject() and 'subproject_dir' in kwargs:
+ spdirname = kwargs['subproject_dir']
+ if isinstance(spdirname, str):
+ self.subproject_dir = spdirname
+ if not self.is_subproject():
+ self.project_data['subprojects'] = []
+ subprojects_dir = os.path.join(self.source_root, self.subproject_dir)
+ if os.path.isdir(subprojects_dir):
+ for i in os.listdir(subprojects_dir):
+ if os.path.isdir(os.path.join(subprojects_dir, i)):
+ self.do_subproject(i)
+
+ self.coredata.init_backend_options(self.backend)
+ options = {k: v for k, v in self.environment.cmd_line_options.items() if k.startswith('backend_')}
+
+ self.coredata.set_options(options)
+ self.func_add_languages(None, proj_langs, None)
+
+ def do_subproject(self, dirname):
+ subproject_dir_abs = os.path.join(self.environment.get_source_dir(), self.subproject_dir)
+ subpr = os.path.join(subproject_dir_abs, dirname)
+ try:
+ subi = IntrospectionInterpreter(subpr, '', self.backend, cross_file=self.cross_file, subproject=dirname, subproject_dir=self.subproject_dir, env=self.environment)
+ subi.analyze()
+ subi.project_data['name'] = dirname
+ self.project_data['subprojects'] += [subi.project_data]
+ except Exception:
+ return
+
+ def func_add_languages(self, node, args, kwargs):
+ args = self.flatten_args(args)
+ need_cross_compiler = self.environment.is_cross_build()
+ for lang in sorted(args, key=compilers.sort_clink):
+ lang = lang.lower()
+ if lang not in self.coredata.compilers:
+ self.environment.detect_compilers(lang, need_cross_compiler)
+
+ def build_target(self, node, args, kwargs, targetclass):
+ if not args:
+ return
+ kwargs = self.flatten_kwargs(kwargs, True)
+ name = self.flatten_args(args)[0]
+ srcqueue = [node]
+ if 'sources' in kwargs:
+ srcqueue += kwargs['sources']
+
+ source_nodes = []
+ while srcqueue:
+ curr = srcqueue.pop(0)
+ arg_node = None
+ if isinstance(curr, mparser.FunctionNode):
+ arg_node = curr.args
+ elif isinstance(curr, mparser.ArrayNode):
+ arg_node = curr.args
+ elif isinstance(curr, mparser.IdNode):
+ # Try to resolve the ID and append the node to the queue
+ var_name = curr.value
+ if var_name in self.assignments and self.assignments[var_name]:
+ res_node = self.assignments[var_name][0]
+ if isinstance(res_node, (mparser.ArrayNode, mparser.IdNode, mparser.FunctionNode)):
+ srcqueue += [res_node]
+ if arg_node is None:
+ continue
+ elementary_nodes = list(filter(lambda x: isinstance(x, (str, mparser.StringNode)), arg_node.arguments))
+ srcqueue += list(filter(lambda x: isinstance(x, (mparser.FunctionNode, mparser.ArrayNode, mparser.IdNode)), arg_node.arguments))
+ # Pop the first element if the function is a build target function
+ if isinstance(curr, mparser.FunctionNode) and curr.func_name in build_target_functions:
+ elementary_nodes.pop(0)
+ if elementary_nodes:
+ source_nodes += [curr]
+
+ # Filter out kwargs from other target types. For example 'soversion'
+ # passed to library() when default_library == 'static'.
+ kwargs = {k: v for k, v in kwargs.items() if k in targetclass.known_kwargs}
+
+ is_cross = False
+ objects = []
+ empty_sources = [] # Passing the unresolved sources list causes errors
+ target = targetclass(name, self.subdir, self.subproject, is_cross, empty_sources, objects, self.environment, kwargs)
+
+ self.targets += [{
+ 'name': target.get_basename(),
+ 'id': target.get_id(),
+ 'type': target.get_typename(),
+ 'defined_in': os.path.normpath(os.path.join(self.source_root, self.subdir, environment.build_filename)),
+ 'subdir': self.subdir,
+ 'build_by_default': target.build_by_default,
+ 'sources': source_nodes,
+ 'kwargs': kwargs,
+ 'node': node,
+ }]
+
+ return
+
+ def build_library(self, node, args, kwargs):
+ default_library = self.coredata.get_builtin_option('default_library')
+ if default_library == 'shared':
+ return self.build_target(node, args, kwargs, SharedLibrary)
+ elif default_library == 'static':
+ return self.build_target(node, args, kwargs, StaticLibrary)
+ elif default_library == 'both':
+ return self.build_target(node, args, kwargs, SharedLibrary)
+
+ def func_executable(self, node, args, kwargs):
+ return self.build_target(node, args, kwargs, Executable)
+
+ def func_static_lib(self, node, args, kwargs):
+ return self.build_target(node, args, kwargs, StaticLibrary)
+
+ def func_shared_lib(self, node, args, kwargs):
+ return self.build_target(node, args, kwargs, SharedLibrary)
+
+ def func_both_lib(self, node, args, kwargs):
+ return self.build_target(node, args, kwargs, SharedLibrary)
+
+ def func_shared_module(self, node, args, kwargs):
+ return self.build_target(node, args, kwargs, SharedModule)
+
+ def func_library(self, node, args, kwargs):
+ return self.build_library(node, args, kwargs)
+
+ def func_jar(self, node, args, kwargs):
+ return self.build_target(node, args, kwargs, Jar)
+
+ def func_build_target(self, node, args, kwargs):
+ if 'target_type' not in kwargs:
+ return
+ target_type = kwargs.pop('target_type')
+ if isinstance(target_type, mparser.ElementaryNode):
+ target_type = target_type.value
+ if target_type == 'executable':
+ return self.build_target(node, args, kwargs, Executable)
+ elif target_type == 'shared_library':
+ return self.build_target(node, args, kwargs, SharedLibrary)
+ elif target_type == 'static_library':
+ return self.build_target(node, args, kwargs, StaticLibrary)
+ elif target_type == 'both_libraries':
+ return self.build_target(node, args, kwargs, SharedLibrary)
+ elif target_type == 'library':
+ return self.build_library(node, args, kwargs)
+ elif target_type == 'jar':
+ return self.build_target(node, args, kwargs, Jar)
+
+ def is_subproject(self):
+ return self.subproject != ''
+
+ def analyze(self):
+ self.load_root_meson_file()
+ self.sanity_check_ast()
+ self.parse_project()
+ self.run()
diff --git a/mesonbuild/ast/postprocess.py b/mesonbuild/ast/postprocess.py
new file mode 100644
index 0000000..e913b4f
--- /dev/null
+++ b/mesonbuild/ast/postprocess.py
@@ -0,0 +1,86 @@
+# Copyright 2019 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This file contains AST visitors that post-process a parsed AST, for
+# example by annotating nodes with indentation levels or unique IDs
+
+from . import AstVisitor
+from .. import mparser
+
+class AstIndentationGenerator(AstVisitor):
+ def __init__(self):
+ self.level = 0
+
+ def visit_default_func(self, node: mparser.BaseNode):
+ # Store the current level in the node
+ node.level = self.level
+
+ def visit_ArrayNode(self, node: mparser.ArrayNode):
+ self.visit_default_func(node)
+ self.level += 1
+ node.args.accept(self)
+ self.level -= 1
+
+ def visit_DictNode(self, node: mparser.DictNode):
+ self.visit_default_func(node)
+ self.level += 1
+ node.args.accept(self)
+ self.level -= 1
+
+ def visit_MethodNode(self, node: mparser.MethodNode):
+ self.visit_default_func(node)
+ node.source_object.accept(self)
+ self.level += 1
+ node.args.accept(self)
+ self.level -= 1
+
+ def visit_FunctionNode(self, node: mparser.FunctionNode):
+ self.visit_default_func(node)
+ self.level += 1
+ node.args.accept(self)
+ self.level -= 1
+
+ def visit_ForeachClauseNode(self, node: mparser.ForeachClauseNode):
+ self.visit_default_func(node)
+ self.level += 1
+ node.items.accept(self)
+ node.block.accept(self)
+ self.level -= 1
+
+ def visit_IfClauseNode(self, node: mparser.IfClauseNode):
+ self.visit_default_func(node)
+ for i in node.ifs:
+ i.accept(self)
+ if node.elseblock:
+ self.level += 1
+ node.elseblock.accept(self)
+ self.level -= 1
+
+ def visit_IfNode(self, node: mparser.IfNode):
+ self.visit_default_func(node)
+ self.level += 1
+ node.condition.accept(self)
+ node.block.accept(self)
+ self.level -= 1
+
+class AstIDGenerator(AstVisitor):
+ def __init__(self):
+ self.counter = {}
+
+ def visit_default_func(self, node: mparser.BaseNode):
+ name = type(node).__name__
+ if name not in self.counter:
+ self.counter[name] = 0
+ node.ast_id = name + '#' + str(self.counter[name])
+ self.counter[name] += 1
diff --git a/mesonbuild/ast/printer.py b/mesonbuild/ast/printer.py
new file mode 100644
index 0000000..60e0b0d
--- /dev/null
+++ b/mesonbuild/ast/printer.py
@@ -0,0 +1,203 @@
+# Copyright 2019 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This file contains an AST visitor that pretty-prints a parsed AST
+# back into meson build definition source text
+
+from .. import mparser
+from . import AstVisitor
+import re
+
+arithmic_map = {
+ 'add': '+',
+ 'sub': '-',
+ 'mod': '%',
+ 'mul': '*',
+ 'div': '/'
+}
+
+class AstPrinter(AstVisitor):
+ def __init__(self, indent: int = 2, arg_newline_cutoff: int = 5):
+ self.result = ''
+ self.indent = indent
+ self.arg_newline_cutoff = arg_newline_cutoff
+ self.ci = ''
+ self.is_newline = True
+ self.last_level = 0
+
+ def post_process(self):
+ self.result = re.sub(r'\s+\n', '\n', self.result)
+
+ def append(self, data: str, node: mparser.BaseNode):
+ level = 0
+ if node and hasattr(node, 'level'):
+ level = node.level
+ else:
+ level = self.last_level
+ self.last_level = level
+ if self.is_newline:
+ self.result += ' ' * (level * self.indent)
+ self.result += data
+ self.is_newline = False
+
+ def append_padded(self, data: str, node: mparser.BaseNode):
+ if self.result and self.result[-1] not in [' ', '\n']:
+ data = ' ' + data
+ self.append(data + ' ', node)
+
+ def newline(self):
+ self.result += '\n'
+ self.is_newline = True
+
+ def visit_BooleanNode(self, node: mparser.BooleanNode):
+ self.append('true' if node.value else 'false', node)
+
+ def visit_IdNode(self, node: mparser.IdNode):
+ self.append(node.value, node)
+
+ def visit_NumberNode(self, node: mparser.NumberNode):
+ self.append(str(node.value), node)
+
+ def visit_StringNode(self, node: mparser.StringNode):
+ self.append("'" + node.value + "'", node)
+
+ def visit_ContinueNode(self, node: mparser.ContinueNode):
+ self.append('continue', node)
+
+ def visit_BreakNode(self, node: mparser.BreakNode):
+ self.append('break', node)
+
+ def visit_ArrayNode(self, node: mparser.ArrayNode):
+ self.append('[', node)
+ node.args.accept(self)
+ self.append(']', node)
+
+ def visit_DictNode(self, node: mparser.DictNode):
+ self.append('{', node)
+ node.args.accept(self)
+ self.append('}', node)
+
+ def visit_OrNode(self, node: mparser.OrNode):
+ node.left.accept(self)
+ self.append_padded('or', node)
+ node.right.accept(self)
+
+ def visit_AndNode(self, node: mparser.AndNode):
+ node.left.accept(self)
+ self.append_padded('and', node)
+ node.right.accept(self)
+
+ def visit_ComparisonNode(self, node: mparser.ComparisonNode):
+ node.left.accept(self)
+ self.append_padded(mparser.comparison_map[node.ctype], node)
+ node.right.accept(self)
+
+ def visit_ArithmeticNode(self, node: mparser.ArithmeticNode):
+ node.left.accept(self)
+ self.append_padded(arithmic_map[node.operation], node)
+ node.right.accept(self)
+
+ def visit_NotNode(self, node: mparser.NotNode):
+ self.append_padded('not', node)
+ node.value.accept(self)
+
+ def visit_CodeBlockNode(self, node: mparser.CodeBlockNode):
+ for i in node.lines:
+ i.accept(self)
+ self.newline()
+
+ def visit_IndexNode(self, node: mparser.IndexNode):
+ self.append('[', node)
+ node.index.accept(self)
+ self.append(']', node)
+
+ def visit_MethodNode(self, node: mparser.MethodNode):
+ node.source_object.accept(self)
+ self.append('.' + node.name + '(', node)
+ node.args.accept(self)
+ self.append(')', node)
+
+ def visit_FunctionNode(self, node: mparser.FunctionNode):
+ self.append(node.func_name + '(', node)
+ node.args.accept(self)
+ self.append(')', node)
+
+ def visit_AssignmentNode(self, node: mparser.AssignmentNode):
+ self.append(node.var_name + ' = ', node)
+ node.value.accept(self)
+
+ def visit_PlusAssignmentNode(self, node: mparser.PlusAssignmentNode):
+ self.append(node.var_name + ' += ', node)
+ node.value.accept(self)
+
+ def visit_ForeachClauseNode(self, node: mparser.ForeachClauseNode):
+ varnames = [x.value for x in node.varnames]
+ self.append_padded('foreach', node)
+ self.append_padded(', '.join(varnames), node)
+ self.append_padded(':', node)
+ node.items.accept(self)
+ self.newline()
+ node.block.accept(self)
+ self.append('endforeach', node)
+
+ def visit_IfClauseNode(self, node: mparser.IfClauseNode):
+ prefix = ''
+ for i in node.ifs:
+ self.append_padded(prefix + 'if', node)
+ prefix = 'el'
+ i.accept(self)
+ if node.elseblock:
+ self.append('else', node)
+ node.elseblock.accept(self)
+ self.append('endif', node)
+
+ def visit_UMinusNode(self, node: mparser.UMinusNode):
+ self.append_padded('-', node)
+ node.value.accept(self)
+
+ def visit_IfNode(self, node: mparser.IfNode):
+ node.condition.accept(self)
+ self.newline()
+ node.block.accept(self)
+
+ def visit_TernaryNode(self, node: mparser.TernaryNode):
+ node.condition.accept(self)
+ self.append_padded('?', node)
+ node.trueblock.accept(self)
+ self.append_padded(':', node)
+ node.falseblock.accept(self)
+
+ def visit_ArgumentNode(self, node: mparser.ArgumentNode):
+ break_args = (len(node.arguments) + len(node.kwargs)) > self.arg_newline_cutoff
+ for i in node.arguments + list(node.kwargs.values()):
+ if not isinstance(i, mparser.ElementaryNode):
+ break_args = True
+ if break_args:
+ self.newline()
+ for i in node.arguments:
+ i.accept(self)
+ self.append(', ', node)
+ if break_args:
+ self.newline()
+ for key, val in node.kwargs.items():
+ self.append(key, node)
+ self.append_padded(':', node)
+ val.accept(self)
+ self.append(', ', node)
+ if break_args:
+ self.newline()
+ if break_args:
+ self.result = re.sub(r', \n$', '\n', self.result)
+ else:
+ self.result = re.sub(r', $', '', self.result)
diff --git a/mesonbuild/ast/visitor.py b/mesonbuild/ast/visitor.py
new file mode 100644
index 0000000..c8769d4
--- /dev/null
+++ b/mesonbuild/ast/visitor.py
@@ -0,0 +1,140 @@
+# Copyright 2019 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This file contains the base class for all AST visitors; subclasses
+# override the visit_* methods for the node types they care about
+
+from .. import mparser
+
+class AstVisitor:
+ def __init__(self):
+ pass
+
+ def visit_default_func(self, node: mparser.BaseNode):
+ pass
+
+ def visit_BooleanNode(self, node: mparser.BooleanNode):
+ self.visit_default_func(node)
+
+ def visit_IdNode(self, node: mparser.IdNode):
+ self.visit_default_func(node)
+
+ def visit_NumberNode(self, node: mparser.NumberNode):
+ self.visit_default_func(node)
+
+ def visit_StringNode(self, node: mparser.StringNode):
+ self.visit_default_func(node)
+
+ def visit_ContinueNode(self, node: mparser.ContinueNode):
+ self.visit_default_func(node)
+
+ def visit_BreakNode(self, node: mparser.BreakNode):
+ self.visit_default_func(node)
+
+ def visit_ArrayNode(self, node: mparser.ArrayNode):
+ self.visit_default_func(node)
+ node.args.accept(self)
+
+ def visit_DictNode(self, node: mparser.DictNode):
+ self.visit_default_func(node)
+ node.args.accept(self)
+
+ def visit_EmptyNode(self, node: mparser.EmptyNode):
+ self.visit_default_func(node)
+
+ def visit_OrNode(self, node: mparser.OrNode):
+ self.visit_default_func(node)
+ node.left.accept(self)
+ node.right.accept(self)
+
+ def visit_AndNode(self, node: mparser.AndNode):
+ self.visit_default_func(node)
+ node.left.accept(self)
+ node.right.accept(self)
+
+ def visit_ComparisonNode(self, node: mparser.ComparisonNode):
+ self.visit_default_func(node)
+ node.left.accept(self)
+ node.right.accept(self)
+
+ def visit_ArithmeticNode(self, node: mparser.ArithmeticNode):
+ self.visit_default_func(node)
+ node.left.accept(self)
+ node.right.accept(self)
+
+ def visit_NotNode(self, node: mparser.NotNode):
+ self.visit_default_func(node)
+ node.value.accept(self)
+
+ def visit_CodeBlockNode(self, node: mparser.CodeBlockNode):
+ self.visit_default_func(node)
+ for i in node.lines:
+ i.accept(self)
+
+ def visit_IndexNode(self, node: mparser.IndexNode):
+ self.visit_default_func(node)
+ node.index.accept(self)
+
+ def visit_MethodNode(self, node: mparser.MethodNode):
+ self.visit_default_func(node)
+ node.source_object.accept(self)
+ node.args.accept(self)
+
+ def visit_FunctionNode(self, node: mparser.FunctionNode):
+ self.visit_default_func(node)
+ node.args.accept(self)
+
+ def visit_AssignmentNode(self, node: mparser.AssignmentNode):
+ self.visit_default_func(node)
+ node.value.accept(self)
+
+ def visit_PlusAssignmentNode(self, node: mparser.PlusAssignmentNode):
+ self.visit_default_func(node)
+ node.value.accept(self)
+
+ def visit_ForeachClauseNode(self, node: mparser.ForeachClauseNode):
+ self.visit_default_func(node)
+ node.items.accept(self)
+ node.block.accept(self)
+
+ def visit_IfClauseNode(self, node: mparser.IfClauseNode):
+ self.visit_default_func(node)
+ for i in node.ifs:
+ i.accept(self)
+ if node.elseblock:
+ node.elseblock.accept(self)
+
+ def visit_UMinusNode(self, node: mparser.UMinusNode):
+ self.visit_default_func(node)
+ node.value.accept(self)
+
+ def visit_IfNode(self, node: mparser.IfNode):
+ self.visit_default_func(node)
+ node.condition.accept(self)
+ node.block.accept(self)
+
+ def visit_TernaryNode(self, node: mparser.TernaryNode):
+ self.visit_default_func(node)
+ node.condition.accept(self)
+ node.trueblock.accept(self)
+ node.falseblock.accept(self)
+
+ def visit_ArgumentNode(self, node: mparser.ArgumentNode):
+ self.visit_default_func(node)
+ for i in node.arguments:
+ i.accept(self)
+ for i in node.commas:
+ pass
+ for val in node.kwargs.values():
+ val.accept(self)
diff --git a/mesonbuild/backend/backends.py b/mesonbuild/backend/backends.py
index 9f3f5d6..ba5bd90 100644
--- a/mesonbuild/backend/backends.py
+++ b/mesonbuild/backend/backends.py
@@ -20,7 +20,7 @@ from .. import mesonlib
from .. import mlog
import json
import subprocess
-from ..mesonlib import MesonException, OrderedSet
+from ..mesonlib import MachineChoice, MesonException, OrderedSet
from ..mesonlib import classify_unity_sources
from ..mesonlib import File
from ..compilers import CompilerArgs, VisualStudioCCompiler
@@ -185,9 +185,14 @@ class Backend:
self.environment.coredata.base_options)
def get_compiler_options_for_target(self, target):
- return OptionOverrideProxy(target.option_overrides,
- # no code depends on builtins for now
- self.environment.coredata.compiler_options)
+ if self.environment.is_cross_build() and not target.is_cross:
+ for_machine = MachineChoice.BUILD
+ else:
+ for_machine = MachineChoice.HOST
+
+ return OptionOverrideProxy(
+ target.option_overrides,
+ self.environment.coredata.compiler_options[for_machine])
def get_option_for_target(self, option_name, target):
if option_name in target.option_overrides:
@@ -574,10 +579,14 @@ class Backend:
# Add compile args added using add_global_arguments()
# These override per-project arguments
commands += self.build.get_global_args(compiler, target.is_cross)
+ if self.environment.is_cross_build() and not target.is_cross:
+ for_machine = MachineChoice.BUILD
+ else:
+ for_machine = MachineChoice.HOST
if not target.is_cross:
# Compile args added from the env: CFLAGS/CXXFLAGS, etc. We want these
# to override all the defaults, but not the per-target compile args.
- commands += self.environment.coredata.get_external_args(compiler.get_language())
+ commands += self.environment.coredata.get_external_args(for_machine, compiler.get_language())
# Always set -fPIC for shared libraries
if isinstance(target, build.SharedLibrary):
commands += compiler.get_pic_args()
@@ -798,7 +807,7 @@ class Backend:
def replace_outputs(self, args, private_dir, output_list):
newargs = []
- regex = re.compile('@OUTPUT(\d+)@')
+ regex = re.compile(r'@OUTPUT(\d+)@')
for arg in args:
m = regex.search(arg)
while m is not None:
@@ -938,7 +947,7 @@ class Backend:
dfilename = os.path.join(outdir, target.depfile)
i = i.replace('@DEPFILE@', dfilename)
elif '@PRIVATE_OUTDIR_' in i:
- match = re.search('@PRIVATE_OUTDIR_(ABS_)?([^/\s*]*)@', i)
+ match = re.search(r'@PRIVATE_OUTDIR_(ABS_)?([^/\s*]*)@', i)
if not match:
msg = 'Custom target {!r} has an invalid argument {!r}' \
''.format(target.name, i)
diff --git a/mesonbuild/backend/ninjabackend.py b/mesonbuild/backend/ninjabackend.py
index debb4fb..afef9a9 100644
--- a/mesonbuild/backend/ninjabackend.py
+++ b/mesonbuild/backend/ninjabackend.py
@@ -11,7 +11,7 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-
+from typing import List
import os
import re
import shlex
@@ -29,9 +29,9 @@ from .. import build
from .. import mlog
from .. import dependencies
from .. import compilers
-from ..compilers import CompilerArgs, CCompiler, VisualStudioCCompiler
+from ..compilers import CompilerArgs, CCompiler, VisualStudioCCompiler, FortranCompiler
from ..linkers import ArLinker
-from ..mesonlib import File, MesonException, OrderedSet
+from ..mesonlib import File, MachineChoice, MesonException, OrderedSet
from ..mesonlib import get_compiler_for_source, has_path_sep
from .backends import CleanTrees
from ..build import InvalidArguments
@@ -428,12 +428,7 @@ int dummy;
# Generate rules for building the remaining source files in this target
outname = self.get_target_filename(target)
obj_list = []
- use_pch = self.environment.coredata.base_options.get('b_pch', False)
is_unity = self.is_unity(target)
- if use_pch and target.has_pch():
- pch_objects = self.generate_pch(target, outfile)
- else:
- pch_objects = []
header_deps = []
unity_src = []
unity_deps = [] # Generated sources that must be built before compiling a Unity target.
@@ -486,6 +481,12 @@ int dummy;
header_deps=header_deps)
obj_list.append(o)
+ use_pch = self.environment.coredata.base_options.get('b_pch', False)
+ if use_pch and target.has_pch():
+ pch_objects = self.generate_pch(target, outfile, header_deps=header_deps)
+ else:
+ pch_objects = []
+
# Generate compilation targets for C sources generated from Vala
# sources. This can be extended to other $LANG->C compilers later if
# necessary. This needs to be separate for at least Vala
@@ -1460,7 +1461,7 @@ int dummy;
or langname == 'cs':
continue
crstr = ''
- cross_args = self.environment.properties.host.get_external_link_args(langname)
+ cross_args = self.environment.coredata.get_external_link_args(MachineChoice.HOST, langname)
if is_cross:
crstr = '_CROSS'
rule = 'rule %s%s_LINKER\n' % (langname, crstr)
@@ -1826,7 +1827,8 @@ rule FORTRAN_DEP_HACK%s
if compiler is None:
self.fortran_deps[target.get_basename()] = {}
return
- modre = re.compile(r"\s*module\s+(\w+)", re.IGNORECASE)
+ modre = re.compile(r"\s*\bmodule\b\s+(\w+)\s*$", re.IGNORECASE)
+ submodre = re.compile(r"\s*\bsubmodule\b\s+\((\w+:?\w+)\)\s+(\w+)\s*$", re.IGNORECASE)
module_files = {}
for s in target.get_sources():
# FIXME, does not work for Fortran sources generated by
@@ -1836,34 +1838,44 @@ rule FORTRAN_DEP_HACK%s
continue
filename = s.absolute_path(self.environment.get_source_dir(),
self.environment.get_build_dir())
- # Some Fortran editors save in weird encodings,
- # but all the parts we care about are in ASCII.
- with open(filename, errors='ignore') as f:
+ # Fortran keywords must be ASCII.
+ with open(filename, encoding='ascii', errors='ignore') as f:
for line in f:
modmatch = modre.match(line)
if modmatch is not None:
modname = modmatch.group(1).lower()
- if modname == 'procedure':
- # MODULE PROCEDURE construct
- continue
if modname in module_files:
raise InvalidArguments(
'Namespace collision: module %s defined in '
'two files %s and %s.' %
(modname, module_files[modname], s))
module_files[modname] = s
+ else:
+ submodmatch = submodre.match(line)
+ if submodmatch is not None:
+ submodname = submodmatch.group(2).lower()
+ if submodname in module_files:
+ raise InvalidArguments(
+ 'Namespace collision: submodule %s defined in '
+ 'two files %s and %s.' %
+ (submodname, module_files[submodname], s))
+ module_files[submodname] = s
+
self.fortran_deps[target.get_basename()] = module_files
- def get_fortran_deps(self, compiler, src, target):
+ def get_fortran_deps(self, compiler: FortranCompiler, src: str, target) -> List[str]:
mod_files = []
- usere = re.compile(r"\s*use\s+(\w+)", re.IGNORECASE)
+ usere = re.compile(r"\s*use,?\s*(?:non_intrinsic)?\s*(?:::)?\s*(\w+)", re.IGNORECASE)
+ submodre = re.compile(r"\s*\bsubmodule\b\s+\((\w+:?\w+)\)\s+(\w+)\s*$", re.IGNORECASE)
dirname = self.get_target_private_dir(target)
tdeps = self.fortran_deps[target.get_basename()]
- with open(src) as f:
+ with open(src, encoding='ascii', errors='ignore') as f:
for line in f:
usematch = usere.match(line)
if usematch is not None:
usename = usematch.group(1).lower()
+ if usename == 'intrinsic': # this keeps the regex simpler
+ continue
if usename not in tdeps:
# The module is not provided by any source file. This
# is due to:
@@ -1882,9 +1894,23 @@ rule FORTRAN_DEP_HACK%s
# the same name.
if mod_source_file.fname == os.path.basename(src):
continue
- mod_name = compiler.module_name_to_filename(
- usematch.group(1))
+ mod_name = compiler.module_name_to_filename(usename)
mod_files.append(os.path.join(dirname, mod_name))
+ else:
+ submodmatch = submodre.match(line)
+ if submodmatch is not None:
+ parents = submodmatch.group(1).lower().split(':')
+ assert len(parents) in (1, 2), (
+ 'submodule ancestry must be specified as'
+ ' ancestor:parent but Meson found {}'.format(parents))
+ for parent in parents:
+ if parent not in tdeps:
+ raise MesonException("submodule {} relies on parent module {} that was not found.".format(submodmatch.group(2).lower(), parent))
+ if tdeps[parent].fname == os.path.basename(src): # same file
+ continue
+ mod_name = compiler.module_name_to_filename(parent)
+ mod_files.append(os.path.join(dirname, mod_name))
+
return mod_files
def get_cross_stdlib_args(self, target, compiler):
@@ -2184,12 +2210,7 @@ rule FORTRAN_DEP_HACK%s
commands += compiler.get_module_outdir_args(self.get_target_private_dir(target))
element = NinjaBuildElement(self.all_outputs, rel_obj, compiler_name, rel_src)
- for d in header_deps:
- if isinstance(d, File):
- d = d.rel_to_builddir(self.build_to_src)
- elif not self.has_dir_part(d):
- d = os.path.join(self.get_target_private_dir(target), d)
- element.add_dep(d)
+ self.add_header_deps(target, element, header_deps)
for d in extra_deps:
element.add_dep(d)
for d in order_deps:
@@ -2198,7 +2219,14 @@ rule FORTRAN_DEP_HACK%s
elif not self.has_dir_part(d):
d = os.path.join(self.get_target_private_dir(target), d)
element.add_orderdep(d)
- element.add_orderdep(pch_dep)
+ if compiler.id == 'msvc':
+ # MSVC does not show includes coming from the PCH with '/showIncludes',
+ # thus we must add an implicit dependency to the generated PCH.
+ element.add_dep(pch_dep)
+ else:
+ # All other compilers properly handle includes through the PCH, so only an
+ # orderdep is needed to make the initial build without depfile work.
+ element.add_orderdep(pch_dep)
# Convert from GCC-style link argument naming to the naming used by the
# current compiler.
commands = commands.to_native()
@@ -2209,6 +2237,14 @@ rule FORTRAN_DEP_HACK%s
element.write(outfile)
return rel_obj
+ def add_header_deps(self, target, ninja_element, header_deps):
+ for d in header_deps:
+ if isinstance(d, File):
+ d = d.rel_to_builddir(self.build_to_src)
+ elif not self.has_dir_part(d):
+ d = os.path.join(self.get_target_private_dir(target), d)
+ ninja_element.add_dep(d)
+
def has_dir_part(self, fname):
# FIXME FIXME: The usage of this is a terrible and unreliable hack
if isinstance(fname, File):
@@ -2239,6 +2275,7 @@ rule FORTRAN_DEP_HACK%s
just_name = os.path.basename(header)
(objname, pch_args) = compiler.gen_pch_args(just_name, source, dst)
commands += pch_args
+ commands += self._generate_single_compile(target, compiler)
commands += self.get_compile_debugfile_args(compiler, target, objname)
dep = dst + '.' + compiler.get_depfile_suffix()
return commands, dep, dst, [objname]
@@ -2254,7 +2291,7 @@ rule FORTRAN_DEP_HACK%s
dep = dst + '.' + compiler.get_depfile_suffix()
return commands, dep, dst, [] # Gcc does not create an object file during pch generation.
- def generate_pch(self, target, outfile):
+ def generate_pch(self, target, outfile, header_deps=[]):
cstr = ''
pch_objects = []
if target.is_cross:
@@ -2285,6 +2322,7 @@ rule FORTRAN_DEP_HACK%s
elem = NinjaBuildElement(self.all_outputs, dst, rulename, src)
if extradep is not None:
elem.add_dep(extradep)
+ self.add_header_deps(target, elem, header_deps)
elem.add_item('ARGS', commands)
elem.add_item('DEPFILE', dep)
elem.write(outfile)
@@ -2350,15 +2388,14 @@ rule FORTRAN_DEP_HACK%s
target_args = self.build_target_link_arguments(linker, target.link_whole_targets)
return linker.get_link_whole_for(target_args) if len(target_args) else []
- @staticmethod
@lru_cache(maxsize=None)
- def guess_library_absolute_path(linker, libname, search_dirs, patterns):
+ def guess_library_absolute_path(self, linker, libname, search_dirs, patterns):
for d in search_dirs:
for p in patterns:
trial = CCompiler._get_trials_from_pattern(p, d, libname)
if not trial:
continue
- trial = CCompiler._get_file_from_list(trial)
+ trial = CCompiler._get_file_from_list(self.environment, trial)
if not trial:
continue
# Return the first result
@@ -2471,6 +2508,11 @@ rule FORTRAN_DEP_HACK%s
if not isinstance(target, build.StaticLibrary):
commands += self.get_link_whole_args(linker, target)
+ if self.environment.is_cross_build() and not target.is_cross:
+ for_machine = MachineChoice.BUILD
+ else:
+ for_machine = MachineChoice.HOST
+
if not isinstance(target, build.StaticLibrary):
# Add link args added using add_project_link_arguments()
commands += self.build.get_project_link_args(linker, target.subproject, target.is_cross)
@@ -2480,7 +2522,7 @@ rule FORTRAN_DEP_HACK%s
if not target.is_cross:
# Link args added from the env: LDFLAGS. We want these to
# override all the defaults but not the per-target link args.
- commands += self.environment.coredata.get_external_link_args(linker.get_language())
+ commands += self.environment.coredata.get_external_link_args(for_machine, linker.get_language())
# Now we will add libraries and library paths from various sources
@@ -2526,7 +2568,7 @@ rule FORTRAN_DEP_HACK%s
# to be after all internal and external libraries so that unresolved
# symbols from those can be found here. This is needed when the
# *_winlibs that we want to link to are static mingw64 libraries.
- commands += linker.get_option_link_args(self.environment.coredata.compiler_options)
+ commands += linker.get_option_link_args(self.environment.coredata.compiler_options[for_machine])
dep_targets = []
dep_targets.extend(self.guess_external_link_dependencies(linker, target, commands, internal))
diff --git a/mesonbuild/backend/vs2010backend.py b/mesonbuild/backend/vs2010backend.py
index 939f7b4..074c3a9 100644
--- a/mesonbuild/backend/vs2010backend.py
+++ b/mesonbuild/backend/vs2010backend.py
@@ -25,7 +25,9 @@ from .. import dependencies
from .. import mlog
from .. import compilers
from ..compilers import CompilerArgs
-from ..mesonlib import MesonException, File, python_command, replace_if_different
+from ..mesonlib import (
+ MesonException, MachineChoice, File, python_command, replace_if_different
+)
from ..environment import Environment, build_filename
def autodetect_vs_version(build):
@@ -472,14 +474,14 @@ class Vs2010Backend(backends.Backend):
pname.text = project_name
if self.windows_target_platform_version:
ET.SubElement(globalgroup, 'WindowsTargetPlatformVersion').text = self.windows_target_platform_version
- ET.SubElement(root, 'Import', Project='$(VCTargetsPath)\Microsoft.Cpp.Default.props')
+ ET.SubElement(root, 'Import', Project=r'$(VCTargetsPath)\Microsoft.Cpp.Default.props')
type_config = ET.SubElement(root, 'PropertyGroup', Label='Configuration')
ET.SubElement(type_config, 'ConfigurationType')
ET.SubElement(type_config, 'CharacterSet').text = 'MultiByte'
ET.SubElement(type_config, 'UseOfMfc').text = 'false'
if self.platform_toolset:
ET.SubElement(type_config, 'PlatformToolset').text = self.platform_toolset
- ET.SubElement(root, 'Import', Project='$(VCTargetsPath)\Microsoft.Cpp.props')
+ ET.SubElement(root, 'Import', Project=r'$(VCTargetsPath)\Microsoft.Cpp.props')
direlem = ET.SubElement(root, 'PropertyGroup')
fver = ET.SubElement(direlem, '_ProjectFileVersion')
fver.text = self.project_file_version
@@ -509,12 +511,15 @@ class Vs2010Backend(backends.Backend):
elif isinstance(i, File):
relfname = i.rel_to_builddir(self.build_to_src)
cmd.append(os.path.join(self.environment.get_build_dir(), relfname))
+ elif isinstance(i, str):
+ # Escape embedded quotes, because we quote the entire argument below.
+ cmd.append(i.replace('"', '\\"'))
else:
cmd.append(i)
cmd_templ = '''"%s" ''' * len(cmd)
ET.SubElement(customstep, 'Command').text = cmd_templ % tuple(cmd)
ET.SubElement(customstep, 'Message').text = 'Running custom command.'
- ET.SubElement(root, 'Import', Project='$(VCTargetsPath)\Microsoft.Cpp.targets')
+ ET.SubElement(root, 'Import', Project=r'$(VCTargetsPath)\Microsoft.Cpp.targets')
self._prettyprint_vcxproj_xml(ET.ElementTree(root), ofname)
def gen_custom_target_vcxproj(self, target, ofname, guid):
@@ -540,7 +545,7 @@ class Vs2010Backend(backends.Backend):
ET.SubElement(customstep, 'Command').text = ' '.join(self.quote_arguments(wrapper_cmd))
ET.SubElement(customstep, 'Outputs').text = ';'.join(ofilenames)
ET.SubElement(customstep, 'Inputs').text = ';'.join([exe_data] + srcs + depend_files)
- ET.SubElement(root, 'Import', Project='$(VCTargetsPath)\Microsoft.Cpp.targets')
+ ET.SubElement(root, 'Import', Project=r'$(VCTargetsPath)\Microsoft.Cpp.targets')
self.generate_custom_generator_commands(target, root)
self._prettyprint_vcxproj_xml(ET.ElementTree(root), ofname)
@@ -553,18 +558,37 @@ class Vs2010Backend(backends.Backend):
return 'cpp'
raise MesonException('Could not guess language from source file %s.' % src)
- def add_pch(self, inc_cl, proj_to_src_dir, pch_sources, source_file):
+ def add_pch(self, pch_sources, lang, inc_cl):
if len(pch_sources) <= 1:
# We only need per file precompiled headers if we have more than 1 language.
return
- lang = Vs2010Backend.lang_from_source_file(source_file)
- header = os.path.join(proj_to_src_dir, pch_sources[lang][0])
- pch_file = ET.SubElement(inc_cl, 'PrecompiledHeaderFile')
- pch_file.text = header
+ self.use_pch(pch_sources, lang, inc_cl)
+
+ def create_pch(self, pch_sources, lang, inc_cl):
+ pch = ET.SubElement(inc_cl, 'PrecompiledHeader')
+ pch.text = 'Create'
+ self.add_pch_files(pch_sources, lang, inc_cl)
+
+ def use_pch(self, pch_sources, lang, inc_cl):
+ header = self.add_pch_files(pch_sources, lang, inc_cl)
pch_include = ET.SubElement(inc_cl, 'ForcedIncludeFiles')
pch_include.text = header + ';%(ForcedIncludeFiles)'
+
+ def add_pch_files(self, pch_sources, lang, inc_cl):
+ header = os.path.basename(pch_sources[lang][0])
+ pch_file = ET.SubElement(inc_cl, 'PrecompiledHeaderFile')
+ # When USING PCHs, MSVC will not do the regular include
+ # directory lookup, but simply use a string match to find the
+ # PCH to use. That means the #include directive must match the
+ # pch_file.text used during PCH CREATION verbatim.
+ # When CREATING a PCH, MSVC will do the include directory
+ # lookup to find the actual PCH header to use. Thus, the PCH
+ # header must either be in the include_directories of the target
+ # or be in the same directory as the PCH implementation.
+ pch_file.text = header
pch_out = ET.SubElement(inc_cl, 'PrecompiledHeaderOutputFile')
pch_out.text = '$(IntDir)$(TargetName)-%s.pch' % lang
+ return header
def is_argument_with_msbuild_xml_entry(self, entry):
# Remove arguments that have a top level XML entry so
@@ -747,7 +771,7 @@ class Vs2010Backend(backends.Backend):
pname.text = project_name
if self.windows_target_platform_version:
ET.SubElement(globalgroup, 'WindowsTargetPlatformVersion').text = self.windows_target_platform_version
- ET.SubElement(root, 'Import', Project='$(VCTargetsPath)\Microsoft.Cpp.Default.props')
+ ET.SubElement(root, 'Import', Project=r'$(VCTargetsPath)\Microsoft.Cpp.Default.props')
# Start configuration
type_config = ET.SubElement(root, 'PropertyGroup', Label='Configuration')
ET.SubElement(type_config, 'ConfigurationType').text = conftype
@@ -820,7 +844,7 @@ class Vs2010Backend(backends.Backend):
elif '/Od' in o_flags:
ET.SubElement(type_config, 'Optimization').text = 'Disabled'
# End configuration
- ET.SubElement(root, 'Import', Project='$(VCTargetsPath)\Microsoft.Cpp.props')
+ ET.SubElement(root, 'Import', Project=r'$(VCTargetsPath)\Microsoft.Cpp.props')
generated_files, custom_target_output_files, generated_files_include_dirs = self.generate_custom_generator_commands(target, root)
(gen_src, gen_hdrs, gen_objs, gen_langs) = self.split_sources(generated_files)
(custom_src, custom_hdrs, custom_objs, custom_langs) = self.split_sources(custom_target_output_files)
@@ -856,10 +880,14 @@ class Vs2010Backend(backends.Backend):
file_inc_dirs = dict((lang, []) for lang in target.compilers)
# The order in which these compile args are added must match
# generate_single_compile() and generate_basic_compiler_args()
+ if self.environment.is_cross_build() and not target.is_cross:
+ for_machine = MachineChoice.BUILD
+ else:
+ for_machine = MachineChoice.HOST
for l, comp in target.compilers.items():
if l in file_args:
file_args[l] += compilers.get_base_compile_args(self.get_base_options_for_target(target), comp)
- file_args[l] += comp.get_option_compile_args(self.environment.coredata.compiler_options)
+ file_args[l] += comp.get_option_compile_args(self.environment.coredata.compiler_options[for_machine])
# Add compile args added using add_project_arguments()
for l, args in self.build.projects_args.get(target.subproject, {}).items():
@@ -871,9 +899,10 @@ class Vs2010Backend(backends.Backend):
if l in file_args:
file_args[l] += args
if not target.is_cross:
- # Compile args added from the env: CFLAGS/CXXFLAGS, etc. We want these
- # to override all the defaults, but not the per-target compile args.
- for key, opt in self.environment.coredata.compiler_options.items():
+ # Compile args added from the env or cross file: CFLAGS/CXXFLAGS,
+ # etc. We want these to override all the defaults, but not the
+ # per-target compile args.
+ for key, opt in self.environment.coredata.compiler_options[for_machine].items():
l, suffix = key.split('_', 1)
if suffix == 'args' and l in file_args:
file_args[l] += opt.value
@@ -1007,14 +1036,7 @@ class Vs2010Backend(backends.Backend):
if len(pch_sources) == 1:
# If there is only 1 language with precompiled headers, we can use it for the entire project, which
# is cleaner than specifying it for each source file.
- pch_source = list(pch_sources.values())[0]
- header = os.path.join(proj_to_src_dir, pch_source[0])
- pch_file = ET.SubElement(clconf, 'PrecompiledHeaderFile')
- pch_file.text = header
- pch_include = ET.SubElement(clconf, 'ForcedIncludeFiles')
- pch_include.text = header + ';%(ForcedIncludeFiles)'
- pch_out = ET.SubElement(clconf, 'PrecompiledHeaderOutputFile')
- pch_out.text = '$(IntDir)$(TargetName)-%s.pch' % pch_source[2]
+ self.use_pch(pch_sources, list(pch_sources)[0], clconf)
resourcecompile = ET.SubElement(compiles, 'ResourceCompile')
ET.SubElement(resourcecompile, 'PreprocessorDefinitions')
@@ -1039,9 +1061,10 @@ class Vs2010Backend(backends.Backend):
# These override per-project link arguments
extra_link_args += self.build.get_global_link_args(compiler, target.is_cross)
if not target.is_cross:
- # Link args added from the env: LDFLAGS. We want these to
- # override all the defaults but not the per-target link args.
- extra_link_args += self.environment.coredata.get_external_link_args(compiler.get_language())
+ # Link args added from the env: LDFLAGS, or the cross file. We
+ # want these to override all the defaults but not the
+ # per-target link args.
+ extra_link_args += self.environment.coredata.get_external_link_args(for_machine, compiler.get_language())
# Only non-static built targets need link args and link dependencies
extra_link_args += target.link_args
# External deps must be last because target link libraries may depend on them.
@@ -1064,7 +1087,7 @@ class Vs2010Backend(backends.Backend):
# to be after all internal and external libraries so that unresolved
# symbols from those can be found here. This is needed when the
# *_winlibs that we want to link to are static mingw64 libraries.
- extra_link_args += compiler.get_option_link_args(self.environment.coredata.compiler_options)
+ extra_link_args += compiler.get_option_link_args(self.environment.coredata.compiler_options[for_machine])
(additional_libpaths, additional_links, extra_link_args) = self.split_link_args(extra_link_args.to_native())
# Add more libraries to be linked if needed
@@ -1138,7 +1161,7 @@ class Vs2010Backend(backends.Backend):
ET.SubElement(meson_file_group, 'None', Include=os.path.join(proj_to_src_dir, build_filename))
extra_files = target.extra_files
- if len(headers) + len(gen_hdrs) + len(extra_files) > 0:
+ if len(headers) + len(gen_hdrs) + len(extra_files) + len(pch_sources) > 0:
inc_hdrs = ET.SubElement(root, 'ItemGroup')
for h in headers:
relpath = os.path.join(down, h.rel_to_builddir(self.build_to_src))
@@ -1148,6 +1171,9 @@ class Vs2010Backend(backends.Backend):
for h in target.extra_files:
relpath = os.path.join(down, h.rel_to_builddir(self.build_to_src))
ET.SubElement(inc_hdrs, 'CLInclude', Include=relpath)
+ for lang in pch_sources:
+ h = pch_sources[lang][0]
+ ET.SubElement(inc_hdrs, 'CLInclude', Include=os.path.join(proj_to_src_dir, h))
if len(sources) + len(gen_src) + len(pch_sources) > 0:
inc_src = ET.SubElement(root, 'ItemGroup')
@@ -1155,7 +1181,7 @@ class Vs2010Backend(backends.Backend):
relpath = os.path.join(down, s.rel_to_builddir(self.build_to_src))
inc_cl = ET.SubElement(inc_src, 'CLCompile', Include=relpath)
lang = Vs2010Backend.lang_from_source_file(s)
- self.add_pch(inc_cl, proj_to_src_dir, pch_sources, s)
+ self.add_pch(pch_sources, lang, inc_cl)
self.add_additional_options(lang, inc_cl, file_args)
self.add_preprocessor_defines(lang, inc_cl, file_defines)
self.add_include_dirs(lang, inc_cl, file_inc_dirs)
@@ -1163,7 +1189,7 @@ class Vs2010Backend(backends.Backend):
for s in gen_src:
inc_cl = ET.SubElement(inc_src, 'CLCompile', Include=s)
lang = Vs2010Backend.lang_from_source_file(s)
- self.add_pch(inc_cl, proj_to_src_dir, pch_sources, s)
+ self.add_pch(pch_sources, lang, inc_cl)
self.add_additional_options(lang, inc_cl, file_args)
self.add_preprocessor_defines(lang, inc_cl, file_defines)
self.add_include_dirs(lang, inc_cl, file_inc_dirs)
@@ -1172,14 +1198,7 @@ class Vs2010Backend(backends.Backend):
if impl:
relpath = os.path.join(proj_to_src_dir, impl)
inc_cl = ET.SubElement(inc_src, 'CLCompile', Include=relpath)
- pch = ET.SubElement(inc_cl, 'PrecompiledHeader')
- pch.text = 'Create'
- pch_out = ET.SubElement(inc_cl, 'PrecompiledHeaderOutputFile')
- pch_out.text = '$(IntDir)$(TargetName)-%s.pch' % suffix
- pch_file = ET.SubElement(inc_cl, 'PrecompiledHeaderFile')
- # MSBuild searches for the header relative from the implementation, so we have to use
- # just the file name instead of the relative path to the file.
- pch_file.text = os.path.basename(header)
+ self.create_pch(pch_sources, lang, inc_cl)
self.add_additional_options(lang, inc_cl, file_args)
self.add_preprocessor_defines(lang, inc_cl, file_defines)
self.add_include_dirs(lang, inc_cl, file_inc_dirs)
@@ -1193,7 +1212,7 @@ class Vs2010Backend(backends.Backend):
ET.SubElement(inc_objs, 'Object', Include=s)
self.add_generated_objects(inc_objs, gen_objs)
- ET.SubElement(root, 'Import', Project='$(VCTargetsPath)\Microsoft.Cpp.targets')
+ ET.SubElement(root, 'Import', Project=r'$(VCTargetsPath)\Microsoft.Cpp.targets')
# Reference the regen target.
regen_vcxproj = os.path.join(self.environment.get_build_dir(), 'REGEN.vcxproj')
self.add_project_reference(root, regen_vcxproj, self.environment.coredata.regen_guid)
@@ -1221,14 +1240,14 @@ class Vs2010Backend(backends.Backend):
pname.text = project_name
if self.windows_target_platform_version:
ET.SubElement(globalgroup, 'WindowsTargetPlatformVersion').text = self.windows_target_platform_version
- ET.SubElement(root, 'Import', Project='$(VCTargetsPath)\Microsoft.Cpp.Default.props')
+ ET.SubElement(root, 'Import', Project=r'$(VCTargetsPath)\Microsoft.Cpp.Default.props')
type_config = ET.SubElement(root, 'PropertyGroup', Label='Configuration')
ET.SubElement(type_config, 'ConfigurationType').text = "Utility"
ET.SubElement(type_config, 'CharacterSet').text = 'MultiByte'
ET.SubElement(type_config, 'UseOfMfc').text = 'false'
if self.platform_toolset:
ET.SubElement(type_config, 'PlatformToolset').text = self.platform_toolset
- ET.SubElement(root, 'Import', Project='$(VCTargetsPath)\Microsoft.Cpp.props')
+ ET.SubElement(root, 'Import', Project=r'$(VCTargetsPath)\Microsoft.Cpp.props')
direlem = ET.SubElement(root, 'PropertyGroup')
fver = ET.SubElement(direlem, '_ProjectFileVersion')
fver.text = self.project_file_version
@@ -1273,7 +1292,7 @@ if %%errorlevel%% neq 0 goto :VCEnd'''
ET.SubElement(custombuild, 'Outputs').text = Vs2010Backend.get_regen_stampfile(self.environment.get_build_dir())
deps = self.get_regen_filelist()
ET.SubElement(custombuild, 'AdditionalInputs').text = ';'.join(deps)
- ET.SubElement(root, 'Import', Project='$(VCTargetsPath)\Microsoft.Cpp.targets')
+ ET.SubElement(root, 'Import', Project=r'$(VCTargetsPath)\Microsoft.Cpp.targets')
ET.SubElement(root, 'ImportGroup', Label='ExtensionTargets')
self._prettyprint_vcxproj_xml(ET.ElementTree(root), ofname)
@@ -1300,14 +1319,14 @@ if %%errorlevel%% neq 0 goto :VCEnd'''
pname.text = project_name
if self.windows_target_platform_version:
ET.SubElement(globalgroup, 'WindowsTargetPlatformVersion').text = self.windows_target_platform_version
- ET.SubElement(root, 'Import', Project='$(VCTargetsPath)\Microsoft.Cpp.Default.props')
+ ET.SubElement(root, 'Import', Project=r'$(VCTargetsPath)\Microsoft.Cpp.Default.props')
type_config = ET.SubElement(root, 'PropertyGroup', Label='Configuration')
ET.SubElement(type_config, 'ConfigurationType')
ET.SubElement(type_config, 'CharacterSet').text = 'MultiByte'
ET.SubElement(type_config, 'UseOfMfc').text = 'false'
if self.platform_toolset:
ET.SubElement(type_config, 'PlatformToolset').text = self.platform_toolset
- ET.SubElement(root, 'Import', Project='$(VCTargetsPath)\Microsoft.Cpp.props')
+ ET.SubElement(root, 'Import', Project=r'$(VCTargetsPath)\Microsoft.Cpp.props')
direlem = ET.SubElement(root, 'PropertyGroup')
fver = ET.SubElement(direlem, '_ProjectFileVersion')
fver.text = self.project_file_version
@@ -1346,7 +1365,7 @@ if %%errorlevel%% neq 0 goto :VCEnd'''
self.serialize_tests()
ET.SubElement(postbuild, 'Command').text =\
cmd_templ % ('" "'.join(test_command))
- ET.SubElement(root, 'Import', Project='$(VCTargetsPath)\Microsoft.Cpp.targets')
+ ET.SubElement(root, 'Import', Project=r'$(VCTargetsPath)\Microsoft.Cpp.targets')
self._prettyprint_vcxproj_xml(ET.ElementTree(root), ofname)
def gen_installproj(self, target_name, ofname):
@@ -1373,14 +1392,14 @@ if %%errorlevel%% neq 0 goto :VCEnd'''
pname.text = project_name
if self.windows_target_platform_version:
ET.SubElement(globalgroup, 'WindowsTargetPlatformVersion').text = self.windows_target_platform_version
- ET.SubElement(root, 'Import', Project='$(VCTargetsPath)\Microsoft.Cpp.Default.props')
+ ET.SubElement(root, 'Import', Project=r'$(VCTargetsPath)\Microsoft.Cpp.Default.props')
type_config = ET.SubElement(root, 'PropertyGroup', Label='Configuration')
ET.SubElement(type_config, 'ConfigurationType')
ET.SubElement(type_config, 'CharacterSet').text = 'MultiByte'
ET.SubElement(type_config, 'UseOfMfc').text = 'false'
if self.platform_toolset:
ET.SubElement(type_config, 'PlatformToolset').text = self.platform_toolset
- ET.SubElement(root, 'Import', Project='$(VCTargetsPath)\Microsoft.Cpp.props')
+ ET.SubElement(root, 'Import', Project=r'$(VCTargetsPath)\Microsoft.Cpp.props')
direlem = ET.SubElement(root, 'PropertyGroup')
fver = ET.SubElement(direlem, '_ProjectFileVersion')
fver.text = self.project_file_version
@@ -1414,7 +1433,7 @@ exit /b %%1
if %%errorlevel%% neq 0 goto :VCEnd'''
ET.SubElement(postbuild, 'Command').text =\
cmd_templ % ('" "'.join(test_command))
- ET.SubElement(root, 'Import', Project='$(VCTargetsPath)\Microsoft.Cpp.targets')
+ ET.SubElement(root, 'Import', Project=r'$(VCTargetsPath)\Microsoft.Cpp.targets')
self._prettyprint_vcxproj_xml(ET.ElementTree(root), ofname)
def generate_debug_information(self, link):
diff --git a/mesonbuild/build.py b/mesonbuild/build.py
index d20b576..702b338 100644
--- a/mesonbuild/build.py
+++ b/mesonbuild/build.py
@@ -36,6 +36,7 @@ pch_kwargs = set(['c_pch', 'cpp_pch'])
lang_arg_kwargs = set([
'c_args',
'cpp_args',
+ 'cuda_args',
'd_args',
'd_import_dirs',
'd_unittest',
@@ -797,13 +798,13 @@ just like those detected with the dependency() function.''')
for linktarget in lwhole:
self.link_whole(linktarget)
- c_pchlist, cpp_pchlist, clist, cpplist, cslist, valalist, objclist, objcpplist, fortranlist, rustlist \
- = extract_as_list(kwargs, 'c_pch', 'cpp_pch', 'c_args', 'cpp_args', 'cs_args', 'vala_args', 'objc_args',
+ c_pchlist, cpp_pchlist, clist, cpplist, cudalist, cslist, valalist, objclist, objcpplist, fortranlist, rustlist \
+ = extract_as_list(kwargs, 'c_pch', 'cpp_pch', 'c_args', 'cpp_args', 'cuda_args', 'cs_args', 'vala_args', 'objc_args',
'objcpp_args', 'fortran_args', 'rust_args')
self.add_pch('c', c_pchlist)
self.add_pch('cpp', cpp_pchlist)
- compiler_args = {'c': clist, 'cpp': cpplist, 'cs': cslist, 'vala': valalist, 'objc': objclist, 'objcpp': objcpplist,
+ compiler_args = {'c': clist, 'cpp': cpplist, 'cuda': cudalist, 'cs': cslist, 'vala': valalist, 'objc': objclist, 'objcpp': objcpplist,
'fortran': fortranlist, 'rust': rustlist
}
for key, value in compiler_args.items():
@@ -1089,6 +1090,9 @@ You probably should put it in link_with instead.''')
pchlist = [pchlist[1], pchlist[0]]
else:
raise InvalidArguments('PCH argument %s is of unknown type.' % pchlist[0])
+
+ if (os.path.dirname(pchlist[0]) != os.path.dirname(pchlist[1])):
+ raise InvalidArguments('PCH files must be stored in the same folder.')
elif len(pchlist) > 2:
raise InvalidArguments('PCH definition may have a maximum of 2 files.')
for f in pchlist:
diff --git a/mesonbuild/compilers/__init__.py b/mesonbuild/compilers/__init__.py
index 60cca93..5de0e59 100644
--- a/mesonbuild/compilers/__init__.py
+++ b/mesonbuild/compilers/__init__.py
@@ -72,7 +72,7 @@ __all__ = [
'JavaCompiler',
'LLVMDCompiler',
'MonoCompiler',
- 'NvidiaCudaCompiler',
+ 'CudaCompiler',
'VisualStudioCsCompiler',
'NAGFortranCompiler',
'ObjCCompiler',
diff --git a/mesonbuild/compilers/c.py b/mesonbuild/compilers/c.py
index a8be2b0..b47be7d 100644
--- a/mesonbuild/compilers/c.py
+++ b/mesonbuild/compilers/c.py
@@ -19,13 +19,15 @@ import subprocess
import functools
import itertools
from pathlib import Path
+from typing import List
from .. import mlog
from .. import coredata
from . import compilers
from ..mesonlib import (
- EnvironmentException, MesonException, version_compare, Popen_safe, listify,
- for_windows, for_darwin, for_cygwin, for_haiku, for_openbsd,
+ EnvironmentException, MachineChoice, MesonException, Popen_safe, listify,
+ version_compare, for_windows, for_darwin, for_cygwin, for_haiku,
+ for_openbsd, darwin_get_object_archs
)
from .c_function_attributes import C_FUNC_ATTRIBUTES
@@ -58,6 +60,7 @@ class CCompiler(Compiler):
library_dirs_cache = {}
program_dirs_cache = {}
find_library_cache = {}
+ find_framework_cache = {}
internal_libs = gnu_compiler_internal_libs
@staticmethod
@@ -424,12 +427,16 @@ class CCompiler(Compiler):
# Read c_args/cpp_args/etc from the cross-info file (if needed)
args += self.get_cross_extra_flags(env, link=(mode == 'link'))
if not self.is_cross:
+ if env.is_cross_build() and not self.is_cross:
+ for_machine = MachineChoice.BUILD
+ else:
+ for_machine = MachineChoice.HOST
if mode == 'preprocess':
# Add CPPFLAGS from the env.
- args += env.coredata.get_external_preprocess_args(self.language)
+ args += env.coredata.get_external_preprocess_args(for_machine, self.language)
elif mode == 'compile':
# Add CFLAGS/CXXFLAGS/OBJCFLAGS/OBJCXXFLAGS from the env
- sys_args = env.coredata.get_external_args(self.language)
+ sys_args = env.coredata.get_external_args(for_machine, self.language)
# Apparently it is a thing to inject linker flags both
# via CFLAGS _and_ LDFLAGS, even though the former are
# also used during linking. These flags can break
@@ -438,7 +445,7 @@ class CCompiler(Compiler):
args += cleaned_sys_args
elif mode == 'link':
# Add LDFLAGS from the env
- args += env.coredata.get_external_link_args(self.language)
+ args += env.coredata.get_external_link_args(for_machine, self.language)
args += self.get_compiler_check_args()
# extra_args must override all other arguments, so we add them last
args += extra_args
@@ -456,7 +463,7 @@ class CCompiler(Compiler):
return self.compiles(code, env, extra_args=extra_args,
dependencies=dependencies, mode='link')
- def run(self, code, env, *, extra_args=None, dependencies=None):
+ def run(self, code: str, env, *, extra_args=None, dependencies=None):
if self.is_cross and self.exe_wrapper is None:
raise CrossNoRunException('Can not run test applications in this cross environment.')
with self._build_wrapper(code, env, extra_args, dependencies, mode='link', want_output=True) as p:
@@ -978,10 +985,28 @@ class CCompiler(Compiler):
return [f.as_posix()]
@staticmethod
- def _get_file_from_list(files):
+ def _get_file_from_list(env, files: List[str]) -> str:
+ '''
+ We just check whether the library exists. We can't do a link check
+ because the library might have unresolved symbols that require other
+ libraries. On macOS we check if the library matches our target
+ architecture.
+ '''
+ # If not building on macOS for Darwin, do a simple file check
+ if not env.machines.host.is_darwin() or not env.machines.build.is_darwin():
+ for f in files:
+ if os.path.isfile(f):
+ return f
+ # Run `lipo` and check if the library supports the arch we want
for f in files:
- if os.path.isfile(f):
+ if not os.path.isfile(f):
+ continue
+ archs = darwin_get_object_archs(f)
+ if archs and env.machines.host.cpu_family in archs:
return f
+ else:
+ mlog.debug('Rejected {}, supports {} but need {}'
+ .format(f, archs, env.machines.host.cpu_family))
return None
@functools.lru_cache()
@@ -1022,10 +1047,7 @@ class CCompiler(Compiler):
trial = self._get_trials_from_pattern(p, d, libname)
if not trial:
continue
- # We just check whether the library exists. We can't do a link
- # check because the library might have unresolved symbols that
- # require other libraries.
- trial = self._get_file_from_list(trial)
+ trial = self._get_file_from_list(env, trial)
if not trial:
continue
return [trial]
@@ -1051,6 +1073,70 @@ class CCompiler(Compiler):
code = 'int main(int argc, char **argv) { return 0; }'
return self.find_library_impl(libname, env, extra_dirs, code, libtype)
+ def find_framework_paths(self, env):
+ '''
+ These are usually /Library/Frameworks and /System/Library/Frameworks,
+ unless you select a particular macOS SDK with the -isysroot flag.
+ You can also add to this by setting -F in CFLAGS.
+ '''
+ if self.id != 'clang':
+ raise MesonException('Cannot find framework path with non-clang compiler')
+ # Construct the compiler command-line
+ commands = self.get_exelist() + ['-v', '-E', '-']
+ commands += self.get_always_args()
+ # Add CFLAGS/CXXFLAGS/OBJCFLAGS/OBJCXXFLAGS from the env
+ if env.is_cross_build() and not self.is_cross:
+ for_machine = MachineChoice.BUILD
+ else:
+ for_machine = MachineChoice.HOST
+ commands += env.coredata.get_external_args(for_machine, self.language)
+ mlog.debug('Finding framework path by running: ', ' '.join(commands), '\n')
+ os_env = os.environ.copy()
+ os_env['LC_ALL'] = 'C'
+ _, _, stde = Popen_safe(commands, env=os_env, stdin=subprocess.PIPE)
+ paths = []
+ for line in stde.split('\n'):
+ if '(framework directory)' not in line:
+ continue
+ # line is of the form:
+ # ` /path/to/framework (framework directory)`
+ paths.append(line[:-21].strip())
+ return paths
+
+ def find_framework_real(self, name, env, extra_dirs, allow_system):
+ code = 'int main(int argc, char **argv) { return 0; }'
+ link_args = []
+ for d in extra_dirs:
+ link_args += ['-F' + d]
+ # We can pass -Z to disable searching in the system frameworks, but
+ # then we must also pass -L/usr/lib to pick up libSystem.dylib
+ extra_args = [] if allow_system else ['-Z', '-L/usr/lib']
+ link_args += ['-framework', name]
+ if self.links(code, env, extra_args=(extra_args + link_args)):
+ return link_args
+
+ def find_framework_impl(self, name, env, extra_dirs, allow_system):
+ if isinstance(extra_dirs, str):
+ extra_dirs = [extra_dirs]
+ key = (tuple(self.exelist), name, tuple(extra_dirs), allow_system)
+ if key in self.find_framework_cache:
+ value = self.find_framework_cache[key]
+ else:
+ value = self.find_framework_real(name, env, extra_dirs, allow_system)
+ self.find_framework_cache[key] = value
+ if value is None:
+ return None
+ return value[:]
+
+ def find_framework(self, name, env, extra_dirs, allow_system=True):
+ '''
+ Finds the framework with the specified name, and returns link args for
+ the same or returns None when the framework is not found.
+ '''
+ if self.id != 'clang':
+ raise MesonException('Cannot find frameworks with non-clang compiler')
+ return self.find_framework_impl(name, env, extra_dirs, allow_system)
+
def thread_flags(self, env):
if for_haiku(self.is_cross, env) or for_darwin(self.is_cross, env):
return []
diff --git a/mesonbuild/compilers/compilers.py b/mesonbuild/compilers/compilers.py
index b1f3cc2..9a101bf 100644
--- a/mesonbuild/compilers/compilers.py
+++ b/mesonbuild/compilers/compilers.py
@@ -21,8 +21,8 @@ from .. import coredata
from .. import mlog
from .. import mesonlib
from ..mesonlib import (
- EnvironmentException, MesonException, OrderedSet, version_compare,
- Popen_safe
+ EnvironmentException, MachineChoice, MesonException, OrderedSet,
+ version_compare, Popen_safe
)
"""This file contains the data files of all compilers Meson knows
@@ -150,7 +150,7 @@ cuda_buildtype_args = {'plain': [],
'debugoptimized': [],
'release': [],
'minsize': [],
- }
+ }
arm_buildtype_args = {'plain': [],
'debug': ['-O0', '--debug'],
@@ -1011,7 +1011,11 @@ class Compiler:
opts = {} # build afresh every time
# Take default values from env variables.
- compile_args, link_args = self.get_args_from_envvars()
+ if not self.is_cross:
+ compile_args, link_args = self.get_args_from_envvars()
+ else:
+ compile_args = []
+ link_args = []
description = 'Extra arguments passed to the {}'.format(self.get_display_language())
opts.update({
self.language + '_args': coredata.UserArrayOption(
@@ -1083,10 +1087,9 @@ class Compiler:
def get_cross_extra_flags(self, environment, link):
extra_flags = []
if self.is_cross and environment:
- props = environment.properties.host
- extra_flags += props.get_external_args(self.language)
+ extra_flags += environment.coredata.get_external_args(MachineChoice.HOST, self.language)
if link:
- extra_flags += props.get_external_link_args(self.language)
+ extra_flags += environment.coredata.get_external_link_args(MachineChoice.HOST, self.language)
return extra_flags
def _get_compile_output(self, dirname, mode):
@@ -1795,7 +1798,7 @@ class ArmclangCompiler:
EnvironmentException('armlink version string not found')
# Using the regular expression from environment.search_version,
# which is used for searching compiler version
- version_regex = '(?<!(\d|\.))(\d{1,2}(\.\d+)+(-[a-zA-Z0-9]+)?)'
+ version_regex = r'(?<!(\d|\.))(\d{1,2}(\.\d+)+(-[a-zA-Z0-9]+)?)'
linker_ver = re.search(version_regex, ver_str)
if linker_ver:
linker_ver = linker_ver.group(0)
diff --git a/mesonbuild/compilers/cs.py b/mesonbuild/compilers/cs.py
index cbfcd9c..cd67da0 100644
--- a/mesonbuild/compilers/cs.py
+++ b/mesonbuild/compilers/cs.py
@@ -32,6 +32,7 @@ class CsCompiler(Compiler):
self.language = 'cs'
super().__init__(exelist, version)
self.id = id
+ self.is_cross = False
self.runner = runner
def get_display_language(self):
diff --git a/mesonbuild/compilers/d.py b/mesonbuild/compilers/d.py
index 3065ac7..40906c5 100644
--- a/mesonbuild/compilers/d.py
+++ b/mesonbuild/compilers/d.py
@@ -14,7 +14,9 @@
import os.path, subprocess
-from ..mesonlib import EnvironmentException, version_compare, is_windows, is_osx
+from ..mesonlib import (
+ EnvironmentException, MachineChoice, version_compare, is_windows, is_osx
+)
from .compilers import (
CompilerType,
@@ -306,12 +308,17 @@ class DCompiler(Compiler):
# Add link flags needed to find dependencies
args += d.get_link_args()
+ if env.is_cross_build() and not self.is_cross:
+ for_machine = MachineChoice.BUILD
+ else:
+ for_machine = MachineChoice.HOST
+
if mode == 'compile':
# Add DFLAGS from the env
- args += env.coredata.get_external_args(self.language)
+ args += env.coredata.get_external_args(for_machine, self.language)
elif mode == 'link':
# Add LDFLAGS from the env
- args += env.coredata.get_external_link_args(self.language)
+ args += env.coredata.get_external_link_args(for_machine, self.language)
# extra_args must override all other arguments, so we add them last
args += extra_args
return args
@@ -373,7 +380,7 @@ class DCompiler(Compiler):
# translate library link flag
dcargs.append('-L=' + arg)
continue
- elif arg.startswith('-L'):
+ elif arg.startswith('-L/') or arg.startswith('-L./'):
# we need to handle cases where -L is set by e.g. a pkg-config
# setting to select a linker search path. We can however not
# unconditionally prefix '-L' with '-L' because the user might
diff --git a/mesonbuild/compilers/fortran.py b/mesonbuild/compilers/fortran.py
index e7486db..8c50736 100644
--- a/mesonbuild/compilers/fortran.py
+++ b/mesonbuild/compilers/fortran.py
@@ -11,6 +11,8 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
+from typing import List
+import subprocess, os
from .c import CCompiler
from .compilers import (
@@ -29,7 +31,7 @@ from .compilers import (
)
from mesonbuild.mesonlib import EnvironmentException, is_osx
-import subprocess, os
+
class FortranCompiler(Compiler):
library_dirs_cache = CCompiler.library_dirs_cache
@@ -75,10 +77,7 @@ class FortranCompiler(Compiler):
source_name = os.path.join(work_dir, 'sanitycheckf.f90')
binary_name = os.path.join(work_dir, 'sanitycheckf')
with open(source_name, 'w') as ofile:
- ofile.write('''program prog
- print *, "Fortran compilation is working."
-end program prog
-''')
+ ofile.write('print *, "Fortran compilation is working."; end')
extra_flags = self.get_cross_extra_flags(environment, link=True)
pc = subprocess.Popen(self.exelist + extra_flags + [source_name, '-o', binary_name])
pc.wait()
@@ -180,7 +179,7 @@ end program prog
return parameter_list
- def module_name_to_filename(self, module_name):
+ def module_name_to_filename(self, module_name: str) -> str:
return module_name.lower() + '.mod'
def get_std_shared_lib_link_args(self):
@@ -228,7 +227,7 @@ end program prog
dependencies=dependencies)
def run(self, code, env, *, extra_args=None, dependencies=None):
- return CCompiler.run(self, code, env, extra_args, dependencies)
+ return CCompiler.run(self, code, env, extra_args=extra_args, dependencies=dependencies)
def _get_patterns(self, *args, **kwargs):
return CCompiler._get_patterns(self, *args, **kwargs)
@@ -263,6 +262,13 @@ end program prog
def has_multi_arguments(self, args, env):
return CCompiler.has_multi_arguments(self, args, env)
+ @classmethod
+ def _get_trials_from_pattern(cls, pattern, directory, libname):
+ return CCompiler._get_trials_from_pattern(pattern, directory, libname)
+
+ @staticmethod
+ def _get_file_from_list(env, files: List[str]) -> str:
+ return CCompiler._get_file_from_list(env, files)
class GnuFortranCompiler(GnuCompiler, FortranCompiler):
def __init__(self, exelist, version, compiler_type, is_cross, exe_wrapper=None, defines=None, **kwargs):
diff --git a/mesonbuild/compilers/java.py b/mesonbuild/compilers/java.py
index 03ee382..5d7f865 100644
--- a/mesonbuild/compilers/java.py
+++ b/mesonbuild/compilers/java.py
@@ -23,6 +23,7 @@ class JavaCompiler(Compiler):
self.language = 'java'
super().__init__(exelist, version)
self.id = 'unknown'
+ self.is_cross = False
self.javarunner = 'java'
def get_soname_args(self, *args):
diff --git a/mesonbuild/compilers/vala.py b/mesonbuild/compilers/vala.py
index e64d57f..5303298 100644
--- a/mesonbuild/compilers/vala.py
+++ b/mesonbuild/compilers/vala.py
@@ -49,6 +49,12 @@ class ValaCompiler(Compiler):
def get_pic_args(self):
return []
+ def get_pie_args(self):
+ return []
+
+ def get_pie_link_args(self):
+ return []
+
def get_always_args(self):
return ['-C']
diff --git a/mesonbuild/coredata.py b/mesonbuild/coredata.py
index 9850722..3ce272e 100644
--- a/mesonbuild/coredata.py
+++ b/mesonbuild/coredata.py
@@ -19,7 +19,8 @@ from itertools import chain
from pathlib import PurePath
from collections import OrderedDict
from .mesonlib import (
- MesonException, default_libdir, default_libexecdir, default_prefix
+ MesonException, MachineChoice, PerMachine,
+ default_libdir, default_libexecdir, default_prefix
)
from .wrap import WrapMode
import ast
@@ -234,7 +235,7 @@ def load_configs(filenames):
raise MesonException('Cannot find specified native file: ' + f)
- config = configparser.SafeConfigParser()
+ config = configparser.ConfigParser()
config.read(gen())
return config
@@ -261,9 +262,9 @@ class CoreData:
self.init_builtins()
self.backend_options = {}
self.user_options = {}
- self.compiler_options = {}
+ self.compiler_options = PerMachine({}, {}, {})
self.base_options = {}
- self.external_preprocess_args = {} # CPPFLAGS only
+ self.external_preprocess_args = PerMachine({}, {}, {}) # CPPFLAGS only
self.cross_file = self.__load_cross_file(options.cross_file)
self.compilers = OrderedDict()
self.cross_compilers = OrderedDict()
@@ -457,16 +458,18 @@ class CoreData:
mode = 'custom'
self.builtins['buildtype'].set_value(mode)
+ def get_all_compiler_options(self):
+ # TODO think about cross and command-line interface. (Only .build is mentioned here.)
+ yield self.compiler_options.build
+
def _get_all_nonbuiltin_options(self):
yield self.backend_options
yield self.user_options
- yield self.compiler_options
+ yield from self.get_all_compiler_options()
yield self.base_options
def get_all_options(self):
- return chain(
- iter([self.builtins]),
- self._get_all_nonbuiltin_options())
+ return chain([self.builtins], self._get_all_nonbuiltin_options())
def validate_option_value(self, option_name, override_value):
for opts in self.get_all_options():
@@ -475,14 +478,14 @@ class CoreData:
return opt.validate_value(override_value)
raise MesonException('Tried to validate unknown option %s.' % option_name)
- def get_external_args(self, lang):
- return self.compiler_options[lang + '_args'].value
+ def get_external_args(self, for_machine: MachineChoice, lang):
+ return self.compiler_options[for_machine][lang + '_args'].value
- def get_external_link_args(self, lang):
- return self.compiler_options[lang + '_link_args'].value
+ def get_external_link_args(self, for_machine: MachineChoice, lang):
+ return self.compiler_options[for_machine][lang + '_link_args'].value
- def get_external_preprocess_args(self, lang):
- return self.external_preprocess_args[lang]
+ def get_external_preprocess_args(self, for_machine: MachineChoice, lang):
+ return self.external_preprocess_args[for_machine][lang]
def merge_user_options(self, options):
for (name, value) in options.items():
@@ -493,7 +496,7 @@ class CoreData:
if type(oldval) != type(value):
self.user_options[name] = value
- def set_options(self, options, subproject=''):
+ def set_options(self, options, subproject='', warn_unknown=True):
# Set prefix first because it's needed to sanitize other options
prefix = self.builtins['prefix'].value
if 'prefix' in options:
@@ -517,8 +520,7 @@ class CoreData:
break
else:
unknown_options.append(k)
-
- if unknown_options:
+ if unknown_options and warn_unknown:
unknown_options = ', '.join(sorted(unknown_options))
sub = 'In subproject {}: '.format(subproject) if subproject else ''
mlog.warning('{}Unknown options: "{}"'.format(sub, unknown_options))
@@ -553,36 +555,54 @@ class CoreData:
self.set_options(options, subproject)
- def process_new_compilers(self, lang: str, comp, cross_comp, cmd_line_options):
+ def process_new_compilers(self, lang: str, comp, cross_comp, env):
from . import compilers
+
self.compilers[lang] = comp
- # Native compiler always exist so always add its options.
- new_options = comp.get_options()
if cross_comp is not None:
self.cross_compilers[lang] = cross_comp
- new_options.update(cross_comp.get_options())
+
+ # Native compiler always exist so always add its options.
+ new_options_for_build = comp.get_options()
+ preproc_flags_for_build = comp.get_preproc_flags()
+ if cross_comp is not None:
+ new_options_for_host = cross_comp.get_options()
+ preproc_flags_for_host = cross_comp.get_preproc_flags()
+ else:
+ new_options_for_host = comp.get_options()
+ preproc_flags_for_host = comp.get_preproc_flags()
+
+ opts_machines_list = [
+ (new_options_for_build, preproc_flags_for_build, MachineChoice.BUILD),
+ (new_options_for_host, preproc_flags_for_host, MachineChoice.HOST),
+ ]
optprefix = lang + '_'
- for k, o in new_options.items():
- if not k.startswith(optprefix):
- raise MesonException('Internal error, %s has incorrect prefix.' % k)
- if k in cmd_line_options:
- o.set_value(cmd_line_options[k])
- self.compiler_options.setdefault(k, o)
-
- # Unlike compiler and linker flags, preprocessor flags are not in
- # compiler_options because they are not visible to user.
- preproc_flags = comp.get_preproc_flags()
- preproc_flags = shlex.split(preproc_flags)
- self.external_preprocess_args.setdefault(lang, preproc_flags)
+ for new_options, preproc_flags, for_machine in opts_machines_list:
+ for k, o in new_options.items():
+ if not k.startswith(optprefix):
+ raise MesonException('Internal error, %s has incorrect prefix.' % k)
+ if k in env.properties[for_machine]:
+ # Get from configuration files.
+ o.set_value(env.properties[for_machine][k])
+ if (env.machines.matches_build_machine(for_machine) and
+ k in env.cmd_line_options):
+ # TODO think about cross and command-line interface.
+ o.set_value(env.cmd_line_options[k])
+ self.compiler_options[for_machine].setdefault(k, o)
+
+ # Unlike compiler and linker flags, preprocessor flags are not in
+ # compiler_options because they are not visible to user.
+ preproc_flags = shlex.split(preproc_flags)
+ self.external_preprocess_args[for_machine].setdefault(lang, preproc_flags)
enabled_opts = []
for optname in comp.base_options:
if optname in self.base_options:
continue
oobj = compilers.base_options[optname]
- if optname in cmd_line_options:
- oobj.set_value(cmd_line_options[optname])
+ if optname in env.cmd_line_options:
+ oobj.set_value(env.cmd_line_options[optname])
enabled_opts.append(optname)
self.base_options[optname] = oobj
self.emit_base_options_warnings(enabled_opts)
diff --git a/mesonbuild/dependencies/__init__.py b/mesonbuild/dependencies/__init__.py
index afe2a3b..f5034db 100644
--- a/mesonbuild/dependencies/__init__.py
+++ b/mesonbuild/dependencies/__init__.py
@@ -18,7 +18,7 @@ from .base import ( # noqa: F401
ExternalDependency, NotFoundDependency, ExternalLibrary, ExtraFrameworkDependency, InternalDependency,
PkgConfigDependency, CMakeDependency, find_external_dependency, get_dep_identifier, packages, _packages_accept_language)
from .dev import GMockDependency, GTestDependency, LLVMDependency, ValgrindDependency
-from .misc import (MPIDependency, OpenMPDependency, Python3Dependency, ThreadDependency, PcapDependency, CupsDependency, LibWmfDependency, LibGCryptDependency)
+from .misc import (HDF5Dependency, MPIDependency, OpenMPDependency, Python3Dependency, ThreadDependency, PcapDependency, CupsDependency, LibWmfDependency, LibGCryptDependency)
from .platform import AppleFrameworks
from .ui import GLDependency, GnuStepDependency, Qt4Dependency, Qt5Dependency, SDL2Dependency, WxDependency, VulkanDependency
@@ -33,6 +33,7 @@ packages.update({
# From misc:
'boost': BoostDependency,
'mpi': MPIDependency,
+ 'hdf5': HDF5Dependency,
'openmp': OpenMPDependency,
'python3': Python3Dependency,
'threads': ThreadDependency,
@@ -54,6 +55,7 @@ packages.update({
'vulkan': VulkanDependency,
})
_packages_accept_language.update({
+ 'hdf5',
'mpi',
'openmp',
})
diff --git a/mesonbuild/dependencies/base.py b/mesonbuild/dependencies/base.py
index f5eb513..8196124 100644
--- a/mesonbuild/dependencies/base.py
+++ b/mesonbuild/dependencies/base.py
@@ -19,7 +19,6 @@ import copy
import functools
import os
import re
-import stat
import json
import shlex
import shutil
@@ -27,15 +26,17 @@ import textwrap
import platform
import itertools
import ctypes
+from typing import List
from enum import Enum
-from pathlib import PurePath
+from pathlib import Path, PurePath
from .. import mlog
from .. import mesonlib
from ..compilers import clib_langs
-from ..environment import BinaryTable
+from ..environment import BinaryTable, Environment
from ..mesonlib import MachineChoice, MesonException, OrderedSet, PerMachine
from ..mesonlib import Popen_safe, version_compare_many, version_compare, listify
+from ..mesonlib import Version
# These must be defined in this file to avoid cyclical references.
packages = {}
@@ -509,51 +510,61 @@ class PkgConfigDependency(ExternalDependency):
else:
for_machine = MachineChoice.HOST
- # Create a nested function for sake of early return
+ # Create an iterator of options
def search():
- # Only search for the pkg-config for each machine the first time and
- # store the result in the class definition
- if PkgConfigDependency.class_pkgbin[for_machine] is None:
- mlog.debug('Pkg-config binary for %s is not cached.' % for_machine)
- else:
- mlog.debug('Pkg-config binary for %s is cached.' % for_machine)
- choice = PkgConfigDependency.class_pkgbin[for_machine]
- assert choice is not None
- return choice
# Lookup in cross or machine file.
- bt = environment.binaries[for_machine]
- potential_pkgpath = bt.lookup_entry('pkgconfig')
- if potential_pkgpath is None:
- mlog.debug('Pkg-config binary missing from cross or native file, or PKG_CONFIG undefined.')
- else:
- mlog.debug('Pkg-config binary for %s specified from config file as %s.', for_machine, potential_pkgpath)
- potential_pkgbin = ExternalProgram.from_entry('pkgconfig', potential_pkgpath)
- if not potential_pkgbin.found():
- mlog.debug(
- 'Pkg-config %s for machine %s specified at %s but not found.',
- potential_pkgbin.name, for_machine, potential_pkgbin.command)
- else:
- return potential_pkgbin
+ potential_pkgpath = environment.binaries[for_machine].lookup_entry('pkgconfig')
+ if potential_pkgpath is not None:
+ mlog.debug('Pkg-config binary for {} specified from cross file, native file, '
+ 'or env var as {}'.format(for_machine, potential_pkgpath))
+ yield ExternalProgram.from_entry('pkgconfig', potential_pkgpath)
+ # We never fallback if the user-specified option is no good, so
+ # stop returning options.
+ return
+ mlog.debug('Pkg-config binary missing from cross or native file, or env var undefined.')
# Fallback on hard-coded defaults.
+ # TODO prefix this for the cross case instead of ignoring it.
if environment.machines.matches_build_machine(for_machine):
for potential_pkgpath in environment.default_pkgconfig:
- potential_pkgbin = self.check_pkgconfig(potential_pkgpath)
- if potential_pkgbin is None:
- mlog.debug(
- 'default Pkg-config fallback %s for machine %s specified at %s but not found.',
- potential_pkgbin.name, for_machine, potential_pkgbin.command)
- else:
- return potential_pkgbin
-
- self.pkgbin = search()
- if self.pkgbin is None:
- msg = 'Pkg-config binary for machine %s not found.' % for_machine
+ mlog.debug('Trying a default pkg-config fallback at', potential_pkgpath)
+ yield ExternalProgram(potential_pkgpath, silent=True)
+
+ # Only search for pkg-config for each machine the first time and store
+ # the result in the class definition
+ if PkgConfigDependency.class_pkgbin[for_machine] is False:
+ mlog.debug('Pkg-config binary for %s is cached as not found.' % for_machine)
+ elif PkgConfigDependency.class_pkgbin[for_machine] is not None:
+ mlog.debug('Pkg-config binary for %s is cached.' % for_machine)
+ else:
+ assert PkgConfigDependency.class_pkgbin[for_machine] is None
+ mlog.debug('Pkg-config binary for %s is not cached.' % for_machine)
+ for potential_pkgbin in search():
+ mlog.debug('Trying pkg-config binary {} for machine {} at {}'
+ .format(potential_pkgbin.name, for_machine, potential_pkgbin.command))
+ version_if_ok = self.check_pkgconfig(potential_pkgbin)
+ if not version_if_ok:
+ continue
+ if not self.silent:
+ mlog.log('Found pkg-config:', mlog.bold(potential_pkgbin.get_path()),
+ '(%s)' % version_if_ok)
+ PkgConfigDependency.class_pkgbin[for_machine] = potential_pkgbin
+ break
+ else:
+ if not self.silent:
+ mlog.log('Found pkg-config:', mlog.red('NO'))
+ # Set to False instead of None to signify that we've already
+ # searched for it and not found it
+ PkgConfigDependency.class_pkgbin[for_machine] = False
+
+ self.pkgbin = PkgConfigDependency.class_pkgbin[for_machine]
+ if self.pkgbin is False:
+ self.pkgbin = None
+ msg = 'Pkg-config binary for machine %s not found. Giving up.' % for_machine
if self.required:
raise DependencyException(msg)
else:
mlog.debug(msg)
- else:
- PkgConfigDependency.class_pkgbin[for_machine] = self.pkgbin
+ return
mlog.debug('Determining dependency {!r} with pkg-config executable '
'{!r}'.format(name, self.pkgbin.get_path()))
@@ -783,10 +794,10 @@ class PkgConfigDependency(ExternalDependency):
if 'define_variable' in kwargs:
definition = kwargs.get('define_variable', [])
if not isinstance(definition, list):
- raise MesonException('define_variable takes a list')
+ raise DependencyException('define_variable takes a list')
if len(definition) != 2 or not all(isinstance(i, str) for i in definition):
- raise MesonException('define_variable must be made up of 2 strings for VARIABLENAME and VARIABLEVALUE')
+ raise DependencyException('define_variable must be made up of 2 strings for VARIABLENAME and VARIABLEVALUE')
options = ['--define-variable=' + '='.join(definition)] + options
@@ -817,27 +828,26 @@ class PkgConfigDependency(ExternalDependency):
return [DependencyMethods.PKGCONFIG]
def check_pkgconfig(self, pkgbin):
- pkgbin = ExternalProgram(pkgbin, silent=True)
- if pkgbin.found():
- try:
- p, out = Popen_safe(pkgbin.get_command() + ['--version'])[0:2]
- if p.returncode != 0:
- mlog.warning('Found pkg-config {!r} but couldn\'t run it'
- ''.format(' '.join(pkgbin.get_command())))
- # Set to False instead of None to signify that we've already
- # searched for it and not found it
- pkgbin = False
- except (FileNotFoundError, PermissionError):
- pkgbin = False
- else:
- pkgbin = False
- if not self.silent:
- if pkgbin:
- mlog.log(mlog.bold('pkg-config'), 'found:', mlog.green('YES'), '({})'.format(pkgbin.get_path()),
- out.strip())
- else:
- mlog.log(mlog.bold('pkg-config'), 'found:', mlog.red('NO'))
- return pkgbin
+ if not pkgbin.found():
+ mlog.log('Did not find pkg-config by name {!r}'.format(pkgbin.name))
+ return None
+ try:
+ p, out = Popen_safe(pkgbin.get_command() + ['--version'])[0:2]
+ if p.returncode != 0:
+ mlog.warning('Found pkg-config {!r} but it failed when run'
+ ''.format(' '.join(pkgbin.get_command())))
+ return None
+ except FileNotFoundError:
+ mlog.warning('We thought we found pkg-config {!r} but now it\'s not there. How odd!'
+ ''.format(' '.join(pkgbin.get_command())))
+ return None
+ except PermissionError:
+ msg = 'Found pkg-config {!r} but didn\'t have permissions to run it.'.format(' '.join(pkgbin.get_command()))
+ if not mesonlib.is_windows():
+ msg += '\n\nOn Unix-like systems this is often caused by scripts that are not executable.'
+ mlog.warning(msg)
+ return None
+ return out.strip()
def extract_field(self, la_file, fieldname):
with open(la_file) as f:
@@ -904,8 +914,8 @@ class CMakeTarget:
class CMakeDependency(ExternalDependency):
# The class's copy of the CMake path. Avoids having to search for it
# multiple times in the same Meson invocation.
- class_cmakebin = None
- class_cmakevers = None
+ class_cmakebin = PerMachine(None, None, None)
+ class_cmakevers = PerMachine(None, None, None)
# We cache all pkg-config subprocess invocations to avoid redundant calls
cmake_cache = {}
# Version string for the minimum CMake version
@@ -916,7 +926,7 @@ class CMakeDependency(ExternalDependency):
def _gen_exception(self, msg):
return DependencyException('Dependency {} not found: {}'.format(self.name, msg))
- def __init__(self, name, environment, kwargs, language=None):
+ def __init__(self, name: str, environment: Environment, kwargs, language=None):
super().__init__('cmake', environment, language, kwargs)
self.name = name
self.is_libtool = False
@@ -937,43 +947,88 @@ class CMakeDependency(ExternalDependency):
# When finding dependencies for cross-compiling, we don't care about
# the 'native' CMake binary
# TODO: Test if this works as expected
- if self.want_cross:
- if 'cmake' not in environment.cross_info.config['binaries']:
- if self.required:
- raise self._gen_exception('CMake binary missing from cross file')
- else:
- potential_cmake = ExternalProgram.from_cross_info(environment.cross_info, 'cmake')
- if potential_cmake.found():
- self.cmakebin = potential_cmake
- CMakeDependency.class_cmakebin = self.cmakebin
- else:
- mlog.debug('Cross CMake %s not found.' % potential_cmake.name)
- # Only search for the native CMake the first time and
- # store the result in the class definition
- elif CMakeDependency.class_cmakebin is None:
- self.cmakebin, self.cmakevers = self.check_cmake()
- CMakeDependency.class_cmakebin = self.cmakebin
- CMakeDependency.class_cmakevers = self.cmakevers
+ if environment.is_cross_build() and not self.want_cross:
+ for_machine = MachineChoice.BUILD
else:
- self.cmakebin = CMakeDependency.class_cmakebin
- self.cmakevers = CMakeDependency.class_cmakevers
+ for_machine = MachineChoice.HOST
- if not self.cmakebin:
+ # Create an iterator of options
+ def search():
+ # Lookup in cross or machine file.
+ potential_cmakepath = environment.binaries[for_machine].lookup_entry('cmake')
+ if potential_cmakepath is not None:
+ mlog.debug('CMake binary for %s specified from cross file, native file, or env var as %s.', for_machine, potential_cmakepath)
+ yield ExternalProgram.from_entry('cmake', potential_cmakepath)
+ # We never fallback if the user-specified option is no good, so
+ # stop returning options.
+ return
+ mlog.debug('CMake binary missing from cross or native file, or env var undefined.')
+ # Fallback on hard-coded defaults.
+ # TODO prefix this for the cross case instead of ignoring it.
+ if environment.machines.matches_build_machine(for_machine):
+ for potential_cmakepath in environment.default_cmake:
+ mlog.debug('Trying a default CMake fallback at', potential_cmakepath)
+ yield ExternalProgram(potential_cmakepath, silent=True)
+
+ # Only search for CMake the first time and store the result in the class
+ # definition
+ if CMakeDependency.class_cmakebin[for_machine] is False:
+ mlog.debug('CMake binary for %s is cached as not found' % for_machine)
+ elif CMakeDependency.class_cmakebin[for_machine] is not None:
+ mlog.debug('CMake binary for %s is cached.' % for_machine)
+ else:
+ assert CMakeDependency.class_cmakebin[for_machine] is None
+ mlog.debug('CMake binary for %s is not cached' % for_machine)
+ for potential_cmakebin in search():
+ mlog.debug('Trying CMake binary {} for machine {} at {}'
+ .format(potential_cmakebin.name, for_machine, potential_cmakebin.command))
+ version_if_ok = self.check_cmake(potential_cmakebin)
+ if not version_if_ok:
+ continue
+ if not self.silent:
+ mlog.log('Found CMake:', mlog.bold(potential_cmakebin.get_path()),
+ '(%s)' % version_if_ok)
+ CMakeDependency.class_cmakebin[for_machine] = potential_cmakebin
+ CMakeDependency.class_cmakevers[for_machine] = version_if_ok
+ break
+ else:
+ if not self.silent:
+ mlog.log('Found CMake:', mlog.red('NO'))
+ # Set to False instead of None to signify that we've already
+ # searched for it and not found it
+ CMakeDependency.class_cmakebin[for_machine] = False
+ CMakeDependency.class_cmakevers[for_machine] = None
+
+ self.cmakebin = CMakeDependency.class_cmakebin[for_machine]
+ self.cmakevers = CMakeDependency.class_cmakevers[for_machine]
+ if self.cmakebin is False:
+ self.cmakebin = None
+ msg = 'CMake binary for machine %s not found. Giving up.' % for_machine
if self.required:
- raise self._gen_exception('CMake not found.')
+ raise DependencyException(msg)
+ mlog.debug(msg)
return
modules = kwargs.get('modules', [])
+ cm_path = kwargs.get('cmake_module_path', [])
+ cm_args = kwargs.get('cmake_args', [])
if not isinstance(modules, list):
modules = [modules]
- self._detect_dep(name, modules)
+ if not isinstance(cm_path, list):
+ cm_path = [cm_path]
+ if not isinstance(cm_args, list):
+ cm_args = [cm_args]
+ cm_path = [x if os.path.isabs(x) else os.path.join(environment.get_source_dir(), x) for x in cm_path]
+ if cm_path:
+ cm_args += ['-DCMAKE_MODULE_PATH={}'.format(';'.join(cm_path))]
+ self._detect_dep(name, modules, cm_args)
def __repr__(self):
s = '<{0} {1}: {2} {3}>'
return s.format(self.__class__.__name__, self.name, self.is_found,
self.version_reqs)
- def _detect_dep(self, name, modules):
+ def _detect_dep(self, name: str, modules: List[str], args: List[str]):
# Detect a dependency with CMake using the '--find-package' mode
# and the trace output (stderr)
#
@@ -989,7 +1044,7 @@ class CMakeDependency(ExternalDependency):
mlog.debug('Try CMake generator: {}'.format(i if len(i) > 0 else 'auto'))
# Prepare options
- cmake_opts = ['--trace-expand', '-DNAME={}'.format(name), '.']
+ cmake_opts = ['--trace-expand', '-DNAME={}'.format(name)] + args + ['.']
if len(i) > 0:
cmake_opts = ['-G', i] + cmake_opts
@@ -1438,48 +1493,34 @@ set(CMAKE_SIZEOF_VOID_P "{}")
def get_methods():
return [DependencyMethods.CMAKE]
- def check_cmake(self):
- evar = 'CMAKE'
- if evar in os.environ:
- cmakebin = os.environ[evar].strip()
- else:
- cmakebin = 'cmake'
- cmakebin = ExternalProgram(cmakebin, silent=True)
- cmvers = None
- invalid_version = False
- if cmakebin.found():
- try:
- p, out = Popen_safe(cmakebin.get_command() + ['--version'])[0:2]
- if p.returncode != 0:
- mlog.warning('Found CMake {!r} but couldn\'t run it'
- ''.format(' '.join(cmakebin.get_command())))
- # Set to False instead of None to signify that we've already
- # searched for it and not found it
- cmakebin = False
- except (FileNotFoundError, PermissionError):
- cmakebin = False
-
- cmvers = re.sub(r'\s*cmake version\s*', '', out.split('\n')[0]).strip()
- if not version_compare(cmvers, CMakeDependency.class_cmake_version):
- invalid_version = True
- else:
- cmakebin = False
- if not self.silent:
- if cmakebin and invalid_version:
- mlog.log('Found CMake:', mlog.red('NO'), '(version of', mlog.bold(cmakebin.get_path()),
- 'is', mlog.bold(cmvers), 'but version', mlog.bold(CMakeDependency.class_cmake_version),
- 'is required)')
- elif cmakebin:
- mlog.log('Found CMake:', mlog.bold(cmakebin.get_path()),
- '(%s)' % cmvers)
- else:
- mlog.log('Found CMake:', mlog.red('NO'))
-
- if invalid_version:
- cmakebin = False
- cmvers = None
-
- return cmakebin, cmvers
+ def check_cmake(self, cmakebin):
+ if not cmakebin.found():
+ mlog.log('Did not find CMake {!r}'.format(cmakebin.name))
+ return None
+ try:
+ p, out = Popen_safe(cmakebin.get_command() + ['--version'])[0:2]
+ if p.returncode != 0:
+ mlog.warning('Found CMake {!r} but couldn\'t run it'
+ ''.format(' '.join(cmakebin.get_command())))
+ return None
+ except FileNotFoundError:
+ mlog.warning('We thought we found CMake {!r} but now it\'s not there. How odd!'
+ ''.format(' '.join(cmakebin.get_command())))
+ return None
+ except PermissionError:
+ msg = 'Found CMake {!r} but didn\'t have permissions to run it.'.format(' '.join(cmakebin.get_command()))
+ if not mesonlib.is_windows():
+ msg += '\n\nOn Unix-like systems this is often caused by scripts that are not executable.'
+ mlog.warning(msg)
+ return None
+ cmvers = re.sub(r'\s*cmake version\s*', '', out.split('\n')[0]).strip()
+ if not version_compare(cmvers, CMakeDependency.class_cmake_version):
+ mlog.warning(
+ 'The version of CMake', mlog.bold(cmakebin.get_path()),
+ 'is', mlog.bold(cmvers), 'but version', mlog.bold(CMakeDependency.class_cmake_version),
+ 'is required')
+ return None
+ return cmvers
def log_tried(self):
return self.type_name
@@ -1633,9 +1674,9 @@ class DubDependency(ExternalDependency):
return ''
# Ex.: library-debug-linux.posix-x86_64-ldc_2081-EF934983A3319F8F8FF2F0E107A363BA
- build_name = 'library-{}-{}-{}-{}_{}'.format(description['buildType'], '.'.join(description['platform']), '.'.join(description['architecture']), comp, d_ver)
+ build_name = '-{}-{}-{}-{}_{}'.format(description['buildType'], '.'.join(description['platform']), '.'.join(description['architecture']), comp, d_ver)
for entry in os.listdir(module_build_path):
- if entry.startswith(build_name):
+ if build_name in entry:
for file in os.listdir(os.path.join(module_build_path, entry)):
if file == lib_file_name:
if folder_only:
@@ -1952,40 +1993,91 @@ class ExternalLibrary(ExternalDependency):
class ExtraFrameworkDependency(ExternalDependency):
- def __init__(self, name, required, path, env, lang, kwargs):
+ system_framework_paths = None
+
+ def __init__(self, name, required, paths, env, lang, kwargs):
super().__init__('extraframeworks', env, lang, kwargs)
self.name = name
self.required = required
- self.detect(name, path)
- if self.found():
- self.compile_args = ['-I' + os.path.join(self.path, self.name, 'Headers')]
- self.link_args = ['-F' + self.path, '-framework', self.name.split('.')[0]]
-
- def detect(self, name, path):
- # should use the compiler to look for frameworks, rather than peering at
- # the filesystem, so we can also find them when cross-compiling
- if self.want_cross:
+ # Full path to framework directory
+ self.framework_path = None
+ if not self.clib_compiler:
+ raise DependencyException('No C-like compilers are available')
+ if self.system_framework_paths is None:
+ self.system_framework_paths = self.clib_compiler.find_framework_paths(self.env)
+ self.detect(name, paths)
+
+ def detect(self, name, paths):
+ if not paths:
+ paths = self.system_framework_paths
+ for p in paths:
+ mlog.debug('Looking for framework {} in {}'.format(name, p))
+ # We need to know the exact framework path because it's used by the
+ # Qt5 dependency class, and for setting the include path. We also
+ # want to avoid searching in an invalid framework path which wastes
+ # time and can cause a false positive.
+ framework_path = self._get_framework_path(p, name)
+ if framework_path is None:
+ continue
+ # We want to prefer the specified paths (in order) over the system
+ # paths since these are "extra" frameworks.
+ # For example, Python2's framework is in /System/Library/Frameworks and
+ # Python3's framework is in /Library/Frameworks, but both are called
+ # Python.framework. We need to know for sure that the framework was
+ # found in the path we expect.
+ allow_system = p in self.system_framework_paths
+ args = self.clib_compiler.find_framework(name, self.env, [p], allow_system)
+ if args is None:
+ continue
+ self.link_args = args
+ self.framework_path = framework_path.as_posix()
+ self.compile_args = ['-F' + self.framework_path]
+ # We need to also add -I includes to the framework because all
+ # cross-platform projects such as OpenGL, Python, Qt, GStreamer,
+ # etc do not use "framework includes":
+ # https://developer.apple.com/library/archive/documentation/MacOSX/Conceptual/BPFrameworks/Tasks/IncludingFrameworks.html
+ incdir = self._get_framework_include_path(framework_path)
+ if incdir:
+ self.compile_args += ['-I' + incdir]
+ self.is_found = True
return
+ def _get_framework_path(self, path, name):
+ p = Path(path)
lname = name.lower()
- if path is None:
- paths = ['/System/Library/Frameworks', '/Library/Frameworks']
- else:
- paths = [path]
- for p in paths:
- for d in os.listdir(p):
- fullpath = os.path.join(p, d)
- if lname != d.rsplit('.', 1)[0].lower():
- continue
- if not stat.S_ISDIR(os.stat(fullpath).st_mode):
- continue
- self.path = p
- self.name = d
- self.is_found = True
- return
+ for d in p.glob('*.framework/'):
+ if lname == d.name.rsplit('.', 1)[0].lower():
+ return d
+ return None
+
+ def _get_framework_latest_version(self, path):
+ versions = []
+ for each in path.glob('Versions/*'):
+ # macOS filesystems are usually case-insensitive
+ if each.name.lower() == 'current':
+ continue
+ versions.append(Version(each.name))
+ return 'Versions/{}/Headers'.format(sorted(versions)[-1]._s)
+
+ def _get_framework_include_path(self, path):
+ # According to the spec, 'Headers' must always be a symlink to the
+ # Headers directory inside the currently-selected version of the
+ # framework, but sometimes frameworks are broken. Look in 'Versions'
+ # for the currently-selected version or pick the latest one.
+ trials = ('Headers', 'Versions/Current/Headers',
+ self._get_framework_latest_version(path))
+ for each in trials:
+ trial = path / each
+ if trial.is_dir():
+ return trial.as_posix()
+ return None
+
+ @staticmethod
+ def get_methods():
+ return [DependencyMethods.EXTRAFRAMEWORK]
def log_info(self):
- return os.path.join(self.path, self.name)
+ return self.framework_path
def log_tried(self):
return 'framework'
@@ -2012,6 +2104,7 @@ display_name_map = {
'dub': 'DUB',
'gmock': 'GMock',
'gtest': 'GTest',
+ 'hdf5': 'HDF5',
'llvm': 'LLVM',
'mpi': 'MPI',
'openmp': 'OpenMP',
@@ -2054,7 +2147,7 @@ def find_external_dependency(name, env, kwargs):
d = c()
d._check_version()
pkgdep.append(d)
- except Exception as e:
+ except DependencyException as e:
pkg_exc.append(e)
mlog.debug(str(e))
else:
@@ -2096,7 +2189,7 @@ def find_external_dependency(name, env, kwargs):
# if an exception occurred with the first detection method, re-raise it
# (on the grounds that it came from the preferred dependency detection
# method)
- if pkg_exc[0]:
+ if pkg_exc and pkg_exc[0]:
raise pkg_exc[0]
# we have a list of failed ExternalDependency objects, so we can report
@@ -2141,6 +2234,14 @@ def _build_external_dependency_list(name, env, kwargs):
candidates.append(functools.partial(CMakeDependency, name, env, kwargs))
return candidates
+ # If it's explicitly requested, use the Extraframework detection method (only)
+ if 'extraframework' == kwargs.get('method', ''):
+ # On OSX, also try framework dependency detector
+ if mesonlib.is_osx():
+ candidates.append(functools.partial(ExtraFrameworkDependency, name,
+ False, None, env, None, kwargs))
+ return candidates
+
# Otherwise, just use the pkgconfig and cmake dependency detector
if 'auto' == kwargs.get('method', 'auto'):
candidates.append(functools.partial(PkgConfigDependency, name, env, kwargs))
diff --git a/mesonbuild/dependencies/misc.py b/mesonbuild/dependencies/misc.py
index 9e0a65a..208f063 100644
--- a/mesonbuild/dependencies/misc.py
+++ b/mesonbuild/dependencies/misc.py
@@ -14,14 +14,13 @@
# This file contains the detection logic for miscellaneous external dependencies.
+from pathlib import Path
import functools
import os
import re
import shlex
import sysconfig
-from pathlib import Path
-
from .. import mlog
from .. import mesonlib
from ..environment import detect_cpu_family
@@ -33,6 +32,52 @@ from .base import (
)
+class HDF5Dependency(ExternalDependency):
+
+ def __init__(self, environment, kwargs):
+ language = kwargs.get('language', 'c')
+ super().__init__('hdf5', environment, language, kwargs)
+ kwargs['required'] = False
+ kwargs['silent'] = True
+ self.is_found = False
+
+ pkgconfig_files = ['hdf5']
+
+ if language not in ('c', 'cpp', 'fortran'):
+ raise DependencyException('Language {} is not supported with HDF5.'.format(language))
+
+ for pkg in pkgconfig_files:
+ try:
+ pkgdep = PkgConfigDependency(pkg, environment, kwargs, language=self.language)
+ if pkgdep.found():
+ self.compile_args = pkgdep.get_compile_args()
+ # derive needed libraries by language
+ pd_link_args = pkgdep.get_link_args()
+ link_args = []
+ for larg in pd_link_args:
+ lpath = Path(larg)
+ if lpath.is_file():
+ if language == 'cpp':
+ link_args.append(str(lpath.parent / (lpath.stem + '_hl_cpp' + lpath.suffix)))
+ link_args.append(str(lpath.parent / (lpath.stem + '_cpp' + lpath.suffix)))
+ elif language == 'fortran':
+ link_args.append(str(lpath.parent / (lpath.stem + 'hl_fortran' + lpath.suffix)))
+ link_args.append(str(lpath.parent / (lpath.stem + '_fortran' + lpath.suffix)))
+
+ # HDF5 C libs are required by other HDF5 languages
+ link_args.append(str(lpath.parent / (lpath.stem + '_hl' + lpath.suffix)))
+ link_args.append(larg)
+ else:
+ link_args.append(larg)
+
+ self.link_args = link_args
+ self.version = pkgdep.get_version()
+ self.is_found = True
+ self.pcdep = pkgdep
+ break
+ except Exception:
+ pass
+
class MPIDependency(ExternalDependency):
def __init__(self, environment, kwargs):
@@ -176,7 +221,7 @@ class MPIDependency(ExternalDependency):
mlog.debug(mlog.bold('Standard output\n'), o)
mlog.debug(mlog.bold('Standard error\n'), e)
return
- version = re.search('\d+.\d+.\d+', o)
+ version = re.search(r'\d+.\d+.\d+', o)
if version:
version = version.group(0)
else:
@@ -307,7 +352,7 @@ class Python3Dependency(ExternalDependency):
# There is a python in /System/Library/Frameworks, but that's
# python 2, Python 3 will always be in /Library
candidates.append(functools.partial(
- ExtraFrameworkDependency, 'python', False, '/Library/Frameworks',
+ ExtraFrameworkDependency, 'Python', False, ['/Library/Frameworks'],
environment, kwargs.get('language', None), kwargs))
return candidates
diff --git a/mesonbuild/dependencies/platform.py b/mesonbuild/dependencies/platform.py
index c78ebed..7e9f9d8 100644
--- a/mesonbuild/dependencies/platform.py
+++ b/mesonbuild/dependencies/platform.py
@@ -15,8 +15,6 @@
# This file contains the detection logic for external dependencies that are
# platform-specific (generally speaking).
-from .. import mesonlib
-
from .base import ExternalDependency, DependencyException
@@ -29,11 +27,19 @@ class AppleFrameworks(ExternalDependency):
if not modules:
raise DependencyException("AppleFrameworks dependency requires at least one module.")
self.frameworks = modules
- # FIXME: Use self.clib_compiler to check if the frameworks are available
+ if not self.clib_compiler:
+ raise DependencyException('No C-like compilers are available, cannot find the framework')
+ self.is_found = True
for f in self.frameworks:
- self.link_args += ['-framework', f]
-
- self.is_found = mesonlib.for_darwin(self.want_cross, self.env)
+ args = self.clib_compiler.find_framework(f, env, [])
+ if args is not None:
+ # No compile args are needed for system frameworks
+ self.link_args = args
+ else:
+ self.is_found = False
+
+ def log_info(self):
+ return ', '.join(self.frameworks)
def log_tried(self):
return 'framework'
diff --git a/mesonbuild/dependencies/ui.py b/mesonbuild/dependencies/ui.py
index 03c96f2..ce1ca68 100644
--- a/mesonbuild/dependencies/ui.py
+++ b/mesonbuild/dependencies/ui.py
@@ -91,9 +91,9 @@ class GnuStepDependency(ConfigToolDependency):
'link_args'))
def find_config(self, versions=None):
- tool = self.tools[0]
+ tool = [self.tools[0]]
try:
- p, out = Popen_safe([tool, '--help'])[:2]
+ p, out = Popen_safe(tool + ['--help'])[:2]
except (FileNotFoundError, PermissionError):
return (None, None)
if p.returncode != 0:
@@ -177,13 +177,13 @@ def _qt_get_private_includes(mod_inc_dir, module, mod_version):
os.path.join(private_dir, 'Qt' + module))
class QtExtraFrameworkDependency(ExtraFrameworkDependency):
- def __init__(self, name, required, path, env, lang, kwargs):
- super().__init__(name, required, path, env, lang, kwargs)
+ def __init__(self, name, required, paths, env, lang, kwargs):
+ super().__init__(name, required, paths, env, lang, kwargs)
self.mod_name = name[2:]
def get_compile_args(self, with_private_headers=False, qt_version="0"):
if self.found():
- mod_inc_dir = os.path.join(self.path, self.name, 'Headers')
+ mod_inc_dir = os.path.join(self.framework_path, 'Headers')
args = ['-I' + mod_inc_dir]
if with_private_headers:
args += ['-I' + dirname for dirname in _qt_get_private_includes(mod_inc_dir, self.mod_name, qt_version)]
@@ -216,9 +216,11 @@ class QtBaseDependency(ExternalDependency):
methods = []
# Prefer pkg-config, then fallback to `qmake -query`
if DependencyMethods.PKGCONFIG in self.methods:
+ mlog.debug('Trying to find qt with pkg-config')
self._pkgconfig_detect(mods, kwargs)
methods.append('pkgconfig')
if not self.is_found and DependencyMethods.QMAKE in self.methods:
+ mlog.debug('Trying to find qt with qmake')
self.from_text = self._qmake_detect(mods, kwargs)
methods.append('qmake-' + self.name)
methods.append('qmake')
@@ -360,9 +362,9 @@ class QtBaseDependency(ExternalDependency):
# Didn't find qmake :(
self.is_found = False
return
- self.version = re.search(self.qtver + '(\.\d+)+', stdo).group(0)
+ self.version = re.search(self.qtver + r'(\.\d+)+', stdo).group(0)
# Query library path, header path, and binary path
- mlog.log("Found qmake:", mlog.bold(self.qmake.get_name()), '(%s)' % self.version)
+ mlog.log("Found qmake:", mlog.bold(self.qmake.get_path()), '(%s)' % self.version)
stdo = Popen_safe(self.qmake.get_command() + ['-query'])[1]
qvars = {}
for line in stdo.split('\n'):
@@ -371,7 +373,9 @@ class QtBaseDependency(ExternalDependency):
continue
(k, v) = tuple(line.split(':', 1))
qvars[k] = v
- if mesonlib.is_osx():
+ # Qt on macOS uses a framework, but Qt for iOS does not
+ if self.env.machines.host.is_darwin() and 'ios' not in qvars['QMAKE_XSPEC']:
+ mlog.debug("Building for macOS, looking for framework")
self._framework_detect(qvars, mods, kwargs)
return qmake
incdir = qvars['QT_INSTALL_HEADERS']
@@ -442,7 +446,8 @@ class QtBaseDependency(ExternalDependency):
for m in modules:
fname = 'Qt' + m
- fwdep = QtExtraFrameworkDependency(fname, False, libdir, self.env,
+ mlog.debug('Looking for qt framework ' + fname)
+ fwdep = QtExtraFrameworkDependency(fname, False, [libdir], self.env,
self.language, fw_kwargs)
self.compile_args.append('-F' + libdir)
if fwdep.found():
diff --git a/mesonbuild/environment.py b/mesonbuild/environment.py
index d853020..b23509a 100644
--- a/mesonbuild/environment.py
+++ b/mesonbuild/environment.py
@@ -12,7 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-import configparser, os, platform, re, sys, shlex, shutil, subprocess
+import configparser, os, platform, re, sys, shlex, shutil, subprocess, typing
from . import coredata
from .linkers import ArLinker, ArmarLinker, VisualStudioLinker, DLinker, CcrxLinker
@@ -321,7 +321,7 @@ def search_version(text):
# This regex is reaching magic levels. If it ever needs
# to be updated, do not complexify but convert to something
# saner instead.
- version_regex = '(?<!(\d|\.))(\d{1,2}(\.\d+)+(-[a-zA-Z0-9]+)?)'
+ version_regex = r'(?<!(\d|\.))(\d{1,2}(\.\d+)+(-[a-zA-Z0-9]+)?)'
match = re.search(version_regex, text)
if match:
return match.group(0)
@@ -433,6 +433,7 @@ class Environment:
self.cuda_static_linker = ['nvlink']
self.gcc_static_linker = ['gcc-ar']
self.clang_static_linker = ['llvm-ar']
+ self.default_cmake = ['cmake']
self.default_pkgconfig = ['pkg-config']
# Various prefixes and suffixes for import libraries, shared libraries,
@@ -765,7 +766,22 @@ class Environment:
except OSError as e:
popen_exceptions[' '.join(compiler + [arg])] = e
continue
- version = search_version(out)
+ # Example nvcc printout:
+ #
+ # nvcc: NVIDIA (R) Cuda compiler driver
+ # Copyright (c) 2005-2018 NVIDIA Corporation
+ # Built on Sat_Aug_25_21:08:01_CDT_2018
+ # Cuda compilation tools, release 10.0, V10.0.130
+ #
+ # search_version() first finds the "10.0" after "release",
+ # rather than the more precise "10.0.130" after "V".
+ # The patch version number is occasionally important; For
+ # instance, on Linux,
+ # - CUDA Toolkit 8.0.44 requires NVIDIA Driver 367.48
+ # - CUDA Toolkit 8.0.61 requires NVIDIA Driver 375.26
+ # Luckily, the "V" also makes it very simple to extract
+ # the full version:
+ version = out.strip().split('V')[-1]
cls = CudaCompiler
return cls(ccache + compiler, version, is_cross, exe_wrap)
raise EnvironmentException('Could not find suitable CUDA compiler: "' + ' '.join(compilers) + '"')
@@ -1085,7 +1101,7 @@ class Environment:
def detect_compilers(self, lang: str, need_cross_compiler: bool):
(comp, cross_comp) = self.compilers_from_language(lang, need_cross_compiler)
if comp is not None:
- self.coredata.process_new_compilers(lang, comp, cross_comp, self.cmd_line_options)
+ self.coredata.process_new_compilers(lang, comp, cross_comp, self)
return comp, cross_comp
def detect_static_linker(self, compiler):
@@ -1267,14 +1283,10 @@ class MesonConfigFile:
return out
class Properties:
- def __init__(self):
- self.properties = {}
-
- def get_external_args(self, language):
- return mesonlib.stringlistify(self.properties.get(language + '_args', []))
-
- def get_external_link_args(self, language):
- return mesonlib.stringlistify(self.properties.get(language + '_link_args', []))
+ def __init__(
+ self,
+ properties: typing.Optional[typing.Dict[str, typing.Union[str, typing.List[str]]]] = None):
+ self.properties = properties or {}
def has_stdlib(self, language):
return language + '_stdlib' in self.properties
@@ -1288,6 +1300,11 @@ class Properties:
def get_sys_root(self):
return self.properties.get('sys_root', None)
+ def __eq__(self, other):
+ if isinstance(other, type(self)):
+ return self.properties == other.properties
+ return NotImplemented
+
# TODO consider removing so Properties is less freeform
def __getitem__(self, key):
return self.properties[key]
@@ -1321,6 +1338,9 @@ class MachineInfo:
return NotImplemented
return not self.__eq__(other)
+ def __repr__(self):
+ return '<MachineInfo: {} {} ({})>'.format(self.system, self.cpu_family, self.cpu)
+
@staticmethod
def detect(compilers = None):
"""Detect the machine we're running on
@@ -1503,6 +1523,8 @@ class BinaryTable:
'ar': 'AR',
'windres': 'WINDRES',
+ 'cmake': 'CMAKE',
+ 'qmake': 'QMAKE',
'pkgconfig': 'PKG_CONFIG',
}
diff --git a/mesonbuild/interpreter.py b/mesonbuild/interpreter.py
index ee571fd..2eb0720 100644
--- a/mesonbuild/interpreter.py
+++ b/mesonbuild/interpreter.py
@@ -36,6 +36,7 @@ import os, shutil, uuid
import re, shlex
import subprocess
from collections import namedtuple
+from itertools import chain
from pathlib import PurePath
import functools
@@ -143,31 +144,32 @@ class TryRunResultHolder(InterpreterObject):
class RunProcess(InterpreterObject):
- def __init__(self, cmd, args, source_dir, build_dir, subdir, mesonintrospect, in_builddir=False, check=False, capture=True):
+ def __init__(self, cmd, args, env, source_dir, build_dir, subdir, mesonintrospect, in_builddir=False, check=False, capture=True):
super().__init__()
if not isinstance(cmd, ExternalProgram):
raise AssertionError('BUG: RunProcess must be passed an ExternalProgram')
self.capture = capture
- pc, self.stdout, self.stderr = self.run_command(cmd, args, source_dir, build_dir, subdir, mesonintrospect, in_builddir, check)
+ pc, self.stdout, self.stderr = self.run_command(cmd, args, env, source_dir, build_dir, subdir, mesonintrospect, in_builddir, check)
self.returncode = pc.returncode
self.methods.update({'returncode': self.returncode_method,
'stdout': self.stdout_method,
'stderr': self.stderr_method,
})
- def run_command(self, cmd, args, source_dir, build_dir, subdir, mesonintrospect, in_builddir, check=False):
+ def run_command(self, cmd, args, env, source_dir, build_dir, subdir, mesonintrospect, in_builddir, check=False):
command_array = cmd.get_command() + args
- env = {'MESON_SOURCE_ROOT': source_dir,
- 'MESON_BUILD_ROOT': build_dir,
- 'MESON_SUBDIR': subdir,
- 'MESONINTROSPECT': ' '.join([shlex.quote(x) for x in mesonintrospect]),
- }
+ menv = {'MESON_SOURCE_ROOT': source_dir,
+ 'MESON_BUILD_ROOT': build_dir,
+ 'MESON_SUBDIR': subdir,
+ 'MESONINTROSPECT': ' '.join([shlex.quote(x) for x in mesonintrospect]),
+ }
if in_builddir:
cwd = os.path.join(build_dir, subdir)
else:
cwd = os.path.join(source_dir, subdir)
child_env = os.environ.copy()
- child_env.update(env)
+ child_env.update(menv)
+ child_env = env.get_env(child_env)
stdout = subprocess.PIPE if self.capture else subprocess.DEVNULL
mlog.debug('Running command:', ' '.join(command_array))
try:
@@ -275,7 +277,8 @@ class ConfigurationDataHolder(MutableInterpreterObject, ObjectHolder):
if len(args) == 1 and isinstance(args[0], list) and len(args[0]) == 2:
mlog.deprecation('Passing a list as the single argument to '
'configuration_data.set is deprecated. This will '
- 'become a hard error in the future.')
+ 'become a hard error in the future.',
+ location=self.current_node)
args = args[0]
if len(args) != 2:
@@ -288,7 +291,7 @@ class ConfigurationDataHolder(MutableInterpreterObject, ObjectHolder):
msg = 'Setting a configuration data value to {!r} is invalid, ' \
'and will fail at configure_file(). If you are using it ' \
'just to store some values, please use a dict instead.'
- mlog.deprecation(msg.format(val))
+ mlog.deprecation(msg.format(val), location=self.current_node)
desc = kwargs.get('description', None)
if not isinstance(name, str):
raise InterpreterException("First argument to set must be a string.")
@@ -996,8 +999,13 @@ class CompilerHolder(InterpreterObject):
idir = os.path.join(self.environment.get_source_dir(),
i.held_object.get_curdir(), idir)
args += self.compiler.get_include_args(idir, False)
+ native = kwargs.get('native', None)
+ if native:
+ for_machine = MachineChoice.BUILD
+ else:
+ for_machine = MachineChoice.HOST
if not nobuiltins:
- opts = self.environment.coredata.compiler_options
+ opts = self.environment.coredata.compiler_options[for_machine]
args += self.compiler.get_option_compile_args(opts)
if mode == 'link':
args += self.compiler.get_option_link_args(opts)
@@ -1927,6 +1935,7 @@ permitted_kwargs = {'add_global_arguments': {'language', 'native'},
'main',
'method',
'modules',
+ 'cmake_module_path',
'optional_modules',
'native',
'not_found_message',
@@ -1934,6 +1943,7 @@ permitted_kwargs = {'add_global_arguments': {'language', 'native'},
'static',
'version',
'private_headers',
+ 'cmake_args',
},
'declare_dependency': {'include_directories',
'link_with',
@@ -1954,7 +1964,7 @@ permitted_kwargs = {'add_global_arguments': {'language', 'native'},
'install_subdir': {'exclude_files', 'exclude_directories', 'install_dir', 'install_mode', 'strip_directory'},
'jar': build.known_jar_kwargs,
'project': {'version', 'meson_version', 'default_options', 'license', 'subproject_dir'},
- 'run_command': {'check', 'capture'},
+ 'run_command': {'check', 'capture', 'env'},
'run_target': {'command', 'depends'},
'shared_library': build.known_shlib_kwargs,
'shared_module': build.known_shmod_kwargs,
@@ -2033,9 +2043,10 @@ class Interpreter(InterpreterBase):
for def_opt_name, def_opt_value in self.project_default_options.items():
for option_type in env.coredata.get_all_options():
for cur_opt_name, cur_opt_value in option_type.items():
- if (def_opt_name == cur_opt_name and
- def_opt_value != cur_opt_value.value):
- yield (def_opt_name, def_opt_value, cur_opt_value)
+ if def_opt_name == cur_opt_name:
+ def_opt_value = env.coredata.validate_option_value(def_opt_name, def_opt_value)
+ if def_opt_value != cur_opt_value.value:
+ yield (def_opt_name, def_opt_value, cur_opt_value)
def build_func_dict(self):
self.funcs.update({'add_global_arguments': self.func_add_global_arguments,
@@ -2259,6 +2270,7 @@ external dependencies (including libraries) must go to "dependencies".''')
if not isinstance(actual, wanted):
raise InvalidArguments('Incorrect argument type.')
+ @FeatureNewKwargs('run_command', '0.50.0', ['env'])
@FeatureNewKwargs('run_command', '0.47.0', ['check', 'capture'])
@permittedKwargs(permitted_kwargs['run_command'])
def func_run_command(self, node, args, kwargs):
@@ -2277,6 +2289,8 @@ external dependencies (including libraries) must go to "dependencies".''')
if not isinstance(check, bool):
raise InterpreterException('Check must be boolean.')
+ env = self.unpack_env_kwarg(kwargs)
+
m = 'must be a string, or the output of find_program(), files() '\
'or configure_file(), or a compiler object; not {!r}'
if isinstance(cmd, ExternalProgramHolder):
@@ -2326,7 +2340,7 @@ external dependencies (including libraries) must go to "dependencies".''')
if not a.startswith('..'):
if a not in self.build_def_files:
self.build_def_files.append(a)
- return RunProcess(cmd, expanded_args, srcdir, builddir, self.subdir,
+ return RunProcess(cmd, expanded_args, env, srcdir, builddir, self.subdir,
self.environment.get_build_command() + ['introspect'],
in_builddir=in_builddir, check=check, capture=capture)
@@ -2446,8 +2460,9 @@ external dependencies (including libraries) must go to "dependencies".''')
def get_option_internal(self, optname):
# Some base options are not defined in some environments, return the
# default value from compilers.base_options in that case.
- for d in [self.coredata.base_options, compilers.base_options,
- self.coredata.builtins, self.coredata.compiler_options]:
+ for d in chain(
+ [self.coredata.base_options, compilers.base_options, self.coredata.builtins],
+ self.coredata.get_all_compiler_options()):
try:
return d[optname]
except KeyError:
@@ -2848,7 +2863,7 @@ external dependencies (including libraries) must go to "dependencies".''')
subproject = self.subprojects[dirname]
if subproject.found():
dep = self.subprojects[dirname].get_variable_method([varname], {})
- except InvalidArguments as e:
+ except InvalidArguments:
pass
if not isinstance(dep, DependencyHolder):
@@ -2897,10 +2912,10 @@ external dependencies (including libraries) must go to "dependencies".''')
elif name == 'openmp':
FeatureNew('OpenMP Dependency', '0.46.0').use(self.subproject)
+ @FeatureNewKwargs('dependency', '0.50.0', ['not_found_message', 'cmake_module_path', 'cmake_args'])
@FeatureNewKwargs('dependency', '0.49.0', ['disabler'])
@FeatureNewKwargs('dependency', '0.40.0', ['method'])
@FeatureNewKwargs('dependency', '0.38.0', ['default_options'])
- @FeatureNewKwargs('dependency', '0.50.0', ['not_found_message'])
@disablerIfNotFound
@permittedKwargs(permitted_kwargs['dependency'])
def func_dependency(self, node, args, kwargs):
@@ -3589,6 +3604,10 @@ This will become a hard error in the future.''' % kwargs['input'], location=self
# for backwards compatibility. That was the behaviour before
# 0.45.0 so preserve it.
idir = kwargs.get('install_dir', '')
+ if idir is False:
+ idir = ''
+ mlog.deprecation('Please use the new `install:` kwarg instead of passing '
+ '`false` to `install_dir:`', location=node)
if not isinstance(idir, str):
raise InterpreterException('"install_dir" must be a string')
install = kwargs.get('install', idir != '')
diff --git a/mesonbuild/interpreterbase.py b/mesonbuild/interpreterbase.py
index 45a4cb0..9206d02 100644
--- a/mesonbuild/interpreterbase.py
+++ b/mesonbuild/interpreterbase.py
@@ -18,7 +18,7 @@
from . import mparser, mesonlib, mlog
from . import environment, dependencies
-import os, copy, re, types
+import os, copy, re
from functools import wraps
class ObjectHolder:
diff --git a/mesonbuild/mconf.py b/mesonbuild/mconf.py
index 9f74eae..fbb528c 100644
--- a/mesonbuild/mconf.py
+++ b/mesonbuild/mconf.py
@@ -168,7 +168,8 @@ class Conf:
self.print_options('Core options', core_options)
self.print_options('Backend options', self.coredata.backend_options)
self.print_options('Base options', self.coredata.base_options)
- self.print_options('Compiler options', self.coredata.compiler_options)
+ # TODO others
+ self.print_options('Compiler options', self.coredata.compiler_options.build)
self.print_options('Directories', dir_options)
self.print_options('Project options', self.coredata.user_options)
self.print_options('Testing options', test_options)
@@ -191,6 +192,9 @@ def run(options):
save = False
if len(options.cmd_line_options) > 0:
c.set_options(options.cmd_line_options)
+ if not c.build.environment.is_cross_build():
+ # TODO think about cross and command-line interface.
+ c.coredata.compiler_options.host = c.coredata.compiler_options.build
coredata.update_cmd_line_file(builddir, options)
save = True
elif options.clearcache:
diff --git a/mesonbuild/mesonlib.py b/mesonbuild/mesonlib.py
index 2170fec..540fcdc 100644
--- a/mesonbuild/mesonlib.py
+++ b/mesonbuild/mesonlib.py
@@ -461,6 +461,26 @@ def exe_exists(arglist):
pass
return False
+lru_cache(maxsize=None)
+def darwin_get_object_archs(objpath):
+ '''
+ For a specific object (executable, static library, dylib, etc), run `lipo`
+ to fetch the list of archs supported by it. Supports both thin objects and
+ 'fat' objects.
+ '''
+ _, stdo, stderr = Popen_safe(['lipo', '-info', objpath])
+ if not stdo:
+ mlog.debug('lipo {}: {}'.format(objpath, stderr))
+ return None
+ stdo = stdo.rsplit(': ', 1)[1]
+ # Convert from lipo-style archs to meson-style CPUs
+ stdo = stdo.replace('i386', 'x86')
+ stdo = stdo.replace('arm64', 'aarch64')
+ # Add generic name for armv7 and armv7s
+ if 'armv7' in stdo:
+ stdo += ' arm'
+ return stdo.split()
+
def detect_vcs(source_dir):
vcs_systems = [
dict(name = 'git', cmd = 'git', repo_dir = '.git', get_rev = 'git describe --dirty=+', rev_regex = '(.*)', dep = '.git/logs/HEAD'),
@@ -496,6 +516,9 @@ class Version:
def __str__(self):
return '%s (V=%s)' % (self._s, str(self._v))
+ def __repr__(self):
+ return '<Version: {}>'.format(self._s)
+
def __lt__(self, other):
return self.__cmp__(other) == -1
@@ -608,7 +631,7 @@ def version_compare_condition_with_min(condition, minimum):
# Map versions in the constraint of the form '0.46' to '0.46.0', to embed
# this knowledge of the meson versioning scheme.
condition = condition.strip()
- if re.match('^\d+.\d+$', condition):
+ if re.match(r'^\d+.\d+$', condition):
condition += '.0'
return cmpop(Version(minimum), Version(condition))
diff --git a/mesonbuild/mesonmain.py b/mesonbuild/mesonmain.py
index 037d76c..f0f287f 100644
--- a/mesonbuild/mesonmain.py
+++ b/mesonbuild/mesonmain.py
@@ -120,7 +120,7 @@ class CommandLineParser:
if os.environ.get('MESON_FORCE_BACKTRACE'):
raise
return 1
- except Exception as e:
+ except Exception:
if os.environ.get('MESON_FORCE_BACKTRACE'):
raise
traceback.print_exc()
diff --git a/mesonbuild/minstall.py b/mesonbuild/minstall.py
index 74a8497..c6b6bbf 100644
--- a/mesonbuild/minstall.py
+++ b/mesonbuild/minstall.py
@@ -101,7 +101,7 @@ def set_chown(path, user=None, group=None, dir_fd=None, follow_symlinks=True):
def set_chmod(path, mode, dir_fd=None, follow_symlinks=True):
try:
os.chmod(path, mode, dir_fd=dir_fd, follow_symlinks=follow_symlinks)
- except (NotImplementedError, OSError, SystemError) as e:
+ except (NotImplementedError, OSError, SystemError):
if not os.path.islink(path):
os.chmod(path, mode, dir_fd=dir_fd)
@@ -157,7 +157,7 @@ def restore_selinux_contexts():
'''
try:
subprocess.check_call(['selinuxenabled'])
- except (FileNotFoundError, PermissionError, subprocess.CalledProcessError) as e:
+ except (FileNotFoundError, PermissionError, subprocess.CalledProcessError):
# If we don't have selinux or selinuxenabled returned 1, failure
# is ignored quietly.
return
diff --git a/mesonbuild/mintro.py b/mesonbuild/mintro.py
index 4657b8c..2d01c11 100644
--- a/mesonbuild/mintro.py
+++ b/mesonbuild/mintro.py
@@ -21,14 +21,9 @@ project files and don't need this info."""
import json
from . import build, coredata as cdata
-from . import environment
from . import mesonlib
-from . import astinterpreter
-from . import mparser
+from .ast import IntrospectionInterpreter
from . import mlog
-from . import compilers
-from . import optinterpreter
-from .interpreterbase import InvalidArguments
from .backend import backends
import sys, os
import pathlib
@@ -151,122 +146,6 @@ def list_targets(builddata: build.Build, installdata, backend: backends.Backend)
tlist.append(t)
return tlist
-class IntrospectionHelper:
- # mimic an argparse namespace
- def __init__(self, cross_file):
- self.cross_file = cross_file
- self.native_file = None
- self.cmd_line_options = {}
-
-class IntrospectionInterpreter(astinterpreter.AstInterpreter):
- # Interpreter to detect the options without a build directory
- # Most of the code is stolen from interperter.Interpreter
- def __init__(self, source_root, subdir, backend, cross_file=None, subproject='', subproject_dir='subprojects', env=None):
- super().__init__(source_root, subdir)
-
- options = IntrospectionHelper(cross_file)
- self.cross_file = cross_file
- if env is None:
- self.environment = environment.Environment(source_root, None, options)
- else:
- self.environment = env
- self.subproject = subproject
- self.subproject_dir = subproject_dir
- self.coredata = self.environment.get_coredata()
- self.option_file = os.path.join(self.source_root, self.subdir, 'meson_options.txt')
- self.backend = backend
- self.default_options = {'backend': self.backend}
- self.project_data = {}
-
- self.funcs.update({
- 'project': self.func_project,
- 'add_languages': self.func_add_languages
- })
-
- def flatten_args(self, args):
- # Resolve mparser.ArrayNode if needed
- flattend_args = []
- if isinstance(args, mparser.ArrayNode):
- args = [x.value for x in args.args.arguments]
- for i in args:
- if isinstance(i, mparser.ArrayNode):
- flattend_args += [x.value for x in i.args.arguments]
- elif isinstance(i, str):
- flattend_args += [i]
- else:
- pass
- return flattend_args
-
- def func_project(self, node, args, kwargs):
- if len(args) < 1:
- raise InvalidArguments('Not enough arguments to project(). Needs at least the project name.')
-
- proj_name = args[0]
- proj_vers = kwargs.get('version', 'undefined')
- proj_langs = self.flatten_args(args[1:])
- if isinstance(proj_vers, mparser.ElementaryNode):
- proj_vers = proj_vers.value
- if not isinstance(proj_vers, str):
- proj_vers = 'undefined'
- self.project_data = {'descriptive_name': proj_name, 'version': proj_vers}
-
- if os.path.exists(self.option_file):
- oi = optinterpreter.OptionInterpreter(self.subproject)
- oi.process(self.option_file)
- self.coredata.merge_user_options(oi.options)
-
- def_opts = self.flatten_args(kwargs.get('default_options', []))
- self.project_default_options = mesonlib.stringlistify(def_opts)
- self.project_default_options = cdata.create_options_dict(self.project_default_options)
- self.default_options.update(self.project_default_options)
- self.coredata.set_default_options(self.default_options, self.subproject, self.environment.cmd_line_options)
-
- if not self.is_subproject() and 'subproject_dir' in kwargs:
- spdirname = kwargs['subproject_dir']
- if isinstance(spdirname, str):
- self.subproject_dir = spdirname
- if not self.is_subproject():
- self.project_data['subprojects'] = []
- subprojects_dir = os.path.join(self.source_root, self.subproject_dir)
- if os.path.isdir(subprojects_dir):
- for i in os.listdir(subprojects_dir):
- if os.path.isdir(os.path.join(subprojects_dir, i)):
- self.do_subproject(i)
-
- self.coredata.init_backend_options(self.backend)
- options = {k: v for k, v in self.environment.cmd_line_options.items() if k.startswith('backend_')}
-
- self.coredata.set_options(options)
- self.func_add_languages(None, proj_langs, None)
-
- def do_subproject(self, dirname):
- subproject_dir_abs = os.path.join(self.environment.get_source_dir(), self.subproject_dir)
- subpr = os.path.join(subproject_dir_abs, dirname)
- try:
- subi = IntrospectionInterpreter(subpr, '', self.backend, cross_file=self.cross_file, subproject=dirname, subproject_dir=self.subproject_dir, env=self.environment)
- subi.analyze()
- subi.project_data['name'] = dirname
- self.project_data['subprojects'] += [subi.project_data]
- except:
- return
-
- def func_add_languages(self, node, args, kwargs):
- args = self.flatten_args(args)
- need_cross_compiler = self.environment.is_cross_build()
- for lang in sorted(args, key=compilers.sort_clink):
- lang = lang.lower()
- if lang not in self.coredata.compilers:
- self.environment.detect_compilers(lang, need_cross_compiler)
-
- def is_subproject(self):
- return self.subproject != ''
-
- def analyze(self):
- self.load_root_meson_file()
- self.sanity_check_ast()
- self.parse_project()
- self.run()
-
def list_buildoptions_from_source(sourcedir, backend, indent):
# Make sure that log entries in other parts of meson don't interfere with the JSON output
mlog.disable()
@@ -325,7 +204,8 @@ def list_buildoptions(coredata: cdata.CoreData):
add_keys(optlist, core_options, 'core')
add_keys(optlist, coredata.backend_options, 'backend')
add_keys(optlist, coredata.base_options, 'base')
- add_keys(optlist, coredata.compiler_options, 'compiler')
+        # TODO: only build-machine compiler options are listed here; also handle the other machines
+ add_keys(optlist, coredata.compiler_options.build, 'compiler')
add_keys(optlist, dir_options, 'directory')
add_keys(optlist, coredata.user_options, 'user')
add_keys(optlist, test_options, 'test')
diff --git a/mesonbuild/modules/__init__.py b/mesonbuild/modules/__init__.py
index 6b6aa8b..2df4d7c 100644
--- a/mesonbuild/modules/__init__.py
+++ b/mesonbuild/modules/__init__.py
@@ -58,6 +58,10 @@ class GResourceHeaderTarget(build.CustomTarget):
def __init__(self, name, subdir, subproject, kwargs):
super().__init__(name, subdir, subproject, kwargs)
+class GResourceObjectTarget(build.CustomTarget):
+ def __init__(self, name, subdir, subproject, kwargs):
+ super().__init__(name, subdir, subproject, kwargs)
+
class GirTarget(build.CustomTarget):
def __init__(self, name, subdir, subproject, kwargs):
super().__init__(name, subdir, subproject, kwargs)
diff --git a/mesonbuild/modules/gnome.py b/mesonbuild/modules/gnome.py
index 2ab575c..4473bcb 100644
--- a/mesonbuild/modules/gnome.py
+++ b/mesonbuild/modules/gnome.py
@@ -16,6 +16,8 @@
functionality such as gobject-introspection, gresources and gtk-doc'''
import os
+import re
+import sys
import copy
import shlex
import subprocess
@@ -25,11 +27,13 @@ from .. import mlog
from .. import mesonlib
from .. import compilers
from .. import interpreter
-from . import GResourceTarget, GResourceHeaderTarget, GirTarget, TypelibTarget, VapiTarget
+from . import GResourceTarget, GResourceHeaderTarget, GResourceObjectTarget, GirTarget, TypelibTarget, VapiTarget
from . import get_include_args
from . import ExtensionModule
from . import ModuleReturnValue
-from ..mesonlib import MesonException, OrderedSet, Popen_safe, extract_as_list
+from ..mesonlib import (
+ MachineChoice, MesonException, OrderedSet, Popen_safe, extract_as_list
+)
from ..dependencies import Dependency, PkgConfigDependency, InternalDependency
from ..interpreterbase import noKwargs, permittedKwargs, FeatureNew, FeatureNewKwargs
@@ -40,6 +44,8 @@ from ..interpreterbase import noKwargs, permittedKwargs, FeatureNew, FeatureNewK
# https://bugzilla.gnome.org/show_bug.cgi?id=774368
gresource_dep_needed_version = '>= 2.51.1'
+gresource_ld_binary_needed_version = '>= 2.60'
+
native_glib_version = None
girwarning_printed = False
gdbuswarning_printed = False
@@ -164,7 +170,10 @@ class GnomeModule(ExtensionModule):
cmd += ['--sourcedir', source_dir]
if 'c_name' in kwargs:
- cmd += ['--c-name', kwargs.pop('c_name')]
+ c_name = kwargs.pop('c_name')
+ cmd += ['--c-name', c_name]
+ else:
+ c_name = None
export = kwargs.pop('export', False)
if not export:
cmd += ['--internal']
@@ -173,13 +182,19 @@ class GnomeModule(ExtensionModule):
cmd += mesonlib.stringlistify(kwargs.pop('extra_args', []))
+ gresource_ld_binary = False
+ if mesonlib.is_linux() and mesonlib.version_compare(glib_version, gresource_ld_binary_needed_version) and not state.environment.is_cross_build():
+ ld_obj = self.interpreter.find_program_impl('ld', required=False)
+ if ld_obj.found():
+ gresource_ld_binary = True
+
gresource = kwargs.pop('gresource_bundle', False)
- if gresource:
- output = args[0] + '.gresource'
- name = args[0] + '_gresource'
- else:
- output = args[0] + '.c'
- name = args[0] + '_c'
+ if gresource or gresource_ld_binary:
+ g_output = args[0] + '.gresource'
+ g_name = args[0] + '_gresource'
+
+ output = args[0] + '.c'
+ name = args[0] + '_c'
if kwargs.get('install', False) and not gresource:
raise MesonException('The install kwarg only applies to gresource bundles, see install_header')
@@ -193,18 +208,44 @@ class GnomeModule(ExtensionModule):
kwargs['input'] = args[1]
kwargs['output'] = output
kwargs['depends'] = depends
+ if gresource or gresource_ld_binary:
+ g_kwargs = copy.deepcopy(kwargs)
+ g_kwargs['input'] = args[1]
+ g_kwargs['output'] = g_output
+ g_kwargs['depends'] = depends
if not mesonlib.version_compare(glib_version, gresource_dep_needed_version):
# This will eventually go out of sync if dependencies are added
kwargs['depend_files'] = depend_files
- kwargs['command'] = cmd
+ if gresource_ld_binary:
+ kwargs['command'] = copy.copy(cmd) + ['--external-data']
+ else:
+ kwargs['command'] = cmd
+ if gresource or gresource_ld_binary:
+ # This will eventually go out of sync if dependencies are added
+ g_kwargs['depend_files'] = depend_files
+ g_kwargs['command'] = cmd
else:
depfile = kwargs['output'] + '.d'
- kwargs['depfile'] = depfile
- kwargs['command'] = copy.copy(cmd) + ['--dependency-file', '@DEPFILE@']
- target_c = GResourceTarget(name, state.subdir, state.subproject, kwargs)
+ if gresource_ld_binary:
+ depfile2 = kwargs['output'] + '.2.d'
+ kwargs['depfile'] = depfile2
+ kwargs['command'] = copy.copy(cmd) + ['--external-data', '--dependency-file', '@DEPFILE@']
+ else:
+ kwargs['depfile'] = depfile
+ kwargs['command'] = copy.copy(cmd) + ['--dependency-file', '@DEPFILE@']
+ if gresource or gresource_ld_binary:
+ g_kwargs['depfile'] = depfile
+ g_kwargs['command'] = copy.copy(cmd) + ['--dependency-file', '@DEPFILE@']
+
+ if gresource or gresource_ld_binary:
+ target_g = GResourceTarget(g_name, state.subdir, state.subproject, g_kwargs)
+ if gresource: # Only one target for .gresource files
+ if target_g.get_id() not in self.interpreter.build.targets:
+ return ModuleReturnValue(target_g, [target_g])
+ else:
+ return ModuleReturnValue(target_g, [])
- if gresource: # Only one target for .gresource files
- return ModuleReturnValue(target_c, [target_c])
+ target_c = GResourceTarget(name, state.subdir, state.subproject, kwargs)
h_kwargs = {
'command': cmd,
@@ -220,9 +261,99 @@ class GnomeModule(ExtensionModule):
h_kwargs['install_dir'] = kwargs.get('install_dir',
state.environment.coredata.get_builtin_option('includedir'))
target_h = GResourceHeaderTarget(args[0] + '_h', state.subdir, state.subproject, h_kwargs)
- rv = [target_c, target_h]
+
+ if gresource_ld_binary:
+ return self._create_gresource_ld_binary_targets(args, state, ifile, ld_obj, c_name, target_g, g_output, target_c, target_h)
+ else:
+ rv = [target_c, target_h]
+
return ModuleReturnValue(rv, rv)
+ def _create_gresource_ld_binary_targets(self, args, state, ifile, ld_obj, c_name, target_g, g_output, target_c, target_h):
+ if c_name is None:
+            # Create a proper C identifier from the filename, the way glib-compile-resources does
+ c_name = os.path.basename(ifile).partition('.')[0]
+ c_name = c_name.replace('-', '_')
+ c_name = re.sub(r'^([^(_a-zA-Z)])+', '', c_name)
+ c_name = re.sub(r'([^(_a-zA-Z0-9)])', '', c_name)
+
+ c_name_no_underscores = re.sub(r'^_+', '', c_name)
+
+ ld = ld_obj.get_command()
+ objcopy_object = self.interpreter.find_program_impl('objcopy', required=False)
+ if objcopy_object.found():
+ objcopy = objcopy_object.get_command()
+ else:
+ objcopy = None
+
+ o_kwargs = {
+ 'command': [ld, '-r', '-b', 'binary', '@INPUT@', '-o', '@OUTPUT@'],
+ 'input': target_g,
+ 'output': args[0] + '1.o'
+ }
+
+ target_o = GResourceObjectTarget(args[0] + '1_o', state.subdir, state.subproject, o_kwargs)
+
+ builddir = os.path.join(state.environment.get_build_dir(), state.subdir)
+ linkerscript_name = args[0] + '_map.ld'
+ linkerscript_path = os.path.join(builddir, linkerscript_name)
+ linkerscript_file = open(linkerscript_path, 'w')
+
+        # Create the symbol name the way bfd does
+ binary_name = os.path.join(state.subdir, g_output)
+ encoding = sys.getfilesystemencoding()
+ symbol_name = re.sub(rb'([^(_a-zA-Z0-9)])', b'_', binary_name.encode(encoding)).decode(encoding)
+
+ linkerscript_string = '''SECTIONS
+{{
+ .gresource.{} : ALIGN(8)
+ {{
+ {}_resource_data = _binary_{}_start;
+ }}
+ .data :
+ {{
+ *(.data)
+ }}
+}}'''.format(c_name_no_underscores, c_name, symbol_name)
+
+ linkerscript_file.write(linkerscript_string)
+
+ o2_kwargs = {
+ 'command': [ld, '-r', '-T', os.path.join(state.subdir, linkerscript_name), '@INPUT@', '-o', '@OUTPUT@'],
+ 'input': target_o,
+ 'output': args[0] + '2.o',
+ }
+ target_o2 = GResourceObjectTarget(args[0] + '2_o', state.subdir, state.subproject, o2_kwargs)
+
+ if objcopy is not None:
+ objcopy_cmd = [objcopy, '--set-section-flags', '.gresource.' + c_name + '=readonly,alloc,load,data']
+ objcopy_cmd += ['-N', '_binary_' + symbol_name + '_start']
+ objcopy_cmd += ['-N', '_binary_' + symbol_name + '_end']
+ objcopy_cmd += ['-N', '_binary_' + symbol_name + '_size']
+ objcopy_cmd += ['@INPUT@', '@OUTPUT@']
+
+ o3_kwargs = {
+ 'command': objcopy_cmd,
+ 'input': target_o2,
+ 'output': args[0] + '3.o'
+ }
+
+ target_o3 = GResourceObjectTarget(args[0] + '3_o', state.subdir, state.subproject, o3_kwargs)
+
+ rv1 = [target_c, target_h, target_o3]
+ if target_g.get_id() not in self.interpreter.build.targets:
+ rv2 = rv1 + [target_g, target_o, target_o2]
+ else:
+ rv2 = rv1 + [target_o, target_o2]
+ else:
+ rv1 = [target_c, target_h, target_o2]
+ if target_g.get_id() not in self.interpreter.build.targets:
+ rv2 = rv1 + [target_g, target_o]
+ else:
+ rv2 = rv1 + [target_o]
+
+ return ModuleReturnValue(rv1, rv2)
+
def _get_gresource_dependencies(self, state, input_file, source_dirs, dependencies):
cmd = ['glib-compile-resources',
@@ -531,11 +662,7 @@ class GnomeModule(ExtensionModule):
ret = []
for lang in langs:
- if state.environment.is_cross_build():
- link_args = state.environment.properties.host.get_external_link_args(lang)
- else:
- link_args = state.environment.coredata.get_external_link_args(lang)
-
+ link_args = state.environment.coredata.get_external_link_args(MachineChoice.HOST, lang)
for link_arg in link_args:
if link_arg.startswith('-L'):
ret.append(link_arg)
@@ -607,9 +734,15 @@ class GnomeModule(ExtensionModule):
if 'b_sanitize' in compiler.base_options:
sanitize = state.environment.coredata.base_options['b_sanitize'].value
cflags += compilers.sanitizer_compile_args(sanitize)
- if 'address' in sanitize.split(','):
- internal_ldflags += ['-lasan'] # This must be first in ldflags
- # FIXME: Linking directly to libasan is not recommended but g-ir-scanner
+ sanitize = sanitize.split(',')
+ # These must be first in ldflags
+ if 'address' in sanitize:
+ internal_ldflags += ['-lasan']
+ if 'thread' in sanitize:
+ internal_ldflags += ['-ltsan']
+ if 'undefined' in sanitize:
+ internal_ldflags += ['-lubsan']
+ # FIXME: Linking directly to lib*san is not recommended but g-ir-scanner
# does not understand -f LDFLAGS. https://bugzilla.gnome.org/show_bug.cgi?id=783892
# ldflags += compilers.sanitizer_link_args(sanitize)
@@ -714,10 +847,7 @@ class GnomeModule(ExtensionModule):
def _get_external_args_for_langs(self, state, langs):
ret = []
for lang in langs:
- if state.environment.is_cross_build():
- ret += state.environment.properties.host.get_external_args(lang)
- else:
- ret += state.environment.coredata.get_external_args(lang)
+ ret += state.environment.coredata.get_external_args(MachineChoice.HOST, lang)
return ret
@staticmethod
@@ -1042,13 +1172,11 @@ This will become a hard error in the future.''')
ldflags.update(internal_ldflags)
ldflags.update(external_ldflags)
+ cflags.update(state.environment.coredata.get_external_args(MachineChoice.HOST, 'c'))
+ ldflags.update(state.environment.coredata.get_external_link_args(MachineChoice.HOST, 'c'))
if state.environment.is_cross_build():
- cflags.update(state.environment.properties.host.get_external_args('c'))
- ldflags.update(state.environment.properties.host.get_external_link_args('c'))
compiler = state.environment.coredata.cross_compilers.get('c')
else:
- cflags.update(state.environment.coredata.get_external_args('c'))
- ldflags.update(state.environment.coredata.get_external_link_args('c'))
compiler = state.environment.coredata.compilers.get('c')
compiler_flags = self._get_langs_compilers_flags(state, [('c', compiler)])
diff --git a/mesonbuild/modules/i18n.py b/mesonbuild/modules/i18n.py
index aeab813..4deb437 100644
--- a/mesonbuild/modules/i18n.py
+++ b/mesonbuild/modules/i18n.py
@@ -102,7 +102,8 @@ class I18nModule(ExtensionModule):
return ModuleReturnValue(ct, [ct])
@FeatureNewKwargs('i18n.gettext', '0.37.0', ['preset'])
- @permittedKwargs({'po_dir', 'data_dirs', 'type', 'languages', 'args', 'preset', 'install'})
+ @FeatureNewKwargs('i18n.gettext', '0.50.0', ['install_dir'])
+ @permittedKwargs({'po_dir', 'data_dirs', 'type', 'languages', 'args', 'preset', 'install', 'install_dir'})
def gettext(self, state, args, kwargs):
if len(args) != 1:
raise coredata.MesonException('Gettext requires one positional argument (package name).')
@@ -151,10 +152,11 @@ class I18nModule(ExtensionModule):
install = kwargs.get('install', True)
if install:
+ install_dir = kwargs.get('install_dir', state.environment.coredata.get_builtin_option('localedir'))
script = state.environment.get_build_command()
args = ['--internal', 'gettext', 'install',
'--subdir=' + state.subdir,
- '--localedir=' + state.environment.coredata.get_builtin_option('localedir'),
+ '--localedir=' + install_dir,
pkg_arg]
if lang_arg:
args.append(lang_arg)
diff --git a/mesonbuild/modules/pkgconfig.py b/mesonbuild/modules/pkgconfig.py
index bc9bff8..8ce28ba 100644
--- a/mesonbuild/modules/pkgconfig.py
+++ b/mesonbuild/modules/pkgconfig.py
@@ -23,6 +23,8 @@ from . import ModuleReturnValue
from . import ExtensionModule
from ..interpreterbase import permittedKwargs, FeatureNew, FeatureNewKwargs
+already_warned_objs = set()
+
class DependenciesHelper:
def __init__(self, name):
self.name = name
@@ -51,16 +53,21 @@ class DependenciesHelper:
self.priv_reqs += self._process_reqs(reqs)
def _check_generated_pc_deprecation(self, obj):
- if hasattr(obj, 'generated_pc_warn'):
- mlog.deprecation('Library', mlog.bold(obj.name), 'was passed to the '
- '"libraries" keyword argument of a previous call '
- 'to generate() method instead of first positional '
- 'argument.', 'Adding', mlog.bold(obj.generated_pc),
- 'to "Requires" field, but this is a deprecated '
- 'behaviour that will change in a future version '
- 'of Meson. Please report the issue if this '
- 'warning cannot be avoided in your case.',
- location=obj.generated_pc_warn)
+ if not hasattr(obj, 'generated_pc_warn'):
+ return
+ name = obj.generated_pc_warn[0]
+ if (name, obj.name) in already_warned_objs:
+ return
+ mlog.deprecation('Library', mlog.bold(obj.name), 'was passed to the '
+ '"libraries" keyword argument of a previous call '
+ 'to generate() method instead of first positional '
+ 'argument.', 'Adding', mlog.bold(obj.generated_pc),
+ 'to "Requires" field, but this is a deprecated '
+ 'behaviour that will change in a future version '
+ 'of Meson. Please report the issue if this '
+ 'warning cannot be avoided in your case.',
+ location=obj.generated_pc_warn[1])
+ already_warned_objs.add((name, obj.name))
def _process_reqs(self, reqs):
'''Returns string names of requirements'''
@@ -239,7 +246,7 @@ class PkgConfigModule(ExtensionModule):
# https://bugs.freedesktop.org/show_bug.cgi?id=103203
if isinstance(value, PurePath):
value = value.as_posix()
- return value.replace(' ', '\ ')
+ return value.replace(' ', r'\ ')
def _make_relative(self, prefix, subdir):
if isinstance(prefix, PurePath):
@@ -438,11 +445,13 @@ class PkgConfigModule(ExtensionModule):
mainlib.generated_pc = filebase
else:
mlog.warning('Already generated a pkg-config file for', mlog.bold(mainlib.name))
- for lib in deps.pub_libs:
- if not isinstance(lib, str) and not hasattr(lib, 'generated_pc'):
- lib.generated_pc = filebase
- lib.generated_pc_warn = types.SimpleNamespace(subdir=state.subdir,
- lineno=state.current_lineno)
+ else:
+ for lib in deps.pub_libs:
+ if not isinstance(lib, str) and not hasattr(lib, 'generated_pc'):
+ lib.generated_pc = filebase
+ location = types.SimpleNamespace(subdir=state.subdir,
+ lineno=state.current_lineno)
+ lib.generated_pc_warn = [name, location]
return ModuleReturnValue(res, [res])
def initialize(*args, **kwargs):
diff --git a/mesonbuild/modules/python.py b/mesonbuild/modules/python.py
index 1d41165..049c457 100644
--- a/mesonbuild/modules/python.py
+++ b/mesonbuild/modules/python.py
@@ -60,6 +60,7 @@ class PythonDependency(ExternalDependency):
self.pkgdep = None
self.variables = python_holder.variables
self.paths = python_holder.paths
+ self.link_libpython = python_holder.link_libpython
if mesonlib.version_compare(self.version, '>= 3.0'):
self.major_version = 3
else:
@@ -149,11 +150,11 @@ class PythonDependency(ExternalDependency):
libdirs = []
largs = self.clib_compiler.find_library(libname, environment, libdirs)
-
- self.is_found = largs is not None
- if self.is_found:
+ if largs is not None:
self.link_args = largs
+ self.is_found = largs is not None or not self.link_libpython
+
inc_paths = mesonlib.OrderedSet([
self.variables.get('INCLUDEPY'),
self.paths.get('include'),
diff --git a/mesonbuild/modules/unstable_cuda.py b/mesonbuild/modules/unstable_cuda.py
new file mode 100644
index 0000000..941b15a
--- /dev/null
+++ b/mesonbuild/modules/unstable_cuda.py
@@ -0,0 +1,259 @@
+# Copyright 2017 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import re
+
+from ..mesonlib import version_compare
+from ..interpreter import CompilerHolder
+from ..compilers import CudaCompiler
+
+from . import ExtensionModule, ModuleReturnValue
+
+from ..interpreterbase import (
+ flatten, permittedKwargs, noKwargs,
+ InvalidArguments, FeatureNew
+)
+
+class CudaModule(ExtensionModule):
+
+ @FeatureNew('CUDA module', '0.50.0')
+ def __init__(self, *args, **kwargs):
+ super().__init__(*args, **kwargs)
+
+ @noKwargs
+ def min_driver_version(self, state, args, kwargs):
+ argerror = InvalidArguments('min_driver_version must have exactly one positional argument: ' +
+ 'an NVCC compiler object, or its version string.')
+
+ if len(args) != 1:
+ raise argerror
+ else:
+ cuda_version = self._version_from_compiler(args[0])
+ if cuda_version == 'unknown':
+ raise argerror
+
+ driver_version_table = [
+ {'cuda_version': '>=10.0.130', 'windows': '411.31', 'linux': '410.48'},
+ {'cuda_version': '>=9.2.148', 'windows': '398.26', 'linux': '396.37'},
+ {'cuda_version': '>=9.2.88', 'windows': '397.44', 'linux': '396.26'},
+ {'cuda_version': '>=9.1.85', 'windows': '391.29', 'linux': '390.46'},
+ {'cuda_version': '>=9.0.76', 'windows': '385.54', 'linux': '384.81'},
+ {'cuda_version': '>=8.0.61', 'windows': '376.51', 'linux': '375.26'},
+ {'cuda_version': '>=8.0.44', 'windows': '369.30', 'linux': '367.48'},
+ {'cuda_version': '>=7.5.16', 'windows': '353.66', 'linux': '352.31'},
+ {'cuda_version': '>=7.0.28', 'windows': '347.62', 'linux': '346.46'},
+ ]
+
+ driver_version = 'unknown'
+ for d in driver_version_table:
+ if version_compare(cuda_version, d['cuda_version']):
+ driver_version = d.get(state.host_machine.system, d['linux'])
+ break
+
+ return ModuleReturnValue(driver_version, [driver_version])
+
+ @permittedKwargs(['detected'])
+ def nvcc_arch_flags(self, state, args, kwargs):
+ nvcc_arch_args = self._validate_nvcc_arch_args(state, args, kwargs)
+ ret = self._nvcc_arch_flags(*nvcc_arch_args)[0]
+ return ModuleReturnValue(ret, [ret])
+
+ @permittedKwargs(['detected'])
+ def nvcc_arch_readable(self, state, args, kwargs):
+ nvcc_arch_args = self._validate_nvcc_arch_args(state, args, kwargs)
+ ret = self._nvcc_arch_flags(*nvcc_arch_args)[1]
+ return ModuleReturnValue(ret, [ret])
+
+ @staticmethod
+ def _break_arch_string(s):
+ s = re.sub('[ \t,;]+', ';', s)
+ s = s.strip(';').split(';')
+ return s
+
+ @staticmethod
+ def _version_from_compiler(c):
+ if isinstance(c, CompilerHolder):
+ c = c.compiler
+ if isinstance(c, CudaCompiler):
+ return c.version
+ if isinstance(c, str):
+ return c
+ return 'unknown'
+
+ def _validate_nvcc_arch_args(self, state, args, kwargs):
+ argerror = InvalidArguments('The first argument must be an NVCC compiler object, or its version string!')
+
+ if len(args) < 1:
+ raise argerror
+ else:
+ cuda_version = self._version_from_compiler(args[0])
+ if cuda_version == 'unknown':
+ raise argerror
+
+ arch_list = [] if len(args) <= 1 else flatten(args[1:])
+ arch_list = [self._break_arch_string(a) for a in arch_list]
+ arch_list = flatten(arch_list)
+ if len(arch_list) > 1 and not set(arch_list).isdisjoint({'All', 'Common', 'Auto'}):
+ raise InvalidArguments('''The special architectures 'All', 'Common' and 'Auto' must appear alone, as a positional argument!''')
+ arch_list = arch_list[0] if len(arch_list) == 1 else arch_list
+
+ detected = flatten([kwargs.get('detected', [])])
+ detected = [self._break_arch_string(a) for a in detected]
+ detected = flatten(detected)
+ if not set(detected).isdisjoint({'All', 'Common', 'Auto'}):
+ raise InvalidArguments('''The special architectures 'All', 'Common' and 'Auto' must appear alone, as a positional argument!''')
+
+ return cuda_version, arch_list, detected
+
+ def _nvcc_arch_flags(self, cuda_version, cuda_arch_list='Auto', detected=''):
+ """
+ Using the CUDA Toolkit version (the NVCC version) and the target
+ architectures, compute the NVCC architecture flags.
+ """
+
+ cuda_known_gpu_architectures = ['Fermi', 'Kepler', 'Maxwell'] # noqa: E221
+ cuda_common_gpu_architectures = ['3.0', '3.5', '5.0'] # noqa: E221
+ cuda_limit_gpu_architecture = None # noqa: E221
+ cuda_all_gpu_architectures = ['3.0', '3.2', '3.5', '5.0'] # noqa: E221
+
+ if version_compare(cuda_version, '<7.0'):
+ cuda_limit_gpu_architecture = '5.2'
+
+ if version_compare(cuda_version, '>=7.0'):
+ cuda_known_gpu_architectures += ['Kepler+Tegra', 'Kepler+Tesla', 'Maxwell+Tegra'] # noqa: E221
+ cuda_common_gpu_architectures += ['5.2'] # noqa: E221
+
+ if version_compare(cuda_version, '<8.0'):
+ cuda_common_gpu_architectures += ['5.2+PTX'] # noqa: E221
+ cuda_limit_gpu_architecture = '6.0' # noqa: E221
+
+ if version_compare(cuda_version, '>=8.0'):
+ cuda_known_gpu_architectures += ['Pascal', 'Pascal+Tegra'] # noqa: E221
+ cuda_common_gpu_architectures += ['6.0', '6.1'] # noqa: E221
+ cuda_all_gpu_architectures += ['6.0', '6.1', '6.2'] # noqa: E221
+
+ if version_compare(cuda_version, '<9.0'):
+ cuda_common_gpu_architectures += ['6.1+PTX'] # noqa: E221
+ cuda_limit_gpu_architecture = '7.0' # noqa: E221
+
+ if version_compare(cuda_version, '>=9.0'):
+ cuda_known_gpu_architectures += ['Volta', 'Volta+Tegra'] # noqa: E221
+ cuda_common_gpu_architectures += ['7.0', '7.0+PTX'] # noqa: E221
+ cuda_all_gpu_architectures += ['7.0', '7.0+PTX', '7.2', '7.2+PTX'] # noqa: E221
+
+ if version_compare(cuda_version, '<10.0'):
+ cuda_limit_gpu_architecture = '7.5'
+
+ if version_compare(cuda_version, '>=10.0'):
+ cuda_known_gpu_architectures += ['Turing'] # noqa: E221
+ cuda_common_gpu_architectures += ['7.5', '7.5+PTX'] # noqa: E221
+ cuda_all_gpu_architectures += ['7.5', '7.5+PTX'] # noqa: E221
+
+ if version_compare(cuda_version, '<11.0'):
+ cuda_limit_gpu_architecture = '8.0'
+
+ if not cuda_arch_list:
+ cuda_arch_list = 'Auto'
+
+ if cuda_arch_list == 'All': # noqa: E271
+ cuda_arch_list = cuda_known_gpu_architectures
+ elif cuda_arch_list == 'Common': # noqa: E271
+ cuda_arch_list = cuda_common_gpu_architectures
+ elif cuda_arch_list == 'Auto': # noqa: E271
+ if detected:
+ if isinstance(detected, list):
+ cuda_arch_list = detected
+ else:
+ cuda_arch_list = self._break_arch_string(detected)
+
+ if cuda_limit_gpu_architecture:
+ filtered_cuda_arch_list = []
+ for arch in cuda_arch_list:
+ if arch:
+ if version_compare(arch, '>=' + cuda_limit_gpu_architecture):
+ arch = cuda_common_gpu_architectures[-1]
+ if arch not in filtered_cuda_arch_list:
+ filtered_cuda_arch_list.append(arch)
+ cuda_arch_list = filtered_cuda_arch_list
+ else:
+ cuda_arch_list = cuda_common_gpu_architectures
+ elif isinstance(cuda_arch_list, str):
+ cuda_arch_list = self._break_arch_string(cuda_arch_list)
+
+ cuda_arch_list = sorted([x for x in set(cuda_arch_list) if x])
+
+ cuda_arch_bin = []
+ cuda_arch_ptx = []
+ for arch_name in cuda_arch_list:
+ arch_bin = []
+ arch_ptx = []
+ add_ptx = arch_name.endswith('+PTX')
+ if add_ptx:
+ arch_name = arch_name[:-len('+PTX')]
+
+ if re.fullmatch('[0-9]+\\.[0-9](\\([0-9]+\\.[0-9]\\))?', arch_name):
+ arch_bin, arch_ptx = [arch_name], [arch_name]
+ else:
+ arch_bin, arch_ptx = {
+ 'Fermi': (['2.0', '2.1(2.0)'], []),
+ 'Kepler+Tegra': (['3.2'], []),
+ 'Kepler+Tesla': (['3.7'], []),
+ 'Kepler': (['3.0', '3.5'], ['3.5']),
+ 'Maxwell+Tegra': (['5.3'], []),
+ 'Maxwell': (['5.0', '5.2'], ['5.2']),
+ 'Pascal': (['6.0', '6.1'], ['6.1']),
+ 'Pascal+Tegra': (['6.2'], []),
+ 'Volta': (['7.0'], ['7.0']),
+ 'Volta+Tegra': (['7.2'], []),
+ 'Turing': (['7.5'], ['7.5']),
+ }.get(arch_name, (None, None))
+
+ if arch_bin is None:
+ raise InvalidArguments('Unknown CUDA Architecture Name {}!'
+ .format(arch_name))
+
+ cuda_arch_bin += arch_bin
+
+ if add_ptx:
+ if not arch_ptx:
+ arch_ptx = arch_bin
+ cuda_arch_ptx += arch_ptx
+
+ cuda_arch_bin = re.sub('\\.', '', ' '.join(cuda_arch_bin))
+ cuda_arch_ptx = re.sub('\\.', '', ' '.join(cuda_arch_ptx))
+ cuda_arch_bin = re.findall('[0-9()]+', cuda_arch_bin)
+ cuda_arch_ptx = re.findall('[0-9]+', cuda_arch_ptx)
+ cuda_arch_bin = sorted(list(set(cuda_arch_bin)))
+ cuda_arch_ptx = sorted(list(set(cuda_arch_ptx)))
+
+ nvcc_flags = []
+ nvcc_archs_readable = []
+
+ for arch in cuda_arch_bin:
+ m = re.match('([0-9]+)\\(([0-9]+)\\)', arch)
+ if m:
+ nvcc_flags += ['-gencode', 'arch=compute_' + m[2] + ',code=sm_' + m[1]]
+ nvcc_archs_readable += ['sm_' + m[1]]
+ else:
+ nvcc_flags += ['-gencode', 'arch=compute_' + arch + ',code=sm_' + arch]
+ nvcc_archs_readable += ['sm_' + arch]
+
+ for arch in cuda_arch_ptx:
+ nvcc_flags += ['-gencode', 'arch=compute_' + arch + ',code=compute_' + arch]
+ nvcc_archs_readable += ['compute_' + arch]
+
+ return nvcc_flags, nvcc_archs_readable
+
+def initialize(*args, **kwargs):
+ return CudaModule(*args, **kwargs)
diff --git a/mesonbuild/mparser.py b/mesonbuild/mparser.py
index fd8052e..ed0dc1b 100644
--- a/mesonbuild/mparser.py
+++ b/mesonbuild/mparser.py
@@ -37,7 +37,7 @@ class MesonUnicodeDecodeError(MesonException):
def decode_match(match):
try:
return codecs.decode(match.group(0), 'unicode_escape')
- except UnicodeDecodeError as err:
+ except UnicodeDecodeError:
raise MesonUnicodeDecodeError(match.group(0))
class ParseException(MesonException):
@@ -212,7 +212,15 @@ This will become a hard error in a future Meson release.""", self.getline(line_s
if not matched:
raise ParseException('lexer', self.getline(line_start), lineno, col)
-class ElementaryNode:
+class BaseNode:
+ def accept(self, visitor):
+ fname = 'visit_{}'.format(type(self).__name__)
+ if hasattr(visitor, fname):
+ func = getattr(visitor, fname)
+ if hasattr(func, '__call__'):
+ func(self)
+
+class ElementaryNode(BaseNode):
def __init__(self, token):
self.lineno = token.lineno
self.subdir = token.subdir
@@ -253,28 +261,28 @@ class ContinueNode(ElementaryNode):
class BreakNode(ElementaryNode):
pass
-class ArrayNode:
- def __init__(self, args):
+class ArrayNode(BaseNode):
+ def __init__(self, args, lineno, colno):
self.subdir = args.subdir
- self.lineno = args.lineno
- self.colno = args.colno
+ self.lineno = lineno
+ self.colno = colno
self.args = args
-class DictNode:
- def __init__(self, args):
+class DictNode(BaseNode):
+ def __init__(self, args, lineno, colno):
self.subdir = args.subdir
- self.lineno = args.lineno
- self.colno = args.colno
+ self.lineno = lineno
+ self.colno = colno
self.args = args
-class EmptyNode:
+class EmptyNode(BaseNode):
def __init__(self, lineno, colno):
self.subdir = ''
self.lineno = lineno
self.colno = colno
self.value = None
-class OrNode:
+class OrNode(BaseNode):
def __init__(self, left, right):
self.subdir = left.subdir
self.lineno = left.lineno
@@ -282,7 +290,7 @@ class OrNode:
self.left = left
self.right = right
-class AndNode:
+class AndNode(BaseNode):
def __init__(self, left, right):
self.subdir = left.subdir
self.lineno = left.lineno
@@ -290,7 +298,7 @@ class AndNode:
self.left = left
self.right = right
-class ComparisonNode:
+class ComparisonNode(BaseNode):
def __init__(self, ctype, left, right):
self.lineno = left.lineno
self.colno = left.colno
@@ -299,7 +307,7 @@ class ComparisonNode:
self.right = right
self.ctype = ctype
-class ArithmeticNode:
+class ArithmeticNode(BaseNode):
def __init__(self, operation, left, right):
self.subdir = left.subdir
self.lineno = left.lineno
@@ -308,21 +316,21 @@ class ArithmeticNode:
self.right = right
self.operation = operation
-class NotNode:
+class NotNode(BaseNode):
def __init__(self, location_node, value):
self.subdir = location_node.subdir
self.lineno = location_node.lineno
self.colno = location_node.colno
self.value = value
-class CodeBlockNode:
+class CodeBlockNode(BaseNode):
def __init__(self, location_node):
self.subdir = location_node.subdir
self.lineno = location_node.lineno
self.colno = location_node.colno
self.lines = []
-class IndexNode:
+class IndexNode(BaseNode):
def __init__(self, iobject, index):
self.iobject = iobject
self.index = index
@@ -330,7 +338,7 @@ class IndexNode:
self.lineno = iobject.lineno
self.colno = iobject.colno
-class MethodNode:
+class MethodNode(BaseNode):
def __init__(self, subdir, lineno, colno, source_object, name, args):
self.subdir = subdir
self.lineno = lineno
@@ -340,7 +348,7 @@ class MethodNode:
assert(isinstance(self.name, str))
self.args = args
-class FunctionNode:
+class FunctionNode(BaseNode):
def __init__(self, subdir, lineno, colno, func_name, args):
self.subdir = subdir
self.lineno = lineno
@@ -349,7 +357,7 @@ class FunctionNode:
assert(isinstance(func_name, str))
self.args = args
-class AssignmentNode:
+class AssignmentNode(BaseNode):
def __init__(self, lineno, colno, var_name, value):
self.lineno = lineno
self.colno = colno
@@ -357,7 +365,7 @@ class AssignmentNode:
assert(isinstance(var_name, str))
self.value = value
-class PlusAssignmentNode:
+class PlusAssignmentNode(BaseNode):
def __init__(self, lineno, colno, var_name, value):
self.lineno = lineno
self.colno = colno
@@ -365,7 +373,7 @@ class PlusAssignmentNode:
assert(isinstance(var_name, str))
self.value = value
-class ForeachClauseNode:
+class ForeachClauseNode(BaseNode):
def __init__(self, lineno, colno, varnames, items, block):
self.lineno = lineno
self.colno = colno
@@ -373,28 +381,28 @@ class ForeachClauseNode:
self.items = items
self.block = block
-class IfClauseNode:
+class IfClauseNode(BaseNode):
def __init__(self, lineno, colno):
self.lineno = lineno
self.colno = colno
self.ifs = []
self.elseblock = EmptyNode(lineno, colno)
-class UMinusNode:
+class UMinusNode(BaseNode):
def __init__(self, current_location, value):
self.subdir = current_location.subdir
self.lineno = current_location.lineno
self.colno = current_location.colno
self.value = value
-class IfNode:
+class IfNode(BaseNode):
def __init__(self, lineno, colno, condition, block):
self.lineno = lineno
self.colno = colno
self.condition = condition
self.block = block
-class TernaryNode:
+class TernaryNode(BaseNode):
def __init__(self, lineno, colno, condition, trueblock, falseblock):
self.lineno = lineno
self.colno = colno
@@ -402,7 +410,7 @@ class TernaryNode:
self.trueblock = trueblock
self.falseblock = falseblock
-class ArgumentNode:
+class ArgumentNode(BaseNode):
def __init__(self, token):
self.lineno = token.lineno
self.colno = token.colno
@@ -630,11 +638,11 @@ class Parser:
elif self.accept('lbracket'):
args = self.args()
self.block_expect('rbracket', block_start)
- return ArrayNode(args)
+ return ArrayNode(args, block_start.lineno, block_start.colno)
elif self.accept('lcurl'):
key_values = self.key_values()
self.block_expect('rcurl', block_start)
- return DictNode(key_values)
+ return DictNode(key_values, block_start.lineno, block_start.colno)
else:
return self.e9()
diff --git a/mesonbuild/mtest.py b/mesonbuild/mtest.py
index b4bd4f2..6536558 100644
--- a/mesonbuild/mtest.py
+++ b/mesonbuild/mtest.py
@@ -438,6 +438,8 @@ class TestHarness:
current = self.build_data.test_setups[full_name]
if not options.gdb:
options.gdb = current.gdb
+ if options.gdb:
+ options.verbose = True
if options.timeout_multiplier is None:
options.timeout_multiplier = current.timeout_multiplier
# if options.env is None:
@@ -693,18 +695,17 @@ Timeout: %4d
for _ in range(self.options.repeat):
for i, test in enumerate(tests):
visible_name = self.get_pretty_suite(test)
+ single_test = self.get_test_runner(test)
- if not test.is_parallel or self.options.gdb:
+ if not test.is_parallel or single_test.options.gdb:
self.drain_futures(futures)
futures = []
- single_test = self.get_test_runner(test)
res = single_test.run()
self.process_test_result(res)
self.print_stats(numlen, tests, visible_name, res, i)
else:
if not executor:
executor = conc.ThreadPoolExecutor(max_workers=self.options.num_processes)
- single_test = self.get_test_runner(test)
f = executor.submit(single_test.run)
futures.append((f, numlen, tests, visible_name, i))
if self.options.repeat > 1 and self.fail_count:
diff --git a/mesonbuild/rewriter.py b/mesonbuild/rewriter.py
index 37ed7ef..277835c 100644
--- a/mesonbuild/rewriter.py
+++ b/mesonbuild/rewriter.py
@@ -23,36 +23,295 @@
# - move targets
# - reindent?
-import mesonbuild.astinterpreter
+from .ast import IntrospectionInterpreter, build_target_functions, AstIDGenerator, AstIndentationGenerator, AstPrinter
from mesonbuild.mesonlib import MesonException
-from mesonbuild import mlog
-import sys, traceback
+from . import mlog, mparser, environment
+from functools import wraps
+from pprint import pprint
+import json, os
+
+class RewriterException(MesonException):
+ pass
def add_arguments(parser):
parser.add_argument('--sourcedir', default='.',
help='Path to source directory.')
- parser.add_argument('--target', default=None,
- help='Name of target to edit.')
- parser.add_argument('--filename', default=None,
- help='Name of source file to add or remove to target.')
- parser.add_argument('commands', nargs='+')
+ parser.add_argument('-p', '--print', action='store_true', default=False, dest='print',
+ help='Print the parsed AST.')
+ parser.add_argument('command', type=str)
+
+class RequiredKeys:
+ def __init__(self, keys):
+ self.keys = keys
+
+ def __call__(self, f):
+ @wraps(f)
+ def wrapped(*wrapped_args, **wrapped_kwargs):
+ assert(len(wrapped_args) >= 2)
+ cmd = wrapped_args[1]
+ for key, val in self.keys.items():
+ typ = val[0] # The type of the value
+ default = val[1] # The default value -- None is required
+ choices = val[2] # Valid choices -- None is for everything
+ if key not in cmd:
+ if default is not None:
+ cmd[key] = default
+ else:
+ raise RewriterException('Key "{}" is missing in object for {}'
+ .format(key, f.__name__))
+ if not isinstance(cmd[key], typ):
+ raise RewriterException('Invalid type of "{}". Required is {} but provided was {}'
+ .format(key, typ.__name__, type(cmd[key]).__name__))
+ if choices is not None:
+ assert(isinstance(choices, list))
+ if cmd[key] not in choices:
+ raise RewriterException('Invalid value of "{}": Possible values are {} but provided was "{}"'
+ .format(key, choices, cmd[key]))
+ return f(*wrapped_args, **wrapped_kwargs)
+
+ return wrapped
+
+rewriter_keys = {
+ 'target': {
+ 'target': (str, None, None),
+ 'operation': (str, None, ['src_add', 'src_rm', 'test']),
+ 'sources': (list, [], None),
+ 'debug': (bool, False, None)
+ }
+}
+
+class Rewriter:
+ def __init__(self, sourcedir: str, generator: str = 'ninja'):
+ self.sourcedir = sourcedir
+ self.interpreter = IntrospectionInterpreter(sourcedir, '', generator)
+ self.id_generator = AstIDGenerator()
+ self.modefied_nodes = []
+ self.functions = {
+ 'target': self.process_target,
+ }
+
+ def analyze_meson(self):
+ mlog.log('Analyzing meson file:', mlog.bold(os.path.join(self.sourcedir, environment.build_filename)))
+ self.interpreter.analyze()
+ mlog.log(' -- Project:', mlog.bold(self.interpreter.project_data['descriptive_name']))
+ mlog.log(' -- Version:', mlog.cyan(self.interpreter.project_data['version']))
+ self.interpreter.ast.accept(AstIndentationGenerator())
+ self.interpreter.ast.accept(self.id_generator)
+
+ def find_target(self, target: str):
+ for i in self.interpreter.targets:
+ if target == i['name'] or target == i['id']:
+ return i
+ return None
+
+ @RequiredKeys(rewriter_keys['target'])
+ def process_target(self, cmd):
+ mlog.log('Processing target', mlog.bold(cmd['target']), 'operation', mlog.cyan(cmd['operation']))
+ target = self.find_target(cmd['target'])
+ if target is None:
+ mlog.error('Unknown target "{}" --> skipping'.format(cmd['target']))
+ if cmd['debug']:
+ pprint(self.interpreter.targets)
+ return
+ if cmd['debug']:
+ pprint(target)
+
+ # Utility function to get a list of the sources from a node
+ def arg_list_from_node(n):
+ args = []
+ if isinstance(n, mparser.FunctionNode):
+ args = list(n.args.arguments)
+ if n.func_name in build_target_functions:
+ args.pop(0)
+ elif isinstance(n, mparser.ArrayNode):
+ args = n.args.arguments
+ elif isinstance(n, mparser.ArgumentNode):
+ args = n.arguments
+ return args
+
+ if cmd['operation'] == 'src_add':
+ node = None
+ if target['sources']:
+ node = target['sources'][0]
+ else:
+ node = target['node']
+ assert(node is not None)
+
+ # Generate the new String nodes
+ to_append = []
+ for i in cmd['sources']:
+ mlog.log(' -- Adding source', mlog.green(i), 'at',
+ mlog.yellow('{}:{}'.format(os.path.join(node.subdir, environment.build_filename), node.lineno)))
+ token = mparser.Token('string', node.subdir, 0, 0, 0, None, i)
+ to_append += [mparser.StringNode(token)]
+
+ # Append to the AST at the right place
+ if isinstance(node, mparser.FunctionNode):
+ node.args.arguments += to_append
+ elif isinstance(node, mparser.ArrayNode):
+ node.args.arguments += to_append
+ elif isinstance(node, mparser.ArgumentNode):
+ node.arguments += to_append
+
+ # Mark the node as modified
+ if node not in self.modefied_nodes:
+ self.modefied_nodes += [node]
+
+ elif cmd['operation'] == 'src_rm':
+ # Helper to find the exact string node and its parent
+ def find_node(src):
+ for i in target['sources']:
+ for j in arg_list_from_node(i):
+ if isinstance(j, mparser.StringNode):
+ if j.value == src:
+ return i, j
+ return None, None
+
+ for i in cmd['sources']:
+ # Try to find the node with the source string
+ root, string_node = find_node(i)
+ if root is None:
+ mlog.warning(' -- Unable to find source', mlog.green(i), 'in the target')
+ continue
+
+ # Remove the found string node from the argument list
+ arg_node = None
+ if isinstance(root, mparser.FunctionNode):
+ arg_node = root.args
+ if isinstance(root, mparser.ArrayNode):
+ arg_node = root.args
+ if isinstance(root, mparser.ArgumentNode):
+ arg_node = root
+ assert(arg_node is not None)
+ mlog.log(' -- Removing source', mlog.green(i), 'from',
+ mlog.yellow('{}:{}'.format(os.path.join(string_node.subdir, environment.build_filename), string_node.lineno)))
+ arg_node.arguments.remove(string_node)
+
+ # Mark the node as modified
+ if root not in self.modefied_nodes:
+ self.modefied_nodes += [root]
+
+ elif cmd['operation'] == 'test':
+ # List all sources in the target
+ src_list = []
+ for i in target['sources']:
+ for j in arg_list_from_node(i):
+ if isinstance(j, mparser.StringNode):
+ src_list += [j.value]
+ test_data = {
+ 'name': target['name'],
+ 'sources': src_list
+ }
+ mlog.log(' !! target {}={}'.format(target['id'], json.dumps(test_data)))
+
+ def process(self, cmd):
+ if 'type' not in cmd:
+ raise RewriterException('Command has no key "type"')
+ if cmd['type'] not in self.functions:
+ raise RewriterException('Unknown command "{}". Supported commands are: {}'
+ .format(cmd['type'], list(self.functions.keys())))
+ self.functions[cmd['type']](cmd)
+
+ def apply_changes(self):
+ assert(all(hasattr(x, 'lineno') and hasattr(x, 'colno') and hasattr(x, 'subdir') for x in self.modefied_nodes))
+ assert(all(isinstance(x, (mparser.ArrayNode, mparser.FunctionNode)) for x in self.modefied_nodes))
+ # Sort based on line and column in reversed order
+ work_nodes = list(sorted(self.modefied_nodes, key=lambda x: (x.lineno, x.colno), reverse=True))
+
+ # Generating the new replacement string
+ str_list = []
+ for i in work_nodes:
+ printer = AstPrinter()
+ i.accept(printer)
+ printer.post_process()
+ data = {
+ 'file': os.path.join(i.subdir, environment.build_filename),
+ 'str': printer.result.strip(),
+ 'node': i
+ }
+ str_list += [data]
+
+ # Load build files
+ files = {}
+ for i in str_list:
+ if i['file'] in files:
+ continue
+ fpath = os.path.realpath(os.path.join(self.sourcedir, i['file']))
+ fdata = ''
+ with open(fpath, 'r') as fp:
+ fdata = fp.read()
+
+ # Generate line offsets numbers
+ m_lines = fdata.splitlines(True)
+ offset = 0
+ line_offsets = []
+ for j in m_lines:
+ line_offsets += [offset]
+ offset += len(j)
+
+ files[i['file']] = {
+ 'path': fpath,
+ 'raw': fdata,
+ 'offsets': line_offsets
+ }
+
+ # Replace in source code
+ for i in str_list:
+ offsets = files[i['file']]['offsets']
+ raw = files[i['file']]['raw']
+ node = i['node']
+ line = node.lineno - 1
+ col = node.colno
+ start = offsets[line] + col
+ end = start
+ if isinstance(node, mparser.ArrayNode):
+ if raw[end] != '[':
+ mlog.warning('Internal error: expected "[" at {}:{} but got "{}"'.format(line, col, raw[end]))
+ continue
+ counter = 1
+ while counter > 0:
+ end += 1
+ if raw[end] == '[':
+ counter += 1
+ elif raw[end] == ']':
+ counter -= 1
+ end += 1
+ elif isinstance(node, mparser.FunctionNode):
+ while raw[end] != '(':
+ end += 1
+ end += 1
+ counter = 1
+ while counter > 0:
+ end += 1
+ if raw[end] == '(':
+ counter += 1
+ elif raw[end] == ')':
+ counter -= 1
+ end += 1
+ raw = files[i['file']]['raw'] = raw[:start] + i['str'] + raw[end:]
+
+ # Write the files back
+ for key, val in files.items():
+ mlog.log('Rewriting', mlog.yellow(key))
+ with open(val['path'], 'w') as fp:
+ fp.write(val['raw'])
def run(options):
- if options.target is None or options.filename is None:
- sys.exit("Must specify both target and filename.")
- print('This tool is highly experimental, use with care.')
- rewriter = mesonbuild.astinterpreter.RewriterInterpreter(options.sourcedir, '')
- try:
- if options.commands[0] == 'add':
- rewriter.add_source(options.target, options.filename)
- elif options.commands[0] == 'remove':
- rewriter.remove_source(options.target, options.filename)
- else:
- sys.exit('Unknown command: ' + options.commands[0])
- except Exception as e:
- if isinstance(e, MesonException):
- mlog.exception(e)
- else:
- traceback.print_exc()
- return 1
+ rewriter = Rewriter(options.sourcedir)
+ rewriter.analyze_meson()
+ if os.path.exists(options.command):
+ with open(options.command, 'r') as fp:
+ commands = json.load(fp)
+ else:
+ commands = json.loads(options.command)
+
+ if not isinstance(commands, list):
+ raise TypeError('Command is not a list')
+
+ for i in commands:
+ if not isinstance(i, dict):
+ raise TypeError('Command is not a dict')
+ rewriter.process(i)
+
+ rewriter.apply_changes()
return 0
diff --git a/mesonbuild/scripts/dist.py b/mesonbuild/scripts/dist.py
index f49492c..a8d9674 100644
--- a/mesonbuild/scripts/dist.py
+++ b/mesonbuild/scripts/dist.py
@@ -188,7 +188,8 @@ def run(args):
dist_name = build.project_name + '-' + build.project_version
- if os.path.isdir(os.path.join(src_root, '.git')):
+ _git = os.path.join(src_root, '.git')
+ if os.path.isdir(_git) or os.path.isfile(_git):
names = create_dist_git(dist_name, src_root, bld_root, dist_sub, build.dist_scripts)
elif os.path.isdir(os.path.join(src_root, '.hg')):
names = create_dist_hg(dist_name, src_root, bld_root, dist_sub, build.dist_scripts)
diff --git a/mesonbuild/wrap/wrap.py b/mesonbuild/wrap/wrap.py
index f4134d3..e5d0a71 100644
--- a/mesonbuild/wrap/wrap.py
+++ b/mesonbuild/wrap/wrap.py
@@ -349,7 +349,7 @@ class Resolver:
if os.path.exists(dst_file):
try:
os.remove(dst_file)
- except PermissionError as exc:
+ except PermissionError:
os.chmod(dst_file, stat.S_IWUSR)
os.remove(dst_file)
shutil.copy2(src_file, dst_dir)
diff --git a/run_project_tests.py b/run_project_tests.py
index 4c6ca3b..d10f3a2 100755
--- a/run_project_tests.py
+++ b/run_project_tests.py
@@ -496,6 +496,10 @@ def skippable(suite, test):
return 'BOOST_ROOT' not in os.environ
return False
+ # Qt is provided on macOS by Homebrew
+ if test.endswith('4 qt') and mesonlib.is_osx():
+ return False
+
# Other framework tests are allowed to be skipped on other platforms
return True
diff --git a/run_tests.py b/run_tests.py
index 25a2d7f..d72546b 100755
--- a/run_tests.py
+++ b/run_tests.py
@@ -66,7 +66,7 @@ class FakeCompilerOptions:
def __init__(self):
self.value = []
-def get_fake_options(prefix):
+def get_fake_options(prefix=''):
import argparse
opts = argparse.Namespace()
opts.cross_file = None
@@ -76,11 +76,12 @@ def get_fake_options(prefix):
opts.native_file = []
return opts
-def get_fake_env(sdir, bdir, prefix, opts = None):
+def get_fake_env(sdir='', bdir=None, prefix='', opts=None):
if opts is None:
opts = get_fake_options(prefix)
env = Environment(sdir, bdir, opts)
- env.coredata.compiler_options['c_args'] = FakeCompilerOptions()
+ env.coredata.compiler_options.host['c_args'] = FakeCompilerOptions()
+ env.machines.host.cpu_family = 'x86_64' # Used on macOS inside find_library
return env
@@ -213,6 +214,14 @@ def run_mtest_inprocess(commandlist):
sys.stderr = old_stderr
return returncode, mystdout.getvalue(), mystderr.getvalue()
+def clear_meson_configure_class_caches():
+ mesonbuild.compilers.CCompiler.library_dirs_cache = {}
+ mesonbuild.compilers.CCompiler.program_dirs_cache = {}
+ mesonbuild.compilers.CCompiler.find_library_cache = {}
+ mesonbuild.compilers.CCompiler.find_framework_cache = {}
+ mesonbuild.dependencies.PkgConfigDependency.pkgbin_cache = {}
+ mesonbuild.dependencies.PkgConfigDependency.class_pkgbin = mesonlib.PerMachine(None, None, None)
+
def run_configure_inprocess(commandlist):
old_stdout = sys.stdout
sys.stdout = mystdout = StringIO()
@@ -223,6 +232,7 @@ def run_configure_inprocess(commandlist):
finally:
sys.stdout = old_stdout
sys.stderr = old_stderr
+ clear_meson_configure_class_caches()
return returncode, mystdout.getvalue(), mystderr.getvalue()
def run_configure_external(full_command):
diff --git a/run_unittests.py b/run_unittests.py
index d97ae7e..a244bbd 100755
--- a/run_unittests.py
+++ b/run_unittests.py
@@ -33,6 +33,7 @@ from configparser import ConfigParser
from contextlib import contextmanager
from glob import glob
from pathlib import (PurePath, Path)
+from distutils.dir_util import copy_tree
import mesonbuild.mlog
import mesonbuild.compilers
@@ -41,10 +42,11 @@ import mesonbuild.mesonlib
import mesonbuild.coredata
import mesonbuild.modules.gnome
from mesonbuild.interpreter import Interpreter, ObjectHolder
+from mesonbuild.ast import AstInterpreter
from mesonbuild.mesonlib import (
is_windows, is_osx, is_cygwin, is_dragonflybsd, is_openbsd, is_haiku,
windows_proof_rmtree, python_command, version_compare,
- BuildDirLock, Version
+ BuildDirLock, Version, PerMachine
)
from mesonbuild.environment import detect_ninja
from mesonbuild.mesonlib import MesonException, EnvironmentException
@@ -103,6 +105,16 @@ def _git_init(project_dir):
subprocess.check_call(['git', 'commit', '-a', '-m', 'I am a project'], cwd=project_dir,
stdout=subprocess.DEVNULL)
+@functools.lru_cache()
+def is_real_gnu_compiler(path):
+ '''
+ Check if the gcc we have is a real gcc and not a macOS wrapper around clang
+ '''
+ if not path:
+ return False
+ out = subprocess.check_output([path, '--version'], universal_newlines=True, stderr=subprocess.STDOUT)
+ return 'Free Software Foundation' in out
+
def skipIfNoExecutable(exename):
'''
Skip this test if the given executable is not found.
@@ -152,7 +164,7 @@ def skip_if_not_language(lang):
@functools.wraps(func)
def wrapped(*args, **kwargs):
try:
- env = get_fake_env('', '', '')
+ env = get_fake_env()
f = getattr(env, 'detect_{}_compiler'.format(lang))
if lang in ['cs', 'vala', 'java', 'swift']:
f()
@@ -164,14 +176,23 @@ def skip_if_not_language(lang):
return wrapped
return wrapper
-def skip_if_env_value(value):
+def skip_if_env_set(key):
+ '''
+ Skip a test if a particular env is set, except when running under CI
+ '''
def wrapper(func):
@functools.wraps(func)
def wrapped(*args, **kwargs):
- if value in os.environ:
- raise unittest.SkipTest(
- 'Environment variable "{}" set, skipping.'.format(value))
- return func(*args, **kwargs)
+ old = None
+ if key in os.environ:
+ if not is_ci():
+ raise unittest.SkipTest('Env var {!r} set, skipping'.format(key))
+ old = os.environ.pop(key)
+ try:
+ return func(*args, **kwargs)
+ finally:
+ if old is not None:
+ os.environ[key] = old
return wrapped
return wrapper
@@ -183,7 +204,7 @@ def skip_if_not_base_option(feature):
def actual(f):
@functools.wraps(f)
def wrapped(*args, **kwargs):
- env = get_fake_env('', '', '')
+ env = get_fake_env()
cc = env.detect_c_compiler(False)
if feature not in cc.base_options:
raise unittest.SkipTest(
@@ -210,6 +231,32 @@ def temp_filename():
except OSError:
pass
+@contextmanager
+def no_pkgconfig():
+ '''
+ A context manager that overrides shutil.which and ExternalProgram to force
+ them to return None for pkg-config to simulate it not existing.
+ '''
+ old_which = shutil.which
+ old_search = ExternalProgram._search
+
+ def new_search(self, name, search_dir):
+ if name == 'pkg-config':
+ return [None]
+ return old_search(self, name, search_dir)
+
+ def new_which(cmd, *kwargs):
+ if cmd == 'pkg-config':
+ return None
+ return old_which(cmd, *kwargs)
+
+ shutil.which = new_which
+ ExternalProgram._search = new_search
+ try:
+ yield
+ finally:
+ shutil.which = old_which
+ ExternalProgram._search = old_search
class PatchModule:
'''
@@ -561,9 +608,9 @@ class InternalTests(unittest.TestCase):
config.write(configfile)
configfile.flush()
configfile.close()
- opts = get_fake_options('')
+ opts = get_fake_options()
opts.cross_file = configfilename
- env = get_fake_env('', '', '', opts)
+ env = get_fake_env(opts=opts)
detected_value = env.need_exe_wrapper()
os.unlink(configfilename)
@@ -576,9 +623,9 @@ class InternalTests(unittest.TestCase):
configfilename = configfile.name
config.write(configfile)
configfile.close()
- opts = get_fake_options('')
+ opts = get_fake_options()
opts.cross_file = configfilename
- env = get_fake_env('', '', '', opts)
+ env = get_fake_env(opts=opts)
forced_value = env.need_exe_wrapper()
os.unlink(configfilename)
@@ -698,7 +745,7 @@ class InternalTests(unittest.TestCase):
'windows-mingw': {'shared': ('lib{}.dll.a', 'lib{}.lib', 'lib{}.dll',
'{}.dll.a', '{}.lib', '{}.dll'),
'static': msvc_static}}
- env = get_fake_env('', '', '')
+ env = get_fake_env()
cc = env.detect_c_compiler(False)
if is_osx():
self._test_all_naming(cc, env, patterns, 'darwin')
@@ -735,27 +782,38 @@ class InternalTests(unittest.TestCase):
https://github.com/mesonbuild/meson/issues/3951
'''
+ def create_static_lib(name):
+ if not is_osx():
+ name.open('w').close()
+ return
+ src = name.with_suffix('.c')
+ out = name.with_suffix('.o')
+ with src.open('w') as f:
+ f.write('int meson_foobar (void) { return 0; }')
+ subprocess.check_call(['clang', '-c', str(src), '-o', str(out)])
+ subprocess.check_call(['ar', 'csr', str(name), str(out)])
+
with tempfile.TemporaryDirectory() as tmpdir:
pkgbin = ExternalProgram('pkg-config', command=['pkg-config'], silent=True)
- env = get_fake_env('', '', '')
+ env = get_fake_env()
compiler = env.detect_c_compiler(False)
env.coredata.compilers = {'c': compiler}
- env.coredata.compiler_options['c_link_args'] = FakeCompilerOptions()
+ env.coredata.compiler_options.host['c_link_args'] = FakeCompilerOptions()
p1 = Path(tmpdir) / '1'
p2 = Path(tmpdir) / '2'
p1.mkdir()
p2.mkdir()
# libfoo.a is in one prefix
- (p1 / 'libfoo.a').open('w').close()
+ create_static_lib(p1 / 'libfoo.a')
# libbar.a is in both prefixes
- (p1 / 'libbar.a').open('w').close()
- (p2 / 'libbar.a').open('w').close()
+ create_static_lib(p1 / 'libbar.a')
+ create_static_lib(p2 / 'libbar.a')
# Ensure that we never statically link to these
- (p1 / 'libpthread.a').open('w').close()
- (p1 / 'libm.a').open('w').close()
- (p1 / 'libc.a').open('w').close()
- (p1 / 'libdl.a').open('w').close()
- (p1 / 'librt.a').open('w').close()
+ create_static_lib(p1 / 'libpthread.a')
+ create_static_lib(p1 / 'libm.a')
+ create_static_lib(p1 / 'libc.a')
+ create_static_lib(p1 / 'libdl.a')
+ create_static_lib(p1 / 'librt.a')
def fake_call_pkgbin(self, args, env=None):
if '--libs' not in args:
@@ -769,30 +827,31 @@ class InternalTests(unittest.TestCase):
old_call = PkgConfigDependency._call_pkgbin
old_check = PkgConfigDependency.check_pkgconfig
- old_pkgbin = PkgConfigDependency.class_pkgbin
PkgConfigDependency._call_pkgbin = fake_call_pkgbin
PkgConfigDependency.check_pkgconfig = lambda x, _: pkgbin
# Test begins
- kwargs = {'required': True, 'silent': True}
- foo_dep = PkgConfigDependency('foo', env, kwargs)
- self.assertEqual(foo_dep.get_link_args(),
- [(p1 / 'libfoo.a').as_posix(), (p2 / 'libbar.a').as_posix()])
- bar_dep = PkgConfigDependency('bar', env, kwargs)
- self.assertEqual(bar_dep.get_link_args(), [(p2 / 'libbar.a').as_posix()])
- internal_dep = PkgConfigDependency('internal', env, kwargs)
- if compiler.get_argument_syntax() == 'msvc':
- self.assertEqual(internal_dep.get_link_args(), [])
- else:
- link_args = internal_dep.get_link_args()
- for link_arg in link_args:
- for lib in ('pthread', 'm', 'c', 'dl', 'rt'):
- self.assertNotIn('lib{}.a'.format(lib), link_arg, msg=link_args)
- # Test ends
- PkgConfigDependency._call_pkgbin = old_call
- PkgConfigDependency.check_pkgconfig = old_check
- # Reset dependency class to ensure that in-process configure doesn't mess up
- PkgConfigDependency.pkgbin_cache = {}
- PkgConfigDependency.class_pkgbin = old_pkgbin
+ try:
+ kwargs = {'required': True, 'silent': True}
+ foo_dep = PkgConfigDependency('foo', env, kwargs)
+ self.assertEqual(foo_dep.get_link_args(),
+ [(p1 / 'libfoo.a').as_posix(), (p2 / 'libbar.a').as_posix()])
+ bar_dep = PkgConfigDependency('bar', env, kwargs)
+ self.assertEqual(bar_dep.get_link_args(), [(p2 / 'libbar.a').as_posix()])
+ internal_dep = PkgConfigDependency('internal', env, kwargs)
+ if compiler.get_argument_syntax() == 'msvc':
+ self.assertEqual(internal_dep.get_link_args(), [])
+ else:
+ link_args = internal_dep.get_link_args()
+ for link_arg in link_args:
+ for lib in ('pthread', 'm', 'c', 'dl', 'rt'):
+ self.assertNotIn('lib{}.a'.format(lib), link_arg, msg=link_args)
+ finally:
+ # Test ends
+ PkgConfigDependency._call_pkgbin = old_call
+ PkgConfigDependency.check_pkgconfig = old_check
+ # Reset dependency class to ensure that in-process configure doesn't mess up
+ PkgConfigDependency.pkgbin_cache = {}
+ PkgConfigDependency.class_pkgbin = PerMachine(None, None, None)
def test_version_compare(self):
comparefunc = mesonbuild.mesonlib.version_compare_many
@@ -942,7 +1001,7 @@ class DataTests(unittest.TestCase):
with open('docs/markdown/Builtin-options.md') as f:
md = f.read()
self.assertIsNotNone(md)
- env = get_fake_env('', '', '')
+ env = get_fake_env()
# FIXME: Support other compilers
cc = env.detect_c_compiler(False)
cpp = env.detect_cpp_compiler(False)
@@ -988,13 +1047,23 @@ class DataTests(unittest.TestCase):
Ensure that syntax highlighting files were updated for new functions in
the global namespace in build files.
'''
- env = get_fake_env('', '', '')
+ env = get_fake_env()
interp = Interpreter(FakeBuild(env), mock=True)
with open('data/syntax-highlighting/vim/syntax/meson.vim') as f:
res = re.search(r'syn keyword mesonBuiltin(\s+\\\s\w+)+', f.read(), re.MULTILINE)
defined = set([a.strip() for a in res.group().split('\\')][1:])
self.assertEqual(defined, set(chain(interp.funcs.keys(), interp.builtin.keys())))
+ def test_all_functions_defined_in_ast_interpreter(self):
+ '''
+ Ensure that the all functions defined in the Interpreter are also defined
+ in the AstInterpreter (and vice versa).
+ '''
+ env = get_fake_env()
+ interp = Interpreter(FakeBuild(env), mock=True)
+ astint = AstInterpreter('.', '')
+ self.assertEqual(set(interp.funcs.keys()), set(astint.funcs.keys()))
+
class BasePlatformTests(unittest.TestCase):
def setUp(self):
@@ -1014,6 +1083,7 @@ class BasePlatformTests(unittest.TestCase):
self.mconf_command = self.meson_command + ['configure']
self.mintro_command = self.meson_command + ['introspect']
self.wrap_command = self.meson_command + ['wrap']
+ self.rewrite_command = self.meson_command + ['rewrite']
# Backend-specific build commands
self.build_command, self.clean_command, self.test_command, self.install_command, \
self.uninstall_command = get_backend_commands(self.backend)
@@ -1022,6 +1092,7 @@ class BasePlatformTests(unittest.TestCase):
self.vala_test_dir = os.path.join(src_root, 'test cases/vala')
self.framework_test_dir = os.path.join(src_root, 'test cases/frameworks')
self.unit_test_dir = os.path.join(src_root, 'test cases/unit')
+ self.rewrite_test_dir = os.path.join(src_root, 'test cases/rewrite')
# Misc stuff
self.orig_env = os.environ.copy()
if self.backend is Backend.ninja:
@@ -2179,7 +2250,7 @@ int main(int argc, char **argv) {
self.assertPathExists(os.path.join(testdir, i))
def detect_prebuild_env(self):
- env = get_fake_env('', self.builddir, self.prefix)
+ env = get_fake_env()
cc = env.detect_c_compiler(False)
stlinker = env.detect_static_linker(cc)
if mesonbuild.mesonlib.is_windows():
@@ -2866,10 +2937,10 @@ recommended as it is not supported on some platforms''')
# c_args value should be parsed with shlex
self.init(testdir, extra_args=['-Dc_args=foo bar "one two"'])
obj = mesonbuild.coredata.load(self.builddir)
- self.assertEqual(obj.compiler_options['c_args'].value, ['foo', 'bar', 'one two'])
+ self.assertEqual(obj.compiler_options.host['c_args'].value, ['foo', 'bar', 'one two'])
self.setconf('-Dc_args="foo bar" one two')
obj = mesonbuild.coredata.load(self.builddir)
- self.assertEqual(obj.compiler_options['c_args'].value, ['foo bar', 'one', 'two'])
+ self.assertEqual(obj.compiler_options.host['c_args'].value, ['foo bar', 'one', 'two'])
self.wipe()
# Setting a 2nd time the same option should override the first value
@@ -2882,7 +2953,7 @@ recommended as it is not supported on some platforms''')
self.assertEqual(obj.builtins['bindir'].value, 'bar')
self.assertEqual(obj.builtins['buildtype'].value, 'release')
self.assertEqual(obj.base_options['b_sanitize'].value, 'thread')
- self.assertEqual(obj.compiler_options['c_args'].value, ['bar'])
+ self.assertEqual(obj.compiler_options.host['c_args'].value, ['bar'])
self.setconf(['--bindir=bar', '--bindir=foo',
'-Dbuildtype=release', '-Dbuildtype=plain',
'-Db_sanitize=thread', '-Db_sanitize=address',
@@ -2891,7 +2962,7 @@ recommended as it is not supported on some platforms''')
self.assertEqual(obj.builtins['bindir'].value, 'foo')
self.assertEqual(obj.builtins['buildtype'].value, 'plain')
self.assertEqual(obj.base_options['b_sanitize'].value, 'address')
- self.assertEqual(obj.compiler_options['c_args'].value, ['foo'])
+ self.assertEqual(obj.compiler_options.host['c_args'].value, ['foo'])
self.wipe()
except KeyError:
# Ignore KeyError, it happens on CI for compilers that does not
@@ -3280,7 +3351,7 @@ recommended as it is not supported on some platforms''')
# Check buildsystem_files
bs_files = ['meson.build', 'sharedlib/meson.build', 'staticlib/meson.build']
bs_files = [os.path.join(testdir, x) for x in bs_files]
- self.assertPathListEqual(res['buildsystem_files'], bs_files)
+ self.assertPathListEqual(list(sorted(res['buildsystem_files'])), list(sorted(bs_files)))
# Check dependencies
dependencies_to_find = ['threads']
@@ -3390,7 +3461,7 @@ class FailureTests(BasePlatformTests):
and slows down testing.
'''
dnf = "[Dd]ependency.*not found(:.*)?"
- nopkg = '[Pp]kg-config not found'
+ nopkg = '[Pp]kg-config.*not found'
def setUp(self):
super().setUp()
@@ -3471,16 +3542,29 @@ class FailureTests(BasePlatformTests):
self.assertMesonRaises("dependency('appleframeworks')",
"requires at least one module")
+ def test_extraframework_dependency_method(self):
+ code = "dependency('python', method : 'extraframework')"
+ if not is_osx():
+ self.assertMesonRaises(code, self.dnf)
+ else:
+ # Python2 framework is always available on macOS
+ self.assertMesonOutputs(code, '[Dd]ependency.*python.*found.*YES')
+
def test_sdl2_notfound_dependency(self):
# Want to test failure, so skip if available
if shutil.which('sdl2-config'):
raise unittest.SkipTest('sdl2-config found')
self.assertMesonRaises("dependency('sdl2', method : 'sdlconfig')", self.dnf)
if shutil.which('pkg-config'):
- errmsg = self.dnf
- else:
- errmsg = self.nopkg
- self.assertMesonRaises("dependency('sdl2', method : 'pkg-config')", errmsg)
+ self.assertMesonRaises("dependency('sdl2', method : 'pkg-config')", self.dnf)
+ with no_pkgconfig():
+ # Look for pkg-config, cache it, then
+ # Use cached pkg-config without erroring out, then
+ # Use cached pkg-config to error out
+ code = "dependency('foobarrr', method : 'pkg-config', required : false)\n" \
+ "dependency('foobarrr2', method : 'pkg-config', required : false)\n" \
+ "dependency('sdl2', method : 'pkg-config')"
+ self.assertMesonRaises(code, self.nopkg)
def test_gnustep_notfound_dependency(self):
# Want to test failure, so skip if available
@@ -3541,7 +3625,7 @@ class FailureTests(BasePlatformTests):
'''
Test that when we can't detect objc or objcpp, we fail gracefully.
'''
- env = get_fake_env('', self.builddir, self.prefix)
+ env = get_fake_env()
try:
env.detect_objc_compiler(False)
env.detect_objcpp_compiler(False)
@@ -3808,6 +3892,7 @@ class DarwinTests(BasePlatformTests):
self.assertIsNotNone(m, msg=out)
return m.groups()
+ @skipIfNoPkgconfig
def test_library_versioning(self):
'''
Ensure that compatibility_version and current_version are set correctly
@@ -4692,9 +4777,9 @@ endian = 'little'
testdir = os.path.join(self.unit_test_dir, '42 rpath order')
self.init(testdir)
if is_osx():
- rpathre = re.compile('-rpath,.*/subprojects/sub1.*-rpath,.*/subprojects/sub2')
+ rpathre = re.compile(r'-rpath,.*/subprojects/sub1.*-rpath,.*/subprojects/sub2')
else:
- rpathre = re.compile('-rpath,\$\$ORIGIN/subprojects/sub1:\$\$ORIGIN/subprojects/sub2')
+ rpathre = re.compile(r'-rpath,\$\$ORIGIN/subprojects/sub1:\$\$ORIGIN/subprojects/sub2')
with open(os.path.join(self.builddir, 'build.ninja')) as bfile:
for line in bfile:
if '-rpath' in line:
@@ -4967,68 +5052,115 @@ class PythonTests(BasePlatformTests):
self.wipe()
-class RewriterTests(unittest.TestCase):
+class RewriterTests(BasePlatformTests):
+ data_regex = re.compile(r'^\s*!!\s*(\w+)\s+([^=]+)=(.*)$')
def setUp(self):
super().setUp()
- src_root = os.path.dirname(__file__)
- self.testroot = os.path.realpath(tempfile.mkdtemp())
- self.rewrite_command = python_command + [os.path.join(src_root, 'mesonrewriter.py')]
- self.tmpdir = os.path.realpath(tempfile.mkdtemp())
- self.workdir = os.path.join(self.tmpdir, 'foo')
- self.test_dir = os.path.join(src_root, 'test cases/rewrite')
+ self.maxDiff = None
- def tearDown(self):
- windows_proof_rmtree(self.tmpdir)
+ def prime(self, dirname):
+ copy_tree(os.path.join(self.rewrite_test_dir, dirname), self.builddir)
- def read_contents(self, fname):
- with open(os.path.join(self.workdir, fname)) as f:
- return f.read()
+ def rewrite(self, directory, args):
+ if isinstance(args, str):
+ args = [args]
+ out = subprocess.check_output(self.rewrite_command + ['--sourcedir', directory] + args,
+ universal_newlines=True)
+ return out
- def check_effectively_same(self, mainfile, truth):
- mf = self.read_contents(mainfile)
- t = self.read_contents(truth)
- # Rewriting is not guaranteed to do a perfect job of
- # maintaining whitespace.
- self.assertEqual(mf.replace(' ', ''), t.replace(' ', ''))
+ def extract_test_data(self, out):
+ lines = out.split('\n')
+ result = {}
+ for i in lines:
+ match = RewriterTests.data_regex.match(i)
+ if match:
+ typ = match.group(1)
+ id = match.group(2)
+ data = json.loads(match.group(3))
+ if typ not in result:
+ result[typ] = {}
+ result[typ][id] = data
+ return result
- def prime(self, dirname):
- shutil.copytree(os.path.join(self.test_dir, dirname), self.workdir)
+ def test_target_source_list(self):
+ self.prime('1 basic')
+ out = self.rewrite(self.builddir, os.path.join(self.builddir, 'info.json'))
+ out = self.extract_test_data(out)
+ expected = {
+ 'target': {
+ 'trivialprog1@exe': {'name': 'trivialprog1', 'sources': ['main.cpp', 'fileA.cpp']},
+ 'trivialprog2@exe': {'name': 'trivialprog2', 'sources': ['fileB.cpp', 'fileC.cpp']},
+ 'trivialprog3@exe': {'name': 'trivialprog3', 'sources': ['main.cpp', 'fileA.cpp']},
+ 'trivialprog4@exe': {'name': 'trivialprog4', 'sources': ['main.cpp', 'fileA.cpp']},
+ 'trivialprog5@exe': {'name': 'trivialprog5', 'sources': ['main.cpp', 'fileB.cpp', 'fileC.cpp']},
+ 'trivialprog6@exe': {'name': 'trivialprog6', 'sources': ['main.cpp', 'fileA.cpp']},
+ 'trivialprog7@exe': {'name': 'trivialprog7', 'sources': ['fileB.cpp', 'fileC.cpp', 'main.cpp', 'fileA.cpp']},
+ 'trivialprog8@exe': {'name': 'trivialprog8', 'sources': ['main.cpp', 'fileA.cpp']},
+ 'trivialprog9@exe': {'name': 'trivialprog9', 'sources': ['main.cpp', 'fileA.cpp']},
+ }
+ }
+ self.assertDictEqual(out, expected)
+
+ def test_target_add_sources(self):
+ self.prime('1 basic')
+ out = self.rewrite(self.builddir, os.path.join(self.builddir, 'addSrc.json'))
+ out = self.extract_test_data(out)
+ expected = {
+ 'target': {
+ 'trivialprog1@exe': {'name': 'trivialprog1', 'sources': ['main.cpp', 'fileA.cpp', 'a1.cpp', 'a2.cpp', 'a6.cpp']},
+ 'trivialprog2@exe': {'name': 'trivialprog2', 'sources': ['fileB.cpp', 'fileC.cpp', 'a7.cpp']},
+ 'trivialprog3@exe': {'name': 'trivialprog3', 'sources': ['main.cpp', 'fileA.cpp', 'a5.cpp']},
+ 'trivialprog4@exe': {'name': 'trivialprog4', 'sources': ['main.cpp', 'a5.cpp', 'fileA.cpp']},
+ 'trivialprog5@exe': {'name': 'trivialprog5', 'sources': ['main.cpp', 'a3.cpp', 'fileB.cpp', 'fileC.cpp', 'a7.cpp']},
+ 'trivialprog6@exe': {'name': 'trivialprog6', 'sources': ['main.cpp', 'fileA.cpp', 'a4.cpp']},
+ 'trivialprog7@exe': {'name': 'trivialprog7', 'sources': ['fileB.cpp', 'fileC.cpp', 'main.cpp', 'fileA.cpp', 'a1.cpp', 'a2.cpp', 'a6.cpp']},
+ 'trivialprog8@exe': {'name': 'trivialprog8', 'sources': ['main.cpp', 'fileA.cpp', 'a1.cpp', 'a2.cpp', 'a6.cpp']},
+ 'trivialprog9@exe': {'name': 'trivialprog9', 'sources': ['main.cpp', 'fileA.cpp', 'a1.cpp', 'a2.cpp', 'a6.cpp']},
+ }
+ }
+ self.assertDictEqual(out, expected)
- def test_basic(self):
+ # Check the written file
+ out = self.rewrite(self.builddir, os.path.join(self.builddir, 'info.json'))
+ out = self.extract_test_data(out)
+ self.assertDictEqual(out, expected)
+
+ def test_target_remove_sources(self):
self.prime('1 basic')
- subprocess.check_call(self.rewrite_command + ['remove',
- '--target=trivialprog',
- '--filename=notthere.c',
- '--sourcedir', self.workdir],
- universal_newlines=True)
- self.check_effectively_same('meson.build', 'removed.txt')
- subprocess.check_call(self.rewrite_command + ['add',
- '--target=trivialprog',
- '--filename=notthere.c',
- '--sourcedir', self.workdir],
- universal_newlines=True)
- self.check_effectively_same('meson.build', 'added.txt')
- subprocess.check_call(self.rewrite_command + ['remove',
- '--target=trivialprog',
- '--filename=notthere.c',
- '--sourcedir', self.workdir],
- universal_newlines=True)
- self.check_effectively_same('meson.build', 'removed.txt')
-
- def test_subdir(self):
+ out = self.rewrite(self.builddir, os.path.join(self.builddir, 'rmSrc.json'))
+ out = self.extract_test_data(out)
+ expected = {
+ 'target': {
+ 'trivialprog1@exe': {'name': 'trivialprog1', 'sources': ['main.cpp']},
+ 'trivialprog2@exe': {'name': 'trivialprog2', 'sources': ['fileC.cpp']},
+ 'trivialprog3@exe': {'name': 'trivialprog3', 'sources': ['main.cpp']},
+ 'trivialprog4@exe': {'name': 'trivialprog4', 'sources': ['main.cpp']},
+ 'trivialprog5@exe': {'name': 'trivialprog5', 'sources': ['main.cpp', 'fileC.cpp']},
+ 'trivialprog6@exe': {'name': 'trivialprog6', 'sources': ['main.cpp']},
+ 'trivialprog7@exe': {'name': 'trivialprog7', 'sources': ['fileC.cpp', 'main.cpp']},
+ 'trivialprog8@exe': {'name': 'trivialprog8', 'sources': ['main.cpp']},
+ 'trivialprog9@exe': {'name': 'trivialprog9', 'sources': ['main.cpp']},
+ }
+ }
+ self.assertDictEqual(out, expected)
+
+ # Check the written file
+ out = self.rewrite(self.builddir, os.path.join(self.builddir, 'info.json'))
+ out = self.extract_test_data(out)
+ self.assertDictEqual(out, expected)
+
+ def test_target_subdir(self):
self.prime('2 subdirs')
- top = self.read_contents('meson.build')
- s2 = self.read_contents('sub2/meson.build')
- subprocess.check_call(self.rewrite_command + ['remove',
- '--target=something',
- '--filename=second.c',
- '--sourcedir', self.workdir],
- universal_newlines=True)
- self.check_effectively_same('sub1/meson.build', 'sub1/after.txt')
- self.assertEqual(top, self.read_contents('meson.build'))
- self.assertEqual(s2, self.read_contents('sub2/meson.build'))
+ out = self.rewrite(self.builddir, os.path.join(self.builddir, 'addSrc.json'))
+ out = self.extract_test_data(out)
+ expected = {'name': 'something', 'sources': ['first.c', 'second.c', 'third.c']}
+ self.assertDictEqual(list(out['target'].values())[0], expected)
+ # Check the written file
+ out = self.rewrite(self.builddir, os.path.join(self.builddir, 'info.json'))
+ out = self.extract_test_data(out)
+ self.assertDictEqual(list(out['target'].values())[0], expected)
class NativeFileTests(BasePlatformTests):
@@ -5111,7 +5243,7 @@ class NativeFileTests(BasePlatformTests):
"""Helper for generating tests for overriding compilers for langaugages
with more than one implementation, such as C, C++, ObjC, ObjC++, and D.
"""
- env = get_fake_env('', '', '')
+ env = get_fake_env()
getter = getattr(env, 'detect_{}_compiler'.format(lang))
if lang not in ['cs']:
getter = functools.partial(getter, False)
@@ -5167,59 +5299,59 @@ class NativeFileTests(BasePlatformTests):
self._simple_test('python', 'python')
@unittest.skipIf(is_windows(), 'Setting up multiple compilers on windows is hard')
- @skip_if_env_value('CC')
+ @skip_if_env_set('CC')
def test_c_compiler(self):
def cb(comp):
if comp.id == 'gcc':
if not shutil.which('clang'):
raise unittest.SkipTest('Only one compiler found, cannot test.')
return 'clang', 'clang'
- if not shutil.which('gcc'):
+ if not is_real_gnu_compiler(shutil.which('gcc')):
raise unittest.SkipTest('Only one compiler found, cannot test.')
return 'gcc', 'gcc'
self.helper_for_compiler('c', cb)
@unittest.skipIf(is_windows(), 'Setting up multiple compilers on windows is hard')
- @skip_if_env_value('CXX')
+ @skip_if_env_set('CXX')
def test_cpp_compiler(self):
def cb(comp):
if comp.id == 'gcc':
if not shutil.which('clang++'):
raise unittest.SkipTest('Only one compiler found, cannot test.')
return 'clang++', 'clang'
- if not shutil.which('g++'):
+ if not is_real_gnu_compiler(shutil.which('g++')):
raise unittest.SkipTest('Only one compiler found, cannot test.')
return 'g++', 'gcc'
self.helper_for_compiler('cpp', cb)
@skip_if_not_language('objc')
- @skip_if_env_value('OBJC')
+ @skip_if_env_set('OBJC')
def test_objc_compiler(self):
def cb(comp):
if comp.id == 'gcc':
if not shutil.which('clang'):
raise unittest.SkipTest('Only one compiler found, cannot test.')
return 'clang', 'clang'
- if not shutil.which('gcc'):
+ if not is_real_gnu_compiler(shutil.which('gcc')):
raise unittest.SkipTest('Only one compiler found, cannot test.')
return 'gcc', 'gcc'
self.helper_for_compiler('objc', cb)
@skip_if_not_language('objcpp')
- @skip_if_env_value('OBJCXX')
+ @skip_if_env_set('OBJCXX')
def test_objcpp_compiler(self):
def cb(comp):
if comp.id == 'gcc':
if not shutil.which('clang++'):
raise unittest.SkipTest('Only one compiler found, cannot test.')
return 'clang++', 'clang'
- if not shutil.which('g++'):
+ if not is_real_gnu_compiler(shutil.which('g++')):
raise unittest.SkipTest('Only one compiler found, cannot test.')
return 'g++', 'gcc'
self.helper_for_compiler('objcpp', cb)
@skip_if_not_language('d')
- @skip_if_env_value('DC')
+ @skip_if_env_set('DC')
def test_d_compiler(self):
def cb(comp):
if comp.id == 'dmd':
@@ -5235,7 +5367,7 @@ class NativeFileTests(BasePlatformTests):
self.helper_for_compiler('d', cb)
@skip_if_not_language('cs')
- @skip_if_env_value('CSC')
+ @skip_if_env_set('CSC')
def test_cs_compiler(self):
def cb(comp):
if comp.id == 'csc':
@@ -5248,17 +5380,21 @@ class NativeFileTests(BasePlatformTests):
self.helper_for_compiler('cs', cb)
@skip_if_not_language('fortran')
- @skip_if_env_value('FC')
+ @skip_if_env_set('FC')
def test_fortran_compiler(self):
def cb(comp):
if comp.id == 'gcc':
if shutil.which('ifort'):
return 'ifort', 'intel'
+ elif shutil.which('flang'):
+ return 'flang', 'flang'
+ elif shutil.which('pgfortran'):
+ return 'pgfortran', 'pgi'
# XXX: there are several other fortran compilers meson
# supports, but I don't have any of them to test with
raise unittest.SkipTest('No alternate Fortran implementation.')
if not shutil.which('gfortran'):
- raise unittest.SkipTest('No alternate C# implementation.')
+ raise unittest.SkipTest('No alternate Fortran implementation.')
return 'gfortran', 'gcc'
self.helper_for_compiler('fortran', cb)
@@ -5268,7 +5404,7 @@ class NativeFileTests(BasePlatformTests):
Builds a wrapper around the compiler to override the version.
"""
wrapper = self.helper_create_binary_wrapper(binary, version=version_str)
- env = get_fake_env('', '', '')
+ env = get_fake_env()
getter = getattr(env, 'detect_{}_compiler'.format(lang))
if lang in ['rust']:
getter = functools.partial(getter, False)
@@ -5277,13 +5413,13 @@ class NativeFileTests(BasePlatformTests):
self.assertEqual(compiler.version, version)
@skip_if_not_language('vala')
- @skip_if_env_value('VALAC')
+ @skip_if_env_set('VALAC')
def test_vala_compiler(self):
self._single_implementation_compiler(
'vala', 'valac', 'Vala 1.2345', '1.2345')
@skip_if_not_language('rust')
- @skip_if_env_value('RUSTC')
+ @skip_if_env_set('RUSTC')
def test_rust_compiler(self):
self._single_implementation_compiler(
'rust', 'rustc', 'rustc 1.2345', '1.2345')
@@ -5297,7 +5433,7 @@ class NativeFileTests(BasePlatformTests):
def test_swift_compiler(self):
wrapper = self.helper_create_binary_wrapper(
'swiftc', version='Swift 1.2345', outfile='stderr')
- env = get_fake_env('', '', '')
+ env = get_fake_env()
env.binaries.host.binaries['swift'] = wrapper
compiler = env.detect_swift_compiler()
self.assertEqual(compiler.version, '1.2345')
@@ -5321,7 +5457,7 @@ def should_run_cross_mingw_tests():
def main():
unset_envs()
cases = ['InternalTests', 'DataTests', 'AllPlatformTests', 'FailureTests',
- 'PythonTests', 'NativeFileTests']
+ 'PythonTests', 'NativeFileTests', 'RewriterTests']
if not is_windows():
cases += ['LinuxlikeTests']
if should_run_cross_arm_tests():
diff --git a/setup.py b/setup.py
index f1f2e81..f352960 100644
--- a/setup.py
+++ b/setup.py
@@ -28,6 +28,7 @@ from setuptools import setup
# Other platforms will create bin/meson
entries = {'console_scripts': ['meson=mesonbuild.mesonmain:main']}
packages = ['mesonbuild',
+ 'mesonbuild.ast',
'mesonbuild.backend',
'mesonbuild.compilers',
'mesonbuild.dependencies',
diff --git a/test cases/common/13 pch/c/pch/prog.h b/test cases/common/13 pch/c/pch/prog.h
index 354499a..c89890a 100644
--- a/test cases/common/13 pch/c/pch/prog.h
+++ b/test cases/common/13 pch/c/pch/prog.h
@@ -1 +1,6 @@
+#ifndef PROG_H
+// Header guards for PCH confuse msvc in some situations.
+// Using them here makes sure we handle this correctly.
+#define PROG_H
#include<stdio.h>
+#endif
diff --git a/test cases/common/13 pch/generated/gen_custom.py b/test cases/common/13 pch/generated/gen_custom.py
new file mode 100644
index 0000000..650e03c
--- /dev/null
+++ b/test cases/common/13 pch/generated/gen_custom.py
@@ -0,0 +1,5 @@
+#!/usr/bin/env python3
+import sys
+
+with open(sys.argv[1], 'w') as f:
+ f.write("#define FOO 0")
diff --git a/test cases/common/13 pch/generated/gen_generator.py b/test cases/common/13 pch/generated/gen_generator.py
new file mode 100644
index 0000000..a245e7a
--- /dev/null
+++ b/test cases/common/13 pch/generated/gen_generator.py
@@ -0,0 +1,7 @@
+#!/usr/bin/env python3
+import sys
+
+with open(sys.argv[1]) as f:
+ content = f.read()
+with open(sys.argv[2], 'w') as f:
+ f.write(content)
diff --git a/test cases/common/13 pch/generated/generated_generator.in b/test cases/common/13 pch/generated/generated_generator.in
new file mode 100644
index 0000000..1a00ebd
--- /dev/null
+++ b/test cases/common/13 pch/generated/generated_generator.in
@@ -0,0 +1 @@
+#define BAR 0
diff --git a/test cases/common/13 pch/generated/meson.build b/test cases/common/13 pch/generated/meson.build
new file mode 100644
index 0000000..372a00e
--- /dev/null
+++ b/test cases/common/13 pch/generated/meson.build
@@ -0,0 +1,16 @@
+cc = meson.get_compiler('c')
+cc_id = cc.get_id()
+if cc_id == 'lcc'
+ error('MESON_SKIP_TEST: Elbrus compiler does not support PCH.')
+endif
+
+generated_customTarget = custom_target('makeheader',
+ output: 'generated_customTarget.h',
+ command : [find_program('gen_custom.py'), '@OUTPUT0@'])
+
+generated_generator = generator(find_program('gen_generator.py'),
+ output: '@BASENAME@.h',
+ arguments: ['@INPUT@', '@OUTPUT@'])
+
+exe = executable('prog', 'prog.c', generated_customTarget, generated_generator.process('generated_generator.in'),
+ c_pch: ['pch/prog_pch.c', 'pch/prog.h'])
diff --git a/test cases/common/13 pch/generated/pch/prog.h b/test cases/common/13 pch/generated/pch/prog.h
new file mode 100644
index 0000000..15fec38
--- /dev/null
+++ b/test cases/common/13 pch/generated/pch/prog.h
@@ -0,0 +1,2 @@
+#include "generated_customTarget.h"
+#include "generated_generator.h"
diff --git a/test cases/common/13 pch/generated/pch/prog_pch.c b/test cases/common/13 pch/generated/pch/prog_pch.c
new file mode 100644
index 0000000..4960505
--- /dev/null
+++ b/test cases/common/13 pch/generated/pch/prog_pch.c
@@ -0,0 +1,5 @@
+#if !defined(_MSC_VER)
+#error "This file is only for use with MSVC."
+#endif
+
+#include "prog.h"
diff --git a/test cases/common/13 pch/generated/prog.c b/test cases/common/13 pch/generated/prog.c
new file mode 100644
index 0000000..9b2e2ef
--- /dev/null
+++ b/test cases/common/13 pch/generated/prog.c
@@ -0,0 +1,6 @@
+// No includes here, they need to come from the PCH
+
+int main(int argc, char **argv) {
+ return FOO + BAR;
+}
+
diff --git a/test cases/common/13 pch/meson.build b/test cases/common/13 pch/meson.build
index d39527b..43129c9 100644
--- a/test cases/common/13 pch/meson.build
+++ b/test cases/common/13 pch/meson.build
@@ -2,6 +2,8 @@ project('pch test', 'c', 'cpp')
subdir('c')
subdir('cpp')
+subdir('generated')
+subdir('withIncludeDirectories')
if meson.backend() == 'xcode'
warning('Xcode backend only supports one precompiled header per target. Skipping "mixed" which has various precompiled headers.')
diff --git a/test cases/common/13 pch/withIncludeDirectories/include/lib/lib.h b/test cases/common/13 pch/withIncludeDirectories/include/lib/lib.h
new file mode 100644
index 0000000..53c5fdf
--- /dev/null
+++ b/test cases/common/13 pch/withIncludeDirectories/include/lib/lib.h
@@ -0,0 +1 @@
+#include <stdio.h>
diff --git a/test cases/common/13 pch/withIncludeDirectories/meson.build b/test cases/common/13 pch/withIncludeDirectories/meson.build
new file mode 100644
index 0000000..2ab2cd8
--- /dev/null
+++ b/test cases/common/13 pch/withIncludeDirectories/meson.build
@@ -0,0 +1,9 @@
+cc = meson.get_compiler('c')
+cc_id = cc.get_id()
+if cc_id == 'lcc'
+ error('MESON_SKIP_TEST: Elbrus compiler does not support PCH.')
+endif
+
+exe = executable('prog', 'prog.c',
+ include_directories: 'include',
+ c_pch : ['pch/prog_pch.c', 'pch/prog.h'])
diff --git a/test cases/common/13 pch/withIncludeDirectories/pch/prog.h b/test cases/common/13 pch/withIncludeDirectories/pch/prog.h
new file mode 100644
index 0000000..383b2c5
--- /dev/null
+++ b/test cases/common/13 pch/withIncludeDirectories/pch/prog.h
@@ -0,0 +1 @@
+#include<lib/lib.h>
diff --git a/test cases/common/13 pch/withIncludeDirectories/pch/prog_pch.c b/test cases/common/13 pch/withIncludeDirectories/pch/prog_pch.c
new file mode 100644
index 0000000..4960505
--- /dev/null
+++ b/test cases/common/13 pch/withIncludeDirectories/pch/prog_pch.c
@@ -0,0 +1,5 @@
+#if !defined(_MSC_VER)
+#error "This file is only for use with MSVC."
+#endif
+
+#include "prog.h"
diff --git a/test cases/common/13 pch/withIncludeDirectories/prog.c b/test cases/common/13 pch/withIncludeDirectories/prog.c
new file mode 100644
index 0000000..0ce3d0a
--- /dev/null
+++ b/test cases/common/13 pch/withIncludeDirectories/prog.c
@@ -0,0 +1,10 @@
+// No includes here, they need to come from the PCH
+
+void func() {
+ fprintf(stdout, "This is a function that fails if stdio is not #included.\n");
+}
+
+int main(int argc, char **argv) {
+ return 0;
+}
+
diff --git a/test cases/common/14 configure file/meson.build b/test cases/common/14 configure file/meson.build
index 982ae2a..50393e9 100644
--- a/test cases/common/14 configure file/meson.build
+++ b/test cases/common/14 configure file/meson.build
@@ -141,6 +141,12 @@ cfile = configure_file(input : 'config.h.in',
install_dir : '',
configuration : conf)
+# test install_dir : false (deprecated)
+cfile = configure_file(input : 'config.h.in',
+ output : 'do_not_get_installed_please.h',
+ install_dir : false,
+ configuration : conf)
+
# test intsall_dir with install: false
cfile = configure_file(input : 'config.h.in',
output : 'do_not_get_installed_in_install_dir.h',
diff --git a/test cases/common/190 openmp/meson.build b/test cases/common/190 openmp/meson.build
index f4652db..e446891 100644
--- a/test cases/common/190 openmp/meson.build
+++ b/test cases/common/190 openmp/meson.build
@@ -38,11 +38,13 @@ test('OpenMP C++', execpp, env : env)
if add_languages('fortran', required : false)
- exef = executable('exef',
- 'main.f90',
- dependencies : [openmp])
-
- test('OpenMP Fortran', execpp, env : env)
+ # Mixing compilers (msvc/clang with gfortran) does not seem to work on Windows.
+ if build_machine.system() != 'windows' or cc.get_id() == 'gnu'
+ exef = executable('exef',
+ 'main.f90',
+ dependencies : [openmp])
+ test('OpenMP Fortran', execpp, env : env)
+ endif
endif
# Check we can apply a version constraint
diff --git a/test cases/common/36 run program/meson.build b/test cases/common/36 run program/meson.build
index a05cea3..93897e3 100644
--- a/test cases/common/36 run program/meson.build
+++ b/test cases/common/36 run program/meson.build
@@ -65,6 +65,12 @@ ret = run_command(py3, '-c', 'print("some output")', capture : false)
assert(ret.returncode() == 0, 'failed to run python3: ' + ret.stderr())
assert(ret.stdout() == '', 'stdout is "@0@" instead of empty'.format(ret.stdout()))
+c_env = environment()
+c_env.append('CUSTOM_ENV_VAR', 'FOOBAR')
+ret = run_command(py3, '-c', 'import os; print(os.environ.get("CUSTOM_ENV_VAR"))', env : c_env)
+assert(ret.returncode() == 0, 'failed to run python3: ' + ret.stderr())
+assert(ret.stdout() == 'FOOBAR\n', 'stdout is "@0@" instead of FOOBAR'.format(ret.stdout()))
+
dd = find_program('dd', required : false)
if dd.found()
ret = run_command(dd, 'if=/dev/urandom', 'bs=10', 'count=1', capture: false)
diff --git a/test cases/common/53 custom target/depfile/dep.py b/test cases/common/53 custom target/depfile/dep.py
index aff325b..476e88b 100755
--- a/test cases/common/53 custom target/depfile/dep.py
+++ b/test cases/common/53 custom target/depfile/dep.py
@@ -7,7 +7,7 @@ _, srcdir, depfile, output = sys.argv
depfiles = glob(os.path.join(srcdir, '*'))
-quoted_depfiles = [x.replace(' ', '\ ') for x in depfiles]
+quoted_depfiles = [x.replace(' ', r'\ ') for x in depfiles]
with open(output, 'w') as f:
f.write('I am the result of globbing.')
diff --git a/test cases/cuda/3 cudamodule/meson.build b/test cases/cuda/3 cudamodule/meson.build
new file mode 100644
index 0000000..0dc9489
--- /dev/null
+++ b/test cases/cuda/3 cudamodule/meson.build
@@ -0,0 +1,16 @@
+project('cudamodule', 'cuda', version : '1.0.0')
+
+nvcc = meson.get_compiler('cuda')
+cuda = import('unstable-cuda')
+
+arch_flags = cuda.nvcc_arch_flags(nvcc, 'Auto', detected: ['3.0'])
+arch_readable = cuda.nvcc_arch_readable(nvcc, 'Auto', detected: ['3.0'])
+driver_version = cuda.min_driver_version(nvcc)
+
+message('NVCC version: ' + nvcc.version())
+message('NVCC flags: ' + ' '.join(arch_flags))
+message('NVCC readable: ' + ' '.join(arch_readable))
+message('Driver version: >=' + driver_version)
+
+exe = executable('prog', 'prog.cu', cuda_args: arch_flags)
+test('cudatest', exe)
diff --git a/test cases/cuda/3 cudamodule/prog.cu b/test cases/cuda/3 cudamodule/prog.cu
new file mode 100644
index 0000000..7eab673
--- /dev/null
+++ b/test cases/cuda/3 cudamodule/prog.cu
@@ -0,0 +1,30 @@
+#include <iostream>
+
+int main(int argc, char **argv) {
+ int cuda_devices = 0;
+ std::cout << "CUDA version: " << CUDART_VERSION << "\n";
+ cudaGetDeviceCount(&cuda_devices);
+ if(cuda_devices == 0) {
+ std::cout << "No Cuda hardware found. Exiting.\n";
+ return 0;
+ }
+ std::cout << "This computer has " << cuda_devices << " Cuda device(s).\n";
+ cudaDeviceProp props;
+ cudaGetDeviceProperties(&props, 0);
+ std::cout << "Properties of device 0.\n\n";
+
+ std::cout << " Name: " << props.name << "\n";
+ std::cout << " Global memory: " << props.totalGlobalMem << "\n";
+ std::cout << " Shared memory: " << props.sharedMemPerBlock << "\n";
+ std::cout << " Constant memory: " << props.totalConstMem << "\n";
+ std::cout << " Block registers: " << props.regsPerBlock << "\n";
+
+ std::cout << " Warp size: " << props.warpSize << "\n";
+ std::cout << " Threads per block: " << props.maxThreadsPerBlock << "\n";
+ std::cout << " Max block dimensions: [ " << props.maxThreadsDim[0] << ", " << props.maxThreadsDim[1] << ", " << props.maxThreadsDim[2] << " ]" << "\n";
+ std::cout << " Max grid dimensions: [ " << props.maxGridSize[0] << ", " << props.maxGridSize[1] << ", " << props.maxGridSize[2] << " ]" << "\n";
+ std::cout << "\n";
+
+ return 0;
+}
+
diff --git a/test cases/failing/93 pch source different folder/include/pch.h b/test cases/failing/93 pch source different folder/include/pch.h
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/test cases/failing/93 pch source different folder/include/pch.h
diff --git a/test cases/failing/93 pch source different folder/meson.build b/test cases/failing/93 pch source different folder/meson.build
new file mode 100644
index 0000000..d320717
--- /dev/null
+++ b/test cases/failing/93 pch source different folder/meson.build
@@ -0,0 +1,5 @@
+project('pch', 'c')
+# It is not allowed to have the PCH implementation in a different
+# folder than the header.
+exe = executable('prog', 'prog.c',
+ c_pch : ['include/pch.h', 'src/pch.c'])
diff --git a/test cases/failing/93 pch source different folder/prog.c b/test cases/failing/93 pch source different folder/prog.c
new file mode 100644
index 0000000..c272dab
--- /dev/null
+++ b/test cases/failing/93 pch source different folder/prog.c
@@ -0,0 +1 @@
+int main() {} \ No newline at end of file
diff --git a/test cases/failing/93 pch source different folder/src/pch.c b/test cases/failing/93 pch source different folder/src/pch.c
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/test cases/failing/93 pch source different folder/src/pch.c
diff --git a/test cases/fortran/1 basic/meson.build b/test cases/fortran/1 basic/meson.build
index 833a177..042902f 100644
--- a/test cases/fortran/1 basic/meson.build
+++ b/test cases/fortran/1 basic/meson.build
@@ -1,6 +1,9 @@
project('simple fortran', 'fortran')
-add_global_arguments('-fbounds-check', language : 'fortran')
+fc = meson.get_compiler('fortran')
+if fc.get_id() == 'gcc'
+ add_global_arguments('-fbounds-check', language : 'fortran')
+endif
e = executable('simple', 'simple.f90',
fortran_args : '-ffree-form')
diff --git a/test cases/fortran/10 find library/gzip.f90 b/test cases/fortran/10 find library/gzip.f90
index 2a7e7df..32f21d7 100644
--- a/test cases/fortran/10 find library/gzip.f90
+++ b/test cases/fortran/10 find library/gzip.f90
@@ -1,32 +1,32 @@
module gzip
- interface
- function gzopen(path, mode) bind(C)
- use iso_c_binding, only: c_char, c_ptr
- implicit none
- character(c_char), intent(in) :: path(*), mode(*)
- type(c_ptr) :: gzopen
- end function gzopen
- end interface
-
- interface
- function gzwrite(file, buf, len) bind(C)
- use iso_c_binding, only: c_int, c_ptr
- implicit none
- type(c_ptr), value, intent(in) :: file
- type(*), intent(in) :: buf
- integer(c_int), value, intent(in) :: len
- integer(c_int) :: gzwrite
- end function gzwrite
- end interface
-
- interface
- function gzclose(file) bind(C)
- use iso_c_binding, only: c_int, c_ptr
- implicit none
- type(c_ptr), value, intent(in) :: file
- integer(c_int) :: gzclose
- end function gzclose
- end interface
+use iso_c_binding, only: c_char, c_ptr, c_int
+implicit none
+
+interface
+type(c_ptr) function gzopen(path, mode) bind(C)
+import c_char, c_ptr
+
+character(kind=c_char), intent(in) :: path(*), mode(*)
+end function gzopen
+end interface
+
+interface
+integer(c_int) function gzwrite(file, buf, len) bind(C)
+import c_int, c_ptr, c_char
+
+type(c_ptr), value, intent(in) :: file
+character(kind=c_char), intent(in) :: buf
+integer(c_int), value, intent(in) :: len
+end function gzwrite
+end interface
+
+interface
+integer(c_int) function gzclose(file) bind(C)
+import c_int, c_ptr
+
+type(c_ptr), value, intent(in) :: file
+end function gzclose
+end interface
end module gzip
diff --git a/test cases/fortran/10 find library/main.f90 b/test cases/fortran/10 find library/main.f90
index 2550b44..1f5c039 100644
--- a/test cases/fortran/10 find library/main.f90
+++ b/test cases/fortran/10 find library/main.f90
@@ -1,40 +1,38 @@
-program main
-
- use iso_c_binding, only: c_int, c_char, c_null_char, c_ptr
- use gzip, only: gzopen, gzwrite, gzclose
-
- implicit none
-
- character(kind=c_char,len=*), parameter :: path = &
- c_char_"test.gz"//c_null_char
- character(kind=c_char,len=*), parameter :: mode = &
- c_char_"wb9"//c_null_char
- integer(c_int), parameter :: buffer_size = 512
-
- type(c_ptr) :: file
- character(len=buffer_size) :: buffer
- integer(c_int) :: ret
- integer :: i
-
- ! open file
- file = gzopen(path, mode)
-
- ! fill buffer with data
- do i=1,buffer_size/4
- write(buffer(4*(i-1)+1:4*i), '(i3.3, a)') i, new_line('')
- end do
- ret = gzwrite(file, buffer, buffer_size)
- if (ret.ne.buffer_size) then
- write(*,'(a, i3, a, i3, a)') 'Error: ', ret, ' / ', buffer_size, &
- ' bytes written.'
- stop 1
- end if
-
- ! close file
- ret = gzclose(file)
- if (ret.ne.0) then
- print *, 'Error: failure to close file with error code ', ret
- stop 1
- end if
-
-end program main
+
+use iso_fortran_env, only: stderr=>error_unit
+use iso_c_binding, only: c_int, c_char, c_null_char, c_ptr
+use gzip, only: gzopen, gzwrite, gzclose
+
+implicit none
+
+character(kind=c_char,len=*), parameter :: path = c_char_"test.gz"//c_null_char
+character(kind=c_char,len=*), parameter :: mode = c_char_"wb9"//c_null_char
+integer(c_int), parameter :: buffer_size = 512
+
+type(c_ptr) :: file
+character(kind=c_char, len=buffer_size) :: buffer
+integer(c_int) :: ret
+integer :: i
+
+! open file
+file = gzopen(path, mode)
+
+! fill buffer with data
+do i=1,buffer_size/4
+ write(buffer(4*(i-1)+1:4*i), '(i3.3, a)') i, new_line('')
+end do
+ret = gzwrite(file, buffer, buffer_size)
+if (ret /= buffer_size) then
+ write(stderr,'(a, i3, a, i3, a)') 'Error: ', ret, ' / ', buffer_size, &
+ ' bytes written.'
+ stop 1
+end if
+
+! close file
+ret = gzclose(file)
+if (ret /= 0) then
+ write(stderr,*) 'Error: failure to close file with error code ', ret
+ stop 1
+end if
+
+end program
diff --git a/test cases/fortran/11 compiles links runs/meson.build b/test cases/fortran/11 compiles links runs/meson.build
new file mode 100644
index 0000000..81eb907
--- /dev/null
+++ b/test cases/fortran/11 compiles links runs/meson.build
@@ -0,0 +1,20 @@
+project('compiles_links_runs', 'fortran')
+
+fc = meson.get_compiler('fortran')
+
+code = '''error stop 123; end'''
+
+if not fc.compiles(code)
+ error('Fortran 2008 code failed to compile')
+endif
+
+if not fc.links(code)
+ error('Fortran 2008 code failed to link')
+endif
+
+if fc.run(code).returncode() != 123
+ error('Fortran 2008 code failed to run')
+endif
+
+
+
diff --git a/test cases/fortran/12 submodule/a1.f90 b/test cases/fortran/12 submodule/a1.f90
new file mode 100644
index 0000000..cb44916
--- /dev/null
+++ b/test cases/fortran/12 submodule/a1.f90
@@ -0,0 +1,25 @@
+module a1
+implicit none
+
+interface
+module elemental real function pi2tau(pi)
+ real, intent(in) :: pi
+end function pi2tau
+
+module real function get_pi()
+end function get_pi
+end interface
+
+end module a1
+
+program hierN
+
+use a1
+
+pi = get_pi()
+
+tau = pi2tau(pi)
+
+print *,'pi=',pi,'tau=',tau
+
+end program
diff --git a/test cases/fortran/12 submodule/a2.f90 b/test cases/fortran/12 submodule/a2.f90
new file mode 100644
index 0000000..b3ce1f0
--- /dev/null
+++ b/test cases/fortran/12 submodule/a2.f90
@@ -0,0 +1,10 @@
+submodule (a1) a2
+
+contains
+
+module procedure pi2tau
+ pi2tau = 2*pi
+end procedure pi2tau
+
+
+end submodule a2
diff --git a/test cases/fortran/12 submodule/a3.f90 b/test cases/fortran/12 submodule/a3.f90
new file mode 100644
index 0000000..d6929b0
--- /dev/null
+++ b/test cases/fortran/12 submodule/a3.f90
@@ -0,0 +1,10 @@
+submodule (a1:a2) a3
+
+contains
+
+module procedure get_pi
+ get_pi = 4.*atan(1.)
+end procedure get_pi
+
+
+end submodule a3
diff --git a/test cases/fortran/12 submodule/child.f90 b/test cases/fortran/12 submodule/child.f90
new file mode 100644
index 0000000..aa5bb5e
--- /dev/null
+++ b/test cases/fortran/12 submodule/child.f90
@@ -0,0 +1,10 @@
+submodule (mother) daughter
+
+contains
+
+module procedure pi2tau
+ pi2tau = 2*pi
+end procedure pi2tau
+
+end submodule daughter
+
diff --git a/test cases/fortran/12 submodule/meson.build b/test cases/fortran/12 submodule/meson.build
new file mode 100644
index 0000000..cd62a30
--- /dev/null
+++ b/test cases/fortran/12 submodule/meson.build
@@ -0,0 +1,7 @@
+project('submodule single level', 'fortran')
+
+hier2 = executable('single', 'parent.f90','child.f90')
+test('single-level hierarchy', hier2)
+
+hierN = executable('multi', 'a1.f90', 'a2.f90', 'a3.f90')
+test('multi-level hierarchy', hierN)
diff --git a/test cases/fortran/12 submodule/parent.f90 b/test cases/fortran/12 submodule/parent.f90
new file mode 100644
index 0000000..05fe431
--- /dev/null
+++ b/test cases/fortran/12 submodule/parent.f90
@@ -0,0 +1,23 @@
+module mother
+real, parameter :: pi = 4.*atan(1.)
+real :: tau
+
+interface
+module elemental real function pi2tau(pi)
+ real, intent(in) :: pi
+end function pi2tau
+end interface
+
+contains
+
+end module mother
+
+
+program hier1
+use mother
+
+tau = pi2tau(pi)
+
+print *,'pi=',pi, 'tau=', tau
+
+end program
diff --git a/test cases/fortran/4 self dependency/selfdep.f90 b/test cases/fortran/4 self dependency/selfdep.f90
index a272832..1a71353 100644
--- a/test cases/fortran/4 self dependency/selfdep.f90
+++ b/test cases/fortran/4 self dependency/selfdep.f90
@@ -1,11 +1,18 @@
-MODULE Circle
- REAL, PARAMETER :: Pi = 3.1415927
+MODULE geom
+
+type :: circle
+ REAL :: Pi = 4.*atan(1.)
REAL :: radius
-END MODULE Circle
+end type circle
+END MODULE geom
PROGRAM prog
-use Circle
+use geom, only : circle
IMPLICIT NONE
+type(circle) :: ell
+
+ell%radius = 3.
+
END PROGRAM prog
diff --git a/test cases/fortran/5 static/main.f90 b/test cases/fortran/5 static/main.f90
index dc6454c..6d878cb 100644
--- a/test cases/fortran/5 static/main.f90
+++ b/test cases/fortran/5 static/main.f90
@@ -1,6 +1,6 @@
-program hello
- use static_hello
- implicit none
- call static_say_hello()
-end program hello
+use static_hello
+implicit none
+
+call static_say_hello()
+end program
diff --git a/test cases/fortran/5 static/static_hello.f90 b/test cases/fortran/5 static/static_hello.f90
index 63415b0..5407560 100644
--- a/test cases/fortran/5 static/static_hello.f90
+++ b/test cases/fortran/5 static/static_hello.f90
@@ -1,17 +1,17 @@
module static_hello
- implicit none
+implicit none
- private
- public :: static_say_hello
+private
+public :: static_say_hello
- interface static_say_hello
- module procedure say_hello
- end interface static_say_hello
+interface static_say_hello
+ module procedure say_hello
+end interface static_say_hello
contains
- subroutine say_hello
- print *, "Static library called."
- end subroutine say_hello
+subroutine say_hello
+ print *, "Static library called."
+end subroutine say_hello
end module static_hello
diff --git a/test cases/fortran/6 dynamic/dynamic.f90 b/test cases/fortran/6 dynamic/dynamic.f90
index e78a406..6a1f359 100644
--- a/test cases/fortran/6 dynamic/dynamic.f90
+++ b/test cases/fortran/6 dynamic/dynamic.f90
@@ -1,17 +1,17 @@
module dynamic
- implicit none
+implicit none
- private
- public :: hello
+private
+public :: hello
- interface hello
- module procedure say
- end interface hello
+interface hello
+ module procedure say
+end interface hello
contains
- subroutine say
- print *, "Hello, hello..."
- end subroutine say
+subroutine say
+ print *, "Hello from shared library."
+end subroutine say
end module dynamic
diff --git a/test cases/fortran/6 dynamic/main.f90 b/test cases/fortran/6 dynamic/main.f90
index cb3a53f..fc48bcb 100644
--- a/test cases/fortran/6 dynamic/main.f90
+++ b/test cases/fortran/6 dynamic/main.f90
@@ -1,6 +1,5 @@
-program main
- use dynamic
- implicit none
+use dynamic, only: hello
+implicit none
- call hello()
-end program main
+call hello()
+end program
diff --git a/test cases/fortran/8 module names/mod1.f90 b/test cases/fortran/8 module names/mod1.f90
index 69cc900..29cd9f4 100644
--- a/test cases/fortran/8 module names/mod1.f90
+++ b/test cases/fortran/8 module names/mod1.f90
@@ -1,6 +1,6 @@
module MyMod1
- implicit none
+implicit none
- integer, parameter :: myModVal1 = 1
+integer, parameter :: myModVal1 = 1
end module MyMod1
diff --git a/test cases/fortran/8 module names/mod2.f90 b/test cases/fortran/8 module names/mod2.f90
index 971df44..2087750 100644
--- a/test cases/fortran/8 module names/mod2.f90
+++ b/test cases/fortran/8 module names/mod2.f90
@@ -1,6 +1,6 @@
module mymod2
- implicit none
+implicit none
- integer, parameter :: myModVal2 = 2
+integer, parameter :: myModVal2 = 2
end module mymod2
diff --git a/test cases/fortran/8 module names/test.f90 b/test cases/fortran/8 module names/test.f90
index ff5a545..28847fb 100644
--- a/test cases/fortran/8 module names/test.f90
+++ b/test cases/fortran/8 module names/test.f90
@@ -1,7 +1,8 @@
-program test
- use mymod1
- use MyMod2
+use mymod1
+use MyMod2
- integer, parameter :: testVar = myModVal1 + myModVal2
+implicit none
-end program test
+integer, parameter :: testVar = myModVal1 + myModVal2
+
+end program
diff --git a/test cases/fortran/9 cpp/fortran.f b/test cases/fortran/9 cpp/fortran.f
index e694669..255872c 100644
--- a/test cases/fortran/9 cpp/fortran.f
+++ b/test cases/fortran/9 cpp/fortran.f
@@ -1,5 +1,11 @@
function fortran() bind(C)
- use, intrinsic :: iso_c_binding
- real(kind=c_double) :: fortran
- fortran = 2.0**rand(1)
+ use, intrinsic :: iso_c_binding, only: dp=>c_double
+ implicit none
+
+ real(dp) :: r, fortran
+
+ call random_number(r)
+
+ fortran = 2._dp**r
+
end function fortran
diff --git a/test cases/fortran/9 cpp/meson.build b/test cases/fortran/9 cpp/meson.build
index 93037aa..ad7d4b2 100644
--- a/test cases/fortran/9 cpp/meson.build
+++ b/test cases/fortran/9 cpp/meson.build
@@ -1,12 +1,16 @@
project('C++ and FORTRAN', 'cpp', 'fortran')
cpp = meson.get_compiler('cpp')
+fc = meson.get_compiler('fortran')
if cpp.get_id() == 'clang'
error('MESON_SKIP_TEST Clang C++ does not find -lgfortran for some reason.')
endif
-fc = meson.get_compiler('fortran')
+if build_machine.system() == 'windows' and cpp.get_id() != 'gnu'
+ error('MESON_SKIP_TEST mixing gfortran with non-GNU C++ does not work.')
+endif
+
link_with = []
if fc.get_id() == 'intel'
link_with += fc.find_library('ifport')
@@ -15,7 +19,7 @@ endif
e = executable(
'cppfort',
['main.cpp', 'fortran.f'],
- dependencies : [link_with],
+ dependencies : link_with,
)
test('C++ FORTRAN', e)
diff --git a/test cases/frameworks/17 mpi/meson.build b/test cases/frameworks/17 mpi/meson.build
index 2102b81..2d0e4d3 100644
--- a/test cases/frameworks/17 mpi/meson.build
+++ b/test cases/frameworks/17 mpi/meson.build
@@ -36,11 +36,13 @@ uburesult = run_command(ubudetector)
if uburesult.returncode() != 0 and add_languages('fortran', required : false)
mpifort = dependency('mpi', language : 'fortran')
- exef = executable('exef',
- 'main.f90',
- dependencies : [mpifort])
-
- test('MPI Fortran', exef)
+ # Mixing compilers (msvc/clang with gfortran) does not seem to work on Windows.
+ if build_machine.system() != 'windows' or cc.get_id() == 'gnu'
+ exef = executable('exef',
+ 'main.f90',
+ dependencies : [mpifort])
+ test('MPI Fortran', exef)
+ endif
endif
# Check we can apply a version constraint
diff --git a/test cases/frameworks/25 hdf5/main.c b/test cases/frameworks/25 hdf5/main.c
new file mode 100644
index 0000000..4c46310
--- /dev/null
+++ b/test cases/frameworks/25 hdf5/main.c
@@ -0,0 +1,30 @@
+#include <stdio.h>
+#include <stdlib.h>
+
+#include "hdf5.h"
+
+int main(void)
+{
+herr_t ier;
+unsigned maj, min, rel;
+
+ier = H5open();
+if (ier) {
+ fprintf(stderr,"Unable to initialize HDF5: %d\n", ier);
+ return EXIT_FAILURE;
+}
+
+ier = H5get_libversion(&maj, &min, &rel);
+if (ier) {
+ fprintf(stderr,"HDF5 did not initialize!\n");
+ return EXIT_FAILURE;
+}
+printf("C HDF5 version %d.%d.%d\n", maj, min, rel);
+
+ier = H5close();
+if (ier) {
+ fprintf(stderr,"Unable to close HDF5: %d\n", ier);
+ return EXIT_FAILURE;
+}
+return EXIT_SUCCESS;
+}
diff --git a/test cases/frameworks/25 hdf5/main.cpp b/test cases/frameworks/25 hdf5/main.cpp
new file mode 100644
index 0000000..477e76b
--- /dev/null
+++ b/test cases/frameworks/25 hdf5/main.cpp
@@ -0,0 +1,29 @@
+#include <iostream>
+#include "hdf5.h"
+
+
+int main(void)
+{
+herr_t ier;
+unsigned maj, min, rel;
+
+ier = H5open();
+if (ier) {
+ std::cerr << "Unable to initialize HDF5: " << ier << std::endl;
+ return EXIT_FAILURE;
+}
+
+ier = H5get_libversion(&maj, &min, &rel);
+if (ier) {
+ std::cerr << "HDF5 did not initialize!" << std::endl;
+ return EXIT_FAILURE;
+}
+std::cout << "C++ HDF5 version " << maj << "." << min << "." << rel << std::endl;
+
+ier = H5close();
+if (ier) {
+ std::cerr << "Unable to close HDF5: " << ier << std::endl;
+ return EXIT_FAILURE;
+}
+return EXIT_SUCCESS;
+}
diff --git a/test cases/frameworks/25 hdf5/main.f90 b/test cases/frameworks/25 hdf5/main.f90
new file mode 100644
index 0000000..b21abf1
--- /dev/null
+++ b/test cases/frameworks/25 hdf5/main.f90
@@ -0,0 +1,17 @@
+use hdf5
+
+implicit none
+
+integer :: ier, major, minor, rel
+
+call h5open_f(ier)
+if (ier /= 0) error stop 'Unable to initialize HDF5'
+
+call h5get_libversion_f(major, minor, rel, ier)
+if (ier /= 0) error stop 'Unable to check HDF5 version'
+print '(A,I1,A1,I0.2,A1,I1)','Fortran HDF5 version ',major,'.',minor,'.',rel
+
+call h5close_f(ier)
+if (ier /= 0) error stop 'Unable to close HDF5 library'
+
+end program
diff --git a/test cases/frameworks/25 hdf5/meson.build b/test cases/frameworks/25 hdf5/meson.build
new file mode 100644
index 0000000..9033354
--- /dev/null
+++ b/test cases/frameworks/25 hdf5/meson.build
@@ -0,0 +1,43 @@
+project('hdf5_test', 'c', 'cpp')
+
+if build_machine.system() == 'darwin'
+ error('MESON_SKIP_TEST: HDF5 CI image not setup for OSX.')
+endif
+
+if build_machine.system() == 'cygwin'
+ error('MESON_SKIP_TEST: HDF5 CI image not setup for Cygwin.')
+endif
+
+
+# --- C tests
+h5c = dependency('hdf5', language : 'c', required : false)
+if not h5c.found()
+ error('MESON_SKIP_TEST: HDF5 C library not found, skipping HDF5 framework tests.')
+endif
+exec = executable('exec', 'main.c', dependencies : h5c)
+
+test('HDF5 C', exec)
+
+# --- C++ tests
+h5cpp = dependency('hdf5', language : 'cpp', required : false)
+if h5cpp.found()
+ execpp = executable('execpp', 'main.cpp', dependencies : h5cpp)
+ test('HDF5 C++', execpp)
+endif
+
+# --- Fortran tests
+if build_machine.system() != 'windows'
+ add_languages('fortran')
+
+ h5f = dependency('hdf5', language : 'fortran', required : false)
+ if h5f.found()
+ exef = executable('exef', 'main.f90', dependencies : h5f)
+
+ test('HDF5 Fortran', exef)
+ endif
+endif
+
+# Check we can apply a version constraint
+if h5c.version() != 'unknown'
+ dependency('hdf5', version: '>=@0@'.format(h5c.version()))
+endif
diff --git a/test cases/linuxlike/13 cmake dependency/cmake/FindSomethingLikeZLIB.cmake b/test cases/linuxlike/13 cmake dependency/cmake/FindSomethingLikeZLIB.cmake
new file mode 100644
index 0000000..a2f8456
--- /dev/null
+++ b/test cases/linuxlike/13 cmake dependency/cmake/FindSomethingLikeZLIB.cmake
@@ -0,0 +1,9 @@
+find_package(ZLIB)
+
+if(ZLIB_FOUND OR ZLIB_Found)
+ set(SomethingLikeZLIB_FOUND ON)
+ set(SomethingLikeZLIB_LIBRARIES ${ZLIB_LIBRARY})
+ set(SomethingLikeZLIB_INCLUDE_DIRS ${ZLIB_INCLUDE_DIR})
+else()
+ set(SomethingLikeZLIB_FOUND OFF)
+endif()
diff --git a/test cases/linuxlike/13 cmake dependency/meson.build b/test cases/linuxlike/13 cmake dependency/meson.build
index 72773b2..a18cd84 100644
--- a/test cases/linuxlike/13 cmake dependency/meson.build
+++ b/test cases/linuxlike/13 cmake dependency/meson.build
@@ -36,6 +36,12 @@ depf2 = dependency('ZLIB', required : false, method : 'cmake', modules : 'dfggh:
assert(depf2.found() == false, 'Invalid CMake targets should fail')
+# Try to find a dependency with a custom CMake module
+
+depm1 = dependency('SomethingLikeZLIB', required : true, method : 'cmake', cmake_module_path : 'cmake')
+depm2 = dependency('SomethingLikeZLIB', required : true, method : 'cmake', cmake_module_path : ['cmake'])
+depm3 = dependency('SomethingLikeZLIB', required : true, cmake_module_path : 'cmake')
+
# Try to compile a test that takes a dep and an include_directories
cc = meson.get_compiler('c')
diff --git a/test cases/osx/2 library versions/meson.build b/test cases/osx/2 library versions/meson.build
index 26f945a..0d21a3a 100644
--- a/test cases/osx/2 library versions/meson.build
+++ b/test cases/osx/2 library versions/meson.build
@@ -1,15 +1,27 @@
project('library versions', 'c')
-zlib_dep = dependency('zlib')
-
-some = shared_library('some', 'lib.c',
- # duplicate the rpath again, in order
- # to test Meson's RPATH deduplication
- build_rpath : zlib_dep.get_pkgconfig_variable('libdir'),
- dependencies : zlib_dep,
- version : '1.2.3',
- soversion : '7',
- install : true)
+if run_command(find_program('require_pkgconfig.py'), check: true).stdout().strip() == 'yes'
+ required = true
+else
+ required = false
+endif
+
+zlib_dep = dependency('zlib', required: required)
+if zlib_dep.found()
+ some = shared_library('some', 'lib.c',
+ # duplicate the rpath again, in order
+ # to test Meson's RPATH deduplication
+ build_rpath : zlib_dep.get_pkgconfig_variable('libdir'),
+ dependencies : zlib_dep,
+ version : '1.2.3',
+ soversion : '7',
+ install : true)
+else
+ some = shared_library('some', 'lib.c',
+ version : '1.2.3',
+ soversion : '7',
+ install : true)
+endif
noversion = shared_library('noversion', 'lib.c',
install : true)
diff --git a/test cases/osx/2 library versions/require_pkgconfig.py b/test cases/osx/2 library versions/require_pkgconfig.py
new file mode 100644
index 0000000..3d228aa
--- /dev/null
+++ b/test cases/osx/2 library versions/require_pkgconfig.py
@@ -0,0 +1,9 @@
+#!/usr/bin/env python3
+
+import os
+import shutil
+
+if 'CI' in os.environ or shutil.which('pkg-config'):
+ print('yes')
+else:
+ print('no')
diff --git a/test cases/osx/5 extra frameworks/installed_files.txt b/test cases/osx/5 extra frameworks/installed_files.txt
new file mode 100644
index 0000000..2c6bd93
--- /dev/null
+++ b/test cases/osx/5 extra frameworks/installed_files.txt
@@ -0,0 +1,2 @@
+usr/bin/prog
+usr/lib/libstat.a
diff --git a/test cases/osx/5 extra frameworks/meson.build b/test cases/osx/5 extra frameworks/meson.build
new file mode 100644
index 0000000..cb4847e
--- /dev/null
+++ b/test cases/osx/5 extra frameworks/meson.build
@@ -0,0 +1,13 @@
+project('xcode extra framework test', 'c')
+
+dep_libs = dependency('OpenGL', method : 'extraframework')
+assert(dep_libs.type_name() == 'extraframeworks', 'type_name is ' + dep_libs.type_name())
+
+dep_main = dependency('Foundation')
+assert(dep_main.type_name() == 'extraframeworks', 'type_name is ' + dep_main.type_name())
+
+dep_py = dependency('python', method : 'extraframework')
+assert(dep_py.type_name() == 'extraframeworks', 'type_name is ' + dep_py.type_name())
+
+stlib = static_library('stat', 'stat.c', install : true, dependencies: dep_libs)
+exe = executable('prog', 'prog.c', install : true, dependencies: dep_main)
diff --git a/test cases/osx/5 extra frameworks/prog.c b/test cases/osx/5 extra frameworks/prog.c
new file mode 100644
index 0000000..11b7fad
--- /dev/null
+++ b/test cases/osx/5 extra frameworks/prog.c
@@ -0,0 +1,3 @@
+int main(int argc, char **argv) {
+ return 0;
+}
diff --git a/test cases/osx/5 extra frameworks/stat.c b/test cases/osx/5 extra frameworks/stat.c
new file mode 100644
index 0000000..fa76a65
--- /dev/null
+++ b/test cases/osx/5 extra frameworks/stat.c
@@ -0,0 +1 @@
+int func() { return 933; }
diff --git a/test cases/rewrite/1 basic/addSrc.json b/test cases/rewrite/1 basic/addSrc.json
new file mode 100644
index 0000000..1a504bf
--- /dev/null
+++ b/test cases/rewrite/1 basic/addSrc.json
@@ -0,0 +1,89 @@
+[
+ {
+ "type": "target",
+ "target": "trivialprog1",
+ "operation": "src_add",
+ "sources": ["a1.cpp", "a2.cpp"]
+ },
+ {
+ "type": "target",
+ "target": "trivialprog2",
+ "operation": "src_add",
+ "sources": ["a7.cpp"]
+ },
+ {
+ "type": "target",
+ "target": "trivialprog3",
+ "operation": "src_add",
+ "sources": ["a5.cpp"]
+ },
+ {
+ "type": "target",
+ "target": "trivialprog4",
+ "operation": "src_add",
+ "sources": ["a5.cpp"]
+ },
+ {
+ "type": "target",
+ "target": "trivialprog5",
+ "operation": "src_add",
+ "sources": ["a3.cpp"]
+ },
+ {
+ "type": "target",
+ "target": "trivialprog6",
+ "operation": "src_add",
+ "sources": ["a4.cpp"]
+ },
+ {
+ "type": "target",
+ "target": "trivialprog9",
+ "operation": "src_add",
+ "sources": ["a6.cpp"]
+ },
+ {
+ "type": "target",
+ "target": "trivialprog1",
+ "operation": "test"
+ },
+ {
+ "type": "target",
+ "target": "trivialprog2",
+ "operation": "test"
+ },
+ {
+ "type": "target",
+ "target": "trivialprog3",
+ "operation": "test"
+ },
+ {
+ "type": "target",
+ "target": "trivialprog4",
+ "operation": "test"
+ },
+ {
+ "type": "target",
+ "target": "trivialprog5",
+ "operation": "test"
+ },
+ {
+ "type": "target",
+ "target": "trivialprog6",
+ "operation": "test"
+ },
+ {
+ "type": "target",
+ "target": "trivialprog7",
+ "operation": "test"
+ },
+ {
+ "type": "target",
+ "target": "trivialprog8",
+ "operation": "test"
+ },
+ {
+ "type": "target",
+ "target": "trivialprog9",
+ "operation": "test"
+ }
+]
diff --git a/test cases/rewrite/1 basic/added.txt b/test cases/rewrite/1 basic/added.txt
deleted file mode 100644
index 657dd42..0000000
--- a/test cases/rewrite/1 basic/added.txt
+++ /dev/null
@@ -1,5 +0,0 @@
-project('rewritetest', 'c')
-
-sources = ['trivial.c']
-
-exe = executable('trivialprog', 'notthere.c', sources)
diff --git a/test cases/rewrite/1 basic/info.json b/test cases/rewrite/1 basic/info.json
new file mode 100644
index 0000000..be2a873
--- /dev/null
+++ b/test cases/rewrite/1 basic/info.json
@@ -0,0 +1,47 @@
+[
+ {
+ "type": "target",
+ "target": "trivialprog1",
+ "operation": "test"
+ },
+ {
+ "type": "target",
+ "target": "trivialprog2",
+ "operation": "test"
+ },
+ {
+ "type": "target",
+ "target": "trivialprog3",
+ "operation": "test"
+ },
+ {
+ "type": "target",
+ "target": "trivialprog4",
+ "operation": "test"
+ },
+ {
+ "type": "target",
+ "target": "trivialprog5",
+ "operation": "test"
+ },
+ {
+ "type": "target",
+ "target": "trivialprog6",
+ "operation": "test"
+ },
+ {
+ "type": "target",
+ "target": "trivialprog7",
+ "operation": "test"
+ },
+ {
+ "type": "target",
+ "target": "trivialprog8",
+ "operation": "test"
+ },
+ {
+ "type": "target",
+ "target": "trivialprog9",
+ "operation": "test"
+ }
+]
diff --git a/test cases/rewrite/1 basic/meson.build b/test cases/rewrite/1 basic/meson.build
index a0485d0..1bed0e1 100644
--- a/test cases/rewrite/1 basic/meson.build
+++ b/test cases/rewrite/1 basic/meson.build
@@ -1,5 +1,18 @@
-project('rewritetest', 'c')
+project('rewritetest', 'cpp')
-sources = ['trivial.c', 'notthere.c']
+src1 = ['main.cpp', 'fileA.cpp']
+src2 = files(['fileB.cpp', 'fileC.cpp'])
+src3 = src1
+src4 = [src3]
-exe = executable('trivialprog', sources)
+# Magic comment
+
+exe1 = executable('trivialprog1', src1)
+exe2 = executable('trivialprog2', [src2])
+exe3 = executable('trivialprog3', ['main.cpp', 'fileA.cpp'])
+exe4 = executable('trivialprog4', ['main.cpp', ['fileA.cpp']])
+exe5 = executable('trivialprog5', [src2, 'main.cpp'])
+exe6 = executable('trivialprog6', 'main.cpp', 'fileA.cpp')
+exe7 = executable('trivialprog7', 'fileB.cpp', src1, 'fileC.cpp')
+exe8 = executable('trivialprog8', src3)
+exe9 = executable('trivialprog9', src4)
diff --git a/test cases/rewrite/1 basic/removed.txt b/test cases/rewrite/1 basic/removed.txt
deleted file mode 100644
index 5519214..0000000
--- a/test cases/rewrite/1 basic/removed.txt
+++ /dev/null
@@ -1,5 +0,0 @@
-project('rewritetest', 'c')
-
-sources = ['trivial.c']
-
-exe = executable('trivialprog', sources)
diff --git a/test cases/rewrite/1 basic/rmSrc.json b/test cases/rewrite/1 basic/rmSrc.json
new file mode 100644
index 0000000..a8559a5
--- /dev/null
+++ b/test cases/rewrite/1 basic/rmSrc.json
@@ -0,0 +1,83 @@
+[
+ {
+ "type": "target",
+ "target": "trivialprog1",
+ "operation": "src_rm",
+ "sources": ["fileA.cpp"]
+ },
+ {
+ "type": "target",
+ "target": "trivialprog3",
+ "operation": "src_rm",
+ "sources": ["fileA.cpp"]
+ },
+ {
+ "type": "target",
+ "target": "trivialprog4",
+ "operation": "src_rm",
+ "sources": ["fileA.cpp"]
+ },
+ {
+ "type": "target",
+ "target": "trivialprog5",
+ "operation": "src_rm",
+ "sources": ["fileB.cpp"]
+ },
+ {
+ "type": "target",
+ "target": "trivialprog6",
+ "operation": "src_rm",
+ "sources": ["fileA.cpp"]
+ },
+ {
+ "type": "target",
+ "target": "trivialprog7",
+ "operation": "src_rm",
+ "sources": ["fileB.cpp"]
+ },
+ {
+ "type": "target",
+ "target": "trivialprog1",
+ "operation": "test"
+ },
+ {
+ "type": "target",
+ "target": "trivialprog2",
+ "operation": "test"
+ },
+ {
+ "type": "target",
+ "target": "trivialprog3",
+ "operation": "test"
+ },
+ {
+ "type": "target",
+ "target": "trivialprog4",
+ "operation": "test"
+ },
+ {
+ "type": "target",
+ "target": "trivialprog5",
+ "operation": "test"
+ },
+ {
+ "type": "target",
+ "target": "trivialprog6",
+ "operation": "test"
+ },
+ {
+ "type": "target",
+ "target": "trivialprog7",
+ "operation": "test"
+ },
+ {
+ "type": "target",
+ "target": "trivialprog8",
+ "operation": "test"
+ },
+ {
+ "type": "target",
+ "target": "trivialprog9",
+ "operation": "test"
+ }
+]
diff --git a/test cases/rewrite/2 subdirs/addSrc.json b/test cases/rewrite/2 subdirs/addSrc.json
new file mode 100644
index 0000000..017476c
--- /dev/null
+++ b/test cases/rewrite/2 subdirs/addSrc.json
@@ -0,0 +1,13 @@
+[
+ {
+ "type": "target",
+ "target": "something",
+ "operation": "src_add",
+ "sources": ["third.c"]
+ },
+ {
+ "type": "target",
+ "target": "something",
+ "operation": "test"
+ }
+]
diff --git a/test cases/rewrite/2 subdirs/info.json b/test cases/rewrite/2 subdirs/info.json
new file mode 100644
index 0000000..0173333
--- /dev/null
+++ b/test cases/rewrite/2 subdirs/info.json
@@ -0,0 +1,7 @@
+[
+ {
+ "type": "target",
+ "target": "something",
+ "operation": "test"
+ }
+]
diff --git a/test cases/rewrite/2 subdirs/meson.build b/test cases/rewrite/2 subdirs/meson.build
index 79b7ad7..c7f3fec 100644
--- a/test cases/rewrite/2 subdirs/meson.build
+++ b/test cases/rewrite/2 subdirs/meson.build
@@ -2,4 +2,3 @@ project('subdir rewrite', 'c')
subdir('sub1')
subdir('sub2')
-
diff --git a/test cases/rewrite/2 subdirs/sub1/after.txt b/test cases/rewrite/2 subdirs/sub1/after.txt
deleted file mode 100644
index 53ceaff..0000000
--- a/test cases/rewrite/2 subdirs/sub1/after.txt
+++ /dev/null
@@ -1 +0,0 @@
-srcs = ['first.c']
diff --git a/test cases/rewrite/2 subdirs/sub2/meson.build b/test cases/rewrite/2 subdirs/sub2/meson.build
index 0d92e7f..44b4075 100644
--- a/test cases/rewrite/2 subdirs/sub2/meson.build
+++ b/test cases/rewrite/2 subdirs/sub2/meson.build
@@ -1,2 +1 @@
executable('something', srcs)
-