aboutsummaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rw-r--r--cross/ubuntu-armhf.txt4
-rw-r--r--data/shell-completions/zsh/_meson2
-rw-r--r--docs/markdown/Adding-new-projects-to-wrapdb.md5
-rw-r--r--docs/markdown/Cross-compilation.md41
-rw-r--r--docs/markdown/FAQ.md6
-rw-r--r--docs/markdown/Pkg-config-files.md2
-rw-r--r--docs/markdown/Pkgconfig-module.md7
-rw-r--r--docs/markdown/Reference-manual.md60
-rw-r--r--docs/markdown/Release-notes-for-0.46.0.md7
-rw-r--r--docs/markdown/Users.md3
-rw-r--r--docs/markdown/snippets/both-libraries.md9
-rw-r--r--docs/markdown/snippets/new-wrap-mode.md3
-rw-r--r--docs/markdown/snippets/pkgconfig-generator.md14
-rw-r--r--mesonbuild/backend/backends.py12
-rw-r--r--mesonbuild/backend/ninjabackend.py118
-rw-r--r--mesonbuild/backend/vs2010backend.py3
-rw-r--r--mesonbuild/build.py193
-rw-r--r--mesonbuild/compilers/c.py54
-rw-r--r--mesonbuild/coredata.py2
-rw-r--r--mesonbuild/environment.py2
-rw-r--r--mesonbuild/interpreter.py222
-rw-r--r--mesonbuild/interpreterbase.py5
-rw-r--r--mesonbuild/mconf.py16
-rw-r--r--mesonbuild/mintro.py44
-rw-r--r--mesonbuild/modules/gnome.py2
-rw-r--r--mesonbuild/modules/pkgconfig.py61
-rw-r--r--mesonbuild/modules/python3.py7
-rw-r--r--mesonbuild/mtest.py416
-rw-r--r--mesonbuild/rewriter.py22
-rw-r--r--mesonbuild/scripts/coverage.py194
-rw-r--r--mesonbuild/scripts/gtkdochelper.py2
-rw-r--r--mesonbuild/scripts/meson_exe.py8
-rw-r--r--mesonbuild/wrap/__init__.py7
-rwxr-xr-xrun_tests.py2
-rwxr-xr-xrun_unittests.py47
-rw-r--r--test cases/common/142 compute int/config.h.in2
-rw-r--r--test cases/common/142 compute int/meson.build8
-rw-r--r--test cases/common/142 compute int/prog.c.in9
-rw-r--r--test cases/common/188 subdir_done/meson.build10
-rw-r--r--test cases/common/189 bothlibraries/libfile.c7
-rw-r--r--test cases/common/189 bothlibraries/main.c8
-rw-r--r--test cases/common/189 bothlibraries/meson.build12
-rw-r--r--test cases/common/189 bothlibraries/mylib.h13
-rw-r--r--test cases/common/51 pkgconfig-gen/dependencies/meson.build12
-rw-r--r--test cases/d/3 shared library/meson.build9
-rw-r--r--test cases/d/6 unittest/app.d4
-rw-r--r--test cases/d/6 unittest/meson.build6
-rw-r--r--test cases/d/6 unittest/second_unit.d10
-rw-r--r--test cases/d/9 features/app.d9
-rw-r--r--test cases/d/9 features/extra.d9
-rw-r--r--test cases/d/9 features/meson.build10
-rw-r--r--test cases/failing/71 skip only subdir/meson.build8
-rw-r--r--test cases/failing/71 skip only subdir/subdir/meson.build3
-rw-r--r--test cases/unit/26 shared_mod linking/libfile.c14
-rw-r--r--test cases/unit/26 shared_mod linking/main.c11
-rw-r--r--test cases/unit/26 shared_mod linking/meson.build5
-rw-r--r--test cases/unit/27 forcefallback/meson.build8
-rw-r--r--test cases/unit/27 forcefallback/subprojects/notzlib/meson.build7
-rw-r--r--test cases/unit/27 forcefallback/subprojects/notzlib/notzlib.c6
-rw-r--r--test cases/unit/27 forcefallback/subprojects/notzlib/notzlib.h18
-rw-r--r--test cases/unit/27 forcefallback/test_not_zlib.c8
61 files changed, 1158 insertions, 670 deletions
diff --git a/cross/ubuntu-armhf.txt b/cross/ubuntu-armhf.txt
index 6246ffe..fec8ce7 100644
--- a/cross/ubuntu-armhf.txt
+++ b/cross/ubuntu-armhf.txt
@@ -1,8 +1,8 @@
[binaries]
# we could set exe_wrapper = qemu-arm-static but to test the case
# when cross compiled binaries can't be run we don't do that
-c = '/usr/bin/arm-linux-gnueabihf-gcc-7'
-cpp = '/usr/bin/arm-linux-gnueabihf-g++-7'
+c = '/usr/bin/arm-linux-gnueabihf-gcc'
+cpp = '/usr/bin/arm-linux-gnueabihf-g++'
rust = ['rustc', '--target', 'arm-unknown-linux-gnueabihf', '-C', 'linker=/usr/bin/arm-linux-gnueabihf-gcc-7']
ar = '/usr/arm-linux-gnueabihf/bin/ar'
strip = '/usr/arm-linux-gnueabihf/bin/strip'
diff --git a/data/shell-completions/zsh/_meson b/data/shell-completions/zsh/_meson
index 877d700..481d04c 100644
--- a/data/shell-completions/zsh/_meson
+++ b/data/shell-completions/zsh/_meson
@@ -31,7 +31,7 @@ local -i ret
local __meson_backends="(ninja xcode ${(j. .)${:-vs{,2010,2015,2017}}})"
local __meson_build_types="(plain debug debugoptimized minsize release)"
-local __meson_wrap_modes="(WrapMode.{default,nofallback,nodownload})"
+local __meson_wrap_modes="(WrapMode.{default,nofallback,nodownload,forcefallback})"
local -a meson_commands=(
'setup:set up a build directory'
diff --git a/docs/markdown/Adding-new-projects-to-wrapdb.md b/docs/markdown/Adding-new-projects-to-wrapdb.md
index 4420de5..58b27ba 100644
--- a/docs/markdown/Adding-new-projects-to-wrapdb.md
+++ b/docs/markdown/Adding-new-projects-to-wrapdb.md
@@ -37,11 +37,10 @@ Each project gets its own repo. It is initialized like this:
git init
git add readme.txt
- git commit -a -m 'Start of project foobar.'
- git tag commit_zero -a -m 'A tag that helps get revision ids for releases.'
+ git add LICENSE.build
+ git commit -a -m 'Create project foobar'
git remote add origin <repo url>
git push -u origin master
- git push --tags
Note that this is the *only* commit that will ever be made to master branch. All other commits are done to branches.
diff --git a/docs/markdown/Cross-compilation.md b/docs/markdown/Cross-compilation.md
index e739e37..7d316ed 100644
--- a/docs/markdown/Cross-compilation.md
+++ b/docs/markdown/Cross-compilation.md
@@ -10,17 +10,23 @@ nomenclature. The three most important definitions are traditionally
called *build*, *host* and *target*. This is confusing because those
terms are used for quite many different things. To simplify the issue,
we are going to call these the *build machine*, *host machine* and
-*target machine*. Their definitions are the following
+*target machine*. Their definitions are the following:
-* *build machine* is the computer that is doing the actual compiling
-* *host machine* is the machine on which the compiled binary will run
-* *target machine* is the machine on which the compiled binary's output will run (this is only meaningful for programs such as compilers that, when run, produce object code for a different CPU than what the program is being run on)
+* *build machine* is the computer that is doing the actual compiling.
+* *host machine* is the machine on which the compiled binary will run.
+* *target machine* is the machine on which the compiled binary's
+ output will run, *only meaningful* if the program produces
+ machine-specific output.
The `tl/dr` summary is the following: if you are doing regular cross
-compilation, you only care about *build_machine* and
-*host_machine*. Just ignore *target_machine* altogether and you will
-be correct 99% of the time. If your needs are more complex or you are
-interested in the actual details, do read on.
+compilation, you only care about `build_machine` and
+`host_machine`. Just ignore `target_machine` altogether and you will
+be correct 99% of the time. Only compilers and similar tools care
+about the target machine. In fact, for so-called "multi-target" tools
+the target machine need not be fixed at build-time like the others but
+chosen at runtime, so `target_machine` *still* doesn't matter. If your
+needs are more complex or you are interested in the actual details, do
+read on.
This might be easier to understand through examples. Let's start with
the regular, not cross-compiling case. In these cases all of these
@@ -50,6 +56,20 @@ Wikipedia or the net in general. It is very common for them to get
build, host and target mixed up, even in consecutive sentences, which
can leave you puzzled until you figure it out.
+A lot of confusion stems from the fact that when you cross-compile
+something, the 3 systems (*build*, *host*, and *target*) used when
+building the cross compiler don't align with the ones used when
+building something with that newly-built cross compiler. To take our
+Canadian Cross scenario from above (for full generality), since its
+*host machine* is x86 Windows, the *build machine* of anything we
+build with it is *x86 Windows*. And since its *target machine* is MIPS
+Linux, the *host machine* of anything we build with it is *MIPS
+Linux*. Only the *target machine* of whatever we build with it can be
+freely chosen by us, say if we want to build another cross compiler
+that runs on MIPS Linux and targets Aarch64 iOS. As this example
+hopefully makes clear to you, the platforms are shifted over to the
+left by one position.
+
If you did not understand all of the details, don't worry. For most
people it takes a while to wrap their head around these
concepts. Don't panic, it might take a while to click, but you will
@@ -82,8 +102,9 @@ of a wrapper, these lines are all you need to write. Meson will
automatically use the given wrapper when it needs to run host
binaries. This happens e.g. when running the project's test suite.
-The next section lists properties of the cross compiler and thus of
-the host system. It looks like this:
+The next section lists properties of the cross compiler and its target
+system, and thus properties of the host system of what we're building. It
+looks like this:
```ini
[properties]
diff --git a/docs/markdown/FAQ.md b/docs/markdown/FAQ.md
index f4cf89b..398604a 100644
--- a/docs/markdown/FAQ.md
+++ b/docs/markdown/FAQ.md
@@ -288,3 +288,9 @@ has a option called `wrap-mode` which can be used to disable wrap
downloads altogether with `--wrap-mode=nodownload`. You can also
disable dependency fallbacks altogether with `--wrap-mode=nofallback`,
which also implies the `nodownload` option.
+
+If on the other hand, you want meson to always use the fallback
+for dependencies, even when an external dependency exists and could
+satisfy the version requirements, for example in order to make
+sure your project builds when fallbacks are used, you can use
+`--wrap-mode=forcefallback` since 0.46.0.
diff --git a/docs/markdown/Pkg-config-files.md b/docs/markdown/Pkg-config-files.md
index dde4ac9..ddb8bab 100644
--- a/docs/markdown/Pkg-config-files.md
+++ b/docs/markdown/Pkg-config-files.md
@@ -1,6 +1,6 @@
# Pkg config files
-[Pkg-config](https://en.wikipedia.org/wiki/Pkg-config) is a way for shared libraries to declare the compiler flags needed to use them. There are two different ways of generating Pkg-config files in Meson. The first way is to build them manually with the `configure_files` command. The second way is to use Meson's built in Pkg-config file generator. The difference between the two is that the latter is very simple and meant for basic use cases. The former should be used when you need to provide a more customized solution.
+[Pkg-config](https://en.wikipedia.org/wiki/Pkg-config) is a way for shared libraries to declare the compiler flags needed to use them. There are two different ways of generating Pkg-config files in Meson. The first way is to build them manually with the `configure_file` command. The second way is to use Meson's built in Pkg-config file generator. The difference between the two is that the latter is very simple and meant for basic use cases. The former should be used when you need to provide a more customized solution.
In this document we describe the simple generator approach. It is used in the following way.
diff --git a/docs/markdown/Pkgconfig-module.md b/docs/markdown/Pkgconfig-module.md
index 853cf50..77db809 100644
--- a/docs/markdown/Pkgconfig-module.md
+++ b/docs/markdown/Pkgconfig-module.md
@@ -51,3 +51,10 @@ keyword arguments.
- `version` a string describing the version of this library
- `d_module_versions` a list of module version flags used when compiling
D sources referred to by this pkg-config file
+
+Since 0.46 a `StaticLibrary` or `SharedLibrary` object can optionally be passed
+as first positional argument. If one is provided a default value will be
+provided for all required fields of the pc file:
+- `install_dir` is set to the `pkgconfig` folder in the same location as the provided library.
+- `description` is set to the project's name followed by the library's name.
+- `name` is set to the library's name.
diff --git a/docs/markdown/Reference-manual.md b/docs/markdown/Reference-manual.md
index 54b7131..d98fc19 100644
--- a/docs/markdown/Reference-manual.md
+++ b/docs/markdown/Reference-manual.md
@@ -112,6 +112,24 @@ run. The behavior of this function is identical to `test` with the
exception that there is no `is_parallel` keyword, because benchmarks
are never run in parallel.
+### both_libraries()
+
+``` meson
+ buildtarget both_libraries(library_name, list_of_sources, ...)
+```
+
+Builds both a static and shared library with the given sources. Positional and
+keyword arguments are otherwise the same as for [`library`](#library). Source
+files will be compiled only once and object files will be reused to build both
+shared and static libraries, unless `b_staticpic` user option or `pic` argument
+are set to false in which case sources will be compiled twice.
+
+The returned [buildtarget](#build-target-object) always represents the shared
+library. In addition it supports the following extra methods:
+
+- `get_shared_lib()` returns the shared library build target
+- `get_static_lib()` returns the static library build target
+
### build_target()
Creates a build target whose type can be set dynamically with the
@@ -885,10 +903,11 @@ dropped. That means that `join_paths('foo', '/bar')` returns `/bar`.
buildtarget library(library_name, list_of_sources, ...)
```
-Builds a library that is either static or shared depending on the
-value of `default_library` user option. You should use this instead of
-[`shared_library`](#shared_library) or
-[`static_library`](#static_library) most of the time. This allows you
+Builds a library that is either static, shared or both depending on the value of
+`default_library` user option. You should use this instead of
+[`shared_library`](#shared_library),
+[`static_library`](#static_library) or
+[`both_libraries`](#both_libraries) most of the time. This allows you
to toggle your entire project (including subprojects) from shared to
static with only one option.
@@ -911,7 +930,8 @@ The keyword arguments for this are the same as for [`executable`](#executable) w
libraries. Defaults to `dylib` for shared libraries and `rlib` for
static libraries.
-`static_library` and `shared_library` also accept these keyword arguments.
+`static_library`, `shared_library` and `both_libraries` also accept these keyword
+arguments.
### message()
@@ -1129,6 +1149,33 @@ This function has one keyword argument.
recurse in the subdir if they all return `true` when queried with
`.found()`
+### subdir_done()
+
+``` meson
+ subdir_done()
+```
+
+Stops further interpretation of the meson script file from the point of
+the invocation. All steps executed up to this point are valid and will
+be executed by meson. This means that all targets defined before the call
+of `subdir_done` will be built.
+
+If the current script was called by `subdir` the execution returns to the
+calling directory and continues as if the script had reached the end.
+If the current script is the top level script meson configures the project
+as defined up to this point.
+
+Example:
+```meson
+project('example exit', 'cpp')
+executable('exe1', 'exe1.cpp')
+subdir_done()
+executable('exe2', 'exe2.cpp')
+```
+
+The executable `exe1` will be built, while the executable `exe2` is not
+built.
+
### subproject()
``` meson
@@ -1643,7 +1690,8 @@ These are objects returned by the [functions listed above](#functions).
### `build target` object
A build target is either an [executable](#executable),
-[shared](#shared_library), [static library](#static_library) or
+[shared library](#shared_library), [static library](#static_library),
+[both shared and static library](#both_libraries) or
[shared module](#shared_module).
- `extract_all_objects()` is same as `extract_objects` but returns all
diff --git a/docs/markdown/Release-notes-for-0.46.0.md b/docs/markdown/Release-notes-for-0.46.0.md
index 395a94d..e062459 100644
--- a/docs/markdown/Release-notes-for-0.46.0.md
+++ b/docs/markdown/Release-notes-for-0.46.0.md
@@ -14,3 +14,10 @@ whose contents should look like this:
## Feature name
A short description explaining the new feature and how it should be used.
+
+## Allow early return from a script
+
+Added the function `subdir_done()`. Its invocation exits the current script at
+the point of invocation. All previously invoked build targets and commands are
+built/executed. All following ones are ignored. If the current script was
+invoked via `subdir()` the parent script continues normally.
diff --git a/docs/markdown/Users.md b/docs/markdown/Users.md
index e152555..558378c 100644
--- a/docs/markdown/Users.md
+++ b/docs/markdown/Users.md
@@ -4,7 +4,8 @@ title: Users
# List of projects using Meson
-If you have a project that uses Meson that you want to add to this list, please [file a pull-request](https://github.com/mesonbuild/meson/edit/master/docs/markdown/Users.md) for it. All the software on this list is tested for regressions before release, so it's highly recommended that projects add themselves here.
+If you have a project that uses Meson that you want to add to this list, please [file a pull-request](https://github.com/mesonbuild/meson/edit/master/docs/markdown/Users.md) for it. All the software on this list is tested for regressions before release, so it's highly recommended that projects add themselves here. Some additional projects are
+listed in the [`meson` GitHub topic](https://github.com/topics/meson).
- [AQEMU](https://github.com/tobimensch/aqemu), a Qt GUI for QEMU virtual machines, since version 0.9.3
- [Arduino sample project](https://github.com/jpakkane/mesonarduino)
diff --git a/docs/markdown/snippets/both-libraries.md b/docs/markdown/snippets/both-libraries.md
new file mode 100644
index 0000000..1632f63
--- /dev/null
+++ b/docs/markdown/snippets/both-libraries.md
@@ -0,0 +1,9 @@
+## Building both shared and static libraries
+
+A new function `both_libraries()` has been added to build both shared and static
+libraries at the same time. Source files will be compiled only once and object
+files will be reused to build both shared and static libraries, unless
+`b_staticpic` user option or `pic` argument are set to false in which case
+sources will be compiled twice.
+
+The returned `buildtarget` object always represents the shared library.
diff --git a/docs/markdown/snippets/new-wrap-mode.md b/docs/markdown/snippets/new-wrap-mode.md
new file mode 100644
index 0000000..e33dd83
--- /dev/null
+++ b/docs/markdown/snippets/new-wrap-mode.md
@@ -0,0 +1,3 @@
+A new wrap mode was added, `--wrap-mode=forcefallback`. When this is set,
+dependencies for which a fallback was provided will always use it, even
+if an external dependency exists and satisfies the version requirements.
diff --git a/docs/markdown/snippets/pkgconfig-generator.md b/docs/markdown/snippets/pkgconfig-generator.md
new file mode 100644
index 0000000..93920d3
--- /dev/null
+++ b/docs/markdown/snippets/pkgconfig-generator.md
@@ -0,0 +1,14 @@
+## Improvements to pkgconfig module
+
+A `StaticLibrary` or `SharedLibrary` object can optionally be passed
+as first positional argument of the `generate()` method. If one is provided a
+default value will be provided for all required fields of the pc file:
+- `install_dir` is set to the `pkgconfig` folder in the same location as the provided library.
+- `description` is set to the project's name followed by the library's name.
+- `name` is set to the library's name.
+
+Generating a .pc file is now as simple as:
+
+```
+pkgconfig.generate(mylib)
+```
diff --git a/mesonbuild/backend/backends.py b/mesonbuild/backend/backends.py
index ad45204..916f680 100644
--- a/mesonbuild/backend/backends.py
+++ b/mesonbuild/backend/backends.py
@@ -22,6 +22,7 @@ import json
import subprocess
from ..mesonlib import MesonException
from ..mesonlib import get_compiler_for_source, classify_unity_sources
+from ..mesonlib import File
from ..compilers import CompilerArgs
from collections import OrderedDict
import shlex
@@ -414,11 +415,20 @@ class Backend:
objname = objname.replace('/', '_').replace('\\', '_')
objpath = os.path.join(proj_dir_to_build_root, targetdir, objname)
return [objpath]
- for osrc in extobj.srclist:
+
+ sources = list(extobj.srclist)
+ for gensrc in extobj.genlist:
+ for s in gensrc.get_outputs():
+ path = self.get_target_generated_dir(extobj.target, gensrc, s)
+ dirpart, fnamepart = os.path.split(path)
+ sources.append(File(True, dirpart, fnamepart))
+
+ for osrc in sources:
objname = self.object_filename_from_source(extobj.target, osrc, False)
if objname:
objpath = os.path.join(proj_dir_to_build_root, targetdir, objname)
result.append(objpath)
+
return result
def get_pch_include_args(self, compiler, target):
diff --git a/mesonbuild/backend/ninjabackend.py b/mesonbuild/backend/ninjabackend.py
index bfac4c7..cee1434 100644
--- a/mesonbuild/backend/ninjabackend.py
+++ b/mesonbuild/backend/ninjabackend.py
@@ -613,13 +613,19 @@ int dummy;
self.create_target_alias(target_name, outfile)
self.processed_targets[target.get_id()] = True
+ def generate_coverage_command(self, elem, outputs):
+ elem.add_item('COMMAND', self.environment.get_build_command() +
+ ['--internal', 'coverage'] +
+ outputs +
+ [self.environment.get_source_dir(),
+ os.path.join(self.environment.get_source_dir(),
+ self.build.get_subproject_dir()),
+ self.environment.get_build_dir(),
+ self.environment.get_log_dir()])
+
def generate_coverage_rules(self, outfile):
e = NinjaBuildElement(self.all_outputs, 'meson-coverage', 'CUSTOM_COMMAND', 'PHONY')
- e.add_item('COMMAND', self.environment.get_build_command() +
- ['--internal', 'coverage',
- self.environment.get_source_dir(),
- self.environment.get_build_dir(),
- self.environment.get_log_dir()])
+ self.generate_coverage_command(e, [])
e.add_item('description', 'Generates coverage reports.')
e.write(outfile)
# Alias that runs the target defined above
@@ -627,80 +633,26 @@ int dummy;
self.generate_coverage_legacy_rules(outfile)
def generate_coverage_legacy_rules(self, outfile):
- (gcovr_exe, gcovr_new_rootdir, lcov_exe, genhtml_exe) = environment.find_coverage_tools()
- added_rule = False
- if gcovr_exe:
- # gcovr >= 3.1 interprets rootdir differently
- if gcovr_new_rootdir:
- rootdir = self.environment.get_build_dir()
- else:
- rootdir = self.environment.get_source_dir(),
- added_rule = True
- elem = NinjaBuildElement(self.all_outputs, 'meson-coverage-xml', 'CUSTOM_COMMAND', '')
- elem.add_item('COMMAND', [gcovr_exe, '-x', '-r', rootdir,
- '-o', os.path.join(self.environment.get_log_dir(), 'coverage.xml')])
- elem.add_item('DESC', 'Generating XML coverage report.')
- elem.write(outfile)
- # Alias that runs the target defined above
- self.create_target_alias('meson-coverage-xml', outfile)
- elem = NinjaBuildElement(self.all_outputs, 'meson-coverage-text', 'CUSTOM_COMMAND', '')
- elem.add_item('COMMAND', [gcovr_exe, '-r', rootdir,
- '-o', os.path.join(self.environment.get_log_dir(), 'coverage.txt')])
- elem.add_item('DESC', 'Generating text coverage report.')
- elem.write(outfile)
- # Alias that runs the target defined above
- self.create_target_alias('meson-coverage-text', outfile)
- if lcov_exe and genhtml_exe:
- added_rule = True
- htmloutdir = os.path.join(self.environment.get_log_dir(), 'coveragereport')
- covinfo = os.path.join(self.environment.get_log_dir(), 'coverage.info')
- phony_elem = NinjaBuildElement(self.all_outputs, 'meson-coverage-html', 'phony', os.path.join(htmloutdir, 'index.html'))
- phony_elem.write(outfile)
- # Alias that runs the target defined above
- self.create_target_alias('meson-coverage-html', outfile)
- elem = NinjaBuildElement(self.all_outputs, os.path.join(htmloutdir, 'index.html'), 'CUSTOM_COMMAND', '')
-
- subproject_dir = self.build.get_subproject_dir()
- command = [lcov_exe,
- '--directory', self.environment.get_build_dir(),
- '--capture',
- '--output-file', covinfo,
- '--no-checksum',
- '&&', lcov_exe,
- '--extract',
- covinfo,
- os.path.join(self.environment.get_source_dir(), '*'),
- '--output-file', covinfo,
- '&&', lcov_exe,
- '--remove',
- covinfo,
- os.path.join(self.environment.get_source_dir(), subproject_dir, '*'),
- '--output-file', covinfo,
- '&&', genhtml_exe,
- '--prefix', self.environment.get_build_dir(),
- '--output-directory', htmloutdir,
- '--title', 'Code coverage',
- '--legend',
- '--show-details',
- covinfo]
- elem.add_item('COMMAND', command)
- elem.add_item('DESC', 'Generating HTML coverage report.')
- elem.write(outfile)
- elif gcovr_exe and gcovr_new_rootdir:
- added_rule = True
- htmloutdir = os.path.join(self.environment.get_log_dir(), 'coveragereport')
- phony_elem = NinjaBuildElement(self.all_outputs, 'meson-coverage-html', 'phony', os.path.join(htmloutdir, 'index.html'))
- phony_elem.write(outfile)
- # Alias that runs the target defined above
- self.create_target_alias('meson-coverage-html', outfile)
- elem = NinjaBuildElement(self.all_outputs, os.path.join(htmloutdir, 'index.html'), 'CUSTOM_COMMAND', '')
- command = [gcovr_exe, '--html', '--html-details', '-r', self.environment.get_build_dir(),
- '-o', os.path.join(htmloutdir, 'index.html')]
- elem.add_item('COMMAND', command)
- elem.add_item('DESC', 'Generating HTML coverage report.')
- elem.write(outfile)
- if not added_rule:
- mlog.warning('coverage requested but neither gcovr nor lcov/genhtml found.')
+ e = NinjaBuildElement(self.all_outputs, 'meson-coverage-xml', 'CUSTOM_COMMAND', 'PHONY')
+ self.generate_coverage_command(e, ['--xml'])
+ e.add_item('description', 'Generates XML coverage report.')
+ e.write(outfile)
+ # Alias that runs the target defined above
+ self.create_target_alias('meson-coverage-xml', outfile)
+
+ e = NinjaBuildElement(self.all_outputs, 'meson-coverage-text', 'CUSTOM_COMMAND', 'PHONY')
+ self.generate_coverage_command(e, ['--text'])
+ e.add_item('description', 'Generates text coverage report.')
+ e.write(outfile)
+ # Alias that runs the target defined above
+ self.create_target_alias('meson-coverage-text', outfile)
+
+ e = NinjaBuildElement(self.all_outputs, 'meson-coverage-html', 'CUSTOM_COMMAND', 'PHONY')
+ self.generate_coverage_command(e, ['--html'])
+ e.add_item('description', 'Generates HTML coverage report.')
+ e.write(outfile)
+ # Alias that runs the target defined above
+ self.create_target_alias('meson-coverage-html', outfile)
def generate_install(self, outfile):
install_data_file = os.path.join(self.environment.get_scratch_dir(), 'install.dat')
@@ -2199,6 +2151,11 @@ rule FORTRAN_DEP_HACK
# near the end since these are supposed to override everything else.
commands += self.escape_extra_args(compiler,
target.get_extra_args(compiler.get_language()))
+
+ # D specific additional flags
+ if compiler.language == 'd':
+ commands += compiler.get_feature_args(target.d_features, self.build_to_src)
+
# Add source dir and build dir. Project-specific and target-specific
# include paths must override per-target compile args, include paths
# from external dependencies, internal dependencies, and from
@@ -2292,9 +2249,6 @@ rule FORTRAN_DEP_HACK
depelem.write(outfile)
commands += compiler.get_module_outdir_args(self.get_target_private_dir(target))
- if compiler.language == 'd':
- commands += compiler.get_feature_args(target.d_features, self.build_to_src)
-
element = NinjaBuildElement(self.all_outputs, rel_obj, compiler_name, rel_src)
for d in header_deps:
if isinstance(d, File):
diff --git a/mesonbuild/backend/vs2010backend.py b/mesonbuild/backend/vs2010backend.py
index 5e972f2..e9a3519 100644
--- a/mesonbuild/backend/vs2010backend.py
+++ b/mesonbuild/backend/vs2010backend.py
@@ -946,7 +946,8 @@ class Vs2010Backend(backends.Backend):
self.add_project_reference(root, tvcxproj, tid)
else:
# Other libraries go into AdditionalDependencies
- additional_links.append(linkname)
+ if linkname not in additional_links:
+ additional_links.append(linkname)
for lib in self.get_custom_target_provided_libraries(target):
additional_links.append(self.relpath(lib, self.get_target_dir(target)))
additional_objects = []
diff --git a/mesonbuild/build.py b/mesonbuild/build.py
index 3ff68ed..08e0c9d 100644
--- a/mesonbuild/build.py
+++ b/mesonbuild/build.py
@@ -26,67 +26,64 @@ from .mesonlib import get_filenames_templates_dict, substitute_values
from .mesonlib import for_windows, for_darwin, for_cygwin, for_android, has_path_sep
from .compilers import is_object, clike_langs, sort_clike, lang_suffixes
-known_basic_kwargs = {'install': True,
- 'c_pch': True,
- 'cpp_pch': True,
- 'c_args': True,
- 'objc_args': True,
- 'objcpp_args': True,
- 'cpp_args': True,
- 'cs_args': True,
- 'vala_args': True,
- 'fortran_args': True,
- 'd_args': True,
- 'd_import_dirs': True,
- 'd_unittest': True,
- 'd_module_versions': True,
- 'java_args': True,
- 'rust_args': True,
- 'link_args': True,
- 'link_depends': True,
- 'link_with': True,
- 'link_whole': True,
- 'implicit_include_directories': True,
- 'include_directories': True,
- 'dependencies': True,
- 'install_dir': True,
- 'main_class': True,
- 'name_suffix': True,
- 'gui_app': True,
- 'extra_files': True,
- 'install_rpath': True,
- 'build_rpath': True,
- 'resources': True,
- 'sources': True,
- 'objects': True,
- 'native': True,
- 'build_by_default': True,
- 'override_options': True,
- }
-
-# These contain kwargs supported by both static and shared libraries. These are
-# combined here because a library() call might be shared_library() or
-# static_library() at runtime based on the configuration.
-# FIXME: Find a way to pass that info down here so we can have proper target
-# kwargs checking when specifically using shared_library() or static_library().
-known_lib_kwargs = known_basic_kwargs.copy()
-known_lib_kwargs.update({'version': True, # Only for shared libs
- 'soversion': True, # Only for shared libs
- 'name_prefix': True,
- 'vs_module_defs': True, # Only for shared libs
- 'vala_header': True,
- 'vala_vapi': True,
- 'vala_gir': True,
- 'pic': True, # Only for static libs
- 'rust_crate_type': True, # Only for Rust libs
- })
-
-known_exe_kwargs = known_basic_kwargs.copy()
-known_exe_kwargs.update({'implib': True,
- 'export_dynamic': True
- })
-known_jar_kwargs = known_basic_kwargs.copy()
-known_jar_kwargs.update({'target_type': 'jar'})
+pch_kwargs = set(['c_pch', 'cpp_pch'])
+
+lang_arg_kwargs = set([
+ 'c_args',
+ 'cpp_args',
+ 'd_args',
+ 'd_import_dirs',
+ 'd_unittest',
+ 'd_module_versions',
+ 'fortran_args',
+ 'java_args',
+ 'objc_args',
+ 'objcpp_args',
+ 'rust_args',
+ 'vala_args',
+ 'cs_args',
+])
+
+vala_kwargs = set(['vala_header', 'vala_gir', 'vala_vapi'])
+rust_kwargs = set(['rust_crate_type'])
+cs_kwargs = set(['resources', 'cs_args'])
+
+buildtarget_kwargs = set([
+ 'build_by_default',
+ 'build_rpath',
+ 'dependencies',
+ 'extra_files',
+ 'gui_app',
+ 'link_with',
+ 'link_whole',
+ 'link_args',
+ 'link_depends',
+ 'implicit_include_directories',
+ 'include_directories',
+ 'install',
+ 'install_rpath',
+ 'install_dir',
+ 'name_prefix',
+ 'name_suffix',
+ 'native',
+ 'objects',
+ 'override_options',
+ 'sources',
+])
+
+known_build_target_kwargs = (
+ buildtarget_kwargs |
+ lang_arg_kwargs |
+ pch_kwargs |
+ vala_kwargs |
+ rust_kwargs |
+ cs_kwargs)
+
+known_exe_kwargs = known_build_target_kwargs | {'implib', 'export_dynamic'}
+known_shlib_kwargs = known_build_target_kwargs | {'version', 'soversion', 'vs_module_defs'}
+known_shmod_kwargs = known_build_target_kwargs
+known_stlib_kwargs = known_build_target_kwargs | {'pic'}
+known_jar_kwargs = known_exe_kwargs | {'main_class'}
class InvalidArguments(MesonException):
pass
@@ -214,9 +211,10 @@ class ExtractedObjects:
'''
Holds a list of sources for which the objects must be extracted
'''
- def __init__(self, target, srclist, is_unity):
+ def __init__(self, target, srclist, genlist, is_unity):
self.target = target
self.srclist = srclist
+ self.genlist = genlist
if is_unity:
self.check_unity_compatible()
@@ -337,6 +335,8 @@ a hard error in the future.''' % name)
class BuildTarget(Target):
+ known_kwargs = known_build_target_kwargs
+
def __init__(self, name, subdir, subproject, is_cross, sources, objects, environment, kwargs):
super().__init__(name, subdir, subproject, True)
self.is_cross = is_cross
@@ -380,6 +380,7 @@ class BuildTarget(Target):
self.process_compilers_late()
self.validate_sources()
self.validate_cross_install(environment)
+ self.check_module_linking()
def __lt__(self, other):
return self.get_id() < other.get_id()
@@ -395,7 +396,7 @@ class BuildTarget(Target):
def check_unknown_kwargs(self, kwargs):
# Override this method in derived classes that have more
# keywords.
- self.check_unknown_kwargs_int(kwargs, known_basic_kwargs)
+ self.check_unknown_kwargs_int(kwargs, self.known_kwargs)
def check_unknown_kwargs_int(self, kwargs, known_kwargs):
unknowns = []
@@ -625,13 +626,17 @@ class BuildTarget(Target):
if not isinstance(src, str):
raise MesonException('Object extraction arguments must be strings.')
src = File(False, self.subdir, src)
+ # FIXME: It could be a generated source
if src not in self.sources:
raise MesonException('Tried to extract unknown source %s.' % src)
obj_src.append(src)
- return ExtractedObjects(self, obj_src, self.is_unity)
+ return ExtractedObjects(self, obj_src, [], self.is_unity)
def extract_all_objects(self):
- return ExtractedObjects(self, self.sources, self.is_unity)
+ # FIXME: We should add support for transitive extract_objects()
+ if self.objects:
+ raise MesonException('Cannot extract objects from a target that itself has extracted objects')
+ return ExtractedObjects(self, self.sources, self.generated, self.is_unity)
def get_all_link_deps(self):
return self.get_transitive_link_deps()
@@ -1027,6 +1032,15 @@ You probably should put it in link_with instead.''')
def is_linkable_target(self):
return False
+ def check_module_linking(self):
+ '''
+ Warn if shared modules are linked with a target via link_with (see issue #2865).
+ '''
+ for link_target in self.link_targets:
+ if isinstance(link_target, SharedModule):
+ mlog.warning('''target links against shared modules. This is not
+recommended as it can lead to undefined behaviour on some platforms''')
+ return
class Generator:
def __init__(self, args, kwargs):
@@ -1174,6 +1188,8 @@ class GeneratedList:
return self.extra_args
class Executable(BuildTarget):
+ known_kwargs = known_exe_kwargs
+
def __init__(self, name, subdir, subproject, is_cross, sources, objects, environment, kwargs):
super().__init__(name, subdir, subproject, is_cross, sources, objects, environment, kwargs)
# Unless overridden, executables have no suffix or prefix. Except on
@@ -1229,9 +1245,6 @@ class Executable(BuildTarget):
def type_suffix(self):
return "@exe"
- def check_unknown_kwargs(self, kwargs):
- self.check_unknown_kwargs_int(kwargs, known_exe_kwargs)
-
def get_import_filename(self):
"""
The name of the import library that will be outputted by the compiler
@@ -1249,6 +1262,8 @@ class Executable(BuildTarget):
return self.is_linkwithable
class StaticLibrary(BuildTarget):
+ known_kwargs = known_stlib_kwargs
+
def __init__(self, name, subdir, subproject, is_cross, sources, objects, environment, kwargs):
if 'pic' not in kwargs and 'b_staticpic' in environment.coredata.base_options:
kwargs['pic'] = environment.coredata.base_options['b_staticpic'].value
@@ -1287,9 +1302,6 @@ class StaticLibrary(BuildTarget):
def type_suffix(self):
return "@sta"
- def check_unknown_kwargs(self, kwargs):
- self.check_unknown_kwargs_int(kwargs, known_lib_kwargs)
-
def process_kwargs(self, kwargs, environment):
super().process_kwargs(kwargs, environment)
if 'rust_crate_type' in kwargs:
@@ -1303,6 +1315,8 @@ class StaticLibrary(BuildTarget):
return True
class SharedLibrary(BuildTarget):
+ known_kwargs = known_shlib_kwargs
+
def __init__(self, name, subdir, subproject, is_cross, sources, objects, environment, kwargs):
self.soversion = None
self.ltversion = None
@@ -1491,9 +1505,6 @@ class SharedLibrary(BuildTarget):
else:
raise InvalidArguments('Invalid rust_crate_type "{0}": must be a string.'.format(rust_crate_type))
- def check_unknown_kwargs(self, kwargs):
- self.check_unknown_kwargs_int(kwargs, known_lib_kwargs)
-
def get_import_filename(self):
"""
The name of the import library that will be outputted by the compiler
@@ -1549,6 +1560,8 @@ class SharedLibrary(BuildTarget):
# A shared library that is meant to be used with dlopen rather than linking
# into something else.
class SharedModule(SharedLibrary):
+ known_kwargs = known_shmod_kwargs
+
def __init__(self, name, subdir, subproject, is_cross, sources, objects, environment, kwargs):
if 'version' in kwargs:
raise MesonException('Shared modules must not specify the version kwarg.')
@@ -1558,19 +1571,20 @@ class SharedModule(SharedLibrary):
self.import_filename = None
class CustomTarget(Target):
- known_kwargs = {'input': True,
- 'output': True,
- 'command': True,
- 'capture': False,
- 'install': True,
- 'install_dir': True,
- 'build_always': True,
- 'depends': True,
- 'depend_files': True,
- 'depfile': True,
- 'build_by_default': True,
- 'override_options': True,
- }
+ known_kwargs = set([
+ 'input',
+ 'output',
+ 'command',
+ 'capture',
+ 'install',
+ 'install_dir',
+ 'build_always',
+ 'depends',
+ 'depend_files',
+ 'depfile',
+ 'build_by_default',
+ 'override_options',
+ ])
def __init__(self, name, subdir, subproject, kwargs, absolute_paths=False):
super().__init__(name, subdir, subproject, False)
@@ -1804,6 +1818,8 @@ class RunTarget(Target):
return "@run"
class Jar(BuildTarget):
+ known_kwargs = known_jar_kwargs
+
def __init__(self, name, subdir, subproject, is_cross, sources, objects, environment, kwargs):
super().__init__(name, subdir, subproject, is_cross, sources, objects, environment, kwargs)
for s in self.sources:
@@ -1826,9 +1842,6 @@ class Jar(BuildTarget):
# All jar targets are installable.
pass
- def check_unknown_kwargs(self, kwargs):
- self.check_unknown_kwargs_int(kwargs, known_jar_kwargs)
-
class CustomTargetIndex:
"""A special opaque object returned by indexing a CustomTarget. This object
diff --git a/mesonbuild/compilers/c.py b/mesonbuild/compilers/c.py
index 56b46b4..27cf43a 100644
--- a/mesonbuild/compilers/c.py
+++ b/mesonbuild/compilers/c.py
@@ -367,24 +367,52 @@ class CCompiler(Compiler):
return self.compiles(t.format(**fargs), env, extra_args, dependencies)
def cross_compute_int(self, expression, low, high, guess, prefix, env, extra_args, dependencies):
+ # Try user's guess first
if isinstance(guess, int):
if self._compile_int('%s == %d' % (expression, guess), prefix, env, extra_args, dependencies):
return guess
- cur = low
- while low < high:
- cur = int((low + high) / 2)
- if cur == low:
- break
-
- if self._compile_int('%s >= %d' % (expression, cur), prefix, env, extra_args, dependencies):
- low = cur
+ # If no bounds are given, compute them in the limit of int32
+ maxint = 0x7fffffff
+ minint = -0x80000000
+ if not isinstance(low, int) or not isinstance(high, int):
+ if self._compile_int('%s >= 0' % (expression), prefix, env, extra_args, dependencies):
+ low = cur = 0
+ while self._compile_int('%s > %d' % (expression, cur), prefix, env, extra_args, dependencies):
+ low = cur + 1
+ if low > maxint:
+ raise EnvironmentException('Cross-compile check overflowed')
+ cur = cur * 2 + 1
+ if cur > maxint:
+ cur = maxint
+ high = cur
else:
+ low = cur = -1
+ while self._compile_int('%s < %d' % (expression, cur), prefix, env, extra_args, dependencies):
+ high = cur - 1
+ if high < minint:
+ raise EnvironmentException('Cross-compile check overflowed')
+ cur = cur * 2
+ if cur < minint:
+ cur = minint
+ low = cur
+ else:
+ # Sanity check limits given by user
+ if high < low:
+ raise EnvironmentException('high limit smaller than low limit')
+ condition = '%s <= %d && %s >= %d' % (expression, high, expression, low)
+ if not self._compile_int(condition, prefix, env, extra_args, dependencies):
+ raise EnvironmentException('Value out of given range')
+
+ # Binary search
+ while low != high:
+ cur = low + int((high - low) / 2)
+ if self._compile_int('%s <= %d' % (expression, cur), prefix, env, extra_args, dependencies):
high = cur
+ else:
+ low = cur + 1
- if self._compile_int('%s == %d' % (expression, cur), prefix, env, extra_args, dependencies):
- return cur
- raise EnvironmentException('Cross-compile check overflowed')
+ return low
def compute_int(self, expression, low, high, guess, prefix, env, extra_args=None, dependencies=None):
if extra_args is None:
@@ -416,7 +444,7 @@ class CCompiler(Compiler):
}}'''
if not self.compiles(t.format(**fargs), env, extra_args, dependencies):
return -1
- return self.cross_compute_int('sizeof(%s)' % typename, 1, 1024, None, prefix, env, extra_args, dependencies)
+ return self.cross_compute_int('sizeof(%s)' % typename, None, None, None, prefix, env, extra_args, dependencies)
def sizeof(self, typename, prefix, env, extra_args=None, dependencies=None):
if extra_args is None:
@@ -454,7 +482,7 @@ class CCompiler(Compiler):
char c;
{type} target;
}};'''
- return self.cross_compute_int('offsetof(struct tmp, target)', 1, 1024, None, t.format(**fargs), env, extra_args, dependencies)
+ return self.cross_compute_int('offsetof(struct tmp, target)', None, None, None, t.format(**fargs), env, extra_args, dependencies)
def alignment(self, typename, prefix, env, extra_args=None, dependencies=None):
if extra_args is None:
diff --git a/mesonbuild/coredata.py b/mesonbuild/coredata.py
index 993effc..88d007a 100644
--- a/mesonbuild/coredata.py
+++ b/mesonbuild/coredata.py
@@ -422,7 +422,7 @@ builtin_options = {
'werror': [UserBooleanOption, 'Treat warnings as errors.', False],
'warning_level': [UserComboOption, 'Compiler warning level to use.', ['1', '2', '3'], '1'],
'layout': [UserComboOption, 'Build directory layout.', ['mirror', 'flat'], 'mirror'],
- 'default_library': [UserComboOption, 'Default library type.', ['shared', 'static'], 'shared'],
+ 'default_library': [UserComboOption, 'Default library type.', ['shared', 'static', 'both'], 'shared'],
'backend': [UserComboOption, 'Backend to use.', backendlist, 'ninja'],
'stdsplit': [UserBooleanOption, 'Split stdout and stderr in test logs.', True],
'errorlogs': [UserBooleanOption, "Whether to print the logs from failing tests.", True],
diff --git a/mesonbuild/environment.py b/mesonbuild/environment.py
index ff7c706..0115fb3 100644
--- a/mesonbuild/environment.py
+++ b/mesonbuild/environment.py
@@ -674,7 +674,7 @@ class Environment:
except OSError:
raise EnvironmentException('Could not execute Java compiler "%s"' % ' '.join(exelist))
version = search_version(err)
- if 'javac' in err:
+ if 'javac' in out or 'javac' in err:
return JavaCompiler(exelist, version)
raise EnvironmentException('Unknown compiler "' + ' '.join(exelist) + '"')
diff --git a/mesonbuild/interpreter.py b/mesonbuild/interpreter.py
index c87a49b..6e3b864 100644
--- a/mesonbuild/interpreter.py
+++ b/mesonbuild/interpreter.py
@@ -26,7 +26,7 @@ from .dependencies import ExternalProgram
from .dependencies import InternalDependency, Dependency, DependencyException
from .interpreterbase import InterpreterBase
from .interpreterbase import check_stringlist, noPosargs, noKwargs, stringArgs, permittedKwargs, permittedMethodKwargs
-from .interpreterbase import InterpreterException, InvalidArguments, InvalidCode
+from .interpreterbase import InterpreterException, InvalidArguments, InvalidCode, SubdirDoneRequest
from .interpreterbase import InterpreterObject, MutableInterpreterObject, Disabler
from .modules import ModuleReturnValue
@@ -605,6 +605,31 @@ class StaticLibraryHolder(BuildTargetHolder):
class SharedLibraryHolder(BuildTargetHolder):
def __init__(self, target, interp):
super().__init__(target, interp)
+ # Set to True only when called from self.func_shared_lib().
+ target.shared_library_only = False
+
+class BothLibrariesHolder(BuildTargetHolder):
+ def __init__(self, shared_holder, static_holder, interp):
+ # FIXME: This build target always represents the shared library, but
+ # that should be configurable.
+ super().__init__(shared_holder.held_object, interp)
+ self.shared_holder = shared_holder
+ self.static_holder = static_holder
+ self.methods.update({'get_shared_lib': self.get_shared_lib_method,
+ 'get_static_lib': self.get_static_lib_method,
+ })
+
+ def __repr__(self):
+ r = '<{} {}: {}, {}: {}>'
+ h1 = self.shared_holder.held_object
+ h2 = self.static_holder.held_object
+ return r.format(self.__class__.__name__, h1.get_id(), h1.filename, h2.get_id(), h2.filename)
+
+ def get_shared_lib_method(self, args, kwargs):
+ return self.shared_holder
+
+ def get_static_lib_method(self, args, kwargs):
+ return self.static_holder
class SharedModuleHolder(BuildTargetHolder):
def __init__(self, target, interp):
@@ -959,20 +984,20 @@ class CompilerHolder(InterpreterObject):
check_stringlist(args)
expression = args[0]
prefix = kwargs.get('prefix', '')
- l = kwargs.get('low', -1024)
- h = kwargs.get('high', 1024)
+ low = kwargs.get('low', None)
+ high = kwargs.get('high', None)
guess = kwargs.get('guess', None)
if not isinstance(prefix, str):
raise InterpreterException('Prefix argument of compute_int must be a string.')
- if not isinstance(l, int):
+ if low is not None and not isinstance(low, int):
raise InterpreterException('Low argument of compute_int must be an int.')
- if not isinstance(h, int):
+ if high is not None and not isinstance(high, int):
raise InterpreterException('High argument of compute_int must be an int.')
if guess is not None and not isinstance(guess, int):
raise InterpreterException('Guess argument of compute_int must be an int.')
extra_args = self.determine_args(kwargs)
deps = self.determine_dependencies(kwargs)
- res = self.compiler.compute_int(expression, l, h, guess, prefix, self.environment, extra_args, deps)
+ res = self.compiler.compute_int(expression, low, high, guess, prefix, self.environment, extra_args, deps)
mlog.log('Computing int of "%s": %d' % (expression, res))
return res
@@ -1418,71 +1443,17 @@ class MesonMain(InterpreterObject):
raise InterpreterException('Unknown cross property: %s.' % propname)
-pch_kwargs = set(['c_pch', 'cpp_pch'])
-
-lang_arg_kwargs = set([
- 'c_args',
- 'cpp_args',
- 'd_args',
- 'd_import_dirs',
- 'd_unittest',
- 'd_module_versions',
- 'fortran_args',
- 'java_args',
- 'objc_args',
- 'objcpp_args',
- 'rust_args',
- 'vala_args',
- 'cs_args',
-])
-
-vala_kwargs = set(['vala_header', 'vala_gir', 'vala_vapi'])
-rust_kwargs = set(['rust_crate_type'])
-cs_kwargs = set(['resources', 'cs_args'])
-
-buildtarget_kwargs = set([
- 'build_by_default',
- 'build_rpath',
- 'dependencies',
- 'extra_files',
- 'gui_app',
- 'link_with',
- 'link_whole',
- 'link_args',
- 'link_depends',
- 'implicit_include_directories',
- 'include_directories',
- 'install',
- 'install_rpath',
- 'install_dir',
- 'name_prefix',
- 'name_suffix',
- 'native',
- 'objects',
- 'override_options',
- 'pic',
- 'sources',
- 'vs_module_defs',
-])
-
-build_target_common_kwargs = (
- buildtarget_kwargs |
- lang_arg_kwargs |
- pch_kwargs |
- vala_kwargs |
- rust_kwargs |
- cs_kwargs)
-
-exe_kwargs = (build_target_common_kwargs) | {'implib', 'export_dynamic'}
-shlib_kwargs = (build_target_common_kwargs) | {'version', 'soversion'}
-shmod_kwargs = shlib_kwargs
-stlib_kwargs = shlib_kwargs
-
-jar_kwargs = exe_kwargs.copy()
-jar_kwargs.update(['main_class'])
-
-build_target_kwargs = exe_kwargs.copy()
-build_target_kwargs.update(['target_type'])
+known_library_kwargs = (
+ build.known_shlib_kwargs |
+ build.known_stlib_kwargs
+)
+
+known_build_target_kwargs = (
+ known_library_kwargs |
+ build.known_exe_kwargs |
+ build.known_jar_kwargs |
+ {'target_type'}
+)
permitted_kwargs = {'add_global_arguments': {'language'},
'add_global_link_arguments': {'language'},
@@ -1491,12 +1462,12 @@ permitted_kwargs = {'add_global_arguments': {'language'},
'add_project_arguments': {'language'},
'add_test_setup': {'exe_wrapper', 'gdb', 'timeout_multiplier', 'env'},
'benchmark': {'args', 'env', 'should_fail', 'timeout', 'workdir', 'suite'},
- 'build_target': build_target_kwargs,
+ 'build_target': known_build_target_kwargs,
'configure_file': {'input', 'output', 'configuration', 'command', 'install_dir', 'capture', 'install'},
'custom_target': {'input', 'output', 'command', 'install', 'install_dir', 'build_always', 'capture', 'depends', 'depend_files', 'depfile', 'build_by_default'},
'dependency': {'default_options', 'fallback', 'language', 'main', 'method', 'modules', 'optional_modules', 'native', 'required', 'static', 'version'},
'declare_dependency': {'include_directories', 'link_with', 'sources', 'dependencies', 'compile_args', 'link_args', 'link_whole', 'version'},
- 'executable': exe_kwargs,
+ 'executable': build.known_exe_kwargs,
'find_program': {'required', 'native'},
'generator': {'arguments', 'output', 'depfile', 'capture', 'preserve_path_from'},
'include_directories': {'is_system'},
@@ -1504,12 +1475,14 @@ permitted_kwargs = {'add_global_arguments': {'language'},
'install_headers': {'install_dir', 'subdir'},
'install_man': {'install_dir'},
'install_subdir': {'exclude_files', 'exclude_directories', 'install_dir', 'install_mode', 'strip_directory'},
- 'jar': jar_kwargs,
+ 'jar': build.known_jar_kwargs,
'project': {'version', 'meson_version', 'default_options', 'license', 'subproject_dir'},
'run_target': {'command', 'depends'},
- 'shared_library': shlib_kwargs,
- 'shared_module': shmod_kwargs,
- 'static_library': stlib_kwargs,
+ 'shared_library': build.known_shlib_kwargs,
+ 'shared_module': build.known_shmod_kwargs,
+ 'static_library': build.known_stlib_kwargs,
+ 'both_libraries': known_library_kwargs,
+ 'library': known_library_kwargs,
'subdir': {'if_found'},
'subproject': {'version', 'default_options'},
'test': {'args', 'env', 'is_parallel', 'should_fail', 'timeout', 'workdir', 'suite'},
@@ -1606,12 +1579,14 @@ class Interpreter(InterpreterBase):
'run_command': self.func_run_command,
'set_variable': self.func_set_variable,
'subdir': self.func_subdir,
+ 'subdir_done': self.func_subdir_done,
'subproject': self.func_subproject,
'shared_library': self.func_shared_lib,
'shared_module': self.func_shared_module,
'static_library': self.func_static_lib,
+ 'both_libraries': self.func_both_lib,
'test': self.func_test,
- 'vcs_tag': self.func_vcs_tag,
+ 'vcs_tag': self.func_vcs_tag
})
if 'MESON_UNIT_TEST' in os.environ:
self.funcs.update({'exception': self.func_exception})
@@ -2310,7 +2285,10 @@ to directly access options of other subprojects.''')
return progobj
def func_find_library(self, node, args, kwargs):
- raise InvalidCode('find_library() is removed, use the corresponding method in a compiler object instead.')
+ raise InvalidCode('find_library() is removed, use meson.get_compiler(\'name\').find_library() instead.\n'
+ 'Look here for documentation: http://mesonbuild.com/Reference-manual.html#compiler-object\n'
+ 'Look here for example: http://mesonbuild.com/howtox.html#add-math-library-lm-portably\n'
+ )
def _find_cached_dep(self, name, kwargs):
# Check if we want this as a cross-dep or a native-dep
@@ -2426,10 +2404,13 @@ to directly access options of other subprojects.''')
dep = None
# Search for it outside the project
- try:
- dep = dependencies.find_external_dependency(name, self.environment, kwargs)
- except DependencyException as e:
- exception = e
+ if self.coredata.wrap_mode != WrapMode.forcefallback or 'fallback' not in kwargs:
+ try:
+ dep = dependencies.find_external_dependency(name, self.environment, kwargs)
+ except DependencyException as e:
+ exception = e
+ else:
+ exception = DependencyException("fallback for %s not found" % name)
# Search inside the projects list
if not dep or not dep.found():
@@ -2535,20 +2516,24 @@ root and issuing %s.
@permittedKwargs(permitted_kwargs['shared_library'])
def func_shared_lib(self, node, args, kwargs):
- return self.build_target(node, args, kwargs, SharedLibraryHolder)
+ holder = self.build_target(node, args, kwargs, SharedLibraryHolder)
+ holder.held_object.shared_library_only = True
+ return holder
+
+ @permittedKwargs(permitted_kwargs['both_libraries'])
+ def func_both_lib(self, node, args, kwargs):
+ return self.build_both_libraries(node, args, kwargs)
@permittedKwargs(permitted_kwargs['shared_module'])
def func_shared_module(self, node, args, kwargs):
return self.build_target(node, args, kwargs, SharedModuleHolder)
+ @permittedKwargs(permitted_kwargs['library'])
def func_library(self, node, args, kwargs):
- if self.coredata.get_builtin_option('default_library') == 'shared':
- return self.func_shared_lib(node, args, kwargs)
- return self.func_static_lib(node, args, kwargs)
+ return self.build_library(node, args, kwargs)
@permittedKwargs(permitted_kwargs['jar'])
def func_jar(self, node, args, kwargs):
- kwargs['target_type'] = 'jar'
return self.build_target(node, args, kwargs, JarHolder)
@permittedKwargs(permitted_kwargs['build_target'])
@@ -2557,15 +2542,17 @@ root and issuing %s.
raise InterpreterException('Missing target_type keyword argument')
target_type = kwargs.pop('target_type')
if target_type == 'executable':
- return self.func_executable(node, args, kwargs)
+ return self.build_target(node, args, kwargs, ExecutableHolder)
elif target_type == 'shared_library':
- return self.func_shared_lib(node, args, kwargs)
+ return self.build_target(node, args, kwargs, SharedLibraryHolder)
elif target_type == 'static_library':
- return self.func_static_lib(node, args, kwargs)
+ return self.build_target(node, args, kwargs, StaticLibraryHolder)
+ elif target_type == 'both_libraries':
+ return self.build_both_libraries(node, args, kwargs)
elif target_type == 'library':
- return self.func_library(node, args, kwargs)
+ return self.build_library(node, args, kwargs)
elif target_type == 'jar':
- return self.func_jar(node, args, kwargs)
+ return self.build_target(node, args, kwargs, JarHolder)
else:
raise InterpreterException('Unknown target_type.')
@@ -2607,6 +2594,14 @@ root and issuing %s.
return self.func_custom_target(node, [kwargs['output']], kwargs)
@stringArgs
+ def func_subdir_done(self, node, args, kwargs):
+ if len(kwargs) > 0:
+ raise InterpreterException('exit does not take named arguments')
+ if len(args) > 0:
+ raise InterpreterException('exit does not take any arguments')
+ raise SubdirDoneRequest()
+
+ @stringArgs
@permittedKwargs(permitted_kwargs['custom_target'])
def func_custom_target(self, node, args, kwargs):
if len(args) != 1:
@@ -3210,6 +3205,41 @@ different subdirectory.
if idname not in self.coredata.target_guids:
self.coredata.target_guids[idname] = str(uuid.uuid4()).upper()
+ def build_both_libraries(self, node, args, kwargs):
+ shared_holder = self.build_target(node, args, kwargs, SharedLibraryHolder)
+
+ # Check if user forces non-PIC static library.
+ pic = True
+ if 'pic' in kwargs:
+ pic = kwargs['pic']
+ elif 'b_staticpic' in self.environment.coredata.base_options:
+ pic = self.environment.coredata.base_options['b_staticpic'].value
+
+ if pic:
+ # Exclude sources from args and kwargs to avoid building them twice
+ static_args = [args[0]]
+ static_kwargs = kwargs.copy()
+ static_kwargs['sources'] = []
+ static_kwargs['objects'] = shared_holder.held_object.extract_all_objects()
+ else:
+ static_args = args
+ static_kwargs = kwargs
+
+ static_holder = self.build_target(node, static_args, static_kwargs, StaticLibraryHolder)
+
+ return BothLibrariesHolder(shared_holder, static_holder, self)
+
+ def build_library(self, node, args, kwargs):
+ default_library = self.coredata.get_builtin_option('default_library')
+ if default_library == 'shared':
+ return self.build_target(node, args, kwargs, SharedLibraryHolder)
+ elif default_library == 'static':
+ return self.build_target(node, args, kwargs, StaticLibraryHolder)
+ elif default_library == 'both':
+ return self.build_both_libraries(node, args, kwargs)
+ else:
+ raise InterpreterException('Unknown default_library value: %s.', default_library)
+
def build_target(self, node, args, kwargs, targetholder):
if not args:
raise InterpreterException('Target does not have a name.')
@@ -3245,7 +3275,13 @@ different subdirectory.
mlog.debug('Unknown target type:', str(targetholder))
raise RuntimeError('Unreachable code')
self.kwarg_strings_to_includedirs(kwargs)
+
+ # Filter out kwargs from other target types. For example, 'soversion'
+ # is passed to library() when default_library == 'static'.
+ kwargs = {k: v for k, v in kwargs.items() if k in targetclass.known_kwargs}
+
target = targetclass(name, self.subdir, self.subproject, is_cross, sources, objs, self.environment, kwargs)
+
if is_cross:
self.add_cross_stdlib_info(target)
l = targetholder(target, self)
diff --git a/mesonbuild/interpreterbase.py b/mesonbuild/interpreterbase.py
index 9279506..f957d90 100644
--- a/mesonbuild/interpreterbase.py
+++ b/mesonbuild/interpreterbase.py
@@ -105,6 +105,9 @@ class InvalidCode(InterpreterException):
class InvalidArguments(InterpreterException):
pass
+class SubdirDoneRequest(BaseException):
+ pass
+
class InterpreterObject:
def __init__(self):
self.methods = {}
@@ -203,6 +206,8 @@ class InterpreterBase:
try:
self.current_lineno = cur.lineno
self.evaluate_statement(cur)
+ except SubdirDoneRequest:
+ break
except Exception as e:
if not(hasattr(e, 'lineno')):
e.lineno = cur.lineno
diff --git a/mesonbuild/mconf.py b/mesonbuild/mconf.py
index b409615..cadd306 100644
--- a/mesonbuild/mconf.py
+++ b/mesonbuild/mconf.py
@@ -17,13 +17,15 @@ import sys
import argparse
from . import (coredata, mesonlib, build)
-parser = argparse.ArgumentParser(prog='meson configure')
+def buildparser():
+ parser = argparse.ArgumentParser(prog='meson configure')
-parser.add_argument('-D', action='append', default=[], dest='sets',
- help='Set an option to the given value.')
-parser.add_argument('directory', nargs='*')
-parser.add_argument('--clearcache', action='store_true', default=False,
- help='Clear cached state (e.g. found dependencies)')
+ parser.add_argument('-D', action='append', default=[], dest='sets',
+ help='Set an option to the given value.')
+ parser.add_argument('directory', nargs='*')
+ parser.add_argument('--clearcache', action='store_true', default=False,
+ help='Clear cached state (e.g. found dependencies)')
+ return parser
class ConfException(mesonlib.MesonException):
@@ -226,7 +228,7 @@ def run(args):
args = mesonlib.expand_arguments(args)
if not args:
args = [os.getcwd()]
- options = parser.parse_args(args)
+ options = buildparser().parse_args(args)
if len(options.directory) > 1:
print('%s <build directory>' % args[0])
print('If you omit the build directory, the current directory is substituted.')
diff --git a/mesonbuild/mintro.py b/mesonbuild/mintro.py
index 23e666c..74d26da 100644
--- a/mesonbuild/mintro.py
+++ b/mesonbuild/mintro.py
@@ -26,26 +26,28 @@ import argparse
import sys, os
import pathlib
-parser = argparse.ArgumentParser(prog='meson introspect')
-parser.add_argument('--targets', action='store_true', dest='list_targets', default=False,
- help='List top level targets.')
-parser.add_argument('--installed', action='store_true', dest='list_installed', default=False,
- help='List all installed files and directories.')
-parser.add_argument('--target-files', action='store', dest='target_files', default=None,
- help='List source files for a given target.')
-parser.add_argument('--buildsystem-files', action='store_true', dest='buildsystem_files', default=False,
- help='List files that make up the build system.')
-parser.add_argument('--buildoptions', action='store_true', dest='buildoptions', default=False,
- help='List all build options.')
-parser.add_argument('--tests', action='store_true', dest='tests', default=False,
- help='List all unit tests.')
-parser.add_argument('--benchmarks', action='store_true', dest='benchmarks', default=False,
- help='List all benchmarks.')
-parser.add_argument('--dependencies', action='store_true', dest='dependencies', default=False,
- help='List external dependencies.')
-parser.add_argument('--projectinfo', action='store_true', dest='projectinfo', default=False,
- help='Information about projects.')
-parser.add_argument('builddir', nargs='?', help='The build directory')
+def buildparser():
+ parser = argparse.ArgumentParser(prog='meson introspect')
+ parser.add_argument('--targets', action='store_true', dest='list_targets', default=False,
+ help='List top level targets.')
+ parser.add_argument('--installed', action='store_true', dest='list_installed', default=False,
+ help='List all installed files and directories.')
+ parser.add_argument('--target-files', action='store', dest='target_files', default=None,
+ help='List source files for a given target.')
+ parser.add_argument('--buildsystem-files', action='store_true', dest='buildsystem_files', default=False,
+ help='List files that make up the build system.')
+ parser.add_argument('--buildoptions', action='store_true', dest='buildoptions', default=False,
+ help='List all build options.')
+ parser.add_argument('--tests', action='store_true', dest='tests', default=False,
+ help='List all unit tests.')
+ parser.add_argument('--benchmarks', action='store_true', dest='benchmarks', default=False,
+ help='List all benchmarks.')
+ parser.add_argument('--dependencies', action='store_true', dest='dependencies', default=False,
+ help='List external dependencies.')
+ parser.add_argument('--projectinfo', action='store_true', dest='projectinfo', default=False,
+ help='Information about projects.')
+ parser.add_argument('builddir', nargs='?', help='The build directory')
+ return parser
def determine_installed_path(target, installdata):
install_target = None
@@ -202,7 +204,7 @@ def list_projinfo(builddata):
def run(args):
datadir = 'meson-private'
- options = parser.parse_args(args)
+ options = buildparser().parse_args(args)
if options.builddir is not None:
datadir = os.path.join(options.builddir, datadir)
if not os.path.isdir(datadir):
diff --git a/mesonbuild/modules/gnome.py b/mesonbuild/modules/gnome.py
index 8b6397e..30364a6 100644
--- a/mesonbuild/modules/gnome.py
+++ b/mesonbuild/modules/gnome.py
@@ -792,7 +792,7 @@ This will become a hard error in the future.''')
state.backend.get_target_dir(s),
s.get_outputs()[0]))
elif isinstance(s, mesonlib.File):
- content_files.append(s.rel_to_builddir(state.build_to_src))
+ content_files.append(os.path.join(state.environment.get_build_dir(), s.subdir, s.fname))
elif isinstance(s, build.GeneratedList):
depends.append(s)
for gen_src in s.get_outputs():
diff --git a/mesonbuild/modules/pkgconfig.py b/mesonbuild/modules/pkgconfig.py
index c85624c..11fa80d 100644
--- a/mesonbuild/modules/pkgconfig.py
+++ b/mesonbuild/modules/pkgconfig.py
@@ -87,6 +87,7 @@ class DependenciesHelper:
processed_reqs = []
processed_cflags = []
for obj in libs:
+ shared_library_only = getattr(obj, 'shared_library_only', False)
if hasattr(obj, 'pcdep'):
pcdeps = mesonlib.listify(obj.pcdep)
for d in pcdeps:
@@ -105,26 +106,24 @@ class DependenciesHelper:
if obj.found():
processed_libs += obj.get_link_args()
processed_cflags += obj.get_compile_args()
- elif isinstance(obj, build.SharedLibrary):
+ elif isinstance(obj, build.SharedLibrary) and shared_library_only:
+ # Do not pull dependencies for shared libraries because they are
+ # only required for static linking. Adding private requires has
+ # the side effect of exposing their cflags, which is the
+ # intended behaviour of pkg-config but forces Debian to add more
+ # build dependencies than needed.
+ # See https://bugs.freedesktop.org/show_bug.cgi?id=105572
processed_libs.append(obj)
if public:
if not hasattr(obj, 'generated_pc'):
obj.generated_pc = self.name
- elif isinstance(obj, build.StaticLibrary):
- # Due to a "feature" in pkgconfig, it leaks out private dependencies.
- # Thus we will not add them to the pc file unless the target
- # we are processing is a static library.
- #
- # This way (hopefully) "pkgconfig --libs --static foobar" works
- # and "pkgconfig --cflags/--libs foobar" does not have any trace
- # of dependencies that the build file creator has not explicitly
- # added to the dependency list.
+ elif isinstance(obj, (build.SharedLibrary, build.StaticLibrary)):
processed_libs.append(obj)
+ self.add_priv_libs(obj.get_dependencies())
+ self.add_priv_libs(obj.get_external_deps())
if public:
if not hasattr(obj, 'generated_pc'):
obj.generated_pc = self.name
- self.add_priv_libs(obj.get_dependencies())
- self.add_priv_libs(obj.get_external_deps())
elif isinstance(obj, str):
processed_libs.append(obj)
else:
@@ -134,9 +133,11 @@ class DependenciesHelper:
def add_version_reqs(self, name, version_reqs):
if version_reqs:
- vreqs = self.version_reqs.get(name, [])
- vreqs += mesonlib.stringlistify(version_reqs)
- self.version_reqs[name] = vreqs
+ if name not in self.version_reqs:
+ self.version_reqs[name] = set()
+ # We could have '>=1.0' or '>= 1.0', remove spaces to normalize
+ new_vreqs = [s.replace(' ', '') for s in mesonlib.stringlistify(version_reqs)]
+ self.version_reqs[name].update(new_vreqs)
def split_version_req(self, s):
for op in ['>=', '<=', '!=', '==', '=', '>', '<']:
@@ -301,20 +302,34 @@ class PkgConfigModule(ExtensionModule):
'subdirs', 'requires', 'requires_private', 'libraries_private',
'install_dir', 'extra_cflags', 'variables', 'url', 'd_module_versions'})
def generate(self, state, args, kwargs):
- if len(args) > 0:
- raise mesonlib.MesonException('Pkgconfig_gen takes no positional arguments.')
+ default_version = state.project_version['version']
+ default_install_dir = None
+ default_description = None
+ default_name = None
+ mainlib = None
+ if len(args) == 1:
+ mainlib = getattr(args[0], 'held_object', args[0])
+ if not isinstance(mainlib, (build.StaticLibrary, build.SharedLibrary)):
+ raise mesonlib.MesonException('Pkgconfig_gen first positional argument must be a library object')
+ default_name = mainlib.name
+ default_description = state.project_name + ': ' + mainlib.name
+ install_dir = mainlib.get_custom_install_dir()[0]
+ if isinstance(install_dir, str):
+ default_install_dir = os.path.join(install_dir, 'pkgconfig')
+ elif len(args) > 1:
+ raise mesonlib.MesonException('Too many positional arguments passed to Pkgconfig_gen.')
subdirs = mesonlib.stringlistify(kwargs.get('subdirs', ['.']))
- version = kwargs.get('version', None)
+ version = kwargs.get('version', default_version)
if not isinstance(version, str):
raise mesonlib.MesonException('Version must be specified.')
- name = kwargs.get('name', None)
+ name = kwargs.get('name', default_name)
if not isinstance(name, str):
raise mesonlib.MesonException('Name not specified.')
filebase = kwargs.get('filebase', name)
if not isinstance(filebase, str):
raise mesonlib.MesonException('Filebase must be a string.')
- description = kwargs.get('description', None)
+ description = kwargs.get('description', default_description)
if not isinstance(description, str):
raise mesonlib.MesonException('Description is not a string.')
url = kwargs.get('url', '')
@@ -323,6 +338,8 @@ class PkgConfigModule(ExtensionModule):
conflicts = mesonlib.stringlistify(kwargs.get('conflicts', []))
deps = DependenciesHelper(filebase)
+ if mainlib:
+ deps.add_pub_libs(mainlib)
deps.add_pub_libs(kwargs.get('libraries', []))
deps.add_priv_libs(kwargs.get('libraries_private', []))
deps.add_pub_reqs(kwargs.get('requires', []))
@@ -333,7 +350,7 @@ class PkgConfigModule(ExtensionModule):
if dversions:
compiler = state.environment.coredata.compilers.get('d')
if compiler:
- deps.add_cflags(compiler.get_feature_args({'versions': dversions}))
+ deps.add_cflags(compiler.get_feature_args({'versions': dversions}, None))
def parse_variable_list(stringlist):
reserved = ['prefix', 'libdir', 'includedir']
@@ -362,7 +379,7 @@ class PkgConfigModule(ExtensionModule):
variables = parse_variable_list(mesonlib.stringlistify(kwargs.get('variables', [])))
pcfile = filebase + '.pc'
- pkgroot = kwargs.get('install_dir', None)
+ pkgroot = kwargs.get('install_dir', default_install_dir)
if pkgroot is None:
pkgroot = os.path.join(state.environment.coredata.get_builtin_option('libdir'), 'pkgconfig')
if not isinstance(pkgroot, str):
diff --git a/mesonbuild/modules/python3.py b/mesonbuild/modules/python3.py
index 9fd9f80..d2bf1dc 100644
--- a/mesonbuild/modules/python3.py
+++ b/mesonbuild/modules/python3.py
@@ -19,10 +19,7 @@ from . import ExtensionModule
from mesonbuild.modules import ModuleReturnValue
from . import permittedSnippetKwargs
from ..interpreterbase import noKwargs
-from ..interpreter import shlib_kwargs
-
-mod_kwargs = set()
-mod_kwargs.update(shlib_kwargs)
+from ..build import known_shmod_kwargs
class Python3Module(ExtensionModule):
@@ -30,7 +27,7 @@ class Python3Module(ExtensionModule):
super().__init__()
self.snippets.add('extension_module')
- @permittedSnippetKwargs(mod_kwargs)
+ @permittedSnippetKwargs(known_shmod_kwargs)
def extension_module(self, interpreter, state, args, kwargs):
if 'name_prefix' in kwargs:
raise mesonlib.MesonException('Name_prefix is set automatically, specifying it is forbidden.')
diff --git a/mesonbuild/mtest.py b/mesonbuild/mtest.py
index 4ed80b1..91567f2 100644
--- a/mesonbuild/mtest.py
+++ b/mesonbuild/mtest.py
@@ -29,6 +29,7 @@ import platform
import signal
import random
from copy import deepcopy
+import enum
# GNU autotools interprets a return code of 77 from tests it executes to
# mean that the test should be skipped.
@@ -59,56 +60,68 @@ def determine_worker_count():
num_workers = 1
return num_workers
-parser = argparse.ArgumentParser(prog='meson test')
-parser.add_argument('--repeat', default=1, dest='repeat', type=int,
- help='Number of times to run the tests.')
-parser.add_argument('--no-rebuild', default=False, action='store_true',
- help='Do not rebuild before running tests.')
-parser.add_argument('--gdb', default=False, dest='gdb', action='store_true',
- help='Run test under gdb.')
-parser.add_argument('--list', default=False, dest='list', action='store_true',
- help='List available tests.')
-parser.add_argument('--wrapper', default=None, dest='wrapper', type=shlex.split,
- help='wrapper to run tests with (e.g. Valgrind)')
-parser.add_argument('-C', default='.', dest='wd',
- help='directory to cd into before running')
-parser.add_argument('--suite', default=[], dest='include_suites', action='append', metavar='SUITE',
- help='Only run tests belonging to the given suite.')
-parser.add_argument('--no-suite', default=[], dest='exclude_suites', action='append', metavar='SUITE',
- help='Do not run tests belonging to the given suite.')
-parser.add_argument('--no-stdsplit', default=True, dest='split', action='store_false',
- help='Do not split stderr and stdout in test logs.')
-parser.add_argument('--print-errorlogs', default=False, action='store_true',
- help="Whether to print failing tests' logs.")
-parser.add_argument('--benchmark', default=False, action='store_true',
- help="Run benchmarks instead of tests.")
-parser.add_argument('--logbase', default='testlog',
- help="Base name for log file.")
-parser.add_argument('--num-processes', default=determine_worker_count(), type=int,
- help='How many parallel processes to use.')
-parser.add_argument('-v', '--verbose', default=False, action='store_true',
- help='Do not redirect stdout and stderr')
-parser.add_argument('-q', '--quiet', default=False, action='store_true',
- help='Produce less output to the terminal.')
-parser.add_argument('-t', '--timeout-multiplier', type=float, default=None,
- help='Define a multiplier for test timeout, for example '
- ' when running tests in particular conditions they might take'
- ' more time to execute.')
-parser.add_argument('--setup', default=None, dest='setup',
- help='Which test setup to use.')
-parser.add_argument('--test-args', default=[], type=shlex.split,
- help='Arguments to pass to the specified test(s) or all tests')
-parser.add_argument('args', nargs='*',
- help='Optional list of tests to run')
+def buildparser():
+ parser = argparse.ArgumentParser(prog='meson test')
+ parser.add_argument('--repeat', default=1, dest='repeat', type=int,
+ help='Number of times to run the tests.')
+ parser.add_argument('--no-rebuild', default=False, action='store_true',
+ help='Do not rebuild before running tests.')
+ parser.add_argument('--gdb', default=False, dest='gdb', action='store_true',
+ help='Run test under gdb.')
+ parser.add_argument('--list', default=False, dest='list', action='store_true',
+ help='List available tests.')
+ parser.add_argument('--wrapper', default=None, dest='wrapper', type=shlex.split,
+ help='wrapper to run tests with (e.g. Valgrind)')
+ parser.add_argument('-C', default='.', dest='wd',
+ help='directory to cd into before running')
+ parser.add_argument('--suite', default=[], dest='include_suites', action='append', metavar='SUITE',
+ help='Only run tests belonging to the given suite.')
+ parser.add_argument('--no-suite', default=[], dest='exclude_suites', action='append', metavar='SUITE',
+ help='Do not run tests belonging to the given suite.')
+ parser.add_argument('--no-stdsplit', default=True, dest='split', action='store_false',
+ help='Do not split stderr and stdout in test logs.')
+ parser.add_argument('--print-errorlogs', default=False, action='store_true',
+ help="Whether to print failing tests' logs.")
+ parser.add_argument('--benchmark', default=False, action='store_true',
+ help="Run benchmarks instead of tests.")
+ parser.add_argument('--logbase', default='testlog',
+ help="Base name for log file.")
+ parser.add_argument('--num-processes', default=determine_worker_count(), type=int,
+ help='How many parallel processes to use.')
+ parser.add_argument('-v', '--verbose', default=False, action='store_true',
+ help='Do not redirect stdout and stderr')
+ parser.add_argument('-q', '--quiet', default=False, action='store_true',
+ help='Produce less output to the terminal.')
+ parser.add_argument('-t', '--timeout-multiplier', type=float, default=None,
+ help='Define a multiplier for test timeout, for example '
+ ' when running tests in particular conditions they might take'
+ ' more time to execute.')
+ parser.add_argument('--setup', default=None, dest='setup',
+ help='Which test setup to use.')
+ parser.add_argument('--test-args', default=[], type=shlex.split,
+ help='Arguments to pass to the specified test(s) or all tests')
+ parser.add_argument('args', nargs='*',
+ help='Optional list of tests to run')
+ return parser
class TestException(mesonlib.MesonException):
pass
+@enum.unique
+class TestResult(enum.Enum):
+
+ OK = 'OK'
+ TIMEOUT = 'TIMEOUT'
+ SKIP = 'SKIP'
+ FAIL = 'FAIL'
+
+
class TestRun:
def __init__(self, res, returncode, should_fail, duration, stdo, stde, cmd,
env):
+ assert isinstance(res, TestResult)
self.res = res
self.returncode = returncode
self.duration = duration
@@ -123,7 +136,7 @@ class TestRun:
if self.cmd is None:
res += 'NONE\n'
else:
- res += "%s%s\n" % (''.join(["%s='%s' " % (k, v) for k, v in self.env.items()]), ' ' .join(self.cmd))
+ res += '%s%s\n' % (''.join(["%s='%s' " % (k, v) for k, v in self.env.items()]), ' ' .join(self.cmd))
if self.stdo:
res += '--- stdout ---\n'
res += self.stdo
@@ -148,7 +161,7 @@ def decode(stream):
def write_json_log(jsonlogfile, test_name, result):
jresult = {'name': test_name,
'stdout': result.stdo,
- 'result': result.res,
+ 'result': result.res.value,
'duration': result.duration,
'returncode': result.returncode,
'command': result.cmd}
@@ -181,6 +194,139 @@ def load_tests(build_dir):
obj = pickle.load(f)
return obj
+
+class SingleTestRunner:
+
+ def __init__(self, test, env, options):
+ self.test = test
+ self.env = env
+ self.options = options
+
+ def _get_cmd(self):
+ if self.test.fname[0].endswith('.jar'):
+ return ['java', '-jar'] + self.test.fname
+ elif not self.test.is_cross_built and run_with_mono(self.test.fname[0]):
+ return ['mono'] + self.test.fname
+ else:
+ if self.test.is_cross_built:
+ if self.test.exe_runner is None:
+                # Cannot run the test on a cross-compiled executable
+ # because there is no execute wrapper.
+ return None
+ else:
+ return [self.test.exe_runner] + self.test.fname
+ else:
+ return self.test.fname
+
+ def run(self):
+ cmd = self._get_cmd()
+ if cmd is None:
+ skip_stdout = 'Not run because can not execute cross compiled binaries.'
+ return TestRun(res=TestResult.SKIP, returncode=GNU_SKIP_RETURNCODE,
+ should_fail=self.test.should_fail, duration=0.0,
+ stdo=skip_stdout, stde=None, cmd=None, env=self.test.env)
+ else:
+ wrap = TestHarness.get_wrapper(self.options)
+ if self.options.gdb:
+ self.test.timeout = None
+ return self._run_cmd(wrap + cmd + self.test.cmd_args + self.options.test_args)
+
+ def _run_cmd(self, cmd):
+ starttime = time.time()
+
+ if len(self.test.extra_paths) > 0:
+ self.env['PATH'] = os.pathsep.join(self.test.extra_paths + ['']) + self.env['PATH']
+
+ # If MALLOC_PERTURB_ is not set, or if it is set to an empty value,
+ # (i.e., the test or the environment don't explicitly set it), set
+ # it ourselves. We do this unconditionally for regular tests
+ # because it is extremely useful to have.
+ # Setting MALLOC_PERTURB_="0" will completely disable this feature.
+ if ('MALLOC_PERTURB_' not in self.env or not self.env['MALLOC_PERTURB_']) and not self.options.benchmark:
+ self.env['MALLOC_PERTURB_'] = str(random.randint(1, 255))
+
+ stdout = None
+ stderr = None
+ if not self.options.verbose:
+ stdout = subprocess.PIPE
+ stderr = subprocess.PIPE if self.options and self.options.split else subprocess.STDOUT
+
+ # Let gdb handle ^C instead of us
+ if self.options.gdb:
+ previous_sigint_handler = signal.getsignal(signal.SIGINT)
+ # Make the meson executable ignore SIGINT while gdb is running.
+ signal.signal(signal.SIGINT, signal.SIG_IGN)
+
+ def preexec_fn():
+ if self.options.gdb:
+ # Restore the SIGINT handler for the child process to
+ # ensure it can handle it.
+ signal.signal(signal.SIGINT, signal.SIG_DFL)
+ else:
+            # We don't want setsid() in gdb because gdb needs the
+            # terminal in order to handle ^C, and to avoid tcsetpgrp()
+            # errors that would keep us from using the terminal.
+ os.setsid()
+
+ p = subprocess.Popen(cmd,
+ stdout=stdout,
+ stderr=stderr,
+ env=self.env,
+ cwd=self.test.workdir,
+ preexec_fn=preexec_fn if not is_windows() else None)
+ timed_out = False
+ kill_test = False
+ if self.test.timeout is None:
+ timeout = None
+ elif self.options.timeout_multiplier is not None:
+ timeout = self.test.timeout * self.options.timeout_multiplier
+ else:
+ timeout = self.test.timeout
+ try:
+ (stdo, stde) = p.communicate(timeout=timeout)
+ except subprocess.TimeoutExpired:
+ if self.options.verbose:
+ print('%s time out (After %d seconds)' % (self.test.name, timeout))
+ timed_out = True
+ except KeyboardInterrupt:
+ mlog.warning('CTRL-C detected while running %s' % (self.test.name))
+ kill_test = True
+ finally:
+ if self.options.gdb:
+ # Let us accept ^C again
+ signal.signal(signal.SIGINT, previous_sigint_handler)
+
+ if kill_test or timed_out:
+ # Python does not provide multiplatform support for
+ # killing a process and all its children so we need
+ # to roll our own.
+ if is_windows():
+ subprocess.call(['taskkill', '/F', '/T', '/PID', str(p.pid)])
+ else:
+ try:
+ os.killpg(os.getpgid(p.pid), signal.SIGKILL)
+ except ProcessLookupError:
+ # Sometimes (e.g. with Wine) this happens.
+ # There's nothing we can do (maybe the process
+ # already died) so carry on.
+ pass
+ (stdo, stde) = p.communicate()
+ endtime = time.time()
+ duration = endtime - starttime
+ stdo = decode(stdo)
+ if stde:
+ stde = decode(stde)
+ if timed_out:
+ res = TestResult.TIMEOUT
+ elif p.returncode == GNU_SKIP_RETURNCODE:
+ res = TestResult.SKIP
+ elif self.test.should_fail == bool(p.returncode):
+ res = TestResult.OK
+ else:
+ res = TestResult.FAIL
+ return TestRun(res, p.returncode, self.test.should_fail, duration, stdo, stde, cmd, self.test.env)
+
+
class TestHarness:
def __init__(self, options):
self.options = options
@@ -208,7 +354,7 @@ class TestHarness:
self.jsonlogfile.close()
def merge_suite_options(self, options, test):
- if ":" in options.setup:
+ if ':' in options.setup:
if options.setup not in self.build_data.test_setups:
sys.exit("Unknown test setup '%s'." % options.setup)
current = self.build_data.test_setups[options.setup]
@@ -229,7 +375,8 @@ class TestHarness:
options.wrapper = current.exe_wrapper
return current.env.get_env(os.environ.copy())
- def get_test_env(self, options, test):
+ def get_test_runner(self, test):
+ options = deepcopy(self.options)
if options.setup:
env = self.merge_suite_options(options, test)
else:
@@ -237,153 +384,33 @@ class TestHarness:
if isinstance(test.env, build.EnvironmentVariables):
test.env = test.env.get_env(env)
env.update(test.env)
- return env
-
- def run_single_test(self, test):
- if test.fname[0].endswith('.jar'):
- cmd = ['java', '-jar'] + test.fname
- elif not test.is_cross_built and run_with_mono(test.fname[0]):
- cmd = ['mono'] + test.fname
+ return SingleTestRunner(test, env, options)
+
+ def process_test_result(self, result):
+ if result.res is TestResult.TIMEOUT:
+ self.timeout_count += 1
+ self.fail_count += 1
+ elif result.res is TestResult.SKIP:
+ self.skip_count += 1
+ elif result.res is TestResult.OK:
+ self.success_count += 1
+ elif result.res is TestResult.FAIL:
+ self.fail_count += 1
else:
- if test.is_cross_built:
- if test.exe_runner is None:
- # Can not run test on cross compiled executable
- # because there is no execute wrapper.
- cmd = None
- else:
- cmd = [test.exe_runner] + test.fname
- else:
- cmd = test.fname
-
- if cmd is None:
- res = 'SKIP'
- duration = 0.0
- stdo = 'Not run because can not execute cross compiled binaries.'
- stde = None
- returncode = GNU_SKIP_RETURNCODE
- else:
- test_opts = deepcopy(self.options)
- test_env = self.get_test_env(test_opts, test)
- wrap = self.get_wrapper(test_opts)
-
- if test_opts.gdb:
- test.timeout = None
-
- cmd = wrap + cmd + test.cmd_args + self.options.test_args
- starttime = time.time()
-
- if len(test.extra_paths) > 0:
- test_env['PATH'] = os.pathsep.join(test.extra_paths + ['']) + test_env['PATH']
-
- # If MALLOC_PERTURB_ is not set, or if it is set to an empty value,
- # (i.e., the test or the environment don't explicitly set it), set
- # it ourselves. We do this unconditionally for regular tests
- # because it is extremely useful to have.
- # Setting MALLOC_PERTURB_="0" will completely disable this feature.
- if ('MALLOC_PERTURB_' not in test_env or not test_env['MALLOC_PERTURB_']) and not self.options.benchmark:
- test_env['MALLOC_PERTURB_'] = str(random.randint(1, 255))
-
- stdout = None
- stderr = None
- if not self.options.verbose:
- stdout = subprocess.PIPE
- stderr = subprocess.PIPE if self.options and self.options.split else subprocess.STDOUT
-
- # Let gdb handle ^C instead of us
- if test_opts.gdb:
- previous_sigint_handler = signal.getsignal(signal.SIGINT)
- # Make the meson executable ignore SIGINT while gdb is running.
- signal.signal(signal.SIGINT, signal.SIG_IGN)
-
- def preexec_fn():
- if test_opts.gdb:
- # Restore the SIGINT handler for the child process to
- # ensure it can handle it.
- signal.signal(signal.SIGINT, signal.SIG_DFL)
- else:
- # We don't want setsid() in gdb because gdb needs the
- # terminal in order to handle ^C and not show tcsetpgrp()
- # errors avoid not being able to use the terminal.
- os.setsid()
-
- p = subprocess.Popen(cmd,
- stdout=stdout,
- stderr=stderr,
- env=test_env,
- cwd=test.workdir,
- preexec_fn=preexec_fn if not is_windows() else None)
- timed_out = False
- kill_test = False
- if test.timeout is None:
- timeout = None
- elif test_opts.timeout_multiplier is not None:
- timeout = test.timeout * test_opts.timeout_multiplier
- else:
- timeout = test.timeout
- try:
- (stdo, stde) = p.communicate(timeout=timeout)
- except subprocess.TimeoutExpired:
- if self.options.verbose:
- print("%s time out (After %d seconds)" % (test.name, timeout))
- timed_out = True
- except KeyboardInterrupt:
- mlog.warning("CTRL-C detected while running %s" % (test.name))
- kill_test = True
- finally:
- if test_opts.gdb:
- # Let us accept ^C again
- signal.signal(signal.SIGINT, previous_sigint_handler)
-
- if kill_test or timed_out:
- # Python does not provide multiplatform support for
- # killing a process and all its children so we need
- # to roll our own.
- if is_windows():
- subprocess.call(['taskkill', '/F', '/T', '/PID', str(p.pid)])
- else:
- try:
- os.killpg(os.getpgid(p.pid), signal.SIGKILL)
- except ProcessLookupError:
- # Sometimes (e.g. with Wine) this happens.
- # There's nothing we can do (maybe the process
- # already died) so carry on.
- pass
- (stdo, stde) = p.communicate()
- endtime = time.time()
- duration = endtime - starttime
- stdo = decode(stdo)
- if stde:
- stde = decode(stde)
- if timed_out:
- res = 'TIMEOUT'
- self.timeout_count += 1
- self.fail_count += 1
- elif p.returncode == GNU_SKIP_RETURNCODE:
- res = 'SKIP'
- self.skip_count += 1
- elif test.should_fail == bool(p.returncode):
- res = 'OK'
- self.success_count += 1
- else:
- res = 'FAIL'
- self.fail_count += 1
- returncode = p.returncode
- result = TestRun(res, returncode, test.should_fail, duration, stdo, stde, cmd, test.env)
-
- return result
+ sys.exit('Unknown test result encountered: {}'.format(result.res))
def print_stats(self, numlen, tests, name, result, i):
startpad = ' ' * (numlen - len('%d' % (i + 1)))
num = '%s%d/%d' % (startpad, i + 1, len(tests))
padding1 = ' ' * (38 - len(name))
- padding2 = ' ' * (8 - len(result.res))
+ padding2 = ' ' * (8 - len(result.res.value))
result_str = '%s %s %s%s%s%5.2f s' % \
- (num, name, padding1, result.res, padding2, result.duration)
- if not self.options.quiet or result.res != 'OK':
- if result.res != 'OK' and mlog.colorize_console:
- if result.res == 'FAIL' or result.res == 'TIMEOUT':
+ (num, name, padding1, result.res.value, padding2, result.duration)
+ if not self.options.quiet or result.res is not TestResult.OK:
+ if result.res is not TestResult.OK and mlog.colorize_console:
+ if result.res is TestResult.FAIL or result.res is TestResult.TIMEOUT:
decorator = mlog.red
- elif result.res == 'SKIP':
+ elif result.res is TestResult.SKIP:
decorator = mlog.yellow
else:
sys.exit('Unreachable code was ... well ... reached.')
@@ -515,7 +542,8 @@ TIMEOUT: %4d
self.logfile.write('Log of Meson test suite run on %s\n\n'
% datetime.datetime.now().isoformat())
- def get_wrapper(self, options):
+ @staticmethod
+ def get_wrapper(options):
wrap = []
if options.gdb:
wrap = ['gdb', '--quiet', '--nh']
@@ -556,12 +584,15 @@ TIMEOUT: %4d
if not test.is_parallel or self.options.gdb:
self.drain_futures(futures)
futures = []
- res = self.run_single_test(test)
+ single_test = self.get_test_runner(test)
+ res = single_test.run()
+ self.process_test_result(res)
self.print_stats(numlen, tests, visible_name, res, i)
else:
if not executor:
executor = conc.ThreadPoolExecutor(max_workers=self.options.num_processes)
- f = executor.submit(self.run_single_test, test)
+ single_test = self.get_test_runner(test)
+ f = executor.submit(single_test.run)
futures.append((f, numlen, tests, visible_name, i))
if self.options.repeat > 1 and self.fail_count:
break
@@ -584,10 +615,11 @@ TIMEOUT: %4d
result.cancel()
if self.options.verbose:
result.result()
+ self.process_test_result(result.result())
self.print_stats(numlen, tests, name, result.result(), i)
def run_special(self):
- 'Tests run by the user, usually something like "under gdb 1000 times".'
+ '''Tests run by the user, usually something like "under gdb 1000 times".'''
if self.is_run:
raise RuntimeError('Can not use run_special after a full run.')
tests = self.get_tests()
@@ -604,7 +636,7 @@ def list_tests(th):
def rebuild_all(wd):
if not os.path.isfile(os.path.join(wd, 'build.ninja')):
- print("Only ninja backend is supported to rebuild tests before running them.")
+ print('Only ninja backend is supported to rebuild tests before running them.')
return True
ninja = environment.detect_ninja()
@@ -616,13 +648,13 @@ def rebuild_all(wd):
p.communicate()
if p.returncode != 0:
- print("Could not rebuild")
+ print('Could not rebuild')
return False
return True
def run(args):
- options = parser.parse_args(args)
+ options = buildparser().parse_args(args)
if options.benchmark:
options.num_processes = 1
@@ -645,7 +677,7 @@ def run(args):
if check_bin is not None:
exe = ExternalProgram(check_bin, silent=True)
if not exe.found():
- sys.exit("Could not find requested program: %s" % check_bin)
+ sys.exit('Could not find requested program: %s' % check_bin)
options.wd = os.path.abspath(options.wd)
if not options.list and not options.no_rebuild:
diff --git a/mesonbuild/rewriter.py b/mesonbuild/rewriter.py
index fad7ba0..1127288 100644
--- a/mesonbuild/rewriter.py
+++ b/mesonbuild/rewriter.py
@@ -29,18 +29,20 @@ from mesonbuild import mlog
import sys, traceback
import argparse
-parser = argparse.ArgumentParser(prog='meson rewrite')
-
-parser.add_argument('--sourcedir', default='.',
- help='Path to source directory.')
-parser.add_argument('--target', default=None,
- help='Name of target to edit.')
-parser.add_argument('--filename', default=None,
- help='Name of source file to add or remove to target.')
-parser.add_argument('commands', nargs='+')
+def buildparser():
+ parser = argparse.ArgumentParser(prog='meson rewrite')
+
+ parser.add_argument('--sourcedir', default='.',
+ help='Path to source directory.')
+ parser.add_argument('--target', default=None,
+ help='Name of target to edit.')
+ parser.add_argument('--filename', default=None,
+ help='Name of source file to add or remove to target.')
+ parser.add_argument('commands', nargs='+')
+ return parser
def run(args):
- options = parser.parse_args(args)
+ options = buildparser().parse_args(args)
if options.target is None or options.filename is None:
sys.exit("Must specify both target and filename.")
print('This tool is highly experimental, use with care.')
diff --git a/mesonbuild/scripts/coverage.py b/mesonbuild/scripts/coverage.py
index 2d1f8c3..916c84f 100644
--- a/mesonbuild/scripts/coverage.py
+++ b/mesonbuild/scripts/coverage.py
@@ -14,87 +14,135 @@
from mesonbuild import environment
-import sys, os, subprocess, pathlib
+import argparse, sys, os, subprocess, pathlib
+
+def coverage(outputs, source_root, subproject_root, build_root, log_dir):
+ outfiles = []
+ exitcode = 0
-def coverage(source_root, build_root, log_dir):
(gcovr_exe, gcovr_new_rootdir, lcov_exe, genhtml_exe) = environment.find_coverage_tools()
- if gcovr_exe:
- # gcovr >= 3.1 interprets rootdir differently
- if gcovr_new_rootdir:
- rootdir = build_root
- else:
- rootdir = source_root
- subprocess.check_call([gcovr_exe,
- '-x',
- '-r', rootdir,
- '-o', os.path.join(log_dir, 'coverage.xml'),
- ])
- subprocess.check_call([gcovr_exe,
- '-r', rootdir,
- '-o', os.path.join(log_dir, 'coverage.txt'),
- ])
- if lcov_exe and genhtml_exe:
- htmloutdir = os.path.join(log_dir, 'coveragereport')
- covinfo = os.path.join(log_dir, 'coverage.info')
- initial_tracefile = covinfo + '.initial'
- run_tracefile = covinfo + '.run'
- raw_tracefile = covinfo + '.raw'
- subprocess.check_call([lcov_exe,
- '--directory', build_root,
- '--capture',
- '--initial',
- '--output-file',
- initial_tracefile])
- subprocess.check_call([lcov_exe,
- '--directory', build_root,
- '--capture',
- '--output-file', run_tracefile,
- '--no-checksum',
- '--rc', 'lcov_branch_coverage=1',
- ])
- # Join initial and test results.
- subprocess.check_call([lcov_exe,
- '-a', initial_tracefile,
- '-a', run_tracefile,
- '-o', raw_tracefile])
- # Remove all directories outside the source_root from the covinfo
- subprocess.check_call([lcov_exe,
- '--extract', raw_tracefile,
- os.path.join(source_root, '*'),
- '--output-file', covinfo])
- subprocess.check_call([genhtml_exe,
- '--prefix', build_root,
- '--output-directory', htmloutdir,
- '--title', 'Code coverage',
- '--legend',
- '--show-details',
- '--branch-coverage',
- covinfo])
- elif gcovr_exe and gcovr_new_rootdir:
- htmloutdir = os.path.join(log_dir, 'coveragereport')
- subprocess.check_call([gcovr_exe,
- '--html',
- '--html-details',
- '-r', build_root,
- '-o', os.path.join(htmloutdir, 'index.html'),
- ])
- if gcovr_exe:
+
+ # gcovr >= 3.1 interprets rootdir differently
+ if gcovr_new_rootdir:
+ gcovr_rootdir = build_root
+ else:
+ gcovr_rootdir = source_root
+
+ if not outputs or 'xml' in outputs:
+ if gcovr_exe:
+ subprocess.check_call([gcovr_exe,
+ '-x',
+ '-r', gcovr_rootdir,
+ '-e', subproject_root,
+ '-o', os.path.join(log_dir, 'coverage.xml'),
+ ])
+ outfiles.append(('Xml', pathlib.Path(log_dir, 'coverage.xml')))
+ elif outputs:
+ print('gcovr needed to generate Xml coverage report')
+ exitcode = 1
+
+ if not outputs or 'text' in outputs:
+ if gcovr_exe:
+ subprocess.check_call([gcovr_exe,
+ '-r', gcovr_rootdir,
+ '-e', subproject_root,
+ '-o', os.path.join(log_dir, 'coverage.txt'),
+ ])
+ outfiles.append(('Text', pathlib.Path(log_dir, 'coverage.txt')))
+ elif outputs:
+ print('gcovr needed to generate text coverage report')
+ exitcode = 1
+
+ if not outputs or 'html' in outputs:
+ if lcov_exe and genhtml_exe:
+ htmloutdir = os.path.join(log_dir, 'coveragereport')
+ covinfo = os.path.join(log_dir, 'coverage.info')
+ initial_tracefile = covinfo + '.initial'
+ run_tracefile = covinfo + '.run'
+ raw_tracefile = covinfo + '.raw'
+ subprocess.check_call([lcov_exe,
+ '--directory', build_root,
+ '--capture',
+ '--initial',
+ '--output-file',
+ initial_tracefile])
+ subprocess.check_call([lcov_exe,
+ '--directory', build_root,
+ '--capture',
+ '--output-file', run_tracefile,
+ '--no-checksum',
+ '--rc', 'lcov_branch_coverage=1',
+ ])
+ # Join initial and test results.
+ subprocess.check_call([lcov_exe,
+ '-a', initial_tracefile,
+ '-a', run_tracefile,
+ '-o', raw_tracefile])
+ # Remove all directories outside the source_root from the covinfo
+ subprocess.check_call([lcov_exe,
+ '--extract', raw_tracefile,
+ os.path.join(source_root, '*'),
+ '--output-file', covinfo])
+ # Remove all directories inside subproject dir
+ subprocess.check_call([lcov_exe,
+ '--remove', covinfo,
+ os.path.join(subproject_root, '*'),
+ '--output-file', covinfo])
+ subprocess.check_call([genhtml_exe,
+ '--prefix', build_root,
+ '--output-directory', htmloutdir,
+ '--title', 'Code coverage',
+ '--legend',
+ '--show-details',
+ '--branch-coverage',
+ covinfo])
+ outfiles.append(('Html', pathlib.Path(htmloutdir, 'index.html')))
+ elif gcovr_exe and gcovr_new_rootdir:
+ htmloutdir = os.path.join(log_dir, 'coveragereport')
+ if not os.path.isdir(htmloutdir):
+ os.mkdir(htmloutdir)
+ subprocess.check_call([gcovr_exe,
+ '--html',
+ '--html-details',
+ '-r', build_root,
+ '-e', subproject_root,
+ '-o', os.path.join(htmloutdir, 'index.html'),
+ ])
+ outfiles.append(('Html', pathlib.Path(htmloutdir, 'index.html')))
+ elif outputs:
+ print('lcov/genhtml or gcovr >= 3.1 needed to generate Html coverage report')
+ exitcode = 1
+
+ if not outputs and not outfiles:
+ print('Need gcovr or lcov/genhtml to generate any coverage reports')
+ exitcode = 1
+
+ if outfiles:
print('')
- print('XML coverage report can be found at',
- pathlib.Path(log_dir, 'coverage.xml').as_uri())
- print('Text coverage report can be found at',
- pathlib.Path(log_dir, 'coverage.txt').as_uri())
- if (lcov_exe and genhtml_exe) or (gcovr_exe and gcovr_new_rootdir):
- print('Html coverage report can be found at',
- pathlib.Path(htmloutdir, 'index.html').as_uri())
- return 0
+ for (filetype, path) in outfiles:
+ print(filetype + ' coverage report can be found at', path.as_uri())
+
+ return exitcode
def run(args):
if not os.path.isfile('build.ninja'):
print('Coverage currently only works with the Ninja backend.')
return 1
- source_root, build_root, log_dir = args[:]
- return coverage(source_root, build_root, log_dir)
+ parser = argparse.ArgumentParser(description='Generate coverage reports')
+ parser.add_argument('--text', dest='outputs', action='append_const',
+ const='text', help='generate Text report')
+ parser.add_argument('--xml', dest='outputs', action='append_const',
+ const='xml', help='generate Xml report')
+ parser.add_argument('--html', dest='outputs', action='append_const',
+ const='html', help='generate Html report')
+ parser.add_argument('source_root')
+ parser.add_argument('subproject_root')
+ parser.add_argument('build_root')
+ parser.add_argument('log_dir')
+ options = parser.parse_args(args)
+ return coverage(options.outputs, options.source_root,
+ options.subproject_root, options.build_root,
+ options.log_dir)
if __name__ == '__main__':
sys.exit(run(sys.argv[1:]))
diff --git a/mesonbuild/scripts/gtkdochelper.py b/mesonbuild/scripts/gtkdochelper.py
index 2a5ee8b..3fe7fb7 100644
--- a/mesonbuild/scripts/gtkdochelper.py
+++ b/mesonbuild/scripts/gtkdochelper.py
@@ -58,6 +58,8 @@ def gtkdoc_run_check(cmd, cwd, library_path=None):
if out:
err_msg.append(out)
raise MesonException('\n'.join(err_msg))
+ elif out:
+ print(out)
def build_gtkdoc(source_root, build_root, doc_subdir, src_subdirs,
main_file, module,
diff --git a/mesonbuild/scripts/meson_exe.py b/mesonbuild/scripts/meson_exe.py
index c43702e..46d501f 100644
--- a/mesonbuild/scripts/meson_exe.py
+++ b/mesonbuild/scripts/meson_exe.py
@@ -21,8 +21,10 @@ import subprocess
options = None
-parser = argparse.ArgumentParser()
-parser.add_argument('args', nargs='+')
+def buildparser():
+ parser = argparse.ArgumentParser()
+ parser.add_argument('args', nargs='+')
+ return parser
def is_windows():
platname = platform.system().lower()
@@ -70,7 +72,7 @@ def run_exe(exe):
def run(args):
global options
- options = parser.parse_args(args)
+ options = buildparser().parse_args(args)
if len(options.args) != 1:
print('Test runner for Meson. Do not run on your own, mmm\'kay?')
print(sys.argv[0] + ' [data file]')
diff --git a/mesonbuild/wrap/__init__.py b/mesonbuild/wrap/__init__.py
index 019634c..6e2bc83 100644
--- a/mesonbuild/wrap/__init__.py
+++ b/mesonbuild/wrap/__init__.py
@@ -25,7 +25,12 @@ from enum import Enum
# to use 'nofallback' so that any 'copylib' wraps will be
# download as subprojects.
#
+# --wrap-mode=forcefallback will ignore external dependencies,
+# even if they match the version requirements, and automatically
+# use the fallback if one was provided. This is useful for example
+# to make sure a project builds when using the fallbacks.
+#
# Note that these options do not affect subprojects that
# are git submodules since those are only usable in git
# repositories, and you almost always want to download them.
-WrapMode = Enum('WrapMode', 'default nofallback nodownload')
+WrapMode = Enum('WrapMode', 'default nofallback nodownload forcefallback')
diff --git a/run_tests.py b/run_tests.py
index 1cc3983..648e6ce 100755
--- a/run_tests.py
+++ b/run_tests.py
@@ -131,7 +131,7 @@ def get_fake_options(prefix):
return opts
def should_run_linux_cross_tests():
- return shutil.which('arm-linux-gnueabihf-gcc-7') and not platform.machine().lower().startswith('arm')
+ return shutil.which('arm-linux-gnueabihf-gcc') and not platform.machine().lower().startswith('arm')
def run_configure_inprocess(meson_command, commandlist):
old_stdout = sys.stdout
diff --git a/run_unittests.py b/run_unittests.py
index 0c84475..4f688cd 100755
--- a/run_unittests.py
+++ b/run_unittests.py
@@ -439,10 +439,11 @@ class InternalTests(unittest.TestCase):
for f in snippet_dir.glob('*'):
self.assertTrue(f.is_file())
if f.suffix == '.md':
- for line in f.open():
- m = re.match(hashcounter, line)
- if m:
- self.assertEqual(len(m.group(0)), 2, 'All headings in snippets must have two hash symbols: ' + f.name)
+ with f.open() as snippet:
+ for line in snippet:
+ m = re.match(hashcounter, line)
+ if m:
+ self.assertEqual(len(m.group(0)), 2, 'All headings in snippets must have two hash symbols: ' + f.name)
else:
if f.name != 'add_release_note_snippets_here':
self.assertTrue(False, 'A file without .md suffix in snippets dir: ' + f.name)
@@ -523,16 +524,18 @@ class BasePlatformTests(unittest.TestCase):
Run a command while printing the stdout and stderr to stdout,
and also return a copy of it
'''
- p = subprocess.Popen(command, stdout=subprocess.PIPE,
- stderr=subprocess.STDOUT, env=os.environ.copy(),
- universal_newlines=True, cwd=workdir)
- output = p.communicate()[0]
- print(output)
+ # If this call hangs, CI will just abort. It is very hard to distinguish
+ # between a CI issue and a test bug in that case. Set a timeout and fail
+ # loudly instead.
+ p = subprocess.run(command, stdout=subprocess.PIPE,
+ stderr=subprocess.STDOUT, env=os.environ.copy(),
+ universal_newlines=True, cwd=workdir, timeout=60 * 5)
+ print(p.stdout)
if p.returncode != 0:
- if 'MESON_SKIP_TEST' in output:
+ if 'MESON_SKIP_TEST' in p.stdout:
raise unittest.SkipTest('Project requested skipping.')
raise subprocess.CalledProcessError(p.returncode, command)
- return output
+ return p.stdout
def init(self, srcdir, extra_args=None, default_args=True, inprocess=False):
self.assertPathExists(srcdir)
@@ -947,6 +950,12 @@ class AllPlatformTests(BasePlatformTests):
self.uninstall()
self.assertPathDoesNotExist(exename)
+ def test_forcefallback(self):
+ testdir = os.path.join(self.unit_test_dir, '27 forcefallback')
+ self.init(testdir, ['--wrap-mode=forcefallback'])
+ self.build()
+ self.run_tests()
+
def test_testsetups(self):
if not shutil.which('valgrind'):
raise unittest.SkipTest('Valgrind not installed.')
@@ -1811,7 +1820,8 @@ int main(int argc, char **argv) {
self._run(ninja,
workdir=os.path.join(tmpdir, 'builddir'))
with tempfile.TemporaryDirectory() as tmpdir:
- open(os.path.join(tmpdir, 'foo.' + lang), 'w').write('int main() {}')
+ with open(os.path.join(tmpdir, 'foo.' + lang), 'w') as f:
+ f.write('int main() {}')
self._run(meson_command + ['init', '-b'], workdir=tmpdir)
# The test uses mocking and thus requires that
@@ -1895,6 +1905,16 @@ int main(int argc, char **argv) {
exception_raised = True
self.assertTrue(exception_raised, 'Double locking did not raise exception.')
+ def test_check_module_linking(self):
+ """
+ Test that shared modules are not linked with targets(link_with:) #2865
+ """
+ tdir = os.path.join(self.unit_test_dir, '26 shared_mod linking')
+ out = self.init(tdir)
+ msg = ('''WARNING: target links against shared modules. This is not
+recommended as it can lead to undefined behaviour on some platforms''')
+ self.assertIn(msg, out)
+
def test_ndebug_if_release_disabled(self):
testdir = os.path.join(self.unit_test_dir, '25 ndebug if-release')
self.init(testdir, extra_args=['--buildtype=release', '-Db_ndebug=if-release'])
@@ -2296,7 +2316,8 @@ class LinuxlikeTests(BasePlatformTests):
def test_pkg_unfound(self):
testdir = os.path.join(self.unit_test_dir, '22 unfound pkgconfig')
self.init(testdir)
- pcfile = open(os.path.join(self.privatedir, 'somename.pc')).read()
+ with open(os.path.join(self.privatedir, 'somename.pc')) as f:
+ pcfile = f.read()
self.assertFalse('blub_blob_blib' in pcfile)
def test_vala_c_warnings(self):
diff --git a/test cases/common/142 compute int/config.h.in b/test cases/common/142 compute int/config.h.in
index ad8d077..0de63ab 100644
--- a/test cases/common/142 compute int/config.h.in
+++ b/test cases/common/142 compute int/config.h.in
@@ -1,2 +1,4 @@
#define INTSIZE @INTSIZE@
#define FOOBAR_IN_CONFIG_H @FOOBAR@
+#define MAXINT @MAXINT@
+#define MININT @MININT@
diff --git a/test cases/common/142 compute int/meson.build b/test cases/common/142 compute int/meson.build
index 43553fe..22bd266 100644
--- a/test cases/common/142 compute int/meson.build
+++ b/test cases/common/142 compute int/meson.build
@@ -7,11 +7,15 @@ cc = meson.get_compiler('c')
intsize = cc.compute_int('sizeof(int)', low : 1, high : 16, guess : 4)
foobar = cc.compute_int('FOOBAR_IN_FOOBAR_H', prefix : '#include "foobar.h"', include_directories : inc)
+maxint = cc.compute_int('INT_MAX', prefix: '#include <limits.h>')
+minint = cc.compute_int('INT_MIN', prefix: '#include <limits.h>')
cd = configuration_data()
cd.set('INTSIZE', intsize)
cd.set('FOOBAR', foobar)
cd.set('CONFIG', 'config.h')
+cd.set('MAXINT', maxint)
+cd.set('MININT', minint)
configure_file(input : 'config.h.in', output : 'config.h', configuration : cd)
s = configure_file(input : 'prog.c.in', output : 'prog.c', configuration : cd)
@@ -23,11 +27,15 @@ cpp = meson.get_compiler('cpp')
intsize = cpp.compute_int('sizeof(int)')
foobar = cpp.compute_int('FOOBAR_IN_FOOBAR_H', prefix : '#include "foobar.h"', include_directories : inc)
+maxint = cpp.compute_int('INT_MAX', prefix: '#include <limits.h>')
+minint = cpp.compute_int('INT_MIN', prefix: '#include <limits.h>')
cdpp = configuration_data()
cdpp.set('INTSIZE', intsize)
cdpp.set('FOOBAR', foobar)
cdpp.set('CONFIG', 'config.hpp')
+cdpp.set('MAXINT', maxint)
+cdpp.set('MININT', minint)
configure_file(input : 'config.h.in', output : 'config.hpp', configuration : cdpp)
spp = configure_file(input : 'prog.c.in', output : 'prog.cc', configuration : cdpp)
diff --git a/test cases/common/142 compute int/prog.c.in b/test cases/common/142 compute int/prog.c.in
index 3ff1463..ff1ad55 100644
--- a/test cases/common/142 compute int/prog.c.in
+++ b/test cases/common/142 compute int/prog.c.in
@@ -1,6 +1,7 @@
#include "@CONFIG@"
#include <stdio.h>
#include <wchar.h>
+#include <limits.h>
#include "foobar.h"
int main(int argc, char **argv) {
@@ -12,5 +13,13 @@ int main(int argc, char **argv) {
fprintf(stderr, "Mismatch: computed int %d, should be %d.\n", FOOBAR_IN_CONFIG_H, FOOBAR_IN_FOOBAR_H);
return 1;
}
+ if(MAXINT != INT_MAX) {
+ fprintf(stderr, "Mismatch: computed max int %d, should be %d.\n", MAXINT, INT_MAX);
+ return 1;
+ }
+ if(MININT != INT_MIN) {
+ fprintf(stderr, "Mismatch: computed min int %d, should be %d.\n", MININT, INT_MIN);
+ return 1;
+ }
return 0;
}
diff --git a/test cases/common/188 subdir_done/meson.build b/test cases/common/188 subdir_done/meson.build
new file mode 100644
index 0000000..5692f3a
--- /dev/null
+++ b/test cases/common/188 subdir_done/meson.build
@@ -0,0 +1,10 @@
+# Should run, even though main.cpp does not exist and we call error in the last line.
+# subdir_done jumps to the end of the file, so neither of those lines is executed.
+
+project('example exit', 'cpp')
+
+subdir_done()
+
+executable('main', 'main.cpp')
+error('Unreachable')
+
diff --git a/test cases/common/189 bothlibraries/libfile.c b/test cases/common/189 bothlibraries/libfile.c
new file mode 100644
index 0000000..085ef3b
--- /dev/null
+++ b/test cases/common/189 bothlibraries/libfile.c
@@ -0,0 +1,7 @@
+#include "mylib.h"
+
+DO_EXPORT int retval = 42;
+
+DO_EXPORT int func() {
+ return retval;
+}
diff --git a/test cases/common/189 bothlibraries/main.c b/test cases/common/189 bothlibraries/main.c
new file mode 100644
index 0000000..03a8e02
--- /dev/null
+++ b/test cases/common/189 bothlibraries/main.c
@@ -0,0 +1,8 @@
+#include "mylib.h"
+
+DO_IMPORT int func();
+DO_IMPORT int retval;
+
+int main(int argc, char **arg) {
+ return func() == retval ? 0 : 1;
+}
diff --git a/test cases/common/189 bothlibraries/meson.build b/test cases/common/189 bothlibraries/meson.build
new file mode 100644
index 0000000..3a13d62
--- /dev/null
+++ b/test cases/common/189 bothlibraries/meson.build
@@ -0,0 +1,12 @@
+project('both libraries linking test', 'c')
+
+both_libs = both_libraries('mylib', 'libfile.c')
+exe_shared = executable('prog-shared', 'main.c', link_with : both_libs.get_shared_lib())
+exe_static = executable('prog-static', 'main.c',
+ c_args : ['-DSTATIC_COMPILATION'],
+ link_with : both_libs.get_static_lib())
+exe_both = executable('prog-both', 'main.c', link_with : both_libs)
+
+test('runtest-shared', exe_shared)
+test('runtest-static', exe_static)
+test('runtest-both', exe_both)
diff --git a/test cases/common/189 bothlibraries/mylib.h b/test cases/common/189 bothlibraries/mylib.h
new file mode 100644
index 0000000..1038a01
--- /dev/null
+++ b/test cases/common/189 bothlibraries/mylib.h
@@ -0,0 +1,13 @@
+#pragma once
+
+#ifdef _WIN32
+ #ifdef STATIC_COMPILATION
+ #define DO_IMPORT extern
+ #else
+ #define DO_IMPORT __declspec(dllimport)
+ #endif
+ #define DO_EXPORT __declspec(dllexport)
+#else
+ #define DO_IMPORT extern
+ #define DO_EXPORT
+#endif
diff --git a/test cases/common/51 pkgconfig-gen/dependencies/meson.build b/test cases/common/51 pkgconfig-gen/dependencies/meson.build
index 822a7b7..640115a 100644
--- a/test cases/common/51 pkgconfig-gen/dependencies/meson.build
+++ b/test cases/common/51 pkgconfig-gen/dependencies/meson.build
@@ -1,4 +1,4 @@
-project('pkgconfig-gen-dependencies', 'c')
+project('pkgconfig-gen-dependencies', 'c', version: '1.0')
pkgg = import('pkgconfig')
@@ -7,14 +7,11 @@ exposed_lib = shared_library('libexposed', 'exposed.c')
internal_lib = shared_library('libinternal', 'internal.c')
main_lib = static_library('libmain', link_with : [exposed_lib, internal_lib])
-pkgg.generate(libraries : exposed_lib,
- version : '1.0',
- name : 'libexposed',
- description : 'An exposed library in dependency test.'
-)
+pkgg.generate(exposed_lib)
# Declare a few different Dependency objects
pc_dep = dependency('libfoo', version : '>=1.0')
+pc_dep_dup = dependency('libfoo', version : '>= 1.0')
notfound_dep = dependency('notfound', required : false)
threads_dep = dependency('threads')
custom_dep = declare_dependency(link_args : ['-lcustom'], compile_args : ['-DCUSTOM'])
@@ -28,9 +25,10 @@ custom2_dep = declare_dependency(link_args : ['-lcustom2'], compile_args : ['-DC
# - Having custom_dep in libraries and libraries_private should only add it in Libs
# - Having custom2_dep in libraries_private should not add its Cflags
# - Having pc_dep in libraries_private should add it in Requires.private
+# - pc_dep_dup is the same library and same version, should be ignored
# - notfound_dep is not required so it shouldn't appear in the pc file.
pkgg.generate(libraries : [main_lib, exposed_lib, threads_dep , custom_dep],
- libraries_private : [custom_dep, custom2_dep, pc_dep, notfound_dep],
+ libraries_private : [custom_dep, custom2_dep, pc_dep, pc_dep_dup, notfound_dep],
version : '1.0',
name : 'dependency-test',
filebase : 'dependency-test',
diff --git a/test cases/d/3 shared library/meson.build b/test cases/d/3 shared library/meson.build
index 78ad766..4616242 100644
--- a/test cases/d/3 shared library/meson.build
+++ b/test cases/d/3 shared library/meson.build
@@ -10,3 +10,12 @@ endif
ldyn = shared_library('stuff', 'libstuff.d', install : true)
ed = executable('app_d', 'app.d', link_with : ldyn, install : true)
test('linktest_dyn', ed)
+
+# test D attributes for pkg-config
+pkgc = import('pkgconfig')
+pkgc.generate(name: 'test',
+ libraries: ldyn,
+ subdirs: 'd/stuff',
+ description: 'A test of D attributes to pkgconfig.generate.',
+ d_module_versions: ['Use_Static']
+)
diff --git a/test cases/d/6 unittest/app.d b/test cases/d/6 unittest/app.d
index 751e754..71c6414 100644
--- a/test cases/d/6 unittest/app.d
+++ b/test cases/d/6 unittest/app.d
@@ -23,10 +23,14 @@ unittest
{
writeln ("TEST");
import core.stdc.stdlib : exit;
+ import second_unit;
assert (getFour () > 2);
assert (getFour () == 4);
+ // this is a regression test for https://github.com/mesonbuild/meson/issues/3337
+ secondModuleTestFunc ();
+
// we explicitly terminate here to give the unittest program a different exit
// code than the main application has.
// (this prevents the regular main() from being executed)
diff --git a/test cases/d/6 unittest/meson.build b/test cases/d/6 unittest/meson.build
index 1551e94..49a0700 100644
--- a/test cases/d/6 unittest/meson.build
+++ b/test cases/d/6 unittest/meson.build
@@ -1,8 +1,8 @@
project('D Unittests', 'd')
-e = executable('dapp', 'app.d', install : true)
+e = executable('dapp', ['app.d', 'second_unit.d'], install : true)
test('dapp_run', e, should_fail: true)
-e_test = executable('dapp_test', 'app.d',
- d_args: meson.get_compiler('d').unittest_args())
+e_test = executable('dapp_test', ['app.d', 'second_unit.d'],
+ d_unittest: true)
test('dapp_test', e_test)
diff --git a/test cases/d/6 unittest/second_unit.d b/test cases/d/6 unittest/second_unit.d
new file mode 100644
index 0000000..fdb62a9
--- /dev/null
+++ b/test cases/d/6 unittest/second_unit.d
@@ -0,0 +1,10 @@
+
+void secondModuleTestFunc ()
+{
+ import std.stdio : writeln;
+
+ version (unittest)
+ writeln ("Hello!");
+ else
+ assert (0);
+}
diff --git a/test cases/d/9 features/app.d b/test cases/d/9 features/app.d
index 37cc1dd..6b43bf0 100644
--- a/test cases/d/9 features/app.d
+++ b/test cases/d/9 features/app.d
@@ -3,6 +3,8 @@ import std.stdio;
import std.array : split;
import std.string : strip;
+import extra;
+
auto getMenu ()
{
auto foods = import ("food.txt").strip.split ("\n");
@@ -31,7 +33,12 @@ void main (string[] args)
version (With_People) {
if (request == "people") {
writeln ("People: ", getPeople.join (", "));
- exit (0);
+
+ // only exit successfully if the second module also had its module version set.
+ // this checks for issue https://github.com/mesonbuild/meson/issues/3337
+ if (secondModulePeopleVersionSet ())
+ exit (0);
+ exit (1);
}
}
diff --git a/test cases/d/9 features/extra.d b/test cases/d/9 features/extra.d
new file mode 100644
index 0000000..832b292
--- /dev/null
+++ b/test cases/d/9 features/extra.d
@@ -0,0 +1,9 @@
+
+auto secondModulePeopleVersionSet ()
+{
+ version (With_People) {
+ return true;
+ } else {
+ return false;
+ }
+}
diff --git a/test cases/d/9 features/meson.build b/test cases/d/9 features/meson.build
index 356e9f3..694e488 100644
--- a/test cases/d/9 features/meson.build
+++ b/test cases/d/9 features/meson.build
@@ -6,8 +6,10 @@ project('D Features', 'd')
# STRINGS TO PATHS MANUALLY!
data_dir = join_paths(meson.current_source_dir(), 'data')
+test_src = ['app.d', 'extra.d']
+
e_plain_bcompat = executable('dapp_menu_bcompat',
- 'app.d',
+ test_src,
d_import_dirs: [data_dir]
)
test('dapp_menu_t_fail_bcompat', e_plain_bcompat, should_fail: true)
@@ -18,7 +20,7 @@ test('dapp_menu_t_bcompat', e_plain_bcompat, args: ['menu'])
data_dir = include_directories('data')
e_plain = executable('dapp_menu',
- 'app.d',
+ test_src,
d_import_dirs: [data_dir]
)
test('dapp_menu_t_fail', e_plain, should_fail: true)
@@ -27,7 +29,7 @@ test('dapp_menu_t', e_plain, args: ['menu'])
# test feature versions and string imports
e_versions = executable('dapp_versions',
- 'app.d',
+ test_src,
d_import_dirs: [data_dir],
d_module_versions: ['No_Menu', 'With_People']
)
@@ -36,7 +38,7 @@ test('dapp_versions_t', e_versions, args: ['people'])
# test everything and unittests
e_test = executable('dapp_test',
- 'app.d',
+ test_src,
d_import_dirs: [data_dir],
d_module_versions: ['No_Menu', 'With_People'],
d_unittest: true
diff --git a/test cases/failing/71 skip only subdir/meson.build b/test cases/failing/71 skip only subdir/meson.build
new file mode 100644
index 0000000..4832bd4
--- /dev/null
+++ b/test cases/failing/71 skip only subdir/meson.build
@@ -0,0 +1,8 @@
+# Check that subdir_done only exits the subdir, not the whole script.
+# Should create an error because main.cpp does not exist.
+project('example exit', 'cpp')
+
+subdir('subdir')
+
+message('Good')
+executable('main', 'main.cpp')
diff --git a/test cases/failing/71 skip only subdir/subdir/meson.build b/test cases/failing/71 skip only subdir/subdir/meson.build
new file mode 100644
index 0000000..1ba447b
--- /dev/null
+++ b/test cases/failing/71 skip only subdir/subdir/meson.build
@@ -0,0 +1,3 @@
+subdir_done()
+
+error('Unreachable')
diff --git a/test cases/unit/26 shared_mod linking/libfile.c b/test cases/unit/26 shared_mod linking/libfile.c
new file mode 100644
index 0000000..44f7667
--- /dev/null
+++ b/test cases/unit/26 shared_mod linking/libfile.c
@@ -0,0 +1,14 @@
+#if defined _WIN32 || defined __CYGWIN__
+ #define DLL_PUBLIC __declspec(dllexport)
+#else
+ #if defined __GNUC__
+ #define DLL_PUBLIC __attribute__ ((visibility("default")))
+ #else
+ #pragma message ("Compiler does not support symbol visibility.")
+ #define DLL_PUBLIC
+ #endif
+#endif
+
+int DLL_PUBLIC func() {
+ return 0;
+}
diff --git a/test cases/unit/26 shared_mod linking/main.c b/test cases/unit/26 shared_mod linking/main.c
new file mode 100644
index 0000000..12f9c98
--- /dev/null
+++ b/test cases/unit/26 shared_mod linking/main.c
@@ -0,0 +1,11 @@
+#if defined _WIN32 || defined __CYGWIN__
+ #define DLL_IMPORT __declspec(dllimport)
+#else
+ #define DLL_IMPORT
+#endif
+
+int DLL_IMPORT func();
+
+int main(int argc, char **arg) {
+ return func();
+}
diff --git a/test cases/unit/26 shared_mod linking/meson.build b/test cases/unit/26 shared_mod linking/meson.build
new file mode 100644
index 0000000..994a5d3
--- /dev/null
+++ b/test cases/unit/26 shared_mod linking/meson.build
@@ -0,0 +1,5 @@
+project('shared library linking test', 'c', 'cpp')
+
+mod = shared_module('mymod', 'libfile.c')
+
+exe = executable('prog', 'main.c', link_with : mod, install : true) \ No newline at end of file
diff --git a/test cases/unit/27 forcefallback/meson.build b/test cases/unit/27 forcefallback/meson.build
new file mode 100644
index 0000000..e6a90ea
--- /dev/null
+++ b/test cases/unit/27 forcefallback/meson.build
@@ -0,0 +1,8 @@
+project('mainproj', 'c',
+ default_options : ['wrap_mode=forcefallback'])
+
+zlib_dep = dependency('zlib', fallback: ['notzlib', 'zlib_dep'])
+
+test_not_zlib = executable('test_not_zlib', ['test_not_zlib.c'], dependencies: [zlib_dep])
+
+test('test_not_zlib', test_not_zlib)
diff --git a/test cases/unit/27 forcefallback/subprojects/notzlib/meson.build b/test cases/unit/27 forcefallback/subprojects/notzlib/meson.build
new file mode 100644
index 0000000..254a136
--- /dev/null
+++ b/test cases/unit/27 forcefallback/subprojects/notzlib/meson.build
@@ -0,0 +1,7 @@
+project('notzlib', 'c')
+
+notzlib_sources = ['notzlib.c']
+
+notzlib = library('notzlib', notzlib_sources)
+
+zlib_dep = declare_dependency(link_with: notzlib, include_directories: include_directories(['.']))
diff --git a/test cases/unit/27 forcefallback/subprojects/notzlib/notzlib.c b/test cases/unit/27 forcefallback/subprojects/notzlib/notzlib.c
new file mode 100644
index 0000000..c3b6bf9
--- /dev/null
+++ b/test cases/unit/27 forcefallback/subprojects/notzlib/notzlib.c
@@ -0,0 +1,6 @@
+#include "notzlib.h"
+
+int not_a_zlib_function (void)
+{
+ return 42;
+}
diff --git a/test cases/unit/27 forcefallback/subprojects/notzlib/notzlib.h b/test cases/unit/27 forcefallback/subprojects/notzlib/notzlib.h
new file mode 100644
index 0000000..695921d
--- /dev/null
+++ b/test cases/unit/27 forcefallback/subprojects/notzlib/notzlib.h
@@ -0,0 +1,18 @@
+#pragma once
+
+#if defined _WIN32 || defined __CYGWIN__
+#if defined BUILDING_DLL
+ #define DLL_PUBLIC __declspec(dllexport)
+#else
+ #define DLL_PUBLIC __declspec(dllimport)
+#endif
+#else
+ #if defined __GNUC__
+ #define DLL_PUBLIC __attribute__ ((visibility("default")))
+ #else
+ #pragma message ("Compiler does not support symbol visibility.")
+ #define DLL_PUBLIC
+ #endif
+#endif
+
+int DLL_PUBLIC not_a_zlib_function (void);
diff --git a/test cases/unit/27 forcefallback/test_not_zlib.c b/test cases/unit/27 forcefallback/test_not_zlib.c
new file mode 100644
index 0000000..36256af
--- /dev/null
+++ b/test cases/unit/27 forcefallback/test_not_zlib.c
@@ -0,0 +1,8 @@
+#include <notzlib.h>
+
+int main (int ac, char **av)
+{
+ if (not_a_zlib_function () != 42)
+ return 1;
+ return 0;
+}