aboutsummaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rw-r--r--.appveyor.yml4
-rw-r--r--.travis.yml4
-rw-r--r--authors.txt87
-rw-r--r--docs/README.md2
-rw-r--r--docs/markdown/Compiler-properties.md12
-rw-r--r--docs/markdown/Creating-releases.md15
-rw-r--r--docs/markdown/Pkgconfig-module.md1
-rw-r--r--docs/markdown/Reference-manual.md86
-rw-r--r--docs/markdown/Release-notes-for-0.41.0.md30
-rw-r--r--docs/markdown/Running-Meson.md2
-rw-r--r--docs/markdown/Syntax.md2
-rw-r--r--docs/markdown/legal.md2
-rw-r--r--docs/sitemap.txt1
-rw-r--r--mesonbuild/backend/backends.py13
-rw-r--r--mesonbuild/backend/ninjabackend.py41
-rw-r--r--mesonbuild/backend/vs2010backend.py8
-rw-r--r--mesonbuild/build.py15
-rw-r--r--mesonbuild/compilers.py22
-rw-r--r--mesonbuild/coredata.py4
-rw-r--r--mesonbuild/dependencies.py1810
-rw-r--r--mesonbuild/dependencies/__init__.py46
-rw-r--r--mesonbuild/dependencies/base.py641
-rw-r--r--mesonbuild/dependencies/dev.py293
-rw-r--r--mesonbuild/dependencies/misc.py382
-rw-r--r--mesonbuild/dependencies/platform.py44
-rw-r--r--mesonbuild/dependencies/ui.py560
-rw-r--r--mesonbuild/interpreter.py78
-rw-r--r--mesonbuild/mesonlib.py10
-rw-r--r--mesonbuild/mesonmain.py3
-rw-r--r--mesonbuild/mintro.py6
-rw-r--r--mesonbuild/modules/pkgconfig.py33
-rw-r--r--mesonbuild/scripts/dist.py148
-rwxr-xr-xrun_project_tests.py10
-rwxr-xr-xrun_tests.py8
-rwxr-xr-xrun_unittests.py56
-rw-r--r--setup.py1
-rw-r--r--test cases/common/150 nested links/meson.build8
-rw-r--r--test cases/common/150 nested links/xephyr.c3
-rw-r--r--test cases/common/151 list of file sources/foo1
-rw-r--r--test cases/common/151 list of file sources/gen.py7
-rw-r--r--test cases/common/151 list of file sources/meson.build12
-rw-r--r--test cases/common/51 pkgconfig-gen/meson.build4
-rw-r--r--test cases/failing/47 pkgconfig variables reserved/meson.build16
-rw-r--r--test cases/failing/47 pkgconfig variables reserved/simple.c5
-rw-r--r--test cases/failing/47 pkgconfig variables reserved/simple.h6
-rw-r--r--test cases/failing/48 pkgconfig variables zero length/meson.build16
-rw-r--r--test cases/failing/48 pkgconfig variables zero length/simple.c5
-rw-r--r--test cases/failing/48 pkgconfig variables zero length/simple.h6
-rw-r--r--test cases/failing/49 pkgconfig variables zero length value/meson.build16
-rw-r--r--test cases/failing/49 pkgconfig variables zero length value/simple.c5
-rw-r--r--test cases/failing/49 pkgconfig variables zero length value/simple.h6
-rw-r--r--test cases/failing/50 pkgconfig variables not key value/meson.build16
-rw-r--r--test cases/failing/50 pkgconfig variables not key value/simple.c5
-rw-r--r--test cases/failing/50 pkgconfig variables not key value/simple.h6
-rw-r--r--test cases/frameworks/14 doxygen/installed_files.txt4
-rw-r--r--test cases/linuxlike/1 pkg-config/meson.build1
-rw-r--r--test cases/linuxlike/5 dependency versions/meson.build46
-rw-r--r--test cases/windows/10 vs module defs generated/meson.build8
-rw-r--r--test cases/windows/10 vs module defs generated/subdir/somedll.c2
-rw-r--r--test cases/windows/6 vs module defs/meson.build8
-rw-r--r--test cases/windows/6 vs module defs/subdir/somedll.c2
61 files changed, 2635 insertions, 2059 deletions
diff --git a/.appveyor.yml b/.appveyor.yml
index 766015f..708e1b1 100644
--- a/.appveyor.yml
+++ b/.appveyor.yml
@@ -49,6 +49,10 @@ branches:
only:
- master
+skip_commits:
+ files:
+ - docs/**/*
+
install:
# Use the x86 python only when building for x86 for the cpython tests.
# For all other archs (including, say, arm), use the x64 python.
diff --git a/.travis.yml b/.travis.yml
index fbb11ac..5f4318d 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -31,13 +31,13 @@ matrix:
before_install:
- if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then brew update; fi
- if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then brew install ninja python3; fi
- - if [[ "$TRAVIS_OS_NAME" == "linux" ]]; then docker pull jpakkane/mesonci:yakkety; fi
+ - if [[ "$TRAVIS_OS_NAME" == "linux" ]]; then docker pull jpakkane/mesonci:zesty; fi
# We need to copy the current checkout inside the Docker container,
# because it has the MR id to be tested checked out.
script:
- - if [[ "$TRAVIS_OS_NAME" == "linux" ]]; then echo FROM jpakkane/mesonci:yakkety > Dockerfile; fi
+ - if [[ "$TRAVIS_OS_NAME" == "linux" ]]; then echo FROM jpakkane/mesonci:zesty > Dockerfile; fi
- if [[ "$TRAVIS_OS_NAME" == "linux" ]]; then echo ADD . /root >> Dockerfile; fi
- if [[ "$TRAVIS_OS_NAME" == "linux" ]]; then docker build -t withgit .; fi
- if [[ "$TRAVIS_OS_NAME" == "linux" ]]; then docker run withgit /bin/sh -c "cd /root && TRAVIS=true CC=$CC CXX=$CXX OBJC=$CC OBJCXX=$CXX ./run_tests.py -- $MESON_ARGS"; fi
diff --git a/authors.txt b/authors.txt
deleted file mode 100644
index 8a02848..0000000
--- a/authors.txt
+++ /dev/null
@@ -1,87 +0,0 @@
-Authors of Meson build system
-
-Meson was originally designed and created by Jussi Pakkanen.
-
-
-The following people have submitted patches for the project
-
-Peter Koval
-Masashi Fujita
-Juhani Simola
-Robin McCorkell
-Axel Waggershauser
-Igor Gnatenko
-Hemmo Nieminen
-mfrischknecht
-Matthew Bekkema
-Afief Halumi
-Thibault Saunier
-Mathieu Duponchelle
-Jouni Roivas
-Rafaël Kooi
-Marko Raatikainen
-German Diago Gomez
-Kyle Manna
-Haakon Sporsheim
-Wink Saville
-Yoav Alon
-Martin Ejdestig
-Rémi Nicole
-Damián Nohales
-Nirbheek Chauhan
-Nicolas Schneider
-Luke Adams
-Rogiel Sulzbach
-Tim-Philipp Müller
-Emmanuele Bassi
-Martin Hostettler
-Sam Thursfield
-Noam Meltzer
-Vincent Szolnoky
-Zhe Wang
-Wim Taymans
-Matthias Klumpp
-Elliott Sales de Andrade
-Patrick Griffis
-Iain Lane
-Daniel Brendle
-Franz Zapata
-Emanuele Aina
-Guillaume Poirier-Morency
-Scott D Phillips
-Gautier Pelloux-Prayer
-Alexandre Foley
-Jouni Kosonen
-Aurelien Jarno
-Mark Schulte
-Paulo Antonio Alvarez
-Olexa Bilaniuk
-Daniel Stone
-Marc-Antoine Perennou
-Matthieu Gautier
-Kseniia Vasilchuk
-Philipp Geier
-Mike Sinkovsky
-Dima Krasner
-Fabio Porcedda
-Rodrigo Lourenço
-Sebastian Stang
-Marc Becker
-Michal Sojka
-Aaron Small
-Joe Baldino
-Peter Harris
-Roger Boerdijk
-melak47
-Philipp Ittershagen
-Dylan Baker
-Aaron Plattner
-Jon Turney
-Wade Berrier
-Richard Hughes
-Rafael Fontenelle
-Michael Olbrich
-Ernestas Kulik
-Thomas Hindoe Paaboel Andersen
-Paolo Borelli
-Mike Gilbert
diff --git a/docs/README.md b/docs/README.md
index 61d2e05..9ed75c1 100644
--- a/docs/README.md
+++ b/docs/README.md
@@ -1,7 +1,7 @@
# Building the documentation
1. Get [hotdoc](https://hotdoc.github.io/installing.html) (0.8.9 required)
-1. Run hotdoc:
+1. Run hotdoc in the docs/ directory:
hotdoc run
diff --git a/docs/markdown/Compiler-properties.md b/docs/markdown/Compiler-properties.md
index 50615a1..c33d917 100644
--- a/docs/markdown/Compiler-properties.md
+++ b/docs/markdown/Compiler-properties.md
@@ -97,12 +97,12 @@ result = compiler.run(code, name : 'basic check')
The `result` variable encapsulates the state of the test, which can be extracted with the following methods. The `name` keyword argument works the same as with `compiles`.
-Method | Return value
--------|----------------
-compiled | `True` if compilation succeeded. If `false` then all other methods return undefined values.
-returncode | The return code of the application as an integer
-stdout | Program's standard out as text.
-stderr | Program's standard error as text.
+| Method | Return value
+| ------ | ------------
+| compiled | `True` if compilation succeeded. If `false` then all other methods return undefined values.
+| returncode | The return code of the application as an integer
+| stdout | Program's standard out as text.
+| stderr | Program's standard error as text.
Here is an example usage:
diff --git a/docs/markdown/Creating-releases.md b/docs/markdown/Creating-releases.md
new file mode 100644
index 0000000..b0149a6
--- /dev/null
+++ b/docs/markdown/Creating-releases.md
@@ -0,0 +1,15 @@
+---
+short-description: Creating releases
+...
+
+# Creating releases
+
+In addition to development, almost all projects provide periodic source releases. These are standalone packages (usually either in tar or zip format) of the source code. They do not contain any revision control metadata, only the source code.
+
+Meson provides a simple way of generating these. It consists of a single command:
+
+ ninja dist
+
+This creates a file called `projectname-version.tar.xz` in the build tree subdirectory `meson-dist`. This archive contains the full contents of the latest commit in revision control including all the submodules. All revision control metadata is removed. Meson then takes this archive and tests that it works by doing a full compile + test + install cycle. If all these pass, Meson will then create a SHA-256 checksum file next to the archive.
+
+**Note**: Meson behaviour is different from Autotools. The Autotools "dist" target packages up the current source tree. Meson packages the latest revision control commit. The reason for this is that it prevents developers from doing accidental releases where the distributed archive does not match any commit in revision control (especially the one tagged for the release).
diff --git a/docs/markdown/Pkgconfig-module.md b/docs/markdown/Pkgconfig-module.md
index 51fe6b2..6b9a968 100644
--- a/docs/markdown/Pkgconfig-module.md
+++ b/docs/markdown/Pkgconfig-module.md
@@ -20,3 +20,4 @@ The generated file's properties are specified with the following keyword argumen
- `requires_private` list of strings to put in the `Requires.private` field
- `libraries_private` list of strings to put in the `Libraries.private` field
- `install_dir` the directory to install to, defaults to the value of option `libdir` followed by `/pkgconfig`
+- `variables` a list of strings with custom variables to add to the generated file. The strings must be in the form `name=value` and may reference other pkgconfig variables, e.g. `datadir=${prefix}/share`. The names `prefix`, `libdir` and `installdir` are reserved and may not be used.
diff --git a/docs/markdown/Reference-manual.md b/docs/markdown/Reference-manual.md
index 00a891b..62c0fb3 100644
--- a/docs/markdown/Reference-manual.md
+++ b/docs/markdown/Reference-manual.md
@@ -137,7 +137,7 @@ These are all the supported keyword arguments:
Create a custom top level build target. The only positional argument is the name of this target and the keyword arguments are the following.
-- `input` list of source files
+- `input` list of source files. As of 0.41.0 the list will be flattened.
- `output` list of output files
- `command` command to run to create outputs from inputs. The command may be strings or the return of `find_program()` or `executable()` (note: always specify commands in array form `['commandname', '-arg1', '-arg2']` rather than as a string `'commandname -arg1 -arg2'` as the latter will *not* work)
- `install` when true, this target is installed during the install step
@@ -228,8 +228,8 @@ With the Ninja backend, Meson will create a build-time [order-only dependency](h
Executable supports the following keyword arguments. Note that just like the positional arguments above, these keyword arguments can also be passed to [shared and static libraries](#library).
-- `link_with`, one or more shared or static libraries (built by this project) that this target should be linked with
-- `link_whole` links all contents of the given static libraries whether they are used by not, equivalent to the `-Wl,--whole-archive` argument flag of GCC, available since 0.40.0
+- `link_with`, one or more shared or static libraries (built by this project) that this target should be linked with. If passed a list, the list will be flattened as of 0.41.0.
+- `link_whole` links all contents of the given static libraries whether they are used or not, equivalent to the `-Wl,--whole-archive` argument flag of GCC, available since 0.40.0. As of 0.41.0 if passed a list that list will be flattened.
- `<languagename>_pch` precompiled header file to use for the given language
- `<languagename>_args` compiler flags to use for the given language; eg: `cpp_args` for C++
- `link_args` flags to use during linking. You can use UNIX-style flags here for all platforms.
@@ -262,11 +262,23 @@ This function is deprecated and in the 0.31.0 release it was moved to [the compi
`program_name1` here is a string that can be an executable or script to be searched for in `PATH`, or a script in the current source directory.
+`program_name2` and later positional arguments are used as fallback strings to search for. This is meant to be used for cases where the program may have many alternative names, such as `foo` and `foo.py`. The function will check for the arguments one by one and the first one that is found is returned. Meson versions earlier than 0.37.0 only accept one argument.
+
+Keyword arguments are the following:
+
+- `required` By default, `required` is set to `true` and Meson will abort if no program can be found. If `required` is set to `false`, Meson will continue even if none of the programs can be found. You can then use the `.found()` method on the returned object to check whether it was found or not.
+
Meson will also autodetect scripts with a shebang line and run them with the executable/interpreter specified in it both on Windows (because the command invocator will reject the command otherwise) and Unixes (if the script file does not have the executable bit set). Hence, you *must not* manually add the interpreter while using this script as part of a list of commands.
-`program_name2` and later positional arguments are used as fallback strings to search for. This is meant to be used for cases where the program may have many alternative names, such as `foo` and `foo.py`. The function will check for the arguments one by one and the first one that is found is returned. Meson versions earlier than 0.37.0 only accept one argument.
+If you need to check for a program in a non-standard location, you can just pass an absolute path to `find_program`, e.g.
+```
+setcap = find_program('setcap', '/usr/sbin/setcap', '/sbin/setcap', required : false)
+```
-If none of the programs are found, Meson will abort. You can tell it not to by setting the keyword argument `required` to false, and then use the `.found()` method on the returned object to check whether it was found or not.
+It is also possible to pass an array to `find_program` in case you need to construct the set of paths to search on the fly:
+```
+setcap = find_program(['setcap', '/usr/sbin/setcap', '/sbin/setcap'], required : false)
+```
The returned object also has methods that are documented in the [object methods section](#external-program-object) below.
@@ -570,7 +582,7 @@ Builds a shared library with the given sources. Positional and keyword arguments
- `version` a string specifying the version of this shared library, such as `1.1.0`. On Linux and OS X, this is used to set the shared library version in the filename, such as `libfoo.so.1.1.0` and `libfoo.1.1.0.dylib`. If this is not specified, `soversion` is used instead (see below).
- `soversion` a string specifying the soversion of this shared library, such as `0`. On Linux and Windows this is used to set the soversion (or equivalent) in the filename. For example, if `soversion` is `4`, a Windows DLL will be called `foo-4.dll` and one of the aliases of the Linux shared library would be `libfoo.so.4`. If this is not specified, the first part of `version` is used instead. For example, if `version` is `3.6.0` and `soversion` is not defined, it is set to `3`.
-- `vs_module_defs` a string pointing to a file or a File object that contains Visual Studio symbol export definitions.
+- `vs_module_defs` a string pointing to a file or a File object that is a Microsoft module definition file for controlling symbol exports, etc., on platforms where that is possible (e.g. Windows).
### shared_module()
@@ -660,37 +672,39 @@ The `meson` object allows you to introspect various properties of the system. Th
- `backend()` *(added 0.37.0)* returns a string representing the current backend: `ninja`, `vs2010`, `vs2015`, or `xcode`.
-- `is_cross_build()` returns `true` if the current build is a cross build and `false` otherwise
+- `is_cross_build()` returns `true` if the current build is a [cross build](Cross-compilation.md) and `false` otherwise.
+
+- `is_unity()` returns `true` when doing a [unity build](Unity-builds.md) (multiple sources are combined before compilation to reduce build time) and `false` otherwise.
-- `is_unity()` returns `true` when doing a unity build and `false` otherwise
+- `is_subproject()` returns `true` if the current project is being built as a subproject of some other project and `false` otherwise.
-- `is_subproject()` returns `true` if the current project is being built as a subproject of some other project and `false` otherwise
+- `has_exe_wrapper()` returns true when doing a cross build if there is a wrapper command that can be used to execute cross built binaries (for example when cross compiling from Linux to Windows, one can use `wine` as the wrapper).
-- `has_exe_wrapper()` returns true when doing a cross build if there is a wrapper command that can be used to execute cross built binaries (for example when cross compiling from Linux to Windows, one can use `wine` as the wrapper)
+- `add_install_script(script_name, arg1, arg2, ...)` causes the script given as an argument to be run during the install step, this script will have the environment variables `MESON_SOURCE_ROOT`, `MESON_BUILD_ROOT`, `MESON_INSTALL_PREFIX`, `MESON_INSTALL_DESTDIR_PREFIX`, and `MESONINTROSPECT` set. All additional arguments are passed as parameters.
-- `add_install_script(script_name, arg1, arg2, ...)` causes the script given as an argument to be run during the install step, this script will have the environment variables `MESON_SOURCE_ROOT`, `MESON_BUILD_ROOT`, `MESON_INSTALL_PREFIX`, and `MESON_INSTALL_DESTDIR_PREFIX` set. All additional arguments are passed as parameters.
+ To determine the installation location, the script should use the `DESTDIR`, `MESON_INSTALL_PREFIX`, `MESON_INSTALL_DESTDIR_PREFIX` variables. `DESTDIR` will be set only if it is inherited from the outside environment. `MESON_INSTALL_PREFIX` is always set and has the value of the `prefix` option passed to Meson. `MESON_INSTALL_DESTDIR_PREFIX` is always set and contains `DESTDIR` and `prefix` joined together. This is useful because both are absolute paths, and many path-joining functions such as [`os.path.join` in Python](https://docs.python.org/3/library/os.path.html#os.path.join) special-case absolute paths.
- `MESON_INSTALL_PREFIX` has the value of the `prefix` option passed to Meson, and `MESON_INSTALL_DESTDIR_PREFIX` has both the value of `DESTDIR` and `prefix` joined together. This is useful because both are absolute paths, and many path-joining functions such as [`os.path.join` in Python](https://docs.python.org/3/library/os.path.html#os.path.join) special-case absolute paths.
+ `MESONINTROSPECT` contains the path to the `mesonintrospect` executable that corresponds to the `meson` executable that was used to configure the build. (This might be a different path than the first `mesonintrospect` executable found in `PATH`.) It can be used to query build configuration.
- `add_postconf_script(script_name, arg1, arg2, ...)` will run the executable given as an argument after all project files have been generated. This script will have the environment variables `MESON_SOURCE_ROOT` and `MESON_BUILD_ROOT` set.
- `current_source_dir()` returns a string to the current source directory. Note: **you do not need to use this function** when passing files from the current source directory to a function since that is the default. Also, you can use the `files()` function to refer to files in the current or any other source directory instead of constructing paths manually with `meson.current_source_dir()`.
-- `current_build_dir()` returns a string to the current build directory
+- `current_build_dir()` returns a string with the absolute path to the current build directory.
- `source_root()` returns a string with the absolute path to the source root directory. Note: you should use the `files()` function to refer to files in the root source directory instead of constructing paths manually with `meson.source_root()`.
-- `build_root()` returns a string with the absolute path to the build root directory
+- `build_root()` returns a string with the absolute path to the build root directory.
-- `project_version()` returns the version string specified in `project` function call
+- `project_version()` returns the version string specified in `project` function call.
-- `project_name()` returns the project name specified in the `project` function call
+- `project_name()` returns the project name specified in the `project` function call.
-- `version()` return a string with the version of Meson
+- `version()` returns a string with the version of Meson.
-- `get_cross_property(propname, fallback_value)` returns the given property from a cross file, the optional second argument is returned if not cross compiling or the given property is not found
+- `get_cross_property(propname, fallback_value)` returns the given property from a cross file, the optional second argument is returned if not cross compiling or the given property is not found.
-- `install_dependency_manifest(output_name)` installs a manifest file containing a list of all subprojects, their versions and license files to the file name given as the argument
+- `install_dependency_manifest(output_name)` installs a manifest file containing a list of all subprojects, their versions and license files to the file name given as the argument.
### `build_machine` object
@@ -739,26 +753,26 @@ A build target is either an [executable](#executable), [shared](#shared_library)
This object is returned by [`meson.get_compiler(lang)`](#meson-object). It represents a compiler for a given language and allows you to query its properties. It has the following methods:
-- `get_id()` returns a string identifying the compiler (e.g. `'gcc'`)
-- `version()` returns the compiler's version number as a string
+- `get_id()` returns a string identifying the compiler (e.g. `'gcc'`).
+- `version()` returns the compiler's version number as a string.
- `find_library(lib_name, ...)` tries to find the library specified in the positional argument. The [result object](#external-library-object) can be used just like the return value of `dependency`. If the keyword argument `required` is false, Meson will proceed even if the library is not found. By default the library is searched for in the system library directory (e.g. /usr/lib). This can be overridden with the `dirs` keyword argument, which can be either a string or a list of strings.
-- `sizeof(typename, ...)` returns the size of the given type (e.g. `'int'`) or -1 if the type is unknown, to add includes set them in the `prefix` keyword argument, you can specify external dependencies to use with `dependencies` keyword argument
-- `alignment(typename)` returns the alignment of the type specified in the positional argument, you can specify external dependencies to use with `dependencies` keyword argument
-- `compiles(code)` returns true if the code fragment given in the positional argument compiles, you can specify external dependencies to use with `dependencies` keyword argument, `code` can be either a string containing source code or a `file` object pointing to the source code
-- `links(code)` returns true if the code fragment given in the positional argument compiles and links, you can specify external dependencies to use with `dependencies` keyword argument, `code` can be either a string containing source code or a `file` object pointing to the source code
-- `run(code)` attempts to compile and execute the given code fragment, returns a run result object, you can specify external dependencies to use with `dependencies` keyword argument, `code` can be either a string containing source code or a `file` object pointing to the source code
+- `sizeof(typename, ...)` returns the size of the given type (e.g. `'int'`) or -1 if the type is unknown, to add includes set them in the `prefix` keyword argument, you can specify external dependencies to use with `dependencies` keyword argument.
+- `alignment(typename)` returns the alignment of the type specified in the positional argument, you can specify external dependencies to use with `dependencies` keyword argument.
+- `compiles(code)` returns true if the code fragment given in the positional argument compiles, you can specify external dependencies to use with `dependencies` keyword argument, `code` can be either a string containing source code or a `file` object pointing to the source code.
+- `links(code)` returns true if the code fragment given in the positional argument compiles and links, you can specify external dependencies to use with `dependencies` keyword argument, `code` can be either a string containing source code or a `file` object pointing to the source code.
+- `run(code)` attempts to compile and execute the given code fragment, returns a run result object, you can specify external dependencies to use with `dependencies` keyword argument, `code` can be either a string containing source code or a `file` object pointing to the source code.
- `has_header` returns true if the specified header can be included, you can specify external dependencies to use with `dependencies` keyword argument and extra code to put above the header test with the `prefix` keyword. In order to look for headers in a specific directory you can use `args : '-I/extra/include/dir`, but this should only be used in exceptional cases for includes that can't be detected via pkg-config and passed via `dependencies`.
-- `has_type(typename)` returns true if the specified token is a type, you can specify external dependencies to use with `dependencies` keyword argument
-- `has_function(funcname)` returns true if the given function is provided by the standard library or a library passed in with the `args` keyword, you can specify external dependencies to use with `dependencies` keyword argument
-- `has_member(typename, membername)` takes two arguments, type name and member name and returns true if the type has the specified member, you can specify external dependencies to use with `dependencies` keyword argument
-- `has_members(typename, membername1, membername2, ...)` takes at least two arguments, type name and one or more member names, returns true if the type has all the specified members, you can specify external dependencies to use with `dependencies` keyword argument
-- `has_header_symbol(headername, symbolname)` allows one to detect whether a particular symbol (function, variable, #define, type definition, etc) is declared in the specified header, you can specify external dependencies to use with `dependencies` keyword argument
-- `has_argument(argument_name)` returns true if the compiler accepts the specified command line argument, that is, can compile code without erroring out or printing a warning about an unknown flag, you can specify external dependencies to use with `dependencies` keyword argument
-- `has_multi_arguments(arg1, arg2, arg3, ...)` is the same as `has_argument` but takes multiple arguments and uses them all in a single compiler invocation, available since 0.37.0
-- `first_supported_argument(list_of_strings)`, given a list of strings, returns the first argument that passes the `has_argument` test above or an empty array if none pass
-- `symbols_have_underscore_prefix()` returns `true` if the C symbol mangling is one underscore (`_`) prefixed to the symbol, available since 0.37.0
+- `has_type(typename)` returns true if the specified token is a type, you can specify external dependencies to use with `dependencies` keyword argument.
+- `has_function(funcname)` returns true if the given function is provided by the standard library or a library passed in with the `args` keyword, you can specify external dependencies to use with `dependencies` keyword argument.
+- `has_member(typename, membername)` takes two arguments, type name and member name and returns true if the type has the specified member, you can specify external dependencies to use with `dependencies` keyword argument.
+- `has_members(typename, membername1, membername2, ...)` takes at least two arguments, type name and one or more member names, returns true if the type has all the specified members, you can specify external dependencies to use with `dependencies` keyword argument.
+- `has_header_symbol(headername, symbolname)` allows one to detect whether a particular symbol (function, variable, #define, type definition, etc) is declared in the specified header, you can specify external dependencies to use with `dependencies` keyword argument.
+- `has_argument(argument_name)` returns true if the compiler accepts the specified command line argument, that is, can compile code without erroring out or printing a warning about an unknown flag, you can specify external dependencies to use with `dependencies` keyword argument.
+- `has_multi_arguments(arg1, arg2, arg3, ...)` is the same as `has_argument` but takes multiple arguments and uses them all in a single compiler invocation, available since 0.37.0.
+- `first_supported_argument(list_of_strings)`, given a list of strings, returns the first argument that passes the `has_argument` test above or an empty array if none pass.
+- `symbols_have_underscore_prefix()` returns `true` if the C symbol mangling is one underscore (`_`) prefixed to the symbol, available since 0.37.0.
- `compute_int(expr, ...')` computes the value of the given expression (as an example `1 + 2`). When cross compiling this is evaluated with an iterative algorithm, you can specify keyword arguments `low` (defaults to -1024), `high` (defaults to 1024) and `guess` to specify max and min values for the search and the value to try first.
-- `get_define(definename)` returns the given preprocessor symbol's value as a string or empty string if it is not defined
+- `get_define(definename)` returns the given preprocessor symbol's value as a string or empty string if it is not defined.
The following keyword arguments can be used:
diff --git a/docs/markdown/Release-notes-for-0.41.0.md b/docs/markdown/Release-notes-for-0.41.0.md
index 2b7469a..a96ded0 100644
--- a/docs/markdown/Release-notes-for-0.41.0.md
+++ b/docs/markdown/Release-notes-for-0.41.0.md
@@ -25,6 +25,34 @@ ninja itself, providing better interoperability with custom commands. This
support may not be perfect; please report any issues found with special
characters to the issue tracker.
+## Pkgconfig support for custom variables
+
+The Pkgconfig module object can add arbitrary variables to the generated .pc
+file with the new `variables` keyword:
+```meson
+pkg.generate(libraries : libs,
+ subdirs : h,
+ version : '1.0',
+ name : 'libsimple',
+ filebase : 'simple',
+ description : 'A simple demo library.',
+ variables : ['datadir=${prefix}/data'])
+```
+
+## A target for creating tarballs
+
+Creating distribution tarballs is simple:
+
+ ninja dist
+
+This will create a `.tar.xz` archive of the source code including
+submodules without any revision control information. This command also
+verifies that the resulting archive can be built, tested and
+installed. This is roughly equivalent to the `distcheck` target in
+other build systems. Currently this only works for projects using Git
+and only with the Ninja backend.
+
+
## Support for passing arguments to Rust compiler
-Targets for building rust now take a `rust_args` keyword. \ No newline at end of file
+Targets for building rust now take a `rust_args` keyword.
diff --git a/docs/markdown/Running-Meson.md b/docs/markdown/Running-Meson.md
index 2dbb755..0e8da43 100644
--- a/docs/markdown/Running-Meson.md
+++ b/docs/markdown/Running-Meson.md
@@ -58,7 +58,7 @@ If you are not using an IDE, Meson uses the [Ninja build system](https://ninja-b
ninja
-The main usability difference between Ninja and Make is that Ninja will automatically detect the number of CPUs in your computer and parallelize itself accordingly. You can override the amount of parallel processes used with the command line argument `-j &lt;num processes&gt;`.
+The main usability difference between Ninja and Make is that Ninja will automatically detect the number of CPUs in your computer and parallelize itself accordingly. You can override the amount of parallel processes used with the command line argument `-j <num processes>`.
It should be noted that after the initial configure step `ninja` is the only command you ever need to type to compile. No matter how you alter your source tree (short of moving it to a completely new location), Meson will detect the changes and regenerate itself accordingly. This is especially handy if you have multiple build directories. Often one of them is used for development (the "debug" build) and others only every now and then (such as a "static analysis" build). Any configuration can be built just by `cd`'ing to the corresponding directory and running Ninja.
diff --git a/docs/markdown/Syntax.md b/docs/markdown/Syntax.md
index 6f43887..2a7428b 100644
--- a/docs/markdown/Syntax.md
+++ b/docs/markdown/Syntax.md
@@ -103,7 +103,7 @@ Strings also support a number of other methods that return transformed copies.
```meson
# Similar to the Python str.strip(). Removes leading/ending spaces and newlines
define = ' -Dsomedefine '
-stripped_define = target.strip()
+stripped_define = define.strip()
# 'stripped_define' now has the value '-Dsomedefine'
```
diff --git a/docs/markdown/legal.md b/docs/markdown/legal.md
index 56ab70b..474d129 100644
--- a/docs/markdown/legal.md
+++ b/docs/markdown/legal.md
@@ -1,6 +1,6 @@
# Legal information
-Meson is copyrighted by all members of the Meson development team. For details see the <tt>authors.txt</tt> file that comes with the source code. Meson is licensed under the [Apache 2 license].
+Meson is copyrighted by all members of the Meson development team. Meson is licensed under the [Apache 2 license].
Meson is a registered trademark of Jussi Pakkanen.
diff --git a/docs/sitemap.txt b/docs/sitemap.txt
index c4bb0d4..9dceb05 100644
--- a/docs/sitemap.txt
+++ b/docs/sitemap.txt
@@ -42,6 +42,7 @@ index.md
Build-system-converters.md
Configuring-a-build-directory.md
Run-targets.md
+ Creating-releases.md
Creating-OSX-packages.md
Creating-Linux-binaries.md
Reference-manual.md
diff --git a/mesonbuild/backend/backends.py b/mesonbuild/backend/backends.py
index 419d04f..b903f4c 100644
--- a/mesonbuild/backend/backends.py
+++ b/mesonbuild/backend/backends.py
@@ -419,7 +419,18 @@ class Backend:
# NOTE: We must preserve the order in which external deps are
# specified, so we reverse the list before iterating over it.
for dep in reversed(target.get_external_deps()):
- commands += dep.get_compile_args()
+ if compiler.language == 'vala':
+ if isinstance(dep, dependencies.PkgConfigDependency):
+ if dep.name == 'glib-2.0' and dep.version_reqs is not None:
+ for req in dep.version_reqs:
+ if req.startswith(('>=', '==')):
+ commands += ['--target-glib', req[2:]]
+ break
+ commands += ['--pkg', dep.name]
+ elif isinstance(dep, dependencies.ExternalLibrary):
+ commands += dep.get_lang_args('vala')
+ else:
+ commands += dep.get_compile_args()
# Qt needs -fPIC for executables
# XXX: We should move to -fPIC for all executables
if isinstance(target, build.Executable):
diff --git a/mesonbuild/backend/ninjabackend.py b/mesonbuild/backend/ninjabackend.py
index 34cf74e..4885f4b 100644
--- a/mesonbuild/backend/ninjabackend.py
+++ b/mesonbuild/backend/ninjabackend.py
@@ -195,6 +195,7 @@ int dummy;
self.generate_tests(outfile)
outfile.write('# Install rules\n\n')
self.generate_install(outfile)
+ self.generate_dist(outfile)
if 'b_coverage' in self.environment.coredata.base_options and \
self.environment.coredata.base_options['b_coverage'].value:
outfile.write('# Coverage rules\n\n')
@@ -1070,20 +1071,11 @@ int dummy;
vala_c_src.append(vala_c_file)
valac_outputs.append(vala_c_file)
- # TODO: Use self.generate_basic_compiler_args to get something more
- # consistent Until then, we should be careful to preserve the
- # precedence of arguments if it changes upstream.
- args = []
- args += valac.get_buildtype_args(self.get_option_for_target('buildtype', target))
- args += self.build.get_project_args(valac, target.subproject)
- args += self.build.get_global_args(valac)
- args += self.environment.coredata.external_args[valac.get_language()]
-
+ args = self.generate_basic_compiler_args(target, valac)
# Tell Valac to output everything in our private directory. Sadly this
# means it will also preserve the directory components of Vala sources
# found inside the build tree (generated sources).
args += ['-d', c_out_dir]
- args += ['-C']
if not isinstance(target, build.Executable):
# Library name
args += ['--library=' + target.name]
@@ -1112,18 +1104,6 @@ int dummy;
# Install GIR to default location if requested by user
if len(target.install_dir) > 3 and target.install_dir[3] is True:
target.install_dir[3] = os.path.join(self.environment.get_datadir(), 'gir-1.0')
- if self.get_option_for_target('werror', target):
- args += valac.get_werror_args()
- for d in target.get_external_deps():
- if isinstance(d, dependencies.PkgConfigDependency):
- if d.name == 'glib-2.0' and d.version_reqs is not None:
- for req in d.version_reqs:
- if req.startswith(('>=', '==')):
- args += ['--target-glib', req[2:]]
- break
- args += ['--pkg', d.name]
- elif isinstance(d, dependencies.ExternalLibrary):
- args += d.get_lang_args('vala')
# Detect gresources and add --gresources arguments for each
for (gres, gensrc) in other_src[1].items():
if isinstance(gensrc, modules.GResourceTarget):
@@ -2201,10 +2181,10 @@ rule FORTRAN_DEP_HACK
commands += linker.get_soname_args(target.prefix, target.name, target.suffix,
abspath, target.soversion,
isinstance(target, build.SharedModule))
- # This is only visited when using the Visual Studio toolchain
+ # This is only visited when building for Windows using either GCC or Visual Studio
if target.vs_module_defs and hasattr(linker, 'gen_vs_module_defs_args'):
commands += linker.gen_vs_module_defs_args(target.vs_module_defs.rel_to_builddir(self.build_to_src))
- # This is only visited when building for Windows using either MinGW/GCC or Visual Studio
+ # This is only visited when building for Windows using either GCC or Visual Studio
if target.import_filename:
commands += linker.gen_import_library_args(os.path.join(target.subdir, target.import_filename))
elif isinstance(target, build.StaticLibrary):
@@ -2397,6 +2377,19 @@ rule FORTRAN_DEP_HACK
# affect behavior in any other way.
return sorted(cmds)
+ def generate_dist(self, outfile):
+ elem = NinjaBuildElement(self.all_outputs, 'dist', 'CUSTOM_COMMAND', 'PHONY')
+ elem.add_item('DESC', 'Creating source packages')
+ elem.add_item('COMMAND', [sys.executable,
+ self.environment.get_build_command(),
+ '--internal', 'dist',
+ self.environment.source_dir,
+ self.environment.build_dir,
+ sys.executable,
+ self.environment.get_build_command()])
+ elem.add_item('pool', 'console')
+ elem.write(outfile)
+
# For things like scan-build and other helper tools we might have.
def generate_utils(self, outfile):
cmd = [sys.executable, self.environment.get_build_command(),
diff --git a/mesonbuild/backend/vs2010backend.py b/mesonbuild/backend/vs2010backend.py
index 533edf0..f97ab87 100644
--- a/mesonbuild/backend/vs2010backend.py
+++ b/mesonbuild/backend/vs2010backend.py
@@ -379,7 +379,7 @@ class Vs2010Backend(backends.Backend):
pl.text = self.platform
globalgroup = ET.SubElement(root, 'PropertyGroup', Label='Globals')
guidelem = ET.SubElement(globalgroup, 'ProjectGuid')
- guidelem.text = self.environment.coredata.test_guid
+ guidelem.text = '{%s}' % self.environment.coredata.test_guid
kw = ET.SubElement(globalgroup, 'Keyword')
kw.text = self.platform + 'Proj'
p = ET.SubElement(globalgroup, 'Platform')
@@ -636,7 +636,7 @@ class Vs2010Backend(backends.Backend):
# Globals
globalgroup = ET.SubElement(root, 'PropertyGroup', Label='Globals')
guidelem = ET.SubElement(globalgroup, 'ProjectGuid')
- guidelem.text = guid
+ guidelem.text = '{%s}' % guid
kw = ET.SubElement(globalgroup, 'Keyword')
kw.text = self.platform + 'Proj'
ns = ET.SubElement(globalgroup, 'RootNamespace')
@@ -1050,7 +1050,7 @@ class Vs2010Backend(backends.Backend):
pl.text = self.platform
globalgroup = ET.SubElement(root, 'PropertyGroup', Label='Globals')
guidelem = ET.SubElement(globalgroup, 'ProjectGuid')
- guidelem.text = self.environment.coredata.test_guid
+ guidelem.text = '{%s}' % self.environment.coredata.test_guid
kw = ET.SubElement(globalgroup, 'Keyword')
kw.text = self.platform + 'Proj'
p = ET.SubElement(globalgroup, 'Platform')
@@ -1130,7 +1130,7 @@ if %%errorlevel%% neq 0 goto :VCEnd'''
pl.text = self.platform
globalgroup = ET.SubElement(root, 'PropertyGroup', Label='Globals')
guidelem = ET.SubElement(globalgroup, 'ProjectGuid')
- guidelem.text = self.environment.coredata.test_guid
+ guidelem.text = '{%s}' % self.environment.coredata.test_guid
kw = ET.SubElement(globalgroup, 'Keyword')
kw.text = self.platform + 'Proj'
p = ET.SubElement(globalgroup, 'Platform')
diff --git a/mesonbuild/build.py b/mesonbuild/build.py
index c6c712f..ce6405b 100644
--- a/mesonbuild/build.py
+++ b/mesonbuild/build.py
@@ -643,9 +643,7 @@ class BuildTarget(Target):
self.vala_gir = kwargs.get('vala_gir', None)
dlist = stringlistify(kwargs.get('d_args', []))
self.add_compiler_args('d', dlist)
- self.link_args = kwargs.get('link_args', [])
- if not isinstance(self.link_args, list):
- self.link_args = [self.link_args]
+ self.link_args = flatten(kwargs.get('link_args', []))
for i in self.link_args:
if not isinstance(i, str):
raise InvalidArguments('Link_args arguments must be strings.')
@@ -818,9 +816,7 @@ You probably should put it in link_with instead.''')
return self.external_deps
def link(self, target):
- if not isinstance(target, list):
- target = [target]
- for t in target:
+ for t in flatten(target):
if hasattr(t, 'held_object'):
t = t.held_object
if not isinstance(t, (StaticLibrary, SharedLibrary)):
@@ -834,9 +830,7 @@ You probably should put it in link_with instead.''')
self.link_targets.append(t)
def link_whole(self, target):
- if not isinstance(target, list):
- target = [target]
- for t in target:
+ for t in flatten(target):
if hasattr(t, 'held_object'):
t = t.held_object
if not isinstance(t, StaticLibrary):
@@ -1448,8 +1442,7 @@ class CustomTarget(Target):
def process_kwargs(self, kwargs):
super().process_kwargs(kwargs)
self.sources = kwargs.get('input', [])
- if not isinstance(self.sources, list):
- self.sources = [self.sources]
+ self.sources = flatten(self.sources)
if 'output' not in kwargs:
raise InvalidArguments('Missing keyword argument "output".')
self.outputs = kwargs['output']
diff --git a/mesonbuild/compilers.py b/mesonbuild/compilers.py
index cf9fa53..199bbab 100644
--- a/mesonbuild/compilers.py
+++ b/mesonbuild/compilers.py
@@ -1678,6 +1678,18 @@ class ValaCompiler(Compiler):
def get_compile_only_args(self):
return ['-C']
+ def get_pic_args(self):
+ return []
+
+ def get_always_args(self):
+ return ['-C']
+
+ def get_warn_args(self, warning_level):
+ return []
+
+ def get_no_warn_args(self):
+ return ['--disable-warnings']
+
def get_werror_args(self):
return ['--fatal-warnings']
@@ -2439,6 +2451,16 @@ class GnuCompiler:
def get_link_whole_for(self, args):
return ['-Wl,--whole-archive'] + args + ['-Wl,--no-whole-archive']
+ def gen_vs_module_defs_args(self, defsfile):
+ if not isinstance(defsfile, str):
+ raise RuntimeError('Module definitions file should be str')
+ # On Windows targets, .def files may be specified on the linker command
+ # line like an object file.
+ if self.gcc_type in (GCC_CYGWIN, GCC_MINGW):
+ return [defsfile]
+ # For other targets, discard the .def file.
+ return []
+
class GnuCCompiler(GnuCompiler, CCompiler):
def __init__(self, exelist, version, gcc_type, is_cross, exe_wrapper=None, defines=None):
diff --git a/mesonbuild/coredata.py b/mesonbuild/coredata.py
index 2dd57a9..a336278 100644
--- a/mesonbuild/coredata.py
+++ b/mesonbuild/coredata.py
@@ -160,7 +160,7 @@ class CoreData:
self.wrap_mode = options.wrap_mode
self.compilers = OrderedDict()
self.cross_compilers = OrderedDict()
- self.deps = {}
+ self.deps = OrderedDict()
self.modules = {}
# Only to print a warning if it changes between Meson invocations.
self.pkgconf_envvar = os.environ.get('PKG_CONFIG_PATH', '')
@@ -347,4 +347,6 @@ forbidden_target_names = {'clean': None,
'build.ninja': None,
'scan-build': None,
'reconfigure': None,
+ 'dist': None,
+ 'distcheck': None,
}
diff --git a/mesonbuild/dependencies.py b/mesonbuild/dependencies.py
deleted file mode 100644
index 37e2cbd..0000000
--- a/mesonbuild/dependencies.py
+++ /dev/null
@@ -1,1810 +0,0 @@
-# Copyright 2013-2017 The Meson development team
-
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-
-# http://www.apache.org/licenses/LICENSE-2.0
-
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# This file contains the detection logic for external
-# dependencies. Mostly just uses pkg-config but also contains
-# custom logic for packages that don't provide them.
-
-# Currently one file, should probably be split into a
-# package before this gets too big.
-
-import re
-import sys
-import os, stat, glob, shutil
-import shlex
-import subprocess
-import sysconfig
-from enum import Enum
-from collections import OrderedDict
-from . mesonlib import MesonException, version_compare, version_compare_many, Popen_safe
-from . import mlog
-from . import mesonlib
-from .environment import detect_cpu_family, for_windows
-
-class DependencyException(MesonException):
- '''Exceptions raised while trying to find dependencies'''
-
-class DependencyMethods(Enum):
- # Auto means to use whatever dependency checking mechanisms in whatever order meson thinks is best.
- AUTO = 'auto'
- PKGCONFIG = 'pkg-config'
- QMAKE = 'qmake'
- # Just specify the standard link arguments, assuming the operating system provides the library.
- SYSTEM = 'system'
- # Detect using sdl2-config
- SDLCONFIG = 'sdlconfig'
- # This is only supported on OSX - search the frameworks directory by name.
- EXTRAFRAMEWORK = 'extraframework'
- # Detect using the sysconfig module.
- SYSCONFIG = 'sysconfig'
-
-class Dependency:
- def __init__(self, type_name, kwargs):
- self.name = "null"
- self.language = None
- self.is_found = False
- self.type_name = type_name
- method = DependencyMethods(kwargs.get('method', 'auto'))
-
- # Set the detection method. If the method is set to auto, use any available method.
- # If method is set to a specific string, allow only that detection method.
- if method == DependencyMethods.AUTO:
- self.methods = self.get_methods()
- elif method in self.get_methods():
- self.methods = [method]
- else:
- raise MesonException('Unsupported detection method: {}, allowed methods are {}'.format(method.value, mlog.format_list(map(lambda x: x.value, [DependencyMethods.AUTO] + self.get_methods()))))
-
- def __repr__(self):
- s = '<{0} {1}: {2}>'
- return s.format(self.__class__.__name__, self.name, self.is_found)
-
- def get_compile_args(self):
- return []
-
- def get_link_args(self):
- return []
-
- def found(self):
- return self.is_found
-
- def get_sources(self):
- """Source files that need to be added to the target.
- As an example, gtest-all.cc when using GTest."""
- return []
-
- def get_methods(self):
- return [DependencyMethods.AUTO]
-
- def get_name(self):
- return self.name
-
- def get_exe_args(self, compiler):
- return []
-
- def need_threads(self):
- return False
-
- def get_pkgconfig_variable(self, variable_name):
- raise MesonException('Tried to get a pkg-config variable from a non-pkgconfig dependency.')
-
-class InternalDependency(Dependency):
- def __init__(self, version, incdirs, compile_args, link_args, libraries, sources, ext_deps):
- super().__init__('internal', {})
- self.version = version
- self.include_directories = incdirs
- self.compile_args = compile_args
- self.link_args = link_args
- self.libraries = libraries
- self.sources = sources
- self.ext_deps = ext_deps
-
- def get_compile_args(self):
- return self.compile_args
-
- def get_link_args(self):
- return self.link_args
-
- def get_version(self):
- return self.version
-
-class PkgConfigDependency(Dependency):
- # The class's copy of the pkg-config path. Avoids having to search for it
- # multiple times in the same Meson invocation.
- class_pkgbin = None
-
- def __init__(self, name, environment, kwargs):
- Dependency.__init__(self, 'pkgconfig', kwargs)
- self.is_libtool = False
- self.required = kwargs.get('required', True)
- self.static = kwargs.get('static', False)
- self.silent = kwargs.get('silent', False)
- if not isinstance(self.static, bool):
- raise DependencyException('Static keyword must be boolean')
- # Store a copy of the pkg-config path on the object itself so it is
- # stored in the pickled coredata and recovered.
- self.pkgbin = None
- self.cargs = []
- self.libs = []
- if 'native' in kwargs and environment.is_cross_build():
- self.want_cross = not kwargs['native']
- else:
- self.want_cross = environment.is_cross_build()
- self.name = name
- self.modversion = 'none'
-
- # When finding dependencies for cross-compiling, we don't care about
- # the 'native' pkg-config
- if self.want_cross:
- if 'pkgconfig' not in environment.cross_info.config['binaries']:
- if self.required:
- raise DependencyException('Pkg-config binary missing from cross file')
- else:
- pkgname = environment.cross_info.config['binaries']['pkgconfig']
- potential_pkgbin = ExternalProgram(pkgname, silent=True)
- if potential_pkgbin.found():
- # FIXME, we should store all pkg-configs in ExternalPrograms.
- # However that is too destabilizing a change to do just before release.
- self.pkgbin = potential_pkgbin.get_command()[0]
- PkgConfigDependency.class_pkgbin = self.pkgbin
- else:
- mlog.debug('Cross pkg-config %s not found.' % potential_pkgbin.name)
- # Only search for the native pkg-config the first time and
- # store the result in the class definition
- elif PkgConfigDependency.class_pkgbin is None:
- self.pkgbin = self.check_pkgconfig()
- PkgConfigDependency.class_pkgbin = self.pkgbin
- else:
- self.pkgbin = PkgConfigDependency.class_pkgbin
-
- self.is_found = False
- if not self.pkgbin:
- if self.required:
- raise DependencyException('Pkg-config not found.')
- return
- if self.want_cross:
- self.type_string = 'Cross'
- else:
- self.type_string = 'Native'
-
- mlog.debug('Determining dependency {!r} with pkg-config executable '
- '{!r}'.format(name, self.pkgbin))
- ret, self.modversion = self._call_pkgbin(['--modversion', name])
- if ret != 0:
- if self.required:
- raise DependencyException('{} dependency {!r} not found'
- ''.format(self.type_string, name))
- return
- found_msg = [self.type_string + ' dependency', mlog.bold(name), 'found:']
- self.version_reqs = kwargs.get('version', None)
- if self.version_reqs is None:
- self.is_found = True
- else:
- if not isinstance(self.version_reqs, (str, list)):
- raise DependencyException('Version argument must be string or list.')
- if isinstance(self.version_reqs, str):
- self.version_reqs = [self.version_reqs]
- (self.is_found, not_found, found) = \
- version_compare_many(self.modversion, self.version_reqs)
- if not self.is_found:
- found_msg += [mlog.red('NO'),
- 'found {!r} but need:'.format(self.modversion),
- ', '.join(["'{}'".format(e) for e in not_found])]
- if found:
- found_msg += ['; matched:',
- ', '.join(["'{}'".format(e) for e in found])]
- if not self.silent:
- mlog.log(*found_msg)
- if self.required:
- m = 'Invalid version of dependency, need {!r} {!r} found {!r}.'
- raise DependencyException(m.format(name, not_found, self.modversion))
- return
- found_msg += [mlog.green('YES'), self.modversion]
- # Fetch cargs to be used while using this dependency
- self._set_cargs()
- # Fetch the libraries and library paths needed for using this
- self._set_libs()
- # Print the found message only at the very end because fetching cflags
- # and libs can also fail if other needed pkg-config files aren't found.
- if not self.silent:
- mlog.log(*found_msg)
-
- def __repr__(self):
- s = '<{0} {1}: {2} {3}>'
- return s.format(self.__class__.__name__, self.name, self.is_found,
- self.version_reqs)
-
- def _call_pkgbin(self, args):
- p, out = Popen_safe([self.pkgbin] + args, env=os.environ)[0:2]
- return p.returncode, out.strip()
-
- def _set_cargs(self):
- ret, out = self._call_pkgbin(['--cflags', self.name])
- if ret != 0:
- raise DependencyException('Could not generate cargs for %s:\n\n%s' %
- (self.name, out))
- self.cargs = out.split()
-
- def _set_libs(self):
- libcmd = [self.name, '--libs']
- if self.static:
- libcmd.append('--static')
- ret, out = self._call_pkgbin(libcmd)
- if ret != 0:
- raise DependencyException('Could not generate libs for %s:\n\n%s' %
- (self.name, out))
- self.libs = []
- for lib in out.split():
- if lib.endswith(".la"):
- shared_libname = self.extract_libtool_shlib(lib)
- shared_lib = os.path.join(os.path.dirname(lib), shared_libname)
- if not os.path.exists(shared_lib):
- shared_lib = os.path.join(os.path.dirname(lib), ".libs", shared_libname)
-
- if not os.path.exists(shared_lib):
- raise DependencyException('Got a libtools specific "%s" dependencies'
- 'but we could not compute the actual shared'
- 'library path' % lib)
- lib = shared_lib
- self.is_libtool = True
- self.libs.append(lib)
-
- def get_pkgconfig_variable(self, variable_name):
- ret, out = self._call_pkgbin(['--variable=' + variable_name, self.name])
- variable = ''
- if ret != 0:
- if self.required:
- raise DependencyException('%s dependency %s not found.' %
- (self.type_string, self.name))
- else:
- variable = out.strip()
- mlog.debug('Got pkgconfig variable %s : %s' % (variable_name, variable))
- return variable
-
- def get_modversion(self):
- return self.modversion
-
- def get_version(self):
- return self.modversion
-
- def get_compile_args(self):
- return self.cargs
-
- def get_link_args(self):
- return self.libs
-
- def get_methods(self):
- return [DependencyMethods.PKGCONFIG]
-
- def check_pkgconfig(self):
- evar = 'PKG_CONFIG'
- if evar in os.environ:
- pkgbin = os.environ[evar].strip()
- else:
- pkgbin = 'pkg-config'
- try:
- p, out = Popen_safe([pkgbin, '--version'])[0:2]
- if p.returncode != 0:
- # Set to False instead of None to signify that we've already
- # searched for it and not found it
- pkgbin = False
- except (FileNotFoundError, PermissionError):
- pkgbin = False
- if pkgbin and not os.path.isabs(pkgbin) and shutil.which(pkgbin):
- # Sometimes shutil.which fails where Popen succeeds, so
- # only find the abs path if it can be found by shutil.which
- pkgbin = shutil.which(pkgbin)
- if not self.silent:
- if pkgbin:
- mlog.log('Found pkg-config:', mlog.bold(pkgbin),
- '(%s)' % out.strip())
- else:
- mlog.log('Found Pkg-config:', mlog.red('NO'))
- return pkgbin
-
- def found(self):
- return self.is_found
-
- def extract_field(self, la_file, fieldname):
- with open(la_file) as f:
- for line in f:
- arr = line.strip().split('=')
- if arr[0] == fieldname:
- return arr[1][1:-1]
- return None
-
- def extract_dlname_field(self, la_file):
- return self.extract_field(la_file, 'dlname')
-
- def extract_libdir_field(self, la_file):
- return self.extract_field(la_file, 'libdir')
-
- def extract_libtool_shlib(self, la_file):
- '''
- Returns the path to the shared library
- corresponding to this .la file
- '''
- dlname = self.extract_dlname_field(la_file)
- if dlname is None:
- return None
-
- # Darwin uses absolute paths where possible; since the libtool files never
- # contain absolute paths, use the libdir field
- if mesonlib.is_osx():
- dlbasename = os.path.basename(dlname)
- libdir = self.extract_libdir_field(la_file)
- if libdir is None:
- return dlbasename
- return os.path.join(libdir, dlbasename)
- # From the comments in extract_libtool(), older libtools had
- # a path rather than the raw dlname
- return os.path.basename(dlname)
-
-class WxDependency(Dependency):
- wx_found = None
-
- def __init__(self, environment, kwargs):
- Dependency.__init__(self, 'wx', kwargs)
- self.is_found = False
- # FIXME: use version instead of modversion
- self.modversion = 'none'
- if WxDependency.wx_found is None:
- self.check_wxconfig()
- if not WxDependency.wx_found:
- # FIXME: this message could be printed after Dependncy found
- mlog.log("Neither wx-config-3.0 nor wx-config found; can't detect dependency")
- return
-
- # FIXME: This should print stdout and stderr using mlog.debug
- p, out = Popen_safe([self.wxc, '--version'])[0:2]
- if p.returncode != 0:
- mlog.log('Dependency wxwidgets found:', mlog.red('NO'))
- self.cargs = []
- self.libs = []
- else:
- self.modversion = out.strip()
- version_req = kwargs.get('version', None)
- if version_req is not None:
- if not version_compare(self.modversion, version_req, strict=True):
- mlog.log('Wxwidgets version %s does not fullfill requirement %s' %
- (self.modversion, version_req))
- return
- mlog.log('Dependency wxwidgets found:', mlog.green('YES'))
- self.is_found = True
- self.requested_modules = self.get_requested(kwargs)
- # wx-config seems to have a cflags as well but since it requires C++,
- # this should be good, at least for now.
- p, out = Popen_safe([self.wxc, '--cxxflags'])[0:2]
- # FIXME: this error should only be raised if required is true
- if p.returncode != 0:
- raise DependencyException('Could not generate cargs for wxwidgets.')
- self.cargs = out.split()
-
- # FIXME: this error should only be raised if required is true
- p, out = Popen_safe([self.wxc, '--libs'] + self.requested_modules)[0:2]
- if p.returncode != 0:
- raise DependencyException('Could not generate libs for wxwidgets.')
- self.libs = out.split()
-
- def get_requested(self, kwargs):
- modules = 'modules'
- if modules not in kwargs:
- return []
- candidates = kwargs[modules]
- if isinstance(candidates, str):
- return [candidates]
- for c in candidates:
- if not isinstance(c, str):
- raise DependencyException('wxwidgets module argument is not a string.')
- return candidates
-
- def get_modversion(self):
- return self.modversion
-
- def get_version(self):
- return self.modversion
-
- def get_compile_args(self):
- return self.cargs
-
- def get_link_args(self):
- return self.libs
-
- def check_wxconfig(self):
- for wxc in ['wx-config-3.0', 'wx-config']:
- try:
- p, out = Popen_safe([wxc, '--version'])[0:2]
- if p.returncode == 0:
- mlog.log('Found wx-config:', mlog.bold(shutil.which(wxc)),
- '(%s)' % out.strip())
- self.wxc = wxc
- WxDependency.wx_found = True
- return
- except (FileNotFoundError, PermissionError):
- pass
- WxDependency.wxconfig_found = False
- mlog.log('Found wx-config:', mlog.red('NO'))
-
- def found(self):
- return self.is_found
-
-class ExternalProgram:
- windows_exts = ('exe', 'msc', 'com', 'bat')
-
- def __init__(self, name, command=None, silent=False, search_dir=None):
- self.name = name
- if command is not None:
- if not isinstance(command, list):
- self.command = [command]
- else:
- self.command = command
- else:
- self.command = self._search(name, search_dir)
- if not silent:
- if self.found():
- mlog.log('Program', mlog.bold(name), 'found:', mlog.green('YES'),
- '(%s)' % ' '.join(self.command))
- else:
- mlog.log('Program', mlog.bold(name), 'found:', mlog.red('NO'))
-
- def __repr__(self):
- r = '<{} {!r} -> {!r}>'
- return r.format(self.__class__.__name__, self.name, self.command)
-
- @staticmethod
- def _shebang_to_cmd(script):
- """
- Check if the file has a shebang and manually parse it to figure out
- the interpreter to use. This is useful if the script is not executable
- or if we're on Windows (which does not understand shebangs).
- """
- try:
- with open(script) as f:
- first_line = f.readline().strip()
- if first_line.startswith('#!'):
- commands = first_line[2:].split('#')[0].strip().split()
- if mesonlib.is_windows():
- # Windows does not have UNIX paths so remove them,
- # but don't remove Windows paths
- if commands[0].startswith('/'):
- commands[0] = commands[0].split('/')[-1]
- if len(commands) > 0 and commands[0] == 'env':
- commands = commands[1:]
- # Windows does not ship python3.exe, but we know the path to it
- if len(commands) > 0 and commands[0] == 'python3':
- commands[0] = sys.executable
- return commands + [script]
- except Exception:
- pass
- return False
-
- def _is_executable(self, path):
- suffix = os.path.splitext(path)[-1].lower()[1:]
- if mesonlib.is_windows():
- if suffix in self.windows_exts:
- return True
- elif os.access(path, os.X_OK):
- return not os.path.isdir(path)
- return False
-
- def _search_dir(self, name, search_dir):
- if search_dir is None:
- return False
- trial = os.path.join(search_dir, name)
- if os.path.exists(trial):
- if self._is_executable(trial):
- return [trial]
- # Now getting desperate. Maybe it is a script file that is
- # a) not chmodded executable, or
- # b) we are on windows so they can't be directly executed.
- return self._shebang_to_cmd(trial)
- else:
- if mesonlib.is_windows():
- for ext in self.windows_exts:
- trial_ext = '{}.{}'.format(trial, ext)
- if os.path.exists(trial_ext):
- return [trial_ext]
- return False
-
- def _search(self, name, search_dir):
- '''
- Search in the specified dir for the specified executable by name
- and if not found search in PATH
- '''
- commands = self._search_dir(name, search_dir)
- if commands:
- return commands
- # Do a standard search in PATH
- command = shutil.which(name)
- if not mesonlib.is_windows():
- # On UNIX-like platforms, shutil.which() is enough to find
- # all executables whether in PATH or with an absolute path
- return [command]
- # HERE BEGINS THE TERROR OF WINDOWS
- if command:
- # On Windows, even if the PATH search returned a full path, we can't be
- # sure that it can be run directly if it's not a native executable.
- # For instance, interpreted scripts sometimes need to be run explicitly
- # with an interpreter if the file association is not done properly.
- name_ext = os.path.splitext(command)[1]
- if name_ext[1:].lower() in self.windows_exts:
- # Good, it can be directly executed
- return [command]
- # Try to extract the interpreter from the shebang
- commands = self._shebang_to_cmd(command)
- if commands:
- return commands
- else:
- # Maybe the name is an absolute path to a native Windows
- # executable, but without the extension. This is technically wrong,
- # but many people do it because it works in the MinGW shell.
- if os.path.isabs(name):
- for ext in self.windows_exts:
- command = '{}.{}'.format(name, ext)
- if os.path.exists(command):
- return [command]
- # On Windows, interpreted scripts must have an extension otherwise they
- # cannot be found by a standard PATH search. So we do a custom search
- # where we manually search for a script with a shebang in PATH.
- search_dirs = os.environ.get('PATH', '').split(';')
- for search_dir in search_dirs:
- commands = self._search_dir(name, search_dir)
- if commands:
- return commands
- return [None]
-
- def found(self):
- return self.command[0] is not None
-
- def get_command(self):
- return self.command[:]
-
- def get_path(self):
- if self.found():
- # Assume that the last element is the full path to the script or
- # binary being run
- return self.command[-1]
- return None
-
- def get_name(self):
- return self.name
-
-class ExternalLibrary(Dependency):
- # TODO: Add `language` support to all Dependency objects so that languages
- # can be exposed for dependencies that support that (i.e., not pkg-config)
- def __init__(self, name, link_args, language, silent=False):
- super().__init__('external', {})
- self.name = name
- self.language = language
- self.is_found = False
- self.link_args = []
- self.lang_args = []
- if link_args:
- self.is_found = True
- if not isinstance(link_args, list):
- link_args = [link_args]
- self.lang_args = {language: link_args}
- # We special-case Vala for now till the Dependency object gets
- # proper support for exposing the language it was written in.
- # Without this, vala-specific link args will end up in the C link
- # args list if you link to a Vala library.
- # This hack use to be in CompilerHolder.find_library().
- if language != 'vala':
- self.link_args = link_args
- if not silent:
- if self.is_found:
- mlog.log('Library', mlog.bold(name), 'found:', mlog.green('YES'))
- else:
- mlog.log('Library', mlog.bold(name), 'found:', mlog.red('NO'))
-
- def found(self):
- return self.is_found
-
- def get_name(self):
- return self.name
-
- def get_link_args(self):
- return self.link_args
-
- def get_lang_args(self, lang):
- if lang in self.lang_args:
- return self.lang_args[lang]
- return []
-
-class BoostDependency(Dependency):
- # Some boost libraries have different names for
- # their sources and libraries. This dict maps
- # between the two.
- name2lib = {'test': 'unit_test_framework'}
-
- def __init__(self, environment, kwargs):
- Dependency.__init__(self, 'boost', kwargs)
- self.name = 'boost'
- self.environment = environment
- self.libdir = ''
- if 'native' in kwargs and environment.is_cross_build():
- self.want_cross = not kwargs['native']
- else:
- self.want_cross = environment.is_cross_build()
- try:
- self.boost_root = os.environ['BOOST_ROOT']
- if not os.path.isabs(self.boost_root):
- raise DependencyException('BOOST_ROOT must be an absolute path.')
- except KeyError:
- self.boost_root = None
- if self.boost_root is None:
- if self.want_cross:
- if 'BOOST_INCLUDEDIR' in os.environ:
- self.incdir = os.environ['BOOST_INCLUDEDIR']
- else:
- raise DependencyException('BOOST_ROOT or BOOST_INCLUDEDIR is needed while cross-compiling')
- if mesonlib.is_windows():
- self.boost_root = self.detect_win_root()
- self.incdir = self.boost_root
- else:
- if 'BOOST_INCLUDEDIR' in os.environ:
- self.incdir = os.environ['BOOST_INCLUDEDIR']
- else:
- self.incdir = '/usr/include'
- else:
- self.incdir = os.path.join(self.boost_root, 'include')
- self.boost_inc_subdir = os.path.join(self.incdir, 'boost')
- mlog.debug('Boost library root dir is', self.boost_root)
- self.src_modules = {}
- self.lib_modules = {}
- self.lib_modules_mt = {}
- self.detect_version()
- self.requested_modules = self.get_requested(kwargs)
- module_str = ', '.join(self.requested_modules)
- if self.version is not None:
- self.detect_src_modules()
- self.detect_lib_modules()
- self.validate_requested()
- if self.boost_root is not None:
- info = self.version + ', ' + self.boost_root
- else:
- info = self.version
- mlog.log('Dependency Boost (%s) found:' % module_str, mlog.green('YES'), info)
- else:
- mlog.log("Dependency Boost (%s) found:" % module_str, mlog.red('NO'))
- if 'cpp' not in self.environment.coredata.compilers:
- raise DependencyException('Tried to use Boost but a C++ compiler is not defined.')
- self.cpp_compiler = self.environment.coredata.compilers['cpp']
-
- def detect_win_root(self):
- globtext = 'c:\\local\\boost_*'
- files = glob.glob(globtext)
- if len(files) > 0:
- return files[0]
- return 'C:\\'
-
- def get_compile_args(self):
- args = []
- include_dir = ''
- if self.boost_root is not None:
- if mesonlib.is_windows():
- include_dir = self.boost_root
- else:
- include_dir = os.path.join(self.boost_root, 'include')
- else:
- include_dir = self.incdir
-
- # Use "-isystem" when including boost headers instead of "-I"
- # to avoid compiler warnings/failures when "-Werror" is used
-
- # Careful not to use "-isystem" on default include dirs as it
- # breaks some of the headers for certain gcc versions
-
- # For example, doing g++ -isystem /usr/include on a simple
- # "int main()" source results in the error:
- # "/usr/include/c++/6.3.1/cstdlib:75:25: fatal error: stdlib.h: No such file or directory"
-
- # See https://gcc.gnu.org/bugzilla/show_bug.cgi?id=70129
- # and http://stackoverflow.com/questions/37218953/isystem-on-a-system-include-directory-causes-errors
- # for more details
-
- # TODO: The correct solution would probably be to ask the
- # compiler for it's default include paths (ie: "gcc -xc++ -E
- # -v -") and avoid including those with -isystem
-
- # For now, use -isystem for all includes except for some
- # typical defaults (which don't need to be included at all
- # since they are in the default include paths)
- if include_dir != '/usr/include' and include_dir != '/usr/local/include':
- args.append("".join(self.cpp_compiler.get_include_args(include_dir, True)))
- return args
-
- def get_requested(self, kwargs):
- candidates = kwargs.get('modules', [])
- if isinstance(candidates, str):
- return [candidates]
- for c in candidates:
- if not isinstance(c, str):
- raise DependencyException('Boost module argument is not a string.')
- return candidates
-
- def validate_requested(self):
- for m in self.requested_modules:
- if m not in self.src_modules:
- raise DependencyException('Requested Boost module "%s" not found.' % m)
-
- def found(self):
- return self.version is not None
-
- def get_version(self):
- return self.version
-
- def detect_version(self):
- try:
- ifile = open(os.path.join(self.boost_inc_subdir, 'version.hpp'))
- except FileNotFoundError:
- self.version = None
- return
- with ifile:
- for line in ifile:
- if line.startswith("#define") and 'BOOST_LIB_VERSION' in line:
- ver = line.split()[-1]
- ver = ver[1:-1]
- self.version = ver.replace('_', '.')
- return
- self.version = None
-
- def detect_src_modules(self):
- for entry in os.listdir(self.boost_inc_subdir):
- entry = os.path.join(self.boost_inc_subdir, entry)
- if stat.S_ISDIR(os.stat(entry).st_mode):
- self.src_modules[os.path.split(entry)[-1]] = True
-
- def detect_lib_modules(self):
- if mesonlib.is_windows():
- return self.detect_lib_modules_win()
- return self.detect_lib_modules_nix()
-
- def detect_lib_modules_win(self):
- arch = detect_cpu_family(self.environment.coredata.compilers)
- # Guess the libdir
- if arch == 'x86':
- gl = 'lib32*'
- elif arch == 'x86_64':
- gl = 'lib64*'
- else:
- # Does anyone do Boost cross-compiling to other archs on Windows?
- gl = None
- # See if the libdir is valid
- if gl:
- libdir = glob.glob(os.path.join(self.boost_root, gl))
- else:
- libdir = []
- # Can't find libdir, bail
- if not libdir:
- return
- libdir = libdir[0]
- self.libdir = libdir
- globber = 'boost_*-gd-*.lib' # FIXME
- for entry in glob.glob(os.path.join(libdir, globber)):
- (_, fname) = os.path.split(entry)
- base = fname.split('_', 1)[1]
- modname = base.split('-', 1)[0]
- self.lib_modules_mt[modname] = fname
-
- def detect_lib_modules_nix(self):
- if mesonlib.is_osx():
- libsuffix = 'dylib'
- else:
- libsuffix = 'so'
-
- globber = 'libboost_*.{}'.format(libsuffix)
- if 'BOOST_LIBRARYDIR' in os.environ:
- libdirs = [os.environ['BOOST_LIBRARYDIR']]
- elif self.boost_root is None:
- libdirs = mesonlib.get_library_dirs()
- else:
- libdirs = [os.path.join(self.boost_root, 'lib')]
- for libdir in libdirs:
- for entry in glob.glob(os.path.join(libdir, globber)):
- lib = os.path.basename(entry)
- name = lib.split('.')[0].split('_', 1)[-1]
- # I'm not 100% sure what to do here. Some distros
- # have modules such as thread only as -mt versions.
- if entry.endswith('-mt.so'):
- self.lib_modules_mt[name] = True
- else:
- self.lib_modules[name] = True
-
- def get_win_link_args(self):
- args = []
- if self.boost_root:
- args.append('-L' + self.libdir)
- for module in self.requested_modules:
- module = BoostDependency.name2lib.get(module, module)
- if module in self.lib_modules_mt:
- args.append(self.lib_modules_mt[module])
- return args
-
- def get_link_args(self):
- if mesonlib.is_windows():
- return self.get_win_link_args()
- args = []
- if self.boost_root:
- args.append('-L' + os.path.join(self.boost_root, 'lib'))
- elif 'BOOST_LIBRARYDIR' in os.environ:
- args.append('-L' + os.environ['BOOST_LIBRARYDIR'])
- for module in self.requested_modules:
- module = BoostDependency.name2lib.get(module, module)
- libname = 'boost_' + module
- # The compiler's library detector is the most reliable so use that first.
- default_detect = self.cpp_compiler.find_library(libname, self.environment, [])
- if default_detect is not None:
- if module == 'unit_testing_framework':
- emon_args = self.cpp_compiler.find_library('boost_test_exec_monitor')
- else:
- emon_args = None
- args += default_detect
- if emon_args is not None:
- args += emon_args
- elif module in self.lib_modules or module in self.lib_modules_mt:
- linkcmd = '-l' + libname
- args.append(linkcmd)
- # FIXME a hack, but Boost's testing framework has a lot of
- # different options and it's hard to determine what to do
- # without feedback from actual users. Update this
- # as we get more bug reports.
- if module == 'unit_testing_framework':
- args.append('-lboost_test_exec_monitor')
- elif module + '-mt' in self.lib_modules_mt:
- linkcmd = '-lboost_' + module + '-mt'
- args.append(linkcmd)
- if module == 'unit_testing_framework':
- args.append('-lboost_test_exec_monitor-mt')
- return args
-
- def get_sources(self):
- return []
-
- def need_threads(self):
- return 'thread' in self.requested_modules
-
-class GTestDependency(Dependency):
- def __init__(self, environment, kwargs):
- Dependency.__init__(self, 'gtest', kwargs)
- self.main = kwargs.get('main', False)
- self.name = 'gtest'
- self.libname = 'libgtest.so'
- self.libmain_name = 'libgtest_main.so'
- self.include_dir = '/usr/include'
- self.src_dirs = ['/usr/src/gtest/src', '/usr/src/googletest/googletest/src']
- self.detect()
-
- def found(self):
- return self.is_found
-
- def detect(self):
- trial_dirs = mesonlib.get_library_dirs()
- glib_found = False
- gmain_found = False
- for d in trial_dirs:
- if os.path.isfile(os.path.join(d, self.libname)):
- glib_found = True
- if os.path.isfile(os.path.join(d, self.libmain_name)):
- gmain_found = True
- if glib_found and gmain_found:
- self.is_found = True
- self.compile_args = []
- self.link_args = ['-lgtest']
- if self.main:
- self.link_args.append('-lgtest_main')
- self.sources = []
- mlog.log('Dependency GTest found:', mlog.green('YES'), '(prebuilt)')
- elif self.detect_srcdir():
- self.is_found = True
- self.compile_args = ['-I' + self.src_include_dir]
- self.link_args = []
- if self.main:
- self.sources = [self.all_src, self.main_src]
- else:
- self.sources = [self.all_src]
- mlog.log('Dependency GTest found:', mlog.green('YES'), '(building self)')
- else:
- mlog.log('Dependency GTest found:', mlog.red('NO'))
- self.is_found = False
- return self.is_found
-
- def detect_srcdir(self):
- for s in self.src_dirs:
- if os.path.exists(s):
- self.src_dir = s
- self.all_src = mesonlib.File.from_absolute_file(
- os.path.join(self.src_dir, 'gtest-all.cc'))
- self.main_src = mesonlib.File.from_absolute_file(
- os.path.join(self.src_dir, 'gtest_main.cc'))
- self.src_include_dir = os.path.normpath(os.path.join(self.src_dir, '..'))
- return True
- return False
-
- def get_compile_args(self):
- arr = []
- if self.include_dir != '/usr/include':
- arr.append('-I' + self.include_dir)
- if hasattr(self, 'src_include_dir'):
- arr.append('-I' + self.src_include_dir)
- return arr
-
- def get_link_args(self):
- return self.link_args
-
- def get_version(self):
- return '1.something_maybe'
-
- def get_sources(self):
- return self.sources
-
- def need_threads(self):
- return True
-
-class GMockDependency(Dependency):
- def __init__(self, environment, kwargs):
- Dependency.__init__(self, 'gmock', kwargs)
- # GMock may be a library or just source.
- # Work with both.
- self.name = 'gmock'
- self.libname = 'libgmock.so'
- trial_dirs = mesonlib.get_library_dirs()
- gmock_found = False
- for d in trial_dirs:
- if os.path.isfile(os.path.join(d, self.libname)):
- gmock_found = True
- if gmock_found:
- self.is_found = True
- self.compile_args = []
- self.link_args = ['-lgmock']
- self.sources = []
- mlog.log('Dependency GMock found:', mlog.green('YES'), '(prebuilt)')
- return
-
- for d in ['/usr/src/googletest/googlemock/src', '/usr/src/gmock/src', '/usr/src/gmock']:
- if os.path.exists(d):
- self.is_found = True
- # Yes, we need both because there are multiple
- # versions of gmock that do different things.
- d2 = os.path.normpath(os.path.join(d, '..'))
- self.compile_args = ['-I' + d, '-I' + d2]
- self.link_args = []
- all_src = mesonlib.File.from_absolute_file(os.path.join(d, 'gmock-all.cc'))
- main_src = mesonlib.File.from_absolute_file(os.path.join(d, 'gmock_main.cc'))
- if kwargs.get('main', False):
- self.sources = [all_src, main_src]
- else:
- self.sources = [all_src]
- mlog.log('Dependency GMock found:', mlog.green('YES'), '(building self)')
- return
-
- mlog.log('Dependency GMock found:', mlog.red('NO'))
- self.is_found = False
-
- def get_version(self):
- return '1.something_maybe'
-
- def get_compile_args(self):
- return self.compile_args
-
- def get_sources(self):
- return self.sources
-
- def get_link_args(self):
- return self.link_args
-
- def found(self):
- return self.is_found
-
-class QtBaseDependency(Dependency):
- def __init__(self, name, env, kwargs):
- Dependency.__init__(self, name, kwargs)
- self.name = name
- self.qtname = name.capitalize()
- self.qtver = name[-1]
- if self.qtver == "4":
- self.qtpkgname = 'Qt'
- else:
- self.qtpkgname = self.qtname
- self.root = '/usr'
- self.bindir = None
- self.silent = kwargs.get('silent', False)
- # We store the value of required here instead of passing it on to
- # PkgConfigDependency etc because we want to try the qmake-based
- # fallback as well.
- self.required = kwargs.pop('required', True)
- kwargs['required'] = False
- mods = kwargs.get('modules', [])
- self.cargs = []
- self.largs = []
- self.is_found = False
- if isinstance(mods, str):
- mods = [mods]
- if not mods:
- raise DependencyException('No ' + self.qtname + ' modules specified.')
- type_text = 'cross' if env.is_cross_build() else 'native'
- found_msg = '{} {} {{}} dependency (modules: {}) found:' \
- ''.format(self.qtname, type_text, ', '.join(mods))
- from_text = 'pkg-config'
-
- # Keep track of the detection methods used, for logging purposes.
- methods = []
- # Prefer pkg-config, then fallback to `qmake -query`
- if DependencyMethods.PKGCONFIG in self.methods:
- self._pkgconfig_detect(mods, env, kwargs)
- methods.append('pkgconfig')
- if not self.is_found and DependencyMethods.QMAKE in self.methods:
- from_text = self._qmake_detect(mods, env, kwargs)
- methods.append('qmake-' + self.name)
- methods.append('qmake')
- if not self.is_found:
- # Reset compile args and link args
- self.cargs = []
- self.largs = []
- from_text = '(checked {})'.format(mlog.format_list(methods))
- self.version = 'none'
- if self.required:
- err_msg = '{} {} dependency not found {}' \
- ''.format(self.qtname, type_text, from_text)
- raise DependencyException(err_msg)
- if not self.silent:
- mlog.log(found_msg.format(from_text), mlog.red('NO'))
- return
- from_text = '`{}`'.format(from_text)
- if not self.silent:
- mlog.log(found_msg.format(from_text), mlog.green('YES'))
-
- def compilers_detect(self):
- "Detect Qt (4 or 5) moc, uic, rcc in the specified bindir or in PATH"
- if self.bindir:
- moc = ExternalProgram(os.path.join(self.bindir, 'moc'), silent=True)
- uic = ExternalProgram(os.path.join(self.bindir, 'uic'), silent=True)
- rcc = ExternalProgram(os.path.join(self.bindir, 'rcc'), silent=True)
- else:
- # We don't accept unsuffixed 'moc', 'uic', and 'rcc' because they
- # are sometimes older, or newer versions.
- moc = ExternalProgram('moc-' + self.name, silent=True)
- uic = ExternalProgram('uic-' + self.name, silent=True)
- rcc = ExternalProgram('rcc-' + self.name, silent=True)
- return moc, uic, rcc
-
- def _pkgconfig_detect(self, mods, env, kwargs):
- modules = OrderedDict()
- for module in mods:
- modules[module] = PkgConfigDependency(self.qtpkgname + module, env, kwargs)
- self.is_found = True
- for m in modules.values():
- if not m.found():
- self.is_found = False
- return
- self.cargs += m.get_compile_args()
- self.largs += m.get_link_args()
- self.version = m.modversion
- # Try to detect moc, uic, rcc
- if 'Core' in modules:
- core = modules['Core']
- else:
- corekwargs = {'required': 'false', 'silent': 'true'}
- core = PkgConfigDependency(self.qtpkgname + 'Core', env, corekwargs)
- # Used by self.compilers_detect()
- self.bindir = self.get_pkgconfig_host_bins(core)
- if not self.bindir:
- # If exec_prefix is not defined, the pkg-config file is broken
- prefix = core.get_pkgconfig_variable('exec_prefix')
- if prefix:
- self.bindir = os.path.join(prefix, 'bin')
-
- def _find_qmake(self, qmake, env):
- # Even when cross-compiling, if we don't get a cross-info qmake, we
- # fallback to using the qmake in PATH because that's what we used to do
- if env.is_cross_build():
- qmake = env.cross_info.config['binaries'].get('qmake', qmake)
- return ExternalProgram(qmake, silent=True)
-
- def _qmake_detect(self, mods, env, kwargs):
- for qmake in ('qmake-' + self.name, 'qmake'):
- self.qmake = self._find_qmake(qmake, env)
- if not self.qmake.found():
- continue
- # Check that the qmake is for qt5
- pc, stdo = Popen_safe(self.qmake.get_command() + ['-v'])[0:2]
- if pc.returncode != 0:
- continue
- if not 'Qt version ' + self.qtver in stdo:
- mlog.log('QMake is not for ' + self.qtname)
- continue
- # Found qmake for Qt5!
- break
- else:
- # Didn't find qmake :(
- return
- self.version = re.search(self.qtver + '(\.\d+)+', stdo).group(0)
- # Query library path, header path, and binary path
- mlog.log("Found qmake:", mlog.bold(self.qmake.get_name()), '(%s)' % self.version)
- stdo = Popen_safe(self.qmake.get_command() + ['-query'])[1]
- qvars = {}
- for line in stdo.split('\n'):
- line = line.strip()
- if line == '':
- continue
- (k, v) = tuple(line.split(':', 1))
- qvars[k] = v
- if mesonlib.is_osx():
- return self._framework_detect(qvars, mods, kwargs)
- incdir = qvars['QT_INSTALL_HEADERS']
- self.cargs.append('-I' + incdir)
- libdir = qvars['QT_INSTALL_LIBS']
- # Used by self.compilers_detect()
- self.bindir = self.get_qmake_host_bins(qvars)
- self.is_found = True
- for module in mods:
- mincdir = os.path.join(incdir, 'Qt' + module)
- self.cargs.append('-I' + mincdir)
- if for_windows(env.is_cross_build(), env):
- libfile = os.path.join(libdir, self.qtpkgname + module + '.lib')
- if not os.path.isfile(libfile):
- # MinGW can link directly to .dll
- libfile = os.path.join(self.bindir, self.qtpkgname + module + '.dll')
- if not os.path.isfile(libfile):
- self.is_found = False
- break
- else:
- libfile = os.path.join(libdir, 'lib{}{}.so'.format(self.qtpkgname, module))
- if not os.path.isfile(libfile):
- self.is_found = False
- break
- self.largs.append(libfile)
- return qmake
-
- def _framework_detect(self, qvars, modules, kwargs):
- libdir = qvars['QT_INSTALL_LIBS']
- for m in modules:
- fname = 'Qt' + m
- fwdep = ExtraFrameworkDependency(fname, kwargs.get('required', True), libdir, kwargs)
- self.cargs.append('-F' + libdir)
- if fwdep.found():
- self.is_found = True
- self.cargs += fwdep.get_compile_args()
- self.largs += fwdep.get_link_args()
- # Used by self.compilers_detect()
- self.bindir = self.get_qmake_host_bins(qvars)
-
- def get_qmake_host_bins(self, qvars):
- # Prefer QT_HOST_BINS (qt5, correct for cross and native compiling)
- # but fall back to QT_INSTALL_BINS (qt4)
- if 'QT_HOST_BINS' in qvars:
- return qvars['QT_HOST_BINS']
- else:
- return qvars['QT_INSTALL_BINS']
-
- def get_version(self):
- return self.version
-
- def get_compile_args(self):
- return self.cargs
-
- def get_sources(self):
- return []
-
- def get_link_args(self):
- return self.largs
-
- def get_methods(self):
- return [DependencyMethods.PKGCONFIG, DependencyMethods.QMAKE]
-
- def found(self):
- return self.is_found
-
- def get_exe_args(self, compiler):
- # Originally this was -fPIE but nowadays the default
- # for upstream and distros seems to be -reduce-relocations
- # which requires -fPIC. This may cause a performance
- # penalty when using self-built Qt or on platforms
- # where -fPIC is not required. If this is an issue
- # for you, patches are welcome.
- return compiler.get_pic_args()
-
-class Qt5Dependency(QtBaseDependency):
- def __init__(self, env, kwargs):
- QtBaseDependency.__init__(self, 'qt5', env, kwargs)
-
- def get_pkgconfig_host_bins(self, core):
- return core.get_pkgconfig_variable('host_bins')
-
-class Qt4Dependency(QtBaseDependency):
- def __init__(self, env, kwargs):
- QtBaseDependency.__init__(self, 'qt4', env, kwargs)
-
- def get_pkgconfig_host_bins(self, core):
- # Only return one bins dir, because the tools are generally all in one
- # directory for Qt4, in Qt5, they must all be in one directory. Return
- # the first one found among the bin variables, in case one tool is not
- # configured to be built.
- applications = ['moc', 'uic', 'rcc', 'lupdate', 'lrelease']
- for application in applications:
- try:
- return os.path.dirname(core.get_pkgconfig_variable('%s_location' % application))
- except MesonException:
- pass
-
-class GnuStepDependency(Dependency):
- def __init__(self, environment, kwargs):
- Dependency.__init__(self, 'gnustep', kwargs)
- self.required = kwargs.get('required', True)
- self.modules = kwargs.get('modules', [])
- self.detect()
-
- def detect(self):
- self.confprog = 'gnustep-config'
- try:
- gp = Popen_safe([self.confprog, '--help'])[0]
- except (FileNotFoundError, PermissionError):
- self.args = None
- mlog.log('Dependency GnuStep found:', mlog.red('NO'), '(no gnustep-config)')
- return
- if gp.returncode != 0:
- self.args = None
- mlog.log('Dependency GnuStep found:', mlog.red('NO'))
- return
- if 'gui' in self.modules:
- arg = '--gui-libs'
- else:
- arg = '--base-libs'
- fp, flagtxt, flagerr = Popen_safe([self.confprog, '--objc-flags'])
- if fp.returncode != 0:
- raise DependencyException('Error getting objc-args: %s %s' % (flagtxt, flagerr))
- args = flagtxt.split()
- self.args = self.filter_arsg(args)
- fp, libtxt, liberr = Popen_safe([self.confprog, arg])
- if fp.returncode != 0:
- raise DependencyException('Error getting objc-lib args: %s %s' % (libtxt, liberr))
- self.libs = self.weird_filter(libtxt.split())
- self.version = self.detect_version()
- mlog.log('Dependency', mlog.bold('GnuStep'), 'found:',
- mlog.green('YES'), self.version)
-
- def weird_filter(self, elems):
- """When building packages, the output of the enclosing Make
-is sometimes mixed among the subprocess output. I have no idea
-why. As a hack filter out everything that is not a flag."""
- return [e for e in elems if e.startswith('-')]
-
- def filter_arsg(self, args):
- """gnustep-config returns a bunch of garbage args such
- as -O2 and so on. Drop everything that is not needed."""
- result = []
- for f in args:
- if f.startswith('-D') \
- or f.startswith('-f') \
- or f.startswith('-I') \
- or f == '-pthread' \
- or (f.startswith('-W') and not f == '-Wall'):
- result.append(f)
- return result
-
- def detect_version(self):
- gmake = self.get_variable('GNUMAKE')
- makefile_dir = self.get_variable('GNUSTEP_MAKEFILES')
- # This Makefile has the GNUStep version set
- base_make = os.path.join(makefile_dir, 'Additional', 'base.make')
- # Print the Makefile variable passed as the argument. For instance, if
- # you run the make target `print-SOME_VARIABLE`, this will print the
- # value of the variable `SOME_VARIABLE`.
- printver = "print-%:\n\t@echo '$($*)'"
- env = os.environ.copy()
- # See base.make to understand why this is set
- env['FOUNDATION_LIB'] = 'gnu'
- p, o, e = Popen_safe([gmake, '-f', '-', '-f', base_make,
- 'print-GNUSTEP_BASE_VERSION'],
- env=env, write=printver, stdin=subprocess.PIPE)
- version = o.strip()
- if not version:
- mlog.debug("Couldn't detect GNUStep version, falling back to '1'")
- # Fallback to setting some 1.x version
- version = '1'
- return version
-
- def get_variable(self, var):
- p, o, e = Popen_safe([self.confprog, '--variable=' + var])
- if p.returncode != 0 and self.required:
- raise DependencyException('{!r} for variable {!r} failed to run'
- ''.format(self.confprog, var))
- return o.strip()
-
- def found(self):
- return self.args is not None
-
- def get_version(self):
- return self.version
-
- def get_compile_args(self):
- if self.args is None:
- return []
- return self.args
-
- def get_link_args(self):
- return self.libs
-
-class AppleFrameworks(Dependency):
- def __init__(self, environment, kwargs):
- Dependency.__init__(self, 'appleframeworks', kwargs)
- modules = kwargs.get('modules', [])
- if isinstance(modules, str):
- modules = [modules]
- if not modules:
- raise DependencyException("AppleFrameworks dependency requires at least one module.")
- self.frameworks = modules
-
- def get_link_args(self):
- args = []
- for f in self.frameworks:
- args.append('-framework')
- args.append(f)
- return args
-
- def found(self):
- return mesonlib.is_osx()
-
- def get_version(self):
- return 'unknown'
-
-class GLDependency(Dependency):
- def __init__(self, environment, kwargs):
- Dependency.__init__(self, 'gl', kwargs)
- self.is_found = False
- self.cargs = []
- self.linkargs = []
- if DependencyMethods.PKGCONFIG in self.methods:
- try:
- pcdep = PkgConfigDependency('gl', environment, kwargs)
- if pcdep.found():
- self.type_name = 'pkgconfig'
- self.is_found = True
- self.cargs = pcdep.get_compile_args()
- self.linkargs = pcdep.get_link_args()
- self.version = pcdep.get_version()
- return
- except Exception:
- pass
- if DependencyMethods.SYSTEM in self.methods:
- if mesonlib.is_osx():
- self.is_found = True
- self.linkargs = ['-framework', 'OpenGL']
- self.version = '1' # FIXME
- return
- if mesonlib.is_windows():
- self.is_found = True
- self.linkargs = ['-lopengl32']
- self.version = '1' # FIXME: unfixable?
- return
-
- def get_link_args(self):
- return self.linkargs
-
- def get_version(self):
- return self.version
-
- def get_methods(self):
- if mesonlib.is_osx() or mesonlib.is_windows():
- return [DependencyMethods.PKGCONFIG, DependencyMethods.SYSTEM]
- else:
- return [DependencyMethods.PKGCONFIG]
-
-# There are three different ways of depending on SDL2:
-# sdl2-config, pkg-config and OSX framework
-class SDL2Dependency(Dependency):
- def __init__(self, environment, kwargs):
- Dependency.__init__(self, 'sdl2', kwargs)
- self.is_found = False
- self.cargs = []
- self.linkargs = []
- if DependencyMethods.PKGCONFIG in self.methods:
- try:
- pcdep = PkgConfigDependency('sdl2', environment, kwargs)
- if pcdep.found():
- self.type_name = 'pkgconfig'
- self.is_found = True
- self.cargs = pcdep.get_compile_args()
- self.linkargs = pcdep.get_link_args()
- self.version = pcdep.get_version()
- return
- except Exception as e:
- mlog.debug('SDL 2 not found via pkgconfig. Trying next, error was:', str(e))
- pass
- if DependencyMethods.SDLCONFIG in self.methods:
- sdlconf = shutil.which('sdl2-config')
- if sdlconf:
- stdo = Popen_safe(['sdl2-config', '--cflags'])[1]
- self.cargs = stdo.strip().split()
- stdo = Popen_safe(['sdl2-config', '--libs'])[1]
- self.linkargs = stdo.strip().split()
- stdo = Popen_safe(['sdl2-config', '--version'])[1]
- self.version = stdo.strip()
- self.is_found = True
- mlog.log('Dependency', mlog.bold('sdl2'), 'found:', mlog.green('YES'),
- self.version, '(%s)' % sdlconf)
- return
- mlog.debug('Could not find sdl2-config binary, trying next.')
- if DependencyMethods.EXTRAFRAMEWORK in self.methods:
- if mesonlib.is_osx():
- fwdep = ExtraFrameworkDependency('sdl2', kwargs.get('required', True), None, kwargs)
- if fwdep.found():
- self.is_found = True
- self.cargs = fwdep.get_compile_args()
- self.linkargs = fwdep.get_link_args()
- self.version = '2' # FIXME
- return
- mlog.log('Dependency', mlog.bold('sdl2'), 'found:', mlog.red('NO'))
-
- def get_compile_args(self):
- return self.cargs
-
- def get_link_args(self):
- return self.linkargs
-
- def found(self):
- return self.is_found
-
- def get_version(self):
- return self.version
-
- def get_methods(self):
- if mesonlib.is_osx():
- return [DependencyMethods.PKGCONFIG, DependencyMethods.SDLCONFIG, DependencyMethods.EXTRAFRAMEWORK]
- else:
- return [DependencyMethods.PKGCONFIG, DependencyMethods.SDLCONFIG]
-
-class ExtraFrameworkDependency(Dependency):
- def __init__(self, name, required, path, kwargs):
- Dependency.__init__(self, 'extraframeworks', kwargs)
- self.name = None
- self.detect(name, path)
- if self.found():
- mlog.log('Dependency', mlog.bold(name), 'found:', mlog.green('YES'),
- os.path.join(self.path, self.name))
- else:
- mlog.log('Dependency', name, 'found:', mlog.red('NO'))
-
- def detect(self, name, path):
- lname = name.lower()
- if path is None:
- paths = ['/Library/Frameworks']
- else:
- paths = [path]
- for p in paths:
- for d in os.listdir(p):
- fullpath = os.path.join(p, d)
- if lname != d.split('.')[0].lower():
- continue
- if not stat.S_ISDIR(os.stat(fullpath).st_mode):
- continue
- self.path = p
- self.name = d
- return
-
- def get_compile_args(self):
- if self.found():
- return ['-I' + os.path.join(self.path, self.name, 'Headers')]
- return []
-
- def get_link_args(self):
- if self.found():
- return ['-F' + self.path, '-framework', self.name.split('.')[0]]
- return []
-
- def found(self):
- return self.name is not None
-
- def get_version(self):
- return 'unknown'
-
-class ThreadDependency(Dependency):
- def __init__(self, environment, kwargs):
- super().__init__('threads', {})
- self.name = 'threads'
- self.is_found = True
- mlog.log('Dependency', mlog.bold(self.name), 'found:', mlog.green('YES'))
-
- def need_threads(self):
- return True
-
- def get_version(self):
- return 'unknown'
-
-class Python3Dependency(Dependency):
- def __init__(self, environment, kwargs):
- super().__init__('python3', kwargs)
- self.name = 'python3'
- self.is_found = False
- # We can only be sure that it is Python 3 at this point
- self.version = '3'
- if DependencyMethods.PKGCONFIG in self.methods:
- try:
- pkgdep = PkgConfigDependency('python3', environment, kwargs)
- if pkgdep.found():
- self.cargs = pkgdep.cargs
- self.libs = pkgdep.libs
- self.version = pkgdep.get_version()
- self.is_found = True
- return
- except Exception:
- pass
- if not self.is_found:
- if mesonlib.is_windows() and DependencyMethods.SYSCONFIG in self.methods:
- self._find_libpy3_windows(environment)
- elif mesonlib.is_osx() and DependencyMethods.EXTRAFRAMEWORK in self.methods:
- # In OSX the Python 3 framework does not have a version
- # number in its name.
- fw = ExtraFrameworkDependency('python', False, None, kwargs)
- if fw.found():
- self.cargs = fw.get_compile_args()
- self.libs = fw.get_link_args()
- self.is_found = True
- if self.is_found:
- mlog.log('Dependency', mlog.bold(self.name), 'found:', mlog.green('YES'))
- else:
- mlog.log('Dependency', mlog.bold(self.name), 'found:', mlog.red('NO'))
-
- def _find_libpy3_windows(self, env):
- '''
- Find python3 libraries on Windows and also verify that the arch matches
- what we are building for.
- '''
- pyarch = sysconfig.get_platform()
- arch = detect_cpu_family(env.coredata.compilers)
- if arch == 'x86':
- arch = '32'
- elif arch == 'x86_64':
- arch = '64'
- else:
- # We can't cross-compile Python 3 dependencies on Windows yet
- mlog.log('Unknown architecture {!r} for'.format(arch),
- mlog.bold(self.name))
- self.is_found = False
- return
- # Pyarch ends in '32' or '64'
- if arch != pyarch[-2:]:
- mlog.log('Need', mlog.bold(self.name),
- 'for {}-bit, but found {}-bit'.format(arch, pyarch[-2:]))
- self.is_found = False
- return
- inc = sysconfig.get_path('include')
- platinc = sysconfig.get_path('platinclude')
- self.cargs = ['-I' + inc]
- if inc != platinc:
- self.cargs.append('-I' + platinc)
- # Nothing exposes this directly that I coulf find
- basedir = sysconfig.get_config_var('base')
- vernum = sysconfig.get_config_var('py_version_nodot')
- self.libs = ['-L{}/libs'.format(basedir),
- '-lpython{}'.format(vernum)]
- self.version = sysconfig.get_config_var('py_version_short')
- self.is_found = True
-
- def get_compile_args(self):
- return self.cargs
-
- def get_link_args(self):
- return self.libs
-
- def get_methods(self):
- if mesonlib.is_windows():
- return [DependencyMethods.PKGCONFIG, DependencyMethods.SYSCONFIG]
- elif mesonlib.is_osx():
- return [DependencyMethods.PKGCONFIG, DependencyMethods.EXTRAFRAMEWORK]
- else:
- return [DependencyMethods.PKGCONFIG]
-
- def get_version(self):
- return self.version
-
-class ValgrindDependency(PkgConfigDependency):
-
- def __init__(self, environment, kwargs):
- PkgConfigDependency.__init__(self, 'valgrind', environment, kwargs)
-
- def get_link_args(self):
- return []
-
-class LLVMDependency(Dependency):
- """LLVM dependency.
-
- LLVM uses a special tool, llvm-config, which has arguments for getting
- c args, cxx args, and ldargs as well as version.
- """
-
- # Ordered list of llvm-config binaries to try. Start with base, then try
- # newest back to oldest (3.5 is abitrary), and finally the devel version.
- llvm_config_bins = [
- 'llvm-config', 'llvm-config-4.0', 'llvm-config-3.9', 'llvm-config39',
- 'llvm-config-3.8', 'llvm-config38', 'llvm-config-3.7', 'llvm-config37',
- 'llvm-config-3.6', 'llvm-config36', 'llvm-config-3.5', 'llvm-config35',
- 'llvm-config-devel',
- ]
- llvmconfig = None
- _llvmconfig_found = False
- __best_found = None
-
- def __init__(self, environment, kwargs):
- super().__init__('llvm-config', kwargs)
- # It's necessary for LLVM <= 3.8 to use the C++ linker. For 3.9 and 4.0
- # the C linker works fine if only using the C API.
- self.language = 'cpp'
- self.cargs = []
- self.libs = []
- self.modules = []
-
- required = kwargs.get('required', True)
- req_version = kwargs.get('version', None)
- if self.llvmconfig is None:
- self.check_llvmconfig(req_version)
- if not self._llvmconfig_found:
- if self.__best_found is not None:
- mlog.log('found {!r} but need:'.format(self.version),
- req_version)
- else:
- mlog.log("No llvm-config found; can't detect dependency")
- mlog.log('Dependency LLVM found:', mlog.red('NO'))
- if required:
- raise DependencyException('Dependency LLVM not found')
- return
-
- p, out, err = Popen_safe([self.llvmconfig, '--version'])
- if p.returncode != 0:
- mlog.debug('stdout: {}\nstderr: {}'.format(out, err))
- if required:
- raise DependencyException('Dependency LLVM not found')
- return
- else:
- self.version = out.strip()
- mlog.log('Dependency LLVM found:', mlog.green('YES'))
- self.is_found = True
-
- p, out = Popen_safe(
- [self.llvmconfig, '--libs', '--ldflags', '--system-libs'])[:2]
- if p.returncode != 0:
- raise DependencyException('Could not generate libs for LLVM.')
- self.libs = shlex.split(out)
-
- p, out = Popen_safe([self.llvmconfig, '--cppflags'])[:2]
- if p.returncode != 0:
- raise DependencyException('Could not generate includedir for LLVM.')
- self.cargs = shlex.split(out)
-
- p, out = Popen_safe([self.llvmconfig, '--components'])[:2]
- if p.returncode != 0:
- raise DependencyException('Could not generate modules for LLVM.')
- self.modules = shlex.split(out)
-
- modules = mesonlib.stringlistify(kwargs.get('modules', []))
- for mod in modules:
- if mod not in self.modules:
- mlog.log('LLVM module', mod, 'found:', mlog.red('NO'))
- self.is_found = False
- if required:
- raise DependencyException(
- 'Could not find required LLVM Component: {}'.format(mod))
- else:
- mlog.log('LLVM module', mod, 'found:', mlog.green('YES'))
-
- def get_version(self):
- return self.version
-
- def get_compile_args(self):
- return self.cargs
-
- def get_link_args(self):
- return self.libs
-
- @classmethod
- def check_llvmconfig(cls, version_req):
- """Try to find the highest version of llvm-config."""
- for llvmconfig in cls.llvm_config_bins:
- try:
- p, out = Popen_safe([llvmconfig, '--version'])[0:2]
- out = out.strip()
- if p.returncode != 0:
- continue
- if version_req:
- if version_compare(out, version_req, strict=True):
- if cls.__best_found and version_compare(out, '<={}'.format(cls.__best_found), strict=True):
- continue
- cls.__best_found = out
- cls.llvmconfig = llvmconfig
- else:
- # If no specific version is requested use the first version
- # found, since that should be the best.
- cls.__best_found = out
- cls.llvmconfig = llvmconfig
- break
- except (FileNotFoundError, PermissionError):
- pass
- if cls.__best_found:
- mlog.log('Found llvm-config:',
- mlog.bold(shutil.which(cls.llvmconfig)),
- '({})'.format(out.strip()))
- cls._llvmconfig_found = True
- else:
- cls.llvmconfig = False
-
- def need_threads(self):
- return True
-
-
-def get_dep_identifier(name, kwargs):
- elements = [name]
- modlist = kwargs.get('modules', [])
- if isinstance(modlist, str):
- modlist = [modlist]
- for module in modlist:
- elements.append(module)
- # We use a tuple because we need a non-mutable structure to use as the key
- # of a dictionary and a string has potential for name collisions
- identifier = tuple(elements)
- identifier += ('main', kwargs.get('main', False))
- identifier += ('static', kwargs.get('static', False))
- if 'fallback' in kwargs:
- f = kwargs.get('fallback')
- identifier += ('fallback', f[0], f[1])
- return identifier
-
-def find_external_dependency(name, environment, kwargs):
- required = kwargs.get('required', True)
- if not isinstance(required, bool):
- raise DependencyException('Keyword "required" must be a boolean.')
- if not isinstance(kwargs.get('method', ''), str):
- raise DependencyException('Keyword "method" must be a string.')
- lname = name.lower()
- if lname in packages:
- dep = packages[lname](environment, kwargs)
- if required and not dep.found():
- raise DependencyException('Dependency "%s" not found' % name)
- return dep
- pkg_exc = None
- pkgdep = None
- try:
- pkgdep = PkgConfigDependency(name, environment, kwargs)
- if pkgdep.found():
- return pkgdep
- except Exception as e:
- pkg_exc = e
- if mesonlib.is_osx():
- fwdep = ExtraFrameworkDependency(name, required, None, kwargs)
- if required and not fwdep.found():
- m = 'Dependency {!r} not found, tried Extra Frameworks ' \
- 'and Pkg-Config:\n\n' + str(pkg_exc)
- raise DependencyException(m.format(name))
- return fwdep
- if pkg_exc is not None:
- raise pkg_exc
- mlog.log('Dependency', mlog.bold(name), 'found:', mlog.red('NO'))
- return pkgdep
-
-# This has to be at the end so the classes it references
-# are defined.
-packages = {'boost': BoostDependency,
- 'gtest': GTestDependency,
- 'gmock': GMockDependency,
- 'qt5': Qt5Dependency,
- 'qt4': Qt4Dependency,
- 'gnustep': GnuStepDependency,
- 'appleframeworks': AppleFrameworks,
- 'wxwidgets': WxDependency,
- 'sdl2': SDL2Dependency,
- 'gl': GLDependency,
- 'threads': ThreadDependency,
- 'python3': Python3Dependency,
- 'valgrind': ValgrindDependency,
- 'llvm': LLVMDependency,
- }
diff --git a/mesonbuild/dependencies/__init__.py b/mesonbuild/dependencies/__init__.py
new file mode 100644
index 0000000..ec11152
--- /dev/null
+++ b/mesonbuild/dependencies/__init__.py
@@ -0,0 +1,46 @@
+# Copyright 2017 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from .base import ( # noqa: F401
+ Dependency, DependencyException, DependencyMethods, ExternalProgram, ExternalLibrary, ExtraFrameworkDependency,
+ InternalDependency, PkgConfigDependency, find_external_dependency, get_dep_identifier, packages)
+from .dev import GMockDependency, GTestDependency, LLVMDependency, ValgrindDependency
+from .misc import BoostDependency, Python3Dependency, ThreadDependency
+from .platform import AppleFrameworks
+from .ui import GLDependency, GnuStepDependency, Qt4Dependency, Qt5Dependency, SDL2Dependency, WxDependency
+
+
+packages.update({
+ # From dev:
+ 'gtest': GTestDependency,
+ 'gmock': GMockDependency,
+ 'llvm': LLVMDependency,
+ 'valgrind': ValgrindDependency,
+
+ # From misc:
+ 'boost': BoostDependency,
+ 'python3': Python3Dependency,
+ 'threads': ThreadDependency,
+
+ # From platform:
+ 'appleframeworks': AppleFrameworks,
+
+ # From ui:
+ 'gl': GLDependency,
+ 'gnustep': GnuStepDependency,
+ 'qt4': Qt4Dependency,
+ 'qt5': Qt5Dependency,
+ 'sdl2': SDL2Dependency,
+ 'wxwidgets': WxDependency,
+})
diff --git a/mesonbuild/dependencies/base.py b/mesonbuild/dependencies/base.py
new file mode 100644
index 0000000..b934ddf
--- /dev/null
+++ b/mesonbuild/dependencies/base.py
@@ -0,0 +1,641 @@
+# Copyright 2013-2017 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This file contains the detection logic for external dependencies.
+# Custom logic for several other packages is in separate files.
+
+import os
+import shutil
+import stat
+import sys
+from enum import Enum
+
+from .. import mlog
+from .. import mesonlib
+from ..mesonlib import MesonException, Popen_safe, flatten, version_compare_many
+
+
+# This must be defined in this file to avoid cyclical references.
+packages = {}
+
+
+class DependencyException(MesonException):
+ '''Exceptions raised while trying to find dependencies'''
+
+
+class DependencyMethods(Enum):
+ # Auto means to use whatever dependency checking mechanisms in whatever order meson thinks is best.
+ AUTO = 'auto'
+ PKGCONFIG = 'pkg-config'
+ QMAKE = 'qmake'
+ # Just specify the standard link arguments, assuming the operating system provides the library.
+ SYSTEM = 'system'
+ # Detect using sdl2-config
+ SDLCONFIG = 'sdlconfig'
+ # This is only supported on OSX - search the frameworks directory by name.
+ EXTRAFRAMEWORK = 'extraframework'
+ # Detect using the sysconfig module.
+ SYSCONFIG = 'sysconfig'
+
+
+class Dependency:
+ def __init__(self, type_name, kwargs):
+ self.name = "null"
+ self.language = None
+ self.is_found = False
+ self.type_name = type_name
+ method = DependencyMethods(kwargs.get('method', 'auto'))
+
+ # Set the detection method. If the method is set to auto, use any available method.
+ # If method is set to a specific string, allow only that detection method.
+ if method == DependencyMethods.AUTO:
+ self.methods = self.get_methods()
+ elif method in self.get_methods():
+ self.methods = [method]
+ else:
+ raise MesonException(
+ 'Unsupported detection method: {}, allowed methods are {}'.format(
+ method.value,
+ mlog.format_list(map(lambda x: x.value, [DependencyMethods.AUTO] + self.get_methods()))))
+
+ def __repr__(self):
+ s = '<{0} {1}: {2}>'
+ return s.format(self.__class__.__name__, self.name, self.is_found)
+
+ def get_compile_args(self):
+ return []
+
+ def get_link_args(self):
+ return []
+
+ def found(self):
+ return self.is_found
+
+ def get_sources(self):
+ """Source files that need to be added to the target.
+ As an example, gtest-all.cc when using GTest."""
+ return []
+
+ def get_methods(self):
+ return [DependencyMethods.AUTO]
+
+ def get_name(self):
+ return self.name
+
+ def get_exe_args(self, compiler):
+ return []
+
+ def need_threads(self):
+ return False
+
+ def get_pkgconfig_variable(self, variable_name):
+ raise MesonException('Tried to get a pkg-config variable from a non-pkgconfig dependency.')
+
+
+class InternalDependency(Dependency):
+ def __init__(self, version, incdirs, compile_args, link_args, libraries, sources, ext_deps):
+ super().__init__('internal', {})
+ self.version = version
+ self.is_found = True
+ self.include_directories = incdirs
+ self.compile_args = compile_args
+ self.link_args = link_args
+ self.libraries = libraries
+ self.sources = sources
+ self.ext_deps = ext_deps
+
+ def get_compile_args(self):
+ return self.compile_args
+
+ def get_link_args(self):
+ return self.link_args
+
+ def get_version(self):
+ return self.version
+
+
+class PkgConfigDependency(Dependency):
+ # The class's copy of the pkg-config path. Avoids having to search for it
+ # multiple times in the same Meson invocation.
+ class_pkgbin = None
+
+ def __init__(self, name, environment, kwargs):
+ Dependency.__init__(self, 'pkgconfig', kwargs)
+ self.is_libtool = False
+ self.version_reqs = kwargs.get('version', None)
+ self.required = kwargs.get('required', True)
+ self.static = kwargs.get('static', False)
+ self.silent = kwargs.get('silent', False)
+ if not isinstance(self.static, bool):
+ raise DependencyException('Static keyword must be boolean')
+ # Store a copy of the pkg-config path on the object itself so it is
+ # stored in the pickled coredata and recovered.
+ self.pkgbin = None
+ self.cargs = []
+ self.libs = []
+ if 'native' in kwargs and environment.is_cross_build():
+ self.want_cross = not kwargs['native']
+ else:
+ self.want_cross = environment.is_cross_build()
+ self.name = name
+ self.modversion = 'none'
+
+ # When finding dependencies for cross-compiling, we don't care about
+ # the 'native' pkg-config
+ if self.want_cross:
+ if 'pkgconfig' not in environment.cross_info.config['binaries']:
+ if self.required:
+ raise DependencyException('Pkg-config binary missing from cross file')
+ else:
+ pkgname = environment.cross_info.config['binaries']['pkgconfig']
+ potential_pkgbin = ExternalProgram(pkgname, silent=True)
+ if potential_pkgbin.found():
+ # FIXME, we should store all pkg-configs in ExternalPrograms.
+ # However that is too destabilizing a change to do just before release.
+ self.pkgbin = potential_pkgbin.get_command()[0]
+ PkgConfigDependency.class_pkgbin = self.pkgbin
+ else:
+ mlog.debug('Cross pkg-config %s not found.' % potential_pkgbin.name)
+ # Only search for the native pkg-config the first time and
+ # store the result in the class definition
+ elif PkgConfigDependency.class_pkgbin is None:
+ self.pkgbin = self.check_pkgconfig()
+ PkgConfigDependency.class_pkgbin = self.pkgbin
+ else:
+ self.pkgbin = PkgConfigDependency.class_pkgbin
+
+ self.is_found = False
+ if not self.pkgbin:
+ if self.required:
+ raise DependencyException('Pkg-config not found.')
+ return
+ if self.want_cross:
+ self.type_string = 'Cross'
+ else:
+ self.type_string = 'Native'
+
+ mlog.debug('Determining dependency {!r} with pkg-config executable '
+ '{!r}'.format(name, self.pkgbin))
+ ret, self.modversion = self._call_pkgbin(['--modversion', name])
+ if ret != 0:
+ if self.required:
+ raise DependencyException('{} dependency {!r} not found'
+ ''.format(self.type_string, name))
+ return
+ found_msg = [self.type_string + ' dependency', mlog.bold(name), 'found:']
+ if self.version_reqs is None:
+ self.is_found = True
+ else:
+ if not isinstance(self.version_reqs, (str, list)):
+ raise DependencyException('Version argument must be string or list.')
+ if isinstance(self.version_reqs, str):
+ self.version_reqs = [self.version_reqs]
+ (self.is_found, not_found, found) = \
+ version_compare_many(self.modversion, self.version_reqs)
+ if not self.is_found:
+ found_msg += [mlog.red('NO'),
+ 'found {!r} but need:'.format(self.modversion),
+ ', '.join(["'{}'".format(e) for e in not_found])]
+ if found:
+ found_msg += ['; matched:',
+ ', '.join(["'{}'".format(e) for e in found])]
+ if not self.silent:
+ mlog.log(*found_msg)
+ if self.required:
+ m = 'Invalid version of dependency, need {!r} {!r} found {!r}.'
+ raise DependencyException(m.format(name, not_found, self.modversion))
+ return
+ found_msg += [mlog.green('YES'), self.modversion]
+ # Fetch cargs to be used while using this dependency
+ self._set_cargs()
+ # Fetch the libraries and library paths needed for using this
+ self._set_libs()
+ # Print the found message only at the very end because fetching cflags
+ # and libs can also fail if other needed pkg-config files aren't found.
+ if not self.silent:
+ mlog.log(*found_msg)
+
+ def __repr__(self):
+ s = '<{0} {1}: {2} {3}>'
+ return s.format(self.__class__.__name__, self.name, self.is_found,
+ self.version_reqs)
+
+ def _call_pkgbin(self, args):
+ p, out = Popen_safe([self.pkgbin] + args, env=os.environ)[0:2]
+ return p.returncode, out.strip()
+
+ def _set_cargs(self):
+ ret, out = self._call_pkgbin(['--cflags', self.name])
+ if ret != 0:
+ raise DependencyException('Could not generate cargs for %s:\n\n%s' %
+ (self.name, out))
+ self.cargs = out.split()
+
+ def _set_libs(self):
+ libcmd = [self.name, '--libs']
+ if self.static:
+ libcmd.append('--static')
+ ret, out = self._call_pkgbin(libcmd)
+ if ret != 0:
+ raise DependencyException('Could not generate libs for %s:\n\n%s' %
+ (self.name, out))
+ self.libs = []
+ for lib in out.split():
+ if lib.endswith(".la"):
+ shared_libname = self.extract_libtool_shlib(lib)
+ shared_lib = os.path.join(os.path.dirname(lib), shared_libname)
+ if not os.path.exists(shared_lib):
+ shared_lib = os.path.join(os.path.dirname(lib), ".libs", shared_libname)
+
+ if not os.path.exists(shared_lib):
+ raise DependencyException('Got a libtools specific "%s" dependencies'
+ 'but we could not compute the actual shared'
+ 'library path' % lib)
+ lib = shared_lib
+ self.is_libtool = True
+ self.libs.append(lib)
+
+ def get_pkgconfig_variable(self, variable_name):
+ ret, out = self._call_pkgbin(['--variable=' + variable_name, self.name])
+ variable = ''
+ if ret != 0:
+ if self.required:
+ raise DependencyException('%s dependency %s not found.' %
+ (self.type_string, self.name))
+ else:
+ variable = out.strip()
+ mlog.debug('Got pkgconfig variable %s : %s' % (variable_name, variable))
+ return variable
+
+ def get_modversion(self):
+ return self.modversion
+
+ def get_version(self):
+ return self.modversion
+
+ def get_compile_args(self):
+ return self.cargs
+
+ def get_link_args(self):
+ return self.libs
+
+ def get_methods(self):
+ return [DependencyMethods.PKGCONFIG]
+
+ def check_pkgconfig(self):
+ evar = 'PKG_CONFIG'
+ if evar in os.environ:
+ pkgbin = os.environ[evar].strip()
+ else:
+ pkgbin = 'pkg-config'
+ try:
+ p, out = Popen_safe([pkgbin, '--version'])[0:2]
+ if p.returncode != 0:
+ # Set to False instead of None to signify that we've already
+ # searched for it and not found it
+ pkgbin = False
+ except (FileNotFoundError, PermissionError):
+ pkgbin = False
+ if pkgbin and not os.path.isabs(pkgbin) and shutil.which(pkgbin):
+ # Sometimes shutil.which fails where Popen succeeds, so
+ # only find the abs path if it can be found by shutil.which
+ pkgbin = shutil.which(pkgbin)
+ if not self.silent:
+ if pkgbin:
+ mlog.log('Found pkg-config:', mlog.bold(pkgbin),
+ '(%s)' % out.strip())
+ else:
+ mlog.log('Found Pkg-config:', mlog.red('NO'))
+ return pkgbin
+
+ def found(self):
+ return self.is_found
+
+ def extract_field(self, la_file, fieldname):
+ with open(la_file) as f:
+ for line in f:
+ arr = line.strip().split('=')
+ if arr[0] == fieldname:
+ return arr[1][1:-1]
+ return None
+
+ def extract_dlname_field(self, la_file):
+ return self.extract_field(la_file, 'dlname')
+
+ def extract_libdir_field(self, la_file):
+ return self.extract_field(la_file, 'libdir')
+
+ def extract_libtool_shlib(self, la_file):
+ '''
+ Returns the path to the shared library
+ corresponding to this .la file
+ '''
+ dlname = self.extract_dlname_field(la_file)
+ if dlname is None:
+ return None
+
+ # Darwin uses absolute paths where possible; since the libtool files never
+ # contain absolute paths, use the libdir field
+ if mesonlib.is_osx():
+ dlbasename = os.path.basename(dlname)
+ libdir = self.extract_libdir_field(la_file)
+ if libdir is None:
+ return dlbasename
+ return os.path.join(libdir, dlbasename)
+ # From the comments in extract_libtool(), older libtools had
+ # a path rather than the raw dlname
+ return os.path.basename(dlname)
+
+
+class ExternalProgram:
+ windows_exts = ('exe', 'msc', 'com', 'bat')
+
+ def __init__(self, name, command=None, silent=False, search_dir=None):
+ self.name = name
+ if command is not None:
+ if not isinstance(command, list):
+ self.command = [command]
+ else:
+ self.command = command
+ else:
+ self.command = self._search(name, search_dir)
+ if not silent:
+ if self.found():
+ mlog.log('Program', mlog.bold(name), 'found:', mlog.green('YES'),
+ '(%s)' % ' '.join(self.command))
+ else:
+ mlog.log('Program', mlog.bold(name), 'found:', mlog.red('NO'))
+
+ def __repr__(self):
+ r = '<{} {!r} -> {!r}>'
+ return r.format(self.__class__.__name__, self.name, self.command)
+
+ @staticmethod
+ def _shebang_to_cmd(script):
+ """
+ Check if the file has a shebang and manually parse it to figure out
+ the interpreter to use. This is useful if the script is not executable
+ or if we're on Windows (which does not understand shebangs).
+ """
+ try:
+ with open(script) as f:
+ first_line = f.readline().strip()
+ if first_line.startswith('#!'):
+ commands = first_line[2:].split('#')[0].strip().split()
+ if mesonlib.is_windows():
+ # Windows does not have UNIX paths so remove them,
+ # but don't remove Windows paths
+ if commands[0].startswith('/'):
+ commands[0] = commands[0].split('/')[-1]
+ if len(commands) > 0 and commands[0] == 'env':
+ commands = commands[1:]
+ # Windows does not ship python3.exe, but we know the path to it
+ if len(commands) > 0 and commands[0] == 'python3':
+ commands[0] = sys.executable
+ return commands + [script]
+ except Exception:
+ pass
+ return False
+
+ def _is_executable(self, path):
+ suffix = os.path.splitext(path)[-1].lower()[1:]
+ if mesonlib.is_windows():
+ if suffix in self.windows_exts:
+ return True
+ elif os.access(path, os.X_OK):
+ return not os.path.isdir(path)
+ return False
+
+ def _search_dir(self, name, search_dir):
+ if search_dir is None:
+ return False
+ trial = os.path.join(search_dir, name)
+ if os.path.exists(trial):
+ if self._is_executable(trial):
+ return [trial]
+ # Now getting desperate. Maybe it is a script file that is
+ # a) not chmodded executable, or
+ # b) we are on windows so they can't be directly executed.
+ return self._shebang_to_cmd(trial)
+ else:
+ if mesonlib.is_windows():
+ for ext in self.windows_exts:
+ trial_ext = '{}.{}'.format(trial, ext)
+ if os.path.exists(trial_ext):
+ return [trial_ext]
+ return False
+
+ def _search(self, name, search_dir):
+ '''
+ Search in the specified dir for the specified executable by name
+ and if not found search in PATH
+ '''
+ commands = self._search_dir(name, search_dir)
+ if commands:
+ return commands
+ # Do a standard search in PATH
+ command = shutil.which(name)
+ if not mesonlib.is_windows():
+ # On UNIX-like platforms, shutil.which() is enough to find
+ # all executables whether in PATH or with an absolute path
+ return [command]
+ # HERE BEGINS THE TERROR OF WINDOWS
+ if command:
+ # On Windows, even if the PATH search returned a full path, we can't be
+ # sure that it can be run directly if it's not a native executable.
+ # For instance, interpreted scripts sometimes need to be run explicitly
+ # with an interpreter if the file association is not done properly.
+ name_ext = os.path.splitext(command)[1]
+ if name_ext[1:].lower() in self.windows_exts:
+ # Good, it can be directly executed
+ return [command]
+ # Try to extract the interpreter from the shebang
+ commands = self._shebang_to_cmd(command)
+ if commands:
+ return commands
+ else:
+ # Maybe the name is an absolute path to a native Windows
+ # executable, but without the extension. This is technically wrong,
+ # but many people do it because it works in the MinGW shell.
+ if os.path.isabs(name):
+ for ext in self.windows_exts:
+ command = '{}.{}'.format(name, ext)
+ if os.path.exists(command):
+ return [command]
+ # On Windows, interpreted scripts must have an extension otherwise they
+ # cannot be found by a standard PATH search. So we do a custom search
+ # where we manually search for a script with a shebang in PATH.
+ search_dirs = os.environ.get('PATH', '').split(';')
+ for search_dir in search_dirs:
+ commands = self._search_dir(name, search_dir)
+ if commands:
+ return commands
+ return [None]
+
+ def found(self):
+ return self.command[0] is not None
+
+ def get_command(self):
+ return self.command[:]
+
+ def get_path(self):
+ if self.found():
+ # Assume that the last element is the full path to the script or
+ # binary being run
+ return self.command[-1]
+ return None
+
+ def get_name(self):
+ return self.name
+
+
+class ExternalLibrary(Dependency):
+ # TODO: Add `language` support to all Dependency objects so that languages
+ # can be exposed for dependencies that support that (i.e., not pkg-config)
+ def __init__(self, name, link_args, language, silent=False):
+ super().__init__('external', {})
+ self.name = name
+ self.language = language
+ self.is_found = False
+ self.link_args = []
+ self.lang_args = []
+ if link_args:
+ self.is_found = True
+ if not isinstance(link_args, list):
+ link_args = [link_args]
+ self.lang_args = {language: link_args}
+ # We special-case Vala for now till the Dependency object gets
+ # proper support for exposing the language it was written in.
+ # Without this, vala-specific link args will end up in the C link
+ # args list if you link to a Vala library.
+ # This hack used to be in CompilerHolder.find_library().
+ if language != 'vala':
+ self.link_args = link_args
+ if not silent:
+ if self.is_found:
+ mlog.log('Library', mlog.bold(name), 'found:', mlog.green('YES'))
+ else:
+ mlog.log('Library', mlog.bold(name), 'found:', mlog.red('NO'))
+
+ def found(self):
+ return self.is_found
+
+ def get_name(self):
+ return self.name
+
+ def get_link_args(self):
+ return self.link_args
+
+ def get_lang_args(self, lang):
+ if lang in self.lang_args:
+ return self.lang_args[lang]
+ return []
+
+
+class ExtraFrameworkDependency(Dependency):
+ def __init__(self, name, required, path, kwargs):
+ Dependency.__init__(self, 'extraframeworks', kwargs)
+ self.name = None
+ self.detect(name, path)
+ if self.found():
+ mlog.log('Dependency', mlog.bold(name), 'found:', mlog.green('YES'),
+ os.path.join(self.path, self.name))
+ else:
+ mlog.log('Dependency', name, 'found:', mlog.red('NO'))
+
+ def detect(self, name, path):
+ lname = name.lower()
+ if path is None:
+ paths = ['/Library/Frameworks']
+ else:
+ paths = [path]
+ for p in paths:
+ for d in os.listdir(p):
+ fullpath = os.path.join(p, d)
+ if lname != d.split('.')[0].lower():
+ continue
+ if not stat.S_ISDIR(os.stat(fullpath).st_mode):
+ continue
+ self.path = p
+ self.name = d
+ return
+
+ def get_compile_args(self):
+ if self.found():
+ return ['-I' + os.path.join(self.path, self.name, 'Headers')]
+ return []
+
+ def get_link_args(self):
+ if self.found():
+ return ['-F' + self.path, '-framework', self.name.split('.')[0]]
+ return []
+
+ def found(self):
+ return self.name is not None
+
+ def get_version(self):
+ return 'unknown'
+
+
+def get_dep_identifier(name, kwargs, want_cross):
+ # Need immutable objects since the identifier will be used as a dict key
+ version_reqs = flatten(kwargs.get('version', []))
+ if isinstance(version_reqs, list):
+ version_reqs = frozenset(version_reqs)
+ identifier = (name, version_reqs, want_cross)
+ for key, value in kwargs.items():
+ # 'version' is embedded above as the second element for easy access
+ # 'native' is handled above with `want_cross`
+ # 'required' is irrelevant for caching; the caller handles it separately
+ # 'fallback' subprojects cannot be cached -- they must be initialized
+ if key in ('version', 'native', 'required', 'fallback',):
+ continue
+ # All keyword arguments are strings, ints, or lists (or lists of lists)
+ if isinstance(value, list):
+ value = frozenset(flatten(value))
+ identifier += (key, value)
+ return identifier
+
+
+def find_external_dependency(name, environment, kwargs):
+ required = kwargs.get('required', True)
+ if not isinstance(required, bool):
+ raise DependencyException('Keyword "required" must be a boolean.')
+ if not isinstance(kwargs.get('method', ''), str):
+ raise DependencyException('Keyword "method" must be a string.')
+ lname = name.lower()
+ if lname in packages:
+ dep = packages[lname](environment, kwargs)
+ if required and not dep.found():
+ raise DependencyException('Dependency "%s" not found' % name)
+ return dep
+ pkg_exc = None
+ pkgdep = None
+ try:
+ pkgdep = PkgConfigDependency(name, environment, kwargs)
+ if pkgdep.found():
+ return pkgdep
+ except Exception as e:
+ pkg_exc = e
+ if mesonlib.is_osx():
+ fwdep = ExtraFrameworkDependency(name, required, None, kwargs)
+ if required and not fwdep.found():
+ m = 'Dependency {!r} not found, tried Extra Frameworks ' \
+ 'and Pkg-Config:\n\n' + str(pkg_exc)
+ raise DependencyException(m.format(name))
+ return fwdep
+ if pkg_exc is not None:
+ raise pkg_exc
+ mlog.log('Dependency', mlog.bold(name), 'found:', mlog.red('NO'))
+ return pkgdep
diff --git a/mesonbuild/dependencies/dev.py b/mesonbuild/dependencies/dev.py
new file mode 100644
index 0000000..569108e
--- /dev/null
+++ b/mesonbuild/dependencies/dev.py
@@ -0,0 +1,293 @@
+# Copyright 2013-2017 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This file contains the detection logic for external dependencies useful for
+# development purposes, such as testing, debugging, etc..
+
+import os
+import shlex
+import shutil
+
+from .. import mlog
+from .. import mesonlib
+from ..mesonlib import version_compare, Popen_safe
+from .base import Dependency, DependencyException, PkgConfigDependency
+
+
+class GTestDependency(Dependency):
+ def __init__(self, environment, kwargs):
+ Dependency.__init__(self, 'gtest', kwargs)
+ self.main = kwargs.get('main', False)
+ self.name = 'gtest'
+ self.libname = 'libgtest.so'
+ self.libmain_name = 'libgtest_main.so'
+ self.include_dir = '/usr/include'
+ self.src_dirs = ['/usr/src/gtest/src', '/usr/src/googletest/googletest/src']
+ self.detect()
+
+ def found(self):
+ return self.is_found
+
+ def detect(self):
+ trial_dirs = mesonlib.get_library_dirs()
+ glib_found = False
+ gmain_found = False
+ for d in trial_dirs:
+ if os.path.isfile(os.path.join(d, self.libname)):
+ glib_found = True
+ if os.path.isfile(os.path.join(d, self.libmain_name)):
+ gmain_found = True
+ if glib_found and gmain_found:
+ self.is_found = True
+ self.compile_args = []
+ self.link_args = ['-lgtest']
+ if self.main:
+ self.link_args.append('-lgtest_main')
+ self.sources = []
+ mlog.log('Dependency GTest found:', mlog.green('YES'), '(prebuilt)')
+ elif self.detect_srcdir():
+ self.is_found = True
+ self.compile_args = ['-I' + self.src_include_dir]
+ self.link_args = []
+ if self.main:
+ self.sources = [self.all_src, self.main_src]
+ else:
+ self.sources = [self.all_src]
+ mlog.log('Dependency GTest found:', mlog.green('YES'), '(building self)')
+ else:
+ mlog.log('Dependency GTest found:', mlog.red('NO'))
+ self.is_found = False
+ return self.is_found
+
+ def detect_srcdir(self):
+ for s in self.src_dirs:
+ if os.path.exists(s):
+ self.src_dir = s
+ self.all_src = mesonlib.File.from_absolute_file(
+ os.path.join(self.src_dir, 'gtest-all.cc'))
+ self.main_src = mesonlib.File.from_absolute_file(
+ os.path.join(self.src_dir, 'gtest_main.cc'))
+ self.src_include_dir = os.path.normpath(os.path.join(self.src_dir, '..'))
+ return True
+ return False
+
+ def get_compile_args(self):
+ arr = []
+ if self.include_dir != '/usr/include':
+ arr.append('-I' + self.include_dir)
+ if hasattr(self, 'src_include_dir'):
+ arr.append('-I' + self.src_include_dir)
+ return arr
+
+ def get_link_args(self):
+ return self.link_args
+
+ def get_version(self):
+ return '1.something_maybe'
+
+ def get_sources(self):
+ return self.sources
+
+ def need_threads(self):
+ return True
+
+
+class GMockDependency(Dependency):
+ def __init__(self, environment, kwargs):
+ Dependency.__init__(self, 'gmock', kwargs)
+ # GMock may be a library or just source.
+ # Work with both.
+ self.name = 'gmock'
+ self.libname = 'libgmock.so'
+ trial_dirs = mesonlib.get_library_dirs()
+ gmock_found = False
+ for d in trial_dirs:
+ if os.path.isfile(os.path.join(d, self.libname)):
+ gmock_found = True
+ if gmock_found:
+ self.is_found = True
+ self.compile_args = []
+ self.link_args = ['-lgmock']
+ self.sources = []
+ mlog.log('Dependency GMock found:', mlog.green('YES'), '(prebuilt)')
+ return
+
+ for d in ['/usr/src/googletest/googlemock/src', '/usr/src/gmock/src', '/usr/src/gmock']:
+ if os.path.exists(d):
+ self.is_found = True
+ # Yes, we need both because there are multiple
+ # versions of gmock that do different things.
+ d2 = os.path.normpath(os.path.join(d, '..'))
+ self.compile_args = ['-I' + d, '-I' + d2]
+ self.link_args = []
+ all_src = mesonlib.File.from_absolute_file(os.path.join(d, 'gmock-all.cc'))
+ main_src = mesonlib.File.from_absolute_file(os.path.join(d, 'gmock_main.cc'))
+ if kwargs.get('main', False):
+ self.sources = [all_src, main_src]
+ else:
+ self.sources = [all_src]
+ mlog.log('Dependency GMock found:', mlog.green('YES'), '(building self)')
+ return
+
+ mlog.log('Dependency GMock found:', mlog.red('NO'))
+ self.is_found = False
+
+ def get_version(self):
+ return '1.something_maybe'
+
+ def get_compile_args(self):
+ return self.compile_args
+
+ def get_sources(self):
+ return self.sources
+
+ def get_link_args(self):
+ return self.link_args
+
+ def found(self):
+ return self.is_found
+
+
+class LLVMDependency(Dependency):
+ """LLVM dependency.
+
+ LLVM uses a special tool, llvm-config, which has arguments for getting
+ c args, cxx args, and ldargs as well as version.
+ """
+
+ # Ordered list of llvm-config binaries to try. Start with base, then try
+ # newest back to oldest (3.5 is arbitrary), and finally the devel version.
+ llvm_config_bins = [
+ 'llvm-config', 'llvm-config-4.0', 'llvm-config-3.9', 'llvm-config39',
+ 'llvm-config-3.8', 'llvm-config38', 'llvm-config-3.7', 'llvm-config37',
+ 'llvm-config-3.6', 'llvm-config36', 'llvm-config-3.5', 'llvm-config35',
+ 'llvm-config-devel',
+ ]
+ llvmconfig = None
+ _llvmconfig_found = False
+ __best_found = None
+ __cpp_blacklist = {'-DNDEBUG'}
+
+ def __init__(self, environment, kwargs):
+ super().__init__('llvm-config', kwargs)
+ # It's necessary for LLVM <= 3.8 to use the C++ linker. For 3.9 and 4.0
+ # the C linker works fine if only using the C API.
+ self.language = 'cpp'
+ self.cargs = []
+ self.libs = []
+ self.modules = []
+
+ required = kwargs.get('required', True)
+ req_version = kwargs.get('version', None)
+ if self.llvmconfig is None:
+ self.check_llvmconfig(req_version)
+ if not self._llvmconfig_found:
+ if self.__best_found is not None:
+ mlog.log('found {!r} but need:'.format(self.__best_found),
+ req_version)
+ else:
+ mlog.log("No llvm-config found; can't detect dependency")
+ mlog.log('Dependency LLVM found:', mlog.red('NO'))
+ if required:
+ raise DependencyException('Dependency LLVM not found')
+ return
+
+ p, out, err = Popen_safe([self.llvmconfig, '--version'])
+ if p.returncode != 0:
+ mlog.debug('stdout: {}\nstderr: {}'.format(out, err))
+ if required:
+ raise DependencyException('Dependency LLVM not found')
+ return
+ else:
+ self.version = out.strip()
+ mlog.log('Dependency LLVM found:', mlog.green('YES'))
+ self.is_found = True
+
+ p, out = Popen_safe(
+ [self.llvmconfig, '--libs', '--ldflags', '--system-libs'])[:2]
+ if p.returncode != 0:
+ raise DependencyException('Could not generate libs for LLVM.')
+ self.libs = shlex.split(out)
+
+ p, out = Popen_safe([self.llvmconfig, '--cppflags'])[:2]
+ if p.returncode != 0:
+ raise DependencyException('Could not generate includedir for LLVM.')
+ self.cargs = list(mesonlib.OrderedSet(shlex.split(out)).difference(self.__cpp_blacklist))
+
+ p, out = Popen_safe([self.llvmconfig, '--components'])[:2]
+ if p.returncode != 0:
+ raise DependencyException('Could not generate modules for LLVM.')
+ self.modules = shlex.split(out)
+
+ modules = mesonlib.stringlistify(kwargs.get('modules', []))
+ for mod in modules:
+ if mod not in self.modules:
+ mlog.log('LLVM module', mod, 'found:', mlog.red('NO'))
+ self.is_found = False
+ if required:
+ raise DependencyException(
+ 'Could not find required LLVM Component: {}'.format(mod))
+ else:
+ mlog.log('LLVM module', mod, 'found:', mlog.green('YES'))
+
+ def get_version(self):
+ return self.version
+
+ def get_compile_args(self):
+ return self.cargs
+
+ def get_link_args(self):
+ return self.libs
+
+ @classmethod
+ def check_llvmconfig(cls, version_req):
+ """Try to find the highest version of llvm-config."""
+ for llvmconfig in cls.llvm_config_bins:
+ try:
+ p, out = Popen_safe([llvmconfig, '--version'])[0:2]
+ out = out.strip()
+ if p.returncode != 0:
+ continue
+ if version_req:
+ if version_compare(out, version_req, strict=True):
+ if cls.__best_found and version_compare(out, '<={}'.format(cls.__best_found), strict=True):
+ continue
+ cls.__best_found = out
+ cls.llvmconfig = llvmconfig
+ else:
+ # If no specific version is requested use the first version
+ # found, since that should be the best.
+ cls.__best_found = out
+ cls.llvmconfig = llvmconfig
+ break
+ except (FileNotFoundError, PermissionError):
+ pass
+ if cls.__best_found:
+ mlog.log('Found llvm-config:',
+ mlog.bold(shutil.which(cls.llvmconfig)),
+ '({})'.format(out.strip()))
+ cls._llvmconfig_found = True
+ else:
+ cls.llvmconfig = False
+
+ def need_threads(self):
+ return True
+
+
+class ValgrindDependency(PkgConfigDependency):
+ def __init__(self, environment, kwargs):
+ PkgConfigDependency.__init__(self, 'valgrind', environment, kwargs)
+
+ def get_link_args(self):
+ return []
diff --git a/mesonbuild/dependencies/misc.py b/mesonbuild/dependencies/misc.py
new file mode 100644
index 0000000..3374c6e
--- /dev/null
+++ b/mesonbuild/dependencies/misc.py
@@ -0,0 +1,382 @@
+# Copyright 2013-2017 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This file contains the detection logic for miscellaneous external dependencies.
+
+import glob
+import os
+import stat
+import sysconfig
+
+from .. import mlog
+from .. import mesonlib
+from ..environment import detect_cpu_family
+
+from .base import Dependency, DependencyException, DependencyMethods, ExtraFrameworkDependency, PkgConfigDependency
+
+
+class BoostDependency(Dependency):
+ # Some boost libraries have different names for
+ # their sources and libraries. This dict maps
+ # between the two.
+ name2lib = {'test': 'unit_test_framework'}
+
+ def __init__(self, environment, kwargs):
+ Dependency.__init__(self, 'boost', kwargs)
+ self.name = 'boost'
+ self.environment = environment
+ self.libdir = ''
+ if 'native' in kwargs and environment.is_cross_build():
+ self.want_cross = not kwargs['native']
+ else:
+ self.want_cross = environment.is_cross_build()
+ try:
+ self.boost_root = os.environ['BOOST_ROOT']
+ if not os.path.isabs(self.boost_root):
+ raise DependencyException('BOOST_ROOT must be an absolute path.')
+ except KeyError:
+ self.boost_root = None
+ if self.boost_root is None:
+ if self.want_cross:
+ if 'BOOST_INCLUDEDIR' in os.environ:
+ self.incdir = os.environ['BOOST_INCLUDEDIR']
+ else:
+ raise DependencyException('BOOST_ROOT or BOOST_INCLUDEDIR is needed while cross-compiling')
+ if mesonlib.is_windows():
+ self.boost_root = self.detect_win_root()
+ self.incdir = self.boost_root
+ else:
+ if 'BOOST_INCLUDEDIR' in os.environ:
+ self.incdir = os.environ['BOOST_INCLUDEDIR']
+ else:
+ self.incdir = '/usr/include'
+ else:
+ self.incdir = os.path.join(self.boost_root, 'include')
+ self.boost_inc_subdir = os.path.join(self.incdir, 'boost')
+ mlog.debug('Boost library root dir is', self.boost_root)
+ self.src_modules = {}
+ self.lib_modules = {}
+ self.lib_modules_mt = {}
+ self.detect_version()
+ self.requested_modules = self.get_requested(kwargs)
+ module_str = ', '.join(self.requested_modules)
+ if self.version is not None:
+ self.detect_src_modules()
+ self.detect_lib_modules()
+ self.validate_requested()
+ if self.boost_root is not None:
+ info = self.version + ', ' + self.boost_root
+ else:
+ info = self.version
+ mlog.log('Dependency Boost (%s) found:' % module_str, mlog.green('YES'), info)
+ else:
+ mlog.log("Dependency Boost (%s) found:" % module_str, mlog.red('NO'))
+ if 'cpp' not in self.environment.coredata.compilers:
+ raise DependencyException('Tried to use Boost but a C++ compiler is not defined.')
+ self.cpp_compiler = self.environment.coredata.compilers['cpp']
+
+ def detect_win_root(self):
+ globtext = 'c:\\local\\boost_*'
+ files = glob.glob(globtext)
+ if len(files) > 0:
+ return files[0]
+ return 'C:\\'
+
+ def get_compile_args(self):
+ args = []
+ include_dir = ''
+ if self.boost_root is not None:
+ if mesonlib.is_windows():
+ include_dir = self.boost_root
+ else:
+ include_dir = os.path.join(self.boost_root, 'include')
+ else:
+ include_dir = self.incdir
+
+ # Use "-isystem" when including boost headers instead of "-I"
+ # to avoid compiler warnings/failures when "-Werror" is used
+
+ # Careful not to use "-isystem" on default include dirs as it
+ # breaks some of the headers for certain gcc versions
+
+ # For example, doing g++ -isystem /usr/include on a simple
+ # "int main()" source results in the error:
+ # "/usr/include/c++/6.3.1/cstdlib:75:25: fatal error: stdlib.h: No such file or directory"
+
+ # See https://gcc.gnu.org/bugzilla/show_bug.cgi?id=70129
+ # and http://stackoverflow.com/questions/37218953/isystem-on-a-system-include-directory-causes-errors
+ # for more details
+
+ # TODO: The correct solution would probably be to ask the
+ # compiler for its default include paths (ie: "gcc -xc++ -E
+ # -v -") and avoid including those with -isystem
+
+ # For now, use -isystem for all includes except for some
+ # typical defaults (which don't need to be included at all
+ # since they are in the default include paths)
+ if include_dir != '/usr/include' and include_dir != '/usr/local/include':
+ args.append("".join(self.cpp_compiler.get_include_args(include_dir, True)))
+ return args
+
+ def get_requested(self, kwargs):
+ candidates = kwargs.get('modules', [])
+ if isinstance(candidates, str):
+ return [candidates]
+ for c in candidates:
+ if not isinstance(c, str):
+ raise DependencyException('Boost module argument is not a string.')
+ return candidates
+
+ def validate_requested(self):
+ for m in self.requested_modules:
+ if m not in self.src_modules:
+ raise DependencyException('Requested Boost module "%s" not found.' % m)
+
+ def found(self):
+ return self.version is not None
+
+ def get_version(self):
+ return self.version
+
+ def detect_version(self):
+ try:
+ ifile = open(os.path.join(self.boost_inc_subdir, 'version.hpp'))
+ except FileNotFoundError:
+ self.version = None
+ return
+ with ifile:
+ for line in ifile:
+ if line.startswith("#define") and 'BOOST_LIB_VERSION' in line:
+ ver = line.split()[-1]
+ ver = ver[1:-1]
+ self.version = ver.replace('_', '.')
+ return
+ self.version = None
+
+ def detect_src_modules(self):
+ for entry in os.listdir(self.boost_inc_subdir):
+ entry = os.path.join(self.boost_inc_subdir, entry)
+ if stat.S_ISDIR(os.stat(entry).st_mode):
+ self.src_modules[os.path.split(entry)[-1]] = True
+
+ def detect_lib_modules(self):
+ if mesonlib.is_windows():
+ return self.detect_lib_modules_win()
+ return self.detect_lib_modules_nix()
+
+ def detect_lib_modules_win(self):
+ arch = detect_cpu_family(self.environment.coredata.compilers)
+ # Guess the libdir
+ if arch == 'x86':
+ gl = 'lib32*'
+ elif arch == 'x86_64':
+ gl = 'lib64*'
+ else:
+ # Does anyone do Boost cross-compiling to other archs on Windows?
+ gl = None
+ # See if the libdir is valid
+ if gl:
+ libdir = glob.glob(os.path.join(self.boost_root, gl))
+ else:
+ libdir = []
+ # Can't find libdir, bail
+ if not libdir:
+ return
+ libdir = libdir[0]
+ self.libdir = libdir
+ globber = 'boost_*-gd-*.lib' # FIXME
+ for entry in glob.glob(os.path.join(libdir, globber)):
+ (_, fname) = os.path.split(entry)
+ base = fname.split('_', 1)[1]
+ modname = base.split('-', 1)[0]
+ self.lib_modules_mt[modname] = fname
+
+ def detect_lib_modules_nix(self):
+ if mesonlib.is_osx():
+ libsuffix = 'dylib'
+ else:
+ libsuffix = 'so'
+
+ globber = 'libboost_*.{}'.format(libsuffix)
+ if 'BOOST_LIBRARYDIR' in os.environ:
+ libdirs = [os.environ['BOOST_LIBRARYDIR']]
+ elif self.boost_root is None:
+ libdirs = mesonlib.get_library_dirs()
+ else:
+ libdirs = [os.path.join(self.boost_root, 'lib')]
+ for libdir in libdirs:
+ for entry in glob.glob(os.path.join(libdir, globber)):
+ lib = os.path.basename(entry)
+ name = lib.split('.')[0].split('_', 1)[-1]
+ # I'm not 100% sure what to do here. Some distros
+ # have modules such as thread only as -mt versions.
+ if entry.endswith('-mt.so'):
+ self.lib_modules_mt[name] = True
+ else:
+ self.lib_modules[name] = True
+
+ def get_win_link_args(self):
+ args = []
+ if self.boost_root:
+ args.append('-L' + self.libdir)
+ for module in self.requested_modules:
+ module = BoostDependency.name2lib.get(module, module)
+ if module in self.lib_modules_mt:
+ args.append(self.lib_modules_mt[module])
+ return args
+
+ def get_link_args(self):
+ if mesonlib.is_windows():
+ return self.get_win_link_args()
+ args = []
+ if self.boost_root:
+ args.append('-L' + os.path.join(self.boost_root, 'lib'))
+ elif 'BOOST_LIBRARYDIR' in os.environ:
+ args.append('-L' + os.environ['BOOST_LIBRARYDIR'])
+ for module in self.requested_modules:
+ module = BoostDependency.name2lib.get(module, module)
+ libname = 'boost_' + module
+ # The compiler's library detector is the most reliable so use that first.
+ default_detect = self.cpp_compiler.find_library(libname, self.environment, [])
+ if default_detect is not None:
+ if module == 'unit_testing_framework':
+ emon_args = self.cpp_compiler.find_library('boost_test_exec_monitor')
+ else:
+ emon_args = None
+ args += default_detect
+ if emon_args is not None:
+ args += emon_args
+ elif module in self.lib_modules or module in self.lib_modules_mt:
+ linkcmd = '-l' + libname
+ args.append(linkcmd)
+ # FIXME a hack, but Boost's testing framework has a lot of
+ # different options and it's hard to determine what to do
+ # without feedback from actual users. Update this
+ # as we get more bug reports.
+ if module == 'unit_testing_framework':
+ args.append('-lboost_test_exec_monitor')
+ elif module + '-mt' in self.lib_modules_mt:
+ linkcmd = '-lboost_' + module + '-mt'
+ args.append(linkcmd)
+ if module == 'unit_testing_framework':
+ args.append('-lboost_test_exec_monitor-mt')
+ return args
+
+ def get_sources(self):
+ return []
+
+ def need_threads(self):
+ return 'thread' in self.requested_modules
+
+
+class ThreadDependency(Dependency):
+ def __init__(self, environment, kwargs):
+ super().__init__('threads', {})
+ self.name = 'threads'
+ self.is_found = True
+ mlog.log('Dependency', mlog.bold(self.name), 'found:', mlog.green('YES'))
+
+ def need_threads(self):
+ return True
+
+ def get_version(self):
+ return 'unknown'
+
+
+class Python3Dependency(Dependency):
+ def __init__(self, environment, kwargs):
+ super().__init__('python3', kwargs)
+ self.name = 'python3'
+ self.is_found = False
+ # We can only be sure that it is Python 3 at this point
+ self.version = '3'
+ if DependencyMethods.PKGCONFIG in self.methods:
+ try:
+ pkgdep = PkgConfigDependency('python3', environment, kwargs)
+ if pkgdep.found():
+ self.cargs = pkgdep.cargs
+ self.libs = pkgdep.libs
+ self.version = pkgdep.get_version()
+ self.is_found = True
+ return
+ except Exception:
+ pass
+ if not self.is_found:
+ if mesonlib.is_windows() and DependencyMethods.SYSCONFIG in self.methods:
+ self._find_libpy3_windows(environment)
+ elif mesonlib.is_osx() and DependencyMethods.EXTRAFRAMEWORK in self.methods:
+ # In OSX the Python 3 framework does not have a version
+ # number in its name.
+ fw = ExtraFrameworkDependency('python', False, None, kwargs)
+ if fw.found():
+ self.cargs = fw.get_compile_args()
+ self.libs = fw.get_link_args()
+ self.is_found = True
+ if self.is_found:
+ mlog.log('Dependency', mlog.bold(self.name), 'found:', mlog.green('YES'))
+ else:
+ mlog.log('Dependency', mlog.bold(self.name), 'found:', mlog.red('NO'))
+
+ def _find_libpy3_windows(self, env):
+ '''
+ Find python3 libraries on Windows and also verify that the arch matches
+ what we are building for.
+ '''
+ pyarch = sysconfig.get_platform()
+ arch = detect_cpu_family(env.coredata.compilers)
+ if arch == 'x86':
+ arch = '32'
+ elif arch == 'x86_64':
+ arch = '64'
+ else:
+ # We can't cross-compile Python 3 dependencies on Windows yet
+ mlog.log('Unknown architecture {!r} for'.format(arch),
+ mlog.bold(self.name))
+ self.is_found = False
+ return
+ # Pyarch ends in '32' or '64'
+ if arch != pyarch[-2:]:
+ mlog.log('Need', mlog.bold(self.name),
+ 'for {}-bit, but found {}-bit'.format(arch, pyarch[-2:]))
+ self.is_found = False
+ return
+ inc = sysconfig.get_path('include')
+ platinc = sysconfig.get_path('platinclude')
+ self.cargs = ['-I' + inc]
+ if inc != platinc:
+ self.cargs.append('-I' + platinc)
+ # Nothing exposes this directly that I could find
+ basedir = sysconfig.get_config_var('base')
+ vernum = sysconfig.get_config_var('py_version_nodot')
+ self.libs = ['-L{}/libs'.format(basedir),
+ '-lpython{}'.format(vernum)]
+ self.version = sysconfig.get_config_var('py_version_short')
+ self.is_found = True
+
+ def get_compile_args(self):
+ return self.cargs
+
+ def get_link_args(self):
+ return self.libs
+
+ def get_methods(self):
+ if mesonlib.is_windows():
+ return [DependencyMethods.PKGCONFIG, DependencyMethods.SYSCONFIG]
+ elif mesonlib.is_osx():
+ return [DependencyMethods.PKGCONFIG, DependencyMethods.EXTRAFRAMEWORK]
+ else:
+ return [DependencyMethods.PKGCONFIG]
+
+ def get_version(self):
+ return self.version
diff --git a/mesonbuild/dependencies/platform.py b/mesonbuild/dependencies/platform.py
new file mode 100644
index 0000000..cd46412
--- /dev/null
+++ b/mesonbuild/dependencies/platform.py
@@ -0,0 +1,44 @@
+# Copyright 2013-2017 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This file contains the detection logic for external dependencies that are
+# platform-specific (generally speaking).
+
+from .. import mesonlib
+
+from .base import Dependency, DependencyException
+
+
+class AppleFrameworks(Dependency):
+ def __init__(self, environment, kwargs):
+ Dependency.__init__(self, 'appleframeworks', kwargs)
+ modules = kwargs.get('modules', [])
+ if isinstance(modules, str):
+ modules = [modules]
+ if not modules:
+ raise DependencyException("AppleFrameworks dependency requires at least one module.")
+ self.frameworks = modules
+
+ def get_link_args(self):
+ args = []
+ for f in self.frameworks:
+ args.append('-framework')
+ args.append(f)
+ return args
+
+ def found(self):
+ return mesonlib.is_osx()
+
+ def get_version(self):
+ return 'unknown'
diff --git a/mesonbuild/dependencies/ui.py b/mesonbuild/dependencies/ui.py
new file mode 100644
index 0000000..3174176
--- /dev/null
+++ b/mesonbuild/dependencies/ui.py
@@ -0,0 +1,560 @@
+# Copyright 2013-2017 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This file contains the detection logic for external dependencies that
+# are UI-related.
+
+import os
+import re
+import shutil
+import subprocess
+from collections import OrderedDict
+
+from .. import mlog
+from .. import mesonlib
+from ..mesonlib import MesonException, Popen_safe, version_compare
+from ..environment import for_windows
+
+from .base import (Dependency, DependencyException, DependencyMethods,
+ ExternalProgram, ExtraFrameworkDependency, PkgConfigDependency)
+
+
+class GLDependency(Dependency):
+ def __init__(self, environment, kwargs):
+ Dependency.__init__(self, 'gl', kwargs)
+ self.is_found = False
+ self.cargs = []
+ self.linkargs = []
+ if DependencyMethods.PKGCONFIG in self.methods:
+ try:
+ pcdep = PkgConfigDependency('gl', environment, kwargs)
+ if pcdep.found():
+ self.type_name = 'pkgconfig'
+ self.is_found = True
+ self.cargs = pcdep.get_compile_args()
+ self.linkargs = pcdep.get_link_args()
+ self.version = pcdep.get_version()
+ return
+ except Exception:
+ pass
+ if DependencyMethods.SYSTEM in self.methods:
+ if mesonlib.is_osx():
+ self.is_found = True
+ self.linkargs = ['-framework', 'OpenGL']
+ self.version = '1' # FIXME
+ return
+ if mesonlib.is_windows():
+ self.is_found = True
+ self.linkargs = ['-lopengl32']
+ self.version = '1' # FIXME: unfixable?
+ return
+
+ def get_link_args(self):
+ return self.linkargs
+
+ def get_version(self):
+ return self.version
+
+ def get_methods(self):
+ if mesonlib.is_osx() or mesonlib.is_windows():
+ return [DependencyMethods.PKGCONFIG, DependencyMethods.SYSTEM]
+ else:
+ return [DependencyMethods.PKGCONFIG]
+
+
+class GnuStepDependency(Dependency):
+ def __init__(self, environment, kwargs):
+ Dependency.__init__(self, 'gnustep', kwargs)
+ self.required = kwargs.get('required', True)
+ self.modules = kwargs.get('modules', [])
+ self.detect()
+
+ def detect(self):
+ self.confprog = 'gnustep-config'
+ try:
+ gp = Popen_safe([self.confprog, '--help'])[0]
+ except (FileNotFoundError, PermissionError):
+ self.args = None
+ mlog.log('Dependency GnuStep found:', mlog.red('NO'), '(no gnustep-config)')
+ return
+ if gp.returncode != 0:
+ self.args = None
+ mlog.log('Dependency GnuStep found:', mlog.red('NO'))
+ return
+ if 'gui' in self.modules:
+ arg = '--gui-libs'
+ else:
+ arg = '--base-libs'
+ fp, flagtxt, flagerr = Popen_safe([self.confprog, '--objc-flags'])
+ if fp.returncode != 0:
+ raise DependencyException('Error getting objc-args: %s %s' % (flagtxt, flagerr))
+ args = flagtxt.split()
+ self.args = self.filter_arsg(args)
+ fp, libtxt, liberr = Popen_safe([self.confprog, arg])
+ if fp.returncode != 0:
+ raise DependencyException('Error getting objc-lib args: %s %s' % (libtxt, liberr))
+ self.libs = self.weird_filter(libtxt.split())
+ self.version = self.detect_version()
+ mlog.log('Dependency', mlog.bold('GnuStep'), 'found:',
+ mlog.green('YES'), self.version)
+
+ def weird_filter(self, elems):
+ """When building packages, the output of the enclosing Make
+is sometimes mixed among the subprocess output. I have no idea
+why. As a hack filter out everything that is not a flag."""
+ return [e for e in elems if e.startswith('-')]
+
+ def filter_arsg(self, args):
+ """gnustep-config returns a bunch of garbage args such
+ as -O2 and so on. Drop everything that is not needed."""
+ result = []
+ for f in args:
+ if f.startswith('-D') \
+ or f.startswith('-f') \
+ or f.startswith('-I') \
+ or f == '-pthread' \
+ or (f.startswith('-W') and not f == '-Wall'):
+ result.append(f)
+ return result
+
+ def detect_version(self):
+ gmake = self.get_variable('GNUMAKE')
+ makefile_dir = self.get_variable('GNUSTEP_MAKEFILES')
+ # This Makefile has the GNUStep version set
+ base_make = os.path.join(makefile_dir, 'Additional', 'base.make')
+ # Print the Makefile variable passed as the argument. For instance, if
+ # you run the make target `print-SOME_VARIABLE`, this will print the
+ # value of the variable `SOME_VARIABLE`.
+ printver = "print-%:\n\t@echo '$($*)'"
+ env = os.environ.copy()
+ # See base.make to understand why this is set
+ env['FOUNDATION_LIB'] = 'gnu'
+ p, o, e = Popen_safe([gmake, '-f', '-', '-f', base_make,
+ 'print-GNUSTEP_BASE_VERSION'],
+ env=env, write=printver, stdin=subprocess.PIPE)
+ version = o.strip()
+ if not version:
+ mlog.debug("Couldn't detect GNUStep version, falling back to '1'")
+ # Fallback to setting some 1.x version
+ version = '1'
+ return version
+
+ def get_variable(self, var):
+ p, o, e = Popen_safe([self.confprog, '--variable=' + var])
+ if p.returncode != 0 and self.required:
+ raise DependencyException('{!r} for variable {!r} failed to run'
+ ''.format(self.confprog, var))
+ return o.strip()
+
+ def found(self):
+ return self.args is not None
+
+ def get_version(self):
+ return self.version
+
+ def get_compile_args(self):
+ if self.args is None:
+ return []
+ return self.args
+
+ def get_link_args(self):
+ return self.libs
+
+
+class QtBaseDependency(Dependency):
+ def __init__(self, name, env, kwargs):
+ Dependency.__init__(self, name, kwargs)
+ self.name = name
+ self.qtname = name.capitalize()
+ self.qtver = name[-1]
+ if self.qtver == "4":
+ self.qtpkgname = 'Qt'
+ else:
+ self.qtpkgname = self.qtname
+ self.root = '/usr'
+ self.bindir = None
+ self.silent = kwargs.get('silent', False)
+ # We store the value of required here instead of passing it on to
+ # PkgConfigDependency etc because we want to try the qmake-based
+ # fallback as well.
+ self.required = kwargs.pop('required', True)
+ kwargs['required'] = False
+ mods = kwargs.get('modules', [])
+ self.cargs = []
+ self.largs = []
+ self.is_found = False
+ if isinstance(mods, str):
+ mods = [mods]
+ if not mods:
+ raise DependencyException('No ' + self.qtname + ' modules specified.')
+ type_text = 'cross' if env.is_cross_build() else 'native'
+ found_msg = '{} {} {{}} dependency (modules: {}) found:' \
+ ''.format(self.qtname, type_text, ', '.join(mods))
+ from_text = 'pkg-config'
+
+ # Keep track of the detection methods used, for logging purposes.
+ methods = []
+ # Prefer pkg-config, then fallback to `qmake -query`
+ if DependencyMethods.PKGCONFIG in self.methods:
+ self._pkgconfig_detect(mods, env, kwargs)
+ methods.append('pkgconfig')
+ if not self.is_found and DependencyMethods.QMAKE in self.methods:
+ from_text = self._qmake_detect(mods, env, kwargs)
+ methods.append('qmake-' + self.name)
+ methods.append('qmake')
+ if not self.is_found:
+ # Reset compile args and link args
+ self.cargs = []
+ self.largs = []
+ from_text = '(checked {})'.format(mlog.format_list(methods))
+ self.version = 'none'
+ if self.required:
+ err_msg = '{} {} dependency not found {}' \
+ ''.format(self.qtname, type_text, from_text)
+ raise DependencyException(err_msg)
+ if not self.silent:
+ mlog.log(found_msg.format(from_text), mlog.red('NO'))
+ return
+ from_text = '`{}`'.format(from_text)
+ if not self.silent:
+ mlog.log(found_msg.format(from_text), mlog.green('YES'))
+
+ def compilers_detect(self):
+ "Detect Qt (4 or 5) moc, uic, rcc in the specified bindir or in PATH"
+ if self.bindir:
+ moc = ExternalProgram(os.path.join(self.bindir, 'moc'), silent=True)
+ uic = ExternalProgram(os.path.join(self.bindir, 'uic'), silent=True)
+ rcc = ExternalProgram(os.path.join(self.bindir, 'rcc'), silent=True)
+ else:
+ # We don't accept unsuffixed 'moc', 'uic', and 'rcc' because they
+ # are sometimes older, or newer versions.
+ moc = ExternalProgram('moc-' + self.name, silent=True)
+ uic = ExternalProgram('uic-' + self.name, silent=True)
+ rcc = ExternalProgram('rcc-' + self.name, silent=True)
+ return moc, uic, rcc
+
+ def _pkgconfig_detect(self, mods, env, kwargs):
+ modules = OrderedDict()
+ for module in mods:
+ modules[module] = PkgConfigDependency(self.qtpkgname + module, env, kwargs)
+ self.is_found = True
+ for m in modules.values():
+ if not m.found():
+ self.is_found = False
+ return
+ self.cargs += m.get_compile_args()
+ self.largs += m.get_link_args()
+ self.version = m.modversion
+ # Try to detect moc, uic, rcc
+ if 'Core' in modules:
+ core = modules['Core']
+ else:
+ corekwargs = {'required': 'false', 'silent': 'true'}
+ core = PkgConfigDependency(self.qtpkgname + 'Core', env, corekwargs)
+ # Used by self.compilers_detect()
+ self.bindir = self.get_pkgconfig_host_bins(core)
+ if not self.bindir:
+ # If exec_prefix is not defined, the pkg-config file is broken
+ prefix = core.get_pkgconfig_variable('exec_prefix')
+ if prefix:
+ self.bindir = os.path.join(prefix, 'bin')
+
+ def _find_qmake(self, qmake, env):
+ # Even when cross-compiling, if we don't get a cross-info qmake, we
+ # fallback to using the qmake in PATH because that's what we used to do
+ if env.is_cross_build():
+ qmake = env.cross_info.config['binaries'].get('qmake', qmake)
+ return ExternalProgram(qmake, silent=True)
+
+ def _qmake_detect(self, mods, env, kwargs):
+ for qmake in ('qmake-' + self.name, 'qmake'):
+ self.qmake = self._find_qmake(qmake, env)
+ if not self.qmake.found():
+ continue
+ # Check that the qmake is for qt5
+ pc, stdo = Popen_safe(self.qmake.get_command() + ['-v'])[0:2]
+ if pc.returncode != 0:
+ continue
+ if not 'Qt version ' + self.qtver in stdo:
+ mlog.log('QMake is not for ' + self.qtname)
+ continue
+ # Found qmake for Qt5!
+ break
+ else:
+ # Didn't find qmake :(
+ return
+ self.version = re.search(self.qtver + '(\.\d+)+', stdo).group(0)
+ # Query library path, header path, and binary path
+ mlog.log("Found qmake:", mlog.bold(self.qmake.get_name()), '(%s)' % self.version)
+ stdo = Popen_safe(self.qmake.get_command() + ['-query'])[1]
+ qvars = {}
+ for line in stdo.split('\n'):
+ line = line.strip()
+ if line == '':
+ continue
+ (k, v) = tuple(line.split(':', 1))
+ qvars[k] = v
+ if mesonlib.is_osx():
+ return self._framework_detect(qvars, mods, kwargs)
+ incdir = qvars['QT_INSTALL_HEADERS']
+ self.cargs.append('-I' + incdir)
+ libdir = qvars['QT_INSTALL_LIBS']
+ # Used by self.compilers_detect()
+ self.bindir = self.get_qmake_host_bins(qvars)
+ self.is_found = True
+ for module in mods:
+ mincdir = os.path.join(incdir, 'Qt' + module)
+ self.cargs.append('-I' + mincdir)
+ if for_windows(env.is_cross_build(), env):
+ libfile = os.path.join(libdir, self.qtpkgname + module + '.lib')
+ if not os.path.isfile(libfile):
+ # MinGW can link directly to .dll
+ libfile = os.path.join(self.bindir, self.qtpkgname + module + '.dll')
+ if not os.path.isfile(libfile):
+ self.is_found = False
+ break
+ else:
+ libfile = os.path.join(libdir, 'lib{}{}.so'.format(self.qtpkgname, module))
+ if not os.path.isfile(libfile):
+ self.is_found = False
+ break
+ self.largs.append(libfile)
+ return qmake
+
+ def _framework_detect(self, qvars, modules, kwargs):
+ libdir = qvars['QT_INSTALL_LIBS']
+ for m in modules:
+ fname = 'Qt' + m
+ fwdep = ExtraFrameworkDependency(fname, kwargs.get('required', True), libdir, kwargs)
+ self.cargs.append('-F' + libdir)
+ if fwdep.found():
+ self.is_found = True
+ self.cargs += fwdep.get_compile_args()
+ self.largs += fwdep.get_link_args()
+ # Used by self.compilers_detect()
+ self.bindir = self.get_qmake_host_bins(qvars)
+
+ def get_qmake_host_bins(self, qvars):
+ # Prefer QT_HOST_BINS (qt5, correct for cross and native compiling)
+ # but fall back to QT_INSTALL_BINS (qt4)
+ if 'QT_HOST_BINS' in qvars:
+ return qvars['QT_HOST_BINS']
+ else:
+ return qvars['QT_INSTALL_BINS']
+
+ def get_version(self):
+ return self.version
+
+ def get_compile_args(self):
+ return self.cargs
+
+ def get_sources(self):
+ return []
+
+ def get_link_args(self):
+ return self.largs
+
+ def get_methods(self):
+ return [DependencyMethods.PKGCONFIG, DependencyMethods.QMAKE]
+
+ def found(self):
+ return self.is_found
+
+ def get_exe_args(self, compiler):
+ # Originally this was -fPIE but nowadays the default
+ # for upstream and distros seems to be -reduce-relocations
+ # which requires -fPIC. This may cause a performance
+ # penalty when using self-built Qt or on platforms
+ # where -fPIC is not required. If this is an issue
+ # for you, patches are welcome.
+ return compiler.get_pic_args()
+
+
+class Qt4Dependency(QtBaseDependency):
+ def __init__(self, env, kwargs):
+ QtBaseDependency.__init__(self, 'qt4', env, kwargs)
+
+ def get_pkgconfig_host_bins(self, core):
+ # Only return one bins dir, because the tools are generally all in one
+ # directory for Qt4, in Qt5, they must all be in one directory. Return
+ # the first one found among the bin variables, in case one tool is not
+ # configured to be built.
+ applications = ['moc', 'uic', 'rcc', 'lupdate', 'lrelease']
+ for application in applications:
+ try:
+ return os.path.dirname(core.get_pkgconfig_variable('%s_location' % application))
+ except MesonException:
+ pass
+
+
+class Qt5Dependency(QtBaseDependency):
+ def __init__(self, env, kwargs):
+ QtBaseDependency.__init__(self, 'qt5', env, kwargs)
+
+ def get_pkgconfig_host_bins(self, core):
+ return core.get_pkgconfig_variable('host_bins')
+
+
+# There are three different ways of depending on SDL2:
+# sdl2-config, pkg-config and OSX framework
+class SDL2Dependency(Dependency):
+ def __init__(self, environment, kwargs):
+ Dependency.__init__(self, 'sdl2', kwargs)
+ self.is_found = False
+ self.cargs = []
+ self.linkargs = []
+ if DependencyMethods.PKGCONFIG in self.methods:
+ try:
+ pcdep = PkgConfigDependency('sdl2', environment, kwargs)
+ if pcdep.found():
+ self.type_name = 'pkgconfig'
+ self.is_found = True
+ self.cargs = pcdep.get_compile_args()
+ self.linkargs = pcdep.get_link_args()
+ self.version = pcdep.get_version()
+ return
+ except Exception as e:
+ mlog.debug('SDL 2 not found via pkgconfig. Trying next, error was:', str(e))
+ pass
+ if DependencyMethods.SDLCONFIG in self.methods:
+ sdlconf = shutil.which('sdl2-config')
+ if sdlconf:
+ stdo = Popen_safe(['sdl2-config', '--cflags'])[1]
+ self.cargs = stdo.strip().split()
+ stdo = Popen_safe(['sdl2-config', '--libs'])[1]
+ self.linkargs = stdo.strip().split()
+ stdo = Popen_safe(['sdl2-config', '--version'])[1]
+ self.version = stdo.strip()
+ self.is_found = True
+ mlog.log('Dependency', mlog.bold('sdl2'), 'found:', mlog.green('YES'),
+ self.version, '(%s)' % sdlconf)
+ return
+ mlog.debug('Could not find sdl2-config binary, trying next.')
+ if DependencyMethods.EXTRAFRAMEWORK in self.methods:
+ if mesonlib.is_osx():
+ fwdep = ExtraFrameworkDependency('sdl2', kwargs.get('required', True), None, kwargs)
+ if fwdep.found():
+ self.is_found = True
+ self.cargs = fwdep.get_compile_args()
+ self.linkargs = fwdep.get_link_args()
+ self.version = '2' # FIXME
+ return
+ mlog.log('Dependency', mlog.bold('sdl2'), 'found:', mlog.red('NO'))
+
+ def get_compile_args(self):
+ return self.cargs
+
+ def get_link_args(self):
+ return self.linkargs
+
+ def found(self):
+ return self.is_found
+
+ def get_version(self):
+ return self.version
+
+ def get_methods(self):
+ if mesonlib.is_osx():
+ return [DependencyMethods.PKGCONFIG, DependencyMethods.SDLCONFIG, DependencyMethods.EXTRAFRAMEWORK]
+ else:
+ return [DependencyMethods.PKGCONFIG, DependencyMethods.SDLCONFIG]
+
+
+class WxDependency(Dependency):
+ wx_found = None
+
+ def __init__(self, environment, kwargs):
+ Dependency.__init__(self, 'wx', kwargs)
+ self.is_found = False
+ # FIXME: use version instead of modversion
+ self.modversion = 'none'
+ if WxDependency.wx_found is None:
+ self.check_wxconfig()
+ if not WxDependency.wx_found:
+ # FIXME: this message could be printed after Dependncy found
+ mlog.log("Neither wx-config-3.0 nor wx-config found; can't detect dependency")
+ return
+
+ # FIXME: This should print stdout and stderr using mlog.debug
+ p, out = Popen_safe([self.wxc, '--version'])[0:2]
+ if p.returncode != 0:
+ mlog.log('Dependency wxwidgets found:', mlog.red('NO'))
+ self.cargs = []
+ self.libs = []
+ else:
+ self.modversion = out.strip()
+ version_req = kwargs.get('version', None)
+ if version_req is not None:
+ if not version_compare(self.modversion, version_req, strict=True):
+ mlog.log('Wxwidgets version %s does not fullfill requirement %s' %
+ (self.modversion, version_req))
+ return
+ mlog.log('Dependency wxwidgets found:', mlog.green('YES'))
+ self.is_found = True
+ self.requested_modules = self.get_requested(kwargs)
+ # wx-config seems to have a cflags as well but since it requires C++,
+ # this should be good, at least for now.
+ p, out = Popen_safe([self.wxc, '--cxxflags'])[0:2]
+ # FIXME: this error should only be raised if required is true
+ if p.returncode != 0:
+ raise DependencyException('Could not generate cargs for wxwidgets.')
+ self.cargs = out.split()
+
+ # FIXME: this error should only be raised if required is true
+ p, out = Popen_safe([self.wxc, '--libs'] + self.requested_modules)[0:2]
+ if p.returncode != 0:
+ raise DependencyException('Could not generate libs for wxwidgets.')
+ self.libs = out.split()
+
+ def get_requested(self, kwargs):
+ modules = 'modules'
+ if modules not in kwargs:
+ return []
+ candidates = kwargs[modules]
+ if isinstance(candidates, str):
+ return [candidates]
+ for c in candidates:
+ if not isinstance(c, str):
+ raise DependencyException('wxwidgets module argument is not a string.')
+ return candidates
+
+ def get_modversion(self):
+ return self.modversion
+
+ def get_version(self):
+ return self.modversion
+
+ def get_compile_args(self):
+ return self.cargs
+
+ def get_link_args(self):
+ return self.libs
+
+ def check_wxconfig(self):
+ for wxc in ['wx-config-3.0', 'wx-config']:
+ try:
+ p, out = Popen_safe([wxc, '--version'])[0:2]
+ if p.returncode == 0:
+ mlog.log('Found wx-config:', mlog.bold(shutil.which(wxc)),
+ '(%s)' % out.strip())
+ self.wxc = wxc
+ WxDependency.wx_found = True
+ return
+ except (FileNotFoundError, PermissionError):
+ pass
+ WxDependency.wxconfig_found = False
+ mlog.log('Found wx-config:', mlog.red('NO'))
+
+ def found(self):
+ return self.is_found
diff --git a/mesonbuild/interpreter.py b/mesonbuild/interpreter.py
index 80d482e..5df26cc 100644
--- a/mesonbuild/interpreter.py
+++ b/mesonbuild/interpreter.py
@@ -23,7 +23,8 @@ from . import compilers
from .wrap import wrap, WrapMode
from . import mesonlib
from .mesonlib import FileMode, Popen_safe, get_meson_script
-from .dependencies import InternalDependency, Dependency, ExternalProgram
+from .dependencies import ExternalProgram
+from .dependencies import InternalDependency, Dependency, DependencyException
from .interpreterbase import InterpreterBase
from .interpreterbase import check_stringlist, noPosargs, noKwargs, stringArgs
from .interpreterbase import InterpreterException, InvalidArguments, InvalidCode
@@ -1852,13 +1853,7 @@ class Interpreter(InterpreterBase):
def func_find_library(self, node, args, kwargs):
mlog.log(mlog.red('DEPRECATION:'), 'find_library() is removed, use the corresponding method in compiler object instead.')
- def func_dependency(self, node, args, kwargs):
- self.validate_arguments(args, 1, [str])
- name = args[0]
- if '<' in name or '>' in name or '=' in name:
- raise InvalidArguments('Characters <, > and = are forbidden in dependency names. To specify'
- 'version\n requirements use the \'version\' keyword argument instead.')
- identifier = dependencies.get_dep_identifier(name, kwargs)
+ def _find_cached_dep(self, name, kwargs):
# Check if we want this as a cross-dep or a native-dep
# FIXME: Not all dependencies support such a distinction right now,
# and we repeat this check inside dependencies that do. We need to
@@ -1868,60 +1863,79 @@ class Interpreter(InterpreterBase):
want_cross = not kwargs['native']
else:
want_cross = is_cross
- # Check if we've already searched for and found this dep
+ identifier = dependencies.get_dep_identifier(name, kwargs, want_cross)
cached_dep = None
+ # Check if we've already searched for and found this dep
if identifier in self.coredata.deps:
cached_dep = self.coredata.deps[identifier]
- if 'version' in kwargs:
- wanted = kwargs['version']
- found = cached_dep.get_version()
- if not cached_dep.found() or \
- not mesonlib.version_compare_many(found, wanted)[0]:
- # Cached dep has the wrong version. Check if an external
- # dependency or a fallback dependency provides it.
- cached_dep = None
- # Don't re-use cached dep if it wasn't required but this one is,
- # so we properly go into fallback/error code paths
- if kwargs.get('required', True) and not getattr(cached_dep, 'required', False):
- cached_dep = None
- # Don't reuse cached dep if one is a cross-dep and the other is a native dep
- if not getattr(cached_dep, 'want_cross', is_cross) == want_cross:
- cached_dep = None
+ else:
+ # Check if exactly the same dep with different version requirements
+ # was found already.
+ wanted = identifier[1]
+ for trial, trial_dep in self.coredata.deps.items():
+ # trial[1], identifier[1] are the version requirements
+ if trial[0] != identifier[0] or trial[2:] != identifier[2:]:
+ continue
+ found = trial_dep.get_version()
+ if not wanted or mesonlib.version_compare_many(found, wanted)[0]:
+ # We either don't care about the version, or our
+ # version requirements matched the trial dep's version.
+ cached_dep = trial_dep
+ break
+ return identifier, cached_dep
+
+ def func_dependency(self, node, args, kwargs):
+ self.validate_arguments(args, 1, [str])
+ name = args[0]
+ if '<' in name or '>' in name or '=' in name:
+ raise InvalidArguments('Characters <, > and = are forbidden in dependency names. To specify'
+ 'version\n requirements use the \'version\' keyword argument instead.')
+ identifier, cached_dep = self._find_cached_dep(name, kwargs)
if cached_dep:
+ if kwargs.get('required', True) and not cached_dep.found():
+ m = 'Dependency {!r} was already checked and was not found'
+ raise DependencyException(m.format(name))
dep = cached_dep
else:
# We need to actually search for this dep
exception = None
dep = None
- # If the fallback has already been configured (possibly by a higher level project)
- # try to use it before using the native version
+ # If the dependency has already been configured, possibly by
+ # a higher level project, try to use it first.
if 'fallback' in kwargs:
dirname, varname = self.get_subproject_infos(kwargs)
if dirname in self.subprojects:
+ subproject = self.subprojects[dirname]
try:
- dep = self.subprojects[dirname].get_variable_method([varname], {})
- dep = dep.held_object
+ # Never add fallback deps to self.coredata.deps
+ return subproject.get_variable_method([varname], {})
except KeyError:
pass
+ # Search for it outside the project
if not dep:
try:
dep = dependencies.find_external_dependency(name, self.environment, kwargs)
- except dependencies.DependencyException as e:
+ except DependencyException as e:
exception = e
pass
+ # Search inside the projects list
if not dep or not dep.found():
if 'fallback' in kwargs:
fallback_dep = self.dependency_fallback(name, kwargs)
if fallback_dep:
+ # Never add fallback deps to self.coredata.deps since we
+ # cannot cache them. They must always be evaluated else
+ # we won't actually read all the build files.
return fallback_dep
-
if not dep:
raise exception
- self.coredata.deps[identifier] = dep
+ # Only store found-deps in the cache
+ if dep.found():
+ self.coredata.deps[identifier] = dep
return DependencyHolder(dep)
def get_subproject_infos(self, kwargs):
@@ -2230,7 +2244,7 @@ class Interpreter(InterpreterBase):
absname = os.path.join(self.environment.get_source_dir(), buildfilename)
if not os.path.isfile(absname):
self.subdir = prev_subdir
- raise InterpreterException('Nonexistent build def file %s.' % buildfilename)
+ raise InterpreterException('Non-existent build file {!r}'.format(buildfilename))
with open(absname, encoding='utf8') as f:
code = f.read()
assert(isinstance(code, str))
diff --git a/mesonbuild/mesonlib.py b/mesonbuild/mesonlib.py
index fbd732a..6937502 100644
--- a/mesonbuild/mesonlib.py
+++ b/mesonbuild/mesonlib.py
@@ -203,7 +203,7 @@ def classify_unity_sources(compilers, sources):
def flatten(item):
if not isinstance(item, list):
- return item
+ return [item]
result = []
for i in item:
if isinstance(i, list):
@@ -305,7 +305,7 @@ def version_compare(vstr1, vstr2, strict=False):
return cmpop(varr1, varr2)
def version_compare_many(vstr1, conditions):
- if not isinstance(conditions, (list, tuple)):
+ if not isinstance(conditions, (list, tuple, frozenset)):
conditions = [conditions]
found = []
not_found = []
@@ -708,7 +708,8 @@ class OrderedSet(collections.MutableSet):
def __repr__(self):
# Don't print 'OrderedSet("")' for an empty set.
if self.__container:
- return 'OrderedSet("{}")'.format('", "'.join(self.__container.keys()))
+ return 'OrderedSet("{}")'.format(
+ '", "'.join(repr(e) for e in self.__container.keys()))
return 'OrderedSet()'
def add(self, value):
@@ -721,3 +722,6 @@ class OrderedSet(collections.MutableSet):
def update(self, iterable):
for item in iterable:
self.__container[item] = None
+
+ def difference(self, set_):
+ return type(self)(e for e in self if e not in set_)
diff --git a/mesonbuild/mesonmain.py b/mesonbuild/mesonmain.py
index bce0965..282df36 100644
--- a/mesonbuild/mesonmain.py
+++ b/mesonbuild/mesonmain.py
@@ -246,6 +246,9 @@ def run_script_command(args):
elif cmdname == 'uninstall':
import mesonbuild.scripts.uninstall as abc
cmdfunc = abc.run
+ elif cmdname == 'dist':
+ import mesonbuild.scripts.dist as abc
+ cmdfunc = abc.run
else:
raise MesonException('Unknown internal command {}.'.format(cmdname))
return cmdfunc(cmdargs)
diff --git a/mesonbuild/mintro.py b/mesonbuild/mintro.py
index c12c4dd..88ea16e 100644
--- a/mesonbuild/mintro.py
+++ b/mesonbuild/mintro.py
@@ -161,12 +161,12 @@ def list_buildsystem_files(coredata, builddata):
print(json.dumps(filelist))
def list_deps(coredata):
- result = {}
- for d in coredata.deps.values():
+ result = []
+ for d in coredata.deps:
if d.found():
args = {'compile_args': d.get_compile_args(),
'link_args': d.get_link_args()}
- result[d.name] = args
+ result += [d.name, args]
print(json.dumps(result))
def list_tests(testdata):
diff --git a/mesonbuild/modules/pkgconfig.py b/mesonbuild/modules/pkgconfig.py
index e79371f..09c615a 100644
--- a/mesonbuild/modules/pkgconfig.py
+++ b/mesonbuild/modules/pkgconfig.py
@@ -43,7 +43,7 @@ class PkgConfigModule(ExtensionModule):
def generate_pkgconfig_file(self, state, libraries, subdirs, name, description,
url, version, pcfile, pub_reqs, priv_reqs,
- conflicts, priv_libs):
+ conflicts, priv_libs, variables):
coredata = state.environment.get_coredata()
outdir = state.environment.scratch_dir
fname = os.path.join(outdir, pcfile)
@@ -53,6 +53,8 @@ class PkgConfigModule(ExtensionModule):
# 'os.path.join' for details)
ofile.write('libdir=%s\n' % os.path.join('${prefix}', coredata.get_builtin_option('libdir')))
ofile.write('includedir=%s\n' % os.path.join('${prefix}', coredata.get_builtin_option('includedir')))
+ for k, v in variables:
+ ofile.write('%s=%s\n' % (k, v))
ofile.write('\n')
ofile.write('Name: %s\n' % name)
if len(description) > 0:
@@ -136,6 +138,33 @@ class PkgConfigModule(ExtensionModule):
pub_reqs = mesonlib.stringlistify(kwargs.get('requires', []))
priv_reqs = mesonlib.stringlistify(kwargs.get('requires_private', []))
conflicts = mesonlib.stringlistify(kwargs.get('conflicts', []))
+
+ def parse_variable_list(stringlist):
+ reserved = ['prefix', 'libdir', 'includedir']
+ variables = []
+ for var in stringlist:
+ # foo=bar=baz is ('foo', 'bar=baz')
+ l = var.split('=', 1)
+ if len(l) < 2:
+ raise mesonlib.MesonException('Variables must be in \'name=value\' format')
+
+ name, value = l[0].strip(), l[1].strip()
+ if not name or not value:
+ raise mesonlib.MesonException('Variables must be in \'name=value\' format')
+
+ # Variable names must not contain whitespaces
+ if any(c.isspace() for c in name):
+ raise mesonlib.MesonException('Invalid whitespace in assignment "{}"'.format(var))
+
+ if name in reserved:
+ raise mesonlib.MesonException('Variable "{}" is reserved'.format(name))
+
+ variables.append((name, value))
+
+ return variables
+
+ variables = parse_variable_list(mesonlib.stringlistify(kwargs.get('variables', [])))
+
pcfile = filebase + '.pc'
pkgroot = kwargs.get('install_dir', None)
if pkgroot is None:
@@ -144,7 +173,7 @@ class PkgConfigModule(ExtensionModule):
raise mesonlib.MesonException('Install_dir must be a string.')
self.generate_pkgconfig_file(state, libs, subdirs, name, description, url,
version, pcfile, pub_reqs, priv_reqs,
- conflicts, priv_libs)
+ conflicts, priv_libs, variables)
res = build.Data(mesonlib.File(True, state.environment.get_scratch_dir(), pcfile), pkgroot)
return ModuleReturnValue(res, [res])
diff --git a/mesonbuild/scripts/dist.py b/mesonbuild/scripts/dist.py
new file mode 100644
index 0000000..ba6df7d
--- /dev/null
+++ b/mesonbuild/scripts/dist.py
@@ -0,0 +1,148 @@
+# Copyright 2017 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+import os, sys
+import shutil
+import argparse
+import subprocess
+import pickle
+import hashlib
+import tarfile, zipfile
+import tempfile
+from glob import glob
+from mesonbuild.environment import detect_ninja
+
+def create_hash(fname):
+ hashname = fname + '.sha256sum'
+ m = hashlib.sha256()
+ m.update(open(fname, 'rb').read())
+ with open(hashname, 'w') as f:
+ f.write('%s %s\n' % (m.hexdigest(), os.path.split(fname)[-1]))
+
+def create_zip(zipfilename, packaging_dir):
+ prefix = os.path.split(packaging_dir)[0]
+ removelen = len(prefix) + 1
+ with zipfile.ZipFile(zipfilename,
+ 'w',
+ compression=zipfile.ZIP_DEFLATED,
+ allowZip64=True) as zf:
+ zf.write(packaging_dir, packaging_dir[removelen:])
+ for root, dirs, files in os.walk(packaging_dir):
+ for d in dirs:
+ dname = os.path.join(root, d)
+ zf.write(dname, dname[removelen:])
+ for f in files:
+ fname = os.path.join(root, f)
+ zf.write(fname, fname[removelen:])
+
+def del_gitfiles(dirname):
+ for f in glob(os.path.join(dirname, '.git*')):
+ if os.path.isdir(f) and not os.path.islink(f):
+ shutil.rmtree(f)
+ else:
+ os.unlink(f)
+
+def process_submodules(dirname):
+ module_file = os.path.join(dirname, '.gitmodules')
+ if not os.path.exists(module_file):
+ return
+ subprocess.check_call(['git', 'submodule', 'update', '--init'], cwd=dirname)
+ for line in open(module_file):
+ line = line.strip()
+ if '=' not in line:
+ continue
+ k, v = line.split('=', 1)
+ k = k.strip()
+ v = v.strip()
+ if k != 'path':
+ continue
+ del_gitfiles(os.path.join(dirname, v))
+
+def create_dist(dist_name, src_root, bld_root, dist_sub):
+ distdir = os.path.join(dist_sub, dist_name)
+ if os.path.exists(distdir):
+ shutil.rmtree(distdir)
+ os.makedirs(distdir)
+ subprocess.check_call(['git', 'clone', '--shared', src_root, distdir])
+ process_submodules(distdir)
+ del_gitfiles(distdir)
+ xzname = distdir + '.tar.xz'
+ # Should use shutil but it got xz support only in 3.5.
+ with tarfile.open(xzname, 'w:xz') as tf:
+ tf.add(distdir, os.path.split(distdir)[1])
+ # Create only .tar.xz for now.
+ #zipname = distdir + '.zip'
+ #create_zip(zipname, distdir)
+ shutil.rmtree(distdir)
+ return (xzname, )
+
+def check_dist(packagename, meson_command):
+ print('Testing distribution package %s.' % packagename)
+ unpackdir = tempfile.mkdtemp()
+ builddir = tempfile.mkdtemp()
+ installdir = tempfile.mkdtemp()
+ ninja_bin = detect_ninja()
+ try:
+ tf = tarfile.open(packagename)
+ tf.extractall(unpackdir)
+ srcdir = glob(os.path.join(unpackdir, '*'))[0]
+ if subprocess.call(meson_command + ['--backend=ninja', srcdir, builddir]) != 0:
+ print('Running Meson on distribution package failed')
+ return 1
+ if subprocess.call([ninja_bin], cwd=builddir) != 0:
+ print('Compiling the distribution package failed.')
+ return 1
+ if subprocess.call([ninja_bin, 'test'], cwd=builddir) != 0:
+ print('Running unit tests on the distribution package failed.')
+ return 1
+ myenv = os.environ.copy()
+ myenv['DESTDIR'] = installdir
+ if subprocess.call([ninja_bin, 'install'], cwd=builddir, env=myenv) != 0:
+ print('Installing the distribution package failed.')
+ return 1
+ finally:
+ shutil.rmtree(srcdir)
+ shutil.rmtree(builddir)
+ shutil.rmtree(installdir)
+ print('Distribution package %s tested.' % packagename)
+ return 0
+
+def run(args):
+ src_root = args[0]
+ bld_root = args[1]
+ meson_command = args[2:]
+ priv_dir = os.path.join(bld_root, 'meson-private')
+ dist_sub = os.path.join(bld_root, 'meson-dist')
+
+ buildfile = os.path.join(priv_dir, 'build.dat')
+
+ build = pickle.load(open(buildfile, 'rb'))
+
+ dist_name = build.project_name + '-' + build.project_version
+
+ if not os.path.isdir(os.path.join(src_root, '.git')):
+ print('Dist currently only works with Git repos.')
+ return 1
+ names = create_dist(dist_name, src_root, bld_root, dist_sub)
+ if names is None:
+ return 1
+ error_count = 0
+ for name in names:
+ rc = check_dist(name, meson_command) # Check only one.
+ rc = 0
+ if rc == 0:
+ create_hash(name)
+ error_count += rc
+ return rc
diff --git a/run_project_tests.py b/run_project_tests.py
index 1abc199..b8ef0e9 100755
--- a/run_project_tests.py
+++ b/run_project_tests.py
@@ -36,6 +36,7 @@ import concurrent.futures as conc
import re
from run_tests import get_backend_commands, get_backend_args_for_dir, Backend
+from run_tests import ensure_backend_detects_changes
class BuildStep(Enum):
@@ -342,6 +343,10 @@ def _run_test(testdir, test_build_dir, install_dir, extra_args, compiler, backen
return TestResult('Test that should have failed succeeded', BuildStep.configure, stdo, stde, mesonlog, gen_time)
if returncode != 0:
return TestResult('Generating the build system failed.', BuildStep.configure, stdo, stde, mesonlog, gen_time)
+ # Touch the meson.build file to force a regenerate so we can test that
+ # regeneration works before a build is run.
+ ensure_backend_detects_changes(backend)
+ os.utime(os.path.join(testdir, 'meson.build'))
# Build with subprocess
dir_args = get_backend_args_for_dir(backend, test_build_dir)
build_start = time.time()
@@ -356,9 +361,8 @@ def _run_test(testdir, test_build_dir, install_dir, extra_args, compiler, backen
if pc.returncode != 0:
return TestResult('Compiling source code failed.', BuildStep.build, stdo, stde, mesonlog, gen_time, build_time)
# Touch the meson.build file to force a regenerate so we can test that
- # regeneration works. We need to sleep for 0.2s because Ninja tracks mtimes
- # at a low resolution: https://github.com/ninja-build/ninja/issues/371
- time.sleep(0.2)
+ # regeneration works after a build is complete.
+ ensure_backend_detects_changes(backend)
os.utime(os.path.join(testdir, 'meson.build'))
test_start = time.time()
# Test in-process
diff --git a/run_tests.py b/run_tests.py
index d0a67e8..a374839 100755
--- a/run_tests.py
+++ b/run_tests.py
@@ -16,6 +16,7 @@
import os
import sys
+import time
import shutil
import subprocess
import platform
@@ -98,6 +99,13 @@ def get_backend_commands(backend, debug=False):
raise AssertionError('Unknown backend: {!r}'.format(backend))
return cmd, clean_cmd, test_cmd, install_cmd, uninstall_cmd
+def ensure_backend_detects_changes(backend):
+ # This is needed to increase the difference between build.ninja's
+ # timestamp and the timestamp of whatever you changed due to a Ninja
+ # bug: https://github.com/ninja-build/ninja/issues/371
+ if backend is Backend.ninja:
+ time.sleep(1)
+
def get_fake_options(prefix):
import argparse
opts = argparse.Namespace()
diff --git a/run_unittests.py b/run_unittests.py
index d285e6a..ec9d53b 100755
--- a/run_unittests.py
+++ b/run_unittests.py
@@ -30,6 +30,7 @@ from mesonbuild.dependencies import PkgConfigDependency, ExternalProgram
from run_tests import exe_suffix, get_fake_options, FakeEnvironment
from run_tests import get_builddir_target_args, get_backend_commands, Backend
+from run_tests import ensure_backend_detects_changes
def get_soname(fname):
@@ -330,6 +331,7 @@ class BasePlatformTests(unittest.TestCase):
self.prefix = '/usr'
self.libdir = os.path.join(self.prefix, 'lib')
self.installdir = os.path.join(self.builddir, 'install')
+ self.distdir = os.path.join(self.builddir, 'meson-dist')
# Get the backend
# FIXME: Extract this from argv?
self.backend = getattr(Backend, os.environ.get('MESON_UNIT_TEST_BACKEND', 'ninja'))
@@ -355,13 +357,6 @@ class BasePlatformTests(unittest.TestCase):
# XCode backend is untested with unit tests, help welcome!
self.no_rebuild_stdout = 'UNKNOWN BACKEND {!r}'.format(self.backend.name)
- def ensure_backend_detects_changes(self):
- # This is needed to increase the difference between build.ninja's
- # timestamp and the timestamp of whatever you changed due to a Ninja
- # bug: https://github.com/ninja-build/ninja/issues/371
- if self.backend is Backend.ninja:
- time.sleep(1)
-
def _print_meson_log(self):
log = os.path.join(self.logdir, 'meson-log.txt')
if not os.path.isfile(log):
@@ -439,14 +434,14 @@ class BasePlatformTests(unittest.TestCase):
def setconf(self, arg, will_build=True):
if will_build:
- self.ensure_backend_detects_changes()
+ ensure_backend_detects_changes(self.backend)
self._run(self.mconf_command + [arg, self.builddir])
def wipe(self):
shutil.rmtree(self.builddir)
def utime(self, f):
- self.ensure_backend_detects_changes()
+ ensure_backend_detects_changes(self.backend)
os.utime(f)
def get_compdb(self):
@@ -1071,6 +1066,47 @@ class AllPlatformTests(BasePlatformTests):
self.build()
self.run_tests()
+ def test_dist(self):
+ if not shutil.which('git'):
+ raise unittest.SkipTest('Git not found')
+ try:
+ self.dist_impl()
+ except PermissionError:
+ # When run under Windows CI, something (virus scanner?)
+ # holds on to the git files so cleaning up the dir
+ # fails sometimes.
+ pass
+
+ def dist_impl(self):
+ # Create this on the fly because having rogue .git directories inside
+ # the source tree leads to all kinds of trouble.
+ with tempfile.TemporaryDirectory() as project_dir:
+ with open(os.path.join(project_dir, 'meson.build'), 'w') as ofile:
+ ofile.write('''project('disttest', 'c', version : '1.4.3')
+e = executable('distexe', 'distexe.c')
+test('dist test', e)
+''')
+ with open(os.path.join(project_dir, 'distexe.c'), 'w') as ofile:
+ ofile.write('''#include<stdio.h>
+
+int main(int argc, char **argv) {
+ printf("I am a distribution test.\\n");
+ return 0;
+}
+''')
+ subprocess.check_call(['git', 'init'], cwd=project_dir)
+ subprocess.check_call(['git', 'config',
+ 'user.name', 'Author Person'], cwd=project_dir)
+ subprocess.check_call(['git', 'config',
+ 'user.email', 'teh_coderz@example.com'], cwd=project_dir)
+ subprocess.check_call(['git', 'add', 'meson.build', 'distexe.c'], cwd=project_dir)
+ subprocess.check_call(['git', 'commit', '-a', '-m', 'I am a project'], cwd=project_dir)
+ self.init(project_dir)
+ self.build('dist')
+ distfile = os.path.join(self.distdir, 'disttest-1.4.3.tar.xz')
+ checksumfile = distfile + '.sha256sum'
+ self.assertTrue(os.path.exists(distfile))
+ self.assertTrue(os.path.exists(checksumfile))
class WindowsTests(BasePlatformTests):
'''
@@ -1180,6 +1216,8 @@ class LinuxlikeTests(BasePlatformTests):
self.assertTrue(simple_dep.found())
self.assertEqual(simple_dep.get_version(), '1.0')
self.assertIn('-lfoo', simple_dep.get_link_args())
+ self.assertEqual(simple_dep.get_pkgconfig_variable('foo'), 'bar')
+ self.assertPathEqual(simple_dep.get_pkgconfig_variable('datadir'), '/usr/data')
def test_vala_c_warnings(self):
'''
diff --git a/setup.py b/setup.py
index 8996772..024c085 100644
--- a/setup.py
+++ b/setup.py
@@ -63,6 +63,7 @@ setup(name='meson',
url='http://mesonbuild.com',
license=' Apache License, Version 2.0',
packages=['mesonbuild',
+ 'mesonbuild.dependencies',
'mesonbuild.modules',
'mesonbuild.scripts',
'mesonbuild.backend',
diff --git a/test cases/common/150 nested links/meson.build b/test cases/common/150 nested links/meson.build
new file mode 100644
index 0000000..0821b03
--- /dev/null
+++ b/test cases/common/150 nested links/meson.build
@@ -0,0 +1,8 @@
+project('test', 'c')
+
+libxserver_dri3 = []
+libxserver = [ libxserver_dri3 ]
+
+executable('Xephyr', 'xephyr.c', link_with: [ libxserver ])
+
+executable('Zephyr', 'xephyr.c', link_args: [[], []])
diff --git a/test cases/common/150 nested links/xephyr.c b/test cases/common/150 nested links/xephyr.c
new file mode 100644
index 0000000..33c14ce
--- /dev/null
+++ b/test cases/common/150 nested links/xephyr.c
@@ -0,0 +1,3 @@
+int main() {
+ return 0;
+}
diff --git a/test cases/common/151 list of file sources/foo b/test cases/common/151 list of file sources/foo
new file mode 100644
index 0000000..7b57bd2
--- /dev/null
+++ b/test cases/common/151 list of file sources/foo
@@ -0,0 +1 @@
+some text
diff --git a/test cases/common/151 list of file sources/gen.py b/test cases/common/151 list of file sources/gen.py
new file mode 100644
index 0000000..2337d3d
--- /dev/null
+++ b/test cases/common/151 list of file sources/gen.py
@@ -0,0 +1,7 @@
+import shutil
+import sys
+
+if __name__ == '__main__':
+ if len(sys.argv) != 3:
+ raise Exception('Requires exactly 2 args')
+ shutil.copy2(sys.argv[1], sys.argv[2])
diff --git a/test cases/common/151 list of file sources/meson.build b/test cases/common/151 list of file sources/meson.build
new file mode 100644
index 0000000..819509d
--- /dev/null
+++ b/test cases/common/151 list of file sources/meson.build
@@ -0,0 +1,12 @@
+project('test', 'c')
+
+mod_py = import('python3')
+python = mod_py.find_python()
+
+test_target = custom_target(
+ 'test_target',
+ input : [files('gen.py'), files('foo')],
+ output : 'bar',
+ command : [python, '@INPUT0@', '@INPUT1@', '@OUTPUT@'],
+ build_by_default : true,
+)
diff --git a/test cases/common/51 pkgconfig-gen/meson.build b/test cases/common/51 pkgconfig-gen/meson.build
index e1e41d9..68ee812 100644
--- a/test cases/common/51 pkgconfig-gen/meson.build
+++ b/test cases/common/51 pkgconfig-gen/meson.build
@@ -41,4 +41,6 @@ pkgg.generate(
libraries : lib2,
name : 'libfoo',
version : libver,
- description : 'A foo library.')
+ description : 'A foo library.',
+ variables : ['foo=bar', 'datadir=${prefix}/data']
+)
diff --git a/test cases/failing/47 pkgconfig variables reserved/meson.build b/test cases/failing/47 pkgconfig variables reserved/meson.build
new file mode 100644
index 0000000..82ae995
--- /dev/null
+++ b/test cases/failing/47 pkgconfig variables reserved/meson.build
@@ -0,0 +1,16 @@
+project('variables-reserved-test', 'c', version : '1.0')
+
+pkgg = import('pkgconfig')
+lib = shared_library('simple', 'simple.c')
+libver = '1.0'
+h = install_headers('simple.h')
+
+pkgg.generate(
+ libraries : [lib, '-lz'],
+ subdirs : '.',
+ version : libver,
+ name : 'libsimple',
+ filebase : 'simple',
+ description : 'A simple demo library.',
+ variables : [ 'prefix=/tmp/' ]
+)
diff --git a/test cases/failing/47 pkgconfig variables reserved/simple.c b/test cases/failing/47 pkgconfig variables reserved/simple.c
new file mode 100644
index 0000000..e8a6d83
--- /dev/null
+++ b/test cases/failing/47 pkgconfig variables reserved/simple.c
@@ -0,0 +1,5 @@
+#include"simple.h"
+
+int simple_function() {
+ return 42;
+}
diff --git a/test cases/failing/47 pkgconfig variables reserved/simple.h b/test cases/failing/47 pkgconfig variables reserved/simple.h
new file mode 100644
index 0000000..bb52e6d
--- /dev/null
+++ b/test cases/failing/47 pkgconfig variables reserved/simple.h
@@ -0,0 +1,6 @@
+#ifndef SIMPLE_H_
+#define SIMPLE_H_
+
+int simple_function();
+
+#endif
diff --git a/test cases/failing/48 pkgconfig variables zero length/meson.build b/test cases/failing/48 pkgconfig variables zero length/meson.build
new file mode 100644
index 0000000..65d3344
--- /dev/null
+++ b/test cases/failing/48 pkgconfig variables zero length/meson.build
@@ -0,0 +1,16 @@
+project('variables-zero-length-test', 'c', version : '1.0')
+
+pkgg = import('pkgconfig')
+lib = shared_library('simple', 'simple.c')
+libver = '1.0'
+h = install_headers('simple.h')
+
+pkgg.generate(
+ libraries : [lib, '-lz'],
+ subdirs : '.',
+ version : libver,
+ name : 'libsimple',
+ filebase : 'simple',
+ description : 'A simple demo library.',
+ variables : [ '=value' ]
+)
diff --git a/test cases/failing/48 pkgconfig variables zero length/simple.c b/test cases/failing/48 pkgconfig variables zero length/simple.c
new file mode 100644
index 0000000..e8a6d83
--- /dev/null
+++ b/test cases/failing/48 pkgconfig variables zero length/simple.c
@@ -0,0 +1,5 @@
+#include"simple.h"
+
+int simple_function() {
+ return 42;
+}
diff --git a/test cases/failing/48 pkgconfig variables zero length/simple.h b/test cases/failing/48 pkgconfig variables zero length/simple.h
new file mode 100644
index 0000000..bb52e6d
--- /dev/null
+++ b/test cases/failing/48 pkgconfig variables zero length/simple.h
@@ -0,0 +1,6 @@
+#ifndef SIMPLE_H_
+#define SIMPLE_H_
+
+int simple_function();
+
+#endif
diff --git a/test cases/failing/49 pkgconfig variables zero length value/meson.build b/test cases/failing/49 pkgconfig variables zero length value/meson.build
new file mode 100644
index 0000000..33977b2
--- /dev/null
+++ b/test cases/failing/49 pkgconfig variables zero length value/meson.build
@@ -0,0 +1,16 @@
+project('variables-zero-length-value-test', 'c', version : '1.0')
+
+pkgg = import('pkgconfig')
+lib = shared_library('simple', 'simple.c')
+libver = '1.0'
+h = install_headers('simple.h')
+
+pkgg.generate(
+ libraries : [lib, '-lz'],
+ subdirs : '.',
+ version : libver,
+ name : 'libsimple',
+ filebase : 'simple',
+ description : 'A simple demo library.',
+ variables : [ 'key=' ]
+)
diff --git a/test cases/failing/49 pkgconfig variables zero length value/simple.c b/test cases/failing/49 pkgconfig variables zero length value/simple.c
new file mode 100644
index 0000000..e8a6d83
--- /dev/null
+++ b/test cases/failing/49 pkgconfig variables zero length value/simple.c
@@ -0,0 +1,5 @@
+#include"simple.h"
+
+int simple_function() {
+ return 42;
+}
diff --git a/test cases/failing/49 pkgconfig variables zero length value/simple.h b/test cases/failing/49 pkgconfig variables zero length value/simple.h
new file mode 100644
index 0000000..bb52e6d
--- /dev/null
+++ b/test cases/failing/49 pkgconfig variables zero length value/simple.h
@@ -0,0 +1,6 @@
+#ifndef SIMPLE_H_
+#define SIMPLE_H_
+
+int simple_function();
+
+#endif
diff --git a/test cases/failing/50 pkgconfig variables not key value/meson.build b/test cases/failing/50 pkgconfig variables not key value/meson.build
new file mode 100644
index 0000000..02fa737
--- /dev/null
+++ b/test cases/failing/50 pkgconfig variables not key value/meson.build
@@ -0,0 +1,16 @@
+project('variables-not-key-value-test', 'c', version : '1.0')
+
+pkgg = import('pkgconfig')
+lib = shared_library('simple', 'simple.c')
+libver = '1.0'
+h = install_headers('simple.h')
+
+pkgg.generate(
+ libraries : [lib, '-lz'],
+ subdirs : '.',
+ version : libver,
+ name : 'libsimple',
+ filebase : 'simple',
+ description : 'A simple demo library.',
+ variables : [ 'this_should_be_key_value' ]
+)
diff --git a/test cases/failing/50 pkgconfig variables not key value/simple.c b/test cases/failing/50 pkgconfig variables not key value/simple.c
new file mode 100644
index 0000000..e8a6d83
--- /dev/null
+++ b/test cases/failing/50 pkgconfig variables not key value/simple.c
@@ -0,0 +1,5 @@
+#include"simple.h"
+
+int simple_function() {
+ return 42;
+}
diff --git a/test cases/failing/50 pkgconfig variables not key value/simple.h b/test cases/failing/50 pkgconfig variables not key value/simple.h
new file mode 100644
index 0000000..bb52e6d
--- /dev/null
+++ b/test cases/failing/50 pkgconfig variables not key value/simple.h
@@ -0,0 +1,6 @@
+#ifndef SIMPLE_H_
+#define SIMPLE_H_
+
+int simple_function();
+
+#endif
diff --git a/test cases/frameworks/14 doxygen/installed_files.txt b/test cases/frameworks/14 doxygen/installed_files.txt
index 72afb2e..e4f70e3 100644
--- a/test cases/frameworks/14 doxygen/installed_files.txt
+++ b/test cases/frameworks/14 doxygen/installed_files.txt
@@ -1,6 +1,4 @@
usr/share/doc/spede/html/annotated.html
-usr/share/doc/spede/html/arrowdown.png
-usr/share/doc/spede/html/arrowright.png
usr/share/doc/spede/html/bc_s.png
usr/share/doc/spede/html/bdwn.png
usr/share/doc/spede/html/classComedy_1_1Comedian.html
@@ -29,6 +27,8 @@ usr/share/doc/spede/html/functions_func.html
usr/share/doc/spede/html/hierarchy.html
usr/share/doc/spede/html/index.html
usr/share/doc/spede/html/jquery.js
+usr/share/doc/spede/html/menu.js
+usr/share/doc/spede/html/menudata.js
usr/share/doc/spede/html/namespaceComedy.html
usr/share/doc/spede/html/namespacemembers.html
usr/share/doc/spede/html/namespacemembers_func.html
diff --git a/test cases/linuxlike/1 pkg-config/meson.build b/test cases/linuxlike/1 pkg-config/meson.build
index 36a4545..7e43821 100644
--- a/test cases/linuxlike/1 pkg-config/meson.build
+++ b/test cases/linuxlike/1 pkg-config/meson.build
@@ -45,4 +45,3 @@ inc = include_directories('incdir')
r = cc.run(code, include_directories : inc, dependencies : zlibdep)
assert(r.returncode() == 0, 'Running manual zlib test failed.')
-
diff --git a/test cases/linuxlike/5 dependency versions/meson.build b/test cases/linuxlike/5 dependency versions/meson.build
index 1b01cd6..5c2c262 100644
--- a/test cases/linuxlike/5 dependency versions/meson.build
+++ b/test cases/linuxlike/5 dependency versions/meson.build
@@ -21,10 +21,18 @@ if dependency('zlib', version : ['<=1.0', '>=9999', '=' + zlib.version()], requi
error('zlib <=1.0 >=9999 should not have been found')
endif
+# Test that a versionless zlib is found after not finding an optional zlib dep with version reqs
+zlibopt = dependency('zlib', required : false)
+assert(zlibopt.found() == true, 'zlib not found')
+
# Test https://github.com/mesonbuild/meson/pull/610
dependency('somebrokenlib', version : '>=2.0', required : false)
dependency('somebrokenlib', version : '>=1.0', required : false)
+# Search for an external dependency that won't be found, but must later be
+# found via fallbacks
+somelibnotfound = dependency('somelib', required : false)
+assert(somelibnotfound.found() == false, 'somelibnotfound was found?')
# Find internal dependency without version
somelibver = dependency('somelib',
fallback : ['somelibnover', 'some_dep'])
@@ -37,17 +45,51 @@ somelib = dependency('somelib',
somelibver = dependency('somelib',
version : '>= 0.3',
fallback : ['somelibver', 'some_dep'])
-# Find somelib again, but with a fallback that will fail
+# Find somelib again, but with a fallback that will fail because subproject does not exist
somelibfail = dependency('somelib',
version : '>= 0.2',
required : false,
fallback : ['somelibfail', 'some_dep'])
assert(somelibfail.found() == false, 'somelibfail found via wrong fallback')
+# Find somelib again, but with a fallback that will fail because dependency does not exist
+somefail_dep = dependency('somelib',
+ version : '>= 0.2',
+ required : false,
+ fallback : ['somelib', 'somefail_dep'])
+assert(somefail_dep.found() == false, 'somefail_dep found via wrong fallback')
-fakezlib_dep = dependency('zlib',
+# Fallback should only be used if the primary was not found
+fallbackzlib_dep = dependency('zlib',
+ fallback : ['somelib', 'fakezlib_dep'])
+assert(fallbackzlib_dep.type_name() == 'pkgconfig', 'fallbackzlib_dep should be of type "pkgconfig", not ' + fallbackzlib_dep.type_name())
+# Check that the above dependency was pkgconfig because the fallback wasn't
+# checked, not because the fallback didn't work
+fakezlib_dep = dependency('fakezlib',
fallback : ['somelib', 'fakezlib_dep'])
assert(fakezlib_dep.type_name() == 'internal', 'fakezlib_dep should be of type "internal", not ' + fakezlib_dep.type_name())
+# Check that you can find a dependency by not specifying a version after not
+# finding it by specifying a version. We add `static: true` here so that the
+# previously cached zlib dependencies don't get checked.
+dependency('zlib', static : true, version : '>=8000', required : false)
+dependency('zlib', static : true)
+
+# Check that you can find a dependency by specifying a correct version after
+# not finding it by specifying a wrong one. We add `method: pkg-config` here so that
+# the previously cached zlib dependencies don't get checked.
+bzip2 = dependency('zlib', method : 'pkg-config', version : '>=9000', required : false)
+bzip2 = dependency('zlib', method : 'pkg-config', version : '>=1.0')
+
+if meson.is_cross_build()
+ # Test caching of native and cross dependencies
+ # https://github.com/mesonbuild/meson/issues/1736
+ cross_prefix = dependency('zlib').get_pkgconfig_variable('prefix')
+ native_prefix = dependency('zlib', native : true).get_pkgconfig_variable('prefix')
+ assert(cross_prefix != '', 'cross zlib prefix is not defined')
+ assert(native_prefix != '', 'native zlib prefix is not defined')
+ assert(native_prefix != cross_prefix, 'native prefix == cross_prefix == ' + native_prefix)
+endif
+
foreach d : ['sdl2', 'gnustep', 'wx', 'gl', 'python3', 'boost', 'gtest', 'gmock']
dep = dependency(d, required : false)
if dep.found()
diff --git a/test cases/windows/10 vs module defs generated/meson.build b/test cases/windows/10 vs module defs generated/meson.build
index 5ce1a20..7728ca7 100644
--- a/test cases/windows/10 vs module defs generated/meson.build
+++ b/test cases/windows/10 vs module defs generated/meson.build
@@ -1,7 +1,5 @@
project('generated_dll_module_defs', 'c')
-if meson.get_compiler('c').get_id() == 'msvc'
- subdir('subdir')
- exe = executable('prog', 'prog.c', link_with : shlib)
- test('runtest', exe)
-endif
+subdir('subdir')
+exe = executable('prog', 'prog.c', link_with : shlib)
+test('runtest', exe)
diff --git a/test cases/windows/10 vs module defs generated/subdir/somedll.c b/test cases/windows/10 vs module defs generated/subdir/somedll.c
index df255e3..b23d8fe 100644
--- a/test cases/windows/10 vs module defs generated/subdir/somedll.c
+++ b/test cases/windows/10 vs module defs generated/subdir/somedll.c
@@ -1,5 +1,3 @@
-#ifdef _MSC_VER
int somedllfunc() {
return 42;
}
-#endif
diff --git a/test cases/windows/6 vs module defs/meson.build b/test cases/windows/6 vs module defs/meson.build
index 4b9e735..fb59028 100644
--- a/test cases/windows/6 vs module defs/meson.build
+++ b/test cases/windows/6 vs module defs/meson.build
@@ -1,7 +1,5 @@
project('dll_module_defs', 'c')
-if meson.get_compiler('c').get_id() == 'msvc'
- subdir('subdir')
- exe = executable('prog', 'prog.c', link_with : shlib)
- test('runtest', exe)
-endif
+subdir('subdir')
+exe = executable('prog', 'prog.c', link_with : shlib)
+test('runtest', exe)
diff --git a/test cases/windows/6 vs module defs/subdir/somedll.c b/test cases/windows/6 vs module defs/subdir/somedll.c
index df255e3..b23d8fe 100644
--- a/test cases/windows/6 vs module defs/subdir/somedll.c
+++ b/test cases/windows/6 vs module defs/subdir/somedll.c
@@ -1,5 +1,3 @@
-#ifdef _MSC_VER
int somedllfunc() {
return 42;
}
-#endif