66 files changed, 1033 insertions, 747 deletions
@@ -1,3 +1,4 @@ +.mypy_cache/ /.project /.pydevproject /.settings diff --git a/data/com.mesonbuild.install.policy b/data/com.mesonbuild.install.policy index 6fba47c..65bf3ef 100644 --- a/data/com.mesonbuild.install.policy +++ b/data/com.mesonbuild.install.policy @@ -17,7 +17,6 @@ </defaults> <annotate key="org.freedesktop.policykit.exec.path">/usr/bin/python3</annotate> <annotate key="org.freedesktop.policykit.exec.argv1">/usr/bin/meson</annotate> - <annotate key="org.freedesktop.policykit.exec.argv2">install</annotate> </action> </policyconfig> diff --git a/docs/markdown/IDE-integration.md b/docs/markdown/IDE-integration.md index 25d262a..32e5e32 100644 --- a/docs/markdown/IDE-integration.md +++ b/docs/markdown/IDE-integration.md @@ -53,6 +53,7 @@ for one target is defined as follows: "name": "Name of the target", "id": "The internal ID meson uses", "type": "<TYPE>", + "defined_in": "/Path/to/the/targets/meson.build", "filename": ["list", "of", "generated", "files"], "build_by_default": true / false, "target_sources": [], diff --git a/docs/markdown/Porting-from-autotools.md b/docs/markdown/Porting-from-autotools.md index 5c4c35d..b60ecfe 100644 --- a/docs/markdown/Porting-from-autotools.md +++ b/docs/markdown/Porting-from-autotools.md @@ -450,9 +450,9 @@ AM_CPPFLAGS = \ `meson.build`: ```meson -add_global_arguments('-DG_LOG_DOMAIN="As"', language : 'c') -add_global_arguments('-DAS_COMPILATION', language : 'c') -add_global_arguments('-DLOCALSTATEDIR="/var"', language : 'c') +add_project_arguments('-DG_LOG_DOMAIN="As"', language : 'c') +add_project_arguments('-DAS_COMPILATION', language : 'c') +add_project_arguments('-DLOCALSTATEDIR="/var"', language : 'c') ``` ### Tests diff --git a/docs/markdown/Python-module.md b/docs/markdown/Python-module.md index a50a33d..a963a32 100644 --- a/docs/markdown/Python-module.md +++ b/docs/markdown/Python-module.md @@ -220,7 +220,7 @@ It exposes the same methods as its parent class. [shared_module]: Reference-manual.md#shared_module [external program]: Reference-manual.md#external-program-object [dependency]: Reference-manual.md#dependency -[install_data]: Reference-manual.md#install-data -[configure_file]: Reference-manual.md#configure-file +[install_data]: Reference-manual.md#install_data +[configure_file]: Reference-manual.md#configure_file [dependency object]: Reference-manual.md#dependency-object [buildtarget object]: Reference-manual.md#build-target-object diff --git a/docs/markdown/Quick-guide.md b/docs/markdown/Quick-guide.md index 549dcfc..8c6da69 100644 --- a/docs/markdown/Quick-guide.md +++ b/docs/markdown/Quick-guide.md @@ -82,7 +82,7 @@ build and install Meson projects are the following. ```console $ cd /path/to/source/root -$ CFLAGS=... CXXFLAGS=... LDFLAGS=.. meson --prefix /usr --buildtype=plain builddir +$ meson --prefix /usr --buildtype=plain builddir -Dc_args=... -Dcpp_args=... -Dc_link_args=... -Dcpp_link_args=... $ ninja -v -C builddir $ ninja -C builddir test $ DESTDIR=/path/to/staging/root ninja -C builddir install diff --git a/docs/markdown/Reference-manual.md b/docs/markdown/Reference-manual.md index 0ddd4a9..5436ec3 100644 --- a/docs/markdown/Reference-manual.md +++ b/docs/markdown/Reference-manual.md @@ -231,6 +231,11 @@ the `@variable@` syntax. - `input` the input file name. If it's not specified in configuration mode, all the variables in the `configuration:` object (see above) are written to the `output:` file. 
+- `install` *(added 0.50.0)* When true, this generated file is installed during +the install step, and `install_dir` must be set and not empty. When false, this +generated file is not installed regardless of the value of `install_dir`. +When omitted it defaults to true when `install_dir` is set and not empty, +false otherwise. - `install_dir` the subdirectory to install the generated file to (e.g. `share/myproject`), if omitted or given the value of empty string, the file is not installed. @@ -565,6 +570,8 @@ be passed to [shared and static libraries](#library). - `d_module_versions` list of module version identifiers set when compiling D sources - `d_debug` list of module debug identifiers set when compiling D sources - `pie` *(added 0.49.0)* build a position-independent executable +- `native`, is a boolean controlling whether the target is compiled for the + build or host machines. Defaults to false, building for the host machine. The list of `sources`, `objects`, and `dependencies` is always flattened, which means you can freely nest and add lists while @@ -1107,8 +1114,8 @@ This function prints its argument to stdout prefixed with WARNING:. The first argument to this function must be a string defining the name of this project. It is followed by programming languages that the project uses. Supported values for languages are `c`, `cpp` (for -`C++`), `d`, `objc`, `objcpp`, `fortran`, `java`, `cs` (for `C#`) and -`vala`. In versions before `0.40.0` you must have at least one +`C++`), `d`, `objc`, `objcpp`, `fortran`, `java`, `cs` (for `C#`), +`vala` and `rust`. In versions before `0.40.0` you must have at least one language listed. The project name can be any string you want, it's not used for diff --git a/docs/markdown/Reference-tables.md b/docs/markdown/Reference-tables.md index fa913f5..9688bf8 100644 --- a/docs/markdown/Reference-tables.md +++ b/docs/markdown/Reference-tables.md @@ -99,24 +99,30 @@ future releases. These are the parameter names for passing language specific arguments to your build target. -| Language | Parameter name | -| ----- | ----- | -| C | c_args | -| C++ | cpp_args | -| C# | cs_args | -| D | d_args | -| Fortran | fortran_args | -| Java | java_args | -| Objective C | objc_args | -| Objective C++ | objcpp_args | -| Rust | rust_args | -| Vala | vala_args | +| Language | compiler name | linker name | +| ------------- | ------------- | ----------------- | +| C | c_args | c_link_args | +| C++ | cpp_args | cpp_link_args | +| C# | cs_args | cs_link_args | +| D | d_args | d_link_args | +| Fortran | fortran_args | fortran_link_args | +| Java | java_args | java_link_args | +| Objective C | objc_args | objc_link_args | +| Objective C++ | objcpp_args | objcpp_link_args | +| Rust | rust_args | rust_link_args | +| Vala | vala_args | vala_link_args | ## Compiler and linker flag environment variables These environment variables will be used to modify the compiler and linker flags. +It is recommended that you **do not use these**. They are provided purely to +for backwards compatibility with other build systems. There are many caveats to +their use, especially when rebuilding the project. It is **highly** recommended +that you use [the command line arguments](#language-arguments-parameters-names) +instead. 
+ | Name | Comment | | ----- | ------- | | CFLAGS | Flags for the C compiler | diff --git a/docs/markdown/Running-Meson.md b/docs/markdown/Running-Meson.md index 426e87d..910513c 100644 --- a/docs/markdown/Running-Meson.md +++ b/docs/markdown/Running-Meson.md @@ -4,49 +4,45 @@ short-description: Building a project with Meson # Running Meson -There are two different ways of invoking Meson. First, you can run it -directly from the source tree with the command -`/path/to/source/meson.py`. Meson may also be installed in which case -the command is simply `meson`. In this manual we only use the latter -format for simplicity. +There are two different ways of invoking Meson. First, you can run it directly +from the source tree with the command `/path/to/source/meson.py`. Meson may +also be installed in which case the command is simply `meson`. In this manual +we only use the latter format for simplicity. -Additionally, the invocation can pass options to meson. -The list of options is documented [here](Builtin-options.md). +Additionally, the invocation can pass options to meson. The list of options is +documented [here](Builtin-options.md). -At the time of writing only a command line version of Meson is -available. This means that Meson must be invoked using the -terminal. If you wish to use the MSVC compiler, you need to run Meson -under "Visual Studio command prompt". +At the time of writing only a command line version of Meson is available. This +means that Meson must be invoked using the terminal. If you wish to use the +MSVC compiler, you need to run Meson under "Visual Studio command prompt". -Configuring the source -== +## Configuring the build directory -Let us assume that we have a source tree that has a Meson build -system. This means that at the topmost directory has a file called -`meson.build`. We run the following commands to get the build started. +Let us assume that we have a source tree that has a Meson build system. This +means that at the topmost directory has a file called `meson.build`. We run the +following commands to get the build started. +```sh +cd /path/to/source/root +meson setup builddir +``` - cd /path/to/source/root - mkdir builddir - cd builddir - meson .. +We invoke Meson with the `setup` command, giving it the location of the build +directory. Meson uses [out of source +builds](http://voices.canonical.com/jussi.pakkanen/2013/04/16/why-you-should-consider-using-separate-build-directories/). -First we create a directory to hold all files generated during the -build. Then we go into it and invoke Meson, giving it the location of -the source root. +Hint: The syntax of meson is `meson [command] [arguments] [options]`. The +`setup` command takes a `builddir` and a `srcdir` argument. If no `srcdir` is +given Meson will deduce the `srcdir` based on `pwd` and the location of +`meson.build`. -Hint: The syntax of meson is `meson [options] [srcdir] [builddir]`, -but you may omit either `srcdir` or `builddir`. Meson will deduce the -`srcdir` by the location of `meson.build`. The other one will be your -`pwd`. +Meson then loads the build configuration file and writes the corresponding +build backend in the build directory. By default Meson generates a *debug +build*, which turns on basic warnings and debug information and disables +compiler optimizations. -Meson then loads the build configuration file and writes the -corresponding build backend in the build directory. 
By default Meson -generates a *debug build*, which turns on basic warnings and debug -information and disables compiler optimizations. - -You can specify a different type of build with the `--buildtype` -command line argument. It can have one of the following values. +You can specify a different type of build with the `--buildtype` command line +argument. It can have one of the following values. | value | meaning | | ------ | -------- | @@ -55,122 +51,123 @@ command line argument. It can have one of the following values. | `debugoptimized` | debug info is generated and the code is optimized (on most compilers this means `-g -O2`) | | `release` | full optimization, no debug info | -The build directory is mandatory. The reason for this is that it -simplifies the build process immensely. Meson will not under any -circumstances write files inside the source directory (if it does, it -is a bug and should be fixed). This means that the user does not need -to add a bunch of files to their revision control's ignore list. It -also means that you can create arbitrarily many build directories for -any given source tree. If we wanted to test building the source code -with the Clang compiler instead of the system default, we could just -type the following commands. - - cd /path/to/source/root - mkdir buildclang - cd buildclang - CC=clang CXX=clang++ meson .. - -This separation is even more powerful if your code has multiple -configuration options (such as multiple data backends). You can create -a separate subdirectory for each of them. You can also have build -directories for optimized builds, code coverage, static analysis and -so on. They are all neatly separated and use the same source -tree. Changing between different configurations is just a question of -changing to the corresponding directory. - -Unless otherwise mentioned, all following command line invocations are -meant to be run in the build directory. - -By default Meson will use the Ninja backend to build your project. If -you wish to use any of the other backends, you need to pass the -corresponding argument during configuration time. As an example, here -is how you would use Meson to generate a Visual studio solution. - - meson <source dir> <build dir> --backend=vs2010 - -You can then open the generated solution with Visual Studio and -compile it in the usual way. A list of backends can be obtained with -`meson --help`. - -Environment Variables --- - -Sometimes you want to add extra compiler flags, this can be done by -passing them in environment variables when calling meson. See [the -reference -tables](Reference-tables.md#compiler-and-linker-flag-envrionment-variables) -for a list of all the environment variables. Be aware however these -environment variables are only used for the native compiler and will -not affect the compiler used for cross-compiling, where the flags -specified in the cross file will be used. - -Furthermore it is possible to stop meson from adding flags itself by -using the `--buildtype=plain` option, in this case you must provide -the full compiler and linker arguments needed. - -Building the source -== +The build directory is mandatory. The reason for this is that it simplifies the +build process immensely. Meson will not under any circumstances write files +inside the source directory (if it does, it is a bug and should be fixed). This +means that the user does not need to add a bunch of files to their revision +control's ignore list. 
It also means that you can create arbitrarily many build +directories for any given source tree. + +For example, if we wanted to test building the source code with the Clang +compiler instead of the system default, we could just type the following +commands: + +```sh +cd /path/to/source/root +CC=clang CXX=clang++ meson setup buildclang +``` + +This separation is even more powerful if your code has multiple configuration +options (such as multiple data backends). You can create a separate +subdirectory for each of them. You can also have build directories for +optimized builds, code coverage, static analysis and so on. They are all neatly +separated and use the same source tree. Changing between different +configurations is just a question of changing to the corresponding directory. + +Unless otherwise mentioned, all following command line invocations are meant to +be run in the source directory. + +By default Meson will use the Ninja backend to build your project. If you wish +to use any of the other backends, you need to pass the corresponding argument +during configuration time. As an example, here is how you would use Meson to +generate a Visual studio solution. + +```sh +meson setup <build dir> --backend=vs2010 +``` + +You can then open the generated solution with Visual Studio and compile it in +the usual way. A list of backends can be obtained with `meson setup --help`. + +## Environment variables + +Sometimes you want to add extra compiler flags, this can be done by passing +them in environment variables when calling meson. See [the reference +tables](Reference-tables.md#compiler-and-linker-flag-envrionment-variables) for +a list of all the environment variables. Be aware however these environment +variables are only used for the native compiler and will not affect the +compiler used for cross-compiling, where the flags specified in the cross file +will be used. + +Furthermore it is possible to stop meson from adding flags itself by using the +`--buildtype=plain` option, in this case you must provide the full compiler and +linker arguments needed. + +## Building from the source If you are not using an IDE, Meson uses the [Ninja build -system](https://ninja-build.org/) to actually build the code. To start -the build, simply type the following command. +system](https://ninja-build.org/) to actually build the code. To start the +build, simply type the following command. - ninja +```sh +ninja -C builddir +``` -The main usability difference between Ninja and Make is that Ninja -will automatically detect the number of CPUs in your computer and -parallelize itself accordingly. You can override the amount of -parallel processes used with the command line argument `-j <num -processes>`. +The main usability difference between Ninja and Make is that Ninja will +automatically detect the number of CPUs in your computer and parallelize itself +accordingly. You can override the amount of parallel processes used with the +command line argument `-j <num processes>`. -It should be noted that after the initial configure step `ninja` is -the only command you ever need to type to compile. No matter how you -alter your source tree (short of moving it to a completely new -location), Meson will detect the changes and regenerate itself -accordingly. This is especially handy if you have multiple build -directories. Often one of them is used for development (the "debug" -build) and others only every now and then (such as a "static analysis" -build). 
Any configuration can be built just by `cd`'ing to the -corresponding directory and running Ninja. +It should be noted that after the initial configure step `ninja` is the only +command you ever need to type to compile. No matter how you alter your source +tree (short of moving it to a completely new location), Meson will detect the +changes and regenerate itself accordingly. This is especially handy if you have +multiple build directories. Often one of them is used for development (the +"debug" build) and others only every now and then (such as a "static analysis" +build). Any configuration can be built just by `cd`'ing to the corresponding +directory and running Ninja. -Running tests -== +## Running tests -Meson provides native support for running tests. The command to do -that is simple. +Meson provides native support for running tests. The command to do that is +simple. - ninja test +```sh +ninja -C builddir test +``` -Meson does not force the use of any particular testing framework. You -are free to use GTest, Boost Test, Check or even custom executables. +Meson does not force the use of any particular testing framework. You are free +to use GTest, Boost Test, Check or even custom executables. -Installing -== +## Installing Installing the built software is just as simple. - ninja install +```sh +ninja -C builddir install +``` Note that Meson will only install build targets explicitly tagged as -installable, as detailed in the [installing targets documentation](Installing.md). +installable, as detailed in the [installing targets +documentation](Installing.md). -By default Meson installs to `/usr/local`. This can be changed by -passing the command line argument `--prefix /your/prefix` to Meson -during configure time. Meson also supports the `DESTDIR` variable used -in e.g. building packages. It is used like this: +By default Meson installs to `/usr/local`. This can be changed by passing the +command line argument `--prefix /your/prefix` to Meson during configure time. +Meson also supports the `DESTDIR` variable used in e.g. building packages. It +is used like this: - DESTDIR=/path/to/staging ninja install +```sh +DESTDIR=/path/to/staging ninja -C builddir install +``` -Command line help -== +## Command line help -Meson has a standard command line help feature. It can be accessed -with the following command. +Meson has a standard command line help feature. It can be accessed with the +following command. meson --help -Exit status -== +## Exit status -Meson exits with status 0 if successful, 1 for problems with the command line or -meson.build file, and 2 for internal errors. +Meson exits with status 0 if successful, 1 for problems with the command line +or meson.build file, and 2 for internal errors. 
diff --git a/docs/markdown/Vala.md b/docs/markdown/Vala.md index 2184ebc..72b5003 100644 --- a/docs/markdown/Vala.md +++ b/docs/markdown/Vala.md @@ -18,7 +18,7 @@ dependencies = [ sources = files('app.vala') -executable('app_name', sources, dependencies : dependencies) +executable('app_name', sources, dependencies: dependencies) ``` You must always specify the `glib-2.0` and `gobject-2.0` libraries as @@ -53,7 +53,7 @@ This first example is a simple addition to the `meson.build` file because: * the library has a `pkg-config` file, `gtk+-3.0.pc` * the VAPI is distributed with Vala and so installed with the Vala compiler * the VAPI is installed in Vala's standard search path - * the VAPI has the same name as the `pkg-config` file, `gtk+-3.0.vapi` + * the VAPI, `gtk+-3.0.vapi`, has the same name as the `pkg-config` file Everything works seamlessly in the background and only a single extra line is needed: @@ -69,7 +69,7 @@ dependencies = [ sources = files('app.vala') -executable('app_name', sources, dependencies : dependencies) +executable('app_name', sources, dependencies: dependencies) ``` GTK+ is the graphical toolkit used by GNOME, elementary OS and other desktop @@ -104,7 +104,7 @@ dependencies = [ sources = files('app.vala') -executable('app_name', sources, dependencies : dependencies) +executable('app_name', sources, dependencies: dependencies) ``` Using `[GtkTemplate]` also requires the GTK+ user interface definition files to @@ -128,7 +128,7 @@ sources += import( 'gnome' ).compile_resources( source_dir: 'src/resources', ) -executable('app_name', sources, dependencies : dependencies) +executable('app_name', sources, dependencies: dependencies) ``` @@ -146,10 +146,11 @@ the VAPI search path. In Meson this is done with the `add_project_arguments()` function: ```meson -project('vala app', 'c', 'vala') +project('vala app', 'vala', 'c') + +vapi_dir = join_paths(meson.current_source_dir(), 'vapi') -add_project_arguments(['--vapidir', join_paths(meson.current_source_dir(), 'vapi')], - language: 'vala') +add_project_arguments(['--vapidir', vapi_dir], language: 'vala') dependencies = [ dependency('glib-2.0'), @@ -159,7 +160,7 @@ dependencies = [ sources = files('app.vala') -executable('app_name', sources, dependencies : dependencies) +executable('app_name', sources, dependencies: dependencies) ``` If the VAPI is for an external library then make sure that the VAPI name @@ -179,7 +180,7 @@ with the `vala-extra-vapis` repository. ### Libraries without pkg-config files A library that does not have a corresponding pkg-config file may mean `dependency()` is unsuitable for finding the C and Vala interface files. In this -case it is necessary to use `find_library()`. +case it is necessary to use the `find_library()` method of the compiler object. The first example uses Vala's POSIX binding. There is no pkg-config file because POSIX includes the standard C library on Unix systems. All that is needed is the @@ -198,7 +199,7 @@ dependencies = [ sources = files('app.vala') -executable('app_name', sources, dependencies : dependencies) +executable('app_name', sources, dependencies: dependencies) ``` The next example shows how to link with a C library where no additional VAPI is @@ -217,8 +218,39 @@ dependencies = [ sources = files('app.vala') -executable('app_name', sources, dependencies : dependencies) +executable('app_name', sources, dependencies: dependencies) ``` +The `required: false` means the build will continue when using another C library +that does not separate the maths library. 
See [Add math library (-lm) +portably](howtox.md#add-math-library-lm-portably). + +The final example shows how to use a library that does not have a pkg-config +file and the VAPI is in the `vapi` directory of your project source files: +```meson +project('vala app', 'vala', 'c') + +vapi_dir = join_paths(meson.current_source_dir(), 'vapi') + +add_project_arguments(['--vapidir', vapi_dir], language: 'vala') + +dependencies = [ + dependency('glib-2.0'), + dependency('gobject-2.0'), + meson.get_compiler('c').find_library('foo'), + meson.get_compiler('vala').find_library('foo', dir: vapi_dir), +] + +sources = files('app.vala') + +executable('app_name', sources, dependencies: dependencies) +``` +The `find_library()` method of the C compiler object will try to find the C +header files and the library to link with. + +The `find_library()` method of the Vala compiler object needs to have the `dir` +keyword added to include the project VAPI directory. This is not added +automatically by `add_project_arguments()`. + ## Building libraries @@ -260,7 +292,7 @@ Meson can generate a GIR as part of the build. For a Vala library the `vala_gir` option has to be set for the `library`: ```meson -foo_lib = library('foo', 'foo.vala', +foo_lib = shared_library('foo', 'foo.vala', vala_gir: 'Foo-1.0.gir', dependencies: [glib_dep, gobject_dep], install: true, diff --git a/docs/markdown/howtox.md b/docs/markdown/howtox.md index adcec7c..3d8515f 100644 --- a/docs/markdown/howtox.md +++ b/docs/markdown/howtox.md @@ -52,7 +52,9 @@ executable(..., dependencies : thread_dep) ## Set extra compiler and linker flags from the outside (when e.g. building distro packages) -The behavior is the same as with other build systems, with environment variables during first invocation. +The behavior is the same as with other build systems, with environment +variables during first invocation. Do not use these when you need to rebuild +the source ```console $ CFLAGS=-fsomething LDFLAGS=-Wl,--linker-flag meson <options> diff --git a/docs/markdown/snippets/introspect_breaking_format.md b/docs/markdown/snippets/introspect_breaking_format.md new file mode 100644 index 0000000..c96c82c --- /dev/null +++ b/docs/markdown/snippets/introspect_breaking_format.md @@ -0,0 +1,11 @@ +## Changed the JSON format of the introspection + +All paths used in the meson introspection JSON format are now absolute. This +affects the `filename` key in the targets introspection and the output of +`--buildsystem-files`. + +Furthermore, the `filename` and `install_filename` keys in the targets +introspection are now lists of strings with identical length. + +The `--traget-files` option is now deprecated, since the same information +can be acquired from the `--tragets` introspection API. diff --git a/docs/markdown/snippets/introspect_meson_info.md b/docs/markdown/snippets/introspect_meson_info.md new file mode 100644 index 0000000..42f2fda --- /dev/null +++ b/docs/markdown/snippets/introspect_meson_info.md @@ -0,0 +1,6 @@ +## Added the `meson-info.json` introspection file + +Meson now generates a `meson-info.json` file in the `meson-info` directory +to provide introspection information about the latest meson run. This file +is updated when the build configuration is changed and the build files are +(re)generated. 
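The two release snippets above describe the new `meson-info.json` file and the reworked targets introspection. As a hedged illustration (assuming a configured build directory named `builddir`; the JSON keys beyond those named in the snippets are not shown in this patch), the new data can be inspected like this:

```sh
# Pretty-print the top-level introspection file that Meson now writes
# into the meson-info directory of the build directory.
python3 -m json.tool builddir/meson-info/meson-info.json

# Query targets through the introspection API; per the snippet above,
# per-target file lists are available here rather than via --target-files.
meson introspect builddir --targets
```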
diff --git a/docs/markdown/snippets/introspect_multiple.md b/docs/markdown/snippets/introspect_multiple.md index 67f517a..15f0e29 100644 --- a/docs/markdown/snippets/introspect_multiple.md +++ b/docs/markdown/snippets/introspect_multiple.md @@ -19,4 +19,5 @@ configuration of the build directory. Additionlly the format of `meson introspect target` was changed: - New: the `sources` key. It stores the source files of a target and their compiler parameters. + - New: the `defined_in` key. It stores the meson file where a target is defined - Added new target types (`jar`, `shared module`). diff --git a/man/meson.1 b/man/meson.1 index 702ac4d..7eb8fb2 100644 --- a/man/meson.1 +++ b/man/meson.1 @@ -37,7 +37,7 @@ backend of Meson is Ninja, which can be invoked like this. You only need to run the Meson command once: when you first configure your build dir. After that you just run the build command. Meson will -autodetect changes in your source tree and regenerates all files +autodetect changes in your source tree and regenerate all files needed to build the project. The setup command is the default operation. If no actual command is diff --git a/mesonbuild/astinterpreter.py b/mesonbuild/astinterpreter.py index a447a55..f68aa7a 100644 --- a/mesonbuild/astinterpreter.py +++ b/mesonbuild/astinterpreter.py @@ -15,7 +15,7 @@ # This class contains the basic functionality needed to run any interpreter # or an interpreter-based tool. -from . import interpreterbase, mlog, mparser, mesonlib +from . import interpreterbase, mparser, mesonlib from . import environment from .interpreterbase import InterpreterException, InvalidArguments, BreakRequest, ContinueRequest diff --git a/mesonbuild/backend/backends.py b/mesonbuild/backend/backends.py index 39aa365..0637905 100644 --- a/mesonbuild/backend/backends.py +++ b/mesonbuild/backend/backends.py @@ -725,7 +725,7 @@ class Backend: elif isinstance(a, str): cmd_args.append(a) elif isinstance(a, build.Target): - cmd_args.append(self.get_target_filename(a)) + cmd_args.append(self.construct_target_rel_path(a, t.workdir)) else: raise MesonException('Bad object in test command.') ts = TestSerialisation(t.get_name(), t.project_name, t.suite, cmd, is_cross, @@ -737,6 +737,13 @@ class Backend: def write_test_serialisation(self, tests, datafile): pickle.dump(self.create_test_serialisation(tests), datafile) + def construct_target_rel_path(self, a, workdir): + if workdir is None: + return self.get_target_filename(a) + assert(os.path.isabs(workdir)) + abs_path = self.get_target_filename_abs(a) + return os.path.relpath(abs_path, workdir) + def generate_depmf_install(self, d): if self.build.dep_manifest_name is None: return diff --git a/mesonbuild/backend/ninjabackend.py b/mesonbuild/backend/ninjabackend.py index 3688f29..debb4fb 100644 --- a/mesonbuild/backend/ninjabackend.py +++ b/mesonbuild/backend/ninjabackend.py @@ -1473,7 +1473,7 @@ int dummy; command_template = ' command = {executable} $ARGS {output_args} $in $LINK_ARGS {cross_args} $aliasing\n' command = command_template.format( executable=' '.join(compiler.get_linker_exelist()), - cross_args=' '.join(cross_args), + cross_args=' '.join([quote_func(i) for i in cross_args]), output_args=' '.join(compiler.get_linker_output_args('$out')) ) description = ' description = Linking target $out.\n' @@ -1601,7 +1601,7 @@ rule FORTRAN_DEP_HACK%s command_template = ' command = {executable} $ARGS {cross_args} {output_args} {compile_only_args} $in\n' command = command_template.format( executable=' '.join([ninja_quote(i) for i in 
compiler.get_exelist()]), - cross_args=' '.join(compiler.get_cross_extra_flags(self.environment, False)) if is_cross else '', + cross_args=' '.join([quote_func(i) for i in compiler.get_cross_extra_flags(self.environment, False)]) if is_cross else '', output_args=' '.join(compiler.get_output_args('$out')), compile_only_args=' '.join(compiler.get_compile_only_args()) ) @@ -1659,7 +1659,7 @@ rule FORTRAN_DEP_HACK%s command_template = ' command = {executable} $ARGS {cross_args} {dep_args} {output_args} {compile_only_args} $in\n' command = command_template.format( executable=' '.join([ninja_quote(i) for i in compiler.get_exelist()]), - cross_args=' '.join(cross_args), + cross_args=' '.join([quote_func(i) for i in cross_args]), dep_args=' '.join(quoted_depargs), output_args=' '.join(compiler.get_output_args('$out')), compile_only_args=' '.join(compiler.get_compile_only_args()) @@ -1703,7 +1703,7 @@ rule FORTRAN_DEP_HACK%s output = ' '.join(compiler.get_output_args('$out')) command = " command = {executable} $ARGS {cross_args} {dep_args} {output_args} {compile_only_args} $in\n".format( executable=' '.join(compiler.get_exelist()), - cross_args=' '.join(cross_args), + cross_args=' '.join([quote_func(i) for i in cross_args]), dep_args=' '.join(quoted_depargs), output_args=output, compile_only_args=' '.join(compiler.get_compile_only_args()) diff --git a/mesonbuild/build.py b/mesonbuild/build.py index 91edbb8..52af562 100644 --- a/mesonbuild/build.py +++ b/mesonbuild/build.py @@ -111,8 +111,9 @@ class Build: self.environment = environment self.projects = {} self.targets = OrderedDict() - self.compilers = OrderedDict() - self.cross_compilers = OrderedDict() + # Coredata holds the state. This is just here for convenience. + self.compilers = environment.coredata.compilers + self.cross_compilers = environment.coredata.cross_compilers self.global_args = {} self.projects_args = {} self.global_link_args = {} @@ -145,6 +146,10 @@ class Build: def copy(self): other = Build(self.environment) for k, v in self.__dict__.items(): + if k in ['compilers', 'cross_compilers']: + # These alias coredata's fields of the same name, and must not + # become copies. + continue if isinstance(v, (list, dict, set, OrderedDict)): other.__dict__[k] = v.copy() else: @@ -155,19 +160,13 @@ class Build: for k, v in other.__dict__.items(): self.__dict__[k] = v - def add_compiler(self, compiler): + def ensure_static_linker(self, compiler): if self.static_linker is None and compiler.needs_static_linker(): self.static_linker = self.environment.detect_static_linker(compiler) - lang = compiler.get_language() - if lang not in self.compilers: - self.compilers[lang] = compiler - def add_cross_compiler(self, compiler): - if not self.cross_compilers: + def ensure_static_cross_linker(self, compiler): + if self.static_cross_linker is None and compiler.needs_static_linker(): self.static_cross_linker = self.environment.detect_static_linker(compiler) - lang = compiler.get_language() - if lang not in self.cross_compilers: - self.cross_compilers[lang] = compiler def get_project(self): return self.projects[''] diff --git a/mesonbuild/compilers/c.py b/mesonbuild/compilers/c.py index 6350eee..f7864c0 100644 --- a/mesonbuild/compilers/c.py +++ b/mesonbuild/compilers/c.py @@ -1659,6 +1659,9 @@ class CcrxCCompiler(CcrxCompiler, CCompiler): def get_linker_output_args(self, outputname): return ['-output=%s' % outputname] + def get_werror_args(self): + return ['-change_message=error'] + def get_include_args(self, path, is_system): if path == '': path = '.' 
diff --git a/mesonbuild/compilers/compilers.py b/mesonbuild/compilers/compilers.py index 3ef4ffc..34c0e3b 100644 --- a/mesonbuild/compilers/compilers.py +++ b/mesonbuild/compilers/compilers.py @@ -1309,6 +1309,8 @@ class CompilerType(enum.Enum): CCRX_WIN = 40 + PGI_STANDARD = 50 + @property def is_standard_compiler(self): return self.name in ('GCC_STANDARD', 'CLANG_STANDARD', 'ICC_STANDARD') @@ -1598,6 +1600,25 @@ class GnuCompiler(GnuLikeCompiler): return ['-fopenmp'] +class PGICompiler: + def __init__(self, compiler_type): + self.id = 'pgi' + self.compiler_type = compiler_type + + default_warn_args = ['-Minform=inform'] + self.warn_args = {'1': default_warn_args, + '2': default_warn_args, + '3': default_warn_args} + + def get_module_incdir_args(self): + return ('-module', ) + + def get_no_warn_args(self): + return ['-silent'] + + def openmp_flags(self): + return ['-fopenmp'] + class ElbrusCompiler(GnuCompiler): # Elbrus compiler is nearly like GCC, but does not support # PCH, LTO, sanitizers and color output as of version 1.21.x. diff --git a/mesonbuild/compilers/fortran.py b/mesonbuild/compilers/fortran.py index 8056969..2eb4c71 100644 --- a/mesonbuild/compilers/fortran.py +++ b/mesonbuild/compilers/fortran.py @@ -24,6 +24,7 @@ from .compilers import ( GnuCompiler, ElbrusCompiler, IntelCompiler, + PGICompiler ) from mesonbuild.mesonlib import EnvironmentException, is_osx @@ -372,20 +373,13 @@ class PathScaleFortranCompiler(FortranCompiler): class PGIFortranCompiler(FortranCompiler): def __init__(self, exelist, version, is_cross, exe_wrapper=None, **kwags): FortranCompiler.__init__(self, exelist, version, is_cross, exe_wrapper, **kwags) - self.id = 'pgi' - default_warn_args = ['-Minform=inform'] - self.warn_args = {'1': default_warn_args, - '2': default_warn_args, - '3': default_warn_args} - - def get_module_incdir_args(self): - return ('-module', ) - - def get_no_warn_args(self): - return ['-silent'] + PGICompiler.__init__(self, CompilerType.PGI_STANDARD) - def openmp_flags(self): - return ['-fopenmp'] + def get_always_args(self): + """PGI doesn't have -pipe.""" + val = super().get_always_args() + val.remove('-pipe') + return val class Open64FortranCompiler(FortranCompiler): diff --git a/mesonbuild/coredata.py b/mesonbuild/coredata.py index 8c9d513..d5f7d94 100644 --- a/mesonbuild/coredata.py +++ b/mesonbuild/coredata.py @@ -553,6 +553,45 @@ class CoreData: self.set_options(options, subproject) + def process_new_compilers(self, lang: str, comp, cross_comp, cmd_line_options): + from . import compilers + self.compilers[lang] = comp + # Native compiler always exist so always add its options. + new_options = comp.get_options() + if cross_comp is not None: + self.cross_compilers[lang] = cross_comp + new_options.update(cross_comp.get_options()) + + optprefix = lang + '_' + for k, o in new_options.items(): + if not k.startswith(optprefix): + raise MesonException('Internal error, %s has incorrect prefix.' % k) + if k in cmd_line_options: + o.set_value(cmd_line_options[k]) + self.compiler_options.setdefault(k, o) + + # Unlike compiler and linker flags, preprocessor flags are not in + # compiler_options because they are not visible to user. 
+ preproc_flags = comp.get_preproc_flags() + preproc_flags = shlex.split(preproc_flags) + self.external_preprocess_args.setdefault(lang, preproc_flags) + + enabled_opts = [] + for optname in comp.base_options: + if optname in self.base_options: + continue + oobj = compilers.base_options[optname] + if optname in cmd_line_options: + oobj.set_value(cmd_line_options[optname]) + enabled_opts.append(optname) + self.base_options[optname] = oobj + self.emit_base_options_warnings(enabled_opts) + + def emit_base_options_warnings(self, enabled_opts: list): + if 'b_bitcode' in enabled_opts: + mlog.warning('Base option \'b_bitcode\' is enabled, which is incompatible with many linker options. Incompatible options such as such as \'b_asneeded\' have been disabled.') + mlog.warning('Please see https://mesonbuild.com/Builtin-options.html#Notes_about_Apple_Bitcode_support for more details.') + class CmdLineFileParser(configparser.ConfigParser): def __init__(self): # We don't want ':' as key delimiter, otherwise it would break when diff --git a/mesonbuild/dependencies/base.py b/mesonbuild/dependencies/base.py index 35237d0..64c5100 100644 --- a/mesonbuild/dependencies/base.py +++ b/mesonbuild/dependencies/base.py @@ -437,17 +437,21 @@ class ConfigToolDependency(ExternalDependency): def report_config(self, version, req_version): """Helper method to print messages about the tool.""" + + found_msg = [mlog.bold(self.tool_name), 'found:'] + if self.config is None: - if version is not None: - mlog.log('Found', mlog.bold(self.tool_name), repr(version), - mlog.red('NO'), '(needed', req_version, ')') - else: - mlog.log('Found', mlog.bold(self.tool_name), repr(req_version), - mlog.red('NO')) - return False - mlog.log('Found {}:'.format(self.tool_name), mlog.bold(shutil.which(self.config[0])), - '({})'.format(version)) - return True + found_msg.append(mlog.red('NO')) + if version is not None and req_version is not None: + found_msg.append('found {!r} but need {!r}'.format(version, req_version)) + elif req_version: + found_msg.append('need {!r}'.format(req_version)) + else: + found_msg += [mlog.green('YES'), '({})'.format(shutil.which(self.config[0])), version] + + mlog.log(*found_msg) + + return self.config is not None def get_config_value(self, args, stage): p, out, err = Popen_safe(self.config + args) @@ -825,10 +829,10 @@ class PkgConfigDependency(ExternalDependency): pkgbin = False if not self.silent: if pkgbin: - mlog.log('Found pkg-config:', mlog.bold(pkgbin.get_path()), - '(%s)' % out.strip()) + mlog.log(mlog.bold('pkg-config'), 'found:', mlog.green('YES'), '({})'.format(pkgbin.get_path()), + out.strip()) else: - mlog.log('Found Pkg-config:', mlog.red('NO')) + mlog.log(mlog.bold('pkg-config'), 'found:', mlog.red('NO')) return pkgbin def extract_field(self, la_file, fieldname): @@ -1984,17 +1988,14 @@ class ExtraFrameworkDependency(ExternalDependency): def get_dep_identifier(name, kwargs, want_cross): - # Need immutable objects since the identifier will be used as a dict key - version_reqs = listify(kwargs.get('version', [])) - if isinstance(version_reqs, list): - version_reqs = frozenset(version_reqs) - identifier = (name, version_reqs, want_cross) + identifier = (name, want_cross) for key, value in kwargs.items(): - # 'version' is embedded above as the second element for easy access + # 'version' is irrelevant for caching; the caller must check version matches # 'native' is handled above with `want_cross` # 'required' is irrelevant for caching; the caller handles it separately # 'fallback' subprojects cannot be 
cached -- they must be initialized - if key in ('version', 'native', 'required', 'fallback',): + # 'default_options' is only used in fallback case + if key in ('version', 'native', 'required', 'fallback', 'default_options'): continue # All keyword arguments are strings, ints, or lists (or lists of lists) if isinstance(value, list): @@ -2099,12 +2100,7 @@ def find_external_dependency(name, env, kwargs): raise DependencyException('Dependency "%s" not found' % (name) + (', tried %s' % (tried) if tried else '')) - # return the last failed dependency object - if pkgdep: - return pkgdep[-1] - - # this should never happen - raise DependencyException('Dependency "%s" not found, but no dependency object to return' % (name)) + return NotFoundDependency(env) def _build_external_dependency_list(name, env, kwargs): diff --git a/mesonbuild/environment.py b/mesonbuild/environment.py index e99174c..0e74851 100644 --- a/mesonbuild/environment.py +++ b/mesonbuild/environment.py @@ -33,6 +33,7 @@ from .compilers import ( is_source, ) from .compilers import ( + Compiler, ArmCCompiler, ArmCPPCompiler, ArmclangCCompiler, @@ -220,6 +221,8 @@ def detect_cpu_family(compilers): trial = 'arm' elif trial.startswith('ppc64'): trial = 'ppc64' + elif trial == 'macppc': + trial = 'ppc' elif trial == 'powerpc': trial = 'ppc' # FreeBSD calls both ppc and ppc64 "powerpc". @@ -246,6 +249,10 @@ def detect_cpu_family(compilers): trial = 'arm' # Add more quirks here as bugs are reported. Keep in sync with detect_cpu() # below. + elif trial == 'parisc64': + # ATM there is no 64 bit userland for PA-RISC. Thus always + # report it as 32 bit for simplicity. + trial = 'parisc' if trial not in known_cpu_families: mlog.warning('Unknown CPU family {!r}, please report this at ' @@ -398,7 +405,7 @@ class Environment: self.default_objc = ['cc'] self.default_objcpp = ['c++'] self.default_d = ['ldc2', 'ldc', 'gdc', 'dmd'] - self.default_fortran = ['gfortran', 'g95', 'f95', 'f90', 'f77', 'ifort'] + self.default_fortran = ['gfortran', 'g95', 'f95', 'f90', 'f77', 'ifort', 'pgfortran'] self.default_java = ['javac'] self.default_rust = ['rustc'] self.default_swift = ['swiftc'] @@ -967,7 +974,7 @@ class Environment: return compilers.SwiftCompiler(exelist, version) raise EnvironmentException('Unknown compiler "' + ' '.join(exelist) + '"') - def detect_compilers(self, lang, need_cross_compiler): + def compilers_from_language(self, lang: str, need_cross_compiler: bool): comp = None cross_comp = None if lang == 'c': @@ -1015,11 +1022,28 @@ class Environment: if need_cross_compiler: raise EnvironmentException('Cross compilation with Swift is not working yet.') # cross_comp = self.environment.detect_fortran_compiler(True) + else: + return None, None + + return comp, cross_comp + + def check_compilers(self, lang: str, comp: Compiler, cross_comp: Compiler): + if comp is None: + raise EnvironmentException('Tried to use unknown language "%s".' 
% lang) + + comp.sanity_check(self.get_scratch_dir(), self) + if cross_comp: + cross_comp.sanity_check(self.get_scratch_dir(), self) + def detect_compilers(self, lang: str, need_cross_compiler: bool): + (comp, cross_comp) = self.compilers_from_language(lang, need_cross_compiler) + if comp is not None: + self.coredata.process_new_compilers(lang, comp, cross_comp, self.cmd_line_options) return comp, cross_comp def detect_static_linker(self, compiler): - linker = self.binaries.host.lookup_entry('ar') + for_machine = MachineChoice.HOST if compiler.is_cross else MachineChoice.BUILD + linker = self.binaries[for_machine].lookup_entry('ar') if linker is not None: linkers = [linker] else: diff --git a/mesonbuild/interpreter.py b/mesonbuild/interpreter.py index 9ebce70..d2c1ffe 100644 --- a/mesonbuild/interpreter.py +++ b/mesonbuild/interpreter.py @@ -37,7 +37,6 @@ import re, shlex import subprocess from collections import namedtuple from pathlib import PurePath -import traceback import functools import importlib @@ -101,7 +100,7 @@ def extract_required_kwarg(kwargs, subproject, feature_check=None, default=True) disabled = True elif option.is_enabled(): required = True - elif isinstance(required, bool): + elif isinstance(val, bool): required = val else: raise InterpreterException('required keyword argument must be boolean or a feature option') @@ -1032,7 +1031,7 @@ class CompilerHolder(InterpreterObject): typename = args[0] prefix = kwargs.get('prefix', '') if not isinstance(prefix, str): - raise InterpreterException('Prefix argument of sizeof must be a string.') + raise InterpreterException('Prefix argument of alignment must be a string.') extra_args = mesonlib.stringlistify(kwargs.get('args', [])) deps, msg = self.determine_dependencies(kwargs) result = self.compiler.alignment(typename, prefix, self.environment, @@ -2352,7 +2351,7 @@ external dependencies (including libraries) must go to "dependencies".''') def do_subproject(self, dirname, kwargs): disabled, required, feature = extract_required_kwarg(kwargs, self.subproject) if disabled: - mlog.log('\nSubproject', mlog.bold(dirname), ':', 'skipped: feature', mlog.bold(feature), 'disabled') + mlog.log('Subproject', mlog.bold(dirname), ':', 'skipped: feature', mlog.bold(feature), 'disabled') return self.disabled_subproject(dirname) default_options = mesonlib.stringlistify(kwargs.get('default_options', [])) @@ -2373,39 +2372,37 @@ external dependencies (including libraries) must go to "dependencies".''') raise InvalidCode('Recursive include of subprojects: %s.' % incpath) if dirname in self.subprojects: subproject = self.subprojects[dirname] - if required and not subproject.found(): raise InterpreterException('Subproject "%s/%s" required but not found.' % ( self.subproject_dir, dirname)) - return subproject + subproject_dir_abs = os.path.join(self.environment.get_source_dir(), self.subproject_dir) r = wrap.Resolver(subproject_dir_abs, self.coredata.get_builtin_option('wrap_mode')) try: resolved = r.resolve(dirname) except wrap.WrapException as e: subprojdir = os.path.join(self.subproject_dir, r.directory) - if not required: - mlog.log('\nSubproject ', mlog.bold(subprojdir), 'is buildable:', mlog.red('NO'), '(disabling)\n') - return self.disabled_subproject(dirname) - if isinstance(e, wrap.WrapNotFoundException): # if the reason subproject execution failed was because # the directory doesn't exist, try to give some helpful # advice if it's a nested subproject that needs # promotion... 
self.print_nested_info(dirname) - - msg = 'Failed to initialize {!r}:\n{}' - raise InterpreterException(msg.format(subprojdir, e)) + if not required: + mlog.log(e) + mlog.log('Subproject ', mlog.bold(subprojdir), 'is buildable:', mlog.red('NO'), '(disabling)') + return self.disabled_subproject(dirname) + raise e subdir = os.path.join(self.subproject_dir, resolved) os.makedirs(os.path.join(self.build.environment.get_build_dir(), subdir), exist_ok=True) self.global_args_frozen = True mlog.log() with mlog.nested(): - try: - mlog.log('\nExecuting subproject', mlog.bold(dirname), '\n') + mlog.log('Executing subproject', mlog.bold(dirname), '\n') + try: + with mlog.nested(): new_build = self.build.copy() subi = Interpreter(new_build, self.backend, dirname, subdir, self.subproject_dir, self.modules, default_options) @@ -2414,14 +2411,21 @@ external dependencies (including libraries) must go to "dependencies".''') subi.subproject_stack = self.subproject_stack + [dirname] current_active = self.active_projectname subi.run() - mlog.log('\nSubproject', mlog.bold(dirname), 'finished.') - except Exception as e: - if not required: - mlog.log(e) - mlog.log('\nSubproject', mlog.bold(dirname), 'is buildable:', mlog.red('NO'), '(disabling)') - return self.disabled_subproject(dirname) - else: - raise e + mlog.log('Subproject', mlog.bold(dirname), 'finished.') + # Invalid code is always an error + except InvalidCode: + raise + except Exception as e: + if not required: + with mlog.nested(): + # Suppress the 'ERROR:' prefix because this exception is not + # fatal and VS CI treat any logs with "ERROR:" as fatal. + mlog.exception(e, prefix=None) + mlog.log('\nSubproject', mlog.bold(dirname), 'is buildable:', mlog.red('NO'), '(disabling)') + return self.disabled_subproject(dirname) + raise e + + mlog.log() if 'version' in kwargs: pv = subi.project_version @@ -2653,36 +2657,6 @@ external dependencies (including libraries) must go to "dependencies".''') self.validate_arguments(args, 0, []) raise Exception() - def detect_compilers(self, lang, need_cross_compiler): - comp, cross_comp = self.environment.detect_compilers(lang, need_cross_compiler) - if comp is None: - raise InvalidCode('Tried to use unknown language "%s".' % lang) - - comp.sanity_check(self.environment.get_scratch_dir(), self.environment) - self.coredata.compilers[lang] = comp - # Native compiler always exist so always add its options. - new_options = comp.get_options() - if cross_comp is not None: - cross_comp.sanity_check(self.environment.get_scratch_dir(), self.environment) - self.coredata.cross_compilers[lang] = cross_comp - new_options.update(cross_comp.get_options()) - - optprefix = lang + '_' - for k, o in new_options.items(): - if not k.startswith(optprefix): - raise InterpreterException('Internal error, %s has incorrect prefix.' % k) - if k in self.environment.cmd_line_options: - o.set_value(self.environment.cmd_line_options[k]) - self.coredata.compiler_options.setdefault(k, o) - - # Unlike compiler and linker flags, preprocessor flags are not in - # compiler_options because they are not visible to user. 
- preproc_flags = comp.get_preproc_flags() - preproc_flags = shlex.split(preproc_flags) - self.coredata.external_preprocess_args.setdefault(lang, preproc_flags) - - return comp, cross_comp - def add_languages(self, args, required): success = True need_cross_compiler = self.environment.is_cross_build() @@ -2693,7 +2667,8 @@ external dependencies (including libraries) must go to "dependencies".''') cross_comp = self.coredata.cross_compilers.get(lang, None) else: try: - (comp, cross_comp) = self.detect_compilers(lang, need_cross_compiler) + (comp, cross_comp) = self.environment.detect_compilers(lang, need_cross_compiler) + self.environment.check_compilers(lang, comp, cross_comp) except Exception: if not required: mlog.log('Compiler for language', mlog.bold(lang), 'not found.') @@ -2707,34 +2682,14 @@ external dependencies (including libraries) must go to "dependencies".''') version_string = '(%s %s)' % (comp.id, comp.version) mlog.log('Native', comp.get_display_language(), 'compiler:', mlog.bold(' '.join(comp.get_exelist())), version_string) - self.build.add_compiler(comp) + self.build.ensure_static_linker(comp) if need_cross_compiler: version_string = '(%s %s)' % (cross_comp.id, cross_comp.version) mlog.log('Cross', cross_comp.get_display_language(), 'compiler:', mlog.bold(' '.join(cross_comp.get_exelist())), version_string) - self.build.add_cross_compiler(cross_comp) - if self.environment.is_cross_build() and not need_cross_compiler: - self.build.add_cross_compiler(comp) - self.add_base_options(comp) + self.build.ensure_static_cross_linker(comp) return success - def emit_base_options_warnings(self, enabled_opts): - if 'b_bitcode' in enabled_opts: - mlog.warning('Base option \'b_bitcode\' is enabled, which is incompatible with many linker options. Incompatible options such as such as \'b_asneeded\' have been disabled.') - mlog.warning('Please see https://mesonbuild.com/Builtin-options.html#Notes_about_Apple_Bitcode_support for more details.') - - def add_base_options(self, compiler): - enabled_opts = [] - for optname in compiler.base_options: - if optname in self.coredata.base_options: - continue - oobj = compilers.base_options[optname] - if optname in self.environment.cmd_line_options: - oobj.set_value(self.environment.cmd_line_options[optname]) - enabled_opts.append(optname) - self.coredata. base_options[optname] = oobj - self.emit_base_options_warnings(enabled_opts) - def program_from_file_for(self, for_machine, prognames, silent): bins = self.environment.binaries[for_machine] for p in prognames: @@ -2853,28 +2808,24 @@ external dependencies (including libraries) must go to "dependencies".''') want_cross = not kwargs['native'] else: want_cross = is_cross + identifier = dependencies.get_dep_identifier(name, kwargs, want_cross) - cached_dep = None - # Check if we've already searched for and found this dep - if identifier in self.coredata.deps: - cached_dep = self.coredata.deps[identifier] - mlog.log('Dependency', mlog.bold(name), - 'found:', mlog.green('YES'), '(cached)') - else: - # Check if exactly the same dep with different version requirements - # was found already. - wanted = identifier[1] - for trial, trial_dep in self.coredata.deps.items(): - # trial[1], identifier[1] are the version requirements - if trial[0] != identifier[0] or trial[2:] != identifier[2:]: - continue - found = trial_dep.get_version() - if not wanted or mesonlib.version_compare_many(found, wanted)[0]: - # We either don't care about the version, or our - # version requirements matched the trial dep's version. 
- cached_dep = trial_dep - break - return identifier, cached_dep + cached_dep = self.coredata.deps.get(identifier) + if cached_dep: + if not cached_dep.found(): + mlog.log('Dependency', mlog.bold(name), + 'found:', mlog.red('NO'), '(cached)') + return identifier, cached_dep + + # Verify the cached dep version match + wanted = kwargs.get('version', []) + found = cached_dep.get_version() + if not wanted or mesonlib.version_compare_many(found, wanted)[0]: + mlog.log('Dependency', mlog.bold(name), + 'found:', mlog.green('YES'), '(cached)') + return identifier, cached_dep + + return identifier, None @staticmethod def check_subproject_version(wanted, found): @@ -2884,8 +2835,11 @@ external dependencies (including libraries) must go to "dependencies".''') return False return True - def get_subproject_dep(self, name, dirname, varname, required): - dep = DependencyHolder(NotFoundDependency(self.environment), self.subproject) + def notfound_dependency(self): + return DependencyHolder(NotFoundDependency(self.environment), self.subproject) + + def get_subproject_dep(self, display_name, dirname, varname, kwargs): + dep = self.notfound_dependency() try: subproject = self.subprojects[dirname] if subproject.found(): @@ -2897,41 +2851,34 @@ external dependencies (including libraries) must go to "dependencies".''') raise InvalidCode('Fetched variable {!r} in the subproject {!r} is ' 'not a dependency object.'.format(varname, dirname)) + required = kwargs.get('required', True) + wanted = kwargs.get('version', 'undefined') + subproj_path = os.path.join(self.subproject_dir, dirname) + if not dep.found(): if required: raise DependencyException('Could not find dependency {} in subproject {}' ''.format(varname, dirname)) # If the dependency is not required, don't raise an exception - subproj_path = os.path.join(self.subproject_dir, dirname) - mlog.log('Dependency', mlog.bold(name), 'from subproject', + mlog.log('Dependency', mlog.bold(display_name), 'from subproject', mlog.bold(subproj_path), 'found:', mlog.red('NO')) + return dep - return dep + found = dep.held_object.get_version() + if not self.check_subproject_version(wanted, found): + if required: + raise DependencyException('Version {} of subproject dependency {} already ' + 'cached, requested incompatible version {} for ' + 'dep {}'.format(found, dirname, wanted, display_name)) - def _find_cached_fallback_dep(self, name, dirname, varname, wanted, required): - if dirname not in self.subprojects: - return False - dep = self.get_subproject_dep(name, dirname, varname, required) - if not dep.found(): - return dep + mlog.log('Subproject', mlog.bold(subproj_path), 'dependency', + mlog.bold(display_name), 'version is', mlog.bold(found), + 'but', mlog.bold(wanted), 'is required.') + return self.notfound_dependency() - found = dep.version_method([], {}) - # Don't do a version check if the dependency is not found and not required - if not dep.found_method([], {}) and not required: - subproj_path = os.path.join(self.subproject_dir, dirname) - mlog.log('Dependency', mlog.bold(name), 'from subproject', - mlog.bold(subproj_path), 'found:', mlog.red('NO'), '(cached)') - return dep - if self.check_subproject_version(wanted, found): - subproj_path = os.path.join(self.subproject_dir, dirname) - mlog.log('Dependency', mlog.bold(name), 'from subproject', - mlog.bold(subproj_path), 'found:', mlog.green('YES'), '(cached)') - return dep - if required: - raise DependencyException('Version {} of subproject dependency {} already ' - 'cached, requested incompatible version {} for ' - 
'dep {}'.format(found, dirname, wanted, name)) - return None + mlog.log('Dependency', mlog.bold(display_name), 'from subproject', + mlog.bold(subproj_path), 'found:', mlog.green('YES')) + return dep def _handle_featurenew_dependencies(self, name): 'Do a feature check on dependencies used by this subproject' @@ -2973,68 +2920,53 @@ external dependencies (including libraries) must go to "dependencies".''') disabled, required, feature = extract_required_kwarg(kwargs, self.subproject) if disabled: mlog.log('Dependency', mlog.bold(display_name), 'skipped: feature', mlog.bold(feature), 'disabled') - return DependencyHolder(NotFoundDependency(self.environment), self.subproject) - if'default_options' in kwargs and 'fallback' not in kwargs: + return self.notfound_dependency() + + has_fallback = 'fallback' in kwargs + if 'default_options' in kwargs and not has_fallback: mlog.warning('The "default_options" keyworg argument does nothing without a "fallback" keyword argument.') # writing just "dependency('')" is an error, because it can only fail - if name == '' and required and 'fallback' not in kwargs: + if name == '' and required and not has_fallback: raise InvalidArguments('Dependency is both required and not-found') if '<' in name or '>' in name or '=' in name: raise InvalidArguments('Characters <, > and = are forbidden in dependency names. To specify' 'version\n requirements use the \'version\' keyword argument instead.') - identifier, cached_dep = self._find_cached_dep(name, kwargs) + identifier, cached_dep = self._find_cached_dep(name, kwargs) if cached_dep: if required and not cached_dep.found(): m = 'Dependency {!r} was already checked and was not found' raise DependencyException(m.format(display_name)) - dep = cached_dep - else: - # If the dependency has already been configured, possibly by - # a higher level project, try to use it first. - if 'fallback' in kwargs: - dirname, varname = self.get_subproject_infos(kwargs) - wanted = kwargs.get('version', 'undefined') - dep = self._find_cached_fallback_dep(name, dirname, varname, wanted, required) - if dep: - return dep - - # We need to actually search for this dep - exception = None - dep = NotFoundDependency(self.environment) - - # Unless a fallback exists and is forced ... - if self.coredata.get_builtin_option('wrap_mode') == WrapMode.forcefallback and 'fallback' in kwargs: - pass - # ... search for it outside the project - elif name != '': - self._handle_featurenew_dependencies(name) - try: - dep = dependencies.find_external_dependency(name, self.environment, kwargs) - except DependencyException as e: - exception = e - - # Search inside the projects list - if not dep.found(): - if 'fallback' in kwargs: - if not exception: - exception = DependencyException("fallback for %s not found" % display_name) - fallback_dep = self.dependency_fallback(name, kwargs) - if fallback_dep: - # Never add fallback deps to self.coredata.deps since we - # cannot cache them. They must always be evaluated else - # we won't actually read all the build files. - return fallback_dep - if required: - assert(exception is not None) - raise exception - - # Only store found-deps in the cache - if dep.found(): - self.coredata.deps[identifier] = dep - return DependencyHolder(dep, self.subproject) + return DependencyHolder(cached_dep, self.subproject) + + # If the dependency has already been configured, possibly by + # a higher level project, try to use it first. 
+ if has_fallback: + dirname, varname = self.get_subproject_infos(kwargs) + if dirname in self.subprojects: + return self.get_subproject_dep(name, dirname, varname, kwargs) + + wrap_mode = self.coredata.get_builtin_option('wrap_mode') + forcefallback = wrap_mode == WrapMode.forcefallback and has_fallback + if name != '' and not forcefallback: + self._handle_featurenew_dependencies(name) + kwargs['required'] = required and not has_fallback + dep = dependencies.find_external_dependency(name, self.environment, kwargs) + kwargs['required'] = required + # Only store found-deps in the cache + # Never add fallback deps to self.coredata.deps since we + # cannot cache them. They must always be evaluated else + # we won't actually read all the build files. + if dep.found(): + self.coredata.deps[identifier] = dep + return DependencyHolder(dep, self.subproject) + + if has_fallback: + return self.dependency_fallback(display_name, kwargs) + + return self.notfound_dependency() @FeatureNew('disabler', '0.44.0') @noKwargs @@ -3066,13 +2998,12 @@ external dependencies (including libraries) must go to "dependencies".''') raise InterpreterException('Fallback info must have exactly two items.') return fbinfo - def dependency_fallback(self, name, kwargs): - display_name = name if name else '(anonymous)' + def dependency_fallback(self, display_name, kwargs): if self.coredata.get_builtin_option('wrap_mode') == WrapMode.nofallback: mlog.log('Not looking for a fallback subproject for the dependency', mlog.bold(display_name), 'because:\nUse of fallback' 'dependencies is disabled.') - return None + return self.notfound_dependency() elif self.coredata.get_builtin_option('wrap_mode') == WrapMode.forcefallback: mlog.log('Looking for a fallback subproject for the dependency', mlog.bold(display_name), 'because:\nUse of fallback dependencies is forced.') @@ -3080,52 +3011,12 @@ external dependencies (including libraries) must go to "dependencies".''') mlog.log('Looking for a fallback subproject for the dependency', mlog.bold(display_name)) dirname, varname = self.get_subproject_infos(kwargs) - # Try to execute the subproject - try: - sp_kwargs = {} - try: - sp_kwargs['default_options'] = kwargs['default_options'] - except KeyError: - pass - self.do_subproject(dirname, sp_kwargs) - # Invalid code is always an error - except InvalidCode: - raise - # If the subproject execution failed in a non-fatal way, don't raise an - # exception; let the caller handle things. 
- except Exception as e: - msg = ['Couldn\'t use fallback subproject in', - mlog.bold(os.path.join(self.subproject_dir, dirname)), - 'for the dependency', mlog.bold(display_name), '\nReason:'] - if isinstance(e, mesonlib.MesonException): - msg.append(e.get_msg_with_context()) - else: - msg.append(traceback.format_exc()) - mlog.log(*msg) - return None - required = kwargs.get('required', True) - dep = self.get_subproject_dep(name, dirname, varname, required) - if not dep.found(): - return dep - subproj_path = os.path.join(self.subproject_dir, dirname) - # Check if the version of the declared dependency matches what we want - if 'version' in kwargs: - wanted = kwargs['version'] - found = dep.version_method([], {}) - # Don't do a version check if the dependency is not found and not required - if not dep.found_method([], {}) and not required: - subproj_path = os.path.join(self.subproject_dir, dirname) - mlog.log('Dependency', mlog.bold(display_name), 'from subproject', - mlog.bold(subproj_path), 'found:', mlog.red('NO')) - return dep - if not self.check_subproject_version(wanted, found): - mlog.log('Subproject', mlog.bold(subproj_path), 'dependency', - mlog.bold(display_name), 'version is', mlog.bold(found), - 'but', mlog.bold(wanted), 'is required.') - return None - mlog.log('Dependency', mlog.bold(display_name), 'from subproject', - mlog.bold(subproj_path), 'found:', mlog.green('YES')) - return dep + sp_kwargs = { + 'default_options': kwargs.get('default_options', []), + 'required': kwargs.get('required', True), + } + self.do_subproject(dirname, sp_kwargs) + return self.get_subproject_dep(display_name, dirname, varname, kwargs) @FeatureNewKwargs('executable', '0.42.0', ['implib']) @permittedKwargs(permitted_kwargs['executable']) @@ -3532,6 +3423,7 @@ This will become a hard error in the future.''' % kwargs['input']) @FeatureNewKwargs('configure_file', '0.47.0', ['copy', 'output_format', 'install_mode', 'encoding']) @FeatureNewKwargs('configure_file', '0.46.0', ['format']) @FeatureNewKwargs('configure_file', '0.41.0', ['capture']) + @FeatureNewKwargs('configure_file', '0.50.0', ['install']) @permittedKwargs(permitted_kwargs['configure_file']) def func_configure_file(self, node, args, kwargs): if len(args) > 0: @@ -3691,8 +3583,17 @@ This will become a hard error in the future.''' % kwargs['input']) # Install file if requested, we check for the empty string # for backwards compatibility. That was the behaviour before # 0.45.0 so preserve it. - idir = kwargs.get('install_dir', None) - if isinstance(idir, str) and idir: + idir = kwargs.get('install_dir', '') + if not isinstance(idir, str): + raise InterpreterException('"install_dir" must be a string') + install = kwargs.get('install', idir != '') + if not isinstance(install, bool): + raise InterpreterException('"install" must be a boolean') + if install: + if not idir: + raise InterpreterException('"install_dir" must be specified ' + 'when "install" in a configure_file ' + 'is true') cfile = mesonlib.File.from_built_file(ofile_path, ofile_fname) install_mode = self._get_kwarg_install_mode(kwargs) self.build.data.append(build.Data([cfile], idir, install_mode)) diff --git a/mesonbuild/interpreterbase.py b/mesonbuild/interpreterbase.py index 707b8f7..48c5220 100644 --- a/mesonbuild/interpreterbase.py +++ b/mesonbuild/interpreterbase.py @@ -749,7 +749,6 @@ The result of this is undefined and will become a hard error in a future Meson r except IndexError: raise InterpreterException('Index %d out of bounds of array of size %d.' 
% (index, len(iobject))) - def function_call(self, node): func_name = node.func_name (posargs, kwargs) = self.reduce_arguments(node.args) diff --git a/mesonbuild/mconf.py b/mesonbuild/mconf.py index 2863b0c..b8fb3c6 100644 --- a/mesonbuild/mconf.py +++ b/mesonbuild/mconf.py @@ -148,6 +148,7 @@ class Conf: def run(options): coredata.parse_cmd_line_options(options) builddir = os.path.abspath(os.path.realpath(options.builddir)) + c = None try: c = Conf(builddir) save = False @@ -163,7 +164,10 @@ def run(options): if save: c.save() mintro.update_build_options(c.coredata, c.build.environment.info_dir) + mintro.write_meson_info_file(c.build, []) except ConfException as e: print('Meson configurator encountered an error:') + if c is not None and c.build is not None: + mintro.write_meson_info_file(c.build, [e]) raise e return 0 diff --git a/mesonbuild/mesonmain.py b/mesonbuild/mesonmain.py index 69c3f9f..037d76c 100644 --- a/mesonbuild/mesonmain.py +++ b/mesonbuild/mesonmain.py @@ -21,7 +21,7 @@ import codecs from . import mesonlib from . import mlog -from . import mconf, minit, minstall, mintro, msetup, mtest, rewriter, msubprojects +from . import mconf, minit, minstall, mintro, msetup, mtest, rewriter, msubprojects, munstable_coredata from .mesonlib import MesonException from .environment import detect_msys2_arch from .wrap import wraptool @@ -58,6 +58,8 @@ class CommandLineParser: help=argparse.SUPPRESS) self.add_command('runpython', self.add_runpython_arguments, self.run_runpython_command, help=argparse.SUPPRESS) + self.add_command('unstable-coredata', munstable_coredata.add_arguments, munstable_coredata.run, + help=argparse.SUPPRESS) def add_command(self, name, add_arguments_func, run_func, help): # FIXME: Cannot have hidden subparser: diff --git a/mesonbuild/mintro.py b/mesonbuild/mintro.py index 3382e0d..36368af 100644 --- a/mesonbuild/mintro.py +++ b/mesonbuild/mintro.py @@ -33,25 +33,67 @@ from .backend import backends import sys, os import pathlib +def get_meson_info_file(info_dir: str): + return os.path.join(info_dir, 'meson-info.json') + +def get_meson_introspection_version(): + return '1.0.0' + +def get_meson_introspection_required_version(): + return ['>=1.0', '<2.0'] + +def get_meson_introspection_types(coredata: cdata.CoreData = None, builddata: build.Build = None, backend: backends.Backend = None): + if backend and builddata: + benchmarkdata = backend.create_test_serialisation(builddata.get_benchmarks()) + testdata = backend.create_test_serialisation(builddata.get_tests()) + installdata = backend.create_install_data() + else: + benchmarkdata = testdata = installdata = None + + return { + 'benchmarks': { + 'func': lambda: list_benchmarks(benchmarkdata), + 'desc': 'List all benchmarks.', + }, + 'buildoptions': { + 'func': lambda: list_buildoptions(coredata), + 'desc': 'List all build options.', + }, + 'buildsystem_files': { + 'func': lambda: list_buildsystem_files(builddata), + 'desc': 'List files that make up the build system.', + 'key': 'buildsystem-files', + }, + 'dependencies': { + 'func': lambda: list_deps(coredata), + 'desc': 'List external dependencies.', + }, + 'installed': { + 'func': lambda: list_installed(installdata), + 'desc': 'List all installed files and directories.', + }, + 'projectinfo': { + 'func': lambda: list_projinfo(builddata), + 'desc': 'Information about projects.', + }, + 'targets': { + 'func': lambda: list_targets(builddata, installdata, backend), + 'desc': 'List top level targets.', + }, + 'tests': { + 'func': lambda: list_tests(testdata), + 'desc': 'List 
all unit tests.', + } + } + def add_arguments(parser): - parser.add_argument('--targets', action='store_true', dest='list_targets', default=False, - help='List top level targets.') - parser.add_argument('--installed', action='store_true', dest='list_installed', default=False, - help='List all installed files and directories.') + intro_types = get_meson_introspection_types() + for key, val in intro_types.items(): + flag = '--' + val.get('key', key) + parser.add_argument(flag, action='store_true', dest=key, default=False, help=val['desc']) + parser.add_argument('--target-files', action='store', dest='target_files', default=None, help='List source files for a given target.') - parser.add_argument('--buildsystem-files', action='store_true', dest='buildsystem_files', default=False, - help='List files that make up the build system.') - parser.add_argument('--buildoptions', action='store_true', dest='buildoptions', default=False, - help='List all build options.') - parser.add_argument('--tests', action='store_true', dest='tests', default=False, - help='List all unit tests.') - parser.add_argument('--benchmarks', action='store_true', dest='benchmarks', default=False, - help='List all benchmarks.') - parser.add_argument('--dependencies', action='store_true', dest='dependencies', default=False, - help='List external dependencies.') - parser.add_argument('--projectinfo', action='store_true', dest='projectinfo', default=False, - help='Information about projects.') parser.add_argument('--backend', choices=cdata.backendlist, dest='backend', default='ninja', help='The backend to use for the --buildoptions introspection.') parser.add_argument('-a', '--all', action='store_true', dest='all', default=False, @@ -74,10 +116,12 @@ def list_installed(installdata): res[path] = os.path.join(installdata.prefix, installdir, os.path.basename(path)) for path, installpath, unused_custom_install_mode in installdata.man: res[path] = os.path.join(installdata.prefix, installpath) - return ('installed', res) + return res def list_targets(builddata: build.Build, installdata, backend: backends.Backend): tlist = [] + build_dir = builddata.environment.get_build_dir() + src_dir = builddata.environment.get_source_dir() # Fast lookup table for installation files install_lookuptable = {} @@ -89,43 +133,38 @@ def list_targets(builddata: build.Build, installdata, backend: backends.Backend) if not isinstance(target, build.Target): raise RuntimeError('The target object in `builddata.get_targets()` is not of type `build.Target`. 
Please file a bug with this error message.') - # TODO Change this to the full list in a seperate PR - fname = [os.path.join(target.subdir, x) for x in target.get_outputs()] - if len(fname) == 1: - fname = fname[0] - t = { 'name': target.get_basename(), 'id': idname, 'type': target.get_typename(), - 'filename': fname, + 'defined_in': os.path.normpath(os.path.join(src_dir, target.subdir, 'meson.build')), + 'filename': [os.path.join(build_dir, target.subdir, x) for x in target.get_outputs()], 'build_by_default': target.build_by_default, 'target_sources': backend.get_introspection_data(idname, target) } if installdata and target.should_install(): t['installed'] = True - # TODO Change this to the full list in a seperate PR - t['install_filename'] = [install_lookuptable.get(x, None) for x in target.get_outputs()][0] + t['install_filename'] = [install_lookuptable.get(x, None) for x in target.get_outputs()] else: t['installed'] = False tlist.append(t) - return ('targets', tlist) + return tlist -class BuildoptionsOptionHelper: +class IntrospectionHelper: # mimic an argparse namespace def __init__(self, cross_file): self.cross_file = cross_file self.native_file = None self.cmd_line_options = {} -class BuildoptionsInterperter(astinterpreter.AstInterpreter): +class IntrospectionInterpreter(astinterpreter.AstInterpreter): # Interpreter to detect the options without a build directory # Most of the code is stolen from interperter.Interpreter def __init__(self, source_root, subdir, backend, cross_file=None, subproject='', subproject_dir='subprojects', env=None): super().__init__(source_root, subdir) - options = BuildoptionsOptionHelper(cross_file) + options = IntrospectionHelper(cross_file) self.cross_file = cross_file if env is None: self.environment = environment.Environment(source_root, None, options) @@ -137,37 +176,18 @@ class BuildoptionsInterperter(astinterpreter.AstInterpreter): self.option_file = os.path.join(self.source_root, self.subdir, 'meson_options.txt') self.backend = backend self.default_options = {'backend': self.backend} + self.project_data = {} self.funcs.update({ 'project': self.func_project, 'add_languages': self.func_add_languages }) - def detect_compilers(self, lang, need_cross_compiler): - comp, cross_comp = self.environment.detect_compilers(lang, need_cross_compiler) - if comp is None: - return None, None - - self.coredata.compilers[lang] = comp - # Native compiler always exist so always add its options. - new_options = comp.get_options() - if cross_comp is not None: - self.coredata.cross_compilers[lang] = cross_comp - new_options.update(cross_comp.get_options()) - - optprefix = lang + '_' - for k, o in new_options.items(): - if not k.startswith(optprefix): - raise RuntimeError('Internal error, %s has incorrect prefix.' 
% k) - if k in self.environment.cmd_line_options: - o.set_value(self.environment.cmd_line_options[k]) - self.coredata.compiler_options.setdefault(k, o) - - return comp, cross_comp - def flatten_args(self, args): # Resolve mparser.ArrayNode if needed flattend_args = [] + if isinstance(args, mparser.ArrayNode): + args = [x.value for x in args.args.arguments] for i in args: if isinstance(i, mparser.ArrayNode): flattend_args += [x.value for x in i.args.arguments] @@ -177,35 +197,25 @@ class BuildoptionsInterperter(astinterpreter.AstInterpreter): pass return flattend_args - def add_languages(self, args): - need_cross_compiler = self.environment.is_cross_build() and self.environment.cross_info.need_cross_compiler() - for lang in sorted(args, key=compilers.sort_clink): - lang = lang.lower() - if lang not in self.coredata.compilers: - (comp, _) = self.detect_compilers(lang, need_cross_compiler) - if comp is None: - return - for optname in comp.base_options: - if optname in self.coredata.base_options: - continue - oobj = compilers.base_options[optname] - self.coredata.base_options[optname] = oobj - def func_project(self, node, args, kwargs): if len(args) < 1: raise InvalidArguments('Not enough arguments to project(). Needs at least the project name.') + proj_name = args[0] + proj_vers = kwargs.get('version', 'undefined') proj_langs = self.flatten_args(args[1:]) + if isinstance(proj_vers, mparser.ElementaryNode): + proj_vers = proj_vers.value + if not isinstance(proj_vers, str): + proj_vers = 'undefined' + self.project_data = {'descriptive_name': proj_name, 'version': proj_vers} if os.path.exists(self.option_file): oi = optinterpreter.OptionInterpreter(self.subproject) oi.process(self.option_file) self.coredata.merge_user_options(oi.options) - def_opts = kwargs.get('default_options', []) - if isinstance(def_opts, mparser.ArrayNode): - def_opts = [x.value for x in def_opts.args.arguments] - + def_opts = self.flatten_args(kwargs.get('default_options', [])) self.project_default_options = mesonlib.stringlistify(def_opts) self.project_default_options = cdata.create_options_dict(self.project_default_options) self.default_options.update(self.project_default_options) @@ -216,6 +226,7 @@ class BuildoptionsInterperter(astinterpreter.AstInterpreter): if isinstance(spdirname, str): self.subproject_dir = spdirname if not self.is_subproject(): + self.project_data['subprojects'] = [] subprojects_dir = os.path.join(self.source_root, self.subproject_dir) if os.path.isdir(subprojects_dir): for i in os.listdir(subprojects_dir): @@ -226,19 +237,26 @@ class BuildoptionsInterperter(astinterpreter.AstInterpreter): options = {k: v for k, v in self.environment.cmd_line_options.items() if k.startswith('backend_')} self.coredata.set_options(options) - self.add_languages(proj_langs) + self.func_add_languages(None, proj_langs, None) def do_subproject(self, dirname): subproject_dir_abs = os.path.join(self.environment.get_source_dir(), self.subproject_dir) subpr = os.path.join(subproject_dir_abs, dirname) try: - subi = BuildoptionsInterperter(subpr, '', self.backend, cross_file=self.cross_file, subproject=dirname, subproject_dir=self.subproject_dir, env=self.environment) + subi = IntrospectionInterpreter(subpr, '', self.backend, cross_file=self.cross_file, subproject=dirname, subproject_dir=self.subproject_dir, env=self.environment) subi.analyze() + subi.project_data['name'] = dirname + self.project_data['subprojects'] += [subi.project_data] except: return def func_add_languages(self, node, args, kwargs): - return 
self.add_languages(self.flatten_args(args)) + args = self.flatten_args(args) + need_cross_compiler = self.environment.is_cross_build() + for lang in sorted(args, key=compilers.sort_clink): + lang = lang.lower() + if lang not in self.coredata.compilers: + self.environment.detect_compilers(lang, need_cross_compiler) def is_subproject(self): return self.subproject != '' @@ -253,14 +271,14 @@ def list_buildoptions_from_source(sourcedir, backend, indent): # Make sure that log entries in other parts of meson don't interfere with the JSON output mlog.disable() backend = backends.get_backend_from_name(backend, None) - intr = BuildoptionsInterperter(sourcedir, '', backend.name) + intr = IntrospectionInterpreter(sourcedir, '', backend.name) intr.analyze() # Reenable logging just in case mlog.enable() - buildoptions = list_buildoptions(intr.coredata)[1] - print(json.dumps(buildoptions, indent=indent)) + print(json.dumps(list_buildoptions(intr.coredata), indent=indent)) -def list_target_files(target_name, targets, builddata: build.Build): +def list_target_files(target_name: str, targets: list, source_dir: str): + sys.stderr.write("WARNING: The --target-files introspection API is deprecated. Use --targets instead.\n") result = [] tgt = None @@ -276,10 +294,9 @@ def list_target_files(target_name, targets, builddata: build.Build): for i in tgt['target_sources']: result += i['sources'] + i['generated_sources'] - # TODO Remove this line in a future PR with other breaking changes - result = list(map(lambda x: os.path.relpath(x, builddata.environment.get_source_dir()), result)) + result = list(map(lambda x: os.path.relpath(x, source_dir), result)) - return ('target_files', result) + return result def list_buildoptions(coredata: cdata.CoreData): optlist = [] @@ -312,7 +329,7 @@ def list_buildoptions(coredata: cdata.CoreData): add_keys(optlist, dir_options, 'directory') add_keys(optlist, coredata.user_options, 'user') add_keys(optlist, test_options, 'test') - return ('buildoptions', optlist) + return optlist def add_keys(optlist, options, section): keys = list(options.keys()) @@ -349,7 +366,8 @@ def find_buildsystem_files_list(src_dir): def list_buildsystem_files(builddata: build.Build): src_dir = builddata.environment.get_source_dir() filelist = find_buildsystem_files_list(src_dir) - return ('buildsystem_files', filelist) + filelist = [os.path.join(src_dir, x) for x in filelist] + return filelist def list_deps(coredata: cdata.CoreData): result = [] @@ -358,7 +376,7 @@ def list_deps(coredata: cdata.CoreData): result += [{'name': d.name, 'compile_args': d.get_compile_args(), 'link_args': d.get_link_args()}] - return ('dependencies', result) + return result def get_test_list(testdata): result = [] @@ -382,10 +400,10 @@ def get_test_list(testdata): return result def list_tests(testdata): - return ('tests', get_test_list(testdata)) + return get_test_list(testdata) def list_benchmarks(benchdata): - return ('benchmarks', get_test_list(benchdata)) + return get_test_list(benchdata) def list_projinfo(builddata: build.Build): result = {'version': builddata.project_version, @@ -397,62 +415,24 @@ def list_projinfo(builddata: build.Build): 'descriptive_name': builddata.projects.get(k)} subprojects.append(c) result['subprojects'] = subprojects - return ('projectinfo', result) - -class ProjectInfoInterperter(astinterpreter.AstInterpreter): - def __init__(self, source_root, subdir): - super().__init__(source_root, subdir) - self.funcs.update({'project': self.func_project}) - self.project_name = None - self.project_version = 
None - - def func_project(self, node, args, kwargs): - if len(args) < 1: - raise InvalidArguments('Not enough arguments to project(). Needs at least the project name.') - self.project_name = args[0] - self.project_version = kwargs.get('version', 'undefined') - if isinstance(self.project_version, mparser.ElementaryNode): - self.project_version = self.project_version.value - - def set_variable(self, varname, variable): - pass - - def analyze(self): - self.load_root_meson_file() - self.sanity_check_ast() - self.parse_project() - self.run() + return result def list_projinfo_from_source(sourcedir, indent): files = find_buildsystem_files_list(sourcedir) + files = [os.path.normpath(x) for x in files] - result = {'buildsystem_files': []} - subprojects = {} - - for f in files: - f = f.replace('\\', '/') - if f == 'meson.build': - interpreter = ProjectInfoInterperter(sourcedir, '') - interpreter.analyze() - version = None - if interpreter.project_version is str: - version = interpreter.project_version - result.update({'version': version, 'descriptive_name': interpreter.project_name}) - result['buildsystem_files'].append(f) - elif f.startswith('subprojects/'): - subproject_id = f.split('/')[1] - subproject = subprojects.setdefault(subproject_id, {'buildsystem_files': []}) - subproject['buildsystem_files'].append(f) - if f.count('/') == 2 and f.endswith('meson.build'): - interpreter = ProjectInfoInterperter(os.path.join(sourcedir, 'subprojects', subproject_id), '') - interpreter.analyze() - subproject.update({'name': subproject_id, 'version': interpreter.project_version, 'descriptive_name': interpreter.project_name}) - else: - result['buildsystem_files'].append(f) + mlog.disable() + intr = IntrospectionInterpreter(sourcedir, '', 'ninja') + intr.analyze() + mlog.enable() - subprojects = [obj for name, obj in subprojects.items()] - result['subprojects'] = subprojects - print(json.dumps(result, indent=indent)) + for i in intr.project_data['subprojects']: + basedir = os.path.join(intr.subproject_dir, i['name']) + i['buildsystem_files'] = [x for x in files if x.startswith(basedir)] + files = [x for x in files if not x.startswith(basedir)] + + intr.project_data['buildsystem_files'] = files + print(json.dumps(intr.project_data, indent=indent)) def run(options): datadir = 'meson-private' @@ -469,44 +449,43 @@ def run(options): if options.buildoptions: list_buildoptions_from_source(sourcedir, options.backend, indent) return 0 - if not os.path.isdir(datadir) or not os.path.isdir(infodir): + infofile = get_meson_info_file(infodir) + if not os.path.isdir(datadir) or not os.path.isdir(infodir) or not os.path.isfile(infofile): print('Current directory is not a meson build directory.' 'Please specify a valid build dir or change the working directory to it.' 'It is also possible that the build directory was generated with an old' 'meson version. Please regenerate it in this case.') return 1 - # Load build data to make sure that the version matches - # TODO Find a better solution for this - cdata.load(options.builddir) + intro_vers = '0.0.0' + source_dir = None + with open(infofile, 'r') as fp: + raw = json.load(fp) + intro_vers = raw.get('introspection', {}).get('version', {}).get('full', '0.0.0') + source_dir = raw.get('directories', {}).get('source', None) + + vers_to_check = get_meson_introspection_required_version() + for i in vers_to_check: + if not mesonlib.version_compare(intro_vers, i): + print('Introspection version {} is not supported. 
' + 'The required version is: {}' + .format(intro_vers, ' and '.join(vers_to_check))) + return 1 results = [] - toextract = [] - - if options.all or options.benchmarks: - toextract += ['benchmarks'] - if options.all or options.buildoptions: - toextract += ['buildoptions'] - if options.all or options.buildsystem_files: - toextract += ['buildsystem_files'] - if options.all or options.dependencies: - toextract += ['dependencies'] - if options.all or options.list_installed: - toextract += ['installed'] - if options.all or options.projectinfo: - toextract += ['projectinfo'] - if options.all or options.list_targets: - toextract += ['targets'] + intro_types = get_meson_introspection_types() + + # Handle the one option that does not have its own JSON file (maybe deprecate / remove this?) if options.target_files is not None: targets_file = os.path.join(infodir, 'intro-targets.json') with open(targets_file, 'r') as fp: targets = json.load(fp) - builddata = build.load(options.builddir) # TODO remove this in a breaking changes PR - results += [list_target_files(options.target_files, targets, builddata)] - if options.all or options.tests: - toextract += ['tests'] + results += [('target_files', list_target_files(options.target_files, targets, source_dir))] - for i in toextract: + # Extract introspection information from JSON + for i in intro_types.keys(): + if not options.all and not getattr(options, i, False): + continue curr = os.path.join(infodir, 'intro-{}.json'.format(i)) if not os.path.isfile(curr): print('Introspection file {} does not exist.'.format(curr)) @@ -527,7 +506,10 @@ def run(options): print(json.dumps(out, indent=indent)) return 0 +updated_introspection_files = [] + def write_intro_info(intro_info, info_dir): + global updated_introspection_files for i in intro_info: out_file = os.path.join(info_dir, 'intro-{}.json'.format(i[0])) tmp_file = os.path.join(info_dir, 'tmp_dump.json') @@ -535,29 +517,70 @@ def write_intro_info(intro_info, info_dir): json.dump(i[1], fp) fp.flush() # Not sure if this is needed os.replace(tmp_file, out_file) + updated_introspection_files += [i[0]] def generate_introspection_file(builddata: build.Build, backend: backends.Backend): coredata = builddata.environment.get_coredata() - benchmarkdata = backend.create_test_serialisation(builddata.get_benchmarks()) - testdata = backend.create_test_serialisation(builddata.get_tests()) - installdata = backend.create_install_data() + intro_types = get_meson_introspection_types(coredata=coredata, builddata=builddata, backend=backend) + intro_info = [] - intro_info = [ - list_benchmarks(benchmarkdata), - list_buildoptions(coredata), - list_buildsystem_files(builddata), - list_deps(coredata), - list_installed(installdata), - list_projinfo(builddata), - list_targets(builddata, installdata, backend), - list_tests(testdata) - ] + for key, val in intro_types.items(): + intro_info += [(key, val['func']())] write_intro_info(intro_info, builddata.environment.info_dir) def update_build_options(coredata: cdata.CoreData, info_dir): intro_info = [ - list_buildoptions(coredata) + ('buildoptions', list_buildoptions(coredata)) ] write_intro_info(intro_info, info_dir) + +def split_version_string(version: str): + vers_list = version.split('.') + return { + 'full': version, + 'major': int(vers_list[0] if len(vers_list) > 0 else 0), + 'minor': int(vers_list[1] if len(vers_list) > 1 else 0), + 'patch': int(vers_list[2] if len(vers_list) > 2 else 0) + } + +def write_meson_info_file(builddata: build.Build, errors: list, build_files_updated: bool = 
False): + global updated_introspection_files + info_dir = builddata.environment.info_dir + info_file = get_meson_info_file(info_dir) + intro_types = get_meson_introspection_types() + intro_info = {} + + for i in intro_types.keys(): + intro_info[i] = { + 'file': 'intro-{}.json'.format(i), + 'updated': i in updated_introspection_files + } + + info_data = { + 'meson_version': split_version_string(cdata.version), + 'directories': { + 'source': builddata.environment.get_source_dir(), + 'build': builddata.environment.get_build_dir(), + 'info': info_dir, + }, + 'introspection': { + 'version': split_version_string(get_meson_introspection_version()), + 'information': intro_info, + }, + 'build_files_updated': build_files_updated, + } + + if len(errors) > 0: + info_data['error'] = True + info_data['error_list'] = [x if isinstance(x, str) else str(x) for x in errors] + else: + info_data['error'] = False + + # Write the data to disc + tmp_file = os.path.join(info_dir, 'tmp_dump.json') + with open(tmp_file, 'w') as fp: + json.dump(info_data, fp) + fp.flush() + os.replace(tmp_file, info_file) diff --git a/mesonbuild/mlog.py b/mesonbuild/mlog.py index 57debb0..a8b146f 100644 --- a/mesonbuild/mlog.py +++ b/mesonbuild/mlog.py @@ -195,12 +195,15 @@ def warning(*args, **kwargs): def deprecation(*args, **kwargs): return _log_error('deprecation', *args, **kwargs) -def exception(e): +def exception(e, prefix=red('ERROR:')): log() + args = [] if hasattr(e, 'file') and hasattr(e, 'lineno') and hasattr(e, 'colno'): - log('%s:%d:%d:' % (e.file, e.lineno, e.colno), red('ERROR: '), e) - else: - log(red('ERROR:'), e) + args.append('%s:%d:%d:' % (e.file, e.lineno, e.colno)) + if prefix: + args.append(prefix) + args.append(e) + log(*args) # Format a list for logging purposes as a string. It separates # all but the last item with commas, and the last with 'and'. diff --git a/mesonbuild/modules/python.py b/mesonbuild/modules/python.py index c14ac85..1d41165 100644 --- a/mesonbuild/modules/python.py +++ b/mesonbuild/modules/python.py @@ -23,11 +23,10 @@ from . import ExtensionModule from mesonbuild.modules import ModuleReturnValue from ..interpreterbase import ( noPosargs, noKwargs, permittedKwargs, - InterpreterObject, InvalidArguments, + InvalidArguments, FeatureNew, FeatureNewKwargs, disablerIfNotFound ) from ..interpreter import ExternalProgramHolder, extract_required_kwarg -from ..interpreterbase import flatten from ..build import known_shmod_kwargs from .. 
import mlog from ..environment import detect_cpu_family diff --git a/mesonbuild/modules/rpm.py b/mesonbuild/modules/rpm.py index ba5bcaa..9774286 100644 --- a/mesonbuild/modules/rpm.py +++ b/mesonbuild/modules/rpm.py @@ -29,39 +29,16 @@ import os class RPMModule(ExtensionModule): @noKwargs - def generate_spec_template(self, state, args, kwargs): - compiler_deps = set() - for compiler in state.compilers.values(): - # Elbrus has one 'lcc' package for every compiler - if isinstance(compiler, compilers.GnuCCompiler): - compiler_deps.add('gcc') - elif isinstance(compiler, compilers.GnuCPPCompiler): - compiler_deps.add('gcc-c++') - elif isinstance(compiler, compilers.ElbrusCCompiler): - compiler_deps.add('lcc') - elif isinstance(compiler, compilers.ElbrusCPPCompiler): - compiler_deps.add('lcc') - elif isinstance(compiler, compilers.ElbrusFortranCompiler): - compiler_deps.add('lcc') - elif isinstance(compiler, compilers.ValaCompiler): - compiler_deps.add('vala') - elif isinstance(compiler, compilers.GnuFortranCompiler): - compiler_deps.add('gcc-gfortran') - elif isinstance(compiler, compilers.GnuObjCCompiler): - compiler_deps.add('gcc-objc') - elif compiler == compilers.GnuObjCPPCompiler: - compiler_deps.add('gcc-objc++') - else: - mlog.log('RPM spec file will not created, generating not allowed for:', - mlog.bold(compiler.get_id())) - return - proj = state.project_name.replace(' ', '_').replace('\t', '_') + def generate_spec_template(self, coredata, args, kwargs): + self.coredata = coredata + required_compilers = self.__get_required_compilers() + proj = coredata.project_name.replace(' ', '_').replace('\t', '_') so_installed = False devel_subpkg = False files = set() files_devel = set() to_delete = set() - for target in state.targets.values(): + for target in coredata.targets.values(): if isinstance(target, build.Executable) and target.need_install: files.add('%%{_bindir}/%s' % target.get_filename()) elif isinstance(target, build.SharedLibrary) and target.need_install: @@ -80,18 +57,19 @@ class RPMModule(ExtensionModule): files_devel.add('%%{_datadir}/gir-1.0/%s' % target.get_filename()[0]) elif isinstance(target, TypelibTarget) and target.should_install(): files.add('%%{_libdir}/girepository-1.0/%s' % target.get_filename()[0]) - for header in state.headers: + for header in coredata.headers: if len(header.get_install_subdir()) > 0: files_devel.add('%%{_includedir}/%s/' % header.get_install_subdir()) else: for hdr_src in header.get_sources(): files_devel.add('%%{_includedir}/%s' % hdr_src) - for man in state.man: + for man in coredata.man: for man_file in man.get_sources(): files.add('%%{_mandir}/man%u/%s.*' % (int(man_file.split('.')[-1]), man_file)) if len(files_devel) > 0: devel_subpkg = True - filename = os.path.join(state.environment.get_build_dir(), + + filename = os.path.join(coredata.environment.get_build_dir(), '%s.spec' % proj) with open(filename, 'w+') as fn: fn.write('Name: %s\n' % proj) @@ -102,24 +80,28 @@ class RPMModule(ExtensionModule): fn.write('\n') fn.write('Source0: %{name}-%{version}.tar.xz # FIXME\n') fn.write('\n') - for compiler in compiler_deps: + fn.write('BuildRequires: meson\n') + for compiler in required_compilers: fn.write('BuildRequires: %s\n' % compiler) - for dep in state.environment.coredata.deps: + for dep in coredata.environment.coredata.deps: fn.write('BuildRequires: pkgconfig(%s)\n' % dep[0]) - for lib in state.environment.coredata.ext_libs.values(): - name = lib.get_name() - fn.write('BuildRequires: {} # FIXME\n'.format(name)) - mlog.warning('replace', 
mlog.bold(name), 'with the real package.', - 'You can use following command to find package which ' - 'contains this lib:', - mlog.bold("dnf provides '*/lib{}.so'".format(name))) - for prog in state.environment.coredata.ext_progs.values(): - if not prog.found(): - fn.write('BuildRequires: %%{_bindir}/%s # FIXME\n' % - prog.get_name()) - else: - fn.write('BuildRequires: {}\n'.format(prog.get_path())) - fn.write('BuildRequires: meson\n') +# ext_libs and ext_progs have been removed from coredata so the following code +# no longer works. It is kept as a reminder of the idea should anyone wish +# to re-implement it. +# +# for lib in state.environment.coredata.ext_libs.values(): +# name = lib.get_name() +# fn.write('BuildRequires: {} # FIXME\n'.format(name)) +# mlog.warning('replace', mlog.bold(name), 'with the real package.', +# 'You can use following command to find package which ' +# 'contains this lib:', +# mlog.bold("dnf provides '*/lib{}.so'".format(name))) +# for prog in state.environment.coredata.ext_progs.values(): +# if not prog.found(): +# fn.write('BuildRequires: %%{_bindir}/%s # FIXME\n' % +# prog.get_name()) +# else: +# fn.write('BuildRequires: {}\n'.format(prog.get_path())) fn.write('\n') fn.write('%description\n') fn.write('\n') @@ -167,5 +149,33 @@ class RPMModule(ExtensionModule): mlog.log('RPM spec template written to %s.spec.\n' % proj) return ModuleReturnValue(None, []) + def __get_required_compilers(self): + required_compilers = set() + for compiler in self.coredata.compilers.values(): + # Elbrus has one 'lcc' package for every compiler + if isinstance(compiler, compilers.GnuCCompiler): + required_compilers.add('gcc') + elif isinstance(compiler, compilers.GnuCPPCompiler): + required_compilers.add('gcc-c++') + elif isinstance(compiler, compilers.ElbrusCCompiler): + required_compilers.add('lcc') + elif isinstance(compiler, compilers.ElbrusCPPCompiler): + required_compilers.add('lcc') + elif isinstance(compiler, compilers.ElbrusFortranCompiler): + required_compilers.add('lcc') + elif isinstance(compiler, compilers.ValaCompiler): + required_compilers.add('vala') + elif isinstance(compiler, compilers.GnuFortranCompiler): + required_compilers.add('gcc-gfortran') + elif isinstance(compiler, compilers.GnuObjCCompiler): + required_compilers.add('gcc-objc') + elif compiler == compilers.GnuObjCPPCompiler: + required_compilers.add('gcc-objc++') + else: + mlog.log('RPM spec file not created, generation not allowed for:', + mlog.bold(compiler.get_id())) + return required_compilers + + def initialize(*args, **kwargs): return RPMModule(*args, **kwargs) diff --git a/mesonbuild/msetup.py b/mesonbuild/msetup.py index 67559a1..023afdb 100644 --- a/mesonbuild/msetup.py +++ b/mesonbuild/msetup.py @@ -185,11 +185,15 @@ class MesonApp: mlog.log('Target machine cpu:', mlog.bold(intr.builtin['target_machine'].cpu_method([], {}))) mlog.log('Build machine cpu family:', mlog.bold(intr.builtin['build_machine'].cpu_family_method([], {}))) mlog.log('Build machine cpu:', mlog.bold(intr.builtin['build_machine'].cpu_method([], {}))) - if self.options.profile: - fname = os.path.join(self.build_dir, 'meson-private', 'profile-interpreter.log') - profile.runctx('intr.run()', globals(), locals(), filename=fname) - else: - intr.run() + try: + if self.options.profile: + fname = os.path.join(self.build_dir, 'meson-private', 'profile-interpreter.log') + profile.runctx('intr.run()', globals(), locals(), filename=fname) + else: + intr.run() + except Exception as e: + mintro.write_meson_info_file(b, [e]) + raise # Print 
all default option values that don't match the current value for def_opt_name, def_opt_value, cur_opt_value in intr.get_non_matching_default_options(): mlog.log('Option', mlog.bold(def_opt_name), 'is:', @@ -224,7 +228,9 @@ class MesonApp: profile.runctx('mintro.generate_introspection_file(b, intr.backend)', globals(), locals(), filename=fname) else: mintro.generate_introspection_file(b, intr.backend) - except: + mintro.write_meson_info_file(b, [], True) + except Exception as e: + mintro.write_meson_info_file(b, [e]) if 'cdf' in locals(): old_cdf = cdf + '.prev' if os.path.exists(old_cdf): diff --git a/mesonbuild/munstable_coredata.py b/mesonbuild/munstable_coredata.py new file mode 100644 index 0000000..78f3f34 --- /dev/null +++ b/mesonbuild/munstable_coredata.py @@ -0,0 +1,126 @@ +# Copyright 2019 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +from . import coredata as cdata + +import os.path +import pprint +import textwrap + +def add_arguments(parser): + parser.add_argument('--all', action='store_true', dest='all', default=False, + help='Show data not used by current backend.') + + parser.add_argument('builddir', nargs='?', default='.', help='The build directory') + + +def dump_compilers(compilers): + for lang, compiler in compilers.items(): + print(' ' + lang + ':') + print(' Id: ' + compiler.id) + print(' Command: ' + ' '.join(compiler.exelist)) + print(' Full version: ' + compiler.full_version) + print(' Detected version: ' + compiler.version) + print(' Detected type: ' + repr(compiler.compiler_type)) + #pprint.pprint(compiler.__dict__) + + +def dump_guids(d): + for name, value in d.items(): + print(' ' + name + ': ' + value) + + +def run(options): + datadir = 'meson-private' + if options.builddir is not None: + datadir = os.path.join(options.builddir, datadir) + if not os.path.isdir(datadir): + print('Current directory is not a build dir. Please specify it or ' + 'change the working directory to it.') + return 1 + + all = options.all + + print('This is a dump of the internal unstable cache of meson. 
This is for debugging only.') + print('Do NOT parse, this will change from version to version in incompatible ways') + print('') + + coredata = cdata.load(options.builddir) + backend = coredata.get_builtin_option('backend') + for k, v in sorted(coredata.__dict__.items()): + if k in ('backend_options', 'base_options', 'builtins', 'compiler_options', 'user_options'): + # use `meson configure` to view these + pass + elif k in ['install_guid', 'test_guid', 'regen_guid']: + if all or backend.startswith('vs'): + print(k + ': ' + v) + elif k == 'target_guids': + if all or backend.startswith('vs'): + print(k + ':') + dump_guids(v) + elif k in ['lang_guids']: + if all or backend.startswith('vs') or backend == 'xcode': + print(k + ':') + dump_guids(v) + elif k == 'meson_command': + if all or backend.startswith('vs'): + print('Meson command used in build file regeneration: ' + ' '.join(v)) + elif k == 'pkgconf_envvar': + print('Last seen PKGCONFIG environment variable value: ' + v) + elif k == 'version': + print('Meson version: ' + v) + elif k == 'cross_file': + print('Cross File: ' + (v or 'None')) + elif k == 'config_files': + if v: + print('Native File: ' + ' '.join(v)) + elif k == 'compilers': + print('Cached native compilers:') + dump_compilers(v) + elif k == 'cross_compilers': + print('Cached cross compilers:') + dump_compilers(v) + elif k == 'deps': + native = [] + cross = [] + for dep_key, dep in sorted(v.items()): + if dep_key[2]: + cross.append((dep_key, dep)) + else: + native.append((dep_key, dep)) + + def print_dep(dep_key, dep): + print(' ' + dep_key[0] + ": ") + print(' compile args: ' + repr(dep.get_compile_args())) + print(' link args: ' + repr(dep.get_link_args())) + if dep.get_sources(): + print(' sources: ' + repr(dep.get_sources())) + print(' version: ' + repr(dep.get_version())) + + if native: + print('Cached native dependencies:') + for dep_key, dep in native: + print_dep(dep_key, dep) + if cross: + print('Cached dependencies:') + for dep_key, dep in cross: + print_dep(dep_key, dep) + elif k == 'external_preprocess_args': + for lang, opts in v.items(): + if opts: + print('Preprocessor args for ' + lang + ': ' + ' '.join(opts)) + else: + print(k + ':') + print(textwrap.indent(pprint.pformat(v), ' ')) diff --git a/run_unittests.py b/run_unittests.py index f7737ab..342ad88 100755 --- a/run_unittests.py +++ b/run_unittests.py @@ -1439,7 +1439,7 @@ class AllPlatformTests(BasePlatformTests): # Get name of static library targets = self.introspect('--targets') self.assertEqual(len(targets), 1) - libname = targets[0]['filename'] # TODO Change filename back to a list again + libname = targets[0]['filename'][0] # Build and get contents of static library self.build() before = self._run(['ar', 't', os.path.join(self.builddir, libname)]).split() @@ -1496,8 +1496,8 @@ class AllPlatformTests(BasePlatformTests): intro = self.introspect('--targets') if intro[0]['type'] == 'executable': intro = intro[::-1] - self.assertPathListEqual([intro[0]['install_filename']], ['/usr/lib/libstat.a']) - self.assertPathListEqual([intro[1]['install_filename']], ['/usr/bin/prog' + exe_suffix]) + self.assertPathListEqual(intro[0]['install_filename'], ['/usr/lib/libstat.a']) + self.assertPathListEqual(intro[1]['install_filename'], ['/usr/bin/prog' + exe_suffix]) def test_install_introspection_multiple_outputs(self): ''' @@ -1514,14 +1514,10 @@ class AllPlatformTests(BasePlatformTests): intro = self.introspect('--targets') if intro[0]['type'] == 'executable': intro = intro[::-1] - 
#self.assertPathListEqual(intro[0]['install_filename'], ['/usr/include/diff.h', '/usr/bin/diff.sh']) - #self.assertPathListEqual(intro[1]['install_filename'], ['/opt/same.h', '/opt/same.sh']) - #self.assertPathListEqual(intro[2]['install_filename'], ['/usr/include/first.h', None]) - #self.assertPathListEqual(intro[3]['install_filename'], [None, '/usr/bin/second.sh']) - self.assertPathListEqual([intro[0]['install_filename']], ['/usr/include/diff.h']) - self.assertPathListEqual([intro[1]['install_filename']], ['/opt/same.h']) - self.assertPathListEqual([intro[2]['install_filename']], ['/usr/include/first.h']) - self.assertPathListEqual([intro[3]['install_filename']], [None]) + self.assertPathListEqual(intro[0]['install_filename'], ['/usr/include/diff.h', '/usr/bin/diff.sh']) + self.assertPathListEqual(intro[1]['install_filename'], ['/opt/same.h', '/opt/same.sh']) + self.assertPathListEqual(intro[2]['install_filename'], ['/usr/include/first.h', None]) + self.assertPathListEqual(intro[3]['install_filename'], [None, '/usr/bin/second.sh']) def test_uninstall(self): exename = os.path.join(self.installdir, 'usr/bin/prog' + exe_suffix) @@ -2569,7 +2565,6 @@ int main(int argc, char **argv) { for t in t_intro: id = t['id'] tf_intro = self.introspect(['--target-files', id]) - #tf_intro = list(map(lambda x: os.path.relpath(x, testdir), tf_intro)) TODO make paths absolute in future PR self.assertEqual(tf_intro, expected[id]) self.wipe() @@ -2584,9 +2579,6 @@ int main(int argc, char **argv) { for t in t_intro: id = t['id'] tf_intro = self.introspect(['--target-files', id]) - print(tf_intro) - #tf_intro = list(map(lambda x: os.path.relpath(x, testdir), tf_intro)) TODO make paths absolute in future PR - print(tf_intro) self.assertEqual(tf_intro, expected[id]) self.wipe() @@ -3052,21 +3044,21 @@ recommended as it is not supported on some platforms''') testfile = os.path.join(self.common_test_dir, '36 run program', 'meson.build') res = self.introspect_directory(testfile, '--projectinfo') self.assertEqual(set(res['buildsystem_files']), set(['meson.build'])) - self.assertEqual(res['version'], None) + self.assertEqual(res['version'], 'undefined') self.assertEqual(res['descriptive_name'], 'run command') self.assertEqual(res['subprojects'], []) testfile = os.path.join(self.common_test_dir, '44 options', 'meson.build') res = self.introspect_directory(testfile, '--projectinfo') self.assertEqual(set(res['buildsystem_files']), set(['meson_options.txt', 'meson.build'])) - self.assertEqual(res['version'], None) + self.assertEqual(res['version'], 'undefined') self.assertEqual(res['descriptive_name'], 'options') self.assertEqual(res['subprojects'], []) testfile = os.path.join(self.common_test_dir, '47 subproject options', 'meson.build') res = self.introspect_directory(testfile, '--projectinfo') self.assertEqual(set(res['buildsystem_files']), set(['meson_options.txt', 'meson.build'])) - self.assertEqual(res['version'], None) + self.assertEqual(res['version'], 'undefined') self.assertEqual(res['descriptive_name'], 'suboptions') self.assertEqual(len(res['subprojects']), 1) subproject_files = set(f.replace('\\', '/') for f in res['subprojects'][0]['buildsystem_files']) @@ -3116,7 +3108,7 @@ recommended as it is not supported on some platforms''') Path(goodfile).read_text()) def test_introspect_buildoptions_without_configured_build(self): - testdir = os.path.join(self.unit_test_dir, '51 introspect buildoptions') + testdir = os.path.join(self.unit_test_dir, '53 introspect buildoptions') testfile = os.path.join(testdir, 
'meson.build') res_nb = self.introspect_directory(testfile, ['--buildoptions'] + self.meson_args) self.init(testdir, default_args=False) @@ -3125,7 +3117,7 @@ recommended as it is not supported on some platforms''') self.assertListEqual(res_nb, res_wb) def test_introspect_json_dump(self): - testdir = os.path.join(self.unit_test_dir, '49 introspection') + testdir = os.path.join(self.unit_test_dir, '52 introspection') self.init(testdir) infodir = os.path.join(self.builddir, 'meson-info') self.assertPathExists(infodir) @@ -3180,7 +3172,8 @@ recommended as it is not supported on some platforms''') ('name', str), ('id', str), ('type', str), - ('filename', str), + ('defined_in', str), + ('filename', list), ('build_by_default', bool), ('target_sources', list), ('installed', bool), @@ -3231,7 +3224,9 @@ recommended as it is not supported on some platforms''') self.assertDictEqual(buildopts_to_find, {}) # Check buildsystem_files - self.assertPathListEqual(res['buildsystem_files'], ['meson.build', 'sharedlib/meson.build', 'staticlib/meson.build']) + bs_files = ['meson.build', 'sharedlib/meson.build', 'staticlib/meson.build'] + bs_files = [os.path.join(testdir, x) for x in bs_files] + self.assertPathListEqual(res['buildsystem_files'], bs_files) # Check dependencies dependencies_to_find = ['threads'] @@ -3246,11 +3241,11 @@ recommended as it is not supported on some platforms''') # Check targets targets_to_find = { - 'sharedTestLib': ('shared library', True, False), - 'staticTestLib': ('static library', True, False), - 'test1': ('executable', True, True), - 'test2': ('executable', True, False), - 'test3': ('executable', True, False), + 'sharedTestLib': ('shared library', True, False, 'sharedlib/meson.build'), + 'staticTestLib': ('static library', True, False, 'staticlib/meson.build'), + 'test1': ('executable', True, True, 'meson.build'), + 'test2': ('executable', True, False, 'meson.build'), + 'test3': ('executable', True, False, 'meson.build'), } for i in res['targets']: assertKeyTypes(targets_typelist, i) @@ -3259,13 +3254,14 @@ recommended as it is not supported on some platforms''') self.assertEqual(i['type'], tgt[0]) self.assertEqual(i['build_by_default'], tgt[1]) self.assertEqual(i['installed'], tgt[2]) + self.assertPathEqual(i['defined_in'], os.path.join(testdir, tgt[3])) targets_to_find.pop(i['name'], None) for j in i['target_sources']: assertKeyTypes(targets_sources_typelist, j) self.assertDictEqual(targets_to_find, {}) def test_introspect_file_dump_equals_all(self): - testdir = os.path.join(self.unit_test_dir, '49 introspection') + testdir = os.path.join(self.unit_test_dir, '52 introspection') self.init(testdir) res_all = self.introspect('--all') res_file = {} @@ -3291,8 +3287,22 @@ recommended as it is not supported on some platforms''') self.assertEqual(res_all, res_file) + def test_introspect_meson_info(self): + testdir = os.path.join(self.unit_test_dir, '52 introspection') + introfile = os.path.join(self.builddir, 'meson-info', 'meson-info.json') + self.init(testdir) + self.assertPathExists(introfile) + with open(introfile, 'r') as fp: + res1 = json.load(fp) + + for i in ['meson_version', 'directories', 'introspection', 'build_files_updated', 'error']: + self.assertIn(i, res1) + + self.assertEqual(res1['error'], False) + self.assertEqual(res1['build_files_updated'], True) + def test_introspect_config_update(self): - testdir = os.path.join(self.unit_test_dir, '49 introspection') + testdir = os.path.join(self.unit_test_dir, '52 introspection') introfile = os.path.join(self.builddir, 
'meson-info', 'intro-buildoptions.json') self.init(testdir) self.assertPathExists(introfile) @@ -3461,7 +3471,7 @@ class FailureTests(BasePlatformTests): code = '''zlib_dep = dependency('zlib', required : false) zlib_dep.get_configtool_variable('foo') ''' - self.assertMesonRaises(code, "'zlib' is not a config-tool dependency") + self.assertMesonRaises(code, ".* is not a config-tool dependency") code = '''zlib_dep = dependency('zlib', required : false) dep = declare_dependency(dependencies : zlib_dep) dep.get_pkgconfig_variable('foo') ''' @@ -3492,23 +3502,22 @@ class FailureTests(BasePlatformTests): Test that: 1. The correct message is outputted when a not-required dep is not found and the fallback subproject is also not found. - 2. A not-found not-required dep with a fallback subproject outputs the + 2. A not-required fallback dependency is not found because the + subproject failed to parse. + 3. A not-found not-required dep with a fallback subproject outputs the correct message when the fallback subproject is found but the variable inside it is not. - 3. A fallback dependency is found from the subproject parsed in (2) - 4. A not-required fallback dependency is not found because the - subproject failed to parse. + 4. A fallback dependency is found from the subproject parsed in (3) + 5. The correct message is outputted when the .wrap file is missing for + a sub-subproject. ''' tdir = os.path.join(self.unit_test_dir, '20 subproj dep variables') out = self.init(tdir, inprocess=True) - self.assertRegex(out, r"Couldn't use fallback subproject " - "in.*subprojects.*nosubproj.*for the dependency.*somedep") - self.assertRegex(out, r'Dependency.*somenotfounddep.*from subproject.*' - 'subprojects.*somesubproj.*found:.*NO') - self.assertRegex(out, r'Dependency.*zlibproxy.*from subproject.*' - 'subprojects.*somesubproj.*found:.*YES.*(cached)') - self.assertRegex(out, r'Couldn\'t use fallback subproject in ' - '.*subprojects.*failingsubproj.*for the dependency.*somedep') + self.assertRegex(out, r"Subproject directory not found and .*nosubproj.wrap.* file not found") + self.assertRegex(out, r'Function does not take positional arguments.') + self.assertRegex(out, r'WARNING:.* Dependency .*subsubproject.* not found but it is available in a sub-subproject.') + self.assertRegex(out, r'Subproject directory not found and .*subsubproject.wrap.* file not found') + self.assertRegex(out, r'Dependency .*zlibproxy.* from subproject .*subprojects.*somesubproj.* found: .*YES.*') def test_exception_exit_status(self): ''' @@ -4403,7 +4412,7 @@ class LinuxlikeTests(BasePlatformTests): break self.assertIsInstance(docbook_target, dict) ifile = self.introspect(['--target-files', 'generated-gdbus-docbook@cus'])[0] - self.assertListEqual([t['filename']], ['gdbus/generated-gdbus-doc-' + os.path.basename(ifile)]) + self.assertListEqual(t['filename'], [os.path.join(self.builddir, 'gdbus/generated-gdbus-doc-' + os.path.basename(ifile))]) def test_build_rpath(self): if is_cygwin(): diff --git a/test cases/common/14 configure file/meson.build b/test cases/common/14 configure file/meson.build index 53b06f3..982ae2a 100644 --- a/test cases/common/14 configure file/meson.build +++ b/test cases/common/14 configure file/meson.build @@ -141,6 +141,13 @@ cfile = configure_file(input : 'config.h.in', install_dir : '', configuration : conf) +# test install_dir with install: false +cfile = configure_file(input : 'config.h.in', + output : 'do_not_get_installed_in_install_dir.h', + install : false, + install_dir : 'share/appdir', + configuration 
diff --git a/test cases/common/170 dependency factory/meson.build b/test cases/common/170 dependency factory/meson.build
index 1b8ed17..2de0f0e 100644
--- a/test cases/common/170 dependency factory/meson.build
+++ b/test cases/common/170 dependency factory/meson.build
@@ -1,4 +1,4 @@
-project('dependency factory', meson_version : '>=0.40')
+project('dependency factory', 'c', meson_version : '>=0.40')
 
 dep = dependency('gl', method: 'pkg-config', required: false)
 if dep.found() and dep.type_name() == 'pkgconfig'
diff --git a/test cases/common/97 test workdir/meson.build b/test cases/common/97 test workdir/meson.build
index 1323a17..a8290f7 100644
--- a/test cases/common/97 test workdir/meson.build
+++ b/test cases/common/97 test workdir/meson.build
@@ -4,3 +4,5 @@ exe = executable('opener', 'opener.c')
 
 test('basic', exe, workdir : meson.source_root())
 test('shouldfail', exe, should_fail : true)
+
+subdir('subdir')
diff --git a/test cases/common/97 test workdir/subdir/checker.py b/test cases/common/97 test workdir/subdir/checker.py
new file mode 100755
index 0000000..66e287d
--- /dev/null
+++ b/test cases/common/97 test workdir/subdir/checker.py
@@ -0,0 +1,5 @@
+#!/usr/bin/env python3
+
+import sys
+
+data = open(sys.argv[1], 'rb').read()
diff --git a/test cases/common/97 test workdir/subdir/meson.build b/test cases/common/97 test workdir/subdir/meson.build
new file mode 100644
index 0000000..687a1cf
--- /dev/null
+++ b/test cases/common/97 test workdir/subdir/meson.build
@@ -0,0 +1,4 @@
+exe2 = executable('dummy', '../opener.c')
+test('subdir', find_program('checker.py'),
+     workdir : meson.source_root(),
+     args: [exe2])
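The new checker.py helper simply reads the file it is given; the 'subdir' test passes the freshly built dummy executable as the argument, and an uncaught exception from open() is what makes the test fail. A slightly more explicit variant (hypothetical, not part of this patch) would report the problem itself:

    #!/usr/bin/env python3
    # Hypothetical, more explicit take on checker.py: exit non-zero with a
    # message instead of relying on an uncaught exception from open().
    import sys

    try:
        with open(sys.argv[1], 'rb') as f:
            data = f.read()
    except OSError as e:
        sys.exit('cannot read {}: {}'.format(sys.argv[1], e))
    print('read {} bytes from {}'.format(len(data), sys.argv[1]))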
diff --git a/test cases/unit/51 introspect buildoptions/subprojects/evilFile.txt b/test cases/failing/91 invalid configure file/input
index e69de29..e69de29 100644
--- a/test cases/unit/51 introspect buildoptions/subprojects/evilFile.txt
+++ b/test cases/failing/91 invalid configure file/input
diff --git a/test cases/failing/91 invalid configure file/meson.build b/test cases/failing/91 invalid configure file/meson.build
new file mode 100644
index 0000000..08eca2b
--- /dev/null
+++ b/test cases/failing/91 invalid configure file/meson.build
@@ -0,0 +1,9 @@
+project('invalid configure file')
+
+configure_file(
+  configuration : configuration_data(),
+  input : 'input',
+  output : 'output',
+  install_dir : '',
+  install : true,
+)
diff --git a/test cases/unit/20 subproj dep variables/meson.build b/test cases/unit/20 subproj dep variables/meson.build
index f1622f9..954463b 100644
--- a/test cases/unit/20 subproj dep variables/meson.build
+++ b/test cases/unit/20 subproj dep variables/meson.build
@@ -11,3 +11,6 @@ dependency('somenotfounddep', required : false,
 
 dependency('zlibproxy', required : true,
            fallback : ['somesubproj', 'zlibproxy_dep'])
+
+dependency('somedep', required : false,
+           fallback : ['nestedsubproj', 'nestedsubproj_dep'])
diff --git a/test cases/unit/20 subproj dep variables/subprojects/nestedsubproj/meson.build b/test cases/unit/20 subproj dep variables/subprojects/nestedsubproj/meson.build
new file mode 100644
index 0000000..4bf549e
--- /dev/null
+++ b/test cases/unit/20 subproj dep variables/subprojects/nestedsubproj/meson.build
@@ -0,0 +1,3 @@
+project('dep', 'c')
+
+subproject('subsubproject')
diff --git a/test cases/unit/20 subproj dep variables/subprojects/nestedsubproj/subprojects/subsubproject.wrap b/test cases/unit/20 subproj dep variables/subprojects/nestedsubproj/subprojects/subsubproject.wrap
new file mode 100644
index 0000000..11b2178
--- /dev/null
+++ b/test cases/unit/20 subproj dep variables/subprojects/nestedsubproj/subprojects/subsubproject.wrap
@@ -0,0 +1 @@
+[wrap-file]
diff --git a/test cases/unit/51 introspect buildoptions/subprojects/projectBad/meson.build b/test cases/unit/51 introspect buildoptions/subprojects/projectBad/meson.build
index 500c1b9..4d0aeeb 100644
--- a/test cases/unit/51 introspect buildoptions/subprojects/projectBad/meson.build
+++ b/test cases/unit/51 introspect buildoptions/subprojects/projectBad/meson.build
@@ -6,4 +6,4 @@ thtr
 e
 tb
 tbqebt
-tbqebttrtt
\ No newline at end of file
+tbqebttrtt
diff --git a/test cases/unit/49 introspection/meson.build b/test cases/unit/52 introspection/meson.build
index 14d880b..14d880b 100644
--- a/test cases/unit/49 introspection/meson.build
+++ b/test cases/unit/52 introspection/meson.build
diff --git a/test cases/unit/49 introspection/sharedlib/meson.build b/test cases/unit/52 introspection/sharedlib/meson.build
index 3de3493..3de3493 100644
--- a/test cases/unit/49 introspection/sharedlib/meson.build
+++ b/test cases/unit/52 introspection/sharedlib/meson.build
diff --git a/test cases/unit/49 introspection/sharedlib/shared.cpp b/test cases/unit/52 introspection/sharedlib/shared.cpp
index 5030ab7..5030ab7 100644
--- a/test cases/unit/49 introspection/sharedlib/shared.cpp
+++ b/test cases/unit/52 introspection/sharedlib/shared.cpp
diff --git a/test cases/unit/49 introspection/sharedlib/shared.hpp b/test cases/unit/52 introspection/sharedlib/shared.hpp
index dc9b2da..dc9b2da 100644
--- a/test cases/unit/49 introspection/sharedlib/shared.hpp
+++ b/test cases/unit/52 introspection/sharedlib/shared.hpp
diff --git a/test cases/unit/49 introspection/staticlib/meson.build b/test cases/unit/52 introspection/staticlib/meson.build
index b1b9afe..b1b9afe 100644
--- a/test cases/unit/49 introspection/staticlib/meson.build
+++ b/test cases/unit/52 introspection/staticlib/meson.build
diff --git a/test cases/unit/49 introspection/staticlib/static.c b/test cases/unit/52 introspection/staticlib/static.c
index 37ebc0d..37ebc0d 100644
--- a/test cases/unit/49 introspection/staticlib/static.c
+++ b/test cases/unit/52 introspection/staticlib/static.c
diff --git a/test cases/unit/49 introspection/staticlib/static.h b/test cases/unit/52 introspection/staticlib/static.h
index 506784e..506784e 100644
--- a/test cases/unit/49 introspection/staticlib/static.h
+++ b/test cases/unit/52 introspection/staticlib/static.h
diff --git a/test cases/unit/49 introspection/t1.cpp b/test cases/unit/52 introspection/t1.cpp
index 2bcaab8..2bcaab8 100644
--- a/test cases/unit/49 introspection/t1.cpp
+++ b/test cases/unit/52 introspection/t1.cpp
diff --git a/test cases/unit/49 introspection/t2.cpp b/test cases/unit/52 introspection/t2.cpp
index fee5097..fee5097 100644
--- a/test cases/unit/49 introspection/t2.cpp
+++ b/test cases/unit/52 introspection/t2.cpp
diff --git a/test cases/unit/49 introspection/t3.cpp b/test cases/unit/52 introspection/t3.cpp
index 8a906e0..8a906e0 100644
--- a/test cases/unit/49 introspection/t3.cpp
+++ b/test cases/unit/52 introspection/t3.cpp
diff --git a/test cases/unit/51 introspect buildoptions/c_compiler.py b/test cases/unit/53 introspect buildoptions/c_compiler.py
index c7241e7..c7241e7 100644
--- a/test cases/unit/51 introspect buildoptions/c_compiler.py
+++ b/test cases/unit/53 introspect buildoptions/c_compiler.py
diff --git a/test cases/unit/51 introspect buildoptions/meson.build b/test cases/unit/53 introspect buildoptions/meson.build
index e94ef61..e94ef61 100644
--- a/test cases/unit/51 introspect buildoptions/meson.build
+++ b/test cases/unit/53 introspect buildoptions/meson.build
diff --git a/test cases/unit/51 introspect buildoptions/meson_options.txt b/test cases/unit/53 introspect buildoptions/meson_options.txt
index 61f9a8d..61f9a8d 100644
--- a/test cases/unit/51 introspect buildoptions/meson_options.txt
+++ b/test cases/unit/53 introspect buildoptions/meson_options.txt
diff --git a/test cases/unit/53 introspect buildoptions/subprojects/evilFile.txt b/test cases/unit/53 introspect buildoptions/subprojects/evilFile.txt
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/test cases/unit/53 introspect buildoptions/subprojects/evilFile.txt
diff --git a/test cases/unit/51 introspect buildoptions/subprojects/projectA/meson.build b/test cases/unit/53 introspect buildoptions/subprojects/projectA/meson.build
index 1ab9ee8..1ab9ee8 100644
--- a/test cases/unit/51 introspect buildoptions/subprojects/projectA/meson.build
+++ b/test cases/unit/53 introspect buildoptions/subprojects/projectA/meson.build
diff --git a/test cases/unit/51 introspect buildoptions/subprojects/projectA/meson_options.txt b/test cases/unit/53 introspect buildoptions/subprojects/projectA/meson_options.txt
index fa77f95..fa77f95 100644
--- a/test cases/unit/51 introspect buildoptions/subprojects/projectA/meson_options.txt
+++ b/test cases/unit/53 introspect buildoptions/subprojects/projectA/meson_options.txt
diff --git a/test cases/unit/53 introspect buildoptions/subprojects/projectBad/meson.build b/test cases/unit/53 introspect buildoptions/subprojects/projectBad/meson.build
new file mode 100644
index 0000000..500c1b9
--- /dev/null
+++ b/test cases/unit/53 introspect buildoptions/subprojects/projectBad/meson.build
@@ -0,0 +1,9 @@
+pfggggaergaeg(sdgrgjgn)aga
+
+rgqeh
+th
+thtr
+e
+tb
+tbqebt
+tbqebttrtt
\ No newline at end of file
diff --git a/test cases/unit/53 introspect buildoptions/subprojects/projectBad/meson_options.txt b/test cases/unit/53 introspect buildoptions/subprojects/projectBad/meson_options.txt
new file mode 100644
index 0000000..f15d352
--- /dev/null
+++ b/test cases/unit/53 introspect buildoptions/subprojects/projectBad/meson_options.txt
@@ -0,0 +1 @@
+option('should_not_appear', type: 'integer', min: 0, value: 125)
diff --git a/tools/cmake2meson.py b/tools/cmake2meson.py
index 2c974fa..9dc9f6e 100755
--- a/tools/cmake2meson.py
+++ b/tools/cmake2meson.py
@@ -14,9 +14,12 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import sys, os
+from typing import List
+from pathlib import Path
+import sys
 import re
 
+
 class Token:
     def __init__(self, tid, value):
         self.tid = tid
@@ -83,7 +86,7 @@ class Lexer:
         raise RuntimeError('Lexer got confused line %d column %d' % (lineno, col))
 
 class Parser:
-    def __init__(self, code):
+    def __init__(self, code: str):
         self.stream = Lexer().lex(code)
         self.getsym()
 
@@ -140,13 +143,13 @@ class Converter:
                  'enable_testing': True,
                  'include': True}
 
-    def __init__(self, cmake_root):
-        self.cmake_root = cmake_root
+    def __init__(self, cmake_root: str):
+        self.cmake_root = Path(cmake_root).expanduser()
         self.indent_unit = ' '
         self.indent_level = 0
-        self.options = []
+        self.options = []  # type: List[tuple]
 
-    def convert_args(self, args, as_array=True):
+    def convert_args(self, args: List[Token], as_array: bool = True):
        res = []
        if as_array:
            start = '['
@@ -229,7 +232,18 @@ class Converter:
             line = '%s = %s\n' % (varname, self.convert_args(t.args[1:]))
         elif t.name == 'if':
             postincrement = 1
-            line = 'if %s' % self.convert_args(t.args, False)
+            try:
+                line = 'if %s' % self.convert_args(t.args, False)
+            except AttributeError:  # complex if statements
+                line = t.name
+                for arg in t.args:
+                    if isinstance(arg, Token):
+                        line += ' ' + arg.value
+                    elif isinstance(arg, list):
+                        line += ' ('
+                        for a in arg:
+                            line += ' ' + a.value
+                        line += ' )'
         elif t.name == 'elseif':
             preincrement = -1
             postincrement = 1
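The try/except fallback above gives up on convert_args() for nested conditions and emits the raw token values instead, wrapping any nested token list in parentheses. A self-contained sketch of just that fallback (the Token class has the same shape as in cmake2meson.py; the sample input is made up):

    class Token:
        def __init__(self, tid, value):
            self.tid = tid
            self.value = value

    def flatten_condition(name, args):
        # Mirrors the AttributeError fallback: raw token values, with any
        # nested token list wrapped in parentheses.
        line = name
        for arg in args:
            if isinstance(arg, Token):
                line += ' ' + arg.value
            elif isinstance(arg, list):
                line += ' ('
                for a in arg:
                    line += ' ' + a.value
                line += ' )'
        return line

    print(flatten_condition('if', [Token('id', 'NOT'),
                                   [Token('id', 'FOO'), Token('id', 'AND'), Token('id', 'BAR')]]))
    # -> if NOT ( FOO AND BAR )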
@@ -251,32 +265,32 @@
             outfile.write('\n')
         self.indent_level += postincrement
 
-    def convert(self, subdir=''):
-        if subdir == '':
+    def convert(self, subdir: Path = None):
+        if not subdir:
             subdir = self.cmake_root
-        cfile = os.path.join(subdir, 'CMakeLists.txt')
+        cfile = Path(subdir).expanduser() / 'CMakeLists.txt'
         try:
-            with open(cfile) as f:
+            with cfile.open() as f:
                 cmakecode = f.read()
         except FileNotFoundError:
-            print('\nWarning: No CMakeLists.txt in', subdir, '\n')
+            print('\nWarning: No CMakeLists.txt in', subdir, '\n', file=sys.stderr)
             return
         p = Parser(cmakecode)
-        with open(os.path.join(subdir, 'meson.build'), 'w') as outfile:
+        with (subdir / 'meson.build').open('w') as outfile:
             for t in p.parse():
                 if t.name == 'add_subdirectory':
                     # print('\nRecursing to subdir',
-                    #       os.path.join(self.cmake_root, t.args[0].value),
+                    #       self.cmake_root / t.args[0].value,
                     #       '\n')
-                    self.convert(os.path.join(subdir, t.args[0].value))
+                    self.convert(subdir / t.args[0].value)
                     # print('\nReturning to', self.cmake_root, '\n')
                 self.write_entry(outfile, t)
         if subdir == self.cmake_root and len(self.options) > 0:
             self.write_options()
 
     def write_options(self):
-        filename = os.path.join(self.cmake_root, 'meson_options.txt')
-        with open(filename, 'w') as optfile:
+        filename = self.cmake_root / 'meson_options.txt'
+        with filename.open('w') as optfile:
             for o in self.options:
                 (optname, description, default) = o
                 if default is None:
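With these changes the converter works in terms of pathlib.Path objects throughout, so recursing into add_subdirectory() calls becomes a plain path join. A rough usage sketch (the project path is made up, and it assumes tools/cmake2meson.py is importable, e.g. with the tools directory on sys.path):

    from cmake2meson import Converter  # tools/cmake2meson.py

    # '~' is expanded by the new Path(...).expanduser() call in __init__.
    conv = Converter('~/src/some-cmake-project')
    # Writes a meson.build next to every CMakeLists.txt it finds, and a
    # top-level meson_options.txt if any options were collected.
    conv.convert()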